| blob_id (string, 40 chars) | directory_id (string, 40 chars) | path (string, 3-616 chars) | content_id (string, 40 chars) | detected_licenses (list, 0-112 entries) | license_type (string, 2 classes) | repo_name (string, 5-115 chars) | snapshot_id (string, 40 chars) | revision_id (string, 40 chars) | branch_name (string, 777 classes) | visit_date (timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06) | github_id (int64, 4.92k to 681M, nullable) | star_events_count (int64, 0 to 209k) | fork_events_count (int64, 0 to 110k) | gha_license_id (string, 22 classes) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable) | gha_language (string, 149 classes) | src_encoding (string, 26 classes) | language (string, 1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 3 to 10.2M) | extension (string, 188 classes) | content (string, 3 to 10.2M chars) | authors (list, 1 entry) | author_id (string, 1-132 chars) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dd23f81f0523a7ea828de9f8aa5f5cc5ce00d2d7
|
c9952dcac5658940508ddc139344a7243a591c87
|
/tests/lab18/test_ch018_t01_why_use_classes.py
|
89a74ccf8952816be596452943f8010beaf6bc90
|
[] |
no_license
|
wongcyrus/ite3101_introduction_to_programming
|
5da1c15212528423b3df91997327fe148abef4de
|
7cd76d0861d5355db5a6e2e171735bee2e78f829
|
refs/heads/master
| 2023-08-31T17:27:06.193049
| 2023-08-21T08:30:26
| 2023-08-21T08:30:26
| 136,574,036
| 3
| 2
| null | 2023-08-21T08:30:28
| 2018-06-08T06:06:49
|
Python
|
UTF-8
|
Python
| false
| false
| 1,037
|
py
|
import unittest
from tests.unit_test_helper import is_answer
class TestOutput(unittest.TestCase):
def setUp(self):
if is_answer:
from lab.lab18.ch018_t01_why_use_classes_ans import Fruit
else:
from lab.lab18.ch018_t01_why_use_classes import Fruit
self.fruit = Fruit("lemon", "yellow", "sour", False)
def test_member(self):
self.assertEqual("lemon", self.fruit.name)
self.assertEqual("yellow", self.fruit.color)
self.assertEqual("sour", self.fruit.flavor)
self.assertFalse(self.fruit.poisonous)
def test_create_instance(self):
if is_answer:
from lab.lab18.ch018_t01_why_use_classes_ans import lemon
else:
from lab.lab18.ch018_t01_why_use_classes import lemon
self.assertEqual("lemon", lemon.name)
self.assertEqual("yellow", lemon.color)
self.assertEqual("sour", lemon.flavor)
self.assertFalse(lemon.poisonous)
if __name__ == '__main__':
unittest.main()
|
[
"cywong@vtc.edu.hk"
] |
cywong@vtc.edu.hk
|
537dd076c49ad2ccafc435e3f66ed76126ba6de6
|
b1ddcf4bac9ca603a7a2333912eb29da8bf2cb7b
|
/modelViewset/api/views.py
|
7a9e893c1dd960e695c5c68e31de55adba80160d
|
[] |
no_license
|
sankethalake/django_practice
|
e9477ae0beee4923cd6758cc6d37517ea5979610
|
9877304f0c6415ae8979e5cc13a49559155fdd9d
|
refs/heads/main
| 2023-07-07T07:07:35.598657
| 2021-08-14T06:26:23
| 2021-08-14T06:26:23
| 389,917,128
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 243
|
py
|
from .models import Student
from .serializers import StudentSerializer
from rest_framework import viewsets
class StudentModelViewset(viewsets.ModelViewSet):
queryset = Student.objects.all()
serializer_class = StudentSerializer
|
[
"sankethalake@gmail.com"
] |
sankethalake@gmail.com
|
cc47947af7cebae7fdc2b5543d4508a2c820c757
|
cf74a48db45d0fa8c9ae58931a9368672d07fa19
|
/utils/zgrep.py
|
b587690671719faa84d712598846558189a92885
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
razikallayi/NewsBlur
|
fdb7549d73dfd6765e2cf2e4007f1b9cfb39002f
|
a266d9f585400c506fa9727796a5dddba0e69ffb
|
refs/heads/master
| 2021-01-18T08:12:02.738442
| 2015-05-27T00:58:45
| 2015-05-27T00:58:45
| 36,501,810
| 1
| 0
| null | 2015-05-29T12:01:58
| 2015-05-29T12:01:56
| null |
UTF-8
|
Python
| false
| false
| 3,581
|
py
|
#!/usr/bin/env python
import os
import time
import select
import subprocess
import sys
from optparse import OptionParser
from requests.exceptions import ConnectionError
sys.path.insert(0, '/srv/newsblur')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
import fabfile
NEWSBLUR_USERNAME = 'sclay'
IGNORE_HOSTS = [
'push',
]
def main(role="app", role2="work", command=None, path=None):
delay = 1
while True:
try:
streams = create_streams_for_roles(role, role2, command=command, path=path)
print " --- Loading %s App Log Tails ---" % len(streams)
read_streams(streams)
except UnicodeDecodeError: # unexpected end of data
print " --- Lost connections - Retrying... ---"
time.sleep(1)
continue
except ConnectionError:
print " --- Retrying in %s seconds... ---" % delay
time.sleep(delay)
delay += 1
continue
except KeyboardInterrupt:
print " --- End of Logging ---"
break
def create_streams_for_roles(role, role2, command=None, path=None):
streams = list()
hosts = fabfile.do(split=True)
found = set()
if not path:
path = "/srv/newsblur/logs/newsblur.log"
if not command:
command = "tail -f"
for hostname in (hosts[role] + hosts[role2]):
if isinstance(hostname, dict):
address = hostname['address']
hostname = hostname['name']
elif ':' in hostname:
hostname, address = hostname.split(':', 1)
elif isinstance(hostname, tuple):
hostname, address = hostname[0], hostname[1]
else:
address = hostname
if any(h in hostname for h in IGNORE_HOSTS): continue
if hostname in found: continue
if 'ec2' in hostname:
s = subprocess.Popen(["ssh",
"-i", os.path.expanduser(os.path.join(fabfile.env.SECRETS_PATH,
"keys/ec2.pem")),
address, "%s %s" % (command, path)], stdout=subprocess.PIPE)
else:
s = subprocess.Popen(["ssh", "-l", NEWSBLUR_USERNAME,
"-i", os.path.expanduser(os.path.join(fabfile.env.SECRETS_PATH,
"keys/newsblur.key")),
address, "%s %s" % (command, path)], stdout=subprocess.PIPE)
s.name = hostname
streams.append(s)
found.add(hostname)
return streams
def read_streams(streams):
while True:
r, _, _ = select.select(
[stream.stdout.fileno() for stream in streams], [], [])
for fileno in r:
for stream in streams:
if stream.stdout.fileno() != fileno:
continue
data = os.read(fileno, 4096)
if not data:
streams.remove(stream)
break
combination_message = "[%-6s] %s" % (stream.name[:6], data)
sys.stdout.write(combination_message)
break
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-f", "--find", dest="find")
parser.add_option("-p", "--path", dest="path")
(options, args) = parser.parse_args()
path = options.path
find = options.find
command = "zgrep \"%s\"" % find
main(role="app", role2="dev", command=command, path=path)
|
[
"samuel@ofbrooklyn.com"
] |
samuel@ofbrooklyn.com
|
e2328cbb036a2a53d77b6c6cc430606a33cc18a4
|
c9a4e88111d05cf9db399eba3ae83ddb3b0ad2da
|
/myapp/models.py
|
801590a2819433ea0630630a9b407a91b5cbd1d0
|
[] |
no_license
|
Django-TOPS/07JanPython
|
7861d9a515e9da951b14f8caa5b1bb0578d99557
|
b101e7b2b457250153aedb6a95354e10824ecec5
|
refs/heads/master
| 2023-04-12T17:08:44.644228
| 2021-05-20T03:30:10
| 2021-05-20T03:30:10
| 369,140,333
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 574
|
py
|
from django.db import models
# Create your models here.
class signup(models.Model):
firstname=models.CharField(max_length=20)
lastname=models.CharField(max_length=20)
username=models.EmailField()
password=models.CharField(max_length=20)
city=models.CharField(max_length=20)
state=models.CharField(max_length=20)
zipcode=models.IntegerField()
class notes(models.Model):
title=models.CharField(max_length=100)
category=models.CharField(max_length=100)
myfiles=models.FileField(upload_to="FileUpload")
comments=models.TextField()
|
[
"sanketiosonline@gmail.com"
] |
sanketiosonline@gmail.com
|
a2895c375cdca0634a6d85a52cc1838a3c58292c
|
092f2dd962c9c393904fd9886a726c611f8aa811
|
/palletsprojects_tutorial/tests/test_blog.py
|
dc6550b7ef96d8db0bc0fc4e69758814b26112df
|
[] |
no_license
|
iAnafem/flask_tutorial_projects
|
b5d2b4f7e2eb68ed54de3938d9006889c9fe76fb
|
c0f77fe4056b8f7c4ab16bb9cbc75f4fe90d4bde
|
refs/heads/master
| 2022-12-13T07:13:56.415457
| 2019-08-28T20:14:35
| 2019-08-28T20:16:11
| 200,421,166
| 0
| 0
| null | 2022-12-08T06:00:55
| 2019-08-03T21:09:25
|
Python
|
UTF-8
|
Python
| false
| false
| 2,609
|
py
|
import pytest
from flaskr.db import get_db
def test_index(client, auth):
response = client.get('/')
assert b'Log In' in response.data
assert b'Register' in response.data
auth.login()
response = client.get('/')
assert b'Log Out' in response.data
assert b'test title' in response.data
assert b'by test on 2018-01-01' in response.data
assert b'test\nbody' in response.data
assert b'href="/1/update"' in response.data
@pytest.mark.parametrize('path', (
'/create',
'/1/update',
'/1/delete',
))
def test_login_required(client, path):
response = client.post(path)
assert response.headers['Location'] == 'http://localhost/auth/login'
def test_author_required(app, client, auth):
# change the post author to another user
with app.app_context():
db = get_db()
db.execute('UPDATE post SET author_id = 2 WHERE id = 1')
db.commit()
auth.login()
# current user can't modify other user's post
assert client.post('/1/update').status_code == 403
assert client.post('/1/delete').status_code == 403
# current user doesn't see edit link
assert b'href="/1/update"' not in client.get('/').data
@pytest.mark.parametrize('path', (
'/2/update',
'/2/delete',
))
def test_exists_required(client, auth, path):
auth.login()
assert client.post(path).status_code == 404
def test_create(client, auth, app):
auth.login()
assert client.get('/create').status_code == 200
client.post('create', data={'title': 'created', 'body': ''})
with app.app_context():
db = get_db()
count = db.execute('SELECT COUNT(id) FROM post').fetchone()[0]
assert count == 2
def test_update(client, auth, app):
auth.login()
assert client.get('/1/update').status_code == 200
client.post('/1/update', data={'title': 'updated', 'body': ''})
with app.app_context():
db = get_db()
post = db.execute('SELECT * FROM post WHERE id = 1').fetchone()
assert post['title'] == 'updated'
@pytest.mark.parametrize('path', (
'/create',
'/1/update',
))
def test_create_update_validate(client, auth, path):
auth.login()
response = client.post(path, data={'title': '', 'body': ''})
assert b'Title is required.' in response.data
def test_delete(client, auth, app):
auth.login()
response = client.post('/1/delete')
assert response.headers['Location'] == 'http://localhost/'
with app.app_context():
db = get_db()
post = db.execute('SELECT * FROM post WHERE id = 1').fetchone()
assert post is None
|
[
"DPronkin@mostro.ru"
] |
DPronkin@mostro.ru
|
d7f6722540037a29c7c6722f0fca5d042b7c0552
|
45d6b7739ef7e61779d778b16e2d2cb9b92a08c0
|
/test/run_in_parallel-200PU-grow/SUB-Analyzer-44.py
|
153296b77ea28b2b6bcea87c4d79a9a5af001630
|
[] |
no_license
|
isobelojalvo/phase2L1TauAnalyzer
|
40b545baec97bf287a8d8ab26bea70546bf9f6f8
|
98ef6d31a523698ba0de48763cadee1d5b2ce695
|
refs/heads/master
| 2021-01-22T08:38:17.965156
| 2019-07-25T17:25:51
| 2019-07-25T17:25:51
| 92,623,686
| 0
| 1
| null | 2019-07-23T19:43:55
| 2017-05-27T20:56:25
|
Python
|
UTF-8
|
Python
| false
| false
| 6,667
|
py
|
# Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: step2 --python_filename=rerun_step2_L1_onMCL1_FEVTHLTDEBUG.py --no_exec -s L1 --datatier GEN-SIM-DIGI-RAW -n 1 --era Phase2_timing --eventcontent FEVTDEBUGHLT --filein file:/afs/cern.ch/user/r/rekovic/release/CMSSW_9_3_2/src/step2_DIGI_PU200_10ev.root --conditions 93X_upgrade2023_realistic_v2 --beamspot HLLHC14TeV --geometry Extended2023D17 --fileout file:step2_ZEE_PU200_1ev_rerun-L1-L1Ntuple.root --customise=L1Trigger/L1TNtuples/customiseL1Ntuple.L1NtupleEMU
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras
process = cms.Process('L1',eras.Phase2_trigger)
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('SimGeneral.MixingModule.mixNoPU_cfi')
process.load('Configuration.Geometry.GeometryExtended2023D17Reco_cff')
process.load('Configuration.StandardSequences.MagneticField_cff')
process.load('Configuration.StandardSequences.SimL1Emulator_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.load('L1Trigger.TrackFindingTracklet.L1TrackletTracks_cff')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(4000)
)
# Input source
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring(
"root://cmsxrootd.fnal.gov///store/relval/CMSSW_9_3_7/RelValZTT_14TeV/MINIAODSIM/PU25ns_93X_upgrade2023_realistic_v5_2023D17PU200-v1/10000/6CE39BE9-EA2D-E811-8FDA-0242AC130002.root"
),
inputCommands = cms.untracked.vstring("keep *",
"drop l1tHGCalTowerMapBXVector_hgcalTriggerPrimitiveDigiProducer_towerMap_HLT",
"drop l1tEMTFHit2016Extras_simEmtfDigis_CSC_HLT",
"drop l1tEMTFHit2016Extras_simEmtfDigis_RPC_HLT",
"drop l1tEMTFHit2016s_simEmtfDigis__HLT",
"drop l1tEMTFTrack2016Extras_simEmtfDigis__HLT",
"drop l1tEMTFTrack2016s_simEmtfDigis__HLT")
#skipEvents = cms.untracked.uint32(80)
)
process.source.secondaryFileNames = cms.untracked.vstring(
"/store/relval/CMSSW_9_3_7/RelValZTT_14TeV/GEN-SIM-DIGI-RAW/PU25ns_93X_upgrade2023_realistic_v5_2023D17PU200-v1/10000/FC056F35-2E2D-E811-BE3A-0242AC130002.root")
process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange("1:46")
process.options = cms.untracked.PSet(
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
annotation = cms.untracked.string('step2 nevts:1'),
name = cms.untracked.string('Applications'),
version = cms.untracked.string('$Revision: 1.19 $')
)
# Output definition
process.FEVTDEBUGHLToutput = cms.OutputModule("PoolOutputModule",
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string('GEN-SIM-DIGI-RAW'),
filterName = cms.untracked.string('')
),
fileName = cms.untracked.string('file:test_reprocess.root'),
splitLevel = cms.untracked.int32(0)
)
# Additional output definition
# Other statements
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '100X_upgrade2023_realistic_v1', '')
process.load('SimCalorimetry.HcalTrigPrimProducers.hcaltpdigi_cff')
process.load('CalibCalorimetry.CaloTPG.CaloTPGTranscoder_cfi')
process.load('L1Trigger.L1THGCal.hgcalTriggerPrimitives_cff')
process.hgcl1tpg_step = cms.Path(process.hgcalTriggerPrimitives)
process.load('SimCalorimetry.EcalEBTrigPrimProducers.ecalEBTriggerPrimitiveDigis_cff')
process.EcalEBtp_step = cms.Path(process.simEcalEBTriggerPrimitiveDigis)
process.L1TrackTrigger_step = cms.Path(process.L1TrackletTracksWithAssociators)
process.VertexProducer.l1TracksInputTag = cms.InputTag("TTTracksFromTracklet", "Level1TTTracks")
# Path and EndPath definitions
process.L1simulation_step = cms.Path(process.SimL1Emulator)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.FEVTDEBUGHLToutput_step = cms.EndPath(process.FEVTDEBUGHLToutput)
############################################################
# L1 pf object
###########################################################
process.load("L1Trigger.Phase2L1ParticleFlow.pfTracksFromL1Tracks_cfi")
from L1Trigger.Phase2L1ParticleFlow.l1ParticleFlow_cff import *
process.l1pf = cms.Path(process.pfTracksFromL1Tracks+process.l1ParticleFlow)
############################################################
# L1 Tau object
############################################################
process.load("L1Trigger.Phase2L1Taus.L1PFTauProducer_cff")
process.L1PFTauProducer.min_pi0pt = cms.double(2.5);
process.L1PFTauProducer.L1PFObjects = cms.InputTag("l1pfProducer","PF")
process.L1PFTauProducer.L1Neutrals = cms.InputTag("l1pfProducer")
process.L1PFTauProducer.L1Clusters = cms.InputTag("l1pfProducer","PF")
process.L1PFTaus = cms.Path(process.L1PFTauProducer)
# L1 Tau Analyzer
process.load("L1Trigger.phase2L1TauAnalyzer.phase2L1TauAnalyzer_cfi")
process.analyzer = cms.Path(process.L1TauAnalyzer)
process.TFileService = cms.Service("TFileService",
fileName = cms.string("analyzer-grow-l1t.root"),
closeFileFast = cms.untracked.bool(True)
)
# Schedule definition
process.schedule = cms.Schedule(process.EcalEBtp_step,process.L1TrackTrigger_step,process.L1simulation_step,process.l1pf,process.L1PFTaus,process.analyzer,process.endjob_step)
from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
associatePatAlgosToolsTask(process)
# Add early deletion of temporary data products to reduce peak memory need
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
process = customiseEarlyDelete(process)
# End adding early deletion
#dump_file = open('dump.py','w')
#dump_file.write(process.dumpPython())
process.source.secondaryFileNames = cms.untracked.vstring(
"/store/relval/CMSSW_9_3_7/RelValZTT_14TeV/GEN-SIM-DIGI-RAW/PU25ns_93X_upgrade2023_realistic_v5_2023D17PU200-v1/10000/76A6C136-2E2D-E811-AA99-0242AC130002.root")
process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange("1:48")
process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(100))
# Input source
process.source.fileNames = cms.untracked.vstring($inputFileNames)
process.TFileService = cms.Service("TFileService",
fileName = cms.string("$outputFileName")
)
|
[
"ojalvo@wisc.edu"
] |
ojalvo@wisc.edu
|
2a12294b3386cab34c9fd7340aa33088ecd84771
|
6345da7e3b648c5e0a2a4ed77a35a1a25066308d
|
/petit_lisp/test_petit.py
|
7e892bde3ad6861f9701633bed4af7e613a569d6
|
[] |
no_license
|
aroberge/splore
|
1550a5348162c110baceb877a2fd3045db78fbf5
|
34d97a19897bae48d9aed5f759589bf9bed1b7ef
|
refs/heads/master
| 2021-10-11T06:05:45.253376
| 2021-10-10T19:23:48
| 2021-10-10T19:23:48
| 28,837,513
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,491
|
py
|
''' usage: python test_petit.py [v?]
optional argument: v? where ? is a number will use the petit_lisp.py version
located in v?/petit_lisp.py otherwise the default (final)
version will be used.
'''
import mock
import unittest
if __name__ == '__main__':
import sys
import os
if len(sys.argv) > 1 and sys.argv[1].startswith('v'):
sys.path.insert(0, os.path.join(os.getcwd(), sys.argv[1]))
try:
version = int(sys.argv[1][1:])
except ValueError:
version = float(sys.argv[1][1:])
sys.argv.pop(1)
import petit_lisp as pl
else:
version = 0
import petit_lisp as pl
# Since we focus on a read-eval-print loop version, we only test the main
# parts of the interpreter ("read", "parse", "eval") and do not
# test the helper functions, which leaves us with the flexibility to
# change them as the design evolves and still have non-failing tests
# for all versions.
class TestRead(unittest.TestCase):
'''Ensures that we handle user input correctly'''
@mock.patch('builtins.input', return_value="(a b c)")
def test_get_expr_all_at_once(self, input):
self.assertEqual("(a b c)", pl.read_expression())
@unittest.skipIf(0 < version < 2, '')
@mock.patch('builtins.input', side_effect=['(a', 'b', 'c)'])
def test_get_expr_in_parts(self, input):
self.assertEqual("(a b c)", pl.read_expression())
class TestParse(unittest.TestCase):
'''Ensures that we parse expressions correctly, transforming them into
the appropriate "list of lists" representation'''
def test_parse_add(self):
self.assertEqual(['+', 3, 4], pl.parse("(+ 3 4)"), msg="basic")
self.assertEqual(['+', 3, 4], pl.parse(" ( + 3 4 ) "), msg="extra spaces")
def test_parse_add_more(self):
self.assertEqual(['+', 3, 4, 5], pl.parse(" ( + 3 4 5)"), msg="more args")
@unittest.skipIf(0 < version < 3, '')
def test_parse_two_levels(self):
self.assertEqual(['*', ['+', 3, 4], ['-', 2, 1]], pl.parse(" (* ( + 3 4) (- 2 1))"))
class TestEvaluate(unittest.TestCase):
'''Evaluate expressions, using the parse function as a first step'''
def test_add(self):
self.assertEqual(7, pl.evaluate(pl.parse("(+ 3 4)")))
@unittest.skipIf(0 < version < 2, '')
def test_add_floats(self):
self.assertEqual(7.75, pl.evaluate(pl.parse("(+ 3.25 4.5)")))
@unittest.skipIf(0 < version < 2, '')
def test_sub(self):
self.assertEqual(1, pl.evaluate(pl.parse("(- 4 3)")))
self.assertEqual(-1, pl.evaluate(pl.parse("(- 3 4)")))
def test_add_many(self):
self.assertEqual(12, pl.evaluate(pl.parse("(+ 3 4 5)")))
@unittest.skipIf(0 < version < 2, '')
def test_mul(self):
self.assertEqual(12, pl.evaluate(pl.parse("(* 3 4)")))
self.assertEqual(2.4, pl.evaluate(pl.parse("(* 0.6 4)")))
@unittest.skipIf(0 < version < 2, 'multiple args for mul')
def test_mul_many(self):
self.assertEqual(60, pl.evaluate(pl.parse("(* 3 4 5)")))
@unittest.skipIf(0 < version < 2, '')
def test_div(self):
self.assertEqual(2.0, pl.evaluate(pl.parse("(/ 8 4)")))
@unittest.skipIf(0 < version < 2, '')
def test_floor_div(self):
self.assertEqual(2, pl.evaluate(pl.parse("(// 8 4)")))
self.assertEqual(2, pl.evaluate(pl.parse("(// 9.1 4)")))
@unittest.skipIf(0 < version < 3, '')
def test_parse_two_levels(self):
self.assertEqual(13, pl.evaluate(pl.parse(" (+ (* 3 4) (- 2 1))")))
@unittest.skipIf(0 < version < 3, '')
def test_parse_three_levels(self):
self.assertEqual(6, pl.evaluate(pl.parse("(// (+ (* 3 4) (- 2 1)) 2)")))
@unittest.skipIf(0 < version < 4, '')
def test_define(self):
self.assertEqual(None, pl.evaluate(pl.parse("(define x 3)")))
self.assertEqual(7, pl.evaluate(pl.parse("(+ x 4)")))
self.assertEqual(3, pl.evaluate(pl.parse("x")))
@unittest.skipIf(0 < version < 4, '')
def test_set(self):
self.assertEqual(None, pl.evaluate(pl.parse("(define x 3)")))
self.assertEqual(3, pl.evaluate(pl.parse("x")))
self.assertEqual(None, pl.evaluate(pl.parse("(set! x 4)")))
self.assertEqual(8, pl.evaluate(pl.parse("(+ x 4)")))
@unittest.skipIf(0 < version < 5, '')
def test_lambda(self):
self.assertEqual(None, pl.evaluate(pl.parse("(define square (lambda (x) (* x x)))")))
self.assertEqual(9, pl.evaluate(pl.parse("(square 3)")))
@unittest.skipIf(0 < version < 6, '')
def test_load_file(self):
pl.REPL_STARTED = True
self.assertEqual(None, pl.load("define_variable_test.lisp"))
self.assertEqual(3, pl.evaluate(pl.parse("x")))
@unittest.skipIf(0 < version < 7, '')
def test_load_file_with_comments(self):
pl.REPL_STARTED = True
self.assertEqual(None, pl.load("comments_test.lisp"))
self.assertEqual(49, pl.evaluate(pl.parse("(square 7)")))
@unittest.skipIf(version not in [0, 7, 8], '')
def test_sqrt(self):
# verify that math functions are loaded properly; only need to verify one
self.assertEqual(4.0, pl.evaluate(pl.parse("(sqrt 16)")))
@unittest.skipIf(0 < version < 9, '')
def test_load_python(self):
# verify that Python module can be imported properly
pl.evaluate(pl.parse('(load-python (quote math))'))
self.assertEqual(4.0, pl.evaluate(pl.parse("(sqrt 16)")))
@unittest.skipIf(0 < version < 9, '')
def test_load_python_scope(self):
pl.REPL_STARTED = True
pl.load("scope_test.lisp")
self.assertEqual(3, pl.evaluate(pl.parse("(* 1 pi)")))
from math import pi
self.assertEqual(pi, pl.evaluate(pl.parse("(mul_pi 1)")))
class TestLogic(unittest.TestCase):
@unittest.skipIf(0 < version < 8, '')
def test_if(self):
# test "if", "#t", "#f"
pl.evaluate(pl.parse("(if #t (define x 1) (define x 2))"))
self.assertEqual(1, pl.evaluate(pl.parse("x")))
self.assertEqual(None, pl.evaluate(pl.parse("(if #f (define x 3) (define x 4))")))
self.assertEqual(4, pl.evaluate(pl.parse("x")))
@unittest.skipIf(0 < version < 8, '')
def test_not(self):
# test "if", "#t", "#f"
self.assertEqual(None, pl.evaluate(pl.parse("(if (not #t) (define x 1) (define x 2))")))
self.assertEqual(2, pl.evaluate(pl.parse("x")))
self.assertEqual(None, pl.evaluate(pl.parse("(if (not #f) (define x 3) (define x 4))")))
self.assertEqual(3, pl.evaluate(pl.parse("x")))
@unittest.skipIf(0 < version < 8, '')
def test_cond(self):
# test "cond", ">", ">" ,"="
expr = """
(define abs (lambda (x)
(cond ((> x 0) x)
((= x 0) 0)
((< x 0) (- x)))))"""
self.assertEqual(None, pl.evaluate(pl.parse(expr)))
self.assertEqual(2, pl.evaluate(pl.parse("(abs 2)")))
self.assertEqual(3, pl.evaluate(pl.parse("(abs -3)")))
self.assertEqual(0, pl.evaluate(pl.parse("(abs 0)")))
@unittest.skipIf(0 < version < 8, '')
def test_cond_with_else(self):
# test "cond", "else", "<="
expr = """
(define abs2 (lambda (x)
(cond ((<= x 0) (- x))
(else x)
)))"""
self.assertEqual(None, pl.evaluate(pl.parse(expr)))
self.assertEqual(2, pl.evaluate(pl.parse("(abs2 2)")))
self.assertEqual(3, pl.evaluate(pl.parse("(abs2 -3)")))
self.assertEqual(0, pl.evaluate(pl.parse("(abs2 0)")))
class TestLists(unittest.TestCase):
@unittest.skipIf(0 < version < 11, '')
def test_cons(self):
expr = "(define a (cons 1 (cons 2 (cons 3 (cons 4 '())))))"
expr2 = "'(1 2 3 4)"
pl.evaluate(pl.parse(expr))
self.assertEqual(pl.evaluate(pl.parse(expr2)), pl.evaluate(pl.parse("a")))
@unittest.skipIf(0 < version < 11, '')
def test_car(self):
expr = "(define a (cons 1 (cons 2 (cons 3 (cons 4 '())))))"
expr2 = "(car a)"
pl.evaluate(pl.parse(expr))
self.assertEqual(1, pl.evaluate(pl.parse(expr2)))
@unittest.skipIf(0 < version < 11, '')
def test_cdr(self):
expr = "(define a (cons 1 (cons 2 (cons 3 (cons 4 '())))))"
expr2 = "(cdr a)"
pl.evaluate(pl.parse(expr))
self.assertEqual(pl.evaluate(pl.parse("'(2 3 4)")), pl.evaluate(pl.parse(expr2)))
if __name__ == '__main__':
unittest.main()
|
[
"andre.roberge@gmail.com"
] |
andre.roberge@gmail.com
|
4069cf696c9532eef3b0edf6220845339f5f76ec
|
786027545626c24486753351d6e19093b261cd7d
|
/ghidra9.2.1_pyi/ghidra/app/util/bin/format/dwarf4/next/DWARFRegisterMappingsManager.pyi
|
ba1a1bbc88343e1b810e395de5dbfa5da91fb33f
|
[
"MIT"
] |
permissive
|
kohnakagawa/ghidra_scripts
|
51cede1874ef2b1fed901b802316449b4bf25661
|
5afed1234a7266c0624ec445133280993077c376
|
refs/heads/main
| 2023-03-25T08:25:16.842142
| 2021-03-18T13:31:40
| 2021-03-18T13:31:40
| 338,577,905
| 14
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,592
|
pyi
|
import generic.jar
import ghidra.app.util.bin.format.dwarf4.next
import ghidra.program.model.lang
import java.lang
import org.jdom
class DWARFRegisterMappingsManager(object):
"""
Factory class to instantiate and cache DWARFRegisterMappings objects.
"""
def __init__(self): ...
def equals(self, __a0: object) -> bool: ...
def getClass(self) -> java.lang.Class: ...
@staticmethod
def getDWARFRegisterMappingFileFor(lang: ghidra.program.model.lang.Language) -> generic.jar.ResourceFile:
"""
Returns {@link ResourceFile} that should contain the specified language's
DWARF register mapping, never null.
@param lang {@link Language} to find the mapping file for.
@return {@link ResourceFile} of where the mapping file should be, never
null.
@throws IOException if not a Sleigh language or no mapping specified or
multiple mappings specified.
"""
...
@staticmethod
def getMappingForLang(lang: ghidra.program.model.lang.Language) -> ghidra.app.util.bin.format.dwarf4.next.DWARFRegisterMappings:
"""
Returns a possibly cached {@link DWARFRegisterMappings} object for the
specified language,
<p>
@param lang {@link Language} to get the matching DWARF register mappings
for
@return {@link DWARFRegisterMappings} instance, never null
@throws IOException if mapping not found or invalid
"""
...
@overload
@staticmethod
def hasDWARFRegisterMapping(lang: ghidra.program.model.lang.Language) -> bool:
"""
Returns true if the specified {@link Language} has DWARF register
mappings.
@param lang The {@link Language} to test
@return true if the language has a DWARF register mapping specified
@throws IOException if there was an error in the language LDEF file.
"""
...
@overload
@staticmethod
def hasDWARFRegisterMapping(langDesc: ghidra.program.model.lang.LanguageDescription) -> bool:
"""
Returns true if the specified {@link LanguageDescription} has DWARF
register mappings.
@param langDesc The {@link LanguageDescription} to test
@return true if the language has a DWARF register mapping specified
@throws IOException if there was an error in the language LDEF file.
"""
...
def hashCode(self) -> int: ...
def notify(self) -> None: ...
def notifyAll(self) -> None: ...
@staticmethod
def readMappingForLang(lang: ghidra.program.model.lang.Language) -> ghidra.app.util.bin.format.dwarf4.next.DWARFRegisterMappings:
"""
Finds the DWARF register mapping information file specified in the
specified language's LDEF file and returns a new
{@link DWARFRegisterMappings} object containing the data read from that
file.
<p>
Throws {@link IOException} if the lang does not have a mapping or it is
invalid.
<p>
@param lang {@link Language} to read the matching DWARF register mappings
for
@return a new {@link DWARFRegisterMappings} instance, created from
information read from the {@link #DWARF_REGISTER_MAPPING_NAME}
xml file referenced in the language's LDEF, never null.
@throws IOException if there is no DWARF register mapping file associated
with the specified {@link Language} or if there was an error
in the register mapping data.
"""
...
@staticmethod
def readMappingFrom(rootElem: org.jdom.Element, lang: ghidra.program.model.lang.Language) -> ghidra.app.util.bin.format.dwarf4.next.DWARFRegisterMappings:
"""
Creates a new {@link DWARFRegisterMappings} from the data present in the
xml element.
<p>
@param rootElem JDom XML element containing the <dwarf> root
element of the mapping file.
@param lang The Ghidra {@link Language} that the DWARF register mapping
applies to
@return a new {@link DWARFRegisterMappings} instance, never null.
@throws IOException if missing or invalid data found in xml
"""
...
def toString(self) -> unicode: ...
@overload
def wait(self) -> None: ...
@overload
def wait(self, __a0: long) -> None: ...
@overload
def wait(self, __a0: long, __a1: int) -> None: ...
|
[
"tsunekou1019@gmail.com"
] |
tsunekou1019@gmail.com
|
2e1ce2b1f525c0e9d47d6fbdb67c819a692334fb
|
245b92f4140f30e26313bfb3b2e47ed1871a5b83
|
/airflow/providers/google_vendor/googleads/v12/services/services/customer_feed_service/transports/base.py
|
023d30df6c5c442ea4db3635f9ad24a98bee2a10
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] |
permissive
|
ephraimbuddy/airflow
|
238d6170a0e4f76456f00423124a260527960710
|
3193857376bc2c8cd2eb133017be1e8cbcaa8405
|
refs/heads/main
| 2023-05-29T05:37:44.992278
| 2023-05-13T19:49:43
| 2023-05-13T19:49:43
| 245,751,695
| 2
| 1
|
Apache-2.0
| 2021-05-20T08:10:14
| 2020-03-08T04:28:27
| null |
UTF-8
|
Python
| false
| false
| 5,984
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Optional, Sequence, Union
import pkg_resources
import google.auth # type: ignore
import google.api_core # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from airflow.providers.google_vendor.googleads.v12.services.types import customer_feed_service
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-ads",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class CustomerFeedServiceTransport(abc.ABC):
"""Abstract transport class for CustomerFeedService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/adwords",)
DEFAULT_HOST: str = "googleads.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file,
**scopes_kwargs,
quota_project_id=quota_project_id,
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(
service_account.Credentials, "with_always_use_jwt_access"
)
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.mutate_customer_feeds: gapic_v1.method.wrap_method(
self.mutate_customer_feeds,
default_timeout=None,
client_info=client_info,
),
}
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def mutate_customer_feeds(
self,
) -> Callable[
[customer_feed_service.MutateCustomerFeedsRequest],
Union[
customer_feed_service.MutateCustomerFeedsResponse,
Awaitable[customer_feed_service.MutateCustomerFeedsResponse],
],
]:
raise NotImplementedError()
__all__ = ("CustomerFeedServiceTransport",)
|
[
"noreply@github.com"
] |
ephraimbuddy.noreply@github.com
|
d2be3ec81f8f049e8a70a3c02bca4c7f5d207554
|
96e38b89fa057fa0c1cf34e498b4624041dfc6e2
|
/BOJ/String/Python/4583.py
|
a995a37188226e83d4452414ace1a0952986cac9
|
[] |
no_license
|
malkoG/polyglot-cp
|
66059246b01766da3c359dbd16f04348d3c7ecd2
|
584763144afe40d73e72dd55f90ee1206029ca8f
|
refs/heads/master
| 2021-11-24T13:33:49.625237
| 2019-10-06T07:42:49
| 2019-10-06T07:42:49
| 176,255,722
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 409
|
py
|
mirror=dict()
mirror['b'] = 'd'
mirror['d'] = 'b'
mirror['q'] = 'p'
mirror['p'] = 'q'
for ch in 'iovwx':
mirror[ch] = ch
while True:
s=input()
if s =="#":
break
result = ''
flag = True
for ch in s:
try:
            result += mirror[ch]
except:
flag = False
break
if flag:
print(result)
else:
print("INVALID")
|
[
"rijgndqw012@gmail.com"
] |
rijgndqw012@gmail.com
|
f71ec4146bf3702e19f5c68b3fd31bc7127c802f
|
ebfcae1c5ba2997b2ac4471d5bedc3f5daffcb31
|
/flask-smorest-master/flask_smorest/spec/__init__.py
|
f64833894a322e17deab1be0723b1895c67e7fb5
|
[
"MIT"
] |
permissive
|
babiato/flaskapp1
|
84de2d0b26a54f5820d3bbe97926782ad41e005c
|
530beb9e3b8516e0e93960b99521c23a523ef546
|
refs/heads/master
| 2023-02-26T16:36:49.760632
| 2021-02-04T09:08:40
| 2021-02-04T09:08:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,624
|
py
|
"""API specification using OpenAPI"""
import json
import http
import flask
from flask import current_app
import click
import apispec
from apispec.ext.marshmallow import MarshmallowPlugin
from flask_smorest.exceptions import MissingAPIParameterError
from flask_smorest.utils import prepare_response
from .plugins import FlaskPlugin
from .field_converters import uploadfield2properties
def _add_leading_slash(string):
"""Add leading slash to a string if there is None"""
return string if string.startswith('/') else '/' + string
DEFAULT_REQUEST_BODY_CONTENT_TYPE = 'application/json'
DEFAULT_RESPONSE_CONTENT_TYPE = 'application/json'
class DocBlueprintMixin:
"""Extend Api to serve the spec in a dedicated blueprint."""
def _register_doc_blueprint(self):
"""Register a blueprint in the application to expose the spec
Doc Blueprint contains routes to
- json spec file
- spec UI (ReDoc, Swagger UI).
"""
api_url = self._app.config.get('OPENAPI_URL_PREFIX', None)
if api_url is not None:
blueprint = flask.Blueprint(
'api-docs',
__name__,
url_prefix=_add_leading_slash(api_url),
template_folder='./templates',
)
# Serve json spec at 'url_prefix/openapi.json' by default
json_path = self._app.config.get(
'OPENAPI_JSON_PATH', 'openapi.json')
blueprint.add_url_rule(
_add_leading_slash(json_path),
endpoint='openapi_json',
view_func=self._openapi_json)
self._register_redoc_rule(blueprint)
self._register_swagger_ui_rule(blueprint)
self._app.register_blueprint(blueprint)
def _register_redoc_rule(self, blueprint):
"""Register ReDoc rule
The ReDoc script URL should be specified as OPENAPI_REDOC_URL.
"""
redoc_path = self._app.config.get('OPENAPI_REDOC_PATH')
if redoc_path is not None:
redoc_url = self._app.config.get('OPENAPI_REDOC_URL')
if redoc_url is not None:
self._redoc_url = redoc_url
blueprint.add_url_rule(
_add_leading_slash(redoc_path),
endpoint='openapi_redoc',
view_func=self._openapi_redoc)
def _register_swagger_ui_rule(self, blueprint):
"""Register Swagger UI rule
The Swagger UI scripts base URL should be specified as
OPENAPI_SWAGGER_UI_URL.
"""
swagger_ui_path = self._app.config.get('OPENAPI_SWAGGER_UI_PATH')
if swagger_ui_path is not None:
swagger_ui_url = self._app.config.get('OPENAPI_SWAGGER_UI_URL')
if swagger_ui_url is not None:
self._swagger_ui_url = swagger_ui_url
blueprint.add_url_rule(
_add_leading_slash(swagger_ui_path),
endpoint='openapi_swagger_ui',
view_func=self._openapi_swagger_ui)
def _openapi_json(self):
"""Serve JSON spec file"""
# We don't use Flask.jsonify here as it would sort the keys
# alphabetically while we want to preserve the order.
return current_app.response_class(
json.dumps(self.spec.to_dict(), indent=2),
mimetype='application/json')
def _openapi_redoc(self):
"""Expose OpenAPI spec with ReDoc"""
return flask.render_template(
'redoc.html', title=self.spec.title, redoc_url=self._redoc_url)
def _openapi_swagger_ui(self):
"""Expose OpenAPI spec with Swagger UI"""
return flask.render_template(
'swagger_ui.html',
title=self.spec.title,
swagger_ui_url=self._swagger_ui_url,
swagger_ui_config=self._app.config.get(
'OPENAPI_SWAGGER_UI_CONFIG', {})
)
class APISpecMixin(DocBlueprintMixin):
"""Add APISpec related features to Api class"""
def _init_spec(
self, *,
flask_plugin=None, marshmallow_plugin=None, extra_plugins=None,
title=None, version=None, openapi_version=None,
**options
):
# Plugins
self.flask_plugin = flask_plugin or FlaskPlugin()
self.ma_plugin = marshmallow_plugin or MarshmallowPlugin()
plugins = [self.flask_plugin, self.ma_plugin]
plugins.extend(extra_plugins or ())
# APISpec options
title = self._app.config.get('API_TITLE', title)
if title is None:
raise MissingAPIParameterError(
'API title must be specified either as "API_TITLE" '
'app parameter or as "title" spec kwarg.'
)
version = self._app.config.get('API_VERSION', version)
if version is None:
raise MissingAPIParameterError(
'API version must be specified either as "API_VERSION" '
'app parameter or as "version" spec kwarg.'
)
openapi_version = self._app.config.get(
'OPENAPI_VERSION', openapi_version)
if openapi_version is None:
raise MissingAPIParameterError(
                'OpenAPI version must be specified either as "OPENAPI_VERSION" '
'app parameter or as "openapi_version" spec kwarg.'
)
openapi_major_version = int(openapi_version.split('.')[0])
if openapi_major_version < 3:
base_path = self._app.config.get('APPLICATION_ROOT')
options.setdefault('basePath', base_path)
options.setdefault(
'produces', [DEFAULT_RESPONSE_CONTENT_TYPE, ])
options.setdefault(
'consumes', [DEFAULT_REQUEST_BODY_CONTENT_TYPE, ])
options.update(self._app.config.get('API_SPEC_OPTIONS', {}))
# Instantiate spec
self.spec = apispec.APISpec(
title, version, openapi_version, plugins, **options,
)
# Register custom fields in spec
for args in self._fields:
self._register_field(*args)
# Register custom converters in spec
for args in self._converters:
self._register_converter(*args)
# Register Upload field properties function
self.ma_plugin.converter.add_attribute_function(uploadfield2properties)
# Register OpenAPI command group
self._app.cli.add_command(openapi_cli)
def register_converter(self, converter, func):
"""Register custom path parameter converter
:param BaseConverter converter: Converter
Subclass of werkzeug's BaseConverter
:param callable func: Function returning a parameter schema from
            a converter instance
Example: ::
# Register MongoDB's ObjectId converter in Flask application
app.url_map.converters['objectid'] = ObjectIdConverter
# Define custom converter to schema function
def objectidconverter2paramschema(converter):
return {'type': 'string', 'format': 'ObjectID'}
# Register converter in Api
api.register_converter(
ObjectIdConverter,
objectidconverter2paramschema
)
@blp.route('/pets/{objectid:pet_id}')
...
api.register_blueprint(blp)
Once the converter is registered, all paths using it will have
corresponding path parameter documented with the right schema.
Should be called before registering paths with
:meth:`Blueprint.route <Blueprint.route>`.
"""
self._converters.append((converter, func))
# Register converter in spec if app is already initialized
if self.spec is not None:
self._register_converter(converter, func)
def _register_converter(self, converter, func):
self.flask_plugin.register_converter(converter, func)
def register_field(self, field, *args):
"""Register custom Marshmallow field
Registering the Field class allows the Schema parser to set the proper
type and format when documenting parameters from Schema fields.
:param Field field: Marshmallow Field class
``*args`` can be:
- a pair of the form ``(type, format)`` to map to
- a core marshmallow field type (then that type's mapping is used)
Examples: ::
# Map to ('string', 'ObjectId') passing type and format
api.register_field(ObjectId, 'string', 'ObjectId')
# Map to ('string', ) passing type
api.register_field(CustomString, 'string', None)
# Map to ('string, 'date-time') passing a marshmallow Field
api.register_field(CustomDateTime, ma.fields.DateTime)
Should be called before registering schemas with
:meth:`schema <Api.schema>`.
"""
self._fields.append((field, *args))
# Register field in spec if app is already initialized
if self.spec is not None:
self._register_field(field, *args)
def _register_field(self, field, *args):
self.ma_plugin.map_to_openapi_type(*args)(field)
def _register_responses(self):
"""Register default responses for all status codes"""
# Register a response for each status code
for status in http.HTTPStatus:
response = {
'description': status.phrase,
'schema': self.ERROR_SCHEMA,
}
prepare_response(
response, self.spec, DEFAULT_RESPONSE_CONTENT_TYPE)
self.spec.components.response(status.name, response)
# Also register a default error response
response = {
'description': 'Default error response',
'schema': self.ERROR_SCHEMA,
}
prepare_response(response, self.spec, DEFAULT_RESPONSE_CONTENT_TYPE)
self.spec.components.response('DEFAULT_ERROR', response)
openapi_cli = flask.cli.AppGroup('openapi', help='OpenAPI commands.')
@openapi_cli.command('print')
def print_openapi_doc():
"""Print OpenAPI document."""
api = current_app.extensions['flask-smorest']['ext_obj']
print(json.dumps(api.spec.to_dict(), indent=2))
@openapi_cli.command('write')
@click.argument('output_file', type=click.File(mode='w'))
def write_openapi_doc(output_file):
"""Write OpenAPI document to a file."""
api = current_app.extensions['flask-smorest']['ext_obj']
output_file.write(json.dumps(api.spec.to_dict(), indent=2))
|
[
"jinxufang@tencent.com"
] |
jinxufang@tencent.com
|
e3d149b7b7cf48fd12d2013aefb000ecade6610f
|
5d32d0e65aa3bfa677fd1b8c92569e07e9b82af1
|
/Section 5 - Programming Logic/Guess game v3 - multiple if.py
|
1136fb613ad7c5b24b6249a57be9343f93a90ebf
|
[
"CC0-1.0"
] |
permissive
|
pdst-lccs/lccs-python
|
b74ef2a02ac8ad2637f713fff5559f4e56c9827d
|
95cb7ece05716521e9951d7a40de8fb20a88021f
|
refs/heads/master
| 2023-05-28T00:46:57.313972
| 2023-05-22T10:16:43
| 2023-05-22T10:16:43
| 240,501,524
| 21
| 18
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 687
|
py
|
# Event: LCCS Python Fundamental Skills Workshop
# Date: May 2018
# Author: Joe English, PDST
# eMail: computerscience@pdst.ie
# Name: Guessing Game v3
# Purpose: A program to demonstrate the multiple if statement
import random
number = random.randint(1, 10)
# The next line can be commented out later ...
print(number) # have a sneak peek at the number to guess!
guess = int(input("Enter a number between 1 and 10: "))
# Evaluate the condition
if guess == number:
print("Correct")
print("Well done!")
elif guess < number:
print("Hard luck!")
print("Too low")
else:
print("Hard luck!")
print("Too high")
print("Goodbye")
|
[
"noreply@github.com"
] |
pdst-lccs.noreply@github.com
|
85c8a05dbc3ccd700e56696411f9f0987cab48a8
|
8e8ea9e41032398fa8b1c54d73475a54aa11d293
|
/page/quarter/quarter_statistic/quarter_statistic.py
|
3df8bda70c25e22d603ec3c1cedd4f084dcf02b2
|
[] |
no_license
|
xmaimiao/wmPC_quarter
|
6b69812b42039101d89076923c329d8e5952308b
|
255666ccb5d2cac38b6975c0ae1ab055caabe41f
|
refs/heads/master
| 2023-03-28T02:41:21.347163
| 2021-03-29T07:12:44
| 2021-03-29T07:12:44
| 352,538,443
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,686
|
py
|
from common.contants import quarter_statistic_dir
from page.base.basepage import BasePage
from page.quarter.quarter_statistic.quarter_statistic_detail import Quarter_Statistic_Detail
class Quarter_Statistic(BasePage):
def wait_sleep(self,sleeps):
self.sleep(sleeps)
return self
def simple_search(self,keys):
        '''
        Simple search. Takes a dict such as {quarter_type:"All",keywords:xxxx}
        '''
self._params["quarter_name"] = keys["quarter_name"]
self.step(quarter_statistic_dir,"search_input")
return self
def advanced_search(self,keys):
        '''
        Advanced search. Takes a dict of the form
        {startTime:{switch:False,value:2020/01/05},endStartTime:{switch:False,value:2020/01/05},
        startPlanExpireTime:{switch:False,value:2020/01/05},endPlanExpireTime:{switch:False,value:2020/01/05},
        startFinishedNumber:{switch:False,value:0},endFinishedNumber:{switch:False,value:0},
        startPushNumber:{switch:False,value:0},endPushNumber:{switch:False,value:0},
        frequency:{switch:False,value:daily},
        peopleOriented:{switch:False,value:specific audience},
        status:{switch:False,value:in progress},
        '''
self.step(quarter_statistic_dir, "click_advanced_search")
# if keys["startTime"]["switch"] == True:
# self._params["startTime"] = keys["startTime"]["value"]
# self.step(quarter_statistic_dir,"startTime")
# if keys["endStartTime"]["switch"] == True:
# self._params["endStartTime"] = keys["endStartTime"]["value"]
# self.step(quarter_statistic_dir,"endStartTime")
# if keys["startPlanExpireTime"]["switch"] == True:
# self._params["startPlanExpireTime"] = keys["startPlanExpireTime"]["value"]
# self.step(quarter_statistic_dir,"startPlanExpireTime")
# if keys["endPlanExpireTime"]["switch"] == True:
# self._params["endPlanExpireTime"] = keys["endPlanExpireTime"]["value"]
# self.step(quarter_statistic_dir,"endPlanExpireTime")
        # Collected count - range start
if keys["startFinishedNumber"]["switch"] == True:
self._params["startFinishedNumber"] = keys["startFinishedNumber"]["value"]
self.step(quarter_statistic_dir,"startFinishedNumber")
        # Collected count - range end
if keys["endFinishedNumber"]["switch"] == True:
self._params["endFinishedNumber"] = keys["endFinishedNumber"]["value"]
self.step(quarter_statistic_dir,"endFinishedNumber")
# if keys["startPushNumber"]["switch"] == True:
# self._params["startPushNumber"] = keys["startPushNumber"]["value"]
# self.step(quarter_statistic_dir,"startPushNumber")
# if keys["endPushNumber"]["switch"] == True:
# self._params["endPushNumber"] = keys["endPushNumber"]["value"]
# self.step(quarter_statistic_dir,"endPushNumber")
        # Search by questionnaire title
if keys["title"]["switch"] == True:
self._params["title"] = keys["title"]["value"]
self.step(quarter_statistic_dir,"title")
        # # Search by push frequency
# if keys["frequency"]["switch"] == True:
# self._params["frequency"] = keys["frequency"]["value"]
# self.step(quarter_statistic_dir, "frequency")
        # # Search by questionnaire type
# if keys["peopleOriented"]["switch"] == True:
# self._params["frequency"] = keys["peopleOriented"]["value"]
# self.step(quarter_statistic_dir, "peopleOriented")
        # Search by questionnaire status
if keys["status"]["switch"] == True:
self._params["status"] = keys["status"]["value"]
self.step(quarter_statistic_dir, "status")
self.step(quarter_statistic_dir,"click_search")
return self
def view_the_fir(self,quarter_name):
        '''
        Click the "View" button on the first data row
        '''
self._params["quarter_name"] = quarter_name
self.step(quarter_statistic_dir,"view_the_fir")
return Quarter_Statistic_Detail(self._driver)
def get_quarter_name_the_fir(self):
        '''
        Get the questionnaire name of the first row
        '''
try:
return self.step(quarter_statistic_dir,"get_quarter_name_the_fir")
except Exception as e:
            print("No data available!")
raise e
def get_quarter_status_the_fir(self):
        '''
        Get the questionnaire status of the first row
        '''
try:
return self.step(quarter_statistic_dir,"get_quarter_status_the_fir")
except Exception as e:
            print("No data available!")
raise e
|
[
"765120214@qq.com"
] |
765120214@qq.com
|
1f6ecc9a87a9cf8415c9d78c3fb3778a97bf9c3f
|
255e7b37e9ce28bbafba5a3bcb046de97589f21c
|
/leetcode_everyday/pastqing_491.py
|
d6d078bb2db9b5627bf0299def84a620217e9fd1
|
[] |
no_license
|
dog2humen/ForTheCoffee
|
697d2dc8366921aa18da2fa3311390061bab4b6f
|
2f940aa9dd6ce35588de18db08bf35a2d04a54f4
|
refs/heads/master
| 2023-04-15T09:53:54.711659
| 2021-04-28T13:49:13
| 2021-04-28T13:49:13
| 276,009,709
| 2
| 2
| null | 2020-07-01T08:29:33
| 2020-06-30T05:50:01
|
Python
|
UTF-8
|
Python
| false
| false
| 849
|
py
|
# coding:utf8
from typing import List
class Solution:
def findSubsequences(self, nums: List[int]) -> List[List[int]]:
return self.findSubsequences_v1(nums)
def findSubsequences_v1(self, nums: List[int]) -> List[List[int]]:
res = []
self.helper(nums, 0, [], res)
return res
def helper(self, nums, start, cur, res):
if len(cur) > 1:
res.append(cur[:])
memo = set()
for i in range(start, len(nums)):
if nums[i] in memo:
continue
if len(cur) == 0 or cur[-1] <= nums[i]:
memo.add(nums[i])
self.helper(nums, i + 1, cur + [nums[i]], res)
if __name__ == '__main__':
obj = Solution()
nums = [4, 6, 7, 7]
#nums = [4, 3, 2, 1]
res = obj.findSubsequences(nums)
print(res)
|
[
"116676671@qq.com"
] |
116676671@qq.com
|
921f2814602574d17fbd234becf4865925f0b64f
|
488e0934b8cd97e202ae05368c855a57b299bfd1
|
/Django/advanced/test_Blog/test_bookblog/book_app/migrations/0004_auto_20190408_1955.py
|
b4880c98ea624c5db12cdd69a946c614d2cc847a
|
[] |
no_license
|
didemertens/udemy_webdev
|
4d96a5e7abeec1848ecedb97f0c440cd50eb27ac
|
306215571be8e4dcb939e79b18ff6b302b75c952
|
refs/heads/master
| 2020-04-25T00:24:45.654136
| 2019-04-13T16:00:47
| 2019-04-13T16:00:47
| 172,377,429
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 363
|
py
|
# Generated by Django 2.1.7 on 2019-04-08 19:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('book_app', '0003_blog_intro'),
]
operations = [
migrations.AlterField(
model_name='blog',
name='intro',
field=models.TextField(),
),
]
|
[
"d.g.j.mertens@gmail.com"
] |
d.g.j.mertens@gmail.com
|
f5c263ff57f2af9eca8b0cb37427ffd481a5c178
|
2a54e8d6ed124c64abb9e075cc5524bb859ba0fa
|
/.history/2-Python-Basics-part2/4-ternary-operator_20200413230611.py
|
18fc81dee16c1d156f9fe477dc7652c5da62075c
|
[] |
no_license
|
CaptainStorm21/Python-Foundation
|
01b5fbaf7a913506518cf22e0339dd948e65cea1
|
a385adeda74f43dd7fb2d99d326b0be23db25024
|
refs/heads/master
| 2021-05-23T01:29:18.885239
| 2020-04-23T19:18:06
| 2020-04-23T19:18:06
| 253,171,611
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 38
|
py
|
# conditional expressions
condition_if
|
[
"tikana4@yahoo.com"
] |
tikana4@yahoo.com
|
6a7cc2d596827c9bde48ed3927efac4efb6ee38c
|
1ffbdfff2c9632fa8ecd6288578e1d02f740ee23
|
/2020_/07/LeetCodeBitManipulation/03E_1356. Sort Integers by The Number of 1 Bits.py
|
dc2f4d00cc3f90c25830cf14864e295c482b40d1
|
[] |
no_license
|
taesookim0412/Python-Algorithms
|
c167c130adbe04100d42506c86402e729f95266c
|
c6272bbcab442ef32f327f877a53ee6e66d9fb00
|
refs/heads/main
| 2023-05-01T09:40:44.957618
| 2021-05-12T10:52:30
| 2021-05-12T10:52:30
| 366,682,846
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 780
|
py
|
import collections
import numpy as np
from typing import List
#Runtime: 72 ms, faster than 68.71% of Python3 online submissions for Sort Integers by The Number of 1 Bits.
#Memory Usage: 13.8 MB, less than 86.25% of Python3 online submissions for Sort Integers by The Number of 1 Bits.
class Solution:
def sortByBits(self, arr: List[int]) -> List[int]:
arr.sort()
data = collections.defaultdict(list)
res = []
for i in range(len(arr)):
numberOfOnes = str(bin(arr[i])).count('1')
data[numberOfOnes] += arr[i],
for key, val in sorted(data.items()):
print(key,val)
res += val
return res
s = Solution()
print(s.sortByBits([0,1,2,3,4,5,6,7,8]))
print(s.sortByBits([10,100,1000,10000]))
|
[
"taesoo.kim0412@gmail.com"
] |
taesoo.kim0412@gmail.com
|
faa0ab004c18bd45116e831d5433c6c545aaedb2
|
2f418a0f2fcca40f84ec0863b31ff974b574350c
|
/scripts/addons_extern/nodes_io/attributes.py
|
5763cb541cfaa72d6cdbe4733176fefca15e8fb1
|
[] |
no_license
|
JT-a/blenderpython279
|
57a81b55564218f3b1417c2ffa97f5161897ec79
|
04846c82f794c22f87d677d9eb8cec1d05c48cda
|
refs/heads/master
| 2021-06-25T06:58:07.670613
| 2017-09-11T11:14:36
| 2017-09-11T11:14:36
| 103,723,697
| 4
| 2
| null | 2017-09-16T04:09:31
| 2017-09-16T04:09:31
| null |
UTF-8
|
Python
| false
| false
| 3,621
|
py
|
#!/usr/bin/python3
# --------------------------------------------------
# ATTRIBUTES
# --------------------------------------------------
defaults = [
"bl_idname",
# "type", # read-only
"name",
"label",
# "parent",
"select",
"location",
# "dimensions", # read-only
"width",
"height",
# "width_hidden",
"use_custom_color",
"color",
"hide",
"mute",
"show_options",
"show_preview",
"show_texture",
# "inputs",
# "outputs",
]
specials = [
"attribute_name", # ["ATTRIBUTE"]
"axis", # ["TANGENT"]
"blend_type", # ["MIX_RGB"]
"bytecode", # ["SCRIPT"]
"bytecode_hash", # ["SCRIPT"]
"color_mapping", # ["TEX_IMAGE", "TEX_ENVIRONMENT", "TEX_NOISE", "TEX_GRADIENT", "TEX_MUSGRAVE", "TEX_MAGIC", "TEX_WAVE", "TEX_SKY", "TEX_VORONOI", "TEX_CHECKER", "TEX_BRICK"]
"color_ramp", # ["VALTORGB"]
"color_space", # ["TEX_IMAGE", "TEX_ENVIRONMENT"]
"coloring", # ["TEX_VORONOI"]
"component", # ["BSDF_HAIR", "BSDF_TOON"]
"convert_from", # ["VECT_TRANSFORM"]
"convert_to", # ["VECT_TRANSFORM"]
"direction_type", # ["TANGENT"]
"distribution", # ["BSDF_GLOSSY", "BSDF_REFRACTION", "BSDF_ANISOTROPIC", "BSDF_GLASS"]
"falloff", # ["SUBSURFACE_SCATTERING"]
"filepath", # ["SCRIPT"]
"from_dupli", # ["UVMAP", "TEX_COORD"]
"gradient_type", # ["TEX_GRADIENT"]
"ground_albedo", # ["TEX_SKY"]
"image", # ["TEX_IMAGE", "TEX_ENVIRONMENT"]
"interpolation", # ["TEX_IMAGE"]
"invert", # ["BUMP"]
"is_active_output", # ["OUTPUT_MATERIAL", "OUTPUT_LAMP"]
"label_size", # ["FRAME"]
"mapping", # ["CURVE_RGB", "CURVE_VEC"]
"max", # ["MAPPING"]
"min", # ["MAPPING"]
"mode", # ["SCRIPT"]
"musgrave_type", # ["TEX_MUSGRAVE"]
"node_tree", # ["GROUP"]
"object", # ["TEX_COORD"]
"offset", # ["TEX_BRICK"]
"offset_frequency", # ["TEX_BRICK"]
"operation", # ["VECT_MATH", "MATH"]
"projection", # ["TEX_IMAGE", "TEX_ENVIRONMENT"]
"projection_blend", # ["TEX_IMAGE"]
"rotation", # ["MAPPING"]
"scale", # ["MAPPING"]
"script", # ["SCRIPT"]
"shrink", # ["FRAME"]
"sky_type", # ["TEX_SKY"]
"space", # ["NORMAL_MAP"]
"squash", # ["TEX_BRICK"]
"squash_frequency", # ["TEX_BRICK"]
"sun_direction", # ["TEX_SKY"]
"text", # ["FRAME"]
"texture_mapping", # ["TEX_IMAGE", "TEX_ENVIRONMENT", "TEX_NOISE", "TEX_GRADIENT", "TEX_MUSGRAVE", "TEX_MAGIC", "TEX_WAVE", "TEX_SKY", "TEX_VORONOI", "TEX_CHECKER", "TEX_BRICK"]
"translation", # ["MAPPING"]
"turbidity", # ["TEX_SKY"]
"turbulence_depth", # ["TEX_MAGIC"]
"use_alpha", # ["MIX_RGB"]
"use_auto_update", # ["SCRIPT"]
"use_clamp", # ["MIX_RGB", "MATH"]
"use_max", # ["MAPPING"]
"use_min", # ["MAPPING"]
"use_pixel_size", # ["WIREFRAME"]
"uv_map", # ["TANGENT", "UVMAP", "NORMAL_MAP"]
"vector_type", # ["MAPPING", "VECT_TRANSFORM"]
"wave_type", # ["TEX_WAVE"]
]
# --------------------------------------------------
# INPUTS / OUTPUTS TYPES
# --------------------------------------------------
sock_vectors = [
"RGBA",
"VECTOR",
]
sock_values = [
"CUSTOM",
"VALUE",
"INT",
"BOOLEAN",
"STRING",
]
|
[
"meta.androcto1@gmail.com"
] |
meta.androcto1@gmail.com
|
9da6da5fba9daedf9b2e92c80aa332916e18eeae
|
659653ebdff3d70e0c04cd0292e489dc537b4112
|
/setup.py
|
709ece69ddc3e6068b1178071932256215b94e81
|
[] |
no_license
|
vangheem/clouddrive
|
48de3dd21f66c4ea207e6cbfefff630825349bb3
|
1b0e74dd4e9cd3dc838a3c13866ccef8a3920b63
|
refs/heads/master
| 2021-01-10T02:11:52.997944
| 2016-09-20T02:30:51
| 2016-09-20T02:30:51
| 48,830,570
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 863
|
py
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from setuptools import find_packages
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
setup(
name='clouddrive',
version='0.1.dev0',
description='',
long_description='',
classifiers=[
"Programming Language :: Python",
],
author='Nathan Van Gheem',
author_email='nathan@vangheem.us',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup']),
install_requires=[
'requests',
'flask',
'ZEO',
'ZODB',
'python-dateutil'
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
run-server = clouddrive:run_server
run-monitor = clouddrive.monitor:run
""",
include_package_data=True,
zip_safe=False,
)
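# Usage note (a sketch, not part of the original file): after installing with
# `pip install -e .`, the console_scripts entry points above provide two commands,
# `run-server` (clouddrive:run_server) and `run-monitor` (clouddrive.monitor:run).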
|
[
"vangheem@gmail.com"
] |
vangheem@gmail.com
|
0f907498ef454193d80fa1dbac7f4ef357cb0485
|
f9308d5a8efe2dbb48e9cc87cd06405b60a9dc7b
|
/samples/python/apidocs/ee_featurecollection_aggregate_total_var.py
|
6f681ac3fa4a89f5f1bdf70d9c5cf99b8aef7c56
|
[
"Apache-2.0",
"CC-BY-4.0"
] |
permissive
|
google/earthengine-community
|
4e054b421f66f03507d58668084aee981062fc24
|
ce931040c518860f8788b4888c0acfdebd2952fc
|
refs/heads/master
| 2023-09-01T14:47:54.812703
| 2023-08-31T23:01:00
| 2023-08-31T23:01:39
| 200,732,820
| 428
| 552
|
Apache-2.0
| 2023-09-13T21:46:51
| 2019-08-05T21:42:11
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,021
|
py
|
# Copyright 2023 The Google Earth Engine Community Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START earthengine__apidocs__ee_featurecollection_aggregate_total_var]
# FeatureCollection of power plants in Belgium.
fc = ee.FeatureCollection('WRI/GPPD/power_plants').filter(
'country_lg == "Belgium"')
print('Total variance of power plant capacities (MW):',
fc.aggregate_total_var('capacitymw').getInfo()) # 214307.38335169878
# [END earthengine__apidocs__ee_featurecollection_aggregate_total_var]
|
[
"copybara-worker@google.com"
] |
copybara-worker@google.com
|
d6447c5e8113bc3dfba69e31df59d4e3c714b954
|
5257652fc34ec87fe45d390ba49b15b238860104
|
/single_cell_atacseq_preprocessing/pseudorep_peaks_supported_by_30percent_of_bioreps_in_same_region/get_number_of_samples_that_support_each_regional_peak.py
|
47f15d19a533484f6a223eea9579cbdf122b1557
|
[] |
no_license
|
thekingofall/alzheimers_parkinsons
|
cd247fa2520c989e8dd853ed22b58a9bff564391
|
4ceae6ea3eb4c58919ff41aed8803855bca240c8
|
refs/heads/master
| 2022-11-30T22:36:37.201334
| 2020-08-12T01:23:55
| 2020-08-12T01:23:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,190
|
py
|
#using IDR optimal peaks from the pseudoreplicate set, calculate the number of biological replicates (based on biorep IDR optimal peak sets) that support each peak
import argparse
import pybedtools
import gzip
def parse_args():
parser=argparse.ArgumentParser(description="using IDR optimal peaks from the pseudoreplicate set, calculate the number of biological replicates (based on biorep IDR optimal peak sets) that support each peak")
parser.add_argument("--pseudorep_idr_optimal_peaks",help="file containing full paths to the pseudorep IDR peak sets")
parser.add_argument("--biorep_idr_optimal_peaks",help="file containing full paths to the biorep IDR peak sets")
parser.add_argument("--samples",help="file containing list of samples to annotate")
parser.add_argument("--thresh",default=0.3,type=float,help="percent of bioreps for a given condition/region that must contain a peak for it to be included in the finalized set")
parser.add_argument("--out_suffix",default=".idr.optimal_peaks.support30%.bed.gz",help="file suffix for the sample output peak file prefix")
return parser.parse_args()
def get_sample_to_pseudorep_peak_map(samples,pseudorep_idr_optimal_peaks):
sample_to_pseudorep_peaks=dict()
for pseudorep_peakset in pseudorep_idr_optimal_peaks:
for sample in samples:
if sample in pseudorep_peakset:
sample_to_pseudorep_peaks[sample]=pybedtools.bedtool.BedTool(pseudorep_peakset)
break
return sample_to_pseudorep_peaks
def get_sample_to_biorep_peak_map(samples,biorep_idr_optimal_peaks):
sample_to_biorep_peaks=dict()
for sample in samples:
sample_to_biorep_peaks[sample]=[]
for biorep_peakset in biorep_idr_optimal_peaks:
renamed=biorep_peakset.replace('/','_')
for sample in samples:
if sample in renamed:
sample_to_biorep_peaks[sample].append(pybedtools.bedtool.BedTool(biorep_peakset) )
break
return sample_to_biorep_peaks
def main():
args=parse_args()
pseudorep_idr_optimal_peaks=open(args.pseudorep_idr_optimal_peaks,'r').read().strip().split('\n')
biorep_idr_optimal_peaks=open(args.biorep_idr_optimal_peaks,'r').read().strip().split('\n')
samples=open(args.samples,'r').read().strip().split('\n')
sample_to_pseudorep_peaks=get_sample_to_pseudorep_peak_map(samples,pseudorep_idr_optimal_peaks)
sample_to_biorep_peaks=get_sample_to_biorep_peak_map(samples,biorep_idr_optimal_peaks)
for sample in samples:
print(sample)
pseudorep_peaks=sample_to_pseudorep_peaks[sample]
support_histogram=dict()
for entry in pseudorep_peaks:
support_histogram[tuple(entry[0:3])]=[0,entry]
for biorep_peaks in sample_to_biorep_peaks[sample]:
#intersect them
try:
intersection=pseudorep_peaks.intersect(biorep_peaks,u=True,f=0.4,F=0.4,e=True)
except:
print("could not intersect,skipping")
continue
intersection=list(set([tuple(i[0:3]) for i in intersection]))
print(str(len(intersection))+"/"+str(len(pseudorep_peaks)))
for intersection_entry in intersection:
support_histogram[intersection_entry][0]+=1
outf=gzip.open(sample+args.out_suffix,'wt')
outf_bad=gzip.open(sample+".unsupported"+args.out_suffix,'wt')
min_support_count=args.thresh*len(sample_to_biorep_peaks[sample])
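        # Worked example (illustrative numbers): with the default --thresh of 0.3 and
        # 10 biorep peak sets for this sample, min_support_count = 0.3 * 10 = 3.0, so a
        # peak must be supported by at least 3 bioreps to land in the "good" output.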
print("min_support_count:"+str(min_support_count))
out_good=[]
out_bad=[]
for entry in support_histogram:
cur_entry_support=support_histogram[entry][0]
if cur_entry_support >= min_support_count:
out_good.append(str(support_histogram[entry][1]).rstrip('\n')+'\t'+str(cur_entry_support))
else:
out_bad.append(str(support_histogram[entry][1]).rstrip('\n')+'\t'+str(cur_entry_support))
outf.write('\n'.join(out_good))
outf_bad.write('\n'.join(out_bad)+'\n')
outf.close()
outf_bad.close()
if __name__=="__main__":
main()
|
[
"annashcherbina@gmail.com"
] |
annashcherbina@gmail.com
|
6793a7e2ed84de8b67e05f62dbff2c37d60be349
|
6c0beed4cd8719bf48982a853183121cea35cadf
|
/thread_syn_scanr_final.py
|
a312b10f43247c3ca188d52d82f490d1d797ed92
|
[] |
no_license
|
vicky288/pythonScripts
|
c90406dd2addc7a72275be0526daae7eba7d8c29
|
2b31c726b5924314b31a37f3a4eb86c132816859
|
refs/heads/master
| 2021-04-30T16:35:30.739238
| 2017-02-09T00:06:16
| 2017-02-09T00:06:16
| 80,077,402
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,356
|
py
|
#!/usr/bin/env python
import threading
import Queue
import sys  # needed below for sys.argv
import time
from scapy.all import *
class WorkerThread(threading.Thread) :
def __init__(self, queue, tid) :
threading.Thread.__init__(self)
self.queue = queue
self.tid = tid
print "Worker %d Reporting for Service Sir!" %self.tid
def run(self) :
total_ports = 0
while True :
port = 0
try :
port = self.queue.get(timeout=1)
except Queue.Empty :
print "Worker %d exiting. Scanned %d ports ..." % (self.tid, total_ports)
return
# port scanning to begin
# we rely on scapy to do this
ip = sys.argv[1]
response = sr1(IP(dst=ip)/TCP(dport=port, flags="S"), verbose=False, timeout=.2)
# only checking for SYN-ACK == flags = 18
# filtererd ports etc. is another story altogether
if response :
if response[TCP].flags == 18 :
print "ThreadId %d: Received port number %d Status: OPEN" %(self.tid, port)
self.queue.task_done()
total_ports += 1
queue = Queue.Queue()
threads = []
for i in range(1, 10) :
print "Creating WorkerThread : %d"%i
worker = WorkerThread(queue, i)
worker.setDaemon(True)
worker.start()
threads.append(worker)
print "WorkerThread %d Created!"%i
for j in range (1,1000) :
queue.put(j)
queue.join()
# wait for all threads to exit
for item in threads :
item.join()
print "Scanning Complete!"
|
[
"root@localhost.localdomain"
] |
root@localhost.localdomain
|
3822dc71dbe9d74b56a67f934b2b21851a2d04bd
|
19da1a56f137a08772c347cf974be54e9c23c053
|
/lib/adafruit_boardtest/boardtest_voltage_monitor.py
|
bcdd23695e093d6d8bed54eddee6c02a1518167c
|
[] |
no_license
|
mk53202/mk53202-timeclock-pyportal
|
d94f45a9d186190a4bc6130077baa6743a816ef3
|
230a858d429f8197c00cab3e67dcfd3b295ffbe0
|
refs/heads/master
| 2021-02-04T05:38:25.533292
| 2020-02-27T22:45:56
| 2020-02-27T22:45:56
| 243,626,362
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,216
|
py
|
# The MIT License (MIT)
#
# Copyright (c) 2018 Shawn Hymel for Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
`adafruit_boardtest.boardtest_voltage_monitor`
====================================================
Prints out the measured voltage on any onboard voltage/battery monitor pins.
Note that some boards have an onboard voltage divider to decrease the voltage
to these pins.
Run this script as its own main.py to individually run the test, or compile
with mpy-cross and call from separate test script.
* Author(s): Shawn Hymel for Adafruit Industries
Implementation Notes
--------------------
**Hardware:**
* `Multimeter <https://www.adafruit.com/product/2034>`_
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the supported boards:
https://github.com/adafruit/circuitpython/releases
"""
import board
import analogio
__version__ = "1.0.1"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_BoardTest.git"
# Constants
VOLTAGE_MONITOR_PIN_NAMES = ['VOLTAGE_MONITOR', 'BATTERY']
ANALOG_REF = 3.3 # Reference analog voltage
ANALOGIN_BITS = 16 # ADC resolution (bits) for CircuitPython
# Test result strings
PASS = "PASS"
FAIL = "FAIL"
NA = "N/A"
def run_test(pins):
"""
Prints out voltage on the battery monitor or voltage monitor pin.
:param list[str] pins: list of pins to run the test on
:return: tuple(str, list[str]): test result followed by list of pins tested
"""
# Look for pins with battery monitoring names
monitor_pins = list(set(pins).intersection(set(VOLTAGE_MONITOR_PIN_NAMES)))
# Print out voltage found on these pins
if monitor_pins:
# Print out the monitor pins found
print("Voltage monitor pins found:", end=' ')
for pin in monitor_pins:
print(pin, end=' ')
print('\n')
# Print out the voltage found on each pin
for pin in monitor_pins:
monitor = analogio.AnalogIn(getattr(board, pin))
voltage = (monitor.value * ANALOG_REF) / (2**ANALOGIN_BITS)
print(pin + ": {:.2f}".format(voltage) + " V")
monitor.deinit()
print()
# Ask the user to check these voltages
print("Use a multimeter to verify these voltages.")
print("Note that some battery monitor pins might have onboard " +
"voltage dividers.")
print("Do the values look reasonable? [y/n]")
if input() == 'y':
return PASS, monitor_pins
return FAIL, monitor_pins
# Else (no pins found)
print("No battery monitor pins found")
return NA, []
def _main():
# List out all the pins available to us
pins = [p for p in dir(board)]
print()
print("All pins found:", end=' ')
# Print pins
for pin in pins:
print(pin, end=' ')
print('\n')
# Run test
result = run_test(pins)
print()
print(result[0])
print("Pins tested: " + str(result[1]))
# Execute only if run as main.py or code.py
if __name__ == "__main__":
_main()
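# Usage sketch (an assumption based on the docstring above): copy this file to the
# CIRCUITPY drive as main.py to run it standalone, or import the module and call
# run_test([p for p in dir(board)]) from a separate test runner.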
|
[
"mkoster@stack41.com"
] |
mkoster@stack41.com
|
a0322b2f81ed8ef731da2cc2a758f162c0d92b65
|
9b36652dafb58888b7a584806ee69a33fcb609d5
|
/objutils/pickleif.py
|
fd210c74a9e0aa035425e908bbf6ad39a83c3423
|
[] |
no_license
|
pySART/objutils
|
db33e4576cf68111cb4debbafec06a0204844938
|
5ba4631b2245caae80d4dbe0053db0f2706ba53f
|
refs/heads/master
| 2020-06-29T03:35:24.485977
| 2016-11-21T14:21:56
| 2016-11-21T14:21:56
| 74,451,500
| 5
| 2
| null | 2016-11-22T08:36:10
| 2016-11-22T08:36:10
| null |
UTF-8
|
Python
| false
| false
| 1,385
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = "0.1.0"
__copyright__ = """
pyObjUtils - Object file library for Python.
(C) 2010-2013 by Christoph Schueler <github.com/Christoph2,
cpu12.gems@googlemail.com>
All Rights Reserved
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
import abc
DUMMY_PROTOCOL = None
class PickleIF(object):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def dump(self, obj, file_, protocol = DUMMY_PROTOCOL): pass
@abc.abstractmethod
def dumps(self, obj, protocol = DUMMY_PROTOCOL): pass
@abc.abstractmethod
def load(self, file_): pass
@abc.abstractmethod
def loads(self, string_): pass
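# A minimal concrete implementation sketch (hypothetical, not part of this module),
# showing how the interface is intended to be filled in using the stdlib pickle:
import pickle
class StdlibPickleIF(PickleIF):
    def dump(self, obj, file_, protocol = DUMMY_PROTOCOL): pickle.dump(obj, file_, protocol)
    def dumps(self, obj, protocol = DUMMY_PROTOCOL): return pickle.dumps(obj, protocol)
    def load(self, file_): return pickle.load(file_)
    def loads(self, string_): return pickle.loads(string_)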
|
[
"cpu12.gems@googlemail.com"
] |
cpu12.gems@googlemail.com
|
f9e1d014f00ad100e068a2d024c3d380291478c1
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/HBKAGJZ62JkCTgYX3_21.py
|
715e80114cfa7428c4cd12fde12de966ea9febf6
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 123
|
py
|
def last(a, n):
    if n == 0:
        return []
    elif n > len(a):
        return "invalid"
    else:
        return a[-n:]
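# Illustrative calls (not part of the original submission):
#   last([1, 2, 3, 4, 5], 3)  -> [3, 4, 5]
#   last([1, 2, 3, 4, 5], 0)  -> []
#   last([1, 2, 3, 4, 5], 9)  -> "invalid"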
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
34c69a2e6e5163f82c16f2066cc150e2915edd2e
|
13a416a2694d1f6aa1a68cd47610236bf61cafbc
|
/CodePractice/Turtle/circleturtle.py
|
a465b8fe30527faaecef0725052e7c92e49ae8e1
|
[] |
no_license
|
Highjune/Python
|
c637f7d0f9e5d1ac9d6ad87b4e54833b8ff4ae11
|
1be43816d22f5f3b8679cf0cd3939e9d9f54497a
|
refs/heads/master
| 2022-11-24T01:20:54.470172
| 2020-07-27T18:01:47
| 2020-07-27T18:01:47
| 263,271,337
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 170
|
py
|
import turtle as t
t.shape("turtle")
num = int(input('num : '))
go = int((num-1)/2)
for i in range(1,num):
for j in range(1,i):
t.forward(1)
t.right(90)
|
[
"highjune37@gmail.com"
] |
highjune37@gmail.com
|
f546e9e52b380e7d078d7a83b9522f48799bf1fb
|
6e172edee44d5985d19327cf61865d861395d595
|
/2020/11/y2020_d11_p01.py
|
14c61f5a7ba331bb0fdd890b3ea3476e58d1cce5
|
[] |
no_license
|
rHermes/adventofcode
|
bbac479ec1c84c55484effa2cd94889d621b3718
|
4cbe7a952678c5f09438702562b7f6f673a1cf83
|
refs/heads/master
| 2023-01-14T07:05:35.769426
| 2023-01-01T09:53:11
| 2023-01-01T10:13:29
| 225,170,580
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,699
|
py
|
import fileinput
import itertools as it
# Let's build a jump table
def jumptbl(M, ROWS, COLS, x, y):
arounds = []
for dy, dx in [(-1,-1), (-1, 0), (-1, 1), (0,-1), (0, 1), (1,-1), (1,0), (1,1)]:
zx = x + dx
zy = y + dy
idx = zy*COLS + zx
if 0 <= zx < COLS and 0 <= zy < ROWS and M[idx] != None:
arounds.append(idx)
return arounds
# Creates a compressed version of a jump array
def compress(M, ROWS, COLS):
comp = []
# translate from full to sparse
trans = {}
    # Build sparse index
    # Walk the grid row by row (y over ROWS, x over COLS); it is stored row-major.
    for y in range(ROWS):
        for x in range(COLS):
            idx = y*COLS + x
            if M[idx] is None:
                continue
trans[idx] = len(comp)
comp.append(M[idx])
# Build jump table
jmp = {}
for oidx, nidx in trans.items():
y = oidx // COLS
x = oidx % COLS
# Second pass, now to create jump table
adj = frozenset(trans[k] for k in jumptbl(M, ROWS, COLS, x, y))
if len(adj) < 4:
comp[nidx] = True
else:
jmp[nidx] = adj
return (comp, jmp)
# Step from M to N using jmp
def step(M, N, jmp):
changed = False
for idx, adj in jmp.items():
t = sum(M[x] for x in adj)
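        # Day 11 part 1 rules, as encoded below: an occupied seat stays occupied while
        # fewer than 4 of its neighbours are occupied; an empty seat becomes occupied
        # only when none of its neighbours are.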
N[idx] = (M[idx] and t < 4) or ((not M[idx]) and t == 0)
changed |= N[idx] != M[idx]
return changed
lines = [line.rstrip() for line in fileinput.input()]
ROWS = len(lines)
COLS = len(lines[0])
# None takes the spot of Empty
M = [{'L': False, '#': True, '.': None}[x] for x in it.chain(*lines)]
comp, jmp = compress(M, ROWS, COLS)
A = comp
B = A.copy()
while step(A, B, jmp):
B, A = A, B
print(sum(A))
|
[
"teodor@spaeren.no"
] |
teodor@spaeren.no
|
8eabc5915442c74698de459405acdb8a6cb90fa6
|
18b3ad3b0e1f7f10969738251e1201d01dfbc6bf
|
/backup_files/practice/rect.py
|
00e007de1004f6dc31ae22f14c65ace2161a43fa
|
[] |
no_license
|
sahthi/backup2
|
11d509b980e731c73733b1399a8143780779e75a
|
16bed38f0867fd7c766c2a008c8d43b0660f0cb0
|
refs/heads/master
| 2020-03-21T12:39:56.890129
| 2018-07-09T08:12:46
| 2018-07-09T08:12:46
| 138,565,151
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 352
|
py
|
#!/usr/bin/python
class rectangle:
    def __init__(self, length, breadth):
        self.length = length
        self.breadth = breadth
    def area(self):
        return self.breadth * self.length
# Convert the text read from stdin to numbers before doing arithmetic on it.
a = float(input("enter the length of rectangle:"))
b = float(input("enter the breadth of rectangle:"))
obj = rectangle(a, b)
print("area of rectangle:", obj.area())
|
[
"siddamsetty.sahithi@votarytech.com"
] |
siddamsetty.sahithi@votarytech.com
|
cfbd6b9b962886737f7fde0c1dd2f399f97d3ffe
|
d721258b53f0f44b1010cb8e8efac8e2a5c96c26
|
/adventure/models.py
|
babb8aa7915cc58b6585820f8664bb9817390484
|
[
"LicenseRef-scancode-proprietary-license",
"MIT"
] |
permissive
|
kdechant/eamon
|
a6662285f51a6cad5797bb9be92ca709ae36921c
|
080a43aa80c3a1605c402e68616545a8e9c7975c
|
refs/heads/master
| 2023-05-24T08:20:18.551604
| 2022-08-14T10:27:01
| 2023-04-08T07:31:45
| 49,559,304
| 28
| 7
|
MIT
| 2023-03-14T21:09:55
| 2016-01-13T08:07:28
|
TypeScript
|
UTF-8
|
Python
| false
| false
| 22,320
|
py
|
from django.db import models
from taggit.managers import TaggableManager
ARTIFACT_TYPES = (
(0, 'Gold'),
(1, 'Treasure'),
(2, 'Weapon'),
(3, 'Magic Weapon'),
(4, 'Container'),
(5, 'Light Source'),
(6, 'Drinkable'),
(7, 'Readable'),
(8, 'Door/Gate'),
(9, 'Edible'),
(10, 'Bound Monster'),
(11, 'Wearable'), # armor/shield
(12, 'Disguised Monster'),
(13, 'Dead Body'),
(14, 'User 1'),
(15, 'User 2'),
(16, 'User 3'),
)
AXE = 1
BOW = 2
CLUB = 3
SPEAR = 4
SWORD = 5
WEAPON_TYPES = (
(AXE, 'Axe'),
(BOW, 'Bow'),
(CLUB, 'Club'),
(SPEAR, 'Spear'),
(SWORD, 'Sword')
)
CLOTHING_TYPES = (
(0, 'Clothes or Armor/Shield'),
(1, 'Coats, Capes, etc.'),
(2, 'Shoes, boots'),
(3, 'Gloves'),
(4, 'Hats, headwear'),
(5, 'Jewelry'),
(6, 'Undergarments'),
)
ARMOR_TYPES = (
(0, 'Armor'),
(1, 'Shield'),
(2, 'Helmet'),
(3, 'Gloves'),
(4, 'Ring'),
)
MARKDOWN_CHOICES = [(False, "Plain text"), (True, "Markdown")]
class Author(models.Model):
name = models.CharField(max_length=50)
def __str__(self):
return self.name
class Adventure(models.Model):
name = models.CharField(max_length=50)
description = models.TextField(default='', blank=True)
full_description = models.TextField(default='', blank=True)
intro_text = models.TextField(
default='', blank=True,
help_text="Text shown to the adventurer when they begin the adventure. Use this to set up the story. Split"
" it into multiple pages by using a line containing three hyphens as a break. Supports Markdown."
)
intro_question = models.TextField(
default='', blank=True,
help_text="If you want to ask the adventurer a question when they start the adventure, put"
" the question text here. The answer will be available in the game object."
)
slug = models.SlugField(null=True)
edx = models.CharField(null=True, max_length=50, blank=True)
edx_version = models.FloatField(default=0, blank=True, null=True)
edx_room_offset = models.IntegerField(default=0, null=True, blank=True)
edx_artifact_offset = models.IntegerField(default=0, null=True, blank=True)
edx_effect_offset = models.IntegerField(default=0, null=True, blank=True)
edx_monster_offset = models.IntegerField(default=0, null=True, blank=True)
edx_program_file = models.CharField(null=True, max_length=50, blank=True)
directions = models.IntegerField(default=6)
dead_body_id = models.IntegerField(
default=0, blank=True, null=True,
help_text="The artifact ID of the first dead body. Leave blank to not use dead body artifacts.")
active = models.BooleanField(default=0)
# the first and last index of hints read from the hints file - used with the import_hints management command
first_hint = models.IntegerField(null=True, blank=True)
last_hint = models.IntegerField(null=True, blank=True)
date_published = models.DateField(null=True, blank=True)
featured_month = models.CharField(null=True, blank=True, max_length=7)
tags = TaggableManager(blank=True)
authors = models.ManyToManyField(Author)
def __str__(self):
return self.name
@property
def times_played(self):
return ActivityLog.objects.filter(type='start adventure', adventure_id=self.id).count()
@property
def avg_ratings(self):
return self.ratings.all().aggregate(models.Avg('overall'), models.Avg('combat'), models.Avg('puzzle'))
@property
def rooms_count(self):
return Room.objects.filter(adventure_id=self.id).count()
@property
def artifacts_count(self):
return Artifact.objects.filter(adventure_id=self.id).count()
@property
def effects_count(self):
return Effect.objects.filter(adventure_id=self.id).count()
@property
def monsters_count(self):
return Monster.objects.filter(adventure_id=self.id).count()
class Meta:
ordering = ['name']
class Room(models.Model):
adventure = models.ForeignKey(Adventure, on_delete=models.CASCADE, related_name='rooms')
room_id = models.IntegerField(default=0) # The in-game room ID.
name = models.CharField(max_length=255)
is_markdown = models.BooleanField(default=False, choices=MARKDOWN_CHOICES, verbose_name="Text format")
description = models.TextField(max_length=1000)
# The ID of an effect to display after the description
effect = models.IntegerField(null=True, blank=True)
# The ID of an effect to display after the description, without a paragraph break.
effect_inline = models.IntegerField(null=True, blank=True)
is_dark = models.BooleanField(default=False)
dark_name = models.CharField(null=True, blank=True, max_length=255,
help_text="The name shown if the room is dark and the player doesn't have a light. "
"Leave blank to use the standard 'in the dark' message.")
dark_description = models.TextField(
null=True, blank=True, max_length=1000,
help_text="The description shown if the room is dark and the player doesn't"
" have a light. Leave blank to use the standard 'it's too dark to see' message.")
data = models.TextField(
max_length=1000, null=True, blank=True,
help_text="Adventure-specific data for this room, e.g., room type or environment "
"(road, cave, snow, etc.). Data can be used in custom code. Enter as a "
"JSON object."
)
def __str__(self):
return self.name
class RoomExit(models.Model):
adventure = models.ForeignKey(Adventure, on_delete=models.CASCADE, related_name='room_exits', null=True)
direction = models.CharField(max_length=2)
room_from = models.ForeignKey(Room, on_delete=models.CASCADE, related_name='exits')
room_to = models.IntegerField(default=0) # Not a real foreign key. Yet.
door_id = models.IntegerField(null=True, blank=True)
effect_id = models.IntegerField(null=True, blank=True,
help_text="The effect will be shown when the player moves in this direction. "
"You can also enter a zero for the connection and an effect ID to set up "
"a custom message on a non-existent exit, e.g., if the player can't go in"
" the ocean without a boat, etc.")
def __str__(self):
return str(self.room_from) + " " + self.direction
def save(self, **kwargs):
if self.room_from and self.adventure_id != self.room_from.adventure_id:
self.adventure_id = self.room_from.adventure_id
super().save(**kwargs)
class Artifact(models.Model):
adventure = models.ForeignKey(Adventure, on_delete=models.CASCADE, related_name='artifacts')
artifact_id = models.IntegerField(default=0) # The in-game artifact ID.
article = models.CharField(max_length=20, null=True, blank=True,
help_text="Optional article or adjective that appears before the name, "
"e.g., 'a', 'the', 'some'.")
name = models.CharField(max_length=255)
synonyms = models.CharField(
null=True, max_length=255, blank=True,
help_text="Other terms for this artifact. E.g., if the artifact name is 'secret door in"
" north wall' you could have a synonym of 'door' to help the player find it.")
is_markdown = models.BooleanField(default=False, choices=MARKDOWN_CHOICES, verbose_name="Text format")
description = models.TextField(max_length=1000)
# The ID of an effect to display after the description
effect = models.IntegerField(null=True, blank=True)
# The ID of an effect to display after the description, without a paragraph break.
effect_inline = models.IntegerField(null=True, blank=True)
room_id = models.IntegerField(
null=True, blank=True,
help_text="If in a room, the room ID"
)
monster_id = models.IntegerField(
null=True, blank=True,
help_text="If carried by a monster, the monster ID"
)
container_id = models.IntegerField(
null=True, blank=True,
help_text="If in a container, the container ID"
)
guard_id = models.IntegerField(
null=True, blank=True,
help_text="If a bound monster, the ID of a monster that prevents the player from freeing it. For other "
"artifact types, the ID of a monster that prevents the player from picking it up."
)
weight = models.IntegerField(
default=0,
help_text="Weight in Gronds. Enter -999 for something that can't be picked up, or 999 to show the message "
"'Don't be absurd' if the player tries to pick it up."
)
value = models.IntegerField(default=0)
type = models.IntegerField(null=True, choices=ARTIFACT_TYPES)
is_worn = models.BooleanField(default=False)
is_open = models.BooleanField(default=False)
key_id = models.IntegerField(
null=True, blank=True,
help_text="If a container, door, or bound monster, the artifact ID of the key that opens it"
)
linked_door_id = models.IntegerField(
null=True, blank=True,
help_text="To make a two-sided door, enter the artifact ID of the other side of the door. "
"They will open and close as a set."
)
hardiness = models.IntegerField(
null=True, blank=True,
help_text="If a door or container that can be smashed open, how much damage does it take to open it?")
weapon_type = models.IntegerField(null=True, blank=True, choices=WEAPON_TYPES)
hands = models.IntegerField(default=1, choices=(
(1, 'One-handed'),
(2, 'Two-handed')
))
weapon_odds = models.IntegerField(null=True, blank=True)
dice = models.IntegerField(null=True, blank=True)
sides = models.IntegerField(null=True, blank=True)
clothing_type = models.IntegerField(null=True, choices=CLOTHING_TYPES, help_text="Reserved for future use.")
armor_class = models.IntegerField(
null=True, default=0,
help_text="(Armor only) How many hits does this armor protect against?"
)
armor_type = models.IntegerField(null=True, blank=True, choices=ARMOR_TYPES)
armor_penalty = models.IntegerField(
default=0, null=True,
help_text="(Armor only) How much does this reduce the player's chance to hit, if they don't have enough "
"armor expertise?"
)
get_all = models.BooleanField(
default=True,
help_text="Will the 'get all' command pick up this item?"
)
embedded = models.BooleanField(
default=False,
help_text="Check this box to make the item not appear in the artifacts list until the player looks at it.")
hidden = models.BooleanField(
default=False,
help_text="(For secret doors only) Check this box for embedded secret doors, so that the player can't "
"pass through them before finding them.")
quantity = models.IntegerField(
null=True, blank=True,
help_text="Drinks or bites, fuel for light source, etc."
)
effect_id = models.IntegerField(
null=True, blank=True,
help_text="First effect ID for Readable artifacts"
)
num_effects = models.IntegerField(
null=True, blank=True,
help_text="Number of effects for Readable artifacts"
)
data = models.TextField(
max_length=1000, null=True, blank=True,
help_text="Adventure-specific data for this artifact, e.g., elemental weapon, etc."
"Enter as a JSON object."
)
def __str__(self):
return self.name
class ArtifactMarking(models.Model):
"""
Markings on a readable artifact
"""
artifact = models.ForeignKey(Artifact, on_delete=models.CASCADE)
marking = models.TextField(max_length=65535)
class Effect(models.Model):
STYLES = (
('', 'Normal'),
('emphasis', 'Bold'),
('success', 'Success (green)'),
('special', 'Special 1 (blue)'),
        ('special2', 'Special 2 (purple)'),
('warning', 'Warning (orange)'),
('danger', 'Danger (red)'),
)
adventure = models.ForeignKey(Adventure, on_delete=models.CASCADE, related_name='effects')
effect_id = models.IntegerField(default=0) # The in-game effect ID.
is_markdown = models.BooleanField(default=False, choices=MARKDOWN_CHOICES, verbose_name="Text format")
text = models.TextField(max_length=65535)
style = models.CharField(max_length=20, null=True, blank=True, choices=STYLES) # display effect text in color
next = models.IntegerField(null=True, blank=True,
help_text="The next chained effect. Used with EDX conversions.")
next_inline = models.IntegerField(null=True, blank=True,
help_text="The next chained effect, no line break. Used with EDX conversions.")
def __str__(self):
return self.text[0:50]
class Monster(models.Model):
FRIENDLINESS = (
('friend', 'Always Friendly'),
('neutral', 'Always Neutral'),
('hostile', 'Always Hostile'),
('random', 'Random'),
)
COMBAT_CODES = (
(1, "Attacks using generic ATTACK message (e.g., slime, snake, bird)"),
(0, "Uses weapon, or with natural weapons if specified (default)"),
(-1, "Use weapon if it has one, otherwise natural weapons"),
(-2, "Never fights"),
)
adventure = models.ForeignKey(Adventure, on_delete=models.CASCADE, related_name='monsters')
monster_id = models.IntegerField(default=0) # The in-game monster ID.
article = models.CharField(max_length=20, null=True, blank=True,
help_text="Optional article or adjective that appears before the name, "
"e.g., 'a', 'the', 'some'. Does not apply to group monsters.")
name = models.CharField(max_length=255)
name_plural = models.CharField(
max_length=255, null=True, blank=True,
help_text="The plural form of the name. Used only with group monsters.")
synonyms = models.CharField(
null=True, max_length=255, blank=True,
help_text="Other names used for this monster. If the name is 'python' a synonym might be 'snake'")
is_markdown = models.BooleanField(default=False, choices=MARKDOWN_CHOICES, verbose_name="Text format")
description = models.TextField(max_length=1000)
# The ID of an effect to display after the description
effect = models.IntegerField(null=True, help_text="Used only with EDX conversions")
# The ID of an effect to display after the description, without a paragraph break.
effect_inline = models.IntegerField(null=True, help_text="Used only with EDX conversions")
count = models.IntegerField(default=1)
hardiness = models.IntegerField(default=12)
agility = models.IntegerField(default=12)
friendliness = models.CharField(max_length=10, choices=FRIENDLINESS)
friend_odds = models.IntegerField(default=50,
help_text="Used only when 'Friendliness' is 'Random'"
)
combat_code = models.IntegerField(default=0, choices=COMBAT_CODES)
courage = models.IntegerField(default=100)
pursues = models.BooleanField(default=True, help_text="Will the monster pursue a fleeing player?")
room_id = models.IntegerField(null=True, blank=True)
container_id = models.IntegerField(
null=True, blank=True,
help_text="Container artifact ID where this monster starts. The monster will enter the room as soon as the "
"container is opened. e.g., a vampire who awakes when you open his coffin"
)
gender = models.CharField(max_length=6, choices=(
('male', 'Male'),
('female', 'Female'),
('none', 'None'),
), null=True, blank=True)
weapon_id = models.IntegerField(
null=True, blank=True,
help_text="Enter an artifact ID, or zero for natural weapons. Leave blank for no weapon.")
attack_odds = models.IntegerField(
default=50,
help_text="Base attack odds, before agility and armor adjustments. Weapon type does not matter.")
weapon_dice = models.IntegerField(
default=1,
help_text="Applies to natural weapons only. For an artifact weapon, the weapon's dice and sides will be used.")
weapon_sides = models.IntegerField(default=4,
help_text="Applies to natural weapons only.")
defense_bonus = models.IntegerField(
default=0,
help_text="Gives the monster an additional percent bonus to avoid being hit. (Rare)"
)
armor_class = models.IntegerField(default=0)
special = models.CharField(max_length=255, null=True, blank=True)
data = models.TextField(
max_length=1000, null=True, blank=True,
help_text="Adventure-specific data for this monster, e.g., type of monster like "
"vampire, undead, soldier, frost, etc. Data can be used in custom code. "
"Enter as a JSON object."
)
combat_verbs = models.CharField(
max_length=255, null=True, blank=True,
help_text="Custom combat verbs for this monster, e.g., 'stings' or 'breathes fire at'. "
"Leave blank to use the standard verbs.")
def __str__(self):
return self.name
class Hint(models.Model):
"""
Represents a hint for the adventure hints system
"""
adventure = models.ForeignKey(Adventure, on_delete=models.CASCADE, related_name='hints', null=True)
index = models.IntegerField(null=True)
edx = models.CharField(max_length=50, null=True, blank=True)
question = models.CharField(max_length=255)
def __str__(self):
return self.question
class HintAnswer(models.Model):
"""
Represents an answer to a hint. Each hint may have more than one answer.
"""
adventure = models.ForeignKey(Adventure, on_delete=models.CASCADE, related_name='hint_answers', null=True)
hint = models.ForeignKey(Hint, on_delete=models.CASCADE, related_name='answers')
index = models.IntegerField(null=True)
answer = models.TextField(max_length=1000, help_text="Supports Markdown.")
spoiler = models.BooleanField(default=False,
help_text="Obscure the answer until the user shows it.")
def save(self, **kwargs):
if self.hint and self.adventure_id != self.hint.adventure_id:
self.adventure_id = self.hint.adventure_id
super().save(**kwargs)
class PlayerProfile(models.Model):
social_id = models.CharField(max_length=100, null=True)
uuid = models.CharField(max_length=255, null=True)
class Player(models.Model):
"""
Represents the player saved in the main hall.
"""
name = models.CharField(max_length=255)
gender = models.CharField(max_length=6, choices=(
('m', 'Male'),
('f', 'Female')
))
hardiness = models.IntegerField(default=12)
agility = models.IntegerField(default=12)
charisma = models.IntegerField(default=12)
gold = models.IntegerField(default=200)
gold_in_bank = models.IntegerField(default=0)
wpn_axe = models.IntegerField("Axe ability", default=5)
wpn_bow = models.IntegerField("Bow/missile ability", default=-10)
wpn_club = models.IntegerField("Club ability", default=20)
wpn_spear = models.IntegerField("Spear/Polearm ability", default=10)
wpn_sword = models.IntegerField("Sword ability", default=0)
armor_expertise = models.IntegerField(default=0)
spl_blast = models.IntegerField("Blast ability", default=0)
spl_heal = models.IntegerField("Heal ability", default=0)
spl_power = models.IntegerField("Power ability", default=0)
spl_speed = models.IntegerField("Speed ability", default=0)
uuid = models.CharField(max_length=255, null=True)
def __str__(self):
return self.name
def log(self, type, adventure_id=None):
l = ActivityLog(player=self, type=type, adventure_id=adventure_id)
l.save()
class PlayerArtifact(models.Model):
"""
The items (weapons, armor, shield) in the player's inventory in the main hall
"""
TYPES = (
(2, 'Weapon'),
(3, 'Magic Weapon'),
(11, 'Wearable'), # armor/shield
)
ARMOR_TYPES = (
(0, 'Armor'),
(1, 'Shield'), # different in EDX - see manual
(2, 'Helmet'),
(3, 'Gloves'),
(4, 'Ring'),
)
HANDS = (
(1, 'One-handed'),
(2, 'Two-handed')
)
player = models.ForeignKey(Player, on_delete=models.CASCADE, related_name='inventory')
name = models.CharField(max_length=255)
description = models.TextField(max_length=1000)
type = models.IntegerField(choices=TYPES)
weight = models.IntegerField(default=0)
value = models.IntegerField(default=0)
weapon_type = models.IntegerField(default=0, choices=WEAPON_TYPES, null=True)
hands = models.IntegerField(choices=HANDS, default=1)
weapon_odds = models.IntegerField(default=0, null=True)
dice = models.IntegerField(default=1, null=True)
sides = models.IntegerField(default=1, null=True)
armor_type = models.IntegerField(default=0, choices=ARMOR_TYPES, null=True)
armor_class = models.IntegerField(default=0, null=True)
armor_penalty = models.IntegerField(default=0, null=True)
def __str__(self):
return "{} {}".format(self.player, self.name)
class ActivityLog(models.Model):
"""
Used to track player activity (going on adventures, etc.)
"""
player = models.ForeignKey(Player, null=True, blank=True, on_delete=models.CASCADE, related_name='activity_log')
type = models.CharField(max_length=255)
value = models.IntegerField(null=True, blank=True)
adventure = models.ForeignKey(Adventure, on_delete=models.CASCADE, related_name='activity_log', null=True)
created = models.DateTimeField(auto_now_add=True, null=True)
|
[
"keith.dechant@gmail.com"
] |
keith.dechant@gmail.com
|
70252ccd8d751ddb991b9baf48cccda96d0787ae
|
00758be070825c33d9178c8a50d1a59ee2c3c790
|
/ppci/format/pefile/pefile.py
|
9828d6c93963c5f9a97d85c328a03344460d59d7
|
[
"BSD-2-Clause"
] |
permissive
|
jsdelivrbot/ppci-mirror
|
d2a87f21a735a9495ad1130959b599ab317a62f6
|
67195d628275e2332ceaf44c9e13fc58d0877157
|
refs/heads/master
| 2020-04-10T06:23:38.964744
| 2018-12-07T17:05:05
| 2018-12-07T17:05:05
| 160,853,011
| 0
| 0
|
BSD-2-Clause
| 2018-12-07T17:07:00
| 2018-12-07T17:07:00
| null |
UTF-8
|
Python
| false
| false
| 312
|
py
|
from .headers import DosHeader, CoffHeader, PeOptionalHeader64
from .headers import ImageSectionHeader, PeHeader, DataDirectoryHeader
from .headers import ImportDirectoryTable
class PeFile:
""" Pe (exe) file """
def __init__(self):
self.pe_header = PeHeader()
class ExeFile(PeFile):
pass
|
[
"windel@windel.nl"
] |
windel@windel.nl
|
79dc973cef96d3e5eef6f7cd3552f8a91bf78cb4
|
b71f656374293c5f1238fcb449aa4dde78632861
|
/eudplib/utils/blockstru.py
|
c3e9a5159a3ca9e5b4b729aab409a5ff123314d1
|
[
"MIT"
] |
permissive
|
tobeinged/eudplib
|
ce1cdc15f7ec6af857b4b64b5c826b3dd95d3e48
|
066c0faa200dc19e70cdb6979daf8f008b8ae957
|
refs/heads/master
| 2023-05-04T08:49:01.180147
| 2019-03-18T14:30:29
| 2019-03-18T14:30:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,549
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Copyright (c) 2014 trgk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
from .eperror import ep_assert
class BlockStruManager:
def __init__(self):
self._blockstru = []
self._lastblockdict = {}
def empty(self):
return not self._blockstru
_current_bsm = BlockStruManager() # Default one
def SetCurrentBlockStruManager(bsm):
global _current_bsm
old_bsm = _current_bsm
_current_bsm = bsm
return old_bsm
def EUDCreateBlock(name, userdata):
_blockstru = _current_bsm._blockstru
_lastblockdict = _current_bsm._lastblockdict
block = (name, userdata)
_blockstru.append(block)
if name not in _lastblockdict:
_lastblockdict[name] = []
_lastblockdict[name].append(block)
def EUDGetLastBlock():
_blockstru = _current_bsm._blockstru
return _blockstru[-1]
def EUDGetLastBlockOfName(name):
_lastblockdict = _current_bsm._lastblockdict
return _lastblockdict[name][-1]
def EUDPeekBlock(name):
lastblock = EUDGetLastBlock()
ep_assert(lastblock[0] == name, 'Block starting/ending mismatch')
return lastblock
def EUDPopBlock(name):
_blockstru = _current_bsm._blockstru
_lastblockdict = _current_bsm._lastblockdict
lastblock = _blockstru.pop()
ep_assert(lastblock[0] == name, """\
Block starting/ending mismatch:
- Started with %s
- Ended with %s\
""" % (lastblock[0], name))
_lastblockdict[name].pop()
return lastblock
def EUDGetBlockList():
return _current_bsm._blockstru
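# Typical pairing, shown only as an illustration (not part of this module): every
# EUDCreateBlock must be matched by an EUDPopBlock with the same name, otherwise
# the 'Block starting/ending mismatch' assertion above is raised.
#   EUDCreateBlock('ifblock', userdata)
#   ...  # emit whatever belongs inside the block
#   EUDPopBlock('ifblock')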
|
[
"phu54321@naver.com"
] |
phu54321@naver.com
|
a4056e610f35a5a1bfbe93990398a2a61a725fde
|
b7620d0f1a90390224c8ab71774b9c906ab3e8e9
|
/aliyun-python-sdk-imm/aliyunsdkimm/request/v20200930/CreateFigureClusteringTaskRequest.py
|
cfa9780027b8c39cc94abaae16dbba18b98bda90
|
[
"Apache-2.0"
] |
permissive
|
YaoYinYing/aliyun-openapi-python-sdk
|
e9c62940baee1a35b9ec4a9fbd1e4eb0aaf93b2f
|
e9a93cc94bd8290d1b1a391a9cb0fad2e6c64627
|
refs/heads/master
| 2022-10-17T16:39:04.515562
| 2022-10-10T15:18:34
| 2022-10-10T15:18:34
| 117,057,304
| 0
| 0
| null | 2018-01-11T06:03:02
| 2018-01-11T06:03:01
| null |
UTF-8
|
Python
| false
| false
| 2,476
|
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkimm.endpoint import endpoint_data
import json
class CreateFigureClusteringTaskRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'imm', '2020-09-30', 'CreateFigureClusteringTask','imm')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_UserData(self): # String
return self.get_query_params().get('UserData')
def set_UserData(self, UserData): # String
self.add_query_param('UserData', UserData)
def get_ProjectName(self): # String
return self.get_query_params().get('ProjectName')
def set_ProjectName(self, ProjectName): # String
self.add_query_param('ProjectName', ProjectName)
def get_NotifyTopicName(self): # String
return self.get_query_params().get('NotifyTopicName')
def set_NotifyTopicName(self, NotifyTopicName): # String
self.add_query_param('NotifyTopicName', NotifyTopicName)
def get_NotifyEndpoint(self): # String
return self.get_query_params().get('NotifyEndpoint')
def set_NotifyEndpoint(self, NotifyEndpoint): # String
self.add_query_param('NotifyEndpoint', NotifyEndpoint)
def get_DatasetName(self): # String
return self.get_query_params().get('DatasetName')
def set_DatasetName(self, DatasetName): # String
self.add_query_param('DatasetName', DatasetName)
def get_Tags(self): # Map
return self.get_query_params().get('Tags')
def set_Tags(self, Tags): # Map
self.add_query_param("Tags", json.dumps(Tags))
|
[
"sdk-team@alibabacloud.com"
] |
sdk-team@alibabacloud.com
|
7d99e26a6d7d4b0a7f916ad07f46105c644061c7
|
ac2f43c8e0d9649a7f063c59b3dffdfed9fd7ed7
|
/tests2/common/base_slaac_test.py
|
459b17fe8aebffa9efbf641b36e553aada1068c0
|
[] |
no_license
|
facebook/openbmc
|
bef10604ced226288600f55248b7f1be9945aea4
|
32777c66a8410d767eae15baabf71c61a0bef13c
|
refs/heads/helium
| 2023-08-17T03:13:54.729494
| 2023-08-16T23:24:18
| 2023-08-16T23:24:18
| 31,917,712
| 684
| 331
| null | 2023-07-25T21:19:08
| 2015-03-09T19:18:35
|
C
|
UTF-8
|
Python
| false
| false
| 2,578
|
py
|
#!/usr/bin/env python3
#
# Copyright 2018-present Facebook. All Rights Reserved.
#
# This program file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program in a file named COPYING; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301 USA
#
import subprocess
from common.base_interface_test import BaseInterfaceTest
from utils.cit_logger import Logger
class BaseSlaacTest(BaseInterfaceTest):
def get_ipv6_address(self):
"""
Get inet6 address with highest length of a given interface
overriding this method of BaseInterfaceTest class because we want
to have inet6 address with highest length
"""
out = self.get_ip_addr_output_inet6()
# trying to find inet6 address with highest length
ipv6 = ""
for value in out[1:]:
if len(value.split("/")[0]) > len(ipv6):
ipv6 = value.split("/")[0]
Logger.debug("Got ip address for " + str(self.ifname))
return ipv6.lower()
def get_mac_address(self):
"""
Get Ethernet MAC address
"""
f = subprocess.Popen(
["fw_printenv", "-n", "ethaddr"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
out, err = f.communicate()
self.assertEqual(
f.returncode,
0,
"fw_printenv -n ethaddr "
+ "exited with returncode: "
+ str(f.returncode)
+ ", err: "
+ str(err),
)
if out:
out = out.decode("utf-8").rstrip()
return out.lower()
else:
raise Exception("Couldn't find MAC address [FAILED]")
def generate_modified_eui_64_mac_address(self):
"""
Get Modified EUI-64 Mac Address
"""
mac_address = self.get_mac_address().split(":")
        # Flip the 7th bit (the universal/local bit) of the first octet.
mac_address[0] = hex(int(mac_address[0], 16) ^ 2)[2:]
mac_address[2] = mac_address[2] + "fffe"
return "".join(mac_address)
|
[
"facebook-github-bot@users.noreply.github.com"
] |
facebook-github-bot@users.noreply.github.com
|
9d5eed9ac51e2c80d2bf186b88f4046d7c5f5a3a
|
080c13cd91a073457bd9eddc2a3d13fc2e0e56ae
|
/MY_REPOS/awesome-4-new-developers/tensorflow-master/tensorflow/python/lib/io/file_io_test.py
|
e4767806896723cb6003db317ab58e1c9a241678
|
[
"Apache-2.0"
] |
permissive
|
Portfolio-Projects42/UsefulResourceRepo2.0
|
1dccc8961a09347f124d3ed7c27c6d73b9806189
|
75b1e23c757845b5f1894ebe53551a1cf759c6a3
|
refs/heads/master
| 2023-08-04T12:23:48.862451
| 2021-09-15T12:51:35
| 2021-09-15T12:51:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 28,674
|
py
|
# This Python file uses the following encoding: utf-8
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Testing File IO operations in file_io.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
from absl.testing import parameterized
import numpy as np
from tensorflow.python.framework import errors
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
class PathLike(object):
"""Backport of pathlib.Path for Python < 3.6"""
def __init__(self, name):
self.name = name
def __fspath__(self):
return self.name
def __str__(self):
return self.name
run_all_path_types = parameterized.named_parameters(
("str", os.path.join), ("pathlike", lambda *paths: PathLike(os.path.join(*paths)))
)
class FileIoTest(test.TestCase, parameterized.TestCase):
def setUp(self):
self._base_dir = os.path.join(self.get_temp_dir(), "base_dir")
file_io.create_dir(self._base_dir)
def tearDown(self):
file_io.delete_recursively(self._base_dir)
def testEmptyFilename(self):
f = file_io.FileIO("", mode="r")
with self.assertRaises(errors.NotFoundError):
_ = f.read()
@run_all_path_types
def testFileDoesntExist(self, join):
file_path = join(self._base_dir, "temp_file")
self.assertFalse(file_io.file_exists(file_path))
with self.assertRaises(errors.NotFoundError):
_ = file_io.read_file_to_string(file_path)
@run_all_path_types
def testWriteToString(self, join):
file_path = join(self._base_dir, "temp_file")
file_io.write_string_to_file(file_path, "testing")
self.assertTrue(file_io.file_exists(file_path))
file_contents = file_io.read_file_to_string(file_path)
self.assertEqual("testing", file_contents)
def testAtomicWriteStringToFile(self):
file_path = os.path.join(self._base_dir, "temp_file")
file_io.atomic_write_string_to_file(file_path, "testing")
self.assertTrue(file_io.file_exists(file_path))
file_contents = file_io.read_file_to_string(file_path)
self.assertEqual("testing", file_contents)
def testAtomicWriteStringToFileOverwriteFalse(self):
file_path = os.path.join(self._base_dir, "temp_file")
file_io.atomic_write_string_to_file(file_path, "old", overwrite=False)
with self.assertRaises(errors.AlreadyExistsError):
file_io.atomic_write_string_to_file(file_path, "new", overwrite=False)
file_contents = file_io.read_file_to_string(file_path)
self.assertEqual("old", file_contents)
file_io.delete_file(file_path)
file_io.atomic_write_string_to_file(file_path, "new", overwrite=False)
file_contents = file_io.read_file_to_string(file_path)
self.assertEqual("new", file_contents)
@run_all_path_types
def testReadBinaryMode(self, join):
file_path = join(self._base_dir, "temp_file")
file_io.write_string_to_file(file_path, "testing")
with file_io.FileIO(file_path, mode="rb") as f:
self.assertEqual(b"testing", f.read())
@run_all_path_types
def testWriteBinaryMode(self, join):
file_path = join(self._base_dir, "temp_file")
file_io.FileIO(file_path, "wb").write("testing")
with file_io.FileIO(file_path, mode="r") as f:
self.assertEqual("testing", f.read())
def testAppend(self):
file_path = os.path.join(self._base_dir, "temp_file")
with file_io.FileIO(file_path, mode="w") as f:
f.write("begin\n")
with file_io.FileIO(file_path, mode="a") as f:
f.write("a1\n")
with file_io.FileIO(file_path, mode="a") as f:
f.write("a2\n")
with file_io.FileIO(file_path, mode="r") as f:
file_contents = f.read()
self.assertEqual("begin\na1\na2\n", file_contents)
def testMultipleFiles(self):
file_prefix = os.path.join(self._base_dir, "temp_file")
for i in range(5000):
f = file_io.FileIO(file_prefix + str(i), mode="w+")
f.write("testing")
f.flush()
self.assertEqual("testing", f.read())
f.close()
def testMultipleWrites(self):
file_path = os.path.join(self._base_dir, "temp_file")
with file_io.FileIO(file_path, mode="w") as f:
f.write("line1\n")
f.write("line2")
file_contents = file_io.read_file_to_string(file_path)
self.assertEqual("line1\nline2", file_contents)
def testFileWriteBadMode(self):
file_path = os.path.join(self._base_dir, "temp_file")
with self.assertRaises(errors.PermissionDeniedError):
file_io.FileIO(file_path, mode="r").write("testing")
def testFileReadBadMode(self):
file_path = os.path.join(self._base_dir, "temp_file")
file_io.FileIO(file_path, mode="w").write("testing")
self.assertTrue(file_io.file_exists(file_path))
with self.assertRaises(errors.PermissionDeniedError):
file_io.FileIO(file_path, mode="w").read()
@run_all_path_types
def testFileDelete(self, join):
file_path = join(self._base_dir, "temp_file")
file_io.FileIO(file_path, mode="w").write("testing")
file_io.delete_file(file_path)
self.assertFalse(file_io.file_exists(file_path))
def testFileDeleteFail(self):
file_path = os.path.join(self._base_dir, "temp_file")
with self.assertRaises(errors.NotFoundError):
file_io.delete_file(file_path)
def testGetMatchingFiles(self):
dir_path = os.path.join(self._base_dir, "temp_dir")
file_io.create_dir(dir_path)
files = ["file1.txt", "file2.txt", "file3.txt", "file*.txt"]
for name in files:
file_path = os.path.join(dir_path, name)
file_io.FileIO(file_path, mode="w").write("testing")
expected_match = [os.path.join(dir_path, name) for name in files]
self.assertItemsEqual(
file_io.get_matching_files(os.path.join(dir_path, "file*.txt")),
expected_match,
)
self.assertItemsEqual(file_io.get_matching_files(tuple()), [])
files_subset = [
os.path.join(dir_path, files[0]),
os.path.join(dir_path, files[2]),
]
self.assertItemsEqual(file_io.get_matching_files(files_subset), files_subset)
file_io.delete_recursively(dir_path)
self.assertFalse(file_io.file_exists(os.path.join(dir_path, "file3.txt")))
    def testGetMatchingFilesWhenParentDirContainsParentheses(self):
dir_path = os.path.join(self._base_dir, "dir_(special)")
file_io.create_dir(dir_path)
files = ["file1.txt", "file(2).txt"]
for name in files:
file_path = os.path.join(dir_path, name)
file_io.FileIO(file_path, mode="w").write("testing")
expected_match = [os.path.join(dir_path, name) for name in files]
glob_pattern = os.path.join(dir_path, "*")
self.assertItemsEqual(file_io.get_matching_files(glob_pattern), expected_match)
@run_all_path_types
def testCreateRecursiveDir(self, join):
dir_path = join(self._base_dir, "temp_dir/temp_dir1/temp_dir2")
file_io.recursive_create_dir(dir_path)
file_io.recursive_create_dir(dir_path) # repeat creation
file_path = os.path.join(str(dir_path), "temp_file")
file_io.FileIO(file_path, mode="w").write("testing")
self.assertTrue(file_io.file_exists(file_path))
file_io.delete_recursively(os.path.join(self._base_dir, "temp_dir"))
self.assertFalse(file_io.file_exists(file_path))
@run_all_path_types
def testCopy(self, join):
file_path = join(self._base_dir, "temp_file")
file_io.FileIO(file_path, mode="w").write("testing")
copy_path = join(self._base_dir, "copy_file")
file_io.copy(file_path, copy_path)
self.assertTrue(file_io.file_exists(copy_path))
f = file_io.FileIO(file_path, mode="r")
self.assertEqual("testing", f.read())
self.assertEqual(7, f.tell())
def testCopyOverwrite(self):
file_path = os.path.join(self._base_dir, "temp_file")
file_io.FileIO(file_path, mode="w").write("testing")
copy_path = os.path.join(self._base_dir, "copy_file")
file_io.FileIO(copy_path, mode="w").write("copy")
file_io.copy(file_path, copy_path, overwrite=True)
self.assertTrue(file_io.file_exists(copy_path))
self.assertEqual("testing", file_io.FileIO(file_path, mode="r").read())
def testCopyOverwriteFalse(self):
file_path = os.path.join(self._base_dir, "temp_file")
file_io.FileIO(file_path, mode="w").write("testing")
copy_path = os.path.join(self._base_dir, "copy_file")
file_io.FileIO(copy_path, mode="w").write("copy")
with self.assertRaises(errors.AlreadyExistsError):
file_io.copy(file_path, copy_path, overwrite=False)
@run_all_path_types
def testRename(self, join):
file_path = join(self._base_dir, "temp_file")
file_io.FileIO(file_path, mode="w").write("testing")
rename_path = join(self._base_dir, "rename_file")
file_io.rename(file_path, rename_path)
self.assertTrue(file_io.file_exists(rename_path))
self.assertFalse(file_io.file_exists(file_path))
def testRenameOverwrite(self):
file_path = os.path.join(self._base_dir, "temp_file")
file_io.FileIO(file_path, mode="w").write("testing")
rename_path = os.path.join(self._base_dir, "rename_file")
file_io.FileIO(rename_path, mode="w").write("rename")
file_io.rename(file_path, rename_path, overwrite=True)
self.assertTrue(file_io.file_exists(rename_path))
self.assertFalse(file_io.file_exists(file_path))
def testRenameOverwriteFalse(self):
file_path = os.path.join(self._base_dir, "temp_file")
file_io.FileIO(file_path, mode="w").write("testing")
rename_path = os.path.join(self._base_dir, "rename_file")
file_io.FileIO(rename_path, mode="w").write("rename")
with self.assertRaises(errors.AlreadyExistsError):
file_io.rename(file_path, rename_path, overwrite=False)
self.assertTrue(file_io.file_exists(rename_path))
self.assertTrue(file_io.file_exists(file_path))
def testDeleteRecursivelyFail(self):
fake_dir_path = os.path.join(self._base_dir, "temp_dir")
with self.assertRaises(errors.NotFoundError):
file_io.delete_recursively(fake_dir_path)
@run_all_path_types
def testIsDirectory(self, join):
dir_path = join(self._base_dir, "test_dir")
        # False for a non-existent dir.
self.assertFalse(file_io.is_directory(dir_path))
file_io.create_dir(dir_path)
self.assertTrue(file_io.is_directory(dir_path))
file_path = join(str(dir_path), "test_file")
file_io.FileIO(file_path, mode="w").write("test")
# False for a file.
self.assertFalse(file_io.is_directory(file_path))
# Test that the value returned from `stat()` has `is_directory` set.
file_statistics = file_io.stat(dir_path)
self.assertTrue(file_statistics.is_directory)
@run_all_path_types
def testListDirectory(self, join):
dir_path = join(self._base_dir, "test_dir")
file_io.create_dir(dir_path)
files = ["file1.txt", "file2.txt", "file3.txt"]
for name in files:
file_path = join(str(dir_path), name)
file_io.FileIO(file_path, mode="w").write("testing")
subdir_path = join(str(dir_path), "sub_dir")
file_io.create_dir(subdir_path)
subdir_file_path = join(str(subdir_path), "file4.txt")
file_io.FileIO(subdir_file_path, mode="w").write("testing")
dir_list = file_io.list_directory(dir_path)
self.assertItemsEqual(files + ["sub_dir"], dir_list)
def testListDirectoryFailure(self):
dir_path = os.path.join(self._base_dir, "test_dir")
with self.assertRaises(errors.NotFoundError):
file_io.list_directory(dir_path)
def _setupWalkDirectories(self, dir_path):
# Creating a file structure as follows
# test_dir -> file: file1.txt; dirs: subdir1_1, subdir1_2, subdir1_3
        # subdir1_1 -> file: file2.txt
# subdir1_2 -> dir: subdir2
file_io.create_dir(dir_path)
file_io.FileIO(os.path.join(dir_path, "file1.txt"), mode="w").write("testing")
sub_dirs1 = ["subdir1_1", "subdir1_2", "subdir1_3"]
for name in sub_dirs1:
file_io.create_dir(os.path.join(dir_path, name))
file_io.FileIO(os.path.join(dir_path, "subdir1_1/file2.txt"), mode="w").write(
"testing"
)
file_io.create_dir(os.path.join(dir_path, "subdir1_2/subdir2"))
@run_all_path_types
def testWalkInOrder(self, join):
dir_path_str = os.path.join(self._base_dir, "test_dir")
dir_path = join(self._base_dir, "test_dir")
self._setupWalkDirectories(dir_path_str)
# Now test the walk (in_order = True)
all_dirs = []
all_subdirs = []
all_files = []
for (w_dir, w_subdirs, w_files) in file_io.walk(dir_path, in_order=True):
all_dirs.append(w_dir)
all_subdirs.append(w_subdirs)
all_files.append(w_files)
self.assertItemsEqual(
all_dirs,
[dir_path_str]
+ [
os.path.join(dir_path_str, item)
for item in ["subdir1_1", "subdir1_2", "subdir1_2/subdir2", "subdir1_3"]
],
)
self.assertEqual(dir_path_str, all_dirs[0])
self.assertLess(
all_dirs.index(os.path.join(dir_path_str, "subdir1_2")),
all_dirs.index(os.path.join(dir_path_str, "subdir1_2/subdir2")),
)
self.assertItemsEqual(all_subdirs[1:5], [[], ["subdir2"], [], []])
self.assertItemsEqual(all_subdirs[0], ["subdir1_1", "subdir1_2", "subdir1_3"])
self.assertItemsEqual(all_files, [["file1.txt"], ["file2.txt"], [], [], []])
self.assertLess(all_files.index(["file1.txt"]), all_files.index(["file2.txt"]))
def testWalkPostOrder(self):
dir_path = os.path.join(self._base_dir, "test_dir")
self._setupWalkDirectories(dir_path)
# Now test the walk (in_order = False)
all_dirs = []
all_subdirs = []
all_files = []
for (w_dir, w_subdirs, w_files) in file_io.walk(dir_path, in_order=False):
all_dirs.append(w_dir)
all_subdirs.append(w_subdirs)
all_files.append(w_files)
self.assertItemsEqual(
all_dirs,
[
os.path.join(dir_path, item)
for item in ["subdir1_1", "subdir1_2/subdir2", "subdir1_2", "subdir1_3"]
]
+ [dir_path],
)
self.assertEqual(dir_path, all_dirs[4])
self.assertLess(
all_dirs.index(os.path.join(dir_path, "subdir1_2/subdir2")),
all_dirs.index(os.path.join(dir_path, "subdir1_2")),
)
self.assertItemsEqual(all_subdirs[0:4], [[], [], ["subdir2"], []])
self.assertItemsEqual(all_subdirs[4], ["subdir1_1", "subdir1_2", "subdir1_3"])
self.assertItemsEqual(all_files, [["file2.txt"], [], [], [], ["file1.txt"]])
self.assertLess(all_files.index(["file2.txt"]), all_files.index(["file1.txt"]))
def testWalkFailure(self):
dir_path = os.path.join(self._base_dir, "test_dir")
# Try walking a directory that wasn't created.
all_dirs = []
all_subdirs = []
all_files = []
for (w_dir, w_subdirs, w_files) in file_io.walk(dir_path, in_order=False):
all_dirs.append(w_dir)
all_subdirs.append(w_subdirs)
all_files.append(w_files)
self.assertItemsEqual(all_dirs, [])
self.assertItemsEqual(all_subdirs, [])
self.assertItemsEqual(all_files, [])
@run_all_path_types
def testStat(self, join):
file_path = join(self._base_dir, "temp_file")
file_io.FileIO(file_path, mode="w").write("testing")
file_statistics = file_io.stat(file_path)
os_statistics = os.stat(str(file_path))
self.assertEqual(7, file_statistics.length)
self.assertEqual(
int(os_statistics.st_mtime), int(file_statistics.mtime_nsec / 1e9)
)
self.assertFalse(file_statistics.is_directory)
def testReadLine(self):
file_path = os.path.join(self._base_dir, "temp_file")
with file_io.FileIO(file_path, mode="r+") as f:
f.write("testing1\ntesting2\ntesting3\n\ntesting5")
self.assertEqual(36, f.size())
self.assertEqual("testing1\n", f.readline())
self.assertEqual("testing2\n", f.readline())
self.assertEqual("testing3\n", f.readline())
self.assertEqual("\n", f.readline())
self.assertEqual("testing5", f.readline())
self.assertEqual("", f.readline())
def testRead(self):
file_path = os.path.join(self._base_dir, "temp_file")
with file_io.FileIO(file_path, mode="r+") as f:
f.write("testing1\ntesting2\ntesting3\n\ntesting5")
self.assertEqual(36, f.size())
self.assertEqual("testing1\n", f.read(9))
self.assertEqual("testing2\n", f.read(9))
self.assertEqual("t", f.read(1))
self.assertEqual("esting3\n\ntesting5", f.read())
def testReadErrorReacquiresGil(self):
file_path = os.path.join(self._base_dir, "temp_file")
with file_io.FileIO(file_path, mode="r+") as f:
f.write("testing1\ntesting2\ntesting3\n\ntesting5")
with self.assertRaises(errors.InvalidArgumentError):
# At present, this is sufficient to convince ourselves that the change
# fixes the problem. That is, this test will seg fault without the change,
# and pass with it. Unfortunately, this is brittle, as it relies on the
# Python layer to pass the argument along to the wrapped C++ without
# checking the argument itself.
f.read(-2)
def testTell(self):
file_path = os.path.join(self._base_dir, "temp_file")
with file_io.FileIO(file_path, mode="r+") as f:
f.write("testing1\ntesting2\ntesting3\n\ntesting5")
self.assertEqual(0, f.tell())
self.assertEqual("testing1\n", f.readline())
self.assertEqual(9, f.tell())
self.assertEqual("testing2\n", f.readline())
self.assertEqual(18, f.tell())
self.assertEqual("testing3\n", f.readline())
self.assertEqual(27, f.tell())
self.assertEqual("\n", f.readline())
self.assertEqual(28, f.tell())
self.assertEqual("testing5", f.readline())
self.assertEqual(36, f.tell())
self.assertEqual("", f.readline())
self.assertEqual(36, f.tell())
def testSeek(self):
file_path = os.path.join(self._base_dir, "temp_file")
with file_io.FileIO(file_path, mode="r+") as f:
f.write("testing1\ntesting2\ntesting3\n\ntesting5")
self.assertEqual("testing1\n", f.readline())
self.assertEqual(9, f.tell())
# Seek to 18
f.seek(18)
self.assertEqual(18, f.tell())
self.assertEqual("testing3\n", f.readline())
# Seek back to 9
f.seek(9)
self.assertEqual(9, f.tell())
self.assertEqual("testing2\n", f.readline())
f.seek(0)
self.assertEqual(0, f.tell())
self.assertEqual("testing1\n", f.readline())
with self.assertRaises(errors.InvalidArgumentError):
f.seek(-1)
with self.assertRaises(TypeError):
f.seek()
# TODO(jhseu): Delete after position deprecation.
with self.assertRaises(TypeError):
f.seek(offset=0, position=0)
f.seek(position=9)
self.assertEqual(9, f.tell())
self.assertEqual("testing2\n", f.readline())
def testSeekFromWhat(self):
file_path = os.path.join(self._base_dir, "temp_file")
with file_io.FileIO(file_path, mode="r+") as f:
f.write("testing1\ntesting2\ntesting3\n\ntesting5")
self.assertEqual("testing1\n", f.readline())
self.assertEqual(9, f.tell())
# Seek to 18
f.seek(9, 1)
self.assertEqual(18, f.tell())
self.assertEqual("testing3\n", f.readline())
# Seek back to 9
f.seek(9, 0)
self.assertEqual(9, f.tell())
self.assertEqual("testing2\n", f.readline())
f.seek(-f.size(), 2)
self.assertEqual(0, f.tell())
self.assertEqual("testing1\n", f.readline())
with self.assertRaises(errors.InvalidArgumentError):
f.seek(0, 3)
def testReadingIterator(self):
file_path = os.path.join(self._base_dir, "temp_file")
data = ["testing1\n", "testing2\n", "testing3\n", "\n", "testing5"]
with file_io.FileIO(file_path, mode="r+") as f:
f.write("".join(data))
actual_data = []
for line in f:
actual_data.append(line)
self.assertSequenceEqual(actual_data, data)
def testReadlines(self):
file_path = os.path.join(self._base_dir, "temp_file")
data = ["testing1\n", "testing2\n", "testing3\n", "\n", "testing5"]
f = file_io.FileIO(file_path, mode="r+")
f.write("".join(data))
f.flush()
lines = f.readlines()
self.assertSequenceEqual(lines, data)
def testUTF8StringPath(self):
file_path = os.path.join(self._base_dir, "UTF8测试_file")
file_io.write_string_to_file(file_path, "testing")
with file_io.FileIO(file_path, mode="rb") as f:
self.assertEqual(b"testing", f.read())
def testEof(self):
"""Test that reading past EOF does not raise an exception."""
file_path = os.path.join(self._base_dir, "temp_file")
f = file_io.FileIO(file_path, mode="r+")
content = "testing"
f.write(content)
f.flush()
self.assertEqual(content, f.read(len(content) + 1))
@run_all_path_types
def testUTF8StringPathExists(self, join):
file_path = join(self._base_dir, "UTF8测试_file_exist")
file_io.write_string_to_file(file_path, "testing")
v = file_io.file_exists(file_path)
self.assertEqual(v, True)
def testFilecmp(self):
file1 = os.path.join(self._base_dir, "file1")
file_io.write_string_to_file(file1, "This is a sentence\n" * 100)
file2 = os.path.join(self._base_dir, "file2")
file_io.write_string_to_file(file2, "This is another sentence\n" * 100)
file3 = os.path.join(self._base_dir, "file3")
file_io.write_string_to_file(file3, u"This is another sentence\n" * 100)
self.assertFalse(file_io.filecmp(file1, file2))
self.assertTrue(file_io.filecmp(file2, file3))
def testFilecmpSameSize(self):
file1 = os.path.join(self._base_dir, "file1")
file_io.write_string_to_file(file1, "This is a sentence\n" * 100)
file2 = os.path.join(self._base_dir, "file2")
file_io.write_string_to_file(file2, "This is b sentence\n" * 100)
file3 = os.path.join(self._base_dir, "file3")
file_io.write_string_to_file(file3, u"This is b sentence\n" * 100)
self.assertFalse(file_io.filecmp(file1, file2))
self.assertTrue(file_io.filecmp(file2, file3))
def testFilecmpBinary(self):
file1 = os.path.join(self._base_dir, "file1")
file_io.FileIO(file1, "wb").write("testing\n\na")
file2 = os.path.join(self._base_dir, "file2")
file_io.FileIO(file2, "wb").write("testing\n\nb")
file3 = os.path.join(self._base_dir, "file3")
file_io.FileIO(file3, "wb").write("testing\n\nb")
file4 = os.path.join(self._base_dir, "file4")
file_io.FileIO(file4, "wb").write("testing\n\ntesting")
self.assertFalse(file_io.filecmp(file1, file2))
self.assertFalse(file_io.filecmp(file1, file4))
self.assertTrue(file_io.filecmp(file2, file3))
def testFileCrc32(self):
file1 = os.path.join(self._base_dir, "file1")
file_io.write_string_to_file(file1, "This is a sentence\n" * 100)
crc1 = file_io.file_crc32(file1)
file2 = os.path.join(self._base_dir, "file2")
file_io.write_string_to_file(file2, "This is another sentence\n" * 100)
crc2 = file_io.file_crc32(file2)
file3 = os.path.join(self._base_dir, "file3")
file_io.write_string_to_file(file3, "This is another sentence\n" * 100)
crc3 = file_io.file_crc32(file3)
self.assertTrue(crc1 != crc2)
self.assertEqual(crc2, crc3)
def testFileCrc32WithBytes(self):
file1 = os.path.join(self._base_dir, "file1")
file_io.write_string_to_file(file1, "This is a sentence\n" * 100)
crc1 = file_io.file_crc32(file1, block_size=24)
file2 = os.path.join(self._base_dir, "file2")
file_io.write_string_to_file(file2, "This is another sentence\n" * 100)
crc2 = file_io.file_crc32(file2, block_size=24)
file3 = os.path.join(self._base_dir, "file3")
file_io.write_string_to_file(file3, "This is another sentence\n" * 100)
crc3 = file_io.file_crc32(file3, block_size=-1)
self.assertTrue(crc1 != crc2)
self.assertEqual(crc2, crc3)
def testFileCrc32Binary(self):
file1 = os.path.join(self._base_dir, "file1")
file_io.FileIO(file1, "wb").write("testing\n\n")
crc1 = file_io.file_crc32(file1)
file2 = os.path.join(self._base_dir, "file2")
file_io.FileIO(file2, "wb").write("testing\n\n\n")
crc2 = file_io.file_crc32(file2)
file3 = os.path.join(self._base_dir, "file3")
file_io.FileIO(file3, "wb").write("testing\n\n\n")
crc3 = file_io.file_crc32(file3)
self.assertTrue(crc1 != crc2)
self.assertEqual(crc2, crc3)
def testMatchingFilesPermission(self):
# Create top level directory test_dir.
dir_path = os.path.join(self._base_dir, "test_dir")
file_io.create_dir(dir_path)
# Create second level directories `noread` and `any`.
noread_path = os.path.join(dir_path, "noread")
file_io.create_dir(noread_path)
any_path = os.path.join(dir_path, "any")
file_io.create_dir(any_path)
files = ["file1.txt", "file2.txt", "file3.txt"]
for name in files:
file_path = os.path.join(any_path, name)
file_io.FileIO(file_path, mode="w").write("testing")
file_path = os.path.join(noread_path, "file4.txt")
file_io.FileIO(file_path, mode="w").write("testing")
        # Remove read access from the noread directory.
os.chmod(noread_path, 0)
expected_match = [os.path.join(any_path, name) for name in files]
self.assertItemsEqual(
file_io.get_matching_files(os.path.join(dir_path, "*", "file*.txt")),
expected_match,
)
# Change noread back so that it could be cleaned during tearDown.
os.chmod(noread_path, 0o777)
def testFileSeekableWithZip(self):
        # Note: Test case for GitHub issue 27276; the issue is only exposed in Python 3.7+.
filename = os.path.join(self._base_dir, "a.npz")
np.savez_compressed(filename, {"a": 1, "b": 2})
with gfile.GFile(filename, "rb") as f:
info = np.load(
f, allow_pickle=True
) # pylint: disable=unexpected-keyword-arg
_ = [i for i in info.items()]
def testHasAtomicMove(self):
self.assertTrue(file_io.has_atomic_move("/a/b/c"))
if __name__ == "__main__":
test.main()
|
[
"bryan.guner@gmail.com"
] |
bryan.guner@gmail.com
|
44b3c3e17a72f62f7418db2902479cb858d1cca6
|
c275fc8e52e852a82c240d363fc80d818c938549
|
/fastreid/modeling/backbones/resnest.py
|
54d052d0d66e2081c3a4416c82bce401e9447f8b
|
[] |
no_license
|
JinkaiZheng/fast-reid_20200430
|
67c968698e6a1c837e7c1c49f0078afae96152a3
|
6832d28e8ddc9b743d2de2a1a089175b8dd4bfd4
|
refs/heads/master
| 2022-06-29T09:02:50.443834
| 2020-05-12T12:09:26
| 2020-05-12T12:09:26
| 262,527,154
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,224
|
py
|
# encoding: utf-8
# based on:
# https://github.com/zhanghang1989/ResNeSt/blob/master/resnest/torch/resnest.py
"""ResNeSt models"""
import logging
import math
import torch
from torch import nn
from .build import BACKBONE_REGISTRY
from ...layers import SplAtConv2d, IBN, Non_local
_url_format = 'https://hangzh.s3.amazonaws.com/encoding/models/{}-{}.pth'
_model_sha256 = {name: checksum for checksum, name in [
('528c19ca', 'resnest50'),
('22405ba7', 'resnest101'),
('75117900', 'resnest200'),
('0cc87c48', 'resnest269'),
]}
def short_hash(name):
if name not in _model_sha256:
raise ValueError('Pretrained model for {name} is not available.'.format(name=name))
return _model_sha256[name][:8]
model_urls = {name: _url_format.format(name, short_hash(name)) for
name in _model_sha256.keys()
}
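# Example of what the mapping above produces (derived directly from the code):
#   model_urls['resnest50'] == 'https://hangzh.s3.amazonaws.com/encoding/models/resnest50-528c19ca.pth'
# i.e. the checksum prefix comes from _model_sha256 via short_hash().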
class Bottleneck(nn.Module):
"""ResNet Bottleneck
"""
# pylint: disable=unused-argument
expansion = 4
def __init__(self, inplanes, planes, with_ibn=False, stride=1, downsample=None,
radix=1, cardinality=1, bottleneck_width=64,
avd=False, avd_first=False, dilation=1, is_first=False,
rectified_conv=False, rectify_avg=False,
norm_layer=None, dropblock_prob=0.0, last_gamma=False):
super(Bottleneck, self).__init__()
group_width = int(planes * (bottleneck_width / 64.)) * cardinality
self.conv1 = nn.Conv2d(inplanes, group_width, kernel_size=1, bias=False)
if with_ibn:
self.bn1 = IBN(group_width)
else:
self.bn1 = norm_layer(group_width)
self.dropblock_prob = dropblock_prob
self.radix = radix
self.avd = avd and (stride > 1 or is_first)
self.avd_first = avd_first
if self.avd:
self.avd_layer = nn.AvgPool2d(3, stride, padding=1)
stride = 1
if radix > 1:
self.conv2 = SplAtConv2d(
group_width, group_width, kernel_size=3,
stride=stride, padding=dilation,
dilation=dilation, groups=cardinality, bias=False,
radix=radix, rectify=rectified_conv,
rectify_avg=rectify_avg,
norm_layer=norm_layer,
dropblock_prob=dropblock_prob)
elif rectified_conv:
from rfconv import RFConv2d
self.conv2 = RFConv2d(
group_width, group_width, kernel_size=3, stride=stride,
padding=dilation, dilation=dilation,
groups=cardinality, bias=False,
average_mode=rectify_avg)
self.bn2 = norm_layer(group_width)
else:
self.conv2 = nn.Conv2d(
group_width, group_width, kernel_size=3, stride=stride,
padding=dilation, dilation=dilation,
groups=cardinality, bias=False)
self.bn2 = norm_layer(group_width)
self.conv3 = nn.Conv2d(
group_width, planes * 4, kernel_size=1, bias=False)
self.bn3 = norm_layer(planes * 4)
if last_gamma:
from torch.nn.init import zeros_
zeros_(self.bn3.weight)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.dilation = dilation
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
        if self.dropblock_prob > 0.0:
            # Note: the dropblock1/2/3 layers are not defined in this trimmed copy,
            # so dropblock_prob must remain 0.0 unless they are added back.
            out = self.dropblock1(out)
out = self.relu(out)
if self.avd and self.avd_first:
out = self.avd_layer(out)
out = self.conv2(out)
if self.radix == 1:
out = self.bn2(out)
if self.dropblock_prob > 0.0:
out = self.dropblock2(out)
out = self.relu(out)
if self.avd and not self.avd_first:
out = self.avd_layer(out)
out = self.conv3(out)
out = self.bn3(out)
if self.dropblock_prob > 0.0:
out = self.dropblock3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class ResNest(nn.Module):
"""ResNet Variants ResNest
Parameters
----------
block : Block
Class for the residual block. Options are BasicBlockV1, BottleneckV1.
layers : list of int
Numbers of layers in each block
classes : int, default 1000
Number of classification classes.
    dilated : bool, default False
        Apply a dilation strategy to the pretrained ResNet, yielding a stride-8 model
        typically used in semantic segmentation.
    norm_layer : object
        Normalization layer used in the backbone network (default: :class:`torch.nn.BatchNorm2d`;
        a synchronized cross-GPU BatchNorm layer can be passed in instead).
Reference:
- He, Kaiming, et al. "Deep residual learning for image recognition." Proceedings of the IEEE conference on computer vision and pattern recognition. 2016.
- Yu, Fisher, and Vladlen Koltun. "Multi-scale context aggregation by dilated convolutions."
"""
# pylint: disable=unused-variable
def __init__(self, last_stride, with_ibn, with_nl, block, layers, non_layers, radix=1, groups=1,
bottleneck_width=64,
dilated=False, dilation=1,
deep_stem=False, stem_width=64, avg_down=False,
rectified_conv=False, rectify_avg=False,
avd=False, avd_first=False,
final_drop=0.0, dropblock_prob=0,
last_gamma=False, norm_layer=nn.BatchNorm2d):
self.cardinality = groups
self.bottleneck_width = bottleneck_width
# ResNet-D params
self.inplanes = stem_width * 2 if deep_stem else 64
self.avg_down = avg_down
self.last_gamma = last_gamma
# ResNeSt params
self.radix = radix
self.avd = avd
self.avd_first = avd_first
super().__init__()
self.rectified_conv = rectified_conv
self.rectify_avg = rectify_avg
if rectified_conv:
from rfconv import RFConv2d
conv_layer = RFConv2d
else:
conv_layer = nn.Conv2d
conv_kwargs = {'average_mode': rectify_avg} if rectified_conv else {}
if deep_stem:
self.conv1 = nn.Sequential(
conv_layer(3, stem_width, kernel_size=3, stride=2, padding=1, bias=False, **conv_kwargs),
norm_layer(stem_width),
nn.ReLU(inplace=True),
conv_layer(stem_width, stem_width, kernel_size=3, stride=1, padding=1, bias=False, **conv_kwargs),
norm_layer(stem_width),
nn.ReLU(inplace=True),
conv_layer(stem_width, stem_width * 2, kernel_size=3, stride=1, padding=1, bias=False, **conv_kwargs),
)
else:
self.conv1 = conv_layer(3, 64, kernel_size=7, stride=2, padding=3,
bias=False, **conv_kwargs)
self.bn1 = norm_layer(self.inplanes)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0], with_ibn=with_ibn, norm_layer=norm_layer, is_first=False)
self.layer2 = self._make_layer(block, 128, layers[1], stride=2, with_ibn=with_ibn, norm_layer=norm_layer)
if dilated or dilation == 4:
self.layer3 = self._make_layer(block, 256, layers[2], stride=1, with_ibn=with_ibn,
dilation=2, norm_layer=norm_layer,
dropblock_prob=dropblock_prob)
self.layer4 = self._make_layer(block, 512, layers[3], stride=1, with_ibn=with_ibn,
dilation=4, norm_layer=norm_layer,
dropblock_prob=dropblock_prob)
elif dilation == 2:
self.layer3 = self._make_layer(block, 256, layers[2], stride=2, with_ibn=with_ibn,
dilation=1, norm_layer=norm_layer,
dropblock_prob=dropblock_prob)
self.layer4 = self._make_layer(block, 512, layers[3], stride=1, with_ibn=with_ibn,
dilation=2, norm_layer=norm_layer,
dropblock_prob=dropblock_prob)
else:
self.layer3 = self._make_layer(block, 256, layers[2], stride=2, with_ibn=with_ibn,
norm_layer=norm_layer,
dropblock_prob=dropblock_prob)
self.layer4 = self._make_layer(block, 512, layers[3], stride=last_stride, with_ibn=with_ibn,
norm_layer=norm_layer,
dropblock_prob=dropblock_prob)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, norm_layer):
m.weight.data.fill_(1)
m.bias.data.zero_()
if with_nl:
self._build_nonlocal(layers, non_layers)
else:
self.NL_1_idx = self.NL_2_idx = self.NL_3_idx = self.NL_4_idx = []
def _make_layer(self, block, planes, blocks, stride=1, with_ibn=False, dilation=1, norm_layer=None,
dropblock_prob=0.0, is_first=True):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
down_layers = []
if self.avg_down:
if dilation == 1:
down_layers.append(nn.AvgPool2d(kernel_size=stride, stride=stride,
ceil_mode=True, count_include_pad=False))
else:
down_layers.append(nn.AvgPool2d(kernel_size=1, stride=1,
ceil_mode=True, count_include_pad=False))
down_layers.append(nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=1, bias=False))
else:
down_layers.append(nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False))
down_layers.append(norm_layer(planes * block.expansion))
downsample = nn.Sequential(*down_layers)
layers = []
if planes == 512:
with_ibn = False
if dilation == 1 or dilation == 2:
layers.append(block(self.inplanes, planes, with_ibn, stride, downsample=downsample,
radix=self.radix, cardinality=self.cardinality,
bottleneck_width=self.bottleneck_width,
avd=self.avd, avd_first=self.avd_first,
dilation=1, is_first=is_first, rectified_conv=self.rectified_conv,
rectify_avg=self.rectify_avg,
norm_layer=norm_layer, dropblock_prob=dropblock_prob,
last_gamma=self.last_gamma))
elif dilation == 4:
layers.append(block(self.inplanes, planes, with_ibn, stride, downsample=downsample,
radix=self.radix, cardinality=self.cardinality,
bottleneck_width=self.bottleneck_width,
avd=self.avd, avd_first=self.avd_first,
dilation=2, is_first=is_first, rectified_conv=self.rectified_conv,
rectify_avg=self.rectify_avg,
norm_layer=norm_layer, dropblock_prob=dropblock_prob,
last_gamma=self.last_gamma))
else:
raise RuntimeError("=> unknown dilation size: {}".format(dilation))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes, with_ibn,
radix=self.radix, cardinality=self.cardinality,
bottleneck_width=self.bottleneck_width,
avd=self.avd, avd_first=self.avd_first,
dilation=dilation, rectified_conv=self.rectified_conv,
rectify_avg=self.rectify_avg,
norm_layer=norm_layer, dropblock_prob=dropblock_prob,
last_gamma=self.last_gamma))
return nn.Sequential(*layers)
def _build_nonlocal(self, layers, non_layers):
self.NL_1 = nn.ModuleList(
[Non_local(256) for _ in range(non_layers[0])])
self.NL_1_idx = sorted([layers[0] - (i + 1) for i in range(non_layers[0])])
self.NL_2 = nn.ModuleList(
[Non_local(512) for _ in range(non_layers[1])])
self.NL_2_idx = sorted([layers[1] - (i + 1) for i in range(non_layers[1])])
self.NL_3 = nn.ModuleList(
[Non_local(1024) for _ in range(non_layers[2])])
self.NL_3_idx = sorted([layers[2] - (i + 1) for i in range(non_layers[2])])
self.NL_4 = nn.ModuleList(
[Non_local(2048) for _ in range(non_layers[3])])
self.NL_4_idx = sorted([layers[3] - (i + 1) for i in range(non_layers[3])])
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
NL1_counter = 0
if len(self.NL_1_idx) == 0:
self.NL_1_idx = [-1]
for i in range(len(self.layer1)):
x = self.layer1[i](x)
if i == self.NL_1_idx[NL1_counter]:
_, C, H, W = x.shape
x = self.NL_1[NL1_counter](x)
NL1_counter += 1
# Layer 2
NL2_counter = 0
if len(self.NL_2_idx) == 0:
self.NL_2_idx = [-1]
for i in range(len(self.layer2)):
x = self.layer2[i](x)
if i == self.NL_2_idx[NL2_counter]:
_, C, H, W = x.shape
x = self.NL_2[NL2_counter](x)
NL2_counter += 1
# Layer 3
NL3_counter = 0
if len(self.NL_3_idx) == 0:
self.NL_3_idx = [-1]
for i in range(len(self.layer3)):
x = self.layer3[i](x)
if i == self.NL_3_idx[NL3_counter]:
_, C, H, W = x.shape
x = self.NL_3[NL3_counter](x)
NL3_counter += 1
# Layer 4
NL4_counter = 0
if len(self.NL_4_idx) == 0:
self.NL_4_idx = [-1]
for i in range(len(self.layer4)):
x = self.layer4[i](x)
if i == self.NL_4_idx[NL4_counter]:
_, C, H, W = x.shape
x = self.NL_4[NL4_counter](x)
NL4_counter += 1
return x
@BACKBONE_REGISTRY.register()
def build_resnest_backbone(cfg):
"""
Create a ResNest instance from config.
Returns:
        ResNest: a :class:`ResNest` instance.
"""
# fmt: off
pretrain = cfg.MODEL.BACKBONE.PRETRAIN
last_stride = cfg.MODEL.BACKBONE.LAST_STRIDE
with_ibn = cfg.MODEL.BACKBONE.WITH_IBN
with_se = cfg.MODEL.BACKBONE.WITH_SE
with_nl = cfg.MODEL.BACKBONE.WITH_NL
depth = cfg.MODEL.BACKBONE.DEPTH
num_blocks_per_stage = {50: [3, 4, 6, 3], 101: [3, 4, 23, 3], 200: [3, 24, 36, 3], 269: [3, 30, 48, 8]}[depth]
nl_layers_per_stage = {50: [0, 2, 3, 0], 101: [0, 2, 3, 0]}[depth]
stem_width = {50: 32, 101: 64, 200: 64, 269: 64}[depth]
model = ResNest(last_stride, with_ibn, with_nl, Bottleneck, num_blocks_per_stage, nl_layers_per_stage,
radix=2, groups=1, bottleneck_width=64,
deep_stem=True, stem_width=stem_width, avg_down=True,
avd=True, avd_first=False)
if pretrain:
# if not with_ibn:
# original resnet
state_dict = torch.hub.load_state_dict_from_url(
model_urls['resnest' + str(depth)], progress=True, check_hash=True)
# else:
# raise KeyError('Not implementation ibn in resnest')
# # ibn resnet
# state_dict = torch.load(pretrain_path)['state_dict']
# # remove module in name
# new_state_dict = {}
# for k in state_dict:
# new_k = '.'.join(k.split('.')[1:])
# if new_k in model.state_dict() and (model.state_dict()[new_k].shape == state_dict[k].shape):
# new_state_dict[new_k] = state_dict[k]
# state_dict = new_state_dict
res = model.load_state_dict(state_dict, strict=False)
logger = logging.getLogger(__name__)
        logger.info('missing keys: {}'.format(res.missing_keys))
        logger.info('unexpected keys: {}'.format(res.unexpected_keys))
return model
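
# A minimal usage sketch (not part of the original file). It assumes a yacs-style config
# node exposing the MODEL.BACKBONE fields read by build_resnest_backbone above; the
# values below are illustrative, not fastreid's canonical defaults.
if __name__ == '__main__':
    from yacs.config import CfgNode as CN

    cfg = CN()
    cfg.MODEL = CN()
    cfg.MODEL.BACKBONE = CN()
    cfg.MODEL.BACKBONE.PRETRAIN = False      # skip downloading pretrained weights
    cfg.MODEL.BACKBONE.LAST_STRIDE = 1
    cfg.MODEL.BACKBONE.WITH_IBN = False
    cfg.MODEL.BACKBONE.WITH_SE = False
    cfg.MODEL.BACKBONE.WITH_NL = False
    cfg.MODEL.BACKBONE.DEPTH = 50

    backbone = build_resnest_backbone(cfg)
    features = backbone(torch.randn(2, 3, 256, 128))  # a typical re-id input size
    print(features.shape)  # expected: torch.Size([2, 2048, 16, 8])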
|
[
"sherlockliao01@gmail.com"
] |
sherlockliao01@gmail.com
|
6b6b8ed46de995cb4125b9f3eae5ad6f987cb563
|
1ff9adfdb9d559e6f81ed9470467bab25e93b5ab
|
/src/ta_lib/_vendor/tigerml/core/reports/lib.py
|
e68184e54ad7fe935d6209b79f71931a46f2af5f
|
[] |
no_license
|
Seemant-tiger/housing-price-prediction
|
a39dbefcb11bc460edeeee92e6becf77d35ff3a8
|
be5d8cca769c7e267cfee1932eb82b70c2855bc1
|
refs/heads/main
| 2023-06-24T00:25:49.776720
| 2021-07-18T16:44:28
| 2021-07-18T16:44:28
| 387,222,852
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 594
|
py
|
def create_report(
contents, name="", path="", format=".html", split_sheets=True, tiger_template=False
):
if format == ".xlsx":
from .excel import create_excel_report
create_excel_report(contents, name=name, path=path, split_sheets=split_sheets)
elif format == ".pptx":
from .ppt.lib import create_ppt_report
create_ppt_report(contents, name=name, path=path, tiger_template=tiger_template)
if format == ".html":
from .html import create_html_report
create_html_report(contents, name=name, path=path, split_sheets=split_sheets)
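
# A minimal usage sketch (not part of the original module). `contents` is assumed to be
# the nested dict of report components that the tigerml report builders accept; the dict
# below is illustrative only.
if __name__ == "__main__":
    demo_contents = {"summary": {"note": "hello world"}}
    create_report(demo_contents, name="demo_report", path=".", format=".html")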
|
[
"seemantsingh1199@gmail.com"
] |
seemantsingh1199@gmail.com
|
23e3ad4e01e0f76661ea461347891416a38d216c
|
a71a756203a07ccaece6db440410493b3b7ff77f
|
/helios/plugins/builtin/rpc_websocket_proxy_through_ipc_socket/plugin.py
|
f25a485fbaf58be719639402aa3e72f7562385ca
|
[
"MIT"
] |
permissive
|
Helios-Protocol/py-helios-node
|
73735dc24cd4c816d55649ed2f5df822efabfdce
|
691b378938f0a36bf8774dc1ee4e4370b6cf7c63
|
refs/heads/master
| 2021-08-19T23:05:18.841604
| 2020-01-18T19:38:33
| 2020-01-18T19:38:33
| 134,452,574
| 21
| 10
|
MIT
| 2019-06-09T04:43:14
| 2018-05-22T17:39:10
|
Python
|
UTF-8
|
Python
| false
| false
| 1,344
|
py
|
from argparse import (
ArgumentParser,
_SubParsersAction,
)
from helios.extensibility import (
BaseIsolatedPlugin,
)
from .websocket_proxy_server import Proxy as rpc_websocket_proxy
###
# This plugin is no longer used because it is synchronous; a newer asynchronous version lives in the json_rpc folder.
# It also connects through IPC, so it won't be stopped by admin_stopRPC.
###
class RpcWebsocketProxyPlugin(BaseIsolatedPlugin):
@property
def name(self) -> str:
return "RPC Websocket Proxy"
def should_start(self) -> bool:
return (not self.context.args.disable_rpc_websocket_proxy) and self.context.chain_config.is_main_instance
def configure_parser(self, arg_parser: ArgumentParser, subparser: _SubParsersAction) -> None:
arg_parser.add_argument(
'--disable_rpc_websocket_proxy-NOT_USED',
action="store_true",
help="Should we disable the RPC websocket proxy server?",
)
def start(self) -> None:
self.logger.info('RPC Websocket proxy started')
self.context.event_bus.connect()
proxy_url = "ws://0.0.0.0:" + str(self.context.chain_config.rpc_port)
rpc_websocket_proxy_service = rpc_websocket_proxy(proxy_url, self.context.chain_config.jsonrpc_ipc_path)
rpc_websocket_proxy_service.run()
|
[
"admin@hyperevo.com"
] |
admin@hyperevo.com
|
9d0ceb0a983d177fb194fa88a84647305cb10f4a
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/PE8XQipGLS5bhpLZ5_12.py
|
135603cb370bc577f19f614c4e375ea0e5b17c54
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 98
|
py
|
def check_equals(lst1, lst2):
    # Direct comparison is equivalent to (and simpler than) comparing full slices.
    return lst1 == lst2
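
# Example usage (not part of the original snippet):
if __name__ == "__main__":
    assert check_equals([1, 2, 3], [1, 2, 3]) is True
    assert check_equals([1, 2, 3], [3, 2, 1]) is False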
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
e661fdec78b7319ffe2fcad7ed550a0469bf8d6d
|
564d6a4d305a8ac6a7e01c761831fb2081c02d0f
|
/sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_05_01/operations/_network_interfaces_operations.py
|
f2a6c07097ef93c19db4a57b35f48ccad53b3847
|
[
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] |
permissive
|
paultaiton/azure-sdk-for-python
|
69af4d889bac8012b38f5b7e8108707be679b472
|
d435a1a25fd6097454b7fdfbbdefd53e05029160
|
refs/heads/master
| 2023-01-30T16:15:10.647335
| 2020-11-14T01:09:50
| 2020-11-14T01:09:50
| 283,343,691
| 0
| 0
|
MIT
| 2020-07-28T22:43:43
| 2020-07-28T22:43:43
| null |
UTF-8
|
Python
| false
| false
| 59,447
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfacesOperations(object):
"""NetworkInterfacesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_05_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
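    # Usage sketch (not part of the generated code): this operations class is normally
    # reached through NetworkManagementClient rather than instantiated directly, and the
    # resource names below are placeholders.
    #
    #   from azure.identity import DefaultAzureCredential
    #   from azure.mgmt.network import NetworkManagementClient
    #
    #   client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #   poller = client.network_interfaces.begin_delete("my-rg", "my-nic")
    #   poller.result()  # block until the long-running delete finishes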
def get(
self,
resource_group_name, # type: str
network_interface_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "models.NetworkInterface"
"""Gets information about the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_05_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
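    # Usage sketch: `get` is a plain synchronous call that returns a deserialized
    # NetworkInterface (names below are placeholders).
    #
    #   nic = client.network_interfaces.get("my-rg", "my-nic")
    #   print(nic.provisioning_state)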
def _create_or_update_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
parameters, # type: "models.NetworkInterface"
**kwargs # type: Any
):
# type: (...) -> "models.NetworkInterface"
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkInterface')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
network_interface_name, # type: str
parameters, # type: "models.NetworkInterface"
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.NetworkInterface"]
"""Creates or updates a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to the create or update network interface operation.
:type parameters: ~azure.mgmt.network.v2020_05_01.models.NetworkInterface
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either NetworkInterface or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_05_01.models.NetworkInterface]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterface"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
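    # Usage sketch (placeholders throughout; assumes an existing subnet id). The SDK also
    # accepts plain dicts in place of model objects, which is what is shown here:
    #
    #   poller = client.network_interfaces.begin_create_or_update(
    #       "my-rg",
    #       "my-nic",
    #       {
    #           "location": "eastus",
    #           "ip_configurations": [
    #               {"name": "ipconfig1", "subnet": {"id": subnet_id}},
    #           ],
    #       },
    #   )
    #   nic = poller.result()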
def update_tags(
self,
resource_group_name, # type: str
network_interface_name, # type: str
parameters, # type: "models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "models.NetworkInterface"
"""Updates a network interface tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to update network interface tags.
:type parameters: ~azure.mgmt.network.v2020_05_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_05_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["models.NetworkInterfaceListResult"]
"""Gets all network interfaces in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkInterfaces'} # type: ignore
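    # Usage sketch: `list_all` returns an ItemPaged iterator, so callers simply iterate
    # and paging (following next_link) is handled transparently:
    #
    #   for nic in client.network_interfaces.list_all():
    #       print(nic.name, nic.location)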
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.NetworkInterfaceListResult"]
"""Gets all network interfaces in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces'} # type: ignore
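    # Resource-group-scoped variant of the same paging pattern, assuming the
    # network_client from the sketch after list_all above (placeholder name):
    #
    #     for nic in network_client.network_interfaces.list("<resource-group>"):
    #         print(nic.name)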
def _get_effective_route_table_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["models.EffectiveRouteListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.EffectiveRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
# Construct URL
url = self._get_effective_route_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_effective_route_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore
def begin_get_effective_route_table(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.EffectiveRouteListResult"]
"""Gets all route tables applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either EffectiveRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_05_01.models.EffectiveRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.EffectiveRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_effective_route_table_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_effective_route_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore
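    # A hedged sketch of driving the LRO, assuming the network_client from the
    # list_all sketch above; result() blocks until the ARM poller completes.
    #
    #     poller = network_client.network_interfaces.begin_get_effective_route_table(
    #         "<resource-group>", "<nic-name>")
    #     effective_routes = poller.result()  # EffectiveRouteListResult
    #     for route in effective_routes.value:
    #         print(route.next_hop_type, route.address_prefix)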
def _list_effective_network_security_groups_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["models.EffectiveNetworkSecurityGroupListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.EffectiveNetworkSecurityGroupListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
# Construct URL
url = self._list_effective_network_security_groups_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_effective_network_security_groups_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore
def begin_list_effective_network_security_groups(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.EffectiveNetworkSecurityGroupListResult"]
"""Gets all network security groups applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either EffectiveNetworkSecurityGroupListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_05_01.models.EffectiveNetworkSecurityGroupListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.EffectiveNetworkSecurityGroupListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._list_effective_network_security_groups_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_effective_network_security_groups.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore
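    # Same LRO pattern for effective network security groups (names below are
    # placeholders, reusing the network_client from the list_all sketch above):
    #
    #     poller = network_client.network_interfaces.begin_list_effective_network_security_groups(
    #         "<resource-group>", "<nic-name>")
    #     effective_nsgs = poller.result()  # EffectiveNetworkSecurityGroupListResult
    #     for item in effective_nsgs.value:
    #         print(item)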
def list_virtual_machine_scale_set_vm_network_interfaces(
self,
resource_group_name, # type: str
virtual_machine_scale_set_name, # type: str
virtualmachine_index, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.NetworkInterfaceListResult"]
"""Gets information about all network interfaces in a virtual machine in a virtual machine scale
set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_vm_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_vm_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces'} # type: ignore
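    # Illustrative call (placeholders assumed): note that virtualmachine_index is
    # passed as a string, matching the parameter type documented above.
    #
    #     for nic in network_client.network_interfaces.list_virtual_machine_scale_set_vm_network_interfaces(
    #             "<resource-group>", "<vmss-name>", "0"):
    #         print(nic.name)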
def list_virtual_machine_scale_set_network_interfaces(
self,
resource_group_name, # type: str
virtual_machine_scale_set_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.NetworkInterfaceListResult"]
"""Gets all network interfaces in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/networkInterfaces'} # type: ignore
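    # Scale-set-wide variant of the same paging pattern (placeholders assumed):
    #
    #     for nic in network_client.network_interfaces.list_virtual_machine_scale_set_network_interfaces(
    #             "<resource-group>", "<vmss-name>"):
    #         print(nic.name)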
def get_virtual_machine_scale_set_network_interface(
self,
resource_group_name, # type: str
virtual_machine_scale_set_name, # type: str
virtualmachine_index, # type: str
network_interface_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "models.NetworkInterface"
"""Get the specified network interface in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_05_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_network_interface.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_network_interface.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}'} # type: ignore
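    # Illustrative direct GET (placeholders assumed); the optional expand argument,
    # omitted here, is serialized as the $expand query parameter shown above.
    #
    #     nic = network_client.network_interfaces.get_virtual_machine_scale_set_network_interface(
    #         "<resource-group>", "<vmss-name>", "0", "<nic-name>")
    #     print(nic.provisioning_state)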
def list_virtual_machine_scale_set_ip_configurations(
self,
resource_group_name, # type: str
virtual_machine_scale_set_name, # type: str
virtualmachine_index, # type: str
network_interface_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Iterable["models.NetworkInterfaceIPConfigurationListResult"]
"""Get the specified network interface ip configuration in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceIPConfigurationListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceIPConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceIPConfigurationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_ip_configurations.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceIPConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_ip_configurations.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations'} # type: ignore
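    # Paged listing of a scale-set NIC's ip configurations (placeholders assumed):
    #
    #     for ip_config in network_client.network_interfaces.list_virtual_machine_scale_set_ip_configurations(
    #             "<resource-group>", "<vmss-name>", "0", "<nic-name>"):
    #         print(ip_config.name)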
def get_virtual_machine_scale_set_ip_configuration(
self,
resource_group_name, # type: str
virtual_machine_scale_set_name, # type: str
virtualmachine_index, # type: str
network_interface_name, # type: str
ip_configuration_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "models.NetworkInterfaceIPConfiguration"
"""Get the specified network interface ip configuration in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param ip_configuration_name: The name of the ip configuration.
:type ip_configuration_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterfaceIPConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceIPConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceIPConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_ip_configuration.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterfaceIPConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_ip_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations/{ipConfigurationName}'} # type: ignore
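    # Fetching a single ip configuration by name (placeholders assumed):
    #
    #     ip_config = network_client.network_interfaces.get_virtual_machine_scale_set_ip_configuration(
    #         "<resource-group>", "<vmss-name>", "0", "<nic-name>", "<ip-config-name>")
    #     print(ip_config.private_ip_address)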
|
[
"noreply@github.com"
] |
paultaiton.noreply@github.com
|
6d4d1d60f2c789f78d8d5f3257764908e635553d
|
809f263b77b525549cd945c39c4c9cf2b8e6a167
|
/pqcrypto/sign/sphincs_shake256_192s_simple.py
|
6a45aa9c023c21f67bb3b6b83ca198236bb3e8f7
|
[
"BSD-3-Clause"
] |
permissive
|
Kayuii/pqcrypto
|
bdf5014b7590dfe363baedbf47171f4b4cb25349
|
dd8c56fd876a397caef06a00d35537a4f9c1db28
|
refs/heads/master
| 2022-12-14T00:34:36.632689
| 2020-09-08T10:40:26
| 2020-09-08T10:40:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 447
|
py
|
from .._sign.sphincs_shake256_192s_simple import ffi as __ffi, lib as __lib
from .common import _sign_generate_keypair_factory, _sign_sign_factory, _sign_verify_factory
PUBLIC_KEY_SIZE = __lib.CRYPTO_PUBLICKEYBYTES
SECRET_KEY_SIZE = __lib.CRYPTO_SECRETKEYBYTES
SIGNATURE_SIZE = __lib.CRYPTO_BYTES
generate_keypair = _sign_generate_keypair_factory(__ffi, __lib)
sign = _sign_sign_factory(__ffi, __lib)
verify = _sign_verify_factory(__ffi, __lib)
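# A minimal usage sketch (an assumption, not part of the original module), following
# the keypair/sign/verify API that the pqcrypto factory functions expose:
#
#     from pqcrypto.sign.sphincs_shake256_192s_simple import generate_keypair, sign, verify
#
#     public_key, secret_key = generate_keypair()
#     signature = sign(secret_key, b"hello world")
#     assert verify(public_key, b"hello world", signature)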
|
[
"inbox@philonas.net"
] |
inbox@philonas.net
|
685c3b447efa9302c1e3ac674770c6ad63a86f80
|
86fc644c327a8d6ea66fd045d94c7733c22df48c
|
/scripts/managed_cpe_services/customer/triple_cpe_site/triple_cpe_site_services/cpe_primary/ospfs/router_ospf/redistribute/redistribute_on_ospf/redistribute_on_ospf.py
|
37bcca908584bc95632c842448489e92bbae1610
|
[] |
no_license
|
lucabrasi83/anutacpedeployment
|
bfe703657fbcf0375c92bcbe7560051817f1a526
|
96de3a4fd4adbbc0d443620f0c53f397823a1cad
|
refs/heads/master
| 2021-09-24T16:44:05.305313
| 2018-10-12T02:41:18
| 2018-10-12T02:41:18
| 95,190,459
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,026
|
py
|
#
# This computer program is the confidential information and proprietary trade
# secret of Anuta Networks, Inc. Possession and use of this program must
# conform strictly to the license agreement between the user and
# Anuta Networks, Inc., and receipt or possession does not convey any rights
# to divulge, reproduce, or allow others to use this program without specific
# written authorization of Anuta Networks, Inc.
#
# Copyright (c) 2016-2017 Anuta Networks, Inc. All Rights Reserved.
#
#
#DO NOT EDIT THIS FILE ITS AUTOGENERATED ONE
#ALL THE CUSTOMIZATIONS REGARDING DATAPROCESSING SHOULD BE WRITTEN INTO service_customization.py FILE
#
"""
Tree Structure of Handled XPATH:
services
|
managed-cpe-services
|
customer
|
triple-cpe-site
|
triple-cpe-site-services
|
cpe-primary
|
ospfs
|
router-ospf
|
redistribute
|
redistribute-on-ospf
Schema Representation:
/services/managed-cpe-services/customer/triple-cpe-site/triple-cpe-site-services/cpe-primary/ospfs/router-ospf/redistribute/redistribute-on-ospf
"""
from servicemodel import util
from servicemodel import yang
from servicemodel import devicemgr
from cpedeployment.cpedeployment_lib import getLocalObject
from cpedeployment.cpedeployment_lib import getDeviceObject
from cpedeployment.cpedeployment_lib import getCurrentObjectConfig
from cpedeployment.cpedeployment_lib import ServiceModelContext
from cpedeployment.cpedeployment_lib import getParentObject
from cpedeployment.cpedeployment_lib import log
import service_customization
class RedistributeOnOspf(yang.AbstractYangServiceHandler):
_instance = None
def __init__(self):
self.delete_pre_processor = service_customization.DeletePreProcessor()
self.create_pre_processor = service_customization.CreatePreProcessor()
self.opaque_args = {}
def create(self, id, sdata):
sdata.getSession().addYangSessionPreReserveProcessor(self.create_pre_processor)
#Fetch Local Config Object
config = getCurrentObjectConfig(id, sdata, 'redistribute_on_ospf')
#Fetch Service Model Context Object
smodelctx = None
#Fetch Parent Object
parentobj = None
dev = []
inputkeydict = {}
devbindobjs={}
inputdict = {}
opaque_args = self.opaque_args
# START OF FETCHING THE LEAF PARAMETERS
inputdict['protocol'] = config.get_field_value('protocol')
inputdict['process_id'] = config.get_field_value('process_id')
inputdict['route_map'] = config.get_field_value('route_map')
inputdict['metric'] = config.get_field_value('metric')
inputdict['metric_type'] = config.get_field_value('metric_type')
inputdict['tag'] = config.get_field_value('tag')
# END OF FETCHING THE LEAF PARAMETERS
_Gen_obj = getLocalObject(sdata, 'cpe-primary')
device_mgmt_ip_address = _Gen_obj.cpe_primary.device_ip
#Fetch Device Object
dev = getDeviceObject(device_mgmt_ip_address, sdata)
# START OF FETCHING THE PARENT KEY LEAF PARAMETERS
inputkeydict['managed_cpe_services_customer_triple_cpe_site_triple_cpe_site_services_cpe_primary_ospfs_router_ospf_process_id'] = sdata.getRcPath().split('/')[-3].split('=')[1]
inputkeydict['managed_cpe_services_customer_triple_cpe_site_triple_cpe_site_services_site_name'] = sdata.getRcPath().split('/')[-6].split('=')[1]
inputkeydict['managed_cpe_services_customer_name'] = sdata.getRcPath().split('/')[-8].split('=')[1]
# END OF FETCHING THE PARENT KEY LEAF PARAMETERS
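        # Illustrative note (hypothetical values, not taken from a real service model):
        # each look-up above slices a 'name=value' segment out of the RcPath and keeps
        # the value, e.g. 'router-ospf=100'.split('=')[1] -> '100'.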
#Use the custom methods to process the data
service_customization.ServiceDataCustomization.process_service_create_data(smodelctx, sdata, dev, id=id, parentobj=parentobj, inputdict=inputdict, inputkeydict=inputkeydict, config=config, hopaque=opaque_args)
#Start of Device binding with python bindings
#End of Device binding
#Use the custom method to process/create payload
service_customization.ServiceDataCustomization.process_service_device_bindings(smodelctx, sdata, dev, id=id, device=dev, inputdict=inputdict, inputkeydict=inputkeydict, parentobj=parentobj, config=config, devbindobjs=devbindobjs, hopaque=opaque_args)
def update(self, id, sdata):
#Fetch Local Config Object
config = getCurrentObjectConfig(id, sdata, 'redistribute_on_ospf')
opaque_args = self.opaque_args
#Fetch Service Model Context Object
smodelctx = None
#Fetch Parent Object
parentobj = None
dev = []
inputkeydict = {}
devbindobjs={}
inputdict = {}
opaque_args = self.opaque_args
# START OF FETCHING THE LEAF PARAMETERS
inputdict['protocol'] = config.get_field_value('protocol')
inputdict['process_id'] = config.get_field_value('process_id')
inputdict['route_map'] = config.get_field_value('route_map')
inputdict['metric'] = config.get_field_value('metric')
inputdict['metric_type'] = config.get_field_value('metric_type')
inputdict['tag'] = config.get_field_value('tag')
# END OF FETCHING THE LEAF PARAMETERS
_Gen_obj = getLocalObject(sdata, 'cpe-primary')
device_mgmt_ip_address = _Gen_obj.cpe_primary.device_ip
#Fetch Device Object
dev = getDeviceObject(device_mgmt_ip_address, sdata)
#Use the custom method to process the data
service_customization.ServiceDataCustomization.process_service_update_data(smodelctx, sdata, id=id, dev=dev, parentobj=parentobj, config=config, hopaque=opaque_args, inputdict=inputdict)
def delete(self, id, sdata):
sdata.getSession().addYangSessionPreReserveProcessor(self.delete_pre_processor)
#Fetch Local Config Object
config = getCurrentObjectConfig(id, sdata, 'redistribute_on_ospf')
opaque_args = self.opaque_args
#Fetch Service Model Context Object
smodelctx = None
#Fetch Parent Object
parentobj = None
dev = []
inputkeydict = {}
devbindobjs={}
inputdict = {}
opaque_args = self.opaque_args
# START OF FETCHING THE LEAF PARAMETERS
inputdict['protocol'] = config.get_field_value('protocol')
inputdict['process_id'] = config.get_field_value('process_id')
inputdict['route_map'] = config.get_field_value('route_map')
inputdict['metric'] = config.get_field_value('metric')
inputdict['metric_type'] = config.get_field_value('metric_type')
inputdict['tag'] = config.get_field_value('tag')
# END OF FETCHING THE LEAF PARAMETERS
_Gen_obj = getLocalObject(sdata, 'cpe-primary')
device_mgmt_ip_address = _Gen_obj.cpe_primary.device_ip
#Fetch Device Object
dev = getDeviceObject(device_mgmt_ip_address, sdata)
#Use the custom method to process the data
service_customization.ServiceDataCustomization.process_service_delete_data(smodelctx, sdata, id=id, dev=dev, parentobj=parentobj, config=config, hopaque=opaque_args, inputdict=inputdict)
@staticmethod
def getInstance():
if(RedistributeOnOspf._instance == None):
RedistributeOnOspf._instance = RedistributeOnOspf()
return RedistributeOnOspf._instance
#def rollbackCreate(self, id, sdata):
# log('rollback: id = %s, sdata = %s' % (id, sdata))
# self.delete(id,sdata)
if __name__ == 'redistribute_on_ospf':
from servicemodel.yang import YangServiceData
sdata = YangServiceData()
instance = RedistributeOnOspf().getInstance()
instance.create(None, sdata)
instance.delete(None, sdata)
instance.update(None, sdata)
|
[
"sebastien.pouplin@tatacommunications.com"
] |
sebastien.pouplin@tatacommunications.com
|
e43680be54a96bf074498a59db3df72db549e964
|
eb59f8212f40bd7c316e1ef3be03bf7da3dde65f
|
/annotated2_0/scr_uli.py
|
be140b857a325143f149a0a5aefd35636edbe6d3
|
[] |
no_license
|
shtkn/frameDataParser
|
764cc3197051966717990f7ca3eb2f02639cf438
|
690d44d4bf188a14c4e5ebebd95bdc75b827f5e5
|
refs/heads/master
| 2021-07-05T00:52:53.316670
| 2020-10-03T18:16:52
| 2020-10-03T18:16:52
| 187,556,058
| 0
| 0
| null | 2019-11-25T05:36:06
| 2019-05-20T02:40:24
|
Python
|
UTF-8
|
Python
| false
| false
| 280,392
|
py
|
@Subroutine
def PreInit():
Unknown12019('756c6900000000000000000000000000')
Unknown12050(1)
@Subroutine
def MatchInit():
DashFAccel(1000)
DashFMaxVelocity(32000)
JumpYVelocity(31500)
SuperJumpYVelocity(38000)
DoubleJumpCount(2)
Unknown12038(23000)
Unknown12034(33)
SuperFreezeDuration(-1500)
AirBDashDuration(13)
Unknown12037(-1800)
Unknown12024(2)
Unknown13039(1)
Unknown2049(1)
Unknown15018(2000)
Move_Register('AutoFDash', 0x0)
Unknown14009(6)
Move_AirGround_(0x2000)
Move_Input_(0x78)
Unknown14013('CmnActFDash')
Move_EndRegister()
Move_Register('NmlAtk5A', 0x7)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown14015(0, 300000, -200000, 150000, 2000, 50)
Move_EndRegister()
Move_Register('AN_NmlAtk5A_2nd', 0x7)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown14005(1)
Unknown15013(2000)
Unknown14015(0, 350000, -200000, 200000, 1000, 50)
Move_EndRegister()
Move_Register('AN_NmlAtk5A_3rd', 0x7)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown14005(1)
Unknown15013(2000)
Unknown14015(0, 430000, -200000, 230000, 1000, 50)
Move_EndRegister()
Move_Register('AN_NmlAtk5A_4th', 0x7)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown14005(1)
Unknown14015(0, 800000, -200000, 230000, 1000, 50)
Move_EndRegister()
Move_Register('NmlAtk4A', 0x6)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown14015(0, 250000, -200000, 150000, 1500, 50)
Move_EndRegister()
Move_Register('AN_NmlAtk4A_2nd', 0x6)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown14005(1)
Unknown15013(4000)
Unknown14015(0, 350000, -200000, 200000, 1000, 50)
Move_EndRegister()
Move_Register('AN_NmlAtk4A_3rd', 0x6)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown14005(1)
Unknown15013(4000)
Unknown14015(0, 350000, -200000, 200000, 1000, 50)
Move_EndRegister()
Move_Register('AN_NmlAtk4A_4th', 0x6)
Unknown14013('AN_NmlAtk5A_4th')
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown14005(1)
Unknown15013(4000)
Unknown14015(0, 800000, -200000, 230000, 1000, 50)
Move_EndRegister()
Move_Register('NmlAtk2A', 0x4)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown14015(0, 200000, -100000, 100000, 1200, 50)
Move_EndRegister()
Move_Register('NmlAtk2A_Renda', 0x4)
Unknown14005(1)
MoveMaxChainRepeat(2)
Unknown15013(3000)
Unknown14015(0, 200000, -100000, 100000, 1000, 50)
Move_EndRegister()
Move_Register('NmlAtk5B', 0x19)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown15021(1500)
Unknown15006(3000)
Unknown14015(0, 500000, 100000, 400000, 800, 10)
Move_EndRegister()
Move_Register('NmlAtk2B', 0x16)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown15009()
Unknown15021(1)
Unknown14015(0, 400000, -100000, 100000, 1000, 50)
Move_EndRegister()
Move_Register('AN_NmlAtk5B_2nd', 0x19)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown14005(1)
Unknown15013(4000)
Unknown14015(0, 500000, -200000, 400000, 1000, 50)
Move_EndRegister()
Move_Register('AN_NmlAtk5B_3rd', 0x19)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown14005(1)
Unknown15013(4000)
Unknown14015(0, 500000, -200000, 350000, 1000, 50)
Move_EndRegister()
Move_Register('CmnActCrushAttack', 0x66)
Unknown15008()
Unknown14015(0, 450000, -200000, 200000, 500, 25)
Move_EndRegister()
Move_Register('NmlAtk2C', 0x28)
MoveMaxChainRepeat(1)
Unknown14020(1)
Unknown15009()
Unknown14015(200000, 430000, -100000, 50000, 500, 25)
Move_EndRegister()
Move_Register('NmlAtk3C', 0x29)
Unknown15000(0)
Unknown15003(0)
Move_EndRegister()
Move_Register('NmlAtkAIR5A', 0x10)
MoveMaxChainRepeat(1)
Unknown14015(-100000, 250000, -300000, 100000, 1500, 50)
Move_EndRegister()
Move_Register('NmlAtkAIR5A_2nd', 0x10)
MoveMaxChainRepeat(1)
Unknown14005(1)
Move_EndRegister()
Move_Register('NmlAtkAIR5B', 0x22)
MoveMaxChainRepeat(1)
Unknown14015(-100000, 350000, -200000, 200000, 1000, 50)
Move_EndRegister()
Move_Register('NmlAtkAIR5C', 0x34)
MoveMaxChainRepeat(1)
Unknown14015(0, 350000, -500000, 0, 800, 40)
Move_EndRegister()
Move_Register('CmnActChangePartnerQuickOut', 0x63)
Move_EndRegister()
Move_Register('NmlAtkThrow', 0x5d)
Unknown15010()
Unknown15021(1)
Unknown15013(1)
Unknown14015(0, 350000, -200000, 200000, 1000, 0)
Move_EndRegister()
Move_Register('NmlAtkBackThrow', 0x61)
Unknown15010()
Unknown15021(1)
Unknown15013(1)
Unknown14015(0, 350000, -200000, 200000, 1000, 0)
Move_EndRegister()
Move_Register('ShotDashCancel', INPUT_SPECIALMOVE)
Unknown14005(1)
Move_AirGround_(0x2000)
Move_Input_(0xda)
Move_EndRegister()
Move_Register('Shot_A', INPUT_SPECIALMOVE)
Move_AirGround_(0x2000)
Move_Input_(INPUT_236)
Move_Input_(INPUT_PRESS_A)
Unknown15013(1)
Unknown15012(1)
Unknown15014(1)
Unknown15021(1)
Unknown14015(300000, 700000, -200000, 100000, 500, 10)
Move_EndRegister()
Move_Register('Shot_B', INPUT_SPECIALMOVE)
Move_AirGround_(0x2000)
Move_Input_(INPUT_236)
Move_Input_(INPUT_PRESS_B)
Unknown15013(1)
Unknown15012(1)
Unknown15014(1)
Unknown15021(1)
Unknown14015(400000, 800000, -200000, 100000, 500, 10)
Move_EndRegister()
Move_Register('Assault_A', INPUT_SPECIALMOVE)
Move_AirGround_(0x2000)
Move_Input_(INPUT_214)
Move_Input_(INPUT_PRESS_A)
Unknown15013(1)
Unknown15012(2500)
Unknown14015(0, 450000, -200000, 100000, 100, 10)
Move_EndRegister()
Move_Register('Assault_A_Hasei', INPUT_SPECIALMOVE)
Move_AirGround_(0x2000)
Move_Input_(0xed)
Unknown14005(1)
Unknown15021(2000)
Unknown14015(-250000, 250000, -200000, 200000, 750, 50)
Move_EndRegister()
Move_Register('Assault_B', INPUT_SPECIALMOVE)
Move_AirGround_(0x2000)
Move_Input_(INPUT_214)
Move_Input_(INPUT_PRESS_B)
Unknown15008()
Unknown15016(1, 10, 20)
Unknown14015(0, 450000, -200000, 200000, 500, 10)
Move_EndRegister()
Move_Register('AirShot_A', INPUT_SPECIALMOVE)
Move_AirGround_(0x2001)
Move_Input_(INPUT_236)
Move_Input_(INPUT_PRESS_A)
Unknown15013(1)
Unknown15012(1)
Unknown15014(1)
Unknown14015(300000, 700000, -500000, -100000, 500, 0)
Move_EndRegister()
Move_Register('AirShot_B', INPUT_SPECIALMOVE)
Move_AirGround_(0x2001)
Move_Input_(INPUT_236)
Move_Input_(INPUT_PRESS_B)
Unknown15013(1)
Unknown15012(1)
Unknown15014(1)
Unknown14015(300000, 800000, -300000, 100000, 500, 0)
Move_EndRegister()
Move_Register('Shot_EX', INPUT_SPECIALMOVE)
Move_AirGround_(0x2000)
Move_AirGround_(0x3086)
Move_Input_(INPUT_236)
Move_Input_(INPUT_PRESS_C)
Unknown15021(1)
Unknown14015(0, 800000, -200000, 150000, 250, 20)
Move_EndRegister()
Move_Register('Assault_EX', INPUT_SPECIALMOVE)
Move_AirGround_(0x2000)
Move_AirGround_(0x3086)
Move_Input_(INPUT_214)
Move_Input_(INPUT_PRESS_C)
Unknown15008()
Unknown14015(0, 600000, -200000, 200000, 100, 20)
Move_EndRegister()
Move_Register('AirShot_EX', INPUT_SPECIALMOVE)
Move_AirGround_(0x2001)
Move_AirGround_(0x3086)
Move_Input_(INPUT_236)
Move_Input_(INPUT_PRESS_C)
Unknown15021(1)
Unknown14015(300000, 700000, -100000, -500000, 100, 0)
Move_EndRegister()
Move_Register('CmnActInvincibleAttack', 0x64)
Unknown15013(0)
Unknown15012(0)
Unknown15014(6000)
Unknown15020(500, 1000, 100, 1000)
Unknown14015(0, 300000, -100000, 300000, 250, 5)
Move_EndRegister()
Move_Register('CmnActInvincibleAttackAir', 0x65)
Unknown15013(0)
Unknown15012(0)
Unknown15014(6000)
Unknown15020(500, 1000, 100, 1000)
Unknown14015(0, 300000, -100000, 300000, 250, 5)
Move_EndRegister()
Move_Register('UltimateRush', 0x68)
Move_AirGround_(0x2000)
Move_AirGround_(0x3089)
Move_Input_(INPUT_236)
Move_Input_(0xde)
Unknown15012(1)
Unknown15013(6000)
Unknown14015(150000, 500000, -200000, 150000, 50, 0)
Move_EndRegister()
Move_Register('UltimateRushOD', 0x68)
Move_AirGround_(0x2000)
Move_AirGround_(0x3089)
Move_AirGround_(0x3081)
Move_Input_(INPUT_236)
Move_Input_(0xde)
Unknown15012(1)
Unknown15013(6000)
Unknown14015(150000, 500000, -200000, 150000, 50, 0)
Move_EndRegister()
Move_Register('UltimateRanbu', 0x68)
Move_AirGround_(0x2000)
Move_AirGround_(0x3089)
Move_Input_(INPUT_214)
Move_Input_(0xde)
Unknown15012(1)
Unknown15013(1)
Unknown15014(6000)
Unknown14015(0, 300000, -200000, 300000, 50, 0)
Move_EndRegister()
Move_Register('UltimateRanbuOD', 0x68)
Move_AirGround_(0x2000)
Move_AirGround_(0x3089)
Move_AirGround_(0x3081)
Move_Input_(INPUT_214)
Move_Input_(0xde)
Unknown15012(1)
Unknown15013(1)
Unknown15014(6000)
Unknown14015(0, 300000, -200000, 300000, 50, 0)
Move_EndRegister()
Move_Register('AstralHeat', 0x69)
Move_AirGround_(0x2000)
Move_AirGround_(0x304a)
Move_Input_(0xcd)
Move_Input_(0xde)
Unknown15014(3000)
Unknown15013(3000)
Unknown14015(0, 650000, -200000, 200000, 1000, 50)
Move_EndRegister()
Unknown15024('NmlAtk5A', 'AN_NmlAtk5A_2nd', 10000000)
Unknown15024('AN_NmlAtk5A_2nd', 'AN_NmlAtk5A_3rd', 10000000)
Unknown15024('AN_NmlAtk5A_2nd', 'NmlAtk5B', 10000000)
Unknown15024('AN_NmlAtk5A_3rd', 'AN_NmlAtk5A_4th', 10000000)
Unknown15024('NmlAtk4A', 'AN_NmlAtk4A_2nd', 10000000)
Unknown15024('AN_NmlAtk4A_2nd', 'AN_NmlAtk4A_3rd', 10000000)
Unknown15024('AN_NmlAtk4A_2nd', 'NmlAtk5A', 10000000)
Unknown15024('AN_NmlAtk4A_3rd', 'AN_NmlAtk4A_4th', 10000000)
Unknown15024('AN_NmlAtk4A_3rd', 'NmlAtk5A', 10000000)
Unknown15024('NmlAtk5B', 'AN_NmlAtk5B_2nd', 10000000)
Unknown15024('AN_NmlAtk5B_2nd', 'AN_NmlAtk5B_3rd', 10000000)
Unknown15024('AN_NmlAtk5B_3rd', 'NmlAtk2C', 10000000)
Unknown15024('NmlAtk2C', 'Assault_B', 10000000)
Unknown15024('NmlAtkAIR5A', 'NmlAtkAIR5B', 10000000)
Unknown15024('NmlAtkAIR5B', 'NmlAtkAIR5C', 10000000)
Unknown14048('AntiAir', 0x4, 0xed)
Unknown14048('Assault_C', 0x4, 0x79)
Unknown14048('UltimateAssault2', 0x4, 0x5f)
Unknown14048('UltimateAssault2_OD', 0x4, 0x5f)
Unknown14048('BunshinAssault_A', 0x4, 0x45)
Unknown14048('AirAssault', 0x4, 0xed)
Unknown14049('NmlAtk5A', 'NmlAtk5B', 0, 0)
Unknown14049('NmlAtk5B', 'NmlAtk2C', 12, 0)
Unknown14049('NmlAtk5D', 'BunshinAssault_A', 0, 0)
Unknown14049('NmlAtk5D', 'BunshinAssault_B', 1, 600000)
Unknown14049('NmlAtk5D', 'UltimateAssault', 6, 0)
Unknown14049('NmlAtk5D', 'UltimateAssault_OD', 6, 0)
Unknown14049('NmlAtk2A', 'NmlAtk5B', 0, 0)
Unknown14049('NmlAtk2C', 'FHighJump', 12, 0)
Unknown14049('NmlAtk2C', 'NmlAtk5D', 13, 0)
Unknown14049('NmlAtk2D', 'BunshinAssault_A', 0, 0)
Unknown14049('NmlAtk2D', 'BunshinAssault_B', 1, 600000)
Unknown14049('NmlAtkAIR5A', 'NmlAtkAIR5B', 0, 0)
Unknown14049('NmlAtkAIR5B', 'NmlAtkAIR5C', 0, 0)
Unknown14049('NmlAtkAIR5C', 'AirAssault', 3, 0)
Unknown14049('NmlAtkAIR5C', 'FJump', 12, 0)
Unknown14049('NmlAtkAIR6D', 'AirAssault', 3, 0)
Unknown12018(0, 'Action_330_01')
Unknown12018(1, 'Action_330_04')
Unknown12018(2, 'Action_330_05')
Unknown12018(3, 'Action_330_06')
Unknown12018(4, 'Action_330_07')
Unknown12018(5, 'Action_330_07')
Unknown12018(6, 'Action_330_08')
Unknown12018(7, 'Action_017_01')
Unknown12018(8, 'Action_017_01')
Unknown12018(9, 'Action_019_01')
Unknown12018(10, 'Action_331_00')
Unknown12018(11, 'Action_331_00')
Unknown12018(12, 'Action_320_02')
Unknown12018(13, 'Action_330_08')
Unknown12018(14, 'Action_351_00')
Unknown12018(15, 'Action_290_00')
Unknown12018(16, 'Action_300_00')
Unknown12018(17, 'Action_304_02')
Unknown12018(18, 'Action_305_03')
Unknown12018(19, 'Action_000_00')
Unknown12018(20, 'Action_000_00')
Unknown12018(25, 'Action_326_00')
Unknown12018(26, 'Action_326_02')
Unknown12018(27, 'Action_326_03')
Unknown12018(28, 'Action_351_05')
Unknown12018(29, 'Action_292_00')
Unknown12018(24, 'Action_348_00')
Unknown7010(0, 'uli000')
Unknown7010(1, 'uli001')
Unknown7010(2, 'uli002')
Unknown7010(3, 'uli003')
Unknown7010(4, 'uli004')
Unknown7010(5, 'uli005')
Unknown7010(6, 'uli006')
Unknown7010(7, 'uli007')
Unknown7010(8, 'uli008')
Unknown7010(9, 'uli009')
Unknown7010(10, 'uli010')
Unknown7010(11, 'uli011')
Unknown7010(12, 'uli012')
Unknown7010(13, 'uli013')
Unknown7010(14, 'uli014')
Unknown7010(15, 'uli015')
Unknown7010(16, 'uli016')
Unknown7010(17, 'uli017')
Unknown7010(18, 'uli018')
Unknown7010(19, 'uli019')
Unknown7010(20, 'uli020')
Unknown7010(21, 'uli021')
Unknown7010(22, 'uli022')
Unknown7010(23, 'uli023')
Unknown7010(24, 'uli024')
Unknown7010(25, 'uli025')
Unknown7010(26, 'uli026')
Unknown7010(27, 'uli027')
Unknown7010(28, 'uli028')
Unknown7010(29, 'uli029')
Unknown7010(30, 'uli030')
Unknown7010(31, 'uli031')
Unknown7010(32, 'uli032')
Unknown7010(33, 'uli033')
Unknown7010(34, 'uli034')
Unknown7010(35, 'uli035')
Unknown7010(36, 'uli036')
Unknown7010(37, 'uli037')
Unknown7010(38, 'uli038')
Unknown7010(39, 'uli039')
Unknown7010(40, 'Hyd500')
Unknown7010(41, 'uli041')
Unknown7010(42, 'uli042')
Unknown7010(43, 'uli043')
Unknown7010(44, 'uli044')
Unknown7010(45, 'uli045')
Unknown7010(46, 'uli046')
Unknown7010(47, 'uli047')
Unknown7010(48, 'uli048')
Unknown7010(49, 'uli049')
Unknown7010(50, 'uli050')
Unknown7010(51, 'uli051')
Unknown7010(52, 'uli052')
Unknown7010(53, 'uli053')
Unknown7010(54, 'uli100_0')
Unknown7010(55, 'uli100_1')
Unknown7010(56, 'uli100_2')
Unknown7010(63, 'uli101_0')
Unknown7010(64, 'uli101_1')
Unknown7010(65, 'uli101_2')
Unknown7010(57, 'uli102_0')
Unknown7010(58, 'uli102_1')
Unknown7010(59, 'uli102_2')
Unknown7010(66, 'uli103_0')
Unknown7010(67, 'uli103_1')
Unknown7010(68, 'uli103_2')
Unknown7010(60, 'uli104_0')
Unknown7010(61, 'uli104_1')
Unknown7010(62, 'uli104_2')
Unknown7010(69, 'uli105_0')
Unknown7010(70, 'uli105_1')
Unknown7010(71, 'uli105_2')
Unknown7010(72, 'uli150')
Unknown7010(73, 'uli151')
Unknown7010(74, 'uli152')
Unknown7010(85, 'uli153')
Unknown7010(88, 'uli155')
Unknown7010(94, 'uli400_0')
Unknown7010(95, 'uli401_0')
Unknown7010(96, 'uli161_0')
Unknown7010(97, 'uli161_1')
Unknown7010(98, 'uli163_0')
Unknown7010(99, 'uli163_1')
Unknown7010(100, 'uli164_0')
Unknown7010(101, 'uli164_1')
Unknown7010(102, 'uli166_0')
Unknown7010(103, 'uli166_1')
Unknown7010(92, 'uli162_0')
Unknown7010(93, 'uli162_1')
Unknown7010(90, 'uli167_0')
Unknown7010(91, 'uli167_1')
Unknown7010(105, 'uli165_0')
Unknown7010(106, 'uli165_1')
Unknown7010(107, 'uli168_0')
Unknown7010(108, 'uli168_1')
Unknown7010(110, 'uli169_0')
Unknown7010(111, 'uli169_1')
Unknown7010(112, 'uli159_0')
Unknown7010(113, 'uli159_1')
Unknown12059('00000000436d6e416374496e76696e6369626c6541747461636b00000000000000000000')
Unknown12059('010000004e6d6c41746b3441000000000000000000000000000000000000000000000000')
Unknown12059('02000000556c74696d617465527573680000000000000000000000000000000000000000')
Unknown12059('03000000556c74696d617465527573684f44000000000000000000000000000000000000')
Unknown12059('04000000556c74696d61746552616e627500000000000000000000000000000000000000')
Unknown12059('05000000556c74696d61746552616e62754f440000000000000000000000000000000000')
Unknown12059('06000000436d6e4163744244617368000000000000000000000000000000000000000000')
Unknown12059('070000004e6d6c41746b5468726f77000000000000000000000000000000000000000000')
Unknown12059('08000000436d6e4163744368616e6765506172746e6572517569636b4f75740000000000')
GFX_0('Geboku', -1)
Unknown38(11, 1)
@Subroutine
def OnPreDraw():
Unknown23030('554c495f4c6967687400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000')
@Subroutine
def Func_BurstDD_Easy():
SLOT_47 = 0
if Unknown23145('CmnActOverDriveEnd'):
SLOT_47 = 1
@Subroutine
def OnActionBegin():
if Unknown46(11):
if Unknown23148('CmnActTagBattleWait'):
Unknown23029(11, 9901, 0)
else:
Unknown23029(11, 9900, 0)
@Subroutine
def ChainRoot():
HitOrBlockCancel('NmlAtk5A')
HitOrBlockCancel('NmlAtk2A')
HitOrBlockCancel('NmlAtk4A')
HitOrBlockCancel('NmlAtk5B')
HitOrBlockCancel('NmlAtk2B')
HitOrBlockCancel('NmlAtk2C')
HitOrBlockCancel('CmnActCrushAttack')
HitJumpCancel(1)
@State
def CmnActStand():
sprite('Action_000_00', 1) # 1-1 **attackbox here**
label(0)
sprite('Action_000_00', 7) # 2-8 **attackbox here**
sprite('Action_000_01', 7) # 9-15 **attackbox here**
sprite('Action_000_02', 6) # 16-21 **attackbox here**
sprite('Action_000_03', 6) # 22-27 **attackbox here**
sprite('Action_000_04', 8) # 28-35 **attackbox here**
sprite('Action_000_05', 5) # 36-40 **attackbox here**
sprite('Action_000_06', 5) # 41-45 **attackbox here**
sprite('Action_000_07', 5) # 46-50 **attackbox here**
sprite('Action_000_08', 6) # 51-56 **attackbox here**
sprite('Action_000_09', 5) # 57-61 **attackbox here**
sprite('Action_000_10', 6) # 62-67 **attackbox here**
sprite('Action_000_11', 8) # 68-75 **attackbox here**
sprite('Action_000_12', 5) # 76-80 **attackbox here**
sprite('Action_000_13', 5) # 81-85 **attackbox here**
sprite('Action_000_14', 6) # 86-91 **attackbox here**
loopRest()
random_(1, 2, 87)
if SLOT_ReturnVal:
_gotolabel(0)
random_(0, 2, 122)
if SLOT_ReturnVal:
_gotolabel(0)
random_(2, 0, 90)
if SLOT_ReturnVal:
_gotolabel(0)
sprite('Action_000_15', 7) # 92-98 **attackbox here**
SLOT_88 = 960
sprite('Action_000_16', 4) # 99-102 **attackbox here**
SFX_1('uli000')
sprite('Action_000_17', 7) # 103-109 **attackbox here**
SFX_0('003_swing_grap_0_0')
sprite('Action_000_18', 5) # 110-114 **attackbox here**
sprite('Action_000_19', 8) # 115-122 **attackbox here**
sprite('Action_000_20', 10) # 123-132 **attackbox here**
sprite('Action_000_21', 5) # 133-137 **attackbox here**
sprite('Action_000_22', 7) # 138-144 **attackbox here**
SFX_FOOTSTEP_(100, 0, 1)
sprite('Action_000_23', 3) # 145-147 **attackbox here**
sprite('Action_000_24', 20) # 148-167 **attackbox here**
sprite('Action_000_25', 240) # 168-407 **attackbox here**
sprite('Action_000_26', 5) # 408-412 **attackbox here**
sprite('Action_000_27', 8) # 413-420 **attackbox here**
sprite('Action_000_28', 6) # 421-426 **attackbox here**
sprite('Action_000_29', 4) # 427-430 **attackbox here**
sprite('Action_000_30', 4) # 431-434 **attackbox here**
SFX_0('008_swing_pole_0')
sprite('Action_000_31', 6) # 435-440 **attackbox here**
sprite('Action_000_32', 7) # 441-447
loopRest()
gotoLabel(0)
@State
def CmnActStandTurn():
sprite('Action_015_00', 2) # 1-2
sprite('Action_015_01', 2) # 3-4
sprite('Action_015_02', 2) # 5-6
@State
def CmnActStand2Crouch():
sprite('Action_012_00', 3) # 1-3
sprite('Action_012_01', 3) # 4-6
sprite('Action_012_02', 2) # 7-8
@State
def CmnActCrouch():
label(0)
sprite('Action_013_00', 15) # 1-15
sprite('Action_013_01', 7) # 16-22
sprite('Action_013_02', 6) # 23-28
sprite('Action_013_03', 6) # 29-34
sprite('Action_013_04', 7) # 35-41
sprite('Action_013_05', 7) # 42-48
sprite('Action_013_06', 5) # 49-53
sprite('Action_013_07', 5) # 54-58
sprite('Action_013_08', 5) # 59-63
loopRest()
gotoLabel(0)
@State
def CmnActCrouchTurn():
sprite('Action_016_00', 2) # 1-2
sprite('Action_016_01', 2) # 3-4
sprite('Action_016_02', 2) # 5-6
@State
def CmnActCrouch2Stand():
sprite('Action_014_00', 3) # 1-3
sprite('Action_014_01', 6) # 4-9
sprite('Action_014_02', 4) # 10-13
@State
def CmnActJumpPre():
sprite('Action_036_00', 4) # 1-4
@State
def CmnActJumpUpper():
label(0)
sprite('Action_036_01', 4) # 1-4
sprite('Action_035_02', 3) # 5-7
loopRest()
gotoLabel(0)
@State
def CmnActJumpUpperEnd():
sprite('Action_035_03', 3) # 1-3
sprite('Action_035_04', 6) # 4-9
sprite('Action_035_05', 6) # 10-15
sprite('Action_035_06', 6) # 16-21
@State
def CmnActJumpDown():
label(0)
sprite('Action_022_00', 3) # 1-3
sprite('Action_022_01', 3) # 4-6
loopRest()
gotoLabel(0)
@State
def CmnActJumpLanding():
sprite('Action_023_00', 2) # 1-2
sprite('Action_023_01', 1) # 3-3
sprite('Action_023_02', 2) # 4-5
sprite('Action_023_03', 3) # 6-8
@State
def CmnActLandingStiffLoop():
sprite('Action_023_00', 2) # 1-2
sprite('Action_023_01', 2) # 3-4
sprite('Action_023_02', 32767) # 5-32771
@State
def CmnActLandingStiffEnd():
sprite('Action_023_02', 3) # 1-3
sprite('Action_023_03', 3) # 4-6
@State
def CmnActFWalk():
label(0)
sprite('Action_010_00', 5) # 1-5
sprite('Action_010_01', 5) # 6-10
sprite('Action_010_02', 5) # 11-15
SFX_FOOTSTEP_(100, 1, 1)
sprite('Action_010_03', 5) # 16-20
sprite('Action_010_04', 5) # 21-25
sprite('Action_010_05', 5) # 26-30
sprite('Action_010_06', 5) # 31-35
sprite('Action_010_07', 5) # 36-40
SFX_FOOTSTEP_(100, 1, 1)
sprite('Action_010_08', 5) # 41-45
sprite('Action_010_09', 5) # 46-50
loopRest()
gotoLabel(0)
@State
def CmnActBWalk():
sprite('Action_011_00', 2) # 1-2
sprite('Action_011_01', 2) # 3-4
label(0)
sprite('Action_011_02', 4) # 5-8
SFX_FOOTSTEP_(100, 1, 1)
sprite('Action_011_03', 4) # 9-12
sprite('Action_011_04', 4) # 13-16
sprite('Action_011_05', 4) # 17-20
sprite('Action_011_06', 4) # 21-24
SFX_FOOTSTEP_(100, 1, 1)
sprite('Action_011_07', 4) # 25-28
sprite('Action_011_08', 4) # 29-32
sprite('Action_011_09', 4) # 33-36
sprite('Action_011_10', 4) # 37-40
sprite('Action_011_11', 4) # 41-44
sprite('Action_011_12', 4) # 45-48
sprite('Action_011_13', 4) # 49-52
loopRest()
gotoLabel(0)
@State
def CmnActFDash():
sprite('Action_045_13', 4) # 1-4
sprite('Action_045_00', 3) # 5-7
sprite('Action_045_01', 3) # 8-10
sprite('Action_045_02', 3) # 11-13
label(0)
sprite('Action_045_03', 3) # 14-16
Unknown8006(100, 1, 1)
sprite('Action_045_04', 3) # 17-19
sprite('Action_045_05', 3) # 20-22
sprite('Action_045_06', 3) # 23-25
Unknown8006(100, 1, 1)
sprite('Action_045_07', 3) # 26-28
sprite('Action_045_08', 3) # 29-31
sprite('Action_045_09', 3) # 32-34
sprite('Action_045_02', 3) # 35-37
loopRest()
gotoLabel(0)
@State
def CmnActFDashStop():
sprite('Action_045_11', 2) # 1-2
sprite('Action_045_12', 3) # 3-5
sprite('Action_045_13', 4) # 6-9
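# CmnActBDash: backdash. Grants brief invincibility via setInvincibleFor(7), applies a backward impulse (-40000) with a small hop (physicsYImpulse 8800), then holds in the label(0) loop until upon slot 2 (apparently the landing trigger, armed by sendToLabelUpon(2, 1)) sends it to label(1) for recovery.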
@State
def CmnActBDash():
def upon_IMMEDIATE():
Unknown2042(1)
Unknown28(8, '_NEUTRAL')
setInvincibleFor(7)
Unknown1084(1)
sendToLabelUpon(2, 1)
Unknown23001(100, 0)
Unknown23076()
def upon_CLEAR_OR_EXIT():
Unknown1019(90)
sprite('Action_046_00', 1) # 1-1
sprite('Action_046_01', 2) # 2-3
physicsXImpulse(-40000)
physicsYImpulse(8800)
setGravity(1550)
Unknown8002()
sprite('Action_046_02', 2) # 4-5
sprite('Action_046_02', 1) # 6-6
sprite('Action_046_03', 3) # 7-9
loopRest()
label(0)
sprite('Action_046_03', 3) # 10-12
sprite('Action_046_04', 3) # 13-15
loopRest()
gotoLabel(0)
label(1)
sprite('Action_046_05', 3) # 16-18
physicsXImpulse(0)
Unknown8000(100, 1, 1)
clearUponHandler(3)
sprite('Action_046_06', 3) # 19-21
@State
def CmnActBDashLanding():
pass
@State
def CmnActAirFDash():
def upon_IMMEDIATE():
Unknown22001(-1)
sprite('Action_068_01', 3) # 1-3
sprite('Action_068_02', 3) # 4-6
sprite('Action_068_03', 3) # 7-9
sprite('Action_068_04', 3) # 10-12
sprite('Action_068_05', 3) # 13-15
sprite('Action_068_06', 3) # 16-18
sprite('Action_068_07', 3) # 19-21
sprite('Action_068_08', 3) # 22-24
loopRest()
enterState('AirFDashRigor')
@State
def AirFDashRigor():
def upon_IMMEDIATE():
Unknown13014(1)
Unknown13015(1)
Unknown13031(1)
Unknown13019(1)
Unknown28(2, 'CmnActJumpLanding')
sprite('Action_068_09', 3) # 1-3
sprite('Action_068_10', 3) # 4-6
label(0)
sprite('Action_068_11', 3) # 7-9
sprite('Action_068_12', 3) # 10-12
loopRest()
gotoLabel(0)
@State
def CmnActAirBDash():
def upon_IMMEDIATE():
Unknown22001(-1)
sprite('Action_046_03', 4) # 1-4
physicsYImpulse(12000)
sprite('Action_046_04', 4) # 5-8
label(0)
sprite('Action_046_03', 4) # 9-12
sprite('Action_046_04', 4) # 13-16
loopRest()
gotoLabel(0)
@State
def CmnActHitStandLv1():
sprite('Action_300_00', 1) # 1-1
sprite('Action_300_00', 1) # 2-2
sprite('Action_300_01', 2) # 3-4
@State
def CmnActHitStandLv2():
sprite('Action_300_00', 1) # 1-1
sprite('Action_300_00', 2) # 2-3
sprite('Action_300_01', 3) # 4-6
@State
def CmnActHitStandLv3():
sprite('Action_303_00', 1) # 1-1
sprite('Action_303_00', 2) # 2-3
sprite('Action_303_01', 2) # 4-5
sprite('Action_303_02', 2) # 6-7
sprite('Action_303_03', 2) # 8-9
@State
def CmnActHitStandLv4():
sprite('Action_303_00', 1) # 1-1
sprite('Action_303_00', 1) # 2-2
sprite('Action_303_01', 3) # 3-5
sprite('Action_303_02', 3) # 6-8
sprite('Action_303_03', 2) # 9-10
@State
def CmnActHitStandLv5():
sprite('Action_303_00', 1) # 1-1
sprite('Action_303_00', 3) # 2-4
sprite('Action_303_01', 4) # 5-8
sprite('Action_303_02', 4) # 9-12
sprite('Action_303_03', 4) # 13-16
@State
def CmnActHitStandLowLv1():
sprite('Action_304_02', 1) # 1-1
sprite('Action_304_02', 1) # 2-2
sprite('Action_304_03', 2) # 3-4
@State
def CmnActHitStandLowLv2():
sprite('Action_304_02', 1) # 1-1
sprite('Action_304_02', 2) # 2-3
sprite('Action_304_03', 3) # 4-6
@State
def CmnActHitStandLowLv3():
sprite('Action_304_00', 1) # 1-1
sprite('Action_304_00', 1) # 2-2
sprite('Action_304_01', 2) # 3-4
sprite('Action_304_02', 2) # 5-6
sprite('Action_304_03', 2) # 7-8
@State
def CmnActHitStandLowLv4():
sprite('Action_304_00', 1) # 1-1
sprite('Action_304_00', 1) # 2-2
sprite('Action_304_01', 3) # 3-5
sprite('Action_304_02', 3) # 6-8
sprite('Action_304_03', 2) # 9-10
@State
def CmnActHitStandLowLv5():
sprite('Action_304_00', 1) # 1-1
sprite('Action_304_00', 3) # 2-4
sprite('Action_304_01', 4) # 5-8
sprite('Action_304_02', 4) # 9-12
sprite('Action_304_03', 4) # 13-16
@State
def CmnActHitCrouchLv1():
sprite('Action_305_02', 1) # 1-1
sprite('Action_305_02', 1) # 2-2
sprite('Action_305_03', 2) # 3-4
@State
def CmnActHitCrouchLv2():
sprite('Action_305_02', 1) # 1-1
sprite('Action_305_02', 2) # 2-3
sprite('Action_305_03', 3) # 4-6
@State
def CmnActHitCrouchLv3():
sprite('Action_305_00', 1) # 1-1
sprite('Action_305_00', 2) # 2-3
sprite('Action_305_01', 2) # 4-5
sprite('Action_305_02', 2) # 6-7
sprite('Action_305_03', 2) # 8-9
@State
def CmnActHitCrouchLv4():
sprite('Action_305_00', 1) # 1-1
sprite('Action_305_00', 1) # 2-2
sprite('Action_305_01', 3) # 3-5
sprite('Action_305_02', 3) # 6-8
sprite('Action_305_03', 2) # 9-10
@State
def CmnActHitCrouchLv5():
sprite('Action_305_00', 1) # 1-1
sprite('Action_305_00', 3) # 2-4
sprite('Action_305_01', 4) # 5-8
sprite('Action_305_02', 4) # 9-12
sprite('Action_305_03', 4) # 13-16
@State
def CmnActBDownUpper():
sprite('Action_320_00', 4) # 1-4
label(0)
sprite('Action_320_01', 4) # 5-8
sprite('Action_320_01', 4) # 9-12
loopRest()
gotoLabel(0)
@State
def CmnActBDownUpperEnd():
sprite('Action_330_04', 3) # 1-3
sprite('Action_330_05', 3) # 4-6
@State
def CmnActBDownDown():
sprite('Action_330_06', 3) # 1-3
sprite('Action_330_07', 3) # 4-6
label(0)
sprite('Action_330_08', 4) # 7-10
sprite('Action_330_09', 4) # 11-14
loopRest()
gotoLabel(0)
@State
def CmnActBDownCrash():
sprite('Action_351_00', 2) # 1-2
sprite('Action_331_01', 2) # 3-4
@State
def CmnActBDownBound():
sprite('Action_350_01', 3) # 1-3
sprite('Action_350_02', 3) # 4-6
sprite('Action_350_03', 3) # 7-9
sprite('Action_350_04', 3) # 10-12
sprite('Action_350_05', 3) # 13-15
@State
def CmnActBDownLoop():
sprite('Action_350_06', 1) # 1-1
@State
def CmnActBDown2Stand():
sprite('Action_293_11', 4) # 1-4
sprite('Action_293_12', 4) # 5-8
sprite('Action_293_13', 3) # 9-11
sprite('Action_293_14', 3) # 12-14
sprite('Action_293_15', 4) # 15-18
sprite('Action_293_16', 4) # 19-22
sprite('Action_293_17', 4) # 23-26
@State
def CmnActFDownUpper():
sprite('Action_326_00', 3) # 1-3
@State
def CmnActFDownUpperEnd():
sprite('Action_326_02', 3) # 1-3
@State
def CmnActFDownDown():
label(0)
sprite('Action_326_03', 4) # 1-4
sprite('Action_326_04', 4) # 5-8
loopRest()
gotoLabel(0)
@State
def CmnActFDownCrash():
sprite('Action_354_00', 3) # 1-3
@State
def CmnActFDownBound():
sprite('Action_354_01', 4) # 1-4
sprite('Action_354_02', 4) # 5-8
sprite('Action_354_03', 4) # 9-12
@State
def CmnActFDownLoop():
sprite('Action_292_00', 3) # 1-3
@State
def CmnActFDown2Stand():
sprite('Action_294_11', 3) # 1-3
sprite('Action_294_12', 3) # 4-6
sprite('Action_294_13', 3) # 7-9
sprite('Action_294_14', 3) # 10-12
sprite('Action_294_15', 3) # 13-15
sprite('Action_294_16', 3) # 16-18
sprite('Action_294_17', 2) # 19-20
@State
def CmnActVDownUpper():
sprite('Action_330_00', 3) # 1-3
label(0)
sprite('Action_330_01', 3) # 4-6
sprite('Action_330_02', 3) # 7-9
loopRest()
gotoLabel(0)
@State
def CmnActVDownUpperEnd():
sprite('Action_330_03', 2) # 1-2
sprite('Action_330_04', 2) # 3-4
sprite('Action_330_05', 2) # 5-6
@State
def CmnActVDownDown():
sprite('Action_330_06', 3) # 1-3
sprite('Action_330_06', 3) # 4-6
label(0)
sprite('Action_330_08', 4) # 7-10
sprite('Action_330_09', 4) # 11-14
loopRest()
gotoLabel(0)
@State
def CmnActVDownCrash():
sprite('Action_351_00', 3) # 1-3
sprite('Action_351_01', 3) # 4-6
@State
def CmnActBlowoff():
sprite('Action_331_00', 2) # 1-2
label(0)
sprite('Action_331_02', 3) # 3-5
sprite('Action_331_03', 3) # 6-8
loopRest()
gotoLabel(0)
@State
def CmnActKirimomiUpper():
label(0)
sprite('Action_333_00', 2) # 1-2
sprite('Action_333_01', 2) # 3-4
sprite('Action_333_02', 2) # 5-6
sprite('Action_333_03', 2) # 7-8
sprite('Action_333_04', 2) # 9-10
sprite('Action_333_05', 2) # 11-12
sprite('Action_333_06', 2) # 13-14
loopRest()
gotoLabel(0)
@State
def CmnActSkeleton():
label(0)
sprite('Action_301_00', 2) # 1-2
sprite('Action_301_00', 2) # 3-4
loopRest()
gotoLabel(0)
@State
def CmnActFreeze():
sprite('Action_301_00', 1) # 1-1
@State
def CmnActWallBound():
sprite('Action_340_00', 2) # 1-2
sprite('Action_340_01', 2) # 3-4
sprite('Action_340_02', 2) # 5-6
@State
def CmnActWallBoundDown():
sprite('Action_340_03', 3) # 1-3
sprite('Action_340_04', 3) # 4-6
label(0)
sprite('Action_340_05', 3) # 7-9
loopRest()
gotoLabel(0)
@State
def CmnActStaggerLoop():
sprite('Action_327_00', 14) # 1-14
@State
def CmnActStaggerDown():
sprite('Action_327_02', 5) # 1-5
sprite('Action_327_03', 5) # 6-10
sprite('Action_327_04', 4) # 11-14
sprite('Action_328_00', 4) # 15-18
sprite('Action_328_01', 4) # 19-22
@State
def CmnActUkemiStagger():
sprite('Action_327_00', 8) # 1-8
@State
def CmnActUkemiAirF():
sprite('Action_032_00', 2) # 1-2
sprite('Action_032_01', 2) # 3-4
sprite('Action_032_02', 2) # 5-6
sprite('Action_032_03', 1) # 7-7
sprite('Action_032_04', 1) # 8-8
sprite('Action_032_05', 1) # 9-9
@State
def CmnActUkemiAirB():
sprite('Action_032_00', 4) # 1-4
sprite('Action_032_01', 4) # 5-8
sprite('Action_032_02', 4) # 9-12
sprite('Action_032_03', 4) # 13-16
sprite('Action_032_04', 4) # 17-20
sprite('Action_032_05', 3) # 21-23
@State
def CmnActUkemiAirN():
sprite('Action_032_00', 4) # 1-4
sprite('Action_032_01', 4) # 5-8
sprite('Action_032_02', 4) # 9-12
sprite('Action_032_03', 4) # 13-16
sprite('Action_032_04', 4) # 17-20
sprite('Action_032_05', 3) # 21-23
sprite('Action_032_06', 3) # 24-26
sprite('Action_032_07', 3) # 27-29
@State
def CmnActUkemiLandF():
sprite('Action_041_00', 2) # 1-2
sprite('Action_041_01', 2) # 3-4
sprite('Action_041_02', 2) # 5-6
sprite('Action_041_03', 2) # 7-8
sprite('Action_041_04', 2) # 9-10
sprite('Action_041_05', 2) # 11-12
sprite('Action_041_06', 2) # 13-14
sprite('Action_041_07', 2) # 15-16
@State
def CmnActUkemiLandB():
sprite('Action_041_00', 2) # 1-2
sprite('Action_041_01', 2) # 3-4
sprite('Action_041_02', 2) # 5-6
sprite('Action_041_03', 2) # 7-8
sprite('Action_041_04', 2) # 9-10
sprite('Action_041_05', 2) # 11-12
sprite('Action_041_06', 2) # 13-14
sprite('Action_041_07', 2) # 15-16
@State
def CmnActUkemiLandN():
sprite('Action_041_00', 2) # 1-2
sprite('Action_041_01', 2) # 3-4
sprite('Action_041_02', 2) # 5-6
sprite('Action_041_03', 2) # 7-8
sprite('Action_041_04', 2) # 9-10
sprite('Action_041_05', 2) # 11-12
sprite('Action_041_06', 2) # 13-14
sprite('Action_041_07', 2) # 15-16
@State
def CmnActUkemiLandNLanding():
sprite('Action_041_08', 5) # 1-5
sprite('Action_041_09', 5) # 6-10
sprite('Action_041_10', 5) # 11-15
@State
def CmnActMidGuardPre():
sprite('Action_017_00', 3) # 1-3
sprite('Action_017_01', 3) # 4-6
@State
def CmnActMidGuardLoop():
label(0)
sprite('Action_017_00', 3) # 1-3
sprite('Action_017_01', 3) # 4-6
gotoLabel(0)
@State
def CmnActMidGuardEnd():
sprite('Action_017_06', 3) # 1-3
sprite('Action_017_07', 3) # 4-6
@State
def CmnActMidHeavyGuardLoop():
sprite('Action_017_00', 3) # 1-3
sprite('Action_017_01', 3) # 4-6
@State
def CmnActMidHeavyGuardEnd():
sprite('Action_017_06', 3) # 1-3
sprite('Action_017_07', 3) # 4-6
@State
def CmnActHighGuardPre():
sprite('Action_017_00', 3) # 1-3
sprite('Action_017_01', 3) # 4-6
@State
def CmnActHighGuardLoop():
sprite('Action_017_00', 3) # 1-3
@State
def CmnActHighGuardEnd():
sprite('Action_017_06', 3) # 1-3
sprite('Action_017_07', 3) # 4-6
@State
def CmnActHighHeavyGuardLoop():
sprite('Action_017_00', 3) # 1-3
sprite('Action_017_01', 3) # 4-6
@State
def CmnActHighHeavyGuardEnd():
sprite('Action_017_06', 3) # 1-3
sprite('Action_017_07', 3) # 4-6
@State
def CmnActCrouchGuardPre():
sprite('Action_018_00', 3) # 1-3
sprite('Action_018_01', 3) # 4-6
@State
def CmnActCrouchGuardLoop():
label(0)
sprite('Action_018_02', 3) # 1-3
sprite('Action_018_03', 3) # 4-6
gotoLabel(0)
@State
def CmnActCrouchGuardEnd():
sprite('Action_018_06', 3) # 1-3
sprite('Action_018_07', 3) # 4-6
@State
def CmnActCrouchHeavyGuardLoop():
sprite('Action_018_01', 3) # 1-3
sprite('Action_018_02', 3) # 4-6
@State
def CmnActCrouchHeavyGuardEnd():
sprite('Action_018_06', 3) # 1-3
sprite('Action_018_07', 3) # 4-6
@State
def CmnActAirGuardPre():
sprite('Action_019_00', 3) # 1-3
sprite('Action_019_01', 3) # 4-6
@State
def CmnActAirGuardLoop():
label(0)
sprite('Action_019_02', 3) # 1-3
sprite('Action_019_03', 3) # 4-6
loopRest()
gotoLabel(0)
@State
def CmnActAirGuardEnd():
sprite('Action_019_06', 3) # 1-3
sprite('Action_019_07', 3) # 4-6
@State
def CmnActAirHeavyGuardLoop():
sprite('Action_019_02', 3) # 1-3
sprite('Action_019_03', 3) # 4-6
@State
def CmnActAirHeavyGuardEnd():
sprite('Action_019_06', 3) # 1-3
sprite('Action_019_07', 3) # 4-6
@State
def CmnActGuardBreakStand():
sprite('Action_017_00', 2) # 1-2
sprite('Action_017_01', 2) # 3-4
sprite('Action_017_01', 1) # 5-5
Unknown2042(1)
sprite('Action_017_05', 6) # 6-11
sprite('Action_017_06', 6) # 12-17
@State
def CmnActGuardBreakCrouch():
sprite('Action_018_00', 2) # 1-2
sprite('Action_018_01', 2) # 3-4
sprite('Action_018_00', 1) # 5-5
Unknown2042(1)
sprite('Action_018_01', 6) # 6-11
sprite('Action_018_00', 6) # 12-17
@State
def CmnActGuardBreakAir():
sprite('Action_019_00', 2) # 1-2
sprite('Action_019_01', 2) # 3-4
sprite('Action_019_00', 1) # 5-5
Unknown2042(1)
sprite('Action_019_01', 6) # 6-11
sprite('Action_019_00', 6) # 12-17
@State
def CmnActAirTurn():
sprite('Action_036_01', 9) # 1-9
@State
def CmnActLockWait():
sprite('Action_017_00', 3) # 1-3
sprite('Action_017_01', 3) # 4-6
@State
def CmnActLockReject():
sprite('Action_003_02', 1) # 1-1
sprite('Action_003_03', 1) # 2-2
sprite('Action_003_04', 2) # 3-4 **attackbox here**
GFX_0('EffNmlAtk5CBlade', 100)
sprite('Action_003_05', 4) # 5-8 **attackbox here**
sprite('Action_003_06', 7) # 9-15
sprite('Action_003_07', 3) # 16-18
sprite('Action_003_08', 3) # 19-21
sprite('Action_003_09', 3) # 22-24
sprite('Action_003_10', 2) # 25-26
@State
def CmnActAirLockWait():
sprite('Action_019_02', 1) # 1-1
sprite('Action_019_01', 3) # 2-4
sprite('Action_019_00', 3) # 5-7
@State
def CmnActLandSpin():
sprite('hb071_00', 4) # 1-4
sprite('hb071_01', 4) # 5-8
label(0)
sprite('hb071_02', 2) # 9-10
sprite('hb071_03', 2) # 11-12
sprite('hb071_04', 2) # 13-14
sprite('hb071_05', 2) # 15-16
sprite('hb071_06', 2) # 17-18
sprite('hb071_07', 2) # 19-20
sprite('hb071_08', 2) # 21-22
sprite('hb071_09', 2) # 23-24
loopRest()
gotoLabel(0)
@State
def CmnActLandSpinDown():
sprite('hb071_10', 6) # 1-6
sprite('hb071_11', 5) # 7-11
sprite('hb071_12', 5) # 12-16
@State
def CmnActVertSpin():
label(0)
sprite('Action_333_00', 2) # 1-2
sprite('Action_333_01', 2) # 3-4
sprite('Action_333_02', 2) # 5-6
sprite('Action_333_03', 2) # 7-8
sprite('Action_333_04', 2) # 9-10
sprite('Action_333_05', 2) # 11-12
loopRest()
gotoLabel(0)
@State
def CmnActSlideAir():
label(0)
sprite('Action_333_00', 2) # 1-2
sprite('Action_333_01', 2) # 3-4
sprite('Action_333_02', 2) # 5-6
sprite('Action_333_03', 2) # 7-8
sprite('Action_333_04', 2) # 9-10
sprite('Action_333_05', 2) # 11-12
loopRest()
gotoLabel(0)
@State
def CmnActSlideKeep():
sprite('Action_351_00', 1) # 1-1
label(0)
sprite('Action_351_01', 3) # 2-4
loopRest()
gotoLabel(0)
@State
def CmnActSlideEnd():
sprite('Action_351_02', 3) # 1-3
sprite('Action_351_03', 2) # 4-5
sprite('Action_351_04', 2) # 6-7
sprite('Action_351_05', 2) # 8-9
@State
def CmnActAomukeSlideKeep():
sprite('Action_351_04', 1) # 1-1
label(0)
sprite('Action_351_04', 3) # 2-4
loopRest()
gotoLabel(0)
@State
def CmnActAomukeSlideEnd():
sprite('Action_351_02', 3) # 1-3
sprite('Action_351_03', 2) # 4-5
sprite('Action_351_04', 2) # 6-7
sprite('Action_351_05', 2) # 8-9
@State
def CmnActBurstBegin():
sprite('Action_262_00', 4) # 1-4
label(0)
sprite('Action_262_01', 5) # 5-9
loopRest()
gotoLabel(0)
@State
def CmnActBurstLoop():
sprite('Action_262_02', 4) # 1-4
label(0)
sprite('Action_262_03', 5) # 5-9
sprite('Action_262_04', 5) # 10-14
loopRest()
gotoLabel(0)
@State
def CmnActBurstEnd():
sprite('Action_262_05', 3) # 1-3
sprite('Action_262_06', 3) # 4-6
@State
def CmnActAirBurstBegin():
sprite('Action_262_02', 4) # 1-4
label(0)
sprite('Action_262_03', 5) # 5-9
sprite('Action_262_04', 5) # 10-14
loopRest()
gotoLabel(0)
@State
def CmnActAirBurstLoop():
sprite('Action_262_02', 4) # 1-4
label(0)
sprite('Action_262_03', 5) # 5-9
sprite('Action_262_04', 5) # 10-14
loopRest()
gotoLabel(0)
@State
def CmnActAirBurstEnd():
label(0)
sprite('Action_262_07', 4) # 1-4
sprite('Action_262_08', 4) # 5-8
loopRest()
gotoLabel(0)
@State
def CmnActOverDriveBegin():
sprite('Action_262_00', 4) # 1-4
sprite('Action_262_01', 32767) # 5-32771
loopRest()
@State
def CmnActOverDriveLoop():
sprite('Action_262_02', 4) # 1-4
label(0)
sprite('Action_262_03', 5) # 5-9
sprite('Action_262_04', 5) # 10-14
loopRest()
gotoLabel(0)
@State
def CmnActOverDriveEnd():
sprite('Action_262_05', 6) # 1-6
sprite('Action_262_06', 6) # 7-12
@State
def CmnActAirOverDriveBegin():
sprite('Action_262_00', 4) # 1-4
sprite('Action_262_01', 32767) # 5-32771
loopRest()
@State
def CmnActAirOverDriveLoop():
sprite('Action_262_02', 4) # 1-4
label(0)
sprite('Action_262_03', 5) # 5-9
sprite('Action_262_04', 5) # 10-14
loopRest()
gotoLabel(0)
@State
def CmnActAirOverDriveEnd():
sprite('Action_262_07', 3) # 1-3
sprite('Action_262_08', 3) # 4-6
label(0)
sprite('Action_022_00', 4) # 7-10
sprite('Action_022_01', 4) # 11-14
loopRest()
gotoLabel(0)
@State
def CmnActCrossRushBegin():
sprite('Action_262_00', 4) # 1-4
sprite('Action_262_01', 32767) # 5-32771
loopRest()
@State
def CmnActCrossRushLoop():
sprite('Action_262_02', 4) # 1-4
label(0)
sprite('Action_262_03', 5) # 5-9
sprite('Action_262_04', 5) # 10-14
loopRest()
gotoLabel(0)
@State
def CmnActCrossRushEnd():
sprite('Action_262_05', 6) # 1-6
sprite('Action_262_06', 6) # 7-12
@State
def CmnActAirCrossRushBegin():
sprite('Action_262_00', 4) # 1-4
sprite('Action_262_01', 32767) # 5-32771
loopRest()
@State
def CmnActAirCrossRushLoop():
sprite('Action_262_02', 4) # 1-4
label(0)
sprite('Action_262_03', 5) # 5-9
sprite('Action_262_04', 5) # 10-14
loopRest()
gotoLabel(0)
@State
def CmnActAirCrossRushEnd():
sprite('Action_262_07', 3) # 1-3
sprite('Action_262_08', 3) # 4-6
label(0)
sprite('Action_022_00', 4) # 7-10
sprite('Action_022_01', 4) # 11-14
loopRest()
gotoLabel(0)
@State
def CmnActCrossChangeBegin():
sprite('Action_262_00', 4) # 1-4
sprite('Action_262_01', 32767) # 5-32771
loopRest()
@State
def CmnActCrossChangeLoop():
sprite('Action_262_02', 4) # 1-4
label(0)
sprite('Action_262_03', 5) # 5-9
sprite('Action_262_04', 5) # 10-14
loopRest()
gotoLabel(0)
@State
def CmnActCrossChangeEnd():
sprite('Action_262_05', 3) # 1-3
sprite('Action_262_06', 3) # 4-6
@State
def CmnActAirCrossChangeBegin():
sprite('Action_262_00', 4) # 1-4
sprite('Action_262_01', 32767) # 5-32771
loopRest()
@State
def CmnActAirCrossChangeLoop():
sprite('Action_262_02', 4) # 1-4
label(0)
sprite('Action_262_03', 5) # 5-9
sprite('Action_262_04', 5) # 10-14
loopRest()
gotoLabel(0)
@State
def CmnActAirCrossChangeEnd():
sprite('Action_262_07', 3) # 1-3
sprite('Action_262_08', 3) # 4-6
label(0)
sprite('Action_022_00', 4) # 7-10
sprite('Action_022_01', 4) # 11-14
loopRest()
gotoLabel(0)
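# CmnActAComboFinalBlow: auto-combo finisher. Repositions backward, zeroes gravity and applies a strong downward impulse, loops the diving attack frames in label(0); upon_LANDING jumps to label(1), where a set SLOT_3 (presumably the hit-confirm flag) enters CmnActAComboFinalBlowFinish.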
@State
def CmnActAComboFinalBlow():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
def upon_LANDING():
clearUponHandler(2)
sendToLabel(1)
sprite('null', 30) # 1-30
sprite('null', 1) # 31-31
teleportRelativeX(-25000)
Unknown1007(600000)
setGravity(0)
physicsYImpulse(-60000)
SLOT_12 = SLOT_19
Unknown1019(4)
label(0)
sprite('Action_146_03ex', 3) # 32-34 **attackbox here**
sprite('Action_146_04ex', 3) # 35-37 **attackbox here**
loopRest()
gotoLabel(0)
label(1)
sprite('keep', 1) # 38-38
sprite('keep', 2) # 39-40
if SLOT_3:
enterState('CmnActAComboFinalBlowFinish')
StartMultihit()
Unknown23022(0)
Unknown1084(1)
sprite('Action_146_05', 5) # 41-45
Unknown8000(100, 1, 1)
sprite('Action_146_06', 18) # 46-63
sprite('Action_146_07', 5) # 64-68
@State
def CmnActAComboFinalBlowFinish():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
Unknown9016(1)
sprite('keep', 1) # 1-1
StartMultihit()
Unknown8000(100, 1, 1)
sprite('Action_146_05', 4) # 2-5
sprite('Action_146_06', 5) # 6-10
sprite('Action_146_07', 5) # 11-15
sprite('Action_140_00', 3) # 16-18
sprite('Action_140_01', 6) # 19-24
sprite('Action_140_02', 6) # 25-30
sprite('Action_140_03', 4) # 31-34
Unknown7009(2)
sprite('Action_140_04', 6) # 35-40 **attackbox here**
GFX_0('EffNmlAtk6CBlade1st', 100)
SFX_0('006_swing_blade_0')
sprite('Action_140_05', 25) # 41-65
sprite('Action_140_06', 5) # 66-70
sprite('Action_140_07', 5) # 71-75
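# NmlAtk5A: standing A normal. Attack level 2, cancels into AN_NmlAtk5A_2nd and both throws on hit or block, and allows hit/block jump cancel.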
@State
def NmlAtk5A():
def upon_IMMEDIATE():
AttackDefaults_StandingNormal()
AttackLevel_(2)
AirPushbackY(18000)
Unknown1112('')
callSubroutine('ChainRoot')
HitOrBlockCancel('AN_NmlAtk5A_2nd')
HitOrBlockCancel('NmlAtkThrow')
HitOrBlockCancel('NmlAtkBackThrow')
HitOrBlockJumpCancel(1)
sprite('Action_002_00', 3) # 1-3
sprite('Action_002_01', 2) # 4-5
sprite('Action_002_02', 1) # 6-6
Unknown7009(0)
SFX_0('004_swing_grap_1_0')
sprite('Action_002_03', 4) # 7-10 **attackbox here**
sprite('Action_002_04', 4) # 11-14
Recovery()
Unknown2063()
sprite('Action_002_05', 4) # 15-18
sprite('Action_002_06', 3) # 19-21
sprite('Action_002_07', 3) # 22-24
@State
def AN_NmlAtk5A_2nd():
def upon_IMMEDIATE():
AttackDefaults_StandingNormal()
AttackLevel_(3)
Damage(1300)
AirPushbackY(0)
Unknown9016(1)
callSubroutine('ChainRoot')
HitOrBlockCancel('AN_NmlAtk5A_3rd')
HitOrBlockCancel('NmlAtkThrow')
HitOrBlockCancel('NmlAtkBackThrow')
sprite('Action_145_00', 2) # 1-2
sprite('Action_145_01', 4) # 3-6
Unknown7009(1)
SFX_0('010_swing_sword_1')
sprite('Action_145_02', 2) # 7-8 **attackbox here**
GFX_0('Lin_082', 100)
sprite('Action_145_03', 3) # 9-11
Recovery()
Unknown2063()
sprite('Action_145_04', 6) # 12-17
sprite('Action_145_05', 5) # 18-22
sprite('Action_145_06', 4) # 23-26
@State
def AN_NmlAtk5A_3rd():
def upon_IMMEDIATE():
AttackDefaults_StandingNormal()
AttackLevel_(3)
Damage(950)
Unknown11092(1)
Hitstop(7)
AirPushbackY(20000)
Unknown9016(1)
callSubroutine('ChainRoot')
sprite('Action_003_00', 2) # 1-2
sprite('Action_003_01', 3) # 3-5
sprite('Action_003_02', 3) # 6-8
sprite('Action_003_03', 1) # 9-9
Unknown7009(2)
SFX_0('010_swing_sword_2')
sprite('Action_003_04', 2) # 10-11 **attackbox here**
GFX_0('EffNmlAtk5CBlade', 100)
sprite('Action_003_05', 4) # 12-15 **attackbox here**
RefreshMultihit()
def upon_ON_HIT_OR_BLOCK():
HitOrBlockCancel('AN_NmlAtk5A_4th')
sprite('Action_003_06', 7) # 16-22
Recovery()
Unknown2063()
HitOrBlockCancel('AN_NmlAtk5A_4th')
sprite('Action_003_07', 4) # 23-26
sprite('Action_003_08', 4) # 27-30
sprite('Action_003_09', 3) # 31-33
sprite('Action_003_10', 3) # 34-36
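# AN_NmlAtk5A_4th: auto-combo ender of the A chain. The Unknown23145('AN_NmlAtk5A_3rd') check appears to test whether it was entered from the 3rd hit and tweaks properties in that case; the slash is re-armed with RefreshMultihit and a ShotDashCancel window is handled through the Unknown14070/14072/14074 calls.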
@State
def AN_NmlAtk5A_4th():
def upon_IMMEDIATE():
AttackDefaults_StandingNormal()
AttackLevel_(4)
Damage(1100)
GroundedHitstunAnimation(9)
AirPushbackY(5000)
AirPushbackX(30000)
AirUntechableTime(30)
Hitstop(4)
Unknown11001(5, 5, 10)
Unknown9016(1)
JumpCancel_(0)
if Unknown23145('AN_NmlAtk5A_3rd'):
Unknown11044(1)
Unknown2037(1)
sprite('Action_441_00', 2) # 1-2
sprite('Action_441_01', 2) # 3-4
sprite('Action_441_02', 2) # 5-6
sprite('Action_441_03', 4) # 7-10
sprite('Action_441_04', 2) # 11-12
GFX_0('Lin_430', 100)
Unknown4020(1)
GFX_0('Lin_433', 100)
Unknown4020(1)
SFX_4('uli204_2')
SFX_0('006_swing_blade_1')
sprite('Action_441_05', 2) # 13-14
sprite('Action_441_06', 2) # 15-16
sprite('Action_441_07', 2) # 17-18
sprite('Action_145_00', 2) # 19-20
Unknown14070('ShotDashCancel')
sprite('Action_145_01', 2) # 21-22
DisableAttackRestOfMove()
sprite('Action_145_02', 3) # 23-25 **attackbox here**
GFX_0('Lin_432', 100)
Unknown4020(1)
GFX_0('Lin_434', 100)
Unknown4020(1)
SFX_0('006_swing_blade_2')
sprite('Action_145_03', 3) # 26-28
RefreshMultihit()
Unknown11001(0, 0, 5)
AirPushbackY(10000)
clearUponHandler(10)
def upon_ON_HIT_OR_BLOCK():
Unknown14072('ShotDashCancel')
if SLOT_2:
Unknown30088(1)
sprite('Action_145_04', 12) # 29-40
Unknown4020(0)
Unknown14072('ShotDashCancel')
Recovery()
Unknown2063()
sprite('Action_145_05', 5) # 41-45
Unknown14074('ShotDashCancel')
sprite('Action_145_06', 4) # 46-49
@State
def NmlAtk4A():
def upon_IMMEDIATE():
AttackDefaults_StandingNormal()
AttackLevel_(2)
Damage(700)
AttackP2(75)
Unknown11092(1)
AirPushbackY(10000)
PushbackX(8000)
Hitstop(6)
Unknown9016(1)
HitOrBlockCancel('NmlAtkThrow')
HitOrBlockCancel('NmlAtkBackThrow')
HitOrBlockJumpCancel(1)
sprite('Action_249_00', 3) # 1-3
sprite('Action_249_01', 1) # 4-4
tag_voice(1, 'uli206_0', 'uli206_1', 'uli206_2', '')
SFX_0('010_swing_sword_1')
sprite('Action_249_02', 1) # 5-5 **attackbox here**
GFX_0('EffARushSlash00', 100)
sprite('Action_249_03', 2) # 6-7
sprite('Action_249_04', 2) # 8-9
sprite('Action_249_05', 2) # 10-11
SFX_0('008_swing_pole_1')
sprite('Action_249_06', 4) # 12-15 **attackbox here**
GFX_0('EffARushSlash01', 100)
RefreshMultihit()
Hitstop(10)
def upon_ON_HIT_OR_BLOCK():
callSubroutine('ChainRoot')
HitOrBlockCancel('AN_NmlAtk4A_2nd')
sprite('Action_249_07', 6) # 16-21
callSubroutine('ChainRoot')
HitOrBlockCancel('AN_NmlAtk4A_2nd')
Recovery()
Unknown2063()
sprite('Action_249_08', 6) # 22-27
@State
def AN_NmlAtk4A_2nd():
def upon_IMMEDIATE():
AttackDefaults_StandingNormal()
AttackLevel_(2)
Damage(800)
Unknown11092(1)
AirPushbackY(10000)
PushbackX(8000)
Hitstop(6)
Unknown9016(1)
HitOrBlockCancel('NmlAtkThrow')
HitOrBlockCancel('NmlAtkBackThrow')
sprite('Action_253_00', 4) # 1-4
sprite('Action_253_01', 2) # 5-6
SFX_0('010_swing_sword_1')
sprite('Action_253_02', 1) # 7-7 **attackbox here**
GFX_0('EffARush2ndSlash00', 100)
sprite('Action_253_03', 2) # 8-9
sprite('Action_253_04', 2) # 10-11
sprite('Action_253_05', 2) # 12-13
tag_voice(0, 'uli207_0', 'uli207_1', 'uli207_2', '')
SFX_0('008_swing_pole_2')
sprite('Action_253_06', 1) # 14-14 **attackbox here**
GFX_0('EffARush2ndSlash01', 100)
RefreshMultihit()
Hitstop(10)
PushbackX(-8000)
AirPushbackX(-5000)
AirPushbackY(-5000)
def upon_ON_HIT_OR_BLOCK():
callSubroutine('ChainRoot')
HitOrBlockCancel('AN_NmlAtk4A_3rd')
sprite('Action_253_06', 3) # 15-17 **attackbox here**
callSubroutine('ChainRoot')
HitOrBlockCancel('AN_NmlAtk4A_3rd')
sprite('Action_253_07', 2) # 18-19
Recovery()
Unknown2063()
sprite('Action_253_08', 8) # 20-27
sprite('Action_253_09', 6) # 28-33
@State
def AN_NmlAtk4A_3rd():
def upon_IMMEDIATE():
AttackDefaults_StandingNormal()
AttackLevel_(3)
Damage(1000)
AttackP2(70)
Unknown11092(1)
AirPushbackY(15000)
Hitstop(6)
Unknown9016(1)
sprite('Action_255_07', 3) # 1-3
sprite('Action_255_08', 3) # 4-6
SFX_0('010_swing_sword_1')
sprite('Action_255_09', 4) # 7-10 **attackbox here**
GFX_0('EffBRush2ndSlash02', 100)
RefreshMultihit()
sprite('Action_255_10', 2) # 11-12
Unknown2015(125)
sprite('Action_255_11', 2) # 13-14
sprite('Action_255_12', 2) # 15-16
sprite('Action_255_13', 2) # 17-18
Unknown2015(150)
teleportRelativeX(55000)
sprite('Action_255_14', 1) # 19-19
Unknown14070('ShotDashCancel')
tag_voice(0, 'uli208_0', 'uli208_1', 'uli208_2', '')
teleportRelativeX(65000)
SFX_0('010_swing_sword_2')
sprite('Action_255_14', 1) # 20-20
sprite('Action_255_15', 2) # 21-22 **attackbox here**
GFX_0('EffEXRushSlash05', 100)
RefreshMultihit()
GroundedHitstunAnimation(9)
AirPushbackX(20000)
AirUntechableTime(30)
Hitstop(13)
def upon_ON_HIT_OR_BLOCK():
callSubroutine('ChainRoot')
HitOrBlockCancel('AN_NmlAtk4A_4th')
Unknown14072('ShotDashCancel')
sprite('Action_255_16', 6) # 23-28
callSubroutine('ChainRoot')
HitOrBlockCancel('AN_NmlAtk4A_4th')
Unknown14072('ShotDashCancel')
Recovery()
Unknown2063()
sprite('Action_255_17', 8) # 29-36
sprite('Action_255_18', 5) # 37-41
Unknown14074('ShotDashCancel')
Unknown2015(125)
teleportRelativeX(-25000)
sprite('Action_255_19', 2) # 42-43
teleportRelativeX(-40000)
Unknown2015(-1)
sprite('Action_255_19', 2) # 44-45
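# NmlAtk5B: standing B. setInvincible(1) is turned on together with an Unknown22019 flag string just before the active window and cleared at recovery, giving a brief invulnerable window during the swing.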
@State
def NmlAtk5B():
def upon_IMMEDIATE():
AttackDefaults_StandingNormal()
AttackLevel_(3)
AttackP1(90)
GroundedHitstunAnimation(13)
AirHitstunAnimation(13)
AirPushbackX(8000)
AirPushbackY(20000)
AirUntechableTime(24)
Unknown9016(1)
Unknown1112('')
callSubroutine('ChainRoot')
HitOrBlockCancel('AN_NmlAtk5B_2nd')
sprite('Action_140_00', 4) # 1-4
sprite('Action_140_01', 3) # 5-7
tag_voice(1, 'uli109_0', 'uli109_1', 'uli109_2', '')
sprite('Action_140_02', 3) # 8-10
sprite('Action_140_03', 2) # 11-12
SFX_0('010_swing_sword_1')
setInvincible(1)
Unknown22019('0100000000000000000000000000000000000000')
sprite('Action_140_04', 6) # 13-18 **attackbox here**
GFX_0('EffNmlAtk6CBlade1st', 100)
sprite('Action_140_04', 13) # 19-31 **attackbox here**
StartMultihit()
Recovery()
Unknown2063()
setInvincible(0)
sprite('Action_140_05', 6) # 32-37
sprite('Action_140_06', 4) # 38-41
sprite('Action_140_07', 3) # 42-44
@State
def AN_NmlAtk5B_2nd():
def upon_IMMEDIATE():
AttackDefaults_StandingNormal()
AttackLevel_(3)
AirHitstunAnimation(10)
GroundedHitstunAnimation(10)
AirPushbackX(1000)
AirPushbackY(20000)
AirUntechableTime(24)
Unknown9016(1)
callSubroutine('ChainRoot')
HitOrBlockCancel('AN_NmlAtk5B_3rd')
sprite('Action_141_00', 5) # 1-5
sprite('Action_141_01', 5) # 6-10
sprite('Action_141_02', 2) # 11-12
tag_voice(0, 'uli110_0', 'uli110_1', 'uli110_2', '')
SFX_0('010_swing_sword_2')
sprite('Action_141_03', 2) # 13-14 **attackbox here**
GFX_0('EffNmlAtk6CBlade2nd', 100)
sprite('Action_141_03', 2) # 15-16 **attackbox here**
sprite('Action_141_04', 6) # 17-22
Recovery()
Unknown2063()
sprite('Action_141_05', 4) # 23-26
sprite('Action_141_05', 4) # 27-30
sprite('Action_141_06', 4) # 31-34
sprite('Action_141_07', 4) # 35-38
@State
def AN_NmlAtk5B_3rd():
def upon_IMMEDIATE():
AttackDefaults_StandingNormal()
AttackLevel_(4)
GroundedHitstunAnimation(11)
AirHitstunAnimation(11)
AirPushbackX(20000)
AirPushbackY(-60000)
AirUntechableTime(24)
Unknown9310(1)
Unknown9016(1)
Unknown11056(0)
Unknown2004(1, 0)
callSubroutine('ChainRoot')
HitOrBlockCancel('ShotDashCancel')
sprite('Action_142_00', 6) # 1-6
Unknown2015(125)
sprite('Action_142_01', 3) # 7-9
Unknown2015(150)
Unknown2016(250)
sprite('Action_142_01', 2) # 10-11
Unknown2015(175)
Unknown2016(300)
sprite('Action_142_02', 4) # 12-15
Unknown2015(200)
sprite('Action_142_03', 3) # 16-18
sprite('Action_142_04', 1) # 19-19
Unknown2016(400)
Unknown2015(250)
tag_voice(0, 'uli111_0', 'uli111_1', 'uli111_2', '')
SFX_0('006_swing_blade_2')
sprite('Action_142_04', 1) # 20-20
GFX_0('EffNmlAtk6CBlade3rd', 100)
sprite('Action_142_05', 5) # 21-25 **attackbox here**
Unknown2015(200)
Unknown2016(-1)
SFX_0('209_down_normal_1')
sprite('Action_142_06', 8) # 26-33
Recovery()
Unknown2063()
Unknown2015(175)
sprite('Action_142_07', 6) # 34-39
Unknown2015(150)
sprite('Action_142_08', 5) # 40-44
Unknown2015(125)
sprite('Action_142_09', 3) # 45-47
Unknown2015(-1)
sprite('Action_142_10', 3) # 48-50
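# NmlAtk2A: crouching A. Level 1, self-chains into NmlAtk2A_Renda on whiff, hit, or block, can cancel into both throws, and is jump-cancelable on hit or block.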
@State
def NmlAtk2A():
def upon_IMMEDIATE():
AttackDefaults_CrouchingNormal()
AttackLevel_(1)
Unknown9016(1)
callSubroutine('ChainRoot')
WhiffCancel('NmlAtk2A_Renda')
HitOrBlockCancel('NmlAtk2A_Renda')
HitOrBlockCancel('NmlAtkThrow')
HitOrBlockCancel('NmlAtkBackThrow')
HitOrBlockJumpCancel(1)
sprite('Action_004_00', 3) # 1-3
sprite('Action_004_01', 2) # 4-5
Unknown7009(0)
SFX_0('010_swing_sword_0')
sprite('Action_004_02', 2) # 6-7 **attackbox here**
GFX_0('EffNmlAtk2ABlade', 100)
sprite('Action_004_03', 3) # 8-10
Recovery()
WhiffCancelEnable(1)
sprite('Action_004_04', 3) # 11-13
sprite('Action_004_04', 2) # 14-15
WhiffCancelEnable(0)
sprite('Action_004_05', 5) # 16-20
@State
def NmlAtk2A_Renda():
def upon_IMMEDIATE():
AttackDefaults_CrouchingNormal()
AttackLevel_(1)
Unknown9016(1)
callSubroutine('ChainRoot')
WhiffCancel('NmlAtk2A_Renda')
HitOrBlockCancel('NmlAtk2A_Renda')
HitOrBlockCancel('NmlAtkThrow')
HitOrBlockCancel('NmlAtkBackThrow')
HitOrBlockJumpCancel(1)
sprite('Action_004_00', 3) # 1-3
sprite('Action_004_01', 2) # 4-5
Unknown7009(0)
SFX_0('010_swing_sword_0')
sprite('Action_004_02', 2) # 6-7 **attackbox here**
GFX_0('EffNmlAtk2ABlade', 100)
sprite('Action_004_03', 3) # 8-10
Unknown2063()
WhiffCancelEnable(1)
sprite('Action_004_04', 5) # 11-15
sprite('Action_004_05', 5) # 16-20
@State
def NmlAtk2B():
def upon_IMMEDIATE():
AttackDefaults_CrouchingNormal()
AttackLevel_(2)
AttackP1(90)
HitLow(2)
Unknown9016(1)
callSubroutine('ChainRoot')
sprite('Action_005_00', 4) # 1-4
sprite('Action_005_01', 3) # 5-7
Unknown7009(1)
SFX_0('010_swing_sword_1')
sprite('Action_005_02', 2) # 8-9 **attackbox here**
teleportRelativeX(40000)
GFX_0('EffNmlAtk2BBlade', 100)
sprite('Action_005_03', 5) # 10-14
Recovery()
Unknown2063()
sprite('Action_005_04', 6) # 15-20
sprite('Action_005_05', 5) # 21-25
teleportRelativeX(40000)
sprite('Action_005_06', 4) # 26-29
teleportRelativeX(50000)
@State
def NmlAtk2C():
def upon_IMMEDIATE():
AttackDefaults_CrouchingNormal()
AttackLevel_(4)
AttackP1(90)
AttackP2(75)
AirPushbackY(20000)
AirUntechableTime(26)
AirHitstunAnimation(11)
GroundedHitstunAnimation(11)
HitLow(2)
Unknown9016(1)
callSubroutine('ChainRoot')
Unknown14085('CmnActCrushAttack')
sprite('Action_006_00', 4) # 1-4
sprite('Action_006_01', 5) # 5-9
teleportRelativeX(50000)
sprite('Action_006_02', 1) # 10-10
teleportRelativeX(43000)
GFX_0('EffNmlAtk2CBlade', 100)
tag_voice(1, 'uli107_0', 'uli107_1', 'uli107_2', '')
SFX_0('010_swing_sword_2')
sprite('Action_006_02', 1) # 11-11
sprite('Action_006_03', 6) # 12-17 **attackbox here**
sprite('Action_006_04', 8) # 18-25
Recovery()
Unknown2063()
sprite('Action_006_05', 5) # 26-30
sprite('Action_006_06', 6) # 31-36
sprite('Action_006_07', 5) # 37-41
@State
def NmlAtkAIR5A():
def upon_IMMEDIATE():
AttackDefaults_AirNormal()
AttackLevel_(3)
Unknown9016(1)
AirPushbackX(10000)
AirPushbackY(18000)
HitOrBlockJumpCancel(1)
HitOrBlockCancel('NmlAtkAIR5A_2nd')
HitOrBlockCancel('NmlAtkAIR5B')
HitOrBlockCancel('NmlAtkAIR5C')
sprite('Action_147_08', 6) # 1-6
sprite('Action_147_09', 2) # 7-8
Unknown7009(3)
sprite('Action_147_09', 2) # 9-10
SFX_0('010_swing_sword_0')
sprite('Action_147_10', 4) # 11-14 **attackbox here**
GFX_0('EffNmlAtkAir5BBlade', 100)
sprite('Action_147_11', 5) # 15-19
Recovery()
Unknown2063()
sprite('Action_147_12', 5) # 20-24
sprite('Action_147_13', 5) # 25-29
@State
def NmlAtkAIR5A_2nd():
def upon_IMMEDIATE():
AttackDefaults_AirNormal()
AttackLevel_(3)
Damage(1200)
Unknown9016(1)
AirPushbackX(10000)
AirPushbackY(18000)
HitOrBlockJumpCancel(1)
HitOrBlockCancel('NmlAtkAIR5B')
HitOrBlockCancel('NmlAtkAIR5C')
sprite('Action_008_00', 4) # 1-4
sprite('Action_008_01', 3) # 5-7
Unknown7009(3)
SFX_0('010_swing_sword_1')
sprite('Action_008_02', 2) # 8-9 **attackbox here**
GFX_0('EffNmlAtkAir5A2ndBlade', 100)
sprite('Action_008_03', 4) # 10-13
Recovery()
Unknown2063()
sprite('Action_008_03', 4) # 14-17
sprite('Action_008_04', 5) # 18-22
sprite('Action_008_05', 5) # 23-27
@State
def NmlAtkAIR5B():
def upon_IMMEDIATE():
AttackDefaults_AirNormal()
AttackLevel_(3)
Damage(1600)
Unknown9016(1)
AirPushbackX(10000)
AirPushbackY(14000)
HitOrBlockJumpCancel(1)
HitOrBlockCancel('NmlAtkAIR5A')
HitOrBlockCancel('NmlAtkAIR5C')
sprite('Action_009_00', 2) # 1-2
sprite('Action_009_01', 4) # 3-6
sprite('Action_009_02', 2) # 7-8
Unknown7009(5)
sprite('Action_009_02', 3) # 9-11
SFX_0('010_swing_sword_2')
sprite('Action_009_03', 4) # 12-15 **attackbox here**
GFX_0('EffNmlAtkAir5CBlade', 100)
sprite('Action_009_04', 8) # 16-23
Recovery()
Unknown2063()
sprite('Action_009_05', 5) # 24-28
sprite('Action_009_06', 4) # 29-32
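# NmlAtkAIR5C: air C dive. After a short forward hop (physicsXImpulse/physicsYImpulse 10000) it dives forward and down (Y -30000, X 30000), loops the active frames in label(0) until upon slot 2 (landing, armed by sendToLabelUpon(2, 1)) moves it to label(1) for landing recovery.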
@State
def NmlAtkAIR5C():
def upon_IMMEDIATE():
AttackDefaults_AirNormal()
AttackLevel_(3)
AirPushbackX(24000)
AirPushbackY(-30000)
PushbackX(10000)
AirUntechableTime(30)
Unknown1084(1)
clearUponHandler(2)
sendToLabelUpon(2, 1)
HitOverhead(0)
sprite('Action_146_00', 5) # 1-5
physicsXImpulse(10000)
physicsYImpulse(10000)
sprite('Action_146_01', 6) # 6-11
sprite('Action_146_02', 1) # 12-12
Unknown1084(1)
physicsYImpulse(-30000)
setGravity(5000)
physicsXImpulse(30000)
tag_voice(1, 'uli108_0', 'uli108_1', 'uli108_2', '')
SFX_0('004_swing_grap_1_1')
SFX_0('000_airdash_2')
sprite('Action_146_03', 3) # 13-15 **attackbox here**
sprite('Action_146_04', 3) # 16-18 **attackbox here**
label(0)
sprite('Action_146_03', 3) # 19-21 **attackbox here**
sprite('Action_146_04', 3) # 22-24 **attackbox here**
loopRest()
gotoLabel(0)
label(1)
sprite('Action_146_05', 5) # 25-29
Unknown8000(100, 1, 1)
Unknown1019(10)
clearUponHandler(2)
Recovery()
Unknown2063()
sprite('Action_146_06', 4) # 30-33
sprite('Action_146_07', 3) # 34-36
loopRest()
@State
def NmlAtk3C():
def upon_IMMEDIATE():
Unknown17013()
sprite('Action_150_00', 5) # 1-5
Unknown18009(1)
Unknown1015(34000)
sprite('Action_150_01', 4) # 6-9
tag_voice(1, 'uli112_0', 'uli112_1', 'uli112_2', '')
setInvincible(1)
EnableCollision(0)
sprite('Action_150_02', 4) # 10-13
sprite('Action_150_03', 2) # 14-15
sprite('Action_150_05', 4) # 16-19
Unknown2006()
sprite('Action_150_07', 4) # 20-23
setInvincible(0)
Unknown1084(1)
sprite('Action_150_06', 3) # 24-26
sprite('Action_150_08', 3) # 27-29
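# CmnActCrushAttack: crush attack. Jumps with physicsYImpulse(23000), swings mid-air, then loops the falling frames in label(0) until upon slot 2 (apparently landing) sends it to the label(1) landing recovery.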
@State
def CmnActCrushAttack():
def upon_IMMEDIATE():
Unknown30072('')
Unknown11058('0100000000000000000000000000000000000000')
Unknown9016(1)
sprite('Action_068_00', 4) # 1-4
sprite('Action_068_02', 3) # 5-7
SLOT_12 = SLOT_19
Unknown1019(5)
physicsYImpulse(23000)
if (SLOT_12 >= 20000):
SLOT_12 = 20000
setGravity(1800)
clearUponHandler(2)
sendToLabelUpon(2, 1)
Unknown23087(100000)
sprite('Action_068_03', 3) # 8-10
sprite('Action_068_04', 3) # 11-13
tag_voice(1, 'uli156_0', 'uli156_1', '', '')
sprite('Action_068_05', 3) # 14-16
sprite('Action_068_06', 3) # 17-19
sprite('Action_147_08', 3) # 20-22
sprite('Action_147_09', 3) # 23-25
sprite('Action_147_10', 3) # 26-28 **attackbox here**
GFX_0('EffNmlAtkAir5BBlade', 100)
SFX_0('010_swing_sword_0')
sprite('Action_147_11', 6) # 29-34
sprite('Action_147_12', 6) # 35-40
sprite('Action_147_13', 7) # 41-47
label(0)
sprite('Action_068_10', 3) # 48-50
sprite('Action_068_11', 2) # 51-52
loopRest()
gotoLabel(0)
label(1)
sprite('Action_069_00', 2) # 53-54
Unknown18009(1)
Unknown8000(100, 1, 1)
clearUponHandler(2)
Unknown1084(1)
sprite('Action_069_01', 3) # 55-57
sprite('Action_069_02', 5) # 58-62
sprite('Action_069_03', 3) # 63-65
Unknown18009(0)
@State
def CmnActCrushAttackChase1st():
def upon_IMMEDIATE():
Unknown30073(0)
Unknown9016(1)
loopRelated(17, 19)
def upon_17():
clearUponHandler(17)
sendToLabel(2)
setGravity(3000)
sendToLabelUpon(2, 1)
sprite('Action_147_11', 6) # 1-6
sprite('Action_147_12', 6) # 7-12
sprite('Action_147_13', 7) # 13-19
sprite('Action_068_10', 3) # 20-22
sprite('Action_068_11', 2) # 23-24
label(1)
sprite('Action_069_00', 4) # 25-28
Unknown8000(100, 1, 1)
sprite('Action_069_01', 50) # 29-78
label(2)
sprite('Action_145_00', 4) # 79-82
clearUponHandler(17)
teleportRelativeY(0)
tag_voice(0, 'uli157_0', 'uli157_1', '', '')
sprite('Action_145_01', 6) # 83-88
sprite('Action_145_02', 2) # 89-90 **attackbox here**
GFX_0('EffNmlAtk5BBlade', 100)
SFX_0('010_swing_sword_1')
sprite('Action_145_03', 3) # 91-93
sprite('Action_145_04', 6) # 94-99
sprite('Action_145_05', 5) # 100-104
sprite('Action_145_06', 4) # 105-108
SFX_FOOTSTEP_(100, 1, 1)
loopRelated(17, 180)
def upon_17():
clearUponHandler(17)
sendToLabel(11)
label(10)
sprite('Action_000_00', 7) # 109-115 **attackbox here**
sprite('Action_000_01', 7) # 116-122 **attackbox here**
sprite('Action_000_02', 6) # 123-128 **attackbox here**
sprite('Action_000_03', 6) # 129-134 **attackbox here**
sprite('Action_000_04', 8) # 135-142 **attackbox here**
sprite('Action_000_05', 5) # 143-147 **attackbox here**
sprite('Action_000_06', 5) # 148-152 **attackbox here**
sprite('Action_000_07', 5) # 153-157 **attackbox here**
sprite('Action_000_08', 6) # 158-163 **attackbox here**
sprite('Action_000_09', 5) # 164-168 **attackbox here**
sprite('Action_000_10', 6) # 169-174 **attackbox here**
sprite('Action_000_11', 8) # 175-182 **attackbox here**
sprite('Action_000_12', 5) # 183-187 **attackbox here**
sprite('Action_000_13', 5) # 188-192 **attackbox here**
sprite('Action_000_14', 6) # 193-198 **attackbox here**
loopRest()
gotoLabel(10)
label(11)
sprite('keep', 1) # 199-199
@State
def CmnActCrushAttackChase2nd():
def upon_IMMEDIATE():
Unknown30074(0)
Unknown9016(1)
loopRelated(17, 180)
def upon_17():
clearUponHandler(17)
sendToLabel(1)
sprite('Action_251_28', 3) # 1-3
sprite('Action_251_29', 3) # 4-6
sprite('Action_251_30', 2) # 7-8
sprite('Action_251_31', 2) # 9-10
sprite('Action_251_32', 2) # 11-12
sprite('Action_251_33', 2) # 13-14
sprite('Action_251_34', 1) # 15-15 **attackbox here**
GFX_0('EffEXRushSlash05', 100)
SFX_0('010_swing_sword_2')
sprite('Action_251_35', 6) # 16-21
sprite('Action_251_36', 5) # 22-26
sprite('Action_251_37', 5) # 27-31
sprite('Action_251_38', 3) # 32-34
sprite('Action_000_00', 7) # 35-41 **attackbox here**
sprite('Action_000_01', 7) # 42-48 **attackbox here**
sprite('Action_000_02', 6) # 49-54 **attackbox here**
sprite('Action_000_03', 6) # 55-60 **attackbox here**
sprite('Action_000_04', 8) # 61-68 **attackbox here**
sprite('Action_000_05', 5) # 69-73 **attackbox here**
sprite('Action_000_06', 5) # 74-78 **attackbox here**
sprite('Action_000_07', 5) # 79-83 **attackbox here**
sprite('Action_000_08', 6) # 84-89 **attackbox here**
sprite('Action_000_09', 5) # 90-94 **attackbox here**
sprite('Action_000_10', 6) # 95-100 **attackbox here**
sprite('Action_000_11', 8) # 101-108 **attackbox here**
sprite('Action_000_12', 5) # 109-113 **attackbox here**
sprite('Action_000_13', 5) # 114-118 **attackbox here**
sprite('Action_000_14', 6) # 119-124 **attackbox here**
label(0)
sprite('Action_000_00', 7) # 125-131 **attackbox here**
sprite('Action_000_01', 7) # 132-138 **attackbox here**
sprite('Action_000_02', 6) # 139-144 **attackbox here**
sprite('Action_000_03', 6) # 145-150 **attackbox here**
sprite('Action_000_04', 8) # 151-158 **attackbox here**
sprite('Action_000_05', 5) # 159-163 **attackbox here**
sprite('Action_000_06', 5) # 164-168 **attackbox here**
sprite('Action_000_07', 5) # 169-173 **attackbox here**
sprite('Action_000_08', 6) # 174-179 **attackbox here**
sprite('Action_000_09', 5) # 180-184 **attackbox here**
sprite('Action_000_10', 6) # 185-190 **attackbox here**
sprite('Action_000_11', 8) # 191-198 **attackbox here**
sprite('Action_000_12', 5) # 199-203 **attackbox here**
sprite('Action_000_13', 5) # 204-208 **attackbox here**
sprite('Action_000_14', 6) # 209-214 **attackbox here**
loopRest()
gotoLabel(0)
label(1)
sprite('keep', 1) # 215-215
@State
def CmnActCrushAttackFinish():
def upon_IMMEDIATE():
Unknown30075(0)
sprite('Action_140_00', 3) # 1-3
sprite('Action_140_01', 6) # 4-9
sprite('Action_140_02', 6) # 10-15
sprite('Action_140_03', 4) # 16-19
tag_voice(0, 'uli158_0', 'uli158_1', '', '')
sprite('Action_140_04', 6) # 20-25 **attackbox here**
GFX_0('EffNmlAtk6CBlade1st', 100)
SFX_0('006_swing_blade_0')
sprite('Action_140_05', 25) # 26-50
sprite('Action_140_06', 5) # 51-55
sprite('Action_140_07', 5) # 56-60
@State
def CmnActCrushAttackExFinish():
def upon_IMMEDIATE():
Unknown30089(0)
loopRelated(17, 60)
def upon_17():
clearUponHandler(17)
sendToLabel(1)
label(0)
sprite('Action_000_00', 7) # 1-7 **attackbox here**
sprite('Action_000_01', 7) # 8-14 **attackbox here**
sprite('Action_000_02', 6) # 15-20 **attackbox here**
sprite('Action_000_03', 6) # 21-26 **attackbox here**
sprite('Action_000_04', 8) # 27-34 **attackbox here**
sprite('Action_000_05', 5) # 35-39 **attackbox here**
sprite('Action_000_06', 5) # 40-44 **attackbox here**
sprite('Action_000_07', 5) # 45-49 **attackbox here**
sprite('Action_000_08', 6) # 50-55 **attackbox here**
sprite('Action_000_09', 5) # 56-60 **attackbox here**
sprite('Action_000_10', 6) # 61-66 **attackbox here**
sprite('Action_000_11', 8) # 67-74 **attackbox here**
sprite('Action_000_12', 5) # 75-79 **attackbox here**
sprite('Action_000_13', 5) # 80-84 **attackbox here**
sprite('Action_000_14', 6) # 85-90 **attackbox here**
loopRest()
gotoLabel(0)
label(1)
sprite('Action_140_00', 3) # 91-93
sprite('Action_140_01', 6) # 94-99
sprite('Action_140_02', 6) # 100-105
sprite('Action_140_03', 4) # 106-109
tag_voice(0, 'uli158_0', 'uli158_1', '', '')
sprite('Action_140_04', 6) # 110-115 **attackbox here**
GFX_0('EffNmlAtk6CBlade1st', 100)
SFX_0('006_swing_blade_0')
sprite('Action_140_05', 25) # 116-140
sprite('Action_140_06', 5) # 141-145
sprite('Action_140_07', 5) # 146-150
@State
def CmnActCrushAttackAssistChase1st():
def upon_IMMEDIATE():
Unknown30073(1)
Unknown9016(1)
sprite('null', 20) # 1-20
Unknown1086(22)
teleportRelativeY(0)
sprite('null', 1) # 21-21
Unknown30081('')
Unknown1086(22)
teleportRelativeX(-1000000)
physicsYImpulse(-4000)
setGravity(0)
SLOT_12 = SLOT_19
Unknown1019(10)
sprite('Action_184_00', 1) # 22-22
sprite('Action_184_01', 1) # 23-23
physicsXImpulse(38000)
physicsYImpulse(23000)
setGravity(3000)
Unknown8001(0, 100)
def upon_LANDING():
clearUponHandler(2)
sendToLabel(1)
sprite('Action_184_02', 1) # 24-24
sprite('Action_184_03', 1) # 25-25
sprite('Action_184_04', 2) # 26-27
sprite('Action_184_05', 2) # 28-29
sprite('Action_184_06', 3) # 30-32
sprite('Action_184_07', 4) # 33-36
Unknown1019(60)
sprite('Action_184_08', 5) # 37-41
sprite('Action_184_09', 5) # 42-46
label(1)
sprite('Action_184_10', 1) # 47-47 **attackbox here**
GFX_0('EffEXAssaultSlash', 100)
SFX_0('010_swing_sword_1')
physicsXImpulse(0)
clearUponHandler(2)
Unknown8000(100, 1, 1)
sprite('Action_184_10', 1) # 48-48 **attackbox here**
sprite('Action_184_10', 1) # 49-49 **attackbox here**
sprite('Action_184_11', 1) # 50-50 **attackbox here**
sprite('Action_184_12', 7) # 51-57
sprite('Action_184_13', 5) # 58-62
sprite('Action_184_14', 4) # 63-66
sprite('Action_184_15', 4) # 67-70
sprite('Action_000_00', 7) # 71-77 **attackbox here**
sprite('Action_000_01', 7) # 78-84 **attackbox here**
sprite('Action_000_02', 6) # 85-90 **attackbox here**
sprite('Action_000_03', 6) # 91-96 **attackbox here**
sprite('Action_000_04', 8) # 97-104 **attackbox here**
sprite('Action_000_05', 5) # 105-109 **attackbox here**
sprite('Action_000_06', 5) # 110-114 **attackbox here**
sprite('Action_000_07', 5) # 115-119 **attackbox here**
sprite('Action_000_08', 6) # 120-125 **attackbox here**
sprite('Action_000_09', 5) # 126-130 **attackbox here**
sprite('Action_000_10', 6) # 131-136 **attackbox here**
sprite('Action_000_11', 8) # 137-144 **attackbox here**
sprite('Action_000_12', 5) # 145-149 **attackbox here**
sprite('Action_000_13', 5) # 150-154 **attackbox here**
sprite('Action_000_14', 6) # 155-160 **attackbox here**
@State
def CmnActCrushAttackAssistChase2nd():
def upon_IMMEDIATE():
Unknown30074(1)
Unknown9016(1)
sprite('Action_142_00', 2) # 1-2
sprite('Action_142_01', 2) # 3-4
sprite('Action_142_02', 2) # 5-6
sprite('Action_142_03', 2) # 7-8
teleportRelativeX(-40000)
sprite('Action_142_04', 3) # 9-11
teleportRelativeX(-60000)
sprite('Action_142_05', 3) # 12-14 **attackbox here**
teleportRelativeX(-80000)
GFX_0('EffNmlAtk6CBlade3rd', 100)
SFX_0('010_swing_sword_2')
Unknown36(1)
teleportRelativeX(250000)
Unknown35()
sprite('Action_142_06', 3) # 15-17
sprite('Action_142_07', 3) # 18-20
sprite('Action_142_08', 3) # 21-23
teleportRelativeX(80000)
sprite('Action_142_09', 3) # 24-26
teleportRelativeX(60000)
sprite('Action_142_10', 3) # 27-29
teleportRelativeX(40000)
sprite('Action_000_00', 7) # 30-36 **attackbox here**
sprite('Action_000_01', 7) # 37-43 **attackbox here**
sprite('Action_000_02', 6) # 44-49 **attackbox here**
sprite('Action_000_03', 6) # 50-55 **attackbox here**
sprite('Action_000_04', 8) # 56-63 **attackbox here**
sprite('Action_000_05', 5) # 64-68 **attackbox here**
sprite('Action_000_06', 5) # 69-73 **attackbox here**
sprite('Action_000_07', 5) # 74-78 **attackbox here**
sprite('Action_000_08', 6) # 79-84 **attackbox here**
sprite('Action_000_09', 5) # 85-89 **attackbox here**
sprite('Action_000_10', 6) # 90-95 **attackbox here**
sprite('Action_000_11', 8) # 96-103 **attackbox here**
sprite('Action_000_12', 5) # 104-108 **attackbox here**
sprite('Action_000_13', 5) # 109-113 **attackbox here**
sprite('Action_000_14', 6) # 114-119 **attackbox here**
@State
def CmnActCrushAttackAssistFinish():
def upon_IMMEDIATE():
Unknown30075(1)
sprite('Action_140_00', 3) # 1-3
sprite('Action_140_01', 6) # 4-9
sprite('Action_140_02', 6) # 10-15
sprite('Action_140_03', 4) # 16-19
sprite('Action_140_04', 6) # 20-25 **attackbox here**
GFX_0('EffNmlAtk6CBlade1st', 100)
SFX_0('006_swing_blade_0')
sprite('Action_140_05', 25) # 26-50
sprite('Action_140_06', 5) # 51-55
sprite('Action_140_07', 5) # 56-60
@State
def CmnActCrushAttackAssistExFinish():
def upon_IMMEDIATE():
Unknown30089(1)
loopRelated(17, 60)
def upon_17():
clearUponHandler(17)
sendToLabel(1)
label(0)
sprite('Action_000_00', 7) # 1-7 **attackbox here**
sprite('Action_000_01', 7) # 8-14 **attackbox here**
sprite('Action_000_02', 6) # 15-20 **attackbox here**
sprite('Action_000_03', 6) # 21-26 **attackbox here**
sprite('Action_000_04', 8) # 27-34 **attackbox here**
sprite('Action_000_05', 5) # 35-39 **attackbox here**
sprite('Action_000_06', 5) # 40-44 **attackbox here**
sprite('Action_000_07', 5) # 45-49 **attackbox here**
sprite('Action_000_08', 6) # 50-55 **attackbox here**
sprite('Action_000_09', 5) # 56-60 **attackbox here**
sprite('Action_000_10', 6) # 61-66 **attackbox here**
sprite('Action_000_11', 8) # 67-74 **attackbox here**
sprite('Action_000_12', 5) # 75-79 **attackbox here**
sprite('Action_000_13', 5) # 80-84 **attackbox here**
sprite('Action_000_14', 6) # 85-90 **attackbox here**
loopRest()
gotoLabel(0)
label(1)
sprite('Action_140_00', 3) # 91-93
sprite('Action_140_01', 6) # 94-99
sprite('Action_140_02', 6) # 100-105
sprite('Action_140_03', 4) # 106-109
sprite('Action_140_04', 6) # 110-115 **attackbox here**
GFX_0('EffNmlAtk6CBlade1st', 100)
SFX_0('006_swing_blade_0')
sprite('Action_140_05', 25) # 116-140
sprite('Action_140_06', 5) # 141-145
sprite('Action_140_07', 5) # 146-150
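# NmlAtkThrow: forward throw attempt. Registers ThrowExe via Unknown17011, then runs forward using the dash loop; the upon_CLEAR_OR_EXIT checks on SLOT_18 and SLOT_19 (which appear to track elapsed time and distance to the opponent) decide when to break into label(1), the actual grab animation.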
@State
def NmlAtkThrow():
def upon_IMMEDIATE():
Unknown17011('ThrowExe', 1, 0, 0)
Unknown11054(120000)
physicsXImpulse(8000)
def upon_CLEAR_OR_EXIT():
if (SLOT_18 == 7):
Unknown8007(100, 1, 1)
physicsXImpulse(18000)
if (SLOT_18 >= 7):
Unknown1015(1000)
if (SLOT_12 >= 32000):
SLOT_12 = 32000
if (SLOT_18 >= 25):
sendToLabel(1)
if (SLOT_18 >= 3):
if (SLOT_19 < 180000):
sendToLabel(1)
sprite('Action_045_13', 4) # 1-4
sprite('Action_045_00', 3) # 5-7
sprite('Action_045_01', 3) # 8-10
sprite('Action_045_02', 3) # 11-13
label(0)
sprite('Action_045_03', 3) # 14-16
Unknown8006(100, 1, 1)
sprite('Action_045_04', 3) # 17-19
sprite('Action_045_05', 3) # 20-22
sprite('Action_045_06', 3) # 23-25
Unknown8006(100, 1, 1)
sprite('Action_045_07', 3) # 26-28
sprite('Action_045_08', 3) # 29-31
sprite('Action_045_09', 3) # 32-34
sprite('Action_045_02', 3) # 35-37
loopRest()
gotoLabel(0)
label(1)
sprite('Action_055_00', 2) # 38-39
clearUponHandler(3)
Unknown1019(10)
Unknown8010(100, 1, 1)
sprite('Action_055_01', 1) # 40-40
sprite('Action_055_02', 3) # 41-43 **attackbox here**
SFX_0('003_swing_grap_0_0')
Unknown1084(1)
sprite('Action_055_02', 3) # 44-46 **attackbox here**
StartMultihit()
sprite('Action_055_03', 2) # 47-48
sprite('Action_055_04', 7) # 49-55
SFX_4('uli154')
sprite('Action_055_05', 5) # 56-60
sprite('Action_055_06', 3) # 61-63
sprite('Action_055_07', 3) # 64-66
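# ThrowExe: the connected throw. Starts as a low-damage (200) grab hit, with Unknown5000/Unknown5001 presumably positioning the grabbed opponent, then re-arms at Action_057_12 as an 1800-damage launcher with wall-bounce parameters before the recovery loop.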
@State
def ThrowExe():
def upon_IMMEDIATE():
Unknown17012(1, 0, 0)
AttackLevel_(1)
Damage(200)
AttackP2(50)
Unknown11092(1)
Unknown11073(1)
AirHitstunAnimation(11)
GroundedHitstunAnimation(11)
AirPushbackX(2000)
AirPushbackY(12000)
YImpluseBeforeWallbounce(1800)
AirUntechableTime(40)
Unknown9310(90)
JumpCancel_(0)
Unknown13024(0)
Unknown11064(1)
Unknown11069('ThrowExe')
sprite('Action_055_02', 4) # 1-4 **attackbox here**
Unknown5000(8, 0)
Unknown5001('0000000004000000040000000000000000000000')
StartMultihit()
Unknown5003(1)
sprite('Action_057_00', 1) # 5-5
SFX_0('003_swing_grap_0_1')
sprite('Action_057_00', 1) # 6-6
Unknown5000(8, 0)
Unknown5001('0000000004000000040000000000000000000000')
sprite('Action_057_01', 8) # 7-14 **attackbox here**
tag_voice(1, 'uli153_0', '', '', '')
sprite('Action_057_02', 4) # 15-18
DisableAttackRestOfMove()
sprite('Action_057_03', 2) # 19-20
sprite('Action_057_04', 2) # 21-22
sprite('Action_057_05', 3) # 23-25
sprite('Action_057_06', 4) # 26-29
physicsYImpulse(11000)
setGravity(2200)
teleportRelativeX(10000)
sprite('Action_057_07', 3) # 30-32
teleportRelativeX(10000)
sprite('Action_057_08', 3) # 33-35
teleportRelativeX(10000)
sprite('Action_057_09', 1) # 36-36 **attackbox here**
teleportRelativeX(20000)
sprite('Action_057_10', 5) # 37-41
sprite('Action_057_11', 3) # 42-44
tag_voice(1, 'uli153_1', '', '', '')
Unknown1084(1)
sprite('Action_057_12', 1) # 45-45 **attackbox here**
AttackLevel_(4)
Damage(1800)
YImpluseBeforeWallbounce(50000)
Unknown11073(1)
AirHitstunAnimation(11)
GroundedHitstunAnimation(11)
AirPushbackX(500)
AirPushbackY(-40000)
Unknown9310(40)
Unknown9016(1)
RefreshMultihit()
Unknown11064(0)
Unknown11069('')
Unknown11083(1)
clearUponHandler(78)
def upon_78():
Unknown13024(1)
JumpCancel_(1)
sprite('Action_057_13', 6) # 46-51
sprite('Action_057_14', 3) # 52-54
sprite('Action_057_15', 2) # 55-56
sprite('Action_057_16', 5) # 57-61
physicsXImpulse(-10000)
physicsYImpulse(12000)
setGravity(2500)
label(0)
sprite('Action_057_17', 5) # 62-66
sendToLabelUpon(2, 1)
loopRest()
gotoLabel(0)
label(1)
sprite('Action_057_18', 2) # 67-68
Unknown1084(1)
clearUponHandler(2)
sprite('Action_057_19', 2) # 69-70
sprite('Action_057_20', 2) # 71-72
sprite('Action_057_21', 2) # 73-74
@State
def NmlAtkBackThrow():
def upon_IMMEDIATE():
Unknown17011('BackThrowExe', 1, 0, 0)
Unknown11054(120000)
physicsXImpulse(8000)
def upon_CLEAR_OR_EXIT():
if (SLOT_18 == 7):
Unknown8007(100, 1, 1)
physicsXImpulse(18000)
if (SLOT_18 >= 7):
Unknown1015(1000)
if (SLOT_12 >= 32000):
SLOT_12 = 32000
if (SLOT_18 >= 25):
sendToLabel(1)
if (SLOT_18 >= 3):
if (SLOT_19 < 180000):
sendToLabel(1)
sprite('Action_045_13', 4) # 1-4
sprite('Action_045_00', 3) # 5-7
sprite('Action_045_01', 3) # 8-10
sprite('Action_045_02', 3) # 11-13
label(0)
sprite('Action_045_03', 3) # 14-16
Unknown8006(100, 1, 1)
sprite('Action_045_04', 3) # 17-19
sprite('Action_045_05', 3) # 20-22
sprite('Action_045_06', 3) # 23-25
Unknown8006(100, 1, 1)
sprite('Action_045_07', 3) # 26-28
sprite('Action_045_08', 3) # 29-31
sprite('Action_045_09', 3) # 32-34
sprite('Action_045_02', 3) # 35-37
loopRest()
gotoLabel(0)
label(1)
sprite('Action_055_00', 2) # 38-39
clearUponHandler(3)
Unknown1019(10)
Unknown8010(100, 1, 1)
sprite('Action_055_01', 1) # 40-40
sprite('Action_055_02', 3) # 41-43 **attackbox here**
SFX_0('003_swing_grap_0_0')
Unknown1084(1)
sprite('Action_055_02', 3) # 44-46 **attackbox here**
StartMultihit()
sprite('Action_055_03', 2) # 47-48
sprite('Action_055_04', 7) # 49-55
SFX_4('uli154')
sprite('Action_055_05', 5) # 56-60
sprite('Action_055_06', 3) # 61-63
sprite('Action_055_07', 3) # 64-66
@State
def BackThrowExe():
def upon_IMMEDIATE():
Unknown17012(1, 0, 0)
AttackLevel_(1)
Damage(200)
AttackP2(50)
Unknown11092(1)
Unknown11073(1)
AirHitstunAnimation(11)
GroundedHitstunAnimation(11)
AirPushbackX(2000)
AirPushbackY(12000)
YImpluseBeforeWallbounce(1800)
AirUntechableTime(40)
Unknown9310(90)
JumpCancel_(0)
Unknown13024(0)
Unknown11064(1)
Unknown11069('BackThrowExe')
sprite('Action_055_02', 4) # 1-4 **attackbox here**
Unknown5000(8, 0)
Unknown5001('0000000004000000040000000000000000000000')
StartMultihit()
Unknown5003(1)
sprite('Action_057_00', 1) # 5-5
SFX_0('003_swing_grap_0_1')
Unknown2005()
sprite('Action_057_00', 1) # 6-6
Unknown5000(8, 0)
Unknown5001('0000000004000000040000000000000000000000')
sprite('Action_057_01', 8) # 7-14 **attackbox here**
tag_voice(1, 'uli153_0', '', '', '')
sprite('Action_057_02', 4) # 15-18
DisableAttackRestOfMove()
sprite('Action_057_03', 2) # 19-20
sprite('Action_057_04', 2) # 21-22
sprite('Action_057_05', 3) # 23-25
sprite('Action_057_06', 4) # 26-29
physicsYImpulse(11000)
setGravity(2200)
teleportRelativeX(10000)
sprite('Action_057_07', 3) # 30-32
teleportRelativeX(10000)
sprite('Action_057_08', 3) # 33-35
teleportRelativeX(10000)
sprite('Action_057_09', 1) # 36-36 **attackbox here**
teleportRelativeX(20000)
sprite('Action_057_10', 5) # 37-41
sprite('Action_057_11', 3) # 42-44
tag_voice(1, 'uli153_1', '', '', '')
Unknown1084(1)
sprite('Action_057_12', 1) # 45-45 **attackbox here**
AttackLevel_(4)
Damage(1800)
YImpluseBeforeWallbounce(50000)
Unknown11073(1)
AirHitstunAnimation(11)
GroundedHitstunAnimation(11)
AirPushbackX(500)
AirPushbackY(-40000)
Unknown9310(40)
Unknown9016(1)
RefreshMultihit()
Unknown11064(0)
Unknown11069('')
Unknown11083(1)
clearUponHandler(78)
def upon_78():
Unknown13024(1)
JumpCancel_(1)
sprite('Action_057_13', 6) # 46-51
sprite('Action_057_14', 3) # 52-54
sprite('Action_057_15', 2) # 55-56
sprite('Action_057_16', 5) # 57-61
physicsXImpulse(-10000)
physicsYImpulse(12000)
setGravity(2500)
label(0)
sprite('Action_057_17', 5) # 62-66
sendToLabelUpon(2, 1)
loopRest()
gotoLabel(0)
label(1)
sprite('Action_057_18', 2) # 67-68
Unknown1084(1)
clearUponHandler(2)
sprite('Action_057_19', 2) # 69-70
sprite('Action_057_20', 2) # 71-72
sprite('Action_057_21', 2) # 73-74
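# CmnActInvincibleAttack: shared invincible attack action (likely a system-level reversal).
# Rising multi-hit slash series; setInvincible(0) drops the invincibility partway, label 0 is the
# airborne loop and label 1 the landing branch (event 2 matches upon_LANDING elsewhere in this file).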
@State
def CmnActInvincibleAttack():
def upon_IMMEDIATE():
Unknown17024('')
AttackLevel_(3)
Damage(500)
Unknown11092(1)
AirHitstunAnimation(9)
GroundedHitstunAnimation(9)
AirPushbackY(15000)
AirPushbackX(6000)
Unknown9016(1)
AirUntechableTime(60)
Unknown1084(0)
sendToLabelUpon(2, 1)
def upon_12():
Unknown2037(1)
sprite('Action_100_00', 6) # 1-6
sprite('Action_100_01', 3) # 7-9 **attackbox here**
StartMultihit()
sprite('Action_100_01', 3) # 10-12 **attackbox here**
Hitstop(3)
physicsXImpulse(11000)
sprite('Action_101_02', 2) # 13-14 **attackbox here**
DisableAttackRestOfMove()
sprite('Action_101_03', 1) # 15-15 **attackbox here**
RefreshMultihit()
SFX_0('010_swing_sword_0')
AirPushbackY(35000)
physicsYImpulse(23000)
physicsXImpulse(4000)
setGravity(1800)
if SLOT_2:
SFX_4('uli201')
else:
tag_voice(1, 'uli200_0', 'uli200_1', '', '')
Unknown2037(0)
clearUponHandler(12)
sprite('Action_101_04', 2) # 16-17 **attackbox here**
GFX_0('EffNmlReversalAction00', 100)
sprite('Action_101_05', 2) # 18-19 **attackbox here**
RefreshMultihit()
sprite('Action_101_06', 1) # 20-20 **attackbox here**
sprite('Action_101_07', 1) # 21-21
setInvincible(0)
sprite('Action_101_08', 1) # 22-22
sprite('Action_101_09', 1) # 23-23
sprite('Action_101_10', 3) # 24-26
sprite('Action_101_11', 2) # 27-28
sprite('Action_101_12', 1) # 29-29
physicsYImpulse(32000)
physicsXImpulse(7000)
setGravity(2600)
sprite('Action_101_13', 1) # 30-30
sprite('Action_101_14', 2) # 31-32 **attackbox here**
RefreshMultihit()
SFX_0('010_swing_sword_1')
Hitstop(1)
GFX_0('EffNmlReversalAction01', 100)
sprite('Action_101_14', 1) # 33-33 **attackbox here**
if SLOT_2:
SFX_4('uli202')
RefreshMultihit()
sprite('Action_101_15', 3) # 34-36 **attackbox here**
RefreshMultihit()
Hitstop(10)
AirPushbackX(12500)
sprite('Action_101_16', 1) # 37-37
sprite('Action_101_17', 1) # 38-38
sprite('Action_101_18', 2) # 39-40
sprite('Action_101_19', 1) # 41-41
sprite('Action_101_20', 3) # 42-44
sprite('Action_101_21', 2) # 45-46
sprite('Action_101_22', 1) # 47-47
physicsYImpulse(38000)
physicsXImpulse(8000)
setGravity(2600)
sprite('Action_101_23', 1) # 48-48
sprite('Action_101_24', 1) # 49-49 **attackbox here**
Hitstop(1)
RefreshMultihit()
GFX_0('EffNmlReversalAction01', 100)
SFX_0('010_swing_sword_1')
sprite('Action_101_24', 1) # 50-50 **attackbox here**
RefreshMultihit()
sprite('Action_101_25', 7) # 51-57 **attackbox here**
if SLOT_2:
SFX_4('uli203')
Hitstop(15)
RefreshMultihit()
AirPushbackY(32000)
AirPushbackX(9500)
sprite('Action_101_26', 6) # 58-63
sprite('Action_101_27', 3) # 64-66
sprite('Action_101_28', 4) # 67-70
sprite('Action_101_29', 4) # 71-74
sprite('Action_101_30', 7) # 75-81
sprite('Action_101_31', 3) # 82-84
label(0)
sprite('Action_101_32', 2) # 85-86
sprite('Action_101_33', 2) # 87-88
gotoLabel(0)
label(1)
sprite('Action_101_33', 4) # 89-92
Unknown8000(100, 1, 1)
Unknown1084(1)
clearUponHandler(2)
sprite('Action_101_34', 4) # 93-96
sprite('Action_101_35', 6) # 97-102
sprite('Action_101_36', 6) # 103-108
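# CmnActInvincibleAttackAir: air version of the invincible attack above; same sprite sequence with
# slightly different damage and impulse values.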
@State
def CmnActInvincibleAttackAir():
def upon_IMMEDIATE():
Unknown17025('')
AttackLevel_(3)
Damage(550)
Unknown11092(1)
AirHitstunAnimation(9)
GroundedHitstunAnimation(9)
AirPushbackY(35000)
AirPushbackX(6000)
Unknown9016(1)
AirUntechableTime(39)
Unknown1084(0)
clearUponHandler(2)
sendToLabelUpon(2, 1)
sprite('Action_101_03', 9) # 1-9 **attackbox here**
StartMultihit()
Hitstop(3)
physicsYImpulse(26500)
physicsXImpulse(4000)
setGravity(1650)
sprite('Action_101_03', 2) # 10-11 **attackbox here**
RefreshMultihit()
SFX_0('010_swing_sword_0')
sprite('Action_101_04', 2) # 12-13 **attackbox here**
tag_voice(1, 'uli200_0', 'uli200_1', '', '')
GFX_0('EffNmlReversalAction00', 100)
sprite('Action_101_05', 2) # 14-15 **attackbox here**
RefreshMultihit()
Hitstop(3)
sprite('Action_101_06', 1) # 16-16 **attackbox here**
sprite('Action_101_07', 1) # 17-17
setInvincible(0)
sprite('Action_101_08', 1) # 18-18
sprite('Action_101_09', 1) # 19-19
sprite('Action_101_10', 3) # 20-22
sprite('Action_101_11', 2) # 23-24
sprite('Action_101_12', 1) # 25-25
physicsYImpulse(27000)
physicsXImpulse(7000)
setGravity(2600)
sprite('Action_101_13', 1) # 26-26
sprite('Action_101_14', 2) # 27-28 **attackbox here**
RefreshMultihit()
Hitstop(1)
GFX_0('EffNmlReversalAction01', 100)
SFX_0('010_swing_sword_1')
sprite('Action_101_14', 1) # 29-29 **attackbox here**
RefreshMultihit()
sprite('Action_101_15', 3) # 30-32 **attackbox here**
RefreshMultihit()
Hitstop(10)
AirPushbackX(12500)
sprite('Action_101_16', 1) # 33-33
sprite('Action_101_17', 1) # 34-34
sprite('Action_101_18', 2) # 35-36
sprite('Action_101_19', 1) # 37-37
sprite('Action_101_20', 3) # 38-40
sprite('Action_101_21', 2) # 41-42
sprite('Action_101_22', 1) # 43-43
physicsYImpulse(33500)
physicsXImpulse(10000)
setGravity(2600)
sprite('Action_101_23', 1) # 44-44
sprite('Action_101_24', 1) # 45-45 **attackbox here**
Hitstop(1)
RefreshMultihit()
GFX_0('EffNmlReversalAction01', 100)
SFX_0('010_swing_sword_1')
sprite('Action_101_24', 1) # 46-46 **attackbox here**
RefreshMultihit()
sprite('Action_101_25', 7) # 47-53 **attackbox here**
Hitstop(15)
RefreshMultihit()
AirPushbackY(32000)
AirPushbackX(10000)
sprite('Action_101_26', 6) # 54-59
sprite('Action_101_27', 3) # 60-62
sprite('Action_101_28', 4) # 63-66
sprite('Action_101_29', 4) # 67-70
sprite('Action_101_30', 7) # 71-77
sprite('Action_101_31', 3) # 78-80
label(0)
sprite('Action_101_32', 2) # 81-82
sprite('Action_101_33', 2) # 83-84
gotoLabel(0)
label(1)
sprite('Action_101_33', 2) # 85-86
Unknown8000(100, 1, 1)
Unknown1084(1)
clearUponHandler(2)
sprite('Action_101_34', 2) # 87-88
sprite('Action_101_35', 3) # 89-91
sprite('Action_101_36', 3) # 92-94
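# Shot_A: grounded projectile special. Spawns the 'ShotA' object (presumably the projectile itself)
# plus the 'EffShotSlash' visual; the Unknown14070/14072/14074 calls appear to open, extend and
# close a cancel window into ShotDashCancel. Shot_B below is the B version spawning 'ShotB'.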
@State
def Shot_A():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
DisableAttackRestOfMove()
sprite('Action_440_00', 6) # 1-6
sprite('Action_440_01', 4) # 7-10
sprite('Action_440_02', 4) # 11-14
Unknown14070('ShotDashCancel')
sprite('Action_440_03', 6) # 15-20
SFX_0('010_swing_sword_2')
sprite('Action_440_04', 2) # 21-22
GFX_0('ShotA', 0)
GFX_0('EffShotSlash', 100)
tag_voice(1, 'uli204_0', 'uli204_1', 'uli204_2', '')
sprite('Action_440_05', 6) # 23-28
Unknown14072('ShotDashCancel')
sprite('Action_440_06', 7) # 29-35
sprite('Action_440_07', 5) # 36-40
sprite('Action_440_08', 5) # 41-45
Unknown14074('ShotDashCancel')
Recovery()
sprite('Action_440_09', 4) # 46-49
@State
def Shot_B():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
DisableAttackRestOfMove()
sprite('Action_441_00', 2) # 1-2
sprite('Action_441_01', 2) # 3-4
sprite('Action_441_02', 3) # 5-7
sprite('Action_441_03', 4) # 8-11
SFX_0('010_swing_sword_2')
sprite('Action_441_04', 2) # 12-13
Unknown14070('ShotDashCancel')
GFX_0('ShotB', 0)
GFX_0('EffShotSlash', 100)
tag_voice(1, 'uli204_0', 'uli204_1', 'uli204_2', '')
sprite('Action_441_05', 6) # 14-19
sprite('Action_441_06', 14) # 20-33
Unknown14072('ShotDashCancel')
sprite('Action_441_07', 5) # 34-38
sprite('Action_441_08', 5) # 39-43
Unknown14074('ShotDashCancel')
Recovery()
sprite('Action_441_09', 4) # 44-47
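# ShotDashCancel: short forward dash (physicsXImpulse(36000)) usable out of the Shot specials,
# with a whiff cancel into Assault_A.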
@State
def ShotDashCancel():
def upon_IMMEDIATE():
Unknown17013()
WhiffCancel('Assault_A')
WhiffCancelEnable(1)
sprite('Action_045_00', 3) # 1-3
sprite('Action_045_01', 3) # 4-6
physicsXImpulse(36000)
sprite('Action_045_02', 3) # 7-9
sprite('Action_045_03', 3) # 10-12
Unknown8006(100, 1, 1)
sprite('Action_045_04', 3) # 13-15
sprite('Action_045_05', 3) # 16-18
Unknown8006(100, 1, 1)
sprite('Action_045_11', 2) # 19-20
Unknown1019(30)
sprite('Action_045_12', 3) # 21-23
Unknown1019(10)
sprite('Action_045_13', 2) # 24-25
physicsXImpulse(0)
loopRest()
@State
def Assault_A():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
def upon_CLEAR_OR_EXIT():
if (SLOT_163 <= 0):
Unknown2037(1)
clearUponHandler(3)
WhiffCancel('Assault_A_Hasei')
sprite('Action_045_00', 2) # 1-2
sprite('Action_045_01', 2) # 3-4
Unknown8006(100, 1, 1)
physicsXImpulse(36000)
sprite('Action_150_00', 5) # 5-9
SFX_0('019_cloth_a')
Unknown18009(1)
EnableCollision(0)
setInvincible(1)
Unknown22019('0100000001000000000000000100000000000000')
Unknown7006('uli112_0', 100, 828992629, 828322353, 0, 0, 100, 828992629, 845099569, 0, 0, 100, 0, 0, 0, 0, 0)
sprite('Action_150_01', 2) # 10-11
sprite('Action_150_01', 2) # 12-13
WhiffCancelEnable(1)
sprite('Action_150_02', 4) # 14-17
EnableCollision(1)
setInvincible(0)
sprite('Action_150_03', 2) # 18-19
Unknown1019(50)
sprite('Action_150_05', 3) # 20-22
Unknown23183('416374696f6e5f3135305f303700000000000000000000000000000000000000030000000200000002000000')
if SLOT_2:
Unknown2005()
Unknown1019(50)
Unknown14072('Assault_A_Hasei')
sprite('Action_150_06', 3) # 23-25
Unknown23183('416374696f6e5f3135305f303800000000000000000000000000000000000000030000000200000002000000')
Unknown1019(0)
WhiffCancelEnable(0)
sprite('Action_014_01', 2) # 26-27
Unknown18009(0)
sprite('Action_014_02', 2) # 28-29
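# Assault_A_Hasei: follow-up strike out of Assault_A. upon_78 looks like an on-connect trigger:
# it jumps to label 10, a teleporting combo sequence that ends in a big launcher
# (AttackLevel_(5), Damage(1000) rewrite at Action_160_15).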
@State
def Assault_A_Hasei():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
AttackLevel_(3)
Damage(1000)
AttackP1(80)
AttackP2(80)
Unknown11092(1)
GroundedHitstunAnimation(10)
AirHitstunAnimation(10)
AirPushbackX(18000)
AirPushbackY(43000)
AirUntechableTime(60)
Unknown9016(1)
Unknown2006()
Unknown11064(1)
Unknown11068(1)
Unknown30068(1)
Unknown11056(0)
def upon_78():
clearUponHandler(78)
sendToLabel(10)
Hitstop(8)
Unknown11069('Assault_A_Hasei')
setInvincible(1)
Unknown11044(1)
Unknown14083(0)
EnableCollision(0)
def upon_STATE_END():
EnableCollision(1)
Unknown3001(255)
Unknown3004(0)
sprite('Action_014_00', 5) # 1-5
Unknown1084(1)
sprite('Action_014_01', 3) # 6-8
Unknown7009(2)
sprite('Action_154_02', 3) # 9-11 **attackbox here**
GFX_0('Lin_157', 0)
SFX_0('007_swing_knife_1')
sprite('Action_154_03', 9) # 12-20
Recovery()
sprite('Action_154_04', 7) # 21-27
sprite('Action_154_05', 5) # 28-32
sprite('Action_154_06', 4) # 33-36
ExitState()
label(10)
sprite('Action_154_02', 2) # 37-38 **attackbox here**
Unknown11023(1)
Unknown30048(1)
Unknown11066(1)
setInvincible(1)
AirHitstunAnimation(10)
GroundedHitstunAnimation(10)
Hitstop(2)
SFX_0('007_swing_knife_1')
Unknown11072(1, 70000, 50000)
sprite('null', 2) # 39-40
GFX_0('Lin_168_2', 0)
sprite('Action_160_07', 3) # 41-43
GFX_0('Lin_160_4', 0)
teleportRelativeX(450000)
Unknown1007(330000)
Unknown2005()
Unknown3001(128)
Unknown3004(20)
sprite('Action_160_08', 2) # 44-45
sprite('Action_160_09', 4) # 46-49 **attackbox here**
GFX_0('Lin_168_3', 0)
RefreshMultihit()
AirHitstunAnimation(11)
GroundedHitstunAnimation(11)
AirPushbackY(-8000)
GFX_0('Lin_091', 0)
SFX_0('007_swing_knife_1')
sprite('null', 1) # 50-50
teleportRelativeX(600000)
teleportRelativeY(0)
Unknown2005()
sprite('Action_160_12', 2) # 51-52
GFX_0('Lin_160_5', 0)
Unknown3001(128)
Unknown3004(20)
Unknown8000(100, 1, 1)
sprite('Action_160_13', 2) # 53-54
sprite('Action_160_14', 2) # 55-56
sprite('Action_160_15', 3) # 57-59 **attackbox here**
StartMultihit()
sprite('Action_160_15', 4) # 60-63 **attackbox here**
AttackLevel_(5)
Damage(1000)
RefreshMultihit()
AirHitstunAnimation(9)
GroundedHitstunAnimation(9)
AirPushbackX(5000)
AirPushbackY(30000)
Hitstop(1)
Unknown11001(0, 15, 15)
Unknown11099(1)
Unknown11072(1, 150000, 50000)
physicsXImpulse(80000)
Unknown8007(100, 1, 1)
GFX_0('Lin_169', 0)
SFX_0('007_swing_knife_2')
SFX_4('uli303')
Unknown11069('')
Unknown11044(0)
Unknown23072()
Unknown11064(0)
clearUponHandler(1)
def upon_STATE_END():
Unknown2006()
EnableCollision(1)
Unknown3001(255)
Unknown3004(0)
sprite('Action_160_16', 5) # 64-68
Unknown3001(128)
Unknown3004(20)
Unknown1019(50)
Unknown14083(1)
Unknown8010(100, 1, 1)
sprite('Action_160_16', 13) # 69-81
Unknown1019(20)
sprite('Action_160_17', 6) # 82-87
setInvincible(0)
EnableCollision(1)
Unknown1084(1)
sprite('Action_160_18', 5) # 88-92
sprite('Action_160_19', 5) # 93-97
sprite('Action_160_20', 4) # 98-101
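# Assault_B: hopping overhead (HitOverhead(2)). If CheckInput(0xa) passes during the jump it
# branches to label 10, where the hit is re-declared as a low (HitLow(2), HitOverhead(0)),
# apparently a built-in high/low switch.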
@State
def Assault_B():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
AttackLevel_(4)
Damage(1000)
AttackP1(80)
Unknown11092(1)
AirPushbackY(18000)
AirUntechableTime(60)
GroundedHitstunAnimation(10)
AirHitstunAnimation(10)
Hitstop(1)
Unknown9016(1)
HitOverhead(2)
def upon_STATE_END():
Unknown3001(255)
sprite('Action_182_00', 3) # 1-3
sprite('Action_182_01', 2) # 4-5
Unknown23087(50000)
physicsXImpulse(24000)
physicsYImpulse(16000)
setGravity(2000)
Unknown8001(0, 100)
tag_voice(1, 'uli212_0', 'uli212_1', 'uli212_2', '')
SFX_0('002_highjump_0')
sprite('Action_182_02', 2) # 6-7
sendToLabelUpon(2, 1)
sprite('Action_182_03', 2) # 8-9
sprite('Action_182_04', 2) # 10-11
sprite('Action_182_05', 2) # 12-13
sprite('Action_182_06', 2) # 14-15
if CheckInput(0xa):
sendToLabel(10)
sprite('Action_182_07', 2) # 16-17
sprite('Action_182_08', 3) # 18-20
sprite('Action_182_09', 32767) # 21-32787
label(1)
sprite('Action_182_10', 1) # 32788-32788 **attackbox here**
StartMultihit()
GFX_0('EffAssaultSlash', 100)
SFX_0('010_swing_sword_2')
Unknown1084(1)
Unknown8000(100, 1, 1)
Unknown23087(-1)
sprite('Action_182_10', 1) # 32789-32789 **attackbox here**
sprite('Action_182_10', 1) # 32790-32790 **attackbox here**
RefreshMultihit()
Hitstop(12)
PushbackX(12000)
sprite('Action_182_11', 2) # 32791-32792
Recovery()
sprite('Action_182_12', 4) # 32793-32796
sprite('Action_182_13', 8) # 32797-32804
sprite('Action_182_14', 4) # 32805-32808
sprite('Action_182_15', 3) # 32809-32811
ExitState()
label(10)
sprite('null', 1) # 32812-32812
AttackLevel_(3)
Damage(1700)
AttackP1(90)
AttackP2(85)
Unknown9016(0)
AirPushbackY(5000)
AirUntechableTime(26)
Hitstop(12)
Unknown11001(0, -5, 0)
Unknown9310(1)
AirHitstunAnimation(11)
GroundedHitstunAnimation(11)
HitLow(2)
HitOverhead(0)
GFX_0('Lin_177', 100)
sprite('Lin104_00', 2) # 32813-32814
Unknown3001(0)
Unknown3004(85)
clearUponHandler(2)
teleportRelativeY(0)
sprite('Lin104_01', 2) # 32815-32816
GFX_0('Lin_167', 100)
sprite('Lin104_02', 1) # 32817-32817
physicsXImpulse(28000)
Unknown8006(100, 1, 1)
sprite('Lin104_03', 4) # 32818-32821 **attackbox here**
Unknown1019(30)
sprite('Lin104_04', 2) # 32822-32823
DisableAttackRestOfMove()
Recovery()
sprite('Lin104_04', 6) # 32824-32829
Unknown1019(10)
Unknown18009(1)
sprite('Lin104_05', 5) # 32830-32834
Unknown1019(0)
sprite('Action_013_00', 5) # 32835-32839
sprite('Action_013_01', 5) # 32840-32844
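# AirShot_A: air projectile special, spawning 'AirShotA'. Unknown28(2, 'CmnActJumpLanding')
# presumably routes the landing event into the common landing state; label 1 is the recovery branch.
# AirShot_B below is the same pattern with 'AirShotB' and a higher jump impulse.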
@State
def AirShot_A():
def upon_IMMEDIATE():
Unknown17003()
Unknown1084(1)
clearUponHandler(2)
sendToLabelUpon(2, 1)
sprite('Action_448_00', 2) # 1-2
physicsXImpulse(3000)
physicsYImpulse(16000)
setGravity(1400)
sprite('Action_448_01', 3) # 3-5
sprite('Action_448_02', 3) # 6-8
sprite('Action_448_03', 3) # 9-11
sprite('Action_448_04', 5) # 12-16
sprite('Action_448_05', 4) # 17-20
SFX_0('010_swing_sword_2')
sprite('Action_448_06', 3) # 21-23
GFX_0('EffAirShotSlash', 100)
GFX_0('AirShotA', 0)
tag_voice(1, 'uli204_0', 'uli204_1', 'uli204_2', '')
Unknown28(2, 'CmnActJumpLanding')
sprite('Action_448_07', 10) # 24-33
sprite('Action_448_08', 6) # 34-39
sprite('Action_448_09', 4) # 40-43
Recovery()
label(0)
sprite('Action_448_09', 4) # 44-47
loopRest()
gotoLabel(0)
label(1)
sprite('Action_023_00', 3) # 48-50
Unknown1084(1)
clearUponHandler(2)
sprite('Action_023_01', 3) # 51-53
sprite('Action_023_02', 3) # 54-56
sprite('Action_023_03', 4) # 57-60
@State
def AirShot_B():
def upon_IMMEDIATE():
Unknown17003()
Unknown1084(1)
clearUponHandler(2)
sendToLabelUpon(2, 1)
sprite('Action_448_00', 2) # 1-2
physicsXImpulse(3000)
physicsYImpulse(20000)
setGravity(1400)
sprite('Action_448_01', 3) # 3-5
sprite('Action_448_02', 3) # 6-8
sprite('Action_448_03', 3) # 9-11
sprite('Action_448_04', 10) # 12-21
sprite('Action_448_05', 4) # 22-25
SFX_0('010_swing_sword_2')
sprite('Action_448_06', 3) # 26-28
GFX_0('EffAirShotSlash', 100)
GFX_0('AirShotB', 0)
tag_voice(1, 'uli204_0', 'uli204_1', 'uli204_2', '')
Unknown28(2, 'CmnActJumpLanding')
sprite('Action_448_07', 10) # 29-38
sprite('Action_448_08', 6) # 39-44
sprite('Action_448_09', 4) # 45-48
Recovery()
label(0)
sprite('Action_448_09', 4) # 49-52
loopRest()
gotoLabel(0)
label(1)
sprite('Action_023_00', 3) # 53-55
Unknown1084(1)
clearUponHandler(2)
sprite('Action_023_01', 3) # 56-58
sprite('Action_023_02', 3) # 59-61
sprite('Action_023_03', 4) # 62-65
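# Shot_EX: EX version of the grounded Shot, spawning 'ShotEX'; ConsumeSuperMeter(-5000) adjusts
# the super gauge, presumably the EX meter cost.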
@State
def Shot_EX():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
DisableAttackRestOfMove()
sprite('Action_441_00', 2) # 1-2
sprite('Action_441_01', 1) # 3-3
sprite('Action_441_01', 1) # 4-4
Unknown23125('')
ConsumeSuperMeter(-5000)
tag_voice(1, 'uli205_0', 'uli205_1', 'uli205_2', '')
sprite('Action_441_02', 3) # 5-7
sprite('Action_441_03', 4) # 8-11
Unknown14070('ShotDashCancel')
SFX_0('010_swing_sword_2')
sprite('Action_442_04', 2) # 12-13
GFX_0('EffShotSlash', 100)
GFX_0('ShotEX', 0)
sprite('Action_442_05', 6) # 14-19
Unknown14072('ShotDashCancel')
sprite('Action_442_06', 14) # 20-33
sprite('Action_442_07', 5) # 34-38
sprite('Action_442_08', 5) # 39-43
Unknown14074('ShotDashCancel')
Recovery()
sprite('Action_442_09', 4) # 44-47
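# Rush_EX: EX rush. Fast forward lunge (physicsXImpulse(80000)) into a long RefreshMultihit chain,
# ending with a heavier knockback hit (Damage(1000) rewrite at Action_251_34).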
@State
def Rush_EX():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
AttackLevel_(3)
Damage(500)
Unknown30065(0)
AirUntechableTime(45)
AirHitstunAnimation(10)
GroundedHitstunAnimation(10)
PushbackX(0)
AirPushbackX(-5000)
AirPushbackY(20000)
Unknown9016(1)
Unknown11001(0, 6, 6)
Hitstop(6)
sprite('Action_251_00', 1) # 1-1
sprite('Action_251_01', 2) # 2-3
physicsXImpulse(80000)
Unknown8006(100, 1, 0)
tag_voice(1, 'uli209_0', 'uli209_1', 'uli209_2', '')
sprite('Action_251_02', 2) # 4-5
Unknown23125('')
ConsumeSuperMeter(-5000)
sprite('Action_251_03', 2) # 6-7
sprite('Action_251_04', 4) # 8-11 **attackbox here**
GFX_0('EffEXRushSlash00', 100)
GroundedHitstunAnimation(9)
AirPushbackX(10000)
AirPushbackY(12000)
physicsXImpulse(0)
sprite('Action_251_05', 4) # 12-15
sprite('Action_251_06', 4) # 16-19
sprite('Action_251_07', 3) # 20-22
sprite('Action_251_08', 1) # 23-23
GFX_0('EffEXRushSlash01', 100)
sprite('Action_251_09', 3) # 24-26 **attackbox here**
Hitstop(5)
GroundedHitstunAnimation(11)
AirPushbackX(1000)
AirPushbackY(10000)
RefreshMultihit()
sprite('Action_251_10', 7) # 27-33
sprite('Action_251_11', 1) # 34-34
GFX_0('EffEXRushSlash02', 100)
sprite('Action_251_12', 2) # 35-36 **attackbox here**
Hitstop(4)
GroundedHitstunAnimation(9)
AirPushbackX(1000)
RefreshMultihit()
sprite('Action_251_13', 2) # 37-38
sprite('Action_251_14', 4) # 39-42
sprite('Action_251_15', 2) # 43-44
GFX_0('EffEXRushSlash03', 100)
tag_voice(0, 'uli210_0', 'uli210_1', 'uli210_2', '')
sprite('Action_251_16', 2) # 45-46 **attackbox here**
Hitstop(3)
GroundedHitstunAnimation(11)
AirPushbackX(1000)
RefreshMultihit()
sprite('Action_251_17', 3) # 47-49
sprite('Action_251_18', 3) # 50-52
sprite('Action_251_19', 1) # 53-53
GFX_0('EffEXRushSlash04', 100)
sprite('Action_251_20', 2) # 54-55 **attackbox here**
GroundedHitstunAnimation(9)
AirPushbackX(1000)
RefreshMultihit()
sprite('Action_251_21', 1) # 56-56
sprite('Action_251_22', 2) # 57-58
sprite('Action_251_23', 1) # 59-59
GFX_0('EffEXRushSlash03', 100)
sprite('Action_251_24', 3) # 60-62 **attackbox here**
GroundedHitstunAnimation(11)
AirPushbackX(1000)
AirPushbackY(8000)
Hitstop(3)
RefreshMultihit()
sprite('Action_251_25', 2) # 63-64
sprite('Action_251_26', 2) # 65-66
sprite('Action_251_27', 5) # 67-71 **attackbox here**
GFX_0('EffEXRushSlash04', 100)
Hitstop(12)
GroundedHitstunAnimation(9)
AirPushbackX(1000)
AirPushbackY(8000)
PushbackX(30000)
RefreshMultihit()
sprite('Action_251_28', 3) # 72-74
sprite('Action_251_29', 2) # 75-76
sprite('Action_251_30', 2) # 77-78
sprite('Action_251_31', 1) # 79-79
sprite('Action_251_32', 1) # 80-80
sprite('Action_251_33', 3) # 81-83
tag_voice(0, 'uli211_0', 'uli211_1', 'uli211_2', '')
sprite('Action_251_34', 1) # 84-84 **attackbox here**
GFX_0('EffEXRushSlash05', 100)
Damage(1000)
GroundedHitstunAnimation(18)
AirPushbackX(52000)
AirPushbackY(23000)
Unknown9178(1)
Hitstop(9)
PushbackX(35000)
RefreshMultihit()
sprite('Action_251_35', 6) # 85-90
sprite('Action_251_36', 5) # 91-95
sprite('Action_251_37', 5) # 96-100
sprite('Action_251_38', 3) # 101-103
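# Assault_EX: EX version of the hopping overhead. Same structure as Assault_B but multi-hit,
# and without the label-10 low branch.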
@State
def Assault_EX():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
AttackLevel_(4)
Damage(1000)
AttackP1(80)
Unknown11092(1)
AirPushbackY(18000)
AirUntechableTime(60)
GroundedHitstunAnimation(10)
AirHitstunAnimation(10)
Hitstop(1)
Unknown9016(1)
HitOverhead(2)
Unknown30065(0)
MinimumDamagePct(10)
sendToLabelUpon(2, 1)
sprite('Action_184_00', 3) # 1-3
sprite('Action_184_01', 2) # 4-5
Unknown23087(50000)
physicsXImpulse(36000)
physicsYImpulse(16000)
setGravity(2300)
Unknown8001(0, 100)
tag_voice(1, 'uli213_0', 'uli213_1', 'uli213_2', '')
SFX_0('002_highjump_0')
Unknown23125('')
ConsumeSuperMeter(-5000)
sprite('Action_184_02', 2) # 6-7
sprite('Action_184_03', 2) # 8-9
sprite('Action_184_04', 2) # 10-11
sprite('Action_184_05', 2) # 12-13
sprite('Action_184_06', 2) # 14-15
sprite('Action_184_07', 2) # 16-17
sprite('Action_184_08', 2) # 18-19
sprite('Action_184_09', 32767) # 20-32786
label(1)
sprite('Action_184_10', 1) # 32787-32787 **attackbox here**
StartMultihit()
GFX_0('EffEXAssaultSlash', 100)
SFX_0('010_swing_sword_2')
Unknown1084(1)
Unknown8000(100, 1, 1)
Unknown23087(-1)
sprite('Action_184_10', 1) # 32788-32788 **attackbox here**
RefreshMultihit()
sprite('Action_184_10', 1) # 32789-32789 **attackbox here**
RefreshMultihit()
sprite('Action_184_11', 2) # 32790-32791 **attackbox here**
RefreshMultihit()
sprite('Action_184_11', 2) # 32792-32793 **attackbox here**
RefreshMultihit()
Hitstop(12)
PushbackX(12000)
sprite('Action_184_12', 4) # 32794-32797
Recovery()
Unknown2063()
sprite('Action_184_13', 8) # 32798-32805
sprite('Action_184_14', 4) # 32806-32809
sprite('Action_184_15', 3) # 32810-32812
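# AirShot_EX: EX air projectile, spawning 'AirShotEX'.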
@State
def AirShot_EX():
def upon_IMMEDIATE():
Unknown17003()
Unknown1084(1)
clearUponHandler(2)
clearUponHandler(2)
sendToLabelUpon(2, 1)
sprite('Action_450_00', 2) # 1-2
physicsXImpulse(3000)
physicsYImpulse(16000)
setGravity(1400)
sprite('Action_450_01', 1) # 3-3
sprite('Action_450_01', 2) # 4-5
Unknown23125('')
ConsumeSuperMeter(-5000)
sprite('Action_450_02', 3) # 6-8
sprite('Action_450_03', 3) # 9-11
sprite('Action_450_04', 5) # 12-16
sprite('Action_450_05', 4) # 17-20
SFX_0('010_swing_sword_2')
sprite('Action_450_06', 3) # 21-23
GFX_0('EffAirShotSlash', 100)
GFX_0('AirShotEX', 0)
tag_voice(1, 'uli205_0', 'uli205_1', 'uli205_2', '')
Unknown28(2, 'CmnActJumpLanding')
sprite('Action_450_07', 10) # 24-33
sprite('Action_448_08', 6) # 34-39
sprite('Action_448_09', 4) # 40-43
Recovery()
label(0)
sprite('Action_448_09', 4) # 44-47
loopRest()
gotoLabel(0)
label(1)
sprite('Action_023_00', 3) # 48-50
Unknown1084(1)
clearUponHandler(2)
sprite('Action_023_01', 3) # 51-53
sprite('Action_023_02', 3) # 54-56
sprite('Action_023_03', 4) # 57-60
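# UltimateRush: super attack (AttackDefaults_StandingDD suggests a Distortion-class move).
# Meter adjustment and invincibility on startup, a rushing slash series with teleportRelativeX
# steps, then the 'UltimateSlash' finisher and the 'UltimateLightwall' effect.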
@State
def UltimateRush():
def upon_IMMEDIATE():
AttackDefaults_StandingDD()
Unknown23055('')
AttackLevel_(4)
Damage(360)
MinimumDamagePct(18)
AttackP2(98)
GroundedHitstunAnimation(10)
AirHitstunAnimation(10)
AirPushbackX(6000)
AirPushbackY(3000)
Unknown30056('a08601003200000000000000')
YImpluseBeforeWallbounce(700)
AirUntechableTime(100)
Unknown9310(1)
Hitstop(0)
Unknown11001(5, 5, 5)
Unknown11056(2)
Unknown9016(1)
Unknown11057(800)
Unknown11064(1)
Unknown1084(1)
GFX_0('UltimateRushEff', 100)
def upon_78():
Unknown2037(1)
setInvincible(0)
setInvincibleFor(60)
sprite('Action_189_00', 5) # 1-5
Unknown2036(60, -1, 0)
ConsumeSuperMeter(-10000)
setInvincible(1)
Unknown30080('')
tag_voice(1, 'uli250_0', 'uli250_1', '', '')
sprite('Action_189_01', 8) # 6-13
SFX_3('SE_ApperLightBlade')
sprite('Action_189_02', 7) # 14-20
sprite('Action_189_03', 6) # 21-26
sprite('Action_189_04', 5) # 27-31
sprite('Action_189_05', 4) # 32-35
sprite('Action_189_06', 4) # 36-39
sprite('Action_189_07', 4) # 40-43
sprite('Action_189_08', 3) # 44-46
sprite('Action_189_09', 3) # 47-49
Unknown2015(200)
sprite('Action_190_00', 5) # 50-54
sprite('Action_190_01', 5) # 55-59
teleportRelativeX(20000)
sprite('Action_190_02', 4) # 60-63
sprite('Action_190_03', 4) # 64-67
physicsXImpulse(5000)
Unknown1028(-50)
sprite('Action_190_04', 4) # 68-71
SFX_3('SE_SwingLightSword')
SFX_0('010_swing_sword_2')
sprite('Action_190_05', 4) # 72-75 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 3) # 76-78 **attackbox here**
sprite('Action_190_07', 3) # 79-81 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 4) # 82-85 **attackbox here**
sprite('Action_190_09', 2) # 86-87
if (not SLOT_2):
setInvincible(0)
sprite('Action_190_10', 2) # 88-89
sprite('Action_190_11', 2) # 90-91
sprite('Action_190_12', 2) # 92-93
SFX_3('SE_SwingLightSword')
SFX_0('010_swing_sword_2')
sprite('Action_190_13', 2) # 94-95 **attackbox here**
RefreshMultihit()
sprite('Action_190_14', 2) # 96-97 **attackbox here**
sprite('Action_190_15', 2) # 98-99 **attackbox here**
RefreshMultihit()
sprite('Action_190_16', 2) # 100-101 **attackbox here**
sprite('Action_190_10', 2) # 102-103
sprite('Action_190_11', 2) # 104-105
sprite('Action_190_12', 2) # 106-107
SFX_3('SE_SwingLightSword')
SFX_0('010_swing_sword_2')
sprite('Action_190_13', 2) # 108-109 **attackbox here**
RefreshMultihit()
sprite('Action_190_14', 2) # 110-111 **attackbox here**
sprite('Action_190_15', 2) # 112-113 **attackbox here**
RefreshMultihit()
sprite('Action_190_16', 2) # 114-115 **attackbox here**
sprite('Action_190_17', 2) # 116-117
Unknown1084(1)
sprite('Action_190_18', 2) # 118-119
teleportRelativeX(20000)
sprite('Action_190_19', 2) # 120-121
teleportRelativeX(20000)
sprite('Action_190_20', 2) # 122-123
SFX_3('SE_SwingLightSword')
SFX_0('010_swing_sword_2')
teleportRelativeX(20000)
sprite('Action_190_21', 3) # 124-126 **attackbox here**
teleportRelativeX(20000)
RefreshMultihit()
sprite('Action_190_22', 4) # 127-130 **attackbox here**
Unknown1084(1)
teleportRelativeX(20000)
sprite('Action_190_23', 6) # 131-136 **attackbox here**
teleportRelativeX(20000)
RefreshMultihit()
AirPushbackX(5000)
AirPushbackY(28000)
YImpluseBeforeWallbounce(900)
sprite('Action_190_24', 6) # 137-142 **attackbox here**
teleportRelativeX(20000)
sprite('Action_190_25', 7) # 143-149
physicsXImpulse(0)
sprite('Action_190_26', 10) # 150-159
tag_voice(0, 'uli251_0', 'uli251_1', '', '')
sprite('Action_190_27', 2) # 160-161
Unknown11057(1000)
GFX_0('UltimateSlash', 100)
Unknown2015(-1)
SFX_3('SE_BigBomb')
sprite('Action_190_28', 4) # 162-165 **attackbox here**
Unknown11001(0, 0, 0)
AirPushbackX(25000)
AirPushbackY(-45000)
Unknown30055('305705003200000000000000')
Hitstop(0)
RefreshMultihit()
sprite('Action_190_29', 28) # 166-193
GFX_0('UltimateLightwall', 0)
setInvincible(0)
setInvincibleFor(0)
clearUponHandler(78)
sprite('Action_190_30', 2) # 194-195
sprite('Action_190_31', 6) # 196-201
sprite('Action_190_32', 3) # 202-204
sprite('Action_190_33', 5) # 205-209
sprite('Action_190_34', 3) # 210-212
sprite('Action_190_35', 6) # 213-218
sprite('Action_190_36', 3) # 219-221
sprite('Action_190_37', 4) # 222-225 **attackbox here**
SFX_3('SE_SwingLightSword')
GroundedHitstunAnimation(10)
AirHitstunAnimation(10)
GFX_0('UltimateAssaultFinish', 100)
AirPushbackX(1000)
AirPushbackY(20000)
sprite('Action_190_38', 31) # 226-256
sprite('Action_190_39', 4) # 257-260
sprite('Action_190_40', 6) # 261-266
sprite('Action_190_41', 3) # 267-269
sprite('Action_190_42', 3) # 270-272
sprite('Action_190_43', 3) # 273-275
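# UltimateRushOD: Overdrive variant of UltimateRush; same layout with extra repetitions of the
# slash loop and the OD effect objects ('UltimateRushEffOD', 'UltimateSlashOD', 'UltimateLightwallOD').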
@State
def UltimateRushOD():
def upon_IMMEDIATE():
AttackDefaults_StandingDD()
Unknown23055('')
AttackLevel_(4)
Damage(340)
MinimumDamagePct(13)
AttackP2(98)
GroundedHitstunAnimation(10)
AirHitstunAnimation(10)
AirPushbackX(5000)
AirPushbackY(4500)
Unknown30056('a08601003200000000000000')
YImpluseBeforeWallbounce(700)
AirUntechableTime(100)
Unknown9310(1)
Hitstop(0)
Unknown11001(5, 5, 5)
Unknown11056(2)
Unknown9016(1)
Unknown11057(800)
Unknown11064(1)
Unknown1084(1)
GFX_0('UltimateRushEffOD', 100)
def upon_78():
Unknown2037(1)
setInvincible(0)
setInvincibleFor(60)
sprite('Action_189_00', 5) # 1-5
Unknown2036(60, -1, 0)
ConsumeSuperMeter(-10000)
setInvincible(1)
Unknown30080('')
tag_voice(1, 'uli250_0', 'uli250_1', '', '')
sprite('Action_189_01', 8) # 6-13
SFX_3('SE_ApperLightBlade')
sprite('Action_189_02', 7) # 14-20
sprite('Action_189_03', 6) # 21-26
sprite('Action_189_04', 5) # 27-31
sprite('Action_189_05', 4) # 32-35
sprite('Action_189_06', 4) # 36-39
sprite('Action_189_07', 4) # 40-43
sprite('Action_189_08', 3) # 44-46
sprite('Action_189_09', 3) # 47-49
Unknown2015(200)
sprite('Action_190_00', 5) # 50-54
sprite('Action_190_01', 5) # 55-59
teleportRelativeX(20000)
sprite('Action_190_02', 4) # 60-63
sprite('Action_190_03', 4) # 64-67
physicsXImpulse(9000)
Unknown1028(-50)
sprite('Action_190_04', 4) # 68-71
SFX_3('SE_SwingLightSword')
sprite('Action_190_05', 4) # 72-75 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 3) # 76-78 **attackbox here**
sprite('Action_190_07', 3) # 79-81 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 4) # 82-85 **attackbox here**
sprite('Action_190_00', 2) # 86-87
if (not SLOT_2):
setInvincible(0)
sprite('Action_190_01', 2) # 88-89
sprite('Action_190_02', 2) # 90-91
sprite('Action_190_03', 2) # 92-93
sprite('Action_190_04', 2) # 94-95
SFX_3('SE_SwingLightSword')
sprite('Action_190_05', 2) # 96-97 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 2) # 98-99 **attackbox here**
sprite('Action_190_07', 2) # 100-101 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 2) # 102-103 **attackbox here**
sprite('Action_190_01', 2) # 104-105
sprite('Action_190_02', 2) # 106-107
sprite('Action_190_03', 2) # 108-109
sprite('Action_190_04', 2) # 110-111
SFX_3('SE_SwingLightSword')
sprite('Action_190_05', 2) # 112-113 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 2) # 114-115 **attackbox here**
sprite('Action_190_07', 2) # 116-117 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 2) # 118-119 **attackbox here**
sprite('Action_190_01', 2) # 120-121
sprite('Action_190_02', 2) # 122-123
sprite('Action_190_03', 2) # 124-125
sprite('Action_190_04', 2) # 126-127
SFX_3('SE_SwingLightSword')
sprite('Action_190_05', 2) # 128-129 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 2) # 130-131 **attackbox here**
sprite('Action_190_07', 2) # 132-133 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 2) # 134-135 **attackbox here**
sprite('Action_190_01', 2) # 136-137
sprite('Action_190_02', 2) # 138-139
sprite('Action_190_03', 2) # 140-141
sprite('Action_190_04', 2) # 142-143
SFX_3('SE_SwingLightSword')
sprite('Action_190_05', 2) # 144-145 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 2) # 146-147 **attackbox here**
sprite('Action_190_07', 2) # 148-149 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 2) # 150-151 **attackbox here**
sprite('Action_190_09', 2) # 152-153
sprite('Action_190_10', 2) # 154-155
sprite('Action_190_11', 2) # 156-157
sprite('Action_190_12', 2) # 158-159
SFX_3('SE_SwingLightSword')
sprite('Action_190_13', 2) # 160-161 **attackbox here**
RefreshMultihit()
sprite('Action_190_14', 2) # 162-163 **attackbox here**
sprite('Action_190_15', 2) # 164-165 **attackbox here**
RefreshMultihit()
sprite('Action_190_16', 2) # 166-167 **attackbox here**
sprite('Action_190_17', 2) # 168-169
Unknown1084(1)
sprite('Action_190_18', 2) # 170-171
teleportRelativeX(20000)
sprite('Action_190_19', 2) # 172-173
teleportRelativeX(20000)
sprite('Action_190_20', 2) # 174-175
SFX_3('SE_SwingLightSword')
teleportRelativeX(20000)
sprite('Action_190_21', 3) # 176-178 **attackbox here**
teleportRelativeX(20000)
RefreshMultihit()
sprite('Action_190_22', 4) # 179-182 **attackbox here**
teleportRelativeX(20000)
sprite('Action_190_23', 6) # 183-188 **attackbox here**
teleportRelativeX(20000)
RefreshMultihit()
AirPushbackX(5000)
AirPushbackY(28000)
YImpluseBeforeWallbounce(900)
sprite('Action_190_24', 6) # 189-194 **attackbox here**
teleportRelativeX(20000)
sprite('Action_190_25', 7) # 195-201
physicsXImpulse(0)
sprite('Action_190_26', 10) # 202-211
tag_voice(0, 'uli251_0', 'uli251_1', '', '')
sprite('Action_190_27', 2) # 212-213
Unknown11057(1000)
GFX_0('UltimateSlashOD', 100)
Unknown2015(-1)
SFX_3('SE_BigBomb')
sprite('Action_190_28', 4) # 214-217 **attackbox here**
Unknown11001(0, 0, 0)
AirPushbackX(25000)
AirPushbackY(-45000)
Unknown30055('305705003200000000000000')
Hitstop(0)
RefreshMultihit()
sprite('Action_190_29', 38) # 218-255
GFX_0('UltimateLightwallOD', 0)
setInvincible(0)
setInvincibleFor(0)
clearUponHandler(78)
sprite('Action_190_30', 2) # 256-257
sprite('Action_190_31', 6) # 258-263
sprite('Action_190_32', 3) # 264-266
sprite('Action_190_33', 5) # 267-271
sprite('Action_190_34', 3) # 272-274
sprite('Action_190_35', 6) # 275-280
sprite('Action_190_36', 3) # 281-283
sprite('Action_190_37', 4) # 284-287 **attackbox here**
SFX_3('SE_SwingLightSword')
GFX_0('UltimateAssaultFinish', 100)
GroundedHitstunAnimation(10)
AirHitstunAnimation(10)
AirPushbackX(1000)
AirPushbackY(20000)
sprite('Action_190_38', 31) # 288-318
sprite('Action_190_39', 4) # 319-322
sprite('Action_190_40', 6) # 323-328
sprite('Action_190_41', 3) # 329-331
sprite('Action_190_42', 3) # 332-334
sprite('Action_190_43', 3) # 335-337
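# UltimateShot: projectile super; spawns 'UltimateShot1' during the swing and 'UltimateShot2'
# (with the 'Lin_432' effect) on the follow-up.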
@State
def UltimateShot():
def upon_IMMEDIATE():
AttackDefaults_StandingDD()
Unknown23055('')
Unknown2003(1)
sprite('Action_262_00', 5) # 1-5
sprite('Action_262_01', 5) # 6-10
Unknown2036(60, -1, 0)
ConsumeSuperMeter(-10000)
setInvincible(1)
Unknown30080('')
sprite('Action_262_02', 5) # 11-15
sprite('Action_262_03', 3) # 16-18
sprite('Action_262_04', 3) # 19-21
sprite('Action_262_03', 3) # 22-24
sprite('Action_262_04', 3) # 25-27
sprite('Action_262_03', 3) # 28-30
sprite('Action_262_04', 3) # 31-33
sprite('Action_262_03', 3) # 34-36
sprite('Action_262_04', 3) # 37-39
sprite('Action_262_03', 3) # 40-42
sprite('Action_262_04', 3) # 43-45
sprite('Action_262_05', 5) # 46-50
sprite('Action_262_06', 5) # 51-55
sprite('Action_441_00', 2) # 56-57
sprite('Action_441_01', 2) # 58-59
sprite('Action_441_02', 2) # 60-61
sprite('Action_441_03', 4) # 62-65
sprite('Action_441_04', 2) # 66-67
SFX_0('006_swing_blade_1')
GFX_0('EffShotSlash', 100)
GFX_0('UltimateShot1', 0)
sprite('Action_441_05', 2) # 68-69
sprite('Action_441_06', 2) # 70-71
sprite('Action_441_07', 2) # 72-73
sprite('Action_145_00', 2) # 74-75
sprite('Action_145_01', 2) # 76-77
sprite('Action_145_02', 3) # 78-80 **attackbox here**
GFX_0('Lin_432', 100)
GFX_0('UltimateShot2', 0)
sprite('Action_145_03', 3) # 81-83
sprite('Action_145_04', 12) # 84-95
sprite('Action_145_05', 5) # 96-100
sprite('Action_145_06', 4) # 101-104
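# UltimateRanbu: cinematic rush super, startup/whiff portion. On upon_78 (which reads like an
# on-connect trigger) it enters UltimateRanbu_Exe for the full hit sequence.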
@State
def UltimateRanbu():
def upon_IMMEDIATE():
AttackDefaults_StandingDD()
Unknown23055('')
AttackLevel_(4)
MinimumDamagePct(15)
AirHitstunAnimation(10)
GroundedHitstunAnimation(10)
AirPushbackY(40000)
AirUntechableTime(60)
Unknown9016(1)
Unknown11069('UltimateRanbu_Exe')
Unknown11064(1)
Unknown2073(1)
def upon_78():
Unknown13024(0)
enterState('UltimateRanbu_Exe')
setInvincible(1)
Unknown1084(1)
sprite('Action_262_00', 5) # 1-5
sprite('Action_262_01', 5) # 6-10
Unknown2036(60, -1, 0)
ConsumeSuperMeter(-10000)
Unknown30080('')
SFX_4('uli252')
sprite('Action_262_02', 5) # 11-15
sprite('Action_262_03', 3) # 16-18
sprite('Action_262_04', 3) # 19-21
sprite('Action_262_03', 3) # 22-24
sprite('Action_262_04', 3) # 25-27
sprite('Action_262_03', 3) # 28-30
sprite('Action_262_04', 3) # 31-33
sprite('Action_262_03', 3) # 34-36
sprite('Action_262_04', 3) # 37-39
sprite('Action_262_03', 3) # 40-42
sprite('Action_262_04', 3) # 43-45
sprite('Action_262_03', 3) # 46-48
sprite('Action_262_04', 3) # 49-51
sprite('Action_262_05', 5) # 52-56
sprite('Action_262_06', 5) # 57-61
sprite('Action_154_00', 6) # 62-67
sprite('Action_154_01', 4) # 68-71
Unknown1045(65000)
sprite('Action_154_02', 4) # 72-75 **attackbox here**
GFX_0('Lin_157', 0)
Unknown1084(1)
SFX_0('007_swing_knife_1')
sprite('Action_154_03', 9) # 76-84
setInvincible(0)
sprite('Action_154_04', 7) # 85-91
sprite('Action_154_05', 5) # 92-96
sprite('Action_154_06', 4) # 97-100
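# UltimateRanbu_Exe: hit sequence of the ranbu super. A chain of teleports and slashes using the
# damage set in upon_IMMEDIATE (Damage(790)), ending in the 'Lin_169' finisher with
# MinimumDamagePct(24) and a large forward impulse.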
@State
def UltimateRanbu_Exe():
def upon_IMMEDIATE():
AttackDefaults_StandingDD()
Unknown23056('')
AttackLevel_(4)
Damage(790)
MinimumDamagePct(8)
AttackP2(100)
AirHitstunAnimation(10)
GroundedHitstunAnimation(10)
AirPushbackY(36000)
AirUntechableTime(60)
Hitstop(4)
Unknown30048(1)
Unknown11023(1)
Unknown11072(1, 150000, 50000)
Unknown9016(1)
Unknown11069('UltimateRanbu_Exe')
Unknown11064(1)
DisableAttackRestOfMove()
setInvincible(1)
EnableCollision(0)
Unknown2015(40)
def upon_ON_HIT_OR_BLOCK():
Unknown3001(200)
Unknown3004(-40)
def upon_STATE_END():
Unknown3001(255)
Unknown3004(0)
Unknown13024(0)
sprite('keep', 7) # 1-7
GFX_0('Lin_166_1', 0)
Unknown3004(-40)
sprite('Action_160_07', 6) # 8-13
GFX_0('Lin_160_1', 0)
teleportRelativeX(100000)
Unknown1007(360000)
Unknown3001(128)
Unknown3004(20)
sprite('Action_160_08', 2) # 14-15
sprite('Action_160_09', 4) # 16-19 **attackbox here**
SFX_4('uli253_0')
RefreshMultihit()
AirHitstunAnimation(11)
GroundedHitstunAnimation(11)
AirPushbackX(30000)
AirPushbackY(-18000)
GFX_0('Lin_091', 0)
SFX_0('007_swing_knife_1')
sprite('null', 5) # 20-24
GFX_0('Lin_168_1', 0)
teleportRelativeX(300000)
teleportRelativeY(0)
sprite('Action_154_00', 3) # 25-27
GFX_0('Lin_160_2', 0)
Unknown3001(128)
Unknown3004(20)
Unknown8000(100, 1, 1)
sprite('Action_154_01', 2) # 28-29
sprite('Action_154_02', 2) # 30-31 **attackbox here**
RefreshMultihit()
AirHitstunAnimation(10)
GroundedHitstunAnimation(10)
Unknown9071()
AirPushbackY(40000)
GFX_0('Lin_157', 0)
SFX_0('007_swing_knife_1')
sprite('null', 1) # 32-32
GFX_0('Lin_166_3', 0)
teleportRelativeX(300000)
Unknown1007(300000)
Unknown2005()
Unknown26('Lin_157')
sprite('Action_160_03', 6) # 33-38
GFX_0('Lin_160_3', 0)
Unknown3001(128)
Unknown3004(20)
sprite('Action_160_04', 2) # 39-40
sprite('Action_160_05', 4) # 41-44 **attackbox here**
RefreshMultihit()
GFX_0('EffNmlAtkAir5CBlade', 100)
SFX_0('010_swing_sword_1')
sprite('null', 5) # 45-49
GFX_0('Lin_168_2', 0)
sprite('Action_160_07', 6) # 50-55
GFX_0('Lin_160_4', 0)
teleportRelativeX(300000)
Unknown1007(360000)
Unknown2005()
Unknown3001(128)
Unknown3004(20)
sprite('Action_160_08', 2) # 56-57
sprite('Action_160_09', 4) # 58-61 **attackbox here**
GFX_0('Lin_168_3', 0)
RefreshMultihit()
AirHitstunAnimation(11)
GroundedHitstunAnimation(11)
AirPushbackY(-28000)
GFX_0('Lin_091', 0)
SFX_0('007_swing_knife_1')
sprite('null', 5) # 62-66
teleportRelativeX(500000)
teleportRelativeY(0)
Unknown2005()
sprite('Action_160_12', 4) # 67-70
GFX_0('Lin_160_5', 0)
Unknown3001(128)
Unknown3004(20)
Unknown8000(100, 1, 1)
sprite('Action_160_13', 3) # 71-73
sprite('Action_160_14', 2) # 74-75
sprite('Action_160_15', 6) # 76-81 **attackbox here**
RefreshMultihit()
AirHitstunAnimation(13)
GroundedHitstunAnimation(13)
AirPushbackX(10000)
AirPushbackY(30000)
Hitstop(1)
Unknown11001(0, 20, 20)
Unknown11099(1)
MinimumDamagePct(24)
physicsXImpulse(60000)
Unknown8007(100, 1, 1)
GFX_0('Lin_169', 0)
SFX_0('007_swing_knife_2')
sprite('Action_160_16', 5) # 82-86
Unknown3001(128)
Unknown3004(20)
Unknown1019(50)
Unknown8010(100, 1, 1)
sprite('Action_160_15', 1) # 87-87 **attackbox here**
SFX_4('uli254')
RefreshMultihit()
Unknown11001(0, 0, 5)
Unknown11072(0, 150000, 50000)
Unknown11099(0)
Unknown11069('')
clearUponHandler(10)
Unknown3001(128)
Unknown3004(20)
Unknown1019(30)
Unknown8010(100, 1, 1)
sprite('Action_160_15', 1) # 88-88 **attackbox here**
RefreshMultihit()
sprite('Action_160_15', 1) # 89-89 **attackbox here**
RefreshMultihit()
sprite('Action_160_15', 1) # 90-90 **attackbox here**
RefreshMultihit()
sprite('Action_160_15', 1) # 91-91 **attackbox here**
RefreshMultihit()
sprite('Action_160_15', 6) # 92-97 **attackbox here**
RefreshMultihit()
Unknown11064(0)
sprite('Action_160_16', 11) # 98-108
Unknown1019(20)
Unknown13024(1)
sprite('Action_160_17', 6) # 109-114
Unknown1084(1)
sprite('Action_160_18', 5) # 115-119
sprite('Action_160_19', 5) # 120-124
sprite('Action_160_20', 4) # 125-128
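# UltimateRanbuOD / UltimateRanbuOD_Exe (below): Overdrive version of the ranbu super, with the
# same startup and a longer slash chain in the _Exe state.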
@State
def UltimateRanbuOD():
def upon_IMMEDIATE():
AttackDefaults_StandingDD()
Unknown23055('')
AttackLevel_(4)
MinimumDamagePct(15)
AirHitstunAnimation(10)
GroundedHitstunAnimation(10)
AirPushbackY(40000)
AirUntechableTime(60)
Unknown9016(1)
Unknown11069('UltimateRanbuOD_Exe')
Unknown11064(1)
Unknown2073(1)
def upon_78():
Unknown13024(0)
enterState('UltimateRanbuOD_Exe')
setInvincible(1)
Unknown1084(1)
sprite('Action_262_00', 5) # 1-5
sprite('Action_262_01', 5) # 6-10
Unknown2036(60, -1, 0)
ConsumeSuperMeter(-10000)
Unknown30080('')
SFX_4('uli252')
sprite('Action_262_02', 5) # 11-15
sprite('Action_262_03', 3) # 16-18
sprite('Action_262_04', 3) # 19-21
sprite('Action_262_03', 3) # 22-24
sprite('Action_262_04', 3) # 25-27
sprite('Action_262_03', 3) # 28-30
sprite('Action_262_04', 3) # 31-33
sprite('Action_262_03', 3) # 34-36
sprite('Action_262_04', 3) # 37-39
sprite('Action_262_03', 3) # 40-42
sprite('Action_262_04', 3) # 43-45
sprite('Action_262_03', 3) # 46-48
sprite('Action_262_04', 3) # 49-51
sprite('Action_262_05', 5) # 52-56
sprite('Action_262_06', 5) # 57-61
sprite('Action_154_00', 6) # 62-67
sprite('Action_154_01', 4) # 68-71
Unknown1045(65000)
sprite('Action_154_02', 4) # 72-75 **attackbox here**
GFX_0('Lin_157', 0)
Unknown1084(1)
SFX_0('007_swing_knife_1')
sprite('Action_154_03', 9) # 76-84
setInvincible(0)
sprite('Action_154_04', 7) # 85-91
sprite('Action_154_05', 5) # 92-96
sprite('Action_154_06', 4) # 97-100
@State
def UltimateRanbuOD_Exe():
def upon_IMMEDIATE():
AttackDefaults_StandingDD()
Unknown23056('')
AttackLevel_(4)
Damage(870)
MinimumDamagePct(7)
AttackP2(100)
AirHitstunAnimation(10)
GroundedHitstunAnimation(10)
AirPushbackY(36000)
AirUntechableTime(60)
Hitstop(4)
Unknown30048(1)
Unknown11023(1)
Unknown11072(1, 150000, 50000)
Unknown9016(1)
Unknown11069('UltimateRanbuOD_Exe')
Unknown11064(1)
DisableAttackRestOfMove()
setInvincible(1)
EnableCollision(0)
Unknown2015(40)
def upon_ON_HIT_OR_BLOCK():
Unknown3001(200)
Unknown3004(-40)
def upon_STATE_END():
Unknown3001(255)
Unknown3004(0)
Unknown13024(0)
sprite('keep', 7) # 1-7
GFX_0('Lin_166_1', 0)
Unknown3004(-40)
sprite('null', 1) # 8-8
teleportRelativeX(300000)
Unknown1007(300000)
Unknown2005()
Unknown26('Lin_157')
sprite('Action_160_03', 6) # 9-14
GFX_0('Lin_160_3', 0)
Unknown3001(128)
Unknown3004(20)
sprite('Action_160_04', 2) # 15-16
sprite('Action_160_05', 4) # 17-20 **attackbox here**
SFX_4('uli253_1')
RefreshMultihit()
GFX_0('EffNmlAtkAir5CBlade', 100)
SFX_0('010_swing_sword_1')
sprite('null', 5) # 21-25
GFX_0('Lin_168_2', 0)
sprite('Action_160_07', 6) # 26-31
GFX_0('Lin_160_4', 0)
teleportRelativeX(300000)
Unknown1007(360000)
Unknown2005()
Unknown3001(128)
Unknown3004(20)
sprite('Action_160_08', 2) # 32-33
sprite('Action_160_09', 4) # 34-37 **attackbox here**
GFX_0('Lin_168_3', 0)
RefreshMultihit()
AirHitstunAnimation(11)
GroundedHitstunAnimation(11)
AirPushbackY(-28000)
GFX_0('Lin_091', 0)
SFX_0('007_swing_knife_1')
sprite('null', 5) # 38-42
teleportRelativeX(500000)
teleportRelativeY(0)
Unknown2005()
sprite('Action_154_00', 3) # 43-45
GFX_0('Lin_160_5', 0)
Unknown3001(128)
Unknown3004(20)
Unknown8000(100, 1, 1)
sprite('Action_154_01', 2) # 46-47
sprite('Action_154_02', 2) # 48-49 **attackbox here**
RefreshMultihit()
AirHitstunAnimation(10)
GroundedHitstunAnimation(10)
Unknown9071()
AirPushbackY(40000)
GFX_0('Lin_157', 0)
SFX_0('007_swing_knife_1')
sprite('null', 1) # 50-50
GFX_0('Lin_166_3', 0)
teleportRelativeX(300000)
Unknown1007(300000)
Unknown2005()
Unknown26('Lin_157')
sprite('Action_160_03', 6) # 51-56
GFX_0('Lin_160_3', 0)
Unknown3001(128)
Unknown3004(20)
sprite('Action_160_04', 2) # 57-58
sprite('Action_160_05', 4) # 59-62 **attackbox here**
RefreshMultihit()
GFX_0('EffNmlAtkAir5CBlade', 100)
SFX_0('010_swing_sword_1')
sprite('null', 5) # 63-67
GFX_0('Lin_166_1', 0)
sprite('null', 1) # 68-68
teleportRelativeX(300000)
Unknown1007(300000)
Unknown2005()
Unknown26('Lin_157')
sprite('Action_160_03', 6) # 69-74
GFX_0('Lin_160_2', 0)
Unknown3001(128)
Unknown3004(20)
sprite('Action_160_04', 2) # 75-76
sprite('Action_160_05', 4) # 77-80 **attackbox here**
RefreshMultihit()
GFX_0('EffNmlAtkAir5CBlade', 100)
SFX_0('010_swing_sword_1')
sprite('null', 5) # 81-85
GFX_0('Lin_166_1', 0)
sprite('Action_160_07', 6) # 86-91
GFX_0('Lin_160_1', 0)
teleportRelativeX(300000)
Unknown1007(360000)
Unknown2005()
Unknown3001(128)
Unknown3004(20)
sprite('Action_160_08', 2) # 92-93
sprite('Action_160_09', 4) # 94-97 **attackbox here**
RefreshMultihit()
AirHitstunAnimation(11)
GroundedHitstunAnimation(11)
AirPushbackY(-42000)
GFX_0('Lin_091', 0)
SFX_0('007_swing_knife_1')
sprite('null', 6) # 98-103
GFX_0('Lin_168_3', 0)
teleportRelativeX(500000)
teleportRelativeY(0)
Unknown2005()
sprite('Action_160_12', 4) # 104-107
GFX_0('Lin_160_5', 0)
Unknown3001(128)
Unknown3004(20)
Unknown8000(100, 1, 1)
sprite('Action_160_13', 3) # 108-110
sprite('Action_160_14', 2) # 111-112
sprite('Action_160_15', 6) # 113-118 **attackbox here**
SFX_4('uli254')
RefreshMultihit()
AirHitstunAnimation(13)
GroundedHitstunAnimation(13)
AirPushbackX(10000)
AirPushbackY(30000)
Hitstop(1)
Unknown11001(0, 20, 20)
Unknown11099(1)
MinimumDamagePct(23)
physicsXImpulse(60000)
Unknown8007(100, 1, 1)
GFX_0('Lin_169', 0)
SFX_0('007_swing_knife_2')
sprite('Action_160_16', 5) # 119-123
Unknown3001(128)
Unknown3004(20)
Unknown1019(50)
Unknown8010(100, 1, 1)
sprite('Action_160_15', 1) # 124-124 **attackbox here**
RefreshMultihit()
Unknown11001(0, 0, 5)
Unknown11072(0, 150000, 50000)
Unknown11099(0)
Unknown11069('')
Unknown11064(0)
clearUponHandler(10)
Unknown3001(128)
Unknown3004(20)
Unknown1019(30)
Unknown8010(100, 1, 1)
sprite('Action_160_15', 1) # 125-125 **attackbox here**
RefreshMultihit()
sprite('Action_160_15', 1) # 126-126 **attackbox here**
RefreshMultihit()
sprite('Action_160_15', 1) # 127-127 **attackbox here**
RefreshMultihit()
sprite('Action_160_15', 1) # 128-128 **attackbox here**
RefreshMultihit()
sprite('Action_160_15', 6) # 129-134 **attackbox here**
RefreshMultihit()
sprite('Action_160_16', 11) # 135-145
Unknown1019(20)
Unknown13024(1)
sprite('Action_160_17', 6) # 146-151
Unknown1084(1)
sprite('Action_160_18', 5) # 152-156
sprite('Action_160_19', 5) # 157-161
sprite('Action_160_20', 4) # 162-165
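# AstralHeat: instant-kill finisher (Damage(2000), MinimumDamagePct(100)). The advancing grab loop
# runs until upon_78 fires and sends the script to label 2, the connect cinematic with
# camera/teleport control and the Astral_27x effect objects; label 1, reached from the distance
# check in upon_CLEAR_OR_EXIT, exits the state early.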
@State
def AstralHeat():
def upon_IMMEDIATE():
AttackDefaults_Astral()
AttackLevel_(2)
Unknown11064(1)
Damage(2000)
MinimumDamagePct(100)
GroundedHitstunAnimation(4)
AirHitstunAnimation(4)
hitstun(120)
Unknown11001(0, 0, 0)
AirUntechableTime(300)
Hitstop(0)
Unknown11068(1)
Unknown11078(1)
Unknown11062(1)
Unknown9016(1)
def upon_78():
clearUponHandler(78)
PushbackX(20000)
sendToLabel(2)
Unknown23157(1)
EnableCollision(0)
Unknown2053(0)
Unknown2034(0)
Unknown20003(1)
Unknown20004(1)
Unknown23084(1)
Unknown23088(1, 1)
Unknown11086(1)
def upon_77():
Unknown21015('41737472616c5f3238395f310000000000000000000000000000000000000000581b000000000000')
Unknown21015('41737472616c5f3238395f320000000000000000000000000000000000000000581b000000000000')
def upon_STATE_END():
Unknown21013('43616c6c5f556e6949574542470000000000000000000000000000000000000020000000')
sprite('Action_270_00', 3) # 1-3 **attackbox here**
setInvincible(1)
sprite('Action_270_00', 5) # 4-8 **attackbox here**
Unknown2036(90, -1, 2)
Unknown23147()
Unknown4004('6175726100000000000000000000000000000000000000000000000000000000ffff0000')
SFX_4('uli290')
GFX_0('Astral_CutIn', 100)
Unknown4004('43616c6c5f556e69495745424700000000000000000000000000000000000000ffff0000')
sprite('Action_270_01', 3) # 9-11 **attackbox here**
sprite('Action_270_02', 2) # 12-13 **attackbox here**
sprite('Action_270_03', 5) # 14-18 **attackbox here**
sprite('Action_270_06', 3) # 19-21 **attackbox here**
sprite('Action_270_07', 3) # 22-24 **attackbox here**
sprite('Action_270_08', 3) # 25-27 **attackbox here**
sprite('Action_270_06', 3) # 28-30 **attackbox here**
sprite('Action_270_07', 3) # 31-33 **attackbox here**
sprite('Action_270_08', 3) # 34-36 **attackbox here**
sprite('Action_270_06', 3) # 37-39 **attackbox here**
sprite('Action_270_07', 3) # 40-42 **attackbox here**
sprite('Action_270_08', 3) # 43-45 **attackbox here**
sprite('Action_270_06', 3) # 46-48 **attackbox here**
sprite('Action_270_07', 3) # 49-51 **attackbox here**
sprite('Action_270_08', 3) # 52-54 **attackbox here**
sprite('Action_270_06', 3) # 55-57 **attackbox here**
sprite('Action_270_07', 3) # 58-60 **attackbox here**
sprite('Action_270_08', 3) # 61-63 **attackbox here**
sprite('Action_270_06', 3) # 64-66 **attackbox here**
sprite('Action_270_07', 3) # 67-69 **attackbox here**
sprite('Action_270_08', 3) # 70-72 **attackbox here**
sprite('Action_270_06', 3) # 73-75 **attackbox here**
sprite('Action_270_07', 3) # 76-78 **attackbox here**
sprite('Action_270_08', 3) # 79-81 **attackbox here**
sprite('Action_270_06', 3) # 82-84 **attackbox here**
sprite('Action_270_07', 3) # 85-87 **attackbox here**
sprite('Action_270_08', 3) # 88-90 **attackbox here**
sprite('Action_270_06', 3) # 91-93 **attackbox here**
sprite('Action_270_07', 3) # 94-96 **attackbox here**
sprite('Action_270_08', 3) # 97-99 **attackbox here**
sprite('Action_270_09', 3) # 100-102 **attackbox here**
loopRest()
sprite('Action_270_10', 2) # 103-104 **attackbox here**
physicsXImpulse(30000)
def upon_CLEAR_OR_EXIT():
if (SLOT_19 <= 200000):
clearUponHandler(3)
sendToLabel(1)
sprite('Action_270_11', 3) # 105-107 **attackbox here**
GFX_0('Astral_289_1', 100)
GFX_0('Astral_289_2', 100)
sprite('Action_270_12', 3) # 108-110 **attackbox here**
sprite('Action_270_13', 2) # 111-112 **attackbox here**
sprite('Action_270_14', 2) # 113-114 **attackbox here**
sprite('Action_270_15', 2) # 115-116 **attackbox here**
sprite('Action_270_16', 2) # 117-118 **attackbox here**
sprite('Action_270_17', 2) # 119-120 **attackbox here**
sprite('Action_270_18', 2) # 121-122 **attackbox here**
label(1)
sprite('Action_270_11ex', 3) # 123-125 **attackbox here**
Unknown21015('41737472616c5f3238395f310000000000000000000000000000000000000000581b000000000000')
Unknown21015('41737472616c5f3238395f320000000000000000000000000000000000000000581b000000000000')
clearUponHandler(3)
Unknown11072(1, 200000, 0)
sprite('Action_045_11', 4) # 126-129
Unknown21013('43616c6c5f556e6949574542470000000000000000000000000000000000000020000000')
setInvincible(0)
Unknown1019(80)
sprite('Action_045_12', 5) # 130-134
sprite('Action_045_11', 4) # 135-138
Unknown1019(50)
sprite('Action_045_12', 5) # 139-143
Unknown1019(0)
sprite('Action_045_13', 6) # 144-149
ExitState()
label(2)
sprite('Action_270_19', 4) # 150-153 **attackbox here**
SFX_4('uli291')
physicsXImpulse(6000)
clearUponHandler(3)
clearUponHandler(78)
sprite('Action_270_20', 2) # 154-155 **attackbox here**
SFX_3('SE_SwingLightSword')
AttackLevel_(4)
Hitstop(2)
sprite('Action_270_20', 2) # 156-157 **attackbox here**
SFX_3('SE_SwingLightSword')
RefreshMultihit()
sprite('Action_270_21', 5) # 158-162 **attackbox here**
sprite('Action_270_22', 3) # 163-165 **attackbox here**
GFX_0('Astral_280', 100)
RefreshMultihit()
sprite('Action_270_23', 2) # 166-167 **attackbox here**
sprite('Action_270_24', 3) # 168-170 **attackbox here**
sprite('Action_270_25', 4) # 171-174 **attackbox here**
SFX_3('SE_SwingLightSword')
GFX_0('Astral_281', 100)
RefreshMultihit()
sprite('Action_270_26', 5) # 175-179 **attackbox here**
sprite('Action_270_27', 3) # 180-182 **attackbox here**
sprite('Action_270_28', 4) # 183-186 **attackbox here**
sprite('Action_270_29', 4) # 187-190 **attackbox here**
sprite('Action_270_30', 1) # 191-191 **attackbox here**
SFX_3('SE_SwingLightSword')
GFX_0('Astral_282', 100)
RefreshMultihit()
sprite('Action_270_31', 4) # 192-195 **attackbox here**
sprite('Action_270_32', 3) # 196-198 **attackbox here**
sprite('Action_270_33', 2) # 199-200 **attackbox here**
sprite('Action_270_34', 5) # 201-205 **attackbox here**
sprite('Action_270_35', 4) # 206-209 **attackbox here**
sprite('Action_270_36', 2) # 210-211 **attackbox here**
sprite('Action_270_37', 5) # 212-216 **attackbox here**
SFX_3('SE_SwingLightSword')
GFX_0('Astral_283', 100)
AirHitstunAnimation(10)
GroundedHitstunAnimation(10)
AirUntechableTime(600)
AirPushbackX(0)
AirPushbackY(80000)
YImpluseBeforeWallbounce(1600)
RefreshMultihit()
sprite('Action_270_38', 22) # 217-238 **attackbox here**
Unknown1084(1)
Unknown21013('43616c6c5f556e6949574542470000000000000000000000000000000000000020000000')
sprite('null', 45) # 239-283
Unknown20000(1)
Unknown20003(1)
Unknown20002(1)
Unknown1000(0)
Unknown36(22)
Unknown3001(0)
Unknown1000(0)
physicsYImpulse(1)
setGravity(0)
Unknown35()
sprite('null', 45) # 284-328
Unknown4004('6566666563745f32363500000000000000000000000000000000000000000000ffff0000')
SFX_4('uli292')
sprite('null', 40) # 329-368
SFX_3('SE039')
GFX_0('Astral_277', 100)
sprite('null', 20) # 369-388
Unknown36(22)
Unknown3001(255)
Unknown35()
GFX_0('Astral_276', 100)
sprite('null', 1) # 389-389
Unknown20000(0)
Unknown20007(1)
Unknown20001(1)
sprite('null', 20) # 390-409
GFX_0('Astral_274', -1)
Unknown1086(22)
teleportRelativeX(-100000)
Unknown1007(-30000)
sprite('Action_271_03', 4) # 410-413 **attackbox here**
Unknown3001(0)
Unknown3004(42)
Unknown3026(-16777216)
Unknown3025(-1, 10)
physicsXImpulse(1200)
physicsYImpulse(10000)
setGravity(100)
sprite('Action_271_04', 4) # 414-417 **attackbox here**
sprite('Action_271_05', 4) # 418-421 **attackbox here**
sprite('Action_271_06', 4) # 422-425 **attackbox here**
YAccel(80)
sprite('Action_271_07', 4) # 426-429 **attackbox here**
SFX_4('uli293')
sprite('Action_271_08', 11) # 430-440 **attackbox here**
sprite('Action_271_09', 10) # 441-450 **attackbox here**
YAccel(50)
sprite('Action_271_10', 10) # 451-460 **attackbox here**
sprite('Action_271_11', 5) # 461-465 **attackbox here**
YAccel(20)
setGravity(0)
sprite('null', 40) # 466-505
SFX_3('SE_SwingGlass')
GFX_0('Astral_272', 100)
Unknown36(22)
Unknown3001(0)
Unknown35()
Unknown26('Astral_274')
sprite('null', 20) # 506-525
Unknown20007(1)
Unknown20001(1)
physicsXImpulse(0)
physicsYImpulse(0)
setGravity(0)
sprite('null', 90) # 526-615
GFX_0('Astral_279', -1)
GFX_0('Astral_274_2', -1)
GFX_0('Astral_278', 100)
Unknown36(22)
Unknown3001(255)
Unknown23178(11)
setGravity(0)
Unknown35()
sprite('null', 30) # 616-645
GFX_0('Astral_275', 100)
SFX_3('SE_BigBomb')
sprite('null', 30) # 646-675
Unknown11064(3)
GFX_0('Astral_Atk_dmy', 100)
Unknown26('Astral_274_2')
sprite('null', 87) # 676-762
Unknown26('Astral_278')
Unknown20007(0)
Unknown20000(1)
Unknown1000(0)
teleportRelativeY(0)
Unknown23024(0)
sprite('Action_269_00', 122) # 763-884
sprite('Action_269_01', 15) # 885-899
Unknown20000(0)
sprite('Action_269_02', 5) # 900-904
sprite('Action_269_03', 4) # 905-908
sprite('Action_269_04', 5) # 909-913
sprite('Action_269_05', 5) # 914-918
sprite('Action_269_06', 2) # 919-920
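# The remaining states are shared tag-system actions (tag wait, partner change in/out/appeal,
# change request, return appeals) rather than character-specific attacks.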
@State
def CmnActTagBattleWait():
def upon_IMMEDIATE():
setInvincible(1)
label(0)
sprite('null', 1) # 1-1
EnableCollision(0)
Unknown2034(0)
teleportRelativeY(0)
gotoLabel(0)
@State
def CmnActChangePartnerAppeal():
def upon_IMMEDIATE():
AttackDefaults_StandingNormal()
sprite('Action_248_00', 3) # 1-3
Unknown4045('65665f74656b69746f755f67000000000000000000000000000000000000000067000000')
sprite('Action_248_01', 3) # 4-6
sprite('Action_248_02', 3) # 7-9
sprite('Action_248_03', 3) # 10-12
sprite('Action_248_04', 3) # 13-15
sprite('Action_248_05', 3) # 16-18
sprite('Action_248_06', 3) # 19-21
sprite('Action_248_07', 3) # 22-24
sprite('Action_248_08', 3) # 25-27
sprite('Action_248_09', 3) # 28-30
sprite('Action_248_10', 20) # 31-50
sprite('Action_248_06', 3) # 51-53
sprite('Action_248_05', 3) # 54-56
sprite('Action_248_04', 3) # 57-59
sprite('Action_248_03', 3) # 60-62
sprite('Action_248_02', 3) # 63-65
sprite('Action_248_01', 3) # 66-68
sprite('Action_248_00', 3) # 69-71
@State
def CmnActChangePartnerAppealAir():
def upon_IMMEDIATE():
AttackDefaults_AirNormal()
sprite('Action_019_03', 2) # 1-2
Unknown1017()
Unknown1022()
Unknown1037()
Unknown1084(1)
sprite('Action_019_02', 2) # 3-4
label(0)
sprite('Action_019_00', 5) # 5-9
sprite('Action_019_01', 5) # 10-14
Unknown2038(1)
if (SLOT_2 == 5):
Unknown1018()
Unknown1023()
Unknown1038()
Unknown1019(40)
YAccel(40)
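# note: the bare comparison on the next line appears to store its result in SLOT_ReturnVal
# (decompiler convention), which the following if/_gotolabel pair then reads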
(SLOT_2 >= 6)
if SLOT_ReturnVal:
_gotolabel(1)
loopRest()
gotoLabel(0)
label(1)
sprite('Action_019_00', 6) # 15-20
sprite('Action_019_01', 6) # 21-26
@State
def CmnActChangePartnerOut():
def upon_IMMEDIATE():
Unknown17013()
sprite('Action_046_01', 3) # 1-3
sprite('Action_046_02', 3) # 4-6
sprite('Action_046_01', 3) # 7-9
sprite('Action_046_02', 3) # 10-12
sprite('Action_046_01', 3) # 13-15
sprite('Action_046_02', 3) # 16-18
sprite('Action_046_01', 3) # 19-21
sprite('Action_046_02', 3) # 22-24
sprite('Action_046_01', 3) # 25-27
sprite('Action_046_02', 3) # 28-30
sprite('Action_046_01', 30) # 31-60
@State
def CmnActChangeRequest():
def upon_IMMEDIATE():
Unknown17013()
Unknown1084(1)
Unknown2034(0)
EnableCollision(0)
Unknown2053(0)
Unknown2067(2500, 240)
def upon_STATE_END():
Unknown2034(1)
EnableCollision(1)
Unknown2053(1)
sprite('Action_053_00', 3) # 1-3
sprite('Action_053_01', 3) # 4-6
sprite('Action_053_02', 4) # 7-10
sprite('Action_053_03', 4) # 11-14
sprite('Action_053_04', 6) # 15-20
sprite('Action_053_05', 20) # 21-40
sprite('Action_053_06', 2) # 41-42
sprite('Action_053_07', 5) # 43-47
sprite('Action_053_08', 5) # 48-52
sprite('Action_053_09', 10) # 53-62
Unknown30042(24)
if SLOT_ReturnVal:
_gotolabel(2)
sprite('keep', 32767) # 63-32829
label(2)
sprite('Action_053_03', 3) # 32830-32832
sprite('Action_053_02', 3) # 32833-32835
sprite('Action_053_01', 3) # 32836-32838
sprite('Action_053_00', 3) # 32839-32841
@State
def CmnActChangeReturnAppeal():
def upon_IMMEDIATE():
Unknown17013()
sprite('Action_000_15', 1) # 1-1 **attackbox here**
sprite('Action_000_16', 4) # 2-5 **attackbox here**
sprite('Action_000_17', 4) # 6-9 **attackbox here**
sprite('Action_000_18', 4) # 10-13 **attackbox here**
sprite('Action_000_19', 4) # 14-17 **attackbox here**
sprite('Action_000_20', 4) # 18-21 **attackbox here**
sprite('Action_000_21', 4) # 22-25 **attackbox here**
sprite('Action_000_22', 4) # 26-29 **attackbox here**
SFX_FOOTSTEP_(100, 0, 1)
sprite('Action_000_23', 3) # 30-32 **attackbox here**
sprite('Action_000_24', 23) # 33-55 **attackbox here**
sprite('Action_000_26', 4) # 56-59 **attackbox here**
sprite('Action_000_27', 4) # 60-63 **attackbox here**
sprite('Action_000_28', 3) # 64-66 **attackbox here**
sprite('Action_000_29', 3) # 67-69 **attackbox here**
sprite('Action_000_30', 3) # 70-72 **attackbox here**
sprite('Action_000_31', 3) # 73-75 **attackbox here**
sprite('Action_000_32', 30) # 76-105
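# CmnActChangePartnerIn -- partner tag-in: idles on null sprites until event 41 fires (sendToLabel(100)), then
# teleports to an offset behind and well above the current position, falls while looping Action_146_03ex/04ex at
# label(1), and upon_LANDING (handler 2) redirects to label(9) for the landing recovery.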
@State
def CmnActChangePartnerIn():
def upon_IMMEDIATE():
Unknown17021('')
Unknown9015(1)
def upon_41():
clearUponHandler(41)
sendToLabel(100)
def upon_LANDING():
clearUponHandler(2)
sendToLabel(9)
sprite('null', 2) # 1-2
sprite('null', 600) # 3-602
label(100)
sprite('null', 28) # 603-630
sprite('Action_146_01', 2) # 631-632
Unknown1086(22)
teleportRelativeX(-150000)
teleportRelativeY(1200000)
physicsYImpulse(-240000)
setGravity(0)
EnableCollision(1)
Unknown2053(1)
sprite('Action_146_02', 2) # 633-634
SFX_0('004_swing_grap_1_1')
SFX_0('000_airdash_2')
label(1)
sprite('Action_146_03ex', 3) # 635-637 **attackbox here**
sprite('Action_146_04ex', 3) # 638-640 **attackbox here**
loopRest()
gotoLabel(1)
label(9)
sprite('keep', 3) # 641-643
Unknown1084(1)
sprite('Action_146_05', 4) # 644-647
Unknown8000(100, 1, 1)
sprite('Action_146_06', 14) # 648-661
sprite('Action_146_07', 4) # 662-665
@State
def CmnActChangeReturnAppealBurst():
sprite('Action_312_03', 2) # 1-2
sprite('Action_312_04', 2) # 3-4
sprite('Action_312_05', 32) # 5-36
sprite('Action_312_06', 4) # 37-40
sprite('Action_312_07', 4) # 41-44
sprite('Action_312_08', 4) # 45-48
sprite('Action_014_00', 4) # 49-52
sprite('Action_014_01', 4) # 53-56
sprite('Action_014_02', 4) # 57-60
sprite('Action_000_00', 30) # 61-90 **attackbox here**
@State
def CmnActChangePartnerQuickIn():
sprite('Action_045_03', 3) # 1-3
sprite('Action_045_04', 5) # 4-8
sprite('Action_045_11', 7) # 9-15
sprite('Action_045_12', 7) # 16-22
sprite('Action_045_13', 7) # 23-29
@State
def CmnActChangePartnerQuickOut():
def upon_IMMEDIATE():
Unknown17013()
def upon_LANDING():
clearUponHandler(2)
Unknown1084(1)
sendToLabel(1)
sprite('Action_046_00', 1) # 1-1
sprite('Action_046_01', 2) # 2-3
sprite('Action_046_02', 2) # 4-5
sprite('Action_046_02', 1) # 6-6
sprite('Action_046_03', 1) # 7-7
loopRest()
label(0)
sprite('Action_046_03', 3) # 8-10
sprite('Action_046_04', 3) # 11-13
loopRest()
gotoLabel(0)
label(1)
sprite('Action_046_05', 3) # 14-16
sprite('Action_046_06', 3) # 17-19
@State
def CmnActChangePartnerAssistAdmiss():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
def upon_LANDING():
clearUponHandler(2)
Unknown1084(1)
sendToLabel(99)
sprite('null', 2) # 1-2
label(0)
sprite('Action_022_00', 4) # 3-6
Unknown1019(95)
sprite('Action_022_01', 4) # 7-10
Unknown1019(95)
loopRest()
gotoLabel(0)
label(99)
sprite('Action_013_00', 2) # 11-12
Unknown8000(100, 1, 1)
sprite('keep', 100) # 13-112
@State
def CmnActChangePartnerAssistAtk_A():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
Unknown2006()
def upon_STATE_END():
EnableCollision(1)
Unknown2034(1)
Unknown2053(1)
clearUponHandler(2)
sendToLabelUpon(2, 1)
sprite('Action_036_00', 4) # 1-4
sprite('Action_036_01', 4) # 5-8
SLOT_12 = SLOT_19
Unknown1019(3)
if (SLOT_12 <= 1000):
SLOT_12 = 1000
physicsYImpulse(34000)
sprite('Action_448_00', 2) # 9-10
physicsXImpulse(3000)
physicsYImpulse(20000)
setGravity(1400)
sprite('Action_448_01', 3) # 11-13
sprite('Action_448_02', 3) # 14-16
sprite('Action_448_03', 3) # 17-19
sprite('Action_448_04', 10) # 20-29
sprite('Action_448_05', 4) # 30-33
SFX_0('010_swing_sword_2')
sprite('Action_448_06', 3) # 34-36
GFX_0('EffAirShotSlash', 100)
GFX_0('ShotAssist', 0)
tag_voice(1, 'uli204_0', 'uli204_1', 'uli204_2', '')
sprite('Action_448_07', 10) # 37-46
sprite('Action_448_08', 6) # 47-52
sprite('Action_448_09', 4) # 53-56
label(0)
sprite('Action_022_00', 3) # 57-59
sprite('Action_022_01', 3) # 60-62
loopRest()
gotoLabel(0)
label(1)
sprite('Action_023_00', 3) # 63-65
Unknown1084(1)
clearUponHandler(2)
sprite('Action_023_01', 3) # 66-68
sprite('Action_023_02', 3) # 69-71
sprite('Action_023_03', 4) # 72-75
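# CmnActChangePartnerAssistAtk_B -- assist attack B: attack data is set up front; the character dashes in
# (physicsXImpulse) into a rising slash, then loops Action_099_13/14 at label(0) until landing (handler 2) sends
# it to label(1); collision is restored on STATE_END.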
@State
def CmnActChangePartnerAssistAtk_B():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
AttackLevel_(3)
AttackP1(70)
Unknown11092(1)
AirHitstunAnimation(10)
GroundedHitstunAnimation(10)
AirPushbackX(2000)
AirPushbackY(38000)
AirUntechableTime(60)
Unknown9016(1)
Hitstop(7)
Unknown11042(1)
clearUponHandler(2)
sendToLabelUpon(2, 1)
Unknown2006()
def upon_STATE_END():
EnableCollision(1)
Unknown2034(1)
Unknown2053(1)
sprite('Action_099_00', 6) # 1-6
physicsXImpulse(40000)
sprite('Action_099_01', 3) # 7-9
physicsXImpulse(11000)
Unknown7006('uli200_0', 100, 845769845, 828321840, 0, 0, 100, 845769845, 845099056, 0, 0, 100, 0, 0, 0, 0, 0)
sprite('Action_099_02', 3) # 10-12 **attackbox here**
sprite('Action_099_03', 1) # 13-13
SFX_0('010_swing_sword_0')
physicsYImpulse(23000)
physicsXImpulse(4000)
setGravity(1800)
sprite('Action_099_04', 2) # 14-15
GFX_0('EffNmlReversalAction00', 100)
sprite('Action_099_05', 2) # 16-17 **attackbox here**
RefreshMultihit()
Unknown9071()
Unknown9083()
sprite('Action_099_06', 3) # 18-20
sprite('Action_099_07', 2) # 21-22
sprite('Action_099_08', 2) # 23-24
sprite('Action_099_09', 2) # 25-26
sprite('Action_099_10', 2) # 27-28
sprite('Action_099_11', 3) # 29-31
sprite('Action_099_12', 4) # 32-35
label(0)
sprite('Action_099_13', 3) # 36-38
sprite('Action_099_14', 3) # 39-41
loopRest()
gotoLabel(0)
label(1)
sprite('Action_099_15', 2) # 42-43
Unknown1084(1)
Unknown8000(100, 1, 1)
sprite('Action_099_16', 2) # 44-45
sprite('Action_099_17', 3) # 46-48
sprite('Action_099_18', 3) # 49-51
@State
def CmnActChangePartnerAssistAtk_C():
def upon_IMMEDIATE():
Unknown17013()
sprite('keep', 180) # 1-180
@State
def CmnActChangePartnerAssistAtk_D():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
AttackLevel_(3)
Damage(800)
AttackP1(70)
Unknown11092(1)
Hitstop(2)
AirPushbackX(12000)
AirPushbackY(32000)
AirUntechableTime(50)
GroundedHitstunAnimation(13)
AirHitstunAnimation(13)
Unknown9016(1)
Unknown11042(1)
Unknown2006()
def upon_STATE_END():
EnableCollision(1)
Unknown2034(1)
Unknown2053(1)
sprite('Action_045_00', 2) # 1-2
Unknown3029(1)
sprite('Action_045_01', 3) # 3-5
physicsXImpulse(38000)
Unknown1028(-1000)
sprite('Action_045_02', 2) # 6-7
def upon_CLEAR_OR_EXIT():
if (SLOT_19 <= 200000):
sendToLabel(0)
sprite('Action_045_03', 2) # 8-9
Unknown8006(100, 1, 1)
label(0)
sprite('Action_403_00', 1) # 10-10
clearUponHandler(3)
Unknown1084(1)
Unknown1045(28000)
sprite('Action_403_01', 1) # 11-11
sprite('Action_403_02', 2) # 12-13
sprite('Action_403_03', 1) # 14-14
Unknown7009(2)
SFX_0('010_swing_sword_2')
sprite('Action_403_03', 1) # 15-15
sprite('Action_403_04', 1) # 16-16 **attackbox here**
GFX_0('EffNmlAtk5CBlade', 100)
RefreshMultihit()
Unknown1019(80)
sprite('Action_403_04', 1) # 17-17 **attackbox here**
RefreshMultihit()
sprite('Action_403_05', 2) # 18-19 **attackbox here**
RefreshMultihit()
Hitstop(7)
Unknown1019(80)
sprite('Action_403_06', 4) # 20-23
sprite('Action_403_07', 7) # 24-30
sprite('Action_403_08', 4) # 31-34
sprite('Action_403_09', 4) # 35-38
sprite('Action_403_10', 3) # 39-41
sprite('Action_403_11', 1) # 42-42
sprite('Action_403_11', 1) # 43-43
sprite('Action_403_11', 1) # 44-44
@State
def CmnActChangePartnerAttackIn():
def upon_IMMEDIATE():
AttackDefaults_StandingSpecial()
sprite('keep', 180) # 1-180
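# CmnActChangePartnerDD -- entry for the partner distortion: invincible, teleported far behind and slightly above
# the point character, drifting down while looping Action_022_00/01; on landing, SLOT_58 (set from SLOT_162,
# presumably the Overdrive flag) selects between the normal and OD execution states below.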
@State
def CmnActChangePartnerDD():
def upon_IMMEDIATE():
setInvincible(1)
Unknown30063(1)
if SLOT_162:
SLOT_58 = 1
def upon_LANDING():
clearUponHandler(2)
Unknown1084(1)
Unknown8000(100, 1, 1)
sendToLabel(1)
sprite('null', 1) # 1-1
Unknown2036(96, -1, 0)
sprite('null', 1) # 2-2
teleportRelativeX(-1500000)
teleportRelativeY(240000)
setGravity(0)
physicsYImpulse(-9600)
SLOT_12 = SLOT_19
teleportRelativeX(-145000)
Unknown1019(4)
label(0)
sprite('Action_022_00', 3) # 3-5
sprite('Action_022_01', 3) # 6-8
loopRest()
gotoLabel(0)
label(1)
sprite('keep', 10) # 9-18
if SLOT_58:
enterState('AN_CmnActChangePartnerDDODExe')
else:
enterState('AN_CmnActChangePartnerDDExe')
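# AN_CmnActChangePartnerDDExe -- the partner distortion itself: attack data and 'UltimateRushEff' set on entry,
# an invincible run-up (Action_189), then a long Action_190 multi-hit slash series stepping forward via
# teleportRelativeX(20000) with RefreshMultihit between reps, an 'UltimateSlash' launcher (Damage bumped to 170),
# and an 'UltimateAssaultFinish' ender before the recovery frames.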
@State
def AN_CmnActChangePartnerDDExe():
def upon_IMMEDIATE():
AttackDefaults_StandingDD()
Unknown23056('')
AttackLevel_(4)
Damage(150)
MinimumDamagePct(100)
AttackP1(100)
AttackP2(100)
GroundedHitstunAnimation(10)
AirHitstunAnimation(10)
AirPushbackX(6000)
AirPushbackY(3000)
Unknown30056('a08601003200000000000000')
YImpluseBeforeWallbounce(700)
AirUntechableTime(100)
Unknown9310(1)
Hitstop(0)
Unknown11001(5, 5, 5)
Unknown11056(2)
Unknown11064(1)
Unknown9016(1)
Unknown11057(800)
Unknown1084(1)
Unknown30063(1)
Unknown30019('0000000001000000')
GFX_0('UltimateRushEff', 100)
def upon_78():
Unknown2037(1)
setInvincible(0)
setInvincibleFor(60)
sprite('Lin392_00', 5) # 1-5
setInvincible(1)
sprite('Action_189_01', 8) # 6-13
SFX_3('SE_ApperLightBlade')
sprite('Action_189_02', 7) # 14-20
sprite('Action_189_03', 6) # 21-26
sprite('Action_189_04', 5) # 27-31
sprite('Action_189_05', 4) # 32-35
sprite('Action_189_06', 4) # 36-39
sprite('Action_189_07', 4) # 40-43
sprite('Action_189_08', 3) # 44-46
sprite('Action_189_09', 3) # 47-49
Unknown2015(200)
sprite('Action_190_00', 5) # 50-54
sprite('Action_190_01', 5) # 55-59
teleportRelativeX(20000)
sprite('Action_190_02', 4) # 60-63
sprite('Action_190_03', 4) # 64-67
physicsXImpulse(5000)
Unknown1028(-50)
sprite('Action_190_04', 4) # 68-71
SFX_3('SE_SwingLightSword')
sprite('Action_190_05duo', 4) # 72-75 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 3) # 76-78 **attackbox here**
sprite('Action_190_07', 3) # 79-81 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 4) # 82-85 **attackbox here**
sprite('Action_190_09', 2) # 86-87
if (not SLOT_2):
setInvincible(0)
sprite('Action_190_10', 2) # 88-89
sprite('Action_190_11', 2) # 90-91
sprite('Action_190_12', 2) # 92-93
SFX_3('SE_SwingLightSword')
sprite('Action_190_13', 2) # 94-95 **attackbox here**
RefreshMultihit()
sprite('Action_190_14', 2) # 96-97 **attackbox here**
sprite('Action_190_15', 2) # 98-99 **attackbox here**
RefreshMultihit()
sprite('Action_190_16', 2) # 100-101 **attackbox here**
sprite('Action_190_10', 2) # 102-103
sprite('Action_190_11', 2) # 104-105
sprite('Action_190_12', 2) # 106-107
SFX_3('SE_SwingLightSword')
sprite('Action_190_13', 2) # 108-109 **attackbox here**
RefreshMultihit()
sprite('Action_190_14', 2) # 110-111 **attackbox here**
sprite('Action_190_15', 2) # 112-113 **attackbox here**
RefreshMultihit()
sprite('Action_190_16', 2) # 114-115 **attackbox here**
sprite('Action_190_17', 2) # 116-117
Unknown1084(1)
sprite('Action_190_18', 2) # 118-119
teleportRelativeX(20000)
sprite('Action_190_19', 2) # 120-121
teleportRelativeX(20000)
sprite('Action_190_20', 2) # 122-123
SFX_3('SE_SwingLightSword')
teleportRelativeX(20000)
sprite('Action_190_21', 3) # 124-126 **attackbox here**
teleportRelativeX(20000)
RefreshMultihit()
SFX_0('010_swing_sword_2')
sprite('Action_190_22', 4) # 127-130 **attackbox here**
Unknown1084(1)
teleportRelativeX(20000)
sprite('Action_190_23', 6) # 131-136 **attackbox here**
teleportRelativeX(20000)
RefreshMultihit()
AirPushbackX(5000)
AirPushbackY(28000)
YImpluseBeforeWallbounce(900)
sprite('Action_190_24', 6) # 137-142 **attackbox here**
teleportRelativeX(20000)
sprite('Action_190_25', 7) # 143-149
physicsXImpulse(0)
sprite('Action_190_26', 10) # 150-159
tag_voice(0, 'uli251_0', 'uli251_1', '', '')
sprite('Action_190_27', 2) # 160-161
Unknown11057(1000)
GFX_0('UltimateSlash', 100)
Unknown2015(-1)
SFX_3('SE_BigBomb')
sprite('Action_190_28', 4) # 162-165 **attackbox here**
Unknown11001(0, 0, 0)
AirPushbackX(25000)
AirPushbackY(-45000)
Unknown30055('305705003200000000000000')
Hitstop(0)
Damage(170)
RefreshMultihit()
sprite('Action_190_29', 28) # 166-193
GFX_0('UltimateLightwallDDD', 0)
setInvincible(0)
setInvincibleFor(0)
clearUponHandler(78)
sprite('Action_190_30', 2) # 194-195
sprite('Action_190_31', 6) # 196-201
sprite('Action_190_32', 3) # 202-204
sprite('Action_190_33', 5) # 205-209
sprite('Action_190_34', 3) # 210-212
sprite('Action_190_35', 6) # 213-218
Unknown21012('556c74696d6174654c6967687477616c6c45666600000000000000000000000029000000')
sprite('Action_190_36', 3) # 219-221
sprite('Action_190_37', 4) # 222-225 **attackbox here**
SFX_3('SE_SwingLightSword')
GroundedHitstunAnimation(10)
AirHitstunAnimation(10)
GFX_0('UltimateAssaultFinish', 100)
AirPushbackX(1000)
AirPushbackY(20000)
sprite('Action_190_38', 31) # 226-256
sprite('Action_190_39', 4) # 257-260
sprite('Action_190_40', 6) # 261-266
sprite('Action_190_41', 3) # 267-269
sprite('Action_190_42', 3) # 270-272
sprite('Action_190_43', 3) # 273-275
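# AN_CmnActChangePartnerDDODExe -- Overdrive variant of the state above: same overall structure, but lower per-hit
# Damage(100), extra slash repetitions, and OD-suffixed effects (UltimateRushEffOD, UltimateSlashOD, UltimateLightwallDDDOD).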
@State
def AN_CmnActChangePartnerDDODExe():
def upon_IMMEDIATE():
AttackDefaults_StandingDD()
Unknown23056('')
AttackLevel_(4)
Damage(100)
MinimumDamagePct(100)
AttackP1(100)
AttackP2(100)
GroundedHitstunAnimation(10)
AirHitstunAnimation(10)
AirPushbackX(5000)
AirPushbackY(4500)
Unknown30056('a08601003200000000000000')
YImpluseBeforeWallbounce(700)
AirUntechableTime(100)
Unknown9310(1)
Hitstop(0)
Unknown11001(5, 5, 5)
Unknown11056(2)
Unknown11064(1)
Unknown9016(1)
Unknown11057(800)
Unknown1084(1)
Unknown30063(1)
GFX_0('UltimateRushEffOD', 100)
def upon_78():
Unknown2037(1)
setInvincible(0)
setInvincibleFor(60)
sprite('Lin392_00', 5) # 1-5
setInvincible(1)
sprite('Action_189_01', 8) # 6-13
SFX_3('SE_ApperLightBlade')
sprite('Action_189_02', 7) # 14-20
sprite('Action_189_03', 6) # 21-26
sprite('Action_189_04', 5) # 27-31
sprite('Action_189_05', 4) # 32-35
sprite('Action_189_06', 4) # 36-39
sprite('Action_189_07', 4) # 40-43
sprite('Action_189_08', 3) # 44-46
sprite('Action_189_09', 3) # 47-49
Unknown2015(200)
sprite('Action_190_00', 5) # 50-54
sprite('Action_190_01', 5) # 55-59
teleportRelativeX(20000)
sprite('Action_190_02', 4) # 60-63
sprite('Action_190_03', 4) # 64-67
sprite('Action_190_04', 4) # 68-71
physicsXImpulse(5000)
Unknown1028(-50)
SFX_3('SE_SwingLightSword')
sprite('Action_190_05duo', 4) # 72-75 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 3) # 76-78 **attackbox here**
sprite('Action_190_07', 3) # 79-81 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 4) # 82-85 **attackbox here**
sprite('Action_190_00', 2) # 86-87
if (not SLOT_2):
setInvincible(0)
sprite('Action_190_01', 2) # 88-89
sprite('Action_190_02', 2) # 90-91
sprite('Action_190_03', 2) # 92-93
sprite('Action_190_04', 2) # 94-95
SFX_3('SE_SwingLightSword')
sprite('Action_190_05', 2) # 96-97 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 2) # 98-99 **attackbox here**
sprite('Action_190_07', 2) # 100-101 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 2) # 102-103 **attackbox here**
sprite('Action_190_01', 2) # 104-105
sprite('Action_190_02', 2) # 106-107
sprite('Action_190_03', 2) # 108-109
sprite('Action_190_04', 2) # 110-111
SFX_3('SE_SwingLightSword')
sprite('Action_190_05', 2) # 112-113 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 2) # 114-115 **attackbox here**
sprite('Action_190_07', 2) # 116-117 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 2) # 118-119 **attackbox here**
sprite('Action_190_01', 2) # 120-121
sprite('Action_190_02', 2) # 122-123
sprite('Action_190_03', 2) # 124-125
sprite('Action_190_04', 2) # 126-127
SFX_3('SE_SwingLightSword')
sprite('Action_190_05', 2) # 128-129 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 2) # 130-131 **attackbox here**
sprite('Action_190_07', 2) # 132-133 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 2) # 134-135 **attackbox here**
sprite('Action_190_01', 2) # 136-137
sprite('Action_190_02', 2) # 138-139
sprite('Action_190_03', 2) # 140-141
sprite('Action_190_04', 2) # 142-143
SFX_3('SE_SwingLightSword')
sprite('Action_190_05', 2) # 144-145 **attackbox here**
RefreshMultihit()
sprite('Action_190_06', 2) # 146-147 **attackbox here**
sprite('Action_190_07', 2) # 148-149 **attackbox here**
RefreshMultihit()
sprite('Action_190_08', 2) # 150-151 **attackbox here**
sprite('Action_190_09', 2) # 152-153
sprite('Action_190_10', 2) # 154-155
sprite('Action_190_11', 2) # 156-157
sprite('Action_190_12', 2) # 158-159
SFX_3('SE_SwingLightSword')
sprite('Action_190_13', 2) # 160-161 **attackbox here**
RefreshMultihit()
sprite('Action_190_14', 2) # 162-163 **attackbox here**
sprite('Action_190_15', 2) # 164-165 **attackbox here**
RefreshMultihit()
sprite('Action_190_16', 2) # 166-167 **attackbox here**
sprite('Action_190_17', 2) # 168-169
Unknown1084(1)
sprite('Action_190_18', 2) # 170-171
teleportRelativeX(20000)
sprite('Action_190_19', 2) # 172-173
teleportRelativeX(20000)
sprite('Action_190_20', 2) # 174-175
teleportRelativeX(20000)
SFX_3('SE_SwingLightSword')
sprite('Action_190_21', 3) # 176-178 **attackbox here**
teleportRelativeX(20000)
RefreshMultihit()
sprite('Action_190_22', 4) # 179-182 **attackbox here**
teleportRelativeX(20000)
sprite('Action_190_23', 6) # 183-188 **attackbox here**
teleportRelativeX(20000)
RefreshMultihit()
AirPushbackX(5000)
AirPushbackY(28000)
YImpluseBeforeWallbounce(900)
sprite('Action_190_24', 6) # 189-194 **attackbox here**
teleportRelativeX(20000)
sprite('Action_190_25', 7) # 195-201
physicsXImpulse(0)
sprite('Action_190_26', 10) # 202-211
tag_voice(0, 'uli251_0', 'uli251_1', '', '')
sprite('Action_190_27', 2) # 212-213
Unknown11057(1000)
GFX_0('UltimateSlashOD', 100)
SFX_3('SE_BigBomb')
Unknown2015(-1)
sprite('Action_190_28', 4) # 214-217 **attackbox here**
Unknown11001(0, 0, 0)
AirPushbackX(25000)
AirPushbackY(-45000)
Unknown30055('305705003200000000000000')
Hitstop(0)
RefreshMultihit()
sprite('Action_190_29', 38) # 218-255
GFX_0('UltimateLightwallDDDOD', 0)
setInvincible(0)
setInvincibleFor(0)
clearUponHandler(78)
sprite('Action_190_30', 2) # 256-257
sprite('Action_190_31', 6) # 258-263
sprite('Action_190_32', 3) # 264-266
sprite('Action_190_33', 5) # 267-271
sprite('Action_190_34', 3) # 272-274
sprite('Action_190_35', 6) # 275-280
sprite('Action_190_36', 3) # 281-283
sprite('Action_190_37', 4) # 284-287 **attackbox here**
SFX_3('SE_SwingLightSword')
GFX_0('UltimateAssaultFinish', 100)
GroundedHitstunAnimation(10)
AirHitstunAnimation(10)
AirPushbackX(1000)
AirPushbackY(20000)
sprite('Action_190_38', 36) # 288-323
Unknown21012('556c74696d6174654c6967687477616c6c4566664f440000000000000000000029000000')
sprite('Action_190_39', 4) # 324-327
sprite('Action_190_40', 6) # 328-333
sprite('Action_190_41', 4) # 334-337
sprite('Action_190_42', 4) # 338-341
sprite('Action_190_43', 3) # 342-344
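# CmnActChangePartnerBurst -- burst tag-in: same pattern as CmnActChangePartnerIn (wait for event 41, teleport in,
# loop the falling Action_146 frames at label(1), land at label(9)), with different timings and Unknown1007 used in
# place of teleportRelativeY.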
@State
def CmnActChangePartnerBurst():
def upon_IMMEDIATE():
Unknown17021('')
def upon_41():
clearUponHandler(41)
sendToLabel(0)
def upon_LANDING():
clearUponHandler(2)
sendToLabel(9)
sprite('null', 120) # 1-120
label(0)
sprite('null', 5) # 121-125
sprite('Action_146_01', 6) # 126-131
Unknown1086(22)
teleportRelativeX(-150000)
Unknown1007(2400000)
physicsYImpulse(-96000)
setGravity(0)
Unknown2053(1)
sprite('Action_146_02', 3) # 132-134
SFX_0('004_swing_grap_1_1')
SFX_0('000_airdash_2')
label(1)
sprite('Action_146_03ex', 3) # 135-137 **attackbox here**
sprite('Action_146_04ex', 3) # 138-140 **attackbox here**
loopRest()
gotoLabel(1)
label(9)
sprite('keep', 3) # 141-143
Unknown1084(1)
sprite('Action_146_05', 5) # 144-148
Unknown8000(100, 1, 1)
sprite('Action_146_06', 18) # 149-166
sprite('Action_146_07', 5) # 167-171
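# MouthTableInit -- subroutine that registers per-voice-line mouth tables (presumably lip-sync timing data) via
# Unknown18011 and maps them to numeric indices with Unknown30092; the SLOT_172 branch re-registers an alternate
# table set for the same voice IDs.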
@Subroutine
def MouthTableInit():
Unknown18011('uli000', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli500', 12643, 14177, 14179, 14177, 13411, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli500', '001')
Unknown18011('uli501', 12643, 14177, 14179, 14177, 13411, 24887, 25399, 24887, 25399, 12337, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli501', '002')
Unknown18011('uli502', 12643, 14177, 12643, 24884, 25399, 24887, 25399, 24887, 25399, 14131, 14177, 14179, 14177, 14179, 14177, 13411, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli502', '003')
Unknown18011('uli503', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli503', '004')
Unknown18011('uli504', 12643, 14177, 12643, 24884, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 14133, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli504', '005')
Unknown18011('uli505', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13411, 24885, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli505', '006')
Unknown18011('uli520', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13411, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 14131, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli520', '007')
Unknown18011('uli521', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli521', '008')
Unknown18011('uli522', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13667, 24887, 25399, 24887, 25399, 12337, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli522', '009')
Unknown18011('uli523', 12643, 14177, 12643, 24880, 25399, 24887, 12337, 14179, 14177, 14179, 12641, 25392, 24887, 25399, 24887, 25399, 24887, 25399, 14132, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli523', '010')
Unknown18011('uli524', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13155, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli524', '011')
Unknown18011('uli525', 12643, 14177, 14179, 14177, 13411, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli525', '012')
Unknown18011('uli402_0', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli402_1', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli403_0', 12643, 14177, 14179, 14177, 12643, 24882, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli403_1', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601bes', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13155, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600bha', 12643, 14177, 14179, 14177, 14179, 14177, 13411, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601bpt', 12643, 14177, 14179, 14177, 13667, 24882, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600brc', 12643, 14177, 14179, 14177, 13667, 24882, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600pla', 12643, 14177, 12643, 24880, 25399, 24887, 25399, 24887, 25399, 14131, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601pyo', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13155, 24880, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601uhy', 12643, 14177, 13411, 24887, 25399, 13105, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 12899, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600umi', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13155, 24887, 25399, 24887, 25399, 24887, 25399, 12337, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601uor', 12643, 12641, 25396, 14132, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13411, 24882, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600uva', 12643, 14177, 14179, 14177, 14179, 14177, 12899, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 14132, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601uwa', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13411, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601bce', 13155, 14433, 14435, 14433, 14435, 14433, 14435, 14433, 13155, 24885, 25400, 13366, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601use', 13155, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13411, 24881, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600pel', 12643, 14433, 14435, 14433, 14435, 14433, 14435, 14433, 13411, 24885, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601uhi', 12643, 14177, 14179, 14177, 14179, 14177, 13155, 24889, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli600bha', '017')
Unknown30092('uli600brc', '018')
Unknown30092('uli600pla', '019')
Unknown30092('uli600umi', '020')
Unknown30092('uli600uva', '021')
Unknown30092('uli601bes', '022')
Unknown30092('uli601bpt', '023')
Unknown30092('uli601pyo', '024')
Unknown30092('uli601uhy', '025')
Unknown30092('uli601uor', '026')
Unknown30092('uli601uwa', '027')
Unknown30092('uli601bce', '028')
Unknown30092('uli601use', '029')
Unknown30092('uli600pel', '030')
Unknown30092('uli601uhi', '031')
Unknown18011('uli701bes', 12643, 12641, 25392, 12340, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701bha', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13411, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700bpt', 12643, 12641, 25394, 12341, 14177, 13155, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701brc', 12643, 12641, 25392, 14133, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700pla', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701pyo', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700uhy', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 12899, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700umi', 12643, 14177, 14179, 24880, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 12849, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701uor', 12643, 14177, 13155, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700uva', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13411, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701uwa', 12643, 14177, 12643, 24882, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 14131, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 12899, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 12594, 12643, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli703pyo', 12643, 14177, 12899, 24880, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701bce', 12899, 14177, 13667, 24885, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701use', 13155, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700pel', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13155, 24888, 25398, 24886, 25398, 24886, 25398, 24886, 25398, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700uhi', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13155, 24884, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown30092('uli700bpt', '032')
Unknown30092('uli700pla', '033')
Unknown30092('uli700uhy', '034')
Unknown30092('uli700umi', '035')
Unknown30092('uli700uva', '036')
Unknown30092('uli701bes', '037')
Unknown30092('uli701bha', '038')
Unknown30092('uli701brc', '039')
Unknown30092('uli701pyo', '040')
Unknown30092('uli701uor', '041')
Unknown30092('uli701uwa', '042')
Unknown30092('uli703pyo', '043')
Unknown30092('uli701bce', '044')
Unknown30092('uli701use', '045')
Unknown30092('uli700pel', '046')
Unknown30092('uli700uhi', '047')
if SLOT_172:
Unknown18011('uli000', 12643, 12899, 14177, 14179, 14177, 14179, 14177, 12899, 12643, 14177, 14179, 13409, 12643, 14177, 14179, 14177, 14179, 13667, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli500', 12643, 14177, 14179, 14177, 14179, 14177, 13411, 13155, 24880, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25396, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli501', 12643, 12899, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 13409, 12899, 24885, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25395, 53, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli502', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13667, 13667, 24880, 25399, 24887, 25399, 24887, 25395, 24882, 25399, 25399, 24882, 25399, 24887, 25399, 25394, 12594, 14177, 13155, 14177, 14179, 14177, 13155, 13411, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli503', 12643, 13155, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 12643, 12643, 14177, 14179, 14177, 14179, 13409, 12643, 14177, 14179, 13921, 12643, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli504', 12643, 13667, 14177, 14179, 12897, 12899, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 12641, 13411, 24880, 25399, 24887, 25399, 24887, 25396, 56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli505', 12643, 13155, 14177, 14179, 14177, 14179, 13665, 12899, 14177, 12899, 13411, 24880, 25399, 24887, 25399, 24887, 25399, 24887, 25397, 24882, 25396, 24883, 25399, 24886, 25399, 25399, 24882, 25399, 24887, 25399, 24887, 25399, 25398, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli520', 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 13411, 24887, 25399, 24887, 25399, 25395, 24882, 25399, 24887, 25399, 25395, 24881, 25399, 24887, 25399, 24887, 25398, 24882, 25399, 24887, 25395, 24883, 25399, 24887, 25399, 25393, 24881, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25394, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli521', 12643, 12643, 14177, 14179, 14177, 13667, 14177, 14179, 14177, 14179, 14177, 13667, 13667, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli522', 12643, 12643, 14177, 14179, 13665, 12643, 14177, 14179, 14177, 14179, 13665, 12899, 13665, 13411, 24885, 25399, 24887, 25399, 24887, 25399, 25396, 24882, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25397, 14385, 14177, 14179, 14177, 14179, 14177, 13923, 13667, 14177, 13923, 13155, 14177, 13411, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli523', 12643, 12643, 14177, 14179, 12643, 14177, 14179, 14177, 14179, 14177, 13667, 12643, 14177, 14179, 13153, 12899, 14177, 14179, 14177, 14179, 13409, 12899, 14177, 13923, 13665, 12899, 24881, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25393, 24881, 25399, 24887, 25393, 24881, 25399, 24887, 25394, 24883, 25399, 24887, 25399, 24887, 25399, 25396, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli524', 12643, 13155, 14177, 14179, 14177, 14179, 14177, 14179, 13411, 24884, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25396, 24881, 25399, 24887, 25397, 12849, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli525', 12643, 12643, 14177, 14179, 14177, 13411, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 12899, 13411, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli402_0', 12643, 14177, 14179, 14177, 14179, 13155, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 13665, 13411, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli402_1', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 13411, 13411, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli403_0', 12643, 12643, 14177, 13411, 13155, 24880, 25399, 24887, 25399, 24887, 25399, 24887, 25396, 24884, 25399, 25397, 56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli403_1', 12643, 12899, 14177, 14179, 14177, 13155, 12643, 14177, 13923, 13155, 14177, 14179, 14177, 14179, 14177, 12899, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601bes', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 12899, 24888, 25399, 24887, 25399, 25397, 24882, 25399, 24881, 25399, 24887, 25399, 24887, 25401, 25399, 25395, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600bha', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 13411, 24885, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25394, 24881, 25399, 24887, 25399, 25397, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601bpt', 12643, 13155, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13923, 13155, 24886, 25399, 24887, 25399, 24887, 25399, 25397, 24883, 25399, 24887, 25394, 24882, 25399, 24887, 25399, 25396, 12337, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600brc', 12643, 12643, 14177, 14179, 12899, 14177, 12643, 12643, 24887, 25399, 24887, 25399, 24883, 25399, 24883, 25399, 24887, 25396, 24881, 25399, 25399, 24883, 25399, 24887, 25399, 25399, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600pla', 12643, 12643, 14177, 14179, 14177, 13155, 12899, 14177, 13667, 12643, 14177, 14179, 14177, 13923, 12899, 24881, 25399, 24887, 25399, 25393, 24881, 25399, 24887, 25399, 24887, 25399, 25395, 24881, 25399, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601pyo', 12643, 12643, 14177, 14179, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 13411, 24886, 25399, 24887, 25399, 25399, 24883, 25399, 24887, 25399, 25399, 24881, 25399, 24887, 12849, 14179, 13923, 12643, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601uhy', 12643, 13667, 14177, 13923, 13667, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25395, 24881, 25399, 24887, 25399, 25393, 12337, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600umi', 12643, 12643, 14177, 14179, 13665, 13411, 14177, 14179, 14177, 14179, 14177, 13923, 13155, 14177, 14179, 14177, 12899, 12899, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25396, 53, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601uor', 12643, 12643, 14177, 14179, 13921, 13155, 24880, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25393, 14385, 14177, 14179, 13665, 12643, 14177, 14179, 14177, 14179, 14177, 12899, 12643, 12641, 12899, 13409, 12899, 14177, 14179, 14177, 14179, 13921, 13411, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600uva', 12643, 12643, 14177, 14179, 14177, 14179, 13921, 13923, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13411, 13155, 24889, 25399, 24887, 25399, 24882, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25395, 24885, 12849, 14179, 14179, 14435, 12643, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601uwa', 12643, 12643, 14177, 13667, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 12899, 12899, 14177, 12899, 13411, 14177, 14179, 14177, 14179, 12641, 12899, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13155, 14179, 13923, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601bce', 12643, 12643, 14177, 14179, 12897, 12899, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13155, 13411, 24881, 25399, 24887, 25395, 24882, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25398, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601use', 12643, 12643, 14177, 14179, 14177, 14179, 13153, 13411, 14177, 14179, 13665, 13667, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 12643, 14177, 13923, 12899, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli600pel', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 12897, 12899, 14177, 14179, 12897, 12643, 14433, 14179, 13923, 12643, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli601uhi', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 12899, 14177, 14179, 13665, 12643, 13409, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13155, 12899, 14177, 12643, 13923, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701bes', 12643, 12643, 14177, 14179, 14177, 13411, 13155, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 13411, 14177, 14179, 14177, 14179, 14177, 12899, 13923, 12643, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701bha', 12643, 13155, 14177, 12899, 13155, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 13409, 12643, 24887, 25399, 24887, 25399, 24887, 25399, 25398, 24882, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25398, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700bpt', 12643, 12643, 14177, 14179, 14177, 12643, 12643, 12641, 13155, 24887, 25399, 24887, 25399, 25397, 24884, 25399, 25394, 24881, 25399, 24887, 25399, 24881, 25399, 25393, 56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701brc', 12643, 12643, 14177, 14179, 14177, 13667, 12643, 24881, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24881, 25399, 25394, 24883, 25399, 24887, 25399, 25398, 24882, 25398, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700pla', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13923, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701pyo', 12643, 12643, 14177, 14179, 14177, 13411, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 12641, 25392, 25399, 25397, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700uhy', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 12643, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 13921, 12643, 14177, 13155, 12643, 14177, 14179, 14177, 14179, 14177, 13667, 12899, 14177, 12899, 13667, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700umi', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 13409, 12643, 14177, 14179, 14177, 14179, 12643, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701uor', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 12897, 12643, 14177, 14179, 14177, 13411, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13923, 12643, 14177, 14179, 14177, 14179, 12641, 25394, 25396, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700uva', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 13153, 13155, 14177, 14179, 14177, 13155, 12643, 14177, 14179, 14177, 13155, 12899, 14689, 14179, 14179, 12643, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701uwa', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14435, 12641, 25393, 24887, 25399, 12593, 14433, 14179, 14177, 12643, 24880, 25399, 24887, 13105, 12643, 24880, 12849, 12643, 24880, 25399, 24887, 13361, 12643, 24887, 25399, 24887, 25399, 24887, 25399, 24884, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25398, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli703pyo', 12643, 13155, 14177, 12643, 12643, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25397, 24883, 25399, 25393, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701bce', 12643, 14177, 14179, 12641, 13411, 24883, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25394, 24882, 25399, 25393, 53, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli701use', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 13923, 13923, 14177, 14179, 14177, 14179, 13153, 13667, 14177, 14179, 14177, 14179, 14177, 13667, 13923, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700pel', 12643, 12643, 14177, 14179, 13921, 12899, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 12643, 12643, 24880, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 24887, 25399, 25395, 53, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
Unknown18011('uli700uhi', 12643, 12643, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 14177, 14179, 13409, 12643, 14177, 12643, 13155, 14177, 14179, 14177, 13667, 13411, 14177, 14179, 14177, 14179, 14177, 13155, 13667, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
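# CmnActEntry -- round-start intro: waits while SLOT_17 is set, then branches on PartnerChar to a partner-specific
# intro label (100, 110, 120, ...) with a matching SFX_1 voice line; SLOT_169/122/123 or an unrecognized partner
# falls through to the generic intro at label(482). The partner branches end by looping the Action_000 idle cycle.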
@State
def CmnActEntry():
label(0)
sprite('null', 1) # 1-1
loopRest()
if SLOT_17:
_gotolabel(0)
if SLOT_169:
_gotolabel(482)
if SLOT_122:
_gotolabel(482)
if SLOT_123:
_gotolabel(482)
PartnerChar('brc')
if SLOT_ReturnVal:
_gotolabel(100)
PartnerChar('bha')
if SLOT_ReturnVal:
_gotolabel(110)
PartnerChar('bpt')
if SLOT_ReturnVal:
_gotolabel(120)
PartnerChar('bes')
if SLOT_ReturnVal:
_gotolabel(130)
PartnerChar('pyo')
if SLOT_ReturnVal:
_gotolabel(140)
PartnerChar('uhy')
if SLOT_ReturnVal:
_gotolabel(150)
PartnerChar('uwa')
if SLOT_ReturnVal:
_gotolabel(160)
PartnerChar('uor')
if SLOT_ReturnVal:
_gotolabel(170)
PartnerChar('uva')
if SLOT_ReturnVal:
_gotolabel(180)
PartnerChar('pla')
if SLOT_ReturnVal:
_gotolabel(190)
PartnerChar('umi')
if SLOT_ReturnVal:
_gotolabel(200)
PartnerChar('bce')
if SLOT_ReturnVal:
_gotolabel(210)
PartnerChar('use')
if SLOT_ReturnVal:
_gotolabel(220)
PartnerChar('uhi')
if SLOT_ReturnVal:
_gotolabel(230)
PartnerChar('pel')
if SLOT_ReturnVal:
_gotolabel(240)
label(482)
Unknown19(991, 2, 158)
sprite('Action_050_02', 5) # 2-6 **attackbox here**
Unknown23029(11, 9902, 0)
if SLOT_158:
if random_(2, 0, 50):
Unknown7006('uli500', 100, 896101493, 12592, 0, 0, 100, 896101493, 12848, 0, 0, 100, 0, 0, 0, 0, 0)
else:
Unknown7006('uli503', 100, 896101493, 13360, 0, 0, 100, 896101493, 13616, 0, 0, 100, 0, 0, 0, 0, 0)
sprite('Action_050_03', 5) # 7-11 **attackbox here**
sprite('Action_050_04', 5) # 12-16 **attackbox here**
label(1)
sprite('Action_050_02', 5) # 17-21 **attackbox here**
sprite('Action_050_03', 5) # 22-26 **attackbox here**
sprite('Action_050_04', 5) # 27-31 **attackbox here**
if SLOT_97:
_gotolabel(1)
sprite('Action_050_05', 7) # 32-38
sprite('Action_050_06', 9) # 39-47
sprite('Action_050_07', 4) # 48-51
sprite('Action_050_08', 5) # 52-56
SFX_0('019_cloth_a')
sprite('Action_050_09', 8) # 57-64
sprite('Action_050_10', 7) # 65-71
sprite('Action_050_11', 6) # 72-77 **attackbox here**
sprite('Action_050_12', 5) # 78-82 **attackbox here**
sprite('Action_050_13', 5) # 83-87 **attackbox here**
Unknown23018(1)
sprite('Action_050_14', 20) # 88-107 **attackbox here**
sprite('Action_050_15', 5) # 108-112
SFX_0('019_cloth_c')
sprite('Action_050_16', 4) # 113-116
sprite('Action_050_17', 5) # 117-121
SFX_3('SE010')
sprite('Action_050_18', 5) # 122-126
sprite('Action_050_19', 2) # 127-128
loopRest()
ExitState()
label(10)
sprite('Action_000_25', 32767) # 129-32895 **attackbox here**
SFX_1('uli700umi')
label(100)
sprite('Action_000_15', 7) # 32896-32902 **attackbox here**
if SLOT_158:
Unknown1000(-1230000)
else:
Unknown1000(-1465000)
Unknown23029(11, 9902, 0)
sprite('Action_000_16', 4) # 32903-32906 **attackbox here**
SFX_1('uli600brc')
sprite('Action_000_17', 7) # 32907-32913 **attackbox here**
SFX_0('003_swing_grap_0_0')
sprite('Action_000_18', 5) # 32914-32918 **attackbox here**
sprite('Action_000_19', 8) # 32919-32926 **attackbox here**
sprite('Action_000_20', 10) # 32927-32936 **attackbox here**
sprite('Action_000_21', 5) # 32937-32941 **attackbox here**
sprite('Action_000_22', 7) # 32942-32948 **attackbox here**
SFX_FOOTSTEP_(100, 0, 1)
sprite('Action_000_23', 3) # 32949-32951 **attackbox here**
sprite('Action_000_24', 20) # 32952-32971 **attackbox here**
label(101)
sprite('Action_000_25', 1) # 32972-32972 **attackbox here**
if SLOT_97:
_gotolabel(101)
sprite('Action_000_25', 30) # 32973-33002 **attackbox here**
sprite('Action_000_26', 5) # 33003-33007 **attackbox here**
Unknown21007(24, 40)
sprite('Action_000_27', 8) # 33008-33015 **attackbox here**
sprite('Action_000_28', 6) # 33016-33021 **attackbox here**
sprite('Action_000_29', 4) # 33022-33025 **attackbox here**
sprite('Action_000_30', 4) # 33026-33029 **attackbox here**
SFX_0('008_swing_pole_0')
sprite('Action_000_31', 6) # 33030-33035 **attackbox here**
sprite('Action_000_32', 7) # 33036-33042
Unknown21011(240)
label(102)
sprite('Action_000_00', 7) # 33043-33049 **attackbox here**
sprite('Action_000_01', 7) # 33050-33056 **attackbox here**
sprite('Action_000_02', 6) # 33057-33062 **attackbox here**
sprite('Action_000_03', 6) # 33063-33068 **attackbox here**
sprite('Action_000_04', 8) # 33069-33076 **attackbox here**
sprite('Action_000_05', 5) # 33077-33081 **attackbox here**
sprite('Action_000_06', 5) # 33082-33086 **attackbox here**
sprite('Action_000_07', 5) # 33087-33091 **attackbox here**
sprite('Action_000_08', 6) # 33092-33097 **attackbox here**
sprite('Action_000_09', 5) # 33098-33102 **attackbox here**
sprite('Action_000_10', 6) # 33103-33108 **attackbox here**
sprite('Action_000_11', 8) # 33109-33116 **attackbox here**
sprite('Action_000_12', 5) # 33117-33121 **attackbox here**
sprite('Action_000_13', 5) # 33122-33126 **attackbox here**
sprite('Action_000_14', 6) # 33127-33132 **attackbox here**
gotoLabel(102)
ExitState()
label(110)
sprite('Action_050_02', 5) # 33133-33137 **attackbox here**
Unknown23029(11, 9902, 0)
if SLOT_158:
Unknown1000(-1230000)
else:
Unknown1000(-1465000)
SFX_1('uli600bha')
sprite('Action_050_03', 5) # 33138-33142 **attackbox here**
sprite('Action_050_04', 5) # 33143-33147 **attackbox here**
label(111)
sprite('Action_050_02', 5) # 33148-33152 **attackbox here**
sprite('Action_050_03', 5) # 33153-33157 **attackbox here**
sprite('Action_050_04', 5) # 33158-33162 **attackbox here**
if SLOT_97:
_gotolabel(111)
sprite('Action_050_05', 7) # 33163-33169
sprite('Action_050_06', 9) # 33170-33178
sprite('Action_050_07', 4) # 33179-33182
sprite('Action_050_08', 5) # 33183-33187
SFX_0('019_cloth_a')
sprite('Action_050_09', 8) # 33188-33195
sprite('Action_050_10', 7) # 33196-33202
sprite('Action_050_11', 6) # 33203-33208 **attackbox here**
sprite('Action_050_12', 5) # 33209-33213 **attackbox here**
sprite('Action_050_13', 5) # 33214-33218 **attackbox here**
sprite('Action_050_14', 20) # 33219-33238 **attackbox here**
sprite('Action_050_15', 5) # 33239-33243
SFX_0('019_cloth_c')
sprite('Action_050_16', 4) # 33244-33247
sprite('Action_050_17', 5) # 33248-33252
SFX_3('SE010')
sprite('Action_050_18', 5) # 33253-33257
Unknown21007(24, 40)
sprite('Action_050_19', 2) # 33258-33259
Unknown21011(420)
label(112)
sprite('Action_000_00', 7) # 33260-33266 **attackbox here**
sprite('Action_000_01', 7) # 33267-33273 **attackbox here**
sprite('Action_000_02', 6) # 33274-33279 **attackbox here**
sprite('Action_000_03', 6) # 33280-33285 **attackbox here**
sprite('Action_000_04', 8) # 33286-33293 **attackbox here**
sprite('Action_000_05', 5) # 33294-33298 **attackbox here**
sprite('Action_000_06', 5) # 33299-33303 **attackbox here**
sprite('Action_000_07', 5) # 33304-33308 **attackbox here**
sprite('Action_000_08', 6) # 33309-33314 **attackbox here**
sprite('Action_000_09', 5) # 33315-33319 **attackbox here**
sprite('Action_000_10', 6) # 33320-33325 **attackbox here**
sprite('Action_000_11', 8) # 33326-33333 **attackbox here**
sprite('Action_000_12', 5) # 33334-33338 **attackbox here**
sprite('Action_000_13', 5) # 33339-33343 **attackbox here**
sprite('Action_000_14', 6) # 33344-33349 **attackbox here**
gotoLabel(112)
ExitState()
label(120)
sprite('Action_050_02', 1) # 33350-33350 **attackbox here**
Unknown23029(11, 9902, 0)
if SLOT_158:
Unknown1000(-1230000)
else:
Unknown1000(-1465000)
def upon_40():
clearUponHandler(40)
sendToLabel(122)
SFX_1('uli601bpt')
label(121)
sprite('Action_050_02', 5) # 33351-33355 **attackbox here**
sprite('Action_050_03', 5) # 33356-33360 **attackbox here**
sprite('Action_050_04', 5) # 33361-33365 **attackbox here**
gotoLabel(121)
label(122)
sprite('Action_050_02', 5) # 33366-33370 **attackbox here**
sprite('Action_050_03', 5) # 33371-33375 **attackbox here**
sprite('Action_050_04', 5) # 33376-33380 **attackbox here**
if SLOT_97:
_gotolabel(122)
sprite('Action_050_05', 7) # 33381-33387
sprite('Action_050_06', 9) # 33388-33396
sprite('Action_050_07', 4) # 33397-33400
sprite('Action_050_08', 5) # 33401-33405
SFX_0('019_cloth_a')
sprite('Action_050_09', 8) # 33406-33413
sprite('Action_050_10', 7) # 33414-33420
sprite('Action_050_11', 6) # 33421-33426 **attackbox here**
sprite('Action_050_12', 5) # 33427-33431 **attackbox here**
sprite('Action_050_13', 5) # 33432-33436 **attackbox here**
sprite('Action_050_14', 20) # 33437-33456 **attackbox here**
sprite('Action_050_15', 5) # 33457-33461
SFX_0('019_cloth_c')
sprite('Action_050_16', 4) # 33462-33465
sprite('Action_050_17', 5) # 33466-33470
SFX_3('SE010')
sprite('Action_050_18', 5) # 33471-33475
sprite('Action_050_19', 2) # 33476-33477
Unknown21011(120)
label(123)
sprite('Action_000_00', 7) # 33478-33484 **attackbox here**
sprite('Action_000_01', 7) # 33485-33491 **attackbox here**
sprite('Action_000_02', 6) # 33492-33497 **attackbox here**
sprite('Action_000_03', 6) # 33498-33503 **attackbox here**
sprite('Action_000_04', 8) # 33504-33511 **attackbox here**
sprite('Action_000_05', 5) # 33512-33516 **attackbox here**
sprite('Action_000_06', 5) # 33517-33521 **attackbox here**
sprite('Action_000_07', 5) # 33522-33526 **attackbox here**
sprite('Action_000_08', 6) # 33527-33532 **attackbox here**
sprite('Action_000_09', 5) # 33533-33537 **attackbox here**
sprite('Action_000_10', 6) # 33538-33543 **attackbox here**
sprite('Action_000_11', 8) # 33544-33551 **attackbox here**
sprite('Action_000_12', 5) # 33552-33556 **attackbox here**
sprite('Action_000_13', 5) # 33557-33561 **attackbox here**
sprite('Action_000_14', 6) # 33562-33567 **attackbox here**
gotoLabel(123)
ExitState()
label(130)
sprite('Action_000_00', 1) # 33568-33568 **attackbox here**
Unknown23029(11, 9902, 0)
if SLOT_158:
Unknown1000(-1230000)
else:
Unknown1000(-1465000)
def upon_40():
clearUponHandler(40)
sendToLabel(132)
label(131)
sprite('Action_000_00', 7) # 33569-33575 **attackbox here**
sprite('Action_000_01', 7) # 33576-33582 **attackbox here**
sprite('Action_000_02', 6) # 33583-33588 **attackbox here**
sprite('Action_000_03', 6) # 33589-33594 **attackbox here**
sprite('Action_000_04', 8) # 33595-33602 **attackbox here**
sprite('Action_000_05', 5) # 33603-33607 **attackbox here**
sprite('Action_000_06', 5) # 33608-33612 **attackbox here**
sprite('Action_000_07', 5) # 33613-33617 **attackbox here**
sprite('Action_000_08', 6) # 33618-33623 **attackbox here**
sprite('Action_000_09', 5) # 33624-33628 **attackbox here**
sprite('Action_000_10', 6) # 33629-33634 **attackbox here**
sprite('Action_000_11', 8) # 33635-33642 **attackbox here**
sprite('Action_000_12', 5) # 33643-33647 **attackbox here**
sprite('Action_000_13', 5) # 33648-33652 **attackbox here**
sprite('Action_000_14', 6) # 33653-33658 **attackbox here**
gotoLabel(131)
label(132)
sprite('Action_000_15', 7) # 33659-33665 **attackbox here**
sprite('Action_000_16', 4) # 33666-33669 **attackbox here**
SFX_1('uli601bes')
sprite('Action_000_17', 7) # 33670-33676 **attackbox here**
SFX_0('003_swing_grap_0_0')
sprite('Action_000_18', 5) # 33677-33681 **attackbox here**
sprite('Action_000_19', 8) # 33682-33689 **attackbox here**
sprite('Action_000_20', 10) # 33690-33699 **attackbox here**
sprite('Action_000_21', 5) # 33700-33704 **attackbox here**
sprite('Action_000_22', 7) # 33705-33711 **attackbox here**
SFX_FOOTSTEP_(100, 0, 1)
sprite('Action_000_23', 3) # 33712-33714 **attackbox here**
sprite('Action_000_24', 20) # 33715-33734 **attackbox here**
label(133)
sprite('Action_000_25', 1) # 33735-33735 **attackbox here**
if SLOT_97:
_gotolabel(133)
sprite('Action_000_25', 30) # 33736-33765 **attackbox here**
sprite('Action_000_26', 5) # 33766-33770 **attackbox here**
sprite('Action_000_27', 8) # 33771-33778 **attackbox here**
sprite('Action_000_28', 6) # 33779-33784 **attackbox here**
sprite('Action_000_29', 4) # 33785-33788 **attackbox here**
sprite('Action_000_30', 4) # 33789-33792 **attackbox here**
SFX_0('008_swing_pole_0')
sprite('Action_000_31', 6) # 33793-33798 **attackbox here**
sprite('Action_000_32', 7) # 33799-33805
Unknown21011(120)
label(134)
sprite('Action_000_00', 7) # 33806-33812 **attackbox here**
sprite('Action_000_01', 7) # 33813-33819 **attackbox here**
sprite('Action_000_02', 6) # 33820-33825 **attackbox here**
sprite('Action_000_03', 6) # 33826-33831 **attackbox here**
sprite('Action_000_04', 8) # 33832-33839 **attackbox here**
sprite('Action_000_05', 5) # 33840-33844 **attackbox here**
sprite('Action_000_06', 5) # 33845-33849 **attackbox here**
sprite('Action_000_07', 5) # 33850-33854 **attackbox here**
sprite('Action_000_08', 6) # 33855-33860 **attackbox here**
sprite('Action_000_09', 5) # 33861-33865 **attackbox here**
sprite('Action_000_10', 6) # 33866-33871 **attackbox here**
sprite('Action_000_11', 8) # 33872-33879 **attackbox here**
sprite('Action_000_12', 5) # 33880-33884 **attackbox here**
sprite('Action_000_13', 5) # 33885-33889 **attackbox here**
sprite('Action_000_14', 6) # 33890-33895 **attackbox here**
gotoLabel(134)
ExitState()
label(140)
sprite('Action_050_02', 1) # 33896-33896 **attackbox here**
Unknown23029(11, 9902, 0)
if SLOT_158:
Unknown1000(-1230000)
else:
Unknown1000(-1465000)
def upon_40():
clearUponHandler(40)
sendToLabel(142)
SFX_1('uli601pyo')
label(141)
sprite('Action_050_02', 5) # 33897-33901 **attackbox here**
sprite('Action_050_03', 5) # 33902-33906 **attackbox here**
sprite('Action_050_04', 5) # 33907-33911 **attackbox here**
gotoLabel(141)
label(142)
sprite('Action_050_02', 5) # 33912-33916 **attackbox here**
sprite('Action_050_03', 5) # 33917-33921 **attackbox here**
sprite('Action_050_04', 5) # 33922-33926 **attackbox here**
if SLOT_97:
_gotolabel(142)
sprite('Action_050_05', 7) # 33927-33933
sprite('Action_050_06', 9) # 33934-33942
sprite('Action_050_07', 4) # 33943-33946
sprite('Action_050_08', 5) # 33947-33951
SFX_0('019_cloth_a')
sprite('Action_050_09', 8) # 33952-33959
sprite('Action_050_10', 7) # 33960-33966
sprite('Action_050_11', 6) # 33967-33972 **attackbox here**
sprite('Action_050_12', 5) # 33973-33977 **attackbox here**
sprite('Action_050_13', 5) # 33978-33982 **attackbox here**
sprite('Action_050_14', 20) # 33983-34002 **attackbox here**
sprite('Action_050_15', 5) # 34003-34007
SFX_0('019_cloth_c')
sprite('Action_050_16', 4) # 34008-34011
sprite('Action_050_17', 5) # 34012-34016
SFX_3('SE010')
sprite('Action_050_18', 5) # 34017-34021
sprite('Action_050_19', 2) # 34022-34023
Unknown21011(60)
label(143)
sprite('Action_000_00', 7) # 34024-34030 **attackbox here**
sprite('Action_000_01', 7) # 34031-34037 **attackbox here**
sprite('Action_000_02', 6) # 34038-34043 **attackbox here**
sprite('Action_000_03', 6) # 34044-34049 **attackbox here**
sprite('Action_000_04', 8) # 34050-34057 **attackbox here**
sprite('Action_000_05', 5) # 34058-34062 **attackbox here**
sprite('Action_000_06', 5) # 34063-34067 **attackbox here**
sprite('Action_000_07', 5) # 34068-34072 **attackbox here**
sprite('Action_000_08', 6) # 34073-34078 **attackbox here**
sprite('Action_000_09', 5) # 34079-34083 **attackbox here**
sprite('Action_000_10', 6) # 34084-34089 **attackbox here**
sprite('Action_000_11', 8) # 34090-34097 **attackbox here**
sprite('Action_000_12', 5) # 34098-34102 **attackbox here**
sprite('Action_000_13', 5) # 34103-34107 **attackbox here**
sprite('Action_000_14', 6) # 34108-34113 **attackbox here**
gotoLabel(143)
ExitState()
label(150)
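    # label(150): registers an upon_40 handler that plays the 'uli601uhy' cue, plus an
    # upon_CLEAR_OR_EXIT handler that, once SLOT_97 is clear, jumps to label(152) for the
    # Action_190 sequence.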
sprite('Action_000_00', 7) # 34114-34120 **attackbox here**
Unknown1000(-1260000)
Unknown2019(-1000)
Unknown23029(11, 9902, 0)
def upon_40():
clearUponHandler(40)
SFX_1('uli601uhy')
def upon_CLEAR_OR_EXIT():
if (not SLOT_97):
clearUponHandler(3)
Unknown21007(24, 40)
sendToLabel(152)
label(151)
sprite('Action_000_01', 7) # 34121-34127 **attackbox here**
sprite('Action_000_02', 6) # 34128-34133 **attackbox here**
sprite('Action_000_03', 6) # 34134-34139 **attackbox here**
sprite('Action_000_04', 8) # 34140-34147 **attackbox here**
sprite('Action_000_05', 5) # 34148-34152 **attackbox here**
sprite('Action_000_06', 5) # 34153-34157 **attackbox here**
sprite('Action_000_07', 5) # 34158-34162 **attackbox here**
sprite('Action_000_08', 6) # 34163-34168 **attackbox here**
sprite('Action_000_09', 5) # 34169-34173 **attackbox here**
sprite('Action_000_10', 6) # 34174-34179 **attackbox here**
sprite('Action_000_11', 8) # 34180-34187 **attackbox here**
sprite('Action_000_12', 5) # 34188-34192 **attackbox here**
sprite('Action_000_13', 5) # 34193-34197 **attackbox here**
sprite('Action_000_14', 6) # 34198-34203 **attackbox here**
sprite('Action_000_00', 7) # 34204-34210 **attackbox here**
gotoLabel(151)
label(152)
sprite('Action_190_34', 3) # 34211-34213
sprite('Action_190_35', 6) # 34214-34219
sprite('Action_190_36', 3) # 34220-34222
sprite('Action_190_37', 4) # 34223-34226 **attackbox here**
sprite('Action_190_38', 60) # 34227-34286
sprite('Action_190_39', 4) # 34287-34290
sprite('Action_190_40', 6) # 34291-34296
sprite('Action_190_41', 3) # 34297-34299
sprite('Action_190_42', 3) # 34300-34302
sprite('Action_190_43', 3) # 34303-34305
Unknown21011(30)
label(153)
sprite('Action_000_00', 7) # 34306-34312 **attackbox here**
sprite('Action_000_01', 7) # 34313-34319 **attackbox here**
sprite('Action_000_02', 6) # 34320-34325 **attackbox here**
sprite('Action_000_03', 6) # 34326-34331 **attackbox here**
sprite('Action_000_04', 8) # 34332-34339 **attackbox here**
sprite('Action_000_05', 5) # 34340-34344 **attackbox here**
sprite('Action_000_06', 5) # 34345-34349 **attackbox here**
sprite('Action_000_07', 5) # 34350-34354 **attackbox here**
sprite('Action_000_08', 6) # 34355-34360 **attackbox here**
sprite('Action_000_09', 5) # 34361-34365 **attackbox here**
sprite('Action_000_10', 6) # 34366-34371 **attackbox here**
sprite('Action_000_11', 8) # 34372-34379 **attackbox here**
sprite('Action_000_12', 5) # 34380-34384 **attackbox here**
sprite('Action_000_13', 5) # 34385-34389 **attackbox here**
sprite('Action_000_14', 6) # 34390-34395 **attackbox here**
gotoLabel(153)
label(160)
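    # label(160): the upon_40 handler redirects to label(161); the Lin637/Action_035 hop uses
    # physicsYImpulse/setGravity, and the handler set via sendToLabelUpon(2, 163) plays the
    # Action_023 landing frames before settling into the idle loop at label(164).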
sprite('Lin637_00', 32767) # 34396-67162 **attackbox here**
Unknown23029(11, 9902, 0)
Unknown1000(-1150000)
teleportRelativeY(240000)
def upon_40():
clearUponHandler(40)
sendToLabel(161)
SFX_1('uli601uwa')
label(161)
sprite('Lin637_00', 1) # 67163-67163 **attackbox here**
if SLOT_97:
_gotolabel(161)
sprite('Lin637_01', 6) # 67164-67169
sprite('Action_035_05', 6) # 67170-67175
physicsYImpulse(-1000)
setGravity(1000)
Unknown21007(24, 40)
sendToLabelUpon(2, 163)
sprite('Action_035_06', 6) # 67176-67181
label(162)
sprite('Action_022_00', 3) # 67182-67184
sprite('Action_022_01', 3) # 67185-67187
gotoLabel(162)
label(163)
sprite('Action_023_00', 3) # 67188-67190
clearUponHandler(2)
Unknown8000(100, 1, 1)
sprite('Action_023_01', 3) # 67191-67193
sprite('Action_023_02', 3) # 67194-67196
Unknown21011(120)
label(164)
sprite('Action_000_00', 7) # 67197-67203 **attackbox here**
sprite('Action_000_01', 7) # 67204-67210 **attackbox here**
sprite('Action_000_02', 6) # 67211-67216 **attackbox here**
sprite('Action_000_03', 6) # 67217-67222 **attackbox here**
sprite('Action_000_04', 8) # 67223-67230 **attackbox here**
sprite('Action_000_05', 5) # 67231-67235 **attackbox here**
sprite('Action_000_06', 5) # 67236-67240 **attackbox here**
sprite('Action_000_07', 5) # 67241-67245 **attackbox here**
sprite('Action_000_08', 6) # 67246-67251 **attackbox here**
sprite('Action_000_09', 5) # 67252-67256 **attackbox here**
sprite('Action_000_10', 6) # 67257-67262 **attackbox here**
sprite('Action_000_11', 8) # 67263-67270 **attackbox here**
sprite('Action_000_12', 5) # 67271-67275 **attackbox here**
sprite('Action_000_13', 5) # 67276-67280 **attackbox here**
sprite('Action_000_14', 6) # 67281-67286 **attackbox here**
gotoLabel(164)
ExitState()
label(170)
sprite('Action_050_02', 1) # 67287-67287 **attackbox here**
Unknown23029(11, 9902, 0)
if SLOT_158:
Unknown1000(-1230000)
else:
Unknown1000(-1465000)
def upon_40():
clearUponHandler(40)
sendToLabel(172)
SFX_1('uli601uor')
label(171)
sprite('Action_050_02', 5) # 67288-67292 **attackbox here**
sprite('Action_050_03', 5) # 67293-67297 **attackbox here**
sprite('Action_050_04', 5) # 67298-67302 **attackbox here**
gotoLabel(171)
label(172)
sprite('Action_050_02', 5) # 67303-67307 **attackbox here**
sprite('Action_050_03', 5) # 67308-67312 **attackbox here**
sprite('Action_050_04', 5) # 67313-67317 **attackbox here**
if SLOT_97:
_gotolabel(172)
sprite('Action_050_05', 7) # 67318-67324
sprite('Action_050_06', 9) # 67325-67333
sprite('Action_050_07', 4) # 67334-67337
sprite('Action_050_08', 5) # 67338-67342
SFX_0('019_cloth_a')
sprite('Action_050_09', 8) # 67343-67350
sprite('Action_050_10', 7) # 67351-67357
sprite('Action_050_11', 6) # 67358-67363 **attackbox here**
sprite('Action_050_12', 5) # 67364-67368 **attackbox here**
sprite('Action_050_13', 5) # 67369-67373 **attackbox here**
sprite('Action_050_14', 20) # 67374-67393 **attackbox here**
sprite('Action_050_15', 5) # 67394-67398
SFX_0('019_cloth_c')
sprite('Action_050_16', 4) # 67399-67402
sprite('Action_050_17', 5) # 67403-67407
SFX_3('SE010')
sprite('Action_050_18', 5) # 67408-67412
sprite('Action_050_19', 2) # 67413-67414
Unknown21011(120)
label(173)
sprite('Action_000_00', 7) # 67415-67421 **attackbox here**
sprite('Action_000_01', 7) # 67422-67428 **attackbox here**
sprite('Action_000_02', 6) # 67429-67434 **attackbox here**
sprite('Action_000_03', 6) # 67435-67440 **attackbox here**
sprite('Action_000_04', 8) # 67441-67448 **attackbox here**
sprite('Action_000_05', 5) # 67449-67453 **attackbox here**
sprite('Action_000_06', 5) # 67454-67458 **attackbox here**
sprite('Action_000_07', 5) # 67459-67463 **attackbox here**
sprite('Action_000_08', 6) # 67464-67469 **attackbox here**
sprite('Action_000_09', 5) # 67470-67474 **attackbox here**
sprite('Action_000_10', 6) # 67475-67480 **attackbox here**
sprite('Action_000_11', 8) # 67481-67488 **attackbox here**
sprite('Action_000_12', 5) # 67489-67493 **attackbox here**
sprite('Action_000_13', 5) # 67494-67498 **attackbox here**
sprite('Action_000_14', 6) # 67499-67504 **attackbox here**
gotoLabel(173)
ExitState()
label(180)
sprite('Action_050_02', 5) # 67505-67509 **attackbox here**
Unknown23029(11, 9902, 0)
if SLOT_158:
Unknown1000(-1230000)
else:
Unknown1000(-1465000)
sprite('Action_050_03', 5) # 67510-67514 **attackbox here**
SFX_1('uli600uva')
sprite('Action_050_04', 5) # 67515-67519 **attackbox here**
label(181)
sprite('Action_050_02', 5) # 67520-67524 **attackbox here**
sprite('Action_050_03', 5) # 67525-67529 **attackbox here**
sprite('Action_050_04', 5) # 67530-67534 **attackbox here**
if SLOT_97:
_gotolabel(181)
sprite('Action_050_05', 7) # 67535-67541
sprite('Action_050_06', 9) # 67542-67550
sprite('Action_050_07', 4) # 67551-67554
sprite('Action_050_08', 5) # 67555-67559
SFX_0('019_cloth_a')
sprite('Action_050_09', 8) # 67560-67567
sprite('Action_050_10', 7) # 67568-67574
sprite('Action_050_11', 6) # 67575-67580 **attackbox here**
sprite('Action_050_12', 5) # 67581-67585 **attackbox here**
sprite('Action_050_13', 5) # 67586-67590 **attackbox here**
sprite('Action_050_14', 20) # 67591-67610 **attackbox here**
sprite('Action_050_15', 5) # 67611-67615
SFX_0('019_cloth_c')
sprite('Action_050_16', 4) # 67616-67619
sprite('Action_050_17', 5) # 67620-67624
SFX_3('SE010')
sprite('Action_050_18', 5) # 67625-67629
Unknown21007(24, 40)
sprite('Action_050_19', 2) # 67630-67631
Unknown21011(300)
label(182)
sprite('Action_000_00', 7) # 67632-67638 **attackbox here**
sprite('Action_000_01', 7) # 67639-67645 **attackbox here**
sprite('Action_000_02', 6) # 67646-67651 **attackbox here**
sprite('Action_000_03', 6) # 67652-67657 **attackbox here**
sprite('Action_000_04', 8) # 67658-67665 **attackbox here**
sprite('Action_000_05', 5) # 67666-67670 **attackbox here**
sprite('Action_000_06', 5) # 67671-67675 **attackbox here**
sprite('Action_000_07', 5) # 67676-67680 **attackbox here**
sprite('Action_000_08', 6) # 67681-67686 **attackbox here**
sprite('Action_000_09', 5) # 67687-67691 **attackbox here**
sprite('Action_000_10', 6) # 67692-67697 **attackbox here**
sprite('Action_000_11', 8) # 67698-67705 **attackbox here**
sprite('Action_000_12', 5) # 67706-67710 **attackbox here**
sprite('Action_000_13', 5) # 67711-67715 **attackbox here**
sprite('Action_000_14', 6) # 67716-67721 **attackbox here**
gotoLabel(182)
ExitState()
label(190)
sprite('Action_050_02', 5) # 67722-67726 **attackbox here**
Unknown23029(11, 9902, 0)
if SLOT_158:
Unknown1000(-1230000)
else:
Unknown1000(-1465000)
sprite('Action_050_03', 5) # 67727-67731 **attackbox here**
SFX_1('uli600pla')
sprite('Action_050_04', 5) # 67732-67736 **attackbox here**
label(191)
sprite('Action_050_02', 5) # 67737-67741 **attackbox here**
sprite('Action_050_03', 5) # 67742-67746 **attackbox here**
sprite('Action_050_04', 5) # 67747-67751 **attackbox here**
if SLOT_97:
_gotolabel(191)
sprite('Action_050_05', 7) # 67752-67758
sprite('Action_050_06', 9) # 67759-67767
sprite('Action_050_07', 4) # 67768-67771
sprite('Action_050_08', 5) # 67772-67776
SFX_0('019_cloth_a')
sprite('Action_050_09', 8) # 67777-67784
Unknown21007(24, 40)
sprite('Action_050_10', 7) # 67785-67791
sprite('Action_050_11', 6) # 67792-67797 **attackbox here**
sprite('Action_050_12', 5) # 67798-67802 **attackbox here**
sprite('Action_050_13', 5) # 67803-67807 **attackbox here**
sprite('Action_050_14', 20) # 67808-67827 **attackbox here**
sprite('Action_050_15', 5) # 67828-67832
SFX_0('019_cloth_c')
sprite('Action_050_16', 4) # 67833-67836
sprite('Action_050_17', 5) # 67837-67841
SFX_3('SE010')
sprite('Action_050_18', 5) # 67842-67846
sprite('Action_050_19', 2) # 67847-67848
Unknown21011(240)
label(192)
sprite('Action_000_00', 7) # 67849-67855 **attackbox here**
sprite('Action_000_01', 7) # 67856-67862 **attackbox here**
sprite('Action_000_02', 6) # 67863-67868 **attackbox here**
sprite('Action_000_03', 6) # 67869-67874 **attackbox here**
sprite('Action_000_04', 8) # 67875-67882 **attackbox here**
sprite('Action_000_05', 5) # 67883-67887 **attackbox here**
sprite('Action_000_06', 5) # 67888-67892 **attackbox here**
sprite('Action_000_07', 5) # 67893-67897 **attackbox here**
sprite('Action_000_08', 6) # 67898-67903 **attackbox here**
sprite('Action_000_09', 5) # 67904-67908 **attackbox here**
sprite('Action_000_10', 6) # 67909-67914 **attackbox here**
sprite('Action_000_11', 8) # 67915-67922 **attackbox here**
sprite('Action_000_12', 5) # 67923-67927 **attackbox here**
sprite('Action_000_13', 5) # 67928-67932 **attackbox here**
sprite('Action_000_14', 6) # 67933-67938 **attackbox here**
gotoLabel(192)
ExitState()
label(200)
sprite('Action_050_02', 5) # 67939-67943 **attackbox here**
Unknown23029(11, 9902, 0)
if SLOT_158:
Unknown1000(-1230000)
else:
Unknown1000(-1465000)
sprite('Action_050_03', 5) # 67944-67948 **attackbox here**
SFX_1('uli600umi')
sprite('Action_050_04', 5) # 67949-67953 **attackbox here**
label(201)
sprite('Action_050_02', 5) # 67954-67958 **attackbox here**
sprite('Action_050_03', 5) # 67959-67963 **attackbox here**
sprite('Action_050_04', 5) # 67964-67968 **attackbox here**
if SLOT_97:
_gotolabel(201)
sprite('Action_050_05', 7) # 67969-67975
sprite('Action_050_06', 9) # 67976-67984
sprite('Action_050_07', 4) # 67985-67988
sprite('Action_050_08', 5) # 67989-67993
SFX_0('019_cloth_a')
sprite('Action_050_09', 8) # 67994-68001
Unknown21007(24, 40)
Unknown21011(300)
sprite('Action_050_10', 7) # 68002-68008
sprite('Action_050_11', 6) # 68009-68014 **attackbox here**
sprite('Action_050_12', 5) # 68015-68019 **attackbox here**
sprite('Action_050_13', 5) # 68020-68024 **attackbox here**
sprite('Action_050_14', 20) # 68025-68044 **attackbox here**
sprite('Action_050_15', 5) # 68045-68049
SFX_0('019_cloth_c')
sprite('Action_050_16', 4) # 68050-68053
sprite('Action_050_17', 5) # 68054-68058
SFX_3('SE010')
sprite('Action_050_18', 5) # 68059-68063
sprite('Action_050_19', 2) # 68064-68065
label(202)
sprite('Action_000_00', 7) # 68066-68072 **attackbox here**
sprite('Action_000_01', 7) # 68073-68079 **attackbox here**
sprite('Action_000_02', 6) # 68080-68085 **attackbox here**
sprite('Action_000_03', 6) # 68086-68091 **attackbox here**
sprite('Action_000_04', 8) # 68092-68099 **attackbox here**
sprite('Action_000_05', 5) # 68100-68104 **attackbox here**
sprite('Action_000_06', 5) # 68105-68109 **attackbox here**
sprite('Action_000_07', 5) # 68110-68114 **attackbox here**
sprite('Action_000_08', 6) # 68115-68120 **attackbox here**
sprite('Action_000_09', 5) # 68121-68125 **attackbox here**
sprite('Action_000_10', 6) # 68126-68131 **attackbox here**
sprite('Action_000_11', 8) # 68132-68139 **attackbox here**
sprite('Action_000_12', 5) # 68140-68144 **attackbox here**
sprite('Action_000_13', 5) # 68145-68149 **attackbox here**
sprite('Action_000_14', 6) # 68150-68155 **attackbox here**
gotoLabel(202)
ExitState()
label(210)
sprite('Action_050_02', 1) # 68156-68156 **attackbox here**
Unknown23029(11, 9902, 0)
def upon_40():
clearUponHandler(40)
SFX_1('uli601bce')
def upon_CLEAR_OR_EXIT():
if (not SLOT_97):
clearUponHandler(3)
sendToLabel(212)
label(211)
sprite('Action_050_02', 5) # 68157-68161 **attackbox here**
sprite('Action_050_03', 5) # 68162-68166 **attackbox here**
sprite('Action_050_04', 5) # 68167-68171 **attackbox here**
gotoLabel(211)
label(212)
sprite('Action_050_05', 7) # 68172-68178
sprite('Action_050_06', 9) # 68179-68187
sprite('Action_050_07', 4) # 68188-68191
sprite('Action_050_08', 5) # 68192-68196
SFX_0('019_cloth_a')
sprite('Action_050_09', 8) # 68197-68204
sprite('Action_050_10', 7) # 68205-68211
sprite('Action_050_11', 6) # 68212-68217 **attackbox here**
sprite('Action_050_12', 5) # 68218-68222 **attackbox here**
sprite('Action_050_13', 5) # 68223-68227 **attackbox here**
sprite('Action_050_14', 20) # 68228-68247 **attackbox here**
sprite('Action_050_15', 5) # 68248-68252
SFX_0('019_cloth_c')
sprite('Action_050_16', 4) # 68253-68256
sprite('Action_050_17', 5) # 68257-68261
SFX_3('SE010')
sprite('Action_050_18', 5) # 68262-68266
sprite('Action_050_19', 2) # 68267-68268
Unknown23018(1)
label(213)
sprite('Action_000_00', 7) # 68269-68275 **attackbox here**
sprite('Action_000_01', 7) # 68276-68282 **attackbox here**
sprite('Action_000_02', 6) # 68283-68288 **attackbox here**
sprite('Action_000_03', 6) # 68289-68294 **attackbox here**
sprite('Action_000_04', 8) # 68295-68302 **attackbox here**
sprite('Action_000_05', 5) # 68303-68307 **attackbox here**
sprite('Action_000_06', 5) # 68308-68312 **attackbox here**
sprite('Action_000_07', 5) # 68313-68317 **attackbox here**
sprite('Action_000_08', 6) # 68318-68323 **attackbox here**
sprite('Action_000_09', 5) # 68324-68328 **attackbox here**
sprite('Action_000_10', 6) # 68329-68334 **attackbox here**
sprite('Action_000_11', 8) # 68335-68342 **attackbox here**
sprite('Action_000_12', 5) # 68343-68347 **attackbox here**
sprite('Action_000_13', 5) # 68348-68352 **attackbox here**
sprite('Action_000_14', 6) # 68353-68358 **attackbox here**
gotoLabel(213)
ExitState()
label(220)
sprite('Action_000_25', 32767) # 68359-101125 **attackbox here**
Unknown1000(-1230000)
Unknown2005()
Unknown23029(11, 9902, 0)
if SLOT_158:
Unknown2019(-100)
def upon_40():
clearUponHandler(40)
SFX_1('uli601use')
Unknown23018(1)
ExitState()
label(230)
sprite('Action_050_02', 1) # 101126-101126 **attackbox here**
Unknown23029(11, 9902, 0)
def upon_40():
clearUponHandler(40)
SFX_1('uli601uhi')
def upon_CLEAR_OR_EXIT():
if (not SLOT_97):
clearUponHandler(3)
sendToLabel(232)
label(231)
sprite('Action_050_02', 5) # 101127-101131 **attackbox here**
sprite('Action_050_03', 5) # 101132-101136 **attackbox here**
sprite('Action_050_04', 5) # 101137-101141 **attackbox here**
gotoLabel(231)
label(232)
sprite('Action_050_05', 7) # 101142-101148
sprite('Action_050_06', 9) # 101149-101157
sprite('Action_050_07', 4) # 101158-101161
sprite('Action_050_08', 5) # 101162-101166
SFX_0('019_cloth_a')
sprite('Action_050_09', 8) # 101167-101174
sprite('Action_050_10', 7) # 101175-101181
sprite('Action_050_11', 6) # 101182-101187 **attackbox here**
sprite('Action_050_12', 5) # 101188-101192 **attackbox here**
sprite('Action_050_13', 5) # 101193-101197 **attackbox here**
sprite('Action_050_14', 20) # 101198-101217 **attackbox here**
sprite('Action_050_15', 5) # 101218-101222
SFX_0('019_cloth_c')
sprite('Action_050_16', 4) # 101223-101226
sprite('Action_050_17', 5) # 101227-101231
SFX_3('SE010')
sprite('Action_050_18', 5) # 101232-101236
sprite('Action_050_19', 2) # 101237-101238
Unknown23018(1)
label(233)
sprite('Action_000_00', 7) # 101239-101245 **attackbox here**
sprite('Action_000_01', 7) # 101246-101252 **attackbox here**
sprite('Action_000_02', 6) # 101253-101258 **attackbox here**
sprite('Action_000_03', 6) # 101259-101264 **attackbox here**
sprite('Action_000_04', 8) # 101265-101272 **attackbox here**
sprite('Action_000_05', 5) # 101273-101277 **attackbox here**
sprite('Action_000_06', 5) # 101278-101282 **attackbox here**
sprite('Action_000_07', 5) # 101283-101287 **attackbox here**
sprite('Action_000_08', 6) # 101288-101293 **attackbox here**
sprite('Action_000_09', 5) # 101294-101298 **attackbox here**
sprite('Action_000_10', 6) # 101299-101304 **attackbox here**
sprite('Action_000_11', 8) # 101305-101312 **attackbox here**
sprite('Action_000_12', 5) # 101313-101317 **attackbox here**
sprite('Action_000_13', 5) # 101318-101322 **attackbox here**
sprite('Action_000_14', 6) # 101323-101328 **attackbox here**
gotoLabel(233)
ExitState()
label(240)
sprite('Action_050_02', 5) # 101329-101333 **attackbox here**
Unknown23029(11, 9902, 0)
if SLOT_158:
Unknown1000(-1230000)
else:
Unknown1000(-1465000)
SFX_1('uli600pel')
sprite('Action_050_03', 5) # 101334-101338 **attackbox here**
sprite('Action_050_04', 5) # 101339-101343 **attackbox here**
label(241)
sprite('Action_050_02', 5) # 101344-101348 **attackbox here**
sprite('Action_050_03', 5) # 101349-101353 **attackbox here**
sprite('Action_050_04', 5) # 101354-101358 **attackbox here**
if SLOT_97:
_gotolabel(241)
sprite('Action_050_05', 7) # 101359-101365
sprite('Action_050_06', 9) # 101366-101374
sprite('Action_050_07', 4) # 101375-101378
sprite('Action_050_08', 5) # 101379-101383
SFX_0('019_cloth_a')
sprite('Action_050_09', 8) # 101384-101391
sprite('Action_050_10', 7) # 101392-101398
sprite('Action_050_11', 6) # 101399-101404 **attackbox here**
sprite('Action_050_12', 5) # 101405-101409 **attackbox here**
sprite('Action_050_13', 5) # 101410-101414 **attackbox here**
sprite('Action_050_14', 20) # 101415-101434 **attackbox here**
sprite('Action_050_15', 5) # 101435-101439
SFX_0('019_cloth_c')
sprite('Action_050_16', 4) # 101440-101443
sprite('Action_050_17', 5) # 101444-101448
SFX_3('SE010')
sprite('Action_050_18', 5) # 101449-101453
Unknown21007(24, 40)
sprite('Action_050_19', 2) # 101454-101455
Unknown21011(120)
label(242)
sprite('Action_000_00', 7) # 101456-101462 **attackbox here**
sprite('Action_000_01', 7) # 101463-101469 **attackbox here**
sprite('Action_000_02', 6) # 101470-101475 **attackbox here**
sprite('Action_000_03', 6) # 101476-101481 **attackbox here**
sprite('Action_000_04', 8) # 101482-101489 **attackbox here**
sprite('Action_000_05', 5) # 101490-101494 **attackbox here**
sprite('Action_000_06', 5) # 101495-101499 **attackbox here**
sprite('Action_000_07', 5) # 101500-101504 **attackbox here**
sprite('Action_000_08', 6) # 101505-101510 **attackbox here**
sprite('Action_000_09', 5) # 101511-101515 **attackbox here**
sprite('Action_000_10', 6) # 101516-101521 **attackbox here**
sprite('Action_000_11', 8) # 101522-101529 **attackbox here**
sprite('Action_000_12', 5) # 101530-101534 **attackbox here**
sprite('Action_000_13', 5) # 101535-101539 **attackbox here**
sprite('Action_000_14', 6) # 101540-101545 **attackbox here**
gotoLabel(242)
ExitState()
label(991)
sprite('Action_000_00', 1) # 101546-101546 **attackbox here**
Unknown23029(11, 9902, 0)
Unknown2019(1000)
Unknown21011(120)
label(992)
sprite('Action_000_00', 7) # 101547-101553 **attackbox here**
sprite('Action_000_01', 7) # 101554-101560 **attackbox here**
sprite('Action_000_02', 6) # 101561-101566 **attackbox here**
sprite('Action_000_03', 6) # 101567-101572 **attackbox here**
sprite('Action_000_04', 8) # 101573-101580 **attackbox here**
sprite('Action_000_05', 5) # 101581-101585 **attackbox here**
sprite('Action_000_06', 5) # 101586-101590 **attackbox here**
sprite('Action_000_07', 5) # 101591-101595 **attackbox here**
sprite('Action_000_08', 6) # 101596-101601 **attackbox here**
sprite('Action_000_09', 5) # 101602-101606 **attackbox here**
sprite('Action_000_10', 6) # 101607-101612 **attackbox here**
sprite('Action_000_11', 8) # 101613-101620 **attackbox here**
sprite('Action_000_12', 5) # 101621-101625 **attackbox here**
sprite('Action_000_13', 5) # 101626-101630 **attackbox here**
sprite('Action_000_14', 6) # 101631-101636 **attackbox here**
gotoLabel(992)
label(993)
sprite('Action_046_00', 2) # 101637-101638
def upon_LANDING():
clearUponHandler(2)
sendToLabel(995)
def upon_STATE_END():
Unknown2019(0)
Unknown3038(1)
Unknown3001(255)
Unknown2034(0)
EnableCollision(0)
Unknown2053(0)
Unknown3001(255)
Unknown3004(-20)
physicsXImpulse(-51000)
physicsYImpulse(18800)
setGravity(1500)
Unknown8002()
sprite('Action_046_01', 2) # 101639-101640
label(994)
sprite('Action_046_03', 3) # 101641-101643
sprite('Action_046_04', 3) # 101644-101646
loopRest()
gotoLabel(994)
label(995)
sprite('null', 3) # 101647-101649
ExitState()
@State
def CmnActMatchWin():
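    # Match-win state: SLOT_169, SLOT_122 or SLOT_123 jump straight to the default win pose at
    # label(482); otherwise, when SLOT_52 is set, a matching PartnerChar() with SLOT_145 <= 500000
    # redirects to the corresponding partner-specific label (100-240).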
if SLOT_169:
_gotolabel(482)
if SLOT_122:
_gotolabel(482)
if SLOT_123:
_gotolabel(482)
sprite('keep', 2) # 1-2
def upon_CLEAR_OR_EXIT():
SLOT_58 = 1
Unknown48('19000000020000003400000018000000020000003a000000')
if SLOT_52:
if PartnerChar('brc'):
if (SLOT_145 <= 500000):
sendToLabel(100)
clearUponHandler(3)
if PartnerChar('bha'):
if (SLOT_145 <= 500000):
sendToLabel(110)
clearUponHandler(3)
if PartnerChar('bpt'):
if (SLOT_145 <= 500000):
sendToLabel(120)
clearUponHandler(3)
if PartnerChar('bes'):
if (SLOT_145 <= 500000):
sendToLabel(130)
clearUponHandler(3)
if PartnerChar('pyo'):
if (SLOT_145 <= 500000):
sendToLabel(140)
clearUponHandler(3)
if PartnerChar('uhy'):
if (SLOT_145 <= 500000):
sendToLabel(150)
clearUponHandler(3)
if PartnerChar('uwa'):
if (SLOT_145 <= 500000):
sendToLabel(160)
clearUponHandler(3)
if PartnerChar('uor'):
if (SLOT_145 <= 500000):
sendToLabel(170)
clearUponHandler(3)
if PartnerChar('uva'):
if (SLOT_145 <= 500000):
sendToLabel(180)
clearUponHandler(3)
if PartnerChar('pla'):
if (SLOT_145 <= 500000):
sendToLabel(190)
clearUponHandler(3)
if PartnerChar('umi'):
if (SLOT_145 <= 500000):
sendToLabel(200)
clearUponHandler(3)
if PartnerChar('bce'):
if (SLOT_145 <= 500000):
sendToLabel(210)
clearUponHandler(3)
if PartnerChar('use'):
if (SLOT_145 <= 500000):
sendToLabel(220)
clearUponHandler(3)
if PartnerChar('uhi'):
if (SLOT_145 <= 500000):
sendToLabel(230)
clearUponHandler(3)
if PartnerChar('pel'):
if (SLOT_145 <= 500000):
sendToLabel(240)
clearUponHandler(3)
label(482)
sprite('keep', 1) # 3-3
clearUponHandler(3)
SLOT_58 = 0
sprite('Action_052_00', 6) # 4-9
if SLOT_158:
if SLOT_52:
Unknown7006('uli524', 100, 896101493, 13618, 0, 0, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
elif SLOT_108:
Unknown7006('uli402_0', 100, 879324277, 828322352, 0, 0, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
else:
Unknown7006('uli520', 100, 896101493, 12594, 0, 0, 100, 896101493, 12850, 0, 0, 100, 896101493, 13106, 0, 0, 100)
sprite('Action_052_01', 6) # 10-15
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 16-19
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 20-26
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 27-29
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 30-39
sprite('Action_052_06', 6) # 40-45
sprite('Action_052_07', 5) # 46-50
Unknown23018(1)
sprite('Action_052_08', 7) # 51-57 **attackbox here**
label(1000)
sprite('Action_052_09', 30) # 58-87 **attackbox here**
loopRest()
gotoLabel(1000)
label(100)
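    # Partner-specific win variant ('brc'): idles in the label(101) loop until the upon_40 handler
    # fires, then label(102) plays the Action_052 sequence together with the 'uli701brc' cue.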
sprite('Action_000_00', 1) # 88-88 **attackbox here**
Unknown2019(1000)
def upon_40():
clearUponHandler(40)
sendToLabel(102)
label(101)
sprite('Action_000_00', 7) # 89-95 **attackbox here**
sprite('Action_000_01', 7) # 96-102 **attackbox here**
sprite('Action_000_02', 6) # 103-108 **attackbox here**
sprite('Action_000_03', 6) # 109-114 **attackbox here**
sprite('Action_000_04', 8) # 115-122 **attackbox here**
sprite('Action_000_05', 5) # 123-127 **attackbox here**
sprite('Action_000_06', 5) # 128-132 **attackbox here**
sprite('Action_000_07', 5) # 133-137 **attackbox here**
sprite('Action_000_08', 6) # 138-143 **attackbox here**
sprite('Action_000_09', 5) # 144-148 **attackbox here**
sprite('Action_000_10', 6) # 149-154 **attackbox here**
sprite('Action_000_11', 8) # 155-162 **attackbox here**
sprite('Action_000_12', 5) # 163-167 **attackbox here**
sprite('Action_000_13', 5) # 168-172 **attackbox here**
sprite('Action_000_14', 6) # 173-178 **attackbox here**
gotoLabel(101)
label(102)
sprite('Action_052_00', 6) # 179-184
SFX_1('uli701brc')
sprite('Action_052_01', 6) # 185-190
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 191-194
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 195-201
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 202-204
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 205-214
sprite('Action_052_06', 6) # 215-220
sprite('Action_052_07', 5) # 221-225
sprite('Action_052_08', 7) # 226-232 **attackbox here**
label(103)
sprite('Action_052_09', 1) # 233-233 **attackbox here**
if SLOT_97:
_gotolabel(103)
sprite('Action_052_09', 32767) # 234-33000 **attackbox here**
Unknown18008()
label(110)
sprite('Action_000_00', 1) # 33001-33001 **attackbox here**
def upon_40():
clearUponHandler(40)
sendToLabel(112)
label(111)
sprite('Action_000_00', 7) # 33002-33008 **attackbox here**
sprite('Action_000_01', 7) # 33009-33015 **attackbox here**
sprite('Action_000_02', 6) # 33016-33021 **attackbox here**
sprite('Action_000_03', 6) # 33022-33027 **attackbox here**
sprite('Action_000_04', 8) # 33028-33035 **attackbox here**
sprite('Action_000_05', 5) # 33036-33040 **attackbox here**
sprite('Action_000_06', 5) # 33041-33045 **attackbox here**
sprite('Action_000_07', 5) # 33046-33050 **attackbox here**
sprite('Action_000_08', 6) # 33051-33056 **attackbox here**
sprite('Action_000_09', 5) # 33057-33061 **attackbox here**
sprite('Action_000_10', 6) # 33062-33067 **attackbox here**
sprite('Action_000_11', 8) # 33068-33075 **attackbox here**
sprite('Action_000_12', 5) # 33076-33080 **attackbox here**
sprite('Action_000_13', 5) # 33081-33085 **attackbox here**
sprite('Action_000_14', 6) # 33086-33091 **attackbox here**
gotoLabel(111)
label(112)
sprite('Action_052_00', 6) # 33092-33097
SFX_1('uli701bha')
sprite('Action_052_01', 6) # 33098-33103
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 33104-33107
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 33108-33114
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 33115-33117
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 33118-33127
sprite('Action_052_06', 6) # 33128-33133
sprite('Action_052_07', 5) # 33134-33138
sprite('Action_052_08', 7) # 33139-33145 **attackbox here**
Unknown23018(1)
sprite('Action_052_09', 32767) # 33146-65912 **attackbox here**
label(120)
sprite('Action_052_00', 6) # 65913-65918
SFX_1('uli700bpt')
sprite('Action_052_01', 6) # 65919-65924
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 65925-65928
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 65929-65935
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 65936-65938
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 65939-65948
sprite('Action_052_06', 6) # 65949-65954
sprite('Action_052_07', 5) # 65955-65959
sprite('Action_052_08', 7) # 65960-65966 **attackbox here**
label(121)
sprite('Action_052_09', 1) # 65967-65967 **attackbox here**
if SLOT_97:
_gotolabel(121)
sprite('Action_052_09', 45) # 65968-66012 **attackbox here**
sprite('Action_052_09', 32767) # 66013-98779 **attackbox here**
Unknown21007(24, 40)
Unknown21011(280)
label(130)
sprite('Action_000_00', 1) # 98780-98780 **attackbox here**
def upon_40():
clearUponHandler(40)
sendToLabel(132)
label(131)
sprite('Action_000_00', 7) # 98781-98787 **attackbox here**
sprite('Action_000_01', 7) # 98788-98794 **attackbox here**
sprite('Action_000_02', 6) # 98795-98800 **attackbox here**
sprite('Action_000_03', 6) # 98801-98806 **attackbox here**
sprite('Action_000_04', 8) # 98807-98814 **attackbox here**
sprite('Action_000_05', 5) # 98815-98819 **attackbox here**
sprite('Action_000_06', 5) # 98820-98824 **attackbox here**
sprite('Action_000_07', 5) # 98825-98829 **attackbox here**
sprite('Action_000_08', 6) # 98830-98835 **attackbox here**
sprite('Action_000_09', 5) # 98836-98840 **attackbox here**
sprite('Action_000_10', 6) # 98841-98846 **attackbox here**
sprite('Action_000_11', 8) # 98847-98854 **attackbox here**
sprite('Action_000_12', 5) # 98855-98859 **attackbox here**
sprite('Action_000_13', 5) # 98860-98864 **attackbox here**
sprite('Action_000_14', 6) # 98865-98870 **attackbox here**
gotoLabel(131)
label(132)
sprite('Action_052_00', 6) # 98871-98876
SFX_1('uli701bes')
sprite('Action_052_01', 6) # 98877-98882
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 98883-98886
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 98887-98893
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 98894-98896
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 98897-98906
sprite('Action_052_06', 6) # 98907-98912
sprite('Action_052_07', 5) # 98913-98917
sprite('Action_052_08', 7) # 98918-98924 **attackbox here**
label(133)
sprite('Action_052_09', 1) # 98925-98925 **attackbox here**
if SLOT_97:
_gotolabel(133)
sprite('Action_052_09', 32767) # 98926-131692 **attackbox here**
Unknown21011(120)
label(140)
sprite('Action_000_00', 1) # 131693-131693 **attackbox here**
Unknown2034(0)
Unknown2053(0)
def upon_40():
clearUponHandler(40)
sendToLabel(142)
label(141)
sprite('Action_000_00', 7) # 131694-131700 **attackbox here**
sprite('Action_000_01', 7) # 131701-131707 **attackbox here**
sprite('Action_000_02', 6) # 131708-131713 **attackbox here**
sprite('Action_000_03', 6) # 131714-131719 **attackbox here**
sprite('Action_000_04', 8) # 131720-131727 **attackbox here**
sprite('Action_000_05', 5) # 131728-131732 **attackbox here**
sprite('Action_000_06', 5) # 131733-131737 **attackbox here**
sprite('Action_000_07', 5) # 131738-131742 **attackbox here**
sprite('Action_000_08', 6) # 131743-131748 **attackbox here**
sprite('Action_000_09', 5) # 131749-131753 **attackbox here**
sprite('Action_000_10', 6) # 131754-131759 **attackbox here**
sprite('Action_000_11', 8) # 131760-131767 **attackbox here**
sprite('Action_000_12', 5) # 131768-131772 **attackbox here**
sprite('Action_000_13', 5) # 131773-131777 **attackbox here**
sprite('Action_000_14', 6) # 131778-131783 **attackbox here**
gotoLabel(141)
label(142)
sprite('Action_052_00', 6) # 131784-131789
sprite('Action_052_01', 6) # 131790-131795
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 131796-131799
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 131800-131806
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 131807-131809
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 131810-131819
sprite('Action_052_06', 6) # 131820-131825
sprite('Action_052_07', 5) # 131826-131830
sprite('Action_052_08', 7) # 131831-131837 **attackbox here**
SFX_1('uli701pyo')
label(143)
sprite('Action_052_09', 1) # 131838-131838 **attackbox here**
if SLOT_97:
_gotolabel(143)
sprite('Action_052_09', 30) # 131839-131868 **attackbox here**
sprite('Action_052_09', 32767) # 131869-164635 **attackbox here**
Unknown21007(24, 40)
def upon_39():
clearUponHandler(39)
SFX_1('uli703pyo')
Unknown21011(80)
label(150)
sprite('Action_052_00', 6) # 164636-164641
SFX_1('uli700uhy')
sprite('Action_052_01', 6) # 164642-164647
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 164648-164651
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 164652-164658
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 164659-164661
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 164662-164671
sprite('Action_052_06', 6) # 164672-164677
sprite('Action_052_07', 5) # 164678-164682
sprite('Action_052_08', 7) # 164683-164689 **attackbox here**
label(151)
sprite('Action_052_09', 1) # 164690-164690 **attackbox here**
if SLOT_97:
_gotolabel(151)
sprite('Action_052_09', 30) # 164691-164720 **attackbox here**
sprite('Action_052_09', 32767) # 164721-197487 **attackbox here**
Unknown21007(24, 40)
Unknown21011(120)
label(160)
sprite('Action_000_00', 1) # 197488-197488 **attackbox here**
Unknown2019(-100)
def upon_40():
clearUponHandler(40)
sendToLabel(162)
label(161)
sprite('Action_000_00', 7) # 197489-197495 **attackbox here**
sprite('Action_000_01', 7) # 197496-197502 **attackbox here**
sprite('Action_000_02', 6) # 197503-197508 **attackbox here**
sprite('Action_000_03', 6) # 197509-197514 **attackbox here**
sprite('Action_000_04', 8) # 197515-197522 **attackbox here**
sprite('Action_000_05', 5) # 197523-197527 **attackbox here**
sprite('Action_000_06', 5) # 197528-197532 **attackbox here**
sprite('Action_000_07', 5) # 197533-197537 **attackbox here**
sprite('Action_000_08', 6) # 197538-197543 **attackbox here**
sprite('Action_000_09', 5) # 197544-197548 **attackbox here**
sprite('Action_000_10', 6) # 197549-197554 **attackbox here**
sprite('Action_000_11', 8) # 197555-197562 **attackbox here**
sprite('Action_000_12', 5) # 197563-197567 **attackbox here**
sprite('Action_000_13', 5) # 197568-197572 **attackbox here**
sprite('Action_000_14', 6) # 197573-197578 **attackbox here**
gotoLabel(161)
label(162)
sprite('Action_052_00', 6) # 197579-197584
SFX_1('uli701uwa')
sprite('Action_052_01', 6) # 197585-197590
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 197591-197594
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 197595-197601
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 197602-197604
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 197605-197614
sprite('Action_052_06', 6) # 197615-197620
sprite('Action_052_07', 5) # 197621-197625
sprite('Action_052_08', 7) # 197626-197632 **attackbox here**
sprite('Action_052_09', 32767) # 197633-230399 **attackbox here**
Unknown23018(1)
label(170)
sprite('Action_000_00', 1) # 230400-230400 **attackbox here**
def upon_40():
clearUponHandler(40)
sendToLabel(172)
label(171)
sprite('Action_000_00', 7) # 230401-230407 **attackbox here**
sprite('Action_000_01', 7) # 230408-230414 **attackbox here**
sprite('Action_000_02', 6) # 230415-230420 **attackbox here**
sprite('Action_000_03', 6) # 230421-230426 **attackbox here**
sprite('Action_000_04', 8) # 230427-230434 **attackbox here**
sprite('Action_000_05', 5) # 230435-230439 **attackbox here**
sprite('Action_000_06', 5) # 230440-230444 **attackbox here**
sprite('Action_000_07', 5) # 230445-230449 **attackbox here**
sprite('Action_000_08', 6) # 230450-230455 **attackbox here**
sprite('Action_000_09', 5) # 230456-230460 **attackbox here**
sprite('Action_000_10', 6) # 230461-230466 **attackbox here**
sprite('Action_000_11', 8) # 230467-230474 **attackbox here**
sprite('Action_000_12', 5) # 230475-230479 **attackbox here**
sprite('Action_000_13', 5) # 230480-230484 **attackbox here**
sprite('Action_000_14', 6) # 230485-230490 **attackbox here**
gotoLabel(171)
label(172)
sprite('Action_052_00', 6) # 230491-230496
SFX_1('uli701uor')
sprite('Action_052_01', 6) # 230497-230502
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 230503-230506
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 230507-230513
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 230514-230516
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 230517-230526
sprite('Action_052_06', 6) # 230527-230532
sprite('Action_052_07', 5) # 230533-230537
sprite('Action_052_08', 7) # 230538-230544 **attackbox here**
label(173)
sprite('Action_052_09', 1) # 230545-230545 **attackbox here**
if SLOT_97:
_gotolabel(173)
sprite('Action_052_09', 32767) # 230546-263312 **attackbox here**
Unknown21011(120)
label(180)
sprite('Action_052_00', 6) # 263313-263318
SFX_1('uli700uva')
sprite('Action_052_01', 6) # 263319-263324
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 263325-263328
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 263329-263335
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 263336-263338
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 263339-263348
sprite('Action_052_06', 6) # 263349-263354
sprite('Action_052_07', 5) # 263355-263359
sprite('Action_052_08', 7) # 263360-263366 **attackbox here**
label(181)
sprite('Action_052_09', 1) # 263367-263367 **attackbox here**
if SLOT_97:
_gotolabel(181)
sprite('Action_052_09', 32767) # 263368-296134 **attackbox here**
Unknown21007(24, 40)
Unknown21011(480)
label(190)
sprite('Action_052_00', 6) # 296135-296140
SFX_1('uli700pla')
sprite('Action_052_01', 6) # 296141-296146
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 296147-296150
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 296151-296157
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 296158-296160
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 296161-296170
sprite('Action_052_06', 6) # 296171-296176
sprite('Action_052_07', 5) # 296177-296181
sprite('Action_052_08', 7) # 296182-296188 **attackbox here**
label(191)
sprite('Action_052_09', 1) # 296189-296189 **attackbox here**
if SLOT_97:
_gotolabel(191)
sprite('Action_052_09', 20) # 296190-296209 **attackbox here**
sprite('Action_052_09', 32767) # 296210-328976 **attackbox here**
Unknown21007(24, 40)
Unknown21011(360)
label(200)
sprite('Action_052_00', 6) # 328977-328982
SFX_1('uli700umi')
sprite('Action_052_01', 6) # 328983-328988
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 328989-328992
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 328993-328999
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 329000-329002
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 329003-329012
sprite('Action_052_06', 6) # 329013-329018
sprite('Action_052_07', 5) # 329019-329023
sprite('Action_052_08', 7) # 329024-329030 **attackbox here**
label(201)
sprite('Action_052_09', 1) # 329031-329031 **attackbox here**
if SLOT_97:
_gotolabel(201)
sprite('Action_052_09', 20) # 329032-329051 **attackbox here**
sprite('Action_052_09', 32767) # 329052-361818 **attackbox here**
Unknown21007(24, 40)
Unknown21011(250)
label(210)
sprite('Action_000_00', 1) # 361819-361819 **attackbox here**
Unknown2019(-1000)
def upon_40():
clearUponHandler(40)
sendToLabel(212)
label(211)
sprite('Action_000_00', 7) # 361820-361826 **attackbox here**
sprite('Action_000_01', 7) # 361827-361833 **attackbox here**
sprite('Action_000_02', 6) # 361834-361839 **attackbox here**
sprite('Action_000_03', 6) # 361840-361845 **attackbox here**
sprite('Action_000_04', 8) # 361846-361853 **attackbox here**
sprite('Action_000_05', 5) # 361854-361858 **attackbox here**
sprite('Action_000_06', 5) # 361859-361863 **attackbox here**
sprite('Action_000_07', 5) # 361864-361868 **attackbox here**
sprite('Action_000_08', 6) # 361869-361874 **attackbox here**
sprite('Action_000_09', 5) # 361875-361879 **attackbox here**
sprite('Action_000_10', 6) # 361880-361885 **attackbox here**
sprite('Action_000_11', 8) # 361886-361893 **attackbox here**
sprite('Action_000_12', 5) # 361894-361898 **attackbox here**
sprite('Action_000_13', 5) # 361899-361903 **attackbox here**
sprite('Action_000_14', 6) # 361904-361909 **attackbox here**
gotoLabel(211)
label(212)
sprite('Action_052_00', 6) # 361910-361915
SFX_1('uli701bce')
sprite('Action_052_01', 6) # 361916-361921
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 361922-361925
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 361926-361932
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 361933-361935
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 361936-361945
sprite('Action_052_06', 6) # 361946-361951
sprite('Action_052_07', 5) # 361952-361956
sprite('Action_052_08', 7) # 361957-361963 **attackbox here**
sprite('Action_052_09', 32767) # 361964-394730 **attackbox here**
Unknown23018(1)
label(220)
sprite('Action_000_00', 1) # 394731-394731 **attackbox here**
Unknown2019(1000)
def upon_40():
clearUponHandler(40)
sendToLabel(222)
label(221)
sprite('Action_000_00', 7) # 394732-394738 **attackbox here**
sprite('Action_000_01', 7) # 394739-394745 **attackbox here**
sprite('Action_000_02', 6) # 394746-394751 **attackbox here**
sprite('Action_000_03', 6) # 394752-394757 **attackbox here**
sprite('Action_000_04', 8) # 394758-394765 **attackbox here**
sprite('Action_000_05', 5) # 394766-394770 **attackbox here**
sprite('Action_000_06', 5) # 394771-394775 **attackbox here**
sprite('Action_000_07', 5) # 394776-394780 **attackbox here**
sprite('Action_000_08', 6) # 394781-394786 **attackbox here**
sprite('Action_000_09', 5) # 394787-394791 **attackbox here**
sprite('Action_000_10', 6) # 394792-394797 **attackbox here**
sprite('Action_000_11', 8) # 394798-394805 **attackbox here**
sprite('Action_000_12', 5) # 394806-394810 **attackbox here**
sprite('Action_000_13', 5) # 394811-394815 **attackbox here**
sprite('Action_000_14', 6) # 394816-394821 **attackbox here**
gotoLabel(221)
label(222)
sprite('Action_052_00', 6) # 394822-394827
SFX_1('uli701use')
sprite('Action_052_01', 6) # 394828-394833
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 394834-394837
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 394838-394844
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 394845-394847
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 394848-394857
sprite('Action_052_06', 6) # 394858-394863
sprite('Action_052_07', 5) # 394864-394868
sprite('Action_052_08', 7) # 394869-394875 **attackbox here**
sprite('Action_052_09', 32767) # 394876-427642 **attackbox here**
Unknown23018(1)
label(230)
sprite('Action_052_00', 6) # 427643-427648
SFX_1('uli700uhi')
sprite('Action_052_01', 6) # 427649-427654
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 427655-427658
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 427659-427665
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 427666-427668
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 427669-427678
sprite('Action_052_06', 6) # 427679-427684
sprite('Action_052_07', 5) # 427685-427689
sprite('Action_052_08', 7) # 427690-427696 **attackbox here**
label(231)
sprite('Action_052_09', 1) # 427697-427697 **attackbox here**
if SLOT_97:
_gotolabel(231)
sprite('Action_052_09', 30) # 427698-427727 **attackbox here**
sprite('Action_052_09', 32767) # 427728-460494 **attackbox here**
Unknown21007(24, 40)
Unknown21011(180)
label(240)
sprite('Action_052_00', 6) # 460495-460500
SFX_1('uli700pel')
sprite('Action_052_01', 6) # 460501-460506
teleportRelativeX(-25000)
SFX_0('003_swing_grap_0_0')
sprite('Action_052_02', 4) # 460507-460510
teleportRelativeX(-5000)
sprite('Action_052_03', 7) # 460511-460517
teleportRelativeX(-20000)
sprite('Action_052_04', 3) # 460518-460520
SFX_3('SE010')
teleportRelativeX(-20000)
sprite('Action_052_05', 10) # 460521-460530
sprite('Action_052_06', 6) # 460531-460536
sprite('Action_052_07', 5) # 460537-460541
sprite('Action_052_08', 7) # 460542-460548 **attackbox here**
label(241)
sprite('Action_052_09', 1) # 460549-460549 **attackbox here**
if SLOT_97:
_gotolabel(241)
sprite('Action_052_09', 30) # 460550-460579 **attackbox here**
sprite('Action_052_09', 32767) # 460580-493346 **attackbox here**
Unknown21007(24, 40)
Unknown21011(180)
@State
def CmnActLose():
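    # Round-lose animation: plays the Action_248 sequence (spawning the 'uli403_0' effect via
    # Unknown7006 when SLOT_158 is set) and holds the final Action_248_10 sprite until the state ends.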
sprite('Action_248_00', 7) # 1-7
sprite('Action_248_01', 4) # 8-11
if SLOT_158:
Unknown7006('uli403_0', 100, 879324277, 828322608, 0, 0, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
SFX_0('003_swing_grap_0_0')
Unknown23018(1)
sprite('Action_248_02', 7) # 12-18
sprite('Action_248_03', 5) # 19-23
sprite('Action_248_04', 8) # 24-31
sprite('Action_248_05', 10) # 32-41
sprite('Action_248_06', 5) # 42-46
sprite('Action_248_07', 7) # 47-53
SFX_FOOTSTEP_(100, 0, 1)
sprite('Action_248_08', 3) # 54-56
sprite('Action_248_09', 3) # 57-59
sprite('Action_248_10', 32767) # 60-32826
|
[
"shtkn001@gmail.com"
] |
shtkn001@gmail.com
|
13afaec093ca5dbb37ccc72918e13c91b3555344
|
2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8
|
/pardus/tags/2011/util/shell/command-not-found/actions.py
|
fd6ae5ea60b235f2996161b9d5463089b352de0a
|
[] |
no_license
|
aligulle1/kuller
|
bda0d59ce8400aa3c7ba9c7e19589f27313492f7
|
7f98de19be27d7a517fe19a37c814748f7e18ba6
|
refs/heads/master
| 2021-01-20T02:22:09.451356
| 2013-07-23T17:57:58
| 2013-07-23T17:57:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 647
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2008-2010 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
def install():
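    # Install the command-not-found binary, the architecture-specific package database,
    # the translated .mo catalogues for each listed locale, and the documentation files.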
pisitools.dobin("src/command-not-found")
pisitools.insinto("/var/db/command-not-found", "data/packages-%s.db" % get.ARCH(), "packages.db")
for lang in ["da", "de", "es", "fr", "hu", "it", "nl", "ru", "sv", "tr"]:
pisitools.domo("po/%s.po" % lang, lang, "command-not-found.mo")
pisitools.dodoc("AUTHORS", "COPYING", "README")
|
[
"yusuf.aydemir@istanbul.com"
] |
yusuf.aydemir@istanbul.com
|
57f0473df75e076251d0ff6afe0e60431dd1b124
|
5259532bb41382bc05c7f311fdee65c67f67990e
|
/Tools/SampleTool/UI_SampleMainForm.py
|
233a453909cfe5c2f121227d1c0c5bfe19a1f080
|
[] |
no_license
|
csjy309450/MLTools_PyQt4
|
57905cc78284d87349eda511fc78c43f3527bbeb
|
d1af57c279fd12428cda303d22e7a732db3ff257
|
refs/heads/master
| 2021-04-29T10:36:54.792400
| 2018-02-28T17:03:08
| 2018-02-28T17:03:08
| 77,835,494
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,513
|
py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'SampleToolWidget.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
import CopyForm as cf
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class UI_SampleMainForm(object):
"""
UI in Sample Tool Main Widget
"""
def setupUi(self, Form):
"""
        Initialize the window UI
:param Form:
:return:
"""
self.mainForm = Form
Form.setObjectName(_fromUtf8("Form"))
Form.setWindowModality(QtCore.Qt.NonModal)
Form.resize(705, 579)
        ## Object member variables
        # Image shown in the QLabel widget
self.qImg = QtGui.QPixmap()
self.currentFrameNum = -1
self.filePathsList = QtCore.QStringList()
        # Get the window geometry
self.widRect = Form.frameGeometry()
        ## Widget layout
        # QWidget panel that hosts the whole vertical layout
self.VLayoutWidget = QtGui.QWidget(Form)
self.VLayoutWidget.setGeometry(self.widRect)
self.VLayoutWidget.setObjectName(_fromUtf8("VLayoutWidget"))
        # Two QSlider widgets on the first row
        # HSlider_copyWidScale controls the size of the copyWidget window
self.HSlider_copyWidScale = QtGui.QSlider(self.VLayoutWidget)
self.HSlider_copyWidScale.setCursor(QtGui.QCursor(QtCore.Qt.SizeHorCursor))
self.HSlider_copyWidScale.setOrientation(QtCore.Qt.Horizontal)
self.HSlider_copyWidScale.setObjectName(_fromUtf8("HSlider_copyWidScale"))
        # Controls the image resolution
self.HSlider_imgScale = QtGui.QSlider(self.VLayoutWidget)
self.HSlider_imgScale.setCursor(QtGui.QCursor(QtCore.Qt.SizeHorCursor))
self.HSlider_imgScale.setOrientation(QtCore.Qt.Horizontal)
self.HSlider_imgScale.setObjectName(_fromUtf8("HSlider_imgScale"))
        # Widget panel inside the scroll area
self.scrollAreaWidgetContents = QtGui.QWidget()
# self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 100, 100))
self.scrollAreaWidgetContents.setObjectName(_fromUtf8("scrollAreaWidgetContents"))
self.scrollAreaWidgetContents.setMinimumSize(1200, 1200)
        # QLabel object inside the scroll area panel
self.label = QtGui.QLabel(self.scrollAreaWidgetContents)
# self.label.setGeometry(QtCore.QRect(0, 0, 500, 500))
# self.label.setPixmap(self.img)
# self.label.setGeometry(self.img.rect())
# self.label.setObjectName(_fromUtf8("label"))
# self.scrollAreaWidgetContents.setMinimumSize(self.img.size())
        # Scroll area window
self.scrollArea = QtGui.QScrollArea(self.VLayoutWidget)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName(_fromUtf8("scrollArea"))
self.scrollArea.setAutoFillBackground(True)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setGeometry(QtCore.QRect(0, 0, 80, 80))
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
##layout
        # Inner horizontal layout
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        # Add the sliders defined above
self.horizontalLayout.addWidget(self.HSlider_copyWidScale)
self.horizontalLayout.addWidget(self.HSlider_imgScale)
        # Outer vertical layout, filled in order
self.verticalLayout = QtGui.QVBoxLayout(self.VLayoutWidget)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        # Add the horizontal layout and the scroll area in order
self.verticalLayout.addLayout(self.horizontalLayout)
self.verticalLayout.addWidget(self.scrollArea)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
self.__InitMenubar()
def __InitMenubar(self):
action_exit = QtGui.QAction(QtGui.QIcon(), '&exit', self.mainForm)
action_exit.triggered.connect(self.mainForm.close)
action_load = QtGui.QAction(QtGui.QIcon(), '&load', self.mainForm)
action_load.triggered.connect(self.On_Action_Load)
action_next = QtGui.QAction(QtGui.QIcon(), '&next', self.mainForm)
action_next.triggered.connect(self.On_Action_Next)
action_previous = QtGui.QAction(QtGui.QIcon(), '&previous', self.mainForm)
action_previous.triggered.connect(self.On_Action_Previous)
action_screenShot = QtGui.QAction(QtGui.QIcon(), '&screen shot', self.mainForm)
action_screenShot.triggered.connect(self.On_Action_ScreenShot)
menubar = self.mainForm.menuBar()
fileMenu = menubar.addMenu('&file')
fileMenu.addAction(action_load)
fileMenu.addAction(action_next)
fileMenu.addAction(action_previous)
fileMenu.addAction(action_screenShot)
fileMenu.addAction(action_exit)
def On_Action_Load(self, event):
self.filePathsList = QtGui.QFileDialog.getOpenFileNames(self.mainForm, 'Open file', '/home')
for filePath in self.filePathsList:
print filePath
print self.filePathsList.count()
self.currentFrameNum = -1
self.On_Action_Next(None)
def __getParentClientSize(self):
return self.scrollArea.size() - QtCore.QSize(
self.scrollArea.verticalScrollBar().width(),
self.scrollArea.horizontalScrollBar().height())
def showImage(self):
dis = (abs(self.horizontalLayout.geometry().left() - 0),
abs(self.horizontalLayout.geometry().right() - self.mainForm.width()),
abs(self.horizontalLayout.geometry().top() - 0),
abs(self.mainForm.height() - self.scrollArea.geometry().bottom()))
        # Load the image from the file list
self.qImg.load(self.filePathsList[self.currentFrameNum])
        # Show it in the QLabel and resize the QLabel to the image size
self.label.setPixmap(self.qImg)
self.label.setGeometry(self.qImg.rect())
        # Set the size of the QWidget area inside the QScrollArea
self.scrollAreaWidgetContents.setMinimumSize(self.qImg.size())
        # Adjust the scrollArea size according to the image size
self.scrollArea.setMaximumSize(self.qImg.size() + QtCore.QSize(self.scrollArea.verticalScrollBar().width(),
self.scrollArea.horizontalScrollBar().height()))
        # Compute the maximum window size allowed for the current image
# print self.horizontalLayout.geometry()
# print self.mainForm.size()
# print self.scrollArea.geometry()
# print dis
self.mainForm.setMaximumSize(self.scrollArea.maximumSize() + QtCore.QSize(
dis[0]+dis[1], self.HSlider_imgScale.height()+dis[2]+dis[3]))
def On_Action_Next(self, event):
if self.currentFrameNum + 1 < self.filePathsList.count():
self.currentFrameNum += 1
self.showImage()
self.mainForm.repaint()
try:
self.copyForm.UpdateImg()
except Exception, e:
pass
def On_Action_Previous(self, event):
if self.currentFrameNum - 1 >= 0:
self.currentFrameNum -= 1
self.showImage()
self.mainForm.repaint()
try:
self.copyForm.UpdateImg()
except Exception, e:
pass
def On_Action_ScreenShot(self, event):
self.copyForm = cf.CopyForm(self.qImg, self.scrollArea)
# self.mainForm.connect(self.copyForm._sinal, QtCore.SIGNAL('Signal_Key(PyQt_PyObject)'),
# self.mainForm, QtCore.SLOT("On_Key_CopyForm(PyQt_PyObject)"))
self.mainForm.connect(self.copyForm, QtCore.SIGNAL('Signal_Key(PyQt_PyObject)'),
self.mainForm, QtCore.SLOT("On_Key_CopyForm(PyQt_PyObject)"))
def retranslateUi(self, Form):
"""
:param Form:
:return:
"""
Form.setWindowTitle(_translate("Form", "Sample Tool", None))
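# A minimal usage sketch (not part of the original tool). setupUi() calls Form.menuBar(),
# so the host window is assumed here to be a QMainWindow; the screenshot action additionally
# expects an On_Key_CopyForm slot on that window, which a plain QMainWindow does not provide.
if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    window = QtGui.QMainWindow()
    ui = UI_SampleMainForm()
    ui.setupUi(window)
    window.show()
    sys.exit(app.exec_())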
|
[
"="
] |
=
|
9226629a592d1cbce6243737ba9838ff68a1135b
|
98404910e43b88108e658d111e9196839b7aca77
|
/tests/test_layers.py
|
f49e07677ec682f645022d76bc04813595d924fa
|
[
"Apache-2.0"
] |
permissive
|
mbencherif/kraken
|
b45ff5c32ab7d76301f613d738a1281adb05b2ce
|
ffb35d101166e35fad7930d468138e216e48991a
|
refs/heads/master
| 2023-07-30T12:42:33.823848
| 2021-09-29T14:37:18
| 2021-09-29T14:37:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,368
|
py
|
# -*- coding: utf-8 -*-
import unittest
from nose.tools import raises
import torch
from kraken.lib import layers
class TestLayers(unittest.TestCase):
"""
Testing custom layer implementations.
"""
def setUp(self):
torch.set_grad_enabled(False)
def test_maxpool(self):
"""
Test maximum pooling layer.
"""
mp = layers.MaxPool((3, 3), (2, 2))
o = mp(torch.randn(1, 2, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 15, 31))
def test_1d_dropout(self):
"""
Test 1d dropout layer.
"""
do = layers.Dropout(0.2, 1)
o = do(torch.randn(1, 2, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 32, 64))
def test_2d_dropout(self):
"""
Test 2d dropout layer.
"""
do = layers.Dropout(0.2, 2)
o = do(torch.randn(1, 2, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 32, 64))
def test_forward_rnn_layer_x(self):
"""
Test unidirectional RNN layer in x-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'f', False, False)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 32, 64))
def test_forward_rnn_layer_y(self):
"""
Test unidirectional RNN layer in y-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'f', True, False)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 32, 64))
def test_forward_rnn_layer_x_summarize(self):
"""
Test unidirectional summarizing RNN layer in x-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'f', False, True)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 32, 1))
def test_forward_rnn_layer_y_summarize(self):
"""
Test unidirectional summarizing RNN layer in y-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'f', True, True)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 2, 1, 64))
def test_bidi_rnn_layer_x(self):
"""
Test bidirectional RNN layer in x-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'b', False, False)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 4, 32, 64))
def test_bidi_rnn_layer_y(self):
"""
Test bidirectional RNN layer in y-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'b', True, False)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 4, 32, 64))
def test_bidi_rnn_layer_x_summarize(self):
"""
Test bidirectional summarizing RNN layer in x-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'b', False, True)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 4, 32, 1))
def test_bidi_rnn_layer_y_summarize(self):
"""
Test bidirectional summarizing RNN layer in y-dimension.
"""
rnn = layers.TransposedSummarizingRNN(10, 2, 'b', True, True)
o = rnn(torch.randn(1, 10, 32, 64))
self.assertEqual(o[0].shape, (1, 4, 1, 64))
def test_linsoftmax(self):
"""
Test basic function of linear layer.
"""
lin = layers.LinSoftmax(20, 10)
o = lin(torch.randn(1, 20, 12, 24))
self.assertEqual(o[0].shape, (1, 10, 12, 24))
def test_linsoftmax_train(self):
"""
Test function of linear layer in training mode (log_softmax)
"""
lin = layers.LinSoftmax(20, 10).train()
o = lin(torch.randn(1, 20, 12, 24))
self.assertLess(o[0].max(), 0)
def test_linsoftmax_test(self):
"""
Test function of linear layer in eval mode (softmax)
"""
lin = layers.LinSoftmax(20, 10).eval()
o = lin(torch.randn(1, 20, 12, 24))
self.assertGreaterEqual(o[0].min(), 0)
def test_linsoftmax_aug(self):
"""
Test basic function of linear layer with 1-augmentation.
"""
lin = layers.LinSoftmax(20, 10, True)
o = lin(torch.randn(1, 20, 12, 24))
self.assertEqual(o[0].shape, (1, 10, 12, 24))
def test_linsoftmax_aug_train(self):
"""
Test function of linear layer in training mode (log_softmax) with 1-augmentation
"""
lin = layers.LinSoftmax(20, 10, True).train()
o = lin(torch.randn(1, 20, 12, 24))
self.assertLess(o[0].max(), 0)
def test_linsoftmax_aug_test(self):
"""
Test function of linear layer in eval mode (softmax) with 1-augmentation
"""
lin = layers.LinSoftmax(20, 10, True).eval()
o = lin(torch.randn(1, 20, 12, 24))
self.assertGreaterEqual(o[0].min(), 0)
def test_actconv2d_lin(self):
"""
Test convolutional layer without activation.
"""
conv = layers.ActConv2D(5, 12, (3, 3), (1, 1), 'l')
o = conv(torch.randn(1, 5, 24, 12))
self.assertEqual(o[0].shape, (1, 12, 24, 12))
def test_actconv2d_sigmoid(self):
"""
Test convolutional layer with sigmoid activation.
"""
conv = layers.ActConv2D(5, 12, (3, 3), (1, 1), 's')
o = conv(torch.randn(1, 5, 24, 12))
self.assertTrue(0 <= o[0].min() <= 1)
self.assertTrue(0 <= o[0].max() <= 1)
def test_actconv2d_tanh(self):
"""
Test convolutional layer with tanh activation.
"""
conv = layers.ActConv2D(5, 12, (3, 3), (1, 1), 't')
o = conv(torch.randn(1, 5, 24, 12))
self.assertTrue(-1 <= o[0].min() <= 1)
self.assertTrue(-1 <= o[0].max() <= 1)
def test_actconv2d_softmax(self):
"""
Test convolutional layer with softmax activation.
"""
conv = layers.ActConv2D(5, 12, (3, 3), (1, 1), 'm')
o = conv(torch.randn(1, 5, 24, 12))
self.assertTrue(0 <= o[0].min() <= 1)
self.assertTrue(0 <= o[0].max() <= 1)
def test_actconv2d_relu(self):
"""
Test convolutional layer with relu activation.
"""
conv = layers.ActConv2D(5, 12, (3, 3), (1, 1), 'r')
o = conv(torch.randn(1, 5, 24, 12))
self.assertLessEqual(0, o[0].min())
self.assertLessEqual(0, o[0].max())
def test_linsoftmax_resize_add(self):
"""
Tests resizing of a fully connected layer.
"""
lin = layers.LinSoftmax(20, 10)
w_cp = lin.lin.weight.clone()
b_cp = lin.lin.bias.clone()
lin.resize(25)
self.assertTrue(w_cp.eq(lin.lin.weight[:10, :]).all())
self.assertTrue(b_cp.eq(lin.lin.bias[:10]).all())
self.assertTrue(lin.lin.weight.shape[0] == 25)
self.assertTrue(lin.lin.bias.shape[0] == 25)
def test_linsoftmax_resize_remove(self):
"""
Tests resizing of a fully connected layer.
"""
lin = layers.LinSoftmax(20, 10)
w_cp = lin.lin.weight.clone()
b_cp = lin.lin.bias.clone()
lin.resize(5, (1, 5, 6, 7, 9))
self.assertTrue(w_cp[(0, 2, 3, 4, 8), :].eq(lin.lin.weight).all())
self.assertTrue(b_cp[(0, 2, 3, 4, 8),].eq(lin.lin.bias).all())
def test_linsoftmax_resize_both(self):
"""
Tests resizing of a fully connected layer.
"""
lin = layers.LinSoftmax(20, 10)
w_cp = lin.lin.weight.clone()
b_cp = lin.lin.bias.clone()
lin.resize(25, (1, 5, 6, 7, 9))
self.assertTrue(w_cp[(0, 2, 3, 4, 8), :].eq(lin.lin.weight[:5, :]).all())
self.assertTrue(b_cp[(0, 2, 3, 4, 8),].eq(lin.lin.bias[:5]).all())
self.assertTrue(lin.lin.weight.shape[0] == 25)
self.assertTrue(lin.lin.bias.shape[0] == 25)
def test_conv_resize_add(self):
"""
Tests resizing of a convolutional output layer.
"""
conv = layers.ActConv2D(20, 10, (1, 1), (1, 1))
w_cp = conv.co.weight.clone()
b_cp = conv.co.bias.clone()
conv.resize(25)
self.assertTrue(w_cp.eq(conv.co.weight[:10, :]).all())
self.assertTrue(b_cp.eq(conv.co.bias[:10]).all())
self.assertTrue(conv.co.weight.shape[0] == 25)
self.assertTrue(conv.co.bias.shape[0] == 25)
def test_conv_resize_remove(self):
"""
Tests resizing of a convolutional output layer.
"""
conv = layers.ActConv2D(20, 10, (1, 1), (1, 1))
w_cp = conv.co.weight.clone()
b_cp = conv.co.bias.clone()
conv.resize(5, (1, 5, 6, 7, 9))
self.assertTrue(w_cp[(0, 2, 3, 4, 8), :].eq(conv.co.weight).all())
self.assertTrue(b_cp[(0, 2, 3, 4, 8),].eq(conv.co.bias).all())
def test_conv_resize_both(self):
"""
Tests resizing of a convolutional output layer.
"""
conv = layers.ActConv2D(20, 10, (1, 1), (1, 1))
w_cp = conv.co.weight.clone()
b_cp = conv.co.bias.clone()
conv.resize(25, (1, 5, 6, 7, 9))
self.assertTrue(w_cp[(0, 2, 3, 4, 8), :].eq(conv.co.weight[:5, :]).all())
self.assertTrue(b_cp[(0, 2, 3, 4, 8),].eq(conv.co.bias[:5]).all())
self.assertTrue(conv.co.weight.shape[0] == 25)
self.assertTrue(conv.co.bias.shape[0] == 25)
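# For reference, a rough sketch (an assumption, not kraken's actual implementation) of the
# resize() behaviour the LinSoftmax resize tests above pin down: surviving output rows are
# copied into a freshly initialized, larger output layer. It operates on a plain
# torch.nn.Linear such as lin.lin; the convolutional case is analogous.
def _resize_linear_sketch(lin, output_size, del_indices=()):
    import torch.nn as nn
    keep = [i for i in range(lin.out_features) if i not in del_indices]
    new_lin = nn.Linear(lin.in_features, output_size)
    with torch.no_grad():
        # copy the retained rows into the top of the new weight/bias tensors
        new_lin.weight[:len(keep), :] = lin.weight[keep, :]
        new_lin.bias[:len(keep)] = lin.bias[keep]
    return new_lin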
|
[
"mittagessen@l.unchti.me"
] |
mittagessen@l.unchti.me
|
d615b3f87e95f821b1ad96c4a961165d3dcfb242
|
1924da60fa3298e386acc6dac9bd390784a9b5bb
|
/test18.py
|
2eaca7b55757608822e1ea3f6eebcce199ba5a68
|
[] |
no_license
|
yukitomo/NLP100DrillExercises
|
c8a177b56f798cef225ace540e965809a1fc1fbc
|
ea2ceb366de1fa1f27d084e3b9328cc6f34ac1dd
|
refs/heads/master
| 2020-06-01T02:55:11.423238
| 2015-06-10T15:39:03
| 2015-06-10T15:39:03
| 37,205,750
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 425
|
py
|
#!/usr/bin/python
#-*-coding:utf-8-*-
# (18) Design your own regular expression that matches expressions that look like addresses in Sendai City, and extract them.
#python test18.py tweet.txt
import sys
import re
pattern = re.compile(u'(仙台市)([^\s\w\d ]{1,20}[\d0-9〇一-九十上下東西]+)*')
for line in sys.stdin:
line = line.decode("utf-8")
match=pattern.search(line)
if match:
print match.group(0).encode("utf-8")
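# A quick self-check helper (the sample string below is made up for illustration and is not
# part of the exercise data). Call _demo() to try the pattern without piping a tweet file;
# for this sample it prints the leading portion the pattern recognises (仙台市青葉区一).
def _demo():
    sample = u"今日は仙台市青葉区一番町に行った"
    m = pattern.search(sample)
    if m:
        print m.group(0).encode("utf-8")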
|
[
"over.the.tr0ouble@gmail.com"
] |
over.the.tr0ouble@gmail.com
|
742a110bb63077d24dc9f3b001ade6455c465a66
|
0b85fbdd58eab30cf2ed5676a9c331c1ab6152f6
|
/cdp_viz/handlers/services/dl.py
|
a59f386a88dc13816e7f7bb9a1accba49a601a15
|
[] |
no_license
|
pymonger/cdp-viz-pyramid
|
82ddac3552a0da9c1a831959ff28fdb3b21c126f
|
32c5f3d6f1d63c1e7e6131876da9a19ab3d25e93
|
refs/heads/master
| 2020-03-28T23:46:17.564043
| 2013-02-06T17:48:29
| 2013-02-06T17:48:29
| 149,307,796
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,392
|
py
|
import logging, simplejson, pprint, re, os, sys
from urllib2 import urlopen
from urllib import urlencode
from datetime import datetime
from string import Template
from Levenshtein import ratio, median
from pyramid.httpexceptions import HTTPFound
from pyramid_handlers import action
from beaker.cache import CacheManager
import cdp_viz.handlers.base as base
import cdp_viz.models as model
from cdp_viz.lib.timeUtils import getDatetimeFromString, getISODateTimeString
from cdp_viz.lib.sparql import MD5_SPARQL_TMPL, MANIFEST_SPARQL_TMPL, sparqlQuery
from cdp_viz.lib.sessionGraph import rdf2sessionGraph
log = logging.getLogger(__name__)
CDE_PACKAGE_TMPL = Template('''#!/bin/sh
wget "${LDOS_BASE_URL}/data/download?hash=${hash}" -O $file
for i in `cat session_manifest.txt`; do
file=`echo $$i | awk 'BEGIN{FS=","}{print $$1}'`
filebase=`basename $$file`
dir=`dirname $$file`
md5=`echo $$i | awk 'BEGIN{FS=","}{print $$2}'`
oct_perms=`echo $$i | awk 'BEGIN{FS=","}{print $$6}'`
perms=`python -c "print oct(int($$oct_perms))[-3:]"`
mkdir -p $$dir
wget -q "${LDOS_BASE_URL}/data/download?file=$${filebase}&hash=$${md5}" -O $$file
chmod $$perms $$file
echo "downloaded: $$file"
done
''')
class Download(base.Handler):
@action(renderer="string")
def sessionEntities(self):
sessionId = self.request.params.get('sessionId')
#log.debug("sessionId: %s" % sessionId)
d = simplejson.loads(sparqlQuery(MD5_SPARQL_TMPL.substitute(uri=sessionId)))
#log.debug(pprint.pformat(d))
wgetLines = []
for res in d['results']['bindings']:
entity = res['entity']['value']
hash = res['md5']['value']
match = re.search(r'http://provenance\.jpl\.nasa\.gov/cdp#(.*?)/\d{4}-\d{2}-\d{2}T\d{2}_\d{2}_\d{2}.*?$', entity)
if match:
file = os.path.basename(match.group(1))
wgetLines.append("wget %s/data/download?hash=%s -O %s" %
(self.request.registry.settings['ldos.url'], hash, file))
return "#!/bin/sh\n%s\n" % "\n".join(wgetLines)
@action(renderer="string")
def cde(self):
self.request.response.content_disposition = 'attachment; filename="wget_cde_package.sh"'
sessionId = self.request.params.get('sessionId')
#log.debug("sessionId: %s" % sessionId)
d = simplejson.loads(sparqlQuery(MANIFEST_SPARQL_TMPL.substitute(uri=sessionId)))
#log.debug(pprint.pformat(d))
wgetLines = []
for res in d['results']['bindings']:
loc = res['loc']['value']
hash = res['md5']['value']
match = re.search(r'(.*?)(?:/\d{4}-\d{2}-\d{2}T\d{2}_\d{2}_\d{2}.*?)?$', loc)
if match:
file = os.path.basename(match.group(1))
return CDE_PACKAGE_TMPL.substitute(LDOS_BASE_URL=self.request.registry.settings['ldos.url'],
hash=hash,
file=file)
return "No CDE package for session %s." % sessionId
def download(self):
filename = self.request.params.get('filename')
md5 = self.request.params.get('hash')
return HTTPFound(location="%s/data/download?filename=%s&hash=%s" % (
self.request.registry.settings['ldos.url'],
filename, md5))
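# A small offline sketch (hypothetical data, not wired to the real SPARQL endpoint or the
# Pyramid registry) of the transformation sessionEntities() performs: each binding with an
# entity URI and an md5 becomes one wget line in the generated shell script. The base URL
# 'http://ldos.example' stands in for self.request.registry.settings['ldos.url'].
def _session_entities_sketch():
    bindings = [{'entity': {'value': 'http://provenance.jpl.nasa.gov/cdp#/tmp/out.nc/2012-01-01T00_00_00.000Z'},
                 'md5': {'value': 'deadbeef'}}]
    wgetLines = []
    for res in bindings:
        entity = res['entity']['value']
        hash = res['md5']['value']
        match = re.search(r'http://provenance\.jpl\.nasa\.gov/cdp#(.*?)/\d{4}-\d{2}-\d{2}T\d{2}_\d{2}_\d{2}.*?$', entity)
        if match:
            file = os.path.basename(match.group(1))
            wgetLines.append("wget %s/data/download?hash=%s -O %s" % ('http://ldos.example', hash, file))
    return "#!/bin/sh\n%s\n" % "\n".join(wgetLines)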
|
[
"pymonger@gmail.com"
] |
pymonger@gmail.com
|
5c970dffe7023ba46848e3b65f0ad476cbb2b53e
|
29145db13229d311269f317bf2819af6cba7d356
|
/april circuits/shifts.py
|
bb24c3d034ba66dfbb7a8eba41e99923e3127ea4
|
[] |
no_license
|
rocket3989/hackerEarth2019
|
802d1ca6fd03e80657cbe07a3f123e087679af4d
|
42c0a7005e52c3762496220136cc5c1ee93571bb
|
refs/heads/master
| 2021-07-05T01:32:42.203964
| 2020-12-22T03:40:20
| 2020-12-22T03:40:20
| 211,607,143
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 252
|
py
|
for tc in range(int(input())):
N, M, c = input().split()
N = int(N)
M = int(M)
N = bin(N)[2:].zfill(16)
if c == 'L':
print(int(N[M:] + N[:M], 2))
else:
print(int(N[16 - M:] + N[:16 - M], 2))
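# Worked example (not part of the submission, and only evaluated after the input loop above):
# N=1, M=3, c='L'. bin(1)[2:].zfill(16) -> '0000000000000001'; a left rotation by 3 moves the
# first three bits to the back, giving '0000000000001000' = 8. Rotating 8 right by 3 undoes it.
def _rotate16(N, M, left=True):
    bits = bin(N)[2:].zfill(16)
    rotated = bits[M:] + bits[:M] if left else bits[16 - M:] + bits[:16 - M]
    return int(rotated, 2)
assert _rotate16(1, 3, left=True) == 8
assert _rotate16(8, 3, left=False) == 1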
|
[
"rocket3989@gmail.com"
] |
rocket3989@gmail.com
|
37918bdb0d4e31428108d8434477b8686f64c19d
|
f75609812d20d46a9f94ee0cfdb91c321d26b63d
|
/flask/flask_fundamentals/Number_Game/server.py
|
6830ce31939d2a6ef2ce63d2e02eb346853fbccf
|
[] |
no_license
|
IanAranha/Python2021
|
eff47a20451f61b144b17f48321a7b06308aadca
|
d9769b8b387b77753b77f6efe3a9a270a1f158d3
|
refs/heads/main
| 2023-04-02T08:20:24.382913
| 2021-04-10T22:27:10
| 2021-04-10T22:27:10
| 345,918,060
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 865
|
py
|
from flask import Flask, redirect, render_template, session, request
import random
app = Flask(__name__)
app.secret_key = "0004ThisIsASecretKey"
@app.route("/")
def index():
if "random_number" not in session:
session["random_number"] = random.randrange(0, 101)
return render_template("index.html")
@app.route("/guess", methods=["post"])
def guess():
if request.form["input"] == "":
print("Cannot be blank")
return redirect("/")
session["guessed_num"] = int(request.form["input"])
if session["guessed_num"] < session["random_number"]:
session["state"] = "low"
elif session["guessed_num"] > session["random_number"]:
session["state"] = "high"
else:
session["state"] = "correct"
return redirect("/")
@app.route("/reset", methods=["post"])
def reset():
session.clear()
return redirect('/')
if __name__ == "__main__":
app.run(debug=True)
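# A minimal smoke-test sketch (not part of the original assignment) using Flask's built-in
# test client instead of a browser. It assumes the templates/index.html referenced by index()
# exists in the project; the guessed value "50" is an arbitrary illustration.
def _smoke_test():
    client = app.test_client()
    assert client.get("/").status_code == 200                                # seeds session["random_number"]
    assert client.post("/guess", data={"input": "50"}).status_code == 302    # redirects back to "/"
    assert client.post("/reset", data={}).status_code == 302                 # clears the session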
|
[
"ianorama@gmail.com"
] |
ianorama@gmail.com
|
243c193623591d29bb3fa6344bb1b2d31f4adb6f
|
2753757e2d13f5dd0d1faf1264031d476e162975
|
/others/assignment/temp.py
|
ae6d46f2f1fb03391ed5c73d858f9a215d0d38a0
|
[] |
no_license
|
florije1988/Suggestions
|
c8846dd089eab816051ecc1fc43a7fcc07580194
|
23718968acc16fa243c248a6ac3d4715c53daaa1
|
refs/heads/master
| 2020-05-20T07:01:54.292081
| 2014-08-11T07:52:02
| 2014-08-11T07:52:02
| 21,298,258
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,279
|
py
|
# -*- coding: utf-8 -*-
__author__ = 'florije'
import time
def reverse_str(str_arg):
if len(str_arg) == 1:
return str_arg
else:
return str_arg[-1] + reverse_str(str_arg[:-1])
if __name__ == '__main__':
# s_arg = input('list:')
# print s_arg
# print type(s_arg)
# for i in range(1, 20):
# print '%02d' % i
#
# print "Age:%02d" % 1
# title = ''
# f =file("%s.html" % title, "a")
u = u'汉'
print repr(u)
s = u.encode('UTF-8')
print repr(s)
u2 = s.decode('UTF-8')
print u2
print repr(u2) # u'\u6c49'
    # Decoding a unicode object is an error
# s2 = u.decode('UTF-8')
    # Likewise, encoding a str is an error
# u2 = s.encode('UTF-8')
a = ['anhui:0', 'shtel1:0', 'shtel2:0', 'weinan3:0', 'weinan1:0', 'weinan2:0', 'luckyhost:100', 'crh:99']
a.sort(key=lambda item: int(item.split(':')[1]))
print a
print reverse_str('fuboqing')
t = time.clock()
a = [1, 2, 3, 4, 5, 6, 7, 8, 9]
# print [item * 3 if tx.index(item) < 3 else item for item in tx]
# tx[:3] = [i*3 for i in tx[:3]]
# print tx
def aa(a):
a[0] = a[0] * 3
a[1] = a[1] * 3
a[2] = a[2] * 3
return a
print aa(a)
print time.clock() - t
|
[
"florije1988@gmail.com"
] |
florije1988@gmail.com
|
7f44ed7c492048c7a2268982590b8ef20b58f77e
|
75dcb56e318688499bdab789262839e7f58bd4f6
|
/_algorithms_challenges/practicepython/python-exercises-master/07-list-comprehension/exercise.py
|
894ad5cd07e85383178aea3f7a25e85196b75242
|
[] |
no_license
|
syurskyi/Algorithms_and_Data_Structure
|
9a1f358577e51e89c862d0f93f373b7f20ddd261
|
929dde1723fb2f54870c8a9badc80fc23e8400d3
|
refs/heads/master
| 2023-02-22T17:55:55.453535
| 2022-12-23T03:15:00
| 2022-12-23T03:15:00
| 226,243,987
| 4
| 1
| null | 2023-02-07T21:01:45
| 2019-12-06T04:14:10
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 270
|
py
|
#!/usr/bin/env python
if __name__ == '__main__':
all = [1, 4, 9, 16, 25, 36, 49, 64, 81, 100]
odd = [number for number in all if number % 2 == 1]
even = [number for number in all if number % 2 == 0]
print("All: " + str(all) + '\nOdd: ' + str(odd))
|
[
"sergejyurskyj@yahoo.com"
] |
sergejyurskyj@yahoo.com
|
da32f7c5d77290af8959d8e13d7d608b43117cd9
|
8f6aa9ac9c8c2e409875bbf36fbc49b3eb37d88b
|
/enthought/traits/ui/value_tree.py
|
04dc87e1db7556f3b15b867fb608ebd7c44d0b38
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
enthought/etsproxy
|
5660cf562c810db2ceb6b592b6c12274bce96d73
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
refs/heads/master
| 2023-03-27T04:51:29.297305
| 2020-12-02T09:05:18
| 2020-12-02T09:05:18
| 1,632,969
| 3
| 1
|
NOASSERTION
| 2020-12-02T09:05:20
| 2011-04-18T22:29:56
|
Python
|
UTF-8
|
Python
| false
| false
| 49
|
py
|
# proxy module
from traitsui.value_tree import *
|
[
"ischnell@enthought.com"
] |
ischnell@enthought.com
|
b5fc5c27bf55103c13421385e42b252a54f84749
|
0c1d6b8dff8bedfffa8703015949b6ca6cc83f86
|
/lib/worklists/operator/CT/v4.0/business/GPON_2+1/QoS_DSCP/script.py
|
8027050e03e5cef79e0d59b75c244127b0de19af
|
[] |
no_license
|
samwei8/TR069
|
6b87252bd53f23c37186c9433ce4d79507b8c7dd
|
7f6b8d598359c6049a4e6cb1eb1db0899bce7f5c
|
refs/heads/master
| 2021-06-21T11:07:47.345271
| 2017-08-08T07:14:55
| 2017-08-08T07:14:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,812
|
py
|
#coding:utf-8
# -----------------------------rpc --------------------------
import os
import sys
#debug
DEBUG_UNIT = False
if (DEBUG_UNIT):
g_prj_dir = os.path.dirname(__file__)
parent1 = os.path.dirname(g_prj_dir)
parent2 = os.path.dirname(parent1)
parent3 = os.path.dirname(parent2)
parent4 = os.path.dirname(parent3) # tr069v3\lib
parent5 = os.path.dirname(parent4) # tr069v3\
sys.path.insert(0, parent4)
sys.path.insert(0, os.path.join(parent4, 'common'))
sys.path.insert(0, os.path.join(parent4, 'worklist'))
sys.path.insert(0, os.path.join(parent4, 'usercmd'))
sys.path.insert(0, os.path.join(parent5, 'vendor'))
from TR069.lib.common.event import *
from TR069.lib.common.error import *
from time import sleep
import TR069.lib.common.logs.log as log
g_prj_dir = os.path.dirname(__file__)
parent1 = os.path.dirname(g_prj_dir)
parent2 = os.path.dirname(parent1) # dir is system
try:
i = sys.path.index(parent2)
if (i !=0):
        # strategy: boost the priority of parent2 in sys.path
sys.path.pop(i)
sys.path.insert(0, parent2)
except Exception,e:
sys.path.insert(0, parent2)
import _Common
reload(_Common)
from _Common import *
import _QoS
reload(_QoS)
from _QoS import QoS
def test_script(obj):
"""
"""
    sn = obj.sn # Get the SN number
    DeviceType = "GPON" # TR069 template type to bind; only ADSL\LAN\EPON are supported
    rollbacklist = [] # Instances to roll back (delete) if the worklist fails; rollback is disabled by default
    # Initialize logging
obj.dict_ret.update(str_result=u"开始执行工单:%s........\n" %
os.path.basename(os.path.dirname(__file__)))
    # Parameters passed in via dict_data
Max = obj.dict_data.get("Max")[0]
Min = obj.dict_data.get("Min")[0]
ClassQueue = obj.dict_data.get("ClassQueue")[0]
DSCPMarkValue = obj.dict_data.get("DSCPMarkValue")[0]
M802_1_P_Value = obj.dict_data.get("M802_1_P_Value")[0]
    # Parameters of the X_CT-COM_UplinkQoS node
dict_root = {'Mode':[0, 'Null'],
'Enable':[1, '1'],
'Bandwidth':[0, 'Null'],
'Plan':[1, 'priority'],
'EnableForceWeight':[0, 'Null'],
'EnableDSCPMark':[1, '1'],
'Enable802-1_P':[1, '2']}
    # Parameters under the X_CT-COM_UplinkQoS.App.{i}. node
dict_app = {'AppName':[0, 'Null'],
'ClassQueue':[0, 'Null']}
    # Parameters under the X_CT-COM_UplinkQoS.Classification.{i}.type.{i}. node
    # Note: a list of nested dicts is used because the service-based QoS guarantee test (UDP) needs multiple instances
list_value_type = [{'Type':[1, 'DSCP'],
'Max':[1, Max],
'Min':[1, Min],
'ProtocolList':[1, 'TCP,UDP']}]
    # Parameters under the X_CT-COM_UplinkQoS.Classification.{i}. node
dict_classification = {'ClassQueue':[1, ClassQueue],
'DSCPMarkValue':[1, DSCPMarkValue],
'802-1_P_Value':[1, M802_1_P_Value]}
    # Parameters under the X_CT-COM_UplinkQoS.PriorityQueue.{i}. node
dict_priorityQueue = {'Enable':[1, '1'],
'Priority':[1, '1'],
'Weight':[0, 'Null']}
    # Start executing the QoS worklist
ret, ret_data = QoS(obj, sn, DeviceType, dict_root,
dict_app, list_value_type,
dict_classification, dict_priorityQueue,
change_account=1,
rollbacklist=rollbacklist)
    # Write the execution result of the worklist script back into obj's result
obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data)
    # If execution fails, invoke the common rollback mechanism (disabled by default)
if ret == ERR_FAIL:
ret_rollback, ret_data_rollback = rollback(sn, rollbacklist, obj)
obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data_rollback)
info = u"工单:%s执行结束\n" % os.path.basename(os.path.dirname(__file__))
obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
return ret
if __name__ == '__main__':
log_dir = g_prj_dir
log.start(name="nwf", directory=log_dir, level="DebugWarn")
log.set_file_id(testcase_name="tr069")
obj = MsgWorklistExecute(id_="1")
obj.sn = "3F3001880F5CAD80F"
dict_data= {"Min":("10","1"),"Max":("10","2"),
"DSCPMarkValue":("1","3"),"M802_1_P_Value":("1","4"),
"ClassQueue":("1","5")}
obj.dict_data = dict_data
try:
ret = test_script(obj)
if ret == ERR_SUCCESS:
print u"测试成功"
else:
print u"测试失败"
print "****************************************"
print obj.dict_ret["str_result"]
except Exception, e:
print u"测试异常"
|
[
"zhaojunhhu@gmail.com"
] |
zhaojunhhu@gmail.com
|
06e91545546c5d5f9f8c5ae573bbd5682f098d9e
|
e7b7cc34f77c71e61aa0fa05bcc62f54fc2fc0e1
|
/Array/test_q056_merge_intervals.py
|
144e68cff13f68e05cc835a31a46718e9c0dfad5
|
[] |
no_license
|
sevenhe716/LeetCode
|
41d2ef18f5cb317858c9b69d00bcccb743cbdf48
|
4a1747b6497305f3821612d9c358a6795b1690da
|
refs/heads/master
| 2020-03-16T16:12:27.461172
| 2019-04-22T13:27:54
| 2019-04-22T13:27:54
| 130,221,784
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 498
|
py
|
import unittest
from Array.q056_merge_intervals import Solution
from common import Interval
class TestMergeIntervals(unittest.TestCase):
"""Test q056_merge_intervals.py"""
def test_merge_intervals(self):
s = Solution()
self.assertEqual([[1, 6], [8, 10], [15, 18]], s.merge([Interval(1, 3), Interval(2, 6), Interval(8, 10), Interval(15, 18)]))
self.assertEqual([[1, 5]], s.merge([Interval(1, 4), Interval(4, 5)]))
if __name__ == '__main__':
unittest.main()
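# A reference sketch of the merge step the test exercises. This is an assumption about
# Solution.merge (which lives in q056_merge_intervals and is not shown here) and about
# Interval exposing .start/.end: sort by start, then either extend the last merged
# interval or append a new one.
def _merge_sketch(intervals):
    out = []
    for iv in sorted(intervals, key=lambda x: x.start):
        if out and iv.start <= out[-1][1]:
            out[-1][1] = max(out[-1][1], iv.end)   # overlap: extend the previous interval
        else:
            out.append([iv.start, iv.end])         # gap: start a new interval
    return out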
|
[
"429134862@qq.com"
] |
429134862@qq.com
|
2b9e1a91205de5663111b9f61c7cc6a51b919853
|
53faa0ef3496997412eb5e697bc85eb09a28f8c9
|
/supervised_learning/0x06-keras/5-main.py
|
4c36d29b9b95d170647282429ea17053b98b29ca
|
[] |
no_license
|
oran2527/holbertonschool-machine_learning
|
aaec2ffe762b959573f98a5f4e002272a5d643a3
|
8761eb876046ad3c0c3f85d98dbdca4007d93cd1
|
refs/heads/master
| 2023-08-14T00:37:31.163130
| 2021-09-20T13:34:33
| 2021-09-20T13:34:33
| 330,999,053
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,440
|
py
|
#!/usr/bin/env python3
"""
Main file
"""
# Force Seed - fix for Keras
SEED = 0
import os
os.environ['PYTHONHASHSEED'] = str(SEED)
import random
random.seed(SEED)
import numpy as np
np.random.seed(SEED)
import tensorflow as tf
tf.set_random_seed(SEED)
import tensorflow.keras as K
session_conf = tf.ConfigProto(intra_op_parallelism_threads=1, inter_op_parallelism_threads=1)
sess = tf.Session(graph=tf.get_default_graph(), config=session_conf)
K.backend.set_session(sess)
# Imports
build_model = __import__('1-input').build_model
optimize_model = __import__('2-optimize').optimize_model
one_hot = __import__('3-one_hot').one_hot
train_model = __import__('5-train').train_model
if __name__ == '__main__':
datasets = np.load('../data/MNIST.npz')
X_train = datasets['X_train']
X_train = X_train.reshape(X_train.shape[0], -1)
Y_train = datasets['Y_train']
Y_train_oh = one_hot(Y_train)
X_valid = datasets['X_valid']
X_valid = X_valid.reshape(X_valid.shape[0], -1)
Y_valid = datasets['Y_valid']
Y_valid_oh = one_hot(Y_valid)
lambtha = 0.0001
keep_prob = 0.95
network = build_model(784, [256, 256, 10], ['relu', 'relu', 'softmax'], lambtha, keep_prob)
alpha = 0.001
beta1 = 0.9
beta2 = 0.999
optimize_model(network, alpha, beta1, beta2)
batch_size = 64
epochs = 5
train_model(network, X_train, Y_train_oh, batch_size, epochs, validation_data=(X_valid, Y_valid_oh))
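    # '5-train' is not reproduced in this file. A minimal sketch of what train_model is
    # expected to do with the arguments used above (an assumption, not the graded module)
    # is a thin wrapper over Keras fit():
    def train_model_sketch(network, data, labels, batch_size, epochs,
                           validation_data=None, verbose=True, shuffle=False):
        return network.fit(data, labels, batch_size=batch_size, epochs=epochs,
                           validation_data=validation_data, verbose=verbose, shuffle=shuffle)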
|
[
"orlago250183@gmail.com"
] |
orlago250183@gmail.com
|
a2d10d6ff44f902b929f0b62b703589f1f7756f7
|
19d43b8c175bb5304393cf9c259eacb7110dd4fc
|
/objectModel/Python/cdm/resolvedmodel/resolved_attribute.py
|
77b50937eb6d60885c0362dca92be9f242d7eb5e
|
[
"CC-BY-4.0",
"MIT"
] |
permissive
|
bissont/CDM
|
3fd814566ea1bf9d19e300cd5b438b384ce4bcba
|
0cffb140e0b41e526be072b547cae91a03c4cd6f
|
refs/heads/master
| 2020-12-29T12:55:23.822187
| 2020-02-05T02:19:27
| 2020-02-05T02:19:27
| 238,614,156
| 1
| 0
| null | 2020-02-06T05:21:51
| 2020-02-06T05:21:50
| null |
UTF-8
|
Python
| false
| false
| 5,092
|
py
|
# ----------------------------------------------------------------------
# Copyright (c) Microsoft Corporation.
# All rights reserved.
# ----------------------------------------------------------------------
from typing import Any, cast, Optional, Union, TYPE_CHECKING
from cdm.resolvedmodel.resolved_trait_set import ResolvedTraitSet
if TYPE_CHECKING:
from cdm.objectmodel import CdmAttribute, CdmAttributeContext, CdmObject, SpewCatcher
from cdm.resolvedmodel import AttributeResolutionContext, ResolvedAttributeSet
from cdm.utilities import ApplierState, ResolveOptions, TraitToPropertyMap
ResolutionTarget = Union[CdmAttribute, ResolvedAttributeSet]
class ResolvedAttribute():
def __init__(self, res_opt: 'ResolveOptions', target: 'ResolutionTarget', default_name: str, att_ctx: 'CdmAttributeContext') -> None:
self.applier_state = None # type: Optional[ApplierState]
self.arc = None # type: Optional[AttributeResolutionContext]
self.att_ctx = att_ctx # type: CdmAttributeContext
self.insert_order = 0 # type: int
self.previous_resolved_name = default_name # type: str
self.resolved_traits = ResolvedTraitSet(res_opt) # type: ResolvedTraitSet
self.target = target # type: ResolutionTarget
self._resolved_name = default_name # type: str
self._ttpm = None # type: Optional[TraitToPropertyMap]
@property
def resolved_name(self) -> str:
return self._resolved_name
@resolved_name.setter
def resolved_name(self, value: str) -> None:
self._resolved_name = value
if self.previous_resolved_name is None:
self.previous_resolved_name = value
@property
def is_primary_key(self) -> Optional[bool]:
return self._trait_to_property_map.fetch_property_value('isPrimaryKey')
@property
def is_read_only(self) -> Optional[bool]:
return self._trait_to_property_map.fetch_property_value('isReadOnly')
@property
def is_nullable(self) -> Optional[bool]:
return self._trait_to_property_map.fetch_property_value('isNullable')
@property
def data_format(self) -> str:
return self._trait_to_property_map.fetch_property_value('dataFormat')
@property
def source_name(self) -> str:
return self._trait_to_property_map.fetch_property_value('sourceName')
@property
def source_ordering(self) -> Optional[int]:
return self._trait_to_property_map.fetch_property_value('sourceOrdering')
@property
def display_name(self) -> str:
return self._trait_to_property_map.fetch_property_value('displayName')
@property
def description(self) -> str:
return self._trait_to_property_map.fetch_property_value('description')
@property
def maximum_value(self) -> str:
return self._trait_to_property_map.fetch_property_value('maximumValue')
@property
def minimum_value(self) -> str:
return self._trait_to_property_map.fetch_property_value('minimumValue')
@property
def maximum_length(self) -> Optional[int]:
return self._trait_to_property_map.fetch_property_value('maximumLength')
@property
def value_constrained_to_list(self) -> Optional[bool]:
return self._trait_to_property_map.fetch_property_value('valueConstrainedToList')
@property
def default_value(self) -> Any:
return self._trait_to_property_map.fetch_property_value('defaultValue')
@property
def creation_sequence(self) -> int:
return self.insert_order
@property
def _trait_to_property_map(self) -> 'TraitToPropertyMap':
from cdm.utilities import TraitToPropertyMap
if self._ttpm is not None:
return self._ttpm
self._ttpm = TraitToPropertyMap(cast('CdmObject', self.target))
return self._ttpm
def copy(self) -> 'ResolvedAttribute':
# Use the options from the traits.
copy = ResolvedAttribute(self.resolved_traits.res_opt, self.target, self._resolved_name, self.att_ctx)
copy.resolved_traits = self.resolved_traits.shallow_copy()
copy.insert_order = self.insert_order
copy.arc = self.arc
if self.applier_state is not None:
copy.applier_state = self.applier_state.copy()
return copy
def spew(self, res_opt: 'ResolveOptions', to: 'SpewCatcher', indent: str, name_sort: bool) -> None:
to.spew_line('{}[{}]'.format(indent, self._resolved_name))
self.resolved_traits.spew(res_opt, to, indent + '-', name_sort)
def complete_context(self, res_opt: 'ResolveOptions') -> None:
from cdm.objectmodel import CdmAttribute
if self.att_ctx is None or self.att_ctx.name is not None:
return
self.att_ctx.name = self._resolved_name
if isinstance(self.target, CdmAttribute):
self.att_ctx.definition = self.target.create_simple_reference(res_opt)
self.att_ctx.at_corpus_path = str(self.att_ctx.parent.fetch_object_definition(res_opt).at_corpus_path) + '/' + self._resolved_name
|
[
"nebanfic@microsoft.com"
] |
nebanfic@microsoft.com
|
990db47ec28843c8eb2d8542de7e375dbb43c859
|
9c37742bdd09ccfb02da09be79e20b7333694d9b
|
/pyswagger/tests/v1_2/test_app.py
|
d65c18e48ea45b37e6f89ececb380e1a155dc7f9
|
[
"MIT"
] |
permissive
|
simudream/pyswagger
|
72eea9a24140d3dfbb4f6a4537e10a9b07c4d09f
|
1dcf7ab291d9535dfdb705e0cb0e2c6f2b0fb474
|
refs/heads/master
| 2020-12-11T05:32:38.335378
| 2015-01-22T11:39:10
| 2015-01-22T11:39:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,178
|
py
|
from pyswagger import SwaggerApp, errs
from ..utils import get_test_data_folder
from pyswagger.spec.v2_0.objects import (
Schema,
Operation,
)
import unittest
import httpretty
import os
import six
class HTTPGetterTestCase(unittest.TestCase):
""" test HTTPGetter """
@httpretty.activate
def test_http_getter(self):
""" make sure HTTPGetter works """
folder = get_test_data_folder(version='1.2', which='wordnik')
resource_list = user = pet = store = None
with open(os.path.join(folder, 'resource_list.json')) as f:
resource_list = f.read()
with open(os.path.join(folder, 'user.json')) as f:
user = f.read()
with open(os.path.join(folder, 'pet.json')) as f:
pet = f.read()
with open(os.path.join(folder, 'store.json')) as f:
store = f.read()
httpretty.register_uri(
httpretty.GET, 'http://petstore.swagger.wordnik.com/api/api-docs',
status=200,
body=resource_list
)
httpretty.register_uri(
httpretty.GET, 'http://petstore.swagger.wordnik.com/api/api-docs/user',
status=200,
body=user
)
httpretty.register_uri(
httpretty.GET, 'http://petstore.swagger.wordnik.com/api/api-docs/pet',
status=200,
body=pet
)
httpretty.register_uri(
httpretty.GET, 'http://petstore.swagger.wordnik.com/api/api-docs/store',
status=200,
body=store
)
local_app = SwaggerApp._create_('http://petstore.swagger.wordnik.com/api/api-docs')
self.assertEqual(sorted(local_app.raw._field_names_), sorted(['info', 'authorizations', 'apiVersion', 'swaggerVersion', 'apis']))
op = local_app.raw.apis['pet'].apis['updatePet']
self.assertEqual(sorted(op._field_names_), sorted([
'authorizations',
'consumes',
'defaultValue',
'deprecated',
'enum',
'format',
'items',
'maximum',
'method',
'minimum',
'nickname',
'parameters',
'path',
'produces',
'$ref',
'responseMessages',
'type',
'uniqueItems'
]))
class ValidationTestCase(unittest.TestCase):
""" test case for validation """
def setUp(self):
self.app = SwaggerApp.load(get_test_data_folder(version='1.2', which='err'))
def test_errs(self):
"""
"""
errs = self.app.validate(strict=False)
self.maxDiff = None
self.assertEqual(sorted(errs), sorted([
(('#/info', 'Info'), 'requirement description not meet.'),
(('#/info', 'Info'), 'requirement title not meet.'),
(('#/authorizations/oauth2', 'Authorization'), 'requirement type not meet.'),
(('#/authorizations/oauth2/grantTypes/implicit/loginEndpoint', 'LoginEndpoint'), 'requirement url not meet.'),
(('#/authorizations/oauth2/scopes/0', 'Scope'), 'requirement scope not meet.'),
(('#/authorizations/oauth2/grantTypes/authorization_code/tokenRequestEndpoint', 'TokenRequestEndpoint'), 'requirement url not meet.'),
(('#/apis/pet/apis/getPetById', 'Operation'), 'requirement method not meet.'),
(('#/apis/pet/apis/getPetById/parameters/0', 'Parameter'), 'requirement name not meet.'),
(('#/apis/pet/apis/getPetById/responseMessages/0', 'ResponseMessage'), 'requirement code not meet.'),
(('#/apis/pet/apis', 'Operation'), 'requirement nickname not meet.'),
(('#/apis/pet/models/Pet/properties/tags', 'Property'), 'array should be existed along with items'),
(('#/apis/pet/apis/getPetById/parameters/0', 'Parameter'), 'allowMultiple should be applied on path, header, or query parameters'),
(('#/apis/pet/apis/partialUpdate/parameters/1', 'Parameter'), 'body parameter with invalid name: qqq'),
(('#/apis/pet/apis/partialUpdate/parameters/0', 'Parameter'), 'void is only allowed in Operation object.')
]))
def test_raise_exception(self):
""" raise exceptions in strict mode """
self.assertRaises(errs.ValidationError, self.app.validate)
class SwaggerAppTestCase(unittest.TestCase):
""" test case for SwaggerApp """
def setUp(self):
folder = get_test_data_folder(
version='1.2',
)
def _hook(url):
p = six.moves.urllib.parse.urlparse(url)
if p.scheme != 'file':
return url
path = os.path.join(folder, p.path if not p.path.startswith('/') else p.path[1:])
return six.moves.urllib.parse.urlunparse(p[:2]+(path,)+p[3:])
self.app = SwaggerApp.load('wordnik', url_load_hook=_hook)
self.app.prepare()
def test_ref(self):
""" test ref function """
self.assertRaises(ValueError, self.app.resolve, None)
self.assertRaises(ValueError, self.app.resolve, '')
self.assertTrue(isinstance(self.app.resolve('#/definitions/user!##!User'), Schema))
self.assertTrue(isinstance(self.app.resolve('#/paths/~1api~1user~1{username}/put'), Operation))
self.assertEqual(self.app.resolve('#/paths/~1api~1store~1order/post/produces'), ['application/json'])
self.assertEqual(self.app.resolve('#/host'), 'petstore.swagger.wordnik.com')
# resolve with URL part
# refer to
# http://stackoverflow.com/questions/10246116/python-dereferencing-weakproxy
# for how to dereferencing weakref
self.assertEqual(
self.app.resolve('#/definitions/user!##!User').__repr__(),
self.app.resolve('file:///wordnik#/definitions/user!##!User').__repr__()
)
self.assertEqual(
self.app.resolve('#/paths/~1api~1user~1{username}/put').__repr__(),
self.app.resolve('file:///wordnik#/paths/~1api~1user~1{username}/put').__repr__()
)
def test_scope_dict(self):
""" ScopeDict is a syntactic suger
to access scoped named object, ex. Operation, Model
"""
# Operation
self.assertTrue(self.app.op['user', 'getUserByName'], Operation)
self.assertTrue(self.app.op['user', 'getUserByName'] is self.app.op['user!##!getUserByName'])
self.assertTrue(self.app.op['getUserByName'] is self.app.op['user!##!getUserByName'])
def test_shortcut(self):
""" a short cut to Resource, Operation, Model from SwaggerApp """
# Resource
# TODO: resource is now replaced by tags
#self.assertTrue(isinstance(app.rs['pet'], Resource))
#self.assertTrue(isinstance(app.rs['user'], Resource))
#self.assertTrue(isinstance(app.rs['store'], Resource))
# Operation
self.assertEqual(len(self.app.op.values()), 20)
self.assertEqual(sorted(self.app.op.keys()), sorted([
'pet!##!addPet',
'pet!##!deletePet',
'pet!##!findPetsByStatus',
'pet!##!findPetsByTags',
'pet!##!getPetById',
'pet!##!partialUpdate',
'pet!##!updatePet',
'pet!##!updatePetWithForm',
'pet!##!uploadFile',
'store!##!deleteOrder',
'store!##!getOrderById',
'store!##!placeOrder',
'user!##!createUser',
'user!##!createUsersWithArrayInput',
'user!##!createUsersWithListInput',
'user!##!deleteUser',
'user!##!getUserByName',
'user!##!loginUser',
'user!##!logoutUser',
'user!##!updateUser'
]))
self.assertTrue(self.app.op['user!##!getUserByName'], Operation)
# Model
d = self.app.resolve('#/definitions')
self.assertEqual(len(d.values()), 5)
self.assertEqual(sorted(d.keys()), sorted([
'pet!##!Category',
'pet!##!Pet',
'pet!##!Tag',
'store!##!Order',
'user!##!User'
]))
|
[
"missionaryliao@gmail.com"
] |
missionaryliao@gmail.com
|
4403e503e127c23cb397fe72eb4aca8267bc9fc4
|
a2d36e471988e0fae32e9a9d559204ebb065ab7f
|
/huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/update_url_auth_request.py
|
8044913f81326b931d750d21e5b323e8a54d90bf
|
[
"Apache-2.0"
] |
permissive
|
zhouxy666/huaweicloud-sdk-python-v3
|
4d878a90b8e003875fc803a61414788e5e4c2c34
|
cc6f10a53205be4cb111d3ecfef8135ea804fa15
|
refs/heads/master
| 2023-09-02T07:41:12.605394
| 2021-11-12T03:20:11
| 2021-11-12T03:20:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,802
|
py
|
# coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class UpdateUrlAuthRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'content_type': 'str',
'authorization': 'str',
'x_sdk_date': 'str',
'x_project_id': 'str',
'app_id': 'str',
'body': 'AppAuthReq'
}
attribute_map = {
'content_type': 'Content-Type',
'authorization': 'Authorization',
'x_sdk_date': 'X-Sdk-Date',
'x_project_id': 'X-Project-Id',
'app_id': 'app_id',
'body': 'body'
}
def __init__(self, content_type=None, authorization=None, x_sdk_date=None, x_project_id=None, app_id=None, body=None):
"""UpdateUrlAuthRequest - a model defined in huaweicloud sdk"""
self._content_type = None
self._authorization = None
self._x_sdk_date = None
self._x_project_id = None
self._app_id = None
self._body = None
self.discriminator = None
self.content_type = content_type
if authorization is not None:
self.authorization = authorization
if x_sdk_date is not None:
self.x_sdk_date = x_sdk_date
if x_project_id is not None:
self.x_project_id = x_project_id
self.app_id = app_id
if body is not None:
self.body = body
@property
def content_type(self):
"""Gets the content_type of this UpdateUrlAuthRequest.
        Content type.
:return: The content_type of this UpdateUrlAuthRequest.
:rtype: str
"""
return self._content_type
@content_type.setter
def content_type(self, content_type):
"""Sets the content_type of this UpdateUrlAuthRequest.
        Content type.
:param content_type: The content_type of this UpdateUrlAuthRequest.
:type: str
"""
self._content_type = content_type
@property
def authorization(self):
"""Gets the authorization of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; carries the authentication information.
:return: The authorization of this UpdateUrlAuthRequest.
:rtype: str
"""
return self._authorization
@authorization.setter
def authorization(self, authorization):
"""Sets the authorization of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; carries the authentication information.
:param authorization: The authorization of this UpdateUrlAuthRequest.
:type: str
"""
self._authorization = authorization
@property
def x_sdk_date(self):
"""Gets the x_sdk_date of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; the time at which the request was made.
:return: The x_sdk_date of this UpdateUrlAuthRequest.
:rtype: str
"""
return self._x_sdk_date
@x_sdk_date.setter
def x_sdk_date(self, x_sdk_date):
"""Sets the x_sdk_date of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; the time at which the request was made.
:param x_sdk_date: The x_sdk_date of this UpdateUrlAuthRequest.
:type: str
"""
self._x_sdk_date = x_sdk_date
@property
def x_project_id(self):
"""Gets the x_project_id of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; carries the project ID information.
:return: The x_project_id of this UpdateUrlAuthRequest.
:rtype: str
"""
return self._x_project_id
@x_project_id.setter
def x_project_id(self, x_project_id):
"""Sets the x_project_id of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; carries the project ID information.
:param x_project_id: The x_project_id of this UpdateUrlAuthRequest.
:type: str
"""
self._x_project_id = x_project_id
@property
def app_id(self):
"""Gets the app_id of this UpdateUrlAuthRequest.
        Application ID
:return: The app_id of this UpdateUrlAuthRequest.
:rtype: str
"""
return self._app_id
@app_id.setter
def app_id(self, app_id):
"""Sets the app_id of this UpdateUrlAuthRequest.
        Application ID
:param app_id: The app_id of this UpdateUrlAuthRequest.
:type: str
"""
self._app_id = app_id
@property
def body(self):
"""Gets the body of this UpdateUrlAuthRequest.
:return: The body of this UpdateUrlAuthRequest.
:rtype: AppAuthReq
"""
return self._body
@body.setter
def body(self, body):
"""Sets the body of this UpdateUrlAuthRequest.
:param body: The body of this UpdateUrlAuthRequest.
:type: AppAuthReq
"""
self._body = body
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UpdateUrlAuthRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
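# A small usage sketch (illustrative values only, not part of the generated SDK file).
# The optional body would be an AppAuthReq instance from the same SDK package.
if __name__ == "__main__":
    req = UpdateUrlAuthRequest(
        content_type="application/json",
        x_project_id="example-project-id",
        app_id="example-app-id",
    )
    print(req.to_dict())
    print(req)  # __repr__ falls back to the JSON produced by to_str()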
|
[
"hwcloudsdk@huawei.com"
] |
hwcloudsdk@huawei.com
|
e1b8a2a3c79e07c69c40d3e8faf146679ada1d3f
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/sieve-big-3235.py
|
b14f9bb2d964539516ff9bba1aaebca7d5ad3f67
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038
| 2022-02-03T15:42:39
| 2022-02-03T15:42:39
| 451,969,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 31,737
|
py
|
# A resizable list of integers
class Vector(object):
items: [int] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A resizable list of integers
class Vector2(object):
items: [int] = None
items2: [int] = None
size: int = 0
size2: int = 0
def __init__(self:"Vector2"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector2") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector2") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector2", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector2", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector2", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector2", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector2", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector2", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector2") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector2") -> int:
return self.size
# A resizable list of integers
class Vector3(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
def __init__(self:"Vector3"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector3") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector3", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector3", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector3", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector3", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector3", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector3", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector3") -> int:
return self.size
# A resizable list of integers
class Vector4(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
def __init__(self:"Vector4"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector4") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector4", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector4", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector4", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector4", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector4", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector4", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector4") -> int:
return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
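# Editor's sketch (not part of the original benchmark): how the doubling
# strategy above behaves, assuming the base Vector class defined earlier in
# this file mirrors the VectorN variants above (append/capacity/size).
#   dv:DoublingVector = None
#   dv = DoublingVector()
#   dv.append(1)   # fits, capacity stays 1
#   dv.append(2)   # capacity doubles: 1 -> 2
#   dv.append(3)   # capacity doubles: 2 -> 4
#   dv.append(4)   # fits, capacity stays 4
#   dv.append(5)   # capacity doubles: 4 -> 8
# Once capacity() exceeds doubling_limit // 2, growth falls back to one
# element per resize, as the else branch above shows.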
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
v:Vector = None
v2:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
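# Editor's note (illustrative, not in the original file): combining the two
# helpers above,
#   v:Vector = None
#   v = vrange(2, 20)
#   sieve(v)
# leaves v holding 2, 3, 5, 7, 11, 13, 17, 19. Each pass keeps v.get(i) and
# removes every later element divisible by it, so only primes survive. The
# "(not really)" in the comment reflects that it tests divisibility with %
# on a shrinking vector instead of striding over multiples like the classic
# Sieve of Eratosthenes.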
def sieve2(v:Vector, v2:Vector) -> object:
i:int = 0
i2:int = 0
j:int = 0
j2:int = 0
k:int = 0
k2:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
j:int = 0
j2:int = 0
j3:int = 0
k:int = 0
k2:int = 0
k3:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
j5:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
k5:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
|
[
"647530+Virtlink@users.noreply.github.com"
] |
647530+Virtlink@users.noreply.github.com
|
a448c9227d0b822d8e2f908cfc10bd93e53162b2
|
eacfc1c0b2acd991ec2cc7021664d8e79c9e58f6
|
/ccpnmr2.4/python/memops/gui/DataEntry.py
|
e60cff844461888a85150c46163e540f8db69eb0
|
[] |
no_license
|
edbrooksbank/ccpnmr2.4
|
cfecb0896dcf8978d796e6327f7e05a3f233a921
|
f279ca9bb2d972b1ce075dad5fcc16e6f4a9496c
|
refs/heads/master
| 2021-06-30T22:29:44.043951
| 2019-03-20T15:01:09
| 2019-03-20T15:01:09
| 176,757,815
| 0
| 1
| null | 2020-07-24T14:40:26
| 2019-03-20T14:59:23
|
HTML
|
UTF-8
|
Python
| false
| false
| 5,930
|
py
|
"""
======================COPYRIGHT/LICENSE START==========================
DataEntry.py: <write function here>
Copyright (C) 2005 Wayne Boucher, Rasmus Fogh, Tim Stevens and Wim Vranken (University of Cambridge and EBI/MSD)
=======================================================================
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
A copy of this license can be found in ../../../license/LGPL.license
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
======================COPYRIGHT/LICENSE END============================
for further information, please contact :
- CCPN website (http://www.ccpn.ac.uk/)
- PDBe website (http://www.ebi.ac.uk/pdbe/)
=======================================================================
If you are using this software for academic purposes, we suggest
quoting the following references:
===========================REFERENCE START=============================
R. Fogh, J. Ionides, E. Ulrich, W. Boucher, W. Vranken, J.P. Linge, M.
Habeck, W. Rieping, T.N. Bhat, J. Westbrook, K. Henrick, G. Gilliland,
H. Berman, J. Thornton, M. Nilges, J. Markley and E. Laue (2002). The
CCPN project: An interim report on a data model for the NMR community
(Progress report). Nature Struct. Biol. 9, 416-418.
Wim F. Vranken, Wayne Boucher, Tim J. Stevens, Rasmus
H. Fogh, Anne Pajon, Miguel Llinas, Eldon L. Ulrich, John L. Markley, John
Ionides and Ernest D. Laue (2005). The CCPN Data Model for NMR Spectroscopy:
Development of a Software Pipeline. Proteins 59, 687 - 696.
===========================REFERENCE END===============================
"""
import memops.gui.QueryDialogBox as QueryDialogBox
from memops.gui.FileSelectPopup import FileSelectPopup
def askPassword(title, prompt, parent = None):
return QueryDialogBox.askPassword(title, prompt, parent=parent)
def askString(title, prompt, initial_value = '', parent = None):
return QueryDialogBox.askString(title, prompt,initialvalue=initial_value,
parent=parent)
def askInteger(title, prompt, initial_value = '', min_value = None,
max_value = None, parent = None):
return QueryDialogBox.askInteger(title, prompt, initialvalue=initial_value,
minvalue=min_value, maxvalue=max_value, parent=parent)
def askFloat(title, prompt, initial_value = '', min_value = None,
max_value = None, parent = None):
return QueryDialogBox.askFloat(title, prompt, initialvalue=initial_value,
minvalue=min_value, maxvalue=max_value, parent=parent)
def askFile(title, prompt, initial_value = '', parent = None,
dismiss_text='Cancel', extra_dismiss_text = ''):
if (parent):
popup = FileSelectPopup(parent, title=title, prompt=prompt, show_file=True,
dismiss_text=dismiss_text,
extra_dismiss_text=extra_dismiss_text,
file=initial_value)
file = popup.getFile()
popup.destroy()
return file
else:
return askString(title, prompt, initial_value)
def askDir(title, prompt, initial_value = '', parent = None,
dismiss_text='Cancel', extra_dismiss_text = '', default_dir = None):
if (parent):
popup = FileSelectPopup(parent, title=title, prompt=prompt, show_file=False,
dismiss_text=dismiss_text,
extra_dismiss_text=extra_dismiss_text,
file=initial_value, default_dir = default_dir)
dir = popup.getDirectory()
popup.destroy()
return dir
else:
return askString(title, prompt, initial_value)
class DataEntry:
def askPassword(self, title, prompt, initial_value = '', parent = None, *args, **kw):
return askPassword(title, prompt, initial_value, parent)
def askString(self, title, prompt, initial_value = '', parent = None, *args, **kw):
return askString(title, prompt, initial_value, parent)
def askInteger(self, title, prompt, initial_value = '', min_value = None,
max_value = None, parent = None, *args, **kw):
return askInteger(title, prompt, initial_value, min_value, max_value, parent)
def askFloat(self, title, prompt, initial_value = '', min_value = None,
max_value = None, parent = None, *args, **kw):
return askFloat(title, prompt, initial_value, min_value, max_value, parent)
def askFile(self, title, prompt, initial_value = '', parent = None,
dismiss_text='Cancel', extra_dismiss_text = '', *args, **kw):
return askFile(title, prompt, initial_value, parent)
def askDir(self, title, prompt, initial_value = '', parent = None,
dismiss_text='Cancel', extra_dismiss_text = '', default_dir = None, *args, **kw):
return askDir(title, prompt, initial_value, parent, default_dir = default_dir)
dataEntry = DataEntry()
if (__name__ == '__main__'):
import Tkinter
r = Tkinter.Tk()
print dataEntry.askString('ask string title', 'ask string prompt')
print dataEntry.askInteger('ask integer title', 'ask integer prompt')
print dataEntry.askFloat('ask float title', 'ask float prompt')
print dataEntry.askFile('ask file title', 'ask file prompt', parent=r)
print dataEntry.askDir('ask dir title', 'ask dir prompt', parent=r)
|
[
"ejb66@le.ac.uk"
] |
ejb66@le.ac.uk
|
3fde9b355dbfa3a54a9aa52d7be9cb574bc0ad08
|
8ac8c254db733ac5c021582daeb49931f8ab1d92
|
/src/glomerulus/search/pws_clone/__init__.py
|
7e0bed2c769525c1989a012cb60860426e4325a3
|
[] |
no_license
|
kaglowka/glomerulus
|
c5d0490427f724a733b001b200fb31cfab57f117
|
6f18f9961b2c17725555c5b53d9408228169be00
|
refs/heads/master
| 2022-12-10T01:40:12.374036
| 2018-01-22T22:14:37
| 2018-01-22T22:14:37
| 117,331,213
| 0
| 0
| null | 2022-12-07T23:45:30
| 2018-01-13T09:30:57
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 50
|
py
|
from .google import Google
from .bing import Bing
|
[
"krz.glowka@gmail.com"
] |
krz.glowka@gmail.com
|
26badfb0c04bfb46b37abe64726a3d5fcd24a87b
|
a0134ad7265d7460e7ca9127686a850e7e826da5
|
/models/test/chainer/ctc/test_hierarchical_ctc.py
|
6acb356d8f73a700f3e2254af919d20d8b8891ad
|
[] |
no_license
|
carolinebear/pytorch_end2end_speech_recognition
|
09ce03fb878353db1e25f599a62537655650a19c
|
b6b60a338d65bb369d0034f423feb09db10db8b7
|
refs/heads/master
| 2020-03-19T05:56:51.396599
| 2018-06-01T09:35:49
| 2018-06-01T09:35:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,830
|
py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Test hierarchical CTC models (chainer)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import time
import unittest
sys.path.append('../../../../')
from models.chainer.ctc.hierarchical_ctc import HierarchicalCTC
from models.test.data import generate_data, idx2char, idx2word
from utils.measure_time_func import measure_time
from utils.evaluation.edit_distance import compute_cer, compute_wer
# from utils.training.learning_rate_controller import Controller
class TestCTC(unittest.TestCase):
def test(self):
print("Hierarchical CTC Working check.")
# CNN-CTC
self.check(encoder_type='cnn', batch_norm=True, activation='relu')
# CLDNN-CTC
self.check(encoder_type='lstm', bidirectional=True,
conv=True)
self.check(encoder_type='lstm', bidirectional=True,
conv=True, batch_norm=True)
# Label smoothing
self.check(encoder_type='lstm', bidirectional=True,
label_smoothing=True)
# Pyramidal encoder
self.check(encoder_type='lstm', bidirectional=True, subsample=True)
# Projection
self.check(encoder_type='lstm', bidirectional=True, projection=True)
self.check(encoder_type='lstm', bidirectional=False, projection=True)
# Residual LSTM-CTC
self.check(encoder_type='lstm', bidirectional=True,
residual=True)
self.check(encoder_type='lstm', bidirectional=True,
dense_residual=True)
# BLSTM-CTC
self.check(encoder_type='lstm', bidirectional=True)
@measure_time
def check(self, encoder_type, bidirectional=False,
subsample=False, projection=False,
conv=False, batch_norm=False, activation='relu',
residual=False, dense_residual=False, label_smoothing=False):
print('==================================================')
print(' encoder_type: %s' % encoder_type)
print(' bidirectional: %s' % str(bidirectional))
print(' projection: %s' % str(projection))
print(' subsample: %s' % str(subsample))
print(' conv: %s' % str(conv))
print(' batch_norm: %s' % str(batch_norm))
print(' residual: %s' % str(residual))
print(' dense_residual: %s' % str(dense_residual))
print(' label_smoothing: %s' % str(label_smoothing))
print('==================================================')
if conv or encoder_type == 'cnn':
# pattern 1
# conv_channels = [32, 32]
# conv_kernel_sizes = [[41, 11], [21, 11]]
# conv_strides = [[2, 2], [2, 1]]
# poolings = [[], []]
# pattern 2 (VGG like)
conv_channels = [64, 64]
conv_kernel_sizes = [[3, 3], [3, 3]]
conv_strides = [[1, 1], [1, 1]]
poolings = [[2, 2], [2, 2]]
fc_list = [786, 786]
else:
conv_channels = []
conv_kernel_sizes = []
conv_strides = []
poolings = []
fc_list = []
# Load batch data
num_stack = 1 if subsample or conv or encoder_type == 'cnn' else 2
splice = 1
xs, ys, ys_sub, x_lens, y_lens, y_lens_sub = generate_data(
label_type='word_char',
batch_size=2,
num_stack=num_stack,
splice=splice,
backend='chainer')
num_classes = 11
num_classes_sub = 27
# Load model
model = HierarchicalCTC(
input_size=xs[0].shape[-1] // splice // num_stack, # 120
encoder_type=encoder_type,
encoder_bidirectional=bidirectional,
encoder_num_units=256,
encoder_num_proj=256 if projection else 0,
encoder_num_layers=2,
encoder_num_layers_sub=1,
fc_list=fc_list,
fc_list_sub=fc_list,
dropout_input=0.1,
dropout_encoder=0.1,
main_loss_weight=0.8,
sub_loss_weight=0.2,
num_classes=num_classes,
num_classes_sub=num_classes_sub,
parameter_init_distribution='uniform',
parameter_init=0.1,
recurrent_weight_orthogonal=False,
init_forget_gate_bias_with_one=True,
subsample_list=[] if not subsample else [True, False],
num_stack=num_stack,
splice=splice,
conv_channels=conv_channels,
conv_kernel_sizes=conv_kernel_sizes,
conv_strides=conv_strides,
poolings=poolings,
batch_norm=batch_norm,
label_smoothing_prob=0.1 if label_smoothing else 0,
weight_noise_std=0,
encoder_residual=residual,
encoder_dense_residual=dense_residual)
# Count total parameters
for name in sorted(list(model.num_params_dict.keys())):
num_params = model.num_params_dict[name]
print("%s %d" % (name, num_params))
print("Total %.3f M parameters" % (model.total_parameters / 1000000))
# Define optimizer
learning_rate = 1e-3
model.set_optimizer('adam',
learning_rate_init=learning_rate,
weight_decay=1e-6,
clip_grad_norm=5,
lr_schedule=None,
factor=None,
patience_epoch=None)
# Define learning rate controller
# lr_controller = Controller(learning_rate_init=learning_rate,
# backend='chainer',
# decay_start_epoch=20,
# decay_rate=0.9,
# decay_patient_epoch=10,
# lower_better=True)
# GPU setting
model.set_cuda(deterministic=False, benchmark=True)
# Train model
max_step = 300
start_time_step = time.time()
for step in range(max_step):
# Step for parameter update
loss, loss_main, loss_sub = model(
xs, ys, x_lens, y_lens, ys_sub, y_lens_sub)
model.optimizer.target.cleargrads()
model.cleargrads()
loss.backward()
loss.unchain_backward()
model.optimizer.update()
# Inject Gaussian noise to all parameters
if (step + 1) % 10 == 0:
# Compute loss
loss, loss_main, loss_sub = model(
xs, ys, x_lens, y_lens, ys_sub, y_lens_sub, is_eval=True)
# Decode
best_hyps, _, _ = model.decode(
xs, x_lens, beam_width=1, task_index=0)
best_hyps_sub, _, _ = model.decode(
xs, x_lens, beam_width=1, task_index=1)
# TODO: fix beam search
str_ref = idx2word(ys[0, :y_lens[0]])
str_hyp = idx2word(best_hyps[0])
str_ref_sub = idx2char(ys_sub[0, :y_lens_sub[0]])
str_hyp_sub = idx2char(best_hyps_sub[0])
# Compute accuracy
try:
wer, _, _, _ = compute_wer(ref=str_ref.split('_'),
hyp=str_hyp.split('_'),
normalize=True)
cer, _, _, _ = compute_wer(
ref=list(str_ref_sub.replace('_', '')),
hyp=list(str_hyp_sub.replace('_', '')),
normalize=True)
except:
wer = 1
cer = 1
duration_step = time.time() - start_time_step
print('Step %d: loss=%.3f(%.3f/%.3f) / wer=%.3f / cer=%.3f / lr=%.5f (%.3f sec)' %
(step + 1, loss, loss_main, loss_sub,
wer, cer, learning_rate, duration_step))
start_time_step = time.time()
# Visualize
print('Ref: %s' % str_ref)
print('Hyp (word): %s' % str_hyp)
print('Hyp (char): %s' % str_hyp_sub)
if cer < 0.1:
print('Model converged.')
break
# Update learning rate
# model.optimizer, learning_rate = lr_controller.decay_lr(
# optimizer=model.optimizer,
# learning_rate=learning_rate,
# epoch=step,
# value=ler)
if __name__ == "__main__":
unittest.main()
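# Editor's note: because of the relative sys.path.append('../../../../') above,
# this check is meant to be launched from its own directory, e.g.
#   cd models/test/chainer/ctc && python test_hierarchical_ctc.py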
|
[
"hiro.mhbc@gmail.com"
] |
hiro.mhbc@gmail.com
|
a605dfcfc2f4d00faa17e9fbac69fb61a709b560
|
b35469b3a3ef3ecb8da35a178ba0994bae2989b3
|
/kubevirt/models/v1_pci_host_device.py
|
65d45e6884a716731c600aef51e52b927476c143
|
[
"Apache-2.0"
] |
permissive
|
CHsixnine/client-python
|
4802d76bbe3761a1311038665d931349298bcd81
|
315335602923dacbc3b73b23339002d69a5a41cc
|
refs/heads/master
| 2023-03-20T22:45:25.578704
| 2021-03-17T07:34:18
| 2021-03-17T07:34:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,120
|
py
|
# coding: utf-8
"""
KubeVirt API
This is KubeVirt API an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: kubevirt-dev@googlegroups.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1PciHostDevice(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'external_resource_provider': 'bool',
'pci_vendor_selector': 'str',
'resource_name': 'str'
}
attribute_map = {
'external_resource_provider': 'externalResourceProvider',
'pci_vendor_selector': 'pciVendorSelector',
'resource_name': 'resourceName'
}
def __init__(self, external_resource_provider=None, pci_vendor_selector=None, resource_name=None):
"""
V1PciHostDevice - a model defined in Swagger
"""
self._external_resource_provider = None
self._pci_vendor_selector = None
self._resource_name = None
if external_resource_provider is not None:
self.external_resource_provider = external_resource_provider
self.pci_vendor_selector = pci_vendor_selector
self.resource_name = resource_name
@property
def external_resource_provider(self):
"""
Gets the external_resource_provider of this V1PciHostDevice.
:return: The external_resource_provider of this V1PciHostDevice.
:rtype: bool
"""
return self._external_resource_provider
@external_resource_provider.setter
def external_resource_provider(self, external_resource_provider):
"""
Sets the external_resource_provider of this V1PciHostDevice.
:param external_resource_provider: The external_resource_provider of this V1PciHostDevice.
:type: bool
"""
self._external_resource_provider = external_resource_provider
@property
def pci_vendor_selector(self):
"""
Gets the pci_vendor_selector of this V1PciHostDevice.
:return: The pci_vendor_selector of this V1PciHostDevice.
:rtype: str
"""
return self._pci_vendor_selector
@pci_vendor_selector.setter
def pci_vendor_selector(self, pci_vendor_selector):
"""
Sets the pci_vendor_selector of this V1PciHostDevice.
:param pci_vendor_selector: The pci_vendor_selector of this V1PciHostDevice.
:type: str
"""
if pci_vendor_selector is None:
raise ValueError("Invalid value for `pci_vendor_selector`, must not be `None`")
self._pci_vendor_selector = pci_vendor_selector
@property
def resource_name(self):
"""
Gets the resource_name of this V1PciHostDevice.
:return: The resource_name of this V1PciHostDevice.
:rtype: str
"""
return self._resource_name
@resource_name.setter
def resource_name(self, resource_name):
"""
Sets the resource_name of this V1PciHostDevice.
:param resource_name: The resource_name of this V1PciHostDevice.
:type: str
"""
if resource_name is None:
raise ValueError("Invalid value for `resource_name`, must not be `None`")
self._resource_name = resource_name
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1PciHostDevice):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
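# Editor's usage sketch (not part of the generated client; the selector and
# resource values below are hypothetical examples, and the import path simply
# follows this file's location):
#   from kubevirt.models.v1_pci_host_device import V1PciHostDevice
#   dev = V1PciHostDevice(pci_vendor_selector="10de:1eb8",
#                         resource_name="nvidia.com/example-gpu")
#   dev.to_dict()
#   # {'external_resource_provider': None,
#   #  'pci_vendor_selector': '10de:1eb8',
#   #  'resource_name': 'nvidia.com/example-gpu'}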
|
[
"travis@travis-ci.org"
] |
travis@travis-ci.org
|
374fb7f9548ddb214ed23c9f91baa6f51c6ecd9a
|
eb722922339781fa6bd9937e69383fcd06256738
|
/day1/kapua-python-client/swagger_client/models/user_query.py
|
f40b1b68ccf746a9a9f1ae2d1ffd2154a5689df1
|
[
"MIT"
] |
permissive
|
mrsrinivas/diec
|
6a0c5da26ff23170b71217bfbc810bb98a897a83
|
ae9a5203b506d5cc18cb381666351bf9ce6b9b6c
|
refs/heads/master
| 2021-01-05T05:41:19.394898
| 2020-01-15T06:24:33
| 2020-01-15T06:24:33
| 240,901,175
| 1
| 0
|
MIT
| 2020-02-16T13:59:53
| 2020-02-16T13:59:52
| null |
UTF-8
|
Python
| false
| false
| 6,757
|
py
|
# coding: utf-8
"""
Eclipse Kapua REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.kapua_sort_criteria import KapuaSortCriteria # noqa: F401,E501
from swagger_client.models.query_predicate import QueryPredicate # noqa: F401,E501
class UserQuery(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'limit': 'int',
'scope_id': 'str',
'fetch_attributes': 'list[str]',
'predicate': 'QueryPredicate',
'sort_criteria': 'KapuaSortCriteria',
'offset': 'int'
}
attribute_map = {
'limit': 'limit',
'scope_id': 'scopeId',
'fetch_attributes': 'fetchAttributes',
'predicate': 'predicate',
'sort_criteria': 'sortCriteria',
'offset': 'offset'
}
def __init__(self, limit=None, scope_id=None, fetch_attributes=None, predicate=None, sort_criteria=None, offset=None): # noqa: E501
"""UserQuery - a model defined in Swagger""" # noqa: E501
self._limit = None
self._scope_id = None
self._fetch_attributes = None
self._predicate = None
self._sort_criteria = None
self._offset = None
self.discriminator = None
if limit is not None:
self.limit = limit
if scope_id is not None:
self.scope_id = scope_id
if fetch_attributes is not None:
self.fetch_attributes = fetch_attributes
if predicate is not None:
self.predicate = predicate
if sort_criteria is not None:
self.sort_criteria = sort_criteria
if offset is not None:
self.offset = offset
@property
def limit(self):
"""Gets the limit of this UserQuery. # noqa: E501
:return: The limit of this UserQuery. # noqa: E501
:rtype: int
"""
return self._limit
@limit.setter
def limit(self, limit):
"""Sets the limit of this UserQuery.
:param limit: The limit of this UserQuery. # noqa: E501
:type: int
"""
self._limit = limit
@property
def scope_id(self):
"""Gets the scope_id of this UserQuery. # noqa: E501
:return: The scope_id of this UserQuery. # noqa: E501
:rtype: str
"""
return self._scope_id
@scope_id.setter
def scope_id(self, scope_id):
"""Sets the scope_id of this UserQuery.
:param scope_id: The scope_id of this UserQuery. # noqa: E501
:type: str
"""
self._scope_id = scope_id
@property
def fetch_attributes(self):
"""Gets the fetch_attributes of this UserQuery. # noqa: E501
:return: The fetch_attributes of this UserQuery. # noqa: E501
:rtype: list[str]
"""
return self._fetch_attributes
@fetch_attributes.setter
def fetch_attributes(self, fetch_attributes):
"""Sets the fetch_attributes of this UserQuery.
:param fetch_attributes: The fetch_attributes of this UserQuery. # noqa: E501
:type: list[str]
"""
self._fetch_attributes = fetch_attributes
@property
def predicate(self):
"""Gets the predicate of this UserQuery. # noqa: E501
:return: The predicate of this UserQuery. # noqa: E501
:rtype: QueryPredicate
"""
return self._predicate
@predicate.setter
def predicate(self, predicate):
"""Sets the predicate of this UserQuery.
:param predicate: The predicate of this UserQuery. # noqa: E501
:type: QueryPredicate
"""
self._predicate = predicate
@property
def sort_criteria(self):
"""Gets the sort_criteria of this UserQuery. # noqa: E501
:return: The sort_criteria of this UserQuery. # noqa: E501
:rtype: KapuaSortCriteria
"""
return self._sort_criteria
@sort_criteria.setter
def sort_criteria(self, sort_criteria):
"""Sets the sort_criteria of this UserQuery.
:param sort_criteria: The sort_criteria of this UserQuery. # noqa: E501
:type: KapuaSortCriteria
"""
self._sort_criteria = sort_criteria
@property
def offset(self):
"""Gets the offset of this UserQuery. # noqa: E501
:return: The offset of this UserQuery. # noqa: E501
:rtype: int
"""
return self._offset
@offset.setter
def offset(self, offset):
"""Sets the offset of this UserQuery.
:param offset: The offset of this UserQuery. # noqa: E501
:type: int
"""
self._offset = offset
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(UserQuery, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UserQuery):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
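# Editor's usage sketch (not part of the generated client; the import path
# follows this file's location, and the values are illustrative):
#   from swagger_client.models.user_query import UserQuery
#   query = UserQuery(limit=25, offset=50)
#   query.to_dict()['limit']   # 25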
|
[
"noreply@github.com"
] |
mrsrinivas.noreply@github.com
|
a65ad9748193a80ca6ea3a3b9948f43ba7938fbe
|
60a831fb3c92a9d2a2b52ff7f5a0f665d4692a24
|
/IronPythonStubs/release/stubs.min/System/ComponentModel/__init___parts/DataObjectAttribute.py
|
fa31cc9ac8a0bb5e911aa72b7329df96aa63c06d
|
[
"MIT"
] |
permissive
|
shnlmn/Rhino-Grasshopper-Scripts
|
a9411098c5d1bbc55feb782def565d535b27b709
|
0e43c3c1d09fb12cdbd86a3c4e2ba49982e0f823
|
refs/heads/master
| 2020-04-10T18:59:43.518140
| 2020-04-08T02:49:07
| 2020-04-08T02:49:07
| 161,219,695
| 11
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,065
|
py
|
class DataObjectAttribute(Attribute,_Attribute):
"""
Identifies a type as an object suitable for binding to an System.Web.UI.WebControls.ObjectDataSource object. This class cannot be inherited.
DataObjectAttribute()
DataObjectAttribute(isDataObject: bool)
"""
def Equals(self,obj):
"""
Equals(self: DataObjectAttribute,obj: object) -> bool
Determines whether this instance of System.ComponentModel.DataObjectAttribute fits the pattern
of another object.
obj: An object to compare with this instance of System.ComponentModel.DataObjectAttribute.
Returns: true if this instance is the same as the instance specified by the obj parameter; otherwise,
false.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: DataObjectAttribute) -> int
Returns the hash code for this instance.
Returns: A 32-bit signed integer hash code.
"""
pass
def IsDefaultAttribute(self):
"""
IsDefaultAttribute(self: DataObjectAttribute) -> bool
Gets a value indicating whether the current value of the attribute is the default value for the
attribute.
Returns: true if the current value of the attribute is the default; otherwise,false.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,isDataObject=None):
"""
__new__(cls: type)
__new__(cls: type,isDataObject: bool)
"""
pass
def __ne__(self,*args):
pass
IsDataObject=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether an object should be considered suitable for binding to an System.Web.UI.WebControls.ObjectDataSource object at design time.
Get: IsDataObject(self: DataObjectAttribute) -> bool
"""
DataObject=None
Default=None
NonDataObject=None
|
[
"magnetscoil@gmail.com"
] |
magnetscoil@gmail.com
|
c88a4d8e2cc001c7b88a803278245d24cd0071ad
|
834e36fb8e87b129eb1d67d058132c32a430229a
|
/rest_framework/lib/orm/peewee.py
|
9ba14b497de2490da6db604cd9c15cbdba049227
|
[] |
no_license
|
sjl421/tornado-rest-framework
|
bea06fa8278e4831fcd38abfb6c8aa69e4e1e436
|
c61d11020d5e680b8d1c01469d764517484665bd
|
refs/heads/master
| 2020-03-18T21:09:53.524478
| 2018-05-11T04:57:57
| 2018-05-11T04:57:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 168,492
|
py
|
# -*- coding: utf-8 -*-
import re
import time
import uuid
import weakref
import calendar
import datetime
import decimal
import hashlib
import itertools
import logging
import operator
import threading
from copy import deepcopy
from functools import wraps
from inspect import isclass
from collections.abc import Callable
from functools import reduce
from bisect import bisect_left
from bisect import bisect_right
from collections import deque
from collections import namedtuple
from collections import OrderedDict
from logging import NullHandler
try:
import pymysql as mysql
except ImportError:
mysql = None
def format_date_time(value, formats, post_process=None):
post_process = post_process or (lambda x: x)
for fmt in formats:
try:
return post_process(datetime.datetime.strptime(value, fmt))
except ValueError:
pass
return value
def sort_models_topologically(models):
"""Sort models topologically so that parents will precede children."""
models = set(models)
seen = set()
ordering = []
def dfs(model):
# Omit models which are already sorted
# or should not be in the list at all
if model in models and model not in seen:
seen.add(model)
# First create models on which current model depends
# (either through foreign keys or through depends_on),
# then create current model itself
for foreign_key in model._meta.rel.values():
dfs(foreign_key.rel_model)
if model._meta.depends_on:
for dependency in model._meta.depends_on:
dfs(dependency)
ordering.append(model)
# Order models by name and table initially to guarantee total ordering.
names = lambda m: (m._meta.name, m._meta.db_table)
for m in sorted(models, key=names):
dfs(m)
return ordering
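# Editor's note (illustrative): with hypothetical models where Tweet declares a
# ForeignKeyField to User, sort_models_topologically([Tweet, User]) yields
# [User, Tweet], so table creation can process parents before children.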
def strip_parens(s):
# Quick sanity check.
if not s or s[0] != '(':
return s
ct = i = 0
l = len(s)
while i < l:
if s[i] == '(' and s[l - 1] == ')':
ct += 1
i += 1
l -= 1
else:
break
if ct:
# If we ever end up with negatively-balanced parentheses, then we
# know that one of the outer parentheses was required.
unbalanced_ct = 0
required = 0
for i in range(ct, l - ct):
if s[i] == '(':
unbalanced_ct += 1
elif s[i] == ')':
unbalanced_ct -= 1
if unbalanced_ct < 0:
required += 1
unbalanced_ct = 0
if required == ct:
break
ct -= required
if ct > 0:
return s[ct:-ct]
return s
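# Editor's sketch: doctest-style illustration of strip_parens; only redundant
# outer parentheses are removed.
#   >>> strip_parens('(((x + 1)))')
#   'x + 1'
#   >>> strip_parens('(a) AND (b)')
#   '(a) AND (b)'
# In the second case the leading '(' and trailing ')' belong to different
# groups, so nothing is stripped.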
_DictQueryResultWrapper = _ModelQueryResultWrapper = _SortedFieldList = _TuplesQueryResultWrapper = None
logger = logging.getLogger('peewee')
logger.addHandler(NullHandler())
binary_construct = lambda s: bytes(s.encode('raw_unicode_escape'))
_METACLASS_ = '_metaclass_helper_'
def with_metaclass(meta, base=object):
return meta(_METACLASS_, (base,), {})
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
MYSQL_DATE_TRUNC_MAPPING = {
'year': '%Y',
'month': '%Y-%m',
'day': '%Y-%m-%d',
'hour': '%Y-%m-%d %H',
'minute': '%Y-%m-%d %H:%i',
'second': '%Y-%m-%d %H:%i:%S'
}
class attrdict(dict):
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError(attr)
# Operators used in binary expressions.
OP = attrdict(
AND='and',
OR='or',
ADD='+',
SUB='-',
MUL='*',
DIV='/',
BIN_AND='&',
BIN_OR='|',
XOR='^',
MOD='%',
EQ='=',
LT='<',
LTE='<=',
GT='>',
GTE='>=',
NE='!=',
IN='in',
NOT_IN='not in',
IS='is',
IS_NOT='is not',
LIKE='like',
ILIKE='ilike',  # case-insensitive LIKE
BETWEEN='between',
REGEXP='regexp',
CONCAT='||',
)
JOIN = attrdict(
INNER='INNER',
LEFT_OUTER='LEFT OUTER',
RIGHT_OUTER='RIGHT OUTER',
FULL='FULL',
CROSS='CROSS',
)
JOIN_INNER = JOIN.INNER
JOIN_LEFT_OUTER = JOIN.LEFT_OUTER
JOIN_FULL = JOIN.FULL
RESULTS_NAIVE = 1
RESULTS_MODELS = 2
RESULTS_TUPLES = 3
RESULTS_DICTS = 4
RESULTS_AGGREGATE_MODELS = 5
RESULTS_NAMEDTUPLES = 6
# To support "django-style" double-underscore filters, create a mapping between
# operation name and operation code, e.g. "__eq" == OP.EQ.
DJANGO_MAP = {
'eq': (OP.EQ,),  # equals
'lt': (OP.LT,), #
'lte': (OP.LTE,),
'gt': (OP.GT,),
'gte': (OP.GTE,),
'ne': (OP.NE,),
'in': (OP.IN,),
'is': (OP.IS,),
'like': (OP.LIKE,),
'ilike': (OP.ILIKE,),  # case-insensitive LIKE
'regexp': (OP.REGEXP,),
# Additional lookup shortcuts
"exact": (OP.EQ,),  # exact equality, case-insensitive
"contains": (OP.LIKE, '%%%s%%'),  # contains: like '%aaa%'
"icontains": (OP.ILIKE, '%%%s%%'),  # contains, case-insensitive: ilike '%aaa%'
"startswith": (OP.LIKE, '%s%%'),  # starts with
"istartswith": (OP.ILIKE, '%s%%'),  # starts with, case-insensitive
"endswith": (OP.LIKE, '%%%s'),  # ends with
"iendswith": (OP.ILIKE, '%%%s'),  # ends with, case-insensitive
}
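# Editor's note: when a "django-style" keyword filter such as
# username__icontains='foo' is expanded elsewhere in this module, the suffix
# after the double underscore is looked up here. Single-element entries supply
# only the operator; two-element entries also supply a wildcard template, e.g.
#   op = DJANGO_MAP['icontains'][0]          # OP.ILIKE
#   template = DJANGO_MAP['icontains'][1]    # '%%%s%%'
#   template % 'foo'                         # '%foo%'
# (username is a hypothetical field name used only for illustration.)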
# Helper functions that are used in various parts of the codebase.
def merge_dict(source, overrides):
merged = source.copy()
merged.update(overrides)
return merged
def returns_clone(func):
"""
Method decorator that will "clone" the object before applying the given
method. This ensures that state is mutated in a more predictable fashion,
and promotes the use of method-chaining.
"""
def inner(self, *args, **kwargs):
clone = self.clone() # Assumes object implements `clone`.
func(clone, *args, **kwargs)
return clone
inner.call_local = func # Provide a way to call without cloning.
return inner
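# Editor's sketch: a minimal illustration of the clone-then-mutate pattern the
# decorator enforces (Box is a hypothetical class, not part of this module):
#   class Box(object):
#       def __init__(self, items=()):
#           self.items = list(items)
#       def clone(self):
#           return Box(self.items)
#       @returns_clone
#       def add(self, item):
#           self.items.append(item)
#   a = Box()
#   b = a.add('x')   # a.items is still []; b is a new Box holding ['x']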
def not_allowed(func):
"""
Method decorator to indicate a method is not allowed to be called. Will
raise a `NotImplementedError`.
"""
def inner(self, *args, **kwargs):
raise NotImplementedError('%s is not allowed on %s instances' % (
func, type(self).__name__))
return inner
class Proxy(object):
"""
Proxy class useful for situations when you wish to defer the initialization
of an object.
"""
__slots__ = ('obj', '_callbacks')
def __init__(self):
self._callbacks = []
self.initialize(None)
def initialize(self, obj):
self.obj = obj
for callback in self._callbacks:
callback(obj)
def attach_callback(self, callback):
self._callbacks.append(callback)
return callback
def __getattr__(self, attr):
if self.obj is None:
raise AttributeError('Cannot use uninitialized Proxy.')
return getattr(self.obj, attr)
def __setattr__(self, attr, value):
if attr not in self.__slots__:
raise AttributeError('Cannot set attribute on proxy.')
return super(Proxy, self).__setattr__(attr, value)
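# Editor's sketch of the intended deferred-initialization pattern (the database
# objects here are placeholders for illustration):
#   database_proxy = Proxy()
#   # ... other modules may hold a reference to database_proxy before setup ...
#   database_proxy.initialize(actual_database)
#   database_proxy.some_attribute   # now forwarded to actual_database
# attach_callback() registers functions that are invoked when initialize()
# runs, which is useful for work that must wait for the real object.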
class DeferredRelation(object):
_unresolved = set()
def __init__(self, rel_model_name=None):
self.fields = []
if rel_model_name is not None:
self._rel_model_name = rel_model_name.lower()
self._unresolved.add(self)
def set_field(self, model_class, field, name):
self.fields.append((model_class, field, name))
def set_model(self, rel_model):
for model, field, name in self.fields:
field.rel_model = rel_model
field.add_to_class(model, name)
@staticmethod
def resolve(model_cls):
unresolved = list(DeferredRelation._unresolved)
for dr in unresolved:
if dr._rel_model_name == model_cls.__name__.lower():
dr.set_model(model_cls)
DeferredRelation._unresolved.discard(dr)
class _CDescriptor(object):
def __get__(self, instance, instance_type=None):
if instance is not None:
return Entity(instance._alias)
return self
# Classes representing the query tree.
class Node(object):
"""Base-class for any part of a query which shall be composable."""
c = _CDescriptor()
_node_type = 'node'
def __init__(self):
self._negated = False
self._alias = None
self._bind_to = None
self._ordering = None # ASC or DESC.
@classmethod
def extend(cls, name=None, clone=False):
def decorator(method):
method_name = name or method.__name__
if clone:
method = returns_clone(method)
setattr(cls, method_name, method)
return method
return decorator
def clone_base(self):
return type(self)()
def clone(self):
inst = self.clone_base()
inst._negated = self._negated
inst._alias = self._alias
inst._ordering = self._ordering
inst._bind_to = self._bind_to
return inst
@returns_clone
def __invert__(self):
self._negated = not self._negated
@returns_clone
def alias(self, a=None):
self._alias = a
@returns_clone
def bind_to(self, bt):
"""
Bind the results of an expression to a specific model type. Useful
when adding expressions to a select, where the result of the expression
should be placed on a joined instance.
"""
self._bind_to = bt
@returns_clone
def asc(self):
self._ordering = 'ASC'
@returns_clone
def desc(self):
self._ordering = 'DESC'
def __pos__(self):
return self.asc()
def __neg__(self):
return self.desc()
def _e(op, inv=False):
"""
Lightweight factory which returns a method that builds an Expression
consisting of the left-hand and right-hand operands, using `op`.
"""
def inner(self, rhs):
if inv:
return Expression(rhs, op, self)
return Expression(self, op, rhs)
return inner
__and__ = _e(OP.AND)
__or__ = _e(OP.OR)
__add__ = _e(OP.ADD)
__sub__ = _e(OP.SUB)
__mul__ = _e(OP.MUL)
__div__ = __truediv__ = _e(OP.DIV)
__xor__ = _e(OP.XOR)
__radd__ = _e(OP.ADD, inv=True)
__rsub__ = _e(OP.SUB, inv=True)
__rmul__ = _e(OP.MUL, inv=True)
__rdiv__ = __rtruediv__ = _e(OP.DIV, inv=True)
__rand__ = _e(OP.AND, inv=True)
__ror__ = _e(OP.OR, inv=True)
__rxor__ = _e(OP.XOR, inv=True)
def __eq__(self, rhs):
if rhs is None:
return Expression(self, OP.IS, None)
return Expression(self, OP.EQ, rhs)
def __ne__(self, rhs):
if rhs is None:
return Expression(self, OP.IS_NOT, None)
return Expression(self, OP.NE, rhs)
__lt__ = _e(OP.LT)
__le__ = _e(OP.LTE)
__gt__ = _e(OP.GT)
__ge__ = _e(OP.GTE)
__lshift__ = _e(OP.IN)
__rshift__ = _e(OP.IS)
__mod__ = _e(OP.LIKE)
__pow__ = _e(OP.ILIKE)
bin_and = _e(OP.BIN_AND)
bin_or = _e(OP.BIN_OR)
# Special expressions.
def in_(self, rhs):
return Expression(self, OP.IN, rhs)
def not_in(self, rhs):
return Expression(self, OP.NOT_IN, rhs)
def is_null(self, is_null=True):
if is_null:
return Expression(self, OP.IS, None)
return Expression(self, OP.IS_NOT, None)
def contains(self, rhs):
return Expression(self, OP.ILIKE, '%%%s%%' % rhs)
def startswith(self, rhs):
return Expression(self, OP.ILIKE, '%s%%' % rhs)
def endswith(self, rhs):
return Expression(self, OP.ILIKE, '%%%s' % rhs)
def between(self, low, high):
return Expression(self, OP.BETWEEN, Clause(low, R('AND'), high))
def regexp(self, expression):
return Expression(self, OP.REGEXP, expression)
def concat(self, rhs):
return StringExpression(self, OP.CONCAT, rhs)
class SQL(Node):
"""An unescaped SQL string, with optional parameters."""
_node_type = 'sql'
def __init__(self, value, *params):
self.value = value
self.params = params
super(SQL, self).__init__()
def clone_base(self):
return SQL(self.value, *self.params)
R = SQL # backwards-compat.
class Entity(Node):
"""A quoted-name or entity, e.g. "table"."column"."""
_node_type = 'entity'
def __init__(self, *path):
super(Entity, self).__init__()
self.path = path
def clone_base(self):
return Entity(*self.path)
def __getattr__(self, attr):
return Entity(*filter(None, self.path + (attr,)))
class Func(Node):
"""An arbitrary SQL function call."""
_node_type = 'func'
_no_coerce = set(('count', 'sum'))
def __init__(self, name, *arguments):
self.name = name
self.arguments = arguments
self._coerce = (name.lower() not in self._no_coerce) if name else False
super(Func, self).__init__()
@returns_clone
def coerce(self, coerce=True):
self._coerce = coerce
def clone_base(self):
res = Func(self.name, *self.arguments)
res._coerce = self._coerce
return res
def over(self, partition_by=None, order_by=None, start=None, end=None,
window=None):
if isinstance(partition_by, Window) and window is None:
window = partition_by
if start is not None and not isinstance(start, SQL):
start = SQL(*start)
if end is not None and not isinstance(end, SQL):
end = SQL(*end)
if window is None:
sql = Window(partition_by=partition_by, order_by=order_by,
start=start, end=end).__sql__()
else:
sql = SQL(window._alias)
return Clause(self, SQL('OVER'), sql)
def __getattr__(self, attr):
def dec(*args, **kwargs):
return Func(attr, *args, **kwargs)
return dec
# fn is a factory for creating `Func` objects and supports a friendlier
# API: instead of `Func("LOWER", param)`, write `fn.LOWER(param)`.
fn = Func(None)
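# Usage sketch for the `fn` factory above (illustrative; `User` is a
# hypothetical Model subclass): attribute access on `fn` builds a Func of that
# name, so the following two expressions are equivalent:
#
#     Func('LOWER', User.username)
#     fn.LOWER(User.username)
#
# Aggregates compose the same way, e.g. fn.COUNT(User.id).alias('count').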
class Expression(Node):
"""A binary expression, e.g `foo + 1` or `bar < 7`."""
_node_type = 'expression'
def __init__(self, lhs, op, rhs, flat=False):
super(Expression, self).__init__()
self.lhs = lhs
self.op = op
self.rhs = rhs
self.flat = flat
def clone_base(self):
return Expression(self.lhs, self.op, self.rhs, self.flat)
class StringExpression(Expression):
def __add__(self, other):
return self.concat(other)
def __radd__(self, other):
return other.concat(self)
class Param(Node):
"""
    Arbitrary parameter passed into a query. Instructs the query compiler to
    treat the value as a single bound parameter; useful for `list` values,
    which are otherwise special-cased for `IN` lookups.
"""
_node_type = 'param'
def __init__(self, value, adapt=None):
self.value = value
self.adapt = adapt
super(Param, self).__init__()
def clone_base(self):
return Param(self.value, self.adapt)
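# Usage sketch for Param (above). Illustrative only; `User`, `SomeModel` and
# `tags` are hypothetical. A plain list is expanded by the compiler into a
# parenthesized parameter list (what IN lookups want), whereas wrapping a list
# in Param() binds it as one parameter, e.g. for drivers or column types that
# accept a whole list/array as a single value:
#
#     User.select().where(User.id << [1, 2, 3])        # -> "id" IN (?, ?, ?)
#     SomeModel.select().where(SomeModel.tags == Param(['a', 'b']))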
class Passthrough(Param):
_node_type = 'passthrough'
class Clause(Node):
"""A SQL clause, one or more Node objects joined by spaces."""
_node_type = 'clause'
glue = ' '
parens = False
def __init__(self, *nodes, **kwargs):
if 'glue' in kwargs:
self.glue = kwargs['glue']
if 'parens' in kwargs:
self.parens = kwargs['parens']
super(Clause, self).__init__()
self.nodes = list(nodes)
def clone_base(self):
clone = Clause(*self.nodes)
clone.glue = self.glue
clone.parens = self.parens
return clone
class CommaClause(Clause):
"""One or more Node objects joined by commas, no parens."""
glue = ', '
class EnclosedClause(CommaClause):
"""One or more Node objects joined by commas and enclosed in parens."""
parens = True
Tuple = EnclosedClause
class Window(Node):
CURRENT_ROW = 'CURRENT ROW'
def __init__(self, partition_by=None, order_by=None, start=None, end=None):
super(Window, self).__init__()
self.partition_by = partition_by
self.order_by = order_by
self.start = start
self.end = end
if self.start is None and self.end is not None:
raise ValueError('Cannot specify WINDOW end without start.')
self._alias = self._alias or 'w'
@staticmethod
def following(value=None):
if value is None:
return SQL('UNBOUNDED FOLLOWING')
return SQL('%d FOLLOWING' % value)
@staticmethod
def preceding(value=None):
if value is None:
return SQL('UNBOUNDED PRECEDING')
return SQL('%d PRECEDING' % value)
def __sql__(self):
over_clauses = []
if self.partition_by:
over_clauses.append(Clause(
SQL('PARTITION BY'),
CommaClause(*self.partition_by)))
if self.order_by:
over_clauses.append(Clause(
SQL('ORDER BY'),
CommaClause(*self.order_by)))
if self.start is not None and self.end is not None:
over_clauses.append(Clause(
SQL('RANGE BETWEEN'),
self.start,
SQL('AND'),
self.end))
elif self.start is not None:
over_clauses.append(Clause(SQL('RANGE'), self.start))
return EnclosedClause(Clause(*over_clauses))
def clone_base(self):
return Window(self.partition_by, self.order_by)
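# Usage sketch for Window (above) and Func.over(). Illustrative only; `Tweet`
# is a hypothetical Model subclass. Window functions are built via over(),
# either inline or against a named Window registered on the select query (the
# query's window() method populates the `_windows` consumed when generating
# the SELECT):
#
#     fn.RANK().over(partition_by=[Tweet.user], order_by=[Tweet.timestamp])
#
#     window = Window(partition_by=[Tweet.user], order_by=[Tweet.timestamp])
#     Tweet.select(Tweet, fn.RANK().over(window=window)).window(window)
#
# Window.preceding() / Window.following() build the RANGE frame bounds passed
# to over(start=..., end=...).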
def Check(value):
return SQL('CHECK (%s)' % value)
class DQ(Node):
"""A "django-style" filter expression, e.g. {'foo__eq': 'x'}."""
def __init__(self, **query):
super(DQ, self).__init__()
self.query = query
def clone_base(self):
return DQ(**self.query)
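# Usage sketch for DQ (above). Illustrative only; `User` is a hypothetical
# Model subclass. DQ nodes carry django-style keyword lookups and may be
# combined with | and & before being resolved by convert_dict_to_node() (see
# Query, further below), via the filter() API defined elsewhere in this module:
#
#     User.filter(DQ(username__startswith='ch') | DQ(email__contains='@example.com'))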
class _StripParens(Node):
_node_type = 'strip_parens'
def __init__(self, node):
super(_StripParens, self).__init__()
self.node = node
JoinMetadata = namedtuple('JoinMetadata', (
'src_model', # Source Model class.
'dest_model', # Dest Model class.
'src', # Source, may be Model, ModelAlias
'dest', # Dest, may be Model, ModelAlias, or SelectQuery.
'attr', # Attribute name joined instance(s) should be assigned to.
'primary_key', # Primary key being joined on.
'foreign_key', # Foreign key being joined from.
'is_backref', # Is this a backref, i.e. 1 -> N.
'alias', # Explicit alias given to join expression.
'is_self_join', # Is this a self-join?
'is_expression', # Is the join ON clause an Expression?
))
class Join(namedtuple('_Join', ('src', 'dest', 'join_type', 'on'))):
def get_foreign_key(self, source, dest, field=None):
if isinstance(source, SelectQuery) or isinstance(dest, SelectQuery):
return None, None
fk_field = source._meta.rel_for_model(dest, field)
if fk_field is not None:
return fk_field, False
reverse_rel = source._meta.reverse_rel_for_model(dest, field)
if reverse_rel is not None:
return reverse_rel, True
return None, None
def get_join_type(self):
return self.join_type or JOIN.INNER
def model_from_alias(self, model_or_alias):
if isinstance(model_or_alias, ModelAlias):
return model_or_alias.model_class
elif isinstance(model_or_alias, SelectQuery):
return model_or_alias.model_class
return model_or_alias
def _join_metadata(self):
# Get the actual tables being joined.
src = self.model_from_alias(self.src)
dest = self.model_from_alias(self.dest)
join_alias = isinstance(self.on, Node) and self.on._alias or None
is_expression = isinstance(self.on, (Expression, Func, SQL))
on_field = isinstance(self.on, (Field, FieldProxy)) and self.on or None
if on_field:
fk_field = on_field
is_backref = on_field.name not in src._meta.fields
else:
fk_field, is_backref = self.get_foreign_key(src, dest, self.on)
if fk_field is None and self.on is not None:
fk_field, is_backref = self.get_foreign_key(src, dest)
if fk_field is not None:
primary_key = fk_field.to_field
else:
primary_key = None
if not join_alias:
if fk_field is not None:
if is_backref:
target_attr = dest._meta.db_table
else:
target_attr = fk_field.name
else:
try:
target_attr = self.on.lhs.name
except AttributeError:
target_attr = dest._meta.db_table
else:
target_attr = None
return JoinMetadata(
src_model=src,
dest_model=dest,
src=self.src,
dest=self.dest,
attr=join_alias or target_attr,
primary_key=primary_key,
foreign_key=fk_field,
is_backref=is_backref,
alias=join_alias,
is_self_join=src is dest,
is_expression=is_expression)
@property
def metadata(self):
if not hasattr(self, '_cached_metadata'):
self._cached_metadata = self._join_metadata()
return self._cached_metadata
class FieldDescriptor(object):
# Fields are exposed as descriptors in order to control access to the
# underlying "raw" data.
def __init__(self, field):
self.field = field
self.att_name = self.field.name
def __get__(self, instance, instance_type=None):
if instance is not None:
return instance._data.get(self.att_name)
return self.field
def __set__(self, instance, value):
instance._data[self.att_name] = value
instance._dirty.add(self.att_name)
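# Behaviour sketch for FieldDescriptor (above). Illustrative only; `User` and
# `some_user` are hypothetical. Class-level access returns the Field object
# itself (so it can be used in query expressions), while instance-level access
# reads and writes the raw value in `instance._data`:
#
#     User.username             # -> the Field instance
#     some_user.username        # -> 'charlie', pulled from _data
#     some_user.username = 'x'  # stored in _data; field marked dirty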
class Field(Node):
"""A column on a table."""
_field_counter = 0
_order = 0
_node_type = 'field'
db_field = 'unknown'
def __init__(self, null=False, index=False, unique=False,
verbose_name=None, help_text=None, db_column=None,
default=None, choices=None, primary_key=False, sequence=None,
constraints=None, schema=None, undeclared=False):
self.null = null
self.index = index
self.unique = unique
self.verbose_name = verbose_name
self.help_text = help_text
self.db_column = db_column
self.default = default
self.choices = choices # Used for metadata purposes, not enforced.
self.primary_key = primary_key
self.sequence = sequence # Name of sequence, e.g. foo_id_seq.
self.constraints = constraints # List of column constraints.
self.schema = schema # Name of schema, e.g. 'public'.
        self.undeclared = undeclared # True if not declared as part of the schema.
# Used internally for recovering the order in which Fields were defined
# on the Model class.
Field._field_counter += 1
self._order = Field._field_counter
self._sort_key = (self.primary_key and 1 or 2), self._order
self._is_bound = False # Whether the Field is "bound" to a Model.
super(Field, self).__init__()
def clone_base(self, **kwargs):
inst = type(self)(
null=self.null,
index=self.index,
unique=self.unique,
verbose_name=self.verbose_name,
help_text=self.help_text,
db_column=self.db_column,
default=self.default,
choices=self.choices,
primary_key=self.primary_key,
sequence=self.sequence,
constraints=self.constraints,
schema=self.schema,
undeclared=self.undeclared,
**kwargs)
if self._is_bound:
inst.name = self.name
inst.model_class = self.model_class
inst._is_bound = self._is_bound
return inst
def add_to_class(self, model_class, name):
"""
Hook that replaces the `Field` attribute on a class with a named
`FieldDescriptor`. Called by the metaclass during construction of the
`Model`.
"""
self.name = name
self.model_class = model_class
self.db_column = self.db_column or self.name
if not self.verbose_name:
self.verbose_name = re.sub('_+', ' ', name).title()
model_class._meta.add_field(self)
setattr(model_class, name, FieldDescriptor(self))
self._is_bound = True
def get_database(self):
return self.model_class._meta.database
def get_column_type(self):
field_type = self.get_db_field()
return self.get_database().compiler().get_column_type(field_type)
def get_db_field(self):
return self.db_field
def get_modifiers(self):
return None
def coerce(self, value):
return value
def db_value(self, value):
"""Convert the python value for storage in the database."""
return value if value is None else self.coerce(value)
def python_value(self, value):
"""Convert the database value to a pythonic value."""
return value if value is None else self.coerce(value)
def as_entity(self, with_table=False):
if with_table:
return Entity(self.model_class._meta.db_table, self.db_column)
return Entity(self.db_column)
def __ddl_column__(self, column_type):
"""Return the column type, e.g. VARCHAR(255) or REAL."""
modifiers = self.get_modifiers()
if modifiers:
return SQL(
'%s(%s)' % (column_type, ', '.join(map(str, modifiers))))
return SQL(column_type)
def __ddl__(self, column_type):
"""Return a list of Node instances that defines the column."""
ddl = [self.as_entity(), self.__ddl_column__(column_type)]
if not self.null:
ddl.append(SQL('NOT NULL'))
if self.primary_key:
ddl.append(SQL('PRIMARY KEY'))
if self.sequence:
ddl.append(SQL("DEFAULT NEXTVAL('%s')" % self.sequence))
if self.constraints:
ddl.extend(self.constraints)
return ddl
def __hash__(self):
return hash(self.name + '.' + self.model_class.__name__)
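# DDL sketch for Field.__ddl__() above (hedged; exact quoting depends on the
# compiler's quote_char). A bound CharField(max_length=64), defined below,
# stored in column "username" produces the node list
# [Entity('username'), SQL('VARCHAR(64)'), SQL('NOT NULL')], which renders as:
#
#     "username" VARCHAR(64) NOT NULL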
class BareField(Field):
db_field = 'bare'
def __init__(self, coerce=None, *args, **kwargs):
super(BareField, self).__init__(*args, **kwargs)
if coerce is not None:
self.coerce = coerce
def clone_base(self, **kwargs):
return super(BareField, self).clone_base(coerce=self.coerce, **kwargs)
class IntegerField(Field):
db_field = 'int'
coerce = int
class BigIntegerField(IntegerField):
db_field = 'bigint'
class SmallIntegerField(IntegerField):
db_field = 'smallint'
class PrimaryKeyField(IntegerField):
db_field = 'primary_key'
def __init__(self, *args, **kwargs):
kwargs['primary_key'] = True
super(PrimaryKeyField, self).__init__(*args, **kwargs)
class _AutoPrimaryKeyField(PrimaryKeyField):
_column_name = None
def __init__(self, *args, **kwargs):
if 'undeclared' in kwargs and not kwargs['undeclared']:
raise ValueError('%r must be created with undeclared=True.' % self)
kwargs['undeclared'] = True
super(_AutoPrimaryKeyField, self).__init__(*args, **kwargs)
def add_to_class(self, model_class, name):
if name != self._column_name:
raise ValueError('%s must be named `%s`.' % (type(self), name))
super(_AutoPrimaryKeyField, self).add_to_class(model_class, name)
class FloatField(Field):
db_field = 'float'
coerce = float
class DoubleField(FloatField):
db_field = 'double'
class DecimalField(Field):
db_field = 'decimal'
def __init__(self, max_digits=10, decimal_places=5, auto_round=False,
rounding=None, *args, **kwargs):
self.max_digits = max_digits
self.decimal_places = decimal_places
self.auto_round = auto_round
self.rounding = rounding or decimal.DefaultContext.rounding
self._exp = decimal.Decimal(10) ** (-self.decimal_places)
super(DecimalField, self).__init__(*args, **kwargs)
def clone_base(self, **kwargs):
return super(DecimalField, self).clone_base(
max_digits=self.max_digits,
decimal_places=self.decimal_places,
auto_round=self.auto_round,
rounding=self.rounding,
**kwargs)
def get_modifiers(self):
return [self.max_digits, self.decimal_places]
def db_value(self, value):
D = decimal.Decimal
if not value:
return value if value is None else D(0)
elif self.auto_round or not isinstance(value, D):
value = D(str(value))
if value.is_normal() and self.auto_round:
value = value.quantize(self._exp, rounding=self.rounding)
return value
def python_value(self, value):
if value is not None:
if isinstance(value, decimal.Decimal):
return value
return decimal.Decimal(str(value))
def coerce_to_unicode(s, encoding='utf-8'):
if isinstance(s, str):
return s
elif isinstance(s, bytes):
try:
return s.decode(encoding)
except UnicodeDecodeError:
return s
return str(s)
class _StringField(Field):
def coerce(self, value):
return coerce_to_unicode(value or '')
def __add__(self, other):
return self.concat(other)
def __radd__(self, other):
return other.concat(self)
class CharField(_StringField):
db_field = 'string'
def __init__(self, max_length=255, *args, **kwargs):
self.max_length = max_length
super(CharField, self).__init__(*args, **kwargs)
def clone_base(self, **kwargs):
return super(CharField, self).clone_base(
max_length=self.max_length,
**kwargs)
def get_modifiers(self):
return self.max_length and [self.max_length] or None
class FixedCharField(CharField):
db_field = 'fixed_char'
def python_value(self, value):
value = super(FixedCharField, self).python_value(value)
if value:
value = value.strip()
return value
class TextField(_StringField):
db_field = 'text'
class BlobField(Field):
db_field = 'blob'
_constructor = binary_construct
def add_to_class(self, model_class, name):
if isinstance(model_class._meta.database, Proxy):
model_class._meta.database.attach_callback(self._set_constructor)
return super(BlobField, self).add_to_class(model_class, name)
def _set_constructor(self, database):
self._constructor = database.get_binary_type()
def db_value(self, value):
if isinstance(value, str):
value = value.encode('raw_unicode_escape')
        if isinstance(value, (bytes, bytearray)):
return self._constructor(value)
return value
class UUIDField(Field):
db_field = 'uuid'
def db_value(self, value):
if isinstance(value, uuid.UUID):
return value.hex
try:
return uuid.UUID(value).hex
except:
return value
def python_value(self, value):
if isinstance(value, uuid.UUID):
return value
return None if value is None else uuid.UUID(value)
def _date_part(date_part):
def dec(self):
return self.model_class._meta.database.extract_date(date_part, self)
return dec
class _BaseFormattedField(Field):
formats = None
def __init__(self, formats=None, *args, **kwargs):
if formats is not None:
self.formats = formats
super(_BaseFormattedField, self).__init__(*args, **kwargs)
def clone_base(self, **kwargs):
return super(_BaseFormattedField, self).clone_base(
formats=self.formats,
**kwargs)
class DateTimeField(_BaseFormattedField):
db_field = 'datetime'
formats = [
'%Y-%m-%d %H:%M:%S.%f',
'%Y-%m-%d %H:%M:%S',
'%Y-%m-%d',
]
def python_value(self, value):
if value and isinstance(value, str):
return format_date_time(value, self.formats)
return value
year = property(_date_part('year'))
month = property(_date_part('month'))
day = property(_date_part('day'))
hour = property(_date_part('hour'))
minute = property(_date_part('minute'))
second = property(_date_part('second'))
class DateField(_BaseFormattedField):
db_field = 'date'
formats = [
'%Y-%m-%d',
'%Y-%m-%d %H:%M:%S',
'%Y-%m-%d %H:%M:%S.%f',
]
def python_value(self, value):
if value and isinstance(value, str):
pp = lambda x: x.date()
return format_date_time(value, self.formats, pp)
elif value and isinstance(value, datetime.datetime):
return value.date()
return value
year = property(_date_part('year'))
month = property(_date_part('month'))
day = property(_date_part('day'))
class TimeField(_BaseFormattedField):
db_field = 'time'
formats = [
'%H:%M:%S.%f',
'%H:%M:%S',
'%H:%M',
'%Y-%m-%d %H:%M:%S.%f',
'%Y-%m-%d %H:%M:%S',
]
def python_value(self, value):
if value:
if isinstance(value, str):
pp = lambda x: x.time()
return format_date_time(value, self.formats, pp)
elif isinstance(value, datetime.datetime):
return value.time()
if value is not None and isinstance(value, datetime.timedelta):
return (datetime.datetime.min + value).time()
return value
hour = property(_date_part('hour'))
minute = property(_date_part('minute'))
second = property(_date_part('second'))
class TimestampField(IntegerField):
# Support second -> microsecond resolution.
valid_resolutions = [10 ** i for i in range(7)]
zero_value = None
def __init__(self, *args, **kwargs):
self.resolution = kwargs.pop('resolution', 1) or 1
if self.resolution not in self.valid_resolutions:
raise ValueError('TimestampField resolution must be one of: %s' %
', '.join(str(i) for i in self.valid_resolutions))
self.utc = kwargs.pop('utc', False) or False
_dt = datetime.datetime
self._conv = _dt.utcfromtimestamp if self.utc else _dt.fromtimestamp
_default = _dt.utcnow if self.utc else _dt.now
kwargs.setdefault('default', _default)
self.zero_value = kwargs.pop('zero_value', None)
super(TimestampField, self).__init__(*args, **kwargs)
def get_db_field(self):
        # For one-second resolution we can get away (for a while) with using
        # 4 bytes to store the timestamp, as long as the dates stay below
        # roughly the year 2038. Otherwise we need a BigInteger column type.
return (self.db_field if self.resolution == 1
else BigIntegerField.db_field)
def db_value(self, value):
if value is None:
return
if isinstance(value, datetime.datetime):
pass
elif isinstance(value, datetime.date):
value = datetime.datetime(value.year, value.month, value.day)
else:
return int(round(value * self.resolution))
if self.utc:
timestamp = calendar.timegm(value.utctimetuple())
else:
timestamp = time.mktime(value.timetuple())
timestamp += (value.microsecond * .000001)
if self.resolution > 1:
timestamp *= self.resolution
return int(round(timestamp))
def python_value(self, value):
if value is not None and isinstance(value, (int, float)):
if value == 0:
return self.zero_value
elif self.resolution > 1:
ticks_to_microsecond = 1000000 // self.resolution
value, ticks = divmod(value, self.resolution)
microseconds = ticks * ticks_to_microsecond
return self._conv(value).replace(microsecond=microseconds)
else:
return self._conv(value)
return value
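# Worked example for TimestampField (above). Hedged sketch; `Event` is a
# hypothetical Model subclass. With resolution=1000 the value is stored as
# integer milliseconds since the epoch in a BIGINT column and converted back
# on read:
#
#     class Event(Model):
#         created = TimestampField(resolution=1000, utc=True)
#
#     # db_value(datetime(1970, 1, 1, 0, 0, 1, 500000)) -> 1500
#     # python_value(1500) -> datetime(1970, 1, 1, 0, 0, 1, 500000)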
class BooleanField(Field):
db_field = 'bool'
coerce = bool
class RelationDescriptor(FieldDescriptor):
"""Foreign-key abstraction to replace a related PK with a related model."""
def __init__(self, field, rel_model):
self.rel_model = rel_model
super(RelationDescriptor, self).__init__(field)
def get_object_or_id(self, instance):
rel_id = instance._data.get(self.att_name)
if rel_id is not None or self.att_name in instance._obj_cache:
if self.att_name not in instance._obj_cache:
obj = self.rel_model.get(self.field.to_field == rel_id)
instance._obj_cache[self.att_name] = obj
return instance._obj_cache[self.att_name]
elif not self.field.null:
raise self.rel_model.DoesNotExist
return rel_id
def __get__(self, instance, instance_type=None):
if instance is not None:
return self.get_object_or_id(instance)
return self.field
def __set__(self, instance, value):
if isinstance(value, self.rel_model):
instance._data[self.att_name] = getattr(
value, self.field.to_field.name)
instance._obj_cache[self.att_name] = value
else:
orig_value = instance._data.get(self.att_name)
instance._data[self.att_name] = value
if orig_value != value and self.att_name in instance._obj_cache:
del instance._obj_cache[self.att_name]
instance._dirty.add(self.att_name)
class ReverseRelationDescriptor(object):
"""Back-reference to expose related objects as a `SelectQuery`."""
def __init__(self, field):
self.field = field
self.rel_model = field.model_class
def __get__(self, instance, instance_type=None):
if instance is not None:
return self.rel_model.select().where(
self.field == getattr(instance, self.field.to_field.name))
return self
class ObjectIdDescriptor(object):
"""Gives direct access to the underlying id"""
def __init__(self, field):
self.attr_name = field.name
self.field = weakref.ref(field)
def __get__(self, instance, instance_type=None):
if instance is not None:
return instance._data.get(self.attr_name)
return self.field()
def __set__(self, instance, value):
setattr(instance, self.attr_name, value)
class ForeignKeyField(IntegerField):
def __init__(self, rel_model, related_name=None, on_delete=None,
on_update=None, extra=None, to_field=None,
object_id_name=None, *args, **kwargs):
if rel_model != 'self' and not \
isinstance(rel_model, (Proxy, DeferredRelation)) and not \
issubclass(rel_model, Model):
raise TypeError('Unexpected value for `rel_model`. Expected '
'`Model`, `Proxy`, `DeferredRelation`, or "self"')
self.rel_model = rel_model
self._related_name = related_name
self.deferred = isinstance(rel_model, (Proxy, DeferredRelation))
self.on_delete = on_delete
self.on_update = on_update
self.extra = extra
self.to_field = to_field
self.object_id_name = object_id_name
super(ForeignKeyField, self).__init__(*args, **kwargs)
def clone_base(self, **kwargs):
return super(ForeignKeyField, self).clone_base(
rel_model=self.rel_model,
related_name=self._get_related_name(),
on_delete=self.on_delete,
on_update=self.on_update,
extra=self.extra,
to_field=self.to_field,
object_id_name=self.object_id_name,
**kwargs)
def _get_descriptor(self):
return RelationDescriptor(self, self.rel_model)
def _get_id_descriptor(self):
return ObjectIdDescriptor(self)
def _get_backref_descriptor(self):
return ReverseRelationDescriptor(self)
def _get_related_name(self):
if self._related_name and isinstance(self._related_name, Callable):
return self._related_name(self)
return self._related_name or ('%s_set' % self.model_class._meta.name)
def add_to_class(self, model_class, name):
if isinstance(self.rel_model, Proxy):
def callback(rel_model):
self.rel_model = rel_model
self.add_to_class(model_class, name)
self.rel_model.attach_callback(callback)
return
elif isinstance(self.rel_model, DeferredRelation):
self.rel_model.set_field(model_class, self, name)
return
self.name = name
self.model_class = model_class
self.db_column = self.db_column or '%s_id' % self.name
obj_id_name = self.object_id_name
if not obj_id_name:
obj_id_name = self.db_column
if obj_id_name == self.name:
obj_id_name += '_id'
elif obj_id_name == self.name:
raise ValueError('Cannot set a foreign key object_id_name to '
'the same name as the field itself.')
if not self.verbose_name:
self.verbose_name = re.sub('_+', ' ', name).title()
model_class._meta.add_field(self)
self.related_name = self._get_related_name()
if self.rel_model == 'self':
self.rel_model = self.model_class
if self.to_field is not None:
if not isinstance(self.to_field, Field):
self.to_field = getattr(self.rel_model, self.to_field)
else:
self.to_field = self.rel_model._meta.primary_key
# TODO: factor into separate method.
if model_class._meta.validate_backrefs:
def invalid(msg, **context):
context.update(
field='%s.%s' % (model_class._meta.name, name),
backref=self.related_name,
obj_id_name=obj_id_name)
raise AttributeError(msg % context)
if self.related_name in self.rel_model._meta.fields:
invalid('The related_name of %(field)s ("%(backref)s") '
'conflicts with a field of the same name.')
elif self.related_name in self.rel_model._meta.reverse_rel:
invalid('The related_name of %(field)s ("%(backref)s") '
'is already in use by another foreign key.')
if obj_id_name in model_class._meta.fields:
invalid('The object id descriptor of %(field)s conflicts '
'with a field named %(obj_id_name)s')
elif obj_id_name in model_class.__dict__:
invalid('Model attribute "%(obj_id_name)s" would be shadowed '
'by the object id descriptor of %(field)s.')
setattr(model_class, name, self._get_descriptor())
setattr(model_class, obj_id_name, self._get_id_descriptor())
setattr(self.rel_model,
self.related_name,
self._get_backref_descriptor())
self._is_bound = True
model_class._meta.rel[self.name] = self
self.rel_model._meta.reverse_rel[self.related_name] = self
def get_db_field(self):
"""
        Overridden to ensure foreign keys use the same column type as the
        primary key they point to.
"""
if not isinstance(self.to_field, PrimaryKeyField):
return self.to_field.get_db_field()
return super(ForeignKeyField, self).get_db_field()
def get_modifiers(self):
if not isinstance(self.to_field, PrimaryKeyField):
return self.to_field.get_modifiers()
return super(ForeignKeyField, self).get_modifiers()
def coerce(self, value):
return self.to_field.coerce(value)
def db_value(self, value):
if isinstance(value, self.rel_model):
value = value._get_pk_value()
return self.to_field.db_value(value)
def python_value(self, value):
if isinstance(value, self.rel_model):
return value
return self.to_field.python_value(value)
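# Usage sketch for ForeignKeyField (above). Illustrative only; `User` and
# `Tweet` are hypothetical models:
#
#     class Tweet(Model):
#         user = ForeignKeyField(User, related_name='tweets')
#
#     tweet.user      # RelationDescriptor: loads (and caches) the related User
#     tweet.user_id   # ObjectIdDescriptor: the raw foreign-key column value
#     user.tweets     # ReverseRelationDescriptor: SelectQuery of backrefs
#
# The object-id attribute name defaults to the db_column ('user_id' here) and
# may be overridden with object_id_name.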
class CompositeKey(object):
"""A primary key composed of multiple columns."""
_node_type = 'composite_key'
sequence = None
def __init__(self, *field_names):
self.field_names = field_names
def add_to_class(self, model_class, name):
self.name = name
self.model_class = model_class
setattr(model_class, name, self)
def __get__(self, instance, instance_type=None):
if instance is not None:
return tuple([getattr(instance, field_name)
for field_name in self.field_names])
return self
def __set__(self, instance, value):
pass
def __eq__(self, other):
expressions = [(self.model_class._meta.fields[field] == value)
for field, value in zip(self.field_names, other)]
return reduce(operator.and_, expressions)
def __ne__(self, other):
return ~(self == other)
def __hash__(self):
return hash((self.model_class.__name__, self.field_names))
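# Usage sketch for CompositeKey (above). Illustrative only; `User` and `Group`
# are hypothetical models:
#
#     class UserToGroup(Model):
#         user = ForeignKeyField(User)
#         group = ForeignKeyField(Group)
#
#         class Meta:
#             primary_key = CompositeKey('user', 'group')
#
# Instance access yields a tuple of the named field values, and comparing the
# composite key with a tuple produces an AND-ed chain of per-field equalities.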
class AliasMap(object):
prefix = 't'
def __init__(self, start=0):
self._alias_map = {}
self._counter = start
def __repr__(self):
return '<AliasMap: %s>' % self._alias_map
def add(self, obj, alias=None):
if obj in self._alias_map:
return
self._counter += 1
self._alias_map[obj] = alias or '%s%s' % (self.prefix, self._counter)
def __getitem__(self, obj):
if obj not in self._alias_map:
self.add(obj)
return self._alias_map[obj]
def __contains__(self, obj):
return obj in self._alias_map
def update(self, alias_map):
if alias_map:
for obj, alias in alias_map._alias_map.items():
if obj not in self:
self._alias_map[obj] = alias
return self
class QueryCompiler(object):
    # Mapping of `db_field` type to the actual column type used by the database
    # driver. Database classes may provide additional column types or overrides.
field_map = {
'bare': '',
'bigint': 'BIGINT',
'blob': 'BLOB',
'bool': 'SMALLINT',
'date': 'DATE',
'datetime': 'DATETIME',
'decimal': 'DECIMAL',
'double': 'REAL',
'fixed_char': 'CHAR',
'float': 'REAL',
'int': 'INTEGER',
'primary_key': 'INTEGER',
'smallint': 'SMALLINT',
'string': 'VARCHAR',
'text': 'TEXT',
'time': 'TIME',
}
    # Mapping of OP constants to the actual SQL operator. For most databases
    # these will be the same, but some column types or databases may support
    # additional operators. Like `field_map`, Database classes may extend or
    # override these.
op_map = {
OP.EQ: '=',
OP.LT: '<',
OP.LTE: '<=',
OP.GT: '>',
OP.GTE: '>=',
OP.NE: '!=',
OP.IN: 'IN',
OP.NOT_IN: 'NOT IN',
OP.IS: 'IS',
OP.IS_NOT: 'IS NOT',
OP.BIN_AND: '&',
OP.BIN_OR: '|',
OP.LIKE: 'LIKE',
OP.ILIKE: 'ILIKE',
OP.BETWEEN: 'BETWEEN',
OP.ADD: '+',
OP.SUB: '-',
OP.MUL: '*',
OP.DIV: '/',
OP.XOR: '#',
OP.AND: 'AND',
OP.OR: 'OR',
OP.MOD: '%',
OP.REGEXP: 'REGEXP',
OP.CONCAT: '||',
}
join_map = {
JOIN.INNER: 'INNER JOIN',
JOIN.LEFT_OUTER: 'LEFT OUTER JOIN',
JOIN.RIGHT_OUTER: 'RIGHT OUTER JOIN',
JOIN.FULL: 'FULL JOIN',
JOIN.CROSS: 'CROSS JOIN',
}
alias_map_class = AliasMap
def __init__(self, quote_char='"', interpolation='?', field_overrides=None,
op_overrides=None):
self.quote_char = quote_char
self.interpolation = interpolation
self._field_map = merge_dict(self.field_map, field_overrides or {})
self._op_map = merge_dict(self.op_map, op_overrides or {})
self._parse_map = self.get_parse_map()
self._unknown_types = set(['param'])
def get_parse_map(self):
        # To avoid a long chain of isinstance() checks when parsing nodes, use
        # an O(1) lookup table keyed by the common node types.
return {
'expression': self._parse_expression,
'param': self._parse_param,
'passthrough': self._parse_passthrough,
'func': self._parse_func,
'clause': self._parse_clause,
'entity': self._parse_entity,
'field': self._parse_field,
'sql': self._parse_sql,
'select_query': self._parse_select_query,
'compound_select_query': self._parse_compound_select_query,
'strip_parens': self._parse_strip_parens,
'composite_key': self._parse_composite_key,
}
def quote(self, s):
return '%s%s%s' % (self.quote_char, s, self.quote_char)
def get_column_type(self, f):
return self._field_map[f] if f in self._field_map else f.upper()
def get_op(self, q):
return self._op_map[q]
def _sorted_fields(self, field_dict):
return sorted(field_dict.items(), key=lambda i: i[0]._sort_key)
def _parse_default(self, node, alias_map, conv):
return self.interpolation, [node]
def _parse_expression(self, node, alias_map, conv):
if isinstance(node.lhs, Field):
conv = node.lhs
lhs, lparams = self.parse_node(node.lhs, alias_map, conv)
rhs, rparams = self.parse_node(node.rhs, alias_map, conv)
if node.op == OP.IN and rhs == '()' and not rparams:
return ('0 = 1' if node.flat else '(0 = 1)'), []
template = '%s %s %s' if node.flat else '(%s %s %s)'
sql = template % (lhs, self.get_op(node.op), rhs)
return sql, lparams + rparams
def _parse_passthrough(self, node, alias_map, conv):
if node.adapt:
return self.parse_node(node.adapt(node.value), alias_map, None)
return self.interpolation, [node.value]
def _parse_param(self, node, alias_map, conv):
if node.adapt:
if conv and conv.db_value is node.adapt:
conv = None
return self.parse_node(node.adapt(node.value), alias_map, conv)
elif conv is not None:
return self.parse_node(conv.db_value(node.value), alias_map)
else:
return self.interpolation, [node.value]
def _parse_func(self, node, alias_map, conv):
conv = node._coerce and conv or None
sql, params = self.parse_node_list(node.arguments, alias_map, conv)
return '%s(%s)' % (node.name, strip_parens(sql)), params
def _parse_clause(self, node, alias_map, conv):
sql, params = self.parse_node_list(
node.nodes, alias_map, conv, node.glue)
if node.parens:
sql = '(%s)' % strip_parens(sql)
return sql, params
def _parse_entity(self, node, alias_map, conv):
return '.'.join(map(self.quote, node.path)), []
def _parse_sql(self, node, alias_map, conv):
return node.value, list(node.params)
def _parse_field(self, node, alias_map, conv):
if alias_map:
sql = '.'.join((
self.quote(alias_map[node.model_class]),
self.quote(node.db_column)))
else:
sql = self.quote(node.db_column)
return sql, []
def _parse_composite_key(self, node, alias_map, conv):
fields = []
for field_name in node.field_names:
fields.append(node.model_class._meta.fields[field_name])
return self._parse_clause(CommaClause(*fields), alias_map, conv)
def _parse_compound_select_query(self, node, alias_map, conv):
csq = 'compound_select_query'
lhs, rhs = node.lhs, node.rhs
inv = rhs._node_type == csq and lhs._node_type != csq
if inv:
lhs, rhs = rhs, lhs
new_map = self.alias_map_class()
if lhs._node_type == csq:
new_map._counter = alias_map._counter
sql1, p1 = self.generate_select(lhs, new_map)
sql2, p2 = self.generate_select(rhs, self.calculate_alias_map(rhs,
new_map))
# We add outer parentheses in the event the compound query is used in
# the `from_()` clause, in which case we'll need them.
if node.database.compound_select_parentheses:
if lhs._node_type != csq:
sql1 = '(%s)' % sql1
if rhs._node_type != csq:
sql2 = '(%s)' % sql2
if inv:
sql1, p1, sql2, p2 = sql2, p2, sql1, p1
return '(%s %s %s)' % (sql1, node.operator, sql2), (p1 + p2)
def _parse_select_query(self, node, alias_map, conv):
clone = node.clone()
if not node._explicit_selection:
if conv and isinstance(conv, ForeignKeyField):
clone._select = (conv.to_field,)
else:
clone._select = clone.model_class._meta.get_primary_key_fields()
sub, params = self.generate_select(clone, alias_map)
return '(%s)' % strip_parens(sub), params
def _parse_strip_parens(self, node, alias_map, conv):
sql, params = self.parse_node(node.node, alias_map, conv)
return strip_parens(sql), params
def _parse(self, node, alias_map, conv):
# By default treat the incoming node as a raw value that should be
# parameterized.
node_type = getattr(node, '_node_type', None)
unknown = False
if node_type in self._parse_map:
sql, params = self._parse_map[node_type](node, alias_map, conv)
unknown = (node_type in self._unknown_types and
node.adapt is None and
conv is None)
elif isinstance(node, (list, tuple, set)):
# If you're wondering how to pass a list into your query, simply
# wrap it in Param().
sql, params = self.parse_node_list(node, alias_map, conv)
sql = '(%s)' % sql
elif isinstance(node, Model):
sql = self.interpolation
if conv and isinstance(conv, ForeignKeyField):
to_field = conv.to_field
if isinstance(to_field, ForeignKeyField):
value = conv.db_value(node)
else:
value = to_field.db_value(getattr(node, to_field.name))
else:
value = node._get_pk_value()
params = [value]
elif (isclass(node) and issubclass(node, Model)) or \
isinstance(node, ModelAlias):
entity = node.as_entity().alias(alias_map[node])
sql, params = self.parse_node(entity, alias_map, conv)
elif conv is not None:
value = conv.db_value(node)
sql, params, _ = self._parse(value, alias_map, None)
else:
sql, params = self._parse_default(node, alias_map, None)
unknown = True
return sql, params, unknown
def parse_node(self, node, alias_map=None, conv=None):
sql, params, unknown = self._parse(node, alias_map, conv)
if unknown and (conv is not None) and params:
params = [conv.db_value(i) for i in params]
if isinstance(node, Node):
if node._negated:
sql = 'NOT %s' % sql
if node._alias:
sql = ' '.join((sql, 'AS', node._alias))
if node._ordering:
sql = ' '.join((sql, node._ordering))
if params and any(isinstance(p, Node) for p in params):
clean_params = []
clean_sql = []
for idx, param in enumerate(params):
if isinstance(param, Node):
csql, cparams = self.parse_node(param)
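                    # NOTE: the parsed sub-node SQL/params are computed here but
                    # are not currently merged back into `sql` and `params`.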
return sql, params
def parse_node_list(self, nodes, alias_map, conv=None, glue=', '):
sql = []
params = []
for node in nodes:
node_sql, node_params = self.parse_node(node, alias_map, conv)
sql.append(node_sql)
params.extend(node_params)
return glue.join(sql), params
def calculate_alias_map(self, query, alias_map=None):
new_map = self.alias_map_class()
if alias_map is not None:
new_map._counter = alias_map._counter
new_map.add(query.model_class, query.model_class._meta.table_alias)
for src_model, joined_models in query._joins.items():
new_map.add(src_model, src_model._meta.table_alias)
for join_obj in joined_models:
if isinstance(join_obj.dest, Node):
new_map.add(join_obj.dest, join_obj.dest.alias)
else:
new_map.add(join_obj.dest, join_obj.dest._meta.table_alias)
return new_map.update(alias_map)
def build_query(self, clauses, alias_map=None):
return self.parse_node(Clause(*clauses), alias_map)
def generate_joins(self, joins, model_class, alias_map):
        # Joins are implemented as an adjacency-list graph. Perform a
        # depth-first search of the graph to generate all the necessary JOINs.
clauses = []
seen = set()
q = [model_class]
while q:
curr = q.pop()
if curr not in joins or curr in seen:
continue
seen.add(curr)
for join in joins[curr]:
src = curr
dest = join.dest
join_type = join.get_join_type()
if isinstance(join.on, (Expression, Func, Clause, Entity)):
# Clear any alias on the join expression.
constraint = join.on.clone().alias()
elif join_type != JOIN.CROSS:
metadata = join.metadata
if metadata.is_backref:
fk_model = join.dest
pk_model = join.src
else:
fk_model = join.src
pk_model = join.dest
fk = metadata.foreign_key
if fk:
lhs = getattr(fk_model, fk.name)
rhs = getattr(pk_model, fk.to_field.name)
if metadata.is_backref:
lhs, rhs = rhs, lhs
constraint = (lhs == rhs)
else:
raise ValueError('Missing required join predicate.')
if isinstance(dest, Node):
# TODO: ensure alias?
dest_n = dest
else:
q.append(dest)
dest_n = dest.as_entity().alias(alias_map[dest])
join_sql = SQL(self.join_map.get(join_type) or join_type)
if join_type == JOIN.CROSS:
clauses.append(Clause(join_sql, dest_n))
else:
clauses.append(Clause(join_sql, dest_n, SQL('ON'),
constraint))
return clauses
def generate_select(self, query, alias_map=None):
model = query.model_class
db = model._meta.database
alias_map = self.calculate_alias_map(query, alias_map)
if isinstance(query, CompoundSelect):
clauses = [_StripParens(query)]
else:
if not query._distinct:
clauses = [SQL('SELECT')]
else:
clauses = [SQL('SELECT DISTINCT')]
if query._distinct not in (True, False):
clauses += [SQL('ON'), EnclosedClause(*query._distinct)]
select_clause = Clause(*query._select)
select_clause.glue = ', '
clauses.extend((select_clause, SQL('FROM')))
if query._from is None:
clauses.append(model.as_entity().alias(alias_map[model]))
else:
clauses.append(CommaClause(*query._from))
join_clauses = self.generate_joins(query._joins, model, alias_map)
if join_clauses:
clauses.extend(join_clauses)
if query._where is not None:
clauses.extend([SQL('WHERE'), query._where])
if query._group_by:
clauses.extend([SQL('GROUP BY'), CommaClause(*query._group_by)])
if query._having:
clauses.extend([SQL('HAVING'), query._having])
if query._windows is not None:
clauses.append(SQL('WINDOW'))
clauses.append(CommaClause(*[
Clause(
SQL(window._alias),
SQL('AS'),
window.__sql__())
for window in query._windows]))
if query._order_by:
clauses.extend([SQL('ORDER BY'), CommaClause(*query._order_by)])
if query._limit is not None or (query._offset and db.limit_max):
limit = query._limit if query._limit is not None else db.limit_max
clauses.append(SQL('LIMIT %d' % limit))
if query._offset is not None:
clauses.append(SQL('OFFSET %d' % query._offset))
if query._for_update:
clauses.append(SQL(query._for_update))
return self.build_query(clauses, alias_map)
def generate_update(self, query):
model = query.model_class
alias_map = self.alias_map_class()
alias_map.add(model, model._meta.db_table)
if query._on_conflict:
statement = 'UPDATE OR %s' % query._on_conflict
else:
statement = 'UPDATE'
clauses = [SQL(statement), model.as_entity(), SQL('SET')]
update = []
for field, value in self._sorted_fields(query._update):
if not isinstance(value, (Node, Model)):
value = Param(value, adapt=field.db_value)
update.append(Expression(
field.as_entity(with_table=False),
OP.EQ,
value,
flat=True)) # No outer parens, no table alias.
clauses.append(CommaClause(*update))
if query._where:
clauses.extend([SQL('WHERE'), query._where])
if query._returning is not None:
returning_clause = Clause(*query._returning)
returning_clause.glue = ', '
clauses.extend([SQL('RETURNING'), returning_clause])
return self.build_query(clauses, alias_map)
def _get_field_clause(self, fields, clause_type=EnclosedClause):
return clause_type(*[
field.as_entity(with_table=False) for field in fields])
def generate_insert(self, query):
model = query.model_class
meta = model._meta
alias_map = self.alias_map_class()
alias_map.add(model, model._meta.db_table)
if query._upsert:
statement = meta.database.upsert_sql
elif query._on_conflict:
statement = 'INSERT OR %s INTO' % query._on_conflict
else:
statement = 'INSERT INTO'
clauses = [SQL(statement), model.as_entity()]
if query._query is not None:
# This INSERT query is of the form INSERT INTO ... SELECT FROM.
if query._fields:
clauses.append(self._get_field_clause(query._fields))
clauses.append(_StripParens(query._query))
elif query._rows is not None:
fields, value_clauses = [], []
have_fields = False
for row_dict in query._iter_rows():
if not have_fields:
fields = sorted(
row_dict.keys(), key=operator.attrgetter('_sort_key'))
have_fields = True
values = []
for field in fields:
value = row_dict[field]
if not isinstance(value, (Node, Model)):
value = Param(value, adapt=field.db_value)
values.append(value)
value_clauses.append(EnclosedClause(*values))
if fields:
clauses.extend([
self._get_field_clause(fields),
SQL('VALUES'),
CommaClause(*value_clauses)])
elif query.model_class._meta.auto_increment:
# Bare insert, use default value for primary key.
clauses.append(query.database.default_insert_clause(
query.model_class))
if query._returning is not None:
# Return the fields asked for.
returning_clause = Clause(*query._returning)
returning_clause.glue = ', '
clauses.extend([SQL('RETURNING'), returning_clause])
elif query.is_insert_returning:
# Return the primary keys.
clauses.extend([
SQL('RETURNING'),
self._get_field_clause(
meta.get_primary_key_fields(),
clause_type=CommaClause)])
return self.build_query(clauses, alias_map)
def generate_delete(self, query):
model = query.model_class
clauses = [SQL('DELETE FROM'), model.as_entity()]
if query._where:
clauses.extend([SQL('WHERE'), query._where])
if query._returning is not None:
returning_clause = Clause(*query._returning)
returning_clause.glue = ', '
clauses.extend([SQL('RETURNING'), returning_clause])
return self.build_query(clauses)
def field_definition(self, field):
column_type = self.get_column_type(field.get_db_field())
ddl = field.__ddl__(column_type)
return Clause(*ddl)
def foreign_key_constraint(self, field):
ddl = [
SQL('FOREIGN KEY'),
EnclosedClause(field.as_entity()),
SQL('REFERENCES'),
field.rel_model.as_entity(),
EnclosedClause(field.to_field.as_entity())]
if field.on_delete:
ddl.append(SQL('ON DELETE %s' % field.on_delete))
if field.on_update:
ddl.append(SQL('ON UPDATE %s' % field.on_update))
return Clause(*ddl)
def return_parsed_node(function_name):
# TODO: treat all `generate_` functions as returning clauses, instead
# of SQL/params.
def inner(self, *args, **kwargs):
fn = getattr(self, function_name)
return self.parse_node(fn(*args, **kwargs))
return inner
def _create_foreign_key(self, model_class, field, constraint=None):
constraint = constraint or 'fk_%s_%s_refs_%s' % (
model_class._meta.db_table,
field.db_column,
field.rel_model._meta.db_table)
fk_clause = self.foreign_key_constraint(field)
return Clause(
SQL('ALTER TABLE'),
model_class.as_entity(),
SQL('ADD CONSTRAINT'),
Entity(constraint),
*fk_clause.nodes)
create_foreign_key = return_parsed_node('_create_foreign_key')
def _create_table(self, model_class, safe=False):
statement = 'CREATE TABLE IF NOT EXISTS' if safe else 'CREATE TABLE'
meta = model_class._meta
columns, constraints = [], []
if meta.composite_key:
pk_cols = [meta.fields[f].as_entity()
for f in meta.primary_key.field_names]
constraints.append(Clause(
SQL('PRIMARY KEY'), EnclosedClause(*pk_cols)))
for field in meta.declared_fields:
columns.append(self.field_definition(field))
if isinstance(field, ForeignKeyField) and not field.deferred:
constraints.append(self.foreign_key_constraint(field))
if model_class._meta.constraints:
for constraint in model_class._meta.constraints:
if not isinstance(constraint, Node):
constraint = SQL(constraint)
constraints.append(constraint)
return Clause(
SQL(statement),
model_class.as_entity(),
EnclosedClause(*(columns + constraints)))
create_table = return_parsed_node('_create_table')
def _drop_table(self, model_class, fail_silently=False, cascade=False):
statement = 'DROP TABLE IF EXISTS' if fail_silently else 'DROP TABLE'
ddl = [SQL(statement), model_class.as_entity()]
if cascade:
ddl.append(SQL('CASCADE'))
return Clause(*ddl)
drop_table = return_parsed_node('_drop_table')
def _truncate_table(self, model_class, restart_identity=False,
cascade=False):
ddl = [SQL('TRUNCATE TABLE'), model_class.as_entity()]
if restart_identity:
ddl.append(SQL('RESTART IDENTITY'))
if cascade:
ddl.append(SQL('CASCADE'))
return Clause(*ddl)
truncate_table = return_parsed_node('_truncate_table')
def index_name(self, table, columns):
index = '%s_%s' % (table, '_'.join(columns))
if len(index) > 64:
index_hash = hashlib.md5(index.encode('utf-8')).hexdigest()
index = '%s_%s' % (table[:55], index_hash[:8]) # 55 + 1 + 8 = 64
return index
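    # Worked example for index_name() above: index_name('users', ['username',
    # 'email']) returns 'users_username_email'. If the combined name exceeds 64
    # characters it is shortened to the first 55 characters of the table name,
    # an underscore and an 8-character md5 prefix (at most 64 characters total).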
def _create_index(self, model_class, fields, unique, *extra):
tbl_name = model_class._meta.db_table
statement = 'CREATE UNIQUE INDEX' if unique else 'CREATE INDEX'
index_name = self.index_name(tbl_name, [f.db_column for f in fields])
return Clause(
SQL(statement),
Entity(index_name),
SQL('ON'),
model_class.as_entity(),
EnclosedClause(*[field.as_entity() for field in fields]),
*extra)
create_index = return_parsed_node('_create_index')
def _drop_index(self, model_class, fields, fail_silently=False):
tbl_name = model_class._meta.db_table
statement = 'DROP INDEX IF EXISTS' if fail_silently else 'DROP INDEX'
index_name = self.index_name(tbl_name, [f.db_column for f in fields])
return Clause(SQL(statement), Entity(index_name))
drop_index = return_parsed_node('_drop_index')
def _create_sequence(self, sequence_name):
return Clause(SQL('CREATE SEQUENCE'), Entity(sequence_name))
create_sequence = return_parsed_node('_create_sequence')
def _drop_sequence(self, sequence_name):
return Clause(SQL('DROP SEQUENCE'), Entity(sequence_name))
drop_sequence = return_parsed_node('_drop_sequence')
class ResultIterator(object):
def __init__(self, qrw):
self.qrw = qrw
self._idx = 0
def next(self):
if self._idx < self.qrw._ct:
obj = self.qrw._result_cache[self._idx]
elif not self.qrw._populated:
obj = self.qrw.iterate()
self.qrw._result_cache.append(obj)
self.qrw._ct += 1
else:
raise StopIteration
self._idx += 1
return obj
__next__ = next
class QueryResultWrapper(object):
"""
Provides an iterator over the results of a raw Query, additionally doing
two things:
- converts rows from the database into python representations
- ensures that multiple iterations do not result in multiple queries
"""
def __init__(self, model, cursor, meta=None):
self.model = model
self.cursor = cursor
self._ct = 0
self._idx = 0
self._result_cache = []
self._populated = False
self._initialized = False
if meta is not None:
self.column_meta, self.join_meta = meta
else:
self.column_meta = self.join_meta = None
def __iter__(self):
if self._populated:
return iter(self._result_cache)
else:
return ResultIterator(self)
@property
def count(self):
self.fill_cache()
return self._ct
def __len__(self):
return self.count
def process_row(self, row):
return row
def iterate(self):
row = self.cursor.fetchone()
if not row:
self._populated = True
if not getattr(self.cursor, 'name', None):
self.cursor.close()
raise StopIteration
elif not self._initialized:
self.initialize(self.cursor.description)
self._initialized = True
return self.process_row(row)
def iterator(self):
while True:
yield self.iterate()
def next(self):
if self._idx < self._ct:
inst = self._result_cache[self._idx]
self._idx += 1
return inst
elif self._populated:
raise StopIteration
obj = self.iterate()
self._result_cache.append(obj)
self._ct += 1
self._idx += 1
return obj
__next__ = next
def fill_cache(self, n=None):
n = n or float('Inf')
if n < 0:
raise ValueError('Negative values are not supported.')
self._idx = self._ct
while not self._populated and (n > self._ct):
try:
next(self)
except StopIteration:
break
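# Usage sketch for QueryResultWrapper (above). Illustrative only; `User` is a
# hypothetical Model subclass. The wrapper is what a SelectQuery iterates over,
# so repeated iteration reuses the cached rows rather than re-running the SQL:
#
#     query = User.select()
#     first = list(query)   # executes the query and fills _result_cache
#     second = list(query)  # served from the cache; no second query
#
# The iterator() method above yields rows without caching them, which is what
# SelectQuery.iterator() builds on for large result sets. The subclasses below
# specialize initialize()/process_row() for tuples, dicts, namedtuples and
# model instances.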
class ExtQueryResultWrapper(QueryResultWrapper):
def initialize(self, description):
n_cols = len(description)
self.conv = conv = []
if self.column_meta is not None:
n_meta = len(self.column_meta)
for i, node in enumerate(self.column_meta):
if not self._initialize_node(node, i):
self._initialize_by_name(description[i][0], i)
if n_cols == n_meta:
return
else:
i = 0
for i in range(i, n_cols):
self._initialize_by_name(description[i][0], i)
def _initialize_by_name(self, name, i):
model_cols = self.model._meta.columns
if name in model_cols:
field = model_cols[name]
self.conv.append((i, field.name, field.python_value))
else:
self.conv.append((i, name, None))
def _initialize_node(self, node, i):
if isinstance(node, Field):
self.conv.append((i, node._alias or node.name, node.python_value))
return True
elif isinstance(node, Func) and len(node.arguments):
arg = node.arguments[0]
if isinstance(arg, Field):
name = node._alias or arg._alias or arg.name
func = node._coerce and arg.python_value or None
self.conv.append((i, name, func))
return True
return False
class TuplesQueryResultWrapper(ExtQueryResultWrapper):
def process_row(self, row):
return tuple([col if self.conv[i][2] is None else self.conv[i][2](col)
for i, col in enumerate(row)])
if _TuplesQueryResultWrapper is None:
_TuplesQueryResultWrapper = TuplesQueryResultWrapper
class NaiveQueryResultWrapper(ExtQueryResultWrapper):
def process_row(self, row):
instance = self.model()
for i, column, f in self.conv:
setattr(instance, column, f(row[i]) if f is not None else row[i])
instance._prepare_instance()
return instance
if _ModelQueryResultWrapper is None:
_ModelQueryResultWrapper = NaiveQueryResultWrapper
class DictQueryResultWrapper(ExtQueryResultWrapper):
def process_row(self, row):
res = {}
for i, column, f in self.conv:
res[column] = f(row[i]) if f is not None else row[i]
return res
if _DictQueryResultWrapper is None:
_DictQueryResultWrapper = DictQueryResultWrapper
class NamedTupleQueryResultWrapper(ExtQueryResultWrapper):
def initialize(self, description):
super(NamedTupleQueryResultWrapper, self).initialize(description)
columns = [column for _, column, _ in self.conv]
self.constructor = namedtuple('Row', columns)
def process_row(self, row):
return self.constructor(*[f(row[i]) if f is not None else row[i]
for i, _, f in self.conv])
class ModelQueryResultWrapper(QueryResultWrapper):
def initialize(self, description):
self.column_map, model_set = self.generate_column_map()
self._col_set = set(col for col in self.column_meta
if isinstance(col, Field))
self.join_list = self.generate_join_list(model_set)
def generate_column_map(self):
column_map = []
models = set([self.model])
for i, node in enumerate(self.column_meta):
attr = conv = None
if isinstance(node, Field):
if isinstance(node, FieldProxy):
key = node._model_alias
constructor = node.model
conv = node.field_instance.python_value
else:
key = constructor = node.model_class
conv = node.python_value
attr = node._alias or node.name
else:
if node._bind_to is None:
key = constructor = self.model
else:
key = constructor = node._bind_to
if isinstance(node, Node) and node._alias:
attr = node._alias
elif isinstance(node, Entity):
attr = node.path[-1]
column_map.append((key, constructor, attr, conv))
models.add(key)
return column_map, models
def generate_join_list(self, models):
join_list = []
joins = self.join_meta
stack = [self.model]
while stack:
current = stack.pop()
if current not in joins:
continue
for join in joins[current]:
metadata = join.metadata
if metadata.dest in models or metadata.dest_model in models:
if metadata.foreign_key is not None:
fk_present = metadata.foreign_key in self._col_set
pk_present = metadata.primary_key in self._col_set
check = metadata.foreign_key.null and (fk_present or
pk_present)
else:
check = fk_present = pk_present = False
join_list.append((
metadata,
check,
fk_present,
pk_present))
stack.append(join.dest)
return join_list
def process_row(self, row):
collected = self.construct_instances(row)
instances = self.follow_joins(collected)
for i in instances:
i._prepare_instance()
return instances[0]
def construct_instances(self, row, keys=None):
collected_models = {}
for i, (key, constructor, attr, conv) in enumerate(self.column_map):
if keys is not None and key not in keys:
continue
value = row[i]
if key not in collected_models:
collected_models[key] = constructor()
instance = collected_models[key]
if attr is None:
attr = self.cursor.description[i][0]
setattr(instance, attr, value if conv is None else conv(value))
return collected_models
def follow_joins(self, collected):
prepared = [collected[self.model]]
for (metadata, check_null, fk_present, pk_present) in self.join_list:
inst = collected[metadata.src]
try:
joined_inst = collected[metadata.dest]
except KeyError:
joined_inst = collected[metadata.dest_model]
has_fk = True
if check_null:
if fk_present:
has_fk = inst._data.get(metadata.foreign_key.name)
elif pk_present:
has_fk = joined_inst._data.get(metadata.primary_key.name)
if not has_fk:
continue
# Can we populate a value on the joined instance using the current?
mpk = metadata.primary_key is not None
can_populate_joined_pk = (
mpk and
(metadata.attr in inst._data) and
(getattr(joined_inst, metadata.primary_key.name) is None))
if can_populate_joined_pk:
setattr(
joined_inst,
metadata.primary_key.name,
inst._data[metadata.attr])
if metadata.is_backref:
can_populate_joined_fk = (
mpk and
(metadata.foreign_key is not None) and
(getattr(inst, metadata.primary_key.name) is not None) and
(joined_inst._data.get(metadata.foreign_key.name) is None))
if can_populate_joined_fk:
setattr(
joined_inst,
metadata.foreign_key.name,
inst)
setattr(inst, metadata.attr, joined_inst)
prepared.append(joined_inst)
return prepared
JoinCache = namedtuple('JoinCache', ('metadata', 'attr'))
class AggregateQueryResultWrapper(ModelQueryResultWrapper):
def __init__(self, *args, **kwargs):
self._row = []
super(AggregateQueryResultWrapper, self).__init__(*args, **kwargs)
def initialize(self, description):
super(AggregateQueryResultWrapper, self).initialize(description)
# Collect the set of all models (and ModelAlias objects) queried.
self.all_models = set()
for key, _, _, _ in self.column_map:
self.all_models.add(key)
# Prepare data structures for analyzing unique rows. Also cache
# foreign key and attribute names for joined models.
self.models_with_aggregate = set()
self.back_references = {}
self.source_to_dest = {}
self.dest_to_source = {}
for (metadata, _, _, _) in self.join_list:
if metadata.is_backref:
att_name = metadata.foreign_key.related_name
else:
att_name = metadata.attr
is_backref = metadata.is_backref or metadata.is_self_join
if is_backref:
self.models_with_aggregate.add(metadata.src)
else:
self.dest_to_source.setdefault(metadata.dest, set())
self.dest_to_source[metadata.dest].add(metadata.src)
self.source_to_dest.setdefault(metadata.src, {})
self.source_to_dest[metadata.src][metadata.dest] = JoinCache(
metadata=metadata,
attr=metadata.alias or att_name)
# Determine which columns could contain "duplicate" data, e.g. if
# getting Users and their Tweets, this would be the User columns.
self.columns_to_compare = {}
key_to_columns = {}
for idx, (key, model_class, col_name, _) in enumerate(self.column_map):
if key in self.models_with_aggregate:
self.columns_to_compare.setdefault(key, [])
self.columns_to_compare[key].append((idx, col_name))
key_to_columns.setdefault(key, [])
key_to_columns[key].append((idx, col_name))
# Also compare columns for joins -> many-related model.
for model_or_alias in self.models_with_aggregate:
if model_or_alias not in self.columns_to_compare:
continue
sources = self.dest_to_source.get(model_or_alias, ())
for joined_model in sources:
self.columns_to_compare[model_or_alias].extend(
key_to_columns[joined_model])
def read_model_data(self, row):
models = {}
for model_class, column_data in self.columns_to_compare.items():
models[model_class] = []
for idx, col_name in column_data:
models[model_class].append(row[idx])
return models
def iterate(self):
if self._row:
row = self._row.pop()
else:
row = self.cursor.fetchone()
if not row:
self._populated = True
if not getattr(self.cursor, 'name', None):
self.cursor.close()
raise StopIteration
elif not self._initialized:
self.initialize(self.cursor.description)
self._initialized = True
def _get_pk(instance):
if instance._meta.composite_key:
return tuple([
instance._data[field_name]
for field_name in instance._meta.primary_key.field_names])
return instance._get_pk_value()
identity_map = {}
_constructed = self.construct_instances(row)
primary_instance = _constructed[self.model]
for model_or_alias, instance in _constructed.items():
identity_map[model_or_alias] = OrderedDict()
identity_map[model_or_alias][_get_pk(instance)] = instance
model_data = self.read_model_data(row)
while True:
cur_row = self.cursor.fetchone()
if cur_row is None:
break
duplicate_models = set()
cur_row_data = self.read_model_data(cur_row)
for model_class, data in cur_row_data.items():
if model_data[model_class] == data:
duplicate_models.add(model_class)
if not duplicate_models:
self._row.append(cur_row)
break
different_models = self.all_models - duplicate_models
new_instances = self.construct_instances(cur_row, different_models)
for model_or_alias, instance in new_instances.items():
# Do not include any instances which are comprised solely of
# NULL values.
all_none = True
for value in instance._data.values():
if value is not None:
all_none = False
if not all_none:
identity_map[model_or_alias][_get_pk(instance)] = instance
stack = [self.model]
instances = [primary_instance]
while stack:
current = stack.pop()
if current not in self.join_meta:
continue
for join in self.join_meta[current]:
try:
metadata, attr = self.source_to_dest[current][join.dest]
except KeyError:
continue
if metadata.is_backref or metadata.is_self_join:
for instance in identity_map[current].values():
setattr(instance, attr, [])
if join.dest not in identity_map:
continue
for pk, inst in identity_map[join.dest].items():
if pk is None:
continue
try:
# XXX: if no FK exists, unable to join.
joined_inst = identity_map[current][
inst._data[metadata.foreign_key.name]]
except KeyError:
continue
getattr(joined_inst, attr).append(inst)
instances.append(inst)
elif attr:
if join.dest not in identity_map:
continue
for pk, instance in identity_map[current].items():
# XXX: if no FK exists, unable to join.
joined_inst = identity_map[join.dest][
instance._data[metadata.foreign_key.name]]
setattr(
instance,
metadata.foreign_key.name,
joined_inst)
instances.append(joined_inst)
stack.append(join.dest)
for instance in instances:
instance._prepare_instance()
return primary_instance
class Query(Node):
"""Base class representing a database query on one or more tables."""
require_commit = True
def __init__(self, model_class):
super(Query, self).__init__()
self.model_class = model_class
self.database = model_class._meta.database
self._dirty = True
self._query_ctx = model_class
self._joins = {self.model_class: []} # Join graph as adjacency list.
self._where = None
def __repr__(self):
sql, params = self.sql()
return '%s %s %s' % (self.model_class, sql, params)
def clone(self):
query = type(self)(self.model_class)
query.database = self.database
return self._clone_attributes(query)
def _clone_attributes(self, query):
if self._where is not None:
query._where = self._where.clone()
query._joins = self._clone_joins()
query._query_ctx = self._query_ctx
return query
def _clone_joins(self):
return dict(
(mc, list(j)) for mc, j in self._joins.items())
def _add_query_clauses(self, initial, expressions, conjunction=None):
reduced = reduce(operator.and_, expressions)
if initial is None:
return reduced
conjunction = conjunction or operator.and_
return conjunction(initial, reduced)
def _model_shorthand(self, args):
accum = []
for arg in args:
if isinstance(arg, Node):
accum.append(arg)
elif isinstance(arg, Query):
accum.append(arg)
elif isinstance(arg, ModelAlias):
accum.extend(arg.get_proxy_fields())
elif isclass(arg) and issubclass(arg, Model):
accum.extend(arg._meta.declared_fields)
return accum
@returns_clone
def where(self, *expressions):
self._where = self._add_query_clauses(self._where, expressions)
@returns_clone
def orwhere(self, *expressions):
self._where = self._add_query_clauses(
self._where, expressions, operator.or_)
@returns_clone
def join(self, dest, join_type=None, on=None):
src = self._query_ctx
if on is None:
require_join_condition = join_type != JOIN.CROSS and (
isinstance(dest, SelectQuery) or
(isclass(dest) and not src._meta.rel_exists(dest)))
if require_join_condition:
raise ValueError('A join condition must be specified.')
elif join_type == JOIN.CROSS:
raise ValueError('A CROSS join cannot have a constraint.')
elif isinstance(on, str):
on = src._meta.fields[on]
self._joins.setdefault(src, [])
self._joins[src].append(Join(src, dest, join_type, on))
if not isinstance(dest, SelectQuery):
self._query_ctx = dest
@returns_clone
def switch(self, model_class=None):
"""Change or reset the query context."""
self._query_ctx = model_class or self.model_class
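# Sketch (hypothetical Tweet/User/Favorite models): switch() controls which
# model subsequent join() calls attach to, e.g.:
#     (Tweet.select()
#           .join(User)          # join context is now User
#           .switch(Tweet)       # reset context back to Tweet
#           .join(Favorite))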
def ensure_join(self, lm, rm, on=None, **join_kwargs):
ctx = self._query_ctx
for join in self._joins.get(lm, []):
if join.dest == rm:
return self
return self.switch(lm).join(rm, on=on, **join_kwargs).switch(ctx)
def convert_dict_to_node(self, qdict):
accum = []
joins = []
relationship = (ForeignKeyField, ReverseRelationDescriptor)
for key, value in sorted(qdict.items()):
curr = self.model_class
if '__' in key and key.rsplit('__', 1)[1] in DJANGO_MAP:
key, op = key.rsplit('__', 1)
op_group = DJANGO_MAP[op]
op, value = (op_group[0], op_group[1] % value) \
if len(op_group) == 2 else (op_group[0], value)
elif value is None:
op = OP.IS
else:
op = OP.EQ
for piece in key.split('__'):
model_attr = getattr(curr, piece)
if value is not None and isinstance(model_attr, relationship):
curr = model_attr.rel_model
joins.append(model_attr)
accum.append(Expression(model_attr, op, value))
return accum, joins
def _filter_or_exclude(self, negate, *args, **kwargs):
dq_node = Node()
if args:
dq_node &= reduce(operator.and_, [a.clone() for a in args])
if kwargs:
dq_node &= DQ(**kwargs)
# dq_node should now be an Expression, lhs = Node(), rhs = ...
q = deque([dq_node])
dq_joins = set()
while q:
curr = q.popleft()
if not isinstance(curr, Expression):
continue
for side, piece in (('lhs', curr.lhs), ('rhs', curr.rhs)):
if isinstance(piece, DQ):
query, joins = self.convert_dict_to_node(piece.query)
dq_joins.update(joins)
expression = reduce(operator.and_, query)
# Apply values from the DQ object.
expression._negated = piece._negated
expression._alias = piece._alias
setattr(curr, side, expression)
else:
q.append(piece)
dq_node = dq_node.rhs
query = self.clone()
for field in dq_joins:
if isinstance(field, ForeignKeyField):
lm, rm = field.model_class, field.rel_model
field_obj = field
elif isinstance(field, ReverseRelationDescriptor):
lm, rm = field.field.rel_model, field.rel_model
field_obj = field.field
query = query.ensure_join(lm, rm, field_obj)
if negate:
return query.where(~dq_node)
else:
return query.where(dq_node)
def filter(self, *args, **kwargs):
return self._filter_or_exclude(False, *args, **kwargs)
# Added: exclude() filter, the negated counterpart of filter().
def exclude(self, *args, **kwargs):
return self._filter_or_exclude(True, *args, **kwargs)
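# Sketch of the Django-style API (hypothetical User model, lookups resolved
# via the DJANGO_MAP table defined earlier in this module): filter() keeps
# matching rows, exclude() wraps the same condition in a negation, e.g.:
#     User.filter(username__ilike='char%')     # WHERE username ILIKE ...
#     User.select().exclude(id__in=[1, 2, 3])  # roughly WHERE NOT (id IN (1, 2, 3))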
def compiler(self):
return self.database.compiler()
def sql(self):
raise NotImplementedError
def _execute(self):
sql, params = self.sql()
return self.database.execute_sql(sql, params, self.require_commit)
def execute(self):
raise NotImplementedError
def scalar(self, as_tuple=False, convert=False):
if convert:
row = self.tuples().first()
else:
row = self._execute().fetchone()
if row and not as_tuple:
return row[0]
else:
return row
class RawQuery(Query):
"""
Execute a SQL query, returning a standard iterable interface that returns
model instances.
"""
def __init__(self, model, query, *params):
self._sql = query
self._params = list(params)
self._qr = None
self._tuples = False
self._dicts = False
super(RawQuery, self).__init__(model)
def clone(self):
query = RawQuery(self.model_class, self._sql, *self._params)
query._tuples = self._tuples
query._dicts = self._dicts
return query
join = not_allowed('joining')
where = not_allowed('where')
switch = not_allowed('switch')
@returns_clone
def tuples(self, tuples=True):
self._tuples = tuples
@returns_clone
def dicts(self, dicts=True):
self._dicts = dicts
def sql(self):
return self._sql, self._params
def execute(self):
if self._qr is None:
if self._tuples:
QRW = self.database.get_result_wrapper(RESULTS_TUPLES)
elif self._dicts:
QRW = self.database.get_result_wrapper(RESULTS_DICTS)
else:
QRW = self.database.get_result_wrapper(RESULTS_NAIVE)
self._qr = QRW(self.model_class, self._execute(), None)
return self._qr
def __iter__(self):
return iter(self.execute())
def allow_extend(orig, new_val, **kwargs):
extend = kwargs.pop('extend', False)
if kwargs:
raise ValueError('"extend" is the only valid keyword argument.')
if extend:
return ((orig or []) + new_val) or None
elif new_val:
return new_val
class SelectQuery(Query):
_node_type = 'select_query'
def __init__(self, model_class, *selection):
super(SelectQuery, self).__init__(model_class)
self.require_commit = self.database.commit_select
self.__select(*selection)
self._from = None
self._group_by = None
self._having = None
self._order_by = None
self._windows = None
self._limit = None
self._offset = None
self._distinct = False
self._for_update = None
self._naive = False
self._tuples = False
self._dicts = False
self._namedtuples = False
self._aggregate_rows = False
self._alias = None
self._qr = None
def _clone_attributes(self, query):
query = super(SelectQuery, self)._clone_attributes(query)
query._explicit_selection = self._explicit_selection
query._select = list(self._select)
if self._from is not None:
query._from = []
for f in self._from:
if isinstance(f, Node):
query._from.append(f.clone())
else:
query._from.append(f)
if self._group_by is not None:
query._group_by = list(self._group_by)
if self._having:
query._having = self._having.clone()
if self._order_by is not None:
query._order_by = list(self._order_by)
if self._windows is not None:
query._windows = list(self._windows)
query._limit = self._limit
query._offset = self._offset
query._distinct = self._distinct
query._for_update = self._for_update
query._naive = self._naive
query._tuples = self._tuples
query._dicts = self._dicts
query._namedtuples = self._namedtuples
query._aggregate_rows = self._aggregate_rows
query._alias = self._alias
return query
def compound_op(operator):
def inner(self, other):
supported_ops = self.model_class._meta.database.compound_operations
if operator not in supported_ops:
raise ValueError(
'Your database does not support %s' % operator)
return CompoundSelect(self.model_class, self, operator, other)
return inner
_compound_op_static = staticmethod(compound_op)
__or__ = compound_op('UNION')
__and__ = compound_op('INTERSECT')
__sub__ = compound_op('EXCEPT')
def __xor__(self, rhs):
# Symmetric difference, should just be (self | rhs) - (self & rhs)...
wrapped_rhs = self.model_class.select(SQL('*')).from_(
EnclosedClause((self & rhs)).alias('_')).order_by()
return (self | rhs) - wrapped_rhs
def union_all(self, rhs):
return SelectQuery._compound_op_static('UNION ALL')(self, rhs)
def __select(self, *selection):
self._explicit_selection = len(selection) > 0
selection = selection or self.model_class._meta.declared_fields
self._select = self._model_shorthand(selection)
select = returns_clone(__select)
@returns_clone
def from_(self, *args):
self._from = list(args) if args else None
@returns_clone
def group_by(self, *args, **kwargs):
self._group_by = self._model_shorthand(args) if args else None
@returns_clone
def having(self, *expressions):
self._having = self._add_query_clauses(self._having, expressions)
@returns_clone
def order_by(self, *args, **kwargs):
self._order_by = allow_extend(self._order_by, list(args), **kwargs)
@returns_clone
def window(self, *windows, **kwargs):
self._windows = allow_extend(self._windows, list(windows), **kwargs)
@returns_clone
def limit(self, lim):
self._limit = lim
@returns_clone
def offset(self, off):
self._offset = off
@returns_clone
def paginate(self, page, paginate_by=20):
if page > 0:
page -= 1
self._limit = paginate_by
self._offset = page * paginate_by
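# Sketch: paginate(page, paginate_by) is 1-based and expands to
# LIMIT/OFFSET, e.g. .paginate(3, 20) -> LIMIT 20 OFFSET 40.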
@returns_clone
def distinct(self, is_distinct=True):
self._distinct = is_distinct
@returns_clone
def for_update(self, for_update=True, nowait=False):
self._for_update = 'FOR UPDATE NOWAIT' if for_update and nowait else \
'FOR UPDATE' if for_update else None
@returns_clone
def with_lock(self, lock_type='UPDATE'):
self._for_update = ('FOR %s' % lock_type) if lock_type else None
@returns_clone
def naive(self, naive=True):
self._naive = naive
@returns_clone
def tuples(self, tuples=True):
self._tuples = tuples
if tuples:
self._dicts = self._namedtuples = False
@returns_clone
def dicts(self, dicts=True):
self._dicts = dicts
if dicts:
self._tuples = self._namedtuples = False
@returns_clone
def namedtuples(self, namedtuples=True):
self._namedtuples = namedtuples
if namedtuples:
self._dicts = self._tuples = False
@returns_clone
def aggregate_rows(self, aggregate_rows=True):
self._aggregate_rows = aggregate_rows
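# Sketch (hypothetical User/Tweet models, JOIN constants defined earlier in
# this module): aggregate_rows() selects AggregateQueryResultWrapper so the
# duplicate rows produced by a joined SELECT are collapsed into nested
# collections; ordering by the outer model keeps duplicates adjacent:
#     query = (User.select(User, Tweet)
#                  .join(Tweet, JOIN.LEFT_OUTER)
#                  .order_by(User.id, Tweet.id)
#                  .aggregate_rows())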
@returns_clone
def alias(self, alias=None):
self._alias = alias
def annotate(self, rel_model, annotation=None):
if annotation is None:
annotation = fn.Count(rel_model._meta.primary_key).alias('count')
if self._query_ctx == rel_model:
query = self.switch(self.model_class)
else:
query = self.clone()
query = query.ensure_join(query._query_ctx, rel_model)
if not query._group_by:
query._group_by = [x.alias() for x in query._select]
query._select = tuple(query._select) + (annotation,)
return query
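# Sketch (hypothetical User/Tweet models): annotate() adds an aggregate over
# a related model plus the implied GROUP BY; the default annotation is a
# COUNT aliased as "count":
#     for user in User.select().annotate(Tweet):
#         print(user.username, user.count)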
def _aggregate(self, aggregation=None):
if aggregation is None:
aggregation = fn.Count(SQL('*'))
query = self.order_by()
query._select = [aggregation]
return query
def aggregate(self, aggregation=None, convert=True):
return self._aggregate(aggregation).scalar(convert=convert)
def count(self, clear_limit=False):
if self._distinct or self._group_by or self._limit or self._offset:
return self.wrapped_count(clear_limit=clear_limit)
# defaults to a count() of the primary key
return self.aggregate(convert=False) or 0
def wrapped_count(self, clear_limit=False):
clone = self.order_by()
if clear_limit:
clone._limit = clone._offset = None
sql, params = clone.sql()
wrapped = 'SELECT COUNT(1) FROM (%s) AS wrapped_select' % sql
rq = self.model_class.raw(wrapped, *params)
return rq.scalar() or 0
def exists(self):
clone = self.paginate(1, 1)
clone._select = [SQL('1')]
return bool(clone.scalar())
def get(self):
clone = self.paginate(1, 1)
try:
return next(clone.execute())
except StopIteration:
raise self.model_class.DoesNotExist(
'Instance matching query does not exist:\nSQL: %s\nPARAMS: %s'
% self.sql())
def peek(self, n=1):
res = self.execute()
res.fill_cache(n)
models = res._result_cache[:n]
if models:
return models[0] if n == 1 else models
def first(self, n=1):
if self._limit != n:
self._limit = n
self._dirty = True
return self.peek(n=n)
def sql(self):
return self.compiler().generate_select(self)
def verify_naive(self):
model_class = self.model_class
for node in self._select:
if isinstance(node, Field) and node.model_class != model_class:
return False
elif isinstance(node, Node) and node._bind_to is not None:
if node._bind_to != model_class:
return False
return True
def get_query_meta(self):
return (self._select, self._joins)
def _get_result_wrapper(self):
if self._tuples:
return self.database.get_result_wrapper(RESULTS_TUPLES)
elif self._dicts:
return self.database.get_result_wrapper(RESULTS_DICTS)
elif self._namedtuples:
return self.database.get_result_wrapper(RESULTS_NAMEDTUPLES)
elif self._naive or not self._joins or self.verify_naive():
return self.database.get_result_wrapper(RESULTS_NAIVE)
elif self._aggregate_rows:
return self.database.get_result_wrapper(RESULTS_AGGREGATE_MODELS)
else:
return self.database.get_result_wrapper(RESULTS_MODELS)
def execute(self):
if self._dirty or self._qr is None:
model_class = self.model_class
query_meta = self.get_query_meta()
ResultWrapper = self._get_result_wrapper()
self._qr = ResultWrapper(model_class, self._execute(), query_meta)
self._dirty = False
return self._qr
else:
return self._qr
def real(self):
"""
实时查询,去掉脏缓存
:return:
"""
self._dirty = True
def __iter__(self):
return iter(self.execute())
def iterator(self):
return iter(self.execute().iterator())
def __getitem__(self, value):
res = self.execute()
if isinstance(value, slice):
index = value.stop
else:
index = value
if index is not None:
index = index + 1 if index >= 0 else None
res.fill_cache(index)
return res._result_cache[value]
def __len__(self):
return len(self.execute())
def __hash__(self):
return id(self)
class NoopSelectQuery(SelectQuery):
def sql(self):
return (self.database.get_noop_sql(), ())
def get_query_meta(self):
return None, None
def _get_result_wrapper(self):
return self.database.get_result_wrapper(RESULTS_TUPLES)
class CompoundSelect(SelectQuery):
_node_type = 'compound_select_query'
def __init__(self, model_class, lhs=None, operator=None, rhs=None):
self.lhs = lhs
self.operator = operator
self.rhs = rhs
super(CompoundSelect, self).__init__(model_class, [])
def _clone_attributes(self, query):
query = super(CompoundSelect, self)._clone_attributes(query)
query.lhs = self.lhs
query.operator = self.operator
query.rhs = self.rhs
return query
def count(self, clear_limit=False):
return self.wrapped_count(clear_limit=clear_limit)
def get_query_meta(self):
return self.lhs.get_query_meta()
def verify_naive(self):
return self.lhs.verify_naive() and self.rhs.verify_naive()
def _get_result_wrapper(self):
if self._tuples:
return self.database.get_result_wrapper(RESULTS_TUPLES)
elif self._dicts:
return self.database.get_result_wrapper(RESULTS_DICTS)
elif self._namedtuples:
return self.database.get_result_wrapper(RESULTS_NAMEDTUPLES)
elif self._aggregate_rows:
return self.database.get_result_wrapper(RESULTS_AGGREGATE_MODELS)
has_joins = self.lhs._joins or self.rhs._joins
is_naive = self.lhs._naive or self.rhs._naive or self._naive
if is_naive or not has_joins or self.verify_naive():
return self.database.get_result_wrapper(RESULTS_NAIVE)
else:
return self.database.get_result_wrapper(RESULTS_MODELS)
class _WriteQuery(Query):
def __init__(self, model_class):
self._returning = None
self._tuples = False
self._dicts = False
self._namedtuples = False
self._qr = None
super(_WriteQuery, self).__init__(model_class)
def _clone_attributes(self, query):
query = super(_WriteQuery, self)._clone_attributes(query)
if self._returning:
query._returning = list(self._returning)
query._tuples = self._tuples
query._dicts = self._dicts
query._namedtuples = self._namedtuples
return query
def requires_returning(method):
def inner(self, *args, **kwargs):
db = self.model_class._meta.database
if not db.returning_clause:
raise ValueError('RETURNING is not supported by your '
'database: %s' % type(db))
return method(self, *args, **kwargs)
return inner
@requires_returning
@returns_clone
def returning(self, *selection):
if len(selection) == 1 and selection[0] is None:
self._returning = None
else:
if not selection:
selection = self.model_class._meta.declared_fields
self._returning = self._model_shorthand(selection)
@requires_returning
@returns_clone
def tuples(self, tuples=True):
self._tuples = tuples
if tuples:
self._dicts = self._namedtuples = False
@requires_returning
@returns_clone
def dicts(self, dicts=True):
self._dicts = dicts
if dicts:
self._tuples = self._namedtuples = False
@requires_returning
@returns_clone
def namedtuples(self, namedtuples=True):
self._namedtuples = namedtuples
if namedtuples:
self._dicts = self._tuples = False
def get_result_wrapper(self):
if self._returning is not None:
if self._tuples:
return self.database.get_result_wrapper(RESULTS_TUPLES)
elif self._dicts:
return self.database.get_result_wrapper(RESULTS_DICTS)
elif self._namedtuples:
return self.database.get_result_wrapper(RESULTS_NAMEDTUPLES)
return self.database.get_result_wrapper(RESULTS_NAIVE)
def _execute_with_result_wrapper(self):
ResultWrapper = self.get_result_wrapper()
meta = (self._returning, {self.model_class: []})
self._qr = ResultWrapper(self.model_class, self._execute(), meta)
return self._qr
class UpdateQuery(_WriteQuery):
def __init__(self, model_class, update=None):
self._update = update
self._on_conflict = None
super(UpdateQuery, self).__init__(model_class)
def _clone_attributes(self, query):
query = super(UpdateQuery, self)._clone_attributes(query)
query._update = dict(self._update)
query._on_conflict = self._on_conflict
return query
@returns_clone
def on_conflict(self, action=None):
self._on_conflict = action
join = not_allowed('joining')
def sql(self):
return self.compiler().generate_update(self)
def execute(self):
if self._returning is not None and self._qr is None:
return self._execute_with_result_wrapper()
elif self._qr is not None:
return self._qr
else:
return self.database.rows_affected(self._execute())
def __iter__(self):
if not self.model_class._meta.database.returning_clause:
raise ValueError('UPDATE queries cannot be iterated over unless '
'they specify a RETURNING clause, which is not '
'supported by your database.')
return iter(self.execute())
def iterator(self):
return iter(self.execute().iterator())
class InsertQuery(_WriteQuery):
def __init__(self, model_class, field_dict=None, rows=None,
fields=None, query=None, validate_fields=False):
super(InsertQuery, self).__init__(model_class)
self._upsert = False
self._is_multi_row_insert = rows is not None or query is not None
self._return_id_list = False
if rows is not None:
self._rows = rows
else:
self._rows = [field_dict or {}]
self._fields = fields
self._query = query
self._validate_fields = validate_fields
self._on_conflict = None
def _iter_rows(self):
model_meta = self.model_class._meta
if self._validate_fields:
valid_fields = model_meta.valid_fields
def validate_field(field):
if field not in valid_fields:
raise KeyError('"%s" is not a recognized field.' % field)
defaults = model_meta._default_dict
callables = model_meta._default_callables
for row_dict in self._rows:
field_row = defaults.copy()
seen = set()
for key in row_dict:
if self._validate_fields:
validate_field(key)
if key in model_meta.fields:
field = model_meta.fields[key]
else:
field = key
field_row[field] = row_dict[key]
seen.add(field)
if callables:
for field in callables:
if field not in seen:
field_row[field] = callables[field]()
yield field_row
def _clone_attributes(self, query):
query = super(InsertQuery, self)._clone_attributes(query)
query._rows = self._rows
query._upsert = self._upsert
query._is_multi_row_insert = self._is_multi_row_insert
query._fields = self._fields
query._query = self._query
query._return_id_list = self._return_id_list
query._validate_fields = self._validate_fields
query._on_conflict = self._on_conflict
return query
join = not_allowed('joining')
where = not_allowed('where clause')
@returns_clone
def upsert(self, upsert=True):
self._upsert = upsert
@returns_clone
def on_conflict(self, action=None):
self._on_conflict = action
@returns_clone
def return_id_list(self, return_id_list=True):
self._return_id_list = return_id_list
@property
def is_insert_returning(self):
if self.database.insert_returning:
if not self._is_multi_row_insert or self._return_id_list:
return True
return False
def sql(self):
return self.compiler().generate_insert(self)
def _insert_with_loop(self):
id_list = []
last_id = None
return_id_list = self._return_id_list
for row in self._rows:
last_id = (InsertQuery(self.model_class, row)
.upsert(self._upsert)
.execute())
if return_id_list:
id_list.append(last_id)
if return_id_list:
return id_list
else:
return last_id
def execute(self):
insert_with_loop = (
self._is_multi_row_insert and
self._query is None and
self._returning is None and
not self.database.insert_many)
if insert_with_loop:
return self._insert_with_loop()
if self._returning is not None and self._qr is None:
return self._execute_with_result_wrapper()
elif self._qr is not None:
return self._qr
else:
cursor = self._execute()
if not self._is_multi_row_insert:
if self.database.insert_returning:
pk_row = cursor.fetchone()
meta = self.model_class._meta
clean_data = [
field.python_value(column)
for field, column
in zip(meta.get_primary_key_fields(), pk_row)]
if self.model_class._meta.composite_key:
return clean_data
return clean_data[0]
return self.database.last_insert_id(cursor, self.model_class)
elif self._return_id_list:
return map(operator.itemgetter(0), cursor.fetchall())
else:
return True
class DeleteQuery(_WriteQuery):
join = not_allowed('joining')
def sql(self):
return self.compiler().generate_delete(self)
def execute(self):
if self._returning is not None and self._qr is None:
return self._execute_with_result_wrapper()
elif self._qr is not None:
return self._qr
else:
return self.database.rows_affected(self._execute())
IndexMetadata = namedtuple(
'IndexMetadata',
('name', 'sql', 'columns', 'unique', 'table'))
ColumnMetadata = namedtuple(
'ColumnMetadata',
('name', 'data_type', 'null', 'primary_key', 'table'))
ForeignKeyMetadata = namedtuple(
'ForeignKeyMetadata',
('column', 'dest_table', 'dest_column', 'table'))
class PeeweeException(Exception): pass
class ImproperlyConfigured(PeeweeException): pass
class DatabaseError(PeeweeException): pass
class DataError(DatabaseError): pass
class IntegrityError(DatabaseError): pass
class InterfaceError(PeeweeException): pass
class InternalError(DatabaseError): pass
class NotSupportedError(DatabaseError): pass
class OperationalError(DatabaseError): pass
class ProgrammingError(DatabaseError): pass
class ExceptionWrapper(object):
__slots__ = ['exceptions']
def __init__(self, exceptions):
self.exceptions = exceptions
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
return
if exc_type.__name__ in self.exceptions:
new_type = self.exceptions[exc_type.__name__]
exc_args = exc_value.args
reraise(new_type, new_type(*exc_args), traceback)
class _BaseConnectionLocal(object):
def __init__(self, **kwargs):
super(_BaseConnectionLocal, self).__init__(**kwargs)
self.autocommit = None
self.closed = True
self.conn = None
self.context_stack = []
self.transactions = []
class _ConnectionLocal(_BaseConnectionLocal, threading.local):
pass
class Database(object):
commit_select = False
compiler_class = QueryCompiler
compound_operations = ['UNION', 'INTERSECT', 'EXCEPT', 'UNION ALL']
compound_select_parentheses = False
distinct_on = False
drop_cascade = False
field_overrides = {}
foreign_keys = True
for_update = False
for_update_nowait = False
insert_many = True
insert_returning = False
interpolation = '?'
limit_max = None
op_overrides = {}
quote_char = '"'
reserved_tables = []
returning_clause = False
savepoints = True
sequences = False
subquery_delete_same_table = True
upsert_sql = None
window_functions = False
exceptions = {
'ConstraintError': IntegrityError,
'DatabaseError': DatabaseError,
'DataError': DataError,
'IntegrityError': IntegrityError,
'InterfaceError': InterfaceError,
'InternalError': InternalError,
'NotSupportedError': NotSupportedError,
'OperationalError': OperationalError,
'ProgrammingError': ProgrammingError}
def __init__(self, database, threadlocals=True, autocommit=True,
fields=None, ops=None, autorollback=False, use_speedups=True,
**connect_kwargs):
self.connect_kwargs = {}
if threadlocals:
self._local = _ConnectionLocal()
else:
self._local = _BaseConnectionLocal()
self.init(database, **connect_kwargs)
self._conn_lock = threading.Lock()
self.autocommit = autocommit
self.autorollback = autorollback
self.use_speedups = use_speedups
self.field_overrides = merge_dict(self.field_overrides, fields or {})
self.op_overrides = merge_dict(self.op_overrides, ops or {})
self.exception_wrapper = ExceptionWrapper(self.exceptions)
def init(self, database, **connect_kwargs):
if not self.is_closed():
self.close()
self.deferred = database is None
self.database = database
self.connect_kwargs.update(connect_kwargs)
def connect(self):
with self._conn_lock:
if self.deferred:
raise OperationalError('Database has not been initialized')
if not self._local.closed:
raise OperationalError('Connection already open')
self._local.conn = self._create_connection()
self._local.closed = False
with self.exception_wrapper:
self.initialize_connection(self._local.conn)
def initialize_connection(self, conn):
pass
def close(self):
with self._conn_lock:
if self.deferred:
raise Exception('Error, database not properly initialized '
'before closing connection')
try:
with self.exception_wrapper:
self._close(self._local.conn)
finally:
self._local.closed = True
def get_conn(self):
if self._local.context_stack:
conn = self._local.context_stack[-1].connection
if conn is not None:
return conn
if self._local.closed:
self.connect()
return self._local.conn
def _create_connection(self):
with self.exception_wrapper:
return self._connect(self.database, **self.connect_kwargs)
def is_closed(self):
return self._local.closed
# def get_cursor(self):
# conn = self.get_conn()
# if conn._sock is None:
# conn.ping()
# return conn.cursor()
def _close(self, conn):
conn.close()
def _connect(self, database, **kwargs):
raise NotImplementedError
@classmethod
def register_fields(cls, fields):
cls.field_overrides = merge_dict(cls.field_overrides, fields)
@classmethod
def register_ops(cls, ops):
cls.op_overrides = merge_dict(cls.op_overrides, ops)
def get_result_wrapper(self, wrapper_type):
if wrapper_type == RESULTS_NAIVE:
return (_ModelQueryResultWrapper if self.use_speedups
else NaiveQueryResultWrapper)
elif wrapper_type == RESULTS_MODELS:
return ModelQueryResultWrapper
elif wrapper_type == RESULTS_TUPLES:
return (_TuplesQueryResultWrapper if self.use_speedups
else TuplesQueryResultWrapper)
elif wrapper_type == RESULTS_DICTS:
return (_DictQueryResultWrapper if self.use_speedups
else DictQueryResultWrapper)
elif wrapper_type == RESULTS_NAMEDTUPLES:
return NamedTupleQueryResultWrapper
elif wrapper_type == RESULTS_AGGREGATE_MODELS:
return AggregateQueryResultWrapper
else:
return (_ModelQueryResultWrapper if self.use_speedups
else NaiveQueryResultWrapper)
def last_insert_id(self, cursor, model):
if model._meta.auto_increment:
return cursor.lastrowid
def rows_affected(self, cursor):
return cursor.rowcount
def compiler(self):
return self.compiler_class(
self.quote_char, self.interpolation, self.field_overrides,
self.op_overrides)
def execute(self, clause):
return self.execute_sql(*self.compiler().parse_node(clause))
def execute_sql(self, sql, params=None, require_commit=True):
logger.debug((sql, params))
with self.exception_wrapper:
conn = self.get_conn()
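# The connection is re-validated before every statement. This check assumes
# the PyMySQL driver, which sets its private _sock attribute to None when
# the socket is gone; ping() reconnects by default.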
if conn._sock is None:
conn.ping()
# cursor = self.get_cursor()
try:
cursor = conn.cursor()
cursor.execute(sql, params or ())
cursor.close()
except Exception:
if self.autorollback and self.get_autocommit():
self.rollback()
raise
else:
if require_commit and self.get_autocommit():
self.commit()
finally:
self._close(conn)
return cursor
def begin(self):
pass
def commit(self):
with self.exception_wrapper:
self.get_conn().commit()
def rollback(self):
with self.exception_wrapper:
self.get_conn().rollback()
def set_autocommit(self, autocommit):
self._local.autocommit = autocommit
def get_autocommit(self):
if self._local.autocommit is None:
self.set_autocommit(self.autocommit)
return self._local.autocommit
def push_execution_context(self, transaction):
self._local.context_stack.append(transaction)
def pop_execution_context(self):
self._local.context_stack.pop()
def execution_context_depth(self):
return len(self._local.context_stack)
def execution_context(self, with_transaction=True, transaction_type=None):
return ExecutionContext(self, with_transaction, transaction_type)
__call__ = execution_context
def push_transaction(self, transaction):
self._local.transactions.append(transaction)
def pop_transaction(self):
self._local.transactions.pop()
def transaction_depth(self):
return len(self._local.transactions)
def transaction(self, transaction_type=None):
return transaction(self, transaction_type)
commit_on_success = property(transaction)
def savepoint(self, sid=None):
if not self.savepoints:
raise NotImplementedError
return savepoint(self, sid)
def atomic(self, transaction_type=None):
return _atomic(self, transaction_type)
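# Sketch (for some Database instance `db`): atomic() nests safely -- the
# outermost call opens a transaction, inner calls fall back to savepoints --
# and, via _callable_context_manager, works both as a context manager and
# as a decorator:
#     with db.atomic():
#         ...
#     @db.atomic()
#     def transfer(...):
#         ...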
def get_tables(self, schema=None):
raise NotImplementedError
def get_indexes(self, table, schema=None):
raise NotImplementedError
def get_columns(self, table, schema=None):
raise NotImplementedError
def get_primary_keys(self, table, schema=None):
raise NotImplementedError
def get_foreign_keys(self, table, schema=None):
raise NotImplementedError
def sequence_exists(self, seq):
raise NotImplementedError
def create_table(self, model_class, safe=False):
qc = self.compiler()
return self.execute_sql(*qc.create_table(model_class, safe))
def create_tables(self, models, safe=False):
create_model_tables(models, fail_silently=safe)
def create_index(self, model_class, fields, unique=False):
qc = self.compiler()
if not isinstance(fields, (list, tuple)):
raise ValueError('Fields passed to "create_index" must be a list '
'or tuple: "%s"' % fields)
fobjs = [
model_class._meta.fields[f] if isinstance(f, str) else f
for f in fields]
return self.execute_sql(*qc.create_index(model_class, fobjs, unique))
def drop_index(self, model_class, fields, safe=False):
qc = self.compiler()
if not isinstance(fields, (list, tuple)):
raise ValueError('Fields passed to "drop_index" must be a list '
'or tuple: "%s"' % fields)
fobjs = [
model_class._meta.fields[f] if isinstance(f, str) else f
for f in fields]
return self.execute_sql(*qc.drop_index(model_class, fobjs, safe))
def create_foreign_key(self, model_class, field, constraint=None):
qc = self.compiler()
return self.execute_sql(*qc.create_foreign_key(
model_class, field, constraint))
def create_sequence(self, seq):
if self.sequences:
qc = self.compiler()
return self.execute_sql(*qc.create_sequence(seq))
def drop_table(self, model_class, fail_silently=False, cascade=False):
qc = self.compiler()
if cascade and not self.drop_cascade:
raise ValueError('Database does not support DROP TABLE..CASCADE.')
return self.execute_sql(*qc.drop_table(
model_class, fail_silently, cascade))
def drop_tables(self, models, safe=False, cascade=False):
drop_model_tables(models, fail_silently=safe, cascade=cascade)
def truncate_table(self, model_class, restart_identity=False,
cascade=False):
qc = self.compiler()
return self.execute_sql(*qc.truncate_table(
model_class, restart_identity, cascade))
def truncate_tables(self, models, restart_identity=False, cascade=False):
for model in reversed(sort_models_topologically(models)):
model.truncate_table(restart_identity, cascade)
def drop_sequence(self, seq):
if self.sequences:
qc = self.compiler()
return self.execute_sql(*qc.drop_sequence(seq))
def extract_date(self, date_part, date_field):
return fn.EXTRACT(Clause(date_part, R('FROM'), date_field))
def truncate_date(self, date_part, date_field):
return fn.DATE_TRUNC(date_part, date_field)
def default_insert_clause(self, model_class):
return SQL('DEFAULT VALUES')
def get_noop_sql(self):
return 'SELECT 0 WHERE 0'
def get_binary_type(self):
return binary_construct
class MySQLDatabase(Database):
commit_select = True
compound_select_parentheses = True
compound_operations = ['UNION', 'UNION ALL']
field_overrides = {
'bool': 'BOOL',
'decimal': 'NUMERIC',
'double': 'DOUBLE PRECISION',
'float': 'FLOAT',
'primary_key': 'INTEGER AUTO_INCREMENT',
'text': 'LONGTEXT',
'uuid': 'VARCHAR(40)',
}
for_update = True
interpolation = '%s'
limit_max = 2 ** 64 - 1 # MySQL quirk
op_overrides = {
OP.LIKE: 'LIKE BINARY',
OP.ILIKE: 'LIKE',
OP.XOR: 'XOR',
}
quote_char = '`'
subquery_delete_same_table = False
upsert_sql = 'REPLACE INTO'
def _connect(self, database, **kwargs):
if not mysql:
raise ImproperlyConfigured('MySQLdb or PyMySQL must be installed.')
conn_kwargs = {
'charset': 'utf8',
'use_unicode': True,
}
conn_kwargs.update(kwargs)
if 'password' in conn_kwargs:
conn_kwargs['passwd'] = conn_kwargs.pop('password')
return mysql.connect(db=database, **conn_kwargs)
def get_tables(self, schema=None):
return [row for row, in self.execute_sql('SHOW TABLES')]
def get_indexes(self, table, schema=None):
cursor = self.execute_sql('SHOW INDEX FROM `%s`' % table)
unique = set()
indexes = {}
for row in cursor.fetchall():
if not row[1]:
unique.add(row[2])
indexes.setdefault(row[2], [])
indexes[row[2]].append(row[4])
return [IndexMetadata(name, None, indexes[name], name in unique, table)
for name in indexes]
def get_columns(self, table, schema=None):
sql = """
SELECT column_name, is_nullable, data_type
FROM information_schema.columns
WHERE table_name = %s AND table_schema = DATABASE()"""
cursor = self.execute_sql(sql, (table,))
pks = set(self.get_primary_keys(table))
return [ColumnMetadata(name, dt, null == 'YES', name in pks, table)
for name, null, dt in cursor.fetchall()]
def get_primary_keys(self, table, schema=None):
cursor = self.execute_sql('SHOW INDEX FROM `%s`' % table)
return [row[4] for row in cursor.fetchall() if row[2] == 'PRIMARY']
def get_foreign_keys(self, table, schema=None):
query = """
SELECT column_name, referenced_table_name, referenced_column_name
FROM information_schema.key_column_usage
WHERE table_name = %s
AND table_schema = DATABASE()
AND referenced_table_name IS NOT NULL
AND referenced_column_name IS NOT NULL"""
cursor = self.execute_sql(query, (table,))
return [
ForeignKeyMetadata(column, dest_table, dest_column, table)
for column, dest_table, dest_column in cursor.fetchall()]
def extract_date(self, date_part, date_field):
return fn.EXTRACT(Clause(R(date_part), R('FROM'), date_field))
def truncate_date(self, date_part, date_field):
return fn.DATE_FORMAT(date_field, MYSQL_DATE_TRUNC_MAPPING[date_part])
def default_insert_clause(self, model_class):
return Clause(
EnclosedClause(model_class._meta.primary_key),
SQL('VALUES (DEFAULT)'))
def get_noop_sql(self):
return 'DO 0'
def get_binary_type(self):
return mysql.Binary
class _callable_context_manager(object):
__slots__ = ()
def __call__(self, fn):
@wraps(fn)
def inner(*args, **kwargs):
with self:
return fn(*args, **kwargs)
return inner
class ExecutionContext(_callable_context_manager):
def __init__(self, database, with_transaction=True, transaction_type=None):
self.database = database
self.with_transaction = with_transaction
self.transaction_type = transaction_type
self.connection = None
def __enter__(self):
with self.database._conn_lock:
self.database.push_execution_context(self)
self.connection = self.database._connect(
self.database.database,
**self.database.connect_kwargs)
if self.with_transaction:
self.txn = self.database.transaction()
self.txn.__enter__()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
with self.database._conn_lock:
if self.connection is None:
self.database.pop_execution_context()
else:
try:
if self.with_transaction:
if not exc_type:
self.txn.commit(False)
self.txn.__exit__(exc_type, exc_val, exc_tb)
finally:
self.database.pop_execution_context()
self.database._close(self.connection)
class Using(ExecutionContext):
def __init__(self, database, models, with_transaction=True):
super(Using, self).__init__(database, with_transaction)
self.models = models
def __enter__(self):
self._orig = []
for model in self.models:
self._orig.append(model._meta.database)
model._meta.database = self.database
return super(Using, self).__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
super(Using, self).__exit__(exc_type, exc_val, exc_tb)
for i, model in enumerate(self.models):
model._meta.database = self._orig[i]
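# Sketch (hypothetical models and a second Database instance): Using
# temporarily rebinds the given models to another database for the duration
# of the block, restoring the originals on exit:
#     with Using(read_replica, [User, Tweet], with_transaction=False):
#         users = list(User.select())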
class _atomic(_callable_context_manager):
__slots__ = ('db', 'transaction_type', 'context_manager')
def __init__(self, db, transaction_type=None):
self.db = db
self.transaction_type = transaction_type
def __enter__(self):
if self.db.transaction_depth() == 0:
self.context_manager = self.db.transaction(self.transaction_type)
else:
self.context_manager = self.db.savepoint()
return self.context_manager.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
return self.context_manager.__exit__(exc_type, exc_val, exc_tb)
class transaction(_callable_context_manager):
__slots__ = ('db', 'autocommit', 'transaction_type')
def __init__(self, db, transaction_type=None):
self.db = db
self.transaction_type = transaction_type
def _begin(self):
if self.transaction_type:
self.db.begin(self.transaction_type)
else:
self.db.begin()
def commit(self, begin=True):
self.db.commit()
if begin: self._begin()
def rollback(self, begin=True):
self.db.rollback()
if begin: self._begin()
def __enter__(self):
self.autocommit = self.db.get_autocommit()
self.db.set_autocommit(False)
if self.db.transaction_depth() == 0: self._begin()
self.db.push_transaction(self)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
try:
if exc_type:
self.rollback(False)
elif self.db.transaction_depth() == 1:
try:
self.commit(False)
except:
self.rollback(False)
raise
finally:
self.db.set_autocommit(self.autocommit)
self.db.pop_transaction()
class savepoint(_callable_context_manager):
__slots__ = ('db', 'sid', 'quoted_sid', 'autocommit')
def __init__(self, db, sid=None):
self.db = db
_compiler = db.compiler()
self.sid = sid or 's' + uuid.uuid4().hex
self.quoted_sid = _compiler.quote(self.sid)
def _execute(self, query):
self.db.execute_sql(query, require_commit=False)
def _begin(self):
self._execute('SAVEPOINT %s;' % self.quoted_sid)
def commit(self, begin=True):
self._execute('RELEASE SAVEPOINT %s;' % self.quoted_sid)
if begin: self._begin()
def rollback(self):
self._execute('ROLLBACK TO SAVEPOINT %s;' % self.quoted_sid)
def __enter__(self):
self.autocommit = self.db.get_autocommit()
self.db.set_autocommit(False)
self._begin()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
try:
if exc_type:
self.rollback()
else:
try:
self.commit(begin=False)
except:
self.rollback()
raise
finally:
self.db.set_autocommit(self.autocommit)
class FieldProxy(Field):
def __init__(self, alias, field_instance):
self._model_alias = alias
self.model = self._model_alias.model_class
self.field_instance = field_instance
def clone_base(self):
return FieldProxy(self._model_alias, self.field_instance)
def coerce(self, value):
return self.field_instance.coerce(value)
def python_value(self, value):
return self.field_instance.python_value(value)
def db_value(self, value):
return self.field_instance.db_value(value)
def __getattr__(self, attr):
if attr == 'model_class':
return self._model_alias
return getattr(self.field_instance, attr)
class ModelAlias(object):
def __init__(self, model_class):
self.__dict__['model_class'] = model_class
def __getattr__(self, attr):
model_attr = getattr(self.model_class, attr)
if isinstance(model_attr, Field):
return FieldProxy(self, model_attr)
return model_attr
def __setattr__(self, attr, value):
raise AttributeError('Cannot set attributes on ModelAlias instances')
def get_proxy_fields(self, declared_fields=False):
mm = self.model_class._meta
fields = mm.declared_fields if declared_fields else mm.sorted_fields
return [FieldProxy(self, f) for f in fields]
def select(self, *selection):
if not selection:
selection = self.get_proxy_fields()
query = SelectQuery(self, *selection)
if self._meta.order_by:
query = query.order_by(*self._meta.order_by)
return query
def __call__(self, **kwargs):
return self.model_class(**kwargs)
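# Sketch (hypothetical self-referential Category model): ModelAlias lets the
# same table appear twice in one query, e.g. for self-joins:
#     Parent = Category.alias()
#     query = (Category
#              .select(Category, Parent)
#              .join(Parent, on=(Category.parent == Parent.id)))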
if _SortedFieldList is None:
class _SortedFieldList(object):
__slots__ = ('_keys', '_items')
def __init__(self):
self._keys = []
self._items = []
def __getitem__(self, i):
return self._items[i]
def __iter__(self):
return iter(self._items)
def __contains__(self, item):
k = item._sort_key
i = bisect_left(self._keys, k)
j = bisect_right(self._keys, k)
return item in self._items[i:j]
def index(self, field):
return self._keys.index(field._sort_key)
def insert(self, item):
k = item._sort_key
i = bisect_left(self._keys, k)
self._keys.insert(i, k)
self._items.insert(i, item)
def remove(self, item):
idx = self.index(item)
del self._items[idx]
del self._keys[idx]
class DoesNotExist(Exception): pass
class ModelOptions(object):
def __init__(self, cls, database=None, db_table=None, db_table_func=None,
indexes=None, order_by=None, primary_key=None,
table_alias=None, constraints=None, schema=None,
validate_backrefs=True, only_save_dirty=False,
depends_on=None, **kwargs):
self.model_class = cls
self.name = cls.__name__.lower()
self.fields = {}
self.columns = {}
self.defaults = {}
self._default_by_name = {}
self._default_dict = {}
self._default_callables = {}
self._default_callable_list = []
self._sorted_field_list = _SortedFieldList()
self.sorted_fields = []
self.sorted_field_names = []
self.valid_fields = set()
self.declared_fields = []
self.database = database
self.db_table = db_table
self.db_table_func = db_table_func
self.indexes = list(indexes or [])
self.order_by = order_by
self.primary_key = primary_key
self.table_alias = table_alias
self.constraints = constraints
self.schema = schema
self.validate_backrefs = validate_backrefs
self.only_save_dirty = only_save_dirty
self.depends_on = depends_on
self.auto_increment = None
self.composite_key = False
self.rel = {}
self.reverse_rel = {}
for key, value in kwargs.items():
setattr(self, key, value)
self._additional_keys = set(kwargs.keys())
if self.db_table_func and not self.db_table:
self.db_table = self.db_table_func(cls)
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.name)
def prepared(self):
if self.order_by:
norm_order_by = []
for item in self.order_by:
if isinstance(item, Field):
prefix = '-' if item._ordering == 'DESC' else ''
item = prefix + item.name
field = self.fields[item.lstrip('-')]
if item.startswith('-'):
norm_order_by.append(field.desc())
else:
norm_order_by.append(field.asc())
self.order_by = norm_order_by
def _update_field_lists(self):
self.sorted_fields = list(self._sorted_field_list)
self.sorted_field_names = [f.name for f in self.sorted_fields]
self.valid_fields = (set(self.fields.keys()) |
set(self.fields.values()) |
set((self.primary_key,)))
self.declared_fields = [field for field in self.sorted_fields
if not field.undeclared]
def add_field(self, field):
self.remove_field(field.name)
self.fields[field.name] = field
self.columns[field.db_column] = field
self._sorted_field_list.insert(field)
self._update_field_lists()
if field.default is not None:
self.defaults[field] = field.default
if isinstance(field.default, Callable):
self._default_callables[field] = field.default
self._default_callable_list.append((field.name, field.default))
else:
self._default_dict[field] = field.default
self._default_by_name[field.name] = field.default
def remove_field(self, field_name):
if field_name not in self.fields:
return
original = self.fields.pop(field_name)
del self.columns[original.db_column]
self._sorted_field_list.remove(original)
self._update_field_lists()
if original.default is not None:
del self.defaults[original]
if self._default_callables.pop(original, None):
for i, (name, _) in enumerate(self._default_callable_list):
if name == field_name:
self._default_callable_list.pop(i)
break
else:
self._default_dict.pop(original, None)
self._default_by_name.pop(original.name, None)
def get_default_dict(self):
dd = self._default_by_name.copy()
for field_name, default in self._default_callable_list:
dd[field_name] = default()
return dd
def get_field_index(self, field):
try:
return self._sorted_field_list.index(field)
except ValueError:
return -1
def get_primary_key_fields(self):
if self.composite_key:
return [
self.fields[field_name]
for field_name in self.primary_key.field_names]
return [self.primary_key]
def rel_for_model(self, model, field_obj=None, multi=False):
is_field = isinstance(field_obj, Field)
is_node = not is_field and isinstance(field_obj, Node)
if multi:
accum = []
for field in self.sorted_fields:
if isinstance(field, ForeignKeyField) and field.rel_model == model:
is_match = (
(field_obj is None) or
(is_field and field_obj.name == field.name) or
(is_node and field_obj._alias == field.name))
if is_match:
if not multi:
return field
accum.append(field)
if multi:
return accum
def reverse_rel_for_model(self, model, field_obj=None, multi=False):
return model._meta.rel_for_model(self.model_class, field_obj, multi)
def rel_exists(self, model):
return self.rel_for_model(model) or self.reverse_rel_for_model(model)
def related_models(self, backrefs=False):
models = []
stack = [self.model_class]
while stack:
model = stack.pop()
if model in models:
continue
models.append(model)
for fk in model._meta.rel.values():
stack.append(fk.rel_model)
if backrefs:
for fk in model._meta.reverse_rel.values():
stack.append(fk.model_class)
return models
class BaseModel(type):
inheritable = set([
'constraints', 'database', 'db_table_func', 'indexes', 'order_by',
'primary_key', 'schema', 'validate_backrefs', 'only_save_dirty'])
def __new__(cls, name, bases, attrs):
if name == _METACLASS_ or bases[0].__name__ == _METACLASS_:
return super(BaseModel, cls).__new__(cls, name, bases, attrs)
meta_options = {"abstract": False}
meta = attrs.pop('Meta', None)
if meta:
for k, v in meta.__dict__.items():
if not k.startswith('_'):
meta_options[k] = v
model_pk = getattr(meta, 'primary_key', None)
parent_pk = None
# inherit any field descriptors by deep copying the underlying field
# into the attrs of the new model, additionally see if the bases define
# inheritable model options and swipe them
for b in bases:
if not hasattr(b, '_meta'):
continue
base_meta = getattr(b, '_meta')
if parent_pk is None:
parent_pk = deepcopy(base_meta.primary_key)
all_inheritable = cls.inheritable | base_meta._additional_keys
for (k, v) in base_meta.__dict__.items():
if k in all_inheritable and k not in meta_options:
meta_options[k] = v
for (k, v) in b.__dict__.items():
if k in attrs:
continue
if isinstance(v, FieldDescriptor):
if not v.field.primary_key:
attrs[k] = deepcopy(v.field)
# initialize the new class and set the magic attributes
cls = super(BaseModel, cls).__new__(cls, name, bases, attrs)
ModelOptionsBase = meta_options.get('model_options_base', ModelOptions)
cls._meta = ModelOptionsBase(cls, **meta_options)
cls._data = None
cls._meta.indexes = list(cls._meta.indexes)
if not cls._meta.db_table:
cls._meta.db_table = re.sub(r'[^\w]+', '_', cls.__name__.lower())
# replace fields with field descriptors, calling the add_to_class hook
fields = []
for name, attr in cls.__dict__.items():
if isinstance(attr, Field):
if attr.primary_key and model_pk:
raise ValueError('primary key is overdetermined.')
elif attr.primary_key:
model_pk, pk_name = attr, name
else:
fields.append((attr, name))
composite_key = False
if model_pk is None:
if parent_pk:
model_pk, pk_name = parent_pk, parent_pk.name
else:
model_pk, pk_name = PrimaryKeyField(primary_key=True), 'id'
if isinstance(model_pk, CompositeKey):
pk_name = '_composite_key'
composite_key = True
if model_pk is not False:
model_pk.add_to_class(cls, pk_name)
cls._meta.primary_key = model_pk
cls._meta.auto_increment = (
isinstance(model_pk, PrimaryKeyField) or
bool(model_pk.sequence))
cls._meta.composite_key = composite_key
for field, name in fields:
field.add_to_class(cls, name)
# create a repr and error class before finalizing
if hasattr(cls, '__unicode__'):
setattr(cls, '__repr__', lambda self: '<%s: %r>' % (
cls.__name__, self.__unicode__()))
exc_name = '%sDoesNotExist' % cls.__name__
exc_attrs = {'__module__': cls.__module__}
exception_class = type(exc_name, (DoesNotExist,), exc_attrs)
cls.DoesNotExist = exception_class
cls._meta.prepared()
if hasattr(cls, 'validate_model'):
cls.validate_model()
DeferredRelation.resolve(cls)
return cls
def __iter__(self):
return iter(self.select())
class Model(with_metaclass(BaseModel)):
def __init__(self, *args, **kwargs):
self._data = self._meta.get_default_dict()
self._dirty = set(self._data)
self._obj_cache = {}
for k, v in kwargs.items():
setattr(self, k, v)
@classmethod
def alias(cls):
return ModelAlias(cls)
@classmethod
def select(cls, *selection):
query = SelectQuery(cls, *selection)
if cls._meta.order_by:
query = query.order_by(*cls._meta.order_by)
return query
@classmethod
def update(cls, __data=None, **update):
fdict = __data or {}
fdict.update([(cls._meta.fields[f], update[f]) for f in update])
return UpdateQuery(cls, fdict)
@classmethod
def insert(cls, __data=None, **insert):
fdict = __data or {}
fdict.update([(cls._meta.fields[f], insert[f]) for f in insert])
return InsertQuery(cls, fdict)
@classmethod
def insert_many(cls, rows, validate_fields=True):
return InsertQuery(cls, rows=rows, validate_fields=validate_fields)
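# Sketch (hypothetical User model and Database instance `db`): insert_many()
# builds a single multi-row INSERT, or falls back to a per-row loop when the
# database lacks multi-row insert support; wrapping it in atomic() keeps the
# batch transactional:
#     rows = [{'username': 'u%d' % i} for i in range(100)]
#     with db.atomic():
#         User.insert_many(rows).execute()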
@classmethod
def insert_from(cls, fields, query):
return InsertQuery(cls, fields=fields, query=query)
@classmethod
def delete(cls):
return DeleteQuery(cls)
@classmethod
def raw(cls, sql, *params):
return RawQuery(cls, sql, *params)
@classmethod
def create(cls, **query):
inst = cls(**query)
inst.save(force_insert=True)
inst._prepare_instance()
return inst
@classmethod
def get(cls, *query, **kwargs):
sq = cls.select().naive()
if query:
sq = sq.where(*query)
if kwargs:
sq = sq.filter(**kwargs)
return sq.get()
@classmethod
def get_or_create(cls, **kwargs):
defaults = kwargs.pop('defaults', {})
query = cls.select()
for field, value in kwargs.items():
if '__' in field:
query = query.filter(**{field: value})
else:
query = query.where(getattr(cls, field) == value)
try:
return query.get(), False
except cls.DoesNotExist:
try:
params = dict((k, v) for k, v in kwargs.items()
if '__' not in k)
params.update(defaults)
with cls._meta.database.atomic():
return cls.create(**params), True
except IntegrityError as exc:
try:
return query.get(), False
except cls.DoesNotExist:
raise exc
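# Sketch (hypothetical Person model): keyword arguments become the lookup,
# `defaults` is applied only when a new row has to be created, and the
# (instance, created) tuple tells the caller which path was taken:
#     person, created = Person.get_or_create(
#         name='Ada', defaults={'occupation': 'engineer'})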
@classmethod
def filter(cls, *dq, **query):
return cls.select().filter(*dq, **query)
@classmethod
def table_exists(cls):
kwargs = {}
if cls._meta.schema:
kwargs['schema'] = cls._meta.schema
return cls._meta.db_table in cls._meta.database.get_tables(**kwargs)
@classmethod
def create_table(cls, fail_silently=False):
if fail_silently and cls.table_exists():
return
db = cls._meta.database
pk = cls._meta.primary_key
if db.sequences and pk is not False and pk.sequence:
if not db.sequence_exists(pk.sequence):
db.create_sequence(pk.sequence)
db.create_table(cls)
cls._create_indexes()
@classmethod
def _fields_to_index(cls):
fields = []
for field in cls._meta.sorted_fields:
if field.primary_key:
continue
requires_index = any((
field.index,
field.unique,
isinstance(field, ForeignKeyField)))
if requires_index:
fields.append(field)
return fields
@classmethod
def _index_data(cls):
return itertools.chain(
[((field,), field.unique) for field in cls._fields_to_index()],
cls._meta.indexes or ())
@classmethod
def _create_indexes(cls):
for field_list, is_unique in cls._index_data():
cls._meta.database.create_index(cls, field_list, is_unique)
@classmethod
def _drop_indexes(cls, safe=False):
for field_list, is_unique in cls._index_data():
cls._meta.database.drop_index(cls, field_list, safe)
@classmethod
def sqlall(cls):
queries = []
compiler = cls._meta.database.compiler()
pk = cls._meta.primary_key
if cls._meta.database.sequences and pk.sequence:
queries.append(compiler.create_sequence(pk.sequence))
queries.append(compiler.create_table(cls))
for field in cls._fields_to_index():
queries.append(compiler.create_index(cls, [field], field.unique))
if cls._meta.indexes:
for field_names, unique in cls._meta.indexes:
fields = [cls._meta.fields[f] for f in field_names]
queries.append(compiler.create_index(cls, fields, unique))
return [sql for sql, _ in queries]
@classmethod
def drop_table(cls, fail_silently=False, cascade=False):
cls._meta.database.drop_table(cls, fail_silently, cascade)
@classmethod
def truncate_table(cls, restart_identity=False, cascade=False):
cls._meta.database.truncate_table(cls, restart_identity, cascade)
@classmethod
def as_entity(cls):
if cls._meta.schema:
return Entity(cls._meta.schema, cls._meta.db_table)
return Entity(cls._meta.db_table)
@classmethod
def noop(cls, *args, **kwargs):
return NoopSelectQuery(cls, *args, **kwargs)
def _get_pk_value(self):
return getattr(self, self._meta.primary_key.name)
get_id = _get_pk_value # Backwards-compatibility.
def _set_pk_value(self, value):
if not self._meta.composite_key:
setattr(self, self._meta.primary_key.name, value)
set_id = _set_pk_value # Backwards-compatibility.
def _pk_expr(self):
return self._meta.primary_key == self._get_pk_value()
def _prepare_instance(self):
self._dirty.clear()
self.prepared()
def prepared(self):
pass
def _prune_fields(self, field_dict, only):
new_data = {}
for field in only:
if field.name in field_dict:
new_data[field.name] = field_dict[field.name]
return new_data
def _populate_unsaved_relations(self, field_dict):
for key in self._meta.rel:
conditions = (
key in self._dirty and
key in field_dict and
field_dict[key] is None and
self._obj_cache.get(key) is not None)
if conditions:
setattr(self, key, getattr(self, key))
field_dict[key] = self._data[key]
def save(self, force_insert=False, only=None):
field_dict = dict(self._data)
if self._meta.primary_key is not False:
pk_field = self._meta.primary_key
pk_value = self._get_pk_value()
else:
pk_field = pk_value = None
if only:
field_dict = self._prune_fields(field_dict, only)
elif self._meta.only_save_dirty and not force_insert:
field_dict = self._prune_fields(
field_dict,
self.dirty_fields)
if not field_dict:
self._dirty.clear()
return False
self._populate_unsaved_relations(field_dict)
if pk_value is not None and not force_insert:
if self._meta.composite_key:
for pk_part_name in pk_field.field_names:
field_dict.pop(pk_part_name, None)
else:
field_dict.pop(pk_field.name, None)
rows = self.update(**field_dict).where(self._pk_expr()).execute()
elif pk_field is None:
self.insert(**field_dict).execute()
rows = 1
else:
pk_from_cursor = self.insert(**field_dict).execute()
if pk_from_cursor is not None:
pk_value = pk_from_cursor
self._set_pk_value(pk_value)
rows = 1
self._dirty.clear()
return rows
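# Sketch (hypothetical User model): save() issues an UPDATE when a primary
# key value is already set and force_insert is False, otherwise an INSERT.
# It returns the number of rows written (or False when only_save_dirty is
# enabled and nothing changed):
#     user = User(username='charlie')
#     user.save()                  # INSERT
#     user.username = 'charles'
#     user.save()                  # UPDATE ... WHERE id = ...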
def is_dirty(self):
return bool(self._dirty)
@property
def dirty_fields(self):
return [f for f in self._meta.sorted_fields if f.name in self._dirty]
def dependencies(self, search_nullable=False):
model_class = type(self)
query = self.select().where(self._pk_expr())
stack = [(type(self), query)]
seen = set()
while stack:
klass, query = stack.pop()
if klass in seen:
continue
seen.add(klass)
for rel_name, fk in klass._meta.reverse_rel.items():
rel_model = fk.model_class
if fk.rel_model is model_class:
node = (fk == self._data[fk.to_field.name])
subquery = rel_model.select().where(node)
else:
node = fk << query
subquery = rel_model.select().where(node)
if not fk.null or search_nullable:
stack.append((rel_model, subquery))
yield (node, fk)
def delete_instance(self, recursive=False, delete_nullable=False):
if recursive:
dependencies = self.dependencies(delete_nullable)
for query, fk in reversed(list(dependencies)):
model = fk.model_class
if fk.null and not delete_nullable:
model.update(**{fk.name: None}).where(query).execute()
else:
model.delete().where(query).execute()
return self.delete().where(self._pk_expr()).execute()
def serializable_value(self, field_name):
try:
field = self._meta.fields[field_name]
except KeyError:
return getattr(self, field_name)
return getattr(self, field.name)
def __hash__(self):
return hash((self.__class__, self._get_pk_value()))
def __eq__(self, other):
return (
other.__class__ == self.__class__ and
self._get_pk_value() is not None and
other._get_pk_value() == self._get_pk_value())
def __ne__(self, other):
return not self == other
def prefetch_add_subquery(sq, subqueries):
fixed_queries = [PrefetchResult(sq)]
for i, subquery in enumerate(subqueries):
if isinstance(subquery, tuple):
subquery, target_model = subquery
else:
target_model = None
if not isinstance(subquery, Query) and issubclass(subquery, Model):
subquery = subquery.select()
subquery_model = subquery.model_class
fks = backrefs = None
for j in reversed(range(i + 1)):
prefetch_result = fixed_queries[j]
last_query = prefetch_result.query
last_model = prefetch_result.model
rels = subquery_model._meta.rel_for_model(last_model, multi=True)
if rels:
fks = [getattr(subquery_model, fk.name) for fk in rels]
pks = [getattr(last_model, fk.to_field.name) for fk in rels]
else:
backrefs = last_model._meta.rel_for_model(
subquery_model,
multi=True)
if (fks or backrefs) and ((target_model is last_model) or
(target_model is None)):
break
if not (fks or backrefs):
tgt_err = ' using %s' % target_model if target_model else ''
raise AttributeError('Error: unable to find foreign key for '
'query: %s%s' % (subquery, tgt_err))
if fks:
expr = reduce(operator.or_, [
(fk << last_query.select(pk))
for (fk, pk) in zip(fks, pks)])
subquery = subquery.where(expr)
fixed_queries.append(PrefetchResult(subquery, fks, False))
elif backrefs:
expr = reduce(operator.or_, [
(backref.to_field << last_query.select(backref))
for backref in backrefs])
subquery = subquery.where(expr)
fixed_queries.append(PrefetchResult(subquery, backrefs, True))
return fixed_queries
__prefetched = namedtuple('__prefetched', (
'query', 'fields', 'backref', 'rel_models', 'field_to_name', 'model'))
class PrefetchResult(__prefetched):
def __new__(cls, query, fields=None, backref=None, rel_models=None,
field_to_name=None, model=None):
if fields:
if backref:
rel_models = [field.model_class for field in fields]
foreign_key_attrs = [field.to_field.name for field in fields]
else:
rel_models = [field.rel_model for field in fields]
foreign_key_attrs = [field.name for field in fields]
field_to_name = list(zip(fields, foreign_key_attrs))
model = query.model_class
return super(PrefetchResult, cls).__new__(
cls, query, fields, backref, rel_models, field_to_name, model)
def populate_instance(self, instance, id_map):
if self.backref:
for field in self.fields:
identifier = instance._data[field.name]
key = (field, identifier)
if key in id_map:
setattr(instance, field.name, id_map[key])
else:
for field, attname in self.field_to_name:
identifier = instance._data[field.to_field.name]
key = (field, identifier)
rel_instances = id_map.get(key, [])
dest = '%s_prefetch' % field.related_name
for inst in rel_instances:
setattr(inst, attname, instance)
setattr(instance, dest, rel_instances)
def store_instance(self, instance, id_map):
for field, attname in self.field_to_name:
identity = field.to_field.python_value(instance._data[attname])
key = (field, identity)
if self.backref:
id_map[key] = instance
else:
id_map.setdefault(key, [])
id_map[key].append(instance)
def prefetch(sq, *subqueries):
if not subqueries:
return sq
fixed_queries = prefetch_add_subquery(sq, subqueries)
deps = {}
rel_map = {}
for prefetch_result in reversed(fixed_queries):
query_model = prefetch_result.model
if prefetch_result.fields:
for rel_model in prefetch_result.rel_models:
rel_map.setdefault(rel_model, [])
rel_map[rel_model].append(prefetch_result)
deps[query_model] = {}
id_map = deps[query_model]
has_relations = bool(rel_map.get(query_model))
for instance in prefetch_result.query:
if prefetch_result.fields:
prefetch_result.store_instance(instance, id_map)
if has_relations:
for rel in rel_map[query_model]:
rel.populate_instance(instance, deps[rel.model])
return prefetch_result.query
def create_model_tables(models, **create_table_kwargs):
"""Create tables for all given models (in the right order)."""
for m in sort_models_topologically(models):
m.create_table(**create_table_kwargs)
def drop_model_tables(models, **drop_table_kwargs):
"""Drop tables for all given models (in the right order)."""
for m in reversed(sort_models_topologically(models)):
m.drop_table(**drop_table_kwargs)
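# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of how create_model_tables() and prefetch() above
# are typically combined. The SqliteDatabase, the User/Tweet models and the
# related_name are hypothetical and only illustrate the call pattern.
#
#   db = SqliteDatabase(':memory:')
#
#   class User(Model):
#       username = CharField()
#       class Meta:
#           database = db
#
#   class Tweet(Model):
#       user = ForeignKeyField(User, related_name='tweets')
#       message = TextField()
#       class Meta:
#           database = db
#
#   create_model_tables([User, Tweet])
#   # Two queries total instead of N+1 when walking users and their tweets;
#   # prefetched rows land on the '<related_name>_prefetch' attribute.
#   for user in prefetch(User.select(), Tweet.select()):
#       for tweet in user.tweets_prefetch:
#           print(user.username, tweet.message)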
|
[
"caowenbin@xuetangx.com"
] |
caowenbin@xuetangx.com
|
609021985877b887a2b29d318d6e4ee09ffbc4df
|
95b9fc9e1a109ee8612583221269aa5eb4f7de4a
|
/mysitea/settings.py
|
b82bd4078e0e56e73872ed0b18115c84b22b545c
|
[] |
no_license
|
Choi0427/mysitea
|
d3af19cf8cca3e28b13e8c6f8ec9ec1331ad9636
|
bda03507ba3f0a5f22ea061811ac13b0afe6bfa4
|
refs/heads/master
| 2023-01-28T12:57:21.857301
| 2020-12-07T13:04:28
| 2020-12-07T13:04:28
| 319,324,689
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,065
|
py
|
"""
Django settings for mysitea project.
Generated by 'django-admin startproject' using Django 3.1.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '0drzt5l8uuu#m-%vj(i#2^-koq!rs@xk8+%fqi+-knrb4m-hm^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysitea.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysitea.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
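# --- Illustrative sketch (not part of the generated settings) ---
# If this project were moved off SQLite, the DATABASES block above would be
# replaced with an engine-specific configuration. The values below are
# placeholders, not real credentials:
#
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.postgresql',
#         'NAME': 'mysitea',
#         'USER': 'mysitea_user',
#         'PASSWORD': 'change-me',
#         'HOST': 'localhost',
#         'PORT': '5432',
#     }
# }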
|
[
"you@example.com"
] |
you@example.com
|
960c42bb96022428399d3f6c90afa9aafe47ae6a
|
09e8c92187ff8d7a726727041e2dd80850dcce3d
|
/leetcode/965_univalued_binary_tree.py
|
e0da41251305a9165e61a661dbd73b04870d0e67
|
[] |
no_license
|
kakru/puzzles
|
6dd72bd0585f526e75d026f3ba2446b0c14f60e0
|
b91bdf0e68605f7e517446f8a00b1e0f1897c24d
|
refs/heads/master
| 2020-04-09T09:47:31.341475
| 2019-05-03T21:24:41
| 2019-05-03T21:24:41
| 160,246,660
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 970
|
py
|
#!/usr/bin/env python3
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x, left=None, right=None):
self.val = x
self.left = left
self.right = right
class Solution(object):
def isUnivalTree(self, root):
"""
:type root: TreeNode
:rtype: bool
"""
val = root.val
stack = [root]
while stack:
p = stack.pop()
if p.val != val: return False
if p.left: stack.append(p.left)
if p.right: stack.append(p.right)
return True
t = TreeNode(1,
TreeNode(1,
TreeNode(1),
TreeNode(1)
),
TreeNode(1,
None,
TreeNode(1)
)
)
print(Solution().isUnivalTree(t), True)
t = TreeNode(2,
TreeNode(2,
TreeNode(5),
TreeNode(2)
),
TreeNode(2)
)
print(Solution().isUnivalTree(t), False)
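# --- Alternative sketch (not part of the original solution) ---
# The same check can also be written recursively; this assumes the tree depth
# stays within Python's recursion limit.
def is_unival_recursive(node, val):
    if node is None:
        return True
    return (node.val == val and
            is_unival_recursive(node.left, val) and
            is_unival_recursive(node.right, val))
# t currently refers to the second (non-univalued) tree built above.
print(is_unival_recursive(t, t.val), False)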
|
[
"karol@kruzelecki.com"
] |
karol@kruzelecki.com
|
cc1b9a672163c2594baee1485636929c3ba41bf0
|
3955c3f367a3a60f8602dcb4609faec9898438bb
|
/graylog/apis/systemshutdown_api.py
|
255334f55b57fe4bd65f0fe6abe937ca07111116
|
[
"Apache-2.0"
] |
permissive
|
MinhKMA/graylog.py
|
e89c34defa5422d59d0a501355058f5eb2dfe68c
|
3118f4a49c91c2cbbd660523b0ab99e56fbfd861
|
refs/heads/master
| 2021-05-06T21:03:06.946509
| 2016-09-23T04:31:13
| 2016-09-23T04:31:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,316
|
py
|
# coding: utf-8
"""
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.1.1+01d50e5
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class SystemshutdownApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def shutdown(self, **kwargs):
"""
Shutdown this node gracefully.
Attempts to process all buffered and cached messages before exiting, shuts down inputs first to make sure that no new messages are accepted.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.shutdown(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.shutdown_with_http_info(**kwargs)
else:
(data) = self.shutdown_with_http_info(**kwargs)
return data
def shutdown_with_http_info(self, **kwargs):
"""
Shutdown this node gracefully.
Attempts to process all buffered and cached messages before exiting, shuts down inputs first to make sure that no new messages are accepted.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.shutdown_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method shutdown" % key
)
params[key] = val
del params['kwargs']
resource_path = '/system/shutdown/shutdown'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
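# --- Illustrative usage sketch (not part of the generated client) ---
# A hedged example of calling the endpoint above; the server address and
# authentication are set on the default Configuration/ApiClient and will
# differ per deployment.
#
#   api = SystemshutdownApi()   # falls back to the shared Configuration/ApiClient
#   api.shutdown()              # POSTs to /system/shutdown/shutdown, returns None
#   # Asynchronous variant, per the docstring above:
#   # thread = api.shutdown(callback=lambda response: None)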
|
[
"on99@users.noreply.github.com"
] |
on99@users.noreply.github.com
|
77cfd36b65d609b44e3fa1b960fbfb54748bfadd
|
e9172452ed3777653ec7a4c7ef6d2269a2309a4c
|
/pandasRollingStats.py
|
30244426f797b5c72c4db22f9f4bba209db9fc6a
|
[] |
no_license
|
aiporre/QuinoaMarketForecast
|
ec7163ea52e7c63c34448c302d4539b96270a3dd
|
b76bf5380b930859392a7c6c46eade2464a94143
|
refs/heads/master
| 2021-09-24T09:20:13.704502
| 2016-10-03T06:51:14
| 2016-10-03T06:51:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,524
|
py
|
import pandas as pd
import quandl
import matplotlib.pyplot as plt
from matplotlib import style
style.use('fivethirtyeight')
def get_zinc_price():
    api_key = open('data/myApiKey.txt', 'r').read()
    zinc = quandl.get('ODA/PZINC_USD', authtoken=api_key)
    pd.DataFrame(zinc).to_pickle('data/zinc.pickle')
    return pd.DataFrame(zinc)
def get_wheat_price():
    api_key = open('data/myApiKey.txt', 'r').read()
    wheat = quandl.get('ODA/PWHEAMT_USD', authtoken=api_key)
    pd.DataFrame(wheat).to_pickle('data/wheat.pickle')
    return pd.DataFrame(wheat)
fig = plt.figure()
ax1 = plt.subplot2grid((4,1),(0,0))
ax2 = plt.subplot2grid((4,1),(1,0))
ax3 = plt.subplot2grid((4,1),(2,0))
ax4 = plt.subplot2grid((4,1),(3,0))
# read prices of zinc
try:
zinc = pd.read_pickle('data/zinc.pickle')
except:
zinc = get_zinc_price()
# read prices of wheat
try:
wheat = pd.read_pickle('data/wheat.pickle')
except:
wheat = get_wheat_price()
# calculating rolling statistics
zinc.columns = ['price_z']
wheat.columns = ['price_w']
zw = zinc.join(wheat)
zinc['priceRA'] = pd.rolling_mean(zinc['price_z'],12)
zinc['priceRS'] = pd.rolling_std(zinc['price_z'],12)
print zw.head(10)
zinc_wheat_corr = pd.rolling_corr(zw['price_z'],zw['price_w'],12)
print zinc.head(15)
print zinc_wheat_corr.head(15)
# zinc.dropna(inplace = True) # posible to use dorpna
zinc[['price_z','priceRA']].plot(ax = ax1)
zinc['priceRS'].plot(ax = ax2)
zw.plot(ax = ax3)
zinc_wheat_corr.plot(ax = ax4)
plt.show()
# standard deviation helps to filter data that doesn't fit
# and to understand the volatility of the data.
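# --- Compatibility sketch (not part of the original script) ---
# pd.rolling_mean / pd.rolling_std / pd.rolling_corr were removed in
# pandas >= 0.23; on a modern pandas the equivalent calls (same window of 12)
# would be:
#
# zinc['priceRA'] = zinc['price_z'].rolling(12).mean()
# zinc['priceRS'] = zinc['price_z'].rolling(12).std()
# zinc_wheat_corr = zw['price_z'].rolling(12).corr(zw['price_w'])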
|
[
"ariel.iporre.rivas@gmail.com"
] |
ariel.iporre.rivas@gmail.com
|
039b5a5d6166730f71fa8dbae29bca022fb667b1
|
a3cc7286d4a319cb76f3a44a593c4a18e5ddc104
|
/lib/surface/logging/metrics/delete.py
|
0fce2d106ce17d119936f16b3a3b14351d2e6cd1
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
jordanistan/Google-Cloud-SDK
|
f2c6bb7abc2f33b9dfaec5de792aa1be91154099
|
42b9d7914c36a30d1e4b84ae2925df7edeca9962
|
refs/heads/master
| 2023-09-01T01:24:53.495537
| 2023-08-22T01:12:23
| 2023-08-22T01:12:23
| 127,072,491
| 0
| 1
|
NOASSERTION
| 2023-08-22T01:12:24
| 2018-03-28T02:31:19
|
Python
|
UTF-8
|
Python
| false
| false
| 1,918
|
py
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""'logging metrics delete' command."""
from googlecloudsdk.api_lib.logging import util
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
class Delete(base.DeleteCommand):
"""Deletes a logs-based metric."""
@staticmethod
def Args(parser):
"""Register flags for this command."""
parser.add_argument(
'metric_name', help='The name of the metric to delete.')
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
"""
console_io.PromptContinue(
'Really delete metric [%s]?' % args.metric_name, cancel_on_no=True)
util.GetClient().projects_metrics.Delete(
util.GetMessages().LoggingProjectsMetricsDeleteRequest(
metricName=util.CreateResourceName(
util.GetCurrentProjectParent(), 'metrics', args.metric_name)))
log.DeletedResource(args.metric_name)
Delete.detailed_help = {
'DESCRIPTION': """\
Deletes a logs-based metric called high_severity_count.
""",
'EXAMPLES': """\
To delete a metric called high_severity_count, run:
$ {command} high_severity_count
""",
}
|
[
"jordan.robison@gmail.com"
] |
jordan.robison@gmail.com
|
70e4497255159185bbd2c4946a1eb958f6f1520f
|
4178f2916d2da72cbb45454fbed941dcfe8f6460
|
/POM_test/TestCase/Detail_Profile/TC_005.py
|
a8f7d88a1374eb3444ef223474fdf03a291f71c2
|
[] |
no_license
|
maxcrup007/Selenium_Webdriver_Python
|
15196cb04ba5cafdc5b776c26d167f0b48fb0e14
|
6be7f0b9f53df1ba592957029e8a4d22e409d1c4
|
refs/heads/main
| 2023-03-24T21:04:31.976451
| 2021-03-22T09:16:04
| 2021-03-22T09:16:04
| 349,379,454
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,008
|
py
|
# Test access to the "Personal information" profile page
import time
import unittest
import sys
from selenium import webdriver
from selenium.webdriver import ActionChains
from POM_test.login import *
from POM_test.profilePage import *
from POM_test.scrollbar import *
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
class TestProfile_5(unittest.TestCase):
@classmethod
def setUpClass(self):
self.driver = webdriver.Chrome(executable_path="C:/Users/voraw/Downloads/Compressed/webdriver/chromedriver/chromedriver")
self.driver.implicitly_wait(10)
self.driver.maximize_window()
def test_login_valid(self):
driver = self.driver
self.driver.get("https://top-upstream-client.mulberrysoft.com/#/older/activity")
login = LoginPage(driver)
scroll = ScrollbarPage(driver)
login.enter_username("demo005")
login.enter_password("123456")
login.click_login()
time.sleep(2)
profile = ProfilePage(driver)
profile.into_profilePage()
time.sleep(5)
profile.profile_name_input("vatcharapong mahachot")
time.sleep(2)
profile.profile_email_input("vatcharapong11@hotmail.com")
time.sleep(2)
profile.profile_phone_number("086799315")
time.sleep(2)
scroll.profile_scrolling()
time.sleep(2)
profile.profile_address_text("555 หมู่17")
time.sleep(2)
scroll.profile_scrolling2()
time.sleep(2)
profile.profile_submit_confirm()
time.sleep(2)
@classmethod
def tearDownClass(cls):
cls.driver.close()
cls.driver.quit()
print("Test Completed")
if __name__ == '__main__':
unittest.main()
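# --- Illustrative sketch (not part of the original test) ---
# The fixed time.sleep() pauses above could be replaced with explicit waits;
# the locator below is a placeholder, not a real element id of the page.
#
# from selenium.webdriver.common.by import By
# from selenium.webdriver.support.ui import WebDriverWait
# from selenium.webdriver.support import expected_conditions as EC
#
# WebDriverWait(driver, 10).until(
#     EC.visibility_of_element_located((By.ID, "profile-name"))  # hypothetical id
# )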
|
[
"36732487+maxcrup007@users.noreply.github.com"
] |
36732487+maxcrup007@users.noreply.github.com
|
2a1e29bb5786850365a0cf5fca0e7f577085fec3
|
36e593943be060ca5ea74a3d45923aba422ad2c9
|
/ThinkBayes/code/dungeons.py
|
0df9ed07edd9dfe1089ba8c63d598987c192d448
|
[] |
no_license
|
xjr7670/book_practice
|
a73f79437262bb5e3b299933b7b1f7f662a157b5
|
5a562d76830faf78feec81bc11190b71eae3a799
|
refs/heads/master
| 2023-08-28T19:08:52.329127
| 2023-08-24T09:06:00
| 2023-08-24T09:06:00
| 101,477,574
| 3
| 1
| null | 2021-06-10T18:38:54
| 2017-08-26T09:56:02
|
Python
|
UTF-8
|
Python
| false
| false
| 2,831
|
py
|
"""This file contains code for use with "Think Bayes",
by Allen B. Downey, available from greenteapress.com
Copyright 2012 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
import random
import thinkbayes
import thinkplot
FORMATS = ['pdf', 'eps', 'png']
class Die(thinkbayes.Pmf):
"""Represents the PMF of outcomes for a die."""
def __init__(self, sides, name=''):
"""Initializes the die.
sides: int number of sides
name: string
"""
thinkbayes.Pmf.__init__(self, name=name)
for x in xrange(1, sides+1):
self.Set(x, 1)
self.Normalize()
def PmfMax(pmf1, pmf2):
"""Computes the distribution of the max of values drawn from two Pmfs.
pmf1, pmf2: Pmf objects
returns: new Pmf
"""
res = thinkbayes.Pmf()
for v1, p1 in pmf1.Items():
for v2, p2 in pmf2.Items():
res.Incr(max(v1, v2), p1*p2)
return res
def main():
pmf_dice = thinkbayes.Pmf()
pmf_dice.Set(Die(4), 5)
pmf_dice.Set(Die(6), 4)
pmf_dice.Set(Die(8), 3)
pmf_dice.Set(Die(12), 2)
pmf_dice.Set(Die(20), 1)
pmf_dice.Normalize()
mix = thinkbayes.Pmf()
for die, weight in pmf_dice.Items():
for outcome, prob in die.Items():
mix.Incr(outcome, weight*prob)
mix = thinkbayes.MakeMixture(pmf_dice)
thinkplot.Hist(mix, width=0.9)
thinkplot.Save(root='dungeons3',
xlabel='Outcome',
ylabel='Probability',
formats=FORMATS)
random.seed(17)
d6 = Die(6, 'd6')
dice = [d6] * 3
three = thinkbayes.SampleSum(dice, 1000)
three.name = 'sample'
three.Print()
three_exact = d6 + d6 + d6
three_exact.name = 'exact'
three_exact.Print()
thinkplot.PrePlot(num=2)
thinkplot.Pmf(three)
thinkplot.Pmf(three_exact, linestyle='dashed')
thinkplot.Save(root='dungeons1',
xlabel='Sum of three d6',
ylabel='Probability',
axis=[2, 19, 0, 0.15],
formats=FORMATS)
thinkplot.Clf()
thinkplot.PrePlot(num=1)
# compute the distribution of the best attribute the hard way
best_attr2 = PmfMax(three_exact, three_exact)
best_attr4 = PmfMax(best_attr2, best_attr2)
best_attr6 = PmfMax(best_attr4, best_attr2)
# thinkplot.Pmf(best_attr6)
# and the easy way
best_attr_cdf = three_exact.Max(6)
best_attr_cdf.name = ''
best_attr_pmf = thinkbayes.MakePmfFromCdf(best_attr_cdf)
best_attr_pmf.Print()
thinkplot.Pmf(best_attr_pmf)
thinkplot.Save(root='dungeons2',
xlabel='Best of three d6',
ylabel='Probability',
axis=[2, 19, 0, 0.23],
formats=FORMATS)
if __name__ == '__main__':
main()
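# --- Illustrative sketch (not part of the original example) ---
# PmfMax() above enumerates all value pairs to get the distribution of the
# maximum; a minimal standalone use with two six-sided dice would be:
#
#   d6 = Die(6)
#   best_of_two = PmfMax(d6, d6)
#   best_of_two.Print()   # e.g. P(max=1) = 1/36, P(max=6) = 11/36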
|
[
"xjr30226@126.com"
] |
xjr30226@126.com
|
9cdb76e81612b5b87a3078f6f2c985f285dbbe6e
|
be7a79f3c590f0923f1e793c6a36cfebd9ca4d01
|
/brocolli/converter/onnx_layers/concat_func.py
|
32b54e65699b614aff8c4d4dff1d7f195fd89e83
|
[
"MIT"
] |
permissive
|
inisis/brocolli
|
f255d44dc9148fd2b3bc82f6a21e429a579399b4
|
46a3d8c5e19e481746a9c8a85c5e9a71a49b846c
|
refs/heads/master
| 2023-07-22T09:37:19.480983
| 2023-07-17T14:25:35
| 2023-07-17T14:25:35
| 168,733,444
| 326
| 72
|
MIT
| 2023-06-04T17:03:43
| 2019-02-01T17:17:22
|
Python
|
UTF-8
|
Python
| false
| false
| 905
|
py
|
from loguru import logger
from onnx import helper
from onnx import TensorProto as tp
from .base_layer import BaseLayer
class ConcatFunc(BaseLayer):
def __init__(self, source_node, module=None, auto_gen=True):
super(ConcatFunc, self).__init__(source_node, module, auto_gen)
def get_concat_attr(self):
attr_dict = {"axis": []}
dim = self.get_value_by_key_or_index("dim", 1, 0)
attr_dict["axis"] = dim
return attr_dict
def generate_node(self, name=None, params=None, attr_dict=None):
if name is not None:
self._name = name
if attr_dict is None:
attr_dict = self.get_concat_attr()
node = helper.make_node(
"Concat", self._in_names, self._out_names, self._name, **attr_dict
)
logger.info(f"{self.__class__.__name__}: {self._name} created")
self._node.append(node)
|
[
"desmond.yao@buaa.edu.cn"
] |
desmond.yao@buaa.edu.cn
|
ef02ad50203e98899613e15fe68a62ebf283e4ce
|
fed6c6bdb6276d195bc565e527c3f19369d22b74
|
/selection_bias/bias_check/multi_pole_fit.py
|
4baa664a7c606616ae4c495cdcb11ac8dcdde1f8
|
[] |
no_license
|
hekunlie/astrophy-research
|
edbe12d8dde83e0896e982f08b463fdcd3279bab
|
7b2b7ada7e7421585e8993192f6111282c9cbb38
|
refs/heads/master
| 2021-11-15T05:08:51.271669
| 2021-11-13T08:53:33
| 2021-11-13T08:53:33
| 85,927,798
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,795
|
py
|
import os
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]
from sys import path, argv
path.append('%s/work/mylib/' % my_home)
import numpy
from mpi4py import MPI
import h5py
from plot_tool import Image_Plot
from Fourier_Quad import Fourier_Quad
import component_fit
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
cpus = comm.Get_size()
data_path = argv[1]
data_nm = argv[2]
g1t = [0, -0.04, 0, 0.04, -0.02, 0, 0.02, 0, 0.02]
g2t = [0, 0, 0.04, -0.04, 0, -0.02, 0, 0.02, -0.02]
g1 = g1t[rank]
g2 = g2t[rank]
scale = 100
xy_bin_num, radius_bin_num = 200, 60
h5f = h5py.File(data_path + "/data_%s_%d.hdf5" % (data_nm, rank), "r")
print("g1:", g1, "g2:", g2)
print(data_path + "/data_%s_%d.hdf5" % (data_nm, rank))
mg1 = h5f["/mg1"][()] / scale
mg2 = h5f["/mg2"][()] / scale
mn = h5f["/mn"][()] / scale
mu = h5f["/mu"][()] / scale
h5f.close()
img = Image_Plot(xpad=0.15, ypad=0.2, fig_x=4, fig_y=3)
img.subplots(6, 6)
xy_bin = component_fit.get_bin(mg1, mg2, xy_bin_num)
num, xgrid, ygrid, radius_grid = component_fit.get_2dhist(mg1, mg2, xy_bin)[:4]
dpl, radius_bin, radius_mask, mean_of_annuli = component_fit.get_dipole(num, radius_grid, radius_bin_num)
qpl, dpl_fit, sin_theta, cos_theta = component_fit.get_quadrupole(dpl, xgrid, ygrid, radius_bin, radius_bin_num)
qpl_fit, sin_2theta, cos_2theta = component_fit.fit_quadrupole(qpl, xgrid, ygrid, radius_bin, radius_bin_num)
dpl = numpy.nan_to_num(dpl)
qpl = numpy.nan_to_num(qpl)
fig1 = img.axs[0][0].imshow(dpl)
img.figure.colorbar(fig1, ax=img.axs[0][0])
img.axs[0][1].plot(ygrid[:, 0], dpl.sum(axis=1), label="G1")
img.axs[0][1].plot(xgrid[0], dpl.sum(axis=0), label="G2")
img.axs[0][1].legend()
fig1 = img.axs[0][2].imshow(dpl_fit)
img.figure.colorbar(fig1, ax=img.axs[0][2])
fig1 = img.axs[0][3].imshow(qpl)
img.figure.colorbar(fig1, ax=img.axs[0][3])
img.axs[0][4].plot(ygrid[:, 0], qpl.sum(axis=1), label="G1")
img.axs[0][4].plot(xgrid[0], qpl.sum(axis=0), label="G2")
img.axs[0][4].legend()
fig1 = img.axs[0][5].imshow(qpl_fit)
img.figure.colorbar(fig1, ax=img.axs[0][5])
g1s = numpy.linspace(g1 - 0.1, g1 + 0.1, 5)
g2s = numpy.linspace(g2 - 0.1, g2 + 0.1, 5)
for j in range(5):
mg1_sym = mg1 - g1s[j] * (mn + mu)
mg2_sym = mg2 - g2s[j] * (mn - mu)
num_sym, xgrid_sym, ygrid_sym, radius_grid_sym = component_fit.get_2dhist(mg1_sym, mg2_sym, xy_bin)[:4]
dpl_sym, radius_bin_sym, radius_mask_sym, mean_of_annuli_sym = component_fit.get_dipole(num_sym,
radius_grid_sym,
radius_bin_num)
qpl_sym, dpl_sym_fit, sin_theta_sym, cos_theta_sym = component_fit.get_quadrupole(dpl_sym, xgrid_sym, ygrid_sym,
radius_bin_sym,
radius_bin_num)
qpl_sym_fit, sin_2theta_sym, cos_2theta_sym = component_fit.fit_quadrupole(qpl_sym, xgrid_sym, ygrid_sym,
radius_bin_sym, radius_bin_num)
dpl_sym = numpy.nan_to_num(dpl_sym)
qpl_sym = numpy.nan_to_num(qpl_sym)
fig1 = img.axs[1 + j][0].imshow(dpl_sym)
img.figure.colorbar(fig1, ax=img.axs[1 + j][0])
img.axs[1 + j][1].plot(ygrid_sym[:, 0], dpl_sym.sum(axis=1), label="G1")
img.axs[1 + j][1].plot(xgrid_sym[0], dpl_sym.sum(axis=0), label="G2")
img.axs[1 + j][1].legend()
fig1 = img.axs[1 + j][2].imshow(dpl_sym_fit)
img.figure.colorbar(fig1, ax=img.axs[1 + j][2])
fig1 = img.axs[1 + j][3].imshow(qpl_sym)
img.figure.colorbar(fig1, ax=img.axs[1 + j][3])
img.axs[1 + j][4].plot(ygrid_sym[:, 0], qpl_sym.sum(axis=1), label="G1")
img.axs[1 + j][4].plot(xgrid_sym[0], qpl_sym.sum(axis=0), label="G2")
img.axs[1 + j][4].legend()
fig1 = img.axs[1 + j][5].imshow(qpl_sym_fit)
img.figure.colorbar(fig1, ax=img.axs[1 + j][5])
for i in range(6):
for j in range(6):
if j in [0, 2, 3, 5]:
img.del_ticks(i, j, [0, 1])
img.set_label(i, j, 0, "+ G1 -")
img.set_label(i, j, 1, "- G2 +")
else:
img.axs[i][j].yaxis.major.formatter.set_powerlimits((0, 0))
img.axs[i][j].xaxis.major.formatter.set_powerlimits((0, 0))
img.save_img(data_path + "/%s_%d_vary_g.png" % (data_nm, rank))
img.show_img()
img.close_img()
exit()
fq = Fourier_Quad(12,1234)
data_path = argv[1]
xy_bin_num, radius_bin_num = int(argv[2]), int(argv[3])
shear_scale = float(argv[4])
pic_path = data_path + "/multipole_pic_%.1f"%shear_scale
if rank == 0:
if not os.path.exists(pic_path):
os.makedirs(pic_path)
scale = 1000
# data_nm = [["noise_free"], ["noisy_cpp"], ["cross_term"], ["noise_residual"], ["cross_term", "noise_residual"]]
data_nm = [["noise_residual_1"], ["noise_residual_50"], ["noise_residual_2500"], ["noise_residual_12500"]]
h5f = h5py.File(data_path + "/shear.hdf5", "r")
g1t = h5f["/g1"][()]
g2t = h5f["/g2"][()]
h5f.close()
gh = numpy.linspace(-0.1, 0.1, 21)
for tag, nm in enumerate(data_nm):
pic_nm = "-".join(nm)
for sub_tag, sub_nm in enumerate(nm):
if rank == 0:
print(pic_nm)
if sub_tag == 0:
h5f = h5py.File(data_path + "/data_%s_%d.hdf5"%(sub_nm, rank), "r")
data = h5f["/data"][()]/scale
mg1 = data[:,0]
mg2 = data[:,1]
mn = data[:,2]
mu = data[:,3]
h5f.close()
else:
h5f = h5py.File(data_path + "/data_%s_%d.hdf5"%(sub_nm, rank), "r")
data = h5f["/data"][()]/scale
mg1 = mg1 + data[:,0]
mg2 = mg2 + data[:,1]
mn = mn + data[:,2]
mu = mu + data[:,3]
h5f.close()
num, xgrid, ygrid, radius_grid = component_fit.get_bingrid(mg1, mg2, xy_bin_num, 1, 0.3, 99.7)[:4]
dpl, radius_bin, radius_mask, mean_of_annuli = component_fit.get_dipole(num, radius_grid, radius_bin_num)
qpl, dpl_fit, sin_theta, cos_theta = component_fit.get_quadrupole(dpl, xgrid, ygrid, radius_bin, radius_bin_num)
qpl_fit, sin_2theta, cos_2theta = component_fit.fit_quadrupole(qpl,xgrid, ygrid, radius_bin, radius_bin_num)
mnu1 = mn + mu
mnu2 = mn - mu
mg1_sym = mg1 - g1t[rank]*mnu1*shear_scale
mg2_sym = mg2 - g2t[rank]*mnu2*shear_scale
num_sym, xgrid_sym, ygrid_sym, radius_grid_sym = component_fit.get_bingrid(mg1_sym, mg2_sym, xy_bin_num, 1, 0.3, 99.7)[:4]
dpl_sym, radius_bin_sym, radius_mask_sym, mean_of_annuli_sym = component_fit.get_dipole(num_sym, radius_grid_sym, radius_bin_num)
qpl_sym, dpl_fit_sym, sin_theta_sym, cos_theta_sym = component_fit.get_quadrupole(dpl_sym, xgrid_sym, ygrid_sym, radius_bin_sym, radius_bin_num)
qpl_sym_fit, sin_2theta_sym, cos_2theta_sym = component_fit.fit_quadrupole(qpl_sym,xgrid_sym, ygrid_sym, radius_bin_sym, radius_bin_num)
chisq1 = fq.get_chisq_range(mg1, mnu1, 10, gh)[1]
chisq2 = fq.get_chisq_range(mg2, mnu2, 10, gh)[1]
chisq1_sym = fq.get_chisq_range(mg1_sym, mnu1, 10, gh)[1]
chisq2_sym = fq.get_chisq_range(mg2_sym, mnu2, 10, gh)[1]
numpy.savez(pic_path + "/cache_%s_%d.npz"%(pic_nm,rank), num, dpl, qpl, dpl_fit, qpl_fit)
numpy.savez(pic_path + "/cache_%s_sym_%d.npz"%(pic_nm,rank), num_sym, dpl_sym, qpl_sym, dpl_fit_sym, qpl_sym_fit)
numpy.savez(pic_path + "/cache_%s_chisq_%d.npz"%(pic_nm,rank), chisq1, chisq2, chisq1_sym, chisq2_sym)
#################################################################################
# hist of data
img = Image_Plot(fig_x=6, fig_y=5)
img.subplots(2, 3)
plot_data = [[num, dpl, dpl_fit], [0,qpl, qpl_fit]]
titles = [["G1-G2-hist", "dipole", "dipole-fit"], ["$\chi^2$", "quadrupole","quadrupole-fit"]]
img.axs[1][0].plot(gh, chisq1, label="$\chi^2_{g1}$,g1=%.3f" % g1t[rank])
img.axs[1][0].plot(gh, chisq2, label="$\chi^2_{g2}$,g2=%.3f" % g2t[rank])
img.set_label(1, 0, 0, "$\chi^2$")
img.set_label(1, 0, 1, "$\hat{g}$")
img.axs[1][0].legend()
for i in range(2):
if i == 0:
st = 0
else:
st = 1
for j in range(st,3):
fig = img.axs[i][j].imshow(plot_data[i][j])
img.figure.colorbar(fig, ax=img.axs[i][j])
img.del_ticks(i,j,[0,1])
img.set_label(i,j,0, "+ G1 -")
img.set_label(i,j,1, "- G2 +")
for j in range(3):
img.axs[i][j].set_title(titles[i][j])
pic_name = pic_path + "/%s_%d.png"%(pic_nm, rank)
img.save_img(pic_name)
img.close_img()
#################################################################################
# hist of PDF_SYM_data
img = Image_Plot(fig_x=6, fig_y=5)
img.subplots(2, 3)
plot_data = [[num_sym, dpl_sym, dpl_fit_sym], [0, qpl_sym, qpl_sym_fit]]
titles =[["PDF_SYM-G1-G2-hist", "PDF_SYM-dipole", "PDF_SYM-dipole-fit"],
["PDF_SYM-$\chi^2$", "PDF_SYM-quadrupole", "PDF_SYM-quadrupole-fit"]]
img.axs[1][0].plot(gh, chisq1_sym, label="$\chi^2_{g1}$,g1=%.3f" % g1t[rank])
img.axs[1][0].plot(gh, chisq2_sym, label="$\chi^2_{g2}$,g2=%.3f" % g2t[rank])
img.set_label(1, 0, 0, "$\chi^2$")
img.set_label(1, 0, 1, "$\hat{g}$")
img.axs[1][0].legend()
for i in range(2):
if i == 0:
st = 0
else:
st = 1
for j in range(st, 3):
fig = img.axs[i][j].imshow(plot_data[i][j])
img.figure.colorbar(fig, ax=img.axs[i][j])
img.del_ticks(i, j, [0, 1])
img.set_label(i, j, 0, "+ G1 -")
img.set_label(i, j, 1, "- G2 +")
for j in range(3):
img.axs[i][j].set_title(titles[i][j])
pic_name = pic_path + "/%s_%d_sym.png" % (pic_nm, rank)
img.save_img(pic_name)
img.close_img()
#################################################################################
# compare
img = Image_Plot(fig_x=5, fig_y=4,xpad=0.25, ypad=0.25)
img.subplots(2, 3)
titles = [["$\chi^2$", "dipole-fit", "quadrupole-fit"],
["$\chi^2-SYM$", "dipole-fit_SYM", "quadruple-fit_SYM"]]
img.axs[0][0].plot(gh, chisq1, label="$\chi^2_{g1}$,g1=%.3f" % g1t[rank])
img.axs[0][0].plot(gh, chisq2, label="$\chi^2_{g2}$,g2=%.3f" % g2t[rank])
img.axs[1][0].plot(gh, chisq1_sym, label="$\chi^2_{g1}$,g1=%.3f" % g1t[rank])
img.axs[1][0].plot(gh, chisq2_sym, label="$\chi^2_{g2}$,g2=%.3f" % g2t[rank])
img.set_label(0, 0, 0, "$\chi^2$")
img.set_label(0, 0, 1, "$\hat{g}$")
img.axs[0][0].legend()
img.set_label(1, 0, 0, "$\chi^2-SYM$")
img.set_label(1, 0, 1, "$\hat{g}$")
img.axs[1][0].legend()
dpl_fit = numpy.nan_to_num(dpl_fit)
dpl_fit_sym = numpy.nan_to_num(dpl_fit_sym)
qpl_fit = numpy.nan_to_num(qpl_fit)
qpl_sym_fit = numpy.nan_to_num(qpl_sym_fit)
vmax_dpl = max(numpy.abs(dpl_fit).max(), numpy.abs(dpl_fit_sym).max())
vmax_qpl = max(numpy.abs(qpl_fit).max(), numpy.abs(qpl_sym_fit).max())
fig = img.axs[0][1].imshow(dpl_fit, vmin=-vmax_dpl, vmax=vmax_dpl)
img.figure.colorbar(fig, ax=img.axs[0][1])
fig = img.axs[1][1].imshow(dpl_fit_sym, vmin=-vmax_dpl, vmax=vmax_dpl)
img.figure.colorbar(fig, ax=img.axs[1][1])
fig = img.axs[0][2].imshow(qpl_fit, vmin=-vmax_qpl, vmax=vmax_qpl)
img.figure.colorbar(fig, ax=img.axs[0][2])
fig = img.axs[1][2].imshow(qpl_sym_fit, vmin=-vmax_qpl, vmax=vmax_qpl)
img.figure.colorbar(fig, ax=img.axs[1][2])
for i in range(2):
for j in range(3):
if j > 0:
img.del_ticks(i, j, [0, 1])
img.set_label(i, j, 0, "+ G1 -")
img.set_label(i, j, 1, "- G2 +")
img.axs[i][j].set_title(titles[i][j])
pic_name = pic_path + "/%s_%d_compare.png" % (pic_nm, rank)
img.save_img(pic_name)
img.close_img()
#################################################################################
# x & y grid, radius ....
img = Image_Plot()
img.subplots(2, 5)
titles = [["x-grid", "y-grid", "radius-grid", "radius-bin", "mean_num_annuli"],
["PDF_SYM-x-grid", "PDF_SYM-y-grid", "PDF_SYM-radius-grid", "PDF_SYM-radius-bin", "PDF_SYM-mean_num_annuli"]]
plot_data = [[xgrid, ygrid, radius_grid, radius_mask, mean_of_annuli],
[xgrid_sym, ygrid_sym, radius_grid_sym, radius_mask_sym, mean_of_annuli_sym]]
for i in range(2):
for j in range(5):
fig = img.axs[i][j].imshow(plot_data[i][j])
img.figure.colorbar(fig, ax=img.axs[i][j])
img.del_ticks(i,j,[0,1])
img.axs[i][j].set_title(titles[i][j])
pic_name = pic_path + "/%s_%d_check1.png"%(pic_nm, rank)
img.save_img(pic_name)
img.close_img()
#################################################################################
# sin_theta .....
img = Image_Plot()
img.subplots(2, 4)
titles = [["sin$\\theta$", "cos$\\theta$", "sin$2\\theta$", "cos$2\\theta$"],
["PDF_SYM-sin$\\theta$", "PDF_SYM-cos$\\theta$", "PDF_SYM-sin$2\\theta$", "PDF_SYM-cos$2\\theta$"]]
plot_data = [[sin_theta, cos_theta, sin_2theta, cos_2theta],
[sin_theta_sym, cos_theta_sym, sin_2theta_sym, cos_2theta_sym]]
for i in range(2):
for j in range(4):
fig = img.axs[i][j].imshow(plot_data[i][j])
img.figure.colorbar(fig, ax=img.axs[i][j])
img.del_ticks(i,j,[0,1])
img.axs[i][j].set_title(titles[i][j])
pic_name = pic_path + "/%s_%d_check2.png"%(pic_nm, rank)
img.save_img(pic_name)
img.close_img()
|
[
"hekun_lee@sjtu.edu.cn"
] |
hekun_lee@sjtu.edu.cn
|
a0a83028a3a6053fbf17d8665c12eeb4ad4e51ef
|
c47340ae6bcac6002961cc2c6d2fecb353c1e502
|
/test/test_passwords_object.py
|
f302c422a7a0449e34de00837f7cdeffa116807f
|
[
"MIT"
] |
permissive
|
rafaeldelrey/controlm_py
|
6d9f56b8b6e72750f329d85b932ace6c41002cbd
|
ed1eb648d1d23e587321227217cbfcc5065535ab
|
refs/heads/main
| 2023-04-23T09:01:32.024725
| 2021-05-19T00:25:53
| 2021-05-19T00:25:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 911
|
py
|
# coding: utf-8
"""
Control-M Services
Provides access to BMC Control-M Services # noqa: E501
OpenAPI spec version: 9.20.115
Contact: customer_support@bmc.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import controlm_py
from controlm_py.models.passwords_object import PasswordsObject # noqa: E501
from controlm_py.rest import ApiException
class TestPasswordsObject(unittest.TestCase):
"""PasswordsObject unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPasswordsObject(self):
"""Test PasswordsObject"""
# FIXME: construct object with mandatory attributes with example values
# model = controlm_py.models.passwords_object.PasswordsObject() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"dcompane@gmail.com"
] |
dcompane@gmail.com
|
5231b1176e669d8aa95fff862a57be460421e78e
|
ae2695f60480aa9dbe2acf68309d7918b67c6954
|
/alembic/versions/aca6937e73_committee_summary_te.py
|
d71879fcec60efa04167e13acbb5c7c1a6ddbb46
|
[
"MIT"
] |
permissive
|
mgax/mptracker
|
0853dd11a7b15bce8d535eb86f65c1e37596a4e6
|
e8d3c489aed36c70f81e89626f02e735e5890435
|
refs/heads/master
| 2023-02-22T03:53:26.481927
| 2020-08-19T09:25:58
| 2020-08-19T09:25:58
| 11,983,896
| 4
| 6
|
MIT
| 2023-02-02T07:16:26
| 2013-08-08T18:52:45
|
Python
|
UTF-8
|
Python
| false
| false
| 278
|
py
|
revision = 'aca6937e73'
down_revision = '58f2cb9046f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('committee_summary',
sa.Column('text', sa.Text(), nullable=True))
def downgrade():
op.drop_column('committee_summary', 'text')
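# --- Usage note (not part of the original migration) ---
# Assuming a configured alembic.ini, this revision is typically applied or
# rolled back from the project root with:
#
#   alembic upgrade aca6937e73
#   alembic downgrade 58f2cb9046f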
|
[
"alex@grep.ro"
] |
alex@grep.ro
|
10eb3ff34e111d759aa4124b34648aabc0a84e52
|
5febc7f6386fbe18e8788041c0f8f6be4aabe2af
|
/lenstronomy/LensModel/Profiles/shapelet_pot_polar.py
|
3b22b7118cdf8bb86a63a8d3173136ac9a95197c
|
[
"MIT"
] |
permissive
|
guoxiaowhu/lenstronomy
|
2b3aa10d6c72061edfd179d773948a40d9c2014a
|
dcdfc61ce5351ac94565228c822f1c94392c1ad6
|
refs/heads/master
| 2020-04-05T08:10:34.836586
| 2018-11-06T18:34:52
| 2018-11-06T18:34:52
| 156,704,299
| 1
| 0
|
MIT
| 2018-11-08T12:33:00
| 2018-11-08T12:33:00
| null |
UTF-8
|
Python
| false
| false
| 9,446
|
py
|
__author__ = 'sibirrer'
# description of the polar shapelets in potential space
import numpy as np
import scipy.special
import math
import lenstronomy.Util.param_util as param_util
class PolarShapelets(object):
"""
this class contains the lensing potential function and its derivatives for polar shapelets
"""
param_names = ['coeffs', 'beta', 'center_x', 'center_y']
lower_limit_default = {'coeffs': [0], 'beta': 0, 'center_x': -100, 'center_y': -100}
upper_limit_default = {'coeffs': [100], 'beta': 100, 'center_x': 100, 'center_y': 100}
def __init__(self):
n = 10
self.poly = [[[] for i in range(n)] for i in range(n)]
for i in range(0,n):
for j in range(0,n):
self.poly[i][j] = scipy.special.genlaguerre(i, j)
def function(self, x, y, coeffs, beta, center_x=0, center_y=0):
shapelets = self._createShapelet(coeffs)
r, phi = param_util.cart2polar(x, y, center=np.array([center_x, center_y]))
f_ = self._shapeletOutput(r, phi, beta, shapelets)
return f_
def derivatives(self, x, y, coeffs, beta, center_x=0, center_y=0):
"""
returns df/dx and df/dy of the function
"""
shapelets = self._createShapelet(coeffs)
r, phi = param_util.cart2polar(x, y, center=np.array([center_x, center_y]))
alpha1_shapelets, alpha2_shapelets = self._alphaShapelets(shapelets, beta)
f_x = self._shapeletOutput(r, phi, beta, alpha1_shapelets)
f_y = self._shapeletOutput(r, phi, beta, alpha2_shapelets)
return f_x, f_y
def hessian(self, x, y, coeffs, beta, center_x=0, center_y=0):
"""
returns the Hessian matrix of the function: d^2f/dx^2, d^2f/dy^2, d^2f/dxdy
"""
shapelets = self._createShapelet(coeffs)
r, phi = param_util.cart2polar(x, y, center=np.array([center_x, center_y]))
kappa_shapelets=self._kappaShapelets(shapelets, beta)
gamma1_shapelets, gamma2_shapelets=self._gammaShapelets(shapelets, beta)
kappa_value=self._shapeletOutput(r, phi, beta, kappa_shapelets)
gamma1_value=self._shapeletOutput(r, phi, beta, gamma1_shapelets)
gamma2_value=self._shapeletOutput(r, phi, beta, gamma2_shapelets)
f_xx = kappa_value + gamma1_value
f_xy = gamma2_value
f_yy = kappa_value - gamma1_value
return f_xx, f_yy, f_xy
def _createShapelet(self,coeff):
"""
returns a shapelet array out of the coefficients *a, up to order l
:param num_l: order of shapelets
:type num_l: int.
:param coeff: shapelet coefficients
:type coeff: floats
:returns: complex array
:raises: AttributeError, KeyError
"""
n_coeffs = len(coeff)
num_l = self._get_num_l(n_coeffs)
shapelets=np.zeros((num_l+1,num_l+1),'complex')
nl=0
k=0
i=0
while i < len(coeff):
if i%2==0:
shapelets[nl][k]+=coeff[i]/2.
shapelets[k][nl]+=coeff[i]/2.
if k==nl:
nl+=1
k=0
i+=1
continue
else:
k+=1
i+=1
continue
else:
shapelets[nl][k] += 1j*coeff[i]/2.
shapelets[k][nl] -= 1j*coeff[i]/2.
i+=1
return shapelets
def _shapeletOutput(self, r, phi, beta, shapelets):
"""
returns the numerical values of a set of shapelets at polar coordinates
:param shapelets: set of shapelets [l=,r=,a_lr=]
:type shapelets: array of size (n,3)
:param coordPolar: set of coordinates in polar units
:type coordPolar: array of size (n,2)
:returns: array of same size with coords [r,phi]
:raises: AttributeError, KeyError
"""
if type(r) == float or type(r) == int or type(r) == type(np.float64(1)) or len(r) <= 1:
values = 0.
else:
values = np.zeros(len(r), 'complex')
for nl in range(0,len(shapelets)): #sum over different shapelets
for nr in range(0,len(shapelets)):
value = shapelets[nl][nr]*self._chi_lr(r, phi, nl, nr, beta)
values += value
return values.real
def _chi_lr(self,r, phi, nl,nr,beta):
"""
computes the generalized polar basis function in the convention of Massey&Refregier eqn 8
:param nl: left basis
:type nl: int
:param nr: right basis
:type nr: int
:param beta: beta -- the characteristic scale, typically chosen to be close to the size of the object.
:type beta: float.
:param coord: coordinates [r,phi]
:type coord: array(n,2)
:returns: values at positions of coordinates.
:raises: AttributeError, KeyError
"""
m=int((nr-nl).real)
n=int((nr+nl).real)
p=int((n-abs(m))/2)
p2=int((n+abs(m))/2)
q=int(abs(m))
if p % 2==0: #if p is even
prefac=1
else:
prefac=-1
prefactor=prefac/beta**(abs(m)+1)*np.sqrt(math.factorial(p)/(np.pi*math.factorial(p2)))
poly=self.poly[p][q]
return prefactor*r**q*poly((r/beta)**2)*np.exp(-(r/beta)**2/2)*np.exp(-1j*m*phi)
def _kappaShapelets(self, shapelets, beta):
"""
calculates the convergence kappa given lensing potential shapelet coefficients (laplacian/2)
:param shapelets: set of shapelets [l=,r=,a_lr=]
:type shapelets: array of size (n,3)
:returns: set of kappa shapelets.
:raises: AttributeError, KeyError
"""
output=np.zeros((len(shapelets)+1,len(shapelets)+1),'complex')
for nl in range(0,len(shapelets)):
for nr in range(0,len(shapelets)):
a_lr=shapelets[nl][nr]
if nl>0:
output[nl-1][nr+1]+=a_lr*np.sqrt(nl*(nr+1))/2
if nr>0:
output[nl-1][nr-1]+=a_lr*np.sqrt(nl*nr)/2
output[nl+1][nr+1]+=a_lr*np.sqrt((nl+1)*(nr+1))/2
if nr>0:
output[nl+1][nr-1]+=a_lr*np.sqrt((nl+1)*nr)/2
return output/beta**2
def _alphaShapelets(self,shapelets, beta):
"""
calculates the deflection angles given lensing potential shapelet coefficients (laplacian/2)
:param shapelets: set of shapelets [l=,r=,a_lr=]
:type shapelets: array of size (n,3)
:returns: set of alpha shapelets.
:raises: AttributeError, KeyError
"""
output_x = np.zeros((len(shapelets)+1, len(shapelets)+1), 'complex')
output_y = np.zeros((len(shapelets)+1, len(shapelets)+1), 'complex')
for nl in range(0,len(shapelets)):
for nr in range(0,len(shapelets)):
a_lr=shapelets[nl][nr]
output_x[nl][nr+1]-=a_lr*np.sqrt(nr+1)/2
output_y[nl][nr+1]-=a_lr*np.sqrt(nr+1)/2*1j
output_x[nl+1][nr]-=a_lr*np.sqrt(nl+1)/2
output_y[nl+1][nr]+=a_lr*np.sqrt(nl+1)/2*1j
if nl>0:
output_x[nl-1][nr]+=a_lr*np.sqrt(nl)/2
output_y[nl-1][nr]-=a_lr*np.sqrt(nl)/2*1j
if nr>0:
output_x[nl][nr-1]+=a_lr*np.sqrt(nr)/2
output_y[nl][nr-1]+=a_lr*np.sqrt(nr)/2*1j
return output_x/beta,output_y/beta #attention complex numbers!!!!
def _gammaShapelets(self,shapelets, beta):
"""
calculates the shear gamma given lensing potential shapelet coefficients
:param shapelets: set of shapelets [l=,r=,a_lr=]
:type shapelets: array of size (n,3)
:returns: set of gamma shapelets.
:raises: AttributeError, KeyError
"""
output_x = np.zeros((len(shapelets)+2,len(shapelets)+2),'complex')
output_y = np.zeros((len(shapelets)+2,len(shapelets)+2),'complex')
for nl in range(0, len(shapelets)):
for nr in range(0, len(shapelets)):
a_lr = shapelets[nl][nr]
output_x[nl+2][nr] += a_lr*np.sqrt((nl+1)*(nl+2))/2
output_x[nl][nr+2] += a_lr*np.sqrt((nr+1)*(nr+2))/2
output_x[nl][nr] += a_lr*(1-(nr+1)-(nl+1))
if nl>1:
output_x[nl-2][nr] += a_lr*np.sqrt((nl)*(nl-1))/2
if nr>1:
output_x[nl][nr-2] += a_lr*np.sqrt((nr)*(nr-1))/2
output_y[nl+2][nr] += a_lr*np.sqrt((nl+1)*(nl+2))*1j/4
output_y[nl][nr+2] -= a_lr*np.sqrt((nr+1)*(nr+2))*1j/4
if nl>0:
output_y[nl-1][nr+1] += a_lr*np.sqrt((nl)*(nr+1))*1j/2
if nr>0:
output_y[nl+1][nr-1] -= a_lr*np.sqrt((nr)*(nl+1))*1j/2
if nl>1:
output_y[nl-2][nr] -= a_lr*np.sqrt((nl)*(nl-1))*1j/4
if nr>1:
output_y[nl][nr-2] += a_lr*np.sqrt((nr)*(nr-1))*1j/4
return output_x/beta**2, output_y/beta**2 #attention complex numbers!!!!
def _get_num_l(self, n_coeffs):
"""
:param n_coeffs: number of coeffs
:return: number of n_l of order of the shapelets
"""
num_l = int(round((math.sqrt(8*n_coeffs + 9)-3)/2 +0.499))
return num_l
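# --- Illustrative usage sketch (not part of the original module) ---
# A hedged example of evaluating a low-order polar shapelet potential and its
# deflection on a small grid; the coefficients and beta are arbitrary values.
#
#   x, y = np.meshgrid(np.linspace(-1., 1., 5), np.linspace(-1., 1., 5))
#   pot = PolarShapelets()
#   f_ = pot.function(x.ravel(), y.ravel(), coeffs=[1., 0.2, 0.1], beta=1.)
#   f_x, f_y = pot.derivatives(x.ravel(), y.ravel(), coeffs=[1., 0.2, 0.1], beta=1.)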
|
[
"simon.birrer@pyhs.ethz.ch"
] |
simon.birrer@pyhs.ethz.ch
|
f510be9f877cf397ceb2bf6817365f456d8d5106
|
6490638f15a2dfbe0cec9725186f9784d57c92f0
|
/SCOS/__init__.py
|
a6e179ad6b3b0a273223cde1aa960d4a7e93d834
|
[
"MIT"
] |
permissive
|
khawatkom/SpacePyLibrary
|
af9c490ef796b9d37a13298c41df1fb5bf6b3cee
|
c94415e9d85519f345fc56938198ac2537c0c6d0
|
refs/heads/master
| 2020-05-14T21:52:39.388979
| 2019-04-17T17:06:04
| 2019-04-17T17:06:04
| 181,970,668
| 1
| 0
| null | 2019-04-17T21:26:44
| 2019-04-17T21:26:44
| null |
UTF-8
|
Python
| false
| false
| 1,145
|
py
|
#******************************************************************************
# (C) 2018, Stefan Korner, Austria *
# *
# The Space Python Library is free software; you can redistribute it and/or *
# modify it under under the terms of the MIT License as published by the *
# Massachusetts Institute of Technology. *
# *
# The Space Python Library is distributed in the hope that it will be useful, *
# but WITHOUT ANY WARRANTY; without even the implied warranty of *
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the MIT License *
# for more details. *
#******************************************************************************
# SCOS-2000 Functionality *
#******************************************************************************
__all__ = ["ENV", "MIB"]
|
[
"korner-hajek@gmx.at"
] |
korner-hajek@gmx.at
|
e4ce7e967120ec413c360cfb38e9419d4965a57c
|
5abf069ff84cb7ea465069c258c144460649da35
|
/desktop/toolkit/qscintilla2/actions.py
|
813c39c4e18bf77a3179ae66180e0b5080e6f9a0
|
[] |
no_license
|
poyraz76/Packages-Systemd
|
7628cf6f6a8808f8766735551956e3dd8da9a2a9
|
a515ea0275dc0d8ec38fb6eaacc85904dde9f286
|
refs/heads/master
| 2021-01-09T05:51:48.542336
| 2017-02-04T10:25:22
| 2017-02-04T10:25:22
| 80,849,530
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,161
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/licenses/gpl.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import shelltools
from pisi.actionsapi import pythonmodules
from pisi.actionsapi import get
from pisi.actionsapi import qt5
WorkDir = "QScintilla-gpl-%s" % get.srcVERSION()
NoStrip = ["/usr/share/doc"]
def setup():
shelltools.cd("Qt4Qt5")
shelltools.system("qmake qscintilla.pro")
# Change C/XXFLAGS
pisitools.dosed("Makefile", "^CFLAGS.*\\$\\(DEFINES\\)", "CFLAGS = %s -fPIC $(DEFINES)" % get.CFLAGS())
pisitools.dosed("Makefile", "^CXXFLAGS.*\\$\\(DEFINES\\)", "CXXFLAGS = %s -fPIC $(DEFINES)" % get.CXXFLAGS())
# Get designer plugin's Makefile
shelltools.cd("../designer-Qt4Qt5/")
shelltools.system("qmake designer.pro INCLUDEPATH+=../Qt4Qt5 QMAKE_LIBDIR+=../Qt4Qt5")
# Change C/XXFLAGS of designer plugin's makefile
pisitools.dosed("Makefile", "^CFLAGS.*\\$\\(DEFINES\\)", "CFLAGS = %s -fPIC $(DEFINES)" % get.CFLAGS())
pisitools.dosed("Makefile", "^CXXFLAGS.*\\$\\(DEFINES\\)", "CXXFLAGS = %s -fPIC $(DEFINES)" % get.CXXFLAGS())
def build():
shelltools.system("cp -rf Python Python3")
shelltools.cd("Qt4Qt5")
qt5.make()
shelltools.cd("../designer-Qt4Qt5/")
qt5.make()
# Get Makefile of qscintilla-python via sip
shelltools.cd("../Python")
pythonmodules.run("configure.py -n ../Qt4Qt5 -o ../Qt4Qt5 -c --pyqt=PyQt5 --pyqt-sipdir=/usr/share/sip/Py2Qt5 --qsci-sipdir=/usr/share/sip/Py2Qt5 --sip-incdir=/usr/lib/python2.7/site-packages --qmake /usr/bin/qmake")
pisitools.dosed("Makefile", "/usr/include/qt/QtPrintSupport", "/usr/include/qt5/QtPrintSupport")
pisitools.dosed("Makefile", "/usr/include/qt/QtWidgets", "/usr/include/qt5/QtWidgets")
autotools.make()
shelltools.cd("../Python3")
pythonmodules.run("configure.py -n ../Qt4Qt5 -o ../Qt4Qt5 -c --pyqt=PyQt5 --qmake /usr/bin/qmake", pyVer = "3")
pisitools.dosed("Makefile", "/usr/include/qt/QtPrintSupport", "/usr/include/qt5/QtPrintSupport")
pisitools.dosed("Makefile", "/usr/include/qt/QtWidgets", "/usr/include/qt5/QtWidgets")
autotools.make()
def install():
shelltools.cd("Qt4Qt5")
qt5.install("INSTALL_ROOT=%s" % get.installDIR())
shelltools.cd("../designer-Qt4Qt5/")
qt5.install("INSTALL_ROOT=%s" % get.installDIR())
#build and install qscintilla-python
shelltools.cd("../Python3")
#autotools.rawInstall("DESTDIR=%s" % get.installDIR())
qt5.install("INSTALL_ROOT=%s" % get.installDIR())
pisitools.insinto("/usr/lib/python3.4/site-packages/PyQt5", "Qsci.so")
shelltools.cd("../Python")
#autotools.rawInstall("DESTDIR=%s" % get.installDIR())
qt5.install("INSTALL_ROOT=%s" % get.installDIR())
pisitools.insinto("/usr/lib/python2.7/site-packages/PyQt5", "Qsci.so")
shelltools.cd("..")
pisitools.dohtml("doc/html-Qt4Qt5/")
pisitools.insinto("/usr/share/doc/%s/Scintilla" % get.srcNAME(), "doc/Scintilla/*")
pisitools.dodoc("LICENSE*", "NEWS", "README")
|
[
"ergunsalman@hotmail.com"
] |
ergunsalman@hotmail.com
|
a46e60ebdf24c5dc1a7a082a563e503deea9c428
|
9977e4a5cb94760b380bd0de0faab9c04a3d94db
|
/examples/plot_simulation2d.py
|
84f7ae9dcd7f6be0d65082772180cb538387b9ec
|
[] |
no_license
|
vishalbelsare/mtw
|
82e76826f1382b9602eadad835a9b6355923505c
|
e15e918774bb5b1e020c5b87572004a552eb571e
|
refs/heads/master
| 2022-11-28T18:57:55.045921
| 2019-08-07T21:55:16
| 2019-08-07T21:55:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,779
|
py
|
"""
====================
MTW synthetic images
====================
This example generates 3 synthetic sparse images (as regression coefficients)
which are fed to random gaussian matrices X. Increasing the Wasserstein
hyperparameter increases consistency across regression coefficients.
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from mtw import MTW, utils
from mtw.examples_utils import (generate_dirac_images, gaussian_design,
contour_coefs)
print(__doc__)
print("Generating data...")
seed = 42
width, n_tasks = 32, 4
nnz = 3 # number of non zero elements per image
overlap = 0.
positive = True
n_features = width ** 2
n_samples = n_features // 2
"""Generate Coefs and X, Y data..."""
coefs = generate_dirac_images(width, n_tasks, nnz=nnz, positive=positive,
seed=seed, overlap=overlap)
coefs_flat = coefs.reshape(-1, n_tasks)
std = 0.25
X, Y = gaussian_design(n_samples, coefs_flat, corr=0.95, sigma=std,
scaled=True, seed=seed)
###############################################################################
# set ot params
epsilon = 2.5 / n_features
M = utils.groundmetric2d(n_features, p=2, normed=True)
gamma = utils.compute_gamma(0.8, M)
###############################################################################
# set hyperparameters and fit MTW
betamax = np.array([x.T.dot(y) for x, y in zip(X, Y)]).max() / n_samples
alpha = 10. / n_samples
beta_fr = 0.35
beta = beta_fr * betamax
callback_options = {'callback': True,
'x_real': coefs.reshape(- 1, n_tasks),
'verbose': True, 'rate': 1}
print("Fitting MTW model...")
mtw = MTW(M=M, alpha=alpha, beta=beta, sigma0=0., positive=positive,
epsilon=epsilon, gamma=gamma, stable=False, tol_ot=1e-6, tol=1e-4,
maxiter_ot=10, maxiter=2000, n_jobs=n_tasks,
gpu=False, **callback_options)
mtw.fit(X, Y)
###############################################################################
# Now we plot the 3 images on top of each other (True), the MTW fitted
# coefficients and their latent Wasserstein barycenter.
f, axes = plt.subplots(1, 3, figsize=(12, 4))
coefs = coefs.reshape(width, width, -1)
coefs_mtw = mtw.coefs_.reshape(width, width, -1)
thetabar = mtw.barycenter_.reshape(width, width)[:, :, None]
contours = [coefs, coefs_mtw, thetabar]
titles = ["True", "Recovered", "Barycenter"]
cmaps = [cm.Reds, cm.Blues, cm.Greens, cm.Oranges, cm.Greys, cm.Purples]
for ax, data_, t in zip(axes.ravel(), contours, titles):
contour_coefs(data_, ax, cmaps=cmaps, title=t)
axes[-1].clear()
contour_coefs(thetabar, ax=axes[-1], cmaps=cmaps,
title="barycenter Contours")
plt.tight_layout()
plt.show()
|
[
"hicham.janati@inria.fr"
] |
hicham.janati@inria.fr
|
8e75b88201a1a9c29a76c8dbb9c96749e65847cc
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/response/AlipayOpenAppOpenidBatchqueryResponse.py
|
852a2cc617b5cca9294234c0928fbc32c01da61e
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 1,574
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.OpenIdValue import OpenIdValue
class AlipayOpenAppOpenidBatchqueryResponse(AlipayResponse):
def __init__(self):
super(AlipayOpenAppOpenidBatchqueryResponse, self).__init__()
self._illegal_user_id_list = None
self._open_id_list = None
@property
def illegal_user_id_list(self):
return self._illegal_user_id_list
@illegal_user_id_list.setter
def illegal_user_id_list(self, value):
if isinstance(value, list):
self._illegal_user_id_list = list()
for i in value:
self._illegal_user_id_list.append(i)
@property
def open_id_list(self):
return self._open_id_list
@open_id_list.setter
def open_id_list(self, value):
if isinstance(value, list):
self._open_id_list = list()
for i in value:
if isinstance(i, OpenIdValue):
self._open_id_list.append(i)
else:
self._open_id_list.append(OpenIdValue.from_alipay_dict(i))
def parse_response_content(self, response_content):
response = super(AlipayOpenAppOpenidBatchqueryResponse, self).parse_response_content(response_content)
if 'illegal_user_id_list' in response:
self.illegal_user_id_list = response['illegal_user_id_list']
if 'open_id_list' in response:
self.open_id_list = response['open_id_list']
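# --- Hedged usage sketch (added; not part of the generated SDK) ---
# The open_id_list setter above accepts either OpenIdValue instances or plain
# dicts, which it converts via OpenIdValue.from_alipay_dict. The dict keys below
# are placeholders, not a documented payload:
#
#   resp = AlipayOpenAppOpenidBatchqueryResponse()
#   resp.open_id_list = [{"open_id": "<open-id>", "user_id": "<user-id>"}]
#   resp.illegal_user_id_list = ["<user-id>"]
#   print(resp.open_id_list[0])  # an OpenIdValue instance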
|
[
"jishupei.jsp@alibaba-inc.com"
] |
jishupei.jsp@alibaba-inc.com
|
457511baa39c93fcb58cbf7a167deb248fbf97f0
|
8deef5778d0104682d9e1c25d5ef8fc9a2e63feb
|
/PyInstaller/hooks/hook-h5py.py
|
449c848a3ec3624f6dc9d192bc3d65a5ea3ac1ff
|
[
"MIT",
"GPL-1.0-or-later",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
Weeeendi/Picture2Text
|
a2715a9c0f2d4749eab4768dea16f9720567557e
|
1919d99327b4360291b111fc8c122fffdce7ccc5
|
refs/heads/master
| 2022-07-03T06:30:55.664995
| 2022-06-03T13:01:35
| 2022-06-03T13:01:35
| 195,062,567
| 0
| 3
|
MIT
| 2022-06-03T13:01:36
| 2019-07-03T13:47:27
|
Python
|
UTF-8
|
Python
| false
| false
| 535
|
py
|
#-----------------------------------------------------------------------------
# Copyright (c) 2013-2018, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
"""
Hook for http://pypi.python.org/pypi/h5py/
"""
hiddenimports = ['h5py._proxy', 'h5py.utils', 'h5py.defs', 'h5py.h5ac']
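# Hedged sketch (added; not part of the original hook): a quick way to check that
# the hidden submodules listed above resolve in the current environment before a
# PyInstaller build. Nothing below is PyInstaller API; it only assumes h5py is
# installed.
#
#   import importlib
#   for mod in ['h5py._proxy', 'h5py.utils', 'h5py.defs', 'h5py.h5ac']:
#       importlib.import_module(mod)  # raises ImportError if a submodule is missing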
|
[
"wendi1078771091@gmail.com"
] |
wendi1078771091@gmail.com
|
b16e2e9a66be9969b417d12be51b37b00ed3b38c
|
6cc50a15672155f7d66e88830ad1baec6a061077
|
/processing/legacy/icetop_llhratio/python/globals.py
|
515a24a58ef19c6d8f525f718693f26293ddc978
|
[
"MIT"
] |
permissive
|
jrbourbeau/cr-composition
|
16b29c672b2d1c8d75c1c45e35fe6bb60b53ffe2
|
e9efb4b713492aaf544b5dd8bb67280d4f108056
|
refs/heads/master
| 2020-06-24T21:48:21.784277
| 2018-11-01T21:30:56
| 2018-11-01T21:30:56
| 74,618,907
| 0
| 1
|
MIT
| 2018-08-23T21:01:03
| 2016-11-23T22:31:01
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 3,420
|
py
|
# -*- coding: utf-8 -*-
#
## copyright (C) 2018
# The Icecube Collaboration
#
# $Id$
#
# @version $Revision$
# @date $LastChangedDate$
# @author Hershal Pandya <hershal@udel.edu> Last changed by: $LastChangedBy$
#
import numpy as np
logEnergyBins = np.linspace(3,8,26)
logEnergyBins=np.array([logEnergyBins[i] for i in range(len(logEnergyBins)) if i%2==0],dtype=float)
cosZenBin0 = 0.86
cosZenBins = np.linspace(cosZenBin0, 1.0 + np.finfo(float).eps, int(round((1 - cosZenBin0) / 0.01)) + 1)  # integer sample count; float counts are rejected by modern numpy
cosZenBins=np.array([cosZenBins[i] for i in range(len(cosZenBins)) if i%2==0],dtype=float)
logChargeBins = np.linspace(-3,4,71)
deltaCharge = 0.1
unhitCharge = logChargeBins[0]-0.5*deltaCharge
logChargeBins = np.hstack([unhitCharge-0.5*deltaCharge, logChargeBins])
excludedCharge = logChargeBins[0]-0.5*deltaCharge
logChargeBins = np.hstack([excludedCharge-0.5*deltaCharge, logChargeBins])
deltaT = 0.1
nBins = int(round(5.0 / deltaT))  # integer bin count for np.linspace below
tBinsUp = np.linspace(0,5,nBins+1)
tBinsDown = -1.0*tBinsUp
tBinsDown.sort()
logTBins = np.hstack([tBinsDown[0:-1],tBinsUp])
unhitTime = logTBins[0]-0.5*deltaT
logTBins = np.hstack([unhitTime-0.5*deltaT, logTBins])
excludedTime = logTBins[0]-0.5*deltaT
logTBins = np.hstack([excludedTime-0.5*deltaT, logTBins])
logDBins = np.linspace(0,3.5,36)
pulses1='Shield_HLCSLCTimeCorrectedTankMerged_SplineMPEfast_SRT_Split_InIcePulses_singleHits'
pulses2='Shield_HLCSLCTimeCorrectedTankMerged_SplineMPEfast_SRT_Split_InIcePulses_singleHits_UnHit'
pulses3='IceTopExcludedTanks'
reco_track2='SplineMPEfast_SRT_Split_InIcePulses'
reco_track1='MuEx_mie_SplineMPEfast_SRT_Split_InIcePulses'
def rotate_to_shower_cs(x,y,z,phi,theta,core_x,core_y,core_z):
"""
    Rotate a detector-frame position (x, y, z) into the shower coordinate system
    defined by (phi, theta) and the core position, and return the radial
    distance from the shower axis.
    Requires numpy.
"""
# counter-clockwise (pi + phi) rotation
d_phi = np.matrix([ [ -np.cos(phi), -np.sin(phi), 0],
[ np.sin(phi), -np.cos(phi), 0],
[ 0, 0, 1] ])
# clock-wise (pi - theta) rotation
d_theta = np.matrix([ [ -np.cos(theta), 0, -np.sin(theta)],
[ 0, 1, 0, ],
[ np.sin(theta), 0, -np.cos(theta)] ])
rotation=d_theta*d_phi
origin = np.array([[core_x], [core_y], [core_z]])
det_cs_position = np.array([[x],
[y],
[z]])
shower_cs_position = rotation*(det_cs_position - origin)
shower_cs_radius = np.sqrt(shower_cs_position[0]**2 + shower_cs_position[1]**2)
    return float(shower_cs_radius)  # built-in float; np.float was removed in numpy >= 1.24
def to_shower_cs(fit):
"""
Rotate to shower CS takes a fit (assumes fit.dir is set) and returns a rotation matrix.
Requires numpy.
"""
import numpy
from math import cos, sin
# counter-clockwise (pi + phi) rotation
d_phi = numpy.matrix([ [ -cos(fit.dir.phi), -sin(fit.dir.phi), 0],
[ sin(fit.dir.phi), -cos(fit.dir.phi), 0],
[ 0, 0, 1] ])
# clock-wise (pi - theta) rotation
d_theta = numpy.matrix([ [ -cos(fit.dir.theta), 0, -sin(fit.dir.theta)],
[ 0, 1, 0, ],
[ sin(fit.dir.theta), 0, -cos(fit.dir.theta)] ])
return d_theta*d_phi
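# Hedged usage sketch (added; not part of the original module): rotate_to_shower_cs()
# for a vertical shower (theta = phi = 0) with its core at the origin. The numbers
# are illustrative only; the return value is the radial distance from the shower
# axis, in the same units as the inputs.
#
#   r = rotate_to_shower_cs(x=10.0, y=0.0, z=5.0, phi=0.0, theta=0.0,
#                           core_x=0.0, core_y=0.0, core_z=0.0)
#   # r == 10.0 for this geometry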
|
[
"jrbourbeau@gmail.com"
] |
jrbourbeau@gmail.com
|
058fc6c307680b8132797732d1c2935f208e2cff
|
71e43068e82c91acbb3849169d1723f1375ac27f
|
/test/test_login_params.py
|
c5ace7940a02f5cf97942e6a18680162679dbcb8
|
[
"MIT"
] |
permissive
|
talon-one/talon_one.py
|
aa08a1dbddd8ea324846ae022e43d441c57028f6
|
917dffb010e3d3e2f841be9cccba5bba1ea6c5c3
|
refs/heads/master
| 2023-05-11T18:50:00.041890
| 2023-05-03T20:17:39
| 2023-05-03T20:17:39
| 79,575,913
| 1
| 7
|
MIT
| 2023-05-03T15:10:14
| 2017-01-20T16:29:46
|
Python
|
UTF-8
|
Python
| false
| false
| 2,115
|
py
|
# coding: utf-8
"""
Talon.One API
    Use the Talon.One API to integrate with your application and to manage applications and campaigns: - Use the operations in the [Integration API section](#integration-api) to integrate with our platform - Use the operations in the [Management API section](#management-api) to manage applications and campaigns. ## Determining the base URL of the endpoints The API is available at the same hostname as your Campaign Manager deployment. For example, if you access the Campaign Manager at `https://yourbaseurl.talon.one/`, the URL for the [updateCustomerSessionV2](https://docs.talon.one/integration-api#operation/updateCustomerSessionV2) endpoint is `https://yourbaseurl.talon.one/v2/customer_sessions/{Id}` # noqa: E501
The version of the OpenAPI document:
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import talon_one
from talon_one.models.login_params import LoginParams # noqa: E501
from talon_one.rest import ApiException
class TestLoginParams(unittest.TestCase):
"""LoginParams unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test LoginParams
            include_optional is a boolean; when False only required
            params are included, when True both required and
            optional params are included """
# model = talon_one.models.login_params.LoginParams() # noqa: E501
if include_optional :
return LoginParams(
email = 'john.doe@example.com',
password = 'admin123456'
)
else :
return LoginParams(
email = 'john.doe@example.com',
password = 'admin123456',
)
def testLoginParams(self):
"""Test LoginParams"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
|
[
"noreply@github.com"
] |
talon-one.noreply@github.com
|
1a310d12e5d6aca3b58eccea94976393c70dcc33
|
836d5f7190f6b4503e758c87c71598f18fdfce14
|
/2-Veri-Tipleri-ve-Değişkenler/Float-Veri-Tipi.py
|
e16df4bfbc907194105af1f6be9ca54d54325f37
|
[] |
no_license
|
S-Oktay-Bicici/PYTHON-PROGRAMMING
|
cf452723fd3e7e8ec2aadc7980208d747c502e9a
|
22e864f89544249d6309d6f4570a4104bf47346b
|
refs/heads/main
| 2021-11-30T00:19:21.158084
| 2021-11-16T15:44:29
| 2021-11-16T15:44:29
| 316,716,147
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 461
|
py
|
a = 3.5
print(a)
print(type(a))
b = 2.1
print(b)
print(type(b))
d = 3
print(d)
print(type(d))
z = 3.0
print(z)
print(type(z))
t = 10/5
print(t)
print(type(t))
# the // operator makes the result an integer instead of the float that / would produce
t = 10//5
print(t)
print(type(t))
# if one of the operands is a float, the result is also a float
t =10.2//5
print(t)
print(type(t))
# if one of the operands is a float, the result is also a float
t =10//5.2
print(t)
print(type(t))
|
[
"noreply@github.com"
] |
S-Oktay-Bicici.noreply@github.com
|
7b0597275393a4e60df88ff6dabff13ca0bfa6f1
|
61bc53ec90d92aece91753ec5ec9d25e0879a1e2
|
/content/pythia/pythia/legacy/top_down_bottom_up/unittests.py
|
d1f24d7a35974cc3ea42778088b753406f23a637
|
[
"BSD-3-Clause"
] |
permissive
|
aluka1994/textvqa
|
08a16c9b21ea9c5eca05f5d4d1763c190d2d7275
|
694cb2be08def519ba73be78e34664afa2c607b5
|
refs/heads/master
| 2021-05-26T23:44:21.973827
| 2020-04-08T22:05:58
| 2020-04-08T22:05:58
| 254,190,630
| 0
| 0
|
MIT
| 2020-04-08T20:14:11
| 2020-04-08T20:14:10
| null |
UTF-8
|
Python
| false
| false
| 4,467
|
py
|
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import unittest
import numpy as np
import torch
from torch.autograd import Variable
from global_variables.global_variables import use_cuda
from top_down_bottom_up.classifier import logit_classifier
from top_down_bottom_up.image_embedding import image_embedding
from top_down_bottom_up.question_embeding import QuestionEmbeding
from top_down_bottom_up.top_down_bottom_up_model import \
top_down_bottom_up_model
class Test_top_down_bottom_up_model(unittest.TestCase):
def test_classifier(self):
batch_size = 12
joint_embedding_dim = 10
num_ans_candidates = 20
text_embeding_dim = 64
image_embedding_dim = 32
my_classifier = logit_classifier(
joint_embedding_dim,
num_ans_candidates,
image_embedding_dim,
text_embeding_dim,
)
joint_embedding = Variable(torch.randn(batch_size, joint_embedding_dim))
res = my_classifier(joint_embedding)
self.assertEqual((12, 20), res.shape)
def test_classifier_batch_size_1(self):
batch_size = 1
joint_embedding_dim = 10
num_ans_candidates = 20
text_embeding_dim = 64
image_embedding_dim = 32
my_classifier = logit_classifier(
joint_embedding_dim,
num_ans_candidates,
image_embedding_dim,
text_embeding_dim,
)
joint_embedding = Variable(torch.randn(batch_size, joint_embedding_dim))
res = my_classifier(joint_embedding)
self.assertEqual((1, 20), res.shape)
def test_question_embedding(self):
num_vocab = 20
embedding_dim = 300
lstm_dim = 512
lstm_layer = 1
dropout = 0.1
batch_first = True
batch_size = 32
question_len = 10
my_word_embedding_model = QuestionEmbeding(
num_vocab, embedding_dim, lstm_dim, lstm_layer, dropout, batch_first
)
my_word_embedding_model = (
my_word_embedding_model.cuda() if use_cuda else my_word_embedding_model
)
input_txt = Variable(
torch.rand(batch_size, question_len).type(torch.LongTensor) % num_vocab
)
input_txt = input_txt.cuda() if use_cuda else input_txt
embedding = my_word_embedding_model(input_txt, batch_first=True)
self.assertEqual((32, 512), embedding.shape)
def test_image_embedding(self):
image_feat_dim = 40
txt_embedding_dim = 50
hidden_size = 30
num_of_loc = 5
batch_size = 16
my_image_embeding = image_embedding(
image_feat_dim, txt_embedding_dim, hidden_size
)
image_feat = Variable(torch.randn(batch_size, num_of_loc, image_feat_dim))
txt = Variable(torch.randn(batch_size, txt_embedding_dim))
res = my_image_embeding(image_feat, txt)
self.assertEqual((batch_size, image_feat_dim), res.shape)
def test_model(self):
image_feat_dim = 40
txt_embedding_dim = 300
lstm_dim = 512
hidden_size = 30
num_of_loc = 5
batch_size = 16
num_vocab = 60
num_ans_candidates = 35
joint_embedding_dim = 500
question_len = 13
batch_first = True
image_embedding_model = image_embedding(image_feat_dim, lstm_dim, hidden_size)
question_embedding_model = QuestionEmbeding(
num_vocab,
txt_embedding_dim,
lstm_dim,
lstm_layer=2,
dropout=0.1,
batch_first=batch_first,
)
my_classifier = logit_classifier(
joint_embedding_dim, num_ans_candidates, image_feat_dim, txt_embedding_dim
)
loss = torch.nn.CrossEntropyLoss()
my_model = top_down_bottom_up_model(
image_embedding_model, question_embedding_model, my_classifier, loss
)
image_feat = np.random.rand(batch_size, num_of_loc, image_feat_dim)
input_txt = Variable(
torch.rand(batch_size, question_len).type(torch.LongTensor) % num_vocab
)
res = my_model(image_feat, input_txt, batch_first)
self.assertEqual((batch_size, num_ans_candidates), res.shape)
if __name__ == "__main__":
unittest.main()
|
[
"anandkumar@instance-1.us-central1-a.c.andromanit.internal"
] |
anandkumar@instance-1.us-central1-a.c.andromanit.internal
|
c50c0e9005ec170abfa46abc1f26c3c35a8a774c
|
a99a44aee5cfc5e080f6d83d2bcc1c3d273a3426
|
/htdocs/plotting/auto/scripts/p98.py
|
fce3feefb062bd6cee36902c4ba30cf7d0d5e9f0
|
[
"MIT"
] |
permissive
|
ragesah/iem
|
1513929c8bc7f254048271d61b4c4cf27a5731d7
|
8ed970d426bddeaa3e7ded593665d22f0f9f6e87
|
refs/heads/main
| 2023-08-20T20:01:15.480833
| 2021-10-12T15:44:52
| 2021-10-12T15:44:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,728
|
py
|
"""Day of month frequency."""
import calendar
import numpy as np
from pandas.io.sql import read_sql
from pyiem import network
from pyiem.plot import figure_axes
from pyiem.util import get_autoplot_context, get_dbconn
from pyiem.exceptions import NoDataFound
PDICT = {
"precip": "Daily Precipitation",
"snow": "Daily Snowfall",
"snowd": "Daily Snow Depth",
"high": "High Temperature",
"low": "Low Temperature",
}
PDICT2 = {"above": "At or Above Threshold", "below": "Below Threshold"}
def get_description():
"""Return a dict describing how to call this plotter"""
desc = dict()
desc["data"] = True
desc[
"description"
] = """This plot produces the daily frequency of
    a given criterion being met for a station and month of your choice. The
    number labeled above each bar is the number of years that met the criterion.
"""
desc["arguments"] = [
dict(
type="station",
name="station",
default="IATDSM",
label="Select Station",
network="IACLIMATE",
),
dict(type="month", name="month", default=9, label="Which Month:"),
dict(
type="select",
name="var",
default="high",
label="Which Variable:",
options=PDICT,
),
dict(
type="text",
name="thres",
default="90",
label="Threshold (F or inch):",
),
dict(
type="select",
name="dir",
default="above",
label="Threshold Direction:",
options=PDICT2,
),
]
return desc
def plotter(fdict):
"""Go"""
pgconn = get_dbconn("coop")
ctx = get_autoplot_context(fdict, get_description())
station = ctx["station"]
varname = ctx["var"]
month = ctx["month"]
threshold = float(ctx["thres"])
if PDICT.get(varname) is None:
return
drct = ctx["dir"]
if PDICT2.get(drct) is None:
return
operator = ">=" if drct == "above" else "<"
table = "alldata_%s" % (station[:2],)
nt = network.Table("%sCLIMATE" % (station[:2],))
df = read_sql(
f"""
SELECT sday,
sum(case when {varname} {operator} %s then 1 else 0 end)
as hit,
count(*) as total
from {table} WHERE station = %s and month = %s
GROUP by sday ORDER by sday ASC
""",
pgconn,
params=(threshold, station, month),
index_col="sday",
)
if df.empty:
raise NoDataFound("No Data Found.")
df["freq"] = df["hit"] / df["total"] * 100.0
title = ("[%s] %s %s %s %s\nduring %s (Avg: %.2f days/year)") % (
station,
nt.sts[station]["name"],
PDICT.get(varname),
PDICT2.get(drct),
threshold,
calendar.month_name[month],
df["hit"].sum() / float(df["total"].sum()) * len(df.index),
)
fig, ax = figure_axes(title=title)
bars = ax.bar(np.arange(1, len(df.index) + 1), df["freq"])
for i, mybar in enumerate(bars):
ax.text(
i + 1,
mybar.get_height() + 0.3,
"%s" % (df["hit"][i],),
ha="center",
)
ax.set_ylabel("Frequency (%)")
ax.set_xlabel(
("Day of %s, number of years (out of %s) meeting criteria labelled")
% (calendar.month_name[month], np.max(df["total"]))
)
ax.grid(True)
ax.set_xlim(0.5, 31.5)
ax.set_ylim(0, df["freq"].max() + 5)
return fig, df
if __name__ == "__main__":
plotter(
dict(
month=9,
dir="below",
thres=65,
station="IA2724",
network="IACLIMATE",
)
)
|
[
"akrherz@iastate.edu"
] |
akrherz@iastate.edu
|
ed65d8cabf8d7f04c1951349663419deb2979c50
|
0d9b5c2842721c2246d4b58890511d154fa6df1b
|
/myadmin/migrations/0018_auto_20180311_1219.py
|
b7424714cb179baa93c9eeaf22cf4097a6e17e01
|
[] |
no_license
|
bhavingandha9/senseshop
|
862c13056cd4f53b265d040fc05337e6e46841e9
|
b2982399bc8223c5eeeb25ce9e1edbd4449d6e93
|
refs/heads/master
| 2021-04-30T08:10:23.039521
| 2018-06-06T16:21:53
| 2018-06-06T16:21:53
| 121,368,692
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 462
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2018-03-11 06:49
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('myadmin', '0017_auto_20180311_1212'),
]
operations = [
migrations.AlterField(
model_name='product',
name='image',
field=models.ImageField(blank=True, upload_to=''),
),
]
|
[
"="
] |
=
|
7b2ae6979df18c1e5d9c6f4544cb5b8e95eb7e4a
|
6d50225574554cf651b7693f22115f6e0a2f3c58
|
/upyutils/SD_AM.py
|
59562cc33a81736240fbd8568190cf173f43efb0
|
[
"MIT"
] |
permissive
|
tovam/upydev
|
abc8f9af5667821bb4644bafcead5f847a4114a1
|
0f9b73cb55750c291d2d016a3fd29d2feb71b8fc
|
refs/heads/master
| 2022-10-03T12:43:38.699244
| 2020-06-07T12:58:19
| 2020-06-07T12:58:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,434
|
py
|
#!/usr/bin/env python
# @Author: carlosgilgonzalez
# @Date: 2019-07-05T20:19:56+01:00
# @Last modified by: carlosgilgonzalez
# @Last modified time: 2019-07-10T00:55:01+01:00
from machine import SPI, Pin
import sdcard
import os
import time
# sd detect pin (15)
sd_detect = Pin(15, Pin.IN, pull=None)
sd_detect.value()
# sd sig (A5)
sd_sig = Pin(4, Pin.OUT)
sd_sig.value()
sd_sig.on()
sd_sig.value()
sd_detect.value()
# Callback
# LED
led = Pin(13, Pin.OUT)
sd_out = True
spi = SPI(1, baudrate=10000000, sck=Pin(5), mosi=Pin(18), miso=Pin(19))
cs = Pin(21, Pin.OUT)
# sd = sdcard.SDCard(spi, cs)
sd = None
irq_busy_sd = False
def pd_txtfiles(path, tabs=0):
print("txt Files on filesystem:")
print("====================")
files = [filename for filename in os.listdir(
path) if filename[-3:] == 'txt']
for file in files:
stats = os.stat(path + "/" + file)
filesize = stats[6]
isdir = stats[0] & 0x4000
_kB = 1024
if filesize < _kB:
sizestr = str(filesize) + " by"
elif filesize < _kB**2:
sizestr = "%0.1f KB" % (filesize / _kB)
elif filesize < _kB**3:
sizestr = "%0.1f MB" % (filesize / _kB**2)
else:
sizestr = "%0.1f GB" % (filesize / _kB**3)
prettyprintname = ""
for _ in range(tabs):
prettyprintname += " "
prettyprintname += file
if isdir:
prettyprintname += "/"
print('{0:<40} Size: {1:>10}'.format(prettyprintname, sizestr))
# # recursively print directory contents
# if isdir:
# print_directory(path + "/" + file, tabs + 1)
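# Hedged worked example (added note, not in the original script): with the
# thresholds above, a 2048-byte file falls into the first elif branch and is
# printed as "2.0 KB", while a 5242880-byte (5 MiB) file is printed as "5.0 MB".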
def toggle_led_sd(x, butpress=sd_detect, light=led, sd_spi=spi, sd_cs=cs, getinfo=pd_txtfiles):
global irq_busy_sd, sd_out, sd
if irq_busy_sd:
return
else:
irq_busy_sd = True
if butpress.value() == 1: # reverse op == 0
if sd_out is True:
print('SD card detected')
for i in range(4):
led.value(not led.value())
time.sleep_ms(250)
butpress.init(Pin.OUT)
sd = sdcard.SDCard(sd_spi, sd_cs)
time.sleep_ms(1000)
os.mount(sd, '/sd')
print(os.listdir('/'))
# butpress.value(0) # reverse op == 1
butpress.init(Pin.IN)
getinfo("/sd")
sd_out = False
# butpress.init(Pin.IN, Pin.PULL_UP)
elif butpress.value() == 0:
if sd_out is False:
print('SD card removed')
for i in range(4):
led.value(not led.value())
time.sleep_ms(250)
time.sleep_ms(1000)
butpress.init(Pin.OUT)
os.umount('/sd')
time.sleep_ms(1000)
sd_out = True
irq_busy_sd = False
sd_detect.irq(trigger=3, handler=toggle_led_sd)
if sd_detect.value() == 1:
print('SD card detected')
for i in range(4):
led.value(not led.value())
time.sleep_ms(250)
sd_detect.init(Pin.OUT)
sd = sdcard.SDCard(spi, cs)
time.sleep_ms(1000)
os.mount(sd, '/sd')
print(os.listdir('/'))
# butpress.value(0) # reverse op == 1
sd_detect.init(Pin.IN)
pd_txtfiles("/sd")
sd_out = False
else:
print('SD card not detected')
|
[
"carlosgilglez@gmail.com"
] |
carlosgilglez@gmail.com
|
a89274a540eccad2f64b0f01e06449ec329ce901
|
66052f5ba08ddac0a56ee140af17cf78b1ff1174
|
/PLURALSIGHT_BEGINNERS/lib/python3.9/site-packages/anyio/_core/_compat.py
|
8a0cfd088eadb36ec9786f05ab4ea9ab959ecd8e
|
[] |
no_license
|
enriquefariasrdz/Python
|
34704ceed001bbe8a23471eebefbe536b00031a5
|
b9191f7ad87b709a1b83c5cb3797a866b56aaa0d
|
refs/heads/master
| 2022-12-26T03:06:26.481456
| 2022-04-20T14:09:57
| 2022-04-20T14:09:57
| 27,020,899
| 1
| 1
| null | 2022-12-18T21:02:43
| 2014-11-23T03:33:52
|
Python
|
UTF-8
|
Python
| false
| false
| 5,668
|
py
|
from abc import ABCMeta, abstractmethod
from contextlib import AbstractContextManager
from types import TracebackType
from typing import (
TYPE_CHECKING, Any, AsyncContextManager, Callable, ContextManager, Generator, Generic,
Iterable, List, Optional, Tuple, Type, TypeVar, Union, overload)
from warnings import warn
if TYPE_CHECKING:
from ._testing import TaskInfo
else:
TaskInfo = object
T = TypeVar('T')
AnyDeprecatedAwaitable = Union['DeprecatedAwaitable', 'DeprecatedAwaitableFloat',
'DeprecatedAwaitableList[T]', TaskInfo]
@overload
async def maybe_async(__obj: TaskInfo) -> TaskInfo:
...
@overload
async def maybe_async(__obj: 'DeprecatedAwaitableFloat') -> float:
...
@overload
async def maybe_async(__obj: 'DeprecatedAwaitableList[T]') -> List[T]:
...
@overload
async def maybe_async(__obj: 'DeprecatedAwaitable') -> None:
...
async def maybe_async(__obj: 'AnyDeprecatedAwaitable[T]') -> Union[TaskInfo, float, List[T], None]:
"""
Await on the given object if necessary.
This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
methods were converted from coroutine functions into regular functions.
Do **not** try to use this for any other purpose!
:return: the result of awaiting on the object if coroutine, or the object itself otherwise
.. versionadded:: 2.2
"""
return __obj._unwrap()
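# Hedged usage sketch (added; not part of this module): code that has to run on
# both AnyIO 2.x and 3.x can wrap a value that used to be awaitable, e.g.
#
#   import anyio
#
#   async def main() -> None:
#       now = await anyio.maybe_async(anyio.current_time())
#       print(now)
#
#   anyio.run(main)
#
# anyio.current_time() is only an illustrative callee; any function whose return
# type changed from awaitable to plain value between 2.x and 3.x fits the same
# pattern.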
class _ContextManagerWrapper:
def __init__(self, cm: ContextManager[T]):
self._cm = cm
async def __aenter__(self) -> T:
return self._cm.__enter__()
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
return self._cm.__exit__(exc_type, exc_val, exc_tb)
def maybe_async_cm(cm: Union[ContextManager[T], AsyncContextManager[T]]) -> AsyncContextManager[T]:
"""
Wrap a regular context manager as an async one if necessary.
This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
methods were changed to return regular context managers instead of async ones.
:param cm: a regular or async context manager
:return: an async context manager
.. versionadded:: 2.2
"""
if not isinstance(cm, AbstractContextManager):
        raise TypeError('Given object is not a context manager')
return _ContextManagerWrapper(cm)
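# Hedged usage sketch (added; not part of this module): wrapping a context manager
# that became synchronous in AnyIO 3.x so the same "async with" runs on 2.x and
# 3.x. fail_after() is only an illustrative choice:
#
#   import anyio
#
#   async def main() -> None:
#       async with anyio.maybe_async_cm(anyio.fail_after(5)):
#           await anyio.sleep(1)
#
#   anyio.run(main)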
def _warn_deprecation(awaitable: 'AnyDeprecatedAwaitable[Any]', stacklevel: int = 1) -> None:
warn(f'Awaiting on {awaitable._name}() is deprecated. Use "await '
         f'anyio.maybe_async({awaitable._name}(...))" if you have to support both AnyIO 2.x '
f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.',
DeprecationWarning, stacklevel=stacklevel + 1)
class DeprecatedAwaitable:
def __init__(self, func: Callable[..., 'DeprecatedAwaitable']):
self._name = f'{func.__module__}.{func.__qualname__}'
def __await__(self) -> Generator[None, None, None]:
_warn_deprecation(self)
if False:
yield
def __reduce__(self) -> Tuple[Type[None], Tuple[()]]:
return type(None), ()
def _unwrap(self) -> None:
return None
class DeprecatedAwaitableFloat(float):
def __new__(
cls, x: float, func: Callable[..., 'DeprecatedAwaitableFloat']
) -> 'DeprecatedAwaitableFloat':
return super().__new__(cls, x)
def __init__(self, x: float, func: Callable[..., 'DeprecatedAwaitableFloat']):
self._name = f'{func.__module__}.{func.__qualname__}'
def __await__(self) -> Generator[None, None, float]:
_warn_deprecation(self)
if False:
yield
return float(self)
def __reduce__(self) -> Tuple[Type[float], Tuple[float]]:
return float, (float(self),)
def _unwrap(self) -> float:
return float(self)
class DeprecatedAwaitableList(List[T]):
def __init__(self, iterable: Iterable[T] = (), *,
func: Callable[..., 'DeprecatedAwaitableList[T]']):
super().__init__(iterable)
self._name = f'{func.__module__}.{func.__qualname__}'
def __await__(self) -> Generator[None, None, List[T]]:
_warn_deprecation(self)
if False:
yield
return list(self)
def __reduce__(self) -> Tuple[Type[List[T]], Tuple[List[T]]]:
return list, (list(self),)
def _unwrap(self) -> List[T]:
return list(self)
class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta):
@abstractmethod
def __enter__(self) -> T:
pass
@abstractmethod
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
pass
async def __aenter__(self) -> T:
warn(f'Using {self.__class__.__name__} as an async context manager has been deprecated. '
f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to '
f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if '
f'you are completely migrating to AnyIO 3+.', DeprecationWarning)
return self.__enter__()
async def __aexit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
return self.__exit__(exc_type, exc_val, exc_tb)
|
[
"enriquefariasrdz@gmail.com"
] |
enriquefariasrdz@gmail.com
|
2483ff0fde5338d50146941302debdfaf00f2b29
|
27aaadf435779c29012233cb1dacf27bd9dd0d0f
|
/adp-20210720/alibabacloud_adp20210720/client.py
|
0742f891433504b77026d9a6761b748bb785d24a
|
[
"Apache-2.0"
] |
permissive
|
aliyun/alibabacloud-python-sdk
|
afadedb09db5ba6c2bc6b046732b2a6dc215f004
|
e02f34e07a7f05e898a492c212598a348d903739
|
refs/heads/master
| 2023-08-22T20:26:44.695288
| 2023-08-22T12:27:39
| 2023-08-22T12:27:39
| 288,972,087
| 43
| 29
| null | 2022-09-26T09:21:19
| 2020-08-20T10:08:11
|
Python
|
UTF-8
|
Python
| false
| false
| 309,365
|
py
|
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from typing import Dict
from Tea.core import TeaCore
from alibabacloud_tea_openapi.client import Client as OpenApiClient
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util.client import Client as UtilClient
from alibabacloud_endpoint_util.client import Client as EndpointUtilClient
from alibabacloud_adp20210720 import models as adp_20210720_models
from alibabacloud_tea_util import models as util_models
from alibabacloud_openapi_util.client import Client as OpenApiUtilClient
class Client(OpenApiClient):
"""
*\
"""
def __init__(
self,
config: open_api_models.Config,
):
super().__init__(config)
self._endpoint_rule = ''
self.check_config(config)
self._endpoint = self.get_endpoint('adp', self._region_id, self._endpoint_rule, self._network, self._suffix, self._endpoint_map, self._endpoint)
def get_endpoint(
self,
product_id: str,
region_id: str,
endpoint_rule: str,
network: str,
suffix: str,
endpoint_map: Dict[str, str],
endpoint: str,
) -> str:
if not UtilClient.empty(endpoint):
return endpoint
if not UtilClient.is_unset(endpoint_map) and not UtilClient.empty(endpoint_map.get(region_id)):
return endpoint_map.get(region_id)
return EndpointUtilClient.get_endpoint_rules(product_id, region_id, endpoint_rule, network, suffix)
def add_environment_nodes_with_options(
self,
uid: str,
request: adp_20210720_models.AddEnvironmentNodesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.AddEnvironmentNodesResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.application_disk):
body['applicationDisk'] = request.application_disk
if not UtilClient.is_unset(request.cpu):
body['cpu'] = request.cpu
if not UtilClient.is_unset(request.data_disk):
body['dataDisk'] = request.data_disk
if not UtilClient.is_unset(request.etcd_disk):
body['etcdDisk'] = request.etcd_disk
if not UtilClient.is_unset(request.host_name):
body['hostName'] = request.host_name
if not UtilClient.is_unset(request.labels):
body['labels'] = request.labels
if not UtilClient.is_unset(request.master_private_ips):
body['masterPrivateIPs'] = request.master_private_ips
if not UtilClient.is_unset(request.memory):
body['memory'] = request.memory
if not UtilClient.is_unset(request.os):
body['os'] = request.os
if not UtilClient.is_unset(request.root_password):
body['rootPassword'] = request.root_password
if not UtilClient.is_unset(request.system_disk):
body['systemDisk'] = request.system_disk
if not UtilClient.is_unset(request.taints):
body['taints'] = request.taints
if not UtilClient.is_unset(request.trident_system_disk):
body['tridentSystemDisk'] = request.trident_system_disk
if not UtilClient.is_unset(request.trident_system_size_disk):
body['tridentSystemSizeDisk'] = request.trident_system_size_disk
if not UtilClient.is_unset(request.worker_private_ips):
body['workerPrivateIPs'] = request.worker_private_ips
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='AddEnvironmentNodes',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/nodes',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.AddEnvironmentNodesResponse(),
self.call_api(params, req, runtime)
)
async def add_environment_nodes_with_options_async(
self,
uid: str,
request: adp_20210720_models.AddEnvironmentNodesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.AddEnvironmentNodesResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.application_disk):
body['applicationDisk'] = request.application_disk
if not UtilClient.is_unset(request.cpu):
body['cpu'] = request.cpu
if not UtilClient.is_unset(request.data_disk):
body['dataDisk'] = request.data_disk
if not UtilClient.is_unset(request.etcd_disk):
body['etcdDisk'] = request.etcd_disk
if not UtilClient.is_unset(request.host_name):
body['hostName'] = request.host_name
if not UtilClient.is_unset(request.labels):
body['labels'] = request.labels
if not UtilClient.is_unset(request.master_private_ips):
body['masterPrivateIPs'] = request.master_private_ips
if not UtilClient.is_unset(request.memory):
body['memory'] = request.memory
if not UtilClient.is_unset(request.os):
body['os'] = request.os
if not UtilClient.is_unset(request.root_password):
body['rootPassword'] = request.root_password
if not UtilClient.is_unset(request.system_disk):
body['systemDisk'] = request.system_disk
if not UtilClient.is_unset(request.taints):
body['taints'] = request.taints
if not UtilClient.is_unset(request.trident_system_disk):
body['tridentSystemDisk'] = request.trident_system_disk
if not UtilClient.is_unset(request.trident_system_size_disk):
body['tridentSystemSizeDisk'] = request.trident_system_size_disk
if not UtilClient.is_unset(request.worker_private_ips):
body['workerPrivateIPs'] = request.worker_private_ips
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='AddEnvironmentNodes',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/nodes',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.AddEnvironmentNodesResponse(),
await self.call_api_async(params, req, runtime)
)
def add_environment_nodes(
self,
uid: str,
request: adp_20210720_models.AddEnvironmentNodesRequest,
) -> adp_20210720_models.AddEnvironmentNodesResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.add_environment_nodes_with_options(uid, request, headers, runtime)
async def add_environment_nodes_async(
self,
uid: str,
request: adp_20210720_models.AddEnvironmentNodesRequest,
) -> adp_20210720_models.AddEnvironmentNodesResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.add_environment_nodes_with_options_async(uid, request, headers, runtime)
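    # --- Hedged usage sketch (added; not part of the generated SDK) ---
    # Constructing the client and calling the convenience wrapper above. The
    # credentials, region, request fields and environment UID are placeholders,
    # not documented values:
    #
    #   config = open_api_models.Config(access_key_id='<access-key-id>',
    #                                   access_key_secret='<access-key-secret>',
    #                                   region_id='cn-hangzhou')
    #   client = Client(config)
    #   request = adp_20210720_models.AddEnvironmentNodesRequest(cpu='<cpu>', memory='<memory>')
    #   response = client.add_environment_nodes('<environment-uid>', request)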
def add_environment_product_versions_with_options(
self,
uid: str,
request: adp_20210720_models.AddEnvironmentProductVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.AddEnvironmentProductVersionsResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.product_version_info_list):
body['productVersionInfoList'] = request.product_version_info_list
if not UtilClient.is_unset(request.product_version_uidlist):
body['productVersionUIDList'] = request.product_version_uidlist
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='AddEnvironmentProductVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/product-versions',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.AddEnvironmentProductVersionsResponse(),
self.call_api(params, req, runtime)
)
async def add_environment_product_versions_with_options_async(
self,
uid: str,
request: adp_20210720_models.AddEnvironmentProductVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.AddEnvironmentProductVersionsResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.product_version_info_list):
body['productVersionInfoList'] = request.product_version_info_list
if not UtilClient.is_unset(request.product_version_uidlist):
body['productVersionUIDList'] = request.product_version_uidlist
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='AddEnvironmentProductVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/product-versions',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.AddEnvironmentProductVersionsResponse(),
await self.call_api_async(params, req, runtime)
)
def add_environment_product_versions(
self,
uid: str,
request: adp_20210720_models.AddEnvironmentProductVersionsRequest,
) -> adp_20210720_models.AddEnvironmentProductVersionsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.add_environment_product_versions_with_options(uid, request, headers, runtime)
async def add_environment_product_versions_async(
self,
uid: str,
request: adp_20210720_models.AddEnvironmentProductVersionsRequest,
) -> adp_20210720_models.AddEnvironmentProductVersionsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.add_environment_product_versions_with_options_async(uid, request, headers, runtime)
def add_product_component_version_with_options(
self,
uid: str,
component_version_uid: str,
request: adp_20210720_models.AddProductComponentVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.AddProductComponentVersionResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_version_spec_uid):
body['componentVersionSpecUID'] = request.component_version_spec_uid
if not UtilClient.is_unset(request.component_version_spec_values):
body['componentVersionSpecValues'] = request.component_version_spec_values
if not UtilClient.is_unset(request.release_name):
body['releaseName'] = request.release_name
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='AddProductComponentVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/component-versions/{OpenApiUtilClient.get_encode_param(component_version_uid)}',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.AddProductComponentVersionResponse(),
self.call_api(params, req, runtime)
)
async def add_product_component_version_with_options_async(
self,
uid: str,
component_version_uid: str,
request: adp_20210720_models.AddProductComponentVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.AddProductComponentVersionResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_version_spec_uid):
body['componentVersionSpecUID'] = request.component_version_spec_uid
if not UtilClient.is_unset(request.component_version_spec_values):
body['componentVersionSpecValues'] = request.component_version_spec_values
if not UtilClient.is_unset(request.release_name):
body['releaseName'] = request.release_name
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='AddProductComponentVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/component-versions/{OpenApiUtilClient.get_encode_param(component_version_uid)}',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.AddProductComponentVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def add_product_component_version(
self,
uid: str,
component_version_uid: str,
request: adp_20210720_models.AddProductComponentVersionRequest,
) -> adp_20210720_models.AddProductComponentVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.add_product_component_version_with_options(uid, component_version_uid, request, headers, runtime)
async def add_product_component_version_async(
self,
uid: str,
component_version_uid: str,
request: adp_20210720_models.AddProductComponentVersionRequest,
) -> adp_20210720_models.AddProductComponentVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.add_product_component_version_with_options_async(uid, component_version_uid, request, headers, runtime)
def add_product_version_config_with_options(
self,
uid: str,
request: adp_20210720_models.AddProductVersionConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.AddProductVersionConfigResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_release_name):
body['componentReleaseName'] = request.component_release_name
if not UtilClient.is_unset(request.component_version_uid):
body['componentVersionUID'] = request.component_version_uid
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.parent_component_release_name):
body['parentComponentReleaseName'] = request.parent_component_release_name
if not UtilClient.is_unset(request.parent_component_version_uid):
body['parentComponentVersionUID'] = request.parent_component_version_uid
if not UtilClient.is_unset(request.scope):
body['scope'] = request.scope
if not UtilClient.is_unset(request.value):
body['value'] = request.value
if not UtilClient.is_unset(request.value_type):
body['valueType'] = request.value_type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='AddProductVersionConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/configs',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.AddProductVersionConfigResponse(),
self.call_api(params, req, runtime)
)
async def add_product_version_config_with_options_async(
self,
uid: str,
request: adp_20210720_models.AddProductVersionConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.AddProductVersionConfigResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_release_name):
body['componentReleaseName'] = request.component_release_name
if not UtilClient.is_unset(request.component_version_uid):
body['componentVersionUID'] = request.component_version_uid
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.parent_component_release_name):
body['parentComponentReleaseName'] = request.parent_component_release_name
if not UtilClient.is_unset(request.parent_component_version_uid):
body['parentComponentVersionUID'] = request.parent_component_version_uid
if not UtilClient.is_unset(request.scope):
body['scope'] = request.scope
if not UtilClient.is_unset(request.value):
body['value'] = request.value
if not UtilClient.is_unset(request.value_type):
body['valueType'] = request.value_type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='AddProductVersionConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/configs',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.AddProductVersionConfigResponse(),
await self.call_api_async(params, req, runtime)
)
def add_product_version_config(
self,
uid: str,
request: adp_20210720_models.AddProductVersionConfigRequest,
) -> adp_20210720_models.AddProductVersionConfigResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.add_product_version_config_with_options(uid, request, headers, runtime)
async def add_product_version_config_async(
self,
uid: str,
request: adp_20210720_models.AddProductVersionConfigRequest,
) -> adp_20210720_models.AddProductVersionConfigResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.add_product_version_config_with_options_async(uid, request, headers, runtime)
def add_resource_snapshot_with_options(
self,
request: adp_20210720_models.AddResourceSnapshotRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.AddResourceSnapshotResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.cluster_uid):
query['clusterUID'] = request.cluster_uid
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
body = {}
if not UtilClient.is_unset(request.name):
body['name'] = request.name
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query),
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='AddResourceSnapshot',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/resource-snapshots',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.AddResourceSnapshotResponse(),
self.call_api(params, req, runtime)
)
async def add_resource_snapshot_with_options_async(
self,
request: adp_20210720_models.AddResourceSnapshotRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.AddResourceSnapshotResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.cluster_uid):
query['clusterUID'] = request.cluster_uid
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
body = {}
if not UtilClient.is_unset(request.name):
body['name'] = request.name
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query),
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='AddResourceSnapshot',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/resource-snapshots',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.AddResourceSnapshotResponse(),
await self.call_api_async(params, req, runtime)
)
def add_resource_snapshot(
self,
request: adp_20210720_models.AddResourceSnapshotRequest,
) -> adp_20210720_models.AddResourceSnapshotResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.add_resource_snapshot_with_options(request, headers, runtime)
async def add_resource_snapshot_async(
self,
request: adp_20210720_models.AddResourceSnapshotRequest,
) -> adp_20210720_models.AddResourceSnapshotResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.add_resource_snapshot_with_options_async(request, headers, runtime)
def batch_add_environment_nodes_with_options(
self,
uid: str,
request: adp_20210720_models.BatchAddEnvironmentNodesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.BatchAddEnvironmentNodesResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.instance_list):
body['instanceList'] = request.instance_list
if not UtilClient.is_unset(request.overwrite):
body['overwrite'] = request.overwrite
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='BatchAddEnvironmentNodes',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/batch/nodes',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.BatchAddEnvironmentNodesResponse(),
self.call_api(params, req, runtime)
)
async def batch_add_environment_nodes_with_options_async(
self,
uid: str,
request: adp_20210720_models.BatchAddEnvironmentNodesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.BatchAddEnvironmentNodesResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.instance_list):
body['instanceList'] = request.instance_list
if not UtilClient.is_unset(request.overwrite):
body['overwrite'] = request.overwrite
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='BatchAddEnvironmentNodes',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/batch/nodes',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.BatchAddEnvironmentNodesResponse(),
await self.call_api_async(params, req, runtime)
)
def batch_add_environment_nodes(
self,
uid: str,
request: adp_20210720_models.BatchAddEnvironmentNodesRequest,
) -> adp_20210720_models.BatchAddEnvironmentNodesResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.batch_add_environment_nodes_with_options(uid, request, headers, runtime)
async def batch_add_environment_nodes_async(
self,
uid: str,
request: adp_20210720_models.BatchAddEnvironmentNodesRequest,
) -> adp_20210720_models.BatchAddEnvironmentNodesResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.batch_add_environment_nodes_with_options_async(uid, request, headers, runtime)
def batch_add_product_version_config_with_options(
self,
uid: str,
request: adp_20210720_models.BatchAddProductVersionConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.BatchAddProductVersionConfigResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.product_version_config_list):
body['productVersionConfigList'] = request.product_version_config_list
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='BatchAddProductVersionConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/batch/configs',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.BatchAddProductVersionConfigResponse(),
self.call_api(params, req, runtime)
)
async def batch_add_product_version_config_with_options_async(
self,
uid: str,
request: adp_20210720_models.BatchAddProductVersionConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.BatchAddProductVersionConfigResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.product_version_config_list):
body['productVersionConfigList'] = request.product_version_config_list
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='BatchAddProductVersionConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/batch/configs',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.BatchAddProductVersionConfigResponse(),
await self.call_api_async(params, req, runtime)
)
def batch_add_product_version_config(
self,
uid: str,
request: adp_20210720_models.BatchAddProductVersionConfigRequest,
) -> adp_20210720_models.BatchAddProductVersionConfigResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.batch_add_product_version_config_with_options(uid, request, headers, runtime)
async def batch_add_product_version_config_async(
self,
uid: str,
request: adp_20210720_models.BatchAddProductVersionConfigRequest,
) -> adp_20210720_models.BatchAddProductVersionConfigResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.batch_add_product_version_config_with_options_async(uid, request, headers, runtime)
def create_deliverable_with_options(
self,
request: adp_20210720_models.CreateDeliverableRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateDeliverableResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.foundation):
body['foundation'] = request.foundation
if not UtilClient.is_unset(request.products):
body['products'] = request.products
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateDeliverable',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/deliverables',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateDeliverableResponse(),
self.call_api(params, req, runtime)
)
async def create_deliverable_with_options_async(
self,
request: adp_20210720_models.CreateDeliverableRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateDeliverableResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.foundation):
body['foundation'] = request.foundation
if not UtilClient.is_unset(request.products):
body['products'] = request.products
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateDeliverable',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/deliverables',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateDeliverableResponse(),
await self.call_api_async(params, req, runtime)
)
def create_deliverable(
self,
request: adp_20210720_models.CreateDeliverableRequest,
) -> adp_20210720_models.CreateDeliverableResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.create_deliverable_with_options(request, headers, runtime)
async def create_deliverable_async(
self,
request: adp_20210720_models.CreateDeliverableRequest,
) -> adp_20210720_models.CreateDeliverableResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.create_deliverable_with_options_async(request, headers, runtime)
def create_delivery_instance_with_options(
self,
request: adp_20210720_models.CreateDeliveryInstanceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateDeliveryInstanceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.cluster_uid):
body['clusterUID'] = request.cluster_uid
if not UtilClient.is_unset(request.deliverable_config_uid):
body['deliverableConfigUID'] = request.deliverable_config_uid
if not UtilClient.is_unset(request.deliverable_uid):
body['deliverableUID'] = request.deliverable_uid
if not UtilClient.is_unset(request.env_uid):
body['envUID'] = request.env_uid
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateDeliveryInstance',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-instances',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateDeliveryInstanceResponse(),
self.call_api(params, req, runtime)
)
async def create_delivery_instance_with_options_async(
self,
request: adp_20210720_models.CreateDeliveryInstanceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateDeliveryInstanceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.cluster_uid):
body['clusterUID'] = request.cluster_uid
if not UtilClient.is_unset(request.deliverable_config_uid):
body['deliverableConfigUID'] = request.deliverable_config_uid
if not UtilClient.is_unset(request.deliverable_uid):
body['deliverableUID'] = request.deliverable_uid
if not UtilClient.is_unset(request.env_uid):
body['envUID'] = request.env_uid
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateDeliveryInstance',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-instances',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateDeliveryInstanceResponse(),
await self.call_api_async(params, req, runtime)
)
def create_delivery_instance(
self,
request: adp_20210720_models.CreateDeliveryInstanceRequest,
) -> adp_20210720_models.CreateDeliveryInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.create_delivery_instance_with_options(request, headers, runtime)
async def create_delivery_instance_async(
self,
request: adp_20210720_models.CreateDeliveryInstanceRequest,
) -> adp_20210720_models.CreateDeliveryInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.create_delivery_instance_with_options_async(request, headers, runtime)
def create_delivery_package_with_options(
self,
request: adp_20210720_models.CreateDeliveryPackageRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateDeliveryPackageResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.deliverable_uid):
body['deliverableUID'] = request.deliverable_uid
if not UtilClient.is_unset(request.delivery_instance_uid):
body['deliveryInstanceUID'] = request.delivery_instance_uid
if not UtilClient.is_unset(request.origin_deliverable_uid):
body['originDeliverableUID'] = request.origin_deliverable_uid
if not UtilClient.is_unset(request.package_content_type):
body['packageContentType'] = request.package_content_type
if not UtilClient.is_unset(request.package_type):
body['packageType'] = request.package_type
if not UtilClient.is_unset(request.platform):
body['platform'] = request.platform
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateDeliveryPackage',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-packages',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateDeliveryPackageResponse(),
self.call_api(params, req, runtime)
)
async def create_delivery_package_with_options_async(
self,
request: adp_20210720_models.CreateDeliveryPackageRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateDeliveryPackageResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.deliverable_uid):
body['deliverableUID'] = request.deliverable_uid
if not UtilClient.is_unset(request.delivery_instance_uid):
body['deliveryInstanceUID'] = request.delivery_instance_uid
if not UtilClient.is_unset(request.origin_deliverable_uid):
body['originDeliverableUID'] = request.origin_deliverable_uid
if not UtilClient.is_unset(request.package_content_type):
body['packageContentType'] = request.package_content_type
if not UtilClient.is_unset(request.package_type):
body['packageType'] = request.package_type
if not UtilClient.is_unset(request.platform):
body['platform'] = request.platform
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateDeliveryPackage',
version='2021-07-20',
protocol='HTTPS',
            pathname='/api/v2/delivery/delivery-packages',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateDeliveryPackageResponse(),
await self.call_api_async(params, req, runtime)
)
def create_delivery_package(
self,
request: adp_20210720_models.CreateDeliveryPackageRequest,
) -> adp_20210720_models.CreateDeliveryPackageResponse:
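        """
        Calls the CreateDeliveryPackage API with default runtime options and empty headers.
        """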
runtime = util_models.RuntimeOptions()
headers = {}
return self.create_delivery_package_with_options(request, headers, runtime)
async def create_delivery_package_async(
self,
request: adp_20210720_models.CreateDeliveryPackageRequest,
) -> adp_20210720_models.CreateDeliveryPackageResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.create_delivery_package_with_options_async(request, headers, runtime)
def create_environment_with_options(
self,
request: adp_20210720_models.CreateEnvironmentRequest,
headers: adp_20210720_models.CreateEnvironmentHeaders,
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateEnvironmentResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.annotations):
body['annotations'] = request.annotations
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.location):
body['location'] = request.location
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.platform):
body['platform'] = request.platform
if not UtilClient.is_unset(request.platform_list):
body['platformList'] = request.platform_list
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.type):
body['type'] = request.type
if not UtilClient.is_unset(request.vendor_config):
body['vendorConfig'] = request.vendor_config
if not UtilClient.is_unset(request.vendor_type):
body['vendorType'] = request.vendor_type
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.client_token):
real_headers['ClientToken'] = UtilClient.to_jsonstring(headers.client_token)
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateEnvironment',
version='2021-07-20',
protocol='HTTPS',
            pathname='/api/v2/environments',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateEnvironmentResponse(),
self.call_api(params, req, runtime)
)
async def create_environment_with_options_async(
self,
request: adp_20210720_models.CreateEnvironmentRequest,
headers: adp_20210720_models.CreateEnvironmentHeaders,
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateEnvironmentResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.annotations):
body['annotations'] = request.annotations
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.location):
body['location'] = request.location
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.platform):
body['platform'] = request.platform
if not UtilClient.is_unset(request.platform_list):
body['platformList'] = request.platform_list
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.type):
body['type'] = request.type
if not UtilClient.is_unset(request.vendor_config):
body['vendorConfig'] = request.vendor_config
if not UtilClient.is_unset(request.vendor_type):
body['vendorType'] = request.vendor_type
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.client_token):
real_headers['ClientToken'] = UtilClient.to_jsonstring(headers.client_token)
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateEnvironment',
version='2021-07-20',
protocol='HTTPS',
            pathname='/api/v2/environments',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateEnvironmentResponse(),
await self.call_api_async(params, req, runtime)
)
def create_environment(
self,
request: adp_20210720_models.CreateEnvironmentRequest,
) -> adp_20210720_models.CreateEnvironmentResponse:
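        """
        Calls the CreateEnvironment API with default runtime options and a default
        CreateEnvironmentHeaders instance.

        Illustrative usage (a sketch only; the field values below are assumptions, not defaults):

            request = adp_20210720_models.CreateEnvironmentRequest(
                name='demo-env',
                description='created from the SDK',
            )
            response = client.create_environment(request)

        Here `client` is assumed to be an initialized instance of this client class.
        """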
runtime = util_models.RuntimeOptions()
headers = adp_20210720_models.CreateEnvironmentHeaders()
return self.create_environment_with_options(request, headers, runtime)
async def create_environment_async(
self,
request: adp_20210720_models.CreateEnvironmentRequest,
) -> adp_20210720_models.CreateEnvironmentResponse:
runtime = util_models.RuntimeOptions()
headers = adp_20210720_models.CreateEnvironmentHeaders()
return await self.create_environment_with_options_async(request, headers, runtime)
def create_environment_license_with_options(
self,
uid: str,
request: adp_20210720_models.CreateEnvironmentLicenseRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateEnvironmentLicenseResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.company_name):
body['companyName'] = request.company_name
if not UtilClient.is_unset(request.contact):
body['contact'] = request.contact
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.expire_time):
body['expireTime'] = request.expire_time
if not UtilClient.is_unset(request.license_quota):
body['licenseQuota'] = request.license_quota
if not UtilClient.is_unset(request.machine_fingerprint):
body['machineFingerprint'] = request.machine_fingerprint
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.scenario):
body['scenario'] = request.scenario
if not UtilClient.is_unset(request.scope):
body['scope'] = request.scope
if not UtilClient.is_unset(request.type):
body['type'] = request.type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateEnvironmentLicense',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/licenses',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateEnvironmentLicenseResponse(),
self.call_api(params, req, runtime)
)
async def create_environment_license_with_options_async(
self,
uid: str,
request: adp_20210720_models.CreateEnvironmentLicenseRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateEnvironmentLicenseResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.company_name):
body['companyName'] = request.company_name
if not UtilClient.is_unset(request.contact):
body['contact'] = request.contact
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.expire_time):
body['expireTime'] = request.expire_time
if not UtilClient.is_unset(request.license_quota):
body['licenseQuota'] = request.license_quota
if not UtilClient.is_unset(request.machine_fingerprint):
body['machineFingerprint'] = request.machine_fingerprint
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.scenario):
body['scenario'] = request.scenario
if not UtilClient.is_unset(request.scope):
body['scope'] = request.scope
if not UtilClient.is_unset(request.type):
body['type'] = request.type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateEnvironmentLicense',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/licenses',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateEnvironmentLicenseResponse(),
await self.call_api_async(params, req, runtime)
)
def create_environment_license(
self,
uid: str,
request: adp_20210720_models.CreateEnvironmentLicenseRequest,
) -> adp_20210720_models.CreateEnvironmentLicenseResponse:
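        """
        Calls the CreateEnvironmentLicense API for the environment identified by `uid`,
        with default runtime options and empty headers.
        """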
runtime = util_models.RuntimeOptions()
headers = {}
return self.create_environment_license_with_options(uid, request, headers, runtime)
async def create_environment_license_async(
self,
uid: str,
request: adp_20210720_models.CreateEnvironmentLicenseRequest,
) -> adp_20210720_models.CreateEnvironmentLicenseResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.create_environment_license_with_options_async(uid, request, headers, runtime)
def create_foundation_reference_with_options(
self,
request: adp_20210720_models.CreateFoundationReferenceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateFoundationReferenceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.cluster_config):
body['clusterConfig'] = request.cluster_config
if not UtilClient.is_unset(request.component_configs):
body['componentConfigs'] = request.component_configs
if not UtilClient.is_unset(request.foundation_reference_configs):
body['foundationReferenceConfigs'] = request.foundation_reference_configs
if not UtilClient.is_unset(request.foundation_version_uid):
body['foundationVersionUID'] = request.foundation_version_uid
if not UtilClient.is_unset(request.origin_foundation_reference_uid):
body['originFoundationReferenceUID'] = request.origin_foundation_reference_uid
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateFoundationReference',
version='2021-07-20',
protocol='HTTPS',
            pathname='/api/v2/foundation-references',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateFoundationReferenceResponse(),
self.call_api(params, req, runtime)
)
async def create_foundation_reference_with_options_async(
self,
request: adp_20210720_models.CreateFoundationReferenceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateFoundationReferenceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.cluster_config):
body['clusterConfig'] = request.cluster_config
if not UtilClient.is_unset(request.component_configs):
body['componentConfigs'] = request.component_configs
if not UtilClient.is_unset(request.foundation_reference_configs):
body['foundationReferenceConfigs'] = request.foundation_reference_configs
if not UtilClient.is_unset(request.foundation_version_uid):
body['foundationVersionUID'] = request.foundation_version_uid
if not UtilClient.is_unset(request.origin_foundation_reference_uid):
body['originFoundationReferenceUID'] = request.origin_foundation_reference_uid
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateFoundationReference',
version='2021-07-20',
protocol='HTTPS',
            pathname='/api/v2/foundation-references',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateFoundationReferenceResponse(),
await self.call_api_async(params, req, runtime)
)
def create_foundation_reference(
self,
request: adp_20210720_models.CreateFoundationReferenceRequest,
) -> adp_20210720_models.CreateFoundationReferenceResponse:
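        """
        Calls the CreateFoundationReference API with default runtime options and empty headers.
        """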
runtime = util_models.RuntimeOptions()
headers = {}
return self.create_foundation_reference_with_options(request, headers, runtime)
async def create_foundation_reference_async(
self,
request: adp_20210720_models.CreateFoundationReferenceRequest,
) -> adp_20210720_models.CreateFoundationReferenceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.create_foundation_reference_with_options_async(request, headers, runtime)
def create_product_with_options(
self,
request: adp_20210720_models.CreateProductRequest,
headers: adp_20210720_models.CreateProductHeaders,
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateProductResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.categories):
body['categories'] = request.categories
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.display_name):
body['displayName'] = request.display_name
if not UtilClient.is_unset(request.foundation_version_uid):
body['foundationVersionUID'] = request.foundation_version_uid
if not UtilClient.is_unset(request.product_name):
body['productName'] = request.product_name
if not UtilClient.is_unset(request.vendor):
body['vendor'] = request.vendor
if not UtilClient.is_unset(request.without_product_version):
body['withoutProductVersion'] = request.without_product_version
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.client_token):
real_headers['ClientToken'] = UtilClient.to_jsonstring(headers.client_token)
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateProduct',
version='2021-07-20',
protocol='HTTPS',
            pathname='/integration/api/v2/products',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateProductResponse(),
self.call_api(params, req, runtime)
)
async def create_product_with_options_async(
self,
request: adp_20210720_models.CreateProductRequest,
headers: adp_20210720_models.CreateProductHeaders,
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateProductResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.categories):
body['categories'] = request.categories
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.display_name):
body['displayName'] = request.display_name
if not UtilClient.is_unset(request.foundation_version_uid):
body['foundationVersionUID'] = request.foundation_version_uid
if not UtilClient.is_unset(request.product_name):
body['productName'] = request.product_name
if not UtilClient.is_unset(request.vendor):
body['vendor'] = request.vendor
if not UtilClient.is_unset(request.without_product_version):
body['withoutProductVersion'] = request.without_product_version
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.client_token):
real_headers['ClientToken'] = UtilClient.to_jsonstring(headers.client_token)
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateProduct',
version='2021-07-20',
protocol='HTTPS',
            pathname='/integration/api/v2/products',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateProductResponse(),
await self.call_api_async(params, req, runtime)
)
def create_product(
self,
request: adp_20210720_models.CreateProductRequest,
) -> adp_20210720_models.CreateProductResponse:
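        """
        Calls the CreateProduct API with default runtime options and a default
        CreateProductHeaders instance.
        """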
runtime = util_models.RuntimeOptions()
headers = adp_20210720_models.CreateProductHeaders()
return self.create_product_with_options(request, headers, runtime)
async def create_product_async(
self,
request: adp_20210720_models.CreateProductRequest,
) -> adp_20210720_models.CreateProductResponse:
runtime = util_models.RuntimeOptions()
headers = adp_20210720_models.CreateProductHeaders()
return await self.create_product_with_options_async(request, headers, runtime)
def create_product_deployment_with_options(
self,
request: adp_20210720_models.CreateProductDeploymentRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateProductDeploymentResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.environment_uid):
body['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.namespace):
body['namespace'] = request.namespace
if not UtilClient.is_unset(request.old_product_version_uid):
body['oldProductVersionUID'] = request.old_product_version_uid
if not UtilClient.is_unset(request.package_config):
body['packageConfig'] = request.package_config
if not UtilClient.is_unset(request.package_uid):
body['packageUID'] = request.package_uid
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.timeout):
body['timeout'] = request.timeout
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateProductDeployment',
version='2021-07-20',
protocol='HTTPS',
            pathname='/api/v2/product-instances/deployments',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateProductDeploymentResponse(),
self.call_api(params, req, runtime)
)
async def create_product_deployment_with_options_async(
self,
request: adp_20210720_models.CreateProductDeploymentRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateProductDeploymentResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.environment_uid):
body['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.namespace):
body['namespace'] = request.namespace
if not UtilClient.is_unset(request.old_product_version_uid):
body['oldProductVersionUID'] = request.old_product_version_uid
if not UtilClient.is_unset(request.package_config):
body['packageConfig'] = request.package_config
if not UtilClient.is_unset(request.package_uid):
body['packageUID'] = request.package_uid
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.timeout):
body['timeout'] = request.timeout
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateProductDeployment',
version='2021-07-20',
protocol='HTTPS',
            pathname='/api/v2/product-instances/deployments',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateProductDeploymentResponse(),
await self.call_api_async(params, req, runtime)
)
def create_product_deployment(
self,
request: adp_20210720_models.CreateProductDeploymentRequest,
) -> adp_20210720_models.CreateProductDeploymentResponse:
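        """
        Calls the CreateProductDeployment API with default runtime options and empty headers.
        """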
runtime = util_models.RuntimeOptions()
headers = {}
return self.create_product_deployment_with_options(request, headers, runtime)
async def create_product_deployment_async(
self,
request: adp_20210720_models.CreateProductDeploymentRequest,
) -> adp_20210720_models.CreateProductDeploymentResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.create_product_deployment_with_options_async(request, headers, runtime)
def create_product_version_with_options(
self,
uid: str,
request: adp_20210720_models.CreateProductVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateProductVersionResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.base_product_version_uid):
query['baseProductVersionUID'] = request.base_product_version_uid
body = {}
if not UtilClient.is_unset(request.version):
body['version'] = request.version
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query),
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/products/{OpenApiUtilClient.get_encode_param(uid)}/versions',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateProductVersionResponse(),
self.call_api(params, req, runtime)
)
async def create_product_version_with_options_async(
self,
uid: str,
request: adp_20210720_models.CreateProductVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateProductVersionResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.base_product_version_uid):
query['baseProductVersionUID'] = request.base_product_version_uid
body = {}
if not UtilClient.is_unset(request.version):
body['version'] = request.version
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query),
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='CreateProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/products/{OpenApiUtilClient.get_encode_param(uid)}/versions',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateProductVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def create_product_version(
self,
uid: str,
request: adp_20210720_models.CreateProductVersionRequest,
) -> adp_20210720_models.CreateProductVersionResponse:
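        """
        Calls the CreateProductVersion API for the product identified by `uid`,
        with default runtime options and empty headers.
        """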
runtime = util_models.RuntimeOptions()
headers = {}
return self.create_product_version_with_options(uid, request, headers, runtime)
async def create_product_version_async(
self,
uid: str,
request: adp_20210720_models.CreateProductVersionRequest,
) -> adp_20210720_models.CreateProductVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.create_product_version_with_options_async(uid, request, headers, runtime)
def create_product_version_package_with_options(
self,
uid: str,
request: adp_20210720_models.CreateProductVersionPackageRequest,
headers: adp_20210720_models.CreateProductVersionPackageHeaders,
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateProductVersionPackageResponse:
"""
@deprecated
@param request: CreateProductVersionPackageRequest
@param headers: CreateProductVersionPackageHeaders
@param runtime: runtime options for this request RuntimeOptions
@return: CreateProductVersionPackageResponse
Deprecated
"""
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.cluster_engine_type):
query['clusterEngineType'] = request.cluster_engine_type
if not UtilClient.is_unset(request.foundation_reference_uid):
query['foundationReferenceUID'] = request.foundation_reference_uid
if not UtilClient.is_unset(request.old_foundation_reference_uid):
query['oldFoundationReferenceUID'] = request.old_foundation_reference_uid
if not UtilClient.is_unset(request.old_product_version_uid):
query['oldProductVersionUID'] = request.old_product_version_uid
if not UtilClient.is_unset(request.package_content_type):
query['packageContentType'] = request.package_content_type
if not UtilClient.is_unset(request.package_tool_type):
query['packageToolType'] = request.package_tool_type
if not UtilClient.is_unset(request.package_type):
query['packageType'] = request.package_type
if not UtilClient.is_unset(request.platform):
query['platform'] = request.platform
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.client_token):
real_headers['ClientToken'] = UtilClient.to_jsonstring(headers.client_token)
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='CreateProductVersionPackage',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/hosting/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/packages',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateProductVersionPackageResponse(),
self.call_api(params, req, runtime)
)
async def create_product_version_package_with_options_async(
self,
uid: str,
request: adp_20210720_models.CreateProductVersionPackageRequest,
headers: adp_20210720_models.CreateProductVersionPackageHeaders,
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.CreateProductVersionPackageResponse:
"""
@deprecated
@param request: CreateProductVersionPackageRequest
@param headers: CreateProductVersionPackageHeaders
@param runtime: runtime options for this request RuntimeOptions
@return: CreateProductVersionPackageResponse
Deprecated
"""
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.cluster_engine_type):
query['clusterEngineType'] = request.cluster_engine_type
if not UtilClient.is_unset(request.foundation_reference_uid):
query['foundationReferenceUID'] = request.foundation_reference_uid
if not UtilClient.is_unset(request.old_foundation_reference_uid):
query['oldFoundationReferenceUID'] = request.old_foundation_reference_uid
if not UtilClient.is_unset(request.old_product_version_uid):
query['oldProductVersionUID'] = request.old_product_version_uid
if not UtilClient.is_unset(request.package_content_type):
query['packageContentType'] = request.package_content_type
if not UtilClient.is_unset(request.package_tool_type):
query['packageToolType'] = request.package_tool_type
if not UtilClient.is_unset(request.package_type):
query['packageType'] = request.package_type
if not UtilClient.is_unset(request.platform):
query['platform'] = request.platform
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.client_token):
real_headers['ClientToken'] = UtilClient.to_jsonstring(headers.client_token)
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='CreateProductVersionPackage',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/hosting/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/packages',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.CreateProductVersionPackageResponse(),
await self.call_api_async(params, req, runtime)
)
def create_product_version_package(
self,
uid: str,
request: adp_20210720_models.CreateProductVersionPackageRequest,
) -> adp_20210720_models.CreateProductVersionPackageResponse:
"""
@deprecated
@param request: CreateProductVersionPackageRequest
@return: CreateProductVersionPackageResponse
Deprecated
"""
runtime = util_models.RuntimeOptions()
headers = adp_20210720_models.CreateProductVersionPackageHeaders()
return self.create_product_version_package_with_options(uid, request, headers, runtime)
async def create_product_version_package_async(
self,
uid: str,
request: adp_20210720_models.CreateProductVersionPackageRequest,
) -> adp_20210720_models.CreateProductVersionPackageResponse:
"""
@deprecated
@param request: CreateProductVersionPackageRequest
@return: CreateProductVersionPackageResponse
Deprecated
"""
runtime = util_models.RuntimeOptions()
headers = adp_20210720_models.CreateProductVersionPackageHeaders()
return await self.create_product_version_package_with_options_async(uid, request, headers, runtime)
def delete_environment_with_options(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteEnvironmentResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteEnvironment',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteEnvironmentResponse(),
self.call_api(params, req, runtime)
)
async def delete_environment_with_options_async(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteEnvironmentResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteEnvironment',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteEnvironmentResponse(),
await self.call_api_async(params, req, runtime)
)
def delete_environment(
self,
uid: str,
) -> adp_20210720_models.DeleteEnvironmentResponse:
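        """
        Calls the DeleteEnvironment API for the environment identified by `uid`,
        with default runtime options and empty headers.
        """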
runtime = util_models.RuntimeOptions()
headers = {}
return self.delete_environment_with_options(uid, headers, runtime)
async def delete_environment_async(
self,
uid: str,
) -> adp_20210720_models.DeleteEnvironmentResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.delete_environment_with_options_async(uid, headers, runtime)
def delete_environment_license_with_options(
self,
uid: str,
license_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteEnvironmentLicenseResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteEnvironmentLicense',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/licenses/{OpenApiUtilClient.get_encode_param(license_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteEnvironmentLicenseResponse(),
self.call_api(params, req, runtime)
)
async def delete_environment_license_with_options_async(
self,
uid: str,
license_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteEnvironmentLicenseResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteEnvironmentLicense',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/licenses/{OpenApiUtilClient.get_encode_param(license_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteEnvironmentLicenseResponse(),
await self.call_api_async(params, req, runtime)
)
def delete_environment_license(
self,
uid: str,
license_uid: str,
) -> adp_20210720_models.DeleteEnvironmentLicenseResponse:
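        """
        Calls the DeleteEnvironmentLicense API for the given environment and license UIDs,
        with default runtime options and empty headers.
        """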
runtime = util_models.RuntimeOptions()
headers = {}
return self.delete_environment_license_with_options(uid, license_uid, headers, runtime)
async def delete_environment_license_async(
self,
uid: str,
license_uid: str,
) -> adp_20210720_models.DeleteEnvironmentLicenseResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.delete_environment_license_with_options_async(uid, license_uid, headers, runtime)
def delete_environment_node_with_options(
self,
uid: str,
node_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteEnvironmentNodeResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteEnvironmentNode',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/nodes/{OpenApiUtilClient.get_encode_param(node_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteEnvironmentNodeResponse(),
self.call_api(params, req, runtime)
)
async def delete_environment_node_with_options_async(
self,
uid: str,
node_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteEnvironmentNodeResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteEnvironmentNode',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/nodes/{OpenApiUtilClient.get_encode_param(node_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteEnvironmentNodeResponse(),
await self.call_api_async(params, req, runtime)
)
def delete_environment_node(
self,
uid: str,
node_uid: str,
) -> adp_20210720_models.DeleteEnvironmentNodeResponse:
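        """
        Calls the DeleteEnvironmentNode API for the given environment and node UIDs,
        with default runtime options and empty headers.
        """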
runtime = util_models.RuntimeOptions()
headers = {}
return self.delete_environment_node_with_options(uid, node_uid, headers, runtime)
async def delete_environment_node_async(
self,
uid: str,
node_uid: str,
) -> adp_20210720_models.DeleteEnvironmentNodeResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.delete_environment_node_with_options_async(uid, node_uid, headers, runtime)
def delete_environment_product_version_with_options(
self,
uid: str,
product_version_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteEnvironmentProductVersionResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteEnvironmentProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/product-versions/{OpenApiUtilClient.get_encode_param(product_version_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteEnvironmentProductVersionResponse(),
self.call_api(params, req, runtime)
)
async def delete_environment_product_version_with_options_async(
self,
uid: str,
product_version_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteEnvironmentProductVersionResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteEnvironmentProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/product-versions/{OpenApiUtilClient.get_encode_param(product_version_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteEnvironmentProductVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def delete_environment_product_version(
self,
uid: str,
product_version_uid: str,
) -> adp_20210720_models.DeleteEnvironmentProductVersionResponse:
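        """
        Calls the DeleteEnvironmentProductVersion API for the given environment and
        product version UIDs, with default runtime options and empty headers.
        """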
runtime = util_models.RuntimeOptions()
headers = {}
return self.delete_environment_product_version_with_options(uid, product_version_uid, headers, runtime)
async def delete_environment_product_version_async(
self,
uid: str,
product_version_uid: str,
) -> adp_20210720_models.DeleteEnvironmentProductVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.delete_environment_product_version_with_options_async(uid, product_version_uid, headers, runtime)
def delete_product_with_options(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteProductResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteProduct',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/products/{OpenApiUtilClient.get_encode_param(uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteProductResponse(),
self.call_api(params, req, runtime)
)
async def delete_product_with_options_async(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteProductResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteProduct',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/products/{OpenApiUtilClient.get_encode_param(uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteProductResponse(),
await self.call_api_async(params, req, runtime)
)
def delete_product(
self,
uid: str,
) -> adp_20210720_models.DeleteProductResponse:
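        """
        Calls the DeleteProduct API for the product identified by `uid`,
        with default runtime options and empty headers.
        """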
runtime = util_models.RuntimeOptions()
headers = {}
return self.delete_product_with_options(uid, headers, runtime)
async def delete_product_async(
self,
uid: str,
) -> adp_20210720_models.DeleteProductResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.delete_product_with_options_async(uid, headers, runtime)
def delete_product_component_version_with_options(
self,
uid: str,
relation_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteProductComponentVersionResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteProductComponentVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/relations/{OpenApiUtilClient.get_encode_param(relation_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteProductComponentVersionResponse(),
self.call_api(params, req, runtime)
)
async def delete_product_component_version_with_options_async(
self,
uid: str,
relation_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteProductComponentVersionResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteProductComponentVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/relations/{OpenApiUtilClient.get_encode_param(relation_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteProductComponentVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def delete_product_component_version(
self,
uid: str,
relation_uid: str,
) -> adp_20210720_models.DeleteProductComponentVersionResponse:
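        """
        Calls the DeleteProductComponentVersion API for the given product version and
        relation UIDs, with default runtime options and empty headers.
        """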
runtime = util_models.RuntimeOptions()
headers = {}
return self.delete_product_component_version_with_options(uid, relation_uid, headers, runtime)
async def delete_product_component_version_async(
self,
uid: str,
relation_uid: str,
) -> adp_20210720_models.DeleteProductComponentVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.delete_product_component_version_with_options_async(uid, relation_uid, headers, runtime)
def delete_product_instance_config_with_options(
self,
config_uid: str,
request: adp_20210720_models.DeleteProductInstanceConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteProductInstanceConfigResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.environment_uid):
query['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DeleteProductInstanceConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances/configs/{OpenApiUtilClient.get_encode_param(config_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteProductInstanceConfigResponse(),
self.call_api(params, req, runtime)
)
async def delete_product_instance_config_with_options_async(
self,
config_uid: str,
request: adp_20210720_models.DeleteProductInstanceConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteProductInstanceConfigResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.environment_uid):
query['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DeleteProductInstanceConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances/configs/{OpenApiUtilClient.get_encode_param(config_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteProductInstanceConfigResponse(),
await self.call_api_async(params, req, runtime)
)
def delete_product_instance_config(
self,
config_uid: str,
request: adp_20210720_models.DeleteProductInstanceConfigRequest,
) -> adp_20210720_models.DeleteProductInstanceConfigResponse:
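        """
        Calls the DeleteProductInstanceConfig API for the config identified by `config_uid`,
        with default runtime options and empty headers.
        """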
runtime = util_models.RuntimeOptions()
headers = {}
return self.delete_product_instance_config_with_options(config_uid, request, headers, runtime)
async def delete_product_instance_config_async(
self,
config_uid: str,
request: adp_20210720_models.DeleteProductInstanceConfigRequest,
) -> adp_20210720_models.DeleteProductInstanceConfigResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.delete_product_instance_config_with_options_async(config_uid, request, headers, runtime)
def delete_product_version_with_options(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteProductVersionResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteProductVersionResponse(),
self.call_api(params, req, runtime)
)
async def delete_product_version_with_options_async(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteProductVersionResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteProductVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def delete_product_version(
self,
uid: str,
) -> adp_20210720_models.DeleteProductVersionResponse:
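        """
        Calls the DeleteProductVersion API for the product version identified by `uid`,
        with default runtime options and empty headers.
        """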
runtime = util_models.RuntimeOptions()
headers = {}
return self.delete_product_version_with_options(uid, headers, runtime)
async def delete_product_version_async(
self,
uid: str,
) -> adp_20210720_models.DeleteProductVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.delete_product_version_with_options_async(uid, headers, runtime)
def delete_product_version_config_with_options(
self,
uid: str,
config_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteProductVersionConfigResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteProductVersionConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/configs/{OpenApiUtilClient.get_encode_param(config_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteProductVersionConfigResponse(),
self.call_api(params, req, runtime)
)
async def delete_product_version_config_with_options_async(
self,
uid: str,
config_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.DeleteProductVersionConfigResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='DeleteProductVersionConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/configs/{OpenApiUtilClient.get_encode_param(config_uid)}',
method='DELETE',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.DeleteProductVersionConfigResponse(),
await self.call_api_async(params, req, runtime)
)
def delete_product_version_config(
self,
uid: str,
config_uid: str,
) -> adp_20210720_models.DeleteProductVersionConfigResponse:
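        """
        Calls the DeleteProductVersionConfig API for the given product version and
        config UIDs, with default runtime options and empty headers.
        """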
runtime = util_models.RuntimeOptions()
headers = {}
return self.delete_product_version_config_with_options(uid, config_uid, headers, runtime)
async def delete_product_version_config_async(
self,
uid: str,
config_uid: str,
) -> adp_20210720_models.DeleteProductVersionConfigResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.delete_product_version_config_with_options_async(uid, config_uid, headers, runtime)
def generate_product_instance_deployment_config_with_options(
self,
request: adp_20210720_models.GenerateProductInstanceDeploymentConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GenerateProductInstanceDeploymentConfigResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.environment_uid):
body['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.package_content_type):
body['packageContentType'] = request.package_content_type
if not UtilClient.is_unset(request.package_uid):
body['packageUID'] = request.package_uid
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.product_version_uidlist):
body['productVersionUIDList'] = request.product_version_uidlist
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='GenerateProductInstanceDeploymentConfig',
version='2021-07-20',
protocol='HTTPS',
            pathname='/api/v2/product-instances/package-configs',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GenerateProductInstanceDeploymentConfigResponse(),
self.call_api(params, req, runtime)
)
async def generate_product_instance_deployment_config_with_options_async(
self,
request: adp_20210720_models.GenerateProductInstanceDeploymentConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GenerateProductInstanceDeploymentConfigResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.environment_uid):
body['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.package_content_type):
body['packageContentType'] = request.package_content_type
if not UtilClient.is_unset(request.package_uid):
body['packageUID'] = request.package_uid
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.product_version_uidlist):
body['productVersionUIDList'] = request.product_version_uidlist
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='GenerateProductInstanceDeploymentConfig',
version='2021-07-20',
protocol='HTTPS',
            pathname='/api/v2/product-instances/package-configs',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GenerateProductInstanceDeploymentConfigResponse(),
await self.call_api_async(params, req, runtime)
)
def generate_product_instance_deployment_config(
self,
request: adp_20210720_models.GenerateProductInstanceDeploymentConfigRequest,
) -> adp_20210720_models.GenerateProductInstanceDeploymentConfigResponse:
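        """
        Calls the GenerateProductInstanceDeploymentConfig API with default runtime options
        and empty headers.
        """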
runtime = util_models.RuntimeOptions()
headers = {}
return self.generate_product_instance_deployment_config_with_options(request, headers, runtime)
async def generate_product_instance_deployment_config_async(
self,
request: adp_20210720_models.GenerateProductInstanceDeploymentConfigRequest,
) -> adp_20210720_models.GenerateProductInstanceDeploymentConfigResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.generate_product_instance_deployment_config_with_options_async(request, headers, runtime)
def get_component_with_options(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetComponentResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetComponent',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/components/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetComponentResponse(),
self.call_api(params, req, runtime)
)
async def get_component_with_options_async(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetComponentResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetComponent',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/components/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetComponentResponse(),
await self.call_api_async(params, req, runtime)
)
def get_component(
self,
uid: str,
) -> adp_20210720_models.GetComponentResponse:
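        """
        Calls the GetComponent API for the component identified by `uid`,
        with default runtime options and empty headers.
        """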
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_component_with_options(uid, headers, runtime)
async def get_component_async(
self,
uid: str,
) -> adp_20210720_models.GetComponentResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_component_with_options_async(uid, headers, runtime)
def get_component_version_with_options(
self,
uid: str,
version_uid: str,
request: adp_20210720_models.GetComponentVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetComponentVersionResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.without_chart_content):
query['withoutChartContent'] = request.without_chart_content
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetComponentVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/components/{OpenApiUtilClient.get_encode_param(uid)}/versions/{OpenApiUtilClient.get_encode_param(version_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetComponentVersionResponse(),
self.call_api(params, req, runtime)
)
async def get_component_version_with_options_async(
self,
uid: str,
version_uid: str,
request: adp_20210720_models.GetComponentVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetComponentVersionResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.without_chart_content):
query['withoutChartContent'] = request.without_chart_content
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetComponentVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/components/{OpenApiUtilClient.get_encode_param(uid)}/versions/{OpenApiUtilClient.get_encode_param(version_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetComponentVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def get_component_version(
self,
uid: str,
version_uid: str,
request: adp_20210720_models.GetComponentVersionRequest,
) -> adp_20210720_models.GetComponentVersionResponse:
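        """
        Calls the GetComponentVersion API for the given component and version UIDs,
        with default runtime options and empty headers.
        """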
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_component_version_with_options(uid, version_uid, request, headers, runtime)
async def get_component_version_async(
self,
uid: str,
version_uid: str,
request: adp_20210720_models.GetComponentVersionRequest,
) -> adp_20210720_models.GetComponentVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_component_version_with_options_async(uid, version_uid, request, headers, runtime)
def get_deliverable_with_options(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetDeliverableResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetDeliverable',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/deliverables/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetDeliverableResponse(),
self.call_api(params, req, runtime)
)
async def get_deliverable_with_options_async(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetDeliverableResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetDeliverable',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/deliverables/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetDeliverableResponse(),
await self.call_api_async(params, req, runtime)
)
def get_deliverable(
self,
uid: str,
) -> adp_20210720_models.GetDeliverableResponse:
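        """
        Calls the GetDeliverable API for the deliverable identified by `uid`,
        with default runtime options and empty headers.
        """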
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_deliverable_with_options(uid, headers, runtime)
async def get_deliverable_async(
self,
uid: str,
) -> adp_20210720_models.GetDeliverableResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_deliverable_with_options_async(uid, headers, runtime)
def get_delivery_package_with_options(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetDeliveryPackageResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetDeliveryPackage',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-packages/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetDeliveryPackageResponse(),
self.call_api(params, req, runtime)
)
async def get_delivery_package_with_options_async(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetDeliveryPackageResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetDeliveryPackage',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-packages/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetDeliveryPackageResponse(),
await self.call_api_async(params, req, runtime)
)
def get_delivery_package(
self,
uid: str,
) -> adp_20210720_models.GetDeliveryPackageResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_delivery_package_with_options(uid, headers, runtime)
async def get_delivery_package_async(
self,
uid: str,
) -> adp_20210720_models.GetDeliveryPackageResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_delivery_package_with_options_async(uid, headers, runtime)
def get_environment_with_options(
self,
uid: str,
tmp_req: adp_20210720_models.GetEnvironmentRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetEnvironmentResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.GetEnvironmentShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.options):
request.options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.options, 'options', 'json')
query = {}
if not UtilClient.is_unset(request.options_shrink):
query['options'] = request.options_shrink
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetEnvironment',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetEnvironmentResponse(),
self.call_api(params, req, runtime)
)
async def get_environment_with_options_async(
self,
uid: str,
tmp_req: adp_20210720_models.GetEnvironmentRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetEnvironmentResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.GetEnvironmentShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.options):
request.options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.options, 'options', 'json')
query = {}
if not UtilClient.is_unset(request.options_shrink):
query['options'] = request.options_shrink
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetEnvironment',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetEnvironmentResponse(),
await self.call_api_async(params, req, runtime)
)
def get_environment(
self,
uid: str,
request: adp_20210720_models.GetEnvironmentRequest,
) -> adp_20210720_models.GetEnvironmentResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_environment_with_options(uid, request, headers, runtime)
async def get_environment_async(
self,
uid: str,
request: adp_20210720_models.GetEnvironmentRequest,
) -> adp_20210720_models.GetEnvironmentResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_environment_with_options_async(uid, request, headers, runtime)
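    # Usage sketch (illustrative only): `options` is passed as a plain list and serialized
    # to a JSON query string by the *_with_options helpers above. `client` and the values
    # below are placeholders.
    #
    #   request = adp_20210720_models.GetEnvironmentRequest(options=['<option>'])
    #   response = client.get_environment('<environment-uid>', request)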
def get_environment_delivery_instance_with_options(
self,
request: adp_20210720_models.GetEnvironmentDeliveryInstanceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetEnvironmentDeliveryInstanceResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.cluster_uid):
query['clusterUID'] = request.cluster_uid
if not UtilClient.is_unset(request.env_uid):
query['envUID'] = request.env_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetEnvironmentDeliveryInstance',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-instances',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetEnvironmentDeliveryInstanceResponse(),
self.call_api(params, req, runtime)
)
async def get_environment_delivery_instance_with_options_async(
self,
request: adp_20210720_models.GetEnvironmentDeliveryInstanceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetEnvironmentDeliveryInstanceResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.cluster_uid):
query['clusterUID'] = request.cluster_uid
if not UtilClient.is_unset(request.env_uid):
query['envUID'] = request.env_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetEnvironmentDeliveryInstance',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-instances',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetEnvironmentDeliveryInstanceResponse(),
await self.call_api_async(params, req, runtime)
)
def get_environment_delivery_instance(
self,
request: adp_20210720_models.GetEnvironmentDeliveryInstanceRequest,
) -> adp_20210720_models.GetEnvironmentDeliveryInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_environment_delivery_instance_with_options(request, headers, runtime)
async def get_environment_delivery_instance_async(
self,
request: adp_20210720_models.GetEnvironmentDeliveryInstanceRequest,
) -> adp_20210720_models.GetEnvironmentDeliveryInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_environment_delivery_instance_with_options_async(request, headers, runtime)
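    # Usage sketch (illustrative only): look up the delivery instance bound to an
    # environment/cluster pair; both UIDs are placeholders and `client` is assumed to be
    # a configured instance of this class.
    #
    #   request = adp_20210720_models.GetEnvironmentDeliveryInstanceRequest(
    #       cluster_uid='<cluster-uid>', env_uid='<environment-uid>')
    #   response = client.get_environment_delivery_instance(request)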
def get_environment_license_with_options(
self,
uid: str,
license_uid: str,
tmp_req: adp_20210720_models.GetEnvironmentLicenseRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetEnvironmentLicenseResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.GetEnvironmentLicenseShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.options):
request.options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.options, 'options', 'json')
query = {}
if not UtilClient.is_unset(request.options_shrink):
query['options'] = request.options_shrink
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetEnvironmentLicense',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/licenses/{OpenApiUtilClient.get_encode_param(license_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetEnvironmentLicenseResponse(),
self.call_api(params, req, runtime)
)
async def get_environment_license_with_options_async(
self,
uid: str,
license_uid: str,
tmp_req: adp_20210720_models.GetEnvironmentLicenseRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetEnvironmentLicenseResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.GetEnvironmentLicenseShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.options):
request.options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.options, 'options', 'json')
query = {}
if not UtilClient.is_unset(request.options_shrink):
query['options'] = request.options_shrink
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetEnvironmentLicense',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/licenses/{OpenApiUtilClient.get_encode_param(license_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetEnvironmentLicenseResponse(),
await self.call_api_async(params, req, runtime)
)
def get_environment_license(
self,
uid: str,
license_uid: str,
request: adp_20210720_models.GetEnvironmentLicenseRequest,
) -> adp_20210720_models.GetEnvironmentLicenseResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_environment_license_with_options(uid, license_uid, request, headers, runtime)
async def get_environment_license_async(
self,
uid: str,
license_uid: str,
request: adp_20210720_models.GetEnvironmentLicenseRequest,
) -> adp_20210720_models.GetEnvironmentLicenseResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_environment_license_with_options_async(uid, license_uid, request, headers, runtime)
def get_environment_node_with_options(
self,
uid: str,
node_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetEnvironmentNodeResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetEnvironmentNode',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/nodes/{OpenApiUtilClient.get_encode_param(node_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetEnvironmentNodeResponse(),
self.call_api(params, req, runtime)
)
async def get_environment_node_with_options_async(
self,
uid: str,
node_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetEnvironmentNodeResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetEnvironmentNode',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/nodes/{OpenApiUtilClient.get_encode_param(node_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetEnvironmentNodeResponse(),
await self.call_api_async(params, req, runtime)
)
def get_environment_node(
self,
uid: str,
node_uid: str,
) -> adp_20210720_models.GetEnvironmentNodeResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_environment_node_with_options(uid, node_uid, headers, runtime)
async def get_environment_node_async(
self,
uid: str,
node_uid: str,
) -> adp_20210720_models.GetEnvironmentNodeResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_environment_node_with_options_async(uid, node_uid, headers, runtime)
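    # Usage sketch (illustrative only): node lookups take two path parameters and no
    # request object; both UIDs below are placeholders.
    #
    #   response = client.get_environment_node('<environment-uid>', '<node-uid>')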
def get_foundation_component_reference_with_options(
self,
component_reference_uid: str,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetFoundationComponentReferenceResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetFoundationComponentReference',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation-references/{OpenApiUtilClient.get_encode_param(uid)}/components/{OpenApiUtilClient.get_encode_param(component_reference_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetFoundationComponentReferenceResponse(),
self.call_api(params, req, runtime)
)
async def get_foundation_component_reference_with_options_async(
self,
component_reference_uid: str,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetFoundationComponentReferenceResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetFoundationComponentReference',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation-references/{OpenApiUtilClient.get_encode_param(uid)}/components/{OpenApiUtilClient.get_encode_param(component_reference_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetFoundationComponentReferenceResponse(),
await self.call_api_async(params, req, runtime)
)
def get_foundation_component_reference(
self,
component_reference_uid: str,
uid: str,
) -> adp_20210720_models.GetFoundationComponentReferenceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_foundation_component_reference_with_options(component_reference_uid, uid, headers, runtime)
async def get_foundation_component_reference_async(
self,
component_reference_uid: str,
uid: str,
) -> adp_20210720_models.GetFoundationComponentReferenceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_foundation_component_reference_with_options_async(component_reference_uid, uid, headers, runtime)
def get_foundation_reference_with_options(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetFoundationReferenceResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetFoundationReference',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation-references/{OpenApiUtilClient.get_encode_param(uid)}/info',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetFoundationReferenceResponse(),
self.call_api(params, req, runtime)
)
async def get_foundation_reference_with_options_async(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetFoundationReferenceResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetFoundationReference',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation-references/{OpenApiUtilClient.get_encode_param(uid)}/info',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetFoundationReferenceResponse(),
await self.call_api_async(params, req, runtime)
)
def get_foundation_reference(
self,
uid: str,
) -> adp_20210720_models.GetFoundationReferenceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_foundation_reference_with_options(uid, headers, runtime)
async def get_foundation_reference_async(
self,
uid: str,
) -> adp_20210720_models.GetFoundationReferenceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_foundation_reference_with_options_async(uid, headers, runtime)
def get_foundation_version_with_options(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetFoundationVersionResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetFoundationVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation/versions/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetFoundationVersionResponse(),
self.call_api(params, req, runtime)
)
async def get_foundation_version_with_options_async(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetFoundationVersionResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetFoundationVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation/versions/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetFoundationVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def get_foundation_version(
self,
uid: str,
) -> adp_20210720_models.GetFoundationVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_foundation_version_with_options(uid, headers, runtime)
async def get_foundation_version_async(
self,
uid: str,
) -> adp_20210720_models.GetFoundationVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_foundation_version_with_options_async(uid, headers, runtime)
def get_product_with_options(
self,
uid: str,
request: adp_20210720_models.GetProductRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.with_icon_url):
query['withIconURL'] = request.with_icon_url
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetProduct',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/products/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductResponse(),
self.call_api(params, req, runtime)
)
async def get_product_with_options_async(
self,
uid: str,
request: adp_20210720_models.GetProductRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.with_icon_url):
query['withIconURL'] = request.with_icon_url
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetProduct',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/products/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductResponse(),
await self.call_api_async(params, req, runtime)
)
def get_product(
self,
uid: str,
request: adp_20210720_models.GetProductRequest,
) -> adp_20210720_models.GetProductResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_product_with_options(uid, request, headers, runtime)
async def get_product_async(
self,
uid: str,
request: adp_20210720_models.GetProductRequest,
) -> adp_20210720_models.GetProductResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_product_with_options_async(uid, request, headers, runtime)
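    # Usage sketch (illustrative only): setting `with_icon_url` adds the withIconURL query
    # parameter built above (presumably to include an icon URL in the response); the UID
    # is a placeholder.
    #
    #   request = adp_20210720_models.GetProductRequest(with_icon_url=True)
    #   response = client.get_product('<product-uid>', request)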
def get_product_component_version_with_options(
self,
relation_uid: str,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductComponentVersionResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetProductComponentVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/relations/{OpenApiUtilClient.get_encode_param(relation_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductComponentVersionResponse(),
self.call_api(params, req, runtime)
)
async def get_product_component_version_with_options_async(
self,
relation_uid: str,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductComponentVersionResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='GetProductComponentVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/relations/{OpenApiUtilClient.get_encode_param(relation_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductComponentVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def get_product_component_version(
self,
relation_uid: str,
uid: str,
) -> adp_20210720_models.GetProductComponentVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_product_component_version_with_options(relation_uid, uid, headers, runtime)
async def get_product_component_version_async(
self,
relation_uid: str,
uid: str,
) -> adp_20210720_models.GetProductComponentVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_product_component_version_with_options_async(relation_uid, uid, headers, runtime)
def get_product_deployment_with_options(
self,
deployment_uid: str,
request: adp_20210720_models.GetProductDeploymentRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductDeploymentResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.environment_uid):
query['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.with_param_config):
query['withParamConfig'] = request.with_param_config
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetProductDeployment',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances/deployments/{OpenApiUtilClient.get_encode_param(deployment_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductDeploymentResponse(),
self.call_api(params, req, runtime)
)
async def get_product_deployment_with_options_async(
self,
deployment_uid: str,
request: adp_20210720_models.GetProductDeploymentRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductDeploymentResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.environment_uid):
query['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.with_param_config):
query['withParamConfig'] = request.with_param_config
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetProductDeployment',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances/deployments/{OpenApiUtilClient.get_encode_param(deployment_uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductDeploymentResponse(),
await self.call_api_async(params, req, runtime)
)
def get_product_deployment(
self,
deployment_uid: str,
request: adp_20210720_models.GetProductDeploymentRequest,
) -> adp_20210720_models.GetProductDeploymentResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_product_deployment_with_options(deployment_uid, request, headers, runtime)
async def get_product_deployment_async(
self,
deployment_uid: str,
request: adp_20210720_models.GetProductDeploymentRequest,
) -> adp_20210720_models.GetProductDeploymentResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_product_deployment_with_options_async(deployment_uid, request, headers, runtime)
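    # Usage sketch (illustrative only): every request field is an optional query parameter
    # handled above; unset fields are simply omitted. Values are placeholders.
    #
    #   request = adp_20210720_models.GetProductDeploymentRequest(
    #       environment_uid='<environment-uid>', with_param_config=True)
    #   response = client.get_product_deployment('<deployment-uid>', request)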
def get_product_version_with_options(
self,
uid: str,
request: adp_20210720_models.GetProductVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductVersionResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.with_documentation_url):
query['withDocumentationURL'] = request.with_documentation_url
if not UtilClient.is_unset(request.with_extend_resource_url):
query['withExtendResourceURL'] = request.with_extend_resource_url
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductVersionResponse(),
self.call_api(params, req, runtime)
)
async def get_product_version_with_options_async(
self,
uid: str,
request: adp_20210720_models.GetProductVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductVersionResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.with_documentation_url):
query['withDocumentationURL'] = request.with_documentation_url
if not UtilClient.is_unset(request.with_extend_resource_url):
query['withExtendResourceURL'] = request.with_extend_resource_url
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def get_product_version(
self,
uid: str,
request: adp_20210720_models.GetProductVersionRequest,
) -> adp_20210720_models.GetProductVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_product_version_with_options(uid, request, headers, runtime)
async def get_product_version_async(
self,
uid: str,
request: adp_20210720_models.GetProductVersionRequest,
) -> adp_20210720_models.GetProductVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_product_version_with_options_async(uid, request, headers, runtime)
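    # Usage sketch (illustrative only): the two flags map to the withDocumentationURL and
    # withExtendResourceURL query parameters built above; the UID is a placeholder.
    #
    #   request = adp_20210720_models.GetProductVersionRequest(
    #       with_documentation_url=True, with_extend_resource_url=False)
    #   response = client.get_product_version('<product-version-uid>', request)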
def get_product_version_differences_with_options(
self,
uid: str,
version_uid: str,
request: adp_20210720_models.GetProductVersionDifferencesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductVersionDifferencesResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.pre_version_uid):
query['preVersionUID'] = request.pre_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetProductVersionDifferences',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/products/{OpenApiUtilClient.get_encode_param(uid)}/versions/{OpenApiUtilClient.get_encode_param(version_uid)}/differences',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductVersionDifferencesResponse(),
self.call_api(params, req, runtime)
)
async def get_product_version_differences_with_options_async(
self,
uid: str,
version_uid: str,
request: adp_20210720_models.GetProductVersionDifferencesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductVersionDifferencesResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.pre_version_uid):
query['preVersionUID'] = request.pre_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetProductVersionDifferences',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/integration/api/v2/products/{OpenApiUtilClient.get_encode_param(uid)}/versions/{OpenApiUtilClient.get_encode_param(version_uid)}/differences',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductVersionDifferencesResponse(),
await self.call_api_async(params, req, runtime)
)
def get_product_version_differences(
self,
uid: str,
version_uid: str,
request: adp_20210720_models.GetProductVersionDifferencesRequest,
) -> adp_20210720_models.GetProductVersionDifferencesResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_product_version_differences_with_options(uid, version_uid, request, headers, runtime)
async def get_product_version_differences_async(
self,
uid: str,
version_uid: str,
request: adp_20210720_models.GetProductVersionDifferencesRequest,
) -> adp_20210720_models.GetProductVersionDifferencesResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_product_version_differences_with_options_async(uid, version_uid, request, headers, runtime)
def get_product_version_package_with_options(
self,
uid: str,
request: adp_20210720_models.GetProductVersionPackageRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductVersionPackageResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.foundation_reference_uid):
query['foundationReferenceUID'] = request.foundation_reference_uid
if not UtilClient.is_unset(request.old_foundation_reference_uid):
query['oldFoundationReferenceUID'] = request.old_foundation_reference_uid
if not UtilClient.is_unset(request.old_product_version_uid):
query['oldProductVersionUID'] = request.old_product_version_uid
if not UtilClient.is_unset(request.package_content_type):
query['packageContentType'] = request.package_content_type
if not UtilClient.is_unset(request.package_type):
query['packageType'] = request.package_type
if not UtilClient.is_unset(request.package_uid):
query['packageUID'] = request.package_uid
if not UtilClient.is_unset(request.platform):
query['platform'] = request.platform
if not UtilClient.is_unset(request.with_url):
query['withURL'] = request.with_url
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetProductVersionPackage',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/hosting/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/packages',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductVersionPackageResponse(),
self.call_api(params, req, runtime)
)
async def get_product_version_package_with_options_async(
self,
uid: str,
request: adp_20210720_models.GetProductVersionPackageRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetProductVersionPackageResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.foundation_reference_uid):
query['foundationReferenceUID'] = request.foundation_reference_uid
if not UtilClient.is_unset(request.old_foundation_reference_uid):
query['oldFoundationReferenceUID'] = request.old_foundation_reference_uid
if not UtilClient.is_unset(request.old_product_version_uid):
query['oldProductVersionUID'] = request.old_product_version_uid
if not UtilClient.is_unset(request.package_content_type):
query['packageContentType'] = request.package_content_type
if not UtilClient.is_unset(request.package_type):
query['packageType'] = request.package_type
if not UtilClient.is_unset(request.package_uid):
query['packageUID'] = request.package_uid
if not UtilClient.is_unset(request.platform):
query['platform'] = request.platform
if not UtilClient.is_unset(request.with_url):
query['withURL'] = request.with_url
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetProductVersionPackage',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/hosting/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/packages',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetProductVersionPackageResponse(),
await self.call_api_async(params, req, runtime)
)
def get_product_version_package(
self,
uid: str,
request: adp_20210720_models.GetProductVersionPackageRequest,
) -> adp_20210720_models.GetProductVersionPackageResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_product_version_package_with_options(uid, request, headers, runtime)
async def get_product_version_package_async(
self,
uid: str,
request: adp_20210720_models.GetProductVersionPackageRequest,
) -> adp_20210720_models.GetProductVersionPackageResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_product_version_package_with_options_async(uid, request, headers, runtime)
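    # Usage sketch (illustrative only): request a package for a product version. Only a
    # subset of the optional query parameters is shown and every value is a placeholder.
    #
    #   request = adp_20210720_models.GetProductVersionPackageRequest(
    #       package_type='<package-type>', platform='<platform>', with_url=True)
    #   response = client.get_product_version_package('<product-version-uid>', request)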
def get_resource_snapshot_with_options(
self,
request: adp_20210720_models.GetResourceSnapshotRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetResourceSnapshotResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.uid):
query['uid'] = request.uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetResourceSnapshot',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/resource-snapshots',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetResourceSnapshotResponse(),
self.call_api(params, req, runtime)
)
async def get_resource_snapshot_with_options_async(
self,
request: adp_20210720_models.GetResourceSnapshotRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetResourceSnapshotResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.uid):
query['uid'] = request.uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetResourceSnapshot',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/resource-snapshots',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetResourceSnapshotResponse(),
await self.call_api_async(params, req, runtime)
)
def get_resource_snapshot(
self,
request: adp_20210720_models.GetResourceSnapshotRequest,
) -> adp_20210720_models.GetResourceSnapshotResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_resource_snapshot_with_options(request, headers, runtime)
async def get_resource_snapshot_async(
self,
request: adp_20210720_models.GetResourceSnapshotRequest,
) -> adp_20210720_models.GetResourceSnapshotResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_resource_snapshot_with_options_async(request, headers, runtime)
def get_workflow_status_with_options(
self,
request: adp_20210720_models.GetWorkflowStatusRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetWorkflowStatusResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.workflow_type):
query['workflowType'] = request.workflow_type
if not UtilClient.is_unset(request.xuid):
query['xuid'] = request.xuid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetWorkflowStatus',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/workflows/status',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetWorkflowStatusResponse(),
self.call_api(params, req, runtime)
)
async def get_workflow_status_with_options_async(
self,
request: adp_20210720_models.GetWorkflowStatusRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.GetWorkflowStatusResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.workflow_type):
query['workflowType'] = request.workflow_type
if not UtilClient.is_unset(request.xuid):
query['xuid'] = request.xuid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='GetWorkflowStatus',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/workflows/status',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.GetWorkflowStatusResponse(),
await self.call_api_async(params, req, runtime)
)
def get_workflow_status(
self,
request: adp_20210720_models.GetWorkflowStatusRequest,
) -> adp_20210720_models.GetWorkflowStatusResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_workflow_status_with_options(request, headers, runtime)
async def get_workflow_status_async(
self,
request: adp_20210720_models.GetWorkflowStatusRequest,
) -> adp_20210720_models.GetWorkflowStatusResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_workflow_status_with_options_async(request, headers, runtime)
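    # Usage sketch (illustrative only): check the status of a workflow by type and xuid;
    # both values are placeholders.
    #
    #   request = adp_20210720_models.GetWorkflowStatusRequest(
    #       workflow_type='<workflow-type>', xuid='<xuid>')
    #   response = client.get_workflow_status(request)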
def init_environment_resource_with_options(
self,
uid: str,
request: adp_20210720_models.InitEnvironmentResourceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.InitEnvironmentResourceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.access_key_id):
body['accessKeyID'] = request.access_key_id
if not UtilClient.is_unset(request.access_key_secret):
body['accessKeySecret'] = request.access_key_secret
if not UtilClient.is_unset(request.security_token):
body['securityToken'] = request.security_token
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='InitEnvironmentResource',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/resources',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.InitEnvironmentResourceResponse(),
self.call_api(params, req, runtime)
)
async def init_environment_resource_with_options_async(
self,
uid: str,
request: adp_20210720_models.InitEnvironmentResourceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.InitEnvironmentResourceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.access_key_id):
body['accessKeyID'] = request.access_key_id
if not UtilClient.is_unset(request.access_key_secret):
body['accessKeySecret'] = request.access_key_secret
if not UtilClient.is_unset(request.security_token):
body['securityToken'] = request.security_token
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='InitEnvironmentResource',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/resources',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.InitEnvironmentResourceResponse(),
await self.call_api_async(params, req, runtime)
)
def init_environment_resource(
self,
uid: str,
request: adp_20210720_models.InitEnvironmentResourceRequest,
) -> adp_20210720_models.InitEnvironmentResourceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.init_environment_resource_with_options(uid, request, headers, runtime)
async def init_environment_resource_async(
self,
uid: str,
request: adp_20210720_models.InitEnvironmentResourceRequest,
) -> adp_20210720_models.InitEnvironmentResourceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.init_environment_resource_with_options_async(uid, request, headers, runtime)
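    # Usage sketch (illustrative only): this call sends AccessKey/STS credentials in the
    # request body (accessKeyID/accessKeySecret/securityToken above), so short-lived STS
    # tokens are preferable; all values are placeholders.
    #
    #   request = adp_20210720_models.InitEnvironmentResourceRequest(
    #       access_key_id='<sts-ak>', access_key_secret='<sts-sk>', security_token='<sts-token>')
    #   response = client.init_environment_resource('<environment-uid>', request)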
def list_component_versions_with_options(
self,
uid: str,
tmp_req: adp_20210720_models.ListComponentVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListComponentVersionsResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.ListComponentVersionsShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.platforms):
request.platforms_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.platforms, 'platforms', 'json')
query = {}
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.platforms_shrink):
query['platforms'] = request.platforms_shrink
if not UtilClient.is_unset(request.version):
query['version'] = request.version
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListComponentVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/components/{OpenApiUtilClient.get_encode_param(uid)}/versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListComponentVersionsResponse(),
self.call_api(params, req, runtime)
)
async def list_component_versions_with_options_async(
self,
uid: str,
tmp_req: adp_20210720_models.ListComponentVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListComponentVersionsResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.ListComponentVersionsShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.platforms):
request.platforms_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.platforms, 'platforms', 'json')
query = {}
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.platforms_shrink):
query['platforms'] = request.platforms_shrink
if not UtilClient.is_unset(request.version):
query['version'] = request.version
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListComponentVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/components/{OpenApiUtilClient.get_encode_param(uid)}/versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListComponentVersionsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_component_versions(
self,
uid: str,
request: adp_20210720_models.ListComponentVersionsRequest,
) -> adp_20210720_models.ListComponentVersionsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_component_versions_with_options(uid, request, headers, runtime)
async def list_component_versions_async(
self,
uid: str,
request: adp_20210720_models.ListComponentVersionsRequest,
) -> adp_20210720_models.ListComponentVersionsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_component_versions_with_options_async(uid, request, headers, runtime)
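    # Usage sketch (illustrative only): `platforms` is a list serialized to a JSON query
    # string by the helpers above; the remaining fields are passed through unchanged.
    # Values are placeholders.
    #
    #   request = adp_20210720_models.ListComponentVersionsRequest(
    #       page_num=1, page_size=20, platforms=['<platform>'])
    #   response = client.list_component_versions('<component-uid>', request)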
def list_components_with_options(
self,
request: adp_20210720_models.ListComponentsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListComponentsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.fuzzy):
query['fuzzy'] = request.fuzzy
if not UtilClient.is_unset(request.name):
query['name'] = request.name
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.public):
query['public'] = request.public
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListComponents',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/components',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListComponentsResponse(),
self.call_api(params, req, runtime)
)
async def list_components_with_options_async(
self,
request: adp_20210720_models.ListComponentsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListComponentsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.fuzzy):
query['fuzzy'] = request.fuzzy
if not UtilClient.is_unset(request.name):
query['name'] = request.name
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.public):
query['public'] = request.public
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListComponents',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/components',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListComponentsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_components(
self,
request: adp_20210720_models.ListComponentsRequest,
) -> adp_20210720_models.ListComponentsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_components_with_options(request, headers, runtime)
async def list_components_async(
self,
request: adp_20210720_models.ListComponentsRequest,
) -> adp_20210720_models.ListComponentsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_components_with_options_async(request, headers, runtime)
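    # Usage sketch (illustrative only): list components with name filtering and paging;
    # every value below is a placeholder.
    #
    #   request = adp_20210720_models.ListComponentsRequest(
    #       name='<keyword>', page_num=1, page_size=20)
    #   response = client.list_components(request)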
def list_delivery_instance_change_records_with_options(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListDeliveryInstanceChangeRecordsResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='ListDeliveryInstanceChangeRecords',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-instances/{OpenApiUtilClient.get_encode_param(uid)}/delivery-records',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListDeliveryInstanceChangeRecordsResponse(),
self.call_api(params, req, runtime)
)
async def list_delivery_instance_change_records_with_options_async(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListDeliveryInstanceChangeRecordsResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='ListDeliveryInstanceChangeRecords',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-instances/{OpenApiUtilClient.get_encode_param(uid)}/delivery-records',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListDeliveryInstanceChangeRecordsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_delivery_instance_change_records(
self,
uid: str,
) -> adp_20210720_models.ListDeliveryInstanceChangeRecordsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_delivery_instance_change_records_with_options(uid, headers, runtime)
async def list_delivery_instance_change_records_async(
self,
uid: str,
) -> adp_20210720_models.ListDeliveryInstanceChangeRecordsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_delivery_instance_change_records_with_options_async(uid, headers, runtime)
def list_delivery_package_with_options(
self,
request: adp_20210720_models.ListDeliveryPackageRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListDeliveryPackageResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.deliverable_uid):
query['deliverableUID'] = request.deliverable_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListDeliveryPackage',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-packages',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListDeliveryPackageResponse(),
self.call_api(params, req, runtime)
)
async def list_delivery_package_with_options_async(
self,
request: adp_20210720_models.ListDeliveryPackageRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListDeliveryPackageResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.deliverable_uid):
query['deliverableUID'] = request.deliverable_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListDeliveryPackage',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-packages',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListDeliveryPackageResponse(),
await self.call_api_async(params, req, runtime)
)
def list_delivery_package(
self,
request: adp_20210720_models.ListDeliveryPackageRequest,
) -> adp_20210720_models.ListDeliveryPackageResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_delivery_package_with_options(request, headers, runtime)
async def list_delivery_package_async(
self,
request: adp_20210720_models.ListDeliveryPackageRequest,
) -> adp_20210720_models.ListDeliveryPackageResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_delivery_package_with_options_async(request, headers, runtime)
def list_environment_licenses_with_options(
self,
uid: str,
request: adp_20210720_models.ListEnvironmentLicensesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListEnvironmentLicensesResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.scope):
query['scope'] = request.scope
if not UtilClient.is_unset(request.type):
query['type'] = request.type
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListEnvironmentLicenses',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/licenses',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListEnvironmentLicensesResponse(),
self.call_api(params, req, runtime)
)
async def list_environment_licenses_with_options_async(
self,
uid: str,
request: adp_20210720_models.ListEnvironmentLicensesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListEnvironmentLicensesResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.scope):
query['scope'] = request.scope
if not UtilClient.is_unset(request.type):
query['type'] = request.type
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListEnvironmentLicenses',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/licenses',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListEnvironmentLicensesResponse(),
await self.call_api_async(params, req, runtime)
)
def list_environment_licenses(
self,
uid: str,
request: adp_20210720_models.ListEnvironmentLicensesRequest,
) -> adp_20210720_models.ListEnvironmentLicensesResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_environment_licenses_with_options(uid, request, headers, runtime)
async def list_environment_licenses_async(
self,
uid: str,
request: adp_20210720_models.ListEnvironmentLicensesRequest,
) -> adp_20210720_models.ListEnvironmentLicensesResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_environment_licenses_with_options_async(uid, request, headers, runtime)
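    # Usage sketch (illustrative only): licenses can be filtered by scope/type and paged;
    # values are placeholders (`type` mirrors the request field used above).
    #
    #   request = adp_20210720_models.ListEnvironmentLicensesRequest(
    #       scope='<scope>', type='<license-type>', page_num=1, page_size=20)
    #   response = client.list_environment_licenses('<environment-uid>', request)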
def list_environment_nodes_with_options(
self,
uid: str,
request: adp_20210720_models.ListEnvironmentNodesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListEnvironmentNodesResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListEnvironmentNodes',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/nodes',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListEnvironmentNodesResponse(),
self.call_api(params, req, runtime)
)
async def list_environment_nodes_with_options_async(
self,
uid: str,
request: adp_20210720_models.ListEnvironmentNodesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListEnvironmentNodesResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListEnvironmentNodes',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/nodes',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListEnvironmentNodesResponse(),
await self.call_api_async(params, req, runtime)
)
def list_environment_nodes(
self,
uid: str,
request: adp_20210720_models.ListEnvironmentNodesRequest,
) -> adp_20210720_models.ListEnvironmentNodesResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_environment_nodes_with_options(uid, request, headers, runtime)
async def list_environment_nodes_async(
self,
uid: str,
request: adp_20210720_models.ListEnvironmentNodesRequest,
) -> adp_20210720_models.ListEnvironmentNodesResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_environment_nodes_with_options_async(uid, request, headers, runtime)
def list_environment_tunnels_with_options(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListEnvironmentTunnelsResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='ListEnvironmentTunnels',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/tunnels',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListEnvironmentTunnelsResponse(),
self.call_api(params, req, runtime)
)
async def list_environment_tunnels_with_options_async(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListEnvironmentTunnelsResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='ListEnvironmentTunnels',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/tunnels',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListEnvironmentTunnelsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_environment_tunnels(
self,
uid: str,
) -> adp_20210720_models.ListEnvironmentTunnelsResponse:
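        """
        Lists the tunnels configured for the environment specified by uid
        (GET /api/v2/environments/{uid}/tunnels).

        @param uid: environment UID
        @return: ListEnvironmentTunnelsResponse
        """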
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_environment_tunnels_with_options(uid, headers, runtime)
async def list_environment_tunnels_async(
self,
uid: str,
) -> adp_20210720_models.ListEnvironmentTunnelsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_environment_tunnels_with_options_async(uid, headers, runtime)
def list_environments_with_options(
self,
request: adp_20210720_models.ListEnvironmentsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListEnvironmentsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.cluster_uid):
query['clusterUID'] = request.cluster_uid
if not UtilClient.is_unset(request.foundation_type):
query['foundationType'] = request.foundation_type
if not UtilClient.is_unset(request.fuzzy):
query['fuzzy'] = request.fuzzy
if not UtilClient.is_unset(request.instance_status):
query['instanceStatus'] = request.instance_status
if not UtilClient.is_unset(request.name):
query['name'] = request.name
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.type):
query['type'] = request.type
if not UtilClient.is_unset(request.vendor_type):
query['vendorType'] = request.vendor_type
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListEnvironments',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListEnvironmentsResponse(),
self.call_api(params, req, runtime)
)
async def list_environments_with_options_async(
self,
request: adp_20210720_models.ListEnvironmentsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListEnvironmentsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.cluster_uid):
query['clusterUID'] = request.cluster_uid
if not UtilClient.is_unset(request.foundation_type):
query['foundationType'] = request.foundation_type
if not UtilClient.is_unset(request.fuzzy):
query['fuzzy'] = request.fuzzy
if not UtilClient.is_unset(request.instance_status):
query['instanceStatus'] = request.instance_status
if not UtilClient.is_unset(request.name):
query['name'] = request.name
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.type):
query['type'] = request.type
if not UtilClient.is_unset(request.vendor_type):
query['vendorType'] = request.vendor_type
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListEnvironments',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListEnvironmentsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_environments(
self,
request: adp_20210720_models.ListEnvironmentsRequest,
) -> adp_20210720_models.ListEnvironmentsResponse:
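        """
        Lists environments (GET /api/v2/environments). The request supports filtering by
        clusterUID, foundationType, fuzzy, instanceStatus, name, type and vendorType,
        plus pageNum/pageSize pagination.

        @param request: ListEnvironmentsRequest
        @return: ListEnvironmentsResponse
        """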
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_environments_with_options(request, headers, runtime)
async def list_environments_async(
self,
request: adp_20210720_models.ListEnvironmentsRequest,
) -> adp_20210720_models.ListEnvironmentsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_environments_with_options_async(request, headers, runtime)
def list_foundation_component_versions_with_options(
self,
uid: str,
request: adp_20210720_models.ListFoundationComponentVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListFoundationComponentVersionsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.parent_component_relation_uid):
query['parentComponentRelationUID'] = request.parent_component_relation_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListFoundationComponentVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation/versions/{OpenApiUtilClient.get_encode_param(uid)}/component-versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListFoundationComponentVersionsResponse(),
self.call_api(params, req, runtime)
)
async def list_foundation_component_versions_with_options_async(
self,
uid: str,
request: adp_20210720_models.ListFoundationComponentVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListFoundationComponentVersionsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.parent_component_relation_uid):
query['parentComponentRelationUID'] = request.parent_component_relation_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListFoundationComponentVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation/versions/{OpenApiUtilClient.get_encode_param(uid)}/component-versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListFoundationComponentVersionsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_foundation_component_versions(
self,
uid: str,
request: adp_20210720_models.ListFoundationComponentVersionsRequest,
) -> adp_20210720_models.ListFoundationComponentVersionsResponse:
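        """
        Lists the component versions of the foundation version specified by uid
        (GET /api/v2/foundation/versions/{uid}/component-versions); results can be
        narrowed by parentComponentRelationUID.

        @param uid: foundation version UID
        @param request: ListFoundationComponentVersionsRequest
        @return: ListFoundationComponentVersionsResponse
        """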
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_foundation_component_versions_with_options(uid, request, headers, runtime)
async def list_foundation_component_versions_async(
self,
uid: str,
request: adp_20210720_models.ListFoundationComponentVersionsRequest,
) -> adp_20210720_models.ListFoundationComponentVersionsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_foundation_component_versions_with_options_async(uid, request, headers, runtime)
def list_foundation_reference_components_with_options(
self,
request: adp_20210720_models.ListFoundationReferenceComponentsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListFoundationReferenceComponentsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.foundation_reference_uid):
query['foundationReferenceUID'] = request.foundation_reference_uid
if not UtilClient.is_unset(request.foundation_version_uid):
query['foundationVersionUID'] = request.foundation_version_uid
if not UtilClient.is_unset(request.only_enabled):
query['onlyEnabled'] = request.only_enabled
if not UtilClient.is_unset(request.parent_component_reference_uid):
query['parentComponentReferenceUID'] = request.parent_component_reference_uid
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListFoundationReferenceComponents',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation-references/component-versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListFoundationReferenceComponentsResponse(),
self.call_api(params, req, runtime)
)
async def list_foundation_reference_components_with_options_async(
self,
request: adp_20210720_models.ListFoundationReferenceComponentsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListFoundationReferenceComponentsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.foundation_reference_uid):
query['foundationReferenceUID'] = request.foundation_reference_uid
if not UtilClient.is_unset(request.foundation_version_uid):
query['foundationVersionUID'] = request.foundation_version_uid
if not UtilClient.is_unset(request.only_enabled):
query['onlyEnabled'] = request.only_enabled
if not UtilClient.is_unset(request.parent_component_reference_uid):
query['parentComponentReferenceUID'] = request.parent_component_reference_uid
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListFoundationReferenceComponents',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation-references/component-versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListFoundationReferenceComponentsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_foundation_reference_components(
self,
request: adp_20210720_models.ListFoundationReferenceComponentsRequest,
) -> adp_20210720_models.ListFoundationReferenceComponentsResponse:
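        """
        Lists foundation reference component versions
        (GET /api/v2/foundation-references/component-versions), filterable by
        foundationReferenceUID, foundationVersionUID, onlyEnabled,
        parentComponentReferenceUID and productVersionUID.

        @param request: ListFoundationReferenceComponentsRequest
        @return: ListFoundationReferenceComponentsResponse
        """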
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_foundation_reference_components_with_options(request, headers, runtime)
async def list_foundation_reference_components_async(
self,
request: adp_20210720_models.ListFoundationReferenceComponentsRequest,
) -> adp_20210720_models.ListFoundationReferenceComponentsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_foundation_reference_components_with_options_async(request, headers, runtime)
def list_foundation_versions_with_options(
self,
request: adp_20210720_models.ListFoundationVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListFoundationVersionsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.sort_direct):
query['sortDirect'] = request.sort_direct
if not UtilClient.is_unset(request.sort_key):
query['sortKey'] = request.sort_key
if not UtilClient.is_unset(request.type):
query['type'] = request.type
if not UtilClient.is_unset(request.version):
query['version'] = request.version
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListFoundationVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation/versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListFoundationVersionsResponse(),
self.call_api(params, req, runtime)
)
async def list_foundation_versions_with_options_async(
self,
request: adp_20210720_models.ListFoundationVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListFoundationVersionsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.sort_direct):
query['sortDirect'] = request.sort_direct
if not UtilClient.is_unset(request.sort_key):
query['sortKey'] = request.sort_key
if not UtilClient.is_unset(request.type):
query['type'] = request.type
if not UtilClient.is_unset(request.version):
query['version'] = request.version
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListFoundationVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation/versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListFoundationVersionsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_foundation_versions(
self,
request: adp_20210720_models.ListFoundationVersionsRequest,
) -> adp_20210720_models.ListFoundationVersionsResponse:
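        """
        Lists foundation versions (GET /api/v2/foundation/versions) with optional
        type/version filters, sortKey/sortDirect ordering and pageNum/pageSize pagination.

        @param request: ListFoundationVersionsRequest
        @return: ListFoundationVersionsResponse
        """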
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_foundation_versions_with_options(request, headers, runtime)
async def list_foundation_versions_async(
self,
request: adp_20210720_models.ListFoundationVersionsRequest,
) -> adp_20210720_models.ListFoundationVersionsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_foundation_versions_with_options_async(request, headers, runtime)
def list_product_component_versions_with_options(
self,
uid: str,
request: adp_20210720_models.ListProductComponentVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductComponentVersionsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.release_name):
query['releaseName'] = request.release_name
if not UtilClient.is_unset(request.sort_direct):
query['sortDirect'] = request.sort_direct
if not UtilClient.is_unset(request.sort_key):
query['sortKey'] = request.sort_key
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductComponentVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/component-versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductComponentVersionsResponse(),
self.call_api(params, req, runtime)
)
async def list_product_component_versions_with_options_async(
self,
uid: str,
request: adp_20210720_models.ListProductComponentVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductComponentVersionsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.release_name):
query['releaseName'] = request.release_name
if not UtilClient.is_unset(request.sort_direct):
query['sortDirect'] = request.sort_direct
if not UtilClient.is_unset(request.sort_key):
query['sortKey'] = request.sort_key
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductComponentVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/component-versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductComponentVersionsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_product_component_versions(
self,
uid: str,
request: adp_20210720_models.ListProductComponentVersionsRequest,
) -> adp_20210720_models.ListProductComponentVersionsResponse:
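        """
        Lists the component versions of the product version specified by uid
        (GET /api/v2/product-versions/{uid}/component-versions), filterable by
        category and releaseName, with sorting and pagination.

        @param uid: product version UID
        @param request: ListProductComponentVersionsRequest
        @return: ListProductComponentVersionsResponse
        """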
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_product_component_versions_with_options(uid, request, headers, runtime)
async def list_product_component_versions_async(
self,
uid: str,
request: adp_20210720_models.ListProductComponentVersionsRequest,
) -> adp_20210720_models.ListProductComponentVersionsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_product_component_versions_with_options_async(uid, request, headers, runtime)
def list_product_deployments_with_options(
self,
request: adp_20210720_models.ListProductDeploymentsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductDeploymentsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.environment_uid):
query['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductDeployments',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances/deployments',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductDeploymentsResponse(),
self.call_api(params, req, runtime)
)
async def list_product_deployments_with_options_async(
self,
request: adp_20210720_models.ListProductDeploymentsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductDeploymentsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.environment_uid):
query['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductDeployments',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances/deployments',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductDeploymentsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_product_deployments(
self,
request: adp_20210720_models.ListProductDeploymentsRequest,
) -> adp_20210720_models.ListProductDeploymentsResponse:
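        """
        Lists product deployments (GET /api/v2/product-instances/deployments),
        filterable by environmentUID and productVersionUID, with pagination.

        @param request: ListProductDeploymentsRequest
        @return: ListProductDeploymentsResponse
        """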
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_product_deployments_with_options(request, headers, runtime)
async def list_product_deployments_async(
self,
request: adp_20210720_models.ListProductDeploymentsRequest,
) -> adp_20210720_models.ListProductDeploymentsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_product_deployments_with_options_async(request, headers, runtime)
def list_product_environments_with_options(
self,
uid: str,
tmp_req: adp_20210720_models.ListProductEnvironmentsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductEnvironmentsResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.ListProductEnvironmentsShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.options):
request.options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.options, 'options', 'json')
if not UtilClient.is_unset(tmp_req.platforms):
request.platforms_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.platforms, 'platforms', 'json')
query = {}
if not UtilClient.is_unset(request.compatible_product_version_uid):
query['compatibleProductVersionUID'] = request.compatible_product_version_uid
if not UtilClient.is_unset(request.env_type):
query['envType'] = request.env_type
if not UtilClient.is_unset(request.options_shrink):
query['options'] = request.options_shrink
if not UtilClient.is_unset(request.platforms_shrink):
query['platforms'] = request.platforms_shrink
if not UtilClient.is_unset(request.product_version_spec_uid):
query['productVersionSpecUID'] = request.product_version_spec_uid
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductEnvironments',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/hosting/products/{OpenApiUtilClient.get_encode_param(uid)}/environments',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductEnvironmentsResponse(),
self.call_api(params, req, runtime)
)
async def list_product_environments_with_options_async(
self,
uid: str,
tmp_req: adp_20210720_models.ListProductEnvironmentsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductEnvironmentsResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.ListProductEnvironmentsShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.options):
request.options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.options, 'options', 'json')
if not UtilClient.is_unset(tmp_req.platforms):
request.platforms_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.platforms, 'platforms', 'json')
query = {}
if not UtilClient.is_unset(request.compatible_product_version_uid):
query['compatibleProductVersionUID'] = request.compatible_product_version_uid
if not UtilClient.is_unset(request.env_type):
query['envType'] = request.env_type
if not UtilClient.is_unset(request.options_shrink):
query['options'] = request.options_shrink
if not UtilClient.is_unset(request.platforms_shrink):
query['platforms'] = request.platforms_shrink
if not UtilClient.is_unset(request.product_version_spec_uid):
query['productVersionSpecUID'] = request.product_version_spec_uid
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductEnvironments',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/hosting/products/{OpenApiUtilClient.get_encode_param(uid)}/environments',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductEnvironmentsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_product_environments(
self,
uid: str,
request: adp_20210720_models.ListProductEnvironmentsRequest,
) -> adp_20210720_models.ListProductEnvironmentsResponse:
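        """
        Lists the hosting environments of the product specified by uid
        (GET /api/v2/hosting/products/{uid}/environments). List-valued request fields
        (options, platforms) are serialized to JSON strings before the call.

        @param uid: product UID
        @param request: ListProductEnvironmentsRequest
        @return: ListProductEnvironmentsResponse
        """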
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_product_environments_with_options(uid, request, headers, runtime)
async def list_product_environments_async(
self,
uid: str,
request: adp_20210720_models.ListProductEnvironmentsRequest,
) -> adp_20210720_models.ListProductEnvironmentsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_product_environments_with_options_async(uid, request, headers, runtime)
def list_product_foundation_references_with_options(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductFoundationReferencesResponse:
"""
@deprecated
@param headers: map
@param runtime: runtime options for this request RuntimeOptions
@return: ListProductFoundationReferencesResponse
Deprecated
"""
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='ListProductFoundationReferences',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/foundation-references',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductFoundationReferencesResponse(),
self.call_api(params, req, runtime)
)
async def list_product_foundation_references_with_options_async(
self,
uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductFoundationReferencesResponse:
"""
@deprecated
@param headers: map
@param runtime: runtime options for this request RuntimeOptions
@return: ListProductFoundationReferencesResponse
Deprecated
"""
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='ListProductFoundationReferences',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/foundation-references',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductFoundationReferencesResponse(),
await self.call_api_async(params, req, runtime)
)
def list_product_foundation_references(
self,
uid: str,
) -> adp_20210720_models.ListProductFoundationReferencesResponse:
"""
@deprecated
@return: ListProductFoundationReferencesResponse
Deprecated
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_product_foundation_references_with_options(uid, headers, runtime)
async def list_product_foundation_references_async(
self,
uid: str,
) -> adp_20210720_models.ListProductFoundationReferencesResponse:
"""
@deprecated
@return: ListProductFoundationReferencesResponse
Deprecated
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_product_foundation_references_with_options_async(uid, headers, runtime)
def list_product_instance_configs_with_options(
self,
request: adp_20210720_models.ListProductInstanceConfigsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductInstanceConfigsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.environment_uid):
query['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.param_type):
query['paramType'] = request.param_type
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductInstanceConfigs',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances/configs',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductInstanceConfigsResponse(),
self.call_api(params, req, runtime)
)
async def list_product_instance_configs_with_options_async(
self,
request: adp_20210720_models.ListProductInstanceConfigsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductInstanceConfigsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.environment_uid):
query['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.param_type):
query['paramType'] = request.param_type
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductInstanceConfigs',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances/configs',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductInstanceConfigsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_product_instance_configs(
self,
request: adp_20210720_models.ListProductInstanceConfigsRequest,
) -> adp_20210720_models.ListProductInstanceConfigsResponse:
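        """
        Lists product instance configurations (GET /api/v2/product-instances/configs),
        filterable by environmentUID, paramType and productVersionUID, with pagination.

        @param request: ListProductInstanceConfigsRequest
        @return: ListProductInstanceConfigsResponse
        """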
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_product_instance_configs_with_options(request, headers, runtime)
async def list_product_instance_configs_async(
self,
request: adp_20210720_models.ListProductInstanceConfigsRequest,
) -> adp_20210720_models.ListProductInstanceConfigsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_product_instance_configs_with_options_async(request, headers, runtime)
def list_product_instances_with_options(
self,
tmp_req: adp_20210720_models.ListProductInstancesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductInstancesResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.ListProductInstancesShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.options):
request.options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.options, 'options', 'json')
query = {}
if not UtilClient.is_unset(request.env_uid):
query['envUID'] = request.env_uid
if not UtilClient.is_unset(request.options_shrink):
query['options'] = request.options_shrink
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductInstances',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductInstancesResponse(),
self.call_api(params, req, runtime)
)
async def list_product_instances_with_options_async(
self,
tmp_req: adp_20210720_models.ListProductInstancesRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductInstancesResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.ListProductInstancesShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.options):
request.options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.options, 'options', 'json')
query = {}
if not UtilClient.is_unset(request.env_uid):
query['envUID'] = request.env_uid
if not UtilClient.is_unset(request.options_shrink):
query['options'] = request.options_shrink
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.product_version_uid):
query['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductInstances',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductInstancesResponse(),
await self.call_api_async(params, req, runtime)
)
def list_product_instances(
self,
request: adp_20210720_models.ListProductInstancesRequest,
) -> adp_20210720_models.ListProductInstancesResponse:
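        """
        Lists product instances (GET /api/v2/product-instances), filterable by envUID,
        productVersionUID and JSON-serialized options, with pagination.

        @param request: ListProductInstancesRequest
        @return: ListProductInstancesResponse
        """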
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_product_instances_with_options(request, headers, runtime)
async def list_product_instances_async(
self,
request: adp_20210720_models.ListProductInstancesRequest,
) -> adp_20210720_models.ListProductInstancesResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_product_instances_with_options_async(request, headers, runtime)
def list_product_version_configs_with_options(
self,
uid: str,
request: adp_20210720_models.ListProductVersionConfigsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductVersionConfigsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.config_type):
query['configType'] = request.config_type
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.parameter):
query['parameter'] = request.parameter
if not UtilClient.is_unset(request.scope):
query['scope'] = request.scope
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductVersionConfigs',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/configs',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductVersionConfigsResponse(),
self.call_api(params, req, runtime)
)
async def list_product_version_configs_with_options_async(
self,
uid: str,
request: adp_20210720_models.ListProductVersionConfigsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductVersionConfigsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.config_type):
query['configType'] = request.config_type
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.parameter):
query['parameter'] = request.parameter
if not UtilClient.is_unset(request.scope):
query['scope'] = request.scope
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductVersionConfigs',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/configs',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductVersionConfigsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_product_version_configs(
self,
uid: str,
request: adp_20210720_models.ListProductVersionConfigsRequest,
) -> adp_20210720_models.ListProductVersionConfigsResponse:
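        """
        Lists the configurations of the product version specified by uid
        (GET /api/v2/product-versions/{uid}/configs), filterable by configType,
        parameter and scope, with pagination.

        @param uid: product version UID
        @param request: ListProductVersionConfigsRequest
        @return: ListProductVersionConfigsResponse
        """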
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_product_version_configs_with_options(uid, request, headers, runtime)
async def list_product_version_configs_async(
self,
uid: str,
request: adp_20210720_models.ListProductVersionConfigsRequest,
) -> adp_20210720_models.ListProductVersionConfigsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_product_version_configs_with_options_async(uid, request, headers, runtime)
def list_product_versions_with_options(
self,
tmp_req: adp_20210720_models.ListProductVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductVersionsResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.ListProductVersionsShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.platforms):
request.platforms_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.platforms, 'platforms', 'json')
if not UtilClient.is_unset(tmp_req.supported_foundation_types):
request.supported_foundation_types_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.supported_foundation_types, 'supportedFoundationTypes', 'json')
query = {}
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.platforms_shrink):
query['platforms'] = request.platforms_shrink
if not UtilClient.is_unset(request.product_name):
query['productName'] = request.product_name
if not UtilClient.is_unset(request.product_uid):
query['productUID'] = request.product_uid
if not UtilClient.is_unset(request.released):
query['released'] = request.released
if not UtilClient.is_unset(request.supported_foundation_types_shrink):
query['supportedFoundationTypes'] = request.supported_foundation_types_shrink
if not UtilClient.is_unset(request.version):
query['version'] = request.version
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductVersionsResponse(),
self.call_api(params, req, runtime)
)
async def list_product_versions_with_options_async(
self,
tmp_req: adp_20210720_models.ListProductVersionsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductVersionsResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.ListProductVersionsShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.platforms):
request.platforms_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.platforms, 'platforms', 'json')
if not UtilClient.is_unset(tmp_req.supported_foundation_types):
request.supported_foundation_types_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.supported_foundation_types, 'supportedFoundationTypes', 'json')
query = {}
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.platforms_shrink):
query['platforms'] = request.platforms_shrink
if not UtilClient.is_unset(request.product_name):
query['productName'] = request.product_name
if not UtilClient.is_unset(request.product_uid):
query['productUID'] = request.product_uid
if not UtilClient.is_unset(request.released):
query['released'] = request.released
if not UtilClient.is_unset(request.supported_foundation_types_shrink):
query['supportedFoundationTypes'] = request.supported_foundation_types_shrink
if not UtilClient.is_unset(request.version):
query['version'] = request.version
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProductVersions',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductVersionsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_product_versions(
self,
request: adp_20210720_models.ListProductVersionsRequest,
) -> adp_20210720_models.ListProductVersionsResponse:
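        """
        Lists product versions (GET /api/v2/product-versions), filterable by productUID,
        productName, version, released, platforms and supportedFoundationTypes
        (list fields are JSON-serialized), with pagination.

        @param request: ListProductVersionsRequest
        @return: ListProductVersionsResponse
        """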
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_product_versions_with_options(request, headers, runtime)
async def list_product_versions_async(
self,
request: adp_20210720_models.ListProductVersionsRequest,
) -> adp_20210720_models.ListProductVersionsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_product_versions_with_options_async(request, headers, runtime)
def list_products_with_options(
self,
request: adp_20210720_models.ListProductsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.description):
query['description'] = request.description
if not UtilClient.is_unset(request.fuzzy):
query['fuzzy'] = request.fuzzy
if not UtilClient.is_unset(request.name):
query['name'] = request.name
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProducts',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/products',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductsResponse(),
self.call_api(params, req, runtime)
)
async def list_products_with_options_async(
self,
request: adp_20210720_models.ListProductsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListProductsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.description):
query['description'] = request.description
if not UtilClient.is_unset(request.fuzzy):
query['fuzzy'] = request.fuzzy
if not UtilClient.is_unset(request.name):
query['name'] = request.name
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListProducts',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/products',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListProductsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_products(
self,
request: adp_20210720_models.ListProductsRequest,
) -> adp_20210720_models.ListProductsResponse:
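        """
        Lists products (GET /api/v2/products), filterable by name, description and
        fuzzy keyword, with pagination.

        @param request: ListProductsRequest
        @return: ListProductsResponse
        """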
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_products_with_options(request, headers, runtime)
async def list_products_async(
self,
request: adp_20210720_models.ListProductsRequest,
) -> adp_20210720_models.ListProductsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_products_with_options_async(request, headers, runtime)
def list_workflow_task_logs_with_options(
self,
step_name: str,
task_name: str,
tmp_req: adp_20210720_models.ListWorkflowTaskLogsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListWorkflowTaskLogsResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.ListWorkflowTaskLogsShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.filter_values):
request.filter_values_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.filter_values, 'filterValues', 'json')
query = {}
if not UtilClient.is_unset(request.filter_values_shrink):
query['filterValues'] = request.filter_values_shrink
if not UtilClient.is_unset(request.order_type):
query['orderType'] = request.order_type
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.workflow_type):
query['workflowType'] = request.workflow_type
if not UtilClient.is_unset(request.xuid):
query['xuid'] = request.xuid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListWorkflowTaskLogs',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/workflows/steps/{OpenApiUtilClient.get_encode_param(step_name)}/tasks/{OpenApiUtilClient.get_encode_param(task_name)}/logs',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListWorkflowTaskLogsResponse(),
self.call_api(params, req, runtime)
)
async def list_workflow_task_logs_with_options_async(
self,
step_name: str,
task_name: str,
tmp_req: adp_20210720_models.ListWorkflowTaskLogsRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ListWorkflowTaskLogsResponse:
UtilClient.validate_model(tmp_req)
request = adp_20210720_models.ListWorkflowTaskLogsShrinkRequest()
OpenApiUtilClient.convert(tmp_req, request)
if not UtilClient.is_unset(tmp_req.filter_values):
request.filter_values_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.filter_values, 'filterValues', 'json')
query = {}
if not UtilClient.is_unset(request.filter_values_shrink):
query['filterValues'] = request.filter_values_shrink
if not UtilClient.is_unset(request.order_type):
query['orderType'] = request.order_type
if not UtilClient.is_unset(request.page_num):
query['pageNum'] = request.page_num
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.workflow_type):
query['workflowType'] = request.workflow_type
if not UtilClient.is_unset(request.xuid):
query['xuid'] = request.xuid
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='ListWorkflowTaskLogs',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/workflows/steps/{OpenApiUtilClient.get_encode_param(step_name)}/tasks/{OpenApiUtilClient.get_encode_param(task_name)}/logs',
method='GET',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ListWorkflowTaskLogsResponse(),
await self.call_api_async(params, req, runtime)
)
def list_workflow_task_logs(
self,
step_name: str,
task_name: str,
request: adp_20210720_models.ListWorkflowTaskLogsRequest,
) -> adp_20210720_models.ListWorkflowTaskLogsResponse:
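        """
        Lists the logs of a workflow task
        (GET /api/v2/workflows/steps/{step_name}/tasks/{task_name}/logs), filterable by
        filterValues, orderType, workflowType and xuid, with pagination.

        @param step_name: workflow step name
        @param task_name: workflow task name
        @param request: ListWorkflowTaskLogsRequest
        @return: ListWorkflowTaskLogsResponse
        """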
runtime = util_models.RuntimeOptions()
headers = {}
return self.list_workflow_task_logs_with_options(step_name, task_name, request, headers, runtime)
async def list_workflow_task_logs_async(
self,
step_name: str,
task_name: str,
request: adp_20210720_models.ListWorkflowTaskLogsRequest,
) -> adp_20210720_models.ListWorkflowTaskLogsResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.list_workflow_task_logs_with_options_async(step_name, task_name, request, headers, runtime)
def put_environment_tunnel_with_options(
self,
uid: str,
request: adp_20210720_models.PutEnvironmentTunnelRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.PutEnvironmentTunnelResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.tunnel_config):
body['tunnelConfig'] = request.tunnel_config
if not UtilClient.is_unset(request.tunnel_type):
body['tunnelType'] = request.tunnel_type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='PutEnvironmentTunnel',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/tunnels',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.PutEnvironmentTunnelResponse(),
self.call_api(params, req, runtime)
)
async def put_environment_tunnel_with_options_async(
self,
uid: str,
request: adp_20210720_models.PutEnvironmentTunnelRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.PutEnvironmentTunnelResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.tunnel_config):
body['tunnelConfig'] = request.tunnel_config
if not UtilClient.is_unset(request.tunnel_type):
body['tunnelType'] = request.tunnel_type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='PutEnvironmentTunnel',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/tunnels',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.PutEnvironmentTunnelResponse(),
await self.call_api_async(params, req, runtime)
)
def put_environment_tunnel(
self,
uid: str,
request: adp_20210720_models.PutEnvironmentTunnelRequest,
) -> adp_20210720_models.PutEnvironmentTunnelResponse:
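        """
        Creates or updates the tunnel of the environment specified by uid
        (PUT /api/v2/environments/{uid}/tunnels) with the given tunnelType and tunnelConfig.

        @param uid: environment UID
        @param request: PutEnvironmentTunnelRequest
        @return: PutEnvironmentTunnelResponse
        """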
runtime = util_models.RuntimeOptions()
headers = {}
return self.put_environment_tunnel_with_options(uid, request, headers, runtime)
async def put_environment_tunnel_async(
self,
uid: str,
request: adp_20210720_models.PutEnvironmentTunnelRequest,
) -> adp_20210720_models.PutEnvironmentTunnelResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.put_environment_tunnel_with_options_async(uid, request, headers, runtime)
def put_product_instance_config_with_options(
self,
request: adp_20210720_models.PutProductInstanceConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.PutProductInstanceConfigResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_uid):
body['componentUID'] = request.component_uid
if not UtilClient.is_unset(request.component_version_uid):
body['componentVersionUID'] = request.component_version_uid
if not UtilClient.is_unset(request.config_uid):
body['configUID'] = request.config_uid
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.environment_uid):
body['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.parent_component_name):
body['parentComponentName'] = request.parent_component_name
if not UtilClient.is_unset(request.parent_component_version_uid):
body['parentComponentVersionUID'] = request.parent_component_version_uid
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.release_name):
body['releaseName'] = request.release_name
if not UtilClient.is_unset(request.scope):
body['scope'] = request.scope
if not UtilClient.is_unset(request.value):
body['value'] = request.value
if not UtilClient.is_unset(request.value_type):
body['valueType'] = request.value_type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='PutProductInstanceConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances/configs',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.PutProductInstanceConfigResponse(),
self.call_api(params, req, runtime)
)
async def put_product_instance_config_with_options_async(
self,
request: adp_20210720_models.PutProductInstanceConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.PutProductInstanceConfigResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_uid):
body['componentUID'] = request.component_uid
if not UtilClient.is_unset(request.component_version_uid):
body['componentVersionUID'] = request.component_version_uid
if not UtilClient.is_unset(request.config_uid):
body['configUID'] = request.config_uid
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.environment_uid):
body['environmentUID'] = request.environment_uid
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.parent_component_name):
body['parentComponentName'] = request.parent_component_name
if not UtilClient.is_unset(request.parent_component_version_uid):
body['parentComponentVersionUID'] = request.parent_component_version_uid
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
if not UtilClient.is_unset(request.release_name):
body['releaseName'] = request.release_name
if not UtilClient.is_unset(request.scope):
body['scope'] = request.scope
if not UtilClient.is_unset(request.value):
body['value'] = request.value
if not UtilClient.is_unset(request.value_type):
body['valueType'] = request.value_type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='PutProductInstanceConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-instances/configs',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.PutProductInstanceConfigResponse(),
await self.call_api_async(params, req, runtime)
)
def put_product_instance_config(
self,
request: adp_20210720_models.PutProductInstanceConfigRequest,
) -> adp_20210720_models.PutProductInstanceConfigResponse:
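        """
        Creates or updates a product instance configuration
        (PUT /api/v2/product-instances/configs); the request body carries the target
        environment, component, product version and the config name/value/scope.

        @param request: PutProductInstanceConfigRequest
        @return: PutProductInstanceConfigResponse
        """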
runtime = util_models.RuntimeOptions()
headers = {}
return self.put_product_instance_config_with_options(request, headers, runtime)
async def put_product_instance_config_async(
self,
request: adp_20210720_models.PutProductInstanceConfigRequest,
) -> adp_20210720_models.PutProductInstanceConfigResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.put_product_instance_config_with_options_async(request, headers, runtime)
def set_environment_foundation_reference_with_options(
self,
uid: str,
foundation_reference_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.SetEnvironmentFoundationReferenceResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='SetEnvironmentFoundationReference',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/foundation-references/{OpenApiUtilClient.get_encode_param(foundation_reference_uid)}',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.SetEnvironmentFoundationReferenceResponse(),
self.call_api(params, req, runtime)
)
async def set_environment_foundation_reference_with_options_async(
self,
uid: str,
foundation_reference_uid: str,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.SetEnvironmentFoundationReferenceResponse:
req = open_api_models.OpenApiRequest(
headers=headers
)
params = open_api_models.Params(
action='SetEnvironmentFoundationReference',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/foundation-references/{OpenApiUtilClient.get_encode_param(foundation_reference_uid)}',
method='POST',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.SetEnvironmentFoundationReferenceResponse(),
await self.call_api_async(params, req, runtime)
)
def set_environment_foundation_reference(
self,
uid: str,
foundation_reference_uid: str,
) -> adp_20210720_models.SetEnvironmentFoundationReferenceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.set_environment_foundation_reference_with_options(uid, foundation_reference_uid, headers, runtime)
async def set_environment_foundation_reference_async(
self,
uid: str,
foundation_reference_uid: str,
) -> adp_20210720_models.SetEnvironmentFoundationReferenceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.set_environment_foundation_reference_with_options_async(uid, foundation_reference_uid, headers, runtime)
def update_deliverable_with_options(
self,
uid: str,
request: adp_20210720_models.UpdateDeliverableRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateDeliverableResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.foundation):
body['foundation'] = request.foundation
if not UtilClient.is_unset(request.products):
body['products'] = request.products
if not UtilClient.is_unset(request.status):
body['status'] = request.status
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateDeliverable',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/deliverables/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateDeliverableResponse(),
self.call_api(params, req, runtime)
)
async def update_deliverable_with_options_async(
self,
uid: str,
request: adp_20210720_models.UpdateDeliverableRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateDeliverableResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.foundation):
body['foundation'] = request.foundation
if not UtilClient.is_unset(request.products):
body['products'] = request.products
if not UtilClient.is_unset(request.status):
body['status'] = request.status
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateDeliverable',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/deliverables/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateDeliverableResponse(),
await self.call_api_async(params, req, runtime)
)
def update_deliverable(
self,
uid: str,
request: adp_20210720_models.UpdateDeliverableRequest,
) -> adp_20210720_models.UpdateDeliverableResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_deliverable_with_options(uid, request, headers, runtime)
async def update_deliverable_async(
self,
uid: str,
request: adp_20210720_models.UpdateDeliverableRequest,
) -> adp_20210720_models.UpdateDeliverableResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_deliverable_with_options_async(uid, request, headers, runtime)
def update_delivery_instance_with_options(
self,
uid: str,
request: adp_20210720_models.UpdateDeliveryInstanceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateDeliveryInstanceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.deliverable_config_uid):
body['deliverableConfigUID'] = request.deliverable_config_uid
if not UtilClient.is_unset(request.deliverable_uid):
body['deliverableUID'] = request.deliverable_uid
if not UtilClient.is_unset(request.desc):
body['desc'] = request.desc
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateDeliveryInstance',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-instances/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateDeliveryInstanceResponse(),
self.call_api(params, req, runtime)
)
async def update_delivery_instance_with_options_async(
self,
uid: str,
request: adp_20210720_models.UpdateDeliveryInstanceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateDeliveryInstanceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.deliverable_config_uid):
body['deliverableConfigUID'] = request.deliverable_config_uid
if not UtilClient.is_unset(request.deliverable_uid):
body['deliverableUID'] = request.deliverable_uid
if not UtilClient.is_unset(request.desc):
body['desc'] = request.desc
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateDeliveryInstance',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/delivery/delivery-instances/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateDeliveryInstanceResponse(),
await self.call_api_async(params, req, runtime)
)
def update_delivery_instance(
self,
uid: str,
request: adp_20210720_models.UpdateDeliveryInstanceRequest,
) -> adp_20210720_models.UpdateDeliveryInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_delivery_instance_with_options(uid, request, headers, runtime)
async def update_delivery_instance_async(
self,
uid: str,
request: adp_20210720_models.UpdateDeliveryInstanceRequest,
) -> adp_20210720_models.UpdateDeliveryInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_delivery_instance_with_options_async(uid, request, headers, runtime)
def update_environment_with_options(
self,
uid: str,
request: adp_20210720_models.UpdateEnvironmentRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateEnvironmentResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.advanced_configs):
body['advancedConfigs'] = request.advanced_configs
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.location):
body['location'] = request.location
if not UtilClient.is_unset(request.vendor_config):
body['vendorConfig'] = request.vendor_config
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateEnvironment',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateEnvironmentResponse(),
self.call_api(params, req, runtime)
)
async def update_environment_with_options_async(
self,
uid: str,
request: adp_20210720_models.UpdateEnvironmentRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateEnvironmentResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.advanced_configs):
body['advancedConfigs'] = request.advanced_configs
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.location):
body['location'] = request.location
if not UtilClient.is_unset(request.vendor_config):
body['vendorConfig'] = request.vendor_config
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateEnvironment',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateEnvironmentResponse(),
await self.call_api_async(params, req, runtime)
)
def update_environment(
self,
uid: str,
request: adp_20210720_models.UpdateEnvironmentRequest,
) -> adp_20210720_models.UpdateEnvironmentResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_environment_with_options(uid, request, headers, runtime)
async def update_environment_async(
self,
uid: str,
request: adp_20210720_models.UpdateEnvironmentRequest,
) -> adp_20210720_models.UpdateEnvironmentResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_environment_with_options_async(uid, request, headers, runtime)
def update_environment_node_with_options(
self,
uid: str,
node_uid: str,
request: adp_20210720_models.UpdateEnvironmentNodeRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateEnvironmentNodeResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.application_disk):
body['applicationDisk'] = request.application_disk
if not UtilClient.is_unset(request.etcd_disk):
body['etcdDisk'] = request.etcd_disk
if not UtilClient.is_unset(request.labels):
body['labels'] = request.labels
if not UtilClient.is_unset(request.root_password):
body['rootPassword'] = request.root_password
if not UtilClient.is_unset(request.taints):
body['taints'] = request.taints
if not UtilClient.is_unset(request.trident_system_disk):
body['tridentSystemDisk'] = request.trident_system_disk
if not UtilClient.is_unset(request.trident_system_size_disk):
body['tridentSystemSizeDisk'] = request.trident_system_size_disk
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateEnvironmentNode',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/nodes/{OpenApiUtilClient.get_encode_param(node_uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateEnvironmentNodeResponse(),
self.call_api(params, req, runtime)
)
async def update_environment_node_with_options_async(
self,
uid: str,
node_uid: str,
request: adp_20210720_models.UpdateEnvironmentNodeRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateEnvironmentNodeResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.application_disk):
body['applicationDisk'] = request.application_disk
if not UtilClient.is_unset(request.etcd_disk):
body['etcdDisk'] = request.etcd_disk
if not UtilClient.is_unset(request.labels):
body['labels'] = request.labels
if not UtilClient.is_unset(request.root_password):
body['rootPassword'] = request.root_password
if not UtilClient.is_unset(request.taints):
body['taints'] = request.taints
if not UtilClient.is_unset(request.trident_system_disk):
body['tridentSystemDisk'] = request.trident_system_disk
if not UtilClient.is_unset(request.trident_system_size_disk):
body['tridentSystemSizeDisk'] = request.trident_system_size_disk
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateEnvironmentNode',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/nodes/{OpenApiUtilClient.get_encode_param(node_uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateEnvironmentNodeResponse(),
await self.call_api_async(params, req, runtime)
)
def update_environment_node(
self,
uid: str,
node_uid: str,
request: adp_20210720_models.UpdateEnvironmentNodeRequest,
) -> adp_20210720_models.UpdateEnvironmentNodeResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_environment_node_with_options(uid, node_uid, request, headers, runtime)
async def update_environment_node_async(
self,
uid: str,
node_uid: str,
request: adp_20210720_models.UpdateEnvironmentNodeRequest,
) -> adp_20210720_models.UpdateEnvironmentNodeResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_environment_node_with_options_async(uid, node_uid, request, headers, runtime)
def update_environment_product_version_with_options(
self,
uid: str,
request: adp_20210720_models.UpdateEnvironmentProductVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateEnvironmentProductVersionResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.old_product_version_spec_uid):
body['oldProductVersionSpecUID'] = request.old_product_version_spec_uid
if not UtilClient.is_unset(request.old_product_version_uid):
body['oldProductVersionUID'] = request.old_product_version_uid
if not UtilClient.is_unset(request.product_version_spec_uid):
body['productVersionSpecUID'] = request.product_version_spec_uid
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateEnvironmentProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/product-versions',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateEnvironmentProductVersionResponse(),
self.call_api(params, req, runtime)
)
async def update_environment_product_version_with_options_async(
self,
uid: str,
request: adp_20210720_models.UpdateEnvironmentProductVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateEnvironmentProductVersionResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.old_product_version_spec_uid):
body['oldProductVersionSpecUID'] = request.old_product_version_spec_uid
if not UtilClient.is_unset(request.old_product_version_uid):
body['oldProductVersionUID'] = request.old_product_version_uid
if not UtilClient.is_unset(request.product_version_spec_uid):
body['productVersionSpecUID'] = request.product_version_spec_uid
if not UtilClient.is_unset(request.product_version_uid):
body['productVersionUID'] = request.product_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateEnvironmentProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/product-versions',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateEnvironmentProductVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def update_environment_product_version(
self,
uid: str,
request: adp_20210720_models.UpdateEnvironmentProductVersionRequest,
) -> adp_20210720_models.UpdateEnvironmentProductVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_environment_product_version_with_options(uid, request, headers, runtime)
async def update_environment_product_version_async(
self,
uid: str,
request: adp_20210720_models.UpdateEnvironmentProductVersionRequest,
) -> adp_20210720_models.UpdateEnvironmentProductVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_environment_product_version_with_options_async(uid, request, headers, runtime)
def update_foundation_component_reference_with_options(
self,
uid: str,
component_reference_uid: str,
request: adp_20210720_models.UpdateFoundationComponentReferenceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateFoundationComponentReferenceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_orchestration_values):
body['componentOrchestrationValues'] = request.component_orchestration_values
if not UtilClient.is_unset(request.enable):
body['enable'] = request.enable
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateFoundationComponentReference',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation-references/{OpenApiUtilClient.get_encode_param(uid)}/components/{OpenApiUtilClient.get_encode_param(component_reference_uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateFoundationComponentReferenceResponse(),
self.call_api(params, req, runtime)
)
async def update_foundation_component_reference_with_options_async(
self,
uid: str,
component_reference_uid: str,
request: adp_20210720_models.UpdateFoundationComponentReferenceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateFoundationComponentReferenceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_orchestration_values):
body['componentOrchestrationValues'] = request.component_orchestration_values
if not UtilClient.is_unset(request.enable):
body['enable'] = request.enable
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateFoundationComponentReference',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation-references/{OpenApiUtilClient.get_encode_param(uid)}/components/{OpenApiUtilClient.get_encode_param(component_reference_uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateFoundationComponentReferenceResponse(),
await self.call_api_async(params, req, runtime)
)
def update_foundation_component_reference(
self,
uid: str,
component_reference_uid: str,
request: adp_20210720_models.UpdateFoundationComponentReferenceRequest,
) -> adp_20210720_models.UpdateFoundationComponentReferenceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_foundation_component_reference_with_options(uid, component_reference_uid, request, headers, runtime)
async def update_foundation_component_reference_async(
self,
uid: str,
component_reference_uid: str,
request: adp_20210720_models.UpdateFoundationComponentReferenceRequest,
) -> adp_20210720_models.UpdateFoundationComponentReferenceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_foundation_component_reference_with_options_async(uid, component_reference_uid, request, headers, runtime)
def update_foundation_reference_with_options(
self,
uid: str,
request: adp_20210720_models.UpdateFoundationReferenceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateFoundationReferenceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.cluster_config):
body['clusterConfig'] = request.cluster_config
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateFoundationReference',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation-references/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateFoundationReferenceResponse(),
self.call_api(params, req, runtime)
)
async def update_foundation_reference_with_options_async(
self,
uid: str,
request: adp_20210720_models.UpdateFoundationReferenceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateFoundationReferenceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.cluster_config):
body['clusterConfig'] = request.cluster_config
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateFoundationReference',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/foundation-references/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateFoundationReferenceResponse(),
await self.call_api_async(params, req, runtime)
)
def update_foundation_reference(
self,
uid: str,
request: adp_20210720_models.UpdateFoundationReferenceRequest,
) -> adp_20210720_models.UpdateFoundationReferenceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_foundation_reference_with_options(uid, request, headers, runtime)
async def update_foundation_reference_async(
self,
uid: str,
request: adp_20210720_models.UpdateFoundationReferenceRequest,
) -> adp_20210720_models.UpdateFoundationReferenceResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_foundation_reference_with_options_async(uid, request, headers, runtime)
def update_product_with_options(
self,
uid: str,
request: adp_20210720_models.UpdateProductRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateProductResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.categories):
body['categories'] = request.categories
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.display_name):
body['displayName'] = request.display_name
if not UtilClient.is_unset(request.vendor):
body['vendor'] = request.vendor
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateProduct',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/products/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateProductResponse(),
self.call_api(params, req, runtime)
)
async def update_product_with_options_async(
self,
uid: str,
request: adp_20210720_models.UpdateProductRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateProductResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.categories):
body['categories'] = request.categories
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.display_name):
body['displayName'] = request.display_name
if not UtilClient.is_unset(request.vendor):
body['vendor'] = request.vendor
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateProduct',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/products/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateProductResponse(),
await self.call_api_async(params, req, runtime)
)
def update_product(
self,
uid: str,
request: adp_20210720_models.UpdateProductRequest,
) -> adp_20210720_models.UpdateProductResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_product_with_options(uid, request, headers, runtime)
async def update_product_async(
self,
uid: str,
request: adp_20210720_models.UpdateProductRequest,
) -> adp_20210720_models.UpdateProductResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_product_with_options_async(uid, request, headers, runtime)
def update_product_component_version_with_options(
self,
uid: str,
relation_uid: str,
request: adp_20210720_models.UpdateProductComponentVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateProductComponentVersionResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_orchestration_values):
body['componentOrchestrationValues'] = request.component_orchestration_values
if not UtilClient.is_unset(request.component_specification_uid):
body['componentSpecificationUid'] = request.component_specification_uid
if not UtilClient.is_unset(request.component_specification_values):
body['componentSpecificationValues'] = request.component_specification_values
if not UtilClient.is_unset(request.enable):
body['enable'] = request.enable
if not UtilClient.is_unset(request.new_component_version_uid):
body['newComponentVersionUID'] = request.new_component_version_uid
if not UtilClient.is_unset(request.policy):
body['policy'] = request.policy
if not UtilClient.is_unset(request.release_name):
body['releaseName'] = request.release_name
if not UtilClient.is_unset(request.unset_component_version_spec):
body['unsetComponentVersionSpec'] = request.unset_component_version_spec
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateProductComponentVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/relations/{OpenApiUtilClient.get_encode_param(relation_uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateProductComponentVersionResponse(),
self.call_api(params, req, runtime)
)
async def update_product_component_version_with_options_async(
self,
uid: str,
relation_uid: str,
request: adp_20210720_models.UpdateProductComponentVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateProductComponentVersionResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_orchestration_values):
body['componentOrchestrationValues'] = request.component_orchestration_values
if not UtilClient.is_unset(request.component_specification_uid):
body['componentSpecificationUid'] = request.component_specification_uid
if not UtilClient.is_unset(request.component_specification_values):
body['componentSpecificationValues'] = request.component_specification_values
if not UtilClient.is_unset(request.enable):
body['enable'] = request.enable
if not UtilClient.is_unset(request.new_component_version_uid):
body['newComponentVersionUID'] = request.new_component_version_uid
if not UtilClient.is_unset(request.policy):
body['policy'] = request.policy
if not UtilClient.is_unset(request.release_name):
body['releaseName'] = request.release_name
if not UtilClient.is_unset(request.unset_component_version_spec):
body['unsetComponentVersionSpec'] = request.unset_component_version_spec
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateProductComponentVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/relations/{OpenApiUtilClient.get_encode_param(relation_uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateProductComponentVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def update_product_component_version(
self,
uid: str,
relation_uid: str,
request: adp_20210720_models.UpdateProductComponentVersionRequest,
) -> adp_20210720_models.UpdateProductComponentVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_product_component_version_with_options(uid, relation_uid, request, headers, runtime)
async def update_product_component_version_async(
self,
uid: str,
relation_uid: str,
request: adp_20210720_models.UpdateProductComponentVersionRequest,
) -> adp_20210720_models.UpdateProductComponentVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_product_component_version_with_options_async(uid, relation_uid, request, headers, runtime)
def update_product_foundation_version_with_options(
self,
uid: str,
request: adp_20210720_models.UpdateProductFoundationVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateProductFoundationVersionResponse:
"""
        @deprecated This operation is deprecated.
        @param request: UpdateProductFoundationVersionRequest
        @param headers: map
        @param runtime: runtime options for this request (RuntimeOptions)
        @return: UpdateProductFoundationVersionResponse
"""
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.foundation_version_uid):
body['foundationVersionUID'] = request.foundation_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateProductFoundationVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/foundation',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateProductFoundationVersionResponse(),
self.call_api(params, req, runtime)
)
async def update_product_foundation_version_with_options_async(
self,
uid: str,
request: adp_20210720_models.UpdateProductFoundationVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateProductFoundationVersionResponse:
"""
        @deprecated This operation is deprecated.
        @param request: UpdateProductFoundationVersionRequest
        @param headers: map
        @param runtime: runtime options for this request (RuntimeOptions)
        @return: UpdateProductFoundationVersionResponse
"""
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.foundation_version_uid):
body['foundationVersionUID'] = request.foundation_version_uid
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateProductFoundationVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/foundation',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateProductFoundationVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def update_product_foundation_version(
self,
uid: str,
request: adp_20210720_models.UpdateProductFoundationVersionRequest,
) -> adp_20210720_models.UpdateProductFoundationVersionResponse:
"""
        @deprecated This operation is deprecated.
        @param request: UpdateProductFoundationVersionRequest
        @return: UpdateProductFoundationVersionResponse
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_product_foundation_version_with_options(uid, request, headers, runtime)
async def update_product_foundation_version_async(
self,
uid: str,
request: adp_20210720_models.UpdateProductFoundationVersionRequest,
) -> adp_20210720_models.UpdateProductFoundationVersionResponse:
"""
        @deprecated This operation is deprecated.
        @param request: UpdateProductFoundationVersionRequest
        @return: UpdateProductFoundationVersionResponse
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_product_foundation_version_with_options_async(uid, request, headers, runtime)
def update_product_version_with_options(
self,
uid: str,
request: adp_20210720_models.UpdateProductVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateProductVersionResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.action):
query['action'] = request.action
body = {}
if not UtilClient.is_unset(request.continuous_integration):
body['continuousIntegration'] = request.continuous_integration
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.entry):
body['entry'] = request.entry
if not UtilClient.is_unset(request.timeout):
body['timeout'] = request.timeout
if not UtilClient.is_unset(request.version):
body['version'] = request.version
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query),
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateProductVersionResponse(),
self.call_api(params, req, runtime)
)
async def update_product_version_with_options_async(
self,
uid: str,
request: adp_20210720_models.UpdateProductVersionRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateProductVersionResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.action):
query['action'] = request.action
body = {}
if not UtilClient.is_unset(request.continuous_integration):
body['continuousIntegration'] = request.continuous_integration
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.entry):
body['entry'] = request.entry
if not UtilClient.is_unset(request.timeout):
body['timeout'] = request.timeout
if not UtilClient.is_unset(request.version):
body['version'] = request.version
req = open_api_models.OpenApiRequest(
headers=headers,
query=OpenApiUtilClient.query(query),
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateProductVersion',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateProductVersionResponse(),
await self.call_api_async(params, req, runtime)
)
def update_product_version(
self,
uid: str,
request: adp_20210720_models.UpdateProductVersionRequest,
) -> adp_20210720_models.UpdateProductVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_product_version_with_options(uid, request, headers, runtime)
async def update_product_version_async(
self,
uid: str,
request: adp_20210720_models.UpdateProductVersionRequest,
) -> adp_20210720_models.UpdateProductVersionResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_product_version_with_options_async(uid, request, headers, runtime)
def update_product_version_config_with_options(
self,
uid: str,
config_uid: str,
request: adp_20210720_models.UpdateProductVersionConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateProductVersionConfigResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_version_uid):
body['componentVersionUID'] = request.component_version_uid
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.parent_component_version_uid):
body['parentComponentVersionUID'] = request.parent_component_version_uid
if not UtilClient.is_unset(request.value):
body['value'] = request.value
if not UtilClient.is_unset(request.value_type):
body['valueType'] = request.value_type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateProductVersionConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/configs/{OpenApiUtilClient.get_encode_param(config_uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateProductVersionConfigResponse(),
self.call_api(params, req, runtime)
)
async def update_product_version_config_with_options_async(
self,
uid: str,
config_uid: str,
request: adp_20210720_models.UpdateProductVersionConfigRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.UpdateProductVersionConfigResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.component_version_uid):
body['componentVersionUID'] = request.component_version_uid
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.parent_component_version_uid):
body['parentComponentVersionUID'] = request.parent_component_version_uid
if not UtilClient.is_unset(request.value):
body['value'] = request.value
if not UtilClient.is_unset(request.value_type):
body['valueType'] = request.value_type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='UpdateProductVersionConfig',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/product-versions/{OpenApiUtilClient.get_encode_param(uid)}/configs/{OpenApiUtilClient.get_encode_param(config_uid)}',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.UpdateProductVersionConfigResponse(),
await self.call_api_async(params, req, runtime)
)
def update_product_version_config(
self,
uid: str,
config_uid: str,
request: adp_20210720_models.UpdateProductVersionConfigRequest,
) -> adp_20210720_models.UpdateProductVersionConfigResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_product_version_config_with_options(uid, config_uid, request, headers, runtime)
async def update_product_version_config_async(
self,
uid: str,
config_uid: str,
request: adp_20210720_models.UpdateProductVersionConfigRequest,
) -> adp_20210720_models.UpdateProductVersionConfigResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_product_version_config_with_options_async(uid, config_uid, request, headers, runtime)
def validate_environment_tunnel_with_options(
self,
uid: str,
request: adp_20210720_models.ValidateEnvironmentTunnelRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ValidateEnvironmentTunnelResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.tunnel_config):
body['tunnelConfig'] = request.tunnel_config
if not UtilClient.is_unset(request.tunnel_type):
body['tunnelType'] = request.tunnel_type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='ValidateEnvironmentTunnel',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/tunnels/validation',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ValidateEnvironmentTunnelResponse(),
self.call_api(params, req, runtime)
)
async def validate_environment_tunnel_with_options_async(
self,
uid: str,
request: adp_20210720_models.ValidateEnvironmentTunnelRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> adp_20210720_models.ValidateEnvironmentTunnelResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.tunnel_config):
body['tunnelConfig'] = request.tunnel_config
if not UtilClient.is_unset(request.tunnel_type):
body['tunnelType'] = request.tunnel_type
req = open_api_models.OpenApiRequest(
headers=headers,
body=OpenApiUtilClient.parse_to_map(body)
)
params = open_api_models.Params(
action='ValidateEnvironmentTunnel',
version='2021-07-20',
protocol='HTTPS',
pathname=f'/api/v2/environments/{OpenApiUtilClient.get_encode_param(uid)}/tunnels/validation',
method='PUT',
auth_type='AK',
style='ROA',
req_body_type='json',
body_type='json'
)
return TeaCore.from_map(
adp_20210720_models.ValidateEnvironmentTunnelResponse(),
await self.call_api_async(params, req, runtime)
)
def validate_environment_tunnel(
self,
uid: str,
request: adp_20210720_models.ValidateEnvironmentTunnelRequest,
) -> adp_20210720_models.ValidateEnvironmentTunnelResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return self.validate_environment_tunnel_with_options(uid, request, headers, runtime)
async def validate_environment_tunnel_async(
self,
uid: str,
request: adp_20210720_models.ValidateEnvironmentTunnelRequest,
) -> adp_20210720_models.ValidateEnvironmentTunnelResponse:
runtime = util_models.RuntimeOptions()
headers = {}
return await self.validate_environment_tunnel_with_options_async(uid, request, headers, runtime)
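# ---------------------------------------------------------------------------
# Editor's note: the functions below are an illustrative usage sketch appended
# for clarity; they are not part of the generated client above. They assume the
# class defined above is exported as `Client`, that `open_api_models`,
# `util_models` and `adp_20210720_models` are the module aliases imported at the
# top of this file, and that the endpoint, credentials and UIDs shown are
# placeholders to be replaced with real values.
# ---------------------------------------------------------------------------
def _build_adp_client():
    # Assumed construction path: credentials and endpoint are passed through the
    # standard Tea OpenAPI Config object.
    config = open_api_models.Config(
        access_key_id='<access-key-id>',          # placeholder, never hard-code real keys
        access_key_secret='<access-key-secret>',  # placeholder
        endpoint='adp.aliyuncs.com',              # assumed endpoint; verify for your region
    )
    return Client(config)


def example_update_environment(uid: str):
    # Synchronous call through the convenience wrapper defined above.
    client = _build_adp_client()
    request = adp_20210720_models.UpdateEnvironmentRequest(description='updated via SDK')
    return client.update_environment(uid, request)


async def example_update_environment_async(uid: str):
    # Asynchronous variant; awaits the *_async wrapper defined above
    # (run it with asyncio.run(example_update_environment_async('<env-uid>'))).
    client = _build_adp_client()
    request = adp_20210720_models.UpdateEnvironmentRequest(description='updated via SDK')
    return await client.update_environment_async(uid, request)


def example_update_environment_with_runtime(uid: str):
    # The *_with_options variants additionally accept per-call headers and
    # RuntimeOptions; the timeout fields used here are assumed to exist on the
    # alibabacloud_tea_util RuntimeOptions model.
    client = _build_adp_client()
    request = adp_20210720_models.UpdateEnvironmentRequest(description='updated via SDK')
    runtime = util_models.RuntimeOptions(read_timeout=10000, connect_timeout=5000)
    headers = {'x-custom-trace-id': 'example-trace-id'}  # hypothetical header
    return client.update_environment_with_options(uid, request, headers, runtime)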
|
[
"sdk-team@alibabacloud.com"
] |
sdk-team@alibabacloud.com
|
d99f160d8ad572b13e39fa68ab9d1c9ebaeb17c3
|
493c7d9678a0724736fb9dd7c69580a94099d2b4
|
/apps/organization/models.py
|
81669bc929ff0415215ac5e9aca33cf0d6ca3b2d
|
[] |
no_license
|
cuixiaozhao/MxOnline
|
e253c8c5f5fa81747d8e1ca064ce032e9bd42566
|
c96ae16cea9ad966df36e9fcacc902c2303e765c
|
refs/heads/master
| 2020-03-29T18:47:11.158275
| 2018-10-22T14:06:50
| 2018-10-22T14:06:50
| 150,231,387
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,442
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import datetime
from django.db import models
# Create your models here.
class CityDict(models.Model):
name = models.CharField(max_length=20, verbose_name=u"城市")
desc = models.CharField(max_length=200, verbose_name=u"描述")
    add_time = models.DateTimeField(default=datetime.now, verbose_name=u"添加时间")
class Meta:
verbose_name = u"城市"
verbose_name_plural = verbose_name
def __unicode__(self):
return self.name
class CourseOrg(models.Model):
name = models.CharField(max_length=50, verbose_name=u"机构名称")
desc = models.TextField(verbose_name=u"机构描述")
category = models.CharField(default="pxjg", max_length=20, choices=(("pxjg", "培训机构"), ("gr", "个人"), ("gx", "高校"),))
click_nums = models.IntegerField(default=0, verbose_name=u"点击数")
fav_nums = models.IntegerField(default=0, verbose_name=u"收藏数")
image = models.ImageField(upload_to="org/%Y/%m", verbose_name=u"LOGO")
address = models.CharField(max_length=150, verbose_name=u"机构地址")
city = models.ForeignKey(CityDict, verbose_name=u"所在城市")
students = models.IntegerField(default=0, verbose_name=u"学生人数")
    course_nums = models.IntegerField(default=0, verbose_name=u"课程数")
add_time = models.DateTimeField(default=datetime.now, verbose_name=u"添加时间")
class Meta:
verbose_name = u"课程机构"
verbose_name_plural = verbose_name
def __unicode__(self):
return self.name
class Teacher(models.Model):
org = models.ForeignKey(CourseOrg, verbose_name=u"所属机构")
name = models.CharField(max_length=50, verbose_name=u"教师名")
work_years = models.IntegerField(default=0, verbose_name=u"工作年限")
work_company = models.CharField(max_length=50, verbose_name=u"就职公司")
work_position = models.CharField(max_length=50, verbose_name=u"公司职位")
points = models.CharField(max_length=50, verbose_name=u"教学特点")
click_nums = models.IntegerField(default=0, verbose_name=u"点击数")
fav_nums = models.IntegerField(default=0, verbose_name=u"收藏数")
add_time = models.DateTimeField(default=datetime.now, verbose_name=u"添加时间")
class Meta:
verbose_name = u"教师"
verbose_name_plural = verbose_name
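# Editor's note: illustrative query helpers for the models above; not part of the
# original file. They assume the app is installed and migrated, and the function
# names and the default limit are hypothetical.
def top_orgs_in_city(city_name, limit=5):
    """Return the most-clicked course organizations located in the given city."""
    return (CourseOrg.objects
            .filter(city__name=city_name)
            .order_by("-click_nums")[:limit])


def teachers_of_org(org):
    """Return the teachers of a CourseOrg instance, most experienced first."""
    return Teacher.objects.filter(org=org).order_by("-work_years")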
|
[
"19930911cXS"
] |
19930911cXS
|
68231ea1fdfc7c7ce7e6f4d578d950648ba1ba6d
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/coverage-big-4579.py
|
8c13f6cc0c32208121d23fd4704cad007abaacf3
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038
| 2022-02-03T15:42:39
| 2022-02-03T15:42:39
| 451,969,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,347
|
py
|
count:int = 0
count2:int = 0
count3:int = 0
count4:int = 0
count5:int = 0
def foo(s: str) -> int:
return len(s)
def foo2(s: str, s2: str) -> int:
return len(s)
def foo3(s: str, s2: str, s3: str) -> int:
return len(s)
def foo4(s: str, s2: str, s3: str, s4: str) -> int:
return len(s)
def foo5(s: str, s2: str, s3: str, s4: str, s5: str) -> int:
return len(s)
class bar(object):
p: bool = True
def baz(self:"bar", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar2(object):
p: bool = True
p2: bool = True
def baz(self:"bar2", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar2", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar3(object):
p: bool = True
p2: bool = True
p3: bool = True
def baz(self:"bar3", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar3", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar3", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar4(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
def baz(self:"bar4", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar4", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar4", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar4", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar5(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
p5: bool = True
def baz(self:"bar5", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar5", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar5", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
    def baz5(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int], xx5: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
x5:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
y5:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
def qux5(y: int, y2: int, y3: int, y4: int, y5: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
nonlocal x5
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
print(bar().baz([1,2]))
|
[
"647530+Virtlink@users.noreply.github.com"
] |
647530+Virtlink@users.noreply.github.com
|
ef3f10ffb9fb82da880e30592f7c192f58c36a89
|
2fc65c833223d282bd9867729ad3ed054c0832c2
|
/timetable/Section/router.py
|
308f412c58640aa5d05ae79329afb640054f1a26
|
[] |
no_license
|
libbyandhelen/DB_timetable
|
72b744ec332e5c1c3e242df1df6b4373493472ba
|
17936821b7064bed2ebb51289e5a9b0e131929d1
|
refs/heads/master
| 2020-09-21T20:43:55.008545
| 2019-12-12T01:18:48
| 2019-12-12T01:18:48
| 224,921,739
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,304
|
py
|
from Section.views import get_selected_sections_by_user, create_select_section, delete_select_section, \
create_select_section_by_section_id
from base.error import Error
from base.response import error_response
def router_selectsection(request):
"""
/api/usersections
GET: get_selected_sections_by_user
    POST: create_select_section_by_section_id
    DELETE: delete_select_section
"""
if request.method == "GET":
return get_selected_sections_by_user(request)
elif request.method == "POST":
return create_select_section_by_section_id(request)
# elif request.method == "POST":
# return create_select_section(request)
# elif request.method == "DELETE":
# return delete_section_by_category(request)
elif request.method == "DELETE":
return delete_select_section(request)
else:
return error_response(Error.ERROR_METHOD)
def router_selectsection_id(request, section_id):
"""
/api/usersections/:section_id
DELETE: delete_select_section
POST: create_select_section_by_section_id
"""
if request.method == "DELETE":
return delete_select_section(request, section_id)
elif request.method == "POST":
return create_select_section_by_section_id(request, section_id)
else:
return error_response(Error.ERROR_METHOD)
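# Editor's note: illustrative URL wiring for the two routers above; not part of
# the original file. The urls.py location and Django version are assumptions;
# url() is used because the project predates Django 4, swap in re_path() on
# newer versions.
from django.conf.urls import url

from Section.router import router_selectsection, router_selectsection_id

urlpatterns = [
    url(r'^api/usersections$', router_selectsection),
    url(r'^api/usersections/(?P<section_id>[^/]+)$', router_selectsection_id),
]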
|
[
"libbyandhelen@163.com"
] |
libbyandhelen@163.com
|
4fc695ac70d158a6cba3bae5ba199844e1cd2fc5
|
80dbb004883779f51733f5382040f940507e9180
|
/youtube/urls.py
|
5ade2eef4a72366cff05f9902e55dac1992d6caf
|
[] |
no_license
|
Shayan-9248/youtube_search
|
94824398f498022fb53aa5ca7f08ba6008f70396
|
e07d9a2aa0dac0d76675db028c3584583151b31d
|
refs/heads/master
| 2023-03-26T06:07:55.303627
| 2021-03-24T15:21:19
| 2021-03-24T15:21:19
| 350,349,734
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 131
|
py
|
from django.urls import path
from . import views
app_name = 'youtube'
urlpatterns = [
path('', views.index, name='index'),
]
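# Editor's note: illustrative project-level include for the app URLconf above; it
# belongs in the project's root urls.py, not this file. The URL prefix and project
# module layout are assumptions.
from django.urls import include, path

urlpatterns = [
    path('youtube/', include('youtube.urls', namespace='youtube')),
]

# With app_name = 'youtube' and the route name 'index', the page can be resolved
# with reverse('youtube:index') in views or {% url 'youtube:index' %} in templates.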
|
[
"shayan.aimoradii@gmail.com"
] |
shayan.aimoradii@gmail.com
|
7e7469802b3c5b924e652ee98673659d9cfede94
|
6a5477e9bfae8110b2203182ad1db0517d09b2f2
|
/Realestate4/Tagent4/models.py
|
132aff2e669b0c6bb3868eae0e56314f45160235
|
[] |
no_license
|
Jagadishbommareddy/multiadress
|
b90f46ef80b50ddae8d8499e3e8c2d56d10796a9
|
a8fa8f5fe2803f66bd7e5a8668e82b589df846b5
|
refs/heads/master
| 2021-01-23T10:04:07.921653
| 2017-09-06T12:16:31
| 2017-09-06T12:16:31
| 102,604,529
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,237
|
py
|
from django.core.urlresolvers import reverse
from django.db import models
from .validations import *
class Agent(models.Model):
    agent_id = models.AutoField(primary_key=True)
    first_name = models.CharField(max_length=20, validators=[validate_first_name])
    last_name = models.CharField(max_length=20, validators=[validate_last_name])
    age = models.IntegerField()
    education = models.CharField(max_length=50, validators=[validate_education])
    company_name = models.CharField(max_length=50)
    specialization = models.CharField(max_length=100, validators=[validate_specelization])
    experence = models.IntegerField()
    agent_notes = models.TextField()
def get_absolute_url(self):
return reverse('agent-update', kwargs={'pk': self.pk})
class Address(models.Model):
    agent = models.ForeignKey(Agent)
    address_id = models.AutoField(primary_key=True)
    address1 = models.CharField(max_length=100)
    address2 = models.CharField(max_length=100)
    city = models.CharField(max_length=20, validators=[validate_city])
    state = models.CharField(max_length=20, validators=[validate_state])
    landmark = models.CharField(max_length=20, validators=[validate_landmark])
    pincode = models.IntegerField()
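# Editor's note: hypothetical usage sketch, not part of the original models module.
# It would normally be run from a Django shell; every field value here is invented.
# agent = Agent.objects.create(
#     first_name='Jane', last_name='Doe', age=34, education='B.Arch',
#     company_name='Acme Realty', specialization='Residential',
#     experence=8, agent_notes='Handles downtown listings',
# )
# Address.objects.create(
#     agent=agent, address1='12 Main St', address2='Suite 4', city='Austin',
#     state='Texas', landmark='Near City Hall', pincode=78701,
# )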
|
[
"noreply@github.com"
] |
Jagadishbommareddy.noreply@github.com
|
2f217ccdcd79a8d5bb7e6c3d2f7d2ab5c1838d56
|
742f15ee3880306a946df7efee0020e42684b109
|
/out/string/python-flask/openapi_server/models/variable_collection.py
|
9cfe8be729a4ce32a9dd09c941f50f540d31840e
|
[] |
no_license
|
potiuk/airflow-api-clients
|
d0196f80caf6e6f4ecfa6b7c9657f241218168ad
|
325ba127f1e9aa808091916d348102844e0aa6c5
|
refs/heads/master
| 2022-09-14T00:40:28.592508
| 2020-05-31T10:05:42
| 2020-05-31T10:15:55
| 268,128,082
| 0
| 0
| null | 2020-05-30T17:28:04
| 2020-05-30T17:28:03
| null |
UTF-8
|
Python
| false
| false
| 1,941
|
py
|
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from openapi_server.models.base_model_ import Model
from openapi_server.models.variable_collection_item import VariableCollectionItem
from openapi_server import util
class VariableCollection(Model):
"""NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
Do not edit the class manually.
"""
def __init__(self, variables=None): # noqa: E501
"""VariableCollection - a model defined in OpenAPI
:param variables: The variables of this VariableCollection. # noqa: E501
:type variables: List[VariableCollectionItem]
"""
self.openapi_types = {
'variables': List[VariableCollectionItem]
}
self.attribute_map = {
'variables': 'variables'
}
self._variables = variables
@classmethod
def from_dict(cls, dikt) -> 'VariableCollection':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The VariableCollection of this VariableCollection. # noqa: E501
:rtype: VariableCollection
"""
return util.deserialize_model(dikt, cls)
@property
def variables(self):
"""Gets the variables of this VariableCollection.
:return: The variables of this VariableCollection.
:rtype: List[VariableCollectionItem]
"""
return self._variables
@variables.setter
def variables(self, variables):
"""Sets the variables of this VariableCollection.
:param variables: The variables of this VariableCollection.
:type variables: List[VariableCollectionItem]
"""
self._variables = variables
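# Editor's note: hypothetical usage sketch, not part of the generated module. The dict
# shape mirrors attribute_map above; actual decoding is delegated to
# openapi_server.util.deserialize_model.
# empty = VariableCollection(variables=[])
# from_payload = VariableCollection.from_dict({'variables': []})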
|
[
"kamil.bregula@polidea.com"
] |
kamil.bregula@polidea.com
|
2dc861a7f683325aeac69c4dacf18f63fa19f428
|
03f037d0f6371856ede958f0c9d02771d5402baf
|
/graphics/VTK-7.0.0/Examples/Infovis/Python/streaming_statistics_pyqt.py
|
6f778bc6de891077741ee6e337ac42522a554388
|
[
"BSD-3-Clause"
] |
permissive
|
hlzz/dotfiles
|
b22dc2dc5a9086353ed6dfeee884f7f0a9ddb1eb
|
0591f71230c919c827ba569099eb3b75897e163e
|
refs/heads/master
| 2021-01-10T10:06:31.018179
| 2016-09-27T08:13:18
| 2016-09-27T08:13:18
| 55,040,954
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,739
|
py
|
#!/usr/bin/env python
from __future__ import print_function
from vtk import *
import os.path
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
data_dir = VTK_DATA_ROOT + "/Data/Infovis/SQLite/"
if not os.path.exists(data_dir):
data_dir = VTK_DATA_ROOT + "/Data/Infovis/SQLite/"
if not os.path.exists(data_dir):
data_dir = VTK_DATA_ROOT + "/Data/Infovis/SQLite/"
sqlite_file = data_dir + "temperatures.db"
# I'm sure there's a better way than these global vars
currentRow = 0
numberOfRows = 1
done = False
psuedoStreamingData = vtkProgrammableFilter()
def streamData():
global done
global currentRow
input = psuedoStreamingData.GetInput()
output = psuedoStreamingData.GetOutput()
# Copy just the columns names/types
output.GetRowData().CopyStructure(input.GetRowData())
# Loop through all the input data and grab the next bunch of rows
startRow = currentRow
endRow = startRow + numberOfRows
if (endRow >= input.GetNumberOfRows()):
endRow = input.GetNumberOfRows()
        done = True
print("streaming: ", startRow, "-", endRow)
for i in range(startRow, endRow):
output.InsertNextRow(input.GetRow(i))
    currentRow = endRow
psuedoStreamingData.SetExecuteMethod(streamData)
class Timer(QObject):
def __init__(self, parent=None):
super(Timer, self).__init__(parent)
# Setup the data streaming timer
self.timer = QTimer()
QObject.connect(self.timer, SIGNAL("timeout()"), self.update)
self.timer.start(100)
def update(self):
if (done):
            quit()
psuedoStreamingData.Modified() # Is there a way to avoid this?
psuedoStreamingData.GetExecutive().Push()
printStats()
def printStats():
sStats = ss.GetOutputDataObject( 1 )
sPrimary = sStats.GetBlock( 0 )
sDerived = sStats.GetBlock( 1 )
sPrimary.Dump( 15 )
sDerived.Dump( 15 )
if __name__ == "__main__":
""" Main entry point of this python script """
# Set up streaming executive
streamingExec = vtkThreadedStreamingPipeline()
vtkAlgorithm.SetDefaultExecutivePrototype(streamingExec)
streamingExec.FastDelete()
vtkThreadedStreamingPipeline.SetAutoPropagatePush(True)
# Pull the table from the database
databaseToTable = vtkSQLDatabaseTableSource()
databaseToTable.SetURL("sqlite://" + sqlite_file)
databaseToTable.SetQuery("select * from main_tbl")
# Hook up the database to the streaming data filter
psuedoStreamingData.SetInputConnection(databaseToTable.GetOutputPort())
# Calculate offline(non-streaming) descriptive statistics
print("# Calculate offline descriptive statistics:")
ds = vtkDescriptiveStatistics()
ds.SetInputConnection(databaseToTable.GetOutputPort())
ds.AddColumn("Temp1")
ds.AddColumn("Temp2")
ds.Update()
dStats = ds.GetOutputDataObject( 1 )
dPrimary = dStats.GetBlock( 0 )
dDerived = dStats.GetBlock( 1 )
dPrimary.Dump( 15 )
dDerived.Dump( 15 )
# Stats filter to place 'into' the streaming filter
inter = vtkDescriptiveStatistics()
inter.AddColumn("Temp1")
inter.AddColumn("Temp2")
# Calculate online(streaming) descriptive statistics
print("# Calculate online descriptive statistics:")
ss = vtkStreamingStatistics()
ss.SetStatisticsAlgorithm(inter)
ss.SetInputConnection(psuedoStreamingData.GetOutputPort())
# Spin up the timer
app = QApplication(sys.argv)
stream = Timer()
sys.exit(app.exec_())
|
[
"shentianweipku@gmail.com"
] |
shentianweipku@gmail.com
|
bde2d17546e4aff0de68b15ffb0c5f017dea7c68
|
e6dab5aa1754ff13755a1f74a28a201681ab7e1c
|
/.parts/lib/django-1.3/django/conf/locale/zh_CN/formats.py
|
00fa8f4a3fd541170626603a495a7c857d6c9a15
|
[] |
no_license
|
ronkagan/Euler_1
|
67679203a9510147320f7c6513eefd391630703e
|
022633cc298475c4f3fd0c6e2bde4f4728713995
|
refs/heads/master
| 2021-01-06T20:45:52.901025
| 2014-09-06T22:34:16
| 2014-09-06T22:34:16
| 23,744,842
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 101
|
py
|
/home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.3/django/conf/locale/zh_CN/formats.py
|
[
"ron.y.kagan@gmail.com"
] |
ron.y.kagan@gmail.com
|