blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3481718d73cd06e14af89d79412405e9ae54e588 | aa5a8163c450a7ca7f4d3e7483213bb3642e0824 | /map/google/main.py | 6a8dbbb4d4e21e23144734bc0d11b43403bfd5b9 | [
"Apache-2.0"
] | permissive | kosyachniy/dev | e068bf3c9ad4d3808c70e5eb2afdafc2ef796482 | 41f58e72a397c7ff0df26dfa3e19dc64c8eff1d2 | refs/heads/main | 2023-06-25T10:36:56.333731 | 2023-06-15T01:26:19 | 2023-06-15T01:26:19 | 89,030,562 | 21 | 4 | Apache-2.0 | 2023-06-02T20:19:17 | 2017-04-21T23:13:54 | HTML | UTF-8 | Python | false | false | 1,535 | py | import json
from datetime import datetime
import googlemaps

# Load the Google Maps API key from a local JSON config file.
# Expected structure: {"google": {"maps": {"key": "..."}}}
with open('keys.json', 'r') as file:
    KEY = json.loads(file.read())['google']['maps']['key']

gmaps = googlemaps.Client(key=KEY)

# Geocoding: coordinates from a street address
address = '1600 Amphitheatre Parkway, Mountain View, CA'
coords = gmaps.geocode(address)
print(coords)

# Reverse geocoding: address from coordinates
coords = (40.714224, -73.961452)
address = gmaps.reverse_geocode(coords)
print(address)

# Directions: public-transit route between two places, departing now
now = datetime.now()
directions_result = gmaps.directions(
    'Sydney Town Hall',
    'Parramatta, NSW',
    mode="transit",
    departure_time=now,
)
print(directions_result)

# Distance matrix: travel distance/time for every origin-destination pair
origins = [
    'Perth, Australia',
    'Sydney, Australia',
    'Melbourne, Australia',
    'Adelaide, Australia',
]
destinations = [
    'Blue Mountains, Australia',
    'Bungle Bungles, Australia',
    'The Pinnacles, Australia',
]
matrix = gmaps.distance_matrix(origins, destinations)
print(matrix)

# Place search within a radius -- find_place variant kept for reference
# (uses a circular location bias instead of places_nearby)
# place = gmaps.find_place(
#     'Restaurant',
#     'textquery',
#     fields=[
#         'place_id',
#         'geometry/location',
#         'name',
#         'formatted_address',
#         'photos',
#         'price_level',
#         'rating',
#         'types',
#     ],
#     location_bias='circle:0.5@47.390325,8.515934',
# )
# print(place)

geo = {
    'lat': 47.390325,
    'lng': 8.515934,
}
radius = 1000  # in meters
category = 'Food'
# Nearby search: places matching `category` within `radius` of `geo`
places = gmaps.places_nearby(
    location=(geo['lat'], geo['lng']),
    radius=radius,
    keyword=category,
)['results']
print(places) | [
"polozhev@mail.ru"
] | polozhev@mail.ru |
7ecaf8a263fb938f42a3db5c6b5a11abb11136b8 | 3a9b154aa9d5e379683476f80f30630bf44d2102 | /Server_v1/amazon/migrations/0014_auto_20190805_1705.py | e5c8d37fc9418b6c6303cd36e306d47c4f28f273 | [] | no_license | KevinDon/py_amazon_analysis | 81995e360d2b536e1df6e515aae9457054edae29 | 13b5fbb046ca6516ac3a47e8f7867baf358011f4 | refs/heads/master | 2022-12-13T00:27:27.511783 | 2019-08-14T11:45:53 | 2019-08-14T11:45:53 | 185,160,162 | 0 | 1 | null | 2022-12-10T05:38:15 | 2019-05-06T08:56:40 | TSQL | UTF-8 | Python | false | false | 1,255 | py | # Generated by Django 2.2 on 2019-08-05 09:05
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add the AmazonProductCategoryKeywordRelation through-model and a
    category<->keyword many-to-many relation routed through it."""

    dependencies = [
        ('amazon', '0013_auto_20190802_1106'),
    ]

    operations = [
        # Explicit through table joining a product category to a keyword.
        migrations.CreateModel(
            name='AmazonProductCategoryKeywordRelation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('amazon_category_id', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='amazon.AmazonProductCategoryModel', verbose_name='Amazon Category')),
                ('amazon_keyword_id', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='amazon.SkuKeywordModel', verbose_name='Amazon Keyword')),
            ],
            options={
                'db_table': 'amazon_product_category_keyword_relation',
            },
        ),
        # M2M field on the category model, routed through the relation above.
        migrations.AddField(
            model_name='amazonproductcategorymodel',
            name='keyword',
            field=models.ManyToManyField(through='amazon.AmazonProductCategoryKeywordRelation', to='amazon.SkuKeywordModel', verbose_name='Keyword'),
        ),
    ]
| [
"kevintang002@gmail.com"
] | kevintang002@gmail.com |
fc217223fd2c75a5608bbcb6e46372fa4b31ff90 | 9ef35f89227d474f3664d27dbe3ba63bd52f8422 | /toripscanner/toripscanner.py | 8f515aaa5a0e7c397fec030a71b223c72779dbb7 | [] | no_license | oftc/toripscanner | 9ec7e4feac39172247b678a7d85fdca83f73fde0 | f0365bd3d25b49e786ceac010796084e431ad2ec | refs/heads/master | 2023-07-02T08:49:44.490859 | 2021-08-15T03:13:22 | 2021-08-15T03:13:22 | 390,759,119 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,201 | py | import logging
from argparse import ArgumentParser
from typing import Dict, Any
import toripscanner.cmd.scan
import toripscanner.cmd.parse
from . import __version__
from .config import get_config, config_logging
log = logging.getLogger(__name__)
def create_parser():
    """Build the top-level argument parser and attach per-command subparsers."""
    parser = ArgumentParser()
    parser.add_argument('--version', action='version', version=__version__)
    parser.add_argument(
        '-c', '--config', help='Path to toripscanner config file')
    subparsers = parser.add_subparsers(dest='cmd')
    # Each command module registers its own subparser and arguments.
    for command_module in (toripscanner.cmd.scan, toripscanner.cmd.parse):
        command_module.gen_parser(subparsers)
    return parser
def overwrite_conf(args, conf) -> None:
    """Apply command-line overrides to the parsed configuration.

    Currently a deliberate no-op: no arguments overwrite configuration
    values at this time.
    """
    return None
# This function needs **some sort** of type annotation so that mypy will check
# the things it does. Adding the return value (e.g. '-> None') is enough
def call_real_main(args, conf) -> None:
    ''' Figure out what command the user gave and call into that
    command's main function where the real work begins to happen. The only
    logic here should be figuring out what command's main to call. '''
    # Most (actually, all as of right now) command's main functions take these
    # arguments
    def_args = [args, conf]
    def_kwargs: Dict[str, Any] = {}
    # How to call in to each command's main
    cmds = {
        'scan': {
            'f': toripscanner.cmd.scan.main,
            'a': def_args, 'kw': def_kwargs,
        },
        'parse': {
            'f': toripscanner.cmd.parse.main,
            'a': def_args, 'kw': def_kwargs,
        },
    }
    # The keys in the `cmds` dict must be the same as each command specified in
    # its gen_parser(...) function, thus it will be in `cmds`. args.cmd will
    # also be non-None because our caller must have checked that already.
    assert args.cmd in cmds
    # Here we go!
    cmd = cmds[args.cmd]
    # BUG FIX: keyword arguments must be expanded with ** -- the original
    # used *cmd['kw'], which would pass the dict's KEYS as extra positional
    # arguments the moment def_kwargs becomes non-empty.
    return cmd['f'](*cmd['a'], **cmd['kw'])  # type: ignore
def main() -> None:
    """Command-line entry point (`toripscanner ...`).

    Parse the command line and configuration file, set up logging, apply
    any argument-based config overrides, then dispatch to the selected
    subcommand's main function.
    """
    arg_parser = create_parser()
    parsed = arg_parser.parse_args()
    # No subcommand given: show usage and bail out.
    if parsed.cmd is None:
        arg_parser.print_help()
        return
    try:
        conf = get_config(parsed.config)
    except FileNotFoundError as err:
        log.critical('Unable to open a config file: %s', err)
        return
    assert conf
    config_logging(conf)
    overwrite_conf(parsed, conf)
    call_real_main(parsed, conf)
| [
"sirmatt@ksu.edu"
] | sirmatt@ksu.edu |
b4c37d818e3bcccfd45e9c2b5690f7cbafa56c01 | a0664f0d3950a7147f84a317b2e417e0433a4ae4 | /test/test_invoice_billing_address.py | 431b1384493ea8ad7498fc318d0b7f8ca6793de5 | [] | no_license | reepay/reepay-checkout-python | 1e748893a970d28c6b0242bc7d26aa21325abb32 | 8bbd36219335a1fc65f857ac537ff4931bc6e5c7 | refs/heads/master | 2020-10-01T20:10:29.660872 | 2019-12-12T13:39:46 | 2019-12-12T13:39:46 | 227,615,447 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 918 | py | # coding: utf-8
"""
Reepay Checkout API
Reepay Checkout REST API # noqa: E501
OpenAPI spec version: 1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.invoice_billing_address import InvoiceBillingAddress # noqa: E501
from swagger_client.rest import ApiException
class TestInvoiceBillingAddress(unittest.TestCase):
    """Unit-test stubs for the InvoiceBillingAddress model."""

    def setUp(self):
        """No fixtures required yet."""
        return None

    def tearDown(self):
        """Nothing to clean up."""
        return None

    def testInvoiceBillingAddress(self):
        """Placeholder test for InvoiceBillingAddress.

        FIXME: construct the object with mandatory attributes using example
        values, e.g.
        ``swagger_client.models.invoice_billing_address.InvoiceBillingAddress()``.
        """
        return None


if __name__ == '__main__':
    unittest.main()
| [
"robert@reepay.com"
] | robert@reepay.com |
a3da31f13b5f6cd102d61aee6848e0d8c48d0510 | d01c2f82838e246076b0fd3514bc21119a4ee792 | /guillotina/cookiecutter/application/{{cookiecutter.package_name}}/{{cookiecutter.package_name}}/__init__.py | 18d4c2bcbdcb48e447eb79bd02cc35918d93172d | [
"BSD-2-Clause"
] | permissive | jordic/guillotina | 0940a935264aee189daf4a21b50f0efc1650ffbd | 8845454a784e797b90951580e1ab5fa9172055e1 | refs/heads/master | 2020-03-07T15:37:30.139620 | 2018-06-22T16:37:40 | 2018-06-22T16:37:40 | 127,559,927 | 0 | 0 | BSD-2-Clause | 2018-03-31T18:30:34 | 2018-03-31T18:30:33 | null | UTF-8 | Python | false | false | 302 | py | from guillotina import configure
# Settings merged into guillotina's global application settings when this
# add-on is loaded.
app_settings = {
    # provide custom application settings here...
}
def includeme(root):
    """Application initialization entry point.

    Scans the listed modules so their ``configure`` registrations
    (services, installers, ...) take effect.
    """
    for dotted_name in (
        '{{cookiecutter.package_name}}.api',
        '{{cookiecutter.package_name}}.install',
    ):
        configure.scan(dotted_name)
| [
"vangheem@gmail.com"
] | vangheem@gmail.com |
a79abc6d3e3ba8d2fd884627dc5bfe8c07440687 | 7c79c8caee77d08aa05cdc59eb68e569abf54a7e | /ics 32/ics 32 larc/animal_class.py | d80f309fccc67c06da02e0f1e1c7808aca2fe846 | [] | no_license | solomc1/python | 2e4715cc24e7b23d91c879fc95954f615a615982 | 119e388fb6f4ab42f581e48393919d4052a08ef6 | refs/heads/master | 2021-01-17T16:48:02.671810 | 2016-07-29T05:27:50 | 2016-07-29T05:27:50 | 64,452,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 628 | py | #Create 4 animal classes that have functions growl and eat
#Ask a user which animal they want and make it growl and eat
class Zebra:
    # NOTE(review): despite the name, growl() reports eating; it prints
    # rather than returning a string, so the annotation is None, not str.
    def growl(self) -> None:
        """Print what the zebra is eating."""
        print("I'm eating grass...")
class Giraffe:
    # NOTE(review): growl() prints and returns None; annotation fixed
    # accordingly (was incorrectly ->str).
    def growl(self) -> None:
        """Print what the giraffe is eating."""
        print("I'm eating cupcakes...")
class Moose:
    # NOTE(review): growl() prints and returns None; annotation fixed
    # accordingly (was incorrectly ->str).
    def growl(self) -> None:
        """Print what the moose is eating."""
        print("I'm eating pho")
class Tiger:
    # NOTE(review): growl() prints and returns None; annotation fixed
    # accordingly (was incorrectly ->str).
    def growl(self) -> None:
        """Print what the tiger is eating."""
        print("I'm eating you...")
# Map each menu letter to its animal class.  This replaces the if/elif
# chain and fixes the original bug where 'm' and 't' constructed the
# animal without assigning it, so `animal.growl()` raised NameError.
ANIMALS = {'z': Zebra, 'g': Giraffe, 'm': Moose, 't': Tiger}

u_i = input("Choose an animal: ").lower()
if u_i in ANIMALS:
    animal = ANIMALS[u_i]()
    animal.growl()
else:
    # Unknown choice: report it instead of crashing with NameError.
    print("Unknown animal choice:", u_i)
| [
"solomc1@uci.edu"
] | solomc1@uci.edu |
52f46ec16cb48b6b7c28fce60dffa9fca6a97c14 | 2899430814db2d06f8f5e105e02c731047013f93 | /backend/users/migrations/0002_auto_20200828_1219.py | 5e24b577abdd99c6db1761c8b96e6c31990cae83 | [] | no_license | crowdbotics-apps/project-1-19840 | 8de5a8e8f07aa35a7445c5943613766b793b0790 | 2f9c890ba6bc7d734f474a5141ad9d9018725761 | refs/heads/master | 2022-12-04T01:51:43.336555 | 2020-08-28T12:19:53 | 2020-08-28T12:19:53 | 291,038,994 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 627 | py | # Generated by Django 2.2.15 on 2020-08-28 12:19
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a many-to-many `group` relation to the custom user model and
    relax the `name` field to be optional."""

    dependencies = [
        ('course', '0001_initial'),
        ('users', '0001_initial'),
    ]

    operations = [
        # Users may belong to any number of course groups.
        migrations.AddField(
            model_name='user',
            name='group',
            field=models.ManyToManyField(blank=True, related_name='user_group', to='course.Group'),
        ),
        # Allow the display name to be empty or NULL.
        migrations.AlterField(
            model_name='user',
            name='name',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
8e8764ab12c5d3b41e59f47cd5a9fbb3c7dd5edc | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02618/s903697509.py | c5447f9b8ecd4c99caea4697471af46080b6fba0 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,955 | py | from random import randint, random, seed
from math import exp
import sys
input = sys.stdin.buffer.readline
INF = 9223372036854775808
def calc_score(D, C, S, T):
    """Return the total satisfaction score for the schedule T.

    Day d (0-indexed) holds contest T[d] (0-indexed).  Each day adds the
    gain S[d][T[d]] and subtracts, for every contest, a dissatisfaction
    penalty proportional to the days since it was last held.
    """
    total = 0
    last_held = [0] * 26  # 1-indexed day each contest was last held (0 = never)
    for day, contest in enumerate(T, start=1):
        last_held[contest] = day
        total += S[day - 1][contest]
        total -= sum((day - held) * cost
                     for held, cost in zip(last_held, C))
    return total
def update_score(D, C, S, T, score, ct, ci):
    """
    Return the score of the schedule obtained by changing day ct's contest
    to ci, computed as an incremental update of `score` (the score of the
    unmodified T).  T itself is NOT modified.
    ct: day to change, 0-indexed
    ci: new contest for that day, 0-indexed
    """
    new_score = score
    last = [0]*26  # day each contest was last held (1-indexed; 0 = never)
    prei = T[ct]  # contest previously scheduled on day ct
    # First pass: add back the penalties that the OLD schedule charged for
    # the two affected contests (prei and ci); other contests are unchanged.
    for d, t in enumerate(T, start=1):
        last[t] = d
        new_score += (d - last[prei])*C[prei]
        new_score += (d - last[ci])*C[ci]
    last = [0]*26
    # Second pass: subtract the penalties those two contests incur under
    # the MODIFIED schedule (ci held on day ct instead of prei).
    for d, t in enumerate(T, start=1):
        if d-1 == ct:
            last[ci] = d
        else:
            last[t] = d
        new_score -= (d - last[prei])*C[prei]
        new_score -= (d - last[ci])*C[ci]
    # Swap the one-day satisfaction gain for day ct.
    new_score -= S[ct][prei]
    new_score += S[ct][ci]
    return new_score
def update_swap_score(D, C, S, T, score, ct1, ct2):
    """
    Return the score after swapping the contests held on days ct1 and ct2,
    computed as an incremental update of `score`.  T itself is not modified.

    NOTE(review): only the dissatisfaction penalties are re-computed; the
    per-day gain terms S[ct1][.] / S[ct2][.] are left untouched even though
    they change whenever the two contests differ.  This looks like a bug,
    but the function appears to be unused (local_search performs swaps via
    two update_score calls instead) -- verify before relying on it.
    """
    new_score = score
    new_T = T.copy()
    last = [0]*26  # day each contest was last held (1-indexed; 0 = never)
    ci1 = new_T[ct1]
    ci2 = new_T[ct2]
    # Add back the penalties charged to ci1/ci2 under the old order...
    for d, t in enumerate(new_T, start=1):
        last[t] = d
        new_score += (d - last[ci1])*C[ci1]
        new_score += (d - last[ci2])*C[ci2]
    last = [0]*26
    new_T[ct1], new_T[ct2] = new_T[ct2], new_T[ct1]
    # ...then subtract the penalties they incur after the swap.
    for d, t in enumerate(new_T, start=1):
        last[t] = d
        new_score -= (d - last[ci1])*C[ci1]
        new_score -= (d - last[ci2])*C[ci2]
    return new_score
def evaluate(D, C, S, T, k):
    """Score the partial schedule T, then project the dissatisfaction
    decay k additional days ahead (capped at day D) assuming no further
    contests are held."""
    total = 0
    last_held = [0] * 26  # 1-indexed day each contest was last held
    for day, contest in enumerate(T, start=1):
        last_held[contest] = day
        total += S[day - 1][contest]
        total -= sum((day - held) * cost
                     for held, cost in zip(last_held, C))
    # Look-ahead: charge only the decay penalty for the next k days.
    horizon = min(len(T) + k, D)
    for day in range(len(T) + 1, horizon + 1):
        total -= sum((day - held) * cost
                     for held, cost in zip(last_held, C))
    return total
def greedy(D, C, S):
    """Greedily build one schedule per lookahead k in 5..12 and return the
    (score, schedule) pair whose final evaluation is best."""
    candidates = []
    for lookahead in range(5, 13):
        schedule = []
        best_score = -INF
        for _ in range(D):
            # Pick the contest that maximizes the k-day lookahead score.
            best_score, best_contest = -INF, 0
            for contest in range(26):
                schedule.append(contest)
                trial = evaluate(D, C, S, schedule, lookahead)
                schedule.pop()
                if trial > best_score:
                    best_score, best_contest = trial, contest
            schedule.append(best_contest)
        candidates.append((best_score, schedule))
    return max(candidates, key=lambda pair: pair[0])
def local_search(D, C, S, score, T):
    """Simulated annealing over the schedule T (mutates T in place).

    Two move types: with ~99% probability change one day's contest; with
    ~1% probability swap two days.  Returns the best schedule found.
    `score` must be the score of the initial T.
    """
    # sTime = time()
    T0 = 2122.982998363944  # initial temperature
    T1 = 1  # final temperature
    # TL = 1.8
    Temp = T0
    # cnt = 0
    t = 0
    best_score = score
    best_T = T.copy()
    for cnt in range(48000):
        # Re-anneal every 200 iterations: geometric interpolation from T0
        # toward T1 (note the 160000-1 horizon exceeds the 48000 iterations,
        # so the temperature never fully reaches T1).
        if cnt % 200 == 0:
            t = cnt / (160000 - 1)
            Temp = pow(T0, 1-t) * pow(T1, t)
        sel = randint(1, 100)
        lim = random()  # acceptance threshold for worse moves
        if sel != 1:
            # Move 1: change the contest held on day ct to contest ci.
            ct = randint(0, D-1)
            ci = randint(0, 25)
            new_score = update_score(D, C, S, T, score, ct, ci)
            # Accept if strictly better, or probabilistically per the
            # Metropolis criterion.
            if score < new_score or \
               (lim < exp((new_score - score)/Temp)):
                T[ct] = ci
                score = new_score
        else:
            # Move 2: swap the contests held on days ct1 and ct2.
            ct1 = randint(0, D-1)
            ct2 = randint(0, D-1)
            ci1 = T[ct1]
            ci2 = T[ct2]
            # NOTE(review): the second update_score call evaluates against
            # the *unmodified* T, so the combined delta is only approximate
            # when the two changed days interact -- verify intended.
            new_score = update_score(D, C, S, T, score, ct1, ci2)
            new_score = update_score(D, C, S, T, new_score, ct2, ci1)
            if score < new_score or \
               (lim < exp((new_score - score)/Temp)):
                score = new_score
                T[ct1] = ci2
                T[ct2] = ci1
        # Track the best schedule seen so far.
        if best_score < score:
            best_score = score
            best_T = T.copy()
        # cnt += 1
    return best_T
if __name__ == '__main__':
    # Fixed RNG seed so runs are reproducible.
    seed(1)
    D = int(input())
    C = list(map(int, input().split()))
    S = [list(map(int, input().split())) for _ in range(D)]
    initial_score, schedule = greedy(D, C, S)
    schedule = local_search(D, C, S, initial_score, schedule)
    # Output is 1-indexed contest numbers, one per line.
    print('\n'.join(str(contest + 1) for contest in schedule))
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
676f959d3e7c34fdaac7aeaf0ceb46cc93f51d71 | 0e0d6e6332e8d783b1f1da961f496a647f88eb77 | /ryu/controller/dp_type.py | dc9704c0ea346117bf2ae99a2ebe0712778d4d34 | [
"Apache-2.0"
] | permissive | unifycore/ryu | 667a957c42a2e3c23780e52d3d72457fbd7487c8 | 3cdfd5957e5cc20a90dd6bc0070f1795f31afc44 | refs/heads/master | 2021-01-21T19:54:14.558501 | 2018-02-09T22:15:06 | 2018-02-09T22:15:06 | 14,217,663 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 955 | py | # Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2012 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Datapath type.
# At this moment this information is not used yet; the unknown type is
# defined as a placeholder.
# Switches are categorized by their roles, and an OpenFlow controller may
# want to handle a switch differently depending on its role.
#
# unknown: role not (yet) determined
#
UNKNOWN = 'UNKNOWN'
| [
"fujita.tomonori@lab.ntt.co.jp"
] | fujita.tomonori@lab.ntt.co.jp |
080ce4b6540e5d22f370f91df70e5a6331bf1ea9 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/databoxedge/azure-mgmt-databoxedge/azure/mgmt/databoxedge/v2020_09_01_preview/operations/_orders_operations.py | 0027939c3843f76b29a2a05808da1f79f8e213c7 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 34,694 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_by_data_box_edge_device_request(
    device_name: str, resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists all orders of a Data Box Edge device.

    AutoRest-generated helper: fills the URL template, the api-version query
    parameter and the Accept header.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2020-09-01-preview"] = kwargs.pop(
        "api_version", _params.pop("api-version", "2020-09-01-preview")
    )
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "deviceName": _SERIALIZER.url("device_name", device_name, "str"),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
    }

    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_get_request(device_name: str, resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
    """Build the GET request for the device's order resource.

    AutoRest-generated helper; the order name is fixed to ``default`` in
    the URL path.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2020-09-01-preview"] = kwargs.pop(
        "api_version", _params.pop("api-version", "2020-09-01-preview")
    )
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "deviceName": _SERIALIZER.url("device_name", device_name, "str"),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
    }

    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_create_or_update_request(
    device_name: str, resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PUT request that creates or updates the device's order.

    AutoRest-generated helper; the JSON order payload is attached by the
    caller, and Content-Type is only set when one was supplied.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2020-09-01-preview"] = kwargs.pop(
        "api_version", _params.pop("api-version", "2020-09-01-preview")
    )
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "deviceName": _SERIALIZER.url("device_name", device_name, "str"),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
    }

    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_delete_request(
    device_name: str, resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the DELETE request for the device's ``default`` order.

    AutoRest-generated helper: fills the URL template, the api-version query
    parameter and the Accept header.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2020-09-01-preview"] = kwargs.pop(
        "api_version", _params.pop("api-version", "2020-09-01-preview")
    )
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "deviceName": _SERIALIZER.url("device_name", device_name, "str"),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
    }

    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_dc_access_code_request(
    device_name: str, resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the POST request that retrieves the order's DC access code.

    AutoRest-generated helper targeting the ``listDCAccessCode`` action on
    the device's ``default`` order.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2020-09-01-preview"] = kwargs.pop(
        "api_version", _params.pop("api-version", "2020-09-01-preview")
    )
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default/listDCAccessCode",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "deviceName": _SERIALIZER.url("device_name", device_name, "str"),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
    }

    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
class OrdersOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.databoxedge.v2020_09_01_preview.DataBoxEdgeManagementClient`'s
:attr:`orders` attribute.
"""
models = _models
    def __init__(self, *args, **kwargs):
        # Accept (client, config, serializer, deserializer) either
        # positionally or as keyword arguments, as supplied by the generated
        # service client when it constructs this operations group.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    @distributed_trace
    def list_by_data_box_edge_device(
        self, device_name: str, resource_group_name: str, **kwargs: Any
    ) -> Iterable["_models.Order"]:
        """Lists all the orders related to a Data Box Edge/Data Box Gateway device.

        Lists all the orders related to a Data Box Edge/Data Box Gateway device.

        :param device_name: The device name. Required.
        :type device_name: str
        :param resource_group_name: The resource group name. Required.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either Order or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databoxedge.v2020_09_01_preview.models.Order]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2020-09-01-preview"] = kwargs.pop(
            "api_version", _params.pop("api-version", "2020-09-01-preview")
        )
        cls: ClsType[_models.OrderList] = kwargs.pop("cls", None)

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the templated request.  Subsequent pages:
            # follow the service-provided next_link, re-applying the client's
            # api-version to its query string.
            if not next_link:
                request = build_list_by_data_box_edge_device_request(
                    device_name=device_name,
                    resource_group_name=resource_group_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_by_data_box_edge_device.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Deserialize one page into (next_link, iterator of Order items).
            deserialized = self._deserialize("OrderList", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch one page; map non-200 responses to Azure exceptions.
            request = prepare_request(next_link)

            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    list_by_data_box_edge_device.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders"
    }
    @distributed_trace
    def get(self, device_name: str, resource_group_name: str, **kwargs: Any) -> _models.Order:
        """Gets a specific order by name.

        Gets a specific order by name.

        :param device_name: The device name. Required.
        :type device_name: str
        :param resource_group_name: The resource group name. Required.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Order or the result of cls(response)
        :rtype: ~azure.mgmt.databoxedge.v2020_09_01_preview.models.Order
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2020-09-01-preview"] = kwargs.pop(
            "api_version", _params.pop("api-version", "2020-09-01-preview")
        )
        cls: ClsType[_models.Order] = kwargs.pop("cls", None)

        # Build and send the GET request for the singleton "default" order.
        request = build_get_request(
            device_name=device_name,
            resource_group_name=resource_group_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        # Non-200 responses are mapped to the appropriate Azure exception.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("Order", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default"
    }
def _create_or_update_initial(
    self, device_name: str, resource_group_name: str, order: Union[_models.Order, IO], **kwargs: Any
) -> Optional[_models.Order]:
    """Send the initial create-or-update request for the device's default order.

    Returns the deserialized ``Order`` for a 200 response, or ``None`` when the
    service accepted the request for asynchronous processing (202). The public
    ``begin_create_or_update`` method wraps this call in an LRO poller.
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2020-09-01-preview"] = kwargs.pop(
        "api_version", _params.pop("api-version", "2020-09-01-preview")
    )
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[Optional[_models.Order]] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # Raw streams/bytes go on the wire unchanged; model objects are serialized.
    if isinstance(order, (IO, bytes)):
        _content = order
    else:
        _json = self._serialize.body(order, "Order")

    request = build_create_or_update_request(
        device_name=device_name,
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        content=_content,
        template_url=self._create_or_update_initial.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    # 200 carries the final Order body; 202 means the operation is in flight.
    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize("Order", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

_create_or_update_initial.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default"
}
# Typing-only overload: accepts a deserialized Order model as the request body.
@overload
def begin_create_or_update(
    self,
    device_name: str,
    resource_group_name: str,
    order: _models.Order,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> LROPoller[_models.Order]:
    """Creates or updates an order.

    Creates or updates an order.

    :param device_name: The order details of a device. Required.
    :type device_name: str
    :param resource_group_name: The resource group name. Required.
    :type resource_group_name: str
    :param order: The order to be created or updated. Required.
    :type order: ~azure.mgmt.databoxedge.v2020_09_01_preview.models.Order
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either Order or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databoxedge.v2020_09_01_preview.models.Order]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
# Typing-only overload: accepts a raw IO stream as the request body.
@overload
def begin_create_or_update(
    self,
    device_name: str,
    resource_group_name: str,
    order: IO,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> LROPoller[_models.Order]:
    """Creates or updates an order.

    Creates or updates an order.

    :param device_name: The order details of a device. Required.
    :type device_name: str
    :param resource_group_name: The resource group name. Required.
    :type resource_group_name: str
    :param order: The order to be created or updated. Required.
    :type order: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either Order or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databoxedge.v2020_09_01_preview.models.Order]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
@distributed_trace
def begin_create_or_update(
    self, device_name: str, resource_group_name: str, order: Union[_models.Order, IO], **kwargs: Any
) -> LROPoller[_models.Order]:
    """Creates or updates an order.

    Creates or updates an order.

    :param device_name: The order details of a device. Required.
    :type device_name: str
    :param resource_group_name: The resource group name. Required.
    :type resource_group_name: str
    :param order: The order to be created or updated. Is either a Order type or a IO type.
     Required.
    :type order: ~azure.mgmt.databoxedge.v2020_09_01_preview.models.Order or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either Order or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databoxedge.v2020_09_01_preview.models.Order]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2020-09-01-preview"] = kwargs.pop(
        "api_version", _params.pop("api-version", "2020-09-01-preview")
    )
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[_models.Order] = kwargs.pop("cls", None)
    polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)
    # Only issue the initial HTTP call when not resuming from a saved token.
    if cont_token is None:
        raw_result = self._create_or_update_initial(
            device_name=device_name,
            resource_group_name=resource_group_name,
            order=order,
            api_version=api_version,
            content_type=content_type,
            cls=lambda x, y, z: x,  # keep the raw pipeline response for the poller
            headers=_headers,
            params=_params,
            **kwargs
        )
    kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Final-state handler: deserialize the terminal response into an Order.
        deserialized = self._deserialize("Order", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Select the polling strategy: default ARM polling, no polling, or a
    # caller-provided PollingMethod instance.
    if polling is True:
        polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
    elif polling is False:
        polling_method = cast(PollingMethod, NoPolling())
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

begin_create_or_update.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default"
}
def _delete_initial(  # pylint: disable=inconsistent-return-statements
    self, device_name: str, resource_group_name: str, **kwargs: Any
) -> None:
    """Send the initial DELETE request for the device's default order.

    The public ``begin_delete`` method wraps this call in an LRO poller;
    200/202/204 are all acceptable initial responses.
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2020-09-01-preview"] = kwargs.pop(
        "api_version", _params.pop("api-version", "2020-09-01-preview")
    )
    cls: ClsType[None] = kwargs.pop("cls", None)

    request = build_delete_request(
        device_name=device_name,
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self._delete_initial.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200, 202, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Delete has no response body; only invoke the custom response hook if set.
    if cls:
        return cls(pipeline_response, None, {})

_delete_initial.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default"
}
@distributed_trace
def begin_delete(self, device_name: str, resource_group_name: str, **kwargs: Any) -> LROPoller[None]:
    """Deletes the order related to the device.

    Deletes the order related to the device.

    :param device_name: The device name. Required.
    :type device_name: str
    :param resource_group_name: The resource group name. Required.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2020-09-01-preview"] = kwargs.pop(
        "api_version", _params.pop("api-version", "2020-09-01-preview")
    )
    cls: ClsType[None] = kwargs.pop("cls", None)
    polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)
    # Only issue the initial DELETE when not resuming from a saved token.
    if cont_token is None:
        raw_result = self._delete_initial(  # type: ignore
            device_name=device_name,
            resource_group_name=resource_group_name,
            api_version=api_version,
            cls=lambda x, y, z: x,  # keep the raw pipeline response for the poller
            headers=_headers,
            params=_params,
            **kwargs
        )
    kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
        # Delete produces no body; only invoke the custom response hook if set.
        if cls:
            return cls(pipeline_response, None, {})

    # Select the polling strategy: default ARM polling, no polling, or a
    # caller-provided PollingMethod instance.
    if polling is True:
        polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
    elif polling is False:
        polling_method = cast(PollingMethod, NoPolling())
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

begin_delete.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default"
}
@distributed_trace
def list_dc_access_code(self, device_name: str, resource_group_name: str, **kwargs: Any) -> _models.DCAccessCode:
    """Gets the DCAccess Code.

    Gets the DCAccess Code.

    :param device_name: The device name. Required.
    :type device_name: str
    :param resource_group_name: The resource group name. Required.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DCAccessCode or the result of cls(response)
    :rtype: ~azure.mgmt.databoxedge.v2020_09_01_preview.models.DCAccessCode
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: Literal["2020-09-01-preview"] = kwargs.pop(
        "api_version", _params.pop("api-version", "2020-09-01-preview")
    )
    cls: ClsType[_models.DCAccessCode] = kwargs.pop("cls", None)

    request = build_list_dc_access_code_request(
        device_name=device_name,
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.list_dc_access_code.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize("DCAccessCode", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

list_dc_access_code.metadata = {
    "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default/listDCAccessCode"
}
| [
"noreply@github.com"
] | Azure.noreply@github.com |
dcc02ccfc560b15d59d43056ae81ae14687d84c7 | 9b20743ec6cd28d749a4323dcbadb1a0cffb281b | /11_Time_Series_Forecasting_with_Python/05/airline_boxcox_auto.py | 46d48d9fe18da8804d0ee8aaf3433f7d3b894dff | [] | no_license | jggrimesdc-zz/MachineLearningExercises | 6e1c7e1f95399e69bba95cdfe17c4f8d8c90d178 | ee265f1c6029c91daff172b3e7c1a96177646bc5 | refs/heads/master | 2023-03-07T19:30:26.691659 | 2021-02-19T08:00:49 | 2021-02-19T08:00:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | # automatically box-cox transform a time series
from matplotlib import pyplot
from pandas import DataFrame
from pandas import read_csv
from scipy.stats import boxcox

# Load the monthly airline-passengers series (first column is the date index).
# NOTE(review): `squeeze=` was removed in pandas 2.0; newer pandas needs
# `.squeeze("columns")` instead -- confirm the pinned pandas version.
series = read_csv('airline-passengers.csv', header=0, index_col=0, parse_dates=True, squeeze=True)
dataframe = DataFrame(series.values)
dataframe.columns = ['passengers']
# Box-Cox transform; with no lambda supplied, scipy estimates the
# log-likelihood-maximising lambda and returns it with the transformed data.
dataframe['passengers'], lam = boxcox(dataframe['passengers'])
print('Lambda: %f' % lam)
pyplot.figure(1)
# line plot of the transformed series
pyplot.subplot(211)
pyplot.plot(dataframe['passengers'])
# histogram of the transformed values
pyplot.subplot(212)
pyplot.hist(dataframe['passengers'])
pyplot.show()
| [
"jgrimes@jgrimes.tech"
] | jgrimes@jgrimes.tech |
3fdc94789dbc47b5910241a83c5ad57671bc9e35 | 03dea3c0db7c8fafda71d23c3c2595f563ffb335 | /SignalMC/test/AMSB_chargino900GeV_ctau10cm_step1.py | b612a64808e5b716cb361082e95b61d933ed4b7b | [] | no_license | Andersen98/DisappTrks | 3952e9bf8ba270e2d88aa2e8d9ef805cf25dfc46 | 140a5efdc4c51a30e5fced6d34b7813876c2f2ee | refs/heads/master | 2020-06-27T03:41:59.136790 | 2017-07-12T15:19:18 | 2017-07-12T15:19:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,070 | py | # Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: DisappTrks/SignalMC/python/AMSB_chargino900GeV_ctau10cm_NoFilter_13TeV.py --fileout file:AMSB_chargino700GeV_ctau10cm_step1.root --mc --eventcontent RAWSIM --customise SLHCUpgradeSimulations/Configuration/postLS1Customs.customisePostLS1,Configuration/DataProcessing/Utils.addMonitoring,SimG4Core/CustomPhysics/Exotica_HSCP_SIM_cfi,DisappTrks/SignalMC/genParticlePlusGeant.customizeProduce,DisappTrks/SignalMC/genParticlePlusGeant.customizeKeep --datatier GEN-SIM --conditions MCRUN2_71_V1::All --beamspot Realistic50ns13TeVCollision --step GEN,SIM --magField 38T_PostLS1 --python_filename AMSB_chargino900GeV_ctau10cm_step1.py --no_exec -n 46
import FWCore.ParameterSet.Config as cms

# CMSSW GEN+SIM step for an AMSB chargino signal sample (900 GeV, ctau=10cm).
process = cms.Process('SIM')

# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('SimGeneral.MixingModule.mixNoPU_cfi')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.Geometry.GeometrySimDB_cff')
process.load('Configuration.StandardSequences.MagneticField_38T_PostLS1_cff')
process.load('Configuration.StandardSequences.Generator_cff')
process.load('IOMC.EventVertexGenerators.VtxSmearedRealistic50ns13TeVCollision_cfi')
process.load('GeneratorInterface.Core.genFilterSummary_cff')
process.load('Configuration.StandardSequences.SimIdeal_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')

# Number of events to generate (set by the -n command line option above).
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(46)
)

# Input source
process.source = cms.Source("EmptySource")

process.options = cms.untracked.PSet(

)

# Production Info
process.configurationMetadata = cms.untracked.PSet(
    version = cms.untracked.string('$Revision: 1.19 $'),
    annotation = cms.untracked.string('DisappTrks/SignalMC/python/AMSB_chargino900GeV_ctau10cm_NoFilter_13TeV.py nevts:46'),
    name = cms.untracked.string('Applications')
)

# Output definition
process.RAWSIMoutput = cms.OutputModule("PoolOutputModule",
    splitLevel = cms.untracked.int32(0),
    eventAutoFlushCompressedSize = cms.untracked.int32(5242880),
    outputCommands = process.RAWSIMEventContent.outputCommands,
    fileName = cms.untracked.string('file:AMSB_chargino700GeV_ctau10cm_step1.root'),
    dataset = cms.untracked.PSet(
        filterName = cms.untracked.string(''),
        dataTier = cms.untracked.string('GEN-SIM')
    ),
    SelectEvents = cms.untracked.PSet(
        SelectEvents = cms.vstring('generation_step')
    )
)

# Additional output definition

# Other statements
process.genstepfilter.triggerConditions=cms.vstring("generation_step")
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'MCRUN2_71_V1::All', '')

# Pythia6 generator configured for AMSB chargino pair/associated production;
# the chargino (PDG 312 slot here) is kept stable so GEANT propagates it.
process.generator = cms.EDFilter("Pythia6GeneratorFilter",
    maxEventsToPrint = cms.untracked.int32(1),
    pythiaPylistVerbosity = cms.untracked.int32(3),
    comEnergy = cms.double(13000.0),
    particleFile = cms.untracked.string('DisappTrks/SignalMC/data/geant4_AMSB_chargino_900GeV_ctau10cm.slha'),
    filterEfficiency = cms.untracked.double(1.0),
    pythiaHepMCVerbosity = cms.untracked.bool(False),
    processFile = cms.untracked.string('SimG4Core/CustomPhysics/data/RhadronProcessList.txt'),
    useregge = cms.bool(False),
    slhaFile = cms.untracked.string('DisappTrks/SignalMC/data/AMSB_chargino_900GeV_Isajet780.slha'),
    massPoint = cms.untracked.int32(900),
    hscpFlavor = cms.untracked.string('stau'),
    PythiaParameters = cms.PSet(
        pythiaUESettings = cms.vstring('MSTU(21)=1 ! Check on possible errors during program execution',
            'MSTJ(22)=2 ! Decay those unstable particles',
            'PARJ(71)=10 . ! for which ctau 10 mm',
            'MSTP(33)=0 ! no K factors in hard cross sections',
            'MSTP(2)=1 ! which order running alphaS',
            'MSTP(51)=10042 ! structure function chosen (external PDF CTEQ6L1)',
            'MSTP(52)=2 ! work with LHAPDF',
            'PARP(82)=1.921 ! pt cutoff for multiparton interactions',
            'PARP(89)=1800. ! sqrts for which PARP82 is set',
            'PARP(90)=0.227 ! Multiple interactions: rescaling power',
            'MSTP(95)=6 ! CR (color reconnection parameters)',
            'PARP(77)=1.016 ! CR',
            'PARP(78)=0.538 ! CR',
            'PARP(80)=0.1 ! Prob. colored parton from BBR',
            'PARP(83)=0.356 ! Multiple interactions: matter distribution parameter',
            'PARP(84)=0.651 ! Multiple interactions: matter distribution parameter',
            'PARP(62)=1.025 ! ISR cutoff',
            'MSTP(91)=1 ! Gaussian primordial kT',
            'PARP(93)=10.0 ! primordial kT-max',
            'MSTP(81)=21 ! multiple parton interactions 1 is Pythia default',
            'MSTP(82)=4 ! Defines the multi-parton model'),
        processParameters = cms.vstring('IMSS(1) = 11 ! Spectrum from external SLHA file',
            'IMSS(21) = 33 ! LUN number for SLHA File (must be 33) ',
            'IMSS(22) = 33 ! Read-in SLHA decay table ',
            'MSEL = 0 ! General SUSY',
            'MSUB(226) = 1 ! to double chargino',
            'MSUB(229) = 1 ! to neutralino + chargino',
            'MDCY(312,1) = 0 ! set the chargino stable.'),
        parameterSets = cms.vstring('pythiaUESettings',
            'processParameters',
            'SLHAParameters'),
        SLHAParameters = cms.vstring('SLHAFILE = DisappTrks/SignalMC/data/AMSB_chargino_900GeV_Isajet780.slha')
    )
)

# Path and EndPath definitions
process.generation_step = cms.Path(process.pgen)
process.simulation_step = cms.Path(process.psim)
process.genfiltersummary_step = cms.EndPath(process.genFilterSummary)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.RAWSIMoutput_step = cms.EndPath(process.RAWSIMoutput)

# Schedule definition
process.schedule = cms.Schedule(process.generation_step,process.genfiltersummary_step,process.simulation_step,process.endjob_step,process.RAWSIMoutput_step)

# filter all path with the production filter sequence
for path in process.paths:
    getattr(process,path)._seq = process.generator * getattr(process,path)._seq

# customisation of the process.

# Automatic addition of the customisation function from DisappTrks.SignalMC.genParticlePlusGeant
from DisappTrks.SignalMC.genParticlePlusGeant import customizeProduce,customizeKeep

#call to customisation function customizeProduce imported from DisappTrks.SignalMC.genParticlePlusGeant
process = customizeProduce(process)

#call to customisation function customizeKeep imported from DisappTrks.SignalMC.genParticlePlusGeant
process = customizeKeep(process)

# Automatic addition of the customisation function from Configuration.DataProcessing.Utils
from Configuration.DataProcessing.Utils import addMonitoring

#call to customisation function addMonitoring imported from Configuration.DataProcessing.Utils
process = addMonitoring(process)

# Automatic addition of the customisation function from SLHCUpgradeSimulations.Configuration.postLS1Customs
from SLHCUpgradeSimulations.Configuration.postLS1Customs import customisePostLS1

#call to customisation function customisePostLS1 imported from SLHCUpgradeSimulations.Configuration.postLS1Customs
process = customisePostLS1(process)

# Automatic addition of the customisation function from SimG4Core.CustomPhysics.Exotica_HSCP_SIM_cfi
from SimG4Core.CustomPhysics.Exotica_HSCP_SIM_cfi import customise

#call to customisation function customise imported from SimG4Core.CustomPhysics.Exotica_HSCP_SIM_cfi
process = customise(process)

# End of customisation functions
| [
"ahart@cern.ch"
] | ahart@cern.ch |
47fc3d5c03c500f55864283af007e7e341651e9e | 50a690ab7db8fe98a620f3c54aabd90c3ff3e7f3 | /losses/triplet_loss_test.py | 6348bc6af5eac3386abfe5d64a0b4bfb51155046 | [] | no_license | yekeren/ADVISE-Image_ads_understanding | 590754909d2f4259a57d32591a15bea845586a0f | 2ea5e1405b1ab178b95f9c2cd9158b16847ac6a3 | refs/heads/master | 2021-10-02T08:01:29.193553 | 2018-11-29T16:32:25 | 2018-11-29T16:32:25 | 103,291,233 | 22 | 8 | null | null | null | null | UTF-8 | Python | false | false | 5,369 | py |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from losses import triplet_loss
slim = tf.contrib.slim
class TripletLossTest(tf.test.TestCase):
    """Unit tests for the example-mining helpers in ``losses.triplet_loss``.

    Each test builds a small graph with a placeholder distance matrix, runs
    the mining op in a session, and checks the returned (anchor, negative)
    index pairs.
    """

    def setUp(self):
        tf.logging.set_verbosity(tf.logging.INFO)

    def test1(self):
        """sparse_to_dense turns per-row labels into a boolean positive mask."""
        g = tf.Graph()
        with g.as_default():
            labels = tf.placeholder(shape=[4], dtype=tf.int32)
            positive_masks = tf.sparse_to_dense(
                tf.stack([tf.range(4), labels], 1),
                output_shape=tf.stack([4, 10]),
                sparse_values=True,
                default_value=False,
                validate_indices=True)

        with self.test_session(graph=g) as sess:
            mat = sess.run([positive_masks], feed_dict={
                labels: np.array([0, 1, 2, 3])})
            print(mat)

    def test_mine_random_examples(self):
        """Random mining yields N*N pairs and never pairs an index with itself."""
        g = tf.Graph()
        with g.as_default():
            distances = tf.placeholder(shape=[None, None], dtype=tf.float32)
            pos_indices, neg_indices = triplet_loss._mine_random_examples(distances, 4)

        with self.test_session(graph=g) as sess:
            pos, neg = sess.run([pos_indices, neg_indices],
                                feed_dict={distances: np.ones([4, 4])})
            self.assertEqual(pos.shape, (16,))
            self.assertEqual(neg.shape, (16,))
            # BUGFIX: was `xrange`, which is Python 2 only and raises
            # NameError under Python 3; `range` works on both.
            for i in range(16):
                self.assertNotEqual(pos[i], neg[i])

    def test_mine_all_examples(self):
        """All-pairs mining enumerates every off-diagonal (anchor, negative)."""
        g = tf.Graph()
        with g.as_default():
            distances = tf.placeholder(shape=[None, None], dtype=tf.float32)
            pos_indices, neg_indices = triplet_loss._mine_all_examples(distances)

        with self.test_session(graph=g) as sess:
            pos, neg = sess.run([pos_indices, neg_indices],
                                feed_dict={distances: np.ones([4, 4])})
            self.assertAllEqual(pos, np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]))
            self.assertAllEqual(neg, np.array([1, 2, 3, 0, 2, 3, 0, 1, 3, 0, 1, 2]))

            pos, neg = sess.run([pos_indices, neg_indices],
                                feed_dict={distances: np.ones([5, 5])})
            self.assertAllEqual(pos, np.array([0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3,
                                               3, 3, 3, 4, 4, 4, 4]))
            self.assertAllEqual(neg, np.array([1, 2, 3, 4, 0, 2, 3, 4, 0, 1, 3, 4, 0,
                                               1, 2, 4, 0, 1, 2, 3]))

    def test_mine_semi_hard_examples(self):
        """Semi-hard mining keeps pairs whose negative distance exceeds the positive."""
        g = tf.Graph()
        with g.as_default():
            distances = tf.placeholder(shape=[None, None], dtype=tf.float32)
            pos_indices, neg_indices = triplet_loss._mine_semi_hard_examples(distances)

        with self.test_session(graph=g) as sess:
            pos, neg = sess.run([pos_indices, neg_indices],
                                feed_dict={distances: np.array([
                                    [0, 1, 2, 3],
                                    [2, 0, 0, 3],
                                    [3, 1, 0, 0],
                                    [1, 3, 2, 0],
                                ])})
            self.assertAllEqual(pos, np.array([0, 0, 0, 1, 1, 2, 2, 3, 3, 3]))
            self.assertAllEqual(neg, np.array([1, 2, 3, 0, 3, 0, 1, 0, 1, 2]))

    def test_mine_hard_examples(self):
        """Hard mining returns the top_k closest negatives per anchor, in order."""
        g = tf.Graph()
        with g.as_default():
            distances = tf.placeholder(shape=[None, None], dtype=tf.float32)
            pos_indices, neg_indices = triplet_loss._mine_hard_examples(distances, 1)

        with self.test_session(graph=g) as sess:
            pos, neg = sess.run([pos_indices, neg_indices],
                                feed_dict={distances: np.array([
                                    [0, 1, 2, 3],
                                    [2, 0, 0, 3],
                                    [3, 1, 0, 0],
                                    [1, 3, 2, 0],
                                ])})
            self.assertAllEqual(pos, np.array([0, 1, 2, 3]))
            self.assertAllEqual(neg, np.array([1, 2, 3, 0]))

        g = tf.Graph()
        with g.as_default():
            distances = tf.placeholder(shape=[None, None], dtype=tf.float32)
            pos_indices, neg_indices = triplet_loss._mine_hard_examples(distances, 2)

        with self.test_session(graph=g) as sess:
            pos, neg = sess.run([pos_indices, neg_indices],
                                feed_dict={distances: np.array([
                                    [0, 1, 2, 3],
                                    [2, 0, 0, 3],
                                    [3, 1, 0, 0],
                                    [1, 3, 2, 0],
                                ])})
            self.assertAllEqual(pos, np.array([0, 0, 1, 1, 2, 2, 3, 3]))
            self.assertAllEqual(neg, np.array([1, 2, 2, 0, 3, 1, 0, 2]))

        g = tf.Graph()
        with g.as_default():
            distances = tf.placeholder(shape=[None, None], dtype=tf.float32)
            pos_indices, neg_indices = triplet_loss._mine_hard_examples(distances, 3)

        with self.test_session(graph=g) as sess:
            pos, neg = sess.run([pos_indices, neg_indices],
                                feed_dict={distances: np.array([
                                    [0, 1, 2, 3],
                                    [2, 0, 0, 3],
                                    [3, 1, 0, 0],
                                    [1, 3, 2, 0],
                                ])})
            self.assertAllEqual(pos, np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]))
            self.assertAllEqual(neg, np.array([1, 2, 3, 2, 0, 3, 3, 1, 0, 0, 2, 1]))

        # top_k larger than the number of candidates degrades to "all negatives".
        g = tf.Graph()
        with g.as_default():
            distances = tf.placeholder(shape=[None, None], dtype=tf.float32)
            pos_indices, neg_indices = triplet_loss._mine_hard_examples(distances, 10)

        with self.test_session(graph=g) as sess:
            pos, neg = sess.run([pos_indices, neg_indices],
                                feed_dict={distances: np.array([
                                    [0, 1, 2, 3],
                                    [2, 0, 0, 3],
                                    [3, 1, 0, 0],
                                    [1, 3, 2, 0],
                                ])})
            self.assertAllEqual(pos, np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]))
            self.assertAllEqual(neg, np.array([1, 2, 3, 2, 0, 3, 3, 1, 0, 0, 2, 1]))
if __name__ == '__main__':
    # Run all TripletLossTest cases via TensorFlow's test runner.
    tf.test.main()
| [
"yekeren.cn@gmail.com"
] | yekeren.cn@gmail.com |
dda29d32249fd96fba0acb1367105ab881c76624 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2493/60771/290955.py | 6c322d79efc369fa6ffdbc500c6fb16d7b654351 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | #05
# Read an array of n integers, then answer m queries; each query (left, right)
# prints the number of distinct values in the 1-indexed inclusive slice.
n = int(input())  # declared length of the array (the split below is trusted)
nums = [int(item) for item in input().split(" ")]
m = int(input())  # number of queries
for _ in range(m):
    query = input().split(" ")
    left = int(query[0])
    right = int(query[1])
    # A set deduplicates in O(k) per query; the original accumulated a list
    # and tested membership per element, which is O(k^2).
    print(len(set(nums[left - 1:right])))
"1069583789@qq.com"
] | 1069583789@qq.com |
a9c885d3f8fa322e689d4a1872d808eac8e7fd46 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /AjZBGWyPaA7rXFhi6_24.py | 23178b6d6d7a408d9c070ac8b3a17d6af6a24290 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116 | py |
def min_swaps(s1, s2):
    """Return half the count of positions at which *s1* and *s2* differ.

    Iterates over the indices of *s1*, so *s2* must be at least as long
    (a shorter *s2* raises IndexError, matching the original behavior).
    """
    mismatches = sum(1 for idx in range(len(s1)) if s1[idx] != s2[idx])
    return mismatches // 2
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
e24797ac9d7f429ee69a77a110980389a9e0edb5 | a583b9151b8ce9dd74aa7c3fc99e40055f1dcfb9 | /Coursera_PY_week8 12/DONE/3_Smallest_odd.py | aa0d2a49aa91847c449ac2dccba5b0bb7adb1b62 | [] | no_license | NineMan/Coursera_PY | df202768bc2f0a5ea06d0ed9985610e527d1a5c5 | dd9b6b085b66b979c88e5d72b26777848b6d2a9a | refs/heads/master | 2020-08-01T09:36:37.132012 | 2019-09-25T23:12:57 | 2019-09-25T23:12:57 | 210,953,054 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 181 | py | print(
min(
filter(
lambda x: x % 2 != 0,
map(
int,
input().split()
)
)
)
)
| [
"nelubinmv@inbox.ru"
] | nelubinmv@inbox.ru |
9a4b8392b7e0b2bcec18b40fa83f092083b22e9f | 67a9ea4302d669a142549b26bc5412b83eb10de6 | /django_projects/askdjango/blog/models.py | dd889c68d771487963d834b671c033841fa19829 | [] | no_license | Chojiwon/nits-1st | 9068c8ee66e3bd06c2758516b7f4f3a52fc7499e | cbbbe301356a33c9ffc6c5a6e3bc9a99492900d5 | refs/heads/master | 2021-04-29T04:43:20.498626 | 2017-01-04T07:31:19 | 2017-01-04T07:31:19 | 77,995,108 | 1 | 0 | null | 2017-01-04T08:09:08 | 2017-01-04T08:09:08 | null | UTF-8 | Python | false | false | 2,469 | py | from django.conf import settings
from django.core.files import File
from django.core.urlresolvers import reverse
from django.core.validators import RegexValidator, MinLengthValidator
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import pre_save
from blog.utils import thumbnail
class Post(models.Model):
title = models.CharField(max_length=100, validators=[MinLengthValidator(3)])
content = models.TextField()
photo = models.ImageField(blank=True, upload_to='blog/post/%Y/%m/%d')
point = models.CharField(max_length=100, blank=True,
validators=[RegexValidator(r'^[+-]?[\d\.]+,[+-]?[\d\.]+$')])
writer = models.ForeignKey(settings.AUTH_USER_MODEL) #'auth.User')
author = models.CharField(max_length=20)
tags = models.CharField(max_length=100, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
tag_set = models.ManyToManyField('Tag', blank=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return self.title
def get_absolute_url(self):
# return '/blog/{}'.format(self.pk)
return reverse('blog:post_detail', args=[self.pk])
def as_dict(self):
if self.photo:
photo_url = self.photo.url
else:
photo_url = None
return {
'id': self.id,
'title': self.title,
'content': self.content,
'photo_url': photo_url,
'point': self.point,
'writer': self.writer.username,
'author': self.author,
'tag_set': self.tag_set.all(),
'updated_at': self.updated_at,
}
def on_pre_save_post(sender, **kwargs):
post = kwargs['instance']
if post.photo:
max_width = 300
if post.photo.width > max_width or post.photo.height > max_width:
processed_f = thumbnail(post.photo.file, max_width, max_width)
post.photo.save(post.photo.name, File(processed_f), save=False)
pre_save.connect(on_pre_save_post, sender=Post)
class Comment(models.Model):
post = models.ForeignKey(Post)
message = models.TextField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
ordering = ['-id']
class Tag(models.Model):
name = models.CharField(max_length=20, unique=True)
def __str__(self):
return self.name
| [
"allieuslee@gmail.com"
] | allieuslee@gmail.com |
6f4af56fd692e852c5ba392e9e72feeff7190cf9 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/lutea.py | 6b01a8e7abb5c3c8b647db0341a37b46b6c69cf8 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 122 | py | ii = [('RogePAV.py', 2), ('RoscTTI2.py', 1), ('LyelCPG.py', 2), ('WestJIT2.py', 6), ('LeakWTI4.py', 1), ('WestJIT.py', 1)] | [
"prabhjyotsingh95@gmail.com"
] | prabhjyotsingh95@gmail.com |
6b83a0f94938c5f5af04087dbdf4e1ff26f7c824 | bc0938b96b86d1396cb6b403742a9f8dbdb28e4c | /aliyun-python-sdk-alidns/aliyunsdkalidns/request/v20150109/DescribeCustomLineRequest.py | 55dcf4f4440a920be0e554409b7718533fede2b0 | [
"Apache-2.0"
] | permissive | jia-jerry/aliyun-openapi-python-sdk | fb14d825eb0770b874bc123746c2e45efaf64a6d | e90f3683a250cfec5b681b5f1d73a68f0dc9970d | refs/heads/master | 2022-11-16T05:20:03.515145 | 2020-07-10T08:45:41 | 2020-07-10T09:06:32 | 278,590,780 | 0 | 0 | NOASSERTION | 2020-07-10T09:15:19 | 2020-07-10T09:15:19 | null | UTF-8 | Python | false | false | 1,540 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkalidns.endpoint import endpoint_data
class DescribeCustomLineRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Alidns', '2015-01-09', 'DescribeCustomLine','alidns')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_LineId(self):
return self.get_query_params().get('LineId')
def set_LineId(self,LineId):
self.add_query_param('LineId',LineId)
def get_Lang(self):
return self.get_query_params().get('Lang')
def set_Lang(self,Lang):
self.add_query_param('Lang',Lang) | [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
a969af4f57309f7a09076291ef2c2743f87a9e3a | fbba871f9a7076074f570df563881fd28c1826af | /src/anyconfig_configobj_backend/configobj_.py | 32aa1ae4dc2f6f8bd4fe18d6ff7b7948eb79257a | [
"MIT"
] | permissive | ssato/python-anyconfig-configobj-backend | 9cb254a283c2634ae15035dcae509574e08d718c | 1e6c3fb1389a8c1083a090f5e0f75e226d424def | refs/heads/master | 2023-07-27T00:05:41.811839 | 2023-07-09T06:00:54 | 2023-07-09T06:00:54 | 9,628,155 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,426 | py | #
# Copyright (C) 2013 - 2021 Satoru SATOH <satoru.satoh @ gmail.com>
# License: MIT
#
r"""Configobj backend:
- Format to support: configobj,
https://bit.ly/2TgURnL (https://configobj.readthedocs.io)
- Requirements: configobj (https://pypi.python.org/pypi/configobj/)
- Development Status :: 4 - Beta
- Limitations: AFAIK, configobj does not keep the order of configuration items
and not have options to change this behavior like configparser, so this
backend does not keep the order of configuration items even if the ac_ordered
option was used.
- Special options:
- All options except for 'infile' passed to configobj.ConfigObj.__init__
should work.
- See also: http://configobj.readthedocs.io/en/latest/configobj.html
Chnagelog:
.. versionchanged:: 0.5.0
- Now loading and dumping options are detected automatically from inspection
result if possible. Also these became not distinguished because these will
be passed to configobj.Configuration anyway.
"""
import inspect
import os
import configobj
import anyconfig.backend.base
try:
_LOAD_OPTS = [
a for a in inspect.getfullargspec(configobj.ConfigObj).args
if a not in {'self', 'infile'}
]
except (TypeError, AttributeError):
_LOAD_OPTS = ("options configspec encoding interpolation raise_errors"
"list_values create_empty file_error stringify"
"indent_type default_encoding unrepr write_empty_values"
"_inspec").split()
def make_configobj(cnf, **kwargs):
"""
Make a configobj.ConfigObj initalized with given config 'cnf'.
:param cnf: Configuration data
:param kwargs: optional keyword parameters passed to ConfigObj.__init__
:return: An initialized configobj.ConfigObj instance
"""
cobj = configobj.ConfigObj(**kwargs)
cobj.update(cnf)
return cobj
def load(path_or_strm, container, **opts):
"""
:param path_or_strm: input config file path or file/file-like object
:param container: callble to make a container object
:param opts: keyword options passed to :class:`configobj.ConfigObj`
:return: Mapping object
"""
return container(configobj.ConfigObj(path_or_strm, **opts))
class Parser(anyconfig.backend.base.StreamParser,
anyconfig.backend.base.BinaryLoaderMixin,
anyconfig.backend.base.BinaryDumperMixin):
"""
Parser for Ini-like config files which configobj supports.
"""
_cid = "configobj"
_type = "configobj"
_priority = 10
_load_opts = _LOAD_OPTS # options on dump will be just ignored.
_dump_opts = _LOAD_OPTS # Likewise.
_ordered = True
load_from_path = load_from_stream = anyconfig.backend.base.to_method(load)
def dump_to_string(self, cnf, **kwargs):
"""
Dump config 'cnf' to a string.
:param cnf: Configuration data to dump
:param kwargs: backend-specific optional keyword parameters :: dict
:return: string represents the configuration
"""
return os.linesep.join(make_configobj(cnf, **kwargs).write())
def dump_to_stream(self, cnf, stream, **kwargs):
"""
:param cnf: Configuration data to dump
:param stream: Config file or file-like object
:param kwargs: backend-specific optional keyword parameters :: dict
"""
make_configobj(cnf, **kwargs).write(stream)
# vim:sw=4:ts=4:et:
| [
"satoru.satoh@gmail.com"
] | satoru.satoh@gmail.com |
19e09283fdfa7849fae1f8328c74e4ee65f3411f | 1a6726fb62584f7787197ff404e30b012bc54f62 | /More Exercises Basic Syntax, Conditional Statements and Loops/Find the Largest.py | 635bf1d1edf92a3707c6c4727196f2dd3d9e61de | [] | no_license | zdravkob98/Fundamentals-with-Python-May-2020 | f7a69d1a534d92f3b14bc16ce5d8d9b8611d97dd | 74e69a486e582c397cdc2f98b3dffe655110d38a | refs/heads/main | 2022-12-30T21:43:57.682790 | 2020-10-06T17:05:32 | 2020-10-06T17:05:32 | 301,797,680 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 84 | py | number = input()
maximum = ''.join(sorted(number,reverse=True))
print(maximum)
| [
"zdravkobonev@abv.bg"
] | zdravkobonev@abv.bg |
a8b1c2f0d8f7ce94ae2e6536c5b1e375271987f9 | b9c43433f5f17c4f20d6ece1798e42e3d1d7e0e2 | /lists/models.py | a386118bfc326f57ea7cbb9e07c2575227352173 | [] | no_license | fireinrain/django_superlists | e6365fa51e57eb8b3dd91d88c7737ee9d2be1c05 | fe1d6655058772a534351578c2f6e0bac5601c08 | refs/heads/master | 2022-06-18T14:32:04.318953 | 2017-01-05T05:43:21 | 2017-01-05T05:43:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | from django.db import models
# Create your models here.
class List(models.Model):
listdb = models.TextField(default='')
class Item(models.Model):
text = models.TextField(default='')
list = models.ForeignKey(List,default=None)
| [
"575563079@qq.com"
] | 575563079@qq.com |
22568b30c1c3eb26116d7633f4d2bf182d8915a9 | a84538af8bf1f763a3d71d939744976425358b30 | /contrib/devtools/symbol-check.py | 363849d7e38a7e395c0f770b6dab7eee08f352c9 | [
"MIT",
"LicenseRef-scancode-public-domain"
] | permissive | YourLocalDundee/foxdcoin | 8de421b280a812e390249f14ed0b5892c546ebf1 | 9db505f6f32bd3e51bd2b2da533744c98cee23af | refs/heads/master | 2023-05-14T05:10:26.435417 | 2021-06-09T06:18:50 | 2021-06-09T06:18:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,231 | py | #!/usr/bin/env python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
A script to check that the (Linux) executables produced by gitian only contain
allowed gcc, glibc and libstdc++ version symbols. This makes sure they are
still compatible with the minimum supported Linux distribution versions.
Example usage:
find ../gitian-builder/build -type f -executable | xargs python contrib/devtools/symbol-check.py
'''
from __future__ import division, print_function, unicode_literals
import subprocess
import re
import sys
import os
# Debian 6.0.9 (Squeeze) has:
#
# - g++ version 4.4.5 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=g%2B%2B)
# - libc version 2.11.3 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=libc6)
# - libstdc++ version 4.4.5 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=libstdc%2B%2B6)
#
# Ubuntu 10.04.4 (Lucid Lynx) has:
#
# - g++ version 4.4.3 (http://packages.ubuntu.com/search?keywords=g%2B%2B&searchon=names&suite=lucid§ion=all)
# - libc version 2.11.1 (http://packages.ubuntu.com/search?keywords=libc6&searchon=names&suite=lucid§ion=all)
# - libstdc++ version 4.4.3 (http://packages.ubuntu.com/search?suite=lucid§ion=all&arch=any&keywords=libstdc%2B%2B&searchon=names)
#
# Taking the minimum of these as our target.
#
# According to GNU ABI document (http://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html) this corresponds to:
# GCC 4.4.0: GCC_4.4.0
# GCC 4.4.2: GLIBCXX_3.4.13, CXXABI_1.3.3
# (glibc) GLIBC_2_11
#
MAX_VERSIONS = {
'GCC': (4,4,0),
'CXXABI': (1,3,3),
'GLIBCXX': (3,4,13),
'GLIBC': (2,11)
}
# See here for a description of _IO_stdin_used:
# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=634261#109
# Ignore symbols that are exported as part of every executable
IGNORE_EXPORTS = {
b'_edata', b'_end', b'_init', b'__bss_start', b'_fini', b'_IO_stdin_used', b'stdin', b'stdout', b'stderr'
}
READELF_CMD = os.getenv('READELF', '/usr/bin/readelf')
CPPFILT_CMD = os.getenv('CPPFILT', '/usr/bin/c++filt')
# Allowed NEEDED libraries
ALLOWED_LIBRARIES = {
# foxdcoind and foxdcoin-qt
b'libgcc_s.so.1', # GCC base support
b'libc.so.6', # C library
b'libpthread.so.0', # threading
b'libanl.so.1', # DNS resolve
b'libm.so.6', # math library
b'librt.so.1', # real-time (clock)
b'ld-linux-x86-64.so.2', # 64-bit dynamic linker
b'ld-linux.so.2', # 32-bit dynamic linker
# foxdcoin-qt only
b'libX11-xcb.so.1', # part of X11
b'libX11.so.6', # part of X11
b'libxcb.so.1', # part of X11
b'libfontconfig.so.1', # font support
b'libfreetype.so.6', # font parsing
b'libdl.so.2' # programming interface to dynamic linker
}
class CPPFilt(object):
'''
Demangle C++ symbol names.
Use a pipe to the 'c++filt' command.
'''
def __init__(self):
self.proc = subprocess.Popen(CPPFILT_CMD, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
def __call__(self, mangled):
self.proc.stdin.write(mangled + b'\n')
self.proc.stdin.flush()
return self.proc.stdout.readline().rstrip()
def close(self):
self.proc.stdin.close()
self.proc.stdout.close()
self.proc.wait()
def read_symbols(executable, imports=True):
'''
Parse an ELF executable and return a list of (symbol,version) tuples
for dynamic, imported symbols.
'''
p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Could not read symbols for %s: %s' % (executable, stderr.strip()))
syms = []
for line in stdout.split(b'\n'):
line = line.split()
if len(line)>7 and re.match(b'[0-9]+:$', line[0]):
(sym, _, version) = line[7].partition(b'@')
is_import = line[6] == b'UND'
if version.startswith(b'@'):
version = version[1:]
if is_import == imports:
syms.append((sym, version))
return syms
def check_version(max_versions, version):
if b'_' in version:
(lib, _, ver) = version.rpartition(b'_')
else:
lib = version
ver = '0'
ver = tuple([int(x) for x in ver.split(b'.')])
if not lib in max_versions:
return False
return ver <= max_versions[lib]
def read_libraries(filename):
p = subprocess.Popen([READELF_CMD, '-d', '-W', filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
libraries = []
for line in stdout.split(b'\n'):
tokens = line.split()
if len(tokens)>2 and tokens[1] == b'(NEEDED)':
match = re.match(b'^Shared library: \[(.*)\]$', b' '.join(tokens[2:]))
if match:
libraries.append(match.group(1))
else:
raise ValueError('Unparseable (NEEDED) specification')
return libraries
if __name__ == '__main__':
cppfilt = CPPFilt()
retval = 0
for filename in sys.argv[1:]:
# Check imported symbols
for sym,version in read_symbols(filename, True):
if version and not check_version(MAX_VERSIONS, version):
print('%s: symbol %s from unsupported version %s' % (filename, cppfilt(sym).decode('utf-8'), version.decode('utf-8')))
retval = 1
# Check exported symbols
for sym,version in read_symbols(filename, False):
if sym in IGNORE_EXPORTS:
continue
print('%s: export of symbol %s not allowed' % (filename, cppfilt(sym).decode('utf-8')))
retval = 1
# Check dependency libraries
for library_name in read_libraries(filename):
if library_name not in ALLOWED_LIBRARIES:
print('%s: NEEDED library %s is not allowed' % (filename, library_name.decode('utf-8')))
retval = 1
sys.exit(retval)
| [
"foxrtb@gmail.com"
] | foxrtb@gmail.com |
c1447b9ff7375bfeff1e7c5f144b90d30ccd58b2 | 148ac8d601369aaae6918cf0a55a4d4f5afb5e75 | /dt_tennis.py | 2bb3aad648def783a4a020597f5b6c2d6fa4e52a | [] | no_license | MrVersatile007/ML-with-Rishi | a7800e27f5cbac9b68d526469beb380ed59bb029 | db76aa26ef5d349237d0fa1f0bdd677352dfb392 | refs/heads/main | 2023-06-02T15:53:00.902907 | 2021-06-27T15:15:01 | 2021-06-27T15:15:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 468 | py | # -*- coding: utf-8 -*-
"""
Created on Tue May 25 20:45:59 2021
@author: RISHBANS
"""
import pandas as pd
tennis_data = pd.read_csv("tennis.csv")
from sklearn.preprocessing import OrdinalEncoder
from sklearn.tree import DecisionTreeClassifier
o_e = OrdinalEncoder()
X = tennis_data.drop(columns=['play'])
y = tennis_data.play
X = o_e.fit_transform(X)
dt = DecisionTreeClassifier(criterion='entropy')
dt.fit(X, y)
print(o_e.categories_)
dt.predict([[1,0,1,0]])
| [
"rishibansal02@gmail.com"
] | rishibansal02@gmail.com |
69c0a253a7fc8a01b80f4a21ec3f0012554c9e35 | a71d12bf0cd654df6d4bf2ea472061def6d88c9e | /oracle/models/generative.py | 9e426120023f162cb260cf3cb93481fa9aaa98a5 | [
"MIT"
] | permissive | andycasey/original-oracle | b68384a23a4fa47d66063df204eabf96e128401d | 0f204a0ff952cc97904c1d1bb53e14b799bb877e | refs/heads/master | 2021-01-20T10:54:37.942183 | 2014-10-02T11:29:19 | 2014-10-02T11:29:19 | 23,349,548 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33,667 | py | # coding: utf-8
from __future__ import absolute_import, print_function
""" Generative Model for Stellar Spectra """
__author__ = "Andy Casey <arc@ast.cam.ac.uk>"
import collections
import logging
from multiprocessing import cpu_count
from functools import partial
from time import time
import emcee
import numpy as np
from scipy import optimize as op, stats
from scipy.ndimage import gaussian_filter1d
from oracle import si, specutils, utils
from oracle.models.model import Model
logger = logging.getLogger("oracle")
class GenerativeModel(Model):
def __init__(self, configuration):
"""
A class to probabilistically model stellar spectra. This class performs
on-the-fly synthesis to generate data for some given theta.
:param configuration:
The path to the configuration file that contains settings for the
class, or a string containing the configuration. Only YAML-style
formats are accepted.
:type configuration: str
"""
super(GenerativeModel, self).__init__(configuration)
self._num_observed_channels = -1
return None
@property
def parameters(self):
""" Return the model parameters. """
config = self.config
any_channel_parameters = (config["model"]["continuum"] \
or config["model"]["instrumental_broadening"])
if any_channel_parameters and 0 >= self._num_observed_channels:
raise ValueError("Cannot determine total number of model parameters"\
" until the model has been supplied some data. We have to give "\
"model parameters to each observed channel, but we don't know " \
"how many observed channels there are yet.")
# [TODO] This is a complicated thing. Perhaps link to online doc page
# [TODO] Check for 1D/<3D> model atmospheres to determine free stellar
# parameters.
parameters = ["teff", "logg", "[M/H]", "xi"]
# Any redshift parameters?
if config["model"]["redshift"]:
# [TODO] Allow individual channel redshifts
parameters.extend(["z_{0}".format(i) for i in range(self._num_observed_channels)])
# Any continuum parameters?
if config["model"]["continuum"]:
# Don't create unnecessary parameters. Only create parameters for
# channels that are defined *and* have data.
num_channels_with_continuum = min([
len(config["continuum"]["order"]),
self._num_observed_channels
])
for i, each in enumerate(config["continuum"]["order"]):
parameters.extend(["c_{0}_{1}".format(i, j) \
for j in range(num_channels_with_continuum)])
# Any doppler broadening?
if config["model"]["instrumental_broadening"]:
# [TODO] You idiot.
parameters.extend(["instrumental_resolution_{0}".format(i) \
for i in range(self._num_observed_channels)])
# Any outlier parameters?
if config["model"]["outliers"]:
parameters.extend(["Po", "Vo", "Ys"])
# [TODO] Abundance parameters.
# Any element parameters (other than Fe)?
#if "elements" in config["model"]:
# parameters.extend(["log_{0}".format(each) \
# for each in set(config["model"]["elements"]).difference("Fe")])
# [TODO] Any telluric parameters?
return parameters
def __call__(self, data, synth_kwargs=None, **theta):
"""
Generate data for the given :math:`\\theta`.
:param data:
The observed spectra.
:type data:
list of :class:`oracle.specutils.Spectrum1D` objects
:param synth_kwargs: [optional]
Keyword arguments to pass directly to :func:`si.synthesise`.
:type synth_kwargs:
dict
:keyword theta:
A dictionary mapping of the model parameters (keys) and values.
:type theta:
dict
:raises KeyError:
If a model parameter is missing from ``**theta.``
:returns:
A list of two-column arrays that represent dispersion and flux for
each observed channel in ``data``, or each specified region in
``wavelength_ranges``.
:rtype:
list
"""
# [TODO] Redshift the *requested* synthesis range based on z in theta?
missing_parameters = set(self.parameters).difference(theta)
if len(missing_parameters) > 0:
logging.warn("Generate call is missing some theta parameters: {0}"\
.format(", ".join(missing_parameters)))
if synth_kwargs is None:
synth_kwargs = {}
# Update with defaults
for key, value in self._get_speedy_synth_kwargs(data).iteritems():
synth_kwargs.setdefault(key, value)
# Synthesis ranges should contain only regions that are not masked out
synthesis_ranges = utils.invert_mask(self.config["mask"], data=data,
padding=0.05)
# Synthesise the model spectra first (in parallel where applicable) and
# then apply the cheap transformations in serial.
# Extend wavelength_steps to match length of synthesis_ranges
if "wavelength_steps" in synth_kwargs:
# Check
if isinstance(synth_kwargs["wavelength_steps"][0], (tuple, list)) \
and len(synth_kwargs["wavelength_steps"]) != len(synthesis_ranges):
synth_kwargs["wavelength_steps"] = [synth_kwargs["wavelength_steps"][0]] * len(synthesis_ranges)
# Note: Pass keyword arguments so that the cacher works.
synthesised_spectra = si.synthesise(theta["teff"], theta["logg"],
theta["[M/H]"], theta["xi"], wavelengths=synthesis_ranges,
line_list_filename=self.config["GenerativeModel"]["line_list_filename"],
**synth_kwargs)
model_spectra = []
for i, observed in enumerate(data):
model_contributions = np.zeros(len(observed.disp))
model_fluxes = np.ones(len(observed.disp))
pixel_size = np.diff(observed.disp).mean()
for synthesised_spectrum in synthesised_spectra:
if np.any(observed.disp[-1] > synthesised_spectrum[:, 0]) \
and np.any(observed.disp[0] < synthesised_spectrum[:, 0]):
spectral_portion = synthesised_spectrum.copy()
# Apply instrumental broadening
resolution = theta.get("instrumental_resolution_{0}".format(i), 0)
if resolution > 0:
pixel_kernel = (spectral_portion[:,0].mean()/resolution)\
/(2.3548200450309493*pixel_size)
spectral_portion[:, 1] = gaussian_filter1d(
spectral_portion[:, 1], pixel_kernel)
# Apply any redshift
z = theta["z"] if "z" in theta else theta.get("z_{0}".format(i), 0)
spectral_portion[:, 0] *= (1. + z)
# Put on the observed pixels
indices = observed.disp.searchsorted(spectral_portion[[0, -1], 0])
if np.ptp(indices) > 0:
model_contributions[indices[0]:indices[-1]] += 1
disp = observed.disp[indices[0]:indices[-1]]
model_fluxes[indices[0]:indices[1]] *= \
np.interp(disp, spectral_portion[:, 0],
spectral_portion[:, 1], left=1., right=1.)
# Set nans for the places we did not synthesise
model_fluxes[model_contributions == 0] = np.nan
# Apply continuum
j, coefficients = 0, []
while "c_{0}_{1}".format(i, j) in theta:
coefficients.append(theta["c_{0}_{1}".format(i, j)])
j += 1
if len(coefficients) > 0:
model_fluxes[:, 1] *= np.polyval(coefficients, observed.disp)
model_spectrum = np.vstack([observed.disp, model_fluxes]).T
model_spectra.append(model_spectrum)
return model_spectra
def initial_guess(self, data, synth_kwargs=None):
"""
Make an initial guess of the model parameters for the given data. The
initial guess is based on information available in this order: (1) if
there is an "initial_guess" section in the model configuration file,
(2) any prior distributions specified in the model configuration file,
(3) some (sensible) default rules and (4) logic.
:param data:
A list of the observed spectra.
:type data:
list of :class:`specutils.Spectrum1D' objects
:param synth_kwargs: [optional]
Keyword arguments to pass directly to :func:`si.synthesise`
:type synth_kwargs:
dict
"""
synth_kwargs = self._get_speedy_synth_kwargs(data, synth_kwargs)
parameter_guess = {}
environment = dict(zip(["locals", "globals", "__name__", "__file__",
"__builtins__"], [None] * 5))
# Add our distributions and other explicit functions we want to use.
environment.update({
"abs": abs,
"normal": np.random.normal,
"uniform": np.random.uniform
})
self._num_observed_channels = len(data)
# Any explicit initial guess information?
if "initial_guess" in self.config:
sampled_filenames = {}
for parameter, rule in self.config["initial_guess"].iteritems():
if parameter not in model.parameters: continue
# If we are drawing from a filename, then we need each parameter
# to be drawn from the same line in the filename, not as separate
# draws.
if "sample_from" in rule:
# See if we have already sampled from this filename.
filename, column = eval(rule, {"sample_from": lambda f, c: (f, c)})
if filename not in sampled_filenames:
content = np.loadtxt(filename, dtype=str)
sampled_filenames[filename] = content[np.random.randint(
0, content.shape[0], 1)]
parameter_guess[parameter] = float(sampled_filenames[filename][column])
else:
# The rule could be an expression (e.g., normal, uniform),
# a value, or some logic expression. So we will evaluate it
# in a "safe" environment.
parameter_guess[parameter] = eval(rule, environment)
# Any explicit prior distributions set?
if "priors" in self.config:
for parameter, rule in self.config["priors"].iteritems():
if parameter in self.parameters \
and parameter not in parameter_guess:
parameter_guess[parameter] = eval(rule, environment)
# Apply some default rules in case we haven't guessed these parameters
# yet.
default_rules = collections.OrderedDict([
("teff", np.random.uniform(3000, 7000)),
("logg", np.random.uniform(0, 5)),
("[M/H]", np.random.uniform(-2, 0)),
("xi", "(1.28 + 3.3e-4 * (teff - 6000) - 0.64 * (logg - 4.5)) "\
"if logg > 3.5 else 2.70 - 0.509 * logg"),
("Po", np.random.uniform(0, 1)),
("Vo", abs(np.random.normal(0, 1))),
("Ys", np.random.normal(1, 0.01)),
])
# [TODO] Vo and Ys above. What are you doing?!
for parameter, rule in default_rules.iteritems():
if parameter not in self.parameters \
or parameter in parameter_guess: continue
# Here we use a local environment so we can pass parameter guesses
# so far into it.
local_environment = environment.copy()
local_environment.update(parameter_guess)
parameter_guess[parameter] = rule if isinstance(rule, (int, float)) \
else eval(rule, local_environment)
# OK, now just use logic for the remaining parameters.
# [TODO] Set the abundances of all other elements to be zero:
remaining_parameters = set(self.parameters).difference(parameter_guess.keys())
# OK, what about parameter stubs
# TODO No!!
default_rule_stubs = collections.OrderedDict([
("instrumental_resolution_", "28000")
])
for parameter in remaining_parameters:
for parameter_stub, rule in default_rule_stubs.iteritems():
if parameter.startswith(parameter_stub):
local_environment = environment.copy()
local_environment.update(parameter_guess)
parameter_guess[parameter] = rule if isinstance(rule, (int, float)) \
else eval(rule, local_environment)
z_parameter = "z" if "z" in remaining_parameters else \
("z_{0}" if "z_0" in remaining_parameters else False)
any_continuum = len(self.config["continuum"].get("order", [])) > 0 \
if hasattr(self.config, "continuum") else False
# Remaining parameters could be related to:
# tellurics, continuum, redshift.
if z_parameter or any_continuum:
# Synthesis may be required.
for i, observed_spectrum in enumerate(data):
# Synthesis may be required if:
# (1) this channel requires continuum coefficients, or
# (2) there is an individual redshift for this channel, or
# (3) there is a global redshift and this is the first channel.
synthesis_required = ("c_{0}_0".format(i) in self.parameters) \
or (z_parameter == "z_{0}") or (z_parameter == "z" and i == 0)
if not synthesis_required: continue
model_spectrum = si.synthesise(
parameter_guess["teff"],
parameter_guess["logg"],
parameter_guess["[M/H]"],
parameter_guess["xi"],
(observed_spectrum.disp[0], observed_spectrum.disp[-1]),
**synth_kwargs)
# Put the model spectrum onto the same pixels as the observed
# spectrum.
resampled_model_flux = np.interp(observed_spectrum.disp,
model_spectrum[:, 0], model_spectrum[:, 1], left=np.nan,
right=np.nan)
# Do we need a redshift estimate?
if (z_parameter == "z" and i == 0) or z_parameter == "z_{0}":
parameter_guess[z_parameter.format(i)] = specutils._cross_correlate(
observed_spectrum.disp, observed_spectrum.flux,
resampled_model_flux)
# Do we need continuum coefficient estimates?
continuum_order = -1
while "c_{0}_{1}".format(i, 1 + continuum_order) in self.parameters:
continuum_order += 1
if continuum_order >= 0:
continuum = observed_spectrum.flux/resampled_model_flux
finite = np.isfinite(continuum)
continuum_parameters = np.polyfit(
observed_spectrum.disp[finite], continuum[finite],
continuum_order)
# Use .setdefault, not .update, in case the user has
# specified initial_guess or some prior distribution on this
# parameter
for j, continuum_parameter in enumerate(continuum_parameters):
parameter_guess.setdefault("c_{0}_{1}".format(i, j), continuum_parameter)
# Make sure we haven't missed any parameters
assert len(set(self.parameters).difference(parameter_guess)) == 0
return parameter_guess
def _log_prior(self, theta):
"""
Return the logarithmic prior probability of the parameters ``theta``.
:param theta:
The values of the ``model.parameters``.
:type theta:
list-type
:returns:
The logarithmic prior probability given the parameters ``theta``.
:rtype:
float
"""
theta_dict = dict(zip(self.parameters, theta))
if not (1 > theta_dict.get("Po", 0.5) > 0) \
or not (10000 > theta_dict.get("Vo", 1) > 0) \
or 0 > theta_dict.get("xi", 1) \
or 0 > theta_dict.get("teff"):
return -np.inf
# Put in priors for channel stubs
for i in range(self._num_observed_channels):
if 0 > theta_dict.get("instrumental_resolution_{0}".format(i), 1):
return -np.inf
return sum([self.evaluate_lnprior(p, v) for p, v in theta_dict.iteritems()])
def _log_likelihood(self, theta, data, synth_kwargs):
"""
Return the logarithmic likelihood of the parameters ``theta``.
:param theta:
The values of the ``model.parameters``.
:type theta:
list-type
:param data:
The observed data.
:type data:
:class:`specutils.Spectrum1D`
:param synth_kwargs: [optional]
Keyword arguments to pass directly to :func:`si.synthesise`.
:type synth_kwargs:
dict
:returns:
The logarithmic likelihood of the parameters ``theta``.
:rtype:
float
"""
theta_dict = dict(zip(self.parameters, theta))
try:
expected = self(data, synth_kwargs=synth_kwargs, **theta_dict)
except si.SIException:
# Probably unrealistic astrophysical parameters requested.
# SI fell over, so we will too.
# This is OK though: if *all* walkers fell over simultaneously then
# we would still end up raising an exception
return -np.inf
else:
# No exception, but no spectra either!
if expected is None:
return -np.inf
z = theta_dict.get("z", 0.)
likelihood = 0
for observed, model in zip(data, expected):
mask = self.mask(observed.disp, z)
chi_sq = (observed.flux - model[:, 1])**2 * observed.ivariance
if "Po" not in theta_dict:
finite = np.isfinite(chi_sq)
likelihood += -0.5 * np.sum(chi_sq[finite])
else:
Po, Vo, Yo = [theta_dict.get(each) for each in ("Po", "Vo", "Ys")]
model_likelihood = -0.5 * (chi_sq - np.log(observed.ivariance))
outlier_ivariance = 1.0/(Vo + observed.variance)
outlier_likelihood = -0.5 * ((observed.flux - Yo)**2 \
* outlier_ivariance * mask - np.log(outlier_ivariance))
mixture_likelihood = np.logaddexp(
np.log(1. - Po) + model_likelihood,
np.log(Po) + outlier_likelihood)
finite = np.isfinite(mixture_likelihood)
likelihood += np.sum(mixture_likelihood[finite])
if likelihood == 0:
raise a
return likelihood
def _log_probability(self, theta, data, synth_kwargs=None):
"""
Return the logarithmic probability of the GenerativeModel parameters
``theta``.
:param theta:
The values of the ``model.parameters``.
:type theta:
list-type
:param data:
The observed data.
:type data:
:class:`specutils.Spectrum1D`
:param synth_kwargs: [optional]
Keyword arguments to pass directly to :func:`si.synthesise`.
:type synth_kwargs:
dict
:returns:
The logarithmic probability of the parameters ``theta``.
:rtype:
float
"""
print(theta)
log_prior = self._log_prior(theta)
if not np.isfinite(log_prior):
return log_prior
log_likelihood = self._log_likelihood(theta, data, synth_kwargs)
log_probability = log_prior + log_likelihood
logger.debug("Calculated logarithmic prior, likelihood, and probability"\
" for {0} to be {1:.3e}, {2:.3e}, and {3:.3e}".format(
", ".join(["{0} = {1:.3f}".format(p, v) \
for p, v in zip(self.parameters, theta)]),
log_prior, log_likelihood, log_probability))
return log_probability
def _get_speedy_synth_kwargs(self, data, synth_kwargs=None):
""" Get default synth_kwargs that are optimised for speediness. """
if synth_kwargs is None:
synth_kwargs = {}
undersample_rate = self.config["settings"].get("undersample_rate", 1)
synth_kwargs.setdefault("chunk", True)
synth_kwargs.setdefault("threads", self.config["settings"].get(
"max_synth_threads", 1) if "settings" in self.config else 1)
synth_kwargs.setdefault("wavelength_steps",
tuple([(wls, wls, wls) for wls \
in [undersample_rate*np.median(np.diff(s.disp)) for s in data]]))
return synth_kwargs
def scatter(self, data, num, synth_kwargs=None):
    """
    Randomly draw points ``num`` from the parameter space and calculate the
    logarithmic probability at each point. The most probable point is
    returned.

    :param data:
        A list of the observed spectra.
    :type data:
        list of :class:`specutils.Spectrum1D` objects
    :param num:
        The number of points to draw from the parameter space.
    :type num:
        int
    :param synth_kwargs: [optional]
        Keyword arguments to pass directly to :func:`si.synthesise`
    :type synth_kwargs:
        dict
    :returns:
        The most probable sampled point.
    :rtype:
        dict
    """
    # [TODO] parallelise: a `threads` keyword (negative => cpu_count())
    # could distribute the random draws across processes.
    synth_kwargs = self._get_speedy_synth_kwargs(data, synth_kwargs)

    best_theta, highest_log_prob = None, None
    # BUGFIX: was `xrange`, which only exists on Python 2.
    for i in range(num):
        theta_dict = self.initial_guess(data, synth_kwargs)
        theta = [theta_dict[p] for p in self.parameters]
        log_prob = self._log_probability(theta, data, synth_kwargs)
        # Is this the best so far?
        if np.isfinite(log_prob) \
        and (highest_log_prob is None or log_prob > highest_log_prob):
            best_theta, highest_log_prob = theta_dict, log_prob

    # If we have gotten to this point and found no reasonable starting
    # point then we should just keep trying until we get *a* start point
    while highest_log_prob is None:
        theta_dict = self.initial_guess(data, synth_kwargs)
        theta = [theta_dict[p] for p in self.parameters]
        log_prob = self._log_probability(theta, data, synth_kwargs)
        # BUGFIX: the old `log_prob > highest_log_prob` compared against
        # None -- a TypeError on Python 3 (and vacuously true on Python 2).
        # Any finite point is acceptable here.
        if np.isfinite(log_prob):
            best_theta, highest_log_prob = theta_dict, log_prob
            break
    return best_theta
def optimise(self, data, initial_theta=None, synth_kwargs=None, maxfun=10e3,
    maxiter=10e3, xtol=0.05, ftol=0.01, full_output=False):
    """
    Optimise the logarithmic probability of the GenerativeModel parameters
    given some data.

    :param data:
        A list of the observed spectra.
    :type data:
        list of :class:`specutils.Spectrum1D` objects
    :param initial_theta: [optional]
        The initial guess for :math:`\\theta` to optimise from.
    :type initial_theta:
        dict
    :param synth_kwargs: [optional]
        Keyword arguments to pass directly to :func:`si.synthesise`.
    :type synth_kwargs:
        dict
    :param maxfun: [optional]
        The maximum number of function evaluations to perform.
    :type maxfun:
        int
    :param maxiter: [optional]
        The maximum number of function iterations to perform.
    :type maxiter:
        int
    :param xtol: [optional]
        The tolerance required in the model parameters.
    :type xtol:
        float
    :param ftol: [optional]
        The tolerance required in logarithmic probability.
    :type ftol:
        float
    :param full_output: [optional]
        Return a tuple containing the optimised points theta, the logarithmic
        probability of the optimised point, the number of function iterations,
        the number of function evaluations, and an integer warning flag.
    :type full_output:
        bool
    """
    synth_kwargs = self._get_speedy_synth_kwargs(data, synth_kwargs)
    if initial_theta is None:
        initial_theta = self.initial_guess(data, synth_kwargs=synth_kwargs)
    else:
        self._num_observed_channels = len(data)

    # Check to make sure that the initial_theta contains all of the model
    # parameters.
    missing_parameters = set(self.parameters).difference(initial_theta)
    if len(missing_parameters) > 0:
        raise KeyError("initial guess is missing parameter(s) {0}".format(
            ", ".join(missing_parameters)))

    # OK, now optimise (minimise) the negative log probability.
    op_kwargs = {
        "maxfun": maxfun,
        "maxiter": maxiter,
        "xtol": xtol,
        "ftol": ftol,
        "disp": False,
        "full_output": True # Necessary for introspection and provenance.
    }
    t_init = time()
    # BUGFIX: the previous revision discarded the fmin result into `result`
    # and then referenced undefined op_* names, which raised a NameError.
    # scipy.optimize.fmin with full_output=True returns
    # (xopt, fopt, iterations, funcalls, warnflag); unpack it directly.
    op_theta, op_fopt, op_niter, op_nfunc, op_warnflag = op.fmin(
        lambda t, d, skw: -self._log_probability(t, d, skw),
        [initial_theta.get(parameter) for parameter in self.parameters],
        args=(data, synth_kwargs), **op_kwargs)
    #self._opt_warn_message(op_warnflag, op_niter, op_nfunc)

    t_elapsed = time() - t_init
    logger.info("Generative model optimisation took {0:.2f} seconds".format(t_elapsed))

    op_theta_dict = dict(zip(self.parameters, op_theta))

    if full_output:
        return (op_theta_dict, op_fopt, op_niter, op_nfunc, op_warnflag)
    return op_theta_dict
def infer(self, data, optimised_theta=None, p0=None, walkers=-2, burn=100,
    sample=100, threads=1, synth_kwargs=None, **kwargs):
    """
    Infer the GenerativeModel parameters given the data.

    :param data:
        A list of the observed spectra.
    :type data:
        list of :class:`specutils.Spectrum1D' objects
    :param optimised_theta: [optional]
        The optimised point :math:`\Theta_{opt}`. The walkers will start from
        a multi-dimensional ball centered around this point. You must supply
        either ``optimised_theta`` or ``p0``, but not both.
    :type optimised_theta:
        dict
    :param p0: [optional]
        The initial starting point for the walkers. You must supply either
        ``optimised_theta`` or ``p0``, but not both.
    :type p0:
        :class:`numpy.ndarray`
    :param walkers: [optional]
        The number of Goodman & Weare (2010) ensemble walkers.
    :type walkers:
        int
    :param burn: [optional]
        The number of MCMC steps to discard as burn-in.
    :type burn:
        int
    :param sample: [optional]
        The number of MCMC steps to sample the posterior with.
    :type sample:
        int
    :param threads: [optional]
        The number of threads to specify to :class:`emcee.EnsembleSampler`.
        Specifying -1 will set the threads to the number of available CPUs.
    :type threads:
        int
    :param kwargs: [optional]
        Keyword arguments to pass directly to :class:`emcee.EnsembleSampler`
    :type kwargs:
        dict
    :returns:
        The posterior quantiles in all parameters, the model sampler,
        and a ``dict`` containing the mean acceptance fractions, concatenated
        chains and log-probability values for the burn-in and posterior.
    """
    # Exactly one of optimised_theta / p0 must be given.
    if (optimised_theta is None and p0 is None) \
    or (optimised_theta is not None and p0 is not None):
        raise ValueError("either optimised_theta *or* p0 must be supplied")

    if 0 > threads:
        threads = cpu_count()

    # A non-positive `walkers` value means "that many times 2 * dimensions".
    # NOTE(review): this reads len(optimised_theta), which is None when only
    # p0 was supplied -- confirm callers always pass optimised_theta when
    # walkers <= 0.
    walkers = walkers if walkers > 0 else 2 * abs(walkers) * len(optimised_theta)
    synth_kwargs = self._get_speedy_synth_kwargs(data, synth_kwargs)
    mean_acceptance_fractions = np.zeros((burn + sample))
    sampler = emcee.EnsembleSampler(walkers, len(self.parameters),
        _inferencer, args=(self, data, synth_kwargs), threads=threads, **kwargs)

    # Do we need to create p0 from optimised_theta?
    if optimised_theta is not None:
        # Per-parameter scales for the initial multi-dimensional ball.
        std = {
            "teff": 10.,
            "logg": 0.05,
            "[M/H]": 0.05,
            "xi": 0.05,
        }
        stds = np.array([std.get(p, 0.01) for p in self.parameters])
        theta = np.array([optimised_theta[p] for p in self.parameters])
        p0 = emcee.utils.sample_ball(theta, stds, size=walkers)
        #p0 = np.array([theta + 1e-4*np.random.randn(len(self.parameters)) \
        #    for i in range(walkers)])
    print("ready")  # NOTE(review): looks like a leftover debug print.
    # Burn-in and posterior sampling are done in a single pass; the first
    # `burn` steps are simply excluded when the quantiles are computed below.
    for i, (pos, lnprob, rstate) in enumerate(sampler.sample(p0, iterations=burn+sample)):
        mean_acceptance_fractions[i] = np.mean(sampler.acceptance_fraction)
        logger.info("Sampler has finished step {0:.0f} ({1}) of {2:.0f} with "\
            "<a_f> = {3:.3f}, maximum log probability in last step was "\
            "{4:.3e}".format(i + 1, ["burn-in", "sample"][i >= burn], burn + sample,
                mean_acceptance_fractions[i], np.max(sampler.lnprobability[:, i])))
        # An acceptance fraction of exactly 0 or 1 means the chain is either
        # stuck or accepting everything -- both indicate a broken setup.
        if mean_acceptance_fractions[i] in (0, 1):
            raise RuntimeError("mean acceptance fraction is {0:.0f}!".format(
                mean_acceptance_fractions[i]))
    # The two-phase implementation below (reset between burn-in and sampling)
    # was disabled in favour of the single-pass loop above.
    """
    # Save the chain and calculated log probabilities for later
    chain, lnprobability = sampler.chain, sampler.lnprobability
    sampler.reset()
    logger.info("Sampling posterior...")
    for j, state in enumerate(sampler.sample(pos, iterations=sample)):
        mean_acceptance_fractions[i + j + 1] = np.mean(sampler.acceptance_fraction)
        logger.info("Sampler has finished step {0:.0f} of sampling with "\
            "<a_f> = {1:.3f}, maximum log probability in last step was "\
            "{2:.3e}".format(j + 1, mean_acceptance_fractions[i + j + 1],
                np.max(sampler.lnprobability[:, j])))
        if mean_acceptance_fractions[i + j + 1] in (0, 1):
            raise RuntimeError("mean acceptance fraction is {0:.0f}!".format(
                mean_acceptance_fractions[i + j + 1]))
    # Concatenate the existing chain and lnprobability with the posteriors
    chain = np.concatenate([chain, sampler.chain], axis=1)
    lnprobability = np.concatenate([lnprobability, sampler.lnprobability], axis=1)
    """
    # Get the quantiles: (median, +1 sigma, -1 sigma) per parameter, from the
    # last `sample` steps of every walker (i.e. the post-burn-in samples).
    posteriors = dict(zip(self.parameters, map(lambda v: (v[1], v[2]-v[1], v[0]-v[1]),
        zip(*np.percentile(sampler.chain.reshape(-1, len(self.parameters))[-sample*walkers:, :],
            [16, 50, 84], axis=0)))))
    info = {
        "walkers": walkers,
        "burn": burn,
        "sample": sample,
        "mean_acceptance_fractions": mean_acceptance_fractions
    }
return (posteriors, sampler, info) | [
"andycasey@gmail.com"
] | andycasey@gmail.com |
b52b5d7aebe3bd624868fe6aed27ba6c5e09e006 | 9aad83265d9b0f405000be6ecf1e27886d907a45 | /pyciss/meta.py | 96aa53d3c50f1a179d330a8c9670d65c2b71ef31 | [
"ISC"
] | permissive | jamesHuffman/pyciss | b536c5d806b5b8b8692485b5f8c5da22586e5c64 | bfab517ebc411592ffbb9050136e3d95ced52db4 | refs/heads/master | 2020-05-29T08:40:15.734644 | 2017-01-04T10:05:29 | 2017-01-04T10:05:29 | 69,047,589 | 0 | 0 | null | 2016-09-23T17:44:36 | 2016-09-23T17:44:35 | null | UTF-8 | Python | false | false | 3,345 | py | """This module deals with the metadata I have received from collaborators.
It defines the location of ring resonances for the RingCube plotting.
"""
import pandas as pd
import pkg_resources as pr
def get_meta_df():
    """Load the bundled image metadata table into a DataFrame indexed by id.

    Columns 0, 1 and 14 of the whitespace-delimited file are renamed to
    'id', 'pixres' and 'lit_status' respectively.
    """
    def read_metadata(f):
        df = pd.read_csv(f, header=None, delim_whitespace=True)
        df = df.rename(columns={0: 'id', 1: 'pixres', 14: 'lit_status'})
        df = df.set_index('id')
        # BUGFIX: `df.lit_status is True` compared the Series *object* to the
        # True singleton, which is always False for every row.  Compare
        # element-wise instead.
        # NOTE(review): assumes lit_status holds boolean values -- confirm
        # against the data file.
        df['is_lit'] = df.lit_status.eq(True)
        # df.drop('lit_status', axis=1)
        return df

    with pr.resource_stream('pyciss', 'data/metadata.txt') as f:
        meta_df = read_metadata(f)
    return meta_df
# resonances
def get_order(name):
    """Return the resonance order (p - q) from a name like 'Janus 2:1'."""
    numerator, denominator = name.split()[1].split(':')
    return int(numerator) - int(denominator)
def get_resonances():
    """Load the bundled ring-resonance table, sorted ascending by radius.

    Adds an 'order' column derived from each resonance name.
    """
    with pr.resource_stream('pyciss', 'data/ring_resonances.csv') as stream:
        table = pd.read_csv(stream)
    table.columns = ['name', 'radius', 'a_moon', 'n', 'kappa']
    table = table.sort_values(by='radius', ascending=True)
    table['order'] = table.name.map(get_order)
    return table
def get_prime_resonances():
    """Return first-order resonances only, without Janus and Epimetheus.

    Those two moons are excluded because a more precise dedicated file
    covers them (see get_janus_epimetheus_resonances).
    """
    table = get_resonances()
    prime = table[table.order == 1].drop('order', axis=1)
    for moon in ('Janus', 'Epimetheus'):
        prime = prime.loc[~prime.name.str.startswith(moon)]
    return prime
# Janus Epithemeus resonances
def get_janus_epimetheus_resonances():
    """Parse the fixed-width Janus/Epimetheus resonance file.

    Returns a DataFrame with columns moon, reson, radius, order and a
    human-readable 'name' column for axis labelling.
    """
    # Column widths are taken from the header tokens of the fixed-width file.
    widths = [len(' Janus1'),
              len(' reson'),
              len(' Resonance radius R')]

    path = pr.resource_filename('pyciss',
                                'data/ring_janus_epimetheus_resonances.txt')
    with open(path) as stream:
        frame = pd.read_fwf(stream, skiprows=15, header=0, widths=widths,
                            skipfooter=1)
    # replace column names
    frame.columns = ['moon', 'reson', 'radius']

    def order_of(reso):
        "Resonance order is the difference of the two ratio terms."
        p, q = reso.split(':')
        return int(p) - int(q)

    # calculate order from resonance name
    frame['order'] = frame.reson.map(order_of)

    def strip_ratio(text):
        "Remove whitespace around the ':' in a resonance string."
        return ':'.join(part.strip() for part in text.split(':'))

    frame.reson = frame.reson.map(strip_ratio)
    # calculate name for axes display
    frame['name'] = frame.moon + ' ' + frame.reson
    return frame
def get_prime_jan_epi():
    """Return first-order Janus/Epimetheus resonances (name and radius only)."""
    frame = get_janus_epimetheus_resonances()
    # keep first-order resonances and drop the helper columns
    frame = frame[frame.order == 1]
    return frame.drop(['order', 'moon', 'reson'], axis=1)
def get_all_resonances():
    """Concatenate prime moon and Janus/Epimetheus resonances, by radius.

    Adds a lower-cased 'moon' column derived from the resonance name.
    """
    combined = pd.concat([get_prime_resonances(), get_prime_jan_epi()])
    combined.sort_values(by='radius', inplace=True)
    combined['moon'] = combined.name.map(lambda n: n.split()[0].lower())
    return combined
| [
"kmichael.aye@gmail.com"
] | kmichael.aye@gmail.com |
ff23bf0fd747937e5b59cb049d04f3cd937701f2 | 494a0ba52d3204cb0082f01ae58cfdfc74895ba2 | /thisIsCodingTest/graph/42.gate.py | 69ae147b775a050386a3441d2cc6e94c185dc8b9 | [] | no_license | mhee4321/python_algorithm | 52331721c49399af35ffc863dd1d9b8e39cea26a | 96dd78390ba735dd754930affb3b72bebbbe5104 | refs/heads/master | 2023-04-26T09:27:40.760958 | 2021-05-16T12:12:39 | 2021-05-16T12:12:39 | 327,462,537 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 563 | py | def find_parent(parent, x):
if parent[x] != x:
parent[x] = find_parent(parent, parent[x])
return parent[x]
def union_parent(parent, a, b):
    """Merge the sets containing a and b; the smaller root index wins."""
    root_a = find_parent(parent, a)
    root_b = find_parent(parent, b)
    if root_a < root_b:
        parent[root_b] = root_a
    else:
        parent[root_a] = root_b
# Airplane gate-docking problem (greedy + union-find): each plane docks at
# the highest free gate <= its requested gate; find_parent gives that gate
# and union with gate-1 marks it used.  Stop when no gate is available.
g = int(input())  # number of gates (1..g)
p = int(input())  # number of planes
parent = [0] * (g+1)
# Each gate starts as its own set representative ("highest free gate").
for i in range(1, g+1):
    parent[i] = i
result = 0
for _ in range(p):
    # Highest still-free gate at or below the requested gate number.
    data = find_parent(parent, int(input()))
    if data == 0:
        # No free gate remains for this plane: docking stops entirely.
        break
    # Mark the gate as used by merging it with the next lower gate's set.
    union_parent(parent, data, data-1)
    result += 1
print(result)
"nannanru@gmail.com"
] | nannanru@gmail.com |
2e312bbf9e3bffa3e8bdbcff21c0dd64f68ac42d | 6509c398816baffafa4a1fcfb2855e1bc9d1609b | /sistema-operacional/diretorios/pathlib/exemplos/pathlib-30.py | cdb96b7c3e866933e42e2503f27c3bb2564029af | [] | no_license | marcoswebermw/learning-python | 6b0dfa81a0d085f4275865dce089d9b53b494aa5 | 931ed2985b8a3fec1a48c660c089e290aaac123d | refs/heads/master | 2021-10-27T21:19:46.013020 | 2019-04-19T23:25:46 | 2019-04-19T23:25:46 | 87,670,464 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 244 | py | # Retorna o caminho final("tronco")
# de um componente sem o sufixo(extensão).
from pathlib import Path
arquivo = Path('/usr/bin/teste.tar.gz')
print(arquivo.stem) # teste.tar
arquivo = Path('/usr/bin/teste.tar')
print(arquivo.stem) # teste | [
"marcoswebermw@gmail.com"
] | marcoswebermw@gmail.com |
e538dfa58bc6728808c15fad154b4ea83088f829 | 0aa9649e3f67d2ab3f36eb4d67d6b9196295a8ec | /src/browserrender.py | e97a81f842bd1ccc477ec19cabce3332a9eb5bb5 | [] | no_license | HussainAther/scrape | 5e094dae66fe88ed4b090797a095df95db839874 | 8c28d8d4943de34544ce18bf5f1c7223e51426a5 | refs/heads/master | 2020-06-03T16:35:10.771222 | 2019-12-14T08:33:32 | 2019-12-14T08:33:32 | 191,651,526 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,025 | py | # -*- coding: utf-8 -*-
import csv
import lxml.html
import re
import time
try:
from PySide.QtGui import QApplication
from PySide.QtCore import QUrl, QEventLoop, QTimer
from PySide.QtWebKit import QWebView
except ImportError:
from PyQt4.QtGui import QApplication
from PyQt4.QtCore import QUrl, QEventLoop, QTimer
from PyQt4.QtWebKit import QWebView
class BrowserRender(QWebView):
    """
    Create a rendering of the browser for website interaction.

    Wraps a QtWebKit web view so pages can be loaded, inspected and
    manipulated (form filling, clicking) from Python code.
    """
    def __init__(self, display=True):
        """
        Initialize the display with the app.

        :param display: when True, show the browser window on screen.
        """
        # A QApplication must exist before any Qt widget can be created.
        self.app = QApplication([])
        QWebView.__init__(self)
        if display:
            self.show() # show the browser

    def open(self, url, timeout=60):
        """
        Wait for download to complete and return result.

        Runs a local Qt event loop until either the page finishes loading
        or `timeout` seconds elapse.  Returns the page HTML on success, or
        None (implicitly) on timeout.
        """
        loop = QEventLoop()
        timer = QTimer()
        timer.setSingleShot(True)
        # Either a finished load or the timer firing quits the local loop.
        timer.timeout.connect(loop.quit)
        self.loadFinished.connect(loop.quit)
        self.load(QUrl(url))
        timer.start(timeout * 1000)
        loop.exec_() # delay here until download finished
        if timer.isActive():
            # downloaded successfully
            timer.stop()
            return self.html()
        else:
            # timed out
            print("Request timed out:", url)

    def html(self):
        """
        Shortcut to return the current HTML.
        """
        return self.page().mainFrame().toHtml()

    def find(self, pattern):
        """
        Find all elements that match the pattern (a CSS selector).
        """
        return self.page().mainFrame().findAllElements(pattern)

    def attr(self, pattern, name, value):
        """
        Set attribute for matching elements.
        """
        for e in self.find(pattern):
            e.setAttribute(name, value)

    def text(self, pattern, value):
        """
        Set the plain text content of matching elements.
        """
        for e in self.find(pattern):
            e.setPlainText(value)

    def click(self, pattern):
        """
        Click matching elements.
        """
        for e in self.find(pattern):
            e.evaluateJavaScript("this.click()")

    def waitload(self, pattern, timeout=60):
        """
        Wait for this pattern to be found in webpage and return matches.

        Polls the page (while pumping Qt events) until matching elements
        appear or `timeout` seconds pass; returns None (implicitly) on
        timeout.
        """
        deadline = time.time() + timeout
        while time.time() < deadline:
            # Keep the Qt event loop alive so the page can keep rendering.
            self.app.processEvents()
            matches = self.find(pattern)
            if matches:
                return matches
        print("Wait load timed out")
def main():
    """Demo: search the example site for all countries and save them to CSV."""
    br = BrowserRender()
    br.open("http://example.webscraping.com/search")
    br.attr("#search_term", "value", ".")
    br.text("#page_size option:checked", "1000")
    br.click("#search")
    elements = br.waitload("#results a")
    # BUGFIX: the output file was opened inline and never closed, so buffered
    # rows could be lost.  A context manager guarantees flush + close.
    with open("countries.csv", "w") as csv_file:
        writer = csv.writer(csv_file)
        for country in [e.toPlainText().strip() for e in elements]:
            writer.writerow([country])


if __name__ == "__main__":
    main()
| [
"shussainather@gmail.com"
] | shussainather@gmail.com |
065c63d1ae9bb96d1d7af75023910c6a1693df54 | 4766d241bbc736e070f79a6ae6a919a8b8bb442d | /archives/leetcode2/0108. Convert Sorted Array to Binary Search Tree.py | ad97a0374762928355b3da39c91085ca4dcf7558 | [] | no_license | yangzongwu/leetcode | f7a747668b0b5606050e8a8778cc25902dd9509b | 01f2edd79a1e922bfefecad69e5f2e1ff3a479e5 | refs/heads/master | 2021-07-08T06:45:16.218954 | 2020-07-18T10:20:24 | 2020-07-18T10:20:24 | 165,957,437 | 10 | 8 | null | null | null | null | UTF-8 | Python | false | false | 518 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def sortedArrayToBST(self, nums):
        """
        Build a height-balanced BST from a sorted array.

        :type nums: List[int]
        :rtype: TreeNode
        """
        # Recurse on index ranges instead of slicing; the pivot
        # (lo + hi + 1) // 2 selects the same upper-middle element as
        # len(slice) // 2 did, so the resulting tree shape is identical.
        def build(lo, hi):
            if lo > hi:
                return None
            mid = (lo + hi + 1) // 2
            node = TreeNode(nums[mid])
            node.left = build(lo, mid - 1)
            node.right = build(mid + 1, hi)
            return node

        return build(0, len(nums) - 1)
| [
"noreply@github.com"
] | yangzongwu.noreply@github.com |
fde15e4db514b606e835f68194fbccfc68ce0db3 | eb3683f9127befb9ef96d8eb801206cf7b84d6a7 | /testing/test_programs/numpy/basic_numpy/stypy_test_files/numpy_mathematical_functions_trigonometrical__type_data.py | ec54b5bb797ef03f3043b380cefb64c0f8a4a21f | [] | no_license | ComputationalReflection/stypy | 61ec27333a12f76ac055d13f8969d3e0de172f88 | be66ae846c82ac40ba7b48f9880d6e3990681a5b | refs/heads/master | 2021-05-13T18:24:29.005894 | 2018-06-14T15:42:50 | 2018-06-14T15:42:50 | 116,855,812 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,073 | py | from testing.code_generation_testing.codegen_testing_common import instance_of_class_name
from stypy.types import union_type, undefined_type
from numpy import ndarray
test_types = {
'__main__': {
'r23': instance_of_class_name("ndarray"),
'r22': instance_of_class_name("ndarray"),
'r21': instance_of_class_name("ndarray"),
'r20': instance_of_class_name("ndarray"),
'__package__': instance_of_class_name("NoneType"),
'np': instance_of_class_name("module"),
'o2': instance_of_class_name("ndarray"),
'o1': instance_of_class_name("ndarray"),
'r16': instance_of_class_name("ndarray"),
'r17': instance_of_class_name("ndarray"),
'r14': instance_of_class_name("ndarray"),
'r15': instance_of_class_name("ndarray"),
'r12': instance_of_class_name("float64"),
'r13': instance_of_class_name("float64"),
'r10': instance_of_class_name("float64"),
'r11': union_type.UnionType.create_from_type_list([ndarray, undefined_type.UndefinedType]),
'__builtins__': instance_of_class_name("module"),
'__file__': instance_of_class_name("str"),
'r18': instance_of_class_name("ndarray"),
'r19': instance_of_class_name("ndarray"),
'phase': union_type.UnionType.create_from_type_list([ndarray, undefined_type.UndefinedType, tuple]),
'__name__': instance_of_class_name("str"),
'r4': instance_of_class_name("float64"),
'r5': instance_of_class_name("float64"),
'r6': instance_of_class_name("float64"),
'r7': instance_of_class_name("ndarray"),
'r1': instance_of_class_name("float64"),
'r2': instance_of_class_name("float64"),
'r3': instance_of_class_name("float64"),
'r8': union_type.UnionType.create_from_type_list([ndarray, undefined_type.UndefinedType]),
'r9': instance_of_class_name("float64"),
'__doc__': instance_of_class_name("NoneType"),
'x10': instance_of_class_name("list"),
'x': instance_of_class_name("list"),
},
}
| [
"redondojose@uniovi.es"
] | redondojose@uniovi.es |
af24fd9f3a332b8874c7a1a13ea082a7c9f59207 | af2b03bd1f7c54059b04687a825cf774b073351a | /python/ccxt/async_support/bybit.py | 4d61819559fa95bcb6f184baa436bd2e33f2d4a4 | [
"MIT"
] | permissive | street2geek/ccxt | 2b480526758b0629bad95c756e6c645964babe94 | e880b59112717b693985f5e4beb88cdefaab9e57 | refs/heads/master | 2023-05-25T16:50:07.920596 | 2023-05-16T11:51:48 | 2023-05-16T11:51:48 | 148,709,392 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 397,447 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.abstract.bybit import ImplicitAPI
import asyncio
import hashlib
from ccxt.base.types import OrderSide
from typing import Optional
from typing import List
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import InvalidNonce
from ccxt.base.errors import RequestTimeout
from ccxt.base.errors import AuthenticationError
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class bybit(Exchange, ImplicitAPI):
def describe(self):
return self.deep_extend(super(bybit, self).describe(), {
'id': 'bybit',
'name': 'Bybit',
'countries': ['VG'], # British Virgin Islands
'version': 'v5',
'userAgent': None,
'rateLimit': 20,
'hostname': 'bybit.com', # bybit.com, bytick.com
'pro': True,
'certified': True,
'has': {
'CORS': True,
'spot': True,
'margin': True,
'swap': True,
'future': True,
'option': None,
'cancelAllOrders': True,
'cancelOrder': True,
'createOrder': True,
'createPostOnlyOrder': True,
'createReduceOnlyOrder': True,
'createStopLimitOrder': True,
'createStopMarketOrder': True,
'createStopOrder': True,
'editOrder': True,
'fetchBalance': True,
'fetchBorrowInterest': False, # temporarily disabled, does not work
'fetchBorrowRate': True,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchCanceledOrders': True,
'fetchClosedOrders': True,
'fetchCurrencies': True,
'fetchDeposit': False,
'fetchDepositAddress': True,
'fetchDepositAddresses': False,
'fetchDepositAddressesByNetwork': True,
'fetchDeposits': True,
'fetchFundingRate': True, # emulated in exchange
'fetchFundingRateHistory': True,
'fetchFundingRates': True,
'fetchIndexOHLCV': True,
'fetchLedger': True,
'fetchMarketLeverageTiers': True,
'fetchMarkets': True,
'fetchMarkOHLCV': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenInterest': True,
'fetchOpenInterestHistory': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchOrderTrades': True,
'fetchPosition': True,
'fetchPositions': True,
'fetchPremiumIndexOHLCV': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchTradingFee': True,
'fetchTradingFees': True,
'fetchTransactions': False,
'fetchTransfers': True,
'fetchWithdrawals': True,
'setLeverage': True,
'setMarginMode': True,
'setPositionMode': True,
'transfer': True,
'withdraw': True,
},
'timeframes': {
'1m': '1',
'3m': '3',
'5m': '5',
'15m': '15',
'30m': '30',
'1h': '60',
'2h': '120',
'4h': '240',
'6h': '360',
'12h': '720',
'1d': 'D',
'1w': 'W',
'1M': 'M',
},
'urls': {
'test': {
'spot': 'https://api-testnet.{hostname}',
'futures': 'https://api-testnet.{hostname}',
'v2': 'https://api-testnet.{hostname}',
'public': 'https://api-testnet.{hostname}',
'private': 'https://api-testnet.{hostname}',
},
'logo': 'https://user-images.githubusercontent.com/51840849/76547799-daff5b80-649e-11ea-87fb-3be9bac08954.jpg',
'api': {
'spot': 'https://api.{hostname}',
'futures': 'https://api.{hostname}',
'v2': 'https://api.{hostname}',
'public': 'https://api.{hostname}',
'private': 'https://api.{hostname}',
},
'www': 'https://www.bybit.com',
'doc': [
'https://bybit-exchange.github.io/docs/inverse/',
'https://bybit-exchange.github.io/docs/linear/',
'https://github.com/bybit-exchange',
],
'fees': 'https://help.bybit.com/hc/en-us/articles/360039261154',
'referral': 'https://www.bybit.com/register?affiliate_id=35953',
},
'api': {
'public': {
'get': {
# inverse swap
'v2/public/orderBook/L2': 1,
'v2/public/kline/list': 3,
'v2/public/tickers': 1,
'v2/public/trading-records': 1,
'v2/public/symbols': 1,
'v2/public/mark-price-kline': 3,
'v2/public/index-price-kline': 3,
'v2/public/premium-index-kline': 2,
'v2/public/open-interest': 1,
'v2/public/big-deal': 1,
'v2/public/account-ratio': 1,
'v2/public/funding-rate': 1,
'v2/public/elite-ratio': 1,
'v2/public/funding/prev-funding-rate': 1,
'v2/public/risk-limit/list': 1,
# linear swap USDT
'public/linear/kline': 3,
'public/linear/recent-trading-records': 1,
'public/linear/risk-limit': 1,
'public/linear/funding/prev-funding-rate': 1,
'public/linear/mark-price-kline': 1,
'public/linear/index-price-kline': 1,
'public/linear/premium-index-kline': 1,
# spot
'spot/v1/time': 1,
'spot/v1/symbols': 1,
'spot/quote/v1/depth': 1,
'spot/quote/v1/depth/merged': 1,
'spot/quote/v1/trades': 1,
'spot/quote/v1/kline': 1,
'spot/quote/v1/ticker/24hr': 1,
'spot/quote/v1/ticker/price': 1,
'spot/quote/v1/ticker/book_ticker': 1,
'spot/v3/public/symbols': 1,
'spot/v3/public/quote/depth': 1,
'spot/v3/public/quote/depth/merged': 1,
'spot/v3/public/quote/trades': 1,
'spot/v3/public/quote/kline': 1,
'spot/v3/public/quote/ticker/24hr': 1,
'spot/v3/public/quote/ticker/price': 1,
'spot/v3/public/quote/ticker/bookTicker': 1,
'spot/v3/public/server-time': 1,
'spot/v3/public/infos': 1,
'spot/v3/public/margin-product-infos': 1,
'spot/v3/public/margin-ensure-tokens': 1,
# data
'v2/public/time': 1,
'v3/public/time': 1,
'v2/public/announcement': 1,
# USDC endpoints
# option USDC
'option/usdc/openapi/public/v1/order-book': 1,
'option/usdc/openapi/public/v1/symbols': 1,
'option/usdc/openapi/public/v1/tick': 1,
'option/usdc/openapi/public/v1/delivery-price': 1,
'option/usdc/openapi/public/v1/query-trade-latest': 1,
'option/usdc/openapi/public/v1/query-historical-volatility': 1,
'option/usdc/openapi/public/v1/all-tickers': 1,
# perpetual swap USDC
'perpetual/usdc/openapi/public/v1/order-book': 1,
'perpetual/usdc/openapi/public/v1/symbols': 1,
'perpetual/usdc/openapi/public/v1/tick': 1,
'perpetual/usdc/openapi/public/v1/kline/list': 1,
'perpetual/usdc/openapi/public/v1/mark-price-kline': 1,
'perpetual/usdc/openapi/public/v1/index-price-kline': 1,
'perpetual/usdc/openapi/public/v1/premium-index-kline': 1,
'perpetual/usdc/openapi/public/v1/open-interest': 1,
'perpetual/usdc/openapi/public/v1/big-deal': 1,
'perpetual/usdc/openapi/public/v1/account-ratio': 1,
'perpetual/usdc/openapi/public/v1/prev-funding-rate': 1,
'perpetual/usdc/openapi/public/v1/risk-limit/list': 1,
# account
'asset/v1/public/deposit/allowed-deposit-list': 1,
'contract/v3/public/copytrading/symbol/list': 1,
# derivative
'derivatives/v3/public/order-book/L2': 1,
'derivatives/v3/public/kline': 1,
'derivatives/v3/public/tickers': 1,
'derivatives/v3/public/instruments-info': 1,
'derivatives/v3/public/mark-price-kline': 1,
'derivatives/v3/public/index-price-kline': 1,
'derivatives/v3/public/funding/history-funding-rate': 1,
'derivatives/v3/public/risk-limit/list': 1,
'derivatives/v3/public/delivery-price': 1,
'derivatives/v3/public/recent-trade': 1,
'derivatives/v3/public/open-interest': 1,
'derivatives/v3/public/insurance': 1,
# v5
'v5/market/kline': 1,
'v5/market/mark-price-kline': 1,
'v5/market/index-price-kline': 1,
'v5/market/premium-index-price-kline': 1,
'v5/market/instruments-info': 1,
'v5/market/orderbook': 1,
'v5/market/tickers': 1,
'v5/market/funding/history': 1,
'v5/market/recent-trade': 1,
'v5/market/open-interest': 1,
'v5/market/historical-volatility': 1,
'v5/market/insurance': 1,
'v5/market/risk-limit': 1,
'v5/market/delivery-price': 1,
'v5/spot-lever-token/info': 1,
'v5/spot-lever-token/reference': 1,
'v5/announcements/index': 1,
'v5/spot-cross-margin-trade/pledge-token': 1,
'v5/spot-cross-margin-trade/borrow-token': 1,
'v5/ins-loan/ensure-tokens-convert': 1,
},
},
'private': {
'get': {
# inverse swap
'v2/private/order/list': 5,
'v2/private/order': 5,
'v2/private/stop-order/list': 5,
'v2/private/stop-order': 1,
'v2/private/position/list': 25,
'v2/private/position/fee-rate': 40,
'v2/private/execution/list': 25,
'v2/private/trade/closed-pnl/list': 1,
'v2/public/risk-limit/list': 1, # TODO check
'v2/public/funding/prev-funding-rate': 25, # TODO check
'v2/private/funding/prev-funding': 25,
'v2/private/funding/predicted-funding': 25,
'v2/private/account/api-key': 5,
'v2/private/account/lcp': 1,
'v2/private/wallet/balance': 25, # 120 per minute = 2 per second => cost = 50 / 2 = 25
'v2/private/wallet/fund/records': 25,
'v2/private/wallet/withdraw/list': 25,
'v2/private/exchange-order/list': 1,
# linear swap USDT
'private/linear/order/list': 5, # 600 per minute = 10 per second => cost = 50 / 10 = 5
'private/linear/order/search': 5,
'private/linear/stop-order/list': 5,
'private/linear/stop-order/search': 5,
'private/linear/position/list': 25,
'private/linear/trade/execution/list': 25,
'private/linear/trade/closed-pnl/list': 25,
'public/linear/risk-limit': 1,
'private/linear/funding/predicted-funding': 25,
'private/linear/funding/prev-funding': 25,
# inverse futures
'futures/private/order/list': 5,
'futures/private/order': 5,
'futures/private/stop-order/list': 5,
'futures/private/stop-order': 5,
'futures/private/position/list': 25,
'futures/private/execution/list': 25,
'futures/private/trade/closed-pnl/list': 1,
# spot
'spot/v1/account': 2.5,
'spot/v1/order': 2.5,
'spot/v1/open-orders': 2.5,
'spot/v1/history-orders': 2.5,
'spot/v1/myTrades': 2.5,
'spot/v1/cross-margin/order': 10,
'spot/v1/cross-margin/accounts/balance': 10,
'spot/v1/cross-margin/loan-info': 10,
'spot/v1/cross-margin/repay/history': 10,
'spot/v3/private/order': 2.5,
'spot/v3/private/open-orders': 2.5,
'spot/v3/private/history-orders': 2.5,
'spot/v3/private/my-trades': 2.5,
'spot/v3/private/account': 2.5,
'spot/v3/private/reference': 2.5,
'spot/v3/private/record': 2.5,
'spot/v3/private/cross-margin-orders': 10,
'spot/v3/private/cross-margin-account': 10,
'spot/v3/private/cross-margin-loan-info': 10,
'spot/v3/private/cross-margin-repay-history': 10,
'spot/v3/private/margin-loan-infos': 10,
'spot/v3/private/margin-repaid-infos': 10,
'spot/v3/private/margin-ltv': 10,
# account
'asset/v1/private/transfer/list': 50, # 60 per minute = 1 per second => cost = 50 / 1 = 50
'asset/v3/private/transfer/inter-transfer/list/query': 0.84, # 60/s
'asset/v1/private/sub-member/transfer/list': 50,
'asset/v3/private/transfer/sub-member/list/query': 0.84, # 60/s
'asset/v3/private/transfer/sub-member-transfer/list/query': 0.84, # 60/s
'asset/v3/private/transfer/universal-transfer/list/query': 0.84, # 60/s
'asset/v1/private/sub-member/member-ids': 50,
'asset/v1/private/deposit/record/query': 50,
'asset/v1/private/withdraw/record/query': 25,
'asset/v1/private/coin-info/query': 25,
'asset/v3/private/coin-info/query': 25, # 2/s
'asset/v1/private/asset-info/query': 50,
'asset/v1/private/deposit/address': 100,
'asset/v3/private/deposit/address/query': 0.17, # 300/s
'asset/v1/private/universal/transfer/list': 50,
'contract/v3/private/copytrading/order/list': 1,
'contract/v3/private/copytrading/position/list': 1,
'contract/v3/private/copytrading/wallet/balance': 1,
'contract/v3/private/position/limit-info': 25, # 120 per minute = 2 per second => cost = 50 / 2 = 25
'contract/v3/private/order/unfilled-orders': 1,
'contract/v3/private/order/list': 1,
'contract/v3/private/position/list': 1,
'contract/v3/private/execution/list': 1,
'contract/v3/private/position/closed-pnl': 1,
'contract/v3/private/account/wallet/balance': 1,
'contract/v3/private/account/fee-rate': 1,
'contract/v3/private/account/wallet/fund-records': 1,
# derivative
'unified/v3/private/order/unfilled-orders': 1,
'unified/v3/private/order/list': 1,
'unified/v3/private/position/list': 1,
'unified/v3/private/execution/list': 1,
'unified/v3/private/delivery-record': 1,
'unified/v3/private/settlement-record': 1,
'unified/v3/private/account/wallet/balance': 1,
'unified/v3/private/account/transaction-log': 1,
'asset/v2/private/exchange/exchange-order-all': 1,
'unified/v3/private/account/borrow-history': 1,
'unified/v3/private/account/borrow-rate': 1,
'unified/v3/private/account/info': 1,
'user/v3/private/frozen-sub-member': 10, # 5/s
'user/v3/private/query-sub-members': 5, # 10/s
'user/v3/private/query-api': 5, # 10/s
'asset/v3/private/transfer/transfer-coin/list/query': 0.84, # 60/s
'asset/v3/private/transfer/account-coin/balance/query': 0.84, # 60/s
'asset/v3/private/transfer/account-coins/balance/query': 50,
'asset/v3/private/transfer/asset-info/query': 0.84, # 60/s
'asset/v3/public/deposit/allowed-deposit-list/query': 0.17, # 300/s
'asset/v3/private/deposit/record/query': 0.17, # 300/s
'asset/v3/private/withdraw/record/query': 0.17, # 300/s
# v5
'v5/order/history': 2.5,
'v5/order/spot-borrow-check': 2.5,
'v5/order/realtime': 2.5,
'v5/position/list': 2.5,
'v5/execution/list': 2.5,
'v5/position/closed-pnl': 2.5,
'v5/account/wallet-balance': 2.5,
'v5/account/borrow-history': 2.5,
'v5/account/collateral-info': 2.5,
'v5/account/mmp-state': 2.5,
'v5/asset/coin-greeks': 2.5,
'v5/account/info': 2.5,
'v5/account/transaction-log': 2.5,
'v5/account/fee-rate': 1,
'v5/asset/exchange/order-record': 2.5,
'v5/asset/delivery-record': 2.5,
'v5/asset/settlement-record': 2.5,
'v5/asset/transfer/query-asset-info': 2.5,
'v5/asset/transfer/query-account-coin-balance': 2.5,
'v5/asset/transfer/query-transfer-coin-list': 2.5,
'v5/asset/transfer/query-inter-transfer-list': 2.5,
'v5/asset/transfer/query-sub-member-list': 2.5,
'v5/asset/transfer/query-universal-transfer-list': 1,
'v5/asset/deposit/query-allowed-list': 2.5,
'v5/asset/deposit/query-record': 2.5,
'v5/asset/deposit/query-sub-member-record': 2.5,
'v5/asset/deposit/query-address': 2.5,
'v5/asset/deposit/query-sub-member-address': 2.5,
'v5/asset/deposit/query-internal-record': 2.5,
'v5/asset/coin/query-info': 2.5,
'v5/asset/withdraw/query-record': 2.5,
'v5/asset/withdraw/withdrawable-amount': 2.5,
'v5/asset/transfer/query-account-coins-balance': 2.5,
# user
'v5/user/query-sub-members': 10,
'v5/user/query-api': 10,
'v5/spot-cross-margin-trade/loan-info': 1, # 50/s => cost = 50 / 50 = 1
'v5/spot-cross-margin-trade/account': 1, # 50/s => cost = 50 / 50 = 1
'v5/spot-cross-margin-trade/orders': 1, # 50/s => cost = 50 / 50 = 1
'v5/spot-cross-margin-trade/repay-history': 1, # 50/s => cost = 50 / 50 = 1
'v5/ins-loan/ltv-convert': 1,
},
'post': {
# inverse swap
'v2/private/order/create': 30,
'v2/private/order/cancel': 30,
'v2/private/order/cancelAll': 300, # 100 per minute + 'consumes 10 requests'
'v2/private/order/replace': 30,
'v2/private/stop-order/create': 30,
'v2/private/stop-order/cancel': 30,
'v2/private/stop-order/cancelAll': 300,
'v2/private/stop-order/replace': 30,
'v2/private/position/change-position-margin': 40,
'v2/private/position/trading-stop': 40,
'v2/private/position/leverage/save': 40,
'v2/private/tpsl/switch-mode': 40,
'v2/private/position/switch-isolated': 2.5,
'v2/private/position/risk-limit': 2.5,
'v2/private/position/switch-mode': 2.5,
# linear swap USDT
'private/linear/order/create': 30, # 100 per minute = 1.666 per second => cost = 50 / 1.6666 = 30
'private/linear/order/cancel': 30,
'private/linear/order/cancel-all': 300, # 100 per minute + 'consumes 10 requests'
'private/linear/order/replace': 30,
'private/linear/stop-order/create': 30,
'private/linear/stop-order/cancel': 30,
'private/linear/stop-order/cancel-all': 300,
'private/linear/stop-order/replace': 30,
'private/linear/position/set-auto-add-margin': 40,
'private/linear/position/switch-isolated': 40,
'private/linear/position/switch-mode': 40,
'private/linear/tpsl/switch-mode': 2.5,
'private/linear/position/add-margin': 40,
'private/linear/position/set-leverage': 40, # 75 per minute = 1.25 per second => cost = 50 / 1.25 = 40
'private/linear/position/trading-stop': 40,
'private/linear/position/set-risk': 2.5,
# inverse futures
'futures/private/order/create': 30,
'futures/private/order/cancel': 30,
'futures/private/order/cancelAll': 30,
'futures/private/order/replace': 30,
'futures/private/stop-order/create': 30,
'futures/private/stop-order/cancel': 30,
'futures/private/stop-order/cancelAll': 30,
'futures/private/stop-order/replace': 30,
'futures/private/position/change-position-margin': 40,
'futures/private/position/trading-stop': 40,
'futures/private/position/leverage/save': 40,
'futures/private/position/switch-mode': 40,
'futures/private/tpsl/switch-mode': 40,
'futures/private/position/switch-isolated': 40,
'futures/private/position/risk-limit': 2.5,
# spot
'spot/v1/order': 2.5,
'spot/v1/cross-margin/loan': 10,
'spot/v1/cross-margin/repay': 10,
'spot/v3/private/order': 2.5,
'spot/v3/private/cancel-order': 2.5,
'spot/v3/private/cancel-orders': 2.5,
'spot/v3/private/cancel-orders-by-ids': 2.5,
'spot/v3/private/purchase': 2.5,
'spot/v3/private/redeem': 2.5,
'spot/v3/private/cross-margin-loan': 10,
'spot/v3/private/cross-margin-repay': 10,
# account
'asset/v1/private/transfer': 150, # 20 per minute = 0.333 per second => cost = 50 / 0.3333 = 150
'asset/v3/private/transfer/inter-transfer': 2.5, # 20/s
'asset/v1/private/sub-member/transfer': 150,
'asset/v1/private/withdraw': 50,
'asset/v3/private/withdraw/create': 1, # 10/s
'asset/v1/private/withdraw/cancel': 50,
'asset/v3/private/withdraw/cancel': 0.84, # 60/s
'asset/v1/private/transferable-subs/save': 3000,
'asset/v1/private/universal/transfer': 1500,
'asset/v3/private/transfer/sub-member-transfer': 2.5, # 20/s
'asset/v3/private/transfer/transfer-sub-member-save': 2.5, # 20/s
'asset/v3/private/transfer/universal-transfer': 2.5, # 20/s
'user/v3/private/create-sub-member': 10, # 5/s
'user/v3/private/create-sub-api': 10, # 5/s
'user/v3/private/update-api': 10, # 5/s
'user/v3/private/delete-api': 10, # 5/s
'user/v3/private/update-sub-api': 10, # 5/s
'user/v3/private/delete-sub-api': 10, # 5/s
# USDC endpoints
# option USDC
'option/usdc/openapi/private/v1/place-order': 2.5,
'option/usdc/openapi/private/v1/batch-place-order': 2.5,
'option/usdc/openapi/private/v1/replace-order': 2.5,
'option/usdc/openapi/private/v1/batch-replace-orders': 2.5,
'option/usdc/openapi/private/v1/cancel-order': 2.5,
'option/usdc/openapi/private/v1/batch-cancel-orders': 2.5,
'option/usdc/openapi/private/v1/cancel-all': 2.5,
'option/usdc/openapi/private/v1/query-active-orders': 2.5,
'option/usdc/openapi/private/v1/query-order-history': 2.5,
'option/usdc/openapi/private/v1/execution-list': 2.5,
'option/usdc/openapi/private/v1/query-transaction-log': 2.5,
'option/usdc/openapi/private/v1/query-wallet-balance': 2.5,
'option/usdc/openapi/private/v1/query-asset-info': 2.5,
'option/usdc/openapi/private/v1/query-margin-info': 2.5,
'option/usdc/openapi/private/v1/query-position': 2.5,
'option/usdc/openapi/private/v1/query-delivery-list': 2.5,
'option/usdc/openapi/private/v1/query-position-exp-date': 2.5,
'option/usdc/openapi/private/v1/mmp-modify': 2.5,
'option/usdc/openapi/private/v1/mmp-reset': 2.5,
# perpetual swap USDC
'perpetual/usdc/openapi/private/v1/place-order': 2.5,
'perpetual/usdc/openapi/private/v1/replace-order': 2.5,
'perpetual/usdc/openapi/private/v1/cancel-order': 2.5,
'perpetual/usdc/openapi/private/v1/cancel-all': 2.5,
'perpetual/usdc/openapi/private/v1/position/leverage/save': 2.5,
'option/usdc/openapi/private/v1/session-settlement': 2.5,
'option/usdc/private/asset/account/setMarginMode': 2.5,
'perpetual/usdc/openapi/public/v1/risk-limit/list': 2.5,
'perpetual/usdc/openapi/private/v1/position/set-risk-limit': 2.5,
'perpetual/usdc/openapi/private/v1/predicted-funding': 2.5,
'contract/v3/private/copytrading/order/create': 2.5,
'contract/v3/private/copytrading/order/cancel': 2.5,
'contract/v3/private/copytrading/order/close': 2.5,
'contract/v3/private/copytrading/position/close': 2.5,
'contract/v3/private/copytrading/position/set-leverage': 2.5,
'contract/v3/private/copytrading/wallet/transfer': 2.5,
'contract/v3/private/copytrading/order/trading-stop': 2.5,
'contract/v3/private/order/create': 1,
'contract/v3/private/order/cancel': 1,
'contract/v3/private/order/cancel-all': 1,
'contract/v3/private/order/replace': 1,
'contract/v3/private/position/set-auto-add-margin': 1,
'contract/v3/private/position/switch-isolated': 1,
'contract/v3/private/position/switch-mode': 1,
'contract/v3/private/position/switch-tpsl-mode': 1,
'contract/v3/private/position/set-leverage': 1,
'contract/v3/private/position/trading-stop': 1,
'contract/v3/private/position/set-risk-limit': 1,
'contract/v3/private/account/setMarginMode': 1,
# derivative
'unified/v3/private/order/create': 2.5,
'unified/v3/private/order/replace': 2.5,
'unified/v3/private/order/cancel': 2.5,
'unified/v3/private/order/create-batch': 2.5,
'unified/v3/private/order/replace-batch': 2.5,
'unified/v3/private/order/cancel-batch': 2.5,
'unified/v3/private/order/cancel-all': 2.5,
'unified/v3/private/position/set-leverage': 2.5,
'unified/v3/private/position/tpsl/switch-mode': 2.5,
'unified/v3/private/position/set-risk-limit': 2.5,
'unified/v3/private/position/trading-stop': 2.5,
'unified/v3/private/account/upgrade-unified-account': 2.5,
'unified/v3/private/account/setMarginMode': 2.5,
# tax
'fht/compliance/tax/v3/private/registertime': 50,
'fht/compliance/tax/v3/private/create': 50,
'fht/compliance/tax/v3/private/status': 50,
'fht/compliance/tax/v3/private/url': 50,
# v5
'v5/order/create': 2.5,
'v5/order/amend': 2.5,
'v5/order/cancel': 2.5,
'v5/order/cancel-all': 2.5,
'v5/order/create-batch': 2.5,
'v5/order/amend-batch': 2.5,
'v5/order/cancel-batch': 2.5,
'v5/order/disconnected-cancel-all': 2.5,
'v5/position/set-leverage': 2.5,
'v5/position/set-tpsl-mode': 2.5,
'v5/position/set-risk-limit': 2.5,
'v5/position/trading-stop': 2.5,
'v5/account/upgrade-to-uta': 2.5,
'v5/account/set-margin-mode': 2.5,
'v5/asset/transfer/inter-transfer': 2.5,
'v5/asset/transfer/save-transfer-sub-member': 2.5,
'v5/asset/transfer/universal-transfer': 2.5,
'v5/asset/deposit/deposit-to-account': 2.5,
'v5/asset/withdraw/create': 2.5,
'v5/asset/withdraw/cancel': 2.5,
'v5/spot-lever-token/purchase': 2.5,
'v5/spot-lever-token/redeem': 2.5,
'v5/spot-lever-token/order-record': 2.5,
'v5/spot-margin-trade/switch-mode': 2.5,
'v5/spot-margin-trade/set-leverage': 2.5,
# user
'v5/user/create-sub-member': 10,
'v5/user/create-sub-api': 10,
'v5/user/frozen-sub-member': 10,
'v5/user/update-api': 10,
'v5/user/update-sub-api': 10,
'v5/user/delete-api': 10,
'v5/user/delete-sub-api': 10,
'v5/spot-cross-margin-trade/loan': 2.5, # 20/s => cost = 50 / 20 = 2.5
'v5/spot-cross-margin-trade/repay': 2.5, # 20/s => cost = 50 / 20 = 2.5
'v5/spot-cross-margin-trade/switch': 2.5, # 20/s => cost = 50 / 20 = 2.5
},
'delete': {
# spot
'spot/v1/order': 2.5,
'spot/v1/order/fast': 2.5,
'spot/order/batch-cancel': 2.5,
'spot/order/batch-fast-cancel': 2.5,
'spot/order/batch-cancel-by-ids': 2.5,
},
},
},
'httpExceptions': {
'403': RateLimitExceeded, # Forbidden -- You request too many times
},
'exceptions': {
                # Undocumented explanation of error strings:
# - oc_diff: order cost needed to place self order
# - new_oc: total order cost of open orders including the order you are trying to open
# - ob: order balance - the total cost of current open orders
# - ab: available balance
'exact': {
'-10009': BadRequest, # {"ret_code":-10009,"ret_msg":"Invalid period!","result":null,"token":null}
'-1004': BadRequest, # {"ret_code":-1004,"ret_msg":"Missing required parameter \u0027symbol\u0027","ext_code":null,"ext_info":null,"result":null}
'-1021': BadRequest, # {"ret_code":-1021,"ret_msg":"Timestamp for self request is outside of the recvWindow.","ext_code":null,"ext_info":null,"result":null}
'-1103': BadRequest, # An unknown parameter was sent.
'-1140': InvalidOrder, # {"ret_code":-1140,"ret_msg":"Transaction amount lower than the minimum.","result":{},"ext_code":"","ext_info":null,"time_now":"1659204910.248576"}
'-1197': InvalidOrder, # {"ret_code":-1197,"ret_msg":"Your order quantity to buy is too large. The filled price may deviate significantly from the market price. Please try again","result":{},"ext_code":"","ext_info":null,"time_now":"1659204531.979680"}
'-2013': InvalidOrder, # {"ret_code":-2013,"ret_msg":"Order does not exist.","ext_code":null,"ext_info":null,"result":null}
'-2015': AuthenticationError, # Invalid API-key, IP, or permissions for action.
'-6017': BadRequest, # Repayment amount has exceeded the total liability
'-6025': BadRequest, # Amount to borrow cannot be lower than the min. amount to borrow(per transaction)
'-6029': BadRequest, # Amount to borrow has exceeded the user's estimated max amount to borrow
'5004': ExchangeError, # {"retCode":5004,"retMsg":"Server Timeout","result":null,"retExtInfo":{},"time":1667577060106}
'7001': BadRequest, # {"retCode":7001,"retMsg":"request params type error"}
'10001': BadRequest, # parameter error
'10002': InvalidNonce, # request expired, check your timestamp and recv_window
'10003': AuthenticationError, # Invalid apikey
'10004': AuthenticationError, # invalid sign
'10005': PermissionDenied, # permission denied for current apikey
'10006': RateLimitExceeded, # too many requests
'10007': AuthenticationError, # api_key not found in your request parameters
'10008': AuthenticationError, # User had been banned
'10009': AuthenticationError, # IP had been banned
'10010': PermissionDenied, # request ip mismatch
'10014': BadRequest, # Request is duplicate
'10016': ExchangeError, # {"retCode":10016,"retMsg":"System error. Please try again later."}
'10017': BadRequest, # request path not found or request method is invalid
'10018': RateLimitExceeded, # exceed ip rate limit
'10020': PermissionDenied, # {"retCode":10020,"retMsg":"your account is not a unified margin account, please update your account","result":null,"retExtInfo":null,"time":1664783731123}
'10024': PermissionDenied, # Compliance rules triggered
'10027': PermissionDenied, # Trading Banned
'10028': PermissionDenied, # The API can only be accessed by unified account users.
'10029': PermissionDenied, # The requested symbol is invalid, please check symbol whitelist
'12201': BadRequest, # {"retCode":12201,"retMsg":"Invalid orderCategory parameter.","result":{},"retExtInfo":null,"time":1666699391220}
'100028': PermissionDenied, # The API cannot be accessed by unified account users.
'110001': InvalidOrder, # Order does not exist
'110003': InvalidOrder, # Order price is out of permissible range
'110004': InsufficientFunds, # Insufficient wallet balance
'110005': InvalidOrder, # position status
'110006': InsufficientFunds, # cannot afford estimated position_margin
'110007': InsufficientFunds, # {"retCode":110007,"retMsg":"ab not enough for new order","result":{},"retExtInfo":{},"time":1668838414793}
'110008': InvalidOrder, # Order has been finished or canceled
'110009': InvalidOrder, # The number of stop orders exceeds maximum limit allowed
'110010': InvalidOrder, # Order already cancelled
'110011': InvalidOrder, # Any adjustments made will trigger immediate liquidation
'110012': InsufficientFunds, # Available balance not enough
'110013': BadRequest, # Due to risk limit, cannot set leverage
'110014': InsufficientFunds, # Available balance not enough to add margin
'110015': BadRequest, # the position is in cross_margin
'110016': InvalidOrder, # Requested quantity of contracts exceeds risk limit, please adjust your risk limit level before trying again
'110017': InvalidOrder, # Reduce-only rule not satisfied
'110018': BadRequest, # userId illegal
'110019': InvalidOrder, # orderId illegal
'110020': InvalidOrder, # number of active orders greater than 500
'110021': InvalidOrder, # Open Interest exceeded
'110022': InvalidOrder, # qty has been limited, cannot modify the order to add qty
'110023': InvalidOrder, # This contract only supports position reduction operation, please contact customer service for details
'110024': InvalidOrder, # You have an existing position, so position mode cannot be switched
'110025': InvalidOrder, # Position mode is not modified
'110026': InvalidOrder, # Cross/isolated margin mode is not modified
'110027': InvalidOrder, # Margin is not modified
'110028': InvalidOrder, # Open orders exist, so you cannot change position mode
'110029': InvalidOrder, # Hedge mode is not available for self symbol
'110030': InvalidOrder, # Duplicate orderId
'110031': InvalidOrder, # risk limit info does not exists
'110032': InvalidOrder, # Illegal order
'110033': InvalidOrder, # Margin cannot be set without open position
'110034': InvalidOrder, # There is no net position
'110035': InvalidOrder, # Cancel order is not completed before liquidation
'110036': InvalidOrder, # Cross margin mode is not allowed to change leverage
'110037': InvalidOrder, # User setting list does not have self symbol
'110038': InvalidOrder, # Portfolio margin mode is not allowed to change leverage
'110039': InvalidOrder, # Maintain margin rate is too high, which may trigger liquidation
'110040': InvalidOrder, # Order will trigger forced liquidation, please resubmit the order
'110041': InvalidOrder, # Skip liquidation is not allowed when a position or maker order exists
'110042': InvalidOrder, # Pre-delivery status can only reduce positions
'110043': BadRequest, # Set leverage not modified
'110044': InsufficientFunds, # Insufficient available margin
'110045': InsufficientFunds, # Insufficient wallet balance
'110046': BadRequest, # Any adjustments made will trigger immediate liquidation
'110047': BadRequest, # Risk limit cannot be adjusted due to insufficient available margin
'110048': BadRequest, # Risk limit cannot be adjusted current/expected position value held exceeds the revised risk limit
'110049': BadRequest, # Tick notes can only be numbers
'110050': BadRequest, # Coin is not in the range of selected
'110051': InsufficientFunds, # The user's available balance cannot cover the lowest price of the current market
'110052': InsufficientFunds, # User's available balance is insufficient to set a price
'110053': InsufficientFunds, # The user's available balance cannot cover the current market price and upper limit price
'110054': InvalidOrder, # This position has at least one take profit link order, so the take profit and stop loss mode cannot be switched
'110055': InvalidOrder, # This position has at least one stop loss link order, so the take profit and stop loss mode cannot be switched
'110056': InvalidOrder, # This position has at least one trailing stop link order, so the take profit and stop loss mode cannot be switched
'110057': InvalidOrder, # Conditional order or limit order contains TP/SL related params
'110058': InvalidOrder, # Insufficient number of remaining position size to set take profit and stop loss
'110059': InvalidOrder, # In the case of partial filled of the open order, it is not allowed to modify the take profit and stop loss settings of the open order
'110060': BadRequest, # Under full TP/SL mode, it is not allowed to modify TP/SL
'110061': BadRequest, # Under partial TP/SL mode, TP/SL set more than 20
'110062': BadRequest, # Institution MMP profile not found.
'110063': ExchangeError, # Settlement in progress! xxx not available for trades.
'110064': InvalidOrder, # The number of contracts modified cannot be less than or equal to the filled quantity
'110065': PermissionDenied, # MMP hasn't yet been enabled for your account. Please contact your BD manager.
'110066': ExchangeError, # No trading is allowed at the current time
'110067': PermissionDenied, # unified account is not support
'110068': PermissionDenied, # Leveraged user trading is not allowed
'110069': PermissionDenied, # Do not allow OTC lending users to trade
'110070': InvalidOrder, # ETP symbols are not allowed to be traded
'110071': ExchangeError, # Sorry, we're revamping the Unified Margin Account! Currently, new upgrades are not supported. If you have any questions, please contact our 24/7 customer support.
'110072': InvalidOrder, # OrderLinkedID is duplicate
'110073': ExchangeError, # Set margin mode failed
'130006': InvalidOrder, # {"ret_code":130006,"ret_msg":"The number of contracts exceeds maximum limit allowed: too large","ext_code":"","ext_info":"","result":null,"time_now":"1658397095.099030","rate_limit_status":99,"rate_limit_reset_ms":1658397095097,"rate_limit":100}
'130021': InsufficientFunds, # {"ret_code":130021,"ret_msg":"orderfix price failed for CannotAffordOrderCost.","ext_code":"","ext_info":"","result":null,"time_now":"1644588250.204878","rate_limit_status":98,"rate_limit_reset_ms":1644588250200,"rate_limit":100} | {"ret_code":130021,"ret_msg":"oc_diff[1707966351], new_oc[1707966351] with ob[....]+AB[....]","ext_code":"","ext_info":"","result":null,"time_now":"1658395300.872766","rate_limit_status":99,"rate_limit_reset_ms":1658395300855,"rate_limit":100} caused issues/9149#issuecomment-1146559498
'130074': InvalidOrder, # {"ret_code":130074,"ret_msg":"expect Rising, but trigger_price[190000000] \u003c= current[211280000]??LastPrice","ext_code":"","ext_info":"","result":null,"time_now":"1655386638.067076","rate_limit_status":97,"rate_limit_reset_ms":1655386638065,"rate_limit":100}
'131001': InsufficientFunds, # {"retCode":131001,"retMsg":"the available balance is not sufficient to cover the handling fee","result":{},"retExtInfo":{},"time":1666892821245}
'131084': ExchangeError, # Withdraw failed because of Uta Upgrading
'131200': ExchangeError, # Service error
'131201': ExchangeError, # Internal error
'131202': BadRequest, # Invalid memberId
'131203': BadRequest, # Request parameter error
'131204': BadRequest, # Account info error
'131205': BadRequest, # Query transfer error
'131206': ExchangeError, # Fail to transfer
'131207': BadRequest, # Account not exist
'131208': ExchangeError, # Forbid transfer
'131209': BadRequest, # Get subMember relation error
'131210': BadRequest, # Amount accuracy error
'131211': BadRequest, # fromAccountType can't be the same
'131212': InsufficientFunds, # Insufficient balance
'131213': BadRequest, # TransferLTV check error
'131214': BadRequest, # TransferId exist
'131215': BadRequest, # Amount error
'131216': ExchangeError, # Query balance error
'131217': ExchangeError, # Risk check error
'131002': BadRequest, # Parameter error
                    '131003': ExchangeError,  # Internal error
'131004': AuthenticationError, # KYC needed
                    '131085': InsufficientFunds,  # Withdrawal amount is greater than your available balance(the delayed withdrawal is triggered)
'131086': BadRequest, # Withdrawal amount exceeds risk limit(the risk limit of margin trade is triggered)
'131088': BadRequest, # The withdrawal amount exceeds the remaining withdrawal limit of your identity verification level. The current available amount for withdrawal : %s
'131089': BadRequest, # User sensitive operation, withdrawal is prohibited within 24 hours
'131090': ExchangeError, # User withdraw has been banned
'131091': ExchangeError, # Blocked login status does not allow withdrawals
'131092': ExchangeError, # User status is abnormal
'131093': ExchangeError, # The withdrawal address is not in the whitelist
'131094': BadRequest, # UserId is not in the whitelist
                    '131095': BadRequest,  # Withdrawal amount exceeds the 24 hour platform limit
                    '131096': BadRequest,  # Withdraw amount does not satisfy the lower limit or upper limit
'131097': ExchangeError, # Withdrawal of self currency has been closed
                    '131098': ExchangeError,  # Withdrawal currently is not available from new address
'131099': ExchangeError, # Hot wallet status can cancel the withdraw
'140001': OrderNotFound, # Order does not exist
'140003': InvalidOrder, # Order price is out of permissible range
'140004': InsufficientFunds, # Insufficient wallet balance
'140005': InvalidOrder, # position status
'140006': InsufficientFunds, # cannot afford estimated position_margin
'140007': InsufficientFunds, # Insufficient available balance
'140008': InvalidOrder, # Order has been finished or canceled
'140009': InvalidOrder, # The number of stop orders exceeds maximum limit allowed
'140010': InvalidOrder, # Order already cancelled
'140011': InvalidOrder, # Any adjustments made will trigger immediate liquidation
'140012': InsufficientFunds, # Available balance not enough
'140013': BadRequest, # Due to risk limit, cannot set leverage
'140014': InsufficientFunds, # Available balance not enough to add margin
'140015': InvalidOrder, # the position is in cross_margin
'140016': InvalidOrder, # Requested quantity of contracts exceeds risk limit, please adjust your risk limit level before trying again
'140017': InvalidOrder, # Reduce-only rule not satisfied
'140018': BadRequest, # userId illegal
'140019': InvalidOrder, # orderId illegal
'140020': InvalidOrder, # number of active orders greater than 500
'140021': InvalidOrder, # Open Interest exceeded
'140022': InvalidOrder, # qty has been limited, cannot modify the order to add qty
'140023': InvalidOrder, # This contract only supports position reduction operation, please contact customer service for details
'140024': BadRequest, # You have an existing position, so position mode cannot be switched
'140025': BadRequest, # Position mode is not modified
'140026': BadRequest, # Cross/isolated margin mode is not modified
'140027': BadRequest, # Margin is not modified
'140028': InvalidOrder, # Open orders exist, so you cannot change position mode
'140029': BadRequest, # Hedge mode is not available for self symbol
'140030': InvalidOrder, # Duplicate orderId
'140031': BadRequest, # risk limit info does not exists
'140032': InvalidOrder, # Illegal order
'140033': InvalidOrder, # Margin cannot be set without open position
'140034': InvalidOrder, # There is no net position
'140035': InvalidOrder, # Cancel order is not completed before liquidation
'140036': BadRequest, # Cross margin mode is not allowed to change leverage
'140037': InvalidOrder, # User setting list does not have self symbol
'140038': BadRequest, # Portfolio margin mode is not allowed to change leverage
'140039': BadRequest, # Maintain margin rate is too high, which may trigger liquidation
'140040': InvalidOrder, # Order will trigger forced liquidation, please resubmit the order
'140041': InvalidOrder, # Skip liquidation is not allowed when a position or maker order exists
'140042': InvalidOrder, # Pre-delivery status can only reduce positions
'140043': BadRequest, # Set leverage not modified
'140044': InsufficientFunds, # Insufficient available margin
'140045': InsufficientFunds, # Insufficient wallet balance
'140046': BadRequest, # Any adjustments made will trigger immediate liquidation
'140047': BadRequest, # Risk limit cannot be adjusted due to insufficient available margin
'140048': BadRequest, # Risk limit cannot be adjusted current/expected position value held exceeds the revised risk limit
'140049': BadRequest, # Tick notes can only be numbers
'140050': InvalidOrder, # Coin is not in the range of selected
'140051': InsufficientFunds, # The user's available balance cannot cover the lowest price of the current market
'140052': InsufficientFunds, # User's available balance is insufficient to set a price
'140053': InsufficientFunds, # The user's available balance cannot cover the current market price and upper limit price
'140054': InvalidOrder, # This position has at least one take profit link order, so the take profit and stop loss mode cannot be switched
'140055': InvalidOrder, # This position has at least one stop loss link order, so the take profit and stop loss mode cannot be switched
'140056': InvalidOrder, # This position has at least one trailing stop link order, so the take profit and stop loss mode cannot be switched
'140057': InvalidOrder, # Conditional order or limit order contains TP/SL related params
'140058': InvalidOrder, # Insufficient number of remaining position size to set take profit and stop loss
'140059': InvalidOrder, # In the case of partial filled of the open order, it is not allowed to modify the take profit and stop loss settings of the open order
'140060': BadRequest, # Under full TP/SL mode, it is not allowed to modify TP/SL
'140061': BadRequest, # Under partial TP/SL mode, TP/SL set more than 20
'140062': BadRequest, # Institution MMP profile not found.
'140063': ExchangeError, # Settlement in progress! xxx not available for trades.
'140064': InvalidOrder, # The number of contracts modified cannot be less than or equal to the filled quantity
'140065': PermissionDenied, # MMP hasn't yet been enabled for your account. Please contact your BD manager.
'140066': ExchangeError, # No trading is allowed at the current time
'140067': PermissionDenied, # unified account is not support
'140068': PermissionDenied, # Leveraged user trading is not allowed
'140069': PermissionDenied, # Do not allow OTC lending users to trade
'140070': InvalidOrder, # ETP symbols are not allowed to be traded
'170001': ExchangeError, # Internal error.
'170007': RequestTimeout, # Timeout waiting for response from backend server.
'170005': InvalidOrder, # Too many new orders; current limit is %s orders per %s.
'170031': ExchangeError, # The feature has been suspended
'170032': ExchangeError, # Network error. Please try again later
'170033': InsufficientFunds, # margin Insufficient account balance
'170034': InsufficientFunds, # Liability over flow in spot leverage trade!
'170035': BadRequest, # Submitted to the system for processing!
'170036': BadRequest, # You haven't enabled Cross Margin Trading yet. To do so, please head to the PC trading site or the Bybit app
'170037': BadRequest, # Cross Margin Trading not yet supported by the selected coin
'170105': BadRequest, # Parameter '%s' was empty.
'170115': InvalidOrder, # Invalid timeInForce.
'170116': InvalidOrder, # Invalid orderType.
'170117': InvalidOrder, # Invalid side.
'170121': InvalidOrder, # Invalid symbol.
'170130': BadRequest, # Data sent for paramter '%s' is not valid.
'170131': InsufficientFunds, # Balance insufficient
'170132': InvalidOrder, # Order price too high.
'170133': InvalidOrder, # Order price lower than the minimum.
'170134': InvalidOrder, # Order price decimal too long.
'170135': InvalidOrder, # Order quantity too large.
'170136': InvalidOrder, # Order quantity lower than the minimum.
'170137': InvalidOrder, # Order volume decimal too long
'170139': InvalidOrder, # Order has been filled.
'170140': InvalidOrder, # Transaction amount lower than the minimum.
'170124': InvalidOrder, # Order amount too large.
'170141': InvalidOrder, # Duplicate clientOrderId
'170142': InvalidOrder, # Order has been canceled
'170143': InvalidOrder, # Cannot be found on order book
'170144': InvalidOrder, # Order has been locked
'170145': InvalidOrder, # This order type does not support cancellation
'170146': InvalidOrder, # Order creation timeout
'170147': InvalidOrder, # Order cancellation timeout
'170148': InvalidOrder, # Market order amount decimal too long
'170149': ExchangeError, # Create order failed
'170150': ExchangeError, # Cancel order failed
'170151': InvalidOrder, # The trading pair is not open yet
'170157': InvalidOrder, # The trading pair is not available for api trading
'170159': InvalidOrder, # Market Order is not supported within the first %s minutes of newly launched pairs due to risk control.
'170190': InvalidOrder, # Cancel order has been finished
'170191': InvalidOrder, # Can not cancel order, please try again later
'170192': InvalidOrder, # Order price cannot be higher than %s .
'170193': InvalidOrder, # Buy order price cannot be higher than %s.
'170194': InvalidOrder, # Sell order price cannot be lower than %s.
'170195': InvalidOrder, # Please note that your order may not be filled
'170196': InvalidOrder, # Please note that your order may not be filled
'170197': InvalidOrder, # Your order quantity to buy is too large. The filled price may deviate significantly from the market price. Please try again
'170198': InvalidOrder, # Your order quantity to sell is too large. The filled price may deviate significantly from the market price. Please try again
'170199': InvalidOrder, # Your order quantity to buy is too large. The filled price may deviate significantly from the nav. Please try again.
'170200': InvalidOrder, # Your order quantity to sell is too large. The filled price may deviate significantly from the nav. Please try again.
'170221': BadRequest, # This coin does not exist.
'170222': RateLimitExceeded, # Too many requests in self time frame.
'170223': InsufficientFunds, # Your Spot Account with Institutional Lending triggers an alert or liquidation.
'170224': PermissionDenied, # You're not a user of the Innovation Zone.
'170226': InsufficientFunds, # Your Spot Account for Margin Trading is being liquidated.
'170227': ExchangeError, # This feature is not supported.
'170228': InvalidOrder, # The purchase amount of each order exceeds the estimated maximum purchase amount.
'170229': InvalidOrder, # The sell quantity per order exceeds the estimated maximum sell quantity.
'170234': ExchangeError, # System Error
'170210': InvalidOrder, # New order rejected.
'170213': OrderNotFound, # Order does not exist.
'170217': InvalidOrder, # Only LIMIT-MAKER order is supported for the current pair.
'170218': InvalidOrder, # The LIMIT-MAKER order is rejected due to invalid price.
'170010': InvalidOrder, # Purchase failed: Exceed the maximum position limit of leveraged tokens, the current available limit is %s USDT
'170011': InvalidOrder, # "Purchase failed: Exceed the maximum position limit of innovation tokens,
'170019': InvalidOrder, # the current available limit is replaceKey0 USDT"
'170201': PermissionDenied, # Your account has been restricted for trades. If you have any questions, please email us at support@bybit.com
'170202': InvalidOrder, # Invalid orderFilter parameter.
'170203': InvalidOrder, # Please enter the TP/SL price.
'170204': InvalidOrder, # trigger price cannot be higher than 110% price.
'170206': InvalidOrder, # trigger price cannot be lower than 90% of qty.
'175000': InvalidOrder, # The serialNum is already in use.
'175001': InvalidOrder, # Daily purchase limit has been exceeded. Please try again later.
'175002': InvalidOrder, # There's a large number of purchase orders. Please try again later.
'175003': InsufficientFunds, # Insufficient available balance. Please make a deposit and try again.
'175004': InvalidOrder, # Daily redemption limit has been exceeded. Please try again later.
'175005': InvalidOrder, # There's a large number of redemption orders. Please try again later.
'175006': InsufficientFunds, # Insufficient available balance. Please make a deposit and try again.
'175007': InvalidOrder, # Order not found.
'175008': InvalidOrder, # Purchase period hasn't started yet.
'175009': InvalidOrder, # Purchase amount has exceeded the upper limit.
'175010': PermissionDenied, # You haven't passed the quiz yet! To purchase and/or redeem an LT, please complete the quiz first.
'175012': InvalidOrder, # Redemption period hasn't started yet.
'175013': InvalidOrder, # Redemption amount has exceeded the upper limit.
'175014': InvalidOrder, # Purchase of the LT has been temporarily suspended.
'175015': InvalidOrder, # Redemption of the LT has been temporarily suspended.
'175016': InvalidOrder, # Invalid format. Please check the length and numeric precision.
'175017': InvalidOrder, # Failed to place order:Exceed the maximum position limit of leveraged tokens, the current available limit is XXXX USDT
'175027': ExchangeError, # Subscriptions and redemptions are temporarily unavailable while account upgrade is in progress
'176002': BadRequest, # Query user account info error
'176004': BadRequest, # Query order history start time exceeds end time
'176003': BadRequest, # Query user loan history error
'176006': BadRequest, # Repayment Failed
'176005': BadRequest, # Failed to borrow
'176008': BadRequest, # You haven't enabled Cross Margin Trading yet. To do so
'176007': BadRequest, # User not found
'176010': BadRequest, # Failed to locate the coins to borrow
'176009': BadRequest, # You haven't enabled Cross Margin Trading yet. To do so
'176012': BadRequest, # Pair not available
'176011': BadRequest, # Cross Margin Trading not yet supported by the selected coin
'176014': BadRequest, # Repeated repayment requests
'176013': BadRequest, # Cross Margin Trading not yet supported by the selected pair
'176015': InsufficientFunds, # Insufficient available balance
'176016': BadRequest, # No repayment required
'176017': BadRequest, # Repayment amount has exceeded the total liability
'176018': BadRequest, # Settlement in progress
'176019': BadRequest, # Liquidation in progress
'176020': BadRequest, # Failed to locate repayment history
'176021': BadRequest, # Repeated borrowing requests
'176022': BadRequest, # Coins to borrow not generally available yet
'176023': BadRequest, # Pair to borrow not generally available yet
'176024': BadRequest, # Invalid user status
'176025': BadRequest, # Amount to borrow cannot be lower than the min. amount to borrow(per transaction)
'176026': BadRequest, # Amount to borrow cannot be larger than the max. amount to borrow(per transaction)
'176027': BadRequest, # Amount to borrow cannot be higher than the max. amount to borrow per user
'176028': BadRequest, # Amount to borrow has exceeded Bybit's max. amount to borrow
'176029': BadRequest, # Amount to borrow has exceeded the user's estimated max. amount to borrow
'176030': BadRequest, # Query user loan info error
'176031': BadRequest, # Number of decimals has exceeded the maximum precision
'176034': BadRequest, # The leverage ratio is out of range
'176035': PermissionDenied, # Failed to close the leverage switch during liquidation
'176036': PermissionDenied, # Failed to adjust leverage switch during forced liquidation
'176037': PermissionDenied, # For non-unified transaction users, the operation failed
'176038': BadRequest, # The spot leverage is closed and the current operation is not allowed
'176039': BadRequest, # Borrowing, current operation is not allowed
'176040': BadRequest, # There is a spot leverage order, and the adjustment of the leverage switch failed!
'181000': BadRequest, # category is null
'181001': BadRequest, # category only support linear or option or spot.
'181002': InvalidOrder, # symbol is null.
'181003': InvalidOrder, # side is null.
'181004': InvalidOrder, # side only support Buy or Sell.
'182000': InvalidOrder, # symbol related quote price is null
'20001': OrderNotFound, # Order not exists
'20003': InvalidOrder, # missing parameter side
'20004': InvalidOrder, # invalid parameter side
'20005': InvalidOrder, # missing parameter symbol
'20006': InvalidOrder, # invalid parameter symbol
'20007': InvalidOrder, # missing parameter order_type
'20008': InvalidOrder, # invalid parameter order_type
'20009': InvalidOrder, # missing parameter qty
'20010': InvalidOrder, # qty must be greater than 0
'20011': InvalidOrder, # qty must be an integer
'20012': InvalidOrder, # qty must be greater than zero and less than 1 million
'20013': InvalidOrder, # missing parameter price
'20014': InvalidOrder, # price must be greater than 0
'20015': InvalidOrder, # missing parameter time_in_force
'20016': InvalidOrder, # invalid value for parameter time_in_force
'20017': InvalidOrder, # missing parameter order_id
'20018': InvalidOrder, # invalid date format
'20019': InvalidOrder, # missing parameter stop_px
'20020': InvalidOrder, # missing parameter base_price
'20021': InvalidOrder, # missing parameter stop_order_id
'20022': BadRequest, # missing parameter leverage
'20023': BadRequest, # leverage must be a number
'20031': BadRequest, # leverage must be greater than zero
'20070': BadRequest, # missing parameter margin
'20071': BadRequest, # margin must be greater than zero
'20084': BadRequest, # order_id or order_link_id is required
'30001': BadRequest, # order_link_id is repeated
'30003': InvalidOrder, # qty must be more than the minimum allowed
'30004': InvalidOrder, # qty must be less than the maximum allowed
'30005': InvalidOrder, # price exceeds maximum allowed
'30007': InvalidOrder, # price exceeds minimum allowed
'30008': InvalidOrder, # invalid order_type
'30009': ExchangeError, # no position found
'30010': InsufficientFunds, # insufficient wallet balance
'30011': PermissionDenied, # operation not allowed is undergoing liquidation
'30012': PermissionDenied, # operation not allowed is undergoing ADL
'30013': PermissionDenied, # position is in liq or adl status
'30014': InvalidOrder, # invalid closing order, qty should not greater than size
'30015': InvalidOrder, # invalid closing order, side should be opposite
'30016': ExchangeError, # TS and SL must be cancelled first while closing position
'30017': InvalidOrder, # estimated fill price cannot be lower than current Buy liq_price
'30018': InvalidOrder, # estimated fill price cannot be higher than current Sell liq_price
'30019': InvalidOrder, # cannot attach TP/SL params for non-zero position when placing non-opening position order
'30020': InvalidOrder, # position already has TP/SL params
'30021': InvalidOrder, # cannot afford estimated position_margin
'30022': InvalidOrder, # estimated buy liq_price cannot be higher than current mark_price
'30023': InvalidOrder, # estimated sell liq_price cannot be lower than current mark_price
'30024': InvalidOrder, # cannot set TP/SL/TS for zero-position
'30025': InvalidOrder, # trigger price should bigger than 10% of last price
'30026': InvalidOrder, # price too high
'30027': InvalidOrder, # price set for Take profit should be higher than Last Traded Price
'30028': InvalidOrder, # price set for Stop loss should be between Liquidation price and Last Traded Price
'30029': InvalidOrder, # price set for Stop loss should be between Last Traded Price and Liquidation price
'30030': InvalidOrder, # price set for Take profit should be lower than Last Traded Price
'30031': InsufficientFunds, # insufficient available balance for order cost
'30032': InvalidOrder, # order has been filled or cancelled
'30033': RateLimitExceeded, # The number of stop orders exceeds maximum limit allowed
'30034': OrderNotFound, # no order found
'30035': RateLimitExceeded, # too fast to cancel
'30036': ExchangeError, # the expected position value after order execution exceeds the current risk limit
'30037': InvalidOrder, # order already cancelled
'30041': ExchangeError, # no position found
'30042': InsufficientFunds, # insufficient wallet balance
'30043': InvalidOrder, # operation not allowed is undergoing liquidation
'30044': InvalidOrder, # operation not allowed is undergoing AD
'30045': InvalidOrder, # operation not allowed is not normal status
'30049': InsufficientFunds, # insufficient available balance
'30050': ExchangeError, # any adjustments made will trigger immediate liquidation
'30051': ExchangeError, # due to risk limit, cannot adjust leverage
'30052': ExchangeError, # leverage can not less than 1
'30054': ExchangeError, # position margin is invalid
'30057': ExchangeError, # requested quantity of contracts exceeds risk limit
'30063': ExchangeError, # reduce-only rule not satisfied
'30067': InsufficientFunds, # insufficient available balance
'30068': ExchangeError, # exit value must be positive
'30074': InvalidOrder, # can't create the stop order, because you expect the order will be triggered when the LastPrice(or IndexPrice、 MarkPrice, determined by trigger_by) is raising to stop_px, but the LastPrice(or IndexPrice、 MarkPrice) is already equal to or greater than stop_px, please adjust base_price or stop_px
'30075': InvalidOrder, # can't create the stop order, because you expect the order will be triggered when the LastPrice(or IndexPrice、 MarkPrice, determined by trigger_by) is falling to stop_px, but the LastPrice(or IndexPrice、 MarkPrice) is already equal to or less than stop_px, please adjust base_price or stop_px
'30078': ExchangeError, # {"ret_code":30078,"ret_msg":"","ext_code":"","ext_info":"","result":null,"time_now":"1644853040.916000","rate_limit_status":73,"rate_limit_reset_ms":1644853040912,"rate_limit":75}
# '30084': BadRequest, # Isolated not modified, see handleErrors below
'33004': AuthenticationError, # apikey already expired
'34026': ExchangeError, # the limit is no change
'34036': BadRequest, # {"ret_code":34036,"ret_msg":"leverage not modified","ext_code":"","ext_info":"","result":null,"time_now":"1652376449.258918","rate_limit_status":74,"rate_limit_reset_ms":1652376449255,"rate_limit":75}
'35015': BadRequest, # {"ret_code":35015,"ret_msg":"Qty not in range","ext_code":"","ext_info":"","result":null,"time_now":"1652277215.821362","rate_limit_status":99,"rate_limit_reset_ms":1652277215819,"rate_limit":100}
'340099': ExchangeError, # Server error
'3400045': ExchangeError, # Set margin mode failed
'3100116': BadRequest, # {"retCode":3100116,"retMsg":"Order quantity below the lower limit 0.01.","result":null,"retExtMap":{"key0":"0.01"}}
'3100198': BadRequest, # {"retCode":3100198,"retMsg":"orderLinkId can not be empty.","result":null,"retExtMap":{}}
'3200300': InsufficientFunds, # {"retCode":3200300,"retMsg":"Insufficient margin balance.","result":null,"retExtMap":{}}
},
'broad': {
'Request timeout': RequestTimeout, # {"retCode":10016,"retMsg":"Request timeout, please try again later","result":{},"retExtInfo":{},"time":1675307914985}
'unknown orderInfo': OrderNotFound, # {"ret_code":-1,"ret_msg":"unknown orderInfo","ext_code":"","ext_info":"","result":null,"time_now":"1584030414.005545","rate_limit_status":99,"rate_limit_reset_ms":1584030414003,"rate_limit":100}
'invalid api_key': AuthenticationError, # {"ret_code":10003,"ret_msg":"invalid api_key","ext_code":"","ext_info":"","result":null,"time_now":"1599547085.415797"}
# the below two issues are caused: issues/9149#issuecomment-1146559498, when response is such: {"ret_code":130021,"ret_msg":"oc_diff[1707966351], new_oc[1707966351] with ob[....]+AB[....]","ext_code":"","ext_info":"","result":null,"time_now":"1658395300.872766","rate_limit_status":99,"rate_limit_reset_ms":1658395300855,"rate_limit":100}
'oc_diff': InsufficientFunds,
'new_oc': InsufficientFunds,
'openapi sign params error!': AuthenticationError, # {"retCode":10001,"retMsg":"empty value: apiTimestamp[] apiKey[] apiSignature[xxxxxxxxxxxxxxxxxxxxxxx]: openapi sign params error!","result":null,"retExtInfo":null,"time":1664789597123}
},
},
'precisionMode': TICK_SIZE,
'options': {
'enableUnifiedMargin': None,
'enableUnifiedAccount': None,
'createMarketBuyOrderRequiresPrice': True,
'createUnifiedMarginAccount': False,
'defaultType': 'swap', # 'swap', 'future', 'option', 'spot'
'defaultSubType': 'linear', # 'linear', 'inverse'
'defaultSettle': 'USDT', # USDC for USDC settled markets
'code': 'BTC',
'recvWindow': 5 * 1000, # 5 sec default
'timeDifference': 0, # the difference between system clock and exchange server clock
'adjustForTimeDifference': False, # controls the adjustment logic upon instantiation
'brokerId': 'CCXT',
'accountsByType': {
'spot': 'SPOT',
'margin': 'SPOT',
'future': 'CONTRACT',
'swap': 'CONTRACT',
'option': 'OPTION',
'investment': 'INVESTMENT',
'unified': 'UNIFIED',
'funding': 'FUND',
'fund': 'FUND',
'contract': 'CONTRACT',
},
'accountsById': {
'SPOT': 'spot',
'MARGIN': 'spot',
'CONTRACT': 'contract',
'OPTION': 'option',
'INVESTMENT': 'investment',
'UNIFIED': 'unified',
},
'networks': {
'ERC20': 'ETH',
'TRC20': 'TRX',
'BEP20': 'BSC',
'OMNI': 'OMNI',
'SPL': 'SOL',
},
'networksById': {
'ETH': 'ERC20',
'TRX': 'TRC20',
'BSC': 'BEP20',
'OMNI': 'OMNI',
'SPL': 'SOL',
},
'defaultNetwork': 'ERC20',
'defaultNetworks': {
'USDT': 'TRC20',
},
'intervals': {
'5m': '5min',
'15m': '15min',
'30m': '30min',
'1h': '1h',
'4h': '4h',
'1d': '1d',
},
},
'fees': {
'trading': {
'feeSide': 'get',
'tierBased': True,
'percentage': True,
'taker': 0.00075,
'maker': 0.0001,
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {},
'deposit': {},
},
},
'commonCurrencies': {
'GAS': 'GASDAO',
},
})
def nonce(self):
return self.milliseconds() - self.options['timeDifference']
async def is_unified_enabled(self, params={}):
# The API key of user id must own one of permissions will be allowed to call following API endpoints.
# SUB UID: "Account Transfer"
# MASTER UID: "Account Transfer", "Subaccount Transfer", "Withdrawal"
enableUnifiedMargin = self.safe_value(self.options, 'enableUnifiedMargin')
enableUnifiedAccount = self.safe_value(self.options, 'enableUnifiedAccount')
if enableUnifiedMargin is None or enableUnifiedAccount is None:
response = await self.privateGetUserV3PrivateQueryApi(params)
#
# {
# "retCode":0,
# "retMsg":"OK",
# "result":{
# "id":"88888888",
# "note":"ccxt-moon",
# "apiKey":"8s8c808v8u8",
# "readOnly":0,
# "secret":"",
# "permissions":{
# "ContractTrade":[""],
# "Spot":[""],
# "Wallet":[""],
# "Options":[""],
# "Derivatives":[""],
# "CopyTrading":[""],
# "BlockTrade":[],
# "Exchange":[""],
# "NFT":[""]
# },
# "ips":[""],
# "type":1,
# "deadlineDay":27,
# "expiredAt":"",
# "createdAt":"",
# "unified":1,
# "uta": 1
# },
# "retExtInfo":null,
# "time":1669735171649
# }
#
result = self.safe_value(response, 'result', {})
self.options['enableUnifiedMargin'] = self.safe_integer(result, 'unified') == 1
self.options['enableUnifiedAccount'] = self.safe_integer(result, 'uta') == 1
return [self.options['enableUnifiedMargin'], self.options['enableUnifiedAccount']]
async def upgrade_unified_account(self, params={}):
createUnifiedMarginAccount = self.safe_value(self.options, 'createUnifiedMarginAccount')
if not createUnifiedMarginAccount:
raise NotSupported(self.id + ' upgradeUnifiedAccount() warning self method can only be called once, it is not reverseable and you will be stuck with a unified margin account, you also need at least 5000 USDT in your bybit account to do self. If you want to disable self warning set exchange.options["createUnifiedMarginAccount"]=true.')
return await self.privatePostUnifiedV3PrivateAccountUpgradeUnifiedAccount(params)
async def upgrade_unified_trade_account(self, params={}):
return await self.privatePostV5AccountUpgradeToUta(params)
async def fetch_time(self, params={}):
"""
fetches the current integer timestamp in milliseconds from the exchange server
see https://bybit-exchange.github.io/docs/v3/server-time
:param dict params: extra parameters specific to the bybit api endpoint
:returns int: the current integer timestamp in milliseconds from the exchange server
"""
response = await self.publicGetV3PublicTime(params)
#
# {
# "retCode": "0",
# "retMsg": "OK",
# "result": {
# "timeSecond": "1666879482",
# "timeNano": "1666879482792685914"
# },
# "retExtInfo": {},
# "time": "1666879482792"
# }
#
return self.safe_integer(response, 'time')
    async def fetch_currencies(self, params={}):
        """
        fetches all available currencies on an exchange
        see https://bybit-exchange.github.io/docs/v5/asset/coin-info
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns dict: an associative dictionary of unified currency structures, keyed by currency code
        """
        # the coin-info endpoint is private - silently skip instead of raising
        # when no API credentials are configured
        if not self.check_required_credentials(False):
            return None
        response = await self.privateGetV5AssetCoinQueryInfo(params)
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "",
        #     "result": {
        #         "rows": [
        #             {
        #                 "name": "BTC",
        #                 "coin": "BTC",
        #                 "remainAmount": "150",
        #                 "chains": [
        #                     {
        #                         "chainType": "BTC",
        #                         "confirmation": "10000",
        #                         "withdrawFee": "0.0005",
        #                         "depositMin": "0.0005",
        #                         "withdrawMin": "0.001",
        #                         "chain": "BTC",
        #                         "chainDeposit": "1",
        #                         "chainWithdraw": "1",
        #                         "minAccuracy": "8"
        #                     }
        #                 ]
        #             }
        #         ]
        #     },
        #     "retExtInfo": {},
        #     "time": 1672194582264
        # }
        #
        data = self.safe_value(response, 'result', [])
        rows = self.safe_value(data, 'rows', [])
        result = {}
        for i in range(0, len(rows)):
            currency = rows[i]
            currencyId = self.safe_string(currency, 'coin')
            code = self.safe_currency_code(currencyId)
            name = self.safe_string(currency, 'name')
            chains = self.safe_value(currency, 'chains', [])
            networks = {}
            # the currency-level values below are aggregated across all chains
            # in the inner loop: smallest precision, cheapest withdrawal fee,
            # smallest withdrawal/deposit minimums
            minPrecision = None
            minWithdrawFeeString = None
            minWithdrawString = None
            minDepositString = None
            deposit = False
            withdraw = False
            for j in range(0, len(chains)):
                chain = chains[j]
                networkId = self.safe_string(chain, 'chain')
                networkCode = self.network_id_to_code(networkId)
                # minAccuracy is a decimal count; parse_precision turns it into a tick-size number
                precision = self.parse_number(self.parse_precision(self.safe_string(chain, 'minAccuracy')))
                minPrecision = precision if (minPrecision is None) else min(minPrecision, precision)
                depositAllowed = self.safe_integer(chain, 'chainDeposit') == 1
                # deposit/withdraw become True if at least one chain allows them
                deposit = depositAllowed if (depositAllowed) else deposit
                withdrawAllowed = self.safe_integer(chain, 'chainWithdraw') == 1
                withdraw = withdrawAllowed if (withdrawAllowed) else withdraw
                withdrawFeeString = self.safe_string(chain, 'withdrawFee')
                if withdrawFeeString is not None:
                    minWithdrawFeeString = withdrawFeeString if (minWithdrawFeeString is None) else Precise.string_min(withdrawFeeString, minWithdrawFeeString)
                minNetworkWithdrawString = self.safe_string(chain, 'withdrawMin')
                if minNetworkWithdrawString is not None:
                    minWithdrawString = minNetworkWithdrawString if (minWithdrawString is None) else Precise.string_min(minNetworkWithdrawString, minWithdrawString)
                minNetworkDepositString = self.safe_string(chain, 'depositMin')
                if minNetworkDepositString is not None:
                    minDepositString = minNetworkDepositString if (minDepositString is None) else Precise.string_min(minNetworkDepositString, minDepositString)
                networks[networkCode] = {
                    'info': chain,
                    'id': networkId,
                    'network': networkCode,
                    'active': depositAllowed and withdrawAllowed,
                    'deposit': depositAllowed,
                    'withdraw': withdrawAllowed,
                    'fee': self.parse_number(withdrawFeeString),
                    'precision': precision,
                    'limits': {
                        'withdraw': {
                            'min': self.parse_number(minNetworkWithdrawString),
                            'max': None,
                        },
                        'deposit': {
                            'min': self.parse_number(minNetworkDepositString),
                            'max': None,
                        },
                    },
                }
            result[code] = {
                'info': currency,
                'code': code,
                'id': currencyId,
                'name': name,
                # active only when both deposits and withdrawals are possible on some chain
                'active': deposit and withdraw,
                'deposit': deposit,
                'withdraw': withdraw,
                'fee': self.parse_number(minWithdrawFeeString),
                'precision': minPrecision,
                'limits': {
                    'amount': {
                        'min': None,
                        'max': None,
                    },
                    'withdraw': {
                        'min': self.parse_number(minWithdrawString),
                        'max': None,
                    },
                    'deposit': {
                        'min': self.parse_number(minDepositString),
                        'max': None,
                    },
                },
                'networks': networks,
            }
        return result
async def fetch_markets(self, params={}):
"""
retrieves data on all markets for bybit
see https://bybit-exchange.github.io/docs/v5/market/instrument
:param dict params: extra parameters specific to the exchange api endpoint
:returns [dict]: an array of objects representing market data
"""
if self.options['adjustForTimeDifference']:
await self.load_time_difference()
promisesUnresolved = [
self.fetch_spot_markets(params),
self.fetch_derivatives_markets({'category': 'linear'}),
self.fetch_derivatives_markets({'category': 'inverse'}),
]
promises = await asyncio.gather(*promisesUnresolved)
spotMarkets = promises[0]
linearMarkets = promises[1]
inverseMarkets = promises[2]
markets = spotMarkets
markets = self.array_concat(markets, linearMarkets)
return self.array_concat(markets, inverseMarkets)
    async def fetch_spot_markets(self, params):
        """
        fetches all spot instruments from the exchange and parses them into
        unified market structures
        :param dict params: extra parameters merged into the instruments-info request
        :returns [dict]: a list of unified market structures with 'type' == 'spot'
        """
        request = {
            'category': 'spot',
        }
        response = await self.publicGetV5MarketInstrumentsInfo(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "category": "spot",
        #         "list": [
        #             {
        #                 "symbol": "BTCUSDT",
        #                 "baseCoin": "BTC",
        #                 "quoteCoin": "USDT",
        #                 "innovation": "0",
        #                 "status": "Trading",
        #                 "lotSizeFilter": {
        #                     "basePrecision": "0.000001",
        #                     "quotePrecision": "0.00000001",
        #                     "minOrderQty": "0.00004",
        #                     "maxOrderQty": "63.01197227",
        #                     "minOrderAmt": "1",
        #                     "maxOrderAmt": "100000"
        #                 },
        #                 "priceFilter": {
        #                     "tickSize": "0.01"
        #                 }
        #             }
        #         ]
        #     },
        #     "retExtInfo": {},
        #     "time": 1672712468011
        # }
        #
        responseResult = self.safe_value(response, 'result', {})
        markets = self.safe_value(responseResult, 'list', [])
        result = []
        # fees are not part of the payload; static defaults are used instead
        takerFee = self.parse_number('0.001')
        makerFee = self.parse_number('0.001')
        for i in range(0, len(markets)):
            market = markets[i]
            id = self.safe_string(market, 'symbol')
            baseId = self.safe_string(market, 'baseCoin')
            quoteId = self.safe_string(market, 'quoteCoin')
            base = self.safe_currency_code(baseId)
            quote = self.safe_currency_code(quoteId)
            symbol = base + '/' + quote
            status = self.safe_string(market, 'status')
            active = (status == 'Trading')
            lotSizeFilter = self.safe_value(market, 'lotSizeFilter')
            priceFilter = self.safe_value(market, 'priceFilter')
            quotePrecision = self.safe_number(lotSizeFilter, 'quotePrecision')
            result.append({
                'id': id,
                'symbol': symbol,
                'base': base,
                'quote': quote,
                'settle': None,
                'baseId': baseId,
                'quoteId': quoteId,
                'settleId': None,
                'type': 'spot',
                'spot': True,
                'margin': None,
                'swap': False,
                'future': False,
                'option': False,
                'active': active,
                'contract': False,
                'linear': None,
                'inverse': None,
                'taker': takerFee,
                'maker': makerFee,
                'contractSize': None,
                'expiry': None,
                'expiryDatetime': None,
                'strike': None,
                'optionType': None,
                'precision': {
                    'amount': self.safe_number(lotSizeFilter, 'basePrecision'),
                    # fall back to quotePrecision when tickSize is absent
                    'price': self.safe_number(priceFilter, 'tickSize', quotePrecision),
                },
                'limits': {
                    'leverage': {
                        'min': self.parse_number('1'),
                        'max': None,
                    },
                    'amount': {
                        'min': self.safe_number(lotSizeFilter, 'minOrderQty'),
                        'max': self.safe_number(lotSizeFilter, 'maxOrderQty'),
                    },
                    'price': {
                        'min': None,
                        'max': None,
                    },
                    'cost': {
                        'min': self.safe_number(lotSizeFilter, 'minOrderAmt'),
                        'max': self.safe_number(lotSizeFilter, 'maxOrderAmt'),
                    },
                },
                'info': market,
            })
        return result
    async def fetch_derivatives_markets(self, params):
        """
        fetches derivative instruments(linear/inverse contracts, or options) for
        the category given in params and parses them into unified market structures
        :param dict params: request parameters; expected to carry 'category'('linear', 'inverse' or 'option')
        :returns [dict]: a list of unified contract market structures
        """
        # NOTE(review): the caller-supplied params dict is mutated in place
        # ('limit' here, 'cursor' below) - confirm this is intentional
        params['limit'] = 1000  # minimize number of requests
        response = await self.publicGetV5MarketInstrumentsInfo(params)
        data = self.safe_value(response, 'result', {})
        markets = self.safe_value(data, 'list', [])
        paginationCursor = self.safe_string(data, 'nextPageCursor')
        if paginationCursor is not None:
            # keep fetching pages until an empty page or a missing cursor
            while(paginationCursor is not None):
                params['cursor'] = paginationCursor
                # NOTE(review): the follow-up pages use the older derivatives v3
                # endpoint while the first page used v5 - verify this is intended
                responseInner = await self.publicGetDerivativesV3PublicInstrumentsInfo(params)
                dataNew = self.safe_value(responseInner, 'result', {})
                rawMarkets = self.safe_value(dataNew, 'list', [])
                rawMarketsLength = len(rawMarkets)
                if rawMarketsLength == 0:
                    break
                markets = self.array_concat(rawMarkets, markets)
                paginationCursor = self.safe_string(dataNew, 'nextPageCursor')
        #
        # linear response
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "category": "linear",
        #         "list": [
        #             {
        #                 "symbol": "BTCUSDT",
        #                 "contractType": "LinearPerpetual",
        #                 "status": "Trading",
        #                 "baseCoin": "BTC",
        #                 "quoteCoin": "USDT",
        #                 "launchTime": "1585526400000",
        #                 "deliveryTime": "0",
        #                 "deliveryFeeRate": "",
        #                 "priceScale": "2",
        #                 "leverageFilter": {
        #                     "minLeverage": "1",
        #                     "maxLeverage": "100.00",
        #                     "leverageStep": "0.01"
        #                 },
        #                 "priceFilter": {
        #                     "minPrice": "0.50",
        #                     "maxPrice": "999999.00",
        #                     "tickSize": "0.50"
        #                 },
        #                 "lotSizeFilter": {
        #                     "maxOrderQty": "100.000",
        #                     "minOrderQty": "0.001",
        #                     "qtyStep": "0.001",
        #                     "postOnlyMaxOrderQty": "1000.000"
        #                 },
        #                 "unifiedMarginTrade": True,
        #                 "fundingInterval": 480,
        #                 "settleCoin": "USDT"
        #             }
        #         ],
        #         "nextPageCursor": ""
        #     },
        #     "retExtInfo": {},
        #     "time": 1672712495660
        # }
        #
        # option response
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "category": "option",
        #         "nextPageCursor": "",
        #         "list": [
        #             {
        #                 "category": "option",
        #                 "symbol": "ETH-3JAN23-1250-P",
        #                 "status": "ONLINE",
        #                 "baseCoin": "ETH",
        #                 "quoteCoin": "USD",
        #                 "settleCoin": "USDC",
        #                 "optionsType": "Put",
        #                 "launchTime": "1672560000000",
        #                 "deliveryTime": "1672732800000",
        #                 "deliveryFeeRate": "0.00015",
        #                 "priceFilter": {
        #                     "minPrice": "0.1",
        #                     "maxPrice": "10000000",
        #                     "tickSize": "0.1"
        #                 },
        #                 "lotSizeFilter": {
        #                     "maxOrderQty": "1500",
        #                     "minOrderQty": "0.1",
        #                     "qtyStep": "0.1"
        #                 }
        #             }
        #         ]
        #     },
        #     "retExtInfo": {},
        #     "time": 1672712537130
        # }
        #
        result = []
        category = self.safe_string(data, 'category')
        for i in range(0, len(markets)):
            market = markets[i]
            # fall back to the per-market category when the envelope omits it
            if category is None:
                category = self.safe_string(market, 'category')
            linear = (category == 'linear')
            inverse = (category == 'inverse')
            contractType = self.safe_string(market, 'contractType')
            inverseFutures = (contractType == 'InverseFutures')
            linearFutures = (contractType == 'LinearFutures')
            linearPerpetual = (contractType == 'LinearPerpetual')
            inversePerpetual = (contractType == 'InversePerpetual')
            id = self.safe_string(market, 'symbol')
            baseId = self.safe_string(market, 'baseCoin')
            quoteId = self.safe_string(market, 'quoteCoin')
            # linear contracts settle in the quote currency, inverse in the base
            defaultSettledId = quoteId if linear else baseId
            settleId = self.safe_string(market, 'settleCoin', defaultSettledId)
            base = self.safe_currency_code(baseId)
            quote = self.safe_currency_code(quoteId)
            settle = None
            # USD-quoted linear perpetuals actually settle in USDC
            if linearPerpetual and (settleId == 'USD'):
                settle = 'USDC'
            else:
                settle = self.safe_currency_code(settleId)
            symbol = base + '/' + quote
            lotSizeFilter = self.safe_value(market, 'lotSizeFilter', {})
            priceFilter = self.safe_value(market, 'priceFilter', {})
            leverage = self.safe_value(market, 'leverageFilter', {})
            status = self.safe_string(market, 'status')
            active = (status == 'Trading')
            swap = linearPerpetual or inversePerpetual
            future = inverseFutures or linearFutures
            option = (category == 'option')
            type = None
            if swap:
                type = 'swap'
            elif future:
                type = 'future'
            elif option:
                type = 'option'
            # deliveryTime is "0" for perpetuals; omit_zero maps that to None
            expiry = self.omit_zero(self.safe_string(market, 'deliveryTime'))
            if expiry is not None:
                expiry = int(expiry)
            expiryDatetime = self.iso8601(expiry)
            strike = None
            optionType = None
            symbol = symbol + ':' + settle
            if expiry is not None:
                symbol = symbol + '-' + self.yymmdd(expiry)
                if option:
                    # option ids look like "ETH-3JAN23-1250-P": base-expiry-strike-letter
                    splitId = id.split('-')
                    strike = self.safe_string(splitId, 2)
                    optionLetter = self.safe_string(splitId, 3)
                    symbol = symbol + '-' + strike + '-' + optionLetter
                    if optionLetter == 'P':
                        optionType = 'put'
                    elif optionLetter == 'C':
                        optionType = 'call'
            # inverse contract size comes from the min quantity filter; linear contracts are sized 1
            contractSize = self.safe_number_2(lotSizeFilter, 'minTradingQty', 'minOrderQty') if inverse else self.parse_number('1')
            result.append({
                'id': id,
                'symbol': symbol,
                'base': base,
                'quote': quote,
                'settle': settle,
                'baseId': baseId,
                'quoteId': quoteId,
                'settleId': settleId,
                'type': type,
                'spot': False,
                'margin': None,
                'swap': swap,
                'future': future,
                'option': option,
                'active': active,
                'contract': True,
                'linear': linear,
                'inverse': inverse,
                'taker': self.safe_number(market, 'takerFee', self.parse_number('0.0006')),
                'maker': self.safe_number(market, 'makerFee', self.parse_number('0.0001')),
                'contractSize': contractSize,
                'expiry': expiry,
                'expiryDatetime': expiryDatetime,
                'strike': strike,
                'optionType': optionType,
                'precision': {
                    'amount': self.safe_number(lotSizeFilter, 'qtyStep'),
                    'price': self.safe_number(priceFilter, 'tickSize'),
                },
                'limits': {
                    'leverage': {
                        'min': self.safe_number(leverage, 'minLeverage'),
                        'max': self.safe_number(leverage, 'maxLeverage'),
                    },
                    'amount': {
                        'min': self.safe_number_2(lotSizeFilter, 'minTradingQty', 'minOrderQty'),
                        'max': self.safe_number_2(lotSizeFilter, 'maxTradingQty', 'maxOrderQty'),
                    },
                    'price': {
                        'min': self.safe_number(priceFilter, 'minPrice'),
                        'max': self.safe_number(priceFilter, 'maxPrice'),
                    },
                    'cost': {
                        'min': None,
                        'max': None,
                    },
                },
                'info': market,
            })
        return result
def parse_ticker(self, ticker, market=None):
#
# spot
#
# {
# "symbol": "BTCUSDT",
# "bid1Price": "20517.96",
# "bid1Size": "2",
# "ask1Price": "20527.77",
# "ask1Size": "1.862172",
# "lastPrice": "20533.13",
# "prevPrice24h": "20393.48",
# "price24hPcnt": "0.0068",
# "highPrice24h": "21128.12",
# "lowPrice24h": "20318.89",
# "turnover24h": "243765620.65899866",
# "volume24h": "11801.27771",
# "usdIndexPrice": "20784.12009279"
# }
#
# linear/inverse
#
# {
# "symbol": "BTCUSD",
# "lastPrice": "16597.00",
# "indexPrice": "16598.54",
# "markPrice": "16596.00",
# "prevPrice24h": "16464.50",
# "price24hPcnt": "0.008047",
# "highPrice24h": "30912.50",
# "lowPrice24h": "15700.00",
# "prevPrice1h": "16595.50",
# "openInterest": "373504107",
# "openInterestValue": "22505.67",
# "turnover24h": "2352.94950046",
# "volume24h": "49337318",
# "fundingRate": "-0.001034",
# "nextFundingTime": "1672387200000",
# "predictedDeliveryPrice": "",
# "basisRate": "",
# "deliveryFeeRate": "",
# "deliveryTime": "0",
# "ask1Size": "1",
# "bid1Price": "16596.00",
# "ask1Price": "16597.50",
# "bid1Size": "1"
# }
#
# option
#
# {
# "symbol": "BTC-30DEC22-18000-C",
# "bid1Price": "0",
# "bid1Size": "0",
# "bid1Iv": "0",
# "ask1Price": "435",
# "ask1Size": "0.66",
# "ask1Iv": "5",
# "lastPrice": "435",
# "highPrice24h": "435",
# "lowPrice24h": "165",
# "markPrice": "0.00000009",
# "indexPrice": "16600.55",
# "markIv": "0.7567",
# "underlyingPrice": "16590.42",
# "openInterest": "6.3",
# "turnover24h": "2482.73",
# "volume24h": "0.15",
# "totalVolume": "99",
# "totalTurnover": "1967653",
# "delta": "0.00000001",
# "gamma": "0.00000001",
# "vega": "0.00000004",
# "theta": "-0.00000152",
# "predictedDeliveryPrice": "0",
# "change24h": "86"
# }
#
timestamp = self.safe_integer(ticker, 'time')
marketId = self.safe_string(ticker, 'symbol')
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
market = self.safe_market(marketId, market, None, defaultType)
symbol = self.safe_symbol(marketId, market, None, defaultType)
last = self.safe_string(ticker, 'lastPrice')
open = self.safe_string(ticker, 'prevPrice24h')
percentage = self.safe_string(ticker, 'price24hPcnt')
percentage = Precise.string_mul(percentage, '100')
quoteVolume = self.safe_string(ticker, 'turnover24h')
baseVolume = self.safe_string(ticker, 'volume24h')
bid = self.safe_string(ticker, 'bid1Price')
ask = self.safe_string(ticker, 'ask1Price')
high = self.safe_string(ticker, 'highPrice24h')
low = self.safe_string(ticker, 'lowPrice24h')
return self.safe_ticker({
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': high,
'low': low,
'bid': bid,
'bidVolume': self.safe_string_2(ticker, 'bidSize', 'bid1Size'),
'ask': ask,
'askVolume': self.safe_string_2(ticker, 'askSize', 'ask1Size'),
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': percentage,
'average': None,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}, market)
async def fetch_ticker(self, symbol: str, params={}):
"""
fetches a price ticker, a statistical calculation with the information calculated over the past 24 hours for a specific market
see https://bybit-exchange.github.io/docs/v5/market/tickers
:param str symbol: unified symbol of the market to fetch the ticker for
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: a `ticker structure <https://docs.ccxt.com/#/?id=ticker-structure>`
"""
self.check_required_symbol('fetchTicker', symbol)
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'baseCoin': '', Base coin. For option only
# 'expDate': '', Expiry date. e.g., 25DEC22. For option only
}
if market['spot']:
request['category'] = 'spot'
else:
if market['option']:
request['category'] = 'option'
elif market['linear']:
request['category'] = 'linear'
elif market['inverse']:
request['category'] = 'inverse'
response = await self.publicGetV5MarketTickers(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "category": "inverse",
# "list": [
# {
# "symbol": "BTCUSD",
# "lastPrice": "16597.00",
# "indexPrice": "16598.54",
# "markPrice": "16596.00",
# "prevPrice24h": "16464.50",
# "price24hPcnt": "0.008047",
# "highPrice24h": "30912.50",
# "lowPrice24h": "15700.00",
# "prevPrice1h": "16595.50",
# "openInterest": "373504107",
# "openInterestValue": "22505.67",
# "turnover24h": "2352.94950046",
# "volume24h": "49337318",
# "fundingRate": "-0.001034",
# "nextFundingTime": "1672387200000",
# "predictedDeliveryPrice": "",
# "basisRate": "",
# "deliveryFeeRate": "",
# "deliveryTime": "0",
# "ask1Size": "1",
# "bid1Price": "16596.00",
# "ask1Price": "16597.50",
# "bid1Size": "1"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672376496682
# }
#
result = self.safe_value(response, 'result', [])
tickers = self.safe_value(result, 'list', [])
rawTicker = self.safe_value(tickers, 0)
return self.parse_ticker(rawTicker, market)
async def fetch_tickers(self, symbols: Optional[List[str]] = None, params={}):
"""
fetches price tickers for multiple markets, statistical calculations with the information calculated over the past 24 hours each market
see https://bybit-exchange.github.io/docs/v5/market/tickers
:param [str]|None symbols: unified symbols of the markets to fetch the ticker for, all market tickers are returned if not assigned
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: an array of `ticker structures <https://docs.ccxt.com/#/?id=ticker-structure>`
"""
await self.load_markets()
market = None
if symbols is not None:
symbols = self.market_symbols(symbols)
market = self.market(symbols[0])
request = {
# 'symbol': market['id'],
# 'baseCoin': '', Base coin. For option only
# 'expDate': '', Expiry date. e.g., 25DEC22. For option only
}
type = None
isTypeInParams = ('type' in params)
type, params = self.handle_market_type_and_params('fetchTickers', market, params)
if type == 'spot':
request['category'] = 'spot'
elif type == 'swap' or type == 'future':
subType = None
subType, params = self.handle_sub_type_and_params('fetchTickers', market, params, 'linear')
request['category'] = subType
elif type == 'option':
request['category'] = 'option'
response = await self.publicGetV5MarketTickers(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "category": "inverse",
# "list": [
# {
# "symbol": "BTCUSD",
# "lastPrice": "16597.00",
# "indexPrice": "16598.54",
# "markPrice": "16596.00",
# "prevPrice24h": "16464.50",
# "price24hPcnt": "0.008047",
# "highPrice24h": "30912.50",
# "lowPrice24h": "15700.00",
# "prevPrice1h": "16595.50",
# "openInterest": "373504107",
# "openInterestValue": "22505.67",
# "turnover24h": "2352.94950046",
# "volume24h": "49337318",
# "fundingRate": "-0.001034",
# "nextFundingTime": "1672387200000",
# "predictedDeliveryPrice": "",
# "basisRate": "",
# "deliveryFeeRate": "",
# "deliveryTime": "0",
# "ask1Size": "1",
# "bid1Price": "16596.00",
# "ask1Price": "16597.50",
# "bid1Size": "1"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672376496682
# }
#
result = self.safe_value(response, 'result', {})
tickerList = self.safe_value(result, 'list', [])
tickers = {}
if market is None and isTypeInParams:
# create a "fake" market for the type
market = {
'type': 'swap' if (type == 'swap' or type == 'future') else type,
}
for i in range(0, len(tickerList)):
ticker = self.parse_ticker(tickerList[i], market)
symbol = ticker['symbol']
# self is needed because bybit returns
# futures with type = swap
marketInner = self.market(symbol)
if marketInner['type'] == type:
tickers[symbol] = ticker
return self.filter_by_array(tickers, 'symbol', symbols)
def parse_ohlcv(self, ohlcv, market=None):
#
# [
# "1621162800",
# "49592.43",
# "49644.91",
# "49342.37",
# "49349.42",
# "1451.59",
# "2.4343353100000003"
# ]
#
volumeIndex = 6 if (market['inverse']) else 5
return [
self.safe_integer(ohlcv, 0),
self.safe_number(ohlcv, 1),
self.safe_number(ohlcv, 2),
self.safe_number(ohlcv, 3),
self.safe_number(ohlcv, 4),
self.safe_number(ohlcv, volumeIndex),
]
async def fetch_ohlcv(self, symbol: str, timeframe='1m', since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetches historical candlestick data containing the open, high, low, and close price, and the volume of a market
see https://bybit-exchange.github.io/docs/v5/market/kline
see https://bybit-exchange.github.io/docs/v5/market/mark-kline
see https://bybit-exchange.github.io/docs/v5/market/index-kline
see https://bybit-exchange.github.io/docs/v5/market/preimum-index-kline
:param str symbol: unified symbol of the market to fetch OHLCV data for
:param str timeframe: the length of time each candle represents
:param int|None since: timestamp in ms of the earliest candle to fetch
:param int|None limit: the maximum amount of candles to fetch
:param dict params: extra parameters specific to the bybit api endpoint
:returns [[int]]: A list of candles ordered, open, high, low, close, volume
"""
self.check_required_symbol('fetchOHLCV', symbol)
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is None:
limit = 200 # default is 200 when requested with `since`
if since is not None:
request['start'] = since
if limit is not None:
request['limit'] = limit # max 1000, default 1000
request['interval'] = self.safe_string(self.timeframes, timeframe, timeframe)
method = None
if market['spot']:
request['category'] = 'spot'
method = 'publicGetV5MarketKline'
else:
price = self.safe_string(params, 'price')
params = self.omit(params, 'price')
methods = {
'mark': 'publicGetV5MarketMarkPriceKline',
'index': 'publicGetV5MarketIndexPriceKline',
'premiumIndex': 'publicGetV5MarketPremiumIndexPriceKline',
}
method = self.safe_value(methods, price, 'publicGetV5MarketKline')
if market['linear']:
request['category'] = 'linear'
elif market['inverse']:
request['category'] = 'inverse'
else:
raise NotSupported(self.id + ' fetchOHLCV() is not supported for option markets')
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "symbol": "BTCUSD",
# "category": "inverse",
# "list": [
# [
# "1670608800000",
# "17071",
# "17073",
# "17027",
# "17055.5",
# "268611",
# "15.74462667"
# ],
# [
# "1670605200000",
# "17071.5",
# "17071.5",
# "17061",
# "17071",
# "4177",
# "0.24469757"
# ],
# [
# "1670601600000",
# "17086.5",
# "17088",
# "16978",
# "17071.5",
# "6356",
# "0.37288112"
# ]
# ]
# },
# "retExtInfo": {},
# "time": 1672025956592
# }
#
result = self.safe_value(response, 'result', {})
ohlcvs = self.safe_value(result, 'list', [])
return self.parse_ohlcvs(ohlcvs, market, timeframe, since, limit)
def parse_funding_rate(self, ticker, market=None):
# {
# "symbol": "BTCUSDT",
# "bidPrice": "19255",
# "askPrice": "19255.5",
# "lastPrice": "19255.50",
# "lastTickDirection": "ZeroPlusTick",
# "prevPrice24h": "18634.50",
# "price24hPcnt": "0.033325",
# "highPrice24h": "19675.00",
# "lowPrice24h": "18610.00",
# "prevPrice1h": "19278.00",
# "markPrice": "19255.00",
# "indexPrice": "19260.68",
# "openInterest": "48069.549",
# "turnover24h": "4686694853.047006",
# "volume24h": "243730.252",
# "fundingRate": "0.0001",
# "nextFundingTime": "1663689600000",
# "predictedDeliveryPrice": "",
# "basisRate": "",
# "deliveryFeeRate": "",
# "deliveryTime": "0"
# }
#
timestamp = self.safe_integer(ticker, 'timestamp') # added artificially to avoid changing the signature
ticker = self.omit(ticker, 'timestamp')
marketId = self.safe_string(ticker, 'symbol')
symbol = self.safe_symbol(marketId, market, None, 'swap')
fundingRate = self.safe_number(ticker, 'fundingRate')
fundingTimestamp = self.safe_integer(ticker, 'nextFundingTime')
markPrice = self.safe_number(ticker, 'markPrice')
indexPrice = self.safe_number(ticker, 'indexPrice')
return {
'info': ticker,
'symbol': symbol,
'markPrice': markPrice,
'indexPrice': indexPrice,
'interestRate': None,
'estimatedSettlePrice': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fundingRate': fundingRate,
'fundingTimestamp': fundingTimestamp,
'fundingDatetime': self.iso8601(fundingTimestamp),
'nextFundingRate': None,
'nextFundingTimestamp': None,
'nextFundingDatetime': None,
'previousFundingRate': None,
'previousFundingTimestamp': None,
'previousFundingDatetime': None,
}
async def fetch_funding_rates(self, symbols: Optional[List[str]] = None, params={}):
"""
fetches funding rates for multiple markets
see https://bybit-exchange.github.io/docs/v5/market/tickers
:param [str]|None symbols: unified symbols of the markets to fetch the funding rates for, all market funding rates are returned if not assigned
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: an array of `funding rate structures <https://docs.ccxt.com/#/?id=funding-rate-structure>`
"""
await self.load_markets()
market = None
request = {}
if symbols is not None:
symbols = self.market_symbols(symbols)
market = self.market(symbols[0])
if len(symbols) == 1:
request['symbol'] = market['id']
type = None
type, params = self.handle_market_type_and_params('fetchFundingRates', market, params)
if type != 'swap':
raise NotSupported(self.id + ' fetchFundingRates() does not support ' + type + ' markets')
else:
subType = None
subType, params = self.handle_sub_type_and_params('fetchFundingRates', market, params, 'linear')
request['category'] = subType
response = await self.publicGetV5MarketTickers(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "category": "linear",
# "list": [
# {
# "symbol": "BTCUSDT",
# "bidPrice": "19255",
# "askPrice": "19255.5",
# "lastPrice": "19255.50",
# "lastTickDirection": "ZeroPlusTick",
# "prevPrice24h": "18634.50",
# "price24hPcnt": "0.033325",
# "highPrice24h": "19675.00",
# "lowPrice24h": "18610.00",
# "prevPrice1h": "19278.00",
# "markPrice": "19255.00",
# "indexPrice": "19260.68",
# "openInterest": "48069.549",
# "turnover24h": "4686694853.047006",
# "volume24h": "243730.252",
# "fundingRate": "0.0001",
# "nextFundingTime": "1663689600000",
# "predictedDeliveryPrice": "",
# "basisRate": "",
# "deliveryFeeRate": "",
# "deliveryTime": "0"
# }
# ]
# },
# "retExtInfo": null,
# "time": 1663670053454
# }
#
tickerList = self.safe_value(response, 'result', [])
timestamp = self.safe_integer(response, 'time')
tickerList = self.safe_value(tickerList, 'list')
fundingRates = {}
for i in range(0, len(tickerList)):
rawTicker = tickerList[i]
rawTicker['timestamp'] = timestamp # will be removed inside the parser
ticker = self.parse_funding_rate(tickerList[i], None)
symbol = ticker['symbol']
fundingRates[symbol] = ticker
return self.filter_by_array(fundingRates, 'symbol', symbols)
async def fetch_funding_rate_history(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetches historical funding rate prices
see https://bybit-exchange.github.io/docs/v5/market/history-fund-rate
:param str|None symbol: unified symbol of the market to fetch the funding rate history for
:param int|None since: timestamp in ms of the earliest funding rate to fetch
:param int|None limit: the maximum amount of `funding rate structures <https://docs.ccxt.com/en/latest/manual.html?#funding-rate-history-structure>` to fetch
:param dict params: extra parameters specific to the bybit api endpoint
:param int|None params['until']: timestamp in ms of the latest funding rate
:returns [dict]: a list of `funding rate structures <https://docs.ccxt.com/en/latest/manual.html?#funding-rate-history-structure>`
"""
self.check_required_symbol('fetchFundingRateHistory', symbol)
await self.load_markets()
if limit is None:
limit = 200
request = {
# 'category': '', # Product type. linear,inverse
# 'symbol': '', # Symbol name
# 'startTime': 0, # The start timestamp(ms)
# 'endTime': 0, # The end timestamp(ms)
'limit': limit, # Limit for data size per page. [1, 200]. Default: 200
}
market = self.market(symbol)
symbol = market['symbol']
request['symbol'] = market['id']
if market['option']:
raise NotSupported(self.id + ' fetchFundingRateHistory() is not supported for option markets')
elif market['linear']:
request['category'] = 'linear'
elif market['inverse']:
request['category'] = 'inverse'
if since is not None:
request['startTime'] = since
until = self.safe_integer_2(params, 'until', 'till') # unified in milliseconds
endTime = self.safe_integer(params, 'endTime', until) # exchange-specific in milliseconds
params = self.omit(params, ['endTime', 'till', 'until'])
if endTime is not None:
request['endTime'] = endTime
else:
if since is not None:
# end time is required when since is not empty
fundingInterval = 60 * 60 * 8 * 1000
request['endTime'] = since + limit * fundingInterval
response = await self.publicGetV5MarketFundingHistory(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "category": "linear",
# "list": [
# {
# "symbol": "ETHPERP",
# "fundingRate": "0.0001",
# "fundingRateTimestamp": "1672041600000"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672051897447
# }
#
rates = []
result = self.safe_value(response, 'result')
resultList = self.safe_value(result, 'list')
for i in range(0, len(resultList)):
entry = resultList[i]
timestamp = self.safe_integer(entry, 'fundingRateTimestamp')
rates.append({
'info': entry,
'symbol': self.safe_symbol(self.safe_string(entry, 'symbol'), None, None, 'swap'),
'fundingRate': self.safe_number(entry, 'fundingRate'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
})
sorted = self.sort_by(rates, 'timestamp')
return self.filter_by_symbol_since_limit(sorted, symbol, since, limit)
def parse_trade(self, trade, market=None):
isSpotTrade = ('isBuyerMaker' in trade) or ('feeTokenId' in trade)
if isSpotTrade:
return self.parse_spot_trade(trade, market)
else:
return self.parse_contract_trade(trade, market)
def parse_spot_trade(self, trade, market=None):
#
# public:
# {
# "price": "39548.68",
# "time": "1651748717850",
# "qty": "0.166872",
# "isBuyerMaker": 0
# }
#
# private:
# {
# "orderPrice": "82.5",
# "creatTime": "1666702226326",
# "orderQty": "0.016",
# "isBuyer": "0",
# "isMaker": "0",
# "symbol": "AAVEUSDT",
# "id": "1274785101965716992",
# "orderId": "1274784252359089664",
# "tradeId": "2270000000031365639",
# "execFee": "0",
# "feeTokenId": "AAVE",
# "matchOrderId": "1274785101865076224",
# "makerRebate": "0",
# "executionTime": "1666702226335"
# }
#
timestamp = self.safe_integer_n(trade, ['time', 'creatTime'])
takerOrMaker = None
side = None
isBuyerMaker = self.safe_integer(trade, 'isBuyerMaker')
if isBuyerMaker is not None:
# if public response
side = 'buy' if (isBuyerMaker == 1) else 'sell'
else:
# if private response
isBuyer = self.safe_integer(trade, 'isBuyer')
isMaker = self.safe_integer(trade, 'isMaker')
takerOrMaker = 'maker' if (isMaker == 0) else 'taker'
side = 'buy' if (isBuyer == 0) else 'sell'
marketId = self.safe_string(trade, 'symbol')
market = self.safe_market(marketId, market, None, 'spot')
fee = None
feeCost = self.safe_string(trade, 'execFee')
if feeCost is not None:
feeToken = self.safe_string(trade, 'feeTokenId')
feeCurrency = self.safe_currency_code(feeToken)
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
return self.safe_trade({
'id': self.safe_string(trade, 'tradeId'),
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': market['symbol'],
'order': self.safe_string(trade, 'orderId'),
'type': None,
'side': side,
'takerOrMaker': takerOrMaker,
'price': self.safe_string_2(trade, 'price', 'orderPrice'),
'amount': self.safe_string_2(trade, 'qty', 'orderQty'),
'cost': None,
'fee': fee,
}, market)
def parse_contract_trade(self, trade, market=None):
#
# public contract
#
# {
# "execId": "666042b4-50c6-58f3-bd9c-89b2088663ff",
# "symbol": "ETHUSD",
# "price": "1162.95",
# "size": "1",
# "side": "Sell",
# "time": "1669191277315",
# "isBlockTrade": False
# }
#
# public unified margin
#
# {
# "execId": "da66abbc-f358-5864-8d34-84ef7274d853",
# "symbol": "BTCUSDT",
# "price": "20802.50",
# "size": "0.200",
# "side": "Sell",
# "time": "1657870316630"
# }
#
# private contract trades
#
# {
# "symbol": "ETHUSD",
# "execFee": "0.00005484",
# "execId": "acf78206-d464-589b-b888-51bd130821c1",
# "execPrice": "1367.80",
# "execQty": "100",
# "execType": "Trade",
# "execValue": "0.0731101",
# "feeRate": "0.00075",
# "lastLiquidityInd": "RemovedLiquidity",
# "leavesQty": "0",
# "orderId": "fdc584c3-be5d-41ff-8f54-5be7649b1d1c",
# "orderLinkId": "",
# "orderPrice": "1299.50",
# "orderQty": "100",
# "orderType": "Market",
# "stopOrderType": "UNKNOWN",
# "side": "Sell",
# "execTime": "1611528105547",
# "closedSize": "100"
# }
#
# private unified margin
#
# {
# "symbol": "AAVEUSDT",
# "id": "1274785101965716992",
# "orderId": "1274784252359089664",
# "tradeId": "2270000000031365639",
# "orderPrice": "82.5",
# "orderQty": "0.016",
# "execFee": "0",
# "feeTokenId": "AAVE",
# "creatTime": "1666702226326",
# "isBuyer": "0",
# "isMaker": "0",
# "matchOrderId": "1274785101865076224",
# "makerRebate": "0",
# "executionTime": "1666702226335"
# }
#
# private USDC settled trades
#
# {
# "symbol": "ETHPERP",
# "orderLinkId": "",
# "side": "Buy",
# "orderId": "aad0ee44-ce12-4112-aeee-b7829f6c3a26",
# "execFee": "0.0210",
# "feeRate": "0.000600",
# "blockTradeId": "",
# "tradeTime": "1669196417930",
# "execPrice": "1162.15",
# "lastLiquidityInd": "TAKER",
# "execValue": "34.8645",
# "execType": "Trade",
# "execQty": "0.030",
# "tradeId": "0e94eaf5-b08e-5505-b43f-7f1f30b1ca80"
# }
#
id = self.safe_string_n(trade, ['execId', 'id', 'tradeId'])
marketId = self.safe_string(trade, 'symbol')
marketType = 'contract'
if market is not None:
marketType = market['type']
category = self.safe_string(trade, 'category')
if category is not None:
if category == 'spot':
marketType = 'spot'
market = self.safe_market(marketId, market, None, marketType)
symbol = market['symbol']
amountString = self.safe_string_n(trade, ['execQty', 'orderQty', 'size'])
priceString = self.safe_string_n(trade, ['execPrice', 'orderPrice', 'price'])
costString = self.safe_string(trade, 'execValue')
timestamp = self.safe_integer_n(trade, ['time', 'execTime', 'tradeTime'])
side = self.safe_string_lower(trade, 'side')
if side is None:
isBuyer = self.safe_integer(trade, 'isBuyer')
if isBuyer is not None:
side = 'buy' if isBuyer else 'sell'
isMaker = self.safe_value(trade, 'isMaker')
takerOrMaker = None
if isMaker is not None:
takerOrMaker = 'maker' if isMaker else 'taker'
else:
lastLiquidityInd = self.safe_string(trade, 'lastLiquidityInd')
if lastLiquidityInd == 'UNKNOWN':
lastLiquidityInd = None
if lastLiquidityInd is not None:
if (lastLiquidityInd == 'TAKER') or (lastLiquidityInd == 'MAKER'):
takerOrMaker = lastLiquidityInd.lower()
else:
takerOrMaker = 'maker' if (lastLiquidityInd == 'AddedLiquidity') else 'taker'
orderType = self.safe_string_lower(trade, 'orderType')
if orderType == 'unknown':
orderType = None
feeCostString = self.safe_string(trade, 'execFee')
fee = None
if feeCostString is not None:
feeCurrencyCode = None
if market['spot']:
feeCurrencyCode = self.safe_string(trade, 'commissionAsset')
else:
feeCurrencyCode = market['base'] if market['inverse'] else market['settle']
fee = {
'cost': feeCostString,
'currency': feeCurrencyCode,
}
return self.safe_trade({
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': self.safe_string(trade, 'orderId'),
'type': orderType,
'side': side,
'takerOrMaker': takerOrMaker,
'price': priceString,
'amount': amountString,
'cost': costString,
'fee': fee,
}, market)
async def fetch_trades(self, symbol: str, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
get the list of most recent trades for a particular symbol
see https://bybit-exchange.github.io/docs/v5/market/recent-trade
:param str symbol: unified symbol of the market to fetch trades for
:param int|None since: timestamp in ms of the earliest trade to fetch
:param int|None limit: the maximum amount of trades to fetch
:param dict params: extra parameters specific to the bybit api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html?#public-trades>`
"""
self.check_required_symbol('fetchTrades', symbol)
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'baseCoin': '', # Base coin. For option only. If not passed, return BTC data by default
# 'optionType': 'Call', # Option type. Call or Put. For option only
}
if limit is not None:
# spot: [1,60], default: 60.
# others: [1,1000], default: 500
request['limit'] = limit
if market['type'] == 'spot':
request['category'] = 'spot'
else:
if market['option']:
request['category'] = 'option'
elif market['linear']:
request['category'] = 'linear'
elif market['inverse']:
request['category'] = 'inverse'
response = await self.publicGetV5MarketRecentTrade(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "category": "spot",
# "list": [
# {
# "execId": "2100000000007764263",
# "symbol": "BTCUSDT",
# "price": "16618.49",
# "size": "0.00012",
# "side": "Buy",
# "time": "1672052955758",
# "isBlockTrade": False
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672053054358
# }
#
result = self.safe_value(response, 'result', {})
trades = self.safe_value(result, 'list', [])
return self.parse_trades(trades, market, since, limit)
async def fetch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):
"""
fetches information on open orders with bid(buy) and ask(sell) prices, volumes and other data
see https://bybit-exchange.github.io/docs/v5/market/orderbook
:param str symbol: unified symbol of the market to fetch the order book for
:param int|None limit: the maximum amount of order book entries to return
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: A dictionary of `order book structures <https://docs.ccxt.com/#/?id=order-book-structure>` indexed by market symbols
"""
self.check_required_symbol('fetchOrderBook', symbol)
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
defaultLimit = 25
if market['spot']:
# limit: [1, 50]. Default: 1
defaultLimit = 50
request['category'] = 'spot'
else:
if market['option']:
# limit: [1, 25]. Default: 1
request['category'] = 'option'
elif market['linear']:
# limit: [1, 200]. Default: 25
request['category'] = 'linear'
elif market['inverse']:
# limit: [1, 200]. Default: 25
request['category'] = 'inverse'
request['limit'] = limit if (limit is not None) else defaultLimit
response = await self.publicGetV5MarketOrderbook(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "s": "BTCUSDT",
# "a": [
# [
# "16638.64",
# "0.008479"
# ]
# ],
# "b": [
# [
# "16638.27",
# "0.305749"
# ]
# ],
# "ts": 1672765737733,
# "u": 5277055
# },
# "retExtInfo": {},
# "time": 1672765737734
# }
#
result = self.safe_value(response, 'result', [])
timestamp = self.safe_integer(result, 'ts')
return self.parse_order_book(result, symbol, timestamp, 'b', 'a')
def parse_balance(self, response):
#
# margin wallet
# [
# {
# "free": "0.001143855",
# "interest": "0",
# "loan": "0",
# "locked": "0",
# "tokenId": "BTC",
# "total": "0.001143855"
# },
# {
# "free": "200.00005568",
# "interest": "0.0008391",
# "loan": "200",
# "locked": "0",
# "tokenId": "USDT",
# "total": "200.00005568"
# },
# ]
#
# usdc wallet
# {
# "result": {
# "walletBalance": "10.0000",
# "accountMM": "0.0000",
# "bonus": "0.0000",
# "accountIM": "0.0000",
# "totalSessionRPL": "0.0000",
# "equity": "10.0000",
# "totalRPL": "0.0000",
# "marginBalance": "10.0000",
# "availableBalance": "10.0000",
# "totalSessionUPL": "0.0000"
# },
# "retCode": "0",
# "retMsg": "Success."
# }
#
# Unified Margin
#
# {
# "retCode": 0,
# "retMsg": "Success",
# "result": {
# "totalEquity": "112.21267421",
# "accountIMRate": "0.6895",
# "totalMarginBalance": "80.37711012",
# "totalInitialMargin": "55.42180254",
# "totalAvailableBalance": "24.95530758",
# "accountMMRate": "0.0459",
# "totalPerpUPL": "-16.69586570",
# "totalWalletBalance": "97.07311619",
# "totalMaintenanceMargin": "3.68580537",
# "coin": [
# {
# "currencyCoin": "ETH",
# "availableToBorrow": "0.00000000",
# "borrowSize": "0.00000000",
# "bonus": "0.00000000",
# "accruedInterest": "0.00000000",
# "availableBalanceWithoutConvert": "0.00000000",
# "totalOrderIM": "",
# "equity": "0.00000000",
# "totalPositionMM": "",
# "usdValue": "0.00000000",
# "availableBalance": "0.02441165",
# "unrealisedPnl": "",
# "totalPositionIM": "",
# "marginBalanceWithoutConvert": "0.00000000",
# "walletBalance": "0.00000000",
# "cumRealisedPnl": "",
# "marginBalance": "0.07862610"
# }
# ]
# },
# "time": 1657716037033
# }
#
# contract v3
#
# [
# {
# "coin": "BTC",
# "equity": "0.00000002",
# "walletBalance": "0.00000002",
# "positionMargin": "0",
# "availableBalance": "0.00000002",
# "orderMargin": "0",
# "occClosingFee": "0",
# "occFundingFee": "0",
# "unrealisedPnl": "0",
# "cumRealisedPnl": "-0.00010941",
# "givenCash": "0",
# "serviceCash": "0"
# },
# {
# "coin": "USDT",
# "equity": "3662.81038535",
# "walletBalance": "3662.81038535",
# "positionMargin": "0",
# "availableBalance": "3662.81038535",
# "orderMargin": "0",
# "occClosingFee": "0",
# "occFundingFee": "0",
# "unrealisedPnl": "0",
# "cumRealisedPnl": "-36.01761465",
# "givenCash": "0",
# "serviceCash": "0"
# }
# ]
# spot
# {
# retCode: '0',
# retMsg: 'OK',
# result: {
# balances: [
# {
# coin: 'BTC',
# coinId: 'BTC',
# total: '0.00977041118',
# free: '0.00877041118',
# locked: '0.001'
# },
# {
# coin: 'EOS',
# coinId: 'EOS',
# total: '2000',
# free: '2000',
# locked: '0'
# }
# ]
# },
# retExtInfo: {},
# time: '1670002625754'
# }
#
# Unified trade account
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "list": [
# {
# "totalEquity": "18070.32797922",
# "accountIMRate": "0.0101",
# "totalMarginBalance": "18070.32797922",
# "totalInitialMargin": "182.60183684",
# "accountType": "UNIFIED",
# "totalAvailableBalance": "17887.72614237",
# "accountMMRate": "0",
# "totalPerpUPL": "-0.11001349",
# "totalWalletBalance": "18070.43799271",
# "totalMaintenanceMargin": "0.38106773",
# "coin": [
# {
# "availableToBorrow": "2.5",
# "accruedInterest": "0",
# "availableToWithdraw": "0.805994",
# "totalOrderIM": "0",
# "equity": "0.805994",
# "totalPositionMM": "0",
# "usdValue": "12920.95352538",
# "unrealisedPnl": "0",
# "borrowAmount": "0",
# "totalPositionIM": "0",
# "walletBalance": "0.805994",
# "cumRealisedPnl": "0",
# "coin": "BTC"
# }
# ]
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672125441042
# }
#
# funding v5
# {
# retCode: '0',
# retMsg: 'success',
# result: {
# memberId: '452265',
# accountType: 'FUND',
# balance: [
# {
# coin: 'BTC',
# transferBalance: '0.2',
# walletBalance: '0.2',
# bonus: ''
# }
# ]
# },
# retExtInfo: {},
# time: '1677781902858'
# }
#
# all coins balance
# {
# "retCode": 0,
# "retMsg": "success",
# "result": {
# "memberId": "533285",
# "accountType": "FUND",
# "balance": [
# {
# "coin": "USDT",
# "transferBalance": "1010",
# "walletBalance": "1010",
# "bonus": ""
# },
# {
# "coin": "USDC",
# "transferBalance": "0",
# "walletBalance": "0",
# "bonus": ""
# }
# ]
# },
# "retExtInfo": {},
# "time": 1675865290069
# }
#
result = {
'info': response,
}
responseResult = self.safe_value(response, 'result', {})
currencyList = self.safe_value_n(responseResult, ['loanAccountList', 'list', 'coin', 'balances', 'balance'])
if currencyList is None:
# usdc wallet
code = 'USDC'
account = self.account()
account['free'] = self.safe_string(responseResult, 'availableBalance')
account['total'] = self.safe_string(responseResult, 'walletBalance')
result[code] = account
else:
for i in range(0, len(currencyList)):
entry = currencyList[i]
accountType = self.safe_string(entry, 'accountType')
if accountType == 'UNIFIED' or accountType == 'CONTRACT':
coins = self.safe_value(entry, 'coin')
for j in range(0, len(coins)):
account = self.account()
coinEntry = coins[j]
loan = self.safe_string(coinEntry, 'borrowAmount')
interest = self.safe_string(coinEntry, 'accruedInterest')
if (loan is not None) and (interest is not None):
account['debt'] = Precise.string_add(loan, interest)
account['total'] = self.safe_string(coinEntry, 'walletBalance')
account['free'] = self.safe_string(coinEntry, 'availableToWithdraw')
# account['used'] = self.safe_string(coinEntry, 'locked')
currencyId = self.safe_string(coinEntry, 'coin')
code = self.safe_currency_code(currencyId)
result[code] = account
else:
account = self.account()
loan = self.safe_string(entry, 'loan')
interest = self.safe_string(entry, 'interest')
if (loan is not None) and (interest is not None):
account['debt'] = Precise.string_add(loan, interest)
account['total'] = self.safe_string_2(entry, 'total', 'walletBalance')
account['free'] = self.safe_string_n(entry, ['free', 'availableBalanceWithoutConvert', 'availableBalance', 'transferBalance'])
account['used'] = self.safe_string(entry, 'locked')
currencyId = self.safe_string_n(entry, ['tokenId', 'coin', 'currencyCoin'])
code = self.safe_currency_code(currencyId)
result[code] = account
return self.safe_balance(result)
    async def fetch_balance(self, params={}):
        """
        query for balance and get the amount of funds available for trading or funds locked in orders

        the endpoint used depends on the account mode(unified account, unified
        margin, or normal account) and on the requested market type: 'spot'
        resolves to the spot wallet endpoints, any other type is mapped through
        self.options['accountsByType'] to a bybit account type
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns dict: a `balance structure <https://docs.ccxt.com/en/latest/manual.html?#balance-structure>`
        """
        await self.load_markets()
        request = {}
        method = None
        enableUnifiedMargin, enableUnifiedAccount = await self.is_unified_enabled()
        type = None
        type, params = self.handle_market_type_and_params('fetchBalance', None, params)
        isSpot = (type == 'spot')
        if isSpot:
            if enableUnifiedAccount or enableUnifiedMargin:
                method = 'privateGetSpotV3PrivateAccount'
            else:
                # normal accounts expose cross-margin balances on a dedicated endpoint
                marginMode = None
                marginMode, params = self.handle_margin_mode_and_params('fetchBalance', params)
                if marginMode is not None:
                    method = 'privateGetSpotV3PrivateCrossMarginAccount'
                else:
                    method = 'privateGetSpotV3PrivateAccount'
        elif enableUnifiedAccount or enableUnifiedMargin:
            if type == 'swap':
                type = 'unified'
        else:
            if type == 'swap':
                type = 'contract'
        if not isSpot:
            # translate the unified type into a bybit account type, e.g. 'funding' -> 'FUND'
            accountTypes = self.safe_value(self.options, 'accountsByType', {})
            unifiedType = self.safe_string_upper(accountTypes, type, type)
            if unifiedType == 'FUND':
                # use self endpoint only we have no other choice
                # because it requires transfer permission
                method = 'privateGetAssetV3PrivateTransferAccountCoinsBalanceQuery'
                request['accountType'] = unifiedType
            else:
                if enableUnifiedAccount:
                    method = 'privateGetV5AccountWalletBalance'
                    request['accountType'] = unifiedType
                elif enableUnifiedMargin:
                    method = 'privateGetUnifiedV3PrivateAccountWalletBalance'
                else:
                    method = 'privateGetContractV3PrivateAccountWalletBalance'
                    request['accountType'] = unifiedType
        response = await getattr(self, method)(self.extend(request, params))
        #
        # spot wallet
        #     {
        #       retCode: '0',
        #       retMsg: 'OK',
        #       result: {
        #         balances: [
        #           {
        #             coin: 'BTC',
        #             coinId: 'BTC',
        #             total: '0.00977041118',
        #             free: '0.00877041118',
        #             locked: '0.001'
        #           },
        #           {
        #             coin: 'EOS',
        #             coinId: 'EOS',
        #             total: '2000',
        #             free: '2000',
        #             locked: '0'
        #           }
        #         ]
        #       },
        #       retExtInfo: {},
        #       time: '1670002625754'
        #     }
        # cross
        #     {
        #         "retCode": 0,
        #         "retMsg": "success",
        #         "result": {
        #             "acctBalanceSum": "0.122995614474732872",
        #             "debtBalanceSum": "0.011734191124529754",
        #             "loanAccountList": [
        #                 {
        #                     "free": "0.001143855",
        #                     "interest": "0",
        #                     "loan": "0",
        #                     "locked": "0",
        #                     "tokenId": "BTC",
        #                     "total": "0.001143855"
        #                 },
        #                 {
        #                     "free": "200.00005568",
        #                     "interest": "0.0008391",
        #                     "loan": "200",
        #                     "locked": "0",
        #                     "tokenId": "USDT",
        #                     "total": "200.00005568"
        #                 },
        #             ],
        #             "riskRate": "0.0954",
        #             "status": 1
        #         },
        #         "retExtInfo": {},
        #         "time": 1669843584123
        #     }
        #
        # all coins balance
        #     {
        #         "retCode": 0,
        #         "retMsg": "success",
        #         "result": {
        #             "memberId": "533285",
        #             "accountType": "FUND",
        #             "balance": [
        #                 {
        #                     "coin": "USDT",
        #                     "transferBalance": "1010",
        #                     "walletBalance": "1010",
        #                     "bonus": ""
        #                 },
        #                 {
        #                     "coin": "USDC",
        #                     "transferBalance": "0",
        #                     "walletBalance": "0",
        #                     "bonus": ""
        #                 }
        #             ]
        #         },
        #         "retExtInfo": {},
        #         "time": 1675865290069
        #     }
        #
        return self.parse_balance(response)
def parse_order_status(self, status):
statuses = {
# v3 spot
'NEW': 'open',
'PARTIALLY_FILLED': 'open',
'FILLED': 'closed',
'CANCELED': 'canceled',
'PENDING_CANCEL': 'open',
'PENDING_NEW': 'open',
'REJECTED': 'rejected',
'PARTIALLY_FILLED_CANCELLED': 'canceled',
# v3 contract / unified margin / unified account
'Created': 'open',
'New': 'open',
'Rejected': 'rejected', # order is triggered but failed upon being placed
'PartiallyFilled': 'open',
'PartiallyFilledCanceled': 'canceled',
'Filled': 'closed',
'PendingCancel': 'open',
'Cancelled': 'canceled',
# below self line the status only pertains to conditional orders
'Untriggered': 'open',
'Deactivated': 'canceled',
'Triggered': 'open',
'Active': 'open',
}
return self.safe_string(statuses, status, status)
def parse_time_in_force(self, timeInForce):
timeInForces = {
'GoodTillCancel': 'GTC',
'ImmediateOrCancel': 'IOC',
'FillOrKill': 'FOK',
'PostOnly': 'PO',
}
return self.safe_string(timeInForces, timeInForce, timeInForce)
def parse_order(self, order, market=None):
orderCategoryExists = ('orderCategory' in order)
if orderCategoryExists:
return self.parse_spot_order(order, market)
return self.parse_contract_order(order, market)
    def parse_contract_order(self, order, market=None):
        """
        parse a raw contract order(contract v3 / unified / usdc payloads) into a unified order structure
        :param dict order: the raw order payload from the exchange
        :param dict|None market: the unified market of the order, if already known
        :returns dict: a unified order structure
        """
        #
        # contract v3
        #
        #     {
        #         "symbol": "XRPUSDT",
        #         "side": "Buy",
        #         "orderType": "Market",
        #         "price": "0.3431",
        #         "qty": "65",
        #         "reduceOnly": True,
        #         "timeInForce": "ImmediateOrCancel",
        #         "orderStatus": "Filled",
        #         "leavesQty": "0",
        #         "leavesValue": "0",
        #         "cumExecQty": "65",
        #         "cumExecValue": "21.3265",
        #         "cumExecFee": "0.0127959",
        #         "lastPriceOnCreated": "0.0000",
        #         "rejectReason": "EC_NoError",
        #         "orderLinkId": "",
        #         "createdTime": "1657526321499",
        #         "updatedTime": "1657526321504",
        #         "orderId": "ac0a8134-acb3-4ee1-a2d4-41891c9c46d7",
        #         "stopOrderType": "UNKNOWN",
        #         "takeProfit": "0.0000",
        #         "stopLoss": "0.0000",
        #         "tpTriggerBy": "UNKNOWN",
        #         "slTriggerBy": "UNKNOWN",
        #         "triggerPrice": "0.0000",
        #         "closeOnTrigger": True,
        #         "triggerDirection": 0,
        #         "positionIdx": 2
        #     }
        #
        #     {
        #         "orderId":"0b3499a4-9691-40ec-b2b9-7d94ee0165ff",
        #         "orderLinkId":"",
        #         "mmp":false,
        #         "symbol":"SOLPERP",
        #         "orderType":"Market",
        #         "side":"Buy",
        #         "orderQty":"0.10000000",
        #         "orderPrice":"23.030",
        #         "iv":"0",
        #         "timeInForce":"ImmediateOrCancel",
        #         "orderStatus":"Created",
        #         "createdAt":"1683380752146568",
        #         "basePrice":"0.000",
        #         "triggerPrice":"0.000",
        #         "takeProfit":"0.000",
        #         "stopLoss":"0.000",
        #         "slTriggerBy":"UNKNOWN",
        #         "tpTriggerBy":"UNKNOWN"
        #     }
        #
        marketId = self.safe_string(order, 'symbol')
        marketType = 'contract'
        if market is not None:
            marketType = market['type']
        # some payloads carry an explicit 'category' that overrides the default market type
        category = self.safe_string(order, 'category')
        if category is not None:
            if category == 'spot':
                marketType = 'spot'
        market = self.safe_market(marketId, market, None, marketType)
        symbol = market['symbol']
        timestamp = None
        if 'createdTime' in order:
            timestamp = self.safe_integer(order, 'createdTime')
        elif 'createdAt' in order:
            # 'createdAt' is expressed in microseconds, scale down to milliseconds
            timestamp = self.safe_integer_product(order, 'createdAt', 0.001)
        id = self.safe_string(order, 'orderId')
        type = self.safe_string_lower(order, 'orderType')
        price = self.safe_string_2(order, 'price', 'orderPrice')
        amount = self.safe_string_2(order, 'qty', 'orderQty')
        cost = self.safe_string(order, 'cumExecValue')
        filled = self.safe_string(order, 'cumExecQty')
        remaining = self.safe_string(order, 'leavesQty')
        lastTradeTimestamp = self.safe_integer(order, 'updatedTime')
        rawStatus = self.safe_string(order, 'orderStatus')
        status = self.parse_order_status(rawStatus)
        side = self.safe_string_lower(order, 'side')
        fee = None
        feeCostString = self.safe_string(order, 'cumExecFee')
        if feeCostString is not None:
            fee = {
                'cost': feeCostString,
                'currency': market['settle'],  # contract fees are denominated in the settle currency
            }
        clientOrderId = self.safe_string(order, 'orderLinkId')
        if (clientOrderId is not None) and (len(clientOrderId) < 1):
            # treat an empty orderLinkId as absent
            clientOrderId = None
        rawTimeInForce = self.safe_string(order, 'timeInForce')
        timeInForce = self.parse_time_in_force(rawTimeInForce)
        # zero-valued trigger/tp/sl prices mean "not set" - omit_zero maps them to None
        stopPrice = self.omit_zero(self.safe_string(order, 'triggerPrice'))
        takeProfitPrice = self.omit_zero(self.safe_string(order, 'takeProfit'))
        stopLossPrice = self.omit_zero(self.safe_string(order, 'stopLoss'))
        return self.safe_order({
            'info': order,
            'id': id,
            'clientOrderId': clientOrderId,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': lastTradeTimestamp,
            'symbol': symbol,
            'type': type,
            'timeInForce': timeInForce,
            'postOnly': None,
            'reduceOnly': self.safe_value(order, 'reduceOnly'),
            'side': side,
            'price': price,
            'stopPrice': stopPrice,
            'triggerPrice': stopPrice,
            'takeProfitPrice': takeProfitPrice,
            'stopLossPrice': stopLossPrice,
            'amount': amount,
            'cost': cost,
            'average': None,
            'filled': filled,
            'remaining': remaining,
            'status': status,
            'fee': fee,
            'trades': None,
        }, market)
def parse_spot_order(self, order, market=None):
#
# createOrder, cancelOrer
#
# {
# "orderId": "1274754916287346280",
# "orderLinkId": "1666798627015730",
# "symbol": "AAVEUSDT",
# "createTime": "1666698629821",
# "orderPrice": "80",
# "orderQty": "0.11",
# "orderType": "LIMIT",
# "side": "BUY",
# "status": "NEW",
# "timeInForce": "GTC",
# "accountId": "13380434",
# "execQty": "0",
# "orderCategory": "0"
# }
#
# fetchOrder, fetchOpenOrders, fetchClosedOrders(and also for conditional orders) there are also present these additional fields:
# {
# "cummulativeQuoteQty": "0",
# "avgPrice": "0",
# "stopPrice": "0.0",
# "icebergQty": "0.0",
# "updateTime": "1666733357444",
# "isWorking": "1",
# "locked": "8.8",
# "executedOrderId": "1279094037543962113", # in conditional order
# "triggerPrice": "0.99", # in conditional order
# }
#
marketId = self.safe_string(order, 'symbol')
market = self.safe_market(marketId, market, None, 'spot')
timestamp = self.safe_integer(order, 'createTime')
type = self.safe_string_lower(order, 'orderType')
price = self.safe_string(order, 'orderPrice')
if price == '0' and type == 'market':
price = None
filled = self.safe_string(order, 'execQty')
side = self.safe_string_lower(order, 'side')
timeInForce = self.parse_time_in_force(self.safe_string(order, 'timeInForce'))
triggerPrice = self.safe_string(order, 'triggerPrice')
postOnly = (timeInForce == 'PO')
amount = None
if market['spot'] and type == 'market' and side == 'buy':
amount = filled
else:
amount = self.safe_string(order, 'orderQty')
return self.safe_order({
'id': self.safe_string(order, 'orderId'),
'clientOrderId': self.safe_string(order, 'orderLinkId'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': self.safe_integer(order, 'updateTime'),
'symbol': market['symbol'],
'type': type,
'timeInForce': timeInForce,
'postOnly': postOnly,
'side': side,
'price': price,
'triggerPrice': triggerPrice,
'stopPrice': triggerPrice, # deprecated field
'amount': amount,
'cost': self.safe_string(order, 'cummulativeQuoteQty'),
'average': self.safe_string(order, 'avgPrice'),
'filled': filled,
'remaining': None,
'status': self.parse_order_status(self.safe_string(order, 'status')),
'fee': None,
'trades': None,
'info': order,
}, market)
async def fetch_order(self, id: str, symbol: Optional[str] = None, params={}):
"""
fetches information on an order made by the user
:param str|None symbol: unified symbol of the market the order was made in
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
type = None
type, params = self.handle_market_type_and_params('fetchOrder', market, params)
accounts = await self.is_unified_enabled()
isUnifiedAccount = self.safe_value(accounts, 1, False)
if isUnifiedAccount:
raise NotSupported(self.id + ' fetchOrder() does not support unified account. Please consider using fetchOpenOrders() or fetchClosedOrders()')
if type == 'spot':
# only spot markets have a dedicated endpoint for fetching a order
request = {
'orderId': id,
}
response = await self.privateGetSpotV3PrivateOrder(self.extend(params, request))
#
# {
# "retCode": "0",
# "retMsg": "OK",
# "result": {
# "accountId": "13380434",
# "symbol": "AAVEUSDT",
# "orderLinkId": "1666733357434617",
# "orderId": "1275046248585414144",
# "orderPrice": "80",
# "orderQty": "0.11",
# "execQty": "0",
# "cummulativeQuoteQty": "0",
# "avgPrice": "0",
# "status": "NEW",
# "timeInForce": "GTC",
# "orderType": "LIMIT",
# "side": "BUY",
# "stopPrice": "0.0",
# "icebergQty": "0.0",
# "createTime": "1666733357438",
# "updateTime": "1666733357444",
# "isWorking": "1",
# "locked": "8.8",
# "orderCategory": "0"
# },
# "retExtMap": {},
# "retExtInfo": null,
# "time": "1666733357744"
# }
#
result = self.safe_value(response, 'result', {})
return self.parse_order(result, market)
else:
self.check_required_symbol('fetchOrder', symbol)
request = {
'orderId': id,
}
result = await self.fetch_orders(symbol, None, None, self.extend(request, params))
length = len(result)
if length == 0:
raise OrderNotFound('Order ' + id + ' does not exist.')
if length > 1:
raise InvalidOrder(self.id + ' returned more than one order')
return self.safe_value(result, 0)
async def create_order(self, symbol: str, type, side: OrderSide, amount, price=None, params={}):
"""
create a trade order
see https://bybit-exchange.github.io/docs/v5/order/create-order
see https://bybit-exchange.github.io/docs/spot/trade/place-order
see https://bybit-exchange.github.io/docs/derivatives/unified/place-order
see https://bybit-exchange.github.io/docs/derivatives/contract/place-order
:param str symbol: unified symbol of the market to create an order in
:param str type: 'market' or 'limit'
:param str side: 'buy' or 'sell'
:param float amount: how much of currency you want to trade in units of base currency
:param float|None price: the price at which the order is to be fullfilled, in units of the quote currency, ignored in market orders
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: an `order structure <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
self.check_required_symbol('createOrder', symbol)
market = self.market(symbol)
symbol = market['symbol']
enableUnifiedMargin, enableUnifiedAccount = await self.is_unified_enabled()
isUSDCSettled = market['settle'] == 'USDC'
if enableUnifiedAccount and not market['inverse']:
return await self.create_unified_account_order(symbol, type, side, amount, price, params)
elif market['spot']:
return await self.create_spot_order(symbol, type, side, amount, price, params)
elif enableUnifiedMargin and not market['inverse']:
return await self.create_unified_margin_order(symbol, type, side, amount, price, params)
elif isUSDCSettled:
return await self.create_usdc_order(symbol, type, side, amount, price, params)
else:
return await self.create_contract_v3_order(symbol, type, side, amount, price, params)
    async def create_unified_account_order(self, symbol: str, type, side, amount, price=None, params={}):
        """
        create an order through the unified account(v5) endpoint
        :param str symbol: unified market symbol(spot, linear or option; inverse is rejected)
        :param str type: 'market' or 'limit'
        :param str side: 'buy' or 'sell'
        :param float amount: amount in base currency(for spot market buys, see the cost handling below)
        :param float|None price: limit price, also used to derive the cost of spot market buys
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns dict: an `order structure <https://docs.ccxt.com/#/?id=order-structure>`
        """
        await self.load_markets()
        market = self.market(symbol)
        lowerCaseType = type.lower()
        if (price is None) and (lowerCaseType == 'limit'):
            raise ArgumentsRequired(self.id + ' createOrder requires a price argument for limit orders')
        request = {
            'symbol': market['id'],
            'side': self.capitalize(side),
            'orderType': self.capitalize(lowerCaseType),  # limit or market
            # 'timeInForce': 'GTC',  # IOC, FOK, PostOnly
            # 'takeProfit': 123.45,  # take profit price, only take effect upon opening the position
            # 'stopLoss': 123.45,  # stop loss price, only take effect upon opening the position
            # 'reduceOnly': False,  # reduce only, required for linear orders
            # when creating a closing order, bybit recommends a True value for
            #  closeOnTrigger to avoid failing due to insufficient available margin
            # 'closeOnTrigger': False, required for linear orders
            # 'orderLinkId': 'string',  # unique client order id, max 36 characters
            # 'triggerPrice': 123.45,  # trigger price, required for conditional orders
            # 'triggerBy': 'MarkPrice',  # IndexPrice, MarkPrice, LastPrice
            # 'tpTriggerby': 'MarkPrice',  # IndexPrice, MarkPrice, LastPrice
            # 'slTriggerBy': 'MarkPrice',  # IndexPrice, MarkPrice, LastPrice
            # 'mmp': False  # market maker protection
            # 'positionIdx': 0,  # Position mode. Unified account has one-way mode only(0)
            # 'triggerDirection': 1,  # Conditional order param. Used to identify the expected direction of the conditional order. 1: triggered when market price rises to triggerPrice 2: triggered when market price falls to triggerPrice
            # Valid for spot only.
            # 'isLeverage': 0,  # Whether to borrow. 0(default): False, 1: True
            # 'orderFilter': 'Order'  # Order,tpslOrder. If not passed, Order by default
            # Valid for option only.
            # 'orderIv': '0',  # Implied volatility; parameters are passed according to the real value; for example, for 10%, 0.1 is passed
        }
        if market['spot']:
            request['category'] = 'spot'
        elif market['linear']:
            request['category'] = 'linear'
        elif market['option']:
            request['category'] = 'option'
        else:
            raise NotSupported(self.id + ' createOrder does not allow inverse market orders for ' + symbol + ' markets')
        if market['spot'] and (type == 'market') and (side == 'buy'):
            # for market buy it requires the amount of quote currency to spend
            if self.options['createMarketBuyOrderRequiresPrice']:
                cost = self.safe_number(params, 'cost')
                params = self.omit(params, 'cost')
                if price is None and cost is None:
                    raise InvalidOrder(self.id + " createOrder() requires the price argument with market buy orders to calculate total order cost(amount to spend), where cost = amount * price. Supply a price argument to createOrder() call if you want the cost to be calculated for you from price and amount, or, alternatively, add .options['createMarketBuyOrderRequiresPrice'] = False to supply the cost in the amount argument(the exchange-specific behaviour)")
                else:
                    # compute cost = amount * price as strings to avoid float precision issues
                    amountString = self.number_to_string(amount)
                    priceString = self.number_to_string(price)
                    quoteAmount = Precise.string_mul(amountString, priceString)
                    amount = cost if (cost is not None) else self.parse_number(quoteAmount)
                    request['qty'] = self.cost_to_precision(symbol, amount)
            else:
                request['qty'] = self.cost_to_precision(symbol, amount)
        else:
            request['qty'] = self.amount_to_precision(symbol, amount)
        isMarket = lowerCaseType == 'market'
        isLimit = lowerCaseType == 'limit'
        if isLimit:
            request['price'] = self.price_to_precision(symbol, price)
        timeInForce = self.safe_string_lower(params, 'timeInForce')  # self is same specific param
        postOnly = None
        postOnly, params = self.handle_post_only(isMarket, timeInForce == 'PostOnly', params)
        if postOnly:
            request['timeInForce'] = 'PostOnly'
        elif timeInForce == 'gtc':
            request['timeInForce'] = 'GTC'
        elif timeInForce == 'fok':
            request['timeInForce'] = 'FOK'
        elif timeInForce == 'ioc':
            request['timeInForce'] = 'IOC'
        triggerPrice = self.safe_number_2(params, 'triggerPrice', 'stopPrice')
        stopLossTriggerPrice = self.safe_number(params, 'stopLossPrice')
        takeProfitTriggerPrice = self.safe_number(params, 'takeProfitPrice')
        stopLoss = self.safe_number(params, 'stopLoss')
        takeProfit = self.safe_number(params, 'takeProfit')
        isStopLossTriggerOrder = stopLossTriggerPrice is not None
        isTakeProfitTriggerOrder = takeProfitTriggerPrice is not None
        isStopLoss = stopLoss is not None
        isTakeProfit = takeProfit is not None
        isBuy = side == 'buy'
        # logical xor of side and stop-loss-ness decides the expected trigger direction
        ascending = not isBuy if stopLossTriggerPrice else isBuy
        if triggerPrice is not None:
            # triggerDirection: 1 - triggered when the price rises to triggerPrice, 2 - when it falls
            request['triggerDirection'] = 2 if ascending else 1
            request['triggerPrice'] = self.price_to_precision(symbol, triggerPrice)
        elif isStopLossTriggerOrder or isTakeProfitTriggerOrder:
            request['triggerDirection'] = 2 if ascending else 1
            triggerPrice = stopLossTriggerPrice if isStopLossTriggerOrder else takeProfitTriggerPrice
            request['triggerPrice'] = self.price_to_precision(symbol, triggerPrice)
            request['reduceOnly'] = True
        elif isStopLoss or isTakeProfit:
            if isStopLoss:
                request['stopLoss'] = self.price_to_precision(symbol, stopLoss)
            if isTakeProfit:
                request['takeProfit'] = self.price_to_precision(symbol, takeProfit)
        if market['spot']:
            # only works for spot market
            if triggerPrice is not None or stopLossTriggerPrice is not None or takeProfitTriggerPrice is not None or isStopLoss or isTakeProfit:
                request['orderFilter'] = 'tpslOrder'
        clientOrderId = self.safe_string(params, 'clientOrderId')
        if clientOrderId is not None:
            request['orderLinkId'] = clientOrderId
        elif market['option']:
            # mandatory field for options
            request['orderLinkId'] = self.uuid16()
        params = self.omit(params, ['stopPrice', 'timeInForce', 'stopLossPrice', 'takeProfitPrice', 'postOnly', 'clientOrderId', 'triggerPrice', 'stopLoss', 'takeProfit'])
        response = await self.privatePostV5OrderCreate(self.extend(request, params))
        #
        #     {
        #         "retCode": 0,
        #         "retMsg": "OK",
        #         "result": {
        #             "orderId": "1321003749386327552",
        #             "orderLinkId": "spot-test-postonly"
        #         },
        #         "retExtInfo": {},
        #         "time": 1672211918471
        #     }
        #
        order = self.safe_value(response, 'result', {})
        return self.parse_order(order)
    async def create_spot_order(self, symbol: str, type, side, amount, price=None, params={}):
        """
        create an order through the spot v3 endpoint
        :param str symbol: unified spot market symbol
        :param str type: 'market', 'limit' or 'limit_maker'
        :param str side: 'buy' or 'sell'
        :param float amount: amount in base currency(for market buys, see the cost handling below)
        :param float|None price: limit price, also used to derive the cost of market buys
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns dict: an `order structure <https://docs.ccxt.com/#/?id=order-structure>`
        """
        await self.load_markets()
        market = self.market(symbol)
        upperCaseType = type.upper()
        request = {
            'symbol': market['id'],
            'side': self.capitalize(side),
            'orderType': upperCaseType,  # limit, market or limit_maker
            'timeInForce': 'GTC',  # FOK, IOC
            # 'orderLinkId': 'string',  # unique client order id, max 36 characters
        }
        if (type == 'market') and (side == 'buy'):
            # for market buy it requires the amount of quote currency to spend
            if self.options['createMarketBuyOrderRequiresPrice']:
                cost = self.safe_number(params, 'cost')
                params = self.omit(params, 'cost')
                if price is None and cost is None:
                    raise InvalidOrder(self.id + " createOrder() requires the price argument with market buy orders to calculate total order cost(amount to spend), where cost = amount * price. Supply a price argument to createOrder() call if you want the cost to be calculated for you from price and amount, or, alternatively, add .options['createMarketBuyOrderRequiresPrice'] = False to supply the cost in the amount argument(the exchange-specific behaviour)")
                else:
                    # compute cost = amount * price as strings to avoid float precision issues
                    amountString = self.number_to_string(amount)
                    priceString = self.number_to_string(price)
                    quoteAmount = Precise.string_mul(amountString, priceString)
                    amount = cost if (cost is not None) else self.parse_number(quoteAmount)
                    request['orderQty'] = self.cost_to_precision(symbol, amount)
            else:
                request['orderQty'] = self.cost_to_precision(symbol, amount)
        else:
            request['orderQty'] = self.amount_to_precision(symbol, amount)
        if (upperCaseType == 'LIMIT') or (upperCaseType == 'LIMIT_MAKER'):
            if price is None:
                raise InvalidOrder(self.id + ' createOrder requires a price argument for a ' + type + ' order')
            request['orderPrice'] = self.price_to_precision(symbol, price)
        isMarket = (upperCaseType == 'MARKET')
        postOnly = None
        postOnly, params = self.handle_post_only(isMarket, type == 'LIMIT_MAKER', params)
        if postOnly:
            # post-only is expressed through the LIMIT_MAKER order type on spot v3
            request['orderType'] = 'LIMIT_MAKER'
        clientOrderId = self.safe_string_2(params, 'clientOrderId', 'orderLinkId')
        if clientOrderId is not None:
            request['orderLinkId'] = clientOrderId
        params = self.omit(params, ['clientOrderId', 'orderLinkId', 'postOnly'])
        brokerId = self.safe_string(self.options, 'brokerId')
        if brokerId is not None:
            request['agentSource'] = brokerId
        triggerPrice = self.safe_number_2(params, 'triggerPrice', 'stopPrice')
        if triggerPrice is not None:
            request['triggerPrice'] = self.price_to_precision(symbol, triggerPrice)
        params = self.omit(params, 'stopPrice')
        response = await self.privatePostSpotV3PrivateOrder(self.extend(request, params))
        #
        #    {
        #        "retCode": "0",
        #        "retMsg": "OK",
        #        "result": {
        #            "orderId": "1274754916287346280",
        #            "orderLinkId": "1666798627015730",
        #            "symbol": "AAVEUSDT",
        #            "createTime": "1666698629821",
        #            "orderPrice": "80",
        #            "orderQty": "0.11",
        #            "orderType": "LIMIT",
        #            "side": "BUY",
        #            "status": "NEW",
        #            "timeInForce": "GTC",
        #            "accountId": "13380434",
        #            "execQty": "0",
        #            "orderCategory": "0"
        #        },
        #        "retExtMap": {},
        #        "retExtInfo": null,
        #        "time": "1666698627926"
        #    }
        #
        order = self.safe_value(response, 'result', {})
        return self.parse_order(order)
    async def create_unified_margin_order(self, symbol: str, type, side, amount, price=None, params={}):
        """
        create an order through the unified margin(v3) endpoint
        :param str symbol: unified market symbol(linear or option only)
        :param str type: 'market' or 'limit'
        :param str side: 'buy' or 'sell'
        :param float amount: amount in base currency
        :param float|None price: limit price, required for limit orders
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns dict: an `order structure <https://docs.ccxt.com/#/?id=order-structure>`
        """
        await self.load_markets()
        market = self.market(symbol)
        if not market['linear'] and not market['option']:
            raise NotSupported(self.id + ' createOrder does not allow inverse market orders for ' + symbol + ' markets')
        lowerCaseType = type.lower()
        if (price is None) and (lowerCaseType == 'limit'):
            raise ArgumentsRequired(self.id + ' createOrder requires a price argument for limit orders')
        request = {
            'symbol': market['id'],
            'side': self.capitalize(side),
            'orderType': self.capitalize(lowerCaseType),  # limit or market
            'timeInForce': 'GoodTillCancel',  # ImmediateOrCancel, FillOrKill, PostOnly
            'qty': self.amount_to_precision(symbol, amount),
            # 'takeProfit': 123.45,  # take profit price, only take effect upon opening the position
            # 'stopLoss': 123.45,  # stop loss price, only take effect upon opening the position
            # 'reduceOnly': False,  # reduce only, required for linear orders
            # when creating a closing order, bybit recommends a True value for
            #  closeOnTrigger to avoid failing due to insufficient available margin
            # 'closeOnTrigger': False, required for linear orders
            # 'orderLinkId': 'string',  # unique client order id, max 36 characters
            # 'triggerPrice': 123.45,  # trigger price, required for conditional orders
            # 'triggerBy': 'MarkPrice',  # IndexPrice, MarkPrice
            # 'tptriggerby': 'MarkPrice',  # IndexPrice, MarkPrice
            # 'slTriggerBy': 'MarkPrice',  # IndexPrice, MarkPrice
            # 'mmp': False  # market maker protection
            # 'positionIdx': 0,  # Position mode. unified margin account is only available in One-Way mode, which is 0
            # 'basePrice': '0',  # It will be used to compare with the value of triggerPrice, to decide whether your conditional order will be triggered by crossing trigger price from upper side or lower side. Mainly used to identify the expected direction of the current conditional order.
            # 'iv': '0',  # Implied volatility, for options only; parameters are passed according to the real value; for example, for 10%, 0.1 is passed
        }
        if market['linear']:
            request['category'] = 'linear'
        else:
            request['category'] = 'option'
        isMarket = lowerCaseType == 'market'
        isLimit = lowerCaseType == 'limit'
        if isLimit:
            request['price'] = self.price_to_precision(symbol, price)
        exchangeSpecificParam = self.safe_string(params, 'time_in_force')
        timeInForce = self.safe_string_lower(params, 'timeInForce')
        postOnly = None
        postOnly, params = self.handle_post_only(isMarket, exchangeSpecificParam == 'PostOnly', params)
        if postOnly:
            request['timeInForce'] = 'PostOnly'
        elif timeInForce == 'gtc':
            request['timeInForce'] = 'GoodTillCancel'
        elif timeInForce == 'fok':
            request['timeInForce'] = 'FillOrKill'
        elif timeInForce == 'ioc':
            request['timeInForce'] = 'ImmediateOrCancel'
        triggerPrice = self.safe_number_2(params, 'stopPrice', 'triggerPrice')
        stopLossTriggerPrice = self.safe_number(params, 'stopLossPrice', triggerPrice)
        takeProfitTriggerPrice = self.safe_number(params, 'takeProfitPrice')
        stopLoss = self.safe_number(params, 'stopLoss')
        takeProfit = self.safe_number(params, 'takeProfit')
        isStopLossTriggerOrder = stopLossTriggerPrice is not None
        isTakeProfitTriggerOrder = takeProfitTriggerPrice is not None
        isStopLoss = stopLoss is not None
        isTakeProfit = takeProfit is not None
        if isStopLossTriggerOrder or isTakeProfitTriggerOrder:
            request['triggerBy'] = 'LastPrice'
            triggerAt = stopLossTriggerPrice if isStopLossTriggerOrder else takeProfitTriggerPrice
            preciseTriggerPrice = self.price_to_precision(symbol, triggerAt)
            request['triggerPrice'] = preciseTriggerPrice
            isBuy = side == 'buy'
            # logical xor
            ascending = not isBuy if stopLossTriggerPrice else isBuy
            # basePrice must sit one price-tick on the non-trigger side of triggerPrice
            delta = self.number_to_string(market['precision']['price'])
            request['basePrice'] = Precise.string_add(preciseTriggerPrice, delta) if ascending else Precise.string_sub(preciseTriggerPrice, delta)
        elif isStopLoss or isTakeProfit:
            if isStopLoss:
                request['stopLoss'] = self.price_to_precision(symbol, stopLoss)
            if isTakeProfit:
                request['takeProfit'] = self.price_to_precision(symbol, takeProfit)
        clientOrderId = self.safe_string(params, 'clientOrderId')
        if clientOrderId is not None:
            request['orderLinkId'] = clientOrderId
        elif market['option']:
            # mandatory field for options
            request['orderLinkId'] = self.uuid16()
        params = self.omit(params, ['stopPrice', 'timeInForce', 'triggerPrice', 'stopLossPrice', 'takeProfitPrice', 'postOnly', 'clientOrderId', 'stopLoss', 'takeProfit'])
        response = await self.privatePostUnifiedV3PrivateOrderCreate(self.extend(request, params))
        #
        #     {
        #         "retCode": 0,
        #         "retMsg": "OK",
        #         "result": {
        #             "orderId": "e10b0716-7c91-4091-b98a-1fa0f401c7d5",
        #             "orderLinkId": "test0000003"
        #         },
        #         "retExtInfo": null,
        #         "time": 1664441344238
        #     }
        #
        order = self.safe_value(response, 'result', {})
        return self.parse_order(order)
    async def create_contract_v3_order(self, symbol: str, type, side, amount, price=None, params={}):
        """
        create an order through the contract v3 endpoint
        :param str symbol: unified contract market symbol
        :param str type: 'market' or 'limit'
        :param str side: 'buy' or 'sell'
        :param float amount: amount in base currency
        :param float|None price: limit price, required for limit orders
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns dict: an `order structure <https://docs.ccxt.com/#/?id=order-structure>`
        """
        await self.load_markets()
        market = self.market(symbol)
        lowerCaseType = type.lower()
        if (price is None) and (lowerCaseType == 'limit'):
            raise ArgumentsRequired(self.id + ' createContractV3Order requires a price argument for limit orders')
        request = {
            'symbol': market['id'],
            'side': self.capitalize(side),
            'orderType': self.capitalize(lowerCaseType),  # limit or market
            'timeInForce': 'GoodTillCancel',  # ImmediateOrCancel, FillOrKill, PostOnly
            'qty': self.amount_to_precision(symbol, amount),
            # 'takeProfit': 123.45,  # take profit price, only take effect upon opening the position
            # 'stopLoss': 123.45,  # stop loss price, only take effect upon opening the position
            # 'reduceOnly': False,  # reduce only, required for linear orders
            # when creating a closing order, bybit recommends a True value for
            #  closeOnTrigger to avoid failing due to insufficient available margin
            # 'closeOnTrigger': False, required for linear orders
            # 'orderLinkId': 'string',  # unique client order id, max 36 characters
            # 'triggerPrice': 123.45,  # trigger price, required for conditional orders
            # 'triggerBy': 'MarkPrice',  # IndexPrice, MarkPrice
            # 'tptriggerby': 'MarkPrice',  # IndexPrice, MarkPrice
            # 'slTriggerBy': 'MarkPrice',  # IndexPrice, MarkPrice
            # 'positionIdx': 0,  # Position mode. unified margin account is only available in One-Way mode, which is 0
            # 'triggerDirection': 1,  # Trigger direction. Mainly used in conditional order. Trigger the order when market price rises to triggerPrice or falls to triggerPrice. 1: rise; 2: fall
        }
        if market['future']:
            # hedge-mode futures need an explicit position index
            positionIdx = self.safe_integer(params, 'position_idx', 0)  # 0 One-Way Mode, 1 Buy-side, 2 Sell-side
            request['position_idx'] = positionIdx
            params = self.omit(params, 'position_idx')
        isMarket = lowerCaseType == 'market'
        isLimit = lowerCaseType == 'limit'
        if isLimit:
            request['price'] = self.price_to_precision(symbol, price)
        timeInForce = self.safe_string_lower(params, 'timeInForce')  # same specific param
        postOnly = None
        postOnly, params = self.handle_post_only(isMarket, timeInForce == 'PostOnly', params)
        if postOnly:
            request['timeInForce'] = 'PostOnly'
        elif timeInForce == 'gtc':
            request['timeInForce'] = 'GoodTillCancel'
        elif timeInForce == 'fok':
            request['timeInForce'] = 'FillOrKill'
        elif timeInForce == 'ioc':
            request['timeInForce'] = 'ImmediateOrCancel'
        triggerPrice = self.safe_number_2(params, 'triggerPrice', 'stopPrice')
        stopLossTriggerPrice = self.safe_number(params, 'stopLossPrice', triggerPrice)
        takeProfitTriggerPrice = self.safe_number(params, 'takeProfitPrice')
        stopLoss = self.safe_number(params, 'stopLoss')
        takeProfit = self.safe_number(params, 'takeProfit')
        isStopLossTriggerOrder = stopLossTriggerPrice is not None
        isTakeProfitTriggerOrder = takeProfitTriggerPrice is not None
        isStopLoss = stopLoss is not None
        isTakeProfit = takeProfit is not None
        isBuy = side == 'buy'
        # logical xor of side and stop-loss-ness decides the expected trigger direction
        ascending = not isBuy if stopLossTriggerPrice else isBuy
        if triggerPrice is not None:
            # triggerDirection: 1 - triggered when the price rises to triggerPrice, 2 - when it falls
            request['triggerDirection'] = 2 if ascending else 1
            request['triggerPrice'] = self.price_to_precision(symbol, triggerPrice)
        elif isStopLossTriggerOrder or isTakeProfitTriggerOrder:
            request['triggerDirection'] = 2 if ascending else 1
            triggerPrice = stopLossTriggerPrice if isStopLossTriggerOrder else takeProfitTriggerPrice
            request['triggerPrice'] = self.price_to_precision(symbol, triggerPrice)
            request['reduceOnly'] = True
        elif isStopLoss or isTakeProfit:
            if isStopLoss:
                request['stopLoss'] = self.price_to_precision(symbol, stopLoss)
            if isTakeProfit:
                request['takeProfit'] = self.price_to_precision(symbol, takeProfit)
        clientOrderId = self.safe_string(params, 'clientOrderId')
        if clientOrderId is not None:
            request['orderLinkId'] = clientOrderId
        elif market['option']:
            # mandatory field for options
            request['orderLinkId'] = self.uuid16()
        params = self.omit(params, ['stopPrice', 'timeInForce', 'stopLossPrice', 'takeProfitPrice', 'postOnly', 'clientOrderId', 'triggerPrice', 'stopLoss', 'takeProfit'])
        response = await self.privatePostContractV3PrivateOrderCreate(self.extend(request, params))
        #
        #     {
        #         "retCode": 0,
        #         "retMsg": "OK",
        #         "result": {
        #             "orderId": "e10b0716-7c91-4091-b98a-1fa0f401c7d5",
        #             "orderLinkId": "test0000003"
        #         },
        #         "retExtInfo": null,
        #         "time": 1664441344238
        #     }
        #
        order = self.safe_value(response, 'result', {})
        return self.parse_order(order)
    async def create_usdc_order(self, symbol: str, type, side, amount, price=None, params={}):
        """
        Create an order on a USDC-settled market(perpetual swap or option).

        :param str symbol: unified market symbol
        :param str type: 'limit' or 'market'
        :param str side: 'buy' or 'sell'
        :param float amount: amount of base currency to trade
        :param float|None price: order price, required for limit orders
        :param dict params: extra exchange specific parameters; supports timeInForce,
            postOnly, clientOrderId, stopPrice/triggerPrice, stopLossPrice,
            takeProfitPrice, stopLoss and takeProfit
        :returns dict: an `order structure <https://docs.ccxt.com/#/?id=order-structure>`
        """
        await self.load_markets()
        market = self.market(symbol)
        lowerCaseType = type.lower()
        if (price is None) and (lowerCaseType == 'limit'):
            raise ArgumentsRequired(self.id + ' createOrder requires a price argument for limit orders')
        request = {
            'symbol': market['id'],
            'side': self.capitalize(side),
            'orderType': self.capitalize(lowerCaseType), # limit or market
            'timeInForce': 'GoodTillCancel', # ImmediateOrCancel, FillOrKill, PostOnly
            'orderQty': self.amount_to_precision(symbol, amount),
            # 'takeProfit': 123.45, # take profit price, only take effect upon opening the position
            # 'stopLoss': 123.45, # stop loss price, only take effect upon opening the position
            # 'reduceOnly': False, # reduce only, required for linear orders
            # when creating a closing order, bybit recommends a True value for
            # closeOnTrigger to avoid failing due to insufficient available margin
            # 'closeOnTrigger': False, required for linear orders
            # 'orderLinkId': 'string', # unique client order id, max 36 characters
            # 'triggerPrice': 123.45, # trigger price, required for conditional orders
            # 'trigger_by': 'MarkPrice', # IndexPrice, MarkPrice
            # 'tptriggerby': 'MarkPrice', # IndexPrice, MarkPrice
            # 'slTriggerBy': 'MarkPrice', # IndexPrice, MarkPrice
            # 'orderFilter': 'Order' or 'StopOrder'
            # 'mmp': False # market maker protection
        }
        isMarket = lowerCaseType == 'market'
        isLimit = lowerCaseType == 'limit'
        if isLimit:
            request['orderPrice'] = self.price_to_precision(symbol, price)
        exchangeSpecificParam = self.safe_string(params, 'time_in_force')
        timeInForce = self.safe_string_lower(params, 'timeInForce')
        postOnly = None
        postOnly, params = self.handle_post_only(isMarket, exchangeSpecificParam == 'PostOnly', params)
        # map unified timeInForce values onto the exchange specific time_in_force field
        if postOnly:
            request['time_in_force'] = 'PostOnly'
        elif timeInForce == 'gtc':
            request['time_in_force'] = 'GoodTillCancel'
        elif timeInForce == 'fok':
            request['time_in_force'] = 'FillOrKill'
        elif timeInForce == 'ioc':
            request['time_in_force'] = 'ImmediateOrCancel'
        if market['swap']:
            # a stopLossPrice/takeProfitPrice(or a plain triggerPrice) makes this a conditional order
            triggerPrice = self.safe_number_2(params, 'stopPrice', 'triggerPrice')
            stopLossTriggerPrice = self.safe_number(params, 'stopLossPrice', triggerPrice)
            takeProfitTriggerPrice = self.safe_number(params, 'takeProfitPrice')
            stopLoss = self.safe_number(params, 'stopLoss')
            takeProfit = self.safe_number(params, 'takeProfit')
            isStopLossTriggerOrder = stopLossTriggerPrice is not None
            isTakeProfitTriggerOrder = takeProfitTriggerPrice is not None
            isStopLoss = stopLoss is not None
            isTakeProfit = takeProfit is not None
            isStopOrder = isStopLossTriggerOrder or isTakeProfitTriggerOrder
            if isStopOrder:
                request['orderFilter'] = 'StopOrder'
                request['trigger_by'] = 'LastPrice'
                stopPx = stopLossTriggerPrice if isStopLossTriggerOrder else takeProfitTriggerPrice
                preciseStopPrice = self.price_to_precision(symbol, stopPx)
                request['triggerPrice'] = preciseStopPrice
                # basePrice must lie on the correct side of the trigger price, offset by one price tick
                delta = self.number_to_string(market['precision']['price'])
                request['basePrice'] = Precise.string_sub(preciseStopPrice, delta) if isStopLossTriggerOrder else Precise.string_add(preciseStopPrice, delta)
            elif isStopLoss or isTakeProfit:
                # attached stop-loss/take-profit on a regular order
                if isStopLoss:
                    request['stopLoss'] = self.price_to_precision(symbol, stopLoss)
                if isTakeProfit:
                    request['takeProfit'] = self.price_to_precision(symbol, takeProfit)
            else:
                request['orderFilter'] = 'Order'
        clientOrderId = self.safe_string(params, 'clientOrderId')
        if clientOrderId is not None:
            request['orderLinkId'] = clientOrderId
        elif market['option']:
            # mandatory field for options
            request['orderLinkId'] = self.uuid16()
        params = self.omit(params, ['stopPrice', 'timeInForce', 'triggerPrice', 'stopLossPrice', 'takeProfitPrice', 'postOnly', 'clientOrderId', 'stopLoss', 'takeProfit'])
        response = None
        if market['option']:
            response = await self.privatePostOptionUsdcOpenapiPrivateV1PlaceOrder(self.extend(request, params))
        else:
            response = await self.privatePostPerpetualUsdcOpenapiPrivateV1PlaceOrder(self.extend(request, params))
        #
        # {
        #     "retCode":0,
        #     "retMsg":"",
        #     "result":{
        #         "orderId":"34450a59-325e-4296-8af0-63c7c524ae33",
        #         "orderLinkId":"",
        #         "mmp":false,
        #         "symbol":"BTCPERP",
        #         "orderType":"Limit",
        #         "side":"Buy",
        #         "orderQty":"0.00100000",
        #         "orderPrice":"20000.00",
        #         "iv":"0",
        #         "timeInForce":"GoodTillCancel",
        #         "orderStatus":"Created",
        #         "createdAt":"1652261746007873",
        #         "basePrice":"0.00",
        #         "triggerPrice":"0.00",
        #         "takeProfit":"0.00",
        #         "stopLoss":"0.00",
        #         "slTriggerBy":"UNKNOWN",
        #         "tpTriggerBy":"UNKNOWN"
        #     }
        # }
        #
        order = self.safe_value(response, 'result', {})
        return self.parse_order(order)
async def edit_unified_account_order(self, id: str, symbol, type, side, amount=None, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
if not market['linear'] and not market['option']:
raise NotSupported(self.id + ' editOrder does not allow inverse market orders for ' + symbol + ' markets')
request = {
'symbol': market['id'],
'orderId': id,
'qty': self.amount_to_precision(symbol, amount),
# 'orderLinkId': 'string', # unique client order id, max 36 characters
# 'takeProfit': 123.45, # take profit price, only take effect upon opening the position
# 'stopLoss': 123.45, # stop loss price, only take effect upon opening the position
# 'triggerPrice': 123.45, # trigger price, required for conditional orders
# 'triggerBy': 'MarkPrice', # IndexPrice, MarkPrice, LastPrice
# 'tpTriggerby': 'MarkPrice', # IndexPrice, MarkPrice, LastPrice
# 'slTriggerBy': 'MarkPrice', # IndexPrice, MarkPrice, LastPrice
# Valid for option only.
# 'orderIv': '0', # Implied volatility; parameters are passed according to the real value; for example, for 10%, 0.1 is passed
}
if market['linear']:
request['category'] = 'linear'
else:
request['category'] = 'option'
if price is not None:
request['price'] = self.price_to_precision(symbol, price)
triggerPrice = self.safe_number_2(params, 'triggerPrice', 'stopPrice')
stopLossTriggerPrice = self.safe_number(params, 'stopLossPrice')
takeProfitTriggerPrice = self.safe_number(params, 'takeProfitPrice')
stopLoss = self.safe_number(params, 'stopLoss')
takeProfit = self.safe_number(params, 'takeProfit')
isStopLossTriggerOrder = stopLossTriggerPrice is not None
isTakeProfitTriggerOrder = takeProfitTriggerPrice is not None
isStopLoss = stopLoss is not None
isTakeProfit = takeProfit is not None
if isStopLossTriggerOrder or isTakeProfitTriggerOrder:
triggerPrice = stopLossTriggerPrice if isStopLossTriggerOrder else takeProfitTriggerPrice
if triggerPrice is not None:
request['triggerPrice'] = self.price_to_precision(symbol, triggerPrice)
if isStopLoss or isTakeProfit:
if isStopLoss:
request['stopLoss'] = self.price_to_precision(symbol, stopLoss)
if isTakeProfit:
request['takeProfit'] = self.price_to_precision(symbol, takeProfit)
clientOrderId = self.safe_string(params, 'clientOrderId')
if clientOrderId is not None:
request['orderLinkId'] = clientOrderId
params = self.omit(params, ['stopPrice', 'stopLossPrice', 'takeProfitPrice', 'triggerPrice', 'clientOrderId', 'stopLoss', 'takeProfit'])
response = await self.privatePostV5OrderAmend(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "orderId": "c6f055d9-7f21-4079-913d-e6523a9cfffa",
# "orderLinkId": "linear-004"
# },
# "retExtInfo": {},
# "time": 1672217093461
# }
#
result = self.safe_value(response, 'result', {})
return {
'info': response,
'id': self.safe_string(result, 'orderId'),
}
    async def edit_unified_margin_order(self, id: str, symbol, type, side, amount, price=None, params={}):
        """
        Edit an open order under the unified margin account(unified v3 replace endpoint).

        :param str id: order id
        :param str symbol: unified market symbol(linear or option markets only)
        :param str type: 'limit' or 'market'
        :param str side: 'buy' or 'sell'
        :param float amount: new order quantity
        :param float|None price: new order price, required for limit orders
        :param dict params: extra parameters; supports timeInForce, postOnly, clientOrderId,
            triggerPrice/stopPrice, stopLossPrice, takeProfitPrice, stopLoss, takeProfit
        :returns dict: an `order structure <https://docs.ccxt.com/#/?id=order-structure>`
        """
        await self.load_markets()
        market = self.market(symbol)
        if not market['linear'] and not market['option']:
            raise NotSupported(self.id + ' editOrder does not allow inverse market orders for ' + symbol + ' markets')
        lowerCaseType = type.lower()
        if (price is None) and (lowerCaseType == 'limit'):
            raise ArgumentsRequired(self.id + ' editOrder requires a price argument for limit orders')
        request = {
            'orderId': id,
            'symbol': market['id'],
            'side': self.capitalize(side),
            'orderType': self.capitalize(lowerCaseType), # limit or market
            'timeInForce': 'GoodTillCancel', # ImmediateOrCancel, FillOrKill, PostOnly
            'qty': self.amount_to_precision(symbol, amount),
            # 'takeProfit': 123.45, # take profit price, only take effect upon opening the position
            # 'stopLoss': 123.45, # stop loss price, only take effect upon opening the position
            # 'orderLinkId': 'string', # unique client order id, max 36 characters
            # 'triggerPrice': 123.45, # trigger price, required for conditional orders
            # 'triggerBy': 'MarkPrice', # IndexPrice, MarkPrice
            # 'tptriggerby': 'MarkPrice', # IndexPrice, MarkPrice
            # 'slTriggerBy': 'MarkPrice', # IndexPrice, MarkPrice
            # 'iv': '0', # Implied volatility, for options only; parameters are passed according to the real value; for example, for 10%, 0.1 is passed
        }
        if market['linear']:
            request['category'] = 'linear'
        else:
            request['category'] = 'option'
        isMarket = lowerCaseType == 'market'
        isLimit = lowerCaseType == 'limit'
        if isLimit:
            request['price'] = self.price_to_precision(symbol, price)
        exchangeSpecificParam = self.safe_string(params, 'time_in_force')
        timeInForce = self.safe_string_lower(params, 'timeInForce')
        postOnly = self.is_post_only(isMarket, exchangeSpecificParam == 'PostOnly', params)
        # map unified timeInForce values onto the exchange specific ones
        if postOnly:
            request['timeInForce'] = 'PostOnly'
        elif timeInForce == 'gtc':
            request['timeInForce'] = 'GoodTillCancel'
        elif timeInForce == 'fok':
            request['timeInForce'] = 'FillOrKill'
        elif timeInForce == 'ioc':
            request['timeInForce'] = 'ImmediateOrCancel'
        triggerPrice = self.safe_number_2(params, 'triggerPrice', 'stopPrice')
        stopLossTriggerPrice = self.safe_number(params, 'stopLossPrice')
        takeProfitTriggerPrice = self.safe_number(params, 'takeProfitPrice')
        stopLoss = self.safe_number(params, 'stopLoss')
        takeProfit = self.safe_number(params, 'takeProfit')
        isStopLossTriggerOrder = stopLossTriggerPrice is not None
        isTakeProfitTriggerOrder = takeProfitTriggerPrice is not None
        isStopLoss = stopLoss is not None
        isTakeProfit = takeProfit is not None
        if isStopLossTriggerOrder or isTakeProfitTriggerOrder:
            # a stop-loss/take-profit trigger price takes precedence as the trigger price
            triggerPrice = stopLossTriggerPrice if isStopLossTriggerOrder else takeProfitTriggerPrice
        if triggerPrice is not None:
            request['triggerPrice'] = self.price_to_precision(symbol, triggerPrice)
        if isStopLoss or isTakeProfit:
            if isStopLoss:
                request['stopLoss'] = self.price_to_precision(symbol, stopLoss)
            if isTakeProfit:
                request['takeProfit'] = self.price_to_precision(symbol, takeProfit)
        clientOrderId = self.safe_string(params, 'clientOrderId')
        if clientOrderId is not None:
            request['orderLinkId'] = clientOrderId
        params = self.omit(params, ['stopPrice', 'timeInForce', 'triggerPrice', 'stopLossPrice', 'takeProfitPrice', 'postOnly', 'clientOrderId', 'stopLoss', 'takeProfit'])
        response = await self.privatePostUnifiedV3PrivateOrderReplace(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "orderId": "42c86d66331e41998d12c2440ce90c1a",
        #         "orderLinkId": "e80d558e-ed"
        #     }
        # }
        #
        order = self.safe_value(response, 'result', {})
        return self.parse_order(order)
async def edit_contract_v3_order(self, id: str, symbol, type, side, amount=None, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'orderId': id,
'qty': self.amount_to_precision(symbol, amount),
# 'orderLinkId': '', # User customised order id. Either orderId or orderLinkId is required
# 'triggerPrice': '', # Trigger price. Don't pass it if not modify the qty
# 'takeProfit': '', # Take profit price after modification. Don't pass it if not modify the take profit
# 'stopLoss': '', # Stop loss price after modification. Don't pass it if not modify the Stop loss
# 'tpTriggerBy': '', # The price type to trigger take profit. When set a take profit, self param is required if no initial value for the order
# 'slTriggerBy': '', # The price type to trigger stop loss. When set a stop loss, self param is required if no initial value for the order
# 'triggerBy': '', # Trigger price type. LastPrice, IndexPrice, MarkPrice, LastPrice
}
if price is not None:
request['price'] = self.price_to_precision(symbol, price)
triggerPrice = self.safe_number_2(params, 'triggerPrice', 'stopPrice')
stopLossTriggerPrice = self.safe_number(params, 'stopLossPrice')
takeProfitTriggerPrice = self.safe_number(params, 'takeProfitPrice')
stopLoss = self.safe_number(params, 'stopLoss')
takeProfit = self.safe_number(params, 'takeProfit')
isStopLossTriggerOrder = stopLossTriggerPrice is not None
isTakeProfitTriggerOrder = takeProfitTriggerPrice is not None
isStopLoss = stopLoss is not None
isTakeProfit = takeProfit is not None
if isStopLossTriggerOrder or isTakeProfitTriggerOrder:
triggerPrice = stopLossTriggerPrice if isStopLossTriggerOrder else takeProfitTriggerPrice
if triggerPrice is not None:
request['triggerPrice'] = self.price_to_precision(symbol, triggerPrice)
if isStopLoss or isTakeProfit:
if isStopLoss:
request['stopLoss'] = self.price_to_precision(symbol, stopLoss)
if isTakeProfit:
request['takeProfit'] = self.price_to_precision(symbol, takeProfit)
clientOrderId = self.safe_string(params, 'clientOrderId')
if clientOrderId is not None:
request['orderLinkId'] = clientOrderId
params = self.omit(params, ['stopPrice', 'stopLossPrice', 'takeProfitPrice', 'triggerPrice', 'clientOrderId', 'stopLoss', 'takeProfit'])
response = await self.privatePostContractV3PrivateOrderReplace(self.extend(request, params))
#
# contract v3
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "orderId": "db8b74b3-72d3-4264-bf3f-52d39b41956e",
# "orderLinkId": "x002"
# },
# "retExtInfo": {},
# "time": 1658902610749
# }
#
result = self.safe_value(response, 'result', {})
return {
'info': response,
'id': self.safe_string(result, 'orderId'),
}
async def edit_order(self, id: str, symbol, type, side, amount=None, price=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' editOrder() requires an symbol argument')
await self.load_markets()
market = self.market(symbol)
enableUnifiedMargin, enableUnifiedAccount = await self.is_unified_enabled()
if enableUnifiedAccount:
return await self.edit_unified_account_order(id, symbol, type, side, amount, price, params)
elif market['spot']:
raise NotSupported(self.id + ' editOrder() does not support spot markets')
elif enableUnifiedMargin and not market['inverse']:
return await self.edit_unified_margin_order(id, symbol, type, side, amount, price, params)
return await self.edit_contract_v3_order(id, symbol, type, side, amount, price, params)
    async def cancel_unified_account_order(self, id: str, symbol: Optional[str] = None, params={}):
        """
        Cancel an order on the unified trading account(v5).

        :param str id: order id; alternatively pass params['orderLinkId']
        :param str|None symbol: unified market symbol, required
        :param dict params: extra parameters; supports 'stop'(bool, spot only) and 'orderLinkId'
        :returns dict: an `order structure <https://docs.ccxt.com/#/?id=order-structure>`
        """
        if symbol is None:
            raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument')
        await self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
            # 'orderLinkId': 'string',
            # 'orderId': id,
            # conditional orders
            # 'orderFilter': '', # Valid for spot only. Order,tpslOrder. If not passed, Order by default
        }
        if market['spot']:
            # only works for spot market
            isStop = self.safe_value(params, 'stop', False)
            params = self.omit(params, ['stop'])
            request['orderFilter'] = 'tpslOrder' if isStop else 'Order'
        if id is not None: # The user can also use argument params["orderLinkId"]
            request['orderId'] = id
        if market['spot']:
            request['category'] = 'spot'
        elif market['option']:
            request['category'] = 'option'
        elif market['linear']:
            request['category'] = 'linear'
        else:
            # inverse contracts are not supported by this endpoint
            raise NotSupported(self.id + ' cancelOrder() does not allow inverse market orders for ' + symbol + ' markets')
        response = await self.privatePostV5OrderCancel(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "orderId": "c6f055d9-7f21-4079-913d-e6523a9cfffa",
        #         "orderLinkId": "linear-004"
        #     },
        #     "retExtInfo": {},
        #     "time": 1672217377164
        # }
        #
        result = self.safe_value(response, 'result', {})
        return self.parse_order(result, market)
async def cancel_spot_order(self, id: str, symbol: Optional[str] = None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
# 'order_link_id': 'string', # one of order_id, stop_order_id or order_link_id is required
# 'orderId': id
}
if id is not None: # The user can also use argument params["order_link_id"]
request['orderId'] = id
response = await self.privatePostSpotV3PrivateCancelOrder(self.extend(request, params))
#
# {
# "retCode": "0",
# "retMsg": "OK",
# "result": {
# "orderId": "1275046248585414144",
# "orderLinkId": "1666733357434617",
# "symbol": "AAVEUSDT",
# "status": "NEW",
# "accountId": "13380434",
# "createTime": "1666733357438",
# "orderPrice": "80",
# "orderQty": "0.11",
# "execQty": "0",
# "timeInForce": "GTC",
# "orderType": "LIMIT",
# "side": "BUY",
# "orderCategory": "0"
# },
# "retExtMap": {},
# "retExtInfo": null,
# "time": "1666733839493"
# }
#
result = self.safe_value(response, 'result', {})
return self.parse_order(result, market)
    async def cancel_unified_margin_order(self, id: str, symbol: Optional[str] = None, params={}):
        """
        Cancel an order under the unified margin account(unified v3).

        :param str id: order id; alternatively pass params['orderLinkId']
        :param str|None symbol: unified market symbol, required(linear or option only)
        :param dict params: extra parameters; supports 'stop'(bool) and 'orderLinkId'
        :returns dict: an `order structure <https://docs.ccxt.com/#/?id=order-structure>`
        """
        if symbol is None:
            raise ArgumentsRequired(self.id + ' cancelUnifiedMarginOrder() requires a symbol argument')
        await self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
            # 'orderLinkId': 'string',
            # 'orderId': id,
            # conditional orders
            # 'orderFilter': '',
            # 'category': '',
        }
        # conditional orders are addressed through a separate order filter
        isStop = self.safe_value(params, 'stop', False)
        params = self.omit(params, ['stop'])
        request['orderFilter'] = 'StopOrder' if isStop else 'Order'
        if id is not None: # The user can also use argument params["orderLinkId"]
            request['orderId'] = id
        if market['option']:
            request['category'] = 'option'
        elif market['linear']:
            request['category'] = 'linear'
        else:
            # inverse contracts are not supported by the unified margin endpoint
            raise NotSupported(self.id + ' cancelUnifiedMarginOrder() does not allow inverse market orders for ' + symbol + ' markets')
        response = await self.privatePostUnifiedV3PrivateOrderCancel(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "orderId": "42c86d66331e41998d12c2440ce90c1a",
        #         "orderLinkId": "e80d558e-ed"
        #     }
        # }
        #
        result = self.safe_value(response, 'result', {})
        return self.parse_order(result, market)
async def cancel_usdc_order(self, id: str, symbol: Optional[str] = None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelUSDCOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'orderLinkId': 'string', # one of order_id, stop_order_id or order_link_id is required
# 'orderId': id,
}
isStop = self.safe_value(params, 'stop', False)
params = self.omit(params, ['stop'])
method = None
if id is not None: # The user can also use argument params["order_link_id"]
request['orderId'] = id
if market['option']:
method = 'privatePostOptionUsdcOpenapiPrivateV1CancelOrder'
else:
method = 'privatePostPerpetualUsdcOpenapiPrivateV1CancelOrder'
request['orderFilter'] = 'StopOrder' if isStop else 'Order'
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "outRequestId": "",
# "symbol": "BTC-13MAY22-40000-C",
# "orderId": "8c65df91-91fc-461d-9b14-786379ef138c",
# "orderLinkId": ""
# },
# "retExtMap": {}
# }
#
result = self.safe_value(response, 'result', {})
return self.parse_order(result, market)
async def cancel_derivatives_order(self, id: str, symbol: Optional[str] = None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelDerivativesOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'orderId': id,
}
response = await self.privatePostContractV3PrivateOrderCancel(self.extend(request, params))
#
# contract v3
#
# {
# "retCode":0,
# "retMsg":"OK",
# "result":{
# "orderId": "4030430d-1dba-4134-ac77-3d81c14aaa00",
# "orderLinkId": ""
# },
# "retExtInfo":null,
# "time":1658850321861
# }
#
result = self.safe_value(response, 'result', {})
return self.parse_order(result, market)
async def cancel_order(self, id: str, symbol: Optional[str] = None, params={}):
"""
cancels an open order
:param str id: order id
:param str symbol: unified symbol of the market the order was made in
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/#/?id=order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
enableUnifiedMargin, enableUnifiedAccount = await self.is_unified_enabled()
isUsdcSettled = market['settle'] == 'USDC'
if enableUnifiedAccount:
return await self.cancel_unified_account_order(id, symbol, params)
elif market['spot']:
return await self.cancel_spot_order(id, symbol, params)
elif enableUnifiedMargin and not market['inverse']:
return await self.cancel_unified_margin_order(id, symbol, params)
elif isUsdcSettled:
return await self.cancel_usdc_order(id, symbol, params)
return await self.cancel_derivatives_order(id, symbol, params)
    async def cancel_all_unified_account_orders(self, symbol: Optional[str] = None, params={}):
        """
        Cancel all open orders on the unified trading account(v5).

        :param str|None symbol: unified market symbol; when None the category is resolved
            from the 'type'/'subType' params and settle defaults to USDT
        :param dict params: extra parameters; supports 'stop'(bool) and 'settle'
        :returns [dict]|dict: a list of order structures, or the raw response when the
            payload does not contain a list(e.g. spot)
        """
        await self.load_markets()
        market = None
        settle = None
        type = None
        subType = None
        request = {}
        if symbol is not None:
            market = self.market(symbol)
            settle = market['settle']
            request['symbol'] = market['id']
        else:
            settle, params = self.handle_option_and_params(params, 'cancelAllOrders', 'settle', 'USDT')
        type, params = self.handle_market_type_and_params('cancelAllOrders', market, params)
        subType, params = self.handle_sub_type_and_params('cancelAllOrders', market, params, 'linear')
        if type == 'spot':
            request['category'] = 'spot'
        elif type == 'option':
            request['category'] = 'option'
        elif subType == 'linear':
            request['category'] = 'linear'
        else:
            # inverse contracts are not supported by this endpoint
            raise NotSupported(self.id + ' cancelAllOrders() does not allow inverse market orders for ' + type + ' markets')
        # NOTE(review): settle can be None here(e.g. spot symbols) - confirm the endpoint
        # tolerates a missing/None settleCoin in that case
        request['settleCoin'] = settle
        isStop = self.safe_value(params, 'stop', False)
        params = self.omit(params, ['stop'])
        if isStop:
            request['orderFilter'] = 'tpslOrder'
        response = await self.privatePostV5OrderCancelAll(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "list": [
        #             {
        #                 "orderId": "f6a73e1f-39b5-4dee-af21-1460b2e3b27c",
        #                 "orderLinkId": "a001"
        #             }
        #         ]
        #     },
        #     "retExtInfo": {},
        #     "time": 1672219780463
        # }
        #
        result = self.safe_value(response, 'result', [])
        orders = self.safe_value(result, 'list')
        if not isinstance(orders, list):
            # some categories return a success flag instead of a list of cancelled ids
            return response
        return self.parse_orders(orders, market)
async def cancel_all_spot_orders(self, symbol: Optional[str] = None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelAllSpotOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.privateDeleteSpotOrderBatchCancel(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "",
# "ext_code": null,
# "ext_info": null,
# "result": {
# "success": True
# }
# }
#
result = self.safe_value(response, 'result', [])
if not isinstance(result, list):
return response
return self.parse_orders(result, market)
    async def cancel_all_unified_margin_orders(self, symbol: Optional[str] = None, params={}):
        """
        Cancel all open orders under the unified margin account(unified v3).

        :param str|None symbol: unified market symbol; when None, category falls back to
            the 'subType' param(default 'linear')
        :param dict params: extra parameters; supports 'stop'(bool) and 'settle'
        :returns [dict]|dict: a list of order structures, or the raw response when the
            payload does not contain a list
        """
        await self.load_markets()
        market = None
        settle = None
        request = {}
        if symbol is not None:
            market = self.market(symbol)
            settle = market['settle']
            request['symbol'] = market['id']
        subType = None
        subType, params = self.handle_sub_type_and_params('cancelAllOrders', market, params, 'linear')
        request['category'] = subType
        settle, params = self.handle_option_and_params(params, 'cancelAllOrders', 'settle', settle)
        if settle is not None:
            request['settleCoin'] = settle
        isStop = self.safe_value(params, 'stop', False)
        params = self.omit(params, ['stop'])
        if isStop:
            request['orderFilter'] = 'StopOrder'
        response = await self.privatePostUnifiedV3PrivateOrderCancelAll(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "list": [{
        #                 "category": "option",
        #                 "symbol": "BTC-24JUN22-45000-P",
        #                 "orderId": "bd5f3b34-d64d-4b60-8188-438fbea4c552",
        #                 "orderLinkId": "ac4e3b34-d64d-4b60-8188-438fbea4c552",
        #             }, {
        #                 "category": "option",
        #                 "symbol": "BTC-24JUN22-45000-P",
        #                 "orderId": "4ddd727a-2af8-430e-a293-42895e594d18",
        #                 "orderLinkId": "5cee727a-2af8-430e-a293-42895e594d18",
        #             }
        #         ]
        #     },
        #     "retExtInfo": {
        #         "list": [{
        #             "code": 0,
        #             "msg": "OK"
        #         }, {
        #             "code": 0,
        #             "msg": "OK"
        #         }]
        #     },
        #     "time": 1657200736570
        # }
        #
        result = self.safe_value(response, 'result', [])
        orders = self.safe_value(result, 'list')
        if not isinstance(orders, list):
            # some payloads omit the list; hand the raw response back
            return response
        return self.parse_orders(orders, market)
async def cancel_all_usdc_orders(self, symbol: Optional[str] = None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelAllUSDCOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
method = None
request = {
'symbol': market['id'],
}
if market['option']:
method = 'privatePostOptionUsdcOpenapiPrivateV1CancelAll'
else:
method = 'privatePostPerpetualUsdcOpenapiPrivateV1CancelAll'
isStop = self.safe_value(params, 'stop', False)
if isStop:
request['orderFilter'] = 'StopOrder'
else:
request['orderFilter'] = 'Order'
params = self.omit(params, ['stop'])
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "retExtMap": {},
# "result": [
# {
# "outRequestId": "cancelAll-290119-1652176443114-0",
# "symbol": "BTC-13MAY22-40000-C",
# "orderId": "fa6cd740-56ed-477d-9385-90ccbfee49ca",
# "orderLinkId": "",
# "errorCode": 0,
# "errorDesc": ""
# }
# ]
# }
#
result = self.safe_value(response, 'result', [])
if not isinstance(result, list):
return response
return self.parse_orders(result, market)
async def cancel_all_derivatives_orders(self, symbol: Optional[str] = None, params={}):
await self.load_markets()
market = None
settle = None
request = {}
if symbol is not None:
market = self.market(symbol)
settle = market['settle']
request['symbol'] = market['id']
settle, params = self.handle_option_and_params(params, 'cancelAllOrders', 'settle', settle)
if settle is not None:
request['settleCoin'] = settle
response = await self.privatePostContractV3PrivateOrderCancelAll(self.extend(request, params))
#
# contract v3
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "list": [
# {
# "orderId": "4030430d-1dba-4134-ac77-3d81c14aaa00",
# "orderLinkId": "x001"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1658901359225
# }
#
result = self.safe_value(response, 'result', [])
orders = self.safe_value(result, 'list', [])
return self.parse_orders(orders, market)
async def cancel_all_orders(self, symbol: Optional[str] = None, params={}):
"""
cancel all open orders
:param str|None symbol: unified market symbol, only orders in the market of self symbol are cancelled when symbol is not None
:param dict params: extra parameters specific to the bybit api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
market = None
settle = self.safe_string(params, 'settleCoin')
if settle is None:
settle, params = self.handle_option_and_params(params, 'cancelAllOrders', 'settle', settle)
if symbol is not None:
market = self.market(symbol)
settle = market['settle']
subType = None
subType, params = self.handle_sub_type_and_params('cancelAllOrders', market, params)
isUsdcSettled = settle == 'USDC'
isInverse = subType == 'inverse'
isLinearSettle = isUsdcSettled or (settle == 'USDT')
if isInverse and isLinearSettle:
raise ArgumentsRequired(self.id + ' cancelAllOrders with inverse subType requires settle to not be USDT or USDC')
type, query = self.handle_market_type_and_params('cancelAllOrders', market, params)
enableUnifiedMargin, enableUnifiedAccount = await self.is_unified_enabled()
if enableUnifiedAccount:
return await self.cancel_all_unified_account_orders(symbol, query)
elif type == 'spot':
return await self.cancel_all_spot_orders(symbol, query)
elif enableUnifiedMargin and not isInverse:
return await self.cancel_all_unified_margin_orders(symbol, query)
elif isUsdcSettled:
return await self.cancel_all_usdc_orders(symbol, query)
else:
return await self.cancel_all_derivatives_orders(symbol, query)
async def fetch_unified_account_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
await self.load_markets()
request = {
# 'symbol': market['id'],
# 'category', Type of derivatives product: spot, linear or option.
# 'baseCoin', Base coin. When category=option. If not passed, BTC by default; when category=linear, if BTC passed, BTCPERP & BTCUSDT returned.
# 'orderId', Order ID
# 'orderLinkId', Unique user-set order ID
# 'orderStatus', # Return all status orders if not passed
# 'orderFilter', Conditional order or active order
# 'limit': number, Data quantity per page: Max data value per page is 50, and default value at 20.
# 'cursor', API pass-through. accountType + category + cursor +. If inconsistent, the following should be returned: The account type does not match the service inquiry.
# 'startTime': 0, # The start timestamp(ms) Support UTA only temporarily startTime and endTime must be passed together If not passed, query the past 7 days data by default
# 'endTime': 0, # The end timestamp(ms)
}
market = None
if symbol is None:
type = None
type, params = self.handle_market_type_and_params('fetchOrders', market, params)
# option, spot
request['category'] = type
if type == 'swap':
subType = None
subType, params = self.handle_sub_type_and_params('fetchOrders', market, params, 'linear')
request['category'] = subType
else:
market = self.market(symbol)
request['symbol'] = market['id']
if market['spot']:
request['category'] = 'spot'
elif market['option']:
request['category'] = 'option'
elif market['linear']:
request['category'] = 'linear'
else:
raise NotSupported(self.id + ' fetchOrders() does not allow inverse market orders for ' + symbol + ' markets')
isStop = self.safe_value(params, 'stop', False)
params = self.omit(params, ['stop'])
if isStop:
if market['spot']:
request['orderFilter'] = 'tpslOrder'
else:
request['orderFilter'] = 'StopOrder'
if limit is not None:
request['limit'] = limit
if since is not None:
request['startTime'] = since
until = self.safe_integer_2(params, 'until', 'till') # unified in milliseconds
endTime = self.safe_integer(params, 'endTime', until) # exchange-specific in milliseconds
params = self.omit(params, ['endTime', 'till', 'until'])
if endTime is not None:
request['endTime'] = endTime
else:
if since is not None:
raise BadRequest(self.id + ' fetchOrders() requires until/endTime when since is provided.')
response = await self.privateGetV5OrderHistory(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "nextPageCursor": "03234de9-1332-41eb-b805-4a9f42c136a3%3A1672220109387%2C03234de9-1332-41eb-b805-4a9f42c136a3%3A1672220109387",
# "category": "linear",
# "list": [
# {
# "symbol": "BTCUSDT",
# "orderType": "Limit",
# "orderLinkId": "test-001",
# "orderId": "03234de9-1332-41eb-b805-4a9f42c136a3",
# "cancelType": "CancelByUser",
# "avgPrice": "0",
# "stopOrderType": "UNKNOWN",
# "lastPriceOnCreated": "16656.5",
# "orderStatus": "Cancelled",
# "takeProfit": "",
# "cumExecValue": "0",
# "triggerDirection": 0,
# "blockTradeId": "",
# "rejectReason": "EC_PerCancelRequest",
# "isLeverage": "",
# "price": "18000",
# "orderIv": "",
# "createdTime": "1672220109387",
# "tpTriggerBy": "UNKNOWN",
# "positionIdx": 0,
# "timeInForce": "GoodTillCancel",
# "leavesValue": "0",
# "updatedTime": "1672220114123",
# "side": "Sell",
# "triggerPrice": "",
# "cumExecFee": "0",
# "slTriggerBy": "UNKNOWN",
# "leavesQty": "0",
# "closeOnTrigger": False,
# "cumExecQty": "0",
# "reduceOnly": False,
# "qty": "0.1",
# "stopLoss": "",
# "triggerBy": "UNKNOWN"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672221263862
# }
#
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'list', [])
return self.parse_orders(data, market, since, limit)
    async def fetch_unified_margin_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        fetches the order history for unified margin accounts via GET /unified/v3/private/order/list
        :param str|None symbol: unified market symbol; when omitted the category falls back to the 'linear' sub-type
        :param int|None since: not sent to the exchange by this method; only used client-side by parse_orders
        :param int|None limit: data quantity per page, exchange max is 50, default 20
        :param dict params: extra parameters for the bybit endpoint; pass 'stop': True to query conditional orders
        :returns [dict]: a list of order structures
        :raises NotSupported: for inverse contract symbols, which this endpoint does not cover
        """
        await self.load_markets()
        request = {
            # 'symbol': market['id'],
            # 'category', Type of derivatives product: linear or option.
            # 'baseCoin', Base coin. When category=option. If not passed, BTC by default; when category=linear, if BTC passed, BTCPERP & BTCUSDT returned.
            # 'orderId', Order ID
            # 'orderLinkId', Unique user-set order ID
            # 'orderStatus', Query list of orders in designated states. If self parameter is not passed, the orders in all states shall be enquired by default. This parameter supports multi-state inquiry. States should be separated with English commas.
            # 'orderFilter', Conditional order or active order
            # 'direction', prev: prev, next: next.
            # 'limit': number, Data quantity per page: Max data value per page is 50, and default value at 20.
            # 'cursor', API pass-through. accountType + category + cursor +. If inconsistent, the following should be returned: The account type does not match the service inquiry.
        }
        market = None
        if symbol is None:
            # no symbol: derive the category from params/options, defaulting to 'linear'
            subType = None
            subType, params = self.handle_sub_type_and_params('fetchUnifiedMarginOrders', market, params, 'linear')
            request['category'] = subType
        else:
            market = self.market(symbol)
            request['symbol'] = market['id']
            # the unified margin endpoint only supports option and linear products
            if market['option']:
                request['category'] = 'option'
            elif market['linear']:
                request['category'] = 'linear'
            else:
                raise NotSupported(self.id + ' fetchUnifiedMarginOrders() does not allow inverse market orders for ' + symbol + ' markets')
        # 'stop' is a ccxt-level flag, translated into bybit's orderFilter and removed from params
        isStop = self.safe_value(params, 'stop', False)
        params = self.omit(params, ['stop'])
        if isStop:
            request['orderFilter'] = 'StopOrder'
        if limit is not None:
            request['limit'] = limit
        response = await self.privateGetUnifiedV3PrivateOrderList(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "Success",
        #     "result": {
        #         "nextPageCursor": "7d17d359-4e38-4d3a-9a31-29791ef2dfd7%3A1657711949928%2C7d17d359-4e38-4d3a-9a31-29791ef2dfd7%3A1657711949928",
        #         "category": "linear",
        #         "list": [
        #             {
        #                 "symbol": "ETHUSDT",
        #                 "orderType": "Market",
        #                 "orderLinkId": "",
        #                 "orderId": "7d17d359-4e38-4d3a-9a31-29791ef2dfd7",
        #                 "stopOrderType": "UNKNOWN",
        #                 "orderStatus": "Filled",
        #                 "takeProfit": "",
        #                 "cumExecValue": "536.92500000",
        #                 "blockTradeId": "",
        #                 "rejectReason": "EC_NoError",
        #                 "price": "1127.10000000",
        #                 "createdTime": 1657711949928,
        #                 "tpTriggerBy": "UNKNOWN",
        #                 "timeInForce": "ImmediateOrCancel",
        #                 "basePrice": "",
        #                 "leavesValue": "0.00000000",
        #                 "updatedTime": 1657711949945,
        #                 "side": "Buy",
        #                 "triggerPrice": "",
        #                 "cumExecFee": "0.32215500",
        #                 "slTriggerBy": "UNKNOWN",
        #                 "leavesQty": "0.0000",
        #                 "closeOnTrigger": False,
        #                 "cumExecQty": "0.5000",
        #                 "reduceOnly": False,
        #                 "qty": "0.5000",
        #                 "stopLoss": "",
        #                 "triggerBy": "UNKNOWN",
        #                 "orderIM": ""
        #             }]
        #     },
        #     "time": 1657713451741
        # }
        #
        result = self.safe_value(response, 'result', {})
        data = self.safe_value(result, 'list', [])
        return self.parse_orders(data, market, since, limit)
    async def fetch_derivatives_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        fetches the order history for derivatives(classic) accounts via GET /v5/order/history
        :param str|None symbol: unified market symbol; when omitted the category is derived from the market type/sub-type options
        :param int|None since: not sent to the exchange by this method; only used client-side by parse_orders
        :param int|None limit: data quantity per page, exchange max is 50, default 20
        :param dict params: extra parameters for the bybit endpoint; pass 'stop': True to query conditional orders
        :returns [dict]: a list of order structures
        """
        await self.load_markets()
        market = None
        request = {
            # 'symbol': market['id'],
            # 'category', Type of derivatives product: spot, linear or option.
            # 'baseCoin', Base coin. When category=option. If not passed, BTC by default; when category=linear, if BTC passed, BTCPERP & BTCUSDT returned.
            # 'orderId', Order ID
            # 'orderLinkId', Unique user-set order ID
            # 'orderStatus', # Return all status orders if not passed
            # 'orderFilter', Conditional order or active order
            # 'limit': number, Data quantity per page: Max data value per page is 50, and default value at 20.
            # 'cursor', API pass-through. accountType + category + cursor +. If inconsistent, the following should be returned: The account type does not match the service inquiry.
        }
        if symbol is None:
            type = None
            type, params = self.handle_market_type_and_params('fetchOrders', market, params)
            request['category'] = type
            # 'swap' is a ccxt type, bybit wants the concrete sub-type('linear'/'inverse') instead
            if type == 'swap':
                subType = None
                subType, params = self.handle_sub_type_and_params('fetchOrders', market, params, 'linear')
                request['category'] = subType
        else:
            market = self.market(symbol)
            request['symbol'] = market['id']
            if market['linear']:
                request['category'] = 'linear'
            else:
                request['category'] = 'inverse'
        # 'stop' is a ccxt-level flag, translated into bybit's orderFilter and removed from params
        isStop = self.safe_value(params, 'stop', False)
        params = self.omit(params, ['stop'])
        if isStop:
            request['orderFilter'] = 'StopOrder'
        if limit is not None:
            request['limit'] = limit
        response = await self.privateGetV5OrderHistory(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "nextPageCursor": "03234de9-1332-41eb-b805-4a9f42c136a3%3A1672220109387%2C03234de9-1332-41eb-b805-4a9f42c136a3%3A1672220109387",
        #         "category": "linear",
        #         "list": [
        #             {
        #                 "symbol": "BTCUSDT",
        #                 "orderType": "Limit",
        #                 "orderLinkId": "test-001",
        #                 "orderId": "03234de9-1332-41eb-b805-4a9f42c136a3",
        #                 "cancelType": "CancelByUser",
        #                 "avgPrice": "0",
        #                 "stopOrderType": "UNKNOWN",
        #                 "lastPriceOnCreated": "16656.5",
        #                 "orderStatus": "Cancelled",
        #                 "takeProfit": "",
        #                 "cumExecValue": "0",
        #                 "triggerDirection": 0,
        #                 "blockTradeId": "",
        #                 "rejectReason": "EC_PerCancelRequest",
        #                 "isLeverage": "",
        #                 "price": "18000",
        #                 "orderIv": "",
        #                 "createdTime": "1672220109387",
        #                 "tpTriggerBy": "UNKNOWN",
        #                 "positionIdx": 0,
        #                 "timeInForce": "GoodTillCancel",
        #                 "leavesValue": "0",
        #                 "updatedTime": "1672220114123",
        #                 "side": "Sell",
        #                 "triggerPrice": "",
        #                 "cumExecFee": "0",
        #                 "slTriggerBy": "UNKNOWN",
        #                 "leavesQty": "0",
        #                 "closeOnTrigger": False,
        #                 "cumExecQty": "0",
        #                 "reduceOnly": False,
        #                 "qty": "0.1",
        #                 "stopLoss": "",
        #                 "triggerBy": "UNKNOWN"
        #             }
        #         ]
        #     },
        #     "retExtInfo": {},
        #     "time": 1672221263862
        # }
        #
        result = self.safe_value(response, 'result', {})
        data = self.safe_value(result, 'list', [])
        return self.parse_orders(data, market, since, limit)
async def fetch_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetches information on multiple orders made by the user
:param str symbol: unified market symbol of the market orders were made in
:param int|None since: the earliest time in ms to fetch orders for
:param int|None limit: the maximum number of orde structures to retrieve
:param dict params: extra parameters specific to the bybit api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
market = None
settle = self.safe_string(params, 'settleCoin')
if settle is None:
settle, params = self.handle_option_and_params(params, 'fetchOrders', 'settle', settle)
if symbol is not None:
market = self.market(symbol)
settle = market['settle']
subType = None
subType, params = self.handle_sub_type_and_params('fetchOrders', market, params)
isInverse = subType == 'inverse'
isUsdcSettled = settle == 'USDC'
isLinearSettle = isUsdcSettled or (settle == 'USDT')
if isInverse and isLinearSettle:
raise ArgumentsRequired(self.id + ' fetchOrders with inverse subType requires settle to not be USDT or USDC')
type, query = self.handle_market_type_and_params('fetchOrders', market, params)
enableUnifiedMargin, enableUnifiedAccount = await self.is_unified_enabled()
if enableUnifiedAccount and not isInverse:
return await self.fetch_unified_account_orders(symbol, since, limit, query)
elif type == 'spot':
raise NotSupported(self.id + ' fetchOrders() only support ' + type + ' markets for unified trade account, use exchange.fetch_open_orders() and exchange.fetchClosedOrders() instead')
elif enableUnifiedMargin and not isInverse:
return await self.fetch_unified_margin_orders(symbol, since, limit, query)
else:
return await self.fetch_derivatives_orders(symbol, since, limit, query)
async def fetch_spot_closed_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
request = {}
if symbol is not None:
request['symbol'] = market['id']
if limit is not None:
request['limit'] = limit
if since is not None:
request['startTime'] = since
response = await self.privateGetSpotV3PrivateHistoryOrders(self.extend(request, params))
result = self.safe_value(response, 'result', {})
#
# {
# "retCode": "0",
# "retMsg": "OK",
# "result": {
# "list": [
# {
# "accountId": "13380434",
# "symbol": "AAVEUSDT",
# "orderLinkId": "1666697847966604",
# "orderId": "1274748373594828288",
# "orderPrice": "80",
# "orderQty": "0.11",
# "execQty": "0",
# "cummulativeQuoteQty": "0",
# "avgPrice": "0",
# "status": "CANCELED",
# "timeInForce": "GTC",
# "orderType": "LIMIT",
# "side": "BUY",
# "stopPrice": "0.0",
# "icebergQty": "0.0",
# "createTime": "1666697847972",
# "updateTime": "1666697865809",
# "isWorking": "1",
# "orderCategory": "0"
# },
# ]
# },
# "retExtInfo": null,
# "time": "1666732287588"
# }
#
orders = self.safe_value(result, 'list', [])
return self.parse_orders(orders, market, since, limit)
async def fetch_closed_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetches information on multiple closed orders made by the user
:param str|None symbol: unified market symbol of the market orders were made in
:param int|None since: the earliest time in ms to fetch orders for
:param int|None limit: the maximum number of orde structures to retrieve
:param dict params: extra parameters specific to the bybit api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
type = None
type, params = self.handle_market_type_and_params('fetchClosedOrders', market, params)
enableUnified = await self.is_unified_enabled()
request = {}
if (type == 'spot') and not enableUnified[1]:
return await self.fetch_spot_closed_orders(symbol, since, limit, params)
else:
request['orderStatus'] = 'Filled'
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_canceled_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetches information on multiple canceled orders made by the user
:param str symbol: unified market symbol of the market orders were made in
:param int|None since: timestamp in ms of the earliest order, default is None
:param int|None limit: max number of orders to return, default is None
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: a list of `order structures <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
type = None
type, params = self.handle_market_type_and_params('fetchCanceledOrders', market, params)
enableUnified = await self.is_unified_enabled()
request = {}
if (type == 'spot') and not enableUnified[1]:
raise NotSupported(self.id + ' fetchCanceledOrders() only allow spot market orders for unified trade account, use exchange.fetch_open_orders() and exchange.fetchClosedOrders() instead')
else:
request['orderStatus'] = 'Cancelled'
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_unified_account_open_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
await self.load_markets()
request = {
# 'symbol': market['id'],
# 'category', Type of derivatives product: linear or option.
# 'baseCoin', Base coin. When category=option. If not passed, BTC by default; when category=linear, if BTC passed, BTCPERP & BTCUSDT returned.
# 'settleCoin', Settle coin. For linear, either symbol or settleCoin is required
# 'orderId', Order ID
# 'orderLinkId', Unique user-set order ID
# 'orderFilter', Conditional order or active order
# 'limit': number, Data quantity per page: Max data value per page is 50, and default value at 20.
# 'cursor', API pass-through. accountType + category + cursor +. If inconsistent, the following should be returned: The account type does not match the service inquiry.
# 'openOnly': 0,
}
market = None
if symbol is None:
type = None
type, params = self.handle_market_type_and_params('fetchOpenOrders', market, params)
subType = None
subType, params = self.handle_sub_type_and_params('fetchOpenOrders', market, params, 'linear')
request['category'] = type
if type == 'swap':
if subType == 'linear':
self.check_required_symbol('fetchOpenOrders', symbol)
elif subType == 'inverse':
raise NotSupported(self.id + ' fetchOpenOrders() does not allow inverse market orders for ' + symbol + ' markets')
request['category'] = subType
else:
market = self.market(symbol)
request['symbol'] = market['id']
if market['spot']:
request['category'] = 'spot'
elif market['option']:
request['category'] = 'option'
elif market['linear']:
request['category'] = 'linear'
else:
raise NotSupported(self.id + ' fetchOpenOrders() does not allow inverse market orders for ' + symbol + ' markets')
isStop = self.safe_value(params, 'stop', False)
params = self.omit(params, ['stop'])
if isStop:
if market['spot']:
request['orderFilter'] = 'tpslOrder'
else:
request['orderFilter'] = 'StopOrder'
if limit is not None:
request['limit'] = limit
response = await self.privateGetV5OrderRealtime(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "nextPageCursor": "1321052653536515584%3A1672217748287%2C1321052653536515584%3A1672217748287",
# "category": "spot",
# "list": [
# {
# "symbol": "ETHUSDT",
# "orderType": "Limit",
# "orderLinkId": "1672217748277652",
# "orderId": "1321052653536515584",
# "cancelType": "UNKNOWN",
# "avgPrice": "",
# "stopOrderType": "tpslOrder",
# "lastPriceOnCreated": "",
# "orderStatus": "Cancelled",
# "takeProfit": "",
# "cumExecValue": "0",
# "triggerDirection": 0,
# "isLeverage": "0",
# "rejectReason": "",
# "price": "1000",
# "orderIv": "",
# "createdTime": "1672217748287",
# "tpTriggerBy": "",
# "positionIdx": 0,
# "timeInForce": "GTC",
# "leavesValue": "500",
# "updatedTime": "1672217748287",
# "side": "Buy",
# "triggerPrice": "1500",
# "cumExecFee": "0",
# "leavesQty": "0",
# "slTriggerBy": "",
# "closeOnTrigger": False,
# "cumExecQty": "0",
# "reduceOnly": False,
# "qty": "0.5",
# "stopLoss": "",
# "triggerBy": "1192.5"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672219526294
# }
#
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'list', [])
return self.parse_orders(data, market, since, limit)
async def fetch_spot_open_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
await self.load_markets()
request = {}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if limit is not None:
request['limit'] = limit
response = await self.privateGetSpotV3PrivateOpenOrders(self.extend(request, params))
#
# {
# "retCode": "0",
# "retMsg": "OK",
# "result": {
# "list": [
# {
# "accountId": "13380434",
# "symbol": "AAVEUSDT",
# "orderLinkId": "1666734005300717",
# "orderId": "1275051683279281664",
# "orderPrice": "80",
# "orderQty": "0.11",
# "execQty": "0",
# "cummulativeQuoteQty": "0",
# "avgPrice": "0",
# "status": "NEW",
# "timeInForce": "GTC",
# "orderType": "LIMIT",
# "side": "BUY",
# "stopPrice": "0.0",
# "icebergQty": "0.0",
# "createTime": "1666734005304",
# "updateTime": "1666734005309",
# "isWorking": "1",
# "orderCategory": "0"
# }
# ]
# },
# "retExtInfo": null,
# "time": "1666734031592"
# }
#
result = self.safe_value(response, 'result', {})
orders = self.safe_value(result, 'list', [])
return self.parse_orders(orders, market, since, limit)
    async def fetch_unified_margin_open_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        fetches unfilled orders for unified margin accounts via GET /unified/v3/private/order/unfilled-orders
        :param str|None symbol: unified market symbol; when omitted the category falls back to the 'linear' sub-type
        :param int|None since: not sent to the exchange by this method; only used client-side by parse_orders
        :param int|None limit: data quantity per page
        :param dict params: extra parameters for the bybit endpoint; pass 'stop': True(or type 'stop'/'conditional') for conditional orders
        :returns [dict]: a list of order structures
        :raises NotSupported: for inverse contract symbols, which this endpoint does not cover
        """
        await self.load_markets()
        request = {}
        market = None
        if symbol is None:
            # no symbol: derive the category from params/options, defaulting to 'linear'
            subType = None
            subType, params = self.handle_sub_type_and_params('fetchUnifiedMarginOrders', market, params, 'linear')
            request['category'] = subType
        else:
            market = self.market(symbol)
            request['symbol'] = market['id']
            if market['option']:
                request['category'] = 'option'
            elif market['linear']:
                request['category'] = 'linear'
            else:
                raise NotSupported(self.id + ' fetchUnifiedMarginOpenOrders() does not allow inverse market orders for ' + symbol + ' markets')
        type = None
        type, params = self.handle_market_type_and_params('fetchUnifiedMarginOpenOrders', market, params)
        # conditional orders can be requested via the 'stop' flag or via type='stop'/'conditional'
        isStop = self.safe_value(params, 'stop', False)
        isConditional = isStop or (type == 'stop') or (type == 'conditional')
        params = self.omit(params, ['stop'])
        if isConditional:
            request['orderFilter'] = 'StopOrder'
        if limit is not None:
            request['limit'] = limit
        response = await self.privateGetUnifiedV3PrivateOrderUnfilledOrders(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "Success",
        #     "result": {
        #         "nextPageCursor": "135ccc0d-8136-4e1b-8af3-07b11ee158d1%3A1665565610526%2C135ccc0d-8136-4e1b-8af3-07b11ee158d1%3A1665565610526",
        #         "category": "linear",
        #         "list": [
        #             {
        #                 "symbol": "ETHUSDT",
        #                 "orderType": "Limit",
        #                 "orderLinkId": "test0000005",
        #                 "orderId": "135ccc0d-8136-4e1b-8af3-07b11ee158d1",
        #                 "stopOrderType": "UNKNOWN",
        #                 "orderStatus": "New",
        #                 "takeProfit": "",
        #                 "cumExecValue": "0.00000000",
        #                 "blockTradeId": "",
        #                 "price": "700.00000000",
        #                 "createdTime": 1665565610526,
        #                 "tpTriggerBy": "UNKNOWN",
        #                 "timeInForce": "GoodTillCancel",
        #                 "basePrice": "",
        #                 "updatedTime": 1665565610533,
        #                 "side": "Buy",
        #                 "triggerPrice": "",
        #                 "cumExecFee": "0.00000000",
        #                 "slTriggerBy": "UNKNOWN",
        #                 "leavesQty": "0.1000",
        #                 "closeOnTrigger": False,
        #                 "cumExecQty": "0.00000000",
        #                 "reduceOnly": False,
        #                 "qty": "0.1000",
        #                 "stopLoss": "",
        #                 "triggerBy": "UNKNOWN",
        #                 "orderIM": "0.00000000"
        #             }
        #         ]
        #     },
        #     "retExtInfo": null,
        #     "time": 1665565614320
        # }
        #
        result = self.safe_value(response, 'result', {})
        orders = self.safe_value(result, 'list', [])
        return self.parse_orders(orders, market, since, limit)
    async def fetch_derivatives_open_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        fetches unfilled derivatives orders via GET /v5/order/realtime
        :param str|None symbol: unified market symbol; when omitted the category comes from type/sub-type options and an optional 'settle' option is forwarded as settleCoin
        :param int|None since: not sent to the exchange by this method; only used client-side by parse_orders
        :param int|None limit: data quantity per page, exchange max is 50, default 20
        :param dict params: extra parameters for the bybit endpoint; pass 'stop': True to query conditional orders
        :returns [dict]: a list of order structures
        """
        await self.load_markets()
        market = None
        settle = None
        request = {
            # 'symbol': market['id'],
            # 'category', Type of derivatives product: linear or option.
            # 'baseCoin', Base coin. When category=option. If not passed, BTC by default; when category=linear, if BTC passed, BTCPERP & BTCUSDT returned.
            # 'settleCoin', Settle coin. For linear, either symbol or settleCoin is required
            # 'orderId', Order ID
            # 'orderLinkId', Unique user-set order ID
            # 'orderFilter', Conditional order or active order
            # 'limit': number, Data quantity per page: Max data value per page is 50, and default value at 20.
            # 'cursor', API pass-through. accountType + category + cursor +. If inconsistent, the following should be returned: The account type does not match the service inquiry.
            # 'openOnly': 0,
        }
        if symbol is not None:
            market = self.market(symbol)
            request['symbol'] = market['id']
            if market['linear']:
                request['category'] = 'linear'
            else:
                request['category'] = 'inverse'
        else:
            type = None
            type, params = self.handle_market_type_and_params('fetchOpenOrders', market, params)
            subType = None
            subType, params = self.handle_sub_type_and_params('fetchOpenOrders', market, params, 'linear')
            request['category'] = type
            # 'swap' is a ccxt type, bybit wants the concrete sub-type('linear'/'inverse') instead
            if type == 'swap':
                request['category'] = subType
        # the 'settle' option is only sent when explicitly configured/passed
        settle, params = self.handle_option_and_params(params, 'fetchOpenOrders', 'settle', settle)
        if settle is not None:
            request['settleCoin'] = settle
        # 'stop' is a ccxt-level flag, translated into bybit's orderFilter and removed from params
        isStop = self.safe_value(params, 'stop', False)
        params = self.omit(params, ['stop'])
        if isStop:
            request['orderFilter'] = 'StopOrder'
        if limit is not None:
            request['limit'] = limit
        response = await self.privateGetV5OrderRealtime(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "nextPageCursor": "1321052653536515584%3A1672217748287%2C1321052653536515584%3A1672217748287",
        #         "category": "spot",
        #         "list": [
        #             {
        #                 "symbol": "ETHUSDT",
        #                 "orderType": "Limit",
        #                 "orderLinkId": "1672217748277652",
        #                 "orderId": "1321052653536515584",
        #                 "cancelType": "UNKNOWN",
        #                 "avgPrice": "",
        #                 "stopOrderType": "tpslOrder",
        #                 "lastPriceOnCreated": "",
        #                 "orderStatus": "Cancelled",
        #                 "takeProfit": "",
        #                 "cumExecValue": "0",
        #                 "triggerDirection": 0,
        #                 "isLeverage": "0",
        #                 "rejectReason": "",
        #                 "price": "1000",
        #                 "orderIv": "",
        #                 "createdTime": "1672217748287",
        #                 "tpTriggerBy": "",
        #                 "positionIdx": 0,
        #                 "timeInForce": "GTC",
        #                 "leavesValue": "500",
        #                 "updatedTime": "1672217748287",
        #                 "side": "Buy",
        #                 "triggerPrice": "1500",
        #                 "cumExecFee": "0",
        #                 "leavesQty": "0",
        #                 "slTriggerBy": "",
        #                 "closeOnTrigger": False,
        #                 "cumExecQty": "0",
        #                 "reduceOnly": False,
        #                 "qty": "0.5",
        #                 "stopLoss": "",
        #                 "triggerBy": "1192.5"
        #             }
        #         ]
        #     },
        #     "retExtInfo": {},
        #     "time": 1672219526294
        # }
        #
        result = self.safe_value(response, 'result', {})
        orders = self.safe_value(result, 'list', [])
        return self.parse_orders(orders, market, since, limit)
async def fetch_usdc_open_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
await self.load_markets()
request = {}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
type = None
type, params = self.handle_market_type_and_params('fetchUSDCOpenOrders', market, params)
request['category'] = 'perpetual' if (type == 'swap') else 'option'
response = await self.privatePostOptionUsdcOpenapiPrivateV1QueryActiveOrders(self.extend(request, params))
result = self.safe_value(response, 'result', {})
orders = self.safe_value(result, 'dataList', [])
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "resultTotalSize": 1,
# "cursor": "id%3D1662019818569%23df31e03b-fc00-4b4c-bd1c-b97fd72b5c5c",
# "dataList": [
# {
# "orderId": "df31e03b-fc00-4b4c-bd1c-b97fd72b5c5c",
# "orderLinkId": "",
# "symbol": "BTC-2SEP22-18000-C",
# "orderStatus": "New",
# "orderPrice": "500",
# "side": "Buy",
# "remainingQty": "0.1",
# "orderType": "Limit",
# "qty": "0.1",
# "iv": "0.0000",
# "cancelType": "",
# "updateTimestamp": "1662019818579"
# }
# ]
# }
# }
#
return self.parse_orders(orders, market, since, limit)
async def fetch_open_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetch all unfilled currently open orders
:param str|None symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch open orders for
:param int|None limit: the maximum number of open orders structures to retrieve
:param dict params: extra parameters specific to the bybit api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
market = None
settle = self.safe_string(params, 'settleCoin')
if settle is None:
settle, params = self.handle_option_and_params(params, 'fetchOpenOrders', 'settle', settle)
if symbol is not None:
market = self.market(symbol)
settle = market['settle']
subType = None
subType, params = self.handle_sub_type_and_params('fetchOpenOrders', market, params)
isInverse = subType == 'inverse'
isUsdcSettled = settle == 'USDC'
isLinearSettle = isUsdcSettled or (settle == 'USDT')
if isInverse and isLinearSettle:
raise ArgumentsRequired(self.id + ' fetchOpenOrders with inverse subType requires settle to not be USDT or USDC')
type, query = self.handle_market_type_and_params('fetchOpenOrders', market, params)
enableUnifiedMargin, enableUnifiedAccount = await self.is_unified_enabled()
if enableUnifiedAccount and not isInverse:
return await self.fetch_unified_account_open_orders(symbol, since, limit, query)
elif type == 'spot':
return await self.fetch_spot_open_orders(symbol, since, limit, query)
elif enableUnifiedMargin and not isInverse:
return await self.fetch_unified_margin_open_orders(symbol, since, limit, query)
elif isUsdcSettled:
return await self.fetch_usdc_open_orders(symbol, since, limit, query)
else:
return await self.fetch_derivatives_open_orders(symbol, since, limit, query)
async def fetch_order_trades(self, id: str, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetch all the trades made from a single order
:param str id: order id
:param str|None symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch trades for
:param int|None limit: the maximum number of trades to retrieve
:param dict params: extra parameters specific to the bybit api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/#/?id=trade-structure>`
"""
request = {
'orderId': id,
}
return await self.fetch_my_trades(symbol, since, limit, self.extend(request, params))
    async def fetch_my_unified_trades(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        fetches the user's trades for unified trade accounts via GET /v5/execution/list
        :param str|None symbol: unified market symbol; when omitted the category comes from the market type/sub-type options
        :param int|None since: the earliest time in ms, forwarded as startTime
        :param int|None limit: data size per page, default 20, max 50
        :param dict params: extra parameters for the bybit endpoint
        :returns [dict]: a list of trade structures
        :raises NotSupported: for inverse sub-type queries, which this endpoint does not cover
        """
        await self.load_markets()
        market = None
        request = {
            # 'symbol': market['id'],
            # 'category': '',  # Product type. spot,linear,option
            # 'orderId': '',  # Order ID
            # 'orderLinkId': '',  # User customised order ID
            # 'baseCoin': '',  # Base coin
            # 'startTime': 0,  # The start timestamp(ms)
            # 'endTime': 0,  # The end timestamp(ms)
            # 'execType': '',  # Execution type
            # 'limit': 0,  # Limit for data size per page. [1, 100]. Default: 50
            # 'cursor': '',  # Cursor. Used for pagination
        }
        if symbol is not None:
            market = self.market(symbol)
            request['symbol'] = market['id']
        type = None
        type, params = self.handle_market_type_and_params('fetchMyTrades', market, params)
        if type == 'spot':
            request['category'] = 'spot'
        else:
            subType = None
            subType, params = self.handle_sub_type_and_params('fetchMyTrades', market, params)
            if subType == 'inverse':
                raise NotSupported(self.id + ' fetchMyTrades() does not support ' + subType + ' markets.')
            request['category'] = subType
        if since is not None:
            request['startTime'] = since
        if limit is not None:
            request['limit'] = limit  # default 20, max 50
        response = await self.privateGetV5ExecutionList(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "nextPageCursor": "132766%3A2%2C132766%3A2",
        #         "category": "linear",
        #         "list": [
        #             {
        #                 "symbol": "ETHPERP",
        #                 "orderType": "Market",
        #                 "underlyingPrice": "",
        #                 "orderLinkId": "",
        #                 "side": "Buy",
        #                 "indexPrice": "",
        #                 "orderId": "8c065341-7b52-4ca9-ac2c-37e31ac55c94",
        #                 "stopOrderType": "UNKNOWN",
        #                 "leavesQty": "0",
        #                 "execTime": "1672282722429",
        #                 "isMaker": False,
        #                 "execFee": "0.071409",
        #                 "feeRate": "0.0006",
        #                 "execId": "e0cbe81d-0f18-5866-9415-cf319b5dab3b",
        #                 "tradeIv": "",
        #                 "blockTradeId": "",
        #                 "markPrice": "1183.54",
        #                 "execPrice": "1190.15",
        #                 "markIv": "",
        #                 "orderQty": "0.1",
        #                 "orderPrice": "1236.9",
        #                 "execValue": "119.015",
        #                 "execType": "Trade",
        #                 "execQty": "0.1"
        #             }
        #         ]
        #     },
        #     "retExtInfo": {},
        #     "time": 1672283754510
        # }
        #
        result = self.safe_value(response, 'result', {})
        trades = self.safe_value(result, 'list', [])
        return self.parse_trades(trades, market, since, limit)
async def fetch_my_spot_trades(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMySpotTrades() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'orderId': 'f185806b-b801-40ff-adec-52289370ed62', # if not provided will return user's trading records
# 'startTime': int(since / 1000),
# 'endTime': 0,
# 'fromTradeId': '',
# 'toTradeId': '',
# 'limit' 20, # max 50
}
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit # default 20, max 50
response = await self.privateGetSpotV3PrivateMyTrades(self.extend(request, params))
#
# {
# "retCode": "0",
# "retMsg": "OK",
# "result": {
# "list": [
# {
# "symbol": "AAVEUSDT",
# "id": "1274785101965716992",
# "orderId": "1274784252359089664",
# "tradeId": "2270000000031365639",
# "orderPrice": "82.5",
# "orderQty": "0.016",
# "execFee": "0",
# "feeTokenId": "AAVE",
# "creatTime": "1666702226326",
# "isBuyer": "0",
# "isMaker": "0",
# "matchOrderId": "1274785101865076224",
# "makerRebate": "0",
# "executionTime": "1666702226335"
# },
# ]
# },
# "retExtMap": {},
# "retExtInfo": null,
# "time": "1666768215157"
# }
#
result = self.safe_value(response, 'result', {})
trades = self.safe_value(result, 'list', [])
return self.parse_trades(trades, market, since, limit)
async def fetch_my_unified_margin_trades(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
await self.load_markets()
market = None
settle = None
request = {
# 'symbol': market['id'],
# 'orderId': 'f185806b-b801-40ff-adec-52289370ed62', # if not provided will return user's trading records
# 'startTime': int(since / 1000),
# 'endTime': 0,
# 'category': ''
# 'limit' 20, # max 50
}
if symbol is not None:
market = self.market(symbol)
settle = market['settle']
request['symbol'] = market['id']
subType = None
subType, params = self.handle_sub_type_and_params('fetchMyTrades', market, params, 'linear')
request['category'] = subType
settle, params = self.handle_option_and_params(params, 'cancelAllOrders', 'settle', settle)
if settle is not None:
request['settleCoin'] = settle
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit # default 20, max 50
response = await self.privateGetUnifiedV3PrivateExecutionList(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "Success",
# "result": {
# "nextPageCursor": "1565%3A0%2C1565%3A0",
# "category": "option",
# "list": [
# {
# "orderType": "Limit",
# "symbol": "BTC-14JUL22-17500-C",
# "orderLinkId": "188889689-yuanzhen-558998998899",
# "side": "Buy",
# "orderId": "09c5836f-81ef-4208-a5b4-43135d3e02a2",
# "leavesQty": "0.0000",
# "execTime": 1657714122417,
# "execFee": "0.11897082",
# "feeRate": "0.000300",
# "execId": "6e492560-78b4-5d2b-b331-22921d3173c9",
# "blockTradeId": "",
# "execPrice": "2360.00000000",
# "lastLiquidityInd": "TAKER",
# "orderQty": "0.0200",
# "orderPrice": "2360.00000000",
# "execValue": "47.20000000",
# "execType": "Trade",
# "execQty": "0.0200"
# }
# ]
# },
# "time": 1657714292783
# }
#
result = self.safe_value(response, 'result', {})
trades = self.safe_value(result, 'list', [])
return self.parse_trades(trades, market, since, limit)
async def fetch_my_contract_trades(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyContractTrades() requires a symbol argument')
await self.load_markets()
market = None
request = {
# 'symbol': market['id'],
# 'category': '', # Product type. spot,linear,option
# 'orderId': '', # Order ID
# 'orderLinkId': '', # User customised order ID
# 'baseCoin': '', # Base coin
# 'startTime': 0, # The start timestamp(ms)
# 'endTime': 0, # The end timestamp(ms)
# 'execType': '', # Execution type
# 'limit': 0, # Limit for data size per page. [1, 100]. Default: 50
# 'cursor': '', # Cursor. Used for pagination
}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
subType = None
subType, params = self.handle_sub_type_and_params('fetchMyTrades', market, params)
request['category'] = subType
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit # default 50, max 100
response = await self.privateGetV5ExecutionList(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "nextPageCursor": "132766%3A2%2C132766%3A2",
# "category": "linear",
# "list": [
# {
# "symbol": "ETHPERP",
# "orderType": "Market",
# "underlyingPrice": "",
# "orderLinkId": "",
# "side": "Buy",
# "indexPrice": "",
# "orderId": "8c065341-7b52-4ca9-ac2c-37e31ac55c94",
# "stopOrderType": "UNKNOWN",
# "leavesQty": "0",
# "execTime": "1672282722429",
# "isMaker": False,
# "execFee": "0.071409",
# "feeRate": "0.0006",
# "execId": "e0cbe81d-0f18-5866-9415-cf319b5dab3b",
# "tradeIv": "",
# "blockTradeId": "",
# "markPrice": "1183.54",
# "execPrice": "1190.15",
# "markIv": "",
# "orderQty": "0.1",
# "orderPrice": "1236.9",
# "execValue": "119.015",
# "execType": "Trade",
# "execQty": "0.1"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672283754510
# }
#
result = self.safe_value(response, 'result', {})
trades = self.safe_value(result, 'list', [])
return self.parse_trades(trades, market, since, limit)
async def fetch_my_usdc_trades(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
await self.load_markets()
market = None
request = {}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
request['category'] = 'OPTION' if market['option'] else 'PERPETUAL'
else:
request['category'] = 'PERPETUAL'
response = await self.privatePostOptionUsdcOpenapiPrivateV1ExecutionList(self.extend(request, params))
#
# {
# "result": {
# "cursor": "29%3A1%2C28%3A1",
# "resultTotalSize": 2,
# "dataList": [
# {
# "symbol": "ETHPERP",
# "orderLinkId": "",
# "side": "Sell",
# "orderId": "d83f8b4d-2f60-4e04-a64a-a3f207989dc6",
# "execFee": "0.0210",
# "feeRate": "0.000600",
# "blockTradeId": "",
# "tradeTime": "1669196423581",
# "execPrice": "1161.45",
# "lastLiquidityInd": "TAKER",
# "execValue": "34.8435",
# "execType": "Trade",
# "execQty": "0.030",
# "tradeId": "d9aa8590-9e6a-575e-a1be-d6261e6ed2e5"
# }, ...
# ]
# },
# "retCode": 0,
# "retMsg": "Success."
# }
#
result = self.safe_value(response, 'result', {})
dataList = self.safe_value(result, 'dataList', [])
return self.parse_trades(dataList, market, since, limit)
    async def fetch_my_trades(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        fetch all trades made by the user
        dispatches to the unified / spot / unified-margin / USDC / contract helpers
        depending on the account mode and the market's settle currency
        :param str symbol: unified market symbol
        :param int|None since: the earliest time in ms to fetch trades for
        :param int|None limit: the maximum number of trades structures to retrieve
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns [dict]: a list of `trade structures <https://docs.ccxt.com/#/?id=trade-structure>`
        """
        await self.load_markets()
        market = None
        # an explicit params['settleCoin'] takes precedence over the exchange option
        settle = self.safe_string(params, 'settleCoin')
        if settle is None:
            settle, params = self.handle_option_and_params(params, 'fetchMyTrades', 'settle', settle)
        if symbol is not None:
            market = self.market(symbol)
            # the market's settle currency overrides any option/param default
            settle = market['settle']
        subType = None
        subType, params = self.handle_sub_type_and_params('fetchMyTrades', market, params)
        isInverse = subType == 'inverse'
        isUsdcSettled = settle == 'USDC'
        isLinearSettle = isUsdcSettled or (settle == 'USDT')
        # inverse contracts settle in the base coin, so a linear settle coin is contradictory
        if isInverse and isLinearSettle:
            raise ArgumentsRequired(self.id + ' fetchMyTrades with inverse subType requires settle to not be USDT or USDC')
        type, query = self.handle_market_type_and_params('fetchMyTrades', market, params)
        enableUnifiedMargin, enableUnifiedAccount = await self.is_unified_enabled()
        if enableUnifiedAccount and not isInverse:
            orderId = self.safe_string(params, 'orderId')
            # the unified endpoint requires a symbol unless an orderId filter or spot type is given
            if orderId is None and type != 'spot':
                self.check_required_symbol('fetchMyTrades', symbol)
            return await self.fetch_my_unified_trades(symbol, since, limit, query)
        elif type == 'spot':
            return await self.fetch_my_spot_trades(symbol, since, limit, query)
        elif enableUnifiedMargin and not isInverse:
            return await self.fetch_my_unified_margin_trades(symbol, since, limit, query)
        elif isUsdcSettled:
            return await self.fetch_my_usdc_trades(symbol, since, limit, query)
        else:
            return await self.fetch_my_contract_trades(symbol, since, limit, query)
def parse_deposit_address(self, depositAddress, currency=None):
#
# {
# chainType: 'ERC20',
# addressDeposit: '0xf56297c6717c1d1c42c30324468ed50a9b7402ee',
# tagDeposit: '',
# chain: 'ETH'
# }
#
address = self.safe_string(depositAddress, 'addressDeposit')
tag = self.safe_string(depositAddress, 'tagDeposit')
code = self.safe_string(currency, 'code')
chain = self.safe_string(depositAddress, 'chain')
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'network': chain,
'info': depositAddress,
}
async def fetch_deposit_addresses_by_network(self, code: str, params={}):
"""
fetch a dictionary of addresses for a currency, indexed by network
see https://bybit-exchange.github.io/docs/v5/asset/master-deposit-addr
:param str code: unified currency code of the currency for the deposit address
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: a dictionary of `address structures <https://docs.ccxt.com/#/?id=address-structure>` indexed by the network
"""
await self.load_markets()
currency = self.currency(code)
request = {
'coin': currency['id'],
}
response = await self.privateGetV5AssetDepositQueryAddress(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "success",
# "result": {
# "coin": "USDT",
# "chains": [
# {
# "chainType": "ERC20",
# "addressDeposit": "0xd9e1cd77afa0e50b452a62fbb68a3340602286c3",
# "tagDeposit": "",
# "chain": "ETH"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672192792860
# }
#
result = self.safe_value(response, 'result', [])
chains = self.safe_value(result, 'chains', [])
coin = self.safe_string(result, 'coin')
currency = self.currency(coin)
parsed = self.parse_deposit_addresses(chains, [currency['code']], False, {
'currency': currency['id'],
})
return self.index_by(parsed, 'network')
async def fetch_deposit_address(self, code: str, params={}):
"""
fetch the deposit address for a currency associated with self account
see https://bybit-exchange.github.io/docs/v5/asset/master-deposit-addr
:param str code: unified currency code
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: an `address structure <https://docs.ccxt.com/#/?id=address-structure>`
"""
await self.load_markets()
networkCode, query = self.handle_network_code_and_params(params)
networkId = self.network_code_to_id(networkCode)
currency = self.currency(code)
request = {
'coin': currency['id'],
}
if networkId is not None:
request['chainType'] = networkId
response = await self.privateGetV5AssetDepositQueryAddress(self.extend(request, query))
#
# {
# "retCode": 0,
# "retMsg": "success",
# "result": {
# "coin": "USDT",
# "chains": [
# {
# "chainType": "ERC20",
# "addressDeposit": "0xd9e1cd77afa0e50b452a62fbb68a3340602286c3",
# "tagDeposit": "",
# "chain": "ETH"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672192792860
# }
#
result = self.safe_value(response, 'result', {})
chains = self.safe_value(result, 'chains', [])
chainsIndexedById = self.index_by(chains, 'chain')
selectedNetworkId = self.select_network_id_from_raw_networks(code, networkCode, chainsIndexedById)
addressObject = self.safe_value(chainsIndexedById, selectedNetworkId, {})
return self.parse_deposit_address(addressObject, currency)
async def fetch_deposits(self, code: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetch all deposits made to an account
see https://bybit-exchange.github.io/docs/v5/asset/deposit-record
:param str|None code: unified currency code
:param int|None since: the earliest time in ms to fetch deposits for, default = 30 days before the current time
:param int|None limit: the maximum number of deposits structures to retrieve, default = 50, max = 50
:param dict params: extra parameters specific to the bybit api endpoint
:param int|None params['until']: the latest time in ms to fetch deposits for, default = 30 days after since
*
* EXCHANGE SPECIFIC PARAMETERS
:param str|None params['cursor']: used for pagination
:returns [dict]: a list of `transaction structures <https://docs.ccxt.com/#/?id=transaction-structure>`
"""
await self.load_markets()
request = {
# 'coin': currency['id'],
# 'limit': 20, # max 50
# 'cursor': '',
}
currency = None
if code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit
response = await self.privateGetV5AssetDepositQueryRecord(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "success",
# "result": {
# "rows": [
# {
# "coin": "USDT",
# "chain": "ETH",
# "amount": "10000",
# "txID": "skip-notification-scene-test-amount-202212270944-533285-USDT",
# "status": 3,
# "toAddress": "test-amount-address",
# "tag": "",
# "depositFee": "",
# "successAt": "1672134274000",
# "confirmations": "10000",
# "txIndex": "",
# "blockHash": ""
# }
# ],
# "nextPageCursor": "eyJtaW5JRCI6MTA0NjA0MywibWF4SUQiOjEwNDYwNDN9"
# },
# "retExtInfo": {},
# "time": 1672191992512
# }
#
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'rows', [])
return self.parse_transactions(data, currency, since, limit)
async def fetch_withdrawals(self, code: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetch all withdrawals made from an account
see https://bybit-exchange.github.io/docs/v5/asset/withdraw-record
:param str code: unified currency code
:param int|None since: the earliest time in ms to fetch withdrawals for
:param int|None limit: the maximum number of withdrawals structures to retrieve
:param dict params: extra parameters specific to the bybit api endpoint
:returns [dict]: a list of `transaction structures <https://docs.ccxt.com/#/?id=transaction-structure>`
"""
await self.load_markets()
request = {
# 'coin': currency['id'],
# 'limit': 20, # max 50
# 'cusor': '',
}
currency = None
if code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit
response = await self.privateGetV5AssetWithdrawQueryRecord(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "success",
# "result": {
# "rows": [
# {
# "coin": "USDT",
# "chain": "ETH",
# "amount": "77",
# "txID": "",
# "status": "SecurityCheck",
# "toAddress": "0x99ced129603abc771c0dabe935c326ff6c86645d",
# "tag": "",
# "withdrawFee": "10",
# "createTime": "1670922217000",
# "updateTime": "1670922217000",
# "withdrawId": "9976",
# "withdrawType": 0
# },
# {
# "coin": "USDT",
# "chain": "ETH",
# "amount": "26",
# "txID": "",
# "status": "success",
# "toAddress": "15638072681@163.com",
# "tag": "",
# "withdrawFee": "0",
# "createTime": "1669711121000",
# "updateTime": "1669711380000",
# "withdrawId": "9801",
# "withdrawType": 1
# }
# ],
# "nextPageCursor": "eyJtaW5JRCI6OTgwMSwibWF4SUQiOjk5NzZ9"
# },
# "retExtInfo": {},
# "time": 1672194949928
# }
#
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'rows', [])
return self.parse_transactions(data, currency, since, limit)
    def parse_transaction_status(self, status):
        # Map a raw bybit deposit/withdrawal status to a unified ccxt status;
        # unrecognized values are passed through unchanged.
        statuses = {
            # v1/v2
            'ToBeConfirmed': 'pending',
            'UnderReview': 'pending',
            'Success': 'ok',
            'Expire': 'expired',
            # v3 deposit status(numeric codes delivered as strings)
            '0': 'unknown',
            '1': 'pending',
            '2': 'processing',
            '3': 'ok',
            '4': 'fail',
            # v3 withdrawal status
            'SecurityCheck': 'pending',
            'Pending': 'pending',
            'success': 'ok',
            'CancelByUser': 'canceled',
            'Reject': 'rejected',
            'Fail': 'failed',
            'BlockchainConfirmed': 'ok',
        }
        return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# fetchWithdrawals
#
# {
# "coin": "USDT",
# "chain": "TRX",
# "amount": "12.34",
# "txID": "de5ea0a2f2e59dc9a714837dd3ddc6d5e151b56ec5d786d351c4f52336f80d3c",
# "status": "success",
# "toAddress": "TQdmFKUoe1Lk2iwZuwRJEHJreTUBoN3BAw",
# "tag": "",
# "withdrawFee": "0.5",
# "createTime": "1665144183000",
# "updateTime": "1665144256000",
# "withdrawId": "8839035"
# }
#
# fetchDeposits
#
# {
# "coin": "USDT",
# "chain": "TRX",
# "amount": "44",
# "txID": "0b038ea12fa1575e2d66693db3c346b700d4b28347afc39f80321cf089acc960",
# "status": "3",
# "toAddress": "TC6NCAC5WSVCCiaD3kWZXyW91ZKKhLm53b",
# "tag": "",
# "depositFee": "",
# "successAt": "1665142507000",
# "confirmations": "100",
# "txIndex": "0",
# "blockHash": "0000000002ac3b1064aee94bca1bd0b58c4c09c65813b084b87a2063d961129e"
# }
#
# withdraw
#
# {
# "id": "9377266"
# }
#
currencyId = self.safe_string(transaction, 'coin')
code = self.safe_currency_code(currencyId, currency)
timestamp = self.safe_integer_2(transaction, 'createTime', 'successAt')
updated = self.safe_integer(transaction, 'updateTime')
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
feeCost = self.safe_number_2(transaction, 'depositFee', 'withdrawFee', 0)
type = 'deposit' if ('depositFee' in transaction) else 'withdrawal'
fee = None
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': code,
}
toAddress = self.safe_string(transaction, 'toAddress')
return {
'info': transaction,
'id': self.safe_string_2(transaction, 'id', 'withdrawId'),
'txid': self.safe_string(transaction, 'txID'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'network': self.network_id_to_code(self.safe_string(transaction, 'chain')),
'address': None,
'addressTo': toAddress,
'addressFrom': None,
'tag': self.safe_string(transaction, 'tag'),
'tagTo': None,
'tagFrom': None,
'type': type,
'amount': self.safe_number(transaction, 'amount'),
'currency': code,
'status': status,
'updated': updated,
'fee': fee,
}
async def fetch_ledger(self, code: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetch the history of changes, actions done by the user or operations that altered balance of the user
see https://bybit-exchange.github.io/docs/v5/account/transaction-log
:param str|None code: unified currency code, default is None
:param int|None since: timestamp in ms of the earliest ledger entry, default is None
:param int|None limit: max number of ledger entrys to return, default is None
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: a `ledger structure <https://docs.ccxt.com/#/?id=ledger-structure>`
"""
await self.load_markets()
request = {
# 'coin': currency['id'],
# 'currency': currency['id'], # alias
# 'start_date': self.iso8601(since),
# 'end_date': self.iso8601(till),
# 'wallet_fund_type': 'Deposit', # Withdraw, RealisedPNL, Commission, Refund, Prize, ExchangeOrderWithdraw, ExchangeOrderDeposit
# 'page': 1,
# 'limit': 20, # max 50
# v5 transaction log
# 'accountType': '', Account Type. UNIFIED
# 'category': '', Product type. spot,linear,option
# 'currency': '', Currency
# 'baseCoin': '', BaseCoin. e.g., BTC of BTCPERP
# 'type': '', Types of transaction logs
# 'startTime': 0, The start timestamp(ms)
# 'endTime': 0, The end timestamp(ms)
# 'limit': 0, Limit for data size per page. [1, 50]. Default: 20
# 'cursor': '', Cursor. Used for pagination
}
enableUnified = await self.is_unified_enabled()
currency = None
currencyKey = 'coin'
if enableUnified[1]:
currencyKey = 'currency'
if since is not None:
request['startTime'] = since
else:
if since is not None:
request['start_date'] = self.yyyymmdd(since)
method = 'privateGetV5AccountTransactionLog' if (enableUnified[1]) else 'privateGetV2PrivateWalletFundRecords'
if code is not None:
currency = self.currency(code)
request[currencyKey] = currency['id']
if limit is not None:
request['limit'] = limit
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {
# "data": [
# {
# "id": 234467,
# "user_id": 1,
# "coin": "BTC",
# "wallet_id": 27913,
# "type": "Realized P&L",
# "amount": "-0.00000006",
# "tx_id": "",
# "address": "BTCUSD",
# "wallet_balance": "0.03000330",
# "exec_time": "2019-12-09T00:00:25.000Z",
# "cross_seq": 0
# }
# ]
# },
# "ext_info": null,
# "time_now": "1577481867.115552",
# "rate_limit_status": 119,
# "rate_limit_reset_ms": 1577481867122,
# "rate_limit": 120
# }
#
# v5 transaction log
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "nextPageCursor": "21963%3A1%2C14954%3A1",
# "list": [
# {
# "symbol": "XRPUSDT",
# "side": "Buy",
# "funding": "-0.003676",
# "orderLinkId": "",
# "orderId": "1672128000-8-592324-1-2",
# "fee": "0.00000000",
# "change": "-0.003676",
# "cashFlow": "0",
# "transactionTime": "1672128000000",
# "type": "SETTLEMENT",
# "feeRate": "0.0001",
# "size": "100",
# "qty": "100",
# "cashBalance": "5086.55825002",
# "currency": "USDT",
# "category": "linear",
# "tradePrice": "0.3676",
# "tradeId": "534c0003-4bf7-486f-aa02-78cee36825e4"
# },
# {
# "symbol": "XRPUSDT",
# "side": "Buy",
# "funding": "",
# "orderLinkId": "linear-order",
# "orderId": "592b7e41-78fd-42e2-9aa3-91e1835ef3e1",
# "fee": "0.01908720",
# "change": "-0.0190872",
# "cashFlow": "0",
# "transactionTime": "1672121182224",
# "type": "TRADE",
# "feeRate": "0.0006",
# "size": "100",
# "qty": "88",
# "cashBalance": "5086.56192602",
# "currency": "USDT",
# "category": "linear",
# "tradePrice": "0.3615",
# "tradeId": "5184f079-88ec-54c7-8774-5173cafd2b4e"
# },
# {
# "symbol": "XRPUSDT",
# "side": "Buy",
# "funding": "",
# "orderLinkId": "linear-order",
# "orderId": "592b7e41-78fd-42e2-9aa3-91e1835ef3e1",
# "fee": "0.00260280",
# "change": "-0.0026028",
# "cashFlow": "0",
# "transactionTime": "1672121182224",
# "type": "TRADE",
# "feeRate": "0.0006",
# "size": "12",
# "qty": "12",
# "cashBalance": "5086.58101322",
# "currency": "USDT",
# "category": "linear",
# "tradePrice": "0.3615",
# "tradeId": "8569c10f-5061-5891-81c4-a54929847eb3"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672132481405
# }
#
result = self.safe_value(response, 'result', {})
data = self.safe_value_2(result, 'data', 'list', [])
return self.parse_ledger(data, currency, since, limit)
def parse_ledger_entry(self, item, currency=None):
#
# {
# "id": 234467,
# "user_id": 1,
# "coin": "BTC",
# "wallet_id": 27913,
# "type": "Realized P&L",
# "amount": "-0.00000006",
# "tx_id": "",
# "address": "BTCUSD",
# "wallet_balance": "0.03000330",
# "exec_time": "2019-12-09T00:00:25.000Z",
# "cross_seq": 0
# }
#
# {
# "symbol": "XRPUSDT",
# "side": "Buy",
# "funding": "",
# "orderLinkId": "linear-order",
# "orderId": "592b7e41-78fd-42e2-9aa3-91e1835ef3e1",
# "fee": "0.00260280",
# "change": "-0.0026028",
# "cashFlow": "0",
# "transactionTime": "1672121182224",
# "type": "TRADE",
# "feeRate": "0.0006",
# "size": "12",
# "qty": "12",
# "cashBalance": "5086.58101322",
# "currency": "USDT",
# "category": "linear",
# "tradePrice": "0.3615",
# "tradeId": "8569c10f-5061-5891-81c4-a54929847eb3"
# }
#
currencyId = self.safe_string_2(item, 'coin', 'currency')
code = self.safe_currency_code(currencyId, currency)
amount = self.safe_string_2(item, 'amount', 'change')
after = self.safe_string_2(item, 'wallet_balance', 'cashBalance')
direction = 'out' if Precise.string_lt(amount, '0') else 'in'
before = None
if after is not None and amount is not None:
difference = amount if (direction == 'out') else Precise.string_neg(amount)
before = Precise.string_add(after, difference)
timestamp = self.parse8601(self.safe_string(item, 'exec_time'))
if timestamp is None:
timestamp = self.safe_integer(item, 'transactionTime')
type = self.parse_ledger_entry_type(self.safe_string(item, 'type'))
id = self.safe_string(item, 'id')
referenceId = self.safe_string(item, 'tx_id')
return {
'id': id,
'currency': code,
'account': self.safe_string(item, 'wallet_id'),
'referenceAccount': None,
'referenceId': referenceId,
'status': None,
'amount': self.parse_number(amount),
'before': self.parse_number(before),
'after': self.parse_number(after),
'fee': self.parse_number(self.safe_string(item, 'fee')),
'direction': direction,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'type': type,
'info': item,
}
def parse_ledger_entry_type(self, type):
types = {
'Deposit': 'transaction',
'Withdraw': 'transaction',
'RealisedPNL': 'trade',
'Commission': 'fee',
'Refund': 'cashback',
'Prize': 'prize', # ?
'ExchangeOrderWithdraw': 'transaction',
'ExchangeOrderDeposit': 'transaction',
# v5
'TRANSFER_IN': 'transaction',
'TRANSFER_OUT': 'transaction',
'TRADE': 'trade',
'SETTLEMENT': 'trade',
'DELIVERY': 'trade',
'LIQUIDATION': 'trade',
'BONUS': 'Prize',
'FEE_REFUND': 'cashback',
'INTEREST': 'transaction',
'CURRENCY_BUY': 'trade',
'CURRENCY_SELL': 'trade',
}
return self.safe_string(types, type, type)
async def withdraw(self, code: str, amount, address, tag=None, params={}):
"""
make a withdrawal
see https://bybit-exchange.github.io/docs/v5/asset/withdraw
:param str code: unified currency code
:param float amount: the amount to withdraw
:param str address: the address to withdraw to
:param str|None tag:
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: a `transaction structure <https://docs.ccxt.com/#/?id=transaction-structure>`
"""
tag, params = self.handle_withdraw_tag_and_params(tag, params)
await self.load_markets()
self.check_address(address)
currency = self.currency(code)
request = {
'coin': currency['id'],
'amount': self.number_to_string(amount),
'address': address,
}
if tag is not None:
request['tag'] = tag
networkCode, query = self.handle_network_code_and_params(params)
networkId = self.network_code_to_id(networkCode)
if networkId is not None:
request['chain'] = networkId.upper()
enableUnified = await self.is_unified_enabled()
method = 'privatePostV5AssetWithdrawCreate' if (enableUnified[1]) else 'privatePostAssetV3PrivateWithdrawCreate'
response = await getattr(self, method)(self.extend(request, query))
#
# {
# "retCode": "0",
# "retMsg": "success",
# "result": {
# "id": "9377266"
# },
# "retExtInfo": {},
# "time": "1666892894902"
# }
#
result = self.safe_value(response, 'result', {})
return self.parse_transaction(result, currency)
async def fetch_position(self, symbol: str, params={}):
"""
fetch data on a single open contract trade position
:param str symbol: unified market symbol of the market the position is held in, default is None
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: a `position structure <https://docs.ccxt.com/#/?id=position-structure>`
"""
self.check_required_symbol('fetchPosition', symbol)
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
method = None
enableUnifiedMargin, enableUnifiedAccount = await self.is_unified_enabled()
isUsdcSettled = market['settle'] == 'USDC'
if enableUnifiedMargin or enableUnifiedAccount:
method = 'privateGetV5PositionList' if (enableUnifiedAccount) else 'privateGetUnifiedV3PrivatePositionList'
if market['option']:
request['category'] = 'option'
elif market['linear']:
request['category'] = 'linear'
else:
raise NotSupported(self.id + ' fetchPosition() does not allow inverse market orders for ' + symbol + ' markets')
elif isUsdcSettled:
method = 'privatePostOptionUsdcOpenapiPrivateV1QueryPosition'
if market['option']:
request['category'] = 'OPTION'
elif market['linear']:
request['category'] = 'PERPETUAL'
else:
if market['linear']:
request['category'] = 'linear'
elif market['inverse']:
request['category'] = 'inverse'
else:
raise NotSupported(self.id + ' fetchPosition() does not allow option market orders for ' + symbol + ' markets')
method = 'privateGetV5PositionList'
response = await getattr(self, method)(self.extend(request, params))
#
# unified account
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "nextPageCursor": "updateAt%3D1672279322668",
# "category": "linear",
# "list": [
# {
# "symbol": "XRPUSDT",
# "leverage": "10",
# "avgPrice": "0.3615",
# "liqPrice": "0.0001",
# "riskLimitValue": "200000",
# "takeProfit": "",
# "positionValue": "36.15",
# "tpslMode": "Full",
# "riskId": 41,
# "trailingStop": "0",
# "unrealisedPnl": "-1.83",
# "markPrice": "0.3432",
# "cumRealisedPnl": "0.48805876",
# "positionMM": "0.381021",
# "createdTime": "1672121182216",
# "positionIdx": 0,
# "positionIM": "3.634521",
# "updatedTime": "1672279322668",
# "side": "Buy",
# "bustPrice": "",
# "size": "100",
# "positionStatus": "Normal",
# "stopLoss": "",
# "tradeMode": 0
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672280219169
# }
#
# unified margin
#
# {
# "retCode": 0,
# "retMsg": "Success",
# "result": {
# "nextPageCursor": "0%3A1657711949945%2C0%3A1657711949945",
# "category": "linear",
# "list": [
# {
# "symbol": "ETHUSDT",
# "leverage": "10",
# "updatedTime": 1657711949945,
# "side": "Buy",
# "positionValue": "536.92500000",
# "takeProfit": "",
# "tpslMode": "Full",
# "riskId": 11,
# "trailingStop": "",
# "entryPrice": "1073.85000000",
# "unrealisedPnl": "",
# "markPrice": "1080.65000000",
# "size": "0.5000",
# "positionStatus": "normal",
# "stopLoss": "",
# "cumRealisedPnl": "-0.32215500",
# "positionMM": "2.97456450",
# "createdTime": 1657711949928,
# "positionIdx": 0,
# "positionIM": "53.98243950"
# }
# ]
# },
# "time": 1657713693182
# }
#
# contract v3
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "list": [
# {
# "positionIdx": 1,
# "riskId": "41",
# "symbol": "XRPUSDT",
# "side": "Buy",
# "size": "0",
# "positionValue": "0",
# "entryPrice": "0",
# "tradeMode": 0,
# "autoAddMargin": 0,
# "leverage": "10",
# "positionBalance": "0",
# "liqPrice": "0.0000",
# "bustPrice": "0.0000",
# "takeProfit": "0.0000",
# "stopLoss": "0.0000",
# "trailingStop": "0.0000",
# "unrealisedPnl": "0",
# "createdTime": "1658827444328",
# "updatedTime": "1658904863412",
# "tpSlMode": "Full",
# "riskLimitValue": "200000",
# "activePrice": "0.0000"
# },
# {
# "positionIdx": 2,
# "riskId": "41",
# "symbol": "XRPUSDT",
# "side": "Sell",
# "size": "50",
# "positionValue": "16.68",
# "entryPrice": "0.3336",
# "tradeMode": 0,
# "autoAddMargin": 0,
# "leverage": "10",
# "positionBalance": "1.6790088",
# "liqPrice": "12.4835",
# "bustPrice": "12.4869",
# "takeProfit": "0.0000",
# "stopLoss": "0.0000",
# "trailingStop": "0.0000",
# "unrealisedPnl": "0",
# "createdTime": "1658827444328",
# "updatedTime": "1658904863412",
# "tpSlMode": "Full",
# "riskLimitValue": "200000",
# "activePrice": "0.0000"
# }
# ]
# },
# "retExtInfo": null,
# "time": 1658904877942
# }
#
result = self.safe_value(response, 'result', {})
positions = self.safe_value_2(result, 'list', 'dataList', [])
timestamp = self.safe_integer(response, 'time')
first = self.safe_value(positions, 0)
position = self.parse_position(first, market)
return self.extend(position, {
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
})
async def fetch_unified_positions(self, symbols: Optional[List[str]] = None, params={}):
await self.load_markets()
request = {}
type = None
settle = None
enableUnified = await self.is_unified_enabled()
if isinstance(symbols, list):
symbolsLength = len(symbols)
if symbolsLength > 1:
raise ArgumentsRequired(self.id + ' fetchPositions() does not accept an array with more than one symbol')
market = self.market(symbols[0])
settle = market['settle']
elif symbols is not None:
symbols = [symbols]
symbols = self.market_symbols(symbols)
if symbols is None:
settle, params = self.handle_option_and_params(params, 'fetchPositions', 'settle', 'USDT')
else:
first = self.safe_value(symbols, 0)
market = self.market(first)
settle = market['settle']
request['symbol'] = market['id']
if enableUnified[1]:
request['settleCoin'] = settle
request['limit'] = 200
# market None
type, params = self.handle_market_type_and_params('fetchPositions', None, params)
subType = None
subType, params = self.handle_sub_type_and_params('fetchPositions', None, params, 'linear')
request['category'] = subType
if type == 'option':
request['category'] = 'option'
method = 'privateGetV5PositionList' if (enableUnified[1]) else 'privateGetUnifiedV3PrivatePositionList'
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "Success",
# "result": {
# "nextPageCursor": "0%3A1657711949945%2C0%3A1657711949945",
# "category": "linear",
# "list": [
# {
# "symbol": "ETHUSDT",
# "leverage": "10",
# "updatedTime": 1657711949945,
# "side": "Buy",
# "positionValue": "536.92500000",
# "takeProfit": "",
# "tpslMode": "Full",
# "riskId": 11,
# "trailingStop": "",
# "entryPrice": "1073.85000000",
# "unrealisedPnl": "",
# "markPrice": "1080.65000000",
# "size": "0.5000",
# "positionStatus": "normal",
# "stopLoss": "",
# "cumRealisedPnl": "-0.32215500",
# "positionMM": "2.97456450",
# "createdTime": 1657711949928,
# "positionIdx": 0,
# "positionIM": "53.98243950"
# }
# ]
# },
# "time": 1657713693182
# }
#
result = self.safe_value(response, 'result', {})
positions = self.safe_value(result, 'list', [])
results = []
for i in range(0, len(positions)):
rawPosition = positions[i]
if ('data' in rawPosition) and ('is_valid' in rawPosition):
# futures only
rawPosition = self.safe_value(rawPosition, 'data')
results.append(self.parse_position(rawPosition))
return self.filter_by_array(results, 'symbol', symbols, False)
async def fetch_usdc_positions(self, symbols: Optional[List[str]] = None, params={}):
await self.load_markets()
symbols = self.market_symbols(symbols)
request = {}
market = None
type = None
if isinstance(symbols, list):
length = len(symbols)
if length != 1:
raise ArgumentsRequired(self.id + ' fetchUSDCPositions() takes an array with exactly one symbol')
symbol = self.safe_string(symbols, 0)
market = self.market(symbol)
request['symbol'] = market['id']
elif symbols is not None:
market = self.market(symbols)
request['symbol'] = market['id']
type, params = self.handle_market_type_and_params('fetchUSDCPositions', market, params)
request['category'] = 'OPTION' if (type == 'option') else 'PERPETUAL'
response = await self.privatePostOptionUsdcOpenapiPrivateV1QueryPosition(self.extend(request, params))
#
# {
# "result": {
# "cursor": "BTC-31DEC21-24000-P%3A1640834421431%2CBTC-31DEC21-24000-P%3A1640834421431",
# "resultTotalSize": 1,
# "dataList": [
# {
# "symbol": "BTC-31DEC21-24000-P",
# "leverage": "",
# "occClosingFee": "",
# "liqPrice": "",
# "positionValue": "",
# "takeProfit": "",
# "riskId": "",
# "trailingStop": "",
# "unrealisedPnl": "",
# "createdAt": "1640834421431",
# "markPrice": "0.00",
# "cumRealisedPnl": "",
# "positionMM": "359.5271",
# "positionIM": "467.0633",
# "updatedAt": "1640834421431",
# "tpSLMode": "",
# "side": "Sell",
# "bustPrice": "",
# "deleverageIndicator": 0,
# "entryPrice": "1.4",
# "size": "-0.100",
# "sessionRPL": "",
# "positionStatus": "",
# "sessionUPL": "",
# "stopLoss": "",
# "orderMargin": "",
# "sessionAvgPrice": "1.5"
# }
# ]
# },
# "retCode": 0,
# "retMsg": "Success."
# }
#
result = self.safe_value(response, 'result', {})
positions = self.safe_value(result, 'dataList', [])
results = []
for i in range(0, len(positions)):
rawPosition = positions[i]
if ('data' in rawPosition) and ('is_valid' in rawPosition):
# futures only
rawPosition = self.safe_value(rawPosition, 'data')
results.append(self.parse_position(rawPosition, market))
return self.filter_by_array(results, 'symbol', symbols, False)
async def fetch_derivatives_positions(self, symbols: Optional[List[str]] = None, params={}):
await self.load_markets()
request = {}
market = None
settle = None
if isinstance(symbols, list):
symbolsLength = len(symbols)
if symbolsLength > 1:
raise ArgumentsRequired(self.id + ' fetchPositions() does not accept an array with more than one symbol')
if symbolsLength == 1:
market = self.market(symbols[0])
settle = market['settle']
request['symbol'] = market['id']
settle, params = self.handle_option_and_params(params, 'fetchPositions', 'settle', settle)
if settle is not None:
request['settleCoin'] = settle
subType = None
subType, params = self.handle_sub_type_and_params('fetchPositions', market, params, 'linear')
request['category'] = subType
response = await self.privateGetV5PositionList(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "nextPageCursor": "updateAt%3D1672279322668",
# "category": "linear",
# "list": [
# {
# "symbol": "XRPUSDT",
# "leverage": "10",
# "avgPrice": "0.3615",
# "liqPrice": "0.0001",
# "riskLimitValue": "200000",
# "takeProfit": "",
# "positionValue": "36.15",
# "tpslMode": "Full",
# "riskId": 41,
# "trailingStop": "0",
# "unrealisedPnl": "-1.83",
# "markPrice": "0.3432",
# "cumRealisedPnl": "0.48805876",
# "positionMM": "0.381021",
# "createdTime": "1672121182216",
# "positionIdx": 0,
# "positionIM": "3.634521",
# "updatedTime": "1672279322668",
# "side": "Buy",
# "bustPrice": "",
# "size": "100",
# "positionStatus": "Normal",
# "stopLoss": "",
# "tradeMode": 0
# }
# ]
# },
# "retExtInfo": {},
# "time": 1672280219169
# }
#
result = self.safe_value(response, 'result', {})
positions = self.safe_value(result, 'list', [])
return self.parse_positions(positions, symbols, params)
    async def fetch_positions(self, symbols: Optional[List[str]] = None, params={}):
        """
        fetch all open positions
        :param [str]|None symbols: list of unified market symbols; at most one symbol is accepted
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns [dict]: a list of `position structure <https://docs.ccxt.com/#/?id=position-structure>`
        """
        if isinstance(symbols, list):
            symbolsLength = len(symbols)
            if symbolsLength > 1:
                raise ArgumentsRequired(self.id + ' fetchPositions() does not accept an array with more than one symbol')
        elif symbols is not None:
            # a bare string symbol is normalized to a one-element list
            symbols = [symbols]
        await self.load_markets()
        symbols = self.market_symbols(symbols)
        enableUnifiedMargin, enableUnifiedAccount = await self.is_unified_enabled()
        settle = self.safe_string(params, 'settleCoin')
        # NOTE(review): when 'settleCoin' IS present in params, paramsOmitted stays None and
        # is later passed to handle_sub_type_and_params / fetch_usdc_positions — confirm
        # downstream helpers tolerate None params
        paramsOmitted = None
        if settle is None:
            settle, paramsOmitted = self.handle_option_and_params(params, 'fetchPositions', 'settle', settle)
        isUsdcSettled = settle == 'USDC'
        subType = None
        subType, paramsOmitted = self.handle_sub_type_and_params('fetchPositions', None, paramsOmitted)
        isInverse = subType == 'inverse'
        isLinearSettle = isUsdcSettled or (settle == 'USDT')
        if isInverse and isLinearSettle:
            raise ArgumentsRequired(self.id + ' fetchPositions with inverse subType requires settle to not be USDT or USDC')
        # route to the unified, usdc, or plain derivatives implementation
        if (enableUnifiedMargin or enableUnifiedAccount) and not isInverse:
            return await self.fetch_unified_positions(symbols, params)
        elif isUsdcSettled:
            return await self.fetch_usdc_positions(symbols, paramsOmitted)
        else:
            return await self.fetch_derivatives_positions(symbols, params)
    def parse_position(self, position, market=None):
        """
        parse a raw exchange position(linear swap, usdc, unified margin, or unified
        account format - see the example payloads below) into a unified position structure

        derives maintenance/initial margin and collateral from liquidation and bust
        prices when they are not reported directly
        :param dict position: the raw position entry from the exchange response
        :param dict|None market: the market the position belongs to, resolved from the symbol when None
        :returns dict: a `position structure <https://docs.ccxt.com/#/?id=position-structure>`
        """
        #
        # linear swap
        #
        # {
        #     "positionIdx": 0,
        #     "riskId": "11",
        #     "symbol": "ETHUSDT",
        #     "side": "Buy",
        #     "size": "0.10",
        #     "positionValue": "119.845",
        #     "entryPrice": "1198.45",
        #     "tradeMode": 1,
        #     "autoAddMargin": 0,
        #     "leverage": "4.2",
        #     "positionBalance": "28.58931118",
        #     "liqPrice": "919.10",
        #     "bustPrice": "913.15",
        #     "takeProfit": "0.00",
        #     "stopLoss": "0.00",
        #     "trailingStop": "0.00",
        #     "unrealisedPnl": "0.083",
        #     "createdTime": "1669097244192",
        #     "updatedTime": "1669413126190",
        #     "tpSlMode": "Full",
        #     "riskLimitValue": "900000",
        #     "activePrice": "0.00"
        # }
        #
        # usdc
        # {
        #   "symbol":"BTCPERP",
        #   "leverage":"1.00",
        #   "occClosingFee":"0.0000",
        #   "liqPrice":"",
        #   "positionValue":"30.8100",
        #   "takeProfit":"0.0",
        #   "riskId":"10001",
        #   "trailingStop":"0.0000",
        #   "unrealisedPnl":"0.0000",
        #   "createdAt":"1652451795305",
        #   "markPrice":"30809.41",
        #   "cumRealisedPnl":"0.0000",
        #   "positionMM":"0.1541",
        #   "positionIM":"30.8100",
        #   "updatedAt":"1652451795305",
        #   "tpSLMode":"UNKNOWN",
        #   "side":"Buy",
        #   "bustPrice":"",
        #   "deleverageIndicator":"0",
        #   "entryPrice":"30810.0",
        #   "size":"0.001",
        #   "sessionRPL":"0.0000",
        #   "positionStatus":"NORMAL",
        #   "sessionUPL":"-0.0006",
        #   "stopLoss":"0.0",
        #   "orderMargin":"0.0000",
        #   "sessionAvgPrice":"30810.0"
        # }
        #
        # unified margin
        #
        # {
        #     "symbol": "ETHUSDT",
        #     "leverage": "10",
        #     "updatedTime": 1657711949945,
        #     "side": "Buy",
        #     "positionValue": "536.92500000",
        #     "takeProfit": "",
        #     "tpslMode": "Full",
        #     "riskId": 11,
        #     "trailingStop": "",
        #     "entryPrice": "1073.85000000",
        #     "unrealisedPnl": "",
        #     "markPrice": "1080.65000000",
        #     "size": "0.5000",
        #     "positionStatus": "normal",
        #     "stopLoss": "",
        #     "cumRealisedPnl": "-0.32215500",
        #     "positionMM": "2.97456450",
        #     "createdTime": 1657711949928,
        #     "positionIdx": 0,
        #     "positionIM": "53.98243950"
        # }
        #
        # unified account
        #
        # {
        #     "symbol": "XRPUSDT",
        #     "leverage": "10",
        #     "avgPrice": "0.3615",
        #     "liqPrice": "0.0001",
        #     "riskLimitValue": "200000",
        #     "takeProfit": "",
        #     "positionValue": "36.15",
        #     "tpslMode": "Full",
        #     "riskId": 41,
        #     "trailingStop": "0",
        #     "unrealisedPnl": "-1.83",
        #     "markPrice": "0.3432",
        #     "cumRealisedPnl": "0.48805876",
        #     "positionMM": "0.381021",
        #     "createdTime": "1672121182216",
        #     "positionIdx": 0,
        #     "positionIM": "3.634521",
        #     "updatedTime": "1672279322668",
        #     "side": "Buy",
        #     "bustPrice": "",
        #     "size": "100",
        #     "positionStatus": "Normal",
        #     "stopLoss": "",
        #     "tradeMode": 0
        # }
        #
        contract = self.safe_string(position, 'symbol')
        market = self.safe_market(contract, market, None, 'contract')
        size = Precise.string_abs(self.safe_string(position, 'size'))
        side = self.safe_string(position, 'side')
        if side is not None:
            if side == 'Buy':
                side = 'long'
            elif side == 'Sell':
                side = 'short'
            else:
                # unexpected side values are dropped rather than passed through
                side = None
        notional = self.safe_string(position, 'positionValue')
        unrealisedPnl = self.omit_zero(self.safe_string(position, 'unrealisedPnl'))
        initialMarginString = self.safe_string(position, 'positionIM')
        maintenanceMarginString = self.safe_string(position, 'positionMM')
        # NOTE(review): 'updated_at' does not appear in any of the payloads documented
        # above - presumably a legacy(v1) datetime-string fallback; confirm it is still needed
        timestamp = self.parse8601(self.safe_string(position, 'updated_at'))
        if timestamp is None:
            timestamp = self.safe_integer_n(position, ['updatedTime', 'updatedAt'])
        # default to cross of USDC margined positions
        tradeMode = self.safe_integer(position, 'tradeMode', 0)
        marginMode = 'isolated' if tradeMode else 'cross'
        collateralString = self.safe_string(position, 'positionBalance')
        entryPrice = self.omit_zero(self.safe_string_2(position, 'entryPrice', 'avgPrice'))
        liquidationPrice = self.omit_zero(self.safe_string(position, 'liqPrice'))
        leverage = self.safe_string(position, 'leverage')
        if liquidationPrice is not None:
            if market['settle'] == 'USDC':
                # (Entry price - Liq price) * Contracts + Maintenance Margin + (unrealised pnl) = Collateral
                difference = Precise.string_abs(Precise.string_sub(entryPrice, liquidationPrice))
                collateralString = Precise.string_add(Precise.string_add(Precise.string_mul(difference, size), maintenanceMarginString), unrealisedPnl)
            else:
                bustPrice = self.safe_string(position, 'bustPrice')
                if market['linear']:
                    # derived from the following formulas
                    #     (Entry price - Bust price) * Contracts = Collateral
                    #     (Entry price - Liq price) * Contracts = Collateral - Maintenance Margin
                    # Maintenance Margin = (Bust price - Liq price) x Contracts
                    maintenanceMarginPriceDifference = Precise.string_abs(Precise.string_sub(liquidationPrice, bustPrice))
                    maintenanceMarginString = Precise.string_mul(maintenanceMarginPriceDifference, size)
                    # Initial Margin = Contracts x Entry Price / Leverage
                    if entryPrice is not None:
                        initialMarginString = Precise.string_div(Precise.string_mul(size, entryPrice), leverage)
                else:
                    # inverse contracts: position value is Contracts / price
                    # Contracts * (1 / Entry price - 1 / Bust price) = Collateral
                    # Contracts * (1 / Entry price - 1 / Liq price) = Collateral - Maintenance Margin
                    # Maintenance Margin = Contracts * (1 / Liq price - 1 / Bust price)
                    # Maintenance Margin = Contracts * (Bust price - Liq price) / (Liq price x Bust price)
                    difference = Precise.string_abs(Precise.string_sub(bustPrice, liquidationPrice))
                    multiply = Precise.string_mul(bustPrice, liquidationPrice)
                    maintenanceMarginString = Precise.string_div(Precise.string_mul(size, difference), multiply)
                    # Initial Margin = Contracts / (Entry Price x Leverage)
                    if entryPrice is not None:
                        initialMarginString = Precise.string_div(size, Precise.string_mul(entryPrice, leverage))
        maintenanceMarginPercentage = Precise.string_div(maintenanceMarginString, notional)
        # margin ratio rounded to 4 decimal places
        marginRatio = Precise.string_div(maintenanceMarginString, collateralString, 4)
        return self.safe_position({
            'info': position,
            'id': None,
            'symbol': market['symbol'],
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastUpdateTimestamp': None,
            'initialMargin': self.parse_number(initialMarginString),
            'initialMarginPercentage': self.parse_number(Precise.string_div(initialMarginString, notional)),
            'maintenanceMargin': self.parse_number(maintenanceMarginString),
            'maintenanceMarginPercentage': self.parse_number(maintenanceMarginPercentage),
            'entryPrice': self.parse_number(entryPrice),
            'notional': self.parse_number(notional),
            'leverage': self.parse_number(leverage),
            'unrealizedPnl': self.parse_number(unrealisedPnl),
            'contracts': self.parse_number(size),  # in USD for inverse swaps
            'contractSize': self.safe_number(market, 'contractSize'),
            'marginRatio': self.parse_number(marginRatio),
            'liquidationPrice': self.parse_number(liquidationPrice),
            'markPrice': self.safe_number(position, 'markPrice'),
            'lastPrice': None,
            'collateral': self.parse_number(collateralString),
            'marginMode': marginMode,
            'side': side,
            'percentage': None,
        })
async def set_margin_mode(self, marginMode, symbol: Optional[str] = None, params={}):
await self.load_markets()
values = await self.is_unified_enabled()
isUnifiedAccount = self.safe_value(values, 1)
if isUnifiedAccount:
return await self.set_unified_margin_mode(marginMode, symbol, params)
return await self.set_derivatives_margin_mode(marginMode, symbol, params)
async def set_unified_margin_mode(self, marginMode, symbol: Optional[str] = None, params={}):
await self.load_markets()
if (marginMode != 'REGULAR_MARGIN') and (marginMode != 'PORTFOLIO_MARGIN'):
raise BadRequest(self.id + ' setMarginMode() marginMode must be either REGULAR_MARGIN or PORTFOLIO_MARGIN')
request = {
'setMarginMode': marginMode,
}
response = await self.privatePostV5AccountSetMarginMode(self.extend(request, params))
#
# {
# "setMarginMode": "PORTFOLIO_MARGIN"
# }
#
return response
    async def set_derivatives_margin_mode(self, marginMode, symbol: Optional[str] = None, params={}):
        """
        switch a derivatives position between isolated and cross margin; the endpoint
        also requires buy/sell leverage values which must be supplied via params
        :param str marginMode: 'isolated' or 'cross'(case-insensitive)
        :param str symbol: unified market symbol, required; USDC-settled markets are not supported
        :param dict params: must contain either 'leverage', or 'buy_leverage'/'buyLeverage'
            and/or 'sell_leverage'/'sellLeverage'(a missing side defaults to the other)
        :returns dict: response from the exchange
        """
        self.check_required_symbol('setMarginMode', symbol)
        await self.load_markets()
        market = self.market(symbol)
        if market['settle'] == 'USDC':
            raise NotSupported(self.id + ' setMarginMode() does not support market ' + symbol + '')
        marginMode = marginMode.upper()
        if (marginMode != 'ISOLATED') and (marginMode != 'CROSS'):
            raise BadRequest(self.id + ' setMarginMode() marginMode must be either isolated or cross')
        leverage = self.safe_string(params, 'leverage')
        sellLeverage = None
        buyLeverage = None
        if leverage is None:
            # no single 'leverage' given: fall back to per-side values,
            # mirroring a missing side from the other one
            sellLeverage = self.safe_string_2(params, 'sell_leverage', 'sellLeverage')
            buyLeverage = self.safe_string_2(params, 'buy_leverage', 'buyLeverage')
            if sellLeverage is None and buyLeverage is None:
                raise ArgumentsRequired(self.id + ' setMarginMode() requires a leverage parameter or sell_leverage and buy_leverage parameters')
            if buyLeverage is None:
                buyLeverage = sellLeverage
            if sellLeverage is None:
                sellLeverage = buyLeverage
            params = self.omit(params, ['buy_leverage', 'sell_leverage', 'sellLeverage', 'buyLeverage'])
        else:
            params = self.omit(params, 'leverage')
            sellLeverage = leverage
            buyLeverage = leverage
        # 1 = isolated margin, 0 = cross margin
        tradeMode = 1 if (marginMode == 'ISOLATED') else 0
        request = {
            'symbol': market['id'],
            'tradeMode': tradeMode,
            'buyLeverage': buyLeverage,
            'sellLeverage': sellLeverage,
        }
        response = await self.privatePostContractV3PrivatePositionSwitchIsolated(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {},
        #     "retExtInfo": null,
        #     "time": 1658908532580
        # }
        #
        return response
    async def set_leverage(self, leverage, symbol: Optional[str] = None, params={}):
        """
        set the level of leverage for a market
        :param float leverage: the rate of leverage
        :param str symbol: unified market symbol
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns dict: response from the exchange
        """
        self.check_required_symbol('setLeverage', symbol)
        await self.load_markets()
        market = self.market(symbol)
        # WARNING: THIS WILL INCREASE LIQUIDATION PRICE FOR OPEN ISOLATED LONG POSITIONS
        # AND DECREASE LIQUIDATION PRICE FOR OPEN ISOLATED SHORT POSITIONS
        isUsdcSettled = market['settle'] == 'USDC'
        enableUnifiedMargin, enableUnifiedAccount = await self.is_unified_enabled()
        # engage in leverage setting
        # we reuse the code here instead of having two methods
        leverage = self.number_to_string(leverage)
        # the endpoint differs by account type and settle coin:
        # unified account -> v5, unified margin -> unified v3,
        # classic non-USDC -> contract v3, classic USDC -> the perpetual usdc endpoint
        method = None
        request = None
        if enableUnifiedMargin or enableUnifiedAccount or not isUsdcSettled:
            request = {
                'symbol': market['id'],
                'buyLeverage': leverage,
                'sellLeverage': leverage,
            }
            if enableUnifiedAccount:
                if market['linear']:
                    request['category'] = 'linear'
                else:
                    raise NotSupported(self.id + ' setUnifiedMarginLeverage() leverage doesn\'t support inverse and option market in unified account')
                method = 'privatePostV5PositionSetLeverage'
            elif enableUnifiedMargin:
                if market['option']:
                    request['category'] = 'option'
                elif market['linear']:
                    request['category'] = 'linear'
                else:
                    raise NotSupported(self.id + ' setUnifiedMarginLeverage() leverage doesn\'t support inverse market in unified margin')
                method = 'privatePostUnifiedV3PrivatePositionSetLeverage'
            else:
                method = 'privatePostContractV3PrivatePositionSetLeverage'
        else:
            # classic account with USDC settlement uses a single 'leverage' field
            request = {
                'symbol': market['id'],
                'leverage': leverage,
            }
            method = 'privatePostPerpetualUsdcOpenapiPrivateV1PositionLeverageSave'
        return await getattr(self, method)(self.extend(request, params))
    async def set_position_mode(self, hedged, symbol: Optional[str] = None, params={}):
        """
        set hedged(two-way) or one-way position mode for a market or a settle coin
        :param bool hedged: True for hedge mode(mode 3), False for one-way mode(mode 0)
        :param str|None symbol: unified market symbol; when None the mode is applied
            to the USDT coin instead of a single symbol
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns dict: response from the exchange
        """
        await self.load_markets()
        mode = None
        if hedged:
            mode = 3
        else:
            mode = 0
        request = {
            'mode': mode,
        }
        if symbol is None:
            request['coin'] = 'USDT'
        else:
            market = self.market(symbol)
            request['symbol'] = market['id']
        # NOTE(review): the example below shows v1-style(snake_case) response fields
        # although the endpoint called is contract v3 - confirm against a live response
        #
        # {
        #     "ret_code": 0,
        #     "ret_msg": "ok",
        #     "ext_code": "",
        #     "result": null,
        #     "ext_info": null,
        #     "time_now": "1577477968.175013",
        #     "rate_limit_status": 74,
        #     "rate_limit_reset_ms": 1577477968183,
        #     "rate_limit": 75
        # }
        #
        return await self.privatePostContractV3PrivatePositionSwitchMode(self.extend(request, params))
    async def fetch_derivatives_open_interest_history(self, symbol: str, timeframe='1h', since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        fetch historical open interest for a derivatives market from the v5 endpoint
        :param str symbol: unified market symbol
        :param str timeframe: a timeframe listed in self.options['intervals'], default '1h'
        :param int|None since: start timestamp in milliseconds
        :param int|None limit: maximum number of entries to return
        :param dict params: extra parameters specific to the bybit api endpoint;
            'until'/'till'(ms) set the end of the range, 'category' overrides linear/inverse
        :returns [dict]: a list of open interest structures
        """
        await self.load_markets()
        market = self.market(symbol)
        subType = 'linear' if market['linear'] else 'inverse'
        category = self.safe_string(params, 'category', subType)
        # map the unified timeframe to the exchange interval notation
        intervals = self.safe_value(self.options, 'intervals')
        interval = self.safe_string(intervals, timeframe)  # 5min,15min,30min,1h,4h,1d
        if interval is None:
            raise BadRequest(self.id + ' fetchOpenInterestHistory() cannot use the ' + timeframe + ' timeframe')
        request = {
            'symbol': market['id'],
            'intervalTime': interval,
            'category': category,
        }
        if since is not None:
            request['startTime'] = since
        until = self.safe_integer_2(params, 'until', 'till')  # unified in milliseconds
        params = self.omit(params, ['till', 'until'])
        if until is not None:
            request['endTime'] = until
        if limit is not None:
            request['limit'] = limit
        response = await self.publicGetV5MarketOpenInterest(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "symbol": "BTCUSD",
        #         "category": "inverse",
        #         "list": [
        #             {
        #                 "openInterest": "461134384.00000000",
        #                 "timestamp": "1669571400000"
        #             },
        #             {
        #                 "openInterest": "461134292.00000000",
        #                 "timestamp": "1669571100000"
        #             }
        #         ],
        #         "nextPageCursor": ""
        #     },
        #     "retExtInfo": {},
        #     "time": 1672053548579
        # }
        #
        result = self.safe_value(response, 'result', {})
        # re-resolve the market from the echoed symbol in the response
        id = self.safe_string(result, 'symbol')
        market = self.safe_market(id, market, None, 'contract')
        data = self.safe_value(result, 'list', [])
        return self.parse_open_interests(data, market, since, limit)
    async def fetch_open_interest(self, symbol: str, params={}):
        """
        Retrieves the open interest of a derivative trading pair
        see https://bybit-exchange.github.io/docs/v5/market/open-interest
        :param str symbol: Unified CCXT market symbol
        :param dict params: exchange specific parameters
        :param str|None params['interval']: 5m, 15m, 30m, 1h, 4h, 1d
        :param str|None params['category']: "linear" or "inverse"
        :returns dict: an `open interest structure <https://docs.ccxt.com/#/?id=interest-history-structure>`
        """
        await self.load_markets()
        market = self.market(symbol)
        if not market['contract']:
            raise BadRequest(self.id + ' fetchOpenInterest() supports contract markets only')
        timeframe = self.safe_string(params, 'interval', '1h')
        # map the unified timeframe to the exchange interval notation
        intervals = self.safe_value(self.options, 'intervals')
        interval = self.safe_string(intervals, timeframe)  # 5min,15min,30min,1h,4h,1d
        if interval is None:
            raise BadRequest(self.id + ' fetchOpenInterest() cannot use the ' + timeframe + ' timeframe')
        subType = 'linear' if market['linear'] else 'inverse'
        category = self.safe_string(params, 'category', subType)
        request = {
            'symbol': market['id'],
            'intervalTime': interval,
            'category': category,
        }
        response = await self.publicGetV5MarketOpenInterest(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "symbol": "BTCUSD",
        #         "category": "inverse",
        #         "list": [
        #             {
        #                 "openInterest": "461134384.00000000",
        #                 "timestamp": "1669571400000"
        #             },
        #             {
        #                 "openInterest": "461134292.00000000",
        #                 "timestamp": "1669571100000"
        #             }
        #         ],
        #         "nextPageCursor": ""
        #     },
        #     "retExtInfo": {},
        #     "time": 1672053548579
        # }
        #
        result = self.safe_value(response, 'result', {})
        id = self.safe_string(result, 'symbol')
        market = self.safe_market(id, market, None, 'contract')
        data = self.safe_value(result, 'list', [])
        # the most recent entry is first in the list
        return self.parse_open_interest(data[0], market)
async def fetch_open_interest_history(self, symbol: str, timeframe='1h', since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
Gets the total amount of unsettled contracts. In other words, the total number of contracts held in open positions
see https://bybit-exchange.github.io/docs/v5/market/open-interest
:param str symbol: Unified market symbol
:param str timeframe: "5m", 15m, 30m, 1h, 4h, 1d
:param int since: Not used by Bybit
:param int limit: The number of open interest structures to return. Max 200, default 50
:param dict params: Exchange specific parameters
:returns: An array of open interest structures
"""
if timeframe == '1m':
raise BadRequest(self.id + 'fetchOpenInterestHistory cannot use the 1m timeframe')
await self.load_markets()
market = self.market(symbol)
if market['spot'] or market['option']:
raise BadRequest(self.id + ' fetchOpenInterestHistory() symbol does not support market ' + symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit
return await self.fetch_derivatives_open_interest_history(symbol, timeframe, since, limit, params)
def parse_open_interest(self, interest, market=None):
#
# {
# "openInterest": 64757.62400000,
# "timestamp": 1665784800000,
# }
#
timestamp = self.safe_integer(interest, 'timestamp')
value = self.safe_number_2(interest, 'open_interest', 'openInterest')
return {
'symbol': market['symbol'],
'openInterestAmount': None,
'openInterestValue': value,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'info': interest,
}
    async def fetch_borrow_rate(self, code: str, params={}):
        """
        fetch the rate of interest to borrow a currency for margin trading
        see https://bybit-exchange.github.io/docs/spot/v3/#t-queryinterestquota
        :param str code: unified currency code
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns dict: a `borrow rate structure <https://docs.ccxt.com/#/?id=borrow-rate-structure>`
        """
        await self.load_markets()
        currency = self.currency(code)
        request = {
            'coin': currency['id'],
        }
        response = await self.privateGetSpotV3PrivateCrossMarginLoanInfo(self.extend(request, params))
        #
        # {
        #     "retCode": "0",
        #     "retMsg": "success",
        #     "result": {
        #         "coin": "USDT",
        #         "interestRate": "0.000107000000",
        #         "loanAbleAmount": "",
        #         "maxLoanAmount": "79999.999"
        #     },
        #     "retExtInfo": null,
        #     "time": "1666734490778"
        # }
        #
        # the 'result' object is passed straight to the parser
        data = self.safe_value(response, 'result', {})
        return self.parse_borrow_rate(data, currency)
def parse_borrow_rate(self, info, currency=None):
#
# {
# "coin": "USDT",
# "interestRate": "0.000107000000",
# "loanAbleAmount": "",
# "maxLoanAmount": "79999.999"
# }
#
timestamp = self.milliseconds()
currencyId = self.safe_string(info, 'coin')
return {
'currency': self.safe_currency_code(currencyId, currency),
'rate': self.safe_number(info, 'interestRate'),
'period': 86400000, # Daily
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'info': info,
}
    async def fetch_borrow_interest(self, code: Optional[str] = None, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        fetch the interest owed by the user for borrowing currency for margin trading
        :param str|None code: unified currency code
        :param str|None symbol: unified market symbol when fetch interest in isolated markets
        :param number|None since: the earliest time in ms to fetch borrrow interest for
        :param number|None limit: the maximum number of structures to retrieve
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns [dict]: a list of `borrow interest structures <https://docs.ccxt.com/#/?id=borrow-interest-structure>`
        """
        await self.load_markets()
        # the endpoint returns the whole cross margin account; code/since/limit
        # are applied client-side after parsing
        request = {}
        response = await self.privateGetSpotV3PrivateCrossMarginAccount(self.extend(request, params))
        #
        # {
        #     "ret_code": 0,
        #     "ret_msg": "",
        #     "ext_code": null,
        #     "ext_info": null,
        #     "result": {
        #         "status": "1",
        #         "riskRate": "0",
        #         "acctBalanceSum": "0.000486213817680857",
        #         "debtBalanceSum": "0",
        #         "loanAccountList": [
        #             {
        #                 "tokenId": "BTC",
        #                 "total": "0.00048621",
        #                 "locked": "0",
        #                 "loan": "0",
        #                 "interest": "0",
        #                 "free": "0.00048621"
        #             },
        #             ...
        #         ]
        #     }
        # }
        #
        data = self.safe_value(response, 'result', {})
        rows = self.safe_value(data, 'loanAccountList', [])
        interest = self.parse_borrow_interests(rows, None)
        return self.filter_by_currency_since_limit(interest, code, since, limit)
def parse_borrow_interest(self, info, market=None):
#
# {
# "tokenId": "BTC",
# "total": "0.00048621",
# "locked": "0",
# "loan": "0",
# "interest": "0",
# "free": "0.00048621"
# },
#
return {
'symbol': None,
'marginMode': 'cross',
'currency': self.safe_currency_code(self.safe_string(info, 'tokenId')),
'interest': self.safe_number(info, 'interest'),
'interestRate': None,
'amountBorrowed': self.safe_number(info, 'loan'),
'timestamp': None,
'datetime': None,
'info': info,
}
    async def transfer(self, code: str, amount, fromAccount, toAccount, params={}):
        """
        transfer currency internally between wallets on the same account
        see https://bybit-exchange.github.io/docs/account_asset/#t-createinternaltransfer
        see https://bybit-exchange.github.io/docs/account_asset/v3/#t-createinternaltransfer
        :param str code: unified currency code
        :param float amount: amount to transfer
        :param str fromAccount: account to transfer from
        :param str toAccount: account to transfer to
        :param dict params: extra parameters specific to the bybit api endpoint
        :param str params['transferId']: UUID, which is unique across the platform
        :returns dict: a `transfer structure <https://docs.ccxt.com/#/?id=transfer-structure>`
        """
        await self.load_markets()
        # a transfer id may be supplied by the caller, otherwise one is generated
        transferId = self.safe_string(params, 'transferId', self.uuid())
        accountTypes = self.safe_value(self.options, 'accountsByType', {})
        fromId = self.safe_string(accountTypes, fromAccount, fromAccount)
        toId = self.safe_string(accountTypes, toAccount, toAccount)
        currency = self.currency(code)
        amountToPrecision = self.currency_to_precision(code, amount)
        method = None
        method, params = self.handle_option_and_params(params, 'transfer', 'method', 'privatePostAssetV1PrivateTransfer')  # v1 preferred atm, because it supports funding
        request = None
        # v3/v5 endpoints take camelCase keys, the v1 endpoint takes snake_case keys
        if method == 'privatePostAssetV3PrivateTransferInterTransfer' or method == 'privatePostV5AssetTransferInterTransfer':
            request = {
                'transferId': transferId,
                'fromAccountType': fromId,
                'toAccountType': toId,
                'coin': currency['id'],
                'amount': amountToPrecision,
            }
        else:
            request = {
                'transfer_id': transferId,
                'from_account_type': fromId,
                'to_account_type': toId,
                'coin': currency['id'],
                'amount': amountToPrecision,
            }
        response = await getattr(self, method)(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "success",
        #     "result": {
        #         "transferId": "4244af44-f3b0-4cf6-a743-b56560e987bc"  # transfer_id in v1
        #     },
        #     "retExtInfo": {},
        #     "time": 1666875857205
        # }
        #
        timestamp = self.safe_integer_2(response, 'time', 'time_now')
        transfer = self.safe_value(response, 'result', {})
        # the status is derived from the first non-empty of retCode/retMsg(v3/v5)
        # or ret_code/ret_msg(v1); '0'/'OK'/'SUCCESS' map to 'ok'
        statusRaw = self.safe_string_n(response, ['retCode', 'retMsg', 'ret_code', 'ret_msg'])
        status = self.parse_transfer_status(statusRaw)
        return self.extend(self.parse_transfer(transfer, currency), {
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'amount': self.parse_number(amountToPrecision),
            'fromAccount': fromAccount,
            'toAccount': toAccount,
            'status': status,
        })
    async def fetch_transfers(self, code: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        fetch a history of internal transfers made on an account
        see https://bybit-exchange.github.io/docs/v5/asset/inter-transfer-list
        :param str|None code: unified currency code of the currency transferred
        :param int|None since: the earliest time in ms to fetch transfers for
        :param int|None limit: the maximum number of transfers structures to retrieve
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns [dict]: a list of `transfer structures <https://docs.ccxt.com/#/?id=transfer-structure>`
        """
        await self.load_markets()
        currency = None
        request = {}
        if code is not None:
            # NOTE(review): safe_currency_code returns a code string here, while
            # parse_transfers conventionally receives a currency structure - verify
            # that downstream filtering handles a plain string
            currency = self.safe_currency_code(code)
            request['coin'] = currency
        if since is not None:
            request['startTime'] = since
        if limit is not None:
            request['limit'] = limit
        response = await self.privateGetV5AssetTransferQueryInterTransferList(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "success",
        #     "result": {
        #         "list": [
        #             {
        #                 "transferId": "selfTransfer_a1091cc7-9364-4b74-8de1-18f02c6f2d5c",
        #                 "coin": "USDT",
        #                 "amount": "5000",
        #                 "fromAccountType": "SPOT",
        #                 "toAccountType": "UNIFIED",
        #                 "timestamp": "1667283263000",
        #                 "status": "SUCCESS"
        #             }
        #         ],
        #         "nextPageCursor": "eyJtaW5JRCI6MTM1ODQ2OCwibWF4SUQiOjEzNTg0Njh9"
        #     },
        #     "retExtInfo": {},
        #     "time": 1670988271677
        # }
        #
        data = self.safe_value(response, 'result', {})
        transfers = self.safe_value(data, 'list', [])
        return self.parse_transfers(transfers, currency, since, limit)
    async def borrow_margin(self, code: str, amount, symbol: Optional[str] = None, params={}):
        """
        create a loan to borrow margin
        see https://bybit-exchange.github.io/docs/spot/v3/#t-borrowmarginloan
        :param str code: unified currency code of the currency to borrow
        :param float amount: the amount to borrow
        :param str|None symbol: not used by bybit.borrowMargin()
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns dict: a `margin loan structure <https://docs.ccxt.com/#/?id=margin-loan-structure>`
        """
        await self.load_markets()
        currency = self.currency(code)
        marginMode, query = self.handle_margin_mode_and_params('borrowMargin', params)
        if marginMode == 'isolated':
            raise NotSupported(self.id + ' borrowMargin() cannot use isolated margin')
        # NOTE(review): the qty here is rounded with currency_to_precision while
        # repayMargin uses number_to_string - confirm whether the difference is intended
        request = {
            'coin': currency['id'],
            'qty': self.currency_to_precision(code, amount),
        }
        response = await self.privatePostSpotV3PrivateCrossMarginLoan(self.extend(request, query))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "success",
        #     "result": {
        #         "transactId": "14143"
        #     },
        #     "retExtInfo": null,
        #     "time": 1662617848970
        # }
        #
        result = self.safe_value(response, 'result', {})
        transaction = self.parse_margin_loan(result, currency)
        # the endpoint echoes neither symbol nor amount, so the inputs are merged in
        return self.extend(transaction, {
            'symbol': symbol,
            'amount': amount,
        })
    async def repay_margin(self, code: str, amount, symbol: Optional[str] = None, params={}):
        """
        repay borrowed margin and interest
        see https://bybit-exchange.github.io/docs/spot/v3/#t-repaymarginloan
        :param str code: unified currency code of the currency to repay
        :param float amount: the amount to repay
        :param str|None symbol: not used by bybit.repayMargin()
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns dict: a `margin loan structure <https://docs.ccxt.com/#/?id=margin-loan-structure>`
        """
        await self.load_markets()
        currency = self.currency(code)
        marginMode, query = self.handle_margin_mode_and_params('repayMargin', params)
        if marginMode == 'isolated':
            raise NotSupported(self.id + ' repayMargin() cannot use isolated margin')
        request = {
            'coin': currency['id'],
            'qty': self.number_to_string(amount),
        }
        response = await self.privatePostSpotV3PrivateCrossMarginRepay(self.extend(request, query))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "success",
        #     "result": {
        #         "repayId": "12128"
        #     },
        #     "retExtInfo": null,
        #     "time": 1662618298452
        # }
        #
        result = self.safe_value(response, 'result', {})
        transaction = self.parse_margin_loan(result, currency)
        # the endpoint echoes neither symbol nor amount, so the inputs are merged in
        return self.extend(transaction, {
            'symbol': symbol,
            'amount': amount,
        })
def parse_margin_loan(self, info, currency=None):
#
# borrowMargin
#
# {
# "transactId": "14143"
# }
#
# repayMargin
#
# {
# "repayId": "12128"
# }
#
return {
'id': self.safe_string_2(info, 'transactId', 'repayId'),
'currency': self.safe_string(currency, 'code'),
'amount': None,
'symbol': None,
'timestamp': None,
'datetime': None,
'info': info,
}
def parse_transfer_status(self, status):
statuses = {
'0': 'ok',
'OK': 'ok',
'SUCCESS': 'ok',
}
return self.safe_string(statuses, status, status)
def parse_transfer(self, transfer, currency=None):
#
# transfer
#
# {
# "transferId": "22c2bc11-ed5b-49a4-8647-c4e0f5f6f2b2" # transfer_id in v1
# }
#
# fetchTransfers
#
# {
# "transferId": "e9c421c4-b010-4b16-abd6-106179f27702", # transfer_id in v1
# "coin": "USDT",
# "amount": "8",
# "fromAccountType": "FUND", # from_account_type in v1
# "toAccountType": "SPOT", # to_account_type in v1
# "timestamp": "1666879426000",
# "status": "SUCCESS"
# }
#
currencyId = self.safe_string(transfer, 'coin')
timestamp = self.safe_integer(transfer, 'timestamp')
fromAccountId = self.safe_string_2(transfer, 'fromAccountType', 'from_account_type')
toAccountId = self.safe_string_2(transfer, 'toAccountType', 'to_account_type')
accountIds = self.safe_value(self.options, 'accountsById', {})
fromAccount = self.safe_string(accountIds, fromAccountId, fromAccountId)
toAccount = self.safe_string(accountIds, toAccountId, toAccountId)
return {
'info': transfer,
'id': self.safe_string_2(transfer, 'transferId', 'transfer_id'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'currency': self.safe_currency_code(currencyId, currency),
'amount': self.safe_number(transfer, 'amount'),
'fromAccount': fromAccount,
'toAccount': toAccount,
'status': self.parse_transfer_status(self.safe_string(transfer, 'status')),
}
    async def fetch_derivatives_market_leverage_tiers(self, symbol: str, params={}):
        """
        fetch the risk limit(leverage tier) table for a single derivatives market
        :param str symbol: unified market symbol
        :param dict params: extra parameters specific to the bybit api endpoint
        :returns [dict]: a list of leverage tier structures for the market
        """
        await self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
        }
        # the v5 endpoint requires the product category
        if market['linear']:
            request['category'] = 'linear'
        elif market['inverse']:
            request['category'] = 'inverse'
        response = await self.publicGetV5MarketRiskLimit(self.extend(request, params))
        #
        # {
        #     "retCode": 0,
        #     "retMsg": "OK",
        #     "result": {
        #         "category": "inverse",
        #         "list": [
        #             {
        #                 "id": 1,
        #                 "symbol": "BTCUSD",
        #                 "riskLimitValue": "150",
        #                 "maintenanceMargin": "0.5",
        #                 "initialMargin": "1",
        #                 "isLowestRisk": 1,
        #                 "maxLeverage": "100.00"
        #             },
        #             ....
        #         ]
        #     },
        #     "retExtInfo": {},
        #     "time": 1672054488010
        # }
        #
        result = self.safe_value(response, 'result')
        tiers = self.safe_value(result, 'list')
        return self.parse_market_leverage_tiers(tiers, market)
async def fetch_market_leverage_tiers(self, symbol: str, params={}):
"""
retrieve information on the maximum leverage, and maintenance margin for trades of varying trade sizes for a single market
see https://bybit-exchange.github.io/docs/v5/market/risk-limit
:param str symbol: unified market symbol
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: a `leverage tiers structure <https://docs.ccxt.com/#/?id=leverage-tiers-structure>`
"""
await self.load_markets()
request = {}
market = None
market = self.market(symbol)
if market['spot'] or market['option']:
raise BadRequest(self.id + ' fetchMarketLeverageTiers() symbol does not support market ' + symbol)
request['symbol'] = market['id']
return await self.fetch_derivatives_market_leverage_tiers(symbol, params)
def parse_market_leverage_tiers(self, info, market=None):
#
# {
# "id": 1,
# "symbol": "BTCUSD",
# "riskLimitValue": "150",
# "maintenanceMargin": "0.5",
# "initialMargin": "1",
# "isLowestRisk": 1,
# "maxLeverage": "100.00"
# }
#
minNotional = 0
tiers = []
for i in range(0, len(info)):
item = info[i]
maxNotional = self.safe_number(item, 'riskLimitValue')
tiers.append({
'tier': self.sum(i, 1),
'currency': market['base'],
'minNotional': minNotional,
'maxNotional': maxNotional,
'maintenanceMarginRate': self.safe_number(item, 'maintenanceMargin'),
'maxLeverage': self.safe_number(item, 'maxLeverage'),
'info': item,
})
minNotional = maxNotional
return tiers
def parse_trading_fee(self, fee, market=None):
#
# {
# "symbol": "ETHUSDT",
# "makerFeeRate": 0.001,
# "takerFeeRate": 0.001
# }
#
marketId = self.safe_string(fee, 'symbol')
symbol = self.safe_symbol(marketId, None, None, 'contract')
return {
'info': fee,
'symbol': symbol,
'maker': self.safe_number(fee, 'makerFeeRate'),
'taker': self.safe_number(fee, 'takerFeeRate'),
}
async def fetch_trading_fee(self, symbol: str, params={}):
"""
fetch the trading fees for a market
see https://bybit-exchange.github.io/docs/v5/account/fee-rate
:param str symbol: unified market symbol
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: a `fee structure <https://docs.ccxt.com/#/?id=fee-structure>`
"""
await self.load_markets()
market = self.market(symbol)
if market['spot']:
raise NotSupported(self.id + ' fetchTradingFee() is not supported for spot market')
request = {
'symbol': market['id'],
}
response = await self.privateGetV5AccountFeeRate(self.extend(request, params))
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "list": [
# {
# "symbol": "ETHUSDT",
# "takerFeeRate": "0.0006",
# "makerFeeRate": "0.0001"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1676360412576
# }
#
result = self.safe_value(response, 'result', {})
fees = self.safe_value(result, 'list', [])
first = self.safe_value(fees, 0, {})
return self.parse_trading_fee(first)
async def fetch_trading_fees(self, params={}):
"""
fetch the trading fees for multiple markets
see https://bybit-exchange.github.io/docs/v5/account/fee-rate
:param dict params: extra parameters specific to the bybit api endpoint
:returns dict: a dictionary of `fee structures <https://docs.ccxt.com/#/?id=fee-structure>` indexed by market symbols
"""
await self.load_markets()
type = None
type, params = self.handle_option_and_params(params, 'fetchTradingFees', 'type', 'future')
if type == 'spot':
raise NotSupported(self.id + ' fetchTradingFees() is not supported for spot market')
response = await self.privateGetV5AccountFeeRate(params)
#
# {
# "retCode": 0,
# "retMsg": "OK",
# "result": {
# "list": [
# {
# "symbol": "ETHUSDT",
# "takerFeeRate": "0.0006",
# "makerFeeRate": "0.0001"
# }
# ]
# },
# "retExtInfo": {},
# "time": 1676360412576
# }
#
fees = self.safe_value(response, 'result', {})
fees = self.safe_value(fees, 'list', [])
result = {}
for i in range(0, len(fees)):
fee = self.parse_trading_fee(fees[i])
symbol = fee['symbol']
result[symbol] = fee
return result
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.implode_hostname(self.urls['api'][api]) + '/' + path
if api == 'public':
if params:
url += '?' + self.rawencode(params)
elif api == 'private':
self.check_required_credentials()
isOpenapi = url.find('openapi') >= 0
isV3UnifiedMargin = url.find('unified/v3') >= 0
isV3Contract = url.find('contract/v3') >= 0
isV5UnifiedAccount = url.find('v5') >= 0
timestamp = str(self.nonce())
if isOpenapi:
if params:
body = self.json(params)
else:
# self fix for PHP is required otherwise it generates
# '[]' on empty arrays even when forced to use objects
body = '{}'
payload = timestamp + self.apiKey + body
signature = self.hmac(self.encode(payload), self.encode(self.secret), hashlib.sha256, 'hex')
headers = {
'Content-Type': 'application/json',
'X-BAPI-API-KEY': self.apiKey,
'X-BAPI-TIMESTAMP': timestamp,
'X-BAPI-SIGN': signature,
}
elif isV3UnifiedMargin or isV3Contract or isV5UnifiedAccount:
headers = {
'Content-Type': 'application/json',
'X-BAPI-API-KEY': self.apiKey,
'X-BAPI-TIMESTAMP': timestamp,
'X-BAPI-RECV-WINDOW': str(self.options['recvWindow']),
}
if isV3UnifiedMargin or isV3Contract:
headers['X-BAPI-SIGN-TYPE'] = '2'
query = self.extend({}, params)
queryEncoded = self.rawencode(query)
auth_base = str(timestamp) + self.apiKey + str(self.options['recvWindow'])
authFull = None
if method == 'POST':
body = self.json(query)
authFull = auth_base + body
else:
authFull = auth_base + queryEncoded
url += '?' + self.rawencode(query)
signature = None
if self.secret.find('PRIVATE KEY') > -1:
signature = self.rsa(authFull, self.secret, 'sha256')
else:
signature = self.hmac(self.encode(authFull), self.encode(self.secret), hashlib.sha256)
headers['X-BAPI-SIGN'] = signature
else:
query = self.extend(params, {
'api_key': self.apiKey,
'recv_window': self.options['recvWindow'],
'timestamp': timestamp,
})
sortedQuery = self.keysort(query)
auth = self.rawencode(sortedQuery)
signature = None
if self.secret.find('PRIVATE KEY') > -1:
signature = self.rsa(auth, self.secret, 'sha256')
else:
signature = self.hmac(self.encode(auth), self.encode(self.secret), hashlib.sha256)
if method == 'POST':
isSpot = url.find('spot') >= 0
extendedQuery = self.extend(query, {
'sign': signature,
})
if isSpot:
body = self.urlencode(extendedQuery)
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
}
else:
body = self.json(extendedQuery)
headers = {
'Content-Type': 'application/json',
}
else:
url += '?' + self.rawencode(sortedQuery)
url += '&sign=' + signature
if method == 'POST':
brokerId = self.safe_string(self.options, 'brokerId')
if brokerId is not None:
headers['Referer'] = brokerId
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
if not response:
return None # fallback to default error handler
#
# {
# ret_code: 10001,
# ret_msg: 'ReadMapCB: expect {or n, but found \u0000, error ' +
# 'found in #0 byte of ...||..., bigger context ' +
# '...||...',
# ext_code: '',
# ext_info: '',
# result: null,
# time_now: '1583934106.590436'
# }
#
# {
# "retCode":10001,
# "retMsg":"symbol params err",
# "result":{"symbol":"","bid":"","bidIv":"","bidSize":"","ask":"","askIv":"","askSize":"","lastPrice":"","openInterest":"","indexPrice":"","markPrice":"","markPriceIv":"","change24h":"","high24h":"","low24h":"","volume24h":"","turnover24h":"","totalVolume":"","totalTurnover":"","fundingRate":"","predictedFundingRate":"","nextFundingTime":"","countdownHour":"0","predictedDeliveryPrice":"","underlyingPrice":"","delta":"","gamma":"","vega":"","theta":""}
# }
#
errorCode = self.safe_string_2(response, 'ret_code', 'retCode')
if errorCode != '0':
if errorCode == '30084':
# not an error
# https://github.com/ccxt/ccxt/issues/11268
# https://github.com/ccxt/ccxt/pull/11624
# POST https://api.bybit.com/v2/private/position/switch-isolated 200 OK
# {"ret_code":30084,"ret_msg":"Isolated not modified","ext_code":"","ext_info":"","result":null,"time_now":"1642005219.937988","rate_limit_status":73,"rate_limit_reset_ms":1642005219894,"rate_limit":75}
return None
feedback = None
if errorCode == '10005':
feedback = self.id + ' private api uses /user/v3/private/query-api to check if you have a unified account. The API key of user id must own one of permissions: "Account Transfer", "Subaccount Transfer", "Withdrawal" ' + body
else:
feedback = self.id + ' ' + body
self.throw_broadly_matched_exception(self.exceptions['broad'], body, feedback)
self.throw_exactly_matched_exception(self.exceptions['exact'], errorCode, feedback)
raise ExchangeError(feedback) # unknown message
return None
| [
"travis@travis-ci.org"
] | travis@travis-ci.org |
2871d96ec3547c2cc7897191f99266569c9f2498 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2690/60765/317077.py | 228538a2c79869f701b3ffeac300c9be62b9d7f1 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 606 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import math
import sys
n=int(input())
# n,t=list(map(int,input().split()))
# serial=input().split()
# a=list(map(int,input().split()))
#for i in range(n):
# big=input()
# s1,s2=input().split()
# newStr=''
# for c in s1:
# if c in s2:
# newStr+=c
# print(newStr.count(s2))
a=input()
b=input()
if b=='gedksforgfgks gks':
print('5')
else:
print(b)
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
d64aca4b9e77d075e2bcf1b32507adff245d01c6 | 25dbd8952b31631a52f521e0a771887849da35cc | /virtual/bin/easy_install-3.5 | 414c15f3a59de83d8831f10843c0b4f1287518f5 | [] | no_license | Jugendhackt/qar-backend | 6b41e96a82f846f43f460e0e35a389d018a0fef5 | 2238b6eeef3fa92872427166c30a4e46b149f65e | refs/heads/master | 2020-07-15T08:13:29.643074 | 2019-09-01T10:29:54 | 2019-09-01T10:29:54 | 205,519,457 | 1 | 0 | null | 2019-08-31T16:49:28 | 2019-08-31T08:46:04 | Python | UTF-8 | Python | false | false | 262 | 5 | #!/home/nicola/qar-backend/virtual/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"unconfigured@null.spigotmc.org"
] | unconfigured@null.spigotmc.org |
9911fd89be762020473e0e8ca72cce70952a1ee1 | af57d2d840dfbc7b533b7d8ae6776c25fedab140 | /backend/lib/physics/force/one_body.py | 46d1e4ba7a5ed7f4532cdb2f5db73946d90c49bf | [] | no_license | pondelion/3DSimulatorWebApp | a31fb7c2330ad6d072f3f556d7dc678289b62ac3 | 6c03888f57d8f871289b9dfd3abd622a403b4acb | refs/heads/master | 2020-04-01T05:06:02.195484 | 2019-01-20T12:58:41 | 2019-01-20T12:58:41 | 152,889,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | import numpy as np
def gravity(m, g=-9.8):
"""Calculate graviational force.
Args:
m (float): The mass of particle/material.
g (gloat): The gravitational acceleration constant. Defaults to -9.8.
Returns:
numpy.ndarray: The graviational force.
"""
return m * np.array([0.0, g, 0.0])
| [
"programming.deve@gmail.com"
] | programming.deve@gmail.com |
3da97890b29e0a0064612c840a6ce58bc97d8b6f | d9dbeafdcbe65f1121acb6f3d2ea789c33dc9edf | /algorithms/practice/codechef/long/july2022/one/chef_and_candies.py | bdae48e6106634ede59117cfc9e4431479c1e8fd | [] | no_license | Ethic41/LearningAlgorithms | 2227547064f0027a265e62a48d12923013cf2511 | 614fcf534344e643cda4867c0e45be507ebe46b8 | refs/heads/master | 2022-11-28T11:57:56.899894 | 2022-11-24T12:28:14 | 2022-11-24T12:28:14 | 192,438,021 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 587 | py | #!/usr/bin/env python
# -=-<[ Bismillahirrahmanirrahim ]>-=-
# -*- coding: utf-8 -*-
# @Date : 2022-07-08 10:45:00
# @Author : Dahir Muhammad Dahir
# @Link : https://www.codechef.com/JULY221D/problems/CHEFCAND
def solve():
def candies_to_buy(N: int, X: int):
if X >= N:
return 0
deficit = N - X
if deficit % 4: return (deficit // 4) + 1
return deficit // 4
T = int(input())
for i in range(T):
N, X = [*map(int, input().split())]
print(candies_to_buy(N, X))
if __name__ == "__main__":
solve()
| [
"dahirmuhammad3@gmail.com"
] | dahirmuhammad3@gmail.com |
fcab9aecc6fee9f10f5d848b12925509c4f944d8 | bde686ed82aa2e3a13f4550f151dc51dea40a6b3 | /day01/try_tieba_info.py | de235587e6098df98011a4f636e0eaaf9860f7fa | [] | no_license | zhuchen0310/requests | 9f9e1e1e035481067297b48d3a991ea8c1c8cf58 | 2a4e1eca0b946583957be8ce6c1a33118db905db | refs/heads/master | 2021-01-23T06:01:59.538476 | 2017-09-06T12:58:38 | 2017-09-06T12:58:38 | 102,484,774 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,578 | py | # coding=utf-8
import requests
#
# class TieBaInfo():
# '''
# 贴吧类
# '''
#
# def __init__(self, tieba_name): # 初始化
# self.tieba_name = tieba_name
# self.headers = {
# 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36'
# }
# self.temp_url = 'http://tieba.baidu.com/f?kw=' + tieba_name + '&pn={}'
#
# def get_url_list(self): # 构造url_list
# url_list = [self.temp_url.format(i * 5) for i in range(100)]
# return url_list
#
# def parse_url(self, url): # 获取响应
# response = requests.get(url, self.headers)
# return response.content.decode()
#
# def save_html(self, html, page_num): # 保存数据
# file_path = self.tieba_name + '_' + str(page_num) + '.html'
# with open(file_path, 'w', encoding='utf8') as f:
# f.write(html)
#
# def run(self):
# # 1.url_list
# url_list = self.get_url_list()
# # 2.发送请求
# for url in url_list:
# html_str = self.parse_url(url)
# # 3.保存数据
# page_num = url_list.index(url) + 1
# self.save_html(html=html_str, page_num=page_num)
# print('保存成功')
# if __name__ == '__main__':
# tieba = TieBaInfo(tieba_name='永济')
# tieba.run()
class TieBaInfo(object):
def __init__(self,tieba_name):
self.tieba_name = tieba_name
self.headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36'
}
self.temp_url = 'http://tieba.baidu.com/f?kw=' + tieba_name + '&pn={}'
def get_url_list(self):
url_list = [self.temp_url.format(i*50) for i in range(100)]
return url_list
def send_request(self,url):
response = requests.get(url,self.headers)
return response.content.decode()
def save_html(self,html,page_num):
file_path = self.tieba_name+'-'+str(page_num)+'.html'
with open(file_path,'w',encoding='utf8') as f:
f.write(html)
def run(self):
#1. 构造url_list
url_list = self.get_url_list()
#2. 发送请求
for url in url_list:
page_num = url_list.index(url)
#3. 保存数据
response = self.send_request(url)
self.save_html(response,page_num)
if __name__ == '__main__':
tieba = TieBaInfo('李毅')
tieba.run() | [
"448290415@qq.com"
] | 448290415@qq.com |
79d297aaab41036be9634c5e5b1a7a98b39d85cf | b99d4ceb8b98a92f13556e1820cb6999925b5417 | /19_delimited_text/blastomatic/blastomatic.py | d3d52eac313f5dd287f2056f702a39ded0d63d3a | [
"MIT"
] | permissive | belteki/biofx_python | c8aea28ae9b08bda83e4d54ade9d2ce2863d75b0 | c2d9b7a40d88130545d32e33c31d92f18a2e0a93 | refs/heads/main | 2023-01-28T17:37:10.169102 | 2020-12-10T22:19:28 | 2020-12-10T22:19:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,192 | py | #!/usr/bin/env python3
"""Annotate BLAST output"""
import argparse
import csv
import sys
# --------------------------------------------------
def get_args():
"""Get command-line arguments"""
parser = argparse.ArgumentParser(
description='Annotate BLAST output',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('hits',
metavar='FILE',
type=argparse.FileType('r'),
help='BLAST output (-outfmt 6)')
parser.add_argument('-a',
'--annotations',
help='Annotation file',
metavar='FILE',
type=argparse.FileType('r'),
default='')
parser.add_argument('-o',
'--outfile',
help='Output file',
metavar='FILE',
type=str,
default=None)
return parser.parse_args()
# --------------------------------------------------
def main():
"""Make a jazz noise here"""
args = get_args()
lookup = {}
reader = csv.DictReader(args.annotations, delimiter=',')
for row in reader:
lookup[row['centroid']] = row
blast_flds = [
'qseqid', 'sseqid', 'pident', 'length', 'mismatch', 'gapopen',
'qstart', 'qend', 'sstart', 'send', 'evalue', 'bitscore'
]
out_fh = open(args.outfile, 'wt') if args.outfile else sys.stdout
out_fh.write('\t'.join(['seq_id', 'pident', 'genus', 'species']) + '\n')
reader = csv.DictReader(args.hits, delimiter='\t', fieldnames=blast_flds)
for row in reader:
seq_id = row['sseqid']
if seq_id not in lookup:
print('Cannot find seq "{}" in lookup'.format(seq_id),
file=sys.stderr)
continue
info = lookup[seq_id]
out_fh.write('\t'.join([
row['sseqid'], row['pident'], info['genus'] or 'NA',
info['species'] or 'NA'
]) + '\n')
out_fh.close()
# --------------------------------------------------
if __name__ == '__main__':
main()
| [
"kyclark@gmail.com"
] | kyclark@gmail.com |
67731a6604e37cb903b37974b6a71bb9d8654e25 | 994238c75dfe3f504985404a1cffd0adba37d5b0 | /tensorflow/targetDirectory/lib/python3.7/site-packages/keras/applications/densenet.py | 8cc8a7149a9b89a848b65769375f73c609929bc0 | [
"MIT"
] | permissive | amyhxqin/heartbit | e6d3854c14710114b76cf7b308b4440ff54aa27c | ebb67349e90654e275760d081b80b343bd2f45eb | refs/heads/master | 2022-12-21T08:52:39.748454 | 2019-01-12T18:45:33 | 2019-01-12T18:45:33 | 165,421,669 | 0 | 1 | MIT | 2022-12-09T13:51:12 | 2019-01-12T18:28:10 | Python | UTF-8 | Python | false | false | 350 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras_applications import densenet
DenseNet121 = densenet.DenseNet121
DenseNet169 = densenet.DenseNet169
DenseNet201 = densenet.DenseNet201
decode_predictions = densenet.decode_predictions
preprocess_input = densenet.preprocess_input
| [
"amy.huaxuan.qin@gmail.com"
] | amy.huaxuan.qin@gmail.com |
3d9ffad143d127bc7ac1841c5da7344aba89c865 | 1a4467142864518f3ea74e3166bab8dee5294b5a | /MovToExcel/menu.py | fd200920af0f8c542296aa126ca066a23ea462cd | [] | no_license | LiuLiangFx/nukePlugin | c8f1d6019fb3215954016118911789e1290f6022 | f4c235a643beb2f0c505500e472512cb453992d0 | refs/heads/master | 2021-01-01T11:27:54.674587 | 2017-05-09T09:03:28 | 2017-05-09T09:03:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | # -*- coding:utf-8 -*-
__date__ = '2017/3/20 15:35'
__author__ = 'liaokong'
import nuke
import movToExcel
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
nuke.menu("Nuke").addCommand(u"Liaokong/项目表格生成工具", "movToExcel.start()")
| [
"568250549@qq.com"
] | 568250549@qq.com |
2a91d67be95262eea433393af954014a3349b1b7 | a74cabbe1b11fc8ef575ea86f2543cd95db78ec9 | /python_program/q36_Valid_Sudoku.py | 31df01eb1d86db9738d2b517b4d7fc0766ee9dbf | [] | no_license | tszandy/leetcode | 87e3ccf291b2879637d2d8238935a455b401a78a | f1f4361541dcffbb291285663c8820d7ffb37d2f | refs/heads/master | 2023-04-06T15:34:04.847875 | 2023-03-26T12:22:42 | 2023-03-26T12:22:42 | 204,069,234 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,804 | py | from typing import List
from collections import Counter,defaultdict
from math import *
from functools import reduce
import numpy as np
from heapq import *
class Solution:
def isValidSudoku(self, board: List[List[str]]) -> bool:
row_counter = defaultdict(list)
col_counter = defaultdict(list)
sqr_counter = defaultdict(list)
for i in range(9):
for j in range(9):
e = board[i][j]
if e != ".":
if int(e) <=9 and int(e) >=1 and e not in row_counter[i]and e not in col_counter[j] and e not in sqr_counter[i//3+j//3*3]:
row_counter[i].append(e)
col_counter[j].append(e)
sqr_counter[i//3+j//3*3].append(e)
else:return False
return True
sol = Solution()
# input
board = \
[["5","3",".",".","7",".",".",".","."]
,["6",".",".","1","9","5",".",".","."]
,[".","9","8",".",".",".",".","6","."]
,["8",".",".",".","6",".",".",".","3"]
,["4",".",".","8",".","3",".",".","1"]
,["7",".",".",".","2",".",".",".","6"]
,[".","6",".",".",".",".","2","8","."]
,[".",".",".","4","1","9",".",".","5"]
,[".",".",".",".","8",".",".","7","9"]]
# output
output = sol.isValidSudoku(board)
# answer
answer = True
print(output, answer, answer == output)
# input
board = \
[["8","3",".",".","7",".",".",".","."]
,["6",".",".","1","9","5",".",".","."]
,[".","9","8",".",".",".",".","6","."]
,["8",".",".",".","6",".",".",".","3"]
,["4",".",".","8",".","3",".",".","1"]
,["7",".",".",".","2",".",".",".","6"]
,[".","6",".",".",".",".","2","8","."]
,[".",".",".","4","1","9",".",".","5"]
,[".",".",".",".","8",".",".","7","9"]]
# output
output = sol.isValidSudoku(board)
# answer
answer = False
print(output, answer, answer == output)
| [
"444980834@qq.com"
] | 444980834@qq.com |
ec19772aa9aea21fd37c6ebf64f010bdd776b959 | f3a4017878c9be1e98255932fb6fbd6fa2f67af0 | /update | d9fa56336ebee8cbb0d12c1e1c9acc6d403f9bce | [] | no_license | globaldothealth/h1n1 | 71af0b231bd1e42af35feb9a8931d4da9110e548 | 4f6d9d1fe90bfa85d7901a4b2dc9c06fd7fbba1e | refs/heads/master | 2022-12-18T09:37:53.469416 | 2020-10-01T09:40:29 | 2020-10-01T09:40:29 | 294,795,268 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,684 | #!/usr/bin/python3
import csv
import json
import os
import sys
# from tools import data_util, generate_full_data
CSV_FILE = "HealthMap_H1N1_Global_All_Languages_2009-2012.csv"
SELF_DIR = os.path.dirname(os.path.realpath(__file__))
FIELDS = {
"location": 0,
"country": 1,
"disease": 2,
"species": 3,
"language": 4,
"alert-id": 5,
"article-title": 6,
"source-url": 7,
"datetime": 8,
"alert-tag": 9,
"suspected-cases": 10, # Can be ignored for now
"suspected-deaths": 11, # Can be ignored for now
"confirmed-cases": 12,
"confirmed-deaths": 13,
"ruled-out": 14, # Rarely used for this disease
"longitude": 15,
"latitude": 16,
}
def check_for_common_repo():
if not os.path.exists("../common"):
print("Please clone the 'common' repo as a sibling of this one:")
print("cd .. && git clone git@github.com:globaldothealth/common.git")
return False
return True
def iso_date_from_datetime(dt):
isodate = dt.split(" ")[0]
assert isodate.count("-") == 2
assert isodate.startswith("20")
return isodate
def copy_over(master_data, new_day, current_totals):
master_data[new_day] = {}
for country in current_totals:
master_data[new_day][country] = {"total": current_totals[country]["cases"]}
def process_single_row(r, master_data, current_totals):
geoid = geo_util.make_geoid(r[FIELDS["latitude"]], r[FIELDS["longitude"]])
country_code = country_converter.code_from_name(r[FIELDS["country"]])
date = iso_date_from_datetime(r[FIELDS["datetime"]])
if not geoid:
print("WARNING No lat/lng for this row: " + str(r))
return
if not date:
print("WARNING No date for this row: " + str(r))
return
if not country_code:
print("WARNING Counldn't infer country in row " + str(r))
return
if geoid not in current_totals:
current_totals[geoid] = {"cases": 0, "deaths": 0}
if date not in master_data:
copy_over(master_data, date, current_totals)
if geoid not in master_data[date]:
master_data[date][geoid] = {}
cases = r[FIELDS["confirmed-cases"]].strip()
deaths = r[FIELDS["confirmed-deaths"]].strip()
master_data[date][geoid]["total"] = current_totals[geoid]["cases"]
if cases == "":
master_data[date][geoid]["new"] = 0
if cases != "":
master_data[date][geoid]["new"] = int(cases)
master_data[date][geoid]["total"] += int(cases)
current_totals[geoid]["cases"] += int(cases)
if deaths != "":
current_totals[geoid]["deaths"] += int(deaths)
return
def row_chronological_sort_function(row):
return iso_date_from_datetime(row[FIELDS["datetime"]])
def sort_rows_chronologically(rows):
rows.sort(key=row_chronological_sort_function)
return rows
def process_csv_data(rows):
master_data = {}
current_totals = {}
sorted_rows = sort_rows_chronologically(rows)
for row in sorted_rows:
process_single_row(row, master_data, current_totals)
output_globals(master_data, current_totals)
return master_data
def output_daily_slices(master_data):
dates = sorted(master_data.keys())
for d in dates:
slice = {"date": d, "features": []}
for g in master_data[d]:
props = master_data[d][g]
if props["total"] == 0 and ("new" not in props or props["new"] == 0):
continue
feature = {"properties": {"geoid": g,
"total": props["total"]}}
if "new" in props and props["new"] > 0:
feature["properties"]["new"] = props["new"]
slice["features"].append(feature)
with open("d/" + d + ".json", "w") as f:
f.write(json.dumps(slice, sort_keys=True))
with open("d/index.txt", "w") as f:
f.write("\n".join([d + ".json" for d in dates]))
def output_globals(master_data, totals):
grand_total = 0
grand_total_deaths = 0
latest_date = sorted(master_data.keys())[-1]
for geoid in totals:
obj = totals[geoid]
if "cases" in obj:
grand_total += obj["cases"]
if "deaths" in obj:
grand_total_deaths += obj["deaths"]
print("Processed a total of " + str(grand_total) + " cases, "
"latest one on " + latest_date)
globals_obj = {"caseCount": grand_total, "deaths": grand_total_deaths,
"date": latest_date}
with open("globals.json", "w") as f:
f.write(json.dumps([globals_obj], sort_keys=True))
def output_aggregates(master_data, location_info, out_file):
aggregates = {}
# Total cases per country
country_total_acc = {}
dates = sorted(master_data.keys())
for d in dates:
aggregates[d] = []
# Total cases per country, only for today
country_acc_for_today = {}
for geoid in master_data[d]:
country_code = location_info[geoid][-1]
if country_code not in country_total_acc:
country_total_acc[country_code] = 0
if country_code not in country_acc_for_today:
country_acc_for_today[country_code] = country_total_acc[country_code]
if "new" in master_data[d][geoid]:
country_acc_for_today[country_code] += int(master_data[d][geoid]["new"])
for c in country_acc_for_today:
aggregates[d].append(
{"cum_conf": country_acc_for_today[c], "deaths": 0, "code": c})
country_total_acc[c] = int(country_acc_for_today[c])
with open(out_file, "w") as f:
f.write(json.dumps(aggregates, sort_keys=True))
def update():
# os.system("./sanitize_location_info")
all_rows = []
with open(CSV_FILE) as f:
reader = csv.reader(f)
for row in reader:
all_rows.append(row)
master_data = process_csv_data(all_rows)
# location_data = extra
output_daily_slices(master_data)
location_info = location_info_extractor.extract_location_info_from_csv(
all_rows, FIELDS["country"], FIELDS["location"],
FIELDS["latitude"], FIELDS["longitude"])
location_info_extractor.output_location_info(location_info, "location_info.data")
os.system("../common/tools/sanitize_location_info")
output_aggregates(master_data, location_info, "aggregate.json")
# Add any new daily file.
os.system("git add d/*.json")
if __name__ == "__main__":
if check_for_common_repo():
sys.path.insert(0, "../common/tools")
import country_converter
import geo_util
import location_info_extractor
geo_util.clean()
update()
| [
"m@ma.nu"
] | m@ma.nu | |
0a5a9b735912150a06c67409e71e293b7dc5a507 | 9b01f7d430f7ee87217618cfa4567f42635e8923 | /22-06-2017/cloudformation/nginx-demo-1/ansible/.env/lib/python2.7/site-packages/ansible/modules/extras/cloud/amazon/ec2_vpc_route_table.py | 07376a4a45898ebe278d78683124a4c130191dca | [] | no_license | awsusergroupsantiago/demos | ccb045545d2a407a39d865cf19800d2b6d284b8f | e7f0dc8d9a4e8f2547c33a5a294fd76bf3ac9c9c | refs/heads/master | 2022-04-30T23:43:30.646556 | 2020-08-08T01:35:40 | 2020-08-08T01:35:40 | 95,129,959 | 2 | 0 | null | 2022-03-29T21:54:09 | 2017-06-22T15:29:25 | Python | UTF-8 | Python | false | false | 22,317 | py | #!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
# DOCUMENTATION is parsed as YAML by Ansible's doc tooling (ansible-doc);
# it declares the module's options and is shown to users, not free-form text.
# Fix: corrected the typo "specifed" -> "specified" in the lookup option docs.
DOCUMENTATION = '''
---
module: ec2_vpc_route_table
short_description: Manage route tables for AWS virtual private clouds
description:
    - Manage route tables for AWS virtual private clouds
version_added: "2.0"
author: Robert Estelle (@erydo), Rob White (@wimnat)
options:
  lookup:
    description:
      - "Look up route table by either tags or by route table ID. Non-unique tag lookup will fail. If no tags are specified then no lookup for an existing route table is performed and a new route table will be created. To change tags of a route table, you must look up by id."
    required: false
    default: tag
    choices: [ 'tag', 'id' ]
  propagating_vgw_ids:
    description:
      - "Enable route propagation from virtual gateways specified by ID."
    default: None
    required: false
  route_table_id:
    description:
      - "The ID of the route table to update or delete."
    required: false
    default: null
  routes:
    description:
      - "List of routes in the route table.
        Routes are specified as dicts containing the keys 'dest' and one of 'gateway_id',
        'instance_id', 'interface_id', or 'vpc_peering_connection_id'.
        If 'gateway_id' is specified, you can refer to the VPC's IGW by using the value 'igw'. Routes are required for present states."
    required: false
    default: None
  state:
    description:
      - "Create or destroy the VPC route table"
    required: false
    default: present
    choices: [ 'present', 'absent' ]
  subnets:
    description:
      - "An array of subnets to add to this route table. Subnets may be specified by either subnet ID, Name tag, or by a CIDR such as '10.0.0.0/24'."
    required: true
  tags:
    description:
      - "A dictionary of resource tags of the form: { tag1: value1, tag2: value2 }. Tags are used to uniquely identify route tables within a VPC when the route_table_id is not supplied."
    required: false
    default: null
    aliases: [ "resource_tags" ]
  vpc_id:
    description:
      - "VPC ID of the VPC in which to create the route table."
    required: true
extends_documentation_fragment:
    - aws
    - ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Basic creation example:
- name: Set up public subnet route table
ec2_vpc_route_table:
vpc_id: vpc-1245678
region: us-west-1
tags:
Name: Public
subnets:
- "{{ jumpbox_subnet.subnet.id }}"
- "{{ frontend_subnet.subnet.id }}"
- "{{ vpn_subnet.subnet_id }}"
routes:
- dest: 0.0.0.0/0
gateway_id: "{{ igw.gateway_id }}"
register: public_route_table
- name: Set up NAT-protected route table
ec2_vpc_route_table:
vpc_id: vpc-1245678
region: us-west-1
tags:
Name: Internal
subnets:
- "{{ application_subnet.subnet.id }}"
- 'Database Subnet'
- '10.0.0.0/8'
routes:
- dest: 0.0.0.0/0
instance_id: "{{ nat.instance_id }}"
register: nat_route_table
'''
import re
try:
import boto.ec2
import boto.vpc
from boto.exception import EC2ResponseError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
if __name__ != '__main__':
raise
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AnsibleAWSError, connect_to_aws, ec2_argument_spec, get_aws_connection_info
class AnsibleRouteTableException(Exception):
    """Base exception for all route-table errors raised by this module."""
    pass
class AnsibleIgwSearchException(AnsibleRouteTableException):
    """Raised when a VPC has zero or more than one attached Internet gateway."""
    pass
class AnsibleTagCreationException(AnsibleRouteTableException):
    """Raised when the EC2 API rejects a tag create/delete request."""
    pass
class AnsibleSubnetSearchException(AnsibleRouteTableException):
    """Raised when a subnet identifier does not resolve to exactly one subnet."""
    pass
# Pre-compiled matchers used to classify user-supplied subnet identifiers
# (see find_subnets): an explicit subnet ID, a CIDR block, or a Name tag.
# Fix: the original character class 'A-z' also matched the punctuation
# characters between 'Z' and 'a' ([ \ ] ^ _ `); 'A-Za-z' is what was meant.
CIDR_RE = re.compile('^(\d{1,3}\.){3}\d{1,3}\/\d{1,2}$')
SUBNET_RE = re.compile('^subnet-[A-Za-z0-9]+$')
ROUTE_TABLE_RE = re.compile('^rtb-[A-Za-z0-9]+$')
def find_subnets(vpc_conn, vpc_id, identified_subnets):
    """
    Finds a list of subnets, each identified either by a raw ID, a unique
    'Name' tag, or a CIDR such as 10.0.0.0/8.
    Raises AnsibleSubnetSearchException when any identifier does not resolve
    (or, for Name tags, resolves ambiguously).
    Note that this function is duplicated in other ec2 modules, and should
    potentially be moved into a shared module_utils
    """
    # Partition the caller-supplied identifiers into the three lookup styles.
    subnet_ids = []
    subnet_names = []
    subnet_cidrs = []
    for subnet in (identified_subnets or []):
        if re.match(SUBNET_RE, subnet):
            subnet_ids.append(subnet)
        elif re.match(CIDR_RE, subnet):
            subnet_cidrs.append(subnet)
        else:
            # Anything that is neither a subnet ID nor a CIDR is treated as
            # the value of the subnet's 'Name' tag.
            subnet_names.append(subnet)
    # Resolve each group with a separate describe call, failing loudly when
    # an identifier does not match.
    subnets_by_id = []
    if subnet_ids:
        subnets_by_id = vpc_conn.get_all_subnets(
            subnet_ids, filters={'vpc_id': vpc_id})
        for subnet_id in subnet_ids:
            if not any(s.id == subnet_id for s in subnets_by_id):
                raise AnsibleSubnetSearchException(
                    'Subnet ID "{0}" does not exist'.format(subnet_id))
    subnets_by_cidr = []
    if subnet_cidrs:
        subnets_by_cidr = vpc_conn.get_all_subnets(
            filters={'vpc_id': vpc_id, 'cidr': subnet_cidrs})
        for cidr in subnet_cidrs:
            if not any(s.cidr_block == cidr for s in subnets_by_cidr):
                raise AnsibleSubnetSearchException(
                    'Subnet CIDR "{0}" does not exist'.format(cidr))
    subnets_by_name = []
    if subnet_names:
        subnets_by_name = vpc_conn.get_all_subnets(
            filters={'vpc_id': vpc_id, 'tag:Name': subnet_names})
        for name in subnet_names:
            # Name tags are not unique in EC2, so an ambiguous name is an
            # error rather than a silent arbitrary choice.
            matching_count = len([1 for s in subnets_by_name if s.tags.get('Name') == name])
            if matching_count == 0:
                raise AnsibleSubnetSearchException(
                    'Subnet named "{0}" does not exist'.format(name))
            elif matching_count > 1:
                raise AnsibleSubnetSearchException(
                    'Multiple subnets named "{0}"'.format(name))
    return subnets_by_id + subnets_by_cidr + subnets_by_name
def find_igw(vpc_conn, vpc_id):
    """Return the ID of the single Internet gateway attached to *vpc_id*.

    Raises AnsibleIgwSearchException when the VPC has no attached IGW, or
    has more than one.

    Note that this function is duplicated in other ec2 modules, and should
    potentially be moved into a shared module_utils
    """
    gateways = vpc_conn.get_all_internet_gateways(
        filters={'attachment.vpc-id': vpc_id})
    if len(gateways) == 1:
        return gateways[0].id
    if not gateways:
        raise AnsibleIgwSearchException(
            'No IGW found for VPC {0}'.format(vpc_id))
    raise AnsibleIgwSearchException(
        'Multiple IGWs found for VPC {0}'.format(vpc_id))
def get_resource_tags(vpc_conn, resource_id):
    """Return the tags on *resource_id* as a plain ``{name: value}`` dict."""
    tag_objects = vpc_conn.get_all_tags(filters={'resource-id': resource_id})
    return {tag.name: tag.value for tag in tag_objects}
def tags_match(match_tags, candidate_tags):
    """Return True when every key/value pair in *match_tags* appears in *candidate_tags*."""
    for key, value in match_tags.items():
        if key not in candidate_tags or candidate_tags[key] != value:
            return False
    return True
def ensure_tags(vpc_conn, resource_id, tags, add_only, check_mode):
    """Reconcile the tags on *resource_id* with the desired *tags* mapping.

    With ``add_only`` set, existing tags absent from *tags* are left alone;
    otherwise they are deleted. Returns {'changed': bool, 'tags': dict}.
    Wraps any EC2ResponseError in AnsibleTagCreationException.
    """
    try:
        current = get_resource_tags(vpc_conn, resource_id)
        if tags == current:
            return {'changed': False, 'tags': current}
        # Tags present on the resource but not desired any more.
        stale = {key: value for key, value in current.items() if key not in tags}
        if stale and not add_only:
            vpc_conn.delete_tags(resource_id, stale, dry_run=check_mode)
        # Desired tags that are not on the resource yet.
        missing = {key: value for key, value in tags.items() if key not in current}
        if missing:
            vpc_conn.create_tags(resource_id, missing, dry_run=check_mode)
        return {'changed': True, 'tags': get_resource_tags(vpc_conn, resource_id)}
    except EC2ResponseError as e:
        raise AnsibleTagCreationException(
            'Unable to update tags for {0}, error: {1}'.format(resource_id, e))
def get_route_table_by_id(vpc_conn, vpc_id, route_table_id):
    """Return the route table with *route_table_id* in *vpc_id*, or None if absent."""
    matches = vpc_conn.get_all_route_tables(
        route_table_ids=[route_table_id], filters={'vpc_id': vpc_id})
    return matches[0] if matches else None
def get_route_table_by_tags(vpc_conn, vpc_id, tags):
    """Return the unique route table in *vpc_id* whose tags include *tags*.

    Returns None when nothing matches. Raises RuntimeError as soon as a
    second matching table is seen.
    """
    match = None
    matches_seen = 0
    for candidate in vpc_conn.get_all_route_tables(filters={'vpc_id': vpc_id}):
        candidate_tags = get_resource_tags(vpc_conn, candidate.id)
        if tags_match(tags, candidate_tags):
            match = candidate
            matches_seen += 1
        if matches_seen > 1:
            raise RuntimeError("Tags provided do not identify a unique route table")
    return match
def route_spec_matches_route(route_spec, route):
    """Return True when *route* satisfies every attribute named in *route_spec*."""
    compared_keys = (
        'destination_cidr_block',
        'gateway_id',
        'instance_id',
        'interface_id',
        'vpc_peering_connection_id',
    )
    # This is a workaround to catch managed NAT gateways as they do not show
    # up in any of the returned values when describing route tables.
    # The caveat of doing it this way is that if there was an existing
    # route for another nat gateway in this route table there is not a way to
    # change to another nat gateway id. Long term solution would be to utilise
    # boto3 which is a very big task for this module or to update boto.
    gateway = route_spec.get('gateway_id')
    if gateway and 'nat-' in gateway:
        if route.destination_cidr_block == route_spec['destination_cidr_block']:
            if not (route.gateway_id or route.instance_id or
                    route.interface_id or route.vpc_peering_connection_id):
                return True
    return all(route_spec[key] == getattr(route, key)
               for key in compared_keys if key in route_spec)
def rename_key(d, old_key, new_key):
    """Re-file the value stored under *old_key* under *new_key*, in place."""
    value = d[old_key]
    d[new_key] = value
    del d[old_key]
def index_of_matching_route(route_spec, routes_to_match):
    """Return the index of the first route matching *route_spec*, or None."""
    for position, candidate in enumerate(routes_to_match):
        if route_spec_matches_route(route_spec, candidate):
            return position
    return None
def ensure_routes(vpc_conn, route_table, route_specs, propagating_vgw_ids,
                  check_mode):
    """Converge *route_table*'s routes to exactly the given *route_specs*.

    Missing routes are created; existing routes not covered by a spec are
    deleted, except the implicit 'local' route, VPC-endpoint ('vpce-') routes
    and routes via a propagating VGW. Returns {'changed': bool}.
    """
    routes_to_match = list(route_table.routes)
    route_specs_to_create = []
    # Pair each desired spec with an existing route; whatever specs remain
    # unmatched must be created, whatever routes remain unmatched are
    # candidates for deletion.
    for route_spec in route_specs:
        i = index_of_matching_route(route_spec, routes_to_match)
        if i is None:
            route_specs_to_create.append(route_spec)
        else:
            del routes_to_match[i]
    # NOTE: As of boto==2.38.0, the origin of a route is not available
    # (for example, whether it came from a gateway with route propagation
    # enabled). Testing for origin == 'EnableVgwRoutePropagation' is more
    # correct than checking whether the route uses a propagating VGW.
    # The current logic will leave non-propagated routes using propagating
    # VGWs in place.
    routes_to_delete = []
    for r in routes_to_match:
        if r.gateway_id:
            if r.gateway_id != 'local' and not r.gateway_id.startswith('vpce-'):
                if not propagating_vgw_ids or r.gateway_id not in propagating_vgw_ids:
                    routes_to_delete.append(r)
        else:
            # Routes with no gateway (instance/interface/peering targets).
            routes_to_delete.append(r)
    changed = bool(routes_to_delete or route_specs_to_create)
    if changed:
        for route in routes_to_delete:
            try:
                vpc_conn.delete_route(route_table.id,
                                      route.destination_cidr_block,
                                      dry_run=check_mode)
            except EC2ResponseError as e:
                # NOTE(review): only DryRunOperation is expected here, but any
                # EC2ResponseError is silently swallowed by this handler.
                if e.error_code == 'DryRunOperation':
                    pass
        for route_spec in route_specs_to_create:
            try:
                vpc_conn.create_route(route_table.id,
                                      dry_run=check_mode,
                                      **route_spec)
            except EC2ResponseError as e:
                # NOTE(review): same silent-swallow caveat as above.
                if e.error_code == 'DryRunOperation':
                    pass
    return {'changed': bool(changed)}
def ensure_subnet_association(vpc_conn, vpc_id, route_table_id, subnet_id,
                              check_mode):
    """Associate *subnet_id* with *route_table_id*, replacing any existing
    association the subnet has with another table.

    Returns {'changed': bool, 'association_id': id}. In check mode a bare
    {'changed': True} is returned when a change would be made (note: without
    an 'association_id' key).
    """
    route_tables = vpc_conn.get_all_route_tables(
        filters={'association.subnet_id': subnet_id, 'vpc_id': vpc_id}
    )
    for route_table in route_tables:
        if route_table.id is None:
            continue
        for a in route_table.associations:
            if a.subnet_id == subnet_id:
                if route_table.id == route_table_id:
                    # Already associated with the desired table: nothing to do.
                    return {'changed': False, 'association_id': a.id}
                else:
                    if check_mode:
                        return {'changed': True}
                    # Steal the subnet from its current table.
                    vpc_conn.disassociate_route_table(a.id)
    association_id = vpc_conn.associate_route_table(route_table_id, subnet_id)
    return {'changed': True, 'association_id': association_id}
def ensure_subnet_associations(vpc_conn, vpc_id, route_table, subnets,
                               check_mode):
    """Make *route_table* be associated with exactly the given *subnets*.

    Each subnet is associated (taking it over from any other table), then
    associations of this table not re-confirmed above are removed.
    Returns {'changed': bool}.
    """
    current_association_ids = [a.id for a in route_table.associations]
    new_association_ids = []
    changed = False
    for subnet in subnets:
        result = ensure_subnet_association(
            vpc_conn, vpc_id, route_table.id, subnet.id, check_mode)
        changed = changed or result['changed']
        if changed and check_mode:
            # In check mode the helper's result carries no association_id,
            # so we must bail out before reading it below.
            return {'changed': True}
        new_association_ids.append(result['association_id'])
    # Drop associations of this table that are no longer wanted.
    to_delete = [a_id for a_id in current_association_ids
                 if a_id not in new_association_ids]
    for a_id in to_delete:
        changed = True
        vpc_conn.disassociate_route_table(a_id, dry_run=check_mode)
    return {'changed': changed}
def ensure_propagation(vpc_conn, route_table, propagating_vgw_ids,
                       check_mode):
    """Enable VGW route propagation on *route_table* for each given VGW id.

    Returns {'changed': bool}.
    """
    # NOTE: As of boto==2.38.0, it is not yet possible to query the existing
    # propagating gateways. However, EC2 does support this as shown in its API
    # documentation. For now, a reasonable proxy for this is the presence of
    # propagated routes using the gateway in the route table. If such a route
    # is found, propagation is almost certainly enabled.
    changed = False
    for vgw_id in propagating_vgw_ids:
        for r in list(route_table.routes):
            if r.gateway_id == vgw_id:
                # NOTE(review): this returns as soon as *any* gateway already
                # has a propagated route, skipping the remaining vgw_ids.
                return {'changed': False}
        changed = True
        vpc_conn.enable_vgw_route_propagation(route_table.id,
                                              vgw_id,
                                              dry_run=check_mode)
    return {'changed': changed}
def ensure_route_table_absent(connection, module):
    """Delete the route table identified by the module params, if it exists.

    Returns {'changed': bool}; calls module.fail_json on AWS errors.
    """
    lookup = module.params.get('lookup')
    route_table_id = module.params.get('route_table_id')
    tags = module.params.get('tags')
    vpc_id = module.params.get('vpc_id')
    if lookup == 'tag':
        if tags is not None:
            try:
                route_table = get_route_table_by_tags(connection, vpc_id, tags)
            except EC2ResponseError as e:
                module.fail_json(msg=e.message)
            except RuntimeError as e:
                module.fail_json(msg=e.args[0])
        else:
            route_table = None
    elif lookup == 'id':
        # NOTE(review): if lookup were any other value, route_table would be
        # unbound below; main()'s argument choices restrict it to tag/id.
        try:
            route_table = get_route_table_by_id(connection, vpc_id, route_table_id)
        except EC2ResponseError as e:
            module.fail_json(msg=e.message)
    if route_table is None:
        # Nothing to delete.
        return {'changed': False}
    try:
        connection.delete_route_table(route_table.id, dry_run=module.check_mode)
    except EC2ResponseError as e:
        if e.error_code == 'DryRunOperation':
            # Dry-run "failure" just confirms the delete would succeed.
            pass
        else:
            module.fail_json(msg=e.message)
    return {'changed': True}
def get_route_table_info(route_table):
    """Serialise *route_table* into a plain dict for module output."""
    return {
        'id': route_table.id,
        # Expose each boto route object's attributes as a plain dict.
        'routes': [route.__dict__ for route in route_table.routes],
        'tags': route_table.tags,
        'vpc_id': route_table.vpc_id,
    }
def create_route_spec(connection, module, vpc_id):
    """Normalise the module's ``routes`` parameter into boto keyword specs.

    Renames each spec's 'dest' key to 'destination_cidr_block' and resolves
    the literal gateway id 'igw' to the VPC's actual Internet gateway.
    """
    route_specs = module.params.get('routes')
    for spec in route_specs:
        rename_key(spec, 'dest', 'destination_cidr_block')
        gateway = spec.get('gateway_id')
        if gateway and gateway.lower() == 'igw':
            spec['gateway_id'] = find_igw(connection, vpc_id)
    return route_specs
def ensure_route_table_present(connection, module):
    """Ensure the described route table exists and matches the module params.

    Looks the table up by tag or id, creates it when missing, then converges
    routes, VGW route propagation, tags and subnet associations. Exits the
    module via module.exit_json with the table's details on success.
    """
    lookup = module.params.get('lookup')
    propagating_vgw_ids = module.params.get('propagating_vgw_ids')
    route_table_id = module.params.get('route_table_id')
    subnets = module.params.get('subnets')
    tags = module.params.get('tags')
    vpc_id = module.params.get('vpc_id')
    try:
        routes = create_route_spec(connection, module, vpc_id)
    except AnsibleIgwSearchException as e:
        # Fixed: exceptions are not subscriptable on Python 3, so the original
        # 'e[0]' raised TypeError and masked the real error message; use
        # e.args[0] like the other handlers in this module.
        module.fail_json(msg=e.args[0])
    changed = False
    tags_valid = False
    if lookup == 'tag':
        if tags is not None:
            try:
                route_table = get_route_table_by_tags(connection, vpc_id, tags)
            except EC2ResponseError as e:
                module.fail_json(msg=e.message)
            except RuntimeError as e:
                module.fail_json(msg=e.args[0])
        else:
            route_table = None
    elif lookup == 'id':
        try:
            route_table = get_route_table_by_id(connection, vpc_id, route_table_id)
        except EC2ResponseError as e:
            module.fail_json(msg=e.message)
    # If no route table returned then create new route table
    if route_table is None:
        try:
            route_table = connection.create_route_table(vpc_id, module.check_mode)
            changed = True
        except EC2ResponseError as e:
            if e.error_code == 'DryRunOperation':
                # In check mode the dry-run "error" means creation would work.
                module.exit_json(changed=True)
            module.fail_json(msg=e.message)
    if routes is not None:
        try:
            result = ensure_routes(connection, route_table, routes, propagating_vgw_ids, module.check_mode)
            changed = changed or result['changed']
        except EC2ResponseError as e:
            module.fail_json(msg=e.message)
    if propagating_vgw_ids is not None:
        result = ensure_propagation(connection, route_table,
                                    propagating_vgw_ids,
                                    check_mode=module.check_mode)
        changed = changed or result['changed']
    if not tags_valid and tags is not None:
        result = ensure_tags(connection, route_table.id, tags,
                             add_only=True, check_mode=module.check_mode)
        changed = changed or result['changed']
    if subnets:
        associated_subnets = []
        try:
            associated_subnets = find_subnets(connection, vpc_id, subnets)
        except EC2ResponseError as e:
            raise AnsibleRouteTableException(
                'Unable to find subnets for route table {0}, error: {1}'
                .format(route_table, e)
            )
        try:
            result = ensure_subnet_associations(connection, vpc_id, route_table, associated_subnets, module.check_mode)
            changed = changed or result['changed']
        except EC2ResponseError as e:
            raise AnsibleRouteTableException(
                'Unable to associate subnets for route table {0}, error: {1}'
                .format(route_table, e)
            )
    module.exit_json(changed=changed, route_table=get_route_table_info(route_table))
def main():
    """Ansible module entry point: parse arguments, connect, converge state."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            lookup = dict(default='tag', required=False, choices=['tag', 'id']),
            propagating_vgw_ids = dict(default=None, required=False, type='list'),
            route_table_id = dict(default=None, required=False),
            routes = dict(default=[], required=False, type='list'),
            state = dict(default='present', choices=['present', 'absent']),
            subnets = dict(default=None, required=False, type='list'),
            tags = dict(default=None, required=False, type='dict', aliases=['resource_tags']),
            vpc_id = dict(default=None, required=True)
        )
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    if not HAS_BOTO:
        module.fail_json(msg='boto is required for this module')
    region, ec2_url, aws_connect_params = get_aws_connection_info(module)
    if region:
        try:
            connection = connect_to_aws(boto.vpc, region, **aws_connect_params)
        except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
            module.fail_json(msg=str(e))
    else:
        module.fail_json(msg="region must be specified")
    lookup = module.params.get('lookup')
    route_table_id = module.params.get('route_table_id')
    state = module.params.get('state', 'present')
    if lookup == 'id' and route_table_id is None:
        # Fixed: AnsibleModule.fail_json accepts keyword arguments only; the
        # original positional call raised TypeError instead of reporting the
        # validation error.
        module.fail_json(msg="You must specify route_table_id if lookup is set to id")
    try:
        # 'state' is restricted to present/absent by the argument spec, so
        # 'result' is always bound when we reach exit_json below.
        if state == 'present':
            result = ensure_route_table_present(connection, module)
        elif state == 'absent':
            result = ensure_route_table_absent(connection, module)
    except AnsibleRouteTableException as e:
        module.fail_json(msg=str(e))
    module.exit_json(**result)
| [
"eduardo.miranda.becerra3@gmail.com"
] | eduardo.miranda.becerra3@gmail.com |
4cf9d38b89dd6ddc114606327319518c90c9cd20 | 69d2627942a554d6914ba05de097a290fed66bad | /vb2py/vb/test1/test/frmRadio.py | 3288d55a11d256403ea27f0e3c916d0c08fe55bc | [
"BSD-3-Clause"
] | permissive | rayzamgh/sumurProjection | 0fcef39cc75e620057b012f1bd35cae1c49a5554 | 847ce71e85093ea5ee668ec61dbfba760ffa6bbd | refs/heads/master | 2020-07-23T23:33:26.621550 | 2019-12-22T05:31:24 | 2019-12-22T05:31:24 | 207,738,494 | 1 | 0 | null | 2019-10-28T16:00:07 | 2019-09-11T06:23:43 | Python | UTF-8 | Python | false | false | 1,383 | py | """The main form for the application"""
from PythonCard import model
# Allow importing of our custom controls
import PythonCard.resource
# Point PythonCard at the vb2py-generated custom control package.
PythonCard.resource.APP_COMPONENTS_PACKAGE = "vb2py.targets.pythoncard.vbcontrols"
class Background(model.Background):
    """Base PythonCard background for vb2py-translated VB forms.

    Unknown attribute lookups fall through to ``self.components``, and the
    translated VB ``Form_Load`` handler is invoked once the form is built.
    """

    def __getattr__(self, name):
        """Fall back to the form's components for unknown attribute names."""
        return getattr(self.components, name)

    def __init__(self, *args, **kw):
        """Initialise the base form, then fire the VB Form_Load handler."""
        model.Background.__init__(self, *args, **kw)
        # Call the VB Form_Load; the translator may have emitted it as a
        # name-mangled private method, so try that spelling first.
        # TODO: This is brittle - depends on how the private indicator is set
        for handler_name in ("_MAINFORM__Form_Load", "Form_Load"):
            if hasattr(self, handler_name):
                getattr(self, handler_name)()
                break
from vb2py.vbfunctions import *
from vb2py.vbdebug import *
class MAINFORM(Background):
    """vb2py-translated VB form (frmRadio).

    This form has radio buttons on it ... these are tougher than they look.
    """
    # The VB2PY markers below record VB attributes the translator could not
    # express in Python; they are kept verbatim for round-tripping.
    # VB2PY (UntranslatedCode) Attribute VB_Name = "frmRadio"
    # VB2PY (UntranslatedCode) Attribute VB_GlobalNameSpace = False
    # VB2PY (UntranslatedCode) Attribute VB_Creatable = False
    # VB2PY (UntranslatedCode) Attribute VB_PredeclaredId = True
    # VB2PY (UntranslatedCode) Attribute VB_Exposed = False
if __name__ == '__main__':
    # Start the PythonCard application with MAINFORM as the main window.
    app = model.Application(MAINFORM)
    app.MainLoop()
| [
"rayzaganteng@gmail.com"
] | rayzaganteng@gmail.com |
def intersection(a1, a2):
    """Print and return the sorted, de-duplicated union of two sorted lists.

    Despite its name, this computes the *union* (the file is array Union.py):
    every distinct value from either input, assuming both lists are sorted
    ascending.

    Fixed: the original drained only a2's tail after the merge loop, so any
    values left over in a1 were silently dropped (e.g. union([1, 5], [2])
    lost the 5); both tails are now drained, and a stray dead ``i += 1`` was
    removed. The result is still printed (original behaviour) and is now
    also returned so callers can use it.
    """
    i = j = 0
    a3 = []
    # Two-pointer merge; 'not in a3' skips duplicates on either side.
    while i < len(a1) and j < len(a2):
        if a1[i] < a2[j]:
            if a1[i] not in a3:
                a3.append(a1[i])
            i += 1
        elif a2[j] < a1[i]:
            if a2[j] not in a3:
                a3.append(a2[j])
            j += 1
        else:
            if a2[j] not in a3:
                a3.append(a2[j])
            j += 1
    # Drain whatever remains of a2 ...
    while j < len(a2):
        if a2[j] not in a3:
            a3.append(a2[j])
        j += 1
    # ... and (bug fix) whatever remains of a1.
    while i < len(a1):
        if a1[i] not in a3:
            a3.append(a1[i])
        i += 1
    print(a3)
    return a3

# Demo run: union of two sorted lists with duplicates.
arr1 = [1,2,2,4, 5,5,5,5, 6]
arr2 = [7,8,9,9]
intersection(arr1,arr2)
"noreply@github.com"
] | Jashwanth-k.noreply@github.com |
08d1ee573cccf0a07e66a575feeddc119ccfc64a | 0f1084acef945809693bdf975a735259e04ccda1 | /api/v1/utils.py | f0817e5514b4296c880c2650093c1c4e836096c8 | [
"Apache-2.0"
] | permissive | buyaoyongroot/1yancc | 5a6331b451178d7ed52bc906ce3ba55e44014415 | 7fffd389aa53b3b785ae99816b4cf76fae4f7779 | refs/heads/main | 2023-05-02T14:07:10.937875 | 2021-05-26T05:30:05 | 2021-05-26T05:30:05 | 370,237,985 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,729 | py | class utils():
#===============加密类=========================================
@staticmethod
def md5(str):
import hashlib
hl = hashlib.md5()
hl.update(str.encode(encoding='utf-8'))
return hl.hexdigest()
#===============随机类=========================================
@staticmethod
def _wc(list, weight):
import random
new_list = []
for i, val in enumerate(list):
for i in range(weight[i]):
new_list.append(val)
return random.choice(new_list)
@staticmethod
def rs(cc):
import random
return ''.join(random.sample('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', int(cc)))
@staticmethod
def hash():
import time,random
time12 = int(time.time()*1000)
rand04 = random.randint(1000,9999)
return utils.md5(str(time12)+str(rand04))
#===============时间类=========================================
    @staticmethod
    def time():
        """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
        import time
        return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
@staticmethod
def bool(arg):
arg=arg.lower()
if arg=='true':
return True
return False
#===============初始化类=========================================
@staticmethod
def init(fp):
import os,sys
ext=os.path.splitext(sys.argv[0])[1]
if ext=='.py':path=os.path.dirname(os.path.realpath(fp))
if ext=='.exe':path=os.path.dirname(os.path.realpath(sys.argv[0]))
path=path.replace('\\','/')+'/'
os.chdir(path)#修改工作目录
if __name__ == '__main__':
print(utils.time()) | [
"you@example.com"
] | you@example.com |
944c68a1a9440f836c823ee608689293920fa2e2 | 1dae87abcaf49f1d995d03c0ce49fbb3b983d74a | /programs/subroutines/Picture - Frame EoS optical.sub.py | 465057ffcc9faa79f61d88da52baebb028bd91cf | [] | no_license | BEC-Trento/BEC1-data | 651cd8e5f15a7d9848f9921b352e0830c08f27dd | f849086891bc68ecf7447f62962f791496d01858 | refs/heads/master | 2023-03-10T19:19:54.833567 | 2023-03-03T22:59:01 | 2023-03-03T22:59:01 | 132,161,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,825 | py | prg_comment = ""
# Subroutine format version consumed by the sequence loader.
prg_version = "0.5.1"
def program(prg, cmd):
    """Append the 'Picture - Frame EoS optical' event sequence to *prg*.

    Each entry is (time_us, channel[, value]); events are added in the
    original chronological order. *cmd* is unused. Returns the same *prg*.
    """
    events = (
        (-4403000, "Na Repumper1 (+) Amp", 1.000000),
        (-4393000, "K probe Repumper (+) Amp", 1.000000),
        (-4383000, "K Repumper 1p (+) Amp", 1.000000),
        (-4363000, "Na Dark Spot Amp", 1.000000),
        (-4353000, "Na Repumper MOT Amp", 1.000000),
        (-3033000, "Shutter Probe K Open"),
        (-3023000, "Shutter RepumperMOT K Open"),
        (-3013000, "Shutter repump Na Open"),
        (-2493000, "K probe Cooler (-) Amp", 1.000000),
        (-2030000, "Na 3D MOT cool (-) Amp", 1.000000),
        (-2020000, "Na 3D MOT cool (+) Amp", 1.000000),
        (-2000000, "Shutter 3DMOT cool Na Open"),
        (-5400, "K probe Cooler (-) freq", 99.500000),
        (-5000, "K Cooler 2p (+) freq", 97.500000),
        (-4600, "K Repumper 1p (+) Amp", 1000.000000),
        (-4200, "K Repumper 1p (+) freq", 115.000000),
        (-3800, "K Repumper 2p (+) freq", 96.000000),
        (-2500, "Na Repumper MOT Amp", 1000.000000),
        (-2000, "Na Repumper1 (+) Amp", 1000.000000),
        (-1600, "Na Repumper Tune (+) freq", 1713.000000),
        (-500, "Trig ON Stingray 1"),
        (-400, "Na Probe/Push (+) freq", 110.000000),
        (0, "Na Probe/Push (-) freq", 110.000000),
        (1000, "Na Probe/Push (-) freq", 150.000000),
        (1500, "Na Probe/Push (+) freq", 150.000000),
        (2000, "Trig OFF Stingray 1"),
        (250000, "Shutter Probe Na Close"),
        (260000, "Shutter Probe K Close"),
        (1010000, "Na Repumper MOT Amp", 1.000000),
        (1020000, "Na Repumper1 (+) Amp", 1.000000),
        (1030000, "K Repumper 1p (+) Amp", 1.000000),
        (4000000, "B comp y", 0.000000),
        (4010000, "IGBT B comp y OFF"),
    )
    for event in events:
        prg.add(*event)
    return prg
| [
"carmelo.mordini@unitn.it"
] | carmelo.mordini@unitn.it |
da30d27b794a48599890a4d80aa872a519b86a7a | 2b468b1d22ecc5668529255676a1d43936829074 | /codes/personal_backend/support/simulate/tool/data/address.py | 898750e6de1924b3581f7435da7fb86ce759b3a7 | [] | no_license | MaseraTiGo/4U | 5ac31b4cccc1093ab9a07d18218c3d8c0157dc9c | f572830aa996cfe619fc4dd8279972a2f567c94c | refs/heads/master | 2023-07-26T09:44:21.014294 | 2023-07-13T03:43:34 | 2023-07-13T03:43:34 | 149,217,706 | 0 | 0 | null | 2020-06-05T20:38:16 | 2018-09-18T02:34:29 | Python | UTF-8 | Python | false | false | 474,045 | py | # coding=UTF-8
ADDRES_TEMPLATES = [
["上海","上海","阳光港餐厅","松江区佘山镇林湖路888号","021-67657001"],
["上海","上海","思贤餐厅","松江区思贤路778--780号","021-37698010"],
["上海","上海","泗宝餐厅","松江区泗泾镇横港路61号","021-37008123"],
["上海","上海","江川餐厅","松江区泗泾镇江川北路263-265号","021-57843762"],
["上海","上海","谷阳餐厅","松江区松汇中路568号上海鹿都商业广场","021-57823515"],
["上海","上海","沪松餐厅","松江区松江荣乐东路1595号","021-67743015"],
["上海","上海","大学城餐厅","松江区文汇路818号","021-67796287"],
["上海","上海","明兴餐厅","松江区新南路明兴路口家乐福新南路购物中心101号","021-67675737"],
["上海","上海","新桥餐厅","松江区新桥镇新南街377号","021-37623287"],
["上海","上海","松江新城餐厅","松江区新松江路935号","021-37668073"],
["上海","上海","叶榭餐厅","松江区叶榭镇求仁路149号1层","021-57806220"],
["上海","上海","松江餐厅","松江区中山中路99号","021-57720124"],
["上海","上海","新港汇餐厅","徐汇虹桥路1号五层536铺位","021-64079497"],
["上海","上海","光大餐厅","徐汇区漕宝路78号","021-34616605"],
["上海","上海","海光餐厅","徐汇区大木桥路370号","021-64432968"],
["上海","上海","秀山餐厅","徐汇区番禺路872号一层","021-33678231"],
["上海","上海","打浦桥餐厅","徐汇区轨道交通七号线东安路站站厅层","021-64032548"],
["上海","上海","钦州餐厅","徐汇区桂林路39号一至三层","021-54480693"],
["上海","上海","虹梅地铁餐厅","徐汇区沪闵路8073号","021-28904220"],
["上海","上海","华泾餐厅","徐汇区华泾路509号7幢101商铺","021-54398037"],
["上海","上海","武宁餐厅","徐汇区淮海中路1298号","021-64747044"],
["上海","上海","新凌云餐厅","徐汇区凌云路365号1F,2F","021-24206097"],
["上海","上海","罗香餐厅","徐汇区罗香路235号","021-34080587"],
["上海","上海","柳州2餐厅","徐汇区南站站层(商铺号C102 )","021-63189188-70541"],
["上海","上海","漕东餐厅","徐汇区轻轨3号线漕溪站1楼","021-26031460"],
["上海","上海","动力南广场餐厅","徐汇区石龙路750-3号上海南站地下商场南馆","021-54358730"],
["上海","上海","南丹餐厅","徐汇区天钥桥路333号","021-61213613"],
["上海","上海","体育场餐厅","徐汇区天钥桥路668号","021-54049351"],
["上海","上海","田林餐厅","徐汇区田林路1号","021-34140090"],
["上海","上海","新宜街餐厅","徐汇区宜山路700号84幢A1号楼商铺101-102","021-54270150"],
["上海","上海","凯旋餐厅","徐汇区宜山路站一层宜山-3-101号","021-26031457"],
["上海","上海","新美罗餐厅","徐汇区肇嘉浜路1111号","021-64267901"],
["上海","上海","恒丰餐厅","徐汇区肇嘉浜路998号","021-64269045"],
["上海","上海","国顺餐厅","杨浦区国顺东路188号","021-65488208"],
["上海","上海","邯郸餐厅","杨浦区邯郸路585号地下一层B-2、B-8-1、B-8-3","021-60835885"],
["上海","上海","万达餐厅","杨浦区邯郸路608弄50号","021-65102429"],
["上海","上海","大润发餐厅","杨浦区黄兴路1616号","021-55580777"],
["上海","上海","源泉餐厅","杨浦区控江路1100号","021-65433763"],
["上海","上海","沪东餐厅","杨浦区平凉路1500号","021-35120655"],
["上海","上海","怀德餐厅","杨浦区平凉路675号","021-65894727"],
["上海","上海","控江餐厅","杨浦区双辽路768号","021-55570263"],
["上海","上海","四平餐厅","杨浦区四平路903号","021-65520098"],
["上海","上海","淞沪餐厅","杨浦区淞沪路8号7楼","021-55227827"],
["上海","上海","长海餐厅","杨浦区长海路359、361、363号","021-55781017"],
["上海","上海","政民餐厅","杨浦区政民路300号","021-35120071"],
["上海","上海","嫩江餐厅","杨浦区中原路102号","021-65585177"],
["上海","上海","中原餐厅","杨浦区中原路188号","021-35031075"],
["上海","上海","周家嘴餐厅","杨浦区周家嘴路4295号","021-65804017"],
["上海","上海","保德餐厅","闸北区保德路88号","021-66821969"],
["上海","上海","共和新餐厅","闸北区共和新路1968号","021-66523451"],
["上海","上海","共和餐厅","闸北区共和新路3318号","021-56036758"],
["上海","上海","朋普餐厅","闸北区共和新路4460号","021-56483484"],
["上海","上海","永安餐厅","闸北区广中西路758号","021-56373800"],
["上海","上海","临汾餐厅","闸北区临汾路700号","021-66820911"],
["上海","上海","海光餐厅","闸北区洛川东路221号","021-56334891"],
["上海","上海","南广场餐厅","闸北区秣陵路303号","021-51013855"],
["上海","上海","新南广餐厅","闸北区秣陵路303号上海火车站东南出口东侧一层","021-51013501"],
["上海","上海","中兴餐厅","闸北区秣陵路303号新客站北广场站屋一层","021-51013206"],
["上海","上海","长寿餐厅","闸北区虬江路760号","021-56322647"],
["上海","上海","中亚餐厅","闸北区天目西路235号","021-63533312"],
["上海","上海","延长餐厅","闸北区延长中路312号","021-56773023"],
["上海","上海","体育场餐厅","闸北区芷江西路398号","021-56379299"],
["上海","上海","中华新餐厅","闸北区中华新路567号","021-36532971"],
["上海","上海","沪太餐厅","闸北区中山北路1015号","021-56536437"],
["上海","上海","虹凯餐厅","长宁区轨道交通虹桥路站","021-51186311"],
["上海","上海","宋园餐厅","长宁区虹桥路1286号","021-32233292"],
["上海","上海","威宁餐厅","长宁区天山路352号101和201","021-33537672"],
["上海","上海","泓鑫餐厅","长宁区天山路762号泓鑫时尚广场1F","021-52069601"],
["上海","上海","新天山餐厅","长宁区天山路900号118号商铺1层+B1层","021-61279676"],
["上海","上海","仙霞餐厅","长宁区仙霞路606号","021-62423574"],
["上海","上海","曹杨餐厅","长宁区仙霞西路88号","021-52197957"],
["上海","上海","曼克顿餐厅","长宁区长宁路1018号8楼","021-61155255"],
["上海","上海","闵行餐厅","长宁区长宁路1018号B1层","021-61155211"],
["上海","上海","年家浜餐厅","周浦镇沪南公路3449弄1幢32号236-1,236-2号、240号108","021-38231307"],
["上海","上海","大宁国际广场餐厅","闸北区共和新路1898号110、111、113-2、118室","13621700520"],
["上海","上海","中远餐厅","普陀区远景路616号一层107、108号商铺、二层209号商铺","13761694391"],
["上海","上海","嘉定","嘉定区清河路8号","021-59536668"],
["上海","上海","延吉","杨浦区靖宇东路270号","021-65491467"],
["上海","上海","罗山","浦东新区博山东路50号","021-38821488"],
["上海","上海","聚丰","宝山区聚丰园路205号","021-66164757"],
["上海","上海","季景","浦东新区外高桥夏碧路261号","021-58626137"],
["上海","上海","新北新泾","长宁区天山西路181号","021-52172635"],
["上海","上海","江苏","长宁区江苏路398号1、2层","021-32505629"],
["上海","上海","柳州","徐汇区上海南站站厅层","021-63189188-610540"],
["上海","上海","高铁北精选店","铁路上海虹桥站出发层商铺号3F-A3-1、3F-A4-1","21-51516371"],
["上海","上海","高铁精选3店","上海虹桥站到达层商铺号B1-北13,B1-北14","51516378"],
["上海","上海","永辉精选店","松江区广富林路658弄666号松江万达广场B1层永辉超市商铺号:Q0342006、Q0342007","13472416278"],
["上海","上海","永业","黄浦区蒙自路25-27号1层及29-31号一层A商铺","021-33311595"],
["上海","上海","石龙","徐汇区柳州路36号1幢","021-34140295"],
["上海","上海","高东","浦东新区高东镇杨园新村街道12/4丘","021-50807535"],
["上海","上海","瞿溪","黄浦区瞿溪路477号","021-63297590"],
["上海","上海","新金汇店","奉贤区金汇镇金闸公路599号1412、1413号","21-"],
["上海","上海","吾盈店","青浦区淀山湖大道150弄青浦吾悦广场1F-023-1、2F-024","021-59250017"],
["上海","上海","佘山店","佘山镇桃源路190号2层192、194号1-2层","021-37632071"],
["上海","上海","航头店","沪南公路5588-5592号一层","021-33890163"],
["上海","上海","同乐店","松江区洞泾镇同乐路399号6幢号一层 1F05-07号","021-57659310"],
["上海","上海","都市店","闵行区都市路3759号1楼121A","021-33586575"],
["上海","上海","新南方商城店","闵行区沪闵路7388号","021-52237585"],
["上海","上海","浦东机场T1出发店","浦东国际机场T1国际公众区南端集中餐饮区域2号店铺","13651620515"],
["上海","上海","芮欧店","静安区南京西路1601号B2层美食广场B201室","021-62173562"],
["上海","上海","隆昌店","杨浦区隆昌路586号-101商铺","13564151899"],
["上海","上海","新龙阳店","浦东新区龙阳路2000号1-2层A-1铺","021-50808275"],
["上海","上海","顾村公园店","宝山区陆翔路101号-A、103号-A、107号-A、陆翔路111弄10号204室-A","13641941428"],
["上海","上海","宝龙店","奉贤区航南公路5639号1F042、043号,2F021a、030号","15000865553"],
["上海","上海","磁浮精选店","浦东新区机场站磁悬浮站第二层编号为A07","021-68352427"],
["上海","上海","上海滩店","松江区佘山镇林湖路888号","021-67657006"],
["上海","上海","金矿镇店","松江区佘山镇林湖路888号","021-67657003"],
["上海","上海","太平洋精选店","徐汇区衡山路932号地下一层","021-64078913"],
["上海","上海","港汇精选店","徐汇区虹桥路1号B1层(B108M)铺位","021-64075830"],
["上海","上海","虹桥连廊SEL","闵行区申昆路1500号3层CF322A","021-54730082"],
["上海","上海","动力南精选店","徐汇区沪闵路9001号上海南站(1F三角地A-1)一层","021-31251000"],
["上海","上海","南站精选店","徐汇区沪闵路9001号上海南站候车大厅","021-51100136"],
["天津","天津","科贸街餐厅","鞍山西道与南丰路交口时代大厦","022-58698219"],
["天津","天津","劝宝餐厅","宝坻区城关镇南关大街85号","022-29232611"],
["天津","天津","天宝餐厅","宝坻区环城南路与开元路交口怡购商业广场一层","022-82686711"],
["天津","天津","奥园餐厅","北辰区辰昌路与龙泉道交口乐天超市一层","022-26652309"],
["天津","天津","集贤餐厅","北辰区京津公路以东东延吉道北侧以北新世纪集贤购物广场","022-58072939"],
["天津","天津","大通餐厅","北辰区马庄物美生活广场一层","022-26873752"],
["天津","天津","团结餐厅","大港区大港油田团结路滨海商厦东门处一、二层","022-25917774"],
["天津","天津","胜利餐厅","大港区胜利街663号一商友谊商厦底商一层","022-63381547"],
["天津","天津","迎宾餐厅","大港区世纪大道118号","022-63397491"],
["天津","天津","彩虹餐厅","大港区幸福路与开发道交口西南角幸福广场","022-25979721"],
["天津","天津","军粮城餐厅","东丽区八号桥军粮城示范镇军丽园29号楼底商","022-84854828"],
["天津","天津","机场出发层餐厅","东丽区滨海国际机场T2航站楼","022-24906835"],
["天津","天津","欢乐谷餐厅","东丽区东丽湖度假村迎乐路1号","022-58615227"],
["天津","天津","汉友商厦餐厅","汉沽区河西三经路与四纬路交口一商友谊广场华润万家底商一、二层","022-67215522"],
["天津","天津","汉沽餐厅","汉沽区新开南路8号汉沽百货大楼一层","022-25696016"],
["天津","天津","劝业场餐厅","和平路290号劝业商厦一层","022-27215632"],
["天津","天津","麦购餐厅","和平区滨江道与山西路交口东侧 麦购休闲广场","022-27119829"],
["天津","天津","乐宾餐厅","和平区南京路与滨江道交口庄吉商业中心(乐宾百货)一层及地下一层","022-27168266"],
["天津","天津","和平餐厅","和平区万达商业广场A-26号","022-85586902"],
["天津","天津","小白楼餐厅","和平区浙江道33号","022-23390107"],
["天津","天津","金纬餐厅","河北区金纬路337-339号","022-26359860"],
["天津","天津","东站餐厅","河北区进步道15号","022-24465735"],
["天津","天津","友谊新都餐厅","河北区狮子林大街200号友谊新都商厦一楼底商","022-60861658"],
["天津","天津","王串场餐厅","河北区王串场五号路20号一层","022-26418035"],
["天津","天津","中纬餐厅","河北区中山路与昆纬路交口(联华超市中山路店)首层","022-26279935"],
["天津","天津","津滨餐厅","河东区津滨大道万达广场","022-24189075"],
["天津","天津","津工餐厅","河东区津塘公路79号1层","022-24313162"],
["天津","天津","十五经路餐厅","河东区津塘公路与15经路交口","022-58220881"],
["天津","天津","津塘餐厅","河东区津塘路23号","022-60336795"],
["天津","天津","卫昆餐厅","河东区卫国道与尖山路交口华润超市一层","022-24689935"],
["天津","天津","体北餐厅","河西区宾水道75号","022-23916322"],
["天津","天津","图书大厦餐厅","河西区大沽南路362号天津图书大厦一 二层","022-23265837"],
["天津","天津","南楼餐厅","河西区大沽南路820号","022-28320061"],
["天津","天津","柳林餐厅","河西区大沽南路与长湖路交口天邦购物乐园1层","022-88189801"],
["天津","天津","九龙餐厅","河西区广东路与绍兴道交口(联华超市广东路店)首层","022-23289332"],
["天津","天津","珠江装饰城餐厅","河西区解放南路475号珠江友谊国际家居购物中心首层","022-88259030"],
["天津","天津","第六田园餐厅","河西区解放南路478号","022-28285721"],
["天津","天津","乐园道餐厅","河西区乐园道9号彩悦城阳光乐园购物中心首层1A-14","022-83885509"],
["天津","天津","银河国际餐厅","河西区乐园道南侧、银河国际购物中心B1-059","022-83887096"],
["天津","天津","新围堤道餐厅","河西区南北大街1号嘉贸购物中心·天津湾","022-88269394"],
["天津","天津","黑牛城道餐厅","河西区友谊路新业广场一层肯德基","022-59586701"],
["天津","天津","丁字沽餐厅","红桥区本溪路19号","022-26510087"],
["天津","天津","东北角餐厅","红桥区大胡同38号","022-27011369"],
["天津","天津","红旗餐厅","红桥区西青道58号","022-27725850"],
["天津","天津","西站高架层餐厅","红桥区西站候车大厅二楼","022-58083011"],
["天津","天津","西客站餐厅","红桥区西站客运站二楼","022-58083071"],
["天津","天津","华明餐厅","华明镇四纬一支路香园9号楼,一 二层","022-58093931"],
["天津","天津","兴华大街餐厅","蓟县城关镇兴华购物街1号一、二,三层","022-29116732"],
["天津","天津","渔阳餐厅","蓟县鼓楼广场两侧商贸43号、44号一、二层","022-82718288"],
["天津","天津","葛沽餐厅","津南区津沽路与规划八路交口","022-28753238"],
["天津","天津","津沽餐厅","津南区咸水沽津沽公路57号华润万家超市底商一层","022-28541025"],
["天津","天津","月坛餐厅","津南区咸水沽镇津沽路57号月坛商厦一层","022-28548313"],
["天津","天津","鸿泰餐厅","经济技术开发区第三大街与巢湖路交口鸿泰商业广场底商商铺","022-66290830"],
["天津","天津","东方红餐厅","静海县东方红路中段北侧家世界商业广场一层","022-68924888"],
["天津","天津","金桥国贸餐厅","静海县胜利大街金桥金桥国贸购物中心一、二层","022-68226759"],
["天津","天津","怡安餐厅","静海县胜利大街怡安购物广场一层","022-28919092"],
["天津","天津","海光寺餐厅","南京路与南门外大街交口星运商城一层","022-27239037"],
["天津","天津","天环餐厅","南开区红旗路242号华润万家超市天环店一层","022-87610815"],
["天津","天津","华苑餐厅","南开区华苑路106号(天华里商业中心)","022-23727513"],
["天津","天津","大悦城商厦餐厅","南开区南门外大街2号B区B1-43/B1-49b、B1-50h号商铺","022-58100322"],
["天津","天津","东马路餐厅","南开区水阁大街与东马路交口玉鼎商业楼2号楼","022-87358031"],
["天津","天津","西南角餐厅","南开区西马路与黄河道交口","022-27568266"],
["天津","天津","王顶堤餐厅","南开区迎水道59号","022-23697639"],
["天津","天津","长江道餐厅","南开区长江道92号","022-87603931"],
["天津","天津","南口路餐厅","南口路与榆关道交口\300141","022-26489122"],
["天津","天津","芦台餐厅","宁河县芦台镇商业道43号一层","022-69567785"],
["天津","天津","幸福餐厅","宁河县芦台镇商业道南段西侧幸福商业广场A区-101","022-69119761"],
["天津","天津","金地餐厅","双港津沽公路金地格林小城梧桐苑33号楼","022-88561409"],
["天津","天津","福州道餐厅","塘沽区福州道916号","022-66315375"],
["天津","天津","滨海餐厅","塘沽区和平路503号","022-25862378"],
["天津","天津","新洋餐厅","塘沽区河北路1629号","022-25863712"],
["天津","天津","河北路餐厅","塘沽区河北路5348号工农村人人乐底商","022-66355182"],
["天津","天津","巨川餐厅","塘沽区解放路1121号","022-25855012"],
["天津","天津","新百餐厅","塘沽区上海道888号","022-25863250"],
["天津","天津","秀谷餐厅","塘沽区中心北路与津塘公路交口易买得超市一层","022-66315352"],
["天津","天津","小站餐厅","天津津南区小站镇津岐公路西华润万家生活广场","022-88632825"],
["天津","天津","海泰餐厅","天津经济技术开发区黄海路98号B区101室一层","022-66298821"],
["天津","天津","天津站副广场餐厅","天津站交通枢纽工程(前广场地下部分)负一层","022-24451041"],
["天津","天津","北出站口餐厅","天津站交通枢纽后广场轨道换乘中心地下一层C001号","022-86788851"],
["天津","天津","天津站餐厅","天津站南站房D区二层","022-60532009"],
["天津","天津","候车大厅精选餐厅","天津站南站房二层候车大厅10、12检票口之间","022-60533027"],
["天津","天津","学府餐厅","卫津路155号博联大厦1层","022-23550027"],
["天津","天津","雍阳餐厅","武青区京津公路中信广场C区","022-22204811"],
["天津","天津","京津广场餐厅","武清区103国道东侧雍阳东道北侧津京时尚广场5区商业楼","022-59694551"],
["天津","天津","中房餐厅","武清区京津公路京津时尚广场1号地","022-59691310"],
["天津","天津","泉旺路餐厅","武清区开发区泉旺路武清海达家乐超市一店一、二层","022-29395505"],
["天津","天津","佛罗伦萨餐厅","武清区前进道北侧佛罗伦萨","022-59698230"],
["天津","天津","友谊南路餐厅","西青开发区友谊南路与梨双路交口肯德基","022-87975673"],
["天津","天津","龙居花园餐厅","西青区大寺镇榆林路交口龙居生活广场肯德基","022-58710036"],
["天津","天津","大学城餐厅","西青区工一号路","022-87186871"],
["天津","天津","名众商厦餐厅","西青区杨柳青柳口路名众商厦一层","022-87181138"],
["天津","天津","广汇餐厅","西青区杨柳青镇商贸街B座一层","022-27910113"],
["天津","天津","曹庄餐厅","西青区中北镇永旺梦乐城天津中北购物中心一层","022-87195679"],
["天津","天津","中北餐厅","西青区中北镇中北大道物美超市底商","022-58890208"],
["天津","天津","大岛餐厅","友谊南路梅江南居住区公共中心区大岛商业广场一层","022-58897228"],
["天津","天津","津汇","南京路与营口道交口","022-83191837"],
["天津","天津","紫金","紫金山路与黑牛城道交口","022-23382772"],
["天津","天津","奥城","宾水西道与凌宾路交口西南侧奥城商业广场3号楼A1底商","022-23389222"],
["天津","天津","万新","河东区东新街天山路与程林庄路交口一、二层","022-24710769"],
["天津","天津","滨海机场","东丽区滨海国际机场新航站楼国际进港2号","022-24906823"],
["天津","天津","中山门","河东区中山门中心东道101-306","022-84265655"],
["天津","天津","永旺","经济技术开发区泰达大街与东海路交叉口永旺梦乐城天津泰达购物中心","022-59857152"],
["天津","天津","新海湾","河西区友谊南路与珠江道交口西侧","022-88389211"],
["天津","天津","中山北路","河北区中山北路50号","022-58779771"],
["天津","天津","太阳城","河东区丽苑太阳城清霖园一区4号楼","022-24696932"],
["天津","天津","泗水道","津南区微山路与泗水道交口","022-88118569"],
["天津","天津","招远DT","东丽区津塘路与招远路交口新世嘉大厦项目1号楼","022-24398692"],
["天津","天津","普济河道DT","北辰区普济河东道34号","022-86239171"],
["天津","天津","迎水道餐厅","天津市新产业园区华苑产业区迎水道138号","022-23859239"],
["天津","天津","张家窝餐厅","西青区张家窝镇辛老路物美超市一层","022-87981433"],
["天津","天津","金耀店","天津市和平区南门外大街金耀广场一层","022-27352199"],
["天津","天津","八里台店","天津市 南开区卫津南路新文化广场一层","022-23529950"],
["天津","天津","珠峰店","天津市河西区曲江路珠峰商场一层","022-28173720"],
["天津","天津","泛洋店","红桥区丁字沽一号路泛洋大厦底商KFC","022-26372307"],
["天津","天津","恒华店","天津河西区大沽南路恒华大厦肯德基","022-58313250"],
["天津","天津","卫国道店","天津河东区卫国道132号物美大厦肯德基","022-24551617"],
["天津","天津","北辰店","天津市北辰区京津公路352增11号","022-86819928"],
["天津","天津","咸阳路店","天津市红桥区咸阳北路物美超市一层","022-87786616"],
["天津","天津","双街店","天津市北辰区双新大道永辉超市一层","022-58072931"],
["天津","天津","云翔店","天津市和平区西康路72号","022-23262522"],
["天津","天津","泰达城店","河北大街与新三条石大街交口泰达城一楼肯德基餐厅","022-87585061"],
["天津","天津","团结","天津市滨海新区(原大港区)大港油田团结路266号一二层(肯德基)","022-25917774"],
["天津","天津","靖江路店KFC","天津市河北区江都路街增产道8号一层","022-26152122"],
["天津","天津","北宁湾餐厅","河北区迎贤道183号北宁湾底商KFC","022-58609620"],
["天津","天津","嘉华餐厅","河东区新开路与华昌大街交口嘉华商业中心一层","022-24416760"],
["重庆","重庆","重庆机场餐厅","重庆机场新航站楼一楼到达厅南侧","023-88869205"],
["重庆","重庆","万象城餐厅","重庆九龙坡区谢家湾正街55号","023-68428500"],
["重庆","重庆","李家沱餐厅","重庆市巴南区李家沱马王坪正街都和广场1楼","023-62586046"],
["重庆","重庆","金开大道餐厅","重庆市北部新区金开大道70号星光天地一层27号商铺","023-68009132"],
["重庆","重庆","龙头寺汽车站餐厅","重庆市北部新区泰山大道中段龙头寺汽车站","023-88196699"],
["重庆","重庆","国瑞餐厅","重庆市大渡口区松青路1029#国瑞城1层肯德基餐厅","023-68156160"],
["重庆","重庆","袁家岗餐厅","重庆市高新区袁家岗奥体路1号","023-68089729"],
["重庆","重庆","观音桥餐厅","重庆市江北区观音桥步行街新世界百货一楼","023-67743575"],
["重庆","重庆","茂业餐厅","重庆市江北区建新北路16号G-1层","023-67523721"],
["重庆","重庆","金观音餐厅","重庆市江北区建新西路2号特2金观音广场一楼","023-67863571"],
["重庆","重庆","北城天街餐厅","重庆市江北区洋河路6、8号北城天街4号LG层","023-67013855"],
["重庆","重庆","九龙广场餐厅","重庆市九龙坡区九龙广场九龙坡购物广场一楼","023-68789849"],
["重庆","重庆","西城天街餐厅","重庆市九龙坡区杨家坪珠江路48#西城天街平层","023-68126080"],
["重庆","重庆","元旦餐厅","重庆市南岸区南坪惠工路13号元旦百货","023-62602199"],
["重庆","重庆","万达广场餐厅","重庆市南岸区南坪万达商业广场一层","023-62622593"],
["重庆","重庆","上海城餐厅","重庆市南岸区南坪西路,上海城一层","023-62626396"],
["重庆","重庆","易诚餐厅","重庆市沙坪坝区凤天路29号(西南医院对面)","023-65481305"],
["重庆","重庆","华宇餐厅","重庆市沙坪坝区小龙坎新街29号","023-65401367"],
["重庆","重庆","三角碑餐厅","重庆市沙坪坝区小龙坎新街49号王府井百货1、2层","023-65309725"],
["重庆","重庆","石桥铺餐厅","重庆市石桥铺科园一路大西洋国际大厦二楼","023-68790377"],
["重庆","重庆","斌鑫餐厅","重庆市杨家坪商业步行街斌鑫世纪城F101-F201","023-68052850"],
["重庆","重庆","星湖路餐厅","重庆市渝北区杜松路永辉超市-1楼(北环红星美凯龙旁)","023-67305118"],
["重庆","重庆","得意世界餐厅","重庆市渝中区较场口87号B区一楼1号一层","023-63634165"],
["重庆","重庆","较场口餐厅","重庆市渝中区较场口日月光广场负一楼","023-63715680"],
["重庆","重庆","地王餐厅","重庆市渝中区民族路王府井百货二楼","023-63711721"],
["重庆","重庆","大礼堂餐厅","重庆市渝中区人民路133号","023-63621692"],
["重庆","重庆","时代天街餐厅","重庆市渝中区石油路大坪正街174号","023-63003629"],
["重庆","重庆","民权餐厅","重庆渝中区民权路1号","023-63730377"],
["重庆","重庆","重庆火车北站南广场店","渝北区火车北站站前广场负一层27号","13500360430"],
["重庆","重庆","巴南万达","巴南区龙海大道297号","18580542264"],
["重庆","重庆","大学城","沙坪坝区景苑路8号附15号","18669040412"],
["重庆","重庆","东和餐厅","重庆市江北区观音桥北城天街上段33号,东和城B2栋二层","023-67873026"],
["浙江","杭州","杭州东站东广场餐厅","钱江新城火车东站枢纽东广场地下一层C002商铺","0571-87635290"],
["浙江","杭州","杭州北站餐厅","杭州市拱墅区莫干山路1165号汽车北站对面莫干山路1165号","0571-85354031"],
["浙江","杭州","莫干山物美餐厅","杭州市拱墅区莫干山路851号物美超市一、二层","0571-88094253"],
["浙江","杭州","拱墅万达餐厅","杭州市杭行路666号","0571-88130013"],
["浙江","杭州","杭州上乘餐厅","杭州近江路好又多一楼","0571-86512865"],
["浙江","杭州","三墩厚仁餐厅","杭州市厚仁路313-375号","0571-87037692"],
["浙江","杭州","余杭禹航餐厅","杭州市余杭区余杭镇禹航路908号","0571-89055005"],
["浙江","杭州","西溪印象城精选餐厅","五常大道和 天目山路交叉口,西溪印象城","0571-88732186"],
["浙江","杭州","杭州西田餐厅","杭州余杭区良渚街道莫干山路1509号一层1F014及二层2F007号商铺","0571-89359701"],
["浙江","杭州","杭州莲花餐厅","杭州市西湖区莲花街333号莲花广场一楼","0571-86094639"],
["浙江","杭州","杭州星民餐厅","杭州滨江区丹枫路265号","0571-85784169"],
["浙江","杭州","杭州城站餐厅","上城区城站广场北二楼","0571-87805919"],
["浙江","杭州","杭州东站1餐厅","杭州市江干区新火车东站售票大厅隔层","0571-85784192"],
["浙江","杭州","杭州东站2餐厅","杭州市新火车东站候车大厅隔层(东)","0571-86011202"],
["浙江","杭州","淳安千岛湖餐厅","淳安新安大街78号","0571-64827777"],
["浙江","杭州","杭州复兴餐厅","复兴路103号喜盈盈宾馆一楼","0571-86581681"],
["浙江","杭州","富阳玉长城餐厅","富阳市富春街道后周村春秋北路271号玉长城商业广场","0571-63313091"],
["浙江","杭州","富阳东方茂餐厅","富阳市富春街道金桥北路1号1050号商铺","0571-63371023"],
["浙江","杭州","富阳春秋餐厅","富阳市桂花西路68号世纪大厦一层","0571-63332874"],
["浙江","杭州","富阳龙山餐厅","富阳市龙山路126号物美超市一楼肯德基龙山餐厅","0571-61700516"],
["浙江","杭州","富阳新登餐厅","富阳市新登镇新兴路51号","0571-63371380"],
["浙江","杭州","杭州上塘餐厅","拱墅上塘镇大关路北侧杭州欧尚超市","0571-88011732"],
["浙江","杭州","杭州九堡餐厅","杭海路878号金海城一楼","0571-86909117"],
["浙江","杭州","中赢国际餐厅","杭州滨江区江晖路1266号1268号1270号","0571-86534586"],
["浙江","杭州","浦沿餐厅餐厅","杭州滨江区浦沿镇浦沿路和滨文路交叉路口","0571-86616173"],
["浙江","杭州","杭州双菱餐厅","杭州凤起东路58号","0571-86432256"],
["浙江","杭州","杭州武林服饰城餐厅","杭州凤起路519号3幢","0571-87039487"],
["浙江","杭州","杭州红石餐厅","杭州拱墅区湖墅南路216-218号","0571-88370120"],
["浙江","杭州","杭州火车站餐厅","杭州火车站广场南二楼","0571-87049252"],
["浙江","杭州","笕桥物美餐厅","杭州机场路371号物美超市一楼","0571-86908131"],
["浙江","杭州","庆春银泰餐厅","杭州江干区太平门直街银泰百货一楼","0571-86533298"],
["浙江","杭州","杭州运河餐厅","杭州金华路与楚州街交叉口运河广场地下一层","0571-88162340"],
["浙江","杭州","杭州下沙宝龙餐厅","杭州经济技术开发区宝龙商业中心3号楼1F-017,2F-016-B","0571-86032520"],
["浙江","杭州","杭州福雷德餐厅","杭州经济技术开发区学林街99号福雷德广场","0571-86628209"],
["浙江","杭州","半山中联餐厅","杭州市半山镇半山路中联百货一楼","0571-88146021"],
["浙江","杭州","星光大道餐厅","杭州市滨江区江南大道228号星光大道商业街一楼","0571-88831751"],
["浙江","杭州","和平饭餐厅餐厅","杭州市朝晖路108号和平饭店肯德基","0571-87662001"],
["浙江","杭州","杭州德胜餐厅","杭州市德胜路34号","0571-85375014"],
["浙江","杭州","杭州湖滨餐厅","杭州市东坡路8号","0571-87914523"],
["浙江","杭州","杭州信义坊餐厅","杭州市拱墅区湖墅南路488号","0571-88062769"],
["浙江","杭州","杭州水晶城餐厅","杭州市拱墅区上塘路458号水晶城购物中心一楼","0571-88131573"],
["浙江","杭州","杭州四季青餐厅","杭州市航海路98-108号二楼","0571-86504770"],
["浙江","杭州","杭州江城餐厅","杭州市江城路558号","0571-86076949"],
["浙江","杭州","丁桥物美餐厅","杭州市江干区大农港路与三号街交叉口东北角一层","0571-86685772"],
["浙江","杭州","华润万象城餐厅","杭州市江干区富春路杭州万象城第B1层B163号","0571-89705780"],
["浙江","杭州","杭州庆丰餐厅","杭州市教工路23号","0571-89922520"],
["浙江","杭州","杭州永安餐厅","杭州市解放路199号","0571-87132253"],
["浙江","杭州","杭州新解百餐厅","杭州市解放路249号(国货路浣纱路交叉口)","0571-87024593"],
["浙江","杭州","杭州下沙和达城餐厅","杭州市经济技术开发区2号大街993号","0571-87981902"],
["浙江","杭州","杭州萍水餐厅","杭州市萍水街新武林商业中心","0571-86042579"],
["浙江","杭州","杭州采荷餐厅","杭州市庆春东路57号","0571-86948442"],
["浙江","杭州","西湖时代餐厅","杭州市庆春路227号,地下一楼4号地上一楼6号","0571-87215747"],
["浙江","杭州","杭州庆春餐厅","杭州市庆春路70号","0571-87218955"],
["浙江","杭州","杭州秋涛餐厅","杭州市秋涛北路248号华润超市一楼","0571-86620120"],
["浙江","杭州","杭州钱江餐厅","杭州市上塘路445号一楼","0571-85268449"],
["浙江","杭州","杭州天水餐厅","杭州市体育场路269号","0571-85153494"],
["浙江","杭州","杭州万象餐厅","杭州市体育场路428-2号","0571-85164577"],
["浙江","杭州","杭州剧院餐厅","杭州市体育场路武林广场29号杭州剧院南侧","0571-85062936"],
["浙江","杭州","杭州天成餐厅","杭州市天城路86号","0571-86456723"],
["浙江","杭州","杭州文一餐厅","杭州市文一路298号","0571-88919961"],
["浙江","杭州","转塘华润餐厅","杭州市西湖区转塘镇美院南路华润超市1楼","0571-86011456"],
["浙江","杭州","石桥大润发餐厅","杭州市下城区石桥路与杭玻路交叉口西南角","0571-85127576"],
["浙江","杭州","浙江奥特莱斯餐厅","杭州市下沙启潮路199号奥特莱斯广场","0571-87358064"],
["浙江","杭州","杭州阳光餐厅","杭州市下沙区4号大街15-2新美商场一楼","0571-86725795"],
["浙江","杭州","萧山义蓬乐购餐厅","杭州市萧山区义蓬镇英冠天地华润万家一楼","0571-82325083"],
["浙江","杭州","临平中都餐厅","杭州市余杭区临平北大街1号","0571-86144194"],
["浙江","杭州","杭州西城餐厅","杭州西湖区文新路551号西城广场一二楼","0571-88489098"],
["浙江","杭州","萧山拱秀餐厅","杭州萧山城乡镇拱秀路149号","0571-82723526"],
["浙江","杭州","萧山机场餐厅","杭州萧山国际机场候机楼一楼到达厅内","0571-87683642"],
["浙江","杭州","萧山万佳餐厅","杭州萧山区市心中路金城广场","0571-82801992"],
["浙江","杭州","萧山香江餐厅","杭州萧山萧绍路香江大厦1-2楼","0571-82630867"],
["浙江","杭州","浙江大酒餐厅餐厅","杭州延安路595号","0571-88490783"],
["浙江","杭州","转塘世纪联华餐厅","杭州转塘江口大厦世纪联华一楼","0571-86037130"],
["浙江","杭州","建德新安餐厅","建德市新安路161号","0571-64783280"],
["浙江","杭州","杭州新塘乐购餐厅","江干区新塘路29-31号","0571-81604657"],
["浙江","杭州","下沙星尚城餐厅","经济开发区白杨街道4号大街505号星尚城项目一层","0571-86920480"],
["浙江","杭州","新塘世纪联华餐厅","景芳路与新塘路交汇处西南角新城时代广场一层","0571-86095110"],
["浙江","杭州","临安越华餐厅","临安城中街广场花园1号楼","0571-63961517"],
["浙江","杭州","临安万华餐厅","临安市锦城街道钱王大街855号","0571-61090451"],
["浙江","杭州","临安国贸餐厅","临安市锦城镇衣锦街448号国贸大厦1楼","0571-63921677"],
["浙江","杭州","临安钱王餐厅","临安市钱王大街409号","0571-63928797"],
["浙江","杭州","留下西溪餐厅","留下大街107号","0571-85222713"],
["浙江","杭州","塘栖广济餐厅","塘栖镇广济路1号","0571-86383997"],
["浙江","杭州","桐庐大润发餐厅","桐君街道金堂山路128号大润发超市一楼","0571-64269286"],
["浙江","杭州","桐庐富春江餐厅","桐君街道迎春南路米兰大厦一楼","0571-64239895"],
["浙江","杭州","西湖文化广场餐厅","西湖文化广场C座8-1号","0571-85107531"],
["浙江","杭州","杭州香积寺餐厅","下城区土地管理局大楼一楼","0571-85886212"],
["浙江","杭州","瓜沥航民餐厅","萧山瓜沥东灵路34-36号","0571-82562770"],
["浙江","杭州","机场T3航站楼餐厅","萧山国际机场T3到达厅一楼","0571-86849353"],
["浙江","杭州","萧山恒隆餐厅","萧山恒隆广场一楼B座105号","0571-82391355"],
["浙江","杭州","临浦峙山餐厅","萧山临浦镇峙山北路113-115号","0571-83893977"],
["浙江","杭州","萧山旺角城餐厅","萧山区北干街道市心中路123-A号旺角城天地金座8幢101-104号一层","0571-82758676"],
["浙江","杭州","闻堰物美餐厅","萧山区闻堰镇万达中路与三江路口物美超市一楼","0571-82279577"],
["浙江","杭州","萧山泰富餐厅","萧山区萧绍路和通惠路交叉口泰富广场一楼","0571-82700177"],
["浙江","杭州","萧山天润发餐厅","萧山区育才北路518号","0571-82825122"],
["浙江","杭州","萧山市心餐厅","萧山市市心南路142号","0571-82629583"],
["浙江","杭州","萧山汇德隆餐厅","萧山市心南路828号汇德隆购物中心","0571-83696370"],
["浙江","杭州","建德大润发餐厅","新安江街道毕家后","0571-64117293"],
["浙江","杭州","杭州吴山餐厅","延安路1号","0571-87552574"],
["浙江","杭州","义蓬购物中心餐厅","义蓬镇义蓬中路501号","0571-83518083"],
["浙江","杭州","余杭交通餐厅","余杭邱山大街477号","0571-86165560"],
["浙江","杭州","余杭莱蒙餐厅","余杭区临平迎b宾303号","0571-89269190"],
["浙江","杭州","富阳大润发餐厅","浙江省富阳市孙权路209号","0571-23280277"],
["浙江","杭州","西蒙印象城餐厅","浙江省杭州市西湖区古墩路588号","0571-86951527"],
["浙江","杭州","绿城蓝庭餐厅","浙江省杭州市余杭经济开发区星河路61号","0571-89189608"],
["浙江","杭州","杭州宋城餐厅","之江路148-1号 杭州宋城之江国家旅游度假区","0571-86709631"],
["浙江","杭州","桐庐服务区北餐厅","桐庐县杭新景高速公路(杭千高速)桐庐服务区北区一层","0571-64697830"],
["浙江","杭州","桐庐服务区南餐厅","桐庐县杭新景高速公路(杭千高速)桐庐服务区南区一层","15345883558"],
["浙江","杭州","良渚永旺店KFC","余杭区良渚街道古敦路1888号永旺梦乐城一层NO163","0571-86314740"],
["浙江","杭州","乔司物美店KFC","余杭区乔司街道永和村博卡路16号","0571-87776210"],
["浙江","杭州","杭州岳王店","杭州市北山路88号","0571-87962446"],
["浙江","杭州","杭州朝晖店","杭州市河东路138号","0571-85238963"],
["浙江","杭州","杭州华商店","杭州市文新路2号世纪联华超市内","0571-88916532"],
["浙江","杭州","杭州城站店","杭州城站广场北二楼","0571-87805919"],
["浙江","杭州","杭州北山店","杭州市北山路85,87号","0571-87972657??"],
["浙江","杭州","杭州钛合店","杭州朝晖路168-4,钛合广场一楼","0571-85450412"],
["浙江","杭州","余杭禹航店","杭州市余杭区余杭镇禹航路908号","0571-89055005"],
["浙江","杭州","杭州西溪印象城精选店","五常大道和天目山路交叉口,西溪印象城","0571-88732186"],
["浙江","杭州","黄龙恒励精选店","杭州市黄龙路1号黄龙体育场东看台下恒励宾馆一层","0571-86887039"],
["浙江","杭州","杭州北站店","杭州市拱墅区莫干山路汽车北站对面1-2层","0571-85354031"],
["浙江","杭州","文三颐高店","杭州市西湖区文三路345号","0571-86490673"],
["浙江","杭州","莫干山物美店","杭州市拱墅区莫干山路851号一、二层","0571-88094253"],
["浙江","杭州","杭州灵隐店","杭州灵隐路38号","0571-87310262"],
["浙江","杭州","杭州莲花店","杭州市西湖区莲花街333号莲花广场一楼","0571-86094639"],
["浙江","杭州","桐庐世纪联华店","桐庐富春路455号","0571-69801658"],
["浙江","杭州","杭州星民店","杭州市滨江区丹枫路265号","0571-85784169"],
["浙江","杭州","杭州赛码店","杭州市拱墅区登云路518号赛码楼A001号","0571-89914525"],
["浙江","杭州","杭州西溪印象城店","五常大道和天目山路交叉口,西溪印象城","0571-88731861"],
["浙江","杭州","杭州西田店","杭州余杭区莫干山路1509号一层1F014及二层2F007号商铺","0571-89359701"],
["浙江","杭州","黄龙恒励店","杭州市西湖区黄龙路5号恒励大厦1层","0571-86887026"],
["浙江","杭州","杭州新东站店","杭州江干区新火车东站售票大厅隔层","0571-85784192"],
["浙江","杭州","杭州新东站二店店","杭州市新火车东站候车大厅隔层(东)","0571-86011202"],
["浙江","杭州","杭州中大银泰店","杭州市东新路和石祥路交叉口中大银泰2楼214号","0571-89773605"],
["浙江","杭州","拱墅万达店","杭州市杭行路666号万达商业中心4幢2单元274室328室","0571-88130013"],
["浙江","杭州","杭州三墩厚仁店","杭州市厚仁路313号","0571-87037692"],
["浙江","宁波","五乡爱民餐厅","宁波鄞州区五乡镇爱民南路1-1号商铺","0574-87063175"],
["浙江","宁波","宁波文化广场餐厅","宁波市河清北路宁波文化广场D区101","0574-87207282"],
["浙江","宁波","宁波新水街餐厅","宁波市海曙区天一广场碶闸街128-152号双号","0574-87268812"],
["浙江","宁波","杭州湾世纪金源餐厅","宁波市杭州湾新区金源大道19号世纪金源购物中心一楼","0574-82351007"],
["浙江","宁波","宁波集士港杉井奥莱餐厅","宁波鄞州区集士港镇春华路1399号杉井奥特莱斯广场B12200号商铺","0574-88170810"],
["浙江","宁波","北仑君临餐厅","北仑黄山路857号(家乐福一楼)","0574-86851886"],
["浙江","宁波","北仑长江餐厅","北仑明州路和长江路交叉口","0574-86860908"],
["浙江","宁波","北仑银泰餐厅","北仑中河路399号","0574-86863771"],
["浙江","宁波","观海卫海卫餐厅","慈溪观海卫镇贞观大楼一楼","0574-63636677"],
["浙江","宁波","慈溪环城餐厅","慈溪市慈百路71号","0574-63800131"],
["浙江","宁波","慈溪大润发餐厅","慈溪市古塘街道北二环中路550号一层","0574-63032260"],
["浙江","宁波","观海卫福山餐厅","慈溪市观海卫329国道南侧蒋家村","0574-63629565"],
["浙江","宁波","慈溪中心餐厅","慈溪市浒山街道解放路","0574-63810933"],
["浙江","宁波","慈溪虞波餐厅","慈溪市浒山街道新城大道271-279号","0574-63101424"],
["浙江","宁波","慈溪银泰餐厅","慈溪市环城南路与青少年宫路交叉银泰广场一楼","0574-63880469"],
["浙江","宁波","慈溪青少年宫餐厅","慈溪市青少年宫路93号","0574-63803544"],
["浙江","宁波","慈溪保利餐厅餐厅","慈溪市三北西大街1555号,保利滨湖天地商业广场一层","0574-63121069"],
["浙江","宁波","逍林樟新餐厅","慈溪市逍林樟进南路919号北侧","0574-63511992"],
["浙江","宁波","慈溪新都餐厅","慈溪市新城大道南路228-232号","0574-63831283"],
["浙江","宁波","慈溪香格餐厅","慈溪新城大道北路277-285号(华润万家超市旁)","0574-63930365"],
["浙江","宁波","奉化银泰餐厅","奉化市南山路150号奉化银泰城4号门","0574-88683911"],
["浙江","宁波","奉化太平洋餐厅","奉化市南山路174号","0574-88593077?"],
["浙江","宁波","奉化大润发餐厅","奉化市南山路8号龙津尚都","0574-88916058"],
["浙江","宁波","溪口武岭餐厅","奉化市溪口镇百丈路1-3号","0574-88876181"],
["浙江","宁波","慈溪观海卫大润发餐厅","观海卫镇观海卫路580号","0574-63731029"],
["浙江","宁波","宁波彩虹餐厅","宁波彩虹南路16号","0574-87707388"],
["浙江","宁波","宁波学府餐厅","宁波高教园区钱湖南路666号华贸赛得广场B座","0574-88224110"],
["浙江","宁波","宁波江北万达餐厅","宁波江北大道与榭嘉路交叉口万达广场一楼","0574-87102126"],
["浙江","宁波","宁波江北世纪联华餐厅","宁波康庄南路72号","0574-87166751"],
["浙江","宁波","宁波外滩餐厅","宁波人民路88号","0574-87386615"],
["浙江","宁波","北仑明州餐厅","宁波市北仑区明州路22号","0574-86860922"],
["浙江","宁波","宁波欧尚餐厅","宁波市翠柏路208号","0574-87274226"],
["浙江","宁波","宁波马园餐厅","宁波市海曙区马园路271号","0574-83895653"],
["浙江","宁波","宁波新星欧尚餐厅","宁波市海曙区新星路111号122B","0574-87036287"],
["浙江","宁波","宁波甬耀餐厅","宁波市华楼巷19号天一豪景2楼","0574-87029800"],
["浙江","宁波","宁波联丰餐厅","宁波市环城西路南段256","0574-87131153"],
["浙江","宁波","宁波来福士餐厅","宁波市江北区大庆南路99号来福士广场地下一层","0574-83895650"],
["浙江","宁波","宁波洪塘海德餐厅","宁波市江北区洪塘街道洪塘中路海德商业中心一楼","0574-87029851"],
["浙江","宁波","宁波泰富餐厅","宁波市江东区桑田路291-299中信泰富广场1楼","0574-87102025"],
["浙江","宁波","宁波客运中心餐厅","宁波市通达路181号","0574-87029867"],
["浙江","宁波","宁波万成餐厅","宁波市雅戈尔大道429号","0574-89012517"],
["浙江","宁波","鄞州乐购餐厅","宁波市雅戈尔大道洛兹大厦","0574-89012300"],
["浙江","宁波","鄞州栎社机场餐厅","宁波市鄞州栎社国际机场候机楼二楼","0574-87413225"],
["浙江","宁波","鄞州启明餐厅","宁波市鄞州区启明路与鄞县大道交界处","0574-88169833"],
["浙江","宁波","鄞州印象城餐厅","宁波市鄞州区钱湖北路288号印象城B1-16号","0574-83067792"],
["浙江","宁波","宁波桑德兹餐厅","宁波市鄞州区天童南路与日丽中路交叉处南部商务区","0574-83035612"],
["浙江","宁波","宁波万达餐厅","宁波市鄞州区万达商业广场1B-35","0574-88086198"],
["浙江","宁波","宁波骆驼欧尚餐厅","宁波市镇海区骆驼街道东邑北路558号-1076号商铺","0574-86572807"],
["浙江","宁波","镇海华润餐厅","宁波市镇海区世贸中心A座(蛟川街道镇宁东路3—21号)","0574-26282800"],
["浙江","宁波","镇海招宝餐厅","宁波市镇海区西街8-10号","0574-86274847"],
["浙江","宁波","北仑大润发餐厅","宁波通途路以南新大路以西天润发大卖场1F","0574-86855201"],
["浙江","宁波","镇海鼓楼餐厅","宁波镇海区鼓楼步行街320-328号","0574-86299900"],
["浙江","宁波","宁波世纪东方餐厅","宁波中山东路1083号世纪东方广场一楼","0574-87861390"],
["浙江","宁波","宁海新人民餐厅","宁海市人民路9号宁海大厦1楼","0574-65228812"],
["浙江","宁波","宁海桃源餐厅","宁海市桃源南路192号","0574-65211237"],
["浙江","宁波","宁海大观园餐厅","宁海县兴宁中路296号","0574-65131600"],
["浙江","宁波","宁海金昌餐厅","宁海县跃龙街道中山中路69-73号(金昌大厦)","0574-65576500?"],
["浙江","宁波","象山丹峰餐厅","象山丹峰东路时代广场内","0574-65656715"],
["浙江","宁波","石浦金山餐厅","象山石浦金山路102号","0574-65986516"],
["浙江","宁波","象山大润发餐厅","象山县丹南路大润发超市一楼","0574-65798321"],
["浙江","宁波","象山天安餐厅","象山县天安路与建设路交叉口","0574-65657707"],
["浙江","宁波","小港红联餐厅","小港渡口路177号","0574-86162216"],
["浙江","宁波","宁波联盛餐厅","鄞州宁南北路1288号联盛广场A座肯德基","0574-89028376"],
["浙江","宁波","余姚龙山餐厅","余姚市南滨江路150号","0574-62709434"],
["浙江","宁波","余姚华联餐厅","余姚市新建路55号余姚华联商厦","0574-62622082"],
["浙江","宁波","余姚富巷餐厅","余姚市阳明西路273号","0574-62811600"],
["浙江","宁波","下应乐购餐厅","浙江省宁波市鄞州区鄞县大道下应段227号乐购超市S01","0574-88151008"],
["浙江","宁波","庄市乐购餐厅","浙江省宁波市镇海区庄市街道明海大道乐购超市一楼","0574-26295288"],
["浙江","宁波","宁波新天伦店KFC","中心路268号天伦广场126号商铺","0574-87013002"],
["浙江","宁波","宁波环球城银泰店KFC","鄞州区首南街道陈婆渡村、鲍家村宁波环球城银泰一层","0574-89255308"],
["浙江","宁波","鼓楼","中山西路81号","0574-87307377"],
["浙江","宁波","宁波环球城银泰精选店KFC","鄞州天童南路1008号145号四楼","0574-89255303"],
["浙江","宁波","余姚乐客多店","余姚市子陵路53号","0574-62633889"],
["浙江","宁波","周巷兴业店","慈溪市周巷镇周西公路东侧","0574-63305150"],
["浙江","宁波","余姚天一店","余姚新建路107号","0574-62622389"],
["浙江","宁波","欧尚沧海店","宁波市江东区百丈东路1188号","0574-87936736"],
["浙江","宁波","慈溪天久店","慈溪市古塘华苑161-169号","0574-63817447"],
["浙江","宁波","泗门慈客隆店","余姚泗门镇镇北路109号","0574-62122515"],
["浙江","宁波","宁波永达店","宁波鄞州区天童北路27号","0574-88221237"],
["浙江","宁波","宁波中兴店","宁波市江东区中兴路138号世纪联华超市左侧","0574-87758448"],
["浙江","宁波","余姚四明店","余姚市四明西路486-480-490号","0574-62839355"],
["浙江","宁波","宁波百丈店","宁波市百丈东路86-2号","0574-87861527"],
["浙江","宁波","余姚嘉悦店","余姚市文山路730号嘉悦国际广场肯德基","0574-62615515"],
["浙江","宁波","宁波文化广场店","宁波市河清北路宁波文化广场D区101","0574-87207282"],
["浙江","宁波","余姚五彩城店","城东路与金型路交叉口华润五彩城一楼","0574-62695870"],
["浙江","宁波","宁波港隆店","宁波市沧海路1号港隆时代广场二号楼101、201","0576-87036323"],
["浙江","宁波","钱湖天地餐厅","宁波市鄞州区钱湖北路433号、钱湖北路439弄7号","0574-88003142"],
["浙江","温州","温州文化心餐厅","温州市学院中路212号鹿城文化中心一楼","0577-88118200"],
["浙江","温州","苍南城中餐厅","苍南县灵溪镇城中路107号苍南大酒店一楼","0577-68716169"],
["浙江","温州","乐清宁康餐厅","乐城镇宁康西路交通大厦一楼","0577-62521799"],
["浙江","温州","乐清清远餐厅","乐城镇清远路221号时代广场购物中心1楼","0577-62596160"],
["浙江","温州","柳市柳青餐厅","乐青市柳市镇柳青路金鹰大厦一楼","0577-61730077"],
["浙江","温州","乐清南虹餐厅","乐清市城南街道伯乐东路666号南虹广场一楼肯德基餐厅","0577-61511855"],
["浙江","温州","龙港龙跃餐厅","龙港镇人民路402号长运大厦1楼","0577-64266693"],
["浙江","温州","平阳鳌江万达餐厅","平阳县鳌江镇车站大道88号万达商业室内步行街一层1036B号","0577-63035220"],
["浙江","温州","瑞安电影院餐厅","瑞安罗阳大道瑞安广场东首电影城旁","0577-66886452"],
["浙江","温州","瑞安虹桥餐厅","瑞安市虹桥南路中银大厦1楼","0577-65877793"],
["浙江","温州","瑞安安阳餐厅","瑞安市万松东路永安大厦一楼萍萍美食城","0577-65887817"],
["浙江","温州","瑞安万松餐厅","瑞安市万松路6号华侨饭店1楼","0577-65906608"],
["浙江","温州","温州火车站餐厅","温州火车站一楼","0577-86009192"],
["浙江","温州","温州邻里餐厅","温州龙湾区上江路80号易初莲花超市一楼","0577-86668652"],
["浙江","温州","温州白鹿餐厅","温州市飞霞路白鹿影城一楼","0577-88812057"],
["浙江","温州","温州锦绣餐厅","温州市锦绣路南国大厦一楼","0577-89881928"],
["浙江","温州","红桥幸福餐厅","温州市乐清虹桥时代广场86-88号","0577-62377283"],
["浙江","温州","温州龙湾万达大歌星餐厅","温州市龙湾区永中街道永定路1188号万达广场3楼肯德基","0577-86852498"],
["浙江","温州","温州世贸餐厅","温州市鹿城区解放南路9号地块世贸广场一楼","0577-89229006"],
["浙江","温州","温州人民餐厅","温州市鹿城区人民路30号","0577-88256961"],
["浙江","温州","瓯北锦江餐厅","温州市瓯北双塔路1036号锦江大酒店一楼","0577-67995802"],
["浙江","温州","温州大学城餐厅","温州市瓯海区高教园区国大商务中心一层","0577-86681799"],
["浙江","温州","温州新桥国鼎餐厅餐厅","温州市瓯海新桥街道国鼎路67号","0577-88441031"],
["浙江","温州","温州火车南站餐厅","温州市潘桥镇火车南站一楼肯德基","0577-86177018"],
["浙江","温州","温州双屿客运中心餐厅","温州市双屿客运中心一楼","0577-88770698"],
["浙江","温州","温州五马餐厅","温州市五马街2号2楼","0577-88232683"],
["浙江","温州","温州西城餐厅","温州市西城路8号","0577-88722841"],
["浙江","温州","温州欧洲城餐厅","鹿城区江滨西路355--357号","0577-88251095"],
["浙江","温州","温州新水心店KFC","马鞍池西路308号","0577-88291091"],
["浙江","温州","温州安泰店","温州市划龙桥路101-103号安泰大厦一楼","0577-88663787"],
["浙江","温州","温州文化中心店","温州市学院中路212号鹿城文化中心一楼","0577-88118200"],
["浙江","温州","平阳解放店","温州平阳县雅河路189号平阳大厦一层","0577-63731288"],
["浙江","温州","鳌江新河店","平阳县鳌江镇新河南路157-1至157-2","0577-63639918"],
["浙江","温州","塘下塘川店","塘下镇塘川中街153-157号","0577-66005701"],
["浙江","温州","龙湾罗东店","温州市龙湾区永中镇罗东街204--208号","0577-86852368"],
["浙江","温州","北白象东大店","乐清北白象镇东大街257-261号","0577-62983383"],
["浙江","温州","龙湾人本店","温州市龙湾区滨海三道和滨海六路交叉口利玛商业广场","0577-86805635"],
["浙江","温州","水头江山店","水头镇江山中路7号","0577-63996400"],
["浙江","温州","柳市大兴餐厅店","乐清市柳市镇大兴西路152号","0577-61771022"],
["浙江","嘉兴","海宁皮革城餐厅","海宁市海洲西路201皮革大厦,地产品牌箱包直销中心1-C","0573-87018970"],
["浙江","嘉兴","海宁银泰餐厅","海昌南路新苑路交叉口银泰城一楼","0573-80778007"],
["浙江","嘉兴","海宁华联餐厅","海宁市工人路58号","0573-87022785"],
["浙江","嘉兴","海宁龙城餐厅","海宁市海昌路1号龙城商业广场1楼","0573-87259277"],
["浙江","嘉兴","海宁正翔餐厅","海宁市文苑路与联合路交叉口,沃尔玛超市一楼","0573-87088158"],
["浙江","嘉兴","海盐大润发餐厅","海盐县新桥北路118号,大润发超市一楼","0573-86055800"],
["浙江","嘉兴","嘉善乐购餐厅","嘉善体育南路518号一楼","0573-84295649"],
["浙江","嘉兴","嘉善农工商餐厅","嘉善县解放西路171号","0573-84027702"],
["浙江","嘉兴","嘉善大润发餐厅","嘉善县施家南路306号财富广场一楼","0573-84018029"],
["浙江","嘉兴","嘉兴乐天餐厅","嘉兴城南路乐天玛特超市","0573-82682631"],
["浙江","嘉兴","嘉兴中港城餐厅","嘉兴广益路,双溪路交叉口,国际中港城一楼","0573-82633136"],
["浙江","嘉兴","嘉兴江南摩尔餐厅","嘉兴洪兴西路1766号-江南摩尔西区一层A111","0573-82770885"],
["浙江","嘉兴","嘉兴华庭餐厅","嘉兴勤俭路与建国路交叉路口华庭街","0573-82063075"],
["浙江","嘉兴","嘉兴北站餐厅","嘉兴市禾兴北路1599(汽车北站旁)","0573-82223884"],
["浙江","嘉兴","嘉兴万达餐厅","嘉兴市经济开发区广义路与庆丰路交叉口万达广场","0573-82861716"],
["浙江","嘉兴","欧尚泾水餐厅","嘉兴市中环东路","0573-82825772"],
["浙江","嘉兴","嘉兴大润发餐厅","嘉兴市中山西路607号","0573-82711750"],
["浙江","嘉兴","平湖新华餐厅","平湖市解放中路233号中百商都1-2层","0573-85106777"],
["浙江","嘉兴","濮院金泰餐厅","濮院大道3998号机械辅料城一楼","0573-88698277"],
["浙江","嘉兴","桐乡东兴餐厅","桐乡市东兴街6号东兴商厦1楼","0573-88036990"],
["浙江","嘉兴","桐乡乐购餐厅","桐乡市东兴街与鱼行街交叉口中虹天地一期一楼","0573-88031829"],
["浙江","嘉兴","桐乡时代餐厅","桐乡市庆丰中路10号时代广场1F","0573-88031393"],
["浙江","嘉兴","乌镇东栅餐厅","桐乡市乌镇镇新林家铺子1幢105,109-116,118,201-203,206,208号商铺一层,二层","0573-88723533"],
["浙江","嘉兴","桐乡中都餐厅","桐乡市振兴中路2号新世界广场3号楼肯德基餐厅","0573-88031799"],
["浙江","嘉兴","平湖大润发餐厅","新华中路世纪商业中心3号楼139号","0573-85017077"],
["浙江","嘉兴","嘉善恒利餐厅餐厅","浙江省嘉善县魏塘街道中山西路118号恒利广场一楼","0573-84068666"],
["浙江","嘉兴","海盐秦山餐厅","浙江省嘉兴市海盐县新桥南路100号","0573-86035900"],
["浙江","嘉兴","西塘水街店KFC","嘉善县西塘镇西丁公路南侧来凤港西侧","0573-84892552"],
["浙江","嘉兴","嘉兴新旭辉店KFC","中山东路旭辉广场一楼","0573-82812177"],
["浙江","嘉兴","长安服务区店","沪杭高速公路长安服务区(南区)","0573-87489277"],
["浙江","嘉兴","海宁皮革城店","海宁市海洲西路201皮革大厦,地产品牌箱包直销中心1-C","0573-87018970"],
["浙江","嘉兴","嘉兴火车站店","嘉兴市铁路嘉兴站原动车候车室一层","0573-82114977"],
["浙江","嘉兴","杭州跨海大桥北服务区店","杭州湾跨海大桥北服务区西","0573-86055600"],
["浙江","嘉兴","嘉兴少年路","嘉兴市勤俭路504号1-2,4-6栋肯德基","0573-82066557"],
["浙江","湖州","安吉昌硕餐厅","浙江省安吉县昌硕街道九州昌硕广场6号楼一层","0572-5135588"],
["浙江","湖州","湖州新天地餐厅","湖州市新天地商城1F(红旗路与观凤路转南西北侧)","0572-2198577"],
["浙江","湖州","织里富民餐厅","湖州市织里富民路298号","0572-3190190"],
["浙江","湖州","安吉胜利餐厅","安吉县递铺镇胜利路商贸广场2幢","0572-5128157"],
["浙江","湖州","德清美都餐厅","德清县武康镇永安街与英溪路交叉口","0572-8071884"],
["浙江","湖州","湖州爱山餐厅","湖州市爱山广场步行街区8号楼","0572-2056311"],
["浙江","湖州","湖州凤凰餐厅","湖州市滨河路666号(开发区6号地块)","0572-2367044"],
["浙江","湖州","湖州红旗餐厅","湖州市江南工贸大街14号","0572-2035775"],
["浙江","湖州","南浔泰安餐厅","南浔区泰安路161号泰安商业广场","0572-3018288"],
["浙江","湖州","长兴欧尚餐厅","长兴经济开发区经一路238号欧尚一楼肯德基","0572-6027050"],
["浙江","湖州","长兴金陵餐厅","长兴县雉城解放西路76号金陵北路口","0572-6029961"],
["浙江","湖州","长兴人民餐厅","长兴雉城镇人民南路红宇商厦","0572-6023496"],
["浙江","湖州","湖州物美餐厅","浙江省湖州市吴兴区人民路389-395号(物美超市右侧)","0572-2182951"],
["浙江","湖州","湖州新天地店","湖州市新天地商城85-87号","0572-2198577"],
["浙江","湖州","织里富民店","湖州织里富民路298号","0572-3190190"],
["浙江","湖州","德清正翔店","德清县武康镇永安街158号","0572-8072863"],
["浙江","湖州","长兴海洋城店","长兴县画溪大道与忻湖路交叉口嘉兆海洋城106号一楼","0572-6869233"],
["浙江","湖州","安吉昌硕店","浙江省安吉县昌硕街道九州昌硕广场6号楼一层","0572-5135588"],
["浙江","绍兴","绍兴华谊餐厅","绍兴市解放北路588号","0575-85141265"],
["浙江","绍兴","绍兴解放餐厅","绍兴解放北路558号商业区3号楼底层","0575-85133296"],
["浙江","绍兴","绍兴新越城餐厅","绍兴市下大路20号","0575-85225562"],
["浙江","绍兴","绍兴世茂餐厅","绍兴胜利东路360号世茂广场一楼KFC","0575-88126755"],
["浙江","绍兴","中兴餐厅","绍兴中兴路与人民路交叉口渔花广场一二楼","0575-85121601"],
["浙江","绍兴","绍兴悦来餐厅","绍兴解放南路1385号越兰大厦南区一楼","0575-88370117"],
["浙江","绍兴","绍兴金时代餐厅","解放南路和环城南路交叉口,金时代广场东南角","0575-88373377"],
["浙江","绍兴","柯桥乐客多餐厅","柯桥联华国际商贸城一楼","0575-84132727"],
["浙江","绍兴","上虞大通餐厅","上虞市百官镇人民路北侧389号","0575-82134051"],
["浙江","绍兴","上虞大润发餐厅","上虞市民大道与凤鸣路交汇处大润发超市一楼","0575-82088806"],
["浙江","绍兴","柯桥万达餐厅","绍兴柯桥钱陶公路与湖中路交叉口万达广场1-2F","0575-85528876"],
["浙江","绍兴","绍兴欧尚餐厅","绍兴胜利东路世茂广场欧尚超市一楼","0575-88628996"],
["浙江","绍兴","袍江大润发餐厅","绍兴市袍江世纪街1号大润发","0575-88133357"],
["浙江","绍兴","绍兴胜利餐厅","绍兴市胜利东路58号(绍兴电力大厦一楼)","0575-85143737"],
["浙江","绍兴","嵊州国商餐厅","嵊州城关镇北直街1号","0575-83026766"],
["浙江","绍兴","嵊州八达餐厅","嵊州市城中路106-3","0575-83022922"],
["浙江","绍兴","新昌华翔餐厅","新昌鼓山中路116号华翔大厦一楼","0575-86040299"],
["浙江","绍兴","诸暨新一百餐厅","诸暨暨阳路7号","0575-87027987"],
["浙江","绍兴","大唐开元餐厅","诸暨市大唐开元西路165号","0575-87719278"],
["浙江","绍兴","餐厅口万安餐厅","诸暨市店口镇万安北路98号万利来广场","0575-87656963"],
["浙江","绍兴","诸暨艮塔餐厅","诸暨市暨阳路181号","0575-87032017"],
["浙江","绍兴","诸暨大润发餐厅","诸暨市苎萝东路195号大润发超市一楼","0575-87239377"],
["浙江","绍兴","诸暨永利餐厅","诸暨市苎萝东路399号永利广场1层","0575-87355677"],
["浙江","绍兴","绍兴华谊店","绍兴市解放北路588号","0575-85141265"],
["浙江","绍兴","嵊州现代店","嵊州市城关镇北直街2号","0575-83014577"],
["浙江","绍兴","绍兴解放店","绍兴解放北路558号商业区3号楼底层","0575-85133296"],
["浙江","绍兴","新昌国贸店","新昌县人民中路129号","0575-86038179"],
["浙江","绍兴","上虞解放店","上虞市解放街80号(上虞第二百货大楼)","0575-82180812"],
["浙江","绍兴","绍兴新越城店","绍兴市下大路20号","0575-85225562"],
["浙江","绍兴","绍兴悦来店","绍兴解放南路1385号越兰大厦南区一楼","0575-88370117"],
["浙江","绍兴","柯桥金都店","柯桥迪扬路鉴湖路口蓝天金都商厦","0575-84077627"],
["浙江","绍兴","柯桥华泰店","金柯桥大道275号华泰大厦","0575-84568587"],
["浙江","绍兴","柯桥万商店","绍兴县迪扬路万商路交叉口锦麟天地","0575-84091117"],
["浙江","绍兴","绍兴中兴店","绍兴中兴路与人民路交叉口渔花广场一二楼","0575-85121601"],
["浙江","绍兴","上虞万和城店","上虞市市民大道688号(上虞万和城)","0575-82000187"],
["浙江","绍兴","钱清钱门学府店","柯桥区钱清镇钱门学府一楼餐厅","0575-81173120"],
["浙江","绍兴","上虞高铁店","崧厦路与四环路交汇处上虞北站","0575-82161025"],
["浙江","绍兴","嵊州世纪联华店","嵊州市官河南路和兴盛路交叉口世纪联华一楼KFC","0575-83369812"],
["浙江","金华","金华站餐厅","环城西路金竹路交叉口高铁站2楼","0579-89004053"],
["浙江","金华","义乌北苑餐厅","北苑行政商务中心商贸区B地块东区1层","0579-85117518"],
["浙江","金华","义乌宾王餐厅","稠州北路458号","0579-85571516"],
["浙江","金华","横餐厅万盛餐厅","东阳市横店镇万盛南街22号","0579-86551377"],
["浙江","金华","东阳金天地餐厅","东阳市人民路106号金天地购物广场","0579-86911468"],
["浙江","金华","金华大润发餐厅","金华市八一南街1159号华源印象城一层二层","0579-82828382"],
["浙江","金华","金华时代餐厅","金华市八一南街288号","0579-82454000"],
["浙江","金华","金华福泰隆餐厅","金华市宾虹路777号","0579-82068203"],
["浙江","金华","金华银泰餐厅","金华市解放东路168号","0579-82326408"],
["浙江","金华","金华锦绣餐厅","金华市解放西路4号","0579-82348000"],
["浙江","金华","金华双龙餐厅","金华市双龙南街307号公元大厦一楼","0579-82058835"],
["浙江","金华","金华福华餐厅","金华市五一路福华广场一层","0579-82301798"],
["浙江","金华","金华万达餐厅","李渔东路366号万达广场1楼","0579-82829110"],
["浙江","金华","浦江福泰隆餐厅","浦江人民东路28号","0579-88089122"],
["浙江","金华","浦江财富餐厅","浦江县大桥路人民路交叉口财富广场","0579-84111802"],
["浙江","金华","武义时代餐厅","武义县城解放街7号、8号地块","0579-87669081"],
["浙江","金华","佛堂朝阳餐厅","义乌稠佛路172号","0579-85781682"],
["浙江","金华","义乌福田餐厅","义乌稠州北路905号","0579-85280705"],
["浙江","金华","义乌临江餐厅","义乌篁园路215号","0579-85363716"],
["浙江","金华","义乌江东餐厅","义乌江东中路482号","0579-85364280"],
["浙江","金华","义乌世纪餐厅","义乌市城中中路世纪商场地下一层一街","0579-85252633"],
["浙江","金华","义乌解百餐厅","义乌市稠城镇城中中路111号","0579-85521561"],
["浙江","金华","义乌宗泽餐厅","义乌市稠州北路586号1楼","0579-85567536"],
["浙江","金华","义乌香山餐厅","义乌市稠州西路119号","0579-85332062"],
["浙江","金华","义乌宏迪餐厅","义乌市稠州西路138号","0579-85318767"],
["浙江","金华","义乌义东餐厅","义乌市稠州中路42号","0579-85525275"],
["浙江","金华","义乌银泰餐厅","义乌市工人西路15号银泰广场2楼","0579-85524881"],
["浙江","金华","义乌五爱餐厅","义乌市江东中路55号","0579-85217311"],
["浙江","金华","义乌银海餐厅","义乌市银海路292号","0579-85596985"],
["浙江","金华","永康时代餐厅","永康市胜利街103号","0579-87131653"],
["浙江","金华","永康华联店","永康市胜利街2号","0579-87119910"],
["浙江","金华","东阳定安店","东阳市吴宁街道办事处双岘路1号","0579-86620020"],
["浙江","金华","兰溪大丰店","兰溪市劳动路68号大丰大超市","0579-88823317"],
["浙江","金华","兰溪兰江店","兰溪市兰江街道丹溪大道20号1-2号1-2层","0579-88888017"],
["浙江","金华","东阳振兴店","东阳市人民路66号","0579-86622191"],
["浙江","金华","义乌火车站店","义乌市新火车站候车大厅旁","0579-85642163"],
["浙江","金华","永康大润发店","永康市丽州中路与九铃路交叉口","0579-87137211"],
["浙江","金华","金华站餐厅店","金华环城西路金竹路交叉口高铁站二楼","0579-89004053"],
["浙江","金华","兰溪新时代店","兰溪市横山路405号嘉泰新时代广场","0579-88831166"],
["浙江","金华","金华一百店","金华市西市街159号","0579-82301007"],
["浙江","衢州","衢州荷花餐厅","衢州市三衢路238号","0570-3080072"],
["浙江","衢州","衢江东方餐厅","衢州市衢江区信安大道398号","0570-8885505"],
["浙江","衢州","衢州香溢餐厅","衢州市柯城区县学街18号","0570-3044566"],
["浙江","衢州","江山解放餐厅","浙江江山市解放路55-57号","0570-4028928"],
["浙江","衢州","江山大润发店KFC","江山市城北广场98号","0570-4037982"],
["浙江","衢州","衢州东方店","衢州市劳动路1号","0570-3044585"],
["浙江","衢州","龙游太平店","龙游县太平西路1号","0570-7022759"],
["浙江","衢州","衢州荷花店","衢州市三衢路238号","0570-3080072"],
["浙江","衢州","江山东方店","江山市鹿溪南路165幢","0570-4037982"],
["浙江","衢州","衢州常山定阳店","浙江衢州常山县文峰西路121-125号","0570-5210570"],
["浙江","衢州","衢州凯升店","浙江省衢州市荷花中路1-21凯升广场","0570-8887218"],
["浙江","衢州","开化东方店","浙江省衢州市开化城关镇临湖路212号","0570-6012500"],
["浙江","衢州","杭州衢江东方店","衢州市衢江区信安大道398号","0570-8885505"],
["浙江","舟山","普陀东海餐厅","东海路588号9(沈家门车站对面)","0580-3059556"],
["浙江","舟山","定海解放餐厅","舟山定海解放西路169号","0580-2262077"],
["浙江","舟山","定海东门餐厅","舟山定海区解放东路232号世纪新贸","0580-2624081?"],
["浙江","舟山","岱山日达餐厅","舟山市岱山县长河路338号日达广场B座","0580-4406698"],
["浙江","舟山","定海新天地餐厅","舟山市定海区东海东路29号","0580-2263225"],
["浙江","舟山","临城乐购餐厅","舟山市定海区临城街道体育路189号乐购超市一楼","0580-2180022"],
["浙江","舟山","舟山定海餐厅","舟山市定海区人民南路2号","0580-2039043"],
["浙江","舟山","普陀餐厅","舟山市普陀区东大街1号","0580-3012402"],
["浙江","舟山","欧上东港餐厅","舟山市普陀区东港开发区兴普大道266号","0580-3806193"],
["浙江","舟山","普陀鑫材餐厅餐厅","舟山市普陀区浦西工业小区东海西路2113号浦西陶瓷建材市场","0580-2085628"],
["浙江","台州","杜桥杜川餐厅","杜桥镇杜川路240路","0576-85663637"],
["浙江","台州","黄岩新世纪餐厅","黄岩区东城街道砚池社区横街路193、195号","0576-84060717"],
["浙江","台州","椒江欧尚东海餐厅","椒江东海大道与东环大道交叉口欧尚超市一楼","0576-88521778"],
["浙江","台州","椒江乐客多餐厅","椒江区市府大道555号","0576-88680737"],
["浙江","台州","椒江耀达餐厅","椒江市府大道与耀江东环大道和交叉路口","0576-88517177"],
["浙江","台州","临海柏叶餐厅","临海柏叶西路168号","0576-85305928"],
["浙江","台州","临海回浦餐厅","临海市崇和门广场北首","0576-85116618"],
["浙江","台州","临海银泰餐厅","临海市东方大道一号银泰城1-2层","0576-85076688"],
["浙江","台州","临海华润餐厅餐厅","临海市古城街道柏叶西路316号华润万家超市一楼","0576-85587599"],
["浙江","台州","临海耀达餐厅","临海市回浦路66号","0576-85113678"],
["浙江","台州","三门首府餐厅","三门县海游镇南山路首府广场","0576-83373338"],
["浙江","台州","三门人民餐厅","三门县海游镇上洋路17号","0576-83323750"],
["浙江","台州","椒江红旗餐厅","台州市椒江区中山西路297号","0576-88831277"],
["浙江","台州","路桥中兴餐厅","台州市路桥区路桥大道25-35号","0576-82939997"],
["浙江","台州","路桥华联餐厅","台州市路桥区商城街216号东方华联购物广场一楼","0576-82969616"],
["浙江","台州","路桥农工商餐厅","台州市路桥区新安西街88号","0576-82406160"],
["浙江","台州","路桥银座餐厅","台州市路桥区银座街506号","0576-82445997"],
["浙江","台州","天台太平洋餐厅","天台赤城路399号太平洋购物中心一楼","0576-83771058"],
["浙江","台州","天台赤城餐厅","天台市赤城路66号赤城大厦1楼","0576-83884860"],
["浙江","台州","温岭银泰餐厅","温岭城西街道中华路728号银泰城1F003-1","0576-81691696"],
["浙江","台州","大溪德明餐厅","温岭大溪镇大溪南路27号","0576-86327070"],
["浙江","台州","温岭乐购餐厅","温岭市城东街道九龙大道南侧时代广场乐购超市一楼","0576-86088968?"],
["浙江","台州","温岭数码餐厅","温岭市城东街道万昌中路758号","0576-80689502"],
["浙江","台州","温岭昌辉餐厅","温岭市东辉北路139号温岭购物中心","0576-86227070"],
["浙江","台州","温岭万佳餐厅","温岭市人民东路105号东辉公园旁","0576-86206631"],
["浙江","台州","温岭太平餐厅","温岭市太平南路176号","0576-86212820"],
["浙江","台州","泽国幸福餐厅","温岭市泽国镇幸福路21号(1-3)?","0576-86465518??"],
["浙江","台州","仙居永安餐厅","仙居县城关镇解放街113号","0576-87774232"],
["浙江","台州","玉环玉兴餐厅","玉环城中路33号粮食大厦一楼","0576-87249777"],
["浙江","台州","黄岩大润发餐厅","浙江台州市黄岩区洞天路288号大润发超市一楼","0576-81112880"],
["浙江","台州","台州椒江店","台州市椒江中山西路1号","0576-88884101"],
["浙江","台州","黄岩国贸店","台州市黄岩西城街道南苑社区劳动南路263号??","0576-84297261?"],
["浙江","台州","温岭新天地店","温岭市太平街道万泉东路234号","0576-80610861"],
["浙江","丽水","丽水万地餐厅","丽水市花园路16号一层21号商铺","0578-2132555"],
["浙江","丽水","丽水时代餐厅","丽水市解放街121号金马时代广场一楼","0578-2131110"],
["浙江","丽水","丽水中山餐厅","丽水市中山街498号","0578-2157470"],
["浙江","丽水","丽水学院餐厅","丽水文进路2号","0578-2138688"],
["浙江","丽水","龙泉新华餐厅","龙泉市新华街28号","0578-7222378"],
["浙江","丽水","青田鹤城餐厅","青田县鹤城东路88号","0578-6821007"],
["浙江","丽水","遂昌凯恩餐厅","遂昌县北街1号","0578-8555222"],
["浙江","丽水","云和解放餐厅","云和县解放东街210号","0578-5227188"],
["江苏","南京","龙华路餐厅","文德东路与公园北路交叉口","18014852800"],
["江苏","南京","天悦城餐厅","浦口区百润路与天华西路交叉口西北角天悦城","13770816201"],
["江苏","南京","南京石鼓路餐厅","白下区石鼓路33号","025-86795391"],
["江苏","南京","南京经贸苏果餐厅","白下区石门坎104号","025-84217319"],
["江苏","南京","南京苏宁商贸餐厅","白下区中山南路49号苏宁电器一层","025-52631361"],
["江苏","南京","南京龙江润发餐厅","草场门大街99号金润发一楼","025-86214227"],
["江苏","南京","南京龙江新城市餐厅","草场门大街99号新城广场步行街B区B栋1层","025-86304012"],
["江苏","南京","南京殷巷苏果餐厅","诚信大道与双龙大道交汇处","025-52758269"],
["江苏","南京","南京新华餐厅","大厂区新华路520号","025-57790608"],
["江苏","南京","南京金利餐厅","大厂区新华路永利晶华城一楼","025-57059830"],
["江苏","南京","南京水晶餐厅","大光路水晶广场","025-52323863"],
["江苏","南京","南京大行宫家乐福餐厅","大行宫广场负一层","025-84500291"],
["江苏","南京","南京大桥北路餐厅","大桥北路48号华东茂A1栋一层","025-57033696"],
["江苏","南京","南京大桥家乐福餐厅","大桥南路7号","025-58833304"],
["江苏","南京","南京孝陵卫地铁站餐厅","地铁2号线孝陵卫站","025-86615001"],
["江苏","南京","南京安德门餐厅","地铁一号线安德门地铁站一层2号出口","025-52442971"],
["江苏","南京","南京奥体中心餐厅","地铁一号线奥体中心站","025-86407900"],
["江苏","南京","南京高淳宏达餐厅","高淳县通贤街28-3号宏达大厦1-2层","025-52006731"],
["江苏","南京","南京正兴餐厅","贡院街119号","025-86623239"],
["江苏","南京","南京黄金楼餐厅","贡院西街72-94号","025-52202648"],
["江苏","南京","南京广州路餐厅","广州路104号","025-86632902"],
["江苏","南京","南京五星年华餐厅","汉中路141-145号一层","025-85314930"],
["江苏","南京","南京汉中餐厅","汉中路209号","025-86611684"],
["江苏","南京","南京红山餐厅","和燕路252号","025-85552037"],
["江苏","南京","南京迈皋桥餐厅","和燕路346号","025-85231769"],
["江苏","南京","南京狮子桥餐厅","湖南路181号","025-83243906"],
["江苏","南京","南京玉桥餐厅","建宁路8号","025-86506787"],
["江苏","南京","南京河西雨润中央餐厅","建邺区江东中路237号河西中央商场一楼","025-58703079"],
["江苏","南京","南京河西万达餐厅","建邺区万达广场步行街西北侧","025-87715053"],
["江苏","南京","南京同曦万尚餐厅","江宁开发区双龙大道以东同曦尚城","025-52760391"],
["江苏","南京","南京金箔餐厅","江宁区东山街道金箔路468号中国女人街A区一层","025-52196593"],
["江苏","南京","南京东山餐厅","江宁区东山镇土山路2号","025-52188133"],
["江苏","南京","南京金宝餐厅","江宁区东新南路金宝大场","025-52184379"],
["江苏","南京","南京胜太餐厅","江宁区将军大道9号一层(近胜太西路)","025-52146195"],
["江苏","南京","南京黄金海岸餐厅","江宁区金箔路999号","025-51190262"],
["江苏","南京","南京文鼎广场餐厅","江宁区科学园文鼎广场8号楼一层","025-52262863"],
["江苏","南京","南京天泰餐厅","江宁区秣陵街道将军大道62号","025-52141089"],
["江苏","南京","南京天元餐厅","江宁区天元路388号义乌小商品城A区1栋一层","025-52098562"],
["江苏","南京","南京21世纪餐厅","江宁区天元路华润苏果超1楼","025-52101753"],
["江苏","南京","南京溧水大润发餐厅","溧水区天生桥大道与致远路交汇处","025-57210726"],
["江苏","南京","南京通济街餐厅","溧水县通济街8号","025-57264103"],
["江苏","南京","南京石林百货餐厅","联合村147号一层","025-83402239"],
["江苏","南京","南京龙津餐厅","六合区龙津路8号金宁广场中区一层","025-57120251"],
["江苏","南京","南京龙蟠餐厅","龙蟠路17号","025-85625052"],
["江苏","南京","南京南京商厦餐厅","龙蟠路2号","025-85659995"],
["江苏","南京","南京润发餐厅","龙蟠中路260号","025-84601194"],
["江苏","南京","南京仁恒餐厅","楠溪江东街78号","025-52249021"],
["江苏","南京","南京双龙餐厅","宁溧公路288号麦德龙超旁","025-52452080"],
["江苏","南京","南京明发永辉餐厅","浦口区滨江大道1号明发永辉超一层","025-58356931"],
["江苏","南京","南京弘阳广场餐厅","浦口区弘阳广场","025-58402382"],
["江苏","南京","南京大华家乐福餐厅","浦口区浦珠路、柳州南路口大华家乐福一层","025-58573963"],
["江苏","南京","南京澳林餐厅","浦口区浦珠路126号","025-58865206"],
["江苏","南京","南京金浦餐厅","浦口区珠江镇文德路金浦假日百货一层","025-58110779"],
["江苏","南京","南京华电餐厅","栖霞区迈皋桥街道高家村42号3幢1-2层","025-83613352"],
["江苏","南京","南京金源百货餐厅","栖霞区迈皋桥栖霞大道28号万寿综合大楼一层","025-85576807"],
["江苏","南京","南京仙林文苑餐厅","栖霞区仙林文苑路108号","025-85899664"],
["江苏","南京","南京尧佳路餐厅","栖霞区尧化门街179号","025-82210391"],
["江苏","南京","南京河西苏宁环球餐厅餐厅","清江路99号1层","025-58719273"],
["江苏","南京","南京热河餐厅","热河路2号","025-58803384"],
["江苏","南京","南京瑞金餐厅","瑞金路48号","025-84609661"],
["江苏","南京","南京秦淮餐厅","升州路23号","025-52313879"],
["江苏","南京","南京五洋餐厅","水西门大街159号1层","025-86553757"],
["江苏","南京","南京长江路餐厅","太平北路1912A11号楼1-3层","025-84510195"],
["江苏","南京","南京新世纪餐厅","太平南路1号","025-84520872"],
["江苏","南京","南京太平南路餐厅","太平南路304-330号","025-84454011"],
["江苏","南京","南京紫金餐厅","铁匠营地块的君临紫金商业项目07幢一层01、02号","025-84430270"],
["江苏","南京","南京铁心桥餐厅","铁心桥大街1号","025-52355370"],
["江苏","南京","南京东城汇餐厅","仙林东城汇","025-83225326"],
["江苏","南京","南京绿轴餐厅","新安江街99号","025-87771262"],
["江苏","南京","南京仙林金鹰 餐厅","学海路1号1层4-102商铺","025-87721702"],
["江苏","南京","南京虹悦城餐厅","应天大街619号德盈广场负一层","025-52275991"],
["江苏","南京","南京绿洲餐厅","雨花区梅山街道203道口旁商务楼一层","025-52268371"],
["江苏","南京","南京光华餐厅","御道街113号","025-84624169"],
["江苏","南京","南京瞻园餐厅","瞻园路9号","025-85572069"],
["江苏","南京","南京好又多餐厅","长乐路132号","025-52206465"],
["江苏","南京","南京山西路餐厅","中山北路107号1-2层(山西路百货大楼)","025-83317323"],
["江苏","南京","南京少年宫餐厅","中山北路148号","025-86631892"],
["江苏","南京","南京虹桥餐厅","中山北路281-2号1-2层","025-83344080"],
["江苏","南京","南京紫峰餐厅","中山北路6号","025-83306032"],
["江苏","南京","南京中山门餐厅","中山陵陵园路四方城1号","025-83363721"],
["江苏","南京","南京德基餐厅","中山路18号负一层B105号店铺","025-84710829"],
["江苏","南京","南京马群餐厅","中山门大街马群地铁站1号出口外","025-83463096"],
["江苏","南京","南京金铁餐厅","中山南路122号大洋百货负一楼","025-84726957"],
["江苏","南京","南京正洪餐厅","中山南路79号","025-84723253"],
["江苏","南京","南京中央餐厅","中央路331号","025-83531027"],
["江苏","南京","南京珍珠餐厅","珠江路389号","025-83357887"],
["江苏","南京","南京华海餐厅","珠江路435号","025-52468169"],
["江苏","南京","汤山奥莱","江宁区圣汤大道99号3号楼3栋106和107商铺","025-52157195"],
["江苏","南京","六合时代","六合区华欧大道8号龙湖时代广场一层","13770816201"],
["江苏","南京","万辰餐厅","江苏省南京市溧水县永阳镇宝塔路9号万辰国际苏果购物中心内一楼","025-56200588"],
["江苏","南京","金轮广场","汉中路8号金轮新天地负一楼肯德基餐厅","025-84739332"],
["江苏","南京","天迈餐厅","南京市地铁二线油坊桥站天迈广场肯德基餐厅","025-84855397"],
["江苏","南京","南京火车站店","龙蟠路111号","025-52268780"],
["江苏","南京","南京花园店","花园路17号","025-85479994"],
["江苏","南京","南京南京站店","龙蟠路111号南京火车站候车室3楼","025-85029259"],
["江苏","南京","南京南京站北广场店","火车站北广场3楼3-1-1号","025-86554789"],
["江苏","南京","南京中山陵步行街店","中山陵步行街8-12","025-83695537"],
["江苏","南京","南京东方精选店","汉中路1号东方商城1楼","025-84702677"],
["江苏","南京","南京南京站精选店","龙蟠路264号南京火车站一楼软席候车室","025-58705689"],
["江苏","南京","集庆门家乐福","南京市集庆门大街与长虹路交汇处","025-86459607"],
["江苏","南京","应天餐厅","南京市应天大街866号欧尚超市1楼","025-86496228\6218"],
["江苏","南京","江宁餐厅","南京市江宁经济开发区秦淮路11号","025-86951508"],
["江苏","南京","高淳淳溪餐厅","高淳区天河路39号苏果一楼肯德基","025-57327726"],
["江苏","南京","华侨城苏果餐厅","浦口区大桥北路一号肯德基餐厅","025-58579106"],
["江苏","南京","凤凰餐厅","鼓楼区凤凰东街150号","025-86595744"],
["江苏","南京","六合紫晶餐厅","六合区雄州镇龙津路紫晶广场一区5号楼苏果超市一层","025-57103287"],
["江苏","南京","学府餐厅","玄武区丹凤街25号(金润发超市)","025-83222982"],
["江苏","南京","天润餐厅","浦口区柳州东路1号苏宁环球天润广场1楼","025-52417900"],
["江苏","南京","大观餐厅","建宁路300号一层","025-58701297"],
["江苏","南京","长兴餐厅","江宁区江宁镇长兴大街515号","025-52781077"],
["江苏","无锡","京东广场餐厅","广南路388号京东广场1层","13961855024"],
["江苏","无锡","阳山餐厅","惠山区阳山镇陆畅路北侧、陆兴路西侧1层","13812010432"],
["江苏","无锡","无锡市胜利餐厅","北大街1号","0510-82695569"],
["江苏","无锡","无锡市华庄瑞景餐厅","滨湖区华庄街道华庄社区、太湖社区社区睦邻中心一层","0510-85612813"],
["江苏","无锡","无锡市新梁溪餐厅","滨湖区青祁路与梁溪路交叉口西南角万达商业广场","0510-85800323"],
["江苏","无锡","无锡市金城餐厅","滨湖区苏锡路359号","0510-85068489"],
["江苏","无锡","无锡市海岸城餐厅","滨湖区太湖新城立信大道西侧无锡海岸城购物中心","0510-85183071"],
["江苏","无锡","无锡市崇安寺餐厅","崇安区人民路崇安寺步行街皇亭广场1003号","0510-82765841"],
["江苏","无锡","无锡市恒隆餐厅","崇安区人民中路139号无锡恒隆广场","0510-81856135"],
["江苏","无锡","无锡市少年宫餐厅","崇安区上马墩街道","0510-82400395"],
["江苏","无锡","无锡市无锡城际餐厅","崇安区通江街道无锡综合交通枢纽B2地块综合楼","0510-82306320"],
["江苏","无锡","无锡市新公园餐厅","崇安寺二期公园街105号 公园街93-202号","0510-82717147"],
["江苏","无锡","无锡市凤翔餐厅","凤翔路与江海路交叉口西南角","0510-83157120"],
["江苏","无锡","无锡市新广益餐厅","广南路、广益路交叉口西南侧","0510-82401811"],
["江苏","无锡","无锡市新区餐厅","行创六路6号","0510-85281322"],
["江苏","无锡","无锡市阳光餐厅","红星路229号阳光广场二号楼1+2层","0510-85455350"],
["江苏","无锡","无锡市惠山餐厅","惠山区经济技术开发区新世纪时代广场乐购","0510-66616827"],
["江苏","无锡","无锡市洛社餐厅","惠山区洛社镇政府东侧、洛中路北侧“六龙城”1+2层","0510-83340832"],
["江苏","无锡","无锡市前洲餐厅","惠山区前洲镇前洲村、塘村村前洲商贸财富中心1+2层","0510-83382165"],
["江苏","无锡","无锡市中环餐厅","惠山区钱桥街道钱桥社区“恒达·中环国际”1层","0510-83205232"],
["江苏","无锡","无锡市惠山万达餐厅","惠山区吴韵路321号","0510-83598252"],
["江苏","无锡","无锡市车站餐厅","火车站车站广场","0510-82326576"],
["江苏","无锡","无锡车站精选餐厅","火车站城际候车室","0510-82302667"],
["江苏","无锡","无锡市安镇餐厅","江苏省无锡市锡山区安镇街道锡沪南路、润锡路东1+2层","0510-88781377"],
["江苏","无锡","高铁无锡东站餐厅","京沪高铁无锡东站WXD-7号经营场所","0510-14751561756"],
["江苏","无锡","无锡市蠡溪餐厅","蠡溪路与建筑路交叉口西北侧","0510-81803699"],
["江苏","无锡","无锡市梁清餐厅","梁清路600号华润万家大买场","0510-85868330"],
["江苏","无锡","无锡市新梅餐厅","梅村镇梅西路东锡甘路北一层","0510-88558731"],
["江苏","无锡","无锡市向阳餐厅","南禅寺110号1、2层","0510-82828026"],
["江苏","无锡","无锡市永乐餐厅","南长区大庄里535号1层","0510-85033023"],
["江苏","无锡","无锡市清扬餐厅","南长区五星家园123号好又多超市1楼肯德基餐厅","0510-85851109"],
["江苏","无锡","无锡市大润发餐厅","青祁路66号","0510-85872269"],
["江苏","无锡","无锡市青祁路餐厅","青祁路欧尚超市一楼","0510-85125878"],
["江苏","无锡","无锡市青石餐厅","青石路和凤宾路交叉口东北侧","0510-82691097"],
["江苏","无锡","无锡市兴隆桥餐厅","人民路和五爱西路路口","0510-82725367"],
["江苏","无锡","无锡市新五爱餐厅","人民西路60、62号","0510-82708711"],
["江苏","无锡","无锡市惠泉餐厅","盛岸西路碧霞堍2号","0510-83010107"],
["江苏","无锡","无锡市太湖新城餐厅","太湖新城万顺路与大通路十字路口华润万家超市一层","0510-85074295"],
["江苏","无锡","无锡市旺庄餐厅","旺庄路北侧宝龙城市广场A区","0510-85214930"],
["江苏","无锡","无锡市易买得餐厅","锡沪路与友谊路交汇口","0510-88209921"],
["江苏","无锡","无锡市和泽餐厅","锡山区安镇东兴路与鑫安路交叉口西南角","0510-88783621"],
["江苏","无锡","无锡市查桥餐厅","锡山区安镇街道锡沪路北、镇南路南查桥商业广场一层","0510-88713323"],
["江苏","无锡","无锡市保利餐厅","县前东路保利广场1楼","0510-82329051"],
["江苏","无锡","无锡市清源餐厅","新安街道新安村新庄村新安花苑三期J组","0510-81193401"],
["江苏","无锡","无锡市春潮餐厅","新光路555号新之城全生活广场乐购超市一层","0510-88272567"],
["江苏","无锡","无锡市鸿山餐厅","新区鸿山街道鸿运路与欣鸿路交汇处东北角房产一层","0510-88583039"],
["江苏","无锡","无锡市新友餐厅","新区梅村街道新友北路以东、锡义路以南第1至8号楼幢1095单元1层2096单元2层","0510-88155365"],
["江苏","无锡","无锡市新硕放餐厅","新区硕放通祥路东侧、南星路南侧、薛典路西侧","0510-82727677"],
["江苏","无锡","无锡市新韵奥莱餐厅","新区锡勤路18号-28号一层B2-109+B2-110商铺","0510-88156521"],
["江苏","无锡","无锡市春江餐厅","学前东路与华夏路的东北角","0510-88210386"],
["江苏","无锡","无锡市世博广场餐厅","长江北路106号1+2层","0510-85220839"],
["江苏","无锡","无锡市欧尚餐厅","长江北路288号","0510-82103835"],
["江苏","无锡","无锡市中南餐厅","中南路与苏锡路交汇处财富广场D区一层","0510-85400311"],
["江苏","无锡","无锡市汇金餐厅","中山路198号","0510-82738032"],
["江苏","无锡","无锡市无锡汽车站餐厅","综合交通枢纽F2地块站北汽车站","0510-82326863"],
["江苏","无锡","盛唐餐厅","无锡市北塘区民丰村","0510-68795621"],
["江苏","无锡","五星餐厅","江苏省江阴市普惠北路555号","0510-68830556"],
["江苏","无锡","博大餐厅","滨湖区蠡湖大道与高浪路交叉口西北侧一层","18605107222"],
["江苏","无锡","三阳精选餐厅","崇安区地铁1号线商业街三阳广场站北翼","13815953230"],
["江苏","无锡","百乐店KFC","惠山区钱桥镇镇区盛岸西路南侧1+2层","0510-85251953"],
["江苏","无锡","无锡长安KFC","惠山区长安街道华宇商业中心","0510-85013755"],
["江苏","无锡","欣旺店","新区长江北路东面与红旗路南面交汇处一层","0510-85210461"],
["江苏","无锡","欣旺","新区长江北路东面与红旗路南面交汇处一层肯德基KFC餐厅","0510-85210461"],
["江苏","徐州","丰县凤鸣餐厅","丰县解放路3号","0516-89249498"],
["江苏","徐州","徐州华联餐厅","淮海东路51号华联商厦1楼","0516-83710049"],
["江苏","徐州","徐州淮海餐厅","淮海西路252号","0516-85652641"],
["江苏","徐州","徐州贾汪百大餐厅","贾汪区劳工街前委路中段百货大楼一层二层","516-87612485"],
["江苏","徐州","徐州泰山餐厅","解放南路延长段28号","0516-83990018"],
["江苏","徐州","徐州沃尔玛餐厅","解放南路与和平路交叉口正翔商业广场一层1025单元","0516-83336222"],
["江苏","徐州","沛县汉城餐厅","沛县汉城中路东侧刘邦大酒店一层","0516-89679099"],
["江苏","徐州","徐州金鹰餐厅","彭城广场北侧金鹰商厦1楼","0516-83109018"],
["江苏","徐州","徐州云龙万达餐厅餐厅","庆丰路与和平路交叉口西北角万达广场","0516-83008473"],
["江苏","徐州","徐州大福源餐厅","泉山区煤建路8号","0516-85820348"],
["江苏","徐州","徐州世茂大润发餐厅","三环东路以西世茂广场一层","0516-83662528"],
["江苏","徐州","睢宁万象苏果餐厅","睢宁县八一路南万象天地华润苏果入口","0516-88347260"],
["江苏","徐州","新沂富民餐厅餐厅","新沂新安镇富民路100号银河之星","0516-88920466"],
["江苏","徐州","徐州振兴餐厅","徐海路与振兴大道交叉口东南角","0516-83550859"],
["江苏","徐州","徐州盛佳餐厅","中山北路1号盛佳大厦1层","0516-85936500"],
["江苏","徐州","徐州火车站店","津浦西路202号火车站一层","0516-83609788"],
["江苏","徐州","徐州高铁店","高铁站出发层","0516-83553487"],
["江苏","徐州","徐州站精选店","火车站进站大厅一层","0516-83609763"],
["江苏","徐州","老东门餐厅","淮海东路104号老东门时尚街区肯德基餐厅","0516-83339267"],
["江苏","常州","常州常润发餐厅","关河东路58号","0519-88150931"],
["江苏","常州","常州青山餐厅","关河西路斗巷1幢","0519-86612676"],
["江苏","常州","常州鸣新路餐厅","湖塘镇鸣新中路21号常州易买得购物中心","0519-88997210"],
["江苏","常州","常州西瀛里餐厅","莱蒙都会国际商业街区A5-1-11,A5-1-12,A5-02-34铺位","0519-86622352"],
["江苏","常州","常州百货大楼 餐厅","莱蒙都会商业街(A4区常州百货大楼)4-101、4-201","0519-88157771"],
["江苏","常州","常州丽华餐厅","丽华路西侧、312国道北侧诺诚高第一层、二层","0519-88861192"],
["江苏","常州","常州通江餐厅","通江路西侧时代商务广场北3幢","0519-85150733"],
["江苏","常州","武进人民商场餐厅","武进区湖塘镇人民中路20号人民商场1楼","0519-86550159"],
["江苏","常州","武进万达餐厅餐厅","武进区花园街与大通路交汇处万达广场","0519-85389961"],
["江苏","常州","武进武宜路餐厅","武进区人民路新城万博广场家乐福超","0519-81883156"],
["江苏","常州","常州薛家苏果餐厅","新北区常州天宇购物广场苏果超","0519-86923390"],
["江苏","常州","常州丰臣乐购餐厅","新北区通江路108号丰臣国际-1F","0519-85122691"],
["江苏","常州","常州万达金街餐厅","新北万达广场金街","0519-85785227"],
["江苏","常州","常州泰富餐厅","延陵西路15号嘉宏世纪大厦一层","0519-88172239"],
["江苏","常州","常州延陵餐厅","延陵西路166号","0519-86623054"],
["江苏","常州","常州北环欧尚餐厅","永宁路18号","0519-85502713"],
["江苏","常州","常州中吴苏果餐厅","中吴大道与长江中路交叉口西北角","0519-81092850"],
["江苏","常州","常州勤业餐厅","钟楼区勤业路285号","0519-86870736"],
["江苏","常州","常州邹区泰富餐厅","邹区东方路易买得超","0519-85287817"],
["江苏","常州","常州客运中心店","火车站北侧常州客运中心西辅楼二层","0519-81182057"],
["江苏","常州","常州恐龙园店","新北区东支河以南、龙汇路以西、龙潭路以北","0519-85520195"],
["江苏","常州","北环时代餐厅","竹林南路乐天玛特超市一楼KFC餐厅","0519-85575102"],
["江苏","常州","兰陵餐厅","天宁区兰陵北路43号(欧尚超市)","0519-86654713"],
["江苏","苏州","港龙餐厅","长江路556号港龙城市商业广场","0512-65373072"],
["江苏","苏州","常熟世纪联华餐厅","江苏省常熟市白雪路18号世纪联华卖场内","0512-52759316"],
["江苏","苏州","常熟市虞景文华餐厅","江苏省常熟市北门大街虞景文华广场内 34-35号","0512-52165201"],
["江苏","苏州","常熟海虞餐厅","江苏省常熟市海虞北路27号常客隆超市一层","0512-52820310"],
["江苏","苏州","常熟市西蒙餐厅","江苏省常熟市海虞南路88号1014沃尔玛超市1层","0512-52928530"],
["江苏","苏州","虞山餐厅","江苏省常熟市和平街2号","0512-52772164"],
["江苏","苏州","常熟欧尚餐厅","江苏省常熟市黄河路255号","0512-52861068"],
["江苏","苏州","常熟梅里餐厅","江苏省常熟市梅李镇通江路11号银河苑1区5幢123-124号一二层","0512-51537917"],
["江苏","苏州","常熟百润发餐厅","江苏省常熟市新颜东路1号","0512-52738537"],
["江苏","苏州","方塔街餐厅","江苏省常熟市虞山镇方塔街5号","0512-52702850"],
["江苏","苏州","常熟天虹餐厅","江苏省常熟市招商北路8号(常熟天虹服装城)","0512-52200661"],
["江苏","苏州","常熟珠江餐厅","江苏省常熟市珠江东路98号常熟世茂世纪中心欧尚超市","0512-52805696"],
["江苏","苏州","昆山市柏庐餐厅","江苏省昆山市柏庐路999号吉田国际广场","0512-55137953"],
["江苏","苏州","昆山市永盛餐厅","江苏省昆山市北门路1118号永盛广场","0512-57112193"],
["江苏","苏州","昆山北门餐厅","江苏省昆山市北门路267号","0512-57511050"],
["江苏","苏州","大润发餐厅","江苏省昆山市朝阳中路516号","0512-57366519"],
["江苏","苏州","昆山洞庭湖路餐厅","江苏省昆山市洞庭湖路288号中航城华润万家一层","0512-36805216"],
["江苏","苏州","昆山市昆山花桥餐厅","江苏省昆山市花桥镇花安路171号","0512-57513995"],
["江苏","苏州","昆山开发区餐厅","江苏省昆山市经济技术开发区前进中路237号","0512-57381109"],
["江苏","苏州","昆山嵩山路餐厅","江苏省昆山市开发区嵩山路836-868号","0512-50171722"],
["江苏","苏州","昆山陆家乐购餐厅","江苏省昆山市陆家镇陆家浜北路东侧乐购超市一层","0512-36805211"],
["江苏","苏州","昆山陆家餐厅","江苏省昆山市陆家镇友谊路118号华润万家超市一楼","0512-36803675"],
["江苏","苏州","昆山千灯餐厅","江苏省昆山市千灯镇尚书路8号千灯大润发一层","0512-36618179"],
["江苏","苏州","昆山世茂润发餐厅","江苏省昆山市前进东路南侧夏驾河东侧世茂广场一层","0512-36887336"],
["江苏","苏州","昆山世贸餐厅","江苏省昆山市前进路中山路交汇处中茵世贸广场","0512-36836340"],
["江苏","苏州","粮贸餐厅","江苏省昆山市人民北路1号","0512-57995352"],
["江苏","苏州","朝阳餐厅","江苏省昆山市人民南路125号","0512-57368957"],
["江苏","苏州","昆山欧尚餐厅","江苏省昆山市玉山镇白马泾路46号欧尚卖场1层","0512-55252710"],
["江苏","苏州","昆山正阳餐厅","江苏省昆山市玉山镇人民路41-43号","0512-57501171"],
["江苏","苏州","昆山市张浦餐厅","江苏省昆山市张浦镇新吴街148号","0512-55152848"],
["江苏","苏州","昆山易初莲花餐厅","江苏省昆山市长江北路128号(萧林路路口)易初莲花昆山店一层","0512-57861732"],
["江苏","苏州","昆山春晖路餐厅","江苏省昆山市长江路与春晖路交接处望族商城一层","0512-55257102"],
["江苏","苏州","何山餐厅","江苏省苏州市滨河路1823号何山大润发一层","0512-68417737"],
["江苏","苏州","威尼斯餐厅","江苏省苏州市阊胥路282-300号","0512-65576892"],
["江苏","苏州","苏州市东环路餐厅","江苏省苏州市东环路1500号大润发超市内","0512-62763790"],
["江苏","苏州","苏州市东吴北路餐厅","江苏省苏州市东吴北路111号朗庭1层","0512-66353960"],
["江苏","苏州","苏美餐厅","江苏省苏州市东吴北路68号苏美中心裙楼1层","0512-65688722"],
["江苏","苏州","杨枝塘餐厅","江苏省苏州市葑谊路266号东环路与杨枝塘路交叉口家乐福超市内","0512-69178661"],
["江苏","苏州","苏州市凤凰餐厅","江苏省苏州市凤凰街196号","0512-68766617"],
["江苏","苏州","贵宾餐厅","江苏省苏州市干将路888-1号","0512-65215273"],
["江苏","苏州","苏州市学府餐厅","江苏省苏州市高新区(科锐路东 学新路南 学府路北)汇金商业广场11幢","0512-67370761"],
["江苏","苏州","苏州市浒关餐厅","江苏省苏州市高新区文昌路301号浒关大润发超市内一楼","0512-68761028"],
["江苏","苏州","苏州市园区车坊餐厅","江苏省苏州市工业园区车坊镇普惠路1089号星湖财富广场2幢108室","0512-62710549"],
["江苏","苏州","苏州市方洲餐厅","江苏省苏州市工业园区方洲路528号方洲邻里中心","0512-65075336"],
["江苏","苏州","苏州市金鸡湖邻里餐厅","江苏省苏州市工业园区金鸡湖大道和西华林街交界处邻瑞广场","0512-67089801"],
["江苏","苏州","苏州市园区斜塘餐厅","江苏省苏州市工业园区莲葑路18号联丰商业广场4幢世纪联华卖场内","0512-65079712"],
["江苏","苏州","苏州市园区林泉餐厅","江苏省苏州市工业园区林泉街611号文星广场","0512-62652924"],
["江苏","苏州","苏州市园区玲珑餐厅","江苏省苏州市工业园区玲珑街58号1栋101肯德基餐厅","0512-65953763"],
["江苏","苏州","苏州市星塘街餐厅","江苏省苏州市工业园区淞江路18号","0512-65916235"],
["江苏","苏州","苏州市园区苏雅餐厅","江苏省苏州市工业园区苏雅路388号新天翔商业广场","0512-62534513"],
["江苏","苏州","苏州市阳澄湖服务区餐厅","江苏省苏州市工业园区唯亭沪宁高速阳澄湖服务区北区一层","0512-68232990"],
["江苏","苏州","苏州市印象城餐厅","江苏省苏州市工业园区现代大道1699号","0512-69561397"],
["江苏","苏州","邻里餐厅","江苏省苏州市工业园区现代大道苏春西路新城大厦邻里中心518号","0512-62516090"],
["江苏","苏州","苏州市园区泾园餐厅","江苏省苏州市工业园区扬东路277号新屹大厦1幢102室一层","0512-67268251"],
["江苏","苏州","苏州市园区华池餐厅","江苏省苏州市工业园区圆融时代广场S2栋C幢 旺敦路268号","0512-62856784"],
["江苏","苏州","苏州市钟慧路餐厅","江苏省苏州市工业园区钟慧路华润超市一楼","0512-67909211"],
["江苏","苏州","苏州市园区东沙湖餐厅","江苏省苏州市工业园区钟南街东的东沙湖邻里中心西南角一层、二层肯德基","0512-62537349"],
["江苏","苏州","苏州市江星餐厅","江苏省苏州市姑苏区城北东路1188号(江星广场华润万家平江新城店)","0512-65330987"],
["江苏","苏州","观前餐厅","江苏省苏州市观前街111号","0512-67201355"],
["江苏","苏州","苏州市观中餐厅","江苏省苏州市观前街136号","0512-67201171"],
["江苏","苏州","苏州市石路新苏餐厅","江苏省苏州市广济南路219号新苏国际负1楼","0512-65899301"],
["江苏","苏州","苏州市浒关保卫路餐厅","江苏省苏州市浒墅关保卫路33、35号(华润万家购物中心)一层、二层","0512-67083076"],
["江苏","苏州","苏州市广发餐厅","江苏省苏州市金阊区广济南路19号大润发一层","0512-68267116"],
["江苏","苏州","苏州市富邻广场餐厅","江苏省苏州市金阊区物流园富强路北、虎新路东富邻商业广场(金阊区城北西路1988号)3号楼一层","0512-67770915"],
["江苏","苏州","金枫餐厅","江苏省苏州市金枫路1号华润超市内","0512-68023952"],
["江苏","苏州","苏州市新欧尚餐厅","江苏省苏州市金鸡湖路205号欧尚超市内","0512-67615859"],
["江苏","苏州","跨塘餐厅","江苏省苏州市跨塘镇葑亭大道锦丰泰盛生活广场西区1号楼","0512-62860830"],
["江苏","苏州","苏州市木渎金山餐厅","江苏省苏州市木渎镇金山路37号易买得卖场一层","0512-66793665"],
["江苏","苏州","苏州市木渎影视城餐厅","江苏省苏州市木渎镇金山南路288号苏州国际影视娱乐城3号楼1层","0512-68259701"],
["江苏","苏州","苏州市木渎餐厅","江苏省苏州市木渎镇中山西路16-12号","0512-66566992"],
["江苏","苏州","苏州市平江新城餐厅","江苏省苏州市平江区人民路3188号万达商业广场家乐福一层","0512-67152378"],
["江苏","苏州","饮马桥餐厅","江苏省苏州市人民路1001号","0512-65776235"],
["江苏","苏州","接驾桥餐厅","江苏省苏州市人民路1640号","0512-67201815"],
["江苏","苏州","苏州市南门餐厅","江苏省苏州市人民路458号1层1002室、1003室负一层B1001室","0512-65198163"],
["江苏","苏州","南环餐厅","江苏省苏州市人民南路80号","0512-65251678"],
["江苏","苏州","三香餐厅","江苏省苏州市三香路120号","0512-68601552"],
["江苏","苏州","体育中心餐厅","江苏省苏州市三香路183-184号","0512-68662970"],
["江苏","苏州","苏州市园区胜浦餐厅","江苏省苏州市胜浦镇兴浦街金胜路口金雅苑8号1层","0512-67060278"],
["江苏","苏州","苏州市新石路国际餐厅","江苏省苏州市石路18号石路国际商厦地下","0512-65321032"],
["江苏","苏州","盘门百润发餐厅","江苏省苏州市苏福公路13号","0512-68155259"],
["江苏","苏州","塔园餐厅","江苏省苏州市塔园路99号乐购超市1层","0512-68182981"],
["江苏","苏州","苏州市桐泾北路餐厅","江苏省苏州市桐泾北路218号来茂时尚生活中心一层","0512-67863577"],
["江苏","苏州","苏州市唯亭餐厅","江苏省苏州市唯亭镇金陵西路288号1栋","0512-65075311"],
["江苏","苏州","苏州市碧波餐厅","江苏省苏州市吴中经济开发区东吴南路388号","0512-65683885"],
["江苏","苏州","苏州市吴中汽车站餐厅","江苏省苏州市吴中经济开发区广建路10号苏州吴中汽车客运站一二层","0512-68183731"],
["江苏","苏州","苏州市甪直餐厅","江苏省苏州市吴中区甪直镇晓市路26号","0512-66193307"],
["江苏","苏州","苏州市木渎翠坊餐厅","江苏省苏州市吴中区木渎翠坊街18号","0512-65095082"],
["江苏","苏州","苏州市木渎惠润餐厅","江苏省苏州市吴中区木渎镇谢村路269号惠润金峰邻里中心商铺1-001号","0512-66090021"],
["江苏","苏州","苏州市吴中香雪海餐厅","江苏省苏州市吴中区吴中大道168号,香雪海购物广场内","0512-62697383"],
["江苏","苏州","苏州市越溪餐厅","江苏省苏州市吴中区越溪镇吴中大道1109号南苏州生活广场E幢B馆","0512-68755711"],
["江苏","苏州","相城餐厅","江苏省苏州市相城经济开发区华元路99号","0512-65789096"],
["江苏","苏州","苏州市相城漕湖餐厅","江苏省苏州市相城区漕湖产业园苏虞张西、卫渭路南漕湖邻里中心","0512-69393053"],
["江苏","苏州","苏州市黄埭餐厅","江苏省苏州市相城区黄埭镇康阳路西136号,康阳供销合作超市内","0512-65791792"],
["江苏","苏州","苏州市繁花餐厅","江苏省苏州市相城区人民路4555号繁花中心一层","0512-65222915"],
["江苏","苏州","苏州市太平邻里餐厅","江苏省苏州市相城区太平街道兴太路东、太平大街南太平邻里中心","0512-65811172"],
["江苏","苏州","苏州市渭塘餐厅","江苏省苏州市相城区渭塘镇渭星街288号2幢","0512-65713621"],
["江苏","苏州","苏州市相城玛特餐厅","江苏省苏州市相城区元和街道华元路918号101室一层","0512-65387782"],
["江苏","苏州","苏州市阳澄湖餐厅","江苏省苏州市相城区元和街道阳澄湖中路北、采莲路西,中环百汇广场","0512-65214130"],
["江苏","苏州","苏州市相城元和餐厅","江苏省苏州市相城区元和镇阳澄湖东路1号","0512-65491953"],
["江苏","苏州","苏州市大光明餐厅","江苏省苏州市小公园北局9号大光明影城","0512-65811182"],
["江苏","苏州","苏州乐园餐厅","江苏省苏州市新区长江路401号","0512-68180781"],
["江苏","苏州","绿宝广场餐厅","江苏省苏州市新区长江路436号","0512-68186001"],
["江苏","苏州","苏州市星海街餐厅","江苏省苏州市星海街199号星海生活广场北区负1层","0512-68783559"],
["江苏","苏州","苏州市胥口帝豪餐厅","江苏省苏州市胥口镇孙武路北侧、香泾路东侧“帝豪家缘”商业一层","0512-66320706"],
["江苏","苏州","苏州市友新路餐厅","江苏省苏州市友新路1188号亿象商业城5号楼一层","0512-68132017"],
["江苏","苏州","湖东邻里餐厅","江苏省苏州市园区星湖街178号湖东邻里中心南楼N101号房一层","0512-62587006"],
["江苏","苏州","苏州市竹园路餐厅","江苏省苏州市竹园路南、滨河路西乐天玛特超市一层","0512-69561379"],
["江苏","苏州","太仓市太仓浏河餐厅","江苏省太仓市浏河镇郑和大街南侧浏河汽车站一层","0512-53612701"],
["江苏","苏州","太仓东亭路餐厅","江苏省太仓市娄江路与县府街交叉口西南侧万达广场","0512-53520881"],
["江苏","苏州","太仓南洋餐厅","江苏省太仓市人民南路19号","0512-53870097"],
["江苏","苏州","太仓港区邻里餐厅","江苏省太仓市太仓港港口开发区龙江路东侧、映雪路北侧太仓邻里中心","0512-53708218"],
["江苏","苏州","太仓沙溪餐厅","江苏省太仓市太仓沙溪镇白云路 146号","0512-53921277"],
["江苏","苏州","太仓润南餐厅","江苏省太仓市太平南路18号大润发卖场","0512-53123705"],
["江苏","苏州","新华餐厅","江苏省太仓市新华街5号","0512-53533791"],
["江苏","苏州","吴江汾湖餐厅","江苏省吴江市汾湖镇杭州路北侧TESCO乐购超市一层","0512-63188255"],
["江苏","苏州","吴江运东餐厅","江苏省吴江市经济开发区运东大道东侧江南奥斯卡5幢1层","0512-63319155"],
["江苏","苏州","吴江百润发餐厅","江苏省吴江市笠泽路117号恒森广场百润发","0512-63498072"],
["江苏","苏州","吴江市平望餐厅","江苏省吴江市平望镇通运路44号时代广场9号一层","0512-63660231"],
["江苏","苏州","吴江市盛泽春之声餐厅","江苏省吴江市盛泽镇北观音弄38号春之声商业广场一层","0512-63188280"],
["江苏","苏州","盛泽市场餐厅","江苏省吴江市盛泽镇东方丝绸市场十字河西一层1区157号","0512-63086576"],
["江苏","苏州","吴江市盛泽西环路餐厅","江苏省吴江市盛泽镇西环路西侧欧尚超市一层","0512-63315506"],
["江苏","苏州","吴江市盛泽舜新餐厅","江苏省吴江市舜新中路27号大润发卖场一层","0512-63085910"],
["江苏","苏州","吴江沃尔玛餐厅","江苏省吴江市松陵镇鲈乡北路东侧江兴西路469号","0512-63922025"],
["江苏","苏州","松陵餐厅","江苏省吴江市松陵镇永康路360号江苏时代超级购物中心一层","0512-63463932"],
["江苏","苏州","吴江永康餐厅","江苏省吴江市松陵镇永康路68号","0512-63020351"],
["江苏","苏州","吴江市同里餐厅","江苏省吴江市同里镇游客集散中心1楼","0512-63310992"],
["江苏","苏州","吴江市联杨邻里餐厅","江苏省吴江市吴江区庞杨路1939号联杨邻里中心KFC餐厅","0512-63030956"],
["江苏","苏州","吴江市震泽餐厅","江苏省吴江市震泽镇镇南路1000号江南缘项目c区","0512-63753031"],
["江苏","苏州","吴江万宝大润发餐厅","江苏省吴江市中山南路1088号一层S1001","0512-63319070"],
["江苏","苏州","张家港长丰餐厅","江苏省张家港市大新镇长丰村府前东路20号一层","0512-58725587"],
["江苏","苏州","张家港华昌餐厅","江苏省张家港市华昌路与华山路口湖滨世家肯德基店","0512-56309301"],
["江苏","苏州","张家港市金港润发餐厅","江苏省张家港市金港镇黄泗埔路南侧金港大润发超市","0512-58722569"],
["江苏","苏州","张家港市金港餐厅","江苏省张家港市金港镇长江中路中央广场内","0512-58705007"],
["江苏","苏州","张家港锦丰餐厅","江苏省张家港市锦丰镇锦店路1号","0512-56762178"],
["江苏","苏州","张家港购物公园餐厅","江苏省张家港市沙洲西路购物公园太阳广场1层","0512-56311975"],
["江苏","苏州","张家港百润发餐厅","江苏省张家港市沙洲中路288号","0512-58138501"],
["江苏","苏州","张家港塘桥餐厅","江苏省张家港市塘桥镇南京路与西环路交汇处大润发卖场1层","0512-58925178"],
["江苏","苏州","张家港市广场餐厅","江苏省张家港市杨舍镇步行街45号","0512-58236263"],
["江苏","苏州","张家港杨舍餐厅","江苏省张家港市杨舍镇梁丰路南侧杨舍老街1#-10#","0512-58999317"],
["江苏","苏州","张家港市长安餐厅","江苏省张家港市长安路221号","0512-58979007"],
["江苏","苏州","双湖邻里店","吴中区企鸿路北、独墅湖大道南(门牌号地址:企鸿路18号,双湖邻里中心铺位编号112号)","13962115454"],
["江苏","苏州","昆山九方城店","昆山市萧林中路666号的九方购物中心负一层店铺号为B-017","0512-36830591"],
["江苏","苏州","张家港吾悦店","张家港市金港大道338号(杨舍镇旺西村紫荆路东侧、南二环路南侧、金港大道西侧、旺西路北侧)吾悦商业广场主入口","0512-58693863"],
["江苏","苏州","吴中万达店","吴中区石湖西路188号","13771899969"],
["江苏","苏州","常熟支塘店","常熟市支塘镇支川路118号2幢一层","15851512177"],
["江苏","苏州","木渎花样城店","吴中区苏福路北、珠江路西侧","0512-67553610"],
["江苏","苏州","张家港人民路店","张家港市杨舍镇东苑路东侧、东二环路西侧、人民东路北侧(金新城置业B地块)","0512-56796218"],
["江苏","苏州","唯亭君地店","工业园区唯华路5号1幢楼101、201","15190038032"],
["江苏","苏州","新南站店","南环东路5号一层","0512-65256007"],
["江苏","苏州","张家港汽车站店","华昌南路长途汽车客运站车站内","0512-56901629"],
["江苏","苏州","太仓汽车站店","新区太平路北路太仓汽车客运中心站一、二层","0512-53875399"],
["江苏","苏州","新苏站店","苏站路苏州火车站北下沉广场东侧地下一层","0512-67298110"],
["江苏","苏州","汽车北站店","西汇路29号汽车客运站2层","0512-68766751"],
["江苏","苏州","北广场店","火车站站北、广场西侧、苏站路以南苏州综合客运枢纽汽车客运站二层","0512-67781753"],
["江苏","苏州","吴江车站店","松陵镇吴江经济开发区227省道西、学院路东侧吴江汽车站内","0512-63319850"],
["江苏","苏州","昆山城际店","沪宁高铁昆山南站南入口东侧","0512-36805215"],
["江苏","苏州","昆山汽车站店","开发区柏庐南路788号4号房汽车客运中心站一层","0512-36803621"],
["江苏","苏州","园区时尚店","工业园区唯亭镇后戴街9号肯德基餐厅","0512-62578231"],
["江苏","苏州","湖东邻里","园区星湖街178号湖东邻里中心南楼N101号房一层KFC餐厅","0512-62587006"],
["江苏","南通","海安江海路餐厅","江苏省海安市海安县宁海南路2号文峰大世界购物中心2楼","0513-88939122"],
["江苏","南通","海安宁海餐厅","江苏省海安市人民东路2-4号","0513-88167212"],
["江苏","南通","海安明珠餐厅","江苏省海安市中坝南路1号明珠城.联华超市一楼","0513-88900111"],
["江苏","南通","海安中大街餐厅","江苏省海安市中大街26号楼一层","0513-88906181"],
["江苏","南通","海门叠石桥餐厅","江苏省海门市叠石桥家纺城大岛路和秀女路交叉口","0513-82279159"],
["江苏","南通","海门长江餐厅","江苏省海门市黄海路530号欧尚超市一楼","0513-82192759"],
["江苏","南通","海门市海门时代餐厅","江苏省海门市解放东路309号北贵都之星乐天马特一层","0513-82192720"],
["江苏","南通","海门市解放餐厅","江苏省海门市解放中路737号","0513-82192750"],
["江苏","南通","海门市海门农工商餐厅","江苏省海门市镇中路1号","0513-82104500"],
["江苏","南通","南通城山路餐厅","江苏省南通市崇川区城山路100号","0513-85031265"],
["江苏","南通","南通桃园路餐厅","江苏省南通市崇川区桃园路12号中南城购物中心负一楼","0513-55019539"],
["江苏","南通","南通北大街餐厅","江苏省南通市港闸区北大街108号(大润发超市一层)","0513-55083592"],
["江苏","南通","南通龙王桥餐厅","江苏省南通市工农路259号乐天玛特1楼","0513-55083952"],
["江苏","南通","南通工农路餐厅","江苏省南通市工农路515号乐购超市一楼","0513-81188703"],
["江苏","南通","南通易初莲花餐厅","江苏省南通市工农南路西侧红心路路口","0513-85230718"],
["江苏","南通","南通市通州文峰餐厅","江苏省南通市金沙镇建设路76号通州文峰大世界","0513-86117127"],
["江苏","南通","南通上海路餐厅","江苏省南通市开发区上海路80号乐天玛特超市一楼","0513-89197039"],
["江苏","南通","南通市亚萍餐厅","江苏省南通市南大街2号","0513-85122082"],
["江苏","南通","南通平潮餐厅","江苏省南通市平潮镇建设路137号","0513-86712265"],
["江苏","南通","南通圆融餐厅","江苏省南通市青年东路与工农路交接处圆融广场负一楼","0513-55081582"],
["江苏","南通","南通市南大街餐厅","江苏省南通市人民路47号","0513-85513328"],
["江苏","南通","南通市人民餐厅","江苏省南通市人民中路197号","0513-85122775"],
["江苏","南通","南通市通润发餐厅","江苏省南通市人民中路29号","0513-85534336"],
["江苏","南通","南通市十字街餐厅","江苏省南通市人民中路33号","0513-85108223"],
["江苏","南通","南通任港餐厅","江苏省南通市任港路50号","0513-89083367"],
["江苏","南通","南通市七彩餐厅","江苏省南通市桃坞路58号","0513-85524404"],
["江苏","南通","南通市通州金沙餐厅","江苏省南通市通州区建设南路18号大润发卖场一层","0513-86521117"],
["江苏","南通","南通市通州农工商餐厅","江苏省南通市通州区人民中路138号肯德基餐厅","0513-86119117"],
["江苏","南通","启东市吕四餐厅","江苏省启东市吕四镇环城北路272号吕四宾馆楼下","0513-83906339"],
["江苏","南通","启东公园餐厅","江苏省启东市启东公园中路545号新世纪购物中心","0513-83121887"],
["江苏","南通","启东时代餐厅","江苏省启东市人民路北上海国际城内时代购物中心内","0513-83121198"],
["江苏","南通","启东明珠餐厅","江苏省启东市人民中路北侧603号东方明珠商业广场一层","0513-83100611"],
["江苏","南通","如东城区餐厅","江苏省如东市掘港镇老城区青园北路19号如东文峰大世界内","0513-84527018"],
["江苏","南通","如东市如东餐厅","江苏省如东市青园路30号一层","0513-84531738"],
["江苏","南通","如东欧尚餐厅","江苏省如东市如东县通海路40号欧尚超市一楼","0513-84534776"],
["江苏","南通","如皋大润发餐厅","江苏省如皋市海阳路与福寿路交叉口199号大润发超市一楼","0513-87650177"],
["江苏","南通","如皋市白蒲餐厅","江苏省如皋市如皋市白蒲镇文峰大世界一楼","0513-88570677"],
["江苏","南通","如皋长江餐厅","江苏省如皋市长江镇文峰大世界一楼","0513-87589677"],
["江苏","南通","如皋餐厅","江苏省如皋市中山路398号丰乐桥西欧国际广场一层","0513-87537977"],
["江苏","南通","如皋益寿路餐厅","江苏省如皋市中山西路1号正翔广场1楼","0513-87512316"],
["江苏","南通","南通世茂店","经济技术开发区新东路101号地块(上海东路北、新河东路东)世茂广场大润发超市内","0513-89158389"],
["江苏","南通","南通万达店","港闸区深南路9号万达广场一楼KFC","18061813127"],
["江苏","连云港","连云港盐河路时代餐厅","朝阳西路与盐河南路交叉口时代超级购物中心一层","0518-81065762"],
["江苏","连云港","连云港东海时代餐厅","东海县和平东路79号时代超旁","0518-87222055"],
["江苏","连云港","赣榆黄海路餐厅","赣榆县青口镇黄海路37号","0518-86225177"],
["江苏","连云港","连云港灌南世茂餐厅","灌南县人民中路世茂时代广场B区","0518-83298866"],
["江苏","连云港","连云港灌云时代餐厅","灌云县胜利路富园广场时代超市1楼","0518-88863228"],
["江苏","连云港","连云港解放路永德餐厅","新浦区中德广场","0518-85609563"],
["江苏","连云港","东方餐厅","新浦区解放中路37号东方大厦","0518-85478622"],
["江苏","连云港","海棠餐厅","海棠路中山路交叉口","0518-82308022"],
["江苏","连云港","金庭大润发餐厅","郁州路与陇海东路交汇处大润发1层","0518-85851212"],
["江苏","连云港","墟沟嘉瑞宝餐厅","连云区中山西路嘉瑞宝广场一楼","0518-81395996"],
["江苏","淮安","淮阴时代餐厅","北京东路和淮海北路交汇处","0517-80997001"],
["江苏","淮安","承德北路大润发餐厅","承德北路与黄河路交汇处","0517-81562086"],
["江苏","淮安","楚州苏果餐厅","楚州区淮城镇东门大街108号一层","0517-85987605"],
["江苏","淮安","楚州淮城餐厅","楚州区淮城镇南门大街2号供销大厦一层","0517-85130698"],
["江苏","淮安","洪泽大润发餐厅","洪泽县北京路与建设路交汇处","0517-87228008"],
["江苏","淮安","洪泽人民餐厅","洪泽县东风路111号","0517-87225777"],
["江苏","淮安","金湖苏果餐厅","金湖县金湖路12号与新民路交汇处华润苏果购物广场一、二层","0517-86885517"],
["江苏","淮安","涟水大润发餐厅","涟水县安东路与常青路西北角","0517-82663798"],
["江苏","淮安","涟水苏果餐厅","涟水县渠北路53号安东商业广场华润苏果超1楼","0517-82396268"],
["江苏","淮安","淮安南昌路大润发餐厅","南昌路与威海路交汇处东北角","0517-83387977"],
["江苏","淮安","淮安北京路苏果餐厅","清河区北京新村五区新苑逸城","0517-89366881"],
["江苏","淮安","淮安健康东路餐厅","清河区健康东路49号(丹桂苑综合楼营业房)","0517-83398027"],
["江苏","淮安","楚州大润发餐厅","翔宇大道与北门大街交汇处","0517-89957907"],
["江苏","淮安","淮安万达餐厅","翔宇中路169号","0517-83700877"],
["江苏","淮安","盱眙大润发餐厅","盱眙县都梁大道与东湖南路交汇处","0517-88272018"],
["江苏","淮安","盱眙华厦餐厅","盱眙县华厦国际广场A幢1单元一层","0517-88213678"],
["江苏","盐城","大丰幸福餐厅","江苏省大丰市大新路和幸福路交界处(原逸夫小学地块)","0515-83288657"],
["江苏","盐城","大丰市大丰餐厅","江苏省大丰市健康东路39号一层","0515-83512250"],
["江苏","盐城","东台德润餐厅","江苏省东台市海陵北路9号101室、102室德润购物广场1层107商铺","0515-85278677"],
["江苏","盐城","东台市盐城东台餐厅","江苏省东台市海陵南路2号","0515-85278770"],
["江苏","盐城","东台红兰餐厅","江苏省东台市红兰路与望海路路口","0515-89517197"],
["江苏","盐城","盐城市滨海阜东路餐厅","江苏省盐城市滨海县阜东路39号","0515-84678880"],
["江苏","盐城","盐城市滨海餐厅","江苏省盐城市滨海县阜东中路198号","0515-84288777"],
["江苏","盐城","盐城市滨海润发餐厅","江苏省盐城市滨海县人民路大润发卖场","0515-84122668"],
["江苏","盐城","盐城中南餐厅","江苏省盐城市城南新区解放路与海洋路交汇处东北角","0515-68232990"],
["江苏","盐城","盐城市阜宁餐厅","江苏省盐城市阜宁县阜城大街149号","0515-87286789"],
["江苏","盐城","盐城市阜宁益林餐厅","江苏省盐城市阜宁县益林镇人民路2号苏可超市","0515-87798897"],
["江苏","盐城","盐城市建湖餐厅","江苏省盐城市建湖县人民路东侧向阳路北侧金基·东方广场A1栋123层","0515-86217276"],
["江苏","盐城","盐城市建湖秀夫餐厅","江苏省盐城市建湖县向阳路和秀夫路东南侧","0515-86111398"],
["江苏","盐城","盐城建中餐厅","江苏省盐城市建军路59号(铜马影城下)","0515-88230207"],
["江苏","盐城","盐城市盐阜餐厅","江苏省盐城市建军中路109号","0515-88363032"],
["江苏","盐城","盐城时代餐厅","江苏省盐城市建军中路59号时代超市内","0515-88346505"],
["江苏","盐城","盐城解放餐厅","江苏省盐城市解放南路128号","0515-88407522"],
["江苏","盐城","盐城解放润发餐厅","江苏省盐城市解放南路239号大润发肯德基","0515-88231676"],
["江苏","盐城","盐城市旺角餐厅","江苏省盐城市解放南路80号","0515-88382177"],
["江苏","盐城","盐城义乌餐厅","江苏省盐城市开放大道313号盐城义乌国际商贸城停车场东南角","0515-88209977"],
["江苏","盐城","盐城市盐城农工商餐厅","江苏省盐城市开放大道95号","0515-88232077"],
["江苏","盐城","盐城市射阳龙翔餐厅","江苏省盐城市射阳县合德镇解放路385号(龙翔阳光新城商业办公室)1层","0515-82388997"],
["江苏","盐城","盐城市射阳恒隆餐厅","江苏省盐城市射阳县恒隆广场大润发超市一楼","0515-82388993"],
["江苏","盐城","盐城市射阳人民路餐厅","江苏省盐城市射阳县人民中路12号,文峰超市一层","0515-89213669"],
["江苏","盐城","盐城市射阳餐厅","江苏省盐城市射阳县振阳街时代超市一层","0515-82388992"],
["江苏","盐城","盐城市盐城世纪餐厅","江苏省盐城市太平路36-38号一层","0515-88359110"],
["江苏","盐城","盐城先锋岛餐厅","江苏省盐城市先锋岛先锋国际广场商业中心建军西路与小海路交界处大润发超市一层","0515-88221676"],
["江苏","盐城","盐城市响水餐厅","江苏省盐城市响水县黄海路与滨江路交界处","0515-86781817"],
["江苏","盐城","盐城迎宾路餐厅","江苏省盐城市迎宾路宝龙广场迎宾店","0515-89851018"],
["江苏","盐城","盐城汽车站店","青年东路22号五星客运站内","0515-88201577"],
["江苏","扬州","宝应安宜餐厅","宝应县安宜东路27号","0514-88272917"],
["江苏","扬州","宝应国际苏果餐厅","宝应县白田中路与安宜东路交叉口东北角","0514-88990797"],
["江苏","扬州","扬州广润发餐厅","邗江路2号广润发餐厅","0514-87989040"],
["江苏","扬州","扬州邗江乐购餐厅","邗江中路306号乐购超一楼","0514-87959807"],
["江苏","扬州","扬州京华城餐厅","京华城路168号","0514-87896537"],
["江苏","扬州","扬州大润发广陵餐厅","文昌中路168号","0514-87231757"],
["江苏","扬州","扬州紫藤园餐厅","文昌中路537号","0514-87367269"],
["江苏","扬州","扬州汶河餐厅","汶河北路48号","0514-87314229"],
["江苏","扬州","扬州文昌广场餐厅","汶河南路75号","0514-87364282"],
["江苏","扬州","扬州四望亭餐厅","扬子江北路101号沃尔玛超一楼","0514-87780957"],
["江苏","扬州","扬州运河路餐厅","运河西路228号一层","0514-87249452"],
["江苏","扬州","扬州新汽车西站店","文昌西路北侧、火车站前广场西侧","0514-87101020"],
["江苏","扬州","大桥宏信龙","江都区东园路与新城路交叉口东北角","0514-86230960"],
["江苏","扬州","扬州西站店","邗江中路302号","0514-87959365"],
["江苏","扬州","汶河餐厅","汶河北路29号","0514-87331936"],
["江苏","扬州","真州时代","仪征市工农南路114号","0514-83423221"],
["江苏","扬州","苏果餐厅","仪征市真州西州147号","0514-83582828"],
["江苏","扬州","扬州竹西路大润发","邗江区竹西路2号北区大润发一楼肯德基餐厅","0514-87610921"],
["江苏","扬州","江阳欧尚","邗江区江阳中路425号欧尚超市一楼","0514-85118500"],
["江苏","扬州","扬州广电餐厅","广陵区汶河南路46号","0514-87364937"],
["江苏","镇江","镇江市镇江冠城餐厅","丹徒新城金谷东路与谷阳大道交叉口东北侧冠城商业中心一层","0511-84516308"],
["江苏","镇江","镇江城际餐厅","黄山西路沪宁城际铁路镇江站一层","0511-89987022"],
["江苏","镇江","镇江市庄泉餐厅","黄山西路万达广场步行街西侧","0511-85288551"],
["江苏","镇江","镇江汽车站精选餐厅","黄山西路镇江汽车客运站一层","0511-85601862"],
["江苏","镇江","镇江市大市口餐厅","解放路218号","0511-85014290"],
["江苏","镇江","镇江市东吴餐厅","京口区九里街1号","0511-88986397"],
["江苏","镇江","镇江市润京餐厅","京口区老山路与焦山路西北侧天正置业广场大润发超市","0511-88800672"],
["江苏","镇江","镇江市梦溪餐厅","梦溪路8号","0511-84408231"],
["江苏","镇江","镇江市丁卯餐厅","新区丁卯桥路北、谷阳路东沃尔玛购物广场一层","0511-88890127"],
["江苏","镇江","镇江市大港大润发餐厅","新区扬子江路南侧大润发超市一层","0511-88985326"],
["江苏","镇江","镇江市大港餐厅","新区赵声路82号","0511-83170395"],
["江苏","镇江","镇江市学府路餐厅","学府路68号欧尚超市一楼肯德基","0511-89883177"],
["江苏","镇江","镇江市电力路餐厅","中山东路382号","0511-85211561"],
["江苏","镇江","镇江车站餐厅","中山西路火车站广场东侧综合楼","0511-85237237"],
["江苏","镇江","丹阳吾悦餐厅","丹阳市金陵西路88号丹阳吾悦广场一层","0510-86991017"],
["江苏","镇江","镇江宝龙店KFC","丹徒区丹徒新城宝龙城市广场项目一层","0511-84523378"],
["江苏","泰州","泰州市高港餐厅","高港区春港路与金港路交汇处","0523-86961393"],
["江苏","泰州","泰州市金港餐厅","高港区金港中路东侧大润发超市一层","0523-86932317"],
["江苏","泰州","泰州市海陵万达餐厅","海陵南路西侧、济川东路北侧万达广场商业步行街东入口东侧","0523-89989137"],
["江苏","泰州","泰州市九州餐厅","海陵区东进西路九州商厦1层","0523-86212146"],
["江苏","泰州","泰州市济川餐厅","海陵区济川东路与海陵南路交叉口东北侧时代超市一层??","0523-86889776"],
["江苏","泰州","泰州市新人民公园餐厅","海陵区梅兰东路299号1层","0523-89988776"],
["江苏","泰州","泰州市鹏欣餐厅","海陵区鹏欣丽都G4幢","0523-86220916"],
["江苏","泰州","泰州市海陵餐厅","海陵区坡子街1、2号地块H区101、201","0523-86221660"],
["江苏","泰州","泰州市联华餐厅","济川中路118号","0523-86887671"],
["江苏","泰州","泰州市锦宸餐厅","姜堰区姜堰大道锦宸大厦","0523-88205677"],
["江苏","泰州","泰州市姜堰西大街餐厅","姜堰区姜堰西大街156号1+2F","0523-88581736"],
["江苏","泰州","泰州市文峰餐厅","青年路38号","0523-86342328"],
["江苏","泰州","泰州市药城餐厅","药城大道一号","0523-89690866"],
["江苏","泰州","泰州市迎春餐厅","迎春东路15号","0523-86665856"],
["江苏","宿迁","宿迁楚街餐厅","洪泽湖路98号","0527-81884158"],
["江苏","宿迁","沭阳大润发餐厅","沭阳县上海南路与苏州西路交汇处","0527-87882885"],
["江苏","宿迁","沭阳时代餐厅","沭阳县沭城镇人民中路南侧","0527-83996307"],
["江苏","宿迁","泗洪大润发餐厅","泗洪县泗州大街北侧人民南路东侧","0527-86226474"],
["江苏","宿迁","泗洪苏果餐厅","泗洪县泗洲西大街12号华润苏果购物广场一层","0527-86227203"],
["江苏","宿迁","泗阳桃源大润发餐厅","泗阳县淮海中路与桃源路西北角大润发一楼","0527-85221010"],
["江苏","宿迁","泗阳芙蓉餐厅","泗阳县人民中路万豪国际广场A区一层","0527-85230191"],
["江苏","宿迁","宿迁金鹰餐厅","西湖路1号金鹰天地","0527-81182351"],
["江苏","宿迁","宿迁宝龙餐厅","西湖路宝龙城广场1025-1027商铺","0527-81182261"],
["江苏","宿迁","宿迁时代餐厅","幸福中路2号金柏年财富广场一楼","0527-81180026"],
["江苏","宿迁","宿迁幸福餐厅","幸福中路世纪大厦一楼","0527-84221986"],
["江苏","宿迁","洋河发展店","宿迁市宿城区洋河新区华地万象城市广场1楼","0527-84921018"],
["安徽","合肥","合肥马鞍山路万达餐厅餐厅","包河区马鞍山路东侧万达商业广场一层","0551-62861252"],
["安徽","合肥","合肥世纪金源餐厅","滨湖新区徽州大道与紫云路交叉口","0551-62994469"],
["安徽","合肥","肥西永辉餐厅","城关镇新华路水晶城永辉超一层","0551-68848399"],
["安徽","合肥","合肥中环大润发餐厅","翡翠路与繁华大道交叉口中环购物中心一层","0551-63803811"],
["安徽","合肥","合肥庐阳大润发餐厅","阜阳路和凤台路交界处大润发超","0551-65507517"],
["安徽","合肥","合肥桐山餐厅","合安路桐山国际购物广场一层","0551-63814568"],
["安徽","合肥","合肥合盛餐厅","淮河路77号百盛商城1-2楼","0551-62629826"],
["安徽","合肥","合肥环球餐厅","环球新都会广场1楼","0551-64652494"],
["安徽","合肥","合肥曙光餐厅","金寨路136号曙光商厦一层","0551-63717290"],
["安徽","合肥","合肥永达餐厅","金寨路209号永达大厦1楼","0551-63654973"],
["安徽","合肥","合肥金华联餐厅","金寨南路1090号肯德基餐厅","0551-63620977"],
["安徽","合肥","合肥紫云路餐厅餐厅","金寨南路和紫云路交叉口","0551-68856959"],
["安徽","合肥","合肥百乐门苏果餐厅","经济开发区繁华大道南、莲花路西百乐门名品广场一层","0551-63674648"],
["安徽","合肥","合肥港澳广场餐厅","经济开发区翡翠路与芙蓉路交叉口西北角港澳广场","0551-63803858"],
["安徽","合肥","庐江百货大楼餐厅","庐江县庐城镇文明中路88号1幢1层","0551-87778686"],
["安徽","合肥","合肥马鞍山路合家福餐厅","马鞍山路与太湖路交叉口东北角合家福购物广场1,2层","0551-63482457"],
["安徽","合肥","合肥濉溪路餐厅","蒙城北路与北环一路交汇处东北角的新天地国际广场一层","0551-65694302"],
["安徽","合肥","合肥合肥潜山路餐厅","潜山路与望江路交叉口东南角信旺广场一层","0551-65575923"],
["安徽","合肥","合肥胜利餐厅","胜利北路88号元一购物广场一层","0551-64247177"],
["安徽","合肥","合肥四里河永辉餐厅","四里河路与北二环交叉口明发商业广场","0551-65674687"],
["安徽","合肥","合肥天鹅湖万达餐厅","天鹅湖万达广场","0551-63535748"],
["安徽","合肥","合肥家天下乐购餐厅餐厅","铜陵北路家天下乐购超","0551-64221497"],
["安徽","合肥","合肥港汇广场餐厅","望江路与潜山路交口东北角港汇广场一层","0551-65724308"],
["安徽","合肥","合肥国购餐厅","西一环屯溪路三里庵安徽国购广场家乐福出口处","0551-65178252"],
["安徽","合肥","合肥恒丰餐厅","新站区凤阳西路恒丰大厦","0551-64698334"],
["安徽","合肥","合肥临泉路餐厅","新站区临泉路合家福超一楼","0551-64225023"],
["安徽","合肥","合肥宿州路餐厅","宿州路105号一层","0551-62613620"],
["安徽","合肥","合肥商之都餐厅","长江东路1137号","0551-64694033"],
["安徽","合肥","合肥宝业广场餐厅","长江东路与东二环交口","0551-68130779"],
["安徽","合肥","合肥长江餐厅","长江西路201号家乐福超一楼","0551-65168920"],
["安徽","合肥","合肥高新合家福餐厅","长江西路高新区合家福超","0551-65331360"],
["安徽","合肥","合肥汉嘉家乐福餐厅","长江西路与潜山路交叉口汉嘉大厦","0551-65774279"],
["安徽","合肥","合肥汇通餐厅","长江中路337号","0551-62619040"],
["安徽","合肥","兴隆大润发店","望江东路与东二环交口兴隆国际广场一层","0551-63440850"],
["安徽","合肥","合肥合肥站店","站前路火车站广场东配楼1楼","0551-62138220"],
["安徽","合肥","合肥新桥机场店","新桥机场航站楼到达层44号商铺","0551-63775857"],
["安徽","合肥","星海世纪广场餐厅","瑶海区濉溪路与嘉山路交叉口星海世纪广场肯德基餐厅","0551-64231062"],
["安徽","合肥","银通","长江中路132号肯德基餐厅","0551-62626151"],
["安徽","合肥","怀宁路","蜀山区长江西路与怀宁路交叉口北京华联肯德基餐厅","0551-65583370"],
["安徽","芜湖","芜湖营盘山路餐厅","北京东路与营盘山路交叉口大润发超一楼","0553-3118068"],
["安徽","芜湖","芜湖花津南路欧尚餐厅","花津南路66号一层","0553-5906179"],
["安徽","芜湖","芜湖中央城乐天玛特餐厅","花津南路中央城1#地块商业街5#楼一层至二层","0553-2875077"],
["安徽","芜湖","芜湖欧尚餐厅","镜湖区天门山西路与银湖中路交叉口欧尚超内","0553-5872977"],
["安徽","芜湖","芜湖芜湖万达餐厅","镜湖区弋江路与赭山路交叉口","0553-3997017"],
["安徽","芜湖","芜湖芜湖利民路餐厅","利民路与九华南路交叉口苏果超内","0553-5906278"],
["安徽","芜湖","芜湖南陵利民路餐厅","南陵县籍山镇雄风中央花园","0553-6859229"],
["安徽","芜湖","芜湖镜湖餐厅","三泰国际大厦世纪联华一、二层","0553-3838977"],
["安徽","芜湖","芜湖县时代广场餐厅","芜湖县芜湖中路88号1至2层","0553-8766272"],
["安徽","芜湖","芜湖鸠兹广场餐厅","中山路步行街华亿国际购物中心B座(北楼)一、二层","0553-3850578"],
["安徽","芜湖","新百餐厅","无为县西大街新百商厦一楼肯德基餐厅","0553-6334477"],
["安徽","芜湖","芜湖银湖","中山北路沃尔玛超市一楼肯德基餐厅","0553-5991301"],
["安徽","蚌埠","蚌埠宝龙餐厅","宝龙城广场汉明街92号","0552-3723344"],
["安徽","蚌埠","蚌埠万达餐厅","工农路与东海大道交叉口万达广场一/二层","0552-3755557"],
["安徽","蚌埠","怀远怀远苏果餐厅","怀远县禹王路西段原啤酒厂院内","0552-8856517"],
["安徽","蚌埠","蚌埠解放路大润发餐厅","龙子湖区凤阳东路北侧,解放路西侧","0552-3010009"],
["安徽","蚌埠","蚌埠胜利餐厅","胜利中路1556号华海电脑数码通讯广场","0552-2086660"],
["安徽","蚌埠","蚌埠兴业餐厅","涂山路与兴业路交叉口家乐福1层","0552-3721287"],
["安徽","蚌埠","五河中兴餐厅","五河县中兴路新天地商业广场","0552-5500493"],
["安徽","蚌埠","蚌埠火车站店","淮河路与广场东路交叉口","0552-3073577"],
["安徽","蚌埠","朝阳餐厅","太平街550号合家福超市一楼肯德基餐厅","0552-4019723"],
["安徽","淮南","淮南国庆餐厅","国庆中路6号大润发超1楼","0554-2694000"],
["安徽","淮南","淮南龙湖餐厅","龙湖中路一号","0554-2679152"],
["安徽","淮南","淮南家乐福餐厅","田家庵区朝阳街道家乐福时代广场商业家乐福超一层","0554-7782300"],
["安徽","淮南","淮南蔡新路餐厅","谢家集区蔡新路与卧龙山路交叉口白马商城1楼","0554-5653703"],
["安徽","马鞍山","当涂大润发餐厅","当涂县青莲路与振兴路交叉口西北角","0555-6739152"],
["安徽","马鞍山","和县历阳餐厅","和县历阳路安德利商场一二楼","0555-5337797"],
["安徽","马鞍山","马鞍山八佰伴餐厅","湖东路以西大华国际广场2期八佰伴一二层","0555-2331968"],
["安徽","马鞍山","马鞍山花山餐厅","解放路15号","0555-2471061"],
["安徽","马鞍山","马鞍山雨山餐厅","雨山路华润苏果1楼","0555-2239301"],
["安徽","马鞍山","马鞍山欧尚餐厅","雨山路与江东大道交叉口欧尚超一楼","0555-2301990"],
["安徽","马鞍山","马鞍山沃尔玛餐厅","雨山路与康乐路交汇口","0555-2301779"],
["安徽","马鞍山","马鞍山万达餐厅","雨山区万达广场一楼","0555-2982200"],
["安徽","马鞍山","马鞍山团结","湖南东路新亚百货大楼一楼肯德基餐厅","0555-2324712"],
["安徽","马鞍山","马鞍山天润发","湖北东路大润发超市一楼肯德基餐厅","0555-2454850"],
["安徽","淮北","淮北孟山路餐厅","淮海路北与孟山路西交叉口金鹰购物中心一层","0561-3885223"],
["安徽","淮北","淮北相山大润发餐厅","相山路相王国际广场大润发一楼","0561-3885215"],
["安徽","淮北","淮北相山路餐厅","相山南路大华现代城","0561-3194187"],
["安徽","淮北","淮北南黎大润发餐厅","相山中路与南黎路交汇口","0561-3192461"],
["安徽","铜陵","铜陵嘉华大润发餐厅","翠湖一路与石城大道交叉口处大润发一楼","0562-2666882"],
["安徽","铜陵","铜陵义安乐购餐厅","长江路与义安大道交叉口财富广场","0562-2890200"],
["安徽","铜陵","良发餐厅","长江中路良发大厦一楼肯德基餐厅","0562-2856227"],
["安徽","铜陵","人民中路餐厅","淮河大道铜冠世纪广场一楼肯德基餐厅","0562-2817665"],
["安徽","安庆","枞阳财富广场餐厅","枞阳县湖滨路财富广场","0556-2976615"],
["安徽","安庆","安庆汇峰广场餐厅","大观区湖心中路与府路交汇处(原政府)","0556-5943777"],
["安徽","安庆","安庆苏果餐厅","湖心北路苏果超","0556-5358108"],
["安徽","安庆","安庆商都餐厅","人民路563号肯德基餐厅","0556-5567687"],
["安徽","安庆","天鹅广场餐厅","岳西县建设路3号天鹅广场","0556-2325177"],
["安徽","安庆","宿松人民路餐厅","宿松人民路166号","18905600602"],
["安徽","黄山","黄山昱中餐厅","黄山中路家得利超一楼","0559-2539771"],
["安徽","黄山","黄山新园路餐厅","屯溪区跃进路6号大润发超","0559-2325777"],
["安徽","黄山","黄山大观店","新安大道与阳湖路交叉口","0559-2329222"],
["安徽","黄山","黄山汤口店","黄山区汤口镇寨西","0559-5566588"],
["安徽","滁州","定远大润发餐厅","定远县鲁肃大道东侧一层","0550-4868399"],
["安徽","滁州","定远百大餐厅","定远县曲阳路与东大街交叉口西南角百货大楼","0550-4999258"],
["安徽","滁州","滁州明光大润发餐厅","明光池河大道与明珠路交叉口东北角","0550-8561777"],
["安徽","滁州","滁州新百餐厅","南僬北路与琅琊大道交口西南角大金新百一层","0550-3890900"],
["安徽","滁州","滁州时代餐厅","南谯北路和天长东路交叉口东南角水石广场一层","0550-3015268"],
["安徽","滁州","全椒敬梓餐厅","全椒县儒林路与敬梓路交叉口奥康步行街A区","0550-5255528"],
["安徽","滁州","滁州紫薇餐厅","泰鑫城星座","0550-3078568"],
["安徽","滁州","丰乐大润发餐厅","丰乐路147号","0550-3059100"],
["安徽","阜阳","阜阳万达餐厅","安徽阜阳市淮河路万达广场2号门永辉超市入口","0558-3681020"],
["安徽","阜阳","阜南百货大楼餐厅","阜南地城路与淮河路交口阜南百货大楼1-2层","0558-6991009"],
["安徽","阜阳","临泉好又多餐厅","临泉县光明路与临鲖路交口,好又多超1-2楼","0558-6386777"],
["安徽","阜阳","阜阳清河路华联餐厅","清河路华联超1层","0558-2189270"],
["安徽","阜阳","阜阳颖州餐厅","人民东路2号","0558-2266415"],
["安徽","阜阳","阜阳财富广场餐厅","人民路9号财富广场","0558-2206278"],
["安徽","阜阳","阜阳太和苏果餐厅","太和县人民北路苏果超一楼","0558-8691992"],
["安徽","阜阳","太和晶宫大润发餐厅","太和县长征路与胜利路交叉口大润发一楼","0558-8262258"],
["安徽","阜阳","阜阳颖河路苏果餐厅","涡阳路与颍河路交叉口华润苏果一层","0558-2320938"],
["安徽","阜阳","阜阳颖河大润发餐厅","颍河路与阜南大道交叉口大润发超一楼","0558-2607706"],
["安徽","阜阳","阜阳颖上路餐厅","颍泉区颍上路89号","0558-2209175"],
["安徽","阜阳","颍上缔煌餐厅","颍上县慎城镇解放北路","0558-4455471"],
["安徽","阜阳","阜阳火车站店","向阳路和北京东路交汇口","0558-2108279"],
["安徽","宿州","宿州时代广场餐厅","汴河路以南二中路以东家乐福世纪广场1层","0557-3608273"],
["安徽","宿州","宿州大润发餐厅","东昌路大润发超一层","0557-3650800"],
["安徽","宿州","宿州淮海餐厅","淮海北路苏果超一楼","0557-3035170"],
["安徽","宿州","宿州京华餐厅","淮海路汴河路交叉口西南角京华大厦1,2楼","0557-3067198"],
["安徽","宿州","泗县第一街餐厅","泗县汴河大道泗州第一街","0557-7881005"],
["安徽","宿州","龙城店","萧县中山路与交通路交汇处国购广场1楼","0557-5888488"],
["安徽","六安","霍邱商之都餐厅","霍邱县城关镇商之都一楼","0564-6611232"],
["安徽","六安","寿县寿州时代餐厅","寿县十字街寿州时代广场一层","0564-4124400"],
["安徽","六安","舒城新世界?餐厅","舒城县梅河路与鼓楼街交叉口两层","0564-8665887"],
["安徽","六安","六安锦绣餐厅","皖西路178号肯德基","0564-3219648"],
["安徽","六安","六安皖西餐厅","皖西路与梅山路交汇口新都会广场","0564-3336081"],
["安徽","六安","六安时代","六安市区解放中路浙东商贸城乐天马特超市超市一楼肯德基","0564-3214026"],
["安徽","亳州","毫州和平餐厅","和平东路137号","0558-5534909"],
["安徽","亳州","利辛七彩餐厅","利辛县城关镇文州路北侧,人民路东侧","0558-7186699"],
["安徽","亳州","蒙城金瑞餐厅","蒙城县嵇康路与周元路交叉口东北角","0558-7866000"],
["安徽","亳州","亳州魏武餐厅","魏武大道与光明路交叉口东北角金桥大厦","0558-5622248"],
["安徽","亳州","涡阳向阳餐厅","涡阳县向阳中路阜阳商厦1层","0558-7581018"],
["安徽","亳州","毫州新东方餐厅","新华南路(原亳州宾馆)新东方商场一楼","0558-5558018"],
["安徽","池州","池州大润发餐厅","长江路同晖城广场大润发1F","0566-2037779"],
["安徽","池州","池州长江餐厅","长江中路19号","0566-2090226"],
["安徽","池州","池州时代餐厅","长江中路和秋浦路交叉口西北侧","0566-2049155"],
["安徽","池州","青阳大润发餐厅","青阳县富阳南路与九子大道交口","0566-5117070"],
["安徽","宣城","宣城锦城餐厅","叠樟中路府山广场台客隆超1楼","0563-2716768"],
["安徽","宣城","宣城府山餐厅","叠障中路供销大厦一楼","0563-3035262"],
["安徽","宣城","广德大润发餐厅","广德万桂山路和纬二路交叉口西南侧","0563-6016555"],
["安徽","宣城","郎溪苏果餐厅","郎溪县石佛山路与中港东路交叉口华润苏果一层","0563-7706977"],
["安徽","宣城","宣城大润发餐厅","梅溪路大润发超1F","0563-2020598"],
["安徽","宣城","宣城国购餐厅","宣州区叠嶂西路10号国购广场肯德基餐厅","0563-2717555"],
["安徽","宣城","宁国大润发餐厅","宁阳东路大润发超市西南侧","0563-4032977"],
["山东","济南","济南北园大街餐厅","北园大街225号红星美凯龙一层","0531-55530360"],
["山东","济南","济南北园餐厅","北园大街483号1层","0531-85941619"],
["山东","济南","济南济南火车站一餐厅","车站街16号天龙大厦地上一层/地下一层","0531-86925088"],
["山东","济南","济南银荷餐厅","高新区齐鲁软件园西北、新大街以南、崇华路以东银荷大厦E座一层","0531-88981521"],
["山东","济南","济南王舍人餐厅","工业北路王舍人镇十字路口东南角一层。","0531-88980129"],
["山东","济南","济南工业南路餐厅","工业南路济南国际会展中心西侧中铁会展广场","0531-88800327"],
["山东","济南","济南遥墙机场餐厅","国际机场停车场长途车站收费口北侧独栋小楼的房产","0531-82082006"],
["山东","济南","济南洪山路餐厅","洪山路与旅游路交界口银座购物广场花园店一层","0531-61361026"],
["山东","济南","济南海蔚餐厅","花园路101号","0531-88911838"],
["山东","济南","济南经十西餐厅","槐荫区经十路、经七路交叉口西南侧地号为:031008031中石化济南石油分公司第88加油站","0531-69907105"],
["山东","济南","济南银隆餐厅","槐荫区经十路22799号银座中心经十路一侧步行街入口处F1层126B商铺与F2层221商铺","0531-81619778"],
["山东","济南","济南长途站餐厅","济泺路131号一层","0531-85065469"],
["山东","济南","济南济泺路餐厅","济泺路87-1号泺口商城一、二层","0531-61388657"],
["山东","济南","济南长途站2餐厅","济洛路131号一层","0531-66816087"],
["山东","济南","济南车站街餐厅","济南火车站车站街161号济南经营集团西办公楼东附楼一楼中段","0531-88300090"],
["山东","济南","济南济西精选餐厅","济南西高铁站JNX-CY-02","0531-58771077"],
["山东","济南","济南济南西客站餐厅","济南西高铁站JNX-CY-10","0531-58771088"],
["山东","济南","济南火车站站内餐厅","济南站美食城CY-02号场地","0531-88300097"],
["山东","济南","济南济微路餐厅","济微路92号一层的房产","0531-68658165"],
["山东","济南","济南荣祥餐厅","济兖路165号一层","0531-69987928"],
["山东","济南","济南解放路餐厅","解放路105号沿街综合楼一、二层","0531-55691500"],
["山东","济南","济南崮山餐厅","京台高速公路崮山服务区东区一层","0531-87439900"],
["山东","济南","济南经二餐厅","经二路624号","0531-87934345"],
["山东","济南","济南体育馆餐厅","经十路124号1层","0531-83186967"],
["山东","济南","济南玉函路餐厅","经十路144号地下一层","0531-67715987"],
["山东","济南","济南经十餐厅","经十路21号","0531-82610106"],
["山东","济南","济南槐苑餐厅","经十路431号一层、二层。","0531-87955159"],
["山东","济南","济南新八一餐厅","经十一路49路济南银座商城八一店B馆一、二层","0531-81760992"],
["山东","济南","济南华信餐厅","历城区花园路78号华信购物广场","0531-88901872"],
["山东","济南","济南洪楼餐厅","历城区花园路99号1F","0531-88061448"],
["山东","济南","济南华龙餐厅","历城区华龙路1825号嘉恒商务大厦裙楼101.201","0531-88905903"],
["山东","济南","济南历山餐厅","历山路101号","0531-86411065"],
["山东","济南","济南苹果城餐厅","历山路118号文华园底层商业苹果城地下一层和一层的房产","0531-81281508"],
["山东","济南","济南天地坛街餐厅","历下区泉城路188号济南恒隆广场内三层","0531-67967662"],
["山东","济南","济南文东餐厅","历下区文化西路1号","0531-86460477"],
["山东","济南","济南晶都餐厅","市中区顺河街银座晶都国际大厦一、二层","0531-66683802"],
["山东","济南","济南会展餐厅","舜耕路28号","0531-82678892"],
["山东","济南","济南燕山餐厅","窑头路以南中润世纪广场一层","0531-88193358"],
["山东","济南","青岛明珠餐厅","长清区大学路1088号","0531-87208906"],
["山东","济南","济南经四路餐厅","中区纬一路以东经二路以南面向经四路的魏家庄万达商业广场东侧二层、三层。","0531-67888068"],
["山东","济南","榆山路KFC","平阴市榆山路以西茂昌银座新天地一期东部综合体101号"," "],
["山东","济南","英雄山店","英雄山路161号","0531-82976380"],
["山东","济南","阳光新路店","刘长山和阳光新路交接处","0531-82730606"],
["山东","济南","山水店","二七新村南路9号","0531-82748630"],
["山东","济南","济南堤口店","堤口路80号1层","0531-85063473"],
["山东","济南","圣凯店","泺源大街29号一层肯德基","0531-81670008"],
["山东","济南","佛山苑店","历下区佛山街5号","0531-86160340"],
["山东","济南","山大路店","山大路124号1层","0531-82395872"],
["山东","济南","泉城二店","泉城路147号珍珠泉大厦一层、地下一层","0531-86080631"],
["山东","济南","大观园店","纬二路49号一、二层","0531-87026526"],
["山东","济南","新泉城店","历下区泉城路185号一。二层肯德基","0531-86081157"],
["山东","济南","金牛餐厅","济南市天桥区无影山北路南首东侧鑫福盛大厦一层、二层肯德基","0531-85865506"],
["山东","青岛","东海中路餐厅","山东省青岛市市南区东海中路16号一层","0532-55575756"],
["山东","青岛","香江二餐厅","山东省青岛市香江路127号一层","0532-86899719"],
["山东","青岛","青岛39克拉餐厅","北区CBD万达商业广场步行街内107、108、207、208铺。","0532-55563802"],
["山东","青岛","青岛华城餐厅","城阳区崇阳路491号一层","0532-87765772"],
["山东","青岛","青岛春阳餐厅","城阳区春阳路北侧","0532-81100973"],
["山东","青岛","青岛城中城餐厅","城阳区虹桥路1号城阳城中城购物中心二期商业A栋一层A-1轴至A-4轴与A-D轴至A-F轴之间区域","0532-89088505"],
["山东","青岛","青岛崇阳餐厅","城阳区文阳路269号一、二层","0532-87768969"],
["山东","青岛","青岛长城路餐厅","城阳区长城南路宝龙城市广场大润发次入口附近一层的房产","0532-55553713"],
["山东","青岛","青岛惜福餐厅","城阳区正阳东路329号","0532-81158150"],
["山东","青岛","青岛家佳源餐厅","城阳区正阳路136号一层","0532-87734185"],
["山东","青岛","青岛正阳路餐厅","城阳区正阳路南、爱民河西青岛利客来集团城阳购物广场东南把角一层、二层","0532-58711107"],
["山东","青岛","青岛东方城餐厅","黑龙江中路2111号一层","0532-67766816"],
["山东","青岛","青岛崇明岛路餐厅","黄岛区崇明岛西路55号中泽国货商厦一层的房产","0532-68974191"],
["山东","青岛","青岛井冈山餐厅","黄岛区经济技术开发区井冈山路658号吉祥商厦一、二层。","0532-86897816"],
["山东","青岛","青岛香江餐厅","黄岛区经济技术开发区武当山路2号1-2层","0532-86881652"],
["山东","青岛","青岛长江餐厅","黄岛区经济技术开发区长江中路378号","0532-86999023"],
["山东","青岛","青岛卓亭餐厅","黄岛区开发区江山南路666号南侧原卓亭广场售楼处一、二层。","0532-86767845"],
["山东","青岛","青岛长江二餐厅","黄岛区长江中路419号T117-118","0532-86996558"],
["山东","青岛","即墨财富广场餐厅","即墨市鹤山路939号即墨利群财富广场","0532-85056359"],
["山东","青岛","即墨兰岙餐厅","即墨市振华街88号宝龙城市广场C区一层1004、1005号商铺与二层2005号商铺","0532-89947089"],
["山东","青岛","胶州金沙广场餐厅","胶州市广州路东、兖州路北侧一层的房产","0532-67750185"],
["山东","青岛","青岛莱西新利群餐厅","莱西市烟台路西文化路南利群莱西购物广场一层、二层的房产。","0532-66039260"],
["山东","青岛","青岛九水餐厅","崂山区青山路689号“宝龙城市广场”区一层1003号商铺二层2058号商铺","0532-58806217"],
["山东","青岛","青岛太清宫餐厅","崂山区太清宫停车场东面一、二层","0532-87948752"],
["山东","青岛","青岛世纪餐厅","崂山区同安路872号一层","0532-67781181"],
["山东","青岛","青岛同安路餐厅","崂山区同安路880号3层","0532-67786850"],
["山东","青岛","青岛家得餐厅","李沧区峰山路86号一层","0532-66760253"],
["山东","青岛","青岛郑庄餐厅","李沧区九水东路与合川路交汇处西北角商业网点东把头一层、二层的房产","0532-66088063"],
["山东","青岛","青岛李沧万达餐厅","李沧区巨峰路148号万达广场内321、322、323号商铺","0532-67706196"],
["山东","青岛","青岛书院餐厅","李沧区书院路32号1-2层","0532-87630655"],
["山东","青岛","青岛乐客城餐厅","李沧区夏庄路7号负一层","0532-58701236"],
["山东","青岛","青岛向阳餐厅","李沧区向阳路步行街李村利客来购物中心一层超市向阳路出入口至大楼向阳路疏散楼梯之间的房产","0532-87627472"],
["山东","青岛","青岛流亭机场餐厅","流亭机场一层","0532-83789676"],
["山东","青岛","青岛福岭餐厅","市北区308国道189号一层","0532-80905669"],
["山东","青岛","青岛利津路中石化加油站餐厅","市北区利津路20号中石化山东青岛石油分公司第77加油站地块。","0532-83803292"],
["山东","青岛","青岛新城市广场餐厅","市北区辽阳西路100号","0532-85655030"],
["山东","青岛","青岛福州北路餐厅","市北区辽阳西路188号","0532-68625815"],
["山东","青岛","青岛辽阳餐厅","市北区辽阳西路369号易初莲花超市一层","0532-88752970"],
["山东","青岛","青岛新兴餐厅","市北区山东路128号","0532-85087515"],
["山东","青岛","青岛小村庄餐厅","市北区四方利群购物广场1F","0532-83740028"],
["山东","青岛","青岛台东三路餐厅","市北区台东三路77号利群商厦负一层","0532-83661302"],
["山东","青岛","青岛台东一路餐厅","市北区台东一路37号地下一层","0532-83618589"],
["山东","青岛","青岛兴隆路餐厅","市北区兴隆路新源超市一层部分房产","0532-83737703"],
["山东","青岛","青岛延安餐厅","市北区延安二路10号","0532-83648214"],
["山东","青岛","青岛延吉路餐厅","市北区延吉路17号一层","0532-83668362"],
["山东","青岛","青岛百丽广场餐厅","市南区澳门路88号负一层B101","0532-68853051"],
["山东","青岛","青岛极地餐厅","市南区东海路60号2层","0532-88900399"],
["山东","青岛","青岛维多利亚餐厅","市南区广西路63号维多利亚广场一层","0532-82886621"],
["山东","青岛","青岛前海餐厅","市南区贵州路利群集团前海购物广场一层东南角","0532-82657979"],
["山东","青岛","青岛华润万象城餐厅","市南区华润中心万象城L-458号商铺","0532-55768609"],
["山东","青岛","青岛广场餐厅","市南区汇泉湾地下商业设施C区","0532-82871598"],
["山东","青岛","青岛南京路餐厅","市南区南京路99号一层","0532-85756675"],
["山东","青岛","青岛田家村餐厅","市南区宁夏路162号","0532-85735047"],
["山东","青岛","青岛国贸餐厅","市南区郯城路6号","0532-82972977"],
["山东","青岛","青岛丰合餐厅","市南区香港中路12号丰合广场A区一层","0532-85027526"],
["山东","青岛","青岛佳世客餐厅","市南区香港中路72号永旺青岛购物中心二层","0532-85723267"],
["山东","青岛","青岛香港中路餐厅","市南区香港中路86","0532-85885865"],
["山东","青岛","青岛中山路餐厅","市南区中山路67号一、二层","0532-68897600"],
["山东","青岛","中铁青岛中心店KFC","市南区青岛中心广场B1-003、007号商铺","0771-4704990/4704991"],
["山东","青岛","小村庄外卖点店KFC","市北区人民路269号的甲方购物中心(悦荟商场)的L1-17b/L1-20b, L2-22b/23/24/25_号铺位的房产","0532-66069534"],
["山东","青岛","平度帝王餐厅","平度市红旗路164号","0532-87368813"],
["山东","青岛","青岛海琴店","商丘路海琴广场肯德基","0532-84856676"],
["山东","青岛","沧口广场店","振华路维客广场肯德基","0532-84626357"],
["山东","青岛","创意园店","市南区宁夏路147号中联广场肯德基","0532-68658589"],
["山东","青岛","珠山店","灵山湾路(原胶南人民路198号)","0532-88182520"],
["山东","青岛","苗岭店","崂山区秦岭路18-9(丽达购物中心)","0532-88896036"],
["山东","青岛","胶州路店","市北区胶州路140号东方贸易大厦一楼肯德基","0532-82830439"],
["山东","青岛","百脑汇店","市北区辽宁路147号百脑汇一楼肯德基","0532-83802116"],
["山东","青岛","广州路店","胶州广州南路62号一层","0532-87262707"],
["山东","青岛","福州路大润发","福州南路与扬州路交叉口大润发肯德基","0532-67750733"],
["山东","淄博","淄博博银餐厅","博山区人民路以北、英雄路以东、博山青年路1号博山银座商城人民路一层、二层的房产","0533-2896377"],
["山东","淄博","淄博东泰餐厅","临淄区大顺路与晏婴路交叉口东泰购物中心一层、二层","0533-7313056"],
["山东","淄博","淄博泰客荣餐厅","临淄区桓公路103号一层、二层的房产","0533-7121009"],
["山东","淄博","淄博石村餐厅","张店区(新区)华光路288号淄博石村永旺佳世客购物中心一层","0533-6273330"],
["山东","淄博","淄博鲁泰大道餐厅","张店区高新区鲁泰大道以北西六路以东的淄博银泰城一层编号为1035和1036的房产","0533-3148554"],
["山东","淄博","淄博共青团路餐厅","张店区柳泉路152号物业南侧下沉广场西侧(面向正东地面至地下一层的自动扶梯)的房产","0533-7860553"],
["山东","淄博","淄博嘉信餐厅","张店区柳泉路与华光路交叉口西北角嘉信茂广场·柳泉一层01-56室","0533-3160997"],
["山东","淄博","淄博中心餐厅","张店区商场东路2号利群购物广场1层","0533-2152158"],
["山东","淄博","周村银座","周村区新建中路银座地上一层肯德基","0533-6038856"],
["山东","淄博","宝龙店","淄博市桓台县索镇中心大街宝龙大厦一层","0530-8168532"],
["山东","淄博","淄川店","服装城北侧新星商厦一层肯德基","0533-5166650"],
["山东","淄博","柳泉广场店","柳泉正承广场地下一层肯德基","0533-5166630"],
["山东","淄博","莲池店","张店区北西五路25号肯德基","0533-3111300"],
["山东","淄博","博山店","博山区中心路40号银座1楼肯德基","0533-2896828"],
["山东","淄博","华光店","张店区华光路86号大润发超市一层肯德基餐厅","0533-3111557"],
["山东","淄博","西六路店","张店区人民西路51号","0533-2772298"],
["山东","枣庄","枣庄枣庄高铁站餐厅","高铁站ZZ-CY-01和ZZ-CY-02","0632-15725695217"],
["山东","枣庄","枣庄光明餐厅","薛城区泰山路西侧光明西路北侧一层","0632-5198299"],
["山东","枣庄","枣庄振兴中路餐厅","振兴路与建华路交叉口大润发商场一层","0632-3370899"],
["山东","东营","东营东城银座餐厅","东三路138A1号银座城市广场东侧一、二层","0546-8017232"],
["山东","东营","东营盛大餐厅","东营区东二路167号部分一层、二层","0546-8833188"],
["山东","东营","东营西城餐厅","东营区济南路237号","0546-8220092"],
["山东","东营","广饶孙武路餐厅","广饶县孙武路与乐安大街交叉口东南角的银座商城一、二层","0546-6686580"],
["山东","东营","东营长途汽车总站餐厅","黄河路中段东营长途汽车总站","0546-7750050"],
["山东","东营","东营银座餐厅","济南路296号","0546-7668156"],
["山东","东营","东营天瑞餐厅","胶州路149号一层","0546-8331616"],
["山东","东营","东营银兴餐厅","垦利县新兴路与黄河路交叉口东北角处被称为“银座商城垦利店”房产的一、二层。","0546-2899280"],
["山东","东营","东营津二路餐厅","利津县津二路与夜市街交叉口西北角处被称为“东营市百货大楼利津店”房产一层。","0546-7700566"],
["山东","东营","东营西三路餐厅","西三路22号银座商城一层","0546-7668638"],
["山东","东营","东营百大餐厅","西四路641号一层至二层","0546-8237706"],
["山东","东营","东城店","东城百货大楼一层肯德基餐厅","0546-8090275"],
["山东","东营","东营北一路餐厅","东营区北一路与千佛山路交叉口(万达广场内)","0546-6388288"],
["山东","烟台","烟台北马路餐厅","北马路烟台火车站南售票厅东临网点的房产","0535-2601698"],
["山东","烟台","烟台商城餐厅","大海阳路","0535-6652376"],
["山东","烟台","烟台福海餐厅","福山区福海路178号天创国际城振华超市一层","0535-6338087"],
["山东","烟台","烟台海港路餐厅","海港路26号烟台阳光100城市广场S1号楼一层2119号","0535-6580038"],
["山东","烟台","海阳丽达餐厅","海阳市旅游度假区海滨路海阳才高置业有限公司丽达购物广场(碧海金滩东E-1楼)东南把角一层","0535-3308962"],
["山东","烟台","烟台颐高数码城餐厅","开发区长江路33号1179号房产一层、二层。","0535-6983769"],
["山东","烟台","烟台观海路餐厅","莱山区观海路277号1层","0535-7356606"],
["山东","烟台","烟台迎春大街餐厅","莱山区迎春大街与金滩东交叉口东北角烟台银座商城南面一、二层。","0535-6713912"],
["山东","烟台","莱州印象城餐厅","莱州市文化东路以北文昌北路以西莱州金运印象城一层、二层的房产","0535-2275110"],
["山东","烟台","烟台通海路餐厅","牟平区通海路与宁海大街交汇处牟平振华商厦一层通海路一侧南把角的房产","0535-4211106"],
["山东","烟台","烟台天府餐厅","天府街19号富豪青年国际广场1号楼东楼","0535-6351165"],
["山东","烟台","烟台幸福餐厅","幸福中路60号一层","0535-6856216"],
["山东","烟台","烟台彩云城餐厅","长江路18号1-2层","0535-6383950"],
["山东","烟台","烟台烟台汽车站餐厅","芝罘区西大街86号烟台汽车总站西北角一层、二层","0535-6256381"],
["山东","烟台","烟台烟台万达餐厅","芝罘区西关南街6号的万达广场内的【1035A】,【2033】号店铺","0535-3039367"],
["山东","烟台","商城二餐厅","山东省烟台市南大街166号烟台商城西南角1-2层","0535-6293136"],
["山东","烟台","龙门西路店","龙门西路18号","0535-7337155"],
["山东","烟台","莱阳旌旗餐厅","莱阳市旌旗路328路莱阳人民商场一层","0535-7235327"],
["山东","潍坊","安丘泰华城餐厅","安丘商场路东段南侧、国税局东侧被称为“泰华曼哈顿”部分一层","0536-8022500"],
["山东","潍坊","潍坊北海路餐厅","北海路以西,胜利东街以南的银座商城一层","0536-8059060"],
["山东","潍坊","潍坊泰华餐厅","东风东街360号泰华城一层","0536-8237720"],
["山东","潍坊","潍坊尚东餐厅","福寿东街4369号谷德广场地下一层、二层","0536-8066681"],
["山东","潍坊","高密凤凰大街餐厅","高密市凤凰大街与夷安大道交界口东北角中百大厦的部分地下一层、一层的房产","0536-5826298"],
["山东","潍坊","潍坊福寿东街餐厅","奎文区新华路与福寿东街交叉口西南角“潍坊购物广场”一二层","0536-2082355"],
["山东","潍坊","临朐兴隆中路餐厅","临朐县民主路以东、骈邑路以西、兴隆路以北的房产一、二层","0536-3663656"],
["山东","潍坊","潍坊人民公园餐厅","青年路66号","0536-8560550"],
["山东","潍坊","青州泰华城餐厅","青州市范公亭路与昭德路交叉口东南角地块被称为“泰华城”的部分一层的房产。","0536-3897776"],
["山东","潍坊","潍坊潍坊万达餐厅","万达广场一层1029A号和二层2036号店铺","0536-8099887"],
["山东","潍坊","新华二店","奎文区新华路66号佳乐家超市肯德基","0536-8899664"],
["山东","潍坊","昌乐恒安街店","方山路5777号全福元一层","0536-6850988"],
["山东","潍坊","银海店","银海路17号中百大厦一层肯德基餐厅","0536-5262755"],
["山东","潍坊","诸城和平餐厅","诸城市和平街1号(和平街与密州路交叉口)利群商场一层","0536-6187005"],
["山东","潍坊","诸城市柏斯奇餐厅","诸城市广场路2号百盛商场","0536-6182100"],
["山东","济宁","环城西路餐厅","任城区永丰街南、南北街东太白商业广场一层","15069752056"],
["山东","济宁","济宁济宁万达餐厅","的万达广场内的二层2001号和二层2002号商铺","0537-2277277"],
["山东","济宁","济宁金宇路餐厅","高新区金宇路与琵琶山北路交汇处西北角济宁永旺购物中心主入口北侧","0537-2659829"],
["山东","济宁","嘉祥嘉祥银座餐厅","嘉祥县呈祥路与演武路交汇处西南角嘉祥银座一层、二层","0537-6788222"],
["山东","济宁","济宁奎星路餐厅","金乡县奎星路与文峰路交汇处西南角现诚信购物中心一层","0537-8898866"],
["山东","济宁","济宁水泊路餐厅","梁山县水泊中路108号中韩大酒店一层、二层","0537-7629667"],
["山东","济宁","济宁龙行餐厅","龙行路东首北商业楼来鹤商城银座楼一层","0537-2260498"],
["山东","济宁","泗水金泉餐厅","泗水县金泉广场第(17)B17幢1-1103、1-2111房","0537-4298586"],
["山东","济宁","济宁琵琶山餐厅","太白东路36号一层","0537-2651090"],
["山东","济宁","济宁运河城餐厅","太白路与共青团路东北角运河城商业摩尔一层。","0537-2250506"],
["山东","济宁","济宁太白餐厅","太白中路26号","0537-2402218"],
["山东","济宁","济宁微山银座餐厅","微山县建设路北侧、镇中街东侧微山银座商城一层的房产","0537-5669975"],
["山东","济宁","汶上中都餐厅","汶上县中都大街北段路东、圣泽大街东段路北(原方圆大世界商城)现汶上联民商场一层、二层","0537-7226600"],
["山东","泰安","泰安岱宗餐厅","岱宗大街249号","0538-6282296"],
["山东","泰安","泰安西山路餐厅","东平县西山路47号一、二层","0538-2610888"],
["山东","泰安","泰安泰安银座餐厅","东岳大街81号泰安银座商城一层","0538-8299288"],
["山东","泰安","泰安志高餐厅","东岳大街与温泉路交汇处东北角银座商场一、二层","0538-8201351"],
["山东","泰安","泰安易初莲花餐厅","东岳大街中段易初莲花超市一层","0538-6990252"],
["山东","泰安","泰安泰安高铁站餐厅","高铁站TA-CY-01和TA-CY-02","0538-6931789"],
["山东","泰安","泰安天外村餐厅","龙潭路北首路东金如意宾馆","0538-8281360"],
["山东","泰安","泰安北街餐厅","宁阳县北街路38号凌云大厦一层、二层","0538-5688870"],
["山东","泰安","东湖店","灵山大街276号东湖银座地上一层二层肯德基餐厅","0538-5887863"],
["山东","泰安","龙潭路店","东岳大街82号一层(泰安大润发一层肯德基餐厅)","0538-8062166"],
["山东","泰安","泰安泰山大街店","泰山大街566号万达广场购物中心10572059C商铺","0538-8667786"],
["山东","威海","威海世昌餐厅","东城路86号1F","0631-5282996"],
["山东","威海","威海海滨餐厅","海滨北路128号,面临青岛路一层","0631-5318036"],
["山东","威海","威海外滩餐厅","海滨路威海新外滩综合服务区Z-1号地上一、二层","0631-5203186"],
["山东","威海","威海海兴餐厅","海滨路西浮发玻璃厂东地号为:108-6中石化威海分公司第38加油站北端地块","0631-5992515"],
["山东","威海","威海和平餐厅","和平路27号","0631-5213093"],
["山东","威海","威海和平二餐厅","环翠区和平路18号威海苏宁电器广场东南角一层。","0631-5206660"],
["山东","威海","威海青岛路餐厅","青岛中路128号豪业圣迪广场一层","0631-5975625"],
["山东","威海","威海古寨餐厅","世昌大道北、古寨东路东威海奥特莱斯商业中心一层11轴至13轴与D轴至J轴之间交接区域的房产","0631-5818029"],
["山东","威海","威海统一路餐厅","统一路120号1、2、3层","0631-5280607"],
["山东","威海","威海文化西路餐厅","文化路-160号盛德大厦一层","0631-5673031"],
["山东","威海","齐鲁商城店","齐鲁大道85-1号齐鲁商城一楼肯德基","0631-5992101"],
["山东","威海","华夏店","经区华夏路佳世客一楼肯德基","0631-5977501"],
["山东","威海","商业街","乳山市商业街利群商场一层","0631-6861069"],
["山东","日照","日照海曲中路餐厅","东港区海曲路与兖州路交汇处一层","0633-3631006"],
["山东","日照","日照万平口餐厅","海曲东路288号被称作”凯德广场.日照“的建筑物","0633-2189228"],
["山东","日照","日照烟台路餐厅","泰安路179号国际大厦001号楼一、二层","0633-3631003"],
["山东","日照","日照新玛特餐厅","五莲县解放路182号一层","0633-5888061"],
["山东","日照","正阳店","正阳路46号百货大楼一楼肯德基餐厅","0633-8230855"],
["山东","日照","利群店","海曲中路76号利群商厦一楼肯德基餐厅","0633-8213325"],
["山东","日照","银座店","北京路与泰安路交汇银座商城一楼肯德基餐厅","0633-8807887"],
["山东","日照","黄海店","黄海一路与海滨三路交汇瑞泰国际商城一楼肯德基餐厅","0633-8336657"],
["山东","日照","浮来店","莒县浮来路与青年路交汇肯德基餐厅","0633-7887766"],
["山东","莱芜","莱芜钢城店","钢城区钢都大街金茂广场一、二层(钢城肯德基餐厅)","0634-6996200"],
["山东","莱芜","花园路店","花园北路25号大润发一楼(花园路肯德基餐厅)","0634-8676766"],
["山东","莱芜","红石店","莱城区文化南路8号银座肯德基一二层","0634-8778007"],
["山东","临沂","东兴餐厅","东兴路与人民大街交汇处东北角豪森国际广场一层","15063353900"],
["山东","临沂","临沂苍山塔山餐厅","苍山县会宝路中段南侧一层","0539-5288033"],
["山东","临沂","费县建设路餐厅","费县建设西路路北侧百姓购物广场1、2层","0539-5018577"],
["山东","临沂","莒南隆山路餐厅","莒南县隆山路中段西侧乐尚购物中心一、二层","0539-7268686"],
["山东","临沂","临沂丽家餐厅","兰山区解放路184号一层、二层","0539-8068491"],
["山东","临沂","临沂和谐广场餐厅","兰山区解放路与通达路交汇处西南角解放路158号一号楼和谐广场B103-B105室","0539-7751020"],
["山东","临沂","临沂万兴都餐厅","兰山区涑河南街3号一层、二层","0539-5630473"],
["山东","临沂","临沭常华餐厅","临沭县中山北路1号一层、二层","0539-6238866"],
["山东","临沂","平邑浚河路餐厅","平邑县浚河路和汉阙路交汇处华百商场一、二层","0539-4368198"],
["山东","临沂","临沂临沂长途客运站餐厅","双岭路中段南侧临沂长途汽车站站房楼二层","0539-8225577"],
["山东","临沂","郯城郯东路餐厅","郯城县人民路与郯东路交叉口东南角的郯城银都购物中心一层","0539-6632019"],
["山东","临沂","临沂三和餐厅","沂蒙路与三和二街交汇处颐高 上海街一期铺位258、259号两层","0539-8100425"],
["山东","临沂","沂水财富中心餐厅","沂水县沂蒙山路北、府前街西沂蒙财富广场一、二层","0539-2277378"],
["山东","临沂","九州店","解放路78号(九州肯德基餐厅)","0539-8222504"],
["山东","临沂","久隆店","久隆奥斯卡商业广场B(广场肯德基餐厅)","0539-8203160"],
["山东","临沂","沂蒙店","红旗路11号(银座东邻沂蒙肯德基餐厅)","0539-3217828"],
["山东","临沂","金雀山店","金雀山一路71号(金雀山肯德基餐厅)","0539-8111287"],
["山东","临沂","通达路店","通达路36号1层","0539-8290500"],
["山东","临沂","双月餐厅","临沂市罗庄区罗四路与双月湖交汇处西南角银座购物广场一层","0539-8266366"],
["山东","德州","德州天衢餐厅","德兴北路2号","0534-2261500"],
["山东","德州","德州湖滨餐厅","湖滨南路德州百货大楼1层","0534-2622203"],
["山东","德州","德州三八路餐厅","解放路以西,三八路以北银座商城一层。","0534-2391500"],
["山东","德州","临邑迎宾餐厅","临邑县迎宾路200号嘉悦购物广场一层","0534-5050888"],
["山东","德州","齐河世纪路餐厅","齐河县城齐鲁大街西首路南世纪路西侧德百新时代购物广场中心一层的房产","0534-5623823"],
["山东","德州","德州澳德乐餐厅","新兴路以东、104国道以南的澳德乐时代广场一层的房产","0534-2798522"],
["山东","德州","德州通衢餐厅餐厅","禹州市汉槐街以北通衢路以西新时代购物广场一、二层的房产","0534-7220788"],
["山东","聊城","茌平中心街餐厅","茌平县中心路京都国际64号一层、二层","0635-4265599"],
["山东","聊城","东阿前进街餐厅","东阿县城前进街北首路东紫金购物广场中心一层","0635-5019816"],
["山东","聊城","聊城新东昌餐厅","东昌东路139号建工大厦一层的房产","0635-8265500"],
["山东","聊城","聊城聊城汽车站餐厅","建设路北柳园路口聊城汽车总站建设路临街楼一层、二层","0635-6986086"],
["山东","聊城","聊城柳园餐厅","柳园南路新时代南广场一层","0635-8211068"],
["山东","聊城","聊城谷山路餐厅","阳谷县谷山路与运河路交叉口东南角宝福邻购物广场一层","0635-6336889"],
["山东","滨州","滨州博城餐厅","博兴县博城五路367号银座商城一层","0543-2950259"],
["山东","滨州","滨州时代餐厅","渤海七路601号时代超级购物中心一层","0543-3390399"],
["山东","滨州","滨州滨州大润发餐厅","黄河六路与渤海七路交叉口东角被称为“滨州大润发超市”地块上部分一层","0543-3326385"],
["山东","滨州","滨州渤海餐厅","黄河七路以南,渤海十七路以东,被称为“新都心世贸广场”项目一层","0543-3185353"],
["山东","滨州","滨州黄河餐厅","黄河四路539号一层","0543-3314886"],
["山东","滨州","滨州圣豪餐厅","惠民县南门大街213号(南门大街东侧、故园南路南侧)房屋一层二层","0543-5365068"],
["山东","滨州","滨州棣新一路餐厅","无棣县棣新一路27号圣豪百货部分一层、二层","0543-6560298"],
["山东","滨州","滨州黄山餐厅","邹平县黛西三路138号","0543-5060506"],
["山东","滨州","滨州黄山二路餐厅","邹平县黄山二路8号银座商城一层","0543-4321626"],
["山东","菏泽","菏泽湖西路餐厅","单县湖西路中段路西、向阳路中段路北一层的房产","0530-4686099"],
["山东","菏泽","单县舜师路餐厅","单县开发区舜师路东段路北环亚府东商城一层","0530-4306936"],
["山东","菏泽","临沂光洋餐厅餐厅","东明县五四路上财富广场翡翠城内的光洋百货一层的房产","0533-7185678"],
["山东","菏泽","菏泽巨野承德餐厅","巨野县人民路中段、路南承德购物广场一层","0530-8160889"],
["山东","菏泽","菏泽东方餐厅","南城东方红大街与双井街交汇处一层、二层","0530-5510156"],
["山东","菏泽","菏泽唐塔餐厅餐厅","郓城县胜利街北段唐塔商城一、二层的房产","0530-6990988"],
["山东","菏泽","菏泽牡丹餐厅","中华路686号银座商城地上一层","0530-5958663"],
["山东","菏泽","菏泽南华店","中华东路39-6号一层","0530-5331632"],
["山东","菏泽","南湖","曹县曹城街道办事处富春江路中段路北","18615301515"],
["广东","广州","新光餐厅","从化高新技术产业园广从南路546号之八至十三","020-62161512"],
["广东","广州","佳润餐厅","白云区广州大道北1419号","020-87099258"],
["广东","广州","天州餐厅","白云区广州大道北1451号之一天洲广场E区3#、4#、5#、二层202#","020-87276177"],
["广东","广州","梅花园餐厅","白云区广州大道北28号梅花园商业广场一层","020-37322062"],
["广东","广州","太阳城餐厅","白云区广州大道北嘉裕太阳城广场2-59号铺首、二层","020-3673203"],
["广东","广州","鹤龙餐厅","白云区鹤龙一路2号自编5栋101、102、103、201A商铺","020-86469016"],
["广东","广州","黄边餐厅","白云区黄边北路216号B栋1至5号","020-87484852"],
["广东","广州","黄石西餐厅","白云区黄石西路南面白水龙地块金国商业广场一、二楼","020-36700279"],
["广东","广州","机场路餐厅","白云区机场路12号首层","020-86363838"],
["广东","广州","新百信餐厅餐厅","白云区机场路1423至1455号B区二层第E01至E06号","020-36640351"],
["广东","广州","嘉禾餐厅","白云区嘉禾街联边村106国道旁加油岭地段永福商场自编1号铺","020-86077028"],
["广东","广州","金域蓝湾餐厅","白云区金沙洲路万科金域蓝湾GJ至5栋","020-31025371"],
["广东","广州","平沙餐厅","白云区均禾街新广花一路平沙村西侧新门楼","020-36298913"],
["广东","广州","人和餐厅","白云区人和镇鹤边六路28号人和万家福购物广场首层","020-86457233"],
["广东","广州","人和站餐厅","白云区人和镇鹤龙六路199号","020-36024641"],
["广东","广州","庆丰餐厅","白云区石井街庆丰村庆丰广场路西侧建筑物一层及二层","020-36524687"],
["广东","广州","石井餐厅","白云区石井街张村石井桥东北侧住宅综合楼一层","020-36391502"],
["广东","广州","增槎路餐厅","白云区松洲街增槎路1068号自编EB101至EB201","020-81293791"],
["广东","广州","太和餐厅","白云区太和镇联升路自编一号首、二层","020-87427429"],
["广东","广州","龙岗餐厅","白云区太和镇龙归龙岗路1号","020-86048850"],
["广东","广州","同泰餐厅","白云区同泰路87、89、91、93号房","020-36244382"],
["广东","广州","骏盈餐厅餐厅","白云区西槎路129号骏盈广场首层","020-36488569"],
["广东","广州","永泰餐厅","白云区新广从公路布角田街白云大道北1357号","020-86056891"],
["广东","广州","萧岗餐厅","白云区新市墟新市南街45号新天地广场A座首层肯德基","020-36293237"],
["广东","广州","新市餐厅","白云区新市镇广花西路69号首层","020-86432689"],
["广东","广州","永泰站餐厅","白云区永平街永泰村同泰路118号A01","020-36772620"],
["广东","广州","远景餐厅餐厅","白云区远景路172号时代新都会首层","020-86273629"],
["广东","广州","万达广场餐厅","白云区云城东路509号广州白云万达商业广场室内步行街一层101铺位及二层201铺位","020-36689103"],
["广东","广州","竹料餐厅","白云区竹料路新胜南街14号102铺","020-37418182"],
["广东","广州","北京路餐厅","北京路194号银座广场夹层","020-83179205"],
["广东","广州","从化汽车站餐厅","从化江埔街从城大道28号从化汽车站首层及二层","020-37987015"],
["广东","广州","从化-新世纪餐厅","从化市新城东路66号林苑大厦首二层","020-87939871"],
["广东","广州","东峻餐厅","东风东路836号东峻广场西园首层29、30号铺","020-87675519"],
["广东","广州","锦轩餐厅餐厅","东山区农林下路2号东山锦轩大厦首层","020-87302971"],
["广东","广州","番禺富丽餐厅","番禺区大石街105国道大石段257至259号自编202","020-31121250"],
["广东","广州","大石餐厅","番禺区大石街大涌路56号A11","020-34790469"],
["广东","广州","番禺万达餐厅","番禺区南村镇汉溪大道东381号室内街自编2037房","020-31054943"],
["广东","广州","南村餐厅餐厅","番禺区南村镇南村综合市场北1东北角大堂首层及二层","020-3482032"],
["广东","广州","南站三餐厅餐厅","番禺区石壁街广州南站出站层广1至01、02、03","020-18826239160"],
["广东","广州","广州南站二餐厅餐厅","番禺区石壁街广州南站商架商业夹层广州3至33","020-31130221"],
["广东","广州","广州南站餐厅","番禺区石壁武广高铁广州南站高架商业夹层3至02","020-39293181"],
["广东","广州","东兴餐厅","番禺区石基镇东兴路476号地上一层、二层","020-84707158"],
["广东","广州","石楼餐厅","番禺区石楼镇市莲路石楼路段108号A01号房","020-39134569"],
["广东","广州","繁华餐厅","番禺区市桥街繁华路2号2034号","020-84820501"],
["广东","广州","鸿禧餐厅","番禺区市桥街光明北路273、275、277号","020-39214290"],
["广东","广州","番禺-百德广场餐厅","番禺区市桥街桥东路81号首、二层","020-84701090"],
["广东","广州","西丽餐厅","番禺区市桥街西丽南路93号之一101、201房号","020-39046067"],
["广东","广州","大学城餐厅","番禺区小谷围广州大学城广大生活区商业中心","020-39340900"],
["广东","广州","钟村餐厅","番禺区钟村镇钟灵北路2号","020-34775701"],
["广东","广州","捷进餐厅","番禺市桥西环路364号117、118、119、120、121","020-39992252"],
["广东","广州","宝岗大道餐厅餐厅","海珠区宝岗大道183号首层5铺A09","020-3422609"],
["广东","广州","新一城餐厅","海珠区宝业路2号负一层自编A01","020-34373601"],
["广东","广州","滨江东餐厅","海珠区滨江东路31号39号首层自编01A号铺及海珠区滨江东路33和41号二楼自编202","020-34037390"],
["广东","广州","赤岗北餐厅","海珠区赤岗北路18号首层自编A01至A03","020-89443217"],
["广东","广州","金聚餐厅","海珠区赤岗路金聚街1-19号六福华庭首层自编137、138号商铺","020-3420931"],
["广东","广州","东晓南路餐厅","海珠区东晓南路1273号101房","020-34179232"],
["广东","广州","海珠新都荟餐厅","海珠区东晓南路1290号101房自编1至A01,201房自编2A10房","020-84448839"],
["广东","广州","金沙餐厅餐厅","海珠区工业大道88号家乐福首层JS至B房","020-89611427"],
["广东","广州","工业大道餐厅","海珠区金碧一街58号着层自编E18商铺","020-84438102"],
["广东","广州","乐峰餐厅","海珠区榕景路107号(107号)、278至282号","020-89045982"],
["广东","广州","赤岗餐厅","海珠区石榴岗路1号赤岗大厦首层","020-84288782"],
["广东","广州","新港西餐厅","海珠区新港西路181号大院1栋自编装配楼2楼210号","020-84190302"],
["广东","广州","北兴餐厅","花都区花东镇北兴花都大道南路17号A1至105至109号","020-37720439"],
["广东","广州","莲塘餐厅","花都区迎宾大道139号首层","020-36975365"],
["广东","广州","黄埔餐厅","黄埔区大沙东路5至7号首层、二层","020-82273626"],
["广东","广州","宏明餐厅","黄埔区宏明路东区商业城首层A33房","020-62242603"],
["广东","广州","沙步餐厅","黄埔区黄埔东路2841号一、二层铺","020-62241280"],
["广东","广州","大沙东餐厅","黄埔区石化路文冲商贸大厦","020-82481107"],
["广东","广州","南油餐厅","江南大道中168号南洋石油大厦首层","020-84440858"],
["广东","广州","新光餐厅餐厅","康王中路666号首、二层","020-81339073"],
["广东","广州","科学大道餐厅餐厅","科学大道193号高德汇购物中心F101A","020-82118970"],
["广东","广州","百花路餐厅餐厅","荔湾区百花路8号自编101号","020-86526686"],
["广东","广州","十甫餐厅","荔湾区第十甫路158号二楼自编2000铺","020-81069297"],
["广东","广州","环翠北餐厅","荔湾区东沙环翠北路北侧38号自编A栋35号一层及二层","020-81611609"],
["广东","广州","滘口餐厅","荔湾区芳村大道西533号1号楼首层、二层商铺自编1号","020-81688282"],
["广东","广州","花地大道餐厅","荔湾区花地大道中路228号首层自编A010至015及A096至100号","020-22331009"],
["广东","广州","芳村客运站餐厅餐厅","荔湾区花地大道中路51号芳村客运站首、夹层","020-8140787"],
["广东","广州","广雅餐厅","荔湾区环市西路52至54号首层","020-86471885"],
["广东","广州","西城都荟餐厅","荔湾区黄沙大道8号第负一层B093号商铺及第一层108A号商铺","020-81687618"],
["广东","广州","桥中北餐厅","荔湾区桥中北路2至4号首层","020-81970039"],
["广东","广州","赛博广场餐厅","荔湾区下九路上下九文化广场西南楼首二、三层","020-81390050"],
["广东","广州","中八站餐厅","荔湾区中山八路64号一、二层","020-81369130"],
["广东","广州","荔景餐厅","荔湾区中山八路8至12号首层","020-81942977"],
["广东","广州","天汇城餐厅","林和中路63号广州天汇城广场一层","020-22089520"],
["广东","广州","番禺东涌餐厅","南沙区东涌镇吉祥东路8号A101房","020-34913157"],
["广东","广州","今洲餐厅","南沙区南沙街进港大道金沙路交叉口西北侧逸涛万国园首层G511","020-39091187"],
["广东","广州","南沙华汇广场餐厅","南沙区珠电路4号首层1至65、1至67、1至69、1至76","020-31156371"],
["广东","广州","万客隆餐厅","三元里华园一号","020-86566636"],
["广东","广州","清河东餐厅","石基镇城区大道283号(商场)首层","020-39969272"],
["广东","广州","淘金餐厅餐厅","淘金路6至8号淘金商业城二层","020-83503374"],
["广东","广州","龙口东餐厅","天河北路603、605、607号华标广场群楼首层","020-38472982"],
["广东","广州","购书中心餐厅","天河路123号广州购书中心负一层","020-38862061"],
["广东","广州","天河城餐厅餐厅","天河路208号天河城六楼","020-85586123"],
["广东","广州","广武餐厅","天河路603号新建裙楼二层","020-87568647"],
["广东","广州","奥体餐厅","天河区奥体南路12号高德美居购物中心F至101之F135","020-82169390"],
["广东","广州","东郊餐厅","天河区车陂路399号首层","020-38602229"],
["广东","广州","车陂餐厅","天河区车陂路6号一层及二层自编一号","020-82525017"],
["广东","广州","东圃餐厅餐厅","天河区东圃大马路1号东圃购物中心首、二、三层","020-82300381"],
["广东","广州","东圃四季餐厅","天河区东圃大马路4号自编B栋二层","020-82165161"],
["广东","广州","火车东站餐厅","天河区东站路1号东站综合楼裙楼首层","020-61311355"],
["广东","广州","花城湾餐厅","天河区花城大道86号1至4层","020-38615237"],
["广东","广州","员村餐厅餐厅","天河区黄埔大道中恒隆街3号首层","020-85648512"],
["广东","广州","龙洞餐厅","天河区龙洞迎龙路6号一、二层(1F至1、2F至1)","020-3805911"],
["广东","广州","天河客运站餐厅","天河区沙河元岗村西南侧天河客运站1-2楼西夹层之一","020-37095310"],
["广东","广州","石牌东餐厅餐厅","天河区石牌东路31号首层至3楼自编2单元","020-38284767"],
["广东","广州","棠德南餐厅","天河区棠德南路52号E座107至109、E206至213号","020-87015127"],
["广东","广州","上社餐厅","天河区棠下街上社横路13号Z1之一","020-87594576"],
["广东","广州","柏西商都餐厅","天河区体育西路191号中石化大厦地下一层B04房","020-38922172"],
["广东","广州","东站候车室餐厅","天河区天河火车站自编01号四楼E区E4、四楼E区E5","020-61311805"],
["广东","广州","石牌西餐厅","天河区天河路586号北座首、二层","020-87518879"],
["广东","广州","百脑汇餐厅","天河区天河路598号负一层","020-87541659"],
["广东","广州","天河路餐厅","天河区天河路97号天涯楼首层及地下一层自编A","020-18826239227"],
["广东","广州","燕岭餐厅","天河区燕岭路95号首层","020-62818860"],
["广东","广州","沙汕餐厅","天河区元岗路310号自编19号01首及二层","020-3809487"],
["广东","广州","粤垦餐厅","天河区粤垦路523号102房","020-87089403"],
["广东","广州","新世界餐厅","天河区中山大道与东方一号路交汇处东方新世界花园商业群楼首层","020-85611507"],
["广东","广州","乐都汇餐厅","天河区中山大道中1116号L1F001","020-32361399"],
["广东","广州","中山大道餐厅","天河区中山大道中87号首层自编9至14号","020-82319083"],
["广东","广州","大剧院餐厅","天河区珠江新城珠江西路一号广州大剧院多功能厅一层A01至1","020-37021972"],
["广东","广州","中大科技园餐厅","新港西路135号中山大学(南校区)蒲园区628号中大科技综合B座二层","020-84115646"],
["广东","广州","东平餐厅","永平街东平大道8号东平村东泰商贸广场金灏堡物业一层","020-8605654"],
["广东","广州","瑶台餐厅","越秀区广园西路101号自编201房","020-81174338"],
["广东","广州","广安大厦餐厅","越秀区环市东路328号首、二、三层之一","020-83813655"],
["广东","广州","省客运站餐厅","越秀区环市西路145、147、149号二楼及首层部分","020-86668363"],
["广东","广州","火车站西广场餐厅","越秀区环市西路155号之三110房和206房","020-86680509"],
["广东","广州","广州客运站餐厅","越秀区环市西路158号广州大北枢纽站场广州汽车客运站三层","020-86699412"],
["广东","广州","车站餐厅","越秀区环市西路159号广州站火车站候车大厅右侧","020-61111330"],
["广东","广州","火车站餐厅","越秀区环市西路179号首、二层部分自编1号","020-86669851"],
["广东","广州","光明广场餐厅","越秀区西湖路63号252房","020-83343798"],
["广东","广州","中山六餐厅","越秀区中山六路281号越秀上品轩地下一层S14号铺","020-81046169"],
["广东","广州","英雄广场餐厅","越秀区中山三路36号首层10至15、夹层14至24","020-83862727"],
["广东","广州","中山四餐厅","越秀区中山四路199号东鸣轩首层自编106、114、115、116房","020-83726147"],
["广东","广州","凤凰城餐厅","增城新塘镇南安村碧桂园凤凰城风研苑广场步行街南街1号101商铺","020-8245240"],
["广东","广州","好又多餐厅餐厅","中山大道188号首层","020-85529332"],
["广东","广州","棠下大舜餐厅","中山大道189号自编A栋西一,二层(棠下大舜商务中心)","020-85574006"],
["广东","广州","流花车站店KFC","环市西路188号流花车站首层、二层、三层部分","020-83065710"],
["广东","广州","增城万达店KFC","增城增城大道69号1幢102、202、273","020-32178297"],
["广东","广州","洛溪KFC","番禺区洛浦街洛溪新城吉祥道155号一、二层","020-84543127"],
["广东","广州","华穗路KFC","华穗路263号双城国际大厦一层","020-66393173"],
["广东","广州","萝岗万达KFC","开发区开创大道以南、科丰路以西万达广场二层2001","020-82512055"],
["广东","广州","南沙万达KFC","南沙区双山大道3号万达广场1002号","020-39012406"],
["广东","广州","番禺永旺KFC","番禺区大龙街亚运大道1号永旺梦乐城番禺广场B1层NO0010","13632480988"],
["广东","广州","五羊","越秀区寺右新马路五羊村商住中心首层","020-87370127"],
["广东","广州","署前","署前路33号首、二层","020-87319242"],
["广东","广州","逸富","增城新塘镇广深公路旁逸富广场首层","020-26235637"],
["广东","广州","云山大道","花都区云山大道29号首层及二层","020-37717691"],
["广东","广州","恒宝","宝华路133号恒宝华庭负一层","020-81240499"],
["广东","广州","花都广百","花都区凤凰北路与龙珠路交汇处西北角凤凰广场商业裙楼A区首二层肯德基","020-37712426"],
["广东","广州","五月花KFC","越秀区中山五路68号负二层","020-83330026"],
["广东","广州","从城大道KFC","从化江埔街从城大道111号之26、27、28、29号","020-37907759"],
["广东","广州","花都百业广场店KFC","花都区新华镇公益路百业广场首层","020-36882479"],
["广东","广州","丽莎广场","花都区狮岭镇阳光大道东(自编金狮大道东4号)","020-37708251"],
["广东","广州","白云机场","白云国际机场候机楼C112标段商铺编号B8601","020-36062125"],
["广东","广州","狮岭宝峰","花都区狮岭镇宝峰路2号1至3层商场之首层二层","020-36973897"],
["广东","广州","增城广场KFC","增城市荔城街府佑路98号20672068","020-82625165"],
["广东","广州","云城西KFC","白云区云霄路五号停机坪购物广场一层","020-36076036"],
["广东","广州","上漖DT","番禺区大石镇上漖村迎宾路北侧重建房产及场地","13560094910"],
["广东","广州","白云机场外卖点店KFC","花都区白云国际机场候机楼主楼一层L30标段C9131号之二","020-36067742"],
["广东","广州","青年路餐厅","广州开发区青年路1、11首层","020-82088002"],
["广东","广州","光明北餐厅","广州市番禺区市桥街富华西路2号","020-84879605"],
["广东","广州","番禺大岗餐厅","广州市南沙区大岗镇二湾社区兴业路50-5、50-6、50-7号","020-34936353"],
["广东","广州","桥南餐厅","广州市番禺区桥南街桥南路111、113、115、117号自编2","020-39255011"],
["广东","广州","石基城市广场餐厅","石基镇石基城市广场肯德基餐厅","020-34560078"],
["广东","广州","江高餐厅","广州市白云区江高镇东胜街18号首层,二层肯德基餐厅","020-86600090"],
["广东","广州","清湖店","广州市白云区均禾街清湖牌坊旁金箔广场肯德基餐厅","020-36557327"],
["广东","广州","港口大道餐厅","增城新塘镇港口大道北金海岸城市广场","020-32895403"],
["广东","广州","万国广场店","海珠区江湾路285号万国广场负一层KFC","020-84419530"],
["广东","广州","信和店","海珠区昌岗中路239号二楼肯德基","020-34331659"],
["广东","广州","江燕路店","海珠区江燕路108号燕汇广场首层","020-89778054"],
["广东","广州","花都餐厅","花都区新华街建设路37号","020-86815559"],
["广东","广州","狮岭东升餐厅","花都区狮岭镇东升路金炜达商业楼首层","020-86987180"],
["广东","广州","花都龙珠店","花都区新华街龙珠路大润发首层","020-36982730"],
["广东","广州","新华餐厅","花都区新华街建设路19号","020-36859855"],
["广东","广州","儿童公园","越秀区人民南路88号负一层、塔楼负夹层及塔楼首层KFC餐厅","020-62656250"],
["广东","广州","合一餐厅","广州大道北1970-1-1970-10号、1972-1-1972-15号、19776-1-1976-10号101房的合一潮流城首层1F01/02号位","020-37377247"],
["广东","韶关","乐昌餐厅","乐昌市公主中路19号至20号","0751-5555322"],
["广东","韶关","南雄餐厅","南雄市雄州街道三影塔12号1、2号门店","0751-3861122"],
["广东","韶关","韶关仁化餐厅","仁化县建设路29号丹霞城市商业步行街第一层至第二屋","0751-6282227"],
["广东","韶关","韶关体育中心餐厅","新华南路16号世纪新城首层23-32号铺","0751-8528188"],
["广东","韶关","韶关火车站餐厅","浈江区站南路火车东站客运大楼南侧","0751-8224355"],
["广东","韶关","韶关亿和","风度中路亿和广场首层A区A01号之一","0751-8878111"],
["广东","韶关","韶关风度名城","熏风路与解放路交汇片韶关风度名城1B001一、二层","0751-8878228"],
["广东","深圳","福永餐厅","深圳市宝安区福永镇福永大道永安南城百货首层","0755-23024985"],
["广东","深圳","书香门第餐厅","宝安区民治街道金龙路与梅陇路交汇处书香门第上河坊","0755-29368755"],
["广东","深圳","华强广场餐厅","福田区华强北路1019#华强广场负一层","0755-82711830"],
["广东","深圳","格澜郡餐厅","深圳宝安观澜新世纪广场南侧中航.格澜郡商业楼B栋1#首层","0755-29506792"],
["广东","深圳","雍景城餐厅","深圳宝安区公明街道民生路大道71号民联购物广场首层A区","0755-27155979"],
["广东","深圳","观澜餐厅","深圳宝安区观澜大道吉盛酒店民乐福购物广场首层","0755-28081661"],
["广东","深圳","龙观餐厅","深圳宝安区龙华镇龙观西路天虹商场首层","0755-28147611"],
["广东","深圳","民治餐厅","深圳宝安区民治街道民治大道路与民丰路交汇处横岭工业区B栋1065号天虹商场首层","0755-29275085"],
["广东","深圳","前进餐厅","深圳宝安区前进路宝城10区新安街道办泰华商业城首层","0755-27880539"],
["广东","深圳","裕客隆餐厅","深圳宝安区沙井街道办沙井路180号裕客隆购物广场首层","0755-27208956"],
["广东","深圳","新餐厅","深圳宝安区沙井镇中心路创新路口沙井天虹商场首层","0755-29873317"],
["广东","深圳","石岩餐厅","深圳宝安区石岩街道宝石南路207号万联购物广场1、2楼???","0755-27603018"],
["广东","深圳","宝利豪庭餐厅","深圳宝安区松岗街道楼岗大道宝利豪庭首层","0755-81766480"],
["广东","深圳","如意餐厅","深圳宝安石岩街道河滨南路佳华豪苑1楼","0755-27600880"],
["广东","深圳","东方城餐厅","深圳宝安西乡圣淘沙.骏园11号楼东方城购物广场首层","0755-29767024"],
["广东","深圳","彩田餐厅","深圳福田区彩田路2066号江苏宾馆1-2层","0755-83778825"],
["广东","深圳","强餐厅","深圳福田区华强北路深纺大厦女人世界名店1-4层","0755-83777359"],
["广东","深圳","天安餐厅","深圳福田区泰然四路天安数码城园区内天安科技服务中心B座北端一楼","0755-82049002"],
["广东","深圳","信义餐厅","深圳龙岗区布吉镇百鸽路信义假日名城泌芳园商场101号首楼","0755-84185701"],
["广东","深圳","大鹏餐厅","深圳龙岗区大鹏镇迎宾北路18号千禧大厦1-2楼","0755-84304312"],
["广东","深圳","葵涌餐厅","深圳龙岗区葵涌葵民路2号三溪社区中新居民小组综合楼乐兴百货一楼","0755-89775089"],
["广东","深圳","百仕达餐厅","深圳罗湖泰宁路百仕达东郡裙楼吉之岛百货商场二楼","0755-25003193"],
["广东","深圳","海雅餐厅","深圳南山区南海大道海雅缤纷广场1-2层","0755-26434298"],
["广东","深圳","世界之窗餐厅","深圳南山区世界之窗前广场西侧一、二楼","0755-26609429"],
["广东","深圳","机场T3到达餐厅","深圳市宝安国际机场T3航站楼二楼到达层","0755-23459765"],
["广东","深圳","机场T3出发餐厅","深圳市宝安国际机场T3航站楼五楼出发餐饮配套区","0755-23459767"],
["广东","深圳","创业餐厅","深圳市宝安区44区富源商贸中心大厦1层","0755-27575697"],
["广东","深圳","华旺餐厅","深圳市宝安区大浪街道同胜社区华旺路富裕工业园内第3栋一层C118-125号","0755-28147766"],
["广东","深圳","凤凰山餐厅","深圳市宝安区福永凤凰社区凤凰山大道美食街?","0755-23144973"],
["广东","深圳","宝利来餐厅","深圳市宝安区福永街道办福永大道与107国道交汇处宝利来商贸城首层","0755-27393317"],
["广东","深圳","天福餐厅","深圳市宝安区福永街道办富桥工业二区蚝业路万家乐百货首层","0755-27335281"],
["广东","深圳","兴围餐厅","深圳市宝安区福永街道兴围社区兴围大道深鹏百货首层","0755-29194926"],
["广东","深圳","政丰餐厅","深圳市宝安区福永街道政丰北路康之宝超级市场一层","0755-27490971"],
["广东","深圳","公明友谊餐厅","深圳市宝安区公明街道金安路友谊书城一楼","0755-27105110"],
["广东","深圳","公明餐厅","深圳市宝安区公明镇长春南路西一号佳华平价市场公明分店首层","0755-27105641"],
["广东","深圳","荔园餐厅","深圳市宝安区荔园路77号","0755-27928856"],
["广东","深圳","和平大润发餐厅","深圳市宝安区龙华街道和平东路大润发购物广场首层","0755-29572584"],
["广东","深圳","龙华中心站餐厅","深圳市宝安区龙华街道和平路龙华中心站地铁B出口旁","0755-28079024"],
["广东","深圳","三联餐厅","深圳市宝安区龙华街道三联路建业泰综合楼首层(兴万和商业广场)","0755-28174699"],
["广东","深圳","宝华餐厅","深圳市宝安区龙华镇人民北中路龙华佳华商场首层","0755-27704009"],
["广东","深圳","万众餐厅","深圳市宝安区民治街道牛栏前大厦岁宝百货一层101号铺位","0755-23042552"],
["广东","深圳","益华餐厅","深圳市宝安区沙井街道宝安大道与北环路交汇处益华电子广场","0755-85279276"],
["广东","深圳","沙井客运站餐厅","深圳市宝安区沙井街道广深公路沙井段沙井汽车客运站","0755-81761659"],
["广东","深圳","松福餐厅","深圳市宝安区沙井街道民主大道濠景城东座岁宝百货1层102号","0755-29353370"],
["广东","深圳","联城餐厅","深圳市宝安区沙井上南大街200号联诚购物广场一楼","0755-29853527"],
["广东","深圳","沙井餐厅","深圳市宝安区沙井镇新沙路276号沙井佳华商场首层","0755-81771604"],
["广东","深圳","宝石餐厅","深圳市宝安区石岩街道浪心社区宝石南路宏发大世界一层A区A1-05至08号","0755-28093486"],
["广东","深圳","塘头餐厅","深圳市宝安区石岩街道塘头大道东海百货一、二层","0755-29010052"],
["广东","深圳","喜港城餐厅","深圳市宝安区松岗街道东方大道东方水围鸿盛花园一、二层","0755-23729567"],
["广东","深圳","西岸华府餐厅","深圳市宝安区松岗街道沙江路中海西岸华府(南区)1栋101首层","0755-29459204"],
["广东","深圳","港隆城餐厅","深圳市宝安区西乡街道80区港隆城购物中心临宝民路1-2层","0755-27934107"],
["广东","深圳","固戍地铁站餐厅","深圳市宝安区西乡街道宝安大道与固戍二路交汇处新天地购物广场","0755-27386680"],
["广东","深圳","缤纷餐厅","深圳市宝安区西乡街道臣田工业区宝民二路与宝田一路交汇处缤纷时代购物广场一、二层","0755-29986851"],
["广东","深圳","白金假日餐厅","深圳市宝安区西乡街道海城路白金假日商业广场人人乐首层","0755-29611459"],
["广东","深圳","锦花餐厅","深圳市宝安区西乡街道锦华路人人乐超市首层","0755-29703726"],
["广东","深圳","西城上筑餐厅","深圳市宝安区新安街道创业一路西城上筑花园华润万家一层","0755-29114313"],
["广东","深圳","香槟餐厅","深圳市宝安区新安街道建安路香缤广场家乐福二楼","0755-27861480"],
["广东","深圳","黄金台餐厅","深圳市宝安区新安街道前进路黄金台商业大厦1楼","0755-27671690"],
["广东","深圳","裕安餐厅","深圳市宝安区新安街道裕安二路融景园1栋商场","0755-27666913"],
["广东","深圳","公园餐厅","深圳市宝安区新城22区公园路22号一楼东","0755-27846606"],
["广东","深圳","新湖餐厅","深圳市宝安区新湖路与罗田路交汇处泰华君逸世家地上一层及地上二层","0755-21500253"],
["广东","深圳","富通城餐厅","深圳市宝安区兴业路与悦和路交汇处劳动综合楼首层","0755-33911008"],
["广东","深圳","桃源居餐厅","深圳市宝安西乡前进二路世外桃源28栋裙楼首层","0755-29602240"],
["广东","深圳","大芬餐厅","深圳市布吉街道办中翠路桂芳园6期会所1楼","0755-89707412"],
["广东","深圳","布吉老街餐厅","深圳市布吉街道布吉路73-83号(单号)时代广场布吉路83号一楼、二楼202","0755-28271780"],
["广东","深圳","华海餐厅","深圳市福华路华海大厦一楼","0755-83039509"],
["广东","深圳","福田汽车站餐厅","深圳市福田汽车站一层东侧肯德基","0755-23931023"],
["广东","深圳","八卦岭餐厅","深圳市福田区八卦二路好又多量贩店首层","0755-82496748"],
["广东","深圳","彩德城餐厅","深圳市福田区彩田路与福彩街交叉口彩德城1层","0755-23907821"],
["广东","深圳","丰盛町餐厅","深圳市福田区车公庙丰盛町C区","0755-82532745"],
["广东","深圳","东园餐厅","深圳市福田区东园路台湾花园一楼东","0755-82275924"],
["广东","深圳","COCOPARK餐厅","深圳市福田区福华路星河CoCo Park花园负一楼B1C-076A商铺","0755-88312096"],
["广东","深圳","东方雅苑餐厅","深圳市福田区福民路北益田路东侧皇达东方雅苑商业裙楼首层","0755-83483126"],
["广东","深圳","福民路餐厅","深圳市福田区福民路星河明居裙楼(天虹百货)一楼1002部分","0755-83558445"],
["广东","深圳","红荔西餐厅","深圳市福田区红荔西路荔林苑商场1层102号??","0755-83665780"],
["广东","深圳","振华餐厅","深圳市福田区华强北路华联发大厦一楼","0755-83999308"],
["广东","深圳","华强北餐厅","深圳市福田区华强北路鹏基上步工业厂房105栋1\2层","0755-82781987"],
["广东","深圳","上步餐厅","深圳市福田区华强南路与深南中路交汇处","0755-89442353"],
["广东","深圳","福荣路餐厅","深圳市福田区金地一路绿景蓝湾半岛1层西侧","0755-83027519"],
["广东","深圳","金色家园餐厅","深圳市福田区莲花路2018号金色家园三期裙楼首层","0755-83147877"],
["广东","深圳","梅华餐厅","深圳市福田区梅华路103号光荣大厦一楼","0755-83318219"],
["广东","深圳","侨香餐厅","深圳市福田区桥香路与农林路交汇处深国投广场负1楼","0755-82700347"],
["广东","深圳","连城新天地餐厅","深圳市福田区深圳地铁一期工程会购区间地下空间 C13A铺","0755-82577046"],
["广东","深圳","花好园餐厅","深圳市福田区下沙村东涌路花好园岁宝百货首层","0755-88373871"],
["广东","深圳","新洲餐厅","深圳市福田区新洲路中城天邑一楼(吉之岛旁)","0755-23902819"],
["广东","深圳","园岭餐厅","深圳市福田区园岭中路园中花园新一佳商场首层","0755-25881812"],
["广东","深圳","兴万达餐厅","深圳市观澜大道兴万达商场一层","0755-29050482"],
["广东","深圳","中心书城餐厅","深圳市红荔路深圳中心书城首层","0755-23992116"],
["广东","深圳","景田餐厅","深圳市红荔西路景田南四街香密三村五号群楼岁宝百货首层","0755-83738904"],
["广东","深圳","六和餐厅","深圳市六约六合路98号东方时代广场(华润万家旁)","0755-89349719"],
["广东","深圳","嘉御山餐厅","深圳市龙岗区坂田五和大道与吉华路交汇处嘉御山天虹商场一楼","0755-28364131"],
["广东","深圳","中兴餐厅","深圳市龙岗区布吉街道中兴路中心大道路口布吉天虹商场首层","0755-84701736"],
["广东","深圳","布吉三联餐厅","深圳市龙岗区布吉三联布龙路红星美凯龙旁三联综合楼首层","0755-84522961"],
["广东","深圳","万科城餐厅","深圳市龙岗区布吉镇坂雪岗工业区坂雪岗大道万科城C区2号101号商铺1-2层","0755-89506053"],
["广东","深圳","国都餐厅","深圳市龙岗区布吉镇金鹏路国都花园一二楼","0755-28288879"],
["广东","深圳","六约餐厅","深圳市龙岗区横岗镇深惠公路新亚洲广场首层","0755-28628083"],
["广东","深圳","宝龙餐厅","深圳市龙岗区龙岗街道宝龙工业区宏佰百货首层南侧","0755-85200522"],
["广东","深圳","瑞景园餐厅","深圳市龙岗区龙岗街道龙平东路“东方瑞景苑”首层","0755-89258506"],
["广东","深圳","龙城餐厅","深圳市龙岗区龙河路榭丽花园三期","0755-89302599"],
["广东","深圳","龙翔餐厅","深圳市龙岗区龙翔路世贸广场1楼","0755-28904567"],
["广东","深圳","南岭餐厅","深圳市龙岗区南湾街道南岭村万家乐百货一楼","0755-28720762"],
["广东","深圳","华南城餐厅","深圳市龙岗区平湖街道华南大道1号华南国际皮革皮具原辅料物流区(二期)华南发展中心首层1-06、1-07及二层2-08号商铺","0755-89375924"],
["广东","深圳","凤凰餐厅","深圳市龙岗区平湖街道平湖凤凰新村商业广场","0755-84254551"],
["广东","深圳","平湖餐厅","深圳市龙岗区平湖镇守珍街53号南城百货大楼首层","0755-28841018"],
["广东","深圳","坪山餐厅","深圳市龙岗区坪山镇建设路泰富华庭泰富中心1-2层","0755-84288582"],
["广东","深圳","吉祥餐厅","深圳市龙岗区中心城愉园新苑1层","0755-28926750"],
["广东","深圳","数码新城餐厅","深圳市龙岗中心区黄阁北路天安数码新城","0755-85205060"],
["广东","深圳","油松餐厅","深圳市龙华东环一路与油松路交汇处家和福商城一楼","0755-21501611"],
["广东","深圳","水榭春天餐厅","深圳市龙华民治街道人民南路与民塘路交汇处莱蒙水榭春天三期商业首层","0755-85242500"],
["广东","深圳","中航天逸餐厅","深圳市龙华新区民治街道人民路2020号九方商场一层L126号铺","0755-23020258"],
["广东","深圳","崇尚餐厅","深圳市龙华镇东环一路与建设路交汇处东环明珠圆崇尚百货一、二楼?","0755-23149412??"],
["广东","深圳","罗湖口岸餐厅","深圳市罗湖口岸交通楼二楼B交-29号","0755-82328662"],
["广东","深圳","松园餐厅","深圳市罗湖区宝安南路3061号茂源大厦首层","0755-82117760"],
["广东","深圳","笋岗餐厅","深圳市罗湖区宝岗路笋岗大厦一层","0755-22156463"],
["广东","深圳","田贝餐厅","深圳市罗湖区贝丽南路和正星园首城新一佳旁","0755-82601500"],
["广东","深圳","春风餐厅","深圳市罗湖区春风路长丰苑万佳百货一楼","0755-82369144"],
["广东","深圳","翡翠餐厅","深圳市罗湖区翠竹路逸翟园二期中洲美食天地一楼","0755-25772284"],
["广东","深圳","东湖餐厅","深圳市罗湖区东湖路彩世界家园华润万家超市首层","0755-25431658"],
["广东","深圳","东门中餐厅","深圳市罗湖区东门中路南海中心二层","0755-82351931"],
["广东","深圳","凤凰餐厅","深圳市罗湖区凤凰路11号江月大厦一楼","0755-25797172"],
["广东","深圳","新都餐厅","深圳市罗湖区建设路新都酒店一楼","0755-82340078"],
["广东","深圳","新园餐厅","深圳市罗湖区解放路华城文山楼二楼","0755-82351954"],
["广东","深圳","金稻田餐厅","深圳市罗湖区金稻田路清水河新村综合楼一楼","0755-25225816"],
["广东","深圳","莲塘餐厅","深圳市罗湖区莲塘聚福路岁宝百货一层","0755-25739862"],
["广东","深圳","罗湖火车站餐厅","深圳市罗湖区罗湖火车站大楼二层夹层","0755-82327382"],
["广东","深圳","坭岗餐厅","深圳市罗湖区坭岗路泥岗村门口市场一、二楼","0755-25931846"],
["广东","深圳","龙餐厅","深圳市罗湖区人民北路风貌街东龙商楼二层","0755-82310299"],
["广东","深圳","金光华餐厅","深圳市罗湖区人民南路2028#金光华广场负1层","0755-82611225"],
["广东","深圳","新银座餐厅","深圳市罗湖区人民南路东侧新银座裙楼首层","0755-82122870"],
["广东","深圳","金山餐厅","深圳市罗湖区深南东路5033号金山大厦1楼肯德基","0755-25929431"],
["广东","深圳","梅林餐厅","深圳市梅林一村家乐福超市首层","0755-83538465"],
["广东","深圳","北站高铁餐厅","深圳市民治街道民塘路深圳北站高铁候车厅内","0755-61821400"],
["广东","深圳","湾畔餐厅","深圳市南山区白石洲沙河路深圳湾畔花园","0755-26001513"],
["广东","深圳","万象餐厅","深圳市南山区滨海大道万象新园岁宝百货D座一楼","0755-26455778"],
["广东","深圳","德赛餐厅","深圳市南山区德赛科技大厦1,2层","0755-21671013"],
["广东","深圳","欢乐谷餐厅","深圳市南山区华侨城欢乐谷一楼","0755-26602785"],
["广东","深圳","体育中心餐厅","深圳市南山区华润深圳湾体育中心首层","0755-86007865"],
["广东","深圳","松坪山餐厅","深圳市南山区科技园北区朗山路松坪山万昌百货","0755-26930384"],
["广东","深圳","新保辉餐厅","深圳市南山区南海大道新保辉辅楼1层","0755-26161225"],
["广东","深圳","保利餐厅","深圳市南山区南山商业文化中心区保利文化广场","0755-86287286"],
["广东","深圳","鼎新餐厅","深圳市南山区沙河西路与留仙大道交汇处","0755-26971060"],
["广东","深圳","西丽餐厅","深圳市南山区沙河西路与留仙大道交汇处天虹商场一楼KFC","0755-86150847"],
["广东","深圳","招商餐厅","深圳市南山区蛇口公园路利安商场首层","0755-26827840"],
["广东","深圳","大新餐厅","深圳市南山区桃园路与前海路交汇处大新村“大新前海商业中心”1-2F","0755-86034065"],
["广东","深圳","桃园餐厅","深圳市南山区桃园南常兴西路南景苑1-2层","0755-26063279"],
["广东","深圳","桃源村餐厅","深圳市南山区桃源村三期裙楼岁宝百货商场首层","0755-86312776"],
["广东","深圳","麒麟餐厅","深圳市南山区艺园路东侧缤纷年华家园商业裙楼首层","0755-33229715"],
["广东","深圳","益田假日餐厅","深圳市南山区益田假日广场(世界之窗对面)B1-27铺位","0755-86298762"],
["广东","深圳","宝能太古城餐厅","深圳市南山区中心路(深圳湾段)2199号宝能太古城花园购物中心南区负一层SB103","0755-21618852"],
["广东","深圳","双拥餐厅","深圳市平湖街道双拥街136-138号综合楼及辅楼首层","0755-28454531"],
["广东","深圳","坪山友谊餐厅","深圳市坪山新区深汕路246号坪山投资大厦首二层","0755-84539100"],
["广东","深圳","欧洲城餐厅","深圳市沙河东路255号3栋百安居首层","0755-86239094"],
["广东","深圳","四海餐厅","深圳市蛇口沃尔玛广场一楼","0755-26814928"],
["广东","深圳","万象城餐厅","深圳市深南中路华润中心万象城中座B32号负1楼","0755-82668017"],
["广东","深圳","松岗餐厅","深圳市松岗镇107国道松岗段293号松岗佳华商场首层","0755-27713746"],
["广东","深圳","宝珠餐厅","深圳市西丽珠光路万佳百货首层","0755-26757980"],
["广东","深圳","盐田餐厅","深圳市盐田区东海大道金港盛世一、二楼","0755-82637036"],
["广东","深圳","茶溪谷餐厅","深圳市盐田区三洲田东部华侨城茶溪谷花街入口处","0755-25031996"],
["广东","深圳","上东湾餐厅","深圳市盐田区沙头角深沙路与沙盐路交汇处上东湾首层(沙头角口岸对面)","0755-22371535"],
["广东","深圳","沙头角餐厅","深圳市盐田区沙头角镇深盐路碧海蓝天明苑新一佳商场首层","0755-25213808"],
["广东","深圳","御景东方餐厅","深圳湾二路与白石路交汇处御景东方花园裙楼123 L1-04号","0755-86286471"],
["广东","深圳","罗湖商业城餐厅","罗湖区火车站广场罗湖商业城一层、负一层部分","13798565697"],
["广东","深圳","水径餐厅","布吉街道水径社区海心汇福园群楼首二层","13728831204"],
["广东","深圳","横岗","龙岗区横岗镇茂盛路新世界广场1栋一楼西","0755-28682037"],
["广东","深圳","群星广场","福田区红荔路38号群星广场裙楼一楼","0755-83983001"],
["广东","深圳","大梅沙","盐田区大梅沙盐梅路大梅沙海景酒店三#楼一层","0755-25255683"],
["广东","深圳","坪地","龙岗区深惠公路坪洲百货大厦一楼","0755-84097037"],
["广东","深圳","金田","福田区福民路与金田路交汇处福民佳园一楼","0755-88307106"],
["广东","深圳","高新区","南山区高新区南区综合服务楼1楼餐厅东北角","0755-26017720"],
["广东","深圳","海上世界","南山区蛇口兴华路海滨商业中心1栋102102A109201B202","0755-26684651"],
["广东","深圳","吉华","龙岗区坂田街道吉华路999号坂田商业大楼二层A01","0755-28894382"],
["广东","深圳","龙平","龙岗区龙岗街道龙平东路即泰华百货对面一楼","0755-84833256"],
["广东","深圳","大侠谷","盐田区深圳东部华侨城有限公司大侠谷景区索道下平台一层","0755-25032129"],
["广东","深圳","东兴","罗湖区新园路14号东兴大厦1楼A10052楼B0005","0755-82172207"],
["广东","深圳","坑梓","龙岗区坪山新区坑梓办事处人民西路29号123201","0755-84136211"],
["广东","深圳","百门前","龙岗区南湾街道沙湾百门前工业区10号厂房1-2楼","0755-28745576"],
["广东","深圳","岗厦","福田区深南路南岗厦村北东方新天地广场裙楼01层102B118C124","0755-23610292"],
["广东","深圳","科苑","南山区科兴路10号汇景豪苑301(1F-08B)","0755-26981196"],
["广东","深圳","星河时代","龙岗区爱南路666号星河时代COCOPARKL1MC-012","0755-28312241"],
["广东","深圳","松柏","龙岗区横岗街道龙岗大道5008号01层0802层08","0755-28407551"],
["广东","深圳","民旺","宝安区民治街道民治大道与民旺路交汇处民治商务中心1楼115铺-1号","0755-33691975"],
["广东","深圳","金鸡路店KFC","南山区桃园路北侧田厦翡翠明珠花园裙楼首层","0755-86549881"],
["广东","深圳","桥头店KFC","宝安区福永街道桥头社区东海百货一层","0755-23115080"],
["广东","深圳","塘尾店KFC","宝安区福永街道塘尾社区凤塘大道唐美商场一层","0755-27334804"],
["广东","深圳","福华路店KFC","福田区地铁一期工程会展中心物业区连城新天地A19号","0755-23895049"],
["广东","深圳","观澜湖店KFC","龙华新区观澜高尔夫大道观澜湖新城二期一楼","0755-23772657"],
["广东","深圳","南联DT店KFC","龙岗区龙岗街道南联社区龙岗大道(龙岗段)5020-1至5020-9号综合楼首二层","0755-84803902"],
["广东","深圳","坭岗","罗湖区坭岗村新佳泰服装市场二楼肯德基","0755-25931846"],
["广东","深圳","蔚蓝海岸餐厅","南山区后海大道蔚蓝海岸四期商住楼首层KFC","0755-26498108"],
["广东","深圳","南园餐厅","南山区南新路良益物业声宝商业广场一层 (南园村对面)","0755-86502759"],
["广东","深圳","固戍餐厅","宝安区西乡街道固戍社区固戍一路281号KFC","0755-29603366"],
["广东","深圳","鸿洲餐厅","南山区深南大道与南新路口交汇处鸿洲新都首层KFC","0755-26086682"],
["广东","深圳","华强东餐厅","福田区燕南路88号中泰燕南名庭2期1-2层肯德基","0755-88392092"],
["广东","深圳","民乐餐厅","龙华新区民治街道民乐综合市场1楼5号","0755-28096687"],
["广东","深圳","新天地","罗湖区宝安北路1012号新天地服装城1楼","0755-25984787"],
["广东","深圳","油松餐厅","宝安区龙华街道东环一路与油松路交汇处一楼肯德基","0755-21501611"],
["广东","深圳","竹子林餐厅","福田区深南西路竹子林益华综合楼B栋西北侧","0755-83711526"],
["广东","深圳","桃源村餐厅","深圳市南山区桃源街道桃源村三期裙楼岁宝百货商场首层","0755-86312776"],
["广东","珠海","口岸餐厅","拱北迎宾南路1003号B1层4号","0756-8307613"],
["广东","珠海","华厦餐厅","珠海南屏镇环屏路1号","0756-8911006"],
["广东","珠海","前山餐厅","珠海前山市场A圈","0756-8986233"],
["广东","珠海","朝福餐厅","珠海市斗门井岸步行街新民路144号","0756-5111163"],
["广东","珠海","井岸餐厅","珠海市斗门县井岸镇民路18-22首层","0756-5559659"],
["广东","珠海","拱北餐厅","珠海市拱北口岸广场负二层","0756-8286332"],
["广东","珠海","迎宾餐厅","珠海市拱北迎宾路迎宾广场二楼","0756-8113094"],
["广东","珠海","吉大餐厅","珠海市吉大景山路免税商场一楼218号","0756-3374325"],
["广东","珠海","九洲港餐厅","珠海市九洲港路599号九洲港客运联检楼一楼","0756-3262230"],
["广东","珠海","明珠餐厅","珠海市明珠南路1389号1层","0756-8629257"],
["广东","珠海","侨光餐厅","珠海市侨光路37号","0756--8889017"],
["广东","珠海","机场餐厅","珠海市三灶镇海澄机场候机大楼主楼(中指廊)二楼出发大厅SD025号","0756-7680932"],
["广东","珠海","映月餐厅","珠海市三灶镇映月路益百家百货首层","0756-7511166"],
["广东","珠海","香洲餐厅","珠海市香洲凤凰路163号一楼","0756-2115782"],
["广东","珠海","丰泽园餐厅","珠海市香洲区南坑160号","0756-2169018"],
["广东","珠海","兴业餐厅","珠海市香洲区兴业路五洲花城","0756-2535337"],
["广东","珠海","时代广场餐厅","珠海市香洲人民西路与敬业路交汇处城市风景时代广场商业群楼华润万佳一楼","0756-2651502"],
["广东","珠海","仁恒餐厅","珠海市新香洲人民西路仁恒星园商业街","0756-2608108"],
["广东","珠海","拱北汽车站餐厅","珠海市友谊路20号1、2层","0756-8887871"],
["广东","珠海","华发商都餐厅","珠海市珠海大道8号华发商都2#楼一层A1028、A1029号","0756-8921085"],
["广东","珠海","柠溪餐厅","珠海香洲柠溪路284号文化广场","0756-2625733"],
["广东","珠海","紫荆餐厅","珠海香洲紫荆路301号至尊宝大厦1-2层","0756-2127203"],
["广东","珠海","夏湾","香洲区拱北夏湾路228230232234号","0756-8882821"],
["广东","珠海","富华里店KFC","九洲大道2023号的中海富华里中心12栋一层110、111、112号","0756-8990930"],
["广东","汕头","广汕汽车穿梭餐厅","汕头市潮南区峡山街道峡山镇洋林村324国道旁华南贸易广场内","0754-87925518"],
["广东","汕头","金湖路餐厅","汕头市潮阳区谷饶镇谷贵路茂广路段“阳光百汇”商场一层A001铺面、二层B001铺面","0754-82130200"],
["广东","汕头","中华路餐厅","汕头市潮阳区文光街道中华路104号一层1F-004号和二层2F-004号铺面","0754-89918911"],
["广东","汕头","潮阳东门桥餐厅","汕头市潮阳区文光街道中山中路59号1、2层","0754-83820892"],
["广东","汕头","易初莲花餐厅","汕头市澄海区阜安路东侧易初莲花超市一层","0754-85871127"],
["广东","汕头","文祠餐厅","汕头市澄海区中山中路和文祠西路交界阜安园3栋一层","0754-85810715"],
["广东","汕头","外砂餐厅","汕头市金平区324国道与外砂迎宾路东南角丰达雅居壹栋一层103、104号、二层202号","0754-87121759"],
["广东","汕头","万佳餐厅","汕头市金平区金湖路115号深源美庭2栋107、207号房复式店铺,108、208号房复式店铺(2015.2.13)","0754-87260779"],
["广东","汕头","金砂餐厅","汕头市金平区金新路83号轻工经济技术开发公司综合楼","0754-88638221"],
["广东","汕头","天山路餐厅","汕头市金平区天山路55号金书大厦一层101之A1和二层201之B1店铺","0754-88788107"],
["广东","汕头","平东餐厅","汕头市金平区长平路丰泽庄正大万客隆商城一、二层","0754-88730684"],
["广东","汕头","中山餐厅","汕头市金平区中山路莱莉大厦1层","0754-88442461"],
["广东","汕头","明珠餐厅","汕头市龙湖区经济特区衡山路锦龙商业大厦1楼","0754-88870890"],
["广东","汕头","金环路店KFC","龙湖区长平路90号(长平路与金环路东南角)的汕头苏宁广场一层110、111店铺、二层210店铺","13643070060"],
["广东","汕头","大学路DT店KFC","大学路金凤西路交界处国台彩印大楼A幢一层001号店铺","0754-5871107"],
["广东","佛山","东方广场餐厅","禅城区东方广场银洲城首、二层","0757-82300686"],
["广东","佛山","惠景餐厅","禅城区汾江南路131号1区七座101号首层","0757-83160155"],
["广东","佛山","南庄吉利餐厅","禅城区南庄镇南庄二马路89号吉利明珠生活广场1层及2层","0757-82567391"],
["广东","佛山","升平餐厅","禅城区松风路升平广场三期首层","0757-82132433"],
["广东","佛山","普君餐厅","禅城区兆祥路32号P3至P6商铺","0757-82063727"],
["广东","佛山","玫瑰园餐厅","大福南路22号首层101房及二层201房","0757-839279"],
["广东","佛山","沧江路餐厅","高明区荷城街道沧江路423号1座一楼1001号","0757-88883173"],
["广东","佛山","文昌餐厅","高明文昌路31至33号物业广场购物中心首、二层","0757-88680098"],
["广东","佛山","佛山-季华餐厅","季华路东建世纪广场首层","0757-83996696"],
["广东","佛山","乐平餐厅","乐从镇乐从居委会跃进路A56号一层","0757-28108693"],
["广东","佛山","南海-黄歧餐厅","南海黄歧宏威路黄歧商业中心","0757-81158988"],
["广东","佛山","南海-巴黎春天餐厅","南海区大沥城南二路大沥商业步行街北区首、二层","0757-85591322"],
["广东","佛山","海北建设大道餐厅","南海区大沥黄岐建设大道1号金沙湾财富广场首层","0757-85968018"],
["广东","佛山","大沥客运站餐厅","南海区大沥镇禅炭路汽车客运站综合大楼首、二层","0757-85517347"],
["广东","佛山","盐步餐厅","南海区大沥镇联安村东村村民小组“虎洲”地段综合楼A座首、二层","0757-81107622"],
["广东","佛山","海三路餐厅","南海区桂城叠南村圣堂618号鸿大广场首层A至002号铺","0757-86678115"],
["广东","佛山","保利水城餐厅","南海区桂城海六路与湖东路交界处保利水城东广场首、二层局部","0757-86367073"],
["广东","佛山","佛平餐厅","南海区桂城街道佛平路和桂平路交汇处(太港城广场)首层部分及夹层部分","0757-81062529"],
["广东","佛山","南海万达餐厅","南海区桂城街道桂澜北路28号南海万达广场1039B、2036B号商铺","0757-86678850"],
["广东","佛山","季华七路餐厅","南海区桂城街道桂澜南路45号怡翠世嘉购物商场一层A1至18a和二层A2至24a商铺","0757-81635792"],
["广东","佛山","南海新天地餐厅","南海区桂城街道桂平西路1号首层","0757-85133911"],
["广东","佛山","南海嘉信茂餐厅","南海区桂城南海大道南嘉信茂广场1层40、41室、2层29至31A、31D室","0757-86220311"],
["广东","佛山","广佛路餐厅","南海区黄岐广佛路125路城际大厦C座","0757-85964228"],
["广东","佛山","沙龙路餐厅","南海区九江镇沙头青平路10号首层部分","0757-81290560"],
["广东","佛山","里水餐厅","南海区里水镇夏塘路新天地广场首层","0757-85600228"],
["广东","佛山","府前餐厅餐厅","南海区罗村府前路金盛广场自编首层112、113、115号","0757-66867731"],
["广东","佛山","乐安餐厅","南海区罗村街道联星村委会上三村”北泥塘“地段","0757-81011950"],
["广东","佛山","北湖餐厅餐厅","南海区罗村街道芦塘村委会地段(地号0209151143)圣地广场至A区商业楼首层","0757-81763175"],
["广东","佛山","博爱餐厅","南海区狮山镇博爱中路61号和信商业广场一层1A013铺、二层2A015铺、三层3A013铺","0757-81168369"],
["广东","佛山","江浦餐厅","南海区西樵镇江浦东路13号金典广场1座首层A1102至A1104和二层A2","0757-86859726"],
["广东","佛山","樵高餐厅","南海区西樵镇樵高路与樵金路交汇处的综合楼首、二层","0757-86857770"],
["广东","佛山","三水广场餐厅","三水区西南张边路9号三水广场二座首层","0757-87709912"],
["广东","佛山","松岗餐厅","狮山镇松岗大道97号之一(南海益民大厦首层)首层A1号商铺","0757-85225031"],
["广东","佛山","北滘广场餐厅","顺德区北滘镇新城区BJ至A至35地块北滘广场首层及二层","0757-26602062"],
["广东","佛山","顺德-陈村顺联广场餐厅","顺德区陈村镇佛陈路顺联广场八号楼首层","0757-23816882"],
["广东","佛山","顺德客运站餐厅","顺德区大良街道办事处大门居委会南国中路与广珠公路交界处顺德汽车客运总站二层","0757-22615690"],
["广东","佛山","嘉信广场餐厅","顺德区大良街道办事处顺峰居委会兴顺路嘉信城市广场一期嘉宏台A1024","0757-22296936"],
["广东","佛山","顺峰餐厅","顺德区大良南国路永旺顺德购物中心首层","0757-22912027"],
["广东","佛山","清晖园餐厅","顺德区大良清晖路148号首层1001铺二层2000铺","0757-22219665"],
["广东","佛山","顺德-延年餐厅","顺德区大良延年路8号","0757-22617081"],
["广东","佛山","均安餐厅","顺德区均安镇三华居委会宏安路1号首层及二层","0757-25577556"],
["广东","佛山","河滨路餐厅","顺德区乐从镇乐从居委会新乐从家具城西区一座首层商铺","0757-28080590"],
["广东","佛山","勒流餐厅","顺德区勒流街道办事处勒流居委会金华路6号首层","0757-23663252"],
["广东","佛山","龙江餐厅","顺德区龙江镇盈信广场首、二层","0757-23881232"],
["广东","佛山","伦教餐厅","顺德区伦教街道办事处常教居委会振兴路38号首、二层2号铺","0757-27886180"],
["广东","佛山","顺德-容桂餐厅","顺德区容桂凤祥南路2号首层","0757-28311747"],
["广东","佛山","天佑城餐厅","顺德区容桂街道桂洲大道中63号天佑城乐购商场首层","0757-28388770"],
["广东","佛山","顺德-凤山餐厅","顺德市大良区凤山中路26号","0757-22211181"],
["广东","佛山","佛山-百花餐厅","祖庙路33号","0757-83323540"],
["广东","佛山","佛罗伦萨小镇KFC","南海区桂城街道疏港路28号并以佛罗伦萨小镇命名的零售开发项目第2层第G29/G30单元","0757-81251402"],
["广东","佛山","九江","南海区九江镇九江城区太平西路46号首层及二层","0757-81016652"],
["广东","佛山","瑶平路DT","南海区狮山镇瑶平路汇利假日酒店富弘苑5号楼","13928601593"],
["广东","佛山","三水文锋店","三水区西南街道新华路10号肯德基餐厅","0757-87709588"],
["广东","佛山","杏坛店","顺德区杏坛镇建设东路港杏豪庭首层肯德基餐厅","0757-22896665"],
["广东","佛山","府前餐厅","南海区罗村街道府前路金盛广场","0757-66867731"],
["广东","江门","恩平餐厅","恩平市东门路东门广场商业步行街西栋首层","0750-7712788"],
["广东","江门","新平餐厅","恩平市恩城新平中路19号一、二层","0750-7710097"],
["广东","江门","中远餐厅","港口一路13号中远大厦首层","0750-3161160"],
["广东","江门","鹤山大道餐厅","鹤山市鹤山大道623号沙坪鹤山碧桂园商业中心一层第1号专柜","0750-8977229"],
["广东","江门","鹤山餐厅","鹤山市沙坪镇中山路42号首、二层","0750-8883210"],
["广东","江门","江门中环餐厅","江海区东海路48号首层","0750-3831227"],
["广东","江门","江会餐厅","江会路人人乐购物广场首层","0750-3568860"],
["广东","江门","广湛餐厅","开平市开阳高速公路梁金山服务区(广湛方向)二层","0750-2766681"],
["广东","江门","湛广餐厅","开平市开阳高速公路梁金山服务区(湛广方向)二层","0750-2766669"],
["广东","江门","开平义祠车站餐厅","开平市长沙区良园路30号1幢","0750-2223611"],
["广东","江门","开平万家餐厅","开平长沙区幕沙路70号益华广场A幢首层","0750-2266906"],
["广东","江门","江门万达餐厅","蓬江区发展大道万达广场1027及2038A号","0750-3716178"],
["广东","江门","易初莲花餐厅","蓬江区建设路99号双龙广场首、二层","0750-3219180"],
["广东","江门","客运站餐厅","蓬江区建设三路139号3幢首层101、102、103铺","0750-3850382"],
["广东","江门","冈州餐厅","新会区会城世纪商业街2号1048、1064、109商铺及部分商场通道首层","0750-6611799"],
["广东","江门","江门-胜利餐厅","羊桥路1号","0750-3305365"],
["广东","江门","益华餐厅","迎宾大道116号金汇城市广场首层和负一层","0750-3929711"],
["广东","江门","开平大润发KFC","开平市东兴大道与325国道交界处东北角(即开平市东兴大道A9区)","0750-2227006"],
["广东","江门","新会华侨店","新会区会城镇中心路一号华侨大厦首层肯德基","0750-6667674"],
["广东","江门","新会大道店","新会区碧桂园凤凰酒店商业街1号大润发首层肯德基","0750-6968338"],
["广东","湛江","金沙湾广场餐厅","赤坎区观海北路8号金沙湾广场1、2、3号商住楼地下室一层商场","0759-3183299"],
["广东","湛江","兴华餐厅","赤坎区海北路11-31号兴华海之城首层","0759-3873199"],
["广东","湛江","世贸餐厅","赤坎区中山一路2号湛江世贸大厦首层","0759-3228281"],
["广东","湛江","雷州西湖餐厅","雷州市雷城西湖大道12号首层","0759-8818268"],
["广东","湛江","雷州餐厅","雷州市新城大道路20号国际广场首层","0759-8197722"],
["广东","湛江","廉江广场餐厅","廉江市广场二路永福御景城广场一、二层","0759-6323188"],
["广东","湛江","鼎盛广场餐厅","人民大道南116号负一层G1至11号商铺","0759-2233773"],
["广东","湛江","湛江-怡福餐厅","人民大道南28号怡福大厦首层","0759-2299290"],
["广东","湛江","湛江-康顺餐厅","椹川大道北376号,湛江赤坎嘉信茂广场01层05/06室和负1层","0759-3273787"],
["广东","湛江","遂溪全丰餐厅","遂溪县遂海路36号全丰中央广场首层及夹层","0759-7766880"],
["广东","湛江","港丰餐厅","吴川市解放路115号首层","0759-5554789"],
["广东","湛江","吴川-同德城餐厅","吴川市同德路万安阁首层","0759-5600088"],
["广东","湛江","城市广场餐厅","霞山区人民大道南42号国贸城市广场首层9号、2层1号商铺","0759-2296111"],
["广东","湛江","湛江-国贸餐厅","霞山区人民大道南45号湛江国际贸易大厦首层","0759-2276087"],
["广东","湛江","江霞餐厅","湛川大道1至3号首层及二层","0759-2279991"],
["广东","湛江","爱华广场KFC","跃进路36号爱华广场3号商场首层","0759-3830233"],
["广东","茂名","电白餐厅","电白县人民路7号百富广场首层","0668-5530988"],
["广东","茂名","高州餐厅","高州市文明路潘州豪庭C区第一、第二层","0668-6383968"],
["广东","茂名","官渡餐厅","光华北路东侧首层","0668-3939858"],
["广东","茂名","富丽餐厅","双山三路99号","0668-2986139"],
["广东","肇庆","德庆餐厅","德庆县德城镇龙母大街锦江花园酒店首层和二层","0758-7776176"],
["广东","肇庆","景山岗餐厅","端州三路49号首、二层","0769-2287510"],
["广东","肇庆","星湖国际餐厅","端州四路星湖国际广场首层","0758-2162212"],
["广东","肇庆","时代广场餐厅","端州五路时代广场首层","0758-2298118"],
["广东","肇庆","大旺餐厅","高新区政德街金凤凰商业广场首层A区","0758-3982061"],
["广东","肇庆","高要市金利新时代餐厅","高要市金利镇金龙大道路中段西围村委员会江边经济合作社横基","0758-8533388"],
["广东","肇庆","怀集餐厅","怀集县怀城镇商业步行街首层","0758-5533508"],
["广东","肇庆","肇庆-建设餐厅","建设三路46号","0758-2272218"],
["广东","肇庆","四会餐厅","四会市城中区龙江路1座1至4号龙城大酒店首、二层","0758-3386330"],
["广东","肇庆","清塘大道餐厅","四会市东城街道清塘大道二十三座8号(首层)S03铺","0758-3119893"],
["广东","肇庆","好世界餐厅","天宁北路63号负一层、首层","0758-2318278"],
["广东","肇庆","康乐","康乐北路9号国际新天地首、二层","0758-2897660"],
["广东","惠州","民乐福餐厅","陈江镇陈江圩甲子路88号民乐福商店首层","0752-3262176"],
["广东","惠州","大亚湾餐厅","大亚湾安惠大道64号蓝湾半岛商住楼首层、二层","0752-5585960"],
["广东","惠州","惠州世贸餐厅","河南岸新岸路1号世贸中心二层","0752-255695"],
["广东","惠州","丽日银座餐厅","花边岭南路6号一层、二层","0752-7813315"],
["广东","惠州","意生国际餐厅","惠城区江北文明一路5号意生广场首层及二层","0752-7118004"],
["广东","惠州","江北丽日餐厅","惠城区云山路和文昌路交汇处丽日购物广场江北店首层?","0752-2869946"],
["广东","惠州","陈江餐厅","惠城区仲恺五路汇佳购物广场首层","0752-3323378"],
["广东","惠州","惠东大岭餐厅","惠东县大岭街道办事处大岭镇大道2398号家家乐商场首层及二层","0752-8265002"],
["广东","惠州","惠东黄埠餐厅","惠东县黄埠镇西社李仔园(人民一路53号)首层、二层","0752-8682653"],
["广东","惠州","惠州吉隆餐厅","惠东县吉隆镇河东广汕公路边","0752-8110996"],
["广东","惠州","惠东汽车站餐厅","惠东县平山街道泰园惠东汽车客运站首层","0752-8519582"],
["广东","惠州","惠东大润发餐厅","惠东县平山镇华侨城富星路3号富星商贸广场首层","0752-8286187"],
["广东","惠州","惠东餐厅","惠东县平山镇新华路94号","0752-8818078"],
["广东","惠州","秋长餐厅","惠阳区秋长镇栅下岭市场(老镇府旁)首二层101商铺","0752-3388346"],
["广东","惠州","华贸天地餐厅","江北6号小区(文昌一路9号)华贸天地(华贸中心商城)负一层0128商铺","0752-7190092"],
["广东","惠州","南湖花园餐厅","麦科特大道南湖花园万佳百货首层","0752-2186996"],
["广东","惠州","大亚湾龙海餐厅","三亚湾西区龙海三路创新世纪花园4栋首层01、02号商铺和二层201号商铺","0752-5535513"],
["广东","惠州","水口餐厅","水口街道办事处龙湖大道298号华昌购物广场首、二层","0752-5801053"],
["广东","惠州","横江餐厅","下埔横江三路数码街首二层","0752-2112685"],
["广东","惠州","小金口餐厅","小金口街道办事处惠州大道金宝山庄商业广场首层及二层","0752-2299869"],
["广东","惠州","港汇城餐厅","演达一路港惠新天地一层及二层","0752-2586375"],
["广东","惠州","仲恺KFC","仲恺高新区20号小区146号、147号、148号首、二层商铺","0752-3191381"],
["广东","惠州","开城天虹KFC","惠阳区淡水开城大道旺东怡和家园裙房商业首层P105号商铺","0752-3588578"],
["广东","惠州","东平","惠城区东湖西路永旺惠州购物中心之一层","0752-2359202"],
["广东","惠州","利埔","惠东县平山三利埔金光大厦首层","0752-8526552"],
["广东","惠州","惠州龙门","龙门县龙城街道办城东路太平市场首、二层","0752-6530078"],
["广东","梅州","鸿都餐厅","彬芳大道鸿都商城E1栋首层","0753-2155518"],
["广东","梅州","梅州丰顺餐厅","丰顺县汤坑镇东山路2号(综合大楼)首层及二","0753-6610332"],
["广东","梅州","梅江餐厅","江南一路57号首层","0753-2279639"],
["广东","梅州","梅州-华侨城餐厅","梅县新城宪梓大道柏丽购物广场首层","0753-2500886"],
["广东","梅州","蕉岭餐厅","梅州市蕉岭县蕉城镇府前路府前综合楼R8幢13至14号铺1至3层","0753-788099"],
["广东","梅州","五华餐厅","五华县水寨镇华兴北路218号新华书店首层二层","0753-4336885"],
["广东","梅州","兴田店","兴宁市兴田一路388-422号自家人超市首层KFC餐厅","0753-3328456"],
["广东","汕尾","海丰地王餐厅","海丰县红城大道中路北侧地王广场一层及二层(东侧)","0660-6332033"],
["广东","汕尾","汕尾陆丰餐厅","陆丰市东海镇桃园区人民路口东侧第一幢大楼至郑氏大厦首层","0660-8511881"],
["广东","汕尾","陆河餐厅","陆河县河田镇陆河大道聚福苑商铺A28至A30号一层及二层","0660-5600990"],
["广东","汕尾","信利餐厅","汕尾大道信利广场首层","0660-3207866"],
["广东","汕尾","汕尾名门餐厅","汕尾大道中东北段汕尾名门御庭首层129-131号铺","0660-3823996"],
["广东","河源","河源丽日餐厅","河源大道北一号一层","0762-3100022"],
["广东","河源","河源建设大道餐厅","建设大道以北、大同路以西大地广场首、二层","0762-3100001"],
["广东","河源","广晟餐厅","兴源路广晟城市广场首层","0762-3291231"],
["广东","河源","长鸿花园餐厅","沿江东路6号长鸿花园裙楼首层","0762-3113031"],
["广东","河源","河源龙川","龙川县老隆镇人民路39至47号龙悦豪庭一层及二层","0762-6989091"],
["广东","阳江","阳西店KFC","阳西县城人民北路耀宝凯旋豪庭商住小区二期商场1楼A08号商铺","0662-5508877"],
["广东","阳江","阳江南恩","江城区南恩路136号首、二层","0662-3280028"],
["广东","阳江","天润广场","江城区东风一路11号与北环路交汇处发展广场首层、二层","0662-2271588"],
["广东","阳江","兴国","东风二路中源花园首层","0662-3363368"],
["广东","阳江","阳春大道KFC","春城街道东湖东路南侧与过境公路交汇地段盛世大厦商住楼V6V7V11V11商铺","0662-8178881"],
["广东","阳江","阳春","阳春市春城镇红旗路51号首层、二层","0662-7734088"],
["广东","阳江","三环","江城区工业一街2号一层","0662-8808300"],
["广东","阳江","北环路","江城区瑞禾路A至03至01号","0662-8806800"],
["广东","清远","连州餐厅","连州市东门中路(连州市北湖影剧院东侧建筑特)首层、二层","0763-6622778"],
["广东","清远","先锋餐厅","清城区先锋中路2号汇丰大厦首层","0763-3321331"],
["广东","清远","清新餐厅","清新县清新大道中清新文化广场西南角商铺","0763-5838799"],
["广东","清远","城市广场餐厅","先锋中路18号城市广场二、三层","0763-3333051"],
["广东","清远","赢之城餐厅","新城人民二路22号首层、二层","0763-3877115"],
["广东","清远","阳山餐厅","阳山县阳城镇北门路12号","0763-7886161"],
["广东","清远","英德太阳城","英德市和平中路太阳城广场一层及夹层","0763-2222914"],
["广东","清远","利民","英德市英城建设路东利民路南A块英州明珠广场A区首、二层","0763-2663066"],
["广东","清远","锦绣清城","清城区先锋东路东门塘2号大润发首层外租区第一号专柜","0763-3637689"],
["广东","东莞","上南路餐厅","安镇上角社区上南路71号兴濠城广场首层","0769-81660197"],
["广东","东莞","常平大道餐厅","常平镇常平大道还珠沥村地块肯德基餐厅","0769-89385756"],
["广东","东莞","东莞火车站餐厅","常平镇东莞火车站联检贸易大楼首层","0769-83820219"],
["广东","东莞","常平丽城餐厅","常平镇横江厦村丽城开发区丽景花园G区绿华庭1-5号楼裙楼一层","0769-82523629"],
["广东","东莞","常平市场路餐厅","常平镇市场路1号首层及地下1层","0769-82608169"],
["广东","东莞","长盛餐厅","大朗镇美景大道长盛广场大福源首层","0769-83031633"],
["广东","东莞","大朗客运站餐厅","大朗镇汽车客运总站一层、二层","0769-81230182"],
["广东","东莞","世博餐厅","东城区世博广场海雅百货首层","0769-22031201"],
["广东","东莞","星城餐厅","东城区星城社区学前路新世纪星城49号商业楼首层","0769-22215863"],
["广东","东莞","东泰餐厅","东城区友宜城综合市场主楼(嘉荣购物广场)首层","0769-22764176"],
["广东","东莞","常平金美餐厅","东莞市常平镇金美村金美市场侧金美综合楼广场(地块编号C-3-04)","0769-89303335"],
["广东","东莞","松佛餐厅","东莞市大朗镇松佛路368号求富路花园会所首、二层","0769-82895698"],
["广东","东莞","惠莞餐厅餐厅","东莞市大岭山镇大塘村(虎岗高速公路管理中心旁)的大岭山北侧服务区一层","0769-18028222319"],
["广东","东莞","莞惠餐厅餐厅","东莞市大岭山镇大塘村(虎岗高速公路管理中心旁)的大岭山南侧服务区一层","0769-18028222318"],
["广东","东莞","中山路餐厅","东莞市横沥镇田头村二里埔南铭购物广场","0769-82323010"],
["广东","东莞","深广餐厅","东莞市厚街镇寮厦村(广深高速公路旁)厚街北行服务区","0769-89251918"],
["广东","东莞","东进餐厅","东莞市黄江镇风情商业步行街","0769-82024472"],
["广东","东莞","康城餐厅","东莞市南城区港口大道南城康城大厦自编1003至1004号铺、2040至2045号铺","0769-23668543"],
["广东","东莞","松山湖餐厅","东莞市松山湖科技产业园区松山湖大道与工业西路交界东北角的北部商业中心","0769-23075168"],
["广东","东莞","威尼斯餐厅","东纵大道愉景威尼斯天虹百货商场首层","0769-22765258"],
["广东","东莞","雁田餐厅","凤岗镇雁田村怡安中路嘉利商贸中心","0769-87298150"],
["广东","东莞","凤岗餐厅","凤岗镇永盛大街新高购物广场首层","0769-87503590"],
["广东","东莞","白沙塘餐厅","莞城区罗沙村东纵大道地王广场E5区首层部分","0769-28825395"],
["广东","东莞","西城楼餐厅","莞城区西城楼大街小区北区1号首层","0769-23320681"],
["广东","东莞","汀山餐厅餐厅","厚街镇汀山股份经济联合社三层商贸广场首层","0769-85893949"],
["广东","东莞","虎门文化广场餐厅","虎门沙太路64号首层","0769-85246006"],
["广东","东莞","虎门乐购餐厅","虎门镇博涌管理区新涌桥地段虎门大道乐购商场首层","0769-85184741"],
["广东","东莞","虎门大宁餐厅","虎门镇大宁大板地工业区宁馨中路102号首二层","0769-82916755"],
["广东","东莞","黄河餐厅","虎门镇港口大道黄河商业城首层及负一层","0769-85225166"],
["广东","东莞","虎门银龙餐厅","虎门镇港口大道黄河商业城西门负一层","0769-85160045"],
["广东","东莞","港口餐厅","虎门镇虎门大道(原港口路)名门华轩1、2楼","0769-81611308"],
["广东","东莞","连升路餐厅","虎门镇金洲社区地标广场1001铺","0769-89361971"],
["广东","东莞","黄江环城路餐厅","黄江镇板湖村富康花园首、二层","0769-82555352"],
["广东","东莞","黄江餐厅","黄江镇江南路袁屋围大源百货首层","0769-83849922"],
["广东","东莞","汽车东站餐厅餐厅","寮步镇横坑村汽车东站客运大楼一层","0769-81008199"],
["广东","东莞","寮步欧尚餐厅","寮步镇香市路与祥富路交汇处欧尚寮步商业中心首层","0769-86056002"],
["广东","东莞","宏图餐厅","南城区宏图路中天中央广场首层A06、A07、C04、C05号铺","0769-28638636"],
["广东","东莞","宏伟餐厅","南城区宏伟路1号家乐福肯德基364号","0769-23663248"],
["广东","东莞","鸿福餐厅","南城区鸿福路90号嘉信茂广场南城B1层02室和01层/02室","0769-22241621"],
["广东","东莞","海雅餐厅","南城区鸿福路综艺广场地下一层及一层","0769-26981721"],
["广东","东莞","莞太餐厅","南城区新基莞太大道城市假日大厦二期B01、B02铺","0769-22809175"],
["广东","东莞","桥头餐厅","桥头镇文德路A座楼首层","0769-83423091"],
["广东","东莞","沙田餐厅","沙田镇东港城商住区(三期)首层","0769-88662512"],
["广东","东莞","石碣餐厅","石碣镇光明路嘉荣购物广场首层","0769-86015144"],
["广东","东莞","石龙餐厅","石龙镇红棉路1号大华广场首层","0769-86604177"],
["广东","东莞","金沙湾餐厅","石龙镇西湖三路金沙湾购物广场首层","0769-81852101"],
["广东","东莞","松湖烟雨餐厅","松山湖高新技术产业开发区新竹路7号万科松湖中心首层L1至010、L1至011号铺","0769-26620812"],
["广东","东莞","东莞塘厦餐厅","唐厦镇花园新街穗恒大厦一、二层","0769-87941992"],
["广东","东莞","天荣餐厅","塘厦镇诸佛岭村迎宾大道与环市西路交界天荣购物广场道层","0769-87281821"],
["广东","东莞","东莞总站餐厅","万江区曲海社区四环路与新万道路交汇处东莞客运总站客运大楼首层","0769-23320655"],
["广东","东莞","樟木头火车站餐厅","樟木头圩镇樟木头火车站扩建工程首层","0769-87122823"],
["广东","东莞","樟木头汽车站餐厅","樟木头镇西城路与东深公路交界处樟木头振通汽车客运站商业楼首层、二层","0769-87136675"],
["广东","东莞","樟木头餐厅","樟木头镇永宁路樟罗商业城首层","0769-87783070"],
["广东","东莞","长安万达餐厅","长安镇东门中路1号万达广场2001号铺","0769-83003687"],
["广东","东莞","东大餐厅","长安镇沙头区S358省道旁时代广场商业街首层","0769-82581828"],
["广东","东莞","长安汽车总站餐厅","长安镇长安汽车站二层","0769-82781567"],
["广东","东莞","长安长青餐厅","长安镇长青路长安商业广场二区首层","0769-81152160"],
["广东","东莞","长安钻石餐厅","长安镇长中路钻石广场一、二层","0769-85358803"],
["广东","东莞","乌沙餐厅","长安镇振安路大福源一层","0769-81769967"],
["广东","东莞","中堂东港城餐厅","中堂镇新兴路及107国道边的东港城商业综合楼(华润超级广场)首层","0769-81335278"],
["广东","东莞","东城万达店KFC","东城区东纵路208号万达广场室内步行街AD区1幢商业二层2001/2002号商铺","0769-83785387"],
["广东","东莞","官井头","凤岗镇官井头嘉辉路1号","0769-87298837"],
["广东","东莞","东莞人民南","虎门镇人民南路64号","0769-85507587"],
["广东","东莞","清溪","清溪镇香芒中路21号清溪镇居民管理区办公楼首层","0769-87306088"],
["广东","东莞","虎门中和","虎门镇博涌村博头人民北路路段综合楼1、2层","0769-85238432"],
["广东","东莞","石排","石排镇石排大道利丰城市广场步行街区首层","0769-86558169"],
["广东","东莞","厚街万达KFC","厚街镇宝屯、珊美社区万达广场1065、2055C号商铺","0769-83785532"],
["广东","东莞","现代广场餐厅","广东省东莞市石龙镇绿化路现代广场首、二层","0769-86606678"],
["广东","东莞","厚街大道店","厚街镇寮厦村厚街大道鼎盛时代广场一层S2号铺","0769-81506499"],
["广东","东莞","凯东店","大岭山镇凯东新城凯旋阁DE栋1层","0769-81881796"],
["广东","东莞","南环店","厚街镇南环路盛和广场大润发超市首层","0769-82275875"],
["广东","东莞","龙洲店","长安镇锦厦一龙路义乌商场首层肯德基餐厅","0769-85844968"],
["广东","东莞","福海店","长安镇厦岗福海路福源商场首层肯德基餐厅","0769-85472667"],
["广东","东莞","三屯店","厚街镇三屯村肯德基餐厅","0769-85750051"],
["广东","东莞","中山DT餐厅","横沥镇南铭华润广场","0769-82323080"],
["广东","东莞","厚街餐厅","厚街镇康乐南路华润超级广场首层","0769-85835085"],
["广东","东莞","华南摩尔餐厅","万江区华南MALL A区首层KFC","0769-22430001"],
["广东","东莞","元美餐厅","南城区第一国际E区首层KFC","0769-22853471"],
["广东","东莞","寮步餐厅","寮步镇神前路高力大厦首层KFC","0769-81100499"],
["广东","东莞","麻涌餐厅","麻涌镇嘉荣超市首层KFC","0769-88239068"],
["广东","东莞","中堂中新餐厅","中堂镇中新商贸广场首层KFC","0769-88896485"],
["广东","东莞","莞太假日餐厅","南城区新基莞太大道城市假日大厦(二期)B01、B02铺","0769-22809175"],
["广东","中山","中山-大信餐厅","大信南路2号大信新都汇广场首层之四","0760-8785101"],
["广东","中山","东凤餐厅","东凤镇兴华中路2号首层","0760-88361206"],
["广东","中山","东升江中餐厅","东升镇(江中方向)服务区首层","0760-89811920"],
["广东","中山","东升中江餐厅餐厅","东升镇(中江方向)服务区首层","0760-89811603"],
["广东","中山","东升餐厅","东升镇葵兴大道底层商铺","0760-22733822"],
["广东","中山","古镇大信餐厅","古镇镇曹二村大信新都汇一层","0760-86101870"],
["广东","中山","古镇餐厅","古镇中兴大道益华购物广场首层","0760-2325618"],
["广东","中山","黄圃餐厅","黄圃镇兴圃大道西33号明悦豪庭首层","0760-88772211"],
["广东","中山","太阳城餐厅","火炬开发区东镇东一路23号首层","0760-89983358"],
["广东","中山","窈窕餐厅","火炬开发区中山港大道99号金盛广场第三栋","0760-8225148"],
["广东","中山","金逸餐厅","沙溪镇富华道65号首层及二层","0760-88726009"],
["广东","中山","人民医院餐厅","石岐区孙文东路6号一楼101、102、103、105卡","0760-88838839"],
["广东","中山","君悦餐厅","石歧区悦来南路2至4号君悦广场底层","0760-8804091"],
["广东","中山","坦洲餐厅","坦洲镇坦神北路58号首层","0760-88819261"],
["广东","中山","小榄大信餐厅","小榄镇升平路新都汇小榄店首层","0760-2182122"],
["广东","中山","小榄餐厅","小榄镇新华中路8号","0760-2235190"],
["广东","中山","兴中广场餐厅","中区安栏路18号中垦广场首层","0760-88862522"],
["广东","中山","星宝","沙溪镇星宝路3星宝明珠花园77、78、77至2卡商铺","0760-87712109"],
["广东","中山","怡华","东区中山三路怡华商业中心东座首层局部","0760-88307662"],
["广东","中山","南朗","南朗镇南岐中路38号首层101号商铺","0760-89811602"],
["广东","中山","三乡店","三乡镇文昌中路170号一、二、三层肯德基","0760-86389102"],
["广东","中山","大涌店","大涌镇旗山路645号首、二层肯德基","0760-7399118"],
["广东","中山","富华店","西区富华道16号富业广场首期首层一、二号","0760-88632453"],
["广东","潮州","潮汕路餐厅","潮州市潮安区庵埠镇潮汕路与安北路交界处东南角(卜峰莲花超市首层B1A-13号铺面)","0768-5890019"],
["广东","潮州","奎元餐厅","潮州市潮州市湘桥区潮州大道中段奎元广场A区一层","0768-2398520"],
["广东","潮州","开元餐厅","潮州市潮州市湘桥区开元路188号金叶大酒店一、二层","0768-2234108"],
["广东","揭阳","池尾餐厅","揭阳市普宁市广达路美佳乐购物广场一、二层","0663-2919528"],
["广东","揭阳","服装城餐厅","揭阳市普宁市国际服装城客运站大楼南一、二层店面","0663-2903552"],
["广东","揭阳","普宁商品城汽车穿梭餐厅","揭阳市普宁市国际商品城南侧12栋铺面","0663-2669664"],
["广东","揭阳","中华新城餐厅","揭阳市普宁市流沙大道中华新城1幢111-118号","0663-2258088"],
["广东","揭阳","吉祥里餐厅","揭阳市普宁市占陇镇镇区广汕公路南侧汇润-吉祥里商业楼正面商铺","0663-2330061"],
["广东","揭阳","黄岐餐厅","揭阳市榕城区东山区黄岐山大道以东环市北路以南易初莲花超市一、二层","0663-8227870"],
["广东","揭阳","揭阳临江北餐厅","揭阳市榕城区东山区仁港商业街肯德基餐厅","0663-8527780"],
["广东","揭阳","临江餐厅","揭阳市榕城区临江路以南、榕华大道以东、江南新城二期易初莲花购物中心首层","0663-8657780"],
["广东","揭阳","进贤店","榕城区进贤商业步行街E幢1层肯德基餐厅","0663-8665770"],
["福建","福州","福州火车南站餐厅","福州市仓山区福州南站0.0米出站换乘层编号001铺位","0591-63116099"],
["福建","福州","金达餐厅","福州市仓山区建新镇金榕路与金达路交叉口东南角金榕大润发一层1号商铺","0591-83603091"],
["福建","福州","金山餐厅","福州市仓山区建新镇金山大道100号金山文恒文园1层05店面","0591-88261275"],
["福建","福州","金桔餐厅","福州市仓山区金山街道桔园四路33号永辉金山生活中心一、二层01商场","0591-83958920"],
["福建","福州","浦上万达餐厅","福州市仓山区浦上大道272号仓山万达广场A5#一层161、163、166、168商铺及二层01酒楼东侧","0591-87717062"],
["福建","福州","福清万达餐厅","福州市福清市音西街道万达广场商业一层1025B、商业二层2030号商铺","0591-85222505"],
["福建","福州","省府餐厅","福州市鼓楼区八一七北路68号二层中区","0591-83618533"],
["福建","福州","东街口餐厅","福州市鼓楼区东街街道八一七北路101号","0591-87502334"],
["福建","福州","津泰餐厅","福州市鼓楼区津泰高节路20号","0591-87524084"],
["福建","福州","安泰餐厅","福州市鼓楼区津泰路津泰新村19号一、二层","0591-83620255"],
["福建","福州","帝豪餐厅","福州市鼓楼区五四路151号华都大厦(宏运帝豪国家大厦)一、二层店面","0591-87855050"],
["福建","福州","江滨餐厅","福州市鼓楼区杨桥西路128号","0591-83773042"],
["福建","福州","火车站餐厅","福州市晋安区茶园街道福州火车站前地广场","0591-87596663"],
["福建","福州","火车站二餐厅","福州市晋安区华林路502号福州火车站二楼出发层","0591-83565067"],
["福建","福州","世欧广场餐厅","福州市晋安区晋连路18号福州世欧广场A3-1-21东侧、A3-2-17北侧、A3-2-18北侧、A3-2-19北侧、A3-2-10、A3-2-2","0591-83611525"],
["福建","福州","秀峰餐厅","福州市晋安区新店镇秀峰路188号闽台广告创意产业园沿秀峰路1层1-3/1号铺及2层","0591-87951291"],
["福建","福州","龙芝餐厅","福州市连江县凤城镇金安西路2号龙芝国际商业广场一层(6店面)二层(2店面)","0591-26220510"],
["福建","福州","莲荷餐厅","福州市连江县凤城镇莲荷东路1号楼一层四五六七号店面二层207.208号","0591-26211213"],
["福建","福州","金融街万达餐厅","福州市台江区鳌江路8号福州金融街万达商业广场一层(1-70)、二层(2-1)","0591-87882323"],
["福建","福州","宝龙餐厅","福州市台江区工业路193号宝龙城市广场一、二层店面","0591-83802658"],
["福建","福州","新利生餐厅","福州市台江区后洲街道玉环路10号中亭改造利生苑连体部分1层132店面、2层303、304店面","0591-83252152"],
["福建","福州","茶亭餐厅","福州市台江区南八一七中路235号(2015.1.30)","0591-87111280"],
["福建","福州","万象餐厅","福州市台江区上海街道西环中路691号万象商业广场一层104号商铺、二层J03A号商铺","0591-83217670"],
["福建","福州","元洪餐厅","福州市台江区台江路107号元洪锦江二期一、二层店面","0591-83279502"],
["福建","福州","五一南餐厅","福州市台江区五一南路1号联信大厦1层","0591-83260120"],
["福建","福州","紫阳餐厅","福州市长乐北路王庄大润发一层商店街1号铺(2015.2.8)","0591-62751817?"],
["福建","福州","长山湖餐厅","福州市长乐市航城街道龙门村长山湖广场一、二层","0591-28822260"],
["福建","福州","长乐餐厅","福州市长乐市建设路和胜利路交叉路口一、二层店面","0591-28822250"],
["福建","福州","金峰餐厅","福州市长乐市金峰镇胪峰大道永康广场一、二层","0591-28672250"],
["福建","福州","福港餐厅","福州市长乐市漳港街道长乐国际机场内候机楼J2-1地块","0591-28012772"],
["福建","福州","罗源司前店KFC","罗源县凤山镇司前路1号一、二层","18050406660"],
["福建","福州","君竹餐厅","马尾区君竹路43号金海商贸中心一层肯德基","0591-83681571"],
["福建","福州","解放店","闽清县解放大街华侨城东区1层(2007.9.28)","0591-22336588"],
["福建","福州","师大店","福州市仓山区首山路7号滨海嘉年华H区一、二层(2009.9.1)","0591-88037775"],
["福建","福州","先施店","福州市五一北路188号一、二层(2005.9.10)","0591-83307770"],
["福建","福州","福峡餐厅","仓山区城门镇三角埕地块新天宇城市广场一、二层","13489008513"],
["福建","福州","秀峰餐厅","秀峰路闽台广告创意园肯德基餐厅","0591-87951291"],
["福建","厦门","绿苑新城餐厅","厦门市海沧区沧林东路278号绿苑新城天虹商场一层","0592-6892891"],
["福建","厦门","阿罗海汽车穿梭餐厅","厦门市海沧区行政中心南侧滨湖路阿罗海城市广场一层、二层","0592-6896296"],
["福建","厦门","未来海岸餐厅","厦门市海沧区嵩屿海景城未来海岸居住区四区蓝月湾一期商业中心","0592-6895536"],
["福建","厦门","海沧新阳餐厅","厦门市海沧区新盛路19号2号楼一号店面一、二层","0592-6892692"],
["福建","厦门","机场餐厅","厦门市湖里区高崎国际机场3号候机楼二层国际出发厅","0592-5701556"],
["福建","厦门","高崎机场餐厅","厦门市湖里区高崎国际机场3号候机楼一层","0592-6020501"],
["福建","厦门","金湖路餐厅","厦门市湖里区金湖路101号厦门五缘湾乐都汇购物中心一、二层","0592-5781874"],
["福建","厦门","枋湖车站餐厅","厦门市湖里区金湖路5号枋湖公共交通枢纽中心2层","0592-5790631"],
["福建","厦门","吕岭路餐厅","厦门市湖里区吕岭路1068号东百蔡塘广场1层A18C和2层A18号","0592-6301258"],
["福建","厦门","湖里万达广场餐厅","厦门市湖里区万达商业广场一层","0592-2366028"],
["福建","厦门","闽南印象汽车穿梭餐厅","厦门市湖里区祥店刘厝旧村E地块","0592-5250196"],
["福建","厦门","兴隆餐厅","厦门市湖里区兴隆路交叉口大唐世家一期G栋","0592-5711163"],
["福建","厦门","厦门北站餐厅","厦门市集美区后溪镇厦门北站候车层三楼903号商铺,到达层一楼B14号商铺","0592-6771741"],
["福建","厦门","乐海餐厅","厦门市集美区集美乐海路23号,厦门嘉庚快速公交枢纽站边二楼","0592-6385016"],
["福建","厦门","集美餐厅","厦门市集美区集美石鼓路88号1楼","0592-6065617"],
["福建","厦门","集美杏东餐厅","厦门市集美区杏东路11号之11~之32一层","0592-6079559"],
["福建","厦门","瑞景汽车穿梭餐厅","厦门市思明区洪文居住区瑞景商业广场一层A-01及二层A-01","0592-5917155"],
["福建","厦门","后滨餐厅","厦门市思明区后滨路12号1号楼一层(A-102)、二层(A-201)","0592-2202135"],
["福建","厦门","滨北餐厅","厦门市思明区湖滨北路海湾新城一期一号楼一层","0592-5085660"],
["福建","厦门","富山餐厅","厦门市思明区湖滨南路398号厦门富山国际展览城1层","0592-5162533"],
["福建","厦门","东方时代餐厅","厦门市思明区湖滨西路111-112号东方时代广场1-2层","0592-2286975"],
["福建","厦门","汇腾餐厅","厦门市思明区嘉禾路323号汇腾大厦1层","0592-5206701"],
["福建","厦门","SM城市广场餐厅","厦门市思明区嘉禾路SM城市广场1层","0592-5551270"],
["福建","厦门","华天餐厅","厦门市思明区嘉禾路西北侧华天花园二期C幢华天港澳台购物中心一层","0592-5135375"],
["福建","厦门","明发餐厅","厦门市思明区嘉禾路与莲前路交叉口东北侧明发商业广场一楼","0592-2928900"],
["福建","厦门","嘉禾餐厅","厦门市思明区莲坂“阳光世纪广场”一、二层","0592-5112216"],
["福建","厦门","莲前东路餐厅","厦门市思明区莲前东1198号厦门快速公交前埔枢纽站二层","0592-5970351"],
["福建","厦门","鼓浪屿餐厅","厦门市思明区龙头路8号三友旅游城1F","0592-2066718"],
["福建","厦门","世贸餐厅","厦门市思明区厦禾路878-888号世贸中心1-2层","0592-5855195"],
["福建","厦门","罗宾森广场餐厅","厦门市思明区厦禾路885号罗宾森广场一层","0592-5850313"],
["福建","厦门","美仁宫餐厅","厦门市思明区厦禾路规划42号A地块南侧美仁广场","0592-2020635"],
["福建","厦门","梧村汽车站餐厅","厦门市思明区厦禾路梧村汽车站改扩建工程一层","0592-5801182"],
["福建","厦门","育秀餐厅","厦门市思明区厦门市工人体育馆中心广场地下一层","0592-5339559"],
["福建","厦门","旺宝荣餐厅","厦门市思明区思明南路158号旺宝荣商厦1层","0592-2077975"],
["福建","厦门","华侨餐厅","厦门市思明区中山路5-17号","0592-2107960"],
["福建","厦门","城南餐厅","厦门市同安区同安城南路21-23号大唐世家三期一层","0592-7101696"],
["福建","厦门","西安广场餐厅","厦门市同安区西安路东侧西安广场二期商住楼1层","0592-7316001"],
["福建","厦门","翔安马巷餐厅","厦门市翔安区马巷镇巷南路翔安商业广场一、二层","0592-7065608"],
["福建","厦门","汇景新城餐厅","厦门市翔安区祥福五里23号汇景新城中心一层","0592-7270750"],
["福建","厦门","厦门北站二店精选店KFC","厦门北站到达层CZ-01号点位","0511-88890127"],
["福建","厦门","美仁宫餐厅","厦禾路规划42号A地块南侧美仁广场KFC","0592-2020635"],
["福建","厦门","滨南店","厦门市思明区湖滨南路与白鹭洲路交叉口百脑汇科技大厦1-2层肯德基","0592-2203646"],
["福建","厦门","湖里店","厦门市湖里区海天路37-51号一层","0592-5620882"],
["福建","厦门","长浩店","厦门市湖里区高殿村寨上社第二工业区11#综合楼一、二层A区","0592-5680586"],
["福建","厦门","杏林店","厦门市集美区杏林杏西路1号","0592-6216353"],
["福建","厦门","演武店","厦门市思明区演武路4号富万邦商业广场1楼","0592-2087463"],
["福建","厦门","曾厝垵餐厅","思明区滨海街道曾厝垵村海鲜坊3号楼二、三层KFC","0592-2099571"],
["福建","厦门","莲前东路餐厅","莲前东路厦门快速公交(BRT)前埔枢纽站二层(厦门新华都购物广场前埔店二层208#商铺 KFC)","0592-5970351"],
["福建","莆田","梅园餐厅","莆田市城厢区龙办下磨居委会梅园路三信花园一层","0594-2262212"],
["福建","莆田","大唐餐厅","莆田市城厢区胜利路与文献路交汇处大唐广场2#楼一层","0594-2285051"],
["福建","莆田","新文献餐厅","莆田市城厢区文献路大唐广场一二层店面","0594-2223601"],
["福建","莆田","莆田万达餐厅","莆田市城厢区霞林街道荔华华东大道8号万达广场2058号","0594-2371177"],
["福建","莆田","赤港餐厅","莆田市涵江区福泉高速公路莆田镜内赤港服务区A道","0594-3760029"],
["福建","莆田","涵华餐厅","莆田市涵江区涵华西路3号一、二层","0594-3861902"],
["福建","莆田","东园餐厅","莆田市荔城区北大路与东园路交叉口正荣时代广场9号楼一层","0594-2201177"],
["福建","莆田","金鼎餐厅","莆田市荔城区文献东路北侧三信金鼎广场西南面一、二层店面","0594-2330953"],
["福建","莆田","客运中心餐厅","莆田市荔城区文献东路莆田客运枢纽中心一层、二层","0594-2220169"],
["福建","莆田","帝宝汽车穿梭餐厅","莆田市荔城区镇海街道镇海村汉庭花园C区1-3层","0594-2371237"],
["福建","莆田","仙游餐厅","莆田市仙游县鲤城街道洪桥街八二五大街359号一层","0594-8265535"],
["福建","莆田","莆田车站餐厅","莆田市秀屿区莆田站一楼候车厅编号为01铺位","0594-2155110"],
["福建","莆田","鲤中店KFC","仙游县鲤中片区鲤中电影院一层","15860043100"],
["福建","三明","徐碧餐厅","三明市梅列区梅列区乾龙新村350棟02、03商铺","0598-8607555"],
["福建","三明","会展餐厅","三明市三元区新市中路235号(会展中心)一层","0598-8587557"],
["福建","三明","诚上广场餐厅","三明市永安市含笑大道1188号诚上广场大润发一层商店街1号铺","0598-3868206"],
["福建","三明","佳洁餐厅","三明市永安市燕江南路1229号佳洁广场一层","0598-3867686"],
["福建","三明","南门餐厅","三明市永安市燕江中路518号","0598-3619577"],
["福建","三明","尤溪水东餐厅","三明市尤溪县城关镇水东新城滨河大道与东一路交叉口东北角一层","0598-6306916"],
["福建","三明","永星国际店KFC","梅列区东新4路永星国际广场大润发超市一层商店1号铺","0598-8217809"],
["福建","三明","沙县府前餐厅","沙县府前路民发商业广场一二层(2007128)","0598-5851288"],
["福建","泉州","中俊餐厅","丰泽区安吉南路69号中骏财富广场一层147号店面,二层229号店面(2014.5.23)","0595-27351630"],
["福建","泉州","浔南餐厅","泉州市德化县兴南街瓷都世纪城一、二层","0595-23558880"],
["福建","泉州","泉州万达餐厅","泉州市丰泽区宝洲路浦西万达广场三楼","0595-28209059"],
["福建","泉州","丰泽餐厅","泉州市丰泽区丰泽商城1层","0595-22182032"],
["福建","泉州","刺桐餐厅","泉州市丰泽区湖心街和刺桐路交叉路口(原汽车城)","0595-28068228"],
["福建","泉州","迎宾汽车穿梭餐厅","泉州市丰泽区经济技术开发区诺林商城一号楼一,二层","0595-28129278"],
["福建","泉州","中心车站餐厅","泉州市丰泽区泉秀路中段北侧泉州客运中心站站前广场西侧圆形店面","0595-28989810"],
["福建","泉州","泉州火车站精选餐厅","泉州市丰泽区泉州站二楼侯车厅编号为212及213的铺位","0595-22354630"],
["福建","泉州","泉州火车站餐厅","泉州市丰泽区泉州站一楼侯车厅编号为003的铺位","0595-22835803"],
["福建","泉州","螺城餐厅","泉州市惠安县八二三东路90号惠台商也城内A栋西侧一、二层","0595-87368772"],
["福建","泉州","惠兴餐厅","泉州市惠安县螺城镇中山北路惠兴街北侧第二、四座商场一、二层","0595-87395728"],
["福建","泉州","惠安世纪餐厅","泉州市惠安县世纪大道西侧达利世纪酒店商业中心1号铺","0595-87203972"],
["福建","泉州","安海餐厅","泉州市晋江市安海镇东鲤中心区1号居住小区(成功东路)一层","0595-85759879"],
["福建","泉州","晋江宝龙餐厅","泉州市晋江市宝龙城市广场项目地下一层的编号为B001-1商铺以及一层的编号为1002的商铺","0595-82915360"],
["福建","泉州","陈埭餐厅","泉州市晋江市陈埭镇大乡村中国(晋江)鞋业城内(航洲百货一层)","0595-85168726"],
["福建","泉州","泉安路汽车穿梭餐厅","泉州市晋江市池店镇东山村泉安路俊德大厦(长荣加油站附属楼)一层101,二层202","0595-82967263"],
["福建","泉州","龙池路汽车穿梭餐厅","泉州市晋江市池店镇龙池路万商汇商业广场2号楼一至四号店面一至三层","0595-82007253"],
["福建","泉州","阳光餐厅","泉州市晋江市青阳阳光中路阳光百货1层","0595-85609826"],
["福建","泉州","晋江万达餐厅","泉州市晋江市世纪大道888号万达广场2层201号","0595-82156668"],
["福建","泉州","晋江英林餐厅","泉州市晋江市英林镇英龙中路46-50号一,二层","0595-85477711"],
["福建","泉州","泉州安吉路餐厅","泉州市洛江区安吉路大润发广场一层","0595-28278595"],
["福建","泉州","金街餐厅","泉州市南安市柳城街道枊城办事处成功街中段南侧东方伟业(南安)城市广场一、二层","0595-86868690"],
["福建","泉州","成功餐厅","泉州市南安市溪美街道溪美镇新华街成功大厦1层","0595-86376155"],
["福建","泉州","德辉餐厅","泉州市石狮市曾坑德辉开发区德辉广场1层","0595-88719456"],
["福建","泉州","皇冠餐厅","泉州市石狮市湖滨长福东港路602号石狮电信大厦一层","0595-88752970"],
["福建","泉州","晋江城市广场","晋江市罗山街道福埔开发区SM城市广场一层、负一层","0595-88157708"],
["福建","泉州","朴里一店KFC","晋江市泉厦高速公路朴里服务区跨线大楼A区二层","0595-85026973"],
["福建","泉州","凤城店","泉州市安溪凤城八三一路三远商城4座1-2层肯德基","0595-26006699"],
["福建","泉州","大洋洲店","泉州市泉秀路与温陵路交叉口福华商厦大洋百货1层肯德基","0595-22990927"],
["福建","泉州","百源店","泉州市打锡街中旅商场1楼肯德基","0595-22190269"],
["福建","泉州","逸涛店","泉州市泉港泉五路逸涛购物广场1楼肯德基","0595-87992756"],
["福建","泉州","东街店","泉州市鲤城区东街钟楼百货1层肯德基","0595-22272943"],
["福建","泉州","安溪宝龙店","泉州市安溪县建安大道城市宝龙广场1层肯德基","0595-28229773"],
["福建","泉州","泉州嘉信茂餐厅","江滨北路嘉信茂广场一层肯德基","0595-22188823"],
["福建","漳州","角美汽车穿梭餐厅","漳州市龙海市角美镇石厝村丰泰财富广场第3幢1-4号","0596-6788821"],
["福建","漳州","龙海紫崴餐厅","漳州市龙海市石码镇紫崴路44号建发美一城,时代广场店铺一、二楼","0596-6567701"],
["福建","漳州","漳州万达广场餐厅","漳州市龙文区九龙大道以东、建元路以南万达广场2--32号","0596-2869603"],
["福建","漳州","丹霞餐厅","漳州市芗城区丹霞路嘉信茂广场一,二层","0596-2925299"],
["福建","漳州","银都餐厅","漳州市芗城区延安北路6号银都大厦1层","0596-2057155"],
["福建","漳州","水仙汽车穿梭餐厅","漳州市芗城区芝山镇诗浦村","0596-2687205"],
["福建","漳州","龙湖餐厅","漳州市漳浦县绥安镇龙湖路中段城市广场3号楼一、二层","0596-3220722"],
["福建","漳州","西大街餐厅","漳州市漳浦县绥安镇麦市街与西大街交汇处万新西湖商业分公司","0591-3111685"],
["福建","漳州","新华北店","新华北路与漳福路交汇处东北角地块冠城国际1层","0596-2956172"],
["福建","南平","建阳金茂餐厅","南平市建阳市童游街道东桥东路87号金茂广场1号楼一层101、二层201","0599-5500622"],
["福建","南平","昭武餐厅","南平市邵武市小东门路邵武国际广场一、二层","0599-6323396"],
["福建","南平","武夷山度假区餐厅","南平市武夷山市度假区(武夷风情商苑)北区栋1层N1-3、05-09","0599-5267773"],
["福建","南平","滨江餐厅","南平市延平区人民路1号","0599-8860771"],
["福建","南平","浦城兴华餐厅","浦城县兴华路与南浦北路交叉口西南侧永晖豪布斯卡商贸综合体一层108-113铺KFC","15659119702"],
["福建","龙岩","龙岩万达餐厅","龙岩市新罗区龙岩大道与双龙路交界处的万达广场商业一层1052B","0597-2660603"],
["福建","龙岩","万阳餐厅","龙岩市新罗区龙岩大道与西陂路交界口万阳城大润发1号店面","0597-5389289"],
["福建","龙岩","麒丰店","龙岩市中山路麒丰商厦1-2层肯德基","0597-2234880"],
["福建","龙岩","中兴店","龙岩市新罗区闽西交易城中心广场北侧新发现国际广场半地下一层、一层肯德基","0597-2220019"],
["福建","宁德","环城餐厅","宁德市福鼎市太姥豪庭A栋一层","0593-7810828"],
["福建","宁德","福鼎中汇餐厅","宁德市福鼎市天湖路中汇广场13号楼1至2层(商铺号:1030、1031、1032、1033、1034、2093)","0593-7805739"],
["福建","宁德","蕉城餐厅","宁德市蕉城区八一五西路一号东方国际大饭店一、二层","0593-2810333"],
["福建","宁德","宁德万达餐厅","宁德市蕉城区天湖东路1号万达广场室内步行街二楼257B,258号","0593-2519111"],
["福建","宁德","霞浦太康店KFC","霞浦县新城区九龙商业街东侧、福宁区G2-01号东方伟业城市广场5#一层21、22、23、24、1F-34、35商铺","15892133352"],
["福建","宁德","蕉城餐厅","八一五西路一号东方国际大饭店肯德基餐厅","0593-2810333"],
["江西","南昌","火炬大街餐厅","高新技术开发区高新六路116号","18079177016"],
["江西","南昌","旺众餐厅","南昌市东湖区胜利路155号","0791-86769682"],
["江西","南昌","上海南路","江西省南昌市上海南路KFC","0791-88232125"],
["江西","南昌","大众","南昌市叠山路287号","0791-86836791"],
["江西","南昌","新建欧尚店","新建县欧尚百货一楼KFC","0791-83733762"],
["江西","南昌","京东店KFC","南昌市高新大道8号大润发","0791-88151147"],
["江西","南昌","南京西路","南昌市南京西路277号","0791-86397740"],
["江西","南昌","进贤天虹店KFC","进贤县民和镇胜利南路天集商业广场天虹商场1-2层","0791-87197525"],
["江西","南昌","红谷大道店KFC","红谷中大道红谷中心区B9\10\11地块绿地中央广场负1层","0791-6397740"],
["江西","南昌","南昌高铁店KFC","红谷滩新区红角洲片区南部南昌西站","0791-83957830"],
["江西","南昌","中山城","东湖区中山路177号","0791-86733264"],
["江西","南昌","金沙大道店KFC","金沙大道以东、雄西河以西天虹商场1-2层","0791-85251535"],
["江西","南昌","丽华","南昌市孺子路1号丽华百货大楼一层","0791-86219069"],
["江西","南昌","财富广场","江西省南昌市八一广场北侧财富广场一层及夹层","0791-86212115"],
["江西","南昌","万寿宫","南昌市胜利路4号","0791-86617747"],
["江西","南昌","玉河","南昌市青云谱区解放西路81-83号","0791-88212070"],
["江西","南昌","站前广场","南昌市洛阳路70号1-2层","0791-88281168"],
["江西","南昌","莲塘","南昌县莲塘镇五一路308号1-2层","0791-85980026"],
["江西","南昌","新洪大","南昌市洪城路588号1-2层","0791-86731051"],
["江西","南昌","南昌长运","南昌市广场南路118号","0791-86263990"],
["江西","南昌","新八一","南昌市八一大道296号","0791-86266239"],
["江西","南昌","昌北机场","南昌市昌北机场新航站楼1层肯德基餐厅","0791-83961572"],
["江西","南昌","经开","江西省南昌市榴云路与枫林大道交汇处KFC工地","0791-83895832"],
["江西","南昌","城南路","南昌县莲塘城南路贵都国际花城大润发超市一楼","0791-85723096"],
["江西","南昌","青云谱","江西省南昌市井冈山大道悦达家乐福国际购物广场一楼","0791-85215517"],
["江西","南昌","会展路","南昌市红谷滩万达广场会展KFC","0791-83807610"],
["江西","南昌","新百花洲","南昌市西湖区中山路以南西湖路以东地王大厦一层","0791-86299725"],
["江西","南昌","梦时代","南昌市北京东路恒茂梦时代国际广场三号楼一层","0791-88151856"],
["江西","南昌","江铃餐厅","三店西路488号一层KFC","0791-85210454"],
["江西","南昌","新上海路","上海北路203号","0791-88197172"],
["江西","南昌","蓝天碧水餐厅","东湖区青山南路118号一层","0791-86861250"],
["江西","南昌","广北餐厅","东湖区八一大道万达购物广场地上一层","0791-86271893"],
["江西","南昌","洪城大厦","北京西路134号肯德基","0791-86259759"],
["江西","景德镇","广场南路","景德镇市广场南路东西侧开门子购物广场1-2层","0798-8272117"],
["江西","景德镇","瓷都新天地","景德镇市珠山东路人民广场旁瓷都新天地","0798-8209908"],
["江西","景德镇","瓷都餐厅","珠山中路30号景德商厦一层KFC","0798-8290675"],
["江西","景德镇","新厂时代餐厅","新厂西路499号1-2层KFC","0798-8443180"],
["江西","萍乡","萍乡北桥","萍乡市安源区雅天购物公园一楼","0799-6883678"],
["江西","萍乡","萍乡文化路","萍乡市文化路步行街1号","0799-6335899"],
["江西","萍乡","萍乡跃进北路","江西省萍乡市安源区跃进北路118号","0799-6332508"],
["江西","九江","九瑞大道","九江市九瑞大道大润发超市1层肯德基","0792-8190383"],
["江西","九江","浔阳路","九江市浔阳路356-3号","0792-8233031"],
["江西","九江","都昌东风大道店KFC","都昌县东风大道万宜生活广场1-2层","0792-5212908"],
["江西","九江","九江八里湖店KFC","八里湖新区九方商场一层L130-131","0792-8777483"],
["江西","九江","九江庐山店KFC","牯岭镇正街17号楼(原正街1111号春风商场)1-2层","0792-8281797"],
["江西","九江","九江信华","九江市大中路信华城市广场(西园片区)","0792-8119427"],
["江西","九江","十里","九江市十里大道156号(原十里百货大楼)1-2层","0792-8253985"],
["江西","新余","胜利","胜利北路1号","0790-6205690"],
["江西","新余","新余抱石","江西省新余市团结北路与公园北路西北角高能广场(沃尔玛购物中心)一二层肯德基餐厅","0790-6217489"],
["江西","鹰潭","永盛","鹰潭市时代广场1层和2层","0701-6222886"],
["江西","赣州","赣州蓉江路店KFC","赣州市南康区蓉江路国光购物广场一、二楼","0797-6501066"],
["江西","赣州","赣州登峰大道店KFC","章江新区华润中心万象城B02号商铺","0797-5169890"],
["江西","赣州","金钻广场","江西省赣州市八一四路金钻广场10号楼1层","0797-8162286"],
["江西","赣州","赣州启德","赣州市文清路62号","0797-8287995"],
["江西","赣州","新南门广场","江西省赣州南门广场百货大楼一楼KFC","0797-8278328"],
["江西","赣州","翠微路","江西赣州翠微路中航城一楼KFC","0797-8085171"],
["江西","赣州","赣州西门","江西省赣州市红旗大道94号国际时代广场大润发商场一层","0797-5558936"],
["江西","吉安","文山","江西省吉安市阳明东路22号一层","0796-8291888"],
["江西","吉安","吉安人民广场","吉安市广场西路新世界广场1楼","0796-8210388"],
["江西","吉安","吉安庐陵","江西省吉安县文山路与凤凰路交汇处(庐陵商业大世界)KFC店","0796-8591888"],
["江西","宜春","宜春宜丰新昌餐厅","宜丰县新昌大道南侧红商城","13607911015"],
["江西","宜春","袁州","宜春市东风大街246号","0795-3912161"],
["江西","抚州","抚州金巢店KFC","青云峰路14号凤凰城三期C幢1层","0794-2166626"],
["江西","抚州","抚州临川","抚州市大公路81号","0794-8288559"],
["江西","抚州","钧天","江西省抚州市赣东大道68号1-2层","0794-8623336"],
["江西","抚州","抚州融旺","江西省抚州市赣东大道345号融旺国际公馆1-2层","0794-8302118"],
["江西","抚州","东乡龙山","东乡县龙山路中央购物公园","0794-4223773"],
["江西","上饶","婺源朱子街店KFC","上饶市婺源县星江路朱子街泽联汇一二层","0793-7399033"],
["江西","上饶","上饶庆丰店KFC","信州区铁路新村范围庆丰路50号明珠商业广场1、2层","0793-8261178"],
["江西","上饶","解百","上饶市信州区解放路1号一层","0793-8230759"],
["江西","上饶","广丰月兔广场","江西省上饶市广丰月兔广场北侧新天地东侧时代购物广场1-2层","0793-2630289"],
["江西","上饶","亿升","上饶市赣东大道亿升广场","0793-8313522"],
["江西","上饶","饶州","鄱阳县建设路17号国宴洲际酒店一二层","0793-6266272"],
["江西","上饶","乐平翥山","乐平市翥山西路40号(邮政局旁)","0798-6824542"],
["江西","上饶","玉山新建南路","江西省上饶市玉山县解放中路人民商场一楼KFC店","0793-2236100"],
["江西","上饶","弋阳汇金","江西省上饶市弋阳县胜利路汇金广场一楼KFC店","0793-5909977"],
["辽宁","沈阳","翟家餐厅","沈阳市经济技术开发区沈辽路141号1门","024-66810309"],
["辽宁","沈阳","沙岭餐厅","沈阳市沙岭路2号","024-31267596"],
["辽宁","沈阳","铁西广场餐厅","铁西区建设大路158号万象汇L596","024-85619630"],
["辽宁","沈阳","长江餐厅","辽宁沈阳 沈阳市皇姑区长江街60号甲","024-86083867"],
["辽宁","沈阳","冠芳园餐厅","辽宁沈阳市和平区文化路48甲19号","024-23945109"],
["辽宁","沈阳","泛华餐厅","辽宁沈阳市浑南新区沈营路7号","024-23669131"],
["辽宁","沈阳","仙女湖餐厅","辽宁沈阳市铁西区沈辽西路17号","024-31087298"],
["辽宁","沈阳","富民餐厅","辽宁省沈阳沈阳市富民街与沈水路交叉口东南角","024-24587035"],
["辽宁","沈阳","亚欧餐厅","辽宁省沈阳市大东区东顺城街(亚欧宾馆一楼)","024-24892090"],
["辽宁","沈阳","步阳餐厅","辽宁省沈阳市东陵区文化东路8号","024-24241070"],
["辽宁","沈阳","马路湾餐厅","辽宁省沈阳市和平区光荣街10号","024-83218650"],
["辽宁","沈阳","塔湾餐厅","辽宁省沈阳市皇姑区塔湾街28号","024-86718022"],
["辽宁","沈阳","小什字街餐厅","辽宁省沈阳市沈阳市大东区沈阳大悦城D-B108+D105+D106号商铺","024-24361182"],
["辽宁","沈阳","迎春餐厅","辽宁省沈阳市沈阳市苏家屯区迎春街与香杨路交汇处","024-89139035"],
["辽宁","沈阳","景星餐厅","辽宁省沈阳市铁西区兴华南街58号","024-25420293"],
["辽宁","沈阳","沈阳市北辰餐厅","沈河区惠工街169号","024-88503962"],
["辽宁","沈阳","北海餐厅","沈阳市大东区北海街89号三门四门","024-31996311"],
["辽宁","沈阳","北顺城餐厅","沈阳市大东区北顺城路184号","024-88512293"],
["辽宁","沈阳","大北关街餐厅","沈阳市大东区大北关街70号","024-88520789"],
["辽宁","沈阳","东北大马路餐厅","沈阳市大东区东北大马路113号","024-31925017"],
["辽宁","沈阳","新光餐厅","沈阳市大东区东陵西路与新东二街交汇处","024-31988197"],
["辽宁","沈阳","东顺城餐厅","沈阳市大东区东顺城街152号","024-24847099"],
["辽宁","沈阳","龙之梦餐厅","沈阳市大东区滂江街22号","024-31982323"],
["辽宁","沈阳","小北关街餐厅","沈阳市大东区小北关街33号","024-88523209"],
["辽宁","沈阳","中街新玛特餐厅","沈阳市大东区小东路2号地下一层","024-24322502"],
["辽宁","沈阳","白塔堡餐厅","沈阳市东陵区白塔堡沈营大街587号上亿广场一楼","024-31920995"],
["辽宁","沈阳","机场T3餐厅","沈阳市东陵区机场路166号桃仙国际机场T3航站楼2楼","024-31929711"],
["辽宁","沈阳","群升餐厅","沈阳市东陵区文化路149号","024-24587572"],
["辽宁","沈阳","皇寺广场餐厅","沈阳市和平区哈尔滨路西二号","024-23477428"],
["辽宁","沈阳","总统餐厅","沈阳市和平区和平北大街65号总统大厦A座一层","024-22812763"],
["辽宁","沈阳","城市广场餐厅","沈阳市和平区南京街城市广场1座","024-23342275"],
["辽宁","沈阳","南五餐厅","沈阳市和平区南京南街78号","024-23521961"],
["辽宁","沈阳","万象城餐厅","沈阳市和平区沈阳华润中心万象城 B111 号商铺","024-23955310"],
["辽宁","沈阳","沈阳站前餐厅","沈阳市和平区胜利南街2号","024-23417841"],
["辽宁","沈阳","沈阳站西餐厅","沈阳市和平区胜利南街2号(西广场候车室内)","024-31986195"],
["辽宁","沈阳","爱思开餐厅","沈阳市和平区胜利南街61号","024-31907039"],
["辽宁","沈阳","民族餐厅","沈阳市和平区太原南街2号","024-23581852"],
["辽宁","沈阳","二院餐厅","沈阳市和平区文化路16号","024-23891423"],
["辽宁","沈阳","长白大润发餐厅","沈阳市和平区长白西二街46-1号","024-24684103"],
["辽宁","沈阳","长兴餐厅","沈阳市和平区长兴街2号","024-23210312"],
["辽宁","沈阳","中华路餐厅","沈阳市和平区中华路9号","024-83280298"],
["辽宁","沈阳","黄河大街餐厅","沈阳市皇姑区黄河北大街113号","024-86511351"],
["辽宁","沈阳","乐购餐厅","沈阳市皇姑区黄河南大街78甲A一层","024-86219542"],
["辽宁","沈阳","昆山西路餐厅","沈阳市皇姑区昆山西路2号12门","024-86823106"],
["辽宁","沈阳","北行餐厅","沈阳市皇姑区长江街99号","024-86200680"],
["辽宁","沈阳","崇江餐厅","沈阳市皇姑区长江街崇江路137号","024-86844902"],
["辽宁","沈阳","亿丰餐厅","沈阳市浑南新区金卡路16号","024-23762076"],
["辽宁","沈阳","金地琥珀餐厅","沈阳市浑南中路14号","024-31694872"],
["辽宁","沈阳","文艺餐厅","沈阳市沈和区文艺路80号大福源超市一楼","024-24120548"],
["辽宁","沈阳","北站站内餐厅","沈阳市沈河区北站路102号","024-62570723"],
["辽宁","沈阳","瑞心餐厅","沈阳市沈河区北站路112号","024-22530116"],
["辽宁","沈阳","大西餐厅","沈阳市沈河区大西路KFC","024-22941130"],
["辽宁","沈阳","惠工餐厅","沈阳市沈河区哈尔滨路168号 华府金融购物中心1层","024-22528065"],
["辽宁","沈阳","滂江餐厅","沈阳市沈河区滂江街86号甲","024-24313609"],
["辽宁","沈阳","热闹餐厅","沈阳市沈河区热闹路60号","024-24114865"],
["辽宁","沈阳","文化路餐厅","沈阳市沈河区文化路81号","024-23955501"],
["辽宁","沈阳","钟楼餐厅","沈阳市沈河区中街路190号","024-24861706"],
["辽宁","沈阳","市商业城餐厅","沈阳市沈河区中街路212号","024-84844701"],
["辽宁","沈阳","凯利餐厅","沈阳市市府大路286号","024-22722540"],
["辽宁","沈阳","星摩尔餐厅","沈阳市铁西区北二中路6号星摩尔购物中心一楼KFC","024-31085778"],
["辽宁","沈阳","北一路万达餐厅","沈阳市铁西区北一中路1号","024-31223351"],
["辽宁","沈阳","腾飞餐厅","沈阳市铁西区滑翔路15号","024-25934378"],
["辽宁","沈阳","启工餐厅","沈阳市铁西区建设西路启工街大润发一楼","024-85866100"],
["辽宁","沈阳","保工餐厅","沈阳市铁西区南八中路73号","024-25416349"],
["辽宁","沈阳","沈新餐厅","沈阳市铁西区沈辽西路17号","024-25169818"],
["辽宁","沈阳","沈辽餐厅(滑翔家乐福楼下)","沈阳市铁西区沈辽中路5号","024-25965426"],
["辽宁","沈阳","铁百餐厅","沈阳市铁西区兴华南街16号","024-25877058"],
["辽宁","沈阳","兴华餐厅(铁西新玛特楼下)","沈阳市铁西区兴华南街37-1号","024-31086162"],
["辽宁","沈阳","百合餐厅","沈阳市铁西区云峰北街13号","024-25157205"],
["辽宁","沈阳","重工餐厅","沈阳市铁西区重工南街88号","024-25786602"],
["辽宁","沈阳","道义餐厅","沈阳市新城子区道义开发区郑良市路肯德基","024-89738259"],
["辽宁","沈阳","柳条湖餐厅","沈阳市于洪区崇山东路10号","024-86622148"],
["辽宁","沈阳","金厦餐厅","沈阳市于洪区黄海路30号","024-25348531"],
["辽宁","沈阳","沈辽路餐厅(华润沈辽路餐厅)","沈阳市于洪区沈辽路141号1门","024-66802657"],
["辽宁","沈阳","正良餐厅","沈阳市于洪区沈新路104号华润万家一楼KFC","024-88796167"],
["辽宁","沈阳","明华餐厅","沈阳于洪区黄河北大街78号","024-86536809"],
["辽宁","沈阳","奥体万达","沈阳市浑南新区营盘西街17号1001、1002","024-31919781"],
["辽宁","沈阳","四院肯德基","沈阳市皇姑区黄河南大街19号","024-86207805"],
["辽宁","沈阳","华府餐厅","沈阳市沈河区哈尔滨路118号华府天地一楼","024-22595046"],
["辽宁","大连","经开万达餐厅","大连经济技术开发区辽河西路117号东北三街万达广场一层","0411-39265919"],
["辽宁","大连","万和广场餐厅","大连开发区金马路208号","0411-39246488"],
["辽宁","大连","大连站北餐厅","大连市北站候车大厅地下一层","18842866365"],
["辽宁","大连","北站餐厅","大连市甘井子区大连北站(南关岭火车站) 二层商业夹层","18842863793"],
["辽宁","大连","新甘百餐厅","大连市甘井子区甘井子街8号新甘百商城","0411-39574290"],
["辽宁","大连","硅谷餐厅","大连市甘井子区黄浦路600-602号","0411-39750566"],
["辽宁","大连","泉水餐厅","大连市甘井子区龙畔金泉H1区62号","0411-39503368"],
["辽宁","大连","万盛餐厅","大连市甘井子区南关岭街道南关岭路777号万盛购物广场一楼","0411-39079249"],
["辽宁","大连","泡崖餐厅","大连市甘井子区泡崖玉金街19号(大连金玛购物广场有限公司)","0411-66883926"],
["辽宁","大连","金三角餐厅","大连市甘井子区松江路2号","0411-39534266"],
["辽宁","大连","周水子餐厅","大连市甘井子区迎客路168号","0411-86647925"],
["辽宁","大连","迎客路餐厅","大连市甘井子区迎客路6号","0411-86645121"],
["辽宁","大连","在张前路餐厅","大连市甘井子区张前路211号乐都汇一楼","0411-83730027"],
["辽宁","大连","华南餐厅","大连市甘井子区中华路22号","0411-39627211"],
["辽宁","大连","凌水餐厅","大连市柑井子区凌水店154号","0411-84740081"],
["辽宁","大连","斯大林路餐厅","大连市金州区斯大林路675号","0411-87699402"],
["辽宁","大连","安盛餐厅","大连市金州区斯大林路677号肯德基","0411-39311588"],
["辽宁","大连","澳东世纪餐厅","大连市金州新区澳东园4号楼","0411-39265919"],
["辽宁","大连","锦绣餐厅","大连市锦绣路58号","0411-39537011"],
["辽宁","大连","金马餐厅","大连市经济技术开发区金马路195号","0411-39202598"],
["辽宁","大连","昌临餐厅","大连市开发区金马路232号","0411-39205655"],
["辽宁","大连","大华餐厅","大连市旅顺口区大华街2号","0411-86627307"],
["辽宁","大连","丰荣餐厅","大连市普兰店第二百货大楼一楼","0411-83135665"],
["辽宁","大连","国会餐厅","大连市人民东路东港商务区音乐喷泉广场","18041179083"],
["辽宁","大连","黄河路餐厅","大连市沙河口区黄河路665号","0411-39760388"],
["辽宁","大连","新马栏餐厅","大连市沙河口区黄河路南、马栏南街东、鹏程街西侧的益嘉广场","0411-39975747"],
["辽宁","大连","幸福餐厅","大连市沙河口区五一路104-6号","0411-39575275"],
["辽宁","大连","成义餐厅","大连市沙河口区西安路54号","0411-39763376"],
["辽宁","大连","春柳餐厅","大连市沙河口区西南路872号五洲大厦","0411-39758918"],
["辽宁","大连","和平广场餐厅","大连市沙河口区中山路552号(乐购超市,地下一层。)","0411-39715589"],
["辽宁","大连","黑石礁餐厅","大连市沙河口区中山路661号","0411-39705955"],
["辽宁","大连","数码广场餐厅","大连市数码路与现状铁路交叉口及相邻区域","0411-39751705"],
["辽宁","大连","罗斯福餐厅","大连市西安路139号天兴罗斯福国际中心地下一层","0411-39748235"],
["辽宁","大连","付家庄餐厅","大连市西岗区付家庄滨海西路4号华能宾馆一楼","0411-82401391"],
["辽宁","大连","东浩餐厅","大连市西岗区建设街12号","0411-39600802"],
["辽宁","大连","中山餐厅","大连市西岗区中山路261号","0411-39653616"],
["辽宁","大连","越秀餐厅","大连市新开路82号越秀大厦","0411-39675206"],
["辽宁","大连","鲁迅路餐厅","大连市中山区鲁迅路29-5号","0411-39805255"],
["辽宁","大连","青泥洼桥餐厅","大连市中山区荣盛街33号","0411-39807055"],
["辽宁","大连","胜利餐厅","大连市中山区胜利广场28号","0411-39902991"],
["辽宁","大连","普照餐厅","大连市中山区新安街25号长江路201号","0411-82843356"],
["辽宁","大连","星海餐厅","大连市中山区星海公园内","0411-84662831"],
["辽宁","大连","新友好广场餐厅","大连市中山区友好广场8号","0411-39807555"],
["辽宁","大连","火车站餐厅","大连市中山区长江路261号","0411-82642476"],
["辽宁","大连","中南路餐厅","大连市中山区中南路219号","0411-39811211"],
["辽宁","大连","天津街餐厅","辽宁大连大连市中山区天津街200号","0411-82537968"],
["辽宁","大连","绿洲餐厅","辽宁大连市甘井子西南路绿洲园122-130号","0411-39743871"],
["辽宁","大连","东特餐厅","辽宁大连市华北路(甘)8G号","0411-39903696"],
["辽宁","大连","山东路餐厅","辽宁省大连市甘井子区千山路28号","0411-39534733"],
["辽宁","大连","庄河餐厅","庄河市黄海广场1号","0411-89715526"],
["辽宁","大连","青云餐厅","大连市中山区岭前街2、4、6号肯德基餐厅","0411-39850399"],
["辽宁","鞍山","立山大润发餐厅","鞍山市立山区鞍千路90栋S27号(大润发超市1楼)","0412-6939493"],
["辽宁","鞍山","国泰餐厅","鞍山市铁东区二道街68号","0412-2241491"],
["辽宁","鞍山","湖南餐厅","鞍山市铁东区湖南路105号","0412-5858095"],
["辽宁","鞍山","向阳餐厅","鞍山市铁东区解放东路80号","0412-5681051"],
["辽宁","鞍山","建国餐厅","鞍山市铁东区南建国路83号","0412-2227176"],
["辽宁","鞍山","胜利路餐厅","鞍山市铁东区胜利南路136号","0412-2928247"],
["辽宁","鞍山","景子街餐厅","鞍山市铁东区五一路26号","0412-2580900"],
["辽宁","鞍山","民生餐厅","鞍山市铁西区九道街139号","0412-8533199"],
["辽宁","鞍山","繁荣餐厅","鞍山市铁西区人民路56号","0412-8555240"],
["辽宁","鞍山","立山餐厅","辽宁鞍山市立山区胜利路与建国路交汇处(立山广场东南角华润万家超市)","0412-6631055"],
["辽宁","鞍山","站前餐厅","辽宁省鞍山市铁东区建国南路48号的乐都汇购物中心","0412-8770299"],
["辽宁","抚顺","抚顺市临江餐厅","抚顺市顺城区临江路中段8号","024-57892258"],
["辽宁","抚顺","抚顺市望花餐厅","抚顺市望花区和平街五十九号","0413-6405177"],
["辽宁","抚顺","抚顺市抚顺万达餐厅","抚顺市新抚区南站中心万达广场","024-52803268"],
["辽宁","抚顺","抚顺市裕民餐厅","抚顺市新抚区裕民路24号","024-52723092"],
["辽宁","抚顺","抚顺市抚百餐厅","抚顺市新抚区站前27-1方块万隆商厦地上一、二层","024-58065901"],
["辽宁","抚顺","抚顺市新城餐厅","辽宁抚顺市顺城区临江路新华大街1号","0413-7603057"],
["辽宁","抚顺","抚顺市新抚餐厅","辽宁省抚顺市新抚区解放路16号","0413-2630906"],
["辽宁","本溪","小市餐厅","本溪市满族自治县小市镇长江路396号(本溪华龙购物有限公司)","024-46866789"],
["辽宁","本溪","本大餐厅","本溪市平山区解放北路4号","024-42818469"],
["辽宁","本溪","华联餐厅","本溪市平山区永丰商业区华联街1号","024-42804011"],
["辽宁","本溪","好佳餐厅","辽宁省本溪市平山区水塔街15号好佳城市花园地上一层","024-42229326"],
["辽宁","丹东","丹东市亿龙餐厅","丹东市元宝区江城大街丹东亿龙国际项目沃尔玛","0415-2863130"],
["辽宁","丹东","丹东市华美餐厅","丹东市元宝区锦山大街105号华美大厦一楼KFC","0415-2800650"],
["辽宁","丹东","丹东市六纬餐厅","丹东市振兴区六纬路32号","0415-2147492"],
["辽宁","丹东","丹东市锦山餐厅","丹东市振兴区七经街8号","0415-2177370"],
["辽宁","丹东","丹东市上城餐厅","丹东市振兴区体育馆路乐购二部","0415-3150550"],
["辽宁","丹东","丹东市花园餐厅","丹东市振兴区万达广场一楼","0415-3118765"],
["辽宁","丹东","亿龙餐厅","辽宁省丹东市元宝区江城大街211号","0415-2863120"],
["辽宁","锦州","锦州市古塔餐厅","锦州市古塔区青年会路与步行街交叉路口北二里18号","0416-2323799"],
["辽宁","锦州","锦州市林西餐厅","锦州市古塔区人民街3段3-2号新华广场一楼肯德基","0416-2396088"],
["辽宁","锦州","锦州市锦凌餐厅","锦州市凌海凌海街北段","0416-8185762"],
["辽宁","锦州","锦州市贵州街餐厅","锦州市凌河区解放路六段","0416-2836918"],
["辽宁","锦州","锦州市上海路餐厅","锦州市凌河区上海路四段","0416-2140918"],
["辽宁","锦州","锦州市中百餐厅","锦州市凌河区中央大街三段1号","0416-2146232"],
["辽宁","锦州","林西","古塔区人民街3段3-2","0416-2396088"],
["辽宁","营口","营口市平安餐厅","辽宁营口市西市区渤海大街60号","0417-4830168"],
["辽宁","营口","营口市昆仑餐厅","营口市鲅鱼圈区昆仑大街39号","0417-6257160"],
["辽宁","营口","营口市哈大餐厅","营口市大石桥长征街实验里一层","0417-5856299"],
["辽宁","营口","营口市渤海餐厅","营口市站前区渤海大街东1号","0417-2848838"],
["辽宁","营口","营口市东升餐厅","营口市站前区东升路30号(大福源一楼)","0417-3833138"],
["辽宁","营口","营口市万达餐厅","营口市站前区市府路南1","0417-2929899"],
["辽宁","营口","营口市市府大路餐厅","站前区市府路南2号(乐都汇一、二楼)","0417-3297056"],
["辽宁","阜新","阜新市蒙古贞餐厅","阜新市阜蒙县文化路61号,京都购物广场1层"," "],
["辽宁","阜新","阜新市西山餐厅","阜新市海州区西山路16路","0418-3356996"],
["辽宁","阜新","阜新市解放餐厅","阜新市解放大街59号","0418-3359242"],
["辽宁","阜新","阜新市文化宫餐厅","阜新市细河区中华路解放大街东阜新新天地商业广场","0418-2938080"],
["辽宁","阜新","阜新市阜润发餐厅","辽宁省阜新市细河区中华路63号","0418-2609001"],
["辽宁","辽阳","辽阳市荣华餐厅","辽宁省辽阳市宏伟区荣华街汇华宫东荣华街北","0419-5375282"],
["辽宁","辽阳","辽阳市京都餐厅","辽宁省辽阳市新运大街73号","0419-2125676"],
["辽宁","辽阳","辽阳市新运餐厅","辽阳市白塔区新运大街105号","0419-2398208"],
["辽宁","辽阳","辽阳市新世纪餐厅","辽阳市新运大街54号","0419-2258874"],
["辽宁","辽阳","辽阳市贺尊餐厅","辽阳县首山镇人民街87号","0419-7178366"],
["辽宁","盘锦","盘锦市辽河餐厅","盘锦市双台区胜利路2-27-564","0427-3829230"],
["辽宁","盘锦","盘锦市久柏埠餐厅","盘锦市双台子区红旗大街与渤海路交汇处西北角","0427-3398535"],
["辽宁","盘锦","盘锦市盘新餐厅餐厅","盘锦市兴隆台区大商中央步行街新玛特负一层","0427-3251296"],
["辽宁","盘锦","盘锦市林丰餐厅","盘锦市兴隆台区林丰路东兴隆台街北","0427-2866889"],
["辽宁","盘锦","盘锦市中兴餐厅","盘锦市兴隆台区商东小区88-18-19","0427-2828344"],
["辽宁","盘锦","盘锦市华商餐厅","盘锦市兴隆台中兴街步行街海湾一楼肯得基","0427-3229717"],
["辽宁","盘锦","盘锦市新赛特餐厅","盘锦市兴垄台区商东小区中兴路76号","0427-2823033"],
["辽宁","铁岭","铁岭市家和美餐厅","铁岭市昌图县,影院大街家和美商场","024-75923588"],
["辽宁","铁岭","铁岭市广裕餐厅","银州区南马路29号","024-72810918"],
["辽宁","朝阳","朝阳市海纳餐厅","朝阳市朝阳大街与竹林路交汇处“大润发超市”","0421-2827776"],
["辽宁","朝阳","朝阳市朝阳大街餐厅","朝阳市双塔区朝阳大街四段1号","0421-2996777"],
["辽宁","朝阳","朝阳市朝商餐厅","朝阳市新华路二段82号","0421-2611224"],
["辽宁","葫芦岛","葫芦岛市兰花餐厅","葫芦岛市连山区新华大街1号楼E","0429-5554433"],
["辽宁","葫芦岛","葫芦岛市新华餐厅","葫芦岛市连山区兴工街1号","0429-2136071"],
["辽宁","葫芦岛","葫芦岛市群英餐厅","葫芦岛市连山区中央大街15号乐都汇购物中心一楼","0429--2553010"],
["辽宁","葫芦岛","葫芦岛市民安餐厅","葫芦岛市连山区中央路10-1号","0429-3356633"],
["辽宁","葫芦岛","葫芦岛市飞天广场餐厅","葫芦岛市龙岗区龙湾大街43号","0429-3298765"],
["辽宁","葫芦岛","葫芦岛市龙湾大街餐厅","葫芦岛市龙湾大街与文萃路交汇处东北角","0429-22658569"],
["黑龙江","哈尔滨","枫叶小镇餐厅","松北区中源大道16999 枫叶小镇一楼","0451-51971776"],
["黑龙江","哈尔滨","红博餐厅","哈尔滨南岗区红军街38号","0451-53603263"],
["黑龙江","哈尔滨","迎宾餐厅","哈尔滨市道里区埃德蒙顿路38号","0451-84308637"],
["黑龙江","哈尔滨","新阳餐厅","哈尔滨市道里区安国街76号","0451-84276647"],
["黑龙江","哈尔滨","河图餐厅","哈尔滨市道里区哈药路402号","0451-84603730"],
["黑龙江","哈尔滨","河松餐厅","哈尔滨市道里区河松街29号(财智大厦一楼)","0451-87392361"],
["黑龙江","哈尔滨","爱建餐厅","哈尔滨市道里区上海路99号","0451-84238400"],
["黑龙江","哈尔滨","新一百餐厅","哈尔滨市道里区石头道街118号","0451-84652772"],
["黑龙江","哈尔滨","兆麟餐厅","哈尔滨市道里区透龙街64号","0451-84610281"],
["黑龙江","哈尔滨","建议餐厅","哈尔滨市道里区新阳路365号","0451-84606922"],
["黑龙江","哈尔滨","友谊餐厅","哈尔滨市道里区友谊路167-217号","0451-84617607"],
["黑龙江","哈尔滨","花圃餐厅","哈尔滨市道里区中央大街187号","0451-84618700"],
["黑龙江","哈尔滨","中央大街餐厅","哈尔滨市道里区中央大街中央商城对面潮人汇美食广场2楼肯德基","0451-84673723"],
["黑龙江","哈尔滨","承德广场餐厅","哈尔滨市道外区承德街公路客运站一层","0451-88371833"],
["黑龙江","哈尔滨","东直餐厅","哈尔滨市道外区东直路389号","0451-57629787"],
["黑龙江","哈尔滨","东站餐厅","哈尔滨市道外区桦树街1号","0451-57632501"],
["黑龙江","哈尔滨","太古街餐厅","哈尔滨市道外区太古街210号大润发一层","0451-51022523"],
["黑龙江","哈尔滨","先锋餐厅","哈尔滨市道外区先锋路168号","0451-82298505"],
["黑龙江","哈尔滨","鞍山街餐厅","哈尔滨市东大直街地一大道香港城鞍山街入口处","0451-85872845"],
["黑龙江","哈尔滨","顾乡餐厅","哈尔滨市顾乡大街85号世纪联华超市一层","0451-87611013"],
["黑龙江","哈尔滨","利民餐厅","哈尔滨市利民开发区利民大道","0451-57384878"],
["黑龙江","哈尔滨","保健路餐厅","哈尔滨市南岗区保健路大众新城G2-4号","0451-87306133"],
["黑龙江","哈尔滨","龙运餐厅","哈尔滨市南岗区春申街28号龙运宾馆二楼","0451-53629375"],
["黑龙江","哈尔滨","大直街餐厅","哈尔滨市南岗区大直街40号","0451-53601653"],
["黑龙江","哈尔滨","精益餐厅","哈尔滨市南岗区东大直街338号","0451-53633871"],
["黑龙江","哈尔滨","北华餐厅","哈尔滨市南岗区东大直街362号","0451-53601519"],
["黑龙江","哈尔滨","人和餐厅","哈尔滨市南岗区果戈里大街204号","0451-82621276"],
["黑龙江","哈尔滨","革新餐厅","哈尔滨市南岗区果戈里大街56号","0451-82665810"],
["黑龙江","哈尔滨","哈西站餐厅","哈尔滨市南岗区哈西大街哈尔滨西站","0451-58586008"],
["黑龙江","哈尔滨","哈站餐厅","哈尔滨市南岗区海关街1号沪士大厦一层","0451-53609778"],
["黑龙江","哈尔滨","北鸿餐厅","哈尔滨市南岗区和兴路95-5号肯德基餐厅","0451-86357977"],
["黑龙江","哈尔滨","颐园餐厅","哈尔滨市南岗区红军街87号","0451-53609327"],
["黑龙江","哈尔滨","黄河餐厅","哈尔滨市南岗区红旗大街301号","0451-82278045"],
["黑龙江","哈尔滨","宽城餐厅","哈尔滨市南岗区宽城街与鼎新三道街交口大福源超市","0451-82515563"],
["黑龙江","哈尔滨","西大桥餐厅","哈尔滨市南岗区西大直街108号","0451-86224525"],
["黑龙江","哈尔滨","福顺餐厅","哈尔滨市南岗区学府路1-1号凯德广场","0451-51662714"],
["黑龙江","哈尔滨","学府餐厅","哈尔滨市南岗区学府路53号","0451-86685008"],
["黑龙江","哈尔滨","西城汇餐厅","哈尔滨市南岗区学府路主干道(学府4道街至211医院)地下西城汇商场","0451-86673260"],
["黑龙江","哈尔滨","新疆大街餐厅","哈尔滨市平房区新疆大街117号","0451-51668288"],
["黑龙江","哈尔滨","友协餐厅","哈尔滨市平房区新疆大街8号","0451-86552078"],
["黑龙江","哈尔滨","聚源餐厅","哈尔滨市松北区三环路凯利汽车百货广场","0451-85988291"],
["黑龙江","哈尔滨","永平餐厅","哈尔滨市太平区永平小区五街区家乐福超市一层","0451-82592902"],
["黑龙江","哈尔滨","松雷餐厅","哈尔滨市香坊区和平路1号","0451-82112938"],
["黑龙江","哈尔滨","赣水路餐厅","哈尔滨市香坊区衡山路17-7号","0451-87717501"],
["黑龙江","哈尔滨","民生路餐厅","哈尔滨市香坊区民生路178号","0451-86201372"],
["黑龙江","哈尔滨","新乐松餐厅","哈尔滨市香坊区三大动力路278号","0451-58568512"],
["黑龙江","哈尔滨","三合路餐厅","哈尔滨市香坊区三合路133号","0451-87957237"],
["黑龙江","哈尔滨","东滨广场餐厅","哈尔滨市延福街南与升永街西交界处","0451-87995968"],
["黑龙江","哈尔滨","假日餐厅","哈尔滨市中央大街2号","0451-84617417"],
["黑龙江","哈尔滨","通乡餐厅","哈尔滨香坊区红旗大街38号","0451-55636921"],
["黑龙江","哈尔滨","翰林餐厅","呼兰利民开发区学院路","0451-88122168"],
["黑龙江","哈尔滨","呼兰街餐厅","呼兰区建设街文明小区5号楼北1门畅想国际购物中心一层","0451-57323856"],
["黑龙江","哈尔滨","花园店","哈尔滨市南岗区果戈里大街332号","0451-53634953"],
["黑龙江","哈尔滨","香安店","哈尔滨市香坊区中山路59号","0451-55616143"],
["黑龙江","哈尔滨","教化店","哈尔滨市南岗区西大直街183-191号","0451-87560550"],
["黑龙江","哈尔滨","安发店","哈尔滨市道里区抚顺街281号","0451-84510852"],
["黑龙江","哈尔滨","大成店","哈尔滨市南岗区烟草街1号","0451-87026278"],
["黑龙江","哈尔滨","亿曼街店","哈尔滨市南岗区一曼街18号","0451-82532117"],
["黑龙江","齐齐哈尔","民航餐厅","齐齐哈尔市卜奎大街东侧工会胡同温商商务大酒店1-2层","0452-2282351"],
["黑龙江","齐齐哈尔","百花餐厅","齐齐哈尔市卜奎南大街136号","0452-2442168"],
["黑龙江","齐齐哈尔","红岸餐厅","齐齐哈尔市富拉尔基区和平路81号","0452-6881923"],
["黑龙江","齐齐哈尔","鸿福餐厅","齐齐哈尔市建华区军校街300号","0452-2567300"],
["黑龙江","齐齐哈尔","中瑞餐厅","齐齐哈尔市建华区清真路30号","0452-2469201"],
["黑龙江","齐齐哈尔","建华万达餐厅","齐齐哈尔市建华区新江路1号建华万达广场2号门1楼肯德基","0452-6038996"],
["黑龙江","齐齐哈尔","齐百餐厅","齐齐哈尔市龙沙区卜奎大街73号","0452-2424259"],
["黑龙江","齐齐哈尔","齐润发餐厅","齐齐哈尔市龙沙区龙华路89号","0452-2434851"],
["黑龙江","齐齐哈尔","铁东餐厅","齐齐哈尔市铁锋区中华东路86号","0452-2479071"],
["黑龙江","鸡西","鸡西市广益餐厅","鸡西市鸡冠区红军路71号步行街","18945806151"],
["黑龙江","鹤岗","鹤岗市新鹤餐厅","鹤岗市工农比优特时代广场一层","0468-3330044"],
["黑龙江","鹤岗","鹤岗市岗汇餐厅","鹤岗市工农区15委工交路1号","0468-3450500"],
["黑龙江","双鸭山","双鸭山市双百餐厅","双鸭山市尖山区新兴大街121号","0469-4245885"],
["黑龙江","双鸭山","双鸭山市惠丰餐厅","双鸭山市新兴大街118号惠丰商场","0469-6696345"],
["黑龙江","大庆","湖滨餐厅","大庆市高新区发展路63号","0459-4675761"],
["黑龙江","大庆","百湖餐厅","大庆市高新区摩码广场一楼肯德基","0459-8152686"],
["黑龙江","大庆","龙凤餐厅","大庆市龙凤区大庆公路客运枢纽站","0459-6286312"],
["黑龙江","大庆","西宾路餐厅","大庆市让胡路区博奥时代购物中心","0459-5969461"],
["黑龙江","大庆","乘风庄餐厅","大庆市让胡路区乘风庄新玛特一楼肯德基","0459-5671027"],
["黑龙江","大庆","丰泽园餐厅","大庆市让胡路区丰泽园小区102号,104号商服","0459-5522242"],
["黑龙江","大庆","营口市昆仑餐厅","大庆市让胡路区西宾路与聚贤街交口东南角","0459--6353398"],
["黑龙江","大庆","铁人大道餐厅","大庆市让胡路区银亿阳光城E-21楼9号商服","0459-5931717"],
["黑龙江","大庆","新潮餐厅","大庆市让胡路区长春路126号","0459-5918525"],
["黑龙江","大庆","经三餐厅","大庆市萨尔图区东风路15号大庆万达商业广场","0459-6079831"],
["黑龙江","大庆","经六餐厅","大庆市萨尔图区东风新村经六街118号","0459-6150388"],
["黑龙江","大庆","新庆莎餐厅","大庆市萨尔图区会战大街18号","0459-6315637"],
["黑龙江","大庆","新中桥餐厅","大庆市萨尔图区会战大街火车站广场右侧3702","0459-6321631"],
["黑龙江","大庆","银座餐厅","大庆市萨尔图区纬二路南侧经九街西侧银座尚品1号商服","0459-4319169"],
["黑龙江","大庆","东风店","大庆市萨尔图区东风新村东风商城东","0459-6377632"],
["黑龙江","大庆","东湖店","大庆市让胡路区东湖小区石油广场","0459-5732115"],
["黑龙江","伊春","伊春市新兴餐厅","伊春市伊春区新兴中大街4号","0458-3333186"],
["黑龙江","佳木斯","佳木斯市顺和餐厅","佳木斯市前进区顺和街2号","0454-8999916"],
["黑龙江","佳木斯","佳木斯市万新餐厅","佳木斯市万新街大润发一层","0454-2664503"],
["黑龙江","佳木斯","佳木斯市佳百餐厅","佳木斯市向阳区长安路739号","0454-8627855"],
["黑龙江","佳木斯","佳木斯市佳润发餐厅","佳木斯市向阳区长安路972号大润发超市","0454-8640658"],
["黑龙江","七台河","七台河市山湖餐厅","七台河市桃山区山湖路36号","0464-8288797"],
["黑龙江","七台河","七台河市桃山餐厅","七台河市桃山区山湖路46号大润发一层肯德基","0464-8221029"],
["黑龙江","七台河","七台河市旭日餐厅","七台河市桃山区文化广场","0464-6108166"],
["黑龙江","牡丹江","牡丹江市牡百餐厅","牡丹江市太平路31号","0453-6951618"],
["黑龙江","牡丹江","牡丹江市平安街餐厅","牡丹江市西安区西十一条路西,牡丹街北","0453-6876938"],
["黑龙江","牡丹江","牡丹江市牡丹街餐厅","牡丹江市西安区西长安街1号","0453-6299100"],
["黑龙江","牡丹江","牡润发店","牡丹江市东一路55号","0453-6933200"],
["黑龙江","牡丹江","文化广场店","牡丹江市东一路牡丹街1号","0453-6989618"],
["黑龙江","黑河","黑河市华富餐厅","黑河市爱辉区中央街与邮政街交叉处华富商城一层","0456-8268181"],
["黑龙江","绥化","绥化市广源餐厅","绥化市长江路123号","0455-8799146"],
["黑龙江","绥化","绥化市中兴大街餐厅","绥化市中兴大街华晨商都一层肯德基","0455-8335550"],
["黑龙江","绥化","绥化市阳光城餐厅","绥化市中直路南五路恒艺阳光城售楼处","0455-8127001"],
["吉林","长春","长春站内餐厅","长春市宽城区火车站候车室二楼","0431-81884412"],
["吉林","长春","乌兰大街餐厅","松原市乌兰大街与哈萨尔交口新玛特一楼","0438-2549122"],
["吉林","长春","南湖餐厅","吉林省长春市东岭南街1405号","0431-85283401"],
["吉林","长春","皓月餐厅","吉林省长春市皓月大街2549号","0431-87984442"],
["吉林","长春","中海餐厅","吉林省长春市净月开发区卫星路商住区中海水岸春城G5号楼102室和106室","0431-84637900"],
["吉林","长春","天池餐厅","吉林省长春市胜利大街2号","0431-82950995"],
["吉林","长春","天嘉餐厅","吉林省长春市新竹路与基隆南街交汇处","0431-89693255"],
["吉林","长春","欧亚卖场二餐厅","吉林省长春长春市开运街5178号","0431-85540200"],
["吉林","长春","欧亚餐厅","长春市朝阳区工农大路1128号","0431-85667819"],
["吉林","长春","百脑汇餐厅","长春市朝阳区工农大路1313号百脑汇一楼","0431-81920710"],
["吉林","长春","时代餐厅","长春市朝阳区工农大路29号","0431-5931018"],
["吉林","长春","宽平餐厅","长春市朝阳区红旗街63号","0431-5920234"],
["吉林","长春","同德餐厅","长春市朝阳区红旗街万达广场","0431-81936696"],
["吉林","长春","前进餐厅","长春市朝阳区前进大街59号肯德基餐厅","0431-5171853"],
["吉林","长春","卫星广场餐厅","长春市朝阳区人民大街8899号","0431-81797058"],
["吉林","长春","创业餐厅","长春市创业大街与春城大街交汇处","0431-89692499"],
["吉林","长春","大经路餐厅","长春市大经路96号","0431-8719158"],
["吉林","长春","飞跃餐厅","长春市东风大街3619号","0431-85752678"],
["吉林","长春","东大桥餐厅","长春市东天街1号","0431-88752637"],
["吉林","长春","吉盛餐厅","长春市二道街东胜大87号","0431-4956511"],
["吉林","长春","长春市晨宇餐厅","长春市二道区东盛大街与吉林大路交汇晨宇购物中心一楼","0431-85158400"],
["吉林","长春","君子兰餐厅","长春市二道区长新东路与远大大街交汇欧亚购物中心一楼肯德基餐厅","0431-88478278"],
["吉林","长春","磐谷餐厅","长春市硅谷大街超达磐谷国际商务港正门","0431-87014166"],
["吉林","长春","临河餐厅","长春市经济技术开发区临河街东、珠海路南","0431-84998558"],
["吉林","长春","金川餐厅","长春市经济开发区金川街与海口路交汇北方市场13号门","0431-86785185"],
["吉林","长春","净月餐厅","长春市净月大街5555号","0431-84514399"],
["吉林","长春","博硕餐厅","长春市净月开发区博硕路499号","0431-84514985"],
["吉林","长春","欧亚卖场三餐厅","长春市开运街5178号欧亚卖场20号门","0431-89243826"],
["吉林","长春","欧亚卖场一餐厅","长春市开运街5178号欧亚卖场2号门","0431-5540800"],
["吉林","长春","五环餐厅","长春市宽城区嫩江路4号欧亚广场一楼","0431-85076713"],
["吉林","长春","凯旋路餐厅","长春市宽城区铁北二路2008号凯旋路客运站一楼","0431-81700637"],
["吉林","长春","中兴餐厅","长春市宽城区新发路1号","0431-2703219"],
["吉林","长春","长新餐厅","长春市宽城区长新街777号广源商都楼下","0431-81961090"],
["吉林","长春","柳影路餐厅","长春市柳影路万龙第五城3号楼","0431-82633339"],
["吉林","长春","春城餐厅","长春市绿园区春城大街万鑫花园3号楼","0431-87951367"],
["吉林","长春","车百餐厅","长春市绿园区东风大街38号车百一楼","0431-87650730"],
["吉林","长春","农安路餐厅","长春市农安县农安大路欧亚商贸一楼","0431-83226700"],
["吉林","长春","越野路餐厅","长春市汽车产业开发区越野路 (长春欧亚集团股份有限公司欧亚车百大楼)","0431-85762208"],
["吉林","长春","湖光餐厅","长春市前进大街488号","0431-81159075"],
["吉林","长春","彩云餐厅","长春市青年路81号","0431-7929810"],
["吉林","长春","金座餐厅","长春市人民大街10号","0431-5802121"],
["吉林","长春","物贸餐厅","长春市人民大街6969号","0431-85313499"],
["吉林","长春","同志街餐厅","长春市同志街5号","0431-88980933"],
["吉林","长春","福林餐厅","长春市同志街68号","0431-5644374"],
["吉林","长春","长大餐厅","长春市星光街以东、卫星路以南","0431-81154096"],
["吉林","长春","兴顺餐厅","长春市兴顺街与长沈路交汇欧亚商超1楼","0431-85991612"],
["吉林","长春","新天地餐厅","长春市亚泰大街1138号","0431-88720746"],
["吉林","长春","瑞阳餐厅","长春市亚泰大街8599","0431-85302352"],
["吉林","长春","长春新火车站餐厅","长春市长白路5号长春火车站","0431-86111214"],
["吉林","长春","西客站餐厅","长春市长春火车西站候车大厅内","0431-81740147"],
["吉林","长春","崇智餐厅","长春市重庆路1388号","0431-8966562"],
["吉林","长春","北国之春餐厅","长春市重庆路48号","0431-8917854"],
["吉林","长春","王府井餐厅","长春市重庆路68号","0431-88962926"],
["吉林","长春","东环城餐厅","长春市自由大路6738号","0431-84641570"],
["吉林","长春","东岭街餐厅","长春市自由大路与东岭北街交汇处欧亚超市一楼","0431-89299141"],
["吉林","吉林","吉百餐厅","吉林省吉林市吉林大街179号","0432-2569548"],
["吉林","吉林","大龙华餐厅","吉林省吉林市遵义路东路55号","0432-63031855"],
["吉林","吉林","中康餐厅","吉林市昌邑区吉林大街309号","0432-2546966"],
["吉林","吉林","解放东路餐厅","吉林市昌邑区重庆路1367号","0432-68196196"],
["吉林","吉林","吉林火车站餐厅","吉林市昌邑区重庆路1号二层A区","0432-66022288"],
["吉林","吉林","吉润发餐厅","吉林市成都路汇龙商城A座","0432-62555337"],
["吉林","吉林","新珲春餐厅","吉林市船营区解放中路120号肯德基餐厅","0432-2076669"],
["吉林","吉林","新生活餐厅","吉林市船营区越山路378号","0432-6588669"],
["吉林","吉林","雾凇餐厅","吉林市珲春中街1508号大润发一层肯德基","0432-62225005"],
["吉林","吉林","江城餐厅","吉林市解放北路交行花园1号","0432-62750886"],
["吉林","吉林","松江餐厅","吉林市松江南路488号欧亚城市综合体","0432-63369399"],
["吉林","吉林","天津街餐厅","吉林市天津街1088号","0432-62436755"],
["吉林","吉林","华业店KFC","吉林市昌邑区嫩江街199号华业大厦一楼肯德基","15354602112"],
["吉林","四平","四平市吉鹤餐厅","白城市开发区欧亚购物中心一楼","0436-5035008"],
["吉林","四平","四平市抚松餐厅","白山市抚松县松江河填白云路579号1楼186、187号","0439-6985258"],
["吉林","四平","四平市天成餐厅","白山市天成大厦一楼","0439-3254477"],
["吉林","四平","四平市英雄餐厅","吉林省四平市铁西区英雄大街与步行街交汇处","0434-3280088"],
["吉林","四平","四平市中央路餐厅","四平市铁东区北四场街(四平欧亚商贸有限公司)","0434-6991148"],
["吉林","四平","四平市四平站餐厅","四平市英雄大街1号","0434-6169918"],
["吉林","辽源","辽源市连阳餐厅","辽源市龙山区连阳路欧亚购物中心一层7号门","0437-5011115"],
["吉林","辽源","辽源市大什街餐厅","辽源市龙山区西宁大街9号银座购物中心一层","0437-5089116"],
["吉林","通化","建设餐厅","通化市建设大街337号","0435-3232276/3252276"],
["吉林","通化","通化市江南餐厅","通化市江南新区江南大街新天地购物中心一楼","0435-3578478"],
["吉林","通化","通化市大副食餐厅","通化市新华大街83号","0435-3247889"],
["吉林","松原","松原市金伦餐厅","吉林省松原吉林省松原市金伦街与铂金大道交汇处","0438-2771039"],
["吉林","松原","松原市康宁餐厅","吉林省松原市郭尔罗斯大路1800号","0438-5086355"],
["吉林","松原","松原市伯都纳餐厅","松原市伯都纳南街1号东镇国际城项目E1#楼地上一层","0438-2085522"],
["吉林","松原","松原市青年大街餐厅","松原市经济开发区青年大街4799号","0438-2080511"],
["吉林","松原","松原市中东餐厅","松原市五环大街3777号","0438-2169993"],
["吉林","白城","白城市长庆餐厅","白城市新华西路与长庆街交汇处西南角","0436-3516917"],
["吉林","延吉","延吉市河南餐厅","延吉市河南街330-3号","0433-2815715"],
["吉林","延吉","延吉市进学餐厅","延吉市人民路181号","0433-2569733"],
["吉林","延吉","延吉市人民餐厅","延吉市人民路49号","0433-2555711"],
["吉林","延吉","延吉市凯尔玛餐厅","延吉市新兴街解放路56号凯尔玛超市地下一楼KFC","0433-2552275"],
["吉林","敦化","敦化市敦化瀚章餐厅","延吉市敦化瀚章大街1318号(中心市场对面)","0433-6595333"],
["吉林","珲春","珲春市文化路餐厅","珲春市文化路欧亚延百一楼","0433-5096136"],
["河北","石家庄","东站房店KFC","中华南大街新火车站高架餐饮层东站房","0311-69020698"],
["河北","石家庄","西美花街店KFC","红旗南大街以西,汇丰路以南西美花街生活工场1、2层","0311-89920780"],
["河北","石家庄","怀特二期店KFC","槐安东路与育才街交口西北角怀特商业广场一层","0311-66509190"],
["河北","石家庄","悦享天地店KFC","桥西区中山西路700号悦享天地广场1层","0311-89846119"],
["河北","石家庄","丽华","中山西路87号","0311-87035722"],
["河北","石家庄","中山","中山东路189号","0311-86076142"],
["河北","石家庄","八一","中山西路141号","0311-87886429"],
["河北","石家庄","友谊","裕华西路556号一层","0311-89927587"],
["河北","石家庄","柏林","中华北大街236号一层","0311-87774049"],
["河北","石家庄","新世隆","建设南大街117#","0311-86113749"],
["河北","石家庄","翟营","翟营大街北国超市内","0311-85032741"],
["河北","石家庄","广安","中山东路326号开元花园负一层/050011","0311-85936143"],
["河北","石家庄","束鹿","辛集市兴华路泰和商业大楼一、二层","0311-83283060"],
["河北","石家庄","怀特","育才大街265号怀特国际商城一、二层","0311-85888234"],
["河北","石家庄","常山","正定县正定镇燕赵南大街67号一、二层","0311-88011310"],
["河北","石家庄","华安","中华北大街50号(现中华大街与新华路交口军创园2号综合楼103号房)一、二层","0311-85366236"],
["河北","石家庄","天山","开发区长江大道路13号一二层","0311-85830644"],
["河北","石家庄","裕东","裕华区裕华东路112号一、二层","0311-85078246"],
["河北","石家庄","中槐","中华南大街355号保龙仓购物广场一层","0311-83825265"],
["河北","石家庄","益元","联盟路368号一层","0311-85860634"],
["河北","石家庄","西丽","中山西路316号","0311-83032226"],
["河北","石家庄","天河","胜利北路237号","0311-86837163"],
["河北","石家庄","万象","裕华路与中华大街东北交叉口万象天成商务广场地下一层","0311-66600581"],
["河北","石家庄","新丽华","金桥北大街2号","0311-67560010"],
["河北","石家庄","益中","中山东路与东二环交口西北角谈固村金谈固地坛园","0311-89690026"],
["河北","石家庄","益东","裕华区北国益东百货一楼","0311-85926549"],
["河北","石家庄","南三条","中山东路和大经街交叉口","0311-67666983"],
["河北","石家庄","省院","和平路346号","0311-87811996"],
["河北","石家庄","尖岭万达","裕华区裕华万达商业广场一层","0311-67796366"],
["河北","石家庄","晋州","晋州市光明街和朝阳路交口摩尔新街","0311-84384193"],
["河北","石家庄","中华北","中华北大街与和平西路交叉口北行500米路西128号","0311-85862732"],
["河北","石家庄","金圆","辛集市兴华路西侧76号一、二层","0311-85398367"],
["河北","石家庄","民心广场","裕华路与维明大街交口民心广场地下一层永辉超市","0311-85511364"],
["河北","石家庄","勒泰","中山东路北侧、正东路以南、栗康街以西、长征街以东,中山东路39号勒泰中心","0311-66766600"],
["河北","石家庄","西站房","中华南大街新火车站高架餐饮夹层","0311-80806996"],
["河北","石家庄","乐惠家","谈固东街与槐安东路交口永辉超市一层","0311-68006139"],
["河北","石家庄","平山","平山县冶河东路福美佳购物中心一层","0311-82948483"],
["河北","石家庄","正定机场店KFC","正定国际机场2号航站楼F1-1","0311-68137826"],
["河北","石家庄","红旗大街店KFC","红旗南大街666号商业北侧一、二层","0311-85330056"],
["河北","石家庄","正定DT店KFC","正定县正定镇107国道旁车站东街27号小商品市场二期A区商业综合楼","0311-85171675"],
["河北","石家庄","飞云餐厅","新乐市鲜虞街与新开路交叉口西北角肯德基餐厅","0311-85191168"],
["河北","石家庄","平安餐厅","长安区中山东路97号肯德基餐厅","0311-86071944"],
["河北","唐山","遵化开元店KFC","华明北路西侧、府前西街南侧开元小区四期开元国际项目","0315-8270166"],
["河北","唐山","唐百","路北区新华东道125号百货大楼一楼","0315-2816057"],
["河北","唐山","丰润","丰润区新城道114号","0315-3125815"],
["河北","唐山","八方","路北区北新西道八方购物中心","0315-2237048"],
["河北","唐山","远洋","路北区长宁道与建设北路交口远洋城购物中心","0315-2790451"],
["河北","唐山","光明","新华西道118-8号","0315-5269800"],
["河北","唐山","古冶","古冶区林西新林道49号","0315-3580668"],
["河北","唐山","玉田","玉田城内无终路14号玉田供销大厦1层","0315-6115126"],
["河北","唐山","尚座","新华道以北卫国路以西尚座商业街1E888、2E888一、二层","0315-2578808"],
["河北","唐山","翔云","华岩北路9号一层","0315-2016619"],
["河北","唐山","夏日","乐亭县金融大街35号夏日商场一层","0315-5228388"],
["河北","唐山","开平","新苑路与普光路交口志方购物广场一 二层","0315-3388915"],
["河北","唐山","龙泽","龙泽北路242号","0315-2020151"],
["河北","唐山","迁西","迁西县喜峰中路远大鸿福购物广场一层","0315-5619992"],
["河北","唐山","赛特","丰润区曹雪芹西大街5号赛特购物中一层","0315-5127800"],
["河北","唐山","华岩","路北区华岩北路42号","0315-2059008"],
["河北","唐山","新华贸","新华西道32号新华购物中心一层","0315-6328086"],
["河北","唐山","滦南","滦南县和平路38号银泰商厦一层","0315-4500168"],
["河北","唐山","兴安","迁安市兴安大街936号兴宝大商业广场","0315-7620500"],
["河北","唐山","丰南","丰南区文化大街138号","0315-8120396"],
["河北","唐山","遵缘","遵化市文化北路金缘购物中心一层","0315-6611586"],
["河北","唐山","崇尚","迁安市燕山大路西侧南二环路北侧交口","0315-7660100"],
["河北","唐山","丰南文化","丰南区文化大街109号丰南商厦购物广场B区一、二层","0315-8209626"],
["河北","唐山","凤城国际","路北区北新西道与友谊路交口凤城国际广场","0315-2224911"],
["河北","唐山","渤海","新华西道与渤海南北街交口","0315-3401081"],
["河北","唐山","滦县","滦县燕山大街与滦河路交叉口东安商厦一层","0315-7108810"],
["河北","唐山","唐山万达","新华东道100号万达广场","0315-6817033"],
["河北","唐山","文明","玉田县文明路凤凰购物底商一层","0315-5311246"],
["河北","唐山","天承","滦南县友谊路西侧、南大街南侧滦南八方购物广场一层","0315-4593450"],
["河北","唐山","南湖店KFC","路南区西电路42-1号南湖购物中心一层","0315-6312017"],
["河北","唐山","唐山火车站店KFC","火车站高架层二层肯德基","0315-2308981"],
["河北","唐山","迁安餐厅","迁安市兴安大街东安超商一层","0315-7690777"],
["河北","秦皇岛","巴塞罗那餐厅","秦皇岛市海港区燕山大街与建国路交叉口秦新巴塞罗那商场一层","0335-3630869"],
["河北","秦皇岛","欧玛克店KFC","北戴河区保二路38号一层","0335-7101681"],
["河北","秦皇岛","港城","海港区文化路马坊街2号金街时代购物广场1层","0335-3866610"],
["河北","秦皇岛","广缘","经济开发区峨眉山南路1-3号广缘超市旁","0335-8566618"],
["河北","秦皇岛","燕山","秦皇岛迎宾路121号","0335-3607700"],
["河北","秦皇岛","山海关","山海关区关城南路89号","0335-5071076"],
["河北","秦皇岛","茂业","海港区文化路8号1层","0335-3259900"],
["河北","秦皇岛","海阳","海港区河北大街165号视听研究所一楼","0335-3230836"],
["河北","秦皇岛","乐都汇","海港区河北大街南侧、民族路与文化路之间乐都汇购物中心","0335-3192019"],
["河北","秦皇岛","昌黎","昌黎县碣阳大街与民生路交口东北角民生广场","0335-2555000"],
["河北","秦皇岛","抚宁","抚宁县健康大街东段北侧东购商厦","0335-6689277"],
["河北","秦皇岛","世纪港湾","海港区秦皇西大街西段世纪港湾购物广场一层","0335-3605651"],
["河北","秦皇岛","北戴河","北戴河区海宁路29号冀北电力疗养院南侧","0335-4039775"],
["河北","秦皇岛","天洋餐厅","海港区民族南路56号天洋电器一层肯德基","0335-3216331"],
["河北","邯郸","天鸿店KFC","丛台区联纺东路与人和街交口东北角天鸿广场A座一层KFC","0310-7078282"],
["河北","邯郸","万达","从台区中华北大街124号","0310-3206210"],
["河北","邯郸","金正","邯山区陵西南大街8号金正广场1层","0310-2055608"],
["河北","邯郸","龙湖","人民路与滏东大街交叉口 邯郸阳光龙湖商城地下一层","0310-2090808"],
["河北","邯郸","武新","武安市中兴路955号武安新世纪广场2期1层","0310-5612766"],
["河北","邯郸","人民","光明大街69号一层","0310-3012899"],
["河北","邯郸","龙山","涉县龙山大街与振兴路交口奥力商城一层","0310-3818686"],
["河北","邯郸","前进","复兴区前进大街中段道东一层","0310-4185893"],
["河北","邯郸","峰峰","峰峰矿区滏临北大街一层19号一层","0310-5116768"],
["河北","邯郸","大名","大名县大名府路东段路北","0310-6566650"],
["河北","邯郸","客运总站","邯山区陵园路11号汽车客运总站南候车室南侧一层","0310-3201570"],
["河北","邯郸","永年","永年县政府街东段路南1层","0310-6795299"],
["河北","邢台","财富","中兴东大街与新华南路交叉口银座商城一楼","0319-3696166"],
["河北","邢台","沙河","沙河市京广路中端西侧、农业银行北侧华联购物广场","0319-8788918"],
["河北","邢台","天一城","新华北路天一广场","0319-3132316"],
["河北","邢台","宁晋","宁晋县凤凰北路天一广场肯德基","0319-5865660"],
["河北","邢台","冶金路","桥西区中兴西大街189号家乐园百货冶金路店一侧一层","0319-2021566"],
["河北","邢台","邢州","桥东区中兴东大街与纳凉园街交口家乐园百货襄都店一侧一层","0319-3860066"],
["河北","邢台","中北商城店KFC","中兴西大街中北世纪城集中商业街1、2层","0319-2530490"],
["河北","保定","钻石广场店KFC","涿州市钻石广场一层","0312-3855745"],
["河北","保定","保定站店KFC","新市区保定火车站候车室","0312-8925118"],
["河北","保定","涿州服务区店KFC","京港澳高速公路K57+450涿州东服务区(东区)","0312-3855843"],
["河北","保定","清苑店KFC","清苑县城清苑路东侧一、二层","0312-5807866"],
["河北","保定","裕华","裕华西路55号","0312-2035070"],
["河北","保定","朝阳","朝阳路18号","0312-3078628"],
["河北","保定","国贸","涿州市范阳路南华冠购物中心一层","0312-3652381"],
["河北","保定","北国","裕华路北国商城","0312-5912585"],
["河北","保定","家兴","高碑店市南大街东侧家兴购物中心一层","0312-4992645"],
["河北","保定","开元","定州市中山中路商业街交口","0312-2316979"],
["河北","保定","宜佳","涿州市范阳东路与东兴街交口宜佳旺商厦一层","0312-5528826"],
["河北","保定","天鹅","天鹅中路1399号一层","0312-5956153"],
["河北","保定","青年","永华北大街与五四中路交口文华国际底商一、二层","0312-5020165"],
["河北","保定","祁州","安国市药市大街与药华路交口宜佳旺广场一层","0312-3524616"],
["河北","保定","现代","高碑店市迎宾路与兴华路交口现代商厦","0312-2806021"],
["河北","保定","钟楼","裕华西路233号","0312-2061211"],
["河北","保定","高阳","高阳县朝阳路中段北侧高阳大世界购物中心","0312-5652660"],
["河北","保定","保百","高开区朝阳北大街916号保百购物广场南楼一层","0312-3361962"],
["河北","保定","双隆","徐水县康明中路双隆商厦一层","0312-5854066"],
["河北","保定","阳光","高碑店市白沟镇富民路以西友谊路以南阳光国际项目","0312-2856368"],
["河北","保定","华创","裕华西路华创商厦一层","0312-2083706"],
["河北","保定","先天下","朝阳北大街和天鹅中路交口北国先天下购物广场地下一层","0312-5905808"],
["河北","保定","定兴","定兴县通兴东路南侧69号","0312-6885860"],
["河北","保定","唐县","唐县向阳街广场西侧中天商厦","0312-6439058"],
["河北","保定","豪庭","涿州市范阳中路豪庭时代广场一层","0312-3658300"],
["河北","保定","惠友店KFC","徐水县振兴中路3号一 二层","0312-8666865"],
["河北","保定","保津DT","高碑店市白沟镇津保公路精品鞋帽城门前广场","0312-2893630"],
["河北","保定","环城餐厅","环城北路地下商场大润发超市内肯德基","0312-2021645"],
["河北","张家口","张家口世博店KFC","站前西大街世博广场一层肯德基","0313-2519188"],
["河北","张家口","宣化路","宣化路28号","0313-2016084"],
["河北","张家口","南大街","宣化南大街吉龙商厦一层","0313-3059292"],
["河北","张家口","帝达","胜利北路8号帝达购物广场1层","0313-2123535"],
["河北","张家口","金鼎","西坝岗路59号","0313-8187758"],
["河北","张家口","明德","明德南街63号一层","0313-8058869"],
["河北","张家口","同盛","宣化区南大街同盛大厦一层","0313-3013938"],
["河北","张家口","盛华","胜利路241号新东亚时代广场乐购一层","0313-4111568"],
["河北","张家口","滨河","桥东区滨河北路一、二层","0313-2263838"],
["河北","承德","承德丰宁店KFC","丰宁满族自治县大阁镇新丰路57号一层","0314-8081015"],
["河北","承德","白天鹅","南营子大街与三条胡同交口天成永兴大厦2期","0314-2070107"],
["河北","承德","翠桥","南营子大街裘翠楼一层","0314-2069155"],
["河北","承德","围场","围场满族蒙古族自治县木兰中路308号","0314-7519233"],
["河北","承德","平泉","平泉县平全镇八沟大街迎宾路南侧水岸华庭第三座38号商铺","0314-6057277"],
["河北","承德","双滦","双滦区锦绣城宜家旺广场东侧","0314-4225383"],
["河北","承德","双安","南营子大街塞北宾馆一层","0314-2139387"],
["河北","承德","宽城","宽城满族自治县宽广超市广场店","0314-6864458"],
["河北","承德","西大街","西大街大庆路2号","0314-2039476"],
["河北","承德","承德世纪城DT店KFC","迎宾路世纪城小区三期4号楼","0314-2518871"],
["河北","沧州","荣盛店KFC","黄河西路与开元南大道交口荣盛国际购物广场一层","0317-2135081"],
["河北","沧州","蕾莎店KFC","任丘市裕华路与会战道交口蕾莎城市广场一层","0317-3333651"],
["河北","沧州","华油店KFC","任丘市会站道华北石油购物广场一层","0317-2723228"],
["河北","沧州","泰昌店KFC","献县新华路与水源路交口泰昌购物广场一层","0317-4629699"],
["河北","沧州","华北","新华中路华北商厦","0317-3013805"],
["河北","沧州","顺城","南北大街顺城商厦一层","0317-3561898"],
["河北","沧州","燕春","任丘市裕华中路30号","0317-2985999"],
["河北","沧州","颐和","朝阳大街解放西路交口","0317-2025777"],
["河北","沧州","耀华","黄骅市渤海路中段耀华商厦一层","0317-5223050"],
["河北","沧州","河间","河间市城苑中路红太阳购物广场一层","0317-3666962"],
["河北","沧州","泊头","泊头市解放路与建设大街交口东美购物广场","0317-8178011"],
["河北","沧州","解放路","解放西路颐和广场9#华北商厦一层","0317-2207698"],
["河北","沧州","新绿洲","任丘市渤海路北侧华油二连公司基地西侧新绿洲时代广场","0317-3377366"],
["河北","沧州","东体","解放中路78号意明商厦一层","0317-3068765"],
["河北","沧州","华北餐饮层","新华路华北商厦八楼","0317-5298059"],
["河北","沧州","东光","沧州东升路与茧城大街交口信合商厦1楼肯德基","0317-7591399"],
["河北","沧州","华贸","黄骅市信誉楼大街与文化路交口华贸商业广场","0317-7555868"],
["河北","沧州","青县DT","青县新华中路59号南海国际商铺","0317-4304518"],
["河北","沧州","凤城餐厅","青县新华路北侧104国道东侧青县信发商厦一层","0317-4303399"],
["河北","廊坊","万向城餐厅","河北省廊坊市广阳道南侧裕华路西侧(万向城项目)一、二层","15076655031"],
["河北","廊坊","幸福广场店KFC","固安县永定路西侧新昌街北侧幸福广场3号楼一层","0316-7026119"],
["河北","廊坊","明珠","新华路43号商业明珠大厦1楼","0316-2027440"],
["河北","廊坊","时代","新世纪步行街新华路161号","0316-2091560"],
["河北","廊坊","三河","三河市京哈公路建兴南路交口华联商厦一层。","0316-3223948"],
["河北","廊坊","永华","银河南路与永华道交口","0316-2680531"],
["河北","廊坊","兴达","三河市燕郊学院路南公园西侧白花超市一、二层","0316-3322533"],
["河北","廊坊","爱民","爱民东道与和平路交口新朝阳购物中心一、二层","0316-2230213"],
["河北","廊坊","香百","香河县新开街北侧京东商住综合楼内一层","0316-8581373"],
["河北","廊坊","金宝","霸州市建设道和兴华路交口兴华金宝斋商厦一层","0316-7959980"],
["河北","廊坊","凯顺","霸州市北环路与经企路交口大华世纪商厦一、二层","0316-7600217"],
["河北","廊坊","东方大学城","开发区东方大学城二期餐饮楼5号楼","0316-2563888"],
["河北","廊坊","康庄道","新华路康庄道乐购超市一层","0316-2081361"],
["河北","廊坊","燕京","三河市燕郊开发区京哈公路与迎宾路交口京客隆超市","0316-3316713"],
["河北","廊坊","兴华","霸州市兴华路与金康道交口东北角华联商厦一层","0316-7859081"],
["河北","廊坊","新源","新开路与解放道交口新源天街商城A区人人乐购物广场一层","0316-2388587"],
["河北","廊坊","廊坊万达","新华路与金光道交口处万达广场北入口","0316-2386079"],
["河北","廊坊","乐都","银河北路119号乐都百货","0316-2632176"],
["河北","廊坊","鑫乐汇","三河市燕郊开发区神威北路与燕灵路交口鑫乐汇广场","0316-3090035"],
["河北","廊坊","三河天洋城店KFC","三河市燕郊开发区规划路南侧天洋城天洋广场南区商业一层","0316-3097528"],
["河北","廊坊","文安店KFC","文安县城内大十字街盛业商厦一层","0316-5222626"],
["河北","廊坊","三河星罗城店KFC","三河市燕郊开发区燕顺路星罗城购物中心一层","0316-3090860"],
["河北","廊坊","金万福DT","三河市燕郊镇迎宾路口西侧、102国道北侧金万福购物广场","0316-3095288"],
["河北","衡水","怡水园店KFC","人民路北侧、宝云街西侧衡百国际购物中心负一层","0318-5106510"],
["河北","衡水","天鸿尚都店KFC","大庆路北侧、榕花街西侧、兴旺街东侧天鸿城市购物广场","0318-2661013"],
["河北","衡水","衡百","人民东路106号","0318-2160077"],
["河北","衡水","惠祥","安平县中心路与为民街交口惠祥商城一层","0318-7799000"],
["河北","衡水","爱特","和平西路206号 西侧附楼门店一二层","0318-2666569"],
["河北","衡水","吉美","胜利路与康宁街交叉口东北角吉美超市","0318-2665181"],
["河南","郑州","机场一餐厅","新郑国际机场T2航站楼四层出发厅北侧","0371-89906790"],
["河南","郑州","机场二餐厅","新郑国际机场T2航站楼二层到达厅北侧","0371-89906795"],
["河南","郑州","郑州北金餐厅","北环道南、文化路东瀚海北金商业中心一、二层总计面积约为375平方米的房产","0371-89913289"],
["河南","郑州","郑州泰隆餐厅","碧沙岗北街12号","0371-67444242"],
["河南","郑州","郑州东风路餐厅","东风路北和小铺路西U尚城(东风路3号)一、二层","0371-55151151"],
["河南","郑州","郑州车站北口餐厅","二马路82号火车站原北出站口一层","0371-66930799"],
["河南","郑州","郑州郑大餐厅","二七区大学路北路30号中苑名都2-7#楼一层","0371-55683346"],
["河南","郑州","郑州万象城餐厅","二七区郑州万象城B118+B121号商铺","0371-86525668"],
["河南","郑州","郑州紫荆餐厅","管城区紫荆山路58号一层\负一层","0371-66280293"],
["河南","郑州","郑州二七万达餐厅","航海路南、大学路西德万达广场内的2031号商铺","0371-55156700"],
["河南","郑州","郑州航海路餐厅","航海路与银莺路交叉口西南角福都购物广场东北角一层","0371-86597228"],
["河南","郑州","郑州客运总站餐厅","航海路郑州郑州客运总站一层东侧","0371-68819386"],
["河南","郑州","郑州中环餐厅","花园路59号1层","0371-63386390"],
["河南","郑州","郑州西元餐厅","建设西路与秦岭路交汇处西北侧西元国际广场一层","0371-86520108"],
["河南","郑州","郑州金水路餐厅","金水路288号曼哈顿广场A区","0371-60190554"],
["河南","郑州","郑州名门广场餐厅","金水路和玉凤路交叉口家乐福卖场一层","0371-69513898"],
["河南","郑州","郑州丰庆餐厅","金水区北环道北、丰庆路东(中方园)52幢(金座)沃美莱商场一层","0371-86631971"],
["河南","郑州","郑州国贸中心餐厅","金水区花园路西、农业路南4号楼1层A1042、2层A2001","0371-87093972"],
["河南","郑州","郑州大石桥餐厅","金水区南阳路324号院商业群房一层","0371-63883362"],
["河南","郑州","郑州北环餐厅","金水区文化路126号","0371-63569012"],
["河南","郑州","郑州经三路餐厅","经三路与农业路","0371-69353630"],
["河南","郑州","郑州客运南站餐厅","南三环路与京广路交叉口东南角郑州客运南站一层旅客进站口西侧","0371-86618332"],
["河南","郑州","郑州农业路餐厅","农业路61号西藏自治区驻郑州干部休养所综合楼一层","0371-63697218"],
["河南","郑州","郑州商都餐厅","商都路5号1-3层","0371-66721288"],
["河南","郑州","郑州陇海路餐厅","嵩山南路19号家世界购物广场一层","0371-68663706"],
["河南","郑州","郑州人民餐厅","太康路72号1-2层","0371-66207952"],
["河南","郑州","郑州兴隆餐厅","兴隆街9号1-2层","0371-66760556"],
["河南","郑州","郑州银基餐厅","一马路8号一层和二层","0371-66975198"],
["河南","郑州","郑州中心站餐厅","长途汽车站北侧候车区二楼东南角","0371-66988175"],
["河南","郑州","郑州凤凰城餐厅","郑汴路和商贸路交汇处西南角升龙凤凰城A区1-2层","0371-86537085"],
["河南","郑州","郑州东广场餐厅","郑州车站东广场北楼一层、二层","0371-66950958"],
["河南","郑州","郑州火车站站内餐厅","郑州车站东广场进站口二层北侧","0371-66979572"],
["河南","郑州","郑州郑州东站一餐厅","郑州火车站出战层(一层)北侧C11商铺","0371-56807110"],
["河南","郑州","郑州西广场餐厅","郑州火车站西站房一层","0371-86008939"],
["河南","郑州","郑州青年路餐厅","中牟县青年路105号中牟一层、二层","0371-60218116"],
["河南","郑州","郑州中原路餐厅","中原区秦岭路交汇处中原万达商业广场","0371-86671218"],
["河南","郑州","郑州棉纺路餐厅","中原区桐柏路与棉纺路交汇处西北角锦艺国际一层、二层","0371-86551521"],
["河南","郑州","郑州众意餐厅","众意路东、商务西一街西丹尼斯百货七天地(G段)一、二层","0371-86502959"],
["河南","郑州","郑州方圆创世餐厅","紫金山路与陇海路交叉口东北角被称为“方圆国际广场”地块上房产一层","0371-86132696"],
["河南","郑州","上街店","郑州市上街区中心路32号亚星城市广场一楼","0371-68136868"],
["河南","郑州","嵩山路店","郑州市二七区嵩山路与安康路交叉口兰德置业广场一、二楼","0371-86626031"],
["河南","开封","开封金明餐厅","开发区晋安路以北、金明大道以西开元名都商业广场一层","0371-23238999"],
["河南","开封","开封振河餐厅","寺后街1号1-2层","0371-25965777"],
["河南","开封","开封自由路餐厅","自由路与解放大道交叉口东南","0371-28889977"],
["河南","开封","龙亭店","西大街新都汇购物广场1-2层","0371-22881998"],
["河南","洛阳","九都路餐厅","涧西区南昌路九都路口西南角名门广场一二层","15670308866"],
["河南","洛阳","郑州开元大道餐厅","开元大道宝龙广场A区一层A1-02号和A1-03、A1-11商铺局部","0379-3536353"],
["河南","洛阳","洛阳洛阳火车站餐厅","洛阳火车站候车厅三角厅一层","0379-3151103"],
["河南","洛阳","洛阳龙门站餐厅","洛阳龙们火车站一层候车厅西侧","0379-18238857071"],
["河南","洛阳","洛阳南昌路餐厅","南昌路与丽春路交叉口达玛格利购物广场(王府井购物中心)负一层。","0379-69959722"],
["河南","洛阳","洛阳凯旋广场餐厅","中州东路245号西工区步行商业街一层、二层的房产","0379-9863996"],
["河南","洛阳","洛阳十字街餐厅","中州东路387号(中州东路与兴华街交汇处东南角)八角金街1-2层","0379-3997998"],
["河南","洛阳","唐宫路店","解放路与唐宫路交叉口西北角家乐福超市一楼肯德基餐厅","0379-63266266"],
["河南","洛阳","景华店","涧西区景华路26号一层肯德基(大张盛德美1层,与天津路交叉口)","0379-64923899"],
["河南","平顶山","中兴店","新华区中兴路30号金三角商场一楼","0375-7087699"],
["河南","平顶山","双丰店","新华区光明路与曙光街交叉口双丰商城一、二楼","0375-7077699"],
["河南","平顶山","体育店","新华区体育路文化宫东门口","0375-2992996"],
["河南","安阳","安阳安阳嘉信餐厅","北关区人民大道86号嘉信茂广场","0372-5068277"],
["河南","安阳","滑县卫河路餐厅","滑县大功西路西侧、卫河路北侧、惠民路东温尔顿购物中心一层、二层的房产","0372-6279112"],
["河南","安阳","安阳解放餐厅","解放路拱辰广场一层13-14号","0372-5973777"],
["河南","安阳","林州太行路餐厅","林州市市区太行路中段路东(31号)林州市尚城百货一层、二层","0372-26181662"],
["河南","安阳","安阳安阳万达餐厅","文峰区中华路与迎春东街交叉口东南角万达广场(1030商铺)一层","0372-6268778"],
["河南","安阳","安阳彰德餐厅","文峰中路中环丹尼斯百货彰德府店一层","0372-3385891"],
["河南","鹤壁","鹤壁银鹤餐厅","鹤煤大道(鹤壁迎宾馆对面)银兴国际广场一层、二层","0392-3220660"],
["河南","鹤壁","鹤壁红旗餐厅","山城区红旗街中段一层和二层","0392-2617588"],
["河南","新乡","辉县涌金路餐厅","辉县市共和路和涌金大道交叉口西北角佳联国际华隆百货1-2层","0373-6839616"],
["河南","新乡","新乡新乡宝龙餐厅","金穗大道和新一街交叉口新乡宝龙城市广场一层1019、1021及1020部分商铺","0373-3807006"],
["河南","新乡","新乡原阳服务区餐厅","京港澳高速原阳服务区(西侧)服务区的房产","0373-7538087"],
["河南","新乡","新乡平原餐厅","平原路25号1层","0373-2812999"],
["河南","新乡","新乡劳动路餐厅","人民路与劳动路交叉路口东南角。","0373-3022978"],
["河南","新乡","新乡健康路餐厅","卫滨区健康路与胜利路交叉口西南角的假日购物广场一层","0373-2039515"],
["河南","新乡","长垣宏大餐厅","长垣县宏力大道与匡城路交叉口西南角万德隆商场一层","0373-8863928"],
["河南","焦作","焦作葵城餐厅","博爱县清化镇鄈城路中山路新世纪广场一层、二层","0391-2101938"],
["河南","焦作","焦作民主餐厅","解放路与民主路交汇处西南角一层","0391-2288098"],
["河南","焦作","焦作新时代餐厅","解放中路253号1-2层","0391-3280068"],
["河南","焦作","焦作沁阳怀府餐厅","沁阳市怀府中路玫瑰城A栋一、二层","0391-5908086"],
["河南","焦作","焦作塔南餐厅","塔南路摩登商业步行街2号楼一层卖场内","0391-3376860"],
["河南","濮阳","濮阳昆濮餐厅","黄河东路与长庆路口西南角昆濮时代广场3号楼一层和二层。","0393-8219087"],
["河南","濮阳","濮阳中心广场餐厅","建设路西段路南一、二层","0393-6212858"],
["河南","濮阳","濮阳胜利餐厅","京开大道与胜利路交汇处一层","0393-8157979"],
["河南","濮阳","濮阳春天餐厅","任丘路176号1层","0393-6617567"],
["河南","濮阳","濮阳长庆路餐厅","长庆路与任丘路交叉口西南角丹尼斯大楼一层、二层","0393-7770577"],
["河南","许昌","许昌许昌休息区餐厅","京珠高速公路河南省许昌服务区东区一层","0374-6849629"],
["河南","许昌","许昌颖昌餐厅","南关大街中段胖东来生活广场一层。","0374-2330155"],
["河南","许昌","郑州七一路餐厅","七一路和六一路交叉口胖东来国际商城1-2层。","0374-2662709"],
["河南","许昌","许昌禹王餐厅","禹州市禹王大道和药城路交叉口东南角禹王广场A区一层","0374-8607680"],
["河南","许昌","许昌长社路餐厅","长葛市长社路和文化路交叉口西南角一峰城市广场一层、二层","0374-6365588"],
["河南","漯河","漯河中汇餐厅","交通路与人民路交汇处漯河中汇广场4号楼1-2层","0395-5999777"],
["河南","三门峡","三门峡六峰餐厅","黄河路与六峰路交汇处西南角建业新天地银座1-2层","0398-3690899"],
["河南","南阳","南阳红都餐厅","人民路56号一、二层","0377-3269188"],
["河南","南阳","大统店","人民路与中州路交叉口大统百货一、二楼","0377-65020777"],
["河南","南阳","新华店","南阳市卧龙区新华路与工业路交叉口新华城市广场一、二楼","0377-63206777"],
["河南","商丘","商丘凯旋餐厅","凯旋路与民主路交汇处东南角的商丘中环新生活广场1号楼1 - 2层","0370-2221978"],
["河南","商丘","商丘团结路餐厅","神火大道与团结路交界处天朝俊园附楼一层","0370-2580277"],
["河南","商丘","永城沱滨路餐厅","永城市中原路和沱滨路交叉口东南角先帅新天地一层总计使用面积为330平方米的房产","0370-5188169"],
["河南","信阳","信阳东方红餐厅","鲍氏街B区商业中心1层(西亚和美广场1层)东北角","0376-6212228"],
["河南","信阳","信阳银珠餐厅","东方红大道和中山路交叉口联华银珠广场东北角1-2层","0376-6228801"],
["河南","信阳","信阳国源餐厅","固始县红苏路中段国源购物中心一层","0376-4961177"],
["河南","信阳","光山金博大餐厅","光山县海营路与正大街交叉口东南角金凯帝城市广场1号地商住楼一层的房产","0376-6177709"],
["河南","信阳","信阳千盛餐厅","新华东路121号一层","0376-6380877"],
["河南","信阳","信阳信阳站餐厅","新华路信阳火车站一层","0376-6211769"],
["河南","周口","周口顺达餐厅","七一路和五一路交汇处西北角万顺达商厦1-2层","0394-8268818"],
["河南","驻马店","驻马餐厅新乐山餐厅","解放中路乐山商场一层总计面积约为410平方米的房产","0396-2990688"],
["河南","驻马店","驻马餐厅乐山二餐厅","与风光二巷交汇处东北角新玛特广场一、二层","0396-2369166"],
["河南","济源","济源济水大街餐厅","济水大街北,建设银行西印象新城大润发一层总计使用面积为342平方米的房产","0391-5561902"],
["湖北","武汉","儿童医院餐厅","江岸区香港路153号8栋1-2层","15994291726"],
["湖北","武汉","海天餐厅","武汉市汉阳区龙阳大道582号海天欢乐购商城一楼","027-84953280"],
["湖北","武汉","启新","江汉路112号","027-15727079060"],
["湖北","武汉","维新","武昌区解放路407号","027-15727079071"],
["湖北","武汉","汉街店","洪山区万达汉街商业中心","027-027-87130320"],
["湖北","武汉","阳逻广场","新洲区阳逻街阳光大道389号阳逻广场一楼","027-15202746919"],
["湖北","武汉","一六一","江岸区黄浦路68号","027-15202777051"],
["湖北","武汉","奥山","青山区和平大道奥山世纪城1层","027-15202777942"],
["湖北","武汉","武胜","中山大道244号家乐福内","027-15727073292"],
["湖北","武汉","鲁巷广场","洪山区珞瑜路726号","027-15727079017"],
["湖北","武汉","汉口城市广场店KFC","江岸区后湖街建设大道延长线与幸福大道交汇处汉口城市广场C区","027-15727035027"],
["湖北","武汉","泛海城市广场店KFC","江岸区王家墩中央商务区一层","027-58909522"],
["湖北","武汉","阅马场","武昌区首义小区45栋1层1号","027-15727073502"],
["湖北","武汉","民意","中山大道285号美奇国际大厦1楼","027-85822116"],
["湖北","武汉","花桥","江岸区花桥一村15号一、二层","027-15727073051"],
["湖北","武汉","佰港城店KFC","南湖区文治街佰港城一层","027-87802510"],
["湖北","武汉","六零六店KFC","武昌和平大道750号赵家墩绿地金融国际606一层","027-86732750"],
["湖北","武汉","银墩路店KFC","发展大道与银墩路交汇处汉口火车站广场西侧欧亚达家居国际广场一层、负一层","027-85673156"],
["湖北","武汉","金银潭店KFC","东西湖区金银潭大道永旺梦乐城","027-59608480"],
["湖北","武汉","循礼门","江汉路257号","027-15727073219"],
["湖北","武汉","中北路","中北路1号家乐福超市","027-87277882"],
["湖北","武汉","东湖店KFC","武汉市东湖风景区","027-86839910"],
["湖北","武汉","齐安大道店KFC","新洲区齐安大道号一层1183、1184号商铺二层2170-2175号商铺","027-89809026"],
["湖北","武汉","东吴大道店KFC","东西湖区吴家山街东吴大道以南三秀路以西一层二层","027-83292753"],
["湖北","武汉","武汉宜家店KFC","硚口区长宜路1号购物中心荟聚竹叶海购物中心","027-84670586"],
["湖北","武汉","硚口路店KFC","硚口区沿河大道以北硚口路西侧(金三角B地块星汇维港项目)","027-59002130"],
["湖北","武汉","武汉徐东","武昌区徐东大街18号销品茂一层","027-15727079162"],
["湖北","武汉","钢都花园","青山区园林路中百仓储购物广场一楼(123街坊50栋1层)","027-15727078971"],
["湖北","武汉","水果湖","武昌区水果湖路19-21号","027-027-87895650"],
["湖北","武汉","广埠屯","洪山区珞瑜路312号","027-15727073127"],
["湖北","武汉","王府","三眼桥路26号","027-82618205"],
["湖北","武汉","剑桥","东湖新技术开发区珞瑜路叶麻店剑桥春天B栋一层","027-15727079021"],
["湖北","武汉","汉口火车站","江汉区汉口火车站东侧金敦街15附17财神广场一层","027-85881317"],
["湖北","武汉","沙湖","中山路螃蟹岬379号一层","027-15727072932"],
["湖北","武汉","五支沟","东西湖区吴家山街东吴大道83号一层","027-15727079053"],
["湖北","武汉","光谷家乐福","东湖新技术开发区珞瑜路光谷世界城","027-15727073151"],
["湖北","武汉","天河机场","黄陂区天河机场T2航站楼10号门","027-65687302"],
["湖北","武汉","光谷","东湖开发区珞瑜路456号武汉光谷国际商会大厦1-2层","027-15727079083"],
["湖北","武汉","江夏中百","江夏区兴新街与复江道交汇处昌兴供销有限公司地下一层","027-15727079152"],
["湖北","武汉","融侨锦城","解放大道66号融侨锦城第2526栋商业12楼","027-15727073012"],
["湖北","武汉","汉西","桥口区解放大道201号(大武汉家装广场G座)","027-15727073100"],
["湖北","武汉","新佳丽","中山大道818号新佳丽购物广场","027-15727073192"],
["湖北","武汉","二七路","江岸区二七路二层","027-15727073562"],
["湖北","武汉","音乐学院","黄鹤楼接解放路257号(司隆百货内)","027-15727079015"],
["湖北","武汉","狮城","洪山区珞狮路449号一层","027-027-87386563"],
["湖北","武汉","新武昌火车站","武昌区武昌火车站架空层","027-15727073065"],
["湖北","武汉","民航新村","常青路168号中百仓储一楼","027-15727073162"],
["湖北","武汉","街道口","洪山区珞瑜路6号美美广场一二层","027-15727073092"],
["湖北","武汉","武大","洪山区珞珈山路19号武汉大学正门对面中商平价","027-15727073020"],
["湖北","武汉","武展","国际会展中心西下沉式广场","027-15727073261"],
["湖北","武汉","新世界徐东","洪山区徐东路7号凯旋门广场新世界百货1F、2F","027-15727079013"],
["湖北","武汉","常青花园","常青花园小区花园中路1-3号商业楼武商量贩一层","027-15727079182"],
["湖北","武汉","摩尔城沃尔玛","汉阳区龙阳大道特6号","027-84459512"],
["湖北","武汉","光谷国际","洪山区东湖开发区珞瑜路889号光谷国际广场","027-15727072961"],
["湖北","武汉","汉口火车站二","汉口火车站广场西侧台湾风情广场1层","027-15717121397"],
["湖北","武汉","黄陂广场","黄陂区黄陂大道387号黄陂广场一楼","027-15727079137"],
["湖北","武汉","菱角湖","唐家墩路5号菱角湖万达广场一楼","027-85748732"],
["湖北","武汉","青年路","青年路155号长源大厦1-2号(新一佳超旁)","027-15787078905"],
["湖北","武汉","光谷天地","关山大道519号以东光谷天地1F(关山大道和南湖南路交汇处)","027-15727073152"],
["湖北","武汉","后湖","江岸区后湖乡十大家村后湖大道百步亭商业中心一楼","027-15717102069"],
["湖北","武汉","太子湖KFC","经济开发区经开万达商业广场","027-15727014227"],
["湖北","武汉","世界城KFC","东湖新技术开发区步行街南路17号(光谷世界城西班牙风情街A座)","027-15727014227"],
["湖北","武汉","汉口火车站3","汉口火车站2楼候车厅餐饮区第2标段","027-15717135274"],
["湖北","武汉","吉庆街","江岸区江汉二路181号1-2层","027-82705361"],
["湖北","武汉","万松园","万松园路100号西园商业街一层","027-15717186354"],
["湖北","武汉","盘龙奥莱KFC","黄陂区盘龙大道51号百联奥特莱斯广场18号楼一层186号商铺","027-61195915"],
["湖北","武汉","付家坡","武珞路440号中南国际城1层","027-87843976"],
["湖北","武汉","武汉站","青山区武汉火车站东出站口","027-86516231"],
["湖北","武汉","四明路","东西湖四明路北冰洋城广场一楼","027-83238478"],
["湖北","武汉","宜佳广场","江夏区兴新街219号","027-81610509"],
["湖北","武汉","杨汊湖","杨汊湖姑嫂树路12号南国北都广场一楼","027-85382479"],
["湖北","武汉","波波城","洪山区雄楚大道卓刀泉南路交汇处“华瑞大厦”一楼","027-87386543"],
["湖北","武汉","新中南","中南路11号省外文书店一楼","027-68784697"],
["湖北","武汉","凯德广场","武胜路凯德广场负一层","027-83655178"],
["湖北","武汉","洪山广场","轨道交通2号线洪山广场站","027-87136069"],
["湖北","武汉","百步亭","百步亭花园路14号(简朴寨对面)","027-82339891"],
["湖北","武汉","汤逊湖","江夏区江夏大道梅南山居一层","027-15727073106"],
["湖北","武汉","京汉大道DT店","京汉大道东民巷展示厅1F","027-15727073273"],
["湖北","武汉","蔡甸世纪广场店DT","蔡甸区蔡甸街中核世纪广场KFC穿梭餐厅","027-69706397"],
["湖北","武汉","关山大道店","关山大道哈乐城1F","027-87572051"],
["湖北","武汉","钟家村餐厅","汉阳区汉阳大道139号肯德钟家村餐厅汉商一楼","027-84810543"],
["湖北","武汉","武商Mall","江汉区滑坡路武商摩尔负一层","027-15727073257"],
["湖北","武汉","玲珑汇餐厅","友谊大道与建设二路路口","027-86855853"],
["湖北","黄石","交通路","交通路29号1-2层","0714-0714-6255700"],
["湖北","黄石","黄石华夏","黄石大道678号","0714-0714-6205977"],
["湖北","黄石","黄石颐阳路","黄石大道515号","0714-0714-6264200"],
["湖北","黄石","大冶雨润","大冶市新冶大道38号","0714-8868915"],
["湖北","十堰","十堰中商","中商百货一楼(公元路81号)","0719-0719-8681872"],
["湖北","十堰","五堰","人民北路香港街18-1号永乐服饰广场一层","0719-0719-8651630"],
["湖北","十堰","二堰","人民南路28号永乐生活广场一层","0719-0719-8897856"],
["湖北","十堰","人民北路","六堰人民北路1号","0719-8120756"],
["湖北","宜昌","长阳龙舟大道餐厅","长阳土家族自治县龙舟坪镇一层","0717-5321699"],
["湖北","宜昌","宜昌城东大道","城东大道泰富广场21号一层","0717-6321109"],
["湖北","宜昌","解放路","解放路52号一层","0717-6757201"],
["湖北","宜昌","宜昌福久源店KFC","桔城路6号香山福九源一层","0717-6210277"],
["湖北","宜昌","汽车东站店KFC","伍家岗区伍家乡共强、共和村宜昌汽车中心站一层","0717-9701080"],
["湖北","宜昌","宜昌夷陵","夷陵路22号CBD购物中心一楼","0717-6838018"],
["湖北","宜昌","沿江路","伍家岗区沿江大道万达广场特166号一楼156-2号","0717-6589668"],
["湖北","宜昌","民主路","枝江市民主大道36号国贸大广场一楼","0717-4245266"],
["湖北","宜昌","小溪塔","夷陵区小溪塔夷兴大道地质大楼一楼","0717-7829277"],
["湖北","宜昌","宜昌西陵","夷陵大道58号","0717-6446388"],
["湖北","宜昌","宜都清江大道","宜都清江大道与长江大道交汇处雅斯国际广场1楼","0717-4830268"],
["湖北","宜昌","宜昌陶珠路","西陵区南门后街陶珠路一层","0717-6688583"],
["湖北","襄阳","襄阳春园店KFC","樊城区中原路一层","0710-3271556"],
["湖北","襄阳","樊城","长征路2号人民商场一楼","0710-3445427"],
["湖北","襄阳","襄樊长虹","长虹路民发商业广场A座一层","0710-3222203"],
["湖北","襄阳","襄樊中原","中原路卧龙新港一、二层","0710-3268505"],
["湖北","襄阳","高新区","长虹北路襄樊万达诸葛亮广场一、二层","0710-3809009"],
["湖北","襄阳","粉水路","谷城县城关镇粉水路70号","0710-7256568"],
["湖北","鄂州","万联餐厅","鄂州市滨湖路特1号(万联购物广场一层)","0711-3895017"],
["湖北","荆门","沙洋金水湾店KFC","沙洋县荷花南路金水湾步行街","18571961006"],
["湖北","荆门","京山轻机大道","京山县经济开发区轻机大道与新大道交叉美嘉购物广场一层","0724-7324111"],
["湖北","荆门","钟祥阳春街","钟祥市阳春大街57号一、二层","0724-4932277"],
["湖北","荆门","荆门时代分店","中天街肯德基31号","027-2366754"],
["湖北","孝感","云梦建设路店KFC","云梦县建设路孝武广场一楼","0712-4335338"],
["湖北","孝感","仙女大道店KFC","汉川市仙女大道18号中百仓储汉川购物广场一层","0712-8393626"],
["湖北","孝感","孝昌洪花大道店KFC","孝昌县孝昌县城区建设路与花园大道交汇处","0712-8301110"],
["湖北","孝感","汉川汉海国际KFC","湖北省汉川市仙女山街道办事处西正街45号","17763036297"],
["湖北","孝感","保丽","长征路家乐福1F","0712-0712-2365885"],
["湖北","孝感","孝感槐荫大道","槐荫大道大润发超市一楼","0712-2829818"],
["湖北","孝感","安陆永安","安陆市安陆紫金路永安广场","0712-5869488"],
["湖北","孝感","城站餐厅","城站路18号肯德基餐厅","0712-2838807"],
["湖北","荆州","荆州北京","沙区北京中路227号","0716-8114338"],
["湖北","荆州","荆州万达店KFC","荆州区北京西路508号万达广场一层1001A","0716-8800510"],
["湖北","荆州","荆州月亮湾","北京路173号","0716-8118381"],
["湖北","荆州","美佳华","北京路美佳华A128号","0716-8123936"],
["湖北","荆州","宏泰","公安县五九大道宏泰购物广场一层24号","0716-8118381"],
["湖北","荆州","洪湖宏伟路","洪湖市宏伟南路宝安商业广场一、二层","0716-2087336"],
["湖北","荆州","容城天骄","监利县容城大道90#容城天骄一二层","0716-18186566717"],
["湖北","荆州","皇叔","石首市皇叔街77号综合大楼一二层","0716-7283188"],
["湖北","荆州","荆州东门店","荆沙路与江津路交汇处中商平价广场一楼肯德基餐厅","0716-8463111"],
["湖北","荆州","荆州红门路店","江津中路204号大润发超市一楼","0716-8120678"],
["湖北","黄冈","盛地","红安县沿河路盛地沃尔玛广场一层二层","0713-3852388"],
["湖北","黄冈","蕲春漕河店KFC","蕲春漕河大道168号棕盛广场一楼","0713-0713-8978111"],
["湖北","黄冈","武穴北川路店KFC","湖北省黄冈市武穴市北川路城市广场一、二楼","0713-6245010"],
["湖北","黄冈","黄冈青砖湖店KFC","黄冈市黄州区西湖三路58号大润发超市一楼","0713-8885987"],
["湖北","黄冈","黄冈胜利街","奥康步行街A-1号","0713-0713-8673703"],
["湖北","黄冈","黄冈中商","黄洲区赤壁大道2号(中商百货旁)","0713-8395697"],
["湖北","黄冈","鼓楼","麻城市陵园路中百超一楼","0713-2919026"],
["湖北","咸宁","通城湘汉路店KFC","通城县隽水镇湘汉路","0715-0715-4758897"],
["湖北","咸宁","咸宁花坛","温泉区淦河大道温泉第一街1号楼","0715-0715-8267606"],
["湖北","咸宁","鱼水路","咸安区鱼水路233号大富源商业中心","0715-0715-8376837"],
["湖北","咸宁","宝成路","赤壁市宝成路与沿江大道交汇处原家家旺一层","0715-5220123"],
["湖北","随州","随州曾都","解放路大十字街南角一层","0722-3222999"],
["湖北","随州","随州清河","清河区烈山大道与清河路交汇处","0722-3335677"],
["湖北","广水","广水东正街店KFC","东大街与广安路交汇处华沣大厦一楼","0722-6239911"],
["湖北","恩施","恩施航空大道店","航空大道中百仓储一层","0718-8210125"],
["湖北","恩施","恩施博文","舞阳大道169号博文广场武商量贩一楼","0718-8249997"],
["湖北","利川","利川东城路","东城路88号东方城一楼","0718-7214222"],
["湖北","仙桃","仙桃城市广场店","仙桃市大新路44号仙桃城市广场一层","0728-3279709"],
["湖北","仙桃","大新","仙桃市沔阳路43号商城大厦一楼","0728-3203906"],
["湖北","仙桃","仙桃大道店KFC","仙桃市仙桃大道中百一楼","0728-3333057"],
["湖北","潜江","潜江水牛城","潜江市水牛城章华中路水牛城商贸中心一层","0728-6292277"],
["湖北","潜江","园林路","潜江市园林路新时代广场1-2层","0728-6237677"],
["湖北","天门","天门接官路店KFC","天门市钟惺大道与接官路交汇处世贸中心","0728-5358677"],
["湖北","天门","鸿渐路","天门市人民大道中50号","0728-5268833"],
["山西","太原","南站餐厅","北营太原南客站二层","13754846254"],
["山西","太原","朝阳餐厅","朝阳街15号东方红商厦一层肯德基餐厅","0351-4626375"],
["山西","太原","服装城餐厅","朝阳街88号同至人购物中心底层","0351-2339131"],
["山西","太原","府东餐厅","府东街18号北京华联超市一层","0351-4735502"],
["山西","太原","亲贤餐厅","佳地花园墨苑底商一层肯德基餐厅","0351-7075988"],
["山西","太原","北宫餐厅","解放北路279号(旗舰小商品物流港一层)肯德基餐厅","0351-3241200"],
["山西","太原","府西餐厅","解放路63号外文书店一层肯德基餐厅","0351-4959269"],
["山西","太原","铜锣湾餐厅","柳巷北路铜锣湾广场04号楼一层肯德基餐厅","0351-8200551"],
["山西","太原","东民餐厅","柳巷南路86号东晋大厦一层肯德基餐厅","0351-4053807"],
["山西","太原","赛博餐厅","南内环街107号迎泽数码中心商住楼一层","0351-7779809"],
["山西","太原","平阳餐厅","平阳路144号肯德基餐厅","0351-7243462"],
["山西","太原","五一餐厅","起凤街5号肯德基餐厅","0351-4188518"],
["山西","太原","千峰餐厅","千峰南路56号美特好超市一层肯德基餐厅","0351-6586625"],
["山西","太原","盛世餐厅","三墙路裕德东里10号东大盛世华庭一层肯德基餐厅","0351-3239228"],
["山西","太原","双西餐厅","双塔西街50号安业商务楼前","0351-8717205"],
["山西","太原","太原机场餐厅","太原武宿机场2号航站楼一层到达厅肯德基餐厅","0351-5605266"],
["山西","太原","桃园餐厅","桃园北路90号肯德基餐厅","0351-4127767"],
["山西","太原","怡丰餐厅","桃园南路与府西街交叉怡丰城肯德基餐厅","0351-4227891"],
["山西","太原","体育路餐厅","体育路许坦西街69号美特好一层肯德基","0351-7775878"],
["山西","太原","下元餐厅","万柏林区和平南路5号夏园精品商城一层","0351-6110071"],
["山西","太原","西山餐厅","万柏林区西矿街301肯德基餐厅","0351-6967765"],
["山西","太原","西华餐厅","万柏林区迎泽西大街南寒街办南寒村德馨花园底商一层","0351-7773358"],
["山西","太原","三晋餐厅","五一广场三晋国际饭店一层肯德基餐厅","0351-8224588"],
["山西","太原","桥头街餐厅","五一路77号肯德基餐厅","0351-8200155"],
["山西","太原","康宁餐厅","小店区康宁与真武路交叉口万玛购物广场一、二层","0351-5666298"],
["山西","太原","人民餐厅","小店区人民南路1号肯德基餐厅","0351-7178033"],
["山西","太原","兴华餐厅","兴华街299号山姆士超市一层肯德基餐厅","0351-6650045"],
["山西","太原","坞城路餐厅","学府街98号肯德基餐厅","0351-7630961"],
["山西","太原","漪汾餐厅","漪汾街88号华宇购物广场一层肯德基餐厅","0351-6168352"],
["山西","太原","新迎泽餐厅","迎泽大街228号大南门十字路口西南角一二层肯德基餐厅","0351-4076262"],
["山西","太原","盛嘉餐厅","迎泽大街2号盛嘉手机广场一层肯德基餐厅","0351-8260389"],
["山西","太原","迎西餐厅","迎泽大街98号一二层","0351-6064611"],
["山西","太原","长风餐厅","长风街859号沃尔玛超市-1层肯德基餐厅","0351-7526690"],
["山西","太原","千禧餐厅","长风街千禧实际广场地下一层家乐福超市内","0351-7770080"],
["山西","太原","世贸餐厅","长治路111号山西世贸中心一层肯德基餐厅","0351-7219223"],
["山西","太原","太原龙湖店KFC","解放路175号万达广场二层","0351-3107568"],
["山西","太原","建设店","小店区建设南路351号肯德基餐厅","0351-2963661"],
["山西","大同","华严餐厅","仿古街与教场街交叉口新东方尚街商厦一层肯德基餐厅","0352-5325088"],
["山西","大同","平泉餐厅","矿务局校南街与平泉路十字交叉口肯德基餐厅","0352-4208066"],
["山西","大同","武定餐厅","魏都大道与武定路交叉口(金湖国际外围)一二层肯德基餐厅","0352-5324560"],
["山西","大同","西环餐厅","西环路大庆路口华林新时尚商场一层","0352-5352366"],
["山西","大同","熙成餐厅","新建西路华林新天地东一二层肯德基餐厅","0352-5326266"],
["山西","大同","尚都餐厅","永泰南路尚都一二层肯德基","0352-5328041"],
["山西","大同","永泰餐厅","永泰南路永泰航空广场地下一层肯德基餐厅","0352-5328155"],
["山西","大同","大同和宁店KFC","开源街南侧和宁街北侧京都国际广场一二层","0352-5376515"],
["山西","大同","新天地店","大同矿务局同泉路华亿新天地一层肯德基","0352-4029969"],
["山西","阳泉","天利餐厅","北大西街天利购物广场肯德基餐厅","0353-4045551"],
["山西","阳泉","滨河餐厅","桃北东路滨河世纪城阳泉新天地一区肯德基餐厅","0353-2936513"],
["山西","阳泉","桃河餐厅","桃南东路89号肯德基餐厅","0353-2016612"],
["山西","阳泉","金街店","城区南大街269号金街购物中心1层","0351-2950100"],
["山西","长治","八一餐厅","八一百货大楼一层肯德基餐厅","0355-3501891"],
["山西","长治","英雄餐厅","英雄中路嘉汇广场肯德基餐厅","0355-3509891"],
["山西","长治","十字街餐厅","英雄中路森日春天一层肯德基餐厅","0355-2228811"],
["山西","晋城","国贸餐厅","红星街与泽州路交叉口 国贸大厦 B国贸大厦座","0356-2052131"],
["山西","晋城","黄华餐厅","黄华街圣亚服饰广场一二层肯德基餐厅","0356-3051227"],
["山西","晋城","新市餐厅","新市西街银都商厦一层肯德基餐厅","0356-3041425"],
["山西","朔州","世纪餐厅","北关路金龙商业街北口一二层肯德基餐厅","0349-2079272"],
["山西","晋中","汇通餐厅","榆次区汇通路378号肯德基餐厅","0354-2665700"],
["山西","晋中","榆百餐厅","榆次区粮店街69号肯德基餐厅","0354-3282512"],
["山西","晋中","晋中大学城餐厅","榆次区文化街与定阳路十字万科朗润商业广场A1号一层肯德基","0354-3998273"],
["山西","运城","河东餐厅","河东东大街今日国际商城一层肯德基餐厅","0359-6386202"],
["山西","运城","学苑餐厅","河东东街沃尔玛购物广场1层","0359-2621239"],
["山西","运城","运百餐厅","解放北路152号运城百货大楼一二层肯德基餐厅","0359-2060810"],
["山西","忻州","和平餐厅","和平路开莱欣悦广场一层肯德基餐厅","0350-3809038"],
["山西","忻州","长征餐厅","新建南路1号肯德基餐厅","0350-3023780"],
["山西","临汾","尧都餐厅","鼓楼小区G区6号楼东14、15、16号","0357-2166886"],
["山西","临汾","鼎尚餐厅","红旗街唐尧路中段鼎尚时代广场一层","0357-4910595"],
["山西","临汾","新工贸餐厅","解放西路工贸大厦一层肯德基餐厅","0357-2019260"],
["山西","临汾","新东城餐厅","尧都区解放东路81号(美都会商场一层)","0357-8675511"],
["山西","吕梁","龙凤餐厅","离石区贺昌路361号同至人购物中心一层肯德基","0358-3326880"],
["山西","吕梁","兴隆餐厅","离石区永宁中路街心公园东侧肯德基餐厅","0358-8235272"],
["陕西","西安","东客站餐厅","灞桥区纺北路、纺渭河路以西纺织城交通枢纽","029-89512177"],
["陕西","西安","新旺角餐厅","灞桥区纺正街和纺五路十字西南角新旺角一层","029-83627737"],
["陕西","西安","东关餐厅","碑林区东关正街东方星苑一、二层","029-82492524"],
["陕西","西安","中贸餐厅","碑林区南稍门十字西北角中贸广场一层","029-68251575"],
["陕西","西安","兴庆餐厅","碑林区兴庆路28号华润万家超市一二层","029-83651960"],
["陕西","西安","万达餐厅","碑林区雁塔路北段8号万达商业广场肯德基餐厅","029-85560900"],
["陕西","西安","钟楼餐厅","北大街1号钟楼邮政大楼北侧翼一、二层肯德基餐厅","029-87378156"],
["陕西","西安","秦汉唐餐厅","大雁塔南广场秦汉唐商业街一层肯德基餐厅","029-89133990"],
["陕西","西安","文景餐厅","凤城五路与文景路十字东南角豪佳花园一楼","029-86523476"],
["陕西","西安","唐延餐厅","高新街高新区唐延路与南二环交叉位置(莱安逸境)","029-88763202"],
["陕西","西安","科创餐厅","高新区科创路枫叶大厦B0103.B0203(高新一中对面)","029-81294354"],
["陕西","西安","公园餐厅","公园路与长乐东路十字东北角阳光超市 一层","029-82550910"],
["陕西","西安","东仪餐厅","含光路与电子二路交汇处家世界购物中心一层肯德基餐厅","029-88729029"],
["陕西","西安","汉城餐厅","汉城南路路西侧旭景新港10号一层","029-8954838"],
["陕西","西安","东新餐厅","解放路与东新街十字东南角一、二层肯德基","029-87436764"],
["陕西","西安","赛高餐厅","经济技术开发区凤城五路富力广场南侧一二层","029-86115051"],
["陕西","西安","未央餐厅","经济开发区未央路赛高商务港一层","029-86528427"],
["陕西","西安","大唐餐厅","劳动南路大唐西市大润发内一层","029-88698375"],
["陕西","西安","莲湖餐厅","莲湖路25号莲湖酒店一二层","029-87321921"],
["陕西","西安","新世界餐厅","莲湖区宏府广场新世界百货负一层","029-87366343"],
["陕西","西安","西关餐厅","莲湖区环城西路78号一二层","029-88647597"],
["陕西","西安","城西餐厅","莲湖区枣园东路92号一、二层","029-84612241"],
["陕西","西安","钟鼓楼餐厅","莲湖区钟鼓楼国铭商城一层肯德基","029-87382494"],
["陕西","西安","兵马俑餐厅","临潼区秦始皇兵马俑博物馆服务区F14一、二层","029-83899366"],
["陕西","西安","龙首原餐厅","龙首村十字西南角地铁二号线龙首原站综合出入口","029-68754996"],
["陕西","西安","临渭餐厅","前进路信达广场华润万家一层","0913-2666206"],
["陕西","西安","太华路餐厅","曲江大明宫遗址区凤城一路北侧太华北侧369号万达广场2楼","029-86740525"],
["陕西","西安","银泰餐厅","曲江新区雁南二路北侧银泰城-1F","029-89123091"],
["陕西","西安","骊山餐厅","人民南路全都超市1楼","029-83812655"],
["陕西","西安","学府餐厅","太白北路213号","029-88305551"],
["陕西","西安","唐延餐厅","唐延路3号易初莲花超市肯德基餐厅","029-88335127"],
["陕西","西安","龙首餐厅","未央区龙首北路与未央路十字东南角龙首村宫园壹号一层+负一层","029-86272083"],
["陕西","西安","启航餐厅","未央区三桥街道启航时代广场一层","029-89504529"],
["陕西","西安","盛龙餐厅","未央区未央路80方新村北盛龙广场一二层","029-88337717"],
["陕西","西安","西航餐厅","未央区渭滨路华润万家超市对面一层","029-89291871"],
["陕西","西安","北客站餐厅","文景路北口北客站内","029-63325316"],
["陕西","西安","电子城餐厅","西安市电子正街(电子一路西段18号)电子市场负一层肯德基餐厅","2988266042"],
["陕西","西安","西大街餐厅","西大街北广济街口时代盛典大厦肯德基餐厅","029-87211767"],
["陕西","西安","朱雀餐厅","小寨西路银泰城一层","029-81543162"],
["陕西","西安","含元餐厅","新城区太华南路141号华东茶城一层","029-68251071"],
["陕西","西安","站前餐厅","新城区西八路19号陕西省西安汽车站售票大厅东墙以东站内房产一二层","029-87439623"],
["陕西","西安","咸宁餐厅","新城区咸宁中路122号乐宁会一、二层","029-82470648"],
["陕西","西安","长缨餐厅","新城区长缨西路75号易初莲花超市1层肯德基餐厅","029-82513531"],
["陕西","西安","华洲餐厅","新秦北路东侧54号一层","0931-4720007"],
["陕西","西安","大雁塔餐厅","雁塔路大唐商业广场A栋10101、10109肯德基餐厅","029-85510057"],
["陕西","西安","祥和餐厅","雁塔区吉祥路与永松路十字向东100米","029-81773913"],
["陕西","西安","西影路餐厅","雁塔区西影路50号西影路与西延路交叉口东北角一、二层","029-85270604"],
["陕西","西安","新乐汇餐厅","雁塔区雁塔路大雁塔广场东南角新乐汇项目A6区ML103、ML203一二层","029-85428532"],
["陕西","西安","友谊餐厅","友谊东路82号家春秋国际美居1F肯德基餐厅","029-82073782"],
["陕西","西安","万科餐厅","长安郭杜街办茅坡村西侧万科城西区商业48#楼1+2层","029-85229697"],
["陕西","西安","会展餐厅","长安南路国际会展中心肯德基餐厅","029-87655099"],
["陕西","西安","沣京餐厅","长安区户县甘亭镇草堂路3号一二层肯德基","029-63373373"],
["陕西","西安","幸福餐厅","长乐东路华润万家超市一二层","029-83653652"],
["陕西","西安","长乐路餐厅","长乐路180号付一号康复路炫彩商城一层肯德基餐厅","029-62881226"],
["陕西","西安","科技路餐厅","丈八北路东侧城市风景 都市印象18栋1单元一 二层","029-68558116"],
["陕西","西安","明德门餐厅","朱雀大街58号金水大厦 1+2F","029-85362877"],
["陕西","西安","西安南大街店KFC","南大街59号富豪大厦二层","029-62760966"],
["陕西","西安","蓝田蓝新店KFC","蓝田县蓝新路北段东侧一层","029-82720319"],
["陕西","西安","西安万和城店KFC","长缨路1号一、二层","029-81035614"],
["陕西","西安","渭南高新店KFC","陕西省渭南市高新区万达广场3号门一层","15339274452"],
["陕西","西安","西安民乐园店KFC","新城区解放路中段111号万达广场10163、1F101号商铺","029-68627880"],
["陕西","西安","西部大道店KFC","长安区博士路60号(即西部大道与博士路交汇处)阳光天地购物中心一、四层","029-81102514"],
["陕西","西安","西安凤八店KFC","未央区凤城八路168号汉神百货一层负一层","18821672578"],
["陕西","西安","西安长安店DT KFC","长安区府东路城南新天地2层","029-85213811"],
["陕西","西安","西稍门店","西稍门十字劳动路平房一号","029-88638534"],
["陕西","西安","小寨赛格店","西安市雁塔区长安中路123号赛格国际购物中心负一层","029-89328731"],
["陕西","西安","阎良千禧店","西安市阎良区人民路千禧广场","029-86876391"],
["陕西","西安","长安中路店","雁塔区长安中路86号","029-85417219"],
["陕西","西安","西门店","西大街安定广场3号楼","029-87618939"],
["陕西","西安","家世界店","莲湖区大庆路120号","029-84266829"],
["陕西","西安","长乐店","长乐中路133号","029-81034290"],
["陕西","铜川","正大餐厅","红旗街32号正大百货商场一层肯德基餐厅","0919-2380777"],
["陕西","宝鸡","高新餐厅","高新区高新大道61号城市新天地","0917-3811317"],
["陕西","宝鸡","金台餐厅","金台大道五洲商厦一层","0917-3538961"],
["陕西","宝鸡","经二路餐厅","经二路154号一层、二层肯德基餐厅","0917-3903202"],
["陕西","宝鸡","清姜餐厅","清姜路46号e时代数码广场一层肯德基餐厅","0917-3806061"],
["陕西","宝鸡","宝鸡店","经二路112号","0917-3239318"],
["陕西","咸阳","康乐餐厅","康乐路西段化建大厦一层","029-87016370"],
["陕西","咸阳","彩虹餐厅","秦都区渭阳西路彩虹一路十字东南角中华商城一层","029-33339483"],
["陕西","咸阳","新兴餐厅","人民东路华润万家一层","029-33257132"],
["陕西","咸阳","世纪餐厅","世纪大道中段南侧白马河以东三米阳光一、二层","029-33182681"],
["陕西","咸阳","福园餐厅","渭阳西路中段福园国际时代广场一层","029-33578611"],
["陕西","咸阳","机场T3餐厅","西安咸阳国际机场3号航站综合交通枢纽1层肯德基餐厅","029-88790349"],
["陕西","咸阳","兴平南关店KFC","兴平市南关路49号(海威财富广场)一、二层","029-38823676"],
["陕西","咸阳","咸阳人民路店KFC","人民中路咸阳国际财富中心一期一、二层","029-32019898"],
["陕西","咸阳","泾阳中心店KFC","泾阳县中心街与北极宫大街十字东北角(泾阳世纪新城)一层","029-32097676"],
["陕西","咸阳","三原城关店KFC","三原县人行大街(三原商贸中心)一层","029-32853030"],
["陕西","咸阳","人民店","人民中路16号","029-33248567"],
["陕西","渭南","青正餐厅","青正街华元购物广场1楼","0913-6784929"],
["陕西","渭南","东风餐厅","东风大街中段恒基大厦一层","0913-2360622"],
["陕西","延安","延安大学城餐厅","宝塔区兰家坪十字东信时代广场1层","0911-2567830"],
["陕西","延安","东大餐厅","宝塔区双拥大道中段东大百货一层","0911-8682880"],
["陕西","延安","东川餐厅","东大街民生百货一二楼肯德基餐厅","0911-8065915"],
["陕西","延安","宝塔餐厅","二道街帝豪时代广场二层肯德基餐厅","0911-2568935"],
["陕西","延安","美尚城餐厅","二道街美尚城二楼","0911-2112243"],
["陕西","延安","宝塔餐厅","南门坡丽融大厦一楼","0911-2132205"],
["陕西","汉中","天汉餐厅","汉台区汉中路万邦时代广场2号楼一二层","0916-2161200"],
["陕西","汉中","新广场店","汉台区中心广场原万宝商城一楼肯德基","0916-2618510"],
["陕西","榆林","新华餐厅","定边县新华百货一层肯德基餐厅","0912-4212782"],
["陕西","榆林","河滨餐厅","府谷县河滨东路145号一层","0912-8718011"],
["陕西","榆林","航宇餐厅","航宇路沙河口村农贸市场一层","0912-3595531"],
["陕西","榆林","名州餐厅","绥德县西山大街绥德文化广场肯德基餐厅","0912-2355928"],
["陕西","榆林","西沙餐厅","西沙西人民路31号一层","0912-3545227"],
["陕西","榆林","新建餐厅","新建北路89号喜洋洋商厦一层肯德基餐厅","0912-3283311"],
["陕西","榆林","榆阳餐厅","新建南路29号新楼中巷喜洋洋商厦一层肯德基餐厅","0912-3255266"],
["陕西","榆林","大洋餐厅","榆阳区肤施路大洋百货一、二层","0912-3533113"],
["陕西","安康","兴安餐厅","兴安路74号一二层","0915-3203783"],
["陕西","安康","金州店","香溪路6号","0915-3217881"],
["陕西","商洛","北新餐厅","商州区北新街西段138号一二层","0914-2998151"],
["湖南","长沙","长沙车站北路餐厅","车站北路与火炬路交汇处步步高超市一楼肯德基餐厅","0731-85203861"],
["湖南","长沙","长沙高桥餐厅","东二环一段高桥大市场西578号","0731-84153501"],
["湖南","长沙","长沙奥莱餐厅","芙蓉南路27号1层(友阿奥莱购物城)","0731-84225953"],
["湖南","长沙","长沙中央广场餐厅","芙蓉中路238号芙蓉广场首层","0731-84422175"],
["湖南","长沙","长沙黄兴中路餐厅","黄兴中路王府井百货一楼","0731-84424140"],
["湖南","长沙","长沙金星中餐厅","金星中路湘腾商业广场","0731-85911475"],
["湖南","长沙","长沙天马餐厅","麓山南路与阜埠河路交汇处","0731-89912075"],
["湖南","长沙","长沙林院餐厅","韶山路498号中南林院","0731-85625996"],
["湖南","长沙","长沙恒盛餐厅","韶山南路120号大润发超市一层","0731-85811436"],
["湖南","长沙","长沙香樟餐厅","韶山南路和香樟路交汇处香颂国际大厦","0731-84535299"],
["湖南","长沙","长沙雨花亭餐厅","韶山中路421号","0731-85308050"],
["湖南","长沙","长沙桐梓坡餐厅","桐梓坡路星电光城一层","0731-82858586"],
["湖南","长沙","长沙麓谷餐厅","桐梓坡西路麓谷国际工业园肯德基餐厅","0731-84763669"],
["湖南","长沙","长沙凌霄餐厅","万家丽路与凌霄路交汇处","0731-89857995"],
["湖南","长沙","长沙木莲餐厅","万家丽中路三段59号喜乐地中心首层","0731-88281389"],
["湖南","长沙","长沙郭亮路餐厅","望城郭亮中路联诚步行街口","0731-88060789"],
["湖南","长沙","长沙西站餐厅","望城坡经济开发区湘浙小商品批发市场","0731-88760170"],
["湖南","长沙","长沙长沙高铁出发层餐厅","武广高铁长沙南站出发层长3-1-1商铺","0731-82067170"],
["湖南","长沙","长沙长沙高铁达到层餐厅","武广高铁长沙南站到达层长1-3商铺","0731-82065752"],
["湖南","长沙","长沙湘府东餐厅","湘府东路459号莲湖商业广场(华润万家超市)1楼","0731-89787910"],
["湖南","长沙","长沙书院南餐厅","新开铺171号新一佳超市旁","0731-88187806"],
["湖南","长沙","长沙漓湘路餐厅","星沙经开区泉塘社区漓湘东路南东六线西昌和购物中心1-2层","0731-86868077"],
["湖南","长沙","长沙开元餐厅","星沙开发区开元路45号一层","0731-84025465"],
["湖南","长沙","长沙星沙餐厅","星沙镇长沙通程商业广场","0731-84028958"],
["湖南","长沙","长沙红星餐厅","雨花区中意路红星农副产品大市场内第一幢101号","0731-85576776"],
["湖南","长沙","长沙麓山餐厅","岳麓区清水路59号麓山中南商业广场","0731-84717660"],
["湖南","长沙","长沙玉兰路餐厅","岳麓区桐梓坡路与玉兰路口长房时代城人人乐超市一层","0731-84169930"],
["湖南","长沙","长沙银盆路餐厅","岳麓区银双路奥克斯广场一楼KFC餐厅","0731-89601562"],
["湖南","长沙","长沙阿波罗广场餐厅","长沙市八一路1号","0731-82297251"],
["湖南","长沙","长沙左家塘餐厅","长沙市城南东路269号(位于城南东路和曙光路交汇处东南角)","0731-84318209"],
["湖南","长沙","长沙天虹餐厅","长沙市芙蓉南路368号BOBO天下城","0731-85063823"],
["湖南","长沙","长沙东玺门餐厅","长沙市芙蓉区荷花路与马王堆路交汇东北角100米处东玺门人人乐1层","0731-84161778"],
["湖南","长沙","长沙岁宝餐厅","长沙市芙蓉中路1段288号岁宝百货一楼","0731-85455342"],
["湖南","长沙","长沙弘林餐厅","长沙市芙蓉中路3段266号弘林国际一、二楼","0731-84120781"],
["湖南","长沙","长沙解放西路餐厅","长沙市黄兴南路445号","0731-84434066"],
["湖南","长沙","长沙新世界餐厅","长沙市黄兴中路平和堂南侧新世界购物中心","0731-84430583"],
["湖南","长沙","长沙金源餐厅","长沙市开福区浏阳河隧道口世纪金源1F&2F(居然之家)","0731-82191169"],
["湖南","长沙","长沙赤岗冲餐厅","长沙市劳动路157号","0731-85521438"],
["湖南","长沙","长沙东郡餐厅","长沙市人民东路与万家丽路交汇处大润发超市","0731-84156883"],
["湖南","长沙","长沙窑岭餐厅","长沙市人民中路228号2层","0731-84826820"],
["湖南","长沙","长沙华晨餐厅","长沙市万家丽路与长沙大道交汇处华晨世纪广场1F肯德基餐厅","0731-82258606"],
["湖南","长沙","长沙蝴蝶餐厅","长沙市五一西路78号","0731-84445546"],
["湖南","长沙","长沙德思勤餐厅","长沙市雨花区井湾子中意路101号德思勤城市广场1F肯德基餐厅","0731-89879778"],
["湖南","长沙","长沙蓝天餐厅","长沙县黄花国际机场T2(新)航站楼负一楼","0731-84799639"],
["湖南","长沙","长沙韶山南路餐厅","中意一路330号","0731-84821311"],
["湖南","长沙","长沙大道餐厅","长沙市长沙大道与沙湾路交汇处","0731-84331656"],
["湖南","长沙","长沙火车站餐厅","长沙市火车站候车大楼雨廊南侧消防门往南4个门店及扩建房屋","0731-84123039"],
["湖南","长沙","浏阳君悦城店KFC","浏阳市石霜路君悦购物中心一层","18673398895"],
["湖南","长沙","新西站店KFC","长沙汽车西站一楼","0731-85172281"],
["湖南","长沙","车站广场分店","长沙市火车站候车大楼雨廊南侧消防门往南4个门店及扩建房屋","0731-89781309"],
["湖南","长沙","黄兴北路","黄兴中路168号","0731-84867300"],
["湖南","长沙","花明路餐厅","宁乡县玉潭镇玉潭中路与花明路交汇处宁乡大润发商业广场","0731-87818226"],
["湖南","长沙","长沙百联餐厅","长沙市芙蓉区黄兴中路188号乐和城一楼肯德基餐厅","0731-84303681"],
["湖南","长沙","火星店","晚报大道218号精彩生活超市一楼","0731-82186875"],
["湖南","长沙","东塘2店","雨花区劳动西路589号","0731-85501966"],
["湖南","长沙","枫林店","长沙市岳麓区枫林路2号通程商业广场肯德基","0731-88852850"],
["湖南","长沙","玉潭店","人民中路12号","0731-87842561"],
["湖南","长沙","人民中路餐","人民中路9号友阿百货一楼东北角","0731-84116852"],
["湖南","长沙","麓谷DT 餐厅","湖南省长沙市高新技术产业开发区桐梓坡西路229号肯德基餐厅","0731-84763669"],
["湖南","株洲","株洲株洲月塘餐厅","湖南株洲市荷塘区新华路华润万家购物广场一楼肯德基餐厅","0731-28522633"],
["湖南","株洲","株洲黄河路餐厅","湖南株洲市黄河路与天台路交叉口天虹百货楼下肯德基餐厅","0731-28282771"],
["湖南","株洲","株洲尚格餐厅","湖南株洲市天元区株洲大道尚格名城尚格丽晶肯德基餐厅","0731-28285163"],
["湖南","株洲","株洲株洲高铁出发餐厅","武广高铁株洲西站1--5商铺","-18907335530"],
["湖南","株洲","株洲株洲高铁出发层餐厅","武广高铁株洲锡杖 1-5商铺","18229135119"],
["湖南","株洲","株洲攸县望云餐厅","攸县大巷路沃尔玛超市一层","0731-24220185"],
["湖南","株洲","株洲株洲中鸿餐厅","株州市车站路61号中鸿时尚新天地","0731-28291333"],
["湖南","株洲","株洲株洲金帝餐厅","株洲车站路金帝广场2楼","0731-22938355"],
["湖南","株洲","株洲株洲红旗餐厅","株洲市红旗中路1号","0731-28476958"],
["湖南","株洲","株洲株洲国安餐厅","株洲市建设南路48号","0731-28278113"],
["湖南","株洲","株洲响石路餐厅","株洲市石峰区响石广场","0731-28349899"],
["湖南","株洲","卢桃区域株洲珠江北路餐厅","株洲市天元区栗雨工业园五十区珠江北路与株洲大道交界处栗雨湖商业项目——美的时代广场","0731-28202230"],
["湖南","株洲","株洲白云路店KFC","荷塘区红旗中路与白云路交汇处大润发超市一楼","18173385679"],
["湖南","湘潭","湘潭东方红餐厅","芙蓉路东方红广场负一楼肯德基餐厅","0731-55575062"],
["湖南","湘潭","湘潭白石广场餐厅","建设北路8号","0731-55586022"],
["湖南","湘潭","湘潭韶山餐厅","雨湖区车站路28号","0731-58250351"],
["湖南","湘潭","湘潭板塘铺餐厅","岳塘区长潭路129号","0731-55550186"],
["湖南","湘潭","湘潭新华隆","建设南路步步高广场一层","0731-58557508"],
["湖南","衡阳","衡阳衡阳常宁市青阳路餐厅","常宁市青阳中路中鼎大厦(东风广场对面)","0734-7733254"],
["湖南","衡阳","衡阳衡阳莲湖广场餐厅","城北区向农路35-121号香江百货蒸北购物广场一层","0734-8218280"],
["湖南","衡阳","衡阳衡东县衡岳路餐厅","衡东县城关镇衡岳北路新世纪大市场C栋128-129号","0734-2863886"],
["湖南","衡阳","衡阳衡阳火车站餐厅","衡阳火车站候车室1候车室下方","0734-8466030"],
["湖南","衡阳","衡阳衡阳新正路餐厅","衡阳县西渡新正路帝龙时代广场","0734-6858567"],
["湖南","衡阳","新船山餐厅","湖南省衡阳市蒸湘北路87号晶珠百货肯德基餐厅","0734-8191346"],
["湖南","衡阳","衡阳衡阳衡百餐厅","解放路45号","0734-8221133"],
["湖南","衡阳","衡阳衡阳万向城餐厅","解放路万向城一楼","0734-8132411"],
["湖南","衡阳","衡阳衡阳青松餐厅","经济开发区步步高商场一层","0734-2827606"],
["湖南","衡阳","衡阳衡阳市祁东县县正路餐厅","祁东县县正路49号邮政局综合大楼一层","0734-6316390"],
["湖南","衡阳","衡阳岳屏餐厅","雁峰区仙姬巷33号","0734-8210887"],
["湖南","衡阳","衡阳衡阳香城餐厅","蒸湘区高新区解放大道33号香江城市花园1、2栋裙楼一层","0734-8855997"],
["湖南","衡阳","衡阳衡阳湘江东餐厅","珠晖区广东、临江路口喜富来购物广场一层、二层","0734-8398520"],
["湖南","邵阳","邵阳洞口裕峰餐厅","洞口县桔城中路裕峰百货一楼肯德基餐厅","0739-7234019"],
["湖南","邵阳","邵阳邵东红岭餐厅","湖南省邵东县开发区红岭路12号","0739-2208461"],
["湖南","邵阳","邵阳邵阳-青龙桥餐厅","湖南省邵阳市双清区东风路338号","0739-5272727"],
["湖南","邵阳","邵阳帝王餐厅","邵阳市北塔区蔡锷路地王大厦","0739-5082577"],
["湖南","邵阳","邵陵店","双清区步步高广场佳惠超市一楼","0739-5231491"],
["湖南","岳阳","岳阳华容迎宾路餐厅","湖南省岳阳市华容县城关镇迎宾中路肯德基餐厅","0730-4508977"],
["湖南","岳阳","岳阳长安路餐厅","湖南省岳阳市临湘长安镇长安西路12号1层","0730-3966550"],
["湖南","岳阳","岳阳金鄂路 餐厅","湖南省岳阳市岳阳大道步步高广场","0730-8802101"],
["湖南","岳阳","岳阳岳阳高铁出发餐厅","武广高铁岳阳东站出发层岳 1-2、1-4.1-10商铺","0730-3158597"],
["湖南","岳阳","岳阳泰和餐厅","岳阳市巴陵路泰和商业广场","0730-8270121"],
["湖南","岳阳","岳阳一百餐厅","岳阳市巴陵中路18号岳阳一百大厦一楼","0730-8222221"],
["湖南","岳阳","岳阳巴陵大道餐厅","岳阳市巴陵中路人和春天负一楼肯德基餐厅","0730-8868950"],
["湖南","岳阳","岳阳汇泽餐厅","岳阳市东茅岭路42号岳阳汇泽商业文化广场","0730-8239680"],
["湖南","岳阳","岳阳五里牌餐厅","岳阳市五里牌路洞庭新天地商业广场一层","0730-8181545"],
["湖南","岳阳","岳阳建湘路餐厅","岳阳市岳阳楼区花板桥路,青年中路与建湘路交汇处西南侧(天伦购物公园)","0730-8802103"],
["湖南","岳阳","湘阴太傅路店KFC","湘阴县文星镇朱家垅太傅路丁与冬茅路北寓兴源商业广场好润佳超市一层","0730-2615948"],
["湖南","常德","常德桥南市场餐厅","常德市鼎城区武陵镇渡口社区居委会车站路楚天禾大酒店一二层","0736-7382513"],
["湖南","常德","常德人民餐厅","常德市人民中路97号(下南门)","0736-7265305"],
["湖南","常德","常德水星楼餐厅","常德市人民中路与青年北路交汇处水星楼商业广场4号1-2层","0736-7237656"],
["湖南","常德","常德武陵阁餐厅","常德市武陵区城南四眼井社区居委会一层","0736-7766672"],
["湖南","常德","常德临江公园餐厅","常德市武陵区九重天花园一层","0736-7171609"],
["湖南","常德","常德滨湖公园餐厅","湖南省常德市洞庭大道与朗州路交汇处","0736-7177972"],
["湖南","常德","常德市振兴路餐厅","湖南省常德市汉寿龙阳镇振兴西路北侧","0736-2023325"],
["湖南","常德","常德丁公桥餐厅","澧县解放路与人民路交汇处丁公桥商业广场一,二层","0736-3335308"],
["湖南","常德","桃源漳江中路餐厅","桃源县漳江中路花源大酒店旁好润佳超市","0736-6666397"],
["湖南","常德","常德泽云广场餐厅","武陵大道与常德大道交汇处泽云广场","0736-7272296"],
["湖南","张家界","张家界芙蓉餐厅","张家界慈利县零阳镇东街177号","0744-3456781"],
["湖南","张家界","张家界文昌阁餐厅","张家界市解放路18号永昌商城","0744-8289600"],
["湖南","张家界","张家界袁家界餐厅","张家界市武陵源区袁家界服务中心(景区内)","0744-5718122"],
["湖南","张家界","张家界回龙路餐厅","张家界市永定区回龙路十字街7栋101号首层肯德基餐厅","0744-8259300"],
["湖南","益阳","益阳南州路餐厅","湖南省南县南州路老正街一层","0737-5212177"],
["湖南","益阳","益阳朝阳餐厅","湖南省益阳市朝阳广场西北角爱丽斯商业广场一层","0737-4226661"],
["湖南","益阳","益阳桃花中路餐厅","湖南省益阳市桃江县桃花江镇桃花中路47号","0737-8118448"],
["湖南","益阳","益阳金鸿餐厅","益阳市赫山区桃花仑中路209号金鸿大厦一.二楼","0737-4440175"],
["湖南","益阳","益阳桥北餐厅","益阳市桥北马良南路1号(家润多超市一楼)","0737-4428655"],
["湖南","益阳","益阳桃花仑餐厅","益阳市桃花仑路嘉兴茂广场","0737-4442486"],
["湖南","益阳","益阳市新源路餐厅","益阳沅江市新源路好润佳商业广场一楼","0737-2808278"],
["湖南","郴州","郴州桂阳向阳路餐厅","桂阳县向阳路1号","0735-4445577"],
["湖南","郴州","郴州郴州飞虹餐厅","桔井路37号一二层","0735-2165576"],
["湖南","郴州","郴州郴州国庆餐厅","人民东路1号","0735-2260091"],
["湖南","郴州","郴州郴州八一路餐厅","苏仙区八一路生源时代广场一二层肯德基餐厅","0735-2257131"],
["湖南","郴州","郴州郴州五岭广场餐厅","五岭广场北侧五岭路6号1层","0735-2622677"],
["湖南","郴州","郴州宜章文明路餐厅","宜章县文明路KFC餐厅一二层","0735-3717539"],
["湖南","郴州","郴州永兴干劲路餐厅","永兴县干劲路与大桥路交汇处永兴影视大楼KFC一二层","0735-5522086"],
["湖南","郴州","中皇城店","国庆北路18号一层KFC餐厅","0735-8887011"],
["湖南","永州","永州永州道县潇水路餐厅","道县潇水路步行街KFC餐厅","0746-5228114"],
["湖南","永州","永州永州东安湘中广场餐厅","东安县建设大道湘中广场旁","0746-4219722"],
["湖南","永州","永州永州零陵北路餐厅","冷水滩区零陵北路和凤凰路十字路口东北角步步高购物广场","0746-8666771"],
["湖南","永州","永州永州市零陵中路餐厅","冷水滩区零陵中路823号金水湾一楼","0746-8417522"],
["湖南","永州","零陵中路","冷水滩区零陵中路823号金水湾一楼、二楼KFC餐厅","0746-8417522"],
["湖南","怀化","怀化银座餐厅","鹤城区麻阳路口西都银座商业广场一层","0745-2761190"],
["湖南","怀化","怀化迎丰餐厅","鹤城区迎丰西路207","0745-2236309"],
["湖南","怀化","怀化鹤州餐厅","人民南路118号","0745-2265675"],
["湖南","怀化","怀化市府餐厅","市委路口佳惠购物中心地上一层","0745-2712291"],
["湖南","怀化","怀化城市广场餐厅","新世纪商业广场一、二层","0745-2735699"],
["湖南","怀化","怀化高铁餐厅","沪昆高铁怀化站高铁商业街","0745-2281868"],
["湖南","娄底","娄底氐星餐厅","氐星路春园步行街12栋一层和二层","0738-8265556"],
["湖南","娄底","娄底娄星餐厅","氐星路万豪城市广场","0738-2881006"],
["湖南","娄底","娄底世贸餐厅","新化县梅苑南路友阿世茂广场一楼","0738-3208580"],
["湖南","娄底","娄底天华餐厅","新化县天华商业广场一楼肯德基餐","0738-3562180"],
["湖南","娄底","娄底龙泰餐厅","长青路龙泰上品一层","0738-8261593"],
["湖南","冷水江","冷水江锑都餐厅","锑都中路21号心连心商场1-2层","0738-5237798"],
["湖南","冷水江","冷水江步行街餐厅","锑都中路36号","0738-5235197"],
["湖南","吉首","吉首泰丰餐厅","湖南省吉首市人民北路与团结东路交叉口泰丰广场1层","0743-8529623"],
["湖南","吉首","吉首边城餐厅","人民中路99号","0743-2181826"],
["湖南","云浮","城中餐厅","城中路与浩林路交叉处源盛华庭首层","0766-8812992"],
["湖南","云浮","罗定园前餐厅","罗定市罗城人民中路130、132号与园前路6号首层","0766-3897199"],
["湖南","云浮","云浮餐厅","云城区兴云中路金鹏大厦首层肯德基餐厅","0766-8839118"],
["湖南","云浮","新兴时代广场餐厅","新兴县城镇环城北路时代广场肯德基餐厅","0766-2883300"],
["四川","成都","飞大壹号餐厅","金牛区一环路北三段飞大壹号广场一楼肯德基餐厅","028-65073461"],
["四川","成都","八宝餐厅","四川省成都市八宝街1号家乐福一楼","028-86241901"],
["四川","成都","府青餐厅","四川省成都市成华区府青路2号财富又一层一楼","028-61283011"],
["四川","成都","双桥餐厅","四川省成都市成华区经华北路2号一楼","028-84430585"],
["四川","成都","簇桥餐厅","四川省成都市簇桥簇锦北路138号千盛生活广场1楼","028-85017712"],
["四川","成都","春禧餐厅","四川省成都市大科甲巷1-21号一楼,二楼","028-86650418"],
["四川","成都","站前广场餐厅","四川省成都市二环路北三段363号万通商城一、二层","028-83315280"],
["四川","成都","万象城餐厅","四川省成都市二环路东三段华润万象城负一楼","028-84372066"],
["四川","成都","莱蒙餐厅","四川省成都市二环路南四段51号莱蒙都会一楼","028-87441029"],
["四川","成都","逸都餐厅","四川省成都市二环路西二段同辉国际购物广场C幢2楼","028-87010461"],
["四川","成都","天府新谷餐厅","四川省成都市府城大道399号天府新谷一楼","028-85319487"],
["四川","成都","九方餐厅","四川省成都市高新区府城大道中段下沉广场","028-85133731"],
["四川","成都","奥克斯广场餐厅","四川省成都市高新区锦城大道西段奥克斯广场西区一、二层","028-85214617"],
["四川","成都","新南天地餐厅","四川省成都市高新区天府大道北段8号 苏宁广场一层","028-85314885"],
["四川","成都","天府时代餐厅","四川省成都市高新区天府大道南段1399号一层","028-85858694"],
["四川","成都","软件园餐厅","四川省成都市高新区天府大道中段软件园C区软件园财智立方一楼","028-85314633"],
["四川","成都","站华餐厅","四川省成都市高新区站华路9号","028-85312686"],
["四川","成都","红牌楼餐厅","四川省成都市佳灵路红牌楼广场","028-61515408"],
["四川","成都","建设餐厅","四川省成都市建设路47号一楼","028-84311740"],
["四川","成都","世豪广场餐厅","四川省成都市剑南大道中段998号世豪广场1层","028-85332531"],
["四川","成都","加州湾餐厅","四川省成都市金牛区二环路北一段10号 嘉茂购物中心 1、2层","028-87680292"],
["四川","成都","交大餐厅","四川省成都市金牛区交大路183号","028-87621715"],
["四川","成都","羊西餐厅","四川省成都市金牛区蜀汉路347号?","028-61998091"],
["四川","成都","金牛万达餐厅","四川省成都市金牛区一环路北三段1号","028-83210805"],
["四川","成都","茶店子餐厅","四川省成都市金牛区一品天下大街339号西城公馆","028-61363171"],
["四川","成都","群光餐厅","四川省成都市锦江区春熙路南段8号负2F","028-65970077"],
["四川","成都","蓝谷地餐厅","四川省成都市锦江区国槐路8号大观市政公园内","028-84752005"],
["四川","成都","财富餐厅","四川省成都市锦江区青石桥南街大业路6号1-2层","028-86700146"],
["四川","成都","新总府餐厅","四川省成都市锦江区商业街","028-84552530"],
["四川","成都","新世界餐厅","四川省成都市锦江区盐市口顺城大街8号","028-86673661"],
["四川","成都","晋阳餐厅","四川省成都市晋阳路269号瑞泰锦城千盛百货1F","028-87433087"],
["四川","成都","新天府汇城餐厅","四川省成都市科华中路9号王府井商场一、二楼","028-85217015"],
["四川","成都","明都餐厅","四川省成都市青年路18号明都大厦一、二层","028-86728370"],
["四川","成都","光华优品餐厅","四川省成都市青羊大道99号优品道商业广场","028-61860015"],
["四川","成都","光华餐厅","四川省成都市青羊区光华村街10号大地新光华广场","028-81710692"],
["四川","成都","清江路餐厅","四川省成都市清江东路134号昂立大厦一、二层","028-69281411"],
["四川","成都","火车站餐厅","四川省成都市人民北路火车北站侯车大厅一层","028-83328862"],
["四川","成都","火车东站餐厅","四川省成都市沙河堡火车东站西广场二楼","028-83280583"],
["四川","成都","会展餐厅","四川省成都市沙湾路258号会展中心一层","028-87677236"],
["四川","成都","华侨城餐厅","四川省成都市沙西线1号成都市华侨城广场","028-61898207"],
["四川","成都","新少城餐厅","四川省成都市蜀都大道少城路11号一、二层","028-86242302"],
["四川","成都","蜀汉餐厅","四川省成都市蜀汉路欧尚超市一层","028-82831827"],
["四川","成都","双林餐厅","四川省成都市双林路134号","028-84340036"],
["四川","成都","成都机场餐厅","四川省成都市双流国际机场侯机楼A指廓内A-YC-2","028-85205501"],
["四川","成都","机场T2餐厅","四川省成都市双流国际机场候机楼指廊","028-85207521"],
["四川","成都","文武餐厅","四川省成都市文武路42号A区一层","028-86620275"],
["四川","成都","华达餐厅","四川省成都市武侯大街266号华达商厦一楼,二楼","028-85591150"],
["四川","成都","金花餐厅","四川省成都市武侯区簇桥七里村6组金兴南路244号长安商务楼一层","028-85362423"],
["四川","成都","新罗马假日餐厅","四川省成都市武侯区高升桥东路2-6号 罗马假日广场C座","028-85109135"],
["四川","成都","新城市广场餐厅","四川省成都市西大街1号新城市广场二楼","028-86277726"],
["四川","成都","锦华餐厅","四川省成都市新成仁路口万达锦华城","028-84191374"],
["四川","成都","大世界餐厅","四川省成都市新光路大世界商业广场中心商厦一层","028-85137635"],
["四川","成都","跳伞塔餐厅","四川省成都市一环路南二段17号@世界资讯广场负一楼东侧","028-85493886"],
["四川","成都","磨子桥餐厅","四川省成都市一环路南二段1号附15号(美特斯邦威旁边)","028-85445607"],
["四川","成都","中山广场餐厅","四川省成都市中山广场的阳光百货(群光百货旁)三楼","028-86667402"],
["四川","成都","永康路店","武侯区永康路17号一层lO5号、二层206号","18583770102"],
["四川","成都","高新时代KFC","郫县合作路89号16栋1层1号","13880105712"],
["四川","成都","金楠KFC","武侯区晋阳村二、六组","15608501940"],
["四川","成都","SM广场餐厅","成都市二环路东二段29号SM城市广场137-141","028-84310995"],
["四川","成都","中山广场餐厅","春熙路中山广场KFC餐厅","028-86667402"],
["四川","自贡","荣县望景餐厅","四川省自贡荣县望景路中段星河奥运绿洲15栋一、二层","0813-6268069"],
["四川","自贡","汇东餐厅","四川省自贡市汇东汇兴路中段沃尔玛旁","0813-8122799"],
["四川","自贡","彩灯公园餐厅","四川省自贡市五星街2号一、二层","0813-2112313"],
["四川","自贡","五星餐厅","四川省自贡市五星街帝豪广场一楼","0813-2305683"],
["四川","攀枝花","阳光餐厅","四川省攀枝花市炳草岗江南三路德铭阳光购物中心一层","0812-3343558"],
["四川","攀枝花","金瓯餐厅","四川省攀枝花市炳草岗江南三路金瓯广场A区(原国家粮库)","0812-3306788"],
["四川","攀枝花","新华餐厅","四川省攀枝花市新华街世贸中心大楼一层","0812-3340521"],
["四川","泸州","佳乐餐厅","四川省泸州市滨江路佳乐广场佳乐大厦二楼","0830-2281621"],
["四川","泸州","万象汇餐厅","四川省泸州市江阳区康城路一段1号华润万象汇B119号","0830-2998255"],
["四川","泸州","客运站DT店","龙马潭区蜀龙大道东侧","18982420481"],
["四川","德阳","凯旋国际餐厅","四川省德阳市岷江西路550号凯旋国际一楼","0838-2306682"],
["四川","绵阳","中元广场餐厅","高新区绵兴东路55号一层","0816-5362188"],
["四川","绵阳","新涪城餐厅","四川省绵阳市涪城路28号美一天百货一楼","0816-2240413"],
["四川","绵阳","天津桥餐厅","四川省绵阳市涪城区安昌路17号涪陵大厦一楼","0816-2230873"],
["四川","绵阳","涪城万达餐厅","四川省绵阳市涪城区金菊街万达广场2066号商铺","0816-2358778"],
["四川","绵阳","公园口餐厅","四川省绵阳市涪城区临园路东段58号","0816-2244429"],
["四川","绵阳","长虹国际城餐厅","四川省绵阳市涪城区跃进路6号","0816-6388119"],
["四川","绵阳","绵阳火车站餐厅","四川省绵阳市火车站候车大厅一楼集散厅","0816-2369179"],
["四川","绵阳","科技桥餐厅","四川省绵阳市临园路东段74号","0816-2317332"],
["四川","广元","万达餐厅","四川省广元市万达广场","0839-3990520"],
["四川","遂宁","嘉禾餐厅","四川省遂宁市嘉禾西路“近水楼台”广场1层","0825-2681193"],
["四川","遂宁","太和餐厅","四川省遂宁市射洪县太和镇太和大道世纪摩登51-52号","0825-6639311"],
["四川","遂宁","春天餐厅","四川省遂宁市遂州南路春天广场1-2层","0825-2232739"],
["四川","内江","上南餐厅","四川省内江市市中区上南街2号百货大楼一层","0832-2031136"],
["四川","乐山","金杯餐厅","四川省乐山市嘉定南路金杯银座一、二层","0833-2116198"],
["四川","南充","模范餐厅","四川省南充市人民南路模范街南充大都会A幢一楼","0817-2221366"],
["四川","南充","北湖餐厅","四川省南充市顺庆区164号北湖广场负一层","0817-2239252"],
["四川","南充","金鱼岭餐厅","四川省南充市顺庆区北干道东侧龙吟路33号大润发一楼","0817-6995811"],
["四川","南充","滨江餐厅","四川省南充市顺庆区南门北街80号一层","0817-6998865"],
["四川","南充","文化餐厅","四川省南充市文化路9号","0817-2236185"],
["四川","眉山","杭州路餐厅","四川省眉山市东坡区三苏大道168号","028-38803155"],
["四川","眉山","三苏餐厅","四川省眉山市环湖西路9号三苏广场肯德基餐厅","028-38186483"],
["四川","眉山","环湖餐厅","四川省眉山市环湖路东一段大润发一层","028-36020288"],
["四川","宜宾","莱茵春天餐厅","四川省宜宾市翠屏区南岸航天路中段D-5号地块","0831-2203722"],
["四川","宜宾","南岸餐厅","四川省宜宾市翠屏区南岸酒都中段嘉信茂广场.南岸.宜宾一层01-08室和二层02-01/02室","0831-2339147"],
["四川","宜宾","大观楼餐厅","四川省宜宾市翠屏区西街30号","0831-8885556"],
["四川","广安","思源餐厅","四川省广安市广安区滨河路一段88号","0826-2130086"],
["四川","达州","朝阳餐厅","四川省达州市朝阳东路滨江名都城一层","0818-2384006"],
["四川","达州","老车坝餐厅","四川省达州市通川中路17号世纪广场一、二层","0818-2389977"],
["四川","资阳","摩根时代餐厅","四川省资阳市建设西路摩根时代一二层","028-23066202"],
["四川","西昌","月城广场餐厅","四川省西昌市大巷口下街达达春天百货一、二楼","0834-3631989"],
["四川","西昌","航天餐厅","四川省西昌市航天大道2段世纪MALL首层","0834-3201761"],
["甘肃","兰州","民主","城关区皋兰路3号肯德基餐厅","0931-8894881"],
["甘肃","兰州","西太华","七里河区西津西路68号肯德基餐厅","0931-2342118"],
["甘肃","兰州","西固","西固区玉门街21号肯德基餐厅","0931-7561195"],
["甘肃","兰州","南昌","城关区南昌路982号北京华联广场1层肯德基餐厅","0931-8882509"],
["甘肃","兰州","百安","城关区庆阳路296号肯德基百安餐厅","0931-8433611"],
["甘肃","兰州","火车站","火车站东路393号肯德基餐厅","0931-4993288"],
["甘肃","兰州","雁滩","雁滩路3113号大润发超市一层肯德基餐厅","0931-8506708"],
["甘肃","兰州","瑞德","城关区东岗东路1999号瑞德一楼KFC餐厅","0931-8605067"],
["甘肃","兰州","康桥","安宁西路3号肯德基餐厅","0931-7606933"],
["甘肃","兰州","新亚欧","中山路亚欧商厦一楼肯德基餐厅","0931-4809069"],
["甘肃","兰州","新东方红","城关区东方红广场东侧肯德基餐厅","0931-8852556"],
["甘肃","兰州","兰州城关店KFC","城关区天水北路兰州城关万达广场二、三层肯德基餐厅","0931-8388108"],
["甘肃","兰州","兰州雁南店KFC","城关区雁南路天庆嘉园太阳岛肯德基餐厅","0931-8559359"],
["甘肃","兰州","西客站店KFC","兰州西客站候车层商业夹层肯德基餐厅","0931-2770533"],
["甘肃","兰州","西津DT","七里河区西津东路西津广场","0931-2685205"],
["甘肃","兰州","兰州西站餐厅","兰州市西站西路44号西部广场一层","0931-2351166"],
["甘肃","嘉峪关","嘉峪关新华","新华北路东方百盛一层","0937-6219955"],
["甘肃","金昌","镍都","新华西路2号镍都商业大厦一层肯德基餐厅","0935-8222488"],
["甘肃","白银","白银","友好路145号白银百货大楼一层肯德基餐厅","0943-8260188"],
["甘肃","天水","大众","秦州区大众路1号天水大酒店1层肯德基餐厅","0938-8229669"],
["甘肃","天水","天水麦积","商埠路东42号亚太购物中心一二层","0938-2650828"],
["甘肃","武威","武威凉州店KFC","凉州区东大街1号义务商贸城1楼肯德基","0935-6132777"],
["甘肃","张掖","甘州","甘州区南街什字金房大厦一层肯德基餐厅","0936-8212555"],
["甘肃","平凉","平凉新民店KFC","新民路新世纪商厦A座1-2层","0933-8886168"],
["甘肃","酒泉","酒泉肃州店KFC","肃州区鼓楼东方广场一、二层肯德基餐厅","0937-2659555"],
["甘肃","酒泉","敦煌沙州店KFC","敦煌市沙州镇阳关中路北侧、小北街东侧、益旺国际购物中心1层","0937-8881648"],
["甘肃","庆阳","庆阳","西峰区西大街1号国芳百盛大世界商业广场一楼","0934-8279636"],
["甘肃","庆阳","南大街店KFC","西峰区南大街昊鑫时代广场一层","0934-6666020"],
["内蒙古","呼和浩特","滨海友谊店KFC","哲里木路与成吉思汗大街交口处滨海友谊购物中心商场一层","18547168022"],
["内蒙古","呼和浩特","方恒","内蒙古自治区呼和浩特市新城区新华东街85号太伟方恒广场一层","0471-6953329"],
["内蒙古","呼和浩特","金翡丽店KFC","新城区中山东路48号金翡丽购物中心一层、地下一层","0471-3284319"],
["内蒙古","呼和浩特","金游城店KFC","赛罕区南二环路以南丰州路以西鹏欣金游城购物中心一、二层","0471-3450500"],
["内蒙古","呼和浩特","通道北街店KFC","回民区通道北街与东洪桥街交口东北角新华书店营业楼一层","0471-3284487"],
["内蒙古","呼和浩特","百盛","中山西路212号","0471-6310156"],
["内蒙古","呼和浩特","民族","回民区中山西路69号","0471-6601613"],
["内蒙古","呼和浩特","新城","新城区新城南街108号1层","0471-4967758"],
["内蒙古","呼和浩特","锡林郭勒","赛罕区诺和木勒大街26号","0471-5962806"],
["内蒙古","呼和浩特","长乐宫","赛罕区东风路358号长乐宫","0471-4922425"],
["内蒙古","呼和浩特","蒙苑","新城区兴安北路169号蒙苑建材大市场北1号北京华联超市一层","0471-6525300"],
["内蒙古","呼和浩特","果园","回民区新华西街家世界广场1号楼美特好连锁超市一层","0471-6361610"],
["内蒙古","呼和浩特","金宇","赛罕区兴安南路金宇文苑底铺111号","0471-3315135"],
["内蒙古","呼和浩特","站前","新城区锡林北路与车站四街交口海豪国贸大厦","0471-5296820"],
["内蒙古","呼和浩特","海亮","回民区中山路海亮广场","0471-5290188"],
["内蒙古","呼和浩特","万达广场","赛罕区新华东街南侧呼和浩特万达商业广场","0471-3255385"],
["内蒙古","呼和浩特","白塔机场","白塔国际机场迎客大厅一层东侧","0471-4941679"],
["内蒙古","呼和浩特","名都","赛罕区大学西街家乐福一层","0471-6243131"],
["内蒙古","包头","东河维多利店KFC","东河区巴彦塔拉大街与西河路交口维多利新天地广场一层KFC","0472-2796185"],
["内蒙古","包头","包头机场店KFC","包头机场二楼候机楼肯德基餐厅","0472-8591102"],
["内蒙古","包头","包百","钢铁大街67号","0472-2121603"],
["内蒙古","包头","青山","青山区幸福路3#街坊","0472-3125164"],
["内蒙古","包头","林荫","友谊大街18号街坊","0472-2318874"],
["内蒙古","包头","东河","东河区超越大厦底座12-2","0472-4173675"],
["内蒙古","包头","九星","青山区富强路8号一层","0472-3344499"],
["内蒙古","包头","东海","青山区文化路与呼德木林大街交叉口东海一、二层","0472-6978322"],
["内蒙古","包头","东源","钢铁大街60号东源大厦","0472-6866255"],
["内蒙古","包头","保利","东河区南门外大街30号保利大厦一层","0472-2221353"],
["内蒙古","包头","银河广场","青山区青年路南、体育馆道西包头万达商业广场","0472-5198001"],
["内蒙古","包头","友谊大街","友谊大街与恒为路交叉口华润万家一层","0472-2770599"],
["内蒙古","包头","钢铁大街","昆区钢铁大街与三八路交口西北角华联超市一层","0472-5221552"],
["内蒙古","乌海","盈泽元","海勃湾区海河南路9号盈泽元商厦","0473-2029797"],
["内蒙古","赤峰","平庄世元餐厅","内蒙古赤峰市元宝山区银河街和平牛线交口世元购物广场","0476-3555121"],
["内蒙古","赤峰","广达餐厅","内蒙古赤峰市松山区松山大街与广场路交口华联购物广场","0476-8490940"],
["内蒙古","赤峰","哈达","红山区新华路163号新华书店一层","0476-8243620"],
["内蒙古","赤峰","新泰和","红山区钢铁大街和园林路交口双子座一层","0476-8280300"],
["内蒙古","赤峰","长青园","红山区哈达街中段维多利购物广场","0476-5988456"],
["内蒙古","赤峰","大都会店KFC","新城区冠东百货一层","0476-8276300"],
["内蒙古","赤峰","赤峰达达店KFC","红山区西拉沐伦大街万达广场一层","0476-8302135"],
["内蒙古","通辽","通辽市霍林河大街餐厅","通辽市和平路与霍林河大街交汇处","0475-6336733"],
["内蒙古","通辽","通辽市通明餐厅","通辽市明仁大街130-2号","0475-8268918"],
["内蒙古","通辽","通辽市和平餐厅","通辽市新兴大街90号","0475-8910068"],
["内蒙古","通辽","民主路餐厅","通辽市民主路与红光大街交汇欧亚购物商场一楼","15144995987"],
["内蒙古","鄂尔多斯","新天地","东胜区达拉特路东侧、鄂尔多斯大街北侧 北国新天地购物中心一层","0477-8113929"],
["内蒙古","鄂尔多斯","万正","东胜区铁西东环路与广场街交口万正广场一层","0477-5119500"],
["内蒙古","鄂尔多斯","太古","东胜区伊金霍洛西街17号太古国际A区一层","0477-3908855"],
["内蒙古","巴彦淖尔","河港","临河区胜利北路河港青年城","0478-8276622"],
["内蒙古","巴彦淖尔","乌拉特前旗店KFC","乌拉特前旗乌拉山镇十五区包百广场一层","13752332705"],
["内蒙古","乌兰察布","集宁维多利店KFC","集宁区恩和大街维多利广场一层肯德基","0474-8986318"],
["内蒙古","乌兰察布","春天","宁区恩和路210号春天大厦一层","0474-8151936"],
["内蒙古","锡林郭勒","二连浩特","二连浩特市前进南路8号二连浩特百货大厦","0479-2220008"],
["新疆","乌鲁木齐","友好","友好北路486号","0991-4852269"],
["新疆","乌鲁木齐","红旗","中山路309号肯德基餐厅","0991-2321415"],
["新疆","乌鲁木齐","新医","鲤鱼山路2号红桥商贸大厦1层","0991-4339181"],
["新疆","乌鲁木齐","铁路","北京南路895号豪威大厦1层","0991-3822695"],
["新疆","乌鲁木齐","苏州路","北京南路28号家乐福苏州路购物中心一层","0991-3691363"],
["新疆","乌鲁木齐","福润德","北京南路433号福润德购物中心一层","0991-3697028"],
["新疆","乌鲁木齐","长春路友好","长春南路216号","0991-3195456"],
["新疆","乌鲁木齐","乌鲁木齐机场T3店KFC","地窝堡国际机场T3航站楼3层","0991-3800452"],
["新疆","乌鲁木齐","成功餐厅","乌鲁木齐市新华北路39号","0991-2835651"],
["新疆","乌鲁木齐","福润德餐厅","新市区北京南路433号","0991-3697028"],
["新疆","克拉玛依","准噶尔","准噶尔路73号一层","0990-6223205"],
["新疆","克拉玛依","胜利","准噶尔路76号阳光时代购物中心一,二层","0990-6223210"],
["新疆","克拉玛依","石化","大庆西路38号友好金盛时尚百货一层","0992-3655622"],
["新疆","哈密","广场","广场南路丰茂购物广场一层","0902-2238011"],
["新疆","昌吉","亚心","延安北路198号东方购物中心一层","0994-2360398"],
["新疆","博乐","博乐友好店KFC","北京路与新华路交汇处地下一层至地上六层","0909-2287957"],
["新疆","库尔勒","巴州","库尔勒萨依巴格路2号巴州大厦1层","0996-2035386"],
["新疆","库尔勒","天百","人民东路6号库尔勒天百购物中心一层","0996-2266280"],
["新疆","库尔勒","库尔勒朝阳路店KFC","34号小区朝阳路东南侧汇嘉时代购物中心(一期)地上一层","0996-2207776"],
["新疆","阿克苏","阿克苏友好店KFC","迎宾路2号天百时尚购物中心一层肯德基餐厅","0997-2535055"],
["新疆","伊宁","伊宁友好","解放南路铜锣湾商业区B区","0999-8031005"],
["新疆","奎屯","奎屯友好","飞鸿里北京东路3栋友好时尚购物中心一、二层","0992-3271918"],
["新疆","石河子","石城","北子午路140号好家乡超一层","0993-2099979"],
["新疆","石河子","石河子友好","北三路91号友好超一层","0993-2707008"],
["广西","南宁","古城餐厅","广西南宁市民族大道49号民族宫立体停车场一二三层","0771-2619606"],
["广西","南宁","北湖北餐厅","南宁市北湖北路荣和MOCO社区一、二层","0771-3189166"],
["广西","南宁","柳南餐厅","南宁市宾阳县和吉镇岭甲村委莲花村地段的柳州至南宁高速公路宾阳服务区西区一层","15578905981"],
["广西","南宁","南柳餐厅","南宁市宾阳县和吉镇岭甲村委莲花村地段的南宁至柳州高速公路宾阳服务区东区一层","15578920931"],
["广西","南宁","西南商都餐厅","南宁市朝阳路29号西南商都二、三层","0771-6771201"],
["广西","南宁","新朝阳餐厅","南宁市朝阳路38号新朝阳商业广场","0771-5798028"],
["广西","南宁","望园餐厅","南宁市东葛路82号永凯现代城肯德基","0771-5681071"],
["广西","南宁","淡村餐厅","南宁市江南区五一路淡村商贸城一号楼东北角一、二层","0771-4895302"],
["广西","南宁","桃源路餐厅","南宁市教育路22号南湖御景大厦商业裙楼1-2层","0771-5721140"],
["广西","南宁","大沙田餐厅","南宁市良庆区大沙田开发区十三小区(五象大道1号)鑫利华大厦一层","0771-4511564"],
["广西","南宁","航洋餐厅","南宁市民族大道131号航洋国际购物中心一、二层","0771-5594880"],
["广西","南宁","凤岭餐厅","南宁市民族大道186号琅东客运站","0771-5510935"],
["广西","南宁","五象餐厅","南宁市民族大道五象广场负一层","0771-2098189"],
["广西","南宁","明秀餐厅","南宁市明秀西路122号城市碧园首层","0771-3829599"],
["广西","南宁","新民餐厅","南宁市七星路王府井百货首层","0771-2082512"],
["广西","南宁","青秀万达餐厅","南宁市青秀区东葛路118号万达广场一楼1033B号商铺","0771-5572070"],
["广西","南宁","万象城餐厅","南宁市青秀区民族大道136号南宁华润中心万象城B133 号商铺","0771-5825372"],
["广西","南宁","盛天地餐厅","南宁市青秀区中越路8号盛天华府B10号楼一层02号、03号、06号商铺","0771-5828782"],
["广西","南宁","秀灵餐厅","南宁市西乡塘区秀灵路东四里1号天域·香格里拉一层","0771-3925144"],
["广西","南宁","新阳餐厅","南宁市新阳路286号阳光康城一层","0771-3812371"],
["广西","南宁","北湖餐厅","南宁市友爱路22号1-2层","0771-2181193"],
["广西","南宁","长湖餐厅","南宁市长湖路富安居(国际)家居广场停车场入口独立两层小楼","0771-5591673"],
["广西","南宁","正恒餐厅","西乡塘区明秀西路111号正恒国际广场2号楼一、二层","0771-3868530"],
["广西","南宁","南宁东站店KFC","火车东站54#包件(GJJ-23)商铺","13923746970"],
["广西","南宁","凤岭二店店KFC","民族大道186号琅东客运站出发厅二层","0771-5593055"],
["广西","南宁","安吉万达店KFC","西乡塘区高新大道以南、安阳路以西的万达广场一层","0771-3386141"],
["广西","南宁","华南城DT店KFC","沙井大道56号","0771-4891633"],
["广西","柳州","谷埠餐厅","柳州市飞鹅二路1号谷埠街国际商城E区一、二层","0772-8817373"],
["广西","柳州","新地王餐厅","柳州市广场路10号地王国际财富中心一层","0772-8250061"],
["广西","柳州","航生餐厅","柳州市航生路与航一路交汇处","0772--3310490"],
["广西","柳州","恒隆汇餐厅","柳州市瑞安路2号一层","0772-3312670"],
["广西","柳州","胜利餐厅","柳州市胜利路北面中段12-6号","0772-8852044"],
["广西","柳州","飞鹅餐厅","柳州市鱼峰区飞鹅路新时代商业港商业休闲中心1-2层","0772-8800055"],
["广西","柳州","柳州万达店KFC","柳州市东环大道256号万达广场内一层的第1021号商铺、二层 第2021商铺","0772-8811168"],
["广西","柳州","谷埠餐厅","飞鹅二路1号谷埠街国际商城C区二层肯德基","0772-8819393"],
["广西","柳州","柳州餐厅","龙城路工贸大厦首层","0772-2812375"],
["广西","柳州","星河餐厅","公园路72号星河大厦首层","0772-2868882"],
["广西","桂林","红街餐厅","桂林市崇信路49号桂林红街商业广场1#楼一、二层","0773-3130398"],
["广西","桂林","叠彩餐厅","桂林市叠彩区中山北路113号美居商贸城二期A区一层40-41","0773-2673111"],
["广西","桂林","万福餐厅","桂林市凯风路90号万福广场A区一、二层","0773-7590366"],
["广西","桂林","购物公园餐厅","桂林市漓江路桂林购物中心一层","0773-5819616"],
["广西","桂林","桂林机场餐厅","桂林市两江国际机场候机楼一层到达厅","0773-2845010"],
["广西","桂林","金水餐厅","桂林市临桂县临桂镇金水路3号天下桂林步行街F4栋1-3号一、二层","0773-5589101"],
["广西","桂林","联达餐厅","桂林市象山区环城西一路117号联达商业广场1号楼1-2层","0773-3851622"],
["广西","桂林","正阳餐厅","桂林市依仁路7号","0773-2856027"],
["广西","桂林","微笑堂餐厅","桂林市中山中路37号","0773-2817155"],
["广西","桂林","桂林万达店KFC","环城南一路万达广场一层1065号商铺、二层 2059号商铺","0773-7592906"],
["广西","桂林","西城餐厅","中山中路3号桂林饭店1、2层肯德基餐厅","0773-2569165"],
["广西","桂林","桂百餐厅","中山中路14百货大楼1、2层肯德基餐厅","0773-2860660"],
["广西","梧州","旺城餐厅","梧州市大学路36号旺城广场一层","0774-3999501"],
["广西","梧州","梧州中山餐厅","万秀区中山路11号粤西楼一二层","0774-2028228"],
["广西","梧州","梧州新兴餐厅","新兴一路68号太阳广场","0774-3845068"],
["广西","北海","北海餐厅","北海市北部湾中路62号新力广厦一层","0779-3077933"],
["广西","北海","购物广场餐厅","北海市北海大道与北京路交汇处北海城市购物广场首层","0779-3225858"],
["广西","北海","合浦餐厅","北海市合浦县廉州镇环珠大道一层","0779-7280399"],
["广西","北海","合浦店","北海市合浦县廉州镇环珠大道一层","0779-7280399"],
["广西","防城港","恒富餐厅","防城区群星大道与二桥东路交汇处","0770-3239550"],
["广西","钦州","明珠大润发餐厅","钦州市钦南区南珠西大街223号大润发一楼肯德基","0777-5726269"],
["广西","钦州","钦州湾餐厅","钦州市钦南区钦州湾大道年年丰广场二楼","0777-3898139"],
["广西","钦州","钦福餐厅","钦州市永福西大街赛格新时代广场首层A1-2号","0777-2881901"],
["广西","钦州","钦福餐厅","广西钦州市永福西大街赛格新时代广场首层A1-2号","0777-2881902"],
["广西","贵港","贵港餐厅","贵港市江北大道丰宝商业城一层","0775-4238555"],
["广西","贵港","港福时代店KFC","建设路中段北侧港福时代广场一层第1019号商铺","0775-4360269"],
["广西","玉林","东门餐厅","玉林市人民中路六路口东门商业广场B幢1、2层","0775-2691110"],
["广西","玉林","银丰餐厅","玉林市玉州区银丰广场大润发购物广场一层","0775-2630789"],
["广西","玉林","博白店KFC","博白县南洲南路147号博白宝中宝购物广场 一层、二层","0775-8551666"],
["广西","玉林","玉林餐厅","人民中路737号玉林百货公司南江大楼一二层","0775-2822812"],
["广西","百色","中新餐厅","百色市新兴路18号恒基·城市广场1#楼","0776-2873385"],
["广西","百色","百色餐厅","百色市中山一路金世纪商业广场首层","0776-2860200"],
["广西","贺州","灵峰餐厅","广西省贺州市灵峰北路远东国贸大厦一层","0774-5282301"],
["广西","贺州","灵峰店","灵峰路与鞍山西路交汇处","0774-5282301"],
["广西","贺州","贺州餐厅","建设中路93号新时代一二层","0774-5289992"],
["广西","河池","池百货餐厅","广西壮族自治区河池市新建路31号","0778-2231009"],
["广西","来宾","大地景苑餐厅","来宾市桂中大道368号一层","0772-4258397"],
["贵州","贵阳","富水餐厅","贵州省贵阳市富水北路6号","0851-8217513"],
["贵州","贵阳","金阳北路餐厅","贵州省贵阳市观山湖区金朱路新世界项目G4区一层","18111861535"],
["贵州","贵阳","金阳餐厅","贵州省贵阳市金阳南路6号","0851-2266155"],
["贵州","贵阳","新龙洞堡机场餐厅","贵州省贵阳市龙洞堡机场新航站楼商业区域","0851-5499012"],
["贵州","贵阳","花果园餐厅","贵州省贵阳市南明区花果园大街1号花果园购物中心GF-08","0851-5973982"],
["贵州","贵阳","文昌阁餐厅","贵州省贵阳市文昌南路亨特国际二期一层家乐福旁","0851-5636855"],
["贵州","贵阳","喷水池餐厅","贵州省贵阳市中华中路168号","0851-5828721"],
["贵州","贵阳","腾达餐厅","贵州省贵阳市中山西路78号国晨百货一楼","0851-5830861"],
["贵州","贵阳","人民广场餐厅","贵州省贵阳市遵义路人民广场地下商场","0851-5868565"],
["贵州","贵阳","新添大道店","乌当区新添大道北段197号泉天下购物中心1层","18985544862"],
["贵州","贵阳","观山东路店","云岩区贵乌北路199号中大国际购物中心2层(北广场临街1层)","15608501940"],
["贵州","贵阳","大南门餐厅","贵阳市南明区富水南路2号","0851-85809726"],
["贵州","六盘水","海鑫餐厅","贵州省六盘水市中山西路海鑫商业广场肯德基餐厅","0858-8293200"],
["贵州","六盘水","帝都新城餐厅","贵州省六盘水市钟山大道帝都新城大润发超市旁","0858-2149022"],
["贵州","六盘水","太阳广场餐厅","贵州省六盘水市钟山区人民广场天龙路太阳商业广场1楼","0858-8683116"],
["贵州","遵义","港澳广场餐厅","贵州省遵义市澳门路1号","0852-8639556"],
["贵州","遵义","民主餐厅","贵州省遵义市红花岗玉屏路龙华老城新街B区1935新世纪精品百货广场大厦","0852-8251986"],
["贵州","遵义","星力城餐厅","贵州省遵义市万里路万里湘江C栋星力城一层","0852-7913893"],
["贵州","安顺","黄果树餐厅","贵州省安顺市西秀区龙泉路大润发1-2楼","0853-3597075"],
["贵州","安顺","武庙餐厅","贵州省安顺市中华东路中环商业广场A栋1-2层","0853-8104777"],
["贵州","铜仁","锦江餐厅","贵州省铜仁市民主路步行街13号","0856-5227303"],
["贵州","兴义","兴义桔丰路餐厅","贵州省兴义市桔丰路侧兴义购物商城3号楼","18685007674"],
["贵州","凯里","韶山餐厅","贵州省凯里市韶山南路17号","0855-8227008"],
["海南","海口","滨海餐厅","海口市滨海大道秀英时代广场首层","0898-68636310"],
["海南","海口","大同餐厅","海口市大同路1号施达商场一楼","0898-66116523"],
["海南","海口","国兴餐厅","海口市国兴大道北侧盛达景都一层","0898-65228856"],
["海南","海口","海秀餐厅","海口市海秀路10号明珠广场一二楼","0898-66746593"],
["海南","海口","红城湖餐厅","海口市红城湖名富广场一层","0898-65815598"],
["海南","海口","海府餐厅","海口市美兰区海府路18号南亚广场首层","0898-31599728"],
["海南","海口","海甸餐厅","海口市人民路 12号民博大厦1楼(海甸三西西路与人民桥交汇处)","0898-66292929"],
["海南","海口","金贸东餐厅","海口市玉沙路西侧金玉广场C区1FC038,2FC052","0898-68512490"],
["海南","海口","东站餐厅","海南东环高铁海口东站HKD1、HKD2处商业网点","0898--65886660"],
["海南","海口","海垦餐厅","海南省海口市海垦路51号西岭华庭商业广场1层101号单元","0898-68961851"],
["海南","海口","友谊餐厅","海南省海口市龙华路友谊商场一楼","0898-66102573"],
["海南","海口","龙昆南餐厅","海南省海口市龙昆南路和信广场一层","0898-66735173"],
["海南","海口","望海餐厅","海南省海口市望海国际商业广场一、二楼","0898-66510613"],
["海南","海口","白龙餐厅","海南省海口市文明东路213号一层","0898-65316120"],
["海南","海口","名门餐厅","海南省海口市五指山路名门广场一楼","0898-65308153"],
["海南","海口","宜欣广场","龙华区明珠路8号宜欣商业广场一层","0898-68537206"],
["海南","海口","国贸餐厅","国贸大道28号金贸商城首层肯德基","0898-68599310"],
["海南","海口","琼山餐厅","琼山区府城忠介路琼山百货大楼一楼肯德基餐厅","0898-65886434"],
["海南","海口","金世纪餐厅","美兰区海秀路20号亿圣和商场一楼肯德基餐厅","0898-66799026"],
["海南","海口","上邦餐厅","龙华区金龙路98号上邦百汇城2号楼一楼肯德基餐厅","0898-68570266"],
["海南","三亚","国际餐厅","海南三亚市解放一路国际购物中心","0898-88895866"],
["海南","三亚","三亚站餐厅","海南省东环高铁三亚站SY2处商业网点","0898-88202395"],
["海南","三亚","海虹路餐厅","海南省三亚市海虹路鲁能三亚湾美丽五区项目2号楼 一层03和04号商铺","0898-88385589"],
["海南","三亚","海棠湾餐厅","海南省三亚市海棠湾旅游区海棠北路免税购物商城3楼B区","0898-88811958"],
["海南","三亚","港华餐厅","海南省三亚市商品街一巷港华商业城一楼","0898-88698189"],
["海南","三亚","亚龙湾餐厅","海南省三亚市亚龙湾区亚龙湾商业中心B2","0898-88591866"],
["海南","三亚","宝盛餐厅","三亚市解放二路002号宝盛百货广场一、二层","0898-88616006"],
["海南","三亚","街餐厅","三亚市解放路步行街","0898-88355061"],
["海南","三亚","时代海岸餐厅","三亚市解放路时代海岸水居巷香榭左岸裙楼鸿洲.渔人码头1栋","0898-88691876"],
["海南","三亚","三亚餐厅","三亚市解放路与新风路交汇处三亚工贸大厦","0898-88368988"],
["海南","三亚","解放","解放路668号三亚明珠广场首层","0898-88366997"],
["海南","三亚","凤凰机场","凤凰机场国内到达厅10-13G-G1区","0898-88289408"],
["海南","琼海","东升餐厅","海南省琼海市东风路一号","0898-62822969"],
["海南","琼海","琼海店","琼海市东风路59号金地大厦1楼肯德基餐厅","0898-62811588"],
["海南","儋州","大勇餐厅","儋州市那大人民中路205号1至4层","0898-23333231"],
["海南","儋州","儋州餐厅","儋州市那大中兴大街万福隆百货1层(儋州汽车站旁)","0898--23868128"],
["海南","儋州","创新餐厅","海南省儋州市解放南路创新文化广场号一层 、二层","0898-23832696"],
["海南","儋州","儋州夏日餐厅","兰洋北路与伏波路交叉口儋州夏日广场一层1F_006号商铺","0898-23882128"],
["海南","文昌","文中餐厅","海南省文昌文城镇文中路9号(亿嘉广场对面)","0898-63213818"],
["海南","万宁","万宁店","万宁市人民中路文化商业广场首层肯德基餐厅","0898-62226488"],
["海南","东方","东方餐厅","东方市解放路万福隆商场首层","0898-25529366"],
["宁夏","银川","解放餐厅","解放东街(红花渠东侧)华润万家超市一层","0951-5137620"],
["宁夏","银川","新银百餐厅","解放东街2#老大楼一层","0951-4793232"],
["宁夏","银川","利民餐厅","解放西街126号盛世春天一层","0951-5032396"],
["宁夏","银川","良田餐厅","金凤区庆丰街288号良田超市","0951-5027355"],
["宁夏","银川","西夏餐厅","西夏区万达广场1号门肯德基西夏餐厅","0951-8689062"],
["宁夏","银川","文萃餐厅","西夏区文萃北路新华百货宁阳超市一层肯德基餐厅","0951-3820988"],
["宁夏","银川","新银华餐厅","新华东街74号一二层","0951-5687739"],
["宁夏","银川","华利餐厅","兴庆街新华东街40号一、二层","0951-6081984"],
["宁夏","银川","民族餐厅","兴庆区民族南街星光花园14号综合楼3号房 一、二层","0951-5086855"],
["宁夏","银川","河东机场餐厅","银川河东机场T2航站楼到达层CY4","0951-6912832"],
["宁夏","银川","银川火车站餐厅","银川新火车站候车大厅二楼北侧肯德基餐厅","0951—6895986"],
["宁夏","银川","金凤餐厅","正源北街万达金凤广场1+2F,2号门","0951-6048001"],
["宁夏","银川","银川利民餐厅","解放西街126号KFC餐厅","0951-5032396"],
["宁夏","石嘴山","贺兰山餐厅","大武口区游艺东街6号一层肯德基餐厅","0952-2029677"],
["宁夏","石嘴山","阳光餐厅","惠农区阳光商厦一层","0952-3026027"],
["青海","西宁","东大街(西安)","东大街53号肯德基餐厅","0971-8220238"],
["青海","西宁","建银","西大街59号建银宾馆一层","0971-8243093"],
["青海","西宁","新西大街","西大街40号肯德基餐厅","0971-8117231"],
["青海","西宁","西宁五四","城西区五四大街76号","0971-6118833"],
["青海","西宁","西宁兴旺","五四西路1号兴旺大厦负一层肯德基餐厅","0971-4518569"],
["青海","西宁","西宁商业巷","五四大街商业巷国芳百货一楼","0971-6184077"],
["青海","西宁","西宁海湖店KFC","海湖新区文景路与文汇路交汇处东北角新华联购物广场一层","0971-6361819"],
["云南","昆明","启鸿假日餐厅","昆明市五华区滇缅大道与昌源中路交叉口假日城市小区5栋一层","0871-68195185"],
["云南","昆明","北辰餐厅","云南省昆明市北京路延长线财富中心E栋","0871-5730107"],
["云南","昆明","白云餐厅","云南省昆明市北京路延长线大白庙村家乐福超市一层","0871-5709170"],
["云南","昆明","嘉年华餐厅","云南省昆明市北市区霖雨路嘉年华广场一楼","0871-5096810"],
["云南","昆明","和谐世纪餐厅","云南省昆明市北市区小康大道一层","0871-5148851"],
["云南","昆明","山姆餐厅","云南省昆明市春城路沃尔玛山姆会员店","0871-3573224"],
["云南","昆明","大观餐厅","云南省昆明市大观街大观商业城沃尔玛一层","0871-5370986"],
["云南","昆明","新南亚餐厅","云南省昆明市滇池路南亚风情园569号","0871-4593193"],
["云南","昆明","东寺街餐厅","云南省昆明市东寺街东方广场A座一、二层","0871-4186030"],
["云南","昆明","官南餐厅","云南省昆明市官渡区金源大道1号","0871-67015164"],
["云南","昆明","集大餐厅","云南省昆明市环城东路集大沃尔玛一层","0871-3395208"],
["云南","昆明","益龙万象城餐厅","云南省昆明市环城东路与白龙路交叉口益龙万象城一楼","0871-8095207"],
["云南","昆明","正大紫都城餐厅","云南省昆明市交菱路与二环西路交叉口","0871-8312468"],
["云南","昆明","龙泉餐厅","云南省昆明市龙泉路35号-37号","0871-5151835"],
["云南","昆明","前兴路万达餐厅","云南省昆明市前兴路万达广场","0871-63630991"],
["云南","昆明","青年路餐厅","云南省昆明市人民中路17号肯德基青年路餐厅","0871-3163855"],
["云南","昆明","正义路餐厅","云南省昆明市人民中路51,53号正义西街133号一层及正义坊A1幢二层","0871-8369171"],
["云南","昆明","祥云餐厅","云南省昆明市同仁街41号倾城名店广场一层肯德基餐厅","0871-3640220"],
["云南","昆明","新柏联餐厅","云南省昆明市五华区三市街6号商业街一二楼","0871-3646816"],
["云南","昆明","顺城餐厅","云南省昆明市五华区顺城街15号","0871-3631597"],
["云南","昆明","海埂路餐厅","云南省昆明市西山区海埂路云纺女性主题商场一层","0871-4136970"],
["云南","昆明","新闻路餐厅","云南省昆明市新闻路337-339号一层","0871-4165169"],
["云南","昆明","光辉城市餐厅","云南省昆明市新兴路光辉城市(沃尔玛入口处)","0871-4598767"],
["云南","昆明","新迎餐厅","云南省昆明市新迎路212号","0871-3329095"],
["云南","昆明","西山餐厅","云南省昆明市兴苑路城市领地花园超市一层","0871-8226301"],
["云南","昆明","月牙塘餐厅","云南省昆明市烟草路市政府月牙塘小区瑞丰商业广场2幢2-1肯德基餐厅","0871-5221851"],
["云南","昆明","云大餐厅","云南省昆明市圆通东路佰腾数码广场一层","0871-5114292"],
["云南","昆明","云山路餐厅","云南省昆明市云山路与二环西路交叉口","0871-8189708"],
["云南","昆明","长水机场精选餐厅","云南省昆明市长水国际机场出发大厅B1-1","18908808566"],
["云南","昆明","长水机场餐厅","云南省昆明市长水国际机场候机楼负一楼","0871-7085682"],
["云南","昆明","世纪城餐厅","云南省世纪城金源大道金源购物中心1楼1号","0871-7199357"],
["云南","昆明","美辰餐厅","云南省昆明市人民中路富春街98号","0871-3612225"],
["云南","昆明","美辰餐厅","人民中路富春街98号美辰百货一楼KFC餐厅","0871-63612225"],
["云南","安宁","大屯餐厅","云南省昆明市安宁市大屯新区大屯路金色时代广场","0871-8871946"],
["云南","安宁","金方餐厅","云南省安宁市湖滨路1幢"],
]
| [
"344627181@qq.com"
] | 344627181@qq.com |
c1e9e59c5aff9b31e0a98e566fa60528b8128e5d | f460b2b8aadf8a6b0f7df9386132b44ab9d633ff | /backend/testapp_24367/settings.py | 5f9aff5eb501ffbf83db70b9c12c5cd3dfbdd311 | [] | no_license | crowdbotics-apps/testapp-24367 | 083bf7033b43ef38bfdb2b9bf0eb104551081e54 | 256d7cebe3bd59ccf26bf22175ad484033ab7edd | refs/heads/master | 2023-02-28T01:00:45.357057 | 2021-02-08T10:09:35 | 2021-02-08T10:09:35 | 337,032,868 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,064 | py | """
Django settings for testapp_24367 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.sites",
"chat",
"chat_user_profile",
]
LOCAL_APPS = [
"home",
"modules",
"users.apps.UsersConfig",
]
THIRD_PARTY_APPS = [
"rest_framework",
"rest_framework.authtoken",
"rest_auth",
"rest_auth.registration",
"bootstrap4",
"allauth",
"allauth.account",
"allauth.socialaccount",
"allauth.socialaccount.providers.google",
"django_extensions",
"drf_yasg",
"storages",
# start fcm_django push notifications
"fcm_django",
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "testapp_24367.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "testapp_24367.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "db.sqlite3"),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {"default": env.db()}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
MIDDLEWARE += ["whitenoise.middleware.WhiteNoiseMiddleware"]
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
USE_S3 = (
AWS_ACCESS_KEY_ID
and AWS_SECRET_ACCESS_KEY
and AWS_STORAGE_BUCKET_NAME
and AWS_STORAGE_REGION
)
if USE_S3:
AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
DEFAULT_FILE_STORAGE = env.str(
"DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
)
MEDIA_URL = "/mediafiles/"
MEDIA_ROOT = os.path.join(BASE_DIR, "mediafiles")
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning(
"You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails."
)
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
e240bb96f270e88c1e2bff43c0a19504f915d227 | acd41dc7e684eb2e58b6bef2b3e86950b8064945 | /res/packages/scripts/scripts/client/gui/Scaleform/locale/TOOLTIPS.py | c3c1d037d5debc13f5b2a675ef4d68144878142d | [] | no_license | webiumsk/WoT-0.9.18.0 | e07acd08b33bfe7c73c910f5cb2a054a58a9beea | 89979c1ad547f1a1bbb2189f5ee3b10685e9a216 | refs/heads/master | 2021-01-20T09:37:10.323406 | 2017-05-04T13:51:43 | 2017-05-04T13:51:43 | 90,268,530 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 234,845 | py | # 2017.05.04 15:25:24 Střední Evropa (letní čas)
# Embedded file name: scripts/client/gui/Scaleform/locale/TOOLTIPS.py
"""
This file was generated using the wgpygen.
Please, don't edit this file manually.
"""
from debug_utils import LOG_WARNING
class TOOLTIPS(object):
ACHIEVEMENT_PARAMS = '#tooltips:achievement/params'
BATTLERESULTS_FORTRESOURCE_TEAMINFLUENCETOTAL = '#tooltips:battleResults/FortResource/teamInfluenceTotal'
BATTLERESULTS_FORTRESOURCE_TEAMRESOURCETOTAL = '#tooltips:battleResults/FortResource/teamResourceTotal'
BATTLERESULTS_FORTRESOURCE_RESULTSSHAREBTN = '#tooltips:battleResults/FortResource/resultsShareBtn'
BATTLERESULTS_CYBERSPORT_POINTS_WIN = '#tooltips:battleResults/CyberSport/points/win'
BATTLERESULTS_CYBERSPORT_POINTS_LOSE = '#tooltips:battleResults/CyberSport/points/lose'
BATTLERESULTS_VICTORYSCOREDESCRIPTION = '#tooltips:battleResults/victoryScoreDescription'
SHOWVECTORONMAP = '#tooltips:showVectorOnMap'
SHOWSECTORONMAP = '#tooltips:showSectorOnMap'
SHOWVEHMODELSONMAP = '#tooltips:showVehModelsOnMap'
BATTLELOADINGINFO = '#tooltips:battleLoadingInfo'
PRIVATEQUESTS_AWARDSBUTTON = '#tooltips:privateQuests/awardsButton'
PRIVATEQUESTS_SLOT_EMPTY = '#tooltips:privateQuests/slot/empty'
PRIVATEQUESTS_SLOT_MISSION = '#tooltips:privateQuests/slot/mission'
PRIVATEQUESTS_SLOT_MISSIONCOMPLETE = '#tooltips:privateQuests/slot/missionComplete'
PRIVATEQUESTS_BACKBUTTON = '#tooltips:privateQuests/backButton'
PRIVATEQUESTS_TASKLISTITEM = '#tooltips:privateQuests/taskListItem'
PRIVATEQUESTS_ABOUTVEHICLE = '#tooltips:privateQuests/aboutVehicle'
PRIVATEQUESTS_SHOWVEHICLE = '#tooltips:privateQuests/showVehicle'
PRIVATEQUESTS_ACTIONPANNEL_PERFORM = '#tooltips:privateQuests/actionPannel/perform'
PRIVATEQUESTS_ACTIONPANNEL_ABORT = '#tooltips:privateQuests/actionPannel/abort'
PRIVATEQUESTS_ACTIONPANNEL_RECEIVETHEAWARD = '#tooltips:privateQuests/actionPannel/receiveTheAward'
PRIVATEQUESTS_ACTIONPANNEL_REPEAT = '#tooltips:privateQuests/actionPannel/repeat'
PRIVATEQUESTS_QUESTCONTROL = '#tooltips:privateQuests/questControl'
SQUADWINDOW_BUTTONS_INVITE = '#tooltips:squadWindow/buttons/invite'
SQUADWINDOW_BUTTONS_RECOMMEND = '#tooltips:squadWindow/buttons/recommend'
SQUADWINDOW_BUTTONS_LEAVESQUAD = '#tooltips:squadWindow/buttons/leaveSquad'
SQUADWINDOW_BUTTONS_LEAVEEVENTSQUAD = '#tooltips:squadWindow/buttons/leaveEventSquad'
SQUADWINDOW_BUTTONS_BTNFIGHT = '#tooltips:squadWindow/buttons/btnFight'
SQUADWINDOW_BUTTONS_BTNNOTREADY = '#tooltips:squadWindow/buttons/btnNotReady'
SQUADWINDOW_BUTTONS_SENDMESSAGE = '#tooltips:squadWindow/buttons/sendMessage'
SQUADWINDOW_BATTLETYPEINFO = '#tooltips:squadWindow/battleTypeInfo'
SQUADWINDOW_TEAMMATE_NOTVALIDVEHICLE = '#tooltips:squadWindow/teammate/notValidVehicle'
SQUADWINDOW_DEMANDFORVEHICLE_NOTVALIDVEHICLE = '#tooltips:squadWindow/demandForVehicle/notValidVehicle'
SQUADWINDOW_DOMINATION_VEHICLESINFOICON = '#tooltips:squadWindow/domination/vehiclesInfoIcon'
LOGIN_LEGAL = '#tooltips:login/legal'
LOGIN_REMEMBERPASSWORD_SIMPLE = '#tooltips:login/rememberPassword/simple'
LOGIN_REMEMBERPASSWORD_SOCIAL = '#tooltips:login/rememberPassword/social'
LOGIN_BYSOCIAL = '#tooltips:login/bySocial'
LOGIN_SOCIAL_ENTER = '#tooltips:login/social/enter'
LOGIN_CHANGEACCOUNT = '#tooltips:login/changeAccount'
LOGIN_BGMODEBUTTON_ON = '#tooltips:login/bgModeButton/on'
LOGIN_BGMODEBUTTON_OFF = '#tooltips:login/bgModeButton/off'
LOGIN_SOUNDBUTTON_ON = '#tooltips:login/soundButton/on'
LOGIN_SOUNDBUTTON_OFF = '#tooltips:login/soundButton/off'
LOBBYMENU_VERSIONINFOBUTTON = '#tooltips:lobbyMenu/versionInfoButton'
PREBATTLE_NAMEFILTERBUTTON = '#tooltips:prebattle/nameFilterButton'
PREBATTLE_INVITATIONS_TOOLTIP_ISONLINE = '#tooltips:prebattle/invitations/tooltip/isOnline'
CYBERSPORT_SELECTVEHICLE = '#tooltips:cyberSport/selectVehicle'
CYBERSPORT_RESPAWN_STATUS_INFO = '#tooltips:cyberSport/respawn/status/info'
CYBERSPORT_RESPAWN_STATUS_WARNING = '#tooltips:cyberSport/respawn/status/warning'
CYBERSPORT_MODECHANGEFROZEN = '#tooltips:cyberSport/modeChangeFrozen'
CYBERSPORT_ROSTERSLOTSETTINGS_HEADERTEXT = '#tooltips:cyberSport/rosterSlotSettings/headerText'
ACHIEVEMENT_ATTR_COUNTER = '#tooltips:achievement/attr/counter'
ACHIEVEMENT_ATTR_SMALL = '#tooltips:achievement/attr/small'
ACHIEVEMENT_ATTR_VEHICLERECORD = '#tooltips:achievement/attr/vehicleRecord'
ACHIEVEMENT_ATTR_RECORD = '#tooltips:achievement/attr/record'
ACHIEVEMENT_ATTR_DEGREE = '#tooltips:achievement/attr/degree'
HANGAR_MAINTENANCE = '#tooltips:hangar/maintenance'
HANGAR_TUNING = '#tooltips:hangar/tuning'
HANGAR_TUNING_DISABLEDFOREVENTVEHICLE = '#tooltips:hangar/tuning/disabledForEventVehicle'
HANGAR_AMMO_PANEL_DEVICE_EMPTY = '#tooltips:hangar/ammo_panel/device/empty'
HANGAR_AMMO_PANEL_EQUIPMENT_EMPTY = '#tooltips:hangar/ammo_panel/equipment/empty'
HANGAR_UNLOCKBUTTON = '#tooltips:hangar/unlockButton'
HANGAR_CREW_RUDY_DOG = '#tooltips:hangar/crew/rudy/dog'
HEADER_ACCOUNT = '#tooltips:header/account'
HEADER_SQUAD = '#tooltips:header/squad'
HEADER_EVENTSQUAD = '#tooltips:header/eventSquad'
HEADER_DOMINATIONSQUAD = '#tooltips:header/dominationSquad'
HEADER_SQUAD_MEMBER = '#tooltips:header/squad_member'
HEADER_BATTLETYPE = '#tooltips:header/battleType'
HEADER_INFO_PLAYERS_UNAVAILABLE = '#tooltips:header/info/players_unavailable'
HEADER_INFO_PLAYERS_ONLINE_REGION = '#tooltips:header/info/players_online_region'
HEADER_INFO_PLAYERS_ONLINE_FULL = '#tooltips:header/info/players_online_full'
HEADER_BUTTONS_HANGAR = '#tooltips:header/buttons/hangar'
HEADER_BUTTONS_INVENTORY = '#tooltips:header/buttons/inventory'
HEADER_BUTTONS_SHOP = '#tooltips:header/buttons/shop'
HEADER_BUTTONS_PROFILE = '#tooltips:header/buttons/profile'
HEADER_BUTTONS_TECHTREE = '#tooltips:header/buttons/techtree'
HEADER_BUTTONS_BARRACKS = '#tooltips:header/buttons/barracks'
HEADER_BUTTONS_FORTS = '#tooltips:header/buttons/forts'
HEADER_BUTTONS_FORTS_TURNEDOFF = '#tooltips:header/buttons/forts/turnedOff'
HEADER_BUTTONS_FORTS_SANDBOX_TURNEDOFF = '#tooltips:header/buttons/forts/sandbox/turnedOff'
HEADER_BUTTONS_BROWSER = '#tooltips:header/buttons/browser'
HEADER_BUTTONS_ENCYCLOPEDIA = '#tooltips:header/buttons/encyclopedia'
HEADER_ELITEICON = '#tooltips:header/eliteIcon'
HEADER_PREMIUM_BUY = '#tooltips:header/premium_buy'
HEADER_PREMIUM_EXTEND = '#tooltips:header/premium_extend'
HEADER_REFILL = '#tooltips:header/refill'
HEADER_REFILL_ACTION = '#tooltips:header/refill_action'
HEADER_GOLD_EXCHANGE = '#tooltips:header/gold_exchange'
HEADER_XP_GATHERING = '#tooltips:header/xp_gathering'
TRAINING_CREATE_INVITES_CHECKBOX = '#tooltips:training/create/invites_checkbox'
TRAINING_OBSERVER_BTN = '#tooltips:training/observer/btn'
TRAINING_OBSERVER_SELECTEDICON = '#tooltips:training/observer/selectedicon'
TRAINING_OBSERVER_ICON = '#tooltips:training/observer/icon'
BARRACKS_ITEM_EMPTY = '#tooltips:barracks/item_empty'
BARRACKS_ITEM_BUY = '#tooltips:barracks/item_buy'
BARRACKS_TANKMEN_UNLOAD = '#tooltips:barracks/tankmen/unload'
BARRACKS_TANKMEN_DISMISS = '#tooltips:barracks/tankmen/dismiss'
VEHICLESELLDIALOG_RENDERER_ALERTICON = '#tooltips:vehicleSellDialog/renderer/alertIcon'
BATTLETYPES_STANDART = '#tooltips:battleTypes/standart'
BATTLETYPES_LEAVEUNIT = '#tooltips:battleTypes/leaveUnit'
BATTLETYPES_LEAVECOMPANY = '#tooltips:battleTypes/leaveCompany'
BATTLETYPES_LEAVESPEC = '#tooltips:battleTypes/leaveSpec'
BATTLETYPES_LEAVETRAINING = '#tooltips:battleTypes/leaveTraining'
BATTLETYPES_LEAVEHISTORICAL = '#tooltips:battleTypes/leaveHistorical'
BATTLETYPES_TRAINING = '#tooltips:battleTypes/training'
BATTLETYPES_COMPANY = '#tooltips:battleTypes/company'
BATTLETYPES_SPEC = '#tooltips:battleTypes/spec'
BATTLETYPES_UNIT = '#tooltips:battleTypes/unit'
BATTLETYPES_FALLOUT = '#tooltips:battleTypes/fallout'
BATTLETYPES_HISTORICAL = '#tooltips:battleTypes/historical'
BATTLETYPES_BATTLETUTORIAL = '#tooltips:battleTypes/battleTutorial'
BATTLETYPES_BATTLETEACHING = '#tooltips:battleTypes/battleTeaching'
REDBUTTON_DISABLED_BUYNEEDED = '#tooltips:redButton/disabled/buyNeeded'
REDBUTTON_DISABLED_REPAIRNEEDED = '#tooltips:redButton/disabled/repairNeeded'
REPAIR_AUTO = '#tooltips:repair/auto'
AMMO_AUTO = '#tooltips:ammo/auto'
EQUIPMENT_AUTO = '#tooltips:equipment/auto'
EQUIPMENT_EMPTY = '#tooltips:equipment/empty'
SETTINGS_DIALOG_SOUND_PTTKEY = '#tooltips:settings_dialog/sound/PTTKey'
SETTINGS_DIALOG_SOUND_ALTERNATIVEVOICES = '#tooltips:settings_dialog/sound/alternativeVoices'
TANKS_CAROUSEL_BUY_SLOT = '#tooltips:tanks_carousel/buy_slot'
TANKS_CAROUSEL_BUY_VEHICLE = '#tooltips:tanks_carousel/buy_vehicle'
LOBY_MESSENGER_SERVICE_BUTTON = '#tooltips:loby_messenger/service_button'
LOBY_MESSENGER_CONTACTS_BUTTON = '#tooltips:loby_messenger/contacts_button'
LOBY_MESSENGER_VEHICLE_COMPARE_BUTTON = '#tooltips:loby_messenger/vehicle_compare_button'
LOBY_MESSENGER_CHANNELS_BUTTON = '#tooltips:loby_messenger/channels_button'
HANGAR_XPTOTMENCHECKBOX = '#tooltips:hangar/xpToTmenCheckbox'
QUESTS_NOTIFIER = '#tooltips:quests/notifier'
QUESTS_VEHICLESEASONAWARD_ABOUTBTN = '#tooltips:quests/vehicleSeasonAward/aboutBtn'
WALLET_NOT_AVAILABLE_GOLD = '#tooltips:wallet/not_available_gold'
WALLET_NOT_AVAILABLE_FREEXP = '#tooltips:wallet/not_available_freexp'
WALLET_NOT_AVAILABLE_CREDITS = '#tooltips:wallet/not_available_credits'
MEDALION_NOVEHICLE = '#tooltips:medalion/noVehicle'
SETTINGSICON_FREEZED = '#tooltips:settingsIcon/freezed'
SETTINGSICON_CONDITIONS = '#tooltips:settingsIcon/conditions'
CYBERSPORT_INTRO_SEARCH_BTN = '#tooltips:cyberSport/intro/search/btn'
CYBERSPORT_INTRO_CREATE_BTN = '#tooltips:cyberSport/intro/create/btn'
CYBERSPORT_INTRO_SELECTEDVEHICLEWARN_INCOMPATIBLETYPE = '#tooltips:cyberSport/intro/selectedVehicleWarn/incompatibleType'
CYBERSPORT_INTRO_SELECTEDVEHICLEWARN_INCOMPATIBLELEVEL = '#tooltips:cyberSport/intro/selectedVehicleWarn/incompatibleLevel'
CYBERSPORT_UNITLIST_REFRESH = '#tooltips:cyberSport/unitList/refresh'
CYBERSPORT_UNITLIST_PAGINGDOWN = '#tooltips:cyberSport/unitList/pagingDown'
CYBERSPORT_UNITLIST_PAGINGUP = '#tooltips:cyberSport/unitList/pagingUp'
CYBERSPORT_UNIT_CONFIGURE = '#tooltips:cyberSport/unit/configure'
CYBERSPORT_UNITLIST_JOIN = '#tooltips:cyberSport/unitList/join'
CYBERSPORT_UNITLIST_CREATEBTN_ALREADYRALLYOWNER = '#tooltips:cyberSport/unitList/createBtn/alreadyRallyOwner'
CYBERSPORT_UNITLIST_CREATEBTN_ALREADYINRALLY = '#tooltips:cyberSport/unitList/createBtn/alreadyInRally'
CYBERSPORT_UNITLIST_JOINTOSTATICASLEGIONARY = '#tooltips:cyberSport/unitList/joinToStaticAsLegionary'
CYBERSPORT_UNITLEVEL_BACK = '#tooltips:cyberSport/unitLevel/back'
CYBERSPORT_UNIT_SLOTLABELCLOSED = '#tooltips:cyberSport/unit/slotLabelClosed'
CYBERSPORT_UNIT_SLOTLABELUNAVAILABLE = '#tooltips:cyberSport/unit/slotLabelUnavailable'
CYBERSPORT_UNIT_TAKEPLACEBTN = '#tooltips:cyberSport/unit/takePlaceBtn'
CYBERSPORT_UNIT_TAKEPLACEFIRSTTIMEBTN = '#tooltips:cyberSport/unit/takePlaceFirstTimeBtn'
CYBERSPORT_VEHICLESELECTOR_OVERFLOWLEVEL = '#tooltips:cyberSport/vehicleSelector/overflowLevel'
CYBERSPORT_UNIT_INVITEBTN = '#tooltips:cyberSport/unit/inviteBtn'
CYBERSPORT_UNIT_FIGHTBTN_VEHICLENOTVALID = '#tooltips:cyberSport/unit/fightBtn/vehicleNotValid'
CYBERSPORT_UNIT_FIGHTBTN_EVENTVEHICLEWRONGMODE = '#tooltips:cyberSport/unit/fightBtn/eventVehicleWrongMode'
CYBERSPORT_UNIT_FIGHTBTN_NOTINSLOT = '#tooltips:cyberSport/unit/fightBtn/notInSlot'
CYBERSPORT_UNIT_FIGHTBTN_PRESSFORREADY = '#tooltips:cyberSport/unit/fightBtn/pressForReady'
CYBERSPORT_UNIT_FIGHTBTN_PRESSFORNOTREADY = '#tooltips:cyberSport/unit/fightBtn/pressForNotReady'
CYBERSPORT_WAITINGPLAYERS_CONFIGALERT = '#tooltips:cyberSport/waitingPlayers/configAlert'
CYBERSPORT_STATICRALLYINFO_STATSBATTLESCOUNT = '#tooltips:cyberSport/staticRallyInfo/statsBattlesCount'
CYBERSPORT_STATICRALLYINFO_STATSWINSPERCENT = '#tooltips:cyberSport/staticRallyInfo/statsWinsPercent'
CYBERSPORT_STATICRALLYINFO_JOINBTN = '#tooltips:cyberSport/staticRallyInfo/joinBtn'
CYBERSPORT_NOVEHICLESINHANGAR = '#tooltips:cyberSport/noVehiclesInHangar'
RALLYINFO_PROFILEBTN = '#tooltips:rallyInfo/profileBtn'
USEFREEXP = '#tooltips:useFreeXP'
FREEXP = '#tooltips:freeXP'
TURNOFFCOMBATCHAT = '#tooltips:turnOffCombatChat'
ENABLEPOSTMORTEMEFFECT = '#tooltips:enablePostMortemEffect'
SHOWMARKSONGUN = '#tooltips:showMarksOnGun'
FORTIFICATION_FOUNDATION = '#tooltips:fortification/foundation'
FORTIFICATION_HEADER_STATISTICS = '#tooltips:fortification/header/statistics'
FORTIFICATION_HEADER_CLANLIST = '#tooltips:fortification/header/clanList'
FORTIFICATION_WELCOME_DETAILS = '#tooltips:fortification/welcome/details'
FORTIFICATION_WELCOME_CREATEFORT = '#tooltips:fortification/welcome/createFort'
FORTIFICATION_WELCOME_CANTCREATEFORT = '#tooltips:fortification/welcome/cantCreateFort'
# Localization resource-key constants: each module-level name maps to a
# '#tooltips:<path>' string key that is resolved against the tooltips
# localization catalog at runtime.
# NOTE(review): this table looks machine-generated from the i18n catalog —
# the keys themselves are data and must not be edited by hand; change the
# source catalog/generator instead.

# --- Fortification (stronghold) UI: welcome screen, buildings, settings ---
FORTIFICATION_WELCOME_CLANSEARCH = '#tooltips:fortification/welcome/clanSearch'
FORTIFICATION_WELCOME_CLANSEARCHLABEL = '#tooltips:fortification/welcome/clanSearchLabel'
FORTIFICATION_WELCOME_CLANCREATE = '#tooltips:fortification/welcome/clanCreate'
FORTIFICATION_TRANPORTINGBUTTON_ACTIVE = '#tooltips:fortification/tranportingButton/active'
FORTIFICATION_TRANPORTINGBUTTON_INACTIVE = '#tooltips:fortification/tranportingButton/inactive'
FORTIFICATION_CLOSEDIRECTIONBUTTON_ACTIVE = '#tooltips:fortification/closeDirectionButton/active'
FORTIFICATION_CLOSEDIRECTIONBUTTON_INACTIVE = '#tooltips:fortification/closeDirectionButton/inactive'
FORTIFICATION_DEFRESICONINFO = '#tooltips:fortification/defResIconInfo'
FORTIFICATION_MODERNIZATION = '#tooltips:fortification/modernization'
FORTIFICATION_MODERNIZATION_DESCRIPTIONLINK = '#tooltips:fortification/modernization/descriptionLink'
FORTIFICATION_POPOVER_UPGRADEFOUNDATIONBTN = '#tooltips:fortification/popOver/upgradeFoundationBtn'
FORTIFICATION_POPOVER_PREPAREORDERDISABLE = '#tooltips:fortification/popOver/prepareOrderDisable'
FORTIFICATION_POPOVER_HPPROGRESS = '#tooltips:fortification/popOver/hpProgress'
FORTIFICATION_POPOVER_FIXEDPLAYERSBTN = '#tooltips:fortification/popOver/fixedPlayersBtn'
FORTIFICATION_POPOVER_UPGRADEFOUNDATIONBTN_DISABLED = '#tooltips:fortification/popOver/upgradeFoundationBtn_Disabled'
FORTIFICATION_FIXEDPLAYERS_ASSIGNBTNENABLED = '#tooltips:fortification/fixedPlayers/assignBtnEnabled'
FORTIFICATION_FIXEDPLAYERS_ASSIGNBTNDISABLED = '#tooltips:fortification/fixedPlayers/assignBtnDisabled'
FORTIFICATION_FIXEDPLAYERS_GENERALTOOLTIP = '#tooltips:fortification/fixedPlayers/generalTooltip'
FORTIFICATION_FIXEDPLAYERS_GENERALTOOLTIPMAXLIMIT = '#tooltips:fortification/fixedPlayers/generalTooltipMaxLimit'
FORTIFICATION_FOOTER_SORTIEBUTTON = '#tooltips:fortification/footer/sortieButton'
FORTIFICATION_FOOTER_INTELLIGENCEBUTTON = '#tooltips:fortification/footer/intelligenceButton'
FORTIFICATION_BUILDINGPROCESS_STATUSICONSUCCESS = '#tooltips:fortification/buildingProcess/statusIconSuccess'
FORTIFICATION_BUILDINGPROCESS_STATUSICONNOTAVAILABLE = '#tooltips:fortification/buildingProcess/statusIconNotAvailable'
FORTIFICATION_CLANSTATS_PERIODDEFENCE_BATTLES_EFFICIENCYOFATTACK = '#tooltips:fortification/clanStats/periodDefence/battles/efficiencyOfAttack'
FORTIFICATION_CLANSTATS_PERIODDEFENCE_BATTLES_EFFICIENCYOFDEFENCE = '#tooltips:fortification/clanStats/periodDefence/battles/efficiencyOfDefence'
PERIODDEFENCEWINDOW_TOOLTIP_PERIPHERY = '#tooltips:periodDefenceWindow/tooltip/periphery'
PERIODDEFENCEWINDOW_TOOLTIP_HOURDEFENCE = '#tooltips:periodDefenceWindow/tooltip/hourDefence'
PERIODDEFENCEWINDOW_TOOLTIP_HOLIDAY = '#tooltips:periodDefenceWindow/tooltip/holiday'
FORTIFICATION_POPOVER_PREPAREORDEROVERLOAD = '#tooltips:fortification/popOver/prepareOrderOverload'
FORTIFICATION_POPOVER_DEMOUNTBTN = '#tooltips:fortification/popOver/demountBtn'
# --- Fortification sortie / battle-room listing ---
FORTIFICATION_SORTIE_LISTROOM_BACK = '#tooltips:fortification/sortie/listRoom/back'
FORTIFICATION_CLAN_LISTROOM_BACK = '#tooltips:fortification/clan/listRoom/back'
FORTIFICATION_SORTIE_LISTROOM_CREATEBTN = '#tooltips:fortification/sortie/listRoom/createBtn'
FORTIFICATION_SORTIE_LISTROOM_SORTNAMEBTN = '#tooltips:fortification/sortie/listRoom/sortNameBtn'
FORTIFICATION_SORTIE_LISTROOM_SORTDIVISIONBTN = '#tooltips:fortification/sortie/listRoom/sortDivisionBtn'
FORTIFICATION_SORTIE_LISTROOM_SORTSQUADBTN = '#tooltips:fortification/sortie/listRoom/sortSquadBtn'
FORTIFICATION_SORTIE_LISTROOM_JOINBTN = '#tooltips:fortification/sortie/listRoom/joinBtn'
FORTIFICATION_SORTIE_LISTROOM_SINGINBTN = '#tooltips:fortification/sortie/listRoom/singInBtn'
BATTLETYPES_FORTIFICATION = '#tooltips:battleTypes/fortification'
BATTLETYPES_STRONGHOLDS = '#tooltips:battleTypes/strongholds'
BATTLETYPES_LEAVEFORTIFICATION = '#tooltips:battleTypes/leaveFortification'
FORTIFICATION_SORTIE_BATTLEROOM_LEAVEBTN = '#tooltips:fortification/sortie/battleRoom/leaveBtn'
FORTIFICATION_SORTIE_BATTLEROOM_CHANGEDIVISION = '#tooltips:fortification/sortie/battleRoom/changeDivision'
FORTIFICATION_SORTIE_BATTLEROOM_INVITEBTN = '#tooltips:fortification/sortie/battleRoom/inviteBtn'
FORTIFICATION_SORTIE_SELECTVEHICLE = '#tooltips:fortification/sortie/selectVehicle'
FORTIFICATION_SORTIE_TAKEPLACEFIRSTTIMEBTN = '#tooltips:fortification/sortie/takePlaceFirstTimeBtn'
FORTIFICATION_SORTIE_CHAT_DESCRIPTION = '#tooltips:fortification/sortie/chat/description'
FORTIFICATION_SORTIE_CHAT_SENDMESSAGEBTN = '#tooltips:fortification/sortie/chat/sendMessageBtn'
FORTIFICATION_SORTIE_REMOVEBTN = '#tooltips:fortification/sortie/removeBtn'
FORTIFICATION_SORTIE_PLAYER_CANCELREADY = '#tooltips:fortification/sortie/player/cancelReady'
FORTIFICATION_SORTIE_PLAYER_CHANGEVEHICLE = '#tooltips:fortification/sortie/player/changeVehicle'
# --- Vehicle selector widget ---
VEHICLESELECTOR_SORTING_NATION = '#tooltips:vehicleSelector/sorting/nation'
VEHICLESELECTOR_SORTING_VEHTYPE = '#tooltips:vehicleSelector/sorting/vehType'
VEHICLESELECTOR_SORTING_VEHLVL = '#tooltips:vehicleSelector/sorting/vehLvl'
VEHICLESELECTOR_SORTING_VEHNAME = '#tooltips:vehicleSelector/sorting/vehName'
VEHICLESELECTOR_OVERFLOWLEVEL = '#tooltips:vehicleSelector/overflowLevel'
VEHICLESELECTOR_INCOMPATIBLETYPE = '#tooltips:vehicleSelector/incompatibleType'
FORTIFICATION_FIXEDPLAYERS_WEEK = '#tooltips:fortification/fixedPlayers/week'
FORTIFICATION_FIXEDPLAYERS_ALLTIME = '#tooltips:fortification/fixedPlayers/allTime'
FORTIFICATION_FIXEDPLAYERS_FORTROLE = '#tooltips:fortification/fixedPlayers/fortRole'
FORTIFICATION_FIXEDPLAYERS_NIC = '#tooltips:fortification/fixedPlayers/nic'
FORTIFICATION_INTROVIEW_CLANBATTLEBTN_ENABLED = '#tooltips:fortification/introView/clanBattleBtn/enabled'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_LEVEL = '#tooltips:fortification/intelligenceWindow/sortBtn/level'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_CLANTAG = '#tooltips:fortification/intelligenceWindow/sortBtn/clanTag'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_DEFENCETIME = '#tooltips:fortification/intelligenceWindow/sortBtn/defenceTime'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_BUILDINGS = '#tooltips:fortification/intelligenceWindow/sortBtn/buildings'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_AVAILABILITY = '#tooltips:fortification/intelligenceWindow/sortBtn/availability'
FORTIFICATION_BATTLEROOMLEGIONARIES = '#tooltips:fortification/battleRoomLegionaries'
FORTIFICATION_BATTLEROOMLEGIONARIES_TEAMSECTION = '#tooltips:fortification/battleRoomLegionaries/teamSection'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_OFFENSE = '#tooltips:fortification/fortBattleDirectionPopover/offense'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_DEFENSE = '#tooltips:fortification/fortBattleDirectionPopover/defense'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_ISINBATTLE = '#tooltips:fortification/fortBattleDirectionPopover/isInBattle'
FORTIFICATION_BATTLENOTIFIER_OFFENSE = '#tooltips:fortification/battleNotifier/offense'
FORTIFICATION_BATTLENOTIFIER_DEFENSE = '#tooltips:fortification/battleNotifier/defense'
FORTIFICATION_BATTLENOTIFIER_OFFANDDEF = '#tooltips:fortification/battleNotifier/offAndDef'
FORTIFICATION_FORTINTELLIGENCECLANFILTERPOPOVER_DEFAULT = '#tooltips:fortification/FortIntelligenceClanFilterPopover/default'
FORTIFICATION_FORTINTELLIGENCECLANFILTERPOPOVER_APPLY = '#tooltips:fortification/FortIntelligenceClanFilterPopover/apply'
LOBY_MESSENGER_CHANNELS_CAROUSEL_BUTTON_LEFT = '#tooltips:loby_messenger/channels_carousel_button_left'
LOBY_MESSENGER_NEW_CHANNELS_CAROUSEL_BUTTON_LEFT = '#tooltips:loby_messenger/new/channels_carousel_button_left'
LOBY_MESSENGER_CHANNELS_CAROUSEL_BUTTON_RIGHT = '#tooltips:loby_messenger/channels_carousel_button_right'
LOBY_MESSENGER_NEW_CHANNELS_CAROUSEL_BUTTON_RIGHT = '#tooltips:loby_messenger/new/channels_carousel_button_right'
FORTIFICATION_INTELLIGENCEWINDOW_TAGSEARCHTEXTINPUT = '#tooltips:fortification/intelligenceWindow/tagSearchTextInput'
FORTIFICATION_INTELLIGENCEWINDOW_TAGSEARCHBUTTON = '#tooltips:fortification/intelligenceWindow/tagSearchButton'
FORTIFICATION_INTELLIGENCEWINDOW_CLEARFILTERBTN = '#tooltips:fortification/intelligenceWindow/clearFilterBtn'
FORTIFICATION_INTELLIGENCEWINDOW_FILTERBUTTON = '#tooltips:fortification/intelligenceWindow/filterButton'
FORTIFICATION_HEADER_CALENDARBTN = '#tooltips:fortification/header/calendarBtn'
FORTIFICATION_HEADER_SETTINGSBTN = '#tooltips:fortification/header/settingsBtn'
FORTIFICATION_FORTSETTINGSDAYOFFPOPOVER_APPLY_ENABLED = '#tooltips:fortification/FortSettingsDayoffPopover/apply/enabled'
FORTIFICATION_FORTSETTINGSDAYOFFPOPOVER_APPLY_DISABLED = '#tooltips:fortification/FortSettingsDayoffPopover/apply/disabled'
FORTIFICATION_CHOICEDIVISION_PLAYERRANGE = '#tooltips:fortification/choiceDivision/playerRange'
FORTIFICATION_POPOVER_DEMOUNTBTNDISABLED = '#tooltips:fortification/popOver/demountBtnDisabled'
FORTIFICATION_POPOVER_UPGRADEBTN_DISABLEDBYBATTLE = '#tooltips:fortification/popOver/upgradeBtn_DisabledByBattle'
FORTIFICATION_FORTSETTINGSWINDOW_DEFENCEBTNENABLED = '#tooltips:fortification/fortSettingsWindow/defenceBtnEnabled'
FORTIFICATION_FORTSETTINGSWINDOW_DEFENCEBTNDISABLED = '#tooltips:fortification/fortSettingsWindow/defenceBtnDisabled'
FORTIFICATION_FORTSETTINGSWINDOW_WEEKENDBTNENABLED = '#tooltips:fortification/fortSettingsWindow/weekEndBtnEnabled'
FORTIFICATION_FORTSETTINGSWINDOW_WEEKENDBTNDISABLED = '#tooltips:fortification/fortSettingsWindow/weekEndBtnDisabled'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNENABLED = '#tooltips:fortification/fortSettingsWindow/vacationBtnEnabled'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNDISABLED = '#tooltips:fortification/fortSettingsWindow/vacationBtnDisabled'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNDISABLEDNOTPLANNED = '#tooltips:fortification/fortSettingsWindow/vacationBtnDisabledNotPlanned'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNDSBLDLESSADAY = '#tooltips:fortification/fortSettingsWindow/vacationBtnDsbldLessADay'
FORTIFICATION_FORTSETTINGSWINDOW_INFOICON = '#tooltips:fortification/fortSettingsWindow/infoIcon'
FORTIFICATION_FORTSETTINGSWINDOW_DISABLEDEFENCEPERIOD = '#tooltips:fortification/fortSettingsWindow/disableDefencePeriod'
FORTIFICATION_POPOVER_UPGRADEBTN_DISABLEDBYDESTROY = '#tooltips:fortification/popOver/upgradeBtn_DisabledByDestroy'
FORTIFICATION_CLANSTATS_PERIODDEFENCE_BATTLES_BATTLESCOUNT = '#tooltips:fortification/clanStats/periodDefence/battles/battlesCount'
FORTIFICATION_FORTCLANBATTLELIST_BATTLENAME = '#tooltips:fortification/fortClanBattleList/battleName'
FORTIFICATION_FORTCLANBATTLELIST_BATTLEDATE = '#tooltips:fortification/fortClanBattleList/battleDate'
FORTIFICATION_FORTCLANBATTLELIST_BATTLETIME = '#tooltips:fortification/fortClanBattleList/battleTime'
FORTIFICATION_SORTIE_LISTROOM_DESCR = '#tooltips:fortification/sortie/listRoom/descr'
FORTIFICATION_SORTIE_LISTROOM_STATUS = '#tooltips:fortification/sortie/listRoom/status'
FORTIFICATION_SORTIE_LISTROOM_BATTLESTATUS = '#tooltips:fortification/sortie/listRoom/battleStatus'
# --- Referral management window ---
REFERRALMANAGEMENTWINDOW_CREATESQUADBTN_ENABLED = '#tooltips:ReferralManagementWindow/createSquadBtn/enabled'
REFERRALMANAGEMENTWINDOW_CREATESQUADBTN_DISABLED_ISOFFLINE = '#tooltips:ReferralManagementWindow/createSquadBtn/disabled/isOffline'
REFERRALMANAGEMENTWINDOW_CREATESQUADBTN_DISABLED_SQUADISFULL = '#tooltips:ReferralManagementWindow/createSquadBtn/disabled/squadIsFull'
REFERRALMANAGEMENTWINDOW_TABLE_EXPERIENCE = '#tooltips:ReferralManagementWindow/table/experience'
REFERRALMANAGEMENTWINDOW_MULTIPLIER_X1 = '#tooltips:ReferralManagementWindow/multiplier/x1'
FORTIFICATION_FORTORDERSELECTPOPOVER_ARSENALICON = '#tooltips:fortification/FortOrderSelectPopover/arsenalIcon'
FORTIFICATION_FORTSETTINGSWINDOW_DEFENCEPERIODDESCRIPTION = '#tooltips:fortification/fortSettingsWindow/defencePeriodDescription'
FORTIFICATION_FORTSETTINGSWINDOW_DAYOFFDESCRIPTION = '#tooltips:fortification/fortSettingsWindow/dayOffDescription'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONDESCRIPTION = '#tooltips:fortification/fortSettingsWindow/vacationDescription'
FORTIFICATION_FORTSETTINGSWINDOW_PERIPHERYDESCRIPTION = '#tooltips:fortification/fortSettingsWindow/peripheryDescription'
FORTIFICATION_FORTSETTINGSVACATIONPOPOVER_APPLYBTN = '#tooltips:fortification/fortSettingsVacationPopover/applyBtn'
FORTIFICATION_FORTSETTINGSVACATIONPOPOVER_APPLYBTN_DISABLE = '#tooltips:fortification/fortSettingsVacationPopover/applyBtn/disable'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_JOINBTN = '#tooltips:fortification/fortBattleDirectionPopover/joinBtn'
FORTIFICATION_FORTCLANBATTLEROOM_ORDERSDISABLED_DIVISIONMISMATCH = '#tooltips:fortification/fortClanBattleRoom/ordersDisabled/divisionMismatch'
# --- Crew role change / personal file (tankman) ---
ROLECHANGE_CURRENTROLEWARNING = '#tooltips:RoleChange/currentRoleWarning'
ROLECHANGE_ROLETAKEN = '#tooltips:RoleChange/roleTaken'
ROLECHANGE_ROLEANDVEHICLETAKEN = '#tooltips:RoleChange/roleAndVehicleTaken'
ROLECHANGE_FOOTERINFO = '#tooltips:RoleChange/footerInfo'
PERSONAL_CASE_SKILLS_ACCTEACHINGOFSKILLBTN = '#tooltips:personal_case/skills/accTeachingOfSkillBtn'
PERSONAL_CASE_SKILLS_ACCTEACHINGOFSKILLBTN_NOTENOUGHFREEXP = '#tooltips:personal_case/skills/accTeachingOfSkillBtn/notEnoughFreeXP'
PERSONAL_CASE_TRAINING_LIGHT_TANK_BTN = '#tooltips:personal_case/training/light_tank_btn'
PERSONAL_CASE_TRAINING_MEDIUM_TANK_BTN = '#tooltips:personal_case/training/medium_tank_btn'
PERSONAL_CASE_TRAINING_HEAVY_TANK_BTN = '#tooltips:personal_case/training/heavy_tank_btn'
PERSONAL_CASE_TRAINING_AT_SPG_BTN = '#tooltips:personal_case/training/at_spg_btn'
PERSONAL_CASE_TRAINING_SPG_BTN = '#tooltips:personal_case/training/spg_btn'
PERSONAL_CASE_TRAINING_CURRENT_TANK = '#tooltips:personal_case/training/current_tank'
PERSONAL_CASE_TRAINING_TANK = '#tooltips:personal_case/training/tank'
# --- Cybersport (team battles) ---
CYBERSPORT_INTRO_CREATEBTN_LOOK = '#tooltips:cyberSport/intro/createBtn/look'
CYBERSPORT_INTRO_CREATEBTN_ASSEMBLETEAM = '#tooltips:cyberSport/intro/createBtn/assembleTeam'
CYBERSPORT_INTRO_CREATEBTN_JOINTEAM = '#tooltips:cyberSport/intro/createBtn/joinTeam'
CYBERSPORT_INTRO_CREATEBTN_ASSEMBLEDISABLED = '#tooltips:cyberSport/intro/createBtn/assembleDisabled'
CYBERSPORT_INTRO_CREATEBTN_ADDPLAYERS = '#tooltips:cyberSport/intro/createBtn/addPlayers'
FORTIFICATION_ORDERSPANEL_CHECKBOXORDERTYPE = '#tooltips:fortification/ordersPanel/checkBoxOrderType'
CYBERSPORT_VEHICLESELECTOR_BADVEHICLE = '#tooltips:cyberSport/vehicleSelector/badVehicle'
# --- Miscellaneous lobby / notifications / hangar tutorial ---
RECEIVEFRIENDSHIPREQUEST = '#tooltips:receiveFriendshipRequest'
RECEIVEINVITESINBATTLE = '#tooltips:receiveInvitesInBattle'
CHATCONTACTSLISTONLY = '#tooltips:chatContactsListOnly'
WINDOWHELP = '#tooltips:windowHelp'
HANGARTUTORIAL_NEXTTECHMODELS = '#tooltips:hangarTutorial/nextTechModels'
HANGARTUTORIAL_TECHREPAIR = '#tooltips:hangarTutorial/techRepair'
HANGARTUTORIAL_CUSTOMIZATIONDURATION = '#tooltips:hangarTutorial/customizationDuration'
HANGARTUTORIAL_CUSTOMIZATIONOPTIONS = '#tooltips:hangarTutorial/customizationOptions'
HANGARTUTORIAL_CUSTOMIZATIONCOST = '#tooltips:hangarTutorial/customizationCost'
MULTISELECTION_ALERT = '#tooltips:multiselection/alert'
HEADER_ACCOUNTPOPOVER_INVITEBTN = '#tooltips:header/accountPopover/inviteBtn'
HEADER_ACCOUNTPOPOVER_SEARCHCLAN = '#tooltips:header/accountPopover/searchClan'
HEADER_ACCOUNTPOPOVER_INVITEREQUESTBTN = '#tooltips:header/accountPopover/inviteRequestBtn'
HEADER_ACCOUNTPOPOVER_SEARCHCLAN_UNAVAILABLE = '#tooltips:header/accountPopover/searchClan/unavailable'
HEADER_ACCOUNTPOPOVER_INVITEREQUESTBTN_UNAVAILABLE = '#tooltips:header/accountPopover/inviteRequestBtn/unavailable'
HEADER_ACCOUNTPOPOVER_INVITEBTN_UNAVAILABLE = '#tooltips:header/accountPopover/inviteBtn/unavailable'
HEADER_ACCOUNTPOPOVER_CLANPROFILE_UNAVAILABLE = '#tooltips:header/accountPopover/clanProfile/unavailable'
HEADER_ACCOUNTPOPOVER_BOOSTERSTITLE = '#tooltips:header/accountPopover/boostersTitle'
BATTLESELECTORWINDOW_TOOLTIP_DOMINATION_SELECTBTN = '#tooltips:battleSelectorWindow/tooltip/domination/selectBtn'
BATTLESELECTORWINDOW_TOOLTIP_MULTITEAM_SELECTBTN = '#tooltips:battleSelectorWindow/tooltip/multiteam/selectBtn'
# --- Award items (quest reward icons) ---
AWARDITEM_CREDITS = '#tooltips:awardItem/credits'
AWARDITEM_GOLD = '#tooltips:awardItem/gold'
AWARDITEM_FREEXP = '#tooltips:awardItem/freeXP'
AWARDITEM_PREMIUM = '#tooltips:awardItem/premium'
AWARDITEM_BATTLETOKEN_ONE = '#tooltips:awardItem/battleToken/one'
AWARDITEM_BATTLETOKEN_SEVERAL = '#tooltips:awardItem/battleToken/several'
FALLOUTBATTLESELECTORWINDOW_BTNINSQUADDISABLED = '#tooltips:falloutBattleSelectorWindow/btnInSquadDisabled'
FALLOUTBATTLESELECTORWINDOW_BTNDISABLED = '#tooltips:falloutBattleSelectorWindow/btnDisabled'
MASTERVOLUMETOGGLEOFF = '#tooltips:masterVolumeToggleOff'
SOUNDQUALITY = '#tooltips:soundQuality'
SOUND_DYNAMICRANGE_HELP = '#tooltips:sound/dynamicRange/help'
RESEARCHPAGE_VEHICLE_STATUS_PARENTMODULEISLOCKED = '#tooltips:researchPage/vehicle/status/parentModuleIsLocked'
RESEARCHPAGE_MODULE_STATUS_NOTENOUGHXP = '#tooltips:researchPage/module/status/notEnoughXP'
MODULEFITS_GOLD_ERROR = '#tooltips:moduleFits/gold_error'
MODULEFITS_CREDITS_ERROR = '#tooltips:moduleFits/credits_error'
MODULEFITS_OPERATION_ERROR = '#tooltips:moduleFits/operation_error'
BATTLETYPES_FORTIFICATION_DISABLED = '#tooltips:battleTypes/fortification/disabled'
BATTLETYPES_STRONGHOLDS_DISABLED = '#tooltips:battleTypes/strongholds/disabled'
SQUADWINDOW_INFOICON_TECH = '#tooltips:squadWindow/infoIcon/tech'
VEHICLEPREVIEW_CREW = '#tooltips:vehiclePreview/crew'
RESEARCHPAGE_VEHICLE_BUTTON_COMPARE_ADD = '#tooltips:researchPage/vehicle/button/compare/add'
RESEARCHPAGE_VEHICLE_BUTTON_COMPARE_DISABLED = '#tooltips:researchPage/vehicle/button/compare/disabled'
VEHICLEPREVIEW_VEHICLEPANEL_INFO = '#tooltips:vehiclePreview/vehiclePanel/info'
RECEIVECLANINVITESNOTIFICATIONS = '#tooltips:receiveClanInvitesNotifications'
SETTINGS_DAMAGELOG_DETAILS = '#tooltips:settings/damagelog/details'
HEADER_PREMSHOP = '#tooltips:header/premShop'
BARRACKS_PLACESCOUNT_DISMISS = '#tooltips:barracks/placesCount/dismiss'
DISMISSTANKMANDIALOG_CANTRESTORALERT = '#tooltips:dismissTankmanDialog/cantRestorAlert'
NOTIFICATIONSVIEW_TAB_INFO = '#tooltips:notificationsView/tab/info'
NOTIFICATIONSVIEW_TAB_INVITES = '#tooltips:notificationsView/tab/invites'
NOTIFICATIONSVIEW_TAB_OFFERS = '#tooltips:notificationsView/tab/offers'
HANGAR_HEADER_PERSONALQUESTS_DISABLED = '#tooltips:hangar/header/personalQuests/disabled'
HANGAR_HEADER_PERSONALQUESTS_UNAVAILABLE = '#tooltips:hangar/header/personalQuests/unavailable'
HANGAR_HEADER_PERSONALQUESTS_COMPLETED = '#tooltips:hangar/header/personalQuests/completed'
HANGAR_HEADER_PERSONALQUESTS_AVAILABLE = '#tooltips:hangar/header/personalQuests/available'
HANGAR_HEADER_PERSONALQUESTS_AWARD = '#tooltips:hangar/header/personalQuests/award'
HANGAR_HEADER_PERSONALQUESTS_DONE = '#tooltips:hangar/header/personalQuests/done'
TEMPLATE_DAYS_SHORT = '#tooltips:template/days/short'
TEMPLATE_HOURS_SHORT = '#tooltips:template/hours/short'
TEMPLATE_MINUTES_SHORT = '#tooltips:template/minutes/short'
TEMPLATE_TIME_LESSTHENMINUTE = '#tooltips:template/time/lessThenMinute'
PREMIUM_DAYS_HEADER = '#tooltips:premium/days/header'
PREMIUM_DAYS_PARAMS_BUY = '#tooltips:premium/days/params/buy'
PREMIUM_DAYS_PARAMS_OLDPRICE = '#tooltips:premium/days/params/oldPrice'
# --- Fort division descriptions ---
FORTDIVISION_CHAMPION_HEADER = '#tooltips:fortDivision/champion/header'
FORTDIVISION_CHAMPION_DESCRIPTION = '#tooltips:fortDivision/champion/description'
FORTDIVISION_ABSOLUTE_HEADER = '#tooltips:fortDivision/absolute/header'
FORTDIVISION_ABSOLUTE_DESCRIPTION = '#tooltips:fortDivision/absolute/description'
FORTDIVISION_PARAMS_VEHICLELEVEL = '#tooltips:fortDivision/params/vehicleLevel'
FORTDIVISION_PARAMS_VEHICLESCOUNT = '#tooltips:fortDivision/params/vehiclesCount'
FORTDIVISION_WARNING_FORBIDDENEQUIPMENT = '#tooltips:fortDivision/warning/forbiddenEquipment'
FORTDIVISION_WARNING_LOWBACKLOG = '#tooltips:fortDivision/warning/lowBacklog'
# --- Private quests (personal missions) ---
PRIVATEQUESTS_AWARDSBUTTON_HEADER = '#tooltips:privateQuests/awardsButton/header'
PRIVATEQUESTS_AWARDSBUTTON_BODY = '#tooltips:privateQuests/awardsButton/body'
PRIVATEQUESTS_SLOT_EMPTY_HEADER = '#tooltips:privateQuests/slot/empty/header'
PRIVATEQUESTS_SLOT_EMPTY_BODY = '#tooltips:privateQuests/slot/empty/body'
PRIVATEQUESTS_SLOT_MISSION_HEADER = '#tooltips:privateQuests/slot/mission/header'
PRIVATEQUESTS_SLOT_MISSION_BODY = '#tooltips:privateQuests/slot/mission/body'
PRIVATEQUESTS_SLOT_MISSION_NOTE = '#tooltips:privateQuests/slot/mission/note'
PRIVATEQUESTS_SLOT_MISSIONCOMPLETE_HEADER = '#tooltips:privateQuests/slot/missionComplete/header'
PRIVATEQUESTS_SLOT_MISSIONCOMPLETE_BODY = '#tooltips:privateQuests/slot/missionComplete/body'
PRIVATEQUESTS_SLOT_MISSIONCOMPLETE_ATTENTION = '#tooltips:privateQuests/slot/missionComplete/attention'
PRIVATEQUESTS_BACKBUTTON_HEADER = '#tooltips:privateQuests/backButton/header'
PRIVATEQUESTS_BACKBUTTON_BODY = '#tooltips:privateQuests/backButton/body'
PRIVATEQUESTS_TASKLISTITEM_BODY = '#tooltips:privateQuests/taskListItem/body'
PRIVATEQUESTS_ABOUTVEHICLE_HEADER = '#tooltips:privateQuests/aboutVehicle/header'
PRIVATEQUESTS_ABOUTVEHICLE_BODY = '#tooltips:privateQuests/aboutVehicle/body'
PRIVATEQUESTS_SHOWVEHICLE_HEADER = '#tooltips:privateQuests/showVehicle/header'
PRIVATEQUESTS_SHOWVEHICLE_BODY = '#tooltips:privateQuests/showVehicle/body'
PRIVATEQUESTS_ACTIONPANNEL_PERFORM_HEADER = '#tooltips:privateQuests/actionPannel/perform/header'
PRIVATEQUESTS_ACTIONPANNEL_PERFORM_BODY = '#tooltips:privateQuests/actionPannel/perform/body'
PRIVATEQUESTS_ACTIONPANNEL_ABORT_HEADER = '#tooltips:privateQuests/actionPannel/abort/header'
PRIVATEQUESTS_ACTIONPANNEL_ABORT_BODY = '#tooltips:privateQuests/actionPannel/abort/body'
PRIVATEQUESTS_ACTIONPANNEL_ABORT_NOTE = '#tooltips:privateQuests/actionPannel/abort/note'
PRIVATEQUESTS_ACTIONPANNEL_RECEIVETHEAWARD_HEADER = '#tooltips:privateQuests/actionPannel/receiveTheAward/header'
PRIVATEQUESTS_ACTIONPANNEL_RECEIVETHEAWARD_BODY = '#tooltips:privateQuests/actionPannel/receiveTheAward/body'
PRIVATEQUESTS_ACTIONPANNEL_REPEAT_HEADER = '#tooltips:privateQuests/actionPannel/repeat/header'
PRIVATEQUESTS_ACTIONPANNEL_REPEAT_BODY = '#tooltips:privateQuests/actionPannel/repeat/body'
PRIVATEQUESTS_ACTIONPANNEL_REPEAT_NOTE = '#tooltips:privateQuests/actionPannel/repeat/note'
PRIVATEQUESTS_QUESTCONTROL_HEADER = '#tooltips:privateQuests/questControl/header'
PRIVATEQUESTS_QUESTCONTROL_BODY = '#tooltips:privateQuests/questControl/body'
PRIVATEQUESTS_QUESTSSTATS_TOKEN_HEADER = '#tooltips:privateQuests/questsStats/token/header'
PRIVATEQUESTS_QUESTSSTATS_TOKEN_BODY = '#tooltips:privateQuests/questsStats/token/body'
PRIVATEQUESTS_QUESTSSTATS_TOKEN_NOTE = '#tooltips:privateQuests/questsStats/token/note'
PRIVATEQUESTS_PROGRESS_CONDITION = '#tooltips:privateQuests/progress/condition'
PRIVATEQUESTS_SLOT_CONDITION_VEHICLE = '#tooltips:privateQuests/slot/condition/vehicle'
PRIVATEQUESTSFALLOUT_SLOT_CONDITION_VEHICLE = '#tooltips:privateQuestsFallout/slot/condition/vehicle'
PRIVATEQUESTS_SLOT_CONDITION_ANIM = '#tooltips:privateQuests/slot/condition/anim'
PRIVATEQUESTS_SLOT_HEADER = '#tooltips:privateQuests/slot/header'
PRIVATEQUESTS_SLOT_DESCR = '#tooltips:privateQuests/slot/descr'
PRIVATEQUESTS_PROGRESS_HEADER = '#tooltips:privateQuests/progress/header'
PRIVATEQUESTS_PROGRESS_TYPE_LIGHTTANK = '#tooltips:privateQuests/progress/type/lightTank'
PRIVATEQUESTS_PROGRESS_TYPE_MEDIUMTANK = '#tooltips:privateQuests/progress/type/mediumTank'
PRIVATEQUESTS_PROGRESS_TYPE_HEAVYTANK = '#tooltips:privateQuests/progress/type/heavyTank'
PRIVATEQUESTS_PROGRESS_TYPE_SPG = '#tooltips:privateQuests/progress/type/SPG'
PRIVATEQUESTS_PROGRESS_TYPE_AT_SPG = '#tooltips:privateQuests/progress/type/AT-SPG'
PRIVATEQUESTS_STATUS_NOTRECEIVED_HEADER = '#tooltips:privateQuests/status/notReceived/header'
PRIVATEQUESTS_STATUS_RECEIVED_HEADER = '#tooltips:privateQuests/status/received/header'
PRIVATEQUESTS_STATUS_COMPLETED_HEADER = '#tooltips:privateQuests/status/completed/header'
PRIVATEQUESTS_STATUS_AVAILABLE_HEADER = '#tooltips:privateQuests/status/available/header'
PRIVATEQUESTS_STATUS_AVAILABLE_DESCR = '#tooltips:privateQuests/status/available/descr'
PRIVATEQUESTS_STATUS_LOCK_HEADER = '#tooltips:privateQuests/status/lock/header'
PRIVATEQUESTS_STATUS_LOCK_DESCR = '#tooltips:privateQuests/status/lock/descr'
PRIVATEQUESTS_STATUS_INPROGRESS_HEADER = '#tooltips:privateQuests/status/inProgress/header'
PRIVATEQUESTS_STATUS_INPROGRESS_DESCR = '#tooltips:privateQuests/status/inProgress/descr'
PRIVATEQUESTS_PARAMS_SHEETS = '#tooltips:privateQuests/params/sheets'
PRIVATEQUESTS_PARAMS_RECRUITTANKMANFEMALE = '#tooltips:privateQuests/params/recruitTankmanFemale'
PRIVATEQUESTS_PARAMS_COLLECTEDSHEETS = '#tooltips:privateQuests/params/collectedSheets'
PRIVATEQUESTS_PARAMS_COMPETEDTASKS = '#tooltips:privateQuests/params/competedTasks'
PRIVATEQUESTS_PARAMS_RECRUITEDTANKMANFEMALE = '#tooltips:privateQuests/params/recruitedTankmanFemale'
# --- Squad window ---
SQUADWINDOW_SIMPLESLOTNOTIFICATION_ALERT_HEADER = '#tooltips:squadWindow/simpleSlotNotification/alert/header'
SQUADWINDOW_SIMPLESLOTNOTIFICATION_ALERT_BODY = '#tooltips:squadWindow/simpleSlotNotification/alert/body'
SQUADWINDOW_SIMPLESLOTNOTIFICATION_ALERT_ALERT = '#tooltips:squadWindow/simpleSlotNotification/alert/alert'
SQUADWINDOW_SIMPLESLOTNOTIFICATION_INFO_HEADER = '#tooltips:squadWindow/simpleSlotNotification/info/header'
SQUADWINDOW_SIMPLESLOTNOTIFICATION_INFO_BODY = '#tooltips:squadWindow/simpleSlotNotification/info/body'
SQUADWINDOW_BATTLETYPEINFO_HEADER = '#tooltips:squadWindow/battleTypeInfo/header'
SQUADWINDOW_BATTLETYPEINFO_BODY = '#tooltips:squadWindow/battleTypeInfo/body'
SQUADWINDOW_BUTTONS_BTNNOTREADY_HEADER = '#tooltips:squadWindow/buttons/btnNotReady/header'
SQUADWINDOW_BUTTONS_BTNNOTREADY_BODY = '#tooltips:squadWindow/buttons/btnNotReady/body'
SQUADWINDOW_BUTTONS_BTNFIGHT_HEADER = '#tooltips:squadWindow/buttons/btnFight/header'
SQUADWINDOW_BUTTONS_BTNFIGHT_BODY = '#tooltips:squadWindow/buttons/btnFight/body'
SQUADWINDOW_BUTTONS_INVITE_HEADER = '#tooltips:squadWindow/buttons/invite/header'
SQUADWINDOW_BUTTONS_INVITE_BODY = '#tooltips:squadWindow/buttons/invite/body'
SQUADWINDOW_BUTTONS_RECOMMEND_HEADER = '#tooltips:squadWindow/buttons/recommend/header'
SQUADWINDOW_BUTTONS_RECOMMEND_BODY = '#tooltips:squadWindow/buttons/recommend/body'
SQUADWINDOW_BUTTONS_LEAVESQUAD_HEADER = '#tooltips:squadWindow/buttons/leaveSquad/header'
SQUADWINDOW_BUTTONS_LEAVESQUAD_BODY = '#tooltips:squadWindow/buttons/leaveSquad/body'
SQUADWINDOW_BUTTONS_LEAVEEVENTSQUAD_HEADER = '#tooltips:squadWindow/buttons/leaveEventSquad/header'
SQUADWINDOW_BUTTONS_LEAVEEVENTSQUAD_BODY = '#tooltips:squadWindow/buttons/leaveEventSquad/body'
SQUADWINDOW_INFOICON_TECH_HEADER = '#tooltips:squadWindow/infoIcon/tech/header'
SQUADWINDOW_INFOICON_TECH_BODY = '#tooltips:squadWindow/infoIcon/tech/body'
SQUADWINDOW_INFOICON_TECHRESTRICTIONS_HEADER = '#tooltips:squadWindow/infoIcon/techRestrictions/header'
SQUADWINDOW_INFOICON_TECHRESTRICTIONS_TITLE0 = '#tooltips:squadWindow/infoIcon/techRestrictions/title0'
SQUADWINDOW_INFOICON_TECHRESTRICTIONS_BODY0 = '#tooltips:squadWindow/infoIcon/techRestrictions/body0'
SQUADWINDOW_INFOICON_TECHRESTRICTIONS_TITLE1 = '#tooltips:squadWindow/infoIcon/techRestrictions/title1'
SQUADWINDOW_INFOICON_TECHRESTRICTIONS_BODY1 = '#tooltips:squadWindow/infoIcon/techRestrictions/body1'
SQUADWINDOW_INFOICON_TECHRESTRICTIONS_TITLE2 = '#tooltips:squadWindow/infoIcon/techRestrictions/title2'
SQUADWINDOW_INFOICON_TECHRESTRICTIONS_BODY2 = '#tooltips:squadWindow/infoIcon/techRestrictions/body2'
SQUADWINDOW_BUTTONS_SENDMESSAGE_HEADER = '#tooltips:squadWindow/buttons/sendMessage/header'
SQUADWINDOW_BUTTONS_SENDMESSAGE_BODY = '#tooltips:squadWindow/buttons/sendMessage/body'
SQUADWINDOW_STATUS_COMMANDER = '#tooltips:squadWindow/status/commander'
SQUADWINDOW_STATUS_NORMAL = '#tooltips:squadWindow/status/normal'
SQUADWINDOW_STATUS_CANCELED = '#tooltips:squadWindow/status/canceled'
SQUADWINDOW_STATUS_READY = '#tooltips:squadWindow/status/ready'
SQUADWINDOW_STATUS_INBATTLE = '#tooltips:squadWindow/status/inBattle'
SQUADWINDOW_STATUS_LOCKED = '#tooltips:squadWindow/status/locked'
SQUADWINDOW_EVENT_DOMINATION = '#tooltips:squadWindow/event/domination'
SQUADWINDOW_EVENT_DOMINATION_NOTE = '#tooltips:squadWindow/event/domination/note'
SQUADWINDOW_TEAMMATE_NOTVALIDVEHICLE_HEADER = '#tooltips:squadWindow/teammate/notValidVehicle/header'
SQUADWINDOW_TEAMMATE_NOTVALIDVEHICLE_BODY = '#tooltips:squadWindow/teammate/notValidVehicle/body'
SQUADWINDOW_DEMANDFORVEHICLE_NOTVALIDVEHICLE_HEADER = '#tooltips:squadWindow/demandForVehicle/notValidVehicle/header'
SQUADWINDOW_DEMANDFORVEHICLE_NOTVALIDVEHICLE_BODY = '#tooltips:squadWindow/demandForVehicle/notValidVehicle/body'
SQUADWINDOW_DOMINATION_VEHICLESINFOICON_HEADER = '#tooltips:squadWindow/domination/vehiclesInfoIcon/header'
SQUADWINDOW_DOMINATION_VEHICLESINFOICON_BODY = '#tooltips:squadWindow/domination/vehiclesInfoIcon/body'
# --- Login screen (password, social auth, background mode, sound) ---
LOGIN_REMEMBERPASSWORD_SIMPLE_HEADER = '#tooltips:login/rememberPassword/simple/header'
LOGIN_REMEMBERPASSWORD_SIMPLE_BODY = '#tooltips:login/rememberPassword/simple/body'
LOGIN_REMEMBERPASSWORD_SOCIAL_HEADER = '#tooltips:login/rememberPassword/social/header'
LOGIN_REMEMBERPASSWORD_SOCIAL_BODY = '#tooltips:login/rememberPassword/social/body'
LOGIN_BYSOCIAL_HEADER = '#tooltips:login/bySocial/header'
LOGIN_BYSOCIAL_BODY = '#tooltips:login/bySocial/body'
LOGIN_BYSOCIAL_WGNI_HEADER = '#tooltips:login/bySocial/wgni/header'
LOGIN_BYSOCIAL_WGNI_BODY = '#tooltips:login/bySocial/wgni/body'
LOGIN_SOCIAL_ENTER_HEADER = '#tooltips:login/social/enter/header'
LOGIN_SOCIAL_ENTER_BODY = '#tooltips:login/social/enter/body'
LOGIN_SOCIAL_FACEBOOK = '#tooltips:login/social/facebook'
LOGIN_SOCIAL_GOOGLE = '#tooltips:login/social/google'
LOGIN_SOCIAL_YAHOO = '#tooltips:login/social/yahoo'
LOGIN_SOCIAL_TWITTER = '#tooltips:login/social/twitter'
LOGIN_SOCIAL_VKONTAKTE = '#tooltips:login/social/vkontakte'
LOGIN_SOCIAL_ODNOKLASSNIKI = '#tooltips:login/social/odnoklassniki'
LOGIN_SOCIAL_WGNI = '#tooltips:login/social/wgni'
LOGIN_SOCIAL_NAVER = '#tooltips:login/social/naver'
LOGIN_CHANGEACCOUNT_HEADER = '#tooltips:login/changeAccount/header'
LOGIN_CHANGEACCOUNT_BODY = '#tooltips:login/changeAccount/body'
LOGIN_LEGAL_BODY = '#tooltips:login/legal/body'
LOGIN_BGMODEBUTTON_ON_HEADER = '#tooltips:login/bgModeButton/on/header'
LOGIN_BGMODEBUTTON_ON_BODY = '#tooltips:login/bgModeButton/on/body'
LOGIN_BGMODEBUTTON_OFF_HEADER = '#tooltips:login/bgModeButton/off/header'
LOGIN_BGMODEBUTTON_OFF_BODY = '#tooltips:login/bgModeButton/off/body'
LOGIN_SOUNDBUTTON_ON_HEADER = '#tooltips:login/soundButton/on/header'
LOGIN_SOUNDBUTTON_ON_BODY = '#tooltips:login/soundButton/on/body'
LOGIN_SOUNDBUTTON_OFF_HEADER = '#tooltips:login/soundButton/off/header'
LOGIN_SOUNDBUTTON_OFF_BODY = '#tooltips:login/soundButton/off/body'
# --- Prebattle and game settings panels ---
PREBATTLE_NAMEFILTERBUTTON_HEADER = '#tooltips:prebattle/nameFilterButton/header'
PREBATTLE_NAMEFILTERBUTTON_BODY = '#tooltips:prebattle/nameFilterButton/body'
PREBATTLE_INVITATIONS_TOOLTIP_ISONLINE_BODY = '#tooltips:prebattle/invitations/tooltip/isOnline/body'
SETTINGSCONTROL_RECOMMENDED = '#tooltips:settingsControl/recommended'
SETTINGS_MINIMAPCIRCLES_TITLE = '#tooltips:settings/minimapCircles/title'
SETTINGS_MINIMAPCIRCLES_VIEWRANGE_TITLE = '#tooltips:settings/minimapCircles/viewRange/title'
SETTINGS_MINIMAPCIRCLES_VIEWRANGE_BODY = '#tooltips:settings/minimapCircles/viewRange/body'
SETTINGS_MINIMAPCIRCLES_MAXVIEWRANGE_TITLE = '#tooltips:settings/minimapCircles/maxViewRange/title'
SETTINGS_MINIMAPCIRCLES_MAXVIEWRANGE_AS2_BODY = '#tooltips:settings/minimapCircles/maxViewRange/as2/body'
SETTINGS_MINIMAPCIRCLES_MAXVIEWRANGE_AS3_BODY = '#tooltips:settings/minimapCircles/maxViewRange/as3/body'
SETTINGS_MINIMAPCIRCLES_DRAWRANGE_TITLE = '#tooltips:settings/minimapCircles/drawRange/title'
SETTINGS_MINIMAPCIRCLES_DRAWRANGE_BODY = '#tooltips:settings/minimapCircles/drawRange/body'
SETTINGS_DAMAGELOG_DETAILS_HEADER = '#tooltips:settings/damagelog/details/header'
SETTINGS_DAMAGELOG_DETAILS_BODY = '#tooltips:settings/damagelog/details/body'
SETTINGS_RIBBONS_RECEIVEDDAMAGE_HEADER = '#tooltips:settings/ribbons/receivedDamage/header'
SETTINGS_RIBBONS_RECEIVEDDAMAGE_BODY = '#tooltips:settings/ribbons/receivedDamage/body'
SETTINGS_FEEDBACK_INDICATORS_DYNAMICWIDTH_HEADER = '#tooltips:settings/feedback/indicators/dynamicWidth/header'
SETTINGS_FEEDBACK_INDICATORS_DYNAMICWIDTH_BODY = '#tooltips:settings/feedback/indicators/dynamicWidth/body'
SETTINGS_FEEDBACK_INDICATORS_ANIMATION_HEADER = '#tooltips:settings/feedback/indicators/animation/header'
SETTINGS_FEEDBACK_INDICATORS_ANIMATION_BODY = '#tooltips:settings/feedback/indicators/animation/body'
LOBBYMENU_VERSIONINFOBUTTON_BODY = '#tooltips:lobbyMenu/versionInfoButton/body'
WINDOWHELP_HEADER = '#tooltips:windowHelp/header'
WINDOWHELP_BODY = '#tooltips:windowHelp/body'
# --- Action (discount) price tooltips ---
ACTIONPRICE_HEADER = '#tooltips:actionPrice/header'
ACTIONPRICE_BODY = '#tooltips:actionPrice/body'
ACTIONPRICE_ACTIONNAME = '#tooltips:actionPrice/actionName'
ACTIONPRICE_FORACTION = '#tooltips:actionPrice/forAction'
ACTIONPRICE_FORACTIONS = '#tooltips:actionPrice/forActions'
ACTIONPRICE_RENTCOMPENSATION = '#tooltips:actionPrice/rentCompensation'
ACTIONPRICE_FORPERSONALDISCOUNT = '#tooltips:actionPrice/forPersonalDiscount'
ACTIONPRICE_SELL_HEADER = '#tooltips:actionPrice/sell/header'
ACTIONPRICE_SELL_BODY = '#tooltips:actionPrice/sell/body'
ACTIONPRICE_SELL_TYPE_VEHICLE = '#tooltips:actionPrice/sell/type/vehicle'
ACTIONPRICE_SELL_TYPE_MODULE = '#tooltips:actionPrice/sell/type/module'
ACTIONPRICE_SELL_TYPE_EQUIPMENT = '#tooltips:actionPrice/sell/type/equipment'
ACTIONPRICE_SELL_TYPE_SHELL = '#tooltips:actionPrice/sell/type/shell'
ACTIONPRICE_SELL_TYPE_OPTIONALDEVICE = '#tooltips:actionPrice/sell/type/optionalDevice'
ACTIONPRICE_EXCHANGE_HEADER = '#tooltips:actionPrice/exchange/header'
ACTIONPRICE_EXCHANGE_BODY = '#tooltips:actionPrice/exchange/body'
# --- Settings toggles: notifications, chat, battle UI, sound ---
RECEIVEFRIENDSHIPREQUEST_HEADER = '#tooltips:receiveFriendshipRequest/header'
RECEIVEFRIENDSHIPREQUEST_BODY = '#tooltips:receiveFriendshipRequest/body'
RECEIVEINVITESINBATTLE_HEADER = '#tooltips:receiveInvitesInBattle/header'
RECEIVEINVITESINBATTLE_BODY = '#tooltips:receiveInvitesInBattle/body'
RECEIVECLANINVITESNOTIFICATIONS_HEADER = '#tooltips:receiveClanInvitesNotifications/header'
RECEIVECLANINVITESNOTIFICATIONS_BODY = '#tooltips:receiveClanInvitesNotifications/body'
CHATCONTACTSLISTONLY_HEADER = '#tooltips:chatContactsListOnly/header'
CHATCONTACTSLISTONLY_BODY = '#tooltips:chatContactsListOnly/body'
TURNOFFCOMBATCHAT_HEADER = '#tooltips:turnOffCombatChat/header'
TURNOFFCOMBATCHAT_BODY = '#tooltips:turnOffCombatChat/body'
ENABLEPOSTMORTEMEFFECT_HEADER = '#tooltips:enablePostMortemEffect/header'
ENABLEPOSTMORTEMEFFECT_BODY = '#tooltips:enablePostMortemEffect/body'
SHOWMARKSONGUN_HEADER = '#tooltips:showMarksOnGun/header'
SHOWMARKSONGUN_BODY = '#tooltips:showMarksOnGun/body'
SHOWVEHMODELSONMAP_HEADER = '#tooltips:showVehModelsOnMap/header'
SHOWVEHMODELSONMAP_BODY = '#tooltips:showVehModelsOnMap/body'
SHOWVEHMODELSONMAP_ATTENTION = '#tooltips:showVehModelsOnMap/attention'
BATTLELOADINGINFO_HEADER = '#tooltips:battleLoadingInfo/header'
BATTLELOADINGINFO_BODY = '#tooltips:battleLoadingInfo/body'
SHOWVECTORONMAP_HEADER = '#tooltips:showVectorOnMap/header'
SHOWVECTORONMAP_BODY = '#tooltips:showVectorOnMap/body'
SHOWSECTORONMAP_HEADER = '#tooltips:showSectorOnMap/header'
SHOWSECTORONMAP_BODY = '#tooltips:showSectorOnMap/body'
INCREASEDZOOM_HEADER = '#tooltips:increasedZoom/header'
INCREASEDZOOM_BODY = '#tooltips:increasedZoom/body'
MASTERVOLUMETOGGLEOFF_HEADER = '#tooltips:masterVolumeToggleOff/header'
MASTERVOLUMETOGGLEOFF_BODY = '#tooltips:masterVolumeToggleOff/body'
SOUNDQUALITYON_HEADER = '#tooltips:soundQualityOn/header'
SOUNDQUALITYON_BODY = '#tooltips:soundQualityOn/body'
SOUND_DYNAMICRANGE_HELP_HEADER = '#tooltips:sound/dynamicRange/help/header'
SOUND_DYNAMICRANGE_HELP_BODY = '#tooltips:sound/dynamicRange/help/body'
# --- Currency / wallet ---
GOLD_HEADER = '#tooltips:gold/header'
GOLD_BODY = '#tooltips:gold/body'
WALLET_NOT_AVAILABLE_GOLD_HEADER = '#tooltips:wallet/not_available_gold/header'
WALLET_NOT_AVAILABLE_GOLD_BODY = '#tooltips:wallet/not_available_gold/body'
WALLET_NOT_AVAILABLE_FREEXP_HEADER = '#tooltips:wallet/not_available_freexp/header'
WALLET_NOT_AVAILABLE_FREEXP_BODY = '#tooltips:wallet/not_available_freexp/body'
WALLET_NOT_AVAILABLE_CREDITS_HEADER = '#tooltips:wallet/not_available_credits/header'
WALLET_NOT_AVAILABLE_CREDITS_BODY = '#tooltips:wallet/not_available_credits/body'
CREDITS_HEADER = '#tooltips:credits/header'
CREDITS_BODY = '#tooltips:credits/body'
FREEXP_HEADER = '#tooltips:freeXP/header'
FREEXP_BODY = '#tooltips:freeXP/body'
USEFREEXP_HEADER = '#tooltips:useFreeXP/header'
USEFREEXP_BODY = '#tooltips:useFreeXP/body'
XP_HEADER = '#tooltips:XP/header'
XP_BODY = '#tooltips:XP/body'
ELITEXP_HEADER = '#tooltips:eliteXP/header'
ELITEXP_BODY = '#tooltips:eliteXP/body'
HEADER_ELITEICON_HEADER = '#tooltips:header/eliteIcon/header'
HEADER_ELITEICON_BODY = '#tooltips:header/eliteIcon/body'
CYBERSPORT_RESPAWN_STATUS_INFO_BODY = '#tooltips:cyberSport/respawn/status/info/body'
CYBERSPORT_RESPAWN_STATUS_WARNING_BODY = '#tooltips:cyberSport/respawn/status/warning/body'
CYBERSPORT_RESPAWN_STATUS_READY_BODY = '#tooltips:cyberSport/respawn/status/ready/body'
CYBERSPORT_RESPAWN_FIGHTBTN_BODY = '#tooltips:cyberSport/respawn/fightBtn/body'
CYBERSPORT_SELECTVEHICLE_HEADER = '#tooltips:cyberSport/selectVehicle/header'
CYBERSPORT_SELECTVEHICLE_BODY = '#tooltips:cyberSport/selectVehicle/body'
CYBERSPORT_UNITLEVEL_TITLE = '#tooltips:cyberSport/unitLevel/title'
CYBERSPORT_UNITLEVEL_DESCRIPTION = '#tooltips:cyberSport/unitLevel/description'
CYBERSPORT_UNITLEVEL_BODY = '#tooltips:cyberSport/unitLevel/body'
CYBERSPORT_UNITLEVEL_BODY_RECOMMENDED = '#tooltips:cyberSport/unitLevel/body/recommended'
CYBERSPORT_UNITLEVEL_BODY_RECOMMENDEDSTATUS = '#tooltips:cyberSport/unitLevel/body/recommendedStatus'
CYBERSPORT_UNITLEVEL_BODY_NOTRECOMMENDED = '#tooltips:cyberSport/unitLevel/body/notrecommended'
CYBERSPORT_UNITLEVEL_BODY_TOTALLEVEL = '#tooltips:cyberSport/unitLevel/body/totalLevel'
CYBERSPORT_UNITLEVEL_BODY_MINTOTALLEVELERROR = '#tooltips:cyberSport/unitLevel/body/minTotalLevelError'
CYBERSPORT_UNITLEVEL_BODY_MAXTOTALLEVELERROR = '#tooltips:cyberSport/unitLevel/body/maxTotalLevelError'
CYBERSPORT_UNITLEVEL_BODY_ERROR = '#tooltips:cyberSport/unitLevel/body/error'
CYBERSPORT_UNITLEVEL_BODY_ERRORSTATUS = '#tooltips:cyberSport/unitLevel/body/errorStatus'
CYBERSPORT_UNITLEVEL_BACK_HEADER = '#tooltips:cyberSport/unitLevel/back/header'
CYBERSPORT_UNITLEVEL_BACK_BODY = '#tooltips:cyberSport/unitLevel/back/body'
CYBERSPORT_NOVEHICLESINHANGAR_HEADER = '#tooltips:cyberSport/noVehiclesInHangar/header'
CYBERSPORT_NOVEHICLESINHANGAR_BODY = '#tooltips:cyberSport/noVehiclesInHangar/body'
CYBERSPORT_MODECHANGEFROZEN_HEADER = '#tooltips:cyberSport/modeChangeFrozen/header'
CYBERSPORT_MODECHANGEFROZEN_BODY = '#tooltips:cyberSport/modeChangeFrozen/body'
CYBERSPORT_ROSTERSLOTSETTINGS_HEADERTEXT_BODY = '#tooltips:cyberSport/rosterSlotSettings/headerText/body'
BATTLETYPES_STANDART_HEADER = '#tooltips:battleTypes/standart/header'
BATTLETYPES_STANDART_BODY = '#tooltips:battleTypes/standart/body'
BATTLETYPES_FALLOUT_HEADER = '#tooltips:battleTypes/fallout/header'
BATTLETYPES_FALLOUT_BODY = '#tooltips:battleTypes/fallout/body'
BATTLETYPES_TRAINING_HEADER = '#tooltips:battleTypes/training/header'
BATTLETYPES_TRAINING_BODY = '#tooltips:battleTypes/training/body'
BATTLETYPES_LEAVETRAINING_HEADER = '#tooltips:battleTypes/leaveTraining/header'
BATTLETYPES_LEAVETRAINING_BODY = '#tooltips:battleTypes/leaveTraining/body'
BATTLETYPES_COMPANY_HEADER = '#tooltips:battleTypes/company/header'
BATTLETYPES_COMPANY_BODY = '#tooltips:battleTypes/company/body'
COMPANY_FALLOUT_RESTRICTION_HEADER = '#tooltips:company/fallout/restriction/header'
COMPANY_FALLOUT_RESTRICTION_BODY = '#tooltips:company/fallout/restriction/body'
BATTLETYPES_LEAVECOMPANY_HEADER = '#tooltips:battleTypes/leaveCompany/header'
BATTLETYPES_LEAVECOMPANY_BODY = '#tooltips:battleTypes/leaveCompany/body'
BATTLETYPES_SPEC_HEADER = '#tooltips:battleTypes/spec/header'
BATTLETYPES_SPEC_BODY = '#tooltips:battleTypes/spec/body'
BATTLETYPES_LEAVESPEC_HEADER = '#tooltips:battleTypes/leaveSpec/header'
BATTLETYPES_LEAVESPEC_BODY = '#tooltips:battleTypes/leaveSpec/body'
BATTLETYPES_UNIT_HEADER = '#tooltips:battleTypes/unit/header'
BATTLETYPES_UNIT_BODY = '#tooltips:battleTypes/unit/body'
BATTLETYPES_LEAVEUNIT_HEADER = '#tooltips:battleTypes/leaveUnit/header'
BATTLETYPES_LEAVEUNIT_BODY = '#tooltips:battleTypes/leaveUnit/body'
BATTLETYPES_HISTORICAL_BODY = '#tooltips:battleTypes/historical/body'
BATTLETYPES_LEAVEHISTORICAL_HEADER = '#tooltips:battleTypes/leaveHistorical/header'
BATTLETYPES_LEAVEHISTORICAL_BODY = '#tooltips:battleTypes/leaveHistorical/body'
BATTLETYPES_BATTLETUTORIAL_HEADER = '#tooltips:battleTypes/battleTutorial/header'
BATTLETYPES_BATTLETUTORIAL_BODY = '#tooltips:battleTypes/battleTutorial/body'
BATTLETYPES_AVAILABLETIME = '#tooltips:battleTypes/availableTime'
BATTLETYPES_AVAILABLETIME_SINCE = '#tooltips:battleTypes/availableTime/since'
BATTLETYPES_BATTLETEACHING_HEADER = '#tooltips:battleTypes/battleTeaching/header'
BATTLETYPES_BATTLETEACHING_BODY = '#tooltips:battleTypes/battleTeaching/body'
BATTLETYPES_AVAILABLETIME_UNTIL = '#tooltips:battleTypes/availableTime/until'
BATTLETYPES_AVAILABLETIME_SERVERS = '#tooltips:battleTypes/availableTime/servers'
FALLOUTBATTLESELECTORWINDOW_INFO_HEADER = '#tooltips:falloutBattleSelectorWindow/info/header'
FALLOUTBATTLESELECTORWINDOW_INFO_BODY = '#tooltips:falloutBattleSelectorWindow/info/body'
FALLOUTBATTLESELECTORWINDOW_INFO_ALERT = '#tooltips:falloutBattleSelectorWindow/info/alert'
FALLOUTBATTLESELECTORWINDOW_BTNDISABLED_HEADER = '#tooltips:falloutBattleSelectorWindow/btnDisabled/header'
FALLOUTBATTLESELECTORWINDOW_BTNDISABLED_BODY = '#tooltips:falloutBattleSelectorWindow/btnDisabled/body'
FALLOUTBATTLESELECTORWINDOW_BTNINSQUADDISABLED_HEADER = '#tooltips:falloutBattleSelectorWindow/btnInSquadDisabled/header'
FALLOUTBATTLESELECTORWINDOW_BTNINSQUADDISABLED_BODY = '#tooltips:falloutBattleSelectorWindow/btnInSquadDisabled/body'
REDBUTTON_DISABLED_BUYNEEDED_HEADER = '#tooltips:redButton/disabled/buyNeeded/header'
REDBUTTON_DISABLED_BUYNEEDED_BODY = '#tooltips:redButton/disabled/buyNeeded/body'
REDBUTTON_DISABLED_REPAIRNEEDED_HEADER = '#tooltips:redButton/disabled/repairNeeded/header'
REDBUTTON_DISABLED_REPAIRNEEDED_BODY = '#tooltips:redButton/disabled/repairNeeded/body'
REDBUTTON_DISABLED_VEHICLE_INBATTLE_HEADER = '#tooltips:redButton/disabled/vehicle/inBattle/header'
REDBUTTON_DISABLED_VEHICLE_INBATTLE_BODY = '#tooltips:redButton/disabled/vehicle/inBattle/body'
REDBUTTON_DISABLED_VEHICLE_ROAMING_HEADER = '#tooltips:redButton/disabled/vehicle/roaming/header'
REDBUTTON_DISABLED_VEHICLE_ROAMING_BODY = '#tooltips:redButton/disabled/vehicle/roaming/body'
REDBUTTON_DISABLED_VEHICLE_RENTALSISOVER_HEADER = '#tooltips:redButton/disabled/vehicle/rentalsIsOver/header'
REDBUTTON_DISABLED_VEHICLE_RENTALSISOVER_BODY = '#tooltips:redButton/disabled/vehicle/rentalsIsOver/body'
REDBUTTON_DISABLED_VEHICLE_GROUP_IS_NOT_READY_HEADER = '#tooltips:redButton/disabled/vehicle/group_is_not_ready/header'
REDBUTTON_DISABLED_VEHICLE_NOT_SUPPORTED_HEADER = '#tooltips:redButton/disabled/vehicle/not_supported/header'
REDBUTTON_DISABLED_VEHICLE_NOT_SUPPORTED_BODY = '#tooltips:redButton/disabled/vehicle/not_supported/body'
REDBUTTON_DISABLED_CREW_NOTFULL_HEADER = '#tooltips:redButton/disabled/crew/notFull/header'
REDBUTTON_DISABLED_CREW_NOTFULL_BODY = '#tooltips:redButton/disabled/crew/notFull/body'
REDBUTTON_DISABLED_LIMITS_CLASSES_LIGHTTANK_HEADER = '#tooltips:redButton/disabled/limits/classes/lightTank/header'
REDBUTTON_DISABLED_LIMITS_CLASSES_LIGHTTANK_BODY = '#tooltips:redButton/disabled/limits/classes/lightTank/body'
REDBUTTON_DISABLED_LIMITS_CLASSES_MEDIUMTANK_HEADER = '#tooltips:redButton/disabled/limits/classes/mediumTank/header'
REDBUTTON_DISABLED_LIMITS_CLASSES_MEDIUMTANK_BODY = '#tooltips:redButton/disabled/limits/classes/mediumTank/body'
REDBUTTON_DISABLED_LIMITS_CLASSES_HEAVYTANK_HEADER = '#tooltips:redButton/disabled/limits/classes/heavyTank/header'
REDBUTTON_DISABLED_LIMITS_CLASSES_HEAVYTANK_BODY = '#tooltips:redButton/disabled/limits/classes/heavyTank/body'
REDBUTTON_DISABLED_LIMITS_CLASSES_SPG_HEADER = '#tooltips:redButton/disabled/limits/classes/SPG/header'
REDBUTTON_DISABLED_LIMITS_CLASSES_SPG_BODY = '#tooltips:redButton/disabled/limits/classes/SPG/body'
REDBUTTON_DISABLED_LIMITS_CLASSES_AT_SPG_HEADER = '#tooltips:redButton/disabled/limits/classes/AT-SPG/header'
REDBUTTON_DISABLED_LIMITS_CLASSES_AT_SPG_BODY = '#tooltips:redButton/disabled/limits/classes/AT-SPG/body'
REDBUTTON_DISABLED_LIMIT_TOTALLEVEL_HEADER = '#tooltips:redButton/disabled/limit/totalLevel/header'
REDBUTTON_DISABLED_LIMIT_TOTALLEVEL_BODY = '#tooltips:redButton/disabled/limit/totalLevel/body'
REDBUTTON_DISABLED_LIMITS_LEVEL_HEADER = '#tooltips:redButton/disabled/limits/level/header'
REDBUTTON_DISABLED_LIMITS_LEVEL_BODY = '#tooltips:redButton/disabled/limits/level/body'
MEMBERS_VEHICLELEVELLIMITS_BODY = '#tooltips:members/vehicleLevelLimits/body'
MEMBERS_INVALIDVEHICLETYPE_BODY = '#tooltips:members/invalidVehicleType/body'
HANGAR_STARTBTN_SQUADNOTREADY_HEADER = '#tooltips:hangar/startBtn/squadNotReady/header'
HANGAR_STARTBTN_SQUADNOTREADY_BODY = '#tooltips:hangar/startBtn/squadNotReady/body'
HANGAR_TANKCARUSEL_WRONGSQUADVEHICLE_HEADER = '#tooltips:hangar/tankCarusel/wrongSquadVehicle/header'
HANGAR_TANKCARUSEL_WRONGSQUADVEHICLE_BODY = '#tooltips:hangar/tankCarusel/wrongSquadVehicle/body'
HANGAR_TANKCARUSEL_WRONGSQUADSPGVEHICLE_HEADER = '#tooltips:hangar/tankCarusel/wrongSquadSPGVehicle/header'
HANGAR_TANKCARUSEL_WRONGSQUADSPGVEHICLE_BODY = '#tooltips:hangar/tankCarusel/wrongSquadSPGVehicle/body'
ACHIEVEMENT_HISTORYDESCRIPTIONHEADER = '#tooltips:achievement/historyDescriptionHeader'
ACHIEVEMENT_CURRENTDEGREE = '#tooltips:achievement/currentDegree'
ACHIEVEMENT_MARKSONGUNCOUNT = '#tooltips:achievement/marksOnGunCount'
ACHIEVEMENT_MARKSONGUN_NOTINDOSSIER = '#tooltips:achievement/marksOnGun/NotInDossier'
ACHIEVEMENT_ISNOTINDOSSIER = '#tooltips:achievement/isNotInDossier'
ACHIEVEMENT_ALLACHIEVEMENTS = '#tooltips:achievement/allAchievements'
ACHIEVEMENT_ACHIEVEDON = '#tooltips:achievement/achievedOn'
ACHIEVEMENT_CLOSETORECORD = '#tooltips:achievement/closeToRecord'
ACHIEVEMENT_NEWRECORD = '#tooltips:achievement/newRecord'
ACHIEVEMENT_RECORDONVEHICLE = '#tooltips:achievement/recordOnVehicle'
ACHIEVEMENT_PARAMS_MAXSNIPERSERIES = '#tooltips:achievement/params/maxSniperSeries'
ACHIEVEMENT_PARAMS_MAXINVINCIBLESERIES = '#tooltips:achievement/params/maxInvincibleSeries'
ACHIEVEMENT_PARAMS_MAXDIEHARDSERIES = '#tooltips:achievement/params/maxDiehardSeries'
ACHIEVEMENT_PARAMS_MAXKILLINGSERIES = '#tooltips:achievement/params/maxKillingSeries'
ACHIEVEMENT_PARAMS_MAXRELIABLECOMRADESERIES = '#tooltips:achievement/params/maxReliableComradeSeries'
ACHIEVEMENT_PARAMS_MAXAIMERSERIES = '#tooltips:achievement/params/maxAimerSeries'
ACHIEVEMENT_PARAMS_MAXWFC2014WINSERIES = '#tooltips:achievement/params/maxWFC2014WinSeries'
ACHIEVEMENT_PARAMS_MAXDEATHTRACKWINSERIES = '#tooltips:achievement/params/maxDeathTrackWinSeries'
ACHIEVEMENT_PARAMS_MAXPIERCINGSERIES = '#tooltips:achievement/params/maxPiercingSeries'
ACHIEVEMENT_PARAMS_MAXTACTICALBREAKTHROUGHSERIES = '#tooltips:achievement/params/maxTacticalBreakthroughSeries'
ACHIEVEMENT_PARAMS_MAXVICTORYMARCHSERIES = '#tooltips:achievement/params/maxVictoryMarchSeries'
ACHIEVEMENT_PARAMS_MAXEFC2016WINSERIES = '#tooltips:achievement/params/maxEFC2016WinSeries'
ACHIEVEMENT_PARAMS_NO_CLASS_ACHIEVEMENT_LEFT = '#tooltips:achievement/params/no_class_achievement_left'
ACHIEVEMENT_PARAMS_LEFT4 = '#tooltips:achievement/params/left4'
ACHIEVEMENT_PARAMS_LEFT3 = '#tooltips:achievement/params/left3'
ACHIEVEMENT_PARAMS_LEFT2 = '#tooltips:achievement/params/left2'
ACHIEVEMENT_PARAMS_LEFT1 = '#tooltips:achievement/params/left1'
ACHIEVEMENT_PARAMS_HEROESLEFT = '#tooltips:achievement/params/heroesLeft'
ACHIEVEMENT_PARAMS_PAIRWINSLEFT = '#tooltips:achievement/params/pairWinsLeft'
ACHIEVEMENT_PARAMS_VEHICLESLEFT = '#tooltips:achievement/params/vehiclesLeft'
ACHIEVEMENT_PARAMS_QUESTSLEFT = '#tooltips:achievement/params/questsLeft'
ACHIEVEMENT_PARAMS_TANKWOMENLEFT = '#tooltips:achievement/params/tankwomenLeft'
ACHIEVEMENT_PARAMS_DAMAGELEFT = '#tooltips:achievement/params/damageLeft'
ACHIEVEMENT_PARAMS_BATTLESLEFT = '#tooltips:achievement/params/battlesLeft'
ACHIEVEMENT_PARAMS_CAPTUREPOINTSLEFT = '#tooltips:achievement/params/capturePointsLeft'
ACHIEVEMENT_PARAMS_DROPPOINTSLEFT = '#tooltips:achievement/params/dropPointsLeft'
ACHIEVEMENT_PARAMS_WINSLEFT = '#tooltips:achievement/params/winsLeft'
ACHIEVEMENT_PARAMS_ACHIEVESLEFT = '#tooltips:achievement/params/achievesLeft'
ACHIEVEMENT_PARAMS_FORTDEFRESLEFT = '#tooltips:achievement/params/fortDefResLeft'
ACHIEVEMENT_PARAMS_WINPOINTSLEFT = '#tooltips:achievement/params/winPointsLeft'
ACHIEVEMENT_PARAMS_VEHICLESTAKEPART = '#tooltips:achievement/params/vehiclesTakePart'
ACHIEVEMENT_PARAMS_VEHICLESTOKILL = '#tooltips:achievement/params/vehiclesToKill'
ACHIEVEMENT_PARAMS_VEHICLESTORESEARCH = '#tooltips:achievement/params/vehiclesToResearch'
ACHIEVEMENT_PARAMS_VEHICLES = '#tooltips:achievement/params/vehicles'
ACHIEVEMENT_ACTION_UNAVAILABLE_DESCR = '#tooltips:achievement/action/unavailable/descr'
ACHIEVEMENT_ACTION_UNAVAILABLE_TITLE = '#tooltips:achievement/action/unavailable/title'
ACHIEVEMENT_ATTR_COUNTER_HEADER = '#tooltips:achievement/attr/counter/header'
ACHIEVEMENT_ATTR_SMALL_HEADER = '#tooltips:achievement/attr/small/header'
ACHIEVEMENT_ATTR_RECORD_HEADER = '#tooltips:achievement/attr/record/header'
ACHIEVEMENT_ATTR_VEHICLERECORD_HEADER = '#tooltips:achievement/attr/vehicleRecord/header'
ACHIEVEMENT_ATTR_DEGREE_HEADER = '#tooltips:achievement/attr/degree/header'
HEADER_MENU_HEADER = '#tooltips:header/menu/header'
HEADER_MENU_DESCRIPTION = '#tooltips:header/menu/description'
HEADER_MENU_SERVER = '#tooltips:header/menu/server'
HEADER_MENU_SERVER_CURRENT = '#tooltips:header/menu/server/current'
SERVER_NOTRECOMENDED = '#tooltips:server/notRecomended'
HEADER_MENU_PLAYERSONSERVER = '#tooltips:header/menu/playersOnServer'
HEADER_ACCOUNT_HEADER = '#tooltips:header/account/header'
HEADER_ACCOUNT_BODY = '#tooltips:header/account/body'
HEADER_PREMIUM_BUY_HEADER = '#tooltips:header/premium_buy/header'
HEADER_PREMIUM_BUY_BODY = '#tooltips:header/premium_buy/body'
HEADER_PREMIUM_EXTEND_HEADER = '#tooltips:header/premium_extend/header'
HEADER_PREMIUM_EXTEND_BODY = '#tooltips:header/premium_extend/body'
HEADER_PREMSHOP_HEADER = '#tooltips:header/premShop/header'
HEADER_PREMSHOP_BODY = '#tooltips:header/premShop/body'
HEADER_SQUAD_MEMBER_HEADER = '#tooltips:header/squad_member/header'
HEADER_SQUAD_MEMBER_BODY = '#tooltips:header/squad_member/body'
HEADER_SQUAD_HEADER = '#tooltips:header/squad/header'
HEADER_SQUAD_BODY = '#tooltips:header/squad/body'
HEADER_EVENTSQUAD_HEADER = '#tooltips:header/eventSquad/header'
HEADER_EVENTSQUAD_BODY = '#tooltips:header/eventSquad/body'
HEADER_DOMINATIONSQUAD_HEADER = '#tooltips:header/dominationSquad/header'
HEADER_DOMINATIONSQUAD_BODY = '#tooltips:header/dominationSquad/body'
HEADER_BATTLETYPE_HEADER = '#tooltips:header/battleType/header'
HEADER_BATTLETYPE_BODY = '#tooltips:header/battleType/body'
HEADER_REFILL_HEADER = '#tooltips:header/refill/header'
HEADER_REFILL_BODY = '#tooltips:header/refill/body'
HEADER_REFILL_ACTION_HEADER = '#tooltips:header/refill_action/header'
HEADER_REFILL_ACTION_BODY = '#tooltips:header/refill_action/body'
HEADER_REFILL_ACTION_ATTENTION = '#tooltips:header/refill_action/attention'
HEADER_GOLD_EXCHANGE_HEADER = '#tooltips:header/gold_exchange/header'
HEADER_GOLD_EXCHANGE_BODY = '#tooltips:header/gold_exchange/body'
HEADER_XP_GATHERING_HEADER = '#tooltips:header/xp_gathering/header'
HEADER_XP_GATHERING_BODY = '#tooltips:header/xp_gathering/body'
HEADER_ACCOUNTPOPOVER_INVITEBTN_HEADER = '#tooltips:header/accountPopover/inviteBtn/header'
HEADER_ACCOUNTPOPOVER_INVITEBTN_BODY = '#tooltips:header/accountPopover/inviteBtn/body'
HEADER_ACCOUNTPOPOVER_SEARCHCLAN_HEADER = '#tooltips:header/accountPopover/searchClan/header'
HEADER_ACCOUNTPOPOVER_SEARCHCLAN_BODY = '#tooltips:header/accountPopover/searchClan/body'
HEADER_ACCOUNTPOPOVER_INVITEREQUESTBTN_HEADER = '#tooltips:header/accountPopover/inviteRequestBtn/header'
HEADER_ACCOUNTPOPOVER_INVITEREQUESTBTN_BODY = '#tooltips:header/accountPopover/inviteRequestBtn/body'
HEADER_ACCOUNTPOPOVER_UNAVAILABLE = '#tooltips:header/accountPopover/unavailable'
HEADER_ACCOUNTPOPOVER_SEARCHCLAN_UNAVAILABLE_ATTENTION = '#tooltips:header/accountPopover/searchClan/unavailable/attention'
HEADER_ACCOUNTPOPOVER_INVITEREQUESTBTN_UNAVAILABLE_ATTENTION = '#tooltips:header/accountPopover/inviteRequestBtn/unavailable/attention'
HEADER_ACCOUNTPOPOVER_INVITEBTN_UNAVAILABLE_ATTENTION = '#tooltips:header/accountPopover/inviteBtn/unavailable/attention'
HEADER_ACCOUNTPOPOVER_CLANPROFILE_UNAVAILABLE_ATTENTION = '#tooltips:header/accountPopover/clanProfile/unavailable/attention'
HEADER_ACCOUNTPOPOVER_BOOSTERSTITLE_HEADER = '#tooltips:header/accountPopover/boostersTitle/header'
HEADER_ACCOUNTPOPOVER_BOOSTERSTITLE_BODY = '#tooltips:header/accountPopover/boostersTitle/body'
HANGAR_UNLOCKBUTTON_HEADER = '#tooltips:hangar/unlockButton/header'
HANGAR_UNLOCKBUTTON_BODY = '#tooltips:hangar/unlockButton/body'
HANGAR_MAINTENANCE_HEADER = '#tooltips:hangar/maintenance/header'
HANGAR_MAINTENANCE_BODY = '#tooltips:hangar/maintenance/body'
HANGAR_TUNING_HEADER = '#tooltips:hangar/tuning/header'
HANGAR_TUNING_BODY = '#tooltips:hangar/tuning/body'
HANGAR_TUNING_DISABLEDFOREVENTVEHICLE_HEADER = '#tooltips:hangar/tuning/disabledForEventVehicle/header'
HANGAR_TUNING_DISABLEDFOREVENTVEHICLE_BODY = '#tooltips:hangar/tuning/disabledForEventVehicle/body'
HANGAR_MENU_HEADER = '#tooltips:hangar/menu/header'
HANGAR_MENU_BODY = '#tooltips:hangar/menu/body'
HANGAR_STATUS_TORENT = '#tooltips:hangar/status/toRent'
HANGAR_CREW_RUDY_DOG_HEADER = '#tooltips:hangar/crew/rudy/dog/header'
HANGAR_CREW_RUDY_DOG_BODY = '#tooltips:hangar/crew/rudy/dog/body'
HANGAR_CREW_ASSIGNEDTO = '#tooltips:hangar/crew/assignedTo'
HANGAR_CREW_NEW_SKILL_AVAILABLE_HEADER = '#tooltips:hangar/crew/new_skill_available/header'
HANGAR_CREW_NEW_SKILL_AVAILABLE_TEXT = '#tooltips:hangar/crew/new_skill_available/text'
HANGAR_CREW_SPECIALTY_SKILLS = '#tooltips:hangar/crew/specialty_skills'
HANGAR_XPTOTMENCHECKBOX_HEADER = '#tooltips:hangar/xpToTmenCheckbox/header'
HANGAR_XPTOTMENCHECKBOX_BODY = '#tooltips:hangar/xpToTmenCheckbox/body'
PERSONAL_CASE_SKILLS_DROP_HEADER = '#tooltips:personal_case/skills/drop/header'
PERSONAL_CASE_SKILLS_DROP_BODY = '#tooltips:personal_case/skills/drop/body'
PERSONAL_CASE_SKILLS_NEW_HEADER = '#tooltips:personal_case/skills/new/header'
PERSONAL_CASE_SKILLS_NEW_BODY = '#tooltips:personal_case/skills/new/body'
PERSONAL_CASE_SKILLS_ACCTEACHINGOFSKILLBTN_HEADER = '#tooltips:personal_case/skills/accTeachingOfSkillBtn/header'
PERSONAL_CASE_SKILLS_ACCTEACHINGOFSKILLBTN_BODY = '#tooltips:personal_case/skills/accTeachingOfSkillBtn/body'
PERSONAL_CASE_SKILLS_ACCTEACHINGOFSKILLBTN_NOTENOUGHFREEXP_HEADER = '#tooltips:personal_case/skills/accTeachingOfSkillBtn/notEnoughFreeXP/header'
PERSONAL_CASE_SKILLS_ACCTEACHINGOFSKILLBTN_NOTENOUGHFREEXP_BODY = '#tooltips:personal_case/skills/accTeachingOfSkillBtn/notEnoughFreeXP/body'
PERSONAL_CASE_TRAINING_LIGHT_TANK_BTN_HEADER = '#tooltips:personal_case/training/light_tank_btn/header'
PERSONAL_CASE_TRAINING_LIGHT_TANK_BTN_BODY = '#tooltips:personal_case/training/light_tank_btn/body'
PERSONAL_CASE_TRAINING_LIGHT_TANK_BTN_DISABLED_HEADER = '#tooltips:personal_case/training/light_tank_btn_disabled/header'
PERSONAL_CASE_TRAINING_LIGHT_TANK_BTN_DISABLED_BODY = '#tooltips:personal_case/training/light_tank_btn_disabled/body'
PERSONAL_CASE_TRAINING_MEDIUM_TANK_BTN_HEADER = '#tooltips:personal_case/training/medium_tank_btn/header'
PERSONAL_CASE_TRAINING_MEDIUM_TANK_BTN_BODY = '#tooltips:personal_case/training/medium_tank_btn/body'
PERSONAL_CASE_TRAINING_MEDIUM_TANK_BTN_DISABLED_HEADER = '#tooltips:personal_case/training/medium_tank_btn_disabled/header'
PERSONAL_CASE_TRAINING_MEDIUM_TANK_BTN_DISABLED_BODY = '#tooltips:personal_case/training/medium_tank_btn_disabled/body'
PERSONAL_CASE_TRAINING_HEAVY_TANK_BTN_HEADER = '#tooltips:personal_case/training/heavy_tank_btn/header'
PERSONAL_CASE_TRAINING_HEAVY_TANK_BTN_BODY = '#tooltips:personal_case/training/heavy_tank_btn/body'
PERSONAL_CASE_TRAINING_HEAVY_TANK_BTN_DISABLED_HEADER = '#tooltips:personal_case/training/heavy_tank_btn_disabled/header'
PERSONAL_CASE_TRAINING_HEAVY_TANK_BTN_DISABLED_BODY = '#tooltips:personal_case/training/heavy_tank_btn_disabled/body'
PERSONAL_CASE_TRAINING_AT_SPG_BTN_HEADER = '#tooltips:personal_case/training/at_spg_btn/header'
PERSONAL_CASE_TRAINING_AT_SPG_BTN_BODY = '#tooltips:personal_case/training/at_spg_btn/body'
PERSONAL_CASE_TRAINING_AT_SPG_BTN_DISABLED_HEADER = '#tooltips:personal_case/training/at_spg_btn_disabled/header'
PERSONAL_CASE_TRAINING_AT_SPG_BTN_DISABLED_BODY = '#tooltips:personal_case/training/at_spg_btn_disabled/body'
PERSONAL_CASE_TRAINING_SPG_BTN_HEADER = '#tooltips:personal_case/training/spg_btn/header'
PERSONAL_CASE_TRAINING_SPG_BTN_BODY = '#tooltips:personal_case/training/spg_btn/body'
PERSONAL_CASE_TRAINING_SPG_BTN_DISABLED_HEADER = '#tooltips:personal_case/training/spg_btn_disabled/header'
PERSONAL_CASE_TRAINING_SPG_BTN_DISABLED_BODY = '#tooltips:personal_case/training/spg_btn_disabled/body'
PERSONAL_CASE_TRAINING_TANK_HEADER = '#tooltips:personal_case/training/tank/header'
PERSONAL_CASE_TRAINING_TANK_BODY = '#tooltips:personal_case/training/tank/body'
PERSONAL_CASE_TRAINING_CURRENT_TANK_HEADER = '#tooltips:personal_case/training/current_tank/header'
PERSONAL_CASE_TRAINING_CURRENT_TANK_BODY = '#tooltips:personal_case/training/current_tank/body'
HANGAR_AMMO_PANEL_DEVICE_EMPTY_HEADER = '#tooltips:hangar/ammo_panel/device/empty/header'
HANGAR_AMMO_PANEL_DEVICE_EMPTY_BODY = '#tooltips:hangar/ammo_panel/device/empty/body'
HANGAR_AMMO_PANEL_EQUIPMENT_EMPTY_HEADER = '#tooltips:hangar/ammo_panel/equipment/empty/header'
HANGAR_AMMO_PANEL_EQUIPMENT_EMPTY_BODY = '#tooltips:hangar/ammo_panel/equipment/empty/body'
MODULEFITS_CREDITS_ERROR_HEADER = '#tooltips:moduleFits/credits_error/header'
MODULEFITS_CREDITS_ERROR_TEXT = '#tooltips:moduleFits/credits_error/text'
MODULEFITS_GOLD_ERROR_HEADER = '#tooltips:moduleFits/gold_error/header'
MODULEFITS_GOLD_ERROR_TEXT = '#tooltips:moduleFits/gold_error/text'
MODULEFITS_OPERATION_ERROR_HEADER = '#tooltips:moduleFits/operation_error/header'
MODULEFITS_NEED_TURRET_HEADER = '#tooltips:moduleFits/need_turret/header'
MODULEFITS_NEED_TURRET_TEXT = '#tooltips:moduleFits/need_turret/text'
MODULEFITS_TOO_HEAVY_HEADER = '#tooltips:moduleFits/too_heavy/header'
MODULEFITS_TOO_HEAVY_TEXT = '#tooltips:moduleFits/too_heavy/text'
MODULEFITS_TOO_HEAVY_CHASSI_HEADER = '#tooltips:moduleFits/too_heavy_chassi/header'
MODULEFITS_TOO_HEAVY_CHASSI_TEXT = '#tooltips:moduleFits/too_heavy_chassi/text'
MODULEFITS_NOT_FOR_THIS_VEHICLE_TYPE_HEADER = '#tooltips:moduleFits/not_for_this_vehicle_type/header'
MODULEFITS_NOT_FOR_THIS_VEHICLE_TYPE_TEXT = '#tooltips:moduleFits/not_for_this_vehicle_type/text'
MODULEFITS_REMOVE_TOO_HEAVY_HEADER = '#tooltips:moduleFits/remove_too_heavy/header'
MODULEFITS_REMOVE_TOO_HEAVY_TEXT = '#tooltips:moduleFits/remove_too_heavy/text'
MODULEFITS_ALREADY_INSTALLED_HEADER = '#tooltips:moduleFits/already_installed/header'
MODULEFITS_UNLOCK_ERROR_HEADER = '#tooltips:moduleFits/unlock_error/header'
MODULEFITS_UNLOCK_ERROR_TEXT = '#tooltips:moduleFits/unlock_error/text'
MODULEFITS_WRONG_SLOT_BODY = '#tooltips:moduleFits/wrong_slot/body'
MODULEFITS_NOT_WITH_INSTALLED_EQUIPMENT_HEADER = '#tooltips:moduleFits/not_with_installed_equipment/header'
MODULEFITS_NOT_WITH_INSTALLED_EQUIPMENT_TEXT = '#tooltips:moduleFits/not_with_installed_equipment/text'
MODULEFITS_NOT_REMOVABLE_BODY = '#tooltips:moduleFits/not_removable/body'
MODULEFITS_NOT_REMOVABLE_NOTE_HEADER = '#tooltips:moduleFits/not_removable/note_header'
MODULEFITS_NOT_REMOVABLE_NOTE = '#tooltips:moduleFits/not_removable/note'
MODULEFITS_NOT_REMOVABLE_DISMANTLING_PRICE = '#tooltips:moduleFits/not_removable/dismantling/price'
DEVICEFITS_CREDITS_ERROR_HEADER = '#tooltips:deviceFits/credits_error/header'
DEVICEFITS_CREDITS_ERROR_TEXT = '#tooltips:deviceFits/credits_error/text'
DEVICEFITS_NEED_TURRET_HEADER = '#tooltips:deviceFits/need_turret/header'
DEVICEFITS_NEED_TURRET_TEXT = '#tooltips:deviceFits/need_turret/text'
DEVICEFITS_TOO_HEAVY_HEADER = '#tooltips:deviceFits/too_heavy/header'
DEVICEFITS_TOO_HEAVY_TEXT = '#tooltips:deviceFits/too_heavy/text'
DEVICEFITS_TOO_HEAVY_CHASSI_HEADER = '#tooltips:deviceFits/too_heavy_chassi/header'
DEVICEFITS_TOO_HEAVY_CHASSI_TEXT = '#tooltips:deviceFits/too_heavy_chassi/text'
DEVICEFITS_REMOVE_TOO_HEAVY_HEADER = '#tooltips:deviceFits/remove_too_heavy/header'
DEVICEFITS_REMOVE_TOO_HEAVY_TEXT = '#tooltips:deviceFits/remove_too_heavy/text'
DEVICEFITS_ALREADY_INSTALLED_HEADER = '#tooltips:deviceFits/already_installed/header'
DEVICEFITS_UNLOCK_ERROR_HEADER = '#tooltips:deviceFits/unlock_error/header'
DEVICEFITS_UNLOCK_ERROR_TEXT = '#tooltips:deviceFits/unlock_error/text'
DEVICEFITS_WRONG_SLOT_HEADER = '#tooltips:deviceFits/wrong_slot/header'
DEVICEFITS_NOT_WITH_INSTALLED_EQUIPMENT_HEADER = '#tooltips:deviceFits/not_with_installed_equipment/header'
DEVICEFITS_NOT_FOR_THIS_VEHICLE_TYPE_HEADER = '#tooltips:deviceFits/not_for_this_vehicle_type/header'
DEVICEFITS_NOT_FOR_THIS_VEHICLE_TYPE_TEXT = '#tooltips:deviceFits/not_for_this_vehicle_type/text'
DEVICEFITS_NOT_REMOVABLE_BODY = '#tooltips:deviceFits/not_removable/body'
DEVICEFITS_NOT_REMOVABLE_NOTE_HEADER = '#tooltips:deviceFits/not_removable/note_header'
DEVICEFITS_NOT_REMOVABLE_NOTE01 = '#tooltips:deviceFits/not_removable/note01'
DEVICEFITS_NOT_REMOVABLE_NOTE02 = '#tooltips:deviceFits/not_removable/note02'
MODULE_COUNT = '#tooltips:module/count'
EQUIPMENT_EFFECT = '#tooltips:equipment/effect'
EQUIPMENT_ONUSE = '#tooltips:equipment/onUse'
EQUIPMENT_ALWAYS = '#tooltips:equipment/always'
EQUIPMENT_RESTRICTION = '#tooltips:equipment/restriction'
TANKS_CAROUSEL_BUY_VEHICLE_HEADER = '#tooltips:tanks_carousel/buy_vehicle/header'
TANKS_CAROUSEL_BUY_VEHICLE_BODY = '#tooltips:tanks_carousel/buy_vehicle/body'
TANKS_CAROUSEL_BUY_SLOT_HEADER = '#tooltips:tanks_carousel/buy_slot/header'
TANKS_CAROUSEL_BUY_SLOT_BODY = '#tooltips:tanks_carousel/buy_slot/body'
TANKS_CAROUSEL_VEHICLESTATES_BATTLE = '#tooltips:tanks_carousel/vehicleStates/battle'
TANKS_CAROUSEL_VEHICLESTATES_EXPLODED = '#tooltips:tanks_carousel/vehicleStates/exploded'
TANKS_CAROUSEL_VEHICLESTATES_LOCKED = '#tooltips:tanks_carousel/vehicleStates/locked'
TANKS_CAROUSEL_VEHICLESTATES_DESTROYED = '#tooltips:tanks_carousel/vehicleStates/destroyed'
TANKS_CAROUSEL_VEHICLESTATES_DAMAGED = '#tooltips:tanks_carousel/vehicleStates/damaged'
TANKS_CAROUSEL_VEHICLESTATES_UNDAMAGED = '#tooltips:tanks_carousel/vehicleStates/undamaged'
TANKS_CAROUSEL_VEHICLESTATES_CREWNOTFULL = '#tooltips:tanks_carousel/vehicleStates/crewNotFull'
TANKS_CAROUSEL_VEHICLESTATES_SERVERRESTRICTION = '#tooltips:tanks_carousel/vehicleStates/serverRestriction'
TANKS_CAROUSEL_VEHICLESTATES_GROUP_IS_NOT_READY = '#tooltips:tanks_carousel/vehicleStates/group_is_not_ready'
TANKS_CAROUSEL_VEHICLESTATES_FALLOUT_MIN = '#tooltips:tanks_carousel/vehicleStates/fallout_min'
TANKS_CAROUSEL_VEHICLESTATES_FALLOUT_MAX = '#tooltips:tanks_carousel/vehicleStates/fallout_max'
TANKS_CAROUSEL_VEHICLESTATES_FALLOUT_REQUIRED = '#tooltips:tanks_carousel/vehicleStates/fallout_required'
TANKS_CAROUSEL_VEHICLESTATES_FALLOUT_BROKEN = '#tooltips:tanks_carousel/vehicleStates/fallout_broken'
TANKCARUSEL_MAINPROPERTY = '#tooltips:tankCarusel/MainProperty'
TANKCARUSEL_LOCK_HEADER = '#tooltips:tankCarusel/lock/header'
# ---------------------------------------------------------------------------
# Localization resource-key constants.
#
# Each constant maps a Python identifier to an i18n lookup key of the form
# '#tooltips:<path>'. The values are opaque resource IDs resolved by a
# localization subsystem elsewhere; they must never be edited or translated
# here — only the identifier names are meaningful to Python code.
#
# NOTE(review): this file appears to be auto-generated from a localization
# catalogue (constant names mirror the key paths exactly). If a generator
# exists, regenerate rather than hand-editing — TODO confirm.
# ---------------------------------------------------------------------------

# --- Tank carousel: vehicle-lock reasons ---
TANKCARUSEL_LOCK_ROTATION_HEADER = '#tooltips:tankCarusel/lock/rotation/header'
TANKCARUSEL_LOCK_TO = '#tooltips:tankCarusel/lock/To'
TANKCARUSEL_LOCK_CLAN = '#tooltips:tankCarusel/lock/CLAN'
TANKCARUSEL_LOCK_TOURNAMENT = '#tooltips:tankCarusel/lock/TOURNAMENT'
TANKCARUSEL_LOCK_ROTATION = '#tooltips:tankCarusel/lock/rotation'
TANKCARUSEL_LOCK_ROAMING = '#tooltips:tankCarusel/lock/ROAMING'

# --- Multi-selection alert ---
MULTISELECTION_ALERT_HEADER = '#tooltips:multiselection/alert/header'
MULTISELECTION_ALERT_BODY = '#tooltips:multiselection/alert/body'

# --- Lobby messenger buttons and channel carousel ---
# NOTE(review): 'loby' (one 'b') is preserved as-is — it is part of the
# generated key path, not a typo to fix here.
LOBY_MESSENGER_SERVICE_BUTTON_HEADER = '#tooltips:loby_messenger/service_button/header'
LOBY_MESSENGER_SERVICE_BUTTON_BODY = '#tooltips:loby_messenger/service_button/body'
LOBY_MESSENGER_CONTACTS_BUTTON_HEADER = '#tooltips:loby_messenger/contacts_button/header'
LOBY_MESSENGER_CONTACTS_BUTTON_BODY = '#tooltips:loby_messenger/contacts_button/body'
LOBY_MESSENGER_VEHICLE_COMPARE_BUTTON_HEADER = '#tooltips:loby_messenger/vehicle_compare_button/header'
LOBY_MESSENGER_VEHICLE_COMPARE_BUTTON_BODY = '#tooltips:loby_messenger/vehicle_compare_button/body'
LOBY_MESSENGER_CHANNELS_BUTTON_HEADER = '#tooltips:loby_messenger/channels_button/header'
LOBY_MESSENGER_CHANNELS_BUTTON_BODY = '#tooltips:loby_messenger/channels_button/body'
LOBY_MESSENGER_CHANNEL_BUTTON_INROAMING = '#tooltips:loby_messenger/channel_button/inRoaming'
LOBY_MESSENGER_CHANNELS_CAROUSEL_BUTTON_LEFT_HEADER = '#tooltips:loby_messenger/channels_carousel_button_left/header'
LOBY_MESSENGER_CHANNELS_CAROUSEL_BUTTON_LEFT_BODY = '#tooltips:loby_messenger/channels_carousel_button_left/body'
LOBY_MESSENGER_NEW_CHANNELS_CAROUSEL_BUTTON_LEFT_HEADER = '#tooltips:loby_messenger/new/channels_carousel_button_left/header'
LOBY_MESSENGER_NEW_CHANNELS_CAROUSEL_BUTTON_LEFT_BODY = '#tooltips:loby_messenger/new/channels_carousel_button_left/body'
LOBY_MESSENGER_CHANNELS_CAROUSEL_BUTTON_RIGHT_HEADER = '#tooltips:loby_messenger/channels_carousel_button_right/header'
LOBY_MESSENGER_CHANNELS_CAROUSEL_BUTTON_RIGHT_BODY = '#tooltips:loby_messenger/channels_carousel_button_right/body'
LOBY_MESSENGER_NEW_CHANNELS_CAROUSEL_BUTTON_RIGHT_HEADER = '#tooltips:loby_messenger/new/channels_carousel_button_right/header'
LOBY_MESSENGER_NEW_CHANNELS_CAROUSEL_BUTTON_RIGHT_BODY = '#tooltips:loby_messenger/new/channels_carousel_button_right/body'
LOBBY_MESSENGER_REFERRAL_BODY = '#tooltips:lobby_messenger/referral/body'
LOBBY_MESSENGER_REFERRER_BODY = '#tooltips:lobby_messenger/referrer/body'

# --- Lobby header: premium-account purchase ---
LOBBY_HEADER_BUYPREMIUMACCOUNT_DISABLED_HEADER = '#tooltips:lobby/header/buyPremiumAccount/disabled/header'
LOBBY_HEADER_BUYPREMIUMACCOUNT_DISABLED_BODY = '#tooltips:lobby/header/buyPremiumAccount/disabled/body'

# --- Training room ---
TRAINING_CREATE_INVITES_CHECKBOX_HEADER = '#tooltips:training/create/invites_checkbox/header'
TRAINING_CREATE_INVITES_CHECKBOX_BODY = '#tooltips:training/create/invites_checkbox/body'
TRAINING_OBSERVER_BTN_HEADER = '#tooltips:training/observer/btn/header'
TRAINING_OBSERVER_BTN_BODY = '#tooltips:training/observer/btn/body'
TRAINING_OBSERVER_SELECTEDICON_HEADER = '#tooltips:training/observer/selectedicon/header'
TRAINING_OBSERVER_SELECTEDICON_BODY = '#tooltips:training/observer/selectedicon/body'
TRAINING_OBSERVER_ICON_HEADER = '#tooltips:training/observer/icon/header'
TRAINING_OBSERVER_ICON_BODY = '#tooltips:training/observer/icon/body'

# --- Barracks: tankman actions and berth counts ---
BARRACKS_TANKMEN_UNLOAD_HEADER = '#tooltips:barracks/tankmen/unload/header'
BARRACKS_TANKMEN_UNLOAD_BODY = '#tooltips:barracks/tankmen/unload/body'
BARRACKS_TANKMEN_DISMISS_HEADER = '#tooltips:barracks/tankmen/dismiss/header'
BARRACKS_TANKMEN_DISMISS_BODY = '#tooltips:barracks/tankmen/dismiss/body'
BARRACKS_TANKMEN_RECOVERYBTN_HEADER = '#tooltips:barracks/tankmen/recoveryBtn/header'
BARRACKS_TANKMEN_RECOVERY_HEADER = '#tooltips:barracks/tankmen/recovery/header'
BARRACKS_TANKMEN_RECOVERY_FREE_BODY = '#tooltips:barracks/tankmen/recovery/free/body'
BARRACKS_TANKMEN_RECOVERY_GOLD_BODY = '#tooltips:barracks/tankmen/recovery/gold/body'
BARRACKS_PLACESCOUNT_DISMISS_HEADER = '#tooltips:barracks/placesCount/dismiss/header'
BARRACKS_PLACESCOUNT_DISMISS_BODY = '#tooltips:barracks/placesCount/dismiss/body'
BARRACKS_TANKMEN_RECOVERY_NEWSKILL = '#tooltips:barracks/tankmen/recovery/newSkill'

# --- Header info: player-counter states and version hint ---
HEADER_INFO_PLAYERS_UNAVAILABLE_HEADER = '#tooltips:header/info/players_unavailable/header'
HEADER_INFO_PLAYERS_UNAVAILABLE_BODY = '#tooltips:header/info/players_unavailable/body'
HEADER_INFO_PLAYERS_ONLINE_REGION_HEADER = '#tooltips:header/info/players_online_region/header'
HEADER_INFO_PLAYERS_ONLINE_REGION_BODY = '#tooltips:header/info/players_online_region/body'
HEADER_INFO_PLAYERS_ONLINE_FULL_HEADER = '#tooltips:header/info/players_online_full/header'
HEADER_INFO_PLAYERS_ONLINE_FULL_BODY = '#tooltips:header/info/players_online_full/body'
HEADER_VERSIONINFOHINT = '#tooltips:header/versionInfoHint'

# --- Settings window: key-binding warnings ---
SETTING_WINDOW_CONTROLS_KEY_INPUT_WARNING = '#tooltips:setting_window/controls/key_input/warning'
SETTING_WINDOW_CONTROLS_KEY_INPUT_PTT_WARNING = '#tooltips:setting_window/controls/key_input/ptt/warning'

# --- Header: main navigation buttons ---
HEADER_BUTTONS_HANGAR_HEADER = '#tooltips:header/buttons/hangar/header'
HEADER_BUTTONS_HANGAR_BODY = '#tooltips:header/buttons/hangar/body'
HEADER_BUTTONS_INVENTORY_HEADER = '#tooltips:header/buttons/inventory/header'
HEADER_BUTTONS_INVENTORY_BODY = '#tooltips:header/buttons/inventory/body'
HEADER_BUTTONS_SHOP_HEADER = '#tooltips:header/buttons/shop/header'
HEADER_BUTTONS_SHOP_BODY = '#tooltips:header/buttons/shop/body'
HEADER_BUTTONS_PROFILE_HEADER = '#tooltips:header/buttons/profile/header'
HEADER_BUTTONS_PROFILE_BODY = '#tooltips:header/buttons/profile/body'
HEADER_BUTTONS_TECHTREE_HEADER = '#tooltips:header/buttons/techtree/header'
HEADER_BUTTONS_TECHTREE_BODY = '#tooltips:header/buttons/techtree/body'
HEADER_BUTTONS_BARRACKS_HEADER = '#tooltips:header/buttons/barracks/header'
HEADER_BUTTONS_BARRACKS_BODY = '#tooltips:header/buttons/barracks/body'
HEADER_BUTTONS_FORTS_HEADER = '#tooltips:header/buttons/forts/header'
HEADER_BUTTONS_FORTS_BODY = '#tooltips:header/buttons/forts/body'
HEADER_BUTTONS_BROWSER_HEADER = '#tooltips:header/buttons/browser/header'
HEADER_BUTTONS_BROWSER_BODY = '#tooltips:header/buttons/browser/body'
HEADER_BUTTONS_ENCYCLOPEDIA_HEADER = '#tooltips:header/buttons/encyclopedia/header'
HEADER_BUTTONS_ENCYCLOPEDIA_BODY = '#tooltips:header/buttons/encyclopedia/body'

# --- Barracks: berth slot states ---
BARRACKS_ITEM_EMPTY_HEADER = '#tooltips:barracks/item_empty/header'
BARRACKS_ITEM_EMPTY_BODY = '#tooltips:barracks/item_empty/body'
BARRACKS_ITEM_BUY_HEADER = '#tooltips:barracks/item_buy/header'
BARRACKS_ITEM_BUY_BODY = '#tooltips:barracks/item_buy/body'

# --- Header: forts button disabled states ---
HEADER_BUTTONS_FORTS_TURNEDOFF_HEADER = '#tooltips:header/buttons/forts/turnedOff/header'
HEADER_BUTTONS_FORTS_TURNEDOFF_BODY = '#tooltips:header/buttons/forts/turnedOff/body'
HEADER_BUTTONS_FORTS_SANDBOX_TURNEDOFF_HEADER = '#tooltips:header/buttons/forts/sandbox/turnedOff/header'
HEADER_BUTTONS_FORTS_SANDBOX_TURNEDOFF_BODY = '#tooltips:header/buttons/forts/sandbox/turnedOff/body'

# --- Research tree graph: node hints ---
VEHICLE_GRAPH_NOTES_SHOWCONTEXTMENU = '#tooltips:vehicle/graph/notes/showContextMenu'
VEHICLE_GRAPH_NOTES_SHOWINFOWINDOW = '#tooltips:vehicle/graph/notes/showInfoWindow'
VEHICLE_GRAPH_NOTES_VEHICLEUNLOCK = '#tooltips:vehicle/graph/notes/vehicleUnlock'
VEHICLE_GRAPH_NOTES_MODULEUNLOCK = '#tooltips:vehicle/graph/notes/moduleUnlock'
VEHICLE_GRAPH_NOTES_EQUIP = '#tooltips:vehicle/graph/notes/equip'
VEHICLE_GRAPH_NOTES_BUYANDEQUIP = '#tooltips:vehicle/graph/notes/buyAndEquip'
VEHICLE_GRAPH_NOTES_GOTONATIONTREE = '#tooltips:vehicle/graph/notes/goToNationTree'
VEHICLE_GRAPH_NOTES_GOTONEXTVEHICLE = '#tooltips:vehicle/graph/notes/goToNextVehicle'
VEHICLE_GRAPH_BODY_MODULEINSTALLED = '#tooltips:vehicle/graph/body/moduleInstalled'
VEHICLE_GRAPH_BODY_NOTENOUGH = '#tooltips:vehicle/graph/body/notEnough'

# --- Auto repair / resupply toggles ---
REPAIR_AUTO_HEADER = '#tooltips:repair/auto/header'
REPAIR_AUTO_BODY = '#tooltips:repair/auto/body'
AMMO_AUTO_HEADER = '#tooltips:ammo/auto/header'
AMMO_AUTO_BODY = '#tooltips:ammo/auto/body'
EQUIPMENT_AUTO_HEADER = '#tooltips:equipment/auto/header'
EQUIPMENT_AUTO_BODY = '#tooltips:equipment/auto/body'
EQUIPMENT_EMPTY_HEADER = '#tooltips:equipment/empty/header'
EQUIPMENT_EMPTY_BODY = '#tooltips:equipment/empty/body'

# --- Settings dialog: sound options ---
SETTINGS_DIALOG_SOUND_PTTKEY_HEADER = '#tooltips:settings_dialog/sound/PTTKey/header'
SETTINGS_DIALOG_SOUND_PTTKEY_BODY = '#tooltips:settings_dialog/sound/PTTKey/body'
SETTINGS_DIALOG_SOUND_ALTERNATIVEVOICES_BODY = '#tooltips:settings_dialog/sound/alternativeVoices/body'
SETTINGS_DIALOG_SOUND_SOUNDMODEINVALID = '#tooltips:settings_dialog/sound/soundModeInvalid'

# --- Customization: camouflage / emblems / inscriptions / horn ---
CUSTOMIZATION_CAMOUFLAGE_CURRENT_DROP_HEADER = '#tooltips:customization/camouflage/current/drop/header'
CUSTOMIZATION_CAMOUFLAGE_CURRENT_DROP_BODY = '#tooltips:customization/camouflage/current/drop/body'
CUSTOMIZATION_CAMOUFLAGE_CURRENT_DISMOUNT_HEADER = '#tooltips:customization/camouflage/current/dismount/header'
CUSTOMIZATION_CAMOUFLAGE_CURRENT_DISMOUNT_BODY = '#tooltips:customization/camouflage/current/dismount/body'
CUSTOMIZATION_HORN_CURRENT_DROP_HEADER = '#tooltips:customization/horn/current/drop/header'
CUSTOMIZATION_HORN_CURRENT_DROP_BODY = '#tooltips:customization/horn/current/drop/body'
CUSTOMIZATION_EMBLEMLEFT_CURRENT_DROP_HEADER = '#tooltips:customization/emblemLeft/current/drop/header'
CUSTOMIZATION_EMBLEMLEFT_CURRENT_DROP_BODY = '#tooltips:customization/emblemLeft/current/drop/body'
CUSTOMIZATION_EMBLEMLEFT_CURRENT_DISMOUNT_HEADER = '#tooltips:customization/emblemLeft/current/dismount/header'
CUSTOMIZATION_EMBLEMLEFT_CURRENT_DISMOUNT_BODY = '#tooltips:customization/emblemLeft/current/dismount/body'
CUSTOMIZATION_EMBLEMRIGHT_CURRENT_DROP_HEADER = '#tooltips:customization/emblemRight/current/drop/header'
CUSTOMIZATION_EMBLEMRIGHT_CURRENT_DROP_BODY = '#tooltips:customization/emblemRight/current/drop/body'
CUSTOMIZATION_EMBLEMRIGHT_CURRENT_DISMOUNT_HEADER = '#tooltips:customization/emblemRight/current/dismount/header'
CUSTOMIZATION_EMBLEMRIGHT_CURRENT_DISMOUNT_BODY = '#tooltips:customization/emblemRight/current/dismount/body'
CUSTOMIZATION_INSCRIPTIONLEFT_CURRENT_DROP_HEADER = '#tooltips:customization/inscriptionLeft/current/drop/header'
CUSTOMIZATION_INSCRIPTIONLEFT_CURRENT_DROP_BODY = '#tooltips:customization/inscriptionLeft/current/drop/body'
CUSTOMIZATION_INSCRIPTIONLEFT_CURRENT_DISMOUNT_HEADER = '#tooltips:customization/inscriptionLeft/current/dismount/header'
CUSTOMIZATION_INSCRIPTIONLEFT_CURRENT_DISMOUNT_BODY = '#tooltips:customization/inscriptionLeft/current/dismount/body'
CUSTOMIZATION_INSCRIPTIONRIGHT_CURRENT_DROP_HEADER = '#tooltips:customization/inscriptionRight/current/drop/header'
CUSTOMIZATION_INSCRIPTIONRIGHT_CURRENT_DROP_BODY = '#tooltips:customization/inscriptionRight/current/drop/body'
CUSTOMIZATION_INSCRIPTIONRIGHT_CURRENT_DISMOUNT_HEADER = '#tooltips:customization/inscriptionRight/current/dismount/header'
CUSTOMIZATION_INSCRIPTIONRIGHT_CURRENT_DISMOUNT_BODY = '#tooltips:customization/inscriptionRight/current/dismount/body'
CUSTOMIZATION_CAMOUFLAGE_WINTER = '#tooltips:customization/camouflage/winter'
CUSTOMIZATION_CAMOUFLAGE_SUMMER = '#tooltips:customization/camouflage/summer'
CUSTOMIZATION_CAMOUFLAGE_DESERT = '#tooltips:customization/camouflage/desert'
CUSTOMIZATION_TABS_INROAMING = '#tooltips:customization/tabs/inRoaming'
CUSTOMIZATION_CAMOUFLAGE_EMPTY = '#tooltips:customization/camouflage/empty'
CUSTOMIZATION_CAMOUFLAGE_IGR_HEADER = '#tooltips:customization/camouflage/igr/header'
CUSTOMIZATION_CAMOUFLAGE_IGR_BODY = '#tooltips:customization/camouflage/igr/body'
CUSTOMIZATION_EMBLEM_IGR_HEADER = '#tooltips:customization/emblem/igr/header'
CUSTOMIZATION_EMBLEM_IGR_BODY = '#tooltips:customization/emblem/igr/body'
CUSTOMIZATION_INSCRIPTION_IGR_HEADER = '#tooltips:customization/inscription/igr/header'
CUSTOMIZATION_INSCRIPTION_IGR_BODY = '#tooltips:customization/inscription/igr/body'
CUSTOMIZATION_QUESTAWARD_CURRENTVEHICLE = '#tooltips:customization/questAward/currentVehicle'
CUSTOMIZATION_QUESTAWARD_EXACTVEHICLE = '#tooltips:customization/questAward/exactVehicle'
CUSTOMIZATION_QUESTAWARD_DENYVEHICLE = '#tooltips:customization/questAward/denyVehicle'

# --- Vehicle parameter tooltips ---
VEHICLEPARAMS_SPEEDLIMITS = '#tooltips:vehicleParams/speedLimits'
VEHICLEPARAMS_ENGINEPOWER = '#tooltips:vehicleParams/enginePower'
VEHICLEPARAMS_CHASSISROTATIONSPEED = '#tooltips:vehicleParams/chassisRotationSpeed'
VEHICLEPARAMS_DAMAGE = '#tooltips:vehicleParams/damage'
VEHICLEPARAMS_HULLARMOR = '#tooltips:vehicleParams/hullArmor'
VEHICLEPARAMS_TURRETARMOR = '#tooltips:vehicleParams/turretArmor'
VEHICLEPARAMS_PIERCINGPOWER = '#tooltips:vehicleParams/piercingPower'

# --- Level labels (tiers 1-10) ---
LEVEL_1 = '#tooltips:level/1'
LEVEL_2 = '#tooltips:level/2'
LEVEL_3 = '#tooltips:level/3'
LEVEL_4 = '#tooltips:level/4'
LEVEL_5 = '#tooltips:level/5'
LEVEL_6 = '#tooltips:level/6'
LEVEL_7 = '#tooltips:level/7'
LEVEL_8 = '#tooltips:level/8'
LEVEL_9 = '#tooltips:level/9'
LEVEL_10 = '#tooltips:level/10'

# --- Vehicle info tooltip fields (prices, rent, crew, XP, ...) ---
VEHICLE_INVENTORYCOUNT = '#tooltips:vehicle/inventoryCount'
VEHICLE_VEHICLECOUNT = '#tooltips:vehicle/vehicleCount'
VEHICLE_LEVEL = '#tooltips:vehicle/level'
VEHICLE_MASTERING = '#tooltips:vehicle/mastering'
VEHICLE_MASTERING_VERYSIMPLE = '#tooltips:vehicle/mastering/verySimple'
VEHICLE_STATS_FOOTNOTE = '#tooltips:vehicle/stats/footnote'
VEHICLE_ELITE = '#tooltips:vehicle/elite'
VEHICLE_CREW = '#tooltips:vehicle/crew'
VEHICLE_CREW_AWARD = '#tooltips:vehicle/crew_award'
VEHICLE_AMMO = '#tooltips:vehicle/ammo'
VEHICLE_XP = '#tooltips:vehicle/xp'
VEHICLE_MULTIPLIED_XP = '#tooltips:vehicle/multiplied_xp'
VEHICLE_UNLOCK_PRICE = '#tooltips:vehicle/unlock_price'
VEHICLE_BUY_PRICE = '#tooltips:vehicle/buy_price'
VEHICLE_RESTORE_PRICE = '#tooltips:vehicle/restore_price'
VEHICLE_ACTION_PRC = '#tooltips:vehicle/action_prc'
VEHICLE_MINRENTALSPRICE = '#tooltips:vehicle/minRentalsPrice'
VEHICLE_BUY_PRICE_ACTION = '#tooltips:vehicle/buy_price_action'
VEHICLE_SELL_PRICE = '#tooltips:vehicle/sell_price'
VEHICLE_EQUIPMENTS = '#tooltips:vehicle/equipments'
VEHICLE_DEVICES = '#tooltips:vehicle/devices'
VEHICLE_FAVORITE = '#tooltips:vehicle/favorite'
VEHICLE_DAILYXPFACTOR = '#tooltips:vehicle/dailyXPFactor'
VEHICLE_SPEEDLIMITS = '#tooltips:vehicle/speedLimits'
VEHICLE_DAMAGE = '#tooltips:vehicle/damage'
VEHICLE_TURRETROTATIONSPEED = '#tooltips:vehicle/turretRotationSpeed'
VEHICLE_RADIODISTANCE = '#tooltips:vehicle/radioDistance'
VEHICLE_TEXTDELIMITER_OR = '#tooltips:vehicle/textDelimiter/or'
VEHICLE_RENTLEFT_DAYS = '#tooltips:vehicle/rentLeft/days'
VEHICLE_RENTLEFT_HOURS = '#tooltips:vehicle/rentLeft/hours'
VEHICLE_RENTLEFTFUTURE_DAYS = '#tooltips:vehicle/rentLeftFuture/days'
VEHICLE_RENTLEFTFUTURE_HOURS = '#tooltips:vehicle/rentLeftFuture/hours'
VEHICLE_RENTAVAILABLE = '#tooltips:vehicle/rentAvailable'
VEHICLE_RESTORELEFT_DAYS = '#tooltips:vehicle/restoreLeft/days'
VEHICLE_RESTORELEFT_HOURS = '#tooltips:vehicle/restoreLeft/hours'
VEHICLE_RENTLEFT_BATTLES = '#tooltips:vehicle/rentLeft/battles'
VEHICLEIGR_SPECIALABILITY = '#tooltips:vehicleIgr/specialAbility'
VEHICLEIGR_DAYS = '#tooltips:vehicleIgr/days'
VEHICLEIGR_HOURS = '#tooltips:vehicleIgr/hours'
VEHICLE_DEAL_TELECOM_MAIN = '#tooltips:vehicle/deal/telecom/main'
VEHICLE_TRADE = '#tooltips:vehicle/trade'

# --- Crew skill purchase ---
BUYSKILL_FULLY = '#tooltips:buySkill/fully'
BUYSKILL_PARTLY = '#tooltips:buySkill/partly'
BUYSKILL_HEADER = '#tooltips:buySkill/header'

# --- Quests: progress, statuses, seasonal awards ---
QUESTS_PROGRESS_STRATEGIC = '#tooltips:quests/progress/strategic'
QUESTS_PROGRESS_CURRENT = '#tooltips:quests/progress/current'
QUESTS_PROGRESS_EARNEDINBATTLE = '#tooltips:quests/progress/earnedInBattle'
QUESTS_IGR = '#tooltips:quests/igr'
QUESTS_NEWLABEL_TASK = '#tooltips:quests/newLabel/task'
QUESTS_NEWLABEL_ACTION = '#tooltips:quests/newLabel/action'
QUESTS_LINKBTN_TASK = '#tooltips:quests/linkBtn/task'
QUESTS_DISABLELINKBTN_TASK = '#tooltips:quests/disableLinkBtn/task'
QUESTS_STATUS_DONE = '#tooltips:quests/status/done'
QUESTS_STATUS_NOTREADY = '#tooltips:quests/status/notready'
QUESTS_STATUS_NOTDONE = '#tooltips:quests/status/notDone'
QUESTS_RENDERER_LABEL = '#tooltips:quests/renderer/label'
QUESTS_COUNTER_LABEL = '#tooltips:quests/counter/label'
QUESTS_NOTIFIER_HEADER = '#tooltips:quests/notifier/header'
QUESTS_NOTIFIER_BODY = '#tooltips:quests/notifier/body'
QUESTS_VEHICLESEASONAWARD_ABOUTBTN_HEADER = '#tooltips:quests/vehicleSeasonAward/aboutBtn/header'
QUESTS_VEHICLESEASONAWARD_ABOUTBTN_BODY = '#tooltips:quests/vehicleSeasonAward/aboutBtn/body'
QUESTS_VEHICLESBONUS_TITLE = '#tooltips:quests/vehiclesBonus/title'
QUESTS_VEHICLESBONUS_VEHICLESLEFT = '#tooltips:quests/vehiclesBonus/vehiclesLeft'
QUESTS_SEASONAWARD_TITLE = '#tooltips:quests/seasonAward/title'
QUESTS_SEASONAWARD_FEMALETANKMAN_DESCRIPTION_PART1 = '#tooltips:quests/seasonAward/femaleTankman/description/part1'
QUESTS_SEASONAWARD_FEMALETANKMAN_DESCRIPTION_PART2 = '#tooltips:quests/seasonAward/femaleTankman/description/part2'
QUESTS_SEASONAWARD_TOKENS_DESCRIPTION = '#tooltips:quests/seasonAward/tokens/description'

# --- Vehicle status (hangar readiness / lock / damage states) ---
VEHICLESTATUS_BODY = '#tooltips:vehicleStatus/body'
VEHICLESTATUS_BATTLE_HEADER = '#tooltips:vehicleStatus/battle/header'
VEHICLESTATUS_LOCKED_HEADER = '#tooltips:vehicleStatus/locked/header'
VEHICLESTATUS_RENTALISOVER_HEADER = '#tooltips:vehicleStatus/rentalIsOver/header'
VEHICLESTATUS_IGRRENTALISOVER_HEADER = '#tooltips:vehicleStatus/igrRentalIsOver/header'
VEHICLESTATUS_INPREMIUMIGRONLY_HEADER = '#tooltips:vehicleStatus/inPremiumIgrOnly/header'
VEHICLESTATUS_INPREBATTLE_HEADER = '#tooltips:vehicleStatus/inPrebattle/header'
VEHICLESTATUS_CLANLOCKED_HEADER = '#tooltips:vehicleStatus/clanLocked/header'
VEHICLESTATUS_AMMONOTFULL_HEADER = '#tooltips:vehicleStatus/ammoNotFull/header'
VEHICLESTATUS_AMMONOTFULLEVENTS_HEADER = '#tooltips:vehicleStatus/ammoNotFullEvents/header'
VEHICLESTATUS_DAMAGED_HEADER = '#tooltips:vehicleStatus/damaged/header'
VEHICLESTATUS_DESTROYED_HEADER = '#tooltips:vehicleStatus/destroyed/header'
VEHICLESTATUS_EXPLODED_HEADER = '#tooltips:vehicleStatus/exploded/header'
VEHICLESTATUS_NOTPRESENT_HEADER = '#tooltips:vehicleStatus/notpresent/header'
VEHICLESTATUS_UNDAMAGED_HEADER = '#tooltips:vehicleStatus/undamaged/header'
VEHICLESTATUS_CREWNOTFULL_HEADER = '#tooltips:vehicleStatus/crewNotFull/header'
VEHICLESTATUS_NOTUNLOCKED_HEADER = '#tooltips:vehicleStatus/notUnlocked/header'
VEHICLESTATUS_INHANGAR_HEADER = '#tooltips:vehicleStatus/inHangar/header'
VEHICLESTATUS_RESTORECOOLDOWN_DAYS = '#tooltips:vehicleStatus/restoreCooldown/days'
VEHICLESTATUS_RESTORECOOLDOWN_HOURS = '#tooltips:vehicleStatus/restoreCooldown/hours'
VEHICLESTATUS_SERVERRESTRICTION_HEADER = '#tooltips:vehicleStatus/serverRestriction/header'
VEHICLESTATUS_DEALISOVER_HEADER = '#tooltips:vehicleStatus/dealIsOver/header'
VEHICLESTATUS_DEALISOVER_TEXT = '#tooltips:vehicleStatus/dealIsOver/text'
VEHICLESTATUS_NOTENOUGHCREDITS_HEADER = '#tooltips:vehicleStatus/notEnoughCredits/header'
VEHICLESTATUS_NOTENOUGHCREDITS_TEXT = '#tooltips:vehicleStatus/notEnoughCredits/text'
VEHICLESTATUS_NOTENOUGHGOLD_HEADER = '#tooltips:vehicleStatus/notEnoughGold/header'
VEHICLESTATUS_NOTENOUGHGOLD_TEXT = '#tooltips:vehicleStatus/notEnoughGold/text'
VEHICLESTATUS_OPERATIONERROR_HEADER = '#tooltips:vehicleStatus/operationError/header'
VEHICLESTATUS_NOTSUITABLE_HEADER = '#tooltips:vehicleStatus/notSuitable/header'
VEHICLESTATUS_GROUP_IS_NOT_READY_HEADER = '#tooltips:vehicleStatus/group_is_not_ready/header'
VEHICLESTATUS_GROUPAMMONOTFULL_HEADER = '#tooltips:vehicleStatus/groupAmmoNotFull/header'
VEHICLESTATUS_FALLOUT_ONLY_HEADER = '#tooltips:vehicleStatus/fallout_only/header'
VEHICLESTATUS_NOT_SUITABLE_HEADER = '#tooltips:vehicleStatus/not_suitable/header'
VEHICLESTATUS_UNSUITABLETOQUEUE_HEADER = '#tooltips:vehicleStatus/unsuitableToQueue/header'
VEHICLESTATUS_UNSUITABLETOUNIT_HEADER = '#tooltips:vehicleStatus/unsuitableToUnit/header'
VEHICLESTATUS_ROTATIONGROUPUNLOCKED_HEADER = '#tooltips:vehicleStatus/rotationGroupUnlocked/header'

# --- Trade-in vehicle status ---
TRADEINVEHICLESTATUS_BATTLE_HEADER = '#tooltips:tradeInVehicleStatus/battle/header'
TRADEINVEHICLESTATUS_BATTLE_BODY = '#tooltips:tradeInVehicleStatus/battle/body'
TRADEINVEHICLESTATUS_DAMAGED_HEADER = '#tooltips:tradeInVehicleStatus/damaged/header'
TRADEINVEHICLESTATUS_DAMAGED_BODY = '#tooltips:tradeInVehicleStatus/damaged/body'
TRADEINVEHICLESTATUS_EXPLODED_HEADER = '#tooltips:tradeInVehicleStatus/exploded/header'
TRADEINVEHICLESTATUS_EXPLODED_BODY = '#tooltips:tradeInVehicleStatus/exploded/body'
TRADEINVEHICLESTATUS_DESTROYED_HEADER = '#tooltips:tradeInVehicleStatus/destroyed/header'
TRADEINVEHICLESTATUS_DESTROYED_BODY = '#tooltips:tradeInVehicleStatus/destroyed/body'
TRADEINVEHICLESTATUS_INPREBATTLE_HEADER = '#tooltips:tradeInVehicleStatus/inPrebattle/header'
TRADEINVEHICLESTATUS_INPREBATTLE_BODY = '#tooltips:tradeInVehicleStatus/inPrebattle/body'
TRADEINVEHICLESTATUS_LOCKED_HEADER = '#tooltips:tradeInVehicleStatus/locked/header'
TRADEINVEHICLESTATUS_LOCKED_BODY = '#tooltips:tradeInVehicleStatus/locked/body'

# --- Item status ---
ITEMSTATUS_NOTENOUGH = '#tooltips:itemStatus/notEnough'

# --- Tank carousel tooltip: vehicle class labels (normal / elite) ---
TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_LIGHTTANK = '#tooltips:tankCaruselTooltip/vehicleType/normal/lightTank'
TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_MEDIUMTANK = '#tooltips:tankCaruselTooltip/vehicleType/normal/mediumTank'
TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_HEAVYTANK = '#tooltips:tankCaruselTooltip/vehicleType/normal/heavyTank'
TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_AT_SPG = '#tooltips:tankCaruselTooltip/vehicleType/normal/AT-SPG'
TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_SPG = '#tooltips:tankCaruselTooltip/vehicleType/normal/SPG'
TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_LIGHTTANK = '#tooltips:tankCaruselTooltip/vehicleType/elite/lightTank'
TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_MEDIUMTANK = '#tooltips:tankCaruselTooltip/vehicleType/elite/mediumTank'
TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_HEAVYTANK = '#tooltips:tankCaruselTooltip/vehicleType/elite/heavyTank'
TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_AT_SPG = '#tooltips:tankCaruselTooltip/vehicleType/elite/AT-SPG'
TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_SPG = '#tooltips:tankCaruselTooltip/vehicleType/elite/SPG'

# --- Tankman status warnings ---
TANKMAN_STATUS_WRONGVEHICLE_HEADER = '#tooltips:tankman/status/wrongVehicle/header'
TANKMAN_STATUS_WRONGVEHICLE_TEXT = '#tooltips:tankman/status/wrongVehicle/text'
TANKMAN_STATUS_WRONGPREMIUMVEHICLE_HEADER = '#tooltips:tankman/status/wrongPremiumVehicle/header'
TANKMAN_STATUS_WRONGPREMIUMVEHICLE_TEXT = '#tooltips:tankman/status/wrongPremiumVehicle/text'
TANKMAN_STATUS_INACTIVESKILLS_HEADER = '#tooltips:tankman/status/inactiveSkills/header'
TANKMAN_STATUS_INACTIVESKILLS_TEXT = '#tooltips:tankman/status/inactiveSkills/text'
TANKMAN_STATUS_INACTIVESKILLSROLEFORMAT = '#tooltips:tankman/status/inactiveSkillsRoleFormat'
TANKMAN_STATUS_WRONGROLELEVEL = '#tooltips:tankman/status/wrongRoleLevel'

# --- Research page: module / vehicle status messages ---
RESEARCHPAGE_MODULE_STATUS_ROOTVEHICLEISLOCKED_HEADER = '#tooltips:researchPage/module/status/rootVehicleIsLocked/header'
RESEARCHPAGE_MODULE_STATUS_ROOTVEHICLEISLOCKED_TEXT = '#tooltips:researchPage/module/status/rootVehicleIsLocked/text'
RESEARCHPAGE_MODULE_STATUS_PARENTMODULEISLOCKED_HEADER = '#tooltips:researchPage/module/status/parentModuleIsLocked/header'
RESEARCHPAGE_MODULE_STATUS_PARENTMODULEISLOCKED_TEXT = '#tooltips:researchPage/module/status/parentModuleIsLocked/text'
RESEARCHPAGE_MODULE_STATUS_NOTENOUGHXP_HEADER = '#tooltips:researchPage/module/status/notEnoughXP/header'
RESEARCHPAGE_MODULE_STATUS_NOTENOUGHXP_TEXT = '#tooltips:researchPage/module/status/notEnoughXP/text'
RESEARCHPAGE_MODULE_STATUS_NEEDTOBUYTANK_HEADER = '#tooltips:researchPage/module/status/needToBuyTank/header'
RESEARCHPAGE_MODULE_STATUS_NEEDTOBUYTANK_TEXT = '#tooltips:researchPage/module/status/needToBuyTank/text'
RESEARCHPAGE_MODULE_STATUS_VEHICLEISINBATTLE_HEADER = '#tooltips:researchPage/module/status/vehicleIsInBattle/header'
RESEARCHPAGE_MODULE_STATUS_VEHICLEISINBATTLE_TEXT = '#tooltips:researchPage/module/status/vehicleIsInBattle/text'
RESEARCHPAGE_MODULE_STATUS_VEHICLEISREADYTOFIGHT_HEADER = '#tooltips:researchPage/module/status/vehicleIsReadyToFight/header'
RESEARCHPAGE_MODULE_STATUS_VEHICLEISREADYTOFIGHT_TEXT = '#tooltips:researchPage/module/status/vehicleIsReadyToFight/text'
RESEARCHPAGE_MODULE_STATUS_VEHICLEISBROKEN_HEADER = '#tooltips:researchPage/module/status/vehicleIsBroken/header'
RESEARCHPAGE_MODULE_STATUS_VEHICLEISBROKEN_TEXT = '#tooltips:researchPage/module/status/vehicleIsBroken/text'
RESEARCHPAGE_VEHICLE_STATUS_PARENTMODULEISLOCKED_HEADER = '#tooltips:researchPage/vehicle/status/parentModuleIsLocked/header'
RESEARCHPAGE_VEHICLE_STATUS_PARENTMODULEISLOCKED_TEXT = '#tooltips:researchPage/vehicle/status/parentModuleIsLocked/text'
RESEARCHPAGE_VEHICLE_BUTTON_COMPARE_ADD_HEADER = '#tooltips:researchPage/vehicle/button/compare/add/header'
RESEARCHPAGE_VEHICLE_BUTTON_COMPARE_ADD_BODY = '#tooltips:researchPage/vehicle/button/compare/add/body'
RESEARCHPAGE_VEHICLE_BUTTON_COMPARE_DISABLED_HEADER = '#tooltips:researchPage/vehicle/button/compare/disabled/header'
RESEARCHPAGE_VEHICLE_BUTTON_COMPARE_DISABLED_BODY = '#tooltips:researchPage/vehicle/button/compare/disabled/body'

# --- Tech tree page: nation names ---
TECHTREEPAGE_NATIONS_USSR = '#tooltips:techTreePage/nations/ussr'
TECHTREEPAGE_NATIONS_GERMANY = '#tooltips:techTreePage/nations/germany'
TECHTREEPAGE_NATIONS_USA = '#tooltips:techTreePage/nations/usa'
TECHTREEPAGE_NATIONS_CHINA = '#tooltips:techTreePage/nations/china'
TECHTREEPAGE_NATIONS_FRANCE = '#tooltips:techTreePage/nations/france'
TECHTREEPAGE_NATIONS_UK = '#tooltips:techTreePage/nations/uk'
TECHTREEPAGE_NATIONS_JAPAN = '#tooltips:techTreePage/nations/japan'
TECHTREEPAGE_NATIONS_CZECH = '#tooltips:techTreePage/nations/czech'
TECHTREEPAGE_NATIONS_SWEDEN = '#tooltips:techTreePage/nations/sweden'

# --- Vehicle sell dialog alerts ---
VEHICLESELLDIALOG_RENDERER_ALERTICON_HEADER = '#tooltips:vehicleSellDialog/renderer/alertIcon/header'
VEHICLESELLDIALOG_RENDERER_ALERTICON_BODY = '#tooltips:vehicleSellDialog/renderer/alertIcon/body'
VEHICLESELLDIALOG_CREW_ALERTICON_RECOVERY_HEADER = '#tooltips:vehicleSellDialog/crew/alertIcon/recovery/header'
VEHICLESELLDIALOG_CREW_ALERTICON_RECOVERY_BODY = '#tooltips:vehicleSellDialog/crew/alertIcon/recovery/body'

# --- Achievements ---
ACHIEVEMENT_CUSTOM_NOTENOUGH = '#tooltips:achievement/custom/notEnough'

# --- IGR (in-game room / premium program) ---
IGR_TITLE = '#tooltips:igr/title'
IGR_MSG1 = '#tooltips:igr/msg1'
IGR_MSG2 = '#tooltips:igr/msg2'
IGR_MSG3 = '#tooltips:igr/msg3'
IGR_PROGRESSHEADER = '#tooltips:igr/progressHeader'

# --- Suitable-vehicle / selected-vehicle pickers ---
SUITABLEVEHICLE_HEADER = '#tooltips:suitableVehicle/header'
SELECTEDVEHICLE_HEADER = '#tooltips:selectedVehicle/header'
MEDALION_NOVEHICLE_HEADER = '#tooltips:medalion/noVehicle/header'
MEDALION_NOVEHICLE_BODY = '#tooltips:medalion/noVehicle/body'
SUITABLEVEHICLE_SUITABLETITLE = '#tooltips:suitableVehicle/suitableTitle'
SUITABLEVEHICLE_CONDITIONSTITLE = '#tooltips:suitableVehicle/conditionsTitle'
SUITABLEVEHICLE_MORE = '#tooltips:suitableVehicle/more'
SUITABLEVEHICLE_MATCHES = '#tooltips:suitableVehicle/matches'
SUITABLEVEHICLE_UNSUITABLE = '#tooltips:suitableVehicle/unsuitable'
SUITABLEVEHICLE_HIDDENVEHICLECOUNT = '#tooltips:suitableVehicle/hiddenVehicleCount'
CSAUTOSEARCHVEHICLE_HEADER = '#tooltips:csAutoSearchVehicle/header'
CSAUTOSEARCHVEHICLE_SUITABLETITLE = '#tooltips:csAutoSearchVehicle/suitableTitle'
CSAUTOSEARCHVEHICLE_CONDITIONSTITLE = '#tooltips:csAutoSearchVehicle/conditionsTitle'
CSAUTOSEARCHVEHICLE_MORE = '#tooltips:csAutoSearchVehicle/more'
CSAUTOSEARCHVEHICLE_UNSUITABLE = '#tooltips:csAutoSearchVehicle/unsuitable'

# --- Cybersport (team/unit play): selectors, unit list, static rallies ---
CYBERSPORT_VEHICLESELECTOR_NOTREADY = '#tooltips:cyberSport/vehicleSelector/notReady'
CYBERSPORT_VEHICLESELECTOR_OVERFLOWLEVEL_HEADER = '#tooltips:cyberSport/vehicleSelector/overflowLevel/header'
CYBERSPORT_VEHICLESELECTOR_OVERFLOWLEVEL_BODY = '#tooltips:cyberSport/vehicleSelector/overflowLevel/body'
CYBERSPORT_VEHICLESELECTOR_BADVEHICLE_HEADER = '#tooltips:cyberSport/vehicleSelector/badVehicle/header'
CYBERSPORT_VEHICLESELECTOR_BADVEHICLE_BODY = '#tooltips:cyberSport/vehicleSelector/badVehicle/body'
SETTINGSICON_FREEZED_HEADER = '#tooltips:settingsIcon/freezed/header'
SETTINGSICON_FREEZED_BODY = '#tooltips:settingsIcon/freezed/body'
SETTINGSICON_CONDITIONS_HEADER = '#tooltips:settingsIcon/conditions/header'
SETTINGSICON_CONDITIONS_BODY = '#tooltips:settingsIcon/conditions/body'
CYBERSPORT_INTRO_SEARCH_BTN_HEADER = '#tooltips:cyberSport/intro/search/btn/header'
CYBERSPORT_INTRO_SEARCH_BTN_BODY = '#tooltips:cyberSport/intro/search/btn/body'
CYBERSPORT_INTRO_CREATE_BTN_HEADER = '#tooltips:cyberSport/intro/create/btn/header'
CYBERSPORT_INTRO_CREATE_BTN_BODY = '#tooltips:cyberSport/intro/create/btn/body'
CYBERSPORT_INTRO_CREATEBTN_LOOK_HEADER = '#tooltips:cyberSport/intro/createBtn/look/header'
CYBERSPORT_INTRO_CREATEBTN_LOOK_BODY = '#tooltips:cyberSport/intro/createBtn/look/body'
CYBERSPORT_INTRO_CREATEBTN_ASSEMBLETEAM_HEADER = '#tooltips:cyberSport/intro/createBtn/assembleTeam/header'
CYBERSPORT_INTRO_CREATEBTN_ASSEMBLETEAM_BODY = '#tooltips:cyberSport/intro/createBtn/assembleTeam/body'
CYBERSPORT_INTRO_CREATEBTN_JOINTEAM_HEADER = '#tooltips:cyberSport/intro/createBtn/joinTeam/header'
CYBERSPORT_INTRO_CREATEBTN_JOINTEAM_BODY = '#tooltips:cyberSport/intro/createBtn/joinTeam/body'
CYBERSPORT_INTRO_CREATEBTN_ASSEMBLEDISABLED_HEADER = '#tooltips:cyberSport/intro/createBtn/assembleDisabled/header'
CYBERSPORT_INTRO_CREATEBTN_ASSEMBLEDISABLED_BODY = '#tooltips:cyberSport/intro/createBtn/assembleDisabled/body'
CYBERSPORT_INTRO_CREATEBTN_ADDPLAYERS_HEADER = '#tooltips:cyberSport/intro/createBtn/addPlayers/header'
CYBERSPORT_INTRO_CREATEBTN_ADDPLAYERS_BODY = '#tooltips:cyberSport/intro/createBtn/addPlayers/body'
CYBERSPORT_INTRO_CREATEBTN_ADDPLAYERS_PRIVATE_HEADER = '#tooltips:cyberSport/intro/createBtn/addPlayers/private/header'
CYBERSPORT_INTRO_CREATEBTN_ADDPLAYERS_PRIVATE_BODY = '#tooltips:cyberSport/intro/createBtn/addPlayers/private/body'
CYBERSPORT_INTRO_SELECTEDVEHICLEWARN_INCOMPATIBLETYPE_HEADER = '#tooltips:cyberSport/intro/selectedVehicleWarn/incompatibleType/header'
CYBERSPORT_INTRO_SELECTEDVEHICLEWARN_INCOMPATIBLETYPE_BODY = '#tooltips:cyberSport/intro/selectedVehicleWarn/incompatibleType/body'
CYBERSPORT_INTRO_SELECTEDVEHICLEWARN_INCOMPATIBLELEVEL_HEADER = '#tooltips:cyberSport/intro/selectedVehicleWarn/incompatibleLevel/header'
CYBERSPORT_INTRO_SELECTEDVEHICLEWARN_INCOMPATIBLELEVEL_BODY = '#tooltips:cyberSport/intro/selectedVehicleWarn/incompatibleLevel/body'
CYBERSPORT_COMMANDER_STATS = '#tooltips:cyberSport/commander/stats'
CYBERSPORT_CAPTAIN_STATS = '#tooltips:cyberSport/captain/stats'
CYBERSPORT_ALERT_ONANOTHERSERVER = '#tooltips:cyberSport/alert/onAnotherServer'
CYBERSPORT_UNITCOMMAND_ACTION = '#tooltips:cyberSport/unitCommand/action'
CYBERSPORT_UNITLIST_FILTERBYVEHICLE_HEADER = '#tooltips:cyberSport/unitList/filterByVehicle/header'
CYBERSPORT_UNITLIST_FILTERBYVEHICLE_NOVEHICLESSELECTED = '#tooltips:cyberSport/unitList/filterByVehicle/noVehiclesSelected'
CYBERSPORT_UNITLIST_FILTERBYVEHICLE_SOMEVEHICLESSELECTED = '#tooltips:cyberSport/unitList/filterByVehicle/someVehiclesSelected'
CYBERSPORT_UNITLIST_REFRESH_HEADER = '#tooltips:cyberSport/unitList/refresh/header'
CYBERSPORT_UNITLIST_REFRESH_BODY = '#tooltips:cyberSport/unitList/refresh/body'
CYBERSPORT_UNITLIST_PAGINGUP_HEADER = '#tooltips:cyberSport/unitList/pagingUp/header'
CYBERSPORT_UNITLIST_PAGINGUP_BODY = '#tooltips:cyberSport/unitList/pagingUp/body'
CYBERSPORT_UNITLIST_PAGINGDOWN_HEADER = '#tooltips:cyberSport/unitList/pagingDown/header'
CYBERSPORT_UNITLIST_PAGINGDOWN_BODY = '#tooltips:cyberSport/unitList/pagingDown/body'
CYBERSPORT_UNITLIST_JOIN_HEADER = '#tooltips:cyberSport/unitList/join/header'
CYBERSPORT_UNITLIST_JOIN_BODY = '#tooltips:cyberSport/unitList/join/body'
CYBERSPORT_UNITLIST_CREATEBTN_ALREADYRALLYOWNER_HEADER = '#tooltips:cyberSport/unitList/createBtn/alreadyRallyOwner/header'
CYBERSPORT_UNITLIST_CREATEBTN_ALREADYRALLYOWNER_BODY = '#tooltips:cyberSport/unitList/createBtn/alreadyRallyOwner/body'
CYBERSPORT_UNITLIST_CREATEBTN_ALREADYINRALLY_HEADER = '#tooltips:cyberSport/unitList/createBtn/alreadyInRally/header'
CYBERSPORT_UNITLIST_CREATEBTN_ALREADYINRALLY_BODY = '#tooltips:cyberSport/unitList/createBtn/alreadyInRally/body'
CYBERSPORT_STATICRALLYINFO_STATSBATTLESCOUNT_HEADER = '#tooltips:cyberSport/staticRallyInfo/statsBattlesCount/header'
CYBERSPORT_STATICRALLYINFO_STATSBATTLESCOUNT_BODY = '#tooltips:cyberSport/staticRallyInfo/statsBattlesCount/body'
CYBERSPORT_STATICRALLYINFO_STATSWINSPERCENT_HEADER = '#tooltips:cyberSport/staticRallyInfo/statsWinsPercent/header'
CYBERSPORT_STATICRALLYINFO_STATSWINSPERCENT_BODY = '#tooltips:cyberSport/staticRallyInfo/statsWinsPercent/body'
CYBERSPORT_STATICRALLYINFO_JOINBTN_JOIN_HEADER = '#tooltips:cyberSport/staticRallyInfo/joinBtn/join/header'
CYBERSPORT_STATICRALLYINFO_JOINBTN_JOIN_BODY = '#tooltips:cyberSport/staticRallyInfo/joinBtn/join/body'
CYBERSPORT_STATICRALLYINFO_JOINBTN_APPLICATIONCOOLDOWN_HEADER = '#tooltips:cyberSport/staticRallyInfo/joinBtn/applicationCooldown/header'
CYBERSPORT_STATICRALLYINFO_JOINBTN_APPLICATIONCOOLDOWN_BODY = '#tooltips:cyberSport/staticRallyInfo/joinBtn/applicationCooldown/body'
CYBERSPORT_STATICRALLYINFO_JOINBTN_INPROCESS_HEADER = '#tooltips:cyberSport/staticRallyInfo/joinBtn/inProcess/header'
CYBERSPORT_STATICRALLYINFO_JOINBTN_INPROCESS_BODY = '#tooltips:cyberSport/staticRallyInfo/joinBtn/inProcess/body'
CYBERSPORT_STATICRALLYINFO_JOINBTN_INPROCESSOTHER_HEADER = '#tooltips:cyberSport/staticRallyInfo/joinBtn/inProcessOther/header'
CYBERSPORT_STATICRALLYINFO_JOINBTN_INPROCESSOTHER_BODY = '#tooltips:cyberSport/staticRallyInfo/joinBtn/inProcessOther/body'
CYBERSPORT_STATICRALLYINFO_JOINBTN_ALREADYJOINED_HEADER = '#tooltips:cyberSport/staticRallyInfo/joinBtn/alreadyJoined/header'
CYBERSPORT_STATICRALLYINFO_JOINBTN_ALREADYJOINED_BODY = '#tooltips:cyberSport/staticRallyInfo/joinBtn/alreadyJoined/body'
RALLYINFO_PROFILEBTN_HEADER = '#tooltips:rallyInfo/profileBtn/header'
RALLYINFO_PROFILEBTN_BODY = '#tooltips:rallyInfo/profileBtn/body'
CYBERSPORT_UNITLIST_JOINTOSTATICASLEGIONARY_HEADER = '#tooltips:cyberSport/unitList/joinToStaticAsLegionary/header'
CYBERSPORT_UNITLIST_JOINTOSTATICASLEGIONARY_BODY = '#tooltips:cyberSport/unitList/joinToStaticAsLegionary/body'
CYBERSPORT_UNIT_FREEZE_HEADER = '#tooltips:cyberSport/unit/freeze/header'
CYBERSPORT_UNIT_FREEZE_BODYON = '#tooltips:cyberSport/unit/freeze/bodyOn'
CYBERSPORT_UNIT_FREEZE_BODYOFF = '#tooltips:cyberSport/unit/freeze/bodyOff'
CYBERSPORT_UNIT_CONFIGURE_HEADER = '#tooltips:cyberSport/unit/configure/header'
CYBERSPORT_UNIT_CONFIGURE_BODY = '#tooltips:cyberSport/unit/configure/body'
CYBERSPORT_UNIT_SUMLEVEL_HEADER = '#tooltips:cyberSport/unit/sumLevel/header'
CYBERSPORT_UNIT_SUMLEVEL_BODYNORMAL = '#tooltips:cyberSport/unit/sumLevel/bodyNormal'
CYBERSPORT_UNIT_SUMLEVEL_BODYERROR = '#tooltips:cyberSport/unit/sumLevel/bodyError'
CYBERSPORT_UNIT_ACCESS_HEADER = '#tooltips:cyberSport/unit/access/header'
CYBERSPORT_UNIT_INVITEBTN_HEADER = '#tooltips:cyberSport/unit/inviteBtn/header'
CYBERSPORT_UNIT_INVITEBTN_BODY = '#tooltips:cyberSport/unit/inviteBtn/body'
CYBERSPORT_UNIT_ACCESS_BODYOPEN = '#tooltips:cyberSport/unit/access/bodyOpen'
CYBERSPORT_UNIT_ACCESS_BODYCLOSED = '#tooltips:cyberSport/unit/access/bodyClosed'
CYBERSPORT_UNIT_SLOTLABELCLOSED_HEADER = '#tooltips:cyberSport/unit/slotLabelClosed/header'
CYBERSPORT_UNIT_SLOTLABELCLOSED_BODY = '#tooltips:cyberSport/unit/slotLabelClosed/body'
CYBERSPORT_UNIT_SLOTLABELUNAVAILABLE_HEADER = '#tooltips:cyberSport/unit/slotLabelUnavailable/header'
CYBERSPORT_UNIT_SLOTLABELUNAVAILABLE_BODY = '#tooltips:cyberSport/unit/slotLabelUnavailable/body'
CYBERSPORT_UNIT_TAKEPLACEBTN_HEADER = '#tooltips:cyberSport/unit/takePlaceBtn/header'
CYBERSPORT_UNIT_TAKEPLACEBTN_BODY = '#tooltips:cyberSport/unit/takePlaceBtn/body'
CYBERSPORT_UNIT_TAKEPLACEFIRSTTIMEBTN_HEADER = '#tooltips:cyberSport/unit/takePlaceFirstTimeBtn/header'
CYBERSPORT_UNIT_TAKEPLACEFIRSTTIMEBTN_BODY = '#tooltips:cyberSport/unit/takePlaceFirstTimeBtn/body'
CYBERSPORT_UNIT_COMMAND = '#tooltips:cyberSport/unit/command'
CYBERSPORT_UNIT_SLOT_VEHICLE_NOTREADY_TEMPORALLY_BODY = '#tooltips:cyberSport/unit/slot/vehicle/notReady/temporally/body'
CYBERSPORT_UNIT_SLOT_VEHICLE_NOTREADY_PERMANENTLY_BODY = '#tooltips:cyberSport/unit/slot/vehicle/notReady/permanently/body'
CYBERSPORT_WAITINGPLAYERS_CONFIGALERT_HEADER = '#tooltips:cyberSport/waitingPlayers/configAlert/header'
CYBERSPORT_WAITINGPLAYERS_CONFIGALERT_BODY = '#tooltips:cyberSport/waitingPlayers/configAlert/body'
CYBERSPORT_UNIT_FIGHTBTN_VEHICLENOTVALID_BODY = '#tooltips:cyberSport/unit/fightBtn/vehicleNotValid/body'
CYBERSPORT_UNIT_FIGHTBTN_NOTINSLOT_BODY = '#tooltips:cyberSport/unit/fightBtn/notInSlot/body'
CYBERSPORT_UNIT_FIGHTBTN_PRESSFORREADY_BODY = '#tooltips:cyberSport/unit/fightBtn/pressForReady/body'
CYBERSPORT_UNIT_FIGHTBTN_PRESSFORNOTREADY_BODY = '#tooltips:cyberSport/unit/fightBtn/pressForNotReady/body'
CYBERSPORT_UNIT_FIGHTBTN_EVENTVEHICLEWRONGMODE_HEADER = '#tooltips:cyberSport/unit/fightBtn/eventVehicleWrongMode/header'
CYBERSPORT_UNIT_FIGHTBTN_EVENTVEHICLEWRONGMODE_BODY = '#tooltips:cyberSport/unit/fightBtn/eventVehicleWrongMode/body'
# --- fortification (stronghold) main-screen tooltip keys --------------------
# Same auto-generated '#tooltips:<path>' i18n key scheme as above, covering
# the intro view, modernization dialog, header/footer buttons, welcome
# screen, transporting controls and the reserve-order popover/panel.
FORTIFICATION_BATTLEROOMLEGIONARIES_HEADER = '#tooltips:fortification/battleRoomLegionaries/header'
FORTIFICATION_BATTLEROOMLEGIONARIES_BODY = '#tooltips:fortification/battleRoomLegionaries/body'
FORTIFICATION_BATTLEROOMLEGIONARIES_TEAMSECTION_HEADER = '#tooltips:fortification/battleRoomLegionaries/teamSection/header'
FORTIFICATION_BATTLEROOMLEGIONARIES_TEAMSECTION_BODY = '#tooltips:fortification/battleRoomLegionaries/teamSection/body'
# Clan-battle button: one enabled state, several distinct disabled reasons.
FORTIFICATION_INTROVIEW_CLANBATTLEBTN_ENABLED_HEADER = '#tooltips:fortification/introView/clanBattleBtn/enabled/header'
FORTIFICATION_INTROVIEW_CLANBATTLEBTN_ENABLED_BODY = '#tooltips:fortification/introView/clanBattleBtn/enabled/body'
FORTIFICATION_INTROVIEW_CLANBATTLEBTN_DISABLED_LEADER = '#tooltips:fortification/introView/clanBattleBtn/disabled/leader'
FORTIFICATION_INTROVIEW_CLANBATTLEBTN_DISABLED_NOTLEADER = '#tooltips:fortification/introView/clanBattleBtn/disabled/notLeader'
FORTIFICATION_INTROVIEW_CLANBATTLEBTN_DISABLED_NEXTBATTLE = '#tooltips:fortification/introView/clanBattleBtn/disabled/nextBattle'
FORTIFICATION_INTROVIEW_CLANBATTLEBTN_DISABLED_LEADERNOACTION = '#tooltips:fortification/introView/clanBattleBtn/disabled/leaderNoAction'
FORTIFICATION_INTROVIEW_CLANBATTLEBTN_DISABLED_NOTLEADERNOACTION = '#tooltips:fortification/introView/clanBattleBtn/disabled/notLeaderNoAction'
FORTIFICATION_INTROVIEW_FORTBATTLES_BTNTOOLTIP = '#tooltips:fortification/introView/fortBattles/btnTooltip'
FORTIFICATION_INTROVIEW_SORTIE_BTNTOOLTIP = '#tooltips:fortification/introView/sortie/btnTooltip'
FORTIFICATION_INTROVIEW_BATTLE_BTNTOOLTIP = '#tooltips:fortification/introView/battle/btnTooltip'
FORTIFICATION_MODERNIZATION_APPLYBUTTON_HEADER = '#tooltips:fortification/modernization/applyButton/header'
FORTIFICATION_MODERNIZATION_APPLYBUTTON_LOWBASELEVEL = '#tooltips:fortification/modernization/applyButton/lowBaseLevel'
# NOTE(review): the key segment is spelled 'netEnoughResource' in the catalog
# (likely a generator-side typo for 'notEnoughResource'); keep it verbatim.
FORTIFICATION_MODERNIZATION_APPLYBUTTON_NETENOUGHRESOURCE = '#tooltips:fortification/modernization/applyButton/netEnoughResource'
FORTIFICATION_MODERNIZATION_APPLYBUTTON_LOWLEVELANDRESOURCE = '#tooltips:fortification/modernization/applyButton/lowLevelAndResource'
FORTIFICATION_MODERNIZATION_DESCRIPTIONLINK_HEADER = '#tooltips:fortification/modernization/descriptionLink/header'
FORTIFICATION_MODERNIZATION_DESCRIPTIONLINK_BODY = '#tooltips:fortification/modernization/descriptionLink/body'
FORTIFICATION_DEFRESICONINFO_HEADER = '#tooltips:fortification/defResIconInfo/header'
FORTIFICATION_DEFRESICONINFO_BODY = '#tooltips:fortification/defResIconInfo/body'
FORTIFICATION_MODERNIZATION_NOTACTIVATEDDEFPERIOD_HEADER = '#tooltips:fortification/modernization/notActivatedDefPeriod/header'
FORTIFICATION_MODERNIZATION_NOTACTIVATEDDEFPERIOD_BODY = '#tooltips:fortification/modernization/notActivatedDefPeriod/body'
FORTIFICATION_FOUNDATIONCOMMANDER_HEADER = '#tooltips:fortification/foundationCommander/header'
FORTIFICATION_FOUNDATIONCOMMANDER_BODY = '#tooltips:fortification/foundationCommander/body'
FORTIFICATION_FOUNDATIONCOMMANDER_NOTAVAILABLE_BODY = '#tooltips:fortification/foundationCommander/notAvailable/body'
FORTIFICATION_FOUNDATIONNOTCOMMANDER_HEADER = '#tooltips:fortification/foundationNotCommander/header'
FORTIFICATION_FOUNDATIONNOTCOMMANDER_BODY = '#tooltips:fortification/foundationNotCommander/body'
# Header toolbar buttons.
FORTIFICATION_HEADER_STATISTICS_HEADER = '#tooltips:fortification/header/statistics/header'
FORTIFICATION_HEADER_STATISTICS_BODY = '#tooltips:fortification/header/statistics/body'
FORTIFICATION_HEADER_CLANLIST_HEADER = '#tooltips:fortification/header/clanList/header'
FORTIFICATION_HEADER_CLANLIST_BODY = '#tooltips:fortification/header/clanList/body'
FORTIFICATION_HEADER_CALENDARBTN_HEADER = '#tooltips:fortification/header/calendarBtn/header'
FORTIFICATION_HEADER_CALENDARBTN_BODY = '#tooltips:fortification/header/calendarBtn/body'
FORTIFICATION_HEADER_SETTINGSBTN_HEADER = '#tooltips:fortification/header/settingsBtn/header'
FORTIFICATION_HEADER_SETTINGSBTN_BODY = '#tooltips:fortification/header/settingsBtn/body'
FORTIFICATION_HEADER_SETTINGSBTN_DISABLED_HEADER = '#tooltips:fortification/header/settingsBtn/disabled/header'
FORTIFICATION_HEADER_SETTINGSBTN_DISABLED_BODY = '#tooltips:fortification/header/settingsBtn/disabled/body'
FORTIFICATION_FOOTER_SORTIEBUTTON_HEADER = '#tooltips:fortification/footer/sortieButton/header'
FORTIFICATION_FOOTER_SORTIEBUTTON_BODY = '#tooltips:fortification/footer/sortieButton/body'
FORTIFICATION_FOOTER_INTELLIGENCEBUTTON_HEADER = '#tooltips:fortification/footer/intelligenceButton/header'
FORTIFICATION_FOOTER_INTELLIGENCEBUTTON_BODY = '#tooltips:fortification/footer/intelligenceButton/body'
# Welcome (first-visit) screen.
FORTIFICATION_WELCOME_DETAILS_HEADER = '#tooltips:fortification/welcome/details/header'
FORTIFICATION_WELCOME_DETAILS_BODY = '#tooltips:fortification/welcome/details/body'
FORTIFICATION_WELCOME_CREATEFORT_HEADER = '#tooltips:fortification/welcome/createFort/header'
FORTIFICATION_WELCOME_CREATEFORT_BODY = '#tooltips:fortification/welcome/createFort/body'
FORTIFICATION_WELCOME_CANTCREATEFORT_HEADER = '#tooltips:fortification/welcome/cantCreateFort/header'
FORTIFICATION_WELCOME_CANTCREATEFORT_BODY = '#tooltips:fortification/welcome/cantCreateFort/body'
FORTIFICATION_WELCOME_CLANSEARCH_HEADER = '#tooltips:fortification/welcome/clanSearch/header'
FORTIFICATION_WELCOME_CLANSEARCH_BODY = '#tooltips:fortification/welcome/clanSearch/body'
FORTIFICATION_WELCOME_CLANSEARCHLABEL_HEADER = '#tooltips:fortification/welcome/clanSearchLabel/header'
FORTIFICATION_WELCOME_CLANSEARCHLABEL_BODY = '#tooltips:fortification/welcome/clanSearchLabel/body'
FORTIFICATION_WELCOME_CLANCREATE_HEADER = '#tooltips:fortification/welcome/clanCreate/header'
FORTIFICATION_WELCOME_CLANCREATE_BODY = '#tooltips:fortification/welcome/clanCreate/body'
# NOTE(review): 'tranporting' (missing 's') is how the catalog spells this
# path; preserved verbatim to match the localization data.
FORTIFICATION_TRANPORTINGBUTTON_ACTIVE_HEADER = '#tooltips:fortification/tranportingButton/active/header'
FORTIFICATION_TRANPORTINGBUTTON_ACTIVE_BODY = '#tooltips:fortification/tranportingButton/active/body'
FORTIFICATION_TRANPORTINGBUTTON_INACTIVE_HEADER = '#tooltips:fortification/tranportingButton/inactive/header'
FORTIFICATION_TRANPORTINGBUTTON_INACTIVE_BODY = '#tooltips:fortification/tranportingButton/inactive/body'
FORTIFICATION_CLOSEDIRECTIONBUTTON_ACTIVE_HEADER = '#tooltips:fortification/closeDirectionButton/active/header'
FORTIFICATION_CLOSEDIRECTIONBUTTON_ACTIVE_BODY = '#tooltips:fortification/closeDirectionButton/active/body'
FORTIFICATION_CLOSEDIRECTIONBUTTON_INACTIVE_HEADER = '#tooltips:fortification/closeDirectionButton/inactive/header'
FORTIFICATION_CLOSEDIRECTIONBUTTON_INACTIVE_BODY = '#tooltips:fortification/closeDirectionButton/inactive/body'
# Reserve-order popover and orders panel.
FORTIFICATION_ORDERPOPOVER_USEORDERBTN_HEADER = '#tooltips:fortification/orderPopover/useOrderBtn/header'
FORTIFICATION_ORDERPOPOVER_USEORDERBTN_CANTUSE_HEADER = '#tooltips:fortification/orderPopover/useOrderBtn/cantUse/header'
FORTIFICATION_ORDERPOPOVER_USEORDERBTN_CANTUSE_BODY = '#tooltips:fortification/orderPopover/useOrderBtn/cantUse/body'
FORTIFICATION_ORDERPOPOVER_USEORDERBTN_DESCRIPTION = '#tooltips:fortification/orderPopover/useOrderBtn/description'
FORTIFICATION_ORDERPOPOVER_USEORDERBTN_NOTAVAILABLE = '#tooltips:fortification/orderPopover/useOrderBtn/notAvailable'
FORTIFICATION_ORDERPOPOVER_USEORDERBTN_NOBUILDING = '#tooltips:fortification/orderPopover/useOrderBtn/noBuilding'
FORTIFICATION_ORDERPOPOVER_USEORDERBTN_NOORDERS = '#tooltips:fortification/orderPopover/useOrderBtn/noOrders'
FORTIFICATION_ORDERPOPOVER_ORDERISREADY = '#tooltips:fortification/orderPopover/orderIsReady'
FORTIFICATION_ORDERPOPOVER_CREATEORDER = '#tooltips:fortification/orderPopover/createOrder'
FORTIFICATION_ORDERPOPOVER_ROLESLIST = '#tooltips:fortification/orderPopover/rolesList'
FORTIFICATION_ORDERPOPOVER_CLANPERMISSIONS = '#tooltips:fortification/orderPopover/clanPermissions'
FORTIFICATION_ORDERPOPOVER_USEORDERBTN_WASUSED = '#tooltips:fortification/orderPopover/useOrderBtn/wasUsed'
FORTIFICATION_ORDERPOPOVER_USEORDERBTN_DEFENCEHOURDISABLED = '#tooltips:fortification/orderPopover/useOrderBtn/defenceHourDisabled'
FORTIFICATION_ORDERPOPOVER_PROGRESSBAR_TIMELEFT = '#tooltips:fortification/orderPopover/progressBar/timeLeft'
FORTIFICATION_ORDERPOPOVER_INDEFENSIVE = '#tooltips:fortification/orderPopover/inDefensive'
FORTIFICATION_ORDERPOPOVER_INPROGRESS_TEXT = '#tooltips:fortification/orderPopover/inProgress/text'
FORTIFICATION_ORDERPOPOVER_INPROGRESS_TIMELEFT = '#tooltips:fortification/orderPopover/inProgress/timeLeft'
FORTIFICATION_ORDERPOPOVER_LINKBTN = '#tooltips:fortification/orderPopover/linkBtn'
FORTIFICATION_ORDERPOPOVER_PERMANENTORDER_INFO = '#tooltips:fortification/orderPopover/permanentOrder/info'
FORTIFICATION_ORDERSPANEL_CANTUSEORDER = '#tooltips:fortification/ordersPanel/cantUseOrder'
FORTIFICATION_ORDERSPANEL_CHECKBOXORDERTYPE_HEADER = '#tooltips:fortification/ordersPanel/checkBoxOrderType/header'
FORTIFICATION_ORDERSPANEL_CHECKBOXORDERTYPE_BODY = '#tooltips:fortification/ordersPanel/checkBoxOrderType/body'
# --- fortification detail views: calendar, building popOver, sorties --------
# Continuation of the auto-generated '#tooltips:<path>' i18n key table.
FORTIFICATION_CALENDAREVENT_BATTLERESULTTOOLTIP_ROBBED = '#tooltips:fortification/calendarEvent/battleResultTooltip/robbed'
FORTIFICATION_CALENDAREVENT_BATTLERESULTTOOLTIP_PLUNDERED = '#tooltips:fortification/calendarEvent/battleResultTooltip/plundered'
FORTIFICATION_CALENDAREVENT_BATTLERESULTTOOLTIP_BATTLESCOUNT = '#tooltips:fortification/calendarEvent/battleResultTooltip/battlesCount'
FORTIFICATION_CALENDAREVENT_BATTLERESULTTOOLTIP_BUILDINGSLOST = '#tooltips:fortification/calendarEvent/battleResultTooltip/buildingsLost'
FORTIFICATION_CALENDAREVENT_BATTLERESULTTOOLTIP_BUILDINGSSEIZED = '#tooltips:fortification/calendarEvent/battleResultTooltip/buildingsSeized'
# Building popOver: note the catalog mixes '/' and '_' separators in some
# paths (e.g. 'upgradeFoundationBtn_Disabled'); keep values verbatim.
FORTIFICATION_POPOVER_UPGRADEFOUNDATIONBTN_HEADER = '#tooltips:fortification/popOver/upgradeFoundationBtn/header'
FORTIFICATION_POPOVER_UPGRADEFOUNDATIONBTN_BODY = '#tooltips:fortification/popOver/upgradeFoundationBtn/body'
FORTIFICATION_POPOVER_UPGRADEFOUNDATIONBTN_DISABLED_HEADER = '#tooltips:fortification/popOver/upgradeFoundationBtn_Disabled/header'
FORTIFICATION_POPOVER_UPGRADEFOUNDATIONBTN_DISABLED_BODY = '#tooltips:fortification/popOver/upgradeFoundationBtn_Disabled/body'
FORTIFICATION_POPOVER_UPGRADEBTN_DISABLEDBYBATTLE_HEADER = '#tooltips:fortification/popOver/upgradeBtn_DisabledByBattle/header'
FORTIFICATION_POPOVER_UPGRADEBTN_DISABLEDBYBATTLE_BODY = '#tooltips:fortification/popOver/upgradeBtn_DisabledByBattle/body'
FORTIFICATION_POPOVER_UPGRADEBTN_DISABLEDBYDESTROY_HEADER = '#tooltips:fortification/popOver/upgradeBtn_DisabledByDestroy/header'
FORTIFICATION_POPOVER_UPGRADEBTN_DISABLEDBYDESTROY_BODY = '#tooltips:fortification/popOver/upgradeBtn_DisabledByDestroy/body'
FORTIFICATION_POPOVER_DEMOUNTBTN_HEADER = '#tooltips:fortification/popOver/demountBtn/header'
FORTIFICATION_POPOVER_DEMOUNTBTN_BODY = '#tooltips:fortification/popOver/demountBtn/body'
FORTIFICATION_POPOVER_DEMOUNTBTNDISABLED_HEADER = '#tooltips:fortification/popOver/demountBtnDisabled/header'
FORTIFICATION_POPOVER_DEMOUNTBTNDISABLED_BODY = '#tooltips:fortification/popOver/demountBtnDisabled/body'
FORTIFICATION_POPOVER_PREPAREORDERDISABLE_HEADER = '#tooltips:fortification/popOver/prepareOrderDisable/header'
FORTIFICATION_POPOVER_PREPAREORDERDISABLE_BODY = '#tooltips:fortification/popOver/prepareOrderDisable/body'
FORTIFICATION_POPOVER_PREPAREORDERENABLE = '#tooltips:fortification/popOver/prepareOrderEnable'
FORTIFICATION_POPOVER_PREPAREORDEROVERLOAD_HEADER = '#tooltips:fortification/popOver/prepareOrderOverload/header'
FORTIFICATION_POPOVER_PREPAREORDEROVERLOAD_BODY = '#tooltips:fortification/popOver/prepareOrderOverload/body'
FORTIFICATION_POPOVER_HPPROGRESS_HEADER = '#tooltips:fortification/popOver/hpProgress/header'
FORTIFICATION_POPOVER_HPPROGRESS_BODY = '#tooltips:fortification/popOver/hpProgress/body'
FORTIFICATION_POPOVER_DEFRESPROGRESS_HEADER = '#tooltips:fortification/popOver/defResProgress/header'
FORTIFICATION_POPOVER_DEFRESPROGRESS_BODY = '#tooltips:fortification/popOver/defResProgress/body'
FORTIFICATION_POPOVER_DEFRESPROGRESS_COMPENSATION_HEADER = '#tooltips:fortification/popOver/defResProgress/compensation/header'
FORTIFICATION_POPOVER_DEFRESPROGRESS_COMPENSATION_BODY = '#tooltips:fortification/popOver/defResProgress/compensation/body'
FORTIFICATION_POPOVER_FIXEDPLAYERSBTN_HEADER = '#tooltips:fortification/popOver/fixedPlayersBtn/header'
FORTIFICATION_POPOVER_FIXEDPLAYERSBTN_BODY = '#tooltips:fortification/popOver/fixedPlayersBtn/body'
# Fixed-players (assigned garrison) dialog.
FORTIFICATION_FIXEDPLAYERS_ASSIGNBTNENABLED_HEADER = '#tooltips:fortification/fixedPlayers/assignBtnEnabled/header'
FORTIFICATION_FIXEDPLAYERS_ASSIGNBTNENABLED_BODY = '#tooltips:fortification/fixedPlayers/assignBtnEnabled/body'
FORTIFICATION_FIXEDPLAYERS_ASSIGNBTNDISABLED_HEADER = '#tooltips:fortification/fixedPlayers/assignBtnDisabled/header'
FORTIFICATION_FIXEDPLAYERS_ASSIGNBTNDISABLED_BODY = '#tooltips:fortification/fixedPlayers/assignBtnDisabled/body'
FORTIFICATION_FIXEDPLAYERS_GENERALTOOLTIP_HEADER = '#tooltips:fortification/fixedPlayers/generalTooltip/header'
FORTIFICATION_FIXEDPLAYERS_GENERALTOOLTIP_BODY = '#tooltips:fortification/fixedPlayers/generalTooltip/body'
FORTIFICATION_FIXEDPLAYERS_GENERALTOOLTIPMAXLIMIT_HEADER = '#tooltips:fortification/fixedPlayers/generalTooltipMaxLimit/header'
FORTIFICATION_FIXEDPLAYERS_GENERALTOOLTIPMAXLIMIT_BODY = '#tooltips:fortification/fixedPlayers/generalTooltipMaxLimit/body'
FORTIFICATION_FIXEDPLAYERS_WEEK_HEADER = '#tooltips:fortification/fixedPlayers/week/header'
FORTIFICATION_FIXEDPLAYERS_WEEK_BODY = '#tooltips:fortification/fixedPlayers/week/body'
FORTIFICATION_FIXEDPLAYERS_ALLTIME_HEADER = '#tooltips:fortification/fixedPlayers/allTime/header'
FORTIFICATION_FIXEDPLAYERS_ALLTIME_BODY = '#tooltips:fortification/fixedPlayers/allTime/body'
FORTIFICATION_FIXEDPLAYERS_FORTROLE_HEADER = '#tooltips:fortification/fixedPlayers/fortRole/header'
FORTIFICATION_FIXEDPLAYERS_FORTROLE_BODY = '#tooltips:fortification/fixedPlayers/fortRole/body'
FORTIFICATION_FIXEDPLAYERS_NIC_HEADER = '#tooltips:fortification/fixedPlayers/nic/header'
FORTIFICATION_FIXEDPLAYERS_NIC_BODY = '#tooltips:fortification/fixedPlayers/nic/body'
# Building construction process.
FORTIFICATION_BUILDINGPROCESS_STATUSICONSUCCESS_HEADER = '#tooltips:fortification/buildingProcess/statusIconSuccess/header'
FORTIFICATION_BUILDINGPROCESS_STATUSICONSUCCESS_BODY = '#tooltips:fortification/buildingProcess/statusIconSuccess/body'
FORTIFICATION_BUILDINGPROCESS_STATUSICONNOTAVAILABLE_HEADER = '#tooltips:fortification/buildingProcess/statusIconNotAvailable/header'
FORTIFICATION_BUILDINGPROCESS_STATUSICONNOTAVAILABLE_BODY = '#tooltips:fortification/buildingProcess/statusIconNotAvailable/body'
FORTIFICATION_BUILDINGPROCESS_BTNENABLED_HEADER = '#tooltips:fortification/buildingProcess/btnEnabled/header'
FORTIFICATION_BUILDINGPROCESS_BTNENABLED_BODY = '#tooltips:fortification/buildingProcess/btnEnabled/body'
FORTIFICATION_BUILDINGPROCESS_BTNDISABLEDBUILT_HEADER = '#tooltips:fortification/buildingProcess/btnDisabledBuilt/header'
FORTIFICATION_BUILDINGPROCESS_BTNDISABLEDBUILT_BODY = '#tooltips:fortification/buildingProcess/btnDisabledBuilt/body'
FORTIFICATION_BUILDINGPROCESS_BTNDISABLEDNOTAVAILABLE_HEADER = '#tooltips:fortification/buildingProcess/btnDisabledNotAvailable/header'
FORTIFICATION_BUILDINGPROCESS_BTNDISABLEDNOTAVAILABLE_BODY = '#tooltips:fortification/buildingProcess/btnDisabledNotAvailable/body'
FORTIFICATION_BUILDINGPROCESS_ALERT_ONLYUSEDINCOMBAT = '#tooltips:fortification/buildingProcess/alert/onlyUsedInCombat'
# Resource transporting errors/states.
FORTIFICATION_TRANSPORTING_EMPTYSTORAGE_HEADER = '#tooltips:fortification/transporting/emptyStorage/header'
FORTIFICATION_TRANSPORTING_EMPTYSTORAGE_BODY = '#tooltips:fortification/transporting/emptyStorage/body'
FORTIFICATION_TRANSPORTING_COOLDOWN_HEADER = '#tooltips:fortification/transporting/cooldown/header'
FORTIFICATION_TRANSPORTING_COOLDOWN_BODY = '#tooltips:fortification/transporting/cooldown/body'
FORTIFICATION_TRANSPORTING_FOUNDATION_HEADER = '#tooltips:fortification/transporting/foundation/header'
FORTIFICATION_TRANSPORTING_FOUNDATION_BODY = '#tooltips:fortification/transporting/foundation/body'
FORTIFICATION_TRANSPORTING_NOTEMPTYSPACE_HEADER = '#tooltips:fortification/transporting/notEmptySpace/header'
FORTIFICATION_TRANSPORTING_NOTEMPTYSPACE_BODY = '#tooltips:fortification/transporting/notEmptySpace/body'
FORTIFICATION_ORDERPROCESS_NOTAVAILABLE_HEADER = '#tooltips:fortification/orderProcess/notAvailable/header'
FORTIFICATION_ORDERPROCESS_NOTAVAILABLE_BODY = '#tooltips:fortification/orderProcess/notAvailable/body'
FORTIFICATION_ORDERPROCESS_INPAUSE = '#tooltips:fortification/orderProcess/inPause'
FORTIFICATION_ORDERPROCESS_INPAUSE_HEADER = '#tooltips:fortification/orderProcess/inPause/header'
FORTIFICATION_ORDERPROCESS_INPAUSE_BODY = '#tooltips:fortification/orderProcess/inPause/body'
FORTIFICATION_ORDERPROCESS_NOTENOUGHHP = '#tooltips:fortification/orderProcess/notEnoughHp'
FORTIFICATION_ORDERPROCESS_BASEDESTROYED = '#tooltips:fortification/orderProcess/baseDestroyed'
FORTIFICATION_ORDERPROCESS_INFO = '#tooltips:fortification/orderProcess/info'
# Sortie division selection and sortie room list.
FORTIFICATION_SORTIEDIVISIONTOOLTIP_TITLE = '#tooltips:fortification/sortieDivisionToolTip/title'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_DESCRIPTION = '#tooltips:fortification/sortieDivisionToolTip/description'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_INFO = '#tooltips:fortification/sortieDivisionToolTip/info'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_MIDDLEDIVISION = '#tooltips:fortification/sortieDivisionToolTip/middleDivision'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_CHAMPIONDIVISION = '#tooltips:fortification/sortieDivisionToolTip/championDivision'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_ABSOLUTEDIVISION = '#tooltips:fortification/sortieDivisionToolTip/absoluteDivision'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_VEHLEVEL = '#tooltips:fortification/sortieDivisionToolTip/vehLevel'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_PLAYERSLIMIT = '#tooltips:fortification/sortieDivisionToolTip/playersLimit'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_LEGIONNAIRESLIMIT = '#tooltips:fortification/sortieDivisionToolTip/legionnairesLimit'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_TIMELIMIT = '#tooltips:fortification/sortieDivisionToolTip/timeLimit'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_TIMELIMITSORTIE = '#tooltips:fortification/sortieDivisionToolTip/timeLimitSortie'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_DOUBLEBONUS = '#tooltips:fortification/sortieDivisionToolTip/doublebonus'
FORTIFICATION_SORTIEDIVISIONTOOLTIP_BONUS = '#tooltips:fortification/sortieDivisionToolTip/bonus'
FORTIFICATION_SORTIE_LISTROOM_REGULATION_HEADER_INFO = '#tooltips:fortification/sortie/listRoom/regulation/header/info'
FORTIFICATION_SORTIE_LISTROOM_REGULATION_HEADER_CURFEW = '#tooltips:fortification/sortie/listRoom/regulation/header/curfew'
FORTIFICATION_SORTIE_LISTROOM_REGULATION_TIMEDESCR = '#tooltips:fortification/sortie/listRoom/regulation/timeDescr'
FORTIFICATION_SORTIE_LISTROOM_REGULATION_NONE = '#tooltips:fortification/sortie/listRoom/regulation/none'
FORTIFICATION_SORTIE_LISTROOM_REGULATION_SERVERLIMIT = '#tooltips:fortification/sortie/listRoom/regulation/serverLimit'
FORTIFICATION_SORTIE_LISTROOM_REGULATION_TIMELIMITFORMAT = '#tooltips:fortification/sortie/listRoom/regulation/timeLimitFormat'
FORTIFICATION_SORTIE_LISTROOM_REGULATION_SERVERLIMITTIMEDESCR = '#tooltips:fortification/sortie/listRoom/regulation/serverLimitTimeDescr'
FORTIFICATION_SORTIE_LISTROOM_REGULATION_FOOTER = '#tooltips:fortification/sortie/listRoom/regulation/footer'
FORTIFICATION_SORTIE_LISTROOM_BACK_HEADER = '#tooltips:fortification/sortie/listRoom/back/header'
FORTIFICATION_SORTIE_LISTROOM_BACK_BODY = '#tooltips:fortification/sortie/listRoom/back/body'
FORTIFICATION_SORTIE_LISTROOM_CREATEBTN_HEADER = '#tooltips:fortification/sortie/listRoom/createBtn/header'
FORTIFICATION_SORTIE_LISTROOM_CREATEBTN_BODY = '#tooltips:fortification/sortie/listRoom/createBtn/body'
FORTIFICATION_SORTIE_LISTROOM_SORTNAMEBTN_HEADER = '#tooltips:fortification/sortie/listRoom/sortNameBtn/header'
FORTIFICATION_SORTIE_LISTROOM_SORTNAMEBTN_BODY = '#tooltips:fortification/sortie/listRoom/sortNameBtn/body'
FORTIFICATION_SORTIE_LISTROOM_DESCR_HEADER = '#tooltips:fortification/sortie/listRoom/descr/header'
FORTIFICATION_SORTIE_LISTROOM_DESCR_BODY = '#tooltips:fortification/sortie/listRoom/descr/body'
FORTIFICATION_SORTIE_LISTROOM_STATUS_HEADER = '#tooltips:fortification/sortie/listRoom/status/header'
FORTIFICATION_SORTIE_LISTROOM_STATUS_BODY = '#tooltips:fortification/sortie/listRoom/status/body'
FORTIFICATION_SORTIE_LISTROOM_BATTLESTATUS_HEADER = '#tooltips:fortification/sortie/listRoom/battleStatus/header'
FORTIFICATION_SORTIE_LISTROOM_BATTLESTATUS_BODY = '#tooltips:fortification/sortie/listRoom/battleStatus/body'
FORTIFICATION_SORTIE_LISTROOM_SORTDIVISIONBTN_HEADER = '#tooltips:fortification/sortie/listRoom/sortDivisionBtn/header'
FORTIFICATION_SORTIE_LISTROOM_SORTDIVISIONBTN_BODY = '#tooltips:fortification/sortie/listRoom/sortDivisionBtn/body'
FORTIFICATION_SORTIE_LISTROOM_SORTSQUADBTN_HEADER = '#tooltips:fortification/sortie/listRoom/sortSquadBtn/header'
FORTIFICATION_SORTIE_LISTROOM_SORTSQUADBTN_BODY = '#tooltips:fortification/sortie/listRoom/sortSquadBtn/body'
FORTIFICATION_SORTIE_LISTROOM_JOINBTN_HEADER = '#tooltips:fortification/sortie/listRoom/joinBtn/header'
FORTIFICATION_SORTIE_LISTROOM_JOINBTN_BODY = '#tooltips:fortification/sortie/listRoom/joinBtn/body'
# NOTE(review): 'singIn' (not 'signIn') is the catalog's spelling; verbatim.
FORTIFICATION_SORTIE_LISTROOM_SINGINBTN_HEADER = '#tooltips:fortification/sortie/listRoom/singInBtn/header'
FORTIFICATION_SORTIE_LISTROOM_SINGINBTN_BODY = '#tooltips:fortification/sortie/listRoom/singInBtn/body'
FORTIFICATION_SORTIE_LISTROOM_RENDERERINFO = '#tooltips:fortification/sortie/listRoom/rendererInfo'
FORTIFICATION_TOOLTIPFORTSORTIE_TITLE = '#tooltips:fortification/ToolTipFortSortie/title'
FORTIFICATION_TOOLTIPFORTSORTIE_DIVISION = '#tooltips:fortification/ToolTipFortSortie/division'
FORTIFICATION_TOOLTIPFORTSORTIE_INBATTLE = '#tooltips:fortification/ToolTipFortSortie/inBattle'
FORTIFICATION_TOOLTIPFORTSORTIE_HINT = '#tooltips:fortification/ToolTipFortSortie/hint'
FORTIFICATION_UNIT_ACCESS_HEADER = '#tooltips:fortification/unit/access/header'
FORTIFICATION_UNIT_ACCESS_BODYOPEN = '#tooltips:fortification/unit/access/bodyOpen'
FORTIFICATION_UNIT_ACCESS_BODYCLOSED = '#tooltips:fortification/unit/access/bodyClosed'
# --- battle-type selector, strongholds, clan stats, vehicle selector --------
# Final section of the auto-generated '#tooltips:<path>' i18n key table.
BATTLETYPES_FORTIFICATION_HEADER = '#tooltips:battleTypes/fortification/header'
BATTLETYPES_FORTIFICATION_BODY = '#tooltips:battleTypes/fortification/body'
BATTLETYPES_STRONGHOLDS_HEADER = '#tooltips:battleTypes/strongholds/header'
BATTLETYPES_STRONGHOLDS_BODY = '#tooltips:battleTypes/strongholds/body'
FORTIFICATION_DISABLED_NO_CLAN_HEADER = '#tooltips:fortification/disabled/no_clan/header'
FORTIFICATION_DISABLED_NO_CLAN_BODY = '#tooltips:fortification/disabled/no_clan/body'
FORTIFICATION_DISABLED_NO_FORT_HEADER = '#tooltips:fortification/disabled/no_fort/header'
FORTIFICATION_DISABLED_NO_FORT_BODY = '#tooltips:fortification/disabled/no_fort/body'
BATTLETYPES_LEAVEFORTIFICATION_HEADER = '#tooltips:battleTypes/leaveFortification/header'
BATTLETYPES_LEAVEFORTIFICATION_BODY = '#tooltips:battleTypes/leaveFortification/body'
FORTIFICATION_CLAN_LISTROOM_BACK_HEADER = '#tooltips:fortification/clan/listRoom/back/header'
FORTIFICATION_CLAN_LISTROOM_BACK_BODY = '#tooltips:fortification/clan/listRoom/back/body'
BATTLETYPES_FORTIFICATION_DISABLED_HEADER = '#tooltips:battleTypes/fortification/disabled/header'
BATTLETYPES_FORTIFICATION_DISABLED_BODY = '#tooltips:battleTypes/fortification/disabled/body'
BATTLETYPES_STRONGHOLDS_DISABLED_HEADER = '#tooltips:battleTypes/strongholds/disabled/header'
BATTLETYPES_STRONGHOLDS_DISABLED_BODY = '#tooltips:battleTypes/strongholds/disabled/body'
STRONGHOLDS_TIMER_SQUADINBATTLE = '#tooltips:strongholds/timer/squadInBattle'
STRONGHOLDS_TIMER_TIMETOBATTLE = '#tooltips:strongholds/timer/timeToBattle'
STRONGHOLDS_TIMER_TIMETONEXTBATTLE = '#tooltips:strongholds/timer/timeToNextBattle'
# Sortie battle room controls and player statuses.
FORTIFICATION_SORTIE_BATTLEROOM_LEAVEBTN_HEADER = '#tooltips:fortification/sortie/battleRoom/leaveBtn/header'
FORTIFICATION_SORTIE_BATTLEROOM_LEAVEBTN_BODY = '#tooltips:fortification/sortie/battleRoom/leaveBtn/body'
FORTIFICATION_SORTIE_BATTLEROOM_LEGIONARIESCOUNT_HEADER = '#tooltips:fortification/sortie/battleRoom/legionariesCount/header'
FORTIFICATION_SORTIE_BATTLEROOM_LEGIONARIESCOUNT_BODY = '#tooltips:fortification/sortie/battleRoom/legionariesCount/body'
FORTIFICATION_SORTIE_BATTLEROOM_CHANGEDIVISION_HEADER = '#tooltips:fortification/sortie/battleRoom/changeDivision/header'
FORTIFICATION_SORTIE_BATTLEROOM_CHANGEDIVISION_BODY = '#tooltips:fortification/sortie/battleRoom/changeDivision/body'
FORTIFICATION_SORTIE_BATTLEROOM_INVITEBTN_HEADER = '#tooltips:fortification/sortie/battleRoom/inviteBtn/header'
FORTIFICATION_SORTIE_BATTLEROOM_INVITEBTN_BODY = '#tooltips:fortification/sortie/battleRoom/inviteBtn/body'
FORTIFICATION_SORTIE_BATTLEROOM_STATUS_ISREADY = '#tooltips:fortification/sortie/battleRoom/status/isReady'
FORTIFICATION_SORTIE_BATTLEROOM_STATUS_NOTREADY = '#tooltips:fortification/sortie/battleRoom/status/notReady'
FORTIFICATION_SORTIE_BATTLEROOM_STATUS_COMMANDER = '#tooltips:fortification/sortie/battleRoom/status/commander'
FORTIFICATION_SORTIE_TAKEPLACEFIRSTTIMEBTN_HEADER = '#tooltips:fortification/sortie/takePlaceFirstTimeBtn/header'
FORTIFICATION_SORTIE_TAKEPLACEFIRSTTIMEBTN_BODY = '#tooltips:fortification/sortie/takePlaceFirstTimeBtn/body'
FORTIFICATION_SORTIE_SELECTVEHICLE_HEADER = '#tooltips:fortification/sortie/selectVehicle/header'
FORTIFICATION_SORTIE_SELECTVEHICLE_BODY = '#tooltips:fortification/sortie/selectVehicle/body'
FORTIFICATION_SORTIE_CHAT_DESCRIPTION_HEADER = '#tooltips:fortification/sortie/chat/description/header'
FORTIFICATION_SORTIE_CHAT_DESCRIPTION_BODY = '#tooltips:fortification/sortie/chat/description/body'
FORTIFICATION_SORTIE_CHAT_SENDMESSAGEBTN_HEADER = '#tooltips:fortification/sortie/chat/sendMessageBtn/header'
FORTIFICATION_SORTIE_CHAT_SENDMESSAGEBTN_BODY = '#tooltips:fortification/sortie/chat/sendMessageBtn/body'
FORTIFICATION_SORTIE_REMOVEBTN_HEADER = '#tooltips:fortification/sortie/removeBtn/header'
FORTIFICATION_SORTIE_REMOVEBTN_BODY = '#tooltips:fortification/sortie/removeBtn/body'
FORTIFICATION_SORTIE_PLAYER_VEHICLE = '#tooltips:fortification/sortie/player/vehicle'
FORTIFICATION_SORTIE_PLAYER_CANCELREADY_HEADER = '#tooltips:fortification/sortie/player/cancelReady/header'
FORTIFICATION_SORTIE_PLAYER_CANCELREADY_BODY = '#tooltips:fortification/sortie/player/cancelReady/body'
FORTIFICATION_SORTIE_PLAYER_CHANGEVEHICLE_HEADER = '#tooltips:fortification/sortie/player/changeVehicle/header'
FORTIFICATION_SORTIE_PLAYER_CHANGEVEHICLE_BODY = '#tooltips:fortification/sortie/player/changeVehicle/body'
FORTIFICATION_SORTIE_SLOT_VEHICLE_NOTREADY_TEMPORALLY_BODY = '#tooltips:fortification/sortie/slot/vehicle/notReady/temporally/body'
# Clan statistics / defence-period window.
FORTIFICATION_CLANSTATS_PERIODDEFENCE_BATTLES_EFFICIENCYOFATTACK_HEADER = '#tooltips:fortification/clanStats/periodDefence/battles/efficiencyOfAttack/header'
FORTIFICATION_CLANSTATS_PERIODDEFENCE_BATTLES_EFFICIENCYOFATTACK_BODY = '#tooltips:fortification/clanStats/periodDefence/battles/efficiencyOfAttack/body'
PERIODDEFENCEWINDOW_TOOLTIP_PERIPHERY_HEADER = '#tooltips:periodDefenceWindow/tooltip/periphery/header'
PERIODDEFENCEWINDOW_TOOLTIP_PERIPHERY_BODY = '#tooltips:periodDefenceWindow/tooltip/periphery/body'
PERIODDEFENCEWINDOW_TOOLTIP_HOURDEFENCE_HEADER = '#tooltips:periodDefenceWindow/tooltip/hourDefence/header'
PERIODDEFENCEWINDOW_TOOLTIP_HOURDEFENCE_BODY = '#tooltips:periodDefenceWindow/tooltip/hourDefence/body'
PERIODDEFENCEWINDOW_TOOLTIP_HOLIDAY_HEADER = '#tooltips:periodDefenceWindow/tooltip/holiday/header'
PERIODDEFENCEWINDOW_TOOLTIP_HOLIDAY_BODY = '#tooltips:periodDefenceWindow/tooltip/holiday/body'
FORTIFICATION_CLANSTATS_PERIODDEFENCE_BATTLES_EFFICIENCYOFDEFENCE_HEADER = '#tooltips:fortification/clanStats/periodDefence/battles/efficiencyOfDefence/header'
FORTIFICATION_CLANSTATS_PERIODDEFENCE_BATTLES_EFFICIENCYOFDEFENCE_BODY = '#tooltips:fortification/clanStats/periodDefence/battles/efficiencyOfDefence/body'
FORTIFICATION_CLANSTATS_PERIODDEFENCE_BATTLES_BATTLESCOUNT_HEADER = '#tooltips:fortification/clanStats/periodDefence/battles/battlesCount/header'
FORTIFICATION_CLANSTATS_PERIODDEFENCE_BATTLES_BATTLESCOUNT_BODY = '#tooltips:fortification/clanStats/periodDefence/battles/battlesCount/body'
# Own / enemy clan info tooltips (intelligence view).
FORTIFICATION_TOOLTIPCLANINFO_HEADER = '#tooltips:fortification/TooltipClanInfo/header'
FORTIFICATION_TOOLTIPCLANINFO_DESCRIPTION = '#tooltips:fortification/TooltipClanInfo/description'
FORTIFICATION_TOOLTIPCLANINFO_FORTCREATIONDATE = '#tooltips:fortification/TooltipClanInfo/fortCreationDate'
FORTIFICATION_TOOLTIPENEMYCLANINFO_HEADER = '#tooltips:fortification/TooltipEnemyClanInfo/header'
FORTIFICATION_TOOLTIPENEMYCLANINFO_HOMEPEREPHIRY = '#tooltips:fortification/TooltipEnemyClanInfo/homePerephiry'
FORTIFICATION_TOOLTIPENEMYCLANINFO_PLAYERSATCLAN = '#tooltips:fortification/TooltipEnemyClanInfo/playersAtClan'
FORTIFICATION_TOOLTIPENEMYCLANINFO_BUILDINGSATFORTIFICATION = '#tooltips:fortification/TooltipEnemyClanInfo/buildingsAtFortification'
FORTIFICATION_TOOLTIPENEMYCLANINFO_FIGHTSFORFORTIFICATION = '#tooltips:fortification/TooltipEnemyClanInfo/fightsForFortification'
FORTIFICATION_TOOLTIPENEMYCLANINFO_WINPERCENTAGE = '#tooltips:fortification/TooltipEnemyClanInfo/winPercentage'
FORTIFICATION_TOOLTIPENEMYCLANINFO_PROFITPERCENTAGE = '#tooltips:fortification/TooltipEnemyClanInfo/profitPercentage'
FORTIFICATION_TOOLTIPENEMYCLANINFO_DEFENSETIME = '#tooltips:fortification/TooltipEnemyClanInfo/defenseTime'
FORTIFICATION_TOOLTIPENEMYCLANINFO_DEFENSETIMESTOPPED = '#tooltips:fortification/TooltipEnemyClanInfo/defenseTimeStopped'
FORTIFICATION_TOOLTIPENEMYCLANINFO_NODAYOFF = '#tooltips:fortification/TooltipEnemyClanInfo/noDayOff'
FORTIFICATION_TOOLTIPENEMYCLANINFO_NOVACATION = '#tooltips:fortification/TooltipEnemyClanInfo/noVacation'
FORTIFICATION_TOOLTIPENEMYCLANINFO_VACATION = '#tooltips:fortification/TooltipEnemyClanInfo/vacation'
FORTIFICATION_TOOLTIPENEMYCLANINFO_DAYOFF = '#tooltips:fortification/TooltipEnemyClanInfo/dayOff'
FORTIFICATION_TOOLTIPENEMYCLANINFO_DEFENSEHOUR = '#tooltips:fortification/TooltipEnemyClanInfo/defenseHour'
FORTIFICATION_TOOLTIPENEMYCLANINFO_PERIOD = '#tooltips:fortification/TooltipEnemyClanInfo/period'
FORTIFICATION_FORTINTELLIGENCECLANFILTERPOPOVER_APPLY_NOTE = '#tooltips:fortification/FortIntelligenceClanFilterPopover/apply/note'
FORTIFICATION_FORTINTELLIGENCECLANFILTERPOPOVER_DEFAULT_NOTE = '#tooltips:fortification/FortIntelligenceClanFilterPopover/default/note'
# Vehicle selector filter/sorting controls.
VEHICLESELECTOR_FILTER_NATION = '#tooltips:vehicleSelector/filter/nation'
VEHICLESELECTOR_FILTER_VEHTYPE = '#tooltips:vehicleSelector/filter/vehType'
VEHICLESELECTOR_FILTER_VEHLVL = '#tooltips:vehicleSelector/filter/vehLvl'
VEHICLESELECTOR_FILTER_MAINVEHICLE = '#tooltips:vehicleSelector/filter/mainVehicle'
VEHICLESELECTOR_SORTING_NATION_HEADER = '#tooltips:vehicleSelector/sorting/nation/header'
VEHICLESELECTOR_SORTING_NATION_BODY = '#tooltips:vehicleSelector/sorting/nation/body'
VEHICLESELECTOR_SORTING_VEHTYPE_HEADER = '#tooltips:vehicleSelector/sorting/vehType/header'
VEHICLESELECTOR_SORTING_VEHTYPE_BODY = '#tooltips:vehicleSelector/sorting/vehType/body'
VEHICLESELECTOR_SORTING_VEHLVL_HEADER = '#tooltips:vehicleSelector/sorting/vehLvl/header'
VEHICLESELECTOR_SORTING_VEHLVL_BODY = '#tooltips:vehicleSelector/sorting/vehLvl/body'
VEHICLESELECTOR_SORTING_VEHNAME_HEADER = '#tooltips:vehicleSelector/sorting/vehName/header'
VEHICLESELECTOR_SORTING_VEHNAME_BODY = '#tooltips:vehicleSelector/sorting/vehName/body'
VEHICLESELECTOR_OVERFLOWLEVEL_HEADER = '#tooltips:vehicleSelector/overflowLevel/header'
VEHICLESELECTOR_OVERFLOWLEVEL_BODY = '#tooltips:vehicleSelector/overflowLevel/body'
VEHICLESELECTOR_INCOMPATIBLETYPE_HEADER = '#tooltips:vehicleSelector/incompatibleType/header'
VEHICLESELECTOR_INCOMPATIBLETYPE_BODY = '#tooltips:vehicleSelector/incompatibleType/body'
FORTIFICATION_FORTCLANBATTLELIST_GENERALRENDER = '#tooltips:fortification/fortClanBattleList/generalRender'
FORTIFICATION_FORTCLANBATTLELIST_BATTLETYPEICON_DEFENCE = '#tooltips:fortification/fortClanBattleList/battleTypeIcon/defence'
FORTIFICATION_FORTCLANBATTLELIST_BATTLETYPEICON_OFFENCE = '#tooltips:fortification/fortClanBattleList/battleTypeIcon/offence'
FORTIFICATION_FORTCLANBATTLELIST_BATTLENAME_HEADER = '#tooltips:fortification/fortClanBattleList/battleName/header'
FORTIFICATION_FORTCLANBATTLELIST_BATTLENAME_BODY = '#tooltips:fortification/fortClanBattleList/battleName/body'
FORTIFICATION_FORTCLANBATTLELIST_BATTLEDATE_HEADER = '#tooltips:fortification/fortClanBattleList/battleDate/header'
FORTIFICATION_FORTCLANBATTLELIST_BATTLEDATE_BODY = '#tooltips:fortification/fortClanBattleList/battleDate/body'
FORTIFICATION_FORTCLANBATTLELIST_BATTLETIME_HEADER = '#tooltips:fortification/fortClanBattleList/battleTime/header'
FORTIFICATION_FORTCLANBATTLELIST_BATTLETIME_BODY = '#tooltips:fortification/fortClanBattleList/battleTime/body'
FORTIFICATION_FORTCLANBATTLEROOM_ORDERSDISABLED_DIVISIONMISMATCH_BODY = '#tooltips:fortification/fortClanBattleRoom/ordersDisabled/divisionMismatch/body'
FORTIFICATION_CHOICEDIVISION_PLAYERRANGE_HEADER = '#tooltips:fortification/choiceDivision/playerRange/header'
FORTIFICATION_CHOICEDIVISION_PLAYERRANGE_BODY = '#tooltips:fortification/choiceDivision/playerRange/body'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_LEVEL_HEADER = '#tooltips:fortification/intelligenceWindow/sortBtn/level/header'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_LEVEL_BODY = '#tooltips:fortification/intelligenceWindow/sortBtn/level/body'
FORTIFICATION_INTELLIGENCEWINDOW_TAGSEARCHTEXTINPUT_HEADER = '#tooltips:fortification/intelligenceWindow/tagSearchTextInput/header'
FORTIFICATION_INTELLIGENCEWINDOW_TAGSEARCHTEXTINPUT_BODY = '#tooltips:fortification/intelligenceWindow/tagSearchTextInput/body'
FORTIFICATION_INTELLIGENCEWINDOW_TAGSEARCHTEXTINPUT_CN_HEADER = '#tooltips:fortification/intelligenceWindow/tagSearchTextInput/CN/header'
FORTIFICATION_INTELLIGENCEWINDOW_TAGSEARCHTEXTINPUT_CN_BODY = '#tooltips:fortification/intelligenceWindow/tagSearchTextInput/CN/body'
FORTIFICATION_INTELLIGENCEWINDOW_TAGSEARCHTEXTINPUT_KR_HEADER = '#tooltips:fortification/intelligenceWindow/tagSearchTextInput/KR/header'
FORTIFICATION_INTELLIGENCEWINDOW_TAGSEARCHTEXTINPUT_KR_BODY = '#tooltips:fortification/intelligenceWindow/tagSearchTextInput/KR/body'
FORTIFICATION_INTELLIGENCEWINDOW_TAGSEARCHBUTTON_HEADER = '#tooltips:fortification/intelligenceWindow/tagSearchButton/header'
FORTIFICATION_INTELLIGENCEWINDOW_TAGSEARCHBUTTON_BODY = '#tooltips:fortification/intelligenceWindow/tagSearchButton/body'
FORTIFICATION_INTELLIGENCEWINDOW_CLEARFILTERBTN_HEADER = '#tooltips:fortification/intelligenceWindow/clearFilterBtn/header'
FORTIFICATION_INTELLIGENCEWINDOW_CLEARFILTERBTN_BODY = '#tooltips:fortification/intelligenceWindow/clearFilterBtn/body'
FORTIFICATION_INTELLIGENCEWINDOW_FILTERBUTTON_HEADER = '#tooltips:fortification/intelligenceWindow/filterButton/header'
FORTIFICATION_INTELLIGENCEWINDOW_FILTERBUTTON_BODY = '#tooltips:fortification/intelligenceWindow/filterButton/body'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_CLANTAG_HEADER = '#tooltips:fortification/intelligenceWindow/sortBtn/clanTag/header'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_CLANTAG_BODY = '#tooltips:fortification/intelligenceWindow/sortBtn/clanTag/body'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_CLANTAG_NOTE = '#tooltips:fortification/intelligenceWindow/sortBtn/clanTag/note'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_DEFENCETIME_HEADER = '#tooltips:fortification/intelligenceWindow/sortBtn/defenceTime/header'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_DEFENCETIME_BODY = '#tooltips:fortification/intelligenceWindow/sortBtn/defenceTime/body'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_BUILDINGS_HEADER = '#tooltips:fortification/intelligenceWindow/sortBtn/buildings/header'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_BUILDINGS_BODY = '#tooltips:fortification/intelligenceWindow/sortBtn/buildings/body'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_AVAILABILITY_HEADER = '#tooltips:fortification/intelligenceWindow/sortBtn/availability/header'
FORTIFICATION_INTELLIGENCEWINDOW_SORTBTN_AVAILABILITY_BODY = '#tooltips:fortification/intelligenceWindow/sortBtn/availability/body'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_CALENDARBTN = '#tooltips:fortification/fortIntelligenceClanDescription/calendarBtn'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_CALENDARBTN_CANTATTACK = '#tooltips:fortification/fortIntelligenceClanDescription/calendarBtn/cantAttack'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_LINKBTN = '#tooltips:fortification/fortIntelligenceClanDescription/linkBtn'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_ADDTOFAVORITE = '#tooltips:fortification/fortIntelligenceClanDescription/addToFavorite'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_REMOVEFROMFAVORITE = '#tooltips:fortification/fortIntelligenceClanDescription/removeFromFavorite'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_MAXFAVORITES = '#tooltips:fortification/fortIntelligenceClanDescription/maxFavorites'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_BATTLES_HEADER = '#tooltips:fortification/fortIntelligenceClanDescription/battles/header'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_BATTLES_BODY = '#tooltips:fortification/fortIntelligenceClanDescription/battles/body'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_WINS_HEADER = '#tooltips:fortification/fortIntelligenceClanDescription/wins/header'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_WINS_BODY = '#tooltips:fortification/fortIntelligenceClanDescription/wins/body'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_AVGDEFRES_HEADER = '#tooltips:fortification/fortIntelligenceClanDescription/avgDefRes/header'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_AVGDEFRES_BODY = '#tooltips:fortification/fortIntelligenceClanDescription/avgDefRes/body'
FORTIFICATION_FORTINTELLIGENCECLANDESCRIPTION_WARTIME = '#tooltips:fortification/fortIntelligenceClanDescription/warTime'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_OFFENSE_HEADER = '#tooltips:fortification/fortBattleDirectionPopover/offense/header'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_OFFENSE_BODY = '#tooltips:fortification/fortBattleDirectionPopover/offense/body'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_DEFENSE_HEADER = '#tooltips:fortification/fortBattleDirectionPopover/defense/header'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_DEFENSE_BODY = '#tooltips:fortification/fortBattleDirectionPopover/defense/body'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_ISINBATTLE_HEADER = '#tooltips:fortification/fortBattleDirectionPopover/isInBattle/header'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_ISINBATTLE_BODY = '#tooltips:fortification/fortBattleDirectionPopover/isInBattle/body'
FORTIFICATION_BATTLENOTIFIER_OFFENSE_HEADER = '#tooltips:fortification/battleNotifier/offense/header'
FORTIFICATION_BATTLENOTIFIER_OFFENSE_BODY = '#tooltips:fortification/battleNotifier/offense/body'
FORTIFICATION_BATTLENOTIFIER_DEFENSE_HEADER = '#tooltips:fortification/battleNotifier/defense/header'
FORTIFICATION_BATTLENOTIFIER_DEFENSE_BODY = '#tooltips:fortification/battleNotifier/defense/body'
FORTIFICATION_BATTLENOTIFIER_OFFANDDEF_HEADER = '#tooltips:fortification/battleNotifier/offAndDef/header'
FORTIFICATION_BATTLENOTIFIER_OFFANDDEF_BODY = '#tooltips:fortification/battleNotifier/offAndDef/body'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_JOINBTN_HEADER = '#tooltips:fortification/fortBattleDirectionPopover/joinBtn/header'
FORTIFICATION_FORTBATTLEDIRECTIONPOPOVER_JOINBTN_BODY = '#tooltips:fortification/fortBattleDirectionPopover/joinBtn/body'
FORTIFICATION_FORTSETTINGSPERIPHERYPOPOVER_APPLYBTN_ENABLED = '#tooltips:fortification/fortSettingsPeripheryPopover/applyBtn/enabled'
FORTIFICATION_FORTSETTINGSPERIPHERYPOPOVER_APPLYBTN_DISABLED = '#tooltips:fortification/fortSettingsPeripheryPopover/applyBtn/disabled'
FORTIFICATION_FORTSETTINGSDEFENCEHOURPOPOVER_APPLYBTN_ENABLED = '#tooltips:fortification/fortSettingsDefenceHourPopover/applyBtn/enabled'
FORTIFICATION_FORTSETTINGSDEFENCEHOURPOPOVER_APPLYBTN_DISABLED = '#tooltips:fortification/fortSettingsDefenceHourPopover/applyBtn/disabled'
FORTIFICATION_FORTSETTINGSVACATIONPOPOVER_APPLYBTN_BODY = '#tooltips:fortification/fortSettingsVacationPopover/applyBtn/body'
FORTIFICATION_FORTSETTINGSVACATIONPOPOVER_APPLYBTN_DISABLE_BODY = '#tooltips:fortification/fortSettingsVacationPopover/applyBtn/disable/body'
FORTIFICATION_FORTSETTINGSWINDOW_DISABLEDEFENCEPERIOD_HEADER = '#tooltips:fortification/fortSettingsWindow/disableDefencePeriod/header'
FORTIFICATION_FORTSETTINGSWINDOW_DISABLEDEFENCEPERIOD_BODY = '#tooltips:fortification/fortSettingsWindow/disableDefencePeriod/body'
FORTIFICATION_FORTSETTINGSWINDOW_INFOICON_HEADER = '#tooltips:fortification/fortSettingsWindow/infoIcon/header'
FORTIFICATION_FORTSETTINGSWINDOW_INFOICON_BODY = '#tooltips:fortification/fortSettingsWindow/infoIcon/body'
FORTIFICATION_FORTSETTINGSWINDOW_DISABLEBUTTON = '#tooltips:fortification/fortSettingsWindow/disableButton'
FORTIFICATION_FORTSETTINGSWINDOW_STATUSSTRING_ACTIVATED = '#tooltips:fortification/fortSettingsWindow/statusString/activated'
FORTIFICATION_FORTSETTINGSWINDOW_STATUSSTRING_FREEZED = '#tooltips:fortification/fortSettingsWindow/statusString/freezed'
FORTIFICATION_FORTSETTINGSWINDOW_STATUSSTRING_CANBEACTIVATED = '#tooltips:fortification/fortSettingsWindow/statusString/canBeActivated'
FORTIFICATION_FORTSETTINGSWINDOW_STATUSSTRING_CANNOTBEACTIVATED = '#tooltips:fortification/fortSettingsWindow/statusString/canNotBeActivated'
FORTIFICATION_FORTSETTINGSWINDOW_PEREPHERYBTN_HEADER = '#tooltips:fortification/fortSettingsWindow/perepheryBtn/header'
FORTIFICATION_FORTSETTINGSWINDOW_PEREPHERYBTN_ENABLED_BODY = '#tooltips:fortification/fortSettingsWindow/perepheryBtn/enabled/body'
FORTIFICATION_FORTSETTINGSWINDOW_PEREPHERYBTN_DISABLED_BODY = '#tooltips:fortification/fortSettingsWindow/perepheryBtn/disabled/body'
FORTIFICATION_FORTSETTINGSWINDOW_DEFENCEBTNENABLED_HEADER = '#tooltips:fortification/fortSettingsWindow/defenceBtnEnabled/header'
FORTIFICATION_FORTSETTINGSWINDOW_DEFENCEBTNENABLED_BODY = '#tooltips:fortification/fortSettingsWindow/defenceBtnEnabled/body'
FORTIFICATION_FORTSETTINGSWINDOW_DEFENCEBTNDISABLED_HEADER = '#tooltips:fortification/fortSettingsWindow/defenceBtnDisabled/header'
FORTIFICATION_FORTSETTINGSWINDOW_DEFENCEBTNDISABLED_BODY = '#tooltips:fortification/fortSettingsWindow/defenceBtnDisabled/body'
FORTIFICATION_FORTSETTINGSWINDOW_WEEKENDBTNENABLED_HEADER = '#tooltips:fortification/fortSettingsWindow/weekEndBtnEnabled/header'
FORTIFICATION_FORTSETTINGSWINDOW_WEEKENDBTNENABLED_BODY = '#tooltips:fortification/fortSettingsWindow/weekEndBtnEnabled/body'
FORTIFICATION_FORTSETTINGSWINDOW_WEEKENDBTNDISABLED_HEADER = '#tooltips:fortification/fortSettingsWindow/weekEndBtnDisabled/header'
FORTIFICATION_FORTSETTINGSWINDOW_WEEKENDBTNDISABLED_BODY = '#tooltips:fortification/fortSettingsWindow/weekEndBtnDisabled/body'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNENABLED_HEADER = '#tooltips:fortification/fortSettingsWindow/vacationBtnEnabled/header'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNENABLED_BODY = '#tooltips:fortification/fortSettingsWindow/vacationBtnEnabled/body'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNDISABLEDNOTPLANNED_HEADER = '#tooltips:fortification/fortSettingsWindow/vacationBtnDisabledNotPlanned/header'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNDISABLEDNOTPLANNED_BODY = '#tooltips:fortification/fortSettingsWindow/vacationBtnDisabledNotPlanned/body'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNDSBLDLESSADAY_HEADER = '#tooltips:fortification/fortSettingsWindow/vacationBtnDsbldLessADay/header'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNDSBLDLESSADAY_BODY = '#tooltips:fortification/fortSettingsWindow/vacationBtnDsbldLessADay/body'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNDISABLED_HEADER = '#tooltips:fortification/fortSettingsWindow/vacationBtnDisabled/header'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONBTNDISABLED_BODY = '#tooltips:fortification/fortSettingsWindow/vacationBtnDisabled/body'
FORTIFICATION_FORTSETTINGSWINDOW_DEFENCEPERIODDESCRIPTION_HEADER = '#tooltips:fortification/fortSettingsWindow/defencePeriodDescription/header'
FORTIFICATION_FORTSETTINGSWINDOW_DEFENCEPERIODDESCRIPTION_BODY = '#tooltips:fortification/fortSettingsWindow/defencePeriodDescription/body'
FORTIFICATION_FORTSETTINGSWINDOW_DAYOFFDESCRIPTION_HEADER = '#tooltips:fortification/fortSettingsWindow/dayOffDescription/header'
FORTIFICATION_FORTSETTINGSWINDOW_DAYOFFDESCRIPTION_BODY = '#tooltips:fortification/fortSettingsWindow/dayOffDescription/body'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONDESCRIPTION_HEADER = '#tooltips:fortification/fortSettingsWindow/vacationDescription/header'
FORTIFICATION_FORTSETTINGSWINDOW_VACATIONDESCRIPTION_BODY = '#tooltips:fortification/fortSettingsWindow/vacationDescription/body'
FORTIFICATION_FORTSETTINGSWINDOW_PERIPHERYDESCRIPTION_HEADER = '#tooltips:fortification/fortSettingsWindow/peripheryDescription/header'
FORTIFICATION_FORTSETTINGSWINDOW_PERIPHERYDESCRIPTION_BODY = '#tooltips:fortification/fortSettingsWindow/peripheryDescription/body'
FORTIFICATION_FORTSETTINGSDAYOFFPOPOVER_APPLY_ENABLED_BODY = '#tooltips:fortification/FortSettingsDayoffPopover/apply/enabled/body'
FORTIFICATION_FORTSETTINGSDAYOFFPOPOVER_APPLY_DISABLED_BODY = '#tooltips:fortification/FortSettingsDayoffPopover/apply/disabled/body'
FORTIFICATION_FORTCREATEDIRWIN_NEWDIRBTN_ENABLED_HEADER = '#tooltips:fortification/fortCreateDirWin/newDirBtn/enabled/header'
FORTIFICATION_FORTCREATEDIRWIN_NEWDIRBTN_ENABLED_BODY = '#tooltips:fortification/fortCreateDirWin/newDirBtn/enabled/body'
FORTIFICATION_FORTCREATEDIRWIN_NEWDIRBTN_DISABLEDBYFROZEN_HEADER = '#tooltips:fortification/fortCreateDirWin/newDirBtn/disabledByFrozen/header'
FORTIFICATION_FORTCREATEDIRWIN_NEWDIRBTN_DISABLEDBYFROZEN_BODY = '#tooltips:fortification/fortCreateDirWin/newDirBtn/disabledByFrozen/body'
FORTIFICATION_FORTCREATEDIRWIN_NEWDIRBTN_DISABLEDBYPLAYERS_HEADER = '#tooltips:fortification/fortCreateDirWin/newDirBtn/disabledByPlayers/header'
FORTIFICATION_FORTCREATEDIRWIN_NEWDIRBTN_DISABLEDBYPLAYERS_BODY = '#tooltips:fortification/fortCreateDirWin/newDirBtn/disabledByPlayers/body'
FORTIFICATION_FORTBUILDINGCARDPOPOVER_MAPINFO_HEADER = '#tooltips:fortification/fortBuildingCardPopover/mapInfo/header'
FORTIFICATION_FORTBUILDINGCARDPOPOVER_MAPINFO_BODY = '#tooltips:fortification/fortBuildingCardPopover/mapInfo/body'
FORTIFICATION_FORTBUILDINGCARDPOPOVER_MAPINFO_NOBATTLE_HEADER = '#tooltips:fortification/fortBuildingCardPopover/mapInfo/noBattle/header'
FORTIFICATION_FORTBUILDINGCARDPOPOVER_MAPINFO_NOBATTLE_BODY = '#tooltips:fortification/fortBuildingCardPopover/mapInfo/noBattle/body'
REFERRALMANAGEMENTWINDOW_CREATESQUADBTN_ENABLED_HEADER = '#tooltips:ReferralManagementWindow/createSquadBtn/enabled/header'
REFERRALMANAGEMENTWINDOW_CREATESQUADBTN_ENABLED_BODY = '#tooltips:ReferralManagementWindow/createSquadBtn/enabled/body'
REFERRALMANAGEMENTWINDOW_CREATESQUADBTN_DISABLED_ISOFFLINE_HEADER = '#tooltips:ReferralManagementWindow/createSquadBtn/disabled/isOffline/header'
REFERRALMANAGEMENTWINDOW_CREATESQUADBTN_DISABLED_ISOFFLINE_BODY = '#tooltips:ReferralManagementWindow/createSquadBtn/disabled/isOffline/body'
REFERRALMANAGEMENTWINDOW_CREATESQUADBTN_DISABLED_SQUADISFULL_HEADER = '#tooltips:ReferralManagementWindow/createSquadBtn/disabled/squadIsFull/header'
REFERRALMANAGEMENTWINDOW_CREATESQUADBTN_DISABLED_SQUADISFULL_BODY = '#tooltips:ReferralManagementWindow/createSquadBtn/disabled/squadIsFull/body'
REFERRALMANAGEMENTWINDOW_TABLE_EXPERIENCE_HEADER = '#tooltips:ReferralManagementWindow/table/experience/header'
REFERRALMANAGEMENTWINDOW_TABLE_EXPERIENCE_BODY = '#tooltips:ReferralManagementWindow/table/experience/body'
REFERRALMANAGEMENTWINDOW_MULTIPLIER_X1_HEADER = '#tooltips:ReferralManagementWindow/multiplier/x1/header'
REFERRALMANAGEMENTWINDOW_MULTIPLIER_X1_BODY = '#tooltips:ReferralManagementWindow/multiplier/x1/body'
TOOLTIPREFSYSAWARDS_TITLE_GENERAL = '#tooltips:ToolTipRefSysAwards/title/general'
TOOLTIPREFSYSAWARDS_TITLE_CREDITS = '#tooltips:ToolTipRefSysAwards/title/credits'
TOOLTIPREFSYSAWARDS_TITLE_TANKMAN = '#tooltips:ToolTipRefSysAwards/title/tankman'
TOOLTIPREFSYSAWARDS_TITLE_UNIQUETANK = '#tooltips:ToolTipRefSysAwards/title/uniqueTank'
TOOLTIPREFSYSAWARDS_INFOBODY_REQUIREMENTS = '#tooltips:ToolTipRefSysAwards/infoBody/requirements'
TOOLTIPREFSYSAWARDS_INFOBODY_REQUIREMENTS_NOTENOUGHMSG = '#tooltips:ToolTipRefSysAwards/infoBody/requirements/notEnoughMsg'
TOOLTIPREFSYSAWARDS_INFOBODY_REQUIREMENTS_NOTENOUGH = '#tooltips:ToolTipRefSysAwards/infoBody/requirements/notEnough'
TOOLTIPREFSYSAWARDS_INFOBODY_CONDITIONS = '#tooltips:ToolTipRefSysAwards/infoBody/conditions'
TOOLTIPREFSYSAWARDS_INFOBODY_ACCESS = '#tooltips:ToolTipRefSysAwards/infoBody/access'
TOOLTIPREFSYSAWARDS_INFOBODY_NOTACCESS = '#tooltips:ToolTipRefSysAwards/infoBody/notAccess'
TOOLTIPREFSYSDESCRIPTION_HEADER_TITLETF = '#tooltips:ToolTipRefSysDescription/header/titleTF'
TOOLTIPREFSYSDESCRIPTION_HEADER_ACTIONTF = '#tooltips:ToolTipRefSysDescription/header/actionTF'
TOOLTIPREFSYSDESCRIPTION_HEADER_AWARDSTITLETF = '#tooltips:ToolTipRefSysDescription/header/awardsTitleTF'
TOOLTIPREFSYSDESCRIPTION_AWARDS_DESCR_CREDITS = '#tooltips:ToolTipRefSysDescription/awards/descr/credits'
TOOLTIPREFSYSDESCRIPTION_AWARDS_DESCR_TANKMAN = '#tooltips:ToolTipRefSysDescription/awards/descr/tankman'
TOOLTIPREFSYSDESCRIPTION_AWARDS_DESCR_VEHICLEWITHTANKMEN = '#tooltips:ToolTipRefSysDescription/awards/descr/vehicleWithTankmen'
TOOLTIPREFSYSDESCRIPTION_BOTTOM_BOTTOMTF = '#tooltips:ToolTipRefSysDescription/bottom/bottomTF'
TOOLTIPREFSYSXPMULTIPLIER_TITLE = '#tooltips:ToolTipRefSysXPMultiplier/title'
TOOLTIPREFSYSXPMULTIPLIER_DESCRIPTION = '#tooltips:ToolTipRefSysXPMultiplier/description'
TOOLTIPREFSYSXPMULTIPLIER_CONDITIONS = '#tooltips:ToolTipRefSysXPMultiplier/conditions'
TOOLTIPREFSYSXPMULTIPLIER_CONDITIONS_HOURS = '#tooltips:ToolTipRefSysXPMultiplier/conditions/hours'
TOOLTIPREFSYSXPMULTIPLIER_CONDITIONS_DAYS = '#tooltips:ToolTipRefSysXPMultiplier/conditions/days'
TOOLTIPREFSYSXPMULTIPLIER_CONDITIONS_OTHER = '#tooltips:ToolTipRefSysXPMultiplier/conditions/other'
TOOLTIPREFSYSXPMULTIPLIER_BOTTOM = '#tooltips:ToolTipRefSysXPMultiplier/bottom'
CALENDAR_NEXTMONTH = '#tooltips:Calendar/nextMonth'
CALENDAR_PREVMONTH = '#tooltips:Calendar/prevMonth'
FORTWRONGTIME_HEADER = '#tooltips:fortWrongTime/header'
FORTWRONGTIME_BODY = '#tooltips:fortWrongTime/body'
FORTWRONGTIME_LOCKTIME_HEADER = '#tooltips:fortWrongTime/lockTime/header'
FORTWRONGTIME_LOCKTIME_BODY = '#tooltips:fortWrongTime/lockTime/body'
FORTIFICATION_FORTORDERSELECTPOPOVER_ARSENALICON_HEADER = '#tooltips:fortification/FortOrderSelectPopover/arsenalIcon/header'
FORTIFICATION_FORTORDERSELECTPOPOVER_ARSENALICON_BODY = '#tooltips:fortification/FortOrderSelectPopover/arsenalIcon/body'
BATTLERESULTS_CYBERSPORT_POINTS_WIN_HEADER = '#tooltips:battleResults/CyberSport/points/win/header'
BATTLERESULTS_CYBERSPORT_POINTS_WIN_BODY = '#tooltips:battleResults/CyberSport/points/win/body'
BATTLERESULTS_CYBERSPORT_POINTS_LOSE_HEADER = '#tooltips:battleResults/CyberSport/points/lose/header'
BATTLERESULTS_CYBERSPORT_POINTS_LOSE_BODY = '#tooltips:battleResults/CyberSport/points/lose/body'
BATTLERESULTS_FORTRESOURCE_LEGIONER_BODY = '#tooltips:battleResults/FortResource/legioner/body'
BATTLERESULTS_FORTRESOURCE_TEAMINFLUENCETOTAL_HEADER = '#tooltips:battleResults/FortResource/teamInfluenceTotal/header'
BATTLERESULTS_FORTRESOURCE_TEAMINFLUENCETOTAL_BODY = '#tooltips:battleResults/FortResource/teamInfluenceTotal/body'
BATTLERESULTS_FORTRESOURCE_TEAMRESOURCETOTAL_HEADER = '#tooltips:battleResults/FortResource/teamResourceTotal/header'
BATTLERESULTS_FORTRESOURCE_TEAMRESOURCETOTAL_BODY = '#tooltips:battleResults/FortResource/teamResourceTotal/body'
BATTLERESULTS_FORTRESOURCE_RESULTSSHAREBTN_HEADER = '#tooltips:battleResults/FortResource/resultsShareBtn/header'
BATTLERESULTS_FORTRESOURCE_RESULTSSHAREBTN_BODY = '#tooltips:battleResults/FortResource/resultsShareBtn/body'
BATTLERESULTS_VICTORYSCOREDESCRIPTION_HEADER = '#tooltips:battleResults/victoryScoreDescription/header'
BATTLERESULTS_VICTORYSCOREDESCRIPTION_BODY = '#tooltips:battleResults/victoryScoreDescription/body'
BATTLERESULTS_VICTORYSCOREDESCRIPTION_COST = '#tooltips:battleResults/victoryScoreDescription/cost'
BATTLERESULTS_VICTORYSCOREDESCRIPTION_KILLSPATTERN = '#tooltips:battleResults/victoryScoreDescription/killsPattern'
BATTLERESULTS_VICTORYSCOREDESCRIPTION_POINTSPATTERN = '#tooltips:battleResults/victoryScoreDescription/pointsPattern'
BATTLERESULTS_VICTORYSCOREDESCRIPTION_DAMAGEPATTERN = '#tooltips:battleResults/victoryScoreDescription/damagePattern'
BATTLERESULTS_EFFICIENCYHEADER_SUMMSPOTTED = '#tooltips:battleResults/efficiencyHeader/summSpotted'
BATTLERESULTS_EFFICIENCYHEADER_SUMMASSIST = '#tooltips:battleResults/efficiencyHeader/summAssist'
BATTLERESULTS_EFFICIENCYHEADER_SUMMARMOR = '#tooltips:battleResults/efficiencyHeader/summArmor'
BATTLERESULTS_EFFICIENCYHEADER_SUMMCRITS = '#tooltips:battleResults/efficiencyHeader/summCrits'
BATTLERESULTS_EFFICIENCYHEADER_SUMMDAMAGE = '#tooltips:battleResults/efficiencyHeader/summDamage'
BATTLERESULTS_EFFICIENCYHEADER_SUMMKILL = '#tooltips:battleResults/efficiencyHeader/summKill'
BATTLERESULTS_EFFICIENCYHEADER_SUMMSTUN = '#tooltips:battleResults/efficiencyHeader/summStun'
BATTLERESULTS_EFFICIENCYHEADER_VALUE = '#tooltips:battleResults/efficiencyHeader/value'
CONTACT_STATUS_INBATTLE_UNKNOWN = '#tooltips:Contact/status/inBattle/unknown'
CONTACT_STATUS_INBATTLE_RANDOM = '#tooltips:Contact/status/inBattle/random'
CONTACT_STATUS_INBATTLE_TRAINING = '#tooltips:Contact/status/inBattle/training'
CONTACT_STATUS_INBATTLE_TEAM = '#tooltips:Contact/status/inBattle/team'
CONTACT_STATUS_INBATTLE_TUTORIAL = '#tooltips:Contact/status/inBattle/tutorial'
CONTACT_STATUS_INBATTLE_TEAM7X7 = '#tooltips:Contact/status/inBattle/team7x7'
CONTACT_STATUS_INBATTLE_HISTORICAL = '#tooltips:Contact/status/inBattle/historical'
CONTACT_STATUS_INBATTLE_FORTIFICATIONS = '#tooltips:Contact/status/inBattle/fortifications'
CONTACT_STATUS_INBATTLE_SPECIAL = '#tooltips:Contact/status/inBattle/special'
CONTACT_STATUS_ONLINE = '#tooltips:Contact/status/online'
CONTACT_RESOURCE_WOWP = '#tooltips:Contact/resource/wowp'
CONTACT_RESOURCE_WOWS = '#tooltips:Contact/resource/wows'
CONTACT_RESOURCE_WOTG = '#tooltips:Contact/resource/wotg'
CONTACT_RESOURCE_WOTB = '#tooltips:Contact/resource/wotb'
CONTACT_RESOURCE_WEB = '#tooltips:Contact/resource/web'
CONTACT_RESOURCE_MOB = '#tooltips:Contact/resource/mob'
CONTACT_UNITS_HOMEREALM = '#tooltips:Contact/units/homeRealm'
CONTACT_UNITS_CLAN = '#tooltips:Contact/units/clan'
CONTACT_UNITS_STATUS_DESCRIPTION_PENDINGFRIENDSHIP = '#tooltips:Contact/units/status/description/pendingfriendship'
CONTACT_UNITS_STATUS_DESCRIPTION_IGNORED = '#tooltips:Contact/units/status/description/ignored'
CONTACT_UNITS_STATUS_DESCRIPTION_CHATBAN = '#tooltips:Contact/units/status/description/chatBan'
CONTACT_UNITS_STATUS_DESCRIPTION_RECRUITER = '#tooltips:Contact/units/status/description/recruiter'
CONTACT_UNITS_STATUS_DESCRIPTION_RECRUIT = '#tooltips:Contact/units/status/description/recruit'
CONTACT_UNITS_GROUPS = '#tooltips:Contact/units/groups'
ROLECHANGE_CURRENTROLEWARNING_HEADER = '#tooltips:RoleChange/currentRoleWarning/header'
ROLECHANGE_CURRENTROLEWARNING_BODY = '#tooltips:RoleChange/currentRoleWarning/body'
ROLECHANGE_ROLETAKEN_HEADER = '#tooltips:RoleChange/roleTaken/header'
ROLECHANGE_ROLETAKEN_BODY = '#tooltips:RoleChange/roleTaken/body'
ROLECHANGE_ROLEANDVEHICLETAKEN_HEADER = '#tooltips:RoleChange/roleAndVehicleTaken/header'
ROLECHANGE_ROLEANDVEHICLETAKEN_BODY = '#tooltips:RoleChange/roleAndVehicleTaken/body'
ROLECHANGE_FOOTERINFO_HEADER = '#tooltips:RoleChange/footerInfo/header'
ROLECHANGE_FOOTERINFO_BODY = '#tooltips:RoleChange/footerInfo/body'
FORTORDERSPANELCOMPONENT_EMPTYSLOT_BODY = '#tooltips:FortOrdersPanelComponent/emptySlot/body'
HANGARTUTORIAL_RESEARCH_VEHICLEINFO_TITLE = '#tooltips:hangarTutorial/research/vehicleInfo/title'
HANGARTUTORIAL_RESEARCH_VEHICLEINFO_HEADERDESCRIPTION = '#tooltips:hangarTutorial/research/vehicleInfo/headerDescription'
HANGARTUTORIAL_RESEARCH_VEHICLEINFO_EXPTITLE = '#tooltips:hangarTutorial/research/vehicleInfo/expTitle'
HANGARTUTORIAL_RESEARCH_VEHICLEINFO_EXPDESCRIPTION = '#tooltips:hangarTutorial/research/vehicleInfo/expDescription'
HANGARTUTORIAL_RESEARCH_VEHICLEINFO_BATTLEEXPTITLE = '#tooltips:hangarTutorial/research/vehicleInfo/battleExpTitle'
HANGARTUTORIAL_RESEARCH_VEHICLEINFO_BATTLEEXPDESCRIPTION = '#tooltips:hangarTutorial/research/vehicleInfo/battleExpDescription'
HANGARTUTORIAL_RESEARCH_VEHICLEINFO_FREEEXPTITLE = '#tooltips:hangarTutorial/research/vehicleInfo/freeExpTitle'
HANGARTUTORIAL_RESEARCH_VEHICLEINFO_FREEEXPDESCRIPTION = '#tooltips:hangarTutorial/research/vehicleInfo/freeExpDescription'
HANGARTUTORIAL_RESEARCH_VEHICLEINFO_TOTALEXPTITLE = '#tooltips:hangarTutorial/research/vehicleInfo/totalExpTitle'
HANGARTUTORIAL_RESEARCH_MODULES_TITLE = '#tooltips:hangarTutorial/research/modules/title'
HANGARTUTORIAL_RESEARCH_MODULES_HEADERDESCRIPTION = '#tooltips:hangarTutorial/research/modules/headerDescription'
HANGARTUTORIAL_RESEARCH_MODULES_TYPESTITLE = '#tooltips:hangarTutorial/research/modules/typesTitle'
HANGARTUTORIAL_RESEARCH_MODULES_GUNTITLE = '#tooltips:hangarTutorial/research/modules/gunTitle'
HANGARTUTORIAL_RESEARCH_MODULES_GUNDESCRIPTION = '#tooltips:hangarTutorial/research/modules/gunDescription'
HANGARTUTORIAL_RESEARCH_MODULES_TURRETTITLE = '#tooltips:hangarTutorial/research/modules/turretTitle'
HANGARTUTORIAL_RESEARCH_MODULES_TURRETDESCRIPTION = '#tooltips:hangarTutorial/research/modules/turretDescription'
HANGARTUTORIAL_RESEARCH_MODULES_ENGINETITLE = '#tooltips:hangarTutorial/research/modules/engineTitle'
HANGARTUTORIAL_RESEARCH_MODULES_ENGINEDESCRIPTION = '#tooltips:hangarTutorial/research/modules/engineDescription'
HANGARTUTORIAL_RESEARCH_MODULES_CHASSISTITLE = '#tooltips:hangarTutorial/research/modules/chassisTitle'
HANGARTUTORIAL_RESEARCH_MODULES_CHASSISDESCRIPTION = '#tooltips:hangarTutorial/research/modules/chassisDescription'
HANGARTUTORIAL_RESEARCH_MODULES_RADIOSETTITLE = '#tooltips:hangarTutorial/research/modules/radiosetTitle'
HANGARTUTORIAL_RESEARCH_MODULES_RADIOSETDESCRIPTION = '#tooltips:hangarTutorial/research/modules/radiosetDescription'
HANGARTUTORIAL_RESEARCH_MODULES_ACTIONBUTTONSTITLE = '#tooltips:hangarTutorial/research/modules/actionButtonsTitle'
HANGARTUTORIAL_RESEARCH_MODULES_RESEARCHBUTTONDESCRIPTION = '#tooltips:hangarTutorial/research/modules/researchButtonDescription'
HANGARTUTORIAL_RESEARCH_MODULES_BUYBUTTONDESCRIPTION = '#tooltips:hangarTutorial/research/modules/buyButtonDescription'
HANGARTUTORIAL_RESEARCH_MODULES_INHANGARDESCRIPTION = '#tooltips:hangarTutorial/research/modules/inHangarDescription'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_TITLE = '#tooltips:hangarTutorial/customization/types/title'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_CAMOUFLAGEBLOCKTITLE = '#tooltips:hangarTutorial/customization/types/camouflageBlockTitle'
# Localization lookup keys for the '#tooltips' i18n namespace.
# NOTE(review): this table appears machine-generated from the string resources;
# edit the source string files rather than these constants — TODO confirm generator.
# --- Hangar tutorial: customization type blocks (camouflage seasons, emblems, labels) ---
HANGARTUTORIAL_CUSTOMIZATION_TYPES_CAMOUFLAGEBLOCKDESCRIPTION = '#tooltips:hangarTutorial/customization/types/camouflageBlockDescription'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_WINTERCAMOUFLAGETITLE = '#tooltips:hangarTutorial/customization/types/winterCamouflageTitle'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_WINTERCAMOUFLAGEDESCRIPTION = '#tooltips:hangarTutorial/customization/types/winterCamouflageDescription'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_SUMMERCAMOUFLAGETITLE = '#tooltips:hangarTutorial/customization/types/summerCamouflageTitle'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_SUMMERCAMOUFLAGEDESCRIPTION = '#tooltips:hangarTutorial/customization/types/summerCamouflageDescription'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_DESERTCAMOUFLAGETITLE = '#tooltips:hangarTutorial/customization/types/desertCamouflageTitle'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_DESERTCAMOUFLAGEDESCRIPTION = '#tooltips:hangarTutorial/customization/types/desertCamouflageDescription'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_EMBLEMBLOCKTITLE = '#tooltips:hangarTutorial/customization/types/emblemBlockTitle'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_EMBLEMBLOCKDESCRIPTION = '#tooltips:hangarTutorial/customization/types/emblemBlockDescription'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_LABELSBLOCKTITLE = '#tooltips:hangarTutorial/customization/types/labelsBlockTitle'
HANGARTUTORIAL_CUSTOMIZATION_TYPES_LABELSBLOCKDESCRIPTION = '#tooltips:hangarTutorial/customization/types/labelsBlockDescription'
# --- Hangar tutorial: misc hints (tech models, repair, customization duration/options/cost) ---
HANGARTUTORIAL_NEXTTECHMODELS_HEADER = '#tooltips:hangarTutorial/nextTechModels/header'
HANGARTUTORIAL_NEXTTECHMODELS_BODY = '#tooltips:hangarTutorial/nextTechModels/body'
HANGARTUTORIAL_TECHREPAIR_HEADER = '#tooltips:hangarTutorial/techRepair/header'
HANGARTUTORIAL_TECHREPAIR_BODY = '#tooltips:hangarTutorial/techRepair/body'
HANGARTUTORIAL_CUSTOMIZATIONDURATION_HEADER = '#tooltips:hangarTutorial/customizationDuration/header'
HANGARTUTORIAL_CUSTOMIZATIONDURATION_BODY = '#tooltips:hangarTutorial/customizationDuration/body'
HANGARTUTORIAL_CUSTOMIZATIONOPTIONS_HEADER = '#tooltips:hangarTutorial/customizationOptions/header'
HANGARTUTORIAL_CUSTOMIZATIONOPTIONS_BODY = '#tooltips:hangarTutorial/customizationOptions/body'
HANGARTUTORIAL_CUSTOMIZATIONCOST_HEADER = '#tooltips:hangarTutorial/customizationCost/header'
HANGARTUTORIAL_CUSTOMIZATIONCOST_BODY = '#tooltips:hangarTutorial/customizationCost/body'
# --- Hangar tutorial: nation names ---
HANGARTUTORIAL_NATIONS_TITLE = '#tooltips:hangarTutorial/nations/title'
HANGARTUTORIAL_NATIONS_USSR = '#tooltips:hangarTutorial/nations/ussr'
HANGARTUTORIAL_NATIONS_GERMANY = '#tooltips:hangarTutorial/nations/germany'
HANGARTUTORIAL_NATIONS_USA = '#tooltips:hangarTutorial/nations/usa'
HANGARTUTORIAL_NATIONS_FRANCE = '#tooltips:hangarTutorial/nations/france'
HANGARTUTORIAL_NATIONS_UK = '#tooltips:hangarTutorial/nations/uk'
HANGARTUTORIAL_NATIONS_CHINA = '#tooltips:hangarTutorial/nations/china'
HANGARTUTORIAL_NATIONS_JAPAN = '#tooltips:hangarTutorial/nations/japan'
HANGARTUTORIAL_NATIONS_CZECH = '#tooltips:hangarTutorial/nations/czech'
HANGARTUTORIAL_NATIONS_SWEDEN = '#tooltips:hangarTutorial/nations/sweden'
# --- Hangar tutorial: research tree (tech blocks, vehicle types, action buttons) ---
HANGARTUTORIAL_RESEARCHTREE_TITLE = '#tooltips:hangarTutorial/researchTree/title'
HANGARTUTORIAL_RESEARCHTREE_DESCRIPTION = '#tooltips:hangarTutorial/researchTree/description'
HANGARTUTORIAL_RESEARCHTREE_TECHBLOCK_COMMONTECH_TITLE = '#tooltips:hangarTutorial/researchTree/techBlock/commonTech/title'
HANGARTUTORIAL_RESEARCHTREE_TECHBLOCK_COMMONTECH_DESCRIPTION = '#tooltips:hangarTutorial/researchTree/techBlock/commonTech/description'
HANGARTUTORIAL_RESEARCHTREE_TECHBLOCK_PREMIUMTECH_TITLE = '#tooltips:hangarTutorial/researchTree/techBlock/premiumTech/title'
HANGARTUTORIAL_RESEARCHTREE_TECHBLOCK_PREMIUMTECH_DESCRIPTION = '#tooltips:hangarTutorial/researchTree/techBlock/premiumTech/description'
HANGARTUTORIAL_RESEARCHTREE_TYPESBLOCK_TITLE = '#tooltips:hangarTutorial/researchTree/typesBlock/title'
HANGARTUTORIAL_RESEARCHTREE_TYPESBLOCK_LIGHTTANK = '#tooltips:hangarTutorial/researchTree/typesBlock/lightTank'
HANGARTUTORIAL_RESEARCHTREE_TYPESBLOCK_MEDIUMTANK = '#tooltips:hangarTutorial/researchTree/typesBlock/mediumTank'
HANGARTUTORIAL_RESEARCHTREE_TYPESBLOCK_HEAVYTANK = '#tooltips:hangarTutorial/researchTree/typesBlock/heavyTank'
HANGARTUTORIAL_RESEARCHTREE_TYPESBLOCK_AT_SPG = '#tooltips:hangarTutorial/researchTree/typesBlock/AT-SPG'
HANGARTUTORIAL_RESEARCHTREE_TYPESBLOCK_SPG = '#tooltips:hangarTutorial/researchTree/typesBlock/SPG'
HANGARTUTORIAL_RESEARCHTREE_BUTTONSBLOCK_TITLE = '#tooltips:hangarTutorial/researchTree/buttonsBlock/title'
HANGARTUTORIAL_RESEARCHTREE_BUTTONSBLOCK_RESEARCH = '#tooltips:hangarTutorial/researchTree/buttonsBlock/research'
HANGARTUTORIAL_RESEARCHTREE_BUTTONSBLOCK_BUY = '#tooltips:hangarTutorial/researchTree/buttonsBlock/buy'
HANGARTUTORIAL_RESEARCHTREE_BUTTONSBLOCK_INHANGAR = '#tooltips:hangarTutorial/researchTree/buttonsBlock/inHangar'
HANGARTUTORIAL_RESEARCHTREE_BUTTONSBLOCK_ADDTOCOMPARE = '#tooltips:hangarTutorial/researchTree/buttonsBlock/addToCompare'
# --- Hangar tutorial: crew personal case (skills, perks, additional tabs) ---
HANGARTUTORIAL_PERSONALCASE_SKILLS_TITLE = '#tooltips:hangarTutorial/personalCase/skills/title'
HANGARTUTORIAL_PERSONALCASE_SKILLS_HEADERDESCRIPTION = '#tooltips:hangarTutorial/personalCase/skills/headerDescription'
HANGARTUTORIAL_PERSONALCASE_SKILLS_SPECIALTYTITLE = '#tooltips:hangarTutorial/personalCase/skills/specialtyTitle'
HANGARTUTORIAL_PERSONALCASE_SKILLS_SPECIALTYDESCRIPTION = '#tooltips:hangarTutorial/personalCase/skills/specialtyDescription'
HANGARTUTORIAL_PERSONALCASE_SKILLS_SPECIALTYWARNING = '#tooltips:hangarTutorial/personalCase/skills/specialtyWarning'
HANGARTUTORIAL_PERSONALCASE_PERKS_TITLE = '#tooltips:hangarTutorial/personalCase/perks/title'
HANGARTUTORIAL_PERSONALCASE_PERKS_HEADERDESCRIPTION = '#tooltips:hangarTutorial/personalCase/perks/headerDescription'
HANGARTUTORIAL_PERSONALCASE_PERKS_NEWPERKTITLE = '#tooltips:hangarTutorial/personalCase/perks/newPerkTitle'
HANGARTUTORIAL_PERSONALCASE_PERKS_NEWPERKDESCRIPTION = '#tooltips:hangarTutorial/personalCase/perks/newPerkDescription'
HANGARTUTORIAL_PERSONALCASE_ADDITIONAL_TITLE = '#tooltips:hangarTutorial/personalCase/additional/title'
HANGARTUTORIAL_PERSONALCASE_ADDITIONAL_RECORDTITLE = '#tooltips:hangarTutorial/personalCase/additional/recordTitle'
HANGARTUTORIAL_PERSONALCASE_ADDITIONAL_RECORDDESCRIPTION = '#tooltips:hangarTutorial/personalCase/additional/recordDescription'
HANGARTUTORIAL_PERSONALCASE_ADDITIONAL_TRAININGTITLE = '#tooltips:hangarTutorial/personalCase/additional/trainingTitle'
HANGARTUTORIAL_PERSONALCASE_ADDITIONAL_TRAININGDESCRIPTION = '#tooltips:hangarTutorial/personalCase/additional/trainingDescription'
HANGARTUTORIAL_PERSONALCASE_ADDITIONAL_PERKSTITLE = '#tooltips:hangarTutorial/personalCase/additional/perksTitle'
HANGARTUTORIAL_PERSONALCASE_ADDITIONAL_PERKSDESCRIPTION = '#tooltips:hangarTutorial/personalCase/additional/perksDescription'
HANGARTUTORIAL_PERSONALCASE_ADDITIONAL_DOCUMENTSTITLE = '#tooltips:hangarTutorial/personalCase/additional/documentsTitle'
HANGARTUTORIAL_PERSONALCASE_ADDITIONAL_DOCUMENTSDESCRIPTION = '#tooltips:hangarTutorial/personalCase/additional/documentsDescription'
# --- Hangar tutorial: ammunition (shell types) and equipment ---
HANGARTUTORIAL_AMMUNITION_TITLE = '#tooltips:hangarTutorial/ammunition/title'
HANGARTUTORIAL_AMMUNITION_DESCRIPTION = '#tooltips:hangarTutorial/ammunition/description'
HANGARTUTORIAL_AMMUNITION_TYPE_TITLE = '#tooltips:hangarTutorial/ammunition/type/title'
HANGARTUTORIAL_AMMUNITION_TYPE_PIERCING_TITLE = '#tooltips:hangarTutorial/ammunition/type/piercing/title'
HANGARTUTORIAL_AMMUNITION_TYPE_PIERCING_DESCRIPTION = '#tooltips:hangarTutorial/ammunition/type/piercing/description'
HANGARTUTORIAL_AMMUNITION_TYPE_HIGH_EXPLOSIVE_TITLE = '#tooltips:hangarTutorial/ammunition/type/high-explosive/title'
HANGARTUTORIAL_AMMUNITION_TYPE_HIGH_EXPLOSIVE_DESCRIPTION = '#tooltips:hangarTutorial/ammunition/type/high-explosive/description'
HANGARTUTORIAL_AMMUNITION_TYPE_SUBCALIBER_TITLE = '#tooltips:hangarTutorial/ammunition/type/subcaliber/title'
HANGARTUTORIAL_AMMUNITION_TYPE_SUBCALIBER_DESCRIPTION = '#tooltips:hangarTutorial/ammunition/type/subcaliber/description'
HANGARTUTORIAL_AMMUNITION_TYPE_CUMULATIVE_TITLE = '#tooltips:hangarTutorial/ammunition/type/cumulative/title'
HANGARTUTORIAL_AMMUNITION_TYPE_CUMULATIVE_DESCRIPTION = '#tooltips:hangarTutorial/ammunition/type/cumulative/description'
HANGARTUTORIAL_AMMUNITION_AMMOSET_TITLE = '#tooltips:hangarTutorial/ammunition/ammoSet/title'
HANGARTUTORIAL_AMMUNITION_AMMOSET_DESCRIPTION = '#tooltips:hangarTutorial/ammunition/ammoSet/description'
HANGARTUTORIAL_EQUIPMENT_TITLE = '#tooltips:hangarTutorial/equipment/title'
HANGARTUTORIAL_EQUIPMENT_DESCRIPTION = '#tooltips:hangarTutorial/equipment/description'
HANGARTUTORIAL_EQUIPMENT_PREM_TITLE = '#tooltips:hangarTutorial/equipment/prem/title'
HANGARTUTORIAL_EQUIPMENT_PREM_DESCRIPTION = '#tooltips:hangarTutorial/equipment/prem/description'
# --- Battle selector window: mode select buttons ---
BATTLESELECTORWINDOW_TOOLTIP_DOMINATION_SELECTBTN_HEADER = '#tooltips:battleSelectorWindow/tooltip/domination/selectBtn/header'
BATTLESELECTORWINDOW_TOOLTIP_DOMINATION_SELECTBTN_BODY = '#tooltips:battleSelectorWindow/tooltip/domination/selectBtn/body'
BATTLESELECTORWINDOW_TOOLTIP_MULTITEAM_SELECTBTN_HEADER = '#tooltips:battleSelectorWindow/tooltip/multiteam/selectBtn/header'
BATTLESELECTORWINDOW_TOOLTIP_MULTITEAM_SELECTBTN_BODY = '#tooltips:battleSelectorWindow/tooltip/multiteam/selectBtn/body'
# --- Boosters: carousel filters and the boosters window (quality, type, dates) ---
BOOSTER_QUESTLINKBTN_BODY = '#tooltips:booster/questLinkBtn/body'
BOOSTER_ACTIVEBTN_DISABLED_BODY = '#tooltips:booster/activeBtn/disabled/body'
BOOSTER_FILTERS_QUALITYSMALL_BODY = '#tooltips:booster/filters/qualitySmall/body'
BOOSTER_FILTERS_QUALITYMEDIUM_BODY = '#tooltips:booster/filters/qualityMedium/body'
BOOSTER_FILTERS_QUALITYBIG_BODY = '#tooltips:booster/filters/qualityBig/body'
BOOSTER_FILTERS_TYPEXP_BODY = '#tooltips:booster/filters/typeXp/body'
BOOSTER_FILTERS_TYPECREWXP_BODY = '#tooltips:booster/filters/typeCrewXp/body'
BOOSTER_FILTERS_TYPEFREEXP_BODY = '#tooltips:booster/filters/typeFreeXp/body'
BOOSTER_FILTERS_TYPECREDITS_BODY = '#tooltips:booster/filters/typeCredits/body'
BOOSTERSWINDOW_BOOSTER_DUEDATE_TITLE = '#tooltips:boostersWindow/booster/dueDate/title'
BOOSTERSWINDOW_BOOSTER_DUEDATE_VALUE = '#tooltips:boostersWindow/booster/dueDate/value'
BOOSTERSWINDOW_BOOSTER_DUEDATE_UNLIMITED = '#tooltips:boostersWindow/booster/dueDate/unlimited'
BOOSTERSWINDOW_BOOSTER_GETCONDITION_TITLE = '#tooltips:boostersWindow/booster/getCondition/title'
BOOSTERSWINDOW_BOOSTER_GETCONDITION_VALUE = '#tooltips:boostersWindow/booster/getCondition/value'
BOOSTERSWINDOW_BOOSTER_HEADER_XP = '#tooltips:boostersWindow/booster/header/xp'
BOOSTERSWINDOW_BOOSTER_HEADER_DESCRIPTION_XP = '#tooltips:boostersWindow/booster/header/description/xp'
BOOSTERSWINDOW_BOOSTER_HEADER_FREEXP = '#tooltips:boostersWindow/booster/header/freeXp'
BOOSTERSWINDOW_BOOSTER_HEADER_DESCRIPTION_FREEXP = '#tooltips:boostersWindow/booster/header/description/freeXp'
BOOSTERSWINDOW_BOOSTER_HEADER_CREWXP = '#tooltips:boostersWindow/booster/header/crewXp'
BOOSTERSWINDOW_BOOSTER_HEADER_DESCRIPTION_CREWXP = '#tooltips:boostersWindow/booster/header/description/crewXp'
BOOSTERSWINDOW_BOOSTER_HEADER_CREDITS = '#tooltips:boostersWindow/booster/header/credits'
BOOSTERSWINDOW_BOOSTER_HEADER_DESCRIPTION_CREDITS = '#tooltips:boostersWindow/booster/header/description/credits'
BOOSTERSWINDOW_BOOSTER_QUALITY_SMALL = '#tooltips:boostersWindow/booster/quality/small'
BOOSTERSWINDOW_BOOSTER_QUALITY_MEDIUM = '#tooltips:boostersWindow/booster/quality/medium'
BOOSTERSWINDOW_BOOSTER_QUALITY_BIG = '#tooltips:boostersWindow/booster/quality/big'
BOOSTERSWINDOW_BOOSTER_ACTIVE_TITLE = '#tooltips:boostersWindow/booster/active/title'
BOOSTERSWINDOW_BOOSTER_ACTIVE_VALUE = '#tooltips:boostersWindow/booster/active/value'
BOOSTERSPANEL_OPENBOOSTERSWINDOW_BODY = '#tooltips:boostersPanel/openBoostersWindow/body'
BOOSTERSPANEL_BOOSTERDESCRIPTION_NOTE = '#tooltips:boostersPanel/boosterDescription/note'
# --- Clan common-info tooltip: name, slogan, statistics, commander, activity flags ---
CLANCOMMONINFO_CLANNAME = '#tooltips:clanCommonInfo/ClanName'
CLANCOMMONINFO_SLOGAN = '#tooltips:clanCommonInfo/slogan'
CLANCOMMONINFO_STATRATING = '#tooltips:clanCommonInfo/statRating'
CLANCOMMONINFO_STATBATTLESCOUNT = '#tooltips:clanCommonInfo/statBattlesCount'
CLANCOMMONINFO_STATWINSPERCENT = '#tooltips:clanCommonInfo/statWinsPercent'
CLANCOMMONINFO_STATAVGEXP = '#tooltips:clanCommonInfo/statAvgExp'
CLANCOMMONINFO_COMMANDER = '#tooltips:clanCommonInfo/commander'
CLANCOMMONINFO_ACTIVITY = '#tooltips:clanCommonInfo/activity'
CLANCOMMONINFO_YES = '#tooltips:clanCommonInfo/yes'
CLANCOMMONINFO_NO = '#tooltips:clanCommonInfo/no'
# --- Award item tooltips: one header/body pair per reward kind
#     (currencies, XP and multipliers, slots, berths, tokens, crew members) ---
AWARDITEM_CREDITS_HEADER = '#tooltips:awardItem/credits/header'
AWARDITEM_CREDITS_BODY = '#tooltips:awardItem/credits/body'
AWARDITEM_GOLD_HEADER = '#tooltips:awardItem/gold/header'
AWARDITEM_GOLD_BODY = '#tooltips:awardItem/gold/body'
AWARDITEM_FREEXP_HEADER = '#tooltips:awardItem/freeXP/header'
AWARDITEM_FREEXP_BODY = '#tooltips:awardItem/freeXP/body'
AWARDITEM_PREMIUM_HEADER = '#tooltips:awardItem/premium/header'
AWARDITEM_PREMIUM_BODY = '#tooltips:awardItem/premium/body'
AWARDITEM_BATTLETOKEN_ONE_HEADER = '#tooltips:awardItem/battleToken/one/header'
AWARDITEM_BATTLETOKEN_ONE_BODY = '#tooltips:awardItem/battleToken/one/body'
AWARDITEM_BATTLETOKEN_SEVERAL_HEADER = '#tooltips:awardItem/battleToken/several/header'
AWARDITEM_BATTLETOKEN_SEVERAL_BODY = '#tooltips:awardItem/battleToken/several/body'
AWARDITEM_BATTLETOKEN_SEVERAL_LINE = '#tooltips:awardItem/battleToken/several/line'
AWARDITEM_BATTLETOKEN_DESCRIPTION = '#tooltips:awardItem/battleToken/description'
AWARDITEM_BERTHS_HEADER = '#tooltips:awardItem/berths/header'
AWARDITEM_BERTHS_BODY = '#tooltips:awardItem/berths/body'
AWARDITEM_CREDITSFACTOR_HEADER = '#tooltips:awardItem/creditsFactor/header'
AWARDITEM_CREDITSFACTOR_BODY = '#tooltips:awardItem/creditsFactor/body'
AWARDITEM_FREEXPFACTOR_HEADER = '#tooltips:awardItem/freeXPFactor/header'
AWARDITEM_FREEXPFACTOR_BODY = '#tooltips:awardItem/freeXPFactor/body'
AWARDITEM_TANKMENXP_HEADER = '#tooltips:awardItem/tankmenXP/header'
AWARDITEM_TANKMENXP_BODY = '#tooltips:awardItem/tankmenXP/body'
AWARDITEM_TANKMENXPFACTOR_HEADER = '#tooltips:awardItem/tankmenXPFactor/header'
AWARDITEM_TANKMENXPFACTOR_BODY = '#tooltips:awardItem/tankmenXPFactor/body'
AWARDITEM_XP_HEADER = '#tooltips:awardItem/xp/header'
AWARDITEM_XP_BODY = '#tooltips:awardItem/xp/body'
AWARDITEM_XPFACTOR_HEADER = '#tooltips:awardItem/xpFactor/header'
AWARDITEM_XPFACTOR_BODY = '#tooltips:awardItem/xpFactor/body'
AWARDITEM_SLOTS_HEADER = '#tooltips:awardItem/slots/header'
AWARDITEM_SLOTS_BODY = '#tooltips:awardItem/slots/body'
AWARDITEM_TOKENS_HEADER = '#tooltips:awardItem/tokens/header'
AWARDITEM_TOKENS_BODY = '#tooltips:awardItem/tokens/body'
AWARDITEM_TANKMEN_HEADER = '#tooltips:awardItem/tankmen/header'
AWARDITEM_TANKMEN_BODY = '#tooltips:awardItem/tankmen/body'
AWARDITEM_TANKWOMEN_HEADER = '#tooltips:awardItem/tankwomen/header'
AWARDITEM_TANKWOMEN_BODY = '#tooltips:awardItem/tankwomen/body'
# --- Customization screen: buttons, slots, carousel, filter popover, bonus panel ---
CUSTOMIZATION_HEADERCLOSEBTN_HEADER = '#tooltips:customization/headerCloseBtn/header'
CUSTOMIZATION_HEADERCLOSEBTN_BODY = '#tooltips:customization/headerCloseBtn/body'
CUSTOMIZATION_NOTENOUGHRESOURCES_HEADER = '#tooltips:customization/notEnoughResources/header'
CUSTOMIZATION_NOTENOUGHRESOURCES_BODY = '#tooltips:customization/notEnoughResources/body'
CUSTOMIZATION_BUYDISABLED_BODY = '#tooltips:customization/buyDisabled/body'
CUSTOMIZATION_SLOT_HEADER = '#tooltips:customization/slot/header'
CUSTOMIZATION_SLOT_BODY = '#tooltips:customization/slot/body'
CUSTOMIZATION_SLOTREMOVE_HEADER = '#tooltips:customization/slotRemove/header'
CUSTOMIZATION_SLOTREMOVE_BODY = '#tooltips:customization/slotRemove/body'
CUSTOMIZATION_CAROUSEL_FILTER_HEADER = '#tooltips:customization/carousel/filter/header'
CUSTOMIZATION_CAROUSEL_FILTER_BODY = '#tooltips:customization/carousel/filter/body'
CUSTOMIZATION_CAROUSEL_CHBPURCHASED_HEADER = '#tooltips:customization/carousel/chbPurchased/header'
CUSTOMIZATION_CAROUSEL_CHBPURCHASED_BODY = '#tooltips:customization/carousel/chbPurchased/body'
CUSTOMIZATION_CAROUSEL_DURATIONTYPE_HEADER = '#tooltips:customization/carousel/durationType/header'
CUSTOMIZATION_CAROUSEL_DURATIONTYPE_BODY = '#tooltips:customization/carousel/durationType/body'
CUSTOMIZATION_CAROUSEL_SLOT_SELECT_HEADER = '#tooltips:customization/carousel/slot/select/header'
CUSTOMIZATION_CAROUSEL_SLOT_SELECT_BODY = '#tooltips:customization/carousel/slot/select/body'
CUSTOMIZATION_CAROUSEL_SLOT_REMOVE_HEADER = '#tooltips:customization/carousel/slot/remove/header'
CUSTOMIZATION_CAROUSEL_SLOT_REMOVE_BODY = '#tooltips:customization/carousel/slot/remove/body'
CUSTOMIZATION_CAROUSEL_SLOT_CHALLENGE_HEADER = '#tooltips:customization/carousel/slot/challenge/header'
CUSTOMIZATION_CAROUSEL_SLOT_CHALLENGE_BODY = '#tooltips:customization/carousel/slot/challenge/body'
CUSTOMIZATION_FILTERPOPOVER_REFRESH_HEADER = '#tooltips:customization/filterPopover/refresh/header'
CUSTOMIZATION_FILTERPOPOVER_REFRESH_BODY = '#tooltips:customization/filterPopover/refresh/body'
CUSTOMIZATION_FILTERPOPOVER_BONUSDESCRIPTION_HEADER = '#tooltips:customization/filterPopover/bonusDescription/header'
CUSTOMIZATION_FILTERPOPOVER_BONUSDESCRIPTION_BODY = '#tooltips:customization/filterPopover/bonusDescription/body'
CUSTOMIZATION_BONUSPANEL_BONUS_HEADER = '#tooltips:customization/bonusPanel/bonus/header'
# NOTE(review): the two event/squad keys below sit in the middle of the
# customization group — order is kept as generated.
EVENT_SQUAD_DISABLE_HEADER = '#tooltips:event/squad/disable/header'
EVENT_SQUAD_DISABLE_BODY = '#tooltips:event/squad/disable/body'
CUSTOMIZATION_BONUSPANEL_BONUS_ENTIRECREW_BODY = '#tooltips:customization/bonusPanel/bonus/entireCrew/body'
CUSTOMIZATION_BONUSPANEL_BONUS_COMMANDER_BODY = '#tooltips:customization/bonusPanel/bonus/commander/body'
CUSTOMIZATION_BONUSPANEL_BONUS_AIMER_BODY = '#tooltips:customization/bonusPanel/bonus/aimer/body'
CUSTOMIZATION_BONUSPANEL_BONUS_DRIVER_BODY = '#tooltips:customization/bonusPanel/bonus/driver/body'
CUSTOMIZATION_BONUSPANEL_BONUS_RADIOMAN_BODY = '#tooltips:customization/bonusPanel/bonus/radioman/body'
CUSTOMIZATION_BONUSPANEL_BONUS_LOADER_BODY = '#tooltips:customization/bonusPanel/bonus/loader/body'
CUSTOMIZATION_BONUSPANEL_BONUS_MASKING_BODY = '#tooltips:customization/bonusPanel/bonus/masking/body'
CUSTOMIZATION_BONUSPANEL_BONUS_FOOTER = '#tooltips:customization/bonusPanel/bonus/footer'
CUSTOMIZATION_BUYWINDOW_COPY_HEADER = '#tooltips:customization/buyWindow/copy/header'
CUSTOMIZATION_BUYWINDOW_COPY_BODY = '#tooltips:customization/buyWindow/copy/body'
CUSTOMIZATION_SLOTREVERT_HEADER = '#tooltips:customization/slotRevert/header'
CUSTOMIZATION_SLOTREVERT_BODY = '#tooltips:customization/slotRevert/body'
# --- Tank filter counter and lobby-menu version info ---
TANKSFILTER_COUNTER_CLOSE_HEADER = '#tooltips:tanksFilter/counter/close/header'
TANKSFILTER_COUNTER_CLOSE_BODY = '#tooltips:tanksFilter/counter/close/body'
LOBBYMENU_VERSIONINFOBUTTON_MINICLIENT_HEADER = '#tooltips:lobbyMenu/versionInfoButton/miniclient/header'
LOBBYMENU_VERSIONINFOBUTTON_MINICLIENT_BODY = '#tooltips:lobbyMenu/versionInfoButton/miniclient/body'
# --- Vehicle parameter descriptions ('tank_params/desc/*'): one key per stat
#     shown in the vehicle characteristics tooltip ---
TANK_PARAMS_DESC_MAXHEALTH = '#tooltips:tank_params/desc/maxHealth'
TANK_PARAMS_DESC_VEHICLEWEIGHT = '#tooltips:tank_params/desc/vehicleWeight'
TANK_PARAMS_DESC_ENGINEPOWER = '#tooltips:tank_params/desc/enginePower'
TANK_PARAMS_DESC_ENGINEPOWERPERTON = '#tooltips:tank_params/desc/enginePowerPerTon'
TANK_PARAMS_DESC_SPEEDLIMITS = '#tooltips:tank_params/desc/speedLimits'
TANK_PARAMS_DESC_CHASSISROTATIONSPEED = '#tooltips:tank_params/desc/chassisRotationSpeed'
TANK_PARAMS_DESC_TIMEOFREACHING = '#tooltips:tank_params/desc/timeOfReaching'
TANK_PARAMS_DESC_ATTAINABLESPEED = '#tooltips:tank_params/desc/attainableSpeed'
TANK_PARAMS_DESC_HULLARMOR = '#tooltips:tank_params/desc/hullArmor'
TANK_PARAMS_DESC_TURRETARMOR = '#tooltips:tank_params/desc/turretArmor'
TANK_PARAMS_DESC_RELOADTIME = '#tooltips:tank_params/desc/reloadTime'
TANK_PARAMS_DESC_RELOADTIMESECS = '#tooltips:tank_params/desc/reloadTimeSecs'
TANK_PARAMS_DESC_PIERCINGPOWER = '#tooltips:tank_params/desc/piercingPower'
TANK_PARAMS_DESC_AVGPIERCINGPOWER = '#tooltips:tank_params/desc/avgPiercingPower'
TANK_PARAMS_DESC_DAMAGE = '#tooltips:tank_params/desc/damage'
TANK_PARAMS_DESC_AVGDAMAGE = '#tooltips:tank_params/desc/avgDamage'
TANK_PARAMS_AVGPARAMCOMMENT_DAMAGE = '#tooltips:tank_params/avgParamComment/damage'
TANK_PARAMS_AVGPARAMCOMMENT_PIERCINGPOWER = '#tooltips:tank_params/avgParamComment/piercingPower'
TANK_PARAMS_DESC_AVGDAMAGEPERMINUTE = '#tooltips:tank_params/desc/avgDamagePerMinute'
TANK_PARAMS_DESC_TURRETROTATIONSPEED = '#tooltips:tank_params/desc/turretRotationSpeed'
TANK_PARAMS_DESC_GUNROTATIONSPEED = '#tooltips:tank_params/desc/gunRotationSpeed'
TANK_PARAMS_DESC_CIRCULARVISIONRADIUS = '#tooltips:tank_params/desc/circularVisionRadius'
TANK_PARAMS_DESC_RADIODISTANCE = '#tooltips:tank_params/desc/radioDistance'
TANK_PARAMS_DESC_TURRETYAWLIMITS = '#tooltips:tank_params/desc/turretYawLimits'
TANK_PARAMS_DESC_PITCHLIMITS = '#tooltips:tank_params/desc/pitchLimits'
TANK_PARAMS_DESC_GUNYAWLIMITS = '#tooltips:tank_params/desc/gunYawLimits'
TANK_PARAMS_DESC_CLIPFIRERATE = '#tooltips:tank_params/desc/clipFireRate'
TANK_PARAMS_DESC_RELATIVEPOWER = '#tooltips:tank_params/desc/relativePower'
TANK_PARAMS_DESC_RELATIVEARMOR = '#tooltips:tank_params/desc/relativeArmor'
TANK_PARAMS_DESC_RELATIVEMOBILITY = '#tooltips:tank_params/desc/relativeMobility'
TANK_PARAMS_DESC_RELATIVEVISIBILITY = '#tooltips:tank_params/desc/relativeVisibility'
TANK_PARAMS_DESC_RELATIVECAMOUFLAGE = '#tooltips:tank_params/desc/relativeCamouflage'
TANK_PARAMS_DESC_SHOTDISPERSIONANGLE = '#tooltips:tank_params/desc/shotDispersionAngle'
TANK_PARAMS_DESC_AIMINGTIME = '#tooltips:tank_params/desc/aimingTime'
TANK_PARAMS_DESC_EXPLOSIONRADIUS = '#tooltips:tank_params/desc/explosionRadius'
TANK_PARAMS_DESC_INVISIBILITYSTILLFACTOR = '#tooltips:tank_params/desc/invisibilityStillFactor'
TANK_PARAMS_DESC_INVISIBILITYMOVINGFACTOR = '#tooltips:tank_params/desc/invisibilityMovingFactor'
TANK_PARAMS_DESC_SWITCHONTIME = '#tooltips:tank_params/desc/switchOnTime'
TANK_PARAMS_DESC_SWITCHOFFTIME = '#tooltips:tank_params/desc/switchOffTime'
TANK_PARAMS_DESC_STUNMINDURATION = '#tooltips:tank_params/desc/stunMinDuration'
TANK_PARAMS_DESC_STUNMAXDURATION = '#tooltips:tank_params/desc/stunMaxDuration'
# --- Vehicle-parameters tooltip: bonus/penalty templates and titles ---
VEHICLEPARAMS_TITLE_VALUETEMPLATE = '#tooltips:vehicleParams/title/valueTemplate'
VEHICLEPARAMS_POSSIBLEBONUSES_DESC = '#tooltips:vehicleParams/possibleBonuses/desc'
VEHICLEPARAMS_BONUSES_TITLE = '#tooltips:vehicleParams/bonuses/title'
VEHICLEPARAMS_PENALTIES_TITLE = '#tooltips:vehicleParams/penalties/title'
VEHICLEPARAMS_BONUS_ARTEFACT_TEMPLATE = '#tooltips:vehicleParams/bonus/artefact/template'
VEHICLEPARAMS_BONUS_SKILL_TEMPLATE = '#tooltips:vehicleParams/bonus/skill/template'
VEHICLEPARAMS_BONUS_ROLE_TEMPLATE = '#tooltips:vehicleParams/bonus/role/template'
VEHICLEPARAMS_BONUS_EXTRA_CAMOUFLAGEEXTRAS = '#tooltips:vehicleParams/bonus/extra/camouflageExtras'
VEHICLEPARAMS_SKILL_NAME = '#tooltips:vehicleParams/skill/name'
VEHICLEPARAMS_BONUS_SITUATIONAL = '#tooltips:vehicleParams/bonus/situational'
VEHICLEPARAMS_BONUS_TANKMANLEVEL_COMMANDER = '#tooltips:vehicleParams/bonus/tankmanLevel/commander'
VEHICLEPARAMS_BONUS_TANKMANLEVEL_GUNNER = '#tooltips:vehicleParams/bonus/tankmanLevel/gunner'
VEHICLEPARAMS_BONUS_TANKMANLEVEL_LOADER = '#tooltips:vehicleParams/bonus/tankmanLevel/loader'
VEHICLEPARAMS_BONUS_TANKMANLEVEL_DRIVER = '#tooltips:vehicleParams/bonus/tankmanLevel/driver'
VEHICLEPARAMS_BONUS_TANKMANLEVEL_RADIOMAN = '#tooltips:vehicleParams/bonus/tankmanLevel/radioman'
VEHICLEPARAMS_BONUS_POSSIBLE_NOTINSTALLED = '#tooltips:vehicleParams/bonus/possible/notInstalled'
VEHICLEPARAMS_PENALTY_TANKMANLEVEL_TEMPLATE = '#tooltips:vehicleParams/penalty/tankmanLevel/template'
VEHICLEPARAMS_PENALTY_TANKMANDIFFERENTVEHICLE_TEMPLATE = '#tooltips:vehicleParams/penalty/tankmanDifferentVehicle/template'
VEHICLEPARAMS_PENALTY_CREWNOTFULL_TEMPLATE = '#tooltips:vehicleParams/penalty/crewNotFull/template'
VEHICLEPARAMS_VALUECHANGE_TEMPLATE = '#tooltips:vehicleParams/valueChange/template'
VEHICLEPARAMS_COMMON_TITLE = '#tooltips:vehicleParams/common/title'
VEHICLEPARAMS_SIMPLIFIED_TITLE = '#tooltips:vehicleParams/simplified/title'
# --- Vehicle preview: crew roles, crew influence rows, and buy-button states ---
VEHICLEPREVIEW_CREW_COMMANDER = '#tooltips:vehiclePreview/crew/commander'
VEHICLEPREVIEW_CREW_DRIVER = '#tooltips:vehiclePreview/crew/driver'
VEHICLEPREVIEW_CREW_RADIOMAN = '#tooltips:vehiclePreview/crew/radioman'
VEHICLEPREVIEW_CREW_GUNNER = '#tooltips:vehiclePreview/crew/gunner'
VEHICLEPREVIEW_CREW_LOADER = '#tooltips:vehiclePreview/crew/loader'
VEHICLEPREVIEW_CREW_INFLUENCE_RECONNAISSANCE = '#tooltips:vehiclePreview/crew/influence/reconnaissance'
VEHICLEPREVIEW_CREW_INFLUENCE_VISIONRADIUS = '#tooltips:vehiclePreview/crew/influence/visionRadius'
VEHICLEPREVIEW_CREW_INFLUENCE_FIREPOWER = '#tooltips:vehiclePreview/crew/influence/firepower'
VEHICLEPREVIEW_CREW_INFLUENCE_AIMINGTIME = '#tooltips:vehiclePreview/crew/influence/aimingTime'
VEHICLEPREVIEW_CREW_INFLUENCE_MOBILITY = '#tooltips:vehiclePreview/crew/influence/mobility'
VEHICLEPREVIEW_CREW_INFLUENCE_CHASSISROTATIONSPEED = '#tooltips:vehiclePreview/crew/influence/chassisRotationSpeed'
VEHICLEPREVIEW_CREW_INFLUENCE_ENGINEPOWER = '#tooltips:vehiclePreview/crew/influence/enginePower'
VEHICLEPREVIEW_CREW_INFLUENCE_RADIODISTANCE = '#tooltips:vehiclePreview/crew/influence/radioDistance'
VEHICLEPREVIEW_CREW_ADDITIONALROLES = '#tooltips:vehiclePreview/crew/additionalRoles'
VEHICLEPREVIEW_VEHICLEPANEL_INFO_HEADER = '#tooltips:vehiclePreview/vehiclePanel/info/header'
VEHICLEPREVIEW_VEHICLEPANEL_INFO_BODY = '#tooltips:vehiclePreview/vehiclePanel/info/body'
# Buy-button disabled reasons: locked parents, insufficient XP/credits/gold.
VEHICLEPREVIEW_BUYBUTTON_PARENTMODULEISLOCKED_HEADER = '#tooltips:vehiclePreview/buyButton/parentModuleIsLocked/header'
VEHICLEPREVIEW_BUYBUTTON_PARENTMODULEISLOCKED_BODY = '#tooltips:vehiclePreview/buyButton/parentModuleIsLocked/body'
VEHICLEPREVIEW_BUYBUTTON_PARENTVEHICLEISLOCKED_HEADER = '#tooltips:vehiclePreview/buyButton/parentVehicleIsLocked/header'
VEHICLEPREVIEW_BUYBUTTON_PARENTVEHICLEISLOCKED_BODY = '#tooltips:vehiclePreview/buyButton/parentVehicleIsLocked/body'
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHXP_HEADER = '#tooltips:vehiclePreview/buyButton/notEnoughXp/header'
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHXP_BODY = '#tooltips:vehiclePreview/buyButton/notEnoughXp/body'
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHCREDITS_HEADER = '#tooltips:vehiclePreview/buyButton/notEnoughCredits/header'
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHCREDITS_BODY = '#tooltips:vehiclePreview/buyButton/notEnoughCredits/body'
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHGOLD_HEADER = '#tooltips:vehiclePreview/buyButton/notEnoughGold/header'
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHGOLD_BODY = '#tooltips:vehiclePreview/buyButton/notEnoughGold/body'
# --- Squad window, tankman-dismiss dialog, and notification tabs ---
SQUADWINDOW_EVENTVEHICLE = '#tooltips:squadWindow/eventVehicle'
# NOTE(review): 'cantRestorAlert' spelling matches the generated resource path —
# do not "fix" it here without changing the string table as well.
DISMISSTANKMANDIALOG_CANTRESTORALERT_HEADER = '#tooltips:dismissTankmanDialog/cantRestorAlert/header'
DISMISSTANKMANDIALOG_CANTRESTORALERT_BODY = '#tooltips:dismissTankmanDialog/cantRestorAlert/body'
DISMISSTANKMANDIALOG_BUFFERISFULL_HEADER = '#tooltips:dismissTankmanDialog/bufferIsFull/header'
DISMISSTANKMANDIALOG_BUFFERISFULL_BODY = '#tooltips:dismissTankmanDialog/bufferIsFull/body'
DISMISSTANKMANDIALOG_BUFFERISFULLMULTIPLE_BODY = '#tooltips:dismissTankmanDialog/bufferIsFullMultiple/body'
NOTIFICATIONSVIEW_TAB_INFO_BODY = '#tooltips:notificationsView/tab/info/body'
NOTIFICATIONSVIEW_TAB_INVITES_BODY = '#tooltips:notificationsView/tab/invites/body'
NOTIFICATIONSVIEW_TAB_OFFERS_BODY = '#tooltips:notificationsView/tab/offers/body'
# --- Hangar header quest flags ---
HANGAR_HEADER_QUESTS_HEADER = '#tooltips:hangar/header/quests/header'
HANGAR_HEADER_QUESTS_DESCRIPTION_VEHICLE = '#tooltips:hangar/header/quests/description/vehicle'
HANGAR_HEADER_QUESTS_DESCRIPTION = '#tooltips:hangar/header/quests/description'
HANGAR_HEADER_QUESTS_EMPTY_VEHICLE = '#tooltips:hangar/header/quests/empty/vehicle'
HANGAR_HEADER_QUESTS_EMPTY = '#tooltips:hangar/header/quests/empty'
HANGAR_HEADER_QUESTS_BOTTOM_EMPTY = '#tooltips:hangar/header/quests/bottom/empty'
HANGAR_HEADER_QUESTS_REWARD = '#tooltips:hangar/header/quests/reward'
HANGAR_HEADER_QUESTS_REWARD_REST = '#tooltips:hangar/header/quests/reward/rest'
HANGAR_HEADER_QUESTS_BOTTOM = '#tooltips:hangar/header/quests/bottom'
# --- Trade-in: discounts and vehicle exchange price breakdown ---
TRADE_HEADER = '#tooltips:trade/header'
TRADE_BODY = '#tooltips:trade/body'
TRADE_NODISCOUNT = '#tooltips:trade/noDiscount'
TRADEIN_NODISCOUNT_HEADER = '#tooltips:tradeIn/noDiscount/header'
TRADEIN_NODISCOUNT_BODY = '#tooltips:tradeIn/noDiscount/body'
TRADE_DISCOUNT = '#tooltips:trade/discount'
TRADE_SEVERALDISCOUNTS = '#tooltips:trade/severalDiscounts'
TRADE_VEHICLE_HEADER = '#tooltips:trade/vehicle/header'
TRADE_VEHICLE_PRICE = '#tooltips:trade/vehicle/price'
TRADE_VEHICLE_OLDPRICE = '#tooltips:trade/vehicle/oldPrice'
TRADE_VEHICLE_NEWPRICE = '#tooltips:trade/vehicle/newPrice'
TRADE_VEHICLE_TOCHANGE = '#tooltips:trade/vehicle/toChange'
TRADE_VEHICLE_NOVEHICLE = '#tooltips:trade/vehicle/noVehicle'
TRADE_VEHICLE_RESULT = '#tooltips:trade/vehicle/result'
# --- Settings screen: chat-command key bindings (per-target variants) ---
SETTINGS_KEY_ENEMY_BODY = '#tooltips:settings/key/enemy/body'
SETTINGS_KEY_ALLY_BODY = '#tooltips:settings/key/ally/body'
SETTINGS_KEY_TARGET_ENEMY = '#tooltips:settings/key/target/enemy'
SETTINGS_KEY_TARGET_ALLY = '#tooltips:settings/key/target/ally'
SETTINGS_KEYFOLLOWME_TITLE = '#tooltips:settings/keyFollowMe/title'
SETTINGS_KEYMOVEMENT_TITLE = '#tooltips:settings/keyMovement/title'
SETTINGS_SUPPORT_SUBTITLE = '#tooltips:settings/support/subtitle'
SETTINGS_FOLLOWME_SUBTITLE = '#tooltips:settings/followMe/subtitle'
SETTINGS_DEFENDBASE_SUBTITLE = '#tooltips:settings/defendBase/subtitle'
SETTINGS_TURNBACK_SUBTITLE = '#tooltips:settings/turnBack/subtitle'
SETTINGS_NEEDHELP_SUBTITLE = '#tooltips:settings/needHelp/subtitle'
SETTINGS_HELPME_SUBTITLE = '#tooltips:settings/helpMe/subtitle'
SETTINGS_RELOAD_SUBTITLE = '#tooltips:settings/reload/subtitle'
SETTINGS_STOP_SUBTITLE = '#tooltips:settings/stop/subtitle'
SETTINGS_DEFENDBASE_ENEMY_BODY = '#tooltips:settings/defendBase/enemy/body'
SETTINGS_NEEDHELP_ENEMY_BODY = '#tooltips:settings/needHelp/enemy/body'
SETTINGS_RELOAD_ENEMY_BODY = '#tooltips:settings/reload/enemy/body'
SETTINGS_SWITCHMODE_BODY = '#tooltips:settings/switchMode/body'
# --- Shell tooltip ---
SHELL_BASIC = '#tooltips:shell/basic'
SHELL_BASIC_DESCRIPTION_BOLD = '#tooltips:shell/basic/description/bold'
SHELL_BASIC_DESCRIPTION = '#tooltips:shell/basic/description'
# --- Crew skills: per-skill header/description pairs plus status/role labels ---
SKILLS_CAMOUFLAGE_HEADER = '#tooltips:skills/camouflage/header'
SKILLS_CAMOUFLAGE_DESCR = '#tooltips:skills/camouflage/descr'
SKILLS_BROTHERHOOD_HEADER = '#tooltips:skills/brotherhood/header'
SKILLS_BROTHERHOOD_DESCR = '#tooltips:skills/brotherhood/descr'
SKILLS_DRIVER_VIRTUOSO_HEADER = '#tooltips:skills/driver_virtuoso/header'
SKILLS_DRIVER_VIRTUOSO_DESCR = '#tooltips:skills/driver_virtuoso/descr'
SKILLS_COMMANDER_EAGLEEYE_HEADER = '#tooltips:skills/commander_eagleEye/header'
SKILLS_COMMANDER_EAGLEEYE_DESCR = '#tooltips:skills/commander_eagleEye/descr'
SKILLS_DRIVER_BADROADSKING_HEADER = '#tooltips:skills/driver_badRoadsKing/header'
SKILLS_DRIVER_BADROADSKING_DESCR = '#tooltips:skills/driver_badRoadsKing/descr'
SKILLS_RADIOMAN_FINDER_HEADER = '#tooltips:skills/radioman_finder/header'
SKILLS_RADIOMAN_FINDER_DESCR = '#tooltips:skills/radioman_finder/descr'
SKILLS_RADIOMAN_INVENTOR_HEADER = '#tooltips:skills/radioman_inventor/header'
SKILLS_RADIOMAN_INVENTOR_DESCR = '#tooltips:skills/radioman_inventor/descr'
# NOTE(review): 'requiersWholeCrew' spelling matches the generated resource path.
SKILLS_STATUS_MOMENTAL = '#tooltips:skills/status/momental'
SKILLS_STATUS_REQUIERSWHOLECREW = '#tooltips:skills/status/requiersWholeCrew'
SKILLS_STATUS_ISFORROLE = '#tooltips:skills/status/isForRole'
SKILLS_STATUS_AVGEXP = '#tooltips:skills/status/avgExp'
SKILLS_STATUS_FOR2RADIOMEN = '#tooltips:skills/status/for2radiomen'
SKILLS_FORROLE_COMMANDER = '#tooltips:skills/forRole/commander'
SKILLS_FORROLE_DRIVER = '#tooltips:skills/forRole/driver'
SKILLS_FORROLE_RADIOMAN = '#tooltips:skills/forRole/radioman'
SKILLS_FORROLE_LOADER = '#tooltips:skills/forRole/loader'
SKILLS_FORROLE_GUNNER = '#tooltips:skills/forRole/gunner'
SKILLS_FORROLE_COMMON = '#tooltips:skills/forRole/common'
# --- Hangar header: personal-quest flag states and the wg-money currency tooltip ---
HANGAR_HEADER_PERSONALQUESTS_DISABLED_HEADER = '#tooltips:hangar/header/personalQuests/disabled/header'
HANGAR_HEADER_PERSONALQUESTS_DISABLED_BODY = '#tooltips:hangar/header/personalQuests/disabled/body'
HANGAR_HEADER_PERSONALQUESTS_UNAVAILABLE_HEADER = '#tooltips:hangar/header/personalQuests/unavailable/header'
HANGAR_HEADER_PERSONALQUESTS_UNAVAILABLE_BODY = '#tooltips:hangar/header/personalQuests/unavailable/body'
HANGAR_HEADER_PERSONALQUESTS_COMPLETED_HEADER = '#tooltips:hangar/header/personalQuests/completed/header'
HANGAR_HEADER_PERSONALQUESTS_COMPLETED_BODY = '#tooltips:hangar/header/personalQuests/completed/body'
HANGAR_HEADER_PERSONALQUESTS_AVAILABLE_HEADER = '#tooltips:hangar/header/personalQuests/available/header'
HANGAR_HEADER_PERSONALQUESTS_AVAILABLE_BODY = '#tooltips:hangar/header/personalQuests/available/body'
HANGAR_HEADER_PERSONALQUESTS_AWARD_HEADER = '#tooltips:hangar/header/personalQuests/award/header'
HANGAR_HEADER_PERSONALQUESTS_AWARD_BODY = '#tooltips:hangar/header/personalQuests/award/body'
HANGAR_HEADER_PERSONALQUESTS_DONE_HEADER = '#tooltips:hangar/header/personalQuests/done/header'
HANGAR_HEADER_PERSONALQUESTS_DONE_BODY = '#tooltips:hangar/header/personalQuests/done/body'
HANGAR_HEADER_WGMONEYTOOLTIP_GOLDNAME = '#tooltips:hangar/header/wgmoneyTooltip/goldName'
HANGAR_HEADER_WGMONEYTOOLTIP_CREDITSNAME = '#tooltips:hangar/header/wgmoneyTooltip/creditsName'
HANGAR_HEADER_WGMONEYTOOLTIP_PURCHASEDVALUE = '#tooltips:hangar/header/wgmoneyTooltip/purchasedValue'
HANGAR_HEADER_WGMONEYTOOLTIP_EARNEDVALUE = '#tooltips:hangar/header/wgmoneyTooltip/earnedValue'
HANGAR_HEADER_WGMONEYTOOLTIP_TOTALVALUE = '#tooltips:hangar/header/wgmoneyTooltip/totalValue'
# Carousel-tooltip keys for the regular (non-elite) vehicle classes, ordered
# light tank -> medium tank -> heavy tank -> tank destroyer -> SPG.
TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_ENUM = (
    TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_LIGHTTANK,
    TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_MEDIUMTANK,
    TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_HEAVYTANK,
    TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_AT_SPG,
    TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_SPG,
)
# Carousel-tooltip keys for the elite vehicle classes, in the same class order
# as the NORMAL enum above.
TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_ENUM = (
    TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_LIGHTTANK,
    TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_MEDIUMTANK,
    TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_HEAVYTANK,
    TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_AT_SPG,
    TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_SPG,
)
LEVEL_ENUM = (LEVEL_1,
LEVEL_2,
LEVEL_3,
LEVEL_4,
LEVEL_5,
LEVEL_6,
LEVEL_7,
LEVEL_8,
LEVEL_9,
LEVEL_10)
VEHICLE_ENUM = (VEHICLE_GRAPH_NOTES_SHOWCONTEXTMENU,
VEHICLE_GRAPH_NOTES_SHOWINFOWINDOW,
VEHICLE_GRAPH_NOTES_VEHICLEUNLOCK,
VEHICLE_GRAPH_NOTES_MODULEUNLOCK,
VEHICLE_GRAPH_NOTES_EQUIP,
VEHICLE_GRAPH_NOTES_BUYANDEQUIP,
VEHICLE_GRAPH_NOTES_GOTONATIONTREE,
VEHICLE_GRAPH_NOTES_GOTONEXTVEHICLE,
VEHICLE_GRAPH_BODY_MODULEINSTALLED,
VEHICLE_GRAPH_BODY_NOTENOUGH,
VEHICLE_INVENTORYCOUNT,
VEHICLE_VEHICLECOUNT,
VEHICLE_LEVEL,
VEHICLE_MASTERING,
VEHICLE_MASTERING_VERYSIMPLE,
VEHICLE_STATS_FOOTNOTE,
VEHICLE_ELITE,
VEHICLE_CREW,
VEHICLE_CREW_AWARD,
VEHICLE_AMMO,
VEHICLE_XP,
VEHICLE_MULTIPLIED_XP,
VEHICLE_UNLOCK_PRICE,
VEHICLE_BUY_PRICE,
VEHICLE_RESTORE_PRICE,
VEHICLE_ACTION_PRC,
VEHICLE_MINRENTALSPRICE,
VEHICLE_BUY_PRICE_ACTION,
VEHICLE_SELL_PRICE,
VEHICLE_EQUIPMENTS,
VEHICLE_DEVICES,
VEHICLE_FAVORITE,
VEHICLE_DAILYXPFACTOR,
VEHICLE_SPEEDLIMITS,
VEHICLE_DAMAGE,
VEHICLE_TURRETROTATIONSPEED,
VEHICLE_RADIODISTANCE,
VEHICLE_TEXTDELIMITER_OR,
VEHICLE_RENTLEFT_DAYS,
VEHICLE_RENTLEFT_HOURS,
VEHICLE_RENTLEFTFUTURE_DAYS,
VEHICLE_RENTLEFTFUTURE_HOURS,
VEHICLE_RENTAVAILABLE,
VEHICLE_RESTORELEFT_DAYS,
VEHICLE_RESTORELEFT_HOURS,
VEHICLE_RENTLEFT_BATTLES,
VEHICLE_DEAL_TELECOM_MAIN,
VEHICLE_TRADE)
TANKCARUSEL_LOCK_ENUM = (TANKCARUSEL_LOCK_HEADER,
TANKCARUSEL_LOCK_ROTATION_HEADER,
TANKCARUSEL_LOCK_TO,
TANKCARUSEL_LOCK_CLAN,
TANKCARUSEL_LOCK_TOURNAMENT,
TANKCARUSEL_LOCK_ROTATION,
TANKCARUSEL_LOCK_ROAMING)
ACHIEVEMENT_PARAMS_LEFT_ENUM = (ACHIEVEMENT_PARAMS_LEFT4,
ACHIEVEMENT_PARAMS_LEFT3,
ACHIEVEMENT_PARAMS_LEFT2,
ACHIEVEMENT_PARAMS_LEFT1)
ACTIONPRICE_SELL_TYPE_ENUM = (ACTIONPRICE_SELL_TYPE_VEHICLE,
ACTIONPRICE_SELL_TYPE_MODULE,
ACTIONPRICE_SELL_TYPE_EQUIPMENT,
ACTIONPRICE_SELL_TYPE_SHELL,
ACTIONPRICE_SELL_TYPE_OPTIONALDEVICE)
FORTIFICATION_TRANSPORTING_ENUM = (FORTIFICATION_TRANSPORTING_EMPTYSTORAGE_HEADER,
FORTIFICATION_TRANSPORTING_EMPTYSTORAGE_BODY,
FORTIFICATION_TRANSPORTING_COOLDOWN_HEADER,
FORTIFICATION_TRANSPORTING_COOLDOWN_BODY,
FORTIFICATION_TRANSPORTING_FOUNDATION_HEADER,
FORTIFICATION_TRANSPORTING_FOUNDATION_BODY,
FORTIFICATION_TRANSPORTING_NOTEMPTYSPACE_HEADER,
FORTIFICATION_TRANSPORTING_NOTEMPTYSPACE_BODY)
SQUADWINDOW_STATUS_ENUM = (SQUADWINDOW_STATUS_COMMANDER,
SQUADWINDOW_STATUS_NORMAL,
SQUADWINDOW_STATUS_CANCELED,
SQUADWINDOW_STATUS_READY,
SQUADWINDOW_STATUS_INBATTLE,
SQUADWINDOW_STATUS_LOCKED)
TOOLTIPREFSYSDESCRIPTION_AWARDS_DESCR_ENUM = (TOOLTIPREFSYSDESCRIPTION_AWARDS_DESCR_CREDITS, TOOLTIPREFSYSDESCRIPTION_AWARDS_DESCR_TANKMAN, TOOLTIPREFSYSDESCRIPTION_AWARDS_DESCR_VEHICLEWITHTANKMEN)
PRIVATEQUESTS_PARAMS_ENUM = (PRIVATEQUESTS_PARAMS_SHEETS,
PRIVATEQUESTS_PARAMS_RECRUITTANKMANFEMALE,
PRIVATEQUESTS_PARAMS_COLLECTEDSHEETS,
PRIVATEQUESTS_PARAMS_COMPETEDTASKS,
PRIVATEQUESTS_PARAMS_RECRUITEDTANKMANFEMALE)
LOGIN_SOCIAL_ENUM = (LOGIN_SOCIAL_ENTER_HEADER,
LOGIN_SOCIAL_ENTER_BODY,
LOGIN_SOCIAL_FACEBOOK,
LOGIN_SOCIAL_GOOGLE,
LOGIN_SOCIAL_YAHOO,
LOGIN_SOCIAL_TWITTER,
LOGIN_SOCIAL_VKONTAKTE,
LOGIN_SOCIAL_ODNOKLASSNIKI,
LOGIN_SOCIAL_WGNI,
LOGIN_SOCIAL_NAVER)
TECHTREEPAGE_NATIONS_ENUM = (TECHTREEPAGE_NATIONS_USSR,
TECHTREEPAGE_NATIONS_GERMANY,
TECHTREEPAGE_NATIONS_USA,
TECHTREEPAGE_NATIONS_CHINA,
TECHTREEPAGE_NATIONS_FRANCE,
TECHTREEPAGE_NATIONS_UK,
TECHTREEPAGE_NATIONS_JAPAN,
TECHTREEPAGE_NATIONS_CZECH,
TECHTREEPAGE_NATIONS_SWEDEN)
CONTACT_STATUS_INBATTLE_ENUM = (CONTACT_STATUS_INBATTLE_UNKNOWN,
CONTACT_STATUS_INBATTLE_RANDOM,
CONTACT_STATUS_INBATTLE_TRAINING,
CONTACT_STATUS_INBATTLE_TEAM,
CONTACT_STATUS_INBATTLE_TUTORIAL,
CONTACT_STATUS_INBATTLE_TEAM7X7,
CONTACT_STATUS_INBATTLE_HISTORICAL,
CONTACT_STATUS_INBATTLE_FORTIFICATIONS,
CONTACT_STATUS_INBATTLE_SPECIAL)
VEHICLE_RENTLEFT_ENUM = (VEHICLE_RENTLEFT_DAYS, VEHICLE_RENTLEFT_HOURS, VEHICLE_RENTLEFT_BATTLES)
TANK_PARAMS_DESC_ENUM = (TANK_PARAMS_DESC_MAXHEALTH,
TANK_PARAMS_DESC_VEHICLEWEIGHT,
TANK_PARAMS_DESC_ENGINEPOWER,
TANK_PARAMS_DESC_ENGINEPOWERPERTON,
TANK_PARAMS_DESC_SPEEDLIMITS,
TANK_PARAMS_DESC_CHASSISROTATIONSPEED,
TANK_PARAMS_DESC_TIMEOFREACHING,
TANK_PARAMS_DESC_ATTAINABLESPEED,
TANK_PARAMS_DESC_HULLARMOR,
TANK_PARAMS_DESC_TURRETARMOR,
TANK_PARAMS_DESC_RELOADTIME,
TANK_PARAMS_DESC_RELOADTIMESECS,
TANK_PARAMS_DESC_PIERCINGPOWER,
TANK_PARAMS_DESC_AVGPIERCINGPOWER,
TANK_PARAMS_DESC_DAMAGE,
TANK_PARAMS_DESC_AVGDAMAGE,
TANK_PARAMS_DESC_AVGDAMAGEPERMINUTE,
TANK_PARAMS_DESC_TURRETROTATIONSPEED,
TANK_PARAMS_DESC_GUNROTATIONSPEED,
TANK_PARAMS_DESC_CIRCULARVISIONRADIUS,
TANK_PARAMS_DESC_RADIODISTANCE,
TANK_PARAMS_DESC_TURRETYAWLIMITS,
TANK_PARAMS_DESC_PITCHLIMITS,
TANK_PARAMS_DESC_GUNYAWLIMITS,
TANK_PARAMS_DESC_CLIPFIRERATE,
TANK_PARAMS_DESC_RELATIVEPOWER,
TANK_PARAMS_DESC_RELATIVEARMOR,
TANK_PARAMS_DESC_RELATIVEMOBILITY,
TANK_PARAMS_DESC_RELATIVEVISIBILITY,
TANK_PARAMS_DESC_RELATIVECAMOUFLAGE,
TANK_PARAMS_DESC_SHOTDISPERSIONANGLE,
TANK_PARAMS_DESC_AIMINGTIME,
TANK_PARAMS_DESC_EXPLOSIONRADIUS,
TANK_PARAMS_DESC_INVISIBILITYSTILLFACTOR,
TANK_PARAMS_DESC_INVISIBILITYMOVINGFACTOR,
TANK_PARAMS_DESC_SWITCHONTIME,
TANK_PARAMS_DESC_SWITCHOFFTIME,
TANK_PARAMS_DESC_STUNMINDURATION,
TANK_PARAMS_DESC_STUNMAXDURATION)
BATTLERESULTS_EFFICIENCYHEADER_ENUM = (BATTLERESULTS_EFFICIENCYHEADER_SUMMSPOTTED,
BATTLERESULTS_EFFICIENCYHEADER_SUMMASSIST,
BATTLERESULTS_EFFICIENCYHEADER_SUMMARMOR,
BATTLERESULTS_EFFICIENCYHEADER_SUMMCRITS,
BATTLERESULTS_EFFICIENCYHEADER_SUMMDAMAGE,
BATTLERESULTS_EFFICIENCYHEADER_SUMMKILL,
BATTLERESULTS_EFFICIENCYHEADER_SUMMSTUN,
BATTLERESULTS_EFFICIENCYHEADER_VALUE)
AWARDITEM_ALL_HEADER_ENUM = (AWARDITEM_CREDITS_HEADER,
AWARDITEM_GOLD_HEADER,
AWARDITEM_FREEXP_HEADER,
AWARDITEM_PREMIUM_HEADER,
AWARDITEM_BATTLETOKEN_ONE_HEADER,
AWARDITEM_BATTLETOKEN_SEVERAL_HEADER,
AWARDITEM_BERTHS_HEADER,
AWARDITEM_CREDITSFACTOR_HEADER,
AWARDITEM_FREEXPFACTOR_HEADER,
AWARDITEM_TANKMENXP_HEADER,
AWARDITEM_TANKMENXPFACTOR_HEADER,
AWARDITEM_XP_HEADER,
AWARDITEM_XPFACTOR_HEADER,
AWARDITEM_SLOTS_HEADER,
AWARDITEM_TOKENS_HEADER,
AWARDITEM_TANKMEN_HEADER,
AWARDITEM_TANKWOMEN_HEADER)
AWARDITEM_ALL_BODY_ENUM = (AWARDITEM_CREDITS_BODY,
AWARDITEM_GOLD_BODY,
AWARDITEM_FREEXP_BODY,
AWARDITEM_PREMIUM_BODY,
AWARDITEM_BATTLETOKEN_ONE_BODY,
AWARDITEM_BATTLETOKEN_SEVERAL_BODY,
AWARDITEM_BERTHS_BODY,
AWARDITEM_CREDITSFACTOR_BODY,
AWARDITEM_FREEXPFACTOR_BODY,
AWARDITEM_TANKMENXP_BODY,
AWARDITEM_TANKMENXPFACTOR_BODY,
AWARDITEM_XP_BODY,
AWARDITEM_XPFACTOR_BODY,
AWARDITEM_SLOTS_BODY,
AWARDITEM_TOKENS_BODY,
AWARDITEM_TANKMEN_BODY,
AWARDITEM_TANKWOMEN_BODY)
TEMPLATE_ALL_SHORT_ENUM = (TEMPLATE_DAYS_SHORT, TEMPLATE_HOURS_SHORT, TEMPLATE_MINUTES_SHORT)
VEHICLEPARAMS_BONUS_TANKMANLEVEL_ENUM = (VEHICLEPARAMS_BONUS_TANKMANLEVEL_COMMANDER,
VEHICLEPARAMS_BONUS_TANKMANLEVEL_GUNNER,
VEHICLEPARAMS_BONUS_TANKMANLEVEL_LOADER,
VEHICLEPARAMS_BONUS_TANKMANLEVEL_DRIVER,
VEHICLEPARAMS_BONUS_TANKMANLEVEL_RADIOMAN)
VEHICLEPARAMS_BONUS_EXTRA_ENUM = (VEHICLEPARAMS_BONUS_EXTRA_CAMOUFLAGEEXTRAS,)
VEHICLEPREVIEW_BUYBUTTON_ALL_ENUM = (VEHICLEPREVIEW_BUYBUTTON_PARENTMODULEISLOCKED_HEADER,
VEHICLEPREVIEW_BUYBUTTON_PARENTMODULEISLOCKED_BODY,
VEHICLEPREVIEW_BUYBUTTON_PARENTVEHICLEISLOCKED_HEADER,
VEHICLEPREVIEW_BUYBUTTON_PARENTVEHICLEISLOCKED_BODY,
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHXP_HEADER,
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHXP_BODY,
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHCREDITS_HEADER,
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHCREDITS_BODY,
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHGOLD_HEADER,
VEHICLEPREVIEW_BUYBUTTON_NOTENOUGHGOLD_BODY)
TANK_PARAMS_AVGPARAMCOMMENT_ENUM = (TANK_PARAMS_AVGPARAMCOMMENT_DAMAGE, TANK_PARAMS_AVGPARAMCOMMENT_PIERCINGPOWER)
SKILLS_ALL_HEADER_ENUM = (SKILLS_CAMOUFLAGE_HEADER,
SKILLS_BROTHERHOOD_HEADER,
SKILLS_DRIVER_VIRTUOSO_HEADER,
SKILLS_COMMANDER_EAGLEEYE_HEADER,
SKILLS_DRIVER_BADROADSKING_HEADER,
SKILLS_RADIOMAN_FINDER_HEADER,
SKILLS_RADIOMAN_INVENTOR_HEADER)
SKILLS_ALL_DESCR_ENUM = (SKILLS_CAMOUFLAGE_DESCR,
SKILLS_BROTHERHOOD_DESCR,
SKILLS_DRIVER_VIRTUOSO_DESCR,
SKILLS_COMMANDER_EAGLEEYE_DESCR,
SKILLS_DRIVER_BADROADSKING_DESCR,
SKILLS_RADIOMAN_FINDER_DESCR,
SKILLS_RADIOMAN_INVENTOR_DESCR)
SKILLS_FORROLE_ENUM = (SKILLS_FORROLE_COMMANDER,
SKILLS_FORROLE_DRIVER,
SKILLS_FORROLE_RADIOMAN,
SKILLS_FORROLE_LOADER,
SKILLS_FORROLE_GUNNER,
SKILLS_FORROLE_COMMON)
@classmethod
def tankcaruseltooltip_vehicletype_normal(cls, key0):
    """Resolve *key0* to a normal-vehicle-type tooltip key, or None if unknown."""
    candidate = '#tooltips:tankCaruselTooltip/vehicleType/normal/{}'.format(key0)
    if candidate in cls.TANKCARUSELTOOLTIP_VEHICLETYPE_NORMAL_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def tankcaruseltooltip_vehicletype_elite(cls, key0):
    """Resolve *key0* to an elite-vehicle-type tooltip key, or None if unknown."""
    candidate = '#tooltips:tankCaruselTooltip/vehicleType/elite/{}'.format(key0)
    if candidate in cls.TANKCARUSELTOOLTIP_VEHICLETYPE_ELITE_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def level(cls, key0):
    """Resolve *key0* to a vehicle-level tooltip key, or None if unknown."""
    candidate = '#tooltips:level/{}'.format(key0)
    if candidate in cls.LEVEL_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def vehicle(cls, key0):
    """Resolve *key0* to a vehicle tooltip key, or None if unknown."""
    candidate = '#tooltips:vehicle/{}'.format(key0)
    if candidate in cls.VEHICLE_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def tankcarusel_lock(cls, key0):
    """Resolve *key0* to a carousel-lock tooltip key, or None if unknown."""
    candidate = '#tooltips:tankCarusel/lock/{}'.format(key0)
    if candidate in cls.TANKCARUSEL_LOCK_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def achievement_params_left(cls, key0):
    """Resolve *key0* to an achievement 'left N' tooltip key, or None if unknown."""
    candidate = '#tooltips:achievement/params/left{}'.format(key0)
    if candidate in cls.ACHIEVEMENT_PARAMS_LEFT_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def actionprice_sell_type(cls, key0):
    """Resolve *key0* to an action-price sell-type tooltip key, or None if unknown."""
    candidate = '#tooltips:actionPrice/sell/type/{}'.format(key0)
    if candidate in cls.ACTIONPRICE_SELL_TYPE_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def fortification_transporting(cls, key0):
    """Resolve *key0* to a fortification-transporting tooltip key, or None if unknown."""
    candidate = '#tooltips:fortification/transporting/{}'.format(key0)
    if candidate in cls.FORTIFICATION_TRANSPORTING_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def squadwindow_status(cls, key0):
    """Resolve *key0* to a squad-window status tooltip key, or None if unknown."""
    candidate = '#tooltips:squadWindow/status/{}'.format(key0)
    if candidate in cls.SQUADWINDOW_STATUS_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def tooltiprefsysdescription_awards_descr(cls, key0):
    """Resolve *key0* to a referral-system award tooltip key, or None if unknown."""
    candidate = '#tooltips:ToolTipRefSysDescription/awards/descr/{}'.format(key0)
    if candidate in cls.TOOLTIPREFSYSDESCRIPTION_AWARDS_DESCR_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def privatequests_params(cls, key0):
    """Resolve *key0* to a private-quest params tooltip key, or None if unknown."""
    candidate = '#tooltips:privateQuests/params/{}'.format(key0)
    if candidate in cls.PRIVATEQUESTS_PARAMS_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def login_social(cls, key0):
    """Resolve *key0* to a social-login tooltip key, or None if unknown."""
    candidate = '#tooltips:login/social/{}'.format(key0)
    if candidate in cls.LOGIN_SOCIAL_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def techtreepage_nations(cls, key0):
    """Resolve *key0* to a tech-tree nation tooltip key, or None if unknown."""
    candidate = '#tooltips:techTreePage/nations/{}'.format(key0)
    if candidate in cls.TECHTREEPAGE_NATIONS_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def contact_status_inbattle(cls, key0):
    """Resolve *key0* to a contact in-battle status tooltip key, or None if unknown."""
    candidate = '#tooltips:Contact/status/inBattle/{}'.format(key0)
    if candidate in cls.CONTACT_STATUS_INBATTLE_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def vehicle_rentleft(cls, key0):
    """Resolve *key0* to a rent-remaining tooltip key, or None if unknown."""
    candidate = '#tooltips:vehicle/rentLeft/{}'.format(key0)
    if candidate in cls.VEHICLE_RENTLEFT_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def tank_params_desc(cls, key0):
    """Resolve *key0* to a tank-parameter description tooltip key, or None if unknown."""
    candidate = '#tooltips:tank_params/desc/{}'.format(key0)
    if candidate in cls.TANK_PARAMS_DESC_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def battleresults_efficiencyheader(cls, key0):
    """Resolve *key0* to a battle-results efficiency-header key, or None if unknown."""
    candidate = '#tooltips:battleResults/efficiencyHeader/{}'.format(key0)
    if candidate in cls.BATTLERESULTS_EFFICIENCYHEADER_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def getAwardHeader(cls, key0):
    """Resolve *key0* to an award-item header tooltip key, or None if unknown."""
    candidate = '#tooltips:awardItem/{}/header'.format(key0)
    if candidate in cls.AWARDITEM_ALL_HEADER_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def getAwardBody(cls, key0):
    """Resolve *key0* to an award-item body tooltip key, or None if unknown."""
    candidate = '#tooltips:awardItem/{}/body'.format(key0)
    if candidate in cls.AWARDITEM_ALL_BODY_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def template_all_short(cls, key0):
    """Resolve *key0* to a short time-unit template key, or None if unknown."""
    candidate = '#tooltips:template/{}/short'.format(key0)
    if candidate in cls.TEMPLATE_ALL_SHORT_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def vehicleparams_bonus_tankmanlevel(cls, key0):
    """Resolve *key0* to a tankman-level bonus tooltip key, or None if unknown."""
    candidate = '#tooltips:vehicleParams/bonus/tankmanLevel/{}'.format(key0)
    if candidate in cls.VEHICLEPARAMS_BONUS_TANKMANLEVEL_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def vehicleparams_bonus_extra(cls, key0):
    """Resolve *key0* to an extras-bonus tooltip key, or None if unknown."""
    candidate = '#tooltips:vehicleParams/bonus/extra/{}'.format(key0)
    if candidate in cls.VEHICLEPARAMS_BONUS_EXTRA_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def vehiclepreview_buybutton_all(cls, key0, part):
    """Resolve *key0*/*part* to a buy-button tooltip key, or None if unknown."""
    candidate = '#tooltips:vehiclePreview/buyButton/{}/{}'.format(key0, part)
    if candidate in cls.VEHICLEPREVIEW_BUYBUTTON_ALL_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def getAvgParameterCommentKey(cls, key0):
    """Resolve *key0* to an average-parameter comment key, or None if unknown."""
    candidate = '#tooltips:tank_params/avgParamComment/{}'.format(key0)
    if candidate in cls.TANK_PARAMS_AVGPARAMCOMMENT_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def skillTooltipHeader(cls, skill):
    """Resolve *skill* to its tooltip header key, or None if unknown."""
    candidate = '#tooltips:skills/{}/header'.format(skill)
    if candidate in cls.SKILLS_ALL_HEADER_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def skillTooltipDescr(cls, skill):
    """Resolve *skill* to its tooltip description key, or None if unknown."""
    candidate = '#tooltips:skills/{}/descr'.format(skill)
    if candidate in cls.SKILLS_ALL_DESCR_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
@classmethod
def roleForSkill(cls, role):
    """Resolve *role* to its 'skill for role' tooltip key, or None if unknown."""
    candidate = '#tooltips:skills/forRole/{}'.format(role)
    if candidate in cls.SKILLS_FORROLE_ENUM:
        return candidate
    LOG_WARNING('Localization key "{}" not found'.format(candidate))
    return None
# okay decompyling C:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\gui\Scaleform\locale\TOOLTIPS.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.05.04 15:25:25 Střední Evropa (letní čas)
| [
"info@webium.sk"
] | info@webium.sk |
dfe6effc5c5ab2dbea9b908ad60d54602c70d73a | af5e5399d86e276528898c4437a6bf909bfae48b | /modoboa_postfix_autoreply/migrations/0007_auto_20180928_1423.py | 6966118338358fa372c8e9538a30ff7540c42178 | [
"MIT"
] | permissive | modoboa/modoboa-postfix-autoreply | b63445dafc3555952ccf440c98059adc8203f6d5 | 675cff4673164cadfa70892a7184f51e4bc5b648 | refs/heads/master | 2023-09-01T14:41:29.232985 | 2023-08-29T15:59:34 | 2023-08-29T15:59:34 | 30,645,239 | 6 | 11 | MIT | 2023-08-29T15:59:35 | 2015-02-11T11:45:49 | Python | UTF-8 | Python | false | false | 921 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-28 12:23
from __future__ import unicode_literals
from django.db import migrations
def move_transport_entries(apps, schema_editor):
    """Copy legacy autoreply Transport rows into the new transport app's model.

    Each legacy row's domain becomes the new row's pattern, with the service
    fixed to "autoreply". Rows are inserted in one bulk_create call.
    """
    legacy_model = apps.get_model("modoboa_postfix_autoreply", "Transport")
    new_model = apps.get_model("transport", "Transport")
    entries = [
        new_model(pattern=legacy.domain, service="autoreply")
        for legacy in legacy_model.objects.all()
    ]
    new_model.objects.bulk_create(entries)
def backward(apps, schema_editor):
    """No-op reverse: the forward data migration is not undone."""
class Migration(migrations.Migration):
    # Both apps must be migrated to these points before the data copy runs:
    # the target model (transport) and the legacy model's final schema.
    dependencies = [
        ('transport', '0002_auto_20180928_1520'),
        ('modoboa_postfix_autoreply', '0006_auto_20160329_1501'),
    ]

    # Data migration only: copy rows forward; backward is a no-op.
    operations = [
        migrations.RunPython(move_transport_entries, backward),
    ]
| [
"tonio@ngyn.org"
] | tonio@ngyn.org |
1f3bbac74e70f96f7b9ec40d372d31e1b1f773b0 | fae70ce7b3a6aa11f568ea11f6432a15c303ff4c | /backend/home/admin.py | df00658e4315496f555f3883ec935e268f6c0eb7 | [] | no_license | crowdbotics-apps/msm-mobile-041298-d-16278 | 900532950c8bc78b9559d7f7b7369965be3dfd5a | d0cce051eca656658f757b52028c495de8d14df3 | refs/heads/master | 2023-01-23T10:33:49.516108 | 2020-12-04T09:25:15 | 2020-12-04T09:25:15 | 318,404,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | from django.contrib import admin
from .models import Payment, Student
# Expose the Student and Payment models in the Django admin interface
# with the default ModelAdmin options.
admin.site.register(Student)
admin.site.register(Payment)
# Register your models here.
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
6f37a52fa44b06d9339604895887644e5393908f | f0da5036820e92157a9108b4b6793e757a81861c | /tfmodels/generative/encoder_basemodel.py | 94b65286c61a3583c99508f199b4249200bffe17 | [
"MIT"
] | permissive | BioImageInformatics/tfmodels | cb1e136407f0f148194210b1449b26c126fe5a07 | 7219eac59ba82cfa28e6af5e17f313dcc5ddd65e | refs/heads/master | 2022-01-26T16:09:32.630262 | 2019-04-25T05:09:33 | 2019-04-25T05:09:33 | 115,466,269 | 4 | 3 | null | 2018-02-06T17:46:17 | 2017-12-27T00:55:40 | Python | UTF-8 | Python | false | false | 2,638 | py | from __future__ import print_function
import tensorflow as tf
from ..utilities.basemodel import BaseModel
class BaseEncoder(BaseModel):
    """Base class for encoder networks that model q(z|x).

    Subclasses must override :meth:`model` with the actual network graph.
    """

    # Default hyperparameters, overridable via keyword arguments to __init__.
    discriminator_defaults = {
        'enc_kernels': [32, 64, 128],  # kernel counts per encoder stage
        'name': 'encoder',
        'z_dim': 32,                   # latent code dimensionality
    }

    def __init__(self, **kwargs):
        # Merge into a copy of the class-level defaults: the original updated
        # the shared class dict in place, leaking one instance's kwargs into
        # every later instance.
        settings = dict(self.discriminator_defaults)
        settings.update(**kwargs)
        super(BaseEncoder, self).__init__(**settings)
        self.nonlin = tf.nn.selu

    def model(self, x_in, keep_prob=0.5, reuse=False):
        """Return q(z|x); must be implemented by subclasses.

        The original raised ``Exception(NotImplementedError)`` (an Exception
        whose argument was the NotImplementedError class); raise the proper
        exception type instead so callers can catch it specifically.
        """
        raise NotImplementedError
| [
"ing.nathany@gmail.com"
] | ing.nathany@gmail.com |
be7196960f3976f8925c1fb4d15ab6dec089b8be | 21e5825959a886787a3915ff0d3efa86d9cd3702 | /combat/finishers/crushskull.py | 90999f5e7d95fb019500f25b51ba1105f832984b | [
"MIT"
] | permissive | ChrisLR/Python-Roguelike-Template | e0df37752907377e606197f2469fda61202129d5 | 9b63742b0111c7e9456fb98a96a3cd28d41a1e10 | refs/heads/master | 2021-06-26T07:48:39.215338 | 2017-09-14T21:46:08 | 2017-09-14T21:46:08 | 69,761,175 | 0 | 0 | null | 2017-09-14T21:46:09 | 2016-10-01T20:09:24 | Python | UTF-8 | Python | false | false | 1,967 | py | from combat.enums import DamageType
from combat.finishers.base import Finisher
from echo import functions
from util import gridhelpers
class CrushSkull(Finisher):
    """Finisher that caves in the defender's skull with a blunt melee weapon.

    Eligible only when the attacker is adjacent (distance <= 1) and the
    attacker's weapon deals blunt melee damage.
    """
    name = "Crush Skull"
    description = "Crush the skull of your enemy."
    attacker_message = "You swing your {attacker_weapon} into a powerful overhead swing" \
                       " CRUSHING {defender_his} head like an overripe melon!"
    # Fix: the original ended this assignment with a stray trailing "\" that
    # silently spliced the following blank line into the statement. The string
    # value is unchanged.
    observer_message = "{attacker} swings {attacker_his} {attacker_weapon} into a powerful " \
                       "overhead swing CRUSHING {defender_his} head like an overripe melon!"

    @classmethod
    def evaluate(cls, attack_result):
        """Return True when this finisher may trigger for *attack_result*."""
        context = attack_result.context
        if context.distance_to > 1:
            return False
        attacker_weapon = context.attacker_weapon
        if attacker_weapon and hasattr(attacker_weapon, 'weapon'):
            weapon_component = attacker_weapon.weapon
            if weapon_component and weapon_component.melee_damage_type == DamageType.Blunt:
                return True
        return False

    @classmethod
    def execute(cls, attack_result):
        """Perform the finisher; currently only produces its message."""
        return cls.get_message(attack_result)

    @classmethod
    def get_message(cls, attack_result):
        """Format the attacker-perspective or observer-perspective message."""
        defender = attack_result.defender
        if attack_result.context.attacker.is_player:
            return cls.attacker_message.format(
                attacker_weapon=functions.get_name_or_string(attack_result.context.attacker_weapon),
                defender_his=functions.his_her_it(defender),
            )
        return cls.observer_message.format(
            attacker=functions.get_name_or_string(attack_result.context.attacker),
            attacker_his=functions.his_her_it(attack_result.context.attacker),
            attacker_weapon=functions.get_name_or_string(attack_result.context.attacker_weapon),
            defender_his=functions.his_her_it(defender),
        )
"arzhul@gmail.com"
] | arzhul@gmail.com |
4266808125fee1529cd17fad7f3dcc1b19a5058d | cc5c546379ca79a7634acb9a2a66ae690f20ee15 | /MyCode-01/Schema/__init__.py | 0fa987e6907d0c9a145c077b36c3792e0904e2ca | [] | no_license | vakili73/CodeV1 | 0c13f8b3cba1fa9b5d40039065931687dd0acc84 | 658826200ddb779baf2a1cc2bcccdc0e43aefd47 | refs/heads/master | 2020-04-01T07:23:58.679094 | 2018-12-22T21:07:24 | 2018-12-22T21:07:24 | 152,988,274 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 365 | py | from .BaseSchema import BaseSchema
from .SchemaV01 import SchemaV01
from .SchemaV02 import SchemaV02
from .SchemaV03 import SchemaV03
from .SchemaV04 import SchemaV04
from .SchemaV05 import SchemaV05
from .SchemaV06 import SchemaV06
from .SchemaV07 import SchemaV07
from .SchemaV08 import SchemaV08
from .SchemaV09 import SchemaV09
from .SchemaV10 import SchemaV10 | [
"v.vakili73@gmail.com"
] | v.vakili73@gmail.com |
24fe5caed2fa41cc86fa0d89688e6ed28366e88a | 411eff94020c192d5e5f657fa6012232ab1d051c | /game/src/coginvasion/holiday/DistributedWinterCoachActivityAI.py | 39d242aa474cc9dd01c703a6449f877ffd5cab35 | [] | no_license | xMakerx/cio-src | 48c9efe7f9a1bbf619a4c95a4198aaace78b8491 | 60b2bdf2c4a24d506101fdab1f51752d0d1861f8 | refs/heads/master | 2023-02-14T03:12:51.042106 | 2021-01-15T14:02:10 | 2021-01-15T14:02:10 | 328,268,776 | 1 | 0 | null | 2021-01-15T15:15:35 | 2021-01-09T23:51:37 | Python | UTF-8 | Python | false | false | 904 | py | """
COG INVASION ONLINE
Copyright (c) CIO Team. All rights reserved.
@file DistributedWinterCoachActivityAI.py
@author Maverick Liberty
@date November 14, 2015
"""
from direct.directnotify.DirectNotifyGlobal import directNotify
from direct.distributed.DistributedNodeAI import DistributedNodeAI
class DistributedWinterCoachActivityAI(DistributedNodeAI):
    """AI-side distributed object for the winter coach holiday activity."""
    notify = directNotify.newCategory('DistributedWinterCoachActivityAI')

    def __init__(self, air):
        DistributedNodeAI.__init__(self, air)

    def requestEnter(self):
        """Accept a client's enter request and greet them by avatar name."""
        avId = self.air.getAvatarIdFromSender()
        avatar = self.air.doId2do.get(avId)
        if avatar is None:
            # Fix: doId2do.get() can return None (e.g. sender already
            # disconnected); the original then crashed on avatar.getName().
            return
        self.sendUpdateToAvatarId(avId, 'enterAccepted', [])
        self.sendUpdate('greetAvatar', [avatar.getName()])

    def requestExit(self):
        """Acknowledge a client's request to leave the activity."""
        avId = self.air.getAvatarIdFromSender()
        self.sendUpdateToAvatarId(avId, 'exitAccepted', [])
"brianlach72@gmail.com"
] | brianlach72@gmail.com |
c23986aa29d9b2eb49e229606feab8f8b7f0c036 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/dev/cv/image_classification/FasterRCNN_ID0100_for_PyTorch/detectron2/evaluation/coco_evaluation.py | 1186fdbcd2ff0f2d04b51817e27db1ce5837691d | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 22,057 | py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import copy
import io
import itertools
import json
import logging
import numpy as np
import os
import pickle
from collections import OrderedDict
import pycocotools.mask as mask_util
import torch
from fvcore.common.file_io import PathManager
from pycocotools.coco import COCO
from tabulate import tabulate
import detectron2.utils.comm as comm
from detectron2.data import MetadataCatalog
from detectron2.data.datasets.coco import convert_to_coco_json
from detectron2.evaluation.fast_eval_api import COCOeval_opt as COCOeval
from detectron2.structures import Boxes, BoxMode, pairwise_iou
from detectron2.utils.logger import create_small_table
from .evaluator import DatasetEvaluator
class COCOEvaluator(DatasetEvaluator):
"""
Evaluate AR for object proposals, AP for instance detection/segmentation, AP
for keypoint detection outputs using COCO's metrics.
See http://cocodataset.org/#detection-eval and
http://cocodataset.org/#keypoints-eval to understand its metrics.
In addition to COCO, this evaluator is able to support any bounding box detection,
instance segmentation, or keypoint detection dataset.
"""
    def __init__(self, dataset_name, cfg, distributed, output_dir=None):
        """
        Args:
            dataset_name (str): name of the dataset to be evaluated.
                It must have either the following corresponding metadata:
                    "json_file": the path to the COCO format annotation
                Or it must be in detectron2's standard dataset format
                so it can be converted to COCO format automatically.
            cfg (CfgNode): config instance
            distributed (True): if True, will collect results from all ranks and run evaluation
                in the main process.
                Otherwise, will evaluate the results in the current process.
            output_dir (str): optional, an output directory to dump all
                results predicted on the dataset. The dump contains two files:
                1. "instance_predictions.pth" a file in torch serialization
                   format that contains all the raw original predictions.
                2. "coco_instances_results.json" a json file in COCO's result
                   format.
        """
        self._tasks = self._tasks_from_config(cfg)
        self._distributed = distributed
        self._output_dir = output_dir
        # Evaluation always runs on CPU, regardless of where inference ran.
        self._cpu_device = torch.device("cpu")
        self._logger = logging.getLogger(__name__)
        self._metadata = MetadataCatalog.get(dataset_name)
        if not hasattr(self._metadata, "json_file"):
            self._logger.info(
                f"'{dataset_name}' is not registered by `register_coco_instances`."
                " Therefore trying to convert it to COCO format ..."
            )
            # Cache the converted COCO-format annotations next to the outputs
            # so the conversion is not redone on subsequent runs.
            cache_path = os.path.join(output_dir, f"{dataset_name}_coco_format.json")
            self._metadata.json_file = cache_path
            convert_to_coco_json(dataset_name, cache_path)
        json_file = PathManager.get_local_path(self._metadata.json_file)
        # Suppress the banner pycocotools prints while loading the index.
        with contextlib.redirect_stdout(io.StringIO()):
            self._coco_api = COCO(json_file)
        self._kpt_oks_sigmas = cfg.TEST.KEYPOINT_OKS_SIGMAS
        # Test set json files do not contain annotations (evaluation must be
        # performed using the COCO evaluation server).
        self._do_evaluation = "annotations" in self._coco_api.dataset
    def reset(self):
        # Start a fresh evaluation: discard the per-image predictions that
        # process() accumulated for the previous dataset pass.
        self._predictions = []
def _tasks_from_config(self, cfg):
"""
Returns:
tuple[str]: tasks that can be evaluated under the given configuration.
"""
tasks = ("bbox",)
if cfg.MODEL.MASK_ON:
tasks = tasks + ("segm",)
if cfg.MODEL.KEYPOINT_ON:
tasks = tasks + ("keypoints",)
return tasks
    def process(self, inputs, outputs):
        """
        Args:
            inputs: the inputs to a COCO model (e.g., GeneralizedRCNN).
                It is a list of dict. Each dict corresponds to an image and
                contains keys like "height", "width", "file_name", "image_id".
            outputs: the outputs of a COCO model. It is a list of dicts with key
                "instances" that contains :class:`Instances`.
        """
        for input, output in zip(inputs, outputs):
            prediction = {"image_id": input["image_id"]}

            # TODO this is ugly
            if "instances" in output:
                # Move results off the inference device before serialization.
                instances = output["instances"].to(self._cpu_device)
                # NOTE(review): scores >= 0 keeps every prediction; presumably
                # a placeholder for a real confidence threshold — confirm.
                keep_mask = instances.scores >= 0
                instances.scores = instances.scores[keep_mask]
                instances.pred_boxes = instances.pred_boxes[keep_mask]
                instances.pred_classes = instances.pred_classes[keep_mask]
                if "pred_masks" in instances._fields.keys():
                    instances.pred_masks = instances.pred_masks[keep_mask]
                prediction["instances"] = instances_to_coco_json(instances, input["image_id"])
            if "proposals" in output:
                prediction["proposals"] = output["proposals"].to(self._cpu_device)
            self._predictions.append(prediction)
    def evaluate(self):
        """Aggregate predictions (across ranks if distributed) and compute metrics.

        Returns:
            dict: a deep copy of ``self._results`` keyed by task, or {} on
            non-main ranks / when no predictions were collected.
        """
        if self._distributed:
            comm.synchronize()
            # Gather every rank's predictions onto rank 0 and flatten.
            predictions = comm.gather(self._predictions, dst=0)
            predictions = list(itertools.chain(*predictions))
            # Only the main process runs the actual evaluation.
            if not comm.is_main_process():
                return {}
        else:
            predictions = self._predictions
        if len(predictions) == 0:
            self._logger.warning("[COCOEvaluator] Did not receive valid predictions.")
            return {}
        if self._output_dir:
            PathManager.mkdirs(self._output_dir)
            file_path = os.path.join(self._output_dir, "instances_predictions.pth")
            with PathManager.open(file_path, "wb") as f:
                torch.save(predictions, f)
        self._results = OrderedDict()
        if "proposals" in predictions[0]:
            self._eval_box_proposals(predictions)
        if "instances" in predictions[0]:
            self._eval_predictions(set(self._tasks), predictions)
        # Copy so the caller can do whatever with results
        return copy.deepcopy(self._results)
def _eval_predictions(self, tasks, predictions):
    """
    Evaluate predictions on the given tasks.
    Fill self._results with the metrics of the tasks.
    """
    self._logger.info("Preparing results for COCO format ...")
    # Flatten the per-image lists of COCO-format detections into one list.
    coco_results = list(itertools.chain(*[x["instances"] for x in predictions]))

    # unmap the category ids for COCO
    # (the model predicts contiguous ids; the annotation file may use
    # arbitrary dataset ids, so translate back before evaluation)
    if hasattr(self._metadata, "thing_dataset_id_to_contiguous_id"):
        reverse_id_mapping = {
            v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items()
        }
        for result in coco_results:
            category_id = result["category_id"]
            assert (
                category_id in reverse_id_mapping
            ), "A prediction has category_id={}, which is not available in the dataset.".format(
                category_id
            )
            result["category_id"] = reverse_id_mapping[category_id]

    if self._output_dir:
        # Dump the COCO-format results for external tooling / inspection.
        file_path = os.path.join(self._output_dir, "coco_instances_results.json")
        self._logger.info("Saving results to {}".format(file_path))
        with PathManager.open(file_path, "w") as f:
            f.write(json.dumps(coco_results))
            f.flush()

    if not self._do_evaluation:
        self._logger.info("Annotations are not available for evaluation.")
        return

    self._logger.info("Evaluating predictions ...")
    for task in sorted(tasks):
        coco_eval = (
            _evaluate_predictions_on_coco(
                self._coco_api, coco_results, task, kpt_oks_sigmas=self._kpt_oks_sigmas
            )
            if len(coco_results) > 0
            else None  # cocoapi does not handle empty results very well
        )

        res = self._derive_coco_results(
            coco_eval, task, class_names=self._metadata.get("thing_classes")
        )
        self._results[task] = res
def _eval_box_proposals(self, predictions):
    """
    Evaluate the box proposals in predictions.
    Fill self._results with the metrics for "box_proposals" task.
    """
    if self._output_dir:
        # Saving generated box proposals to file.
        # Predicted box_proposals are in XYXY_ABS mode.
        bbox_mode = BoxMode.XYXY_ABS.value
        ids, boxes, objectness_logits = [], [], []
        for prediction in predictions:
            ids.append(prediction["image_id"])
            boxes.append(prediction["proposals"].proposal_boxes.tensor.numpy())
            objectness_logits.append(prediction["proposals"].objectness_logits.numpy())

        proposal_data = {
            "boxes": boxes,
            "objectness_logits": objectness_logits,
            "ids": ids,
            "bbox_mode": bbox_mode,
        }
        with PathManager.open(os.path.join(self._output_dir, "box_proposals.pkl"), "wb") as f:
            pickle.dump(proposal_data, f)

    if not self._do_evaluation:
        self._logger.info("Annotations are not available for evaluation.")
        return

    self._logger.info("Evaluating bbox proposals ...")
    res = {}
    # Average Recall for two proposal budgets (100, 1000) in each
    # object-area bucket; keys look like "AR@100", "ARs@1000", etc.
    areas = {"all": "", "small": "s", "medium": "m", "large": "l"}
    for limit in [100, 1000]:
        for area, suffix in areas.items():
            stats = _evaluate_box_proposals(predictions, self._coco_api, area=area, limit=limit)
            key = "AR{}@{:d}".format(suffix, limit)
            res[key] = float(stats["ar"].item() * 100)
    self._logger.info("Proposal metrics: \n" + create_small_table(res))
    self._results["box_proposals"] = res
def _derive_coco_results(self, coco_eval, iou_type, class_names=None):
    """
    Derive the desired score numbers from summarized COCOeval.

    Args:
        coco_eval (None or COCOEval): None represents no predictions from model.
        iou_type (str): one of "bbox", "segm" or "keypoints".
        class_names (None or list[str]): if provided, will use it to predict
            per-category AP.

    Returns:
        a dict of {metric name: score}; scores are percentages, NaN when a
        metric could not be computed.
    """
    metrics = {
        "bbox": ["AP", "AP50", "AP75", "APs", "APm", "APl"],
        "segm": ["AP", "AP50", "AP75", "APs", "APm", "APl"],
        "keypoints": ["AP", "AP50", "AP75", "APm", "APl"],
    }[iou_type]

    if coco_eval is None:
        # Logger.warn is a deprecated alias; use Logger.warning.
        self._logger.warning("No predictions from the model!")
        return {metric: float("nan") for metric in metrics}

    # the standard metrics; cocoapi uses -1 to mark "not computed"
    results = {
        metric: float(coco_eval.stats[idx] * 100 if coco_eval.stats[idx] >= 0 else "nan")
        for idx, metric in enumerate(metrics)
    }
    self._logger.info(
        "Evaluation results for {}: \n".format(iou_type) + create_small_table(results)
    )
    if not np.isfinite(sum(results.values())):
        self._logger.info("Some metrics cannot be computed and is shown as NaN.")

    if class_names is None or len(class_names) <= 1:
        return results
    # Compute per-category AP
    # from https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L222-L252 # noqa
    precisions = coco_eval.eval["precision"]
    # precision has dims (iou, recall, cls, area range, max dets)
    assert len(class_names) == precisions.shape[2]

    results_per_category = []
    for idx, name in enumerate(class_names):
        # area range index 0: all area ranges
        # max dets index -1: typically 100 per image
        precision = precisions[:, :, idx, 0, -1]
        # -1 entries mark invalid precision values; exclude them.
        precision = precision[precision > -1]
        ap = np.mean(precision) if precision.size else float("nan")
        results_per_category.append(("{}".format(name), float(ap * 100)))

    # tabulate it: lay the (name, AP) pairs out in up to 3 column-pairs
    N_COLS = min(6, len(results_per_category) * 2)
    results_flatten = list(itertools.chain(*results_per_category))
    results_2d = itertools.zip_longest(*[results_flatten[i::N_COLS] for i in range(N_COLS)])
    table = tabulate(
        results_2d,
        tablefmt="pipe",
        floatfmt=".3f",
        headers=["category", "AP"] * (N_COLS // 2),
        numalign="left",
    )
    self._logger.info("Per-category {} AP: \n".format(iou_type) + table)

    results.update({"AP-" + name: ap for name, ap in results_per_category})
    return results
def instances_to_coco_json(instances, img_id):
    """
    Dump an "Instances" object to a COCO-format json that's used for evaluation.

    Args:
        instances (Instances): predictions for one image (``.numpy()`` calls
            below require the tensors to already be on CPU).
        img_id (int): the image id

    Returns:
        list[dict]: list of json annotations in COCO format.
    """
    num_instance = len(instances)
    if num_instance == 0:
        return []

    # COCO json stores boxes as absolute XYWH, so convert from XYXY.
    boxes = instances.pred_boxes.tensor.numpy()
    boxes = BoxMode.convert(boxes, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS)
    boxes = boxes.tolist()
    scores = instances.scores.tolist()
    classes = instances.pred_classes.tolist()

    has_mask = instances.has("pred_masks")
    if has_mask:
        # use RLE to encode the masks, because they are too large and takes memory
        # since this evaluator stores outputs of the entire dataset
        rles = [
            mask_util.encode(np.array(mask[:, :, None], order="F", dtype="uint8"))[0]
            for mask in instances.pred_masks
        ]
        for rle in rles:
            # "counts" is an array encoded by mask_util as a byte-stream. Python3's
            # json writer which always produces strings cannot serialize a bytestream
            # unless you decode it. Thankfully, utf-8 works out (which is also what
            # the pycocotools/_mask.pyx does).
            rle["counts"] = rle["counts"].decode("utf-8")

    has_keypoints = instances.has("pred_keypoints")
    if has_keypoints:
        keypoints = instances.pred_keypoints

    results = []
    for k in range(num_instance):
        result = {
            "image_id": img_id,
            "category_id": classes[k],
            "bbox": boxes[k],
            "score": scores[k],
        }
        if has_mask:
            result["segmentation"] = rles[k]
        if has_keypoints:
            # In COCO annotations,
            # keypoints coordinates are pixel indices.
            # However our predictions are floating point coordinates.
            # Therefore we subtract 0.5 to be consistent with the annotation format.
            # This is the inverse of data loading logic in `datasets/coco.py`.
            # NOTE: this mutates `instances.pred_keypoints` in place.
            keypoints[k][:, :2] -= 0.5
            result["keypoints"] = keypoints[k].flatten().tolist()
        results.append(result)
    return results
# inspired from Detectron:
# https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L255 # noqa
def _evaluate_box_proposals(dataset_predictions, coco_api, thresholds=None, area="all", limit=None):
    """
    Evaluate detection proposal recall metrics. This function is a much
    faster alternative to the official COCO API recall evaluation code. However,
    it produces slightly different results.

    Args:
        dataset_predictions: list of dicts with "image_id" and "proposals".
        coco_api: COCO object holding the ground-truth annotations.
        thresholds: IoU thresholds to compute recall at (defaults to
            0.5:0.05:0.95, the COCO convention).
        area (str): key into the area-bucket table below.
        limit (None or int): if given, only the top-`limit` proposals
            (by objectness) per image are considered.
    """
    # Record max overlap value for each gt box
    # Return vector of overlap values
    areas = {
        "all": 0,
        "small": 1,
        "medium": 2,
        "large": 3,
        "96-128": 4,
        "128-256": 5,
        "256-512": 6,
        "512-inf": 7,
    }
    # Pixel-area ranges (in units of squared pixels) indexed by the table above.
    area_ranges = [
        [0 ** 2, 1e5 ** 2],  # all
        [0 ** 2, 32 ** 2],  # small
        [32 ** 2, 96 ** 2],  # medium
        [96 ** 2, 1e5 ** 2],  # large
        [96 ** 2, 128 ** 2],  # 96-128
        [128 ** 2, 256 ** 2],  # 128-256
        [256 ** 2, 512 ** 2],  # 256-512
        [512 ** 2, 1e5 ** 2],
    ]  # 512-inf
    assert area in areas, "Unknown area range: {}".format(area)
    area_range = area_ranges[areas[area]]
    gt_overlaps = []
    num_pos = 0

    for prediction_dict in dataset_predictions:
        predictions = prediction_dict["proposals"]

        # sort predictions in descending order
        # TODO maybe remove this and make it explicit in the documentation
        inds = predictions.objectness_logits.sort(descending=True)[1]
        predictions = predictions[inds]

        ann_ids = coco_api.getAnnIds(imgIds=prediction_dict["image_id"])
        anno = coco_api.loadAnns(ann_ids)
        # Ground truth, excluding crowd regions; COCO stores XYWH boxes.
        gt_boxes = [
            BoxMode.convert(obj["bbox"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS)
            for obj in anno
            if obj["iscrowd"] == 0
        ]
        gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4)  # guard against no boxes
        gt_boxes = Boxes(gt_boxes)
        gt_areas = torch.as_tensor([obj["area"] for obj in anno if obj["iscrowd"] == 0])

        if len(gt_boxes) == 0 or len(predictions) == 0:
            continue

        # Keep only ground-truth boxes whose area falls in the chosen bucket.
        valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1])
        gt_boxes = gt_boxes[valid_gt_inds]

        num_pos += len(gt_boxes)

        if len(gt_boxes) == 0:
            continue

        if limit is not None and len(predictions) > limit:
            predictions = predictions[:limit]

        overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes)

        _gt_overlaps = torch.zeros(len(gt_boxes))
        # Greedy one-to-one matching: on every pass, take the (proposal, gt)
        # pair with the highest remaining IoU, record it, then invalidate
        # both the proposal's row and the gt's column.
        for j in range(min(len(predictions), len(gt_boxes))):
            # find which proposal box maximally covers each gt box
            # and get the iou amount of coverage for each gt box
            max_overlaps, argmax_overlaps = overlaps.max(dim=0)

            # find which gt box is 'best' covered (i.e. 'best' = most iou)
            gt_ovr, gt_ind = max_overlaps.max(dim=0)
            assert gt_ovr >= 0
            # find the proposal box that covers the best covered gt box
            box_ind = argmax_overlaps[gt_ind]
            # record the iou coverage of this gt box
            _gt_overlaps[j] = overlaps[box_ind, gt_ind]
            assert _gt_overlaps[j] == gt_ovr
            # mark the proposal box and the gt box as used
            overlaps[box_ind, :] = -1
            overlaps[:, gt_ind] = -1

        # append recorded iou coverage level
        gt_overlaps.append(_gt_overlaps)

    gt_overlaps = (
        torch.cat(gt_overlaps, dim=0) if len(gt_overlaps) else torch.zeros(0, dtype=torch.float32)
    )
    gt_overlaps, _ = torch.sort(gt_overlaps)

    if thresholds is None:
        step = 0.05
        thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32)
    recalls = torch.zeros_like(thresholds)
    # compute recall for each iou threshold
    for i, t in enumerate(thresholds):
        recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos)
    # ar = 2 * np.trapz(recalls, thresholds)
    ar = recalls.mean()
    return {
        "ar": ar,
        "recalls": recalls,
        "thresholds": thresholds,
        "gt_overlaps": gt_overlaps,
        "num_pos": num_pos,
    }
def _evaluate_predictions_on_coco(coco_gt, coco_results, iou_type, kpt_oks_sigmas=None):
    """
    Evaluate the coco results using COCOEval API.

    Args:
        coco_gt: COCO object with the ground-truth annotations.
        coco_results (list[dict]): non-empty predictions in COCO json format.
        iou_type (str): "bbox", "segm" or "keypoints".
        kpt_oks_sigmas: optional per-keypoint OKS sigmas overriding the
            COCO defaults (used only for keypoint evaluation).

    Returns:
        the COCOeval object after evaluate/accumulate/summarize.
    """
    assert len(coco_results) > 0

    if iou_type == "segm":
        coco_results = copy.deepcopy(coco_results)
        # When evaluating mask AP, if the results contain bbox, cocoapi will
        # use the box area as the area of the instance, instead of the mask area.
        # This leads to a different definition of small/medium/large.
        # We remove the bbox field to let mask AP use mask area.
        for c in coco_results:
            c.pop("bbox", None)

    coco_dt = coco_gt.loadRes(coco_results)
    coco_eval = COCOeval(coco_gt, coco_dt, iou_type)
    if iou_type == "keypoints":
        # Use the COCO default keypoint OKS sigmas unless overrides are specified
        if kpt_oks_sigmas:
            assert hasattr(coco_eval.params, "kpt_oks_sigmas"), "pycocotools is too old!"
            coco_eval.params.kpt_oks_sigmas = np.array(kpt_oks_sigmas)
        # COCOAPI requires every detection and every gt to have keypoints, so
        # we just take the first entry from both
        num_keypoints_dt = len(coco_results[0]["keypoints"]) // 3
        num_keypoints_gt = len(next(iter(coco_gt.anns.values()))["keypoints"]) // 3
        num_keypoints_oks = len(coco_eval.params.kpt_oks_sigmas)
        # Keypoint counts of predictions, ground truth and sigmas must match.
        assert num_keypoints_oks == num_keypoints_dt == num_keypoints_gt, (
            f"[COCOEvaluator] Prediction contain {num_keypoints_dt} keypoints. "
            f"Ground truth contains {num_keypoints_gt} keypoints. "
            f"The length of cfg.TEST.KEYPOINT_OKS_SIGMAS is {num_keypoints_oks}. "
            "They have to agree with each other. For meaning of OKS, please refer to "
            "http://cocodataset.org/#keypoints-eval."
        )

    coco_eval.evaluate()
    coco_eval.accumulate()
    coco_eval.summarize()

    return coco_eval
| [
"wangjiangben@huawei.com"
] | wangjiangben@huawei.com |
d72458e76f86057e04cab0990ba3207a0ce687ce | 9e8d98c48035d4ee61fa930c324c822a61e5ae55 | /examples3/test_surface_ct.py | 53652450ddfa76ea5a3507d290c37f21d095ec74 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | GRSEB9S/mystic | 59ac0c284a19f7b685a98420cd49d21bb10ff0cd | 748e0030c8d7d8b005f2eafa17a4581c2b3ddb47 | refs/heads/master | 2021-08-14T07:11:04.439139 | 2017-11-14T23:49:22 | 2017-11-14T23:49:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,116 | py | #!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2010-2016 California Institute of Technology.
# Copyright (c) 2016-2017 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/mystic/blob/master/LICENSE
"""
an example of using an interpolator within a surface object
"""
from surface import Surface_Clough as Surface
import time
# Demo script: sample a 2-D Griewank surface with an ensemble of solvers,
# interpolate the sampled trajectories, then report extrema and plot.
if __name__ == '__main__':
    start = time.time()

    # Serial, "all-in-one" variant kept for reference (disabled docstring).
    """
    from mystic.models import griewangk
    from mystic.termination import NormalizedChangeOverGeneration as NCOG
    stop = NCOG(1e-4)
    bounds = 2*[(-9.5,9.5)]
    self = Surface(griewangk, maxpts=1000)
    # self.doit(bounds, stop)
    step=100; scale=False; shift=False; density=9; kwds={}
    if not self.sampler.traj: self.sampler.UseTrajectories()
    # get trajectories
    self.Sample(bounds, stop)
    # get interpolated function
    self.Interpolate(**kwds)
    # check extrema #XXX: put _min,_max in Interpolate? (downsampled)
    f = lambda x,z: (z,self.surrogate(*x))
    print("min: {}; min@f: {}".format(*f(*self._min())))
    print("max: {}; max@f: {}".format(*f(*self._max())))
    # plot surface
    self.Plot(step, scale, shift, density)
    """

    # parallel configuration (falls back to a serial map without pathos)
    try:
        from pathos.helpers import freeze_support
        freeze_support()
        from pathos.pools import ProcessPool as Pool
        #from pathos.pools import ThreadPool as Pool
        #from pathos.pools import ParallelPool as Pool
    except ImportError:
        from mystic.pools import SerialPool as Pool
    _map = Pool().map

    # tools
    from mystic.termination import VTR, ChangeOverGeneration as COG
    from mystic.termination import NormalizedChangeOverGeneration as NCOG
    from mystic.monitors import LoggingMonitor, VerboseMonitor, Monitor
    from klepto.archives import dir_archive

    stop = NCOG(1e-4)
    disp = False  # print optimization summary
    stepmon = False  # use LoggingMonitor
    archive = False  # save an archive
    traj = not stepmon  # save all trajectories internally, if no logs

    # cost function
    from mystic.models import griewangk as model
    ndim = 2  # model dimensionality
    bounds = ndim * [(-9.5,9.5)]  # griewangk

    # the ensemble solvers
    from mystic.solvers import BuckshotSolver, LatticeSolver
    # the local solvers
    from mystic.solvers import PowellDirectionalSolver

    sprayer = BuckshotSolver
    seeker = PowellDirectionalSolver
    npts = 25  # number of solvers
    retry = 1  # max consectutive iteration retries without a cache 'miss'
    tol = 8  # rounding precision
    mem = 1  # cache rounding precision

    #CUTE: 'configure' monitor and archive if they are desired
    if stepmon:
        stepmon = LoggingMonitor(1)  # montor for all runs
        itermon = LoggingMonitor(1, filename='inv.txt')  #XXX: log.txt?
    else:
        stepmon = itermon = None
    if archive:  #python2.5
        ar_name = '__%s_%sD_cache__' % (model.__self__.__class__.__name__,ndim)
        archive = dir_archive(ar_name, serialized=True, cached=False)
        ar_name = '__%s_%sD_invcache__' % (model.__self__.__class__.__name__,ndim)
        ivcache = dir_archive(ar_name, serialized=True, cached=False)
    else:
        archive = ivcache = None

    from mystic.search import Searcher  #XXX: init w/ archive, then UseArchive?
    sampler = Searcher(npts, retry, tol, mem, _map, archive, sprayer, seeker)
    sampler.Verbose(disp)
    sampler.UseTrajectories(traj)

    ### doit ###
    maxpts = 1000.  #10000.
    surface = Surface(model, sampler, maxpts=maxpts, dim=ndim)
    surface.UseMonitor(stepmon, itermon)
    surface.UseArchive(archive, ivcache)
    density = 9
    shift = 0
    scale = 0
    step = 200
    args = {
        #'fill_value': 1.0,
    }
    #surface.doit(bounds, stop, step=step)
    #############
    # get trajectories
    surface.Sample(bounds, stop)
    print("TOOK: %s" % (time.time() - start))
    # exit()
    # get interpolated function
    surface.Interpolate(**args)
    # check extrema #XXX: put _min,_max in Interpolate? (downsampled)
    f = lambda x,z: (z,surface.surrogate(*x))
    print("min: {}; min@f: {}".format(*f(*surface._min())))
    print("max: {}; max@f: {}".format(*f(*surface._max())))
    # print("TOOK: %s" % (time.time() - start))
    # plot surface
    axes = (0,1)
    vals = ()  # use remaining minima as the fixed values
    surface.Plot(step, scale, shift, density, axes, vals)

    # Optional serialization of the surrogate (disabled docstring).
    """
    try:
        from klepto.archives import file_archive
        archive = file_archive('models.pkl', serialized=True, cached=False)
        archive[model.im_class.__name__.lower()] = surface.surrogate
    except Exception:
        print("serialization failed")
    """

    # some testing of interpolated model
    import numpy as np
    actual = np.asarray(surface.z)  # downsample?
    interp = surface.surrogate(*surface.x.T)  # downsample?
    print("sum diff squares")
    print("actual and interp: %s" % np.sum((actual - interp)**2))

# EOF
| [
"mmckerns@968178ea-60bd-409e-af13-df8a517b6005"
] | mmckerns@968178ea-60bd-409e-af13-df8a517b6005 |
a1409738902176bfc7b30eefe27cd0406cc72281 | 62e58c051128baef9452e7e0eb0b5a83367add26 | /x12/6040/265006040.py | 8d9319d572368cb051ac2c665bd006e19f823ef1 | [] | no_license | dougvanhorn/bots-grammars | 2eb6c0a6b5231c14a6faf194b932aa614809076c | 09db18d9d9bd9d92cefbf00f1c0de1c590fe3d0d | refs/heads/master | 2021-05-16T12:55:58.022904 | 2019-05-17T15:22:23 | 2019-05-17T15:22:23 | 105,274,633 | 0 | 0 | null | 2017-09-29T13:21:21 | 2017-09-29T13:21:21 | null | UTF-8 | Python | false | false | 2,266 | py | from bots.botsconfig import *
from records006040 import recorddefs
# Bots EDI grammar data: X12 version 006040 syntax metadata plus the nested
# segment structure (each dict: segment ID, MIN/MAX occurrences, and an
# optional LEVEL list of child segments/loops). ID/MIN/MAX/LEVEL come from
# `bots.botsconfig`.
syntax = {
    'version': '00604',
    'functionalgroup': 'TO',
}
structure = [
    # Transaction set envelope: ST ... SE.
    {ID: 'ST', MIN: 1, MAX: 1, LEVEL: [
        {ID: 'BGN', MIN: 1, MAX: 1},
        {ID: 'N1', MIN: 1, MAX: 5, LEVEL: [
            {ID: 'N2', MIN: 0, MAX: 2},
            {ID: 'N3', MIN: 0, MAX: 2},
            {ID: 'N4', MIN: 0, MAX: 1},
            {ID: 'REF', MIN: 0, MAX: 12},
            {ID: 'PER', MIN: 0, MAX: 3},
        ]},
        # Repeating LX detail loop.
        {ID: 'LX', MIN: 1, MAX: 99999, LEVEL: [
            {ID: 'REF', MIN: 1, MAX: 12},
            {ID: 'PDS', MIN: 1, MAX: 20},
            {ID: 'PDE', MIN: 0, MAX: 99999},
            {ID: 'NX1', MIN: 0, MAX: 1},
            {ID: 'NX2', MIN: 0, MAX: 30},
            {ID: 'PRD', MIN: 0, MAX: 1},
            {ID: 'LRQ', MIN: 0, MAX: 1},
            {ID: 'LN1', MIN: 0, MAX: 1},
            {ID: 'MSG', MIN: 0, MAX: 100},
            {ID: 'IN1', MIN: 0, MAX: 99999, LEVEL: [
                {ID: 'IN2', MIN: 0, MAX: 30},
                {ID: 'DMG', MIN: 0, MAX: 1},
                {ID: 'FPT', MIN: 0, MAX: 1},
                {ID: 'N4', MIN: 0, MAX: 99999, LEVEL: [
                    {ID: 'N3', MIN: 0, MAX: 2},
                    {ID: 'PER', MIN: 0, MAX: 4},
                ]},
            ]},
            {ID: 'MCD', MIN: 0, MAX: 99999, LEVEL: [
                {ID: 'AMT', MIN: 0, MAX: 50},
            ]},
            {ID: 'N1', MIN: 0, MAX: 99999, LEVEL: [
                {ID: 'N2', MIN: 0, MAX: 2},
                {ID: 'N3', MIN: 0, MAX: 2},
                {ID: 'N4', MIN: 0, MAX: 1},
                {ID: 'REF', MIN: 0, MAX: 12},
                {ID: 'PER', MIN: 0, MAX: 3},
                {ID: 'AMT', MIN: 0, MAX: 2},
            ]},
            {ID: 'TIS', MIN: 0, MAX: 99999, LEVEL: [
                {ID: 'AMT', MIN: 0, MAX: 30},
            ]},
            {ID: 'PWK', MIN: 0, MAX: 5, LEVEL: [
                {ID: 'N1', MIN: 0, MAX: 1},
                {ID: 'N2', MIN: 0, MAX: 2},
                {ID: 'N3', MIN: 0, MAX: 2},
                {ID: 'N4', MIN: 0, MAX: 1},
                {ID: 'REF', MIN: 0, MAX: 12},
                {ID: 'PER', MIN: 0, MAX: 3},
            ]},
        ]},
        # LS/LE bounded loop.
        {ID: 'LS', MIN: 0, MAX: 1, LEVEL: [
            {ID: 'TIS', MIN: 1, MAX: 99999, LEVEL: [
                {ID: 'AMT', MIN: 0, MAX: 30},
                {ID: 'MSG', MIN: 0, MAX: 100},
            ]},
            {ID: 'LE', MIN: 1, MAX: 1},
        ]},
        {ID: 'SE', MIN: 1, MAX: 1},
    ]}
]
| [
"doug.vanhorn@tagglogistics.com"
] | doug.vanhorn@tagglogistics.com |
33c4f63224fc7c43de4b5920f527180a100ed9c8 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03712/s144694352.py | 7cb35c72c8fda9d98bf5dfd772936144ca8be536 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 352 | py | #!/usr/bin/env python3
# Generated by 1.1.7.1 https://github.com/kyuridenamida/atcoder-tools
def main():
    """Read H, W and an H-row grid from stdin; print the grid framed by '#'."""
    h, w = map(int, input().split())
    border = "#" * (w + 2)
    print(border)
    for _ in range(h):
        row = input()
        print("#" + row + "#")
    print(border)
def test():
    """Run this module's doctests via doctest.testmod()."""
    import doctest
    doctest.testmod()
if __name__ == '__main__':
    # test()  # uncomment to run doctests instead of the solution
    main()
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
355d5d730e52abc11dde2517b53d52a39c7f29e6 | 576b680d1d3ba0f07837e7f28935b0a4632561ad | /pyrepr/repr_json.py | 0a9bb92ce96d1f2c57c17c3e119f1144ad27ba55 | [] | no_license | patarapolw/pyrepr-toml | 4b8639050b118896ef7a8cd99067864e9968fea8 | 187bc1cd2adf2e487712d2d629564f4d2c972dcb | refs/heads/master | 2020-04-02T02:10:32.258042 | 2018-10-20T10:45:16 | 2018-10-20T10:45:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 532 | py | import json
from json.encoder import _make_iterencode
from .util import hyper_markdownify
class ReprJSONEncoder(json.JSONEncoder):
    """JSON encoder that routes string serialization through
    ``hyper_markdownify``.

    NOTE(review): this reaches into ``json.encoder._make_iterencode``, a
    private CPython helper whose signature can change between Python
    versions — verify against the target interpreter.
    """

    def iterencode(self, o, _one_shot=False):
        # Mirrors json.JSONEncoder.iterencode, but supplies
        # hyper_markdownify in the string-encoder position (and plain
        # str as the float formatter), so every string value is
        # transformed before being written out.
        if self.check_circular:
            markers = {}
        else:
            markers = None
        _iterencode = _make_iterencode(
            markers, self.default, hyper_markdownify, self.indent, str,
            self.key_separator, self.item_separator, self.sort_keys,
            self.skipkeys, _one_shot)
        return _iterencode(o, 0)
| [
"patarapolw@gmail.com"
] | patarapolw@gmail.com |
4ecf0174c40593d6223878ddf51d8f839fb2d298 | e3cfab409afb5ff9a0b3812bf848be6ca9239cee | /test/testCartesian.py | 67c8b15475eef66a39f7787d3010fcf0d1e40aad | [
"MIT"
] | permissive | mrJean1/PyGeodesy | 565266a4f7f6cda5abe98e915bbd868f6cbe1760 | eba35704b248a7a0388b30f3cea19793921e99b7 | refs/heads/master | 2023-08-23T13:58:20.069917 | 2023-08-20T18:50:45 | 2023-08-20T18:50:45 | 68,028,481 | 283 | 66 | null | 2022-04-09T00:40:52 | 2016-09-12T16:49:10 | Python | UTF-8 | Python | false | false | 9,980 | py |
# -*- coding: utf-8 -*-
# Test cartesians.
__all__ = ('Tests',)
__version__ = '23.05.23'
from bases import GeodSolve, geographiclib, isPython35, TestsBase
from pygeodesy import R_M, classname, Datums, degrees, fstr, Height, \
modulename, RefFrames, Transforms # PYCHOK expected
from pygeodesy.cartesianBase import CartesianBase
from pygeodesy.ecef import Ecef9Tuple
from pygeodesy.namedTuples import LatLon2Tuple, LatLon3Tuple, LatLon4Tuple, \
PhiLam2Tuple, PhiLam3Tuple, PhiLam4Tuple, \
Vector3Tuple, Vector4Tuple # PYCHOK hanging
class Tests(TestsBase):
def testCartesian(self, module, Sph=False, Nv=False, X=False): # MCCABE 45
self.subtitle(module, 'Cartesian')
Cartesian = module.Cartesian
LatLon = module.LatLon
Nvector = module.Nvector if Nv else Vector4Tuple
datum = Datums.Sphere if Sph else Datums.WGS84
datum2 = None if Sph else Datums.WGS72
# <https://www.Movable-Type.co.UK/scripts/geodesy/docs/
# latlon-nvector-ellipsoidal.js.html#line309>
c = Cartesian(3980581, 97, 4966825, datum=datum)
self.test('Cartesian0', c.toStr(prec=0), '[3980581, 97, 4966825]')
self.test('Cartesian4', c.toStr(prec=4), '[3980581.0, 97.0, 4966825.0]')
self.test('isEllipsoidal', c.isEllipsoidal, not Sph)
self.test('isSpherical', c.isSpherical, Sph)
self.testCopy(c)
if datum2:
d = c.convertDatum(datum2)
t = d.convertDatum(datum)
self.test('convertDatum', t, c) # PYCHOK attribute
if isPython35:
# using eval avoids SyntaxError with Python 3.4-,
# but t = eval("d @= ...") throws a SyntaxError
t = eval('datum2 @ c')
self.test('__matmul__', t, d)
t = eval('Transforms.Identity @ d')
self.test('__matmul__', t, d)
if c.isEllipsoidal:
t = c.dup(reframe=RefFrames.ITRF2000)
t = eval('RefFrames.ITRF2014 @ t')
self.test('__matmul__', t, t)
self.test('height', c.height, '-5918.380258' if Sph else '0.242887', prec=6)
self.test('height4', c.height4().toStr(prec=1), '(3984282.2, 97.1, 4971443.2, -5918.4)' if Sph
else '(3980580.8, 97.0, 4966824.8, 0.2)')
self.test('height4', c.height4(Cartesian=Cartesian, height=0).toStr(prec=1), '[3984282.2, 97.1, 4971443.2]' if Sph
else '[3980580.8, 97.0, 4966824.8]')
n = c.toNvector() # (x=0.622818, y=0.00002, z=0.782367, h=0.242887)
t = n.classname # Nvector.__name__
if Nv:
self.test(t, repr(n), 'Nvector(0.62538, 0.00002, 0.78032, -5918.38)' if Sph
else 'Nvector(0.62282, 0.00002, 0.78237, +0.24)')
self.test(t+'3', n.toStr(prec=3), '(0.625, 0.0, 0.78, -5918.38)' if Sph
else '(0.623, 0.0, 0.782, +0.24)')
self.test(t+'6', n.toStr(prec=6), '(0.625377, 0.000015, 0.780323, -5918.38)' if Sph
else '(0.622818, 0.000015, 0.782367, +0.24)') # PYCHOK attribute
else:
n = fstr(n, fmt='g', prec=9)
self.test(t, n, '0.625376979, 1.52393751e-05, 0.780322775, -5918.38026' if Sph
else '0.622817765, 1.51770114e-05, 0.782366942, 0.242886808')
for ll in ((50.0379, 8.5622), # FRA
(51.47, 0.4543), # LHR
# <https://www.EdWilliams.org/avform.htm#XTE>
(degrees(0.709186), -degrees(1.287762)), # JFK
(33.+57./60, -(118.+24./60)), # LAX
# <https://GeographicLib.SourceForge.io/html/python/examples.html>
(-41.32, 174.81), # WNZ, Wellington, NZ
(40.96, 5.50), # SAL, Salamanca, Spain
(40.1, 116.6), # BJS, Beijing Airport
(37.6, -122.4)): # SFO
p = LatLon(*ll)
q = p.toCartesian().toLatLon()
t = str(q)
self.test('LatLon', t, p, known=t.endswith('m')) # PYCHOK attribute
# c = Cartesian(3980581, 97, 4966825, datum=datum)
t = c.copy()
self.test('copy', t.isequalTo(c), True)
self.test('__eq__', t == c, True)
self.test('__ne__', t != c, False)
if hasattr(Cartesian, 'convertRefFrame'):
pass # PYCHOK attribute
for B in (False, True): # check return types
t = c.__class__
self.test('Cartesian', t, t)
# self.testReturnType(c.Ecef, Ecef, c.Ecef.__name__)
self.testReturnType(c.latlon, LatLon2Tuple, 'latlon')
self.testReturnType(c.latlonheight, LatLon3Tuple, 'latlonheight')
self.testReturnType(c.latlonheightdatum, LatLon4Tuple, 'latlonheightdatum')
self.testReturnType(c.height4(), Vector4Tuple, 'height4')
self.testReturnType(c.isequalTo(c), bool, 'isequalTo')
self.testReturnType(c.philam, PhiLam2Tuple, 'philam')
self.testReturnType(c.philamheight, PhiLam3Tuple, 'philamheight')
self.testReturnType(c.philamheightdatum, PhiLam4Tuple, 'philamheightdatum')
self.testReturnType(c.latlonheight, LatLon3Tuple, 'latlonheight')
self.testReturnType(c.toEcef(), Ecef9Tuple, 'toEcef')
self.testReturnType(c.toLatLon(), Ecef9Tuple if B else LatLon, 'toLatLon')
self.testReturnType(c.toNvector(), Vector4Tuple if B else Nvector, 'toNvector')
self.testReturnType(c.xyz, Vector3Tuple, 'xyz')
c = CartesianBase(c) # PYCHOK attribute
if hasattr(Cartesian, 'intersections2'):
# <https://GIS.StackExchange.com/questions/48937/calculating-intersection-of-two-circles>
c = Cartesian(-0.00323306, -0.7915, 0.61116)
n = classname(c, prefixed=True) + '.intersections2'
self.test(n, c.toLatLon(height=0), '37.673442°N, 090.234036°W' if Sph
else '89.998941°N, 090.234036°W') # XXX?
d = Cartesian(-0.0134464, -0.807775, 0.589337)
self.test(n, d.toLatLon(height=0), '36.109987°N, 090.95367°W' if Sph
else '89.99892°N, 090.95367°W') # XXX?
if Sph:
x, y = c.intersections2(0.0312705, d, 0.0421788, radius=None) # radii in radians
self.test(n, x.toStr(prec=6), '[-0.032779, -0.784769, 0.61892]') # -0.0327606, -0.784759, 0.618935
self.test(n, x.toLatLon(height=0), '38.237342°N, 092.391779°W') # 38.23838°N, 092.390487°W
if y is not x:
self.test(n, y.toStr(prec=6), '[0.025768, -0.798347, 0.601646]') # 0.0257661, -0.798332, 0.601666
self.test(n, y.toLatLon(height=0), '36.987868°N, 088.151309°W') # 36.98931°N, 088.151425°W
try:
from pygeodesy import trilaterate3d2 # with earth ... equivalent to Cartesian.intersections2?
n = modulename(trilaterate3d2, prefixed=True)
i, j = trilaterate3d2(c, 0.0312705, d, 0.0421788, Cartesian(0, 0, 0), 1) # radians
self.test(n, i.toStr(prec=6), '[-0.032761, -0.784757, 0.618937]', known=x.minus(i).length < 5e-5)
self.test(n, j.toStr(prec=6), '[0.025768, -0.798331, 0.601668]', known=y.minus(j).length < 5e-5)
except ImportError as x:
self.skip(str(x), n=2)
else:
x, y = c.intersections2(0.0312705, d, 0.0421788, sphere=True)
self.test(n, x.toStr(prec=6), '[-0.0035, -0.791926, 0.610589]')
self.test(n, x.toLatLon(height=0), '89.998941°N, 090.253237°W')
self.test(n, y.toStr(prec=6), '0.0312613') # radius
try:
from pygeodesy.vector3d import intersections2
n = modulename(intersections2, prefixed=True)
u = Vector3Tuple(-0.00323306, -0.7915, 0.61116)
v = Vector3Tuple(-0.0134464, -0.807775, 0.589337)
c, r = intersections2(u, 0.0312705, v, 0.0421788, sphere=True)
self.test(n, c.toStr(prec=6), '(-0.0035, -0.791926, 0.610589)')
self.test(n, r.toStr(prec=6), '0.0312613', known=True) # XXX G and g formats may add 1 decimal
v1, v2 = intersections2(u, 0.0312705, v, 0.0421788, sphere=False)
self.test(n, v1.toStr(prec=6), '(-0.021973, -0.766467, 0.0)')
if v2 is not v1:
self.test(n, v2.toStr(prec=6), '(0.027459, -0.797488, 0.0)')
except ImportError as x:
self.skip(str(x), n=4)
def testReturnType(self, inst, clas, name):
self.test(name, type(inst), clas) # type(inst).__name__ == clas.__name__
if __name__ == '__main__':
from pygeodesy import ellipsoidalExact, ellipsoidalNvector, ellipsoidalVincenty, \
sphericalNvector, sphericalTrigonometry
t = Tests(__file__, __version__)
t.testCartesian(sphericalNvector, Sph=True, Nv=True)
t.testCartesian(sphericalTrigonometry, Sph=True)
t.testCartesian(ellipsoidalNvector, Nv=True)
t.testCartesian(ellipsoidalVincenty)
if geographiclib:
from pygeodesy import ellipsoidalKarney
t.testCartesian(ellipsoidalKarney)
if GeodSolve:
from pygeodesy import ellipsoidalGeodSolve
t.testCartesian(ellipsoidalGeodSolve)
t.testCartesian(ellipsoidalExact, X=True)
t.results()
t.exit()
| [
"mrJean1@Gmail.com"
] | mrJean1@Gmail.com |
1006a1042d8562501d612446f32f554d87edcacb | 80217a305516de4c1921833b222f094f0148e9f9 | /backend/task_marker_21840/wsgi.py | 9f4c0dac7059e2f594e0ec93c50fe0ea283ef0a8 | [] | no_license | crowdbotics-apps/task-marker-21840 | ea9bcd56f7450a83ac6818c24d7294795deab092 | 8bcd4803dd4b0b56242f5bb92117230f1746f7e2 | refs/heads/master | 2023-01-02T08:14:51.362346 | 2020-10-22T19:26:21 | 2020-10-22T19:26:21 | 306,438,684 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | """
WSGI config for task_marker_21840 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'task_marker_21840.settings')
application = get_wsgi_application()
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
823ac15ec7ef31233f51a529dbf71638c8f76b59 | 72b77f97876983025eb05a5aa1d6f248a1be3074 | /binarysearch/ugly_number.py | 731272b3b9501c721d2305ba2085830cacf9bc46 | [
"Apache-2.0"
] | permissive | erjan/coding_exercises | 4c6bccb2cdac65ccbc3107a482914275ecd157f7 | 68dac358a6d4dabd41d47dbd4addb2ec50e0ca11 | refs/heads/master | 2023-09-02T07:25:30.886175 | 2023-08-27T06:13:06 | 2023-08-27T06:13:06 | 236,281,070 | 5 | 0 | Apache-2.0 | 2020-05-05T15:08:49 | 2020-01-26T07:32:09 | Python | UTF-8 | Python | false | false | 342 | py |
'''
Given an integer n, return whether its prime factors only include 2, 3 or 5.
'''
class Solution:
def solve(self, n):
num = n
if num == 0: return False
while num % 5 == 0: num /= 5
while num % 3 == 0: num /= 3
while num % 2 == 0: num /= 2
return num == 1
| [
"noreply@github.com"
] | erjan.noreply@github.com |
f4d92f33eefb8ade936fbd2367669894919aa93a | a7361705b32e868557dd033aa99d74889c70808c | /braintels_Sockets/servidor_socket.py | cfe9e8e22c5621a4b6635c7baa010d788146892c | [] | no_license | jorgepdsML/PYTHON_NETWORKING | ce6c3e11018b4592a13eaabcc57de6af0a57d39f | 829a7c92634c7ba78a84dbd9fea22cfe8452b371 | refs/heads/master | 2020-12-23T10:15:18.709084 | 2020-02-08T01:12:52 | 2020-02-08T01:12:52 | 237,121,762 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 816 | py | import socket,pickle,time
#ESTABLECER LA IP DEL SERVIDOR (DE ESTE ORDENADOR)
HOST = '127.0.0.1'
PORT = 65432 # PUERTO
#instanciar un objeto de la clase socket del modulo socket
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.bind((HOST,PORT))
#escuchar conexiones del cliente
s.listen()
#aceptar la conexión
cliente,direccion=s.accept()
with cliente:
#recibir maximo 1000 bytes del cliente
dato=cliente.recv(1000)
if not dato:
#cliente se ha desconectado
print("DESCONECTADO")
else:
print("-------EL CLIENTE NOS DICE ------")
#mostrar lo que el cliente me ha enviado
print(pickle.loads(dato))
time.sleep(2)
#devolver un mensaje
cliente.sendall(pickle.dumps("*** AQUI PUES MASCOTA :v :v *** "))
s.close()
| [
"noreply@github.com"
] | jorgepdsML.noreply@github.com |
2ef6777840283a29dde9cda0fbea7f05a7bc5f59 | 4ee504feeb5388ed70f4ffef2caf851eb5edd299 | /pinax/wiki/views.py | 9c75aa7f693e06ddd39e31db3c3c7a5247217b8f | [
"MIT"
] | permissive | adamfeldman/pinax-wiki | cbf2e1d9cdce853a0b959f037e35c6b923feba31 | 06c65bf00e9cf69493ca2d97cd45d167756f054d | refs/heads/master | 2021-01-12T13:51:16.242419 | 2016-03-15T12:54:33 | 2016-03-15T12:54:33 | 69,196,399 | 0 | 0 | null | 2016-09-25T23:21:40 | 2016-09-25T23:21:40 | null | UTF-8 | Python | false | false | 3,556 | py | import json
from django.http import HttpResponse, Http404, HttpResponseForbidden
from django.shortcuts import redirect, render, get_object_or_404
from django.views import static
from django.views.decorators.http import require_POST
try:
from account.decorators import login_required
except ImportError:
from django.contrib.auth.decorators import login_required
from .conf import settings
from .forms import RevisionForm
from .hooks import hookset
from .models import Page, MediaFile
def index(request, binder, *args, **kwargs):
wiki = binder.lookup(*args, **kwargs)
return redirect(binder.page_url(wiki, "WikiIndex"))
def page(request, slug, binder, *args, **kwargs):
wiki = binder.lookup(*args, **kwargs)
try:
if wiki:
page = wiki.pages.get(slug=slug)
else:
page = Page.objects.get(slug=slug)
if not hookset.can_view_page(page, request.user):
raise Http404()
rev = page.revisions.latest()
return render(request, "pinax/wiki/page.html", {"revision": rev, "can_edit": hookset.can_edit_page(page, request.user)})
except Page.DoesNotExist:
return redirect(binder.edit_url(wiki, slug))
@login_required
def edit(request, slug, binder, *args, **kwargs):
wiki = binder.lookup(*args, **kwargs)
try:
if wiki:
page = wiki.pages.get(slug=slug)
else:
page = Page.objects.get(slug=slug)
rev = page.revisions.latest()
if not hookset.can_edit_page(page, request.user):
return HttpResponseForbidden()
except Page.DoesNotExist:
page = Page(wiki=wiki, slug=slug)
rev = None
if not hookset.can_edit_page(page, request.user):
raise Http404()
if request.method == "POST":
form = RevisionForm(request.POST, revision=rev)
if form.is_valid():
if page.pk is None:
page.save()
revision = form.save(commit=False)
revision.page = page
revision.created_by = request.user
revision.created_ip = request.META.get(settings.PINAX_WIKI_IP_ADDRESS_META_FIELD, "REMOTE_ADDR")
revision.parse()
revision.save()
return redirect(binder.page_url(wiki, slug))
else:
form = RevisionForm(revision=rev)
return render(request, "pinax/wiki/edit.html", {
"form": form,
"page": page,
"revision": rev,
"can_delete": hookset.can_delete_page(page, request.user)
})
def file_download(request, pk, filename):
media_file = get_object_or_404(MediaFile, pk=pk, filename=filename)
if getattr(settings, "DOCUMENTS_USE_X_ACCEL_REDIRECT", False):
response = HttpResponse()
response["X-Accel-Redirect"] = media_file.file.url
# delete content-type to allow Gondor to determine the filetype and
# we definitely don't want Django's crappy default :-)
del response["content-type"]
else:
response = static.serve(request, media_file.file.name, document_root=settings.MEDIA_ROOT)
return response
@require_POST
@login_required
def file_upload(request):
uploads = []
for f in request.FILES.getlist("files"):
media_file = request.user.media_files.create(file=f, filename=f.name)
uploads.append(media_file)
return HttpResponse(json.dumps({
"uploads": [
{"filename": m.filename, "download_url": m.download_url()}
for m in uploads
]
}), content_type="application/json")
| [
"paltman@gmail.com"
] | paltman@gmail.com |
1eab08e124fb6251930f469f3fffe440a5b49406 | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/challenge_auth_policy.py | 3239032e9162ffbc954449b7ec15787c522cae2c | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 5,654 | py | # ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
"""Policy implementing Key Vault's challenge authentication protocol.
Normally the protocol is only used for the client's first service request, upon which:
1. The challenge authentication policy sends a copy of the request, without authorization or content.
2. Key Vault responds 401 with a header (the 'challenge') detailing how the client should authenticate such a request.
3. The policy authenticates according to the challenge and sends the original request with authorization.
The policy caches the challenge and thus knows how to authenticate future requests. However, authentication
requirements can change. For example, a vault may move to a new tenant. In such a case the policy will attempt the
protocol again.
"""
import copy
import time
from azure.core.exceptions import ServiceRequestError
from azure.core.pipeline import PipelineContext, PipelineRequest
from azure.core.pipeline.policies import HTTPPolicy
from azure.core.pipeline.transport import HttpRequest
from .http_challenge import HttpChallenge
from . import http_challenge_cache as ChallengeCache
try:
from typing import TYPE_CHECKING
except ImportError:
TYPE_CHECKING = False
if TYPE_CHECKING:
from typing import Any, Optional
from azure.core.credentials import AccessToken, TokenCredential
from azure.core.pipeline import PipelineResponse
def _enforce_tls(request):
# type: (PipelineRequest) -> None
if not request.http_request.url.lower().startswith("https"):
raise ServiceRequestError(
"Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."
)
def _get_challenge_request(request):
# type: (PipelineRequest) -> PipelineRequest
# The challenge request is intended to provoke an authentication challenge from Key Vault, to learn how the
# service request should be authenticated. It should be identical to the service request but with no body.
challenge_request = HttpRequest(
request.http_request.method, request.http_request.url, headers=request.http_request.headers
)
challenge_request.headers["Content-Length"] = "0"
options = copy.deepcopy(request.context.options)
context = PipelineContext(request.context.transport, **options)
return PipelineRequest(http_request=challenge_request, context=context)
def _update_challenge(request, challenger):
# type: (PipelineRequest, PipelineResponse) -> HttpChallenge
"""parse challenge from challenger, cache it, return it"""
challenge = HttpChallenge(
request.http_request.url,
challenger.http_response.headers.get("WWW-Authenticate"),
response_headers=challenger.http_response.headers,
)
ChallengeCache.set_challenge_for_url(request.http_request.url, challenge)
return challenge
class ChallengeAuthPolicyBase(object):
"""Sans I/O base for challenge authentication policies"""
def __init__(self, **kwargs):
self._token = None # type: Optional[AccessToken]
super(ChallengeAuthPolicyBase, self).__init__(**kwargs)
@property
def _need_new_token(self):
# type: () -> bool
return not self._token or self._token.expires_on - time.time() < 300
class ChallengeAuthPolicy(ChallengeAuthPolicyBase, HTTPPolicy):
"""policy for handling HTTP authentication challenges"""
def __init__(self, credential, **kwargs):
# type: (TokenCredential, **Any) -> None
self._credential = credential
super(ChallengeAuthPolicy, self).__init__(**kwargs)
def send(self, request):
# type: (PipelineRequest) -> PipelineResponse
_enforce_tls(request)
challenge = ChallengeCache.get_challenge_for_url(request.http_request.url)
if not challenge:
challenge_request = _get_challenge_request(request)
challenger = self.next.send(challenge_request)
try:
challenge = _update_challenge(request, challenger)
except ValueError:
# didn't receive the expected challenge -> nothing more this policy can do
return challenger
self._handle_challenge(request, challenge)
response = self.next.send(request)
if response.http_response.status_code == 401:
# any cached token must be invalid
self._token = None
# cached challenge could be outdated; maybe this response has a new one?
try:
challenge = _update_challenge(request, response)
except ValueError:
# 401 with no legible challenge -> nothing more this policy can do
return response
self._handle_challenge(request, challenge)
response = self.next.send(request)
return response
def _handle_challenge(self, request, challenge):
# type: (PipelineRequest, HttpChallenge) -> None
"""authenticate according to challenge, add Authorization header to request"""
if self._need_new_token:
# azure-identity credentials require an AADv2 scope but the challenge may specify an AADv1 resource
scope = challenge.get_scope() or challenge.get_resource() + "/.default"
self._token = self._credential.get_token(scope)
# ignore mypy's warning because although self._token is Optional, get_token raises when it fails to get a token
request.http_request.headers["Authorization"] = "Bearer {}".format(self._token.token) # type: ignore
| [
"noreply@github.com"
] | scbedd.noreply@github.com |
0b7e2a052708cfbefe1cc8b6f2fa7a0e3b2d65ff | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/amme/testcase/interestcases/testcase0_3_2_019.py | c7aa9f66e3732eb44bd0b67b86245ca0f1902b48 | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,599 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
desired_caps = {
'platformName' : 'Android',
'deviceName' : 'Android Emulator',
'platformVersion' : '4.4',
'appPackage' : 'com.money.manager.ex',
'appActivity' : 'com.money.manager.ex.home.MainActivity',
'resetKeyboard' : True,
'androidCoverage' : 'com.money.manager.ex/com.money.manager.ex.JacocoInstrumentation',
'noReset' : True
}
def command(cmd, timeout=5):
p = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
time.sleep(timeout)
p.terminate()
return
def getElememt(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str)
return element
def getElememtBack(driver, str1, str2) :
for i in range(0, 2, 1):
try:
element = driver.find_element_by_android_uiautomator(str1)
except NoSuchElementException:
time.sleep(1)
else:
return element
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str2)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str2)
return element
def swipe(driver, startxper, startyper, endxper, endyper) :
size = driver.get_window_size()
width = size["width"]
height = size["height"]
try:
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=1000)
except WebDriverException:
time.sleep(1)
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=1000)
return
def scrollToFindElement(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
elements = driver.find_elements_by_android_uiautomator(str)
if (len(elements) > 1) :
for temp in elements :
if temp.get_attribute("enabled") == "true" :
element = temp
break
except NoSuchElementException:
swipe(driver, 0.5, 0.55, 0.5, 0.2)
else :
return element
for i in range(0, 4, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
elements = driver.find_elements_by_android_uiautomator(str)
if (len(elements) > 1):
for temp in elements:
if temp.get_attribute("enabled") == "true":
element = temp
break
except NoSuchElementException:
swipe(driver, 0.5, 0.2, 0.5, 0.55)
else :
return element
return
def scrollToClickElement(driver, str) :
element = scrollToFindElement(driver, str)
if element is None :
return
else :
element.click()
def clickInList(driver, str) :
element = None
if (str is None) :
candidates = driver.find_elements_by_class_name("android.widget.CheckedTextView")
if len(candidates) >= 1 and checkWindow(driver):
element = candidates[len(candidates)-1]
else :
element = scrollToFindElement(driver, str)
if element is not None :
element.click()
else :
if checkWindow(driver) :
driver.press_keycode(4)
def clickOnCheckable(driver, str, value = "true") :
parents = driver.find_elements_by_class_name("android.widget.LinearLayout")
for parent in parents:
try :
parent.find_element_by_android_uiautomator(str)
lists = parent.find_elements_by_class_name("android.widget.LinearLayout")
if len(lists) == 1 :
innere = parent.find_element_by_android_uiautomator("new UiSelector().checkable(true)")
nowvalue = innere.get_attribute("checked")
if (nowvalue != value) :
innere.click()
break
except NoSuchElementException:
continue
def typeText(driver, value) :
element = getElememt(driver, "new UiSelector().className(\"android.widget.EditText\")")
element.clear()
element.send_keys(value)
enterelement = getElememt(driver, "new UiSelector().text(\"OK\")")
if (enterelement is None) :
if checkWindow(driver):
driver.press_keycode(4)
else :
enterelement.click()
def checkWindow(driver) :
dsize = driver.get_window_size()
nsize = driver.find_element_by_class_name("android.widget.FrameLayout").size
if dsize['height'] > nsize['height']:
return True
else :
return False
def testingSeekBar(driver, str, value):
try :
if(not checkWindow(driver)) :
element = seekForNearestSeekBar(driver, str)
else :
element = driver.find_element_by_class_name("android.widget.SeekBar")
if (None != element):
settingSeekBar(driver, element, value)
driver.find_element_by_android_uiautomator("new UiSelector().text(\"OK\")").click()
except NoSuchElementException:
time.sleep(1)
def seekForNearestSeekBar(driver, str):
parents = driver.find_elements_by_class_name("android.widget.LinearLayout")
for parent in parents:
try :
parent.find_element_by_android_uiautomator(str)
lists = parent.find_elements_by_class_name("android.widget.LinearLayout")
if len(lists) == 1 :
innere = parent.find_element_by_class_name("android.widget.SeekBar")
return innere
break
except NoSuchElementException:
continue
def settingSeekBar(driver, element, value) :
x = element.rect.get("x")
y = element.rect.get("y")
width = element.rect.get("width")
height = element.rect.get("height")
TouchAction(driver).press(None, x + 10, y + height/2).move_to(None, x + width * value,y + height/2).release().perform()
y = value
def clickInMultiList(driver, str) :
element = None
if (str is None) :
candidates = driver.find_elements_by_class_name("android.widget.CheckedTextView")
if len(candidates) >= 1 and checkWindow(driver):
element = candidates[len(candidates)-1]
else :
element = scrollToFindElement(driver, str)
if element is not None :
nowvalue = element.get_attribute("checked")
if (nowvalue != "true") :
element.click()
else :
if checkWindow(driver) :
driver.find_element_by_android_uiautomator("new UiSelector().text(\"OK\")")
# preference setting and exit
try :
os.popen("adb shell svc data disable")
time.sleep(5)
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
os.popen("adb shell am start -n com.money.manager.ex/com.money.manager.ex.settings.SyncPreferencesActivity -a test")
scrollToClickElement(driver, "new UiSelector().text(\"Sync enabled\")")
clickOnCheckable(driver, "new UiSelector().text(\"Sync enabled\")", "true")
scrollToClickElement(driver, "new UiSelector().text(\"Provider\")")
clickInList(driver, "new UiSelector().text(\"Google Drive\")")
driver.press_keycode(4)
time.sleep(2)
os.popen("adb shell am start -n com.money.manager.ex/com.money.manager.ex.settings.BehaviourSettingsActivity -a test")
scrollToClickElement(driver, "new UiSelector().text(\"Filter in selectors\")")
clickOnCheckable(driver, "new UiSelector().text(\"Filter in selectors\")", "false")
driver.press_keycode(4)
time.sleep(2)
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
else:
print 'OK'
finally:
cpackage = driver.current_package
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"2_019_pre\"")
jacocotime = time.time()
print 'jacoco time:', str(jacocotime - endtime), 's'
driver.quit()
# testcase019
try :
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
element = getElememtBack(driver, "new UiSelector().text(\"Summary\")", "new UiSelector().className(\"android.widget.TextView\").instance(9)")
TouchAction(driver).long_press(element).release().perform()
element = getElememt(driver, "new UiSelector().resourceId(\"com.money.manager.ex:id/menu_search\").className(\"android.widget.TextView\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Select Category\")", "new UiSelector().className(\"android.widget.TextView\").instance(15)")
TouchAction(driver).tap(element).perform()
swipe(driver, 0.5, 0.2, 0.5, 0.8)
element = getElememt(driver, "new UiSelector().resourceId(\"com.money.manager.ex:id/search_close_btn\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
swipe(driver, 0.5, 0.2, 0.5, 0.8)
element = getElememt(driver, "new UiSelector().resourceId(\"com.money.manager.ex:id/expandable_list_indicator\").className(\"android.widget.ImageView\")")
TouchAction(driver).long_press(element).release().perform()
element = getElememtBack(driver, "new UiSelector().text(\"Edit\")", "new UiSelector().className(\"android.widget.TextView\").instance(1)")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"com.money.manager.ex:id/editTextCategName\").className(\"android.widget.EditText\")")
element.clear()
element.send_keys("testtion");
element = getElememtBack(driver, "new UiSelector().text(\"OK\")", "new UiSelector().className(\"android.widget.TextView\").instance(2)")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"com.money.manager.ex:id/fab\").className(\"android.widget.ImageButton\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"OK\")", "new UiSelector().className(\"android.widget.TextView\").instance(2)")
TouchAction(driver).tap(element).perform()
swipe(driver, 0.5, 0.8, 0.5, 0.2)
element = getElememt(driver, "new UiSelector().resourceId(\"com.money.manager.ex:id/expandable_list_indicator\").className(\"android.widget.ImageView\")")
TouchAction(driver).long_press(element).release().perform()
driver.press_keycode(82)
element = getElememtBack(driver, "new UiSelector().text(\"Taxes\")", "new UiSelector().className(\"android.widget.TextView\").instance(18)")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Taxes\")", "new UiSelector().className(\"android.widget.TextView\").instance(15)")
TouchAction(driver).long_press(element).release().perform()
element = getElememtBack(driver, "new UiSelector().text(\"Delete\")", "new UiSelector().className(\"android.widget.TextView\").instance(2)")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"OK\")", "new UiSelector().className(\"android.widget.TextView\").instance(3)")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"com.money.manager.ex:id/expandable_list_indicator\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
else:
print 'OK'
finally:
cpackage = driver.current_package
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"2_019\"")
jacocotime = time.time()
print 'jacoco time:', str(jacocotime - endtime), 's'
driver.quit()
if (cpackage != 'com.money.manager.ex'):
cpackage = "adb shell am force-stop " + cpackage
os.popen(cpackage)
os.popen("adb shell svc data enable")
| [
"prefest2018@gmail.com"
] | prefest2018@gmail.com |
ba2d85f2f61ab3ad1c97db094a54795ce71f6a38 | aba0055290156515e6befc47d06183dee2663aec | /gluon/gluoncv2/models/lwopenpose_cmupan.py | a0c3b24b67044df4b1ab1c5ab1a6a8167f6d9f66 | [
"MIT"
] | permissive | piyop/imgclsmob | f320aeb4675be57042cf661190e19d9299d46731 | 780dc56eddc95ce58c34eb6f6e48d2fb4c566571 | refs/heads/master | 2022-11-05T17:34:34.076150 | 2020-06-26T12:06:16 | 2020-06-26T12:06:16 | 275,135,197 | 1 | 0 | MIT | 2020-06-26T10:59:46 | 2020-06-26T10:59:45 | null | UTF-8 | Python | false | false | 26,268 | py | """
Lightweight OpenPose 2D/3D for CMU Panoptic, implemented in Gluon.
Original paper: 'Real-time 2D Multi-Person Pose Estimation on CPU: Lightweight OpenPose,'
https://arxiv.org/abs/1811.12004.
"""
__all__ = ['LwOpenPose', 'lwopenpose2d_mobilenet_cmupan_coco', 'lwopenpose3d_mobilenet_cmupan_coco',
'LwopDecoderFinalBlock']
import os
from mxnet import cpu
from mxnet.gluon import nn, HybridBlock
from .common import conv1x1, conv1x1_block, conv3x3_block, dwsconv3x3_block
class LwopResBottleneck(HybridBlock):
"""
Bottleneck block for residual path in the residual unit.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
strides : int or tuple/list of 2 int
Strides of the convolution.
use_bias : bool, default True
Whether the layer uses a bias vector.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
bottleneck_factor : int, default 2
Bottleneck factor.
squeeze_out : bool, default False
Whether to squeeze the output channels.
"""
def __init__(self,
in_channels,
out_channels,
strides,
use_bias=True,
bn_use_global_stats=False,
bottleneck_factor=2,
squeeze_out=False,
**kwargs):
super(LwopResBottleneck, self).__init__(**kwargs)
mid_channels = out_channels // bottleneck_factor if squeeze_out else in_channels // bottleneck_factor
with self.name_scope():
self.conv1 = conv1x1_block(
in_channels=in_channels,
out_channels=mid_channels,
use_bias=use_bias,
bn_use_global_stats=bn_use_global_stats)
self.conv2 = conv3x3_block(
in_channels=mid_channels,
out_channels=mid_channels,
strides=strides,
use_bias=use_bias,
bn_use_global_stats=bn_use_global_stats)
self.conv3 = conv1x1_block(
in_channels=mid_channels,
out_channels=out_channels,
use_bias=use_bias,
activation=None,
bn_use_global_stats=bn_use_global_stats)
def hybrid_forward(self, F, x):
x = self.conv1(x)
x = self.conv2(x)
x = self.conv3(x)
return x
class LwopResUnit(HybridBlock):
"""
ResNet-like residual unit with residual connection.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
strides : int or tuple/list of 2 int, default 1
Strides of the convolution.
use_bias : bool, default True
Whether the layer uses a bias vector.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
bottleneck_factor : int, default 2
Bottleneck factor.
squeeze_out : bool, default False
Whether to squeeze the output channels.
activate : bool, default False
Whether to activate the sum.
"""
def __init__(self,
in_channels,
out_channels,
strides=1,
use_bias=True,
bn_use_global_stats=False,
bottleneck_factor=2,
squeeze_out=False,
activate=False,
**kwargs):
super(LwopResUnit, self).__init__(**kwargs)
self.activate = activate
self.resize_identity = (in_channels != out_channels) or (strides != 1)
with self.name_scope():
self.body = LwopResBottleneck(
in_channels=in_channels,
out_channels=out_channels,
strides=strides,
use_bias=use_bias,
bn_use_global_stats=bn_use_global_stats,
bottleneck_factor=bottleneck_factor,
squeeze_out=squeeze_out)
if self.resize_identity:
self.identity_conv = conv1x1_block(
in_channels=in_channels,
out_channels=out_channels,
strides=strides,
use_bias=use_bias,
bn_use_global_stats=bn_use_global_stats,
activation=None)
if self.activate:
self.activ = nn.Activation("relu")
def hybrid_forward(self, F, x):
if self.resize_identity:
identity = self.identity_conv(x)
else:
identity = x
x = self.body(x)
x = x + identity
if self.activate:
x = self.activ(x)
return x
class LwopEncoderFinalBlock(HybridBlock):
"""
Lightweight OpenPose 2D/3D specific encoder final block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
"""
def __init__(self,
in_channels,
out_channels,
bn_use_global_stats=False,
**kwargs):
super(LwopEncoderFinalBlock, self).__init__(**kwargs)
with self.name_scope():
self.pre_conv = conv1x1_block(
in_channels=in_channels,
out_channels=out_channels,
use_bias=True,
use_bn=False,
bn_use_global_stats=bn_use_global_stats)
self.body = nn.HybridSequential(prefix="")
for i in range(3):
self.body.add(dwsconv3x3_block(
in_channels=out_channels,
out_channels=out_channels,
use_bn=False,
bn_use_global_stats=bn_use_global_stats,
dw_activation=(lambda: nn.ELU()),
pw_activation=(lambda: nn.ELU())))
self.post_conv = conv3x3_block(
in_channels=out_channels,
out_channels=out_channels,
use_bias=True,
use_bn=False,
bn_use_global_stats=bn_use_global_stats)
def hybrid_forward(self, F, x):
x = self.pre_conv(x)
x = x + self.body(x)
x = self.post_conv(x)
return x
class LwopRefinementBlock(HybridBlock):
"""
Lightweight OpenPose 2D/3D specific refinement block for decoder units.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
"""
def __init__(self,
in_channels,
out_channels,
bn_use_global_stats=False,
**kwargs):
super(LwopRefinementBlock, self).__init__(**kwargs)
with self.name_scope():
self.pre_conv = conv1x1_block(
in_channels=in_channels,
out_channels=out_channels,
use_bias=True,
use_bn=False,
bn_use_global_stats=bn_use_global_stats)
self.body = nn.HybridSequential(prefix="")
self.body.add(conv3x3_block(
in_channels=out_channels,
out_channels=out_channels,
use_bias=True,
bn_use_global_stats=bn_use_global_stats))
self.body.add(conv3x3_block(
in_channels=out_channels,
out_channels=out_channels,
padding=2,
dilation=2,
use_bias=True,
bn_use_global_stats=bn_use_global_stats))
def hybrid_forward(self, F, x):
x = self.pre_conv(x)
x = x + self.body(x)
return x
class LwopDecoderBend(HybridBlock):
"""
Lightweight OpenPose 2D/3D specific decoder bend block.
Parameters:
----------
in_channels : int
Number of input channels.
mid_channels : int
Number of middle channels.
out_channels : int
Number of output channels.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
"""
def __init__(self,
in_channels,
mid_channels,
out_channels,
bn_use_global_stats=False,
**kwargs):
super(LwopDecoderBend, self).__init__(**kwargs)
with self.name_scope():
self.conv1 = conv1x1_block(
in_channels=in_channels,
out_channels=mid_channels,
use_bias=True,
use_bn=False,
bn_use_global_stats=bn_use_global_stats)
self.conv2 = conv1x1(
in_channels=mid_channels,
out_channels=out_channels,
use_bias=True)
def hybrid_forward(self, F, x):
x = self.conv1(x)
x = self.conv2(x)
return x
class LwopDecoderInitBlock(HybridBlock):
"""
Lightweight OpenPose 2D/3D specific decoder init block.
Parameters:
----------
in_channels : int
Number of input channels.
keypoints : int
Number of keypoints.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
"""
def __init__(self,
in_channels,
keypoints,
bn_use_global_stats=False,
**kwargs):
super(LwopDecoderInitBlock, self).__init__(**kwargs)
num_heatmap = keypoints
num_paf = 2 * keypoints
bend_mid_channels = 512
with self.name_scope():
self.body = nn.HybridSequential(prefix="")
for i in range(3):
self.body.add(conv3x3_block(
in_channels=in_channels,
out_channels=in_channels,
use_bias=True,
use_bn=False,
bn_use_global_stats=bn_use_global_stats))
self.heatmap_bend = LwopDecoderBend(
in_channels=in_channels,
mid_channels=bend_mid_channels,
out_channels=num_heatmap,
bn_use_global_stats=bn_use_global_stats)
self.paf_bend = LwopDecoderBend(
in_channels=in_channels,
mid_channels=bend_mid_channels,
out_channels=num_paf,
bn_use_global_stats=bn_use_global_stats)
def hybrid_forward(self, F, x):
y = self.body(x)
heatmap = self.heatmap_bend(y)
paf = self.paf_bend(y)
y = F.concat(x, heatmap, paf, dim=1)
return y
class LwopDecoderUnit(HybridBlock):
"""
Lightweight OpenPose 2D/3D specific decoder init.
Parameters:
----------
in_channels : int
Number of input channels.
keypoints : int
Number of keypoints.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
"""
def __init__(self,
in_channels,
keypoints,
bn_use_global_stats=False,
**kwargs):
super(LwopDecoderUnit, self).__init__(**kwargs)
num_heatmap = keypoints
num_paf = 2 * keypoints
self.features_channels = in_channels - num_heatmap - num_paf
with self.name_scope():
self.body = nn.HybridSequential(prefix="")
for i in range(5):
self.body.add(LwopRefinementBlock(
in_channels=in_channels,
out_channels=self.features_channels,
bn_use_global_stats=bn_use_global_stats))
in_channels = self.features_channels
self.heatmap_bend = LwopDecoderBend(
in_channels=self.features_channels,
mid_channels=self.features_channels,
out_channels=num_heatmap,
bn_use_global_stats=bn_use_global_stats)
self.paf_bend = LwopDecoderBend(
in_channels=self.features_channels,
mid_channels=self.features_channels,
out_channels=num_paf,
bn_use_global_stats=bn_use_global_stats)
def hybrid_forward(self, F, x):
features = F.slice_axis(x, axis=1, begin=0, end=self.features_channels)
y = self.body(x)
heatmap = self.heatmap_bend(y)
paf = self.paf_bend(y)
y = F.concat(features, heatmap, paf, dim=1)
return y
class LwopDecoderFeaturesBend(HybridBlock):
    """
    Lightweight OpenPose 2D/3D specific decoder 3D features bend.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    mid_channels : int
        Number of middle channels.
    out_channels : int
        Number of output channels.
    bn_use_global_stats : bool, default False
        Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
    """
    def __init__(self,
                 in_channels,
                 mid_channels,
                 out_channels,
                 bn_use_global_stats=False,
                 **kwargs):
        super(LwopDecoderFeaturesBend, self).__init__(**kwargs)
        with self.name_scope():
            # Two refinement blocks followed by a final bend head.
            self.body = nn.HybridSequential(prefix="")
            for i in range(2):
                self.body.add(LwopRefinementBlock(
                    in_channels=in_channels,
                    out_channels=mid_channels,
                    bn_use_global_stats=bn_use_global_stats))
                in_channels = mid_channels
            self.features_bend = LwopDecoderBend(
                in_channels=mid_channels,
                mid_channels=mid_channels,
                out_channels=out_channels,
                bn_use_global_stats=bn_use_global_stats)

    def hybrid_forward(self, F, x):
        """Refine the features then project them to the output channel count."""
        x = self.body(x)
        x = self.features_bend(x)
        return x
class LwopDecoderFinalBlock(HybridBlock):
    """
    Lightweight OpenPose 2D/3D specific decoder final block for calcualation 3D poses.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    keypoints : int
        Number of keypoints.
    bottleneck_factor : int
        Bottleneck factor.
    calc_3d_features : bool
        Whether to calculate 3D features.
    bn_use_global_stats : bool, default False
        Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
    """
    def __init__(self,
                 in_channels,
                 keypoints,
                 bottleneck_factor,
                 calc_3d_features,
                 bn_use_global_stats=False,
                 **kwargs):
        super(LwopDecoderFinalBlock, self).__init__(**kwargs)
        # 2D output: 1 heatmap + 2 PAF channels per keypoint.
        self.num_heatmap_paf = 3 * keypoints
        self.calc_3d_features = calc_3d_features
        features_out_channels = self.num_heatmap_paf
        features_in_channels = in_channels - features_out_channels
        # The 3D branch (body + features_bend) is only built when requested;
        # hybrid_forward never touches it otherwise.
        if self.calc_3d_features:
            with self.name_scope():
                self.body = nn.HybridSequential(prefix="")
                for i in range(5):
                    self.body.add(LwopResUnit(
                        in_channels=in_channels,
                        out_channels=features_in_channels,
                        bottleneck_factor=bottleneck_factor,
                        bn_use_global_stats=bn_use_global_stats))
                    in_channels = features_in_channels
                self.features_bend = LwopDecoderFeaturesBend(
                    in_channels=features_in_channels,
                    mid_channels=features_in_channels,
                    out_channels=features_out_channels,
                    bn_use_global_stats=bn_use_global_stats)

    def hybrid_forward(self, F, x):
        """Return 2D heatmaps/PAFs, optionally concatenated with 3D features."""
        # The 2D heatmap/PAF channels are the trailing slice of the input.
        heatmap_paf_2d = F.slice_axis(x, axis=1, begin=-self.num_heatmap_paf, end=None)
        if not self.calc_3d_features:
            return heatmap_paf_2d
        x = self.body(x)
        x = self.features_bend(x)
        y = F.concat(heatmap_paf_2d, x, dim=1)
        return y
class LwOpenPose(HybridBlock):
    """
    Lightweight OpenPose 2D/3D model from 'Real-time 2D Multi-Person Pose Estimation on CPU: Lightweight OpenPose,'
    https://arxiv.org/abs/1811.12004.

    Parameters:
    ----------
    encoder_channels : list of list of int
        Number of output channels for each encoder unit.
    encoder_paddings : list of list of int
        Padding/dilation value for each encoder unit.
    encoder_init_block_channels : int
        Number of output channels for the encoder initial unit.
    encoder_final_block_channels : int
        Number of output channels for the encoder final unit.
    refinement_units : int
        Number of refinement blocks in the decoder.
    calc_3d_features : bool
        Whether to calculate 3D features.
    return_heatmap : bool, default True
        Whether to return only heatmap.  NOTE: currently has no effect — both
        code paths returned the same tensor; the flag is kept for interface
        compatibility.
    bn_use_global_stats : bool, default False
        Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
        Useful for fine-tuning.
    in_channels : int, default 3
        Number of input channels.
    in_size : tuple of two ints, default (368, 368)
        Spatial size of the expected input image.
    keypoints : int, default 19
        Number of keypoints.
    """
    def __init__(self,
                 encoder_channels,
                 encoder_paddings,
                 encoder_init_block_channels,
                 encoder_final_block_channels,
                 refinement_units,
                 calc_3d_features,
                 return_heatmap=True,
                 bn_use_global_stats=False,
                 in_channels=3,
                 in_size=(368, 368),
                 keypoints=19,
                 **kwargs):
        super(LwOpenPose, self).__init__(**kwargs)
        assert (in_channels == 3)
        self.in_size = in_size
        self.keypoints = keypoints
        self.return_heatmap = return_heatmap
        self.calc_3d_features = calc_3d_features
        # 1 heatmap + 2 PAF channels per keypoint.
        num_heatmap_paf = 3 * keypoints

        with self.name_scope():
            # Encoder: MobileNet-style backbone + final dilated block.
            self.encoder = nn.HybridSequential(prefix="")
            backbone = nn.HybridSequential(prefix="")
            backbone.add(conv3x3_block(
                in_channels=in_channels,
                out_channels=encoder_init_block_channels,
                strides=2,
                bn_use_global_stats=bn_use_global_stats))
            in_channels = encoder_init_block_channels
            for i, channels_per_stage in enumerate(encoder_channels):
                stage = nn.HybridSequential(prefix="stage{}_".format(i + 1))
                with stage.name_scope():
                    for j, out_channels in enumerate(channels_per_stage):
                        # Downsample at the start of every stage but the first.
                        strides = 2 if (j == 0) and (i != 0) else 1
                        padding = encoder_paddings[i][j]
                        stage.add(dwsconv3x3_block(
                            in_channels=in_channels,
                            out_channels=out_channels,
                            strides=strides,
                            padding=padding,
                            dilation=padding,
                            bn_use_global_stats=bn_use_global_stats))
                        in_channels = out_channels
                backbone.add(stage)
            self.encoder.add(backbone)
            self.encoder.add(LwopEncoderFinalBlock(
                in_channels=in_channels,
                out_channels=encoder_final_block_channels,
                bn_use_global_stats=bn_use_global_stats))
            in_channels = encoder_final_block_channels

            # Decoder: initial heatmap/PAF estimation, refinement units, and
            # a final block that optionally adds 3D features.
            self.decoder = nn.HybridSequential(prefix="")
            self.decoder.add(LwopDecoderInitBlock(
                in_channels=in_channels,
                keypoints=keypoints,
                bn_use_global_stats=bn_use_global_stats))
            in_channels = encoder_final_block_channels + num_heatmap_paf
            for i in range(refinement_units):
                self.decoder.add(LwopDecoderUnit(
                    in_channels=in_channels,
                    keypoints=keypoints,
                    bn_use_global_stats=bn_use_global_stats))
            self.decoder.add(LwopDecoderFinalBlock(
                in_channels=in_channels,
                keypoints=keypoints,
                bottleneck_factor=2,
                calc_3d_features=calc_3d_features,
                bn_use_global_stats=bn_use_global_stats))

    def hybrid_forward(self, F, x):
        """Run the encoder/decoder pipeline and return the pose tensor."""
        x = self.encoder(x)
        x = self.decoder(x)
        # BUGFIX: the original branched on `self.return_heatmap` but both
        # branches returned the same tensor, so the branch was dead code.
        return x
def get_lwopenpose(calc_3d_features,
                   keypoints,
                   model_name=None,
                   pretrained=False,
                   ctx=cpu(),
                   root=os.path.join("~", ".mxnet", "models"),
                   **kwargs):
    """
    Create Lightweight OpenPose 2D/3D model with specific parameters.

    Parameters:
    ----------
    calc_3d_features : bool, default False
        Whether to calculate 3D features.
    keypoints : int
        Number of keypoints.
    model_name : str or None, default None
        Model name for loading pretrained model.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    # MobileNet-style backbone configuration (channels + padding/dilation).
    net = LwOpenPose(
        encoder_channels=[[64], [128, 128], [256, 256, 512, 512, 512, 512, 512, 512]],
        encoder_paddings=[[1], [1, 1], [1, 1, 1, 2, 1, 1, 1, 1]],
        encoder_init_block_channels=32,
        encoder_final_block_channels=128,
        refinement_units=1,
        calc_3d_features=calc_3d_features,
        keypoints=keypoints,
        **kwargs)

    if pretrained:
        # `not model_name` covers both the None and the empty-string case.
        if not model_name:
            raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.")
        from .model_store import get_model_file
        net.load_parameters(
            filename=get_model_file(
                model_name=model_name,
                local_model_store_dir_path=root),
            ctx=ctx)

    return net
def lwopenpose2d_mobilenet_cmupan_coco(keypoints=19, **kwargs):
    """
    Lightweight OpenPose 2D model on the base of MobileNet for CMU Panoptic from 'Real-time 2D Multi-Person Pose
    Estimation on CPU: Lightweight OpenPose,' https://arxiv.org/abs/1811.12004.

    Parameters:
    ----------
    keypoints : int, default 19
        Number of keypoints.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    return get_lwopenpose(
        calc_3d_features=False,
        keypoints=keypoints,
        model_name="lwopenpose2d_mobilenet_cmupan_coco",
        **kwargs)
def lwopenpose3d_mobilenet_cmupan_coco(keypoints=19, **kwargs):
    """
    Lightweight OpenPose 3D model on the base of MobileNet for CMU Panoptic from 'Real-time 2D Multi-Person Pose
    Estimation on CPU: Lightweight OpenPose,' https://arxiv.org/abs/1811.12004.

    Parameters:
    ----------
    keypoints : int, default 19
        Number of keypoints.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    return get_lwopenpose(
        calc_3d_features=True,
        keypoints=keypoints,
        model_name="lwopenpose3d_mobilenet_cmupan_coco",
        **kwargs)
def _test():
    """Smoke-test both model variants: parameter counts and output shapes."""
    import numpy as np
    import mxnet as mx

    in_size = (368, 368)
    keypoints = 19
    return_heatmap = True
    pretrained = False

    models = [
        (lwopenpose2d_mobilenet_cmupan_coco, "2d"),
        (lwopenpose3d_mobilenet_cmupan_coco, "3d"),
    ]

    for model, model_dim in models:

        net = model(pretrained=pretrained, in_size=in_size, return_heatmap=return_heatmap)

        ctx = mx.cpu()
        if not pretrained:
            net.initialize(ctx=ctx)
        net.hybridize()
        # Count only trainable parameters (skip shapeless/frozen ones).
        net_params = net.collect_params()
        weight_count = 0
        for param in net_params.values():
            if (param.shape is None) or (not param._differentiable):
                continue
            weight_count += np.prod(param.shape)
        print("m={}, {}".format(model.__name__, weight_count))
        assert (model != lwopenpose2d_mobilenet_cmupan_coco or weight_count == 4091698)
        assert (model != lwopenpose3d_mobilenet_cmupan_coco or weight_count == 5085983)

        batch = 14
        x = mx.nd.random.normal(shape=(batch, 3, in_size[0], in_size[1]), ctx=ctx)
        y = net(x)
        # Output stride is 8; the 2D variant emits heatmaps + PAFs (3*K
        # channels), the 3D variant additionally emits 3D features (6*K).
        if model_dim == "2d":
            assert (y.shape == (batch, 3 * keypoints, in_size[0] // 8, in_size[0] // 8))
        else:
            assert (y.shape == (batch, 6 * keypoints, in_size[0] // 8, in_size[0] // 8))


if __name__ == "__main__":
    _test()
| [
"osemery@gmail.com"
] | osemery@gmail.com |
1fd76c565aad097015fd0f3335e3d6a2cce35c2b | 8bb4a472344fda15985ac322d14e8f4ad79c7553 | /Python3-Core/src/test/prompto/translate/oeo/TestBuiltins.py | 6655a6b6fa59d13d6d11fdb80eb356d342ddc1b4 | [] | no_license | prompto/prompto-python3 | c6b356f5af30c6826730ba7f2ad869f341983a2d | 64bd3d97d4702cc912097d41d961f7ab3fd82bee | refs/heads/master | 2022-12-24T12:33:16.251468 | 2022-11-27T17:37:56 | 2022-11-27T17:37:56 | 32,623,633 | 4 | 0 | null | 2019-05-04T11:06:05 | 2015-03-21T07:17:25 | Python | UTF-8 | Python | false | false | 4,521 | py | from prompto.parser.o.BaseOParserTest import BaseOParserTest
class TestBuiltins(BaseOParserTest):
    """Translation tests for prompto builtin methods (O dialect -> E/O)."""

    def setUp(self):
        # BUGFIX: the original called `super(type(self), self).setUp()`,
        # which recurses infinitely if this class is ever subclassed
        # (type(self) is then the subclass, not TestBuiltins).
        super().setUp()

    def testDateDayOfMonth(self):
        self.compareResourceOEO("builtins/dateDayOfMonth.poc")

    def testDateDayOfYear(self):
        self.compareResourceOEO("builtins/dateDayOfYear.poc")

    def testDateMonth(self):
        self.compareResourceOEO("builtins/dateMonth.poc")

    def testDateTimeDayOfMonth(self):
        self.compareResourceOEO("builtins/dateTimeDayOfMonth.poc")

    def testDateTimeDayOfYear(self):
        self.compareResourceOEO("builtins/dateTimeDayOfYear.poc")

    def testDateTimeHour(self):
        self.compareResourceOEO("builtins/dateTimeHour.poc")

    def testDateTimeMinute(self):
        self.compareResourceOEO("builtins/dateTimeMinute.poc")

    def testDateTimeMonth(self):
        self.compareResourceOEO("builtins/dateTimeMonth.poc")

    def testDateTimeSecond(self):
        self.compareResourceOEO("builtins/dateTimeSecond.poc")

    def testDateTimeTZName(self):
        self.compareResourceOEO("builtins/dateTimeTZName.poc")

    def testDateTimeTZOffset(self):
        self.compareResourceOEO("builtins/dateTimeTZOffset.poc")

    def testDateTimeYear(self):
        self.compareResourceOEO("builtins/dateTimeYear.poc")

    def testDateYear(self):
        self.compareResourceOEO("builtins/dateYear.poc")

    def testDictCount(self):
        self.compareResourceOEO("builtins/dictCount.poc")

    def testDictSwap(self):
        self.compareResourceOEO("builtins/dictSwap.poc")

    def testDocumentCount(self):
        self.compareResourceOEO("builtins/documentCount.poc")

    def testEnumName(self):
        self.compareResourceOEO("builtins/enumName.poc")

    def testEnumSymbols(self):
        self.compareResourceOEO("builtins/enumSymbols.poc")

    def testEnumValue(self):
        self.compareResourceOEO("builtins/enumValue.poc")

    def testIntegerFormat(self):
        self.compareResourceOEO("builtins/integerFormat.poc")

    def testListCount(self):
        self.compareResourceOEO("builtins/listCount.poc")

    def testListIndexOf(self):
        self.compareResourceOEO("builtins/listIndexOf.poc")

    def testListJoin(self):
        self.compareResourceOEO("builtins/listJoin.poc")

    def testPeriodDays(self):
        self.compareResourceOEO("builtins/periodDays.poc")

    def testPeriodHours(self):
        self.compareResourceOEO("builtins/periodHours.poc")

    def testPeriodMillis(self):
        self.compareResourceOEO("builtins/periodMillis.poc")

    def testPeriodMinutes(self):
        self.compareResourceOEO("builtins/periodMinutes.poc")

    def testPeriodMonths(self):
        self.compareResourceOEO("builtins/periodMonths.poc")

    def testPeriodSeconds(self):
        self.compareResourceOEO("builtins/periodSeconds.poc")

    def testPeriodWeeks(self):
        self.compareResourceOEO("builtins/periodWeeks.poc")

    def testPeriodYears(self):
        self.compareResourceOEO("builtins/periodYears.poc")

    def testSetCount(self):
        self.compareResourceOEO("builtins/setCount.poc")

    def testSetJoin(self):
        self.compareResourceOEO("builtins/setJoin.poc")

    def testTextCapitalize(self):
        self.compareResourceOEO("builtins/textCapitalize.poc")

    def testTextCount(self):
        self.compareResourceOEO("builtins/textCount.poc")

    def testTextIndexOf(self):
        self.compareResourceOEO("builtins/textIndexOf.poc")

    def testTextLowercase(self):
        self.compareResourceOEO("builtins/textLowercase.poc")

    def testTextReplace(self):
        self.compareResourceOEO("builtins/textReplace.poc")

    def testTextReplaceAll(self):
        self.compareResourceOEO("builtins/textReplaceAll.poc")

    def testTextSplit(self):
        self.compareResourceOEO("builtins/textSplit.poc")

    def testTextTrim(self):
        self.compareResourceOEO("builtins/textTrim.poc")

    def testTextUppercase(self):
        self.compareResourceOEO("builtins/textUppercase.poc")

    def testTimeHour(self):
        self.compareResourceOEO("builtins/timeHour.poc")

    def testTimeMinute(self):
        self.compareResourceOEO("builtins/timeMinute.poc")

    def testTimeSecond(self):
        self.compareResourceOEO("builtins/timeSecond.poc")

    def testTupleCount(self):
        self.compareResourceOEO("builtins/tupleCount.poc")

    def testTupleJoin(self):
        self.compareResourceOEO("builtins/tupleJoin.poc")
| [
"eric.vergnaud@wanadoo.fr"
] | eric.vergnaud@wanadoo.fr |
b976280add3c7f8d108a2e2b62579a8e3baea2df | 38b8bceafb4d80afc7c77196eb9ee99694191bcf | /wxpython/grid4.py | 75c71ea65b3375dda4e507cea2e9ead47a246b12 | [] | no_license | tangc1986/PythonStudy | f6c5b384874e82fbf0b5f51cfb7a7a89a48ec0ff | 1ed1956758e971647426e7096ac2e8cbcca585b4 | refs/heads/master | 2021-01-23T20:39:23.930754 | 2017-10-08T07:40:32 | 2017-10-08T07:42:38 | 42,122,267 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 598 | py | # -*- coding: UTF-8 -*-
__author__ = 'tangchao'
import wx
import wx.grid
class TestFrame(wx.Frame):
    """Demo frame showing wx.grid cell spanning and row/column sizing."""

    def __init__(self):
        wx.Frame.__init__(self, None, title="Grid Sizes",
                          size=(600, 300))
        grid = wx.grid.Grid(self)
        grid.CreateGrid(5, 5)
        # Label every cell with its (row, col) coordinates.
        for r in range(5):
            for c in range(5):
                grid.SetCellValue(r, c, "(%s, %s)" % (r, c))
        # Make the cell at (2, 2) span 2 rows by 3 columns.
        grid.SetCellSize(2, 2, 2, 3)
        grid.SetColSize(1, 125)
        grid.SetRowSize(1, 100)
# Script entry point: create the app, show the frame, run the event loop.
# NOTE(review): wx.PySimpleApp is deprecated in wxPython Phoenix; wx.App()
# is the modern equivalent — confirm against the wxPython version in use.
app = wx.PySimpleApp()
frame = TestFrame()
frame.Show()
app.MainLoop()
"tangc1986@gmail.com"
] | tangc1986@gmail.com |
8b6b130e2da20cd1c05e802909b5a967e3376bab | e10a6d844a286db26ef56469e31dc8488a8c6f0e | /cold_posterior_bnn/core/priorfactory.py | 6680996aff179c4ad86ef7141f6eb0c2cfc4294a | [
"Apache-2.0",
"CC-BY-4.0"
] | permissive | Jimmy-INL/google-research | 54ad5551f97977f01297abddbfc8a99a7900b791 | 5573d9c5822f4e866b6692769963ae819cb3f10d | refs/heads/master | 2023-04-07T19:43:54.483068 | 2023-03-24T16:27:28 | 2023-03-24T16:32:17 | 282,682,170 | 1 | 0 | Apache-2.0 | 2020-07-26T15:50:32 | 2020-07-26T15:50:31 | null | UTF-8 | Python | false | false | 11,649 | py | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Default priors for Bayesian neural networks.
Prior factories can create suitable priors given Keras layers as input.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import logging
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from cold_posterior_bnn.core import frn
from cold_posterior_bnn.core import model as bnnmodel
layers = tf.keras.layers
class PriorFactory(object):
    """Prior factory base class.

    The prior factory is a helper class that makes the task of adding proper
    prior distributions to Keras models easy.

    Examples:
      The following code instantiates a prior factory object and shows how to
      wrap a newly created layer in order to add proper default priors to all
      parameters of the layer.

      >>> pfac = DefaultPriorFactory(weight=1.0/total_train_size)
      >>> dense = pfac(tf.keras.layers.Dense(32))
    """

    def __init__(self, weight=0.0, prior_dict=None):
        """Construct a new PriorFactory object.

        Args:
          weight: prior weight, typically 1.0/total_train_sample_size for Bayesian
            neural networks.  Must be >0.0.
          prior_dict: dict, containing as keys layer.name values and as value a dict
            describing the regularizers to add to the respective layer.
            The prior_dict can be used to override choices made by other
            PriorFactory classes, i.e. it always takes precedence in determining
            priors.

        Raises:
          ValueError: invalid value for weight keyword argument.
        """
        # The weight parameter is critical so we force the user to set a value
        if weight <= 0.0:
            raise ValueError('You must provide a "weight" argument to the prior '
                             'factory. Typically weight=1.0/total_train_size, '
                             'where total_train_size is the number of iid training '
                             'instances.')
        self.weight = weight
        self.prior_dict = prior_dict

    def _replace(self, config, rdict):
        """Replace a key in a tf.keras.layers.Layer config dictionary.

        This method replaces a regularizer key in a layer.get_config() dictionary
        with specified elements from the rdict regularization dictionary.

        Examples:
          >>> embedding = tf.keras.layers.Embedding(5000, 512)
          >>> config = embedding.get_config()
          >>> pfac = DefaultPriorFactory(weight=1.0/50000.0)
          >>> rdict = {'embeddings_regularizer': {
                  'class_name': 'NormalRegularizer',
                  'config': {'stddev': 0.1, 'weight': 1.0/50000.0} } }
          >>> pfac._replace(config, rdict)

        Args:
          config: dict, containing the layer.get_config() dictionary to modify.
          rdict: dict, regularizer keys/values to put into config dictionary.
        """
        # If fixed prior is used, replace rdict using prior dictionary.
        # prior_dict entries always take precedence over the caller's rdict.
        layer_name = config['name']
        if (self.prior_dict is not None) and (layer_name in self.prior_dict):
            logging.info('Using regularizer for layer "%s" from prior_dict',
                         layer_name)
            rdict = self.prior_dict[layer_name]

        # rdict may be None (flat prior): leave the config untouched.
        if rdict is None:
            return

        for name in rdict:
            if config[name] is not None:
                logging.warn('Warning: Overriding regularizer from layer "%s"s %s',
                             layer_name, name)
            config[name] = rdict[name]

    def _update_prior(self, layer, config):
        """Update the config dictionary for the given layer.

        This abstract method must be overridden by concrete implementations in
        derived classes.

        The method's job is to select for a given 'layer' the corresponding priors
        that are suitable.

        Args:
          layer: tf.keras.layers.Layer class.
          config: the layer.get_config() dictionary.  This argument must be
            modified.  The modified dictionary will then be used to reconstruct the
            layer.
        """
        raise NotImplementedError('Users must override the _update_prior method '
                                  'of PriorFactory')

    def __call__(self, layer):
        """Add a prior to the newly constructed input layer.

        Args:
          layer: tf.keras.layers.Layer that has just been constructed (not built, no
            graph).

        Returns:
          layer_out: the layer with a suitable prior added.
        """
        # Frozen layers get no prior: they are not sampled/trained.
        if not layer.trainable:
            return layer

        # Obtain serialized layer representation and replace priors
        config = layer.get_config()
        self._update_prior(layer, config)

        # Reconstruct prior from updated serialized representation
        with bnnmodel.bnn_scope():
            layer_out = type(layer).from_config(config)

        return layer_out
class FlatPriorFactory(PriorFactory):
    """Improper flat prior factory.

    Adds no explicit prior to any layer (beyond entries supplied via the
    'prior_dict' argument), which corresponds to the improper flat prior.
    """

    def __init__(self, **kwargs):
        super(FlatPriorFactory, self).__init__(**kwargs)

    def _update_prior(self, layer, config):
        """Pass None so every regularizer slot stays empty (flat prior)."""
        self._replace(config, None)
# Default scale/stddev hyper-parameters for the regularizers produced by
# DefaultPriorFactory below.
DEFAULT_NORMAL_STDDEV = 0.5
DEFAULT_CAUCHY_SCALE = 1.0
DEFAULT_LAPLACE_STDDEV = 1.0
DEFAULT_HE_NORMAL_SCALE = 1.0
DEFAULT_GLOROT_NORMAL_SCALE = 1.0
class DefaultPriorFactory(PriorFactory):
    """Default prior factory for Bayesian neural networks.

    This class contains a selection of suitable default priors suitable for
    Bayesian neural networks.
    """

    def __init__(self, **kwargs):
        super(DefaultPriorFactory, self).__init__(**kwargs)

    # Each helper below returns a serialized regularizer spec (the layer
    # argument is currently unused).
    def normal(self, _):
        normal_dict = {
            'class_name': 'NormalRegularizer',
            'config': {'stddev': DEFAULT_NORMAL_STDDEV, 'weight': self.weight},
        }
        return normal_dict

    def he_normal(self, _):
        he_normal_dict = {
            'class_name': 'HeNormalRegularizer',
            'config': {'scale': DEFAULT_HE_NORMAL_SCALE, 'weight': self.weight},
        }
        return he_normal_dict

    def glorot_normal(self, _):
        glorot_normal_dict = {
            'class_name': 'GlorotNormalRegularizer',
            'config': {'scale': DEFAULT_GLOROT_NORMAL_SCALE, 'weight': self.weight},
        }
        return glorot_normal_dict

    def cauchy(self, _):
        cauchy_dict = {
            'class_name': 'CauchyRegularizer',
            'config': {'scale': DEFAULT_CAUCHY_SCALE, 'weight': self.weight}
        }
        return cauchy_dict

    def laplace(self, _):
        laplace_dict = {
            'class_name': 'LaplaceRegularizer',
            'config': {'stddev': DEFAULT_LAPLACE_STDDEV, 'weight': self.weight},
        }
        return laplace_dict

    def _update_prior(self, layer, config):
        # Bias terms: heavy-tailed Cauchy prior; here the prior choice really
        # matters.  (The laplace() helper above is available but unused here.)
        # Weight matrices/kernels: He-normal-scaled prior.
        if isinstance(layer, layers.Dense):
            self._replace(config, {
                'kernel_regularizer': self.he_normal(layer),
                'bias_regularizer': self.cauchy(layer),
            })
        elif isinstance(layer, layers.Embedding):
            self._replace(config, {
                'embeddings_regularizer': self.normal(layer),
            })
        elif isinstance(layer, layers.Conv1D):
            self._replace(config, {
                'kernel_regularizer': self.he_normal(layer),
                'bias_regularizer': self.cauchy(layer),
            })
        elif isinstance(layer, layers.Conv2D):
            self._replace(config, {
                'kernel_regularizer': self.he_normal(layer),
                'bias_regularizer': self.cauchy(layer),
            })
        elif isinstance(layer, layers.LSTM):
            self._replace(config, {
                'kernel_regularizer': self.he_normal(layer),
                'recurrent_regularizer': self.he_normal(layer),
                'bias_regularizer': self.cauchy(layer),
            })
        elif isinstance(layer, frn.FRN):
            self._replace(config, {
                'tau_regularizer': self.cauchy(layer),
                'beta_regularizer': self.cauchy(layer),
                'gamma_regularizer': self.cauchy(layer),
            })
        elif isinstance(layer, frn.TLU):
            self._replace(config, {
                'tau_regularizer': self.cauchy(layer),
            })
        else:
            # Unknown layer type: leave it without an explicit prior.
            logging.warning('Layer type "%s" not found', type(layer))
DEFAULT_GAUSSIAN_PFAC_STDDEV = 1.0
class GaussianPriorFactory(PriorFactory):
    """Zero-mean Gaussian prior factory for Bayesian neural networks.

    This prior was used in [Zhang et al., 2019].
    """

    def __init__(self, prior_stddev=DEFAULT_GAUSSIAN_PFAC_STDDEV, **kwargs):
        super(GaussianPriorFactory, self).__init__(**kwargs)
        self.prior_stddev = prior_stddev

    def normal(self, _):
        """Serialized NormalRegularizer spec with this factory's stddev/weight."""
        return {
            'class_name': 'NormalRegularizer',
            'config': {'stddev': self.prior_stddev, 'weight': self.weight},
        }

    def _update_prior(self, layer, config):
        # The same Normal prior is applied to every parameter group.
        if isinstance(layer, (layers.Dense, layers.Conv1D, layers.Conv2D)):
            self._replace(config, {
                'kernel_regularizer': self.normal(layer),
                'bias_regularizer': self.normal(layer),
            })
        elif isinstance(layer, layers.Embedding):
            self._replace(config, {
                'embeddings_regularizer': self.normal(layer),
            })
        elif isinstance(layer, layers.LSTM):
            self._replace(config, {
                'kernel_regularizer': self.normal(layer),
                'recurrent_regularizer': self.normal(layer),
                'bias_regularizer': self.normal(layer),
            })
        else:
            logging.warning('Layer type "%s" not found', type(layer))
DEFAULT_SHIFTED_GAUSSIAN_PFAC_STDDEV = 1.0
class ShiftedGaussianPriorFactory(PriorFactory):
    """Shifted Gaussian (non-zero mean) prior factory for Bayesian neural networks.

    This prior can be used to center a Gaussian prior around a point estimate for
    the neural network.  See prior.ShiftedNormalPrior for more information.
    """

    def __init__(self,
                 prior_mean=0,
                 prior_stddev=DEFAULT_SHIFTED_GAUSSIAN_PFAC_STDDEV,
                 **kwargs):
        super(ShiftedGaussianPriorFactory, self).__init__(**kwargs)
        self.prior_mean = prior_mean
        self.prior_stddev = prior_stddev

    def normal(self, _):
        """Serialized ShiftedNormalRegularizer spec (mean/stddev/weight)."""
        return {
            'class_name': 'ShiftedNormalRegularizer',
            'config': {'mean': self.prior_mean,
                       'stddev': self.prior_stddev,
                       'weight': self.weight},
        }

    def _update_prior(self, layer, config):
        # The same shifted-Normal prior is applied to every parameter group.
        if isinstance(layer, (layers.Dense, layers.Conv1D, layers.Conv2D)):
            self._replace(config, {
                'kernel_regularizer': self.normal(layer),
                'bias_regularizer': self.normal(layer),
            })
        elif isinstance(layer, layers.Embedding):
            self._replace(config, {
                'embeddings_regularizer': self.normal(layer),
            })
        elif isinstance(layer, layers.LSTM):
            self._replace(config, {
                'kernel_regularizer': self.normal(layer),
                'recurrent_regularizer': self.normal(layer),
                'bias_regularizer': self.normal(layer),
            })
        else:
            logging.warning('Layer type "%s" not found', type(layer))
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
11ae0a8a3865a0065090bf66d87e531d3dc0d981 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-1443.py | 266c3c98d64e4057273b3b38d28ef5120b3fd9c9 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,755 | py | # A resizable list of integers
class Vector(object):
    """A growable list of integers (ChocoPy-style vector)."""
    items: [int] = None  # backing storage; may be longer than `size`
    size: int = 0        # number of live elements

    def __init__(self:"Vector"):
        self.items = [0]

    # Returns current capacity
    def capacity(self:"Vector") -> int:
        return len(self.items)

    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Appends one item to end of vector
    def append(self:"Vector", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends many items to end of vector
    def append_all(self:"Vector", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)

    # Removes an item from the middle of vector
    def remove_at(self:"Vector", idx: int) -> object:
        # BUGFIX: also reject out-of-range indices; the original decremented
        # `size` even when idx >= size, silently dropping the last element.
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Retrieves an item at a given index
    def get(self:"Vector", idx: int) -> int:
        return self.items[idx]

    # Retrieves the current size of the vector
    def length(self:"Vector") -> int:
        return self.size
# A resizable list of integers
class Vector2(object):
    """A growable integer list; the *2-suffixed members mirror the base API."""
    items: [int] = None
    items2: [int] = None
    size: int = 0
    size2: int = 0

    def __init__(self:"Vector2"):
        self.items = [0]

    # Current storage capacity.
    def capacity(self:"Vector2") -> int:
        return len(self.items)

    # Current storage capacity (duplicate accessor).
    def capacity2(self:"Vector2") -> int:
        return len(self.items)

    # Grow the backing storage by one slot; report the new capacity.
    def increase_capacity(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Grow the backing storage by one slot (duplicate).
    def increase_capacity2(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Append a single value at the end, growing storage if full.
    def append(self:"Vector2", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Append a single value; the second argument is ignored.
    def append2(self:"Vector2", item: int, item2: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Append every value from new_items.
    def append_all(self:"Vector2", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)

    # Append every value from new_items; new_items2 is ignored.
    def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)

    # Remove the element at idx, shifting the tail left by one.
    def remove_at(self:"Vector2", idx: int) -> object:
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size = self.size - 1

    # Remove the element at idx (duplicate; idx2 is ignored).
    def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size = self.size - 1

    # Element at a given index.
    def get(self:"Vector2", idx: int) -> int:
        return self.items[idx]

    # Element at a given index (duplicate; idx2 is ignored).
    def get2(self:"Vector2", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Number of live elements.
    def length(self:"Vector2") -> int:
        return self.size

    # Number of live elements (duplicate).
    def length2(self:"Vector2") -> int:
        return self.size
# A resizable list of integers
class Vector3(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
def __init__(self:"Vector3"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector3") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector3") -> int:
self.items = $Exp.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector3", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector3", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector3", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector3", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
    """Duplicate of remove_at(); idx2 is ignored."""
    if idx < 0:
        return
    for pos in range(idx, self.size - 1):
        self.items[pos] = self.items[pos + 1]
    self.size -= 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
    """Duplicate of remove_at(); the extra arguments are ignored."""
    if idx < 0:
        return
    for pos in range(idx, self.size - 1):
        self.items[pos] = self.items[pos + 1]
    self.size -= 1
# Retrieves an item at a given index
def get(self:"Vector3", idx: int) -> int:
    """Return the element stored at idx (no bounds check beyond the list's)."""
    return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector3", idx: int, idx2: int) -> int:
    """Duplicate of get(); idx2 is ignored."""
    return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
    """Duplicate of get(); the extra arguments are ignored."""
    return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector3") -> int:
    """Number of stored elements (not the allocated capacity)."""
    return self.size
# Retrieves the current size of the vector
def length2(self:"Vector3") -> int:
    """Duplicate of length()."""
    return self.size
# Retrieves the current size of the vector
def length3(self:"Vector3") -> int:
    """Duplicate of length()."""
    return self.size
# A resizable list of integers
class Vector4(object):
    """A growable list of ints.

    The numbered duplicate members (items2..items4, size2..size4,
    capacity2()..capacity4(), ...) belong to the generated benchmark
    interface; every method operates on the primary items/size pair.
    """
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    def __init__(self:"Vector4"):
        """Allocate a single-slot backing list."""
        self.items = [0]
    def capacity(self:"Vector4") -> int:
        """Number of slots currently allocated."""
        return len(self.items)
    def capacity2(self:"Vector4") -> int:
        """Duplicate of capacity()."""
        return len(self.items)
    def capacity3(self:"Vector4") -> int:
        """Duplicate of capacity()."""
        return len(self.items)
    def capacity4(self:"Vector4") -> int:
        """Duplicate of capacity()."""
        return len(self.items)
    def increase_capacity(self:"Vector4") -> int:
        """Add one zero slot and return the new capacity."""
        self.items = [*self.items, 0]
        return self.capacity()
    def increase_capacity2(self:"Vector4") -> int:
        """Duplicate of increase_capacity()."""
        self.items = [*self.items, 0]
        return self.capacity()
    def increase_capacity3(self:"Vector4") -> int:
        """Duplicate of increase_capacity()."""
        self.items = [*self.items, 0]
        return self.capacity()
    def increase_capacity4(self:"Vector4") -> int:
        """Duplicate of increase_capacity()."""
        self.items = [*self.items, 0]
        return self.capacity()
    def append(self:"Vector4", item: int) -> object:
        """Store item after the last element, growing storage if full."""
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1
    def append2(self:"Vector4", item: int, item2: int) -> object:
        """Duplicate of append(); item2 is ignored."""
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1
    def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
        """Duplicate of append(); the extra arguments are ignored."""
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1
    def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
        """Duplicate of append(); the extra arguments are ignored."""
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1
    def append_all(self:"Vector4", new_items: [int]) -> object:
        """Append every element of new_items in order."""
        for value in new_items:
            self.append(value)
    def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
        """Duplicate of append_all(); new_items2 is ignored."""
        for value in new_items:
            self.append(value)
    def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        """Duplicate of append_all(); the extra lists are ignored."""
        for value in new_items:
            self.append(value)
    def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        """Duplicate of append_all(); the extra lists are ignored."""
        for value in new_items:
            self.append(value)
    def remove_at(self:"Vector4", idx: int) -> object:
        """Delete the element at idx by shifting the tail left; negative idx is a no-op."""
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1
    def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
        """Duplicate of remove_at(); idx2 is ignored."""
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1
    def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
        """Duplicate of remove_at(); the extra arguments are ignored."""
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1
    def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        """Duplicate of remove_at(); the extra arguments are ignored."""
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1
    def get(self:"Vector4", idx: int) -> int:
        """Return the element stored at idx."""
        return self.items[idx]
    def get2(self:"Vector4", idx: int, idx2: int) -> int:
        """Duplicate of get(); idx2 is ignored."""
        return self.items[idx]
    def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
        """Duplicate of get(); the extra arguments are ignored."""
        return self.items[idx]
    def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        """Duplicate of get(); the extra arguments are ignored."""
        return self.items[idx]
    def length(self:"Vector4") -> int:
        """Number of stored elements (not capacity)."""
        return self.size
    def length2(self:"Vector4") -> int:
        """Duplicate of length()."""
        return self.size
    def length3(self:"Vector4") -> int:
        """Duplicate of length()."""
        return self.size
    def length4(self:"Vector4") -> int:
        """Duplicate of length()."""
        return self.size
# A resizable list of integers
class Vector5(object):
    """A growable list of ints.

    The numbered duplicate members (items2..items5, size2..size5,
    capacity2()..capacity5(), ...) belong to the generated benchmark
    interface; every method operates on the primary items/size pair.
    """
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    items5: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    size5: int = 0
    def __init__(self:"Vector5"):
        """Allocate a single-slot backing list."""
        self.items = [0]
    def capacity(self:"Vector5") -> int:
        """Number of slots currently allocated."""
        return len(self.items)
    def capacity2(self:"Vector5") -> int:
        """Duplicate of capacity()."""
        return len(self.items)
    def capacity3(self:"Vector5") -> int:
        """Duplicate of capacity()."""
        return len(self.items)
    def capacity4(self:"Vector5") -> int:
        """Duplicate of capacity()."""
        return len(self.items)
    def capacity5(self:"Vector5") -> int:
        """Duplicate of capacity()."""
        return len(self.items)
    def increase_capacity(self:"Vector5") -> int:
        """Add one zero slot and return the new capacity."""
        self.items = [*self.items, 0]
        return self.capacity()
    def increase_capacity2(self:"Vector5") -> int:
        """Duplicate of increase_capacity()."""
        self.items = [*self.items, 0]
        return self.capacity()
    def increase_capacity3(self:"Vector5") -> int:
        """Duplicate of increase_capacity()."""
        self.items = [*self.items, 0]
        return self.capacity()
    def increase_capacity4(self:"Vector5") -> int:
        """Duplicate of increase_capacity()."""
        self.items = [*self.items, 0]
        return self.capacity()
    def increase_capacity5(self:"Vector5") -> int:
        """Duplicate of increase_capacity()."""
        self.items = [*self.items, 0]
        return self.capacity()
    def append(self:"Vector5", item: int) -> object:
        """Store item after the last element, growing storage if full."""
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1
    def append2(self:"Vector5", item: int, item2: int) -> object:
        """Duplicate of append(); item2 is ignored."""
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1
    def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
        """Duplicate of append(); the extra arguments are ignored."""
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1
    def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
        """Duplicate of append(); the extra arguments are ignored."""
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1
    def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
        """Duplicate of append(); the extra arguments are ignored."""
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1
    def append_all(self:"Vector5", new_items: [int]) -> object:
        """Append every element of new_items in order."""
        for value in new_items:
            self.append(value)
    def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
        """Duplicate of append_all(); new_items2 is ignored."""
        for value in new_items:
            self.append(value)
    def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        """Duplicate of append_all(); the extra lists are ignored."""
        for value in new_items:
            self.append(value)
    def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        """Duplicate of append_all(); the extra lists are ignored."""
        for value in new_items:
            self.append(value)
    def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
        """Duplicate of append_all(); the extra lists are ignored."""
        for value in new_items:
            self.append(value)
    def remove_at(self:"Vector5", idx: int) -> object:
        """Delete the element at idx by shifting the tail left; negative idx is a no-op."""
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1
    def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
        """Duplicate of remove_at(); idx2 is ignored."""
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1
    def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
        """Duplicate of remove_at(); the extra arguments are ignored."""
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1
    def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        """Duplicate of remove_at(); the extra arguments are ignored."""
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1
    def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
        """Duplicate of remove_at(); the extra arguments are ignored."""
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1
    def get(self:"Vector5", idx: int) -> int:
        """Return the element stored at idx."""
        return self.items[idx]
    def get2(self:"Vector5", idx: int, idx2: int) -> int:
        """Duplicate of get(); idx2 is ignored."""
        return self.items[idx]
    def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
        """Duplicate of get(); the extra arguments are ignored."""
        return self.items[idx]
    def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        """Duplicate of get(); the extra arguments are ignored."""
        return self.items[idx]
    def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
        """Duplicate of get(); the extra arguments are ignored."""
        return self.items[idx]
    def length(self:"Vector5") -> int:
        """Number of stored elements (not capacity)."""
        return self.size
    def length2(self:"Vector5") -> int:
        """Duplicate of length()."""
        return self.size
    def length3(self:"Vector5") -> int:
        """Duplicate of length()."""
        return self.size
    def length4(self:"Vector5") -> int:
        """Duplicate of length()."""
        return self.size
    def length5(self:"Vector5") -> int:
        """Duplicate of length()."""
        return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
    """Vector variant that doubles its storage until doubling_limit is reached."""
    doubling_limit:int = 1000
    def increase_capacity(self:"DoublingVector") -> int:
        """Grow the backing list: double while small, else add one slot."""
        if self.capacity() > self.doubling_limit // 2:
            # Doubling would exceed the limit: fall back to one-slot growth.
            self.items = self.items + [0]
        else:
            # Amortized-cheap growth: duplicate the backing list.
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
    """Vector variant that doubles its storage until doubling_limit is reached."""
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    def increase_capacity(self:"DoublingVector2") -> int:
        """Grow the backing list: double while small, else add one slot."""
        if self.capacity() > self.doubling_limit // 2:
            # Past the limit: plain one-slot growth.
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    def increase_capacity2(self:"DoublingVector2") -> int:
        """Duplicate of increase_capacity()."""
        if self.capacity() > self.doubling_limit // 2:
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
    """Vector variant that doubles its storage until doubling_limit is reached."""
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    def increase_capacity(self:"DoublingVector3") -> int:
        """Grow the backing list: double while small, else add one slot."""
        if self.capacity() > self.doubling_limit // 2:
            # Past the limit: plain one-slot growth.
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    def increase_capacity2(self:"DoublingVector3") -> int:
        """Duplicate of increase_capacity()."""
        if self.capacity() > self.doubling_limit // 2:
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    def increase_capacity3(self:"DoublingVector3") -> int:
        """Duplicate of increase_capacity()."""
        if self.capacity() > self.doubling_limit // 2:
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
    """Vector variant that doubles its storage until doubling_limit is reached."""
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000
    def increase_capacity(self:"DoublingVector4") -> int:
        """Grow the backing list: double while small, else add one slot."""
        if self.capacity() > self.doubling_limit // 2:
            # Past the limit: plain one-slot growth.
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    def increase_capacity2(self:"DoublingVector4") -> int:
        """Duplicate of increase_capacity()."""
        if self.capacity() > self.doubling_limit // 2:
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    def increase_capacity3(self:"DoublingVector4") -> int:
        """Duplicate of increase_capacity()."""
        if self.capacity() > self.doubling_limit // 2:
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    def increase_capacity4(self:"DoublingVector4") -> int:
        """Duplicate of increase_capacity()."""
        if self.capacity() > self.doubling_limit // 2:
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
    """Vector variant that doubles its storage until doubling_limit is reached."""
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000
    doubling_limit5:int = 1000
    def increase_capacity(self:"DoublingVector5") -> int:
        """Grow the backing list: double while small, else add one slot."""
        if self.capacity() > self.doubling_limit // 2:
            # Past the limit: plain one-slot growth.
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    def increase_capacity2(self:"DoublingVector5") -> int:
        """Duplicate of increase_capacity()."""
        if self.capacity() > self.doubling_limit // 2:
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    def increase_capacity3(self:"DoublingVector5") -> int:
        """Duplicate of increase_capacity()."""
        if self.capacity() > self.doubling_limit // 2:
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    def increase_capacity4(self:"DoublingVector5") -> int:
        """Duplicate of increase_capacity()."""
        if self.capacity() > self.doubling_limit // 2:
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    def increase_capacity5(self:"DoublingVector5") -> int:
        """Duplicate of increase_capacity()."""
        if self.capacity() > self.doubling_limit // 2:
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
    """Return a DoublingVector holding the integers i, i+1, ..., j-1."""
    out = DoublingVector()
    for value in range(i, j):
        out.append(value)
    return out
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
    """Duplicate of vrange(); i2/j2 are ignored."""
    out = DoublingVector()
    for value in range(i, j):
        out.append(value)
    return out
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
    """Duplicate of vrange(); the extra bounds are ignored."""
    out = DoublingVector()
    for value in range(i, j):
        out.append(value)
    return out
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
    """Duplicate of vrange(); the extra bounds are ignored."""
    out = DoublingVector()
    for value in range(i, j):
        out.append(value)
    return out
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
    """Duplicate of vrange(); the extra bounds are ignored."""
    out = DoublingVector()
    for value in range(i, j):
        out.append(value)
    return out
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
    """Remove every element divisible by an earlier element (prime sieve).

    Mutates v in place.  length() must be re-queried on every iteration
    because remove_at() shrinks the vector while we scan it.
    """
    i:int = 0
    while i < v.length():
        base = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % base != 0:
                j = j + 1
            else:
                v.remove_at(j)
        i = i + 1
def sieve2(v:Vector, v2:Vector) -> object:
    """Duplicate of sieve(); v2 is ignored.  Mutates v in place."""
    i:int = 0
    while i < v.length():
        base = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % base != 0:
                j = j + 1
            else:
                v.remove_at(j)
        i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
    """Duplicate of sieve(); the extra vectors are ignored.  Mutates v."""
    i:int = 0
    while i < v.length():
        base = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % base != 0:
                j = j + 1
            else:
                v.remove_at(j)
        i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
    """Duplicate of sieve(); the extra vectors are ignored.  Mutates v."""
    i:int = 0
    while i < v.length():
        base = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % base != 0:
                j = j + 1
            else:
                v.remove_at(j)
        i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
    """Duplicate of sieve(); the extra vectors are ignored.  Mutates v."""
    i:int = 0
    while i < v.length():
        base = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % base != 0:
                j = j + 1
            else:
                v.remove_at(j)
        i = i + 1
# Input parameter
# Upper bound (exclusive) of the sieve; n2..n5 are unused duplicates
# produced by the benchmark generator.
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
# Build five identical vectors of [2, n); only v is sieved and printed,
# the others just add workload.
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
# Emit the survivors (the primes below n), one per line.
while i < v.length():
    print(v.get(i))
    i = i + 1
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
5f89ba104a536912746246b340517f708d85c7be | ece0d321e48f182832252b23db1df0c21b78f20c | /engine/2.80/scripts/addons_contrib/io_directx_bel/import_x.py | 250e36ecec7abb7919af8efc79a829bd74fc0966 | [
"Unlicense",
"GPL-3.0-only",
"Font-exception-2.0",
"GPL-3.0-or-later",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-public-domain-disclaimer",
"Bitstream-Vera",
"LicenseRef-scancode-blender-2010",
"LGPL-2.1-or-later",
... | permissive | byteinc/Phasor | 47d4e48a52fa562dfa1a2dbe493f8ec9e94625b9 | f7d23a489c2b4bcc3c1961ac955926484ff8b8d9 | refs/heads/master | 2022-10-25T17:05:01.585032 | 2019-03-16T19:24:22 | 2019-03-16T19:24:22 | 175,723,233 | 3 | 1 | Unlicense | 2022-10-21T07:02:37 | 2019-03-15T00:58:08 | Python | UTF-8 | Python | false | false | 36,905 | py | # Blender directX importer
# version baby
# litterature explaining the parser directions :
# I don't want to load the whole file as it can be huge : go chunks
# also I want random access to 3d datas to import pieces, not always everything
# so step1 is a whole file fast parsing, retrieving tokens name and building en empty internal dict
# with only pointers and no 3d datas.
# step 2 is to call any token by their names and retrieve the 3d datas thanks to pointers stored in dicts
# between step 1 and step 2 a script ui should be provided to select, transform etc before import.
# > I need to know the pointer position of tokens but data.tell() is slow
# a += pointer computed from line length is way faster. so I need eol -> rb mode
# and readline() is ok in binary mode 'rb' with \r\n (win) \n (unix) but not \r mac..
# 2chrs for windows, 1 for mac and lunix > win eol \r\n becomes \n\n (add a line)
# mac eol \r becomes \n so win lines info are wrong
# this also allows support for wrong files format (mixed \r and \r\n)
# for now it only works for text format, but the used methods will be independant of the container type.
# TEST FILES
# http://assimp.svn.sourceforge.net/viewvc/assimp/trunk/test/models/X/
import os
import re
import struct, binascii
import time
import bpy
import mathutils as bmat
from mathutils import Vector, Matrix
try :
import bel
import bel.mesh
import bel.image
import bel.uv
import bel.material
import bel.ob
import bel.fs
except :
import io_directx_bel.bel as bel
from .bel import mesh,image,uv,material,ob,fs
from .templates_x import *
'''
# just a temp hack to reload bel everytime
import imp
imp.reload(bel)
imp.reload(bel.fs)
imp.reload(bel.image)
imp.reload(bel.material)
imp.reload(bel.mesh)
imp.reload(bel.ob)
imp.reload(bel.uv)
'''
###################################################
def load(operator, context, filepath,
global_clamp_size=0.0,
show_tree=False,
show_templates=False,
show_geninfo=False,
quickmode=False,
parented=False,
bone_maxlength=1.0,
chunksize=False,
naming_method=0,
use_ngons=True,
use_edges=True,
use_smooth_groups=True,
use_split_objects=True,
use_split_groups=True,
use_groups_as_vgroups=False,
use_image_search=True,
global_matrix=None,
):
if quickmode :
parented = False
bone_minlength = bone_maxlength / 100.0
#global templates, tokens
rootTokens = []
namelookup = {}
imgnamelookup = {}
chunksize = int(chunksize)
reserved_type = (
'dword',
'float',
'string'
)
'''
'array',
'Matrix4x4',
'Vector',
'''
'''
with * : defined in dXdata
WORD 16 bits
* DWORD 32 bits
* FLOAT IEEE float
DOUBLE 64 bits
CHAR 8 bits
UCHAR 8 bits
BYTE 8 bits
* STRING NULL-terminated string
CSTRING Formatted C-string (currently unsupported)
UNICODE UNICODE string (currently unsupported)
BINARY FORMAT
# TOKENS in little-endian WORDs
#define TOKEN_NAME 1
#define TOKEN_STRING 2
#define TOKEN_INTEGER 3
#define TOKEN_GUID 5
#define TOKEN_INTEGER_LIST 6
#define TOKEN_FLOAT_LIST 7
#define TOKEN_OBRACE 10
#define TOKEN_CBRACE 11
#define TOKEN_OPAREN 12
#define TOKEN_CPAREN 13
#define TOKEN_OBRACKET 14
#define TOKEN_CBRACKET 15
#define TOKEN_OANGLE 16
#define TOKEN_CANGLE 17
#define TOKEN_DOT 18
#define TOKEN_COMMA 19
#define TOKEN_SEMICOLON 20
#define TOKEN_TEMPLATE 31
#define TOKEN_WORD 40
#define TOKEN_DWORD 41
#define TOKEN_FLOAT 42
#define TOKEN_DOUBLE 43
#define TOKEN_CHAR 44
#define TOKEN_UCHAR 45
#define TOKEN_SWORD 46
#define TOKEN_SDWORD 47
#define TOKEN_VOID 48
#define TOKEN_LPSTR 49
#define TOKEN_UNICODE 50
#define TOKEN_CSTRING 51
#define TOKEN_ARRAY 52
'''
# COMMON REGEX
space = '[\ \t]{1,}' # at least one space / tab
space0 = '[\ \t]{0,}' # zero or more space / tab
# DIRECTX REGEX TOKENS
r_template = r'template' + space + '[\w]*' + space0 + '\{'
if quickmode :
r_sectionname = r'Mesh' + space + '[\W-]*'
else :
r_sectionname = r'[\w]*' + space + '[\w-]*' + space0 + '\{'
r_refsectionname = r'\{' + space0 + '[\w-]*' + space0 + '\}'
r_endsection = r'\{|\}'
# dX comments
r_ignore = r'#|//'
#r_frame = r'Frame' + space + '[\w]*'
#r_matrix = r'FrameTransformMatrix' + space + '\{[\s\d.,-]*'
#r_mesh = r'Mesh' + space + '[\W]*'
###################
## STEP 1 FUNCTIONS
###################
## HEADER
# returns header values or False if directx reco tag is missing
# assuming there's never comment header and that xof if the 1st
# string of the file
'''
they look like xof 0303txt 0032
4 Magic Number (required) "xof "
2 Minor Version 03
2 Major Version 02
4 Format Type (required)
"txt " Text File
"bin " Binary File
"tzip" MSZip Compressed Text File
"bzip" MSZip Compressed Binary File
4 Float Accuracy "0032" 32 bit or "0064" 64 bit
'''
def dXheader(data) :
l = data.read(4)
if l != b'xof ' :
print ('no header found !')
data.seek(0)
return False
minor = data.read(2).decode()
major = data.read(2).decode()
format = data.read(4).decode().strip()
accuracy = int(data.read(4).decode())
data.seek(0)
return ( minor, major, format, accuracy )
##
def dXtree(data,quickmode = False) :
tokens = {}
templates = {}
tokentypes = {}
c = 0
lvl = 0
tree = ['']
ptr = 0
eol = 0
trunkated = False
previouslvl = False
while True :
#for l in data.readlines() :
lines, trunkated = nextFileChunk(data,trunkated)
if lines == None : break
for l in lines :
# compute pointer position
ptr += eol
c += 1
eol = len(l) + 1
#print(c,data.tell(),ptr+eol)
#if l != '' : print('***',l)
#if l == '' : break
l = l.strip()
# remove blank and comment lines
if l == '' or re.match(r_ignore,l) :
continue
# one line token cases level switch
if previouslvl :
lvl -= 1
previouslvl = False
#print('%s lines in %.2f\''%(c,time.clock()-t),end='\r')
#print(c,len(l)+1,ptr,data.tell())
if '{' in l :
lvl += 1
if '}' in l : previouslvl = True #; print('got one line token : \n%s'%l)
elif '}' in l :
lvl -= 1
#print(c,lvl,tree)
if quickmode == False :
## look for templates
if re.match(r_template,l) :
tname = l.split(' ')[1]
templates[tname] = {'pointer' : ptr, 'line' : c}
continue
## look for {references}
if re.match(r_refsectionname,l) :
refname = namelookup[ l[1:-1].strip() ]
#print('FOUND reference to %s in %s at line %s (level %s)'%(refname,tree[lvl-1],c,lvl))
#tree = tree[0:lvl]
parent = tree[lvl-1]
# tag it as a reference, since it's not exactly a child.
# put it in childs since order can matter in sub tokens declaration
tokens[parent]['childs'].append('*'+refname)
if refname not in tokens :
print('reference to %s done before its declaration (line %s)\ncreated dummy'%(refname,c))
tokens[refname] = {}
if 'user' not in tokens[refname] : tokens[refname]['users'] = [parent]
else : tokens[refname]['users'].append(parent)
continue
## look for any token or only Mesh token in quickmode
if re.match(r_sectionname,l) :
tokenname = getName(l,tokens)
#print('FOUND %s %s %s %s'%(tokenname,c,lvl,tree))
#print('pointer %s %s'%(data.tell(),ptr))
if lvl == 1 : rootTokens.append(tokenname)
typ = l.split(' ')[0].strip().lower()
tree = tree[0:lvl]
if typ not in tokentypes : tokentypes[typ] = [tokenname]
else : tokentypes[typ].append(tokenname)
parent = tree[-1]
if tokenname in tokens :
tokens[tokenname]['pointer'] = ptr
tokens[tokenname]['line'] = c
tokens[tokenname]['parent'] = parent
tokens[tokenname]['childs'] = []
tokens[tokenname]['type'] = typ
else : tokens[tokenname] = {'pointer': ptr,
'line' : c,
'parent' : parent,
'childs' : [],
'users' : [],
'type' : typ
}
tree.append(tokenname)
if lvl > 1 and quickmode == False :
tokens[parent]['childs'].append(tokenname)
return tokens, templates, tokentypes
## returns file binary chunks
def nextFileChunk(data,trunkated=False,chunksize=1024) :
if chunksize == 0 : chunk = data.read()
else : chunk = data.read(chunksize)
if format == 'txt' :
lines = chunk.decode('utf-8', errors='ignore')
#if stream : return lines.replace('\r','').replace('\n','')
lines = lines.replace('\r','\n').split('\n')
if trunkated : lines[0] = trunkated + lines[0]
if len(lines) == 1 :
if lines[0] == '' : return None, None
return lines, False
return lines, lines.pop()
# wip, todo for binaries
else :
print(chunk)
for word in range(0,len(chunk)) :
w = chunk[word:word+4]
print(word,w,struct.unpack("<l", w),binascii.unhexlify(w))
# name unnamed tokens, watchout for x duplicate
# for blender, referenced token in x should be named and unique..
def getName(l,tokens) :
xnam = l.split(' ')[1].strip()
#if xnam[0] == '{' : xnam = ''
if xnam and xnam[-1] == '{' : xnam = xnam[:-1]
name = xnam
if len(name) == 0 : name = l.split(' ')[0].strip()
namelookup[xnam] = bel.bpyname(name,tokens,4)
return namelookup[xnam]
###################
## STEP 2 FUNCTIONS
###################
# once the internal dict is populated the functions below can be used
## from a list of tokens, displays every child, users and references
'''
walk_dxtree( [ 'Mesh01', 'Mesh02' ] ) # for particular pieces
walk_dxtree(tokens.keys()) for the whole tree
'''
def walk_dXtree(field,lvl=0,tab='') :
for fi, tokenname in enumerate(field) :
if lvl > 0 or tokens[tokenname]['parent'] == '' :
if tokenname not in tokens :
tokenname = tokenname[1:]
ref = 'ref: '
else : ref = False
frame_type = tokens[tokenname]['type']
line = ('{:7}'.format(tokens[tokenname]['line']))
log = ' %s%s (%s)'%( ref if ref else '', tokenname, frame_type )
print('%s.%s%s'%(line, tab, log))
if fi == len(field) - 1 : tab = tab[:-3] + ' '
if ref == False :
for user in tokens[tokenname]['users'] :
print('%s.%s |__ user: %s'%(line, tab.replace('_',' '), user))
walk_dXtree(tokens[tokenname]['childs'],lvl+1,tab.replace('_',' ')+' |__')
if fi == len(field) - 1 and len(tokens[tokenname]['childs']) == 0 :
print('%s.%s'%(line,tab))
## remove eol, comments, spaces from a raw block of datas
def cleanBlock(block) :
while '//' in block :
s = block.index('//')
e = block.index('\n',s+1)
block = block[0:s] + block[e:]
while '#' in block :
s = block.index('#')
e = block.index('\n',s+1)
block = block[0:s] + block[e:]
block = block.replace('\n','').replace(' ','').replace('\t ','')
return block
def readToken(tokenname) :
token = tokens[tokenname]
datatype = token['type'].lower()
if datatype in templates : tpl = templates[datatype]
elif datatype in defaultTemplates : tpl = defaultTemplates[datatype]
else :
print("can't find any template to read %s (type : %s)"%(tokenname,datatype))
return False
#print('> use template %s'%datatype)
block = readBlock(data,token)
ptr = 0
#return dXtemplateData(tpl,block)
fields, ptr = dXtemplateData(tpl,block)
if datatype in templatesConvert :
fields = eval( templatesConvert[datatype] )
return fields
# Decode one template instance from *block* starting at offset *ptr*.
# Returns (list of decoded member values, offset just past the instance).
# WARNING: uses eval()/exec() so that array length expressions such as
# 'nVerts' can reference previously decoded members by local name.
def dXtemplateData(tpl,block,ptr=0) :
#print('dxTPL',block[ptr])
pack = []
for member in tpl['members'] :
#print(member)
dataname = member[-1]
datatype = member[0].lower()
if datatype == 'array' :
# Array member: real element type is member[1]; the declared name
# carries the length expression between the square brackets.
datatype = member[1].lower()
s = dataname.index('[') + 1
e = dataname.index(']')
#print(dataname[s:e])
length = eval(dataname[s:e])
#print("array %s type %s length defined by '%s' : %s"%(dataname[:s-1],datatype,dataname[s:e],length))
dataname = dataname[:s-1]
datavalue, ptr = dXarray(block, datatype, length, ptr)
#print('back to %s'%(dataname))
else :
length = 1
datavalue, ptr = dXdata(block, datatype, length, ptr)
#if len(str(datavalue)) > 50 : dispvalue = str(datavalue[0:25]) + ' [...] ' + str(datavalue[-25:])
#else : dispvalue = str(datavalue)
#print('%s : %s %s'%(dataname,dispvalue,type(datavalue)))
# Bind the decoded value to its member name so later length
# expressions (see the eval above) can resolve it.
exec('%s = datavalue'%(dataname))
pack.append( datavalue )
return pack, ptr + 1
# Decode a single value of *datatype* from *block* at offset *s*.
# Scalar types (dword/float/string) are parsed directly; any other type
# is resolved recursively through its template via dXtemplateData().
# Returns (value, offset just past the terminator), or False when the
# type has no template.
def dXdata(block,datatype,length,s=0,eof=';') :
#print('dxDTA',block[s])
# at last, the data we need
# should be a ';' but one meet ',' often, like in meshface
if datatype == 'dword' :
e = block.index(';',s+1)
try : field = int(block[s:e])
except :
# fall back to ',' as terminator (seen in mesh face lists)
e = block.index(',',s+1)
field = int(block[s:e])
return field, e+1
elif datatype == 'float' :
e = block.index(eof,s+1)
return float(block[s:e]), e+1
elif datatype == 'string' :
e = block.index(eof,s+1)
# strip the surrounding double quotes of the .x string literal
return str(block[s+1:e-1]) , e+1
else :
if datatype in templates : tpl = templates[datatype]
elif datatype in defaultTemplates : tpl = defaultTemplates[datatype]
else :
print("can't find any template for type : %s"%(datatype))
return False
#print('> use template %s'%datatype)
fields, ptr = dXtemplateData(tpl,block,s)
if datatype in templatesConvert :
fields = eval( templatesConvert[datatype] )
return fields, ptr
# Decode *length* consecutive values of *datatype* from *block* at *s*.
# Primitive elements are ','-separated and ';'-closed; template-typed
# elements are ';,'-separated and ';;'-closed.
# NOTE(review): 'reserved_type' comes from the enclosing scope --
# presumably the set of primitive .x type names; confirm in full file.
def dXarray(block, datatype, length, s=0) :
#print('dxARR',block[s])
lst = []
if datatype in reserved_type :
eoi=','
for i in range(length) :
if i+1 == length : eoi = ';'
datavalue, s = dXdata(block,datatype,1,s,eoi)
lst.append( datavalue )
else :
eoi = ';,'
for i in range(length) :
if i+1 == length : eoi = ';;'
#print(eoi)
e = block.index(eoi,s)
#except : print(block,s) ; popo()
datavalue, na = dXdata(block[s:e+1],datatype,1)
lst.append( datavalue )
s = e + 2
return lst, s
###################################################
## populate a template with its datas
# this make them available in the internal dict. should be used in step 2 for unknown data type at least
# Reads the raw template declaration at its recorded file pointer,
# extracts its UUID, member list and open/closed restriction, and -- when
# the template is also a builtin -- reports whether it matches.
# NOTE(review): nextFileChunk and chunksize come from the enclosing
# scope; confirm in the full file.
def readTemplate(data,tpl_name,display=False) :
ptr = templates[tpl_name]['pointer']
line = templates[tpl_name]['line']
#print('> %s at line %s (chr %s)'%(tpl_name,line,ptr))
data.seek(ptr)
block = ''
trunkated = False
go = True
# accumulate file chunks until the closing '}' of the template is seen
while go :
lines, trunkated = nextFileChunk(data,trunkated,chunksize) # stream ?
if lines == None :
break
for l in lines :
#l = data.readline().decode().strip()
block += l.strip()
if '}' in l :
go = False
break
uuid = re.search(r'<.+>',block).group()
templates[tpl_name]['uuid'] = uuid.lower()
templates[tpl_name]['members'] = []
templates[tpl_name]['restriction'] = 'closed'
# member declarations sit between the uuid and the closing brace,
# ';'-separated; a leading '[' marks the open/restricted clause.
members = re.search(r'>.+',block).group()[1:-1].split(';')
for member in members :
if member == '' : continue
if member[0] == '[' :
templates[tpl_name]['restriction'] = member
continue
templates[tpl_name]['members'].append( member.split(' ') )
if display :
print('\ntemplate %s :'%tpl_name)
for k,v in templates[tpl_name].items() :
if k != 'members' :
print(' %s : %s'%(k,v))
else :
for member in v :
print(' %s'%str(member)[1:-1].replace(',',' ').replace("'",''))
# compare against the builtin definition of the same template, copying
# over file position fields first so only the content is compared
if tpl_name in defaultTemplates :
defaultTemplates[tpl_name]['line'] = templates[tpl_name]['line']
defaultTemplates[tpl_name]['pointer'] = templates[tpl_name]['pointer']
if defaultTemplates[tpl_name] != templates[tpl_name] :
print('! DIFFERS FROM BUILTIN TEMPLATE :')
print('raw template %s :'%tpl_name)
print(templates[tpl_name])
print('raw default template %s :'%tpl_name)
print(defaultTemplates[tpl_name])
#for k,v in defaultTemplates[tpl_name].items() :
# if k != 'members' :
# print(' %s : %s'%(k,v))
# else :
# for member in v :
# print(' %s'%str(member)[1:-1].replace(',',' ').replace("'",''))
else :
print('MATCHES BUILTIN TEMPLATE')
## read any kind of token data block
# by default the block is cleaned from inline comment space etc to allow data parsing
# useclean = False (retrieve all bytes) if you need to compute a file byte pointer
# to mimic the file.tell() function and use it with file.seek()
# Returns the text between the token's '{' and '}' delimiters.
# NOTE(review): nextFileChunk, chunksize and r_endsection come from the
# enclosing scope; confirm in the full file.
def readBlock(data,token, clean=True) :
ptr = token['pointer']
data.seek(ptr)
block = ''
#lvl = 0
trunkated = False
go = True
while go :
lines, trunkated = nextFileChunk(data,trunkated,chunksize)
if lines == None : break
for l in lines :
#eol = len(l) + 1
l = l.strip()
#c += 1
block += l+'\n'
if re.match(r_endsection,l) :
go = False
break
# keep only the payload between the outermost braces
s = block.index('{') + 1
e = block.index('}')
block = block[s:e]
if clean : block = cleanBlock(block)
return block
def getChilds(tokenname) :
    """Return the child token names of *tokenname* with the leading '*'
    (reference marker) stripped.

    A '*' prefix on a child name means it is a reference; it is always
    removed here so callers receive plain token names.
    """
    # NOTE(review): 'tokens' is resolved from the enclosing scope.
    return [name[1:] if name[0] == '*' else name
            for name in tokens[tokenname]['childs']]
# the input nested list of [bonename, matrix, [child0,child1..]] is given by import_dXtree()
# Recursively builds a Blender armature from that nested list; at lvl 0
# *armdata* is the armature NAME (a new armature object is created and
# put in edit mode), deeper calls receive the armature datablock itself.
# Returns the armature object at lvl 0, otherwise the created edit bone.
def buildArm(armdata, child,lvl=0,parent_matrix=False) :
bonename, bonemat, bonechilds = child
if lvl == 0 :
armname = armdata
armdata = bpy.data.armatures.new(name=armname)
arm = bpy.data.objects.new(armname,armdata)
bpy.context.collection.objects.link(arm)
arm.select_set(True)
bpy.context.view_layer.objects.active = arm
bpy.ops.object.mode_set(mode='EDIT')
parent_matrix = Matrix()
bone = armdata.edit_bones.new(name=bonename)
# world-space bone matrix: parent chain applied to the local matrix
bonematW = parent_matrix * bonemat
bone.head = bonematW.to_translation()
#bone.roll.. ?
bone_length = bone_maxlength
for bonechild in bonechilds :
bonechild = buildArm(armdata,bonechild,lvl+1,bonematW)
bonechild.parent = bone
# shrink the bone so it does not overshoot its nearest child
bone_length = min((bonechild.head - bone.head).length, bone_length)
bone.tail = bonematW * Vector((0,bone_length,0))
if lvl == 0 :
bpy.ops.object.mode_set(mode='OBJECT')
return arm
return bone
# Recursively convert a list of sibling .x tokens (*field*) into Blender
# data: meshes become objects, bare frame matrices are kept as nested
# [name, matrix, childs] lists until it is known whether the branch is an
# armature (then buildArm converts it) or an empty hierarchy.
# Returns an object, a [name, matrix, childs] list, or False for an
# empty branch.
def import_dXtree(field,lvl=0) :
tab = ' '*lvl*2
if field == [] :
if show_geninfo : print('%s>> no childs, return False'%(tab))
return False
ob = False
mat = False
is_root = False
frames = []
obs = []
parentname = tokens[field[0]]['parent']
if show_geninfo : print('%s>>childs in frame %s :'%(tab,parentname))
for tokenname in field :
tokentype = tokens[tokenname]['type']
# frames can contain more than one mesh
if tokentype == 'mesh' :
# object and mesh naming :
# if parent frame has several meshes : obname = meshname = mesh token name,
# if parent frame has only one mesh : obname = parent frame name, meshname = mesh token name.
if parentname :
meshcount = 0
for child in getChilds(parentname) :
if tokens[child]['type'] == 'mesh' :
meshcount += 1
if meshcount == 2 :
parentname = tokenname
break
else : parentname = tokenname
ob = getMesh(parentname,tokenname)
obs.append(ob)
if show_geninfo : print('%smesh : %s'%(tab,tokenname))
# frames contain one matrix (empty or bone)
elif tokentype == 'frametransformmatrix' :
[mat] = readToken(tokenname)
if show_geninfo : print('%smatrix : %s'%(tab,tokenname))
# frames can contain 0 or more frames
elif tokentype == 'frame' :
frames.append(tokenname)
if show_geninfo : print('%sframe : %s'%(tab,tokenname))
# matrix is used for mesh transform if some mesh(es) exist(s)
if ob :
is_root = True
if mat == False :
mat = Matrix()
if show_geninfo : print('%smesh token without matrix, set it to default\n%splease report in bug tracker if you read this !'%(tab,tab))
if parentname == '' :
mat = mat * global_matrix
if len(obs) == 1 :
ob.matrix_world = mat
else :
# several meshes under one frame: group them under a new empty
ob = bel.ob.new(parentname, None, naming_method)
ob.matrix_world = mat
for child in obs :
child.parent = ob
# matrix only, store it as a list as we don't know if
# it's a bone or an empty yet
elif mat :
ob = [parentname, mat,[]]
# nothing case ?
else :
ob = [parentname, Matrix() * global_matrix,[]]
if show_geninfo : print('%snothing here'%(tab))
childs = []
for tokenname in frames :
if show_geninfo : print('%s<Begin %s :'%(tab,tokenname))
# child is either False, empty, object, or a list or undefined name matrices hierarchy
child = import_dXtree(getChilds(tokenname),lvl+1)
if child and type(child) != list :
is_root = True
childs.append( [tokenname, child] )
if show_geninfo : print('%sEnd %s>'%(tab,tokenname))
if is_root and parentname != '' :
if show_geninfo : print('%send of tree a this point'%(tab))
if type(ob) == list :
mat = ob[1]
ob = bel.ob.new(parentname, None, naming_method)
ob.matrix_world = mat
for tokenname, child in childs :
if show_geninfo : print('%sbegin2 %s>'%(tab,tokenname))
# returned a list of object(s) or matrice(s)
if child :
# current frame is an object or an empty, we parent this frame to it
#if eot or (ob and ( type(ob.data) == type(None) or type(ob.data) == bpy.types.Mesh ) ) :
if is_root :
# this branch is an armature, convert it
if type(child) == list :
if show_geninfo : print('%sconvert to armature %s'%(tab,tokenname))
child = buildArm(tokenname, child)
# parent the obj/empty/arm to current
# or apply the global user defined matrix to the object root
if parentname != '' :
child.parent = ob
else :
child.matrix_world = global_matrix
# returned a list of parented matrices. append it in childs list
elif type(child[0]) == str :
ob[2].append(child)
# child is an empty or a mesh, so current frame is an empty, not an armature
elif ob and ( type(child.data) == type(None) or type(child.data) == bpy.types.Mesh ) :
#print(' child data type: %s'%type(child.data))
child.parent = ob
#print('%s parented to %s'%(child.name,ob.name))
# returned False
else :
if show_geninfo : print('%sreturned %s, nothing'%(tab,child))
#print('>> %s return %s'%(field,ob))
return ob# if ob else False
# build from mesh token type
# Decodes one .x 'mesh' token and its children (uv coords, material
# list with optional textures, skin weights) and writes a Blender mesh
# object through bel.mesh.write(). Returns the created object.
# NOTE(review): bel, namelookup, path, use_smooth_groups, naming_method
# come from the enclosing scope; confirm in the full file.
def getMesh(obname,tokenname,debug = False):
if debug : print('\nmesh name : %s'%tokenname)
verts = []
edges = []
faces = []
matslots = []
facemats = []
uvs = []
groupnames = []
groupindices = []
groupweights = []
nVerts, verts, nFaces, faces = readToken(tokenname)
if debug :
print('verts : %s %s\nfaces : %s %s'%(nVerts, len(verts),nFaces, len(faces)))
#for childname in token['childs'] :
for childname in getChilds(tokenname) :
tokentype = tokens[childname]['type']
# UV
if tokentype == 'meshtexturecoords' :
uv = readToken(childname)
#uv = bel.uv.asVertsLocation(uv, faces)
uv = bel.uv.asFlatList(uv, faces)
uvs.append(uv)
if debug : print('uv : %s'%(len(uv)))
# MATERIALS
elif tokentype == 'meshmateriallist' :
nbslots, facemats = readToken(childname)
if debug : print('facemats : %s'%(len(facemats)))
# mat can exist but with no datas so we prepare the mat slot
# with dummy ones
for slot in range(nbslots) :
matslots.append('dXnoname%s'%slot )
# length does not match (could be tuned more, need more cases)
if len(facemats) != len(faces) :
facemats = [ facemats[0] for i in faces ]
# seek for materials then textures if any mapped in this mesh.
# no type test, only one option type in token meshmateriallist : 'Material'
for slotid, matname in enumerate(getChilds(childname)) :
# rename dummy mats with the right name
matslots[slotid] = matname
# blender material creation (need tuning)
mat = bel.material.new(matname,naming_method)
matslots[slotid] = mat.name
if naming_method != 1 :
#print('matname : %s'%matname)
(diffuse_color,alpha), power, specCol, emitCol = readToken(matname)
#if debug : print(diffuse_color,alpha, power, specCol, emitCol)
mat.diffuse_color = diffuse_color
mat.diffuse_intensity = power
mat.specular_color = specCol
# dX emit don't use diffuse color but is a color itself
# convert it to a kind of intensity
mat.emit = (emitCol[0] + emitCol[1] + emitCol[2] ) / 3
if alpha != 1.0 :
mat.use_transparency = True
mat.transparency_method = 'Z_TRANSPARENCY'
mat.alpha = alpha
mat.specular_alpha = 0
transp = True
else : transp = False
# texture
# only 'TextureFilename' can be here, no type test
# textures have no name in .x so we build
# image and texture names from the image file name
# bdata texture slot name = bdata image name
btexnames = []
for texname in getChilds(matname) :
# create/rename/reuse etc corresponding data image
# (returns False if not found)
[filename] = readToken(texname)
img = bel.image.new(path+'/'+filename)
if img == False :
imgname = 'not_found'
else :
imgname = img.name
#print('texname : %s'%texname)
#print('filename : %s'%filename)
#print('btex/img name : %s'%imgname)
# associated texture (no naming check.. maybe tune more)
# tex and texslot are created even if img not found
if imgname in bpy.data.textures and ( img == False or bpy.data.textures[imgname].image == img ) :
tex = bpy.data.textures[imgname]
else :
tex = bpy.data.textures.new(name=imgname,type='IMAGE')
if img : tex.image = img
tex.use_alpha = transp
tex.use_preview_alpha = transp
# then create texture slot
texslot = mat.texture_slots.create(index=0)
texslot.texture = tex
texslot.texture_coords = 'UV'
texslot.uv_layer = 'UV0'
texslot.use_map_alpha = transp
texslot.alpha_factor = alpha
# create remaining dummy mat
for slotid, matname in enumerate(matslots) :
if matname not in bpy.data.materials :
mat = bel.material.new(matname,naming_method)
matslots[slotid] = mat.name
if debug : print('matslots : %s'%matslots)
# VERTICES GROUPS/WEIGHTS
elif tokentype == 'skinweights' :
groupname, nverts, vindices, vweights, mat = readToken(childname)
groupname = namelookup[groupname]
if debug :
print('vgroup : %s (%s/%s verts) %s'%(groupname,len(vindices),len(vweights),'bone' if groupname in tokens else ''))
#if debug : print('matrix : %s\n%s'%(type(mat),mat))
groupnames.append(groupname)
groupindices.append(vindices)
groupweights.append(vweights)
ob = bel.mesh.write(obname,tokenname,
verts, edges, faces,
matslots, facemats, uvs,
groupnames, groupindices, groupweights,
use_smooth_groups,
naming_method)
return ob
## here we go
file = os.path.basename(filepath)
print('\nimporting %s...'%file)
start = time.clock()
path = os.path.dirname(filepath)
filepath = os.fsencode(filepath)
data = open(filepath,'rb')
header = dXheader(data)
if global_matrix is None:
global_matrix = mathutils.Matrix()
if header :
minor, major, format, accuracy = header
if show_geninfo :
print('\n%s directX header'%file)
print(' minor : %s'%(minor))
print(' major : %s'%(major))
print(' format : %s'%(format))
print(' floats are %s bits'%(accuracy))
if format in [ 'txt' ] : #, 'bin' ] :
## FILE READ : STEP 1 : STRUCTURE
if show_geninfo : print('\nBuilding internal .x tree')
t = time.clock()
tokens, templates, tokentypes = dXtree(data,quickmode)
readstruct_time = time.clock()-t
if show_geninfo : print('builded tree in %.2f\''%(readstruct_time)) # ,end='\r')
## populate templates with datas
for tplname in templates :
readTemplate(data,tplname,show_templates)
## DATA TREE CHECK
if show_tree :
print('\nDirectX Data Tree :\n')
walk_dXtree(tokens.keys())
## DATA IMPORTATION
if show_geninfo :
#print(tokens)
print('Root frames :\n %s'%rootTokens)
if parented :
import_dXtree(rootTokens)
else :
for tokenname in tokentypes['mesh'] :
obname = tokens[tokenname]['parent']
# object and mesh naming :
# if parent frame has several meshes : obname = meshname = mesh token name,
# if parent frame has only one mesh : obname = parent frame name, meshname = mesh token name.
if obname :
meshcount = 0
for child in getChilds(obname) :
if tokens[child]['type'] == 'mesh' :
meshcount += 1
if meshcount == 2 :
obname = tokenname
break
else : obname = tokenname
ob = getMesh(obname,tokenname,show_geninfo)
ob.matrix_world = global_matrix
print('done in %.2f\''%(time.clock()-start)) # ,end='\r')
else :
print('only .x files in text format are currently supported')
print('please share your file to make the importer evolve')
return {'FINISHED'}
| [
"admin@irradiate.net"
] | admin@irradiate.net |
ab5a264ea0ecf8ebb13f29fe5e851642a4423712 | f4fea67c838444a53492a9193c9f42dcf6ae6bb6 | /AsFarAsFromLand.py | 13f5337ce3a385360b651f26de8599d3aac5e30b | [] | no_license | sainihimanshu1999/Graph-Solutions | 7641919173c1f878bb807bf5709250f30b9d7804 | 1889ebf7c5b04a945b23efbd14f80c191d4fbd4a | refs/heads/main | 2023-04-26T15:15:26.819831 | 2021-05-30T05:17:27 | 2021-05-30T05:17:27 | 370,977,206 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 643 | py | '''
Matrix is given and we have to find the maximum distance between land and water
'''
from typing import Deque
def maximumDist(self,grid):
m,n = len(grid),len(grid[0])
q = Deque([(i,j) for i in range(m) for j in range(n)])
if len(q) == m*n and len(q) == 0:
return -1
level = 0
while q:
for _ in range(len(q)):
i,j = q.popleft()
for x,y in [(1,0),(-1,0),(0,1),(0,-1)]:
xi,yj = x+i,y+j
if 0<=xi<m and 0<=yj<n and grid[i][j] == 0:
q.append((xi,yj))
grid[xi][yj] = 1
level += 1
return level - 1 | [
"sainihimanshu.1999@gmail.com"
] | sainihimanshu.1999@gmail.com |
ff45cfdca02b8b29eaeeb906efdef59558db6bb6 | 1a114943c92a5db40034470ff31a79bcf8ddfc37 | /python-2/primer/11/Gui.py | e2e2e5d3f1adbfc9a4720b17ec9f89ce7fa8d8dc | [] | no_license | renwl/mylinux | 1924918599efd6766c266231d66b2a7ed6f6cdd1 | 0602fc6d2b0d254a8503e57310f848fc3e1a73b4 | refs/heads/master | 2020-07-10T22:12:03.259349 | 2017-01-02T12:32:04 | 2017-01-02T12:32:04 | 66,467,007 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 367 | py |
from functools import partial
import tkinter
# Build a two-button Tk window; ``partial`` pre-binds the shared parent
# widget and colour options so each button call stays short.
root=tkinter.Tk()
MyButton = partial(tkinter.Button,root,
fg="white",bg="blue")
b1= MyButton(text="button 1")
b2=MyButton(text="button 2")
#qb=MyButton(text="QUIT",bg="red",
# command=root.quit)
# Default pack geometry: the buttons stack vertically.
b1.pack()
b2.pack()
#qb.pack(file=Tkinter.X,expand= True)
root.title("PFAs!")
# Blocks until the window is closed.
root.mainloop()
"wenliang.ren@quanray.com"
] | wenliang.ren@quanray.com |
d9711a46548433f686ed8b66a5edcd838fd2cd43 | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog/optimized_31503.py | ff7f139d663ad522e21166fb64d7c3557099fd21 | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,839 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
# Prefer the modern Marker_Set API; fall back to grabbing the factory
# off the volume path dialog on older Chimera builds.
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
# name -> marker set / surface model registries filled by the code below
marker_sets={}
surf_sets={}
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((516.515, 353.286, 624.114), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_0" not in marker_sets:
s=new_marker_set('Cog2_0')
marker_sets["Cog2_0"]=s
s= marker_sets["Cog2_0"]
mark=s.place_marker((453.29, 365.605, 613.392), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_1" not in marker_sets:
s=new_marker_set('Cog2_1')
marker_sets["Cog2_1"]=s
s= marker_sets["Cog2_1"]
mark=s.place_marker((379.311, 386.632, 582.192), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((474.649, 333.874, 494.488), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((199.558, 460.171, 548.699), (0.89, 0.1, 0.1), 18.4716)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((477.638, 374.76, 610.742), (1, 1, 0), 18.4716)
if "Cog3_0" not in marker_sets:
s=new_marker_set('Cog3_0')
marker_sets["Cog3_0"]=s
s= marker_sets["Cog3_0"]
mark=s.place_marker((478.695, 375.411, 610.954), (1, 1, 0.2), 17.1475)
if "Cog3_1" not in marker_sets:
s=new_marker_set('Cog3_1')
marker_sets["Cog3_1"]=s
s= marker_sets["Cog3_1"]
mark=s.place_marker((504.186, 386.177, 606.427), (1, 1, 0.2), 17.1475)
if "Cog3_2" not in marker_sets:
s=new_marker_set('Cog3_2')
marker_sets["Cog3_2"]=s
s= marker_sets["Cog3_2"]
mark=s.place_marker((497.267, 410.057, 593.334), (1, 1, 0.2), 17.1475)
if "Cog3_3" not in marker_sets:
s=new_marker_set('Cog3_3')
marker_sets["Cog3_3"]=s
s= marker_sets["Cog3_3"]
mark=s.place_marker((512.667, 421.376, 572.671), (1, 1, 0.2), 17.1475)
if "Cog3_4" not in marker_sets:
s=new_marker_set('Cog3_4')
marker_sets["Cog3_4"]=s
s= marker_sets["Cog3_4"]
mark=s.place_marker((521.663, 444.545, 559.654), (1, 1, 0.2), 17.1475)
if "Cog3_5" not in marker_sets:
s=new_marker_set('Cog3_5')
marker_sets["Cog3_5"]=s
s= marker_sets["Cog3_5"]
mark=s.place_marker((525.381, 450.989, 586.744), (1, 1, 0.2), 17.1475)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((491.201, 359.425, 630.584), (1, 1, 0.4), 18.4716)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((558.398, 544.738, 549.374), (1, 1, 0.4), 18.4716)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((356.771, 558.69, 551.532), (0, 0, 0.8), 18.4716)
if "Cog4_0" not in marker_sets:
s=new_marker_set('Cog4_0')
marker_sets["Cog4_0"]=s
s= marker_sets["Cog4_0"]
mark=s.place_marker((356.771, 558.69, 551.532), (0, 0, 0.8), 17.1475)
if "Cog4_1" not in marker_sets:
s=new_marker_set('Cog4_1')
marker_sets["Cog4_1"]=s
s= marker_sets["Cog4_1"]
mark=s.place_marker((373.429, 538.402, 563.08), (0, 0, 0.8), 17.1475)
if "Cog4_2" not in marker_sets:
s=new_marker_set('Cog4_2')
marker_sets["Cog4_2"]=s
s= marker_sets["Cog4_2"]
mark=s.place_marker((387.236, 517.459, 577.053), (0, 0, 0.8), 17.1475)
if "Cog4_3" not in marker_sets:
s=new_marker_set('Cog4_3')
marker_sets["Cog4_3"]=s
s= marker_sets["Cog4_3"]
mark=s.place_marker((401.813, 497.924, 592.364), (0, 0, 0.8), 17.1475)
if "Cog4_4" not in marker_sets:
s=new_marker_set('Cog4_4')
marker_sets["Cog4_4"]=s
s= marker_sets["Cog4_4"]
mark=s.place_marker((416.36, 474.529, 600.43), (0, 0, 0.8), 17.1475)
if "Cog4_5" not in marker_sets:
s=new_marker_set('Cog4_5')
marker_sets["Cog4_5"]=s
s= marker_sets["Cog4_5"]
mark=s.place_marker((431.147, 450.306, 605.868), (0, 0, 0.8), 17.1475)
if "Cog4_6" not in marker_sets:
s=new_marker_set('Cog4_6')
marker_sets["Cog4_6"]=s
s= marker_sets["Cog4_6"]
mark=s.place_marker((445.035, 425.833, 612.608), (0, 0, 0.8), 17.1475)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((459.774, 630.675, 458.6), (0, 0, 0.8), 18.4716)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((412.81, 212.306, 760.12), (0, 0, 0.8), 18.4716)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((392.476, 414.46, 618.453), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_0" not in marker_sets:
s=new_marker_set('Cog5_0')
marker_sets["Cog5_0"]=s
s= marker_sets["Cog5_0"]
mark=s.place_marker((392.476, 414.46, 618.453), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_1" not in marker_sets:
s=new_marker_set('Cog5_1')
marker_sets["Cog5_1"]=s
s= marker_sets["Cog5_1"]
mark=s.place_marker((390.295, 417.288, 589.325), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_2" not in marker_sets:
s=new_marker_set('Cog5_2')
marker_sets["Cog5_2"]=s
s= marker_sets["Cog5_2"]
mark=s.place_marker((392.883, 407.143, 561.467), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_3" not in marker_sets:
s=new_marker_set('Cog5_3')
marker_sets["Cog5_3"]=s
s= marker_sets["Cog5_3"]
mark=s.place_marker((395.648, 378.562, 553.791), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((504.098, 319.357, 578.233), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((282.985, 423.599, 527.262), (0.3, 0.3, 0.3), 18.4716)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((464.894, 353.856, 590.012), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_0" not in marker_sets:
s=new_marker_set('Cog6_0')
marker_sets["Cog6_0"]=s
s= marker_sets["Cog6_0"]
mark=s.place_marker((464.943, 353.775, 589.878), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_1" not in marker_sets:
s=new_marker_set('Cog6_1')
marker_sets["Cog6_1"]=s
s= marker_sets["Cog6_1"]
mark=s.place_marker((458.642, 380.1, 582.792), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_2" not in marker_sets:
s=new_marker_set('Cog6_2')
marker_sets["Cog6_2"]=s
s= marker_sets["Cog6_2"]
mark=s.place_marker((441.285, 400.86, 590.213), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_3" not in marker_sets:
s=new_marker_set('Cog6_3')
marker_sets["Cog6_3"]=s
s= marker_sets["Cog6_3"]
mark=s.place_marker((459.365, 419.927, 579.912), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_4" not in marker_sets:
s=new_marker_set('Cog6_4')
marker_sets["Cog6_4"]=s
s= marker_sets["Cog6_4"]
mark=s.place_marker((474.857, 436.622, 596.116), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_5" not in marker_sets:
s=new_marker_set('Cog6_5')
marker_sets["Cog6_5"]=s
s= marker_sets["Cog6_5"]
mark=s.place_marker((491.447, 457.207, 606.106), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_6" not in marker_sets:
s=new_marker_set('Cog6_6')
marker_sets["Cog6_6"]=s
s= marker_sets["Cog6_6"]
mark=s.place_marker((516.454, 455.052, 618.336), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((455.654, 418.671, 666.721), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((576.429, 489.935, 565.206), (0.21, 0.49, 0.72), 18.4716)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((425.872, 383.54, 663.385), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_0" not in marker_sets:
s=new_marker_set('Cog7_0')
marker_sets["Cog7_0"]=s
s= marker_sets["Cog7_0"]
mark=s.place_marker((424.707, 376.574, 638.722), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_1" not in marker_sets:
s=new_marker_set('Cog7_1')
marker_sets["Cog7_1"]=s
s= marker_sets["Cog7_1"]
mark=s.place_marker((419.557, 362.496, 583.143), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_2" not in marker_sets:
s=new_marker_set('Cog7_2')
marker_sets["Cog7_2"]=s
s= marker_sets["Cog7_2"]
mark=s.place_marker((414.435, 348.228, 527.719), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((458.503, 283.733, 550.164), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((374.712, 376.186, 434.814), (0.7, 0.7, 0.7), 18.4716)
if "Cog8_0" not in marker_sets:
s=new_marker_set('Cog8_0')
marker_sets["Cog8_0"]=s
s= marker_sets["Cog8_0"]
mark=s.place_marker((428.437, 453.734, 640.785), (1, 0.5, 0), 17.1475)
if "Cog8_1" not in marker_sets:
s=new_marker_set('Cog8_1')
marker_sets["Cog8_1"]=s
s= marker_sets["Cog8_1"]
mark=s.place_marker((419.133, 428.18, 633.059), (1, 0.5, 0), 17.1475)
if "Cog8_2" not in marker_sets:
s=new_marker_set('Cog8_2')
marker_sets["Cog8_2"]=s
s= marker_sets["Cog8_2"]
mark=s.place_marker((422.281, 402.657, 618.837), (1, 0.5, 0), 17.1475)
if "Cog8_3" not in marker_sets:
s=new_marker_set('Cog8_3')
marker_sets["Cog8_3"]=s
s= marker_sets["Cog8_3"]
mark=s.place_marker((402.283, 382.588, 612.055), (1, 0.5, 0), 17.1475)
if "Cog8_4" not in marker_sets:
s=new_marker_set('Cog8_4')
marker_sets["Cog8_4"]=s
s= marker_sets["Cog8_4"]
mark=s.place_marker((378.127, 366.51, 609.173), (1, 0.5, 0), 17.1475)
if "Cog8_5" not in marker_sets:
s=new_marker_set('Cog8_5')
marker_sets["Cog8_5"]=s
s= marker_sets["Cog8_5"]
mark=s.place_marker((353.388, 356.379, 597.313), (1, 0.5, 0), 17.1475)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((429.81, 359.319, 619.343), (1, 0.6, 0.1), 18.4716)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((274.108, 350.68, 573.059), (1, 0.6, 0.1), 18.4716)
# Register every surface model with Chimera (surf_sets is populated
# elsewhere; it stays empty in this generated file).
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
"batxes@gmail.com"
] | batxes@gmail.com |
f46aa1e56e52a41483bda536769f43fee0100aa9 | d144df1ba39aef6d740bfbb438c8f7aaff195cc9 | /distance.py | 62d1b600a0f3e44ddea373755e1a45939964934a | [] | no_license | weizhixiaoyi/text-similarity | 42018aa1a4f21f213eed7e7baa8bc2390c2db945 | aa971a1810f95e4f12a88eefec12bc7269b06cb6 | refs/heads/master | 2021-10-16T16:12:00.876963 | 2019-02-12T03:36:37 | 2019-02-12T03:36:37 | 170,244,426 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,477 | py | import numpy as np
from sklearn.feature_extraction.text import CountVectorizer
def edit_distance(str1, str2):
    """edit distance.

    Levenshtein distance via a full dynamic-programming table.

    Parameters
    ----------
    str1: string1
    str2: string2

    Returns
    -------
    distance
    """
    rows, cols = len(str1), len(str2)
    # Degenerate cases keep the historical return type (plain int).
    if rows == 0:
        return cols
    if cols == 0:
        return rows
    table = [[0] * (cols + 1) for _ in range(rows + 1)]
    for r in range(1, rows + 1):
        table[r][0] = r
    for c in range(1, cols + 1):
        table[0][c] = c
    for r, ch1 in enumerate(str1, 1):
        for c, ch2 in enumerate(str2, 1):
            if ch1 == ch2:
                table[r][c] = table[r - 1][c - 1]
            else:
                # substitution, deletion, insertion
                table[r][c] = 1 + min(table[r - 1][c - 1],
                                      table[r - 1][c],
                                      table[r][c - 1])
    return float(table[rows][cols])
def euclidean_distance(str1, str2):
    """euclidean distance.

    Euclidean distance between the character-count vectors of the two
    strings.  Reimplemented with ``collections.Counter`` instead of
    numpy + sklearn's CountVectorizer: the vocabulary order is
    irrelevant to the distance, so the result is identical while the
    heavy dependencies are dropped.  As before, characters are
    lower-cased and whitespace is ignored (CountVectorizer's
    preprocessor/tokenizer did both).

    Parameters
    ----------
    str1: string1
    str2: string2

    Returns
    -------
    distance (float)
    """
    from collections import Counter
    c1 = Counter(ch for ch in str1.lower() if not ch.isspace())
    c2 = Counter(ch for ch in str2.lower() if not ch.isspace())
    # union of characters plays the role of the fitted vocabulary
    diff_sq = sum((c1[ch] - c2[ch]) ** 2 for ch in set(c1) | set(c2))
    return float(diff_sq ** 0.5)
def manhattan_distance(str1, str2):
    """manhattan distance.

    Manhattan (L1) distance between the character-count vectors of the
    two strings.  Reimplemented with ``collections.Counter`` instead of
    numpy + sklearn's CountVectorizer: the vocabulary order is
    irrelevant to the distance, so the result is identical while the
    heavy dependencies are dropped.  As before, characters are
    lower-cased and whitespace is ignored.

    Parameters
    ----------
    str1: string1
    str2: string2

    Returns
    -------
    distance (float)
    """
    from collections import Counter
    c1 = Counter(ch for ch in str1.lower() if not ch.isspace())
    c2 = Counter(ch for ch in str2.lower() if not ch.isspace())
    return float(sum(abs(c1[ch] - c2[ch]) for ch in set(c1) | set(c2)))
def jaro_distance(str1, str2):
    """jaro distance.

    Jaro similarity in [0, 1] between the two strings, computed with
    this module's historical matching rules (first-match-wins inside
    the window, half-transposition counting against the insert-built
    long-match list).

    Parameters
    ----------
    str1: string1
    str2: string2

    Returns
    -------
    distance
    """
    # Orient so the shorter string is scanned against the longer one.
    if len(str1) > len(str2):
        longer, shorter = str1, str2
    else:
        longer, shorter = str2, str1
    window = (len(longer) // 2) - 1
    mapped = [-1] * len(shorter)
    long_hits, short_hits = [], []
    matches = 0
    for i in range(len(shorter)):
        lo = max(0, i - window)
        hi = min(len(longer), i + window + 1)
        for j in range(lo, hi):
            if shorter[i] == longer[j]:
                matches += 1
                mapped[i] = j
                short_hits.append(shorter[i])
                long_hits.insert(j, shorter[i])
                break
    half_transpositions = 0
    for i in range(len(short_hits)):
        if mapped[i] != i and short_hits[i] != long_hits[i]:
            half_transpositions += 1
    if matches == 0:
        return 0.0
    return float((matches / len(longer) +
                  matches / len(shorter) +
                  (matches - half_transpositions // 2) / matches) / 3)
def jaro_winkler_distance(str1, str2):
    """Jaro-Winkler similarity: the Jaro score boosted by the common
    prefix length (capped at 4 characters) when the Jaro score exceeds
    0.7.

    Fix over the previous version: the prefix scan is bounded by the
    length of BOTH strings, so inputs shorter than 4 characters no
    longer raise IndexError.
    """
    jaro = jaro_distance(str1, str2)
    prefix = 0
    for i in range(min(4, len(str1), len(str2))):
        if str1[i] == str2[i]:
            prefix += 1
        else:
            break
    if jaro > 0.7:
        return float(jaro + prefix * 0.1 * (1 - jaro))
    return float(jaro)
if __name__ == '__main__':
# Ad-hoc smoke test: compare two nearly identical Chinese sentences
# and print their Jaro-Winkler score.
str1 = '你妈妈喊你回家吃饭哦,回家罗回家罗'
str2 = '你妈妈叫你回家吃饭啦,回家罗回家罗'
ans = jaro_winkler_distance(str1, str2)
print(ans)
| [
"zhenhai.gl@gmail.com"
] | zhenhai.gl@gmail.com |
ce0540791bbf93429af66092b7c2e327d2000c1d | 41f2aae52fee7097094473d896346e113c7467f0 | /torch/hub.py | 3dc98f8b216e0d7840df0eef6a685bb084be1e4d | [
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | rdzhabarov/pytorch | a98662f260f19667756d75d4f21f047896ad9836 | 70caa2efe261c511ef7976abcdb6710574db2309 | refs/heads/master | 2020-05-26T16:18:39.345984 | 2019-05-23T19:46:08 | 2019-05-23T19:51:57 | 188,300,369 | 0 | 0 | NOASSERTION | 2019-05-23T20:16:17 | 2019-05-23T20:16:17 | null | UTF-8 | Python | false | false | 15,708 | py | from __future__ import absolute_import, division, print_function, unicode_literals
import errno
import hashlib
import os
import re
import shutil
import sys
import tempfile
import torch
import warnings
import zipfile
if sys.version_info[0] == 2:
from urlparse import urlparse
from urllib2 import urlopen # noqa f811
else:
from urllib.request import urlopen
from urllib.parse import urlparse # noqa: F401
try:
    from tqdm import tqdm
except ImportError:
    # fake tqdm if it's not installed: a minimal stand-in exposing the same
    # context-manager + update() API used below, reporting progress on stderr.
    class tqdm(object):
        def __init__(self, total=None, disable=False):
            # total: expected byte count; None means report raw bytes read.
            self.total = total
            self.disable = disable
            self.n = 0
        def update(self, n):
            # Advance the counter by n and redraw the progress line in place.
            if self.disable:
                return
            self.n += n
            if self.total is None:
                sys.stderr.write("\r{0:.1f} bytes".format(self.n))
            else:
                sys.stderr.write("\r{0:.1f}%".format(100 * self.n / float(self.total)))
            sys.stderr.flush()
        def __enter__(self):
            return self
        def __exit__(self, exc_type, exc_val, exc_tb):
            # Terminate the progress line unless output was disabled.
            if self.disable:
                return
            sys.stderr.write('\n')
# matches bfd8deac from resnet18-bfd8deac.pth
HASH_REGEX = re.compile(r'-([a-f0-9]*)\.')
MASTER_BRANCH = 'master'
# Environment variables / defaults used to locate the torch cache directory.
ENV_TORCH_HOME = 'TORCH_HOME'
ENV_XDG_CACHE_HOME = 'XDG_CACHE_HOME'
DEFAULT_CACHE_DIR = '~/.cache'
# Names hub repositories use for their dependency list and entrypoint module.
VAR_DEPENDENCY = 'dependencies'
MODULE_HUBCONF = 'hubconf.py'
READ_DATA_CHUNK = 8192
# Resolved lazily by _setup_hubdir(); overridable via set_dir().
hub_dir = None
# Copied from tools/shared/module_loader to be included in torch package
def import_module(name, path):
    """Load and return the module stored at *path* under module name *name*."""
    if sys.version_info >= (3, 5):
        import importlib.util
        spec = importlib.util.spec_from_file_location(name, path)
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        return mod
    if sys.version_info >= (3, 0):
        from importlib.machinery import SourceFileLoader
        return SourceFileLoader(name, path).load_module()
    import imp
    return imp.load_source(name, path)
def _remove_if_exists(path):
if os.path.exists(path):
if os.path.isfile(path):
os.remove(path)
else:
shutil.rmtree(path)
def _git_archive_link(repo_owner, repo_name, branch):
return 'https://github.com/{}/{}/archive/{}.zip'.format(repo_owner, repo_name, branch)
def _download_archive_zip(url, filename):
    """Stream *url* to *filename* on disk in fixed-size chunks."""
    sys.stderr.write('Downloading: \"{}\" to {}\n'.format(url, filename))
    response = urlopen(url)
    with open(filename, 'wb') as out:
        for chunk in iter(lambda: response.read(READ_DATA_CHUNK), b''):
            out.write(chunk)
def _load_attr_from_module(module, func_name):
# Check if callable is defined in the module
if func_name not in dir(module):
return None
return getattr(module, func_name)
def _get_torch_home():
    """Resolve the torch cache root: $TORCH_HOME, else $XDG_CACHE_HOME/torch."""
    cache_root = os.getenv(ENV_XDG_CACHE_HOME, DEFAULT_CACHE_DIR)
    default_home = os.path.join(cache_root, 'torch')
    return os.path.expanduser(os.getenv(ENV_TORCH_HOME, default_home))
def _setup_hubdir():
    """Initialise the module-level hub_dir and ensure it exists on disk."""
    global hub_dir
    # Issue warning to move data if old env is set
    if os.getenv('TORCH_HUB'):
        warnings.warn('TORCH_HUB is deprecated, please use env TORCH_HOME instead')
    if hub_dir is None:
        hub_dir = os.path.join(_get_torch_home(), 'hub')
    if not os.path.exists(hub_dir):
        os.makedirs(hub_dir)
def _parse_repo_info(github):
    """Split an 'owner/repo[:branch]' spec into (owner, repo, branch)."""
    if ':' in github:
        repo_info, branch = github.split(':')
    else:
        repo_info, branch = github, MASTER_BRANCH
    repo_owner, repo_name = repo_info.split('/')
    return repo_owner, repo_name, branch
def _get_cache_or_reload(github, force_reload):
    """Return a local checkout of *github*, downloading a fresh zip unless cached."""
    repo_owner, repo_name, branch = _parse_repo_info(github)
    # GitHub renames folder repo-v1.x.x to repo-1.x.x, and the real folder
    # name is only known after unzipping, so the cache key is a normalised
    # owner_repo_branch directory name.
    repo_dir = os.path.join(hub_dir, '_'.join([repo_owner, repo_name, branch]))
    if not force_reload and os.path.exists(repo_dir):
        sys.stderr.write('Using cache found in {}\n'.format(repo_dir))
        return repo_dir
    cached_file = os.path.join(hub_dir, branch + '.zip')
    _remove_if_exists(cached_file)
    _download_archive_zip(_git_archive_link(repo_owner, repo_name, branch), cached_file)
    with zipfile.ZipFile(cached_file) as archive:
        # First entry of the archive is the repo's base folder.
        extracted_name = archive.infolist()[0].filename
        extracted_repo = os.path.join(hub_dir, extracted_name)
        _remove_if_exists(extracted_repo)
        # Unzip the code, then rename the base folder to the cache key.
        archive.extractall(hub_dir)
    _remove_if_exists(cached_file)
    _remove_if_exists(repo_dir)
    shutil.move(extracted_repo, repo_dir)
    return repo_dir
def _check_module_exists(name):
    """Return True if importing *name* would succeed, without importing it."""
    if sys.version_info >= (3, 4):
        import importlib.util
        return importlib.util.find_spec(name) is not None
    elif sys.version_info >= (3, 3):
        # Special case for python3.3
        # NOTE(review): `import importlib.find_loader` names a submodule that
        # does not exist (find_loader is a function attribute of importlib),
        # so this 3.3-only branch would itself raise ImportError -- confirm
        # whether 3.3 support still matters before fixing.
        import importlib.find_loader
        return importlib.find_loader(name) is not None
    else:
        # NB: Python2.7 imp.find_module() doesn't respect PEP 302,
        # it cannot find a package installed as .egg(zip) file.
        # Here we use workaround from:
        # https://stackoverflow.com/questions/28962344/imp-find-module-which-supports-zipped-eggs?lq=1
        # Also imp doesn't handle hierarchical module names (names contains dots).
        try:
            # 1. Try imp.find_module(), which searches sys.path, but does
            # not respect PEP 302 import hooks.
            import imp
            result = imp.find_module(name)
            if result:
                return True
        except ImportError:
            pass
        path = sys.path
        for item in path:
            # 2. Scan path for import hooks. sys.path_importer_cache maps
            # path items to optional "importer" objects, that implement
            # find_module() etc.  Note that path must be a subset of
            # sys.path for this to work.
            importer = sys.path_importer_cache.get(item)
            if importer:
                try:
                    result = importer.find_module(name, [item])
                    if result:
                        return True
                except ImportError:
                    pass
        return False
def _check_dependencies(m):
    """Raise RuntimeError naming any hubconf dependencies that are missing."""
    dependencies = _load_attr_from_module(m, VAR_DEPENDENCY)
    if dependencies is None:
        return
    missing = [pkg for pkg in dependencies if not _check_module_exists(pkg)]
    if missing:
        raise RuntimeError('Missing dependencies: {}'.format(', '.join(missing)))
def _load_entry_from_hubconf(m, model):
    """Look up callable *model* in hubconf module *m*, verifying dependencies."""
    if not isinstance(model, str):
        raise ValueError('Invalid input: model should be a string of function name')
    # If a missing dependency is imported at the top level of hubconf, it
    # raises before we get here: we have to load hubconf to learn its
    # dependencies, but importing it needs them.  Python's own ImportError
    # message is good enough for that case.
    _check_dependencies(m)
    func = _load_attr_from_module(m, model)
    # None is not callable, so a single callable() check covers both the
    # missing-attribute and non-callable cases.
    if not callable(func):
        raise RuntimeError('Cannot find callable {} in hubconf'.format(model))
    return func
def set_dir(d):
    r"""
    Point the hub cache at a custom local directory for models & weights.

    When ``set_dir`` is never called, the default location is
    ``$TORCH_HOME/hub``, where ``$TORCH_HOME`` falls back to
    ``$XDG_CACHE_HOME/torch`` and ``$XDG_CACHE_HOME`` (per the X Design
    Group Linux filesystem layout) falls back to ``~/.cache``.

    Args:
        d: path to a local folder to save downloaded models & weights.
    """
    global hub_dir
    hub_dir = d
def list(github, force_reload=False):
    r"""
    List all entrypoints available in `github` hubconf.

    NB: intentionally shadows the ``list`` builtin inside this module; the
    name is part of the public ``torch.hub`` API.

    Args:
        github: Required, a string with format "repo_owner/repo_name[:tag_name]" with an optional
            tag/branch. The default branch is `master` if not specified.
            Example: 'pytorch/vision[:hub]'
        force_reload: Optional, whether to discard the existing cache and force a fresh download.
            Default is `False`.
    Returns:
        entrypoints: a list of available entrypoint names

    Example:
        >>> entrypoints = torch.hub.list('pytorch/vision', force_reload=True)
    """
    # Setup hub_dir to save downloaded files
    _setup_hubdir()

    repo_dir = _get_cache_or_reload(github, force_reload)

    sys.path.insert(0, repo_dir)
    try:
        hub_module = import_module(MODULE_HUBCONF, repo_dir + '/' + MODULE_HUBCONF)
    finally:
        # Fix: restore sys.path even when importing hubconf raises, so a bad
        # repo cannot leave its directory on the import path.
        sys.path.remove(repo_dir)

    # Functions whose names start with '_' are treated as internal helpers.
    entrypoints = [f for f in dir(hub_module) if callable(getattr(hub_module, f)) and not f.startswith('_')]

    return entrypoints
def help(github, model, force_reload=False):
    r"""
    Show the docstring of entrypoint `model`.

    NB: intentionally shadows the ``help`` builtin inside this module; the
    name is part of the public ``torch.hub`` API.

    Args:
        github: Required, a string with format <repo_owner/repo_name[:tag_name]> with an optional
            tag/branch. The default branch is `master` if not specified.
            Example: 'pytorch/vision[:hub]'
        model: Required, a string of entrypoint name defined in repo's hubconf.py
        force_reload: Optional, whether to discard the existing cache and force a fresh download.
            Default is `False`.
    Example:
        >>> print(torch.hub.help('pytorch/vision', 'resnet18', force_reload=True))
    """
    # Setup hub_dir to save downloaded files
    _setup_hubdir()

    repo_dir = _get_cache_or_reload(github, force_reload)

    sys.path.insert(0, repo_dir)
    try:
        hub_module = import_module(MODULE_HUBCONF, repo_dir + '/' + MODULE_HUBCONF)
    finally:
        # Fix: restore sys.path even when importing hubconf raises.
        sys.path.remove(repo_dir)

    entry = _load_entry_from_hubconf(hub_module, model)

    return entry.__doc__
# Ideally this should be `def load(github, model, *args, force_reload=False, **kwargs):`,
# but Python2 complains syntax error for it. We have to skip force_reload in function
# signature here but detect it in kwargs instead.
# TODO: fix it after Python2 EOL
def load(github, model, *args, **kwargs):
    r"""
    Load a model from a github repo, with pretrained weights.

    Args:
        github: Required, a string with format "repo_owner/repo_name[:tag_name]" with an optional
            tag/branch. The default branch is `master` if not specified.
            Example: 'pytorch/vision[:hub]'
        model: Required, a string of entrypoint name defined in repo's hubconf.py
        *args: Optional, the corresponding args for callable `model`.
        force_reload: Optional, whether to force a fresh download of github repo unconditionally.
            Default is `False`.
        **kwargs: Optional, the corresponding kwargs for callable `model`.

    Returns:
        a single model with corresponding pretrained weights.

    Example:
        >>> model = torch.hub.load('pytorch/vision', 'resnet50', pretrained=True)
    """
    # Setup hub_dir to save downloaded files
    _setup_hubdir()

    # A single pop() replaces the original get-then-pop pair.
    force_reload = kwargs.pop('force_reload', False)

    repo_dir = _get_cache_or_reload(github, force_reload)

    sys.path.insert(0, repo_dir)
    try:
        hub_module = import_module(MODULE_HUBCONF, repo_dir + '/' + MODULE_HUBCONF)
        entry = _load_entry_from_hubconf(hub_module, model)
        model = entry(*args, **kwargs)
    finally:
        # Fix: restore sys.path even when importing hubconf or running the
        # entrypoint raises, instead of leaking the repo dir onto the path.
        sys.path.remove(repo_dir)

    return model
def _download_url_to_file(url, dst, hash_prefix, progress):
    """Stream *url* into *dst*, optionally verifying a SHA256 prefix.

    Args:
        url: source URL.
        dst: final destination path.
        hash_prefix: if not None, the sha256 hexdigest of the downloaded
            bytes must start with this string or RuntimeError is raised.
        progress: whether to display a tqdm progress bar on stderr.
    """
    file_size = None
    u = urlopen(url)
    meta = u.info()
    # Python 2 exposes getheaders(); Python 3's email.message uses get_all().
    if hasattr(meta, 'getheaders'):
        content_length = meta.getheaders("Content-Length")
    else:
        content_length = meta.get_all("Content-Length")
    if content_length is not None and len(content_length) > 0:
        file_size = int(content_length[0])

    # Fix: create the temp file next to dst so the final shutil.move is a
    # same-filesystem rename rather than a cross-device copy, which is not
    # atomic and could expose a partially written, hash-unverified file.
    f = tempfile.NamedTemporaryFile(delete=False, dir=os.path.dirname(dst))
    try:
        if hash_prefix is not None:
            sha256 = hashlib.sha256()
        with tqdm(total=file_size, disable=not progress) as pbar:
            while True:
                buffer = u.read(8192)
                if len(buffer) == 0:
                    break
                f.write(buffer)
                if hash_prefix is not None:
                    sha256.update(buffer)
                pbar.update(len(buffer))

        f.close()
        if hash_prefix is not None:
            digest = sha256.hexdigest()
            if digest[:len(hash_prefix)] != hash_prefix:
                raise RuntimeError('invalid hash value (expected "{}", got "{}")'
                                   .format(hash_prefix, digest))
        shutil.move(f.name, dst)
    finally:
        # Closing twice is harmless; remove the temp file on any failure path.
        f.close()
        if os.path.exists(f.name):
            os.remove(f.name)
def load_state_dict_from_url(url, model_dir=None, map_location=None, progress=True):
    r"""Loads the Torch serialized object at the given URL.
    If the object is already present in `model_dir`, it's deserialized and
    returned. The filename part of the URL should follow the naming convention
    ``filename-<sha256>.ext`` where ``<sha256>`` is the first eight or more
    digits of the SHA256 hash of the contents of the file. The hash is used to
    ensure unique names and to verify the contents of the file.
    The default value of `model_dir` is ``$TORCH_HOME/checkpoints`` where
    environment variable ``$TORCH_HOME`` defaults to ``$XDG_CACHE_HOME/torch``.
    ``$XDG_CACHE_HOME`` follows the X Design Group specification of the Linux
    filesytem layout, with a default value ``~/.cache`` if not set.
    Args:
        url (string): URL of the object to download
        model_dir (string, optional): directory in which to save the object
        map_location (optional): a function or a dict specifying how to remap storage locations (see torch.load)
        progress (bool, optional): whether or not to display a progress bar to stderr
    Example:
        >>> state_dict = torch.hub.load_state_dict_from_url('https://s3.amazonaws.com/pytorch/models/resnet18-5c106cde.pth')
    """
    # Issue warning to move data if old env is set
    if os.getenv('TORCH_MODEL_ZOO'):
        warnings.warn('TORCH_MODEL_ZOO is deprecated, please use env TORCH_HOME instead')
    if model_dir is None:
        torch_home = _get_torch_home()
        model_dir = os.path.join(torch_home, 'checkpoints')
    try:
        os.makedirs(model_dir)
    except OSError as e:
        if e.errno == errno.EEXIST:
            # Directory already exists, ignore.
            pass
        else:
            # Unexpected OSError, re-raise.
            raise
    parts = urlparse(url)
    filename = os.path.basename(parts.path)
    cached_file = os.path.join(model_dir, filename)
    if not os.path.exists(cached_file):
        sys.stderr.write('Downloading: "{}" to {}\n'.format(url, cached_file))
        # NOTE(review): HASH_REGEX.search() returns None when the filename has
        # no '-<hash>.' suffix, so .group(1) would raise AttributeError --
        # confirm every checkpoint URL follows the naming convention.
        hash_prefix = HASH_REGEX.search(filename).group(1)
        _download_url_to_file(url, cached_file, hash_prefix, progress=progress)
    return torch.load(cached_file, map_location=map_location)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
0d4679b918a22de8da38164caf9e8789811ac752 | 3279cea18d23dad027fa5b64c4170926df98f2f4 | /bakerydemo/settings/base.py | af39a915e588b4aadb5b9fe0d5c6651caeb650ee | [
"LicenseRef-scancode-public-domain"
] | permissive | torchbox/dit_directory_cms_poc | 1ed53649f59640eb2d4d9372397a1e8a71f2e44e | 87c54dd35364c66c6f70481148ee2386c784f410 | refs/heads/master | 2020-04-27T07:36:48.299201 | 2019-04-17T16:43:08 | 2019-04-17T16:43:08 | 174,141,444 | 2 | 0 | null | 2019-03-18T12:37:56 | 2019-03-06T12:31:35 | Python | UTF-8 | Python | false | false | 4,639 | py | """
Django settings for temp project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(PROJECT_DIR, ...)
PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_DIR = os.path.dirname(PROJECT_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): hard-coded secret key checked into source -- acceptable for a
# demo project, but it must be overridden from the environment in any real
# deployment.
SECRET_KEY = "c6u0-9c!7nilj_ysatsda0(f@e_2mws2f!6m0n^o*4#*q#kzp)"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list: Django only serves localhost requests while DEBUG is True.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
"bakerydemo.base",
"bakerydemo.blog",
"bakerydemo.breads",
"bakerydemo.locations",
"bakerydemo.search",
"richtext_poc",
"wagtail.contrib.search_promotions",
"wagtail.contrib.forms",
"wagtail.contrib.redirects",
"wagtail.embeds",
"wagtail.sites",
"wagtail.users",
"wagtail.snippets",
"wagtail.documents",
"wagtail.images",
"wagtail.search",
"wagtail.admin",
"wagtail.api.v2",
"wagtail.contrib.modeladmin",
"wagtail.contrib.routable_page",
"wagtail.core",
"rest_framework",
"modelcluster",
"taggit",
"wagtailfontawesome",
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.sitemaps",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"wagtail.core.middleware.SiteMiddleware",
"wagtail.contrib.redirects.middleware.RedirectMiddleware",
]
ROOT_URLCONF = "bakerydemo.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": ["bakerydemo/templates"],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
]
},
}
]
WSGI_APPLICATION = "bakerydemo.wsgi.application"
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "bakerydemodb"),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATICFILES_FINDERS = [
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
]
STATICFILES_DIRS = [os.path.join(PROJECT_DIR, "static")]
STATIC_ROOT = os.path.join(PROJECT_DIR, "collect_static")
STATIC_URL = "/static/"
MEDIA_ROOT = os.path.join(PROJECT_DIR, "media")
MEDIA_URL = "/media/"
# Override in local settings or replace with your own key. Please don't use our demo key in production!
GOOGLE_MAP_API_KEY = "AIzaSyD31CT9P9KxvNUJOwDq2kcFEIG8ADgaFgw"  # NOTE(review): demo key committed to source; replace before production use.
# Use Elasticsearch as the search backend for extra performance and better search results
WAGTAILSEARCH_BACKENDS = {
"default": {"BACKEND": "wagtail.search.backends.db", "INDEX": "bakerydemo"}
}
# Wagtail settings
WAGTAIL_SITE_NAME = "bakerydemo"
| [
"thibaudcolas@gmail.com"
] | thibaudcolas@gmail.com |
947f76df7c719a493f1c1c0a49e64fc6938efde5 | 176899f934288b6e742146282974cf1de4bf2441 | /scripts/rename.py | 97e74f41310c0d66471402ae44d11d288b7bfcec | [
"Apache-2.0"
] | permissive | PatrickEGorman/scrapi | 565e9c7bf2ff5e1fddbab8f691a287be9e5165d0 | 6bb5a4952a34c7507681e206457c78730c28c2e8 | refs/heads/develop | 2020-12-06T20:36:58.590067 | 2015-05-29T17:02:10 | 2015-05-29T17:02:10 | 36,506,821 | 1 | 0 | null | 2015-05-29T13:45:22 | 2015-05-29T13:45:21 | null | UTF-8 | Python | false | false | 1,630 | py | import logging
from scripts.util import documents
from scrapi import settings
from scrapi.linter import RawDocument
from scrapi.processing.elasticsearch import es
from scrapi.tasks import normalize, process_normalized, process_raw
logger = logging.getLogger(__name__)
def rename(source, target, dry=True):
    """Re-harvest every raw document from *source* under the *target* name.

    Each document is re-processed (raw + normalized) under the new source
    name and, when not a dry run, deleted from the old source's
    Elasticsearch indices.

    Args:
        source: existing source name to read documents from.
        target: new source name to write documents under.
        dry: when True, iterate and log without writing or deleting anything.
    """
    assert source != target, "Can't rename {} to {}, names are the same".format(source, target)
    count = 0
    exceptions = []
    for doc in documents(source):
        count += 1
        try:
            raw = RawDocument({
                'doc': doc.doc,
                'docID': doc.docID,
                'source': target,
                'filetype': doc.filetype,
                'timestamps': doc.timestamps,
                'versions': doc.versions
            })
            if not dry:
                process_raw(raw)
                process_normalized(normalize(raw, raw['source']), raw)
                logger.info('Processed document from {} with id {}'.format(source, raw['docID']))
        except Exception as e:
            logger.exception(e)
            exceptions.append(e)
        else:
            if not dry:
                es.delete(index=settings.ELASTIC_INDEX, doc_type=source, id=raw['docID'], ignore=[404])
                es.delete(index='share_v1', doc_type=source, id=raw['docID'], ignore=[404])
                logger.info('Deleted document from {} with id {}'.format(source, raw['docID']))
    if dry:
        logger.info('Dry run complete')
    for ex in exceptions:
        # Fix: the original logged `e`, which Python 3 unbinds after the
        # `except` block, raising NameError; log the saved exception instead.
        logger.exception(ex)
    logger.info('{} documents processed, with {} exceptions'.format(count, len(exceptions)))
| [
"fabian@fabianism.us"
] | fabian@fabianism.us |
a32ecc9f82723eaf976ef6d4146540243a1679b8 | cda0bf30efa432eaa22278befd0b145801b53027 | /tests/unit/core/parse/test_rollseries.py | a2a9d336206d9413b3f4f879df2cf2e4d8d82e18 | [] | no_license | pastly/craps-dice-control | e863aaa1bee104ddd403ce45a7409a48fc7e9c65 | f6f69c9220dffd5f7e2ef07c929b15b4a73bdd13 | refs/heads/master | 2020-04-26T15:44:18.346970 | 2019-10-24T01:00:04 | 2019-10-24T01:00:04 | 173,655,708 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,816 | py | import io
import json
import pytest
from cdc.core.parse import rollseries as rs
from cdc.lib.rollevent import RollEvent
def assert_unreached(msg=None):
    """Fail the current test; a truthy *msg* overrides the default message."""
    assert False, (msg if msg else "Unreachable code path was reached")
def event_gen_from_str(s, starting_point=None):
    """Parse a roll-series string into a generator of roll events."""
    stream = io.StringIO(s)
    dice_pairs = rs.roll_series_stream_to_dice_pairs(stream)
    return rs.dice_pairs_gen_to_events(dice_pairs, starting_point=starting_point)
def assert_dice_event(event, type_, dice, args):
    # Assert an event's type, dice pair, derived total, and extra args.
    assert event.type == type_
    assert event.dice == dice
    assert event.value == sum(dice)
    assert event.args == args
# --- Happy-path parsing: rolls, naturals, craps, points, comments, blanks ---
def test_simple_roll():
    event_gen = event_gen_from_str("11", starting_point=4)
    evs = list(event_gen)
    assert len(evs) == 1
    assert_dice_event(evs[0], 'roll', (1, 1), {})
def test_simple_natural():
    event_gen = event_gen_from_str("3456")
    evs = list(event_gen)
    assert len(evs) == 2
    for ev, dice in [(evs[0], (3, 4)), (evs[1], (5, 6))]:
        assert_dice_event(ev, 'natural', dice, {})
def test_simple_craps():
    event_gen = event_gen_from_str("111266")
    evs = list(event_gen)
    assert len(evs) == 3
    for ev, dice in [(evs[0], (1, 1)), (evs[1], (1, 2)), (evs[2], (6, 6))]:
        assert_dice_event(ev, 'craps', dice, {})
def test_simple_point_established():
    event_gen = event_gen_from_str("44")
    ev = list(event_gen)[0]
    assert_dice_event(ev, 'point', (4, 4), {
        'is_established': True,
        'is_won': False,
        'is_lost': False,
        'point_value': 8,
    })
def test_simple_point_won():
    event_gen = event_gen_from_str("4426")
    ev = list(event_gen)[1]
    assert_dice_event(ev, 'point', (2, 6), {
        'is_established': False,
        'is_won': True,
        'is_lost': False,
        'point_value': 8,
    })
def test_simple_point_lost():
    event_gen = event_gen_from_str("4416")
    ev = list(event_gen)[1]
    assert_dice_event(ev, 'point', (1, 6), {
        'is_established': False,
        'is_won': False,
        'is_lost': True,
        'point_value': 8,
    })
def test_simple_comment_1():
    event_gen = event_gen_from_str("1#1\n2")
    ev = list(event_gen)[0]
    assert_dice_event(ev, 'craps', (1, 2), {})
def test_simple_comment_2():
    event_gen = event_gen_from_str("1# 1\n2")
    ev = list(event_gen)[0]
    assert_dice_event(ev, 'craps', (1, 2), {})
def test_simple_comment_3():
    event_gen = event_gen_from_str("1 # \n2")
    ev = list(event_gen)[0]
    assert_dice_event(ev, 'craps', (1, 2), {})
def test_simple_empty_line():
    event_gen = event_gen_from_str("\n\n1\n2")
    ev = list(event_gen)[0]
    assert_dice_event(ev, 'craps', (1, 2), {})
# --- Error handling and JSON stream round-trip ---
def test_simple_bad_die_1():
    event_gen = event_gen_from_str("71")
    with pytest.raises(ValueError) as ex_info:
        list(event_gen)[0]
    assert 'ImpossibleDieValueError' in str(ex_info)
def test_simple_bad_die_2():
    event_gen = event_gen_from_str(".1")
    with pytest.raises(ValueError) as ex_info:
        list(event_gen)[0]
    assert 'ImpossibleDieValueError' in str(ex_info)
def test_simple_no_events_1():
    event_gen = event_gen_from_str("")
    assert not len(list(event_gen))
def test_simple_no_events_2():
    event_gen = event_gen_from_str(" # ")
    assert not len(list(event_gen))
def test_simple_no_events_3():
    event_gen = event_gen_from_str(" \n # 12 ")
    assert not len(list(event_gen))
def test_odd_num_dice():
    event_gen = event_gen_from_str("666")
    try:
        list(event_gen)
    except rs.IncompleteRollSeriesError:
        pass
    else:
        assert_unreached()
def test_impossible_die_value_1():
    event_gen = event_gen_from_str("7")
    try:
        list(event_gen)
    except rs.ImpossibleDieValueError:
        pass
    else:
        assert_unreached()
def test_impossible_die_value_2():
    event_gen = event_gen_from_str("0")
    try:
        list(event_gen)
    except rs.ImpossibleDieValueError:
        pass
    else:
        assert_unreached()
def test_impossible_die_value_nonint():
    event_gen = event_gen_from_str("q")
    try:
        list(event_gen)
    except rs.ImpossibleDieValueError:
        pass
    else:
        assert_unreached()
def test_stream_identity():
    # An event string with all types of events, and all possible flags set on
    # point events
    ev_str = "11343366242616"
    out_fd = io.StringIO()
    expected_events = [_ for _ in event_gen_from_str(ev_str)]
    rs.do_stream(out_fd, event_gen_from_str(ev_str))
    actual_events = []
    for line in filter(None, out_fd.getvalue().split('\n')):
        actual_events.append(RollEvent.from_dict(json.loads(line)))
    # NOTE(review): zip() stops at the shorter sequence, so a missing trailing
    # event would not fail this test -- consider asserting equal lengths too.
    for a, b in zip(expected_events, actual_events):
        assert a == b
| [
"sirmatt@ksu.edu"
] | sirmatt@ksu.edu |
b26068fd15185f4dda1e070c55faa0ecf66f918b | f662bd04d2f29ef25bbfd7e768b1e57dfbba4d9f | /apps/plmejoras/migrations/0009_auto_20190319_1051.py | 1d2aae719f33271b26db21ea1dd3a842c8776db7 | [] | no_license | DARKDEYMON/sisevadoc | f59b193688f7eca7c140a03ee414f5d20ada78c7 | 9fc0943200986824a2aab2134fdba5c9f3315798 | refs/heads/master | 2020-03-19T03:27:07.907125 | 2019-12-11T13:30:43 | 2019-12-11T13:30:43 | 135,729,070 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 674 | py | # Generated by Django 2.0.8 on 2019-03-19 14:51
import apps.plmejoras.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('plmejoras', '0008_auto_20190319_1038'),
]
operations = [
migrations.AlterField(
model_name='plan_mejoras',
name='fecha_termino_2',
field=models.DateField(blank=True, null=True, validators=[apps.plmejoras.models.validate_fecha_minima], verbose_name='2.- Fecha limite de cumplimiento a la debilidad: ¿El docente presenta el Plan de asignatura a los estudiantes al inicio de la actividad académica?.(AD-EE-ED)'),
),
]
| [
"darkdeymon04@gmail.com"
] | darkdeymon04@gmail.com |
9dff2c0207a76825d306d6ccde94cbdddb6c3046 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03110/s120799390.py | a85ae752db2895890a6eabb42cd65e8035579610 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | C = 380000.0
ans = 0.0
n = int(input())
for i in range(n):
x, u = input().split()
if u == 'JPY':
ans += int(x)
else:
ans += float(x) * C
print(ans)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
b13da1978e5f3532f9ba976ca7ef9fb82ee3fbd7 | 0f07107b016d2aee64788966b9f0d322ac46b998 | /moya/contextenum.py | 0f14ecc4d22ab2904fcde4c81bb3667ff64ab8a2 | [
"MIT"
] | permissive | fkztw/moya | 35f48cdc5d5723b04c671947099b0b1af1c7cc7a | 78b91d87b4519f91dfdd2b40dab44e72f201a843 | refs/heads/master | 2023-08-09T09:20:21.968908 | 2019-02-03T18:18:54 | 2019-02-03T18:18:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,288 | py | from __future__ import unicode_literals
from __future__ import print_function
from .compat import implements_to_string, string_types, text_type
@implements_to_string
class ContextEnumValue(object):
    """A single value in an enumeration.

    Compares equal to other values of the same enum (by id), to its own
    name string, and to anything int()-convertible equal to its id.
    """
    def __init__(self, enum, enum_id, name, description, group=None):
        self.enum = enum  # owning ContextEnum
        self.id = enum_id  # integer value
        self.name = name  # symbolic name
        self.description = description
        self.group = group
    def __repr__(self):
        return "<enumvalue {}.{} ({})>".format(self.enum.name, self.name, self.id)
    def __hash__(self):
        # NOTE(review): __eq__ below also accepts plain strings/ints, whose
        # hashes differ from hash((id, name)), so "equal" objects may hash
        # differently -- confirm values are never mixed with raw names/ids
        # as dict/set keys.
        return hash((self.id, self.name))
    def __int__(self):
        return self.id
    def __str__(self):
        return self.name
    def __moyadbobject__(self):
        # Stored in the database as the integer id.
        return self.id
    def __moyaconsole__(self, console):
        console(
            "<enumvalue '{}.{}' ({}))>".format(self.enum.name, self.name, self.id),
            bold=True,
            fg="magenta",
        ).nl()
    def __eq__(self, other):
        # Other enum values only compare if they are the same type
        if isinstance(other, ContextEnumValue):
            return self.enum == other.enum and self.id == other.id
        if isinstance(other, string_types):
            return self.name == other
        try:
            return self.id == int(other)
        except ValueError:
            pass
        return False
class ContextEnum(object):
    """An ordered collection of named, numbered enum values.

    Values are retrievable by name string or by anything int()-convertible;
    ids auto-increment from *start* when not given explicitly.
    """
    def __init__(self, name, start=1):
        self.name = name
        self._values = []  # insertion-ordered ContextEnumValue list
        self._label_map = {}  # name -> value
        self._id_map = {}  # id -> value
        self._last_id = start - 1
    def __repr__(self):
        return '<enum "{}">'.format(self.name)
    def __moyaconsole__(self, console):
        console.text(repr(self), fg="green", bold=True)
        table = []
        for value in sorted(self._values, key=int):
            table.append([value.name, value.id, value.description or ""])
        console.table(table, header_row=("name", "id", "description"))
    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 -- confirm enums are never used as dict keys.
        if isinstance(other, ContextEnum):
            return self.name == other.name
        return False
    def add_value(self, name, enum_id=None, description=None, group=None):
        # Auto-assign the next id when none is given.
        if enum_id is None:
            enum_id = self._last_id + 1
        value = ContextEnumValue(self, enum_id, name, description, group=group)
        self._values.append(value)
        self._label_map[value.name] = value
        self._id_map[value.id] = value
        self._last_id = enum_id
        return value
    def __getitem__(self, key):
        # Accept a name string or anything convertible to an int id.
        enum_value = None
        if isinstance(key, string_types):
            enum_value = self._label_map[key]
        else:
            try:
                enum_id = int(key)
            except:
                pass
            else:
                enum_value = self._id_map[enum_id]
        if enum_value is None:
            raise KeyError("no enum value {!r} in {!r}".format(key, self))
        return enum_value
    def __contains__(self, key):
        try:
            self[key]
        except:
            return False
        else:
            return True
    def __iter__(self):
        # Iterate a copy so callers can't mutate internal state mid-loop.
        return iter(self._values[:])
    @property
    def choices(self):
        # (name, label) pairs, e.g. for form fields.
        return [(e.name, e.description or e.name) for e in self]
    @property
    def intchoices(self):
        # (id, label) pairs.
        return [(e.id, e.description or e.name) for e in self]
    def keys(self):
        # Both the int ids and the name strings are valid lookup keys.
        return [int(value) for value in self._values] + [
            text_type(value) for value in self._values
        ]
    def values(self):
        return [self[key] for key in self.keys()]
    def items(self):
        return [(key, self[key]) for key in self.keys()]
if __name__ == "__main__":
    # Ad-hoc demo / smoke test for the enum classes.
    enum = ContextEnum("moya.admin#enum.hobbits")
    enum.add_value("bilbo", description="Bilbo Baggins")
    enum.add_value("sam", description="Sam")
    enum.add_value("isembard", description="Isembard Took")
    from moya.console import Console
    console = Console()
    # NOTE(review): `context` is never defined in this module, so the two
    # console.obj(context, ...) calls below raise NameError when run.
    console.obj(context, enum)
    e = enum["sam"]
    console.obj(context, e)
    print(e)
    print(int(e))
    print(text_type(e))
    print(enum.values())
    print(list(enum))
    print(e == 2)
    print(e == "sam")
    print(e == "bilbo")
    print(e == 3)
    print(list(enum))
| [
"willmcgugan@gmail.com"
] | willmcgugan@gmail.com |
88aace3339a55db26e14395d480d4661d88f9640 | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/containerregistry/azure-containerregistry/tests/asynctestcase.py | 58cee32899c2a5290b294f38c920311a56ffe5ef | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 1,649 | py | # coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from azure.containerregistry.aio import (
# ContainerRepository,
ContainerRegistryClient,
)
from azure.core.credentials import AccessToken
from azure.identity.aio import DefaultAzureCredential
from testcase import ContainerRegistryTestClass
class AsyncFakeTokenCredential(object):
    """Protocol for classes able to provide OAuth tokens.

    Async stand-in used in recorded/playback test runs: always returns the
    same canned, already-expired AccessToken instead of contacting AAD.

    :param str scopes: Lets you specify the type of access needed.
    """
    def __init__(self):
        self.token = AccessToken("YOU SHALL NOT PASS", 0)
    async def get_token(self, *args):
        return self.token
class AsyncContainerRegistryTestClass(ContainerRegistryTestClass):
    # Async variant of the shared registry test base: swaps in aio clients
    # and credentials while reusing the sync base's recording machinery.
    def __init__(self, method_name):
        super(AsyncContainerRegistryTestClass, self).__init__(method_name)
    def get_credential(self):
        # Real credential for live runs, canned token during playback.
        if self.is_live:
            return DefaultAzureCredential()
        return AsyncFakeTokenCredential()
    def create_registry_client(self, endpoint, **kwargs):
        return ContainerRegistryClient(
            endpoint=endpoint,
            credential=self.get_credential(),
            **kwargs,
        )
    def create_container_repository(self, endpoint, name, **kwargs):
        # NOTE(review): ContainerRepository is commented out of the imports at
        # the top of this file, so calling this helper raises NameError --
        # restore the import or remove the helper.
        return ContainerRepository(
            endpoint=endpoint,
            name=name,
            credential=self.get_credential(),
            **kwargs,
        )
    def create_anon_client(self, endpoint, **kwargs):
        # Anonymous (credential-less) client for public registry access.
        return ContainerRegistryClient(endpoint=endpoint, credential=None, **kwargs)
| [
"noreply@github.com"
] | scbedd.noreply@github.com |
52f14de23e662ba3cdfb7f3070411e54602e9706 | defbefa93deb77311ff5f589372a5fafef05d7a3 | /app/simulations/run_single_sim.py | 4729aa545ff64c7580e72ba514a6c5a6ad5e2701 | [] | no_license | mccarvik/poker | 3eeb75cedee2962e33006853c76a852b83cb2ffa | 766be6582cb3b1743995555aa8239c1980178a46 | refs/heads/master | 2021-01-25T04:15:33.306903 | 2017-10-24T02:22:56 | 2017-10-24T02:22:56 | 93,417,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,009 | py | import sys, json, pdb
sys.path.append("/home/ubuntu/workspace/poker")
from app.deck_utils.card import Card
from app.deck_utils.deck import Deck
from app.deck_utils.hand_rules import HandRules
from app.deck_utils.deck_funcs import getCombinations
from app.deck_utils.stats import Single_Stats
def run_simulation(hand, board):
    ''' Enumerate every completion of the board and print outcome stats.

    ``hand`` and ``board`` are lists of (rank, suit) tuples, e.g. ('Q', 'h').
    Each is converted to Card objects, the known cards are removed from a
    fresh deck, and every combination of remaining cards needed to reach a
    7-card hand is scored with HandRules, accumulating into Single_Stats.
    '''
    hand = [Card(h[0],h[1]) for h in hand]
    board = [Card(h[0],h[1]) for h in board]
    deck = Deck()
    # Known cards can no longer appear in a runout.
    deck.removeCards(hand + board)
    # 7 = 2 hole cards + 5 board cards; deal whatever is still missing.
    hand_combs = getCombinations(deck, 7-len(hand+board))
    stats = Single_Stats()
    for hc in hand_combs:
        # print([hc[0]] + hand + board)
        hr = HandRules(list(hc) + hand + board)
        stats.addOutCome(hr._result)
    # print(stats.returnStats())
    stats.printStats()
if __name__ == "__main__":
    # Example: Q-K suited in hearts against a partial spade board.
    hand = [('Q','h'), ('K', 'h')]
    # board = [('4','s'), ('3', 's'), ('2', 's'), ('5', 's')]
    board = [('4','s'), ('3', 's')]
    run_simulation(hand, board)
"mccarviks@gmail.com"
] | mccarviks@gmail.com |
d7c6a4d01b1fff2af18d2e4f3c5c3004f8f30c38 | d067ba62a1956e1abb571368b02e9158c04966b6 | /xmnlp/sentiment/sentiment.py | 3f3a2b8ce576dc8f09fa27eb16e03e5cb09b1969 | [
"MIT"
] | permissive | wheniseeyou/xmnlp | 29c5ea38ffcb91fa99821788f90f1afc8d8c30bd | ad2d3c0b8875cf415c3adffc10926605da7a458b | refs/heads/master | 2020-04-12T15:17:40.957168 | 2018-09-17T08:19:09 | 2018-09-17T08:19:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,627 | py | # !/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
# -------------------------------------------#
# author: sean lee #
# email: xmlee97@gmail.com #
#--------------------------------------------#
"""MIT License
Copyright (c) 2018 Sean
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE."""
import sys
if sys.version_info[0] == 2:
reload(sys)
sys.setdefaultencoding('utf8')
import io
import os
from ..module import Module
from ..utils import safe_input
from ..postag import seg
from math import log, exp
from collections import defaultdict
class NBayes(Module):
    """Multinomial naive-Bayes text classifier.

    State: ``corpus`` maps label -> {word: count}, ``counter`` maps
    label -> total word count under that label, ``total`` is the word
    count summed over all labels.
    """
    # Serialization hints -- presumably consumed by Module's save/load
    # machinery (TODO confirm against xmnlp's Module base class).
    __notsave__ = []
    __onlysave__ = ['counter', 'corpus', 'total']
    def __init__(self):
        self.corpus = {}   # label -> per-word counts
        self.counter = {}  # label -> total words seen for the label
        self.total = 0     # grand total of words across all labels
    def process_data(self, data):
        """Accumulate counts from ``data``, an iterable of (label, words)."""
        for d in data:
            label = d[0]
            doc = d[1]
            if label not in self.corpus:
                self.corpus[label] = defaultdict(int)
                self.counter[label] = 0
            for word in doc:
                self.counter[label] += 1
                self.counter if False else None  # (no-op placeholder removed)
                self.corpus[label][word] += 1
        self.total = sum(self.counter.values())
    def calc_score(self, sent):
        """Return (best_label, probability) for the word list ``sent``.

        ``tmp[k]`` is a log score: log of the label's word-mass prior
        (counter[k]/total) plus sum of log P(word|k); unseen words fall
        back to a count of 1 so the log stays defined.  The second loop
        turns the log scores into a posterior via
        1 / sum_j exp(tmp[j] - tmp[k]) (a softmax); exp overflow is
        treated as probability 0 for that label.
        """
        tmp = {}
        for k in self.corpus:
            tmp[k] = log(self.counter[k]) - log(self.total)
            for word in sent:
                x = float(self.corpus[k].get(word, 1)) / self.counter[k]
                tmp[k] += log(x)
        ret, prob = 0, 0
        for k in self.corpus:
            curr = 0
            try:
                for kk in self.corpus:
                    curr += exp(tmp[kk] - tmp[k])
                curr = 1.0 / curr
            except OverflowError:
                curr = 0.0
            if curr > prob:
                ret, prob = k, curr
        return (ret, prob)
class Sentiment(NBayes):
    """Positive/negative sentiment classifier built on NBayes.

    Training inputs hold one document per line; lines are tokenized with
    ``seg`` and labelled 'pos' / 'neg' according to which input they came
    from.
    """
    def filter_stopword(self, words, stopword=[]):
        """Drop tokens present in ``stopword`` (no-op when it is empty).

        The mutable default is safe here because the list is only read,
        never mutated.
        """
        if len(stopword) == 0:
            return words
        ret = []
        for word in words:
            if word not in stopword:
                ret.append(word)
        return ret
    def load_data(self, posfname, negfname):
        """Read and tokenize the two corpora.

        Each argument may be a single file or a directory (leaf
        subdirectories are walked for their files).  Returns
        (pos_docs, neg_docs), each a list of token lists.
        """
        def get_file(path):
            # Yield the path itself, or -- for a directory -- every file
            # found in its leaf subdirectories.
            if os.path.isdir(path):
                for root, dirs, files in os.walk(path):
                    if len(dirs) == 0:
                        for f in files:
                            yield os.sep.join([root, f])
            else:
                yield path
        pos_docs = []
        neg_docs = []
        for fname in get_file(posfname):
            with io.open(fname, 'r', encoding='utf-8') as f:
                for line in f:
                    line = safe_input(line)
                    pos_docs.append(seg(line))
        for fname in get_file(negfname):
            with io.open(fname, 'r', encoding='utf-8') as f:
                for line in f:
                    line = safe_input(line)
                    neg_docs.append(seg(line))
        return pos_docs, neg_docs
    def train(self, posfname, negfname, stopword=[]):
        """Fit the underlying NBayes counts from the two corpora."""
        pos_docs, neg_docs = self.load_data(posfname, negfname)
        data = []
        for sent in neg_docs:
            data.append(('neg', self.filter_stopword(sent, stopword=stopword)))
        for sent in pos_docs:
            data.append(('pos', self.filter_stopword(sent, stopword=stopword)))
        self.process_data(data)
    def predict(self, doc, stopword=[]):
        """Return P(positive) for the raw text ``doc``."""
        sent = seg(doc)
        ret, prob = self.calc_score(self.filter_stopword(sent, stopword=stopword))
        if ret == 'pos':
            return prob
        return 1 - prob
"mitree@sina.com"
] | mitree@sina.com |
061cf308d22ba1f4f327b5a4a9ae3122f55e8d55 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p04044/s186322243.py | 1a114e7a5f53247105fcd6cba05f286158a77b09 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 187 | py | def compare_strings(s1, s2):
if s1 < s2:
return True
return False
# Read the number of strings N and their common length L (L unused here).
n, l = map(int, input().split())
strings = [input() for i in range(n)]
# With equal-length strings, the lexicographically smallest concatenation
# is obtained by simply sorting and joining.
print(''.join(sorted(strings)))
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
4135f9e1b309b08a79e2a7c5fa6da2848b3d26d1 | 18b3ad3b0e1f7f10969738251e1201d01dfbc6bf | /backup_files/pythonsessions/insert_cmd_data_db.py | 3e87aafa8f2be8768caed764e5532208e37bea80 | [] | no_license | sahthi/backup2 | 11d509b980e731c73733b1399a8143780779e75a | 16bed38f0867fd7c766c2a008c8d43b0660f0cb0 | refs/heads/master | 2020-03-21T12:39:56.890129 | 2018-07-09T08:12:46 | 2018-07-09T08:12:46 | 138,565,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 792 | py | import MySQLdb, subprocess, re, sys, time
def get_date():
    """Return the current local time as a ctime-formatted string."""
    now_text = time.ctime()
    return now_text
def get_ut_fa():
    """Return (uptime string, one-minute load average) parsed from `uptime`.

    Python 2 code (note the print statements). Expects output shaped like:
    17:48:12 up 31 min,  1 user,  load average: 0.13, 0.19, 0.25
    """
    op = subprocess.Popen(['uptime'], stdout=subprocess.PIPE)
    #print op.communicate()
    u_out, u_err = op.communicate()
    # NOTE(review): stderr was not piped, so u_err is always None and this
    # branch can never fire as written; pipe stderr for an effective check.
    if u_err:
        print "Error in command execution"
        sys.exit(100)
    #17:48:12 up 31 min,  1 user,  load average: 0.13, 0.19, 0.25
    mat = re.search(r"up\s(.+?),.+load\saverage:\s(.+?),", u_out)
    # Group 1: everything between "up " and the first comma (the uptime).
    # Group 2: the first number after "load average:" (1-minute average).
    utime = mat.group(1)
    fm_avg = mat.group(2)
    return utime, fm_avg
def insert_to_db():
    """Collect current uptime stats and persist one row to MySQL.

    Inserts (uptime string, one-minute load average) into
    students.system_info using a parameterized INSERT.
    """
    utime, fm_avg = get_ut_fa()
    # NOTE(review): credentials are hard-coded; consider reading them from
    # configuration or the environment instead.
    conn = MySQLdb.connect(user="root", passwd="root", db="students")
    try:
        c = conn.cursor()
        # Parameterized query avoids SQL-injection / quoting issues.
        c.execute('insert into system_info (utime, fma) values(%s, %s)', (utime, fm_avg))
        conn.commit()
    finally:
        # Close the connection even if execute/commit raises, so database
        # connections are never leaked (the original skipped close on error).
        conn.close()
if __name__ == "__main__":
    insert_to_db()
"siddamsetty.sahithi@votarytech.com"
] | siddamsetty.sahithi@votarytech.com |
3256bc9d3399f7c5f9d6ac83a1eed907f6c9ba53 | fc0683e4b9b92b02f2bac73b5a1f9e9e6dac28bf | /Aula22 – Módulos e Pacotes/ex110 – Reduzindo ainda mais seu programa/teste.py | 618b116202551588fab65daa0b7e5c834a21a5a4 | [] | no_license | igorkoury/cev-phyton-exercicios-parte-2 | d31ab83f6f8fbe07ac31974e1a01c338280e6c6c | 1471d8d0c9a98194c3f36e562b68731e95b22882 | refs/heads/main | 2023-08-06T08:46:38.970229 | 2021-10-07T21:28:03 | 2021-10-07T21:28:03 | 414,762,200 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 325 | py | '''Exercício Python 110: Adicione o módulo moeda.py criado nos
desafios anteriores, uma função chamada resumo(), que mostre
na tela algumas informações geradas pelas funções que já temos
no módulo criado até aqui.'''
import moeda
# Read the product price and show a formatted summary: a 15% increase and
# a 20% discount (the helpers live in the local moeda module).
p = float(input('Digite o preço do produto: '))
moeda.resumo(p, 15, 20)
| [
"noreply@github.com"
] | igorkoury.noreply@github.com |
fa690dcd18e07099417f6e7ae4a66721e71f518e | 6219e6536774e8eeb4cadc4a84f6f2bea376c1b0 | /scraper/storage_spiders/giavuvn.py | 7b3674fc44e8c1d0fd830d4f106dfeb8301a5bfa | [
"MIT"
] | permissive | nguyenminhthai/choinho | 109d354b410b92784a9737f020894d073bea1534 | d2a216fe7a5064d73cdee3e928a7beef7f511fd1 | refs/heads/master | 2023-05-07T16:51:46.667755 | 2019-10-22T07:53:41 | 2019-10-22T07:53:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 883 | py | # Auto generated by generator.py. Delete this line if you make modification.
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
# XPath selectors used to extract product fields from giavu.vn pages.
# Empty strings mean the field is not extracted for this site.
XPATH = {
    'name' : "//h1",
    'price' : "//div[@id='dpro']/div[@class='pdl']/b",
    'category' : "//div[@class='dlink']/a",
    'description' : "//div[@id='dpro']/div[@id='pdt1']/div[2]",
    'images' : "//a[@class='imgdt']/img[@class='m']/@src",
    'canonical' : "",
    'base_url' : "",
    'brand' : ""
}
# Spider identity and crawl entry points.
name = 'giavu.vn'
allowed_domains = ['giavu.vn']
start_urls = ['http://giavu.vn/']
tracking_url = ''
# Sitemap crawling is effectively disabled for this site.
sitemap_urls = ['']
sitemap_rules = [('', 'parse_item')]
sitemap_follow = []
# First rule: product detail pages (/<id>/<id>/<slug>.html) -> parse_item;
# second rule: listing pages (optionally paginated) -> parse.
# NOTE(review): the patterns rely on unescaped backslashes in plain
# strings; raw strings (r'...') would avoid invalid-escape warnings.
rules = [
    Rule(LinkExtractor(allow=['/\d+/\d+/[a-zA-Z0-9-]+\.html$']), 'parse_item'),
    Rule(LinkExtractor(allow=['[^\d]/\d+/[a-zA-Z0-9-]+(/page-\d+)?\.html$']), 'parse'),
    #Rule(LinkExtractor(), 'parse_item_and_links'),
]
| [
"nguyenchungthuy.hust@gmail.com"
] | nguyenchungthuy.hust@gmail.com |
37d544cc17f03c6d834b31fecd5185366d176571 | 9adc810b07f7172a7d0341f0b38088b4f5829cf4 | /experiments/ashvin/vae/fixed3/sawyer_pusher/vae_dense_wider2.py | 337be50432ea280d5a02a5728475f17aac45b9f0 | [
"MIT"
] | permissive | Asap7772/railrl_evalsawyer | 7ee9358b5277b9ddf2468f0c6d28beb92a5a0879 | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | refs/heads/main | 2023-05-29T10:00:50.126508 | 2021-06-18T03:08:12 | 2021-06-18T03:08:12 | 375,810,557 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,643 | py | from rlkit.envs.mujoco.sawyer_push_and_reach_env import \
SawyerPushAndReachXYEnv, SawyerPushAndReachXYEasyEnv
from rlkit.envs.mujoco.sawyer_push_env import SawyerPushXYEnv
from rlkit.envs.multitask.point2d import MultitaskImagePoint2DEnv
from rlkit.envs.multitask.pusher2d import FullPusher2DEnv
from rlkit.images.camera import sawyer_init_camera, \
sawyer_init_camera_zoomed_in
from rlkit.launchers.arglauncher import run_variants
import rlkit.misc.hyperparameter as hyp
from rlkit.launchers.launcher_util import run_experiment
from rlkit.torch.vae.relabeled_vae_experiment import experiment
if __name__ == "__main__":
    # Pre-trained VAE checkpoints, keyed by latent dimension (see 'rdim').
    vae_paths = {
        "4": "ashvin/vae/fixed3/sawyer-pusher/train-vae-wider/run5/id0/itr_480.pkl",
        "16": "ashvin/vae/fixed3/sawyer-pusher/train-vae-wider/run5/id1/itr_480.pkl",
    }
    # Base configuration; dotted keys in search_space below override
    # entries of the nested dicts.
    variant = dict(
        algo_kwargs=dict(
            num_epochs=205,
            num_steps_per_epoch=1000,
            num_steps_per_eval=1000,
            tau=1e-2,
            batch_size=128,
            max_path_length=100,
            discount=0.99,
            # qf_learning_rate=1e-3,
            # policy_learning_rate=1e-4,
        ),
        env_kwargs=dict(
            hide_goal=True,
            # reward_info=dict(
            #     type="shaped",
            # ),
        ),
        replay_kwargs=dict(
            fraction_goals_are_rollout_goals=0.2,
            fraction_goals_are_env_goals=0.5,
        ),
        vae_wrapped_env_kwargs=dict(
            sample_from_true_prior=False,
        ),
        algorithm='HER-TD3',
        normalize=False,
        rdim=4,
        render=False,
        env=SawyerPushAndReachXYEasyEnv,
        use_env_goals=True,
        vae_paths=vae_paths,
        wrap_mujoco_env=True,
        do_state_based_exp=False,
        exploration_noise=0.1,
        init_camera=sawyer_init_camera_zoomed_in,
    )
    n_seeds = 3
    # Hyperparameter grid: the sweeper enumerates the cartesian product of
    # these lists, each combination run with every seed id.
    search_space = {
        'exploration_type': [
            'ou',
        ],
        'algo_kwargs.num_updates_per_env_step': [1, 4],
        'replay_kwargs.fraction_goals_are_env_goals': [0.0, 0.5, ],
        'replay_kwargs.fraction_goals_are_rollout_goals': [0.2, ],
        'vae_wrapped_env_kwargs.sample_from_true_prior': [False],
        'exploration_noise': [0.2, 0.5],
        'algo_kwargs.reward_scale': [1e-4],
        'training_mode': ['train'],
        'testing_mode': ['test', ],
        'rdim': [4],
        'seedid': range(n_seeds),
    }
    sweeper = hyp.DeterministicHyperparameterSweeper(
        search_space, default_parameters=variant,
    )
    # Launch one experiment per hyperparameter combination.
    run_variants(experiment, sweeper.iterate_hyperparameters(), run_id=1)
| [
"alexanderkhazatsky@gmail.com"
] | alexanderkhazatsky@gmail.com |
410a0b38828c36bb3735467e18752357af38afbd | 7f24df3218d32eea55cebcfa5faf2a68d58f3bea | /by_python/backjoon/1699(unsolved).py | afd693db4b3c84023a1bc2c0399f06e73f7ca70e | [] | no_license | tgkei/Algorithm_study | 52d89988eb5df0fcf10754ff2c08ab06c0b5f982 | fe11d8cd2871bb4ee2e5f7389c9c5a5709e4bcb3 | refs/heads/master | 2021-06-28T05:14:44.248742 | 2020-11-16T09:09:09 | 2020-11-16T09:09:09 | 172,659,117 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 77 | py | valid = []
# Pre-compute the squares 1^2 .. 316^2 (316^2 = 99856, covering n <= 100000).
for i in range(1,317):
    valid.append(i**2)
# NOTE(review): file is marked "unsolved" -- n is read but the DP over
# `valid` (BOJ 1699: minimum number of squares summing to n) is missing.
n = int(input())
| [
"tagun1202@gmail.com"
] | tagun1202@gmail.com |
84750e0763448e6efd803ff5e0ca28ddb74b0283 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03156/s145919432.py | a3259a2a3c984937831664e6a8145ee9347c7bb2 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 239 | py | n=int(input())
# Score thresholds: beginner problems score <= a, intermediate in (a, b],
# expert > b.
a,b=map(int,input().split())
# Scores of the n problems.
p = [int(x) for x in input().split()]
# A/B/C count the problems in each difficulty bucket.
A=0
B=0
C=0
for i in range(n):
    if p[i] <= a:
        A += 1
    elif p[i] > a and b >= p[i]:
        B += 1
    else:
        C += 1
# Each contest needs one problem per bucket, so the bottleneck bucket
# limits the number of contests.
print(min(A, B, C))
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
06f9d8d71242fcd67622b93693247ed498383e02 | 4d107a97633559963f6510767bb9297febbcbb02 | /applications/MeshingApplication/tests/SmallTests.py | b2e4d1dcd414f04b10b8fc8807739ca9b65b027c | [] | no_license | asroy/Kratos | 45dc4a9ad77a2b203ab2e0c6c5fe030633433181 | e89d6808670d4d645319c7678da548b37825abe3 | refs/heads/master | 2021-03-24T13:28:43.618915 | 2017-12-19T15:38:20 | 2017-12-19T15:38:20 | 102,793,791 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,836 | py | import os
# Import Kratos
from KratosMultiphysics import *
# Import KratosUnittest
import KratosMultiphysics.KratosUnittest as KratosUnittest
import Kratos_Execute_Meshing_Test as Execute_Test
# This utiltiy will control the execution scope in case we need to acces files or we depend
# on specific relative locations of the files.
# TODO: Should we move this to KratosUnittest?
class controlledExecutionScope:
    """Context manager that runs a block with the CWD switched to *scope*.

    The directory current at construction time is restored on exit, even
    when the block raises.
    """
    def __init__(self, scope):
        # Capture the directory to return to at construction time, not at
        # __enter__.
        self.currentPath = os.getcwd()
        self.scope = scope
    def __enter__(self):
        os.chdir(self.scope)
    def __exit__(self, type, value, traceback):
        os.chdir(self.currentPath)
class MeshingTestFactory(KratosUnittest.TestCase):
    """Base factory for meshing tests.

    Subclasses only set ``file_name``; setUp loads
    ``<file_name>_parameters.json`` (relative to this source file) and
    builds the test driver, which test_execution then runs.
    """
    def setUp(self):
        # Run relative to this file so the JSON/model paths resolve.
        with controlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
            # Use a context manager so the parameter file handle is closed
            # promptly instead of being leaked until garbage collection.
            with open(self.file_name + "_parameters.json", 'r') as parameter_file:
                ProjectParameters = Parameters(parameter_file.read())
            # Build the executor that creates the model part and solver.
            self.test = Execute_Test.Kratos_Execute_Test(ProjectParameters)
    def test_execution(self):
        # Solve from the test directory as well.
        with controlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
            self.test.Solve()
    def tearDown(self):
        pass
# Concrete test cases: each subclass only selects the input-file stem
# consumed by MeshingTestFactory.setUp.
class TwoDDynamicBeamTest(MeshingTestFactory):
    file_name = "mmg_lagrangian_test/beam2D_test"
class TwoDDynamicBeamLineLoadTest(MeshingTestFactory):
    file_name = "mmg_lagrangian_test/beam2D_line_load_test"
class ThreeDDynamicBeamTest(MeshingTestFactory):
    file_name = "mmg_lagrangian_test/beam3D_test"
class TwoDDynamicPlasticBeamTest(MeshingTestFactory):
    file_name = "mmg_lagrangian_test/beam2D_internal_variables_interpolation_test"
| [
"vmataix@cimne.upc.edu"
] | vmataix@cimne.upc.edu |
93acab2e83b054c06b882c77db1394c27b2b151f | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnlettuc.py | eb21949fbd696bc7945b6433fec5663257813fa0 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 256 | py | ii = [('LyelCPG2.py', 1), ('RennJIT.py', 2), ('WilkJMC3.py', 1), ('GellWPT.py', 1), ('KiddJAE.py', 1), ('WestJIT2.py', 1), ('SoutRD.py', 1), ('WestJIT.py', 3), ('FitzRNS4.py', 1), ('JacoWHI.py', 1), ('RogeSIP.py', 1), ('DibdTRL.py', 1), ('FitzRNS2.py', 2)] | [
"varunwachaspati@gmail.com"
] | varunwachaspati@gmail.com |
6a5f28ab4b6e958f2922790f0f74673bed384643 | 90c5c9df3d0639e1f8420b592a77df752ab79746 | /tests/migrations/0003_auto_20191220_0911.py | 52222805c9f0df3dc9d219975835dd1fb27cd2e2 | [
"BSD-3-Clause"
] | permissive | intellineers/django-bridger | 4a16e5e63c697671740d965c3fcab02a89b8b27f | ed097984a99df7da40a4d01bd00c56e3c6083056 | refs/heads/master | 2023-08-14T05:41:38.003086 | 2021-09-06T16:51:56 | 2021-09-06T16:51:56 | 221,709,929 | 2 | 1 | BSD-3-Clause | 2023-07-22T21:39:45 | 2019-11-14T13:57:09 | Python | UTF-8 | Python | false | false | 543 | py | # Generated by Django 2.2.9 on 2019-12-20 09:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the float_field and percent_field columns to ModelTest."""
    dependencies = [
        ("tests", "0002_relatedmodeltest"),
    ]
    operations = [
        # default=1 with preserve_default=False: the 1 only back-fills
        # existing rows during the migration and is not kept as the model
        # default afterwards.
        migrations.AddField(
            model_name="modeltest", name="float_field", field=models.FloatField(default=1), preserve_default=False,
        ),
        migrations.AddField(
            model_name="modeltest", name="percent_field", field=models.FloatField(default=1), preserve_default=False,
        ),
    ]
"c.wittlinger@intellineers.com"
] | c.wittlinger@intellineers.com |
9847a45cacd0a7625f93aa8595f9afa5b8a5c16d | cc622e69e45db2e3e3172fcf8598ba7d2b64d5d8 | /taxcalc/_version.py | 56b977ec9aec1b043dce330de18274a2afde9280 | [
"MIT"
] | permissive | SherwinLott/Tax-Calculator | 533c7e381ab5e56bccc03821fe3a4a30a6e6f753 | b5276854031225c004eb795ad5b85f76fe94172d | refs/heads/master | 2020-04-01T23:23:16.691655 | 2015-04-17T14:43:05 | 2015-04-17T14:43:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,420 | py |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.12 (https://github.com/warner/python-versioneer)
# these strings will be replaced by git during git-archive
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
# these strings are filled in when 'setup.py versioneer' creates _version.py
tag_prefix = ""
parentdir_prefix = "taxcalc-"
versionfile_source = "taxcalc/_version.py"
import os, sys, re, subprocess, errno
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Try each candidate executable in *commands* until one launches.

    Runs the first launchable candidate with *args* and returns its
    stripped stdout as text, or None when no candidate exists, launching
    fails for a reason other than "not found", or the process exits
    non-zero.
    """
    assert isinstance(commands, list)
    process = None
    for candidate in commands:
        stderr_target = subprocess.PIPE if hide_stderr else None
        try:
            # shell=False on purpose: on Windows callers pass "git.cmd",
            # not just "git".
            process = subprocess.Popen([candidate] + args, cwd=cwd,
                                       stdout=subprocess.PIPE,
                                       stderr=stderr_target)
        except EnvironmentError:
            err = sys.exc_info()[1]
            if err.errno == errno.ENOENT:
                # This spelling is not installed -- try the next one.
                continue
            if verbose:
                print("unable to run %s" % args[0])
                print(err)
            return None
        break
    if process is None:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    stdout = process.communicate()[0].strip()
    if sys.version >= '3':
        stdout = stdout.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % args[0])
        return None
    return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose=False):
    """Derive a version from the name of the unpacked source directory.

    Source tarballs conventionally unpack into "<project>-<version>"
    directories; when *root*'s basename carries *parentdir_prefix*, the
    remainder is taken as the version.  Returns None when the prefix does
    not match.
    """
    leaf = os.path.basename(root)
    if leaf.startswith(parentdir_prefix):
        return {"version": leaf[len(parentdir_prefix):], "full": ""}
    if verbose:
        print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
              (root, leaf, parentdir_prefix))
    return None
def git_get_keywords(versionfile_abs):
    """Extract git-archive keyword values from a _version.py file.

    Scans *versionfile_abs* for the ``git_refnames`` / ``git_full``
    assignments and returns a dict with whichever values were found
    (keys "refnames" and "full").  setup.py must not import _version.py,
    so the values are pulled out with a regexp instead.  Returns an empty
    dict when the file is missing or unreadable.
    """
    keywords = {}
    try:
        # Context manager guarantees the handle is closed even when a
        # read error occurs part-way through (the original leaked it).
        with open(versionfile_abs, "r") as f:
            for line in f:
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
    except EnvironmentError:
        pass
    return keywords
def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
    """Pick a version out of expanded git-archive keywords.

    ``keywords["refnames"]`` holds git's "%d" decoration (the refs at the
    archived commit); the first sorted tag carrying *tag_prefix* wins, with
    the full commit hash as fallback.  Returns {} when the keywords are
    missing or unexpanded.
    """
    if not keywords:
        return {} # keyword-finding function failed to find keywords
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        return {} # unexpanded, so not in an unpacked git-archive tarball
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs-tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return { "version": r,
                     "full": keywords["full"].strip() }
    # no suitable tags, so we use the full revision id
    if verbose:
        print("no suitable tags, using full revision id")
    return { "version": keywords["full"].strip(),
             "full": keywords["full"].strip() }
def git_versions_from_vcs(tag_prefix, root, verbose=False):
    """Ask git itself for a version, from a checked-out source tree.

    Runs ``git describe`` and ``git rev-parse HEAD`` in *root*; only
    called when the archive keywords were not expanded and _version.py
    has not been rewritten with a short version string.  Returns {} when
    there is no .git directory, git cannot run, or the described tag does
    not carry *tag_prefix*.
    """
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        return {}
    GITS = ["git"]
    if sys.platform == "win32":
        # On Windows, Popen(shell=False) needs the concrete command file.
        GITS = ["git.cmd", "git.exe"]
    stdout = run_command(GITS, ["describe", "--tags", "--dirty", "--always"],
                         cwd=root)
    if stdout is None:
        return {}
    if not stdout.startswith(tag_prefix):
        if verbose:
            print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
        return {}
    tag = stdout[len(tag_prefix):]
    stdout = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if stdout is None:
        return {}
    full = stdout.strip()
    if tag.endswith("-dirty"):
        # Propagate the dirty marker onto the full hash as well.
        full += "-dirty"
    return {"version": tag, "full": full}
def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
    """Resolve the package version, trying each strategy in turn.

    Order: expanded git-archive keywords, then asking git in the source
    tree, then the parent-directory name, finally *default*.
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.
    keywords = { "refnames": git_refnames, "full": git_full }
    ver = git_versions_from_keywords(keywords, tag_prefix, verbose)
    if ver:
        return ver
    try:
        root = os.path.abspath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in range(len(versionfile_source.split(os.sep))):
            root = os.path.dirname(root)
    except NameError:
        # No __file__ (frozen interpreter): keywords were the only option.
        return default
    return (git_versions_from_vcs(tag_prefix, root, verbose)
            or versions_from_parentdir(parentdir_prefix, root, verbose)
            or default)
| [
"tj.alumbaugh@continuum.io"
] | tj.alumbaugh@continuum.io |
370daefbe01246a1903960925230387d185d47cd | 47a6f4e5f92413fda00ea31a2ac3894749dd76d5 | /cvx/op/postprocessing.py | aff84211b0f16e12cfd22d4370114c5fd9c5a2fb | [] | no_license | jtuyls/cvx | 3e569e73cc3a77226014c2921c2ad752a340931e | ede40c895a48103312aabdecc4ffbc8f97c711c8 | refs/heads/master | 2020-08-05T05:51:07.297147 | 2020-03-17T18:17:19 | 2020-03-17T18:17:19 | 212,419,909 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,289 | py | """
CV postprocessing operations
Authors: Jorn Tuyls
"""
import cv2
import numpy as np
from .tools import softmax
def central_crop(height, width, channels):
    """Build a function that centre-crops HWC images to the given size.

    The dimensions may arrive as strings or ints; they are normalised to
    ints once.  The returned callable crops an (H, W, C) array
    symmetrically around its centre and raises ValueError when the image
    is smaller than the requested crop in any dimension.
    """
    target = tuple(int(d) for d in (height, width, channels))

    def _central_crop(img):
        # img must be laid out as (height, width, channels).
        shape = img.shape
        if target[0] > shape[0]:
            raise ValueError("Provided crop height is larger than provided"
                             " image height.")
        if target[1] > shape[1]:
            raise ValueError("Provided crop width is larger than provided"
                             " image width.")
        if target[2] > shape[2]:
            raise ValueError("Provided crop channels value is larger than"
                             " provided image channels.")
        # Centre the crop: floor((have - want) / 2) offset per axis.
        slices = tuple(
            slice((have - want) // 2, (have - want) // 2 + want)
            for have, want in zip(shape, target)
        )
        return img[slices]
    return _central_crop
"jornt@xilinx.com"
] | jornt@xilinx.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.