| sentence1 (string, lengths 52 to 3.87M) | sentence2 (string, lengths 1 to 47.2k) | label (string, 1 class) |
|---|---|---|
def list_reced_topics(self, user_alias=None, start=0):
"""
List of recommended topics
:param user_alias: specified user; defaults to the current user
:param start: pagination offset
:return: list with next-page offset
"""
user_alias = user_alias or self.api.user_alias
xml = self.api.xml(API_GROUP_LIST_USER_RECED_TOPICS % user_alias, params={'start': start})
return build_list_result(self._parse_topic_table(xml, 'title,comment,time,group,rec'), xml)
|
List of recommended topics
:param user_alias: specified user; defaults to the current user
:param start: pagination offset
:return: list with next-page offset
|
entailment
|
def add_topic(self, group_alias, title, content):
"""
Create a topic (beware of the CAPTCHA~)
:param group_alias: group ID
:param title: title
:param content: content
:return: bool
"""
xml = self.api.req(API_GROUP_ADD_TOPIC % group_alias, 'post', data={
'ck': self.api.ck(),
'rev_title': title,
'rev_text': content,
'rev_submit': '好了,发言',
})
return not xml.url.startswith(API_GROUP_ADD_TOPIC % group_alias)
|
Create a topic (beware of the CAPTCHA~)
:param group_alias: group ID
:param title: title
:param content: content
:return: bool
|
entailment
|
def remove_topic(self, topic_id):
"""
Delete a topic (all comments must be deleted first, using the default parameters)
:param topic_id: topic ID
:return: None
"""
comment_start = 0
while comment_start is not None:
comments = self.list_comments(topic_id, comment_start)
for comment in comments['results']:
self.remove_comment(topic_id, comment['id'])
comment_start = comments['next_start']
return self.api.req(API_GROUP_REMOVE_TOPIC % topic_id, params={'ck': self.api.ck()})
|
Delete a topic (all comments must be deleted first, using the default parameters)
:param topic_id: topic ID
:return: None
|
entailment
|
def update_topic(self, topic_id, title, content):
"""
Update a topic
:param topic_id: topic ID
:param title: title
:param content: content
:return: bool
"""
xml = self.api.req(API_GROUP_UPDATE_TOPIC % topic_id, 'post', data={
'ck': self.api.ck(),
'rev_title': title,
'rev_text': content,
'rev_submit': '好了,改吧',
})
return not xml.url.startswith(API_GROUP_UPDATE_TOPIC % topic_id)
|
Update a topic
:param topic_id: topic ID
:param title: title
:param content: content
:return: bool
|
entailment
|
def list_comments(self, topic_id, start=0):
"""
List of replies
:param topic_id: topic ID
:param start: pagination offset
:return: list with next-page offset
"""
xml = self.api.xml(API_GROUP_GET_TOPIC % topic_id, params={'start': start})
xml_results = xml.xpath('//ul[@id="comments"]/li')
results = []
for item in xml_results:
try:
author_avatar = item.xpath('.//img/@src')[0]
author_url = item.xpath('.//div[@class="user-face"]/a/@href')[0]
author_alias = slash_right(author_url)
author_signature = item.xpath('.//h4/text()')[1].strip()
author_nickname = item.xpath('.//h4/a/text()')[0].strip()
created_at = item.xpath('.//h4/span/text()')[0].strip()
content = etree.tostring(item.xpath('.//div[@class="reply-doc content"]/p')[0]).decode('utf8').strip()
cid = item.get('id')
results.append({
'id': cid,
'author_avatar': author_avatar,
'author_url': author_url,
'author_alias': author_alias,
'author_signature': author_signature,
'author_nickname': author_nickname,
'created_at': created_at,
'content': unescape(content),
})
except Exception as e:
self.api.logger.exception('parse comment exception: %s' % e)
return build_list_result(results, xml)
|
List of replies
:param topic_id: topic ID
:param start: pagination offset
:return: list with next-page offset
|
entailment
|
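A minimal sketch of how a paginated endpoint such as list_comments above can be drained by following the next_start cursor returned via build_list_result; the client object and topic ID are hypothetical placeholders, not part of the original API.

def iter_all_comments(group_client, topic_id):
    """Yield every comment of a topic by following 'next_start' until it is None."""
    start = 0
    while start is not None:
        page = group_client.list_comments(topic_id, start)
        for comment in page['results']:
            yield comment
        start = page['next_start']  # becomes None once the last page is reached

# usage (hypothetical client object):
# for c in iter_all_comments(group, '123456'):
#     print(c['author_nickname'], c['created_at'])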
def add_comment(self, topic_id, content, reply_id=None):
"""
Add a comment
:param topic_id: topic ID
:param content: content
:param reply_id: ID of the comment being replied to
:return: None
"""
return self.api.req(API_GROUP_ADD_COMMENT % topic_id, 'post', data={
'ck': self.api.ck(),
'ref_cid': reply_id,
'rv_comment': content,
'start': 0,
'submit_btn': '加上去',
})
|
Add a comment
:param topic_id: topic ID
:param content: content
:param reply_id: ID of the comment being replied to
:return: None
|
entailment
|
def remove_comment(self, topic_id, comment_id, reason='0', other=None):
"""
Delete a comment (in a topic you created you can delete any comment, otherwise only your own)
:param topic_id: topic ID
:param comment_id: comment ID
:param reason: reason 0/1/2 (irrelevant content / objectionable / other)
:param other: details when the reason is "other" (2)
:return: None
"""
params = {'cid': comment_id}
data = {'cid': comment_id, 'ck': self.api.ck(), 'reason': reason, 'other': other, 'submit': '确定'}
r = self.api.req(API_GROUP_REMOVE_COMMENT % topic_id, 'post', params, data)
if r.text.find('douban_admin') > -1:
r = self.api.req(API_GROUP_ADMIN_REMOVE_COMMENT % topic_id, 'post', params, data)
self.api.logger.debug('remove comment final url is <%s>' % r.url)
return r
|
Delete a comment (in a topic you created you can delete any comment, otherwise only your own)
:param topic_id: topic ID
:param comment_id: comment ID
:param reason: reason 0/1/2 (irrelevant content / objectionable / other)
:param other: details when the reason is "other" (2)
:return: None
|
entailment
|
def list_user_comments(self, topic_id, user_alias=None):
"""
List all of a user's replies under a topic
:param topic_id: topic ID
:param user_alias: user ID; defaults to the current user
:return: plain list
"""
user_alias = user_alias or self.api.user_alias
comment_start = 0
results = []
while comment_start is not None:
comments = self.list_comments(topic_id, comment_start)
results += [item for item in comments['results'] if item['author_alias'] == user_alias]
comment_start = comments['next_start']
return results
|
List all of a user's replies under a topic
:param topic_id: topic ID
:param user_alias: user ID; defaults to the current user
:return: plain list
|
entailment
|
def remove_commented_topic(self, topic_id):
"""
Remove a replied-to topic (deletes all comments posted by the current user)
:param topic_id: topic ID
:return: None
"""
return [self.remove_comment(topic_id, item['id']) for item in self.list_user_comments(topic_id)]
|
Remove a replied-to topic (deletes all comments posted by the current user)
:param topic_id: topic ID
:return: None
|
entailment
|
def shell(name=None, **attrs):
"""Creates a new :class:`Shell` with a function as callback. This
works otherwise the same as :func:`command` just that the `cls`
parameter is set to :class:`Shell`.
"""
attrs.setdefault('cls', Shell)
return click.command(name, **attrs)
|
Creates a new :class:`Shell` with a function as callback. This
works otherwise the same as :func:`command` just that the `cls`
parameter is set to :class:`Shell`.
|
entailment
|
def getLogger(name):
"""This is used by gcdt plugins to get a logger with the right level."""
logger = logging.getLogger(name)
# note: the level might be adjusted via '-v' option
logger.setLevel(logging_config['loggers']['gcdt']['level'])
return logger
|
This is used by gcdt plugins to get a logger with the right level.
|
entailment
|
def _discover(self):
"""Discovers methods in the XML-RPC API and creates attributes for them
on this object. Enables stuff like "magento.cart.create(...)" to work
without having to define Python methods for each XML-RPC equivalent.
"""
self._resources = {}
resources = self._client.resources(self._session_id)
for resource in resources:
self._resources[resource['name']] = MagentoResource(
self._client, self._session_id, resource['name'],
resource['title'], resource['methods'])
|
Discovers methods in the XML-RPC API and creates attributes for them
on this object. Enables stuff like "magento.cart.create(...)" to work
without having to define Python methods for each XML-RPC equivalent.
|
entailment
|
def keep_session_alive(self):
"""If the session expired, logs back in."""
try:
self.resources()
except xmlrpclib.Fault as fault:
if fault.faultCode == 5:
self.login()
else:
raise
|
If the session expired, logs back in.
|
entailment
|
def help(self):
"""Prints discovered resources and their associated methods. Nice when
noodling in the terminal to wrap your head around Magento's insanity.
"""
print('Resources:')
print('')
for name in sorted(self._resources.keys()):
methods = sorted(self._resources[name]._methods.keys())
print('{}: {}'.format(bold(name), ', '.join(methods)))
|
Prints discovered resources and their associated methods. Nice when
noodling in the terminal to wrap your head around Magento's insanity.
|
entailment
|
def run(self):
"""Import the controller and run it.
This mimics the processing done by :func:`helper.start`
when a controller is run in the foreground. A new instance
of ``self.controller`` is created and run until a keyboard
interrupt occurs or the controller stops on its own accord.
"""
segments = self.controller.split('.')
controller_class = reduce(getattr, segments[1:],
__import__('.'.join(segments[:-1])))
cmd_line = ['-f']
if self.configuration is not None:
cmd_line.extend(['-c', self.configuration])
args = parser.get().parse_args(cmd_line)
controller_instance = controller_class(args, platform)
try:
controller_instance.start()
except KeyboardInterrupt:
controller_instance.stop()
|
Import the controller and run it.
This mimics the processing done by :func:`helper.start`
when a controller is run in the foreground. A new instance
of ``self.controller`` is created and run until a keyboard
interrupt occurs or the controller stops on its own accord.
|
entailment
|
def get_info(self):
"""
Scans the input path and automatically determines the optimal
piece size based on ~1500 pieces (up to MAX_PIECE_SIZE) along
with other basic info, including total size (in bytes), the
total number of files, piece size (in bytes), and resulting
number of pieces. If ``piece_size`` has already been set, the
custom value will be used instead.
:return: ``(total_size, total_files, piece_size, num_pieces)``
"""
if os.path.isfile(self.path):
total_size = os.path.getsize(self.path)
total_files = 1
elif os.path.exists(self.path):
total_size = 0
total_files = 0
for x in os.walk(self.path):
for fn in x[2]:
if any(fnmatch.fnmatch(fn, ext) for ext in self.exclude):
continue
fpath = os.path.normpath(os.path.join(x[0], fn))
fsize = os.path.getsize(fpath)
if fsize and not is_hidden_file(fpath):
total_size += fsize
total_files += 1
else:
raise exceptions.InvalidInputException
if not (total_files and total_size):
raise exceptions.EmptyInputException
if self.piece_size:
ps = self.piece_size
else:
ps = 1 << max(0, math.ceil(math.log(total_size / 1500, 2)))
if ps < MIN_PIECE_SIZE:
ps = MIN_PIECE_SIZE
if ps > MAX_PIECE_SIZE:
ps = MAX_PIECE_SIZE
return (total_size, total_files, ps, math.ceil(total_size / ps))
|
Scans the input path and automatically determines the optimal
piece size based on ~1500 pieces (up to MAX_PIECE_SIZE) along
with other basic info, including total size (in bytes), the
total number of files, piece size (in bytes), and resulting
number of pieces. If ``piece_size`` has already been set, the
custom value will be used instead.
:return: ``(total_size, total_files, piece_size, num_pieces)``
|
entailment
|
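The piece-size rule in get_info (next power of two that targets roughly 1500 pieces, clamped to MIN_PIECE_SIZE/MAX_PIECE_SIZE) can be checked in isolation; the clamp values below are illustrative assumptions, not the library's actual constants.

import math

MIN_PIECE_SIZE = 16 * 1024         # assumed lower clamp for illustration
MAX_PIECE_SIZE = 16 * 1024 * 1024  # assumed upper clamp for illustration

def pick_piece_size(total_size):
    # next power of two >= total_size / 1500, then clamp to the allowed range
    ps = 1 << max(0, math.ceil(math.log(total_size / 1500, 2)))
    return min(max(ps, MIN_PIECE_SIZE), MAX_PIECE_SIZE)

# e.g. a 700 MB input: 700e6 / 1500 is about 466k, so the next power of two is 512 KiB
print(pick_piece_size(700 * 10**6))  # 524288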
def generate(self, callback=None):
"""
Computes and stores piece data. Returns ``True`` on success, ``False``
otherwise.
:param callback: progress/cancellation callable with method
signature ``(filename, pieces_completed, pieces_total)``.
Useful for reporting progress if dottorrent is used in a
GUI/threaded context, and if torrent generation needs to be cancelled.
The callable's return value should evaluate to ``True`` to trigger
cancellation.
"""
files = []
single_file = os.path.isfile(self.path)
if single_file:
files.append((self.path, os.path.getsize(self.path), {}))
elif os.path.exists(self.path):
for x in os.walk(self.path):
for fn in x[2]:
if any(fnmatch.fnmatch(fn, ext) for ext in self.exclude):
continue
fpath = os.path.normpath(os.path.join(x[0], fn))
fsize = os.path.getsize(fpath)
if fsize and not is_hidden_file(fpath):
files.append((fpath, fsize, {}))
else:
raise exceptions.InvalidInputException
total_size = sum([x[1] for x in files])
if not (len(files) and total_size):
raise exceptions.EmptyInputException
# set piece size if not already set
if self.piece_size is None:
self.piece_size = self.get_info()[2]
if files:
self._pieces = bytearray()
i = 0
num_pieces = math.ceil(total_size / self.piece_size)
pc = 0
buf = bytearray()
while i < len(files):
fe = files[i]
f = open(fe[0], 'rb')
if self.include_md5:
md5_hasher = md5()
else:
md5_hasher = None
for chunk in iter(lambda: f.read(self.piece_size), b''):
buf += chunk
if len(buf) >= self.piece_size \
or i == len(files)-1:
piece = buf[:self.piece_size]
self._pieces += sha1(piece).digest()
del buf[:self.piece_size]
pc += 1
if callback:
cancel = callback(fe[0], pc, num_pieces)
if cancel:
f.close()
return False
if self.include_md5:
md5_hasher.update(chunk)
if self.include_md5:
fe[2]['md5sum'] = md5_hasher.hexdigest()
f.close()
i += 1
# Add pieces from any remaining data
while len(buf):
piece = buf[:self.piece_size]
self._pieces += sha1(piece).digest()
del buf[:self.piece_size]
pc += 1
if callback:
cancel = callback(fe[0], pc, num_pieces)
if cancel:
return False
# Create the torrent data structure
data = OrderedDict()
if len(self.trackers) > 0:
data['announce'] = self.trackers[0].encode()
if len(self.trackers) > 1:
data['announce-list'] = [[x.encode()] for x in self.trackers]
if self.comment:
data['comment'] = self.comment.encode()
if self.created_by:
data['created by'] = self.created_by.encode()
else:
data['created by'] = DEFAULT_CREATOR.encode()
if self.creation_date:
data['creation date'] = int(self.creation_date.timestamp())
if self.web_seeds:
data['url-list'] = [x.encode() for x in self.web_seeds]
data['info'] = OrderedDict()
if single_file:
data['info']['length'] = files[0][1]
if self.include_md5:
data['info']['md5sum'] = files[0][2]['md5sum']
data['info']['name'] = files[0][0].split(os.sep)[-1].encode()
else:
data['info']['files'] = []
path_sp = self.path.split(os.sep)
for x in files:
fx = OrderedDict()
fx['length'] = x[1]
if self.include_md5:
fx['md5sum'] = x[2]['md5sum']
fx['path'] = [y.encode()
for y in x[0].split(os.sep)[len(path_sp):]]
data['info']['files'].append(fx)
data['info']['name'] = path_sp[-1].encode()
data['info']['pieces'] = bytes(self._pieces)
data['info']['piece length'] = self.piece_size
data['info']['private'] = int(self.private)
if self.source:
data['info']['source'] = self.source.encode()
self._data = data
return True
|
Computes and stores piece data. Returns ``True`` on success, ``False``
otherwise.
:param callback: progress/cancellation callable with method
signature ``(filename, pieces_completed, pieces_total)``.
Useful for reporting progress if dottorrent is used in a
GUI/threaded context, and if torrent generation needs to be cancelled.
The callable's return value should evaluate to ``True`` to trigger
cancellation.
|
entailment
|
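A hedged usage sketch for generate(): the callback receives (filename, pieces_completed, pieces_total) and a truthy return value cancels hashing. The constructor arguments shown are assumptions about dottorrent's public interface.

# assumed import path and constructor signature
from dottorrent import Torrent

def progress(filename, pieces_done, pieces_total):
    print('%s: %d/%d pieces' % (filename, pieces_done, pieces_total))
    return False  # returning a truthy value would cancel generation

t = Torrent('my_folder', trackers=['http://tracker.example/announce'])
if t.generate(callback=progress):
    print('hashing finished')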
def info_hash_base32(self):
"""
Returns the base32 info hash of the torrent. Useful for generating
magnet links.
.. note:: ``generate()`` must be called first.
"""
if getattr(self, '_data', None):
return b32encode(sha1(bencode(self._data['info'])).digest())
else:
raise exceptions.TorrentNotGeneratedException
|
Returns the base32 info hash of the torrent. Useful for generating
magnet links.
.. note:: ``generate()`` must be called first.
|
entailment
|
def info_hash(self):
"""
:return: The SHA-1 info hash of the torrent. Useful for generating
magnet links.
.. note:: ``generate()`` must be called first.
"""
if getattr(self, '_data', None):
return sha1(bencode(self._data['info'])).hexdigest()
else:
raise exceptions.TorrentNotGeneratedException
|
:return: The SHA-1 info hash of the torrent. Useful for generating
magnet links.
.. note:: ``generate()`` must be called first.
|
entailment
|
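Given the two hash accessors above, a magnet link can be assembled once generate() has run; this is a sketch under the assumption that info_hash() is callable on the same torrent object as shown.

def magnet_link(torrent, name=None):
    """Build a BTIH magnet URI from the hex info hash (generate() must have run)."""
    uri = 'magnet:?xt=urn:btih:' + torrent.info_hash()
    if name:
        uri += '&dn=' + name
    return uri

# magnet_link(t, name='my_folder')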
def req(self, url, method='get', params=None, data=None, auth=False):
"""
Make an API request
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:type auth: bool
:param auth: if True, raise an exception when the session has expired
:rtype: requests.Response
:return: Response
"""
self.logger.debug('fetch api<%s:%s>' % (method, url))
if auth and self.user_alias is None:
raise Exception('cannot fetch api<%s> without session' % url)
s = requests.Session()
r = s.request(method, url, params=params, data=data, cookies=self.cookies, headers=self.headers,
timeout=self.timeout)
s.close()
if r.url != url and RE_SESSION_EXPIRE.search(r.url) is not None:
self.expire()
if auth:
raise Exception('auth expired, could not fetch with<%s>' % url)
return r
|
Make an API request
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:type auth: bool
:param auth: if True, raise an exception when the session has expired
:rtype: requests.Response
:return: Response
|
entailment
|
def json(self, url, method='get', params=None, data=None):
"""
Make a request and return the JSON response
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:rtype: dict
:return:
"""
r = self.req(url, method, params, data)
return r.json()
|
Make a request and return the JSON response
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:rtype: dict
:return:
|
entailment
|
def xml(self, url, method='get', params=None, data=None):
"""
Make a request and return the parsed HTML/XML
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:rtype: html.HtmlElement
:return:
"""
r = self.req(url, method, params, data)
# required to avoid encoding errors caused by utf8-mb4 content
return self.to_xml(r.content, base_url=r.url)
|
Make a request and return the parsed HTML/XML
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:rtype: html.HtmlElement
:return:
|
entailment
|
def persist(self):
"""
Persist the session information
"""
with open(self.persist_file, 'w+') as f:
json.dump({
'cookies': self.cookies,
'user_alias': self.user_alias,
}, f, indent=2)
self.logger.debug('persist session to <%s>' % self.persist_file)
|
Persist the session information
|
entailment
|
def load(self):
"""
Load the session information
"""
if not os.path.isfile(self.persist_file):
return
with open(self.persist_file, 'r') as f:
cfg = json.load(f) or {}
self.cookies = cfg.get('cookies', {})
self.user_alias = cfg.get('user_alias') or None
self.logger.debug('load session for <%s> from <%s>' % (self.user_alias, self.persist_file))
|
Load the session information
|
entailment
|
def flush(self):
"""
Refresh the session information, mainly ck and user_alias
"""
if 'dbcl2' not in self.cookies:
return
r = self.req(API_ACCOUNT_HOME)
if RE_SESSION_EXPIRE.search(r.url):
return self.expire()
self.cookies.update(dict(r.cookies))
self.user_alias = slash_right(r.url)
self.logger.debug('flush with user_alias <%s>' % self.user_alias)
return
|
Refresh the session information, mainly ck and user_alias
|
entailment
|
def login(self, username, password):
"""
Log in
:type username: str
:param username: username (phone number or email)
:type password: str
:param password: password
"""
r0 = self.req(API_HOME)
time.sleep(1)
cookies = dict(r0.cookies)
data = {
'source': 'index_nav',
'form_email': username,
'form_password': password,
'remember': 'on',
}
r1 = self.req(API_ACCOUNT_LOGIN, method='post', data=data)
cookies.update(dict(r1.cookies))
[cookies.update(dict(r.cookies)) for r in r1.history]
if 'dbcl2' not in cookies:
raise Exception('Authorization failed for <%s>: %s' % (username, r1.url))
cookies.update(dict(r1.cookies))
self.logger.info('login with username <%s>' % username)
self.use(cookies)
return self
|
Log in
:type username: str
:param username: username (phone number or email)
:type password: str
:param password: password
|
entailment
|
def use(self, cookies):
"""
Use this interface if you run into a CAPTCHA
:type cookies: str|dict
:param cookies: cookie string or dict
:return: self
"""
self.cookies = dict([item.split('=', 1) for item in re.split(r'; *', cookies)]) \
if isinstance(cookies, str) else cookies
self.flush()
self.persist()
return self
|
Use this interface if you run into a CAPTCHA
:type cookies: str|dict
:param cookies: cookie string or dict
:return: self
|
entailment
|
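A sketch of how use() is meant to be called when a CAPTCHA blocks the normal login: paste the browser's cookie header either as a raw string or as a dict. The client construction and cookie values are hypothetical placeholders.

# hypothetical client object exposing the API shown above
# api = DoubanAPI(persist_file='session.json')

raw = 'bid=abc123; dbcl2="12345:deadbeef"; ck=XYZw'
# equivalent dict form, split the same way use() does for strings
as_dict = dict(item.split('=', 1) for item in raw.split('; '))

# api.use(raw)      # string form is split on '; ' internally
# api.use(as_dict)  # dict form is stored as-is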
def logout(self):
"""
Log out of the session
:return: self
"""
self.req(API_ACCOUNT_LOGOUT % self.ck())
self.cookies = {}
self.user_alias = None
self.persist()
return self
|
Log out of the session
:return: self
|
entailment
|
def deploy(awsclient, applicationName, deploymentGroupName,
deploymentConfigName, bucket, bundlefile):
"""Upload bundle and deploy to deployment group.
This includes the bundle-action.
:param applicationName:
:param deploymentGroupName:
:param deploymentConfigName:
:param bucket:
:param bundlefile:
:return: deploymentId from create_deployment
"""
etag, version = upload_file_to_s3(awsclient, bucket,
_build_bundle_key(applicationName),
bundlefile)
client_codedeploy = awsclient.get_client('codedeploy')
response = client_codedeploy.create_deployment(
applicationName=applicationName,
deploymentGroupName=deploymentGroupName,
revision={
'revisionType': 'S3',
's3Location': {
'bucket': bucket,
'key': _build_bundle_key(applicationName),
'bundleType': 'tgz',
'eTag': etag,
'version': version,
},
},
deploymentConfigName=deploymentConfigName,
description='deploy with tenkai',
ignoreApplicationStopFailures=True
)
log.info(
"Deployment: {} -> URL: https://{}.console.aws.amazon.com/codedeploy/home?region={}#/deployments/{}".format(
Fore.MAGENTA + response['deploymentId'] + Fore.RESET,
client_codedeploy.meta.region_name,
client_codedeploy.meta.region_name,
response['deploymentId'],
))
return response['deploymentId']
|
Upload bundle and deploy to deployment group.
This includes the bundle-action.
:param applicationName:
:param deploymentGroupName:
:param deploymentConfigName:
:param bucket:
:param bundlefile:
:return: deploymentId from create_deployment
|
entailment
|
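A hedged example of driving deploy() together with the status-polling helper defined below; the awsclient factory, bucket name, and bundle path are assumptions for illustration.

# assumed: awsclient comes from the surrounding gcdt/tenkai tooling
deployment_id = deploy(
    awsclient,
    applicationName='sample-app',
    deploymentGroupName='sample-app-group',
    deploymentConfigName='CodeDeployDefault.OneAtATime',
    bucket='my-deploy-bucket',           # hypothetical bucket
    bundlefile='/tmp/bundle.tar.gz',     # hypothetical bundle path
)
exit_code = output_deployment_status(awsclient, deployment_id)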
def output_deployment_status(awsclient, deployment_id, iterations=100):
"""Wait until an deployment is in an steady state and output information.
:param deployment_id:
:param iterations:
:return: exit_code
"""
counter = 0
steady_states = ['Succeeded', 'Failed', 'Stopped']
client_codedeploy = awsclient.get_client('codedeploy')
while counter <= iterations:
response = client_codedeploy.get_deployment(deploymentId=deployment_id)
status = response['deploymentInfo']['status']
if status not in steady_states:
log.info('Deployment: %s - State: %s' % (deployment_id, status))
time.sleep(10)
elif status == 'Failed':
log.info(
colored.red('Deployment: {} failed: {}'.format(
deployment_id,
json.dumps(response['deploymentInfo']['errorInformation'],
indent=2)
))
)
return 1
else:
log.info('Deployment: %s - State: %s' % (deployment_id, status))
break
return 0
|
Wait until a deployment is in a steady state and output information.
:param deployment_id:
:param iterations:
:return: exit_code
|
entailment
|
def stop_deployment(awsclient, deployment_id):
"""stop tenkai deployment.
:param awsclient:
:param deployment_id:
"""
log.info('Deployment: %s - stopping active deployment.', deployment_id)
client_codedeploy = awsclient.get_client('codedeploy')
response = client_codedeploy.stop_deployment(
deploymentId=deployment_id,
autoRollbackEnabled=True
)
|
stop tenkai deployment.
:param awsclient:
:param deployment_id:
|
entailment
|
def _list_deployment_instances(awsclient, deployment_id):
"""list deployment instances.
:param awsclient:
:param deployment_id:
"""
client_codedeploy = awsclient.get_client('codedeploy')
instances = []
next_token = None
# TODO refactor generic exhaust_function from this
while True:
request = {
'deploymentId': deployment_id
}
if next_token:
request['nextToken'] = next_token
response = client_codedeploy.list_deployment_instances(**request)
instances.extend(response['instancesList'])
if 'nextToken' not in response:
break
next_token = response['nextToken']
return instances
|
list deployment instances.
:param awsclient:
:param deployment_id:
|
entailment
|
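The TODO in _list_deployment_instances asks for a generic "exhaust" helper; a minimal sketch of that refactoring, assuming the usual nextToken convention of the AWS APIs, could look like this.

def exhaust(call, result_key, **request):
    """Collect all pages of a nextToken-paginated boto3-style call."""
    items = []
    next_token = None
    while True:
        if next_token:
            request['nextToken'] = next_token
        response = call(**request)
        items.extend(response[result_key])
        if 'nextToken' not in response:
            break
        next_token = response['nextToken']
    return items

# _list_deployment_instances could then become:
# exhaust(client_codedeploy.list_deployment_instances, 'instancesList',
#         deploymentId=deployment_id)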
def _get_deployment_instance_summary(awsclient, deployment_id, instance_id):
"""instance summary.
:param awsclient:
:param deployment_id:
:param instance_id:
return: status, last_event
"""
client_codedeploy = awsclient.get_client('codedeploy')
request = {
'deploymentId': deployment_id,
'instanceId': instance_id
}
response = client_codedeploy.get_deployment_instance(**request)
return response['instanceSummary']['status'], \
response['instanceSummary']['lifecycleEvents'][-1]['lifecycleEventName']
|
instance summary.
:param awsclient:
:param deployment_id:
:param instance_id:
return: status, last_event
|
entailment
|
def _get_deployment_instance_diagnostics(awsclient, deployment_id, instance_id):
"""Gets you the diagnostics details for the first 'Failed' event.
:param awsclient:
:param deployment_id:
:param instance_id:
return: None or (error_code, script_name, message, log_tail)
"""
client_codedeploy = awsclient.get_client('codedeploy')
request = {
'deploymentId': deployment_id,
'instanceId': instance_id
}
response = client_codedeploy.get_deployment_instance(**request)
# find first 'Failed' event
for i, event in enumerate(response['instanceSummary']['lifecycleEvents']):
if event['status'] == 'Failed':
return event['diagnostics']['errorCode'], \
event['diagnostics']['scriptName'], \
event['diagnostics']['message'], \
event['diagnostics']['logTail']
return None
|
Gets you the diagnostics details for the first 'Failed' event.
:param awsclient:
:param deployment_id:
:param instance_id:
return: None or (error_code, script_name, message, log_tail)
|
entailment
|
def output_deployment_summary(awsclient, deployment_id):
"""summary
:param awsclient:
:param deployment_id:
"""
log.info('\ndeployment summary:')
log.info('%-22s %-12s %s', 'Instance ID', 'Status', 'Most recent event')
for instance_id in _list_deployment_instances(awsclient, deployment_id):
status, last_event = \
_get_deployment_instance_summary(awsclient, deployment_id, instance_id)
log.info(Fore.MAGENTA + '%-22s' + Fore.RESET + ' %-12s %s',
instance_id, status, last_event)
|
summary
:param awsclient:
:param deployment_id:
|
entailment
|
def output_deployment_diagnostics(awsclient, deployment_id, log_group, start_time=None):
"""diagnostics
:param awsclient:
:param deployment_id:
"""
headline = False
for instance_id in _list_deployment_instances(awsclient, deployment_id):
diagnostics = _get_deployment_instance_diagnostics(
awsclient, deployment_id, instance_id)
#if error_code != 'Success':
if diagnostics is not None:
error_code, script_name, message, log_tail = diagnostics
# header
if not headline:
headline = True
log.info('\ndeployment diagnostics:')
# event logs
log.info('Instance ID: %s', Fore.MAGENTA + instance_id + Fore.RESET)
log.info('Error Code: %s', error_code)
log.info('Script Name: %s', script_name)
log.info('Message: %s', message)
log.info('Log Tail: %s', log_tail)
# cloudwatch logs
if check_log_stream_exists(awsclient, log_group, instance_id):
logentries = get_log_events(
awsclient, log_group, instance_id,
datetime_to_timestamp(start_time))
if logentries:
log.info('instance %s logentries', instance_id)
for e in logentries:
log.info(e['message'].strip())
|
diagnostics
:param awsclient:
:param deployment_id:
|
entailment
|
def is_type(self):
"""
:return:
:rtype: bool
"""
if self.__is_type_result is not None:
return self.__is_type_result
self.__is_type_result = self.__is_type()
return self.__is_type_result
|
:return:
:rtype: bool
|
entailment
|
def validate(self, error_message=None):
"""
:raises TypeError:
If the value is not matched the type that the class represented.
"""
if self.is_type():
return
if not error_message:
error_message = "invalid value type"
raise TypeError(
"{}: expected={}, actual={}".format(error_message, self.typename, type(self._data))
)
|
:raises TypeError:
If the value is not matched the type that the class represented.
|
entailment
|
def convert(self):
"""
:return: Converted value.
:raises typepy.TypeConversionError:
If the value cannot convert.
"""
if self.is_type():
return self.force_convert()
raise TypeConversionError(
"failed to convert from {} to {}".format(type(self._data).__name__, self.typename)
)
|
:return: Converted value.
:raises typepy.TypeConversionError:
If the value cannot convert.
|
entailment
|
def always_fails(
self,
work_dict):
"""always_fails
:param work_dict: dictionary for key/values
"""
label = "always_fails"
log.info(("task - {} - start "
"work_dict={}")
.format(label,
work_dict))
raise Exception(
work_dict.get(
"test_failure",
"simulating a failure"))
log.info(("task - {} - done")
.format(label))
return True
|
always_fails
:param work_dict: dictionary for key/values
|
entailment
|
def name_build(self, name, is_policy=False, prefix=True):
"""
Build name from prefix and name + type
:param name: Name of the role/policy
:param is_policy: True if policy should be added as suffix
:param prefix: True if prefix should be added
:return: Joined name
"""
result = name
# Add prefix
if prefix:
result = self.__role_name_prefix + result
# Add policy suffix
if is_policy:
result = result + "-policy"
return result
|
Build name from prefix and name + type
:param name: Name of the role/policy
:param is_policy: True if policy should be added as suffix
:param prefix: True if prefix should be added
:return: Joined name
|
entailment
|
def name_strip(self, name, is_policy=False, prefix=True):
"""
Transforms name to AWS valid characters and adds prefix and type
:param name: Name of the role/policy
:param is_policy: True if policy should be added as suffix
:param prefix: True if prefix should be added
:return: Transformed and joined name
"""
result = self.name_build(name, is_policy, prefix)
result = result.title()
result = result.replace('-', '')
return result
|
Transforms name to AWS valid characters and adds prefix and type
:param name: Name of the role/policy
:param is_policy: True if policy should be added as suffix
:param prefix: True if prefix should be added
:return: Transformed and joined name
|
entailment
|
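A worked example of the two naming helpers above as standalone functions; the 'dp-' role name prefix is an assumption for illustration only, not the library's actual default.

def name_build(name, is_policy=False, prefix=True, role_name_prefix='dp-'):
    # standalone re-implementation of the helper above for illustration
    result = role_name_prefix + name if prefix else name
    return result + '-policy' if is_policy else result

def name_strip(name, is_policy=False, prefix=True):
    # title-case and drop dashes so the result is a valid CloudFormation logical ID
    return name_build(name, is_policy, prefix).title().replace('-', '')

print(name_strip('lambda-exec', is_policy=True))  # 'DpLambdaExecPolicy'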
def build_policy(self, name, statements, roles, is_managed_policy=False):
"""
Generate policy for IAM cloudformation template
:param name: Name of the policy
:param statements: The "rules" the policy should have
:param roles: The roles associated with this policy
:param is_managed_policy: True if managed policy
:return: Ref to new policy
"""
if is_managed_policy:
policy = ManagedPolicy(
self.name_strip(name, True),
PolicyDocument={
"Version": self.VERSION_IAM,
"Statement": statements,
},
Roles=roles,
Path=self.__role_path,
)
else:
policy = PolicyType(
self.name_strip(name, True),
PolicyName=self.name_strip(name, True),
PolicyDocument={
"Version": self.VERSION_IAM,
"Statement": statements,
},
Roles=roles,
)
self.__template.add_resource(policy)
return policy
|
Generate policy for IAM cloudformation template
:param name: Name of the policy
:param statements: The "rules" the policy should have
:param roles: The roles associated with this policy
:param is_managed_policy: True if managed policy
:return: Ref to new policy
|
entailment
|
def build_policy_bucket(self, bucket, name, statements):
"""
Generate bucket policy for S3 bucket
:param bucket: The bucket to attach policy to
:param name: The name of the bucket (to generate policy name from it)
:param statements: The "rules" the policy should have
:return: Ref to new policy
"""
policy = self.__template.add_resource(
BucketPolicy(
self.name_strip(name, True, False),
Bucket=troposphere.Ref(bucket),
DependsOn=[
troposphere.Name(bucket)
],
PolicyDocument=Policy(
Version=self.VERSION_IAM,
Statement=statements
)
)
)
return policy
|
Generate bucket policy for S3 bucket
:param bucket: The bucket to attach policy to
:param name: The name of the bucket (to generate policy name from it)
:param statements: The "rules" the policy should have
:return: Ref to new policy
|
entailment
|
def build_role(self, name, policies=False):
"""
Generate role for IAM cloudformation template
:param name: Name of role
:param policies: List of policies to attach to this role (False = none)
:return: Ref to new role
"""
# Build role template
if policies:
role = self.__template.add_resource(
Role(
self.name_strip(name),
AssumeRolePolicyDocument=Policy(
Version=self.VERSION_IAM,
Statement=[
Statement(
Effect=Allow,
Principal=Principal(
"Service", self.__role_principals
),
Action=[AssumeRole],
)
]
),
Path=self.__role_path,
ManagedPolicyArns=policies,
))
# Add role to list for default policy
self.__roles_list.append(troposphere.Ref(role))
else:
role = self.__template.add_resource(
Role(
self.name_strip(name),
AssumeRolePolicyDocument=Policy(
Version=self.VERSION_IAM,
Statement=[
Statement(
Effect=Allow,
Principal=Principal(
"Service", self.__role_principals
),
Action=[AssumeRole],
)
]
),
Path=self.__role_path,
))
# Add role to list for default policy
self.__roles_list.append(troposphere.Ref(role))
return role
|
Generate role for IAM cloudformation template
:param name: Name of role
:param policies: List of policies to attach to this role (False = none)
:return: Ref to new role
|
entailment
|
def build_bucket(self, name, lifecycle_configuration=False,
use_plain_name=False):
"""
Generate S3 bucket statement
:param name: Name of the bucket
:param lifecycle_configuration: Additional lifecycle configuration (default=False)
:param use_plain_name: Just use the given name and do not add prefix
:return: Ref to new bucket
"""
if use_plain_name:
name_aws = name_bucket = name
name_aws = name_aws.title()
name_aws = name_aws.replace('-', '')
else:
name_aws = self.name_strip(name, False, False)
name_bucket = self.name_build(name)
if lifecycle_configuration:
return self.__template.add_resource(
Bucket(
name_aws,
BucketName=name_bucket,
LifecycleConfiguration=lifecycle_configuration
)
)
else:
return self.__template.add_resource(
Bucket(
name_aws,
BucketName=name_bucket,
)
)
|
Generate S3 bucket statement
:param name: Name of the bucket
:param lifecycle_configuration: Additional lifecycle configuration (default=False)
:param use_plain_name: Just use the given name and do not add prefix
:return: Ref to new bucket
|
entailment
|
def directive_SPACE(self, label, params):
"""
label SPACE num
Allocate space on the stack. `num` is the number of bytes to allocate
"""
# TODO allow equations
params = params.strip()
try:
self.convert_to_integer(params)
except ValueError:
warnings.warn("Unknown parameters; {}".format(params))
return
self.labels[label] = self.space_pointer
if params in self.equates:
params = self.equates[params]
self.space_pointer += self.convert_to_integer(params)
|
label SPACE num
Allocate space on the stack. `num` is the number of bytes to allocate
|
entailment
|
def directive_DCD(self, label, params):
"""
label DCD value[, value ...]
Allocate a word space in read only memory for the value or list of values
"""
# TODO make this read only
# TODO check for param size
# TODO can take any length comma separated values (VAL DCD 1, 0x2, 3, 4
params = params.strip()
try:
self.convert_to_integer(params)
except ValueError:
# TODO allow word DCDs (like SP_INIT, Reset_Handler)
warnings.warn("Cannot reserve constant words; {}".format(params))
return
# Align address
if self.space_pointer % 4 != 0:
self.space_pointer += 4 - (self.space_pointer % 4)
self.labels[label] = self.space_pointer
if params in self.equates:
params = self.equates[params]
for i in range(4):
self.memory[self.space_pointer + i] = (self.convert_to_integer(params) >> (8*i)) & 0xFF
self.space_pointer += 4
|
label DCD value[, value ...]
Allocate a word space in read only memory for the value or list of values
|
entailment
|
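The byte loop in directive_DCD stores the word little-endian, one byte per memory cell; a small standalone sketch of that packing, where a plain dict stands in for the simulator's memory:

def store_word_le(memory, address, value):
    # write a 32-bit value little-endian, exactly as directive_DCD does
    for i in range(4):
        memory[address + i] = (value >> (8 * i)) & 0xFF

mem = {}
store_word_le(mem, 0x20000000, 0x12345678)
print([hex(mem[0x20000000 + i]) for i in range(4)])  # ['0x78', '0x56', '0x34', '0x12']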
def directive_DCH(self, label, params):
"""
label DCH value[, value ...]
Allocate a half word space in read only memory for the value or list of values
"""
# TODO make this read only
# TODO check for word size
# Align address
if self.space_pointer % 2 != 0:
self.space_pointer += self.space_pointer % 2
self.labels[label] = self.space_pointer
if params in self.equates:
params = self.equates[params]
for i in range(2):
self.memory[self.space_pointer + i] = (self.convert_to_integer(params) >> (8 * i)) & 0xFF
self.space_pointer += 2
|
label DCH value[, value ...]
Allocate a half word space in read only memory for the value or list of values
|
entailment
|
def directive_DCB(self, label, params):
"""
label DCB value[, value ...]
Allocate a byte space in read only memory for the value or list of values
"""
# TODO make this read only
# TODO check for byte size
self.labels[label] = self.space_pointer
if params in self.equates:
params = self.equates[params]
self.memory[self.space_pointer] = self.convert_to_integer(params) & 0xFF
self.space_pointer += 1
|
label DCB value[, value ...]
Allocate a byte space in read only memory for the value or list of values
|
entailment
|
def get_celery_app(
name=os.getenv(
"CELERY_NAME",
"worker"),
auth_url=os.getenv(
"BROKER_URL",
"redis://localhost:6379/9"),
backend_url=os.getenv(
"BACKEND_URL",
"redis://localhost:6379/10"),
include_tasks=[],
ssl_options=None,
transport_options=None,
path_to_config_module=os.getenv(
"CONFIG_MODULE_PATH",
"celery_loaders.work_tasks.celery_config"),
worker_log_format=os.getenv(
"WORKER_LOG_FORMAT",
"%(asctime)s: %(levelname)s %(message)s"),
**kwargs):
"""get_celery_app
:param name: name for this app
:param auth_url: celery broker
:param backend_url: celery backend
:param include_tasks: list of modules containing tasks to add
:param ssl_options: security options dictionary
:param transport_options: transport options dictionary
:param path_to_config_module: config module
:param worker_log_format: format for logs
"""
if len(include_tasks) == 0:
log.error(("creating celery app={} MISSING tasks={}")
.format(
name,
include_tasks))
else:
log.info(("creating celery app={} tasks={}")
.format(
name,
include_tasks))
# get the Celery application
app = celery.Celery(
name,
broker_url=auth_url,
result_backend=backend_url,
include=include_tasks)
app.config_from_object(
path_to_config_module,
namespace="CELERY")
app.conf.update(kwargs)
if transport_options:
log.info(("loading transport_options={}")
.format(transport_options))
app.conf.update(**transport_options)
# custom transport options
if ssl_options:
log.info(("loading ssl_options={}")
.format(ssl_options))
app.conf.update(**ssl_options)
# custom ssl options
if len(include_tasks) > 0:
app.autodiscover_tasks(include_tasks)
return app
|
get_celery_app
:param name: name for this app
:param auth_url: celery broker
:param backend_url: celery backend
:param include_tasks: list of modules containing tasks to add
:param ssl_options: security options dictionary
:param transport_options: transport options dictionary
:param path_to_config_module: config module
:param worker_log_format: format for logs
|
entailment
|
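A hedged usage sketch for get_celery_app(): the broker/backend URLs and the task module below are placeholders, and the environment-variable defaults apply whenever the arguments are omitted.

# hypothetical task module list; the URLs below are placeholders
app = get_celery_app(
    name='worker',
    auth_url='redis://localhost:6379/9',
    backend_url='redis://localhost:6379/10',
    include_tasks=['my_project.tasks'],
)

# then, on the command line (assumed module path):
#   celery -A my_project.worker:app worker --loglevel=INFO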
def check_arguments(self, **kwargs):
"""
Determine if the parameters meet the specifications
kwargs contains lists grouped by their parameter
rules are defined by methods starting with 'rule_'
:param kwargs:
:return:
"""
for key in kwargs:
if key in self._rules:
for val in kwargs[key]:
self._rules[key](val)
else:
raise LookupError("Rule for {} does not exist. Make sure the rule starts with 'rule_'".format(key))
|
Determine if the parameters meet the specifications
kwargs contains lists grouped by their parameter
rules are defined by methods starting with 'rule_'
:param kwargs:
:return:
|
entailment
|
def link(self, key1, key2):
"""
Make these two keys have the same value
:param key1:
:param key2:
:return:
"""
# TODO make this have more than one key linked
# TODO Maybe make the value a set?
self._linked_keys[key1] = key2
self._linked_keys[key2] = key1
|
Make these two keys have the same value
:param key1:
:param key2:
:return:
|
entailment
|
def instance_ik_model_receiver(fn):
"""
A method decorator that filters out signals coming from models that don't
have fields that function as ImageFieldSourceGroup sources.
"""
@wraps(fn)
def receiver(self, sender, **kwargs):
# print 'inspect.isclass(sender? %s'%(inspect.isclass(sender))
if not inspect.isclass(sender):
return
for src in self._source_groups:
if issubclass(sender, src.model_class):
fn(self, sender=sender, **kwargs)
# If we find a match, return. We don't want to handle the signal
# more than once.
return
return receiver
|
A method decorator that filters out signals coming from models that don't
have fields that function as ImageFieldSourceGroup sources.
|
entailment
|
def source_group_receiver(self, sender, source, signal, **kwargs):
"""
Relay source group signals to the appropriate spec strategy.
"""
from imagekit.cachefiles import ImageCacheFile
source_group = sender
instance = kwargs['instance']
# Ignore signals from unregistered groups.
if source_group not in self._source_groups:
return
#HOOK -- update source to point to image file.
for id in self._source_groups[source_group]:
spec_to_update = generator_registry.get(id, source=source, instance=instance, field=hack_spec_field_hash[id])
specs = [generator_registry.get(id, source=source, instance=instance, field=hack_spec_field_hash[id]) for id in
self._source_groups[source_group]]
callback_name = self._signals[signal]
# print 'callback_name? %s'%(callback_name)
for spec in specs:
file = ImageCacheFile(spec)
# print 'SEPC %s file %s'%(spec, file)
call_strategy_method(file, callback_name)
|
Relay source group signals to the appropriate spec strategy.
|
entailment
|
def update_source_hashes(self, instance):
"""
Stores hashes of the source image files so that they can be compared
later to see whether the source image has changed (and therefore whether
the spec file needs to be regenerated).
"""
self.init_instance(instance)
instance._ik['source_hashes'] = dict(
(attname, hash(getattr(instance, attname)))
for attname in self.get_source_fields(instance))
return instance._ik['source_hashes']
|
Stores hashes of the source image files so that they can be compared
later to see whether the source image has changed (and therefore whether
the spec file needs to be regenerated).
|
entailment
|
def get_source_fields(self, instance):
"""
Returns a list of the source fields for the given instance.
"""
return set(src.image_field
for src in self._source_groups
if isinstance(instance, src.model_class))
|
Returns a list of the source fields for the given instance.
|
entailment
|
def on_success(self, retval, task_id, args, kwargs):
"""on_success
http://docs.celeryproject.org/en/latest/reference/celery.app.task.html
:param retval: return value
:param task_id: celery task id
:param args: arguments passed into task
:param kwargs: keyword arguments passed into task
"""
log.info(("{} SUCCESS - retval={} task_id={} "
"args={} kwargs={}")
.format(
self.log_label,
retval,
task_id,
args,
kwargs))
|
on_success
http://docs.celeryproject.org/en/latest/reference/celery.app.task.html
:param retval: return value
:param task_id: celery task id
:param args: arguments passed into task
:param kwargs: keyword arguments passed into task
|
entailment
|
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""on_failure
http://docs.celeryproject.org/en/latest/userguide/tasks.html#task-inheritance
:param exc: exception
:param task_id: task id
:param args: arguments passed into task
:param kwargs: keyword arguments passed into task
:param einfo: exception info
"""
use_exc = str(exc)
log.error(("{} FAIL - exc={} "
"args={} kwargs={}")
.format(
self.log_label,
use_exc,
args,
kwargs))
|
on_failure
http://docs.celeryproject.org/en/latest/userguide/tasks.html#task-inheritance
:param exc: exception
:param task_id: task id
:param args: arguments passed into task
:param kwargs: keyword arguments passed into task
:param einfo: exception info
|
entailment
|
def check_hook_mechanism_is_intact(module):
"""Check if the hook configuration is absent or has both register AND deregister.
:param module:
:return: True if valid plugin / module.
"""
result = True
if check_register_present(module):
result = not result
if check_deregister_present(module):
result = not result
return result
|
Check if the hook configuration is absent or has both register AND deregister.
:param module:
:return: True if valid plugin / module.
|
entailment
|
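The double negation in check_hook_mechanism_is_intact is effectively an XNOR of the two checks: a module counts as intact when it defines both register and deregister, or neither. A small sketch with stand-in predicate results:

def hook_mechanism_is_intact(has_register, has_deregister):
    # mirrors the flip-flip logic above: True for (False, False) and (True, True)
    result = True
    if has_register:
        result = not result
    if has_deregister:
        result = not result
    return result

for combo in [(False, False), (True, False), (False, True), (True, True)]:
    print(combo, hook_mechanism_is_intact(*combo))
# (False, False) True / (True, False) False / (False, True) False / (True, True) True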
def cfn_viz(template, parameters={}, outputs={}, out=sys.stdout):
"""Render dot output for cloudformation.template in json format.
"""
known_sg, open_sg = _analyze_sg(template['Resources'])
(graph, edges) = _extract_graph(template.get('Description', ''),
template['Resources'], known_sg, open_sg)
graph['edges'].extend(edges)
_handle_terminals(template, graph, 'Parameters', 'source', parameters)
_handle_terminals(template, graph, 'Outputs', 'sink', outputs)
graph['subgraphs'].append(_handle_pseudo_params(graph['edges']))
_render(graph, out=out)
|
Render dot output for cloudformation.template in json format.
|
entailment
|
def _get_fillcolor(resource_type, properties, known_sg=[], open_sg=[]):
"""Determine fillcolor for resources (public ones in this case)
"""
fillcolor = None
# check security groups
if 'SecurityGroups' in properties:
# check for external security groups
for sg in properties['SecurityGroups']:
if 'Ref' in sg and (sg['Ref'] not in known_sg):
fillcolor = 'yellow'
break
# check for open security groups
for osg in open_sg:
if {'Ref': osg} in properties['SecurityGroups']:
fillcolor = 'red'
break
# LoadBalancer
if resource_type == 'LoadBalancer':
if ('Scheme' not in properties) or \
properties['Scheme'] == 'internet-facing':
fillcolor = 'red'
return fillcolor
|
Determine fillcolor for resources (public ones in this case)
|
entailment
|
def svg_output(dotfile, outfile='cloudformation.svg'):
"""Render template into svg file using the dot command (must be installed).
:param dotfile: path to the dotfile
:param outfile: filename for the output file
:return:
"""
try:
cmd = ['dot', '-Tsvg', '-o' + outfile, dotfile]
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
sys.stderr.write(
'\033[01;31mError running command: %s resulted in the ' % e.cmd +
'following error: \033[01;32m %s' % e.output)
return 1
return 0
|
Render template into svg file using the dot command (must be installed).
:param dotfile: path to the dotfile
:param outfile: filename for the output file
:return:
|
entailment
|
def start(controller_class):
"""Start the Helper controller either in the foreground or as a daemon
process.
:param controller_class: The controller class handle to create and run
:type controller_class: callable
"""
args = parser.parse()
obj = controller_class(args, platform.operating_system())
if args.foreground:
try:
obj.start()
except KeyboardInterrupt:
obj.stop()
else:
try:
with platform.Daemon(obj) as daemon:
daemon.start()
except (OSError, ValueError) as error:
sys.stderr.write('\nError starting %s: %s\n\n' %
(sys.argv[0], error))
sys.exit(1)
|
Start the Helper controller either in the foreground or as a daemon
process.
:param controller_class: The controller class handle to create and run
:type controller_class: callable
|
entailment
|
def run(self, steps=float('inf')):
"""
Run to the current end of the program or a number of steps
:return:
"""
while len(self.program) > (self.register['PC'] - 1):
steps -= 1
if steps < 0:
break
self.program[self.register['PC'] - 1]()
self.register['PC'] += 1
|
Run to the current end of the program or a number of steps
:return:
|
entailment
|
def _validate_type(self, name, obj, *args):
"""
Helper function that checks the input object type against each in a list of classes. This function
also allows the input value to be equal to None.
:param name: Name of the object.
:param obj: Object to check the type of.
:param args: List of classes.
:raises TypeError: if the input object is not of any of the allowed types.
"""
if obj is None:
return
for arg in args:
if isinstance(obj, arg):
return
raise TypeError(self.__class__.__name__ + '.' + name + ' is of type ' + type(obj).__name__ +
'. Must be equal to None or one of the following types: ' + str(args))
|
Helper function that checks the input object type against each in a list of classes. This function
also allows the input value to be equal to None.
:param name: Name of the object.
:param obj: Object to check the type of.
:param args: List of classes.
:raises TypeError: if the input object is not of any of the allowed types.
|
entailment
|
def _validate_list_type(self, name, obj, *args):
"""
Helper function that checks the input object type against each in a list of classes, or if the input object
is a list, each value that it contains against that list.
:param name: Name of the object.
:param obj: Object to check the type of.
:param args: List of classes.
:raises TypeError: if the input object is not of any of the allowed types.
"""
if obj is None:
return
if isinstance(obj, list):
for i in obj:
self._validate_type_not_null(name, i, *args)
else:
self._validate_type(name, obj, *args)
|
Helper function that checks the input object type against each in a list of classes, or if the input object
is a list, each value that it contains against that list.
:param name: Name of the object.
:param obj: Object to check the type of.
:param args: List of classes.
:raises TypeError: if the input object is not of any of the allowed types.
|
entailment
|
def _validate_nested_list_type(self, name, obj, nested_level, *args):
"""
Helper function that checks the input object as a list then recursively until nested_level is 1.
:param name: Name of the object.
:param obj: Object to check the type of.
:param nested_level: Integer with the current nested level.
:param args: List of classes.
:raises TypeError: if the input object is not of any of the allowed types.
"""
if nested_level <= 1:
self._validate_list_type(name, obj, *args)
else:
if obj is None:
return
if not isinstance(obj, list):
raise TypeError(self.__class__.__name__ + '.' + name + ' contains value of type ' +
type(obj).__name__ + ' where a list is expected')
for sub_obj in obj:
self._validate_nested_list_type(name, sub_obj, nested_level - 1, *args)
|
Helper function that checks the input object as a list then recursively until nested_level is 1.
:param name: Name of the object.
:param obj: Object to check the type of.
:param nested_level: Integer with the current nested level.
:param args: List of classes.
:raises TypeError: if the input object is not of any of the allowed types.
|
entailment
|
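An illustration of the recursion in _validate_nested_list_type as a standalone function: one list layer is peeled off per nesting level before the element types are checked. This is a sketch of the same idea, not the class's actual helpers, and it folds the inner type check in directly.

def validate_nested_list(name, obj, nested_level, *types):
    """Standalone sketch of _validate_nested_list_type: peel one list layer per level."""
    if obj is None:
        return
    if nested_level <= 1:
        values = obj if isinstance(obj, list) else [obj]
        for v in values:
            if not isinstance(v, types):
                raise TypeError('%s contains %s, expected one of %s'
                                % (name, type(v).__name__, [t.__name__ for t in types]))
    else:
        if not isinstance(obj, list):
            raise TypeError('%s contains %s where a list is expected'
                            % (name, type(obj).__name__))
        for sub in obj:
            validate_nested_list(name, sub, nested_level - 1, *types)

validate_nested_list('matrix', [[1, 2], [3, 4]], 2, int)   # passes
# validate_nested_list('matrix', [[1, 'x']], 2, int)       # raises TypeError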
def B(self, params):
"""
B label
Unconditional branch to the address at label
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# TODO check if label is within +- 2 KB
# B label
def B_func():
if label == '.':
raise iarm.exceptions.EndOfProgram("You have reached an infinite loop")
self.register['PC'] = self.labels[label]
return B_func
|
B label
Unconditional branch to the address at label
|
entailment
|
def BL(self, params):
"""
BL label
Branch to the label, storing the next instruction in the Link Register
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# TODO check if label is within +- 16 MB
# BL label
def BL_func():
self.register['LR'] = self.register['PC'] # No need for the + 1, PC already points to the next instruction
self.register['PC'] = self.labels[label]
return BL_func
|
BL label
Branch to the label, storing the next instruction in the Link Register
|
entailment
|
def BLX(self, params):
"""
BLX Rj
Branch to the address in Rj, storing the next instruction in the Link Register
"""
Rj = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(LR_or_general_purpose_registers=(Rj,))
def BLX_func():
self.register['LR'] = self.register['PC'] # No need for the + 1, PC already points to the next instruction
self.register['PC'] = self.register[Rj]
return BLX_func
|
BLX Rj
Branch to the address in Rj, storing the next instruction in the Link Register
|
entailment
|
def BX(self, params):
"""
BX Rj
Branch to the address in Rj (typically the Link Register, to return from a subroutine)
"""
Rj = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(LR_or_general_purpose_registers=(Rj,))
def BX_func():
self.register['PC'] = self.register[Rj]
return BX_func
|
BX Rj
Branch to the address in Rj (typically the Link Register, to return from a subroutine)
|
entailment
|
def get_version(version):
"""
Returns a PEP 440-compliant version number from VERSION.
Created by modifying django.utils.version.get_version
"""
# Now build the two parts of the version number:
# major = X.Y[.Z]
# sub = .devN - for development releases
# | {a|b|rc}N - for alpha, beta and rc releases
# | .postN - for post-release releases
assert len(version) == 5
version_parts = version[:2] if version[2] == 0 else version[:3]
# Build the first part of the version
major = '.'.join(str(x) for x in version_parts)
# Just return it if this is a final release version
if version[3] == 'final':
return major
# Add the rest
sub = ''.join(str(x) for x in version[3:5])
if version[3] == 'dev':
# Override the sub part. Add in a timestamp
timestamp = get_git_changeset()
sub = 'dev%s' % (timestamp if timestamp else version[4])
return '%s.%s' % (major, sub)
if version[3] == 'post':
# We need a dot for post
return '%s.%s' % (major, sub)
elif version[3] in ('a', 'b', 'rc'):
# No dot for these
return '%s%s' % (major, sub)
else:
raise ValueError('Invalid version: %s' % str(version))
|
Returns a PEP 440-compliant version number from VERSION.
Created by modifying django.utils.version.get_version
|
entailment
|
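A few worked inputs for get_version(), assuming the Django-style 5-tuple VERSION layout (major, minor, micro, stage, number); the concrete tuples are illustrative assumptions.

# stage 'final' drops the suffix, and micro == 0 is omitted:
#   get_version((1, 4, 0, 'final', 0))  -> '1.4'
#   get_version((1, 4, 2, 'final', 0))  -> '1.4.2'
# alpha/beta/rc attach directly, with no dot:
#   get_version((2, 0, 0, 'rc', 1))     -> '2.0rc1'
# post releases keep the dot:
#   get_version((2, 0, 1, 'post', 3))   -> '2.0.1.post3'
# dev releases use the git changeset timestamp when available:
#   get_version((2, 1, 0, 'dev', 5))    -> '2.1.dev20240101120000' (or '2.1.dev5' without git)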
def get_git_changeset():
"""Returns a numeric identifier of the latest git changeset.
The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
This value isn't guaranteed to be unique, but collisions are very unlikely,
so it's sufficient for generating the development version numbers.
"""
repo_dir = os.path.dirname(os.path.abspath(__file__))
git_log = subprocess.Popen('git log --pretty=format:%ct --quiet -1 HEAD',
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=True, cwd=repo_dir, universal_newlines=True)
timestamp = git_log.communicate()[0]
try:
timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
return timestamp.strftime('%Y%m%d%H%M%S')
except ValueError:
return None
|
Returns a numeric identifier of the latest git changeset.
The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
This value isn't guaranteed to be unique, but collisions are very unlikely,
so it's sufficient for generating the development version numbers.
|
entailment
|
def load_cloudformation_template(path=None):
"""Load cloudformation template from path.
:param path: Absolute or relative path of cloudformation template. Defaults to cwd.
:return: module, success
"""
if not path:
path = os.path.abspath('cloudformation.py')
else:
path = os.path.abspath(path)
if isinstance(path, six.string_types):
try:
sp = sys.path
# temporarily add folder to allow relative path
sys.path.append(os.path.abspath(os.path.dirname(path)))
cloudformation = imp.load_source('cloudformation', path)
sys.path = sp # restore
# use cfn template hooks
if not check_hook_mechanism_is_intact(cloudformation):
# no hooks - do nothing
log.debug(
'No valid hook configuration: \'%s\'. Not using hooks!',
path)
else:
if check_register_present(cloudformation):
# register the template hooks so they listen to gcdt_signals
cloudformation.register()
return cloudformation, True
except GracefulExit:
raise
except ImportError as e:
print('could not find package for import: %s' % e)
except Exception as e:
print('could not import cloudformation.py, maybe something wrong ',
'with your code?')
print(e)
return None, False
|
Load cloudformation template from path.
:param path: Absolute or relative path of cloudformation template. Defaults to cwd.
:return: module, success
|
entailment
|
def get_parameter_diff(awsclient, config):
"""get differences between local config and currently active config
"""
client_cf = awsclient.get_client('cloudformation')
try:
stack_name = config['stack']['StackName']
if stack_name:
response = client_cf.describe_stacks(StackName=stack_name)
if response['Stacks']:
stack_id = response['Stacks'][0]['StackId']
stack = response['Stacks'][0]
else:
return None
else:
print(
'StackName is not configured, could not create parameter diff')
return None
except GracefulExit:
raise
except Exception:
# probably the stack is not existent
return None
changed = 0
table = []
table.append(['Parameter', 'Current Value', 'New Value'])
# Check if there are parameters for the stack
if 'Parameters' in stack:
for param in stack['Parameters']:
try:
old = str(param['ParameterValue'])
# can not compare list with str!!
# if ',' in old:
# old = old.split(',')
new = config['parameters'][param['ParameterKey']]
if old != new:
if old.startswith('***'):
# parameter is configured with `NoEcho=True`
# this means we can not really say if the value changed!!
# for security reasons we block viewing the new value
new = old
table.append([param['ParameterKey'], old, new])
changed += 1
except GracefulExit:
raise
except Exception:
print('Did not find %s in local config file' % param[
'ParameterKey'])
if changed > 0:
print(tabulate(table, tablefmt='fancy_grid'))
return changed > 0
|
get differences between local config and currently active config
|
entailment
|
def call_pre_hook(awsclient, cloudformation):
"""Invoke the pre_hook BEFORE the config is read.
:param awsclient:
:param cloudformation:
"""
# TODO: this is deprecated!! move this to glomex_config_reader
# no config available
if not hasattr(cloudformation, 'pre_hook'):
# hook is not present
return
hook_func = getattr(cloudformation, 'pre_hook')
if not hook_func.func_code.co_argcount:
hook_func() # for compatibility with existing templates
else:
log.error('pre_hook can not have any arguments; the pre_hook is ' +
'executed BEFORE the config is read')
|
Invoke the pre_hook BEFORE the config is read.
:param awsclient:
:param cloudformation:
|
entailment
|
def deploy_stack(awsclient, context, conf, cloudformation, override_stack_policy=False):
"""Deploy the stack to AWS cloud. Does either create or update the stack.
:param conf:
:param override_stack_policy:
:return: exit_code
"""
stack_name = _get_stack_name(conf)
parameters = _generate_parameters(conf)
if stack_exists(awsclient, stack_name):
exit_code = _update_stack(awsclient, context, conf, cloudformation,
parameters, override_stack_policy)
else:
exit_code = _create_stack(awsclient, context, conf, cloudformation,
parameters)
# add 'stack_output' to the context so it becomes available
# in 'command_finalized' hook
context['stack_output'] = _get_stack_outputs(
awsclient.get_client('cloudformation'), stack_name)
_call_hook(awsclient, conf, stack_name, parameters, cloudformation,
hook='post_hook',
message='CloudFormation is done, now executing post hook...')
return exit_code
|
Deploy the stack to AWS cloud. Does either create or update the stack.
:param conf:
:param override_stack_policy:
:return: exit_code
|
entailment
|
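A hedged usage sketch of deploy_stack; the conf layout mirrors what _get_stack_name and _generate_parameters read elsewhere in this module, and 'cloudformation' is the module returned by load_cloudformation_template above. Concrete values are assumptions:

# illustrative only; not a documented gcdt entry point
context = {'tool': 'kumo', 'command': 'deploy'}   # made-up context
conf = {
    'stack': {'StackName': 'my-sample-stack'},
    'parameters': {'InstanceType': 't2.micro'},
}

exit_code = deploy_stack(awsclient, context, conf, cloudformation)
# after the call the stack outputs are available to the 'command_finalized' hook:
outputs = context['stack_output']
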
def delete_stack(awsclient, conf, feedback=True):
"""Delete the stack from AWS cloud.
:param awsclient:
:param conf:
:param feedback: print out stack events (defaults to True)
"""
client_cf = awsclient.get_client('cloudformation')
stack_name = _get_stack_name(conf)
last_event = _get_stack_events_last_timestamp(awsclient, stack_name)
request = {}
dict_selective_merge(request, conf['stack'], ['StackName', 'RoleARN'])
response = client_cf.delete_stack(**request)
if feedback:
return _poll_stack_events(awsclient, stack_name, last_event)
|
Delete the stack from AWS cloud.
:param awsclient:
:param conf:
:param feedback: print out stack events (defaults to True)
|
entailment
|
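A hedged illustration of how dict_selective_merge shapes the delete request; only the whitelisted keys are copied, all values below are invented:

# a conf like this ...
conf = {
    'stack': {
        'StackName': 'my-sample-stack',
        'RoleARN': 'arn:aws:iam::123456789012:role/cfn-service-role',
        'TemplateBody': '... ignored for delete ...',
    }
}
# ... results in a call equivalent to:
# client_cf.delete_stack(
#     StackName='my-sample-stack',
#     RoleARN='arn:aws:iam::123456789012:role/cfn-service-role')
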
def list_stacks(awsclient):
"""Print out the list of stacks deployed at AWS cloud.
:param awsclient:
:return:
"""
client_cf = awsclient.get_client('cloudformation')
response = client_cf.list_stacks(
StackStatusFilter=[
'CREATE_IN_PROGRESS', 'CREATE_COMPLETE', 'ROLLBACK_IN_PROGRESS',
'ROLLBACK_COMPLETE', 'DELETE_IN_PROGRESS', 'DELETE_FAILED',
'UPDATE_IN_PROGRESS', 'UPDATE_COMPLETE_CLEANUP_IN_PROGRESS',
'UPDATE_COMPLETE', 'UPDATE_ROLLBACK_IN_PROGRESS',
'UPDATE_ROLLBACK_FAILED',
'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS',
'UPDATE_ROLLBACK_COMPLETE',
]
)
result = {}
stack_sum = 0
for summary in response['StackSummaries']:
result['StackName'] = summary["StackName"]
result['CreationTime'] = summary['CreationTime']
result['StackStatus'] = summary['StackStatus']
print(json2table(result))
stack_sum += 1
print('listed %s stacks' % str(stack_sum))
|
Print out the list of stacks deployed at AWS cloud.
:param awsclient:
:return:
|
entailment
|
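list_stacks above reads only the first response page; a hedged sketch of following NextToken with the same boto3 call (the helper name is made up):

def iter_stack_summaries(client_cf, status_filter):
    # generator over all StackSummaries, following pagination tokens
    kwargs = {'StackStatusFilter': status_filter}
    while True:
        response = client_cf.list_stacks(**kwargs)
        for summary in response['StackSummaries']:
            yield summary
        token = response.get('NextToken')
        if not token:
            return
        kwargs['NextToken'] = token
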
def describe_change_set(awsclient, change_set_name, stack_name):
"""Print out the change_set to console.
This needs to run create_change_set first.
:param awsclient:
:param change_set_name:
:param stack_name:
"""
client = awsclient.get_client('cloudformation')
status = None
while status not in ['CREATE_COMPLETE', 'FAILED']:
response = client.describe_change_set(
ChangeSetName=change_set_name,
StackName=stack_name)
status = response['Status']
# print('##### %s' % status)
if status == 'FAILED':
print(response['StatusReason'])
elif status == 'CREATE_COMPLETE':
for change in response['Changes']:
print(json2table(change['ResourceChange']))
|
Print out the change_set to console.
This needs to run create_change_set first.
:param awsclient:
:param change_set_name:
:param stack_name:
|
entailment
|
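A hedged end-to-end sketch tying the change-set helpers together: create a change set with boto3, inspect it with describe_change_set above, then clean it up with delete_change_set. Stack name, change set name and template_body are assumptions:

client = awsclient.get_client('cloudformation')
client.create_change_set(
    StackName='my-sample-stack',
    ChangeSetName='my-sample-changeset',
    ChangeSetType='UPDATE',
    TemplateBody=template_body,   # JSON template string, assumed to exist
    Parameters=[
        {'ParameterKey': 'InstanceType', 'ParameterValue': 't2.small'},
    ],
)
describe_change_set(awsclient, 'my-sample-changeset', 'my-sample-stack')
delete_change_set(awsclient, 'my-sample-changeset', 'my-sample-stack')
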
def delete_change_set(awsclient, change_set_name, stack_name):
"""Delete specified change set. Currently we only use this during
automated regression testing. But we have plans so lets locate this
functionality here
:param awsclient:
:param change_set_name:
:param stack_name:
"""
client = awsclient.get_client('cloudformation')
response = client.delete_change_set(
ChangeSetName=change_set_name,
StackName=stack_name)
|
Delete specified change set. Currently we only use this during
automated regression testing. But we have plans so lets locate this
functionality here
:param awsclient:
:param change_set_name:
:param stack_name:
|
entailment
|
def write_template_to_file(conf, template_body):
"""Writes the template to disk
"""
template_file_name = _get_stack_name(conf) + '-generated-cf-template.json'
with open(template_file_name, 'w') as opened_file:
opened_file.write(template_body)
print('wrote cf-template for %s to disk: %s' % (
get_env(), template_file_name))
return template_file_name
|
Writes the template to disk
|
entailment
|
def generate_template(context, config, cloudformation):
"""call cloudformation to generate the template (json format).
:param context:
:param config:
:param cloudformation:
:return:
"""
spec = inspect.getargspec(cloudformation.generate_template)[0]
if len(spec) == 0:
return cloudformation.generate_template()
elif spec == ['context', 'config']:
return cloudformation.generate_template(context, config)
else:
raise Exception('Arguments of \'generate_template\' not as expected: %s' % spec)
|
call cloudformation to generate the template (json format).
:param context:
:param config:
:param cloudformation:
:return:
|
entailment
|
def info(awsclient, config, format=None):
"""
collect info and output to console
:param awsclient:
:param config:
    :param format: 'json' or 'tabular' output format (defaults to 'tabular')
:return:
"""
if format is None:
format = 'tabular'
stack_name = _get_stack_name(config)
client_cfn = awsclient.get_client('cloudformation')
resources = all_pages(
client_cfn.list_stack_resources,
{'StackName': stack_name},
lambda x: [(r['ResourceType'], r['LogicalResourceId'], r['ResourceStatus'])
for r in x['StackResourceSummaries']]
)
infos = {
'stack_output': _get_stack_outputs(client_cfn, stack_name),
'stack_state': _get_stack_state(client_cfn, stack_name),
'resources': resources
}
if format == 'json':
print(json.dumps(infos))
elif format == 'tabular':
print('stack output:')
print(tabulate(infos['stack_output'], tablefmt='fancy_grid'))
print('\nstack_state: %s' % infos['stack_state'])
print('\nresources:')
print(tabulate(infos['resources'], tablefmt='fancy_grid'))
|
collect info and output to console
:param awsclient:
:param config:
:param format: 'json' or 'tabular' output format (defaults to 'tabular')
:return:
|
entailment
|
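all_pages is imported from a gcdt helper module not shown here; this is only a hedged guess at its behaviour, kept consistent with the call site above (call the client method repeatedly, feed each page through the selector, follow NextToken):

def all_pages(method, request, selector):
    # hypothetical implementation; the real gcdt helper may differ
    results = []
    while True:
        response = method(**request)
        results.extend(selector(response))
        token = response.get('NextToken')
        if not token:
            return results
        request = dict(request, NextToken=token)
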
def BCC(self, params):
"""
BCC label
Branch to the instruction at label if the C flag is not set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BCC label
def BCC_func():
if not self.is_C_set():
self.register['PC'] = self.labels[label]
return BCC_func
|
BCC label
Branch to the instruction at label if the C flag is not set
|
entailment
|
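The branch instructions in this class all follow the same compile-then-execute pattern: parsing and argument checks happen once, and the returned closure is what gets executed per step. A hedged sketch (not the simulator's actual run loop) of how such compiled closures might be stepped:

def run(simulator, compiled_instructions):
    # assumption: 'PC' indexes into the list of compiled closures; each
    # closure either falls through or rewrites PC, exactly like BCC_func above
    while simulator.register['PC'] < len(compiled_instructions):
        pc = simulator.register['PC']
        simulator.register['PC'] = pc + 1
        compiled_instructions[pc]()
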
def BCS(self, params):
"""
BCS label
Branch to the instruction at label if the C flag is set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BCS label
def BCS_func():
if self.is_C_set():
self.register['PC'] = self.labels[label]
return BCS_func
|
BCS label
Branch to the instruction at label if the C flag is set
|
entailment
|
def BEQ(self, params):
"""
BEQ label
Branch to the instruction at label if the Z flag is set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BEQ label
def BEQ_func():
if self.is_Z_set():
self.register['PC'] = self.labels[label]
return BEQ_func
|
BEQ label
Branch to the instruction at label if the Z flag is set
|
entailment
|
def BGE(self, params):
"""
BGE label
Branch to the instruction at label if the N flag is the same as the V flag
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BGE label
def BGE_func():
if self.is_N_set() == self.is_V_set():
self.register['PC'] = self.labels[label]
return BGE_func
|
BGE label
Branch to the instruction at label if the N flag is the same as the V flag
|
entailment
|
def BGT(self, params):
"""
BGT label
Branch to the instruction at label if the N flag is the same as the V flag and the Z flag is not set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BGT label
def BGT_func():
if (self.is_N_set() == self.is_V_set()) and not self.is_Z_set():
self.register['PC'] = self.labels[label]
return BGT_func
|
BGT label
Branch to the instruction at label if the N flag is the same as the V flag and the Z flag is not set
|
entailment
|
def BHI(self, params):
"""
BHI label
Branch to the instruction at label if the C flag is set and the Z flag is not set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BHI label
def BHI_func():
if self.is_C_set() and not self.is_Z_set():
self.register['PC'] = self.labels[label]
return BHI_func
|
BHI label
Branch to the instruction at label if the C flag is set and the Z flag is not set
|
entailment
|
def BHS(self, params):
"""
BHS label
Branch to the instruction at label if the C flag is set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BHS label
def BHS_func():
if self.is_C_set():
self.register['PC'] = self.labels[label]
return BHS_func
|
BHS label
Branch to the instruction at label if the C flag is set
|
entailment
|
def BLE(self, params):
"""
BLE label
Branch to the instruction at label if the Z flag is set or if the N flag is not the same as the V flag
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BLE label
def BLE_func():
if self.is_Z_set() or (self.is_N_set() != self.is_V_set()):
self.register['PC'] = self.labels[label]
return BLE_func
|
BLE label
Branch to the instruction at label if the Z flag is set or if the N flag is not the same as the V flag
|
entailment
|
def BLO(self, params):
"""
BLO label
Branch to the instruction at label if the C flag is not set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BLO label
def BLO_func():
if not self.is_C_set():
self.register['PC'] = self.labels[label]
return BLO_func
|
BLO label
Branch to the instruction at label if the C flag is not set
|
entailment
|
def BLS(self, params):
"""
BLS label
Branch to the instruction at label if the C flag is not set or the Z flag is set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BLS label
def BLS_func():
if (not self.is_C_set()) or self.is_Z_set():
self.register['PC'] = self.labels[label]
return BLS_func
|
BLS label
Branch to the instruction at label if the C flag is not set or the Z flag is set
|
entailment
|
def BLT(self, params):
"""
BLT label
Branch to the instruction at label if the N flag is not the same as the V flag
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BLT label
def BLT_func():
if self.is_N_set() != self.is_V_set():
self.register['PC'] = self.labels[label]
return BLT_func
|
BLT label
Branch to the instruction at label if the N flag is not the same as the V flag
|
entailment
|
def BMI(self, params):
"""
BMI label
Branch to the instruction at label if the N flag is set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BMI label
def BMI_func():
if self.is_N_set():
self.register['PC'] = self.labels[label]
return BMI_func
|
BMI label
Branch to the instruction at label if the N flag is set
|
entailment
|
def BNE(self, params):
"""
BNE label
Branch to the instruction at label if the Z flag is not set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BNE label
def BNE_func():
if not self.is_Z_set():
self.register['PC'] = self.labels[label]
return BNE_func
|
BNE label
Branch to the instruction at label if the Z flag is not set
|
entailment
|
def BPL(self, params):
"""
BPL label
        Branch to the instruction at label if the N flag is not set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BPL label
def BPL_func():
if not self.is_N_set():
self.register['PC'] = self.labels[label]
return BPL_func
|
BPL label
Branch to the instruction at label if the N flag is not set
|
entailment
|
def BVC(self, params):
"""
BVC label
Branch to the instruction at label if the V flag is not set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BVC label
def BVC_func():
if not self.is_V_set():
self.register['PC'] = self.labels[label]
return BVC_func
|
BVC label
Branch to the instruction at label if the V flag is not set
|
entailment
|
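A hedged Python summary of the flag tests the branch instructions above implement; BHS/BLO behave like BCS/BCC because unsigned "higher or same"/"lower" map onto the carry flag, while the signed comparisons combine N, V and Z:

# condensed from the BXX methods above; arguments are the current flag values
CONDITIONS = {
    'BEQ': lambda Z, N, C, V: Z,                   # equal
    'BNE': lambda Z, N, C, V: not Z,               # not equal
    'BCS': lambda Z, N, C, V: C,                   # carry set (same as BHS)
    'BCC': lambda Z, N, C, V: not C,               # carry clear (same as BLO)
    'BHI': lambda Z, N, C, V: C and not Z,         # unsigned higher
    'BLS': lambda Z, N, C, V: (not C) or Z,        # unsigned lower or same
    'BGE': lambda Z, N, C, V: N == V,              # signed >=
    'BGT': lambda Z, N, C, V: (N == V) and not Z,  # signed >
    'BLT': lambda Z, N, C, V: N != V,              # signed <
    'BLE': lambda Z, N, C, V: Z or (N != V),       # signed <=
    'BMI': lambda Z, N, C, V: N,                   # negative
    'BPL': lambda Z, N, C, V: not N,               # positive or zero
    'BVC': lambda Z, N, C, V: not V,               # overflow clear
}
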