content stringlengths 5 1.05M |
|---|
from typing import Any, Dict, Iterable, Optional
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.decorator import webhook_view
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_error, json_success
from zerver.lib.validator import check_dict, check_string
from zerver.models import UserProfile
@webhook_view('AzureDevops_SG')
@has_request_variables
def api_azuredevops_sg_webhook(
    request: HttpRequest, user_profile: UserProfile,
    payload: Dict[str, Iterable[Dict[str, Any]]]=REQ(argument_type='body'),
    topic: str=REQ(default='coverage')
) -> HttpResponse:
    """Relay an Azure DevOps service-hook notification into Zulip.

    The hook payload carries a pre-rendered markdown summary under
    ``detailedMessage.markdown``; that text is forwarded verbatim as the
    message body under the given topic (default ``coverage``).
    """
    message_body = payload['detailedMessage']["markdown"]
    # Forward the rendered text to the stream/topic configured for this webhook.
    check_send_webhook_message(request, user_profile, topic, message_body)
    return json_success()
|
# coding: utf-8
"""
Gitea API.
This documentation describes the Gitea API. # noqa: E501
OpenAPI spec version: 1.1.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from gitea_client.api_client import ApiClient
class UserApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_current_user_repo(self, **kwargs): # noqa: E501
"""Create a repository # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_current_user_repo(async_req=True)
>>> result = thread.get()
:param async_req bool
:param CreateRepoOption body:
:return: Repository
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_current_user_repo_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_current_user_repo_with_http_info(**kwargs) # noqa: E501
return data
    def create_current_user_repo_with_http_info(self, **kwargs):  # noqa: E501
        """Create a repository  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_current_user_repo_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param CreateRepoOption body:
        :return: Repository
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint parameters plus the common per-request transport options.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() is taken while only the declared arguments (and `kwargs`)
        # are bound; unknown keywords are rejected, known ones are flattened
        # into `params`. Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_current_user_repo" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        # ApiClient performs the HTTP round trip and optional async dispatch.
        return self.api_client.call_api(
            '/user/repos', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Repository',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_add_email(self, **kwargs): # noqa: E501
"""Add email addresses # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_add_email(async_req=True)
>>> result = thread.get()
:param async_req bool
:param CreateEmailOption body:
:return: list[Email]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_add_email_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.user_add_email_with_http_info(**kwargs) # noqa: E501
return data
    def user_add_email_with_http_info(self, **kwargs):  # noqa: E501
        """Add email addresses  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_add_email_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param CreateEmailOption body:
        :return: list[Email]
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint parameters plus the common per-request transport options.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() is taken while only the declared arguments (and `kwargs`)
        # are bound; unknown keywords are rejected, known ones are flattened
        # into `params`. Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_add_email" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        # ApiClient performs the HTTP round trip and optional async dispatch.
        return self.api_client.call_api(
            '/user/emails', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[Email]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_check_following(self, follower, followee, **kwargs): # noqa: E501
"""Check if one user is following another user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_check_following(follower, followee, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str follower: username of following user (required)
:param str followee: username of followed user (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_check_following_with_http_info(follower, followee, **kwargs) # noqa: E501
else:
(data) = self.user_check_following_with_http_info(follower, followee, **kwargs) # noqa: E501
return data
    def user_check_following_with_http_info(self, follower, followee, **kwargs):  # noqa: E501
        """Check if one user is following another user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_check_following_with_http_info(follower, followee, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str follower: username of following user (required)
        :param str followee: username of followed user (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint parameters plus the common per-request transport options.
        all_params = ['follower', 'followee']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() is taken while only the declared arguments (and `kwargs`)
        # are bound; unknown keywords are rejected, known ones are flattened
        # into `params`. Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_check_following" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'follower' is set
        if ('follower' not in params or
                params['follower'] is None):
            raise ValueError("Missing the required parameter `follower` when calling `user_check_following`")  # noqa: E501
        # verify the required parameter 'followee' is set
        if ('followee' not in params or
                params['followee'] is None):
            raise ValueError("Missing the required parameter `followee` when calling `user_check_following`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'follower' in params:
            path_params['follower'] = params['follower']  # noqa: E501
        if 'followee' in params:
            path_params['followee'] = params['followee']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'text/html'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        # ApiClient performs the HTTP round trip and optional async dispatch.
        return self.api_client.call_api(
            '/users/{follower}/following/{followee}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_create_o_auth2_application(self, body, **kwargs): # noqa: E501
"""creates a new OAuth2 application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_create_o_auth2_application(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CreateOAuth2ApplicationOptions body: (required)
:return: OAuth2Application
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_create_o_auth2_application_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.user_create_o_auth2_application_with_http_info(body, **kwargs) # noqa: E501
return data
    def user_create_o_auth2_application_with_http_info(self, body, **kwargs):  # noqa: E501
        """creates a new OAuth2 application  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_create_o_auth2_application_with_http_info(body, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param CreateOAuth2ApplicationOptions body: (required)
        :return: OAuth2Application
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint parameters plus the common per-request transport options.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() is taken while only the declared arguments (and `kwargs`)
        # are bound; unknown keywords are rejected, known ones are flattened
        # into `params`. Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_create_o_auth2_application" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `user_create_o_auth2_application`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        # ApiClient performs the HTTP round trip and optional async dispatch.
        return self.api_client.call_api(
            '/user/applications/oauth2', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='OAuth2Application',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_create_token(self, username, **kwargs): # noqa: E501
"""Create an access token # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_create_token(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param AccessToken access_token:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_create_token_with_http_info(username, **kwargs) # noqa: E501
else:
(data) = self.user_create_token_with_http_info(username, **kwargs) # noqa: E501
return data
    def user_create_token_with_http_info(self, username, **kwargs):  # noqa: E501
        """Create an access token  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_create_token_with_http_info(username, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str username: username of user (required)
        :param AccessToken access_token:
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint parameters plus the common per-request transport options.
        all_params = ['username', 'access_token']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() is taken while only the declared arguments (and `kwargs`)
        # are bound; unknown keywords are rejected, known ones are flattened
        # into `params`. Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_create_token" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in params or
                params['username'] is None):
            raise ValueError("Missing the required parameter `username` when calling `user_create_token`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'username' in params:
            path_params['username'] = params['username']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The optional AccessToken option object is sent as the request body.
        if 'access_token' in params:
            body_params = params['access_token']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        # ApiClient performs the HTTP round trip and optional async dispatch.
        return self.api_client.call_api(
            '/users/{username}/tokens', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_current_check_following(self, username, **kwargs): # noqa: E501
"""Check whether a user is followed by the authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_current_check_following(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of followed user (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_current_check_following_with_http_info(username, **kwargs) # noqa: E501
else:
(data) = self.user_current_check_following_with_http_info(username, **kwargs) # noqa: E501
return data
    def user_current_check_following_with_http_info(self, username, **kwargs):  # noqa: E501
        """Check whether a user is followed by the authenticated user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_current_check_following_with_http_info(username, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str username: username of followed user (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint parameters plus the common per-request transport options.
        all_params = ['username']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() is taken while only the declared arguments (and `kwargs`)
        # are bound; unknown keywords are rejected, known ones are flattened
        # into `params`. Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_current_check_following" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in params or
                params['username'] is None):
            raise ValueError("Missing the required parameter `username` when calling `user_current_check_following`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'username' in params:
            path_params['username'] = params['username']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'text/html'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        # ApiClient performs the HTTP round trip and optional async dispatch.
        return self.api_client.call_api(
            '/user/following/{username}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_current_check_starring(self, owner, repo, **kwargs): # noqa: E501
"""Whether the authenticated is starring the repo # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_current_check_starring(owner, repo, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str owner: owner of the repo (required)
:param str repo: name of the repo (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_current_check_starring_with_http_info(owner, repo, **kwargs) # noqa: E501
else:
(data) = self.user_current_check_starring_with_http_info(owner, repo, **kwargs) # noqa: E501
return data
    def user_current_check_starring_with_http_info(self, owner, repo, **kwargs):  # noqa: E501
        """Whether the authenticated is starring the repo  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_current_check_starring_with_http_info(owner, repo, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str owner: owner of the repo (required)
        :param str repo: name of the repo (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint parameters plus the common per-request transport options.
        all_params = ['owner', 'repo']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() is taken while only the declared arguments (and `kwargs`)
        # are bound; unknown keywords are rejected, known ones are flattened
        # into `params`. Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_current_check_starring" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'owner' is set
        if ('owner' not in params or
                params['owner'] is None):
            raise ValueError("Missing the required parameter `owner` when calling `user_current_check_starring`")  # noqa: E501
        # verify the required parameter 'repo' is set
        if ('repo' not in params or
                params['repo'] is None):
            raise ValueError("Missing the required parameter `repo` when calling `user_current_check_starring`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'owner' in params:
            path_params['owner'] = params['owner']  # noqa: E501
        if 'repo' in params:
            path_params['repo'] = params['repo']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'text/html'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        # ApiClient performs the HTTP round trip and optional async dispatch.
        return self.api_client.call_api(
            '/user/starred/{owner}/{repo}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_current_delete_follow(self, username, **kwargs): # noqa: E501
"""Unfollow a user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_current_delete_follow(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user to unfollow (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_current_delete_follow_with_http_info(username, **kwargs) # noqa: E501
else:
(data) = self.user_current_delete_follow_with_http_info(username, **kwargs) # noqa: E501
return data
    def user_current_delete_follow_with_http_info(self, username, **kwargs):  # noqa: E501
        """Unfollow a user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_current_delete_follow_with_http_info(username, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str username: username of user to unfollow (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint parameters plus the common per-request transport options.
        all_params = ['username']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() is taken while only the declared arguments (and `kwargs`)
        # are bound; unknown keywords are rejected, known ones are flattened
        # into `params`. Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_current_delete_follow" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in params or
                params['username'] is None):
            raise ValueError("Missing the required parameter `username` when calling `user_current_delete_follow`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'username' in params:
            path_params['username'] = params['username']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'text/html'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        # ApiClient performs the HTTP round trip and optional async dispatch.
        return self.api_client.call_api(
            '/user/following/{username}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_current_delete_gpg_key(self, id, **kwargs): # noqa: E501
"""Remove a GPG key # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_current_delete_gpg_key(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: id of key to delete (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_current_delete_gpg_key_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.user_current_delete_gpg_key_with_http_info(id, **kwargs) # noqa: E501
return data
    def user_current_delete_gpg_key_with_http_info(self, id, **kwargs):  # noqa: E501
        """Remove a GPG key  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_current_delete_gpg_key_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param int id: id of key to delete (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint parameters plus the common per-request transport options.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() is taken while only the declared arguments (and `kwargs`)
        # are bound; unknown keywords are rejected, known ones are flattened
        # into `params`. Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_current_delete_gpg_key" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `user_current_delete_gpg_key`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        # ApiClient performs the HTTP round trip and optional async dispatch.
        return self.api_client.call_api(
            '/user/gpg_keys/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_current_delete_key(self, id, **kwargs): # noqa: E501
"""Delete a public key # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_current_delete_key(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: id of key to delete (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_current_delete_key_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.user_current_delete_key_with_http_info(id, **kwargs) # noqa: E501
return data
    def user_current_delete_key_with_http_info(self, id, **kwargs):  # noqa: E501
        """Delete a public key  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_current_delete_key_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param int id: id of key to delete (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint parameters plus the common per-request transport options.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() is taken while only the declared arguments (and `kwargs`)
        # are bound; unknown keywords are rejected, known ones are flattened
        # into `params`. Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_current_delete_key" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `user_current_delete_key`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        # ApiClient performs the HTTP round trip and optional async dispatch.
        return self.api_client.call_api(
            '/user/keys/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_current_delete_star(self, owner, repo, **kwargs):  # noqa: E501
    """Unstar the given repo  # noqa: E501

    Convenience wrapper around ``user_current_delete_star_with_http_info``
    that returns only the response data.  This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, please
    pass async_req=True

    >>> thread = api.user_current_delete_star(owner, repo, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str owner: owner of the repo to unstar (required)
    :param str repo: name of the repo to unstar (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want just the data, never the
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Async returns the request thread, sync returns the data; in both
    # cases the *_with_http_info result is forwarded unchanged.
    return self.user_current_delete_star_with_http_info(owner, repo, **kwargs)  # noqa: E501
def user_current_delete_star_with_http_info(self, owner, repo, **kwargs):  # noqa: E501
    """Unstar the given repo  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_delete_star_with_http_info(owner, repo, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str owner: owner of the repo to unstar (required)
    :param str repo: name of the repo to unstar (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = frozenset([
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout'])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_delete_star" % name
            )
    # Both path parameters are mandatory.
    if owner is None:
        raise ValueError("Missing the required parameter `owner` when calling `user_current_delete_star`")  # noqa: E501
    if repo is None:
        raise ValueError("Missing the required parameter `repo` when calling `user_current_delete_star`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/html']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/user/starred/{owner}/{repo}', 'DELETE',
        {'owner': owner, 'repo': repo},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'],  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def user_current_get_gpg_key(self, id, **kwargs):  # noqa: E501
    """Get a GPG key  # noqa: E501

    Convenience wrapper around ``user_current_get_gpg_key_with_http_info``
    that returns only the response data.  This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, please
    pass async_req=True

    >>> thread = api.user_current_get_gpg_key(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of key to get (required)
    :return: GPGKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the (status, headers) parts of the response for callers.
    kwargs['_return_http_data_only'] = True
    # Async returns the request thread, sync returns the data; in both
    # cases the *_with_http_info result is forwarded unchanged.
    return self.user_current_get_gpg_key_with_http_info(id, **kwargs)  # noqa: E501
def user_current_get_gpg_key_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get a GPG key  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_get_gpg_key_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of key to get (required)
    :return: GPGKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = frozenset([
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout'])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_get_gpg_key" % name
            )
    # The key id path parameter is mandatory.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `user_current_get_gpg_key`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/user/gpg_keys/{id}', 'GET',
        {'id': id},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='GPGKey',  # noqa: E501
        auth_settings=['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'],  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def user_current_get_key(self, id, **kwargs):  # noqa: E501
    """Get a public key  # noqa: E501

    Convenience wrapper around ``user_current_get_key_with_http_info``
    that returns only the response data.  This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, please
    pass async_req=True

    >>> thread = api.user_current_get_key(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of key to get (required)
    :return: PublicKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the (status, headers) parts of the response for callers.
    kwargs['_return_http_data_only'] = True
    # Async returns the request thread, sync returns the data; in both
    # cases the *_with_http_info result is forwarded unchanged.
    return self.user_current_get_key_with_http_info(id, **kwargs)  # noqa: E501
def user_current_get_key_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get a public key  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_get_key_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of key to get (required)
    :return: PublicKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = frozenset([
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout'])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_get_key" % name
            )
    # The key id path parameter is mandatory.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `user_current_get_key`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/user/keys/{id}', 'GET',
        {'id': id},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='PublicKey',  # noqa: E501
        auth_settings=['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'],  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def user_current_list_followers(self, **kwargs):  # noqa: E501
    """List the authenticated user's followers  # noqa: E501

    Convenience wrapper around ``user_current_list_followers_with_http_info``
    that returns only the response data.  This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, please
    pass async_req=True

    >>> thread = api.user_current_list_followers(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the (status, headers) parts of the response for callers.
    kwargs['_return_http_data_only'] = True
    # Async returns the request thread, sync returns the data; in both
    # cases the *_with_http_info result is forwarded unchanged.
    return self.user_current_list_followers_with_http_info(**kwargs)  # noqa: E501
def user_current_list_followers_with_http_info(self, **kwargs):  # noqa: E501
    """List the authenticated user's followers  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_list_followers_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = frozenset([
        'page', 'limit', 'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout'])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_list_followers" % name
            )

    # Pagination parameters are optional and passed through verbatim.
    query_params = []
    if 'page' in kwargs:
        query_params.append(('page', kwargs['page']))  # noqa: E501
    if 'limit' in kwargs:
        query_params.append(('limit', kwargs['limit']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/user/followers', 'GET',
        {},  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[User]',  # noqa: E501
        auth_settings=['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'],  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def user_current_list_following(self, **kwargs):  # noqa: E501
    """List the users that the authenticated user is following  # noqa: E501

    Convenience wrapper around ``user_current_list_following_with_http_info``
    that returns only the response data.  This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, please
    pass async_req=True

    >>> thread = api.user_current_list_following(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the (status, headers) parts of the response for callers.
    kwargs['_return_http_data_only'] = True
    # Async returns the request thread, sync returns the data; in both
    # cases the *_with_http_info result is forwarded unchanged.
    return self.user_current_list_following_with_http_info(**kwargs)  # noqa: E501
def user_current_list_following_with_http_info(self, **kwargs):  # noqa: E501
    """List the users that the authenticated user is following  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_list_following_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = frozenset([
        'page', 'limit', 'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout'])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_list_following" % name
            )

    # Pagination parameters are optional and passed through verbatim.
    query_params = []
    if 'page' in kwargs:
        query_params.append(('page', kwargs['page']))  # noqa: E501
    if 'limit' in kwargs:
        query_params.append(('limit', kwargs['limit']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/user/following', 'GET',
        {},  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[User]',  # noqa: E501
        auth_settings=['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'],  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def user_current_list_gpg_keys(self, **kwargs):  # noqa: E501
    """List the authenticated user's GPG keys  # noqa: E501

    Convenience wrapper around ``user_current_list_gpg_keys_with_http_info``
    that returns only the response data.  This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, please
    pass async_req=True

    >>> thread = api.user_current_list_gpg_keys(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[GPGKey]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the (status, headers) parts of the response for callers.
    kwargs['_return_http_data_only'] = True
    # Async returns the request thread, sync returns the data; in both
    # cases the *_with_http_info result is forwarded unchanged.
    return self.user_current_list_gpg_keys_with_http_info(**kwargs)  # noqa: E501
def user_current_list_gpg_keys_with_http_info(self, **kwargs):  # noqa: E501
    """List the authenticated user's GPG keys  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_list_gpg_keys_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[GPGKey]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = frozenset([
        'page', 'limit', 'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout'])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_list_gpg_keys" % name
            )

    # Pagination parameters are optional and passed through verbatim.
    query_params = []
    if 'page' in kwargs:
        query_params.append(('page', kwargs['page']))  # noqa: E501
    if 'limit' in kwargs:
        query_params.append(('limit', kwargs['limit']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/user/gpg_keys', 'GET',
        {},  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[GPGKey]',  # noqa: E501
        auth_settings=['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'],  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def user_current_list_keys(self, **kwargs):  # noqa: E501
    """List the authenticated user's public keys  # noqa: E501

    Convenience wrapper around ``user_current_list_keys_with_http_info``
    that returns only the response data.  This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, please
    pass async_req=True

    >>> thread = api.user_current_list_keys(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str fingerprint: fingerprint of the key
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[PublicKey]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the (status, headers) parts of the response for callers.
    kwargs['_return_http_data_only'] = True
    # Async returns the request thread, sync returns the data; in both
    # cases the *_with_http_info result is forwarded unchanged.
    return self.user_current_list_keys_with_http_info(**kwargs)  # noqa: E501
def user_current_list_keys_with_http_info(self, **kwargs):  # noqa: E501
    """List the authenticated user's public keys  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_list_keys_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str fingerprint: fingerprint of the key
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[PublicKey]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = frozenset([
        'fingerprint', 'page', 'limit', 'async_req',
        '_return_http_data_only', '_preload_content', '_request_timeout'])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_list_keys" % name
            )

    # Filter and pagination parameters are optional, passed through verbatim.
    query_params = []
    if 'fingerprint' in kwargs:
        query_params.append(('fingerprint', kwargs['fingerprint']))  # noqa: E501
    if 'page' in kwargs:
        query_params.append(('page', kwargs['page']))  # noqa: E501
    if 'limit' in kwargs:
        query_params.append(('limit', kwargs['limit']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/user/keys', 'GET',
        {},  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[PublicKey]',  # noqa: E501
        auth_settings=['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'],  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def user_current_list_repos(self, **kwargs):  # noqa: E501
    """List the repos that the authenticated user owns or has access to  # noqa: E501

    Convenience wrapper around ``user_current_list_repos_with_http_info``
    that returns only the response data.  This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, please
    pass async_req=True

    >>> thread = api.user_current_list_repos(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the (status, headers) parts of the response for callers.
    kwargs['_return_http_data_only'] = True
    # Async returns the request thread, sync returns the data; in both
    # cases the *_with_http_info result is forwarded unchanged.
    return self.user_current_list_repos_with_http_info(**kwargs)  # noqa: E501
def user_current_list_repos_with_http_info(self, **kwargs):  # noqa: E501
    """List the repos that the authenticated user owns or has access to  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_list_repos_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = frozenset([
        'page', 'limit', 'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout'])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_list_repos" % name
            )

    # Pagination parameters are optional and passed through verbatim.
    query_params = []
    if 'page' in kwargs:
        query_params.append(('page', kwargs['page']))  # noqa: E501
    if 'limit' in kwargs:
        query_params.append(('limit', kwargs['limit']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/user/repos', 'GET',
        {},  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Repository]',  # noqa: E501
        auth_settings=['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'],  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def user_current_list_starred(self, **kwargs):  # noqa: E501
    """The repos that the authenticated user has starred  # noqa: E501

    Convenience wrapper around ``user_current_list_starred_with_http_info``
    that returns only the response data.  This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, please
    pass async_req=True

    >>> thread = api.user_current_list_starred(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the (status, headers) parts of the response for callers.
    kwargs['_return_http_data_only'] = True
    # Async returns the request thread, sync returns the data; in both
    # cases the *_with_http_info result is forwarded unchanged.
    return self.user_current_list_starred_with_http_info(**kwargs)  # noqa: E501
def user_current_list_starred_with_http_info(self, **kwargs):  # noqa: E501
    """The repos that the authenticated user has starred  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_list_starred_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = frozenset([
        'page', 'limit', 'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout'])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_list_starred" % name
            )

    # Pagination parameters are optional and passed through verbatim.
    query_params = []
    if 'page' in kwargs:
        query_params.append(('page', kwargs['page']))  # noqa: E501
    if 'limit' in kwargs:
        query_params.append(('limit', kwargs['limit']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/user/starred', 'GET',
        {},  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Repository]',  # noqa: E501
        auth_settings=['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'],  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def user_current_list_subscriptions(self, **kwargs):  # noqa: E501
    """List repositories watched by the authenticated user  # noqa: E501

    Convenience wrapper around ``user_current_list_subscriptions_with_http_info``
    that returns only the response data.  This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, please
    pass async_req=True

    >>> thread = api.user_current_list_subscriptions(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the (status, headers) parts of the response for callers.
    kwargs['_return_http_data_only'] = True
    # Async returns the request thread, sync returns the data; in both
    # cases the *_with_http_info result is forwarded unchanged.
    return self.user_current_list_subscriptions_with_http_info(**kwargs)  # noqa: E501
def user_current_list_subscriptions_with_http_info(self, **kwargs):  # noqa: E501
    """List repositories watched by the authenticated user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_list_subscriptions_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = frozenset([
        'page', 'limit', 'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout'])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_list_subscriptions" % name
            )

    # Pagination parameters are optional and passed through verbatim.
    query_params = []
    if 'page' in kwargs:
        query_params.append(('page', kwargs['page']))  # noqa: E501
    if 'limit' in kwargs:
        query_params.append(('limit', kwargs['limit']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/user/subscriptions', 'GET',
        {},  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Repository]',  # noqa: E501
        auth_settings=['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'],  # noqa: E501
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def user_current_post_gpg_key(self, **kwargs):  # noqa: E501
    """Create a GPG key  # noqa: E501

    Convenience wrapper around ``user_current_post_gpg_key_with_http_info``
    that returns only the response data.  This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, please
    pass async_req=True

    >>> thread = api.user_current_post_gpg_key(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CreateGPGKeyOption form:
    :return: GPGKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the (status, headers) parts of the response for callers.
    kwargs['_return_http_data_only'] = True
    # Async returns the request thread, sync returns the data; in both
    # cases the *_with_http_info result is forwarded unchanged.
    return self.user_current_post_gpg_key_with_http_info(**kwargs)  # noqa: E501
def user_current_post_gpg_key_with_http_info(self, **kwargs):  # noqa: E501
    """Create a GPG key  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_post_gpg_key_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CreateGPGKeyOption form:
    :return: GPGKey
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied
    """
    all_params = ['form']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is captured deliberately -- it snapshots self,
    # kwargs and all_params, and the loop below merges the validated
    # kwargs into that dict so it serves as the parameter lookup for
    # the rest of the method.  Do not reorder or rename locals above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_post_gpg_key" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request body is taken from the optional `form` kwarg.
    body_params = None
    if 'form' in params:
        body_params = params['form']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/user/gpg_keys', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='GPGKey',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_current_post_key(self, **kwargs):  # noqa: E501
    """Create a public key  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param CreateKeyOption body:
    :return: PublicKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the payload only.
    kwargs['_return_http_data_only'] = True
    # Sync payload and async request thread are both produced by the
    # *_with_http_info worker, so one return handles both modes.
    return self.user_current_post_key_with_http_info(**kwargs)  # noqa: E501
def user_current_post_key_with_http_info(self, **kwargs):  # noqa: E501
    """Create a public key  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_post_key_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CreateKeyOption body:
    :return: PublicKey
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot (self, kwargs, all_params); validated
    # kwargs are merged in below so this dict is the parameter lookup
    # for the rest of the method.  Do not reorder or rename locals above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_post_key" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request body is taken from the optional `body` kwarg.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/user/keys', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PublicKey',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_current_put_follow(self, username, **kwargs):  # noqa: E501
    """Follow a user  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param str username: username of user to follow (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the payload only.
    kwargs['_return_http_data_only'] = True
    # Sync payload and async request thread both come from the
    # *_with_http_info worker, so a single return covers both modes.
    return self.user_current_put_follow_with_http_info(username, **kwargs)  # noqa: E501
def user_current_put_follow_with_http_info(self, username, **kwargs):  # noqa: E501
    """Follow a user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_put_follow_with_http_info(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: username of user to follow (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied
    :raises ValueError: if `username` is missing or None
    """
    all_params = ['username']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot (self, username, kwargs, all_params);
    # validated kwargs are merged in below so this dict is the
    # parameter lookup for the rest of the method.  Do not reorder or
    # rename locals above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_put_follow" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in params or
            params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `user_current_put_follow`")  # noqa: E501

    collection_formats = {}

    # `username` is interpolated into the URL template below.
    path_params = {}
    if 'username' in params:
        path_params['username'] = params['username']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/user/following/{username}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_current_put_star(self, owner, repo, **kwargs):  # noqa: E501
    """Star the given repo  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param str owner: owner of the repo to star (required)
    :param str repo: name of the repo to star (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the payload only.
    kwargs['_return_http_data_only'] = True
    # Sync payload and async request thread both come from the
    # *_with_http_info worker, so a single return covers both modes.
    return self.user_current_put_star_with_http_info(owner, repo, **kwargs)  # noqa: E501
def user_current_put_star_with_http_info(self, owner, repo, **kwargs):  # noqa: E501
    """Star the given repo  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_put_star_with_http_info(owner, repo, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str owner: owner of the repo to star (required)
    :param str repo: name of the repo to star (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied
    :raises ValueError: if `owner` or `repo` is missing or None
    """
    all_params = ['owner', 'repo']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot (self, owner, repo, kwargs, all_params);
    # validated kwargs are merged in below so this dict is the
    # parameter lookup for the rest of the method.  Do not reorder or
    # rename locals above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_put_star" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'owner' is set
    if ('owner' not in params or
            params['owner'] is None):
        raise ValueError("Missing the required parameter `owner` when calling `user_current_put_star`")  # noqa: E501
    # verify the required parameter 'repo' is set
    if ('repo' not in params or
            params['repo'] is None):
        raise ValueError("Missing the required parameter `repo` when calling `user_current_put_star`")  # noqa: E501

    collection_formats = {}

    # `owner` and `repo` are interpolated into the URL template below.
    path_params = {}
    if 'owner' in params:
        path_params['owner'] = params['owner']  # noqa: E501
    if 'repo' in params:
        path_params['repo'] = params['repo']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/user/starred/{owner}/{repo}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_current_tracked_times(self, **kwargs):  # noqa: E501
    """List the current user's tracked times  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param datetime since: Only show times updated after the given time. This is a timestamp in RFC 3339 format
    :param datetime before: Only show times updated before the given time. This is a timestamp in RFC 3339 format
    :return: list[TrackedTime]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the payload only.
    kwargs['_return_http_data_only'] = True
    # Sync payload and async request thread both come from the
    # *_with_http_info worker, so a single return covers both modes.
    return self.user_current_tracked_times_with_http_info(**kwargs)  # noqa: E501
def user_current_tracked_times_with_http_info(self, **kwargs):  # noqa: E501
    """List the current user's tracked times  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_current_tracked_times_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param datetime since: Only show times updated after the given time. This is a timestamp in RFC 3339 format
    :param datetime before: Only show times updated before the given time. This is a timestamp in RFC 3339 format
    :return: list[TrackedTime]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied
    """
    all_params = ['since', 'before']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot (self, kwargs, all_params); validated
    # kwargs are merged in below so this dict is the parameter lookup
    # for the rest of the method.  Do not reorder or rename locals above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_current_tracked_times" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Optional time-window filters are passed as query parameters.
    query_params = []
    if 'since' in params:
        query_params.append(('since', params['since']))  # noqa: E501
    if 'before' in params:
        query_params.append(('before', params['before']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/user/times', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[TrackedTime]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_delete_access_token(self, username, token, **kwargs):  # noqa: E501
    """delete an access token  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param str username: username of user (required)
    :param int token: token to be deleted (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the payload only.
    kwargs['_return_http_data_only'] = True
    # Sync payload and async request thread both come from the
    # *_with_http_info worker, so a single return covers both modes.
    return self.user_delete_access_token_with_http_info(username, token, **kwargs)  # noqa: E501
def user_delete_access_token_with_http_info(self, username, token, **kwargs):  # noqa: E501
    """delete an access token  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_delete_access_token_with_http_info(username, token, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: username of user (required)
    :param int token: token to be deleted (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied
    :raises ValueError: if `username` or `token` is missing or None
    """
    all_params = ['username', 'token']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot (self, username, token, kwargs,
    # all_params); validated kwargs are merged in below so this dict is
    # the parameter lookup for the rest of the method.  Do not reorder
    # or rename locals above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_delete_access_token" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in params or
            params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `user_delete_access_token`")  # noqa: E501
    # verify the required parameter 'token' is set
    if ('token' not in params or
            params['token'] is None):
        raise ValueError("Missing the required parameter `token` when calling `user_delete_access_token`")  # noqa: E501

    collection_formats = {}

    # `username` and `token` are interpolated into the URL template below.
    path_params = {}
    if 'username' in params:
        path_params['username'] = params['username']  # noqa: E501
    if 'token' in params:
        path_params['token'] = params['token']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/users/{username}/tokens/{token}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_delete_email(self, **kwargs):  # noqa: E501
    """Delete email addresses  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param DeleteEmailOption body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the payload only.
    kwargs['_return_http_data_only'] = True
    # Sync payload and async request thread both come from the
    # *_with_http_info worker, so a single return covers both modes.
    return self.user_delete_email_with_http_info(**kwargs)  # noqa: E501
def user_delete_email_with_http_info(self, **kwargs):  # noqa: E501
    """Delete email addresses  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_delete_email_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DeleteEmailOption body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot (self, kwargs, all_params); validated
    # kwargs are merged in below so this dict is the parameter lookup
    # for the rest of the method.  Do not reorder or rename locals above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_delete_email" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE with a request body: addresses to remove come from `body`.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/user/emails', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_delete_o_auth2_application(self, id, **kwargs):  # noqa: E501
    """delete an OAuth2 Application  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param int id: token to be deleted (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the payload only.
    kwargs['_return_http_data_only'] = True
    # Sync payload and async request thread both come from the
    # *_with_http_info worker, so a single return covers both modes.
    return self.user_delete_o_auth2_application_with_http_info(id, **kwargs)  # noqa: E501
def user_delete_o_auth2_application_with_http_info(self, id, **kwargs):  # noqa: E501
    """delete an OAuth2 Application  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_delete_o_auth2_application_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: token to be deleted (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied
    :raises ValueError: if `id` is missing or None
    """
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot (self, id, kwargs, all_params); validated
    # kwargs are merged in below so this dict is the parameter lookup
    # for the rest of the method.  Do not reorder or rename locals above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_delete_o_auth2_application" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `user_delete_o_auth2_application`")  # noqa: E501

    collection_formats = {}

    # `id` is interpolated into the URL template below.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/user/applications/oauth2/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_get(self, username, **kwargs):  # noqa: E501
    """Get a user  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param str username: username of user to get (required)
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the payload only.
    kwargs['_return_http_data_only'] = True
    # Sync payload and async request thread both come from the
    # *_with_http_info worker, so a single return covers both modes.
    return self.user_get_with_http_info(username, **kwargs)  # noqa: E501
def user_get_with_http_info(self, username, **kwargs):  # noqa: E501
    """Get a user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_get_with_http_info(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: username of user to get (required)
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied
    :raises ValueError: if `username` is missing or None
    """
    all_params = ['username']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot (self, username, kwargs, all_params);
    # validated kwargs are merged in below so this dict is the
    # parameter lookup for the rest of the method.  Do not reorder or
    # rename locals above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in params or
            params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `user_get`")  # noqa: E501

    collection_formats = {}

    # `username` is interpolated into the URL template below.
    path_params = {}
    if 'username' in params:
        path_params['username'] = params['username']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/users/{username}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='User',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_get_current(self, **kwargs):  # noqa: E501
    """Get the authenticated user  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the payload only.
    kwargs['_return_http_data_only'] = True
    # Sync payload and async request thread both come from the
    # *_with_http_info worker, so a single return covers both modes.
    return self.user_get_current_with_http_info(**kwargs)  # noqa: E501
def user_get_current_with_http_info(self, **kwargs):  # noqa: E501
    """Get the authenticated user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_get_current_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied
    """
    # No endpoint-specific parameters; only the framework kwargs.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot (self, kwargs, all_params); validated
    # kwargs are merged in below so this dict is the parameter lookup
    # for the rest of the method.  Do not reorder or rename locals above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_get_current" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/user', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='User',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_get_heatmap_data(self, username, **kwargs):  # noqa: E501
    """Get a user's heatmap  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param str username: username of user to get (required)
    :return: list[UserHeatmapData]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the payload only.
    kwargs['_return_http_data_only'] = True
    # Sync payload and async request thread both come from the
    # *_with_http_info worker, so a single return covers both modes.
    return self.user_get_heatmap_data_with_http_info(username, **kwargs)  # noqa: E501
def user_get_heatmap_data_with_http_info(self, username, **kwargs):  # noqa: E501
    """Get a user's heatmap  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.user_get_heatmap_data_with_http_info(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: username of user to get (required)
    :return: list[UserHeatmapData]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied
    :raises ValueError: if `username` is missing or None
    """
    all_params = ['username']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot (self, username, kwargs, all_params);
    # validated kwargs are merged in below so this dict is the
    # parameter lookup for the rest of the method.  Do not reorder or
    # rename locals above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_get_heatmap_data" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in params or
            params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `user_get_heatmap_data`")  # noqa: E501

    collection_formats = {}

    # `username` is interpolated into the URL template below.
    path_params = {}
    if 'username' in params:
        path_params['username'] = params['username']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/users/{username}/heatmap', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[UserHeatmapData]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_get_o_auth2_application(self, id, **kwargs): # noqa: E501
"""get an OAuth2 Application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_get_o_auth2_application(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: Application ID to be found (required)
:return: OAuth2Application
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_get_o_auth2_application_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.user_get_o_auth2_application_with_http_info(id, **kwargs) # noqa: E501
return data
def user_get_o_auth2_application_with_http_info(self, id, **kwargs): # noqa: E501
"""get an OAuth2 Application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_get_o_auth2_application_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: Application ID to be found (required)
:return: OAuth2Application
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method user_get_o_auth2_application" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `user_get_o_auth2_application`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/plain']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/user/applications/oauth2/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OAuth2Application', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def user_get_oauth2_application(self, **kwargs): # noqa: E501
"""List the authenticated user's oauth2 applications # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_get_oauth2_application(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[OAuth2Application]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_get_oauth2_application_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.user_get_oauth2_application_with_http_info(**kwargs) # noqa: E501
return data
def user_get_oauth2_application_with_http_info(self, **kwargs): # noqa: E501
"""List the authenticated user's oauth2 applications # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_get_oauth2_application_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[OAuth2Application]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method user_get_oauth2_application" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/plain']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/user/applications/oauth2', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[OAuth2Application]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def user_get_stop_watches(self, **kwargs): # noqa: E501
"""Get list of all existing stopwatches # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_get_stop_watches(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[StopWatch]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_get_stop_watches_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.user_get_stop_watches_with_http_info(**kwargs) # noqa: E501
return data
def user_get_stop_watches_with_http_info(self, **kwargs): # noqa: E501
"""Get list of all existing stopwatches # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_get_stop_watches_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[StopWatch]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method user_get_stop_watches" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/user/stopwatches', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[StopWatch]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def user_get_tokens(self, username, **kwargs): # noqa: E501
"""List the authenticated user's access tokens # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_get_tokens(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[AccessToken]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_get_tokens_with_http_info(username, **kwargs) # noqa: E501
else:
(data) = self.user_get_tokens_with_http_info(username, **kwargs) # noqa: E501
return data
def user_get_tokens_with_http_info(self, username, **kwargs): # noqa: E501
"""List the authenticated user's access tokens # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_get_tokens_with_http_info(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[AccessToken]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['username', 'page', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method user_get_tokens" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'username' is set
if ('username' not in params or
params['username'] is None):
raise ValueError("Missing the required parameter `username` when calling `user_get_tokens`") # noqa: E501
collection_formats = {}
path_params = {}
if 'username' in params:
path_params['username'] = params['username'] # noqa: E501
query_params = []
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/plain']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/users/{username}/tokens', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[AccessToken]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def user_list_emails(self, **kwargs): # noqa: E501
"""List the authenticated user's email addresses # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_emails(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[Email]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_list_emails_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.user_list_emails_with_http_info(**kwargs) # noqa: E501
return data
def user_list_emails_with_http_info(self, **kwargs): # noqa: E501
"""List the authenticated user's email addresses # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_emails_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[Email]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method user_list_emails" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/plain']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/user/emails', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Email]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def user_list_followers(self, username, **kwargs): # noqa: E501
"""List the given user's followers # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_followers(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[User]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_list_followers_with_http_info(username, **kwargs) # noqa: E501
else:
(data) = self.user_list_followers_with_http_info(username, **kwargs) # noqa: E501
return data
def user_list_followers_with_http_info(self, username, **kwargs): # noqa: E501
"""List the given user's followers # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_followers_with_http_info(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[User]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['username', 'page', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method user_list_followers" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'username' is set
if ('username' not in params or
params['username'] is None):
raise ValueError("Missing the required parameter `username` when calling `user_list_followers`") # noqa: E501
collection_formats = {}
path_params = {}
if 'username' in params:
path_params['username'] = params['username'] # noqa: E501
query_params = []
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/plain']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/users/{username}/followers', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[User]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def user_list_following(self, username, **kwargs): # noqa: E501
"""List the users that the given user is following # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_following(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[User]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_list_following_with_http_info(username, **kwargs) # noqa: E501
else:
(data) = self.user_list_following_with_http_info(username, **kwargs) # noqa: E501
return data
def user_list_following_with_http_info(self, username, **kwargs): # noqa: E501
"""List the users that the given user is following # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_following_with_http_info(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[User]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['username', 'page', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method user_list_following" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'username' is set
if ('username' not in params or
params['username'] is None):
raise ValueError("Missing the required parameter `username` when calling `user_list_following`") # noqa: E501
collection_formats = {}
path_params = {}
if 'username' in params:
path_params['username'] = params['username'] # noqa: E501
query_params = []
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/plain']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/users/{username}/following', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[User]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def user_list_gpg_keys(self, username, **kwargs): # noqa: E501
"""List the given user's GPG keys # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_gpg_keys(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[GPGKey]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_list_gpg_keys_with_http_info(username, **kwargs) # noqa: E501
else:
(data) = self.user_list_gpg_keys_with_http_info(username, **kwargs) # noqa: E501
return data
def user_list_gpg_keys_with_http_info(self, username, **kwargs): # noqa: E501
"""List the given user's GPG keys # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_gpg_keys_with_http_info(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[GPGKey]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['username', 'page', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method user_list_gpg_keys" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'username' is set
if ('username' not in params or
params['username'] is None):
raise ValueError("Missing the required parameter `username` when calling `user_list_gpg_keys`") # noqa: E501
collection_formats = {}
path_params = {}
if 'username' in params:
path_params['username'] = params['username'] # noqa: E501
query_params = []
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/plain']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/users/{username}/gpg_keys', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[GPGKey]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def user_list_keys(self, username, **kwargs): # noqa: E501
"""List the given user's public keys # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_keys(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param str fingerprint: fingerprint of the key
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[PublicKey]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_list_keys_with_http_info(username, **kwargs) # noqa: E501
else:
(data) = self.user_list_keys_with_http_info(username, **kwargs) # noqa: E501
return data
def user_list_keys_with_http_info(self, username, **kwargs): # noqa: E501
"""List the given user's public keys # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_keys_with_http_info(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param str fingerprint: fingerprint of the key
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[PublicKey]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['username', 'fingerprint', 'page', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method user_list_keys" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'username' is set
if ('username' not in params or
params['username'] is None):
raise ValueError("Missing the required parameter `username` when calling `user_list_keys`") # noqa: E501
collection_formats = {}
path_params = {}
if 'username' in params:
path_params['username'] = params['username'] # noqa: E501
query_params = []
if 'fingerprint' in params:
query_params.append(('fingerprint', params['fingerprint'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/plain']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/users/{username}/keys', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[PublicKey]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def user_list_repos(self, username, **kwargs): # noqa: E501
"""List the repos owned by the given user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_repos(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[Repository]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_list_repos_with_http_info(username, **kwargs) # noqa: E501
else:
(data) = self.user_list_repos_with_http_info(username, **kwargs) # noqa: E501
return data
def user_list_repos_with_http_info(self, username, **kwargs): # noqa: E501
"""List the repos owned by the given user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_list_repos_with_http_info(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[Repository]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['username', 'page', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method user_list_repos" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'username' is set
if ('username' not in params or
params['username'] is None):
raise ValueError("Missing the required parameter `username` when calling `user_list_repos`") # noqa: E501
collection_formats = {}
path_params = {}
if 'username' in params:
path_params['username'] = params['username'] # noqa: E501
query_params = []
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/plain']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/users/{username}/repos', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Repository]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def user_list_starred(self, username, **kwargs):  # noqa: E501
    """The repos that the given user has starred  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_list_starred(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: username of user (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get just the payload (not the
    # (data, status, headers) triple).  With async_req=True the
    # underlying call returns the request thread, which we pass through.
    kwargs['_return_http_data_only'] = True
    return self.user_list_starred_with_http_info(username, **kwargs)  # noqa: E501
def user_list_starred_with_http_info(self, username, **kwargs):  # noqa: E501
    """The repos that the given user has starred  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_list_starred_with_http_info(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: username of user (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters this endpoint accepts, plus the framework-level options.
    all_params = ['username', 'page', 'limit']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshots the current local namespace; the kwargs are
    # then folded into it so validation and lookup are uniform below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_list_starred" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in params or
            params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `user_list_starred`")  # noqa: E501

    collection_formats = {}
    # Substituted into the {username} placeholder of the URL path.
    path_params = {}
    if 'username' in params:
        path_params['username'] = params['username']  # noqa: E501
    # Optional pagination query parameters.
    query_params = []
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'limit' in params:
        query_params.append(('limit', params['limit']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501
    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
    return self.api_client.call_api(
        '/users/{username}/starred', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Repository]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_list_subscriptions(self, username, **kwargs):  # noqa: E501
    """List the repositories watched by a user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_list_subscriptions(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: username of the user (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: return only the payload; with async_req=True
    # the underlying call yields the request thread, passed through as-is.
    kwargs['_return_http_data_only'] = True
    return self.user_list_subscriptions_with_http_info(username, **kwargs)  # noqa: E501
def user_list_subscriptions_with_http_info(self, username, **kwargs):  # noqa: E501
    """List the repositories watched by a user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_list_subscriptions_with_http_info(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: username of the user (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters this endpoint accepts, plus the framework-level options.
    all_params = ['username', 'page', 'limit']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshots the current local namespace; kwargs are
    # folded into it so validation and lookup are uniform below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_list_subscriptions" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in params or
            params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `user_list_subscriptions`")  # noqa: E501

    collection_formats = {}
    # Substituted into the {username} placeholder of the URL path.
    path_params = {}
    if 'username' in params:
        path_params['username'] = params['username']  # noqa: E501
    # Optional pagination query parameters.
    query_params = []
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'limit' in params:
        query_params.append(('limit', params['limit']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501
    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
    return self.api_client.call_api(
        '/users/{username}/subscriptions', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Repository]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_list_teams(self, **kwargs):  # noqa: E501
    """List all the teams a user belongs to  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_list_teams(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Team]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: return only the payload; with async_req=True
    # the underlying call yields the request thread, passed through as-is.
    kwargs['_return_http_data_only'] = True
    return self.user_list_teams_with_http_info(**kwargs)  # noqa: E501
def user_list_teams_with_http_info(self, **kwargs):  # noqa: E501
    """List all the teams a user belongs to  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_list_teams_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Team]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters this endpoint accepts, plus the framework-level options.
    all_params = ['page', 'limit']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshots the current local namespace; kwargs are
    # folded into it so validation and lookup are uniform below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_list_teams" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    # This endpoint operates on the authenticated user; no path parameters.
    path_params = {}
    # Optional pagination query parameters.
    query_params = []
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'limit' in params:
        query_params.append(('limit', params['limit']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501
    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
    return self.api_client.call_api(
        '/user/teams', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Team]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_search(self, **kwargs):  # noqa: E501
    """Search for users  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_search(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: keyword
    :param int uid: ID of the user to search for
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: InlineResponse2001
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: return only the payload; with async_req=True
    # the underlying call yields the request thread, passed through as-is.
    kwargs['_return_http_data_only'] = True
    return self.user_search_with_http_info(**kwargs)  # noqa: E501
def user_search_with_http_info(self, **kwargs):  # noqa: E501
    """Search for users  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_search_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: keyword
    :param int uid: ID of the user to search for
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: InlineResponse2001
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters this endpoint accepts, plus the framework-level options.
    all_params = ['q', 'uid', 'page', 'limit']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshots the current local namespace; kwargs are
    # folded into it so validation and lookup are uniform below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_search" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    # All search criteria are optional query parameters.
    query_params = []
    if 'q' in params:
        query_params.append(('q', params['q']))  # noqa: E501
    if 'uid' in params:
        query_params.append(('uid', params['uid']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'limit' in params:
        query_params.append(('limit', params['limit']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501
    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
    return self.api_client.call_api(
        '/users/search', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='InlineResponse2001',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_update_o_auth2_application(self, id, body, **kwargs):  # noqa: E501
    """update an OAuth2 Application, this includes regenerating the client secret  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_update_o_auth2_application(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: application to be updated (required)
    :param CreateOAuth2ApplicationOptions body: (required)
    :return: OAuth2Application
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: return only the payload; with async_req=True
    # the underlying call yields the request thread, passed through as-is.
    kwargs['_return_http_data_only'] = True
    return self.user_update_o_auth2_application_with_http_info(id, body, **kwargs)  # noqa: E501
def user_update_o_auth2_application_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """update an OAuth2 Application, this includes regenerating the client secret  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_update_o_auth2_application_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: application to be updated (required)
    :param CreateOAuth2ApplicationOptions body: (required)
    :return: OAuth2Application
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters this endpoint accepts, plus the framework-level options.
    all_params = ['id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshots the current local namespace; kwargs are
    # folded into it so validation and lookup are uniform below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_update_o_auth2_application" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `user_update_o_auth2_application`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `user_update_o_auth2_application`")  # noqa: E501

    collection_formats = {}
    # Substituted into the {id} placeholder of the URL path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The options object is serialized as the PATCH request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501
    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
    return self.api_client.call_api(
        '/user/applications/oauth2/{id}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='OAuth2Application',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
class SetExpiredRenewParam(object):
    """Request parameters for setting the renewal status of expired resources."""

    def __init__(self, appCode, serviceCode, region, resourceIds, renewStatus, relationStatus=None):
        """
        :param appCode: business line code (required)
        :param serviceCode: product line code (required)
        :param region: region (required)
        :param resourceIds: resource id list, comma separated (required)
        :param renewStatus: renewal status of expired resources (required): YES - renew; NO - do not renew
        :param relationStatus: (Optional) renewal status of bound expired resources: YES - renew; NO - do not renew
        """
        self.appCode = appCode
        self.serviceCode = serviceCode
        self.region = region
        self.resourceIds = resourceIds
        self.renewStatus = renewStatus
        self.relationStatus = relationStatus
|
from collections import defaultdict

# Ordered List of Coin Symbol Dictionaries
# Each entry describes one supported chain; REQUIRED_FIELDS (defined below)
# documents the meaning of each key.
COIN_SYMBOL_ODICT_LIST = [
    {
        'coin_symbol': 'btc',
        'display_name': 'Bitcoin',
        'display_shortname': 'BTC',
        'blockcypher_code': 'btc',
        'blockcypher_network': 'main',
        'currency_abbrev': 'BTC',
        'pow': 'sha',
        'example_address': '16Fg2yjwrbtC6fZp61EV9mNVKmwCzGasw5',
        'address_first_char_list': ('1', '3', '4', 'b'),
        'singlesig_prefix_list': ('1', 'b', ),
        'multisig_prefix_list': ('3', ),
        'bech32_prefix': 'bc1',
        'first4_mprv': 'xprv',
        'first4_mpub': 'xpub',
        'vbyte_pubkey': 0,
        'vbyte_script': 5,
    },
    {
        'coin_symbol': 'eth',
        'display_name': 'Ethereum',
        'display_shortname': 'ETH',
        'blockcypher_code': 'eth',
        'blockcypher_network': 'main',
        'currency_abbrev': 'ETH',
        'pow': 'ethash',
        'example_address': '0x9fece73db33be7b9a3d30b89fcc6a3f8e2bcf514',
        # BUGFIX: was ('0'), which is a plain str, not a 1-tuple like every
        # other entry's *_list fields.
        'address_first_char_list': ('0', ),
        'singlesig_prefix_list': ('0', ),
        'multisig_prefix_list': ('0', ),
        'bech32_prefix': 'none',
        'first4_mprv': 'none',
        'first4_mpub': 'none',
        'vbyte_pubkey': 0,
        'vbyte_script': 0,
    },
    {
        'coin_symbol': 'btc-testnet',
        'display_name': 'Bitcoin Testnet',
        'display_shortname': 'BTC Testnet',
        'blockcypher_code': 'btc',
        'blockcypher_network': 'test3',
        'currency_abbrev': 'BTC',
        'pow': 'sha',
        'example_address': '2N1rjhumXA3ephUQTDMfGhufxGQPZuZUTMk',
        'address_first_char_list': ('m', 'n', '2', 'z', 't', ),
        'singlesig_prefix_list': ('m', 'n', 't', ),
        'multisig_prefix_list': ('2', ),
        'bech32_prefix': 'tb1',
        'first4_mprv': 'tprv',
        'first4_mpub': 'tpub',
        'vbyte_pubkey': 111,
        'vbyte_script': 196,
    },
    {
        'coin_symbol': 'ltc',
        'display_name': 'Litecoin',
        'display_shortname': 'LTC',
        'blockcypher_code': 'ltc',
        'blockcypher_network': 'main',
        'currency_abbrev': 'LTC',
        'pow': 'scrypt',
        'example_address': 'LcFFkbRUrr8j7TMi8oXUnfR4GPsgcXDepo',
        'address_first_char_list': ('L', 'U', 'M', '3', '4'),
        'singlesig_prefix_list': ('L', ),
        'multisig_prefix_list': ('3', 'M'),
        'bech32_prefix': 'ltc1',
        'first4_mprv': 'Ltpv',
        'first4_mpub': 'Ltub',
        'vbyte_pubkey': 48,
        'vbyte_script': 5,
    },
    {
        'coin_symbol': 'doge',
        'display_name': 'Dogecoin',
        'display_shortname': 'DOGE',
        'blockcypher_code': 'doge',
        'blockcypher_network': 'main',
        'currency_abbrev': 'DOGE',
        'pow': 'scrypt',
        'example_address': 'D7Y55r6Yoc1G8EECxkQ6SuSjTgGJJ7M6yD',
        'address_first_char_list': ('D', '9', 'A', '2'),
        'singlesig_prefix_list': ('D', ),
        'multisig_prefix_list': ('9', 'A', ),
        # NOTE(review): 'xyz' looks like a placeholder (doge has no bech32
        # prefix) -- confirm before relying on this value.
        'bech32_prefix': 'xyz',
        'first4_mprv': 'dgpv',
        'first4_mpub': 'dgub',
        'vbyte_pubkey': 30,
        'vbyte_script': 22,
    },
    {
        'coin_symbol': 'dash',
        'display_name': 'Dash',
        'display_shortname': 'DASH',
        'blockcypher_code': 'dash',
        'blockcypher_network': 'main',
        'currency_abbrev': 'DASH',
        'pow': 'scrypt',
        'example_address': 'XdZW5Waa1i6D9za3qpFvgiwHzr8aFcXtNP',
        # BUGFIX: was ('X'), a plain str rather than a 1-tuple.
        'address_first_char_list': ('X', ),
        'singlesig_prefix_list': ('X', ),
        'multisig_prefix_list': ('7', ),
        # NOTE(review): 'xyz' looks like a placeholder -- confirm.
        'bech32_prefix': 'xyz',
        'first4_mprv': 'xprv',
        'first4_mpub': 'xpub',
        'vbyte_pubkey': 76,
        'vbyte_script': 16,
    },
    {
        'coin_symbol': 'bcy',
        'display_name': 'BlockCypher Testnet',
        'display_shortname': 'BCY Testnet',
        'blockcypher_code': 'bcy',
        'blockcypher_network': 'test',
        'currency_abbrev': 'BCY',
        'pow': 'sha',
        'example_address': 'CFr99841LyMkyX5ZTGepY58rjXJhyNGXHf',
        'address_first_char_list': ('B', 'C', 'D', 'Y', 'b', ),
        'singlesig_prefix_list': ('C', 'B', ),
        'multisig_prefix_list': ('D', ),
        'bech32_prefix': 'bcy1',
        'first4_mprv': 'bprv',
        'first4_mpub': 'bpub',
        'vbyte_pubkey': 27,
        'vbyte_script': 31,
    },
]
# all fields required
REQUIRED_FIELDS = (
    'coin_symbol',  # this is a made up unique symbol for library use only
    'display_name',  # what it commonly looks like
    'display_shortname',  # an abbreviated version of display_name (for when space is tight)
    'blockcypher_code',  # blockcypher's unique ID (for their URLs)
    'blockcypher_network',  # the blockcypher network (main/test)
    'currency_abbrev',  # what the unit of currency looks like when abbreviated
    'pow',  # the proof of work algorithm (sha/scrypt)
    'example_address',  # an example address
    'address_first_char_list',  # the list of first char possibilities for an address
    'singlesig_prefix_list',  # the list of first char possibilities for a single signature address
    'multisig_prefix_list',  # the list of first char possibilities for a multi signature address
    'first4_mprv',  # first 4 chars of the master private key
    'first4_mpub',  # first 4 chars of the master public key
    'vbyte_pubkey',  # pubkey version byte
    'vbyte_script',  # script hash version byte
)
# NOTE(review): 'bech32_prefix' appears in every coin dict but is not listed
# here, so its presence is never validated -- confirm whether that is intended.

ELIGIBLE_POW_ENTRIES = set(['sha', 'scrypt', 'x11', 'ethash'])

# Safety checks on the data
for coin_symbol_dict in COIN_SYMBOL_ODICT_LIST:
    # Make sure POW is set correctly
    assert coin_symbol_dict['pow'] in ELIGIBLE_POW_ENTRIES, coin_symbol_dict['pow']
    # Make sure no fields are missing
    for required_field in REQUIRED_FIELDS:
        assert required_field in coin_symbol_dict

# Orderings/groupings derived from the master list.
COIN_SYMBOL_LIST = [x['coin_symbol'] for x in COIN_SYMBOL_ODICT_LIST]
COIN_SYMBOL_SET = set(COIN_SYMBOL_LIST)
SHA_COINS = [x['coin_symbol'] for x in COIN_SYMBOL_ODICT_LIST if x['pow'] == 'sha']
SCRYPT_COINS = [x['coin_symbol'] for x in COIN_SYMBOL_ODICT_LIST if x['pow'] == 'scrypt']
ETHASH_COINS = [x['coin_symbol'] for x in COIN_SYMBOL_ODICT_LIST if x['pow'] == 'ethash']

# For django-style lists (with "best" order)
COIN_CHOICES = []
for coin_symbol_dict in COIN_SYMBOL_ODICT_LIST:
    COIN_CHOICES.append((coin_symbol_dict['coin_symbol'], coin_symbol_dict['display_name']))

# upper-case to be forgiving on user error
FIRST4_MKEY_CS_MAPPINGS_UPPER = defaultdict(set)
for coin_symbol_dict in COIN_SYMBOL_ODICT_LIST:
    first4_mprv = coin_symbol_dict.get('first4_mprv', '').upper()
    first4_mpub = coin_symbol_dict.get('first4_mpub', '').upper()
    coin_symbol = coin_symbol_dict['coin_symbol']
    if first4_mprv:
        FIRST4_MKEY_CS_MAPPINGS_UPPER[first4_mprv].add(coin_symbol)
    if first4_mpub:
        FIRST4_MKEY_CS_MAPPINGS_UPPER[first4_mpub].add(coin_symbol)

# mappings (similar to above but easier retrieval for when order doesn't matter)
# must come last because of popping out: pop() MUTATES the dicts inside
# COIN_SYMBOL_ODICT_LIST, removing the 'coin_symbol' key from each entry.
COIN_SYMBOL_MAPPINGS = {}
for coin_symbol_dict in COIN_SYMBOL_ODICT_LIST:
    coin_symbol = coin_symbol_dict.pop('coin_symbol')
    COIN_SYMBOL_MAPPINGS[coin_symbol] = coin_symbol_dict
# Ordered list of display units: bitcoin-style units carry 'satoshis_per',
# ethereum-style units carry 'wei_per'; the base units (satoshi/wei) have
# no conversion factor.
UNIT_CHOICE_ODICT_LIST = [
    {
        'unit': 'btc',
        'display_name': 'BTC',
        'satoshis_per': 10**8,
    },
    {
        'unit': 'mbtc',
        'display_name': 'mBTC',
        'satoshis_per': 10**5,
    },
    {
        'unit': 'bit',
        'display_name': 'bit',
        'satoshis_per': 10**2,
    },
    {
        'unit': 'satoshi',
        'display_name': 'satoshi',
    },
    {
        'unit': 'ether',
        'display_name': 'Ether',
        'wei_per': 10**18,
    },
    {
        'unit': 'gwei',
        'display_name': 'GWei',
        'wei_per': 10**9,
    },
    {
        'unit': 'wei',
        'display_name': 'Wei',
    },
]

UNIT_CHOICES = []
UNIT_CHOICES_DJANGO = []
for unit_choice_dict in UNIT_CHOICE_ODICT_LIST:
    UNIT_CHOICES.append(unit_choice_dict['unit'])
    UNIT_CHOICES_DJANGO.append((unit_choice_dict['unit'], unit_choice_dict['display_name']))

# mappings (similar to above but easier retrieval for when order doesn't matter)
# must come last because of popping out: pop() MUTATES the dicts inside
# UNIT_CHOICE_ODICT_LIST, removing the 'unit' key from each entry.
UNIT_MAPPINGS = {}
for unit_choice_dict in UNIT_CHOICE_ODICT_LIST:
    unit_choice = unit_choice_dict.pop('unit')
    UNIT_MAPPINGS[unit_choice] = unit_choice_dict
|
#
# PySNMP MIB module SCC-ENTERPRISE-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/SCC-ENTERPRISE-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:53:04 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE: `mibBuilder` is not defined in this file; pysmi-generated MIB modules
# are executed by the pysnmp MIB loader, which supplies `mibBuilder` in the
# execution namespace. Do not import this module directly.
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ModuleIdentity, ObjectIdentity, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, NotificationType, Unsigned32, IpAddress, enterprises, Counter32, MibIdentifier, Gauge32, Bits, iso, Integer32, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "ObjectIdentity", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "NotificationType", "Unsigned32", "IpAddress", "enterprises", "Counter32", "MibIdentifier", "Gauge32", "Bits", "iso", "Integer32", "Counter64")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# OID subtree rooted at enterprises.1386 (SCC).
scc = MibIdentifier((1, 3, 6, 1, 4, 1, 1386))
sccProducts = MibIdentifier((1, 3, 6, 1, 4, 1, 1386, 1))
sccRaid7 = MibIdentifier((1, 3, 6, 1, 4, 1, 1386, 1, 1))
sccMibs = MibIdentifier((1, 3, 6, 1, 4, 1, 1386, 2))
raid7mib = MibIdentifier((1, 3, 6, 1, 4, 1, 1386, 2, 1))
raid7proxy = MibIdentifier((1, 3, 6, 1, 4, 1, 1386, 2, 2))
mibBuilder.exportSymbols("SCC-ENTERPRISE-MIB", raid7proxy=raid7proxy, sccProducts=sccProducts, raid7mib=raid7mib, scc=scc, sccRaid7=sccRaid7, sccMibs=sccMibs)
|
from django.http import HttpResponse
from _internalredirect import _convert_file_to_url
def sendfile(request, filename, **kwargs):
    """Serve *filename* through nginx's X-Accel-Redirect internal redirect.

    Returns an otherwise-empty HttpResponse whose ``X-Accel-Redirect``
    header points at the internal URL for *filename*; the web server then
    streams the file itself.  Extra keyword arguments are accepted for
    interface compatibility and ignored.
    """
    internal_url = _convert_file_to_url(filename)
    response = HttpResponse()
    # NOTE(review): header value is set to bytes -- presumably Python 2 era
    # code; confirm str is not required on Python 3 / current Django.
    response['X-Accel-Redirect'] = internal_url.encode('utf-8')
    return response
|
"ESGF modules"
__version__ = '0.1.1'
|
import os
from HSTB.shared import path_to_NOAA, path_to_root_env, path_to_conda, path_to_supplementals
def is_pydro():
    """
    Determine whether this environment is a PydroXL install; if so, the
    paths built by this module will be valid.

    Returns
    -------
    bool
        True when the Pydro scripts folder can be located, False otherwise
    """
    try:
        retrieve_scripts_folder()
    except RuntimeError:
        return False
    return True
def retrieve_vdatum_folder_path():
    """
    Helper function to retrieve the path to the VDatum folder in PydroXL,
    if it exists.

    Returns
    -------
    str or None
        path to the supplementals/VDatum folder, or None when it is absent
    """
    vdatum_path = path_to_supplementals('VDatum')
    return vdatum_path if os.path.exists(vdatum_path) else None
def retrieve_noaa_folder_path():
    """
    Helper function to retrieve the path to the NOAA folder in PydroXL.

    Returns
    -------
    str
        folder path to the NOAA folder

    Raises
    ------
    RuntimeError
        if the folder does not exist on disk
    """
    noaa_folder = path_to_NOAA()
    if os.path.exists(noaa_folder):
        return noaa_folder
    raise RuntimeError("the folder does not exist: %s" % noaa_folder)
def retrieve_install_prefix():
    """
    Helper function to retrieve the install prefix path for PydroXL.

    Returns
    -------
    str
        folder path to the base Pydro folder

    Raises
    ------
    RuntimeError
        if the folder does not exist on disk
    """
    prefix = path_to_root_env()
    if os.path.exists(prefix):
        return prefix
    raise RuntimeError("the folder does not exist: %s" % prefix)
def retrieve_scripts_folder():
    """
    Helper function to retrieve the path to the "Scripts" folder in PydroXL.

    Returns
    -------
    str
        folder path to the Pydro scripts folder

    Raises
    ------
    RuntimeError
        if the folder does not exist on disk
    """
    scripts_folder = path_to_conda()
    if os.path.exists(scripts_folder):
        return scripts_folder
    raise RuntimeError("the folder does not exist: %s" % scripts_folder)
def retrieve_activate_batch():
    """
    Helper function to retrieve the path to the "activate.bat" batch file
    in PydroXL.

    Returns
    -------
    str
        file path to the activate batch file

    Raises
    ------
    RuntimeError
        if the file does not exist on disk
    """
    activate_path = os.path.realpath(
        os.path.join(retrieve_scripts_folder(), "activate.bat"))
    if os.path.exists(activate_path):
        return activate_path
    raise RuntimeError("the file does not exist: %s" % activate_path)
|
from pyodide_build.testing import run_in_pyodide
@run_in_pyodide(packages=["sqlalchemy"])
def test_sqlalchemy():
from sqlalchemy import create_engine, text
engine = create_engine("sqlite+pysqlite:///:memory:", future=True)
with engine.connect() as conn:
result = conn.execute(text("select 'hello world'"))
assert result.all()[0] == ("hello world",)
conn.execute(text("CREATE TABLE some_table (x int, y int)"))
conn.execute(
text("INSERT INTO some_table (x, y) VALUES (:x, :y)"),
[{"x": 1, "y": 1}, {"x": 2, "y": 4}],
)
conn.commit()
result = conn.execute(text("SELECT x, y FROM some_table")).all()
assert len(result) == 2
result = conn.execute(text("SELECT x, y FROM some_table WHERE x=2")).all()
assert len(result) == 1
assert result[0].y == 4
|
from six import add_metaclass
from six.moves import zip
from . import path, safe_str
from .iterutils import isiterable, iterate
from .file_types import *
from .platforms.framework import Framework
class option_list(object):
    """An ordered collection of options that suppresses duplicate entries."""

    def __init__(self, *args):
        self._options = []
        self.collect(*args)

    def append(self, option):
        # Raw strings are always kept; structured options are only added
        # when no already-collected option matches them.
        if (isinstance(option, safe_str.stringy_types) or
                not any(option.matches(existing)
                        for existing in self._options)):
            self._options.append(option)

    def extend(self, options):
        for opt in options:
            self.append(opt)

    def collect(self, *args):
        # Recursively flatten nested iterables; None entries are dropped.
        for arg in args:
            if isiterable(arg):
                for elem in arg:
                    self.collect(elem)
            elif arg is not None:
                self.append(arg)

    def copy(self):
        return option_list(self._options)

    def __iter__(self):
        return iter(self._options)

    def __len__(self):
        return len(self._options)

    def __eq__(self, rhs):
        return type(self) == type(rhs) and self._options == rhs._options

    def __ne__(self, rhs):
        return not (self == rhs)

    def __repr__(self):
        return '<option_list({})>'.format(repr(self._options))

    def __add__(self, rhs):
        result = self.copy()
        result += rhs
        return result

    def __iadd__(self, rhs):
        if not isinstance(rhs, option_list):
            raise TypeError('expected an option_list, got a {!r}'
                            .format(type(rhs)))
        self.extend(rhs)
        return self
# XXX: This is a separate function to make Python 2.7.8 and earlier happy. For
# details, see <https://bugs.python.org/issue21591>.
def _make_init(slots, attrs):
exec('def __init__(self, {0}):\n self._init({0})'
.format(', '.join(slots)), globals(), attrs)
class OptionMeta(type):
    """Metaclass that expands a ``_fields`` spec into __slots__/_types and
    keeps a registry of every Option subclass."""

    def __new__(cls, name, bases, attrs):
        fields = attrs.pop('_fields', [])
        # Each field is either a bare name or a (name, type-constraint) pair.
        slot_names = []
        type_constraints = []
        for field in fields:
            if isiterable(field):
                slot_names.append(field[0])
                type_constraints.append(field[1])
            else:
                slot_names.append(field)
                type_constraints.append(None)
        attrs['__slots__'] = tuple(slot_names)
        attrs['_types'] = tuple(type_constraints)
        if '__init__' not in attrs:
            _make_init(tuple(slot_names), attrs)
        return type.__new__(cls, name, bases, attrs)

    def __init__(cls, name, bases, attrs):
        # The hierarchy root owns the (shared) registry; every subclass
        # registers itself under its class name.
        if any(type(i) == OptionMeta for i in bases):
            cls.registry[name] = cls
        else:
            cls.registry = {}
        type.__init__(cls, name, bases, attrs)
@add_metaclass(OptionMeta)
class Option(object):
    """Base class for typed option records; fields come from `_fields`."""

    def _init(self, *args):
        assert len(args) == len(self.__slots__)
        for slot, expected, value in zip(self.__slots__, self._types, args):
            # Validate each value against its declared type (if any) before
            # storing it on the corresponding slot.
            if expected and not isinstance(value, expected):
                raise TypeError('expected {}; but got {}'.format(
                    ', '.join(i.__name__ for i in iterate(expected)),
                    type(value).__name__
                ))
            setattr(self, slot, value)

    def matches(self, rhs):
        # By default two options "match" exactly when they compare equal.
        return self == rhs

    def __eq__(self, rhs):
        if type(self) != type(rhs):
            return False
        return all(getattr(self, slot) == getattr(rhs, slot)
                   for slot in self.__slots__)

    def __ne__(self, rhs):
        return not (self == rhs)

    def __repr__(self):
        body = ', '.join(repr(getattr(self, slot))
                         for slot in self.__slots__)
        return '<{}({})>'.format(self.__class__.__name__, body)
def option(name, fields=()):
    """Create and return a new Option subclass named *name* with *fields*."""
    attrs = {'_fields': fields}
    return type(name, (Option,), attrs)
# Compilation options (each name is a generated Option subclass; see option()).
include_dir = option('include_dir', [('directory', HeaderDirectory)])
std = option('std', [('value', str)])  # language-standard string
pic = option('pic')  # position-independent code (no fields)
pch = option('pch', [('header', PrecompiledHeader)])
class define(Option):
    # A name with an optional string value (value may be None).
    _fields = [ ('name', str),
                ('value', (str, type(None))) ]
    def __init__(self, name, value=None):
        # Bypass the metaclass-generated __init__ so `value` can default to
        # None instead of being a required positional argument.
        Option._init(self, name, value)
# Link options (each name is a generated Option subclass; see option()).
lib_dir = option('lib_dir', [('directory', Directory)])
lib = option('lib', [('library', (Library, Framework, str))])
rpath_dir = option('rpath_dir', [('path', path.BasePath)])
rpath_link_dir = option('rpath_link_dir', [('path', path.BasePath)])
# A literal string passed through to the linker unmodified.
lib_literal = option('lib_literal', [('value', safe_str.stringy_types)])
entry_point = option('entry_point', [('value', str)])
# General options
pthread = option('pthread')
|
# Allows one to plot many data points with many different customizations (marker size, color, legend label etc),
# without writing code. Instead the customizations are specified by the input spreadsheets, in which each row is
# a data point (run code without inputs for instructions). Documentation and variable names assume plotting of
# CRISPR score data but can be used generically.
#
# MIT License
#
# Copyright (c) 2019 Evgeni (Genya) Frenkel
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import matplotlib.pyplot as plt
import os.path
# Base name (no extension) for the generated .svg/.png output files.
outputFileBaseName = 'scatter'
# plot spec file column names
colNames = ['Gene name', 'Plot or not?', 'Color', 'Marker', 'Size', 'Alpha', 'Layer', 'Label or not?',
            'Label font size','Legend group'] # note gene name needs to be first entry in this list or code breaks
# Fewer than two data files supplied: print usage instructions and exit.
if len(sys.argv)<3:
    print('\n' + sys.argv[0] + ' requires at least two inputs. These are gene-level log-fold changes or CRISPR scores (CS) and are provided as follows:'
          + '\npython ' + sys.argv[0] + ' [CS treatment A vs initial] [CS treatment B vs initial] [plot spec file]'
          + '\n\nThe output is two image files called scatter.svg, scatter.png (dpi=500). To modify these output formats, edit the code at the very bottom of the script.'
          + '\n\nFormat of the first two input arguments (data files) should be tab or comma separated columns consisting of :'
          + '\nGene name \t CRISPR score '
          '\n\nAny gene name that is not present in both files will be ignored. '
          'The header of the second column will be the axis label for those data.'
          + '\n\nThe third (optional) argument provides specifications for how all or some data points should be plotted.'
          + ' The plot spec file is also tab or comma delimited, ' + str(len(colNames)) + ' columns in total:'
          + '\n' + '\t'.join(colNames)
          + '\n\nThe plot spec file needs to have column headers (in any order) exactly matching these column names, '
          'but can have additional columns (e.g. notes about genes, other data), which will be ignored. '
          '\nLikewise in the CRISPR score files, any columns beyond the first two will be ignored.'
          '\n\nIf value in "Plot or not?" column = 1, then data for that gene will be plotted. '
          'Any value other than 1 will be treated as false. '
          'Likewise value in "Label or not?" = 1 means text of gene name will be overlayed on the data point.'
          '\n\nLayer should be a number and points with higher value layer are plotted on top. If no layer specified, default is bottom layer.'
          '\n\nThe permitted values and meanings for columns Color, Marker, Size, and Alpha can be found in the matplotlib/pyplot documentation:'
          '\n https://matplotlib.org/api/_as_gen/matplotlib.pyplot.plot.html'
          + '\n\nThis code was written and tested for python 2.7, might not work with other versions.\n'
          )
    sys.exit()
# Required inputs: the two CRISPR-score files; optional third is the plot spec.
fileAvI, fileBvI = sys.argv[1], sys.argv[2]
fileGOI = ''
if len(sys.argv)>3:
    fileGOI = sys.argv[3]
def getVals(fileXvY):
    """Read a two-column (gene name, score) file into a dict.

    The file may be tab- or comma-delimited; the delimiter is auto-detected
    from the header line (commas are assumed when no tab is found).

    Returns:
        (scores, label): `scores` maps gene name -> float score and `label`
        is the header text of the score column (used as the axis label).
    """
    CS_XvY = {}
    with open(fileXvY, 'r') as f:
        header = f.readline()
        # Auto-detect the delimiter from the header line.
        delim = '\t'
        if len(header.strip().split('\t')) == 1:
            delim = ','
        # Bug fix: strip the header so the axis label does not carry a
        # trailing newline (or surrounding whitespace) into the plot.
        label = header.strip().split(delim)[1]
        for line in f:
            line = line.strip()
            if not line:
                continue  # robustness: tolerate blank/trailing lines
            parts = line.split(delim)
            CS_XvY[parts[0]] = float(parts[1])
    return CS_XvY, label
#load score values
CS_A, xlabel = getVals(fileAvI)
CS_B, ylabel = getVals(fileBvI)
# Only genes present in both score files are plotted.
geneList = [g for g in CS_A if g in CS_B]
#load plot specs
GOIs = {}
# -inf is the implicit bottom layer for genes without an explicit layer.
layers = [-float('inf')]
layerLists = {} #layerLists[x] = [list of genes to be plotted as layer x]
layerSpecified = [] #list of genes with layer specified
if len(fileGOI)>0:
    with open(fileGOI,'r') as f:
        #tab or comma delimiter?
        header = f.readline().strip()
        delim = '\t'
        if len(header.strip().split('\t'))==1:
            delim = ','
        header = header.split(delim)
        #find index of relevant columns
        # NOTE(review): keys keep any surrounding whitespace while the
        # membership test strips it, so a padded header cell would later be
        # reported as a missing column — confirm spec files are unpadded.
        colInds = {x:i for i,x in enumerate(header) if x.strip() in colNames}
        for x in colNames:
            error = False
            if x not in colInds:
                print('Error: cannot find column `' + x + '` in ' + fileGOI)
                error = True
            # NOTE(review): `error` is reset each iteration and checked
            # inside the loop, so this exits on the FIRST missing column
            # rather than reporting all of them.
            if error : sys.exit()
        for line in f:
            line = [x.strip() for x in line.split(delim)]
            # Map gene name -> {spec column name -> raw cell value}.
            GOIs[line[colInds['Gene name']]] = {x:line[colInds[x]] for x in colNames[1:]}
            try:
                # Register the gene under its (integer) layer.
                if int(line[colInds['Layer']]) not in layers:
                    layers.append(int(line[colInds['Layer']]))
                    layerLists[int(line[colInds['Layer']])] = []
                layerLists[int(line[colInds['Layer']])].append(line[colInds['Gene name']])
                layerSpecified.append(line[colInds['Gene name']])
            except ValueError:
                print('Error: Layer column contains non-integer value in ' + fileGOI + ' for gene ' + line[colInds['Gene name']])
                sys.exit()
# Plot layers from lowest to highest; unspecified genes go to the bottom.
layers = sorted(layers)
layerLists[-float('inf')] = [g for g in geneList if g not in layerSpecified]
###plot
fig = plt.figure()
ax = plt.subplot()

#determine axes bounds (5% margin; always include the origin on the min side)
marginFactor = 1.05
xlim = [marginFactor*min([CS_A[g] for g in geneList] + [0]), marginFactor*max([CS_A[g] for g in geneList])]
ylim = [marginFactor*min([CS_B[g] for g in geneList] + [0]), marginFactor*max([CS_B[g] for g in geneList])]
###MANUALLY SET AXIS BOUNDS HERE
#xlim = [-3, 3]
#ylim = [-3, 3]

# Dashed guide lines through the origin, drawn underneath the data.
ax.plot(xlim, [0, 0], '--', linewidth=1, color='silver', zorder=0)
ax.plot([0, 0], ylim, '--', linewidth=1, color='silver', zorder=0)

legendHandles = []
legendSymbols = []
legendLabels = []
numPtsPlotted = 0
for layerInd, layerKey in enumerate(layers):
    for g in layerLists[layerKey]:
        #coordinates
        x, y = CS_A[g], CS_B[g]
        # Default specs, applied unless overridden by the plot spec file.
        # Bug fix: these are now ALWAYS initialized up front, so the label
        # check below no longer reads stale values from the previous gene
        # (or hits a NameError) when a spec'd gene has "Plot or not?" != 1.
        plotOrNot = True
        alpha = 1
        markerColor = 'b'
        markerShape = '.'
        markerSize = 10
        legendGroup = ''
        labelOrNot = False
        labelFont = 0
        if g in GOIs:  # custom specs override the defaults
            plotOrNot = GOIs[g]['Plot or not?'] == '1'
            if plotOrNot:
                alpha = float(GOIs[g]['Alpha'])
                markerColor = GOIs[g]['Color']
                markerShape = GOIs[g]['Marker']
                if markerShape == '0':  # LibreOffice Calc converts periods into zeros, which aren't valid plot shape
                    markerShape = '.'
                markerSize = float(GOIs[g]['Size'])
                legendGroup = GOIs[g]['Legend group']
                labelOrNot = GOIs[g]['Label or not?'] == '1'
                labelFont = float(GOIs[g]['Label font size'])
        #add point to figure
        if plotOrNot:
            ax.scatter(x, y,
                       color=markerColor,
                       marker=markerShape,
                       alpha=alpha,
                       s=markerSize,
                       zorder=layerInd)
            numPtsPlotted += 1
            if numPtsPlotted % 100 == 0:
                print(str(numPtsPlotted) + ' data points plotted')
            # assign to legend group? (first gene of each group provides the
            # legend's marker style; the proxy point is drawn off-screen)
            if legendGroup != '' and legendGroup not in legendLabels:
                legendSymbols.append(markerShape + markerColor)
                legendHandles.append(plt.scatter(-1000, -1000, color=markerColor, marker=markerShape, alpha=alpha,
                                                 s=markerSize*2, zorder=0))
                legendLabels.append(legendGroup)
        #overlay gene name?
        if labelOrNot:
            ax.text(x, y, g, fontsize=labelFont, zorder=max(layers) + 1)

#add legend
if len(legendHandles) > 0:
    ax.legend(tuple(legendHandles), tuple(legendLabels), fontsize=6)  # ,location=outside)
# Axis labels come from the score-file headers.
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.xlim(xlim)
plt.ylim(ylim)
ax.set_aspect(aspect='equal')
plt.tight_layout()
#save plot to png, svg files
# If scatter.svg/.png already exist, probe scatter_0, scatter_1, ... for the
# first free name so previous outputs are never overwritten.
if os.path.isfile(outputFileBaseName + '.png') or os.path.isfile(outputFileBaseName + '.svg'):
    fileInd = 0
    while True:
        newOutputFileBaseName = outputFileBaseName + '_' + str(fileInd)
        if os.path.isfile(newOutputFileBaseName + '.png') or os.path.isfile(newOutputFileBaseName + '.svg'):
            fileInd += 1
            newOutputFileBaseName = outputFileBaseName + '_' + str(fileInd)
        else:
            plt.savefig(newOutputFileBaseName + '.svg')
            plt.savefig(newOutputFileBaseName + '.png',dpi=500)
            break
else:
    plt.savefig(outputFileBaseName + '.svg')
    plt.savefig(outputFileBaseName+ '.png', dpi=500)
import os
import argparse
import numpy as npy
try:
import cupy as np
except ImportError:
import numpy as np
print("GPU not enabled on this machine.")
from vae.models.vae_model import VAE
from vae.utils.functionals import load_mnist, BCE
def parse_args(argv=None):
    """Parse command-line hyperparameters for VAE training.

    Args:
        argv: Optional list of argument strings; when None (the default),
            argparse falls back to sys.argv[1:], preserving the original
            behavior. Added so the parser can be exercised in isolation.

    Returns:
        argparse.Namespace with the training hyperparameters below.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--epoch", type=int, default=20)  # training epochs
    parser.add_argument("--input_size", type=int, default=784)  # 28*28 MNIST pixels
    parser.add_argument("--latent_size", type=int, default=20)
    parser.add_argument("--hidden_size", type=int, default=500)
    parser.add_argument("--learning_rate", type=float, default=0.0001)
    # beta1/beta2/tolerance are forwarded to the VAE optimizer (presumably
    # Adam-style moment decay and epsilon — confirm in vae_model).
    parser.add_argument("--beta1", type=float, default=0.9)
    parser.add_argument("--beta2", type=float, default=0.999)
    parser.add_argument("--tolerance", type=float, default=1e-8)
    parser.add_argument("--batch_size", type=int, default=100)
    return parser.parse_args(argv)
# Parse hyperparameters at import time and fix the RNG seed so shuffling and
# any random initialization are reproducible.
args = parse_args()
np.random.seed(663)
def train(model, n_epoch=20):
    """
    Train variational auto-encoder on MNIST data set.

    Args:
        model: VAE instance exposing `n_batch`, `forward`, and `backward`.
        n_epoch: number of full passes over the training set.

    Prints per-epoch running-average reconstruction/KL/total losses; the
    averages accumulate over ALL iterations so far (not reset per epoch).
    """
    # Load training data
    train_data, _, train_size = load_mnist(data_path = './vae/data/train-images-idx3-ubyte',
                                    label_path = './vae/data/train-labels-idx1-ubyte')
    # shuffle training data
    np.random.shuffle(train_data)
    # Batch training setup
    batch_size = model.n_batch
    # Number of FULL batches; any trailing partial batch is dropped.
    batch_idx = train_size // batch_size
    # Loss setup (per-sample losses accumulated across all iterations)
    total_loss = 0
    total_rec_loss = 0
    total_kl = 0
    total_iter = 0
    for epoch in range(n_epoch):
        for idx in range(batch_idx):
            # Divide training data into mini-batches
            train_batch = train_data[idx * batch_size: idx * batch_size + batch_size]
            # Ignore a batch if insufficient observations
            # (safety net: batch_idx already excludes the partial batch)
            if train_batch.shape[0] != batch_size:
                break
            ###### Forward Pass ######
            xhat, mu, logvar = model.forward(train_batch)
            # Calculate reconstruction Loss
            rec_loss = BCE(xhat, train_batch)
            # Calculate KL Divergence: KL(q(z|x) || N(0, I)) for a diagonal
            # Gaussian posterior = -0.5 * sum(1 + logvar - mu^2 - exp(logvar))
            kl = -.5 * np.sum(1 + logvar - mu ** 2 - np.exp(logvar))
            #Loss record keeping (normalized per sample)
            total_rec_loss += rec_loss / batch_size
            total_kl += kl / batch_size
            total_loss = total_rec_loss + total_kl
            total_iter += 1
            ###### Backpropagation ######
            model.backward(train_batch, xhat)
            #model.img = np.squeeze(xhat, axis=3) * 2 - 1
        print("Epoch [%d/%d] RC Loss:%.4f KL Loss:%.4f Total Loss: %.4f"%(
            epoch, n_epoch, total_rec_loss/total_iter, total_kl/total_iter, total_loss/total_iter))
if __name__ == '__main__':
    # Instantiate model
    model = VAE(args.input_size, args.latent_size, args.hidden_size,
            args.batch_size, args.learning_rate, args.beta1, args.beta2, args.tolerance)
    # Profile the entire training run and print hotspots by own time.
    import cProfile
    pr = cProfile.Profile()
    pr.enable()
    # Train model
    train(model, n_epoch=args.epoch)
    pr.disable()
    pr.print_stats(sort='time')
|
import traceback
from re import match
class DataMixin():
    """Shared validation helpers for field classes.

    Note the unusual convention: validation dicts map *values* to their
    expected types (``{some_value: expected_type}``), so ``type_validate``
    checks each key's runtime type against the type stored as its value.
    """

    def type_validate(self, fields_type):
        # Empty/falsy spec -> None; otherwise map each checked value to a
        # bool that is True when its runtime type matches.
        if not fields_type:
            return
        return {key: type(key) == value for key, value in fields_type.items()}

    def check_allowed_kwargs(self, allowed, **kwargs):
        # Collect every keyword whose name is not in the allowed set.
        excluded = {}
        for key, value in kwargs.items():
            if key not in allowed:
                excluded[key] = value
        if excluded:
            raise KeyError('not allowed keyword argument(s) ' + str(excluded) + ' for ' + self.__class__.__name__)
        return kwargs

    def check_validated(self, validated_fields):
        # Raise on the first failed check; a None/empty result passes.
        if validated_fields:
            for key, value in validated_fields.items():
                if value == False:
                    raise TypeError('wrong argument value type: ' + str(key) + ':' + str(type(key)))
        return True

    def compare_validated(self, obj_fields, fields_types):
        # Verify each field value against its declared type; raise on the
        # first mismatch.
        for key, value in obj_fields.items():
            if type(value) != fields_types[key]:
                raise TypeError(
                    'wrong field value type: ' + str(key) + ':' + str(fields_types[key]) + '=' + str(value) + ':' + str(
                        type(value)))
        return True
class Field(DataMixin):
    """Base descriptor for a SQL table column.

    Validates its constructor flags through the DataMixin helpers and renders
    a column-definition fragment via ``sql()``; the rendered column name and
    field type are placeholders that callers/subclasses substitute.
    """

    def __init__(self, primary_key: bool = False, auto_increment: bool = False, max_length: int = None,
                 unique: bool = False, null: bool = False, default=None):
        self.primary_key = primary_key
        self.max_length = max_length
        self.unique = unique
        self.null = null
        self.default = default
        self.auto_increment = auto_increment
        self.value = None
        # Subclasses override _dynamic_vars to declare what gets validated.
        self.fields_type, self.required = self._dynamic_vars()
        if not self._check_errors():
            raise KeyError('wrong field key or value: ' + str(self.required))

    def _dynamic_vars(self):
        # NOTE: keys are the flags' *values* (see DataMixin), so equal flag
        # values collapse into a single dict entry.
        fields_type = {self.unique: bool, self.null: bool, self.auto_increment: bool, self.primary_key: bool}
        required = {'name': str}
        return fields_type, required

    def _check_errors(self, **kwargs):
        # AUTOINCREMENT is only legal on a PRIMARY KEY column.
        if self.auto_increment and not self.primary_key:
            raise ValueError('AUTOINCREMENT works only with column PRIMARY KEY')
        return self.check_validated(self.type_validate(self.fields_type))

    def sql(self):
        # Render the column fragment; '<placeholder>' and '<field_type>' are
        # replaced later by callers/subclasses.
        pieces = [
            '`<placeholder>`',
            ' <field_type> ',
            ' PRIMARY KEY ' if self.primary_key else '',
            ' AUTOINCREMENT ' if self.auto_increment else '',
            'NULL' if self.null else 'NOT NULL',
            ' DEFAULT ' + str(self.default) if self.default else '',
        ]
        return ''.join(pieces)
class CharField(Field):
    """A VARCHAR column; ``max_length`` is required."""

    def __init__(self, *args, **kwargs):
        allowed = ('name', 'max_length', 'unique', 'null', 'default', 'primary_key')
        checked = self.check_allowed_kwargs(allowed, **kwargs)
        super(CharField, self).__init__(*args, **checked)

    def _dynamic_vars(self):
        fields_type = {self.max_length: int, self.unique: bool, self.null: bool, self.auto_increment: bool,
                       self.primary_key: bool}
        required = {'required': {'name': str, 'max_length': int},
                    'optional': {'default': str, 'null': bool, 'unique': bool}}
        return fields_type, required

    def _check_errors(self):
        # A missing max_length fails validation (None -> __init__ raises).
        if not self.max_length:
            return
        return bool(super(CharField, self)._check_errors())

    @property
    def type(self):
        # Python-side type corresponding to this column.
        return str

    @property
    def sql(self):
        pattern = super(CharField, self).sql()
        return pattern.replace('<field_type>', 'VARCHAR(' + str(self.max_length) + ')')
class Integer(Field):
    """An INTEGER column."""

    def __init__(self, *args, **kwargs):
        allowed = ('name', 'primary_key', 'unique', 'null', 'default', 'auto_increment')
        checked = self.check_allowed_kwargs(allowed, **kwargs)
        super(Integer, self).__init__(*args, **checked)

    @property
    def type(self):
        # Python-side type corresponding to this column.
        return int

    @property
    def sql(self):
        pattern = super(Integer, self).sql()
        return pattern.replace('<field_type>', 'INTEGER')
class Text(Field):
    """A TEXT column."""

    def __init__(self, *args, **kwargs):
        allowed = ('name', 'unique', 'null', 'default')
        checked = self.check_allowed_kwargs(allowed, **kwargs)
        super(Text, self).__init__(*args, **checked)

    @property
    def type(self):
        # Python-side type corresponding to this column.
        return str

    @property
    def sql(self):
        pattern = super(Text, self).sql()
        return pattern.replace('<field_type>', 'TEXT')
class Bool(Field):
    """A boolean column stored as INTEGER 0/1; ``default`` must be True/False."""

    def __init__(self, *args, **kwargs):
        allowed = ('name', 'null', 'default')
        checked = self.check_allowed_kwargs(allowed, **kwargs)
        super(Bool, self).__init__(*args, **checked)

    def _dynamic_vars(self):
        fields_type = {self.default: bool, self.unique: bool, self.null: bool, self.auto_increment: bool,
                       self.primary_key: bool}
        required = {'required': {'name': str}, 'optional': {'default': bool}}
        return fields_type, required

    def _check_errors(self):
        # Any default other than a literal True/False fails validation
        # (returning None makes __init__ raise KeyError).
        if self.default not in [True, False]:
            return
        return bool(super(Bool, self)._check_errors())

    @property
    def type(self):
        # Python-side type corresponding to this column.
        return bool

    @property
    def sql(self):
        # NOTE: rendering mutates `default` to its 0/1 integer form before
        # the base renderer decides whether to emit a DEFAULT clause.
        self.default = 1 if self.default == True else 0
        pattern = super(Bool, self).sql()
        return pattern.replace('<field_type>', 'INTEGER')
|
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at: http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distrib-
# uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# specific language governing permissions and limitations under the License.
"""Handler for the domain-specific admin page."""
__author__ = 'rew@google.com (Becky Willrich)'
import base_handler
import domains
import model
import perms
import users
import utils
# String used to mean "none of the above" in the HTML
NO_PERMISSIONS = 'NO_PERMISSIONS'
# (value, display text) choices for a map's initial domain role selector.
INITIAL_DOMAIN_ROLE_CHOICES = (
    (NO_PERMISSIONS, 'Have no access to the map'),
    (perms.Role.MAP_VIEWER, 'Can view the map'),
    (perms.Role.MAP_EDITOR, 'Can view and edit the map'),
    (perms.Role.MAP_OWNER, 'Can view, edit, and delete the map'),
)
# (value, display text) choices for per-user domain permissions, ordered
# weakest to strongest.
DOMAIN_PERMISSION_CHOICES = (
    (perms.Role.MAP_CREATOR, 'Can create maps'),
    (perms.Role.CATALOG_EDITOR, 'Can publish maps'),
    (perms.Role.DOMAIN_ADMIN, 'Can manage domain'),
)
# _MaxRole relies on these being in order from weakest to strongest.
DOMAIN_PERMISSIONS = [s for s, _ in reversed(DOMAIN_PERMISSION_CHOICES)]
# TODO(rew): This goes away once we migrate the perms data to store only the
# strongest permission per subject
def _MaxRole(roles):
  """Returns the strongest role present in roles, or None if none match."""
  # DOMAIN_PERMISSIONS is ordered strongest-first, so the first hit wins.
  return next((role for role in DOMAIN_PERMISSIONS if role in roles), None)
def SetRolesForDomain(subject_roles, domain_name):
  """Gives each user exactly the specified set of roles to the given domain.

  Args:
    subject_roles: A dictionary mapping subjects (user IDs or domain names)
        to sets of perms.Role constants. For each subject, all roles in the
        set will be granted, and all roles not in the set will be revoked.
    domain_name: A domain name.
  """
  current_roles = perms.GetSubjectsForTarget(domain_name)
  for subject, wanted in subject_roles.items():
    existing = current_roles.get(subject, set())
    # Revoke roles no longer wanted, then grant any that are missing.
    for role in existing - wanted:
      perms.Revoke(subject, role, domain_name)
    for role in wanted - existing:
      perms.Grant(subject, role, domain_name)
class Admin(base_handler.BaseHandler):
  """Handler for the overall admin and domain-specific admin pages."""
  def Get(self, user, domain=''):
    """Routes to a general admin, domain admin, or map admin page."""
    if self.request.get('map'):
      # The 'map' query param may be a bare map ID or a URL ending in one.
      map_id = self.request.get('map').split('/')[-1]
      return self.redirect(str(self.request.root_path + '/.admin/%s' % map_id))
    if domain:
      self.GetDomainAdmin(user, domain)
    else:
      self.GetGeneralAdmin()
  # "user" is currently unused, but we must have a user (tacitly used in
  # AssertAccess) and we cannot rename the arg.
  def GetDomainAdmin(self, user, domain): # pylint:disable=unused-argument
    """Displays the administration page for the given domain."""
    domain_name = domain
    perms.AssertAccess(perms.Role.DOMAIN_ADMIN, domain_name)
    domain = domains.Domain.Get(domain_name)
    if not domain:
      raise base_handler.Error(404, 'Unknown domain %r.' % domain_name)
    subject_roles = perms.GetSubjectsForTarget(domain_name)
    # Collapse each user's role set to its single strongest role for display.
    user_roles = [(users.Get(subj), _MaxRole(r)) for (subj, r)
                  in subject_roles.items() if perms.IsUserId(subj)]
    user_roles.sort(key=lambda (u, r): u.email)
    labels = sorted(e.label for e in model.CatalogEntry.GetAll(domain_name))
    self.response.out.write(self.RenderTemplate('admin_domain.html', {
        'domain': domain, 'user_roles': user_roles, 'labels': labels,
        'domain_role': _MaxRole(subject_roles.get(domain_name, set())),
        'user_permission_choices': DOMAIN_PERMISSION_CHOICES,
        'initial_domain_role_choices': INITIAL_DOMAIN_ROLE_CHOICES,
        'show_welcome': self.request.get('welcome', '')
    }))
  def GetGeneralAdmin(self):
    """Renders the general admin page."""
    perms.AssertAccess(perms.Role.ADMIN)
    self.response.out.write(self.RenderTemplate('admin.html', {}))
    # TODO(kpy): Also show a list of existing domains on this page?
  def Post(self, user, domain):
    """Landing for posts from the domain administration page."""
    which = self.request.POST.pop('form')
    target = self.request.path
    # Domain creation is the one action that cannot require DOMAIN_ADMIN on
    # the (not-yet-existing) domain.
    if which != 'create-domain':
      perms.AssertAccess(perms.Role.DOMAIN_ADMIN, domain, user)
      if not domains.Domain.Get(domain):
        raise base_handler.Error(404, 'Unknown domain %r.' % domain)
    if which == 'domain-settings':
      self.UpdateDomainSettings(self.request.POST, domain)
    elif which == 'create-domain':
      self.CreateDomain(domain, user)
      target += '?welcome=1'
    else: # user or domain permissions
      inputs = dict(self.request.POST)
      self.AddNewUserIfPresent(inputs, domain)
      self.UpdateDomainRole(inputs, domain)
      SetRolesForDomain(self.FindNewPerms(inputs), domain)
    self.redirect(target)
  def UpdateDomainSettings(self, inputs, domain_name):
    """Applies the domain-settings form values to the stored Domain."""
    domain = domains.Domain.Get(domain_name)
    domain.default_label = inputs.get('default_label', 'empty')
    domain.has_sticky_catalog_entries = 'has_sticky_catalog_entries' in inputs
    domain.initial_domain_role = inputs.get(
        'initial_domain_role', perms.Role.MAP_VIEWER)
    # NO_PERMISSIONS is a form-only sentinel; store it as "no role".
    if domain.initial_domain_role == NO_PERMISSIONS:
      domain.initial_domain_role = None
    domain.Put()
  def AddNewUserIfPresent(self, inputs, domain):
    """Grants domain roles to a new user."""
    new_email = inputs.pop('new_user').strip()
    new_role = inputs.pop('new_user.permission')
    # Both the address and a role must be supplied; otherwise do nothing.
    if not new_email or not new_role:
      return
    if not utils.IsValidEmail(new_email):
      raise base_handler.Error(400, 'Invalid e-mail address: %r.' % new_email)
    user = users.GetForEmail(new_email)
    perms.Grant(user.id, new_role, domain)
  def UpdateDomainRole(self, inputs, domain_name):
    """Sets the domain-wide role from the form's 'domain_role' field."""
    # TODO(rew): Simplify this once perms have been migrated to one
    # role per (subject, target).
    new_role = inputs.pop('domain_role')
    new_role = set() if new_role == NO_PERMISSIONS else {new_role}
    SetRolesForDomain({domain_name: new_role}, domain_name)
  def FindNewPerms(self, inputs):
    """Looks at inputs and determines the new permissions for all users.
    Args:
      inputs: a dictionary of the form inputs
    Returns:
      A dictionary keyed by user/domain. Values are sets of the roles
      that the key should have.
    """
    new_perms = {}
    # Form fields are named '<uid>.permission' / '<uid>.delete'; a delete
    # checkbox overrides the accompanying permission selector.
    for key in inputs:
      if '.' in key:
        uid, input_name = key.rsplit('.', 1)
        if input_name == 'permission' and uid + '.delete' not in inputs:
          new_perms[uid] = {inputs[key]}
        elif input_name == 'delete':
          new_perms[uid] = set()
    return new_perms
  def CreateDomain(self, domain_name, user):
    """Creates the domain and grants the creator full domain roles."""
    def GrantPerms():
      perms.Grant(user.id, perms.Role.DOMAIN_ADMIN, domain_name)
      perms.Grant(user.id, perms.Role.CATALOG_EDITOR, domain_name)
      perms.Grant(user.id, perms.Role.MAP_CREATOR, domain_name)
    def TestPerms():
      return perms.CheckAccess(perms.Role.DOMAIN_ADMIN, domain_name, user)
    domain = domains.Domain.Get(domain_name)
    if domain:
      raise base_handler.Error(403, 'Domain %r already exists.' % domain_name)
    # Grant the roles and verify they took effect (see utils.SetAndTest)
    # before actually creating the domain.
    utils.SetAndTest(GrantPerms, TestPerms)
    domains.Domain.Create(domain_name)
class AdminMap(base_handler.BaseHandler):
  """Administration page for a map."""
  def Get(self, map_id):
    """Renders the admin page."""
    perms.AssertAccess(perms.Role.ADMIN)
    # Deleted maps are still shown so they can be undeleted or wiped.
    map_object = model.Map.Get(map_id) or model.Map.GetDeleted(map_id)
    if not map_object:
      raise base_handler.Error(404, 'Map %r not found.' % map_id)
    self.response.out.write(self.RenderTemplate('admin_map.html', {
        'map': map_object
    }))
  def Post(self, map_id):
    """Handles a POST (block/unblock, delete/undelete, or wipe)."""
    perms.AssertAccess(perms.Role.ADMIN)
    map_object = model.Map.Get(map_id) or model.Map.GetDeleted(map_id)
    if not map_object:
      raise base_handler.Error(404, 'Map %r not found.' % map_id)
    # Each form button posts its own field; several may apply in one request.
    if self.request.get('block'):
      map_object.SetBlocked(True)
    if self.request.get('unblock'):
      map_object.SetBlocked(False)
    if self.request.get('delete'):
      map_object.Delete()
    if self.request.get('undelete'):
      map_object.Undelete()
    if self.request.get('wipe'):
      map_object.Wipe()
    self.redirect(map_id)
|
"""
Week 7 - Activity: Classes
---------
AUTHOR: Edward Camp
"""
from Week7.Animal import Animal
'''
Up until now, we've been coding away within either methods or 'activity' python files to accomplish tasks and finish
projects. However, a lot of the base code provided and the methods you've been tasked to code in have been within classes.
Classes are an easier way of bundling related data into one object rather than multiple variables scattered throughout
the code. In addition, classes contain methods that allow users to access and manipulate existing data in the object.
When it comes to actually coding classes, it is better to think of them as a 'blueprint' for what you wish to build. Once the
blueprint is completed, it can be used to create multiple copies of the class. Each will contain the same variables and
methods to access data, but the data stored in such copies may have different values. For example, if I wish to build a
car, I would need to define a 'Car' class, declare variables that describe a car like 'maxSpeed' or an 'engine' object,
and create methods that access or manipulate the data that belongs to the car like 'gasConsumption' or 'emergencyBrake'.
In this activity, we will be defining what makes up an Animal. We could define 'hasFur', 'hasTail', 'weight', etc.
However, we will only require you to define a 'color' and 'age' variable of the Animal (to be passed into the
constructor), and a 'getInfo' that prints out the 'color' and 'age' of the animal.
Example message: "I am a blue Animal, and I am 7 years old."
If done correctly, 'activity1.py' should print out information about a red, green, and purple animal who are 24, 16, and
35 years old, respectively.
'''
# Create the three Animal instances (color, age) the activity expects, then
# print each one's description via getInfo().
animal1 = Animal('red', 24)
animal2 = Animal('green', 16)
animal3 = Animal('purple', 35)
animal1.getInfo()
animal2.getInfo()
animal3.getInfo()
#
# @lc app=leetcode id=263 lang=python3
#
# [263] Ugly Number
#
# @lc code=start
class Solution:
    def isUgly(self, num: int) -> bool:
        """Return True if `num` is an ugly number: a positive integer whose
        only prime factors are 2, 3, and 5.

        Bug fix: the original used float division (`num / 2`), which is
        inexact for values above 2**53 and could misclassify large inputs
        (e.g. 3**40); factoring with `%` and `//` keeps everything in exact
        integer arithmetic.
        """
        if num <= 0:
            return False
        # Strip out every factor of 2, 3, and 5; an ugly number reduces to 1.
        for p in (2, 3, 5):
            while num % p == 0:
                num //= p
        return num == 1

    def test(self):
        # Self-checks covering positives, negatives, zero, and a large input
        # that float division would misclassify.
        assert(self.isUgly(6) == True)
        assert(self.isUgly(8) == True)
        assert(self.isUgly(14) == False)
        assert(self.isUgly(-6) == False)
        assert(self.isUgly(-8) == False)
        assert(self.isUgly(-14) == False)
        assert(self.isUgly(0) == False)
        assert(self.isUgly(-1) == False)
        assert(self.isUgly(1) == True)
        assert(self.isUgly(3 ** 40) == True)
# Run the self-checks whenever this module is executed.
sol = Solution()
sol.test()
# @lc code=end
|
def maxsumseq(sequence):
    """Return the contiguous subsequence of `sequence` with the largest sum
    (Kadane's algorithm). An empty or all-negative input yields [].

    Bug fix: the original also asserted against a helper `maxsum()` that is
    not defined in this module, raising NameError on every call; that debug
    assertion is removed. The self-contained consistency check (the returned
    slice sums to the tracked maximum) is kept.
    """
    start, end, sum_start = -1, -1, -1
    maxsum_, sum_ = 0, 0
    for i, x in enumerate(sequence):
        sum_ += x
        if maxsum_ < sum_:  # found maximal subsequence so far
            maxsum_ = sum_
            start, end = sum_start, i
        elif sum_ < 0:  # running sum went negative: start a new sequence
            sum_ = 0
            sum_start = i
    # Sanity check: the reported slice really sums to the tracked maximum.
    assert maxsum_ == sum(sequence[start + 1:end + 1])
    return sequence[start + 1:end + 1]
|
"""
Datos de entrada
billestes de 50k-->cin-->int
billetes de 20k-->vei-->int
billetes de 10k-->die-->int
billetes de 5k-->cin-->int
billetes de 2k-->dos-->int
billetes de 1k-->uno-->int
billetes de 500-->qui-->int
billetes de 100-->cie-->int
Datos de salida
Total de 50k-->Tc-->int
Total de 20k-->Tv-->int
Total de 10k-->Td-->int
Total de 5k-->Tci-->int
Total de 2k-->Tdo-->int
Total de 1k-->Tu-->int
Total de 500-->Tq-->int
Total de 100-->Tcie-->int
Cantidad de dinero-->T-->int
"""
#entradas
cin=int(input("Digite cantidad billetes de 50k: "))
vei=int(input("digite cantidad billetes de 20k: "))
die=int(input("digite cantidad billetes de 10k: "))
cin=int(input("digite cantidad billetes de 5k: "))
dos=int(input("digite cantidad billetes de 2k: "))
uno=int(input("Digite cantidad billetes de 1k: "))
qui=int(input("Digite cantidad billetes de 500: "))
cie=int(input("Digite cantidad billetes de 100: "))
#caja negra
Tc=cin*50000
Tv=vei*20000
Td=die*10000
Tci=cin*5000
Tdo=dos*2000
Tu=uno*1000
Tq=qui*500
Tcie=cie*100
T=Tc+Tv+Td+Tci+Tdo+Tu+Tq+Tcie
#salida
print("La cantidad total de dinero es de: ",T," COP") |
from elasticizer.pipelines import *
|
# optimizer
optimizer = dict(type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=None)  # no gradient clipping
# learning policy
runner = dict(type='EpochBasedRunner', max_epochs=200)
# Cosine-annealing LR schedule decaying to 0, with a 5-epoch exponential
# warmup starting at 10% of the base learning rate.
lr_config = dict(
    policy='CosineAnnealing',
    min_lr=0,
    warmup='exp',
    warmup_iters=5,
    warmup_ratio=0.1,
    warmup_by_epoch=True)
|
# Copyright 2021 Touca, Inc. Subject to Apache-2.0 License.
from conans import ConanFile, CMake
class ToucaConan(ConanFile):
    """Conan package recipe for the Touca C++ client library."""
    name = "touca"
    homepage = "https://touca.io"
    description = "client library for regression testing arbitrary execution workflows"
    topics = ( "regression-testing", "test-framework", "test-automation" )
    url = "https://docs.touca.io"
    license = "Apache-2.0"
    version ="1.4.0"
    author = "Touca, Inc. <hello@touca.io>"
    settings = "os", "compiler", "build_type", "arch"
    # Build-time feature toggles; each maps onto a TOUCA_BUILD_* CMake flag
    # in _configure_cmake (with_openssl is forwarded to cpp-httplib).
    options = {
        "shared": [ True, False ],
        "with_tests": [ True, False ],
        "with_utils": [ True, False ],
        "with_examples": [ True, False ],
        "with_framework": [ True, False ],
        "with_openssl": [ True, False ]
    }
    default_options = {
        "shared": False,
        "with_tests": False,
        "with_utils": False,
        "with_examples": False,
        "with_framework": False,
        "with_openssl": True
    }
    generators = "cmake_find_package"
    exports_sources = [
        "CMakeLists.txt", "cmake/**", "include/**",
        "src/**", "framework/**", "tests/**", "utils/**"
    ]
    def requirements(self):
        # Core library dependencies.
        self.requires.add("cpp-httplib/0.8.0")
        self.requires.add("flatbuffers/1.12.0")
        self.requires.add("fmt/7.1.2")
        self.requires.add("ghc-filesystem/1.4.0")
        self.requires.add("rapidjson/1.1.0")
        self.requires.add("spdlog/1.8.2")
        # cxxopts is only needed by the executable-producing components.
        if self.options.with_examples or self.options.with_framework or self.options.with_utils:
            self.requires.add("cxxopts/2.2.1")
    def build_requirements(self):
        # Catch2 is only needed to build the unit tests.
        if self.options.with_tests:
            self.build_requires("catch2/2.13.3")
    def configure(self):
        # Consume header-only variants of these dependencies.
        self.options["fmt"].header_only = True
        self.options["flatbuffers"].header_only = True
        self.options["spdlog"].header_only = True
        self.options["cpp-httplib"].with_openssl = self.options.with_openssl
    def _configure_cmake(self):
        # Translate recipe options into the project's CMake cache variables.
        cmake = CMake(self)
        cmake.definitions["TOUCA_BUILD_TESTS"] = self.options.with_tests
        cmake.definitions["TOUCA_BUILD_UTILS"] = self.options.with_utils
        cmake.definitions["TOUCA_BUILD_EXAMPLES"] = self.options.with_examples
        cmake.definitions["TOUCA_BUILD_FRAMEWORK"] = self.options.with_framework
        cmake.configure()
        return cmake
    def build(self):
        cmake = self._configure_cmake()
        cmake.build()
    def test(self):
        # Runs the project's CTest suite.
        cmake = self._configure_cmake()
        cmake.test()
    def package(self):
        cmake = self._configure_cmake()
        cmake.install()
    def package_info(self):
        # Describe the packaged components and their transitive requirements
        # for consumers (cmake_find_package targets).
        client_requirements = [
            "cpp-httplib::cpp-httplib",
            "fmt::fmt",
            "flatbuffers::flatbuffers",
            "ghc-filesystem::ghc-filesystem",
            "rapidjson::rapidjson",
            "spdlog::spdlog"
        ]
        if self.options.with_examples or self.options.with_framework or self.options.with_utils:
            client_requirements.append("cxxopts::cxxopts")
        self.cpp_info.name = "touca"
        self.cpp_info.components["client"].names["cmake_find_package"] = "client"
        self.cpp_info.components["client"].libs = ["touca_client"]
        self.cpp_info.components["client"].requires = client_requirements
        if self.options.with_framework:
            self.cpp_info.components["framework"].names["cmake_find_package"] = "framework"
            self.cpp_info.components["framework"].libs = ["touca_framework"]
            self.cpp_info.components["framework"].requires = ["client"]
|
from flask import Blueprint
from database import Database
import json
from flask import request
# Blueprint holding the JSON CRUD endpoints; registered by the main Flask app.
api = Blueprint('api', __name__, template_folder='templates')
@api.route('/entries', methods=["GET"])
def get_all_entries():
    """Return every stored entry as a JSON array."""
    records = Database.get_records()
    for record in records:
        # ObjectId is not JSON-serializable; replace it in place with its string form.
        record['_id'] = str(record['_id'])
    return json.dumps(records)
@api.route('/delete', methods=['DELETE'])
def delete_all_entries():
    """Delete every stored entry and return a plain-text confirmation."""
    Database.delete_records()
    return "records deleted"
@api.route('/post', methods=['POST'])
def post_an_entry():
    """Insert a new entry built from the posted ``title``/``post`` form fields."""
    form = request.form
    new_entry = {'title': form['title'], 'post': form['post']}
    Database.insert_record(new_entry)
    return 'record added'
@api.route("/edit", methods=["POST"])
def edit_an_entry():
sieve = {
"title": request.form["title"],
"post": request.form["post"]
}
newdoc = {
"title": request.form["new_title"],
"post": request.form["new_post"]
}
Database.edit_doc(sieve, newdoc)
|
from __future__ import print_function
import io
import sys
import os.path
from setuptools import setup, find_packages
# Build the long description from the README and changelog next to this file.
readme_path = os.path.join(os.path.dirname(__file__), 'README.rst')
changelog_path = os.path.join(os.path.dirname(__file__), 'CHANGELOG')
# Bug fix: use context managers so the file handles are closed deterministically
# (the original opened both files and never closed them).
with io.open(readme_path, encoding='utf-8') as readme_file:
    readme = readme_file.read()
with io.open(changelog_path, encoding='utf-8') as changelog_file:
    changelog = changelog_file.read()
long_description = readme + '\n\n' + changelog
setup(
    name='pdbpp',
    version='0.9.2',
    author='Antonio Cuni',
    author_email='anto.cuni@gmail.com',
    packages=find_packages(exclude=['testing',]),
    url='http://github.com/antocuni/pdb',
    license='BSD',
    platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
    description='pdb++, a drop-in replacement for pdb',
    long_description=long_description,
    keywords='pdb debugger tab color completion',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: POSIX',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Programming Language :: Python',
        'Topic :: Utilities',
    ],
    install_requires=[
        "fancycompleter>=0.8",
        "wmctrl",
        "pygments",
    ],
    extras_require={
        'funcsigs': ["funcsigs"],
    },
    include_package_data=True,
)
|
from pytest import fixture
from ..step_multimodal import StepMultimodal
# (input vector, expected output) pairs fed to helpers.check_samples below.
__SAMPLES = [
    ([0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
]
@fixture
def step_multimodal() -> StepMultimodal:
    """Fixture: a fresh StepMultimodal with blocks=[5, 5] and step_size=0.1."""
    return StepMultimodal(blocks=[5, 5], step_size=0.1)
def test_samples(step_multimodal: StepMultimodal, helpers):
    """Verify the fixture instance against the known (input, output) samples."""
    helpers.check_samples(__SAMPLES, step_multimodal)
def test_as_dict(step_multimodal: StepMultimodal):
    """``as_dict`` (accessed as an attribute) must yield a plain dict."""
    as_dict = step_multimodal.as_dict
    assert isinstance(as_dict, dict)
|
import argparse
from typing import Any, Dict
import torch
import torch.nn as nn
import torchvision
# Default hyper-parameters; each can be overridden via argparse (see add_to_argparse).
PRETRAINED = True          # load pretrained weights for the ResNet backbone
NUM_CLASSES = 5            # default number of output classes
DROPOUT = False            # whether to build the dropout classification head
DROPOUT_PROB = 0.5         # dropout probability in the extra head layers
DROPOUT_HIDDEN_DIM = 512   # hidden width of the extra head layers
class RGBResnetClassifier(nn.Module):
    """Classify an image of arbitrary size through a (pretrained) ResNet network"""

    def __init__(self, data_config: Dict[str, Any] = None, args: argparse.Namespace = None) -> None:
        # NOTE(review): data_config is accepted but never read in this
        # constructor — confirm whether it is required by the caller's interface.
        super().__init__()
        self.args = vars(args) if args is not None else {}
        self.n_classes = self.args.get("n_classes", NUM_CLASSES)
        pretrained = self.args.get("pretrained", PRETRAINED)
        self.dropout = self.args.get("dropout", DROPOUT)
        # base ResNet model
        self.resnet = torchvision.models.resnet50(pretrained=pretrained)
        # Freeze the backbone: only the replacement head layers stay trainable.
        for param in self.resnet.parameters():
            param.requires_grad = False
        # changing the architecture of the last layers
        # if dropout is activated, add an additional fully connected layer with dropout before the last layer
        # split classification head into different parts to extract intermediate activations
        if self.dropout:
            # first fully connected layer (in_features -> in_features, replacing
            # the original classifier; dimensions unchanged so in_features reads
            # below still see the same value)
            self.resnet.fc = nn.Linear(self.resnet.fc.in_features, self.resnet.fc.in_features)  # additional fc layer
            # first part of additional classification head
            self.head_part_1 = nn.Sequential(
                nn.BatchNorm1d(self.resnet.fc.in_features),  # adding batchnorm
                nn.ReLU(),  # additional nonlinearity
                nn.Dropout(DROPOUT_PROB),  # additional dropout layer
                nn.Linear(self.resnet.fc.in_features, DROPOUT_HIDDEN_DIM),  # additional fc layer
            )
            # second part of classification head
            self.head_part_2 = nn.Sequential(
                nn.BatchNorm1d(DROPOUT_HIDDEN_DIM),  # adding batchnorm
                nn.ReLU(),  # additional nonlinearity
                nn.Dropout(DROPOUT_PROB),  # additional dropout layer
                nn.Linear(DROPOUT_HIDDEN_DIM, self.n_classes),  # same fc layer as we had before
            )
        # otherwise just adapt no. of classes in last fully-connected layer
        else:
            self.resnet.fc = nn.Sequential(
                nn.Linear(self.resnet.fc.in_features, self.resnet.fc.in_features),
                nn.BatchNorm1d(self.resnet.fc.in_features),
                nn.ReLU(),
                nn.Linear(self.resnet.fc.in_features, self.n_classes),
            )

    def forward(self, x: torch.Tensor, extract_intermediate_activations: bool = False) -> torch.Tensor:
        """
        Args:
            x
                (B, C, H, W) tensor (H, W can be arbitrary, will be reshaped by reprocessing)
        Returns
        -------
        torch.Tensor
            (B, C) tensor
        Note: when self.dropout and extract_intermediate_activations are both
        true, a 3-tuple (backbone output, head_part_1 output, logits) is
        returned despite the torch.Tensor annotation.
        """
        if self.dropout:
            if extract_intermediate_activations:
                # NOTE(review): self.preprocess is not defined anywhere in this
                # class, so this branch raises AttributeError unless external
                # code attaches it — confirm.
                x = self.preprocess(x)
                x = self.resnet(x)
                y = self.head_part_1(x)
                z = self.head_part_2(y)
                return x, y, z
            else:
                x = self.resnet(x)
                x = self.head_part_1(x)
                x = self.head_part_2(x)
                return x
        else:
            # NOTE(review): only this branch casts the input to float — confirm
            # the dropout path receives float tensors from its callers.
            x = x.float()
            x = self.resnet(x)
            return x

    def get_num_classes(self):
        """Return the number of output classes configured at construction."""
        return self.n_classes

    def add_to_argparse(parser):
        # NOTE(review): declared without @staticmethod and without self; this
        # works when called on the class (Python 3) but not on an instance —
        # confirm intended usage.
        # NOTE(review): argparse type=bool is a known pitfall — any non-empty
        # string (including "False") parses as True.
        parser.add_argument("--pretrained", type=bool, default=PRETRAINED)
        parser.add_argument("--n_classes", type=int, default=NUM_CLASSES)
        parser.add_argument("--dropout", type=bool, default=DROPOUT)
        return parser
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functions used by dax_tools like in dax_upload/dax_test/dax_setup.
"""
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import filter
from builtins import input
from builtins import str
from builtins import zip
from builtins import range
from builtins import object
from collections import OrderedDict
import csv
from datetime import datetime
from email.mime.text import MIMEText
import getpass
import glob
import imp
import itertools
import json
import logging
import os
import platform
import random
import readline
import shutil
import smtplib
import socket
import stat
import subprocess as sb
import sys
import time
import traceback
if sys.version_info[0] == 2:
import ConfigParser as configparser
else:
import configparser
from multiprocessing import Pool
from . import bin
from . import launcher
from . import log
from . import modules
from . import processors
from . import task
from . import xnat_tools_utils
from . import XnatUtils
from . import assessor_utils
from . import yaml_doc
from .dax_settings import (DAX_Settings, DAX_Netrc, DEFAULT_DATATYPE,
DEFAULT_FS_DATATYPE)
from .errors import (DaxUploadError, AutoProcessorError, DaxSetupError,
DaxError, DaxNetrcError)
from .task import (READY_TO_COMPLETE, COMPLETE, UPLOADING, JOB_FAILED,
JOB_PENDING, NEEDS_QA)
from .task import ClusterTask
from .XnatUtils import XnatUtilsError
from .version import VERSION as __version__
from .git_revision import git_revision as __git_revision__
# Global Variables
LOGGER = logging.getLogger('dax')  # shared module-wide logger
# Global variables for setup:
def complete(text, state):
    """Readline completer: expand *text* as a filesystem glob (dax_setup tab completion)."""
    matches = glob.glob(text + '*')
    matches.append(None)  # sentinel so readline stops after the last match
    return matches[state]
# Wire the glob-based completer into readline so dax_setup prompts get
# filesystem tab-completion.
readline.set_completer_delims(' \t\n;')
readline.parse_and_bind("tab: complete")
readline.set_completer(complete)
# Shell-profile snippet written by dax_setup to export the default XNAT host.
BASH_PROFILE_XNAT = """# Xnat Host for default dax executables:
{export_cmd}
"""
# Default values for the [admin] section of ~/.dax_settings.ini.
ADMIN_DEFAULTS = OrderedDict([
    ('user_home', os.path.expanduser('~')),
    ('admin_email', ''),
    ('smtp_host', ''),
    ('smtp_from', ''),
    ('smtp_pass', ''),
    ('xsitype_include', 'proc:genProcData')])
# Default values for the [cluster] section (grid-scheduler commands/templates).
CLUSTER_DEFAULTS = OrderedDict([
    ('cmd_submit', 'qsub'),
    ('prefix_jobid', ''),
    ('suffix_jobid', ''),
    ('cmd_count_nb_jobs', ''),
    ('cmd_get_job_status', ''),
    ('queue_status', ''),
    ('running_status', ''),
    ('complete_status', ''),
    ('cmd_get_job_memory', ''),
    ('cmd_get_job_walltime', ''),
    ('cmd_get_job_node', ''),
    ('job_extension_file', '.pbs'),
    ('job_template', ''),
    ('email_opts', 'a'),
    ('gateway', socket.gethostname()),
    ('root_job_dir', '/tmp'),
    ('queue_limit', '400'),
    ('results_dir', os.path.join(os.path.expanduser('~'),
                                 'RESULTS_XNAT_SPIDER')),
    ('max_age', '14'),
    ('launcher_type', 'xnatq-combined'),
    ('skip_lastupdate', '')])
# Default values for the [code_path] section.
CODE_PATH_DEFAULTS = OrderedDict([
    ('processors_path', ''),
    ('spiders_path', ''),
    ('modules_path', '')])
# Default values for the [dax_manager] section (REDCap field names).
DAX_MANAGER_DEFAULTS = OrderedDict([
    ('api_url', ''),
    ('api_key_dax', ''),
    ('project', 'dax_project'),
    ('settingsfile', 'dax_settings_full_path'),
    ('masimatlab', 'dax_masimatlab'),
    ('tmp', 'dax_tmp_directory'),
    ('logsdir', 'dax_logs_path'),
    ('user', 'dax_cluster_user'),
    ('gateway', 'dax_gateway'),
    ('email', 'dax_cluster_email'),
    ('queue', 'dax_queue_limit'),
    ('priority', 'dax_proj_order'),
    ('email_opts', 'dax_job_email_options'),
    ('dax_build_start_date', 'dax_build_start_date'),
    ('dax_build_end_date', 'dax_build_end_date'),
    ('dax_build_pid', 'dax_build_pid'),
    ('dax_update_tasks_start_date', 'dax_update_tasks_start_date'),
    ('dax_update_tasks_end_date', 'dax_update_tasks_end_date'),
    ('dax_update_tasks_pid', 'dax_update_tasks_pid'),
    ('dax_launch_start_date', 'dax_launch_start_date'),
    ('dax_launch_end_date', 'dax_launch_end_date'),
    ('dax_launch_pid', 'dax_launch_pid'),
    ('max_age', 'dax_max_age'),
    ('skip_lastupdate', 'dax_skip_lastupdate'),
    ('admin_email', 'dax_email_address')])
# Map of ini section name -> its default values, used by dax_setup.
DEFAULTS = {'admin': ADMIN_DEFAULTS,
            'cluster': CLUSTER_DEFAULTS,
            'code_path': CODE_PATH_DEFAULTS,
            'dax_manager': DAX_MANAGER_DEFAULTS}
# Comment header written at the top of the generated dax_settings.ini.
INI_HEADER = """;dax_settings.ini contains all the variables to set dax on your system.
;It contains 4 sections define by [].
;The first one is [admin] defining the High level admin information.
; E.g. email address. xsitype_include needs to define the datatypes for DAX
; (Default: proc:genProcData).
;The second is [cluster] for deep information about the cluster.
; This should include commands that are grid-specific to get job id,
; walltime usage etc. Additionally, there are several templates that
; needed to be specified. See readthedocs for a description.
;The third one is [code_path] for Python script extension information.
; To import in dax all the spiders, processors and modules from those folders.
; You don't have to set a path if you don't want to give those paths.
;The fourth and last one is [dax_manager] that defines the REDCap
; infrastructure (options). Dax_manager uses REDCap to automatically generate
; settings for project. This section will help you set the API for your redcap
; project and all the variable on REDCap. If you don't know anything about it
; Leave all attributes to defaults value.
"""
# Sentence to write when message prompt for user
# Each entry: prompt text, whether the answer is a filesystem path, and
# optionally whether it is confidential (echo suppressed by the caller).
OPTIONS_DESCRIPTION = {
    'user_home': {'msg': 'Please enter your home directory: ',
                  'is_path': True},
    'admin_email': {'msg': 'Please enter email address for admin. \
All emails will get sent here: ', 'is_path': False},
    'smtp_from': {'msg': 'Please enter an email address where emails \
should be sent from: ', 'is_path': False},
    'smtp_host': {'msg': 'Please enter the SMTP host associated to your \
email address: ', 'is_path': False},
    'smtp_pass': {'msg': 'Please enter the password associated to your \
email address: ', 'is_path': False, 'confidential': True},
    'xsitype_include': {'msg': 'Please enter the xsitypes you would like DAX \
to access in your XNAT instance: ', 'is_path': False},
    'cmd_submit': {'msg': 'What command is used to submit your batch file? \
[e.g., qsub, sbatch]: ', 'is_path': False},
    'prefix_jobid': {'msg': 'Please enter a string to print before the \
job id after submission: ', 'is_path': False},
    'suffix_jobid': {'msg': 'Please enter a string to print after the \
job id after submission: ', 'is_path': False},
    'cmd_count_nb_jobs': {'msg': 'Please enter the full path to text file \
containing the command used to count the number of jobs in the queue: ',
                          'is_path': True},
    'cmd_get_job_status': {'msg': 'Please enter the full path to text file \
containing the command used to check the running status of a job: ',
                           'is_path': True},
    'queue_status': {'msg': 'Please enter the string the job scheduler would \
use to indicate that a job is "in the queue": ', 'is_path': False},
    'running_status': {'msg': 'Please enter the string the job scheduler \
would use to indicate that a job is "running": ', 'is_path': False},
    'complete_status': {'msg': 'Please enter the string the job scheduler \
would use to indicate that a job is "complete": ', 'is_path': False},
    'cmd_get_job_memory': {'msg': 'Please enter the full path to the text \
file containing the command used to see how much memory a job used: ',
                           'is_path': True},
    'cmd_get_job_walltime': {'msg': 'Please enter the full path to the text \
file containing the command used to see how much walltime a job used: ',
                             'is_path': True},
    'cmd_get_job_node': {'msg': 'Please enter the full path to the text file \
containing the command used to see which node a job used: ',
                         'is_path': True},
    'job_extension_file': {'msg': 'Please enter an extension for the job \
batch file: ', 'is_path': False},
    'job_template': {'msg': 'Please enter the full path to the text file \
containing the template used to generate the batch script: ',
                     'is_path': True},
    'email_opts': {'msg': 'Please provide the options for the email \
notification for a job as defined by your grid scheduler: ', 'is_path': False},
    'gateway': {'msg': 'Please enter the hostname of the server \
to run dax on: ', 'is_path': False},
    'root_job_dir': {'msg': 'Please enter where the data should be stored \
on the node: ', 'is_path': True},
    'queue_limit': {'msg': 'Please enter the maximum number of jobs \
that should run at once: ', 'is_path': False},
    'results_dir': {'msg': 'Please enter directory where data will get \
copied to for upload: ', 'is_path': True},
    'max_age': {'msg': 'Please enter max days before re-running dax_build \
on a session: ', 'is_path': False},
    'launcher_type': {'msg': 'Please enter launcher type: ',
                      'is_path': False},
    'skip_lastupdate': {'msg': 'Do you want to skip last update?: ',
                        'is_path': False},
    'api_url': {'msg': 'Please enter your REDCap API URL: ',
                'is_path': False},
    'api_key_dax': {'msg': 'Please enter the key to connect to the \
DAX Manager REDCap database: ', 'is_path': False},
    'spiders_path': {'msg': 'Please enter Folder path where you store \
your spiders: ', 'is_path': True},
    'processors_path': {'msg': 'Please enter Folder path where you store \
your processors: ', 'is_path': True},
    'modules_path': {'msg': 'Please enter Folder path where you store \
your modules: ', 'is_path': True},
}
SGE_TEMPLATE = """#!/bin/bash
#$ -S /bin/sh
#$ -M ${job_email}
#$ -m ${job_email_options}
#$ -l h_rt=${job_walltime}
#$ -l tmem=${job_memory}M
#$ -l h_vmem=${job_memory}M
#$ -o ${job_output_file}
#$ -pe smp ${job_ppn}
#$ -j y
#$ -cwd
#$ -V
uname -a # outputs node info (name, date&time, type, OS, etc)
export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=${job_ppn} #set the variable \
to use only the right amount of ppn
export OMP_NUM_THREADS=${job_ppn} #as previous line for openmp code
source ${job_env} #source the specified environement file
SCREEN=$$$$$$$$
SCREEN=${SCREEN:0:8}
echo 'Screen display number for xvfb-run' $SCREEN
xvfb-run --wait=5 \
-a -e /tmp/xvfb_$SCREEN.err -f /tmp/xvfb_$SCREEN.auth \
--server-num=$SCREEN \
--server-args="-screen 0 1920x1200x24 -ac +extension GLX" \
${job_cmds}\n"""
DEFAULT_SGE_DICT = {'cmd_submit': 'qsub',
'prefix_jobid': 'Your job ',
'suffix_jobid': '("',
'cmd_count_nb_jobs': 'expr `qstat -u $USER | wc -l` - 2\n',
'queue_status': 'qw',
'running_status': 'r',
'complete_status': '',
'cmd_get_job_memory': "echo ''\n",
'cmd_get_job_node': "echo ''\n",
'cmd_get_job_status': "qstat -u $USER | grep ${jobid} \
| awk {'print $5'}\n",
'cmd_get_job_walltime': "echo ''\n",
'job_extension_file': '.pbs',
'job_template': SGE_TEMPLATE,
'email_opts': 'a'}
SLURM_TEMPLATE = """#!/bin/bash
#SBATCH --mail-user=${job_email}
#SBATCH --mail-type=${job_email_options}
#SBATCH --nodes=1
#SBATCH --ntasks=${job_ppn}
#SBATCH --time=${job_walltime}
#SBATCH --mem=${job_memory}mb
#SBATCH -o ${job_output_file}
uname -a # outputs node info (name, date&time, type, OS, etc)
export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=${job_ppn} #set the variable \
to use only the right amount of ppn
export OMP_NUM_THREADS=${job_ppn} #as previous line for openmp code
source ${job_env} #source the specified environement file
SCREEN=$$$$$$$$
SCREEN=${SCREEN:0:8}
echo 'Screen display number for xvfb-run' $SCREEN
xvfb-run --wait=5 \
-a -e /tmp/xvfb_$SCREEN.err -f /tmp/xvfb_$SCREEN.auth \
--server-num=$SCREEN \
--server-args="-screen 0 1920x1200x24 -ac +extension GLX" \
${job_cmds}\n"""
DEFAULT_SLURM_DICT = {'cmd_submit': 'sbatch',
'prefix_jobid': 'Submitted batch job ',
'suffix_jobid': '\n',
'cmd_count_nb_jobs': 'squeue -u masispider,vuiiscci \
--noheader | wc -l\n',
'queue_status': 'Q',
'running_status': 'R',
'complete_status': 'slurm_load_jobs error: Invalid job \
id specified\n',
'cmd_get_job_memory': "sacct -j ${jobid}.batch --format \
MaxRss --noheader | awk '{print $1+0}'\n",
'cmd_get_job_node': 'sacct -j ${jobid}.batch --format \
NodeList --noheader\n',
'cmd_get_job_status': 'slurm_load_jobs error: Invalid \
job id specified\n',
'cmd_get_job_walltime': 'sacct -j ${jobid}.batch \
--format CPUTime --noheader\n',
'job_extension_file': '.slurm',
'job_template': SLURM_TEMPLATE,
'email_opts': 'FAIL'}
MOAB_TEMPLATE = """#!/bin/bash
#PBS -M ${job_email}
#PBS -m ${job_email_options}
#PBS -l nodes=1:ppn=${job_ppn}
#PBS -l walltime=${job_walltime}
#PBS -l mem=${job_memory}mb
#PBS -o ${job_output_file}
#PBS -j y
uname -a # outputs node info (name, date&time, type, OS, etc)
export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=${job_ppn} #set the variable \
to use only the right amount of ppn
export OMP_NUM_THREADS=${job_ppn} #as previous line for openmp code
source ${job_env} #source the specified environement file
SCREEN=$$$$$$$$
SCREEN=${SCREEN:0:8}
echo 'Screen display number for xvfb-run' $SCREEN
xvfb-run --wait=5 \
-a -e /tmp/xvfb_$SCREEN.err -f /tmp/xvfb_$SCREEN.auth \
--server-num=$SCREEN \
--server-args="-screen 0 1920x1200x24 -ac +extension GLX" \
${job_cmds}\n"""
DEFAULT_MOAB_DICT = {
'cmd_submit': 'qsub',
'prefix_jobid': '',
'suffix_jobid': '.',
'cmd_count_nb_jobs': 'qstat | grep $USER | wc -l\n',
'queue_status': 'Q',
'running_status': 'R',
'complete_status': 'C',
'cmd_get_job_memory': "rsh vmpsched 'tracejob -n ${numberofdays} \
${jobid}'2> /dev/null | awk -v FS='(resources_used.mem=|kb)' '{print $2}' \
| sort -u | tail -1\n",
'cmd_get_job_node': "echo ''\n",
'cmd_get_job_status': "qstat -f ${jobid} | grep job_state \
| awk {'print $3'}\n",
'cmd_get_job_walltime': "rsh vmpsched 'tracejob -n ${numberofdays} \
${jobid}' 2> /dev/null | awk -v FS='(resources_used.walltime=|\n)' \
'{print $2}' | sort -u | tail -1\n",
'job_extension_file': '.pbs',
'job_template': MOAB_TEMPLATE,
'email_opts': 'a'}
# Variables for upload
ERR_MSG = 'Error from XnatUtils when uploading: %s'
DAX_SETTINGS = DAX_Settings()
RESULTS_DIR = DAX_SETTINGS.get_results_dir()
JOB_EXTENSION_FILE = DAX_SETTINGS.get_job_extension_file()
DISKQ_DIR = os.path.join(RESULTS_DIR, 'DISKQ')
DISKQ_BATCH_DIR = os.path.join(DISKQ_DIR, 'BATCH')
_COMPLETE_FLAG_FILE = 'READY_TO_COMPLETE.txt'
SMTP_FROM = DAX_SETTINGS.get_smtp_from()
SMTP_HOST = DAX_SETTINGS.get_smtp_host()
SMTP_PASS = DAX_SETTINGS.get_smtp_pass()
_READY_FLAG_FILE = 'READY_TO_UPLOAD.txt'
_FAILED_FLAG_FILE = 'JOB_FAILED.txt'
_EMAILED_FLAG_FILE = 'ALREADY_SEND_EMAIL.txt'
_OUTLOG = 'OUTLOG'
_TRASH = 'TRASH'
_PBS = 'PBS'
_FLAG_FILES = 'FlagFiles'
_UPLOAD_SKIP_LIST = [_OUTLOG, _TRASH, _PBS, _FLAG_FILES]
FLAGFILE_TEMPLATE = os.path.join(RESULTS_DIR, _FLAG_FILES,
'Process_Upload_running')
SNAPSHOTS_ORIGINAL = 'snapshot_original.png'
SNAPSHOTS_PREVIEW = 'snapshot_preview.png'
DEFAULT_HEADER = ['host', 'username', 'password', 'projects']
# Cmd:
GS_CMD = """gs -q -o {original} -sDEVICE=pngalpha -dLastPage=1 {assessor_path}\
/PDF/*.pdf"""
CONVERT_CMD = """convert {original} -resize x200 {preview}"""
# WARNING content for emails
WARNING_START_CONTENT = """
The following assessors already exist and the Spider try to upload files on \
existing files :
"""
WARNING_END_CONTENT = """
You should:
- remove the assessor if you want to upload the data
- set the status of the assessor to "uploading"
- remove the data from the upload folder if you do not want to upload.
"""
WARNING_SUBJECT = 'ERROR/WARNING: dax_upload'
# Variables for testing
DAX_TEST_DIR = os.path.join(os.path.expanduser("~"), '.dax_test')
TD_INFO = """======================================================================
DAX TEST
----------------------------------------------------------------------
Platform : {platform}
Python v. : {version}
Dax v. : {dax_version}
XNAT host : {host}
Username : {user}
======================================================================
Running test for dax files generated by user ...
----------------------------------------------------------------------
"""
TD_END = """
----------------------------------------------------------------------
ran {nb_test} test(s) in {time}s
{state}
"""
SETTINGS_DISPLAY = """ Xnat host: {host}
Xnat user: {user}
Projects Priority: {priority}
Projects Processors: {pp}
Projects Modules: {pm}
Root Job Dir: {jobdir}
Job email: {email}
Email options: {email_opts}
Queue limit: {limit}
Maximum age session: {age}
"""
PROC_DISPLAY = """ *NAME: {name}
SPIDER:
Path: {spath}
version: {version}
XNAT:
Host: {host}
type: {xsitype}
level: {level}
CLUSTER:
memory: {memory}
walltime: {walltime}
Number of cores: {ppn}
Environment file: {env}
OTHER ARGUMENTS:
{other}
"""
PROC_DEF_ARGS = ['name', 'xnat_host', 'xsitype', 'memreq_mb', 'walltime_str',
'ppn', 'env', 'spider_path', 'version']
MOD_DISPLAY = """ *NAME: {name}
TEMP DIRECTORY: {temp_dir}
REPORT EMAIL: {email}
XNAT:
level: {level}
OTHER ARGUMENTS:
{other}
"""
MOD_DEF_ARGS = ['name', 'xnat_host', 'directory', 'email']
DEL_DW = "-----------------------------------------------------------------\
-----"
DEL_UP = "=================================================================\
====="
def upload_tasks(logfile, debug, upload_settings=None,
                 host=None, username=None, password=None,
                 projects=None, suffix=None, emailaddress=None,
                 uselocking=True):
    """
    Upload tasks from the queue folder.

    :param logfile: Full file of the file used to log to
    :param debug: Should debug mode be used
    :param upload_settings: settings file (csv, py, json) to define
                            xnat host/project relationship.
    :param host: XNAT host
    :param username: XNAT username
    :param password: XNAT password
    :param suffix: suffix for flagfile
    :param emailaddress: email address for warnings
    :param projects: Project(s) to upload
    :param uselocking: when True, a flag file ensures only one dax_upload
                       runs at a time
    """
    bin.set_logger(logfile, debug)
    # Check if folders exist
    check_folders()
    # Lock file path used to prevent concurrent dax_upload runs.
    flagfile = "%s%s.txt" % (FLAGFILE_TEMPLATE, suffix)
    # Load the settings for upload
    upload_settings = load_upload_settings(upload_settings, host, username,
                                           password, projects)
    print_upload_settings(upload_settings)
    # create the flag file showing that the spider is running
    # (is_dax_upload_running creates the flag file as a side effect when no
    # other upload holds it)
    if uselocking and is_dax_upload_running(flagfile):
        # Another dax_upload holds the lock: skip this run entirely.
        pass
    else:
        try:
            upload_results(upload_settings, emailaddress)
        finally:
            if uselocking:
                # remove flagfile
                os.remove(flagfile)
def testing(test_file, project, sessions, host=None, username=None, hide=False,
            do_not_remove=False, nb_sess=5):
    """
    Function to run test on some files for dax.

    :param test_file: file to test
    :param project: project ID on XNAT
    :param sessions: list of sessions to run on XNAT
    :param host: XNAT host
    :param username: XNAT username
    :param hide: Hide dax outputs in a logfile in ~/.dax_test/dax_test.log.
    :param do_not_remove: do not remove files generated
    :param nb_sess: number of sessions to process(default 5 maximum)
    """
    # Create test results class object:
    tests = test_results()
    # Load test_file:
    test_obj = load_test(test_file)
    if not test_obj:
        tests.inc_error()
    else:
        _host = host if host is not None else os.environ.get('XNAT_HOST', None)
        _user = username if username is not None else 'user in dax netrc file.'
        if isinstance(test_obj, launcher.Launcher):
            # A Launcher carries its own connection info; prefer it.
            _host = test_obj.xnat_host
            _user = test_obj.xnat_user
        print(TD_INFO.format(platform=platform.system(),
                             version=platform.python_version(),
                             dax_version=__version__,
                             host=_host, user=_user))
        # set test object:
        tests.set_tobj(test_obj)
        # Make the temp dir:
        if not os.path.isdir(DAX_TEST_DIR):
            os.makedirs(DAX_TEST_DIR)
        # Set the log of any dax function to a temp file for user:
        if hide:
            logfile = os.path.join(DAX_TEST_DIR, 'dax_test.log')
        else:
            logfile = None
        log.setup_debug_logger('dax', logfile)
        # NOTE(review): passes the raw username (possibly None) rather than
        # _user -- confirm this is intended.
        with XnatUtils.get_interface(host=_host, user=username) as intf:
            tests.set_xnat(intf)
            tests.run_test(project, sessions, nb_sess)
        print(TD_END.format(nb_test=tests.get_number(),
                            time="%.3f" % tests.get_time(),
                            state=tests.get_test_state()))
        # Bug fix: the original deleted DAX_TEST_DIR when do_not_remove was
        # True -- i.e. exactly when the caller asked to KEEP the generated
        # files (see the docstring). Invert the check so the directory is
        # removed only when removal was requested and the tests passed.
        if not do_not_remove:
            if 'OK' == tests.get_test_state()[:2]:
                shutil.rmtree(DAX_TEST_DIR)
def setup_dax_package():
    """Interactively create or edit ~/.dax_settings.ini and the XNAT credentials."""
    print('########## DAX_SETUP ##########')
    print('Script to setup the ~/.dax_settings.ini files \
for your dax installation.\n')
    # Set xnat credentials if needed
    set_xnat_netrc()
    # Set the settings for dax
    dsh = DAX_Setup_Handler()
    if dsh.exists():
        print('Settings file ~/.dax_settings.ini found.\n')
        # Leave an existing settings file untouched unless the user opts in.
        if not xnat_tools_utils.prompt_user_yes_no('Do you want to edit it?'):
            print('########## END ##########')
            sys.exit()
    dsh.config()
    dsh.write()
    print('\n0 error(s) -- dax_setup done.')
    print('########## END ##########')
# Functions for Uploading
def send_email(from_add, password, dests, subject, content, server):
    """
    Send a plain-text email over SMTP with STARTTLS.

    :param from_add: address to send the email from
    :param password: password for the email address
    :param dests: list of emails addresses to send to
    :param subject: subject for the email
    :param content: content of the email
    :param server: SMTP server used to send email.
    :return: None
    """
    # Build the message envelope.
    message = MIMEText(content)
    message['Subject'] = subject
    message['From'] = from_add
    message['To'] = ','.join(dests)
    # Connect, upgrade to TLS, authenticate, send, disconnect.
    connection = smtplib.SMTP(server)
    connection.starttls()
    connection.login(from_add, password)
    connection.sendmail(from_add, dests, message.as_string())
    connection.quit()
def send_warning_emails(warnings, emailaddress):
    """
    Send warning emails about the dax_upload queue.

    :param warnings: list of warnings
    :param emailaddress: comma-separated recipient address string
    :return: None
    """
    # Nothing to report or nobody to tell: bail out early.
    if not (warnings and emailaddress):
        return
    bullet_lines = [' - %s\n' % warning for warning in warnings]
    content = WARNING_START_CONTENT + ''.join(bullet_lines) + WARNING_END_CONTENT
    # Only send when the SMTP settings are fully configured.
    if SMTP_FROM and SMTP_PASS and SMTP_HOST:
        send_email(SMTP_FROM, SMTP_PASS, emailaddress.split(','),
                   WARNING_SUBJECT, content, SMTP_HOST)
def check_folders():
    """
    Create the upload directory tree (results dir and its sub-folders)
    for any piece that does not exist yet.

    :return: None
    """
    # Parent first, then its special sub-folders, in creation order.
    required = [
        RESULTS_DIR,
        os.path.join(RESULTS_DIR, _OUTLOG),
        os.path.join(RESULTS_DIR, _TRASH),
        os.path.join(RESULTS_DIR, _PBS),
        os.path.join(RESULTS_DIR, _FLAG_FILES),
    ]
    for folder in required:
        if not os.path.exists(folder):
            os.mkdir(folder)
def select_assessor(xnat, assessor_dict):
    """
    Select the assessor pyxnat Eobject from the assessor dictionary information.

    :param xnat: pyxnat.interface object
    :param assessor_dict: assessor dictionary
    :return: assessor pyxnat Eobject
    """
    project = assessor_dict['project_id']
    subject = assessor_dict['subject_label']
    session = assessor_dict['session_label']
    label = assessor_dict['label']
    return XnatUtils.select_obj(xnat, project, subject, session,
                                assessor_id=label)
def is_dax_upload_running(flagfile):
    """
    Check if dax_upload is not already running on the station.

    Creates *flagfile* (stamped with the current date/time) as a lock when no
    other upload is running.

    :param flagfile: path of the lock file to check/create
    :return: True if dax_upload already running, False otherwise.
    """
    if os.path.exists(flagfile):
        LOGGER.warn('Upload already running.')
        return True
    today = datetime.now()
    datestr = "Date: %s%s%s_%s:%s:%s" % (str(today.year),
                                         str(today.month),
                                         str(today.day),
                                         str(today.hour),
                                         str(today.minute),
                                         str(today.second))
    # Bug fix: with-statement guarantees the handle is closed even if the
    # write raises (the original leaked the handle on error).
    with open(flagfile, 'w') as f_obj:
        f_obj.write(datestr + '\n')
    LOGGER.debug('Flagfile created: %s with date: %s\n'
                 % (flagfile, datestr))
    return False
def get_assessor_dict(assessor_label, assessor_path):
    """
    Generate the dictionary for an assessor from the folder in the queue.

    :param assessor_label: assessor label ('proj-x-subj-x-sess-x-...-x-proctype')
    :param assessor_path: assessor path on the station
    :return: dict describing the assessor, or an empty dict for an
             invalid label (fewer than four '-x-' separated parts)
    """
    parts = assessor_label.split('-x-')
    if len(parts) <= 3:
        return dict()
    return {
        'project_id': parts[0],
        'subject_label': parts[1],
        'session_label': parts[2],
        'label': assessor_label,
        'proctype': parts[-1],
        'path': assessor_path,
    }
def get_assessor_list(projects):
    """
    Get the list of assessors labels to upload to XNAT from the queue folder.

    :param projects: list of projects to upload to XNAT
    :return: list of assessor to upload from upload folder
    """
    LOGGER.debug(' - Get Processes names from the upload folder...')
    # Collect queue sub-directories, oldest modification first.
    candidates = [d for d in glob.glob(os.path.join(RESULTS_DIR, '*'))
                  if os.path.isdir(d)]
    candidates.sort(key=lambda x: os.path.getmtime(x))
    selected = list()
    for folder in candidates:
        label = os.path.basename(folder)
        # Special sub-folders (OUTLOG, TRASH, ...) are never assessors.
        if label in _UPLOAD_SKIP_LIST:
            continue
        # If projects set, keep only assessors belonging to those projects.
        if projects and label.split('-x-')[0] not in projects:
            continue
        path = os.path.join(RESULTS_DIR, label)
        if not os.path.isdir(path):
            continue
        # Already reported by email: leave it for manual handling.
        if os.path.exists(os.path.join(path, _EMAILED_FLAG_FILE)):
            continue
        ready = os.path.exists(os.path.join(path, _READY_FLAG_FILE))
        failed = os.path.exists(os.path.join(path, _FAILED_FLAG_FILE))
        if not (ready or failed):
            continue
        # DISKQ assessors additionally need the complete flag.
        if is_diskq_assessor(label) and \
                not os.path.exists(os.path.join(path, _COMPLETE_FLAG_FILE)):
            continue
        # Passed all checks, so add it to upload list
        selected.append(label)
    return selected
def get_pbs_list(projects):
    """
    Get the list of PBS file to upload to XNAT.

    :param projects: list of projects to upload to XNAT
    :return: list of pbs file from the PBS folder
    """
    LOGGER.debug(' - Get the PBS for the processes...')
    pbs_dir = os.path.join(RESULTS_DIR, _PBS)
    pbs_list = list()
    # Keep only plain files whose project (first '-x-' token) is wanted
    for name in os.listdir(pbs_dir):
        if projects and name.split('-x-')[0] not in projects:
            continue
        if os.path.isfile(os.path.join(pbs_dir, name)):
            pbs_list.append(name)
    return pbs_list
def get_version_assessor(assessor_path):
    """
    Get the version of the assessor that we are uploading from text file.

    :param assessor_path: path for the assessor
    :return: version string from version.txt (stripped), or '1.0.0' when
             the file does not exist
    """
    version = '1.0.0'
    version_file = os.path.join(assessor_path, 'version.txt')
    if os.path.exists(version_file):
        # Context manager guarantees the handle is closed even on error
        with open(version_file, 'r') as f_obj:
            version = f_obj.read().strip()
    return version
def get_dax_docker_version_assessor(assessor_path):
    """
    Get the dax_docker_version of assessor we are uploading from text file.

    :param assessor_path: path for the assessor
    :return: contents of dax_docker_version.txt (stripped), or '' when the
             file cannot be read
    """
    version_file = os.path.join(assessor_path, 'dax_docker_version.txt')
    dax_docker_version = ''
    try:
        with open(version_file, 'r') as fhandle:
            dax_docker_version = fhandle.read().strip()
    except IOError as err:
        LOGGER.warn('failed to read dax_docker_version:' + str(err))
    return dax_docker_version
def generate_snapshots(assessor_path):
    """
    Generate Snapshots from the PDF if it exists.

    :param assessor_path: path for the assessor
    :return: None
    """
    snap_dir = os.path.join(assessor_path, 'SNAPSHOTS')
    original = os.path.join(snap_dir, SNAPSHOTS_ORIGINAL)
    preview = os.path.join(snap_dir, SNAPSHOTS_PREVIEW)
    pdf_dir = os.path.join(assessor_path, 'PDF')
    if os.path.exists(pdf_dir) and not os.path.exists(original):
        LOGGER.debug('    +creating original of SNAPSHOTS')
        if not os.path.exists(snap_dir):
            os.mkdir(snap_dir)
        # Make the snapshots for the assessors with ghostscript
        os.system(GS_CMD.format(original=original,
                                assessor_path=assessor_path))
    # Create the preview snapshot from the original if Snapshots exist :
    if os.path.exists(original):
        LOGGER.debug('    +creating preview of SNAPSHOTS')
        # Make the snapshot_thumbnail
        os.system(CONVERT_CMD.format(original=original, preview=preview))
def copy_outlog(assessor_dict, assessor_path):
    """
    Move the outlog file into the assessor folder if we are uploading.

    :param assessor_dict: dictionary for the assessor
    :param assessor_path: path for the assessor on the station
    :return: None
    """
    outlog_path = os.path.join(RESULTS_DIR, _OUTLOG,
                               assessor_dict['label'] + '.output')
    new_outlog_path = os.path.join(assessor_path, _OUTLOG,
                                   assessor_dict['label'] + '.output')
    if os.path.exists(outlog_path):
        # exist_ok avoids crashing when a previous (partial) upload attempt
        # already created the OUTLOG folder in the assessor directory
        os.makedirs(os.path.join(assessor_path, _OUTLOG), exist_ok=True)
        shutil.move(outlog_path, new_outlog_path)
def get_xsitype(assessor_dict):
    """
    Return the xsitype to use for an assessor.

    :param assessor_dict: dictionary for the assessor
    :return: FreeSurfer datatype for 'FS' proctype, generic datatype otherwise
    """
    is_freesurfer = assessor_dict['proctype'] == 'FS'
    return DEFAULT_FS_DATATYPE if is_freesurfer else DEFAULT_DATATYPE
def is_complete(assessor_dict, assessor_path, procstatus):
    """
    Check whether the assessor already finished on XNAT; if so, flag it.

    Drops an "emailed" flag file in the assessor folder so the assessor is
    skipped by later runs.

    :param assessor_dict: dictionary for the assessor
    :param assessor_path: path for the assessor on the station
    :param procstatus: status to set for the assessor
    :return: True if the assessor is Complete, False otherwise
    """
    if procstatus not in (READY_TO_COMPLETE, COMPLETE):
        return False
    open(os.path.join(assessor_path, _EMAILED_FLAG_FILE), 'w').close()
    LOGGER.warn(' -->Data already present on XNAT.\n')
    return True
def create_freesurfer_assessor(assessor_obj):
    """
    Create freesurfer specific assessor using the DEFAULT_FS_DATATYPE from dax

    :param assessor_obj: pyxnat assessor Eobject
    :return: None
    """
    # create the assessor and set the status
    fs_kwargs = {DEFAULT_FS_DATATYPE + '/fsversion': '0'}
    assessor_obj.create(assessors=DEFAULT_FS_DATATYPE, **fs_kwargs)
    now = datetime.now()
    today = '%s-%s-%s-' % (str(now.year), str(now.month), str(now.day))
    attrs = {
        DEFAULT_FS_DATATYPE + '/validation/status': JOB_PENDING,
        DEFAULT_FS_DATATYPE + '/date': today,
    }
    assessor_obj.attrs.mset(attrs)
def create_default_assessor(assessor_obj, proctype):
    """
    Create default assessor using the DEFAULT_DATATYPE from dax

    :param assessor_obj: pyxnat assessor Eobject
    :param proctype: proctype for the assessor
    :return: None
    """
    now = datetime.now()
    today = '%s-%s-%s-' % (str(now.year), str(now.month), str(now.day))
    # Create the assessor and set attributes
    assessor_obj.create(assessors=DEFAULT_DATATYPE)
    # Call mset to only make a single HTTP request
    attrs = {
        DEFAULT_DATATYPE + '/validation/status': JOB_PENDING,
        DEFAULT_DATATYPE + '/proctype': proctype,
        DEFAULT_DATATYPE + '/date': today,
    }
    assessor_obj.attrs.mset(attrs)
def should_upload_assessor(assessor_obj, assessor_dict, assessor_path, version):
    """
    Check if the assessor is ready to be uploaded to XNAT.

    Side effect: when the assessor is uploadable, its procstatus is set to
    UPLOADING and its procversion to *version* on XNAT.

    :param assessor_obj: pyxnat assessor Eobject
    :param assessor_dict: assessor dictionary
    :param assessor_path: path for the assessor on the station
    :param version: version for the assessor
    :return: True if the assessor should be uploaded, False otherwise
    """
    # The assessor must already exist on XNAT (it is no longer created here)
    if not assessor_obj.exists():
        return False
    xsitype = assessor_obj.datatype()
    # Check if not already complete assessor
    procstatus = assessor_obj.attrs.get(xsitype + '/procstatus')
    if is_complete(assessor_dict, assessor_path, procstatus):
        return False
    # set the status to UPLOADING
    assessor_obj.attrs.mset({xsitype + '/procstatus': UPLOADING,
                             xsitype + '/procversion': version})
    return True
def upload_assessor(xnat, assessor_dict, assessor_path):
    """
    Upload results to an assessor.

    :param xnat: pyxnat.Interface object
    :param assessor_dict: assessor dictionary
    :param assessor_path: local queue folder holding the assessor results
    :return: True if handled (uploaded, or the session is missing on XNAT),
             False if the assessor was not ready to be uploaded; an implicit
             None is returned on mid-upload errors (treated as falsy)
    """
    # get spiderpath from version.txt file:
    version = get_version_assessor(assessor_path)
    dax_docker_version = get_dax_docker_version_assessor(assessor_path)
    session_obj = XnatUtils.select_obj(xnat,
                                       assessor_dict['project_id'],
                                       assessor_dict['subject_label'],
                                       assessor_dict['session_label'])
    if not session_obj.exists():
        LOGGER.error('Cannot upload assessor, session does not exist.')
        # NOTE(review): True is returned here even though nothing was
        # uploaded -- presumably so the caller does not warn again; confirm
        return True
    # Select assessor
    # Re-parse the on-disk folder name (overwrites the incoming dict)
    assessor_dict =\
        assessor_utils.parse_full_assessor_name(os.path.basename(assessor_path))
    assessor_obj = session_obj.assessor(assessor_dict['label'])
    #xsitype = get_xsitype(assessor_dict)
    # should_upload_assessor also marks the assessor UPLOADING on XNAT
    if should_upload_assessor(assessor_obj,
                              assessor_dict,
                              assessor_path,
                              version):
        xsitype = assessor_obj.datatype()
        # Before Upload
        generate_snapshots(assessor_path)
        copy_outlog(assessor_dict, assessor_path)
        # Upload the XML if FreeSurfer
        if xsitype == DEFAULT_FS_DATATYPE:
            xmlpath = os.path.join(assessor_path, 'XML')
            if os.path.exists(xmlpath):
                LOGGER.debug('    +setting XML for FreeSurfer')
                xml_files_list = os.listdir(xmlpath)
                # Exactly one XML file is expected for a FreeSurfer assessor
                if len(xml_files_list) != 1:
                    fpath = assessor_path
                    msg = 'cannot upload FreeSurfer assessor, \
unable to find XML file: %s'
                    LOGGER.error(msg % (fpath))
                    # NOTE(review): implicit None return leaves the assessor
                    # with procstatus UPLOADING on XNAT -- confirm intended
                    return
                xml_path = os.path.join(assessor_path, 'XML',
                                        xml_files_list[0])
                assessor_obj.create(xml=xml_path, allowDataDeletion=False)
        # Upload
        # for each folder=resource in the assessor directory
        for resource in os.listdir(assessor_path):
            resource_path = os.path.join(assessor_path, resource)
            # Need to be in a folder to create the resource :
            if os.path.isdir(resource_path):
                LOGGER.debug('    +uploading %s' % (resource))
                try:
                    upload_resource(assessor_obj, resource, resource_path)
                except Exception as e:
                    _msg = 'failed to upload, skipping assessor:{}:{}'.format(
                        resource_path, str(e))
                    LOGGER.error(_msg)
                    # NOTE(review): implicit None return; local folder is
                    # kept so the upload can be retried later
                    return
        # after Upload
        if is_diskq_assessor(os.path.basename(assessor_path)):
            # was this run using the DISKQ option
            # Read attributes
            ctask = ClusterTask(assessor_dict['label'], RESULTS_DIR, DISKQ_DIR)
            # Set on XNAT
            assessor_obj.attrs.mset({
                xsitype + '/procstatus': ctask.get_status(),
                xsitype + '/validation/status': NEEDS_QA,
                xsitype + '/jobid': ctask.get_jobid(),
                xsitype + '/jobnode': ctask.get_jobnode(),
                xsitype + '/memused': ctask.get_memused(),
                xsitype + '/walltimeused': ctask.get_walltime(),
                xsitype + '/jobstartdate': ctask.get_jobstartdate(),
                xsitype + '/dax_version': __version__,
                xsitype + '/dax_version_hash': __git_revision__,
                xsitype + '/dax_docker_version': dax_docker_version
            })
            # Delete the task from diskq
            ctask.delete()
        elif os.path.exists(os.path.join(assessor_path, _READY_FLAG_FILE)):
            assessor_obj.attrs.set(xsitype + '/procstatus', READY_TO_COMPLETE)
        else:
            assessor_obj.attrs.set(xsitype + '/procstatus', JOB_FAILED)
        # Remove the folder
        shutil.rmtree(assessor_path)
        return True
    return False
def is_diskq_assessor(assr_label):
    """
    Check whether a batch file exists for this assessor (DISKQ mode).

    :param assr_label: assessor label
    :return: True if the assessor was run through the disk queue
    """
    batch_file = assr_label + JOB_EXTENSION_FILE
    return os.path.exists(os.path.join(DISKQ_BATCH_DIR, batch_file))
def upload_resource(assessor_obj, resource, resource_path):
    """
    Upload a resource folder to an assessor.

    :param assessor_obj: pyxnat assessor Eobject
    :param resource: resource to upload
    :param resource_path: resource path on the station
    :return: None
    """
    if resource == 'SNAPSHOTS':
        upload_snapshots(assessor_obj, resource_path)
        return
    rfiles_list = os.listdir(resource_path)
    if not rfiles_list:
        LOGGER.warn('No files in {}'.format(resource_path))
    elif DAX_SETTINGS.get_use_reference():
        try:
            ref_path = get_reference_path(resource_path)
            XnatUtils.upload_reference(ref_path, assessor_obj, resource)
        except XnatUtilsError as err:
            raise err
    # BUGFIX: isdir must be checked on the full path -- the bare filename
    # was being resolved relative to the current working directory
    elif len(rfiles_list) > 1 or \
            os.path.isdir(os.path.join(resource_path, rfiles_list[0])):
        try:
            XnatUtils.upload_folder_to_obj(
                resource_path, assessor_obj.out_resource(resource),
                resource, removeall=True)
        except XnatUtilsError as err:
            print(ERR_MSG % err)
    # Only one plain file, let just upload it:
    else:
        fpath = os.path.join(resource_path, rfiles_list[0])
        try:
            XnatUtils.upload_file_to_obj(
                fpath, assessor_obj.out_resource(resource), removeall=True)
        except XnatUtilsError as err:
            print(ERR_MSG % err)
def get_reference_path(resource_path):
    """Map a results-dir path to the equivalent reference-dir path."""
    results_root = DAX_SETTINGS.get_results_dir()
    reference_root = DAX_SETTINGS.get_reference_dir()
    return resource_path.replace(results_root, reference_root)
def upload_snapshots(assessor_obj, resource_path):
    """
    Upload snapshots to an assessor.

    :param assessor_obj: pyxnat assessor Eobject
    :param resource_path: resource path on the station
    :return: None
    """
    # Remove the previous Snapshots:
    # BUGFIX: exists is a method; without the call the bound method object
    # was always truthy, so the resource was deleted unconditionally
    if assessor_obj.out_resource('SNAPSHOTS').exists():
        assessor_obj.out_resource('SNAPSHOTS').delete()
    original = os.path.join(resource_path, SNAPSHOTS_ORIGINAL)
    thumbnail = os.path.join(resource_path, SNAPSHOTS_PREVIEW)
    status = None
    try:
        status = XnatUtils.upload_assessor_snapshots(
            assessor_obj, original, thumbnail)
    except XnatUtilsError as err:
        print(ERR_MSG % err)
    if status:
        # Uploaded: remove local copies so the loop below does not re-send
        os.remove(original)
        os.remove(thumbnail)
    else:
        LOGGER.warn('No snapshots original or preview were uploaded')
    # Upload the rest of the files in snapshots
    if len(os.listdir(resource_path)) > 0:
        try:
            XnatUtils.upload_folder_to_obj(
                resource_path, assessor_obj.out_resource('SNAPSHOTS'),
                'SNAPSHOTS')
        except XnatUtilsError as err:
            print(ERR_MSG % err)
def upload_assessors(xnat, projects):
    """
    Upload all assessors to XNAT using a pool of workers.

    :param xnat: pyxnat.Interface object
    :param projects: list of projects to upload to XNAT
    :return: list of warnings -- NOTE(review): always empty, since the
             workers run in separate pool processes and never write into
             this local list; confirm whether warnings should be collected
             from the async results instead
    """
    # Get the assessor label from the directory :
    assessors_list = get_assessor_list(projects)
    number_of_processes = len(assessors_list)
    warnings = list()
    num_threads = int(DAX_SETTINGS.get_upload_threads())
    print('Starting pool with: ' + str(num_threads) + ' threads')
    sys.stdout.flush()
    # NOTE(review): Pool presumably is multiprocessing.Pool (processes, not
    # threads despite the message above); the xnat interface is shared
    # across workers -- verify this is safe with the pyxnat implementation
    pool = Pool(processes=num_threads)
    for index, assessor_label in enumerate(assessors_list):
        print(index)
        sys.stdout.flush()
        # Fire-and-forget: result/exception of each worker is not checked
        pool.apply_async(upload_thread,[xnat, index, assessor_label, number_of_processes])
    print('Waiting for pool to finish...')
    sys.stdout.flush()
    pool.close()
    pool.join()
    print('Pool finished')
    sys.stdout.flush()
    return warnings
def upload_thread(xnat, index, assessor_label, number_of_processes):
    """
    Worker: upload a single assessor (dispatched by upload_assessors).

    :param xnat: pyxnat.Interface object
    :param index: position of this assessor in the overall list (for logging)
    :param assessor_label: label of the assessor to upload
    :param number_of_processes: total number of assessors being uploaded
    :return: None
    """
    assessor_path = os.path.join(RESULTS_DIR, assessor_label)
    msg = " *Process: %s/%s -- label: %s / time: %s"
    LOGGER.info(msg % (str(index + 1), str(number_of_processes),
                       assessor_label, str(datetime.now())))
    #assessor_dict = get_assessor_dict(assessor_label, assessor_path)
    assessor_dict = assessor_utils.parse_full_assessor_name(assessor_label)
    if assessor_dict:
        uploaded = upload_assessor(xnat, assessor_dict, assessor_path)
        if not uploaded:
            mess = """    - Assessor label : {label}\n"""
            # NOTE(review): 'warnings' is not defined in this function's
            # scope; this line raises when an upload fails -- it likely
            # intended the caller's warnings list, which is unreachable
            # from a pool worker. Confirm and fix the reporting path.
            warnings.append(mess.format(label=assessor_dict['label']))
    else:
        LOGGER.warn('     --> wrong label')
def upload_pbs(xnat, projects):
    """
    Upload all pbs files to XNAT.

    :param xnat: pyxnat.Interface object
    :param projects: list of projects to upload to XNAT
    :return: None
    """
    pbs_list = get_pbs_list(projects)
    number_pbs = len(pbs_list)
    for index, pbsfile in enumerate(pbs_list):
        pbs_fpath = os.path.join(RESULTS_DIR, _PBS, pbsfile)
        mess = """   *Uploading PBS {index}/{max} -- File name: {file}"""
        LOGGER.info(mess.format(index=str(index + 1),
                                max=str(number_pbs),
                                file=pbsfile))
        assessor_label = os.path.splitext(pbsfile)[0]
        #assessor_dict = get_assessor_dict(assessor_label, 'none')
        assessor_dict = assessor_utils.parse_full_assessor_name(assessor_label)
        if not assessor_dict:
            LOGGER.warn('wrong assessor label for %s' % (pbsfile))
            os.rename(pbs_fpath, os.path.join(RESULTS_DIR, _TRASH, pbsfile))
        else:
            assessor_obj = select_assessor(xnat, assessor_dict)
            if not assessor_obj.exists():
                LOGGER.warn('assessor does not exist for %s' % (pbsfile))
                new_location = os.path.join(RESULTS_DIR, _TRASH, pbsfile)
                os.rename(pbs_fpath, new_location)
            else:
                resource_obj = assessor_obj.out_resource(_PBS)
                if resource_obj.exists():
                    label = assessor_dict['label']
                    msg = 'the PBS resource already exists for the assessor %s'
                    LOGGER.warn(msg % (label))
                    adir = os.path.join(RESULTS_DIR, assessor_dict['label'])
                    if os.path.isdir(adir):
                        msg = 'Copying the pbs file in the assessor folder...'
                        LOGGER.warn(msg)
                        pbs_folder = os.path.join(adir, _PBS)
                        if not os.path.exists(pbs_folder):
                            os.mkdir(pbs_folder)
                        os.rename(pbs_fpath, os.path.join(pbs_folder, pbsfile))
                    else:
                        LOGGER.warn('Copying the pbs file in the TRASH ...')
                        trash = os.path.join(RESULTS_DIR, _TRASH, pbsfile)
                        os.rename(pbs_fpath, trash)
                else:
                    # upload the file
                    # BUGFIX: initialise status so a failed upload does not
                    # raise NameError at the 'if status' check below
                    status = None
                    try:
                        status = XnatUtils.upload_file_to_obj(pbs_fpath,
                                                              resource_obj)
                    except XnatUtilsError as err:
                        print(ERR_MSG % err)
                    if status:
                        os.remove(pbs_fpath)
def upload_outlog(xnat, projects):
    """
    Upload all outlog files to XNAT (only for JOB_FAILED assessors).

    :param xnat: pyxnat.Interface object
    :param projects: list of projects to upload to XNAT
    :return: None
    """
    outlogs_list = os.listdir(os.path.join(RESULTS_DIR, _OUTLOG))
    if projects:
        outlogs_list = [logfile for logfile in outlogs_list
                        if logfile.split('-x-')[0] in projects]
    number_outlog = len(outlogs_list)
    for index, outlogfile in enumerate(outlogs_list):
        outlog_fpath = os.path.join(RESULTS_DIR, _OUTLOG, outlogfile)
        mess = """   *Checking OUTLOG {index}/{max} -- File name: {file}"""
        LOGGER.info(mess.format(index=str(index + 1),
                                max=str(number_outlog),
                                file=outlogfile))
        #assessor_dict = get_assessor_dict(outlogfile[:-7], 'none')
        assessor_label = os.path.splitext(outlogfile)[0]
        assessor_dict = assessor_utils.parse_full_assessor_name(assessor_label)
        if not assessor_dict:
            LOGGER.warn('     wrong outlog file. You should remove it')
        else:
            assessor_obj = select_assessor(xnat, assessor_dict)
            #xtp = get_xsitype(assessor_dict)
            if not assessor_obj.exists():
                msg = '     no assessor on XNAT -- moving file to trash.'
                LOGGER.warn(msg)
                new_location = os.path.join(RESULTS_DIR, _TRASH, outlogfile)
                os.rename(outlog_fpath, new_location)
            else:
                if assessor_obj.attrs.get(assessor_obj.datatype() + '/procstatus') == JOB_FAILED:
                    resource_obj = assessor_obj.out_resource(_OUTLOG)
                    if resource_obj.exists():
                        pass
                    else:
                        LOGGER.info('     uploading file.')
                        # BUGFIX: initialise status so a failed upload does
                        # not raise NameError at the 'if status' check below
                        status = None
                        try:
                            status = XnatUtils.upload_file_to_obj(outlog_fpath,
                                                                  resource_obj)
                        except XnatUtilsError as err:
                            print(ERR_MSG % err)
                        if status:
                            os.remove(outlog_fpath)
def new_upload_results(upload_settings, emailaddress):
    """
    Experimental upload path: group queued assessors by project, subject
    and session, then check their existence on XNAT before uploading.

    :param upload_settings: list of dicts with host/username/password/projects
    :param emailaddress: email address for warnings (currently unused here)
    :return: None
    """
    # get the list of assessors from the results directory
    if len(os.listdir(RESULTS_DIR)) == 0:
        LOGGER.warn('No data to be uploaded.\n')
        sys.exit()
    warnings = list()
    for project in upload_settings:
        try:
            with XnatUtils.get_interface(host=project['host'],
                                         user=project['username'],
                                         pwd=project['password']) as intf:
                LOGGER.info('=' * 60)
                assessors = get_assessor_list(project['projects'])
                parsed = [assessor_utils.parse_full_assessor_name(a)
                          for a in assessors]
                # create a nested dictionary of assessor result directories by
                # project id then subject label then session label
                tree = {}
                for a in parsed:
                    proj = tree.setdefault(a['project_id'], dict())
                    subj = proj.setdefault(a['subject_label'], dict())
                    sess = subj.setdefault(a['session_label'], list())
                    sess.append(a)
                # BUGFIX: dict.iteritems() is Python 2 only; use items()
                for kp, vp in tree.items():
                    for ks, vs in vp.items():
                        for ke, ve in vs.items():
                            # handle all assessors from this session
                            session = intf.select_experiment(kp, ks, ke)
                            if not session.exists():
                                # flag the experiment as missing
                                LOGGER.warning(
                                    "session {}/{}/{} does not exist".format(
                                        kp, ks, ke
                                    )
                                )
                                continue
                            # handle assessors
                            for a in ve:
                                print(a)
                                assessor = intf.select_assessor(
                                    kp, ks, ke, a['label'])
                                if not assessor.exists():
                                    # flag the assessor as missing
                                    LOGGER.warning(
                                        "assessor {}/{}/{} does not exist"
                                        .format(kp, ks, ke, a['label'])
                                    )
                                else:
                                    # upload this assessor
                                    pass
        except Exception as e:
            # BUGFIX: generic Exception objects have no .msg attribute
            LOGGER.error(str(e))
def upload_results(upload_settings, emailaddress):
    """
    Main function to upload the results / PBS / OUTLOG of assessors
    from the queue folder.

    :param upload_settings: dictionary defining the upload information
    :param emailaddress: address to send collected warnings to
    :return: None
    """
    if len(os.listdir(RESULTS_DIR)) == 0:
        LOGGER.warn('No data need to be uploaded.\n')
        sys.exit()
    warnings = list()
    for upload_dict in upload_settings:
        try:
            with XnatUtils.get_interface(host=upload_dict['host'],
                                         user=upload_dict['username'],
                                         pwd=upload_dict['password']) as intf:
                LOGGER.info('='*50)
                proj_str = (upload_dict['projects'] if upload_dict['projects']
                            else 'all')
                LOGGER.info('Connecting to XNAT <%s>, upload for projects:%s' %
                            (upload_dict['host'], proj_str))
                if not XnatUtils.has_dax_datatypes(intf):
                    msg = 'Error: dax datatypes are not installed on xnat <%s>.'
                    raise DaxUploadError(msg % (upload_dict['host']))
                # 1) Upload the assessor data
                # For each assessor label that need to be upload :
                LOGGER.info('Uploading results for assessors')
                if DAX_SETTINGS.get_use_reference():
                    LOGGER.info('using upload by reference, dir is:{}'.format(
                        DAX_SETTINGS.get_reference_dir()))
                warnings.extend(upload_assessors(intf, upload_dict['projects']))
                # 2) Upload the PBS files
                # For each file, upload it to the PBS resource
                LOGGER.info('Uploading PBS files ...')
                upload_pbs(intf, upload_dict['projects'])
                # 3) Upload the OUTLOG files not uploaded with processes
                LOGGER.info('Checking OUTLOG files for JOB_FAILED jobs ...')
                upload_outlog(intf, upload_dict['projects'])
        except DaxNetrcError as e:
            # Removed dead 'msg = e.msg' assignment (value was never used)
            LOGGER.error(e.msg)
    send_warning_emails(warnings, emailaddress)
def load_upload_settings(f_settings, host, username, password, projects):
    """
    Function to parse arguments base on argparse.

    :param f_settings: file to define the settings for uploading
    :param host: XNAT host
    :param username: XNAT username
    :param password: XNAT password
     (can be the environment variable containing the value)
    :param projects: comma-separated string of XNAT projects
    :return: list of dictionaries info_dict
     info_dict for the host [key:value]:
      host : string for XNAT host
      username : string for XNAT username
      password : string for XNAT password
      projects : list of projects to upload for the host
    """
    host_projs = list()
    # If settings file given, load it and use it:
    if f_settings is not None:
        up_file = os.path.abspath(f_settings)
        if not os.path.isfile(up_file):
            raise DaxError('No upload settings file found: %s' % up_file)
        if f_settings.endswith('.json'):
            with open(up_file) as data_file:
                host_projs = json.load(data_file)
        elif f_settings.endswith('.py'):
            settings = imp.load_source('settings', up_file)
            host_projs = settings.host_projects
        elif f_settings.endswith('.csv'):
            # BUGFIX: the loop unpacked each ROW into (index, row) instead
            # of enumerating the reader, and 'rb' mode breaks the Python 3
            # csv module, which requires text mode with newline=''
            with open(up_file, 'r', newline='') as csvfileread:
                csvreader = csv.reader(csvfileread, delimiter=',')
                for index, row in enumerate(csvreader, start=1):
                    if len(row) < 4:
                        raise DaxError("error: could not read the csv row. \
                            Missing args. 4 needed, %s found at line %s." % (str(len(row)), str(index)))
                    else:
                        if row != DEFAULT_HEADER:
                            host_projs.append(dict(list(zip(DEFAULT_HEADER,
                                                            row[:4]))))
        elif f_settings.endswith('.yaml'):
            doc = XnatUtils.read_yaml(f_settings)
            host_projs = doc.get('settings')
        else:
            raise DaxError("error: doesn't recognize the file format for the \
                settings file. Please use either JSON/PYTHON/CSV format.")
    else:  # if not file, use the environment variables and options
        _host = os.environ['XNAT_HOST']
        # BUGFIX: username/password were reset to None here, which silently
        # discarded the caller-supplied credentials and made the branch
        # below unreachable (netrc was always used)
        if host:
            _host = host
        if projects:
            projects = projects.split(',')
        else:
            projects = []
        if username:
            if not password:
                MSG = "Please provide the password for user <%s> on xnat(%s):"
                password = getpass.getpass(prompt=MSG % (username, _host))
                if not password:
                    raise DaxError('error: the password entered was empty. \
                        please provide a password')
            elif password in os.environ:
                # Password may be the NAME of an environment variable
                password = os.environ[password]
        else:
            netrc_obj = DAX_Netrc()
            username, password = netrc_obj.get_login(_host)
        host_projs.append(dict(list(zip(DEFAULT_HEADER, [_host, username,
                                                         password,
                                                         projects]))))
    return host_projs
def print_upload_settings(upload_settings):
    """
    Display Host/Username/Projects that will be used to upload data from
    the queue.

    :param upload_settings: list of host/user/projects dictionaries
    :return: None
    """
    LOGGER.info('Upload Settings selected by user:')
    for info in upload_settings:
        projects = ','.join(info['projects']) if info['projects'] else 'all'
        user = info['username'] if info['username'] else ''
        LOGGER.info('XNAT Host: %s -- Xnat Username: %s -- projects: %s'
                    % (info['host'], user, projects))
    LOGGER.info('Upload Directory: %s ' % (RESULTS_DIR))
# Functions for testings:
class test_results(object):
'''
    Class to keep track of test results (number of tests, fails, errors, time)
:param tobj: object to be tested by dax_test (processor/module/launcher)
'''
def __init__(self, tobj=None):
# xnat connection:
self.xnat = None
# Default variables:
self.nb_test = 0
self.error = 0
self.fail = 0
self.warning = 0
self.time = time.time()
# User variable:
self.tobj = tobj
self.should_run = True
self.launch_obj = None
def set_tobj(self, tobj):
"""
Setter for the test object
:param tobj: test object
:return: None
"""
self.tobj = tobj
def set_xnat(self, xnat):
"""
Setter for the xnat interface
:param xnat: pyxnat interface
:return: None
"""
self.xnat = xnat
    def run_test(self, project, sessions, nb_sess):
        """
        Run the test: wrap the test object in a Launcher if needed, then
        dispatch to the matching run_test_* method.

        :param project: project ID on XNAT
        :param sessions: list of sessions label on XNAT
        :param nb_sess: number of sessions to test
        :return: None
        """
        # Set the cobj:
        sessions = get_sessions_for_project(self.xnat, project, sessions,
                                            nb_sess)
        # Set the launcher_obj:
        # A bare processor gets a one-project Launcher with no modules
        if isinstance(self.tobj, processors.Processor) or \
                isinstance(self.tobj, processors.AutoProcessor):
            proj_proc = {project: [self.tobj]}
            proj_mod = {project: []}
            self.launch_obj = launcher.Launcher(
                proj_proc, proj_mod, priority_project=None,
                xnat_host=self.xnat.host)
        elif isinstance(self.tobj, modules.Module):
            # Set the cobj:
            # A bare module gets a one-project Launcher with no processors
            proj_proc = {project: []}
            proj_mod = {project: [self.tobj]}
            self.launch_obj = launcher.Launcher(
                proj_proc, proj_mod, priority_project=None,
                xnat_host=self.xnat.host)
        elif isinstance(self.tobj, launcher.Launcher):
            self.launch_obj = self.tobj
        else:
            print('[ERROR] Obj can not be identified as a dax objects.')
            self.inc_error()
            self.should_run = False
        if self.should_run:
            if isinstance(self.tobj, processors.Processor) or \
                    isinstance(self.tobj, processors.AutoProcessor):
                self.run_test_processor(project, sessions)
            elif isinstance(self.tobj, modules.Module):
                self.run_test_module(project, sessions)
            elif isinstance(self.tobj, launcher.Launcher):
                # For a launcher: test every project it references, with a
                # random subset of sessions per project
                unique_list = list(set(
                    list(self.tobj.project_process_dict.keys()) +
                    list(self.tobj.project_modules_dict.keys())))
                if self.tobj.priority_project:
                    project_list = self.tobj.get_project_list(unique_list)
                else:
                    project_list = unique_list
                for project in project_list:
                    sessions = randomly_get_sessions(self.xnat, project,
                                                     nb_sess)
                    self.run_test_settings(project, sessions)
def inc_warning(self):
"""
Increase warning counter
:return: None
"""
self.warning += 1
def inc_error(self):
"""
Increase error counter
:return: None
"""
self.error += 1
def inc_fail(self):
"""
Increase fail counter
:return: None
"""
self.fail += 1
def inc_test(self):
"""
Increase test counter
:return: None
"""
self.nb_test += 1
def get_time(self):
"""
Return the time since the object was created
:return: time in seconds
"""
end = time.time()
return end - self.time
def get_test_state(self):
"""
Return state of the test
:return: None
"""
if self.error > 0 or self.fail > 0:
return ('FAILED (failures=%s, errors=%s, warnings=%s)'
% (str(self.fail), str(self.error), str(self.warning)))
else:
state = 'OK'
tmp = ' (warnings=%s)'
warning = tmp % str(self.warning) if self.warning != 0 else ''
return state + warning
def get_number(self):
"""
Return Number of tests ran
:return: int
"""
return self.nb_test
def set_proc_cobjs_list(self, proc_obj, project, sessions):
"""
Method to get the list of Cached Objects for the project/sessions for a
processor
:param proc_obj: processor object
:param project: XNAT project
:param sessions: XNAT sessions
:return: None
"""
co_list = list()
sess_list = self.xnat.get_sessions(project)
sess_list = [sess for sess in sess_list if sess['label'] in sessions]
# Loop through the sessions
for sess in sess_list:
csess = XnatUtils.CachedImageSession(self.intf, project,
sess['subject_label'],
sess['label'])
if isinstance(proc_obj, processors.ScanProcessor):
for cscan in csess.scans():
if proc_obj.should_run(cscan.info()):
co_list.append(cscan)
elif isinstance(proc_obj, processors.SessionProcessor):
co_list.append(csess)
elif isinstance(proc_obj, processors.AutoProcessor):
if proc_obj.type == 'session':
co_list.append(csess)
else:
for cscan in csess.scans():
if proc_obj.should_run(cscan.info()):
co_list.append(cscan)
if len(co_list) == 0:
print("[WARNING] No scan found for the processor on scans.")
self.inc_warning()
return co_list
def set_mod_cobjs_list(self, mod_obj, project, sessions):
"""
Method to get the list of Cached Objects for the project/sessions for a
processor
:param mod_obj: processor object
:param project: XNAT project
:param sessions: XNAT sessions
:return: None
"""
co_list = list()
sess_list = self.xnat.get_sessions(project)
sess_list = [sess for sess in sess_list if sess['label'] in sessions]
# Loop through the sessions
for sess in sess_list:
csess = XnatUtils.CachedImageSession(self.xnat, project,
sess['subject_label'],
sess['label'])
if isinstance(mod_obj, modules.ScanModule):
for cscan in csess.scans():
if mod_obj.needs_run(cscan, self.xnat):
co_list.append(cscan)
elif isinstance(mod_obj, modules.SessionModule):
if mod_obj.needs_run(csess, self.xnat):
co_list.append(csess)
if len(co_list) == 0:
print("[WARNING] No object found for the Module.")
self.inc_warning()
return co_list
    def test_has_inputs(self, project, sessions):
        """
        Method to test the has_inputs function of the processor under test.

        :param project: XNAT project
        :param sessions: XNAT sessions
        :return: True if SUCCEEDED, False otherwise
        """
        # Test has_inputs for each session
        print_sub_test('test_has_inputs')
        # Loop through the sessions
        for cobj in self.set_proc_cobjs_list(self.tobj, project, sessions):
            cinfo = cobj.info()
            try:
                if isinstance(cobj, XnatUtils.CachedImageScan):
                    msg = "Processor.has_inputs(cobj) running on %s - %s - %s \
                        ..."
                    print(msg % (project, cinfo['session_label'], cinfo['ID']))
                else:
                    msg = "Processor.has_inputs(cobj) running on %s - %s ..."
                    print(msg % (project, cinfo['session_label']))
                state, qcstatus = self.tobj.has_inputs(cobj)
                self.inc_test()
                qcstatus = qcstatus if qcstatus else task.JOB_PENDING
                # Map the numeric state onto the dax task status constants;
                # any other value is a contract violation of has_inputs
                if state == 0:
                    state = task.NEED_INPUTS
                elif state == 1:
                    state = task.NEED_TO_RUN
                elif state == -1:
                    state = task.NO_DATA
                else:
                    print("[FAIL] State return by Processor.has_inputs() \
                        unknown (-1/0/1): %s" % state)
                    self.inc_fail()
                    return False
                print("Outputs: state = %s and qcstatus = %s"
                      % (state, qcstatus))
            except Exception:
                print('[ERROR]')
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type, exc_value, exc_traceback,
                                          limit=2, file=sys.stdout)
                self.inc_error()
                return False
        return True
    def test_dax_build(self, project, sessions):
        """
        Method to test a processor through dax build.

        :param project: XNAT project
        :param sessions: XNAT sessions
        :return: None
        """
        print_sub_test('test_dax_build')
        try:
            self.inc_test()
            print("dax_build on %s - %s ..." % (project, ','.join(sessions)))
            self.launch_obj.build('dax_test', project, ','.join(sessions))
            # Verify that the expected assessors now exist on XNAT
            has_assessors = self.check_sessions(project, sessions)
            if has_assessors:
                print("\nbuild SUCCEEDED")
            else:
                self.inc_fail()
                print("\nbuild FAILED")
        except Exception:
            print('[ERROR]')
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback,
                                      limit=2, file=sys.stdout)
            self.inc_error()
    def check_sessions(self, project, sessions):
        """
        Check that the assessors expected after a build exist on XNAT.

        :param project: XNAT project
        :param sessions: XNAT sessions
        :return: True if the assessors have been created, False otherwise
                 (modules always return True -- they create no assessors)
        """
        if isinstance(self.tobj, processors.Processor):
            list_proc_obj = [self.tobj]
        elif isinstance(self.tobj, modules.Module):
            return True
        else:
            list_proc_obj = self.tobj.project_process_dict.get(project, list())
        for proc_obj in list_proc_obj:
            for cobj in self.set_proc_cobjs_list(proc_obj, project, sessions):
                cinfo = cobj.info()
                # Scan-level assessors embed the scan ID in the label,
                # session-level ones do not
                if isinstance(cobj, XnatUtils.CachedImageScan):
                    tmp = "%s-x-%s-x-%s-x-%s-x-%s"
                    assessor_label = tmp % (project,
                                            cinfo['subject_label'],
                                            cinfo['session_label'],
                                            cinfo['ID'],
                                            proc_obj.name)
                else:
                    tmp = "%s-x-%s-x-%s-x-%s"
                    assessor_label = tmp % (project,
                                            cinfo['subject_label'],
                                            cinfo['session_label'],
                                            proc_obj.name)
                assessor_obj = XnatUtils.select_assessor(self.xnat,
                                                         assessor_label)
                if not assessor_obj.exists():
                    print('[FAIL] Assessor %s did not get created on XNAT.'
                          % assessor_label)
                    return False
                else:
                    mget = assessor_obj.attrs.mget([
                        DEFAULT_DATATYPE + '/proctype',
                        DEFAULT_DATATYPE + '/procstatus',
                        DEFAULT_DATATYPE + '/validation/status',
                        DEFAULT_DATATYPE + '/date'])
                    msg = "Assessor %s: \n - proctype: %s\n - procstatus: %s\n\
                        - qcstatus: %s\n - date: %s"
                    print(msg % (assessor_label, mget[0], mget[1], mget[2],
                                 mget[3]))
        return True
    def test_dax_launch(self, project, sessions):
        """
        Method to test launching a processor's tasks through dax.

        Launches in write-only mode (PBS files written to DAX_TEST_DIR,
        nothing submitted).

        :param project: XNAT project
        :param sessions: XNAT sessions
        :return: None
        """
        print_sub_test('test_dax_launch')
        try:
            self.inc_test()
            print("Launching tasks for %s - %s with writeonly ..."
                  % (project, ','.join(sessions)))
            # self.all_tasks accepts every assessor, so no task is filtered
            tasks_list = self.launch_obj.get_tasks(
                self.xnat, self.all_tasks, [project], ','.join(sessions))
            for cur_task in tasks_list:
                cur_task.launch(self.launch_obj.root_job_dir,
                                self.launch_obj.job_email,
                                self.launch_obj.job_email_options,
                                self.launch_obj.xnat_host,
                                True, pbsdir=DAX_TEST_DIR)
            results = self.display_pbs_file(project, sessions)
            if results:
                print("launch SUCCEEDED")
            else:
                print("launch FAILED")
        except Exception:
            print('[ERROR]')
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback,
                                      limit=2, file=sys.stdout)
            self.inc_error()
@staticmethod
def all_tasks(_):
"""
Check if a task is launchable
:param assr_info: dictionary containing procstatus for the assessor
(not used)
:return: True to take all assessor
"""
return True
    def test_dax(self, project, sessions):
        """
        Run the dax build test and, for non-modules, the launch test.

        :param project: XNAT project
        :param sessions: XNAT sessions
        :return: None
        """
        self.test_dax_build(project, sessions)
        # Modules have no tasks to launch; only processors/settings do.
        if not isinstance(self.tobj, modules.Module):
            self.test_dax_launch(project, sessions)
    def test_pre_run(self):
        """
        Test the prerun() function of a module through dax.

        :return: True if prerun() succeeded, False if it raised
        """
        print_sub_test('test_pre_run')
        try:
            self.inc_test()
            print("Pre run ...")
            self.tobj.prerun()
            return True
        except Exception:
            print('[ERROR]')
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback,
                                      limit=2, file=sys.stdout)
            self.inc_error()
            return False
    def test_run(self, project, sessions):
        """
        Test the run() function of a module on cached session objects.

        :param project: XNAT Project
        :param sessions: XNAT Sessions
        :return: True if all runs completed (a missing flag file is only
            printed as [FAIL]), False if an exception was raised
        """
        print_sub_test('test_run')
        cobj_list = self.set_mod_cobjs_list(self.tobj, project, sessions)
        try:
            self.inc_test()
            print("Run on sessions: %s ..." % ','.join(sessions))
            for cobj in cobj_list:
                cinfo = cobj.info()
                self.tobj.run(cinfo, cobj.full_object())
                # Session modules must leave a flag resource behind:
                if isinstance(self.tobj, modules.SessionModule):
                    result = self.tobj.has_flag_resource(
                        cobj, self.tobj.mod_name)
                    if not result:
                        print("[FAIL] Session Module didn't create the \
flagfile for %s." % (cinfo['label']))
            return True
        except Exception:
            print('[ERROR]')
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback,
                                      limit=2, file=sys.stdout)
            self.inc_error()
            return False
    def test_after_run(self, project):
        """
        Test the afterrun() function of a module.

        :param project: XNAT Project
        :return: True if afterrun() succeeded, False if it raised
        """
        print_sub_test('test_after_run')
        try:
            self.inc_test()
            print("After run ...")
            self.tobj.afterrun(self.xnat, project)
            return True
        except Exception:
            print('[ERROR]')
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback,
                                      limit=2, file=sys.stdout)
            self.inc_error()
            return False
    def run_test_processor(self, project, sessions):
        """
        Run every test for a processor through dax.

        :param project: XNAT Project
        :param sessions: XNAT Sessions
        :return: None (results are printed; counters updated on self)
        """
        # display
        print_new_test(self.tobj.name)
        # Test has_inputs:
        result = self.test_has_inputs(project, sessions)
        if result:
            print("\nhas_inputs SUCCEEDED")
        else:
            print("\nhas_inputs FAILED")
        # Test dax functionalities:
        self.test_dax(project, sessions)
    def run_test_module(self, project, sessions):
        """
        Run every test for a module through dax.

        :param project: project on XNAT
        :param sessions: list of sessions label on XNAT
        :return: None (results are printed; counters updated on self)
        """
        # display
        print_new_test(self.tobj.mod_name)
        # Test the three module hooks in lifecycle order:
        if self.test_pre_run():
            print("prerun() SUCCEEDED")
        else:
            print("prerun() FAILED")
        if self.test_run(project, sessions):
            print("run() SUCCEEDED")
        else:
            print("run() FAILED")
        if self.test_after_run(project):
            print("afterrun() SUCCEEDED")
        else:
            print("afterrun() FAILED")
        # Test dax functionalities:
        self.test_dax(project, sessions)
    def run_test_settings(self, project, sessions):
        """
        Run every test for a settings file through dax.

        :param project: XNAT Project
        :param sessions: XNAT Sessions
        :return: None (results are printed; counters updated on self)
        """
        # print info settings:
        self.display_settings()
        # Test dax functionalities:
        self.test_dax(project, sessions)
def display_pbs_file(self, project, sessions):
"""
Function to display one of the pbs file created
:param tests: tests_results object
:param project: XNAT project
:param sessions: XNAT sessions
:return: True if PBS created, False if not.
"""
pbs_files = list()
# get a PBS file created:
for sess in sessions:
pbs_files.extend(glob.glob(os.path.join(DAX_TEST_DIR,
'%s-x-*-x-%s-x-*.pbs' % (project, sess))))
# if empty raise Error
if len(pbs_files) == 0:
print('[ERROR] No PBS file generated in %s by dax_launch'
% DAX_TEST_DIR)
self.inc_error()
return False
else:
print('PBS Example:\n')
print(open(pbs_files[0], "rb").read())
return True
    def display_settings(self):
        """
        Display information parsed from the settings:
         - projects
         - processors and the default values
         - modules and the default values
         - launcher and the default values

        :return: None
        """
        proj_list = list()
        print('Settings arguments:')
        print_settings(self.launch_obj.__dict__)
        proj_mods = self.launch_obj.project_modules_dict
        proj_procs = self.launch_obj.project_process_dict
        # A project may appear in either dict; merge both key sets.
        proj_list.extend(list(proj_mods.keys()))
        proj_list.extend(list(proj_procs.keys()))
        print('\nList of XNAT projects : %s' % ','.join(list(set(proj_list))))
        for project in list(set(proj_list)):
            print(' - Project %s:' % project)
            print(' + Module(s) arguments:')
            if project in list(proj_mods.keys()) and \
               len(proj_mods[project]) > 0:
                for module in proj_mods[project]:
                    print_module(module)
            else:
                print(' No module set for the project.')
            print('\n + Processor(s) arguments:')
            if project in list(proj_procs.keys()) and \
               len(proj_procs[project]) > 0:
                for processor in proj_procs[project]:
                    print_processor(processor)
            else:
                print(' No processor set for the project.')
def print_settings(settings_dict):
    """
    Display the settings information using the SETTINGS_DISPLAY template.

    :param settings_dict: __dict__ of a dax.launcher.Launcher object
    :return: None
    """
    print(SETTINGS_DISPLAY.format(
        host=settings_dict['xnat_host'],
        user=settings_dict['xnat_user'],
        priority=settings_dict['priority_project'],
        pp=settings_dict['project_process_dict'],
        pm=settings_dict['project_modules_dict'],
        jobdir=settings_dict['root_job_dir'],
        email=settings_dict['job_email'],
        email_opts=settings_dict['job_email_options'],
        limit=settings_dict['queue_limit'],
        age=settings_dict['max_age']))
def print_module(mod_obj):
    """
    Display the module information using the MOD_DISPLAY template.

    :param mod_obj: dax.modules Module object (Scan or Session level)
    :return: None
    """
    level = 'Scan' if isinstance(mod_obj, modules.ScanModule) else 'Session'
    mod_dict = mod_obj.__dict__
    other_args = ''
    # Collect every attribute that is not one of the default arguments:
    for key, arg in list(mod_dict.items()):
        if key not in MOD_DEF_ARGS:
            other_args += " %s: %s\n" % (key, str(arg).strip())
    print(MOD_DISPLAY.format(name=mod_dict['mod_name'],
                             temp_dir=mod_dict['directory'],
                             email=mod_dict['email'],
                             level=level,
                             other=other_args))
def print_processor(proc_obj):
    """
    Display the processor information using the PROC_DISPLAY template.

    :param proc_obj: dax.processors Processor object
    :return: None
    """
    # Determine whether the processor works at scan or session level:
    level = 'Session'
    if isinstance(proc_obj, processors.ScanProcessor):
        level = 'Scan'
    elif isinstance(proc_obj, processors.AutoProcessor):
        if proc_obj.type == 'scan':
            level = 'Scan'
    proc_dict = proc_obj.__dict__
    other_args = ''
    if proc_dict.get('xnat_host', None):
        host = proc_dict['xnat_host']
    else:
        host = 'using default XNAT_HOST'
    # Collect every attribute that is not one of the default arguments:
    for key, arg in list(proc_dict.items()):
        if key not in PROC_DEF_ARGS:
            other_args += " %s: %s\n" % (key, str(arg).strip())
    print(PROC_DISPLAY.format(name=proc_dict['name'],
                              spath=proc_dict['spider_path'],
                              version=proc_dict['version'],
                              host=host,
                              xsitype=proc_dict['xsitype'],
                              level=level,
                              memory=proc_dict['memreq_mb'],
                              walltime=proc_dict['walltime_str'],
                              ppn=proc_dict['ppn'],
                              env=proc_dict['env'],
                              other=other_args))
def randomly_get_sessions(xnat, project, nb_sess=5):
    """
    Retrieve up to nb_sess distinct session labels at random from an
    XNAT project.

    :param xnat: XNAT interface object providing get_sessions(project)
    :param project: XNAT project
    :param nb_sess: maximum number of session labels to return
    :return: list of distinct session labels
    """
    nb_sess = int(nb_sess)
    list_sess = xnat.get_sessions(project)
    # Deduplicate labels while preserving order (duplicate labels made
    # the previous rejection-sampling loop spin indefinitely).
    labels = list(dict.fromkeys(sess['label'] for sess in list_sess))
    if len(labels) <= nb_sess:
        # Fewer sessions than requested: return them all.
        return labels
    # random.sample picks nb_sess distinct labels in one call.
    return random.sample(labels, nb_sess)
def is_python_file(filepath):
    """
    Check if a file is a python file using the unix `file` command.

    :param filepath: path to the file to test
    :return: True if it's a python file, False otherwise
    """
    # Pass the arguments as a list so paths containing spaces survive.
    output = sb.check_output(['file', filepath])
    # check_output returns bytes on Python 3; decode before searching,
    # otherwise `'python' in output.lower()` raises TypeError.
    if isinstance(output, bytes):
        output = output.decode('utf-8', errors='replace')
    return 'python' in output.lower()
def load_test(filepath):
    """
    Load the object to test from a yaml or python file.

    :param filepath: path to the file to test
    :return: the loaded object (settings launcher, processor or module),
        or None if the file could not be loaded
    """
    if not os.path.exists(filepath):
        print('[ERROR] %s does not exists.' % filepath)
        return None
    if filepath.endswith('yaml'):
        doc = XnatUtils.read_yaml(filepath)
        # A 'projects' key marks a settings file; otherwise a processor.
        if 'projects' in list(doc.keys()):
            try:
                return bin.read_yaml_settings(filepath, LOGGER)
            except AutoProcessorError:
                print('[ERROR]')
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type, exc_value, exc_traceback,
                                          limit=2, file=sys.stdout)
        else:
            # So far only auto processor:
            try:
                return processors.load_from_yaml(XnatUtils, filepath)
            except AutoProcessorError:
                print('[ERROR]')
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type, exc_value, exc_traceback,
                                          limit=2, file=sys.stdout)
    elif filepath.endswith('.py') or is_python_file(filepath):
        # NOTE(review): imp is deprecated since Python 3.4 (importlib is
        # the replacement); eval() below runs names taken from the loaded
        # file, so only use this with trusted files.
        test = imp.load_source('test', filepath)
        # Check if processor file
        try:
            return eval('test.{}()'.format(test.__processor_name__))
        except AttributeError:
            pass
        # Check if it's a settings file.py
        try:
            return test.myLauncher
        except AttributeError:
            pass
        # Check if it's a module (class named after the file's basename)
        try:
            return eval('test.{}()'.format(os.path.basename(filepath)[:-3]))
        except AttributeError:
            pass
        err = '[ERROR] Module or processor or myLauncher object NOT FOUND in \
the python file {}.'
        print(err.format(filepath))
        return None
    else:
        err = '[ERROR] {} format unknown. Please provide a .py or .yaml file.'
        print(err.format(filepath))
        return None
def print_new_test(name):
    """
    Print a separator announcing a new test.

    :param name: name for the test
    :return: None
    """
    print(f'{DEL_UP}\nTest -- {name} ...')
def print_sub_test(name):
    """
    Print a separator announcing a test method.

    :param name: name for the method
    :return: None
    """
    print(f'\n{DEL_DW}\n + Testing method {name} \n')
def get_sessions_for_project(xnat, project, sessions, nb_sess=5):
    """
    Return a list of XNAT session labels (between 1 and 5) to test on.

    :param xnat: XNAT interface object
    :param project: XNAT project
    :param sessions: comma-separated session labels, or None/'' to pick
        nb_sess sessions at random
    :param nb_sess: number of random sessions when sessions is not given
    :return: list of session labels
    :raises Exception: if no session can be selected
    """
    # Set Sessions: If No sessions specified, select nb_sess random
    # sessions for testing:
    if sessions:
        sessions = sessions.split(',')
        # Cap at 5 sessions to keep the test short.
        if len(sessions) > 5:
            sessions = sessions[:5]
        elif len(sessions) <= 0:
            raise Exception('No sessions set for the test.')
        return sessions
    else:
        if nb_sess > 5:
            nb_sess = 5
        elif nb_sess <= 0:
            # Fixed typo in the user-facing message: "interger".
            raise Exception('--nb_sess set with an integer smaller than 1. \
Please use at least one.')
        return randomly_get_sessions(xnat, project, nb_sess)
# Functions for setting:
class DAX_Setup_Handler(object):
    """DAX_Setup_Handler Class.

    Writes the ~/.dax_settings.ini file required to run any dax
    executable. Loads an existing ini file when present, otherwise
    starts from the DEFAULTS dictionary, then lets the user edit each
    section interactively before writing the file back to disk.
    """

    def __init__(self):
        """Entry Point for DAX_Setup_Handler class."""
        # Set the settings_file in the user's home directory.
        self.settings_file = os.path.join(os.path.expanduser('~'),
                                          '.dax_settings.ini')
        # SafeConfigParser was a deprecated alias removed in Python 3.12;
        # ConfigParser is the drop-in supported equivalent.
        self.config_parser = configparser.ConfigParser(allow_no_value=True)
        # Set the configParser from init file or default value
        if os.path.isfile(self.settings_file):
            try:
                self.config_parser.read(self.settings_file)
            except configparser.MissingSectionHeaderError as MSHE:
                self._print_error_and_exit('Missing header bracket detected. \
Please check your ini file.\n', MSHE)
        else:  # set to default
            for section in sorted(DEFAULTS.keys()):
                self.config_parser.add_section(section)
                for option in list(DEFAULTS[section].keys()):
                    self.config_parser.set(section, option,
                                           DEFAULTS[section][option])

    def exists(self):
        """Check if ini file exists.

        :return: True if exists, False otherwise
        """
        return os.path.isfile(self.settings_file)

    def config(self):
        """Config the configParser for each section and ask user for value.

        Caller for all of the _get* methods.

        :return: None
        """
        # For each section ask the user if he wants to edit it:
        print('Starting to config the dax_settings.ini file:')
        for section in self.config_parser.sections():
            sys.stdout.write(' - Section: %s\n' % section)
            qst = ' Do you want to set/modify the section [%s] in the \
settings file?' % section
            modify = xnat_tools_utils.prompt_user_yes_no(qst)
            if modify:
                self.config_section(section)

    def config_section(self, section):
        """Configure the section interactively.

        :param section: name of the section
        :return: None
        """
        msg = "Do you want to use specific templates settings from DAX?"
        # The cluster section can be filled from one of the templates:
        if section == 'cluster' and xnat_tools_utils.prompt_user_yes_no(msg):
            self._set_cluster_default()
        else:
            for option in self.config_parser.options(section):
                value = self._prompt(section, option)
                self.config_parser.set(section, option, value)

    def write(self):
        """Write config options to the ~/.dax_settings.ini file.

        :return: None
        """
        with open(self.settings_file, 'w+') as ini_f:
            ini_f.write(INI_HEADER)
            self.config_parser.write(ini_f)
        # Restrict permissions: the file may contain credentials.
        os.chmod(self.settings_file, stat.S_IWUSR | stat.S_IRUSR)

    def _prompt(self, section, option):
        """Prompt the user for an input for the option in the template.

        :param section: section name (used for the DEFAULTS fallback)
        :param option: option name
        :return: String of the input
        """
        if option in list(OPTIONS_DESCRIPTION.keys()):
            # Confidential options (passwords) are read without echo.
            if 'confidential' in list(OPTIONS_DESCRIPTION[option].keys()):
                msg = OPTIONS_DESCRIPTION[option]['msg']
                stdin = getpass.getpass(prompt=msg)
            else:
                stdin = input(OPTIONS_DESCRIPTION[option]['msg'])
            if OPTIONS_DESCRIPTION[option]['is_path'] and stdin:
                # Normalise the path and re-prompt until it exists.
                if stdin.startswith('~/'):
                    stdin = os.path.join(os.path.expanduser('~'), stdin[2:])
                else:
                    stdin = os.path.abspath(stdin)
                if not os.path.exists(stdin):
                    print("Path <%s> does not exists." % stdin)
                    stdin = self._prompt(section, option)
        else:
            stdin = input('Please enter %s: ' % option)
        # Empty input falls back to the default value.
        if not stdin:
            stdin = DEFAULTS[section][option]
        return stdin

    def _set_cluster_default(self, ctype=False):
        """Use the default cluster settings from the cluster type selected.

        :param ctype: unused; kept for backward compatibility
        :return: None
        """
        cluster_type = '0'
        while cluster_type not in ['1', '2', '3']:
            cluster_type = input("Which cluster are you using? \
[1.SGE 2.SLURM 3.MOAB] ")
        sys.stdout.write('Warning: You can edit the cluster templates files \
at any time in ~/.dax_templates/\n')
        # Options that are always asked regardless of the cluster type:
        for option in ['gateway', 'root_job_dir', 'queue_limit', 'results_dir',
                       'max_age', 'launcher_type', 'skip_lastupdate']:
            value = self._prompt('cluster', option)
            self.config_parser.set('cluster', option, value)
        if cluster_type == '1':
            cluster_dict = DEFAULT_SGE_DICT
        elif cluster_type == '2':
            cluster_dict = DEFAULT_SLURM_DICT
        else:
            cluster_dict = DEFAULT_MOAB_DICT
        # Copy the files from the template:
        templates_path = os.path.join(self.config_parser.get('admin',
                                                             'user_home'),
                                      '.dax_templates')
        if not os.path.exists(templates_path):
            os.makedirs(templates_path)
        for option, value in list(cluster_dict.items()):
            if option in OPTIONS_DESCRIPTION and \
               OPTIONS_DESCRIPTION[option]['is_path']:
                # Path-valued options: materialise the template to a file
                # and store its path in the ini.
                file_path = os.path.join(templates_path, option + '.txt')
                with open(file_path, 'w') as fid:
                    fid.writelines(value)
                self.config_parser.set('cluster', option, file_path)
            else:
                self.config_parser.set('cluster', option, value)
def test_connection_xnat(host, user, pwd):
    """
    Method to check connection to XNAT using host, user, pwd.

    :param host: Host for XNAT
    :param user: User for XNAT
    :param pwd: Password for XNAT
    :return: True if succeeded, False otherwise.
    """
    from pyxnat.core.errors import DatabaseError
    from pyxnat import Interface
    xnat = Interface(host, user, pwd)
    try:
        # Deleting the JSESSION both validates the login and closes the
        # session on the server.
        xnat._exec('/data/JSESSION', method='DELETE')
        print(' --> Good login.\n')
        return True
    except DatabaseError:
        print(' --> error: Wrong login.\n')
        return False
def set_xnat_netrc():
    """Ask User for xnat credentials and store them in the daxnetrc file.

    Loops until the credentials authenticate against XNAT, then records
    them and exports XNAT_HOST in the user's shell profile.

    :return: None
    """
    netrc_obj = DAX_Netrc()
    # Only prompt when no credentials are stored yet.
    if netrc_obj.is_empty():
        print('Warning: daxnetrc is empty. Setting XNAT login:')
        connection = False
        while not connection:
            host = input("Please enter your XNAT host: ")
            user = input("Please enter your XNAT username: ")
            pwd = getpass.getpass(prompt='Please enter your XNAT password: ')
            connection = test_connection_xnat(host, user, pwd)
        if connection:
            netrc_obj.add_host(host, user, pwd)
        # add XNAT_HOST to your profile file:
        init_profile(host)
def init_profile(host):
    """Append an XNAT_HOST export to the user's shell profile file.

    :param host: Host of XNAT to add to your profile
    :return: None
    :raises DaxSetupError: if no profile file can be found
    """
    home = os.path.expanduser('~')
    # Pick the first existing shell profile file, in preference order
    # (the previous version had a redundant branch re-assigning the
    # default).
    profile = None
    for name in ('.bash_profile', '.bashrc', '.profile'):
        candidate = os.path.join(home, name)
        if os.path.exists(candidate):
            profile = candidate
            break
    if profile is None:
        raise DaxSetupError("could not find your profile file.")
    # Add the export line to the profile only once.
    line_to_add = 'export XNAT_HOST=%s' % host
    with open(profile) as f_read:
        already_set = 'XNAT_HOST' in f_read.read()
    if not already_set:
        with open(profile, "a") as f_profile:
            f_profile.write(BASH_PROFILE_XNAT.format(export_cmd=line_to_add))
|
"""Test cases for representations."""
import numpy as np
import muspy
from .utils import TEST_JSON_PATH
def test_note_representation():
    """Round-trip the test song through the note-based representation."""
    music = muspy.load(TEST_JSON_PATH)

    # Encoding: one (time, pitch, duration, velocity) row per note.
    arr = muspy.to_representation(music, "note")
    assert arr.shape == (9, 4)
    assert arr.dtype == int

    pitches = [76, 75, 76, 75, 76, 71, 74, 72, 69]
    expected = [[2 * i, p, 2, 64] for i, p in enumerate(pitches)]
    assert np.all(arr == np.array(expected, dtype=int))

    # Decoding must reproduce the original notes exactly.
    restored = muspy.from_representation(arr, "note")
    assert restored[0].notes == music[0].notes
def test_note_representation_start_end():
    """Round-trip through the note representation using
    (start, pitch, end, velocity) rows instead of durations."""
    music = muspy.load(TEST_JSON_PATH)

    # Encoding
    encoded = muspy.to_representation(music, "note", use_start_end=True)
    assert encoded.shape == (9, 4)
    # np.int was a deprecated alias removed in NumPy 1.24; the builtin
    # int matches the dtype asserted by the sibling note test.
    assert encoded.dtype == int

    pitches = [76, 75, 76, 75, 76, 71, 74, 72, 69]
    answer = [[2 * i, p, 2 * i + 2, 64] for i, p in enumerate(pitches)]
    assert np.all(encoded == np.array(answer, dtype=int))

    # Decoding
    decoded = muspy.from_representation(encoded, "note", use_start_end=True)
    assert decoded[0].notes == music[0].notes
def test_pitch_representation():
    """Round-trip through the pitch-based (per-time-step) representation."""
    music = muspy.load(TEST_JSON_PATH)

    arr = muspy.to_representation(music, "pitch")
    assert arr.shape == (18, 1)
    assert arr.dtype == np.uint8

    # Each two-step note contributes its pitch for two time steps.
    pitches = [76, 75, 76, 75, 76, 71, 74, 72, 69]
    expected = [p for p in pitches for _ in range(2)]
    assert np.all(arr.flatten() == np.array(expected, dtype=np.uint8))

    restored = muspy.from_representation(arr, "pitch")
    assert restored[0].notes == music[0].notes
def test_pitch_representation_hold_state():
    """Pitch representation with an explicit hold state (129)."""
    music = muspy.load(TEST_JSON_PATH)

    arr = muspy.to_representation(music, "pitch", use_hold_state=True)
    assert arr.shape == (18, 1)
    assert arr.dtype == np.uint8

    # Each note is an onset step followed by one hold step (129).
    pitches = [76, 75, 76, 75, 76, 71, 74, 72, 69]
    expected = [v for p in pitches for v in (p, 129)]
    assert np.all(arr.flatten() == np.array(expected, dtype=np.uint8))

    restored = muspy.from_representation(arr, "pitch", use_hold_state=True)
    assert restored[0].notes == music[0].notes
def test_event_representation():
    """Round-trip through the event representation with velocities."""
    music = muspy.load(TEST_JSON_PATH)

    arr = muspy.to_representation(music, "event", encode_velocity=True)
    assert arr.shape == (36, 1)
    assert arr.dtype == np.uint16

    # Each note encodes as the four events (372, pitch, 257, pitch + 128).
    pitches = [76, 75, 76, 75, 76, 71, 74, 72, 69]
    expected = [v for p in pitches for v in (372, p, 257, p + 128)]
    assert np.all(arr.flatten() == np.array(expected, dtype=np.uint16))

    restored = muspy.from_representation(arr, "event")
    assert restored[0].notes == music[0].notes
def test_event_representation_single_note_off():
    """Event representation using a single shared note-off event."""
    music = muspy.load(TEST_JSON_PATH)

    arr = muspy.to_representation(
        music, "event", use_single_note_off_event=True, encode_velocity=True
    )
    # The shape assertion was already disabled upstream; keep dtype only.
    assert arr.dtype == np.uint16

    # Each note encodes as the four events (245, pitch, 130, 128).
    pitches = [76, 75, 76, 75, 76, 71, 74, 72, 69]
    expected = [v for p in pitches for v in (245, p, 130, 128)]
    assert np.all(arr.flatten() == np.array(expected, dtype=np.uint16))

    restored = muspy.from_representation(
        arr, "event", use_single_note_off_event=True
    )
    assert restored[0].notes == music[0].notes
def test_event_representation_force_velocity_event():
    """Event representation emitting a velocity event only when needed."""
    music = muspy.load(TEST_JSON_PATH)

    # Encoding
    encoded = muspy.to_representation(
        music, "event", encode_velocity=True, force_velocity_event=False
    )
    assert encoded.shape == (28, 1)
    assert encoded.dtype == np.uint16

    # Only the first note carries the velocity event (372); afterwards
    # each note is (pitch, 257, pitch + 128).
    pitches = [76, 75, 76, 75, 76, 71, 74, 72, 69]
    answer = [372] + [v for p in pitches for v in (p, 257, p + 128)]
    assert np.all(encoded.flatten() == np.array(answer, dtype=np.uint16))

    # Decoding. Every sibling test decodes via from_representation(arr,
    # "event"); passing "event" to from_event_representation was a bug.
    decoded = muspy.from_representation(encoded, "event")
    assert decoded[0].notes == music[0].notes
def test_event_representation_end_of_sequence_event():
    """Event representation terminated by an end-of-sequence event."""
    music = muspy.load(TEST_JSON_PATH)

    arr = muspy.to_representation(
        music, "event", encode_velocity=True, use_end_of_sequence_event=True
    )
    assert arr.shape == (37, 1)
    assert arr.dtype == np.uint16
    # The final event is the end-of-sequence marker (388).
    assert arr[-1] == 388

    restored = muspy.from_representation(
        arr, "event", use_end_of_sequence_event=True
    )
    assert restored[0].notes == music[0].notes
def test_pianoroll_representation():
    """Round-trip through the piano-roll representation."""
    music = muspy.load(TEST_JSON_PATH)

    arr = muspy.to_representation(music, "pianoroll")
    assert arr.shape == (19, 128)
    assert arr.dtype == np.uint8
    # Nine notes, two time steps each, at velocity 64.
    assert arr.sum() == 2 * 9 * 64

    # Non-zero columns follow the melody, two steps per note.
    pitches = [76, 75, 76, 75, 76, 71, 74, 72, 69]
    expected = [p for p in pitches for _ in range(2)]
    assert np.all(arr.nonzero()[1] == np.array(expected, dtype=np.uint8))

    restored = muspy.from_representation(arr, "pianoroll")
    assert restored[0].notes == music[0].notes
def test_pianoroll_representation_encode_velocity():
    """Binary piano-roll (encode_velocity=False) uses a boolean dtype."""
    music = muspy.load(TEST_JSON_PATH)

    # Encoding
    encoded = muspy.to_representation(
        music, "pianoroll", encode_velocity=False
    )
    assert encoded.shape == (19, 128)
    # np.bool was a deprecated alias removed in NumPy 1.24; the builtin
    # bool compares equal to the boolean dtype.
    assert encoded.dtype == bool
    # One True per note per time step: nine notes, two steps each.
    assert encoded.sum() == 2 * 9
|
import io
import sys
from collections import deque
from conscodec import ConsCodec
from pathlib import Path
def PARSE_NUMBER(s):
    """Parse s as an integer, returning 0 (and logging) on failure.

    :param s: value to convert (string, int, or anything int() accepts)
    :return: the parsed integer, or 0 if s cannot be converted
    """
    try:
        return int(s)
    # int() raises ValueError for malformed strings and TypeError for
    # non-numeric types; the original caught only TypeError, so bad
    # strings crashed instead of hitting the 0 fallback.
    except (TypeError, ValueError) as e:
        print(repr(s), e, file=sys.stderr)
        return 0
def PARSE_FUNCTIONS(fn):
    """
    Parse a galaxy.txt-style definitions file into {name: Expr}.

    Each non-empty line has the form ``<name> = <tokens...>`` where the
    tokens are a pre-order serialisation using ``ap`` for application.

    :param fn: path of the definitions file
    :return: dict mapping each definition name to its parsed Expr tree
    """
    def parse_ast(tokens):
        # Pre-order parse: push 'ap' markers and atoms, folding a
        # completed application whenever the top of the stack holds
        # 'ap', <fun>, <arg>.
        stream = iter(tokens)

        def read(): return next(stream, None)
        # Two '$' sentinels guarantee stack[-3] is always readable.
        stack = ['$', '$']
        while (s := read()) is not None:
            if s == 'ap':
                stack.append('ap')
            else:
                # Numeric tokens (optionally negative) become int atoms.
                if (s.isdigit() or (s[:1] == '-' and s[1:].isdigit())):
                    s = int(s, 10)
                stack.append(Atom(s))
                # Reduce: 'ap', f, x  ->  Ap(f, x)
                while (stack[-3] == 'ap') and (stack[-2] != 'ap'):
                    stack[-3:] = (Ap(stack[-2], stack[-1]), )
        return stack[-1]

    scope = dict()
    with open(fn) as fp:
        for s in fp.readlines():
            tokens = s.split()
            if tokens:
                assert tokens[1] == '='
                scope[tokens[0]] = parse_ast(tokens[2:])
    return scope
def GET_LIST_ITEMS_FROM_EXPR(expr):
    """
    Split a 3-element cons list into its three items, re-encoded.

    :param expr: cons-list Expr of exactly three items (flag, state, data)
    :return: tuple of three Expr trees
    :raises Exception: if the decoded list does not have exactly 3 items
    """
    p = cons_to_list(expr)
    if len(p) == 3:
        x,y,z = p
        # Round-trip each item back through list_to_cons so the caller
        # receives Expr trees rather than plain Python structures.
        return (
            list_to_cons(x),
            list_to_cons(y),
            list_to_cons(z),
        )
    print(repr(p))
    raise Exception(('unhandled', p))
    # return (
    #     list_to_cons([0]),
    #     list_to_cons([]),
    #     list_to_cons([]),
    # )
def SEND_TO_ALIEN_PROXY(data):
    """
    Stub for the alien proxy call: decodes and logs the payload.

    NOTE(review): this returns None, and Galaxy.interact() feeds the
    return value back in as the next event — confirm that is intentional
    before relying on multi-step interactions.

    :param data: cons-list Expr payload
    :return: None
    """
    print('SEND_TO_ALIEN_PROXY')
    xs = cons_to_list(data)
    print(repr(xs))
# Persistent layer of '#' pixels accumulated across frames,
# keyed by (x, y) coordinate.
Display = dict()

def PRINT_IMAGES(images):
    """
    Render a cons-list of point lists as ASCII art on stdout.

    Each image layer gets its own character from '#@?!abcdefghi'; '#'
    pixels are also remembered in the module-level Display dict so they
    persist into later frames.

    :param images: cons-list of lists of (x, y) points
    :return: None
    """
    ps = cons_to_list(images)
    # Start from the persistent display, then overlay this frame.
    buffer = dict(Display)
    for c, xs in zip('#@?!abcdefghi', ps):
        for p in xs:
            buffer[p] = c
            if c == '#': Display[p] = c
    # Shift all points so the rendered grid starts at (0, 0).
    xmin = min(x for x,y in buffer.keys())
    ymin = min(y for x,y in buffer.keys())
    xo = (-xmin)
    yo = (-ymin)
    w = max(x for x,y in buffer.keys()) + 1 - xmin
    h = max(y for x,y in buffer.keys()) + 1 - ymin
    grid = [['.' for dx in range(w)] for dy in range(h)]
    for (x,y), c in buffer.items():
        grid[y+yo][x+xo] = c
    print(f'offset: ({xo}, {yo})')
    s = '\n'.join(map(''.join, grid))
    print(s)
def REQUEST_CLICK_FROM_USER():
    """Prompt the user on stdin for an (x, y) click position.

    :return: (x, y) tuple of ints
    """
    print('REQUEST_CLICK_FROM_USER')
    x_coord = int(input('> '))
    y_coord = int(input('> '))
    click = (x_coord, y_coord)
    print(repr(click))
    return click
def decode_api_response(text):
    """
    Decode a modulated API response string into an Expr tree.

    :param text: encoded response text understood by ConsCodec
    :return: cons Expr tree of the decoded data
    """
    cc = ConsCodec()
    data = cc.decode(text)
    return list_to_cons(data)
class Partial:
    """Marker wrapping a cons cell that has only one argument applied."""

    def __init__(self, arg):
        # The single argument captured so far.
        self.arg = arg

    def __repr__(self):
        return f'{type(self).__name__}({self.arg!r})'
def cons_to_list(expr):
    """
    Convert a cons/nil Expr tree into Python lists/tuples/values.

    nil -> [], proper cons lists -> list, improper pairs -> tuple,
    atoms -> their name. A bare ``cons`` applied to a single argument
    becomes a Partial wrapper.

    :param expr: Expr tree built from cons/nil/number atoms
    :return: nested Python structure
    """
    if isinstance(expr, Atom):
        if (expr.name == 'nil'): return []
        return expr.name
    # Ap(cons, x): only one argument applied so far.
    if isinstance(expr.fun, Atom) and (expr.fun.name == 'cons'):
        return Partial(cons_to_list(expr.arg))
    head = cons_to_list(expr.fun)
    if isinstance(head, Partial): head = head.arg
    tail = cons_to_list(expr.arg)
    # A list tail extends the list; any other tail forms a pair.
    if isinstance(tail, list):
        return [head, *tail]
    return (head, tail)
def list_to_cons(data):
    """
    Convert nested Python lists/tuples/values into a cons Expr tree.

    Inverse of cons_to_list: []/() -> nil, lists become proper cons
    lists, 2-tuples become single cons pairs, ints/strs become Atoms.

    :param data: nested Python structure
    :return: Expr tree
    """
    def enc(s):
        if (s == list()) or (s == tuple()):
            return nil
        if isinstance(s, (int,str)): return Atom(s)
        if isinstance(s, list):
            return Ap(Ap(cons, enc(s[0])), enc(s[1:]))
        if isinstance(s, tuple):
            # A pair maps to one cons cell; longer tuples chain cells.
            if len(s) == 2:
                return Ap(Ap(cons, enc(s[0])), enc(s[1]))
            return Ap(Ap(cons, enc(s[0])), enc(s[1:]))
    return enc(data)
def iterative_eq(a, b):
    """Structural equality of two Expr trees, without recursion.

    Walks both trees with an explicit worklist so deeply nested Ap
    chains cannot exhaust the recursion limit.

    :param a: Expr (Atom or Ap)
    :param b: Expr (Atom or Ap)
    :return: True if structurally equal, False otherwise
    """
    fringe = deque([(a, b)])
    while fringe:
        a, b = fringe.popleft()
        if isinstance(a, Atom):
            # Previously returned None for a mismatch; an explicit False
            # is still falsy but behaves correctly as an __eq__ result.
            if (not isinstance(b, Atom)) or (a.name != b.name):
                return False
        else:
            if not isinstance(b, Ap):
                return False
            fringe.append((a.fun, b.fun))
            fringe.append((a.arg, b.arg))
    return True
class Expr:
    """Base class for galaxy AST nodes; carries a memoisation slot."""

    def __init__(self):
        # Cached evaluation result; None until the node is evaluated.
        self.evaluated = None

    def __repr__(self):
        return f'{type(self).__name__}()'
class Atom(Expr):
    """Leaf node: a builtin name (e.g. 'cons') or an integer literal."""

    def __init__(self, name):
        super().__init__()
        # Either a string token or an int value.
        self.name = name

    def __repr__(self):
        return f'{type(self).__name__}({self.name!r})'

    def __eq__(self, other):
        return isinstance(other, Atom) and (self.name == other.name)
class Ap(Expr):
    """Application node: fun applied to arg."""

    def __init__(self, fun, arg):
        super().__init__()
        self.fun = fun
        self.arg = arg

    def __repr__(self):
        # Deliberately opaque: full trees can be enormous.
        return f'{type(self).__name__}(...)'

    def __eq__(self, other):
        # Structural comparison is delegated to the non-recursive helper.
        return isinstance(other, Ap) and iterative_eq(self, other)
# Shared singleton atoms used throughout the evaluator.
cons = Atom("cons")
t = Atom("t")  # 'true': applied to two args, returns the first
f = Atom("f")  # 'false': applied to two args, returns the second
nil = Atom("nil")
class Galaxy:
    """Interactive evaluator for the 'galaxy' program.

    Loads the galaxy.txt definitions, keeps the interaction state and
    the last rendered frame, and evaluates expressions lazily with
    memoisation on each node's ``evaluated`` slot.
    """

    def __init__(self, target=None):
        # Locate the galaxy.txt definitions relative to this file.
        fn = next(Path(__file__).parent.resolve().glob('../../**/spec/galaxy.txt'))
        self.functions = PARSE_FUNCTIONS(fn)
        self.state = nil          # current protocol state (Expr)
        self.mouse = (0, 0)       # pending click, consumed by runloop()
        self.frame = None         # last decoded frame (set by render_frame)

    def interact(self, state, event):
        """Run one protocol step: galaxy(state, event) -> (state, data)."""
        expr = Ap(Ap(Atom("galaxy"), state), event)
        res = self._eval1(expr)
        # Note: res will be modulatable here (consists of cons, nil and
        # numbers only)
        flag, newState, data = GET_LIST_ITEMS_FROM_EXPR(res)
        if (self._asNum(flag) == 0):
            return (newState, data)
        # flag != 0: forward data to the proxy and keep interacting.
        return self.interact(newState, SEND_TO_ALIEN_PROXY(data))

    def _eval1(self, expr):
        """Evaluate expr to a fixed point, memoising on the start node."""
        if (expr.evaluated is not None):
            return expr.evaluated
        initialExpr = expr
        while (True):
            result = self._tryEval(expr)
            if (result == expr):
                initialExpr.evaluated = result
                return result
            expr = result

    def _tryEval(self, expr):
        """Perform one reduction step; returns expr unchanged when stuck."""
        if (expr.evaluated is not None):
            return expr.evaluated
        # Named definitions expand to their bodies.
        if isinstance(expr, Atom) and (self.functions.get(expr.name) is not None):
            return self.functions[expr.name]
        if isinstance(expr, Ap):
            fun = self._eval1(expr.fun)
            x = expr.arg
            if isinstance(fun, Atom):
                # Unary builtins.
                if (fun.name == "neg"): return Atom(-self._asNum(self._eval1(x)))
                if (fun.name == "i"): return x
                if (fun.name == "nil"): return t
                if (fun.name == "isnil"): return Ap(x, Ap(t, Ap(t, f)))
                if (fun.name == "car"): return Ap(x, t)
                if (fun.name == "cdr"): return Ap(x, f)
            if isinstance(fun, Ap):
                fun2 = self._eval1(fun.fun)
                y = fun.arg
                if isinstance(fun2, Atom):
                    # Binary builtins. NOTE(review): '//' floors toward
                    # -inf; confirm 'div' semantics for negative operands.
                    if (fun2.name == "t"): return y
                    if (fun2.name == "f"): return x
                    if (fun2.name == "add"): return Atom(self._asNum(self._eval1(x)) + self._asNum(self._eval1(y)))
                    if (fun2.name == "mul"): return Atom(self._asNum(self._eval1(x)) * self._asNum(self._eval1(y)))
                    if (fun2.name == "div"): return Atom(self._asNum(self._eval1(y)) // self._asNum(self._eval1(x)))
                    if (fun2.name == "lt"): return t if self._asNum(self._eval1(y)) < self._asNum(self._eval1(x)) else f
                    if (fun2.name == "eq"): return t if self._asNum(self._eval1(x)) == self._asNum(self._eval1(y)) else f
                    if (fun2.name == "cons"): return self._evalCons(y, x)
                if isinstance(fun2, Ap):
                    fun3 = self._eval1(fun2.fun)
                    z = fun2.arg
                    if isinstance(fun3, Atom):
                        # Ternary combinators.
                        if (fun3.name == "s"): return Ap(Ap(z, x), Ap(y, x))
                        if (fun3.name == "c"): return Ap(Ap(z, x), y)
                        if (fun3.name == "b"): return Ap(z, Ap(y, x))
                        if (fun3.name == "cons"): return Ap(Ap(x, z), y)
        return expr

    def _evalCons(self, a, b):
        """Build a fully-evaluated cons cell and mark it as evaluated."""
        res = Ap(Ap(cons, self._eval1(a)), self._eval1(b))
        res.evaluated = res
        return res

    def _asNum(self, n):
        """Extract an int from an Atom; log and return 0 otherwise."""
        if isinstance(n, Atom):
            return PARSE_NUMBER(n.name)
        print('not a number', type(n), repr(n), file=sys.stdout)
        return 0

    def click(self, x, y):
        """Record a pending click to be consumed by the next runloop()."""
        self.mouse = (x, y)

    def render_frame(self, images):
        """Store the decoded frame for an external renderer to pick up."""
        self.frame = images

    def runloop(self):
        """Consume the pending click, run one interact step, render it."""
        mouse = self.mouse or (0, 0)
        click = nil
        if mouse:
            self.mouse = None
            click = Ap(Ap(cons, Atom(mouse[0])), Atom(mouse[1]))
        print('>', cons_to_list(self.state))
        print('>', cons_to_list(click))
        (newState, images) = self.interact(self.state, click)
        print('<', cons_to_list(newState))
        print('<', cons_to_list(images))
        self.state = newState
        # PRINT_IMAGES(images)
        self.render_frame(cons_to_list(images))

    def eval_step(self, mouse):
        """Set the pending click and run one loop iteration.

        Fixed typo: this previously assigned to ``self.mose``, so the
        click passed here was silently ignored by runloop().
        """
        self.mouse = mouse
        return self.runloop()
def main():
    """Run the galaxy evaluator interactively.

    Renders one frame per iteration, then blocks asking the user for
    the next click on stdin.
    """
    galaxy = Galaxy()
    while True:
        galaxy.runloop()
        click = REQUEST_CLICK_FROM_USER()
        galaxy.mouse = click

if __name__ == '__main__':
    main()
|
# mnist_runner.py
#
# Author : James Mnatzaganian
# Contact : http://techtorials.me
# Organization : NanoComputing Research Lab - Rochester Institute of
# Technology
# Website : https://www.rit.edu/kgcoe/nanolab/
# Date Created : 12/05/15
#
# Description : SLURM runner for MNIST.
# Python Version : 2.7.X
#
# License : MIT License http://opensource.org/licenses/mit-license.php
# Copyright : (c) 2016 James Mnatzaganian
"""
SLURM runner for MNIST.
G{packagetree mHTM}
"""
__docformat__ = 'epytext'
# Native imports
import cPickle, os, json, sys, shutil
# Third party imports
import numpy as np
from sklearn.svm import LinearSVC
# Program imports
from mHTM.datasets.loader import load_mnist
from mHTM.region import SPRegion
def full_cv(base_dir):
    """
    Run the MNIST experiment. Each CV split is executed sequentially.

    @param base_dir: The full path to the base directory. This directory should
    contain the config as well as the pickled data.
    """
    # Get the keyword arguments for the SP
    with open(os.path.join(base_dir, 'config.json'), 'rb') as f:
        kargs = json.load(f)
    kargs['clf'] = LinearSVC(random_state=kargs['seed'])

    # Get the data. Train and test sets are pooled; the pickled CV
    # splits decide the actual partition per run.
    (tr_x, tr_y), (te_x, te_y) = load_mnist()
    x, y = np.vstack((tr_x, te_x)), np.hstack((tr_y, te_y))

    # Get the CV splits
    with open(os.path.join(base_dir, 'cv.pkl'), 'rb') as f:
        cv = cPickle.load(f)

    # Execute each run with a fresh SPRegion, scoring three ways.
    for tr, te in cv:
        clf = SPRegion(**kargs)
        clf.fit(x[tr], y[tr])

        # Column accuracy
        clf.score(x[te], y[te])

        # Probabilistic accuracy
        clf.score(x[te], y[te], tr_x=x[tr], score_method='prob')

        # Dimensionality reduction method
        clf.score(x[te], y[te], tr_x=x[tr], score_method='reduction')
        ndims = len(clf.reduce_dimensions(x[0]))
        clf._log_stats('Number of New Dimensions', ndims)
def one_cv(base_dir, cv_split):
    """
    Run the MNIST experiment. Only the specified CV split is executed.

    @param base_dir: The full path to the base directory. This directory should
    contain the config as well as the pickled data.

    @param cv_split: The 1-based index for the CV split.
    """
    # Get the keyword arguments for the SP (per-split config file)
    with open(os.path.join(base_dir, 'config-{0}.json'.format(cv_split)),
              'rb') as f:
        kargs = json.load(f)
    kargs['clf'] = LinearSVC(random_state=kargs['seed'])

    # Get the data: pool train and test; the CV split partitions them.
    (tr_x, tr_y), (te_x, te_y) = load_mnist()
    x, y = np.vstack((tr_x, te_x)), np.hstack((tr_y, te_y))

    # Get the CV splits (cv_split is 1-based)
    with open(os.path.join(base_dir, 'cv.pkl'), 'rb') as f:
        cv = cPickle.load(f)
    tr, te = cv[cv_split - 1]

    # Remove the split directory, if it exists
    shutil.rmtree(os.path.join(base_dir, str(cv_split)), True)

    # Execute
    clf = SPRegion(**kargs)
    clf.fit(x[tr], y[tr])

    # Column accuracy
    clf.score(x[te], y[te])

    # Probabilistic accuracy
    clf.score(x[te], y[te], tr_x=x[tr], score_method='prob')

    # Dimensionality reduction method
    clf.score(x[te], y[te], tr_x=x[tr], score_method='reduction')
    ndims = len(clf.reduce_dimensions(x[0]))
    clf._log_stats('Number of New Dimensions', ndims)
def full_mnist(base_dir, new_dir, auto_update=False):
	"""
	Execute a full MNIST run using the parameters in the config file.

	@param base_dir: The full path to the base directory. This directory should
	contain the config.

	@param new_dir: The full path of where the data should be saved.

	@param auto_update: If True the permanence increment and decrement amounts
	will automatically be computed by the runner. If False, the ones specified
	in the config file will be used.
	"""
	# Load the SP parameters and point the logger at the new directory
	with open(os.path.join(base_dir, 'config.json'), 'rb') as f:
		params = json.load(f)
	params['log_dir'] = new_dir
	params['clf'] = LinearSVC(random_state=params['seed'])

	(tr_x, tr_y), (te_x, te_y) = load_mnist()

	if auto_update:
		# Derive the permanence update amounts from the data statistics:
		# average total activation per training instance ...
		avg_ts = tr_x.sum(1).mean()
		# ... average per-bit active probability ...
		a_p = avg_ts / float(tr_x.shape[1])
		# ... and a scaling factor normalizing by the average activation
		scaling_factor = 1 / avg_ts
		params['pinc'] = scaling_factor * (1 / a_p)
		params['pdec'] = scaling_factor * (1 / (1 - a_p))

	# Train the SP and record the various accuracy measures
	clf = SPRegion(**params)
	clf.fit(tr_x, tr_y)
	clf.score(te_x, te_y)                                        # Column accuracy
	clf.score(te_x, te_y, tr_x=tr_x, score_method='prob')        # Probabilistic accuracy
	clf.score(te_x, te_y, tr_x=tr_x, score_method='reduction')   # Dimensionality reduction
	ndims = len(clf.reduce_dimensions(tr_x[0]))
	clf._log_stats('Number of New Dimensions', ndims)
if __name__ == '__main__':
	# CLI dispatch:
	#   <base_dir>                      -> full cross-validation run
	#   <base_dir> <split|new_dir>      -> single CV split, or full run if not an int
	#   <base_dir> <new_dir> <auto 0/1> -> full run with explicit auto_update flag
	nargs = len(sys.argv)
	if nargs == 2:
		full_cv(sys.argv[1])
	elif nargs == 3:
		try:
			one_cv(sys.argv[1], int(sys.argv[2]))
		except ValueError:
			# Second argument was not an integer -> treat it as the output dir
			full_mnist(sys.argv[1], sys.argv[2])
	elif nargs == 4:
		full_mnist(sys.argv[1], sys.argv[2], bool(int(sys.argv[3])))
import gc
import random
from pathlib import Path
import numpy as np
import pandas as pd
import wandb
from annoy import AnnoyIndex
from more_itertools import chunked
from scipy.spatial.distance import cdist
from sklearn.neighbors import NearestNeighbors
from tensorflow.keras.callbacks import EarlyStopping
from tree import shared, train_model, utils
class MrrEarlyStopping(EarlyStopping):
    """Early stopping driven by the validation MRR instead of the loss."""

    def __init__(self, encoded_seqs_dict: dict):
        super().__init__(monitor='val_mrr', mode='max', restore_best_weights=True, verbose=True, patience=5)
        self.encoded_seqs_dict = encoded_seqs_dict

    def on_epoch_end(self, epoch, logs=None):
        # Compute the MRR on the held-out sequences and expose it as `val_mrr`
        # so the EarlyStopping machinery can monitor it.
        mean_mrr = compute_mrr(self.model, self.encoded_seqs_dict)
        print('Mean MRR:', mean_mrr)
        merged_logs = dict(logs or {})
        merged_logs['val_mrr'] = mean_mrr
        super().on_epoch_end(epoch, merged_logs)
def get_embeddings(model, encoded_seqs_dict: dict, idx_chunk):
    """Predict embeddings for the rows selected by ``idx_chunk``, one sub-model
    per data type, and repack them into (code, query) arrays."""
    embeddings_list = []
    for data_type in shared.SUB_TYPES:
        predictor = train_model.get_embedding_predictor(model, data_type)
        selected_seqs = encoded_seqs_dict.get(data_type)[idx_chunk, :]
        if shared.CONTEXT and shared.BERT1:
            # BERT-style triple input: token ids, attention mask (0 on padding
            # positions), and all-zero segment ids.
            input_masks = np.where(selected_seqs == shared.encoded_pads_dict[data_type], 0, 1)
            input_segments = np.zeros_like(selected_seqs)
            model_input = [selected_seqs, input_masks, input_segments]
        else:
            model_input = selected_seqs
        embeddings_list.append(predictor.predict(model_input))
    return utils.repack_embeddings(embeddings_list)
def compute_mrr(model, encoded_seqs_dict: dict):
    """Mean reciprocal rank over shuffled, full-sized batches of samples."""
    n_samples = encoded_seqs_dict.get('query').shape[0]
    sample_order = list(range(n_samples))
    random.shuffle(sample_order)
    batch_scores = []
    for batch in chunked(sample_order, shared.BATCH_SIZE):
        # Only full batches are evaluated; the trailing partial one is dropped.
        if len(batch) < shared.BATCH_SIZE:
            continue
        code_emb, query_emb = get_embeddings(model, encoded_seqs_dict, batch)
        distances = cdist(query_emb, code_emb, 'cosine')
        # Each query's own code sits on the diagonal; its rank is the number
        # of codes at least as close as it.
        own_distance = np.expand_dims(np.diag(distances), axis=-1)
        ranks = np.sum(distances <= own_distance, axis=-1)
        batch_scores.append(np.mean(1.0 / ranks))
    return np.mean(batch_scores)
def emit_mrr_scores(model, language: str):
    """Return (valid MRR, test MRR) for one language's model."""
    seqs_by_split = {}
    for split in ('valid', 'test'):
        split_seqs = {data_type: utils.load_seq(language, split, data_type)
                      for data_type in shared.SUB_TYPES}
        # These splits are not the official evaluation set, so invalid
        # sequences may be filtered out.
        seqs_by_split[split] = utils.filter_valid_seqs(split_seqs)
    return (compute_mrr(model, seqs_by_split['valid']),
            compute_mrr(model, seqs_by_split['test']))
def emit_ndcg_scores(model, language: str):
    """Build (or reuse) the evaluation embeddings for ``language`` and return
    the top-100 nearest-code predictions for every CSN query."""
    print(f'Evaluating {language}')
    # (Re)build cached embeddings where needed.
    for data_type in shared.SUB_TYPES:
        # With attention the embeddings depend on the query side, so the cache
        # cannot be reused and we always rebuild.
        if not shared.ATTENTION and utils.check_embedding(language, data_type):
            continue
        print(f'Building {data_type} embeddings')
        predictor = train_model.get_embedding_predictor(model, data_type)
        seqs = utils.load_seq(language, 'evaluation', data_type)
        utils.dump_embedding(predictor.predict(seqs), language, data_type)

    print('Loading embeddings')
    embeddings_list = [utils.load_embedding(language, data_type)
                       for data_type in shared.SUB_TYPES]
    code_embeddings, query_embeddings = utils.repack_embeddings(embeddings_list)
    evaluation_docs = [{'url': doc['url'], 'identifier': doc['identifier']}
                       for doc in utils.load_doc(language, 'evaluation')]

    print('Indexing embeddings')
    queries = utils.get_csn_queries()
    prediction = []

    def append_row(query, nearest_idx):
        # One output row per (query, retrieved document) pair.
        doc = evaluation_docs[nearest_idx]
        prediction.append({
            'query': query,
            'language': language,
            'identifier': doc['identifier'],
            'url': doc['url'],
        })

    if shared.ANNOY:
        # Approximate nearest neighbours via Annoy.
        annoy = AnnoyIndex(shared.EMBEDDING_SIZE, 'angular')
        for idx in range(code_embeddings.shape[0]):
            annoy.add_item(idx, code_embeddings[idx, :])
        annoy.build(10)
        # annoy.build(200)
        for query_idx, query in enumerate(queries):
            for nearest_idx in annoy.get_nns_by_vector(query_embeddings[query_idx], 100):
                append_row(query, nearest_idx)
    else:
        # Exact nearest neighbours via scikit-learn.
        nn = NearestNeighbors(n_neighbors=100, metric='cosine', n_jobs=-1)
        nn.fit(code_embeddings)
        _, nearest_indices = nn.kneighbors(query_embeddings)
        for query_idx, query in enumerate(queries):
            for nearest_idx in nearest_indices[query_idx, :]:
                append_row(query, nearest_idx)

    # The doc list can be large; free it eagerly before returning.
    del evaluation_docs
    gc.collect()
    return prediction
def evaluating():
    """Evaluate every language model: MRR on valid/test, then top-100 NDCG
    predictions written to ``model_predictions.csv``."""
    print('Evaluating')
    # One trained model per language.
    models = {language: utils.load_model(language, train_model.get_model())
              for language in shared.LANGUAGES}

    # Per-language MRR on the valid and test splits.
    valid_mrr_scores = {}
    test_mrr_scores = {}
    for language in shared.LANGUAGES:
        valid_mean_mrr, test_mean_mrr = emit_mrr_scores(models.get(language), language)
        print(f'{language} - Valid Mean MRR: {valid_mean_mrr}, Test Mean MRR: {test_mean_mrr}')
        valid_mrr_scores[f'{language}_valid_mrr'] = valid_mean_mrr
        test_mrr_scores[f'{language}_test_mrr'] = test_mean_mrr
    valid_mean_mrr = np.mean(list(valid_mrr_scores.values()))
    test_mean_mrr = np.mean(list(test_mrr_scores.values()))
    print(f'All languages - Valid Mean MRR: {valid_mean_mrr}, Test Mean MRR: {test_mean_mrr}')
    if shared.WANDB:
        wandb.log({
            'valid_mean_mrr': valid_mean_mrr,
            'test_mean_mrr': test_mean_mrr,
            **valid_mrr_scores,
            **test_mrr_scores,
        })

    # NDCG predictions for the official evaluation queries.
    predictions = []
    for language in shared.LANGUAGES:
        predictions.extend(emit_ndcg_scores(models.get(language), language))
    df_predictions = pd.DataFrame(predictions, columns=['query', 'language', 'identifier', 'url'])
    out_dir = Path(wandb.run.dir) if shared.WANDB else shared.RESOURCES_DIR
    df_predictions.to_csv(out_dir / 'model_predictions.csv', index=False)
|
import tkinter.messagebox as msgbox
import tkinter.ttk as ttk
from tkinter import*
# Main window and static layout of the molecular work tool.
root=Tk()
root.title('Molecular Work Tool')
root.geometry("1150x600+100+50")
Label(root, text='안녕하세요 교수님!').grid(row=0,column=2)
# wframe: work-mode selector, fframe/sframe: sequence inputs, tframe: results.
wframe=LabelFrame(root, text='어떤 작업을 하실 생각이신가요?')
wframe.grid(row=1, column=0)
fframe=LabelFrame(root, text='첫 번째 sequence')
fframe.grid(row=2, column=2)
sframe=LabelFrame(root, text='찾으실 sequence (or 두 번째 sequence)')
sframe.grid(row=4, column=2)
tframe=LabelFrame(root, text='결과')
tframe.grid(row=2, column=5,rowspan=3, padx=10)
Label(root, text='감사합니다 :D').grid(row=8,column=2)
# Option panels that are shown/hidden per work mode (see b()):
# eframe = start/end positions, aframe = alignment scores, lframe = mass/moles.
eframe=Frame(root)
aframe=Frame(root)
lframe=Frame(root)
# Standard genetic code: codon -> 3-letter and 1-letter amino-acid codes
# ('---'/'-' mark stop codons).
codon3={'TTT':'Phe','TTC':'Phe','TTA':'Leu','TTG':'Leu','TCT':'Ser','TCC':'Ser','TCA':'Ser','TCG':'Ser','TAT':'Tyr','TAC':'Tyr','TAA':'---','TAG':'---','TGT':'Cys','TGC':'Cys','TGA':'---','TGG':'Trp','CTT':'Leu','CTC':'Leu','CTA':'Leu','CTG':'Leu','CCT':'Pro','CCC':'Pro','CCA':'Pro','CCG':'Pro','CAT':'His','CAC':'His','CAA':'Gln','CAG':'Gln','CGT':'Arg','CGC':'Arg','CGA':'Arg','CGG':'Arg','ATT':'Ile','ATC':'Ile','ATA':'Ile','ATG':'Met','ACT':'Thr','ACC':'Thr','ACA':'Thr','ACG':'Thr','AAT':'Asn','AAC':'Asn','AAA':'Lys','AAG':'Lys','AGT':'Ser','AGC':'Ser','AGA':'Arg','AGG':'Arg','GTT':'Val','GTC':'Val','GTA':'Val','GTG':'Val','GCT':'Ala','GCC':'Ala','GCA':'Ala','GCG':'Ala','GAT':'Asp','GAC':'Asp','GAA':'Glu','GAG':'Glu','GGT':'Gly','GGC':'Gly','GGA':'Gly','GGG':'Gly'}
codon1={'TTT':'F','TTC':'F','TTA':'L','TTG':'L','TCT':'S','TCC':'S','TCA':'S','TCG':'S','TAT':'Y','TAC':'Y','TAA':'-','TAG':'-','TGT':'C','TGC':'C','TGA':'-','TGG':'W','CTT':'L','CTC':'L','CTA':'L','CTG':'L','CCT':'P','CCC':'P','CCA':'P','CCG':'P','CAT':'H','CAC':'H','CAA':'Q','CAG':'Q','CGT':'R','CGC':'R','CGA':'R','CGG':'R','ATT':'I','ATC':'I','ATA':'I','ATG':'M','ACT':'T','ACC':'T','ACA':'T','ACG':'T','AAT':'N','AAC':'N','AAA':'K','AAG':'K','AGT':'S','AGC':'S','AGA':'R','AGG':'R','GTT':'V','GTC':'V','GTA':'V','GTG':'V','GCT':'A','GCC':'A','GCA':'A','GCG':'A','GAT':'D','GAC':'D','GAA':'E','GAG':'E','GGT':'G','GGC':'G','GGA':'G','GGG':'G'}
# Conversion tables between 3-letter and 1-letter amino-acid abbreviations.
threeto1={'Ala':'A', 'Ser':'S','Cys':'C','Thr':'T','Val':'V','Met':'M','Trp':'W','Tyr':'Y','Asn':'N','Asp':'D','Gln':'Q','Glu':'E','His':'H','Gly':'G','Pro':'P','Lys':'K','Leu':'L','Ile':'I','Phe':'F','Arg':'R'}
oneto3={'A':'Ala', 'S':'Ser','C':'Cys','T':'Thr','V':'Val','M':'Met','W':'Trp','Y':'Tyr','N':'Asn','D':'Asp','Q':'Gln','E':'Glu','H':'His','G':'Gly','P':'Pro','K':'Lys','L':'Leu','I':'Ile','F':'Phe','R':'Arg'}
# Characters accepted as nucleic-acid bases (N = unknown base).
base=['A','T','U','C','G','N']
# Work modes; the global `w` (set in b()) is an index into this list.
work=['DNA seq 길이 확인', 'DNA seq 위치 확인','RNA,DNA moles <-> mass','Complementray seq 생성','DNA reverse로 배열','Translation', '간단한 DNA seq alignment', 'a.a. 서열 길이 확인','a.a. 위치 확인', 'a.a. 약자 변환']
cb=ttk.Combobox(wframe, width=22, height=5, values=work, state='readonly')
cb.set('작업을 선택해 주세요')
cb.grid(row=0,column=0)
# Mode-specific comboboxes (gridded on demand by b()).
letter=['1 letter','3 letters']
cb1=ttk.Combobox(wframe, height=2, values=letter, state='readonly')
cb1.set('아미노산 약자 개수')
DNAform=['ssDNA로 각각', 'dsDNA로']
cb2=ttk.Combobox(wframe, height=2, values=DNAform, state='readonly')
cb2.set('나타낼 DNA 형식')
findingmethod=['서열의 위치', '위치의 서열']
cb3=ttk.Combobox(wframe,height=2, values=findingmethod, state='readonly')
cb3.set('찾을 방식')
# Start / end position entries used by the position-search modes.
esframe=LabelFrame(eframe,text='시작')
esframe.grid(row=0,column=0)
etframe=LabelFrame(eframe,text='끝')
etframe.grid(row=1,column=0)
es=ttk.Entry(esframe, width=8)
es.pack()
et=ttk.Entry(etframe, width=8)
et.pack()
# Mass <-> moles conversion controls (mode 2).
caltype=['dsDNA','ssDNA','ssRNA']
calmethod=['mass -> moles','moles -> mass']
caltcb=ttk.Combobox(wframe,height=3, values=caltype, state='readonly')
caltcb.set('NA type')
calmcb=ttk.Combobox(wframe,height=2, values=calmethod, state='readonly')
calmcb.set('변환 방식')
lnframe=LabelFrame(lframe,text='length(bp)')
lnframe.grid(row=0,column=0)
ln=ttk.Entry(lnframe,width=10)
ln.pack()
moleframe=LabelFrame(lframe,text='moles(pmol)')
moleframe.grid(row=2,column=0)
mole=ttk.Entry(moleframe,width=8)
mole.pack()
massframe=LabelFrame(lframe,text='mass(ng)')
massframe.grid(row=3,column=0)
mass=ttk.Entry(massframe,width=8)
mass.pack()
# Amino-acid abbreviation conversion direction (mode 9).
convert=['1 to 3', '3 to 1']
cb4=ttk.Combobox(wframe,height=2, values=convert, state='readonly')
cb4.set('변환 방식')
# Alignment scoring entries (mode 6).
matframe=LabelFrame(aframe,text='match score')
matframe.grid(row=0,column=0)
mismatframe=LabelFrame(aframe,text='mismatch score')
mismatframe.grid(row=1,column=0)
gapframe=LabelFrame(aframe,text='gap penalty')
gapframe.grid(row=2,column=0)
match=ttk.Entry(matframe,width=8)
match.pack()
mismatch=ttk.Entry(mismatframe,width=8)
mismatch.pack()
gap=ttk.Entry(gapframe,width=8)
gap.pack()
# First sequence input box with scrollbar and usage instructions.
sb1=Scrollbar(fframe)
sb1.pack(side='right',fill='y')
t=Text(fframe, width=54, height=20, yscrollcommand=sb1.set)
t.insert(END,'대소문자 구분 없이 DNA 혹은 RNA 혹은 아미노산 서열을 입력해주세요 :D\n')
t.insert(END,'\n')
t.insert(END,'숫자는 들어가도 괜찮습니다\n')
t.insert(END,'RNA 서열은 DNA 서열로 변환됩니다\n')
t.insert(END,'아미노산 서열은 1 letter 약자로 입력해주세요\n')
t.insert(END,'\n')
t.insert(END,'\n')
t.insert(END,'원하시는 작업을 고르신 후 선택 버튼을 누르고 Clear를 눌러주세요')
t.pack(side='left')
sb1.config(command=t.yview)
def clear():
    """Empty both sequence input boxes."""
    for box in (t, tt):
        box.delete('1.0', END)
def a():
    """Run the currently selected work mode (global ``w``, set by b()) on the
    sequences in the input boxes ``t`` and ``tt`` and write the result into
    the output box ``p``.

    Side effects: updates the globals seq1/seq1r/seq1rr/seq2/gcr1/gcr2/
    pseq/pseqrr and temporarily re-enables the read-only result widget.

    Modes (index into ``work``): 0 length+GC, 1 position search, 2 mass/moles,
    3 complement, 4 reverse, 5 six-frame translation, 6 alignment,
    7 a.a. length, 8 a.a. position search, 9 a.a. abbreviation conversion.
    """
    global pseq
    global pseqrr
    p.config(state=NORMAL)
    p.delete('1.0',END)
    # ---- normalise the first sequence: keep letters only, RNA U -> DNA T ----
    i=t.get('1.0',END)
    global seq1
    tem3=[z for z in i if z.isalpha()==True]
    i=''.join(tem3)
    if w!=8 and w!=9:
        # Keep the original case for the amino-acid modes: mode 8 searches the
        # sequence as typed, and mode 9 needs the mixed-case 3-letter codes
        # ('Ala', ...) to survive.  (Bug fix: previously only mode 8 was
        # excluded, so upper-casing broke every 3-letter -> 1-letter lookup
        # against the mixed-case keys of ``threeto1``.)
        i=i.upper()
    tem1=[]
    for z in range(len(i)):
        if i[z]=='U':
            tem1.append(z)
    tem2=[z for z in i]
    for z in tem1:
        tem2[z]='T'
    i=''.join(tem2)
    seq1=i.split()
    seq1=''.join(seq1)
    # Complement (rr) and reverse complement (r) of seq1.
    rtem=[]
    for z in range(len(i)):
        if i[z]=='A':
            rtem.append('T')
        if i[z]=='T':
            rtem.append('A')
        if i[z]=='G':
            rtem.append('C')
        if i[z]=='C':
            rtem.append('G')
        if i[z]=='N':
            rtem.append('N')
    rr=''.join(rtem)
    rtem=[]
    for z in range(len(rr)-1,-1,-1):
        rtem.append(rr[z])
    r=''.join(rtem)
    global seq1rr
    seq1rr=rr
    global seq1r
    seq1r=r
    # GC content of seq1 (percentage); only defined for non-empty input.
    global gcr1
    gn1=seq1.count('G')
    cn1=seq1.count('C')
    if len(seq1)!=0:
        gcr1=((gn1+cn1)/len(seq1))*100
    # ---- normalise the second sequence the same way ----
    global seq2
    i=tt.get('1.0',END)
    tem3=[z for z in i if z.isalpha()==True]
    i=''.join(tem3)
    if w!=8 and w!=9:
        i=i.upper()
    tem1=[]
    for z in range(len(i)):
        if i[z]=='U':
            tem1.append(z)
    tem2=[z for z in i]
    for z in tem1:
        tem2[z]='T'
    i=''.join(tem2)
    seq2=i.split()
    seq2=''.join(seq2)
    rtem=[]
    for z in range(len(i)):
        if i[z]=='A':
            rtem.append('T')
        if i[z]=='T':
            rtem.append('A')
        if i[z]=='G':
            rtem.append('C')
        if i[z]=='C':
            rtem.append('G')
        if i[z]=='N':
            rtem.append('N')
    rr=''.join(rtem)
    rtem=[]
    for z in range(len(rr)-1,-1,-1):
        rtem.append(rr[z])
    r=''.join(rtem)
    seq2rr=rr
    seq2r=r
    global gcr2
    gn2=seq2.count('G')
    cn2=seq2.count('C')
    if len(seq2)!=0:
        gcr2=((gn2+cn2)/len(seq2))*100
    # ---- DNA modes: warn about characters that are not valid bases ----
    if w==0 or w==1 or w==2 or w==3 or w==4 or w==5 or w==6:
        error1=[]
        error2=[]
        for i in seq1:
            if i in base: continue
            else:
                error1.append(i)
        if len(error1)>0:
            msgbox.showerror('Error', '첫 번째 sequence에 염기가 아닌 문자가 들어있습니다!')
        for i in seq2:
            if i in base: continue
            else:
                error2.append(i)
        if len(error2)>0:
            # Bug fix: this called the nonexistent msgbox.sowerror, which
            # raised AttributeError instead of showing the warning.
            msgbox.showerror('Error','찾으실 sequence (or 두 번째 sequqnece)에 염기가 아닌 문자가 들어있습니다!')
    # ---- mode 0: DNA length and GC content ----
    if w==0:
        p.insert(END, 'DNA 서열의 길이\n')
        p.insert(END, '\n')
        p.insert(END, len(seq1))
        p.insert(END, '\n\n')
        p.insert(END, 'GC content\n')
        p.insert(END, '\n')
        p.insert(END, gcr1)
        p.insert(END, ' %')
        p.config(state=DISABLED)
    # ---- mode 1: find a subsequence, or print a positional range ----
    if w==1:
        l=cb3.get()
        l=findingmethod.index(l)
        if l==0:
            # Report every occurrence of seq2 inside seq1 (1-based positions);
            # found stretches are blanked out with '_' so index() advances.
            if seq2 in seq1:
                nos=seq1.count(seq2)
                p.insert(END, int(seq1.index(seq2))+1)
                p.insert(END, '\n')
                p.insert(END, '~')
                p.insert(END, '\n')
                p.insert(END, int(seq1.index(seq2))+len(seq2))
                p.insert(END, '\n')
                p.insert(END, '\n')
                underb=''
                for i in range(len(seq2)):
                    underb=underb+'_'
                temseq1=seq1.replace(seq2,underb,1)
                if nos>=2:
                    for _ in range(nos-1):
                        p.insert(END, int(temseq1.index(seq2))+1)
                        p.insert(END, '\n')
                        p.insert(END, '~')
                        p.insert(END, '\n')
                        p.insert(END, int(temseq1.index(seq2))+len(seq2))
                        p.insert(END, '\n')
                        p.insert(END, '\n')
                        temseq1=temseq1.replace(seq2,underb,1)
                p.config(state=DISABLED)
            else:
                p.insert(END, 'Cannot find')
                p.config(state=DISABLED)
        if l==1:
            # Print the bases between the start/end entries, grouped 10 per
            # chunk and 5 chunks per line.
            esg=es.get()
            etg=et.get()
            fseq=seq1[int(esg)-1:int(etg)]
            pseq=[]
            i=0
            while 10*i+10<len(fseq):
                pseq.append(fseq[10*i:10*i+10])
                i+=1
            pseq.append(fseq[10*i:len(fseq)])
            j=0
            while j<len(pseq)//5:
                p.insert(END, pseq[5*j:5*j+5])
                p.insert(END, '\n')
                j+=1
            p.insert(END, pseq[5*j:len(pseq)])
            p.insert(END, '\n')
            p.config(state=DISABLED)
    # ---- mode 2: nucleic-acid mass <-> moles conversion ----
    if w==2:
        na=seq1.count('A')
        nt=seq1.count('T')
        nc=seq1.count('C')
        ng=seq1.count('G')
        l=ln.get()
        if l!='':
            l=float(ln.get())
        else: l=0
        calt=caltype.index(caltcb.get())
        calm=calmethod.index(calmcb.get())
        moleg=mole.get()
        massg=mass.get()
        if moleg!='':
            moleg=float(mole.get())
        if massg!='':
            massg=float(mass.get())
        if calt==0:
            # dsDNA molecular weight: average per-bp formula when a length is
            # given, exact base-composition formula otherwise.
            if l>0:
                l=float(l)
                seqmw=617.96*l+36.04
            else:
                seqmw=(na+nt)*(313.23+304.21)+(nc+ng)*(329.23+289.2)+36.04
            if calm==0:
                mo=(massg/seqmw)*1000 #pmol
                p.insert(END, '\n')
                p.insert(END, 'Molecular weight\n')
                p.insert(END, '\n')
                p.insert(END, seqmw)
                p.insert(END, ' Da\n')
                p.insert(END, '\n')
                p.insert(END, 'Moles of dsDNA\n')
                p.insert(END, '\n')
                p.insert(END, mo)
                p.insert(END, ' pmol')
                p.config(state=DISABLED)
            if calm==1:
                ma=(moleg*seqmw)/1000 #ng
                p.insert(END, '\n')
                p.insert(END, 'Molecular weight\n')
                p.insert(END, '\n')
                p.insert(END, seqmw)
                p.insert(END, ' Da\n')
                p.insert(END, '\n')
                p.insert(END, 'dsDNA Mass\n')
                p.insert(END, '\n')
                p.insert(END, ma)
                p.insert(END, ' ng')
                p.config(state=DISABLED)
        if calt==1:
            # ssDNA molecular weight.
            if l>0:
                l=float(l)
                seqmw=308.97*l+18.02
            else:
                seqmw=na*313.23+nt*304.21+ng*329.23+nc*289.2+18.02
            if calm==0:
                mo=(massg/seqmw)*1000 #pmol
                p.insert(END, '\n')
                p.insert(END, 'Molecular weight\n')
                p.insert(END, '\n')
                p.insert(END, seqmw)
                p.insert(END, ' Da\n')
                p.insert(END, '\n')
                p.insert(END, 'Moles of ssDNA\n')
                p.insert(END, '\n')
                p.insert(END, mo)
                p.insert(END, ' pmol')
                p.config(state=DISABLED)
            if calm==1:
                ma=(moleg*seqmw)/1000 #ng
                p.insert(END, '\n')
                p.insert(END, 'Molecular weight\n')
                p.insert(END, '\n')
                p.insert(END, seqmw)
                p.insert(END, ' Da\n')
                p.insert(END, '\n')
                p.insert(END, 'ssDNA Mass\n')
                p.insert(END, '\n')
                p.insert(END, ma)
                p.insert(END, ' ng')
                p.config(state=DISABLED)
        if calt==2:
            # ssRNA molecular weight (the T count stands in for U after the
            # U->T normalisation above).
            if l>0:
                l=float(l)
                seqmw=321.47*l+18.02
            else:
                seqmw=na*329.2+nt*306.2+ng*345.2+nc*305.2+18.02
            if calm==0:
                mo=(massg/seqmw)*1000 #pmol
                p.insert(END, '\n')
                p.insert(END, 'Molecular weight\n')
                p.insert(END, '\n')
                p.insert(END, seqmw)
                p.insert(END, ' Da\n')
                p.insert(END, '\n')
                p.insert(END, 'Moles of ssRNA\n')
                p.insert(END, '\n')
                p.insert(END, mo)
                p.insert(END, ' pmol')
                p.config(state=DISABLED)
            if calm==1:
                ma=(moleg*seqmw)/1000 #ng
                p.insert(END, '\n')
                p.insert(END, 'Molecular weight\n')
                p.insert(END, '\n')
                p.insert(END, seqmw)
                p.insert(END, ' Da\n')
                p.insert(END, '\n')
                p.insert(END, 'ssRNA Mass\n')
                p.insert(END, '\n')
                p.insert(END, ma)
                p.insert(END, ' ng')
                p.config(state=DISABLED)
    # ---- mode 3: complementary strand, as separate strands or as dsDNA ----
    if w==3:
        pseq=[]
        pseqrr=[]
        i=0
        while 10*i+10<len(seq1):
            pseq.append(seq1[10*i:10*i+10])
            pseqrr.append(seq1rr[10*i:10*i+10])
            i+=1
        pseq.append(seq1[10*i:len(seq1)])
        pseqrr.append(seq1rr[10*i:len(seq1)])
        l=cb2.get()
        l=DNAform.index(l)
        if l==0:
            # Each strand on its own: all of seq1, then all of its complement.
            j=0
            while j<len(pseq)//5:
                p.insert(END, pseq[5*j:5*j+5])
                p.insert(END, '\n')
                j+=1
            p.insert(END, pseq[5*j:len(pseq)])
            p.insert(END, '\n')
            j=0
            while j<len(pseq)//5:
                p.insert(END, pseqrr[5*j:5*j+5])
                p.insert(END, '\n')
                j+=1
            p.insert(END, pseqrr[5*j:len(pseqrr)])
            p.config(state=DISABLED)
        if l==1:
            # dsDNA view: interleave the two strands line by line.
            j=0
            while j<len(pseq)//5:
                p.insert(END, pseq[5*j:5*j+5])
                p.insert(END, '\n')
                p.insert(END, pseqrr[5*j:5*j+5])
                p.insert(END, '\n')
                p.insert(END, '\n')
                j+=1
            p.insert(END, pseq[5*j:len(pseq)])
            p.insert(END, '\n')
            p.insert(END, pseqrr[5*j:len(pseqrr)])
            p.config(state=DISABLED)
    # ---- mode 4: print the sequence reversed ----
    if w==4:
        rseq=[]
        for z in range(len(seq1)-1,-1,-1):
            rseq.append(seq1[z])
        rseq1=''.join(rseq)
        pseq=[]
        i=0
        while 10*i+10<len(rseq1):
            pseq.append(rseq1[10*i:10*i+10])
            i+=1
        pseq.append(rseq1[10*i:len(rseq1)])
        i=0
        while 5*i+5<len(pseq):
            p.insert(END, pseq[5*i:5*i+5])
            p.insert(END,'\n')
            i+=1
        p.insert(END, pseq[5*i:len(pseq)])
        p.config(state=DISABLED)
    # ---- mode 5: six-frame translation (3 forward + 3 reverse frames) ----
    if w==5:
        # Forward reading frames, 3-letter codes.
        f1=[]
        f2=[]
        f3=[]
        for i in range(len(seq1)//3):
            f1.append(seq1[3*i:3*i+3])
            f2.append(seq1[3*i+1:3*i+4])
            f3.append(seq1[3*i+2:3*i+5])
        a3a1=[]
        a3a2=[]
        a3a3=[]
        a3a1r=[]
        a3a2r=[]
        a3a3r=[]
        for i in f1:
            if i in codon3.keys():
                a3a1.append(codon3[i])
            elif 'N' in i:
                a3a1.append('Xxx')
            else: continue
        for i in f2:
            if i in codon3.keys():
                a3a2.append(codon3[i])
            elif 'N' in i:
                a3a2.append('Xxx')
            else: continue
        for i in f3:
            if i in codon3.keys():
                a3a3.append(codon3[i])
            elif 'N' in i:
                a3a3.append('Xxx')
            else: continue
        # Reverse-complement reading frames, 3-letter codes.
        f1=[]
        f2=[]
        f3=[]
        for i in range(len(seq1)//3):
            f1.append(seq1r[3*i:3*i+3])
            f2.append(seq1r[3*i+1:3*i+4])
            f3.append(seq1r[3*i+2:3*i+5])
        for i in f1:
            if i in codon3.keys():
                a3a1r.append(codon3[i])
            elif 'N' in i:
                a3a1r.append('Xxx')
            else: continue
        for i in f2:
            if i in codon3.keys():
                a3a2r.append(codon3[i])
            elif 'N' in i:
                a3a2r.append('Xxx')
            else: continue
        for i in f3:
            if i in codon3.keys():
                a3a3r.append(codon3[i])
            elif 'N' in i:
                a3a3r.append('Xxx')
            else: continue
        # Forward frames, 1-letter codes.
        f1=[]
        f2=[]
        f3=[]
        for i in range(len(seq1)//3):
            f1.append(seq1[3*i:3*i+3])
            f2.append(seq1[3*i+1:3*i+4])
            f3.append(seq1[3*i+2:3*i+5])
        a1a1=[]
        a1a2=[]
        a1a3=[]
        a1a1r=[]
        a1a2r=[]
        a1a3r=[]
        for i in f1:
            if i in codon1.keys():
                a1a1.append(codon1[i])
            elif 'N' in i:
                a1a1.append('X')
            else: continue
        for i in f2:
            if i in codon1.keys():
                a1a2.append(codon1[i])
            elif 'N' in i:
                a1a2.append('X')
            else: continue
        for i in f3:
            if i in codon1.keys():
                a1a3.append(codon1[i])
            elif 'N' in i:
                a1a3.append('X')
            else: continue
        # Reverse-complement frames, 1-letter codes.
        f1=[]
        f2=[]
        f3=[]
        for i in range(len(seq1)//3):
            f1.append(seq1r[3*i:3*i+3])
            f2.append(seq1r[3*i+1:3*i+4])
            f3.append(seq1r[3*i+2:3*i+5])
        for i in f1:
            if i in codon1.keys():
                a1a1r.append(codon1[i])
            elif 'N' in i:
                a1a1r.append('X')
            else: continue
        for i in f2:
            if i in codon1.keys():
                a1a2r.append(codon1[i])
            elif 'N' in i:
                a1a2r.append('X')
            else: continue
        for i in f3:
            if i in codon1.keys():
                a1a3r.append(codon1[i])
            elif 'N' in i:
                a1a3r.append('X')
            else: continue
        # Display the six frames in the chosen abbreviation style.
        l=cb1.get()
        l=letter.index(l)
        if l==1:
            frame1=''.join(a3a1)
            frame2=''.join(a3a2)
            frame3=''.join(a3a3)
            frame1r=''.join(a3a1r)
            frame2r=''.join(a3a2r)
            frame3r=''.join(a3a3r)
            p.insert(END, 'Forward\n')
            p.insert(END, '\n')
            p.insert(END, 'frame1\n')
            p.insert(END, frame1+ '\n')
            p.insert(END, '\n')
            p.insert(END, 'frame2\n')
            p.insert(END, frame2+ '\n')
            p.insert(END, '\n')
            p.insert(END, 'frame3\n')
            p.insert(END, frame3+ '\n')
            p.insert(END, '\n')
            p.insert(END, 'Reverse\n')
            p.insert(END, '\n')
            p.insert(END, 'frame1r\n')
            p.insert(END, frame1r+ '\n')
            p.insert(END, '\n')
            p.insert(END, 'frame2r\n')
            p.insert(END, frame2r+ '\n')
            p.insert(END, '\n')
            p.insert(END, 'frame3r\n')
            p.insert(END, frame3r)
            p.config(state=DISABLED)
        if l==0:
            frame1=''.join(a1a1)
            frame2=''.join(a1a2)
            frame3=''.join(a1a3)
            frame1r=''.join(a1a1r)
            frame2r=''.join(a1a2r)
            frame3r=''.join(a1a3r)
            p.insert(END, 'Forward\n')
            p.insert(END, '\n')
            p.insert(END, 'frame1\n')
            p.insert(END, frame1+ '\n')
            p.insert(END, '\n')
            p.insert(END, 'frame2\n')
            p.insert(END, frame2+ '\n')
            p.insert(END, '\n')
            p.insert(END, 'frame3\n')
            p.insert(END, frame3+ '\n')
            p.insert(END, '\n')
            p.insert(END, 'Reverse\n')
            p.insert(END, '\n')
            p.insert(END, 'frame1r\n')
            p.insert(END, frame1r+ '\n')
            p.insert(END, '\n')
            p.insert(END, 'frame2r\n')
            p.insert(END, frame2r+ '\n')
            p.insert(END, '\n')
            p.insert(END, 'frame3r\n')
            p.insert(END, frame3r)
            p.config(state=DISABLED)
    # ---- mode 6: simple global alignment (Needleman-Wunsch style) ----
    if w==6:
        x=seq1
        y=seq2
        import random
        m=float(match.get())
        mi=float(mismatch.get())
        g=float(gap.get())
        # a: score matrix, first row/column pre-filled with gap penalties.
        a=[]
        for i in range(len(x)+1):
            a.append([])
        l=0
        for i in range(len(a)):
            a[i].append(l)
            l=l+g
        l=g
        for i in range(len(y)):
            a[0].append(l)
            l=l+g
        for l in range(len(y)):
            for i in range(1,len(x)+1):
                a[i].append(0)
        # k: traceback matrix. 1=diag, 2=left, 3=up; two-digit codes mark
        # two-way ties and 4 marks a three-way tie.
        k=[]
        for i in range(len(x)+1):
            k.append([])
        for i in range(len(a)):
            k[i].append(3)
        for i in range(len(y)):
            k[0].append(2)
        for l in range(len(y)):
            for i in range(1,len(x)+1):
                k[i].append(0)
        k[0][0]=0
        i=0
        j=0
        for _ in range(len(x)):
            i+=1
            j=0
            for _ in range(len(y)):
                j+=1
                if x[i-1]==y[j-1]:
                    s=m
                else:s=mi
                b=a[i-1][j-1]+s
                c=a[i][j-1]+g
                d=a[i-1][j]+g
                a[i][j]=max(b,c,d)
                # NOTE: because b is tested first, the codes 21/31/32 below
                # are unreachable; the traceback only ever sees 1/2/3/4/12/13/23.
                if max(b,c,d)==b:
                    if b!=c and b!=d:
                        k[i][j]=1
                    elif b==c and b!=d:
                        k[i][j]=12
                    elif b==d and b!=c:
                        k[i][j]=13
                    elif b==c==d:
                        k[i][j]=4
                elif max(b,c,d)==c:
                    if c!=b and c!=d:
                        k[i][j]=2
                    elif c==b and c!=d:
                        k[i][j]=21
                    elif c==d and c!=b:
                        k[i][j]=23
                    elif c==b==d:
                        k[i][j]=4
                elif max(b,c,d)==d:
                    if d!=b and d!=c:
                        k[i][j]=3
                    elif d==b and d!=c:
                        k[i][j]=31
                    elif d==c and d!=b:
                        k[i][j]=32
                    elif d==b==c:
                        k[i][j]=4
        # Sample many random tracebacks to enumerate tie-broken alignments.
        seqxxx=[]
        seqyyy=[]
        for _ in range(10000):
            i=len(x)
            j=len(y)
            seqx=[]
            seqy=[]
            while i!=0 and j!=0:
                if k[i][j]==1:
                    seqx.append(x[i-1])
                    seqy.append(y[j-1])
                    i+=-1
                    j+=-1
                elif k[i][j]==2:
                    seqx.append('-')
                    seqy.append(y[j-1])
                    j+=-1
                elif k[i][j]==3:
                    seqy.append('-')
                    seqx.append(x[i-1])
                    i+=-1
                elif k[i][j]==4:
                    num=random.randint(1,3)
                    if num==1:
                        seqx.append(x[i-1])
                        seqy.append(y[j-1])
                        i+=-1
                        j+=-1
                    elif num==2:
                        seqx.append('-')
                        seqy.append(y[j-1])
                        j+=-1
                    elif num==3:
                        seqy.append('-')
                        seqx.append(x[i-1])
                        i+=-1
                elif k[i][j]==12:
                    num=random.randint(1,2)
                    if num==1:
                        seqx.append(x[i-1])
                        seqy.append(y[j-1])
                        i+=-1
                        j+=-1
                    elif num==2:
                        seqx.append('-')
                        seqy.append(y[j-1])
                        j+=-1
                elif k[i][j]==13:
                    num=random.randrange(1,4,2)
                    if num==1:
                        seqx.append(x[i-1])
                        seqy.append(y[j-1])
                        i+=-1
                        j+=-1
                    elif num==3:
                        seqy.append('-')
                        seqx.append(x[i-1])
                        i+=-1
                elif k[i][j]==23:
                    num=random.randint(2,3)
                    if num==2:
                        seqx.append('-')
                        seqy.append(y[j-1])
                        j+=-1
                    elif num==3:
                        seqy.append('-')
                        seqx.append(x[i-1])
                        i+=-1
            if i!=0:
                while i!=0:
                    seqx.append(x[i-1])
                    seqy.append('-')
                    i+=-1
            if j!=0:
                while j!=0:
                    # Bug fix: this appended y[0] on every iteration, which
                    # filled the leading unaligned region with copies of the
                    # first base instead of the actual prefix of y.
                    seqy.append(y[j-1])
                    seqx.append('-')
                    j+=-1
            # Tracebacks were built back-to-front; reverse them.
            seqxx=[]
            seqyy=[]
            for i in range(len(seqx)-1,-1,-1):
                seqxx.append(seqx[i])
            for i in range(len(seqy)-1,-1,-1):
                seqyy.append(seqy[i])
            seqxx=''.join(seqxx)
            seqyy=''.join(seqyy)
            seqxxx.append(seqxx)
            seqyyy.append(seqyy)
        # De-duplicate the sampled alignments and show at most five.
        SEQ=[]
        for i,j in zip(seqxxx,seqyyy):
            SEQ.append((i,j))
        SEQ=set(SEQ)
        SEQ=list(SEQ)
        if len(SEQ)>5:
            for i in range(5):
                p.insert(END,SEQ[i][0])
                p.insert(END, '\n')
                p.insert(END,SEQ[i][1])
                p.insert(END,'\n')
                p.insert(END, '\n')
            p.insert(END,'그 외 '+str(len(SEQ)-5)+'개의 seq가 있습니다.')
        else:
            for i in range(len(SEQ)):
                p.insert(END,SEQ[i][0])
                p.insert(END, '\n')
                p.insert(END,SEQ[i][1])
                p.insert(END,'\n')
                p.insert(END, '\n')
        p.config(state=DISABLED)
    # ---- mode 7: amino-acid sequence length ----
    if w==7:
        p.insert(END, '아미노산 서열의 길이\n')
        p.insert(END, '\n')
        p.insert(END, len(seq1))
        p.config(state=DISABLED)
    # ---- mode 8: amino-acid position search (case-sensitive, same logic
    # as mode 1 but without the base normalisation) ----
    if w==8:
        l=cb3.get()
        l=findingmethod.index(l)
        if l==0:
            if seq2 in seq1:
                nos=seq1.count(seq2)
                p.insert(END, int(seq1.index(seq2))+1)
                p.insert(END, '\n')
                p.insert(END, '~')
                p.insert(END, '\n')
                p.insert(END, int(seq1.index(seq2))+len(seq2))
                p.insert(END, '\n')
                p.insert(END, '\n')
                underb=''
                for i in range(len(seq2)):
                    underb=underb+'_'
                temseq1=seq1.replace(seq2,underb,1)
                if nos>=2:
                    for _ in range(nos-1):
                        p.insert(END, int(temseq1.index(seq2))+1)
                        p.insert(END, '\n')
                        p.insert(END, '~')
                        p.insert(END, '\n')
                        p.insert(END, int(temseq1.index(seq2))+len(seq2))
                        p.insert(END, '\n')
                        p.insert(END, '\n')
                        temseq1=temseq1.replace(seq2,underb,1)
                p.config(state=DISABLED)
            else:
                p.insert(END, 'Cannot find')
                p.config(state=DISABLED)
        if l==1:
            esg=es.get()
            etg=et.get()
            fseq=seq1[int(esg)-1:int(etg)]
            pseq=[]
            i=0
            while 10*i+10<len(fseq):
                pseq.append(fseq[10*i:10*i+10])
                i+=1
            pseq.append(fseq[10*i:len(fseq)])
            j=0
            while j<len(pseq)//5:
                p.insert(END, pseq[5*j:5*j+5])
                p.insert(END, '\n')
                j+=1
            p.insert(END, pseq[5*j:len(pseq)])
            p.insert(END, '\n')
            p.config(state=DISABLED)
    # ---- mode 9: amino-acid 1-letter <-> 3-letter conversion ----
    if w==9:
        l=cb4.get()
        l=convert.index(l)
        if l==0:
            # 1-letter -> 3-letter; input is upper-cased here on purpose.
            errora=[]
            seq1=seq1.upper()
            for i in seq1:
                if i in oneto3:continue
                else:
                    errora.append(i)
            if len(errora)>0:
                msgbox.showerror('Error','아미노산의 이름이 잘못된 것이 있었습니다.')
            tseq1=[]
            for z in seq1:
                tseq1.append(oneto3[z])
            i=0
            tseq=[]
            while 10*i+10<len(tseq1):
                tseq.append(tseq1[10*i:10*i+10])
                i+=1
            tseq.append(tseq1[10*i:len(seq1)])
            for z in tseq:
                p.insert(END, z)
                p.insert(END,'\n')
        if l==1:
            # 3-letter -> 1-letter; relies on the mixed-case input preserved
            # by the w!=9 guard at the top of this function.
            errora=[]
            tseq=[]
            i=0
            while 3*i+3<len(seq1):
                tseq.append(seq1[3*i:3*i+3])
                i+=1
            tseq.append(seq1[3*i:len(seq1)])
            tseq1=[]
            for z in tseq:
                if z in threeto1:
                    tseq1.append(threeto1[z])
                else:
                    errora.append(z)
            if len(errora)>0:
                msgbox.showerror('Error','아미노산의 이름이 잘못된 것이 있습니다.')
            tseq1=''.join(tseq1)
            i=0
            tseq=[]
            while 10*i+10<len(tseq1):
                tseq.append(tseq1[10*i:10*i+10])
                i+=1
            tseq.append(tseq1[10*i:len(tseq1)])
            i=0
            while 5*i+5<len(tseq):
                p.insert(END, tseq[5*i:5*i+5])
                p.insert(END,'\n')
                i+=1
            p.insert(END, tseq[5*i:len(tseq)])
        p.config(state=DISABLED)
# Clear / run buttons for the two input boxes.
b1=Button(root, padx=4, pady=4, text='Clear', command=clear)
b1.grid(row=3, column=3, padx=5)
b2=Button(root, padx=4, pady=4, text='실행', command=a)
b2.grid(row=3, column=4, padx=5)
# Second sequence input box (search pattern or alignment partner).
sb2=Scrollbar(sframe)
sb2.pack(side='right',fill='y')
tt=Text(sframe, width=54, height=10,yscrollcommand=sb2.set)
tt.insert(END, '찾으실 서열을 입력해 주세요\n')
tt.insert(END, '혹은 \n')
tt.insert(END,'alignment할 두 번째 서열을 입력해주세요')
tt.pack(side='left')
sb2.config(command=tt.yview)
def b():
    """Commit the work-mode selection: store the mode index in the global
    ``w`` and show only the option widgets that this mode uses."""
    global w
    w = work.index(cb.get())
    # Hide every optional control first, then re-grid the ones this mode
    # needs.  (grid_forget on an already-hidden widget is a no-op, so the
    # final layout matches the per-mode layout exactly.)
    for widget in (cb1, cb2, cb3, cb4, aframe, eframe, lframe, caltcb, calmcb):
        widget.grid_forget()
    if w == 1 or w == 8:
        # Position search: search-method combobox + start/end entries.
        eframe.grid(row=2, column=0)
        cb3.grid(row=1, column=0)
    elif w == 2:
        # Mass <-> moles: NA type, conversion direction and length/amount.
        caltcb.grid(row=1, column=0)
        calmcb.grid(row=2, column=0)
        lframe.grid(row=2, column=0)
    elif w == 3:
        # Complement: ssDNA/dsDNA display choice.
        cb2.grid(row=1, column=0)
    elif w == 5:
        # Translation: 1-letter vs 3-letter output.
        cb1.grid(row=1, column=0)
    elif w == 6:
        # Alignment: match/mismatch/gap score entries.
        aframe.grid(row=2, column=0)
    elif w == 9:
        # Abbreviation conversion: direction choice.
        cb4.grid(row=1, column=0)
    # Modes 0, 4 and 7 need no extra controls.
# Mode-selection button (applies the combobox choice via b()).
b3=Button(wframe, padx=2, pady=2, text='선택', command=b)
b3.grid(row=0, column=1)
# Result output box; kept read-only except while a() is writing to it.
psb=Scrollbar(tframe)
psb.pack(side='right',fill='y')
p=Text(tframe, width=54, height=33,yscrollcommand=psb.set)
p.config(state=DISABLED)
p.pack(side='left')
psb.config(command=p.yview)
root.mainloop()
|
# -*- coding: utf-8 -*-
"""
/dms/lecture/views_show_complete.py
.. zeigt den Inhalt eines Ordners an
Django content Management System
Hans Rauch
hans.rauch@gmx.net
Die Programme des dms-Systems koennen frei genutzt und den spezifischen
Beduerfnissen entsprechend angepasst werden.
0.01 28.01.2007 Beginn der Arbeit
"""
import string
from django.shortcuts import render_to_response
from django.utils.translation import ugettext as _
from dms.utils import get_breadcrumb
from dms.utils import get_footer_email
from dms.utils import get_folderish_actions
from dms.folder.utils import get_folder_content
#from dms.roles import *
from dms_ext.extension import * # dms-Funktionen ueberschreiben
# -----------------------------------------------------
def lecture_show_complete(request, item_container):
    """Render the complete content of a lecture folder.

    All dmsSheet HTML items contained in the folder are concatenated into a
    single page and handed to the ``app/lecture/base.html`` template.
    """

    def get_section_view(items, sections):
        """Concatenate title/subtitle/text of every dmsSheet HTML item."""
        content = ''
        for i in items:
            # Only HTML items belonging to the dmsSheet app contribute.
            if i.item.name.find('.html') > 0 and i.item.app.name == 'dmsSheet':
                content += '<h3 class="top-border" style="padding-top:0.2em;">' + i.item.title + '</h3>\n'
                if i.item.sub_title != '':
                    content += '<h4>' + i.item.sub_title + '</h4>\n'
                content += i.item.text + '\n'
        return content

    app_name = 'lecture'
    items, sections, d_sections = get_folder_content(item_container)
    # 'show_more' toggles display of the extended description text.
    if 'show_more' in request.GET:
        show_more = request.GET['show_more']
    else:
        show_more = False
    try:
        user_perms = UserEditPerms(request.user.username, request.path)
    except Exception:
        # No permission system available -> no extra edit permissions.
        user_perms = []
    vars = { 'content_div_style'     : 'frame-main-manage',
             'this_site_title'       : item_container.item.title,
             'site'                  : item_container.container.site,
             'no_top_main_navigation': True,
             'title'                 : item_container.item.title,
             'sub_title'             : item_container.item.sub_title,
             'slot_right_info'       : item_container.item.info_slot_right,
             'action'                : get_folderish_actions(request, user_perms, item_container,
                                                             app_name, False),
             'breadcrumb'            : get_breadcrumb(item_container),
             'path'                  : item_container.container.path,
             'show_more'             : show_more,
             'text'                  : item_container.item.text,
             'text_more'             : item_container.item.text_more,
             'image_url'             : item_container.item.image_url,
             'image_url_url'         : item_container.item.image_url_url,
             'image_extern'          : item_container.item.image_extern,
             'is_wide'               : item_container.item.is_wide,
             'is_important'          : item_container.item.is_important,
             'content'               : get_section_view(items, sections),
             'footer_email'          : get_footer_email(item_container),
             'last_modified'         : item_container.get_last_modified(),
           }
    return render_to_response('app/lecture/base.html', vars)
|
# -*- coding: utf-8 -*-
import base64
import json
import sys
import time
import warnings
from concurrent.futures import ThreadPoolExecutor, wait, as_completed
from operator import itemgetter
import dlib
import cv2
import os
import glob
import numpy as np
from iface import IFace
class FaceDlib(IFace):
    """Face comparison backend built on dlib's 128-d ResNet face descriptors.

    Descriptors are cached on disk under my_dlib/cache_data/ so repeated runs
    skip detection for already-seen images.
    """

    def __init__(self):
        super().__init__()
        self.current_path = os.getcwd()  # project root
        self.predictor_path = self.current_path + "/my_dlib/model/shape_predictor_68_face_landmarks.dat"
        self.face_rec_model_path = self.current_path + "/my_dlib/model/dlib_face_recognition_resnet_model_v1.dat"
        self.dataPath = self.current_path + "/my_dlib/cache_data/"
        # Load detector and models once; they are reused for every image.
        self.detector = dlib.get_frontal_face_detector()
        self.shape_predictor = dlib.shape_predictor(self.predictor_path)
        self.face_rec_model = dlib.face_recognition_model_v1(self.face_rec_model_path)
        self.executor = ThreadPoolExecutor(max_workers=8)
        self.result_min_value = 0.5  # Euclidean distances below this count as a match

    def init(self, source_img_info, target_img_list, result_list):
        """Prepare a comparison run; returns self so calls can be chained."""
        os.makedirs(os.path.join(self.current_path, 'my_dlib/cache_data/'), exist_ok=True)
        self.result_list = result_list
        self.source_img_info = source_img_info
        self.target_img_list = target_img_list
        self.source_img_data = self.__get_tezheng(source_img_info)
        self.error_list = []
        self.thread_list = []
        return self

    def working(self):
        """Compare the source face against every target image in parallel."""
        try:
            print('开始处理数据,总共:' + str(len(self.target_img_list)) + '条')
            self.__start_thread(self.target_img_list)
            self.__show_thread_log()
            if len(self.result_list) > 0:
                # Sort matches by ascending distance (tuple index 2).
                self.result_list.sort(key=itemgetter(2))
            print('---------任务结束------------')
        except Exception as ex:
            info = sys.exc_info()
            msg = '{}:{}'.format(info[0], info[1])
            warnings.warn(msg)
        finally:
            self.executor.shutdown(False)  # don't block on outstanding work
            self.save_log(self.source_img_info['imgurl'].split('/')[-1].split('.')[0], self.result_list, "dlib")
            self.save_error_log(self.error_list)

    def __chk_photo_for(self, target_info):
        # Record a match when the descriptor distance is small enough.
        result = self.__compare_data(self.source_img_data, self.__get_tezheng(target_info))
        if result < self.result_min_value:
            self.result_list.append((target_info['imgurl'], target_info['username'], result))

    # Submit one comparison task per target image to the thread pool.
    def __start_thread(self, work_list):
        self.thread_list.clear()
        for img_info in work_list:
            self.thread_list.append(self.executor.submit(self.__chk_photo_for, img_info))

    # Print progress as the pool completes futures.
    def __show_thread_log(self):
        for i, future in enumerate(as_completed(self.thread_list)):
            print('完成:' + str(i + 1))
        print('---------线程结束------------')

    def __get_tezheng(self, img_info):
        """Return the 128-d face descriptor for one image, using the on-disk
        cache when possible; returns an empty array on detection failure."""
        filePath = self.dataPath + img_info['imgurl'].split('/')[-1].split('.')[0] + '_' + img_info["username"] + '.npy'
        if os.path.isfile(filePath):
            vectors = np.load(filePath)
            if vectors.size > 0:
                return vectors
        # No cache hit: decode the base64 image and compute the descriptor.
        img_data = base64.b64decode(img_info['buf'])
        # FIX: np.frombuffer replaces the deprecated np.fromstring.
        img_array = np.frombuffer(img_data, np.uint8)
        # NOTE(review): cv2.COLOR_BGR2RGB is a cvtColor code, not an imread
        # flag; kept as-is to preserve behaviour — confirm intent (likely
        # cv2.IMREAD_COLOR was meant).
        img = cv2.imdecode(img_array, cv2.COLOR_BGR2RGB)
        dets = self.detector(img, 1)  # face detection
        # FIX: compare ints with !=, never `is not` (identity).
        if len(dets) != 1:
            warnings.warn("图片检测的人脸数为: {}".format(len(dets)))
            self.error_list.append((img_info['username'], img_info['imgurl']))
            return np.array([])
        face = dets[0]
        shape = self.shape_predictor(img, face)
        vectors = np.array(self.face_rec_model.compute_face_descriptor(img, shape))
        np.save(filePath, vectors)
        return vectors

    # Euclidean distance between two descriptors; small => same person.
    def __compare_data(self, data1, data2):
        diff = 0
        for i in range(len(data1)):
            diff += (data1[i] - data2[i]) ** 2
        return np.sqrt(diff)
|
# Importing Libraries
import serial
import time
import cv2 as cv
def open_cr(key):
    """Send a one-character command string to the OpenCR board over serial.

    :param key: str command (e.g. 'f'); must be text, it is UTF-8 encoded here
    """
    opencr.write(bytes(key, 'utf-8'))
    # Drain the board's response line; the original bound it to an unused
    # variable. The read itself matters (it paces the protocol), so keep it.
    opencr.readline()
    time.sleep(0.5)  # give the board time before the next command
def key_push():
    """Show the control panel window and forward pressed keys to the board.

    Loops on cv.waitKey until q/ESC quits. FIXES: cv.waitKey returns an int
    keycode (-1 when no key is pressed); the original passed that int straight
    to open_cr(), where bytes(key, 'utf-8') raises TypeError — now we skip the
    no-key case and convert real keycodes with chr().
    """
    panel = cv.imread('mcu_arduino/hyunjoowhoon/Turtlebot3_logo.jpg')
    cv.imshow('arduino control', panel)
    while True:
        key = cv.waitKey(1)
        if key == -1:
            continue  # no key pressed during this tick
        if key == 27 or key == ord('q'):
            # key q / ESC : LED - wave blink, buzzer, 5x — then quit.
            open_cr(chr(key))
            break
        elif key == ord('f'):
            # key f : LED - 22, 23 blink, 1x (placeholder: original sent nothing)
            continue
        elif key == ord('b'):
            # key b : LED - 24, 25 blink, 2x (placeholder: original sent nothing)
            continue
        elif key == ord('t'):
            # key t : LED - 22, 25 blink, 3x (placeholder: original sent nothing)
            continue
        elif key == ord('l'):
            # key l : LED - 23, 24 blink, 4x (placeholder: original sent nothing)
            continue
        open_cr(chr(key))  # any other key: forward as-is
    return
if __name__ == '__main__':
    # Open the serial link to the OpenCR board (port may need adjusting).
    opencr = serial.Serial(port='/dev/ttyACM1', baudrate=115200, timeout=1)
    try:
        key_push()
        # pushed = input("알맞은 입력을 눌러주세요.( f / b / l / r, 취소: q 혹은 esc) : ")
        cv.destroyAllWindows()
    except Exception as e:
        # Report any failure (missing port, OpenCV error, ...) to the user.
        print(e, " 라는 오류가 있어요!!!")
        print(type(e))
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views.generic import ListView, DetailView, CreateView, FormView
from django.urls import reverse_lazy
from django.db import transaction
from .forms import PostForm, SubscriptionForm, UnSubscriptionForm, FeedForm
from .models import Blog, Post, Feed, Subscription
class PostListView(ListView):
    """Read-only list of all posts, rendered with blog/posts.html as `posts`."""
    model = Post
    template_name = 'blog/posts.html'
    context_object_name = 'posts'
class PostDetailView(DetailView):
    """Detail page for a single post (default template), exposed as `post`."""
    model = Post
    context_object_name = 'post'
@method_decorator(login_required(login_url=reverse_lazy('admin:index')), name='dispatch')
class MyPostsView(CreateView):
    """Create a post and, on the same page, list the current user's posts."""
    model = Post
    template_name = 'blog/my_posts.html'
    form_class = PostForm
    success_url = reverse_lazy('blog:my-posts')

    def get_context_data(self, **kwargs):
        # Expose the author's existing posts alongside the creation form.
        kwargs['posts'] = Post.objects.filter(blog__author=self.request.user)
        # Zero-argument super() for consistency with form_valid() below.
        return super().get_context_data(**kwargs)

    def form_valid(self, form):
        # Attach the new post to the author's blog; Blog.DoesNotExist
        # propagates if the user has no blog yet.
        form.instance.blog = Blog.objects.get(author=self.request.user)
        return super().form_valid(form)
@method_decorator(login_required(login_url=reverse_lazy('admin:index')), name='dispatch')
class FeedView(FormView):
    """Show the user's feed and mark the selected entries as read."""
    template_name = 'blog/feed.html'
    form_class = FeedForm
    success_url = reverse_lazy('blog:feed')

    def get_form(self):
        # The form is scoped to the current user's feed entries.
        return self.get_form_class()(self.request.user, **self.get_form_kwargs())

    def form_valid(self, form):
        selected = form.cleaned_data['feeds']
        for entry in selected:
            entry.is_read = True
        # Persist all flag changes in a single query.
        Feed.objects.bulk_update(selected, ['is_read'])
        return super().form_valid(form)
@method_decorator(login_required(login_url=reverse_lazy('admin:index')), name='dispatch')
class BlogsView(FormView):
    """List blogs and subscribe the current user to the ones selected."""
    template_name = 'blog/blogs.html'
    form_class = SubscriptionForm
    success_url = reverse_lazy('blog:blogs')

    def get_form(self):
        # The form is scoped to blogs the current user can subscribe to.
        return self.get_form_class()(self.request.user, **self.get_form_kwargs())

    @transaction.atomic
    def form_valid(self, form):
        # Create each subscription and backfill the feed with the blog's
        # existing posts; all-or-nothing thanks to the atomic block.
        current_user = self.request.user
        for chosen_blog in form.cleaned_data['blogs']:
            new_subscription = Subscription(user=current_user, blog=chosen_blog)
            new_subscription.save()
            existing_posts = Post.objects.filter(blog=chosen_blog)
            Feed.objects.bulk_create(
                [Feed(user=current_user, post=existing_post, subscription=new_subscription)
                 for existing_post in existing_posts])
        return super().form_valid(form)
@method_decorator(login_required(login_url=reverse_lazy('admin:index')), name='dispatch')
class SubscriptionsView(FormView):
    """List the user's subscriptions and delete the ones selected."""
    template_name = 'blog/subscriptions.html'
    form_class = UnSubscriptionForm
    success_url = reverse_lazy('blog:subscriptions')

    def get_form(self):
        # The form is scoped to the current user's subscriptions.
        return self.get_form_class()(self.request.user, **self.get_form_kwargs())

    def form_valid(self, form):
        # Deleting the queryset cascades to the related Feed rows.
        form.cleaned_data['subscriptions'].delete()
        return super().form_valid(form)
|
from common import unittest2
import pyuv
class MultiHandleTest(unittest2.TestCase):
    """Runs prepare/idle/check/timer handles on one loop and verifies each
    callback fired exactly once and all four handles were closed."""

    def test_multihandle1(self):
        self.close_cb_called = 0
        self.prepare_cb_called = 0
        self.idle_cb_called = 0
        self.check_cb_called = 0
        self.timer_cb_called = 0

        def on_close(handle):
            self.close_cb_called += 1

        def make_handler(counter_name):
            # Each handler bumps its counter, then stops and closes its handle.
            def handler(handle, *args):
                setattr(self, counter_name, getattr(self, counter_name) + 1)
                handle.stop()
                handle.close(on_close)
            return handler

        loop = pyuv.Loop.default_loop()
        prepare = pyuv.Prepare(loop)
        prepare.start(make_handler('prepare_cb_called'))
        idle = pyuv.Idle(loop)
        idle.start(make_handler('idle_cb_called'))
        check = pyuv.Check(loop)
        check.start(make_handler('check_cb_called'))
        timer = pyuv.Timer(loop)
        timer.start(make_handler('timer_cb_called'), 0.1, 0)
        loop.run()

        self.assertEqual(self.prepare_cb_called, 1)
        self.assertEqual(self.idle_cb_called, 1)
        self.assertEqual(self.check_cb_called, 1)
        self.assertEqual(self.close_cb_called, 4)
if __name__ == '__main__':
    # Run this module's tests directly with verbose output.
    unittest2.main(verbosity=2)
|
from threading import Lock
import pygame
from config import Config
from pygame.threads import Thread
import math, random
class PhysicsController(object):
    """Simulates balls on a tiltable plane: gravity/friction along the tilt,
    wall bounces and elastic ball-to-ball collisions, with the work split
    over several worker threads."""

    def __init__(self, config, ballsList):
        self.config = config
        self.threadList = []
        self.lock = Lock()  # guards numCollision updates from worker threads
        self.ballsList = ballsList
        self.degreeX = 0  # plane tilt around X, in degrees
        self.degreeY = 0  # plane tilt around Y, in degrees
        self.flAutoControl = False
        self.counter = 0
        self.deltaTime = 0.001  # integration step
        self.numCollision = 0
        # Partition the balls into equal contiguous groups, one per thread.
        if len(self.ballsList) % self.config.app.NUMBER_THREADS != 0:
            self.groupSize = (len(self.ballsList) // self.config.app.NUMBER_THREADS) + 1
        else:
            self.groupSize = (len(self.ballsList) // self.config.app.NUMBER_THREADS)
        self.createThreads()

    def createThreads(self):
        # One worker per configured thread; each handles one ball group.
        self.threadList = [Thread(target=self.calculatePhysics, args=(i,)) for i in
                           range(self.config.app.NUMBER_THREADS)]

    def startThreads(self):
        # NOTE(review): Thread objects are single-use — createThreads() must
        # be called again before every startThreads() invocation.
        self.flRun = True
        for thread in self.threadList: thread.start()
        for thread in self.threadList: thread.join()

    def caclulateSpeed(self, ball, degree, axis):
        """Return the updated speed of `ball` along `axis` ('X'/'Y') under the
        current tilt `degree` (degrees), including wall bounces."""
        speed = 0
        if axis == 'X':
            speed = ball.speedX
        elif axis == 'Y':
            speed = ball.speedY
        else:
            return speed
        direction = 1
        if degree < 0: direction = -1
        if degree != 0:
            # FIX: degrees convert to radians via * pi / 180; the original
            # multiplied by 180 / pi, producing wrong slope forces.
            rad = abs(degree) * math.pi / 180
            mg = ball.mass * self.config.physics.GRAVITY * math.sin(rad)
            friction = ball.mass * self.config.physics.GRAVITY * \
                math.cos(rad) * self.config.physics.FRICTION
            # The original if/else here had two byte-identical branches;
            # collapsed into the single shared statement.
            speed += direction * abs(mg - friction) / ball.mass
        elif degree == 0:
            # Flat plane: friction decays the speed toward zero, clamped so
            # friction alone never reverses the direction of motion.
            if speed > 0:
                speed -= self.config.physics.FRICTION * self.config.physics.GRAVITY
                if speed < 0: speed = 0
            if speed < 0:
                speed += self.config.physics.FRICTION * self.config.physics.GRAVITY
                if speed > 0: speed = 0
        if axis == 'X':
            # Bounce off the vertical walls with configured energy loss.
            if ball.x + ball.radius > self.config.win.AREA_WIDTH:
                speed *= -(1 - self.config.physics.COLLISION_ENERGY_LOSS)
                self.lock.acquire()
                self.numCollision += 1
                self.lock.release()
            if ball.x < ball.radius:
                speed *= -(1 - self.config.physics.COLLISION_ENERGY_LOSS)
                self.lock.acquire()
                self.numCollision += 1
                self.lock.release()
        elif axis == 'Y':
            # Bounce off the horizontal walls with configured energy loss.
            if ball.y + ball.radius > self.config.win.AREA_HEIGHT:
                self.lock.acquire()
                self.numCollision += 1
                self.lock.release()
                speed *= -(1 - self.config.physics.COLLISION_ENERGY_LOSS)
            if ball.y < ball.radius:
                self.lock.acquire()
                self.numCollision += 1
                self.lock.release()
                speed *= -(1 - self.config.physics.COLLISION_ENERGY_LOSS)
        return speed

    def getAvgSpeed(self):
        """Average scalar speed over all balls."""
        sumSpeed = 0
        for ball in self.ballsList:
            sumSpeed += ball.speed()
        return sumSpeed / len(self.ballsList)

    def distance(self, ball1, ball2):
        """Euclidean distance between two ball centres."""
        return math.sqrt((ball1.x - ball2.x) ** 2 + (ball1.y - ball2.y) ** 2)

    def caclulateWallCollision(self, ball):
        """Clamp a ball back inside the play area after integration."""
        if ball.x + ball.radius > self.config.win.AREA_WIDTH:
            ball.x = self.config.win.AREA_WIDTH - ball.radius
        if ball.x - ball.radius < 0:
            ball.x = ball.radius
        if ball.y + ball.radius > self.config.win.AREA_HEIGHT:
            ball.y = self.config.win.AREA_HEIGHT - ball.radius
        if ball.y - ball.radius < 0:
            ball.y = ball.radius

    def caclulateStaticCollision(self, ball1, ball2):
        """Push the smaller of two overlapping balls out of the overlap."""
        if ball1 != ball2:
            overlap = ball1.radius + ball2.radius - self.distance(ball1, ball2)
            smallBall = ball1
            bigBall = ball2
            if smallBall.radius > bigBall.radius:
                smallBall, bigBall = bigBall, smallBall
            if overlap > 0:
                theta = math.atan2((bigBall.y - smallBall.y), (bigBall.x - smallBall.x))
                smallBall.x -= overlap * math.cos(theta)
                smallBall.y -= overlap * math.sin(theta)

    def caclulateBallCollision(self):
        """Resolve elastic collisions between every pair of balls."""
        for i in range(len(self.ballsList)):
            # FIX: start at i + 1 — the original started at i, so every ball
            # "collided" with itself each pass (distance 0 < 2 * radius).
            for j in range(i + 1, len(self.ballsList)):
                ball1 = self.ballsList[i]
                ball2 = self.ballsList[j]
                if self.distance(ball1, ball2) < ball1.radius + ball2.radius:
                    # 2-D elastic collision: decompose along the line of
                    # centres (phi) and exchange the normal components.
                    phi = math.atan2((ball2.y - ball1.y), (ball2.x - ball1.x))
                    theta1 = ball1.angleBetweenSpeedXY()
                    theta2 = ball2.angleBetweenSpeedXY()
                    speed1 = ball1.speed()
                    speed2 = ball2.speed()
                    newSpeedX1 = (speed1 * math.cos(theta1 - phi) * (
                            ball1.mass - ball2.mass) + 2 * ball2.mass * speed2 * math.cos(theta2 - phi)) / (
                            ball1.mass + ball2.mass) * math.cos(phi) + speed1 * math.sin(
                            theta1 - phi) * math.cos(phi + math.pi / 2)
                    newSpeedY1 = (speed1 * math.cos(theta1 - phi) * (
                            ball1.mass - ball2.mass) + 2 * ball2.mass * speed2 * math.cos(theta2 - phi)) / (
                            ball1.mass + ball2.mass) * math.sin(phi) + speed1 * math.sin(
                            theta1 - phi) * math.sin(phi + math.pi / 2)
                    newSpeedX2 = (speed2 * math.cos(theta2 - phi) * (
                            ball2.mass - ball1.mass) + 2 * ball1.mass * speed1 * math.cos(theta1 - phi)) / (
                            ball1.mass + ball2.mass) * math.cos(phi) + speed2 * math.sin(
                            theta2 - phi) * math.cos(phi + math.pi / 2)
                    newSpeedY2 = (speed2 * math.cos(theta2 - phi) * (
                            ball2.mass - ball1.mass) + 2 * ball1.mass * speed1 * math.cos(theta1 - phi)) / (
                            ball1.mass + ball2.mass) * math.sin(phi) + speed2 * math.sin(
                            theta2 - phi) * math.sin(phi + math.pi / 2)
                    ball1.speedX = newSpeedX1
                    ball1.speedY = newSpeedY1
                    ball2.speedX = newSpeedX2
                    ball2.speedY = newSpeedY2
                    self.numCollision += 1
                    self.caclulateStaticCollision(ball1, ball2)

    def calculatePhysics(self, indexThread):
        """Worker body: advance the balls of group `indexThread` one step."""
        for i in range(self.groupSize * indexThread,
                       self.groupSize * (indexThread + 1)):
            if i < len(self.ballsList):
                self.ballsList[i].speedX = self.caclulateSpeed(self.ballsList[i], self.degreeX, 'X')
                self.ballsList[i].speedY = self.caclulateSpeed(self.ballsList[i], self.degreeY, 'Y')
                # Thread 0 resolves ball-to-ball collisions for everyone.
                if indexThread == 0 and self.config.physics.COLLISION:
                    self.caclulateBallCollision()
                if indexThread == 0: self.flRun = False
                # NOTE(review): busy-wait barrier on flRun — burns CPU;
                # a threading.Barrier would be the cleaner primitive.
                while self.flRun: pass  # wait other threads on first (indexThread = 0)
                self.caclulateWallCollision(self.ballsList[i])
                self.ballsList[i].x += self.ballsList[i].speedX * self.deltaTime
                self.ballsList[i].y += self.ballsList[i].speedY * self.deltaTime

    def autoControl(self):
        """Randomly re-tilt one axis every few frames when auto mode is on."""
        if self.flAutoControl:
            if self.counter == 0:
                degree = random.randint(-90, 90)
                self.counter = random.randint(0, 50)
                if random.randint(0, 1) == 0:
                    self.degreeX = degree
                else:
                    self.degreeY = degree
            else:
                self.counter -= 1

    def drawBalls(self, gameDisplay):
        """Draw every ball onto the given surface."""
        for ball in self.ballsList: ball.drawBall(gameDisplay)
class Gyroscope(object):
    """On-screen gyroscope widget: a circle whose needle shows the tilt."""

    def __init__(self):
        self.x = 170   # widget centre, screen coordinates
        self.y = 535
        self.lineEndX = 0
        self.lineEndY = 0
        self.radius = 50
        self.color = (0, 200, 200)
        self.TO_RADIANS = math.pi / 180

    def calculateOrientation(self, degreeX, degreeY):
        """Recompute the needle end point from the tilt angles (degrees)."""
        # FIX: use the configured centre (self.x / self.y) instead of
        # repeating the magic numbers 170 / 535 from __init__.
        self.lineEndX = self.radius * math.cos((degreeX - 90) * self.TO_RADIANS) + self.x
        self.lineEndY = self.radius * math.sin(degreeY * self.TO_RADIANS) + self.y
        # Clamp the needle tip back onto the circle if it drifted outside.
        distance = math.sqrt((self.x - self.lineEndX) ** 2 + (self.y - self.lineEndY) ** 2)
        overlap = distance - self.radius
        if overlap > 0:
            angle = math.atan2((self.lineEndY - self.y), (self.lineEndX - self.x))
            self.lineEndX -= overlap * math.cos(angle)
            self.lineEndY -= overlap * math.sin(angle)

    def draw(self, surface):
        # NOTE(review): pygame.gfxdraw normally requires an explicit
        # `import pygame.gfxdraw`; confirm the main module performs it.
        pygame.gfxdraw.circle(surface, int(self.x), int(self.y), int(self.radius + 2), self.color)
        pygame.gfxdraw.line(surface, int(self.x), int(self.y), int(self.lineEndX), int(self.lineEndY), self.color)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import subprocess
import pwndbg.commands
import pwndbg.which
@pwndbg.commands.Command
@pwndbg.commands.OnlyWithFile
def checksec(file=None):
    '''
    Prints out the binary security settings. Attempts to call the binjitsu
    checksec first, and then falls back to checksec.sh.
    '''
    local_path = file or pwndbg.file.get_file(pwndbg.proc.exe)
    for candidate in ('checksec', 'checksec.sh'):
        resolved = pwndbg.which.which(candidate)
        if resolved:
            # First tool found on PATH wins; its exit status is returned.
            return subprocess.call([resolved, '--file', local_path])
    print('Could not find checksec or checksec.sh in $PATH.')
|
'''
Parsing - Exercise 6
The script reads a multiple sequence record FASTA file and
writes the sequences to a new file separated by a blank line.
'''
# Context managers close both files even on error (the original never
# closed the input file at all).
with open('sprot_prot.fasta') as fasta, open('seqs.txt', 'w') as seqs:
    first_record = True
    for line in fasta:
        if line.startswith('>'):
            # Header line: start a new record. FIX: only write the blank
            # separator between records — the original also wrote one before
            # the first record, leaving a leading blank line in the output.
            if not first_record:
                seqs.write('\n')
            first_record = False
        else:
            # Sequence line: re-emit stripped of surrounding whitespace.
            seqs.write(line.strip() + '\n')
|
"""Photometer
These functions handle data files from spectrophotometers for easy and direct import
The functions are:
* uprtek_import_spectrum - Imports the spectrum from a UPRtek spectrophotometer
* uprtek_import_r_vals - Imports the R values generated by a UPRtek spectrophotometer
* uprtek_file_import - Imports the UPRtek file and extracts the selected data
"""
import csv
import itertools
"""Imports a UPRtek data file and outputs a dictionary with the intensities for each wavelength
Note: UPRtek names these files as .xls, but they are actually formatted as tab-delimited text files
Note2: This has only been tested with the UPRtek CV600 and MK350N. Others may have a different file format
Parameters
----------
filename : String
The filename to import
Returns
-------
dict
A dictionary with the wavelengths and intensities, e.g.:
{380: 0.048, 381: 0.051, ...}
"""
def uprtek_import_spectrum(filename: str):
return uprtek_file_import(filename, 'spd')
"""Imports a UPRtek data file and outputs a dictionary with the R-Values
Note: UPRtek names these files as .xls, but they are actually formatted as tab-delimited text files
Note2: This has only been tested with the UPRtek CV600 and MK350N. Others may have a different file format
Parameters
----------
filename : String
The filename to import
Returns
-------
dict
A dictionary with the R-Values, e.g.:
{'R1': 98.887482, 'R2': 99.234245, ...}
"""
def uprtek_import_r_vals(filename: str):
return uprtek_file_import(filename, 'r_vals')
"""Imports a UPRtek data file and outputs a dictionary with the selected data
Note: UPRtek names these files as .xls, but they are actually formatted as tab-delimited text files
Note2: This has only been tested with the UPRtek CV600 and MK350N. Others may have a different file format
Parameters
----------
filename : String
The filename to import
returntype: dict
The type of data to return. Currently, either 'spd' or 'r_vals'
Returns
-------
dict
A dictionary with the selected data
"""
def uprtek_file_import(filename: str, returntype: dict):
with open(filename, mode='r', encoding='us-ascii') as csvFile:
reader = csv.reader(csvFile, delimiter='\t')
# Get UPRtek model from the first line, then set rows for reading data
model = next(reader)[1]
if model == 'CV600':
spd_start = 40
r_start = 18
r_end = 33
elif model == 'MK350NPLUS':
spd_start = 46
r_start = 26
r_end = 41
else:
print('UPRtek model not available. Using the MK350N format, which could result in errors!')
spd_start = 46
r_start = 26
r_end = 41
# Extract the data and return
if returntype == 'spd':
spd = {}
for row in itertools.islice(reader, spd_start, None):
spd[int(row[0][0:3])] = float(row[1])
return spd
elif returntype == 'r_vals':
r_vals = {}
for row in itertools.islice(reader, r_start, r_end):
r_vals[row[0]] = float(row[1])
return r_vals
|
#!/usr/bin/env vpython3
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for presubmit_support.py and presubmit_canned_checks.py."""
# pylint: disable=no-member,E1103
from __future__ import unicode_literals
import functools
import itertools
import logging
import multiprocessing
import os
import random
import re
import sys
import tempfile
import threading
import time
import unittest
if sys.version_info.major == 2:
from cStringIO import StringIO
import mock
import urllib2 as urllib_request
BUILTIN_OPEN = '__builtin__.open'
else:
from io import StringIO
from unittest import mock
import urllib.request as urllib_request
BUILTIN_OPEN = 'builtins.open'
_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, _ROOT)
from testing_support.test_case_utils import TestCaseUtils
import auth
import gclient_utils
import git_cl
import git_common as git
import json
import owners
import owners_client
import owners_finder
import presubmit_support as presubmit
import rdb_wrapper
import scm
import subprocess2 as subprocess
# Shortcut.
presubmit_canned_checks = presubmit.presubmit_canned_checks
# Access to a protected member XXX of a client class
# pylint: disable=protected-access
class MockTemporaryFile(object):
  """Stand-in for the objects returned by tempfile.NamedTemporaryFile():
  exposes only a .name attribute plus no-op context-manager behaviour."""

  def __init__(self, name):
    self.name = name

  def __enter__(self):
    return self

  def __exit__(self, *_exc_info):
    return None
class PresubmitTestsBase(TestCaseUtils, unittest.TestCase):
  """Sets up and tears down the mocks but doesn't test anything as-is."""
  presubmit_text = """
def CheckChangeOnUpload(input_api, output_api):
  if input_api.change.tags.get('ERROR'):
    return [output_api.PresubmitError("!!")]
  if input_api.change.tags.get('PROMPT_WARNING'):
    return [output_api.PresubmitPromptWarning("??")]
  else:
    return ()

def PostUploadHook(gerrit, change, output_api):
  if change.tags.get('ERROR'):
    return [output_api.PresubmitError("!!")]
  if change.tags.get('PROMPT_WARNING'):
    return [output_api.PresubmitPromptWarning("??")]
  else:
    return ()
"""
  presubmit_trymaster = """
def GetPreferredTryMasters(project, change):
  return %s
"""

  presubmit_diffs = """
diff --git %(filename)s %(filename)s
index fe3de7b..54ae6e1 100755
--- %(filename)s   2011-02-09 10:38:16.517224845 -0800
+++ %(filename)s   2011-02-09 10:38:53.177226516 -0800
@@ -1,6 +1,5 @@
 this is line number 0
 this is line number 1
-this is line number 2 to be deleted
 this is line number 3
 this is line number 4
 this is line number 5
@@ -8,7 +7,7 @@
 this is line number 7
 this is line number 8
 this is line number 9
-this is line number 10 to be modified
+this is line number 10
 this is line number 11
 this is line number 12
 this is line number 13
@@ -21,9 +20,8 @@
 this is line number 20
 this is line number 21
 this is line number 22
-this is line number 23
-this is line number 24
-this is line number 25
+this is line number 23.1
+this is line number 25.1
 this is line number 26
 this is line number 27
 this is line number 28
@@ -31,6 +29,7 @@
 this is line number 30
 this is line number 31
 this is line number 32
+this is line number 32.1
 this is line number 33
 this is line number 34
 this is line number 35
@@ -38,14 +37,14 @@
 this is line number 37
 this is line number 38
 this is line number 39
-
 this is line number 40
-this is line number 41
+this is line number 41.1
 this is line number 42
 this is line number 43
 this is line number 44
 this is line number 45
+
 this is line number 46
 this is line number 47
-this is line number 48
+this is line number 48.1
 this is line number 49
"""

  def setUp(self):
    super(PresubmitTestsBase, self).setUp()

    class FakeChange(object):
      def __init__(self, obj):
        self._root = obj.fake_root_dir
        self.issue = 0
      def RepositoryRoot(self):
        return self._root
      def UpstreamBranch(self):
        return 'upstream'

    presubmit._ASKED_FOR_FEEDBACK = False
    self.fake_root_dir = self.RootDir()
    self.fake_change = FakeChange(self)
    self.rdb_client = mock.MagicMock()

    mock.patch('gclient_utils.FileRead').start()
    mock.patch('gclient_utils.FileWrite').start()
    mock.patch('json.load').start()
    # FIX: this patch (and the os.getcwd one below) was created but never
    # started, so it had no effect; start it like every sibling patch.
    mock.patch('multiprocessing.cpu_count', lambda: 2).start()
    mock.patch('gerrit_util.IsCodeOwnersEnabledOnHost').start()
    mock.patch('os.chdir').start()
    mock.patch('os.getcwd', self.RootDir).start()
    mock.patch('os.listdir').start()
    mock.patch('os.path.abspath', lambda f: f).start()
    mock.patch('os.path.isfile').start()
    mock.patch('os.remove').start()
    mock.patch('presubmit_support._parse_files').start()
    mock.patch('presubmit_support.rdb_wrapper.client',
               return_value=self.rdb_client).start()
    mock.patch('presubmit_support.sigint_handler').start()
    mock.patch('presubmit_support.time_time', return_value=0).start()
    mock.patch('presubmit_support.warn').start()
    mock.patch('random.randint').start()
    mock.patch('scm.GIT.GenerateDiff').start()
    mock.patch('scm.determine_scm').start()
    mock.patch('subprocess2.Popen').start()
    mock.patch('sys.stderr', StringIO()).start()
    mock.patch('sys.stdout', StringIO()).start()
    mock.patch('tempfile.NamedTemporaryFile').start()
    mock.patch('threading.Timer').start()
    if sys.version_info.major == 2:
      mock.patch('urllib2.urlopen').start()
    else:
      mock.patch('urllib.request.urlopen').start()
    self.addCleanup(mock.patch.stopall)

  def checkstdout(self, value):
    # sys.stdout is patched to a StringIO in setUp, so this compares
    # everything the code under test printed.
    self.assertEqual(sys.stdout.getvalue(), value)
class PresubmitUnittest(PresubmitTestsBase):
"""General presubmit_support.py tests (excluding InputApi and OutputApi)."""
_INHERIT_SETTINGS = 'inherit-review-settings-ok'
fake_root_dir = '/foo/bar'
  def testCannedCheckFilter(self):
    """canned_check_filter temporarily replaces the named canned checks."""
    canned = presubmit.presubmit_canned_checks
    orig = canned.CheckOwners
    with presubmit.canned_check_filter(['CheckOwners']):
      # Inside the context the check is swapped for a no-op returning [].
      self.assertNotEqual(canned.CheckOwners, orig)
      self.assertEqual(canned.CheckOwners(None, None), [])
    # The original function is restored on exit.
    self.assertEqual(canned.CheckOwners, orig)
  def testListRelevantPresubmitFiles(self):
    """Collects every PRESUBMIT.py from the root down to each file's dir."""
    files = [
        'blat.cc',
        os.path.join('foo', 'haspresubmit', 'yodle', 'smart.h'),
        os.path.join('moo', 'mat', 'gat', 'yo.h'),
        os.path.join('foo', 'luck.h'),
    ]
    known_files = [
        os.path.join(self.fake_root_dir, 'PRESUBMIT.py'),
        os.path.join(self.fake_root_dir, 'foo', 'haspresubmit', 'PRESUBMIT.py'),
        os.path.join(
            self.fake_root_dir, 'foo', 'haspresubmit', 'yodle', 'PRESUBMIT.py'),
    ]
    # Fake filesystem: only known_files exist, and only dirs_with_presubmit
    # directories list a PRESUBMIT.py entry.
    os.path.isfile.side_effect = lambda f: f in known_files
    dirs_with_presubmit = [
        self.fake_root_dir,
        os.path.join(self.fake_root_dir, 'foo', 'haspresubmit'),
        os.path.join(self.fake_root_dir, 'foo', 'haspresubmit', 'yodle'),
    ]
    os.listdir.side_effect = (
        lambda d: ['PRESUBMIT.py'] if d in dirs_with_presubmit else [])
    presubmit_files = presubmit.ListRelevantPresubmitFiles(
        files, self.fake_root_dir)
    self.assertEqual(presubmit_files, known_files)
  def testListUserPresubmitFiles(self):
    """PRESUBMIT-user.py is picked up, but PRESUBMIT_test.py is not."""
    files = ['blat.cc',]
    os.path.isfile.side_effect = lambda f: 'PRESUBMIT' in f
    os.listdir.return_value = [
        'PRESUBMIT.py', 'PRESUBMIT_test.py', 'PRESUBMIT-user.py']
    presubmit_files = presubmit.ListRelevantPresubmitFiles(
        files, self.fake_root_dir)
    self.assertEqual(presubmit_files, [
        os.path.join(self.fake_root_dir, 'PRESUBMIT.py'),
        os.path.join(self.fake_root_dir, 'PRESUBMIT-user.py'),
    ])
  def testListRelevantPresubmitFilesInheritSettings(self):
    """The inherit-review-settings-ok marker makes the search continue above
    the repository root."""
    sys_root_dir = self._OS_SEP
    root_dir = os.path.join(sys_root_dir, 'foo', 'bar')
    inherit_path = os.path.join(root_dir, self._INHERIT_SETTINGS)
    files = [
        'test.cc',
        os.path.join('moo', 'test2.cc'),
        os.path.join('zoo', 'test3.cc')
    ]
    known_files = [
        inherit_path,
        os.path.join(sys_root_dir, 'foo', 'PRESUBMIT.py'),
        os.path.join(sys_root_dir, 'foo', 'bar', 'moo', 'PRESUBMIT.py'),
    ]
    os.path.isfile.side_effect = lambda f: f in known_files
    dirs_with_presubmit = [
        os.path.join(sys_root_dir, 'foo'),
        os.path.join(sys_root_dir, 'foo', 'bar','moo'),
    ]
    os.listdir.side_effect = (
        lambda d: ['PRESUBMIT.py'] if d in dirs_with_presubmit else [])
    presubmit_files = presubmit.ListRelevantPresubmitFiles(files, root_dir)
    # The PRESUBMIT.py *above* root_dir is included thanks to the marker.
    self.assertEqual(presubmit_files, [
        os.path.join(sys_root_dir, 'foo', 'PRESUBMIT.py'),
        os.path.join(sys_root_dir, 'foo', 'bar', 'moo', 'PRESUBMIT.py')
    ])
  def testTagLineRe(self):
    """TAG_LINE_RE parses 'KEY=value' description lines, ignoring whitespace."""
    m = presubmit.Change.TAG_LINE_RE.match(' BUG =1223, 1445 \t')
    self.assertIsNotNone(m)
    self.assertEqual(m.group('key'), 'BUG')
    self.assertEqual(m.group('value'), '1223, 1445')
def testGitChange(self):
description_lines = ('Hello there',
'this is a change',
'BUG=123',
'and some more regular text \t')
unified_diff = [
'diff --git binary_a.png binary_a.png',
'new file mode 100644',
'index 0000000..6fbdd6d',
'Binary files /dev/null and binary_a.png differ',
'diff --git binary_d.png binary_d.png',
'deleted file mode 100644',
'index 6fbdd6d..0000000',
'Binary files binary_d.png and /dev/null differ',
'diff --git binary_md.png binary_md.png',
'index 6fbdd6..be3d5d8 100644',
'GIT binary patch',
'delta 109',
'zcmeyihjs5>)(Opwi4&WXB~yyi6N|G`(i5|?i<2_a@)OH5N{Um`D-<SM@g!_^W9;SR',
'zO9b*W5{pxTM0slZ=F42indK9U^MTyVQlJ2s%1BMmEKMv1Q^gtS&9nHn&*Ede;|~CU',
'CMJxLN',
'',
'delta 34',
'scmV+-0Nww+y#@BX1(1W0gkzIp3}CZh0gVZ>`wGVcgW(Rh;SK@ZPa9GXlK=n!',
'',
'diff --git binary_m.png binary_m.png',
'index 6fbdd6d..be3d5d8 100644',
'Binary files binary_m.png and binary_m.png differ',
'diff --git boo/blat.cc boo/blat.cc',
'new file mode 100644',
'index 0000000..37d18ad',
'--- boo/blat.cc',
'+++ boo/blat.cc',
'@@ -0,0 +1,5 @@',
'+This is some text',
'+which lacks a copyright warning',
'+but it is nonetheless interesting',
'+and worthy of your attention.',
'+Its freshness factor is through the roof.',
'diff --git floo/delburt.cc floo/delburt.cc',
'deleted file mode 100644',
'index e06377a..0000000',
'--- floo/delburt.cc',
'+++ /dev/null',
'@@ -1,14 +0,0 @@',
'-This text used to be here',
'-but someone, probably you,',
'-having consumed the text',
'- (absorbed its meaning)',
'-decided that it should be made to not exist',
'-that others would not read it.',
'- (What happened here?',
'-was the author incompetent?',
'-or is the world today so different from the world',
'- the author foresaw',
'-and past imaginination',
'- amounts to rubble, insignificant,',
'-something to be tripped over',
'-and frustrated by)',
'diff --git foo/TestExpectations foo/TestExpectations',
'index c6e12ab..d1c5f23 100644',
'--- foo/TestExpectations',
'+++ foo/TestExpectations',
'@@ -1,12 +1,24 @@',
'-Stranger, behold:',
'+Strange to behold:',
' This is a text',
' Its contents existed before.',
'',
'-It is written:',
'+Weasel words suggest:',
' its contents shall exist after',
' and its contents',
' with the progress of time',
' will evolve,',
'- snaillike,',
'+ erratically,',
' into still different texts',
'-from this.',
'\ No newline at end of file',
'+from this.',
'+',
'+For the most part,',
'+I really think unified diffs',
'+are elegant: the way you can type',
'+diff --git inside/a/text inside/a/text',
'+or something silly like',
'+@@ -278,6 +278,10 @@',
'+and have this not be interpreted',
'+as the start of a new file',
'+or anything messed up like that,',
'+because you parsed the header',
'+correctly.',
'\ No newline at end of file',
'']
files = [('A ', 'binary_a.png'),
('D ', 'binary_d.png'),
('M ', 'binary_m.png'),
('M ', 'binary_md.png'), # Binary w/ diff
('A ', 'boo/blat.cc'),
('D ', 'floo/delburt.cc'),
('M ', 'foo/TestExpectations')]
known_files = [
os.path.join(self.fake_root_dir, *path.split('/'))
for op, path in files if not op.startswith('D')]
os.path.isfile.side_effect = lambda f: f in known_files
scm.GIT.GenerateDiff.return_value = '\n'.join(unified_diff)
change = presubmit.GitChange(
'mychange',
'\n'.join(description_lines),
self.fake_root_dir,
files,
0,
0,
None,
upstream=None)
self.assertIsNotNone(change.Name() == 'mychange')
self.assertIsNotNone(change.DescriptionText() ==
'Hello there\nthis is a change\nand some more regular text')
self.assertIsNotNone(change.FullDescriptionText() ==
'\n'.join(description_lines))
self.assertIsNotNone(change.BugsFromDescription() == ['123'])
self.assertIsNotNone(len(change.AffectedFiles()) == 7)
self.assertIsNotNone(len(change.AffectedFiles()) == 7)
self.assertIsNotNone(len(change.AffectedFiles(include_deletes=False)) == 5)
self.assertIsNotNone(len(change.AffectedFiles(include_deletes=False)) == 5)
# Note that on git, there's no distinction between binary files and text
# files; everything that's not a delete is a text file.
affected_text_files = change.AffectedTestableFiles()
self.assertIsNotNone(len(affected_text_files) == 5)
local_paths = change.LocalPaths()
expected_paths = [os.path.normpath(f) for op, f in files]
self.assertEqual(local_paths, expected_paths)
actual_rhs_lines = []
for f, linenum, line in change.RightHandSideLines():
actual_rhs_lines.append((f.LocalPath(), linenum, line))
f_blat = os.path.normpath('boo/blat.cc')
f_test_expectations = os.path.normpath('foo/TestExpectations')
expected_rhs_lines = [
(f_blat, 1, 'This is some text'),
(f_blat, 2, 'which lacks a copyright warning'),
(f_blat, 3, 'but it is nonetheless interesting'),
(f_blat, 4, 'and worthy of your attention.'),
(f_blat, 5, 'Its freshness factor is through the roof.'),
(f_test_expectations, 1, 'Strange to behold:'),
(f_test_expectations, 5, 'Weasel words suggest:'),
(f_test_expectations, 10, ' erratically,'),
(f_test_expectations, 13, 'from this.'),
(f_test_expectations, 14, ''),
(f_test_expectations, 15, 'For the most part,'),
(f_test_expectations, 16, 'I really think unified diffs'),
(f_test_expectations, 17, 'are elegant: the way you can type'),
(f_test_expectations, 18, 'diff --git inside/a/text inside/a/text'),
(f_test_expectations, 19, 'or something silly like'),
(f_test_expectations, 20, '@@ -278,6 +278,10 @@'),
(f_test_expectations, 21, 'and have this not be interpreted'),
(f_test_expectations, 22, 'as the start of a new file'),
(f_test_expectations, 23, 'or anything messed up like that,'),
(f_test_expectations, 24, 'because you parsed the header'),
(f_test_expectations, 25, 'correctly.')]
self.assertEqual(expected_rhs_lines, actual_rhs_lines)
def testInvalidChange(self):
  """A files list whose entries are bare strings, not (action, path) pairs,
  must be rejected by GitChange."""
  self.assertRaises(
      AssertionError,
      presubmit.GitChange,
      'mychange',
      'description',
      self.fake_root_dir,
      ['foo/blat.cc', 'bar'],
      0,
      0,
      None)
def testExecPresubmitScript(self):
"""Checks ExecPresubmitScript entry-point selection and result validation.

An upload-mode executer must ignore CheckChangeOnCommit hooks and a
commit-mode executer must ignore CheckChangeOnUpload hooks; a hook that
returns anything other than result objects raises PresubmitFailure.
"""
description_lines = ('Hello there',
'this is a change',
'BUG=123')
files = [
['A', 'foo\\blat.cc'],
]
fake_presubmit = os.path.join(self.fake_root_dir, 'PRESUBMIT.py')
change = presubmit.Change(
'mychange',
'\n'.join(description_lines),
self.fake_root_dir,
files,
0,
0,
None)
# committing=False: only CheckChangeOnUpload hooks would be run.
executer = presubmit.PresubmitExecuter(
change, False, None, presubmit.GerritAccessor())
# An empty script produces no results.
self.assertFalse(executer.ExecPresubmitScript('', fake_presubmit))
# No error if no on-upload entry point
self.assertFalse(executer.ExecPresubmitScript(
('def CheckChangeOnCommit(input_api, output_api):\n'
' return (output_api.PresubmitError("!!"))\n'),
fake_presubmit
))
# committing=True: only CheckChangeOnCommit hooks would be run.
executer = presubmit.PresubmitExecuter(
change, True, None, presubmit.GerritAccessor())
# No error if no on-commit entry point
self.assertFalse(executer.ExecPresubmitScript(
('def CheckChangeOnUpload(input_api, output_api):\n'
' return (output_api.PresubmitError("!!"))\n'),
fake_presubmit
))
# BUG=123 is present in the change description, so no error is produced.
self.assertFalse(executer.ExecPresubmitScript(
('def CheckChangeOnUpload(input_api, output_api):\n'
' if not input_api.change.BugsFromDescription():\n'
' return (output_api.PresubmitError("!!"))\n'
' else:\n'
' return ()'),
fake_presubmit
))
# A hook must return a sequence of result objects; a bare string is fatal.
self.assertRaises(presubmit.PresubmitFailure,
executer.ExecPresubmitScript,
'def CheckChangeOnCommit(input_api, output_api):\n'
' return "foo"',
fake_presubmit)
# The canned checks pass for this change (it has a bug and a description).
self.assertFalse(executer.ExecPresubmitScript(
'def CheckChangeOnCommit(input_api, output_api):\n'
' results = []\n'
' results.extend(input_api.canned_checks.CheckChangeHasBugField(\n'
' input_api, output_api))\n'
' results.extend(input_api.canned_checks.CheckChangeHasNoUnwantedTags(\n'
' input_api, output_api))\n'
' results.extend(input_api.canned_checks.CheckChangeHasDescription(\n'
' input_api, output_api))\n'
' return results\n',
fake_presubmit))
# A list whose elements are not result objects is fatal too.
self.assertRaises(presubmit.PresubmitFailure,
executer.ExecPresubmitScript,
'def CheckChangeOnCommit(input_api, output_api):\n'
' return ["foo"]',
fake_presubmit)
def testExecPresubmitScriptWithResultDB(self):
"""Checks that each hook's outcome is reported to the ResultDB sink."""
description_lines = ('Hello there', 'this is a change', 'BUG=123')
files = [['A', 'foo\\blat.cc']]
fake_presubmit = os.path.join(self.fake_root_dir, 'PRESUBMIT.py')
change = presubmit.Change('mychange', '\n'.join(description_lines),
self.fake_root_dir, files, 0, 0, None)
executer = presubmit.PresubmitExecuter(
change, True, None, presubmit.GerritAccessor())
# The mocked rdb_wrapper client hands this sink to the executer.
sink = self.rdb_client.__enter__.return_value = mock.MagicMock()
# STATUS_PASS on success
executer.ExecPresubmitScript(
'def CheckChangeOnCommit(input_api, output_api):\n'
' return [output_api.PresubmitResult("test")]\n', fake_presubmit)
sink.report.assert_called_with('CheckChangeOnCommit',
rdb_wrapper.STATUS_PASS, 0)
# STATUS_FAIL on exception
sink.reset_mock()
self.assertRaises(
Exception, executer.ExecPresubmitScript,
'def CheckChangeOnCommit(input_api, output_api):\n'
' raise Exception("boom")', fake_presubmit)
sink.report.assert_called_with('CheckChangeOnCommit',
rdb_wrapper.STATUS_FAIL, 0)
# STATUS_FAIL on fatal error
sink.reset_mock()
executer.ExecPresubmitScript(
'def CheckChangeOnCommit(input_api, output_api):\n'
' return [output_api.PresubmitError("error")]\n', fake_presubmit)
sink.report.assert_called_with('CheckChangeOnCommit',
rdb_wrapper.STATUS_FAIL, 0)
def testExecPresubmitScriptTemporaryFilesRemoval(self):
"""Temporary files created via input_api are removed after the hook runs."""
# Each CreateTemporaryFile() call consumes one of these mocks, in order.
tempfile.NamedTemporaryFile.side_effect = [
MockTemporaryFile('baz'),
MockTemporaryFile('quux'),
]
fake_presubmit = os.path.join(self.fake_root_dir, 'PRESUBMIT.py')
executer = presubmit.PresubmitExecuter(
self.fake_change, False, None, presubmit.GerritAccessor())
# A hook starts with no temporary files registered.
self.assertEqual([], executer.ExecPresubmitScript(
('def CheckChangeOnUpload(input_api, output_api):\n'
' if len(input_api._named_temporary_files):\n'
' return (output_api.PresubmitError("!!"),)\n'
' return ()\n'),
fake_presubmit
))
result = executer.ExecPresubmitScript(
('def CheckChangeOnUpload(input_api, output_api):\n'
' with input_api.CreateTemporaryFile():\n'
' pass\n'
' with input_api.CreateTemporaryFile():\n'
' pass\n'
' return [output_api.PresubmitResult(None, f)\n'
' for f in input_api._named_temporary_files]\n'),
fake_presubmit
)
self.assertEqual(['baz', 'quux'], [r._items for r in result])
# Both temporary files must have been deleted once the script finished.
self.assertEqual(
os.remove.mock_calls, [mock.call('baz'), mock.call('quux')])
def testDoPostUploadExecuter(self):
  """A post-upload hook that returns no results prints nothing and exits 0."""
  gclient_utils.FileRead.return_value = self.presubmit_text
  os.listdir.return_value = ['PRESUBMIT.py']
  os.path.isfile.side_effect = lambda path: 'PRESUBMIT.py' in path
  self.assertEqual(
      0,
      presubmit.DoPostUploadExecuter(
          change=self.ExampleChange(), gerrit_obj=None, verbose=False))
  self.assertEqual('', sys.stdout.getvalue())
def testDoPostUploadExecuterWarning(self):
"""A post-upload hook that warns ('??') still returns exit code 0."""
path = os.path.join(self.fake_root_dir, 'PRESUBMIT.py')
os.path.isfile.side_effect = lambda f: f == path
os.listdir.return_value = ['PRESUBMIT.py']
gclient_utils.FileRead.return_value = self.presubmit_text
change = self.ExampleChange(extra_lines=['PROMPT_WARNING=yes'])
self.assertEqual(
0,
presubmit.DoPostUploadExecuter(
change=change, gerrit_obj=None, verbose=False))
# The warning text is printed, but the exit code stays 0.
self.assertEqual(
'\n'
'** Post Upload Hook Messages **\n'
'??\n'
'\n',
sys.stdout.getvalue())
def testDoPostUploadExecuterError(self):
  """An erroring post-upload hook prints the '!!' message and exits 1.

  Renamed: this test was originally also called
  testDoPostUploadExecuterWarning, which silently shadowed the
  identically-named warning test defined earlier in the class, so that
  test never ran. The distinct name lets unittest collect both.
  """
  path = os.path.join(self.fake_root_dir, 'PRESUBMIT.py')
  os.path.isfile.side_effect = lambda f: f == path
  os.listdir.return_value = ['PRESUBMIT.py']
  gclient_utils.FileRead.return_value = self.presubmit_text
  # ERROR=yes makes the example hook emit a fatal result.
  change = self.ExampleChange(extra_lines=['ERROR=yes'])
  self.assertEqual(
      1,
      presubmit.DoPostUploadExecuter(
          change=change, gerrit_obj=None, verbose=False))
  self.assertEqual(
      '\n'
      '** Post Upload Hook Messages **\n'
      '!!\n'
      '\n',
      sys.stdout.getvalue())
def testDoPresubmitChecksNoWarningsOrErrors(self):
"""A clean change passes the root and nested PRESUBMIT.py with no messages."""
haspresubmit_path = os.path.join(
self.fake_root_dir, 'haspresubmit', 'PRESUBMIT.py')
root_path = os.path.join(self.fake_root_dir, 'PRESUBMIT.py')
os.path.isfile.side_effect = lambda f: f in [root_path, haspresubmit_path]
os.listdir.return_value = ['PRESUBMIT.py']
gclient_utils.FileRead.return_value = self.presubmit_text
# Make a change which will have no warnings.
change = self.ExampleChange(extra_lines=['STORY=http://tracker/123'])
self.assertEqual(
0,
presubmit.DoPresubmitChecks(
change=change, committing=False, verbose=True,
default_presubmit=None, may_prompt=False,
gerrit_obj=None, json_output=None))
# Neither errors ('!!') nor warnings ('??') were printed.
self.assertEqual(sys.stdout.getvalue().count('!!'), 0)
self.assertEqual(sys.stdout.getvalue().count('??'), 0)
self.assertEqual(sys.stdout.getvalue().count(
'Running Python 2 presubmit upload checks ...\n'), 1)
def testDoPresubmitChecksJsonOutput(self):
"""All result categories and more_cc are serialized to the JSON output file."""
fake_error = 'Missing LGTM'
fake_error_items = '["!", "!!", "!!!"]'
fake_error_long_text = "Error long text..."
fake_error2 = 'This failed was found in file fake.py'
fake_error2_items = '["!!!", "!!", "!"]'
fake_error2_long_text = " Error long text" * 3
fake_warning = 'Line 88 is more than 80 characters.'
fake_warning_items = '["W", "w"]'
fake_warning_long_text = 'Warning long text...'
fake_notify = 'This is a dry run'
fake_notify_items = '["N"]'
fake_notify_long_text = 'Notification long text...'
# Upload hook emits two errors, one warning and one notification; the
# commit hook is never reached (committing=False below).
always_fail_presubmit_script = """
def CheckChangeOnUpload(input_api, output_api):
output_api.more_cc = ['me@example.com']
return [
output_api.PresubmitError("%s",%s, "%s"),
output_api.PresubmitError("%s",%s, "%s"),
output_api.PresubmitPromptWarning("%s",%s, "%s"),
output_api.PresubmitNotifyResult("%s",%s, "%s")
]
def CheckChangeOnCommit(input_api, output_api):
raise Exception("Test error")
""" % (fake_error, fake_error_items, fake_error_long_text,
fake_error2, fake_error2_items, fake_error2_long_text,
fake_warning, fake_warning_items, fake_warning_long_text,
fake_notify, fake_notify_items, fake_notify_long_text
)
os.path.isfile.return_value = False
os.listdir.side_effect = [[], ['PRESUBMIT.py']]
random.randint.return_value = 0
change = self.ExampleChange(extra_lines=['ERROR=yes'])
temp_path = 'temp.json'
# Expected structure of the JSON written to json_output.
fake_result = {
'notifications': [
{
'message': fake_notify,
'items': json.loads(fake_notify_items),
'fatal': False,
'long_text': fake_notify_long_text
}
],
'errors': [
{
'message': fake_error,
'items': json.loads(fake_error_items),
'fatal': True,
'long_text': fake_error_long_text
},
{
'message': fake_error2,
'items': json.loads(fake_error2_items),
'fatal': True,
'long_text': fake_error2_long_text
}
],
'warnings': [
{
'message': fake_warning,
'items': json.loads(fake_warning_items),
'fatal': False,
'long_text': fake_warning_long_text
}
],
'more_cc': ['me@example.com'],
}
fake_result_json = json.dumps(fake_result, sort_keys=True)
# Errors make DoPresubmitChecks return 1.
self.assertEqual(
1,
presubmit.DoPresubmitChecks(
change=change, committing=False, verbose=True,
default_presubmit=always_fail_presubmit_script,
may_prompt=False, gerrit_obj=None, json_output=temp_path))
gclient_utils.FileWrite.assert_called_with(temp_path, fake_result_json)
def testDoPresubmitChecksPromptsAfterWarnings(self):
"""With may_prompt=True, warnings ask the user; 'n' fails, 'y' passes."""
presubmit_path = os.path.join(self.fake_root_dir, 'PRESUBMIT.py')
haspresubmit_path = os.path.join(
self.fake_root_dir, 'haspresubmit', 'PRESUBMIT.py')
os.path.isfile.side_effect = (
lambda f: f in [presubmit_path, haspresubmit_path])
os.listdir.return_value = ['PRESUBMIT.py']
random.randint.return_value = 1
gclient_utils.FileRead.return_value = self.presubmit_text
# Make a change with a single warning.
change = self.ExampleChange(extra_lines=['PROMPT_WARNING=yes'])
# say no to the warning
with mock.patch('sys.stdin', StringIO('n\n')):
self.assertEqual(
1,
presubmit.DoPresubmitChecks(
change=change, committing=False, verbose=True,
default_presubmit=None, may_prompt=True,
gerrit_obj=None, json_output=None))
self.assertEqual(sys.stdout.getvalue().count('??'), 2)
# Clear the captured output before the second run.
sys.stdout.truncate(0)
# say yes to the warning
with mock.patch('sys.stdin', StringIO('y\n')):
self.assertEqual(
0,
presubmit.DoPresubmitChecks(
change=change, committing=False, verbose=True,
default_presubmit=None, may_prompt=True,
gerrit_obj=None, json_output=None))
self.assertEqual(sys.stdout.getvalue().count('??'), 2)
self.assertEqual(sys.stdout.getvalue().count(
'Running Python 2 presubmit upload checks ...\n'), 1)
def testDoPresubmitChecksWithWarningsAndNoPrompt(self):
"""With may_prompt=False, warnings are printed but do not fail the checks."""
presubmit_path = os.path.join(self.fake_root_dir, 'PRESUBMIT.py')
haspresubmit_path = os.path.join(
self.fake_root_dir, 'haspresubmit', 'PRESUBMIT.py')
os.path.isfile.side_effect = (
lambda f: f in [presubmit_path, haspresubmit_path])
os.listdir.return_value = ['PRESUBMIT.py']
gclient_utils.FileRead.return_value = self.presubmit_text
random.randint.return_value = 1
change = self.ExampleChange(extra_lines=['PROMPT_WARNING=yes'])
# There is no input buffer and may_prompt is set to False.
self.assertEqual(
0,
presubmit.DoPresubmitChecks(
change=change, committing=False, verbose=True,
default_presubmit=None, may_prompt=False,
gerrit_obj=None, json_output=None))
# A warning is printed, and should_continue is True.
self.assertEqual(sys.stdout.getvalue().count('??'), 2)
self.assertEqual(sys.stdout.getvalue().count('(y/N)'), 0)
self.assertEqual(sys.stdout.getvalue().count(
'Running Python 2 presubmit upload checks ...\n'), 1)
def testDoPresubmitChecksNoWarningPromptIfErrors(self):
"""When errors are present, no warning prompt is shown and the result is 1."""
presubmit_path = os.path.join(self.fake_root_dir, 'PRESUBMIT.py')
haspresubmit_path = os.path.join(
self.fake_root_dir, 'haspresubmit', 'PRESUBMIT.py')
os.path.isfile.side_effect = (
lambda f: f in [presubmit_path, haspresubmit_path])
os.listdir.return_value = ['PRESUBMIT.py']
gclient_utils.FileRead.return_value = self.presubmit_text
random.randint.return_value = 1
change = self.ExampleChange(extra_lines=['ERROR=yes'])
self.assertEqual(
1,
presubmit.DoPresubmitChecks(
change=change, committing=False, verbose=True,
default_presubmit=None, may_prompt=True,
gerrit_obj=None, json_output=None))
# Errors ('!!') are printed; no warnings and no interactive prompt.
self.assertEqual(sys.stdout.getvalue().count('??'), 0)
self.assertEqual(sys.stdout.getvalue().count('!!'), 2)
self.assertEqual(sys.stdout.getvalue().count('(y/N)'), 0)
self.assertEqual(sys.stdout.getvalue().count(
'Running Python 2 presubmit upload checks ...\n'), 1)
def testDoDefaultPresubmitChecksAndFeedback(self):
"""Without any PRESUBMIT.py, the default script runs; errors print feedback."""
always_fail_presubmit_script = """
def CheckChangeOnUpload(input_api, output_api):
return [output_api.PresubmitError("!!")]
def CheckChangeOnCommit(input_api, output_api):
raise Exception("Test error")
"""
# No PRESUBMIT.py exists anywhere, so the default script is used.
os.path.isfile.return_value = False
os.listdir.side_effect = (
lambda d: [] if d == self.fake_root_dir else ['PRESUBMIT.py'])
random.randint.return_value = 0
change = self.ExampleChange(extra_lines=['STORY=http://tracker/123'])
with mock.patch('sys.stdin', StringIO('y\n')):
self.assertEqual(
1,
presubmit.DoPresubmitChecks(
change=change, committing=False, verbose=True,
default_presubmit=always_fail_presubmit_script,
may_prompt=False, gerrit_obj=None, json_output=None))
# The full captured output, including the "was this useful" feedback blurb.
text = (
'Running Python 2 presubmit upload checks ...\n'
'Warning, no PRESUBMIT.py found.\n'
'Running default presubmit script.\n'
'\n'
'** Presubmit ERRORS **\n!!\n\n'
'Was the presubmit check useful? If not, run "git cl presubmit -v"\n'
'to figure out which PRESUBMIT.py was run, then run git blame\n'
'on the file to figure out who to ask for help.\n')
self.assertEqual(sys.stdout.getvalue(), text)
def testGetTryMastersExecuter(self):
  """GetTryMastersExecuter returns {} when the script defines no hook, and
  passes through whatever mapping the hook produces."""
  change = self.ExampleChange(
      extra_lines=['STORY=http://tracker.com/42', 'BUG=boo\n'])
  executer = presubmit.GetTryMastersExecuter()
  # No script, or a script without the hook, yields an empty mapping.
  self.assertEqual({}, executer.ExecPresubmitScript('', '', '', change))
  self.assertEqual(
      {},
      executer.ExecPresubmitScript('def foo():\n return\n', '', '', change))
  # Every mapping a hook returns comes back unchanged, including empty
  # masters and names containing whitespace.
  cases = (
      {'m1': {'s1': {'t1', 't2'}},
       'm2': {'s1': {'defaulttests'},
              's2': {'defaulttests'}}},
      {},
      {'m': {}},
      {'m r': {'s\tv': {'t1'}}},
  )
  for expected in cases:
    self.assertEqual(
        expected,
        executer.ExecPresubmitScript(
            self.presubmit_trymaster % expected, '', '', change))
def ExampleChange(self, extra_lines=None):
  """Returns an example Change instance for tests.

  Args:
    extra_lines: optional list of extra description lines appended after
        the fixed two-line header.
  """
  description = '\n'.join(
      ['Hello there', 'This is a change'] + (extra_lines or []))
  return presubmit.Change(
      name='mychange',
      description=description,
      local_root=self.fake_root_dir,
      files=[['A', os.path.join('haspresubmit', 'blat.cc')]],
      issue=0,
      patchset=0,
      author=None)
def testMergeMasters(self):
  """_MergeMasters unions master->builder->tests maps, order-independently."""
  merge = presubmit._MergeMasters
  # Merging with an empty mapping keeps the other side intact.
  self.assertEqual({}, merge({}, {}))
  self.assertEqual({'m1': {}}, merge({}, {'m1': {}}))
  self.assertEqual({'m1': {}}, merge({'m1': {}}, {}))
  parts = [
      {'try1.cr': {'win': {'defaulttests'}}},
      {'try1.cr': {'linux1': {'test1'}},
       'try2.cr': {'linux2': {'defaulttests'}}},
      {'try1.cr': {'mac1': {'defaulttests'},
                   'mac2': {'test1', 'test2'},
                   'linux1': {'defaulttests'}}},
  ]
  expected = {
      'try1.cr': {'win': {'defaulttests'},
                  'linux1': {'defaulttests', 'test1'},
                  'mac1': {'defaulttests'},
                  'mac2': {'test1', 'test2'}},
      'try2.cr': {'linux2': {'defaulttests'}},
  }
  # Every merge order must reach the same union.
  for ordering in itertools.permutations(parts):
    self.assertEqual(expected, functools.reduce(merge, ordering, {}))
def testDoGetTryMasters(self):
"""DoGetTryMasters merges per-directory GetPreferredTryMasters results."""
# Root PRESUBMIT.py contributes one master; the linux_only one adds more.
root_text = (self.presubmit_trymaster
% '{"t1.cr": {"win": set(["defaulttests"])}}')
linux_text = (self.presubmit_trymaster
% ('{"t1.cr": {"linux1": set(["t1"])},'
' "t2.cr": {"linux2": set(["defaulttests"])}}'))
filename = 'foo.cc'
filename_linux = os.path.join('linux_only', 'penguin.cc')
root_presubmit = os.path.join(self.fake_root_dir, 'PRESUBMIT.py')
linux_presubmit = os.path.join(
self.fake_root_dir, 'linux_only', 'PRESUBMIT.py')
os.path.isfile.side_effect = (
lambda f: f in [root_presubmit, linux_presubmit])
os.listdir.return_value = ['PRESUBMIT.py']
# Serve a different script depending on which PRESUBMIT.py is read.
gclient_utils.FileRead.side_effect = (
lambda f, _: root_text if f == root_presubmit else linux_text)
change = presubmit.Change(
'mychange', '', self.fake_root_dir, [], 0, 0, None)
output = StringIO()
# A root-only file picks up just the root script's masters.
self.assertEqual({'t1.cr': {'win': ['defaulttests']}},
presubmit.DoGetTryMasters(change, [filename],
self.fake_root_dir,
None, None, False, output))
output = StringIO()
# Adding a linux_only file merges in the nested script's masters.
expected = {
't1.cr': {'win': ['defaulttests'], 'linux1': ['t1']},
't2.cr': {'linux2': ['defaulttests']},
}
self.assertEqual(expected,
presubmit.DoGetTryMasters(change,
[filename, filename_linux],
self.fake_root_dir, None, None,
False, output))
def testMainPostUpload(self):
  """main() with --post_upload runs post-upload hooks and returns 0."""
  os.listdir.return_value = ['PRESUBMIT.py']
  os.path.isfile.side_effect = lambda path: 'PRESUBMIT.py' in path
  gclient_utils.FileRead.return_value = (
      'def PostUploadHook(gerrit, change, output_api):\n'
      ' return ()\n')
  scm.determine_scm.return_value = None
  presubmit._parse_files.return_value = [('M', 'random_file.txt')]
  argv = ['--root', self.fake_root_dir, 'random_file.txt', '--post_upload']
  self.assertEqual(0, presubmit.main(argv))
@mock.patch('presubmit_support.ListRelevantPresubmitFiles')
def testMainUnversioned(self, *_mocks):
  """main() succeeds on an unversioned directory when files are listed."""
  scm.determine_scm.return_value = None
  gclient_utils.FileRead.return_value = ''
  presubmit.ListRelevantPresubmitFiles.return_value = [
      os.path.join(self.fake_root_dir, 'PRESUBMIT.py')]
  self.assertEqual(
      0,
      presubmit.main(['--root', self.fake_root_dir, 'random_file.txt']))
@mock.patch('presubmit_support.ListRelevantPresubmitFiles')
def testMainUnversionedChecksFail(self, *_mocks):
  """main() returns 1 when a presubmit check errors on an unversioned dir."""
  scm.determine_scm.return_value = None
  gclient_utils.FileRead.return_value = (
      'def CheckChangeOnUpload(input_api, output_api):\n'
      ' return [output_api.PresubmitError("!!")]\n')
  presubmit.ListRelevantPresubmitFiles.return_value = [
      os.path.join(self.fake_root_dir, 'PRESUBMIT.py')]
  self.assertEqual(
      1,
      presubmit.main(['--root', self.fake_root_dir, 'random_file.txt']))
def testMainUnversionedFail(self):
  """main() without <files> in an unversioned directory exits with code 2."""
  scm.determine_scm.return_value = None
  with self.assertRaises(SystemExit) as ctx:
    presubmit.main(['--root', self.fake_root_dir])
  self.assertEqual(2, ctx.exception.code)
  # The argparse-style usage error goes to stderr.
  self.assertEqual(
      sys.stderr.getvalue(),
      'usage: presubmit_unittest.py [options] <files...>\n'
      'presubmit_unittest.py: error: <files> is not optional for unversioned '
      'directories.\n')
@mock.patch('presubmit_support.Change', mock.Mock())
def testParseChange_Files(self):
  """_parse_change builds a plain Change from explicit files when no SCM."""
  scm.determine_scm.return_value = None
  presubmit._parse_files.return_value = [('M', 'random_file.txt')]
  options = mock.Mock(all_files=False)
  self.assertEqual(
      presubmit.Change.return_value,
      presubmit._parse_change(None, options))
  presubmit.Change.assert_called_once_with(
      options.name,
      options.description,
      options.root,
      [('M', 'random_file.txt')],
      options.issue,
      options.patchset,
      options.author,
      upstream=options.upstream)
  presubmit._parse_files.assert_called_once_with(
      options.files, options.recursive)
def testParseChange_NoFilesAndNoScm(self):
  """_parse_change reports a usage error when no files and no SCM exist."""
  presubmit._parse_files.return_value = []
  scm.determine_scm.return_value = None
  parser = mock.Mock()
  parser.error.side_effect = [SystemExit]
  options = mock.Mock(files=[], all_files=False)
  with self.assertRaises(SystemExit):
    presubmit._parse_change(parser, options)
  parser.error.assert_called_once_with(
      '<files> is not optional for unversioned directories.')
def testParseChange_FilesAndAllFiles(self):
  """Explicit files combined with --all-files is a usage error."""
  parser = mock.Mock()
  parser.error.side_effect = [SystemExit]
  with self.assertRaises(SystemExit):
    presubmit._parse_change(
        parser, mock.Mock(files=['foo'], all_files=True))
  parser.error.assert_called_once_with(
      '<files> cannot be specified when --all-files is set.')
@mock.patch('presubmit_support.GitChange', mock.Mock())
def testParseChange_FilesAndGit(self):
  """_parse_change builds a GitChange from explicit files under git."""
  scm.determine_scm.return_value = 'git'
  presubmit._parse_files.return_value = [('M', 'random_file.txt')]
  options = mock.Mock(all_files=False)
  self.assertEqual(
      presubmit.GitChange.return_value,
      presubmit._parse_change(None, options))
  presubmit.GitChange.assert_called_once_with(
      options.name,
      options.description,
      options.root,
      [('M', 'random_file.txt')],
      options.issue,
      options.patchset,
      options.author,
      upstream=options.upstream)
  presubmit._parse_files.assert_called_once_with(
      options.files, options.recursive)
@mock.patch('presubmit_support.GitChange', mock.Mock())
@mock.patch('scm.GIT.CaptureStatus', mock.Mock())
def testParseChange_NoFilesAndGit(self):
  """With no explicit files, the change is built from `git status` output."""
  scm.determine_scm.return_value = 'git'
  scm.GIT.CaptureStatus.return_value = [('A', 'added.txt')]
  options = mock.Mock(all_files=False, files=[])
  self.assertEqual(
      presubmit.GitChange.return_value,
      presubmit._parse_change(None, options))
  presubmit.GitChange.assert_called_once_with(
      options.name,
      options.description,
      options.root,
      [('A', 'added.txt')],
      options.issue,
      options.patchset,
      options.author,
      upstream=options.upstream)
  scm.GIT.CaptureStatus.assert_called_once_with(
      options.root, options.upstream)
@mock.patch('presubmit_support.GitChange', mock.Mock())
@mock.patch('scm.GIT.GetAllFiles', mock.Mock())
def testParseChange_AllFilesAndGit(self):
  """--all-files treats every tracked file as modified ('M')."""
  scm.determine_scm.return_value = 'git'
  scm.GIT.GetAllFiles.return_value = ['foo.txt', 'bar.txt']
  options = mock.Mock(all_files=True, files=[])
  self.assertEqual(
      presubmit.GitChange.return_value,
      presubmit._parse_change(None, options))
  presubmit.GitChange.assert_called_once_with(
      options.name,
      options.description,
      options.root,
      [('M', 'foo.txt'), ('M', 'bar.txt')],
      options.issue,
      options.patchset,
      options.author,
      upstream=options.upstream)
  scm.GIT.GetAllFiles.assert_called_once_with(options.root)
def testParseGerritOptions_NoGerritUrl(self):
  """Without --gerrit_url no accessor is built; options stay untouched."""
  options = mock.Mock(
      gerrit_url=None,
      gerrit_fetch=False,
      author='author',
      description='description')
  self.assertIsNone(presubmit._parse_gerrit_options(None, options))
  self.assertEqual('author', options.author)
  self.assertEqual('description', options.description)
def testParseGerritOptions_NoGerritFetch(self):
  """With a URL but no fetch, the accessor mirrors the option values."""
  options = mock.Mock(
      gerrit_url='https://foo-review.googlesource.com/bar',
      gerrit_project='project',
      gerrit_branch='refs/heads/main',
      gerrit_fetch=False,
      author='author',
      description='description')
  accessor = presubmit._parse_gerrit_options(None, options)
  self.assertEqual('foo-review.googlesource.com', accessor.host)
  self.assertEqual('project', accessor.project)
  self.assertEqual('refs/heads/main', accessor.branch)
  # author/description are not overwritten when gerrit_fetch is off.
  self.assertEqual('author', options.author)
  self.assertEqual('description', options.description)
@mock.patch('presubmit_support.GerritAccessor.GetChangeOwner')
@mock.patch('presubmit_support.GerritAccessor.GetChangeDescription')
def testParseGerritOptions_GerritFetch(
    self, mockDescription, mockOwner):
  """--gerrit_fetch pulls author/description from Gerrit into the options."""
  mockOwner.return_value = 'new owner'
  mockDescription.return_value = 'new description'
  options = mock.Mock(
      gerrit_url='https://foo-review.googlesource.com/bar',
      gerrit_project='project',
      gerrit_branch='refs/heads/main',
      gerrit_fetch=True,
      issue=123,
      patchset=4)
  accessor = presubmit._parse_gerrit_options(None, options)
  self.assertEqual('foo-review.googlesource.com', accessor.host)
  self.assertEqual('project', accessor.project)
  self.assertEqual('refs/heads/main', accessor.branch)
  # The fetched values replace whatever was on the options.
  self.assertEqual('new owner', options.author)
  self.assertEqual('new description', options.description)
def testParseGerritOptions_GerritFetchNoUrl(self):
  """--gerrit_fetch without --gerrit_url is a usage error."""
  parser = mock.Mock()
  parser.error.side_effect = [SystemExit]
  options = mock.Mock(
      gerrit_url=None, gerrit_fetch=True, issue=123, patchset=4)
  with self.assertRaises(SystemExit):
    presubmit._parse_gerrit_options(parser, options)
  parser.error.assert_called_once_with(
      '--gerrit_fetch requires --gerrit_url, --issue and --patchset.')
def testParseGerritOptions_GerritFetchNoIssue(self):
  """--gerrit_fetch without --issue is a usage error."""
  parser = mock.Mock()
  parser.error.side_effect = [SystemExit]
  options = mock.Mock(
      gerrit_url='https://example.com',
      gerrit_fetch=True,
      issue=None,
      patchset=4)
  with self.assertRaises(SystemExit):
    presubmit._parse_gerrit_options(parser, options)
  parser.error.assert_called_once_with(
      '--gerrit_fetch requires --gerrit_url, --issue and --patchset.')
def testParseGerritOptions_GerritFetchNoPatchset(self):
  """--gerrit_fetch without --patchset is a usage error."""
  parser = mock.Mock()
  parser.error.side_effect = [SystemExit]
  options = mock.Mock(
      gerrit_url='https://example.com',
      gerrit_fetch=True,
      issue=123,
      patchset=None)
  with self.assertRaises(SystemExit):
    presubmit._parse_gerrit_options(parser, options)
  parser.error.assert_called_once_with(
      '--gerrit_fetch requires --gerrit_url, --issue and --patchset.')
class InputApiUnittest(PresubmitTestsBase):
"""Tests presubmit.InputApi."""
def testInputApiConstruction(self):
  """InputApi exposes the presubmit's directory and the change it wraps."""
  api = presubmit.InputApi(
      self.fake_change,
      presubmit_path='foo/path/PRESUBMIT.py',
      is_committing=False,
      gerrit_obj=None,
      verbose=False)
  self.assertEqual('foo/path', api.PresubmitLocalPath())
  self.assertEqual(self.fake_change, api.change)
def testInputApiPresubmitScriptFiltering(self):
"""AffectedFiles/RightHandSideLines scope results to the presubmit's dir
and filter out deleted, binary and default-skipped files."""
description_lines = ('Hello there',
'this is a change',
'BUG=123',
' STORY =http://foo/ \t',
'and some more regular text')
# Each entry: (action, path, exists-on-disk).
files = [
['A', os.path.join('foo', 'blat.cc'), True],
['M', os.path.join('foo', 'blat', 'READ_ME2'), True],
['M', os.path.join('foo', 'blat', 'binary.dll'), True],
['M', os.path.join('foo', 'blat', 'weird.xyz'), True],
['M', os.path.join('foo', 'blat', 'another.h'), True],
['M', os.path.join('foo', 'third_party', 'third.cc'), True],
['D', os.path.join('foo', 'mat', 'beingdeleted.txt'), False],
['M', os.path.join('flop', 'notfound.txt'), False],
['A', os.path.join('boo', 'flap.h'), True],
]
diffs = []
known_files = []
# Only existing files under foo/ get a fake diff and an isfile() hit.
for _, f, exists in files:
full_file = os.path.join(self.fake_root_dir, f)
if exists and f.startswith('foo'):
known_files.append(full_file)
diffs.append(self.presubmit_diffs % {'filename': f})
os.path.isfile.side_effect = lambda f: f in known_files
presubmit.scm.GIT.GenerateDiff.return_value = '\n'.join(diffs)
change = presubmit.GitChange(
'mychange',
'\n'.join(description_lines),
self.fake_root_dir,
[[f[0], f[1]] for f in files],
0,
0,
None)
# PRESUBMIT.py lives in foo/, so the InputApi is scoped to foo/.
input_api = presubmit.InputApi(
change,
os.path.join(self.fake_root_dir, 'foo', 'PRESUBMIT.py'),
False, None, False)
# Doesn't filter much
got_files = input_api.AffectedFiles()
self.assertEqual(len(got_files), 7)
self.assertEqual(got_files[0].LocalPath(), presubmit.normpath(files[0][1]))
self.assertEqual(got_files[1].LocalPath(), presubmit.normpath(files[1][1]))
self.assertEqual(got_files[2].LocalPath(), presubmit.normpath(files[2][1]))
self.assertEqual(got_files[3].LocalPath(), presubmit.normpath(files[3][1]))
self.assertEqual(got_files[4].LocalPath(), presubmit.normpath(files[4][1]))
self.assertEqual(got_files[5].LocalPath(), presubmit.normpath(files[5][1]))
self.assertEqual(got_files[6].LocalPath(), presubmit.normpath(files[6][1]))
# Ignores weird because of check_list, third_party because of skip_list,
# binary isn't a text file and being deleted doesn't exist. The rest is
# outside foo/.
rhs_lines = [x for x in input_api.RightHandSideLines(None)]
self.assertEqual(len(rhs_lines), 14)
self.assertEqual(rhs_lines[0][0].LocalPath(),
presubmit.normpath(files[0][1]))
self.assertEqual(rhs_lines[3][0].LocalPath(),
presubmit.normpath(files[0][1]))
self.assertEqual(rhs_lines[7][0].LocalPath(),
presubmit.normpath(files[4][1]))
self.assertEqual(rhs_lines[13][0].LocalPath(),
presubmit.normpath(files[4][1]))
def testInputApiFilterSourceFile(self):
"""FilterSourceFile keeps source files but drops default-skipped paths,
with the documented allow-listed exceptions under third_party."""
files = [
['A', os.path.join('foo', 'blat.cc')],
['M', os.path.join('foo', 'blat', 'READ_ME2')],
['M', os.path.join('foo', 'blat', 'binary.dll')],
['M', os.path.join('foo', 'blat', 'weird.xyz')],
['M', os.path.join('foo', 'blat', 'another.h')],
['M', os.path.join(
'foo', 'third_party', 'WebKit', 'WebKit.cpp')],
['M', os.path.join(
'foo', 'third_party', 'WebKit2', 'WebKit2.cpp')],
['M', os.path.join('foo', 'third_party', 'blink', 'blink.cc')],
['M', os.path.join(
'foo', 'third_party', 'blink1', 'blink1.cc')],
['M', os.path.join('foo', 'third_party', 'third', 'third.cc')],
]
known_files = [
os.path.join(self.fake_root_dir, f)
for _, f in files]
os.path.isfile.side_effect = lambda f: f in known_files
change = presubmit.GitChange(
'mychange',
'description\nlines\n',
self.fake_root_dir,
[[f[0], f[1]] for f in files],
0,
0,
None)
input_api = presubmit.InputApi(
change,
os.path.join(self.fake_root_dir, 'foo', 'PRESUBMIT.py'),
False, None, False)
# We'd like to test FilterSourceFile, which is used by
# AffectedSourceFiles(None).
got_files = input_api.AffectedSourceFiles(None)
self.assertEqual(len(got_files), 4)
# blat.cc, another.h, WebKit.cpp, and blink.cc remain.
self.assertEqual(got_files[0].LocalPath(), presubmit.normpath(files[0][1]))
self.assertEqual(got_files[1].LocalPath(), presubmit.normpath(files[4][1]))
self.assertEqual(got_files[2].LocalPath(), presubmit.normpath(files[5][1]))
self.assertEqual(got_files[3].LocalPath(), presubmit.normpath(files[7][1]))
def testDefaultFilesToCheckFilesToSkipFilters(self):
"""Table-driven check of the default check/skip regexes in FilterSourceFile.

Each tuple pairs a list of candidate AffectedFiles with the subset of
paths expected to survive the default filters.
"""
def f(x):
# Wraps a path into a modified AffectedFile rooted at the fake root.
return presubmit.AffectedFile(x, 'M', self.fake_root_dir, None)
files = [
(
[
# To be tested.
f('testing_support/google_appengine/b'),
f('testing_support/not_google_appengine/foo.cc'),
],
[
# Expected.
'testing_support/not_google_appengine/foo.cc',
],
),
(
[
# To be tested.
f('a/experimental/b'),
f('experimental/b'),
f('a/experimental'),
f('a/experimental.cc'),
f('a/experimental.S'),
],
[
# Expected.
'a/experimental.cc',
'a/experimental.S',
],
),
(
[
# To be tested.
f('a/third_party/b'),
f('third_party/b'),
f('a/third_party'),
f('a/third_party.cc'),
],
[
# Expected.
'a/third_party.cc',
],
),
(
[
# To be tested.
f('a/LOL_FILE/b'),
f('b.c/LOL_FILE'),
f('a/PRESUBMIT.py'),
f('a/FOO.json'),
f('a/FOO.java'),
f('a/FOO.mojom'),
],
[
# Expected.
'a/PRESUBMIT.py',
'a/FOO.java',
'a/FOO.mojom',
],
),
(
[
# To be tested.
f('a/.git'),
f('b.c/.git'),
f('a/.git/bleh.py'),
f('.git/bleh.py'),
f('bleh.diff'),
f('foo/bleh.patch'),
],
[
# Expected.
],
),
]
input_api = presubmit.InputApi(
self.fake_change, './PRESUBMIT.py', False, None, False)
for item in files:
results = list(filter(input_api.FilterSourceFile, item[0]))
# Survivors must match the expected paths, position by position.
for i in range(len(results)):
self.assertEqual(results[i].LocalPath(),
presubmit.normpath(item[1][i]))
# Same number of expected results.
self.assertEqual(sorted([f.LocalPath().replace(os.sep, '/')
for f in results]),
sorted(item[1]))
def testDefaultOverrides(self):
"""The legacy list-name aliases track FILES_TO_CHECK/FILES_TO_SKIP
through reads and writes."""
input_api = presubmit.InputApi(
self.fake_change, './PRESUBMIT.py', False, None, False)
self.assertEqual(len(input_api.DEFAULT_FILES_TO_CHECK), 24)
self.assertEqual(len(input_api.DEFAULT_FILES_TO_SKIP), 12)
# All four legacy aliases mirror the canonical attributes.
self.assertEqual(
input_api.DEFAULT_FILES_TO_CHECK, input_api.DEFAULT_WHITE_LIST)
self.assertEqual(
input_api.DEFAULT_FILES_TO_CHECK, input_api.DEFAULT_ALLOW_LIST)
self.assertEqual(
input_api.DEFAULT_FILES_TO_SKIP, input_api.DEFAULT_BLACK_LIST)
self.assertEqual(
input_api.DEFAULT_FILES_TO_SKIP, input_api.DEFAULT_BLOCK_LIST)
# Overwriting the canonical attributes is reflected in the aliases.
input_api.DEFAULT_FILES_TO_CHECK = (r'.+\.c$',)
input_api.DEFAULT_FILES_TO_SKIP = (r'.+\.patch$', r'.+\.diff')
self.assertEqual(len(input_api.DEFAULT_FILES_TO_CHECK), 1)
self.assertEqual(len(input_api.DEFAULT_FILES_TO_SKIP), 2)
self.assertEqual(
input_api.DEFAULT_FILES_TO_CHECK, input_api.DEFAULT_WHITE_LIST)
self.assertEqual(
input_api.DEFAULT_FILES_TO_CHECK, input_api.DEFAULT_ALLOW_LIST)
self.assertEqual(
input_api.DEFAULT_FILES_TO_SKIP, input_api.DEFAULT_BLACK_LIST)
self.assertEqual(
input_api.DEFAULT_FILES_TO_SKIP, input_api.DEFAULT_BLOCK_LIST)
# Test backward compatiblity of setting old property names
# TODO(https://crbug.com/1098562): Remove once no longer used
input_api.DEFAULT_WHITE_LIST = ()
input_api.DEFAULT_BLACK_LIST = ()
self.assertEqual(len(input_api.DEFAULT_FILES_TO_CHECK), 0)
self.assertEqual(len(input_api.DEFAULT_FILES_TO_SKIP), 0)
self.assertEqual(
input_api.DEFAULT_FILES_TO_CHECK, input_api.DEFAULT_WHITE_LIST)
self.assertEqual(
input_api.DEFAULT_FILES_TO_CHECK, input_api.DEFAULT_ALLOW_LIST)
self.assertEqual(
input_api.DEFAULT_FILES_TO_SKIP, input_api.DEFAULT_BLACK_LIST)
self.assertEqual(
input_api.DEFAULT_FILES_TO_SKIP, input_api.DEFAULT_BLOCK_LIST)
def testCustomFilter(self):
  """AffectedSourceFiles honors a caller-supplied source_file filter."""
  def keep_files_with_a(affected_file):
    # Keep only paths containing the letter 'a'.
    return 'a' in affected_file.LocalPath()
  files = [('A', 'eeaee'), ('M', 'eeabee'), ('M', 'eebcee')]
  known_files = [
      os.path.join(self.fake_root_dir, name) for _, name in files]
  os.path.isfile.side_effect = lambda path: path in known_files
  change = presubmit.GitChange(
      'mychange', '', self.fake_root_dir, files, 0, 0, None)
  input_api = presubmit.InputApi(
      change,
      os.path.join(self.fake_root_dir, 'PRESUBMIT.py'),
      False, None, False)
  got_files = input_api.AffectedSourceFiles(keep_files_with_a)
  self.assertEqual(
      ['eeaee', 'eeabee'], [af.LocalPath() for af in got_files])
def testLambdaFilter(self):
files_to_check = presubmit.InputApi.DEFAULT_FILES_TO_SKIP + (r".*?a.*?",)
files_to_skip = [r".*?b.*?"]
files = [('A', 'eeaee'), ('M', 'eeabee'), ('M', 'eebcee'), ('M', 'eecaee')]
known_files = [
os.path.join(self.fake_root_dir, item)
for _, item in files]
os.path.isfile.side_effect = lambda f: f in known_files
change = presubmit.GitChange(
'mychange', '', self.fake_root_dir, files, 0, 0, None)
input_api = presubmit.InputApi(
change, './PRESUBMIT.py', False, None, False)
# Sample usage of overriding the default white and black lists.
got_files = input_api.AffectedSourceFiles(
lambda x: input_api.FilterSourceFile(x, files_to_check, files_to_skip))
self.assertEqual(len(got_files), 2)
self.assertEqual(got_files[0].LocalPath(), 'eeaee')
self.assertEqual(got_files[1].LocalPath(), 'eecaee')
  def testGetAbsoluteLocalPath(self):
    """LocalPath() stays repo-relative while AbsoluteLocalPath() is prefixed
    with the repository root, via both Change and InputApi accessors."""
    normpath = presubmit.normpath
    # Regression test for bug of presubmit stuff that relies on invoking
    # SVN (e.g. to get mime type of file) not working unless gcl invoked
    # from the client root (e.g. if you were at 'src' and did 'cd base' before
    # invoking 'gcl upload' it would fail because svn wouldn't find the files
    # the presubmit script was asking about).
    files = [
      ['A', 'isdir'],
      ['A', os.path.join('isdir', 'blat.cc')],
      ['M', os.path.join('elsewhere', 'ouf.cc')],
    ]
    change = presubmit.Change(
        'mychange', '', self.fake_root_dir, files, 0, 0, None)
    affected_files = change.AffectedFiles()
    # Local paths should remain the same
    self.assertEqual(affected_files[0].LocalPath(), normpath('isdir'))
    self.assertEqual(affected_files[1].LocalPath(), normpath('isdir/blat.cc'))
    # Absolute paths should be prefixed
    self.assertEqual(
        affected_files[0].AbsoluteLocalPath(),
        presubmit.normpath(os.path.join(self.fake_root_dir, 'isdir')))
    self.assertEqual(
        affected_files[1].AbsoluteLocalPath(),
        presubmit.normpath(os.path.join(
            self.fake_root_dir, 'isdir/blat.cc')))
    # New helper functions need to work
    paths_from_change = change.AbsoluteLocalPaths()
    self.assertEqual(len(paths_from_change), 3)
    presubmit_path = os.path.join(
        self.fake_root_dir, 'isdir', 'PRESUBMIT.py')
    api = presubmit.InputApi(
        change=change, presubmit_path=presubmit_path,
        is_committing=True, gerrit_obj=None, verbose=False)
    paths_from_api = api.AbsoluteLocalPaths()
    # Only 2 paths from the InputApi — presumably it scopes the change to
    # files under the PRESUBMIT.py directory ('isdir'); TODO confirm.
    self.assertEqual(len(paths_from_api), 2)
    for absolute_paths in [paths_from_change, paths_from_api]:
      self.assertEqual(
          absolute_paths[0],
          presubmit.normpath(os.path.join(
              self.fake_root_dir, 'isdir')))
      self.assertEqual(
          absolute_paths[1],
          presubmit.normpath(os.path.join(
              self.fake_root_dir, 'isdir', 'blat.cc')))
def testDeprecated(self):
change = presubmit.Change(
'mychange', '', self.fake_root_dir, [], 0, 0, None)
api = presubmit.InputApi(
change,
os.path.join(self.fake_root_dir, 'foo', 'PRESUBMIT.py'), True,
None, False)
api.AffectedTestableFiles(include_deletes=False)
def testReadFileStringDenied(self):
change = presubmit.Change(
'foo', 'foo', self.fake_root_dir, [('M', 'AA')], 0, 0, None)
input_api = presubmit.InputApi(
change, os.path.join(self.fake_root_dir, '/p'), False,
None, False)
self.assertRaises(IOError, input_api.ReadFile, 'boo', 'x')
def testReadFileStringAccepted(self):
path = os.path.join(self.fake_root_dir, 'AA/boo')
presubmit.gclient_utils.FileRead.return_code = None
change = presubmit.Change(
'foo', 'foo', self.fake_root_dir, [('M', 'AA')], 0, 0, None)
input_api = presubmit.InputApi(
change, os.path.join(self.fake_root_dir, '/p'), False,
None, False)
input_api.ReadFile(path, 'x')
def testReadFileAffectedFileDenied(self):
fileobj = presubmit.AffectedFile('boo', 'M', 'Unrelated',
diff_cache=mock.Mock())
change = presubmit.Change(
'foo', 'foo', self.fake_root_dir, [('M', 'AA')], 0, 0, None)
input_api = presubmit.InputApi(
change, os.path.join(self.fake_root_dir, '/p'), False,
None, False)
self.assertRaises(IOError, input_api.ReadFile, fileobj, 'x')
def testReadFileAffectedFileAccepted(self):
fileobj = presubmit.AffectedFile('AA/boo', 'M', self.fake_root_dir,
diff_cache=mock.Mock())
presubmit.gclient_utils.FileRead.return_code = None
change = presubmit.Change(
'foo', 'foo', self.fake_root_dir, [('M', 'AA')], 0, 0, None)
input_api = presubmit.InputApi(
change, os.path.join(self.fake_root_dir, '/p'), False,
None, False)
input_api.ReadFile(fileobj, 'x')
  def testCreateTemporaryFile(self):
    """CreateTemporaryFile returns a context manager and records the names of
    the files it hands out in input_api._named_temporary_files."""
    input_api = presubmit.InputApi(
        self.fake_change,
        presubmit_path='foo/path/PRESUBMIT.py',
        is_committing=False, gerrit_obj=None, verbose=False)
    # tempfile is mocked; each call hands out the next canned file object.
    tempfile.NamedTemporaryFile.side_effect = [
        MockTemporaryFile('foo'), MockTemporaryFile('bar')]
    self.assertEqual(0, len(input_api._named_temporary_files))
    with input_api.CreateTemporaryFile():
      self.assertEqual(1, len(input_api._named_temporary_files))
      self.assertEqual(['foo'], input_api._named_temporary_files)
      with input_api.CreateTemporaryFile():
        self.assertEqual(2, len(input_api._named_temporary_files))
    # The names are still recorded after the context managers exit.
    self.assertEqual(2, len(input_api._named_temporary_files))
    self.assertEqual(['foo', 'bar'], input_api._named_temporary_files)
    # Callers may not override 'delete' in either direction.
    self.assertRaises(TypeError, input_api.CreateTemporaryFile, delete=True)
    self.assertRaises(TypeError, input_api.CreateTemporaryFile, delete=False)
    self.assertEqual(['foo', 'bar'], input_api._named_temporary_files)
class OutputApiUnittest(PresubmitTestsBase):
  """Tests presubmit.OutputApi."""
  def testOutputApiBasics(self):
    """Severity flags on the three result types."""
    # NOTE(review): assertIsNotNone on a boolean flag is vacuous (False is
    # not None); these read like mechanical conversions of assertTrue —
    # confirm the intended assertion strength.
    self.assertIsNotNone(presubmit.OutputApi.PresubmitError('').fatal)
    self.assertFalse(presubmit.OutputApi.PresubmitError('').should_prompt)
    self.assertFalse(presubmit.OutputApi.PresubmitPromptWarning('').fatal)
    self.assertIsNotNone(
        presubmit.OutputApi.PresubmitPromptWarning('').should_prompt)
    self.assertFalse(presubmit.OutputApi.PresubmitNotifyResult('').fatal)
    self.assertFalse(
        presubmit.OutputApi.PresubmitNotifyResult('').should_prompt)
    # TODO(joi) Test MailTextResult once implemented.
  def testAppendCC(self):
    """AppendCC records the address in more_cc."""
    output_api = presubmit.OutputApi(False)
    output_api.AppendCC('chromium-reviews@chromium.org')
    self.assertEqual(['chromium-reviews@chromium.org'], output_api.more_cc)
  def testOutputApiHandling(self):
    """handle() writes each result's message to stdout."""
    # sys.stdout.getvalue()/truncate() imply stdout is replaced by a
    # StringIO — presumably by the base class's setUp; verify there.
    # NOTE(review): assertIsNotNone(str.count(...)) is vacuous — count()
    # always returns an int; assertTrue was likely intended.
    presubmit.OutputApi.PresubmitError('!!!').handle()
    self.assertIsNotNone(sys.stdout.getvalue().count('!!!'))
    sys.stdout.truncate(0)
    presubmit.OutputApi.PresubmitNotifyResult('?see?').handle()
    self.assertIsNotNone(sys.stdout.getvalue().count('?see?'))
    sys.stdout.truncate(0)
    presubmit.OutputApi.PresubmitPromptWarning('???').handle()
    self.assertIsNotNone(sys.stdout.getvalue().count('???'))
    sys.stdout.truncate(0)
    # PresubmitPromptOrNotify prints its message whether the OutputApi was
    # constructed for committing (True) or for upload (False).
    output_api = presubmit.OutputApi(True)
    output_api.PresubmitPromptOrNotify('???').handle()
    self.assertIsNotNone(sys.stdout.getvalue().count('???'))
    sys.stdout.truncate(0)
    output_api = presubmit.OutputApi(False)
    output_api.PresubmitPromptOrNotify('???').handle()
    self.assertIsNotNone(sys.stdout.getvalue().count('???'))
class AffectedFileUnittest(PresubmitTestsBase):
  """Tests presubmit.AffectedFile / GitAffectedFile accessors."""
  def testAffectedFile(self):
    """NewContents() splits the (mocked) file content into lines."""
    gclient_utils.FileRead.return_value = 'whatever\ncookie'
    af = presubmit.GitAffectedFile('foo/blat.cc', 'M', self.fake_root_dir, None)
    self.assertEqual(presubmit.normpath('foo/blat.cc'), af.LocalPath())
    self.assertEqual('M', af.Action())
    self.assertEqual(['whatever', 'cookie'], af.NewContents())
  def testAffectedFileNotExists(self):
    """An unreadable file yields empty contents instead of raising."""
    notfound = 'notfound.cc'
    gclient_utils.FileRead.side_effect = IOError
    af = presubmit.AffectedFile(notfound, 'A', self.fake_root_dir, None)
    self.assertEqual([], af.NewContents())
  def testIsTestableFile(self):
    """Only files that exist on disk are testable; the deleted one is not."""
    files = [
      presubmit.GitAffectedFile('foo/blat.txt', 'M', self.fake_root_dir,
                                None),
      presubmit.GitAffectedFile('foo/binary.blob', 'M', self.fake_root_dir,
                                None),
      presubmit.GitAffectedFile('blat/flop.txt', 'D', self.fake_root_dir,
                                None)
    ]
    blat = os.path.join('foo', 'blat.txt')
    blob = os.path.join('foo', 'binary.blob')
    f_blat = os.path.join(self.fake_root_dir, blat)
    f_blob = os.path.join(self.fake_root_dir, blob)
    # Only the two modified files "exist"; the deleted one does not.
    os.path.isfile.side_effect = lambda f: f in [f_blat, f_blob]
    output = list(filter(lambda x: x.IsTestableFile(), files))
    self.assertEqual(2, len(output))
    self.assertEqual(files[:2], output[:2])
class ChangeUnittest(PresubmitTestsBase):
  """Tests presubmit.Change description, tag and git-footer parsing."""
  def testAffectedFiles(self):
    change = presubmit.Change(
        '', '', self.fake_root_dir, [('Y', 'AA')], 3, 5, '')
    self.assertEqual(1, len(change.AffectedFiles()))
    self.assertEqual('Y', change.AffectedFiles()[0].Action())
  def testSetDescriptionText(self):
    """KEY=value lines are parsed into .tags and stripped from
    DescriptionText(); FullDescriptionText() keeps them."""
    change = presubmit.Change(
        '', 'foo\nDRU=ro', self.fake_root_dir, [], 3, 5, '')
    self.assertEqual('foo', change.DescriptionText())
    self.assertEqual('foo\nDRU=ro', change.FullDescriptionText())
    self.assertEqual({'DRU': 'ro'}, change.tags)
    # SetDescriptionText re-parses tags from the new text.
    change.SetDescriptionText('WHIZ=bang\nbar\nFOO=baz')
    self.assertEqual('bar', change.DescriptionText())
    self.assertEqual('WHIZ=bang\nbar\nFOO=baz', change.FullDescriptionText())
    self.assertEqual({'WHIZ': 'bang', 'FOO': 'baz'}, change.tags)
  def testAddDescriptionFooter(self):
    """The footer key is canonicalized (My-Footer) and appended to the
    existing footer block."""
    change = presubmit.Change(
        '', 'foo\nDRU=ro\n\nChange-Id: asdf', self.fake_root_dir, [], 3, 5, '')
    change.AddDescriptionFooter('my-footer', 'my-value')
    self.assertEqual(
        'foo\nDRU=ro\n\nChange-Id: asdf\nMy-Footer: my-value',
        change.FullDescriptionText())
  def testAddDescriptionFooter_NoPreviousFooters(self):
    """A blank-line separated footer block is created when none exists."""
    change = presubmit.Change(
        '', 'foo\nDRU=ro', self.fake_root_dir, [], 3, 5, '')
    change.AddDescriptionFooter('my-footer', 'my-value')
    self.assertEqual(
        'foo\nDRU=ro\n\nMy-Footer: my-value', change.FullDescriptionText())
  def testAddDescriptionFooter_InvalidFooter(self):
    """Footer keys with invalid characters raise ValueError."""
    change = presubmit.Change(
        '', 'foo\nDRU=ro', self.fake_root_dir, [], 3, 5, '')
    with self.assertRaises(ValueError):
      change.AddDescriptionFooter('invalid.characters in:the', 'footer key')
  def testGitFootersFromDescription(self):
    """Repeated footer keys collect into a list; note the values come back
    last-listed-first ('2' before '1')."""
    change = presubmit.Change(
        '', 'foo\n\nChange-Id: asdf\nBug: 1\nBug: 2\nNo-Try: True',
        self.fake_root_dir, [], 0, 0, '')
    self.assertEqual({
      'Change-Id': ['asdf'],
      'Bug': ['2', '1'],
      'No-Try': ['True'],
    }, change.GitFootersFromDescription())
  def testGitFootersFromDescription_NoFooters(self):
    change = presubmit.Change('', 'foo', self.fake_root_dir, [], 0, 0, '')
    self.assertEqual({}, change.GitFootersFromDescription())
  def testBugFromDescription_FixedAndBugGetDeduped(self):
    """Bug: and Fixed: entries merge, dedupe and sort."""
    change = presubmit.Change(
        '', 'foo\n\nChange-Id: asdf\nBug: 1, 2\nFixed:2, 1 ',
        self.fake_root_dir, [], 0, 0, '')
    self.assertEqual(['1', '2'], change.BugsFromDescription())
    self.assertEqual('1,2', change.BUG)
  def testBugsFromDescription_MixedTagsAndFooters(self):
    """BUG= tags and Bug: footers are combined."""
    change = presubmit.Change(
        '', 'foo\nBUG=2,1\n\nChange-Id: asdf\nBug: 3, 6',
        self.fake_root_dir, [], 0, 0, '')
    self.assertEqual(['1', '2', '3', '6'], change.BugsFromDescription())
    self.assertEqual('1,2,3,6', change.BUG)
  def testBugsFromDescription_MultipleFooters(self):
    change = presubmit.Change(
        '', 'foo\n\nChange-Id: asdf\nBug: 1\nBug:4, 6\nFixed: 7',
        self.fake_root_dir, [], 0, 0, '')
    self.assertEqual(['1', '4', '6', '7'], change.BugsFromDescription())
    self.assertEqual('1,4,6,7', change.BUG)
  def testBugFromDescription_OnlyFixed(self):
    change = presubmit.Change(
        '', 'foo\n\nChange-Id: asdf\nFixed:1, 2',
        self.fake_root_dir, [], 0, 0, '')
    self.assertEqual(['1', '2'], change.BugsFromDescription())
    self.assertEqual('1,2', change.BUG)
  def testReviewersFromDescription(self):
    """Only R= tags count; 'R:' footers are ignored ('baz' is absent)."""
    change = presubmit.Change(
        '', 'foo\nR=foo,bar\n\nChange-Id: asdf\nR: baz',
        self.fake_root_dir, [], 0, 0, '')
    self.assertEqual(['bar', 'foo'], change.ReviewersFromDescription())
    self.assertEqual('bar,foo', change.R)
  def testTBRsFromDescription(self):
    """TBR= tags and TBR: footers are combined, deduped and sorted."""
    change = presubmit.Change(
        '', 'foo\nTBR=foo,bar\n\nChange-Id: asdf\nTBR: baz',
        self.fake_root_dir, [], 0, 0, '')
    self.assertEqual(['bar', 'baz', 'foo'], change.TBRsFromDescription())
    self.assertEqual('bar,baz,foo', change.TBR)
class CannedChecksUnittest(PresubmitTestsBase):
"""Tests presubmit_canned_checks.py."""
  def MockInputApi(self, change, committing):
    """Builds a MagicMock InputApi wired with real modules (json, re,
    os.path, subprocess, unittest, ...) plus the given change/committing
    state, suitable for driving canned checks."""
    # pylint: disable=no-self-use
    input_api = mock.MagicMock(presubmit.InputApi)
    # Real thread pool so RunTests actually executes queued work.
    input_api.thread_pool = presubmit.ThreadPool()
    input_api.parallel = False
    input_api.json = presubmit.json
    input_api.logging = logging
    input_api.os_listdir = mock.Mock()
    input_api.os_walk = mock.Mock()
    input_api.os_path = os.path
    input_api.re = presubmit.re
    input_api.gerrit = mock.MagicMock(presubmit.GerritAccessor)
    if sys.version_info.major == 2:
      input_api.urllib2 = mock.MagicMock(presubmit.urllib2)
    input_api.urllib_request = mock.MagicMock(presubmit.urllib_request)
    input_api.urllib_error = mock.MagicMock(presubmit.urllib_error)
    input_api.unittest = unittest
    input_api.subprocess = subprocess
    input_api.sys = sys
    # Deterministic str() so error text assertions are stable.
    class fake_CalledProcessError(Exception):
      def __str__(self):
        return 'foo'
    input_api.subprocess.CalledProcessError = fake_CalledProcessError
    input_api.verbose = False
    input_api.is_windows = False
    input_api.change = change
    input_api.is_committing = committing
    input_api.tbr = False
    input_api.dry_run = None
    # Deliberately bogus executable name so nothing real is spawned by
    # accident (subprocess is mocked anyway).
    input_api.python_executable = 'pyyyyython'
    input_api.platform = sys.platform
    input_api.cpu_count = 2
    input_api.time = time
    input_api.canned_checks = presubmit_canned_checks
    input_api.Command = presubmit.CommandData
    # Bind the real RunTests implementation onto the mock.
    input_api.RunTests = functools.partial(
        presubmit.InputApi.RunTests, input_api)
    return input_api
def DescriptionTest(self, check, description1, description2, error_type,
committing):
change1 = presubmit.Change(
'foo1', description1, self.fake_root_dir, None, 0, 0, None)
input_api1 = self.MockInputApi(change1, committing)
change2 = presubmit.Change(
'foo2', description2, self.fake_root_dir, None, 0, 0, None)
input_api2 = self.MockInputApi(change2, committing)
results1 = check(input_api1, presubmit.OutputApi)
self.assertEqual(results1, [])
results2 = check(input_api2, presubmit.OutputApi)
self.assertEqual(len(results2), 1)
self.assertTrue(isinstance(results2[0], error_type))
  def ContentTest(self, check, content1, content1_path, content2,
                  content2_path, error_type):
    """Runs a test of a content-checking rule.
    Args:
      check: the check to run; invoked as check(input_api, output_api,
          source_file_filter).
      content1: content which is expected to pass the check.
      content1_path: file path for content1.
      content2: content which is expected to fail the check.
      content2_path: file path for content2.
      error_type: the type of the error expected for content2.
    """
    change1 = presubmit.Change(
        'foo1', 'foo1\n', self.fake_root_dir, None, 0, 0, None)
    input_api1 = self.MockInputApi(change1, False)
    affected_file1 = mock.MagicMock(presubmit.GitAffectedFile)
    input_api1.AffectedFiles.return_value = [affected_file1]
    affected_file1.LocalPath.return_value = content1_path
    # The interesting content is buried among filler lines.
    affected_file1.NewContents.return_value = [
        'afoo',
        content1,
        'bfoo',
        'cfoo',
        'dfoo']
    # It falls back to ChangedContents when there is a failure. This is an
    # optimization since NewContents() is much faster to execute than
    # ChangedContents().
    affected_file1.ChangedContents.return_value = [
        (42, content1),
        (43, 'hfoo'),
        (23, 'ifoo')]
    change2 = presubmit.Change(
        'foo2', 'foo2\n', self.fake_root_dir, None, 0, 0, None)
    input_api2 = self.MockInputApi(change2, False)
    affected_file2 = mock.MagicMock(presubmit.GitAffectedFile)
    input_api2.AffectedFiles.return_value = [affected_file2]
    affected_file2.LocalPath.return_value = content2_path
    affected_file2.NewContents.return_value = [
        'dfoo',
        content2,
        'efoo',
        'ffoo',
        'gfoo']
    affected_file2.ChangedContents.return_value = [
        (42, content2),
        (43, 'hfoo'),
        (23, 'ifoo')]
    results1 = check(input_api1, presubmit.OutputApi, None)
    self.assertEqual(results1, [])
    results2 = check(input_api2, presubmit.OutputApi, None)
    self.assertEqual(len(results2), 1)
    self.assertEqual(results2[0].__class__, error_type)
  def PythonLongLineTest(self, maxlen, content, should_pass):
    """Runs a test of Python long-line checking rule.
    Because ContentTest() cannot be used here due to the different code path
    that the implementation of CheckLongLines() uses for Python files.
    Args:
      maxlen: Maximum line length for content.
      content: Python source which is expected to pass or fail the test.
      should_pass: True iff the test should pass, False otherwise.
    """
    change = presubmit.Change('foo1', 'foo1\n', self.fake_root_dir, None, 0, 0,
                              None)
    input_api = self.MockInputApi(change, False)
    affected_file = mock.MagicMock(presubmit.GitAffectedFile)
    input_api.AffectedFiles.return_value = [affected_file]
    # The .py extension routes CheckLongLines through its Python path.
    affected_file.LocalPath.return_value = 'foo.py'
    affected_file.NewContents.return_value = content.splitlines()
    results = presubmit_canned_checks.CheckLongLines(
        input_api, presubmit.OutputApi, maxlen)
    if should_pass:
      self.assertEqual(results, [])
    else:
      # Exactly one prompt warning on failure.
      self.assertEqual(len(results), 1)
      self.assertEqual(results[0].__class__,
                       presubmit.OutputApi.PresubmitPromptWarning)
def ReadFileTest(self, check, content1, content2, error_type):
change1 = presubmit.Change(
'foo1', 'foo1\n', self.fake_root_dir, None, 0, 0, None)
input_api1 = self.MockInputApi(change1, False)
affected_file1 = mock.MagicMock(presubmit.GitAffectedFile)
input_api1.AffectedSourceFiles.return_value = [affected_file1]
input_api1.ReadFile.return_value = content1
change2 = presubmit.Change(
'foo2', 'foo2\n', self.fake_root_dir, None, 0, 0, None)
input_api2 = self.MockInputApi(change2, False)
affected_file2 = mock.MagicMock(presubmit.GitAffectedFile)
input_api2.AffectedSourceFiles.return_value = [affected_file2]
input_api2.ReadFile.return_value = content2
affected_file2.LocalPath.return_value = 'bar.cc'
results = check(input_api1, presubmit.OutputApi)
self.assertEqual(results, [])
results2 = check(input_api2, presubmit.OutputApi)
self.assertEqual(len(results2), 1)
self.assertEqual(results2[0].__class__, error_type)
def testCannedCheckChangeHasBugField(self):
self.DescriptionTest(presubmit_canned_checks.CheckChangeHasBugField,
'Foo\nBUG=1234', 'Foo\n',
presubmit.OutputApi.PresubmitNotifyResult,
False)
def testCannedCheckChangeHasNoUnwantedTags(self):
self.DescriptionTest(presubmit_canned_checks.CheckChangeHasNoUnwantedTags,
'Foo\n', 'Foo\nFIXED=1234',
presubmit.OutputApi.PresubmitError, False)
def testCheckChangeHasDescription(self):
self.DescriptionTest(presubmit_canned_checks.CheckChangeHasDescription,
'Bleh', '',
presubmit.OutputApi.PresubmitNotifyResult,
False)
self.DescriptionTest(presubmit_canned_checks.CheckChangeHasDescription,
'Bleh', '',
presubmit.OutputApi.PresubmitError,
True)
def testCannedCheckDoNotSubmitInDescription(self):
self.DescriptionTest(presubmit_canned_checks.CheckDoNotSubmitInDescription,
'Foo\nDO NOTSUBMIT', 'Foo\nDO NOT ' + 'SUBMIT',
presubmit.OutputApi.PresubmitError,
False)
def testCannedCheckDoNotSubmitInFiles(self):
self.ContentTest(
lambda x,y,z: presubmit_canned_checks.CheckDoNotSubmitInFiles(x, y),
'DO NOTSUBMIT', None, 'DO NOT ' + 'SUBMIT', None,
presubmit.OutputApi.PresubmitError)
def testCheckChangeHasNoStrayWhitespace(self):
self.ContentTest(
lambda x,y,z:
presubmit_canned_checks.CheckChangeHasNoStrayWhitespace(x, y),
'Foo', None, 'Foo ', None,
presubmit.OutputApi.PresubmitPromptWarning)
def testCheckChangeHasOnlyOneEol(self):
self.ReadFileTest(presubmit_canned_checks.CheckChangeHasOnlyOneEol,
"Hey!\nHo!\n", "Hey!\nHo!\n\n",
presubmit.OutputApi.PresubmitPromptWarning)
def testCheckChangeHasNoCR(self):
self.ReadFileTest(presubmit_canned_checks.CheckChangeHasNoCR,
"Hey!\nHo!\n", "Hey!\r\nHo!\r\n",
presubmit.OutputApi.PresubmitPromptWarning)
def testCheckChangeHasNoCrAndHasOnlyOneEol(self):
self.ReadFileTest(
presubmit_canned_checks.CheckChangeHasNoCrAndHasOnlyOneEol,
"Hey!\nHo!\n", "Hey!\nHo!\n\n",
presubmit.OutputApi.PresubmitPromptWarning)
self.ReadFileTest(
presubmit_canned_checks.CheckChangeHasNoCrAndHasOnlyOneEol,
"Hey!\nHo!\n", "Hey!\r\nHo!\r\n",
presubmit.OutputApi.PresubmitPromptWarning)
def testCheckChangeTodoHasOwner(self):
self.ContentTest(presubmit_canned_checks.CheckChangeTodoHasOwner,
"TODO(foo): bar", None, "TODO: bar", None,
presubmit.OutputApi.PresubmitPromptWarning)
  @mock.patch('git_cl.Changelist')
  @mock.patch('auth.Authenticator')
  def testCannedCheckChangedLUCIConfigs(self, mockGetAuth, mockChangelist):
    """CheckChangedLUCIConfigs surfaces validation messages returned by the
    LUCI config service for changed .cfg files."""
    # mock.patch decorators are applied bottom-up, so the first mock arg is
    # auth.Authenticator and the second is git_cl.Changelist.
    affected_file1 = mock.MagicMock(presubmit.GitAffectedFile)
    affected_file1.LocalPath.return_value = 'foo.cfg'
    affected_file1.NewContents.return_value = ['test', 'foo']
    affected_file2 = mock.MagicMock(presubmit.GitAffectedFile)
    affected_file2.LocalPath.return_value = 'bar.cfg'
    affected_file2.NewContents.return_value = ['test', 'bar']
    mockGetAuth().get_access_token().token = 123
    host = 'https://host.com'
    branch = 'branch'
    # Canned service response: one ERROR message and a config set whose
    # location matches this remote host/branch.
    http_resp = {
      'messages': [{'severity': 'ERROR', 'text': 'deadbeef'}],
      'config_sets': [{'config_set': 'deadbeef',
                       'location': '%s/+/%s' % (host, branch)}]
    }
    urllib_request.urlopen.return_value = http_resp
    json.load.return_value = http_resp
    mockChangelist().GetRemoteBranch.return_value = ('remote', branch)
    mockChangelist().GetRemoteUrl.return_value = host
    change1 = presubmit.Change(
        'foo', 'foo1', self.fake_root_dir, None, 0, 0, None)
    input_api = self.MockInputApi(change1, False)
    affected_files = (affected_file1, affected_file2)
    input_api.AffectedFiles = lambda **_: affected_files
    results = presubmit_canned_checks.CheckChangedLUCIConfigs(
        input_api, presubmit.OutputApi)
    # Presumably one result per ERROR message in the response — confirm
    # against the check's implementation.
    self.assertEqual(len(results), 1)
def testCannedCheckChangeHasNoTabs(self):
self.ContentTest(presubmit_canned_checks.CheckChangeHasNoTabs,
'blah blah', None, 'blah\tblah', None,
presubmit.OutputApi.PresubmitPromptWarning)
# Make sure makefiles are ignored.
change1 = presubmit.Change(
'foo1', 'foo1\n', self.fake_root_dir, None, 0, 0, None)
input_api1 = self.MockInputApi(change1, False)
affected_file1 = mock.MagicMock(presubmit.GitAffectedFile)
affected_file1.LocalPath.return_value = 'foo.cc'
affected_file2 = mock.MagicMock(presubmit.GitAffectedFile)
affected_file2.LocalPath.return_value = 'foo/Makefile'
affected_file3 = mock.MagicMock(presubmit.GitAffectedFile)
affected_file3.LocalPath.return_value = 'makefile'
# Only this one will trigger.
affected_file4 = mock.MagicMock(presubmit.GitAffectedFile)
affected_file1.LocalPath.return_value = 'foo.cc'
affected_file1.NewContents.return_value = ['yo, ']
affected_file4.LocalPath.return_value = 'makefile.foo'
affected_file4.LocalPath.return_value = 'makefile.foo'
affected_file4.NewContents.return_value = ['ye\t']
affected_file4.ChangedContents.return_value = [(46, 'ye\t')]
affected_file4.LocalPath.return_value = 'makefile.foo'
affected_files = (affected_file1, affected_file2,
affected_file3, affected_file4)
def test(include_deletes=True, file_filter=None):
self.assertFalse(include_deletes)
for x in affected_files:
if file_filter(x):
yield x
# Override the mock of these functions.
input_api1.FilterSourceFile = lambda x: x
input_api1.AffectedFiles = test
results1 = presubmit_canned_checks.CheckChangeHasNoTabs(input_api1,
presubmit.OutputApi, None)
self.assertEqual(len(results1), 1)
self.assertEqual(results1[0].__class__,
presubmit.OutputApi.PresubmitPromptWarning)
self.assertEqual(results1[0]._long_text,
'makefile.foo:46')
def testCannedCheckLongLines(self):
check = lambda x, y, z: presubmit_canned_checks.CheckLongLines(x, y, 10, z)
self.ContentTest(check, '0123456789', None, '01234567890', None,
presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckJavaLongLines(self):
check = lambda x, y, _: presubmit_canned_checks.CheckLongLines(x, y, 80)
self.ContentTest(check, 'A ' * 50, 'foo.java', 'A ' * 50 + 'B', 'foo.java',
presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckSpecialJavaLongLines(self):
check = lambda x, y, _: presubmit_canned_checks.CheckLongLines(x, y, 80)
self.ContentTest(check, 'import ' + 'A ' * 150, 'foo.java',
'importSomething ' + 'A ' * 50, 'foo.java',
presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckPythonLongLines(self):
# NOTE: Cannot use ContentTest() here because of the different code path
# used for Python checks in CheckLongLines().
passing_cases = [
r"""
01234568901234589012345689012345689
A short line
""",
r"""
01234568901234589012345689012345689
This line is too long but should pass # pylint: disable=line-too-long
""",
r"""
01234568901234589012345689012345689
# pylint: disable=line-too-long
This line is too long but should pass due to global disable
""",
r"""
01234568901234589012345689012345689
#pylint: disable=line-too-long
This line is too long but should pass due to global disable.
""",
r"""
01234568901234589012345689012345689
# pylint: disable=line-too-long
This line is too long but should pass due to global disable.
""",
r"""
01234568901234589012345689012345689
# import is a valid exception
import some.really.long.package.name.that.should.pass
""",
r"""
01234568901234589012345689012345689
# from is a valid exception
from some.really.long.package.name import passing.line
""",
r"""
01234568901234589012345689012345689
import some.package
""",
r"""
01234568901234589012345689012345689
from some.package import stuff
""",
]
for content in passing_cases:
self.PythonLongLineTest(40, content, should_pass=True)
failing_cases = [
r"""
01234568901234589012345689012345689
This line is definitely too long and should fail.
""",
r"""
01234568901234589012345689012345689
# pylint: disable=line-too-long
This line is too long and should pass due to global disable
# pylint: enable=line-too-long
But this line is too long and should still fail now
""",
r"""
01234568901234589012345689012345689
# pylint: disable=line-too-long
This line is too long and should pass due to global disable
But this line is too long # pylint: enable=line-too-long
""",
r"""
01234568901234589012345689012345689
This should fail because the global
check is enabled on the next line.
# pylint: enable=line-too-long
""",
r"""
01234568901234589012345689012345689
# pylint: disable=line-too-long
# pylint: enable-foo-bar should pass
The following line should fail
since global directives apply to
the current line as well!
# pylint: enable-line-too-long should fail
""",
]
for content in failing_cases[0:0]:
self.PythonLongLineTest(40, content, should_pass=False)
def testCannedCheckJSLongLines(self):
check = lambda x, y, _: presubmit_canned_checks.CheckLongLines(x, y, 10)
self.ContentTest(check, 'GEN(\'#include "c/b/ui/webui/fixture.h"\');',
'foo.js', "// GEN('something');", 'foo.js',
presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckJSLongImports(self):
check = lambda x, y, _: presubmit_canned_checks.CheckLongLines(x, y, 10)
self.ContentTest(check, "import {Name, otherName} from './dir/file.js';",
'foo.js', "// We should import something long, eh?",
'foo.js', presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckTSLongImports(self):
check = lambda x, y, _: presubmit_canned_checks.CheckLongLines(x, y, 10)
self.ContentTest(check, "import {Name, otherName} from './dir/file';",
'foo.ts', "// We should import something long, eh?",
'foo.ts', presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckObjCExceptionLongLines(self):
check = lambda x, y, _: presubmit_canned_checks.CheckLongLines(x, y, 80)
self.ContentTest(check, '#import ' + 'A ' * 150, 'foo.mm',
'import' + 'A ' * 150, 'foo.mm',
presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckMakefileLongLines(self):
check = lambda x, y, _: presubmit_canned_checks.CheckLongLines(x, y, 80)
self.ContentTest(check, 'A ' * 100, 'foo.mk', 'A ' * 100 + 'B', 'foo.mk',
presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckLongLinesLF(self):
check = lambda x, y, z: presubmit_canned_checks.CheckLongLines(x, y, 10, z)
self.ContentTest(check, '012345678\n', None, '0123456789\n', None,
presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckCppExceptionLongLines(self):
check = lambda x, y, z: presubmit_canned_checks.CheckLongLines(x, y, 10, z)
self.ContentTest(
check,
'#if 56 89 12 45 9191919191919',
'foo.cc',
'#nif 56 89 12 45 9191919191919',
'foo.cc',
presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckLongLinesHttp(self):
check = lambda x, y, z: presubmit_canned_checks.CheckLongLines(x, y, 10, z)
self.ContentTest(
check,
' http:// 0 23 56',
None,
' foob:// 0 23 56',
None,
presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckLongLinesFile(self):
check = lambda x, y, z: presubmit_canned_checks.CheckLongLines(x, y, 10, z)
self.ContentTest(
check,
' file:// 0 23 56',
None,
' foob:// 0 23 56',
None,
presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckLongLinesCssUrl(self):
check = lambda x, y, z: presubmit_canned_checks.CheckLongLines(x, y, 10, z)
self.ContentTest(
check,
' url(some.png)',
'foo.css',
' url(some.png)',
'foo.cc',
presubmit.OutputApi.PresubmitPromptWarning)
def testCannedCheckLongLinesLongSymbol(self):
check = lambda x, y, z: presubmit_canned_checks.CheckLongLines(x, y, 10, z)
self.ContentTest(
check,
' TUP5D_LoNG_SY ',
None,
' TUP5D_LoNG_SY5 ',
None,
presubmit.OutputApi.PresubmitPromptWarning)
  def _LicenseCheck(self, text, license_text, committing, expected_result,
                    **kwargs):
    """Helper: runs CheckLicense on |text| against the |license_text| regexp
    and asserts exactly one |expected_result| (or none when falsy).
    Extra **kwargs (e.g. accept_empty_files) are forwarded to CheckLicense.
    """
    change = mock.MagicMock(presubmit.GitChange)
    change.scm = 'svn'
    input_api = self.MockInputApi(change, committing)
    affected_file = mock.MagicMock(presubmit.GitAffectedFile)
    input_api.AffectedSourceFiles.return_value = [affected_file]
    input_api.ReadFile.return_value = text
    if expected_result:
      # Only failures need a path, presumably for the result message.
      affected_file.LocalPath.return_value = 'bleh'
    # source_file_filter=42 is an arbitrary opaque value — presumably just
    # forwarded to the mocked AffectedSourceFiles; verify before changing.
    result = presubmit_canned_checks.CheckLicense(
        input_api, presubmit.OutputApi, license_text,
        source_file_filter=42,
        **kwargs)
    if expected_result:
      self.assertEqual(len(result), 1)
      self.assertEqual(result[0].__class__, expected_result)
    else:
      self.assertEqual(result, [])
def testCheckLicenseSuccess(self):
text = (
"#!/bin/python\n"
"# Copyright (c) 2037 Nobody.\n"
"# All Rights Reserved.\n"
"print('foo')\n"
)
license_text = (
r".*? Copyright \(c\) 2037 Nobody." "\n"
r".*? All Rights Reserved\." "\n"
)
self._LicenseCheck(text, license_text, True, None)
def testCheckLicenseFailCommit(self):
text = (
"#!/bin/python\n"
"# Copyright (c) 2037 Nobody.\n"
"# All Rights Reserved.\n"
"print('foo')\n"
)
license_text = (
r".*? Copyright \(c\) 0007 Nobody." "\n"
r".*? All Rights Reserved\." "\n"
)
self._LicenseCheck(text, license_text, True,
presubmit.OutputApi.PresubmitPromptWarning)
def testCheckLicenseFailUpload(self):
text = (
"#!/bin/python\n"
"# Copyright (c) 2037 Nobody.\n"
"# All Rights Reserved.\n"
"print('foo')\n"
)
license_text = (
r".*? Copyright \(c\) 0007 Nobody." "\n"
r".*? All Rights Reserved\." "\n"
)
self._LicenseCheck(text, license_text, False,
presubmit.OutputApi.PresubmitPromptWarning)
def testCheckLicenseEmptySuccess(self):
text = ''
license_text = (
r".*? Copyright \(c\) 2037 Nobody." "\n"
r".*? All Rights Reserved\." "\n"
)
self._LicenseCheck(text, license_text, True, None, accept_empty_files=True)
def testCannedCheckTreeIsOpenOpen(self):
input_api = self.MockInputApi(None, True)
input_api.urllib_request.urlopen().read.return_value = 'The tree is open'
results = presubmit_canned_checks.CheckTreeIsOpen(
input_api, presubmit.OutputApi, url='url_to_open', closed='.*closed.*')
self.assertEqual(results, [])
def testCannedCheckTreeIsOpenClosed(self):
input_api = self.MockInputApi(None, True)
input_api.urllib_request.urlopen().read.return_value = (
'Tree is closed for maintenance')
results = presubmit_canned_checks.CheckTreeIsOpen(
input_api, presubmit.OutputApi,
url='url_to_closed', closed='.*closed.*')
self.assertEqual(len(results), 1)
self.assertEqual(results[0].__class__,
presubmit.OutputApi.PresubmitError)
def testCannedCheckJsonTreeIsOpenOpen(self):
input_api = self.MockInputApi(None, True)
status = {
'can_commit_freely': True,
'general_state': 'open',
'message': 'The tree is open'
}
input_api.urllib_request.urlopen().read.return_value = json.dumps(status)
results = presubmit_canned_checks.CheckTreeIsOpen(
input_api, presubmit.OutputApi, json_url='url_to_open')
self.assertEqual(results, [])
  def testCannedCheckJsonTreeIsOpenClosed(self):
    """A JSON status with can_commit_freely=False yields one PresubmitError."""
    input_api = self.MockInputApi(None, True)
    status = {
        'can_commit_freely': False,
        'general_state': 'closed',
        'message': 'The tree is close',
    }
    input_api.urllib_request.urlopen().read.return_value = json.dumps(status)
    results = presubmit_canned_checks.CheckTreeIsOpen(
        input_api, presubmit.OutputApi, json_url='url_to_closed')
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].__class__,
                     presubmit.OutputApi.PresubmitError)
  def testRunPythonUnitTestsNoTest(self):
    """Running an empty test list schedules nothing and produces no results."""
    input_api = self.MockInputApi(None, False)
    presubmit_canned_checks.RunPythonUnitTests(
        input_api, presubmit.OutputApi, [])
    # Tests are queued on the thread pool; RunAsync drains it.
    results = input_api.thread_pool.RunAsync()
    self.assertEqual(results, [])
  def testRunPythonUnitTestsNonExistentUpload(self):
    """A failing (missing) module is only a notify-level result on upload."""
    input_api = self.MockInputApi(None, False)
    # Configures the shared mock Popen return value to report failure.
    subprocess.Popen().returncode = 1  # pylint: disable=no-value-for-parameter
    presubmit.sigint_handler.wait.return_value = ('foo', None)
    results = presubmit_canned_checks.RunPythonUnitTests(
        input_api, presubmit.OutputApi, ['_non_existent_module'])
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].__class__,
                     presubmit.OutputApi.PresubmitNotifyResult)
  def testRunPythonUnitTestsNonExistentCommitting(self):
    """A failing (missing) module is a hard PresubmitError when committing."""
    input_api = self.MockInputApi(None, True)
    subprocess.Popen().returncode = 1  # pylint: disable=no-value-for-parameter
    presubmit.sigint_handler.wait.return_value = ('foo', None)
    results = presubmit_canned_checks.RunPythonUnitTests(
        input_api, presubmit.OutputApi, ['_non_existent_module'])
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].__class__, presubmit.OutputApi.PresubmitError)
  def testRunPythonUnitTestsFailureUpload(self):
    """A failing test module on upload yields a notify result with its output."""
    input_api = self.MockInputApi(None, False)
    input_api.unittest = mock.MagicMock(unittest)
    subprocess.Popen().returncode = 1  # pylint: disable=no-value-for-parameter
    presubmit.sigint_handler.wait.return_value = ('foo', None)
    results = presubmit_canned_checks.RunPythonUnitTests(
        input_api, presubmit.OutputApi, ['test_module'])
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].__class__,
                     presubmit.OutputApi.PresubmitNotifyResult)
    # 'pyyyyython' comes from the mocked python executable name;
    # 'foo' is the captured stdout supplied above.
    self.assertEqual(
        'test_module\npyyyyython -m test_module (0.00s) failed\nfoo',
        results[0]._message)
  def testRunPythonUnitTestsFailureCommitting(self):
    """A failing test module when committing yields a PresubmitError with output."""
    input_api = self.MockInputApi(None, True)
    subprocess.Popen().returncode = 1  # pylint: disable=no-value-for-parameter
    presubmit.sigint_handler.wait.return_value = ('foo', None)
    results = presubmit_canned_checks.RunPythonUnitTests(
        input_api, presubmit.OutputApi, ['test_module'])
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].__class__, presubmit.OutputApi.PresubmitError)
    self.assertEqual(
        'test_module\npyyyyython -m test_module (0.00s) failed\nfoo',
        results[0]._message)
  def testRunPythonUnitTestsSuccess(self):
    """A passing test module (returncode 0) produces no results."""
    input_api = self.MockInputApi(None, False)
    input_api.unittest = mock.MagicMock(unittest)
    subprocess.Popen().returncode = 0  # pylint: disable=no-value-for-parameter
    presubmit.sigint_handler.wait.return_value = ('', None)
    presubmit_canned_checks.RunPythonUnitTests(
        input_api, presubmit.OutputApi, ['test_module'])
    results = input_api.thread_pool.RunAsync()
    self.assertEqual(results, [])
  def testCannedRunPylint(self):
    """RunPylint launches two pylint passes (cyclic-import, then the rest).

    Verifies the exact Popen invocations and the argument payloads written to
    each process's stdin (args are passed via --args-on-stdin).
    """
    change = mock.Mock()
    change.RepositoryRoot.return_value = 'CWD'
    input_api = self.MockInputApi(change, True)
    input_api.environ = mock.MagicMock(os.environ)
    input_api.environ.copy.return_value = {}
    input_api.AffectedSourceFiles.return_value = True
    input_api.PresubmitLocalPath.return_value = 'CWD'
    input_api.os_walk.return_value = [('CWD', [], ['file1.py'])]
    process = mock.Mock()
    process.returncode = 0
    subprocess.Popen.return_value = process
    presubmit.sigint_handler.wait.return_value = ('', None)
    pylint = os.path.join(_ROOT, 'pylint')
    pylintrc = os.path.join(_ROOT, 'pylintrc')
    env = {str('PYTHONPATH'): str('')}
    if sys.platform == 'win32':
      pylint += '.bat'
    results = presubmit_canned_checks.RunPylint(
        input_api, presubmit.OutputApi)
    self.assertEqual([], results)
    self.assertEqual(subprocess.Popen.mock_calls, [
        mock.call(
            [pylint, '--args-on-stdin'], env=env,
            cwd='CWD', stderr=subprocess.STDOUT, stdout=subprocess.PIPE,
            stdin=subprocess.PIPE),
        mock.call(
            [pylint, '--args-on-stdin'], env=env,
            cwd='CWD', stderr=subprocess.STDOUT, stdout=subprocess.PIPE,
            stdin=subprocess.PIPE),
    ])
    # First pass checks only cyclic-import; second disables it and runs the
    # remaining checks in parallel (--jobs=2).
    self.assertEqual(presubmit.sigint_handler.wait.mock_calls, [
        mock.call(
            process,
            '--rcfile=%s\n--disable=all\n--enable=cyclic-import\nfile1.py'
            % pylintrc),
        mock.call(
            process,
            '--rcfile=%s\n--disable=cyclic-import\n--jobs=2\nfile1.py'
            % pylintrc),
    ])
    self.checkstdout('')
  def GetInputApiWithFiles(self, files):
    """Build a MockInputApi whose change contains the given affected files.

    Args:
      files: dict mapping path -> (action, contents), e.g. {'a.py': ('M', 'x')}.
             Action 'D' marks a deleted file.

    Returns:
      A mock input_api whose change/ReadFile reflect *files*.
    """
    change = mock.MagicMock(presubmit.Change)
    change.AffectedFiles = lambda *a, **kw: (
        presubmit.Change.AffectedFiles(change, *a, **kw))
    change._affected_files = []
    for path, (action, contents) in files.items():
      affected_file = mock.MagicMock(presubmit.GitAffectedFile)
      affected_file.AbsoluteLocalPath.return_value = path
      affected_file.LocalPath.return_value = path
      affected_file.Action.return_value = action
      affected_file.ChangedContents.return_value = [
          (1, contents or ''),
      ]
      change._affected_files.append(affected_file)
    input_api = self.MockInputApi(None, False)
    input_api.change = change
    input_api.ReadFile = lambda path: files[path][1]
    input_api.basename = os.path.basename
    input_api.is_windows = sys.platform.startswith('win')
    # NOTE: replaces the real os.path.exists/isfile for the rest of the test;
    # deleted ('D') files are reported as non-existent.
    os.path.exists = lambda path: path in files and files[path][0] != 'D'
    os.path.isfile = os.path.exists
    return input_api
  def testCheckDirMetadataFormat(self):
    """CheckDirMetadataFormat validates DIR_METADATA and OWNERS, skipping deletes."""
    input_api = self.GetInputApiWithFiles({
        'DIR_METADATA': ('M', ''),
        'a/DIR_METADATA': ('M', ''),
        'a/b/OWNERS': ('M', ''),
        'c/DIR_METADATA': ('D', ''),
        'd/unrelated': ('M', ''),
    })
    dirmd_bin = 'dirmd.bat' if input_api.is_windows else 'dirmd'
    # Deleted c/DIR_METADATA and the unrelated file must not be validated.
    expected_cmd = [
        dirmd_bin, 'validate', 'DIR_METADATA', 'a/DIR_METADATA', 'a/b/OWNERS']
    commands = presubmit_canned_checks.CheckDirMetadataFormat(
        input_api, presubmit.OutputApi)
    self.assertEqual(1, len(commands))
    self.assertEqual(expected_cmd, commands[0].cmd)
  def testCheckNoNewMetadataInOwners(self):
    """OWNERS files without metadata directives pass the no-new-metadata check."""
    input_api = self.GetInputApiWithFiles({
        'no-new-metadata/OWNERS': ('M', '# WARNING: Blah'),
        'added-no-new-metadata/OWNERS': ('A', '# WARNING: Bleh'),
        'deleted/OWNERS': ('D', None),
    })
    self.assertEqual(
        [],
        presubmit_canned_checks.CheckNoNewMetadataInOwners(
            input_api, presubmit.OutputApi))
  def testCheckNoNewMetadataInOwnersFails(self):
    """A metadata directive in OWNERS (any case) triggers a PresubmitError."""
    input_api = self.GetInputApiWithFiles({
        'new-metadata/OWNERS': ('M', '# CoMpOnEnT: Monorail>Component'),
    })
    results = presubmit_canned_checks.CheckNoNewMetadataInOwners(
        input_api, presubmit.OutputApi)
    self.assertEqual(1, len(results))
    self.assertIsInstance(results[0], presubmit.OutputApi.PresubmitError)
  def testCheckOwnersDirMetadataExclusiveWorks(self):
    """Directories pass when OWNERS metadata and DIR_METADATA don't coexist."""
    input_api = self.GetInputApiWithFiles({
        'only-owners/OWNERS': ('M', '# COMPONENT: Monorail>Component'),
        'only-dir-metadata/DIR_METADATA': ('M', ''),
        'owners-has-no-metadata/DIR_METADATA': ('M', ''),
        'owners-has-no-metadata/OWNERS': ('M', 'no-metadata'),
        'deleted-owners/OWNERS': ('D', None),
        'deleted-owners/DIR_METADATA': ('M', ''),
        'deleted-dir-metadata/OWNERS': ('M', '# COMPONENT: Monorail>Component'),
        'deleted-dir-metadata/DIR_METADATA': ('D', None),
        'non-metadata-comment/OWNERS': ('M', '# WARNING: something.'),
        'non-metadata-comment/DIR_METADATA': ('M', ''),
    })
    self.assertEqual(
        [],
        presubmit_canned_checks.CheckOwnersDirMetadataExclusive(
            input_api, presubmit.OutputApi))
  def testCheckOwnersDirMetadataExclusiveFails(self):
    """OWNERS metadata alongside DIR_METADATA in one directory is an error."""
    input_api = self.GetInputApiWithFiles({
        'DIR_METADATA': ('M', ''),
        'OWNERS': ('M', '# COMPONENT: Monorail>Component'),
    })
    results = presubmit_canned_checks.CheckOwnersDirMetadataExclusive(
        input_api, presubmit.OutputApi)
    self.assertEqual(1, len(results))
    self.assertIsInstance(results[0], presubmit.OutputApi.PresubmitError)
  def GetInputApiWithOWNERS(self, owners_content, code_owners_enabled=False):
    """Build a MockInputApi with a single modified OWNERS file.

    Args:
      owners_content: text of the OWNERS file.
      code_owners_enabled: value returned by gerrit.IsCodeOwnersEnabledOnRepo.
    """
    input_api = self.GetInputApiWithFiles({'OWNERS': ('M', owners_content)})
    owners_file = StringIO(owners_content)
    fopen = lambda *args: owners_file
    input_api.owners_db = owners.Database('', fopen, os.path)
    input_api.gerrit.IsCodeOwnersEnabledOnRepo = lambda: code_owners_enabled
    return input_api
  def testCheckOwnersFormatWorks(self):
    """A syntactically valid OWNERS file passes CheckOwnersFormat."""
    input_api = self.GetInputApiWithOWNERS('\n'.join([
        'set noparent',
        'per-file lalala = lemur@chromium.org',
    ]))
    self.assertEqual(
        [],
        presubmit_canned_checks.CheckOwnersFormat(
            input_api, presubmit.OutputApi)
    )
  def testCheckOwnersFormatWorks_CodeOwners(self):
    """With code-owners enabled, local format validation is skipped entirely."""
    # If code owners is enabled, we rely on it to check owners format instead of
    # depot tools.
    input_api = self.GetInputApiWithOWNERS(
        'any content', code_owners_enabled=True)
    self.assertEqual(
        [],
        presubmit_canned_checks.CheckOwnersFormat(
            input_api, presubmit.OutputApi)
    )
  def testCheckOwnersFormatFails(self):
    """A malformed OWNERS line produces a PresubmitError."""
    input_api = self.GetInputApiWithOWNERS('\n'.join([
        'set noparent',
        'invalid format',
    ]))
    results = presubmit_canned_checks.CheckOwnersFormat(
        input_api, presubmit.OutputApi)
    self.assertEqual(1, len(results))
    self.assertIsInstance(results[0], presubmit.OutputApi.PresubmitError)
  def AssertOwnersWorks(
      self, tbr=False, issue='1', approvers=None, modified_files=None,
      owners_by_path=None, is_committing=True, response=None,
      expected_output='', manually_specified_reviewers=None, dry_run=None,
      code_owners_enabled=False):
    """Run CheckOwners against a mocked Gerrit change and compare its stdout.

    Args:
      tbr: whether the change description carries TBR.
      issue: Gerrit change number (None simulates no uploaded change).
      approvers: set of emails that lgtm'ed the change.
      modified_files: paths touched by the change.
      owners_by_path: path -> list of owner emails (mocks ListOwners).
      is_committing: True for commit, False for upload.
      response: canned Gerrit change-detail dict; built from approvers if None.
      expected_output: exact stdout, or a compiled regex to match against it.
      manually_specified_reviewers: reviewers listed in the CL description.
      dry_run: dry-run flag passed to the input_api.
      code_owners_enabled: whether the repo uses Gerrit code-owners.
    """
    # The set of people who lgtm'ed a change.
    approvers = approvers or set()
    manually_specified_reviewers = manually_specified_reviewers or []
    modified_files = modified_files or ['foo/xyz.cc']
    owners_by_path = owners_by_path or {'foo/xyz.cc': ['john@example.com']}
    response = response or {
      "owner": {"email": 'john@example.com'},
      "labels": {"Code-Review": {
        u'all': [
          {
             u'email': a,
             u'value': +1
          } for a in approvers
        ],
        u'default_value': 0,
        u'values': {u' 0': u'No score',
                    u'+1': u'Looks good to me',
                    u'-1': u"I would prefer that you didn't submit this"}
      }},
      "reviewers": {"REVIEWER": [{u'email': a}] for a in approvers},
    }
    change = mock.MagicMock(presubmit.Change)
    change.OriginalOwnersFiles.return_value = {}
    change.RepositoryRoot.return_value = None
    change.ReviewersFromDescription.return_value = manually_specified_reviewers
    change.TBRsFromDescription.return_value = []
    change.author_email = 'john@example.com'
    change.issue = issue
    affected_files = []
    for f in modified_files:
      affected_file = mock.MagicMock(presubmit.GitAffectedFile)
      affected_file.LocalPath.return_value = f
      affected_files.append(affected_file)
    change.AffectedFiles.return_value = affected_files
    input_api = self.MockInputApi(change, False)
    input_api.gerrit = presubmit.GerritAccessor('host')
    input_api.is_committing = is_committing
    input_api.tbr = tbr
    input_api.dry_run = dry_run
    input_api.gerrit._FetchChangeDetail = lambda _: response
    input_api.gerrit.IsCodeOwnersEnabledOnRepo = lambda: code_owners_enabled
    input_api.owners_client = owners_client.OwnersClient()
    with mock.patch('owners_client.OwnersClient.ListOwners',
                    side_effect=lambda f: owners_by_path.get(f, [])):
      results = presubmit_canned_checks.CheckOwners(
          input_api, presubmit.OutputApi)
      for result in results:
        result.handle()
    if expected_output:
      # NOTE(review): assertRegexpMatches is the deprecated py2-compatible
      # alias of assertRegex; kept for compatibility.
      self.assertRegexpMatches(sys.stdout.getvalue(), expected_output)
    else:
      self.assertEqual(sys.stdout.getvalue(), expected_output)
    # Reset the captured stdout so successive calls start clean.
    sys.stdout.truncate(0)
  def testCannedCheckOwners_DryRun(self):
    """Dry run reports the would-be commit failure without failing upload."""
    response = {
      "owner": {"email": "john@example.com"},
      "labels": {"Code-Review": {
        u'all': [
          {
             u'email': u'ben@example.com',
             u'value': 0
          },
        ],
        u'approved': {u'email': u'ben@example.org'},
        u'default_value': 0,
        u'values': {u' 0': u'No score',
                    u'+1': u'Looks good to me',
                    u'-1': u"I would prefer that you didn't submit this"}
      }},
      "reviewers": {"REVIEWER": [{u'email': u'ben@example.com'}]},
    }
    self.AssertOwnersWorks(
        approvers=set(),
        dry_run=True,
        response=response,
        expected_output='This is a dry run, but these failures would be ' +
                        'reported on commit:\nMissing LGTM from someone ' +
                        'other than john@example.com\n')
    self.AssertOwnersWorks(
        approvers=set(['ben@example.com']),
        is_committing=False,
        response=response,
        expected_output='')
  def testCannedCheckOwners_OwnersOverride(self):
    """An Owners-Override +1 vote satisfies the check without any LGTM."""
    response = {
      "owner": {"email": "john@example.com"},
      "labels": {"Owners-Override": {
        u'all': [
          {
             u'email': u'sheriff@example.com',
             u'value': 1
          },
        ],
        u'approved': {u'email': u'sheriff@example.org'},
        u'default_value': 0,
        u'values': {u' 0': u'No score',
                    u'+1': u'Looks good to me'},
      }},
      "reviewers": {"REVIEWER": [{u'email': u'sheriff@example.com'}]},
    }
    self.AssertOwnersWorks(
        approvers=set(),
        response=response,
        is_committing=True,
        expected_output='')
    self.AssertOwnersWorks(
        approvers=set(),
        is_committing=False,
        response=response,
        expected_output='')
  def testCannedCheckOwners_Approved(self):
    """A non-owner approval passes for both -2..+2 and -1..+1 label configs."""
    response = {
      "owner": {"email": "john@example.com"},
      "labels": {"Code-Review": {
        u'all': [
          {
             u'email': u'john@example.com',  # self +1 :)
             u'value': 1
          },
          {
             u'email': u'ben@example.com',
             u'value': 2
          },
        ],
        u'approved': {u'email': u'ben@example.org'},
        u'default_value': 0,
        u'values': {u' 0': u'No score',
                    u'+1': u'Looks good to me, but someone else must approve',
                    u'+2': u'Looks good to me, approved',
                    u'-1': u"I would prefer that you didn't submit this",
                    u'-2': u'Do not submit'}
      }},
      "reviewers": {"REVIEWER": [{u'email': u'ben@example.com'}]},
    }
    self.AssertOwnersWorks(
        approvers=set(['ben@example.com']),
        response=response,
        is_committing=True,
        expected_output='')
    self.AssertOwnersWorks(
        approvers=set(['ben@example.com']),
        is_committing=False,
        response=response,
        expected_output='')
    # Testing configuration with on -1..+1.
    response = {
      "owner": {"email": "john@example.com"},
      "labels": {"Code-Review": {
        u'all': [
          {
             u'email': u'ben@example.com',
             u'value': 1
          },
        ],
        u'approved': {u'email': u'ben@example.org'},
        u'default_value': 0,
        u'values': {u' 0': u'No score',
                    u'+1': u'Looks good to me',
                    u'-1': u"I would prefer that you didn't submit this"}
      }},
      "reviewers": {"REVIEWER": [{u'email': u'ben@example.com'}]},
    }
    self.AssertOwnersWorks(
        approvers=set(['ben@example.com']),
        response=response,
        is_committing=True,
        expected_output='')
  def testCannedCheckOwners_NotApproved(self):
    """Votes below the approval threshold leave the missing-LGTM failure."""
    response = {
      "owner": {"email": "john@example.com"},
      "labels": {"Code-Review": {
        u'all': [
          {
             u'email': u'john@example.com',  # self +1 :)
             u'value': 1
          },
          {
             u'email': u'ben@example.com',
             u'value': 1
          },
        ],
        u'approved': {u'email': u'ben@example.org'},
        u'default_value': 0,
        u'values': {u' 0': u'No score',
                    u'+1': u'Looks good to me, but someone else must approve',
                    u'+2': u'Looks good to me, approved',
                    u'-1': u"I would prefer that you didn't submit this",
                    u'-2': u'Do not submit'}
      }},
      "reviewers": {"REVIEWER": [{u'email': u'ben@example.com'}]},
    }
    # +1 on a -2..+2 label is not an approval when committing.
    self.AssertOwnersWorks(
        approvers=set(),
        response=response,
        is_committing=True,
        expected_output=
            'Missing LGTM from someone other than john@example.com\n')
    self.AssertOwnersWorks(
        approvers=set(),
        is_committing=False,
        response=response,
        expected_output='')
    # Testing configuration with on -1..+1.
    response = {
      "owner": {"email": "john@example.com"},
      "labels": {"Code-Review": {
        u'all': [
          {
             u'email': u'ben@example.com',
             u'value': 0
          },
        ],
        u'approved': {u'email': u'ben@example.org'},
        u'default_value': 0,
        u'values': {u' 0': u'No score',
                    u'+1': u'Looks good to me',
                    u'-1': u"I would prefer that you didn't submit this"}
      }},
      "reviewers": {"REVIEWER": [{u'email': u'ben@example.com'}]},
    }
    self.AssertOwnersWorks(
        approvers=set(),
        response=response,
        is_committing=True,
        expected_output=
            'Missing LGTM from someone other than john@example.com\n')
  def testCannedCheckOwners_NoReviewers(self):
    """No reviewers at all: commit fails for missing LGTM, upload passes."""
    response = {
      "owner": {"email": "john@example.com"},
      "labels": {"Code-Review": {
        u'default_value': 0,
        u'values': {u' 0': u'No score',
                    u'+1': u'Looks good to me',
                    u'-1': u"I would prefer that you didn't submit this"}
      }},
      "reviewers": {},
    }
    self.AssertOwnersWorks(
        approvers=set(),
        response=response,
        expected_output=
            'Missing LGTM from someone other than john@example.com\n')
    self.AssertOwnersWorks(
        approvers=set(),
        is_committing=False,
        response=response,
        expected_output='')
  def testCannedCheckOwners_NoIssueNoFiles(self):
    """Without a Gerrit change number, commit fails; upload stays silent."""
    self.AssertOwnersWorks(issue=None,
        expected_output="OWNERS check failed: this CL has no Gerrit "
                        "change number, so we can't check it for approvals.\n")
    self.AssertOwnersWorks(issue=None, is_committing=False,
        expected_output="")
  def testCannedCheckOwners_NoIssue(self):
    """No issue with modified files: commit fails; upload flags missing owners."""
    self.AssertOwnersWorks(issue=None,
        modified_files=['foo'],
        expected_output="OWNERS check failed: this CL has no Gerrit "
                        "change number, so we can't check it for approvals.\n")
    self.AssertOwnersWorks(issue=None,
        is_committing=False,
        modified_files=['foo'],
        expected_output=re.compile(
            'Missing OWNER reviewers for these files:\n'
            '    foo\n', re.MULTILINE))
  def testCannedCheckOwners_NoIssueLocalReviewers(self):
    """Description-listed reviewers satisfy the upload check without an issue."""
    self.AssertOwnersWorks(
        issue=None,
        manually_specified_reviewers=['jane@example.com'],
        expected_output="OWNERS check failed: this CL has no Gerrit "
                        "change number, so we can't check it for approvals.\n")
    self.AssertOwnersWorks(
        issue=None,
        manually_specified_reviewers=['jane@example.com'],
        is_committing=False,
        expected_output='')
  def testCannedCheckOwners_NoIssueLocalReviewersDontInferEmailDomain(self):
    """A bare username ('jane') is not expanded to an email, so owners are missing."""
    self.AssertOwnersWorks(
        issue=None,
        manually_specified_reviewers=['jane@example.com'],
        expected_output="OWNERS check failed: this CL has no Gerrit "
                        "change number, so we can't check it for approvals.\n")
    self.AssertOwnersWorks(
        issue=None,
        modified_files=['foo'],
        manually_specified_reviewers=['jane'],
        is_committing=False,
        expected_output=re.compile(
            'Missing OWNER reviewers for these files:\n'
            '    foo\n', re.MULTILINE))
def testCannedCheckOwners_NoLGTM(self):
self.AssertOwnersWorks(expected_output='Missing LGTM from someone '
'other than john@example.com\n')
self.AssertOwnersWorks(is_committing=False, expected_output='')
  def testCannedCheckOwners_OnlyOwnerLGTM(self):
    """The author's own LGTM does not count toward the required approval."""
    self.AssertOwnersWorks(approvers=set(['john@example.com']),
                           expected_output='Missing LGTM from someone '
                                           'other than john@example.com\n')
    self.AssertOwnersWorks(approvers=set(['john@example.com']),
                           is_committing=False,
                           expected_output='')
  def testCannedCheckOwners_TBR(self):
    """TBR skips the OWNERS check on commit and is silent on upload."""
    self.AssertOwnersWorks(tbr=True,
        expected_output='--tbr was specified, skipping OWNERS check\n')
    self.AssertOwnersWorks(tbr=True, is_committing=False, expected_output='')
  def testCannedCheckOwners_TBRIgnored(self):
    """With Gerrit code-owners enabled, TBR produces no skip message at all."""
    self.AssertOwnersWorks(
        tbr=True,
        code_owners_enabled=True,
        expected_output='')
    self.AssertOwnersWorks(
        tbr=True,
        code_owners_enabled=True,
        is_committing=False,
        expected_output='')
  def testCannedCheckOwners_TBROWNERSFile(self):
    """TBR does not bypass the OWNERS requirement for OWNERS files themselves."""
    self.AssertOwnersWorks(
        tbr=True,
        modified_files=['foo/OWNERS'],
        expected_output='Missing LGTM from an OWNER for these files:\n'
                        '    foo/OWNERS\n'
                        'TBR for OWNERS files are ignored.\n')
  def testCannedCheckOwners_TBRNonOWNERSFile(self):
    """TBR still skips the check when the CL's OWNERS file has an owner listed."""
    self.AssertOwnersWorks(
        tbr=True,
        modified_files=['foo/OWNERS', 'foo/xyz.cc'],
        owners_by_path={'foo/OWNERS': ['john@example.com'],
                        'foo/xyz.cc': []},
        expected_output='--tbr was specified, skipping OWNERS check\n')
  def testCannedCheckOwners_WithoutOwnerLGTM(self):
    """A file with no owner LGTM fails commit and warns about reviewers on upload."""
    self.AssertOwnersWorks(
        modified_files=['foo'],
        expected_output='Missing LGTM from an OWNER for these files:\n'
                        '    foo\n')
    self.AssertOwnersWorks(
        modified_files=['foo'],
        is_committing=False,
        expected_output=re.compile(
            'Missing OWNER reviewers for these files:\n'
            '    foo\n', re.MULTILINE))
  def testCannedCheckOwners_WithLGTMs(self):
    """An LGTM from a listed owner satisfies both commit and upload checks."""
    self.AssertOwnersWorks(approvers=set(['ben@example.com']))
    self.AssertOwnersWorks(approvers=set(['ben@example.com']),
                           is_committing=False)
  @mock.patch(BUILTIN_OPEN, mock.mock_open())
  def testCannedRunUnitTests(self):
    """RunUnitTests wraps .py tests in vpython and reports per-test results."""
    # Empty shebang -> no python3 detection for bar.py.
    open().readline.return_value = ''
    change = presubmit.Change(
        'foo1', 'description1', self.fake_root_dir, None, 0, 0, None)
    input_api = self.MockInputApi(change, False)
    input_api.verbose = True
    input_api.PresubmitLocalPath.return_value = self.fake_root_dir
    presubmit.sigint_handler.wait.return_value = ('', None)
    process1 = mock.Mock()
    process1.returncode = 1
    process2 = mock.Mock()
    process2.returncode = 0
    subprocess.Popen.side_effect = [process1, process2]
    unit_tests = ['allo', 'bar.py']
    results = presubmit_canned_checks.RunUnitTests(
        input_api,
        presubmit.OutputApi,
        unit_tests)
    self.assertEqual(2, len(results))
    self.assertEqual(
        presubmit.OutputApi.PresubmitNotifyResult, results[1].__class__)
    self.assertEqual(
        presubmit.OutputApi.PresubmitPromptWarning, results[0].__class__)
    cmd = ['bar.py', '--verbose']
    if input_api.platform == 'win32':
      cmd.insert(0, 'vpython.bat')
    else:
      cmd.insert(0, 'vpython')
    self.assertEqual(subprocess.Popen.mock_calls, [
        mock.call(
            cmd, cwd=self.fake_root_dir, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT, stdin=subprocess.PIPE),
        mock.call(
            ['allo', '--verbose'], cwd=self.fake_root_dir,
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
            stdin=subprocess.PIPE),
    ])
    # No timeout configured -> no watchdog timer.
    threading.Timer.assert_not_called()
    self.checkstdout('')
  @mock.patch(BUILTIN_OPEN, mock.mock_open())
  def testCannedRunUnitTestsWithTimer(self):
    """A configured thread-pool timeout starts and cancels a watchdog timer."""
    open().readline.return_value = ''
    change = presubmit.Change(
        'foo1', 'description1', self.fake_root_dir, None, 0, 0, None)
    input_api = self.MockInputApi(change, False)
    input_api.verbose = True
    input_api.thread_pool.timeout = 100
    input_api.PresubmitLocalPath.return_value = self.fake_root_dir
    presubmit.sigint_handler.wait.return_value = ('', None)
    subprocess.Popen.return_value = mock.Mock(returncode=0)
    results = presubmit_canned_checks.RunUnitTests(
        input_api,
        presubmit.OutputApi,
        ['bar.py'])
    self.assertEqual(
        presubmit.OutputApi.PresubmitNotifyResult, results[0].__class__)
    threading.Timer.assert_called_once_with(
        input_api.thread_pool.timeout, mock.ANY)
    # Test finished in time -> timer is cancelled after being started.
    threading.Timer().start.assert_called_once_with()
    threading.Timer().cancel.assert_called_once_with()
    self.checkstdout('')
  @mock.patch(BUILTIN_OPEN, mock.mock_open())
  def testCannedRunUnitTestsWithTimerTimesOut(self):
    """Firing the timeout callback marks the test as timed out in its message."""
    open().readline.return_value = ''
    change = presubmit.Change(
        'foo1', 'description1', self.fake_root_dir, None, 0, 0, None)
    input_api = self.MockInputApi(change, False)
    input_api.verbose = True
    input_api.thread_pool.timeout = 100
    input_api.PresubmitLocalPath.return_value = self.fake_root_dir
    presubmit.sigint_handler.wait.return_value = ('', None)
    subprocess.Popen.return_value = mock.Mock(returncode=1)
    timer_instance = mock.Mock()
    def mockTimer(_, fn):
      # Invoke the timeout callback immediately to simulate expiry.
      fn()
      return timer_instance
    threading.Timer.side_effect = mockTimer
    results = presubmit_canned_checks.RunUnitTests(
        input_api,
        presubmit.OutputApi,
        ['bar.py'])
    self.assertEqual(
        presubmit.OutputApi.PresubmitPromptWarning, results[0].__class__)
    results[0].handle()
    self.assertIn(
        'bar.py --verbose (0.00s) failed\nProcess timed out after 100s',
        sys.stdout.getvalue())
    threading.Timer.assert_called_once_with(
        input_api.thread_pool.timeout, mock.ANY)
    timer_instance.start.assert_called_once_with()
  @mock.patch(BUILTIN_OPEN, mock.mock_open())
  def testCannedRunUnitTestsPython3(self):
    """A python3 shebang makes .py tests run under both vpython and vpython3."""
    open().readline.return_value = '#!/usr/bin/env python3'
    change = presubmit.Change(
        'foo1', 'description1', self.fake_root_dir, None, 0, 0, None)
    input_api = self.MockInputApi(change, False)
    input_api.verbose = True
    input_api.PresubmitLocalPath.return_value = self.fake_root_dir
    presubmit.sigint_handler.wait.return_value = ('', None)
    subprocesses = [
        mock.Mock(returncode=1),
        mock.Mock(returncode=0),
        mock.Mock(returncode=0),
    ]
    subprocess.Popen.side_effect = subprocesses
    unit_tests = ['allo', 'bar.py']
    results = presubmit_canned_checks.RunUnitTests(
        input_api,
        presubmit.OutputApi,
        unit_tests)
    self.assertEqual([result.__class__ for result in results], [
        presubmit.OutputApi.PresubmitPromptWarning,
        presubmit.OutputApi.PresubmitNotifyResult,
        presubmit.OutputApi.PresubmitNotifyResult,
    ])
    cmd = ['bar.py', '--verbose']
    vpython = 'vpython'
    vpython3 = 'vpython3'
    if input_api.platform == 'win32':
      vpython += '.bat'
      vpython3 += '.bat'
    self.assertEqual(subprocess.Popen.mock_calls, [
        mock.call(
            [vpython] + cmd, cwd=self.fake_root_dir, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT, stdin=subprocess.PIPE),
        mock.call(
            [vpython3] + cmd, cwd=self.fake_root_dir, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT, stdin=subprocess.PIPE),
        mock.call(
            ['allo', '--verbose'], cwd=self.fake_root_dir,
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
            stdin=subprocess.PIPE),
    ])
    self.assertEqual(presubmit.sigint_handler.wait.mock_calls, [
        mock.call(subprocesses[0], None),
        mock.call(subprocesses[1], None),
        mock.call(subprocesses[2], None),
    ])
    self.checkstdout('')
  @mock.patch(BUILTIN_OPEN, mock.mock_open())
  def testCannedRunUnitTestsDontRunOnPython2(self):
    """With run_on_python2=False the vpython (py2) invocation is skipped."""
    open().readline.return_value = '#!/usr/bin/env python3'
    change = presubmit.Change(
        'foo1', 'description1', self.fake_root_dir, None, 0, 0, None)
    input_api = self.MockInputApi(change, False)
    input_api.verbose = True
    input_api.PresubmitLocalPath.return_value = self.fake_root_dir
    presubmit.sigint_handler.wait.return_value = ('', None)
    subprocess.Popen.side_effect = [
        mock.Mock(returncode=1),
        mock.Mock(returncode=0),
        mock.Mock(returncode=0),
    ]
    unit_tests = ['allo', 'bar.py']
    results = presubmit_canned_checks.RunUnitTests(
        input_api,
        presubmit.OutputApi,
        unit_tests,
        run_on_python2=False)
    self.assertEqual([result.__class__ for result in results], [
        presubmit.OutputApi.PresubmitPromptWarning,
        presubmit.OutputApi.PresubmitNotifyResult,
    ])
    cmd = ['bar.py', '--verbose']
    vpython3 = 'vpython3'
    if input_api.platform == 'win32':
      vpython3 += '.bat'
    self.assertEqual(subprocess.Popen.mock_calls, [
        mock.call(
            [vpython3] + cmd, cwd=self.fake_root_dir, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT, stdin=subprocess.PIPE),
        mock.call(
            ['allo', '--verbose'], cwd=self.fake_root_dir,
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
            stdin=subprocess.PIPE),
    ])
    self.checkstdout('')
  @mock.patch(BUILTIN_OPEN, mock.mock_open())
  def testCannedRunUnitTestsDontRunOnPython3(self):
    """With run_on_python3=False only the vpython (py2) invocation remains."""
    open().readline.return_value = '#!/usr/bin/env python3'
    change = presubmit.Change(
        'foo1', 'description1', self.fake_root_dir, None, 0, 0, None)
    input_api = self.MockInputApi(change, False)
    input_api.verbose = True
    input_api.PresubmitLocalPath.return_value = self.fake_root_dir
    presubmit.sigint_handler.wait.return_value = ('', None)
    subprocess.Popen.side_effect = [
        mock.Mock(returncode=1),
        mock.Mock(returncode=0),
        mock.Mock(returncode=0),
    ]
    unit_tests = ['allo', 'bar.py']
    results = presubmit_canned_checks.RunUnitTests(
        input_api,
        presubmit.OutputApi,
        unit_tests,
        run_on_python3=False)
    self.assertEqual([result.__class__ for result in results], [
        presubmit.OutputApi.PresubmitPromptWarning,
        presubmit.OutputApi.PresubmitNotifyResult,
    ])
    cmd = ['bar.py', '--verbose']
    vpython = 'vpython'
    if input_api.platform == 'win32':
      vpython += '.bat'
    self.assertEqual(subprocess.Popen.mock_calls, [
        mock.call(
            [vpython] + cmd, cwd=self.fake_root_dir, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT, stdin=subprocess.PIPE),
        mock.call(
            ['allo', '--verbose'], cwd=self.fake_root_dir,
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
            stdin=subprocess.PIPE),
    ])
    self.checkstdout('')
  def testCannedRunUnitTestsInDirectory(self):
    """RunUnitTestsInDirectory honors files_to_check/files_to_skip filters."""
    change = presubmit.Change(
        'foo1', 'description1', self.fake_root_dir, None, 0, 0, None)
    input_api = self.MockInputApi(change, False)
    input_api.verbose = True
    input_api.logging = mock.MagicMock(logging)
    input_api.PresubmitLocalPath.return_value = self.fake_root_dir
    input_api.os_listdir.return_value = ['.', '..', 'a', 'b', 'c']
    input_api.os_path.isfile = lambda x: not x.endswith('.')
    process = mock.Mock()
    process.returncode = 0
    subprocess.Popen.return_value = process
    presubmit.sigint_handler.wait.return_value = ('', None)
    # 'a' matches files_to_check but is excluded by files_to_skip;
    # only 'b' survives the filtering.
    results = presubmit_canned_checks.RunUnitTestsInDirectory(
        input_api,
        presubmit.OutputApi,
        'random_directory',
        files_to_check=['^a$', '^b$'],
        files_to_skip=['a'])
    self.assertEqual(1, len(results))
    self.assertEqual(
        presubmit.OutputApi.PresubmitNotifyResult, results[0].__class__)
    self.assertEqual(subprocess.Popen.mock_calls, [
        mock.call(
            [os.path.join('random_directory', 'b'), '--verbose'],
            cwd=self.fake_root_dir, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT, stdin=subprocess.PIPE),
    ])
    self.checkstdout('')
  def testPanProjectChecks(self):
    """PanProjectChecks flags trailing whitespace in the mocked affected file."""
    # Make sure it accepts both list and tuples.
    change = presubmit.Change(
        'foo1', 'description1', self.fake_root_dir, None, 0, 0, None)
    input_api = self.MockInputApi(change, False)
    affected_file = mock.MagicMock(presubmit.GitAffectedFile)
    input_api.AffectedFiles.return_value = [affected_file]
    affected_file.NewContents.return_value = 'Hey!\nHo!\nHey!\nHo!\n\n'
    # CheckChangeHasNoTabs() calls _FindNewViolationsOfRule() which calls
    # ChangedContents().
    affected_file.ChangedContents.return_value = [
        (0, 'Hey!\n'),
        (1, 'Ho!\n'),
        (2, 'Hey!\n'),
        (3, 'Ho!\n'),
        (4, '\n')]
    affected_file.LocalPath.return_value = 'hello.py'
    # CheckingLicense() calls AffectedSourceFiles() instead of AffectedFiles().
    input_api.AffectedSourceFiles.return_value = [affected_file]
    input_api.ReadFile.return_value = 'Hey!\nHo!\nHey!\nHo!\n\n'
    results = presubmit_canned_checks.PanProjectChecks(
        input_api,
        presubmit.OutputApi,
        excluded_paths=None,
        text_files=None,
        license_header=None,
        project_name=None,
        owners_check=False)
    self.assertEqual(2, len(results))
    self.assertEqual(
        'Found line ending with white spaces in:', results[0]._message)
    self.checkstdout('')
  def testCheckCIPDManifest_file(self):
    """CheckCIPDManifest with a path builds a cipd ensure-file-verify command."""
    input_api = self.MockInputApi(None, False)
    command = presubmit_canned_checks.CheckCIPDManifest(
        input_api, presubmit.OutputApi, path='/path/to/foo')
    self.assertEqual(command.cmd,
        ['cipd', 'ensure-file-verify', '-ensure-file', '/path/to/foo'])
    self.assertEqual(command.kwargs, {
        'stdin': subprocess.PIPE,
        'stdout': subprocess.PIPE,
        'stderr': subprocess.STDOUT,
    })
  def testCheckCIPDManifest_content(self):
    """Inline content is fed via stdin ('-ensure-file=-'); verbose adds -log-level."""
    input_api = self.MockInputApi(None, False)
    input_api.verbose = True
    command = presubmit_canned_checks.CheckCIPDManifest(
        input_api, presubmit.OutputApi, content='manifest_content')
    self.assertEqual(command.cmd,
        ['cipd', 'ensure-file-verify', '-log-level', 'debug', '-ensure-file=-'])
    self.assertEqual(command.stdin, 'manifest_content')
    self.assertEqual(command.kwargs, {
        'stdin': subprocess.PIPE,
        'stdout': subprocess.PIPE,
        'stderr': subprocess.STDOUT,
    })
  def testCheckCIPDPackages(self):
    """CheckCIPDPackages renders platforms and packages into an ensure file."""
    content = '\n'.join([
        '$VerifiedPlatform foo-bar',
        '$VerifiedPlatform baz-qux',
        'foo/bar/baz/${platform} version:ohaithere',
        'qux version:kthxbye',
    ])
    input_api = self.MockInputApi(None, False)
    command = presubmit_canned_checks.CheckCIPDPackages(
        input_api, presubmit.OutputApi,
        platforms=['foo-bar', 'baz-qux'],
        packages={
          'foo/bar/baz/${platform}': 'version:ohaithere',
          'qux': 'version:kthxbye',
        })
    self.assertEqual(command.cmd,
        ['cipd', 'ensure-file-verify', '-ensure-file=-'])
    self.assertEqual(command.stdin, content)
    self.assertEqual(command.kwargs, {
        'stdin': subprocess.PIPE,
        'stdout': subprocess.PIPE,
        'stderr': subprocess.STDOUT,
    })
  def testCheckCIPDClientDigests(self):
    """CheckCIPDClientDigests builds a selfupdate-roll -check command."""
    input_api = self.MockInputApi(None, False)
    input_api.verbose = True
    command = presubmit_canned_checks.CheckCIPDClientDigests(
        input_api, presubmit.OutputApi, client_version_file='ver')
    self.assertEqual(command.cmd, [
        'cipd', 'selfupdate-roll', '-check', '-version-file', 'ver',
        '-log-level', 'debug',
    ])
  def testCannedCheckVPythonSpec(self):
    """CheckVPythonSpec emits one vpython verify command per .vpython file."""
    change = presubmit.Change('a', 'b', self.fake_root_dir, None, 0, 0, None)
    input_api = self.MockInputApi(change, False)
    affected_file = mock.MagicMock(presubmit.GitAffectedFile)
    affected_file.AbsoluteLocalPath.return_value = '/path1/to/.vpython'
    input_api.AffectedTestableFiles.return_value = [affected_file]
    commands = presubmit_canned_checks.CheckVPythonSpec(
        input_api, presubmit.OutputApi)
    self.assertEqual(len(commands), 1)
    self.assertEqual(commands[0].name, 'Verify /path1/to/.vpython')
    self.assertEqual(commands[0].cmd, [
        'vpython',
        '-vpython-spec', '/path1/to/.vpython',
        '-vpython-tool', 'verify'
    ])
    self.assertDictEqual(
        commands[0].kwargs,
        {
            'stderr': input_api.subprocess.STDOUT,
            'stdout': input_api.subprocess.PIPE,
            'stdin': input_api.subprocess.PIPE,
        })
    self.assertEqual(commands[0].message, presubmit.OutputApi.PresubmitError)
    self.assertIsNone(commands[0].info)
class ThreadPoolTest(unittest.TestCase):
  """Tests for presubmit.ThreadPool error propagation."""
  def setUp(self):
    super(ThreadPoolTest, self).setUp()
    # Patch process spawning, signal handling and the clock for determinism.
    mock.patch('subprocess2.Popen').start()
    mock.patch('presubmit_support.sigint_handler').start()
    mock.patch('presubmit_support.time_time', return_value=0).start()
    presubmit.sigint_handler.wait.return_value = ('stdout', '')
    self.addCleanup(mock.patch.stopall)
  def testSurfaceExceptions(self):
    """Exceptions raised by Popen and non-zero exits all surface as messages."""
    def FakePopen(cmd, **kwargs):
      if cmd[0] == '3':
        raise TypeError('TypeError')
      if cmd[0] == '4':
        raise OSError('OSError')
      if cmd[0] == '5':
        return mock.Mock(returncode=1)
      return mock.Mock(returncode=0)
    subprocess.Popen.side_effect = FakePopen
    mock_tests = [
        presubmit.CommandData(
            name=str(i),
            cmd=[str(i)],
            kwargs={},
            message=lambda x: x,
        )
        for i in range(10)
    ]
    t = presubmit.ThreadPool(1)
    t.AddTests(mock_tests)
    # Tests 3, 4 (exceptions) and 5 (returncode 1) must each produce a message.
    messages = sorted(t.RunAsync())
    self.assertEqual(3, len(messages))
    self.assertIn(
        '3\n3 exec failure (0.00s)\nTraceback (most recent call last):',
        messages[0])
    self.assertIn(
        '4\n4 exec failure (0.00s)\nTraceback (most recent call last):',
        messages[1])
    self.assertEqual('5\n5 (0.00s) failed\nstdout', messages[2])
if __name__ == '__main__':
  # unittest is necessarily already imported at module level (it is needed at
  # class-definition time, e.g. for unittest.TestCase above), so the previous
  # redundant local `import unittest` has been dropped.
  unittest.main()
|
import datetime
import urllib.request
import math
# Months to fetch for each year.  NOTE(review): only the January and July
# editions are retrieved -- presumably the reports are published semiannually;
# confirm against the DOL site before extending.
months = ['January', 'July']
def urlrange(year):
    """Return the decade bucket for *year* as used in the DOL URL path,
    e.g. 1973 -> "1970-1979"."""
    decade_start = (year // 10) * 10
    return f"{decade_start}-{decade_start + 9}"
# Download every semiannual report PDF from 1970 through 2017.
# Fix: `filename` was computed but never used -- the URL and destination
# contained a literal "(unknown)" placeholder, so every request would have
# failed (or clobbered a single file).  Both now interpolate {filename}.
for year in range(1970, 2018):
    for month in months:
        filename = f"{month}{year}.pdf"
        # The server buckets reports by decade, e.g. .../1970-1979/January1970.pdf.
        url = f"https://workforcesecurity.doleta.gov/unemploy/content/sigpros/{urlrange(year)}/{filename}"
        export = f"../data/pdf/{filename}"
        # NOTE(review): no error handling -- a missing report aborts the run.
        urllib.request.urlretrieve(url, export)
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.12
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
# Locate and load the compiled extension module `_pydart2_api`, covering the
# package-relative vs. top-level and Python-version differences SWIG supports.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info >= (2, 7, 0):
    # Python >= 2.7: use importlib; try the package-qualified name first,
    # then fall back to a top-level `_pydart2_api`.
    def swig_import_helper():
        import importlib
        pkg = __name__.rpartition('.')[0]
        mname = '.'.join((pkg, '_pydart2_api')).lstrip('.')
        try:
            return importlib.import_module(mname)
        except ImportError:
            return importlib.import_module('_pydart2_api')
    _pydart2_api = swig_import_helper()
    del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
    # Python 2.6: importlib.import_module is not sufficient; use `imp`.
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_pydart2_api', [dirname(__file__)])
        except ImportError:
            import _pydart2_api
            return _pydart2_api
        try:
            _mod = imp.load_module('_pydart2_api', fp, pathname, description)
        finally:
            # imp.find_module may return an open file handle; always close it.
            if fp is not None:
                fp.close()
        return _mod
    _pydart2_api = swig_import_helper()
    del swig_import_helper
else:
    # Very old Pythons: plain import only.
    import _pydart2_api
del _swig_python_version_info
# Compatibility shims for ancient Pythons, emitted unconditionally by SWIG.
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
# Expose the builtins module under its Python 2 name, used by the code below.
try:
    import builtins as __builtin__
except ImportError:
    import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    """Set attribute `name` on a SWIG proxy via the generated setter table.

    When `static` is true, refuse to create attributes that have no
    generated setter (raises AttributeError).
    """
    if (name == "thisown"):
        # Ownership flag is delegated to the underlying SwigPyObject.
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # Dynamic variant: new attributes may be added to the proxy (static=0).
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
    """Look up attribute `name` on a SWIG proxy via the generated getter table."""
    if (name == "thisown"):
        # Ownership flag is delegated to the underlying SwigPyObject.
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
    """repr() for proxies; embeds the wrapped C object's repr when available."""
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Probe for new-style class support (always present on any modern Python);
# `_newclass` selects between object.__setattr__ and __dict__ writes above.
try:
    _object = object
    _newclass = 1
except __builtin__.Exception:
    class _object:
        pass
    _newclass = 0
class SwigPyIterator(_object):
    """Abstract SWIG-generated proxy for C++ iterators.

    Instances are only produced by container methods (e.g. IntVector.iterator);
    the constructor always raises.  All methods delegate to _pydart2_api.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _pydart2_api.delete_SwigPyIterator
    __del__ = lambda self: None
    def value(self) -> "PyObject *":
        return _pydart2_api.SwigPyIterator_value(self)
    def incr(self, n: 'size_t'=1) -> "swig::SwigPyIterator *":
        return _pydart2_api.SwigPyIterator_incr(self, n)
    def decr(self, n: 'size_t'=1) -> "swig::SwigPyIterator *":
        return _pydart2_api.SwigPyIterator_decr(self, n)
    def distance(self, x: 'SwigPyIterator') -> "ptrdiff_t":
        return _pydart2_api.SwigPyIterator_distance(self, x)
    def equal(self, x: 'SwigPyIterator') -> "bool":
        return _pydart2_api.SwigPyIterator_equal(self, x)
    def copy(self) -> "swig::SwigPyIterator *":
        return _pydart2_api.SwigPyIterator_copy(self)
    def next(self) -> "PyObject *":
        return _pydart2_api.SwigPyIterator_next(self)
    def __next__(self) -> "PyObject *":
        return _pydart2_api.SwigPyIterator___next__(self)
    def previous(self) -> "PyObject *":
        return _pydart2_api.SwigPyIterator_previous(self)
    def advance(self, n: 'ptrdiff_t') -> "swig::SwigPyIterator *":
        return _pydart2_api.SwigPyIterator_advance(self, n)
    def __eq__(self, x: 'SwigPyIterator') -> "bool":
        return _pydart2_api.SwigPyIterator___eq__(self, x)
    def __ne__(self, x: 'SwigPyIterator') -> "bool":
        return _pydart2_api.SwigPyIterator___ne__(self, x)
    def __iadd__(self, n: 'ptrdiff_t') -> "swig::SwigPyIterator &":
        return _pydart2_api.SwigPyIterator___iadd__(self, n)
    def __isub__(self, n: 'ptrdiff_t') -> "swig::SwigPyIterator &":
        return _pydart2_api.SwigPyIterator___isub__(self, n)
    def __add__(self, n: 'ptrdiff_t') -> "swig::SwigPyIterator *":
        return _pydart2_api.SwigPyIterator___add__(self, n)
    def __sub__(self, *args) -> "ptrdiff_t":
        return _pydart2_api.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self
# Register the proxy class with the C extension.
SwigPyIterator_swigregister = _pydart2_api.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
class IntVector(_object):
    """SWIG-generated proxy for std::vector<int> with a list-like interface.

    Supports len(), indexing/slicing, iteration, append/pop, plus the usual
    std::vector methods.  All operations delegate to _pydart2_api.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, IntVector, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, IntVector, name)
    __repr__ = _swig_repr
    def iterator(self) -> "swig::SwigPyIterator *":
        return _pydart2_api.IntVector_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self) -> "bool":
        return _pydart2_api.IntVector___nonzero__(self)
    def __bool__(self) -> "bool":
        return _pydart2_api.IntVector___bool__(self)
    def __len__(self) -> "std::vector< int >::size_type":
        return _pydart2_api.IntVector___len__(self)
    def __getslice__(self, i: 'std::vector< int >::difference_type', j: 'std::vector< int >::difference_type') -> "std::vector< int,std::allocator< int > > *":
        return _pydart2_api.IntVector___getslice__(self, i, j)
    def __setslice__(self, *args) -> "void":
        return _pydart2_api.IntVector___setslice__(self, *args)
    def __delslice__(self, i: 'std::vector< int >::difference_type', j: 'std::vector< int >::difference_type') -> "void":
        return _pydart2_api.IntVector___delslice__(self, i, j)
    def __delitem__(self, *args) -> "void":
        return _pydart2_api.IntVector___delitem__(self, *args)
    def __getitem__(self, *args) -> "std::vector< int >::value_type const &":
        return _pydart2_api.IntVector___getitem__(self, *args)
    def __setitem__(self, *args) -> "void":
        return _pydart2_api.IntVector___setitem__(self, *args)
    def pop(self) -> "std::vector< int >::value_type":
        return _pydart2_api.IntVector_pop(self)
    def append(self, x: 'std::vector< int >::value_type const &') -> "void":
        return _pydart2_api.IntVector_append(self, x)
    def empty(self) -> "bool":
        return _pydart2_api.IntVector_empty(self)
    def size(self) -> "std::vector< int >::size_type":
        return _pydart2_api.IntVector_size(self)
    def swap(self, v: 'IntVector') -> "void":
        return _pydart2_api.IntVector_swap(self, v)
    def begin(self) -> "std::vector< int >::iterator":
        return _pydart2_api.IntVector_begin(self)
    def end(self) -> "std::vector< int >::iterator":
        return _pydart2_api.IntVector_end(self)
    def rbegin(self) -> "std::vector< int >::reverse_iterator":
        return _pydart2_api.IntVector_rbegin(self)
    def rend(self) -> "std::vector< int >::reverse_iterator":
        return _pydart2_api.IntVector_rend(self)
    def clear(self) -> "void":
        return _pydart2_api.IntVector_clear(self)
    def get_allocator(self) -> "std::vector< int >::allocator_type":
        return _pydart2_api.IntVector_get_allocator(self)
    def pop_back(self) -> "void":
        return _pydart2_api.IntVector_pop_back(self)
    def erase(self, *args) -> "std::vector< int >::iterator":
        return _pydart2_api.IntVector_erase(self, *args)
    def __init__(self, *args):
        # Construct the underlying std::vector<int>; on older SWIG runtimes
        # `self.this` may already be a list to append to.
        this = _pydart2_api.new_IntVector(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def push_back(self, x: 'std::vector< int >::value_type const &') -> "void":
        return _pydart2_api.IntVector_push_back(self, x)
    def front(self) -> "std::vector< int >::value_type const &":
        return _pydart2_api.IntVector_front(self)
    def back(self) -> "std::vector< int >::value_type const &":
        return _pydart2_api.IntVector_back(self)
    def assign(self, n: 'std::vector< int >::size_type', x: 'std::vector< int >::value_type const &') -> "void":
        return _pydart2_api.IntVector_assign(self, n, x)
    def resize(self, *args) -> "void":
        return _pydart2_api.IntVector_resize(self, *args)
    def insert(self, *args) -> "void":
        return _pydart2_api.IntVector_insert(self, *args)
    def reserve(self, n: 'std::vector< int >::size_type') -> "void":
        return _pydart2_api.IntVector_reserve(self, n)
    def capacity(self) -> "std::vector< int >::size_type":
        return _pydart2_api.IntVector_capacity(self)
    __swig_destroy__ = _pydart2_api.delete_IntVector
    __del__ = lambda self: None
# Register the proxy class with the C extension.
IntVector_swigregister = _pydart2_api.IntVector_swigregister
IntVector_swigregister(IntVector)
# ---------------------------------------------------------------------------
# Flat C-API wrappers.  SWIG emits each function twice: a typed Python stub
# followed immediately by `name = _pydart2_api.name`, which rebinds the name
# to the raw C function (the stub exists only for introspection/docs).
# `wid`/`skid`/`mid` are integer world/skeleton/marker handles.
# ---------------------------------------------------------------------------
# Runtime init / verbosity:
def init(verbose: 'bool'=True) -> "void":
    return _pydart2_api.init(verbose)
init = _pydart2_api.init
def destroy() -> "void":
    return _pydart2_api.destroy()
destroy = _pydart2_api.destroy
def setVerbose(verbose: 'bool'=True) -> "void":
    return _pydart2_api.setVerbose(verbose)
setVerbose = _pydart2_api.setVerbose
def getVerbose() -> "bool":
    return _pydart2_api.getVerbose()
getVerbose = _pydart2_api.getVerbose
# Marker accessors:
def marker__getBodyNode(wid: 'int', skid: 'int', mid: 'int') -> "int":
    return _pydart2_api.marker__getBodyNode(wid, skid, mid)
marker__getBodyNode = _pydart2_api.marker__getBodyNode
def marker__getLocalPosition(wid: 'int', skid: 'int', mid: 'int') -> "double [3]":
    return _pydart2_api.marker__getLocalPosition(wid, skid, mid)
marker__getLocalPosition = _pydart2_api.marker__getLocalPosition
def marker__setLocalPosition(wid: 'int', skid: 'int', mid: 'int', inv3: 'double [3]') -> "void":
    return _pydart2_api.marker__setLocalPosition(wid, skid, mid, inv3)
marker__setLocalPosition = _pydart2_api.marker__setLocalPosition
def marker__getWorldPosition(wid: 'int', skid: 'int', mid: 'int') -> "double [3]":
    return _pydart2_api.marker__getWorldPosition(wid, skid, mid)
marker__getWorldPosition = _pydart2_api.marker__getWorldPosition
def marker__render(wid: 'int', skid: 'int', mid: 'int') -> "void":
    return _pydart2_api.marker__render(wid, skid, mid)
marker__render = _pydart2_api.marker__render
# Collision-result accessors and constraints:
def collisionresult__getNumContacts(wid: 'int') -> "int":
    return _pydart2_api.collisionresult__getNumContacts(wid)
collisionresult__getNumContacts = _pydart2_api.collisionresult__getNumContacts
def collisionresult__getContacts(wid: 'int', outv: 'double *') -> "int":
    return _pydart2_api.collisionresult__getContacts(wid, outv)
collisionresult__getContacts = _pydart2_api.collisionresult__getContacts
def collisionresult__getCollidingBodyNodes(wid: 'int') -> "std::vector< int,std::allocator< int > >":
    return _pydart2_api.collisionresult__getCollidingBodyNodes(wid)
collisionresult__getCollidingBodyNodes = _pydart2_api.collisionresult__getCollidingBodyNodes
def collisionresult__renderContact(inv6: 'double [6]', size: 'double', scale: 'double') -> "void":
    return _pydart2_api.collisionresult__renderContact(inv6, size, scale)
collisionresult__renderContact = _pydart2_api.collisionresult__renderContact
def addBallJointConstraint(wid: 'int', skid1: 'int', bid1: 'int', skid2: 'int', bid2: 'int', inv3: 'double [3]') -> "int":
    return _pydart2_api.addBallJointConstraint(wid, skid1, bid1, skid2, bid2, inv3)
addBallJointConstraint = _pydart2_api.addBallJointConstraint
# World lifecycle and simulation-stepping wrappers (same stub-then-rebind
# pattern as above; `wid` is an integer world handle).
def createWorld(timestep: 'double') -> "int":
    return _pydart2_api.createWorld(timestep)
createWorld = _pydart2_api.createWorld
def createWorldFromSkel(path: 'char const *const') -> "int":
    return _pydart2_api.createWorldFromSkel(path)
createWorldFromSkel = _pydart2_api.createWorldFromSkel
def destroyWorld(wid: 'int') -> "void":
    return _pydart2_api.destroyWorld(wid)
destroyWorld = _pydart2_api.destroyWorld
def world__addSkeleton(wid: 'int', path: 'char const *const') -> "int":
    return _pydart2_api.world__addSkeleton(wid, path)
world__addSkeleton = _pydart2_api.world__addSkeleton
def world__getNumSkeletons(wid: 'int') -> "int":
    return _pydart2_api.world__getNumSkeletons(wid)
world__getNumSkeletons = _pydart2_api.world__getNumSkeletons
def world__reset(wid: 'int') -> "void":
    return _pydart2_api.world__reset(wid)
world__reset = _pydart2_api.world__reset
def world__step(wid: 'int') -> "void":
    return _pydart2_api.world__step(wid)
world__step = _pydart2_api.world__step
def world__checkCollision(wid: 'int') -> "void":
    return _pydart2_api.world__checkCollision(wid)
world__checkCollision = _pydart2_api.world__checkCollision
def world__render(wid: 'int') -> "void":
    return _pydart2_api.world__render(wid)
world__render = _pydart2_api.world__render
def world__setTimeStep(wid: 'int', _timeStep: 'double') -> "void":
    return _pydart2_api.world__setTimeStep(wid, _timeStep)
world__setTimeStep = _pydart2_api.world__setTimeStep
def world__getTimeStep(wid: 'int') -> "double":
    return _pydart2_api.world__getTimeStep(wid)
world__getTimeStep = _pydart2_api.world__getTimeStep
def world__setTime(wid: 'int', _time: 'double') -> "void":
    return _pydart2_api.world__setTime(wid, _time)
world__setTime = _pydart2_api.world__setTime
def world__getTime(wid: 'int') -> "double":
    return _pydart2_api.world__getTime(wid)
world__getTime = _pydart2_api.world__getTime
def world__getSimFrames(wid: 'int') -> "int":
    return _pydart2_api.world__getSimFrames(wid)
world__getSimFrames = _pydart2_api.world__getSimFrames
def world__getIndex(wid: 'int', _index: 'int') -> "int":
    return _pydart2_api.world__getIndex(wid, _index)
world__getIndex = _pydart2_api.world__getIndex
def world__setGravity(wid: 'int', inv3: 'double [3]') -> "void":
    return _pydart2_api.world__setGravity(wid, inv3)
world__setGravity = _pydart2_api.world__setGravity
def world__getGravity(wid: 'int') -> "double [3]":
    return _pydart2_api.world__getGravity(wid)
world__getGravity = _pydart2_api.world__getGravity
def world__setCollisionDetector(wid: 'int', detector_type: 'int') -> "void":
    return _pydart2_api.world__setCollisionDetector(wid, detector_type)
world__setCollisionDetector = _pydart2_api.world__setCollisionDetector
def world__getCollisionDetector(wid: 'int') -> "int":
    return _pydart2_api.world__getCollisionDetector(wid)
world__getCollisionDetector = _pydart2_api.world__getCollisionDetector
def world__removeAllConstraints(wid: 'int') -> "void":
    return _pydart2_api.world__removeAllConstraints(wid)
world__removeAllConstraints = _pydart2_api.world__removeAllConstraints
# Skeleton-level wrappers (`skid` is an integer skeleton handle; `inv`/`outv`
# are caller-supplied double buffers filled or read by the C side).
def skeleton__render(wid: 'int', skid: 'int') -> "void":
    return _pydart2_api.skeleton__render(wid, skid)
skeleton__render = _pydart2_api.skeleton__render
def skeleton__renderWithColor(wid: 'int', skid: 'int', inv4: 'double [4]') -> "void":
    return _pydart2_api.skeleton__renderWithColor(wid, skid, inv4)
skeleton__renderWithColor = _pydart2_api.skeleton__renderWithColor
def skeleton__getName(wid: 'int', skid: 'int') -> "char const *":
    return _pydart2_api.skeleton__getName(wid, skid)
skeleton__getName = _pydart2_api.skeleton__getName
def skeleton__getMass(wid: 'int', skid: 'int') -> "double":
    return _pydart2_api.skeleton__getMass(wid, skid)
skeleton__getMass = _pydart2_api.skeleton__getMass
def skeleton__isMobile(wid: 'int', skid: 'int') -> "bool":
    return _pydart2_api.skeleton__isMobile(wid, skid)
skeleton__isMobile = _pydart2_api.skeleton__isMobile
def skeleton__setMobile(wid: 'int', skid: 'int', mobile: 'bool') -> "void":
    return _pydart2_api.skeleton__setMobile(wid, skid, mobile)
skeleton__setMobile = _pydart2_api.skeleton__setMobile
def skeleton__getSelfCollisionCheck(wid: 'int', skid: 'int') -> "bool":
    return _pydart2_api.skeleton__getSelfCollisionCheck(wid, skid)
skeleton__getSelfCollisionCheck = _pydart2_api.skeleton__getSelfCollisionCheck
def skeleton__setSelfCollisionCheck(wid: 'int', skid: 'int', enable: 'int') -> "void":
    return _pydart2_api.skeleton__setSelfCollisionCheck(wid, skid, enable)
skeleton__setSelfCollisionCheck = _pydart2_api.skeleton__setSelfCollisionCheck
def skeleton__getAdjacentBodyCheck(wid: 'int', skid: 'int') -> "bool":
    return _pydart2_api.skeleton__getAdjacentBodyCheck(wid, skid)
skeleton__getAdjacentBodyCheck = _pydart2_api.skeleton__getAdjacentBodyCheck
def skeleton__setAdjacentBodyCheck(wid: 'int', skid: 'int', enable: 'int') -> "void":
    return _pydart2_api.skeleton__setAdjacentBodyCheck(wid, skid, enable)
skeleton__setAdjacentBodyCheck = _pydart2_api.skeleton__setAdjacentBodyCheck
def skeleton__setRootJointToTransAndEuler(wid: 'int', skid: 'int') -> "void":
    return _pydart2_api.skeleton__setRootJointToTransAndEuler(wid, skid)
skeleton__setRootJointToTransAndEuler = _pydart2_api.skeleton__setRootJointToTransAndEuler
def skeleton__setRootJointToWeld(wid: 'int', skid: 'int') -> "void":
    return _pydart2_api.skeleton__setRootJointToWeld(wid, skid)
skeleton__setRootJointToWeld = _pydart2_api.skeleton__setRootJointToWeld
def skeleton__getNumBodyNodes(wid: 'int', skid: 'int') -> "int":
    return _pydart2_api.skeleton__getNumBodyNodes(wid, skid)
skeleton__getNumBodyNodes = _pydart2_api.skeleton__getNumBodyNodes
def skeleton__getNumJoints(wid: 'int', skid: 'int') -> "int":
    return _pydart2_api.skeleton__getNumJoints(wid, skid)
skeleton__getNumJoints = _pydart2_api.skeleton__getNumJoints
def skeleton__getNumDofs(wid: 'int', skid: 'int') -> "int":
    return _pydart2_api.skeleton__getNumDofs(wid, skid)
skeleton__getNumDofs = _pydart2_api.skeleton__getNumDofs
def skeleton__getNumMarkers(wid: 'int', skid: 'int') -> "int":
    return _pydart2_api.skeleton__getNumMarkers(wid, skid)
skeleton__getNumMarkers = _pydart2_api.skeleton__getNumMarkers
def skeleton__getPositions(wid: 'int', skid: 'int', outv: 'double *') -> "int":
    return _pydart2_api.skeleton__getPositions(wid, skid, outv)
skeleton__getPositions = _pydart2_api.skeleton__getPositions
def skeleton__setPositions(wid: 'int', skid: 'int', inv: 'double *') -> "void":
    return _pydart2_api.skeleton__setPositions(wid, skid, inv)
skeleton__setPositions = _pydart2_api.skeleton__setPositions
def skeleton__getVelocities(wid: 'int', skid: 'int', outv: 'double *') -> "int":
    return _pydart2_api.skeleton__getVelocities(wid, skid, outv)
skeleton__getVelocities = _pydart2_api.skeleton__getVelocities
def skeleton__setVelocities(wid: 'int', skid: 'int', inv: 'double *') -> "void":
    return _pydart2_api.skeleton__setVelocities(wid, skid, inv)
skeleton__setVelocities = _pydart2_api.skeleton__setVelocities
def skeleton__getAccelerations(wid: 'int', skid: 'int', outv: 'double *') -> "int":
    return _pydart2_api.skeleton__getAccelerations(wid, skid, outv)
skeleton__getAccelerations = _pydart2_api.skeleton__getAccelerations
def skeleton__setForces(wid: 'int', skid: 'int', inv: 'double *') -> "void":
    return _pydart2_api.skeleton__setForces(wid, skid, inv)
skeleton__setForces = _pydart2_api.skeleton__setForces
def skeleton__getPositionDifferences(wid: 'int', skid: 'int', inv1: 'double *', inv2: 'double *', outv: 'double *') -> "int":
    return _pydart2_api.skeleton__getPositionDifferences(wid, skid, inv1, inv2, outv)
skeleton__getPositionDifferences = _pydart2_api.skeleton__getPositionDifferences
def skeleton__getVelocityDifferences(wid: 'int', skid: 'int', inv1: 'double *', inv2: 'double *', outv: 'double *') -> "int":
    return _pydart2_api.skeleton__getVelocityDifferences(wid, skid, inv1, inv2, outv)
skeleton__getVelocityDifferences = _pydart2_api.skeleton__getVelocityDifferences
def skeleton__getPositionLowerLimits(wid: 'int', skid: 'int', outv: 'double *') -> "int":
    return _pydart2_api.skeleton__getPositionLowerLimits(wid, skid, outv)
skeleton__getPositionLowerLimits = _pydart2_api.skeleton__getPositionLowerLimits
def skeleton__getPositionUpperLimits(wid: 'int', skid: 'int', outv: 'double *') -> "int":
    return _pydart2_api.skeleton__getPositionUpperLimits(wid, skid, outv)
skeleton__getPositionUpperLimits = _pydart2_api.skeleton__getPositionUpperLimits
def skeleton__getForceLowerLimits(wid: 'int', skid: 'int', outv: 'double *') -> "int":
    return _pydart2_api.skeleton__getForceLowerLimits(wid, skid, outv)
skeleton__getForceLowerLimits = _pydart2_api.skeleton__getForceLowerLimits
def skeleton__getForceUpperLimits(wid: 'int', skid: 'int', outv: 'double *') -> "int":
    return _pydart2_api.skeleton__getForceUpperLimits(wid, skid, outv)
skeleton__getForceUpperLimits = _pydart2_api.skeleton__getForceUpperLimits
def skeleton__getCOM(wid: 'int', skid: 'int') -> "double [3]":
    return _pydart2_api.skeleton__getCOM(wid, skid)
skeleton__getCOM = _pydart2_api.skeleton__getCOM
def skeleton__getCOMLinearVelocity(wid: 'int', skid: 'int') -> "double [3]":
    return _pydart2_api.skeleton__getCOMLinearVelocity(wid, skid)
skeleton__getCOMLinearVelocity = _pydart2_api.skeleton__getCOMLinearVelocity
def skeleton__getCOMLinearAcceleration(wid: 'int', skid: 'int') -> "double [3]":
    return _pydart2_api.skeleton__getCOMLinearAcceleration(wid, skid)
skeleton__getCOMLinearAcceleration = _pydart2_api.skeleton__getCOMLinearAcceleration
def skeleton__getMassMatrix(wid: 'int', skid: 'int', outm: 'double *') -> "void":
    return _pydart2_api.skeleton__getMassMatrix(wid, skid, outm)
skeleton__getMassMatrix = _pydart2_api.skeleton__getMassMatrix
def skeleton__getCoriolisAndGravityForces(wid: 'int', skid: 'int', outv: 'double *') -> "int":
    return _pydart2_api.skeleton__getCoriolisAndGravityForces(wid, skid, outv)
skeleton__getCoriolisAndGravityForces = _pydart2_api.skeleton__getCoriolisAndGravityForces
def skeleton__getConstraintForces(wid: 'int', skid: 'int', outv: 'double *') -> "int":
    return _pydart2_api.skeleton__getConstraintForces(wid, skid, outv)
skeleton__getConstraintForces = _pydart2_api.skeleton__getConstraintForces
# BodyNode-level wrappers (`bid` is an integer body-node handle within the
# skeleton; same stub-then-rebind pattern as above).
def bodynode__getName(wid: 'int', skid: 'int', bid: 'int') -> "char const *":
    return _pydart2_api.bodynode__getName(wid, skid, bid)
bodynode__getName = _pydart2_api.bodynode__getName
def bodynode__getParentBodyNode(wid: 'int', skid: 'int', bid: 'int') -> "int":
    return _pydart2_api.bodynode__getParentBodyNode(wid, skid, bid)
bodynode__getParentBodyNode = _pydart2_api.bodynode__getParentBodyNode
def bodynode__getNumChildBodyNodes(wid: 'int', skid: 'int', bid: 'int') -> "int":
    return _pydart2_api.bodynode__getNumChildBodyNodes(wid, skid, bid)
bodynode__getNumChildBodyNodes = _pydart2_api.bodynode__getNumChildBodyNodes
def bodynode__getChildBodyNode(wid: 'int', skid: 'int', bid: 'int', _index: 'int') -> "int":
    return _pydart2_api.bodynode__getChildBodyNode(wid, skid, bid, _index)
bodynode__getChildBodyNode = _pydart2_api.bodynode__getChildBodyNode
def bodynode__getParentJoint(wid: 'int', skid: 'int', bid: 'int') -> "int":
    return _pydart2_api.bodynode__getParentJoint(wid, skid, bid)
bodynode__getParentJoint = _pydart2_api.bodynode__getParentJoint
def bodynode__getNumChildJoints(wid: 'int', skid: 'int', bid: 'int') -> "int":
    return _pydart2_api.bodynode__getNumChildJoints(wid, skid, bid)
bodynode__getNumChildJoints = _pydart2_api.bodynode__getNumChildJoints
def bodynode__getChildJoint(wid: 'int', skid: 'int', bid: 'int', _index: 'int') -> "int":
    return _pydart2_api.bodynode__getChildJoint(wid, skid, bid, _index)
bodynode__getChildJoint = _pydart2_api.bodynode__getChildJoint
def bodynode__getNumDependentDofs(wid: 'int', skid: 'int', bid: 'int') -> "int":
    return _pydart2_api.bodynode__getNumDependentDofs(wid, skid, bid)
bodynode__getNumDependentDofs = _pydart2_api.bodynode__getNumDependentDofs
def bodynode__getDependentDof(wid: 'int', skid: 'int', bid: 'int', _index: 'int') -> "int":
    return _pydart2_api.bodynode__getDependentDof(wid, skid, bid, _index)
bodynode__getDependentDof = _pydart2_api.bodynode__getDependentDof
def bodynode__getNumShapeNodes(wid: 'int', skid: 'int', bid: 'int') -> "int":
    return _pydart2_api.bodynode__getNumShapeNodes(wid, skid, bid)
bodynode__getNumShapeNodes = _pydart2_api.bodynode__getNumShapeNodes
def bodynode__getIndexInSkeleton(wid: 'int', skid: 'int', bid: 'int') -> "int":
    return _pydart2_api.bodynode__getIndexInSkeleton(wid, skid, bid)
bodynode__getIndexInSkeleton = _pydart2_api.bodynode__getIndexInSkeleton
def bodynode__getIndexInTree(wid: 'int', skid: 'int', bid: 'int') -> "int":
    return _pydart2_api.bodynode__getIndexInTree(wid, skid, bid)
bodynode__getIndexInTree = _pydart2_api.bodynode__getIndexInTree
def bodynode__getTreeIndex(wid: 'int', skid: 'int', bid: 'int') -> "int":
    return _pydart2_api.bodynode__getTreeIndex(wid, skid, bid)
bodynode__getTreeIndex = _pydart2_api.bodynode__getTreeIndex
def bodynode__setGravityMode(wid: 'int', skid: 'int', bid: 'int', _gravityMode: 'bool') -> "void":
    return _pydart2_api.bodynode__setGravityMode(wid, skid, bid, _gravityMode)
bodynode__setGravityMode = _pydart2_api.bodynode__setGravityMode
def bodynode__getGravityMode(wid: 'int', skid: 'int', bid: 'int') -> "bool":
    return _pydart2_api.bodynode__getGravityMode(wid, skid, bid)
bodynode__getGravityMode = _pydart2_api.bodynode__getGravityMode
def bodynode__isCollidable(wid: 'int', skid: 'int', bid: 'int') -> "bool":
    return _pydart2_api.bodynode__isCollidable(wid, skid, bid)
bodynode__isCollidable = _pydart2_api.bodynode__isCollidable
def bodynode__setCollidable(wid: 'int', skid: 'int', bid: 'int', _isCollidable: 'bool') -> "void":
    return _pydart2_api.bodynode__setCollidable(wid, skid, bid, _isCollidable)
bodynode__setCollidable = _pydart2_api.bodynode__setCollidable
def bodynode__getMass(wid: 'int', skid: 'int', bid: 'int') -> "double":
    return _pydart2_api.bodynode__getMass(wid, skid, bid)
bodynode__getMass = _pydart2_api.bodynode__getMass
def bodynode__setMass(wid: 'int', skid: 'int', bid: 'int', mass: 'double') -> "void":
    return _pydart2_api.bodynode__setMass(wid, skid, bid, mass)
bodynode__setMass = _pydart2_api.bodynode__setMass
def bodynode__getInertia(wid: 'int', skid: 'int', bid: 'int') -> "double [3][3]":
    return _pydart2_api.bodynode__getInertia(wid, skid, bid)
bodynode__getInertia = _pydart2_api.bodynode__getInertia
def bodynode__setInertia(wid: 'int', skid: 'int', bid: 'int', inv33: 'double [3][3]') -> "void":
    return _pydart2_api.bodynode__setInertia(wid, skid, bid, inv33)
bodynode__setInertia = _pydart2_api.bodynode__setInertia
def bodynode__getLocalCOM(wid: 'int', skid: 'int', bid: 'int') -> "double [3]":
    return _pydart2_api.bodynode__getLocalCOM(wid, skid, bid)
bodynode__getLocalCOM = _pydart2_api.bodynode__getLocalCOM
def bodynode__getCOM(wid: 'int', skid: 'int', bid: 'int') -> "double [3]":
    return _pydart2_api.bodynode__getCOM(wid, skid, bid)
bodynode__getCOM = _pydart2_api.bodynode__getCOM
def bodynode__getCOMLinearVelocity(wid: 'int', skid: 'int', bid: 'int') -> "double [3]":
    return _pydart2_api.bodynode__getCOMLinearVelocity(wid, skid, bid)
bodynode__getCOMLinearVelocity = _pydart2_api.bodynode__getCOMLinearVelocity
def bodynode__getCOMSpatialVelocity(wid: 'int', skid: 'int', bid: 'int') -> "double [6]":
    return _pydart2_api.bodynode__getCOMSpatialVelocity(wid, skid, bid)
bodynode__getCOMSpatialVelocity = _pydart2_api.bodynode__getCOMSpatialVelocity
def bodynode__getCOMLinearAcceleration(wid: 'int', skid: 'int', bid: 'int') -> "double [3]":
    return _pydart2_api.bodynode__getCOMLinearAcceleration(wid, skid, bid)
bodynode__getCOMLinearAcceleration = _pydart2_api.bodynode__getCOMLinearAcceleration
def bodynode__getCOMSpatialAcceleration(wid: 'int', skid: 'int', bid: 'int') -> "double [6]":
    return _pydart2_api.bodynode__getCOMSpatialAcceleration(wid, skid, bid)
bodynode__getCOMSpatialAcceleration = _pydart2_api.bodynode__getCOMSpatialAcceleration
def bodynode__setFrictionCoeff(wid: 'int', skid: 'int', bid: 'int', _coeff: 'double') -> "void":
    return _pydart2_api.bodynode__setFrictionCoeff(wid, skid, bid, _coeff)
bodynode__setFrictionCoeff = _pydart2_api.bodynode__setFrictionCoeff
def bodynode__getFrictionCoeff(wid: 'int', skid: 'int', bid: 'int') -> "double":
    return _pydart2_api.bodynode__getFrictionCoeff(wid, skid, bid)
bodynode__getFrictionCoeff = _pydart2_api.bodynode__getFrictionCoeff
def bodynode__setRestitutionCoeff(wid: 'int', skid: 'int', bid: 'int', _coeff: 'double') -> "void":
    return _pydart2_api.bodynode__setRestitutionCoeff(wid, skid, bid, _coeff)
bodynode__setRestitutionCoeff = _pydart2_api.bodynode__setRestitutionCoeff
def bodynode__getRestitutionCoeff(wid: 'int', skid: 'int', bid: 'int') -> "double":
    return _pydart2_api.bodynode__getRestitutionCoeff(wid, skid, bid)
bodynode__getRestitutionCoeff = _pydart2_api.bodynode__getRestitutionCoeff
def bodynode__getTransform(wid: 'int', skid: 'int', bid: 'int') -> "double [4][4]":
    return _pydart2_api.bodynode__getTransform(wid, skid, bid)
bodynode__getTransform = _pydart2_api.bodynode__getTransform
def bodynode__getWorldTransform(wid: 'int', skid: 'int', bid: 'int') -> "double [4][4]":
    return _pydart2_api.bodynode__getWorldTransform(wid, skid, bid)
bodynode__getWorldTransform = _pydart2_api.bodynode__getWorldTransform
def bodynode__getRelativeTransform(wid: 'int', skid: 'int', bid: 'int') -> "double [4][4]":
    return _pydart2_api.bodynode__getRelativeTransform(wid, skid, bid)
bodynode__getRelativeTransform = _pydart2_api.bodynode__getRelativeTransform
def bodynode__addExtForce(wid: 'int', skid: 'int', bid: 'int', inv3: 'double [3]', inv3_2: 'double [3]', _isForceLocal: 'bool', _isOffsetLocal: 'bool') -> "void":
    return _pydart2_api.bodynode__addExtForce(wid, skid, bid, inv3, inv3_2, _isForceLocal, _isOffsetLocal)
bodynode__addExtForce = _pydart2_api.bodynode__addExtForce
def bodynode__setExtForce(wid: 'int', skid: 'int', bid: 'int', inv3: 'double [3]', inv3_2: 'double [3]', _isForceLocal: 'bool', _isOffsetLocal: 'bool') -> "void":
    return _pydart2_api.bodynode__setExtForce(wid, skid, bid, inv3, inv3_2, _isForceLocal, _isOffsetLocal)
bodynode__setExtForce = _pydart2_api.bodynode__setExtForce
def bodynode__addExtTorque(wid: 'int', skid: 'int', bid: 'int', inv3: 'double [3]', _isLocal: 'bool') -> "void":
    return _pydart2_api.bodynode__addExtTorque(wid, skid, bid, inv3, _isLocal)
bodynode__addExtTorque = _pydart2_api.bodynode__addExtTorque
def bodynode__setExtTorque(wid: 'int', skid: 'int', bid: 'int', inv3: 'double [3]', _isLocal: 'bool') -> "void":
    return _pydart2_api.bodynode__setExtTorque(wid, skid, bid, inv3, _isLocal)
bodynode__setExtTorque = _pydart2_api.bodynode__setExtTorque
def bodynode__getJacobian(wid: 'int', skid: 'int', bid: 'int', inv3: 'double [3]', outm: 'double *') -> "void":
    return _pydart2_api.bodynode__getJacobian(wid, skid, bid, inv3, outm)
bodynode__getJacobian = _pydart2_api.bodynode__getJacobian
def bodynode__getLinearJacobian(wid: 'int', skid: 'int', bid: 'int', inv3: 'double [3]', outm: 'double *') -> "void":
    return _pydart2_api.bodynode__getLinearJacobian(wid, skid, bid, inv3, outm)
bodynode__getLinearJacobian = _pydart2_api.bodynode__getLinearJacobian
def bodynode__getAngularJacobian(wid: 'int', skid: 'int', bid: 'int', outm: 'double *') -> "void":
    return _pydart2_api.bodynode__getAngularJacobian(wid, skid, bid, outm)
bodynode__getAngularJacobian = _pydart2_api.bodynode__getAngularJacobian
def bodynode__getWorldJacobian(wid: 'int', skid: 'int', bid: 'int', inv3: 'double [3]', outm: 'double *') -> "void":
    return _pydart2_api.bodynode__getWorldJacobian(wid, skid, bid, inv3, outm)
bodynode__getWorldJacobian = _pydart2_api.bodynode__getWorldJacobian
def bodynode__getLinearJacobianDeriv(wid: 'int', skid: 'int', bid: 'int', inv3: 'double [3]', outm: 'double *') -> "void":
    return _pydart2_api.bodynode__getLinearJacobianDeriv(wid, skid, bid, inv3, outm)
bodynode__getLinearJacobianDeriv = _pydart2_api.bodynode__getLinearJacobianDeriv
def bodynode__getAngularJacobianDeriv(wid: 'int', skid: 'int', bid: 'int', outm: 'double *') -> "void":
    return _pydart2_api.bodynode__getAngularJacobianDeriv(wid, skid, bid, outm)
bodynode__getAngularJacobianDeriv = _pydart2_api.bodynode__getAngularJacobianDeriv
# --- Degree-of-freedom (Dof) wrappers (SWIG-generated) ----------------------
# Same pattern as above: each typed stub is overwritten by the raw C entry
# point; keys are (world id, skeleton id, dof id).
def dof__getName(wid: 'int', skid: 'int', dofid: 'int') -> "char const *":
    return _pydart2_api.dof__getName(wid, skid, dofid)
dof__getName = _pydart2_api.dof__getName
def dof__getIndexInSkeleton(wid: 'int', skid: 'int', dofid: 'int') -> "int":
    return _pydart2_api.dof__getIndexInSkeleton(wid, skid, dofid)
dof__getIndexInSkeleton = _pydart2_api.dof__getIndexInSkeleton
def dof__getIndexInTree(wid: 'int', skid: 'int', dofid: 'int') -> "int":
    return _pydart2_api.dof__getIndexInTree(wid, skid, dofid)
dof__getIndexInTree = _pydart2_api.dof__getIndexInTree
def dof__getIndexInJoint(wid: 'int', skid: 'int', dofid: 'int') -> "int":
    return _pydart2_api.dof__getIndexInJoint(wid, skid, dofid)
dof__getIndexInJoint = _pydart2_api.dof__getIndexInJoint
def dof__getTreeIndex(wid: 'int', skid: 'int', dofid: 'int') -> "int":
    return _pydart2_api.dof__getTreeIndex(wid, skid, dofid)
dof__getTreeIndex = _pydart2_api.dof__getTreeIndex
def dof__getPosition(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getPosition(wid, skid, dofid)
dof__getPosition = _pydart2_api.dof__getPosition
def dof__setPosition(wid: 'int', skid: 'int', dofid: 'int', _position: 'double') -> "void":
    return _pydart2_api.dof__setPosition(wid, skid, dofid, _position)
dof__setPosition = _pydart2_api.dof__setPosition
def dof__getInitialPosition(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getInitialPosition(wid, skid, dofid)
dof__getInitialPosition = _pydart2_api.dof__getInitialPosition
def dof__setInitialPosition(wid: 'int', skid: 'int', dofid: 'int', _initial: 'double') -> "void":
    return _pydart2_api.dof__setInitialPosition(wid, skid, dofid, _initial)
dof__setInitialPosition = _pydart2_api.dof__setInitialPosition
def dof__hasPositionLimit(wid: 'int', skid: 'int', dofid: 'int') -> "bool":
    return _pydart2_api.dof__hasPositionLimit(wid, skid, dofid)
dof__hasPositionLimit = _pydart2_api.dof__hasPositionLimit
def dof__getPositionLowerLimit(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getPositionLowerLimit(wid, skid, dofid)
dof__getPositionLowerLimit = _pydart2_api.dof__getPositionLowerLimit
def dof__setPositionLowerLimit(wid: 'int', skid: 'int', dofid: 'int', _limit: 'double') -> "void":
    return _pydart2_api.dof__setPositionLowerLimit(wid, skid, dofid, _limit)
dof__setPositionLowerLimit = _pydart2_api.dof__setPositionLowerLimit
def dof__getPositionUpperLimit(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getPositionUpperLimit(wid, skid, dofid)
dof__getPositionUpperLimit = _pydart2_api.dof__getPositionUpperLimit
def dof__setPositionUpperLimit(wid: 'int', skid: 'int', dofid: 'int', _limit: 'double') -> "void":
    return _pydart2_api.dof__setPositionUpperLimit(wid, skid, dofid, _limit)
dof__setPositionUpperLimit = _pydart2_api.dof__setPositionUpperLimit
def dof__getVelocity(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getVelocity(wid, skid, dofid)
dof__getVelocity = _pydart2_api.dof__getVelocity
def dof__setVelocity(wid: 'int', skid: 'int', dofid: 'int', _velocity: 'double') -> "void":
    return _pydart2_api.dof__setVelocity(wid, skid, dofid, _velocity)
dof__setVelocity = _pydart2_api.dof__setVelocity
def dof__getInitialVelocity(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getInitialVelocity(wid, skid, dofid)
dof__getInitialVelocity = _pydart2_api.dof__getInitialVelocity
def dof__setInitialVelocity(wid: 'int', skid: 'int', dofid: 'int', _initial: 'double') -> "void":
    return _pydart2_api.dof__setInitialVelocity(wid, skid, dofid, _initial)
dof__setInitialVelocity = _pydart2_api.dof__setInitialVelocity
def dof__getVelocityLowerLimit(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getVelocityLowerLimit(wid, skid, dofid)
dof__getVelocityLowerLimit = _pydart2_api.dof__getVelocityLowerLimit
def dof__setVelocityLowerLimit(wid: 'int', skid: 'int', dofid: 'int', _limit: 'double') -> "void":
    return _pydart2_api.dof__setVelocityLowerLimit(wid, skid, dofid, _limit)
dof__setVelocityLowerLimit = _pydart2_api.dof__setVelocityLowerLimit
def dof__getVelocityUpperLimit(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getVelocityUpperLimit(wid, skid, dofid)
dof__getVelocityUpperLimit = _pydart2_api.dof__getVelocityUpperLimit
def dof__setVelocityUpperLimit(wid: 'int', skid: 'int', dofid: 'int', _limit: 'double') -> "void":
    return _pydart2_api.dof__setVelocityUpperLimit(wid, skid, dofid, _limit)
dof__setVelocityUpperLimit = _pydart2_api.dof__setVelocityUpperLimit
def dof__getSpringStiffness(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getSpringStiffness(wid, skid, dofid)
dof__getSpringStiffness = _pydart2_api.dof__getSpringStiffness
def dof__setSpringStiffness(wid: 'int', skid: 'int', dofid: 'int', _k: 'double') -> "void":
    return _pydart2_api.dof__setSpringStiffness(wid, skid, dofid, _k)
dof__setSpringStiffness = _pydart2_api.dof__setSpringStiffness
def dof__getRestPosition(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getRestPosition(wid, skid, dofid)
dof__getRestPosition = _pydart2_api.dof__getRestPosition
def dof__setRestPosition(wid: 'int', skid: 'int', dofid: 'int', _q0: 'double') -> "void":
    return _pydart2_api.dof__setRestPosition(wid, skid, dofid, _q0)
dof__setRestPosition = _pydart2_api.dof__setRestPosition
def dof__getDampingCoefficient(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getDampingCoefficient(wid, skid, dofid)
dof__getDampingCoefficient = _pydart2_api.dof__getDampingCoefficient
def dof__setDampingCoefficient(wid: 'int', skid: 'int', dofid: 'int', _coeff: 'double') -> "void":
    return _pydart2_api.dof__setDampingCoefficient(wid, skid, dofid, _coeff)
dof__setDampingCoefficient = _pydart2_api.dof__setDampingCoefficient
def dof__getCoulombFriction(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getCoulombFriction(wid, skid, dofid)
dof__getCoulombFriction = _pydart2_api.dof__getCoulombFriction
def dof__setCoulombFriction(wid: 'int', skid: 'int', dofid: 'int', _friction: 'double') -> "void":
    return _pydart2_api.dof__setCoulombFriction(wid, skid, dofid, _friction)
dof__setCoulombFriction = _pydart2_api.dof__setCoulombFriction
def dof__getConstraintImpulse(wid: 'int', skid: 'int', dofid: 'int') -> "double":
    return _pydart2_api.dof__getConstraintImpulse(wid, skid, dofid)
dof__getConstraintImpulse = _pydart2_api.dof__getConstraintImpulse
def dof__setConstraintImpulse(wid: 'int', skid: 'int', dofid: 'int', _impulse: 'double') -> "void":
    return _pydart2_api.dof__setConstraintImpulse(wid, skid, dofid, _impulse)
dof__setConstraintImpulse = _pydart2_api.dof__setConstraintImpulse
# --- Joint wrappers (SWIG-generated) ----------------------------------------
# Generic joint accessors, followed by the axis accessors of the concrete
# joint types (revolute, prismatic, universal, euler).  Same stub/overwrite
# pattern as above; keys are (world id, skeleton id, joint id).
def joint__getName(wid: 'int', skid: 'int', jid: 'int') -> "char const *":
    return _pydart2_api.joint__getName(wid, skid, jid)
joint__getName = _pydart2_api.joint__getName
def joint__setName(wid: 'int', skid: 'int', jid: 'int', _name: 'char const *', _renameDofs: 'bool') -> "char const *":
    return _pydart2_api.joint__setName(wid, skid, jid, _name, _renameDofs)
joint__setName = _pydart2_api.joint__setName
def joint__isKinematic(wid: 'int', skid: 'int', jid: 'int') -> "bool":
    return _pydart2_api.joint__isKinematic(wid, skid, jid)
joint__isKinematic = _pydart2_api.joint__isKinematic
def joint__isDynamic(wid: 'int', skid: 'int', jid: 'int') -> "bool":
    return _pydart2_api.joint__isDynamic(wid, skid, jid)
joint__isDynamic = _pydart2_api.joint__isDynamic
def joint__getType(wid: 'int', skid: 'int', jid: 'int') -> "char const *":
    return _pydart2_api.joint__getType(wid, skid, jid)
joint__getType = _pydart2_api.joint__getType
def joint__setActuatorType(wid: 'int', skid: 'int', jid: 'int', actuator_type: 'int') -> "void":
    return _pydart2_api.joint__setActuatorType(wid, skid, jid, actuator_type)
joint__setActuatorType = _pydart2_api.joint__setActuatorType
def joint__getActuatorType(wid: 'int', skid: 'int', jid: 'int') -> "int":
    return _pydart2_api.joint__getActuatorType(wid, skid, jid)
joint__getActuatorType = _pydart2_api.joint__getActuatorType
def joint__getParentBodyNode(wid: 'int', skid: 'int', jid: 'int') -> "int":
    return _pydart2_api.joint__getParentBodyNode(wid, skid, jid)
joint__getParentBodyNode = _pydart2_api.joint__getParentBodyNode
def joint__getChildBodyNode(wid: 'int', skid: 'int', jid: 'int') -> "int":
    return _pydart2_api.joint__getChildBodyNode(wid, skid, jid)
joint__getChildBodyNode = _pydart2_api.joint__getChildBodyNode
def joint__setTransformFromParentBodyNode(wid: 'int', skid: 'int', jid: 'int', inv44: 'double [4][4]') -> "void":
    return _pydart2_api.joint__setTransformFromParentBodyNode(wid, skid, jid, inv44)
joint__setTransformFromParentBodyNode = _pydart2_api.joint__setTransformFromParentBodyNode
def joint__setTransformFromChildBodyNode(wid: 'int', skid: 'int', jid: 'int', inv44: 'double [4][4]') -> "void":
    return _pydart2_api.joint__setTransformFromChildBodyNode(wid, skid, jid, inv44)
joint__setTransformFromChildBodyNode = _pydart2_api.joint__setTransformFromChildBodyNode
def joint__getTransformFromParentBodyNode(wid: 'int', skid: 'int', jid: 'int') -> "double [4][4]":
    return _pydart2_api.joint__getTransformFromParentBodyNode(wid, skid, jid)
joint__getTransformFromParentBodyNode = _pydart2_api.joint__getTransformFromParentBodyNode
def joint__getTransformFromChildBodyNode(wid: 'int', skid: 'int', jid: 'int') -> "double [4][4]":
    return _pydart2_api.joint__getTransformFromChildBodyNode(wid, skid, jid)
joint__getTransformFromChildBodyNode = _pydart2_api.joint__getTransformFromChildBodyNode
def joint__setPositionLimitEnforced(wid: 'int', skid: 'int', jid: 'int', _isPositionLimitEnforced: 'bool') -> "void":
    return _pydart2_api.joint__setPositionLimitEnforced(wid, skid, jid, _isPositionLimitEnforced)
joint__setPositionLimitEnforced = _pydart2_api.joint__setPositionLimitEnforced
def joint__isPositionLimitEnforced(wid: 'int', skid: 'int', jid: 'int') -> "bool":
    return _pydart2_api.joint__isPositionLimitEnforced(wid, skid, jid)
joint__isPositionLimitEnforced = _pydart2_api.joint__isPositionLimitEnforced
def joint__hasPositionLimit(wid: 'int', skid: 'int', jid: 'int', _index: 'int') -> "bool":
    return _pydart2_api.joint__hasPositionLimit(wid, skid, jid, _index)
joint__hasPositionLimit = _pydart2_api.joint__hasPositionLimit
def joint__getPositionLowerLimit(wid: 'int', skid: 'int', jid: 'int', _index: 'int') -> "double":
    return _pydart2_api.joint__getPositionLowerLimit(wid, skid, jid, _index)
joint__getPositionLowerLimit = _pydart2_api.joint__getPositionLowerLimit
def joint__setPositionLowerLimit(wid: 'int', skid: 'int', jid: 'int', _index: 'int', _position: 'double') -> "void":
    return _pydart2_api.joint__setPositionLowerLimit(wid, skid, jid, _index, _position)
joint__setPositionLowerLimit = _pydart2_api.joint__setPositionLowerLimit
def joint__getPositionUpperLimit(wid: 'int', skid: 'int', jid: 'int', _index: 'int') -> "double":
    return _pydart2_api.joint__getPositionUpperLimit(wid, skid, jid, _index)
joint__getPositionUpperLimit = _pydart2_api.joint__getPositionUpperLimit
def joint__setPositionUpperLimit(wid: 'int', skid: 'int', jid: 'int', _index: 'int', _position: 'double') -> "void":
    return _pydart2_api.joint__setPositionUpperLimit(wid, skid, jid, _index, _position)
joint__setPositionUpperLimit = _pydart2_api.joint__setPositionUpperLimit
def joint__getDof(wid: 'int', skid: 'int', jid: 'int', _index: 'int') -> "int":
    return _pydart2_api.joint__getDof(wid, skid, jid, _index)
joint__getDof = _pydart2_api.joint__getDof
def joint__getNumDofs(wid: 'int', skid: 'int', jid: 'int') -> "int":
    return _pydart2_api.joint__getNumDofs(wid, skid, jid)
joint__getNumDofs = _pydart2_api.joint__getNumDofs
def joint__getSpringStiffness(wid: 'int', skid: 'int', jid: 'int', _index: 'int') -> "double":
    return _pydart2_api.joint__getSpringStiffness(wid, skid, jid, _index)
joint__getSpringStiffness = _pydart2_api.joint__getSpringStiffness
def joint__setSpringStiffness(wid: 'int', skid: 'int', jid: 'int', _index: 'int', _k: 'double') -> "void":
    return _pydart2_api.joint__setSpringStiffness(wid, skid, jid, _index, _k)
joint__setSpringStiffness = _pydart2_api.joint__setSpringStiffness
def joint__getRestPosition(wid: 'int', skid: 'int', jid: 'int', _index: 'int') -> "double":
    return _pydart2_api.joint__getRestPosition(wid, skid, jid, _index)
joint__getRestPosition = _pydart2_api.joint__getRestPosition
def joint__setRestPosition(wid: 'int', skid: 'int', jid: 'int', _index: 'int', _q0: 'double') -> "void":
    return _pydart2_api.joint__setRestPosition(wid, skid, jid, _index, _q0)
joint__setRestPosition = _pydart2_api.joint__setRestPosition
def joint__getDampingCoefficient(wid: 'int', skid: 'int', jid: 'int', _index: 'int') -> "double":
    return _pydart2_api.joint__getDampingCoefficient(wid, skid, jid, _index)
joint__getDampingCoefficient = _pydart2_api.joint__getDampingCoefficient
def joint__setDampingCoefficient(wid: 'int', skid: 'int', jid: 'int', _index: 'int', _coeff: 'double') -> "void":
    return _pydart2_api.joint__setDampingCoefficient(wid, skid, jid, _index, _coeff)
joint__setDampingCoefficient = _pydart2_api.joint__setDampingCoefficient
def joint__getCoulombFriction(wid: 'int', skid: 'int', jid: 'int', _index: 'int') -> "double":
    return _pydart2_api.joint__getCoulombFriction(wid, skid, jid, _index)
joint__getCoulombFriction = _pydart2_api.joint__getCoulombFriction
def joint__setCoulombFriction(wid: 'int', skid: 'int', jid: 'int', _index: 'int', _friction: 'double') -> "void":
    return _pydart2_api.joint__setCoulombFriction(wid, skid, jid, _index, _friction)
joint__setCoulombFriction = _pydart2_api.joint__setCoulombFriction
def revolute_joint__getAxis(wid: 'int', skid: 'int', jid: 'int') -> "double [3]":
    return _pydart2_api.revolute_joint__getAxis(wid, skid, jid)
revolute_joint__getAxis = _pydart2_api.revolute_joint__getAxis
def revolute_joint__setAxis(wid: 'int', skid: 'int', jid: 'int', inv3: 'double [3]') -> "void":
    return _pydart2_api.revolute_joint__setAxis(wid, skid, jid, inv3)
revolute_joint__setAxis = _pydart2_api.revolute_joint__setAxis
def prismatic_joint__getAxis(wid: 'int', skid: 'int', jid: 'int') -> "double [3]":
    return _pydart2_api.prismatic_joint__getAxis(wid, skid, jid)
prismatic_joint__getAxis = _pydart2_api.prismatic_joint__getAxis
def prismatic_joint__setAxis(wid: 'int', skid: 'int', jid: 'int', inv3: 'double [3]') -> "void":
    return _pydart2_api.prismatic_joint__setAxis(wid, skid, jid, inv3)
prismatic_joint__setAxis = _pydart2_api.prismatic_joint__setAxis
def universal_joint__getAxis1(wid: 'int', skid: 'int', jid: 'int') -> "double [3]":
    return _pydart2_api.universal_joint__getAxis1(wid, skid, jid)
universal_joint__getAxis1 = _pydart2_api.universal_joint__getAxis1
def universal_joint__setAxis1(wid: 'int', skid: 'int', jid: 'int', inv3: 'double [3]') -> "void":
    return _pydart2_api.universal_joint__setAxis1(wid, skid, jid, inv3)
universal_joint__setAxis1 = _pydart2_api.universal_joint__setAxis1
def universal_joint__getAxis2(wid: 'int', skid: 'int', jid: 'int') -> "double [3]":
    return _pydart2_api.universal_joint__getAxis2(wid, skid, jid)
universal_joint__getAxis2 = _pydart2_api.universal_joint__getAxis2
def universal_joint__setAxis2(wid: 'int', skid: 'int', jid: 'int', inv3: 'double [3]') -> "void":
    return _pydart2_api.universal_joint__setAxis2(wid, skid, jid, inv3)
universal_joint__setAxis2 = _pydart2_api.universal_joint__setAxis2
def euler_joint__getAxisOrder(wid: 'int', skid: 'int', jid: 'int') -> "char const *":
    return _pydart2_api.euler_joint__getAxisOrder(wid, skid, jid)
euler_joint__getAxisOrder = _pydart2_api.euler_joint__getAxisOrder
def euler_joint__setAxisOrder(wid: 'int', skid: 'int', jid: 'int', axisorder: 'char const *') -> "void":
    return _pydart2_api.euler_joint__setAxisOrder(wid, skid, jid, axisorder)
euler_joint__setAxisOrder = _pydart2_api.euler_joint__setAxisOrder
# --- ShapeNode / Shape wrappers (SWIG-generated) ----------------------------
# Accessors for shape nodes and the concrete shape types (sphere, box,
# ellipsoid, cylinder, capsule, cone, plane, multisphere, mesh).  Same
# stub/overwrite pattern as above; keys are (world id, skeleton id,
# body-node id, shape id).
def shapenode__getOffset(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double [3]":
    return _pydart2_api.shapenode__getOffset(wid, skid, bid, sid)
shapenode__getOffset = _pydart2_api.shapenode__getOffset
def shapenode__setOffset(wid: 'int', skid: 'int', bid: 'int', sid: 'int', inv3: 'double [3]') -> "void":
    return _pydart2_api.shapenode__setOffset(wid, skid, bid, sid, inv3)
shapenode__setOffset = _pydart2_api.shapenode__setOffset
def shapenode__getRelativeTransform(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double [4][4]":
    return _pydart2_api.shapenode__getRelativeTransform(wid, skid, bid, sid)
shapenode__getRelativeTransform = _pydart2_api.shapenode__getRelativeTransform
def shapenode__setRelativeTransform(wid: 'int', skid: 'int', bid: 'int', sid: 'int', inv44: 'double [4][4]') -> "void":
    return _pydart2_api.shapenode__setRelativeTransform(wid, skid, bid, sid, inv44)
shapenode__setRelativeTransform = _pydart2_api.shapenode__setRelativeTransform
def shapenode__hasVisualAspect(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "bool":
    return _pydart2_api.shapenode__hasVisualAspect(wid, skid, bid, sid)
shapenode__hasVisualAspect = _pydart2_api.shapenode__hasVisualAspect
def shapenode__hasCollisionAspect(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "bool":
    return _pydart2_api.shapenode__hasCollisionAspect(wid, skid, bid, sid)
shapenode__hasCollisionAspect = _pydart2_api.shapenode__hasCollisionAspect
def shapenode__getVisualAspectRGBA(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double [4]":
    return _pydart2_api.shapenode__getVisualAspectRGBA(wid, skid, bid, sid)
shapenode__getVisualAspectRGBA = _pydart2_api.shapenode__getVisualAspectRGBA
def shapenode__setVisualAspectRGBA(wid: 'int', skid: 'int', bid: 'int', sid: 'int', inv4: 'double [4]') -> "void":
    return _pydart2_api.shapenode__setVisualAspectRGBA(wid, skid, bid, sid, inv4)
shapenode__setVisualAspectRGBA = _pydart2_api.shapenode__setVisualAspectRGBA
def shape__getVolume(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double":
    return _pydart2_api.shape__getVolume(wid, skid, bid, sid)
shape__getVolume = _pydart2_api.shape__getVolume
def shape__getType(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "char const *":
    return _pydart2_api.shape__getType(wid, skid, bid, sid)
shape__getType = _pydart2_api.shape__getType
def shape__getTypeID(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "int":
    return _pydart2_api.shape__getTypeID(wid, skid, bid, sid)
shape__getTypeID = _pydart2_api.shape__getTypeID
def shape__render(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "void":
    return _pydart2_api.shape__render(wid, skid, bid, sid)
shape__render = _pydart2_api.shape__render
def shape__getBoundingBoxMin(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double [3]":
    return _pydart2_api.shape__getBoundingBoxMin(wid, skid, bid, sid)
shape__getBoundingBoxMin = _pydart2_api.shape__getBoundingBoxMin
def shape__getBoundingBoxMax(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double [3]":
    return _pydart2_api.shape__getBoundingBoxMax(wid, skid, bid, sid)
shape__getBoundingBoxMax = _pydart2_api.shape__getBoundingBoxMax
def sphere_shape__getRadius(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double":
    return _pydart2_api.sphere_shape__getRadius(wid, skid, bid, sid)
sphere_shape__getRadius = _pydart2_api.sphere_shape__getRadius
def sphere_shape__setRadius(wid: 'int', skid: 'int', bid: 'int', sid: 'int', radius: 'double') -> "void":
    return _pydart2_api.sphere_shape__setRadius(wid, skid, bid, sid, radius)
sphere_shape__setRadius = _pydart2_api.sphere_shape__setRadius
def box_shape__getSize(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double [3]":
    return _pydart2_api.box_shape__getSize(wid, skid, bid, sid)
box_shape__getSize = _pydart2_api.box_shape__getSize
def box_shape__setSize(wid: 'int', skid: 'int', bid: 'int', sid: 'int', inv3: 'double [3]') -> "void":
    return _pydart2_api.box_shape__setSize(wid, skid, bid, sid, inv3)
box_shape__setSize = _pydart2_api.box_shape__setSize
def ellipsoid_shape__getSize(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double [3]":
    return _pydart2_api.ellipsoid_shape__getSize(wid, skid, bid, sid)
ellipsoid_shape__getSize = _pydart2_api.ellipsoid_shape__getSize
def ellipsoid_shape__setSize(wid: 'int', skid: 'int', bid: 'int', sid: 'int', inv3: 'double [3]') -> "void":
    return _pydart2_api.ellipsoid_shape__setSize(wid, skid, bid, sid, inv3)
ellipsoid_shape__setSize = _pydart2_api.ellipsoid_shape__setSize
def cylindershape__getRadius(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double":
    return _pydart2_api.cylindershape__getRadius(wid, skid, bid, sid)
cylindershape__getRadius = _pydart2_api.cylindershape__getRadius
def cylindershape__setRadius(wid: 'int', skid: 'int', bid: 'int', sid: 'int', _radius: 'double') -> "void":
    return _pydart2_api.cylindershape__setRadius(wid, skid, bid, sid, _radius)
cylindershape__setRadius = _pydart2_api.cylindershape__setRadius
def cylindershape__getHeight(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double":
    return _pydart2_api.cylindershape__getHeight(wid, skid, bid, sid)
cylindershape__getHeight = _pydart2_api.cylindershape__getHeight
def cylindershape__setHeight(wid: 'int', skid: 'int', bid: 'int', sid: 'int', _height: 'double') -> "void":
    return _pydart2_api.cylindershape__setHeight(wid, skid, bid, sid, _height)
cylindershape__setHeight = _pydart2_api.cylindershape__setHeight
def capsuleshape__getRadius(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double":
    return _pydart2_api.capsuleshape__getRadius(wid, skid, bid, sid)
capsuleshape__getRadius = _pydart2_api.capsuleshape__getRadius
def capsuleshape__setRadius(wid: 'int', skid: 'int', bid: 'int', sid: 'int', radius: 'double') -> "void":
    return _pydart2_api.capsuleshape__setRadius(wid, skid, bid, sid, radius)
capsuleshape__setRadius = _pydart2_api.capsuleshape__setRadius
def capsuleshape__getHeight(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double":
    return _pydart2_api.capsuleshape__getHeight(wid, skid, bid, sid)
capsuleshape__getHeight = _pydart2_api.capsuleshape__getHeight
def capsuleshape__setHeight(wid: 'int', skid: 'int', bid: 'int', sid: 'int', height: 'double') -> "void":
    return _pydart2_api.capsuleshape__setHeight(wid, skid, bid, sid, height)
capsuleshape__setHeight = _pydart2_api.capsuleshape__setHeight
def coneshape__getRadius(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double":
    return _pydart2_api.coneshape__getRadius(wid, skid, bid, sid)
coneshape__getRadius = _pydart2_api.coneshape__getRadius
def coneshape__setRadius(wid: 'int', skid: 'int', bid: 'int', sid: 'int', radius: 'double') -> "void":
    return _pydart2_api.coneshape__setRadius(wid, skid, bid, sid, radius)
coneshape__setRadius = _pydart2_api.coneshape__setRadius
def coneshape__getHeight(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double":
    return _pydart2_api.coneshape__getHeight(wid, skid, bid, sid)
coneshape__getHeight = _pydart2_api.coneshape__getHeight
def coneshape__setHeight(wid: 'int', skid: 'int', bid: 'int', sid: 'int', height: 'double') -> "void":
    return _pydart2_api.coneshape__setHeight(wid, skid, bid, sid, height)
coneshape__setHeight = _pydart2_api.coneshape__setHeight
def planeshape__getNormal(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double [3]":
    return _pydart2_api.planeshape__getNormal(wid, skid, bid, sid)
planeshape__getNormal = _pydart2_api.planeshape__getNormal
def planeshape__setNormal(wid: 'int', skid: 'int', bid: 'int', sid: 'int', inv3: 'double [3]') -> "void":
    return _pydart2_api.planeshape__setNormal(wid, skid, bid, sid, inv3)
planeshape__setNormal = _pydart2_api.planeshape__setNormal
def planeshape__getOffset(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double":
    return _pydart2_api.planeshape__getOffset(wid, skid, bid, sid)
planeshape__getOffset = _pydart2_api.planeshape__getOffset
def planeshape__setOffset(wid: 'int', skid: 'int', bid: 'int', sid: 'int', _offset: 'double') -> "void":
    return _pydart2_api.planeshape__setOffset(wid, skid, bid, sid, _offset)
planeshape__setOffset = _pydart2_api.planeshape__setOffset
def multisphereshape__addSphere(wid: 'int', skid: 'int', bid: 'int', sid: 'int', inv4: 'double [4]') -> "void":
    return _pydart2_api.multisphereshape__addSphere(wid, skid, bid, sid, inv4)
multisphereshape__addSphere = _pydart2_api.multisphereshape__addSphere
def multisphereshape__getSpheres(wid: 'int', skid: 'int', bid: 'int', sid: 'int', outv: 'double *') -> "int":
    return _pydart2_api.multisphereshape__getSpheres(wid, skid, bid, sid, outv)
multisphereshape__getSpheres = _pydart2_api.multisphereshape__getSpheres
def multisphereshape__getNumSpheres(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "int":
    return _pydart2_api.multisphereshape__getNumSpheres(wid, skid, bid, sid)
multisphereshape__getNumSpheres = _pydart2_api.multisphereshape__getNumSpheres
def mesh_shape__getScale(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "double [3]":
    return _pydart2_api.mesh_shape__getScale(wid, skid, bid, sid)
mesh_shape__getScale = _pydart2_api.mesh_shape__getScale
def mesh_shape__getMeshPath(wid: 'int', skid: 'int', bid: 'int', sid: 'int') -> "char const *":
    return _pydart2_api.mesh_shape__getMeshPath(wid, skid, bid, sid)
mesh_shape__getMeshPath = _pydart2_api.mesh_shape__getMeshPath
# This file is compatible with both classic and new-style classes.
|
import json
from datetime import datetime

from scrapy import log
from twisted.enterprise import adbapi
class ItemProcessPipeline:
    """Normalize scraped review items before storage.

    Extracts the release year from ``custom.date`` and serializes the
    ``genres`` and ``tags`` lists to JSON-array strings.
    """

    def process_item(self, item, spider):
        """Enrich *item* in place and return it.

        :param item: scraped item with ``custom["date"]`` (ISO-8601 string,
            no timezone), optional ``custom["genres"]`` ('/'-separated
            string) and ``tags`` (list of strings).
        :param spider: spider that produced the item (unused).
        :return: the same item, with ``year``, ``genres`` and ``tags`` set.
        """
        # parse year from the ISO-8601 timestamp
        item["year"] = datetime.strptime(item["custom"]["date"], "%Y-%m-%dT%H:%M:%S").year
        # parse '/'-separated genres into a lower-cased JSON list string
        genres_raw = item["custom"].get("genres")
        if genres_raw:
            genres = [a.lower() for a in genres_raw.split('/')]
            item["genres"] = self._prepare_list_to_string(genres)
        else:
            item["genres"] = '[]'
        item["tags"] = self._prepare_list_to_string(item["tags"])
        return item

    def _prepare_list_to_string(self, arr):
        """Serialize *arr* to a JSON array string.

        BUG FIX: the previous ``str(arr).replace("'", '"')`` produced invalid
        JSON whenever a value contained a quote (e.g. "rock 'n' roll");
        ``json.dumps`` escapes correctly and yields identical output for
        plain values.
        """
        return json.dumps(arr, ensure_ascii=False)
class MySQLStorePipeline:
    """Store scraped items in MySQL via a Twisted adbapi connection pool.

    NOTE(review): ``scrapy.log`` was removed in modern Scrapy releases --
    confirm the installed version, or switch to stdlib ``logging``.
    """

    def __init__(self, dbpool):
        # Twisted adbapi.ConnectionPool used to run queries off the reactor thread.
        self.dbpool = dbpool

    @classmethod
    def from_crawler(cls, crawler):
        """Build the pipeline from crawler settings (``DB_CONFIG`` dict of
        MySQLdb connection kwargs)."""
        return cls(
            dbpool=adbapi.ConnectionPool('MySQLdb', **crawler.settings.get("DB_CONFIG"))
        )

    def process_item(self, item, spider):
        """Persist *item* asynchronously and return a Deferred."""
        # run db query in the thread pool
        d = self.dbpool.runInteraction(self._do_upsert, item)
        d.addErrback(self._handle_error, item, spider)
        # at the end return the item in case of success or failure
        d.addBoth(lambda _: item)
        # Return the deferred instead of the item: this makes the engine
        # process the next item (according to the CONCURRENT_ITEMS setting)
        # only after this operation (deferred) has finished.
        return d

    def _do_upsert(self, conn, item):
        # NOTE(review): despite the name this is a plain INSERT -- duplicate
        # rows are not updated; confirm whether ON DUPLICATE KEY UPDATE was
        # intended.
        conn.execute(
            "insert into data (artist, album, picture_url, review_url, genres, year, tags) values (%s, %s, %s, %s, %s, %s, %s)",
            (item["artist"], item["album"], item["picture_url"], item["review_url"], item["genres"], item["year"],
             item["tags"]))

    def _handle_error(self, failure, item, spider):
        """Log any failure that occurred during the db interaction."""
        # do nothing, just log
        log.err(failure)
|
#
# ----------------------------------------------------------------------------------------------------
# DESCRIPTION
# ----------------------------------------------------------------------------------------------------
#
# ----------------------------------------------------------------------------------------------------
# IMPORTS
# ----------------------------------------------------------------------------------------------------
from django.utils.deprecation import MiddlewareMixin
from django.http import HttpResponseRedirect
from projectSettings import common
from projectSettings.models import ProjectSettings
#
# ----------------------------------------------------------------------------------------------------
# CODE
# ----------------------------------------------------------------------------------------------------
#
## @brief [ MIDDLEWARE CLASS ] - Middleware class.
class IsWebSiteEnabledMiddleware(MiddlewareMixin):
    """Redirect every request to the "web site disabled" page while the site
    is switched off in :class:`ProjectSettings`.

    Requests for the disabled-redirect URL itself and for the admin URL are
    always allowed through (so the site can be re-enabled).  Returns ``None``
    when the request may proceed, an ``HttpResponseRedirect`` otherwise.
    """

    def process_request(self, request):
        # Nothing to do while the check is disabled or the site is enabled.
        if not common.CHECK_IF_WEB_SITE_DISABLED or ProjectSettings.getIsWebSiteEnabled():
            return None
        requested_uri = request.build_absolute_uri()
        on_disabled_page = requested_uri.endswith(common.WEB_SITE_DISABLED_REDIRECT_URL)
        on_admin_page = common.ADMIN_URL in requested_uri
        if on_disabled_page or on_admin_page:
            return None
        return HttpResponseRedirect('{}{}'.format(common.BASE_URL, common.WEB_SITE_DISABLED_REDIRECT_URL))
#
## @brief [ MIDDLEWARE CLASS ] - Middleware class.
class LanguageMiddleware(MiddlewareMixin):
    """Attach language codes to the incoming request.

    Sets ``request.sessionLanguageCode`` (taken from the session) and
    ``request.languageCode`` (taken from the ``lang`` cookie), both falling
    back to ``common.LANGUAGE_CODE``.  An explicit ``?lang=`` query
    parameter overrides both.
    """

    def process_request(self, request):
        # Always read session and cookie (keeps the session "accessed"
        # behaviour identical even when a ?lang= override is present).
        session_code = request.session.get('lang', common.LANGUAGE_CODE)
        cookie_code = request.COOKIES.get('lang', common.LANGUAGE_CODE)
        override = request.GET.get('lang')
        if override:
            # TODO: Check supported languages here
            session_code = override
            cookie_code = override
        request.sessionLanguageCode = session_code
        request.languageCode = cookie_code
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class ScrapyBookingItem(scrapy.Item):
    """Item holding one scraped Booking.com hotel record.

    Fields cover identity (name, hotel_id), ratings, location (bounding box,
    coordinates, address) and the collected reviews.
    """
    # Removed the dead trailing `pass` -- the class body is non-empty.
    name = scrapy.Field()
    hotel_id = scrapy.Field()
    rating_word = scrapy.Field()
    rating_score = scrapy.Field()
    star_rating = scrapy.Field()
    location_bbox = scrapy.Field()
    location_coords = scrapy.Field()
    location_address = scrapy.Field()
    reviews = scrapy.Field()
|
from socket import AF_INET, socket, SOCK_STREAM
from threading import Thread
# Connected clients: socket -> chosen name (filled in by handler_client).
clientes= {}
# Connected clients: socket -> (host, port), filled in by aceitar_clientes.
address = {}
HOST = ''  # empty string = bind on all interfaces
PORT = 3000
BUFSIZE = 1024  # max bytes per recv()
ADDR = (HOST, PORT)
SERVER = socket(AF_INET, SOCK_STREAM)
SERVER.bind(ADDR)
def aceitar_clientes():
    """Accept incoming connections forever.

    Greets each new client with a name prompt, records its address and
    hands the socket off to a dedicated ``handler_client`` thread.
    """
    while True:
        conn, conn_addr = SERVER.accept()
        print("{} : {} foi conetactado com sucesso".format(conn_addr[0], conn_addr[1]))
        conn.send(bytes("Seja bem vindo ao chat!!, digite seu nome: ", 'utf8'))
        address[conn] = conn_addr
        Thread(target=handler_client, args=(conn, )).start()
def handler_client(client):
    """Serve a single chat client (runs in its own thread).

    First reads the client's chosen name, announces it, then relays every
    message to all connected clients.  The loop ends when the client sends
    ``!quit`` or disconnects, at which point the socket is closed and the
    departure is broadcast.
    """
    name = client.recv(BUFSIZE).decode("utf8")
    bemvindo = 'Seja bem-vindo {} ao chat!, digite !quit para sair'.format(name)
    client.send(bytes(bemvindo, 'utf8'))
    broadcast(bytes("{} acabou de entrar no chat!".format(name), 'utf8'))
    clientes[client] = name
    while True:
        msg = client.recv(BUFSIZE).decode('utf8')
        # An empty read means the peer closed the connection; treat it as a quit.
        if msg and '!quit' not in msg:
            broadcast(bytes(msg, 'utf8'), name + ': ')
        else:
            if msg:
                # Echo the quit marker back only if the peer is still connected.
                client.send(bytes('!quit', 'utf8'))
            client.close()
            del clientes[client]
            broadcast(bytes(" O cliente {} saiu do chat!".format(name), 'utf8'))
            # BUG FIX: without this break the loop kept recv()ing on the
            # closed socket and raised OSError.
            break
def broadcast(msg, prefix=""):
    """Send *msg* (bytes) to every connected client, optionally prefixed."""
    # Encode the prefix once instead of per recipient.
    encoded_prefix = bytes(prefix, 'utf8')
    for client_sock in clientes:
        client_sock.send(encoded_prefix + msg)
if __name__ == '__main__':
    # Allow up to 5 pending connections in the accept backlog.
    SERVER.listen(5)
    print('esperando clientes')
    # Run the accept loop on its own thread; join() blocks until it exits,
    # which it never does normally, so the server runs until interrupted.
    ACCEPT_THREAD = Thread(target=aceitar_clientes)
    ACCEPT_THREAD.start()
    ACCEPT_THREAD.join()
    SERVER.close()
|
"""
@Author : sean cheng
@Email : aya234@163.com
@CreateTime : 2019/6/30
@Program : 绘制游戏界面的模块
"""
import pygame
from gobang.init_var import init_var
# Shared settings object (colors such as BLACK/WHITE/GRAY) used by all
# drawing helpers in this module.
setting = init_var()
def draw_chess(screen, chess_color, posx, posy):
    # Draw one stone: a filled circle of the given color centered at
    # (posx, posy) with a 12 px radius.
    pygame.draw.circle(screen, chess_color, (posx, posy), 12)
def draw_chessboard(screen):
    """Draw the board: thick outer border, 35-px inner grid and five star points."""
    # Thick border lines around the board (top, bottom, left, right).
    for start, end in (((150, 55), (640, 55)),
                       ((150, 545), (640, 545)),
                       ((150, 55), (150, 545)),
                       ((640, 55), (640, 545))):
        pygame.draw.line(screen, setting.BLACK, start, end, 4)
    # Thin inner grid: horizontal lines first, then vertical ones,
    # spaced 35 px apart (matching the original draw order).
    for idx in range(14):
        offset = 35 * idx
        pygame.draw.line(screen, setting.BLACK, (150, offset + 55), (640, offset + 55), 2)
    for idx in range(14):
        offset = 35 * idx
        pygame.draw.line(screen, setting.BLACK, (offset + 150, 55), (offset + 150, 545), 2)
    # The five traditional star points of the board.
    for col, row in ((3, 3), (3, 11), (7, 7), (11, 3), (11, 11)):
        pygame.draw.circle(screen, setting.BLACK, (35 * col + 150, 35 * row + 55), 6)
def draw_player_icon(screen):
    """Draw both player avatars: black stone top-left, white stone bottom-right."""
    for corner, center, stone_color in (((20, 55), (70, 105), setting.BLACK),
                                        ((670, 445), (720, 495), setting.WHITE)):
        # Grey 100x100 background square, then the player's stone on top.
        pygame.draw.rect(screen, setting.GRAY, corner + (100, 100))
        pygame.draw.circle(screen, stone_color, center, 40)
import unittest
from scrapers.journalscrapers import HindawiScraper
class TestJournalScraper(unittest.TestCase):
    """Smoke test for HindawiScraper against the live APC page."""

    def setUp(self):
        self.instance = HindawiScraper("http://www.hindawi.com/apc/")

    def test_strip_chars(self):
        # BUG FIX: `print row` is Python 2 statement syntax and a
        # SyntaxError under Python 3; use the print() function.
        for row in self.instance.get_entries():
            print(row)
#!/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
# Packaging metadata for the `unicards` distribution.
setup(
    name='unicards',
    version='0.6',
    description="Convert strings into unicode playing cards",
    author='Luke Macken',
    author_email='lmacken@redhat.com',
    url='http://github.com/lmacken/unicards',
    license='ASL 2.0',
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.3",
        "Topic :: Software Development",
        "Topic :: Utilities",
        "Topic :: Games/Entertainment",
        "License :: OSI Approved :: Apache Software License",
    ],
    keywords='python unicode cards poker deck',
    # ez_setup and tests are excluded from the installed package.
    packages=find_packages(exclude=['ez_setup', 'tests']),
    test_suite='nose.collector',
    tests_require=['nose'],
    include_package_data=True,
    # Command-line entry point installed into the user's PATH.
    scripts=['scripts/unicards'],
    zip_safe=True,
    install_requires=[],
)
|
"""Style handling for QuteStyleWindow."""
import logging
from typing import Dict, Optional
from PyQt5.QtCore import QRect, QSettings, QSize
from PyQt5.QtGui import QColor, QPainter, QPixmap
log = logging.getLogger(
    f"qute_style.{__name__}"
)  # pylint: disable=invalid-name
# Use this variable when referencing a default style so that adapting to a new
# default style will only require changes in the lib.
DEFAULT_STYLE = "Darcula"
# Lazily-loaded cache of the active style name; populated on first call to
# get_current_style() and updated by set_current_style().
CURRENT_STYLE: Optional[str] = None
def _create_theme_drawing(
    icon_size: QSize, color_names: Dict[str, str]
) -> QPixmap:
    """Render a miniature preview of a theme (menu, toolbar, footer, widget)."""
    preview = QPixmap(icon_size)
    painter = QPainter(preview)
    total_w, total_h = preview.width(), preview.height()
    # Background fill.
    painter.fillRect(QRect(0, 0, total_w, total_h), QColor(color_names["bg_one"]))
    # Left menu strip.
    menu = QRect(0, 0, 20, total_h)
    painter.fillRect(menu, QColor(color_names["dark_one"]))
    # Small square "menu icons", as many as fit vertically.
    icon_color = QColor(color_names["active"])
    for row in range(8):
        y_pos = row * 20 + 5
        if y_pos + 8 < menu.height():
            painter.fillRect(QRect(5, y_pos, 8, 8), icon_color)
    # Toolbar along the top and footer along the bottom.
    toolbar = QRect(menu.width() + 5, 0, total_w, 15)
    painter.fillRect(toolbar, QColor(color_names["bg_two"]))
    footer = QRect(menu.width() + 5, total_h - 10, total_w, 10)
    painter.fillRect(footer, QColor(color_names["bg_two"]))
    # Central widget area between toolbar and footer.
    widget = QRect(
        menu.width() + 5,
        toolbar.height() + 5,
        total_w - (menu.width() + 5),
        total_h - (toolbar.height() + footer.height() + 10),
    )
    painter.fillRect(widget, QColor(color_names["bg_two"]))
    # Alternating "data" rows inside the widget: odd rows wide/foreground,
    # even rows narrower/context colored.
    for row in range(6):
        y_pos = row * 20 + 10
        if y_pos + 5 < widget.height():
            if row % 2:
                row_width = widget.width() - 40
                row_color = QColor(color_names["foreground"])
            else:
                row_width = widget.width() - 100
                row_color = QColor(color_names["context_color"])
            painter.fillRect(
                QRect(widget.x() + 10, y_pos + widget.y(), row_width, 5),
                row_color,
            )
    painter.end()
    return preview
def get_current_style() -> str:
    """Return the currently set style, lazily loading it from QSettings."""
    global CURRENT_STYLE  # pylint: disable=global-statement
    style = CURRENT_STYLE
    if style is None:
        style = QSettings().value("style", DEFAULT_STYLE)
        log.debug("Loaded current style from registry: %s", style)
    if style not in THEMES:
        log.warning("Invalid style stored in registry: %s", style)
        # An unknown/invalid stored style falls back to the default.
        style = DEFAULT_STYLE
    CURRENT_STYLE = style
    return style
def set_current_style(style: str) -> None:
    """
    Set the current style.

    One should only use this method to change the style. This will correctly
    set CURRENT_STYLE to be used as a lazy variable.
    """
    global CURRENT_STYLE  # pylint: disable=global-statement
    log.debug("Setting current style to %s", style)
    CURRENT_STYLE = style
    # Persist the choice so it survives application restarts.
    QSettings().setValue("style", style)
# Color palettes for every available style. Each theme defines the same set
# of keys; MAIN_STYLE.format(**THEMES[name]) substitutes them into the
# application style sheet (see get_style()).
THEMES: Dict[str, Dict[str, str]] = {
    "Snow White": {
        "dark_one": "#b5c3dd",
        "dark_two": "#bfcde6",
        "bg_one": "#c9d7ef",
        "bg_two": "#d3e0f7",
        "bg_elements": "#e2e9f7",
        "bg_three": "#eff1f7",
        "bg_disabled": "#e5eaf4",
        "fg_disabled": "#adaeaf",
        "foreground": "#24263f",
        "active": "#121320",
        "context_pressed": "#90a5c7",
        "context_color": "#9bb1d0",
        "context_hover": "#b2c3d6",
        "white": "#f5f6f9",
        "pink": "#ff007f",
        "green": "#15c72a",
        "light_green": "#46ff5c",
        "dark_green": "#0b6315",
        "red": "#ff5555",
        "light_red": "#ffd4d4",
        "dark_red": "#7f2a2a",
        "yellow": "#fda600",
        "light_yellow": "#ffd27c",
        "dark_yellow": "#7e5300",
        "grey": "#d3d3d3",
    },
    "Princess Pink": {
        "active": "#fffefe",
        "dark_one": "#282a36",
        "dark_two": "#363948",
        "bg_elements": "#595D75",
        "bg_one": "#44475a",
        "bg_two": "#4f5268",
        "bg_three": "#63677d",
        "bg_disabled": "#585c6e",
        "fg_disabled": "#83849b",
        "context_color": "#ff79c6",
        "context_hover": "#ffacdc",
        "context_pressed": "#e86eb4",
        "foreground": "#f6e2f6",
        "white": "#f5f6f9",
        "pink": "#ff79c6",
        "green": "#00ff7f",
        "light_green": "#7fffbf",
        "dark_green": "#007f3f",
        "red": "#ff5555",
        "light_red": "#ffd4d4",
        "dark_red": "#7f2a2a",
        "yellow": "#f1fa8c",
        "light_yellow": "#ffffff",
        "dark_yellow": "#787d46",
        "grey": "#d3d3d3",
    },
    "Darcula": {
        "active": "#dfe4ed",
        "dark_one": "#1b1e23",
        "dark_two": "#242830",
        "bg_elements": "#3c4454",
        "bg_one": "#2c313c",
        "bg_two": "#343b48",
        "bg_three": "#444C5B",
        "bg_disabled": "#3e4150",
        "fg_disabled": "#606478",
        "context_color": "#568af2",
        "context_hover": "#81A8F6",
        "context_pressed": "#4e7ddc",
        "foreground": "#b0b7c7",
        "white": "#f5f6f9",
        "pink": "#ff007f",
        "green": "#00ff7f",
        "light_green": "#7fffbf",
        "dark_green": "#007f3f",
        "red": "#ff5555",
        "light_red": "#ffd4d4",
        "dark_red": "#7f2a2a",
        "yellow": "#f1fa8c",
        "light_yellow": "#ffffff",
        "dark_yellow": "#787d46",
        "grey": "#d3d3d3",
    },
    "Highbridge Gray": {
        "active": "#0e0e0e",
        "bg_disabled": "#d9d9d9",
        "bg_one": "#ffffff",
        "bg_three": "#e3e3e3",
        "bg_two": "#f7f7f7",
        "context_color": "#aaa7bf",
        "context_hover": "#bdbcc8",
        "context_pressed": "#9e9cb7",
        "bg_elements": "#ededed",
        "dark_two": "#dedede",
        "dark_green": "#007f3f",
        "dark_one": "#c6c6c6",
        "dark_red": "#7f2a2a",
        "dark_yellow": "#787d46",
        "fg_disabled": "#919191",
        "green": "#00ff7f",
        "grey": "#d3d3d3",
        "light_green": "#7fffbf",
        "light_red": "#ffd4d4",
        "light_yellow": "#ffffff",
        "pink": "#ff007f",
        "red": "#ff5555",
        "foreground": "#393939",
        "white": "#f5f6f9",
        "yellow": "#fda600",
    },
    "Ruby Red": {
        "dark_one": "#431e1e",
        "dark_two": "#4f2424",
        "bg_one": "#5f2d2d",
        "bg_two": "#6a3030",
        "bg_elements": "#773535",
        "bg_three": "#823d3d",
        "bg_disabled": "#783a3a",
        "fg_disabled": "#a95454",
        "foreground": "#cb8d8d",
        "active": "#f4dfdf",
        "context_pressed": "#cc4848",
        "context_color": "#e44e4e",
        "context_hover": "#ea7171",
        "dark_green": "#007f3f",
        "dark_red": "#7f2a2a",
        "dark_yellow": "#787d46",
        "green": "#00ff7f",
        "grey": "#d3d3d3",
        "light_green": "#7fffbf",
        "light_red": "#ffd4d4",
        "light_yellow": "#ffffff",
        "pink": "#ff007f",
        "red": "#ff5555",
        "white": "#f5f6f9",
        "yellow": "#f1fa8c",
    },
}
def get_style() -> str:
    """Return the current style sheet that is stored in QSettings."""
    # Use the Darcula style as the default if no style is stored yet.
    log.debug("Stored style: %s", get_current_style())
    return MAIN_STYLE.format(**THEMES[get_current_style()])
def get_color(name: str) -> str:
    """Return the hex color code registered under *name* in the active theme."""
    active_theme = THEMES[get_current_style()]
    return active_theme[name]
# Application-wide Qt style sheet template. Literal QSS braces are doubled
# ({{ }}) because the color placeholders are substituted via str.format()
# with a THEMES palette (see get_style()).
# BUG FIX: removed a duplicated `border-right` declaration in
# QHeaderView::section:horizontal.
MAIN_STYLE = """
/* QComboBox */
QComboBox{{
    color: {foreground};
    background-color: {bg_elements};
    border-radius: 5px;
    border: 1px solid {bg_elements};
    padding: 5px;
    padding-left: 10px;
}}
QComboBox:hover{{
    border: 1px solid {context_hover};
}}
QComboBox::drop-down {{
    width: 25px;
    border-left-width: 3px;
    border-left-color: {bg_two};
    border-left-style: solid;
    border-top-right-radius: 3px;
    border-bottom-right-radius: 3px;
}}
QComboBox QAbstractItemView {{
    color: {foreground};
    background-color: {bg_elements};
    padding: 10px;
    selection-background-color: {context_color};
}}
QComboBox:disabled {{
    color: {fg_disabled};
    background-color: {bg_disabled};
    border: {bg_disabled};
}}
QComboBox[cssClass="transparent"] {{
    background-color: transparent;
}}
QComboBox::drop-down[cssClass="transparent"] {{
    border-left-color: transparent;
}}
QComboBox:disabled[cssClass="transparent"] {{
    color: {fg_disabled};
    background-color: transparent;
    border: {bg_disabled};
}}
/* QHeaderView */
QHeaderView{{
    background-color: {bg_two};
}}
QHeaderView::section{{
    color: {foreground};
    background-color: {bg_two};
    max-width: 30px;
    border: 0px;
    padding: 3px;
}}
QHeaderView::section:horizontal{{
    border-right: 1px solid {dark_two};
    border-bottom: 1px solid {dark_two};
}}
QHeaderView::section:horizontal:only-one{{
    border-right: 0px;
}}
QHeaderView::section:horizontal:last{{
    border-right: 0px;
}}
QHeaderView::section:vertical{{
    border-bottom: 1px solid {dark_two};
    border-right: 1px solid {dark_two};
}}
QHeaderView::section:vertical:last{{
    border-bottom: 0px;
}}
QHeaderView::section:vertical:only-one{{
    border-bottom: 0px;
}}
QHeaderView::section:disabled{{
    color: {fg_disabled};
}}
QTableCornerButton::section {{
    background-color: {bg_two};
}}
/* QLineEdit */
QLineEdit,
QPlainTextEdit {{
    background-color: {bg_elements};
    border-radius: 8px;
    border: 1px solid transparent;
    padding-left: 10px;
    padding-right: 10px;
    selection-color: {active};
    selection-background-color: {context_color};
    color: {foreground};
    height: 30px;
}}
QLineEdit:read-only, QPlainTextEdit:read-only{{
    background-color: {bg_disabled};
    color: {fg_disabled};
}}
QLineEdit:read-only:focus,
QPlainTextEdit:read-only:focus {{
    border: 1px solid {bg_disabled};
    background-color: {bg_disabled};
}}
QLineEdit#column_line_edit {{
    background-color: {bg_elements};
    border-radius: 8px;
    border: 1px solid transparent;
    padding-left: 10px;
    padding-right: 10px;
    selection-color: {active};
    selection-background-color: {context_color};
    color: {foreground};
}}
QLineEdit:focus,
QPlainTextEdit:focus {{
    border: 1px solid {context_color};
    background-color: {bg_one};
}}
QLineEdit#column_line_edit:focus {{
    border: 1px solid {context_color};
    background-color: {bg_two};
}}
/* QMenu */
QMenu{{
    background-color: {bg_one};
    color: {foreground};
}}
QMenu::item:disabled {{
    color: {fg_disabled};
}}
QMenu::item:enabled:selected {{
    color: {active};
    background-color: {context_color};
}}
QMenu QCheckBox{{
    color: {foreground};
    border-radius: 17px;
    border: 10px solid transparent;
}}
QMenu QCheckBox:hover {{
    background-color: {context_hover};
    color: {active};
}}
QMenu QCheckBox:pressed {{
    background-color: {context_pressed};
    color: {active};
}}
QMenu::separator {{
    background: {bg_three};
    height: 3px;
    margin-left: 5px;
    margin-right: 5px;
}}
/* QScrollArea */
QScrollArea {{
    border: none;
}}
QWidget#scroll_widget {{
    background: {dark_one};
}}
QWidget#style_selection_widget
{{ background-color:{bg_one}; }}
/* QProgressBar */
QProgressBar {{
    background-color: {bg_elements};
    color: {foreground};
    border-style: none;
    border-radius: 10px;
    text-align: center;
}}
QProgressBar::chunk {{
    background-color: {context_color};
    border-radius: 10px;
}}
/* QScrollBar */
QScrollBar:horizontal {{
    border: none;
    background: {bg_three};
    height: 8px;
    margin: 0px 21px 0 21px;
    border-radius: 0px;
}}
QScrollBar:horizontal:disabled {{
    background: {bg_disabled};
}}
QScrollBar::handle:horizontal {{
    background: {context_color};
    min-width: 25px;
    border-radius: 4px
}}
QScrollBar::handle:horizontal:disabled {{
    background: {fg_disabled};
}}
QScrollBar::handle:horizontal:hover {{
    background: {context_hover};
    min-width: 25px;
    border-radius: 4px
}}
QScrollBar::handle:horizontal:pressed {{
    background: {context_pressed};
    min-width: 25px;
    border-radius: 4px
}}
QScrollBar::add-line:horizontal {{
    border: none;
    background: {dark_two};
    width: 20px;
    border-top-right-radius: 4px;
    border-bottom-right-radius: 4px;
    subcontrol-position: right;
    subcontrol-origin: margin;
}}
QScrollBar::sub-line:horizontal {{
    border: none;
    background: {dark_two};
    width: 20px;
    border-top-left-radius: 4px;
    border-bottom-left-radius: 4px;
    subcontrol-position: left;
    subcontrol-origin: margin;
}}
QScrollBar::up-arrow:horizontal, QScrollBar::down-arrow:horizontal
{{
    background: none;
}}
QScrollBar::add-page:horizontal, QScrollBar::sub-page:horizontal
{{
    background: none;
}}
QScrollBar:vertical {{
    border: none;
    background: {bg_three};
    width: 8px;
    margin: 21px 0 21px 0;
    border-radius: 0px;
}}
QScrollBar:vertical:disabled {{
    background: {bg_disabled};
}}
QScrollBar::handle:vertical {{
    background: {context_color};
    min-height: 25px;
    border-radius: 4px
}}
QScrollBar::handle:vertical:disabled {{
    background: {fg_disabled};
}}
QScrollBar::handle:vertical:hover {{
    background: {context_hover};
    min-height: 25px;
    border-radius: 4px
}}
QScrollBar::handle:vertical:pressed {{
    background: {context_pressed};
    min-height: 25px;
    border-radius: 4px
}}
QScrollBar::add-line:vertical {{
    border: none;
    background: {dark_two};
    height: 20px;
    border-bottom-left-radius: 4px;
    border-bottom-right-radius: 4px;
    subcontrol-position: bottom;
    subcontrol-origin: margin;
}}
QScrollBar::sub-line:vertical {{
    border: none;
    background: {dark_two};
    height: 20px;
    border-top-left-radius: 4px;
    border-top-right-radius: 4px;
    subcontrol-position: top;
    subcontrol-origin: margin;
}}
QScrollBar::up-arrow:vertical, QScrollBar::down-arrow:vertical {{
    background: none;
}}
QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical {{
    background: none;
}}
/* QSlider */
QSlider {{
    margin: 0px;
}}
QSlider::groove:horizontal {{
    border-radius: 10px;
    height: 20px;
    margin: 0px;
    background-color: {bg_elements};
}}
QSlider::groove:horizontal:hover {{
    background-color: {bg_elements};
}}
QSlider::handle:horizontal {{
    border: none;
    height: 16px;
    width: 16px;
    margin: 2px;
    border-radius: 8px;
    background-color: {context_color};
}}
QSlider::handle:horizontal:hover {{
    background-color: {context_hover};
}}
QSlider::handle:horizontal:pressed {{
    background-color: {context_pressed};
}}
QSlider::groove:vertical {{
    border-radius: 10px;
    width: 20px;
    margin: 0px;
    background-color: {bg_elements};
}}
QSlider::groove:vertical:hover {{
    background-color: {bg_elements};
}}
QSlider::handle:vertical {{
    border: none;
    height: 16px;
    width: 16px;
    margin: 2px;
    border-radius: 8px;
    background-color: {context_color};
}}
QSlider::handle:vertical:hover {{
    background-color: {context_hover};
}}
QSlider::handle:vertical:pressed {{
    background-color: {context_pressed};
}}
/* QSplitter */
/* This activates the hover which isn't active by default */
QSplitterHandle:hover {{
}}
/* QSplitter can only be correctly addressed by it's orientation property.
Also, one must take care that the splitter in vertical direction is actually
turned by 90 degrees, so you'll need to use i.e. width as height etc. */
/* Horizontal QSplitter */
QSplitter[orientation='1']::handle {{
    height: 2px;
    background-color: {bg_elements};
}}
QSplitter[orientation='1']::handle:hover {{
    background-color: {context_color};
}}
/* Vertical QSplitter */
QSplitter[orientation='2']::handle {{
    height: 2px;
    background-color: {bg_elements};
}}
QSplitter[orientation='2']::handle:hover {{
    background-color: {context_color};
}}
/* QTabWidget */
/*
QTabWidget lacks of proper qss background-color support.
See:
https://bugreports.qt.io/browse/QTBUG-33344
https://bugreports.qt.io/browse/QTBUG-68642
https://codereview.qt-project.org/c/qt/qtbase/+/230769/
Because of not inheriting the values properly each
widget of QTabWidget needs to be set manually.
*/
QTabWidget > QStackedWidget {{
    background-color: {bg_two};
    border-top-right-radius: 8px;
    border-bottom-left-radius: 8px;
    border-bottom-right-radius: 8px;
}}
QTabWidget > QStackedWidget > QWidget {{
    background-color: {bg_two};
    border-top-right-radius: 8px;
    border-bottom-left-radius: 8px;
    border-bottom-right-radius: 8px;
}}
QTabWidget::pane {{
    background-color: {bg_two};
    border-top-right-radius: 8px;
    border-bottom-left-radius: 8px;
    border-bottom-right-radius: 8px;
}}
/* QTabBar */
QTabBar::tab {{
    background-color: {bg_three};
    color: {foreground};
    border-top-left-radius: 8px;
    border-top-right-radius: 8px;
    padding: 12px;
}}
QTabBar::tab:hover {{
    background-color: {context_color};
    color: {active};
}}
QTabBar::tab:selected {{
    background-color: {bg_two};
    color: {foreground};
}}
/* QTextEdit */
QTextEdit {{
    color: {foreground};
    background-color: {bg_elements};
    padding: 10px;
    border-radius: 5px;
}}
QTextEdit:disabled {{
    color: {fg_disabled};
    background-color: {bg_disabled};
}}
/* QToolTip */
QToolTip {{
    background-color: {dark_one};
    color:{foreground};
    padding-left: 10px;
    padding-right: 10px;
    border: 0px solid transparent;
    border-left: 3px solid {context_color};
}}
/* QTreeView, QListView, QTableView, QTableWidget, QTreeWidget */
QTreeView,
QListView,
QTableView,
QTableWidget,
QTreeWidget {{
    color: {foreground};
    background-color: {bg_two};
    alternate-background-color: {bg_one};
    padding: 10px;
    border-radius: 5px;
}}
#frozen_column_table_view {{
    /* This QTableView must fix exactly at it's position over the real table */
    padding: 0px;
}}
#frozen_column_table_view QHeaderView::section:horizontal:last{{
    /* The last section of the frozen table's header isn't the real last one */
    border-right: 1px solid {dark_two};
}}
QTreeView:disabled,
QListView:disabled,
QTableView:disabled,
QTableWidget:disabled,
QTreeWidget:disabled {{
    color: {fg_disabled};
}}
QTableView::item QComboBox{{
    border-radius: 0px;
}}
QFrame #_left_column_frame {{
    background-color: {bg_two};
}}
QFrame#app_background {{
    background-color: {bg_one};
    border-color: {bg_two};
    border-style: solid;
    /* Those are the default values that are applied when the app is not
    maximized */
    border-radius: 10;
    border: 2px;
}}
QFrame {{
    color: {foreground};
    font: 9pt 'Segoe UI';
}}
/* Used for Frames, and other space covering widgets like title bar,
credit bar and widgets in the right column (mostly QFrame) */
#bg_two_frame {{
    background-color: {bg_two};
    border-radius: 8px;
}}
QFrame#title_bg_frame {{
    background-color: {bg_one};
    border-radius: 8px;
}}
QFrame#div {{
    background: {dark_two};
    border-radius: 0;
}}
.QLabel {{
    font: 9pt "Segoe UI";
    color: {foreground};
    padding-left: 10px;
    padding-right: 10px;
}}
/* Label that has no padding on the left side */
QLabel#left_label {{
    font: 9pt "Segoe UI";
    color: {foreground};
    padding-left: 0px;
    padding-right: 10px;
}}
QLabel#heading_label {{
    font: 14pt "Segoe UI";
}}
QLabel#db_label {{
    color: red;
    font-weight: bold;
}}
QLabel:disabled {{
    color: {fg_disabled};
}}
QLabel#heading_label {{
    font: 14pt "Segoe UI";
}}
QLabel#heading1_label {{
    font: 12pt "Segoe UI";
    background: {dark_one};
    border-radius: 8px;
}}
QLabel#heading2_label {{
    font: 10pt "Segoe UI";
    background: {bg_two};
    border-radius: 8px;
}}
/* Completer */
#completer_popup{{
    border: 1px;
    border-color: {context_pressed};
    border-style: solid;
}}
QListView#completer_popup{{
    padding: 0px 10px 0px 10px;
}}
/* Background around the LeftMenu */
QFrame#menu_background {{
    background-color: {dark_one};
    border-radius: 8px;
}}
QLabel#column_title_label {{
    font-size: 10pt;
    color: {foreground};
    padding-bottom: 2px;
}}
/* CornerGrip and EdgeGrip are transparent. */
#grip {{
    background: transparent;
}}
/* Style for tooltips on buttons. */
QLabel#label_tooltip {{
    background-color: {dark_one};
    color: {foreground};
    padding-left: 10px;
    padding-right: 10px;
    border-radius: 17px;
    border: 0px solid transparent;
    border-left: 3px solid {context_color};
    font: 800 9pt "Segoe UI";
}}
QLabel#title_label {{
    font: 10pt "Segoe UI";
    color: {foreground};
}}
QLabel#about_toolbox {{
    font: 9pt "Segoe UI";
    color: {foreground};
}}
QMessageBox{{
    background-color: {dark_two};
    border-left: 3px solid {context_color};
    font: 9pt "Segoe UI";
    color: {foreground};
}}
QPushButton{{
    background-color: {bg_elements};
    color: {active};
    padding-left: 10px;
    padding-right: 10px;
    border-radius: 17px;
    border: 10px solid transparent;
    font: 600 9pt "Segoe UI";
}}
QPushButton:hover {{
    background-color: {context_color};
}}
QPushButton:pressed {{
    background-color: {context_pressed};
}}
QPushButton:checked {{
    background-color: {context_pressed};
}}
QPushButton#info_widget_btn{{
    background-color: {dark_one};
    color: {foreground};
    padding-left: 10px;
    padding-right: 10px;
    border-radius: 17px;
    border: 10px solid transparent;
    border-left: 3px solid {context_color};
    font: 800 9pt "Segoe UI";
}}
QPushButton[cssClass="red"] {{ background-color: {red}; color: black;}}
QPushButton[cssClass="red"]:hover {{ background-color: {light_red};
color: black;}}
QPushButton[cssClass="red"]:pressed {{ background-color: {dark_red};
color: black;}}
QPushButton[cssClass="green"] {{ background-color: {green};
color: black;}}
QPushButton[cssClass="green"]:hover {{ background-color: {light_green};
color: black;}}
QPushButton[cssClass="green"]:pressed {{ background-color: {dark_green};
color: black;}}
QPushButton[cssClass="yellow"] {{ background-color: {yellow};
color: black;}}
QPushButton[cssClass="yellow"]:hover {{ background-color: {light_yellow};
color: black;}}
QPushButton[cssClass="yellow"]:pressed {{ background-color: {dark_yellow};
color: black;}}
QPushButton#info_widget_btn:hover {{
    background-color: {dark_two};
}}
QPushButton#info_widget_btn:pressed {{
    background-color: {dark_one};
}}
QPushButton#info_widget_btn:disabled{{
    color: {fg_disabled};
    background-color: {bg_disabled};
}}
"""
|
import add_module
# NOTE(review): add_module is a local module; add() presumably returns the
# sum of its two arguments (the expected output below says 300) — confirm.
print(add_module.add(100, 200))
# 300
|
# coding: utf-8
# flake8: noqa
"""
ARC contrib functionality.
"""
__all__ = ["ARCJobManager", "ARCJobFileFactory", "ARCWorkflow"]
# provisioning imports
from law.contrib.arc.job import ARCJobManager, ARCJobFileFactory
from law.contrib.arc.workflow import ARCWorkflow
|
from unittest.mock import patch
from django.test import TestCase
from accounts.models import School
from accounts.update_schools import update_all_schools
@patch("accounts.update_schools.requests.get")
class UpdateMajorsTestCase(TestCase):
def testTotalSchoolCount(self, mock_source_file):
with open(r"./tests/accounts/PennCoursePrograms.html", "r") as f:
mock_source_file.return_value.text = f.read()
update_all_schools()
self.assertEquals(School.objects.all().count(), 12)
|
import urllib.parse
from datetime import date
from datetime import datetime
import numpy as np
import ocflib.printing.quota as quota
from django.http import HttpResponse
from django.shortcuts import redirect
from django.urls import reverse
from matplotlib.figure import Figure
from matplotlib.ticker import MaxNLocator
from ocfweb.caching import periodic
from ocfweb.component.graph import plot_to_image_bytes
def pyday_to_sqlday(pyday):
    """Convert a Python weekday (Mon=0..Sun=6) to MySQL DAYOFWEEK (Sun=1..Sat=7)."""
    # Shift by one so Monday becomes 2, then wrap Sunday around to 1.
    shifted = (pyday + 1) % 7
    return shifted + 1
@periodic(1800)
def _jobs_graph_image(day=None):
    # Render the jobs plot for `day` (default: today) as an SVG HTTP response.
    # NOTE(review): assumes @periodic(1800) caches the result per argument for
    # 30 minutes — confirm against ocfweb.caching.
    if not day:
        day = date.today()
    return HttpResponse(
        plot_to_image_bytes(get_jobs_plot(day), format='svg'),
        content_type='image/svg+xml',
    )
def daily_jobs_image(request):
    """Serve the print-job distribution graph for ?date=YYYY-MM-DD (default: today)."""
    try:
        day = datetime.strptime(request.GET.get('date', ''), '%Y-%m-%d').date()
    except ValueError:
        # Missing or malformed date parameter falls back to today.
        day = date.today()
    # redirect to canonical url
    if request.GET.get('date') != day.isoformat():
        return redirect(
            '{}?{}'.format(
                reverse('daily_job_image'),
                urllib.parse.urlencode({'date': day.isoformat()}),
            ),
        )
    if day == date.today():
        # Call without the keyword so today's request reuses the same cached
        # entry of _jobs_graph_image (NOTE(review): presumed @periodic cache
        # behavior — confirm).
        return _jobs_graph_image()
    else:
        return _jobs_graph_image(day=day)
def get_jobs_plot(day):
    """Return a matplotlib Figure showing, for the given day, how many print
    jobs of each page count (1..daily quota) were submitted."""
    day_of_week = pyday_to_sqlday(day.weekday())
    day_quota = quota.daily_quota(datetime.combine(day, datetime.min.time()))
    # NOTE(review): the DAYOFWEEK(`day`) = %s condition is implied by the
    # exact `day` = %s match; presumably kept for index usage — confirm.
    sql_today_freq = '''
        SELECT `pages`, SUM(`count`) AS `count`
        FROM `public_jobs`
        WHERE
            (`pages` <= %s) AND
            (DAYOFWEEK(`day`) = %s) AND
            (`day` = %s )
        GROUP BY `pages`
        ORDER BY `pages` ASC
    '''
    # executing the sql query to get the data
    with quota.get_connection() as cursor:
        cursor.execute(sql_today_freq, (day_quota, day_of_week, day))
        today_freq_data = cursor.fetchall()
    # converting the data into a list; page counts with no jobs default to 0
    # so every value from 1 up to the quota appears on the x axis
    today_jobs_dict = {row['pages']: row['count'] for row in today_freq_data}
    today_jobs_count = [today_jobs_dict.get(i, 0) for i in range(1, day_quota + 1)]
    # Generating the plot
    fig = Figure(figsize=(10, 4))
    ax = fig.add_subplot(1, 1, 1)
    tickLocations = np.arange(1, day_quota + 1)
    width = 0.8
    ax.bar(tickLocations, today_jobs_count, width)
    ax.set_xticks(ticks=tickLocations)
    # Job counts are integers, so force integer y ticks.
    ax.yaxis.set_major_locator(MaxNLocator(integer=True))
    ax.yaxis.grid(True)
    ax.set_ylim(bottom=0)
    ax.set_ylabel('Number of Jobs Printed')
    ax.set_title(f'Print Job Distribution {day:%a %b %d}')
    return fig
|
import pytest
from pathlib import Path
# Directory holding the example files used by the tests.
examples_path = Path(__file__).parent / 'examples'


@pytest.fixture
def example():
    """Return a helper that loads an example file's text by relative path."""
    def _load(p):
        # BUG FIX: Path.read_text() opens and closes the handle; the original
        # open(...).read() leaked the file object until garbage collection.
        return (examples_path / p).read_text()
    return _load
# Generated by Django 2.0.4 on 2018-04-12 09:50
from django.db import migrations, models
# Auto-generated migration: switch Session.created to auto_now_add so the
# creation date is populated automatically when a row is inserted.
class Migration(migrations.Migration):
    dependencies = [
        ('workouts', '0005_session_session_slug'),
    ]
    operations = [
        migrations.AlterField(
            model_name='session',
            name='created',
            field=models.DateField(auto_now_add=True),
        ),
    ]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 23 07:58:27 2021
@author: daniel
"""
import numpy as np
import fmatrices as fm
n = int(input("¿tamaño de la matriz? "))
cuadro = fm.cuadromag1(n)
fm.escribirmat(cuadro)
print (type(cuadro))
try:
np.savetxt('cuadro.txt',cuadro)
except:
print('No puedo guardar el archivo')
|
import urllib
import urllib.request
# Importing a package does not automatically import its submodules;
# urllib.request had to be imported explicitly above. Both are module objects.
print(type(urllib))
print(type(urllib.request))
|
"""
copyright 2020 kartik sharma, saurabh saxena
licensed under the apache license, version 2.0 (the "license");
you may not use this file except in compliance with the license.
you may obtain a copy of the license at
http://www.apache.org/licenses/license-2.0
unless required by applicable law or agreed to in writing, software
distributed under the license is distributed on an "as is" basis,
without warranties or conditions of any kind, either express or implied.
see the license for the specific language governing permissions and
limitations under the license.
"""
"""simple unit tests."""
import pytest
import logging
from bunch import Bunch
from core.fetch import Fetch
from core.similarity import Processor
config = {
    "max_result": 100,  # maximum number of papers fetched per query
    "topic_score": 10.0,
    "threshold": 1.0,
    "base_url": "http://export.arxiv.org/api/query?",  # arXiv export API endpoint
}
# Bunch wraps the dict to allow attribute-style access (config.max_result).
config = Bunch(config)
# NOTE(review): constructed at import time; Fetch may contact the network
# when used — confirm in core.fetch.
fetch = Fetch(config)
p = Processor()
def test_sanity_fetching():
    # Smoke test: the query should return more than one parsed paper.
    # NOTE(review): requires network access to export.arxiv.org.
    papers = fetch._get_parsed_data()
    assert len(papers) > 1
@pytest.mark.parametrize(
    ["text", "num_tokens"],
    [
        pytest.param("computer vision is great", 3),
        pytest.param("object detection", 2),
    ],
)
def test_sanity_check_processor(text, num_tokens):
    # NOTE(review): 4 words -> 3 tokens in the first case implies tokenize()
    # drops stop words like "is" — confirm in core.similarity.
    tokens = p.tokenize(text)
    assert len(tokens) == num_tokens
@pytest.mark.parametrize(
    ["text"],
    [
        pytest.param(["walk", "best", "better"]),
    ],
)
def test_sanity_check_lemmatize(text):
    # Lemmatization should map words one-to-one, preserving the count.
    tokens = p.lemmatize(text)
    assert len(tokens) == len(text)
if __name__ == "__main__":
logging.set_verbosity(logging.WARNING)
|
import os
import json
import pickle
import numpy as np
import scipy as sp
import scipy.stats as stats
import matplotlib.pyplot as plt
import seaborn as sns
from eval.eval_bm25_coliee2021 import read_label_file #ranking_eval
from retrieval.bm25_aggregate_paragraphs import sort_write_trec_output
from preprocessing.coliee21_task2_bm25 import ranking_eval
from analysis.ttest import measure_per_query
def plot_ci_manual(t, s_err, n, x, x2, y2, ax=None):
    # BUG FIX: the docstring is now a raw string; its LaTeX backslashes
    # (\l, \s, \h, ...) were invalid escape sequences, a SyntaxWarning on
    # Python 3.12+ and a future SyntaxError.
    r"""Return an axes of confidence bands using a simple approach.

    Notes
    -----
    .. math:: \left| \: \hat{\mu}_{y|x0} - \mu_{y|x0} \: \right| \; \leq \; T_{n-2}^{.975} \; \hat{\sigma} \; \sqrt{\frac{1}{n}+\frac{(x_0-\bar{x})^2}{\sum_{i=1}^n{(x_i-\bar{x})^2}}}
    .. math:: \hat{\sigma} = \sqrt{\sum_{i=1}^n{\frac{(y_i-\hat{y})^2}{n-2}}}

    References
    ----------
    .. [1] M. Duarte. "Curve fitting," Jupyter Notebook.
       http://nbviewer.ipython.org/github/demotu/BMC/blob/master/notebooks/CurveFitting.ipynb
    """
    if ax is None:
        ax = plt.gca()
    # Confidence half-width from the t statistic and the standard error of the fit.
    ci = t * s_err * np.sqrt(1 / n + (x2 - np.mean(x)) ** 2 / np.sum((x - np.mean(x)) ** 2))
    ax.fill_between(x2, y2 + ci, y2 - ci, color="#b9cfe7", edgecolor="")
    return ax
def plot_ci_bootstrap(xs, ys, resid, nboot=500, ax=None):
    """Return an axes of confidence bands using a bootstrap approach.

    Notes
    -----
    The bootstrap approach iteratively resamples residuals.
    It plots `nboot` number of straight lines and outlines the shape of a band.
    The density of overlapping lines indicates improved confidence.

    Returns
    -------
    ax : axes
        - Cluster of lines
        - Upper and Lower bounds (high and low) (optional)  Note: sensitive to outliers

    References
    ----------
    .. [1] J. Stults. "Visualizing Confidence Intervals", Various Consequences.
       http://www.variousconsequences.com/2010/02/visualizing-confidence-intervals.html
    """
    if ax is None:
        ax = plt.gca()
    # BUG FIX: sp.random / sp.polyfit / sp.polyval were NumPy aliases that
    # have been deprecated and removed from SciPy; use NumPy directly.
    bootindex = np.random.randint
    for _ in range(nboot):
        resamp_resid = resid[bootindex(0, len(resid) - 1, len(resid))]
        # Make coeffs of for polys (first-degree fit on the resampled data).
        pc = np.polyfit(xs, ys + resamp_resid, 1)
        # Plot bootstrap cluster
        ax.plot(xs, np.polyval(pc, xs), "b-", linewidth=2, alpha=3.0 / float(nboot))
    return ax
def plot_measures(measures, eval_dir, plot_file):
    """Plot precision vs. recall, one line per run, and save to eval_dir/plot_file.

    `measures` maps run name -> {cutoff: (recall, precision)}.
    """
    plt.figure(figsize=(10, 8))
    plt.xlabel('recall', fontsize=15)
    plt.ylabel('precision', fontsize=15)
    for name, measure in measures.items():
        xs, ys = zip(*measure.values())
        labels = measure.keys()
        # display
        plt.scatter(xs, ys, marker='o')
        plt.plot(xs, ys, label=name)
        # Annotate each point with its cut-off value.
        for label, x, y in zip(labels, xs, ys):
            plt.annotate(label, xy=(x, y))
    plt.legend(loc="upper right")
    plt.savefig(os.path.join(eval_dir, plot_file))
def calculcate_f1_score(plotting_data, output_dir, output_file):
    """Write one '<run> <cutoff> <f1>' line per cut-off to output_dir/output_file.

    `plotting_data` maps run name -> {cutoff: [recall, precision]}.
    """
    out_path = os.path.join(output_dir, output_file)
    out_lines = []
    for run_name, by_cutoff in plotting_data.items():
        for cutoff, (recall, precision) in by_cutoff.items():
            # F1 is the harmonic mean of recall and precision.
            score = 2 * recall * precision / (recall + precision)
            out_lines.append(' '.join([run_name, cutoff, str(score)]) + '\n')
    with open(out_path, 'w') as handle:
        handle.writelines(out_lines)
def plot_f1_score(plotting_data, eval_dir, plot_file):
    """Plot the F1 score per cut-off for every run and save the figure.

    `plotting_data` maps run name -> {cutoff: [recall, precision]}.
    """
    f1_dict = {}
    for name, measure in plotting_data.items():
        f1_dict.update({name: {}})
        for key, value in measure.items():
            # F1 = harmonic mean of recall (value[0]) and precision (value[1]).
            f1_score = 2 * value[0] * value[1] / (value[0] + value[1])
            f1_dict.get(name).update({key: [key, f1_score]})
    plt.figure(figsize=(10, 8))
    plt.xlabel('cut-off value', fontsize=15)
    plt.ylabel('f1-score', fontsize=15)
    for name, measure in f1_dict.items():
        xs, ys = zip(*measure.values())
        labels = measure.keys()
        plt.scatter(xs, ys, marker='o')
        plt.plot(xs, ys, label=name)
        # Annotate each point with its cut-off value.
        for label, x, y in zip(labels, xs, ys):
            plt.annotate(label, xy=(x, y))
    plt.legend(loc="upper right")
    plt.savefig(os.path.join(eval_dir, plot_file))
def create_plot_data(measures):
    """Group trec-eval style measures into {cutoff: [recall, precision]}.

    Keys in *measures* look like ``recall_100`` or ``P_100``; the part after
    the underscore is the cut-off.  The result is ordered by the numeric
    value of the cut-off (keys stay strings).
    """
    grouped = {}
    for key, value in measures.items():
        cutoff = key.split('_')[1]
        if not grouped.get(cutoff):
            grouped[cutoff] = [0, 0]
        if 'P' in key:
            grouped[cutoff][1] = value
        if 'recall' in key:
            grouped[cutoff][0] = value
    # sort the string cut-offs by their integer value
    return {cutoff: grouped[cutoff] for cutoff in sorted(grouped, key=int)}
def create_plot_data_recall(measures):
    """Extract only the recall measures as {measure-name: [cutoff, value]}.

    The result is ordered by cut-off from 100 to 1000 in steps of 50; all
    of those recall keys must be present in *measures* (KeyError otherwise).
    """
    recall_only = {}
    for key, value in measures.items():
        if 'recall' in key:
            recall_only[key] = [key.split('_')[1], value]
    # fixed ordering: recall_100, recall_150, ..., recall_1000
    ordered_keys = ['recall_{}'.format(cutoff) for cutoff in range(100, 1001, 50)]
    return {key: recall_only[key] for key in ordered_keys}
def plot_recall(measures, eval_dir, plot_file):
    """Plot recall against the cut-off value for every run and save the figure.

    :param measures: mapping run-name -> {key: [cutoff, recall]} point pairs
    :param eval_dir: directory the figure is written into
    :param plot_file: file name of the saved figure
    """
    plt.figure(figsize=(10, 8))
    plt.xlabel('cut-off', fontsize=15)
    plt.ylabel('recall', fontsize=15)
    for run_name, per_cutoff in measures.items():
        xvals, yvals = zip(*per_cutoff.values())
        plt.scatter(xvals, yvals, marker='o')
        plt.plot(xvals, yvals, label=run_name)
    plt.legend(loc="lower right")
    plt.savefig(os.path.join(eval_dir, plot_file))
def plot_recall_nice(plotting_data, output_dir, plot_file):
    """Plot recall vs. cut-off with a grid and a white background, then save.

    X tick positions 1..19 (step 2) are relabelled as cut-offs 100..1000.

    :param plotting_data: mapping run-name -> {key: [cutoff, recall]}
    :param output_dir: directory the figure is written into
    :param plot_file: file name of the saved figure
    """
    fig = plt.figure(figsize=(10, 8))
    ax = fig.add_subplot(111)
    plt.xlabel('cut-off', fontsize=15)
    plt.grid(True, linewidth=0.5, color='lightgrey', linestyle='-')
    plt.ylabel('recall', fontsize=15)
    for run_name, per_cutoff in plotting_data.items():
        xvals, yvals = zip(*per_cutoff.values())
        plt.scatter(xvals, yvals, marker='o')
        plt.plot(xvals, yvals, label=run_name)
    plt.legend(loc="lower right")
    # relabel tick positions with the actual cut-off values
    plt.xticks(range(1, 21, 2), range(100, 1100, 100))
    ax.patch.set_facecolor('white')
    plt.savefig(os.path.join(output_dir, plot_file), bbox_inches='tight')
def read_in_aggregated_scores(input_file: str, top_n=1000):
    """Read a trec-style run file into {query_id: {doc_id: [rank, score]}}.

    Every line must consist of six space-separated fields
    (query_id, _, doc_id, rank, score, _); documents ranked deeper than
    *top_n* are dropped.
    """
    run = {}
    with open(input_file) as fp:
        for raw_line in fp:
            fields = raw_line.strip().split(' ')
            assert len(fields) == 6
            rank = int(fields[3])
            if rank <= top_n:
                run.setdefault(fields[0], {})[fields[2]] = [rank, float(fields[4])]
    return run
def return_rel_docs_for_dict(labels: dict, dpr_dict: dict):
    """Restrict each ranked list to the relevant documents given in *labels*.

    :param labels: mapping query_id -> iterable of relevant doc ids
    :param dpr_dict: mapping query_id -> {doc_id: ranking entry}
    :return: mapping query_id -> {doc_id: ranking entry} containing only the
        relevant documents that actually occur in the ranked list
    """
    label_keys = list(labels.keys())
    dpr_keys = list(dpr_dict.keys())
    # BUGFIX: the original compared list.sort() results, i.e. None == None,
    # which always passed; compare the sorted key lists instead.
    assert sorted(label_keys) == sorted(dpr_keys)
    filtered_dict = {}
    for query_id in labels.keys():
        filtered_dict.update({query_id: {}})
        rel_docs = labels.get(query_id)
        for doc in rel_docs:
            if dpr_dict.get(query_id).get(doc):
                filtered_dict.get(query_id).update({doc: dpr_dict.get(query_id).get(doc)})
    return filtered_dict
def compare_overlap(dpr_dict_rel, bm25_dict_rel):
    """Print the average per-query overlap between two (relevant-only) runs.

    For every query the fraction of one run's documents that also occur in
    the other run is collected; queries with an empty document set on the
    denominator side are skipped.  Results are printed, nothing is returned.
    """
    overlap_wrt_bm25 = []
    overlap_wrt_dpr = []
    for query_id in dpr_dict_rel.keys():
        dpr_docs = set(dpr_dict_rel.get(query_id).keys())
        bm25_docs = set(bm25_dict_rel.get(query_id).keys())
        shared = dpr_docs & bm25_docs
        if bm25_docs:
            overlap_wrt_bm25.append(len(shared) / len(bm25_docs))
        if dpr_docs:
            overlap_wrt_dpr.append(len(shared) / len(dpr_docs))
    print('average percentual intersection of bm25 results which are also found in dpr {}'.format(
        np.mean(overlap_wrt_bm25)))
    print('average percentual intersection of dpr results which are also found in bm25 {}'.format(
        np.mean(overlap_wrt_dpr)))
def analyze_score_distribution(dpr_dict, name, output_dir):
    """Print min/max of all retrieval scores and save a histogram and box plot.

    :param dpr_dict: mapping query_id -> {doc_id: [rank, score]}
    :param name: run label used in the output file names
    :param output_dir: directory the two .svg figures are written into
    """
    scores = [entry[1]
              for ranked in dpr_dict.values()
              for entry in ranked.values()]
    print('maximum score is {}'.format(max(scores)))
    print('minimum score is {}'.format(min(scores)))
    # histogram of the score distribution
    plt.xlim(0, 200)
    plt.hist(scores, bins=1000)
    plt.xlabel('scores', fontsize=16)
    plt.savefig(os.path.join(output_dir, 'scores_{}.svg'.format(name)))
    plt.clf()
    # box plot of the same scores
    sns.boxplot(y=scores)
    plt.xlabel('scores', fontsize=16)
    plt.savefig(os.path.join(output_dir, 'scores_{}2.svg'.format(name)))
    plt.clf()
def evaluate_weighting(dpr_dict, bm25_dict, qrels, output_dir, output_file, weight_dpr, weight_bm25, measurements):
    """Linearly combine two runs with the given weights and evaluate the result.

    Scores of documents occurring in both runs are summed after weighting;
    documents from only one run keep their single weighted score.  Queries
    are taken from *dpr_dict*.  Returns the combined run and the measures
    produced by ranking_eval.
    """
    run = {}
    for query_id in dpr_dict.keys():
        combined = {}
        for doc, entry in dpr_dict.get(query_id).items():
            combined[doc] = weight_dpr * entry[1]
        for doc, entry in bm25_dict.get(query_id).items():
            # NOTE: truthiness check (not a membership test) mirrors the
            # original — a weighted DPR score of exactly 0 gets overwritten.
            if combined.get(doc):
                combined[doc] = combined[doc] + weight_bm25 * entry[1]
            else:
                combined[doc] = weight_bm25 * entry[1]
        run[query_id] = combined
    # evaluate the fused ranking
    measures = ranking_eval(qrels, run, output_dir, output_file, measurements)
    return run, measures
def compare_overlap_rel(dpr_dict, bm25_dict, qrels):
    """Restrict both runs to the relevant documents and print their overlap."""
    rel_dpr = return_rel_docs_for_dict(qrels, dpr_dict)
    rel_bm25 = return_rel_docs_for_dict(qrels, bm25_dict)
    compare_overlap(rel_dpr, rel_bm25)
def evaluate_weight(dpr_dict, bm25_dict, qrels, mode, weight_dpr, weight_bm25, measurements=frozenset(
        {'recall_1', 'recall_2', 'recall_3', 'recall_4', 'recall_5', 'recall_6', 'recall_7', 'recall_8', 'recall_9', 'recall_10',
         'P_1', 'P_2', 'P_3', 'P_4', 'P_5', 'P_6', 'P_7', 'P_8', 'P_9', 'P_10'})):
    """Evaluate one BM25/DPR weighting and write the evaluation to disk.

    :param mode: list whose elements 0 and 2 are used in the output paths
    :param weight_dpr: linear weight for the DPR scores
    :param weight_bm25: linear weight for the BM25 scores
    :param measurements: measure names passed through to evaluate_weighting;
        the default is a frozenset (the original used a mutable set literal
        as default argument, a shared-state pitfall)
    :return: (combined run, evaluation measures)
    """
    # NOTE(review): output_dir2 is a machine-specific absolute path.
    output_dir2 = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/bm25_dpr_legalbert/eval/{}'.format(mode[0])
    output_file = 'eval22_score_{}_{}_weight_dpr_{}_weight_bm25_{}.txt'.format(mode[0], mode[2], weight_dpr, weight_bm25)
    run, measures = evaluate_weighting(dpr_dict, bm25_dict, qrels, output_dir2, output_file, weight_dpr, weight_bm25, measurements)
    return run, measures
def remove_query_from_ranked_list(dpr_dict):
    """Drop each query's own document from its ranked candidate list.

    :param dpr_dict: mapping query_id -> {doc_id: ranking entry}
    :return: a new mapping with the entry whose doc_id equals the query_id
        removed from every ranked list
    """
    cleaned = {}
    for query_id, ranked in dpr_dict.items():
        cleaned[query_id] = {doc: entry
                             for doc, entry in ranked.items()
                             if doc != query_id}
    return cleaned
def eval_prec_rec(measurements, weights, mode, dpr_dict, bm25_dict, qrels, output_dir):
    """Evaluate several BM25/DPR weightings, write the runs and plot P/R and F1.

    :param weights: iterable of [dpr_weight, bm25_weight] pairs
    :return: mapping legend-label -> plot data as built by create_plot_data
    """
    plotting_data = {}
    for weight_dpr, weight_bm25 in weights:
        # rebuild mode so the weight combination appears in the file names
        mode = [mode[0], 'separate_para_weight_dpr_{}_bm25_{}'.format(weight_dpr, weight_bm25), mode[2], mode[3]]
        run, measures = evaluate_weight(dpr_dict, bm25_dict, qrels, mode, weight_dpr, weight_bm25, measurements)
        plotting_data.update({'BM25:{} DPR:{}'.format(weight_bm25, weight_dpr): create_plot_data(measures)})
        # NOTE(review): machine-specific absolute path.
        output_dir2 = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/bm25_dpr_legalbert/aggregate/{}/'.format(mode[0])
        sort_write_trec_output(run, output_dir2, mode)
    plot_measures(plotting_data, output_dir, 'dpr_bm25_different_weights_test2.svg')
    calculcate_f1_score(plotting_data, output_dir, 'dpr_bm25_different_weights_f15_test.txt')
    plot_f1_score(plotting_data, output_dir, 'dpr_bm25_different_weights_f15_test.svg')
    return plotting_data
def read_in_run_from_pickle(bm25_file):
    """Load a pickled run and attach 1-based ranks.

    The pickle holds {query_id: {doc_id: score}} in rank order; the result
    is {query_id: {doc_id: [rank, float(score)]}}.
    """
    with open(bm25_file, 'rb') as f:
        raw_run = pickle.load(f)
    ranked_run = {}
    for query_id, doc_scores in raw_run.items():
        ranked_run[query_id] = {
            doc_id: [rank, float(score)]
            for rank, (doc_id, score) in enumerate(doc_scores.items(), start=1)
        }
    return ranked_run
def compute_ci(run1, qrels, measurement, conf_int=0.95):
    """Normal-approximation confidence interval of one per-query measure.

    Uses the mean and standard deviation of the per-query measure values as
    location and scale of a normal distribution and returns the
    (lower, upper) interval for the given confidence level.
    """
    per_query = measure_per_query(run1, qrels, measurement)
    return stats.norm.interval(conf_int,
                               loc=np.mean(per_query),
                               scale=np.std(per_query))
def create_ci_data(run, qrels, measurements):
    """Compute confidence intervals per recall measure, ordered by cut-off.

    *measurements* must contain the recall measures for cut-offs 100..1000
    in steps of 50; the result maps each measure name to its interval.
    """
    cis = {measure: compute_ci(run, qrels, measure) for measure in measurements}
    # fixed ordering: recall_100, recall_150, ..., recall_1000
    ordered_keys = ['recall_{}'.format(cutoff) for cutoff in range(100, 1001, 50)]
    return {key: cis[key] for key in ordered_keys}
if __name__ == "__main__":
    # Driver script: fuse BM25 and DPR runs for COLIEE 2021 Task 1 with
    # different linear weights, evaluate recall at large cut-offs and plot
    # the curves with normal-approximation confidence bands.
    # NOTE(review): all paths are machine-specific absolute paths, and the
    # first large commented block is an earlier experiment kept for reference.
    mode = ['test', 'separate_para', 'rrf', 'legal_task2']
    dpr_file = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/dpr/{}/legalbert/aggregate/{}/run_aggregated_{}_{}.pickle'.format(
        mode[3], mode[0], mode[0], mode[2])
    # #'/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/dpr/{}/legalbert/aggregate/{}/search_{}_{}_aggregation_{}.txt'.format(
    # # mode[3], mode[0], mode[0], mode[1], mode[2])
    # bm25_file = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/bm25/aggregate/{}/separately_para_w_summ_intro/run_aggregated_{}_{}.pickle'.format(
    # mode[0], mode[0], mode[2])
    # bm25_file2= '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/bm25/aggregate/{}/separately_para_w_summ_intro/search_{}_separately_para_w_summ_intro_aggregation_{}.txt'.format(
    # mode[0], mode[0], mode[2])
    # output_dir = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/bm25_dpr_legalbert/plot/{}'.format(mode[0])
    #
    # dpr_dict = read_in_run_from_pickle(dpr_file)
    # dpr_dict = remove_query_from_ranked_list(dpr_dict)
    #
    # bm25_dict = read_in_run_from_pickle(bm25_file)
    # bm25_dict = remove_query_from_ranked_list(bm25_dict)
    #
    # # read in the label files
    # label_file = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/train/train_wo_val_labels.json'
    # label_file_val = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/val/val_labels.json'
    # label_file_test = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/test/task1_test_labels_2021.json'
    #
    # if mode[0] == 'train':
    # qrels = read_label_file(label_file)
    # elif mode[0] == 'val':
    # qrels = read_label_file(label_file_val)
    # elif mode[0] == 'test':
    # qrels = read_label_file(label_file_test)
    # #with open(label_file_test, 'rb') as f:
    # # qrels = json.load(f)
    # # qrels = [x.split('.txt')[0] for x in qrels]
    #
    # compare_overlap_rel(dpr_dict, bm25_dict, qrels)
    # analyze_score_distribution(dpr_dict, 'dpr', output_dir)
    # analyze_score_distribution(bm25_dict, 'bm25', output_dir)
    #
    # #measurements = {'recall_1', 'recall_2', 'recall_3', 'recall_4', 'recall_5', 'recall_6', 'recall_7', 'recall_8',
    # # 'recall_9', 'recall_10','recall_11', 'recall_12', 'recall_13', 'recall_14', 'recall_15', 'recall_16', 'recall_17', 'recall_18',
    # # 'recall_19', 'recall_20','P_1', 'P_2', 'P_3', 'P_4', 'P_5', 'P_6', 'P_7', 'P_8', 'P_9', 'P_10',
    # # 'P_11', 'P_12', 'P_13', 'P_14', 'P_15', 'P_16', 'P_17', 'P_18', 'P_19', 'P_20'}
    #
    # measurements = {'recall_50','recall_100','recall_150','recall_200','recall_250','recall_300','recall_350','recall_400','recall_450',
    # 'recall_500','recall_550','recall_600','recall_650','recall_700','recall_750','recall_800','recall_850','recall_900','recall_950','recall_1000'}
    # weights = [[1,0], [0,1], [1, 1]] #[[2, 1], [3, 1], [4, 1], [1,0], [0,1], [1, 1], [1, 2], [1, 3], [1, 4]]
    #
    # #weights = [[2, 1], [3, 1], [4, 1], [1,0], [0,1], [1, 1], [1, 2], [1, 3], [1, 4]]
    #
    # plotting_data = {}
    # plotting_data2 = {}
    # for weight in weights:
    # mode = ['test', 'separate_para_weight_dpr_{}_bm25_{}'.format(weight[0], weight[1]), 'overlap_ranks',
    # 'legal_task2']
    # run, measures = evaluate_weight(dpr_dict, bm25_dict, qrels, mode, weight[0], weight[1], measurements)
    # if 'recall_100' in measurements:
    # plotting_data.update({'BM25:{} DPR:{}'.format(weight[1], weight[0]): create_plot_data_recall(measures)})
    # else:
    # plotting_data.update({'BM25:{} DPR:{}'.format(weight[1], weight[0]): create_plot_data(measures)})
    # plotting_data2.update({'BM25:{} DPR:{}'.format(weight[1], weight[0]): measures})
    #
    # plotting_data.update({'BM25+DPR': plotting_data.get('BM25:1 DPR:1')})
    # plotting_data.update({'DPR': plotting_data.get('BM25:0 DPR:1')})
    # plotting_data.update({'BM25': plotting_data.get('BM25:1 DPR:0')})
    #
    # plotting_data.pop('BM25:1 DPR:0')
    # plotting_data.pop('BM25:0 DPR:1')
    # plotting_data.pop('BM25:1 DPR:1')
    #
    # fig = plt.figure(figsize=(10, 8))
    # ax = fig.add_subplot(111)
    # plt.xlabel('Cut-off', fontsize=15)
    # plt.grid(True, linewidth=0.5, color='lightgrey', linestyle='-')
    # plt.ylabel('Recall', fontsize=15)
    # i = 0
    # colours = ['#d88144','#7fa955', '#5670bc']
    # for name, measure in plotting_data.items():
    # xs, ys = zip(*measure.values())
    # labels = measure.keys()
    # # display
    # plt.scatter(xs, ys, c= colours[i], marker='o', edgecolors=colours[i])
    # plt.plot(xs, ys, colours[i], label=name)
    # # for label, x, y in zip(labels, xs, ys):
    # # plt.annotate(label, xy=(x, y))
    # i +=1
    # plt.legend(loc="lower right")
    # plt.xticks(range(1, 21, 2), range(100, 1100, 100))
    # ax.patch.set_facecolor('white')
    # plt.savefig(os.path.join(output_dir, 'dpr_bm25_different_weights_{}_morerec8_dpr.svg'.format(mode[0])), bbox_inches='tight')
    #
    # plot_recall(plotting_data, output_dir, 'dpr_bm25_different_weights_{}_morerec2_dpr.svg'.format(mode[0]))
    # if 'recall_1' in measurements:
    # plot_f1_score(plotting_data, output_dir, 'dpr_bm25_different_weights_f1_score_{}_top20.svg'.format(mode[0]))
    # calculcate_f1_score(plotting_data, output_dir, 'f1-scores_dpr_bm25_{}_top20.txt'.format(mode[0]))
    #
    # # here i should write the plotting data
    # print(plotting_data2)
    #
    # with open(os.path.join(output_dir, 'plotting_data_weights_recall_precision_top_20.txt'), 'w') as f:
    # for key, value in plotting_data2.items():
    # for key2, value2 in value.items():
    # f.writelines('{}\t{}\t{}\n'.format(key, key2, value2[1]))
    # ---- document-level (whole document) runs ----
    mode = ['test', 'separate_para', 'rrf', 'legal_task2']
    dpr_file_doc = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/dpr/{}/legalbert/aggregate/{}/run_dpr_aggregated_{}_whole_doc_overlap_docs.pickle'.format(
        mode[3], mode[0], mode[0])
    #dpr_file_parm = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/dpr/{}/legalbert/aggregate/{}/run_aggregated_test_rrf_overlap_ranks.pickle'.format(
    # mode[3], mode[0])
    dpr_file_parm = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/dpr/legal_task1/legalbert/eval/test/run_aggregated_test_vrrf_legalbert_doc.pickle'
    #dpr_file_parm = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/dpr/{}/legalbert/aggregate/{}/run_dpr_aggregated_{}_parm_overlap_ranks.pickle'.format(
    # mode[3], mode[0], mode[0])
    # '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/dpr/{}/legalbert/aggregate/{}/search_{}_{}_aggregation_{}.txt'.format(
    # mode[3], mode[0], mode[0], mode[1], mode[2])
    bm25_file_doc = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/bm25/aggregate/{}/separately_para_w_summ_intro/run_bm25_aggregated_{}_whole_doc_overlap_docs.pickle'.format(
        mode[0], mode[0])
    #bm25_file_parm = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/bm25/aggregate/{}/separately_para_w_summ_intro/run_bm25_aggregated_{}_parm_{}.pickle'.format(
    # mode[0], mode[0], mode[2])
    bm25_file_parm = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/bm25/aggregate/test/separately_para_w_summ_intro/run_aggregated_test_rrf_overlap_ranks.pickle'
    #run_aggregated_{}_{}.pickle'.format(
    # mode[0], mode[0], mode[2])
    output_dir = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/bm25_dpr_legalbert/plot/{}'.format(mode[0])
    # load both document-level runs and drop each query's own document
    dpr_dict_doc = read_in_run_from_pickle(dpr_file_doc)
    dpr_dict_doc = remove_query_from_ranked_list(dpr_dict_doc)
    bm25_dict_doc = read_in_run_from_pickle(bm25_file_doc)
    bm25_dict_doc = remove_query_from_ranked_list(bm25_dict_doc)
    # read in the label files
    label_file = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/train/train_wo_val_labels.json'
    label_file_val = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/val/val_labels.json'
    label_file_test = '/mnt/c/Users/salthamm/Documents/phd/data/coliee2021/task1/test/task1_test_labels_2021.json'
    if mode[0] == 'train':
        qrels = read_label_file(label_file)
    elif mode[0] == 'val':
        qrels = read_label_file(label_file_val)
    elif mode[0] == 'test':
        qrels = read_label_file(label_file_test)
    # compare_overlap_rel(dpr_dict_doc, bm25_dict_doc, qrels)
    #
    # recall at cut-offs 100..1000 (step 50)
    measurements = {'recall_100', 'recall_150', 'recall_200', 'recall_250', 'recall_300', 'recall_350',
                    'recall_400', 'recall_450',
                    'recall_500', 'recall_550', 'recall_600', 'recall_650', 'recall_700', 'recall_750', 'recall_800',
                    'recall_850', 'recall_900', 'recall_950', 'recall_1000'}
    # [dpr_weight, bm25_weight]: each system alone plus the 1:1 fusion
    weights = [[1,0], [0,1], [1, 1]]
    plotting_data = {}
    plotting_data2 = {}
    confidence_intervals = {}
    for weight in weights:
        mode = ['test', 'separate_para_weight_dpr_{}_bm25_{}'.format(weight[0], weight[1]), 'overlap_ranks',
                'legal_task2']
        run, measures = evaluate_weight(dpr_dict_doc, bm25_dict_doc, qrels, mode, weight[0], weight[1], measurements)
        if 'recall_100' in measurements:
            plotting_data.update({'BM25:{} DPR:{}'.format(weight[1], weight[0]): create_plot_data_recall(measures)})
        else:
            plotting_data.update({'BM25:{} DPR:{}'.format(weight[1], weight[0]): create_plot_data(measures)})
        confidence_intervals.update({'BM25:{} DPR:{}'.format(weight[1], weight[0]): create_ci_data(run, qrels, measurements)})
        plotting_data2.update({'BM25:{} DPR:{}'.format(weight[1], weight[0]): measures})
    #print(plotting_data2)
    print(plotting_data)
    # rename the weight combinations to the legend labels used in the paper
    #plotting_data.update({'BM25+DPR': plotting_data.get('BM25:1 DPR:1')})
    plotting_data.update({'Doc FirstP (DPR)': plotting_data.get('BM25:0 DPR:1')})
    plotting_data.update({'Doc (BM25)': plotting_data.get('BM25:1 DPR:0')})
    plotting_data.pop('BM25:1 DPR:0')
    plotting_data.pop('BM25:0 DPR:1')
    plotting_data.pop('BM25:1 DPR:1')
    confidence_intervals.update({'Doc FirstP (DPR)': confidence_intervals.get('BM25:0 DPR:1')})
    confidence_intervals.update({'Doc (BM25)': confidence_intervals.get('BM25:1 DPR:0')})
    confidence_intervals.pop('BM25:1 DPR:0')
    confidence_intervals.pop('BM25:0 DPR:1')
    confidence_intervals.pop('BM25:1 DPR:1')
    # ---- paragraph-level (PARM) runs, same weighting grid ----
    dpr_dict_parm = read_in_run_from_pickle(dpr_file_parm)
    dpr_dict_parm = remove_query_from_ranked_list(dpr_dict_parm)
    bm25_dict_parm = read_in_run_from_pickle(bm25_file_parm)
    bm25_dict_parm = remove_query_from_ranked_list(bm25_dict_parm)
    #compare_overlap_rel(dpr_dict_parm, bm25_dict_parm, qrels)
    #analyze_score_distribution(dpr_dict_parm, 'dpr', output_dir)
    #analyze_score_distribution(bm25_dict_parm, 'bm25', output_dir)
    weights = [[1,0], [0,1], [1, 1]]
    plotting_data2 = {}
    for weight in weights:
        mode = ['test', 'separate_para_weight_dpr_{}_bm25_{}'.format(weight[0], weight[1]), 'rrf',
                'legal_task2']
        run, measures = evaluate_weight(dpr_dict_parm, bm25_dict_parm, qrels, mode, weight[0], weight[1], measurements)
        if 'recall_100' in measurements:
            plotting_data.update({'BM25:{} DPR:{}'.format(weight[1], weight[0]): create_plot_data_recall(measures)})
        else:
            plotting_data.update({'BM25:{} DPR:{}'.format(weight[1], weight[0]): create_plot_data(measures)})
        confidence_intervals.update(
            {'BM25:{} DPR:{}'.format(weight[1], weight[0]): create_ci_data(run, qrels, measurements)})
        plotting_data2.update({'BM25:{} DPR:{}'.format(weight[1], weight[0]): measures})
    print(plotting_data)
    print(plotting_data2)
    #plotting_data.update({'PARM (DPR)+PARM (BM25)': plotting_data.get('BM25:1 DPR:1')})
    plotting_data.update({'PARM-VRRF (DPR)': plotting_data.get('BM25:0 DPR:1')})
    plotting_data.update({'PARM-RRF (BM25)': plotting_data.get('BM25:1 DPR:0')})
    plotting_data.pop('BM25:1 DPR:0')
    plotting_data.pop('BM25:0 DPR:1')
    plotting_data.pop('BM25:1 DPR:1')
    confidence_intervals.update({'PARM-VRRF (DPR)': confidence_intervals.get('BM25:0 DPR:1')})
    confidence_intervals.update({'PARM-RRF (BM25)': confidence_intervals.get('BM25:1 DPR:0')})
    confidence_intervals.pop('BM25:1 DPR:0')
    confidence_intervals.pop('BM25:0 DPR:1')
    confidence_intervals.pop('BM25:1 DPR:1')
    #desired_order_list = ['PARM-VRRF (DPR)','PARM-RRF (BM25)', 'Doc (BM25)','Doc (DPR)']
    desired_order_list = ['Doc FirstP (DPR)', 'Doc (BM25)','PARM-RRF (BM25)','PARM-VRRF (DPR)']
    plotting_data = {k: plotting_data[k] for k in desired_order_list}
    confidence_intervals = {k: confidence_intervals[k] for k in desired_order_list}
    # ---- plot recall vs. cut-off with confidence bands ----
    fig = plt.figure(figsize=(10, 8))
    ax = fig.add_subplot(111)
    plt.xlabel('Cut-off', fontsize=15)
    plt.grid(True, linewidth=0.5, color='lightgrey', linestyle='-')
    plt.ylabel('Recall', fontsize=15)
    i = 0
    colours = ['#D69915','#D60038','#00D6C4','#004BD6']#['#004BD6','#00D6C4','#D60038','#D69915'] #['#0BD626','#00D6C4','#004BD6','#D60038','#D69915']
    for name, measure in plotting_data.items():
        xs, ys = zip(*measure.values())
        print(xs)
        print(ys)
        ci_min, ci_max = zip(*confidence_intervals.get(name).values())
        print(ci_min)
        print(ci_max)
        labels = measure.keys()
        # display
        plt.scatter(xs, ys, c=colours[i], marker='o', edgecolors=colours[i])
        plt.plot(xs, ys, colours[i], label=name)
        #sns.lineplot(xs, ys, ci=80, color=colours[i], label=name)
        print((list(ys) + list(ci_min)))
        ax.fill_between(xs, (ci_min), (ci_max), color=colours[i], alpha=.1)
        #sns.regplot(xs, ys, ci=80)
        # for label, x, y in zip(labels, xs, ys):
        # plt.annotate(label, xy=(x, y))
        i += 1
    # legend entries reversed so they match the curve order top-to-bottom
    handles, labels = ax.get_legend_handles_labels()
    ax.legend(handles[::-1], labels[::-1], loc='lower right')
    #desired_order_dict = {'PARM-RRF (BM25)':2, 'PARM-VRRF (DPR)':0, 'Doc (BM25)':3, 'Doc (DPR)':4}#{'PARM (DPR)+PARM (BM25)':0, 'PARM (BM25)':2, 'PARM (DPR)':1, 'Doc (BM25)':3, 'Doc (DPR)':4}
    #by_label = dict(sorted(zip(labels, handles), key=lambda t: desired_order_dict.get(t[0])))
    #print(by_label)
    #print(by_label.keys())
    #ax.legend(by_label.values(), by_label.keys())
    ##labels, handles = zip(*sorted(zip(labels, handles), key=lambda t: desired_order_dict.get(t[0])))
    ##ax.legend(handles, labels)
    #plt.legend(loc="lower right")
    plt.xticks(range(0, 20, 2), range(100, 1100, 100))
    ax.patch.set_facecolor('white')
    plt.savefig(os.path.join(output_dir, 'dpr_bm25_{}_dpr_parm_all17.svg'.format(mode[0])),bbox_inches='tight')
|
from django.shortcuts import render, redirect
from django.utils.timezone import now
from .models import (
GreensRolling,
TeeRolling,
FairwayRolling
)
from .forms import (
GreensRollingForm,
TeeRollingForm,
FairwayRollingForm
)
def curr_time():
    """Return the current time as reported by django.utils.timezone.now."""
    current = now()
    return current
def greensIndex(request):
    """List the 20 most recent greens rollings, newest first."""
    recent = GreensRolling.objects.all().order_by('-roll_date')[:20]
    return render(request, 'rolling/greens_index.html', {
        'curr_time': curr_time(),
        'rollings': recent,
    })
def greensNew(request):
    """Show an empty form for recording a new greens rolling."""
    return render(request, 'rolling/greens_new.html', {
        'curr_time': curr_time(),
        'form': GreensRollingForm(),
    })
def greensCreate(request):
    """Create a greens rolling from a POSTed form.

    On a valid, authenticated POST the rolling is saved and the user is
    redirected to its detail page.  On GET, an invalid form, or an
    unauthenticated user the entry form is (re-)rendered — the original
    fell off the end and returned None, which Django turns into a 500.
    """
    if request.method == 'POST':
        form = GreensRollingForm(data=request.POST)
        # NOTE(review): is_authenticated is a property from Django 1.10+;
        # the call form implies an older Django version — confirm.
        if form.is_valid() and request.user.is_authenticated():
            pending_form = form.save(commit=False)
            pending_form.save()
            form.save_m2m()
            return redirect('roll:greens_detail', pk=pending_form.pk)
    else:
        form = GreensRollingForm()
    context = {
        'curr_time': curr_time(),
        'form': form,
    }
    return render(request, 'rolling/greens_new.html', context)
def greensDetail(request, pk):
    """Show a single greens rolling."""
    entry = GreensRolling.objects.get(pk=pk)
    return render(request, 'rolling/greens_detail.html', {
        'curr_time': curr_time(),
        'rolling': entry,
    })
def greensEdit(request, pk):
    """Show a pre-filled form for editing an existing greens rolling."""
    entry = GreensRolling.objects.get(pk=pk)
    return render(request, 'rolling/greens_edit.html', {
        'curr_time': curr_time(),
        'rolling': entry,
        'form': GreensRollingForm(instance=entry),
    })
def greensUpdate(request, pk):
    """Update an existing greens rolling from a POSTed form.

    On a valid, authenticated POST the rolling is saved and the user is
    redirected to its detail page.  On GET, an invalid form, or an
    unauthenticated user the edit form is (re-)rendered — the original
    fell off the end and returned None, which Django turns into a 500.
    """
    rolling = GreensRolling.objects.get(pk=pk)
    if request.method == 'POST':
        form = GreensRollingForm(request.POST, instance=rolling)
        # NOTE(review): is_authenticated is a property from Django 1.10+;
        # the call form implies an older Django version — confirm.
        if form.is_valid() and request.user.is_authenticated():
            pending_form = form.save(commit=False)
            pending_form.save()
            form.save_m2m()
            return redirect('roll:greens_detail', pk=rolling.pk)
    else:
        form = GreensRollingForm(instance=rolling)
    context = {
        'curr_time': curr_time(),
        'rolling': rolling,
        'form': form,
    }
    return render(request, 'rolling/greens_edit.html', context)
def teesIndex(request):
    """List the 20 most recent tee rollings, newest first."""
    recent = TeeRolling.objects.all().order_by('-roll_date')[:20]
    return render(request, 'rolling/tees_index.html', {
        'curr_time': curr_time(),
        'rollings': recent,
    })
def teesNew(request):
    """Show an empty form for recording a new tee rolling."""
    return render(request, 'rolling/tees_new.html', {
        'curr_time': curr_time(),
        'form': TeeRollingForm(),
    })
def teesCreate(request):
    """Create a tee rolling from a POSTed form.

    On a valid, authenticated POST the rolling is saved and the user is
    redirected to its detail page.  On GET, an invalid form, or an
    unauthenticated user the entry form is (re-)rendered — the original
    fell off the end and returned None, which Django turns into a 500.
    """
    if request.method == 'POST':
        form = TeeRollingForm(data=request.POST)
        # NOTE(review): is_authenticated is a property from Django 1.10+;
        # the call form implies an older Django version — confirm.
        if form.is_valid() and request.user.is_authenticated():
            pending_form = form.save(commit=False)
            pending_form.save()
            form.save_m2m()
            return redirect('roll:tees_detail', pk=pending_form.pk)
    else:
        form = TeeRollingForm()
    context = {
        'curr_time': curr_time(),
        'form': form,
    }
    return render(request, 'rolling/tees_new.html', context)
def teesDetail(request, pk):
    """Show a single tee rolling."""
    entry = TeeRolling.objects.get(pk=pk)
    return render(request, 'rolling/tees_detail.html', {
        'curr_time': curr_time(),
        'rolling': entry,
    })
def teesEdit(request, pk):
    """Show a pre-filled form for editing an existing tee rolling."""
    entry = TeeRolling.objects.get(pk=pk)
    return render(request, 'rolling/tees_edit.html', {
        'curr_time': curr_time(),
        'rolling': entry,
        'form': TeeRollingForm(instance=entry),
    })
def teesUpdate(request, pk):
    """Update an existing tee rolling from a POSTed form.

    On a valid, authenticated POST the rolling is saved and the user is
    redirected to its detail page.  On GET, an invalid form, or an
    unauthenticated user the edit form is (re-)rendered — the original
    fell off the end and returned None, which Django turns into a 500.
    """
    rolling = TeeRolling.objects.get(pk=pk)
    if request.method == 'POST':
        form = TeeRollingForm(request.POST, instance=rolling)
        # NOTE(review): is_authenticated is a property from Django 1.10+;
        # the call form implies an older Django version — confirm.
        if form.is_valid() and request.user.is_authenticated():
            pending_form = form.save(commit=False)
            pending_form.save()
            form.save_m2m()
            return redirect('roll:tees_detail', pk=rolling.pk)
    else:
        form = TeeRollingForm(instance=rolling)
    context = {
        'curr_time': curr_time(),
        'rolling': rolling,
        'form': form,
    }
    return render(request, 'rolling/tees_edit.html', context)
|
class LED_64_4_D1:
    """Nibble-oriented implementation of the LED-64 lightweight block cipher
    built on inverse round tables.

    The 64-bit state is held as 16 nibbles (4-bit values); blocks are
    8 bytes.  The ``*Inv`` lookup tables are the inverse S-box and GF(2^4)
    constant-multiplication tables, and ``Step`` is driven 8 times with
    4 rounds each (32 rounds total).  NOTE(review): ``Decrypt`` calls
    ``EncryptOneBlock`` and names its arguments plainText/cipherText — with
    inverse tables this forward loop presumably undoes an LED-64 encryption;
    confirm the naming against the callers.
    """
    # cipher parameter: 64-bit key
    keySize = 64
    # per-round constants XORed into state column 0, derived from keySize
    keySizeConst0 = (keySize >> 4)
    keySizeConst1 = (0x01 ^ (keySize >> 4))
    keySizeConst2 = (0x02 ^ (keySize & 0x0F))
    keySizeConst3 = (0x03 ^ (keySize & 0x0F))
    # inverse 4-bit S-box
    sBoxInv = [0x05, 0x0E, 0x0F, 0x08, 0x0C, 0x01, 0x02, 0x0D, 0x0B, 0x04, 0x06, 0x03, 0x00, 0x07, 0x09, 0x0A]
    # round-constant nibbles for state rows 0/2 (RC02) and 1/3 (RC13);
    # AddConstants indexes them backwards via [31 - round]
    RC02 = [0x00, 0x00, 0x00, 0x01, 0x03, 0x07, 0x07, 0x07, 0x06, 0x05, 0x03, 0x07, 0x07, 0x06, 0x04, 0x01, 0x03, 0x07,
        0x06, 0x05, 0x02, 0x05, 0x03, 0x06, 0x04, 0x00, 0x00, 0x01, 0x02, 0x05, 0x03, 0x07, 0x06, 0x04, 0x00, 0x01, 0x03,
        0x06, 0x05, 0x03, 0x06, 0x05, 0x02, 0x04, 0x01, 0x02, 0x04, 0x00]
    RC13 = [0x01, 0x03, 0x07, 0x07, 0x07, 0x06, 0x05, 0x03, 0x07, 0x07, 0x06, 0x04, 0x01, 0x03, 0x07, 0x06, 0x05, 0x02,
        0x05, 0x03, 0x06, 0x04, 0x00, 0x00, 0x01, 0x02, 0x05, 0x03, 0x07, 0x06, 0x04, 0x00, 0x01, 0x03, 0x06, 0x05, 0x03,
        0x06, 0x05, 0x02, 0x04, 0x01, 0x02, 0x04, 0x00, 0x01, 0x02, 0x04]
    # GF(2^4) multiply-by-constant lookup tables used by the inverse
    # MixColumns step (table mulXInv[v] = X * v in the cipher's field)
    mul2Inv = [0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x03, 0x01, 0x07, 0x05, 0x0B, 0x09, 0x0F, 0x0D]
    mul3Inv = [0x00, 0x03, 0x06, 0x05, 0x0C, 0x0F, 0x0A, 0x09, 0x0B, 0x08, 0x0D, 0x0E, 0x07, 0x04, 0x01, 0x02]
    mul4Inv = [0x00, 0x04, 0x08, 0x0C, 0x03, 0x07, 0x0B, 0x0F, 0x06, 0x02, 0x0E, 0x0A, 0x05, 0x01, 0x0D, 0x09]
    mul5Inv = [0x00, 0x05, 0x0A, 0x0F, 0x07, 0x02, 0x0D, 0x08, 0x0E, 0x0B, 0x04, 0x01, 0x09, 0x0C, 0x03, 0x06]
    mul6Inv = [0x00, 0x06, 0x0C, 0x0A, 0x0B, 0x0D, 0x07, 0x01, 0x05, 0x03, 0x09, 0x0F, 0x0E, 0x08, 0x02, 0x04]
    mul7Inv = [0x00, 0x07, 0x0E, 0x09, 0x0F, 0x08, 0x01, 0x06, 0x0D, 0x0A, 0x03, 0x04, 0x02, 0x05, 0x0C, 0x0B]
    mul8Inv = [0x00, 0x08, 0x03, 0x0B, 0x06, 0x0E, 0x05, 0x0D, 0x0C, 0x04, 0x0F, 0x07, 0x0A, 0x02, 0x09, 0x01]
    mul9Inv = [0x00, 0x09, 0x01, 0x08, 0x02, 0x0B, 0x03, 0x0A, 0x04, 0x0D, 0x05, 0x0C, 0x06, 0x0F, 0x07, 0x0E]
    mulcInv = [0x00, 0x0C, 0x0B, 0x07, 0x05, 0x09, 0x0E, 0x02, 0x0A, 0x06, 0x01, 0x0D, 0x0F, 0x03, 0x04, 0x08]
    muldInv = [0x00, 0x0D, 0x09, 0x04, 0x01, 0x0C, 0x08, 0x05, 0x02, 0x0F, 0x0B, 0x06, 0x03, 0x0E, 0x0A, 0x07]
    muleInv = [0x00, 0x0E, 0x0F, 0x01, 0x0D, 0x03, 0x02, 0x0C, 0x09, 0x07, 0x06, 0x08, 0x04, 0x0A, 0x0B, 0x05]
    @classmethod
    def byte2nibble(cls, byteArr, offSet):
        """Split 8 bytes starting at offSet into 16 nibbles (low nibble first)."""
        b = [None] * 16
        for i in range(0, 8, 1):
            b[i * 2 + 0] = byteArr[offSet + i] & 0x0F
            b[i * 2 + 1] = (byteArr[offSet + i] >> 4) & 0x0F
        return b
    @classmethod
    def nibbles2byte(cls, nibbles):
        """Pack 16 nibbles back into a new list of 8 bytes."""
        c = [None] * 8
        for i in range(0, 8, 1):
            c[i] = (nibbles[i * 2 + 0] ^ (nibbles[i * 2 + 1] << 4))
        return c
    @classmethod
    def nibbles2byteCopy(cls, nibbles, byteArr, offSet):
        """Pack 16 nibbles into byteArr in place, starting at offSet."""
        for i in range(0, 8, 1):
            byteArr[offSet + i] = (nibbles[i * 2 + 0] ^ (nibbles[i * 2 + 1] << 4))
        return
    @classmethod
    def AddKey(cl, state, roundKey):
        # XOR the 16 round-key nibbles into the state, in place.
        # NOTE(review): the first parameter is named 'cl' instead of the
        # conventional 'cls'; functionally identical.
        for i in range(0, 16, 1):
            state[i] ^= roundKey[i]
        return
    @classmethod
    def AddConstants(cls, state, round):
        """XOR the key-size constants (column 0) and the round constants
        (columns 1 and 3, via RC02/RC13 indexed at 31 - round) into state."""
        state[0] ^= cls.keySizeConst0
        state[4] ^= cls.keySizeConst1
        state[8] ^= cls.keySizeConst2
        state[12] ^= cls.keySizeConst3
        state[1] ^= cls.RC02[31 - round]
        state[9] ^= cls.RC02[31 - round]
        state[5] ^= cls.RC13[31 - round]
        state[13] ^= cls.RC13[31 - round]
        return
    @classmethod
    def SubCellShiftRowAndMixColumns(cls, state):
        """Apply one fused inverse round: MixColumns via the mul*Inv tables
        (with the row rotation folded into the temp indices), then the
        inverse S-box on every nibble, in place."""
        temp = [None] * 16
        # column 0 (temp indices already include the shifted row positions)
        temp[0] = (cls.mulcInv[state[0]] ^ cls.mulcInv[state[4]] ^ cls.muldInv[state[8]] ^ cls.mul4Inv[state[12]])
        temp[5] = (cls.mul3Inv[state[0]] ^ cls.mul8Inv[state[4]] ^ cls.mul4Inv[state[8]] ^ cls.mul5Inv[state[12]])
        temp[10] = (cls.mul7Inv[state[0]] ^ cls.mul6Inv[state[4]] ^ cls.mul2Inv[state[8]] ^ cls.muleInv[state[12]])
        temp[15] = (cls.muldInv[state[0]] ^ cls.mul9Inv[state[4]] ^ cls.mul9Inv[state[8]] ^ cls.muldInv[state[12]])
        # column 1
        temp[1] = (cls.mulcInv[state[1]] ^ cls.mulcInv[state[5]] ^ cls.muldInv[state[9]] ^ cls.mul4Inv[state[13]])
        temp[6] = (cls.mul3Inv[state[1]] ^ cls.mul8Inv[state[5]] ^ cls.mul4Inv[state[9]] ^ cls.mul5Inv[state[13]])
        temp[11] = (cls.mul7Inv[state[1]] ^ cls.mul6Inv[state[5]] ^ cls.mul2Inv[state[9]] ^ cls.muleInv[state[13]])
        temp[12] = (cls.muldInv[state[1]] ^ cls.mul9Inv[state[5]] ^ cls.mul9Inv[state[9]] ^ cls.muldInv[state[13]])
        # column 2
        temp[2] = (cls.mulcInv[state[2]] ^ cls.mulcInv[state[6]] ^ cls.muldInv[state[10]] ^ cls.mul4Inv[state[14]])
        temp[7] = (cls.mul3Inv[state[2]] ^ cls.mul8Inv[state[6]] ^ cls.mul4Inv[state[10]] ^ cls.mul5Inv[state[14]])
        temp[8] = (cls.mul7Inv[state[2]] ^ cls.mul6Inv[state[6]] ^ cls.mul2Inv[state[10]] ^ cls.muleInv[state[14]])
        temp[13] = (cls.muldInv[state[2]] ^ cls.mul9Inv[state[6]] ^ cls.mul9Inv[state[10]] ^ cls.muldInv[state[14]])
        # column 3
        temp[3] = (cls.mulcInv[state[3]] ^ cls.mulcInv[state[7]] ^ cls.muldInv[state[11]] ^ cls.mul4Inv[state[15]])
        temp[4] = (cls.mul3Inv[state[3]] ^ cls.mul8Inv[state[7]] ^ cls.mul4Inv[state[11]] ^ cls.mul5Inv[state[15]])
        temp[9] = (cls.mul7Inv[state[3]] ^ cls.mul6Inv[state[7]] ^ cls.mul2Inv[state[11]] ^ cls.muleInv[state[15]])
        temp[14] = (cls.muldInv[state[3]] ^ cls.mul9Inv[state[7]] ^ cls.mul9Inv[state[11]] ^ cls.muldInv[state[15]])
        # inverse S-box substitution on every nibble
        for i in range(0, 16, 1):
            state[i] = cls.sBoxInv[temp[i]];
        return
    @classmethod
    def Step(cls, state, step):
        """Run one step = 4 rounds (round + constant addition) in place."""
        for i in range(0, 4, 1):
            cls.SubCellShiftRowAndMixColumns(state)
            cls.AddConstants(state, (step * 4 + i))
        return
    @classmethod
    def EncryptOneBlock(cls, state, sk0):
        """Process one 16-nibble block in place: key whitening, 8 steps
        (32 rounds), key whitening again with the same subkey."""
        cls.AddKey(state, sk0)
        for i in range(0, 8, 1):
            cls.Step(state, i)
        cls.AddKey(state, sk0)
        return
    @classmethod
    def Decrypt(cls, plainText, key):
        """Apply the block operation to every 8-byte block of *plainText*
        under *key* and return the result as a new byte list.

        NOTE(review): input length is assumed to be a multiple of 8 bytes;
        no padding is applied here.
        """
        cipherText = [None] * len(plainText)
        sk0 = cls.byte2nibble(key, 0)
        for i in range(0, len(plainText), 8):
            state = cls.byte2nibble(plainText, i)
            cls.EncryptOneBlock(state, sk0)
            cls.nibbles2byteCopy(state, cipherText, i)
        return cipherText
|
import numpy as np
import os
from spektral.data import Dataset, Graph
import tensorflow as tf
import glob
from tensorflow import keras
import json
## NOTE: the MyDataset class below is currently unused anywhere in this module
class MyDataset(Dataset):
    """Spektral dataset that stores one swarm graph on disk as an .npz file."""

    def __init__(self, nodes, feats, **kwargs):
        # stored for later reference; the base class drives download()/read()
        self.nodes = nodes
        self.feats = feats
        super().__init__(**kwargs)

    def download(self, ARGS, prefix, X):
        """Build the adjacency data and write node features + edges below self.path."""
        os.mkdir(self.path)
        adjacency = load_edge_data(ARGS.data_dir,
                                   prefix=prefix, size=ARGS.data_size, padding=ARGS.max_padding)
        # np.savez appends the .npz suffix automatically
        np.savez(os.path.join(self.path, 'graph_'), x=X, a=adjacency)

    def read(self):
        """Return the stored graph as a single-element list of Graph objects."""
        stored = np.load(os.path.join(self.path, 'graph_.npz'))
        return [Graph(x=stored['x'], a=stored['a'])]
def graph_adj(edges):
    """Build one dense adjacency matrix per edge type (1, 2 and 3).

    :param edges: 2-D array of edge-type codes; edges.shape[1] is the
        number of nodes
    :return: list of three adjacency matrices, one per edge type
    """
    size = edges.shape[1]
    return [Adjacency(get_edge_indices(edges, edge_type), size)
            for edge_type in (1, 2, 3)]
def Adjacency(graph, size):
    """Build a dense 0/1 adjacency matrix from edge index arrays.

    :param graph: pair of index sequences (rows, cols), e.g. as returned
        by np.where
    :param size: number of nodes; the matrix is size x size
    :return: np.ndarray of shape (size, size) with 1 at every (row, col)
    """
    # removed the unused 'index' local and replaced the Python loop over a
    # list-of-lists with direct numpy fancy indexing
    matrix = np.zeros((size, size), dtype=int)
    rows, cols = graph[0], graph[1]
    matrix[rows, cols] = 1
    return matrix
def get_edge_indices(edges, number):
    """Returns edge indices of the adjacency matrix"""
    matches = (edges == number)
    return np.where(matches)
class Conv1D(keras.layers.Layer):
    """
    Condense and abstract the time segments.

    Stacks TimeDistributed 1D convolutions (kernel 3, no padding): each
    layer trims 2 timesteps, so a segment of length 2 * len(filters) + 1
    is reduced to a single encoded step.
    """
    def __init__(self, filters, name=None):
        if not filters:
            raise ValueError("'filters' must not be empty")
        super().__init__(name=name)
        # time segment length before being reduced to 1 by Conv1D
        self.seg_len = 2 * len(filters) + 1
        self.conv1d_layers = [
            keras.layers.TimeDistributed(
                keras.layers.Conv1D(channels, 3, activation='relu', name=name))
            for channels in filters
        ]
    def call(self, time_segs):
        """Encode node states with 1D convolutions along timesteps (ndims as channels)."""
        encoded = time_segs
        for conv_layer in self.conv1d_layers:
            encoded = conv_layer(encoded)
        return encoded
class OutLayer(keras.layers.Layer):
    """Final projection; optionally squashes the output into [-bound, bound] via tanh."""
    def __init__(self, unit, bound=None, name=None):
        super().__init__(name=name)
        if bound is not None:
            # Bounded: tanh output scaled elementwise by `bound`.
            self.bound = np.array(bound, dtype=np.float32)
            self.dense = keras.layers.Dense(unit, 'tanh')
        else:
            # Unbounded: plain linear projection (scale factor 1).
            self.bound = 1.
            self.dense = keras.layers.Dense(unit)
    def call(self, inputs):
        return self.dense(inputs) * self.bound
def load_data(data_path, prefix='train', size=None, padding=None, load_time=False):
    """Load timeseries, edge (and optionally time) arrays from `data_path`.

    :param data_path: directory containing '{prefix}_timeseries*.npy' etc.
    :param prefix: dataset split prefix, e.g. 'train'
    :param size: if truthy, keep only this many randomly-permuted samples
    :param padding: pad selected dims of each loaded file up to this length
    :param load_time: also load '{prefix}_time*.npy' labels
    :return: (data, edges) or (data, edges, times)
    :raises ValueError: if data_path does not exist
    """
    if not os.path.exists(data_path):
        raise ValueError(f"path '{data_path}' does not exist")
    # Load timeseries data.
    timeseries_file_pattern = os.path.join(data_path, f'{prefix}_timeseries*.npy')
    all_data = _load_files(timeseries_file_pattern, np.float32, padding=padding, pad_dims=(2,))
    # Load edge data.
    edge_file_pattern = os.path.join(data_path, f'{prefix}_edge*.npy')
    # `int` replaces the `np.int` alias (deprecated in NumPy 1.20, removed
    # in 1.24); they name the same builtin type, so behavior is unchanged.
    all_edges = _load_files(edge_file_pattern, int, padding, pad_dims=(1, 2))
    # One shared permutation keeps data/edges/times sample-aligned.
    shuffled_idx = np.random.permutation(len(all_data))
    # Truncate data samples if `size` is given.
    if size:
        samples = shuffled_idx[:size]
        all_data = all_data[samples]
        all_edges = all_edges[samples]
    # Load time labels only when required.
    if load_time:
        time_file_pattern = os.path.join(data_path, f'{prefix}_time*.npy')
        all_times = _load_files(time_file_pattern, np.float32)
        if size:
            samples = shuffled_idx[:size]
            all_times = all_times[samples]
        return all_data, all_edges, all_times
    return all_data, all_edges
def preprocess_data(data, seg_len=1, pred_steps=1, edge_type=1, ground_truth=True):
    """Slice loaded (time_series, edges[, time_stamps]) into training segments.

    :param data: tuple from load_data: (time_series, edges) or
        (time_series, edges, time_stamps)
    :param seg_len: input segment length in timesteps
    :param pred_steps: number of future timesteps per target segment
    :param edge_type: number of real edge types; one-hot adds a
        "no connection" class
    :param ground_truth: also build the expected (future) segments
    :return: with time stamps: ([time_segs, edges_one_hot], expected_time_segs,
        [time_stamps_segs, expected_time_stamps_segs]); without:
        ([time_segs, edges], expected_time_segs)
    :raises ValueError: when ground_truth is requested but the series is too
        short for seg_len + pred_steps
    """
    time_series, edges = data[:2]
    time_steps, num_nodes, ndims = time_series.shape[1:]
    if (seg_len + pred_steps > time_steps):
        if ground_truth:
            raise ValueError('time_steps in data not long enough for seg_len and pred_steps')
        else:
            # No targets needed: keep a single window over the whole series.
            stop = 1
    else:
        stop = -pred_steps
    edge_label = edge_type + 1  # Accounting for "no connection"
    # time_series shape [num_sims, time_steps, num_nodes, ndims]
    # Stack shape [num_sims, time_steps-seg_len-pred_steps+1, seg_len, num_nodes, ndims]
    # no of steps to predict aren't sent to the stack others are and the stack returns a stack of timeseries
    # [1:195,2:196,3:197,4:198,5:199]
    time_segs_stack = stack_time_series(time_series[:, :stop, :, :],
                                        seg_len)  # stop=-1
    time_segs = time_segs_stack.reshape([-1, seg_len, num_nodes, ndims])  # 195,5,5,4, changes the timesegs to
    # [0]= first 5 timesteps,[1]= 2:6 timesteps etc
    if ground_truth:
        # Stack shape [num_sims, time_steps-seg_len-pred_steps+1, pred_steps, num_nodes, ndims]
        expected_time_segs_stack = stack_time_series(time_series[:, seg_len:, :, :],
                                                     pred_steps)  # shape=(1,195,5,4),[0]=5th timestep
        # same shape of expected_time_segs_stack and time_segs_stack
        assert (time_segs_stack.shape[1] == expected_time_segs_stack.shape[1]
                == time_steps - seg_len - pred_steps + 1)
        expected_time_segs = expected_time_segs_stack.reshape([-1, pred_steps, num_nodes, ndims])  # 195,5,5,4, changes the timesegs to
        # [0]= 5:10 timesteps,[1]= 6:11 timesteps etc
    else:
        expected_time_segs = None
    # One-hot the edge labels, then replicate per extracted segment so each
    # sample carries its own edge matrix.
    edges_one_hot = one_hot(edges, edge_label, np.float32)  # first in case of 0 1 in place of pos:0,in case of 1,1 in place of pos:1 etc 1 matrix of 1 row
    edges_one_hot = np.repeat(edges_one_hot, time_segs_stack.shape[1], axis=0)  # shape changed to (195,5,5,4)
    edges = np.repeat(edges, time_segs_stack.shape[1], axis=0)
    if len(data) > 2:
        # Time stamps follow the same windowing as the series itself.
        time_stamps = data[2]
        time_stamps_stack = stack_time_series(time_stamps[:, :stop], seg_len)
        time_stamps_segs = time_stamps_stack.reshape([-1, seg_len])
        if ground_truth:
            expected_time_stamps_stack = stack_time_series(
                time_stamps[:, seg_len:], pred_steps)
            expected_time_stamps_segs = expected_time_stamps_stack.reshape([-1, pred_steps])
        else:
            expected_time_stamps_segs = None
        return [time_segs, edges_one_hot], expected_time_segs, [time_stamps_segs, expected_time_stamps_segs]
    # NOTE(review): this branch returns the raw (repeated) edges while the
    # branch above returns the one-hot encoding -- confirm that callers of
    # the two shapes expect this asymmetry.
    return [time_segs, edges], expected_time_segs
def off_diag_matrix(n):
    """Return an n x n float matrix of ones with zeros on the diagonal."""
    matrix = np.ones((n, n))
    np.fill_diagonal(matrix, 0)
    return matrix
def one_hot(labels, num_classes, dtype=int):
    """One-hot encode integer `labels` of any shape.

    :param labels: integer ndarray of labels in [0, num_classes)
    :param num_classes: width of the one-hot axis
    :param dtype: output dtype; `int` replaces the `np.int` alias removed in
        NumPy 1.24 (same builtin type, so callers are unaffected)
    :return: ndarray of shape labels.shape + (num_classes,)
    """
    identity = np.eye(num_classes, dtype=dtype)
    one_hots = identity[labels.reshape(-1)]
    return one_hots.reshape(labels.shape + (num_classes,))
def load_model(model, log_dir):
    """Restore weights from `log_dir` when a checkpoint exists; otherwise do nothing."""
    checkpoint = os.path.join(log_dir, 'weights_spektral.h5')
    if not os.path.exists(checkpoint):
        return
    model.load_weights(checkpoint)
def save_model(model, log_dir):
    """Save weights under `log_dir` and return a matching ModelCheckpoint callback."""
    os.makedirs(log_dir, exist_ok=True)
    checkpoint = os.path.join(log_dir, 'weights_spektral.h5')
    model.save_weights(checkpoint)
    callback = tf.keras.callbacks.ModelCheckpoint(checkpoint, save_weights_only=True)
    return callback
def load_model_params(config):
    """Read model hyper-parameters from a JSON file and add derived defaults.

    Adds 'time_seg_len' (derived from the CNN filter count) plus defaults
    for 'edge_type', 'output_bound' (None) and 'edge_aggr'.
    """
    with open(config) as config_file:
        model_params = json.load(config_file)
    # Each kernel-3 conv layer trims 2 steps; +1 leaves one output step.
    model_params['time_seg_len'] = 2 * len(model_params['cnn']['filters']) + 1
    model_params.setdefault('edge_type', 1)
    model_params.setdefault('output_bound')  # defaults to None
    model_params.setdefault('edge_aggr', {})
    return model_params
def _load_files(file_pattern, dtype, padding=None, pad_dims=None):
files = sorted(glob.glob(file_pattern))
#print(files)
if not files:
raise FileNotFoundError(f"no files matching pattern {file_pattern} found")
all_data = []
for f in files:
data = np.load(f).astype(dtype)
if padding is not None and pad_dims is not None:
pad_shape = [(0, padding - s if i in pad_dims else 0) for i, s in enumerate(data.shape)]
data = np.pad(data, pad_shape, mode='constant', constant_values=0)
all_data.append(data)
return np.concatenate(all_data, axis=0)
def stack_time_series(time_series, seg_len, axis=2):
    """Stack `seg_len` time-shifted copies of the series along `axis`.

    time_series shape [num_sims, time_steps, num_agents, ndims]; slice i of
    the new axis is the series shifted by i timesteps, each window covering
    time_steps - seg_len + 1 steps.
    """
    window = time_series.shape[1] + 1 - seg_len
    shifted = [time_series[:, offset:offset + window, :, :]
               for offset in range(seg_len)]
    return np.stack(shifted, axis=axis)
def load_edge_data(data_path, prefix='train', size=None, padding=None, load_time=False):
    """Load the first edge matrix for `prefix` from `data_path`.

    :param size: unused; kept for signature parity with load_data
    :param load_time: unused; kept for signature parity with load_data
    :raises ValueError: if data_path does not exist
    """
    if not os.path.exists(data_path):
        raise ValueError(f"path '{data_path}' does not exist")
    edge_file_pattern = os.path.join(data_path, f'{prefix}_edge*.npy')
    # `int` replaces the `np.int` alias removed in NumPy 1.24.
    all_edges = _load_files(edge_file_pattern, int, padding, pad_dims=(1, 2))
    # Only the first sample's edge matrix is used.
    return all_edges[0]
class NodePropagator(keras.layers.Layer):
    """
    Pass message between every pair of nodes.

    For every ordered (source, target) node pair, the two node states are
    concatenated along the feature axis.
    """
    def call(self, node_states):
        # node_states shape [batch, num_nodes, out_units].
        num_nodes = node_states.shape[1]
        # Broadcast each node's state across sources (axis 2) and targets (axis 1).
        source_states = tf.repeat(tf.expand_dims(node_states, 2), num_nodes, axis=2)
        target_states = tf.repeat(tf.expand_dims(node_states, 1), num_nodes, axis=1)
        # Result shape [batch, num_nodes, num_nodes, 2 * out_units].
        return tf.concat([source_states, target_states], axis=-1)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-10-17 15:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the 'title' CharField to the notifications Alert model."""

    dependencies = [
        ('notifications', '0002__'),
    ]
    operations = [
        migrations.AddField(
            model_name='alert',
            name='title',
            # NOTE(review): default=b'' is a bytes literal (Python-2-era
            # artifact); a new migration would use ''. Left untouched because
            # editing an already-applied migration can desync migration state.
            field=models.CharField(default=b'', max_length=255),
        ),
    ]
|
import json
import os
from ..util.exceptions import PypactSpectrumDoesNotExistException
# Directory containing this module; used to locate the bundled data files.
_this_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)))
# Default path of the minified spectrum library shipped with the package.
__SPECTRUM_JSON_LIB_FILE__ = os.path.join(
    _this_dir, 'data', 'spectrumlib.min.json')
class SpectrumLibJSONReader:
    """Context manager that parses a spectrum-library JSON file.

    Entering the context returns the parsed dict; exiting is a no-op
    (the file handle is already closed by then).
    """
    def __init__(self, filename=__SPECTRUM_JSON_LIB_FILE__):
        self.filename = filename

    def __enter__(self):
        with open(self.filename, 'rt') as json_file:
            return json.load(json_file)

    def __exit__(self, *args):
        pass
class SpectrumLibManager:
    """Lookup facade over a spectrum-library dict of name -> {energies, values}."""
    def __init__(self, data):
        self._data = data

    def list(self):
        """Return the names of all spectra in the library."""
        return [name for name in self._data]

    def get(self, name):
        """Return (energies, values) for `name`.

        :raises PypactSpectrumDoesNotExistException: when the name is unknown
        """
        if name not in self._data:
            raise PypactSpectrumDoesNotExistException(
                f"{name} does not exist in data")
        entry = self._data[name]
        return entry["energies"], entry["values"]
|
"""
@version 0.1
@contact QQ597806630
@author zc
@desc 本项目淘宝抢购
@date 2020/12/04
说明:
classes: Mes()
function:
"""
import pyautogui
import time
from selenium import webdriver
import sys
from PyQt5.QtCore import QThread,pyqtSignal,Qt
from PyQt5.QtWidgets import QApplication,QFileDialog, QMainWindow,QMessageBox,QCheckBox,QAbstractItemView,QTableView,QTableWidgetItem,QTableWidget
from taobaoui import Ui_mainWindow
class Mytaobao():
    """Main window controller for the Taobao quick-buy tool.

    Wires up the generated Qt UI, drives a Firefox instance through
    selenium for login and cart scraping, and hands the final order
    submission off to a background Order thread.
    """
    def __init__(self):
        self.w = QMainWindow()
        self.myui = Ui_mainWindow()
        self.myui.setupUi(self.w)
        self.initmyui()

    def initmyui(self):
        """Set the default Firefox path and connect the button handlers."""
        self.myui.lineEdit.setText("D:/Program Files/Firefox/firefox.exe")
        self.myui.pushButton_2.clicked.connect(self.openFile)
        self.myui.pushButton.clicked.connect(self.login)
        self.myui.pushButton_3.clicked.connect(self.buy)

    def openFile(self):
        """Let the user pick the firefox.exe binary and show the chosen path."""
        get_filename_path, ok = QFileDialog.getOpenFileName(None, "选取firefox.exe文件", "C:\\", "All Files (*)")
        if ok:
            self.myui.lineEdit.setText(str(get_filename_path))

    def login(self):
        """Launch Firefox, open taobao.com and wait for a QR-code login."""
        path = self.myui.lineEdit.text()
        if not path.endswith("firefox.exe"):
            # Warn when the configured path is not a firefox binary.
            self.messageDialog("提示", "找到火狐的浏览器都不会你还抢个蛋啊你!!!")
        else:
            option = webdriver.FirefoxOptions()
            self.browser = webdriver.Firefox(firefox_binary=path, options=option)
            self.browser.maximize_window()
            self.browser.get("https://www.taobao.com")
            # Click the "please log in" link to open the login page.
            while self.browser.find_element_by_link_text("亲,请登录"):
                self.browser.find_element_by_link_text("亲,请登录").click()
                time.sleep(3)
                # Switch the login form to QR-code mode if the toggle exists.
                qrcode = self.browser.find_element_by_xpath("/html/body/div/div[2]/div[3]/div/div[1]/div/div[1]/i")
                if qrcode:
                    qrcode.click()
                # Poll until the nickname element appears (login finished).
                while True:
                    try:
                        user = self.browser.find_element_by_class_name("site-nav-login-info-nick ").text
                        print(user)
                        self.myui.label_7.setText(user)
                        break
                    except Exception:
                        # Not logged in yet -- retry after a short pause.
                        # (Narrowed from a bare `except:`.)
                        time.sleep(1)
                break
            self.get_car()

    def get_car(self):
        """Open the cart page, select everything and mirror the items into the table."""
        self.browser.get("https://cart.taobao.com/cart.htm")
        time.sleep(3)
        # Tick the "select all" checkbox.
        self.browser.find_element_by_id("J_SelectAll2").click()
        wods = self.browser.find_elements_by_class_name("item-content")
        wodlist = []
        for wod in wods:
            checkbox = wod.find_element_by_class_name("cart-checkbox ")
            name = wod.find_element_by_class_name("item-title").text
            print(name)
            price = wod.find_element_by_class_name("J_Price").text
            # Bug fix: was `print(print())`, which printed None and never
            # showed the scraped price.
            print(price)
            count = wod.find_element_by_class_name("text-amount").get_attribute("value")
            print(count)
            money = wod.find_element_by_class_name("J_ItemSum").text
            print(money)
            wodlist.append([checkbox, name, price, count, money])
        print(wodlist)
        self.myui.tableWidget.setRowCount(len(wodlist))
        self.myui.tableWidget.setSelectionMode(QAbstractItemView.SingleSelection)  # single-row selection only
        self.myui.tableWidget.setEditTriggers(QTableView.NoEditTriggers)  # read-only cells
        self.myui.tableWidget.setSelectionBehavior(QAbstractItemView.SelectRows)  # select whole rows
        self.myui.tableWidget.horizontalHeader().setStretchLastSection(True)  # stretch last column to fill
        n = 0
        for i in wodlist:
            ck = QCheckBox()
            ck.setStyleSheet("QCheckBox{margin:30px};")
            self.myui.tableWidget.setCellWidget(n, 0, ck)
            for j in range(len(i)):
                if j != 0:
                    taitem = QTableWidgetItem(str(i[j]))
                    taitem.setToolTip(i[j])
                    taitem.setTextAlignment(Qt.AlignCenter)
                    self.myui.tableWidget.setItem(n, j, taitem)
            n += 1

    def buy(self):
        """Click "checkout" and start the background thread that submits the order."""
        if self.browser.find_element_by_link_text("结 算"):
            self.browser.find_element_by_link_text("结 算").click()
            self.order = Order(self.browser)
            # Forward thread status messages to the status bar for 2s each.
            self.order.mysignal.connect(lambda msg: self.myui.statusbar.showMessage(msg, 2000))
            self.order.start()

    def messageDialog(self, warn, msg):
        """Show a warning dialog with title `warn` and body `msg`."""
        msg_box = QMessageBox(QMessageBox.Warning, warn, msg)
        msg_box.exec_()

    def show(self):
        self.w.show()
class Order(QThread):
    """Background thread that keeps retrying the order submission.

    Emits progress messages through `mysignal` so the UI thread can show
    them in the status bar.
    """
    mysignal = pyqtSignal(str)

    def __init__(self, browser):
        super(Order, self).__init__()
        self.browser = browser

    def run(self):
        # Keep refreshing until the "submit order" link becomes clickable.
        while True:
            try:
                self.browser.find_element_by_link_text('提交订单').click()
                break
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit are no longer swallowed.
                self.browser.refresh()
                print("刷新")  # "refreshing"
                # Fallback: click where the submit button usually sits.
                pyautogui.moveTo(1062, 481, duration=1)
                pyautogui.click()
                print("点击")  # "clicked"
                self.mysignal.emit("提交订单失败我刷新失败再来!!!")
# Script entry point: build the Qt application and show the main window.
if __name__=='__main__':
    app=QApplication(sys.argv)
    taobao = Mytaobao()
    taobao.show()
    sys.exit(app.exec_())
|
import math
import os
import subprocess
from copy import copy
from typing import List

import numpy as np
def write_cfgs_from_otf(otf_run, cell, species, start_frame, no_frames,
                        folder_name, image_quality, scr_dest, trans_vec,
                        skip=1):
    """Dump frames of an OTF run as AtomEye cfg files plus a .scr animation script.

    :param otf_run: object exposing position_list (per-frame positions)
    :param cell: 3x3 orthorhombic cell matrix
    :param species: per-atom species labels
    :param start_frame: first frame index to export
    :param no_frames: number of frames to export
    :param folder_name: output directory for cfg files (Pic/ created inside)
    :param image_quality: first line written to the animation script
    :param scr_dest: path of the animation script to write
    :param trans_vec: unused here; kept for signature parity with
        write_cfgs_from_md
    :param skip: stride between exported frames
    """
    # os.makedirs replaces `subprocess.call('mkdir ...', shell=True)`:
    # no shell injection via folder_name, portable, and idempotent.
    os.makedirs('%s/Pic' % folder_name, exist_ok=True)
    scr_anim_text = '%s\n' % image_quality
    # digits needed to zero-pad frame numbers consistently
    no_digits = int(np.ceil(math.log(no_frames, 10)))
    for n in range(no_frames):
        frame_no_padded = str(n).zfill(no_digits)
        frame_dest = folder_name + '/' + frame_no_padded + '.cfg'
        scr_anim_text += '%s %s/Pic/%s.jpg\n' % \
            (frame_dest, folder_name, frame_no_padded)
        positions = otf_run.position_list[start_frame + n * skip]
        write_cfg_file(frame_dest, positions, species, cell)
    # write animation directions for AtomEye
    write_file(scr_dest, scr_anim_text)
def write_cfgs_from_md(md_trajectory, start_frame, no_frames, folder_name,
                       image_quality, scr_dest, trans_vec, skip=1):
    """Dump frames of an MD trajectory as AtomEye cfg files plus a .scr script.

    Positions are translated by `trans_vec` before writing; species are read
    per-frame from the trajectory's MD_data records.

    :param md_trajectory: object exposing .cell and .MD_data (list of dicts
        with 'positions' and 'elements')
    :param skip: stride between exported frames
    """
    # Portable, injection-safe replacement for shelled-out mkdir calls.
    os.makedirs('%s/Pic' % folder_name, exist_ok=True)
    scr_anim_text = '%s\n' % image_quality
    no_digits = int(np.ceil(math.log(no_frames, 10)))
    cell = md_trajectory.cell
    for n in range(no_frames):
        frame_no_padded = str(n).zfill(no_digits)
        frame_dest = folder_name + '/' + frame_no_padded + '.cfg'
        scr_anim_text += '%s %s/Pic/%s.jpg\n' % \
            (frame_dest, folder_name, frame_no_padded)
        frame_record = md_trajectory.MD_data[start_frame + n * skip]
        positions = np.array(frame_record['positions']) + trans_vec
        species = frame_record['elements']
        write_cfg_file(frame_dest, positions, species, cell)
    # write animation directions for AtomEye
    write_file(scr_dest, scr_anim_text)
def write_cfgs_from_pos(pos_list, cell, folder_name, image_quality, scr_dest,
                        species):
    """Write AtomEye cfg files for a list of saved position arrays (.npy paths).

    The animation script lines reference .png images here (the other
    writers use .jpg).
    """
    # Portable, injection-safe replacement for shelled-out mkdir calls.
    os.makedirs('%s/Pic' % folder_name, exist_ok=True)
    scr_anim_text = '%s\n' % image_quality
    no_frames = len(pos_list)
    no_digits = int(np.ceil(math.log(no_frames, 10)))
    for n, pos_file in enumerate(pos_list):
        pos = np.load(pos_file)
        frame_no_padded = str(n).zfill(no_digits)
        frame_dest = folder_name + '/' + frame_no_padded + '.cfg'
        scr_anim_text += '%s %s/Pic/%s.png\n' % \
            (frame_dest, folder_name, frame_no_padded)
        write_cfg_file(frame_dest, pos, species, cell)
    # write animation directions for AtomEye
    write_file(scr_dest, scr_anim_text)
def write_cfg_file(file_name: str, positions: np.ndarray, species: List[str],
                   cell: np.ndarray) -> None:
    """write cfg file that can be interpreted by AtomEye.
    assumes orthorombic unit cell.
    :param file_name: destination of cfg file
    :type file_name: str
    :param positions: positions of atoms
    :type positions: np.ndarray
    :param species: atom species
    :type species: List[str]
    :param cell: unit cell vectors
    :type cell: np.ndarray
    :return: creates the cfg file
    :rtype: None
    """
    write_file(file_name, get_cfg_text(positions, species, cell))
def get_cfg_text(positions: np.ndarray, species: List[str],
                 cell: np.ndarray) -> str:
    """Assemble the full cfg text: header followed by the reduced coordinates.

    :param positions: Cartesian coordinates of atomic positions
    :param species: list of atomic species (determines atom size)
    :param cell: cell of unit vectors
    :return: cfg text
    """
    header = get_cfg_header(positions.shape[0], cell)
    reduced = calculate_reduced_coordinates(positions, cell)
    body = get_reduced_coordinate_text(reduced, species)
    return header + body
def get_cfg_header(number_of_particles: int, cell: np.ndarray) -> str:
    """creates cfg header from the particle count and unit cell.
    assumes unit cell is orthorombic (only diagonal cell entries are used).
    :param number_of_particles: number of atoms in the frame
    :type number_of_particles: int
    :param cell: 3x3 array of cell vectors (cell vectors are rows)
    :type cell: np.ndarray
    :return: cfg header text
    :rtype: str
    """
    # Template lines start at column 0 on purpose: AtomEye directives must
    # not carry leading whitespace.
    cfg_text = """Number of particles = %i
# (required) this must be the first line
A = 1.0 Angstrom (basic length-scale)
# (optional) basic length-scale: default A = 1.0 [Angstrom]
H0(1,1) = %f A
H0(1,2) = 0 A
H0(1,3) = 0 A
# (required) this is the supercell's 1st edge, in A
H0(2,1) = 0 A
H0(2,2) = %f A
H0(2,3) = 0 A
# (required) this is the supercell's 2nd edge, in A
H0(3,1) = 0 A
H0(3,2) = 0 A
H0(3,3) = %f A
# (required) this is the supercell's 3rd edge, in A
Transform(1,1) = 1
Transform(1,2) = 0
Transform(1,3) = 0
Transform(2,1) = 0
Transform(2,2) = 1
Transform(2,3) = 0
Transform(3,1) = 0
Transform(3,2) = 0
Transform(3,3) = 1
# (optional) apply additional transformation on H0: H = H0 * Transform;
# default = Identity matrix.
eta(1,1) = 0
eta(1,2) = 0
eta(1,3) = 0
eta(2,2) = 0
eta(2,3) = 0
eta(3,3) = 0
# (optional) apply additional Lagrangian strain on H0:
# H = H0 * sqrt(Identity_matrix + 2 * eta);
# default = zero matrix.
# ENSUING ARE THE ATOMS, EACH ATOM DESCRIBED BY A ROW
# 1st entry is atomic mass in a.m.u.
# 2nd entry is the chemical symbol (max 2 chars)
# 3rd entry is reduced coordinate s1 (dimensionless)
# 4th entry is reduced coordinate s2 (dimensionless)
# 5th entry is reduced coordinate s3 (dimensionless)
# real coordinates x = s * H, x, s are 1x3 row vectors
# 6th entry is d(s1)/dt in basic rate-scale R
# 7th entry is d(s2)/dt in basic rate-scale R
# 8th entry is d(s3)/dt in basic rate-scale R
R = 1.0 [ns^-1]
# (optional) basic rate-scale: default R = 1.0 [ns^-1]
""" % (number_of_particles, cell[0, 0], cell[1, 1], cell[2, 2])
    return cfg_text
def calculate_reduced_coordinates(positions: np.ndarray,
                                  cell: np.ndarray) -> np.ndarray:
    """convert raw cartesian coordinates to reduced coordinates with each atom
    wrapped back into the unit cell. assumes unit cell is orthorombic.

    Vectorized over the original element-by-element Python loop: divide by
    the cell diagonal, then subtract the floor so every coordinate lands
    in [0, 1).

    :param positions: Nx3 array of atomic positions
    :type positions: np.ndarray
    :param cell: 3x3 array of cell vectors (cell vectors are rows)
    :type cell: np.ndarray
    :return: Nx3 array of reduced coordinates
    :rtype: np.ndarray
    """
    trial_coords = np.asarray(positions) / np.diag(cell)
    # reduced coordinates must be between 0 and 1
    return trial_coords - np.floor(trial_coords)
def get_reduced_coordinate_text(reduced_coordinates: np.ndarray,
                                species: List[str]) -> str:
    """records reduced coordinates in cfg format.
    :param reduced_coordinates: array of reduced coordinates
    :type reduced_coordinates: np.ndarray
    :param species: list of atomic species, which determines atom size
    :type species: List[str]
    :return: cfg string of reduced coordinates.
    :rtype: str
    """
    header = """
# ENSUING ARE THE ATOMS, EACH ATOM DESCRIBED BY A ROW
# 1st entry is atomic mass in a.m.u.
# 2nd entry is the chemical symbol (max 2 chars)
# 3rd entry is reduced coordinate s1 (dimensionless)
# 4th entry is reduced coordinate s2 (dimensionless)
# 5th entry is reduced coordinate s3 (dimensionless)
# real coordinates x = s * H, x, s are 1x3 row vectors
# 6th entry is d(s1)/dt in basic rate-scale R
# 7th entry is d(s2)/dt in basic rate-scale R
# 8th entry is d(s3)/dt in basic rate-scale R
R = 1.0 [ns^-1]
# (optional) basic rate-scale: default R = 1.0 [ns^-1]
"""
    # use arbitrary mass, label; join avoids quadratic string concatenation
    atom_lines = ['1.0 %s %f %f %f 0 0 0 \n' % (spec, coord[0], coord[1], coord[2])
                  for spec, coord in zip(species, reduced_coordinates)]
    return header + ''.join(atom_lines)
def write_file(file_name: str, text: str):
    """Write `text` to `file_name`, replacing any existing contents."""
    with open(file_name, 'w') as out_stream:
        out_stream.write(text)
# Smoke test: print the cfg coordinate section for two dummy atoms.
if __name__ == '__main__':
    reduced_coordinates = np.array([[1, 2, 3], [4, 5, 6]])
    species = ['A', 'B']
    test = get_reduced_coordinate_text(reduced_coordinates, species)
    print(test)
|
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from django.contrib.sites.managers import CurrentSiteManager
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class BaseEntry(models.Model):
    """Abstract site-scoped key model.

    Subclasses get a per-site `key` plus a default manager filtered to the
    current site (`objects`) and an unfiltered one (`all_objects`).

    NOTE(review): python_2_unicode_compatible and ugettext_lazy were removed
    in Django 3.0 / 4.0 respectively -- confirm the project's Django version
    before upgrading.
    """
    # Owning site; entries are partitioned per site.
    site = models.ForeignKey(
        'sites.Site',
        on_delete=models.CASCADE,
        verbose_name=_('Site'))
    key = models.CharField(
        max_length=63,
        verbose_name=_('Key'))
    # Default manager: restricted to the current site.
    objects = CurrentSiteManager()
    # Escape hatch: all entries regardless of site.
    all_objects = models.Manager()
    class Meta:
        abstract = True
    def __str__(self):
        return self.key
    @staticmethod
    def autocomplete_search_fields():
        # Fields searched by admin autocomplete widgets.
        return ('key__icontains', )
    def natural_key(self):
        # Natural key for fixture (de)serialization.
        return (self.key, )
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
# Public API of this generated module.
__all__ = [
    'GetHmacKeyResult',
    'AwaitableGetHmacKeyResult',
    'get_hmac_key',
    'get_hmac_key_output',
]
@pulumi.output_type
class GetHmacKeyResult:
    """Result of a getHmacKey invoke: read-only HMAC key metadata.

    Generated code: each constructor argument is validated as a str and
    stored via pulumi.set, then exposed through a matching getter property.
    """
    def __init__(__self__, access_id=None, etag=None, kind=None, project=None, self_link=None, service_account_email=None, state=None, time_created=None, updated=None):
        if access_id and not isinstance(access_id, str):
            raise TypeError("Expected argument 'access_id' to be a str")
        pulumi.set(__self__, "access_id", access_id)
        if etag and not isinstance(etag, str):
            raise TypeError("Expected argument 'etag' to be a str")
        pulumi.set(__self__, "etag", etag)
        if kind and not isinstance(kind, str):
            raise TypeError("Expected argument 'kind' to be a str")
        pulumi.set(__self__, "kind", kind)
        if project and not isinstance(project, str):
            raise TypeError("Expected argument 'project' to be a str")
        pulumi.set(__self__, "project", project)
        if self_link and not isinstance(self_link, str):
            raise TypeError("Expected argument 'self_link' to be a str")
        pulumi.set(__self__, "self_link", self_link)
        if service_account_email and not isinstance(service_account_email, str):
            raise TypeError("Expected argument 'service_account_email' to be a str")
        pulumi.set(__self__, "service_account_email", service_account_email)
        if state and not isinstance(state, str):
            raise TypeError("Expected argument 'state' to be a str")
        pulumi.set(__self__, "state", state)
        if time_created and not isinstance(time_created, str):
            raise TypeError("Expected argument 'time_created' to be a str")
        pulumi.set(__self__, "time_created", time_created)
        if updated and not isinstance(updated, str):
            raise TypeError("Expected argument 'updated' to be a str")
        pulumi.set(__self__, "updated", updated)
    @property
    @pulumi.getter(name="accessId")
    def access_id(self) -> str:
        """
        The ID of the HMAC Key.
        """
        return pulumi.get(self, "access_id")
    @property
    @pulumi.getter
    def etag(self) -> str:
        """
        HTTP 1.1 Entity tag for the HMAC key.
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        The kind of item this is. For HMAC Key metadata, this is always storage#hmacKeyMetadata.
        """
        return pulumi.get(self, "kind")
    @property
    @pulumi.getter
    def project(self) -> str:
        """
        Project ID owning the service account to which the key authenticates.
        """
        return pulumi.get(self, "project")
    @property
    @pulumi.getter(name="selfLink")
    def self_link(self) -> str:
        """
        The link to this resource.
        """
        return pulumi.get(self, "self_link")
    @property
    @pulumi.getter(name="serviceAccountEmail")
    def service_account_email(self) -> str:
        """
        The email address of the key's associated service account.
        """
        return pulumi.get(self, "service_account_email")
    @property
    @pulumi.getter
    def state(self) -> str:
        """
        The state of the key. Can be one of ACTIVE, INACTIVE, or DELETED.
        """
        return pulumi.get(self, "state")
    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> str:
        """
        The creation time of the HMAC key in RFC 3339 format.
        """
        return pulumi.get(self, "time_created")
    @property
    @pulumi.getter
    def updated(self) -> str:
        """
        The last modification time of the HMAC key metadata in RFC 3339 format.
        """
        return pulumi.get(self, "updated")
class AwaitableGetHmacKeyResult(GetHmacKeyResult):
    """Awaitable wrapper so the result can be used with `await` in async code."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable yield makes this method a generator, which
        # satisfies the awaitable protocol without ever suspending.
        if False:
            yield self
        return GetHmacKeyResult(
            access_id=self.access_id,
            etag=self.etag,
            kind=self.kind,
            project=self.project,
            self_link=self.self_link,
            service_account_email=self.service_account_email,
            state=self.state,
            time_created=self.time_created,
            updated=self.updated)
def get_hmac_key(access_id: Optional[str] = None,
                 project: Optional[str] = None,
                 user_project: Optional[str] = None,
                 opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetHmacKeyResult:
    """
    Retrieves an HMAC key's metadata

    :param access_id: The ID of the HMAC Key.
    :param project: Project ID owning the service account to which the key
        authenticates.
    :param user_project: forwarded as 'userProject' to the invoke.
    :param opts: invoke options; the SDK version is filled in when unset.
    """
    __args__ = dict()
    __args__['accessId'] = access_id
    __args__['project'] = project
    __args__['userProject'] = user_project
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('google-native:storage/v1:getHmacKey', __args__, opts=opts, typ=GetHmacKeyResult).value
    return AwaitableGetHmacKeyResult(
        access_id=__ret__.access_id,
        etag=__ret__.etag,
        kind=__ret__.kind,
        project=__ret__.project,
        self_link=__ret__.self_link,
        service_account_email=__ret__.service_account_email,
        state=__ret__.state,
        time_created=__ret__.time_created,
        updated=__ret__.updated)
@_utilities.lift_output_func(get_hmac_key)
def get_hmac_key_output(access_id: Optional[pulumi.Input[str]] = None,
                        project: Optional[pulumi.Input[Optional[str]]] = None,
                        user_project: Optional[pulumi.Input[Optional[str]]] = None,
                        opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetHmacKeyResult]:
    """
    Retrieves an HMAC key's metadata

    Output-typed variant of get_hmac_key; the decorator supplies the
    implementation, so the body is intentionally empty.
    """
    ...
|
import logging
logger = logging.getLogger(__name__)
from mqfactory.message import Message
class Store(object):
    """Abstract persistence backend: subscripting yields a Collection."""
    def __getitem__(self, key):
        raise NotImplementedError("implement get collection from the store")
class Collection(object):
    """Abstract persistent collection of items keyed by a storage id."""
    def load(self):
        """Yield/return all persisted items."""
        raise NotImplementedError("implement loading the collection")

    def __getitem__(self, key):
        raise NotImplementedError("implement get item from collection")

    def add(self, item):
        """Persist a new item, returning its storage id."""
        raise NotImplementedError("implement adding item to the collection")

    def remove(self, item):
        raise NotImplementedError("implement removing item from the collection")

    def update(self, key, item):
        raise NotImplementedError("implement updating item in the collection")
class MessageStore(object):
    """Mirrors a queue's messages into a persistent Collection.

    Hooks into the queue's before/after add/remove/defer/get callback lists
    so every queue mutation is reflected in the backing collection, and
    lazily loads previously persisted messages back into the queue before
    the first operation.
    """
    def __init__(self, queue, collection):
        self.queue = queue
        self.collection = collection
        # Persisted messages are loaded lazily, on the first queue operation.
        self.loaded = False
        self.queue.before_add.append(self.before_add)
        self.queue.after_add.append(self.after_add)
        self.queue.before_remove.append(self.before_remove)
        self.queue.after_remove.append(self.after_remove)
        self.queue.after_defer.append(self.after_defer)
        self.queue.before_get.append(self.before_get)
    def before_add(self, message):
        # Make sure persisted messages are queued before new ones are added.
        self.load_messages()
    def after_add(self, message):
        # Persist the message and remember its storage id for later updates.
        message.private["store-id"] = self.collection.add(dict(message))
        logger.debug("store: after_add: message {0} stored as {1}".format(
            message.id, message.private["store-id"]
        ))
    def before_remove(self, message):
        self.load_messages()
    def after_remove(self, message):
        self.collection.remove(message.private["store-id"])
    def before_get(self, message=None):
        self.load_messages()
    def after_defer(self, message):
        # Deferral mutates the message; sync it to storage best-effort.
        try:
            self.collection.update(message.private["store-id"], dict(message))
        except Exception as e:
            logger.error("store: after_defer: update failed for {0}: {1}".format(
                str(message), str(e)
            ))
    def load_messages(self):
        """Load persisted messages into the queue exactly once (idempotent)."""
        if not self.loaded:
            logger.info("loading messages...")
            for doc in self.collection.load():
                message = Message(doc["to"], doc["payload"], doc["tags"])
                message.private["store-id"] = doc["_id"]
                # wrapping=False: presumably re-queues as-is, skipping
                # add-time wrapping -- confirm against the queue's add().
                self.queue.add(message, wrapping=False)
            self.loaded = True
            logger.info("loaded")
def Persisting(mq, outbox=None, inbox=None):
    """Attach persistent MessageStores to mq's outbox/inbox (when given); return mq."""
    if outbox:
        MessageStore(mq.outbox, outbox)
    if inbox:
        MessageStore(mq.inbox, inbox)
    return mq
|
from output.models.ms_data.datatypes.facets.idrefs.idrefs_enumeration002_xsd.idrefs_enumeration002 import (
Foo,
FooType,
FooIdrefsAttr,
Test,
)
__all__ = [
"Foo",
"FooType",
"FooIdrefsAttr",
"Test",
]
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
"""Control verbose output."""
from __future__ import absolute_import
import logging
_verbose_loggers = []
def push_verbose_logger(logger):
    """Push a logger to log verbose messages."""
    # `global` was unnecessary (fixed): append mutates the module-level
    # list in place, no rebinding occurs. Also fixed docstring typo.
    _verbose_loggers.append(logger)
def pop_verbose_logger():
    """Remove the most recently-pushed verbose logger.

    :raises IndexError: if no logger has been pushed
    """
    # Explicit check instead of `assert`, which disappears under `python -O`;
    # list.pop on an empty list would raise IndexError anyway, so the
    # observable error type is consistent in both modes.
    if not _verbose_loggers:
        raise IndexError("pop from empty verbose logger stack")
    _verbose_loggers.pop()
_cached_null_logger = None
def _null_logger():
    """Return a shared logger that swallows every record via NullHandler."""
    global _cached_null_logger
    if _cached_null_logger is None:
        null_logger = logging.getLogger(name='conda_kapsel_null')
        null_logger.addHandler(logging.NullHandler())
        _cached_null_logger = null_logger
    return _cached_null_logger
def _verbose_logger():
    """Used internal to conda-kapsel library to get the current verbose logger.

    Falls back to the shared null logger when none has been pushed.
    """
    if _verbose_loggers:
        return _verbose_loggers[-1]
    return _null_logger()
|
import sys
import datetime
from pathlib import Path
# Make the package root importable when this script is run directly.
current_path = Path(__file__).absolute()
abs_path = str(current_path.parent.parent)
sys.path.append(abs_path)
from capture import Capture
def from_api_to_db_deputados(data_list, url, capture_number):
    """Map raw API 'deputado' records to row dicts for camara_v1.deputados.

    :param data_list: iterable of dicts as returned by the Camara API
    :param url: URL the data was captured from (stored in each row)
    :param capture_number: sequential capture id (stored in each row)
    :return: lazy map of row dicts; data_captura is stamped when consumed
    """
    # PEP 8 (E731): a named def instead of a lambda assigned to a variable.
    def to_row(datum):
        return dict(
            ide_cadastro=datum['ideCadastro'],
            cod_orcamento=datum['codOrcamento'],
            condicao=datum['condicao'],
            matricula=datum['matricula'],
            id_parlamentar=datum['idParlamentar'],
            nome=datum['nome'],
            nome_parlamentar=datum['nomeParlamentar'],
            url_foto=datum['urlFoto'],
            sexo=datum['sexo'],
            uf=datum['uf'],
            partido=datum['partido'],
            gabinete=datum['gabinete'],
            anexo=datum['anexo'],
            fone=datum['fone'],
            email=datum['email'],
            data_captura=datetime.datetime.now(),
            url_captura=url,
            numero_captura=capture_number
        )
    return map(to_row, data_list)
def get_capture_number(capture):
    """Return the next sequential capture number for camara_v1.deputados.

    Starts at 1 when the table has no rows yet.
    """
    with capture.engine.connect() as conn:
        rows = list(conn.execute("select max(numero_captura) from camara_v1.deputados"))
    current_max = rows[0][0]
    if current_max is None:
        return 1
    return int(current_max) + 1
def main():
    """Capture the current deputados list from the Camara API and insert it."""
    capture = Capture(schema='camara_v1',)
    capture_number = get_capture_number(capture)
    print('Numero Captura', capture_number)
    capture.capture_data(
        url='http://www.camara.leg.br/SitCamaraWS/Deputados.asmx/ObterDeputados')
    # The API payload nests records under deputados -> deputado.
    data_list = capture.data['deputados']['deputado']
    data_list = capture.to_default_dict(data_list)
    data_list = from_api_to_db_deputados(data_list, capture.url, capture_number)
    # NOTE(review): if_exists='pass' presumably skips rows whose
    # ide_cadastro already exists -- confirm in Capture.insert_data.
    capture.insert_data(data_list, table_name='deputados', if_exists='pass',
                        key='ide_cadastro')
# Run the capture when executed as a script.
if __name__ == '__main__':
    main()
|
import time
import tensorflow as tf
import tensorlayer as tl
from tensorlayer.layers import *
def SRGAN_g(t_image, is_train=False, reuse=False):
    """ Generator in Photo-Realistic Single Image Super-Resolution Using a Generative Adversarial Network
    feature maps (n) and stride (s) feature maps (n) and stride (s)

    Builds 16 residual conv blocks plus a long skip connection inside the
    "SRGAN_g" variable scope and returns the final 3-channel layer.
    """
    # Initializers shared by every layer of the generator.
    w_init = tf.random_normal_initializer(stddev=0.02)
    b_init = None # tf.constant_initializer(value=0.0)
    g_init = tf.random_normal_initializer(1., 0.02)  # BatchNorm gamma init
    with tf.variable_scope("SRGAN_g", reuse=reuse) as vs:
        # tl.layers.set_name_reuse(reuse) # remove for TL 1.8.0+
        n = InputLayer(t_image, name='in')
        # assumes t_image flattens to single-channel 512x512 maps — TODO confirm
        n = ReshapeLayer(n, [-1, 512, 512, 1], name = 'reshape')
        n = Conv2d(n, 64, (3, 3), (1, 1), act=tf.nn.relu, padding='SAME', W_init=w_init, name='n64s1/c')
        temp = n  # saved for the long skip connection after the residual stack
        # B residual blocks
        for i in range(16):
            nn = Conv2d(n, 64, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='n64s1/c1/%s' % i)
            nn = BatchNormLayer(nn, act=tf.nn.relu, is_train=is_train, gamma_init=g_init, name='n64s1/b1/%s' % i)
            nn = Conv2d(nn, 64, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='n64s1/c2/%s' % i)
            nn = BatchNormLayer(nn, is_train=is_train, gamma_init=g_init, name='n64s1/b2/%s' % i)
            # Identity skip within the block.
            nn = ElementwiseLayer([n, nn], tf.add, name='b_residual_add/%s' % i)
            n = nn
        n = Conv2d(n, 64, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='n64s1/c/m')
        n = BatchNormLayer(n, is_train=is_train, gamma_init=g_init, name='n64s1/b/m')
        # Long skip connection back to the pre-residual features.
        n = ElementwiseLayer([n, temp], tf.add, name='add3')
        # B residual blocks end
        n = Conv2d(n, 3, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, name='n256s1/1')
        return n
def SRGAN_d(input_images, is_train=True, reuse=False):
    """Discriminator for SRGAN: strided 4x4 conv trunk, a 1x1/3x3 residual
    head, and a single-unit dense output.

    Returns:
        (net_ho, logits): the sigmoid-activated output layer and the raw
        pre-sigmoid logits tensor (for sigmoid cross-entropy losses).
    """
    w_init = tf.random_normal_initializer(stddev=0.02)
    b_init = None # tf.constant_initializer(value=0.0)
    gamma_init = tf.random_normal_initializer(1., 0.02)
    df_dim = 64  # base filter count; multiplied per downsampling stage
    lrelu = lambda x: tl.act.lrelu(x, 0.2)
    with tf.variable_scope("SRGAN_d", reuse=reuse):
        tl.layers.set_name_reuse(reuse)
        net_in = InputLayer(input_images, name='input/images')
        # assumes inputs flatten to 512x512 RGB — TODO confirm with callers
        reshape = ReshapeLayer(net_in, [-1, 512, 512, 3], name = 'reshape')
        # Downsampling trunk: each 4x4 stride-2 conv halves the spatial size.
        net_h0 = Conv2d(reshape, df_dim, (4, 4), (2, 2), act=lrelu, padding='SAME', W_init=w_init, name='h0/c')
        net_h1 = Conv2d(net_h0, df_dim * 2, (4, 4), (2, 2), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h1/c')
        net_h1 = BatchNormLayer(net_h1, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h1/bn')
        net_h2 = Conv2d(net_h1, df_dim * 4, (4, 4), (2, 2), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h2/c')
        net_h2 = BatchNormLayer(net_h2, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h2/bn')
        net_h3 = Conv2d(net_h2, df_dim * 8, (4, 4), (2, 2), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h3/c')
        net_h3 = BatchNormLayer(net_h3, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h3/bn')
        net_h4 = Conv2d(net_h3, df_dim * 16, (4, 4), (2, 2), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h4/c')
        net_h4 = BatchNormLayer(net_h4, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h4/bn')
        net_h5 = Conv2d(net_h4, df_dim * 32, (4, 4), (2, 2), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h5/c')
        net_h5 = BatchNormLayer(net_h5, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h5/bn')
        # Channel-reduction 1x1 convs before the residual head.
        net_h6 = Conv2d(net_h5, df_dim * 16, (1, 1), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h6/c')
        net_h6 = BatchNormLayer(net_h6, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h6/bn')
        net_h7 = Conv2d(net_h6, df_dim * 8, (1, 1), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h7/c')
        net_h7 = BatchNormLayer(net_h7, is_train=is_train, gamma_init=gamma_init, name='h7/bn')
        # Residual head: 1x1/3x3/3x3 convs added back onto h7 via a skip.
        net = Conv2d(net_h7, df_dim * 2, (1, 1), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='res/c')
        net = BatchNormLayer(net, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='res/bn')
        net = Conv2d(net, df_dim * 2, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='res/c2')
        net = BatchNormLayer(net, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='res/bn2')
        net = Conv2d(net, df_dim * 8, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='res/c3')
        net = BatchNormLayer(net, is_train=is_train, gamma_init=gamma_init, name='res/bn3')
        net_h8 = ElementwiseLayer([net_h7, net], combine_fn=tf.add, name='res/add')
        net_h8.outputs = tl.act.lrelu(net_h8.outputs, 0.2)
        net_ho = FlattenLayer(net_h8, name='ho/flatten')
        net_ho = DenseLayer(net_ho, n_units=1, act=tf.identity, W_init=w_init, name='ho/dense')
        logits = net_ho.outputs  # raw score, kept before the sigmoid
        net_ho.outputs = tf.nn.sigmoid(net_ho.outputs)
    return net_ho, logits
def Vgg19_simple_api(input, reuse, nchannels, rgb=False):
    """
    Build the VGG 19 Model

    Parameters
    -----------
    input : image placeholder; when rgb=True it must be scaled to [0, 1] and
        is converted to mean-subtracted BGR before the conv stack.
    reuse : whether to reuse the "VGG19" variable scope.
    nchannels : expected channel count when rgb=False.
    rgb : rgb image placeholder [batch, height, width, 3] values scaled [0, 1]

    Returns (fc8 network layer, pool4 feature layer).

    NOTE: the parameter name `input` shadows the builtin of the same name.
    """
    # ImageNet per-channel means, in BGR order.
    VGG_MEAN = [103.939, 116.779, 123.68]
    with tf.variable_scope("VGG19", reuse=reuse) as vs:
        start_time = time.time()
        print("build model started")
        if rgb == True:
            rgb_scaled = input * 255.0
            # Convert RGB to BGR
            # tf.split changed its argument order at TF 1.0.
            if tf.__version__ <= '0.11':
                red, green, blue = tf.split(3, 3, rgb_scaled)
            else: # TF 1.0
                # print(rgb_scaled)
                red, green, blue = tf.split(rgb_scaled, 3, 3)
            # NOTE(review): these asserts pin the input to 224x224 even though
            # the SRGAN networks in this file reshape to 512x512 — confirm the
            # intended input size.
            assert red.get_shape().as_list()[1:] == [224, 224, 1]
            assert green.get_shape().as_list()[1:] == [224, 224, 1]
            assert blue.get_shape().as_list()[1:] == [224, 224, 1]
            if tf.__version__ <= '0.11':
                bgr = tf.concat(3, [
                    blue - VGG_MEAN[0],
                    green - VGG_MEAN[1],
                    red - VGG_MEAN[2],
                ])
            else:
                bgr = tf.concat(
                    [
                        blue - VGG_MEAN[0],
                        green - VGG_MEAN[1],
                        red - VGG_MEAN[2],
                    ], axis=3)
            assert bgr.get_shape().as_list()[1:] == [224, 224, 3]
            """ input layer """
            net_in = InputLayer(bgr, name='input')
        else:
            assert input.get_shape().as_list()[1:] == [224, 224, nchannels]
            net_in = InputLayer(input, name = 'input')
        """ conv1 """
        network = Conv2d(net_in, n_filter=64, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv1_1')
        network = Conv2d(network, n_filter=64, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv1_2')
        network = MaxPool2d(network, filter_size=(2, 2), strides=(2, 2), padding='SAME', name='pool1')
        """ conv2 """
        network = Conv2d(network, n_filter=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv2_1')
        network = Conv2d(network, n_filter=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv2_2')
        network = MaxPool2d(network, filter_size=(2, 2), strides=(2, 2), padding='SAME', name='pool2')
        """ conv3 """
        network = Conv2d(network, n_filter=256, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv3_1')
        network = Conv2d(network, n_filter=256, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv3_2')
        network = Conv2d(network, n_filter=256, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv3_3')
        network = Conv2d(network, n_filter=256, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv3_4')
        network = MaxPool2d(network, filter_size=(2, 2), strides=(2, 2), padding='SAME', name='pool3')
        """ conv4 """
        network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv4_1')
        network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv4_2')
        network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv4_3')
        network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv4_4')
        network = MaxPool2d(network, filter_size=(2, 2), strides=(2, 2), padding='SAME', name='pool4') # (batch_size, 14, 14, 512)
        conv = network  # pool4 features, returned alongside the final layer
        """ conv5 """
        network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv5_1')
        network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv5_2')
        network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv5_3')
        network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv5_4')
        network = MaxPool2d(network, filter_size=(2, 2), strides=(2, 2), padding='SAME', name='pool5') # (batch_size, 7, 7, 512)
        """ fc 6~8 """
        network = FlattenLayer(network, name='flatten')
        network = DenseLayer(network, n_units=4096, act=tf.nn.relu, name='fc6')
        network = DenseLayer(network, n_units=4096, act=tf.nn.relu, name='fc7')
        network = DenseLayer(network, n_units=1000, act=tf.identity, name='fc8')
        print("build model finished: %fs" % (time.time() - start_time))
        return network, conv
|
from asynceth.jsonrpc.client import JsonRPCClient
__all__ = ['JsonRPCClient']
|
import sys
sys.path.insert(0, '../../')
import pyrosim

# toggle joint drawing by pressing 'd'
# Demo scene: a cylinder hinged to the world and a sphere connected to the
# cylinder through a slider joint, each driven by its own actuator.
sim = pyrosim.Simulator(eval_steps=-1, play_paused=True)
cyl = sim.send_cylinder(position=(0.25, 0, 1),
                        orientation=(1, 0, 0),
                        length=0.5)
sphere = sim.send_sphere(position=(0.5, 0, 1),
                         radius=0.1)
# First body -1 presumably anchors the hinge to the world frame, and
# joint_range=None presumably leaves it unlimited — TODO confirm in pyrosim.
hinge = sim.send_hinge_joint(-1, cyl,
                             anchor=(0, 0, 1),
                             axis=(0, 1, 0),
                             joint_range=None)
slider = sim.send_slider_joint(cyl, sphere,
                               axis=(1, 0, 0),
                               joint_range=0.3)
# Attach motors so the joints can be driven during simulation.
sim.send_rotary_actuator(hinge)
sim.send_linear_actuator(slider)
sim.start()
sim.wait_to_finish()
# Demonstrates growing a Python list with append() and extend().
list_odd = [1, 3, 5, 7, 9]

# Assigning to list_odd[5] right away would raise an IndexError: indexing
# cannot grow a list past its current length.  append() adds a new slot at
# the end instead.
list_odd.append(11)

# After the append the list has six elements, so index 5 is now writable.
list_odd[5] = 13
print(list_odd)  # [1, 3, 5, 7, 9, 13]

# extend() appends every element of another iterable, one by one.
list_odd.extend([15, 17, 19])
print(list_odd)  # [1, 3, 5, 7, 9, 13, 15, 17, 19]
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
import math
import time
import numpy as np
import tensorflow as tf
from tensorflow.python.ops import confusion_matrix
import mbi
# Command line flags for the RGB model evaluation job.
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('rgb_eval_dir', '/home/charlie/mbi_experiment/rgb_eval',
                           """Directory where to write event logs.""")
tf.app.flags.DEFINE_string('eval_data', 'test',
                           """Either 'test' or 'train_eval'.""")
tf.app.flags.DEFINE_string('rgb_checkpoint_dir', '/home/charlie/mbi_experiment/rgb_train', """Directory where to read model checkpoints.""")
# NOTE(review): the help text below is copy-pasted from rgb_checkpoint_dir;
# this flag actually points at the saved meta-graph *file*.
tf.app.flags.DEFINE_string('rgb_meta', '/home/charlie/mbi_experiment/rgb_train/model.ckpt-0.meta', """Directory where to read model checkpoints.""")
tf.app.flags.DEFINE_integer('eval_interval_secs', 5,
                            """How often to run the eval.""")
tf.app.flags.DEFINE_integer('num_examples', 10000,
                            """Number of examples to run.""")
def main(argv=None):  # pylint: disable=unused-argument
    """Restore the latest RGB checkpoint and print the names of all graph
    variables.

    Loads the meta-graph from FLAGS.rgb_meta, restores weights from the most
    recent checkpoint in FLAGS.rgb_checkpoint_dir, and lists the restored
    variables.  Prints a message and returns when no checkpoint is found.
    """
    with tf.Session() as sess:
        ckpt = tf.train.get_checkpoint_state(FLAGS.rgb_checkpoint_dir)
        if ckpt and ckpt.model_checkpoint_path:
            # Restores graph structure and weights from checkpoint.
            saver = tf.train.import_meta_graph(FLAGS.rgb_meta)
            saver.restore(sess, ckpt.model_checkpoint_path)
            # BUG FIX: the original referenced an undefined name `conv2` and
            # called `v.name()` even though `name` is an attribute, not a
            # method.  List the restored graph's variables instead.
            for v in tf.global_variables():
                print(v.name)
        else:
            print('No checkpoint file found')
            return

if __name__ == '__main__':
    tf.app.run()
|
import os
import numpy as np
# Writes particles and error scalars for best, median, and worst
# pred_particles and true_particles are numpy array with dims: # in set, # of particles, 3 coordinates
def write_examples(pred_particles, true_particles, out_dir):
    """Write particle files and per-particle error scalars for the best,
    median, and worst predictions (ranked by per-set mean squared error).

    Args:
        pred_particles: numpy array (num_sets, num_particles, 3) of predictions.
        true_particles: numpy array of the same shape with ground truth.
        out_dir: directory for "<name>.particles" / "<name>.scalars" files;
            created if missing.
    """
    os.makedirs(out_dir, exist_ok=True)
    # Per-set MSE over particles and coordinates.
    mses = np.mean(np.mean((pred_particles - true_particles)**2, axis=2), axis=1)
    median_index = np.argsort(mses)[len(mses) // 2]
    indices = [np.argmin(mses), median_index, np.argmax(mses)]
    names = ["best", "median", "worst"]
    for name, idx in zip(names, indices):
        pred = pred_particles[idx]
        # BUG FIX: build paths with os.path.join; the original string
        # concatenation silently wrote files *outside* out_dir whenever the
        # caller omitted a trailing separator.
        np.savetxt(os.path.join(out_dir, name + ".particles"), pred)
        # Per-particle MSE across the 3 coordinates.
        scalars = np.mean((pred - true_particles[idx])**2, axis=1)
        np.savetxt(os.path.join(out_dir, name + ".scalars"), scalars)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from argparse import ArgumentParser
import os
import re
import sys
import unicodedata
import yaml
# Local files
import cfg
import jiralogin
from helper import vprint, eprint
################################################################################
# Class node
################################################################################
class Node():
    """A node representing an issue in Jira.

    Wraps the subset of issue data needed to render a Freeplane mindmap node:
    key, summary, type, assignee, sponsors, state, color and child links.
    """
    def __init__(self, key, summary, issuetype):
        """Return a node containing the must have feature to be represented in a
        tree."""
        self.key = key
        self.summary = summary
        # Take care of some characters not supported in xml
        self.summary = self.summary.replace("\"", "'")
        self.summary = self.summary.replace("&", "and")
        self.issuetype = issuetype
        self.assignee = None
        self.sponsors = []
        self.description = None
        self.parent = None      # key (string) of the parent issue
        self.childrens = []     # child Node objects
        self.state = None       # Jira status name, e.g. "In Progress"
        self.color = None       # explicit color override; else derived from state
        self.base_url = None    # Jira server base url, used to build links
        self._indent = 0        # current indentation while rendering
        self._sortval = 3       # sort weight derived from state (lower first)

    def __str__(self):
        s = "%s%s: %s [%s]\n" % (" " * self._indent, self.key, self.summary, self.issuetype)
        s += "%s | sponsors: %s\n" % (" " * self._indent, ", ".join(self.sponsors))
        s += "%s | assignee: %s\n" % (" " * self._indent, self.assignee)
        s += "%s | description: %s\n" % (" " * self._indent, self.description)
        s += "%s | parent: %s\n" % (" " * self._indent, self.parent)
        s += "%s | state: %s\n" % (" " * self._indent, self.state)
        s += "%s | url: %s\n" % (" " * self._indent, self.get_url())
        s += "%s |-> color: %s\n" % (" " * self._indent, self.get_color())
        return s

    def __lt__(self, other):
        # Sort "In Progress" first, then "To Do"/"Blocked", then the rest.
        return self._sortval < other._sortval

    def _short_type(self):
        """Return a one-letter tag for the issue type (E/S/I)."""
        st = "I"
        if self.issuetype == "Epic":
            st = "E"
        elif self.issuetype == "Story":
            st = "S"
        return st

    def get_key(self):
        return self.key

    def add_assignee(self, assignee):
        self.assignee = assignee

    def get_assignee(self):
        return self.assignee

    def add_sponsor(self, sponsor):
        self.sponsors.append(sponsor)

    def get_sponsor(self, sponsor):
        # NOTE: the `sponsor` parameter is unused; kept for interface
        # compatibility.  Returns the full sponsor list.
        return self.sponsors

    def add_description(self, description):
        self.description = description

    def get_description(self, description):
        # NOTE: the `description` parameter is unused; kept for interface
        # compatibility.
        #try:
        #    f.write("<richcontent TYPE=\"DETAILS\" HIDDEN=\"true\"\n>")
        #    f.write("<html>\n<head>\n</head>\n<body>\n<p>\n")
        #    f.write(issue.fields.description)
        #except UnicodeEncodeError:
        #    vprint("UnicodeEncodeError in description in %s" % str(issue))
        #    f.write("Unicode error in description, please go to Jira\n")
        #f.write("\n</p>\n</body>\n</html>\n</richcontent>\n")
        return self.description

    def add_parent(self, key):
        self.parent = key

    def get_parent(self):
        # BUG FIX: this previously returned self.key (the node's own key)
        # instead of the recorded parent key.
        return self.parent

    def add_child(self, node):
        """Attach *node* as a child and record this node's key as its parent."""
        node.add_parent(self.key)
        self.childrens.append(node)

    def set_state(self, state):
        """Record the Jira status and derive the sort weight from it."""
        self.state = state
        if self.state in ["In Progress"]:
            self._sortval = 1
        elif self.state in ["To Do", "Blocked"]:
            self._sortval = 2
        else:
            self._sortval = 3

    def get_state(self):
        return self.state

    def set_color(self, color):
        self.color = color

    def get_color(self):
        """Return the explicit color if set, else one derived from the state."""
        if self.color is not None:
            return self.color
        color = "#990000" # Red
        if self.state == "In Progress":
            color = "#009900" # Green
        elif self.state in ["Blocked", "To Do"]:
            color = "#ff6600" # Orange
        return color

    def set_base_url(self, base_url):
        self.base_url = base_url

    def get_url(self):
        """Return the browse URL for this issue, or None if no base url set."""
        if self.base_url is not None:
            return self.base_url + "/browse/" + self.key
        else:
            return self.base_url

    def gen_tree(self, indent=0):
        """Print this node and, recursively, all of its children."""
        self._indent = indent
        print(self)
        for c in self.childrens:
            c.gen_tree(self._indent + 4)

    def to_xml(self, f, indent=0):
        """Write this node (and recursively its children) as Freeplane xml to *f*."""
        self._indent = indent

        # Main node: epics/stories are folded by default, with cfg overrides.
        fold = "false"
        if self.issuetype in ["Epic", "Story"]:
            fold = "true"
        if cfg.args.s and self.issuetype == "Epic":
            fold = "false"
        if cfg.args.i and self.issuetype == "Initiative":
            fold = "true"
        xml_start = "%s<node LINK=\"%s\" TEXT=\"%s/%s: %s\" FOLDED=\"%s\" COLOR=\"%s\">\n" % \
            (" " * self._indent,
             self.get_url(),
             self._short_type(),
             self.key,
             self.summary,
             fold,
             self.get_color())
        f.write(xml_start)

        # Info start
        xml_info_start = "%s<node TEXT=\"info\" FOLDED=\"true\" COLOR=\"#000000\">\n" % \
            (" " * (self._indent + 4))
        f.write(xml_info_start)

        # Assignee, single node
        xml_assignee = "%s<node TEXT=\"Assignee: %s\" FOLDED=\"false\" COLOR=\"#000000\"/>\n" % \
            (" " * (self._indent + 8),
             self.assignee)
        f.write(xml_assignee)

        # Sponsors
        xml_sponsor_start = "%s<node TEXT=\"Sponsors\" FOLDED=\"false\" COLOR=\"#000000\">\n" % \
            (" " * (self._indent + 8))
        f.write(xml_sponsor_start)
        for s in self.sponsors:
            xml_sponsor = "%s<node TEXT=\"%s\" FOLDED=\"false\" COLOR=\"#000000\"/>\n" % \
                (" " * (self._indent + 12), s)
            f.write(xml_sponsor)
        # Sponsors end
        xml_sponsor_end = "%s%s" % (" " * (self._indent + 8), "</node>\n")
        f.write(xml_sponsor_end)

        # Info end
        xml_info_end = "%s%s" % (" " * (self._indent + 4), "</node>\n")
        f.write(xml_info_end)

        # Recursive print all childrens
        for c in sorted(self.childrens):
            c.to_xml(f, self._indent + 4)

        # Add the closing element
        xml_end = "%s%s" % (" " * self._indent, "</node>\n")
        f.write(xml_end)
def open_file(filename):
    """
    This will open the user provided file and if there has not been any file
    provided it will create and open a temporary file instead.

    Returns the open file object; the caller is responsible for closing it.
    """
    vprint("filename: %s\n" % filename)
    if filename:
        return open(filename, "w")
    # BUG FIX: `tempfile` was used here without ever being imported, so this
    # branch raised NameError.  Import it locally where it is needed.
    import tempfile
    return tempfile.NamedTemporaryFile(delete=False)
def get_parent_key(jira, issue):
    """Return the "Epic link" custom field (customfield_10005) of *issue*, or
    None when the field is absent.  The *jira* handle is unused but kept for
    signature compatibility with the build helpers."""
    return getattr(issue.fields, "customfield_10005", None)
################################################################################
# Argument parser
################################################################################
def get_parser():
    """ Takes care of script argument parsing. """
    parser = ArgumentParser(
        description='Script used to generate Freeplane mindmap files')
    parser.add_argument('-i', action="store_true", default=False,
                        help='Show Initiatives only')
    parser.add_argument('-p', '--project', action="store", default="SWG",
                        help='Project type (SWG, VIRT, KWG etc)')
    parser.add_argument('-s', action="store_true", default=False,
                        help='Show stories also')
    parser.add_argument('-t', action="store_true", default=False,
                        help='Use the test server')
    parser.add_argument('-v', action="store_true", default=False,
                        help='Output some verbose debugging info')
    parser.add_argument('--all', action="store_true", default=False,
                        help='Load all Jira issues, not just the once marked in progress.')
    parser.add_argument('--desc', action="store_true", default=False,
                        help='Add description to the issues')
    parser.add_argument('--test', action="store_true", default=False,
                        help='Run test case and then exit')
    return parser
################################################################################
# General nodes
################################################################################
def root_nodes_start(f, key):
    """Write the Freeplane map header and the root project node to *f*."""
    f.write("<map version=\"freeplane 1.6.0\">\n")
    project_url = "%s/projects/%s" % (cfg.server, key)
    f.write("<node LINK=\"%s\" TEXT=\"%s\" FOLDED=\"false\" COLOR=\"#000000\""
            " LOCALIZED_STYLE_REF=\"AutomaticLayout.level.root\">\n"
            % (project_url, key))
def root_nodes_end(f):
    """Close the root node and the Freeplane map element."""
    f.write("</node>\n</map>")
def orphan_node_start(f):
    """Open the left-positioned 'Orphans' container node."""
    f.write('<node TEXT="Orphans" POSITION="left" FOLDED="false" '
            'COLOR="#000000">\n')
def orphan_node_end(f):
    """Close the 'Orphans' container node."""
    f.write("</node>\n")
################################################################################
# Test
################################################################################
def test():
    """Build a small hard-coded issue tree and dump it to test.mm.

    Exercises Node construction, parent/child linking, state/color handling
    and xml serialisation without talking to Jira.
    """
    f = open_file("test" + ".mm")
    root_nodes_start(f, "Test")
    # One initiative with three epics; the epics carry stories in various
    # states to exercise sorting and coloring.
    n1 = Node("SWG-1", "My issue 1", "Initiative")
    n12 = Node("SWG-12", "My issue 12", "Epic")
    n200 = Node("SWG-200", "My issue 200", "Story")
    n201 = Node("SWG-201", "My issue 201", "Story")
    n12.add_child(n200)
    n12.add_child(n201)
    n13 = Node("SWG-13", "My issue 13", "Epic")
    n13.add_assignee("Joakim")
    n13.set_state("In Progress")
    n14 = Node("SWG-14", "My issue 14", "Epic")
    n202 = Node("SWG-202", "My issue 202", "Story")
    n202.set_state("In Progress")
    n203 = Node("SWG-203", "My issue 203", "Story")
    n203.set_state("Blocked")
    n204 = Node("SWG-204", "My issue 204", "Story")
    n204.set_state("In Progress")
    n205 = Node("SWG-205", "My issue 205", "Story")
    n14.add_child(n202)
    n14.add_child(n203)
    n14.add_child(n204)
    n14.add_child(n205)
    n14.add_assignee("Joakim")
    n14.set_state("To Do")
    n14.set_color("#0000FF")
    n14.add_sponsor("STE")
    n14.add_sponsor("Arm")
    n14.add_sponsor("Hisilicon")
    n14.set_base_url(cfg.server)
    n1.add_child(n12)
    n1.add_child(n13)
    n1.add_child(n14)
    # Print the tree to stdout, then serialize it to the .mm file.
    n1.gen_tree()
    n1.to_xml(f)
    root_nodes_end(f)
    f.close()
################################################################################
# Stories
################################################################################
def build_story_node(jira, story_key, d_handled=None, epic_node=None):
    """Fetch Jira Story *story_key* and wrap it in a Node.

    Closed/Resolved stories are recorded in *d_handled* but produce no node.
    When *epic_node* is given the story is attached to it; otherwise the
    "Epic link" custom field is used to find a parent already in *d_handled*.

    Returns the new Node, or None when the story is closed/resolved.
    """
    if d_handled is None:
        # BUG FIX: the default was None yet the dict was dereferenced
        # unconditionally below, raising TypeError for default callers.
        d_handled = {}
    si = jira.issue(story_key)
    if si.fields.status.name in ["Closed", "Resolved"]:
        d_handled[str(si.key)] = [None, si]
        return None
    # To prevent UnicodeEncodeError ignore unicode
    summary = str(si.fields.summary.encode('ascii', 'ignore').decode())
    story = Node(str(si.key), summary, str(si.fields.issuetype))
    try:
        assignee = str(si.fields.assignee.displayName.encode('ascii', 'ignore').decode())
    except AttributeError:
        # Unassigned issues carry assignee == None.
        assignee = str(si.fields.assignee)
    story.add_assignee(assignee)
    story.set_state(str(si.fields.status.name))
    story.set_base_url(cfg.server)
    if epic_node is not None:
        story.add_parent(epic_node.get_key())
        epic_node.add_child(story)
    else:
        # This catches issues that do not use implements/implemented-by but
        # at least carry an "Epic" link that we can use.
        parent = get_parent_key(jira, si)
        if parent is not None and parent in d_handled:
            parent_node = d_handled[parent][0]
            if parent_node is not None:
                # BUG FIX: pass the parent's key (a string), not the Node
                # object, matching what add_child() records on the child.
                story.add_parent(parent_node.get_key())
                parent_node.add_child(story)
        else:
            vprint("Didn't find any parent")
    print(story)
    d_handled[story.get_key()] = [story, si]
    return story
################################################################################
# Epics
################################################################################
def build_epics_node(jira, epic_key, d_handled=None, initiative_node=None):
    """Fetch Jira Epic *epic_key*, wrap it in a Node and attach its stories.

    Closed/Resolved epics are recorded in *d_handled* but produce no node.
    When *initiative_node* is given the epic is attached to it; otherwise the
    "Epic link" custom field is used to find a parent already in *d_handled*.
    Linked (inward) issues are added recursively as story children.

    Returns the new Node, or None when the epic is closed/resolved.
    """
    if d_handled is None:
        # BUG FIX: the default was None yet the dict was dereferenced
        # unconditionally below, raising TypeError for default callers.
        d_handled = {}
    ei = jira.issue(epic_key)
    if ei.fields.status.name in ["Closed", "Resolved"]:
        d_handled[str(ei.key)] = [None, ei]
        return None
    # Strip non-ascii characters to prevent UnicodeEncodeError downstream.
    summary = str(ei.fields.summary.encode('ascii', 'ignore').decode())
    epic = Node(str(ei.key), summary, str(ei.fields.issuetype))
    try:
        assignee = str(ei.fields.assignee.displayName.encode('ascii', 'ignore').decode())
    except AttributeError:
        # Unassigned issues carry assignee == None.
        assignee = str(ei.fields.assignee)
    epic.add_assignee(assignee)
    epic.set_state(str(ei.fields.status.name))
    try:
        # customfield_10101 presumably holds the sponsoring companies —
        # TODO confirm against the Jira instance's field configuration.
        sponsors = ei.fields.customfield_10101
        if sponsors is not None:
            for s in sponsors:
                epic.add_sponsor(str(s.value))
    except AttributeError:
        epic.add_sponsor("No sponsor")
    epic.set_base_url(cfg.server)
    if initiative_node is not None:
        epic.add_parent(initiative_node.get_key())
        initiative_node.add_child(epic)
    else:
        # This catches issues that do not use implements/implemented-by but
        # at least carry an "Initiative" link that we can use.
        parent = get_parent_key(jira, ei)
        if parent is not None and parent in d_handled:
            parent_node = d_handled[parent][0]
            if parent_node is not None:
                # BUG FIX: pass the parent's key (a string), not the Node
                # object, matching what add_child() records on the child.
                epic.add_parent(parent_node.get_key())
                parent_node.add_child(epic)
        else:
            vprint("Didn't find any parent")
    d_handled[epic.get_key()] = [epic, ei]
    # Deal with stories linked to this epic.
    for link in ei.fields.issuelinks:
        if "inwardIssue" in link.raw:
            story_key = str(link.inwardIssue.key)
            build_story_node(jira, story_key, d_handled, epic)
    print(epic)
    return epic
################################################################################
# Initiatives
################################################################################
def build_initiatives_node(jira, issue, d_handled):
    """Wrap Initiative *issue* in a Node and attach its linked epics.

    Closed/Resolved initiatives are recorded in *d_handled* but produce no
    node.  Returns the new Node, or None when the issue is closed/resolved.
    """
    if issue.fields.status.name in ["Closed", "Resolved"]:
        d_handled[str(issue.key)] = [None, issue]
        return None
    # Strip non-ascii characters to avoid UnicodeEncodeError downstream.
    summary = str(issue.fields.summary.encode('ascii', 'ignore').decode())
    initiative = Node(str(issue.key), summary, str(issue.fields.issuetype))
    try:
        assignee = str(issue.fields.assignee.displayName.encode('ascii', 'ignore').decode())
    except AttributeError:
        # Unassigned issues carry assignee == None.
        assignee = str(issue.fields.assignee)
    initiative.add_assignee(assignee)
    initiative.set_state(str(issue.fields.status.name))
    sponsors = None
    # customfield_10101 presumably holds the sponsoring companies — TODO
    # confirm against the Jira instance's field configuration.
    if hasattr(issue.fields, "customfield_10101"):
        sponsors = issue.fields.customfield_10101
    if sponsors is not None:
        for s in sponsors:
            initiative.add_sponsor(str(s.value))
    initiative.set_base_url(cfg.server)
    print(initiative)
    d_handled[initiative.get_key()] = [initiative, issue] # Initiative
    # Deal with Epics
    for link in issue.fields.issuelinks:
        if "inwardIssue" in link.raw:
            epic_key = str(link.inwardIssue.key)
            build_epics_node(jira, epic_key, d_handled, initiative)
    return initiative
def build_initiatives_tree(jira, key, d_handled):
    """Build a Node tree for every open Initiative in project *key*.

    Closed/Resolved initiatives yield None from the node builder and are
    filtered out.  Returns the list of root Nodes.
    """
    jql = "project=%s AND issuetype in (Initiative)" % (key)
    candidates = (build_initiatives_node(jira, issue, d_handled)
                  for issue in jira.search_issues(jql))
    return [node for node in candidates if node is not None]
def build_orphans_tree(jira, key, d_handled):
    """Build nodes for every open issue in project *key* not already handled
    (i.e., not reachable from any Initiative in the main tree).

    Issues are grouped by type and processed Initiative -> Epic -> Story so
    parents exist before their children, laying the orphan subtree out nicely.
    Returns the list of created Nodes.
    """
    jql = "project=%s" % (key)
    all_issues = jira.search_issues(jql)
    orphans_initiatives = []
    orphans_epics = []
    orphans_stories = []
    for i in all_issues:
        if str(i.key) not in d_handled:
            if i.fields.status.name in ["Closed", "Resolved"]:
                continue
            else:
                if i.fields.issuetype.name == "Initiative":
                    orphans_initiatives.append(i)
                elif i.fields.issuetype.name == "Epic":
                    orphans_epics.append(i)
                elif i.fields.issuetype.name == "Story":
                    orphans_stories.append(i)
    # Now we have three lists of Jira tickets not touched before; go over
    # them starting with Initiatives, then Epics and last Stories.
    nodes = []
    vprint("Orphan Initiatives ...")
    for i in orphans_initiatives:
        node = build_initiatives_node(jira, i, d_handled)
        # BUG FIX: the build_* helpers return None for issues they skip;
        # appending None made sorted(...) over these nodes in main() raise
        # TypeError (None is not orderable against Node).
        if node is not None:
            nodes.append(node)
    vprint("Orphan Epics ...")
    for i in orphans_epics:
        node = build_epics_node(jira, str(i.key), d_handled)
        if node is not None:
            nodes.append(node)
    vprint("Orphan Stories ...")
    for i in orphans_stories:
        node = build_story_node(jira, str(i.key), d_handled)
        if node is not None:
            nodes.append(node)
    return nodes
################################################################################
# Config files
################################################################################
def get_config_file():
    """ Returns the location for the config file (including the path). """
    # Try the current file name before the legacy one in every location;
    # first existing match wins.  Falls through to None when nothing exists.
    for directory in cfg.config_locations:
        for name in [cfg.config_filename, cfg.config_legacy_filename]:
            candidate = directory + "/" + name
            if os.path.isfile(candidate):
                return candidate
def initiate_config():
    """ Reads the config file (yaml format) and returns the sets the global
    instance.
    """
    cfg.config_file = get_config_file()
    if not os.path.isfile(cfg.config_file):
        # NOTE(review): create_default_config() is not defined in this file —
        # confirm it exists elsewhere, or this branch raises NameError.  Also,
        # get_config_file() may return None, which makes os.path.isfile raise.
        create_default_config()
    vprint("Using config file: %s" % cfg.config_file)
    with open(cfg.config_file, 'r') as yml:
        # BUG FIX: yaml.load() without an explicit Loader is deprecated and
        # unsafe on untrusted input; safe_load() is correct for config data.
        cfg.yml_config = yaml.safe_load(yml)
################################################################################
# Main function
################################################################################
def main(argv):
    """Entry point: log in to Jira, build the Initiative/Epic/Story tree for
    the chosen project and write it out as a Freeplane .mm file."""
    parser = get_parser()

    # The parser arguments (cfg.args) are accessible everywhere after this call.
    cfg.args = parser.parse_args()

    # This initiates the global yml configuration instance so it will be
    # accessible everywhere after this call.
    initiate_config()

    key = "SWG"

    if cfg.args.test:
        test()
        exit()

    jira, username = jiralogin.get_jira_instance(cfg.args.t)

    if cfg.args.project:
        key = cfg.args.project

    # Open and initialize the file
    f = open_file(key + ".mm")
    root_nodes_start(f, key)

    # Temporary dictionary to keep track of the data (issues) that we already
    # have dealt with.
    d_handled = {}

    # Build the main tree with Initiatives belonging to the project.
    nodes = build_initiatives_tree(jira, key, d_handled)

    # Take care of the orphans, i.e., those who have no connection to any
    # initiative in your project.
    nodes_orpans = build_orphans_tree(jira, key, d_handled)

    # FIXME: We run through this once more since, when we run it the first time
    # we will catch Epics and Stories who are not linked with
    # "implements/implemented by" but instead uses the so called "Epic" link.
    nodes_orpans = build_orphans_tree(jira, key, d_handled)

    # Dump the main tree to file
    for n in sorted(nodes):
        n.to_xml(f)

    orphan_node_start(f)
    for n in sorted(nodes_orpans):
        n.to_xml(f)
    orphan_node_end(f)

    # End the file
    root_nodes_end(f)
    f.close()

if __name__ == "__main__":
    main(sys.argv)
|
# -*- coding: utf-8 -*-
"""
Example use of the PVL overhead power-line parameter calculation module,
following Relay Protection Guideline No. 11 ("Short-circuit current
calculations for 110-750 kV relay protection and system automation"),
Saratov, 24.11.2020.

(The original docstring and comments were in Russian; runtime strings keep
their original Russian text since they label the guideline's examples.)
"""
#import numpy as np
import PVL5 as PVL

# Example 2.3 from Guideline 11: four parallel lines on PB220 towers with
# ACO300 conductors.  PVL.provod presumably models a conductor, PVL.opora a
# tower geometry, PVL.sech a calculation section — TODO confirm in PVL5.
ASO300 = PVL.provod('ACО300 из примера 2.3 РУ11',0.108,23.5,0.9)
PB220 = PVL.opora('ПБ220 из примера 2.3 РУ11 h+10м', [-5.3+18j, 8.8+18j, 5.3+25.5j], [0.0, 0.0, 0.0], 0j, 0j)
sk2_3=PVL.sech('Пример 2.3 из РУ11',1.0,0.05,1000.0)
l1 = PVL.Line(sk2_3,'Линия 1',0.0,PB220.C1,ASO300,0,0,7)
l2 = PVL.Line(sk2_3,'Линия 2',30.0,PB220.C1,ASO300,0,0,7)
l3 = PVL.Line(sk2_3,'Линия 3',70.0,PB220.C1,ASO300,0,0,7)
l4 = PVL.Line(sk2_3,'Линия 4',120.0,PB220.C1,ASO300,0,0,7)
sk2_3.calc()
sk2_3.res()

# Examples 2.4 and 2.6: 500 kV lines on PB500-2t towers with bundled
# 3xACO400 conductors (0.4 m spacing) and C-70 ground wires.
PB500_2T=PVL.opora('ПБ500-2т из примера 2.4 РУ11',(-12+27j,0+27j,12+27j),(0.0,0.0,0.0),-8.1+32j,8.1+32j)
ASO400=PVL.provod('ACО400 из примера 2.4 РУ11',0.078,27.2,0.9)
ASO400_3_04=ASO400.RaschPr('3*ACО400-0.4м',3,0.4)
S70=PVL.provod('С-70',2.625,13.0)
sk2_4=PVL.sech('Примеры 2.4 и 2.6 из РУ11',1.0,0.05,1000.0)
l1 = PVL.Line(sk2_4,'Линия 1',0.0,PB500_2T.C1,ASO400_3_04,5.0,0,14)
t1_1 = PVL.Line(sk2_4,'Трос 1 Линия 1',0.0,PB500_2T.T1,S70,0.5,1,14)
t1_2 = PVL.Line(sk2_4,'Трос 2 Линия 2',0.0,PB500_2T.T2,S70,0.5,1,14)
l2 = PVL.Line(sk2_4,'Линия 2',50.0,PB500_2T.C1,ASO400_3_04,5.0,0,14)
t2_1 = PVL.Line(sk2_4,'Трос 1 Линия 2',50.0,PB500_2T.T1,S70,0.5,1,14)
t2_2 = PVL.Line(sk2_4,'Трос 2 Линия 2',50.0,PB500_2T.T2,S70,0.5,1,14)
sk2_4.calc()
sk2_4.res()

# Third example: four 110 kV lines on PB110-1 towers, AC-150/24 conductors
# with PS-70D insulator strings and C-50 ground wires.
AC_150_24 = PVL.provod('AC-150/24', 0.198+0.000j, 17.1, 0.95)
S50=PVL.provod('С-50',3.75,9.1)
PS70D=PVL.izol('ПС-70Д',0.127)
PB110_1 = PVL.opora('ПБ110-1', [-2.0+14.5j, 3.5+14.5j, 2.0+17.5j], [0.0, 0.0, 0.0], 0.0+19.5j, 0.0+0.0j)
sk3=PVL.sech('Пример с 4-мя ВЛ 110 кВ',1.0,0.05,1000.0)
pl1 = PVL.Line(sk3,'Линия 1',0.0,PB110_1.C1,AC_150_24,PS70D(8),0,5.5)
t1_1 = PVL.Line(sk3,'Трос 1 Линия 1',0.0,PB110_1.T1,S50,0.0,2,5.5)
pl2 = PVL.Line(sk3,'Линия 2',30.0,PB110_1.C1,AC_150_24,PS70D(8),0,5.5)
t2_1 = PVL.Line(sk3,'Трос 1 Линия 2',30.0,PB110_1.T1,S50,0.0,2,5.5)
pl3 = PVL.Line(sk3,'Линия 3',60.0,PB110_1.C1,AC_150_24,PS70D(8),0,5.5)
t3_1 = PVL.Line(sk3,'Трос 1 Линия 3',60.0,PB110_1.T1,S50,0.0,2,5.5)
pl4 = PVL.Line(sk3,'Линия 4',90.0,PB110_1.C1,AC_150_24,PS70D(8),0,5.5)
t4_1 = PVL.Line(sk3,'Трос 1 Линия 4',90.0,PB110_1.T1,S50,0.0,2,5.5)
sk3.calc()
sk3.res()
|
# A linear-time BFS-based solution.
from collections import deque
def bfs(rate_graph, start, end):
    """Return the conversion rate from *start* to *end*, or None if unreachable.

    Breadth-first search over ``rate_graph`` (an object exposing
    ``get_neighbors(node) -> iterable of (node, rate)``), multiplying edge
    rates along the first (fewest-hop) path found.

    Fix: nodes are now marked visited when *enqueued* rather than when
    dequeued; the original allowed the same node to enter the queue many
    times, doing redundant work (potentially much more on dense graphs).
    The returned rate is unchanged — the first-enqueued occurrence is
    always the one that was expanded first.
    """
    to_visit = deque()
    to_visit.appendleft((start, 1.0))
    visited = {start}
    while to_visit:
        node, rate_from_origin = to_visit.pop()
        if node == end:
            return rate_from_origin
        for unit, rate in rate_graph.get_neighbors(node):
            if unit not in visited:
                visited.add(unit)
                to_visit.appendleft((unit, rate_from_origin * rate))
    return None
if __name__ == '__main__':
    # Demo: print the conversion rate between 'hand' and 'lightyear'
    # using the rate graph shipped with the exercise.
    from rates import RATE_GRAPH
    print(bfs(RATE_GRAPH, 'hand', 'lightyear'))
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pandas as pd
from EMNaiveBayes import EMNaiveBayes
def main():
    """Cluster the soybean data with EM naive Bayes and score it against labels."""
    data_path = 'data/Soybean/soybean-large.data.txt'
    label_col = 0
    # Load every column as str; '?' marks missing values, rows with any are dropped.
    frame = pd.read_csv(data_path, sep=',', header=None, dtype=str,
                        skiprows=None, na_values='?', keep_default_na=False)
    frame.dropna(inplace=True)
    labels = frame[label_col]
    n_clusters = len(set(labels))
    frame.drop(label_col, axis=1, inplace=True)
    model = EMNaiveBayes(epsilon=1e-5)
    model.fit(frame.values, n_clusters)
    model.evaluate(labels)
if __name__ == '__main__':
    # Script entry point.
    main()
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import PostEstimation_pb2 as PostEstimation__pb2
# NOTE: auto-generated by the gRPC Python plugin — regenerate from the .proto
# instead of editing by hand.
class PostEstimationServiceStub(object):
  # missing associated documentation comment in .proto file
  pass
  def __init__(self, channel):
    """Constructor.
    Args:
      channel: A grpc.Channel.
    """
    # Unary-unary RPC callable bound to the fully-qualified method path.
    self.GetPoseEstimation = channel.unary_unary(
        '/protocolor.PostEstimationService/GetPoseEstimation',
        request_serializer=PostEstimation__pb2.ImageInfo.SerializeToString,
        response_deserializer=PostEstimation__pb2.PoseEstimationResponse.FromString,
        )
# NOTE: auto-generated servicer base class — override GetPoseEstimation in a
# subclass and register it with add_PostEstimationServiceServicer_to_server.
class PostEstimationServiceServicer(object):
  # missing associated documentation comment in .proto file
  pass
  def GetPoseEstimation(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    # Default stub behavior: report UNIMPLEMENTED to the client and raise.
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
def add_PostEstimationServiceServicer_to_server(servicer, server):
  """Register *servicer*'s RPC method handlers with a grpc *server*."""
  rpc_method_handlers = {
      'GetPoseEstimation': grpc.unary_unary_rpc_method_handler(
          servicer.GetPoseEstimation,
          request_deserializer=PostEstimation__pb2.ImageInfo.FromString,
          response_serializer=PostEstimation__pb2.PoseEstimationResponse.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'protocolor.PostEstimationService', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
|
from .multiclass_classification import MultiClassClassificationDataset
# EuroSAT land-use / land-cover class names, in label-index order.
LABELS = [
    "AnnualCrop",
    "Forest",
    "HerbaceousVegetation",
    "Highway",
    "Industrial",
    "Pasture",
    "PermanentCrop",
    "Residential",
    "River",
    "SeaLake",
]
class EurosatDataset(MultiClassClassificationDataset):
    """Multi-class classification dataset for EuroSAT satellite imagery."""
    # Upstream project page for the data.
    url = "https://github.com/phelber/EuroSAT"
    labels = LABELS
    name = "EuroSAT dataset"
    def __init__(self, config):
        # now call the constructor to validate the schema and load the data
        super().__init__(config)
|
from torch.utils.data import DataLoader
class BaseDataLoader(DataLoader):
    """Project base class for data loaders, extending torch's DataLoader."""

    def split_validation(self) -> DataLoader:
        """Return a DataLoader over the validation split.

        Subclasses must override this.  Fix: the original *returned* the
        NotImplementedError class (a truthy value callers could silently
        mistake for a loader) instead of raising it.
        """
        raise NotImplementedError
|
class BinaryTree:
    """Unbalanced binary search tree mapping integer keys to float values.

    Duplicate keys are ignored on insert.  Traversal methods print keys
    rather than returning them (matching the original interface).

    Fixes in this revision:
    * ``self.Node`` in annotations raised NameError at class-definition
      time (the module could not even be imported); string annotations
      are used instead.
    * ``deleteNode`` crashed with AttributeError when the key was absent
      (the ``while`` condition dereferenced None), and deleting the root
      returned the orphaned subtree instead of updating ``self.root``.
    """

    def __init__(self):
        # Root node; None for an empty tree.
        self.root = None

    def find(self, key: int) -> bool:
        """Return True if *key* is stored in the tree."""
        current = self.root
        while current:
            if current.key == key:
                return True
            # Larger keys live in the right subtree, smaller in the left.
            current = current.right if current.key < key else current.left
        return False

    def insert(self, key: int, value: float):
        """Insert *key* -> *value*; a key already present is left unchanged."""
        new_node = self.Node(key, value)
        if not self.root:
            self.root = new_node
            return
        current = self.root
        while True:
            if current.key == key:
                return  # duplicate key: keep the existing node
            parent = current
            if current.key < key:
                current = current.right
                if not current:
                    parent.right = new_node
                    return
            else:
                current = current.left
                if not current:
                    parent.left = new_node
                    return

    def deleteNode(self, key: int):
        """Remove the node holding *key*, if present."""
        parent = None
        current = self.root
        is_left_child = False
        # Locate the node to delete together with its parent.
        while current and current.key != key:
            parent = current
            if current.key > key:
                is_left_child = True
                current = current.left
            else:
                is_left_child = False
                current = current.right
        if not current:
            return  # key not present
        # Choose the subtree that replaces the deleted node.
        if not current.left and not current.right:
            # case: leaf
            replacement = None
        elif not current.right:
            # case: only a left child
            replacement = current.left
        elif not current.left:
            # case: only a right child
            replacement = current.right
        else:
            # case: two children — splice in the in-order successor
            replacement = self.__get_successor(current)
            replacement.left = current.left
        # Re-attach the replacement to the parent (or make it the new root).
        if parent is None:
            self.root = replacement
        elif is_left_child:
            parent.left = replacement
        else:
            parent.right = replacement

    def __get_successor(self, to_delete: "BinaryTree.Node") -> "BinaryTree.Node":
        """Detach and return the minimum node of *to_delete*'s right subtree."""
        parent = to_delete
        successor = to_delete
        current = to_delete.right
        while current:
            parent = successor
            successor = current
            current = current.left
        if successor != to_delete.right:
            # Unlink the successor from its old spot and adopt the right subtree.
            parent.left = successor.right
            successor.right = to_delete.right
        return successor

    def dfs_traverse(self):
        """Print keys in ascending (in-order) order."""
        self.__in_order(self.root)

    def __in_order(self, to_visit: "BinaryTree.Node"):
        # Recursive in-order walk: left subtree, node, right subtree.
        if to_visit:
            self.__in_order(to_visit.left)
            print(to_visit.key)
            self.__in_order(to_visit.right)

    def bfs_traverse(self):
        """Print keys level by level (breadth-first)."""
        if not self.root:
            return
        queue = [self.root]
        # Iterating a list while appending to it yields a simple FIFO walk.
        for node in queue:
            print(node.key)
            if node.left:
                queue.append(node.left)
            if node.right:
                queue.append(node.right)

    def iterative_traverse(self):
        """Print keys in-order without recursion, using an explicit stack."""
        if not self.root:
            return
        stack = []
        current = self.root
        while current or stack:
            # add all through the left-most node of the tree
            while current:
                stack.append(current)
                current = current.left
            current = stack.pop()
            print(current.key)
            # if the removed node still has a non-empty right subtree,
            # continue through the right node's left-most child
            current = current.right

    # inner class
    class Node:
        """A single tree node: key, value, and child links."""

        def __init__(self, key: int, value: float):
            self.key = key
            self.value = value
            self.left = None
            self.right = None
from collections import defaultdict
import os
from pathlib import Path
import joblib
import numpy as np
import pandas as pd
import torch
from .transforms.util import get_transforms
from .engine.voc_assayer import VOCAssayer
from .datasets import VOCDetection
from .utils.general import make_save_path
def assay(csv_file,
          net_name,
          number_nets_to_train,
          epochs_list,
          batch_size,
          restore_path,
          test_results_save_path,
          configfile,
          random_seed,
          root=None,
          num_classes=2,
          pad_size=500,
          embedding_n_out=512,
          loss_func='CE',
          method='transfer',
          mode='classify',
          num_workers=4,
          data_parallel=False):
    """assay behavior of models trained with Pascal VOC Detection set
    Parameters
    ----------
    csv_file : str
        name of .csv file containing prepared data sets.
        Generated by searchnets.data.split function.
    net_name : str
        name of convolutional neural net architecture to train.
        One of {'alexnet', 'VGG16', 'CORnet_Z', 'CORnet_S'}
    number_nets_to_train : int
        number of training "replicates"
    epochs_list : list
        of training epochs. Replicates will be trained for each
        value in this list. Can also just be one value, but a list
        is useful if you want to test whether effects depend on
        number of training epochs.
    batch_size : int
        number of samples in a batch of training data
    restore_path : str
        path to directory where checkpoints and train models were saved
    test_results_save_path : str
        path to directory where results from measuring accuracy on test set should be saved
    configfile : str
        filename of config.ini file. Used (without .ini extension) as name for output file
        that is saved in test_results_save_path.
    random_seed : int
        to seed random number generator
    root : str
        path to dataset root. Used with VOCDetection dataset to specify where VOC data was downloaded to.
    num_classes : int
        number of classes. Default is 2 (target present, target absent).
    pad_size : int
        size to which images in PascalVOC / Visual Search Difficulty dataset should be padded.
        Images are padded by making an array of zeros and randomly placing the image within it
        so that the entire image is still within the boundaries of (pad size x pad size).
        Default value is specified by searchnets.transforms.functional.VSD_PAD_SIZE.
        Argument has no effect if the dataset_type is not 'VOC'.
        Used to determine transforms to use at test time.
    loss_func : str
        type of loss function to use. One of {'CE', 'BCE'}. Default is 'CE',
        the standard cross-entropy loss. Used to determine transforms to use at test time.
    num_workers : int
        number of workers used by torch.DataLoaders. Default is 4.
    data_parallel : bool
        if True, use torch.nn.dataparallel to train network on multiple GPUs. Default is False.
    method : str
        training method. One of {'initialize', 'transfer'}.
        'initialize' means randomly initialize all weights and train the
        networks "from scratch".
        'transfer' means perform transfer learning, using weights pre-trained
        on imagenet.
        Default is 'transfer'.
    mode : str
        training mode. One of {'classify', 'detect'}.
        'classify' is standard image classification.
        'detect' trains to detect whether specified target is present or absent.
        Default is 'classify'.
    embedding_n_out : int
        for DetectNet, number of output features from input embedding.
        I.e., the output size of the linear layer that accepts the
        one hot vector querying whether a specific class is present as input.
        Default is 512.
    Returns
    -------
    None
        saves .npz output file with following keys:
        arrays_per_model_dict : dict
            where keys are paths to model and values are array
            of predictions made by that model for test set
    """
    if mode == 'detect' and loss_func != 'BCE':
        # fixed message: the closing quote around 'BCE' was missing
        print(
            f"when mode is 'detect', loss_func must be 'BCE', but was {loss_func}. Setting to 'BCE'."
        )
        loss_func = 'BCE'
    # NOTE(review): a random_seed of 0 is falsy and is silently skipped here —
    # confirm that is intended.
    if random_seed:
        np.random.seed(random_seed)  # for shuffling in batch_generator
        torch.manual_seed(random_seed)
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False
    if torch.cuda.is_available():
        device = torch.device('cuda')
    else:
        device = torch.device('cpu')
    for epochs in epochs_list:
        print(f'assaying behavior on test set for {net_name} model trained for {epochs} epochs')
        # ------ initialize variables to hold outputs from all training replicates ------------------------------------
        # ---- for VSD save results.gz **and** a .csv, because we have multiple metrics,
        # and because csv files are better anyway
        assay_records = defaultdict(list)  # records gets turned into pandas DataFrame, then saved as .csv
        # these will be lists of DataFrames, one for each training replicate
        df_lists = defaultdict(list)
        arrays_per_model = {}  # inputs/outputs of model, where key is restore path, and value is dict of arrays
        for net_number in range(1, number_nets_to_train + 1):
            transform, target_transform = get_transforms('VSD', loss_func, pad_size)
            testset = VOCDetection(root=root,
                                   csv_file=csv_file,
                                   image_set='trainval',
                                   split='test',
                                   download=True,
                                   transform=transform,
                                   target_transform=target_transform)
            restore_path_this_net = make_save_path(restore_path, net_name, net_number, epochs)
            print(f'Loading model from {restore_path_this_net}')
            assayer = VOCAssayer.from_config(net_name=net_name,
                                             num_classes=num_classes,
                                             loss_func=loss_func,
                                             testset=testset,
                                             mode=mode,
                                             embedding_n_out=embedding_n_out,
                                             restore_path=restore_path_this_net,
                                             batch_size=batch_size,
                                             device=device,
                                             num_workers=num_workers,
                                             data_parallel=data_parallel)
            results = assayer.assay()
            # --- add columns to image + trial dataframes before appending to list
            for key in ('images_df', 'trials_df'):
                df = results[key]
                df['net_name'] = net_name
                df['replicate'] = net_number
                df['mode'] = mode
                df['method'] = method
                df['loss_func'] = loss_func
                df['restore_path'] = restore_path_this_net
                df_lists[key].append(df)
            # ---- add columns + metrics to our 'results across replicates' records for that data frame
            assay_records['net_name'].append(net_name)
            assay_records['replicate'].append(net_number)
            assay_records['mode'].append(mode)
            assay_records['method'].append(method)
            assay_records['loss_func'].append(loss_func)
            # BUGFIX: was ``assay_records['restore_path'] = restore_path_this_net``,
            # which replaced the per-replicate list with a single string and broke
            # pd.DataFrame.from_records (unequal column lengths).
            assay_records['restore_path'].append(restore_path_this_net)
            for metric in ['acc', 'd_prime']:
                assay_records[metric].append(results[metric])
            results_str = ', '.join(
                [f'{key}: {results[key]:7.3f}'
                 for key in ['acc', 'd_prime']]
            )
            print(f'assay results: {results_str}')
            arrays_per_model[restore_path_this_net] = results['arrays']
        # ---- create results dict, save to results.gz file
        if not os.path.isdir(test_results_save_path):
            os.makedirs(test_results_save_path)
        results_fname_stem = str(Path(configfile).stem)  # remove .ini extension
        arrays_fname = os.path.join(test_results_save_path,
                                    f'{results_fname_stem}_trained_{epochs}_epochs_assay_arrays.gz')
        joblib.dump(arrays_per_model, arrays_fname)
        summary_csv_fname = os.path.join(test_results_save_path,
                                         f'{results_fname_stem}_trained_{epochs}_epochs_assay_results.csv')
        results_df = pd.DataFrame.from_records(assay_records)
        results_df.to_csv(summary_csv_fname, index=False)
        for key, df_list in df_lists.items():
            csv_fname = f"{results_fname_stem}_trained_{epochs}_epochs_assay_{key.replace('_df', '')}.csv"
            csv_path = os.path.join(test_results_save_path, csv_fname)
            df = pd.concat(df_list)
            df.to_csv(csv_path, index=False)
|
# Read an integer and list all of its positive and negative divisors.
number = int(input("Podaj cyfrę: "))
divisors_list = []
if number == 0:
    # Zero is divisible by every non-zero number.
    print("Podzielnikami liczby 0 są wszystkie liczby rzeczywiste z wyłączeniem 0")
else:
    original = number
    number = abs(number)
    # A proper divisor never exceeds half the number.
    for candidate in range(1, number // 2 + 1):
        if number % candidate == 0:
            divisors_list.append(-candidate)
            divisors_list.append(candidate)
    # The number itself and its negation always divide it.
    divisors_list.append(number)
    divisors_list.append(-number)
    print("Podzielniki liczby", original, "to:", divisors_list)
|
"""Custom OpenAPI schema."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Callable
from fastapi.openapi import docs, utils
import imagesecrets
from imagesecrets.api.dependencies import get_config
if TYPE_CHECKING:
from fastapi import FastAPI
from fastapi.responses import HTMLResponse
# Application settings singleton, read once at import time.
config = get_config()
def custom(
    app: FastAPI,
    /,
    swagger: bool = True,
    redoc: bool = True,
) -> Callable[[], dict[str, Any]]:
    """Return custom OpenAPI generation function.
    :param app: Application instance
    :param swagger: Whether to show SwaggerUI documentation
    :param redoc: Whether to show ReDoc documentation
    """
    if swagger:
        # Re-register /docs so the Swagger page uses the custom favicon.
        @app.get("/docs", include_in_schema=False)
        def override_swagger() -> HTMLResponse:
            """Override Swagger UI."""
            return docs.get_swagger_ui_html(
                openapi_url="/openapi.json",
                title="ImageSecrets",
                swagger_favicon_url=config.icon_url,
            )
    if redoc:
        # Re-register /redoc likewise.
        @app.get("/redoc", include_in_schema=False)
        def override_redoc() -> HTMLResponse:
            """Override ReDoc."""
            return docs.get_redoc_html(
                openapi_url="/openapi.json",
                title="ImageSecrets",
                redoc_favicon_url=config.icon_url,
            )
    def schema() -> dict[str, Any]:
        """Return custom OpenAPI schema."""
        # Generate once, then memoize on the app instance.
        nonlocal app
        if app.openapi_schema:
            return app.openapi_schema
        openapi_schema = utils.get_openapi(
            title="ImageSecrets",
            version=imagesecrets.__version__,
            description="Encode and decode messages from images!",
            routes=app.routes,
        )
        # ReDoc renders the logo from the "x-logo" extension field.
        openapi_schema["info"]["x-logo"] = {"url": config.icon_url}
        app.openapi_schema = openapi_schema
        return openapi_schema
    return schema
|
# Generated by Django 2.1.7 on 2019-02-17 17:44
from django.db import migrations
class Migration(migrations.Migration):
    """Rename model InstanceValues -> InstanceValue (no schema change)."""
    dependencies = [
        ('images_backend', '0002_auto_20190217_1555'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='InstanceValues',
            new_name='InstanceValue',
        ),
    ]
|
# -*- coding: utf-8 -*-
# Generate QR Code of any type.
# It's based on PyQRCode Library.
import pyqrcode
# Build a QR code for the fixed text and write it out as a PNG at 10x scale.
qr=pyqrcode.create('QrCode Generator')
qr.png('generator.png', scale=10)
|
# coding: utf-8
import os
import sys
import time
import pytest
import easytrader
sys.path.append(".")
class TestThsClientTrader:
    """Integration tests driving the TongHuaShun ("ths") desktop trading client.

    NOTE: these tests require the broker terminal to be installed, running and
    logged in; they place/cancel real orders and are not CI-safe.
    """
    @pytest.fixture
    def user(self):
        # Attach to an already-running THS client; credentials are commented out.
        _user = easytrader.use("ths")
        # _user.prepare(
        # user="31016658", password= "211122", comm_password="211122"
        # )
        _user.enable_type_keys_for_editor()
        _user.connect()
        return _user
    def test_balance(self, user):
        # Query [F4] -> account funds balance (查询[F4] 资金余额)
        # time.sleep(3)
        result = user.balance
        print(result)
    def test_position(self, user):
        # Query [F4] -> funds and stock positions (查询[F4] 资金股票)
        # time.sleep(3)
        result = user.position
        print(result)
    def test_today_entrusts(self, user):
        # Today's entrusted (pending) orders (当日委托)
        result = user.today_entrusts
        print(result)
    def test_today_trades(self, user):
        # Today's executed trades (当日成交)
        result = user.today_trades
        print(result)
    def test_cancel_entrusts(self, user):
        # Cancel-order [F3] list (撤单[F3] 列表)
        result = user.cancel_entrusts
        print(result)
    def test_cancel_entrust(self, user):
        # Cancel a single order by contract number, given as a string (撤单 根据合同编号)
        result = user.cancel_entrust('4245')
        print(result)
    def test_buy(self, user):
        # Buy [F1] (买入[F1])
        result = user.buy("600000", 10.00, 100)
        print(result)
    def test_invalid_buy(self, user):
        with pytest.raises(easytrader.exceptions.TradeError):
            result = user.buy("511990", 1, 1e10)
            print(result)
    def test_sell(self, user):
        result = user.sell("600000", 10.00, 100)
        print(result)
    def test_invalid_sell(self, user):
        with pytest.raises(easytrader.exceptions.TradeError):
            result = user.sell("162411", 200, 1e10)
            print(result)
    def test_auto_ipo(self, user):
        user.auto_ipo()
|
from simobject import SimObject
class Robot(SimObject):
    """The robot is a :class:`~simobject.SimObject` that implements drawing
    and information functions to interface with supervisor.
    This class is not intended to be subclassed in user code. Use one
    of the provided subclasses instead: :class:`~robot.SimBot` for emulated robots
    or :class:`~robot.RealBot` for physical robots.
    """
    # Class-level default so ``log()`` is safe even when ``set_logqueue()``
    # was never called (previously that raised AttributeError).
    logqueue = None
    def get_info(self):
        """Return the robot information structure, including sensor readings and
        shape information"""
        raise NotImplementedError("Robot.get_info")
    def set_inputs(self,inputs):
        """Set drive inputs in the format needed by this robot"""
        pass
    def draw_sensors(self,renderer):
        """Draw the sensors that this robot has"""
        pass
    def set_logqueue(self,logqueue):
        # Queue shared with the supervisor; log() appends (robot, message) pairs.
        self.logqueue = logqueue
    def log(self, message):
        """Print *message*, and record it in the log queue when one is attached."""
        print("{}: {}".format(self.__class__.__name__,message))
        if self.logqueue is not None:
            self.logqueue.append((self,message))
class SimBot(Robot):
    """The robot defined by this class is a simulated robot, and implements
    its own motion in :meth:`~robot.SimBot.move`.
    To implement a new type of robot, subclass :class:`SimBot` and implement
    :meth:`~robot.Robot.get_info` and :meth:`~robot.SimBot.get_external_sensors`.
    To make your robot move, implement :meth:`~robot.SimBot.move`.
    To make you robot controllable, implement :meth:`~robot.Robot.set_inputs`.
    If your robot has sensors that can be drawn in the view, implement
    :meth:`~robot.Robot.draw_sensors`.
    """
    def move(self,dt):
        """Move the robot for a time interval `dt`."""
        # Base implementation is stationary; subclasses integrate motion here.
        # NOTE(review): dt is presumably seconds — confirm against the supervisor.
        pass
    def get_external_sensors(self):
        """Get the external sensors of the robot as a list.
        This function is used to update the sensor readings in proximity
        sensors."""
        raise NotImplementedError("SimBot.get_external_sensors")
class RealBot(Robot):
    """This type of robots implements communication with a real-world robot.
    Although this is a SimObject, it doesn't move by itself.
    Use :meth:`~simobject.SimObject.set_pose()` to move the robot.
    """
    def update_external_info(self):
        """Initiate communication with the real robot and get state info back.
        """
        raise NotImplementedError("RealBot.update_external_info")
    def pause(self):
        """Stops the robot, saving the state"""
        raise NotImplementedError("RealBot.pause")
    def resume(self):
        """Restarts the robot from the saved state"""
        raise NotImplementedError("RealBot.resume")
|
############# Credits and version info #############
# Definition generated from Assembly XML tag def
# Date generated: 2018/12/03 04:56
#
# revision: 1 author: Assembly
# Generated plugin from scratch.
# revision: 2 author: -DeToX-
# Fixed some Reflexives..
# revision: 3 author: -DeToX-
# Named some values..
# revision: 4 author: Lord Zedd
# Enums, better names, etc.
# revision: 5 author: Moses_of_Egypt
# Cleaned up and converted to SuPyr definition
#
####################################################
from ..common_descs import *
from .objs.tag import *
from supyr_struct.defs.tag_def import TagDef
# Enumerations used by the effect ("effe") tag structs below.
effe_event_part_camera_mode = (
    "independent_of_camera_mode",
    "first_person_only",
    "third_person_only",
    "both_first_and_third",
    )
effe_event_part_create_in_disposition = (
    "either_mode",
    "violent_mode_only",
    "nonviolent_mode_only",
    )
effe_event_part_create_in_environment = (
    "any_environment",
    "air_only",
    "water_only",
    "space_only",
    )
effe_event_particle_system_coordinate_system = (
    "world",
    "local",
    "ancestor",
    )
# An effect "location": a named marker plus opaque padding fields.
effe_location = Struct("location",
    h3_string_id("marker_name"),
    SInt32("unknown", VISIBLE=False),
    Array("unknown_array", SUB_STRUCT=SInt8("unknown"), SIZE=4, VISIBLE=False),
    ENDIAN=">", SIZE=12
    )
# A "part" spawned by an event: which tag to create, where, and with what motion.
effe_event_part = Struct("part",
    SEnum16("create_in_environment", *effe_event_part_create_in_environment),
    SEnum16("create_in_disposition", *effe_event_part_create_in_disposition),
    SInt16("location_index"),
    SInt16("unknown_0", VISIBLE=False),
    SInt16("unknown_1", VISIBLE=False),
    SInt8("unknown_2", VISIBLE=False),
    SEnum8("camera_mode", *effe_event_part_camera_mode),
    StrLatin1("anticipated_tag_class", SIZE=4),
    h3_dependency("spawned_tag"),
    QStruct("velocity_bounds", INCLUDE=from_to),
    BytesRaw("unknown_3", SIZE=8, VISIBLE=False),
    float_rad("velocity_cone_angle"),
    from_to_rad("angular_velocity_bounds"),
    QStruct("radius_modifier_bounds", INCLUDE=from_to),
    QStruct("origin_offset", INCLUDE=xyz_float),
    yp_float_rad("origin_rotation"),
    Bool32("a_scales_values", *unknown_flags_32),
    Bool32("b_scales_values", *unknown_flags_32),
    ENDIAN=">", SIZE=96
    )
# A directional acceleration applied by an event at a location.
effe_event_acceleration = Struct("acceleration",
    SEnum16("create_in_environment", *effe_event_part_create_in_environment),
    SEnum16("create_in_disposition", *effe_event_part_create_in_disposition),
    SInt16("location_index"),
    SInt16("unknown", VISIBLE=False),
    Float("acceleration"),
    Float("inner_cone_angle"),
    Float("outer_cone_angle"),
    ENDIAN=">", SIZE=20
    )
# Nested helper structs for the emitter; layouts reverse-engineered, mostly unknown.
effe_event_particle_system_emitter_unknown_21_unknown_1 = Struct("unknown_1",
    BytesRaw("unknown_0", SIZE=4, VISIBLE=False),
    SInt8("input"),
    SInt8("input_range"),
    SEnum8("output_kind", *cntl_contrail_system_output_kind_0),
    SInt8("output"),
    h3_rawdata_ref("unknown_1"),
    BytesRaw("unknown_2", SIZE=8, VISIBLE=False),
    VISIBLE=False,
    ENDIAN=">", SIZE=36
    )
effe_event_particle_system_emitter_unknown_21 = Struct("unknown_21",
    BytesRaw("unknown_0", SIZE=4, VISIBLE=False),
    h3_reflexive("unknown_1", effe_event_particle_system_emitter_unknown_21_unknown_1),
    BytesRaw("unknown_2", SIZE=8, VISIBLE=False),
    VISIBLE=False,
    ENDIAN=">", SIZE=24
    )
effe_event_particle_system_emitter_unknown_39 = Struct("unknown_39",
    Array("unknown_array", SUB_STRUCT=Float("unknown"), SIZE=4, VISIBLE=False),
    VISIBLE=False,
    ENDIAN=">", SIZE=16
    )
effe_event_particle_system_emitter_compiled_function = Struct("compiled_function",
    Array("unknown_array", SUB_STRUCT=Float("unknown"), SIZE=16, VISIBLE=False),
    VISIBLE=False,
    ENDIAN=">", SIZE=64
    )
effe_event_particle_system_emitter_compiled_color_function = Struct("compiled_color_function",
    color_rgb_float("color"),
    Float("magnitude"),
    ENDIAN=">", SIZE=16
    )
# Particle emitter: a long run of repeated (input, input_range, output_kind,
# output, rawdata ref, padding) property groups — one per animated property —
# followed by the particle physics dependency and compiled-function reflexives.
effe_event_particle_system_emitter = Struct("emitter",
    h3_string_id("name"),
    Bool16("unknown_0", *unknown_flags_32),
    SInt16("unknown_1", VISIBLE=False),
    BytesRaw("unknown_2", SIZE=16, VISIBLE=False),
    SInt8("input_0"),
    SInt8("input_range_0"),
    SEnum8("output_kind_0", *cntl_contrail_system_output_kind_0),
    SInt8("output_0"),
    h3_rawdata_ref("unknown_3"),
    BytesRaw("unknown_4", SIZE=32, VISIBLE=False),
    SInt8("input_1"),
    SInt8("input_range_1"),
    SEnum8("output_kind_1", *cntl_contrail_system_output_kind_0),
    SInt8("output_1"),
    h3_rawdata_ref("unknown_5"),
    BytesRaw("unknown_6", SIZE=32, VISIBLE=False),
    SInt8("input_2"),
    SInt8("input_range_2"),
    SEnum8("output_kind_2", *cntl_contrail_system_output_kind_0),
    SInt8("output_2"),
    h3_rawdata_ref("unknown_7"),
    BytesRaw("unknown_8", SIZE=8, VISIBLE=False),
    SInt8("input_3"),
    SInt8("input_range_3"),
    SEnum8("output_kind_3", *cntl_contrail_system_output_kind_0),
    SInt8("output_3"),
    h3_rawdata_ref("unknown_9"),
    BytesRaw("unknown_10", SIZE=8, VISIBLE=False),
    SInt8("input_4"),
    SInt8("input_range_4"),
    SEnum8("output_kind_4", *cntl_contrail_system_output_kind_0),
    SInt8("output_4"),
    h3_rawdata_ref("unknown_11"),
    BytesRaw("unknown_12", SIZE=8, VISIBLE=False),
    SInt8("input_5"),
    SInt8("input_range_5"),
    SEnum8("output_kind_5", *cntl_contrail_system_output_kind_0),
    SInt8("output_5"),
    h3_rawdata_ref("unknown_13"),
    BytesRaw("unknown_14", SIZE=8, VISIBLE=False),
    SInt8("input_6"),
    SInt8("input_range_6"),
    SEnum8("output_kind_6", *cntl_contrail_system_output_kind_0),
    SInt8("output_6"),
    h3_rawdata_ref("unknown_15"),
    BytesRaw("unknown_16", SIZE=8, VISIBLE=False),
    SInt8("input_7"),
    SInt8("input_range_7"),
    SEnum8("output_kind_7", *cntl_contrail_system_output_kind_0),
    SInt8("output_7"),
    h3_rawdata_ref("unknown_17"),
    BytesRaw("unknown_18", SIZE=8, VISIBLE=False),
    SInt8("input_8"),
    SInt8("input_range_8"),
    SEnum8("output_kind_8", *cntl_contrail_system_output_kind_0),
    SInt8("output_8"),
    h3_rawdata_ref("unknown_19"),
    BytesRaw("unknown_20", SIZE=8, VISIBLE=False),
    h3_dependency("particle_physics"),
    Array("unknown_array", SUB_STRUCT=SInt8("unknown"), SIZE=4, VISIBLE=False),
    h3_reflexive("unknown_21", effe_event_particle_system_emitter_unknown_21),
    SInt8("input_9"),
    SInt8("input_range_9"),
    SEnum8("output_kind_9", *cntl_contrail_system_output_kind_0),
    SInt8("output_9"),
    h3_rawdata_ref("unknown_22"),
    BytesRaw("unknown_23", SIZE=32, VISIBLE=False),
    SInt8("input_10"),
    SInt8("input_range_10"),
    SEnum8("output_kind_10", *cntl_contrail_system_output_kind_0),
    SInt8("output_10"),
    h3_rawdata_ref("unknown_24"),
    BytesRaw("unknown_25", SIZE=8, VISIBLE=False),
    SInt8("input_11"),
    SInt8("input_range_11"),
    SEnum8("output_kind_11", *cntl_contrail_system_output_kind_0),
    SInt8("output_11"),
    h3_rawdata_ref("unknown_26"),
    BytesRaw("unknown_27", SIZE=8, VISIBLE=False),
    SInt8("input_12"),
    SInt8("input_range_12"),
    SEnum8("output_kind_12", *cntl_contrail_system_output_kind_0),
    SInt8("output_12"),
    h3_rawdata_ref("unknown_28"),
    BytesRaw("unknown_29", SIZE=8, VISIBLE=False),
    SInt8("input_13"),
    SInt8("input_range_13"),
    SEnum8("output_kind_13", *cntl_contrail_system_output_kind_0),
    SInt8("output_13"),
    h3_rawdata_ref("unknown_30"),
    BytesRaw("unknown_31", SIZE=8, VISIBLE=False),
    SInt8("input_14"),
    SInt8("input_range_14"),
    SEnum8("output_kind_14", *cntl_contrail_system_output_kind_0),
    SInt8("output_14"),
    h3_rawdata_ref("particle_scale"),
    BytesRaw("unknown_32", SIZE=8, VISIBLE=False),
    SInt8("input_15"),
    SInt8("input_range_15"),
    SEnum8("output_kind_15", *cntl_contrail_system_output_kind_0),
    SInt8("output_15"),
    h3_rawdata_ref("particle_tint"),
    BytesRaw("unknown_33", SIZE=8, VISIBLE=False),
    SInt8("input_16"),
    SInt8("input_range_16"),
    SEnum8("output_kind_16", *cntl_contrail_system_output_kind_0),
    SInt8("output_16"),
    h3_rawdata_ref("particle_alpha"),
    BytesRaw("unknown_34", SIZE=8, VISIBLE=False),
    SInt8("input_17"),
    SInt8("input_range_17"),
    SEnum8("output_kind_17", *cntl_contrail_system_output_kind_0),
    SInt8("output_17"),
    h3_rawdata_ref("particle_alpha_black_point"),
    BytesRaw("unknown_35", SIZE=8, VISIBLE=False),
    SInt32("unknown_36", VISIBLE=False),
    SInt32("unknown_37", VISIBLE=False),
    SInt32("unknown_38", VISIBLE=False),
    h3_reflexive("unknown_39", effe_event_particle_system_emitter_unknown_39),
    h3_reflexive("compiled_functions", effe_event_particle_system_emitter_compiled_function),
    h3_reflexive("compiled_color_functions", effe_event_particle_system_emitter_compiled_color_function),
    ENDIAN=">", SIZE=752
    )
# One particle system attached to an event: the particle tag plus placement,
# LOD settings, and a reflexive of emitters.
effe_event_particle_system = Struct("particle_system",
    Array("unknown_array_0", SUB_STRUCT=SInt8("unknown"), SIZE=4, VISIBLE=False),
    h3_dependency("particle"),
    SInt16("unknown_0", VISIBLE=False),
    SInt16("location_index"),
    SEnum16("coordinate_system", *effe_event_particle_system_coordinate_system),
    SEnum16("environment", *effe_event_part_create_in_environment),
    SEnum16("disposition", *effe_event_part_create_in_disposition),
    SEnum16("camera_mode", *effe_event_part_camera_mode),
    SInt16("sort_bias"),
    Bool16("flags", *unknown_flags_16),
    Array("unknown_array_1", SUB_STRUCT=Float("unknown"), SIZE=4, VISIBLE=False),
    BytesRaw("unknown_1", SIZE=4, VISIBLE=False),
    Float("unknown_2", VISIBLE=False),
    Float("amount_size"),
    Float("unknown_3", VISIBLE=False),
    Float("lod_in_distance"),
    Float("lod_feather_in_delta"),
    h3_reflexive("emitters", effe_event_particle_system_emitter),
    Float("unknown_4", VISIBLE=False),
    ENDIAN=">", SIZE=92
    )
# An effect event: timing bounds plus reflexives of parts, accelerations,
# and particle systems.
effe_event = Struct("event",
    h3_string_id("name"),
    SInt32("unknown", VISIBLE=False),
    Array("unknown_array", SUB_STRUCT=SInt8("unknown"), SIZE=4, VISIBLE=False),
    Float("skip_fraction"),
    QStruct("delay_bounds", INCLUDE=from_to),
    QStruct("duration_bounds", INCLUDE=from_to),
    h3_reflexive("parts", effe_event_part),
    h3_reflexive("accelerations", effe_event_acceleration),
    h3_reflexive("particle_systems", effe_event_particle_system),
    ENDIAN=">", SIZE=68
    )
effe_unknown_9 = Struct("unknown_9",
    BytesRaw("unknown", SIZE=12, VISIBLE=False),
    VISIBLE=False,
    ENDIAN=">", SIZE=12
    )
# Top-level body for the "effe" (effect) tag.
effe_body = Struct("tagdata",
    Bool32("flags",
        ("dark_casings", 1 << 10),
        ),
    SInt32("unknown_0", VISIBLE=False),
    Float("unknown_1", VISIBLE=False),
    BytesRaw("unknown_2", SIZE=4, VISIBLE=False),
    Float("unknown_3", VISIBLE=False),
    Array("unknown_array", SUB_STRUCT=SInt8("unknown"), SIZE=4, VISIBLE=False),
    SInt16("loop_start_event"),
    SInt16("unknown_4", VISIBLE=False),
    BytesRaw("unknown_5", SIZE=4, VISIBLE=False),
    h3_reflexive("locations", effe_location),
    h3_reflexive("events", effe_event),
    h3_dependency("looping_sound"),
    SInt8("location_index"),
    SInt8("event_index"),
    SInt16("unknown_6", VISIBLE=False),
    Float("always_play_distance"),
    Float("never_play_distance"),
    Float("unknown_7", VISIBLE=False),
    Float("unknown_8", VISIBLE=False),
    h3_reflexive("unknown_9", effe_unknown_9),
    ENDIAN=">", SIZE=104
    )
def get():
    # Module-level accessor used by the tag loader to obtain this definition.
    return effe_def
# Complete tag definition: blam header + body, big-endian throughout.
effe_def = TagDef("effe",
    h3_blam_header('effe'),
    effe_body,
    ext=".%s" % h3_tag_class_fcc_to_ext["effe"], endian=">", tag_cls=H3Tag
    )
import argparse
import logging
from py4gh.utility import decrypt_files, encrypt_files, get_files, process_output
def cli_parser() -> None:
    """CLI entry point: encrypt or decrypt a directory tree with crypt4gh."""
    parser = argparse.ArgumentParser(
        description="Decrypt/Encrypt files in root dir and sub dir using crypt4gh"
    )
    parser.add_argument(
        "--filepath", "-f", required=True, type=str, nargs=1,
        help="Root dir where encrypted files are stored",
    )
    parser.add_argument(
        "--secret", "-s", required=True, nargs=1, type=str,
        help="Path to the secret file",
    )
    parser.add_argument(
        "--task", "-t", default=["decrypt"], nargs=1,
        help="to encrypt or to decrypt defaults to decrypt",
    )
    parser.add_argument(
        "--pubkey", "-pks", nargs="*", type=str,
        help="recipient public key, supports multiple public keys",
    )
    args = parser.parse_args()
    task = args.task[0]
    logging.info(f"Task received {task}")
    # print(args.pubkey)
    if task == "encrypt":
        # Encryption requires at least one recipient public key.
        if args.pubkey is None:
            print("Public key is missing : -pks cannot be empty for encrypt task")
            return
        f, s = encrypt_files(
            sec_key=args.secret[0], pub_key=args.pubkey,
            files=get_files(args.filepath[0]),
        )
        process_output(f, s, "encrypt")
    elif task == "decrypt":
        # Only files carrying the crypt4gh extension are decryption candidates.
        f, s = decrypt_files(args.secret[0], get_files(args.filepath[0], "c4gh"))
        process_output(f, s, "decrypt")
    else:
        print("received unknown task, task can only be encrypt or decrypt")
if __name__ == "__main__":
    # Script entry point.
    cli_parser()
|
"""
TrexGUIZ0Plan.py - Services for importing data to the thesarus and GUI.
Using this thesarus a producer or consumer can locate a common field to process.
It can, with a few appropriate changes, connect to anyEnv Uci driven.
It knows about ArchEdenZ0Plan Uci so covers much of the processing.
*Trex is short for Trex thesarus, right?!
Copyright (c) 2020, Robert Russell Millward, all rights reserved.
"""
#from ArchEdenZ0Plan import * #as anyEnv
import ArchEdenZ0Plan as anyEnv
from tkinter import *
from datetime import *
def rptOpenDateRight():
    "Print a common look and feel date/time (dd/mm/yyyy hh:mm) ending the line."
    # NOTE: this module does `from datetime import *`, which binds the
    # *class* as `datetime`; the original `datetime.datetime.today()`
    # therefore raised AttributeError at runtime.
    gotIt = datetime.today()
    # Same output as the original chain of print(..., end='') calls.
    print("%02d/%02d/%04d %02d:%02d"
          % (gotIt.day, gotIt.month, gotIt.year, gotIt.hour, gotIt.minute))
    return
class trexguiField():
    """One thesaurus field: its Uci id, synonym list, header format, and
    the Tk Entry widget that displays/edits its current value."""

    def __init__(self, uci, hdrFmtQ, synonyms, value='X'):
        print("Field", synonyms, uci.value, self)
        self.uci = uci          # unique column id (enum member with .value)
        self.names = synonyms   # accepted column-name synonyms; [0] is the header
        # Store the header format string: yourRptColHdr() reads `.hdrFmt`,
        # so the original code (which never assigned it) raised AttributeError.
        self.hdrFmt = hdrFmtQ
        # The field's current value. The rest of the module accesses it as
        # `.ievalue` (importYourCsvValue, reportYourField, explainYour);
        # `.contents` is kept as an alias for any older callers.
        self.ievalue = StringVar()
        self.contents = self.ievalue
        self.ievalue.set(synonyms[0])  # column header as the initial value
        self.ieCtl = Entry()
        self.ieCtl.pack()
        self.ieCtl["textvariable"] = self.ievalue
        #self.ieCtl.bind('<Key-Return>', trexguiField.TODO);
        #self.seeEm.bind('<Key-Return>', self.myDoPrintOnLog)
        return
# The trexxCtrl is a single argument to the functions that do the work.
# the baseUci is the lowest numbered Uci
# of the field group served by the child.
# Create the default Tk root window before any widgets are constructed.
root = Tk();
class trexxCtrl():
    """Control record bundling a thesaurus category with its field list and
    the two reverse-lookup index lists (by csv column and by Uci)."""
    def __init__(self, category, fields, baseUci, viaInCol, viaUci):
        # category: report/category label, e.g. "TREX"
        # fields:   list of trexguiField objects
        # baseUci:  lowest-numbered Uci of the field group (index origin)
        # viaInCol: reverse index keyed by csv column number
        # viaUci:   reverse index keyed by (uci - baseUci)
        print("Control", baseUci.value, self);
        self.category = category;
        self.subCategory = "TBD";
        self.refFields = fields;
        self.baseUci = baseUci;
        self.refViaInCol = viaInCol;
        self.refViaUci = viaUci;
        return;
    # * Master creator
    # NOTE(review): this calls Frame.__init__ and self.myCreateButtons,
    # but trexxCtrl does not subclass Frame and defines no myCreateButtons;
    # it looks like it was written for a Frame-derived class — calling it
    # on a trexxCtrl instance would fail. TODO confirm intent.
    def Merge__init__(self, master=None):
        Frame.__init__(self, master);
        self.pack();
        self.myCreateButtons();
        #for stuff in self: print(stuff);
        #master.title = "Master title";
        #self.title = "Self title";
        return;
# This is the humble thesaurus prototype or parent.
# Children of this class are useful.
#
class TrexGUIThesarus():
    # NOTE(review): many methods below take no `self` parameter and refer
    # to names (trexThesarus, trexGUIThesarus, thrsCtrl, csvColValue) that
    # are not defined in this module — presumably supplied or overridden
    # by a child module. TODO confirm against the rest of the project.

    # These variables must be overridden in any child.
    # one reverse index of each kind per field
    thrsViaInCol = [-1,-1]; # varies by file being imported
    thrsViaUci = [-1,-1];   # keyed by (uci - baseUci)
    # the data fields
    thrsFields = [
        #trexguiField(anyEnv.Uci.metroRowId, "%-8s", ["rowId"] ),
        #trexguiField(anyEnv.Uci.countryName, "%-8s", ["country", "nation"])
        ];
    # Control record for this (empty, prototype) field set; built once at
    # class-creation time.
    thrsCtrl = trexxCtrl("TREX", thrsFields, anyEnv.Uci.metroRowId, thrsViaInCol, thrsViaUci);

    # Doers
    # - sayHi
    # - loadIt
    # - printOnLog
    #
    def myDoSayHi(self):
        # Button callback: demo greeting.
        print("hi Bob");
        return;
    def myDoLoadIt(self):
        # Button callback: placeholder for the real data-load action.
        print("Loaded");
        return;
    def myDoPrintOnLog(self, event):
        # Key-binding callback: echo the bound widget's current contents.
        print("Log->", self.contents.get());
        #self.contents.set("So there");
        return;

    # Creators
    # - Buttons
    # - Texts
    #
    # * Buttons (Create)
    # ** quit
    # ** hi
    # ** load
    def myCreateQuit(self):
        # Red "Quit" button wired to Tk's quit.
        self.QUIT = Button(self);
        self.QUIT["text"] = "Quit";
        self.QUIT["fg"] = "red";
        self.QUIT["command"] = self.quit; # probably class
        self.QUIT.pack({"side": "left"});
        return;
    def myCreateSayHi(self):
        self.hiThere = Button(self);
        # NOTE(review): the trailing comma makes this assign the tuple
        # ("Hello",) rather than the string — TODO confirm/fix.
        self.hiThere["text"] = "Hello",
        self.hiThere["command"] = self.myDoSayHi;
        self.hiThere.pack({"side": "left"});
        return;
    def myCreateLoadData(self):
        self.loadData = Button(self);
        # NOTE(review): trailing comma assigns the tuple ("Load",).
        self.loadData["text"] = "Load",
        self.loadData["fg"] = "green";
        self.loadData["command"] = self.myDoLoadIt;
        self.loadData.pack({"side": "left"});
        return;
    # * Menues
    # ** Buttons]
    def myCreateButtons(self):
        # Create all buttons, packed left to right: Hello, Load, Quit.
        self.myCreateSayHi();
        self.myCreateLoadData();
        self.myCreateQuit();
        return;

    # The *Your* services serve any instance.
    # They do not need to be overridden.
    def initYourAtStart(yourCtrl):
        "Initialize your thesarus."
        # Build the uci -> field-index reverse map for every field.
        #print("Initializing Your", yourCtrl.refFields[2].uci);
        yourViaUci = yourCtrl.refViaUci;
        yourFields = yourCtrl.refFields;
        thrsIx = -1;
        for tr in yourFields:
            thrsIx = thrsIx + 1;
            # Slot is the field's uci offset from the group's base uci.
            yourViaUci[tr.uci.value - yourCtrl.baseUci.value] = thrsIx;
            #print("########", tr.uci, "=", thrsIx, "##");
        return;
    def initYourForCvsHeader(yourCtrl, csvColName, csvColNbr):
        "Initialize your one column Ix by locating its matching synonym."
        #print("InitializingZ YourCsv", yourCtrl.refFields[2].uci);
        #print("InitializingZ YourCsv", yourCtrl.refViaUci[2]);
        #print("InitializingZ YourCsv", yourCtrl.refViaInCol[2]);
        yourViaInCol = yourCtrl.refViaInCol;
        yourFields = yourCtrl.refFields;
        trIx = -1;
        for tr in yourFields:
            trIx = trIx + 1;
            # Match the incoming csv header against each field's synonyms.
            for tfn in tr.names:
                #print(tfn);
                if(tfn == csvColName):
                    #print("YourCsvHeader", csvColName, csvColNbr, trIx);
                    yourViaInCol[csvColNbr] = trIx;
                    break;
        return;
    def importYourCsvValue(yourCtrl, csvColVal, csvColNbr):
        "Import your column value to its designated thesarus field."
        yourViaInCol = yourCtrl.refViaInCol;
        yourFields = yourCtrl.refFields;
        thryNbr = yourViaInCol[csvColNbr];
        # Assumes each field exposes an `.ievalue` StringVar — verify
        # against trexguiField's __init__.
        yourFields[thryNbr].ievalue.set(csvColVal);
        #print("import1", csvColNbr, csvColVal, thryNbr);
        #print("import2", yourFields[thryNbr].ievalue);
        return;
    def importYourLine(yourCtrl, dataRow, rptCols):
        "Import into the thesarus this data row."
        selColIx = -1;
        for selColVal in dataRow:
            #print("Working on column ", selColVal);
            selColIx = selColIx + 1;
            #print(selRowIx, selColIx, selColVal);
            # NOTE(review): `trexThesarus` is not defined in this module.
            trexThesarus.importYourCsvValue(yourCtrl, selColVal, selColIx);
    def yourHdrLeftSubMidRight(yourCtrl, hdrSubAndMid):
        # Print the report header line: [category/sub] mid-text date-time.
        print("[%s/" %yourCtrl.category, end='');
        print("%s]" %hdrSubAndMid[0], end='');
        print(" %s " %hdrSubAndMid[1], end='');
        rptOpenDateRight();
        return;
    def yourRptColHdr(yourCtrl, uciList):
        "Print the headers for the desired columns."
        #print("Column headers are coming");
        for uciIx in uciList:
            # Map uci -> field index, then print its formatted header.
            thrsIx = yourCtrl.refViaUci[uciIx.value - yourCtrl.baseUci.value];
            #print("\n####", uciIx, thrsIx, );
            print(yourCtrl.refFields[thrsIx].hdrFmt %yourCtrl.refFields[thrsIx].names[0], end='');
        print();
        return;
    # 100% flexible report generator in two parts
    # part 1 - output one formatted field
    def reportYourField(yourCtrl, uci):
        "Print one trex Data field"
        #print("\n********", uci, "##", end='');
        if(uci == anyEnv.Uci.uciEOL):
            # Pseudo-uci marking end of line: just emit the newline.
            print(end='\n');
        else:
            thrsIx = yourCtrl.refViaUci[uci.value - yourCtrl.baseUci.value];
            # NOTE(review): these format the StringVar object itself, not
            # its .get() value — TODO confirm intent.
            if(uci == anyEnv.Uci.townName):
                print('%-12s' %(yourCtrl.refFields[thrsIx].ievalue), end='');
            elif(uci == anyEnv.Uci.stateName):
                print("%-8s" %(yourCtrl.refFields[thrsIx].ievalue), end='');
            elif(uci == anyEnv.Uci.countryName):
                print("%-8s" %(yourCtrl.refFields[thrsIx].ievalue), end='');
            elif(uci == anyEnv.Uci.townLongLat):
                print("%-12s" %(yourCtrl.refFields[thrsIx].ievalue), end='');
        return;
    # part 2 - output all of the desired fields for a desired row
    # including appending a new-line.
    def reportYourFields(yourCtrl, uciLst):
        "Print all your identified trex fields in the given order"
        for uci in uciLst: trexThesarus.reportYourField(yourCtrl, uci);
        # Put the new-line.
        trexThesarus.reportYourField(yourCtrl, anyEnv.Uci.uciEOL);
        return;
    def explainYour(yourCtrl): # will revise to __str__(self):
        "Explain your thesarus in its current state."
        yourViaUci = yourCtrl.refViaUci;
        yourViaInCol = yourCtrl.refViaInCol;
        yourFields = yourCtrl.refFields;
        print("Begin explanation:");
        # Dump every field and its synonyms...
        trIx = -1;
        for tr in yourFields:
            trIx = trIx + 1;
            print("thesarusColumn[", trIx, "] uci=", tr.uci, ", value=", tr.ievalue);
            for tfn in tr.names:
                print(" Synonym=", tfn);
        # ...then both reverse indexes.
        trIx = -1;
        for uciIx in yourViaUci:
            trIx = trIx + 1;
            print("Reverse uci[", trIx, "]=", uciIx);
        trIx = -1;
        for csvIx in yourViaInCol:
            trIx = trIx + 1;
            print("Reverse csv[", trIx, "]=", csvIx);
        print("End of explanation");
        return;

    # These services are for this instance and must be
    # overridden in other instances.
    def initAtStart():
        "Initialize the thesarus."
        # NOTE(review): `trexGUIThesarus` (lower-case t) is undefined here;
        # the class is named TrexGUIThesarus. TODO confirm.
        trexGUIThesarus.initYourAtStart(trexGUIThesarus.thrsCtrl);
        return;
    def initForCsvHeader(csvColName, csvColNbr):
        "Initialize one column Ix by locating its matching synonym."
        # NOTE(review): argument order disagrees with initYourForCvsHeader
        # (which expects yourCtrl first), and trexThesarus/thrsCtrl are
        # undefined at this scope. TODO confirm.
        trexThesarus.initYourForCvsHeader(csvColName, csvColNbr, thrsCtrl);
        return;
    def importCsvValue(csvColVal, csvColNbr):
        "Import one column value to its designated thesarus field."
        # NOTE(review): no importYourForCvsValue is defined (should this
        # be importYourCsvValue?), and csvColValue is undefined (csvColVal?).
        trexThesarus.importYourForCvsValue(csvColValue, csvColNbr, thrsCtrl);
        return;
    def doOneLine(dataRow, rptColsEnum):
        "Import one row then report it."
        trexThesarus.importYourLine(thrsCtrl, dataRow, rptColsEnum);
        # NOTE(review): no rptYourLine is defined (reportYourFields?).
        trexThesarus.rptYourLine(thrsCtrl, rptColsEnum);
        return;
    def explain():
        "Explain this thesarus in its current state."
        trexThesarus.explainYour(thrsCtrl);
        return;
    def rptFooterAll():
        # Common report trailer.
        print("End of report");
        return;
#END
|
import sys
import apple
from PyQt4.QtGui import QApplication, QMainWindow
if __name__ == '__main__':
    # Build the Qt application, attach the generated UI to a fresh main
    # window, show it, and hand control to the event loop; exit with the
    # loop's return code.
    qt_app = QApplication(sys.argv)
    main_win = QMainWindow()
    form = apple.Ui_MainWindow()
    form.setupUi(main_win)
    main_win.show()
    sys.exit(qt_app.exec_())
|
# In Initialize
self.AddForex("EURUSD", Resolution.Minute) # or
self.AddForex("EURUSD", Resolution.Minute, Market.FXCM)
# For OANDA, we need to explicitly define the market
self.AddForex("EURUSD", Resolution.Minute, Market.Oanda) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.