| code (string) | signature (string) | docstring (string) | loss_without_docstring (float64) | loss_with_docstring (float64) | factor (float64) |
|---|---|---|---|---|---|
url = '/applications/{0}/environment'.format(APPNAME)
environ = cli.user.get(url).item
port = environ['DOTCLOUD_SATELLITE_ZMQ_PORT']
host = socket.gethostbyname(environ['DOTCLOUD_SATELLITE_ZMQ_HOST'])
return "tcp://{0}:{1}".format(host, port)
|
def lookup_endpoint(cli)
|
Looks up the application endpoint from dotcloud
| 6.127578
| 5.249359
| 1.1673
|
client = RESTClient(endpoint=cli.client.endpoint)
client.authenticator = NullAuth()
urlmap = client.get('/auth/discovery').item
username = cli.prompt('dotCloud email')
password = cli.prompt('Password', noecho=True)
credential = {'token_url': urlmap.get('token'),
'key': CLIENT_KEY, 'secret': CLIENT_SECRET}
try:
token = cli.authorize_client(urlmap.get('token'), credential, username, password)
except Exception as e:
cli.die('Username and password do not match. Try again.')
token['url'] = credential['token_url']
config = GlobalConfig()
config.data = {'token': token}
config.save()
cli.global_config = GlobalConfig() # reload
cli.setup_auth()
cli.get_keys()
|
def setup_dotcloud_account(cli)
|
Gets user/pass for dotcloud, performs auth, and stores keys
| 6.435616
| 6.339929
| 1.015093
|
if not cli.global_config.loaded:
setup_dotcloud_account(cli)
discover_satellite(cli)
cli.success("Skypipe is ready for action")
|
def setup(cli)
|
Everything to make skypipe ready to use
| 22.409552
| 12.931198
| 1.732983
|
if not cli.global_config.loaded:
cli.die("Please setup skypipe by running `skypipe --setup`")
try:
endpoint = lookup_endpoint(cli)
ok = client.check_skypipe_endpoint(endpoint, timeout)
if ok:
return endpoint
else:
return launch_satellite(cli) if deploy else None
except (RESTAPIError, KeyError):
return launch_satellite(cli) if deploy else None
|
def discover_satellite(cli, deploy=True, timeout=5)
|
Looks to make sure a satellite exists, returns endpoint
First makes sure we have dotcloud account credentials. Then it looks
up the environment for the satellite app. This will contain host and
port to construct an endpoint. However, if app doesn't exist, or
endpoint does not check out, we call `launch_satellite` to deploy,
which calls `discover_satellite` again when finished. Ultimately we
return a working endpoint. If deploy is False it will not try to
deploy.
| 6.325215
| 5.897758
| 1.072478
|
cli.info("Launching skypipe satellite:")
finish = wait_for(" Pushing to dotCloud")
# destroy any existing satellite
destroy_satellite(cli)
# create new satellite app
url = '/applications'
try:
cli.user.post(url, {
'name': APPNAME,
'flavor': 'sandbox'
})
except RESTAPIError as e:
if e.code == 409:
cli.die('Application "{0}" already exists.'.format(APPNAME))
else:
cli.die('Creating application "{0}" failed: {1}'.format(APPNAME, e))
class args: application = APPNAME
#cli._connect(args)
# push satellite code
protocol = 'rsync'
url = '/applications/{0}/push-endpoints{1}'.format(APPNAME, '')
endpoint = cli._select_endpoint(cli.user.get(url).items, protocol)
class args: path = satellite_path
cli.push_with_rsync(args, endpoint)
# tell dotcloud to deploy, then wait for it to finish
revision = None
clean = False
url = '/applications/{0}/deployments'.format(APPNAME)
response = cli.user.post(url, {'revision': revision, 'clean': clean})
deploy_trace_id = response.trace_id
deploy_id = response.item['deploy_id']
original_stdout = sys.stdout
finish = wait_for(" Waiting for deployment", finish, original_stdout)
try:
sys.stdout = StringIO()
res = cli._stream_deploy_logs(APPNAME, deploy_id,
deploy_trace_id=deploy_trace_id, follow=True)
if res != 0:
return res
except KeyboardInterrupt:
cli.error('You\'ve closed your log stream with Ctrl-C, ' \
'but the deployment is still running in the background.')
cli.error('If you aborted because of an error ' \
'(e.g. the deployment got stuck), please e-mail\n' \
'support@dotcloud.com and mention this trace ID: {0}'
.format(deploy_trace_id))
cli.error('If you want to continue following your deployment, ' \
'try:\n{0}'.format(
cli._fmt_deploy_logs_command(deploy_id)))
cli.die()
except RuntimeError:
# workaround for a bug in the current dotcloud client code
pass
finally:
sys.stdout = original_stdout
finish = wait_for(" Satellite coming online", finish)
endpoint = lookup_endpoint(cli)
ok = client.check_skypipe_endpoint(endpoint, 120)
finish.set()
time.sleep(0.1) # sigh, threads
if ok:
return endpoint
else:
cli.die("Satellite failed to come online")
|
def launch_satellite(cli)
|
Deploys a new satellite app over any existing app
| 5.744478
| 5.750611
| 0.998933
|
if isinstance(s, text_type): # pragma: no cover
return s.encode(encoding, errors)
return s
|
def bytes_(s, encoding='utf-8', errors='strict')
|
If ``s`` is an instance of ``text_type``, return
``s.encode(encoding, errors)``, otherwise return ``s``
| 3.111655
| 2.89776
| 1.073814
|
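As a quick illustration of the `bytes_` helper above, here is a minimal self-contained sketch, assuming `text_type` is simply `str` on Python 3:

```python
# Minimal sketch of the bytes_ helper above; text_type is assumed to be str (Python 3).
text_type = str

def bytes_(s, encoding='utf-8', errors='strict'):
    """Encode s if it is text, otherwise return it unchanged."""
    if isinstance(s, text_type):
        return s.encode(encoding, errors)
    return s

assert bytes_('héllo') == 'héllo'.encode('utf-8')  # text gets encoded
assert bytes_(b'raw') == b'raw'                    # bytes pass through untouched
```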
command = [
pg_dump_exe, '-Fc', '-f', self.file,
'service={}'.format(self.pg_service)
]
if exclude_schema:
command.append(' '.join("--exclude-schema={}".format(schema) for schema in exclude_schema))
subprocess.check_output(command, stderr=subprocess.STDOUT)
|
def pg_backup(self, pg_dump_exe='pg_dump', exclude_schema=None)
|
Call the pg_dump command to create a db backup
Parameters
----------
pg_dump_exe: str
the pg_dump command path
exclude_schema: str[]
list of schemas to be skipped
| 3.586576
| 3.976537
| 0.901934
|
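For reference, a minimal standalone sketch of assembling such a `pg_dump` command line (the service name and output path in the usage comment are hypothetical). Extending the argv list with one `--exclude-schema=...` element per schema keeps each flag as a separate argument:

```python
import subprocess  # only needed for the commented usage example below

def build_pg_dump_command(pg_service, out_file, pg_dump_exe='pg_dump', exclude_schema=None):
    # Custom-format dump (-Fc) written to out_file, connecting via a pg_service entry.
    command = [pg_dump_exe, '-Fc', '-f', out_file, 'service={}'.format(pg_service)]
    if exclude_schema:
        # One argv element per flag so pg_dump sees each schema exclusion separately.
        command.extend('--exclude-schema={}'.format(schema) for schema in exclude_schema)
    return command

# Hypothetical usage:
# subprocess.check_output(
#     build_pg_dump_command('my_service', '/tmp/backup.dump', exclude_schema=['audit']),
#     stderr=subprocess.STDOUT)
```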
command = [
pg_restore_exe, '-d',
'service={}'.format(self.pg_service),
'--no-owner'
]
if exclude_schema:
exclude_schema_available = False
try:
pg_version = subprocess.check_output(['pg_restore','--version'])
pg_version = str(pg_version).replace('\\n', '').replace("'", '').split(' ')[-1]
exclude_schema_available = LooseVersion(pg_version) >= LooseVersion("10.0")
except subprocess.CalledProcessError as e:
print("*** Could not get pg_restore version:\n", e.stderr)
if exclude_schema_available:
command.append(' '.join("--exclude-schema={}".format(schema) for schema in exclude_schema))
command.append(self.file)
try:
subprocess.check_output(command)
except subprocess.CalledProcessError as e:
print("*** pg_restore failed:\n", command, '\n', e.stderr)
|
def pg_restore(self, pg_restore_exe='pg_restore', exclude_schema=None)
|
Call the pg_restore command to restore a db backup
Parameters
----------
pg_restore_exe: str
the pg_restore command path
| 2.87938
| 3.111788
| 0.925314
|
query = .format(self.upgrades_table[:self.upgrades_table.index('.')],
self.upgrades_table[self.upgrades_table.index('.')+1:])
self.cursor.execute(query)
return self.cursor.fetchone()[0]
|
def exists_table_upgrades(self)
|
Return whether the upgrades table exists
Returns
-------
bool
True if the table exists
False if the table doesn't exist
| 3.853421
| 3.831631
| 1.005687
|
files = [(d, f) for d in self.dirs for f in listdir(d) if isfile(join(d, f))]
deltas = OrderedDict()
for d, f in files:
file_ = join(d, f)
if not Delta.is_valid_delta_name(file_):
continue
delta = Delta(file_)
if d not in deltas:
deltas[d] = []
deltas[d].append(delta)
# sort delta objects in each bucket
for d in deltas:
deltas[d].sort(key=lambda x: (x.get_version(), x.get_priority(), x.get_name()))
return deltas
|
def __get_delta_files(self)
|
Search for delta files and return a dict of Delta objects, keyed by directory names.
| 2.864472
| 2.67153
| 1.072222
|
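A simplified sketch of the same group-then-sort pattern, with plain `(version, priority, name)` tuples standing in for the Delta objects (the directory names are made up):

```python
from collections import OrderedDict

# Hypothetical input: delta descriptors already discovered per directory.
files_by_dir = {
    'deltas/project_a': [('1.0.1', 2, 'delta_1.0.1.sql'), ('1.0.0', 1, 'delta_1.0.0.pre.sql')],
    'deltas/project_b': [('1.0.0', 3, 'delta_1.0.0.post.sql')],
}

deltas = OrderedDict()
for d, entries in files_by_dir.items():
    deltas.setdefault(d, []).extend(entries)
    # Sort each directory's deltas by (version, priority, name), mirroring the key above.
    deltas[d].sort(key=lambda t: (t[0], t[1], t[2]))

print(deltas['deltas/project_a'][0][2])  # delta_1.0.0.pre.sql
```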
self.__run_sql_file(delta.get_file())
self.__update_upgrades_table(delta)
|
def __run_delta_sql(self, delta)
|
Execute the delta sql file on the database
| 9.775563
| 8.006212
| 1.220997
|
self.__run_py_file(delta.get_file(), delta.get_name())
self.__update_upgrades_table(delta)
|
def __run_delta_py(self, delta)
|
Execute the delta py file
| 9.051952
| 7.529538
| 1.202192
|
# if the list of delta dirs is [delta1, delta2] the pre scripts of delta2 are
# executed before the pre scripts of delta1
for d in reversed(self.dirs):
pre_all_py_path = os.path.join(d, 'pre-all.py')
if os.path.isfile(pre_all_py_path):
print(' Applying pre-all.py...', end=' ')
self.__run_py_file(pre_all_py_path, 'pre-all')
print('OK')
pre_all_sql_path = os.path.join(d, 'pre-all.sql')
if os.path.isfile(pre_all_sql_path):
print(' Applying pre-all.sql...', end=' ')
self.__run_sql_file(pre_all_sql_path)
print('OK')
|
def __run_pre_all(self)
|
Execute the pre-all.py and pre-all.sql files if they exist
| 2.711593
| 2.419475
| 1.120736
|
# if the list of delta dirs is [delta1, delta2] the post scripts of delta1 are
# executed before the post scripts of delta2
for d in self.dirs:
post_all_py_path = os.path.join(d, 'post-all.py')
if os.path.isfile(post_all_py_path):
print(' Applying post-all.py...', end=' ')
self.__run_py_file(post_all_py_path, 'post-all')
print('OK')
post_all_sql_path = os.path.join(d, 'post-all.sql')
if os.path.isfile(post_all_sql_path):
print(' Applying post-all.sql...', end=' ')
self.__run_sql_file(post_all_sql_path)
print('OK')
|
def __run_post_all(self)
|
Execute the post-all.py and post-all.sql files if they exist
| 2.67932
| 2.336144
| 1.146898
|
with open(filepath, 'r') as delta_file:
sql = delta_file.read()
if self.variables:
self.cursor.execute(sql, self.variables)
else:
self.cursor.execute(sql)
self.connection.commit()
|
def __run_sql_file(self, filepath)
|
Execute the sql file at the passed path
Parameters
----------
filepath: str
the path of the file to execute
| 3.036817
| 3.175337
| 0.956376
|
# Import the module
spec = importlib.util.spec_from_file_location(module_name, filepath)
delta_py = importlib.util.module_from_spec(spec)
spec.loader.exec_module(delta_py)
# Get the python file's directory path
# Note: we add a separator for backward compatibility, as existing DeltaPy subclasses
# may assume that delta_dir ends with a separator
dir_ = dirname(filepath) + os.sep
# Search for subclasses of DeltaPy
for name in dir(delta_py):
obj = getattr(delta_py, name)
if inspect.isclass(obj) and not obj == DeltaPy and issubclass(
obj, DeltaPy):
delta_py_inst = obj(
self.current_db_version(), dir_, self.dirs, self.pg_service,
self.upgrades_table, variables=self.variables)
delta_py_inst.run()
|
def __run_py_file(self, filepath, module_name)
|
Execute the python file at the passed path
Parameters
----------
filepath: str
the path of the file to execute
module_name: str
the name of the python module
| 4.699593
| 5.016047
| 0.936912
|
deltas = self.__get_delta_files()
table = [['Version', 'Name', 'Type', 'Status']]
for dir_ in deltas:
print('delta files in dir: ', dir_)
for delta in deltas[dir_]:
line = [delta.get_version(), delta.get_name()]
if delta.get_type() == DeltaType.PRE_PYTHON:
line.append('pre py')
elif delta.get_type() == DeltaType.PRE_SQL:
line.append('pre sql')
elif delta.get_type() == DeltaType.PYTHON:
line.append('delta py')
elif delta.get_type() == DeltaType.SQL:
line.append('delta sql')
elif delta.get_type() == DeltaType.POST_PYTHON:
line.append('post py')
elif delta.get_type() == DeltaType.POST_SQL:
line.append('post sql')
if self.__is_applied(delta):
line.append('Applied')
else:
line.append('Pending')
table.append(line)
self.__print_table(table)
print('')
print('Applied upgrades in database')
query = .format(self.upgrades_table)
self.cursor.execute(query)
records = self.cursor.fetchall()
table = [['Version', 'Name', 'Type', 'Installed by', 'Installed on',
'Status']]
for i in records:
line = [str(i[0]), str(i[1])]
delta_type = i[2]
if delta_type == 0:
line.append('baseline')
elif delta_type == DeltaType.PRE_PYTHON:
line.append('pre py')
elif delta_type == DeltaType.PRE_SQL:
line.append('pre sql')
elif delta_type == DeltaType.PYTHON:
line.append('delta py')
elif delta_type == DeltaType.SQL:
line.append('delta sql')
elif delta_type == DeltaType.POST_PYTHON:
line.append('post py')
elif delta_type == DeltaType.POST_SQL:
line.append('post sql')
line.append(str(i[3]))
line.append(str(i[4]))
success = str(i[5])
if success == 'True':
line.append('Success')
else:
line.append('Failed')
table.append(line)
self.__print_table(table)
|
def show_info(self)
|
Print info about found delta files and about already made upgrades
| 2.032187
| 1.916093
| 1.060589
|
col_width = [max(len(x) for x in col) for col in zip(*table)]
print("| " + " | ".join("{:{}}".format(x, col_width[i])
for i, x in enumerate(table[0])) + " |")
print("| " + " | ".join("{:{}}".format('-' * col_width[i], col_width[i])
for i, x in enumerate(table[0])) + " |")
for line in table[1:]:
print("| " + " | ".join("{:{}}".format(x, col_width[i])
for i, x in enumerate(line)) + " |")
|
def __print_table(table)
|
Print a list in tabular format
Based on https://stackoverflow.com/a/8356620
| 1.555531
| 1.50599
| 1.032896
|
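A self-contained, runnable version of the same table-printing idea:

```python
def print_table(table):
    # Each column is as wide as its longest cell.
    col_width = [max(len(str(x)) for x in col) for col in zip(*table)]

    def fmt(row):
        return "| " + " | ".join("{:{}}".format(str(x), col_width[i])
                                 for i, x in enumerate(row)) + " |"

    print(fmt(table[0]))
    print("| " + " | ".join('-' * w for w in col_width) + " |")
    for line in table[1:]:
        print(fmt(line))

print_table([['Version', 'Name'], ['1.0.0', 'baseline'], ['1.0.1', 'delta_1.0.1.sql']])
```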
query = .format(
self.upgrades_table, delta.get_version(), delta.get_checksum())
self.cursor.execute(query)
if not self.cursor.fetchone():
return False
else:
return True
|
def __is_applied(self, delta)
|
Verifies whether the delta file has already been applied to the database
Parameters
----------
delta: Delta object
The delta object representing the delta file
Returns
-------
bool
True if the delta is already applied on the db
False otherwise
| 6.485344
| 5.681515
| 1.141481
|
query = .format(
self.upgrades_table, delta.get_version(), delta.get_name(),
delta.get_type(), delta.get_file(), delta.get_checksum(),
self.__get_dbuser())
self.cursor.execute(query)
self.connection.commit()
|
def __update_upgrades_table(self, delta)
|
Add a new record into the upgrades information table about the
applied delta
Parameters
----------
delta: Delta
the applied delta file
| 5.29096
| 5.343031
| 0.990254
|
query = .format(self.upgrades_table)
self.cursor.execute(query)
self.connection.commit()
|
def create_upgrades_table(self)
|
Create the upgrades information table
| 5.330964
| 4.562823
| 1.168348
|
pattern = re.compile(r"^\d+\.\d+\.\d+$")
if not re.match(pattern, version):
raise ValueError('Wrong version format')
query = .format(self.upgrades_table, version, 'baseline', 0,
'', '', self.__get_dbuser())
self.cursor.execute(query)
self.connection.commit()
|
def set_baseline(self, version)
|
Set the baseline into the creation information table
version: str
The version of the current database to set in the information
table. The baseline must be in the format x.x.x where x are numbers.
| 6.097513
| 5.621194
| 1.084736
|
query = .format(self.upgrades_table)
self.cursor.execute(query)
return self.cursor.fetchone()[0]
|
def current_db_version(self)
|
Read the upgrades information table and return the current db
version
Returns
-------
str
the current db version
| 7.618559
| 5.815034
| 1.310149
|
filename = basename(file)
pattern = re.compile(Delta.FILENAME_PATTERN)
if re.match(pattern, filename):
return True
return False
|
def is_valid_delta_name(file)
|
Return whether a file has a valid name
A delta file name can be:
- pre-all.py
- pre-all.sql
- delta_x.x.x_ddmmyyyy.pre.py
- delta_x.x.x_ddmmyyyy.pre.sql
- delta_x.x.x_ddmmyyyy.py
- delta_x.x.x_ddmmyyyy.sql
- delta_x.x.x_ddmmyyyy.post.py
- delta_x.x.x_ddmmyyyy.post.sql
- post-all.py
- post-all.sql
where x.x.x is the version number and _ddmmyyyy is an optional
description, usually representing the date of the delta file
| 4.543296
| 6.161733
| 0.737341
|
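The actual `Delta.FILENAME_PATTERN` is not shown in this row; the regex below is a hypothetical pattern consistent with the naming rules listed in the docstring, for illustration only:

```python
import re

# Hypothetical pattern matching the documented delta file names;
# the real Delta.FILENAME_PATTERN may differ.
FILENAME_PATTERN = (
    r'^('
    r'(pre-all|post-all)\.(py|sql)'
    r'|delta_\d+\.\d+\.\d+(_\w+)?(\.(pre|post))?\.(py|sql)'
    r')$'
)

for name in ['pre-all.sql', 'delta_1.2.3_01042020.pre.py', 'delta_1.2.3.sql', 'notes.txt']:
    print(name, bool(re.match(FILENAME_PATTERN, name)))
# -> True, True, True, False
```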
with open(self.file, 'rb') as f:
cs = md5(f.read()).hexdigest()
return cs
|
def get_checksum(self)
|
Return the md5 checksum of the delta file.
| 3.822935
| 3.289839
| 1.162043
|
ext = self.match.group(5)
if ext == 'pre.py':
return DeltaType.PRE_PYTHON
elif ext == 'pre.sql':
return DeltaType.PRE_SQL
elif ext == 'py':
return DeltaType.PYTHON
elif ext == 'sql':
return DeltaType.SQL
elif ext == 'post.py':
return DeltaType.POST_PYTHON
elif ext == 'post.sql':
return DeltaType.POST_SQL
|
def get_type(self)
|
Return the type of the delta file.
Returns
-------
type: int
| 2.833487
| 2.565404
| 1.104499
|
dtype = self.get_type()
if dtype & DeltaType.PRE:
return 1
elif dtype & DeltaType.POST:
return 3
else:
return 2
|
def get_priority(self) -> int
|
Returns the priority of the file, from 1 (pre) to 3 (post)
:return: the priority
| 6.415633
| 4.711882
| 1.361586
|
return self.__variables.get(name, default_value)
|
def variable(self, name: str, default_value=None)
|
Safely returns the value of the variable given in PUM
Parameters
----------
name
the name of the variable
default_value
the default value for the variable if it does not exist
| 7.216485
| 9.453158
| 0.763394
|
result = True
differences_dict = {}
if 'tables' not in self.ignore_list:
tmp_result, differences_dict['tables'] = self.check_tables()
result = False if not tmp_result else result
if 'columns' not in self.ignore_list:
tmp_result, differences_dict['columns'] = self.check_columns(
'views' not in self.ignore_list)
result = False if not tmp_result else result
if 'constraints' not in self.ignore_list:
tmp_result, differences_dict['constraints'] = \
self.check_constraints()
result = False if not tmp_result else result
if 'views' not in self.ignore_list:
tmp_result, differences_dict['views'] = self.check_views()
result = False if not tmp_result else result
if 'sequences' not in self.ignore_list:
tmp_result, differences_dict['sequences'] = self.check_sequences()
result = False if not tmp_result else result
if 'indexes' not in self.ignore_list:
tmp_result, differences_dict['indexes'] = self.check_indexes()
result = False if not tmp_result else result
if 'triggers' not in self.ignore_list:
tmp_result, differences_dict['triggers'] = self.check_triggers()
result = False if not tmp_result else result
if 'functions' not in self.ignore_list:
tmp_result, differences_dict['functions'] = self.check_functions()
result = False if not tmp_result else result
if 'rules' not in self.ignore_list:
tmp_result, differences_dict['rules'] = self.check_rules()
result = False if not tmp_result else result
if self.verbose_level == 0:
differences_dict = None
return result, differences_dict
|
def run_checks(self)
|
Run all the checks functions.
Returns
-------
bool
True if all the checks are true
False otherwise
dict
Dictionary of lists of differences
| 1.521576
| 1.498389
| 1.015475
|
if check_views:
query = .format(self.exclude_schema)
else:
query = .format(self.exclude_schema)
return self.__check_equals(query)
|
def check_columns(self, check_views=True)
|
Check if the columns in all tables are equal.
Parameters
----------
check_views: bool
if True, check the columns of all the tables and views, if
False check only the columns of the tables
Returns
-------
bool
True if the columns are the same
False otherwise
list
A list with the differences
| 8.041615
| 8.145231
| 0.987279
|
query = .format(excl=self.exclude_schema)
return self.__check_equals(query)
|
def check_rules(self)
|
Check if the rules are equal.
Returns
-------
bool
True if the rules are the same
False otherwise
list
A list with the differences
| 35.249729
| 34.666855
| 1.016814
|
self.cur1.execute(query)
records1 = self.cur1.fetchall()
self.cur2.execute(query)
records2 = self.cur2.fetchall()
result = True
differences = []
d = difflib.Differ()
records1 = [str(x) for x in records1]
records2 = [str(x) for x in records2]
for line in d.compare(records1, records2):
if line[0] in ('-', '+'):
result = False
if self.verbose_level == 1:
differences.append(line[0:79])
elif self.verbose_level == 2:
differences.append(line)
return result, differences
|
def __check_equals(self, query)
|
Check if the query results on the two databases are equal.
Returns
-------
bool
True if the results are the same
False otherwise
list
A list with the differences
| 2.385425
| 2.235538
| 1.067048
|
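A minimal sketch of the `difflib.Differ` comparison used above, with two hand-made record lists standing in for the query results from the two databases:

```python
import difflib

records1 = ["('users', 'id')", "('users', 'name')", "('orders', 'id')"]
records2 = ["('users', 'id')", "('users', 'email')", "('orders', 'id')"]

equal = True
differences = []
for line in difflib.Differ().compare(records1, records2):
    # Lines starting with '-' or '+' exist in only one of the two result sets.
    if line[0] in ('-', '+'):
        equal = False
        differences.append(line)

print(equal)        # False
print(differences)  # ["- ('users', 'name')", "+ ('users', 'email')"]
```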
global input
if prompt is None:
prompt = 'Confirm'
if resp:
prompt = '%s [%s]|%s: ' % (prompt, 'y', 'n')
else:
prompt = '%s [%s]|%s: ' % (prompt, 'n', 'y')
while True:
# Fix for Python2. In python3 raw_input() is now input()
try:
input = raw_input
except NameError:
pass
ans = input(prompt)
if not ans:
return resp
if ans not in ['y', 'Y', 'n', 'N']:
print('please enter y or n.')
continue
if ans == 'y' or ans == 'Y':
return True
if ans == 'n' or ans == 'N':
return False
|
def ask_for_confirmation(prompt=None, resp=False)
|
Prompt for a yes or no response from the user.
Parameters
----------
prompt: basestring
The question to be prompted to the user.
resp: bool
The default value assumed by the caller when user simply
types ENTER.
Returns
-------
bool
True if the user response is 'y' or 'Y'
False if the user response is 'n' or 'N'
| 2.062795
| 2.13757
| 0.965019
|
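A trimmed-down, Python 3 only sketch of the same yes/no prompt (the original also aliases `raw_input` for Python 2):

```python
def ask_for_confirmation(prompt='Confirm', resp=False):
    # resp is the value returned when the user just presses ENTER.
    suffix = ' [y]|n: ' if resp else ' [n]|y: '
    while True:
        ans = input(prompt + suffix)
        if not ans:
            return resp
        if ans in ('y', 'Y'):
            return True
        if ans in ('n', 'N'):
            return False
        print('please enter y or n.')

# if ask_for_confirmation('Drop the database?'):
#     ...
```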
self.request = request
super(AuthDecorator, self).handle_target(request, controller_args, controller_kwargs)
del self.request
|
def handle_target(self, request, controller_args, controller_kwargs)
|
Only here to set self.request and remove it afterwards.
This sets self.request so the target method can access the request via
self.request, just like in the controller.
| 4.342266
| 3.444592
| 1.260604
|
return self.fetch('get', uri, query, **kwargs)
|
def get(self, uri, query=None, **kwargs)
|
make a GET request
| 6.716617
| 5.36263
| 1.252485
|
return self.fetch('post', uri, kwargs.pop("query", {}), body, **kwargs)
|
def post(self, uri, body=None, **kwargs)
|
make a POST request
| 8.305491
| 8.063105
| 1.030061
|
# requests doesn't actually need us to open the files but we do anyway because
# if we don't then the filename isn't preserved, so we assume each string
# value is a filepath
for key in files.keys():
if isinstance(files[key], basestring):
files[key] = open(files[key], 'rb')
kwargs["files"] = files
# we ignore content type for posting files since it requires very specific things
ct = self.headers.pop("content-type", None)
ret = self.fetch('post', uri, {}, body, **kwargs)
if ct:
self.headers["content-type"] = ct
# close all the files
for fp in files.values():
fp.close()
return ret
|
def post_file(self, uri, body, files, **kwargs)
|
POST a file
| 6.033067
| 6.145298
| 0.981737
|
return self.fetch('delete', uri, query, **kwargs)
|
def delete(self, uri, query=None, **kwargs)
|
make a DELETE request
| 6.657485
| 5.556755
| 1.198089
|
if not query: query = {}
fetch_url = self.get_fetch_url(uri, query)
args = [fetch_url]
kwargs.setdefault("timeout", self.timeout)
kwargs["headers"] = self.get_fetch_headers(method, kwargs.get("headers", {}))
if body:
if self.is_json(kwargs["headers"]):
kwargs['json'] = self.get_fetch_body(body)
else:
kwargs['data'] = self.get_fetch_body(body)
res = self.get_fetch_request(method, *args, **kwargs)
#res = requests.request(method, *args, **kwargs)
res = self.get_fetch_response(res)
self.response = res
return res
|
def fetch(self, method, uri, query=None, body=None, **kwargs)
|
wrapper method that all the top level methods (get, post, etc.) use to actually
make the request
| 2.610667
| 2.597181
| 1.005193
|
all_headers = self.headers.copy()
if headers:
all_headers.update(headers)
return Headers(all_headers)
|
def get_fetch_headers(self, method, headers)
|
merge class headers with passed in headers
:param method: string, (eg, GET or POST), this is passed in so you can customize
headers based on the method that you are calling
:param headers: dict, all the headers passed into the fetch method
:returns: passed in headers merged with global class headers
| 3.622879
| 4.230994
| 0.856271
|
return requests.request(method, fetch_url, *args, **kwargs)
|
def get_fetch_request(self, method, fetch_url, *args, **kwargs)
|
This is handy if you want to modify the request right before passing it
to requests, or you want to do something extra special customized
:param method: string, the http method (eg, GET, POST)
:param fetch_url: string, the full url with query params
:param *args: any other positional arguments
:param **kwargs: any keyword arguments to pass to requests
:returns: a requests.Response compatible object instance
| 3.325001
| 4.743309
| 0.700988
|
res.code = res.status_code
res.headers = Headers(res.headers)
res._body = None
res.body = ''
body = res.content
if body:
if self.is_json(res.headers):
res._body = res.json()
else:
res._body = body
res.body = String(body, res.encoding)
return res
|
def get_fetch_response(self, res)
|
the goal of this method is to make the requests response object behave more like an
endpoints response
res -- requests Response -- the native requests response instance; we manipulate
it a bit to make it look more like the internal endpoints.Response object
| 3.540119
| 3.657316
| 0.967955
|
ret = False
ct = headers.get("content-type", "").lower()
if ct:
ret = ct.lower().rfind("json") >= 0
return ret
|
def is_json(self, headers)
|
return true if content_type is a json content type
| 4.090693
| 3.53434
| 1.157414
|
doc = None
def visit_FunctionDef(node):
if node.name != self.method_name:
return
doc = ast.get_docstring(node)
raise StopIteration(doc if doc else "")
target = self.controller.controller_class
try:
node_iter = ast.NodeVisitor()
node_iter.visit_FunctionDef = visit_FunctionDef
node_iter.visit(ast.parse(inspect.getsource(target)))
except StopIteration as e:
doc = str(e)
if not doc: doc = ""
return doc
|
def desc(self)
|
return the description of this endpoint
| 3.670033
| 3.636551
| 1.009207
|
ret = {}
for rd in self.decorators:
args = rd.args
kwargs = rd.kwargs
if param in rd:
is_required = kwargs.get('required', 'default' not in kwargs)
ret[args[0]] = {'required': is_required, 'other_names': args[1:], 'options': kwargs}
return ret
|
def params(self)
|
return information about the params that the given http option takes
| 7.691353
| 6.605984
| 1.164301
|
res = collections.defaultdict(list)
mmap = {}
def get_val(na, default=None):
ret = None
if isinstance(na, ast.Num):
repr_n = repr(na.n)
val = na.n
vtype = float if '.' in repr_n else int
ret = vtype(val)
elif isinstance(na, ast.Str):
ret = str(na.s)
elif isinstance(na, ast.Name):
# http://stackoverflow.com/questions/12700893/
ret = getattr(builtins, na.id, None)
if not ret:
ret = na.id
if ret == 'True':
ret = True
elif ret == 'False':
ret = False
elif isinstance(na, ast.Dict):
if na.keys:
ret = {get_val(na_[0]): get_val(na_[1]) for na_ in zip(na.keys, na.values)}
else:
ret = {}
elif isinstance(na, (ast.List, ast.Tuple)):
if na.elts:
ret = [get_val(na_) for na_ in na.elts]
else:
ret = []
if isinstance(na, ast.Tuple):
ret = tuple(ret)
else:
ret = default
return ret
def is_super(childnode, parentnode):
ret = False
for n in childnode.body:
if not isinstance(n, ast.Expr): continue
try:
func = n.value.func
func_name = func.attr
if func_name == parentnode.name:
ret = isinstance(func.value, ast.Call)
break
except AttributeError as e:
ret = False
return ret
def visit_FunctionDef(node):
add_decs = True
if node.name in res:
add_decs = is_super(mmap[node.name], node)
mmap[node.name] = node
if add_decs:
for n in node.decorator_list:
d = {}
name = ''
args = []
kwargs = {}
if isinstance(n, ast.Call):
name = n.func.attr if isinstance(n.func, ast.Attribute) else n.func.id
for an in n.args:
args.append(get_val(an))
for an in n.keywords:
kwargs[an.arg] = get_val(an.value)
else:
name = n.attr if isinstance(n, ast.Attribute) else n.id
d = {
"name": name,
"args": args,
"kwargs": kwargs
}
m = self.module
decor = getattr(m, name, None)
if decor:
d["decorator"] = decor
#res[node.name].append((name, args, kwargs))
res[node.name].append(self.decorator_class(**d))
node_iter = ast.NodeVisitor()
node_iter.visit_FunctionDef = visit_FunctionDef
for target_cls in inspect.getmro(self.controller_class):
if target_cls == Controller: break
node_iter.visit(ast.parse(inspect.getsource(target_cls)))
return res
|
def decorators(self)
|
Get all the decorators of all the option methods in the class
http://stackoverflow.com/questions/5910703/ specifically, I used this
answer http://stackoverflow.com/a/9580006
| 2.580005
| 2.549577
| 1.011935
|
doc = inspect.getdoc(self.controller_class)
if not doc: doc = ''
return doc
|
def desc(self)
|
return the description of this endpoint
| 7.402055
| 5.731653
| 1.291435
|
ret = {}
method_regex = re.compile(r"^[A-Z][A-Z0-9]+(_|$)")
controller_methods = inspect.getmembers(self.controller_class)
for controller_method_name, controller_method in controller_methods:
if controller_method_name.startswith('_'): continue
if method_regex.match(controller_method_name):
method = self.method_class(
controller_method_name,
controller_method,
controller=self
)
ret.setdefault(method.name, [])
ret[method.name].append(method)
return ret
|
def methods(self)
|
return the http method options that this endpoint supports (eg, POST, GET)
http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html
:returns: dict, each http method (eg, GET, POST) will have a key with the value
being every method from the controller that can satisfy the http method
| 2.752707
| 2.757058
| 0.998422
|
req = self.create_request(raw_request, **kwargs)
res = self.create_response(**kwargs)
rou = self.create_router(**kwargs)
c = self.call_class(req, res, rou)
return c
|
def create_call(self, raw_request, **kwargs)
|
create a call object that has endpoints understandable request and response
instances
| 3.727092
| 3.215268
| 1.159185
|
ret = True
if key:
#backend = self.create_backend()
#method = getattr(backend, "normalize_limit", None)
#if method:
# limit = method(request, limit)
#method = getattr(backend, "normalize_ttl", None)
#if method:
# ttl = method(request, ttl)
#ret = backend.target(request, key, limit, ttl)
ret = super(RateLimitDecorator, self).target(request, key, limit, ttl)
else:
logger.warn("No ratelimit key found for {}".format(request.path))
return ret
|
def target(self, request, key, limit, ttl)
|
this will only run the request if the key has a value; if you want to
fail when the key doesn't have a value, then normalize_key() should raise
an exception
:param request: Request, the request instance
:param key: string, the unique key for the endpoint, this is generated using
self.normalize_key, so override that method to customize the key
:param limit: int, max requests that should be received in ttl
:param ttl: int, how many seconds the request should be throttled (eg, 3600 = 1 hour)
| 2.991611
| 3.151299
| 0.949326
|
self.limit = int(limit)
self.ttl = int(ttl)
return super(RateLimitDecorator, self).decorate(func, target=None, *anoop, **kwnoop)
|
def decorate(self, func, limit=0, ttl=0, *anoop, **kwnoop)
|
see target for an explanation of limit and ttl
| 3.665269
| 3.443413
| 1.064429
|
return super(ratelimit, self).decorate(func, limit, ttl, *anoop, **kwnoop)
|
def decorate(self, func, limit, ttl, *anoop, **kwnoop)
|
make limit and ttl required
| 2.804124
| 2.969197
| 0.944405
|
'''
cli hook
return -- integer -- the exit code
'''
parser = argparse.ArgumentParser(description='Start an endpoints WSGI server', add_help=True)
#parser.add_argument('--debug', dest='debug', action='store_true', help='print debugging info')
parser.add_argument(
"-v", "--version",
action='version',
version="%(prog)s {}".format(endpoints.__version__)
)
parser.add_argument(
"--quiet",
action='store_true',
dest='quiet'
)
parser.add_argument(
'--prefix', "--controller-prefix", "-P",
required=True,
help='The endpoints controller prefix'
)
parser.add_argument(
'--file', "-F", "--wsgi-file", "--wsgifile",
dest="file",
default="",
help='The wsgi file, the file that has an application callable'
)
parser.add_argument(
'--host', "-H",
required=True,
help='The host to serve on in the form host:port'
)
parser.add_argument(
'--count', "-C",
help='How many requests to process until self termination',
type=int,
default=0
)
parser.add_argument(
'--dir', "-D", "--directory",
dest="directory",
default=os.getcwd(),
help='directory to run the server in, usually contains the prefix module path',
)
# parser.add_argument(
# '--config', "--config-script", "-S",
# dest="config_script",
# default="",
# help='This script will be loaded before Server is created allowing custom configuration',
# )
args = parser.parse_args()
# we want to make sure the directory can be imported from since chances are
# the prefix module lives in that directory
sys.path.append(args.directory)
if not args.quiet:
# https://docs.python.org/2.7/library/logging.html#logging.basicConfig
logging.basicConfig(format="%(message)s", level=logging.DEBUG, stream=sys.stdout)
logger = logging.getLogger(__name__)
os.environ["ENDPOINTS_HOST"] = args.host
environ.HOST = args.host
os.environ["ENDPOINTS_PREFIX"] = args.prefix
#environ.PREFIXES = args.prefix
config = {}
if args.file:
# load the configuration file
config = runpy.run_path(args.file)
# if args.config_script:
# # load a config script so you can customize the environment
# h = "wsgiserver_config_{}".format(uuid.uuid4())
# config_module = imp.load_source(h, args.config_script)
s = Server()
if "application" in config:
s.application = config["application"]
if args.count:
logger.info("Listening on {} for {} requests".format(args.host, args.prefix))
s.serve_count(args.count)
else:
logger.info("Listening on {}".format(args.host))
s.serve_forever()
return 0
|
def console()
|
cli hook
return -- integer -- the exit code
| 3.541674
| 3.372597
| 1.050132
|
b = ByteString(s)
be = base64.b64encode(b).strip()
return String(be)
|
def encode(cls, s)
|
converts a plain text string to base64 encoding
:param s: unicode str|bytes, the plain text string to encode
:returns: unicode str
| 7.232273
| 8.281322
| 0.873323
|
b = ByteString(s)
bd = base64.b64decode(b)
return String(bd)
|
def decode(cls, s)
|
decodes a base64 string to plain text
:param s: unicode str|bytes, the base64 encoded string
:returns: unicode str
| 7.126202
| 8.590205
| 0.829573
|
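A minimal round trip using only the standard library, mirroring the `encode`/`decode` helpers above; the project's `String`/`ByteString` wrappers are not used here:

```python
import base64

def b64_encode(s, encoding='utf-8'):
    # text -> utf-8 bytes -> base64 -> ascii text
    return base64.b64encode(s.encode(encoding)).decode('ascii')

def b64_decode(s, encoding='utf-8'):
    # ascii text -> base64 bytes -> utf-8 bytes -> text
    return base64.b64decode(s.encode('ascii')).decode(encoding)

token = b64_encode('héllo world')
assert b64_decode(token) == 'héllo world'
```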
mt = ""
index = val.rfind(".")
if index == -1:
val = "fake.{}".format(val)
elif index == 0:
val = "fake{}".format(val)
mt = mimetypes.guess_type(val)[0]
if mt is None:
mt = ""
return mt
|
def find_type(cls, val)
|
return the mimetype from the given string value
if val is a path, then the extension will be extracted; if val is an extension, then
it will be used directly to find the mimetype
| 4.315306
| 3.237794
| 1.332792
|
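A standalone sketch of the same mimetype lookup, including the fake-filename trick so bare extensions resolve too:

```python
import mimetypes

def find_type(val):
    # If val is a bare extension (no dot, or only a leading dot), prefix a fake
    # filename so mimetypes.guess_type can resolve it.
    index = val.rfind('.')
    if index == -1:
        val = 'fake.{}'.format(val)
    elif index == 0:
        val = 'fake{}'.format(val)
    return mimetypes.guess_type(val)[0] or ''

print(find_type('report.pdf'))  # application/pdf
print(find_type('json'))        # application/json
print(find_type('.html'))       # text/html
```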
'''
sort the headers according to rfc 2616 so when __iter__ is called, the accept media types are
in order from most preferred to least preferred
'''
ret = 0
# first we check q, higher values win:
if a[1] != b[1]:
ret = cmp(a[1], b[1])
else:
found = False
for i in range(2):
ai = a[0][i]
bi = b[0][i]
if ai == '*':
if bi != '*':
ret = -1
found = True
break
else:
# both *, more verbose params win
ret = cmp(len(a[2]), len(b[2]))
found = True
break
elif bi == '*':
ret = 1
found = True
break
if not found:
ret = cmp(len(a[2]), len(b[2]))
return ret
|
def _sort(self, a, b)
|
sort the headers according to rfc 2616 so when __iter__ is called, the accept media types are
in order from most preferred to least preferred
| 4.238206
| 2.775773
| 1.526856
|
mtype, msubtype = self._split_media_type(media_type)
for x in self.__iter__():
# all the params have to match to make the media type valid
matched = True
for k, v in params.items():
if x[2].get(k, None) != v:
matched = False
break
if matched:
if x[0][0] == '*':
if x[0][1] == '*':
yield x
elif x[0][1] == msubtype:
yield x
elif mtype == '*':
if msubtype == '*':
yield x
elif x[0][1] == msubtype:
yield x
elif x[0][0] == mtype:
if msubtype == '*':
yield x
elif x[0][1] == '*':
yield x
elif x[0][1] == msubtype:
yield x
|
def filter(self, media_type, **params)
|
iterate all the accept media types that match media_type
media_type -- string -- the media type to filter by
**params -- dict -- further filter by key: val
return -- generator -- yields all matching media type info things
| 2.497372
| 2.424507
| 1.030053
|
r = self.request_class()
for k, v in raw_request.items():
if k.startswith('HTTP_'):
r.set_header(k[5:], v)
else:
r.environ[k] = v
r.method = raw_request['REQUEST_METHOD']
r.path = raw_request['PATH_INFO']
r.query = raw_request['QUERY_STRING']
# handle headers not prefixed with http
for k, t in {'CONTENT_TYPE': None, 'CONTENT_LENGTH': int}.items():
v = r.environ.pop(k, None)
if v:
r.set_header(k, t(v) if t else v)
if 'wsgi.input' in raw_request:
if "CONTENT_LENGTH" in raw_request and int(r.get_header("CONTENT_LENGTH", 0)) <= 0:
r.body_kwargs = {}
else:
if r.get_header('transfer-encoding', "").lower().startswith('chunked'):
raise IOError("Server does not support chunked requests")
else:
r.body_input = raw_request['wsgi.input']
else:
r.body_kwargs = {}
return r
|
def create_request(self, raw_request, **kwargs)
|
create instance of request
raw_request -- the raw request object retrieved from a WSGI server
| 2.885591
| 2.828156
| 1.020308
|
self._application = v
self.backend.set_app(v)
|
def application(self, v)
|
allow overriding of the application factory, this allows you to set
your own application callable that will be used to handle requests, see
bin/wsgiserver.py script as an example of usage
| 9.031011
| 9.296754
| 0.971415
|
ws_req = req.copy()
del ws_req.controller_info
ws_req.environ.pop("wsgi.input", None)
ws_req.body_kwargs = payload.body
ws_req.environ["REQUEST_METHOD"] = payload.method
ws_req.method = payload.method
ws_req.environ["PATH_INFO"] = payload.path
ws_req.path = payload.path
ws_req.environ["WS_PAYLOAD"] = payload
ws_req.environ["WS_ORIGINAL"] = req
ws_req.payload = payload
ws_req.parent = req
return {"WS_REQUEST": ws_req}
|
def create_environ(self, req, payload)
|
This will take the original request and the new websocket payload and
merge them into a new request instance
| 3.708666
| 3.352086
| 1.106376
|
master_modname = __name__.split(".", 1)[0]
master_module = sys.modules[master_modname]
#return os.path.dirname(os.path.realpath(os.path.join(inspect.getsourcefile(endpoints), "..")))
path = os.path.dirname(inspect.getsourcefile(master_module))
return path
|
def find_module_path()
|
find where the master module is located
| 3.569896
| 3.089294
| 1.15557
|
k = String(k, "iso-8859-1")
klower = k.lower().replace('_', '-')
bits = klower.split('-')
return "-".join((bit.title() for bit in bits))
|
def _convert_string_name(self, k)
|
converts things like FOO_BAR to Foo-Bar which is the normal form
| 6.005206
| 4.900213
| 1.225499
|
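A self-contained sketch of the same header-name normalization (FOO_BAR becomes Foo-Bar):

```python
def normalize_header_name(k):
    # FOO_BAR or foo_bar -> Foo-Bar, the conventional HTTP header form.
    return '-'.join(bit.title() for bit in k.lower().replace('_', '-').split('-'))

print(normalize_header_name('HTTP_X_FORWARDED_FOR'))  # Http-X-Forwarded-For
print(normalize_header_name('CONTENT_TYPE'))          # Content-Type
```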
uristring = self.path
if self.query:
uristring += "?{}".format(self.query)
if self.fragment:
uristring += "#{}".format(self.fragment)
return uristring
|
def uri(self)
|
return the uri, which is everything but base (no scheme, host, etc)
| 2.742627
| 2.429775
| 1.128757
|
if not query: return {}
d = {}
# https://docs.python.org/2/library/urlparse.html
for k, kv in urlparse.parse_qs(query, True, strict_parsing=True).items():
#k = k.rstrip("[]") # strip out php type array designated variables
if len(kv) > 1:
d[k] = kv
else:
d[k] = kv[0]
return d
|
def parse_query(cls, query)
|
return name=val&name2=val2 strings into {name: val} dict
| 4.499601
| 4.029186
| 1.116752
|
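A Python 3 sketch of the same query parsing using `urllib.parse.parse_qs`, flattening single-value lists the same way:

```python
from urllib.parse import parse_qs

def parse_query(query):
    if not query:
        return {}
    d = {}
    for k, kv in parse_qs(query, keep_blank_values=True, strict_parsing=True).items():
        # parse_qs always returns lists; keep the list only for real multi-value params.
        d[k] = kv if len(kv) > 1 else kv[0]
    return d

print(parse_query('name=val&tag=a&tag=b'))  # {'name': 'val', 'tag': ['a', 'b']}
```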
kwargs = {}
if paths:
fragment = paths[-1]
if fragment:
if fragment.startswith("#"):
kwargs["fragment"] = fragment
paths.pop(-1)
kwargs["path"] = "/".join(self.normalize_paths(*paths))
kwargs["query_kwargs"] = query_kwargs
return kwargs
|
def _normalize_params(self, *paths, **query_kwargs)
|
a lot of the helper methods are very similar, this handles their arguments
| 4.010202
| 3.581959
| 1.119556
|
bits = hostname.split(":", 2)
p = None
d = bits[0]
if len(bits) == 2:
p = int(bits[1])
return d, p
|
def split_hostname_from_port(cls, hostname)
|
given a hostname:port return a tuple (hostname, port)
| 4.001851
| 3.662425
| 1.092678
|
if "path" in kwargs:
path = kwargs["path"]
if isinstance(path, bytes):
path = String(path)
if not path[0].startswith("/"):
paths = self.normalize_paths(self.path, path)
else:
paths = self.normalize_paths(path)
kwargs["path"] = "/".join(paths)
return self.create(self, **kwargs)
|
def add(self, **kwargs)
|
Just a shortcut to change the current url, equivalent to Url(self, **kwargs)
| 3.552139
| 3.296456
| 1.077563
|
kwargs = self._normalize_params(*paths, **query_kwargs)
if self.controller_path:
if "path" in kwargs:
paths = self.normalize_paths(self.controller_path, kwargs["path"])
kwargs["path"] = "/".join(paths)
else:
kwargs["path"] = self.controller_path
return self.create(self.root, **kwargs)
|
def controller(self, *paths, **query_kwargs)
|
create a new url object using the controller path as a base
if you have a controller `foo.BarController` then this would create a new
Url instance with `host/foo/bar` as the base path, so any *paths will be
appended to `/foo/bar`
:example:
# controller foo.BarController
print url # http://host.com/foo/bar/some_random_path
print url.controller() # http://host.com/foo/bar
print url.controller("che", boom="bam") # http://host/foo/bar/che?boom=bam
:param *paths: list, the paths to append to the controller path
:param **query_kwargs: dict, any query string params to add
| 3.163745
| 3.958522
| 0.799224
|
kwargs = self._normalize_params(*paths, **query_kwargs)
if self.path:
if "path" in kwargs:
paths = self.normalize_paths(self.path, kwargs["path"])
kwargs["path"] = "/".join(paths)
else:
kwargs["path"] = self.path
return self.create(self.root, **kwargs)
|
def base(self, *paths, **query_kwargs)
|
create a new url object using the current base path as a base
if you had requested /foo/bar, then this would append *paths and **query_kwargs
to /foo/bar
:example:
# current path: /foo/bar
print url # http://host.com/foo/bar
print url.base() # http://host.com/foo/bar
print url.base("che", boom="bam") # http://host/foo/bar/che?boom=bam
:param *paths: list, the paths to append to the current path without query params
:param **query_kwargs: dict, any query string params to add
| 3.3571
| 4.143192
| 0.810269
|
kwargs = self._normalize_params(*paths, **query_kwargs)
return self.create(self.root, **kwargs)
|
def host(self, *paths, **query_kwargs)
|
create a new url object using the host as a base
if you had requested http://host/foo/bar, then this would append *paths and **query_kwargs
to http://host
:example:
# current url: http://host/foo/bar
print url # http://host.com/foo/bar
print url.host() # http://host.com/
print url.host("che", boom="bam") # http://host/che?boom=bam
:param *paths: list, the paths to append to the current path without query params
:param **query_kwargs: dict, any query string params to add
| 6.78097
| 10.59945
| 0.639747
|
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Charset
ret = ""
accept_encoding = self.get_header("Accept-Charset", "")
if accept_encoding:
bits = re.split(r"\s+", accept_encoding)
bits = bits[0].split(";")
ret = bits[0]
return ret
|
def accept_encoding(self)
|
The encoding the client requested the response to use
| 3.152355
| 2.956226
| 1.066344
|
encoding = None
ct = self.get_header('content-type')
if ct:
ah = AcceptHeader(ct)
if ah.media_types:
encoding = ah.media_types[0][2].get("charset", None)
return encoding
|
def encoding(self)
|
the character encoding of the request, usually only set in POST type requests
| 5.129556
| 4.64685
| 1.103878
|
access_token = self.get_auth_bearer()
if not access_token:
access_token = self.query_kwargs.get('access_token', '')
if not access_token:
access_token = self.body_kwargs.get('access_token', '')
return access_token
|
def access_token(self)
|
return an Oauth 2.0 Bearer access token if it can be found
| 3.138644
| 2.817494
| 1.113984
|
client_id, client_secret = self.get_auth_basic()
if not client_id and not client_secret:
client_id = self.query_kwargs.get('client_id', '')
client_secret = self.query_kwargs.get('client_secret', '')
if not client_id and not client_secret:
client_id = self.body_kwargs.get('client_id', '')
client_secret = self.body_kwargs.get('client_secret', '')
return client_id, client_secret
|
def client_tokens(self)
|
try and get Oauth 2.0 client id and secret first from basic auth header,
then from GET or POST parameters
return -- tuple -- client_id, client_secret
| 2.093994
| 1.775222
| 1.179568
|
r = []
names = ['X_FORWARDED_FOR', 'CLIENT_IP', 'X_REAL_IP', 'X_FORWARDED',
'X_CLUSTER_CLIENT_IP', 'FORWARDED_FOR', 'FORWARDED', 'VIA',
'REMOTE_ADDR']
for name in names:
vs = self.get_header(name, '')
if vs:
r.extend(map(lambda v: v.strip(), vs.split(',')))
vs = self.environ.get(name, '')
if vs:
r.extend(map(lambda v: v.strip(), vs.split(',')))
return r
|
def ips(self)
|
return all the possible ips of this request; this will include public and private ips
| 2.609054
| 2.370405
| 1.100679
|
r = ''
# this was compiled from here:
# https://github.com/un33k/django-ipware
# http://www.ietf.org/rfc/rfc3330.txt (IPv4)
# http://www.ietf.org/rfc/rfc5156.txt (IPv6)
# https://en.wikipedia.org/wiki/Reserved_IP_addresses
format_regex = re.compile(r'\s')
ip_regex = re.compile(r'^(?:{})'.format(r'|'.join([
r'0\.', # reserved for 'self-identification'
r'10\.', # class A
r'169\.254', # link local block
r'172\.(?:1[6-9]|2[0-9]|3[0-1])\.', # class B
r'192\.0\.2\.', # documentation/examples
r'192\.168', # class C
r'255\.{3}', # broadcast address
r'2001\:db8', # documentation/examples
r'fc00\:', # private
r'fe80\:', # link local unicast
r'ff00\:', # multicast
r'127\.', # localhost
r'\:\:1' # localhost
])))
ips = self.ips
for ip in ips:
if not format_regex.search(ip) and not ip_regex.match(ip):
r = ip
break
return r
|
def ip(self)
|
return the public ip address
| 3.675827
| 3.665318
| 1.002867
|
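As a point of comparison (not the project's implementation), the standard library's `ipaddress` module can perform a similar public/private check without a hand-rolled regex:

```python
import ipaddress

def first_public_ip(candidates):
    for raw in candidates:
        try:
            ip = ipaddress.ip_address(raw.strip())
        except ValueError:
            continue  # skip anything that is not a well-formed address
        # is_global excludes private, loopback, link-local and documentation ranges.
        if ip.is_global:
            return str(ip)
    return ''

print(first_public_ip(['10.0.0.5', '192.168.1.2', '203.0.113.9', '8.8.8.8']))  # 8.8.8.8
```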
scheme = self.scheme
host = self.host
path = self.path
query = self.query
port = self.port
# normalize the port
host_domain, host_port = Url.split_hostname_from_port(host)
if host_port:
port = host_port
controller_path = ""
if self.controller_info:
controller_path = self.controller_info.get("path", "")
u = Url(
scheme=scheme,
hostname=host,
path=path,
query=query,
port=port,
controller_path=controller_path,
)
return u
|
def url(self)
|
return the full request url as an Url() instance
| 3.14672
| 2.861838
| 1.099545
|
self._path = ''
path_args = self.path_args
path = "/{}".format("/".join(path_args))
return path
|
def path(self)
|
path part of a url (eg, http://host.com/path?query=string)
| 7.309939
| 5.466094
| 1.337324
|
self._path_args = []
path = self.path
path_args = list(filter(None, path.split('/')))
return path_args
|
def path_args(self)
|
the path converted to list (eg /foo/bar becomes [foo, bar])
| 4.936184
| 3.411609
| 1.446879
|
self._query = query = ""
query_kwargs = self.query_kwargs
if query_kwargs: query = urlencode(query_kwargs, doseq=True)
return query
|
def query(self)
|
query_string part of a url (eg, http://host.com/path?query=string)
| 6.443739
| 4.668324
| 1.380311
|
self._query_kwargs = query_kwargs = {}
query = self.query
if query: query_kwargs = self._parse_query_str(query)
return query_kwargs
|
def query_kwargs(self)
|
{foo: bar, baz: che}
| 4.738082
| 4.113544
| 1.151825
|
body = None
if self.body_input:
body = self.body_input.read(int(self.get_header('content-length', -1)))
return body
|
def body(self)
|
return the raw version of the body
| 6.175369
| 4.872877
| 1.267294
|
body_kwargs = {}
ct = self.get_header("content-type")
if ct:
ct = ct.lower()
if ct.rfind("json") >= 0:
body = self.body
if body:
body_kwargs = json.loads(body)
else:
if self.body_input:
body = RequestBody(
fp=self.body_input,
headers=self.headers,
environ=self.environ
#environ=self.raw_request
)
body_kwargs = dict(body)
else:
body = self.body
if body:
body_kwargs = self._parse_query_str(body)
return body_kwargs
|
def body_kwargs(self)
|
the request body, if this is a POST request
this tries to do the right thing with the body: if the body is set and the
content type is json, it will return the body json decoded; if you need
the original string body, use body
example --
self.body = '{"foo":{"name":"bar"}}'
b = self.body_kwargs # dict with: {"foo": { "name": "bar"}}
print self.body # string with: '{"foo":{"name":"bar"}}'
| 3.537768
| 3.389021
| 1.043891
|
kwargs = dict(self.query_kwargs)
kwargs.update(self.body_kwargs)
return kwargs
|
def kwargs(self)
|
combine GET and POST params to be passed to the controller
| 6.187496
| 5.097332
| 1.21387
|
v = ""
accept_header = self.get_header('accept', "")
if accept_header:
a = AcceptHeader(accept_header)
for mt in a.filter(content_type):
v = mt[2].get("version", "")
if v: break
return v
|
def version(self, content_type="*/*")
|
versioning is based off of this post
http://urthen.github.io/2013/05/09/ways-to-version-your-api/
| 5.176467
| 5.070842
| 1.02083
|
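A rough, standalone illustration of pulling a `version` parameter out of an Accept header value; the project's `AcceptHeader` class does proper RFC 2616 parsing, while this sketch only splits on ',', ';' and '=':

```python
def accept_version(accept_header, content_type='application/json'):
    for part in accept_header.split(','):
        pieces = [p.strip() for p in part.split(';')]
        if pieces[0] == content_type or content_type == '*/*':
            for param in pieces[1:]:
                if param.startswith('version='):
                    return param.split('=', 1)[1]
    return ''

print(accept_version('application/json;version=v1'))            # v1
print(accept_version('text/html,application/json;version=v2'))  # v2
```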
access_token = ''
auth_header = self.get_header('authorization')
if auth_header:
m = re.search(r"^Bearer\s+(\S+)$", auth_header, re.I)
if m: access_token = m.group(1)
return access_token
|
def get_auth_bearer(self)
|
return the bearer token in the authorization header if it exists
| 2.634748
| 2.40042
| 1.09762
|
username = ''
password = ''
auth_header = self.get_header('authorization')
if auth_header:
m = re.search(r"^Basic\s+(\S+)$", auth_header, re.I)
if m:
auth_str = Base64.decode(m.group(1))
username, password = auth_str.split(':', 1)
return username, password
|
def get_auth_basic(self)
|
return the username and password of a basic auth header if it exists
| 2.392059
| 2.133482
| 1.1212
|
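A self-contained sketch of decoding a Basic auth header with the standard library, splitting on the first ':' only so passwords that contain ':' survive:

```python
import base64
import re

def get_auth_basic(auth_header):
    m = re.search(r'^Basic\s+(\S+)$', auth_header or '', re.I)
    if not m:
        return '', ''
    decoded = base64.b64decode(m.group(1)).decode('utf-8')
    username, password = decoded.split(':', 1)
    return username, password

header = 'Basic ' + base64.b64encode(b'user:pa:ss').decode('ascii')
print(get_auth_basic(header))  # ('user', 'pa:ss')
```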
code = getattr(self, '_code', None)
if not code:
if self.has_body():
code = 200
else:
code = 204
return code
|
def code(self)
|
the http status code to return to the client, by default, 200 if a body is present otherwise 204
| 3.700782
| 2.465829
| 1.500827
|
if b is None: return ''
if self.is_json():
# TODO ???
# I don't like this, if we have a content type but it isn't one
# of the supported ones we were returning the exception, which threw
# Jarid off, but now it just returns a string, which is not best either
# my thought is we could have a body_type_subtype method that would
# make it possible to easily handle custom types
# eg, "application/json" would become: self.body_application_json(b, is_error)
b = json.dumps(b, cls=ResponseBody)
else:
# just return a string representation of body if no content type
b = String(b, self.encoding)
return b
|
def normalize_body(self, b)
|
return the body as a string, formatted to the appropriate content type
:param b: mixed, the current raw body
:returns: unicode string
| 16.148685
| 15.172779
| 1.06432
|
return [], dict(
request=request,
controller_args=controller_args,
controller_kwargs=controller_kwargs
)
|
def normalize_target_params(self, request, controller_args, controller_kwargs)
|
get params ready for calling target
this method exists because child classes might only really need certain params
passed to the method; this allows the child classes to decide what their
target methods need
:param request: the http.Request instance for this specific request
:param controller_args: the arguments that will be passed to the controller
:param controller_kwargs: the key/val arguments that will be passed to the
controller, these usually come from query strings and post bodies
:returns: a tuple (list, dict) that correspond to the *args, **kwargs that
will be passed to the target() method
| 4.380867
| 5.511841
| 0.79481
|
try:
param_args, param_kwargs = self.normalize_target_params(
request=request,
controller_args=controller_args,
controller_kwargs=controller_kwargs
)
ret = self.target(*param_args, **param_kwargs)
if not ret:
raise ValueError("{} check failed".format(self.__class__.__name__))
except CallError:
raise
except Exception as e:
self.handle_error(e)
|
def handle_target(self, request, controller_args, controller_kwargs)
|
Internal method for this class
handles normalizing the passed in values from the decorator using
.normalize_target_params() and then passes them to the set .target()
| 3.30229
| 2.864804
| 1.152711
|
if target:
self.target = target
def decorated(decorated_self, *args, **kwargs):
self.handle_target(
request=decorated_self.request,
controller_args=args,
controller_kwargs=kwargs
)
return func(decorated_self, *args, **kwargs)
return decorated
|
def decorate(self, func, target, *anoop, **kwnoop)
|
decorate the passed in func calling target when func is called
:param func: the function being decorated
:param target: the target that will be run when func is called
:returns: the decorated func
| 3.328317
| 4.203642
| 0.79177
|
flags['type'] = flags.get('type', None)
paction = flags.get('action', 'store')
if paction == 'store_false':
flags['default'] = True
flags['type'] = bool
elif paction == 'store_true':
flags['default'] = False
flags['type'] = bool
prequired = False if 'default' in flags else flags.get('required', True)
flags["action"] = paction
flags["required"] = prequired
self.flags = flags
|
def normalize_flags(self, flags)
|
normalize the flags to make sure needed values are there
after this method is called self.flags is available
:param flags: the flags that will be normalized
| 3.392406
| 3.757473
| 0.902842
|
self.name = names[0]
self.is_kwarg = False
self.is_arg = False
self.names = []
try:
# http://stackoverflow.com/a/16488383/5006 uses ask forgiveness because
# of py2/3 differences of integer check
self.index = int(self.name)
self.name = ""
self.is_arg = True
except ValueError:
self.is_kwarg = True
self.names = names
|
def normalize_type(self, names)
|
Decide if this param is an arg or a kwarg and set appropriate internal flags
| 6.189315
| 5.108153
| 1.211654
|
if self.is_kwarg:
kwargs = self.normalize_kwarg(slf.request, kwargs)
else:
args = self.normalize_arg(slf.request, args)
return slf, args, kwargs
|
def normalize_param(self, slf, args, kwargs)
|
this is where all the magic happens: it will try to find the param and
put its value in kwargs, falling back to the default if one is set
| 3.111293
| 3.129186
| 0.994282
|
val = default
found_name = ''
for name in names:
if name in kwargs:
val = kwargs[name]
found_name = name
break
if not found_name and required:
raise ValueError("required param {} does not exist".format(self.name))
return found_name, val
|
def find_kwarg(self, request, names, required, default, kwargs)
|
actually try to retrieve names key from params dict
:param request: the current request instance, handy for child classes
:param names: the names this kwarg can be
:param required: True if a name has to be found in kwargs
:param default: the default value if name isn't found
:param kwargs: the kwargs that will be used to find the value
:returns: tuple, found_name, val where found_name is the actual name kwargs contained
| 3.138771
| 3.574076
| 0.878205
|
flags = self.flags
paction = flags['action']
ptype = flags['type']
pchoices = flags.get('choices', None)
allow_empty = flags.get('allow_empty', False)
min_size = flags.get('min_size', None)
max_size = flags.get('max_size', None)
regex = flags.get('regex', None)
if paction in set(['store_list']):
if isinstance(val, list) and len(val) > 1:
raise ValueError("too many values for param")
if isinstance(val, basestring):
val = list(val.split(','))
else:
val = list(val)
elif paction in set(['append', 'append_list']):
if not isinstance(val, list):
val = [val]
if paction == 'append_list':
vs = []
for v in val:
if isinstance(v, basestring):
vs.extend(v.split(','))
else:
vs.append(v)
val = vs
else:
if paction not in set(['store', 'store_false', 'store_true']):
raise RuntimeError('unknown param action {}'.format(paction))
if regex:
failed = False
if isinstance(regex, basestring):
if not re.search(regex, val): failed = True
else:
if not regex.search(val): failed = True
if failed:
raise ValueError("param failed regex check")
if ptype:
if isinstance(val, list) and ptype != list:
val = list(map(ptype, val))
else:
if isinstance(ptype, type):
if issubclass(ptype, bool):
if val in set(['true', 'True', '1']):
val = True
elif val in set(['false', 'False', '0']):
val = False
else:
val = ptype(val)
elif issubclass(ptype, str):
charset = request.encoding
if is_py2:
val = ptype(ByteString(val, charset))
else:
val = ptype(String(val, charset))
# if charset and isinstance(val, unicode):
# val = val.encode(charset)
# else:
# val = ptype(val)
else:
val = ptype(val)
else:
val = ptype(val)
if pchoices:
if isinstance(val, list) and ptype != list:
for v in val:
if v not in pchoices:
raise ValueError("param value {} not in choices {}".format(v, pchoices))
else:
if val not in pchoices:
raise ValueError("param value {} not in choices {}".format(val, pchoices))
# at some point this if statement is just going to be too ridiculous
# FieldStorage check is because of this bug https://bugs.python.org/issue19097
if not isinstance(val, cgi.FieldStorage):
if not allow_empty and val is not False and not val:
if 'default' not in flags:
raise ValueError("param was empty")
if min_size is not None:
failed = False
if isinstance(val, (int, float)):
if val < min_size: failed = True
else:
if len(val) < min_size: failed = True
if failed:
raise ValueError("param was smaller than {}".format(min_size))
if max_size is not None:
failed = False
if isinstance(val, (int, float)):
if val > max_size: failed = True
else:
if len(val) > max_size: failed = True
if failed:
raise ValueError("param was bigger than {}".format(max_size))
return val
|
def normalize_val(self, request, val)
|
This will take the value and make sure it meets expectations
:param request: the current request instance
:param val: the raw value pulled from kwargs or args
:returns: val that has met all param checks
:raises: ValueError if val fails any checks
| 2.260362
| 2.219744
| 1.018298
|
c = cls(*args, **kwargs)
c.connect()
try:
yield c
finally:
c.close()
|
def open(cls, *args, **kwargs)
|
just something to make it easier to quickly open a connection, do something
and then close it
| 4.65027
| 3.436655
| 1.353138
|
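The same open/connect/close bracketing expressed with `contextlib.contextmanager`, using a minimal stand-in class (the real class and its constructor arguments are not shown in this row):

```python
from contextlib import contextmanager

class Connection(object):
    # Minimal stand-in: connect/close bracket whatever work happens inside the block.
    def connect(self):
        print('connected')
    def close(self):
        print('closed')

@contextmanager
def open_connection(*args, **kwargs):
    c = Connection(*args, **kwargs)
    c.connect()
    try:
        yield c
    finally:
        c.close()  # always runs, even if the block raises

with open_connection() as conn:
    print('doing work with', conn)
```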
ret = None
ws_url = self.get_fetch_url(path, query)
ws_headers = self.get_fetch_headers("GET", headers)
ws_headers = ['{}: {}'.format(h[0], h[1]) for h in ws_headers.items() if h[1]]
timeout = self.get_timeout(timeout=timeout, **kwargs)
self.set_trace(kwargs.pop("trace", False))
#pout.v(websocket_url, websocket_headers, self.query_kwargs, self.headers)
try:
logger.debug("{} connecting to {}".format(self.client_id, ws_url))
self.ws = websocket.create_connection(
ws_url,
header=ws_headers,
timeout=timeout,
sslopt={'cert_reqs':ssl.CERT_NONE},
)
ret = self.recv_callback(callback=lambda r: r.uuid == "CONNECT")
if ret.code >= 400:
raise IOError("Failed to connect with code {}".format(ret.code))
# self.headers = headers
# self.query_kwargs = query_kwargs
except websocket.WebSocketTimeoutException:
raise IOError("Failed to connect within {} seconds".format(timeout))
except websocket.WebSocketException as e:
raise IOError("Failed to connect with error: {}".format(e))
except socket.error as e:
# this is an IOError, I just wanted to be aware of that, most common
# problem is: [Errno 111] Connection refused
raise
return ret
|
def connect(self, path="", headers=None, query=None, timeout=0, **kwargs)
|
make the actual connection to the websocket
:param headers: dict, key/val pairs of any headers to add to connection, if
you would like to override headers just pass in an empty value
:param query: dict, any query string params you want to send up with the connection
url
:returns: Payload, this will return the CONNECT response from the websocket
| 4.029448
| 4.124521
| 0.976949
|