| _id (string, 2-7 chars) | title (string, 1-88 chars) | partition (string, 3 classes) | text (string, 75-19.8k chars) | language (string, 1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q5200
|
cached_property
|
train
|
def cached_property(**kwargs):
"""Cache the return value of a property."""
def decorator(function):
@wraps(function)
def wrapper(self):
key = 'fandjango.%(model)s.%(property)s_%(pk)s' % {
'model': self.__class__.__name__,
'pk': self.pk,
'property': function.__name__
}
cached_value = cache.get(key)
delta = timedelta(**kwargs)
if cached_value is None:
value = function(self)
cache.set(key, value, delta.days * 86400 + delta.seconds)
else:
value = cached_value
return value
return wrapper
return decorator
|
python
|
{
"resource": ""
}
|
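A minimal usage sketch for the decorator above, assuming a Django project with a configured cache backend; the import path and the model are illustrative assumptions, not taken from the snippet:

```python
from django.db import models

# Assumed import path for the decorator shown above (Fandjango); adjust to the real location.
from fandjango.utils import cached_property

class FacebookProfile(models.Model):
    """Hypothetical model used only to illustrate the decorator."""

    @cached_property(hours=1)  # kwargs are forwarded to timedelta(**kwargs); cached for one hour
    def expensive_summary(self):
        # stands in for a slow computation or remote API call
        return sum(i * i for i in range(10 ** 6))
```

Despite the name, the decorator as shown returns a plain method, so the cached value is obtained by calling it (e.g. `profile.expensive_summary()`); the cache key combines the model class name, the property name, and the instance primary key, so each instance gets its own cached value.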
q5201
|
authorization_denied_view
|
train
|
def authorization_denied_view(request):
"""Proxy for the view referenced in ``FANDJANGO_AUTHORIZATION_DENIED_VIEW``."""
authorization_denied_module_name = AUTHORIZATION_DENIED_VIEW.rsplit('.', 1)[0]
authorization_denied_view_name = AUTHORIZATION_DENIED_VIEW.split('.')[-1]
authorization_denied_module = import_module(authorization_denied_module_name)
authorization_denied_view = getattr(authorization_denied_module, authorization_denied_view_name)
return authorization_denied_view(request)
|
python
|
{
"resource": ""
}
|
q5202
|
get_post_authorization_redirect_url
|
train
|
def get_post_authorization_redirect_url(request, canvas=True):
"""
Determine the URL users should be redirected to upon authorizing the application.
If the request is non-canvas, use the user-defined site URL if set, otherwise the site hostname.
"""
path = request.get_full_path()
if canvas:
if FACEBOOK_APPLICATION_CANVAS_URL:
path = path.replace(urlparse(FACEBOOK_APPLICATION_CANVAS_URL).path, '')
redirect_uri = 'https://%(domain)s/%(namespace)s%(path)s' % {
'domain': FACEBOOK_APPLICATION_DOMAIN,
'namespace': FACEBOOK_APPLICATION_NAMESPACE,
'path': path
}
else:
if FANDJANGO_SITE_URL:
site_url = FANDJANGO_SITE_URL
path = path.replace(urlparse(site_url).path, '')
else:
protocol = "https" if request.is_secure() else "http"
site_url = "%s://%s" % (protocol, request.get_host())
redirect_uri = site_url + path
return redirect_uri
|
python
|
{
"resource": ""
}
|
q5203
|
get_full_path
|
train
|
def get_full_path(request, remove_querystrings=[]):
"""Gets the current path, removing specified querstrings"""
path = request.get_full_path()
for qs in remove_querystrings:
path = re.sub(r'&?' + qs + '=?(.+)?&?', '', path)
return path
|
python
|
{
"resource": ""
}
|
q5204
|
authorize_application
|
train
|
def authorize_application(
request,
redirect_uri = 'https://%s/%s' % (FACEBOOK_APPLICATION_DOMAIN, FACEBOOK_APPLICATION_NAMESPACE),
permissions = FACEBOOK_APPLICATION_INITIAL_PERMISSIONS
):
"""
Redirect the user to authorize the application.
Redirection is done by rendering a JavaScript snippet that redirects the parent
window to the authorization URI, since Facebook will not allow this inside an iframe.
"""
query = {
'client_id': FACEBOOK_APPLICATION_ID,
'redirect_uri': redirect_uri
}
if permissions:
query['scope'] = ', '.join(permissions)
return render(
request = request,
template_name = 'fandjango/authorize_application.html',
dictionary = {
'url': 'https://www.facebook.com/dialog/oauth?%s' % urlencode(query)
},
status = 401
)
|
python
|
{
"resource": ""
}
|
q5205
|
deauthorize_application
|
train
|
def deauthorize_application(request):
"""
When a user deauthorizes an application, Facebook sends a HTTP POST request to the application's
"deauthorization callback" URL. This view picks up on requests of this sort and marks the corresponding
users as unauthorized.
"""
if request.facebook:
user = User.objects.get(
facebook_id = request.facebook.signed_request.user.id
)
user.authorized = False
user.save()
return HttpResponse()
else:
return HttpResponse(status=400)
|
python
|
{
"resource": ""
}
|
q5206
|
SqlQuery.make_update
|
train
|
def make_update(cls, table, set_query, where=None):
"""
Make UPDATE query.
:param str table: Name of the table to execute the query on.
:param str set_query: SET part of the UPDATE query.
:param str where:
Add a WHERE clause to execute query,
if the value is not |None|.
:return: Query of SQLite.
:rtype: str
:raises ValueError: If ``set_query`` is empty string.
:raises simplesqlite.NameValidationError:
|raises_validate_table_name|
"""
validate_table_name(table)
if typepy.is_null_string(set_query):
raise ValueError("SET query is null")
query_list = ["UPDATE {:s}".format(Table(table)), "SET {:s}".format(set_query)]
if where and isinstance(where, (six.text_type, Where, And, Or)):
query_list.append("WHERE {:s}".format(where))
return " ".join(query_list)
|
python
|
{
"resource": ""
}
|
q5207
|
SqlQuery.make_where_in
|
train
|
def make_where_in(cls, key, value_list):
"""
Make part of WHERE IN query.
:param str key: Attribute name of the key.
:param str value_list:
List of values that the right hand side associated with the key.
:return: Part of WHERE query of SQLite.
:rtype: str
:Examples:
>>> from simplesqlite.sqlquery import SqlQuery
>>> SqlQuery.make_where_in("key", ["hoge", "foo", "bar"])
"key IN ('hoge', 'foo', 'bar')"
"""
return "{:s} IN ({:s})".format(
Attr(key), ", ".join([Value(value).to_query() for value in value_list])
)
|
python
|
{
"resource": ""
}
|
q5208
|
SimpleSQLite.connect
|
train
|
def connect(self, database_path, mode="a"):
"""
Connect to a SQLite database.
:param str database_path:
Path to the SQLite database file to be connected.
:param str mode:
``"r"``: Open for read only.
``"w"``: Open for read/write.
Delete existing tables when connecting.
``"a"``: Open for read/write. Append to the existing tables.
:raises ValueError:
If ``database_path`` is invalid or |attr_mode| is invalid.
:raises simplesqlite.DatabaseError:
If the file is encrypted or is not a database.
:raises simplesqlite.OperationalError:
If unable to open the database file.
"""
self.close()
logger.debug("connect to a SQLite database: path='{}', mode={}".format(database_path, mode))
if mode == "r":
self.__verify_db_file_existence(database_path)
elif mode in ["w", "a"]:
self.__validate_db_path(database_path)
else:
raise ValueError("unknown connection mode: " + mode)
if database_path == MEMORY_DB_NAME:
self.__database_path = database_path
else:
self.__database_path = os.path.realpath(database_path)
try:
self.__connection = sqlite3.connect(database_path)
except sqlite3.OperationalError as e:
raise OperationalError(e)
self.__mode = mode
try:
# validate connection after connect
self.fetch_table_names()
except sqlite3.DatabaseError as e:
raise DatabaseError(e)
if mode != "w":
return
for table in self.fetch_table_names():
self.drop_table(table)
|
python
|
{
"resource": ""
}
|
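A short sketch of the connection modes described above, following simplesqlite's documented constructor usage; the file name is a placeholder:

```python
from simplesqlite import SimpleSQLite

con = SimpleSQLite("sample.sqlite", "w")   # "w": read/write, existing tables are dropped
con.close()
con.connect("sample.sqlite", mode="a")     # reconnect in append mode ("r" would be read-only)
```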
q5209
|
SimpleSQLite.execute_query
|
train
|
def execute_query(self, query, caller=None):
"""
Send arbitrary SQLite query to the database.
:param str query: Query to be executed.
:param tuple caller:
Caller information.
Expects the return value of :py:meth:`logging.Logger.findCaller`.
:return: The result of the query execution.
:rtype: sqlite3.Cursor
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.OperationalError: |raises_operational_error|
.. warning::
This method can execute an arbitrary query.
i.e. No access permissions check by |attr_mode|.
"""
import time
self.check_connection()
if typepy.is_null_string(query):
return None
if self.debug_query or self.global_debug_query:
logger.debug(query)
if self.__is_profile:
exec_start_time = time.time()
try:
result = self.connection.execute(six.text_type(query))
except (sqlite3.OperationalError, sqlite3.IntegrityError) as e:
if caller is None:
caller = logging.getLogger().findCaller()
file_path, line_no, func_name = caller[:3]
raise OperationalError(
message="\n".join(
[
"failed to execute query at {:s}({:d}) {:s}".format(
file_path, line_no, func_name
),
" - query: {}".format(MultiByteStrDecoder(query).unicode_str),
" - msg: {}".format(e),
" - db: {}".format(self.database_path),
]
)
)
if self.__is_profile:
self.__dict_query_count[query] = self.__dict_query_count.get(query, 0) + 1
elapse_time = time.time() - exec_start_time
self.__dict_query_totalexectime[query] = (
self.__dict_query_totalexectime.get(query, 0) + elapse_time
)
return result
|
python
|
{
"resource": ""
}
|
q5210
|
SimpleSQLite.select
|
train
|
def select(self, select, table_name, where=None, extra=None):
"""
Send a SELECT query to the database.
:param str select: Attribute for the ``SELECT`` query.
:param str table_name: |arg_select_table_name|
:param where: |arg_select_where|
:type where: |arg_where_type|
:param str extra: |arg_select_extra|
:return: Result of the query execution.
:rtype: sqlite3.Cursor
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
"""
self.verify_table_existence(table_name)
return self.execute_query(
six.text_type(Select(select, table_name, where, extra)),
logging.getLogger().findCaller(),
)
|
python
|
{
"resource": ""
}
|
q5211
|
SimpleSQLite.select_as_dict
|
train
|
def select_as_dict(self, table_name, columns=None, where=None, extra=None):
"""
Get data in the database and return fetched data as a
|OrderedDict| list.
:param str table_name: |arg_select_table_name|
:param list columns: |arg_select_as_xx_columns|
:param where: |arg_select_where|
:type where: |arg_where_type|
:param str extra: |arg_select_extra|
:return: Table data as |OrderedDict| instances.
:rtype: |list| of |OrderedDict|
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
:Example:
:ref:`example-select-as-dict`
"""
return self.select_as_tabledata(table_name, columns, where, extra).as_dict().get(table_name)
|
python
|
{
"resource": ""
}
|
q5212
|
SimpleSQLite.select_as_memdb
|
train
|
def select_as_memdb(self, table_name, columns=None, where=None, extra=None):
"""
Get data in the database and return fetched data as an
in-memory |SimpleSQLite| instance.
:param str table_name: |arg_select_table_name|
:param list columns: |arg_select_as_xx_columns|
:param where: |arg_select_where|
:type where: |arg_where_type|
:param str extra: |arg_select_extra|
:return:
Table data as a |SimpleSQLite| instance that connected to in
memory database.
:rtype: |SimpleSQLite|
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
"""
table_schema = self.schema_extractor.fetch_table_schema(table_name)
memdb = connect_memdb()
memdb.create_table_from_tabledata(
self.select_as_tabledata(table_name, columns, where, extra),
primary_key=table_schema.primary_key,
index_attrs=table_schema.index_list,
)
return memdb
|
python
|
{
"resource": ""
}
|
q5213
|
SimpleSQLite.insert
|
train
|
def insert(self, table_name, record, attr_names=None):
"""
Send an INSERT query to the database.
:param str table_name: Name of the table to execute the query on.
:param record: Record to be inserted.
:type record: |dict|/|namedtuple|/|list|/|tuple|
:raises IOError: |raises_write_permission|
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.OperationalError: |raises_operational_error|
:Example:
:ref:`example-insert-records`
"""
self.insert_many(table_name, records=[record], attr_names=attr_names)
|
python
|
{
"resource": ""
}
|
q5214
|
SimpleSQLite.insert_many
|
train
|
def insert_many(self, table_name, records, attr_names=None):
"""
Send an INSERT query with multiple records to the database.
:param str table_name: Name of the table to execute the query on.
:param records: Records to be inserted.
:type records: list of |dict|/|namedtuple|/|list|/|tuple|
:return: Number of inserted records.
:rtype: int
:raises IOError: |raises_write_permission|
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
:Example:
:ref:`example-insert-records`
"""
self.validate_access_permission(["w", "a"])
self.verify_table_existence(table_name)
if attr_names:
logger.debug(
"insert {number} records into {table}({attrs})".format(
number=len(records) if records else 0, table=table_name, attrs=attr_names
)
)
else:
logger.debug(
"insert {number} records into {table}".format(
number=len(records) if records else 0, table=table_name
)
)
if typepy.is_empty_sequence(records):
return 0
if attr_names is None:
attr_names = self.fetch_attr_names(table_name)
records = RecordConvertor.to_records(attr_names, records)
query = Insert(table_name, AttrList(attr_names)).to_query()
if self.debug_query or self.global_debug_query:
logging_count = 8
num_records = len(records)
logs = [query] + [
" record {:4d}: {}".format(i, record)
for i, record in enumerate(records[:logging_count])
]
if num_records - logging_count > 0:
logs.append(
" and other {} records will be inserted".format(num_records - logging_count)
)
logger.debug("\n".join(logs))
try:
self.connection.executemany(query, records)
except (sqlite3.OperationalError, sqlite3.IntegrityError) as e:
caller = logging.getLogger().findCaller()
file_path, line_no, func_name = caller[:3]
raise OperationalError(
"{:s}({:d}) {:s}: failed to execute query:\n".format(file_path, line_no, func_name)
+ " query={}\n".format(query)
+ " msg='{}'\n".format(e)
+ " db={}\n".format(self.database_path)
+ " records={}\n".format(records[:2])
)
return len(records)
|
python
|
{
"resource": ""
}
|
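A small sketch combining `create_table_from_data_matrix`, `insert` and `insert_many` on an in-memory database, modeled on simplesqlite's documented examples:

```python
from simplesqlite import SimpleSQLite

con = SimpleSQLite(":memory:", "w")
con.create_table_from_data_matrix("sample_table", ["id", "name"], [[1, "foo"]])

con.insert("sample_table", {"id": 2, "name": "bar"})       # single record
con.insert_many(
    "sample_table",
    [{"id": 3, "name": "baz"}, {"id": 4, "name": "qux"}],  # multiple records at once
)
print(con.fetch_num_records("sample_table"))  # expected: 4
```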
q5215
|
SimpleSQLite.update
|
train
|
def update(self, table_name, set_query, where=None):
"""Execute an UPDATE query.
Args:
table_name (|str|):
Name of the table to execute the query on.
set_query (|str|):
``SET`` clause for the update query.
where (|arg_where_type| , optional):
``WHERE`` clause for the update query.
Defaults to |None|.
Raises:
IOError:
|raises_write_permission|
simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
simplesqlite.OperationalError:
|raises_operational_error|
"""
self.validate_access_permission(["w", "a"])
self.verify_table_existence(table_name)
query = SqlQuery.make_update(table_name, set_query, where)
return self.execute_query(query, logging.getLogger().findCaller())
|
python
|
{
"resource": ""
}
|
q5216
|
SimpleSQLite.delete
|
train
|
def delete(self, table_name, where=None):
"""
Send a DELETE query to the database.
:param str table_name: Name of the table to execute the query on.
:param where: |arg_select_where|
:type where: |arg_where_type|
"""
self.validate_access_permission(["w", "a"])
self.verify_table_existence(table_name)
query = "DELETE FROM {:s}".format(table_name)
if where:
query += " WHERE {:s}".format(where)
return self.execute_query(query, logging.getLogger().findCaller())
|
python
|
{
"resource": ""
}
|
q5217
|
SimpleSQLite.fetch_value
|
train
|
def fetch_value(self, select, table_name, where=None, extra=None):
"""
Fetch a value from the table. Return |None| if no value matches
the conditions, or if the table is not found in the database.
:param str select: Attribute for SELECT query
:param str table_name: Name of the table to execute the query on.
:param where: |arg_select_where|
:type where: |arg_where_type|
:return: Result of execution of the query.
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.OperationalError: |raises_operational_error|
"""
try:
self.verify_table_existence(table_name)
except TableNotFoundError as e:
logger.debug(e)
return None
result = self.execute_query(
Select(select, table_name, where, extra), logging.getLogger().findCaller()
)
if result is None:
return None
fetch = result.fetchone()
if fetch is None:
return None
return fetch[0]
|
python
|
{
"resource": ""
}
|
q5218
|
SimpleSQLite.fetch_num_records
|
train
|
def fetch_num_records(self, table_name, where=None):
"""
Fetch the number of records in a table.
:param str table_name: Table name to get number of records.
:param where: |arg_select_where|
:type where: |arg_where_type|
:return:
Number of records in the table.
|None| if no value matches the conditions,
or if the table is not found in the database.
:rtype: int
"""
return self.fetch_value(select="COUNT(*)", table_name=table_name, where=where)
|
python
|
{
"resource": ""
}
|
q5219
|
SimpleSQLite.get_profile
|
train
|
def get_profile(self, profile_count=50):
"""
Get profile of query execution time.
:param int profile_count:
Number of profiles to retrieve,
counted from the top query in descending order by
the cumulative execution time.
:return: Profile information for each query.
:rtype: list of |namedtuple|
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.OperationalError: |raises_operational_error|
:Example:
:ref:`example-get-profile`
"""
from collections import namedtuple
profile_table_name = "sql_profile"
value_matrix = [
[query, execute_time, self.__dict_query_count.get(query, 0)]
for query, execute_time in six.iteritems(self.__dict_query_totalexectime)
]
attr_names = ("sql_query", "cumulative_time", "count")
con_tmp = connect_memdb()
try:
con_tmp.create_table_from_data_matrix(
profile_table_name, attr_names, data_matrix=value_matrix
)
except ValueError:
return []
try:
result = con_tmp.select(
select="{:s},SUM({:s}),SUM({:s})".format(*attr_names),
table_name=profile_table_name,
extra="GROUP BY {:s} ORDER BY {:s} DESC LIMIT {:d}".format(
attr_names[0], attr_names[1], profile_count
),
)
except sqlite3.OperationalError:
return []
if result is None:
return []
SqliteProfile = namedtuple("SqliteProfile", " ".join(attr_names))
return [SqliteProfile(*profile) for profile in result.fetchall()]
|
python
|
{
"resource": ""
}
|
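A sketch of reading the profile data collected by the connection; enabling profiling via a `profile=True` constructor argument is an assumption based on simplesqlite's documentation:

```python
from simplesqlite import SimpleSQLite

con = SimpleSQLite(":memory:", "w", profile=True)  # profile kwarg assumed, see note above
con.create_table_from_data_matrix("t", ["a"], [[1], [2]])
con.select(select="*", table_name="t")

for entry in con.get_profile():
    print(entry.sql_query, entry.cumulative_time, entry.count)
```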
q5220
|
SimpleSQLite.create_table_from_data_matrix
|
train
|
def create_table_from_data_matrix(
self,
table_name,
attr_names,
data_matrix,
primary_key=None,
add_primary_key_column=False,
index_attrs=None,
):
"""
Create a table if not exists. Moreover, insert data into the created
table.
:param str table_name: Table name to create.
:param list attr_names: Attribute names of the table.
:param data_matrix: Data to be inserted into the table.
:type data_matrix: List of |dict|/|namedtuple|/|list|/|tuple|
:param str primary_key: |primary_key|
:param tuple index_attrs: |index_attrs|
:raises simplesqlite.NameValidationError:
|raises_validate_table_name|
:raises simplesqlite.NameValidationError:
|raises_validate_attr_name|
:raises ValueError: If the ``data_matrix`` is empty.
:Example:
:ref:`example-create-table-from-data-matrix`
.. seealso::
:py:meth:`.create_table`
:py:meth:`.insert_many`
:py:meth:`.create_index_list`
"""
self.__create_table_from_tabledata(
TableData(table_name, attr_names, data_matrix),
primary_key,
add_primary_key_column,
index_attrs,
)
|
python
|
{
"resource": ""
}
|
q5221
|
SimpleSQLite.create_table_from_dataframe
|
train
|
def create_table_from_dataframe(
self,
dataframe,
table_name="",
primary_key=None,
add_primary_key_column=False,
index_attrs=None,
):
"""
Create a table from a pandas.DataFrame instance.
:param pandas.DataFrame dataframe: DataFrame instance to convert.
:param str table_name: Table name to create.
:param str primary_key: |primary_key|
:param tuple index_attrs: |index_attrs|
:Examples:
:ref:`example-create-table-from-df`
"""
self.__create_table_from_tabledata(
TableData.from_dataframe(dataframe=dataframe, table_name=table_name),
primary_key,
add_primary_key_column,
index_attrs,
)
|
python
|
{
"resource": ""
}
|
q5222
|
SimpleSQLite.close
|
train
|
def close(self):
"""
Commit and close the connection.
.. seealso:: :py:meth:`sqlite3.Connection.close`
"""
if self.__delayed_connection_path and self.__connection is None:
self.__initialize_connection()
return
try:
self.check_connection()
except (SystemError, NullDatabaseConnectionError):
return
logger.debug("close connection to a SQLite database: path='{}'".format(self.database_path))
self.commit()
self.connection.close()
self.__initialize_connection()
|
python
|
{
"resource": ""
}
|
q5223
|
SimpleSQLite.__extract_col_type_from_tabledata
|
train
|
def __extract_col_type_from_tabledata(table_data):
"""
Extract data type name for each column as SQLite names.
:param tabledata.TableData table_data:
:return: { column_number : column_data_type }
:rtype: dictionary
"""
typename_table = {
typepy.Typecode.INTEGER: "INTEGER",
typepy.Typecode.REAL_NUMBER: "REAL",
typepy.Typecode.STRING: "TEXT",
}
return dict(
[
[col_idx, typename_table.get(col_dp.typecode, "TEXT")]
for col_idx, col_dp in enumerate(table_data.column_dp_list)
]
)
|
python
|
{
"resource": ""
}
|
q5224
|
copy_table
|
train
|
def copy_table(src_con, dst_con, src_table_name, dst_table_name, is_overwrite=True):
"""
Copy a table from source to destination.
:param SimpleSQLite src_con: Connection to the source database.
:param SimpleSQLite dst_con: Connection to the destination database.
:param str src_table_name: Source table name to copy.
:param str dst_table_name: Destination table name.
:param bool is_overwrite: If |True|, overwrite existing table.
:return: |True| if the copy operation succeeds.
:rtype: bool
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises ValueError:
If attributes of the table are different from each other.
"""
logger.debug(
"copy table: src={src_db}.{src_tbl}, dst={dst_db}.{dst_tbl}".format(
src_db=src_con.database_path,
src_tbl=src_table_name,
dst_db=dst_con.database_path,
dst_tbl=dst_table_name,
)
)
src_con.verify_table_existence(src_table_name)
dst_con.validate_access_permission(["w", "a"])
if dst_con.has_table(dst_table_name):
if is_overwrite:
dst_con.drop_table(dst_table_name)
else:
logger.error(
"failed to copy table: the table already exists "
"(src_table={}, dst_table={})".format(src_table_name, dst_table_name)
)
return False
primary_key, index_attrs, _ = extract_table_metadata(src_con, src_table_name)
result = src_con.select(select="*", table_name=src_table_name)
if result is None:
return False
dst_con.create_table_from_data_matrix(
dst_table_name,
src_con.fetch_attr_names(src_table_name),
result.fetchall(),
primary_key=primary_key,
index_attrs=index_attrs,
)
return True
|
python
|
{
"resource": ""
}
|
q5225
|
get_builtin_date
|
train
|
def get_builtin_date(date, date_format="%Y-%m-%dT%H:%M:%S", raise_exception=False):
"""
Try to convert a date to a builtin instance of ``datetime.datetime``.
The input date can be a ``str``, a ``datetime.datetime``, an ``xmlrpc.client.DateTime`` or an ``xmlrpclib.DateTime``
instance. The returned object is a ``datetime.datetime``.
:param date: The date object to convert.
:param date_format: If the given date is a str, format is passed to strptime to parse it
:param raise_exception: If set to True, an exception will be raised if the input string cannot be parsed
:return: A valid ``datetime.datetime`` instance
"""
if isinstance(date, datetime.datetime):
# Default XML-RPC handler is configured to decode dateTime.iso8601 type
# to builtin datetime.datetime instance
return date
elif isinstance(date, xmlrpc_client.DateTime):
# If constant settings.MODERNRPC_XMLRPC_USE_BUILTIN_TYPES has been set to True
# the date is decoded as DateTime object
return datetime.datetime.strptime(date.value, "%Y%m%dT%H:%M:%S")
else:
# If date is given as str (or unicode for python 2)
# This is the normal behavior for JSON-RPC
try:
return datetime.datetime.strptime(date, date_format)
except ValueError:
if raise_exception:
raise
else:
return None
|
python
|
{
"resource": ""
}
|
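A quick sketch of the helper's behavior for the branches above; the import path follows django-modern-rpc's helpers module and is treated here as an assumption:

```python
import datetime
from modernrpc.helpers import get_builtin_date  # assumed import path

print(get_builtin_date("2019-06-01T12:30:00"))                 # default ISO-like format
print(get_builtin_date("01/06/2019", date_format="%d/%m/%Y"))  # custom format
print(get_builtin_date(datetime.datetime(2019, 6, 1)))         # already a datetime: returned as-is
print(get_builtin_date("not a date"))                          # unparseable: returns None
```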
q5226
|
user_in_group
|
train
|
def user_in_group(user, group):
"""Returns True if the given user is in given group"""
if isinstance(group, Group):
return user_is_superuser(user) or group in user.groups.all()
elif isinstance(group, six.string_types):
return user_is_superuser(user) or user.groups.filter(name=group).exists()
raise TypeError("'group' argument must be a string or a Group instance")
|
python
|
{
"resource": ""
}
|
q5227
|
user_in_any_group
|
train
|
def user_in_any_group(user, groups):
"""Returns True if the given user is in at least 1 of the given groups"""
return user_is_superuser(user) or any(user_in_group(user, group) for group in groups)
|
python
|
{
"resource": ""
}
|
q5228
|
user_in_all_groups
|
train
|
def user_in_all_groups(user, groups):
"""Returns True if the given user is in all given groups"""
return user_is_superuser(user) or all(user_in_group(user, group) for group in groups)
|
python
|
{
"resource": ""
}
|
q5229
|
RPCEntryPoint.get_handler_classes
|
train
|
def get_handler_classes(self):
"""Return the list of handlers to use when receiving RPC requests."""
handler_classes = [import_string(handler_cls) for handler_cls in settings.MODERNRPC_HANDLERS]
if self.protocol == ALL:
return handler_classes
else:
return [cls for cls in handler_classes if cls.protocol in ensure_sequence(self.protocol)]
|
python
|
{
"resource": ""
}
|
q5230
|
RPCEntryPoint.post
|
train
|
def post(self, request, *args, **kwargs):
"""
Handle a XML-RPC or JSON-RPC request.
:param request: Incoming request
:param args: Additional arguments
:param kwargs: Additional named arguments
:return: A HttpResponse containing XML-RPC or JSON-RPC response, depending on the incoming request
"""
logger.debug('RPC request received...')
for handler_cls in self.get_handler_classes():
handler = handler_cls(request, self.entry_point)
try:
if not handler.can_handle():
continue
logger.debug('Request will be handled by {}'.format(handler_cls.__name__))
result = handler.process_request()
return handler.result_success(result)
except AuthenticationFailed as e:
# Customize HttpResponse instance used when AuthenticationFailed was raised
logger.warning(e)
return handler.result_error(e, HttpResponseForbidden)
except RPCException as e:
logger.warning('RPC exception: {}'.format(e), exc_info=settings.MODERNRPC_LOG_EXCEPTIONS)
return handler.result_error(e)
except Exception as e:
logger.error('Exception raised from a RPC method: "{}"'.format(e),
exc_info=settings.MODERNRPC_LOG_EXCEPTIONS)
return handler.result_error(RPCInternalError(str(e)))
logger.error('Unable to handle incoming request.')
return HttpResponse('Unable to handle your request. Please ensure you called the right entry point. If not, '
'this could be a server error.')
|
python
|
{
"resource": ""
}
|
q5231
|
RPCEntryPoint.get_context_data
|
train
|
def get_context_data(self, **kwargs):
"""Update context data with list of RPC methods of the current entry point.
Will be used to display the methods documentation page"""
kwargs.update({
'methods': registry.get_all_methods(self.entry_point, sort_methods=True),
})
return super(RPCEntryPoint, self).get_context_data(**kwargs)
|
python
|
{
"resource": ""
}
|
q5232
|
ModernRpcConfig.rpc_methods_registration
|
train
|
def rpc_methods_registration():
"""Look into each module listed in settings.MODERNRPC_METHODS_MODULES, import each module and register
functions annotated with @rpc_method decorator in the registry"""
# In previous version, django-modern-rpc used the django cache system to store methods registry.
# It is useless now, so clean the cache from old data
clean_old_cache_content()
# For security (and unit tests), make sure the registry is empty before registering rpc methods
registry.reset()
if not settings.MODERNRPC_METHODS_MODULES:
# settings.MODERNRPC_METHODS_MODULES is undefined or empty, but we already notified user
# with check_required_settings_defined() function. See http://docs.djangoproject.com/en/1.10/topics/checks/
return
# Lookup content of MODERNRPC_METHODS_MODULES, and add the module containing system methods
for module_name in settings.MODERNRPC_METHODS_MODULES + ['modernrpc.system_methods']:
try:
# Import the module in current scope
rpc_module = import_module(module_name)
except ImportError:
msg = 'Unable to load module "{}" declared in settings.MODERNRPC_METHODS_MODULES. Please ensure ' \
'it is available and doesn\'t contain any errors'.format(module_name)
warnings.warn(msg, category=Warning)
continue
# Lookup all global functions in module
for _, func in inspect.getmembers(rpc_module, inspect.isfunction):
# And register only functions with attribute 'modernrpc_enabled' defined to True
if getattr(func, 'modernrpc_enabled', False):
registry.register_method(func)
logger.info('django-modern-rpc initialized: {} RPC methods registered'.format(registry.total_count()))
|
python
|
{
"resource": ""
}
|
q5233
|
http_basic_auth_login_required
|
train
|
def http_basic_auth_login_required(func=None):
"""Decorator. Use it to specify a RPC method is available only to logged users"""
wrapper = auth.set_authentication_predicate(http_basic_auth_check_user, [auth.user_is_authenticated])
# If @http_basic_auth_login_required() is used (with parenthesis)
if func is None:
return wrapper
# If @http_basic_auth_login_required is used without parenthesis
return wrapper(func)
|
python
|
{
"resource": ""
}
|
q5234
|
http_basic_auth_superuser_required
|
train
|
def http_basic_auth_superuser_required(func=None):
"""Decorator. Use it to specify a RPC method is available only to logged superusers"""
wrapper = auth.set_authentication_predicate(http_basic_auth_check_user, [auth.user_is_superuser])
# If @http_basic_auth_superuser_required() is used (with parenthesis)
if func is None:
return wrapper
# If @http_basic_auth_superuser_required is used without parenthesis
return wrapper(func)
|
python
|
{
"resource": ""
}
|
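A sketch of restricting an RPC method with the decorators above; the import paths reflect django-modern-rpc's layout and should be treated as assumptions:

```python
from modernrpc.auth.basic import http_basic_auth_login_required, http_basic_auth_superuser_required
from modernrpc.core import rpc_method

@rpc_method
@http_basic_auth_login_required
def logged_user_required(x):
    return x

@rpc_method
@http_basic_auth_superuser_required
def superuser_required(x):
    return x
```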
q5235
|
rpc_method
|
train
|
def rpc_method(func=None, name=None, entry_point=ALL, protocol=ALL,
str_standardization=settings.MODERNRPC_PY2_STR_TYPE,
str_standardization_encoding=settings.MODERNRPC_PY2_STR_ENCODING):
"""
Mark a standard Python function as an RPC method.
All arguments are optional
:param func: A standard function
:param name: Used as RPC method name instead of original function name
:param entry_point: Default: ALL. Used to limit usage of the RPC method for a specific set of entry points
:param protocol: Default: ALL. Used to limit usage of the RPC method for a specific protocol (JSONRPC or XMLRPC)
:param str_standardization: Default: settings.MODERNRPC_PY2_STR_TYPE. Configure string standardization on python 2.
Ignored on python 3.
:param str_standardization_encoding: Default: settings.MODERNRPC_PY2_STR_ENCODING. Configure the encoding used
to perform string standardization conversion. Ignored on python 3.
:type name: str
:type entry_point: str
:type protocol: str
:type str_standardization: type str or unicode
:type str_standardization_encoding: str
"""
def decorated(_func):
_func.modernrpc_enabled = True
_func.modernrpc_name = name or _func.__name__
_func.modernrpc_entry_point = entry_point
_func.modernrpc_protocol = protocol
_func.str_standardization = str_standardization
_func.str_standardization_encoding = str_standardization_encoding
return _func
# If @rpc_method() is used with parenthesis (with or without argument)
if func is None:
return decorated
# If @rpc_method is used without parenthesis
return decorated(func)
|
python
|
{
"resource": ""
}
|
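Typical registration, following django-modern-rpc's documented usage; the module holding these functions must be listed in settings.MODERNRPC_METHODS_MODULES:

```python
from modernrpc.core import rpc_method

@rpc_method
def add(a, b):
    """Add two numbers and return the result."""
    return a + b

@rpc_method(name='math.multiply')   # expose under a custom RPC name
def multiply(a, b):
    return a * b
```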
q5236
|
RPCMethod.parse_docstring
|
train
|
def parse_docstring(self, content):
"""
Parse the given full docstring, and extract method description, arguments, and return documentation.
This method tries to find argument descriptions and types, and puts the information in the "args_doc" and "signature"
members. It also parses the return type and description, and puts the information in the "return_doc" member.
All other lines are added to the returned string.
:param content: The full docstring
:type content: str
:return: The parsed method description
:rtype: str
"""
if not content:
return
raw_docstring = ''
# We use the helper defined in django admindocs app to remove indentation chars from docstring,
# and parse it as title, body, metadata. We don't use metadata for now.
docstring = trim_docstring(content)
for line in docstring.split('\n'):
# Empty line
if not line:
raw_docstring += '\n'
continue
param_match = PARAM_REXP.match(line)
if param_match:
param_name, description = param_match.group(1, 2)
if param_name == 'kwargs':
continue
doc = self.args_doc.setdefault(param_name, {})
doc['text'] = description
continue
param_type_match = PARAM_TYPE_REXP.match(line)
if param_type_match:
param_name, param_type = param_type_match.group(1, 2)
if param_name == 'kwargs':
continue
doc = self.args_doc.setdefault(param_name, {})
doc['type'] = param_type
self.signature.append(param_type)
continue
return_match = RETURN_REXP.match(line)
if return_match:
return_description = return_match.group(1)
self.return_doc['text'] = return_description
continue
return_type_match = RETURN_TYPE_REXP.match(line)
if return_type_match:
return_description = return_type_match.group(1)
self.return_doc['type'] = return_description
self.signature.insert(0, return_description)
continue
# Line doesn't match with known args/return regular expressions,
# add the line to raw help text
raw_docstring += line + '\n'
return raw_docstring
|
python
|
{
"resource": ""
}
|
q5237
|
RPCMethod.html_doc
|
train
|
def html_doc(self):
"""Methods docstring, as HTML"""
if not self.raw_docstring:
result = ''
elif settings.MODERNRPC_DOC_FORMAT.lower() in ('rst', 'restructred', 'restructuredtext'):
from docutils.core import publish_parts
result = publish_parts(self.raw_docstring, writer_name='html')['body']
elif settings.MODERNRPC_DOC_FORMAT.lower() in ('md', 'markdown'):
import markdown
result = markdown.markdown(self.raw_docstring)
else:
result = "<p>{}</p>".format(self.raw_docstring.replace('\n\n', '</p><p>').replace('\n', ' '))
return result
|
python
|
{
"resource": ""
}
|
q5238
|
RPCMethod.available_for_protocol
|
train
|
def available_for_protocol(self, protocol):
"""Check if the current function can be executed from a request defining the given protocol"""
if self.protocol == ALL or protocol == ALL:
return True
return protocol in ensure_sequence(self.protocol)
|
python
|
{
"resource": ""
}
|
q5239
|
RPCMethod.available_for_entry_point
|
train
|
def available_for_entry_point(self, entry_point):
"""Check if the current function can be executed from a request to the given entry point"""
if self.entry_point == ALL or entry_point == ALL:
return True
return entry_point in ensure_sequence(self.entry_point)
|
python
|
{
"resource": ""
}
|
q5240
|
RPCMethod.is_valid_for
|
train
|
def is_valid_for(self, entry_point, protocol):
"""Check if the current function can be executed from a request to the given entry point
and with the given protocol"""
return self.available_for_entry_point(entry_point) and self.available_for_protocol(protocol)
|
python
|
{
"resource": ""
}
|
q5241
|
RPCMethod.is_return_doc_available
|
train
|
def is_return_doc_available(self):
"""Returns True if this method's return is documented"""
return bool(self.return_doc and (self.return_doc.get('text') or self.return_doc.get('type')))
|
python
|
{
"resource": ""
}
|
q5242
|
_RPCRegistry.register_method
|
train
|
def register_method(self, func):
"""
Register a function to be available as an RPC method.
The given function will be inspected to find external_name, protocol and entry_point values set by the decorator
@rpc_method.
:param func: A function previously decorated using @rpc_method
:return: The name of registered method
"""
if not getattr(func, 'modernrpc_enabled', False):
raise ImproperlyConfigured('Error: trying to register {} as RPC method, but it has not been decorated.'
.format(func.__name__))
# Define the external name of the function
name = getattr(func, 'modernrpc_name', func.__name__)
logger.debug('Register RPC method "{}"'.format(name))
if name.startswith('rpc.'):
raise ImproperlyConfigured('According to RPC standard, method names starting with "rpc." are reserved for '
'system extensions and must not be used. See '
'http://www.jsonrpc.org/specification#extensions for more information.')
# Encapsulate the function in a RPCMethod object
method = RPCMethod(func)
# Ensure method names are unique in the registry
existing_method = self.get_method(method.name, ALL, ALL)
if existing_method is not None:
# Trying to register many times the same function is OK, because if a method is decorated
# with @rpc_method(), it could be imported in different places of the code
if method == existing_method:
return method.name
# But if we try to use the same name to register 2 different methods, we
# must inform the developer there is an error in the code
else:
raise ImproperlyConfigured("A RPC method with name {} has already been registered".format(method.name))
# Store the method
self._registry[method.name] = method
logger.debug('Method registered. len(registry): {}'.format(len(self._registry)))
return method.name
|
python
|
{
"resource": ""
}
|
q5243
|
_RPCRegistry.get_method
|
train
|
def get_method(self, name, entry_point, protocol):
"""Retrieve a method from the given name"""
if name in self._registry and self._registry[name].is_valid_for(entry_point, protocol):
return self._registry[name]
return None
|
python
|
{
"resource": ""
}
|
q5244
|
logger_has_handlers
|
train
|
def logger_has_handlers(logger):
"""
Check if given logger has at least 1 handler associated, return a boolean value.
Since Python 2 doesn't provide Logger.hasHandlers(), we have to perform the lookup by ourselves.
"""
if six.PY3:
return logger.hasHandlers()
else:
c = logger
rv = False
while c:
if c.handlers:
rv = True
break
if not c.propagate:
break
else:
c = c.parent
return rv
|
python
|
{
"resource": ""
}
|
q5245
|
get_modernrpc_logger
|
train
|
def get_modernrpc_logger(name):
"""Get a logger from default logging manager. If no handler is associated, add a default NullHandler"""
logger = logging.getLogger(name)
if not logger_has_handlers(logger):
# If logging is not configured in the current project, configure this logger to discard all logs messages.
# This will prevent the 'No handlers could be found for logger XXX' error on Python 2,
# and avoid redirecting errors to the default 'lastResort' StreamHandler on Python 3
logger.addHandler(logging.NullHandler())
return logger
|
python
|
{
"resource": ""
}
|
q5246
|
RPCHandler.execute_procedure
|
train
|
def execute_procedure(self, name, args=None, kwargs=None):
"""
Call the concrete python function corresponding to given RPC Method `name` and return the result.
Raise RPCUnknownMethod, AuthenticationFailed, RPCInvalidParams or any Exception sub-class.
"""
_method = registry.get_method(name, self.entry_point, self.protocol)
if not _method:
raise RPCUnknownMethod(name)
logger.debug('Check authentication / permissions for method {} and user {}'
.format(name, self.request.user))
if not _method.check_permissions(self.request):
raise AuthenticationFailed(name)
logger.debug('RPC method {} will be executed'.format(name))
# Replace default None value with empty instance of corresponding type
args = args or []
kwargs = kwargs or {}
# If the RPC method needs to access some internals, update kwargs dict
if _method.accept_kwargs:
kwargs.update({
REQUEST_KEY: self.request,
ENTRY_POINT_KEY: self.entry_point,
PROTOCOL_KEY: self.protocol,
HANDLER_KEY: self,
})
if six.PY2:
method_std, encoding = _method.str_standardization, _method.str_std_encoding
args = modernrpc.compat.standardize_strings(args, strtype=method_std, encoding=encoding)
kwargs = modernrpc.compat.standardize_strings(kwargs, strtype=method_std, encoding=encoding)
logger.debug('Params: args = {} - kwargs = {}'.format(args, kwargs))
try:
# Call the rpc method, as standard python function
return _method.function(*args, **kwargs)
except TypeError as e:
# If given arguments cannot be transmitted properly to python function,
# raise an Invalid Params exceptions
raise RPCInvalidParams(str(e))
|
python
|
{
"resource": ""
}
|
q5247
|
__system_listMethods
|
train
|
def __system_listMethods(**kwargs):
"""Returns a list of all methods available in the current entry point"""
entry_point = kwargs.get(ENTRY_POINT_KEY)
protocol = kwargs.get(PROTOCOL_KEY)
return registry.get_all_method_names(entry_point, protocol, sort_methods=True)
|
python
|
{
"resource": ""
}
|
q5248
|
__system_methodSignature
|
train
|
def __system_methodSignature(method_name, **kwargs):
"""
Returns an array describing the signature of the given method name.
The result is an array with:
- Return type as the first element
- Types of method arguments from element 1 to N
:param method_name: Name of a method available for current entry point (and protocol)
:param kwargs:
:return: An array describing types of return values and method arguments
"""
entry_point = kwargs.get(ENTRY_POINT_KEY)
protocol = kwargs.get(PROTOCOL_KEY)
method = registry.get_method(method_name, entry_point, protocol)
if method is None:
raise RPCInvalidParams('Unknown method {}. Unable to retrieve signature.'.format(method_name))
return method.signature
|
python
|
{
"resource": ""
}
|
q5249
|
__system_methodHelp
|
train
|
def __system_methodHelp(method_name, **kwargs):
"""
Returns the documentation of the given method name.
:param method_name: Name of a method available for current entry point (and protocol)
:param kwargs:
:return: Documentation text for the RPC method
"""
entry_point = kwargs.get(ENTRY_POINT_KEY)
protocol = kwargs.get(PROTOCOL_KEY)
method = registry.get_method(method_name, entry_point, protocol)
if method is None:
raise RPCInvalidParams('Unknown method {}. Unable to retrieve its documentation.'.format(method_name))
return method.html_doc
|
python
|
{
"resource": ""
}
|
q5250
|
__system_multiCall
|
train
|
def __system_multiCall(calls, **kwargs):
"""
Call multiple RPC methods at once.
:param calls: An array of struct like {"methodName": string, "params": array }
:param kwargs: Internal data
:type calls: list
:type kwargs: dict
:return:
"""
if not isinstance(calls, list):
raise RPCInvalidParams('system.multicall first argument should be a list, {} given.'.format(type(calls)))
handler = kwargs.get(HANDLER_KEY)
results = []
for call in calls:
try:
result = handler.execute_procedure(call['methodName'], args=call.get('params'))
# From https://mirrors.talideon.com/articles/multicall.html:
# "Notice that regular return values are always nested inside a one-element array. This allows you to
# return structs from functions without confusing them with faults."
results.append([result])
except RPCException as e:
results.append({
'faultCode': e.code,
'faultString': e.message,
})
except Exception as e:
results.append({
'faultCode': RPC_INTERNAL_ERROR,
'faultString': str(e),
})
return results
|
python
|
{
"resource": ""
}
|
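A client-side view of system.multicall using only the standard library; the endpoint URL and the `add`/`multiply` methods are hypothetical:

```python
import xmlrpc.client

proxy = xmlrpc.client.ServerProxy("http://localhost:8000/rpc/")  # hypothetical endpoint
multicall = xmlrpc.client.MultiCall(proxy)
multicall.add(2, 3)
multicall.multiply(5, 7)

# A single system.multicall request is sent; each regular result comes back
# wrapped in a one-element array, faults as {faultCode, faultString} structs.
for result in multicall():
    print(result)
```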
q5251
|
acquire_code
|
train
|
def acquire_code(args, session, session3):
"""returns the user's token serial number, MFA token code, and an
error code."""
serial_number = find_mfa_for_user(args.serial_number, session, session3)
if not serial_number:
print("There are no MFA devices associated with this user.",
file=sys.stderr)
return None, None, USER_RECOVERABLE_ERROR
token_code = args.token_code
if token_code is None:
while token_code is None or len(token_code) != 6:
token_code = getpass.getpass("MFA Token Code: ")
return serial_number, token_code, OK
|
python
|
{
"resource": ""
}
|
q5252
|
rotate
|
train
|
def rotate(args, credentials):
"""rotate the identity profile's AWS access key pair."""
current_access_key_id = credentials.get(
args.identity_profile, 'aws_access_key_id')
# create new sessions using the MFA credentials
session, session3, err = make_session(args.target_profile)
if err:
return err
iam = session3.resource('iam')
# find the AccessKey corresponding to the identity profile and delete it.
current_access_key = next((key for key
in iam.CurrentUser().access_keys.all()
if key.access_key_id == current_access_key_id))
current_access_key.delete()
# create the new access key pair
iam_service = session3.client('iam')
new_access_key_pair = iam_service.create_access_key()["AccessKey"]
print("Rotating from %s to %s." % (current_access_key.access_key_id,
new_access_key_pair['AccessKeyId']),
file=sys.stderr)
update_credentials_file(args.aws_credentials,
args.identity_profile,
args.identity_profile,
credentials,
new_access_key_pair)
print("%s profile updated." % args.identity_profile, file=sys.stderr)
return OK
|
python
|
{
"resource": ""
}
|
q5253
|
BaseField.coerce
|
train
|
def coerce(self, value):
"""Coerce a cleaned value."""
if self._coerce is not None:
value = self._coerce(value)
return value
|
python
|
{
"resource": ""
}
|
q5254
|
Item.all_from
|
train
|
def all_from(cls, *args, **kwargs):
"""Query for items passing PyQuery args explicitly."""
pq_items = cls._get_items(*args, **kwargs)
return [cls(item=i) for i in pq_items.items()]
|
python
|
{
"resource": ""
}
|
q5255
|
Item.all
|
train
|
def all(cls, path=''):
"""Return all ocurrences of the item."""
url = urljoin(cls._meta.base_url, path)
pq_items = cls._get_items(url=url, **cls._meta._pyquery_kwargs)
return [cls(item=i) for i in pq_items.items()]
|
python
|
{
"resource": ""
}
|
q5256
|
Customer.info
|
train
|
def info(self, id, attributes=None):
"""
Retrieve customer data
:param id: ID of customer
:param attributes: `List` of attributes needed
"""
if attributes:
return self.call('customer.info', [id, attributes])
else:
return self.call('customer.info', [id])
|
python
|
{
"resource": ""
}
|
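A hypothetical usage sketch for these Magento API wrappers; the constructor signature (URL, API user, API key) and context-manager support are assumptions and may differ from the actual client library:

```python
from magento import Customer  # assumed package layout

with Customer('http://shop.example.com/', 'apiuser', 'apikey') as customer_api:
    data = customer_api.info(1, attributes=['email', 'firstname'])
    print(data)
```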
q5257
|
Order.search
|
train
|
def search(self, filters=None, fields=None, limit=None, page=1):
"""
Retrieve the order list using the search API. The result can be
paginated.
:param filters: `{<attribute>:{<operator>:<value>}}`
:param fields: [<String: magento field names>, ...]
:param limit: `page limit`
:param page: `current page`
:return: `list` of `dict`
"""
options = {
'imported': False,
'filters': filters or {},
'fields': fields or [],
'limit': limit or 1000,
'page': page,
}
return self.call('sales_order.search', [options])
|
python
|
{
"resource": ""
}
|
q5258
|
Order.addcomment
|
train
|
def addcomment(self, order_increment_id,
status, comment=None, notify=False):
"""
Add comment to order or change its state
:param order_increment_id: Order ID
TODO: Identify possible values for status
"""
if comment is None:
comment = ""
return bool(self.call(
'sales_order.addComment',
[order_increment_id, status, comment, notify]
)
)
|
python
|
{
"resource": ""
}
|
q5259
|
CreditMemo.create
|
train
|
def create(
self,
order_increment_id,
creditmemo_data=None,
comment=None,
email=False,
include_comment=False,
refund_to_store_credit_amount=None):
"""
Create new credit_memo for order
:param order_increment_id: Order Increment ID
:type order_increment_id: str
:param creditmemo_data: Sales order credit memo data (optional)
:type creditmemo_data: associative array as dict
{
'qtys': [
{
'order_item_id': str, # Order item ID to be refunded
'qty': int # Items quantity to be refunded
},
...
],
'shipping_amount': float # refund shipping amount (optional)
'adjustment_positive': float # adjustment refund amount (optional)
'adjustment_negative': float # adjustment fee amount (optional)
}
:param comment: Credit memo Comment
:type comment: str
:param email: send e-mail flag (optional)
:type email: bool
:param include_comment: include comment in e-mail flag (optional)
:type include_comment: bool
:param refund_to_store_credit_amount: amount to refund to store credit
:type refund_to_store_credit_amount: float
:return str, increment id of credit memo created
"""
if comment is None:
comment = ''
return self.call(
'sales_order_creditmemo.create', [
order_increment_id, creditmemo_data, comment, email, include_comment, refund_to_store_credit_amount
]
)
|
python
|
{
"resource": ""
}
|
q5260
|
CreditMemo.addcomment
|
train
|
def addcomment(self, creditmemo_increment_id,
comment, email=True, include_in_email=False):
"""
Add new comment to credit memo
:param creditmemo_increment_id: Credit memo increment ID
:return: bool
"""
return bool(
self.call(
'sales_order_creditmemo.addComment',
[creditmemo_increment_id, comment, email, include_in_email]
)
)
|
python
|
{
"resource": ""
}
|
q5261
|
Shipment.create
|
train
|
def create(self, order_increment_id,
items_qty, comment=None, email=True, include_comment=False):
"""
Create new shipment for order
:param order_increment_id: Order Increment ID
:type order_increment_id: str
:param items_qty: items qty to ship
:type items_qty: associative array (order_item_id ⇒ qty) as dict
:param comment: Shipment Comment
:type comment: str
:param email: send e-mail flag (optional)
:type email: bool
:param include_comment: include comment in e-mail flag (optional)
:type include_comment: bool
"""
if comment is None:
comment = ''
return self.call(
'sales_order_shipment.create', [
order_increment_id, items_qty, comment, email, include_comment
]
)
|
python
|
{
"resource": ""
}
|
q5262
|
Shipment.addtrack
|
train
|
def addtrack(self, shipment_increment_id, carrier, title, track_number):
"""
Add new tracking number
:param shipment_increment_id: Shipment ID
:param carrier: Carrier Code
:param title: Tracking title
:param track_number: Tracking Number
"""
return self.call(
'sales_order_shipment.addTrack',
[shipment_increment_id, carrier, title, track_number]
)
|
python
|
{
"resource": ""
}
|
q5263
|
Invoice.addcomment
|
train
|
def addcomment(self, invoice_increment_id,
comment=None, email=False, include_comment=False):
"""
Add comment to invoice or change its state
:param invoice_increment_id: Invoice ID
"""
if comment is None:
comment = ""
return bool(
self.call(
'sales_order_invoice.addComment',
[invoice_increment_id, comment, email, include_comment]
)
)
|
python
|
{
"resource": ""
}
|
q5264
|
Category.info
|
train
|
def info(self, category_id, store_view=None, attributes=None):
"""
Retrieve Category details
:param category_id: ID of category to retrieve
:param store_view: Store view ID or code
:param attributes: Return the fields specified
:return: Dictionary of data
"""
return self.call(
'catalog_category.info', [category_id, store_view, attributes]
)
|
python
|
{
"resource": ""
}
|
q5265
|
Category.create
|
train
|
def create(self, parent_id, data, store_view=None):
"""
Create new category and return its ID
:param parent_id: ID of parent
:param data: Data for category
:param store_view: Store view ID or Code
:return: Integer ID
"""
return int(self.call(
'catalog_category.create', [parent_id, data, store_view])
)
|
python
|
{
"resource": ""
}
|
q5266
|
Category.move
|
train
|
def move(self, category_id, parent_id, after_id=None):
"""
Move category in tree
:param category_id: ID of category to move
:param parent_id: New parent of the category
:param after_id: Category ID after what position it will be moved
:return: Boolean
"""
return bool(self.call(
'catalog_category.move', [category_id, parent_id, after_id])
)
|
python
|
{
"resource": ""
}
|
q5267
|
Category.assignproduct
|
train
|
def assignproduct(self, category_id, product, position=None):
"""
Assign product to a category
:param category_id: ID of a category
:param product: ID or Code of the product
:param position: Position of product in category
:return: boolean
"""
return bool(self.call(
'catalog_category.assignProduct', [category_id, product, position])
)
|
python
|
{
"resource": ""
}
|
q5268
|
Category.updateproduct
|
train
|
def updateproduct(self, category_id, product, position=None):
"""
Update assigned product
:param category_id: ID of a category
:param product: ID or Code of the product
:param position: Position of product in category
:return: boolean
"""
return bool(self.call(
'catalog_category.updateProduct', [category_id, product, position])
)
|
python
|
{
"resource": ""
}
|
q5269
|
Product.info
|
train
|
def info(self, product, store_view=None, attributes=None,
identifierType=None):
"""
Retrieve product data
:param product: ID or SKU of product
:param store_view: ID or Code of store view
:param attributes: List of fields required
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: `dict` of values
"""
return self.call(
'catalog_product.info', [
product, store_view, attributes, identifierType
]
)
|
python
|
{
"resource": ""
}
|
q5270
|
Product.create
|
train
|
def create(self, product_type, attribute_set_id, sku, data):
"""
Create Product and return ID
:param product_type: String type of product
:param attribute_set_id: ID of attribute set
:param sku: SKU of the product
:param data: Dictionary of data
:return: INT id of product created
"""
return int(self.call(
'catalog_product.create',
[product_type, attribute_set_id, sku, data]
)
)
|
python
|
{
"resource": ""
}
|
q5271
|
Product.update
|
train
|
def update(self, product, data, store_view=None, identifierType=None):
"""
Update product Information
:param product: ID or SKU of product
:param data: Dictionary of attributes to update
:param store_view: ID or Code of store view
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: Boolean
"""
return bool(self.call(
'catalog_product.update',
[product, data, store_view, identifierType]
))
|
python
|
{
"resource": ""
}
|
q5272
|
Product.setSpecialPrice
|
train
|
def setSpecialPrice(self, product, special_price=None,
from_date=None, to_date=None, store_view=None,
identifierType=None):
"""
Update product's special price
:param product: ID or SKU of product
:param special_price: Special Price
:param from_date: From date
:param to_date: To Date
:param store_view: ID or Code of Store View
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: Boolean
"""
return bool(self.call(
'catalog_product.setSpecialPrice', [
product, special_price, from_date, to_date, store_view,
identifierType
]
))
|
python
|
{
"resource": ""
}
|
q5273
|
Product.getSpecialPrice
|
train
|
def getSpecialPrice(self, product, store_view=None, identifierType=None):
"""
Get product special price data
:param product: ID or SKU of product
:param store_view: ID or Code of Store view
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: Dictionary
"""
return self.call(
'catalog_product.getSpecialPrice', [
product, store_view, identifierType
]
)
|
python
|
{
"resource": ""
}
|
q5274
|
ProductImages.list
|
train
|
def list(self, product, store_view=None, identifierType=None):
"""
Retrieve product image list
:param product: ID or SKU of product
:param store_view: Code or ID of store view
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: `list` of `dict`
"""
return self.call('catalog_product_attribute_media.list',
[product, store_view, identifierType])
|
python
|
{
"resource": ""
}
|
q5275
|
ProductImages.info
|
train
|
def info(self, product, image_file, store_view=None, identifierType=None):
"""
Retrieve product image data
:param product: ID or SKU of product
:param store_view: ID or Code of store view
:param image_file: The image file name
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: `list` of `dict`
"""
return self.call('catalog_product_attribute_media.info',
[product, image_file, store_view, identifierType])
|
python
|
{
"resource": ""
}
|
q5276
|
ProductImages.create
|
train
|
def create(self, product, data, store_view=None, identifierType=None):
"""
Upload a new product image.
:param product: ID or SKU of product
:param data: `dict` of image data (label, position, exclude, types)
Example: { 'label': 'description of photo',
'position': '1', 'exclude': '0',
'types': ['image', 'small_image', 'thumbnail']}
:param store_view: Store view ID or Code
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: string - image file name
"""
return self.call('catalog_product_attribute_media.create',
[product, data, store_view, identifierType])
|
python
|
{
"resource": ""
}
|
q5277
|
ProductImages.update
|
train
|
def update(self, product, img_file_name, data, store_view=None,
identifierType=None):
"""
Update a product image.
:param product: ID or SKU of product
:param img_file_name: The image file name
Example: '/m/y/my_image_thumb.jpg'
:param data: `dict` of image data (label, position, exclude, types)
Example: { 'label': 'description of photo',
'position': '1', 'exclude': '0',
'types': ['image', 'small_image', 'thumbnail']}
:param store_view: Store view ID or Code
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: string - image file name
"""
return self.call('catalog_product_attribute_media.update',
[product, img_file_name, data, store_view, identifierType])
|
python
|
{
"resource": ""
}
|
q5278
|
ProductImages.remove
|
train
|
def remove(self, product, img_file_name, identifierType=None):
"""
Remove a product image.
:param product: ID or SKU of product
:param img_file_name: The image file name
Example: '/m/y/my_image_thumb.jpg'
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: boolean
"""
return self.call('catalog_product_attribute_media.remove',
[product, img_file_name, identifierType])
|
python
|
{
"resource": ""
}
|
q5279
|
ProductLinks.list
|
train
|
def list(self, link_type, product, identifierType=None):
"""
Retrieve list of linked products
:param link_type: type of link, one of 'cross_sell', 'up_sell',
'related' or 'grouped'
:param product: ID or SKU of product
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: `list` of `dict`
"""
return self.call('catalog_product_link.list',
[link_type, product, identifierType])
|
python
|
{
"resource": ""
}
|
q5280
|
ProductLinks.assign
|
train
|
def assign(self, link_type, product, linked_product, data=None,
identifierType=None):
"""
Assign a product link
:param link_type: type of link, one of 'cross_sell', 'up_sell',
'related' or 'grouped'
:param product: ID or SKU of product
:param linked_product: ID or SKU of linked product
:param data: dictionary of link data, (position, qty, etc.)
Example: { 'position': '0', 'qty': 1}
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: boolean
"""
return bool(self.call('catalog_product_link.assign',
[link_type, product, linked_product, data, identifierType]))
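# Usage sketch (illustrative; not part of the original snippet). It assumes an
# authenticated instance of this ProductLinks wrapper; the SKUs and position
# are placeholders.
#
#     links_api.assign('up_sell', 'SKU-001', 'SKU-002',
#                      data={'position': '1'}, identifierType='sku')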
|
python
|
{
"resource": ""
}
|
q5281
|
ProductLinks.remove
|
train
|
def remove(self, link_type, product, linked_product, identifierType=None):
"""
Remove a product link
:param link_type: type of link, one of 'cross_sell', 'up_sell',
'related' or 'grouped'
:param product: ID or SKU of product
:param linked_product: ID or SKU of linked product to unlink
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: boolean
"""
return bool(self.call('catalog_product_link.remove',
[link_type, product, linked_product, identifierType]))
|
python
|
{
"resource": ""
}
|
q5282
|
ProductConfigurable.update
|
train
|
def update(self, product, linked_products, attributes):
"""
    Update a configurable product, assigning the given simple products
    :param product: ID or SKU of the configurable product
    :param linked_products: list of IDs or SKUs of the simple products to link
    :param attributes: dict of configurable attribute data
    :return: boolean
"""
return bool(self.call('ol_catalog_product_link.assign',
[product, linked_products, attributes]))
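# Usage note (illustrative; not part of the original snippet). The call goes to
# 'ol_catalog_product_link.assign', which appears to be a custom (non-core)
# API endpoint, so this method only works when the matching server-side
# extension is installed; the IDs below and the shape of the attributes
# payload are placeholders that depend on that extension.
#
#     configurable_api.update(100, [101, 102], attributes_payload)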
|
python
|
{
"resource": ""
}
|
q5283
|
CartCoupon.add
|
train
|
def add(self, quote_id, coupon_code, store_view=None):
"""
Add a coupon code to a quote.
:param quote_id: Shopping cart ID (quote ID)
    :param coupon_code: Coupon code (string)
:param store_view: Store view ID or code
:return: boolean, True if the coupon code is added
"""
return bool(
self.call('cart_coupon.add', [quote_id, coupon_code, store_view])
)
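# Usage sketch (illustrative; not part of the original snippet); the quote ID
# and coupon code are placeholders.
#
#     cart_coupon_api.add(15, 'SUMMER10')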
|
python
|
{
"resource": ""
}
|
q5284
|
CartProduct.move_to_customer_quote
|
train
|
def move_to_customer_quote(self, quote_id, product_data, store_view=None):
"""
Allows you to move products from the current quote to a customer quote.
:param quote_id: Shopping cart ID (quote ID)
    :param product_data: list of dicts of product details, example
[
{
'product_id': 1,
'qty': 2,
'options': {
'option_1': 'value_1',
'option_2': 'value_2',
...
},
'bundle_option': {},
'bundle_option_qty': {},
'links': [],
},
{
'sku': 'S0012345',
'qty': 4,
},
]
:param store_view: Store view ID or code
:return: boolean, True if the product is moved to customer quote
"""
return bool(
self.call('cart_product.moveToCustomerQuote',
[quote_id, product_data, store_view])
)
|
python
|
{
"resource": ""
}
|
q5285
|
pack
|
train
|
def pack(ctx, remove_lib=True):
"""Build a isolated runnable package.
"""
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
with ROOT.joinpath('Pipfile.lock').open() as f:
lockfile = plette.Lockfile.load(f)
libdir = OUTPUT_DIR.joinpath('lib')
paths = {'purelib': libdir, 'platlib': libdir}
sources = lockfile.meta.sources._data
maker = distlib.scripts.ScriptMaker(None, None)
# Install packages from Pipfile.lock.
for name, package in lockfile.default._data.items():
if name in DONT_PACKAGE:
continue
print(f'[pack] Installing {name}')
package.pop('editable', None) # Don't install things as editable.
package.pop('markers', None) # Always install everything.
r = requirementslib.Requirement.from_pipfile(name, package)
wheel = passa.internals._pip.build_wheel(
r.as_ireq(), sources, r.hashes or None,
)
wheel.install(paths, maker, lib_only=True)
for pattern in IGNORE_LIB_PATTERNS:
for path in libdir.rglob(pattern):
print(f'[pack] Removing {path}')
path.unlink()
# Pack everything into ZIP.
zipname = OUTPUT_DIR.joinpath('passa.zip')
with zipfile.ZipFile(zipname, 'w') as zf:
_recursive_write_to_zip(zf, OUTPUT_DIR)
_recursive_write_to_zip(zf, STUBFILES_DIR)
print(f'[pack] Written archive {zipname}')
if remove_lib and libdir.exists():
print(f'[pack] Removing {libdir}')
shutil.rmtree(str(libdir))
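# Usage note (illustrative; not part of the original snippet). The `ctx`
# parameter suggests this is an Invoke task, so it would typically be run from
# the project root with something like `invoke pack`; the exact flag spelling
# for `remove_lib` depends on how Invoke maps the argument.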
|
python
|
{
"resource": ""
}
|
q5286
|
extract_feed
|
train
|
def extract_feed(
inpath: str, outpath: str, view: View, config: nx.DiGraph = None
) -> str:
"""Extract a subset of a GTFS zip into a new file"""
config = default_config() if config is None else config
config = remove_node_attributes(config, "converters")
feed = load_feed(inpath, view, config)
return write_feed_dangerously(feed, outpath)
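# Usage sketch (illustrative; not part of the original snippet). A common
# pattern is to pair this with read_busiest_date (defined later in this
# collection) to cut a GTFS zip down to a single service day; the paths are
# placeholders.
#
#     _, service_ids = read_busiest_date('gtfs.zip')
#     view = {'trips.txt': {'service_id': service_ids}}
#     extract_feed('gtfs.zip', 'subset.zip', view)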
|
python
|
{
"resource": ""
}
|
q5287
|
write_feed_dangerously
|
train
|
def write_feed_dangerously(
feed: Feed, outpath: str, nodes: Optional[Collection[str]] = None
) -> str:
"""Naively write a feed to a zipfile
This function provides no sanity checks. Use it at
your own risk.
"""
nodes = DEFAULT_NODES if nodes is None else nodes
try:
tmpdir = tempfile.mkdtemp()
def write_node(node):
df = feed.get(node)
if not df.empty:
path = os.path.join(tmpdir, node)
df.to_csv(path, index=False)
pool = ThreadPool(len(nodes))
try:
pool.map(write_node, nodes)
finally:
pool.terminate()
if outpath.endswith(".zip"):
outpath, _ = os.path.splitext(outpath)
outpath = shutil.make_archive(outpath, "zip", tmpdir)
finally:
shutil.rmtree(tmpdir)
return outpath
|
python
|
{
"resource": ""
}
|
q5288
|
read_busiest_date
|
train
|
def read_busiest_date(path: str) -> Tuple[datetime.date, FrozenSet[str]]:
"""Find the earliest date with the most trips"""
feed = load_raw_feed(path)
return _busiest_date(feed)
|
python
|
{
"resource": ""
}
|
q5289
|
read_busiest_week
|
train
|
def read_busiest_week(path: str) -> Dict[datetime.date, FrozenSet[str]]:
"""Find the earliest week with the most trips"""
feed = load_raw_feed(path)
return _busiest_week(feed)
|
python
|
{
"resource": ""
}
|
q5290
|
read_service_ids_by_date
|
train
|
def read_service_ids_by_date(path: str) -> Dict[datetime.date, FrozenSet[str]]:
"""Find all service identifiers by date"""
feed = load_raw_feed(path)
return _service_ids_by_date(feed)
|
python
|
{
"resource": ""
}
|
q5291
|
read_dates_by_service_ids
|
train
|
def read_dates_by_service_ids(
path: str
) -> Dict[FrozenSet[str], FrozenSet[datetime.date]]:
"""Find dates with identical service"""
feed = load_raw_feed(path)
return _dates_by_service_ids(feed)
|
python
|
{
"resource": ""
}
|
q5292
|
read_trip_counts_by_date
|
train
|
def read_trip_counts_by_date(path: str) -> Dict[datetime.date, int]:
"""A useful proxy for busyness"""
feed = load_raw_feed(path)
return _trip_counts_by_date(feed)
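# Usage sketch (illustrative; not part of the original snippet); the path is a
# placeholder.
#
#     counts = read_trip_counts_by_date('gtfs.zip')
#     busiest = max(counts, key=counts.get)  # date with the most trips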
|
python
|
{
"resource": ""
}
|
q5293
|
_load_feed
|
train
|
def _load_feed(path: str, view: View, config: nx.DiGraph) -> Feed:
"""Multi-file feed filtering"""
config_ = remove_node_attributes(config, ["converters", "transformations"])
feed_ = Feed(path, view={}, config=config_)
for filename, column_filters in view.items():
config_ = reroot_graph(config_, filename)
view_ = {filename: column_filters}
feed_ = Feed(feed_, view=view_, config=config_)
return Feed(feed_, config=config)
|
python
|
{
"resource": ""
}
|
q5294
|
Feed._filter
|
train
|
def _filter(self, filename: str, df: pd.DataFrame) -> pd.DataFrame:
"""Apply view filters"""
view = self._view.get(filename)
if view is None:
return df
for col, values in view.items():
# If applicable, filter this dataframe by the given set of values
if col in df.columns:
df = df[df[col].isin(setwrap(values))]
return df
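# Illustrative note (not part of the original snippet): with a view such as
# {'trips.txt': {'service_id': {'1', '2'}}}, only trips.txt rows whose
# service_id is '1' or '2' are kept; files and columns not named in the view
# pass through unfiltered.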
|
python
|
{
"resource": ""
}
|
q5295
|
Feed._prune
|
train
|
def _prune(self, filename: str, df: pd.DataFrame) -> pd.DataFrame:
"""Depth-first search through the dependency graph
and prune dependent DataFrames along the way.
"""
dependencies = []
for _, depf, data in self._config.out_edges(filename, data=True):
deps = data.get("dependencies")
if deps is None:
msg = f"Edge missing `dependencies` attribute: {filename}->{depf}"
raise ValueError(msg)
dependencies.append((depf, deps))
if not dependencies:
return df
for depfile, column_pairs in dependencies:
# Read the filtered, cached file dependency
depdf = self.get(depfile)
for deps in column_pairs:
col = deps[filename]
depcol = deps[depfile]
# If applicable, prune this dataframe by the other
if col in df.columns and depcol in depdf.columns:
df = df[df[col].isin(depdf[depcol])]
return df
|
python
|
{
"resource": ""
}
|
q5296
|
Feed._convert_types
|
train
|
def _convert_types(self, filename: str, df: pd.DataFrame) -> None:
"""
Apply type conversions
"""
if df.empty:
return
converters = self._config.nodes.get(filename, {}).get("converters", {})
for col, converter in converters.items():
if col in df.columns:
df[col] = converter(df[col])
|
python
|
{
"resource": ""
}
|
q5297
|
reroot_graph
|
train
|
def reroot_graph(G: nx.DiGraph, node: str) -> nx.DiGraph:
"""Return a copy of the graph rooted at the given node"""
G = G.copy()
for n, successors in list(nx.bfs_successors(G, source=node)):
for s in successors:
G.add_edge(s, n, **G.edges[n, s])
G.remove_edge(n, s)
return G
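# Minimal runnable sketch (not part of the original snippet): edges reachable
# from the chosen node are reversed so that node becomes the root, and edge
# attributes are carried over unchanged.
#
#     import networkx as nx
#     G = nx.DiGraph()
#     G.add_edge('trips.txt', 'routes.txt',
#                dependencies=[{'trips.txt': 'route_id', 'routes.txt': 'route_id'}])
#     list(reroot_graph(G, 'trips.txt').edges())  # -> [('routes.txt', 'trips.txt')]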
|
python
|
{
"resource": ""
}
|
q5298
|
setwrap
|
train
|
def setwrap(value: Any) -> Set[str]:
"""
Returns a flattened and stringified set from the given object or iterable.
    For use in public functions which accept arguments or kwargs that can be
one object or a list of objects.
"""
return set(map(str, set(flatten([value]))))
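# Intent sketch (not part of the original snippet). The exact behaviour depends
# on the `flatten` helper in scope (not shown in this snippet), but the idea is
# that a scalar and a collection normalise to the same set of strings, e.g.:
#
#     setwrap(1)       # -> {'1'}
#     setwrap([1, 2])  # -> {'1', '2'}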
|
python
|
{
"resource": ""
}
|
q5299
|
remove_node_attributes
|
train
|
def remove_node_attributes(G: nx.DiGraph, attributes: Union[str, Iterable[str]]):
"""
Return a copy of the graph with the given attributes
deleted from all nodes.
"""
G = G.copy()
for _, data in G.nodes(data=True):
for attribute in setwrap(attributes):
if attribute in data:
del data[attribute]
return G
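# Minimal runnable sketch (not part of the original snippet): the named
# attribute is removed from every node in the copy, while other attributes are
# left in place.
#
#     import networkx as nx
#     G = nx.DiGraph()
#     G.add_node('trips.txt', converters={'route_id': str}, required=True)
#     remove_node_attributes(G, 'converters').nodes['trips.txt']
#     # -> {'required': True}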
|
python
|
{
"resource": ""
}
|