text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_check_and_report(client_site_url, apikey, get_resource_ids_to_check, get_url_for_id, check_url, upsert_result):
"""Get links from the client site, check them, and post the results back. Get resource IDs from the client site, get the URL for each resource ID from the client site, check each URL, and post the results back to the client site. This function can be called repeatedly to keep on getting more links from the client site and checking them. The functions that this function calls to carry out the various tasks are taken as parameters to this function for testing purposes - it makes it easy for tests to pass in mock functions. It also decouples the code nicely. :param client_site_url: the base URL of the client site :type client_site_url: string :param apikey: the API key to use when making requests to the client site :type apikey: string or None :param get_resource_ids_to_check: The function to call to get the list of resource IDs to be checked from the client site. See get_resource_ids_to_check() above for the interface that this function should implement. :type get_resource_ids_to_check: callable :param get_url_for_id: The function to call to get the URL for a given resource ID from the client site. See get_url_for_id() above for the interface that this function should implement. :type get_url_for_id: callable :param check_url: The function to call to check whether a URL is dead or alive. See check_url() above for the interface that this function should implement. :type check_url: callable :param upsert_result: The function to call to post a link check result to the client site. See upsert_result() above for the interface that this function should implement. :type upsert_result: callable """ |
# Body of get_check_and_report (signature/contract documented above):
# iterate the resource IDs reported by the client site, resolve and check
# each URL, and post every result back via upsert_result.
logger = _get_logger()
resource_ids = get_resource_ids_to_check(client_site_url, apikey)
for resource_id in resource_ids:
    try:
        url = get_url_for_id(client_site_url, apikey, resource_id)
    except CouldNotGetURLError:
        # Not authorized for this resource -- skip it rather than abort
        # the whole batch.
        logger.info(u"This link checker was not authorized to access "
                    "resource {0}, skipping.".format(resource_id))
        continue
    result = check_url(url)
    status = result["status"]
    reason = result["reason"]
    if result["alive"]:
        logger.info(u"Checking URL {0} of resource {1} succeeded with "
                    "status {2}:".format(url, resource_id, status))
    else:
        logger.info(u"Checking URL {0} of resource {1} failed with error "
                    "{2}:".format(url, resource_id, reason))
    # Report the check result (alive/dead, status, reason) back upstream.
    upsert_result(client_site_url, apikey, resource_id=resource_id,
                  result=result)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def remove(self, member):
"""Remove a member from the archive.""" |
# Body of ZipFile.remove(member): drop a member from an open zip archive
# by sliding all following data down over the removed entry's local
# header + compressed data, then truncating the file.
# NOTE(review): assumes entries are stored contiguously in filelist
# order with no data descriptors -- TODO confirm for the archives used.
# Make sure we have an info object; 'member' may be a name or a ZipInfo.
if isinstance(member, ZipInfo):
    # 'member' is already an info object
    zinfo = member
else:
    # Get info object for name
    zinfo = self.getinfo(member)
# Length of the entry being removed: local file header + compressed data.
zlen = len(zinfo.FileHeader()) + zinfo.compress_size
# Offset of the entry: sum of the lengths of all records before it.
fileidx = self.filelist.index(zinfo)
fileofs = sum(
    len(zi.FileHeader()) + zi.compress_size
    for zi in self.filelist[:fileidx]
)
# Slide everything after the removed entry down over it.
self.fp.seek(fileofs + zlen)
after = self.fp.read()
self.fp.seek(fileofs)
self.fp.write(after)
self.fp.seek(-zlen, 2)
self.fp.truncate()
self._didModify = True
self.filelist.remove(zinfo)
# BUG FIX: key NameToInfo by the member *name*; 'member' itself may be a
# ZipInfo object, which is never a key of NameToInfo (KeyError before).
# Also replaced the py2-only xrange() loop with a slice of filelist.
del self.NameToInfo[zinfo.filename]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def import_class(class_path):
    '''
    Import and return the class named by the dotted path *class_path*.
    '''
    module_path, _, class_name = class_path.rpartition(".")
    module = import_module(module_path)
    return getattr(module, class_name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _executor(self):
    '''
    Return the executor instance for batched requests.

    Creating an executor pool is a costly operation, so this should be
    invoked once and the result reused.
    '''
    if self.EXECUTE_PARALLEL is False:
        # Parallelism explicitly disabled: run batched requests serially.
        sequential_cls = import_class(
            "batch_requests.concurrent.executor.SequentialExecutor")
        return sequential_cls()
    # Otherwise build the configured concurrent executor with its pool.
    concurrent_cls = import_class(self.CONCURRENT_EXECUTOR)
    return concurrent_cls(self.NUM_WORKERS)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def make_label(self, path):
""" this borrows too much from the internals of ofs maybe expose different parts of the api? """ |
# Body of make_label(self, path): ensure an object exists for *path* in
# the backing OFS store, creating an empty one (with creation-time
# metadata) when it does not exist yet.
# NOTE(review): Python 2 only -- uses StringIO.StringIO.
from datetime import datetime
from StringIO import StringIO
# path is "<bucket>/<label>"; strip the leading slash and split once.
path = path.lstrip("/")
bucket, label = path.split("/", 1)
bucket = self.ofs._require_bucket(bucket)
key = self.ofs._get_key(bucket, label)
if key is None:
    # New label: create the key, stamp creation time, and write an empty
    # payload so the object exists.
    key = bucket.new_key(label)
    self.ofs._update_key_metadata(key, { '_creation_time': str(datetime.utcnow()) })
    key.set_contents_from_file(StringIO(''))
    key.close()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_proxy_config(self, headers, path):
""" stub. this really needs to be a call to the remote restful interface to get the appropriate host and headers to use for this upload """ |
# Body of get_proxy_config(self, headers, path): sign *headers* for a
# PUT of *path* and return the host to upload to. Stub implementation --
# eventually this should ask the remote REST interface for the host and
# headers to use.
self.ofs.conn.add_aws_auth_header(headers, 'PUT', path)
# NOTE(review): leftover debug output -- consider removing this pprint.
from pprint import pprint
pprint(headers)
host = self.ofs.conn.server_name()
return host, headers
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fetch_all_mood_stations(self, terr=KKBOXTerritory.TAIWAN):
    '''
    Fetches all mood stations.

    :param terr: the current territory.
    :return: API response.
    :rtype: dict

    See `https://docs-en.kkbox.codes/v1.1/reference#moodstations`.
    '''
    query = url_parse.urlencode({'territory': terr})
    url = 'https://api.kkbox.com/v1.1/mood-stations' + '?' + query
    headers = self.http._headers_with_access_token()
    return self.http._post_data(url, None, headers)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fetch_mood_station(self, station_id, terr=KKBOXTerritory.TAIWAN):
    '''
    Fetches a mood station by given ID.

    :param station_id: the station ID
    :param terr: the current territory.
    :return: API response.
    :rtype: dict

    See `https://docs-en.kkbox.codes/v1.1/reference#moodstations-station_id`.
    '''
    query = url_parse.urlencode({'territory': terr})
    url = 'https://api.kkbox.com/v1.1/mood-stations/%s' % station_id + '?' + query
    headers = self.http._headers_with_access_token()
    return self.http._post_data(url, None, headers)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fetch_data(self, url):
    '''
    Fetches data from a specific URL.

    :return: The response.
    :rtype: dict
    '''
    headers = self.http._headers_with_access_token()
    return self.http._post_data(url, None, headers)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fetch_shared_playlist(self, playlist_id, terr=KKBOXTerritory.TAIWAN):
    '''
    Fetches a shared playlist by given ID.

    :param playlist_id: the playlist ID.
    :type playlist_id: str
    :param terr: the current territory.
    :return: API response.
    :rtype: dict

    See `https://docs-en.kkbox.codes/v1.1/reference#sharedplaylists-playlist_id`.
    '''
    url = 'https://api.kkbox.com/v1.1/shared-playlists/%s' % playlist_id
    url += '?' + url_parse.urlencode({'territory': terr})
    return self.http._post_data(url, None, self.http._headers_with_access_token())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_firewall_rule(self, server_uuid, firewall_rule_position, server_instance=None):
""" Return a FirewallRule object based on server uuid and rule position. """ |
url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
res = self.get_request(url)
return FirewallRule(**res['firewall_rule']) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_firewall_rules(self, server):
""" Return all FirewallRule objects based on a server instance or uuid. """ |
server_uuid, server_instance = uuid_and_instance(server)
url = '/server/{0}/firewall_rule'.format(server_uuid)
res = self.get_request(url)
return [
FirewallRule(server=server_instance, **firewall_rule)
for firewall_rule in res['firewall_rules']['firewall_rule']
] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_firewall_rule(self, server, firewall_rule_body):
""" Create a new firewall rule for a given server uuid. The rule can begiven as a dict or with FirewallRule.prepare_post_body(). Returns a FirewallRule object. """ |
server_uuid, server_instance = uuid_and_instance(server)
url = '/server/{0}/firewall_rule'.format(server_uuid)
body = {'firewall_rule': firewall_rule_body}
res = self.post_request(url, body)
return FirewallRule(server=server_instance, **res['firewall_rule']) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete_firewall_rule(self, server_uuid, firewall_rule_position):
""" Delete a firewall rule based on a server uuid and rule position. """ |
url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
return self.request('DELETE', url) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def configure_firewall(self, server, firewall_rule_bodies):
""" Helper for calling create_firewall_rule in series for a list of firewall_rule_bodies. """ |
server_uuid, server_instance = uuid_and_instance(server)
return [
self.create_firewall_rule(server_uuid, rule)
for rule in firewall_rule_bodies
] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def post(self, data):
""" POSTs a raw SMTP message to the Sinkhole API :param data: raw content to be submitted [STRING] :return: { list of predictions } """ |
# Body of post(self, data): POST a raw SMTP message to the Sinkhole API
# endpoint and return the parsed response (list of predictions).
uri = '{}/sinkhole'.format(self.client.remote)
self.logger.debug(uri)
if PYVERSION == 2:
    # On Python 2 the raw message may be a byte string; decode it so it
    # can be JSON-serialised. Fall back to latin-1 when it isn't UTF-8.
    try:
        data = data.decode('utf-8')
    except Exception:
        data = data.decode('latin-1')
data = {
    'message': data
}
body = self.client.post(uri, data)
return body
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pre_process_method_headers(method, headers):
    '''
    Normalize an HTTP method and header mapping for WSGI.

    Returns the lower-cased method together with a new header dict in
    which every name has '-' replaced by '_', is upper-cased, and --
    unless it is one of the standard WSGI variables -- is prefixed with
    'HTTP_'.
    '''
    # Standard WSGI variables that must NOT receive the HTTP_ prefix.
    wsgi_vars = {"content_length", "content_type", "query_string",
                 "remote_addr", "remote_host", "remote_user",
                 "request_method", "server_name", "server_port"}
    normalized = {}
    for name, value in headers.items():
        name = name.replace("-", "_")
        if name.lower() not in wsgi_vars:
            name = "http_{header}".format(header=name)
        normalized[name.upper()] = value
    return method.lower(), normalized
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def headers_to_include_from_request(curr_request):
    '''
    Return the subset of the current request's META entries that are
    configured (settings.HEADERS_TO_INCLUDE) to be forwarded to the
    individual batched requests.
    '''
    included = {}
    for header, value in curr_request.META.items():
        if header in _settings.HEADERS_TO_INCLUDE:
            included[header] = value
    return included
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_wsgi_request_object(curr_request, method, url, headers, body):
    '''
    Based on the given request parameters, constructs and returns the WSGI request object.

    :param curr_request: the incoming batch request; forwardable headers
        are copied from it.
    :param method: HTTP method for the sub-request.
    :param url: target URL path for the sub-request.
    :param headers: headers specific to this sub-request; these override
        the forwarded batch-request headers.
    :param body: raw body for the sub-request.
    '''
    x_headers = headers_to_include_from_request(curr_request)
    method, t_headers = pre_process_method_headers(method, headers)
    # Add default content type.
    if "CONTENT_TYPE" not in t_headers:
        t_headers.update({"CONTENT_TYPE": _settings.DEFAULT_CONTENT_TYPE})
    # Override existing batch requests headers with the new headers passed for this request.
    x_headers.update(t_headers)
    content_type = x_headers.get("CONTENT_TYPE", _settings.DEFAULT_CONTENT_TYPE)
    # Get hold of request factory to construct the request.
    _request_factory = BatchRequestFactory()
    # The factory exposes one method per HTTP verb (get/post/...).
    _request_provider = getattr(_request_factory, method)
    secure = _settings.USE_HTTPS
    request = _request_provider(url, data=body, secure=secure,
                                content_type=content_type, **x_headers)
    return request
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _base_environ(self, **request):
    '''
    Override the default values for the wsgi environment variables.

    :param request: environ overrides; they take precedence over both
        the minimal defaults below and self.defaults.
    :return: complete WSGI environ dict.
    '''
    # This is a minimal valid WSGI environ dictionary, plus:
    # - HTTP_COOKIE: for cookie support,
    # - REMOTE_ADDR: often useful, see #8551.
    # See http://www.python.org/dev/peps/pep-3333/#environ-variables
    environ = {
        'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
        'PATH_INFO': str('/'),
        'REMOTE_ADDR': str('127.0.0.1'),
        'REQUEST_METHOD': str('GET'),
        'SCRIPT_NAME': str(''),
        'SERVER_NAME': str('localhost'),
        'SERVER_PORT': str('8000'),
        'SERVER_PROTOCOL': str('HTTP/1.1'),
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': str('http'),
        'wsgi.input': FakePayload(b''),
        'wsgi.errors': self.errors,
        'wsgi.multiprocess': True,
        'wsgi.multithread': True,
        'wsgi.run_once': False,
    }
    # Instance-level defaults first, then per-call overrides win.
    environ.update(self.defaults)
    environ.update(request)
    return environ
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def request(self, method, endpoint, body=None, timeout=-1):
""" Perform a request with a given body to a given endpoint in UpCloud's API. Handles errors with __error_middleware. """ |
# Body of request(self, method, endpoint, body=None, timeout=-1):
# perform a JSON request against UpCloud's API and pass the response
# through __error_middleware, which raises on known error codes.
if method not in set(['GET', 'POST', 'PUT', 'DELETE']):
    raise Exception('Invalid/Forbidden HTTP method')
url = '/' + self.api_v + endpoint
headers = {
    'Authorization': self.token,
    'Content-Type': 'application/json'
}
if body:
    json_body_or_None = json.dumps(body)
else:
    json_body_or_None = None
# timeout == -1 means "use the client's default timeout".
call_timeout = timeout if timeout != -1 else self.timeout
# Dispatch to requests.get/post/put/delete by method name.
APIcall = getattr(requests, method.lower())
res = APIcall('https://api.upcloud.com' + url,
              data=json_body_or_None,
              headers=headers,
              timeout=call_timeout)
# Some responses (e.g. DELETE) have an empty body; avoid .json() there.
if res.text:
    res_json = res.json()
else:
    res_json = {}
return self.__error_middleware(res, res_json)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def post_request(self, endpoint, body=None, timeout=-1):
""" Perform a POST request to a given endpoint in UpCloud's API. """ |
# Body of post_request(self, endpoint, body=None, timeout=-1): thin
# convenience wrapper delegating to request() with method='POST'.
return self.request('POST', endpoint, body, timeout)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __error_middleware(self, res, res_json):
""" Middleware that raises an exception when HTTP statuscode is an error code. """ |
# Body of __error_middleware(self, res, res_json): raise UpCloudAPIError
# for known client-error status codes, otherwise pass the JSON through.
# NOTE(review): only these specific 4xx codes raise; others (e.g. 408,
# 410, any 5xx) fall through silently -- confirm this is intended.
if(res.status_code in [400, 401, 402, 403, 404, 405, 406, 409]):
    err_dict = res_json.get('error', {})
    raise UpCloudAPIError(error_code=err_dict.get('error_code'),
                          error_message=err_dict.get('error_message'))
return res_json
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def put_stream(self, bucket, label, stream_object, params=None):
    '''
    Create a new file in swift object storage.

    :param bucket: container to write into (claimed if necessary).
    :param label: object name within the bucket.
    :param stream_object: file-like object or bytes with the content.
    :param params: optional dict of metadata (JSON-serializable values).
    '''
    # FIX: avoid the shared mutable default argument (params={}).
    if params is None:
        params = {}
    self.claim_bucket(bucket)
    self.connection.put_object(bucket, label, stream_object,
                               headers=self._convert_to_meta(params))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get(self, q, limit=None):
""" Performs a search against the predict endpoint :param q: query to be searched for [STRING] :return: { score: [0|1] } """ |
# Body of get(self, q, limit=None): query the predict endpoint and
# return its score.
# NOTE(review): 'q' is interpolated into the query string without URL
# encoding -- values containing '&', '#' or spaces will malform the
# request; consider urlencoding. 'limit' is accepted but unused.
uri = '{}/predict?q={}'.format(self.client.remote, q)
self.logger.debug(uri)
body = self.client.get(uri)
return body['score']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def list_labels(self, bucket):
    '''Iterate over the labels stored in *bucket*.

    Zip archives have no inherent ordering, so this walks the entire
    archive looking for entries belonging to the requested bucket -- an
    expensive operation on large archives.

    :param bucket: bucket to list labels for.
    :return: iterator for the labels in the specified bucket.
    '''
    for entry in self.z.namelist():
        container, label = self._nf(entry.encode("utf-8"))
        # Skip the per-bucket metadata file; everything else is a label.
        if container == bucket and label != MD_FILE:
            yield label
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def list_buckets(self):
    '''Iterate over all buckets managed by this OFS instance.

    Like list_labels, this walks the entire archive. A local set of
    already-seen names suppresses duplicates, so memory use grows with
    the number of distinct buckets even though this is a generator.

    :return: iterator for the buckets.
    '''
    seen = set()
    for entry in self.z.namelist():
        bucket, _ = self._nf(entry)
        if bucket in seen:
            continue
        seen.add(bucket)
        yield bucket
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def del_stream(self, bucket, label):
    '''Delete a bitstream.

    This needs more testing -- file deletion inside a zipfile is
    problematic. The alternative (rewriting a second zipfile without the
    offending entries) is unattractive for large archives.
    '''
    # Nothing to do when the label does not exist.
    if not self.exists(bucket, label):
        return
    self._del_stream(self._zf(bucket, label))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update_metadata(self, bucket, label, params):
    '''Update the metadata for *label* with the provided params dict.

    :param bucket: bucket containing the label.
    :param label: label whose metadata entry is updated (created if absent).
    :param params: dictionary of key values (json serializable).
    :return: the updated metadata dict for the label.
    :raises OFSException: when the archive is opened in 'r' mode.
    '''
    # Guard clause instead of wrapping the whole body in an if/else.
    if self.mode == "r":
        raise OFSException("Cannot update MD in archive in 'r' mode")
    try:
        payload = self._get_bucket_md(bucket)
    except OFSFileNotFound:
        # No MD found... create it, seeding an entry per existing label.
        payload = {}
        for l in self.list_labels(bucket):
            payload[l] = {}
            payload[l]['_label'] = l
        if not self.quiet:
            print("Had to create md file for %s" % bucket)
    # FIX: removed `except OFSException as e: raise OFSException(e)` --
    # wrapping an OFSException in a new OFSException only mangled the
    # message and traceback; letting it propagate is equivalent and clean.
    if label not in payload:
        payload[label] = {}
    payload[label].update(params)
    self.put_stream(bucket, MD_FILE, json.dumps(payload).encode('utf-8'), params={}, replace=True, add_md=False)
    return payload[label]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def del_metadata_keys(self, bucket, label, keys):
    '''Delete the metadata entries for *label* named by *keys*.

    Keys not present in the label's metadata are silently ignored; a
    label with no metadata entry is silently skipped.

    :raises OFSFileNotFound: if the bucket has no metadata file.
    :raises OFSException: when the archive is opened in 'r' mode.
    '''
    if self.mode == "r":
        raise OFSException("Cannot update MD in archive in 'r' mode")
    try:
        payload = self._get_bucket_md(bucket)
    except OFSFileNotFound:
        # No MD found...
        raise OFSFileNotFound("Couldn't find a md file for %s bucket" % bucket)
    # FIX: dict.has_key() was removed in Python 3 -- the `in` operator is
    # equivalent and works on Python 2 as well. Also dropped the useless
    # `except OFSException: raise OFSException(e)` re-wrap.
    if label in payload:
        for key in [x for x in keys if x in payload[label]]:
            del payload[label][key]
        self.put_stream(bucket, MD_FILE, json.dumps(payload), params={}, replace=True, add_md=False)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_response(wsgi_request):
    '''
    Given a WSGI request, resolve and call the corresponding view
    function and return its response as a plain dict
    (status_code / reason_phrase / headers / body).
    '''
    service_start_time = datetime.now()
    # Get the view / handler for this request
    view, args, kwargs = resolve(wsgi_request.path_info)
    kwargs.update({"request": wsgi_request})
    # Let the view do its task.
    try:
        resp = view(*args, **kwargs)
    except Exception as exc:
        # FIX: Exception.message does not exist on Python 3 (and is
        # deprecated since 2.6); str(exc) is portable.
        resp = HttpResponseServerError(content=str(exc))
    headers = dict(resp._headers.values())
    # Convert HTTP response into simple dict type.
    d_resp = {"status_code": resp.status_code, "reason_phrase": resp.reason_phrase,
              "headers": headers}
    try:
        d_resp.update({"body": resp.content})
    except ContentNotRenderedError:
        # Template responses must be rendered before .content is readable.
        resp.render()
        d_resp.update({"body": resp.content})
    # Check if we need to send across the duration header.
    if _settings.ADD_DURATION_HEADER:
        # NOTE(review): .seconds truncates sub-second durations to 0 --
        # consider total_seconds() if finer granularity is wanted.
        d_resp['headers'].update({_settings.DURATION_HEADER_NAME: (datetime.now() - service_start_time).seconds})
    return d_resp
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_wsgi_requests(request):
    '''
    For the given batch request, extract the individual requests and create
    a WSGIRequest object for each.

    :raises BadBatchRequest: when the payload is not a list, exceeds the
        configured MAX_LIMIT, or an individual request is malformed.
    '''
    valid_http_methods = ["get", "post", "put", "patch", "delete", "head", "options", "connect", "trace"]
    requests = json.loads(request.body)
    # FIX: use isinstance() instead of comparing type() against a tuple;
    # this also accepts list/tuple subclasses.
    if not isinstance(requests, (list, tuple)):
        raise BadBatchRequest("The body of batch request should always be list!")
    # Max limit check.
    no_requests = len(requests)
    if no_requests > _settings.MAX_LIMIT:
        raise BadBatchRequest("You can batch maximum of %d requests." % (_settings.MAX_LIMIT))
    # We could mutate the current request with the respective parameters, but mutation is ghost in the dark,
    # so lets avoid. Construct the new WSGI request object for each request.
    def construct_wsgi_from_data(data):
        '''
        Given the data in the format of url, method, body and headers, construct a new
        WSGIRequest object.
        '''
        url = data.get("url", None)
        method = data.get("method", None)
        if url is None or method is None:
            raise BadBatchRequest("Request definition should have url, method defined.")
        if method.lower() not in valid_http_methods:
            raise BadBatchRequest("Invalid request method.")
        body = data.get("body", "")
        headers = data.get("headers", {})
        return get_wsgi_request_object(request, method, url, headers, body)
    return [construct_wsgi_from_data(data) for data in requests]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def handle_batch_requests(request, *args, **kwargs):
    '''
    A view function to handle the overall processing of batch requests.
    '''
    batch_start_time = datetime.now()
    try:
        # Get the individual WSGI requests.
        wsgi_requests = get_wsgi_requests(request)
    except BadBatchRequest as brx:
        # FIX: Exception.message does not exist on Python 3; str() is
        # portable and equivalent for single-argument exceptions.
        return HttpResponseBadRequest(content=str(brx))
    # Fire these WSGI requests, and collect the response for the same.
    response = execute_requests(wsgi_requests)
    # Everything's done, return the response.
    resp = HttpResponse(
        content=json.dumps(response), content_type="application/json")
    if _settings.ADD_DURATION_HEADER:
        # Idiomatic item assignment instead of calling __setitem__ directly.
        resp[_settings.DURATION_HEADER_NAME] = str((datetime.now() - batch_start_time).seconds)
    return resp
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def search(self, keyword, types=None, terr=KKBOXTerritory.TAIWAN):
    '''
    Searches within KKBOX's database.

    :param keyword: the keyword.
    :type keyword: str
    :param types: optional list of search types to restrict the search.
    :type types: list
    :param terr: the current territory.
    :return: API response.
    :rtype: dict

    See `https://docs-en.kkbox.codes/v1.1/reference#search_1`.
    '''
    # FIX: avoid the shared mutable default argument (types=[]);
    # passing None behaves identically to the old default.
    if types is None:
        types = []
    url = 'https://api.kkbox.com/v1.1/search'
    url += '?' + url_parse.urlencode({'q': keyword, 'territory': terr})
    if len(types) > 0:
        url += '&type=' + ','.join(types)
    return self.http._post_data(url, None, self.http._headers_with_access_token())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _create_ip_address_objs(ip_addresses, cloud_manager):
""" Create IPAddress objects from API response data. Also associates CloudManager with the objects. """ |
# Body of _create_ip_address_objs(ip_addresses, cloud_manager): build
# IPAddress objects from API response data and attach the CloudManager.
# ip-addresses might be provided as a flat array or as a following dict:
# {'ip_addresses': {'ip_address': [...]}} || {'ip_address': [...]}
if 'ip_addresses' in ip_addresses:
    ip_addresses = ip_addresses['ip_addresses']
if 'ip_address' in ip_addresses:
    ip_addresses = ip_addresses['ip_address']
return [
    IPAddress(cloud_manager=cloud_manager, **ip_addr)
    for ip_addr in ip_addresses
]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _reset(self, **kwargs):
""" Reset the objects attributes. Accepts servers as either unflattened or flattened UUID strings or Server objects. """ |
# Body of Tag._reset(**kwargs): reset the object's attributes, then
# normalise 'servers' into a list of Server objects.
super(Tag, self)._reset(**kwargs)
# backup name for changing it (look: Tag.save)
self._api_name = self.name
# flatten { servers: { server: [] } }
# NOTE(review): tests self.servers (set by super()._reset) for the
# 'server' key but reads the flat list from kwargs -- assumes both hold
# the same payload; verify against the base _reset implementation.
if 'server' in self.servers:
    self.servers = kwargs['servers']['server']
# convert UUIDs into server objects
if self.servers and isinstance(self.servers[0], six.string_types):
    self.servers = [Server(uuid=server, populated=False) for server in self.servers]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get(self, uri, params={}):
""" HTTP GET function :param uri: REST endpoint :param params: optional HTTP params to pass to the endpoint :return: list of results (usually a list of dicts) Example: ret = cli.get('/search', params={ 'q': 'example.org' }) """ |
# Body of _get(self, uri, params={}): HTTP GET helper -- prefix the
# remote base URL when 'uri' is a bare endpoint, then delegate.
# NOTE(review): unlike _post, no '/' is inserted between remote and uri;
# callers are expected to pass endpoints starting with '/'.
if not uri.startswith(self.remote):
    uri = '{}{}'.format(self.remote, uri)
return self._make_request(uri, params)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _post(self, uri, data):
""" HTTP POST function :param uri: REST endpoint to POST to :param data: list of dicts to be passed to the endpoint :return: list of dicts, usually will be a list of objects or id's Example: ret = cli.post('/indicators', { 'indicator': 'example.com' }) """ |
# Body of _post(self, uri, data): HTTP POST helper -- prefix the remote
# base URL (inserting a '/' separator) when needed, then delegate.
if not uri.startswith(self.remote):
    uri = '{}/{}'.format(self.remote, uri)
self.logger.debug(uri)
return self._make_request(uri, data=data)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def modify_server(self, UUID, **kwargs):
""" modify_server allows updating the server's updateable_fields. Note: Server's IP-addresses and Storages are managed by their own add/remove methods. """ |
body = dict()
body['server'] = {}
for arg in kwargs:
if arg not in Server.updateable_fields:
Exception('{0} is not an updateable field'.format(arg))
body['server'][arg] = kwargs[arg]
res = self.request('PUT', '/server/{0}'.format(UUID), body)
server = res['server']
# Populate subobjects
IPAddresses = IPAddress._create_ip_address_objs(server.pop('ip_addresses'),
cloud_manager=self)
storages = Storage._create_storage_objs(server.pop('storage_devices'),
cloud_manager=self)
return Server(
server,
ip_addresses=IPAddresses,
storage_devices=storages,
populated=True,
cloud_manager=self
) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _internal_convert(inp):
""" Converts file in IDX format provided by file-like input into numpy.ndarray and returns it. """ |
'''
Converts file in IDX format provided by file-like input into numpy.ndarray
and returns it.
'''
# All IDX integers are big-endian, hence the '>' in every struct format.
# Read the "magic number" - 4 bytes.
try:
    mn = struct.unpack('>BBBB', inp.read(4))
except struct.error:
    raise FormatError(struct.error)
# First two bytes are always zero, check it.
if mn[0] != 0 or mn[1] != 0:
    msg = ("Incorrect first two bytes of the magic number: " +
           "0x{0:02X} 0x{1:02X}".format(mn[0], mn[1]))
    raise FormatError(msg)
# 3rd byte is the data type code.
dtype_code = mn[2]
if dtype_code not in _DATA_TYPES_IDX:
    msg = "Incorrect data type code: 0x{0:02X}".format(dtype_code)
    raise FormatError(msg)
# 4th byte is the number of dimensions.
dims = int(mn[3])
# See possible data types description.
dtype, dtype_s, el_size = _DATA_TYPES_IDX[dtype_code]
# 4-byte integer for length of each dimension.
try:
    dims_sizes = struct.unpack('>' + 'I' * dims, inp.read(4 * dims))
except struct.error as e:
    raise FormatError('Dims sizes: {0}'.format(e))
# Full length of data = product of all dimension sizes.
full_length = reduce(operator.mul, dims_sizes, 1)
# Create a numpy array from the data (a short read surfaces here as a
# reshape ValueError and is reported as a FormatError).
try:
    result_array = numpy.frombuffer(
        inp.read(full_length * el_size),
        dtype=numpy.dtype(dtype)
    ).reshape(dims_sizes)
except ValueError as e:
    raise FormatError('Error creating numpy array: {0}'.format(e))
# Check for superfluous data.
if len(inp.read(1)) > 0:
    raise FormatError('Superfluous data detected.')
return result_array
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def convert_to_string(ndarr):
""" Writes the contents of the numpy.ndarray ndarr to bytes in IDX format and returns it. """ |
# Body of convert_to_string(ndarr): serialize *ndarr* to IDX-format
# bytes via an in-memory buffer.
with contextlib.closing(BytesIO()) as bytesio:
    _internal_write(bytesio, ndarr)
    # getvalue() must be called before the buffer is closed.
    return bytesio.getvalue()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_OS_UUID(cls, os):
""" Validate Storage OS and its UUID. If the OS is a custom OS UUID, don't validate against templates. """ |
# Body of get_OS_UUID(cls, os): map a known OS name to its template
# UUID, or accept a raw template UUID string directly.
if os in cls.templates:
    return cls.templates[os]
# NOTE(review): lowercase-hex only; an uppercase UUID string would be
# rejected here -- confirm against the API's UUID formatting.
uuid_regexp = '^[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}$'
if re.search(uuid_regexp, os):
    return os
raise Exception((
    "Invalid OS -- valid options are: 'CentOS 6.5', 'CentOS 7.0', "
    "'Debian 7.8', 'Debian 8.0' ,'Ubuntu 12.04', 'Ubuntu 14.04', 'Ubuntu 16.04', "
    "'Windows 2008', 'Windows 2012'"
))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def execute(self, requests, resp_generator, *args, **kwargs):
    '''
    Calls the resp_generator for all the requests in parallel in an asynchronous way.

    :param requests: iterable of requests to process.
    :param resp_generator: callable invoked as resp_generator(request, *args, **kwargs).
    :return: list of results in the same order as *requests*.
    '''
    # Submit all work first, then gather; .result() blocks until each is done.
    result_futures = [self.executor_pool.submit(resp_generator, req, *args, **kwargs) for req in requests]
    resp = [res_future.result() for res_future in result_futures]
    return resp
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def execute(self, requests, resp_generator, *args, **kwargs):
    '''
    Calls the resp_generator for all the requests in sequential order.

    :param requests: iterable of requests to process.
    :param resp_generator: callable invoked as resp_generator(request, *args, **kwargs).
    :return: list of results in input order.
    '''
    # FIX: forward *args/**kwargs to resp_generator, matching the
    # parallel executor's behavior; previously they were silently
    # dropped by this sequential implementation.
    return [resp_generator(request, *args, **kwargs) for request in requests]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def setup_logging(args):
""" Sets up basic logging :param args: ArgParse arguments :return: nothing. sets logger up globally """ |
loglevel = logging.WARNING
if args.verbose:
loglevel = logging.INFO
# --debug wins over --verbose because it is checked last.
if args.debug:
loglevel = logging.DEBUG
console = logging.StreamHandler()
# Configure the root logger ('') so every module logger inherits the level.
logging.getLogger('').setLevel(loglevel)
console.setFormatter(logging.Formatter(LOG_FORMAT))
logging.getLogger('').addHandler(console) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_ips(self):
""" Get all IPAddress objects from the API. """ |
res = self.get_request('/ip_address')
# Wrap the raw API dicts into IPAddress objects bound to this manager.
IPs = IPAddress._create_ip_address_objs(res['ip_addresses'], cloud_manager=self)
return IPs |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def new(self, user, name, description=None):
""" Creates a new Feed object :param user: feed username :param name: feed name :param description: feed description :return: dict """ |
uri = self.client.remote + '/users/{0}/feeds'.format(user)
# Payload shape expected by the feeds endpoint; description may be None.
data = {
'feed': {
'name': name,
'description': description
}
}
resp = self.client.post(uri, data)
return resp |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete(self, user, name):
""" Removes a feed :param user: feed username :param name: feed name :return: true/false """ |
uri = self.client.remote + '/users/{}/feeds/{}'.format(user, name)
resp = self.client.session.delete(uri)
# NOTE(review): this returns the HTTP status code (an int), not the
# true/false promised by the docstring -- confirm which callers expect
# before changing either side.
return resp.status_code |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def index(self, user):
""" Returns a list of Feeds from the API :param user: feed username :return: list Example: ret = feed.index('csirtgadgets') """ |
uri = self.client.remote + '/users/{0}/feeds'.format(user)
return self.client.get(uri) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def show(self, user, name, limit=None, lasttime=None):
""" Returns a specific Feed from the API :param user: feed username :param name: feed name :param limit: limit the results :param lasttime: only show >= lasttime :return: dict Example: ret = feed.show('csirtgadgets', 'port-scanners', limit=5) """ |
uri = self.client.remote + '/users/{0}/feeds/{1}'.format(user, name)
return self.client.get(uri, params={'limit': limit, 'lasttime': lasttime}) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build_object(self, obj):
"""Override django-bakery to skip profiles that raise 404""" |
try:
build_path = self.get_build_path(obj)
self.request = self.create_request(build_path)
self.request.user = AnonymousUser()
self.set_kwargs(obj)
self.build_file(build_path, self.get_content())
except Http404:
# cleanup directory
self.unbuild_object(obj) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def make_schedule_row(schedule_day, slot, seen_items):
"""Create a row for the schedule table.""" |
# seen_items maps items already rendered in earlier rows to their cell
# dicts, so an item spanning several slots is drawn once with a growing
# rowspan instead of repeated.
row = ScheduleRow(schedule_day, slot)
skip = {}
expanding = {}
all_items = list(slot.scheduleitem_set
.select_related('talk', 'page', 'venue')
.all())
# First pass: place each item in its venue's cell, or bump the rowspan of
# an item carried over from a previous row.
for item in all_items:
if item in seen_items:
# Inc rowspan
seen_items[item]['rowspan'] += 1
# Note that we need to skip this during colspan checks
skip[item.venue] = seen_items[item]
continue
scheditem = {'item': item, 'rowspan': 1, 'colspan': 1}
row.items[item.venue] = scheditem
seen_items[item] = scheditem
if item.expand:
expanding[item.venue] = []
# Second pass: walk the venues left-to-right, widening expanding items
# over empty venues and skipping venues covered by carried-over items.
empty = []
expanding_right = None
skipping = 0
skip_item = None
for venue in schedule_day.venues:
if venue in skip:
# We need to skip all the venues this item spans over
skipping = 1
skip_item = skip[venue]
continue
if venue in expanding:
item = row.items[venue]
# Absorb any empty venues to our left into this item's colspan.
for empty_venue in empty:
row.items.pop(empty_venue)
item['colspan'] += 1
empty = []
expanding_right = item
elif venue in row.items:
empty = []
expanding_right = None
elif expanding_right:
# Empty venue to the right of an expanding item: widen it.
expanding_right['colspan'] += 1
elif skipping > 0 and skipping < skip_item['colspan']:
skipping += 1
else:
skipping = 0
empty.append(venue)
# Placeholder cell for a venue with nothing scheduled here.
row.items[venue] = {'item': None, 'rowspan': 1, 'colspan': 1}
return row |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate_schedule(today=None):
"""Helper function which creates an ordered list of schedule days""" |
# We create a list of slots and schedule items
schedule_days = {}
# seen_items is shared across rows so items spanning several slots keep a
# single cell with a growing rowspan (see make_schedule_row).
seen_items = {}
for slot in Slot.objects.all().order_by('end_time', 'start_time', 'day'):
day = slot.get_day()
if today and day != today:
# Restrict ourselves to only today
continue
schedule_day = schedule_days.get(day)
if schedule_day is None:
schedule_day = schedule_days[day] = ScheduleDay(day)
row = make_schedule_row(schedule_day, slot, seen_items)
schedule_day.rows.append(row)
# Return the days sorted chronologically.
return sorted(schedule_days.values(), key=lambda x: x.day.date) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_context_data(self, **kwargs):
"""Allow adding a 'render_description' parameter""" |
context = super(ScheduleXmlView, self).get_context_data(**kwargs)
if self.request.GET.get('render_description', None) == '1':
context['render_description'] = True
else:
context['render_description'] = False
return context |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get(self, request):
"""Create a iCal file from the schedule""" |
# Heavily inspired by https://djangosnippets.org/snippets/2223/ and
# the icalendar documentation
calendar = Calendar()
site = get_current_site(request)
calendar.add('prodid', '-//%s Schedule//%s//' % (site.name, site.domain))
calendar.add('version', '2.0')
# Since we don't need to format anything here, we can just use a list
# of schedule items
for item in ScheduleItem.objects.all():
sched_event = Event()
sched_event.add('dtstamp', item.last_updated)
sched_event.add('summary', item.get_title())
sched_event.add('location', item.venue.name)
sched_event.add('dtstart', item.get_start_datetime())
sched_event.add('duration', datetime.timedelta(minutes=item.get_duration_minutes()))
sched_event.add('class', 'PUBLIC')
sched_event.add('uid', '%s@%s' % (item.pk, site.domain))
calendar.add_component(sched_event)
response = HttpResponse(calendar.to_ical(), content_type="text/calendar")
response['Content-Disposition'] = 'attachment; filename=schedule.ics'
return response |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build_object(self, obj):
"""Override django-bakery to skip pages marked exclude_from_static""" |
if not obj.exclude_from_static:
super(ShowPage, self).build_object(obj) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build_object(self, obj):
"""Override django-bakery to skip talks that raise 403""" |
try:
super(TalkView, self).build_object(obj)
except PermissionDenied:
# We cleanup the directory created
self.unbuild_object(obj) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def get_object(self, *args, **kwargs):
'''Only talk owners can see talks, unless they've been accepted'''
object_ = super(TalkView, self).get_object(*args, **kwargs)
if not object_.can_view(self.request.user):
raise PermissionDenied
return object_ |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def render_to_response(self, *args, **kwargs):
'''Canonicalize the URL if the slug changed'''
if self.request.path != self.object.get_absolute_url():
return HttpResponseRedirect(self.object.get_absolute_url())
return super(TalkView, self).render_to_response(*args, **kwargs) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete(self, request, *args, **kwargs):
"""Override delete to only withdraw""" |
# Soft delete: mark the talk WITHDRAWN instead of removing the row.
talk = self.get_object()
talk.status = WITHDRAWN
talk.save()
# NOTE(review): revision metadata is set after save(); confirm this view
# runs inside a revision context so the user/comment attach correctly.
revisions.set_user(self.request.user)
revisions.set_comment("Talk Withdrawn")
return HttpResponseRedirect(self.success_url) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def order_results_by(*fields):
    """Decorator: apply an ordering to the QuerySet returned by a function.

    :param fields: field names passed straight through to
        ``QuerySet.order_by()``.
    """
    def decorator(func):
        @functools.wraps(func)
        def ordered(*args, **kwargs):
            # Order whatever queryset `func` produced.
            return func(*args, **kwargs).order_by(*fields)
        return ordered
    return decorator
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cache_result(cache_key, timeout):
"""A decorator for caching the result of a function.""" |
# Results are stored in the Django cache named by settings.WAFER_CACHE
# under a single fixed cache_key -- the function's arguments are NOT part
# of the key, so all calls share one cached value.
def decorator(f):
cache_name = settings.WAFER_CACHE
@functools.wraps(f)
def wrapper(*args, **kw):
cache = caches[cache_name]
result = cache.get(cache_key)
# Cache miss: compute and store for `timeout` seconds.
# NOTE(review): a result that is literally None is never cached,
# so a None-returning function is recomputed on every call.
if result is None:
result = f(*args, **kw)
cache.set(cache_key, result, timeout)
return result
def invalidate():
# Allow callers to drop the cached value explicitly.
cache = caches[cache_name]
cache.delete(cache_key)
# Expose invalidation as an attribute of the wrapped function.
wrapper.invalidate = invalidate
return wrapper
return decorator |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build_queryset(self):
"""Override django-bakery's build logic to fake pagination.""" |
paths = [(os.path.join(self.build_prefix, 'index.html'), {})]
self.request = None
queryset = self.get_queryset()
paginator = self.get_paginator(queryset, self.get_paginate_by(queryset))
for page in paginator.page_range:
paths.append(
(os.path.join(self.build_prefix, 'page',
'%d' % page, 'index.html'), {'page': page}))
for build_path, kwargs in paths:
self.request = self.create_request(build_path)
# Add a user with no permissions
self.request.user = AnonymousUser()
# Fake context so views work as expected
self.kwargs = kwargs
self.prep_directory(build_path)
target_path = os.path.join(settings.BUILD_DIR, build_path)
self.build_file(target_path, self.get_content()) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def site_info(request):
'''Expose the site's info to templates'''
site = get_current_site(request)
context = {
'WAFER_CONFERENCE_NAME': site.name,
'WAFER_CONFERENCE_DOMAIN': site.domain,
}
return context |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def navigation_info(request):
'''Expose whether to display the navigation header and footer'''
if request.GET.get('wafer_hide_navigation') == "1":
nav_class = "wafer-invisible"
else:
nav_class = "wafer-visible"
context = {
'WAFER_NAVIGATION_VISIBILITY': nav_class,
}
return context |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def registration_settings(request):
'''Expose selected settings to templates'''
context = {}
for setting in (
'WAFER_SSO',
'WAFER_HIDE_LOGIN',
'WAFER_REGISTRATION_OPEN',
'WAFER_REGISTRATION_MODE',
'WAFER_TALKS_OPEN',
'WAFER_VIDEO_LICENSE',
):
context[setting] = getattr(settings, setting, None)
return context |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def profiles(self):
'''
Return the roles this person is related with
'''
# Collect the human-readable role names that apply to this person.
limit = []
if self.is_admin():
limit.append(_("Administrator"))
# Keep the list alphabetically ordered for display.
limit.sort()
return limit |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def set_logging_level(args):
"Computes and sets the logging level from the parsed arguments."
root_logger = logging.getLogger()
level = logging.INFO
logging.getLogger('requests.packages.urllib3').setLevel(logging.WARNING)
if "verbose" in args and args.verbose is not None:
logging.getLogger('requests.packages.urllib3').setLevel(0) # Unset
if args.verbose > 1:
level = 5 # "Trace" level
elif args.verbose > 0:
level = logging.DEBUG
else:
logging.critical("verbose is an unexpected value. (%s) exiting.",
args.verbose)
sys.exit(2)
elif "quiet" in args and args.quiet is not None:
if args.quiet > 1:
level = logging.ERROR
elif args.quiet > 0:
level = logging.WARNING
else:
logging.critical("quiet is an unexpected value. (%s) exiting.",
args.quiet)
if level is not None:
root_logger.setLevel(level)
if args.silence_urllib3:
# See: https://urllib3.readthedocs.org/en/latest/security.html
requests.packages.urllib3.disable_warnings() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def debug(self, msg):
'''
Handle the debugging to a file
'''
# If debug is not disabled
if self.__debug is not False:
# If never was set, try to set it up
if self.__debug is None:
# Check what do we have inside settings
debug_filename = getattr(settings, "AD_DEBUG_FILE", None)
if debug_filename:
# Open the debug file pointer
self.__debug = open(settings.AD_DEBUG_FILE, 'a')
else:
# Disable debuging forever
self.__debug = False
if self.__debug:
# Debug the given message
self.__debug.write("{}\n".format(msg))
self.__debug.flush() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def authenticate(self, *args, **kwargs):
'''
Authenticate the user against LDAP
'''
# Get config
username = kwargs.get("username", None)
password = kwargs.get("password", None)
# Check user in Active Directory (authorization == None if can not connect to Active Directory Server)
authorization = self.ldap_link(username, password, mode='LOGIN')
if authorization:
# The user was validated in Active Directory
user = self.get_or_create_user(username, password)
# get_or_create_user will revalidate the new user
if user:
# If the user has been properly validated
user.is_active = True
user.save()
else:
# AD rejected the credentials (False) or was unreachable (None):
# fall back to looking the user up locally.
# Locate user in our system
user = User.objects.filter(username=username).first()
if user and not user.is_staff:
# If access was denied
if authorization is False or getattr(settings, "AD_LOCK_UNAUTHORIZED", False):
# Deactivate the user
user.is_active = False
user.save()
# No access and no user here
user = None
# Return the final decision
return user |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def get_or_create_user(self, username, password):
'''
Get or create the given user
'''
# Get the groups for this user
info = self.get_ad_info(username, password)
self.debug("INFO found: {}".format(info))
# Find the user
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User(username=username)
# Update user
user.first_name = info.get('first_name', '')
user.last_name = info.get('last_name', '')
user.email = info.get('email', '')
# Check if the user is in the Administrators groups
# NOTE(review): assumes info always carries a 'groups' mapping of
# domain -> group names; a missing key would raise KeyError here.
is_admin = False
for domain in info['groups']:
if 'Domain Admins' in info['groups'][domain]:
is_admin = True
break
# Set the user permissions
user.is_staff = is_admin
user.is_superuser = is_admin
# Refresh the password
# (keeps the local hash in sync with the AD credentials just used)
user.set_password(password)
# Validate the selected user and gotten information
user = self.validate(user, info)
if user:
self.debug("User got validated!")
# Autosave the user until this point
user.save()
# Synchronize user
# (mirror AD group membership onto local Django groups)
self.synchronize(user, info)
else:
self.debug("User didn't pass validation!")
# Finally return user (None when validate() rejected it)
return user |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def synchronize(self, user, info):
'''
It tries to do a group synchronization if possible
This methods should be redeclared by the developer
'''
self.debug("Synchronize!")
# Remove all groups from this user
user.groups.clear()
# For all domains found for this user
for domain in info['groups']:
# For all groups he is
for groupname in info['groups'][domain]:
# Lookup for that group
group = Group.objects.filter(name=groupname).first()
if group:
# If found, add the user to that group
user.groups.add(group) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update_schedule_items(*args, **kw):
"""We save all the schedule items associated with this slot, so the last_update time is updated to reflect any changes to the timing of the slots""" |
# Presumably wired up as a signal handler (the saved Slot arrives as the
# `instance` keyword argument) -- confirm against the signal registration.
slot = kw.pop('instance', None)
if not slot:
return
# Touch each item so its last_updated field reflects the slot change.
for item in slot.scheduleitem_set.all():
item.save(update_fields=['last_updated'])
# We also need to update the next slot, in case we changed its
# times as well
next_slot = slot.slot_set.all()
if next_slot.count():
# From the way we structure the slot tree, we know that
# there's only 1 next slot that could have changed.
for item in next_slot[0].scheduleitem_set.all():
item.save(update_fields=['last_updated']) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def make_diff(current, revision):
"""Create the difference between the current revision and a previous version""" |
# Returns a sorted list of (field_name, html_patch) pairs, one per field
# whose value differs between the two versions.
the_diff = []
dmp = diff_match_patch()
# Union of field names, so fields added or removed between revisions are
# still compared.
for field in (set(current.field_dict.keys()) | set(revision.field_dict.keys())):
# These exclusions really should be configurable
if field == 'id' or field.endswith('_rendered'):
continue
# KeyError's may happen if the database structure changes
# between the creation of revisions. This isn't ideal,
# but should not be a fatal error.
# Log this?
missing_field = False
try:
cur_val = current.field_dict[field] or ""
except KeyError:
cur_val = "No such field in latest version\n"
missing_field = True
try:
old_val = revision.field_dict[field] or ""
except KeyError:
old_val = "No such field in old version\n"
missing_field = True
if missing_field:
# Ensure that the complete texts are marked as changed
# so new entries containing any of the marker words
# don't show up as differences
diffs = [(dmp.DIFF_DELETE, old_val), (dmp.DIFF_INSERT, cur_val)]
patch = dmp.diff_prettyHtml(diffs)
elif isinstance(cur_val, Markup):
# we roll our own diff here, so we can compare of the raw
# markdown, rather than the rendered result.
# NOTE(review): assumes old_val is also a Markup with a .raw
# attribute whenever cur_val is -- confirm, otherwise this raises.
if cur_val.raw == old_val.raw:
continue
diffs = dmp.diff_main(old_val.raw, cur_val.raw)
patch = dmp.diff_prettyHtml(diffs)
elif cur_val == old_val:
continue
else:
# Compare the actual field values
diffs = dmp.diff_main(force_text(old_val), force_text(cur_val))
patch = dmp.diff_prettyHtml(diffs)
the_diff.append((field, patch))
the_diff.sort()
return the_diff |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compare_view(self, request, object_id, version_id, extra_context=None):
"""Actually compare two versions.""" |
opts = self.model._meta
object_id = unquote(object_id)
# get_for_object's ordering means this is always the latest revision.
# The reversion we want to compare to
current = Version.objects.get_for_object_reference(self.model, object_id)[0]
revision = Version.objects.get_for_object_reference(self.model, object_id).filter(id=version_id)[0]
the_diff = make_diff(current, revision)
context = {
"title": _("Comparing current %(model)s with revision created %(date)s") % {
'model': current,
'date' : get_date(revision),
},
"opts": opts,
"compare_list_url": reverse("%s:%s_%s_comparelist" % (self.admin_site.name, opts.app_label, opts.model_name),
args=(quote(object_id),)),
"diff_list": the_diff,
}
extra_context = extra_context or {}
context.update(extra_context)
return render(request, self.compare_template or self._get_template_list("compare.html"),
context) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def comparelist_view(self, request, object_id, extra_context=None):
"""Allow selecting versions to compare.""" |
opts = self.model._meta
object_id = unquote(object_id)
current = get_object_or_404(self.model, pk=object_id)
# As done by reversion's history_view
action_list = [
{
"revision": version.revision,
"url": reverse("%s:%s_%s_compare" % (self.admin_site.name, opts.app_label, opts.model_name), args=(quote(version.object_id), version.id)),
} for version in self._reversion_order_version_queryset(Version.objects.get_for_object_reference(
self.model,
object_id).select_related("revision__user"))]
context = {"action_list": action_list,
"opts": opts,
"object_id": quote(object_id),
"original": current,
}
extra_context = extra_context or {}
context.update(extra_context)
return render(request, self.compare_list_template or self._get_template_list("compare_list.html"),
context) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def grv(struct, position):
    """Render one date component for the filtering display.

    ``struct[position]`` is assumed to be a sequence whose element 0 holds
    the component's value and element 2 a truthy "value present" flag --
    TODO confirm against callers.

    :returns: the value zero-padded to 4 digits for 'year' (2 otherwise),
        or a same-width run of underscores when the flag is unset.
    """
    width = 4 if position == 'year' else 2
    entry = struct[position]
    if entry[2]:
        # Value present: zero-pad it to the component's display width.
        return str(entry[0]).zfill(width)
    # No value: placeholder of underscores matching the width.
    return '_' * width
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def get_template_names(self):
'''
Build the list of templates related to this user
'''
# Get user template
# Default path is "<app>/<model>_<key>" unless the view sets template_model.
template_model = getattr(self, 'template_model', "{0}/{1}_{2}".format(self._appname.lower(), self._modelname.lower(), self.get_template_names_key))
template_model_ext = getattr(self, 'template_model_ext', 'html')
templates = get_template(template_model, self.user, self.language, template_model_ext, raise_error=False)
if type(templates) == list:
# Fall back to the generic codenerix template as a last resort.
templates.append("codenerix/{0}.html".format(self.get_template_names_key))
# Return the set of templates
return templates |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def get_context_data(self, **kwargs):
'''
Set a base context
'''
# Call the base implementation first to get a context
context = super(GenBase, self).get_context_data(**kwargs)
# Update general context with the stuff we already calculated
if hasattr(self, 'html_head'):
context['html_head'] = self.html_head(self.object)
# Add translation system
if hasattr(self, 'gentrans'):
context['gentranslate'] = self.gentrans.copy()
context['gentranslate'].update(self.gentranslate)
else:
context['gentranslate'] = self.gentranslate
# Return context
return context |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def get_context_data(self, **kwargs):
'''
Generic list view with validation included and object transfering support
'''
# Call the base implementation first to get a context
context = super(GenList, self).get_context_data(**kwargs)
# Update general context with the stuff we already calculated
context.update(self.__context)
# Initialize with our timestamp
context['now'] = epochdate(time.time())
context['profile'] = self.profile
# Check vtable
context['vtable'] = getattr(self, 'vtable', False)
# Export to excel
context['export_excel'] = getattr(self, 'export_excel', True)
context['export_name'] = getattr(self, 'export_name', 'list')
# Check ngincludes
context['ngincludes'] = getattr(self, 'ngincludes', {})
if 'table' not in context['ngincludes'].keys():
context['ngincludes']['table'] = "{}codenerix/partials/table.html".format(settings.STATIC_URL)
# Check linkadd
context['linkadd'] = getattr(self, 'linkadd', self.auth_permission('add') or getattr(self, 'public', False))
# Check linkedit
context['linkedit'] = getattr(self, 'linkedit', self.auth_permission('change') or getattr(self, 'public', False))
# Check showdetails
context['show_details'] = getattr(self, 'show_details', False)
# Check showmodal
context['show_modal'] = getattr(self, 'show_modal', False)
# Set search filter button
context['search_filter_button'] = getattr(self, 'search_filter_button', False)
# Get base template
if not self.json_worker:
template_base = getattr(self, 'template_base', 'base/base')
template_base_ext = getattr(self, 'template_base_ext', 'html')
context['template_base'] = get_template(template_base, self.user, self.language, extension=template_base_ext)
# Try to convert object_id to a numeric id
object_id = kwargs.get('object_id', None)
try:
object_id = int(object_id)
except Exception:
pass
# Python 2 VS Python 3 compatibility
try:
unicode('codenerix')
unicodetest = unicode
except NameError:
unicodetest = str
if isinstance(object_id, str) or isinstance(object_id, unicodetest):
# If object_id is a string, we have a name not an object
context['object_name'] = object_id
object_obj = None
else:
# If is not an string
if object_id:
# If we got one, load the object
obj = context['obj']
object_obj = get_object_or_404(obj, pk=object_id)
else:
# There is no object
object_obj = None
context['object_obj'] = object_obj
# Attach extra_context
context.update(self.extra_context)
# Return new context
return context |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def get_context_json(self, context):
'''
Return a base answer for a json answer
'''
# Initialize answer
answer = {}
# Metadata builder
answer['meta'] = self.__jcontext_metadata(context)
# Filter builder
answer['filter'] = self.__jcontext_filter(context)
# Head builder
answer['table'] = {}
answer['table']['head'] = self.__jcontext_tablehead(context)
answer['table']['body'] = None
answer['table']['header'] = None
answer['table']['summary'] = None
# Return answer
return answer |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def get_form(self, form_class=None):
'''
Set form groups to the groups specified in the view if defined
'''
formobj = super(GenModify, self).get_form(form_class)
# Set requested group to this form
selfgroups = getattr(self, "form_groups", None)
if selfgroups:
if type(selfgroups) == list:
formobj.__groups__ = lambda: selfgroups
else:
formobj.__groups__ = selfgroups
else:
selfgroups = getattr(self, "__groups__", None)
if selfgroups:
formobj.__groups__ = selfgroups
# Return the new updated form
return formobj |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_parent(self, directory):
""" Given a directory name, return the Page representing it in the menu hierarchy. """ |
assert settings.PAGE_DIR.startswith('/')
assert settings.PAGE_DIR.endswith('/')
# Strip the PAGE_DIR prefix; what's left is the slug path to the page.
parents = directory[len(settings.PAGE_DIR):]
page = None
if parents:
# Walk slug by slug from the root; each step looks the child page up
# under the page found so far (raises Page.DoesNotExist on a bad path).
for slug in parents.split('/'):
page = Page.objects.get(parent=page, slug=slug)
return page |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def wafer_sso_url(context, sso_method):
'''
Return the correct URL to SSO with the given method.
'''
request = context.request
url = reverse(getattr(views, '%s_login' % sso_method))
if 'next' in request.GET:
url += '?' + urlencode({'next': request.GET['next']})
return url |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def authorize(args):
""" Authorizes Coursera's OAuth2 client for using coursera.org API servers for a specific application """ |
oauth2_instance = oauth2.build_oauth2(args.app, args)
oauth2_instance.build_authorizer()
logging.info('Application "%s" authorized!', args.app) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_auth(args):
    """Check courseraoauth2client's connectivity to the coursera.org API
    servers for a specific application.

    Fetches the caller's basic profile using the app's OAuth2 credentials
    and prints the name and external id (unless --quiet). Exits with
    status 1 on any failure.

    :param args: parsed CLI arguments; must provide ``app`` and ``quiet``.
    """
    oauth2_instance = oauth2.build_oauth2(args.app, args)
    auth = oauth2_instance.build_authorizer()
    my_profile_url = (
        'https://api.coursera.org/api/externalBasicProfiles.v1?'
        'q=me&fields=name'
    )
    r = requests.get(my_profile_url, auth=auth)
    if r.status_code != 200:
        logging.error('Received response code %s from the basic profile API.',
                      r.status_code)
        logging.debug('Response body:\n%s', r.text)
        sys.exit(1)
    # Narrowed from bare except: r.json() raises ValueError on malformed
    # JSON, and the element lookups raise LookupError (KeyError/IndexError).
    try:
        external_id = r.json()['elements'][0]['id']
    except (ValueError, LookupError):
        logging.error(
            'Could not parse the external id out of the response body %s',
            r.text)
        external_id = None
    try:
        name = r.json()['elements'][0]['name']
    except (ValueError, LookupError):
        logging.error(
            'Could not parse the name out of the response body %s',
            r.text)
        name = None
    if not args.quiet > 0:
        # Python 3 print function (the original used Python 2 print statements).
        print('Name: %s' % name)
        print('External ID: %s' % external_id)
    if name is None or external_id is None:
        sys.exit(1)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def quintic_bucket_warp(x, n, l1, l2, l3, x0, w1, w2, w3):
    """Warp a length scale with a piecewise quintic "bucket" profile.

    The profile is l1 far to the left, l2 inside the bucket and l3 far to
    the right, with smooth quintic transitions of widths w1 and w3 on
    either side of a bucket of width w2 centred at x0.

    :param x: float or array-like of float; evaluation locations.
    :param n: derivative order; only 0 and 1 are supported.
    :param l1: positive float; length scale left of the bucket.
    :param l2: positive float; length scale inside the bucket.
    :param l3: positive float; length scale right of the bucket.
    :param x0: float; centre of the bucket.
    :param w1: positive float; width of the left quintic transition.
    :param w2: positive float; width of the bucket.
    :param w3: positive float; width of the right quintic transition.
    :raises NotImplementedError: for any derivative order above 1.
    """
    # Centres of the left and right transition regions.
    c_left = x0 - w2 / 2.0 - w1 / 2.0
    c_right = x0 + w2 / 2.0 + w3 / 2.0
    # Normalised coordinates: -1 at the start of a transition, +1 at its end.
    t_left = 2.0 * (x - c_left) / w1
    t_right = 2.0 * (x - c_right) / w3
    # Indicator masks (booleans, or elementwise arrays) for each transition.
    in_left = (x > (c_left - w1 / 2.0)) & (x < (c_left + w1 / 2.0))
    in_right = (x > (c_right - w3 / 2.0)) & (x < (c_right + w3 / 2.0))

    def smooth(t):
        # Quintic rising from -1 at t=-1 to +1 at t=+1, flat at both ends.
        return 3.0 / 8.0 * t**5 - 5.0 / 4.0 * t**3 + 15.0 / 8.0 * t

    def dsmooth(t):
        # d(smooth)/dt.
        return 15.0 / 8.0 * t**4 - 15.0 / 4.0 * t**2 + 15.0 / 8.0

    if n == 0:
        return (
            l1 * (x <= (c_left - w1 / 2.0)) +
            (0.5 * (l2 - l1) * smooth(t_left) + (l1 + l2) / 2.0) * in_left +
            l2 * ((x >= (c_left + w1 / 2.0)) & (x <= c_right - w3 / 2.0)) +
            (0.5 * (l3 - l2) * smooth(t_right) + (l2 + l3) / 2.0) * in_right +
            l3 * (x >= (c_right + w3 / 2.0))
        )
    elif n == 1:
        return (
            (0.5 * (l2 - l1) * dsmooth(t_left) / w1) * in_left +
            (0.5 * (l3 - l2) * dsmooth(t_right) / w3) * in_right
        )
    else:
        raise NotImplementedError("Only up to first derivatives are supported!")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sso(user, desired_username, name, email, profile_fields=None):
    """Resolve a single-sign-on request to a user ready for ``login()``.

    When ``user`` is falsy, a fresh account is created (provided registration
    is open) and configured from the supplied details.  The returned user has
    its ``backend`` attribute set so callers can pass it straight to
    ``login()`` without going through ``authenticate()``.

    Raises ``SSOError`` if registration is closed or the account is disabled.
    """
    if not user:
        if not settings.REGISTRATION_OPEN:
            raise SSOError('Account registration is closed')
        user = _create_desired_user(desired_username)
        _configure_user(user, name, email, profile_fields)

    if not user.is_active:
        raise SSOError('Account disabled')

    # login() expects the backend that authenticate() would have recorded on
    # the user; we bypass authenticate(), so fake it with the first backend.
    user.backend = settings.AUTHENTICATION_BACKENDS[0]
    return user
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def debit(self, amount, credit_account, description, debit_memo="", credit_memo="", datetime=None):
    """Record a debit of ``amount`` against this account.

    Posts a matching credit of ``-amount`` to ``credit_account``.
    ``amount`` must be non-negative.
    """
    assert amount >= 0
    return self.post(
        amount, credit_account, description,
        self_memo=debit_memo, other_memo=credit_memo, datetime=datetime)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def credit(self, amount, debit_account, description, debit_memo="", credit_memo="", datetime=None):
    """Record a credit of ``amount`` against this account.

    Posts a matching debit of ``amount`` to ``debit_account``.
    ``amount`` must be non-negative.
    """
    assert amount >= 0
    return self.post(
        -amount, debit_account, description,
        self_memo=credit_memo, other_memo=debit_memo, datetime=datetime)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def post(self, amount, other_account, description, self_memo="", other_memo="", datetime=None):
    """Post ``amount`` against this account and ``-amount`` against ``other_account``.

    Shows as a debit on this account when ``amount > 0`` and as a credit when
    ``amount < 0``.  Returns the pair of account entries created (this
    account's entry first).
    """
    # Debits are stored positive and credits negative in the DB; they are
    # negated before display on accounts where credits are the natural sign.
    tx = self._new_transaction()
    if datetime:
        tx.t_stamp = datetime  # otherwise the transaction defaults to now()
    tx.description = description
    tx.save()
    sign = self._DEBIT_IN_DB()
    self_entry = self._make_ae(sign * amount, self_memo, tx)
    self_entry.save()
    other_entry = other_account._make_ae(-sign * amount, other_memo, tx)
    other_entry.save()
    return (self_entry, other_entry)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def totals(self, start=None, end=None):
    """Sum all debits, credits and the net change over ``[start, end)``.

    ``start`` is inclusive, ``end`` is exclusive.  Returns a ``Totals``
    object built from (credits, debits, net).
    """
    entries = self._entries_range(start=start, end=end)
    pos_sum = entries.filter(amount__gt=Decimal("0.00")).all().aggregate(Sum('amount'))['amount__sum']
    neg_sum = entries.filter(amount__lt=Decimal("0.00")).all().aggregate(Sum('amount'))['amount__sum']
    # Aggregating over zero rows yields None rather than zero, so coerce.
    positives = pos_sum if pos_sum is not None else 0
    negatives = -neg_sum if neg_sum is not None else 0
    if self._DEBIT_IN_DB() > 0:
        debits, credits = positives, negatives
    else:
        debits, credits = negatives, positives
    net = debits - credits
    # Accounts where credits are the natural sign report the net flipped.
    if self._positive_credit():
        net = -net
    return self.Totals(credits, debits, net)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ledger(self, start=None, end=None):
    """Return this account's entries, with running balances, in order.

    Produces ``LedgerEntry`` objects in chronological order.  The result can
    be boolean-tested to detect an empty ledger.  When given, ``start``
    (inclusive) and ``end`` (exclusive) must be timezone-aware datetimes.
    """
    sign = self._DEBIT_IN_DB()
    # Flip the running balance on accounts where credits are positive.
    direction = -1 if self._positive_credit() else 1
    entries = self._entries_range(start=start, end=end)
    entries = entries.order_by("transaction__t_stamp", "transaction__tid")
    opening = self.balance(start) if start else Decimal("0.00")
    if not entries:
        # A plain list lets the caller cheaply test for "no entries";
        # a bare generator would always be truthy.
        return []

    def walk(balance):
        for entry in entries.all():
            amount = entry.amount * sign
            before = balance
            balance += direction * amount
            yield LedgerEntry(amount, entry, before, balance)

    return walk(opening)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def find_overlapping_slots(all_slots):
    """Return the set of slots that overlap in time on the same day."""
    overlaps = set()
    # Simple O(N^2) pairwise comparison; slot counts are small enough that
    # the quadratic scan is "fast enough" and keeps the logic obvious.
    for slot in all_slots:
        slot_start = slot.get_start_time()
        slot_end = slot.end_time
        for other in all_slots:
            if other.pk == slot.pk:
                continue  # don't compare a slot with itself
            if other.get_day() != slot.get_day():
                continue  # different days cannot overlap
            other_start = other.get_start_time()
            other_end = other.end_time
            # Overlap when the other slot starts inside ours
            # (start <= other_start < end) or ends inside ours
            # (start < other_end <= end).
            if (slot_start <= other_start < slot_end) or \
                    (slot_start < other_end <= slot_end):
                overlaps.add(slot)
                overlaps.add(other)
    return overlaps
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def find_non_contiguous(all_items):
    """Return items whose assigned slots do not form one unbroken run."""
    non_contiguous = []
    for item in all_items:
        if item.slots.count() < 2:
            continue  # zero or one slot is trivially contiguous
        previous = None
        for slot in item.slots.all().order_by('end_time'):
            if previous is not None and previous.end_time != slot.get_start_time():
                # Gap between consecutive slots; flag the item once and stop.
                non_contiguous.append(item)
                break
            previous = slot
    return non_contiguous
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def find_invalid_venues(all_items):
    """Find venues assigned slots that aren't on the allowed list of days.

    Returns dict-items of (venue, [invalid scheduled items]) pairs, one
    entry per venue that has at least one flagged item.
    """
    venues = {}
    for item in all_items:
        valid = False
        # Days on which this item's venue is available.
        item_days = list(item.venue.days.all())
        for slot in item.slots.all():
            for day in item_days:
                if day == slot.get_day():
                    valid = True
                    break
        # NOTE(review): ``valid`` is never reset between slots, so an item
        # counts as valid if ANY of its slots falls on an allowed day, even
        # if other slots do not - confirm this is the intended semantics.
        if not valid:
            venues.setdefault(item.venue, [])
            venues[item.venue].append(item)
    return venues.items()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_schedule():
    """Return True if the schedule passes every validator, else False."""
    all_items = prefetch_schedule_items()
    # Validators run in order and we stop at the first failure, matching
    # the short-circuit behaviour of an early return.
    if any(validator(all_items)
           for validator, _type, _msg in SCHEDULE_ITEM_VALIDATORS):
        return False
    all_slots = prefetch_slots()
    return not any(validator(all_slots)
                   for validator, _type, _msg in SLOT_VALIDATORS)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def validate_schedule():
    """Return a list of error messages describing schedule problems."""
    errors = []
    all_items = prefetch_schedule_items()
    errors.extend(msg for validator, _type, msg in SCHEDULE_ITEM_VALIDATORS
                  if validator(all_items))
    all_slots = prefetch_slots()
    errors.extend(msg for validator, _type, msg in SLOT_VALIDATORS
                  if validator(all_slots))
    return errors
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_form(self, request, obj=None, **kwargs):
    """Change the form depending on whether we're adding or editing the slot."""
    adding_new_slot = obj is None
    if adding_new_slot:
        # Substitute the add-specific form when no existing object is given.
        kwargs['form'] = SlotAdminAddForm
    return super(SlotAdmin, self).get_form(request, obj, **kwargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_cached_menus():
    """Return the menus from the cache, generating and caching them if absent."""
    items = cache.get(CACHE_KEY)
    if items is not None:
        # Cache hit: rebuild the Menu wrapper around the cached items.
        return Menu(items)
    menu = generate_menu()
    cache.set(CACHE_KEY, menu.items)
    return menu
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.