_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q16800
get_institute_trend_graph_url
train
def get_institute_trend_graph_url(institute, start, end):
    """Build the trend-graph URLs for an institute over a period.

    Returns a dict with 'graph_url' (PNG image) and 'data_url' (CSV
    data), both relative to GRAPH_URL.
    """
    base = get_institute_trend_graph_filename(institute, start, end)
    return {
        'graph_url': urlparse.urljoin(GRAPH_URL, base + ".png"),
        'data_url': urlparse.urljoin(GRAPH_URL, base + ".csv"),
    }
python
{ "resource": "" }
q16801
get_institutes_trend_graph_urls
train
def get_institutes_trend_graph_urls(start, end):
    """Collect trend-graph URL dicts for every institute.

    Each entry carries the URLs from get_institute_trend_graph_url
    plus the 'institute' object itself.
    """
    def _entry(institute):
        urls = get_institute_trend_graph_url(institute, start, end)
        urls['institute'] = institute
        return urls

    return [_entry(institute) for institute in Institute.objects.all()]
python
{ "resource": "" }
q16802
PersonManager._create_user
train
def _create_user(
        self, username, email, short_name, full_name, institute,
        password, is_admin, **extra_fields):
    """Create, password-protect, save and return a new active person."""
    # Merge the explicit fields with any extras before instantiating.
    fields = dict(
        extra_fields,
        username=username,
        email=email,
        short_name=short_name,
        full_name=full_name,
        is_admin=is_admin,
        institute=institute,
    )
    person = self.model(**fields)
    person.set_password(password)
    person.save()
    return person
python
{ "resource": "" }
q16803
PersonManager.create_user
train
def create_user(
        self, username, email, short_name, full_name, institute,
        password=None, **extra_fields):
    """Create a new ordinary (non-admin) person."""
    # Delegate to _create_user with admin rights switched off.
    return self._create_user(
        username=username,
        email=email,
        short_name=short_name,
        full_name=full_name,
        institute=institute,
        password=password,
        is_admin=False,
        **extra_fields)
python
{ "resource": "" }
q16804
PersonManager.create_superuser
train
def create_superuser(
        self, username, email, short_name, full_name, institute,
        password, **extra_fields):
    """Create a new person with super powers (admin rights)."""
    # Delegate to _create_user with admin rights switched on.
    return self._create_user(
        username=username,
        email=email,
        short_name=short_name,
        full_name=full_name,
        institute=institute,
        password=password,
        is_admin=True,
        **extra_fields)
python
{ "resource": "" }
q16805
do_application_actions_plus
train
def do_application_actions_plus(parser, token):
    """Template tag: render available actions with extra text.

    Parses the template nodes up to ``end_application_actions`` and
    wraps them in an ApplicationActionsPlus node.
    """
    body = parser.parse(('end_application_actions',))
    parser.delete_first_token()
    return ApplicationActionsPlus(body)
python
{ "resource": "" }
q16806
run
train
def run(cmds, **kwargs):
    """
    Run `cmds`, streaming combined stdout/stderr as decoded text.

    Additional kwargs are passed to subprocess.Popen (the original
    docstring claimed they were forwarded, but they were ignored).

    Parameters
    ----------
    cmds : list
        Command and arguments, as accepted by subprocess.Popen.

    Raises
    ------
    subprocess.CalledProcessError
        If the command exits with a nonzero status.
    """
    proc = sp.Popen(
        cmds,
        bufsize=-1,
        stdout=sp.PIPE,
        stderr=sp.STDOUT,
        close_fds=sys.platform != 'win32',
        **kwargs)
    try:
        for line in proc.stdout:
            # Strip the trailing newline and decode bytes for printing.
            print(line[:-1].decode())
    finally:
        # Close the pipe even if printing raises, to avoid an fd leak.
        proc.stdout.close()
    retcode = proc.wait()
    if retcode:
        raise sp.CalledProcessError(retcode, cmds)
python
{ "resource": "" }
q16807
symlink
train
def symlink(target, linkname):
    """
    Create (or replace) a symlink to `target` called `linkname`.

    Converts `target` and `linkname` to absolute paths; creates
    `dirname(linkname)` if needed. Uses os.symlink instead of shelling
    out to ``ln -s -f`` (portable, no subprocess overhead).

    Returns
    -------
    str
        The absolute link name.

    Raises
    ------
    ValueError
        If `target` does not exist.
    """
    target = os.path.abspath(target)
    linkname = os.path.abspath(linkname)
    if not os.path.exists(target):
        raise ValueError("target {} not found".format(target))
    link_dir = os.path.dirname(linkname)
    # exist_ok also guards against a concurrent mkdir race.
    os.makedirs(link_dir, exist_ok=True)
    # Replicate ``ln -s -f``: force-replace an existing link or file.
    if os.path.lexists(linkname):
        os.remove(linkname)
    os.symlink(target, linkname)
    return linkname
python
{ "resource": "" }
q16808
upload
train
def upload(host, user, local_dir, remote_dir, rsync_options=RSYNC_OPTIONS):
    """
    Upload a file or directory via rsync.

    Parameters
    ----------
    host : str or None
        If None (or 'localhost'), the host part is omitted and the
        transfer happens locally.
    user : str or None
        If None, the user part is omitted.
    local_dir : str
        A trailing "/" is appended if missing.
    remote_dir : str
        A trailing "/" is appended if missing.
    rsync_options : str
        Extra options split with shlex and passed to rsync.

    Returns
    -------
    list
        Single-element list holding the remote destination string.
    """
    user_part = "" if user is None else user + "@"
    if host is None or host == 'localhost':
        host_part = ""
    else:
        host_part = host + ":"
    if not local_dir.endswith('/'):
        local_dir += '/'
    if not remote_dir.endswith('/'):
        remote_dir += '/'
    remote_string = user_part + host_part + remote_dir
    cmds = ['rsync'] + shlex.split(rsync_options) + [local_dir, remote_string]
    run(cmds)
    return [remote_string]
python
{ "resource": "" }
q16809
local_link
train
def local_link(local_fn, remote_fn, staging):
    """
    Symlink `local_fn` into the local staging area.

    The link name reuses `remote_fn`'s path (with the leading path
    separator stripped) joined under the `staging` directory, e.g.
    remote "/hubs/hg19/a.bw" with staging "__staging__" links to
    "__staging__/hubs/hg19/a.bw".
    """
    relative = remote_fn.lstrip(os.path.sep)
    return symlink(local_fn, os.path.join(staging, relative))
python
{ "resource": "" }
q16810
stage
train
def stage(x, staging):
    """
    Stage an object to the `staging` directory.

    If the object is a Track and is one of the types that needs an index
    file (bam, vcfTabix), then the index file will be staged as well.

    Returns a list of the linknames created.
    """
    linknames = []
    # Objects that don't represent a file shouldn't be staged
    non_file_objects = (
        track.ViewTrack,
        track.CompositeTrack,
        track.AggregateTrack,
        track.SuperTrack,
        genome.Genome,
    )
    if isinstance(x, non_file_objects):
        return linknames
    # If it's an object representing a file, then render it.
    #
    # Track objects don't represent files, but their documentation does
    linknames.append(x.render(staging))
    if hasattr(x, 'source') and hasattr(x, 'filename'):
        def _stg(x, ext=''):
            # A remote track hosted elsewhere does not need staging. This is
            # defined by a track with a url, but no source or filename.
            if (
                x.source is None
                and x.filename is None
                and getattr(x, 'url', None) is not None
            ):
                return
            # Link source -> filename inside the staging area, including
            # any index-file extension (e.g. '.bai').
            linknames.append(
                local_link(x.source + ext, x.filename + ext, staging)
            )
        _stg(x)
        if isinstance(x, track.Track):
            # Index files accompany these track types.
            if x.tracktype == 'bam':
                _stg(x, ext='.bai')
            if x.tracktype == 'vcfTabix':
                _stg(x, ext='.tbi')
        if isinstance(x, track.CompositeTrack):
            # NOTE(review): x._html is assumed to expose source/filename
            # like a track does — confirm against the trackhub API.
            if x._html:
                _stg(x._html)
    return linknames
python
{ "resource": "" }
q16811
stage_hub
train
def stage_hub(hub, staging=None):
    """
    Stage a hub by symlinking all its connected files to a local
    directory.

    A temporary directory is created when `staging` is None.
    Returns (staging, linknames).
    """
    if staging is None:
        staging = tempfile.mkdtemp()
    linknames = []
    for obj, level in hub.leaves(base.HubComponent, intermediate=True):
        linknames += stage(obj, staging)
    return staging, linknames
python
{ "resource": "" }
q16812
upload_hub
train
def upload_hub(hub, host, remote_dir, user=None, port=22,
               rsync_options=RSYNC_OPTIONS, staging=None):
    """
    Render, stage, and rsync-upload a hub.

    Returns the linknames created while staging.
    """
    hub.render()
    if staging is None:
        staging = tempfile.mkdtemp()
    staging, linknames = stage_hub(hub, staging=staging)
    upload(
        host,
        user,
        local_dir=os.path.join(staging),
        remote_dir=remote_dir,
        rsync_options=rsync_options)
    return linknames
python
{ "resource": "" }
q16813
get_project_members
train
def get_project_members(machine, project_id):
    """Return the usernames of a project's group members.

    Returns an error string when the project does not exist.
    """
    try:
        project = Project.objects.get(pid=project_id)
    except Project.DoesNotExist:
        return 'Project not found'
    return [member.username for member in project.group.members.all()]
python
{ "resource": "" }
q16814
get_projects
train
def get_projects(machine):
    """Return the pids of all active projects."""
    return [project.pid for project in Project.active.all()]
python
{ "resource": "" }
q16815
get_project
train
def get_project(username, project, machine_name=None):
    """ Used in the submit filter to make sure user is in project.

    Resolution order: the named project if it exists, otherwise the
    account's default project; membership is re-checked after each
    fallback. Returns the project's pid, the string "None" when no
    usable project remains, or an error string for a missing account.
    """
    try:
        account = Account.objects.get(
            username=username, date_deleted__isnull=True)
    except Account.DoesNotExist:
        return "Account '%s' not found" % username
    if project is None:
        project = account.default_project
    else:
        try:
            project = Project.objects.get(pid=project)
        except Project.DoesNotExist:
            # Unknown pid: fall back to the account's default project.
            project = account.default_project
    if project is None:
        return "None"
    if account.person not in project.group.members.all():
        # Not a member of the requested project: fall back to the
        # default project, which must itself exist and include the user.
        project = account.default_project
        if project is None:
            return "None"
        if account.person not in project.group.members.all():
            return "None"
    return project.pid
python
{ "resource": "" }
q16816
get_users_projects
train
def get_users_projects(user):
    """Return (0, pids of the active projects the user belongs to)."""
    active_projects = user.projects.filter(is_active=True)
    return 0, [project.pid for project in active_projects]
python
{ "resource": "" }
q16817
redirect_to
train
def redirect_to(request, url, permanent=True, query_string=False, **kwargs):
    r"""
    Redirect to a given URL.

    The url may contain dict-style string formatting interpolated
    against the params in the URL, e.g. redirecting ``/foo/<id>/`` to
    ``/bar/<id>/`` with ``{'url': '/bar/%(id)s/'}``.

    If `url` is None, a HttpResponseGone (410) is issued. With
    ``permanent=False`` the response is a 302 instead of a 301. With
    ``query_string=True`` the request's GET query string is appended.
    """
    if url is None:
        logger.warning(
            'Gone: %s', request.path,
            extra={'status_code': 410, 'request': request})
        return HttpResponseGone()
    if kwargs:
        url = url % kwargs
    args = request.META.get('QUERY_STRING', '')
    if args and query_string:
        url = "%s?%s" % (url, args)
    klass = HttpResponsePermanentRedirect if permanent \
        else HttpResponseRedirect
    return klass(url)
python
{ "resource": "" }
q16818
assert_strong_password
train
def assert_strong_password(username, password, old_password=None):
    """Raise ValueError unless the password is strong enough.

    Checks length (settings.MIN_PASSWORD_LENGTH, default 12) and that
    the password does not contain the username, then delegates to
    _assert_password. Returns the password on success.
    """
    minlength = getattr(settings, 'MIN_PASSWORD_LENGTH', 12)
    if len(password) < minlength:
        raise ValueError(
            "Password must be at least %s characters long" % minlength)
    if username is not None and username in password:
        raise ValueError("Password contains username")
    return _assert_password(password, old_password)
python
{ "resource": "" }
q16819
_lookup
train
def _lookup(cls: str) -> LdapObjectClass: """ Lookup module.class. """ if isinstance(cls, str): module_name, _, name = cls.rpartition(".") module = importlib.import_module(module_name) try: cls = getattr(module, name) except AttributeError: raise AttributeError("%s reference cannot be found" % cls) return cls
python
{ "resource": "" }
q16820
DataStore.save_account
train
def save_account(self, account: Account) -> None:
    """ Account was saved.

    Creates or updates the LDAP entry for the account: picks the
    primary group according to the PRIMARY_GROUP setting, writes the
    person's attributes, and — for newly created entries — adds the
    account to all of the person's groups.
    """
    person = account.person
    # Resolve the primary LDAP group from configuration.
    if self._primary_group == 'institute':
        lgroup = self._get_group(person.institute.group.name)
    elif self._primary_group == 'default_project':
        if account.default_project is None:
            lgroup = self._get_group(self._default_primary_group)
        else:
            lgroup = self._get_group(account.default_project.group.name)
    else:
        raise RuntimeError("Unknown value of PRIMARY_GROUP.")
    if account.default_project is None:
        default_project = "none"
    else:
        default_project = account.default_project.pid
    # Update an existing LDAP account if present, otherwise start a
    # fresh entry keyed by uid.
    try:
        luser = self._get_account(account.username)
        changes = changeset(luser, {})
        new_user = False
    except ObjectDoesNotExist:
        new_user = True
        luser = self._account_class()
        changes = changeset(luser, {
            'uid': account.username
        })
    changes = changes.merge({
        'gidNumber': lgroup['gidNumber'],
        'givenName': person.first_name,
        'sn': person.last_name,
        'telephoneNumber': _str_or_none(person.telephone),
        'mail': _str_or_none(person.email),
        'title': _str_or_none(person.title),
        'o': person.institute.name,
        'cn': person.full_name,
        'default_project': default_project,
        'loginShell': account.shell,
        'locked': account.is_locked()
    })
    save(changes, database=self._database)
    if new_user:
        # add all groups
        for group in account.person.groups.all():
            self.add_account_to_group(account, group)
python
{ "resource": "" }
q16821
DataStore.delete_account
train
def delete_account(self, account):
    """ Account was deleted.

    Removes the LDAP entry and its group memberships; a missing entry
    is silently ignored.
    """
    try:
        luser = self._get_account(account.username)
        memberships = luser['groups'].load(database=self._database)
        for lgroup in memberships:
            changes = changeset(lgroup, {})
            changes = lgroup.remove_member(changes, luser)
            save(changes, database=self._database)
        delete(luser, database=self._database)
    except ObjectDoesNotExist:
        # it doesn't matter if it doesn't exist
        pass
python
{ "resource": "" }
q16822
DataStore.set_account_password
train
def set_account_password(self, account, raw_password):
    """ Account's password was changed; push it to LDAP. """
    luser = self._get_account(account.username)
    changes = changeset(luser, {'password': raw_password})
    save(changes, database=self._database)
python
{ "resource": "" }
q16823
DataStore.add_account_to_group
train
def add_account_to_group(self, account, group):
    """ Add the LDAP account to the LDAP group. """
    lgroup: OpenldapGroup = self._get_group(group.name)
    person: OpenldapAccount = self._get_account(account.username)
    membership = lgroup.add_member(changeset(lgroup, {}), person)
    save(membership, database=self._database)
python
{ "resource": "" }
q16824
DataStore.save_group
train
def save_group(self, group):
    """ Group was saved.

    If the group already exists in LDAP, take it over rather than
    erroring; otherwise create a fresh entry keyed by cn.
    """
    try:
        lgroup = self._get_group(group.name)
        changes = changeset(lgroup, {})
    except ObjectDoesNotExist:
        lgroup = self._group_class()
        changes = changeset(lgroup, {'cn': group.name})
    changes = changes.merge({'description': group.description})
    save(changes, database=self._database)
python
{ "resource": "" }
q16825
DataStore.delete_group
train
def delete_group(self, group):
    """ Group was deleted; remove the LDAP entry if present. """
    try:
        delete(self._get_group(group.name), database=self._database)
    except ObjectDoesNotExist:
        # it doesn't matter if it doesn't exist
        pass
python
{ "resource": "" }
q16826
DataStore.get_group_details
train
def get_group_details(self, group):
    """ Return the group's non-None LDAP attributes as a dict.

    Returns an empty dict when the group does not exist in LDAP.
    """
    try:
        lgroup = self._get_group(group.name)
        lgroup = preload(lgroup, database=self._database)
    except ObjectDoesNotExist:
        return {}
    return {key: value for key, value in lgroup.items()
            if value is not None}
python
{ "resource": "" }
q16827
_rank
train
def _rank(sample): """ Assign numeric ranks to all values in the sample. The ranks begin with 1 for the smallest value. When there are groups of tied values, assign a rank equal to the midpoint of unadjusted rankings. E.g.:: >>> rank({3: 1, 5: 4, 9: 1}) {3: 1.0, 5: 3.5, 9: 6.0} """ rank = 1 ranks = {} for k in sorted(sample.keys()): n = sample[k] ranks[k] = rank + (n - 1) / 2 rank += n return ranks
python
{ "resource": "" }
q16828
_tie_correct
train
def _tie_correct(sample): """ Returns the tie correction value for U. See: https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.tiecorrect.html """ tc = 0 n = sum(sample.values()) if n < 2: return 1.0 # Avoid a ``ZeroDivisionError``. for k in sorted(sample.keys()): tc += math.pow(sample[k], 3) - sample[k] tc = 1 - tc / (math.pow(n, 3) - n) return tc
python
{ "resource": "" }
q16829
ndtr
train
def ndtr(a):
    """
    Area under the Gaussian probability density function, integrated
    from minus infinity to `a`.

    See:
    https://docs.scipy.org/doc/scipy/reference/generated/scipy.special.ndtr.html#scipy.special.ndtr
    """
    sqrth = math.sqrt(2) / 2
    x = float(a) * sqrth
    z = abs(x)
    if z < sqrth:
        # Near zero: erf is better conditioned.
        return 0.5 + 0.5 * math.erf(x)
    # Tails: erfc avoids cancellation.
    y = 0.5 * math.erfc(z)
    return 1 - y if x > 0 else y
python
{ "resource": "" }
q16830
mann_whitney_u
train
def mann_whitney_u(sample1, sample2, use_continuity=True):
    """
    Compute the Mann-Whitney rank test on both samples.

    Each sample maps value -> occurrence count, e.g. ``{1: 5, 2: 20}``.
    Returns a named tuple with ``u`` (min of the two U statistics) and
    ``p`` (the p-value). Raises ValueError when every pooled value is
    identical.
    """
    # Pool the two samples, summing counts for shared values.
    pooled = sample1.copy()
    for value, count in sample2.items():
        pooled[value] = pooled.get(value, 0) + count
    # Rank lookups share the pooled keys.
    ranks = _rank(pooled)
    sum_of_ranks = sum(count * ranks[value]
                       for value, count in sample1.items())
    n1 = sum(sample1.values())
    n2 = sum(sample2.values())
    # Mann-Whitney U for both samples.
    u1 = sum_of_ranks - (n1 * (n1 + 1)) / 2
    u2 = n1 * n2 - u1
    tie_correction = _tie_correct(pooled)
    if tie_correction == 0:
        raise ValueError('All provided sample values are identical.')
    sd_u = math.sqrt(tie_correction * n1 * n2 * (n1 + n2 + 1) / 12.0)
    mean_rank = n1 * n2 / 2.0 + 0.5 * use_continuity
    z = abs((max(u1, u2) - mean_rank) / sd_u)
    return mwu_result(min(u1, u2), ndtr(-z))
python
{ "resource": "" }
q16831
get_pings
train
def get_pings(sc, app=None, build_id=None, channel=None,
              doc_type='saved_session', fraction=1.0, schema=None,
              source_name='telemetry', source_version='4',
              submission_date=None, version=None):
    """ Returns a RDD of Telemetry submissions for a given filtering criteria.

    :param sc: an instance of SparkContext
    :param app: an application name, e.g.: "Firefox"
    :param channel: a channel name, e.g.: "nightly"
    :param version: the application version, e.g.: "40.0a1"
    :param build_id: a build_id or a range of build_ids, e.g.:
                     "20150601000000" or ("20150601000000", "20150610999999")
    :param submission_date: a submission date or a range of submission dates,
                            e.g: "20150601" or ("20150601", "20150610")
    :param source_name: source name, set to "telemetry" by default
    :param source_version: source version, set to "4" by default
    :param doc_type: ping type, set to "saved_session" by default
    :param schema: (deprecated) version of the schema to use
    :param fraction: the fraction of pings to return, set to 1.0 by default
    """
    if schema:
        # The mis-encoded line break in this message has been repaired.
        print("The 'schema' parameter is deprecated. "
              "Version 4 is now the only schema supported.")
        if schema != "v4":
            raise ValueError("Invalid schema version")
    dataset = Dataset.from_source('telemetry')
    # Simple equality filters: skipped when unset or the wildcard '*'.
    filters = (
        ('docType', doc_type),
        ('sourceName', source_name),
        ('sourceVersion', source_version),
        ('appName', app),
        ('appUpdateChannel', channel),
        ('appVersion', version),
    )
    for key, condition in filters:
        if condition and condition != '*':
            dataset = dataset.where(**{key: condition})
    # build_id and submission_date can be either strings or tuples or
    # lists, so they deserve a special treatment.
    special_cases = dict(appBuildId=build_id, submissionDate=submission_date)

    def range_compare(min_val, max_val, val):
        return min_val <= val <= max_val

    for key, value in iteritems(special_cases):
        if value is not None and value != '*':
            if isinstance(value, string_types):
                condition = value
            elif isinstance(value, (list, tuple)) and len(value) == 2:
                start, end = value
                condition = partial(range_compare, start, end)
            else:
                raise ValueError(('{} must be either a string or a 2 elements '
                                  'list/tuple'.format(key)))
            dataset = dataset.where(**{key: condition})
    return dataset.records(sc, sample=fraction)
python
{ "resource": "" }
q16832
get_pings_properties
train
def get_pings_properties(pings, paths, only_median=False,
                         with_processes=False, histograms_url=None,
                         additional_histograms=None):
    """ Returns a RDD of a subset of properties of pings.

    Child histograms are automatically merged with the parent
    histogram. If one of the paths points to a keyedHistogram name
    without supplying the actual key, returns a dict of all available
    subhistograms for that property.

    :param with_processes: should separate parent and child histograms
        be included as well?
    :param paths: paths to properties in the payload, with levels
        separated by "/". Either a list of paths, or a dict whose
        values are paths keyed by custom identifiers.
    :param histograms_url: see histogram.Histogram constructor
    :param additional_histograms: see histogram.Histogram constructor

    The returned RDD contains a dict per ping keyed by the original
    paths (list form) or the custom identifiers (dict form).
    """
    if isinstance(pings.first(), binary_type):
        pings = pings.map(lambda p: json.loads(p.decode('utf-8')))
    if isinstance(paths, str):
        paths = [paths]
    # Use '/' as dots can appear in keyed histograms
    if isinstance(paths, dict):
        split_paths = [(name, path.split("/"))
                       for name, path in iteritems(paths)]
    else:
        split_paths = [(path, path.split("/")) for path in paths]
    extracted = pings.map(
        lambda p: _get_ping_properties(p, split_paths, only_median,
                                       with_processes, histograms_url,
                                       additional_histograms))
    return extracted.filter(lambda p: p)
python
{ "resource": "" }
q16833
get_one_ping_per_client
train
def get_one_ping_per_client(pings):
    """ Returns a single ping for each client in the RDD.

    THIS METHOD IS NOT RECOMMENDED: The ping to be returned is
    essentially selected at random. It is also expensive as it requires
    data to be shuffled around. It should be run only after extracting
    a subset with get_pings_properties.
    """
    if isinstance(pings.first(), binary_type):
        pings = pings.map(lambda p: json.loads(p.decode('utf-8')))
    filtered = pings.filter(lambda p: "clientID" in p or "clientId" in p)
    # BUG FIX: ``if not filtered`` was always False — an RDD has no
    # meaningful __bool__, so the error branch could never fire.
    if filtered.isEmpty():
        raise ValueError("Missing clientID/clientId attribute.")
    if "clientID" in filtered.first():
        client_id = "clientID"  # v2
    else:
        client_id = "clientId"  # v4
    return filtered.map(lambda p: (p[client_id], p)) \
                   .reduceByKey(lambda p1, p2: p1) \
                   .map(lambda p: p[1])
python
{ "resource": "" }
q16834
admin_required
train
def admin_required(function=None):
    """
    Decorator for views that checks that the user is an administrator,
    redirecting to the log-in page if necessary.
    """
    def check_perms(user):
        if not user.is_authenticated:
            # Not logged in: fall through to the login redirect.
            return False
        if settings.ADMIN_IGNORED:
            # This site doesn't allow admin access at all.
            raise PermissionDenied
        if not user.is_admin:
            # Logged in but lacking admin rights.
            raise PermissionDenied
        return True

    decorator = user_passes_test(check_perms, login_url=_login_url)
    return decorator(function) if function else decorator
python
{ "resource": "" }
q16835
SlurmDataStore._read_output
train
def _read_output(self, command):
    """ Read CSV (pipe-delimited) output from a Slurm command.

    Returns a list of dicts, one per data row, keyed by the header row.

    Raises
    ------
    subprocess.CalledProcessError
        If the command exits with a nonzero status.
    RuntimeError
        If the command produced no header row.
    """
    cmd = []
    cmd.extend(self._prefix)
    cmd.extend([self._path, "-iP"])
    cmd.extend(command)
    command = cmd
    logger.debug("Cmd %s" % command)
    # DEVNULL is portable and avoids opening/closing '/dev/null' by path.
    process = subprocess.Popen(
        command, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
    results = []
    reader = csv.reader(_input_csv(process.stdout), delimiter=str("|"))
    try:
        headers = next(reader)
        logger.debug("<-- headers %s" % headers)
    except StopIteration:
        logger.debug("Cmd %s headers not found" % command)
        headers = []
    for row in reader:
        _output_csv(row)
        logger.debug("<-- row %s" % row)
        # zip pairs each header with its column; a short row is
        # truncated rather than raising IndexError as the old index
        # loop did.
        results.append(dict(zip(headers, row)))
    process.stdout.close()
    retcode = process.wait()
    if retcode != 0:
        logger.error("<-- Cmd %s returned %d (error)" % (command, retcode))
        raise subprocess.CalledProcessError(retcode, command)
    if len(headers) == 0:
        logger.error("Cmd %s didn't return any headers." % command)
        raise RuntimeError("Cmd %s didn't return any headers." % command)
    logger.debug("<-- Returned: %d (good)" % retcode)
    return results
python
{ "resource": "" }
q16836
SlurmDataStore.get_project
train
def get_project(self, projectname):
    """ Get the project details from Slurm.

    Returns None when no such account exists; raises RuntimeError on
    duplicate results or a name mismatch.
    """
    cmd = ["list", "accounts", "where", "name=%s" % projectname]
    results = self._read_output(cmd)
    if not results:
        return None
    if len(results) > 1:
        message = (
            "Command returned multiple results for '%s'." % projectname)
        logger.error(message)
        raise RuntimeError(message)
    record = results[0]
    found_name = record["Account"]
    if projectname.lower() != found_name.lower():
        message = (
            "We expected projectname '%s' "
            "but got projectname '%s'." % (projectname, found_name))
        logger.error(message)
        raise RuntimeError(message)
    return record
python
{ "resource": "" }
q16837
SlurmDataStore.get_users_in_project
train
def get_users_in_project(self, projectname):
    """ Return the usernames associated with a Slurm project. """
    cmd = ["list", "assoc", "where", "account=%s" % projectname]
    rows = self._read_output(cmd)
    # Rows with an empty User column describe the account itself.
    return [row["User"] for row in rows if row["User"] != ""]
python
{ "resource": "" }
q16838
SlurmDataStore.get_projects_in_user
train
def get_projects_in_user(self, username):
    """ Return the Slurm account names the user is associated with. """
    cmd = ["list", "assoc", "where", "user=%s" % username]
    return [row["Account"] for row in self._read_output(cmd)]
python
{ "resource": "" }
q16839
TrackDb.add_tracks
train
def add_tracks(self, track):
    """
    Add a track or iterable of tracks.

    Parameters
    ----------
    track : iterable or Track
        Iterable of :class:`Track` objects, or a single :class:`Track`
        object.
    """
    from trackhub import BaseTrack
    # Normalize the single-track case to a one-element list.
    tracks = [track] if isinstance(track, BaseTrack) else track
    for t in tracks:
        self.add_child(t)
        self._tracks.append(t)
python
{ "resource": "" }
q16840
loggable
train
def loggable(obj):
    """Return "True" if the obj implements the minimum Logger API
    required by the 'trace' decorator.

    Accepts either a logging.Logger instance or a class exposing bound
    debug/isEnabledFor/setLevel methods.
    """
    if isinstance(obj, logging.Logger):
        return True
    else:
        # NOTE(review): inspect.ismethod() matches only *bound* methods;
        # on Python 3 a plain method looked up on a class is a function,
        # so this branch matches a class only when these attributes are
        # bound (e.g. classmethods) — confirm that is the intent.
        return (inspect.isclass(obj) and
                inspect.ismethod(getattr(obj, 'debug', None)) and
                inspect.ismethod(getattr(obj, 'isEnabledFor', None)) and
                inspect.ismethod(getattr(obj, 'setLevel', None)))
python
{ "resource": "" }
q16841
_formatter_self
train
def _formatter_self(name, value): """Format the "self" variable and value on instance methods. """ __mname = value.__module__ if __mname != '__main__': return '%s = <%s.%s object at 0x%x>' \ % (name, __mname, value.__class__.__name__, id(value)) else: return '%s = <%s object at 0x%x>' \ % (name, value.__class__.__name__, id(value))
python
{ "resource": "" }
q16842
_formatter_class
train
def _formatter_class(name, value): """Format the "klass" variable and value on class methods. """ __mname = value.__module__ if __mname != '__main__': return "%s = <type '%s.%s'>" % (name, __mname, value.__name__) else: return "%s = <type '%s'>" % (name, value.__name__)
python
{ "resource": "" }
q16843
get_formatter
train
def get_formatter(name):
    """Return the named formatter function.

    See the function "set_formatter" for details.
    """
    if name in ('self', 'instance', 'this'):
        return af_self
    if name == 'class':
        return af_class
    if name in ('named', 'param', 'parameter'):
        return af_named
    if name in ('default', 'optional'):
        return af_default
    # elif name in ('anonymous', 'arbitrary', 'unnamed'):
    #     return af_anonymous
    if name in ('keyword', 'pair', 'pairs'):
        return af_keyword
    raise ValueError('unknown trace formatter %r' % name)
python
{ "resource": "" }
q16844
__lookup_builtin
train
def __lookup_builtin(name):
    """Lookup the parameter name and default parameter values for
    builtin functions.

    Returns (param_count, param_names, defaults). The prototype table
    is parsed lazily on first call and cached in __builtin_functions;
    unknown names are cached as empty with a warning on stderr.
    """
    global __builtin_functions
    if __builtin_functions is None:
        # Parse each prototype string "fn(a, b=x)" into
        # {fn: ([param, ...], {param: default_value})}.
        builtins = dict()
        for proto in __builtins:
            pos = proto.find('(')
            # NOTE: this rebinding shadows the `name` parameter during
            # the table build; the lookup below happens afterwards.
            name, params, defaults = proto[:pos], list(), dict()
            for param in proto[pos + 1:-1].split(','):
                pos = param.find('=')
                if not pos < 0:
                    # Split "param=value" and map the textual default
                    # through the __builtin_defaults table.
                    param, value = param[:pos], param[pos + 1:]
                    try:
                        defaults[param] = __builtin_defaults[value]
                    except KeyError:
                        raise ValueError(
                            'builtin function %s: parameter %s: '
                            'unknown default %r' % (name, param, value))
                params.append(param)
            builtins[name] = (params, defaults)
        __builtin_functions = builtins
    try:
        params, defaults = __builtin_functions[name]
    except KeyError:
        # Cache a hit-miss as empty so the warning fires only once.
        params, defaults = tuple(), dict()
        __builtin_functions[name] = (params, defaults)
        print(
            "Warning: builtin function %r is missing prototype" % name,
            file=sys.stderr)
    return len(params), params, defaults
python
{ "resource": "" }
q16845
application_list
train
def application_list(request):
    """ a user wants to see all applications possible.

    Admins see every application; others only those they applied for.
    """
    if util.is_admin(request):
        queryset = Application.objects.all()
    else:
        queryset = Application.objects.get_for_applicant(request.user)
    q_filter = ApplicationFilter(request.GET, queryset=queryset)
    table = ApplicationTable(q_filter.qs.order_by("-expires"))
    tables.RequestConfig(request).configure(table)
    # Human-readable summary of the active filter fields.
    spec = [
        (name.replace('_', ' ').capitalize(), value)
        for name, value in six.iteritems(q_filter.form.cleaned_data)
        if value is not None and value != ""
    ]
    return render(
        template_name="kgapplications/application_list.html",
        context={
            'table': table,
            'filter': q_filter,
            'spec': spec,
            'title': "Application list",
        },
        request=request)
python
{ "resource": "" }
q16846
profile_application_list
train
def profile_application_list(request):
    """ a logged in user wants to see all his pending applications. """
    config = tables.RequestConfig(request, paginate={"per_page": 5})
    person = request.user
    mine = ApplicationTable(
        Application.objects.get_for_applicant(person), prefix="mine-")
    config.configure(mine)
    attention = ApplicationTable(
        Application.objects.requires_attention(request), prefix="attn-")
    config.configure(attention)
    return render(
        template_name='kgapplications/profile_applications.html',
        context={
            'person': request.user,
            'my_applications': mine,
            'requires_attention': attention,
        },
        request=request)
python
{ "resource": "" }
q16847
application_detail
train
def application_detail(request, application_id, state=None, label=None):
    """ A authenticated used is trying to access an application.

    Looks up the application and hands the request to its state machine.
    """
    application = base.get_application(pk=application_id)
    machine = base.get_state_machine(application)
    return machine.process(request, application, state, label)
python
{ "resource": "" }
q16848
application_unauthenticated
train
def application_unauthenticated(request, token, state=None, label=None):
    """ An somebody is trying to access an application via its token.

    Expired applications render the 'expired' page; a logged-in
    applicant is redirected to the authenticated URL; otherwise the
    state machine processes the request with applicant roles.
    """
    application = base.get_application(secret_token=token)
    if application.expires < datetime.datetime.now():
        return render(
            template_name='kgapplications/common_expired.html',
            context={'application': application},
            request=request)
    roles = {'is_applicant', 'is_authorised'}
    # redirect user to real url if possible.
    if request.user.is_authenticated \
            and request.user == application.applicant:
        url = base.get_url(request, application, roles, label)
        return HttpResponseRedirect(url)
    machine = base.get_state_machine(application)
    return machine.process(request, application, state, label, roles)
python
{ "resource": "" }
q16849
get_institute_usage
train
def get_institute_usage(institute, start, end):
    """Return (cpu hours, number of jobs) for an institute and period.

    Falls back to (0, 0) when today's cache entry is missing.
    """
    try:
        cache = InstituteCache.objects.get(
            institute=institute, date=datetime.date.today(),
            start=start, end=end)
    except InstituteCache.DoesNotExist:
        return 0, 0
    return cache.cpu_time, cache.no_jobs
python
{ "resource": "" }
q16850
get_project_usage
train
def get_project_usage(project, start, end):
    """Return (cpu hours, number of jobs) for a project and period.

    Falls back to (0, 0) when today's cache entry is missing.
    """
    try:
        cache = ProjectCache.objects.get(
            project=project, date=datetime.date.today(),
            start=start, end=end)
    except ProjectCache.DoesNotExist:
        return 0, 0
    return cache.cpu_time, cache.no_jobs
python
{ "resource": "" }
q16851
get_person_usage
train
def get_person_usage(person, project, start, end):
    """Return (cpu hours, job count) for a person within a project.

    Reads today's cached row; falls back to (0, 0) when no cache row
    exists.

    Keyword arguments:
    person --
    project -- The project the usage is from
    start -- start date
    end -- end date
    """
    today = datetime.date.today()
    try:
        row = PersonCache.objects.get(
            person=person, project=project, date=today,
            start=start, end=end)
    except PersonCache.DoesNotExist:
        return 0, 0
    return row.cpu_time, row.no_jobs
python
{ "resource": "" }
q16852
get_machine_usage
train
def get_machine_usage(machine, start, end):
    """Return (cpu hours, job count) for a machine over [start, end].

    Reads today's cached row; falls back to (0, 0) when no cache row
    exists.

    Keyword arguments:
    machine --
    start -- start date
    end -- end date
    """
    today = datetime.date.today()
    try:
        row = MachineCache.objects.get(
            machine=machine, date=today, start=start, end=end)
    except MachineCache.DoesNotExist:
        return 0, 0
    return row.cpu_time, row.no_jobs
python
{ "resource": "" }
q16853
get_machine_category_usage
train
def get_machine_category_usage(start, end):
    """Return today's MachineCategoryCache row covering [start, end].

    Unlike the other usage getters this raises DoesNotExist when no
    cache row is present.

    Keyword arguments:
    start -- start date
    end -- end date
    """
    today = datetime.date.today()
    return MachineCategoryCache.objects.get(
        date=today, start=start, end=end)
python
{ "resource": "" }
q16854
get_applicant_from_email
train
def get_applicant_from_email(email):
    """Resolve an email address to an applicant.

    Returns (person, True) for a unique existing active person,
    (None, False) when several people share the address, and a freshly
    created Applicant as (applicant, False) otherwise.
    """
    try:
        return Person.active.get(email=email), True
    except Person.MultipleObjectsReturned:
        return None, False
    except Person.DoesNotExist:
        return Applicant.objects.create(email=email), False
python
{ "resource": "" }
q16855
_send_invitation
train
def _send_invitation(request, project):
    """The logged in project leader OR administrator wants to invite somebody.

    Renders the invite form, resolves the email address to an existing
    person or a new Applicant, then starts the application state
    machine.  ``project`` may be None (administrator invite).
    """
    form = forms.InviteUserApplicationForm(request.POST or None)
    if request.method == 'POST':
        if form.is_valid():
            email = form.cleaned_data['email']
            applicant, existing_person = get_applicant_from_email(email)

            # If applicant is None then there were multiple persons found.
            if applicant is None:
                return render(
                    template_name='kgapplications/'
                    'project_common_invite_multiple.html',
                    context={'form': form, 'email': email},
                    request=request)

            # an existing person must be confirmed by re-posting the form
            # with the 'existing' key present
            if existing_person and 'existing' not in request.POST:
                return render(
                    template_name='kgapplications/'
                    'project_common_invite_existing.html',
                    context={'form': form, 'person': applicant},
                    request=request)

            application = form.save(commit=False)
            application.applicant = applicant
            application.project = project
            application.save()

            state_machine = get_application_state_machine()
            response = state_machine.start(request, application)
            return response

    return render(
        template_name='kgapplications/project_common_invite_other.html',
        context={'form': form, 'project': project, },
        request=request)
python
{ "resource": "" }
q16856
send_invitation
train
def send_invitation(request, project_id=None):
    """The logged in project leader wants to invite somebody to their project.

    Without a project id only administrators may invite; with one, only
    people who can edit that project may.
    """
    if project_id is None:
        project = None
        allowed = is_admin(request)
    else:
        project = get_object_or_404(Project, id=project_id)
        allowed = project.can_edit(request)

    if not allowed:
        return HttpResponseForbidden('<h1>Access Denied</h1>')

    return _send_invitation(request, project)
python
{ "resource": "" }
q16857
new_application
train
def new_application(request): """ A new application by a user to start a new project. """ # Note default kgapplications/index.html will display error if user logged # in. if not settings.ALLOW_REGISTRATIONS: return render( template_name='kgapplications/project_common_disabled.html', context={}, request=request) roles = {'is_applicant', 'is_authorised'} if not request.user.is_authenticated: attrs, _ = saml.parse_attributes(request) defaults = {'email': attrs['email']} form = forms.UnauthenticatedInviteUserApplicationForm( request.POST or None, initial=defaults) if request.method == 'POST': if form.is_valid(): email = form.cleaned_data['email'] applicant, existing_person = get_applicant_from_email(email) # If applicant is None then there were multiple persons found. # This should never happen as the # UnauthenticatedInviteUserApplicationForm form disallows # existing users applying unauthenticated. assert applicant is not None # Similarly existing_person should always be False here. assert not existing_person application = ProjectApplication() application.applicant = applicant application.save() state_machine = get_application_state_machine() state_machine.start(request, application, roles) # we do not show unauthenticated users the application at this # stage. url = reverse('index') return HttpResponseRedirect(url) return render( template_name='kgapplications/' 'project_common_invite_unauthenticated.html', context={'form': form, }, request=request) else: if request.method == 'POST': person = request.user application = ProjectApplication() application.applicant = person application.save() state_machine = get_application_state_machine() response = state_machine.start(request, application, roles) return response return render( template_name='kgapplications/' 'project_common_invite_authenticated.html', context={}, request=request)
python
{ "resource": "" }
q16858
_get_ldflags
train
def _get_ldflags(): """Determine the correct link flags. This attempts dummy compiles similar to how autotools does feature detection. """ # windows gcc does not support linking with unresolved symbols if sys.platform == 'win32': # pragma: no cover (windows) prefix = getattr(sys, 'real_prefix', sys.prefix) libs = os.path.join(prefix, str('libs')) return str('-L{} -lpython{}{}').format(libs, *sys.version_info[:2]) cc = subprocess.check_output(('go', 'env', 'CC')).decode('UTF-8').strip() with _tmpdir() as tmpdir: testf = os.path.join(tmpdir, 'test.c') with io.open(testf, 'w') as f: f.write('int f(int); int main(void) { return f(0); }\n') for lflag in LFLAGS: # pragma: no cover (platform specific) try: subprocess.check_call((cc, testf, lflag), cwd=tmpdir) return lflag except subprocess.CalledProcessError: pass else: # pragma: no cover (platform specific) # wellp, none of them worked, fall back to gcc and they'll get a # hopefully reasonable error message return LFLAG_GCC
python
{ "resource": "" }
q16859
get_penalty_model
train
def get_penalty_model(specification): """Factory function for penaltymodel_maxgap. Args: specification (penaltymodel.Specification): The specification for the desired penalty model. Returns: :class:`penaltymodel.PenaltyModel`: Penalty model with the given specification. Raises: :class:`penaltymodel.ImpossiblePenaltyModel`: If the penalty cannot be built. Parameters: priority (int): -100 """ # check that the feasible_configurations are spin feasible_configurations = specification.feasible_configurations if specification.vartype is dimod.BINARY: feasible_configurations = {tuple(2 * v - 1 for v in config): en for config, en in feasible_configurations.items()} # convert ising_quadratic_ranges to the form we expect ising_quadratic_ranges = specification.ising_quadratic_ranges quadratic_ranges = {(u, v): ising_quadratic_ranges[u][v] for u, v in specification.graph.edges} bqm, gap = generate(specification.graph, feasible_configurations, specification.decision_variables, specification.ising_linear_ranges, quadratic_ranges, specification.min_classical_gap, None) # unspecified smt solver try: ground = max(feasible_configurations.values()) except ValueError: ground = 0.0 # if empty return pm.PenaltyModel.from_specification(specification, bqm, gap, ground)
python
{ "resource": "" }
q16860
insert_feasible_configurations
train
def insert_feasible_configurations(cur, feasible_configurations, encoded_data=None):
    """Insert a group of feasible configurations into the cache.

    Args:
        cur (:class:`sqlite3.Cursor`):
            An sqlite3 cursor. This function is meant to be run within a
            :obj:`with` statement.

        feasible_configurations (dict[tuple[int]):
            The set of feasible configurations. Each key should be a tuple
            of variable assignments. The values are the relative energies.

        encoded_data (dict, optional):
            If a dictionary is provided, it will be populated with the
            serialized data. This is useful for preventing encoding the
            same information many times.

    Examples:
        >>> feasible_configurations = {(-1, -1): 0.0, (+1, +1): 0.0}
        >>> with pmc.cache_connect(':memory:') as cur:
        ...     pmc.insert_feasible_configurations(cur, feasible_configurations)

    """
    if encoded_data is None:
        encoded_data = {}

    # only encode fields the caller has not already cached in encoded_data
    if 'num_variables' not in encoded_data:
        encoded_data['num_variables'] = len(next(iter(feasible_configurations)))
    if 'num_feasible_configurations' not in encoded_data:
        encoded_data['num_feasible_configurations'] = len(feasible_configurations)
    if 'feasible_configurations' not in encoded_data or 'energies' not in encoded_data:
        # sort by the serialized configuration for a canonical ordering
        encoded = {_serialize_config(config): en
                   for config, en in feasible_configurations.items()}
        configs, energies = zip(*sorted(encoded.items()))
        encoded_data['feasible_configurations'] = json.dumps(configs, separators=(',', ':'))
        encoded_data['energies'] = json.dumps(energies, separators=(',', ':'))

    insert = \
        """
        INSERT OR IGNORE INTO feasible_configurations(
            num_variables,
            num_feasible_configurations,
            feasible_configurations,
            energies)
        VALUES (
            :num_variables,
            :num_feasible_configurations,
            :feasible_configurations,
            :energies);
        """

    cur.execute(insert, encoded_data)
python
{ "resource": "" }
q16861
_decode_config
train
def _decode_config(c, num_variables): """inverse of _serialize_config, always converts to spin.""" def bits(c): n = 1 << (num_variables - 1) for __ in range(num_variables): yield 1 if c & n else -1 n >>= 1 return tuple(bits(c))
python
{ "resource": "" }
q16862
insert_ising_model
train
def insert_ising_model(cur, nodelist, edgelist, linear, quadratic, offset, encoded_data=None):
    """Insert an Ising model into the cache.

    Args:
        cur (:class:`sqlite3.Cursor`):
            An sqlite3 cursor. This function is meant to be run within a
            :obj:`with` statement.

        nodelist (list): The nodes in the graph.

        edgelist (list): The edges in the graph.

        linear (dict): The linear bias associated with each node in nodelist.

        quadratic (dict): The quadratic bias associated with each edge in edgelist.

        offset (float): The constant offset applied to the ising problem.

        encoded_data (dict, optional):
            If a dictionary is provided, it will be populated with the
            serialized data. This is useful for preventing encoding the
            same information many times.

    """
    if encoded_data is None:
        encoded_data = {}

    # insert graph and partially populate encoded_data with graph info
    insert_graph(cur, nodelist, edgelist, encoded_data=encoded_data)

    # need to encode the biases
    if 'linear_biases' not in encoded_data:
        encoded_data['linear_biases'] = _serialize_linear_biases(linear, nodelist)
    if 'quadratic_biases' not in encoded_data:
        encoded_data['quadratic_biases'] = _serialize_quadratic_biases(quadratic, edgelist)
    if 'offset' not in encoded_data:
        encoded_data['offset'] = offset
    # bias extrema are stored so cached models can later be filtered by range
    if 'max_quadratic_bias' not in encoded_data:
        encoded_data['max_quadratic_bias'] = max(itervalues(quadratic))
    if 'min_quadratic_bias' not in encoded_data:
        encoded_data['min_quadratic_bias'] = min(itervalues(quadratic))
    if 'max_linear_bias' not in encoded_data:
        encoded_data['max_linear_bias'] = max(itervalues(linear))
    if 'min_linear_bias' not in encoded_data:
        encoded_data['min_linear_bias'] = min(itervalues(linear))

    insert = \
        """
        INSERT OR IGNORE INTO ising_model(
            linear_biases,
            quadratic_biases,
            offset,
            max_quadratic_bias,
            min_quadratic_bias,
            max_linear_bias,
            min_linear_bias,
            graph_id)
        SELECT
            :linear_biases,
            :quadratic_biases,
            :offset,
            :max_quadratic_bias,
            :min_quadratic_bias,
            :max_linear_bias,
            :min_linear_bias,
            graph.id
        FROM graph WHERE
            num_nodes = :num_nodes AND
            num_edges = :num_edges AND
            edges = :edges;
        """

    cur.execute(insert, encoded_data)
python
{ "resource": "" }
q16863
_serialize_linear_biases
train
def _serialize_linear_biases(linear, nodelist): """Serializes the linear biases. Args: linear: a interable object where linear[v] is the bias associated with v. nodelist (list): an ordered iterable containing the nodes. Returns: str: base 64 encoded string of little endian 8 byte floats, one for each of the biases in linear. Ordered according to nodelist. Examples: >>> _serialize_linear_biases({1: -1, 2: 1, 3: 0}, [1, 2, 3]) 'AAAAAAAA8L8AAAAAAADwPwAAAAAAAAAA' >>> _serialize_linear_biases({1: -1, 2: 1, 3: 0}, [3, 2, 1]) 'AAAAAAAAAAAAAAAAAADwPwAAAAAAAPC/' """ linear_bytes = struct.pack('<' + 'd' * len(linear), *[linear[i] for i in nodelist]) return base64.b64encode(linear_bytes).decode('utf-8')
python
{ "resource": "" }
q16864
_serialize_quadratic_biases
train
def _serialize_quadratic_biases(quadratic, edgelist): """Serializes the quadratic biases. Args: quadratic (dict): a dict of the form {edge1: bias1, ...} where each edge is of the form (node1, node2). edgelist (list): a list of the form [(node1, node2), ...]. Returns: str: base 64 encoded string of little endian 8 byte floats, one for each of the edges in quadratic. Ordered by edgelist. Example: >>> _serialize_quadratic_biases({(0, 1): -1, (1, 2): 1, (0, 2): .4}, ... [(0, 1), (1, 2), (0, 2)]) 'AAAAAAAA8L8AAAAAAADwP5qZmZmZmdk/' """ # assumes quadratic is upper-triangular or reflected in edgelist quadratic_list = [quadratic[(u, v)] if (u, v) in quadratic else quadratic[(v, u)] for u, v in edgelist] quadratic_bytes = struct.pack('<' + 'd' * len(quadratic), *quadratic_list) return base64.b64encode(quadratic_bytes).decode('utf-8')
python
{ "resource": "" }
q16865
iter_ising_model
train
def iter_ising_model(cur):
    """Iterate over all of the Ising models in the cache.

    Args:
        cur (:class:`sqlite3.Cursor`):
            An sqlite3 cursor. This function is meant to be run within a
            :obj:`with` statement.

    Yields:
        tuple: A 5-tuple consisting of:

            list: The nodelist for a graph in the cache.

            list: the edgelist for a graph in the cache.

            dict: The linear biases of an Ising Model in the cache.

            dict: The quadratic biases of an Ising Model in the cache.

            float: The constant offset of an Ising Model in the cache.

    """
    select = \
        """
        SELECT linear_biases, quadratic_biases, num_nodes, edges, offset
        FROM ising_model, graph
        WHERE graph.id = ising_model.graph_id;
        """

    for linear_biases, quadratic_biases, num_nodes, edges, offset in cur.execute(select):
        # cached graphs are always index-labelled 0..num_nodes-1;
        # edges are stored JSON-encoded
        nodelist = list(range(num_nodes))
        edgelist = json.loads(edges)
        yield (nodelist, edgelist,
               _decode_linear_biases(linear_biases, nodelist),
               _decode_quadratic_biases(quadratic_biases, edgelist),
               offset)
python
{ "resource": "" }
q16866
_decode_linear_biases
train
def _decode_linear_biases(linear_string, nodelist): """Inverse of _serialize_linear_biases. Args: linear_string (str): base 64 encoded string of little endian 8 byte floats, one for each of the nodes in nodelist. nodelist (list): list of the form [node1, node2, ...]. Returns: dict: linear biases in a dict. Examples: >>> _decode_linear_biases('AAAAAAAA8L8AAAAAAADwPwAAAAAAAAAA', [1, 2, 3]) {1: -1.0, 2: 1.0, 3: 0.0} >>> _decode_linear_biases('AAAAAAAA8L8AAAAAAADwPwAAAAAAAAAA', [3, 2, 1]) {1: 0.0, 2: 1.0, 3: -1.0} """ linear_bytes = base64.b64decode(linear_string) return dict(zip(nodelist, struct.unpack('<' + 'd' * (len(linear_bytes) // 8), linear_bytes)))
python
{ "resource": "" }
q16867
_decode_quadratic_biases
train
def _decode_quadratic_biases(quadratic_string, edgelist): """Inverse of _serialize_quadratic_biases Args: quadratic_string (str) : base 64 encoded string of little endian 8 byte floats, one for each of the edges. edgelist (list): a list of edges of the form [(node1, node2), ...]. Returns: dict: J. A dict of the form {edge1: bias1, ...} where each edge is of the form (node1, node2). Example: >>> _decode_quadratic_biases('AAAAAAAA8L8AAAAAAADwP5qZmZmZmdk/', ... [(0, 1), (1, 2), (0, 2)]) {(0, 1): -1.0, (0, 2): 0.4, (1, 2): 1.0} """ quadratic_bytes = base64.b64decode(quadratic_string) return {tuple(edge): bias for edge, bias in zip(edgelist, struct.unpack('<' + 'd' * (len(quadratic_bytes) // 8), quadratic_bytes))}
python
{ "resource": "" }
q16868
insert_penalty_model
train
def insert_penalty_model(cur, penalty_model):
    """Insert a penalty model into the database.

    Args:
        cur (:class:`sqlite3.Cursor`):
            An sqlite3 cursor. This function is meant to be run within a
            :obj:`with` statement.

        penalty_model (:class:`penaltymodel.PenaltyModel`):
            A penalty model to be stored in the database.

    Examples:
        >>> import networkx as nx
        >>> import penaltymodel.core as pm
        >>> import dimod
        >>> graph = nx.path_graph(3)
        >>> decision_variables = (0, 2)
        >>> feasible_configurations = {(-1, -1): 0., (+1, +1): 0.}
        >>> spec = pm.Specification(graph, decision_variables, feasible_configurations, dimod.SPIN)
        >>> linear = {v: 0 for v in graph}
        >>> quadratic = {edge: -1 for edge in graph.edges}
        >>> model = dimod.BinaryQuadraticModel(linear, quadratic, 0.0, vartype=dimod.SPIN)
        >>> widget = pm.PenaltyModel.from_specification(spec, model, 2., -2)
        >>> with pmc.cache_connect(':memory:') as cur:
        ...     pmc.insert_penalty_model(cur, widget)

    """
    # encoded_data is shared between the insert helpers so each piece of
    # information is serialized at most once
    encoded_data = {}

    linear, quadratic, offset = penalty_model.model.to_ising()

    # canonical ordering: sorted nodes, sorted edges with sorted endpoints
    nodelist = sorted(linear)
    edgelist = sorted(sorted(edge) for edge in penalty_model.graph.edges)

    # each helper inserts its own rows and fills in encoded_data
    insert_graph(cur, nodelist, edgelist, encoded_data)
    insert_feasible_configurations(cur, penalty_model.feasible_configurations, encoded_data)
    insert_ising_model(cur, nodelist, edgelist, linear, quadratic, offset, encoded_data)

    encoded_data['decision_variables'] = json.dumps(penalty_model.decision_variables, separators=(',', ':'))
    encoded_data['classical_gap'] = penalty_model.classical_gap
    encoded_data['ground_energy'] = penalty_model.ground_energy

    insert = \
        """
        INSERT OR IGNORE INTO penalty_model(
            decision_variables,
            classical_gap,
            ground_energy,
            feasible_configurations_id,
            ising_model_id)
        SELECT
            :decision_variables,
            :classical_gap,
            :ground_energy,
            feasible_configurations.id,
            ising_model.id
        FROM feasible_configurations, ising_model, graph
        WHERE
            graph.edges = :edges AND
            graph.num_nodes = :num_nodes AND
            ising_model.graph_id = graph.id AND
            ising_model.linear_biases = :linear_biases AND
            ising_model.quadratic_biases = :quadratic_biases AND
            ising_model.offset = :offset AND
            feasible_configurations.num_variables = :num_variables AND
            feasible_configurations.num_feasible_configurations = :num_feasible_configurations AND
            feasible_configurations.feasible_configurations = :feasible_configurations AND
            feasible_configurations.energies = :energies;
        """

    cur.execute(insert, encoded_data)
python
{ "resource": "" }
q16869
PenaltyModel.from_specification
train
def from_specification(cls, specification, model, classical_gap, ground_energy):
    """Construct a PenaltyModel from a Specification.

    Args:
        specification (:class:`.Specification`): A specification that
            was used to generate the model.
        model (:class:`dimod.BinaryQuadraticModel`): A binary quadratic
            model that has ground states that match the
            feasible_configurations.
        classical_gap (numeric): The difference in classical energy
            between the ground state and the first excited state. Must
            be positive.
        ground_energy (numeric): The minimum energy of all possible
            configurations.

    Returns:
        :class:`.PenaltyModel`

    """
    spec = specification
    # Re-run the full validation in the constructor instead of copying
    # fields blindly; simpler and explicit at the cost of rechecking.
    return cls(
        spec.graph,
        spec.decision_variables,
        spec.feasible_configurations,
        spec.vartype,
        model,
        classical_gap,
        ground_energy,
        ising_linear_ranges=spec.ising_linear_ranges,
        ising_quadratic_ranges=spec.ising_quadratic_ranges,
    )
python
{ "resource": "" }
q16870
get_penalty_model
train
def get_penalty_model(specification, database=None): """Factory function for penaltymodel_cache. Args: specification (penaltymodel.Specification): The specification for the desired penalty model. database (str, optional): The path to the desired sqlite database file. If None, will use the default. Returns: :class:`penaltymodel.PenaltyModel`: Penalty model with the given specification. Raises: :class:`penaltymodel.MissingPenaltyModel`: If the penalty model is not in the cache. Parameters: priority (int): 100 """ # only handles index-labelled nodes if not _is_index_labelled(specification.graph): relabel_applied = True mapping, inverse_mapping = _graph_canonicalization(specification.graph) specification = specification.relabel_variables(mapping, inplace=False) else: relabel_applied = False # connect to the database. Note that once the connection is made it cannot be # broken up between several processes. if database is None: conn = cache_connect() else: conn = cache_connect(database) # get the penalty_model with conn as cur: try: widget = next(iter_penalty_model_from_specification(cur, specification)) except StopIteration: widget = None # close the connection conn.close() if widget is None: raise pm.MissingPenaltyModel("no penalty model with the given specification found in cache") if relabel_applied: # relabel the widget in-place widget.relabel_variables(inverse_mapping, inplace=True) return widget
python
{ "resource": "" }
q16871
cache_penalty_model
train
def cache_penalty_model(penalty_model, database=None):
    """Store a penalty model in the sqlite cache.

    Args:
        penalty_model (:class:`penaltymodel.PenaltyModel`): Penalty
            model to be cached.

        database (str, optional): The path to the desired sqlite
            database file. If None, will use the default.

    """
    # the cache schema only handles graphs labelled 0..n-1, so
    # canonicalize labels first when necessary
    if not _is_index_labelled(penalty_model.graph):
        mapping, __ = _graph_canonicalization(penalty_model.graph)
        penalty_model = penalty_model.relabel_variables(mapping, inplace=False)

    # a connection cannot be shared between processes, so open one here
    conn = cache_connect() if database is None else cache_connect(database)

    with conn as cur:
        insert_penalty_model(cur, penalty_model)

    conn.close()
python
{ "resource": "" }
q16872
get_penalty_model
train
def get_penalty_model(specification):
    """Retrieve a PenaltyModel from one of the available factories.

    Args:
        specification (:class:`.Specification`): The specification
            for the desired PenaltyModel.

    Returns:
        :class:`.PenaltyModel`/None: A PenaltyModel as returned by the
        highest priority factory, or None if no factory could produce it.

    Raises:
        :exc:`ImpossiblePenaltyModel`: If the specification describes a
            penalty model that cannot be built by any factory.

    """
    # ask each factory, from highest to lowest priority
    for factory in iter_factories():
        try:
            penalty_model = factory(specification)
        except ImpossiblePenaltyModel as e:
            # information about impossible models should be propagated
            raise e
        except FactoryException:
            # this factory could not help; try the next one
            continue

        # share the result with every cache before returning it; this
        # could be done asynchronously
        for cache in iter_caches():
            cache(penalty_model)

        return penalty_model

    return None
python
{ "resource": "" }
q16873
iter_factories
train
def iter_factories():
    """Iterate through all factories identified by the factory entrypoint.

    Factories are yielded from highest priority to lowest; a factory
    without a ``priority`` attribute is treated as priority -1000.

    Yields:
        function: A function that accepts a :class:`.Specification` and
        returns a :class:`.PenaltyModel`.

    """
    loaded = [entry.load() for entry in iter_entry_points(FACTORY_ENTRYPOINT)]
    loaded.sort(key=lambda factory: getattr(factory, 'priority', -1000),
                reverse=True)
    for factory in loaded:
        yield factory
python
{ "resource": "" }
q16874
generate_bqm
train
def generate_bqm(graph, table, decision, linear_energy_ranges=None, quadratic_energy_ranges=None, min_classical_gap=2, precision=7, max_decision=8, max_variables=10, return_auxiliary=False): """Get a binary quadratic model with specific ground states. Args: graph (:obj:`~networkx.Graph`): Defines the structure of the generated binary quadratic model. table (iterable): Iterable of valid configurations (of spin-values). Each configuration is a tuple of variable assignments ordered by `decision`. decision (list/tuple): The variables in the binary quadratic model which have specified configurations. linear_energy_ranges (dict, optional): Dict of the form {v: (min, max, ...} where min and max are the range of values allowed to v. The default range is [-2, 2]. quadratic_energy_ranges (dict, optional): Dict of the form {(u, v): (min, max), ...} where min and max are the range of values allowed to (u, v). The default range is [-1, 1]. min_classical_gap (float): The minimum energy gap between the highest feasible state and the lowest infeasible state. precision (int, optional, default=7): Values returned by the optimization solver are rounded to `precision` digits of precision. max_decision (int, optional, default=4): Maximum number of decision variables allowed. The algorithm is valid for arbitrary sizes of problem but can be extremely slow. max_variables (int, optional, default=4): Maximum number of variables allowed. The algorithm is valid for arbitrary sizes of problem but can be extremely slow. return_auxiliary (bool, optional, False): If True, the auxiliary configurations are returned for each configuration in table. Returns: If return_auxiliary is False: :obj:`dimod.BinaryQuadraticModel`: The binary quadratic model. float: The classical gap. If return_auxiliary is True: :obj:`dimod.BinaryQuadraticModel`: The binary quadratic model. float: The classical gap. dict: The auxiliary configurations, keyed on the configurations in table. 
Raises: ImpossiblePenaltyModel: If the penalty model cannot be built. Normally due to a non-zero infeasible gap. """ # Developer note: This function is input checking and output formatting. The logic is # in _generate_ising if not isinstance(graph, nx.Graph): raise TypeError("expected input graph to be a NetworkX Graph.") if not set().union(*table).issubset({-1, 1}): raise ValueError("expected table to be spin-valued") if not isinstance(decision, list): decision = list(decision) # handle iterables if not all(v in graph for v in decision): raise ValueError("given graph does not match the variable labels in decision variables") num_var = len(decision) if any(len(config) != num_var for config in table): raise ValueError("number of decision variables does not match all of the entires in the table") if len(decision) > max_decision: raise ValueError(("The table is too large. Note that larger models can be built by setting " "max_decision to a higher number, but generation could be extremely slow.")) if len(graph) > max_variables: raise ValueError(("The graph is too large. Note that larger models can be built by setting " "max_variables to a higher number, but generation could be extremely slow.")) if linear_energy_ranges is None: linear_energy_ranges = defaultdict(lambda: (-2, 2)) if quadratic_energy_ranges is None: quadratic_energy_ranges = defaultdict(lambda: (-1, 1)) if not isinstance(table, Mapping): table = {config: 0. for config in table} h, J, offset, gap, aux = _generate_ising(graph, table, decision, min_classical_gap, linear_energy_ranges, quadratic_energy_ranges) bqm = dimod.BinaryQuadraticModel.empty(dimod.SPIN) bqm.add_variables_from((v, round(bias, precision)) for v, bias in h.items()) bqm.add_interactions_from((u, v, round(bias, precision)) for (u, v), bias in J.items()) bqm.add_offset(round(offset, precision)) if return_auxiliary: return bqm, round(gap, precision), aux else: return bqm, round(gap, precision)
python
{ "resource": "" }
q16875
get_penalty_model
train
def get_penalty_model(specification): """Factory function for penaltymodel-lp. Args: specification (penaltymodel.Specification): The specification for the desired penalty model. Returns: :class:`penaltymodel.PenaltyModel`: Penalty model with the given specification. Raises: :class:`penaltymodel.ImpossiblePenaltyModel`: If the penalty cannot be built. Parameters: priority (int): -100 """ # check that the feasible_configurations are spin feasible_configurations = specification.feasible_configurations if specification.vartype is dimod.BINARY: feasible_configurations = {tuple(2 * v - 1 for v in config): en for config, en in iteritems(feasible_configurations)} # convert ising_quadratic_ranges to the form we expect ising_quadratic_ranges = specification.ising_quadratic_ranges quadratic_ranges = {(u, v): ising_quadratic_ranges[u][v] for u, v in specification.graph.edges} try: bqm, gap = generate_bqm(specification.graph, feasible_configurations, specification.decision_variables, linear_energy_ranges=specification.ising_linear_ranges, quadratic_energy_ranges=quadratic_ranges, min_classical_gap=specification.min_classical_gap) except ValueError: raise pm.exceptions.FactoryException("Specification is for too large of a model") return pm.PenaltyModel.from_specification(specification, bqm, gap, 0.0)
python
{ "resource": "" }
q16876
get_item
train
def get_item(dictionary, tuple_key, default_value):
    """Grab a value from a dictionary keyed by unordered two-element tuples.

    Both orientations of the key are tried.  Unlike a truthiness chain,
    falsy stored values (0, False, None) are returned correctly rather
    than falling through to the default.

    Args:
        dictionary: Dictionary that uses two-element tuples as keys.
        tuple_key: Unordered tuple of two elements; both (u, v) and
            (v, u) are tried.
        default_value: Value returned when neither orientation of
            tuple_key is present.
    """
    u, v = tuple_key

    # sentinel distinguishes "key missing" from falsy stored values
    sentinel = object()
    value = dictionary.get((u, v), sentinel)
    if value is sentinel:
        value = dictionary.get((v, u), sentinel)

    return default_value if value is sentinel else value
python
{ "resource": "" }
q16877
limitReal
train
def limitReal(x, max_denominator=1000000):
    """Creates an pysmt Real constant from x.

    Args:
        x (number): A number to be cast to a pysmt constant.
        max_denominator (int, optional): The maximum size of the
            denominator. Default 1000000.

    Returns:
        A Real constant with the given value and the denominator
        limited.

    """
    frac = Fraction(x).limit_denominator(max_denominator)
    return Real((frac.numerator, frac.denominator))
python
{ "resource": "" }
q16878
Theta.from_graph
train
def from_graph(cls, graph, linear_energy_ranges, quadratic_energy_ranges):
    """Create Theta from a graph and energy ranges.

    Args:
        graph (:obj:`networkx.Graph`):
            Provides the structure for Theta.

        linear_energy_ranges (dict):
            A dict of the form {v: (min, max), ...} where min and max
            are the range of values allowed to v.

        quadratic_energy_ranges (dict):
            A dict of the form {(u, v): (min, max), ...} where min and
            max are the range of values allowed to (u, v).

    Returns:
        :obj:`.Theta`

    """
    get_env().enable_infix_notation = True  # not sure why we need this here

    theta = cls.empty(dimod.SPIN)

    # the constant energy offset is itself a free SMT variable
    theta.add_offset(Symbol('offset', REAL))

    def Linear(v):
        """Create a Symbol for the linear bias including the energy range constraints."""
        bias = Symbol('h_{}'.format(v), REAL)

        min_, max_ = linear_energy_ranges[v]

        theta.assertions.add(LE(bias, limitReal(max_)))
        theta.assertions.add(GE(bias, limitReal(min_)))

        return bias

    def Quadratic(u, v):
        """Create a Symbol for the quadratic bias including the energy range constraints."""
        bias = Symbol('J_{},{}'.format(u, v), REAL)

        # the range dict may store the edge in either orientation
        if (v, u) in quadratic_energy_ranges:
            min_, max_ = quadratic_energy_ranges[(v, u)]
        else:
            min_, max_ = quadratic_energy_ranges[(u, v)]

        theta.assertions.add(LE(bias, limitReal(max_)))
        theta.assertions.add(GE(bias, limitReal(min_)))

        return bias

    for v in graph.nodes:
        theta.add_variable(v, Linear(v))

    for u, v in graph.edges:
        theta.add_interaction(u, v, Quadratic(u, v))

    return theta
python
{ "resource": "" }
q16879
Theta.to_bqm
train
def to_bqm(self, model):
    """Build a dimod BinaryQuadraticModel from a satisfying pysmt model.

    Reads the solver-assigned value of every bias symbol in this Theta and
    materialises them as concrete floats in a SPIN-typed bqm.

    Args:
        model: A pysmt model produced by an SMT solver.

    Returns:
        :obj:`dimod.BinaryQuadraticModel`
    """
    def as_float(symbol):
        # extract the concrete python value the solver assigned
        return float(model.get_py_value(symbol))

    linear = {v: as_float(bias) for v, bias in self.linear.items()}
    quadratic = {(u, v): as_float(bias)
                 for (u, v), bias in self.quadratic.items()}
    offset = as_float(self.offset)

    return dimod.BinaryQuadraticModel(linear, quadratic, offset, dimod.SPIN)
python
{ "resource": "" }
q16880
SpinTimes
train
def SpinTimes(spin, bias):
    """Define our own multiplication for bias times spins.

    This allows for cleaner log code as well as value checking.

    Args:
        spin (int): -1 or 1
        bias (:class:`pysmt.shortcuts.Symbol`): The bias

    Returns:
        spin * bias as a pysmt expression (bias itself when spin is 1).

    Raises:
        TypeError: If spin is not an int.
        ValueError: If spin is not -1 or 1.
    """
    if not isinstance(spin, int):
        raise TypeError('spin must be an int')
    if spin == 1:
        # multiplying by +1 is the identity, no new expression needed
        return bias
    if spin == -1:
        return Times(Real((-1, 1)), bias)  # -1 / 1
    # fixed message: the old text 'expected spins to be -1., or 1.' implied
    # floats were accepted although only ints pass the type check above
    raise ValueError('expected spin to be -1 or 1')
python
{ "resource": "" }
q16881
_elimination_trees
train
def _elimination_trees(theta, decision_variables): """From Theta and the decision variables, determine the elimination order and the induced trees. """ # auxiliary variables are any variables that are not decision auxiliary_variables = set(n for n in theta.linear if n not in decision_variables) # get the adjacency of the auxiliary subgraph adj = {v: {u for u in theta.adj[v] if u in auxiliary_variables} for v in theta.adj if v in auxiliary_variables} # get the elimination order that minimizes treewidth tw, order = dnx.treewidth_branch_and_bound(adj) ancestors = {} for n in order: ancestors[n] = set(adj[n]) # now make v simplicial by making its neighborhood a clique, then # continue neighbors = adj[n] for u, v in itertools.combinations(neighbors, 2): adj[u].add(v) adj[v].add(u) for v in neighbors: adj[v].discard(n) del adj[n] roots = {} nodes = {v: {} for v in ancestors} for vidx in range(len(order) - 1, -1, -1): v = order[vidx] if ancestors[v]: for u in order[vidx + 1:]: if u in ancestors[v]: # v is a child of u nodes[u][v] = nodes[v] # nodes[u][v] = children of v break else: roots[v] = nodes[v] # roots[v] = children of v return roots, ancestors
python
{ "resource": "" }
q16882
Table.energy_upperbound
train
def energy_upperbound(self, spins):
    """A formula for an upper bound on the energy of Theta with spins fixed.

    Args:
        spins (dict): Spin values for a subset of the variables in Theta.

    Returns:
        Formula that upper bounds the energy with spins fixed.
    """
    subtheta = self.theta.copy()
    subtheta.fix_variables(spins)

    # ok, let's start eliminating variables
    trees = self._trees

    if not trees:
        # if there are no variables to eliminate, then the offset of
        # subtheta is the exact value and we can just return it
        assert not subtheta.linear and not subtheta.quadratic
        return subtheta.offset

    # recursively bound the energy contribution of the elimination trees,
    # starting with no additional spins fixed
    energy = Plus(self.message_upperbound(trees, {}, subtheta), subtheta.offset)

    return energy
python
{ "resource": "" }
q16883
Table.energy
train
def energy(self, spins, break_aux_symmetry=True):
    """A formula for the exact energy of Theta with spins fixed.

    Args:
        spins (dict): Spin values for a subset of the variables in Theta.
        break_aux_symmetry (bool, optional): Default True. If True, break
            the aux variable symmetry by setting all aux variable to 1
            for one of the feasible configurations. If the energy ranges
            are not symmetric then this can make finding models impossible.

    Returns:
        Formula for the exact energy of Theta with spins fixed.
    """
    subtheta = self.theta.copy()
    subtheta.fix_variables(spins)

    # we need aux variables; each call gets a fresh namespace via the counter
    av = next(self._auxvar_counter)
    auxvars = {v: Symbol('aux{}_{}'.format(av, v), BOOL)
               for v in subtheta.linear}
    if break_aux_symmetry and av == 0:
        # without loss of generality, we can assume that the aux variables are all
        # spin-up for one configuration
        self.assertions.update(set(auxvars.values()))

    trees = self._trees

    if not trees:
        # if there are no variables to eliminate, then the offset of
        # subtheta is the exact value and we can just return it
        assert not subtheta.linear and not subtheta.quadratic
        return subtheta.offset

    # recursively compute the exact energy contribution of the elimination
    # trees, starting with no additional spins fixed
    energy = Plus(self.message(trees, {}, subtheta, auxvars), subtheta.offset)

    return energy
python
{ "resource": "" }
q16884
Table.message
train
def message(self, tree, spins, subtheta, auxvars):
    """Determine the energy of the elimination tree.

    Note:
        Recursive; temporarily mutates `spins` while descending into
        children and restores it before returning.

    Args:
        tree (dict): The current elimination tree
        spins (dict): The current fixed spins
        subtheta (dict): Theta with spins fixed.
        auxvars (dict): The auxiliary variables for the given spins.

    Returns:
        The formula for the energy of the tree.
    """
    energy_sources = set()
    for v, children in tree.items():
        aux = auxvars[v]

        assert all(u in spins for u in self._ancestors[v])

        # build an iterable over all of the energies contributions
        # that we can exactly determine given v and our known spins
        # in these contributions we assume that v is positive
        def energy_contributions():
            yield subtheta.linear[v]

            for u, bias in subtheta.adj[v].items():
                if u in spins:
                    yield SpinTimes(spins[u], bias)

        plus_energy = Plus(energy_contributions())
        minus_energy = SpinTimes(-1, plus_energy)

        # if the variable has children, we need to recursively determine their energies
        if children:
            # set v to be positive
            spins[v] = 1
            plus_energy = Plus(plus_energy, self.message(children, spins, subtheta, auxvars))
            spins[v] = -1
            minus_energy = Plus(minus_energy, self.message(children, spins, subtheta, auxvars))
            del spins[v]

        # we now need a real-valued smt variable to be our message
        m = FreshSymbol(REAL)

        # the aux variables encode which branch (v = +1 or v = -1) is taken;
        # m is at most both branch energies, and under the matching aux
        # assignment it is also at least that branch's energy
        ancestor_aux = {auxvars[u] if spins[u] > 0 else Not(auxvars[u])
                        for u in self._ancestors[v]}
        plus_aux = And({aux}.union(ancestor_aux))
        minus_aux = And({Not(aux)}.union(ancestor_aux))

        self.assertions.update({LE(m, plus_energy),
                                LE(m, minus_energy),
                                Implies(plus_aux, GE(m, plus_energy)),
                                Implies(minus_aux, GE(m, minus_energy))
                                })

        energy_sources.add(m)

    return Plus(energy_sources)
python
{ "resource": "" }
q16885
Table.message_upperbound
train
def message_upperbound(self, tree, spins, subtheta):
    """Determine an upper bound on the energy of the elimination tree.

    Note:
        Recursive; temporarily mutates `spins` while descending into
        subtrees and restores it before returning.

    Args:
        tree (dict): The current elimination tree
        spins (dict): The current fixed spins
        subtheta (dict): Theta with spins fixed.

    Returns:
        The formula for the energy of the tree.
    """
    energy_sources = set()
    for v, subtree in tree.items():

        assert all(u in spins for u in self._ancestors[v])

        # build an iterable over all of the energies contributions
        # that we can exactly determine given v and our known spins
        # in these contributions we assume that v is positive
        def energy_contributions():
            yield subtheta.linear[v]

            for u, bias in subtheta.adj[v].items():
                if u in spins:
                    yield Times(limitReal(spins[u]), bias)

        energy = Plus(energy_contributions())

        # if there are no more variables in the order, we can stop
        # otherwise we need the next message variable
        if subtree:
            spins[v] = 1.
            plus = self.message_upperbound(subtree, spins, subtheta)
            spins[v] = -1.
            minus = self.message_upperbound(subtree, spins, subtheta)
            del spins[v]
        else:
            plus = minus = limitReal(0.0)

        # we now need a real-valued smt variable to be our message;
        # m is constrained to be at most the energy of either branch
        # (v = +1 or v = -1), so it bounds the minimum over both
        m = FreshSymbol(REAL)

        self.assertions.update({LE(m, Plus(energy, plus)),
                                LE(m, Plus(Times(energy, limitReal(-1.)), minus))})

        energy_sources.add(m)

    return Plus(energy_sources)
python
{ "resource": "" }
q16886
Table.set_energy
train
def set_energy(self, spins, target_energy):
    """Constrain Theta, with the given spins fixed, to a target energy.

    Appends an equality assertion tying the exact-energy formula for the
    fixed spins to target_energy.

    Args:
        spins (dict): Spin values for a subset of the variables in Theta.
        target_energy (float): The desired energy for Theta with spins
            fixed.
    """
    constraint = Equals(self.energy(spins), limitReal(target_energy))
    self.assertions.add(constraint)
python
{ "resource": "" }
q16887
PandABlocksManagerController._poll_loop
train
def _poll_loop(self):
    """At self._poll_period poll for changes until a stop is requested"""
    next_poll = time.time()
    while True:
        # schedule relative to the previous deadline so the polling
        # period does not drift by the time spent handling changes
        next_poll += self._poll_period
        timeout = next_poll - time.time()
        if timeout < 0:
            timeout = 0
        try:
            # Anything put on the stop queue terminates the loop; its
            # value is returned to the caller.
            # NOTE(review): assumes the queue's get() raises TimeoutError
            # (not queue.Empty) on expiry -- confirm against queue type
            return self._stop_queue.get(timeout=timeout)
        except TimeoutError:
            # No stop, no problem
            pass
        try:
            self.handle_changes(self.client.get_changes())
        except Exception:
            # TODO: should fault here?
            self.log.exception("Error while getting changes")
python
{ "resource": "" }
q16888
camel_to_title
train
def camel_to_title(name):
    """Convert a camelCaseFieldName into a Title Case Field Name.

    Args:
        name (str): E.g. camelCaseFieldName

    Returns:
        str: Title Case converted name. E.g. Camel Case Field Name
    """
    # split on word boundaries: optional capital + lowercase/digit run,
    # or a run of capitals (an acronym) followed by a capital or the end
    words = re.findall(r"[A-Z]?[a-z0-9]+|[A-Z]+(?=[A-Z]|$)", name)
    joined = " ".join(words)
    return joined[0].upper() + joined[1:]
python
{ "resource": "" }
q16889
snake_to_camel
train
def snake_to_camel(name):
    """Convert a snake_field_name into a camelCaseFieldName.

    Args:
        name (str): E.g. snake_field_name or SNAKE_FIELD_NAME

    Returns:
        str: camelCase converted name. E.g. capsFieldName
    """
    words = [word.title() for word in name.split("_")]
    camel = "".join(words)
    return camel[0].lower() + camel[1:]
python
{ "resource": "" }
q16890
Serializable.from_dict
train
def from_dict(cls, d, ignore=()):
    """Create an instance from a serialized version of cls

    Args:
        d(dict): Endpoints of cls to set
        ignore(tuple): Keys to ignore

    Returns:
        Instance of this class
    """
    kwargs = {}
    for key, value in d.items():
        if key == "typeid":
            # a typeid key must match this class, but is not passed on
            assert value == cls.typeid, \
                "Dict has typeid %s but %s has typeid %s" % \
                (value, cls, cls.typeid)
        elif key not in ignore:
            kwargs[key] = value
    try:
        return cls(**kwargs)
    except TypeError as e:
        # re-raise with the typeid so the failing type is identifiable
        raise TypeError("%s raised error: %s" % (cls.typeid, str(e)))
python
{ "resource": "" }
q16891
Serializable.lookup_subclass
train
def lookup_subclass(cls, d):
    """Look up a class based on a serialized dictionary containing a typeid

    Args:
        d (dict): Dictionary with key "typeid"

    Returns:
        Serializable subclass
    """
    if "typeid" not in d:
        raise FieldError("typeid not present in keys %s" % list(d))
    typeid = d["typeid"]
    subclass = cls._subcls_lookup.get(typeid, None)
    if not subclass:
        raise FieldError("'%s' not a valid typeid" % typeid)
    return subclass
python
{ "resource": "" }
q16892
Process.start
train
def start(self, timeout=None):
    """Start the process going

    Args:
        timeout (float): Maximum amount of time to wait for each
            spawned process. None means forever
    """
    assert self.state == STOPPED, "Process already started"
    self.state = STARTING
    # Start each hosted controller; _start_controllers reports whether
    # the controllers should subsequently be published
    should_publish = self._start_controllers(
        self._controllers.values(), timeout)
    if should_publish:
        self._publish_controllers(timeout)
    self.state = STARTED
python
{ "resource": "" }
q16893
Process.stop
train
def stop(self, timeout=None):
    """Stop the process and wait for it to finish

    Args:
        timeout (float): Maximum amount of time to wait for each
            spawned object. None means forever
    """
    assert self.state == STARTED, "Process not started"
    self.state = STOPPING
    # Allow every controller a chance to clean up
    self._run_hook(ProcessStopHook, timeout=timeout)
    # Wait for any outstanding spawned work to complete
    for s in self._spawned:
        if not s.ready():
            self.log.debug(
                "Waiting for %s *%s **%s", s._function, s._args, s._kwargs)
            s.wait(timeout=timeout)
    # Reset all bookkeeping so the process could be started again
    self._spawned = []
    self._controllers = OrderedDict()
    self._unpublished = set()
    self.state = STOPPED
    self.log.debug("Done process.stop()")
python
{ "resource": "" }
q16894
Process.spawn
train
def spawn(self, function, *args, **kwargs):
    # type: (Callable[..., Any], *Any, **Any) -> Spawned
    """Runs the function in a worker thread, returning a Result object

    Args:
        function: Function to run
        args: Positional arguments to run the function with
        kwargs: Keyword arguments to run the function with

    Returns:
        Spawned: Something you can call wait(timeout) on to see when it's
            finished executing
    """
    assert self.state != STOPPED, "Can't spawn when process stopped"
    result = Spawned(function, args, kwargs)
    self._spawned.append(result)
    self._spawn_count += 1
    if self._spawn_count > SPAWN_CLEAR_COUNT:
        # Periodically drop references to finished work so the spawned
        # list does not grow without bound
        self._clear_spawn_list()
    return result
python
{ "resource": "" }
q16895
Process.add_controller
train
def add_controller(self, controller, timeout=None):
    # type: (Controller, float) -> None
    """Add a controller to be hosted by this process

    Args:
        controller (Controller): Its controller
        timeout (float): Maximum amount of time to wait for each
            spawned object. None means forever
    """
    assert controller.mri not in self._controllers, \
        "Controller already exists for %s" % controller.mri
    self._controllers[controller.mri] = controller
    controller.setup(self)
    # If the process has already begun starting, bring the new
    # controller up to the same lifecycle state immediately
    if self.state:
        should_publish = self._start_controllers([controller], timeout)
        if self.state == STARTED and should_publish:
            self._publish_controllers(timeout)
python
{ "resource": "" }
q16896
Process.block_view
train
def block_view(self, mri):
    # type: (str) -> Block
    """Get a Block view from a Controller with given mri"""
    return self.get_controller(mri).block_view()
python
{ "resource": "" }
q16897
BasicController.update_title
train
def update_title(self, _, info):
    # type: (object, TitleInfo) -> None
    """Set the label of the Block Meta object"""
    # Hold the lock so the metadata update is atomic with respect to
    # other Block modifications
    with self._lock:
        self._block.meta.set_label(info.title)
python
{ "resource": "" }
q16898
BasicController.update_health
train
def update_health(self, reporter, info):
    # type: (object, HealthInfo) -> None
    """Set the health attribute. Called from part"""
    with self.changes_squashed:
        alarm = info.alarm
        if alarm.is_ok():
            # This reporter is healthy again, forget any previous fault
            self._faults.pop(reporter, None)
        else:
            self._faults[reporter] = alarm
        if not self._faults:
            self.health.set_value("OK", alarm=None)
        else:
            # Report the most severe outstanding fault (last after a
            # stable sort by severity)
            ordered = sorted(self._faults.values(),
                             key=lambda a: a.severity.value)
            worst = ordered[-1]
            self.health.set_value(worst.message, alarm=worst)
python
{ "resource": "" }
q16899
PandABlocksMaker.make_parts_for
train
def make_parts_for(self, field_name, field_data):
    """Create the relevant parts for this field

    Args:
        field_name (str): Short field name, e.g. VAL
        field_data (FieldData): Field data object

    Raises:
        ValueError: If the field type/subtype combination is unknown.
    """
    typ = field_data.field_type
    subtyp = field_data.field_subtype
    # read and xadc fields are read-only from our side
    if typ in ("read", "xadc"):
        writeable = False
    else:
        writeable = True
    # operator precedence: this matches typ == "time", or
    # (typ in ("param", "read") and subtyp == "time")
    if typ == "time" or typ in ("param", "read") and subtyp == "time":
        self._make_time_parts(field_name, field_data, writeable)
    elif typ == "write" and subtyp == "action":
        self._make_action_part(field_name, field_data)
    elif typ in ("param", "read", "write", "xadc"):
        self._make_param_part(field_name, field_data, writeable)
    elif typ == "bit_out":
        self._make_out(field_name, field_data, "bit")
    elif typ == "pos_out":
        # position outputs also get scale/offset and capture parts
        self._make_out(field_name, field_data, "pos")
        self._make_scale_offset(field_name)
        self._make_out_capture(field_name, field_data)
    elif typ == "ext_out":
        self._make_out_capture(field_name, field_data)
    elif typ == "bit_mux":
        # bit multiplexers get an additional delay part
        self._make_mux(field_name, field_data, "bit")
        self._make_mux_delay(field_name)
    elif typ == "pos_mux":
        self._make_mux(field_name, field_data, "pos")
    elif typ == "table":
        self._make_table(field_name, field_data)
    else:
        raise ValueError("Unknown type %r subtype %r" % (typ, subtyp))
python
{ "resource": "" }