_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q36700
main
train
def main():
    """This is the CLI driver for ia-wrapper.

    Parses global options with docopt, validates them against a schema,
    resolves the subcommand (including aliases and 'help'), builds an
    ArchiveSession, and dispatches to the subcommand module's main().
    Exits with the subcommand's status code.
    """
    args = docopt(__doc__, version=__version__, options_first=True)

    # Validate args.
    s = Schema({
        six.text_type: bool,
        '--config-file': Or(None, str),
        '<args>': list,
        # Any string is accepted; a falsy command defaults to 'help'.
        '<command>': Or(str, lambda _: 'help'),
    })
    try:
        args = s.validate(args)
    except SchemaError as exc:
        print('{0}\n{1}'.format(str(exc), printable_usage(__doc__)),
              file=sys.stderr)
        sys.exit(1)

    # Get subcommand.
    cmd = args['<command>']
    if cmd in cmd_aliases:
        cmd = cmd_aliases[cmd]

    # 'ia help' or bare 'ia': print top-level or per-command usage.
    # print() returns None, so sys.exit(print(...)) exits with status 0.
    if (cmd == 'help') or (not cmd):
        if not args['<args>']:
            sys.exit(print(__doc__.strip(), file=sys.stderr))
        else:
            ia_module = load_ia_module(args['<args>'][0])
            sys.exit(print(ia_module.__doc__.strip(), file=sys.stderr))

    # 'configure' creates the config file, so only validate it for
    # every other subcommand.
    if cmd != 'configure' and args['--config-file']:
        if not os.path.isfile(args['--config-file']):
            print('--config-file should be a readable file.\n{0}'.format(
                printable_usage(__doc__)), file=sys.stderr)
            sys.exit(1)

    argv = [cmd] + args['<args>']

    # Translate CLI flags into session config.
    config = dict()
    if args['--log']:
        config['logging'] = {'level': 'INFO'}
    elif args['--debug']:
        config['logging'] = {'level': 'DEBUG'}

    if args['--insecure']:
        config['general'] = dict(secure=False)
    session = get_session(config_file=args['--config-file'],
                          config=config,
                          debug=args['--debug'])

    ia_module = load_ia_module(cmd)
    try:
        sys.exit(ia_module.main(argv, session))
    except IOError as e:
        # Handle Broken Pipe errors: exit quietly when the consumer
        # (e.g. `head`) closed the pipe early.
        if e.errno == errno.EPIPE:
            sys.stderr.close()
            sys.stdout.close()
            sys.exit(0)
        else:
            raise
python
{ "resource": "" }
q36701
suppress_keyboard_interrupt_message
train
def suppress_keyboard_interrupt_message():
    """Install an excepthook that silences KeyboardInterrupt tracebacks.

    A KeyboardInterrupt exits with status code 130 (the conventional
    128 + SIGINT); every other exception type is forwarded to the
    previously installed hook unchanged.
    """
    previous_hook = sys.excepthook

    def _quiet_hook(exc_type, exc_value, exc_traceback):
        if exc_type == KeyboardInterrupt:
            sys.exit(130)
        previous_hook(exc_type, exc_value, exc_traceback)

    sys.excepthook = _quiet_hook
python
{ "resource": "" }
q36702
recursive_file_count
train
def recursive_file_count(files, item=None, checksum=False):
    """Given a filepath or list of filepaths, return the total number of files.

    :param files: a filepath, file-like object, or a list/set of them
                  (a dict or a list of (name, path) tuples is also accepted;
                  only the local filenames/values are counted).
    :param item: (optional) item whose existing files are consulted when
                 ``checksum`` is True.
    :param checksum: (optional) if True, files whose MD5 already appears in
                     ``item.files`` are skipped rather than counted.

    :rtype: int
    :returns: the total number of files found.
    """
    if not isinstance(files, (list, set)):
        files = [files]
    total_files = 0
    if checksum is True:
        # MD5s of files already in the item; matching local files are skipped.
        md5s = [f.get('md5') for f in item.files]
    else:
        md5s = list()
    if isinstance(files, dict):
        # make sure to use local filenames.
        _files = files.values()
    else:
        # Guard against an empty list before peeking at files[0].
        if files and isinstance(files[0], tuple):
            _files = dict(files).values()
        else:
            _files = files
    for f in _files:
        try:
            is_dir = os.path.isdir(f)
        except TypeError:
            try:
                f = f[0]
                is_dir = os.path.isdir(f)
            except (AttributeError, TypeError):
                is_dir = False
        if is_dir:
            for x, _ in iter_directory(f):
                # Close each file promptly instead of leaking the handle.
                with open(x, 'rb') as fh:
                    lmd5 = get_md5(fh)
                if lmd5 in md5s:
                    continue
                total_files += 1
        else:
            try:
                with open(f, 'rb') as fh:
                    lmd5 = get_md5(fh)
            except TypeError:
                # Support file-like objects.
                lmd5 = get_md5(f)
            if lmd5 in md5s:
                continue
            total_files += 1
    return total_files
python
{ "resource": "" }
q36703
reraise_modify
train
def reraise_modify(caught_exc, append_msg, prepend=False):
    """Append message to exception while preserving attributes.

    Preserves exception class, and exception traceback.

    Note:
        This function needs to be called inside an except because
        `sys.exc_info()` requires the exception context.

    Args:
        caught_exc(Exception): The caught exception object
        append_msg(str): The message to append to the caught exception
        prepend(bool): If True prepend the message to args instead of appending

    Returns:
        None

    Side Effects:
        Re-raises the exception with the preserved data / trace but
        modified message
    """
    ExceptClass = type(caught_exc)
    # Keep old traceback
    traceback = sys.exc_info()[2]
    if not caught_exc.args:
        # If no args, create our own tuple
        arg_list = [append_msg]
    else:
        # Take the last arg
        # If it is a string
        # append your message.
        # Otherwise append it to the
        # arg list(Not as pretty)
        arg_list = list(caught_exc.args[:-1])
        last_arg = caught_exc.args[-1]
        if isinstance(last_arg, str):
            if prepend:
                arg_list.append(append_msg + last_arg)
            else:
                arg_list.append(last_arg + append_msg)
        else:
            # Non-string last arg: keep it intact and add the message
            # as a separate trailing argument.
            arg_list += [last_arg, append_msg]
    caught_exc.args = tuple(arg_list)
    # six.reraise re-raises with the original class and traceback
    # in a py2/py3-compatible way.
    six.reraise(ExceptClass, caught_exc, traceback)
python
{ "resource": "" }
q36704
configure
train
def configure(username=None, password=None, config_file=None):
    """Configure internetarchive with your Archive.org credentials.

    :type username: str
    :param username: The email address associated with your Archive.org
                     account. Prompted for interactively when omitted.

    :type password: str
    :param password: Your Archive.org password. Prompted for (without echo)
                     when omitted.

    :returns: path of the written config file.

    Usage:
        >>> from internetarchive import configure
        >>> configure('user@example.com', 'password')
    """
    if not username:
        username = input('Email address: ')
    if not password:
        password = getpass('Password: ')
    return config_module.write_config_file(username, password, config_file)
python
{ "resource": "" }
q36705
get_user_info
train
def get_user_info(access_key, secret_key):
    """Returns details about an Archive.org user given an IA-S3 key pair.

    :type access_key: str
    :param access_key: IA-S3 access_key to use when making the given request.

    :type secret_key: str
    :param secret_key: IA-S3 secret_key to use when making the given request.

    :raises AuthenticationError: when the service reports an auth error.
    """
    url = 'https://s3.us.archive.org'
    params = dict(check_auth=1)
    response = requests.get(url, params=params,
                            auth=auth.S3Auth(access_key, secret_key))
    response.raise_for_status()
    info = response.json()
    if info.get('error'):
        raise AuthenticationError(info.get('error'))
    return info
python
{ "resource": "" }
q36706
CatalogTask.task_log
train
def task_log(self):
    """Get task log.

    :rtype: str
    :returns: The task log as a string.

    :raises ValueError: when this task has no task_id.
    """
    task_id = self.task_id
    if task_id is None:
        raise ValueError('task_id is None')
    return self.get_task_log(task_id, self.session, self.request_kwargs)
python
{ "resource": "" }
q36707
CatalogTask.get_task_log
train
def get_task_log(task_id, session, request_kwargs=None):
    """Static method for getting a task log, given a task_id.

    This method exists so a task log can be retrieved without
    retrieving the items task history first.

    :type task_id: str or int
    :param task_id: The task id for the task log you'd like to fetch.

    :type archive_session: :class:`ArchiveSession <ArchiveSession>`

    :type request_kwargs: dict
    :param request_kwargs: (optional) Keyword arguments that
                           :py:class:`requests.Request` takes.

    :rtype: str
    :returns: The task log as a string.
    """
    request_kwargs = request_kwargs if request_kwargs else dict()
    # NOTE(review): assumes session.protocol includes the trailing colon
    # (e.g. 'https:'), so '{0}//' forms a valid scheme — confirm.
    url = '{0}//catalogd.archive.org/log/{1}'.format(session.protocol, task_id)
    p = dict(full=1)  # request the complete log, not a truncated view
    r = session.get(url, params=p, **request_kwargs)
    r.raise_for_status()
    return r.content.decode('utf-8')
python
{ "resource": "" }
q36708
ArchiveSession._get_user_agent_string
train
def _get_user_agent_string(self):
    """Generate a User-Agent string to be sent with every request.

    Includes the library version, platform, a two-letter locale code
    (when determinable), the session's access key, and the Python version.
    """
    uname = platform.uname()
    try:
        lang = locale.getlocale()[0][:2]
    except Exception:
        # getlocale() can return (None, None); slicing None raises TypeError.
        # Catch Exception (not bare except) so SystemExit/KeyboardInterrupt
        # still propagate.
        lang = ''
    py_version = '{0}.{1}.{2}'.format(*sys.version_info)
    return 'internetarchive/{0} ({1} {2}; N; {3}; {4}) Python/{5}'.format(
        __version__, uname[0], uname[-1], lang, self.access_key, py_version)
python
{ "resource": "" }
q36709
ArchiveSession.rebuild_auth
train
def rebuild_auth(self, prepared_request, response):
    """Never rebuild auth for archive.org URLs.

    On redirects within archive.org the existing credentials are kept;
    for any other host the decision is deferred to the parent class.
    """
    u = urlparse(prepared_request.url)
    if u.netloc.endswith('archive.org'):
        # Same-site redirect: keep the Authorization header intact.
        return
    super(ArchiveSession, self).rebuild_auth(prepared_request,
                                             response)
python
{ "resource": "" }
q36710
FormForForm.email_to
train
def email_to(self):
    """
    Return the value entered for the first field of type EmailField.
    """
    for field in self.form_fields:
        if not field.is_a(fields.EMAIL):
            continue
        return self.cleaned_data[field.slug]
    return None
python
{ "resource": "" }
q36711
unique_slug
train
def unique_slug(manager, slug_field, slug):
    """
    Ensure slug is unique for the given manager, appending a digit if it isn't.
    """
    max_length = manager.model._meta.get_field(slug_field).max_length
    slug = slug[:max_length]
    attempt = 0
    while manager.filter(**{slug_field: slug}):
        attempt += 1
        if attempt > 1:
            # Drop the previously appended "-<n>" suffix before retrying.
            slug = slug.rsplit("-", 1)[0]
        # Truncate the base so slug plus "-<attempt>" stays within the
        # slug field's max length.
        suffix = "-%s" % attempt
        slug = "%s%s" % (slug[:max_length - len(suffix)], suffix)
    return slug
python
{ "resource": "" }
q36712
import_attr
train
def import_attr(path):
    """
    Given a Python dotted path to a variable in a module, imports the
    module and returns the variable in it.
    """
    module_path, attr_name = path.rsplit(".", 1)
    module = import_module(module_path)
    return getattr(module, attr_name)
python
{ "resource": "" }
q36713
form_sent
train
def form_sent(request, slug, template="forms/form_sent.html"):
    """
    Show the response message.
    """
    published = Form.objects.published(for_user=request.user)
    form = get_object_or_404(published, slug=slug)
    return render_to_response(template, {"form": form},
                              RequestContext(request))
python
{ "resource": "" }
q36714
FormAdmin.get_queryset
train
def get_queryset(self, request):
    """
    Annotate the queryset with the entries count for use in the
    admin list view.
    """
    qs = super(FormAdmin, self).get_queryset(request)
    # total_entries is computed in a single aggregated query rather
    # than one count per row in the changelist.
    return qs.annotate(total_entries=Count("entries"))
python
{ "resource": "" }
q36715
FormAdmin.file_view
train
def file_view(self, request, field_entry_id):
    """
    Output the file for the requested field entry.

    Serves the uploaded file stored for the given field entry as an
    attachment download, guessing the content type from the path.
    """
    model = self.fieldentry_model
    field_entry = get_object_or_404(model, id=field_entry_id)
    path = join(fs.location, field_entry.value)
    response = HttpResponse(content_type=guess_type(path)[0])
    # Open read-only ("rb", not "r+b" — nothing is written), and use a
    # with-statement so the handle is closed even if read/write raises.
    with open(path, "rb") as f:
        # NOTE(review): f.name is the full server-side path; exposing it in
        # Content-Disposition leaks filesystem layout — consider basename.
        response["Content-Disposition"] = "attachment; filename=%s" % f.name
        response.write(f.read())
    return response
python
{ "resource": "" }
q36716
RequestHandler.get_live_scores
train
def get_live_scores(self, use_12_hour_format):
    """Gets the live scores.

    Fetches the live feed and, for each in-progress game, reshapes the
    payload to the football-data API structure before handing the list
    to the writer.

    NOTE(review): ``use_12_hour_format`` is accepted but never referenced
    in this method — confirm whether the writer should receive it.
    """
    req = requests.get(RequestHandler.LIVE_URL)
    if req.status_code == requests.codes.ok:
        scores_data = []
        scores = req.json()
        if len(scores["games"]) == 0:
            click.secho("No live action currently", fg="red", bold=True)
            return
        for score in scores['games']:
            # match football-data api structure
            d = {}
            d['homeTeam'] = {'name': score['homeTeamName']}
            d['awayTeam'] = {'name': score['awayTeamName']}
            d['score'] = {'fullTime': {'homeTeam': score['goalsHomeTeam'],
                                       'awayTeam': score['goalsAwayTeam']}}
            d['league'] = score['league']
            d['time'] = score['time']
            scores_data.append(d)
        self.writer.live_scores(scores_data)
    else:
        click.secho("There was problem getting live scores",
                    fg="red", bold=True)
python
{ "resource": "" }
q36717
RequestHandler.get_team_scores
train
def get_team_scores(self, team, time, show_upcoming, use_12_hour_format):
    """Queries the API and gets the particular team scores.

    Resolves the team code to an id, fetches its matches in the requested
    time window, and hands them to the writer.
    """
    team_id = self.team_names.get(team, None)
    # 'n' = next (upcoming) fixtures, 'p' = past fixtures.
    time_frame = 'n' if show_upcoming else 'p'
    if team_id:
        try:
            req = self._get(
                'teams/{team_id}/matches?timeFrame={time_frame}{time}'.format(
                    team_id=team_id, time_frame=time_frame, time=time))
            team_scores = req.json()
            if len(team_scores["matches"]) == 0:
                click.secho("No action during past week. Change the time "
                            "parameter to get more fixtures.",
                            fg="red", bold=True)
            else:
                self.writer.team_scores(team_scores, time, show_upcoming,
                                        use_12_hour_format)
        except APIErrorException as e:
            # The API's error message travels in the exception's first arg.
            click.secho(e.args[0], fg="red", bold=True)
    else:
        click.secho("Team code is not correct.", fg="red", bold=True)
python
{ "resource": "" }
q36718
RequestHandler.get_standings
train
def get_standings(self, league):
    """Queries the API and gets the standings for a particular league.

    :param league: league code (validated upstream by click).
    """
    league_id = self.league_ids[league]
    try:
        req = self._get('competitions/{id}/standings'.format(
            id=league_id))
        self.writer.standings(req.json(), league)
    except APIErrorException:
        # Click handles incorrect League codes so this will only come up
        # if that league does not have standings available. ie. Champions League
        # (Typo fix: "availble" -> "available" in the user-facing message.)
        click.secho("No standings available for {league}.".format(league=league),
                    fg="red", bold=True)
python
{ "resource": "" }
q36719
RequestHandler.get_league_scores
train
def get_league_scores(self, league, time, show_upcoming, use_12_hour_format):
    """
    Queries the API and fetches the scores for fixtures
    based upon the league and time parameter.

    With a league code the competition endpoint is queried; without one,
    all matches in the time window are fetched.
    """
    # 'n' = next (upcoming) fixtures, 'p' = past fixtures.
    time_frame = 'n' if show_upcoming else 'p'
    if league:
        try:
            league_id = self.league_ids[league]
            req = self._get(
                'competitions/{id}/matches?timeFrame={time_frame}{time}'.format(
                    id=league_id, time_frame=time_frame, time=str(time)))
            fixtures_results = req.json()
            # no fixtures in the past week. display a help message and return
            if len(fixtures_results["matches"]) == 0:
                click.secho("No {league} matches in the past week.".format(
                    league=league), fg="red", bold=True)
                return
            self.writer.league_scores(fixtures_results, time, show_upcoming,
                                      use_12_hour_format)
        except APIErrorException:
            click.secho("No data for the given league.", fg="red", bold=True)
    else:
        # When no league specified. Print all available in time frame.
        try:
            req = self._get('matches?timeFrame={time_frame}{time}'.format(
                time_frame=time_frame, time=str(time)))
            fixtures_results = req.json()
            self.writer.league_scores(fixtures_results, time, show_upcoming,
                                      use_12_hour_format)
        except APIErrorException:
            click.secho("No data available.", fg="red", bold=True)
python
{ "resource": "" }
q36720
RequestHandler.get_team_players
train
def get_team_players(self, team):
    """
    Queries the API and fetches the players
    for a particular team
    """
    team_id = self.team_names.get(team, None)
    try:
        response = self._get('teams/{}/'.format(team_id))
        players = response.json()['squad']
        if players:
            self.writer.team_players(players)
        else:
            click.secho("No players found for this team", fg="red", bold=True)
    except APIErrorException:
        click.secho("No data for the team. Please check the team code.",
                    fg="red", bold=True)
python
{ "resource": "" }
q36721
load_json
train
def load_json(file):
    """Load JSON file at app start.

    The file is resolved relative to this module's directory.
    """
    here = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(here, file)) as handle:
        return json.load(handle)
python
{ "resource": "" }
q36722
get_input_key
train
def get_input_key():
    """Input API key and validate.

    Prompts until the user enters a token that is exactly 32
    hexadecimal characters, then returns it.
    """
    click.secho("No API key found!", fg="yellow", bold=True)
    click.secho("Please visit {} and get an API token.".format(
        RequestHandler.BASE_URL), fg="yellow", bold=True)
    while True:
        confkey = click.prompt(click.style("Enter API key",
                                           fg="yellow", bold=True))
        if len(confkey) == 32:  # 32 chars
            try:
                int(confkey, 16)  # hexadecimal
            except ValueError:
                click.secho("Invalid API key", fg="red", bold=True)
            else:
                return confkey
        else:
            click.secho("Invalid API key", fg="red", bold=True)
python
{ "resource": "" }
q36723
load_config_key
train
def load_config_key():
    """Load API key from config file, write if needed.

    Precedence: the SOCCER_CLI_API_TOKEN environment variable wins;
    otherwise the key is read from (or written to) ~/.soccer-cli.ini.
    Exits with status 1 when the config file exists but is empty.
    """
    global api_token
    try:
        api_token = os.environ['SOCCER_CLI_API_TOKEN']
    except KeyError:
        home = os.path.expanduser("~")
        config = os.path.join(home, ".soccer-cli.ini")
        if not os.path.exists(config):
            # First run: prompt for a key and persist it.
            with open(config, "w") as cfile:
                key = get_input_key()
                cfile.write(key)
        else:
            with open(config, "r") as cfile:
                # NOTE(review): the raw file contents are used verbatim — a
                # trailing newline would become part of the token; confirm
                # the file is always written without one.
                key = cfile.read()
        if key:
            api_token = key
        else:
            os.remove(config)  # remove 0-byte file
            click.secho('No API Token detected. '
                        'Please visit {0} and get an API Token, '
                        'which will be used by Soccer CLI '
                        'to get access to the data.'
                        .format(RequestHandler.BASE_URL), fg="red", bold=True)
            sys.exit(1)
    return api_token
python
{ "resource": "" }
q36724
map_team_id
train
def map_team_id(code):
    """Take in team ID, read JSON file to map ID to name.

    Prints the matching team name in green, or an error in red when
    no team carries the given code.
    """
    for team in TEAM_DATA:
        if team["code"] == code:
            click.secho(team["name"], fg="green")
            return
    click.secho("No team found for this code", fg="red", bold=True)
python
{ "resource": "" }
q36725
list_team_codes
train
def list_team_codes():
    """List team names in alphabetical order of team ID, per league."""
    # Sort teams by league name, then alphabetically by code.
    ordered = sorted(TEAM_DATA,
                     key=lambda t: (t["league"]["name"], t["code"]))
    # Unique league names, alphabetical.
    league_names = sorted({t["league"]["name"] for t in ordered})
    for league in league_names:
        click.secho(league, fg="green", bold=True)
        for team in ordered:
            if team["league"]["name"] != league:
                continue
            if team["code"] != "null":
                click.secho(u"{0}: {1}".format(team["code"], team["name"]),
                            fg="yellow")
        click.secho("")
python
{ "resource": "" }
q36726
main
train
def main(league, time, standings, team, live, use12hour, players,
         output_format, output_file, upcoming, lookup, listcodes, apikey):
    """
    A CLI for live and past football scores from various football leagues.

    League codes:

    \b
    - WC: World Cup
    - EC: European Championship
    - CL: Champions League
    - PL: English Premier League
    - ELC: English Championship
    - FL1: French Ligue 1
    - BL: German Bundesliga
    - SA: Serie A
    - DED: Eredivisie
    - PPL: Primeira Liga
    - PD: Primera Division
    - BSA: Brazil Serie A
    """
    headers = {'X-Auth-Token': apikey}
    try:
        # stdout and a file are mutually exclusive output sinks.
        if output_format == 'stdout' and output_file:
            raise IncorrectParametersException('Printing output to stdout and '
                                               'saving to a file are mutually exclusive')
        writer = get_writer(output_format, output_file)
        rh = RequestHandler(headers, LEAGUE_IDS, TEAM_NAMES, writer)
        # Modes below short-circuit: the first matching flag wins.
        if listcodes:
            list_team_codes()
            return
        if live:
            rh.get_live_scores(use12hour)
            return
        if standings:
            # Standings require an explicit league, and CL has none.
            if not league:
                raise IncorrectParametersException('Please specify a league. '
                                                   'Example --standings --league=PL')
            if league == 'CL':
                raise IncorrectParametersException('Standings for CL - '
                                                   'Champions League not supported')
            rh.get_standings(league)
            return
        if team:
            if lookup:
                map_team_id(team)
                return
            if players:
                rh.get_team_players(team)
                return
            else:
                rh.get_team_scores(team, time, upcoming, use12hour)
                return
        # Default mode: fixtures for the league (or all leagues) in window.
        rh.get_league_scores(league, time, upcoming, use12hour)
    except IncorrectParametersException as e:
        click.secho(str(e), fg="red", bold=True)
python
{ "resource": "" }
q36727
Stdout.live_scores
train
def live_scores(self, live_scores):
    """Prints the live scores in a pretty format."""
    # Sort first so groupby() sees each league as one contiguous run.
    scores = sorted(live_scores, key=lambda x: x["league"])
    for league, games in groupby(scores, key=lambda x: x["league"]):
        self.league_header(league)
        for game in games:
            self.scores(self.parse_result(game), add_new_line=False)
            # Live feed times are printed in 24-hour format regardless
            # of the user's preference.
            click.secho(' %s' % Stdout.utc_to_local(game["time"],
                                                    use_12_hour_format=False),
                        fg=self.colors.TIME)
            click.echo()
python
{ "resource": "" }
q36728
Stdout.team_scores
train
def team_scores(self, team_scores, time, show_datetime, use_12_hour_format):
    """Prints the teams scores in a pretty format.

    Finished matches are printed with their date; otherwise, when
    show_datetime is set, the local kick-off time is appended.
    """
    for match in team_scores["matches"]:
        if match["status"] == "FINISHED":
            click.secho("%s\t" % match["utcDate"].split('T')[0],
                        fg=self.colors.TIME, nl=False)
            self.scores(self.parse_result(match))
        elif show_datetime:
            self.scores(self.parse_result(match), add_new_line=False)
            local_time = Stdout.utc_to_local(match["utcDate"],
                                             use_12_hour_format,
                                             show_datetime)
            click.secho(' %s' % local_time, fg=self.colors.TIME)
python
{ "resource": "" }
q36729
Stdout.team_players
train
def team_players(self, team):
    """Prints the team players in a pretty format.

    Players are listed in shirt-number order under a bold header row.
    """
    players = sorted(team, key=lambda p: p['shirtNumber'])
    click.secho("%-4s %-25s %-20s %-20s %-15s" %
                ("N.", "NAME", "POSITION", "NATIONALITY", "BIRTHDAY"),
                bold=True, fg=self.colors.MISC)
    row_fmt = (u"{shirtNumber:<4} {name:<28} {position:<23} {nationality:<23}"
               u" {dateOfBirth:<18}")
    for player in players:
        click.secho(row_fmt.format(**player), bold=True)
python
{ "resource": "" }
q36730
Stdout.standings
train
def standings(self, league_table, league):
    """ Prints the league standings in a pretty way """
    click.secho("%-6s %-30s %-10s %-10s %-10s" %
                ("POS", "CLUB", "PLAYED", "GOAL DIFF", "POINTS"))
    for team in league_table["standings"][0]["table"]:
        if team["goalDifference"] >= 0:
            # Pad non-negative goal differences so the column lines up
            # with negative values (which carry a '-' sign).
            team["goalDifference"] = ' ' + str(team["goalDifference"])
        # Define the upper and lower bounds for Champions League,
        # Europa League and Relegation places.
        # This is so we can highlight them appropriately.
        cl_upper, cl_lower = LEAGUE_PROPERTIES[league]['cl']
        el_upper, el_lower = LEAGUE_PROPERTIES[league]['el']
        rl_upper, rl_lower = LEAGUE_PROPERTIES[league]['rl']
        # NOTE: mutates the API dict in place to flatten the nested name
        # for the format() call below.
        team['teamName'] = team['team']['name']
        team_str = (u"{position:<7} {teamName:<33} {playedGames:<12}"
                    u" {goalDifference:<14} {points}").format(**team)
        if cl_upper <= team["position"] <= cl_lower:
            click.secho(team_str, bold=True, fg=self.colors.CL_POSITION)
        elif el_upper <= team["position"] <= el_lower:
            click.secho(team_str, fg=self.colors.EL_POSITION)
        elif rl_upper <= team["position"] <= rl_lower:
            click.secho(team_str, fg=self.colors.RL_POSITION)
        else:
            click.secho(team_str, fg=self.colors.POSITION)
python
{ "resource": "" }
q36731
Stdout.league_scores
train
def league_scores(self, total_data, time, show_datetime, use_12_hour_format):
    """Prints the data in a pretty format."""
    for match in total_data['matches']:
        self.scores(self.parse_result(match),
                    add_new_line=not show_datetime)
        if show_datetime:
            # Kick-off time converted to the user's local timezone.
            click.secho(' %s' % Stdout.utc_to_local(match["utcDate"],
                                                    use_12_hour_format,
                                                    show_datetime),
                        fg=self.colors.TIME)
        click.echo()
python
{ "resource": "" }
q36732
Stdout.league_header
train
def league_header(self, league):
    """Prints the league header.

    The league name is centred inside a 62-character bar of '=' signs.
    """
    banner = " {0} ".format(league)
    click.secho("{:=^62}".format(banner), fg=self.colors.MISC)
    click.echo()
python
{ "resource": "" }
q36733
Stdout.scores
train
def scores(self, result, add_new_line=True):
    """Prints out the scores in a pretty format.

    The winning side is coloured WIN, the losing side LOSE, and both
    sides TIE on a draw.
    """
    home_goals = result.goalsHomeTeam
    away_goals = result.goalsAwayTeam
    if home_goals > away_goals:
        home_color, away_color = self.colors.WIN, self.colors.LOSE
    elif home_goals < away_goals:
        home_color, away_color = self.colors.LOSE, self.colors.WIN
    else:
        home_color = away_color = self.colors.TIE
    click.secho('%-25s %2s' % (result.homeTeam, home_goals),
                fg=home_color, nl=False)
    click.secho(" vs ", nl=False)
    click.secho('%2s %s' % (away_goals, result.awayTeam.rjust(25)),
                fg=away_color, nl=add_new_line)
python
{ "resource": "" }
q36734
Stdout.parse_result
train
def parse_result(self, data):
    """Parses the results and returns a Result namedtuple.

    A missing (None) score is rendered as an empty string.
    """
    def score_or_blank(value):
        return "" if value is None else value

    full_time = data["score"]["fullTime"]
    return self.Result(
        data["homeTeam"]["name"],
        score_or_blank(full_time["homeTeam"]),
        data["awayTeam"]["name"],
        score_or_blank(full_time["awayTeam"]))
python
{ "resource": "" }
q36735
Stdout.utc_to_local
train
def utc_to_local(time_str, use_12_hour_format, show_datetime=False):
    """Converts the API UTC time string to the local user time.

    Accepts either the legacy '<clock> UTC' form or an ISO-8601
    '...Z' timestamp; anything else is returned unchanged.
    """
    is_legacy = time_str.endswith(" UTC")
    is_iso = time_str.endswith("Z")
    if not (is_legacy or is_iso):
        return time_str
    now_utc = datetime.datetime.utcnow()
    # Offset between UTC and the local clock, derived from "now".
    utc_offset = now_utc - datetime.datetime.now()
    if is_legacy:
        clock_part, _ = time_str.split(" UTC")
        parsed = datetime.datetime.strptime(clock_part, '%I:%M %p')
        utc_dt = datetime.datetime(now_utc.year, now_utc.month, now_utc.day,
                                   parsed.hour, parsed.minute)
    else:
        utc_dt = datetime.datetime.strptime(time_str, '%Y-%m-%dT%H:%M:%SZ')
    local_dt = utc_dt - utc_offset
    if use_12_hour_format:
        fmt = '%a %d, %I:%M %p' if show_datetime else '%I:%M %p'
    else:
        fmt = '%a %d, %H:%M' if show_datetime else '%H:%M'
    return datetime.datetime.strftime(local_dt, fmt)
python
{ "resource": "" }
q36736
Csv.live_scores
train
def live_scores(self, live_scores):
    """Store output of live scores to a CSV file."""
    headers = ['League', 'Home Team Name', 'Home Team Goals',
               'Away Team Goals', 'Away Team Name']
    rows = [headers]
    for game in live_scores['games']:
        rows.append([game['league'], game['homeTeamName'],
                     game['goalsHomeTeam'], game['goalsAwayTeam'],
                     game['awayTeamName']])
    self.generate_output(rows)
python
{ "resource": "" }
q36737
Csv.team_scores
train
def team_scores(self, team_scores, time):
    """Store output of team scores to a CSV file.

    Only FINISHED matches are written.
    """
    headers = ['Date', 'Home Team Name', 'Home Team Goals',
               'Away Team Goals', 'Away Team Name']
    rows = [headers]
    for match in team_scores['matches']:
        if match['status'] != 'FINISHED':
            continue
        rows.append([match["utcDate"].split('T')[0],
                     match['homeTeam']['name'],
                     match['score']['fullTime']['homeTeam'],
                     match['score']['fullTime']['awayTeam'],
                     match['awayTeam']['name']])
    self.generate_output(rows)
python
{ "resource": "" }
q36738
Csv.team_players
train
def team_players(self, team):
    """Store output of team players to a CSV file."""
    headers = ['Jersey Number', 'Name', 'Position', 'Nationality',
               'Date of Birth']
    rows = [headers]
    for player in team:
        rows.append([player['shirtNumber'], player['name'],
                     player['position'], player['nationality'],
                     player['dateOfBirth']])
    self.generate_output(rows)
python
{ "resource": "" }
q36739
Csv.standings
train
def standings(self, league_table, league):
    """Store output of league standings to a CSV file."""
    headers = ['Position', 'Team Name', 'Games Played', 'Goal For',
               'Goals Against', 'Goal Difference', 'Points']
    rows = [headers]
    for team in league_table['standings'][0]['table']:
        rows.append([team['position'], team['team']['name'],
                     team['playedGames'], team['goalsFor'],
                     team['goalsAgainst'], team['goalDifference'],
                     team['points']])
    self.generate_output(rows)
python
{ "resource": "" }
q36740
Csv.league_scores
train
def league_scores(self, total_data, time, show_upcoming, use_12_hour_format):
    """Store output of fixtures based on league and time to a CSV file."""
    headers = ['League', 'Home Team Name', 'Home Team Goals',
               'Away Team Goals', 'Away Team Name']
    rows = [headers]
    # Every row carries the same competition name.
    competition = total_data['competition']['name']
    for match in total_data['matches']:
        rows.append([competition, match['homeTeam']['name'],
                     match['score']['fullTime']['homeTeam'],
                     match['score']['fullTime']['awayTeam'],
                     match['awayTeam']['name']])
    self.generate_output(rows)
python
{ "resource": "" }
q36741
Json.team_scores
train
def team_scores(self, team_scores, time):
    """Store output of team scores to a JSON file.

    Only FINISHED matches are written.
    """
    data = [
        {'date': match["utcDate"].split('T')[0],
         'homeTeamName': match['homeTeam']['name'],
         'goalsHomeTeam': match['score']['fullTime']['homeTeam'],
         'goalsAwayTeam': match['score']['fullTime']['awayTeam'],
         'awayTeamName': match['awayTeam']['name']}
        for match in team_scores['matches']
        if match['status'] == 'FINISHED'
    ]
    self.generate_output({'team_scores': data})
python
{ "resource": "" }
q36742
Json.standings
train
def standings(self, league_table, league):
    """Store output of league standings to a JSON file.

    Each row carries the team's position, name, games played, goal
    statistics and points, matching the fields the CSV writer emits.
    """
    data = []
    for team in league_table['standings'][0]['table']:
        item = {'position': team['position'],
                # Bug fix: store the team *name*; previously the entire
                # nested team dict was stored under 'teamName', which is
                # inconsistent with the CSV writer and the key's meaning.
                'teamName': team['team']['name'],
                'playedGames': team['playedGames'],
                'goalsFor': team['goalsFor'],
                'goalsAgainst': team['goalsAgainst'],
                'goalDifference': team['goalDifference'],
                'points': team['points']}
        data.append(item)
    self.generate_output({'standings': data})
python
{ "resource": "" }
q36743
Json.team_players
train
def team_players(self, team):
    """Store output of team players to a JSON file."""
    keys = ('shirtNumber', 'name', 'position', 'nationality', 'dateOfBirth')
    data = []
    for player in team:
        data.append({key: player[key] for key in keys})
    self.generate_output({'players': data})
python
{ "resource": "" }
q36744
Json.league_scores
train
def league_scores(self, total_data, time):
    """Store output of fixtures based on league and time to a JSON file."""
    data = []
    for league, score in self.supported_leagues(total_data):
        data.append({'league': league,
                     'homeTeamName': score['homeTeamName'],
                     'goalsHomeTeam': score['result']['goalsHomeTeam'],
                     'goalsAwayTeam': score['result']['goalsAwayTeam'],
                     'awayTeamName': score['awayTeamName']})
    self.generate_output({'league_scores': data, 'time': time})
python
{ "resource": "" }
q36745
example_camera
train
def example_camera():
    """
    Example with `morphological_chan_vese` with using the default
    initialization of the level-set.
    """
    logging.info('Running: example_camera (MorphACWE)...')

    # Load the image and scale intensities to [0, 1].
    img = imread(PATH_IMG_CAMERA) / 255.0

    # Callback for visual plotting.
    plot_callback = visual_callback_2d(img)

    # Run Morphological Chan-Vese (ACWE) for 35 iterations.
    ms.morphological_chan_vese(img, 35,
                               smoothing=3, lambda1=1, lambda2=1,
                               iter_callback=plot_callback)
python
{ "resource": "" }
q36746
operator_si
train
def operator_si(u):
    """operator_si operator.

    Erodes `u` with each structuring element of the 2-D/3-D family
    (_P2/_P3) and returns the pointwise maximum of the erosions
    (a sup-of-inf morphological operator). Results are computed into a
    module-level scratch buffer `_aux` that is reused across calls.
    """
    global _aux
    if np.ndim(u) == 2:
        P = _P2
    elif np.ndim(u) == 3:
        P = _P3
    else:
        raise ValueError("u has an invalid number of dimensions "
                         "(should be 2 or 3)")
    if u.shape != _aux.shape[1:]:
        # Reallocate the shared scratch buffer when the input shape changes.
        _aux = np.zeros((len(P),) + u.shape)
    for _aux_i, P_i in zip(_aux, P):
        _aux_i[:] = binary_erosion(u, P_i)
    return _aux.max(0)
python
{ "resource": "" }
q36747
operator_is
train
def operator_is(u):
    """operator_is operator.

    Dilates `u` with each structuring element of the 2-D/3-D family
    (_P2/_P3) and returns the pointwise minimum of the dilations
    (an inf-of-sup morphological operator). Results are computed into a
    module-level scratch buffer `_aux` that is reused across calls.
    """
    global _aux
    if np.ndim(u) == 2:
        P = _P2
    elif np.ndim(u) == 3:
        P = _P3
    else:
        raise ValueError("u has an invalid number of dimensions "
                         "(should be 2 or 3)")
    if u.shape != _aux.shape[1:]:
        # Reallocate the shared scratch buffer when the input shape changes.
        _aux = np.zeros((len(P),) + u.shape)
    for _aux_i, P_i in zip(_aux, P):
        _aux_i[:] = binary_dilation(u, P_i)
    return _aux.min(0)
python
{ "resource": "" }
q36748
gborders
train
def gborders(img, alpha=1.0, sigma=1.0):
    """Stopping criterion for image borders.

    Computes g(I) = 1 / sqrt(1 + alpha * |grad(G_sigma * I)|), which is
    close to 1 in flat regions and small near strong edges.
    """
    edge_strength = gaussian_gradient_magnitude(img, sigma, mode='constant')
    return np.reciprocal(np.sqrt(1.0 + alpha * edge_strength))
python
{ "resource": "" }
q36749
MorphACWE.step
train
def step(self): """Perform a single step of the morphological Chan-Vese evolution.""" # Assign attributes to local variables for convenience. u = self._u if u is None: raise ValueError("the levelset function is not set " "(use set_levelset)") data = self.data # Determine c0 and c1. inside = (u > 0) outside = (u <= 0) c0 = data[outside].sum() / float(outside.sum()) c1 = data[inside].sum() / float(inside.sum()) # Image attachment. dres = np.array(np.gradient(u)) abs_dres = np.abs(dres).sum(0) #aux = abs_dres * (c0 - c1) * (c0 + c1 - 2*data) aux = abs_dres * (self.lambda1*(data - c1) ** 2 - self.lambda2*(data - c0) ** 2) res = np.copy(u) res[aux < 0] = 1 res[aux > 0] = 0 # Smoothing. for i in range(self.smoothing): res = curvop(res) self._u = res
python
{ "resource": "" }
q36750
MorphGAC._update_mask
train
def _update_mask(self):
    """Pre-compute masks for speed.

    Caches where the stopping function exceeds theta, and where it
    exceeds theta scaled by the balloon force magnitude.
    """
    data = self._data
    theta = self._theta
    self._threshold_mask = data > theta
    self._threshold_mask_v = data > theta / np.abs(self._v)
python
{ "resource": "" }
q36751
MorphGAC.step
train
def step(self): """Perform a single step of the morphological snake evolution.""" # Assign attributes to local variables for convenience. u = self._u gI = self._data dgI = self._ddata theta = self._theta v = self._v if u is None: raise ValueError("the levelset is not set (use set_levelset)") res = np.copy(u) # Balloon. if v > 0: aux = binary_dilation(u, self.structure) elif v < 0: aux = binary_erosion(u, self.structure) if v!= 0: res[self._threshold_mask_v] = aux[self._threshold_mask_v] # Image attachment. aux = np.zeros_like(res) dres = np.gradient(res) for el1, el2 in zip(dgI, dres): aux += el1*el2 res[aux > 0] = 1 res[aux < 0] = 0 # Smoothing. for i in range(self.smoothing): res = curvop(res) self._u = res
python
{ "resource": "" }
q36752
sup_inf
train
def sup_inf(u):
    """SI operator.

    Erodes `u` with every structuring element of the dimension-matched
    family and returns the pointwise maximum of the erosions.
    """
    ndim = np.ndim(u)
    if ndim == 2:
        P = _P2
    elif ndim == 3:
        P = _P3
    else:
        raise ValueError("u has an invalid number of dimensions "
                         "(should be 2 or 3)")

    erosions = [ndi.binary_erosion(u, P_i) for P_i in P]
    return np.array(erosions, dtype=np.int8).max(0)
python
{ "resource": "" }
q36753
inf_sup
train
def inf_sup(u):
    """IS operator.

    Dilates `u` with every structuring element of the dimension-matched
    family and returns the pointwise minimum of the dilations.
    """
    ndim = np.ndim(u)
    if ndim == 2:
        P = _P2
    elif ndim == 3:
        P = _P3
    else:
        raise ValueError("u has an invalid number of dimensions "
                         "(should be 2 or 3)")

    dilations = [ndi.binary_dilation(u, P_i) for P_i in P]
    return np.array(dilations, dtype=np.int8).min(0)
python
{ "resource": "" }
q36754
_check_input
train
def _check_input(image, init_level_set):
    """Check that shapes of `image` and `init_level_set` match.

    Parameters
    ----------
    image : ndarray
        Input image; must be 2- or 3-dimensional.
    init_level_set : ndarray
        Initial level set; must have the same number of dimensions as `image`.

    Raises
    ------
    ValueError
        If `image` is not 2-D/3-D, or the dimensionalities differ.
    """
    # Idiom fix: `not x in y` -> `x not in y`; use `ndim` consistently in
    # both checks instead of mixing `ndim` and `len(shape)`.
    if image.ndim not in (2, 3):
        raise ValueError("`image` must be a 2 or 3-dimensional array.")

    if image.ndim != init_level_set.ndim:
        raise ValueError("The dimensions of the initial level set do not "
                         "match the dimensions of the image.")
python
{ "resource": "" }
q36755
_init_level_set
train
def _init_level_set(init_level_set, image_shape):
    """Auxiliary function for initializing level sets with a string.

    If `init_level_set` is not a string, it is returned as is.
    """
    # Non-string inputs are assumed to already be level-set arrays.
    if not isinstance(init_level_set, str):
        return init_level_set

    if init_level_set == 'checkerboard':
        return checkerboard_level_set(image_shape)
    if init_level_set == 'circle':
        return circle_level_set(image_shape)
    raise ValueError("`init_level_set` not in "
                     "['checkerboard', 'circle']")
python
{ "resource": "" }
q36756
circle_level_set
train
def circle_level_set(image_shape, center=None, radius=None):
    """Create a circle level set with binary values.

    Parameters
    ----------
    image_shape : tuple of positive integers
        Shape of the image
    center : tuple of positive integers, optional
        Coordinates of the center of the circle given in (row, column). If not
        given, it defaults to the center of the image.
    radius : float, optional
        Radius of the circle. If not given, it is set to 3/8 of the smallest
        image dimension (i.e. the circle's *diameter* spans 75% of that
        dimension).

    Returns
    -------
    out : array with shape `image_shape`
        Binary level set of the circle with the given `radius` and `center`.

    See also
    --------
    checkerboard_level_set
    """
    # Doc fix: the docstring previously claimed the default *radius* was 75%
    # of the smallest dimension, but the code sets it to 3/8 (diameter 75%).
    if center is None:
        center = tuple(i // 2 for i in image_shape)

    if radius is None:
        radius = min(image_shape) * 3.0 / 8.0

    # Signed distance-like function: positive inside the circle.
    grid = np.mgrid[[slice(i) for i in image_shape]]
    grid = (grid.T - center).T
    phi = radius - np.sqrt(np.sum((grid)**2, 0))
    res = np.int8(phi > 0)
    return res
python
{ "resource": "" }
q36757
checkerboard_level_set
train
def checkerboard_level_set(image_shape, square_size=5):
    """Create a checkerboard level set with binary values.

    Parameters
    ----------
    image_shape : tuple of positive integers
        Shape of the image.
    square_size : int, optional
        Size of the squares of the checkerboard. It defaults to 5.

    Returns
    -------
    out : array with shape `image_shape`
        Binary level set of the checkerboard.

    See also
    --------
    circle_level_set
    """
    # Per-axis parity of the square index, then XOR across axes gives the
    # alternating checkerboard pattern.
    axes = np.ogrid[[slice(dim) for dim in image_shape]]
    parity_per_axis = [(axis // square_size) & 1 for axis in axes]
    checkerboard = np.bitwise_xor.reduce(parity_per_axis, axis=0)
    return np.int8(checkerboard)
python
{ "resource": "" }
q36758
inverse_gaussian_gradient
train
def inverse_gaussian_gradient(image, alpha=100.0, sigma=5.0):
    """Inverse of gradient magnitude.

    Computes the Gaussian gradient magnitude of the image and maps it into
    (0, 1]: flat areas come out close to 1, borders close to 0.  Apply this
    (or a similar user-defined function) as a preprocessing step before
    `morphological_geodesic_active_contour`.

    Parameters
    ----------
    image : (M, N) or (L, M, N) array
        Grayscale image or volume.
    alpha : float, optional
        Controls the steepness of the inversion. A larger value makes the
        transition between flat and border areas steeper.
    sigma : float, optional
        Standard deviation of the Gaussian filter applied over the image.

    Returns
    -------
    gimage : (M, N) or (L, M, N) array
        Preprocessed image (or volume) suitable for
        `morphological_geodesic_active_contour`.
    """
    gradient_magnitude = ndi.gaussian_gradient_magnitude(image, sigma,
                                                         mode='nearest')
    return 1.0 / np.sqrt(1.0 + alpha * gradient_magnitude)
python
{ "resource": "" }
q36759
Command.get_handler
train
def get_handler(self, *args, **options):
    """
    Returns the static files serving handler wrapping the default
    handler, if static files should be served. Otherwise just returns
    the default handler.
    """
    handler = super(Command, self).get_handler(*args, **options)
    # Fix: removed the unused local `insecure_serving` — the decision is
    # fully delegated to should_use_static_handler(options), which receives
    # the same options dict.
    if self.should_use_static_handler(options):
        return StaticFilesHandler(handler)
    return handler
python
{ "resource": "" }
q36760
Account.privateKeyToAccount
train
def privateKeyToAccount(self, private_key):
    '''
    Returns a convenient object for working with the given private key.

    :param private_key: The raw private key
    :type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
    :return: object with methods for signing and encrypting
    :rtype: LocalAccount

    .. code-block:: python

        >>> acct = Account.privateKeyToAccount(
            0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364)
        >>> acct.address
        '0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
        >>> acct.privateKey
        b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"

        # These methods are also available: signHash(), signTransaction(), encrypt()
        # They correspond to the same-named methods in Account.*
        # but without the private key argument
    '''
    # Normalize the key to an eth_keys PrivateKey, then wrap it in a
    # LocalAccount bound to this Account instance.
    key = self._parsePrivateKey(private_key)
    return LocalAccount(key, self)
python
{ "resource": "" }
q36761
Account.recoverTransaction
train
def recoverTransaction(self, serialized_transaction):
    '''
    Get the address of the account that signed this transaction.

    :param serialized_transaction: the complete signed transaction
    :type serialized_transaction: hex str, bytes or int
    :returns: address of signer, hex-encoded & checksummed
    :rtype: str

    .. code-block:: python

        >>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428',  # noqa: E501
        >>> Account.recoverTransaction(raw_transaction)
        '0x2c7536E3605D9C16a7a3D7b1898e529396a65c23'
    '''
    # Normalize the input to raw bytes, then decode the RLP transaction.
    txn_bytes = HexBytes(serialized_transaction)
    txn = Transaction.from_bytes(txn_bytes)
    # Re-derive the hash that was originally signed (EIP-155 aware) and
    # recover the signer from the (v, r, s) signature fields.
    msg_hash = hash_of_signed_transaction(txn)
    return self.recoverHash(msg_hash, vrs=vrs_from(txn))
python
{ "resource": "" }
q36762
Account.signHash
train
def signHash(self, message_hash, private_key):
    '''
    Sign the hash provided.

    .. WARNING:: *Never* sign a hash that you didn't generate,
        it can be an arbitrary transaction. For example, it might
        send all of your account's ether to an attacker.

    If you would like compatibility with
    :meth:`w3.eth.sign() <web3.eth.Eth.sign>`
    you can use :meth:`~eth_account.messages.defunct_hash_message`.

    Several other message standards are proposed, but none have a clear
    consensus. You'll need to manually comply with any of those message
    standards manually.

    :param message_hash: the 32-byte message hash to be signed
    :type message_hash: hex str, bytes or int
    :param private_key: the key to sign the message with
    :type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
    :returns: Various details about the signature - most
      importantly the fields: v, r, and s
    :rtype: ~eth_account.datastructures.AttributeDict

    .. code-block:: python

        >>> msg = "I♥SF"
        >>> from eth_account.messages import defunct_hash_message
        >>> msghash = defunct_hash_message(text=msg)
        HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
        >>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364"
        >>> Account.signHash(msghash, key)
        {'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'),  # noqa: E501
         'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563,
         's': 28205917190874851400050446352651915501321657673772411533993420917949420456142,
         'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'),  # noqa: E501
         'v': 28}

        # these are equivalent:
        >>> Account.signHash(
            0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750,
            key
        )
        >>> Account.signHash(
            "0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750",
            key
        )
    '''
    msg_hash_bytes = HexBytes(message_hash)
    # Only raw 32-byte hashes may be signed; anything else is probably a
    # mistake (e.g. an unhashed message).
    if len(msg_hash_bytes) != 32:
        raise ValueError("The message hash must be exactly 32-bytes")
    key = self._parsePrivateKey(private_key)
    (v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes)
    return AttributeDict({
        'messageHash': msg_hash_bytes,
        'r': r,
        's': s,
        'v': v,
        'signature': HexBytes(eth_signature_bytes),
    })
python
{ "resource": "" }
q36763
get_dependencies
train
def get_dependencies(primary_type, types):
    """Perform DFS to get all the dependencies of the primary_type"""
    found = set()
    pending = [primary_type]

    while pending:
        current = pending.pop()
        found.add(current)
        for field in types[current]:
            field_type = field["type"]
            # Only user-defined (customized) struct types need expanding,
            # and each struct is visited at most once.
            if field_type in types and field_type not in found:
                pending.append(field_type)

    # A struct is never considered a dependency of itself.
    found.discard(primary_type)
    return tuple(found)
python
{ "resource": "" }
q36764
is_valid_abi_type
train
def is_valid_abi_type(type_name):
    """
    This function is used to make sure that the ``type_name`` is a valid
    ABI Type.

    Please note that this is a temporary function and should be replaced by
    the corresponding ABI function, once the following issue has been
    resolved. https://github.com/ethereum/eth-abi/issues/125

    Returns ``True`` for the elementary types (``address``, ``bool``,
    ``bytes``, ``int``, ``string``, ``uint``) and for the sized variants
    ``bytes1``..``bytes32``, ``int8``..``int256`` and ``uint8``..``uint256``
    (multiples of 8); ``False`` for everything else.
    """
    # Bug fix: the original computed int(type_name[5:]) etc. eagerly, so
    # inputs like "bytes", "int", "uint" or "bytesX" raised ValueError
    # instead of being classified.  Check the exact names first and only
    # parse the suffix when it is all digits.
    valid_abi_types = {"address", "bool", "bytes", "int", "string", "uint"}
    if type_name in valid_abi_types:
        return True

    def _sized(prefix):
        """Return the integer suffix after `prefix`, or None if not digits."""
        suffix = type_name[len(prefix):]
        return int(suffix) if suffix.isdigit() else None

    if type_name.startswith("bytes"):
        size = _sized("bytes")
        # bytes1 to bytes32
        return size is not None and 1 <= size <= 32
    if type_name.startswith("uint"):
        size = _sized("uint")
        # uint8 to uint256, multiples of 8
        return size is not None and 8 <= size <= 256 and size % 8 == 0
    if type_name.startswith("int"):
        size = _sized("int")
        # int8 to int256, multiples of 8
        return size is not None and 8 <= size <= 256 and size % 8 == 0

    return False
python
{ "resource": "" }
q36765
get_depths_and_dimensions
train
def get_depths_and_dimensions(data, depth):
    """Yields 2-length tuples of depth and dimension of each element at that depth"""
    if isinstance(data, (list, tuple)):
        # Only genuine sequences count as a dimension here.
        yield depth, len(data)
        for sub_item in data:
            # Recurse into every element, one dimension deeper.
            yield from get_depths_and_dimensions(sub_item, depth + 1)
    else:
        # Dicts and strings are iterable too, but they terminate recursion.
        return ()
python
{ "resource": "" }
q36766
hash_of_signed_transaction
train
def hash_of_signed_transaction(txn_obj):
    '''
    Regenerate the hash of the signed transaction object.

    1. Infer the chain ID from the signature
    2. Strip out signature from transaction
    3. Annotate the transaction with that ID, if available
    4. Take the hash of the serialized, unsigned, chain-aware transaction

    Chain ID inference and annotation is according to EIP-155
    See details at https://github.com/ethereum/EIPs/blob/master/EIPS/eip-155.md

    :return: the hash of the provided transaction, to be signed
    '''
    (chain_id, _v) = extract_chain_id(txn_obj.v)
    unsigned_parts = strip_signature(txn_obj)
    if chain_id is None:
        # Pre-EIP-155 signature: hash the plain unsigned transaction.
        signable_transaction = UnsignedTransaction(*unsigned_parts)
    else:
        # EIP-155: append (chain_id, 0, 0) before hashing.
        extended_transaction = unsigned_parts + [chain_id, 0, 0]
        signable_transaction = ChainAwareUnsignedTransaction(*extended_transaction)
    return signable_transaction.hash()
python
{ "resource": "" }
q36767
extract_chain_id
train
def extract_chain_id(raw_v):
    '''
    Extracts chain ID, according to EIP-155

    :param raw_v: the raw `v` value from a transaction signature
    :return: ``(chain_id, v)`` where ``chain_id`` is ``None`` for
        pre-EIP-155 signatures and ``v`` is normalized to 27/28
    :raises ValueError: if ``raw_v`` is not 0, 1, 27, 28 or >= 35
    '''
    above_id_offset = raw_v - CHAIN_ID_OFFSET
    if above_id_offset < 0:
        if raw_v in {0, 1}:
            return (None, raw_v + V_OFFSET)
        elif raw_v in {27, 28}:
            return (None, raw_v)
        else:
            # Bug fix: the original raised the literal template string; the
            # "%r" placeholder was never interpolated with the bad value.
            raise ValueError(
                "v %r is invalid, must be one of: 0, 1, 27, 28, 35+" % raw_v)
    else:
        # EIP-155: v = chain_id * 2 + 35 + {0, 1}
        (chain_id, v_bit) = divmod(above_id_offset, 2)
        return (chain_id, v_bit + V_OFFSET)
python
{ "resource": "" }
q36768
get_occurrence
train
def get_occurrence(event_id, occurrence_id=None, year=None, month=None,
                   day=None, hour=None, minute=None, second=None,
                   tzinfo=None):
    """
    Because occurrences don't have to be persisted, there must be two ways to
    retrieve them. both need an event, but if its persisted the occurrence can
    be retrieved with an id. If it is not persisted it takes a date to
    retrieve it.  This function returns an event and occurrence regardless of
    which method is used.
    """
    if(occurrence_id):
        # Persisted occurrence: look it up directly, derive the event from it.
        occurrence = get_object_or_404(Occurrence, id=occurrence_id)
        event = occurrence.event
    elif None not in (year, month, day, hour, minute, second):
        # Unpersisted occurrence: rebuild the aware datetime and ask the
        # event for the occurrence at that exact moment.
        event = get_object_or_404(Event, id=event_id)
        date = timezone.make_aware(datetime.datetime(int(year), int(month),
                                                     int(day), int(hour),
                                                     int(minute),
                                                     int(second)), tzinfo)
        occurrence = event.get_occurrence(date)
        if occurrence is None:
            raise Http404
    else:
        # Neither an id nor a complete date was supplied.
        raise Http404
    return event, occurrence
python
{ "resource": "" }
q36769
CalendarManager.get_calendars_for_object
train
def get_calendars_for_object(self, obj, distinction=''):
    """
    This function allows you to get calendars for a specific object

    If distinction is set it will filter out any relation that doesnt have
    that distinction.
    """
    ct = ContentType.objects.get_for_model(obj)
    if distinction:
        dist_q = Q(calendarrelation__distinction=distinction)
    else:
        # Empty Q() is a no-op filter.
        dist_q = Q()
    return self.filter(dist_q,
                       calendarrelation__content_type=ct,
                       calendarrelation__object_id=obj.id)
python
{ "resource": "" }
q36770
CalendarRelationManager.create_relation
train
def create_relation(self, calendar, content_object, distinction='', inheritable=True):
    """
    Creates a relation between calendar and content_object.
    See CalendarRelation for help on distinction and inheritable
    """
    # Bug fix: `inheritable` was accepted but never passed to create(), so
    # callers' arguments were silently ignored and the model default was
    # always used.  NOTE(review): assumes CalendarRelation defines an
    # `inheritable` field (the docstring references it) — confirm the model.
    return CalendarRelation.objects.create(
        calendar=calendar,
        distinction=distinction,
        content_object=content_object,
        inheritable=inheritable)
python
{ "resource": "" }
q36771
EventListManager.occurrences_after
train
def occurrences_after(self, after=None):
    """
    It is often useful to know what the next occurrence is given a list of
    events.  This function produces a generator that yields the
    most recent occurrence after the date ``after`` from any of the events in
    ``self.events``
    """
    from schedule.models import Occurrence
    if after is None:
        after = timezone.now()
    # Persisted occurrences may override generated ones.
    occ_replacer = OccurrenceReplacer(
        Occurrence.objects.filter(event__in=self.events))
    generators = [event._occurrences_after_generator(after)
                  for event in self.events]
    occurrences = []

    # Seed a min-heap with the first occurrence of each event's generator;
    # generators that are already exhausted are simply skipped.
    for generator in generators:
        try:
            heapq.heappush(occurrences, (next(generator), generator))
        except StopIteration:
            pass

    while occurrences:
        # Pop the earliest occurrence and refill the heap from the same
        # generator (heapreplace does both in one step); drop the generator
        # once it is exhausted.
        generator = occurrences[0][1]
        try:
            next_occurrence = heapq.heapreplace(occurrences,
                                                (next(generator),
                                                 generator))[0]
        except StopIteration:
            next_occurrence = heapq.heappop(occurrences)[0]
        yield occ_replacer.get_occurrence(next_occurrence)
python
{ "resource": "" }
q36772
EventRelationManager.get_events_for_object
train
def get_events_for_object(self, content_object, distinction='', inherit=True):
    '''
    returns a queryset full of events, that relate to the object through, the
    distinction

    If inherit is false it will not consider the calendars that the events
    belong to. If inherit is true it will inherit all of the relations and
    distinctions that any calendar that it belongs to has, as long as the
    relation has inheritable set to True.  (See Calendar)

    >>> event = Event.objects.get(title='Test1')
    >>> user = User.objects.get(username = 'alice')
    >>> EventRelation.objects.get_events_for_object(user, 'owner', inherit=False)
    [<Event: Test1: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>]

    If a distinction is not declared it will not vet the relations based on
    distinction.

    >>> EventRelation.objects.get_events_for_object(user, inherit=False)
    [<Event: Test1: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>, <Event: Test2: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>]

    Now if there is a Calendar

    >>> calendar = Calendar(name = 'MyProject')
    >>> calendar.save()

    And an event that belongs to that calendar

    >>> event = Event.objects.get(title='Test2')
    >>> calendar.events.add(event)

    If we relate this calendar to some object with inheritable set to true,
    that relation will be inherited

    >>> user = User.objects.get(username='bob')
    >>> cr = calendar.create_relation(user, 'viewer', True)
    >>> EventRelation.objects.get_events_for_object(user, 'viewer')
    [<Event: Test1: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>, <Event: Test2: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>]
    '''
    ct = ContentType.objects.get_for_model(type(content_object))
    if distinction:
        dist_q = Q(eventrelation__distinction=distinction)
        cal_dist_q = Q(calendar__calendarrelation__distinction=distinction)
    else:
        dist_q = Q()
        cal_dist_q = Q()
    if inherit:
        # Events inherited through an inheritable calendar relation.
        inherit_q = Q(
            cal_dist_q,
            calendar__calendarrelation__content_type=ct,
            calendar__calendarrelation__object_id=content_object.id,
            calendar__calendarrelation__inheritable=True,
        )
    else:
        inherit_q = Q()
    # Events related to the object directly.
    event_q = Q(dist_q,
                eventrelation__content_type=ct,
                eventrelation__object_id=content_object.id)
    return Event.objects.filter(inherit_q | event_q)
python
{ "resource": "" }
q36773
EventRelationManager.create_relation
train
def create_relation(self, event, content_object, distinction=''):
    """Create and return an EventRelation linking `event` to `content_object`.

    See EventRelation for help on distinction.
    """
    return EventRelation.objects.create(
        event=event,
        distinction=distinction,
        content_object=content_object,
    )
python
{ "resource": "" }
q36774
init_db
train
def init_db():
    """
    Populate a small db with some example entries.
    """
    # Recreate the schema from scratch — this wipes any existing data.
    db.drop_all()
    db.create_all()

    # Create sample Post
    title = "de Finibus Bonorum et Malorum - Part I"
    text = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor \
incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud \
exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure \
dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. \
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt \
mollit anim id est laborum."
    post = Post(title=title, text=text)
    db.session.add(post)
    db.session.commit()
python
{ "resource": "" }
q36775
_CKEditor.load
train
def load(custom_url=None, pkg_type=None, serve_local=None, version='4.9.2'):
    """Load CKEditor resource from CDN or local.

    :param custom_url: The custom resource url to use, build your CKEditor
        on `CKEditor builder <https://ckeditor.com/cke4/builder>`_.
    :param pkg_type: The type of CKEditor package, one of ``basic``,
        ``standard`` and ``full``. Default to ``standard``.
        It's a mirror argument to overwrite ``CKEDITOR_PKG_TYPE``.
    :param serve_local: Mirror argument to overwrite ``CKEDITOR_SERVE_LOCAL``.
    :param version: The version of CKEditor.
    """
    pkg_type = pkg_type or current_app.config['CKEDITOR_PKG_TYPE']

    # Unknown package types fall back to 'standard' with a warning.
    if pkg_type not in ['basic', 'standard', 'full']:
        warnings.warn('The provided pkg_type string was invalid, '
                      'it should be one of basic/standard/full.')
        pkg_type = 'standard'

    if serve_local or current_app.config['CKEDITOR_SERVE_LOCAL']:
        url = url_for('ckeditor.static', filename='%s/ckeditor.js' % pkg_type)
    else:
        url = '//cdn.ckeditor.com/%s/%s/ckeditor.js' % (version, pkg_type)

    # An explicit custom_url overrides whatever was computed above.
    if custom_url:
        url = custom_url
    return Markup('<script src="%s"></script>' % url)
python
{ "resource": "" }
q36776
PyWiFi.interfaces
train
def interfaces(self):
    """Collect the available wlan interfaces."""
    # Rebuild the cached interface list from the platform backend.
    self._ifaces = []
    wifi_ctrl = wifiutil.WifiUtil()

    for interface in wifi_ctrl.interfaces():
        iface = Interface(interface)
        self._ifaces.append(iface)
        self._logger.info("Get interface: %s", iface.name())

    if not self._ifaces:
        # Logged but not raised — callers receive an empty list.
        self._logger.error("Can't get wifi interface")

    return self._ifaces
python
{ "resource": "" }
q36777
WifiUtil.network_profile_name_list
train
def network_profile_name_list(self, obj):
    """Get AP profile names."""
    profile_list = pointer(WLAN_PROFILE_INFO_LIST())
    self._wlan_get_profile_list(self._handle,
                                byref(obj['guid']),
                                byref(profile_list))
    # View the returned buffer as an array of WLAN_PROFILE_INFO structs.
    profiles = cast(profile_list.contents.ProfileInfo,
                    POINTER(WLAN_PROFILE_INFO))

    profile_name_list = []
    for i in range(profile_list.contents.dwNumberOfItems):
        profile_name = ''
        # Copy the fixed-size c_wchar array character by character.
        # NOTE(review): this appears to copy the full declared array length,
        # possibly including trailing NUL padding — confirm against the
        # WLAN_PROFILE_INFO definition.
        for j in range(len(profiles[i].strProfileName)):
            profile_name += profiles[i].strProfileName[j]
        profile_name_list.append(profile_name)

    return profile_name_list
python
{ "resource": "" }
q36778
WifiUtil.remove_network_profile
train
def remove_network_profile(self, obj, params):
    """Remove the specified AP profile."""
    self._logger.debug("delete profile: %s", params.ssid)
    # The WLAN API expects a wide-character profile name.
    str_buf = create_unicode_buffer(params.ssid)
    ret = self._wlan_delete_profile(self._handle, obj['guid'], str_buf)
    # Result code is only logged; failures are not raised to the caller.
    self._logger.debug("delete result %d", ret)
python
{ "resource": "" }
q36779
WifiUtil.remove_all_network_profiles
train
def remove_all_network_profiles(self, obj):
    """Remove all the AP profiles."""
    profile_name_list = self.network_profile_name_list(obj)

    for profile_name in profile_name_list:
        self._logger.debug("delete profile: %s", profile_name)
        # The WLAN API expects a wide-character profile name.
        str_buf = create_unicode_buffer(profile_name)
        ret = self._wlan_delete_profile(self._handle, obj['guid'], str_buf)
        # Result code is only logged; failures are not raised to the caller.
        self._logger.debug("delete result %d", ret)
python
{ "resource": "" }
q36780
WifiUtil.remove_network_profile
train
def remove_network_profile(self, obj, params):
    """Remove the specified AP profiles"""
    network_id = -1
    profiles = self.network_profiles(obj)

    # Find the wpa_supplicant network id matching the given profile.
    for profile in profiles:
        if profile == params:
            network_id = profile.id

    # Silently does nothing when no matching profile was found.
    if network_id != -1:
        self._send_cmd_to_wpas(obj['name'],
                               'REMOVE_NETWORK {}'.format(network_id))
python
{ "resource": "" }
q36781
Interface.scan
train
def scan(self):
    """Trigger the wifi interface to scan.

    This only starts the scan; results are collected asynchronously and
    read back via scan_results().
    """
    self._logger.info("iface '%s' scans", self.name())
    self._wifi_ctrl.scan(self._raw_obj)
python
{ "resource": "" }
q36782
Interface.scan_results
train
def scan_results(self):
    """Return the scan result (a list of discovered BSS entries)."""
    bsses = self._wifi_ctrl.scan_results(self._raw_obj)

    # Guard the verbose dump so formatting work is skipped when INFO is off.
    if self._logger.isEnabledFor(logging.INFO):
        for bss in bsses:
            self._logger.info("Find bss:")
            self._logger.info("\tbssid: %s", bss.bssid)
            self._logger.info("\tssid: %s", bss.ssid)
            self._logger.info("\tfreq: %d", bss.freq)
            self._logger.info("\tauth: %s", bss.auth)
            self._logger.info("\takm: %s", bss.akm)
            self._logger.info("\tsignal: %d", bss.signal)

    return bsses
python
{ "resource": "" }
q36783
Interface.network_profiles
train
def network_profiles(self):
    """Get all the AP profiles stored for this interface."""
    profiles = self._wifi_ctrl.network_profiles(self._raw_obj)

    # Guard the verbose dump so formatting work is skipped when INFO is off.
    if self._logger.isEnabledFor(logging.INFO):
        for profile in profiles:
            self._logger.info("Get profile:")
            self._logger.info("\tssid: %s", profile.ssid)
            self._logger.info("\tauth: %s", profile.auth)
            self._logger.info("\takm: %s", profile.akm)
            self._logger.info("\tcipher: %s", profile.cipher)

    return profiles
python
{ "resource": "" }
q36784
Interface.disconnect
train
def disconnect(self):
    """Disconnect from the currently associated AP."""
    self._logger.info("iface '%s' disconnects", self.name())
    self._wifi_ctrl.disconnect(self._raw_obj)
python
{ "resource": "" }
q36785
Source.label
train
def label(self):
    """Convert a module name to a formatted node label.

    This is a default policy - please override.
    """
    name = self.name
    if '.' in name and len(name) > 14:
        # Break long dotted names across lines using graphviz escapes
        # (literal "\." and "\n" sequences in the label source).
        return '\\.\\n'.join(name.split('.'))  # pragma: nocover
    return name
python
{ "resource": "" }
q36786
DepGraph.proximity_metric
train
def proximity_metric(self, a, b):
    """Return the weight of the dependency from a to b.

    Higher weights usually have shorter straighter edges. Return 1 if
    it has normal weight. A value of 4 is usually good for ensuring
    that a related pair of modules are drawn next to each other.

    Returns an int between 1 (unknown, default), and 4 (very related).
    """
    weight = 1
    # Compare at most the first four path components position by position.
    for position, (part_a, part_b) in enumerate(zip(a.path_parts,
                                                    b.path_parts)):
        weight += part_a == part_b
        if position >= 3:
            break
    return weight
python
{ "resource": "" }
q36787
DepGraph.dissimilarity_metric
train
def dissimilarity_metric(self, a, b):
    """Return non-zero if references to this module are strange, and should
    be drawn extra-long. The value defines the length, in rank.

    This is also good for putting some vertical space between seperate
    subsystems.

    Returns an int between 1 (default) and 4 (highly unrelated).
    """
    distance = 4
    # Compare up to four name components; zip_longest pads the shorter name
    # with None, and None == None also reduces the distance (matching the
    # original behaviour for short names).
    for part_a, part_b, position in zip_longest(a.name_parts, b.name_parts,
                                                range(4)):
        if part_a == part_b:
            distance -= 1
        if position >= 3:
            break
    return distance
python
{ "resource": "" }
q36788
DepGraph.connect_generations
train
def connect_generations(self):
    """Traverse depth-first adding imported_by."""
    for source in self.sources.values():
        for imported_name in source.imports:
            # Only record the back-link for modules we actually track.
            target = self.sources.get(imported_name)
            if target is not None:
                target.imported_by.add(source.name)
python
{ "resource": "" }
q36789
DepGraph.remove_excluded
train
def remove_excluded(self):
    """Remove all sources marked as excluded."""
    # Iterate over a snapshot since we delete from self.sources while looping.
    for source in list(self.sources.values()):
        if source.excluded:
            del self.sources[source.name]
        # Filter excluded names out of every source's link lists, including
        # the ones just removed (matches the original behaviour).
        source.imports = [name for name in source.imports
                          if not self._exclude(name)]
        source.imported_by = [name for name in source.imported_by
                              if not self._exclude(name)]
python
{ "resource": "" }
q36790
to_bytes
train
def to_bytes(s):  # pragma: nocover
    """Convert an item into bytes."""
    # Already bytes: pass through untouched.
    if isinstance(s, bytes):
        return s
    # Text: encode as UTF-8 (is_unicode covers Python 2 `unicode`).
    if isinstance(s, str) or is_unicode(s):
        return s.encode("utf-8")
    # Anything else: stringify first.  `unicode` only exists on Python 2;
    # the NameError fallback handles Python 3.
    try:
        return unicode(s).encode("utf-8")
    except NameError:
        return str(s).encode("utf-8")
python
{ "resource": "" }
q36791
cmd2args
train
def cmd2args(cmd):
    """Prepare a command line for execution by Popen."""
    # Already a list/sequence: Popen can take it as-is.
    if not isinstance(cmd, str):
        return cmd
    # On Windows Popen accepts the raw command string; elsewhere split it
    # shell-style into an argument list.
    return cmd if win32 else shlex.split(cmd)
python
{ "resource": "" }
q36792
pipe
train
def pipe(cmd, txt):
    """Pipe `txt` into the command `cmd` and return the output.

    `txt` must be bytes; the command's stdout (bytes) is returned.
    """
    # shell=True only on Windows, where the command stays a single string.
    return Popen(
        cmd2args(cmd),
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE,
        shell=win32
    ).communicate(txt)[0]
python
{ "resource": "" }
q36793
dot
train
def dot(src, **kw):
    """Execute the dot command to create an svg output.

    Keyword arguments become dot command-line switches: a value of True
    yields a bare flag ("-k"), anything else is appended directly ("-kvalue").
    The special `T` keyword selects the output format (default "svg").
    """
    parts = ["dot -T%s" % kw.pop('T', 'svg')]
    for option, value in kw.items():
        if value is True:
            parts.append("-%s" % option)
        else:
            parts.append("-%s%s" % (option, value))
    return pipe(" ".join(parts), to_bytes(src))
python
{ "resource": "" }
q36794
call_graphviz_dot
train
def call_graphviz_dot(src, fmt):
    """Call dot command, and provide helpful error message if we cannot find
    it.
    """
    try:
        svg = dot(src, T=fmt)
    except OSError as e:  # pragma: nocover
        # errno 2 == ENOENT: the `dot` executable was not found on PATH.
        # NOTE(review): on Python 3 this would surface as FileNotFoundError
        # (a subclass of OSError), so this branch still matches — confirm.
        if e.errno == 2:
            cli.error("""
               cannot find 'dot'

               pydeps calls dot (from graphviz) to create svg diagrams,
               please make sure that the dot executable is available
               on your path.
            """)
        raise
    return svg
python
{ "resource": "" }
q36795
display_svg
train
def display_svg(kw, fname):  # pragma: nocover
    """Try to display the svg file on this platform.

    If kw['display'] is None a platform-default opener is used; otherwise
    the given program is invoked with the filename.
    """
    if kw['display'] is None:
        cli.verbose("Displaying:", fname)
        if sys.platform == 'win32':
            os.startfile(fname)
        else:
            # macOS uses `open`; other unixes use xdg-open.
            opener = "open" if sys.platform == "darwin" else "xdg-open"
            subprocess.call([opener, fname])
    else:
        cli.verbose(kw['display'] + " " + fname)
        # User-supplied viewer command, run through the shell.
        os.system(kw['display'] + " " + fname)
python
{ "resource": "" }
q36796
pystdlib
train
def pystdlib():
    """Return a set of all module-names in the Python standard library."""
    # Ask stdlib_list for the running interpreter's major.minor version,
    # then add internal/implementation modules it does not report, and
    # drop '__main__' which is never a real library module.
    curver = '.'.join(str(x) for x in sys.version_info[:2])
    return (set(stdlib_list.stdlib_list(curver)) | {
        '_LWPCookieJar', '_MozillaCookieJar', '_abcoll', 'email._parseaddr',
        'email.base64mime', 'email.feedparser', 'email.quoprimime',
        'encodings', 'genericpath', 'ntpath', 'nturl2path', 'os2emxpath',
        'posixpath', 'sre_compile', 'sre_parse', 'unittest.case',
        'unittest.loader', 'unittest.main', 'unittest.result',
        'unittest.runner', 'unittest.signals', 'unittest.suite',
        'unittest.util', '_threading_local', 'sre_constants', 'strop',
        'repr', 'opcode', 'nt', 'encodings.aliases', '_bisect', '_codecs',
        '_collections', '_functools', '_hashlib', '_heapq', '_io',
        '_locale', '_LWPCookieJar', '_md5', '_MozillaCookieJar', '_random',
        '_sha', '_sha256', '_sha512', '_socket', '_sre', '_ssl', '_struct',
        '_subprocess', '_threading_local', '_warnings', '_weakref',
        '_weakrefset', '_winreg'
    }) - {'__main__'}
python
{ "resource": "" }
q36797
ModuleFinder.report
train
def report(self):  # pragma: nocover
    """Print a report to stdout, listing the found modules with their
    paths, as well as modules that are missing, or seem to be missing.
    """
    print()
    print(" %-25s %s" % ("Name", "File"))
    print(" %-25s %s" % ("----", "----"))
    # Print modules found ("P" marks packages, "m" plain modules).
    for key in sorted(self.modules.keys()):
        m = self.modules[key]
        if m.__path__:
            print("P", end=' ')
        else:
            print("m", end=' ')
        print("%-25s" % key, m.__file__ or "")

    # Print missing modules
    missing, maybe = self.any_missing_maybe()
    if missing:
        print()
        print("Missing modules:")
        for name in missing:
            mods = sorted(self.badmodules[name].keys())
            print("?", name, "imported from", ', '.join(mods))
    # Print modules that may be missing, but then again, maybe not...
    if maybe:
        print()
        # Fix: corrected the user-facing typo "thay" -> "that".
        print("Submodules that appear to be missing, but could also be", end=' ')
        print("global names in the parent package:")
        for name in maybe:
            mods = sorted(self.badmodules[name].keys())
            print("?", name, "imported from", ', '.join(mods))
python
{ "resource": "" }
q36798
name2rgb
train
def name2rgb(hue):
    """Originally used to calculate color based on module name.

    Maps a hue in degrees to an (r, g, b) tuple of 0-255 ints, using fixed
    saturation 0.8 and value 0.7.
    """
    rgb_fractions = colorsys.hsv_to_rgb(hue / 360.0, .8, .7)
    return tuple(int(channel * 256) for channel in rgb_fractions)
python
{ "resource": "" }
q36799
foreground
train
def foreground(background, *options):
    """Find the best foreground color from `options` based on `background`
    color.
    """
    def absdiff(a, b):
        # Currently only brightness contrast is considered.
        return brightnessdiff(a, b)

    # Rank candidates by contrast (ties broken by the color value itself,
    # matching the tuple ordering of the original sort) and pick the best.
    ranked = sorted(((absdiff(background, option), option)
                     for option in options), reverse=True)
    return ranked[0][1]
python
{ "resource": "" }