_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
def iterate(t_table, wordlist, stanzas, schemes, rprobs, maxsteps):
    """
    Run at most ``maxsteps`` rounds of EM and return the final
    per-stanza/per-scheme log-probabilities.

    :param t_table: rhyme-strength table, re-estimated each round
    :param wordlist: list of all words (only its length is used here)
    :param stanzas: stanzas to analyse
    :param schemes: candidate rhyme schemes
    :param rprobs: prior probabilities over schemes
    :param maxsteps: maximum number of EM iterations
    :return: log-probability matrix from the last expectation step
    """
    data_probs = numpy.zeros(len(stanzas))
    old_data_probs = None
    probs = None
    num_words = len(wordlist)
    # Start at -1 so the "ran N iterations" log below (ctr + 1) is correct
    # even when maxsteps <= 0 and the loop body never executes.
    ctr = -1
    for ctr in range(maxsteps):
        logging.info("Iteration {}".format(ctr))
        old_data_probs = data_probs
        logging.info("Expectation step")
        probs = expectation_step(t_table, stanzas, schemes, rprobs)
        logging.info("Maximization step")
        t_table, rprobs = maximization_step(num_words, stanzas, schemes, probs)
        # Per-stanza data likelihood for this round, used for convergence check.
        data_probs = numpy.logaddexp.reduce(probs, axis=1)
        # Warn if the likelihood was still moving on the final iteration.
        if ctr == maxsteps - 1 and not numpy.allclose(data_probs, old_data_probs):
            logging.warning("Warning: EM did not converge")
    # BUGFIX: message said "interations" and under-reported the count by one
    # (after a full run, ctr == maxsteps - 1 but maxsteps iterations ran).
    logging.info("Stopped after {} iterations".format(ctr + 1))
    return probs
def print_results(results, outfile):
    """
    Write results to ``outfile`` and close it.

    Each result is written as a space-joined line of stanza words,
    followed by a space-joined line of its scheme codes and a blank
    separator line.

    :param results: iterable of (stanza_words, scheme) pairs
    :param outfile: writable file object; closed before returning
    """
    for stanza_words, scheme in results:
        # The str(' ') / str('\n') wrappers were redundant 2to3/futurize
        # artifacts; plain literals behave identically.
        outfile.write(' '.join(stanza_words) + '\n')
        outfile.write(' '.join(map(str, scheme)) + '\n\n')
    outfile.close()
    logging.info("Wrote result")
def main(args_list=None):
    """
    Command-line wrapper around find_schemes.

    Parses arguments, loads stanzas, selects the theta-initialisation
    strategy and writes the discovered schemes to the output file.

    :param args_list: argument vector; defaults to ``sys.argv[1:]``
    """
    args_list = args_list or sys.argv[1:]
    parser = argparse.ArgumentParser(description='Discover schemes of given stanza file')
    parser.add_argument(
        'infile',
        type=argparse.FileType('r'),
    )
    parser.add_argument(
        'outfile',
        help='Where the result is written to. Default: stdout',
        nargs='?',
        type=argparse.FileType('w'),
        default=sys.stdout,
    )
    # BUGFIX: option strings must be separate arguments; the single string
    # '-t --init-type' registered one malformed option name.
    parser.add_argument(
        '-t', '--init-type',
        help='Whether to initialize theta uniformly (u), with the orthographic similarity '
             'measure (o), or using CELEX pronunciations and definition of rhyme (p). '
             'The last one requires you to have CELEX on your machine.',
        dest='init_type',
        choices=('u', 'o', 'p', 'd'),
        default='o',
    )
    # BUGFIX: '-i, --iterations' (comma inside one string) created a single
    # bogus option; split into the intended short and long forms.
    parser.add_argument(
        '-i', '--iterations',
        help='Number of iterations (default: 10)',
        dest='num_iterations',
        type=int,
        default=10,
    )
    parser.add_argument(
        '-v', '--verbose',
        help="Verbose output",
        action="store_const",
        dest="loglevel",
        const=logging.INFO,
    )
    args = parser.parse_args(args_list)
    logging.basicConfig(level=args.loglevel)
    stanzas = load_stanzas(args.infile)
    # Map the one-letter init code to the matching initialisation function;
    # argparse's `choices` guarantees the key exists.
    init_functions = {
        'u': init_uniform_ttable,
        'o': init_basicortho_ttable,
        'p': celex.init_perfect_ttable,
        'd': init_difflib_ttable,
    }
    init_function = init_functions[args.init_type]
    results = find_schemes(stanzas, init_function, args.num_iterations)
    print_results(results, args.outfile)
def set_word_indices(self, wordlist):
    """
    Populate ``self.word_indices``, mapping each word of ``self.words``
    to its position in ``wordlist``.

    Builds a word->index map once instead of calling ``list.index`` per
    word, turning the original O(len(words) * len(wordlist)) scan into
    O(len(words) + len(wordlist)).

    :param wordlist: global word list; first occurrence wins on duplicates
    :raises ValueError: if a word is missing, matching ``list.index``
    """
    positions = {}
    for index, word in enumerate(wordlist):
        # setdefault keeps the first occurrence, exactly like list.index.
        positions.setdefault(word, index)
    try:
        self.word_indices = [positions[word] for word in self.words]
    except KeyError as exc:
        # Preserve the exception type list.index would have raised.
        raise ValueError('{0!r} is not in list'.format(exc.args[0]))
def _parse_scheme_file(self):
    """
    Build redundant lookup structures from the JSON scheme file.

    Returns a pair ``(scheme_list, scheme_dict)`` where ``scheme_list``
    holds every scheme as a tuple of ints (file order preserved) and
    ``scheme_dict`` maps a scheme length to the indices into
    ``scheme_list`` of all schemes of that length.
    """
    raw = json.loads(self.scheme_file.read(), object_pairs_hook=OrderedDict)
    all_schemes = []
    by_length = defaultdict(list)
    for length_key, group in raw.items():
        length = int(length_key)
        for encoded, _count in group:
            all_schemes.append(tuple(int(tok) for tok in encoded.split(' ')))
            # Remember where this scheme landed, keyed by its length.
            by_length[length].append(len(all_schemes) - 1)
    return all_schemes, by_length
def _create_output_from_match(self, match_result):
    """Build a LinterOutput, converting isort's full path to a relative one."""
    relative_path = self._get_relative_path(match_result['full_path'])
    message = match_result['msg']
    return LinterOutput(self.name, relative_path, message)
q43806 | _generate_one_ephemeris | train | def _generate_one_ephemeris(
cmd):
"""generate one orbfit ephemeris
**Key Arguments:**
- ``cmd`` -- the command to execute [cmd, object]
**Return:**
- ``results`` -- the single ephemeris results
"""
global cmdList
cmd = cmdList[cmd]
results = []
for c in cmd:
p = Popen(c[0], stdout=PIPE, stderr=PIPE, shell=True)
stdout, stderr = p.communicate()
if len(stderr) and len(stderr.split()) != 15:
print stderr, len(stderr.split())
return None
elif "!!WARNING! WARNING! WARNING! WARNING!!" in stdout:
print "%(stdout)s was not found in astorb.dat" % locals()
return None
# SPLIT RESULTS INTO LIST OF DICTIONARIES
r = stdout.strip().split("\n")
keys = r[0].strip().split(',')
lines = r[1:]
for l in lines:
# CREATE DICTIONARY FROM KEYS AND VALUES
values = l.strip().split(',')
for k, v in zip(keys, values):
v = v.strip().replace("/", "")
try:
v = float(v)
except:
pass
result = dict(zip(keys, values))
result["object_name"] = c[1]
results.append(result)
return results | python | {
"resource": ""
} |
def text(short):
    """Compile short (SHPAML) markup text into an HTML string."""
    options = {
        'branch_method': html_block_tag,
        'leaf_method': convert_line,
        'pass_syntax': PASS_SYNTAX,
        'flush_left_syntax': FLUSH_LEFT_SYNTAX,
        'flush_left_empty_line': FLUSH_LEFT_EMPTY_LINE,
        'indentation_method': find_indentation,
    }
    return indent(short, **options)
def get_indented_block(prefix_lines):
    """Return the number of leading lines that form one indented block.

    The block starts at ``prefix_lines[0]`` and extends over every
    following line that is blank or indented deeper than the first line,
    except that trailing blank lines are not counted as part of it.

    Parameters
    ----------
    prefix_lines : list of basestring pairs
        One (indentation, content) pair per source line, with the
        trailing newline stripped from the content part.
    """
    base_width = len(prefix_lines[0][0])
    total = len(prefix_lines)
    count = 1
    # Advance past lines that are blank or indented deeper than the head.
    while count < total:
        indentation, content = prefix_lines[count]
        if content and len(indentation) <= base_width:
            break
        count += 1
    # Trailing blank lines belong to whatever follows, not to this block.
    while count > 1 and not prefix_lines[count - 1][1]:
        count -= 1
    return count
def indent(text, branch_method, leaf_method, pass_syntax, flush_left_syntax, flush_left_empty_line, indentation_method,
           get_block=get_indented_block):
    """Returns HTML as a basestring.

    Parameters
    ----------
    text : basestring
        Source code, typically SHPAML, but could be a different (but
        related) language. The remaining parameters specify details
        about the language used in the source code.
    branch_method : function
        Renders a multi-line block (convert_shpaml_tree passes html_block_tag).
    leaf_method : function
        Renders a single line (convert_shpaml_tree passes convert_line).
    pass_syntax : basestring
        Marker for lines to drop entirely.
    flush_left_syntax : basestring
        Prefix for lines emitted without indentation.
    flush_left_empty_line : basestring
        Marker for an explicitly empty output line.
    indentation_method : function
        Splits a line into an (indentation, content) pair.
    get_block : function
        Finds how many lines belong to the block starting at the current
        line. Defaults to get_indented_block.
    """
    text = text.rstrip()
    lines = text.split('\n')
    # Expand the '!! ' shorthand on the first line into a DOCTYPE declaration.
    if lines and lines[0].startswith('!! '):
        lines[0] = lines[0].replace('!! ', '<!DOCTYPE ') + '>'
    output = []
    # BUGFIX: the get_block parameter was previously ignored -- the call
    # below hard-coded get_indented_block, so a caller-supplied block
    # finder never took effect. Forward the parameter instead.
    indent_lines(lines, output, branch_method, leaf_method, pass_syntax, flush_left_syntax, flush_left_empty_line,
                 indentation_method, get_block=get_block)
    return '\n'.join(output) + '\n'
def indent_lines(lines, output, branch_method, leaf_method, pass_syntax, flush_left_syntax, flush_left_empty_line,
                 indentation_method, get_block):
    """Render source lines into ``output`` (a list of HTML strings).

    Each line is split into an (indentation, content) pair with
    ``indentation_method``; ``get_block`` reports how many lines belong
    to the block starting at the current line.  One-line blocks are
    emitted through ``leaf_method`` (honouring the pass / flush-left
    escapes), while multi-line blocks are delegated to ``branch_method``
    together with a recursion callback.

    Parameters mirror those of :func:`indent`; results are appended to
    ``output`` and the function returns None.
    """
    emit = output.append

    def walk(pending):
        while pending:
            indentation, content = pending[0]
            if content == '':
                # Preserve blank lines verbatim.
                pending.pop(0)
                emit('')
                continue
            size = get_block(pending)
            if size != 1:
                # Multi-line block: hand head + body to the branch renderer.
                head, pending = pending[:size], pending[size:]
                branch_method(output, head, walk)
                continue
            pending.pop(0)
            if content == pass_syntax:
                continue
            if content.startswith(flush_left_syntax):
                emit(content[len(flush_left_syntax):])
            elif content.startswith(flush_left_empty_line):
                emit('')
            else:
                emit(indentation + leaf_method(content))

    walk([indentation_method(line) for line in lines])
def _write_reqs(amend: bool = False, stage: bool = False):
    """
    Write the requirements files from the Pipenv lock data.

    Args:
        amend: amend last commit with changes
        stage: stage changes
    """
    LOGGER.info('writing requirements')
    lock_cmd = 'pipenv lock -r'
    _write_reqs_file(lock_cmd, 'requirements.txt')
    _write_reqs_file(lock_cmd + ' -d', 'requirements-dev.txt')
    touched = ['Pipfile', 'requirements.txt', 'requirements-dev.txt']
    if amend:
        CTX.repo.amend_commit(append_to_msg='update requirements [auto]', files_to_add=touched)
    elif stage:
        CTX.repo.stage_subset(*touched)
def reqs(amend: bool = False, stage: bool = False):
    """
    Write requirements files, refusing to clobber local edits.

    Args:
        amend: amend last commit with changes
        stage: stage changes
    """
    changed = CTX.repo.changed_files()
    # Refuse to overwrite requirement files that already have local changes.
    if any(name in changed for name in ('requirements.txt', 'requirements-dev.txt')):
        LOGGER.error('Requirements have changed; cannot update them')
        sys.exit(-1)
    _write_reqs(amend, stage)
def chunks(data, size):
    """
    Yield successive slices of ``data``, each at most ``size`` items long.
    """
    total = len(data)
    for start in range(0, total, size):
        end = start + size
        yield data[start:end]
def connect(self, host, port=6667, password=None):
    """
    Connect to an IRC server and start the client loop in a daemon thread.
    """
    # Route every IRC event through our dispatcher.
    self._irc.add_global_handler('all_events', self.__handler)
    # Open the server connection with our identity.
    server = self._irc.server()
    self._connection = server.connect(host, port, self._nickname, password,
                                      self._username, self._realname)
    # Launch the background processing loop.
    self.__stopped.clear()
    loop_thread = threading.Thread(target=self.__loop, name="IRC-Bot-Loop")
    loop_thread.daemon = True
    self.__thread = loop_thread
    loop_thread.start()
def __handler(self, connection, event):
    """
    Dispatch an IRC event to the matching ``on_<event type>`` method of
    this instance, if one exists; handler errors are logged, not raised.
    """
    method = getattr(self, "on_{0}".format(event.type), None)
    if method is None:
        # No specific handler registered for this event type.
        return
    try:
        return method(connection, event)
    except Exception as ex:
        logging.exception("Error calling handler: %s", ex)
def close(self):
    """
    Disconnect from the server and stop the client loop.
    """
    connection = self._connection
    if connection is not None:
        # Quit politely before dropping the socket.
        connection.disconnect("Bot is quitting")
        connection.close()
        self._connection = None
    # Signal the loop to stop and wait briefly for its thread to finish.
    self.__stopped.set()
    thread = self.__thread
    if thread is not None:
        try:
            thread.join(5)
        except RuntimeError:
            # Thread was never started (or is the current thread).
            pass
        self.__thread = None
def wait(self, timeout=None):
    """
    Block until the client loop stops, or the timeout elapses.

    :param timeout: maximum seconds to wait, or None for no limit
    :return: True if the loop has stopped, else False
    """
    stop_event = self.__stopped
    stop_event.wait(timeout)
    return stop_event.is_set()
def on_privmsg(self, connection, event):
    """
    Handle a private message from a user.

    Messages from NickServ drive the identification dance (send the
    password when asked, join #cohorte otherwise); anything else is
    forwarded to handle_message with the sender as both source and
    reply target.
    """
    sender = self.get_nick(event.source)
    message = event.arguments[0]
    if sender != 'NickServ':
        self.handle_message(connection, sender, sender, message)
        return
    logging.info("Got message from NickServ: %s", message)
    if "password" in message.lower():
        connection.privmsg("NickServ", "pass")
    else:
        connection.join('#cohorte')
def handle_message(self, connection, sender, target, message):
    """
    Handle a received message: lines of the form ``!bot <command>
    [payload]`` are queued for asynchronous handling; everything else
    is ignored.
    """
    parts = message.strip().split(' ', 2)
    if not parts or parts[0].lower() != '!bot':
        return
    if len(parts) < 2:
        self.safe_send(connection, target, "No command given")
        return
    command = parts[1].lower()
    payload = parts[2] if len(parts) > 2 else ""
    self.__pool.enqueue(self._handle_command,
                        connection, sender, target, command, payload)
def on_invite(self, connection, event):
    """
    Handle a channel invitation: join the channel if we are the
    invitee, otherwise just log who invited whom.
    """
    inviter = self.get_nick(event.source)
    invitee = self.get_nick(event.target)
    channel = event.arguments[0]
    if invitee != self._nickname:
        logging.info(">> %s invited %s to %s", inviter, invitee, channel)
        return
    logging.info("! I am invited to %s by %s", channel, inviter)
    connection.join(channel)
def _handle_command(self, connection, sender, target, command, payload):
    """
    Look up and invoke the ``cmd_<command>`` handler; unknown commands
    are reported back to the target, handler failures are only logged.
    """
    handler = getattr(self, "cmd_{0}".format(command), None)
    if handler is None:
        self.safe_send(connection, target, "Unknown command: %s",
                       command)
        return
    try:
        logging.info("! Handling command: %s", command)
        handler(connection, sender, target, payload)
    except Exception as ex:
        logging.exception("Error calling command handler: %s", ex)
def safe_send(self, connection, target, message, *args, **kwargs):
    """
    Send a (formatted) message to ``target``, split into chunks that
    fit within the IRC line length limit.
    """
    header = "PRIVMSG {0} :".format(target)
    # 510 = 512-byte IRC line limit minus the trailing CR-LF.
    room = 510 - len(header)
    payload = message.format(*args, **kwargs)
    for piece in chunks(payload, room):
        connection.send_raw("{0}{1}".format(header, piece))
def __notify(self, sender, content):
    """
    Invoke the registered message listener, if any; listener errors
    are logged instead of propagating.
    """
    callback = self.handle_message
    if callback is None:
        return
    try:
        callback(sender, content)
    except Exception as ex:
        logging.exception("Error calling message listener: %s", ex)
def _make_line(self, uid, command=None):
    """
    Build a Herald protocol line: ``HRLD:<command>:<uid>`` when a
    command is given, otherwise ``HRLD:<uid>``.
    """
    fields = ("HRLD", command, uid) if command else ("HRLD", uid)
    return ":".join(fields)
def send_message(self, target, content, uid=None):
    """
    Send ``content`` to ``target`` over IRC in Herald's wire format.

    Short payloads go out as a single ``MSG`` line; longer ones are
    framed between BEGIN/END markers sharing one UID and split into
    chunks that fit the IRC line length limit.
    """
    header = "PRIVMSG {0} :".format(target)
    single_prefix = self._make_line("MSG:")
    limit = 510 - len(header)
    if len(content) + len(single_prefix) < limit:
        # Fits in one IRC line.
        self._connection.send_raw("{0}{1}{2}".format(header, single_prefix, content))
        return
    # Multi-line transfer framed by BEGIN/END markers.
    uid = uid or str(uuid.uuid4()).replace('-', '').upper()
    header = "{0}{1}:".format(header, self._make_line(uid))
    limit = 510 - len(header)
    self._connection.privmsg(target, self._make_line(uid, "BEGIN"))
    for piece in chunks(content, limit):
        self._connection.send_raw(''.join((header, piece)))
    self._connection.privmsg(target, self._make_line(uid, "END"))
def __make_message(self, topic, content):
    """
    Build a message dict with a fresh upper-case hex UID, the topic
    and the content.
    """
    uid = str(uuid.uuid4()).replace('-', '').upper()
    return {"uid": uid, "topic": topic, "content": content}
def _notify_listeners(self, sender, message):
    """
    Dispatch a new message to every listener whose topic pattern
    matches, acknowledging each protocol stage ('fire', 'notice',
    'send') back to the sender.
    """
    uid = message['uid']
    msg_topic = message['topic']
    # Stage 1: acknowledge reception.
    self._ack(sender, uid, 'fire')
    # Collect listeners whose registered fnmatch pattern matches the topic.
    all_listeners = set()
    for lst_topic, listeners in self.__listeners.items():
        if fnmatch.fnmatch(msg_topic, lst_topic):
            all_listeners.update(listeners)
    # Stage 2: report whether anyone will handle it.
    self._ack(sender, uid, 'notice', 'ok' if all_listeners else 'none')
    try:
        results = []
        for listener in all_listeners:
            result = listener.handle_message(sender,
                                             message['topic'],
                                             message['content'])
            if result:
                results.append(result)
        self._ack(sender, uid, 'send', json.dumps(results))
    except Exception:
        # BUGFIX: was a bare "except:" that silently swallowed everything,
        # including SystemExit/KeyboardInterrupt; narrow the clause and
        # log the listener failure before reporting the error back.
        logging.exception("Error notifying listeners for message %s", uid)
        self._ack(sender, uid, 'send', "Error")
def _ack(self, sender, uid, level, payload=None):
    """
    Send a JSON reply for message ``uid`` at the given protocol level.
    """
    reply = {
        'reply-to': uid,
        'reply-level': level,
        'payload': payload,
    }
    self.__client.send_message(sender, json.dumps(reply))
def on_message(self, sender, content):
    """
    Entry point for messages coming from the IRC client.

    The JSON payload is either a brand new message (queued for the
    listeners) or a reply to a message we sent (routed to the callback
    registered for that message UID and reply level).
    """
    try:
        message = json.loads(content)
    except (ValueError, TypeError) as ex:
        logging.error("Not a valid JSON string: %s", ex)
        return
    try:
        reply_uid = message['reply-to']
        reply_level = message['reply-level']
    except KeyError:
        # No reply fields: this is a new message for our listeners.
        logging.info("Got message %s from %s", message['content'], sender)
        self.__pool.enqueue(self._notify_listeners, sender, message)
        return
    # This is a reply: find whoever is waiting for it.
    try:
        level, callback = self.__callbacks[reply_uid]
    except KeyError:
        # Nobody registered a callback for this UID.
        return
    if level != reply_level:
        # Reply for a different protocol stage: ignore it.
        return
    try:
        callback(sender, message['payload'])
    except Exception as ex:
        logging.exception("Error notifying sender: %s", ex)
def t_php_OBJECT_OPERATOR(t):
    r'->'
    # NOTE: the raw-string literal above is not an ordinary docstring --
    # PLY uses it as this token's regular expression; do not edit it.
    # If the '->' is immediately followed by an identifier character,
    # switch the lexer into the 'property' state so the following name
    # is tokenized as a property access rather than a plain identifier.
    if re.match(r'[A-Za-z_]', peek(t.lexer)):
        t.lexer.push_state('property')
    return t
def has_commit(self, client_key=None):
    """
    Return True if client has new commit.

    :param client_key: The client key; when omitted, the current client
        is queried instead
    :type client_key: str
    :raises ClientNotExist: when neither a key nor a current client is
        available, or when the key is unknown
    :return:
    :rtype: boolean
    """
    if client_key is None and self.current_client is None:
        raise ClientNotExist()
    if client_key:
        if not self.clients.has_client(client_key):
            raise ClientNotExist()
        return self.clients.get_client(client_key).has_commit()
    if self.current_client:
        return self.current_client.has_commit()
    return False
def init(ciprcfg, env, console):
    """
    Initialize a Corona project directory.
    """
    ciprcfg.create()
    # Copy the default skeleton, leaving any existing files untouched.
    skeleton = path.join(env.skel_dir, 'default')
    console.quiet('Copying files from %s' % skeleton)
    for src, dst in util.sync_dir_to(skeleton, env.project_directory, ignore_existing=True):
        console.quiet(' %s -> %s' % (src, dst))
    # Install the development cipr bootstrap as the project's cipr.lua.
    bootstrap_src = path.join(env.code_dir, 'cipr.dev.lua')
    bootstrap_dst = path.join(env.project_directory, 'cipr.lua')
    console.quiet(' %s -> %s' % (bootstrap_src, bootstrap_dst))
    shutil.copy(bootstrap_src, bootstrap_dst)
def update(env):
    """
    Update an existing cipr project to the latest installed version.
    """
    # Remove the generated bootstrap so init can lay it down fresh.
    stale = [path.join(env.project_directory, 'cipr.lua')]
    for filename in stale:
        if path.exists(filename):
            os.remove(filename)
    app.command.run(['init', env.project_directory])
def install(args, console, env, ciprcfg, opts):
    """
    Install a package from github and make it available for use.

    With no positional args, re-installs every package recorded in the
    current project's cipr config; otherwise installs each given source
    (git URL or local path), recursing into declared dependencies via
    nested ``app.command.run(['install', ...])`` calls.

    :param args: package sources to install (may be empty)
    :param console: output helper
    :param env: environment holding the package directory
    :param ciprcfg: project cipr configuration
    :param opts: parsed options; ``opts.upgrade`` forces a re-install
    """
    if len(args) == 0:
        # Is this a cipr project?
        if ciprcfg.exists:
            # Install all the packages for this project
            console.quiet('Installing current project packages...')
            for name, source in ciprcfg.packages.items():
                if opts.upgrade:
                    app.command.run(['install', '--upgrade', source])
                else:
                    app.command.run(['install', source])
        else:
            console.error('No cipr project or package found.')
            return
    else:
        for source in args:
            # Parse the source spec into its components; `type` tells us
            # whether this is a git clone or a local path.
            package, name, version, type = _package_info(source)
            if not path.exists(env.package_dir):
                os.makedirs(env.package_dir)
            package_dir = path.join(env.package_dir, name)
            if path.exists(package_dir):
                if opts.upgrade:
                    app.command.run(['uninstall', name])
                else:
                    console.quiet('Package %s already exists. Use --upgrade to force a re-install.' % name)
                    return
            console.quiet('Installing %s...' % name)
            if type == 'git':
                # Clone into a temp dir, optionally pinning to a version.
                tmpdir = tempfile.mkdtemp(prefix='cipr')
                clom.git.clone(package, tmpdir).shell.execute()
                if version:
                    cmd = AND(clom.cd(tmpdir), clom.git.checkout(version))
                    cmd.shell.execute()
                package_json = path.join(tmpdir, 'package.json')
                if path.exists(package_json):
                    # Looks like a cipr package, copy directly
                    shutil.move(tmpdir, package_dir)
                else:
                    # Not a cipr package, sandbox in sub-directory
                    shutil.move(tmpdir, path.join(package_dir, name))
                console.quiet('`%s` installed from git repo to `%s`' % (name, package_dir))
            elif path.exists(package):
                # Local
                os.symlink(package, package_dir)
            else:
                console.error('Package `%s` type not recognized' % package)
                return
            # Record the package in the project config.
            pkg = Package(package_dir, source)
            ciprcfg.add_package(pkg)
            if pkg.dependencies:
                console.quiet('Installing dependancies...')
                for name, require in pkg.dependencies.items():
                    if opts.upgrade:
                        app.command.run(['install', '--upgrade', require])
                    else:
                        app.command.run(['install', require])
def packages(ciprcfg, env, opts, console):
    """
    List installed packages for this project
    """
    for name, source in ciprcfg.packages.items():
        console.normal('- %s' % name)
        if not opts.long_details:
            continue
        # Long listing: show where the package lives and where it came from.
        console.normal(' - directory: %s' % path.join(env.package_dir, name))
        console.normal(' - source: %s' % source)
def run(env):
    """
    Run current project in the Corona Simulator
    """
    # Expose cipr locations to the simulator process.
    os.putenv('CIPR_PACKAGES', env.package_dir)
    os.putenv('CIPR_PROJECT', env.project_directory)
    # `Corona Terminal` doesn't support spaces in filenames so we cd in and use '.'.
    launch = AND(
        clom.cd(path.dirname(env.project_directory)),
        clom[CORONA_SIMULATOR_PATH](path.basename(env.project_directory))
    )
    try:
        launch.shell.execute()
    except KeyboardInterrupt:
        # Ctrl-C just stops the simulator; not an error.
        pass
def build(env, ciprcfg, console):
    """
    Build the current project for distribution

    Bumps the CFBundleVersion in build.settings, recreates the build and
    dist directories, copies project files and all installed cipr
    packages into the build directory (rewriting lua module names on the
    way), then launches the Corona Simulator for the actual build.
    """
    os.putenv('CIPR_PACKAGES', env.package_dir)
    os.putenv('CIPR_PROJECT', env.project_directory)
    build_settings = path.join(env.project_directory, 'build.settings')
    with open(build_settings, 'r') as f:
        data = f.read()
    # Auto-increment the bundle version on every build.
    m = _build_re.search(data)
    if m:
        ver = int(m.group(2))
        data = data.replace(m.group(0), 'CFBundleVersion = "%d"' % (ver + 1))
        with open(build_settings, 'w') as f:
            f.write(data)
    # Start from clean build/ and dist/ directories.
    if path.exists(env.build_dir):
        shutil.rmtree(env.build_dir)
    os.makedirs(env.build_dir)
    if path.exists(env.dist_dir):
        shutil.rmtree(env.dist_dir)
    os.makedirs(env.dist_dir)
    console.normal('Building in %s' % env.build_dir)
    console.normal('Copy project files...')
    for src, dst in util.sync_dir_to(env.project_directory, env.build_dir, exclude=['.cipr', '.git', 'build', 'dist', '.*']):
        console.quiet(' %s -> %s' % (src, dst))
        if src.endswith('.lua'):
            _fix_lua_module_name(src, dst)
    console.normal('Copy cipr packages...')
    for package in ciprcfg.packages.keys():
        for src, dst in util.sync_lua_dir_to(path.join(env.package_dir, package), env.build_dir, exclude=['.git'], include=['*.lua']):
            console.quiet(' %s -> %s' % (src, dst))
            if src.endswith('.lua'):
                _fix_lua_module_name(src, dst)
    # Ship the cipr runtime loader with the build.
    src = path.join(env.code_dir, 'cipr.lua')
    dst = path.join(env.build_dir, 'cipr.lua')
    shutil.copy(src, dst)
    cmd = AND(clom.cd(env.build_dir), clom[CORONA_SIMULATOR_PATH](env.build_dir))
    console.normal('Be sure to output your app to %s' % env.dist_dir)
    try:
        cmd.shell.execute()
    except KeyboardInterrupt:
        pass
def packageipa(env, console):
    """
    Package the built app as an ipa for distribution in iOS App Store.

    Zips the .app bundle under a top-level Payload/ directory, which is
    the archive layout the App Store expects for an .ipa.
    """
    ipa_path, app_path = _get_ipa(env)
    # Replace any stale archive from a previous build.
    if path.exists(ipa_path):
        console.quiet('Removing %s' % ipa_path)
        os.remove(ipa_path)
    payload_dir = 'Payload'
    # Use a context manager so the archive is closed even if a write fails
    # (the original left the ZipFile open on error); also dropped the
    # unused `output_dir` local.
    with zipfile.ZipFile(ipa_path, mode='w') as zf:
        for (dirpath, dirnames, filenames) in os.walk(app_path):
            for filename in filenames:
                filepath = path.join(dirpath, filename)
                # Re-root each file under Payload/<app bundle>/...
                prefix = path.commonprefix([filepath, path.dirname(app_path)])
                write_path = path.join(payload_dir, filepath[len(prefix) + 1:])
                console.quiet('Write %s' % write_path)
                zf.write(filepath, write_path)
    console.quiet('Packaged %s' % ipa_path)
def expanddotpaths(env, console):
    """
    Move files with dots in them to sub-directories (e.g. ``a.b.c.lua``
    becomes ``a/b/c.lua``), using git mv so history is preserved.
    """
    for entry in os.listdir(path.join(env.dir)):
        stem, ext = path.splitext(entry)
        if ext != '.lua' or '.' not in stem:
            continue
        # Everything before the last dot becomes the directory path.
        dirs, leaf = stem.rsplit('.', 1)
        subdir = dirs.replace('.', '/')
        destination = path.join(subdir, leaf) + ext
        console.quiet('Move %s to %s' % (entry, destination))
        target_dir = path.join(env.project_directory, subdir)
        if not path.exists(target_dir):
            os.makedirs(target_dir)
        clom.git.mv(entry, destination).shell.execute()
def change(self, key, value):
    """Update any other attribute on the build object and return self."""
    self.obj[key] = value
    note = "Updating build:{}.{}={}".format(self.obj['name'], key, value)
    self.changes.append(note)
    return self
def release(self, lane, status, target=None, meta=None, svcs=None):
    """Set release information on a build

    Records a release-log entry for the lane on the build object, then,
    when ``target`` is given, marks this build as each service's
    'current' or 'future' release (maintaining a bounded history of
    previous versions for 'current').

    :param lane: deployment lane to record the release in
    :param status: release status stored on the lane log and services
    :param target: None, 'current' or 'future'
    :param meta: extra metadata merged into the release-log entry
    :param svcs: services affected; resolved by _prep_for_release
    :raises ValueError: on an invalid ``target``
    """
    if target not in (None, 'current', 'future'):
        raise ValueError("\nError: Target must be None, 'current', or 'future'\n")
    svcs, meta, lane = self._prep_for_release(lane, svcs=svcs, meta=meta)
    when = time.time()
    # loathe non-functional dictionaries in python
    rel_data = meta.copy()
    rel_data.update({
        "_time": when,
        "status": status,
        "services": list(svcs.keys()),
    })
    # Prepend this entry to the lane's release log and persist the lane.
    rel_lane = self.obj.get('lanes', {}).get(lane, dict(log=[],status=status))
    rel_lane['status'] = status
    rel_lane['log'] = [rel_data] + rel_lane.get('log', [])
    self.rcs.patch('build', self.name, {
        "lanes": {
            lane: rel_lane,
        }
    })
    if target:
        for svc in svcs:
            rel_data = {target: self.name}
            # if target is specified, then also update svc.release
            # {current/previous/future}
            if target == "current":
                mysvc = svcs[svc]
                curver = mysvc.get('release', {}).get('current', '')
                prev = []
                if curver:
                    prev = mysvc.get('release', {}).get('previous', [])
                    # Push the outgoing current version onto the history,
                    # avoiding duplicates at the head.
                    if not prev or prev[0] != curver:
                        prev = [curver] + prev
                    while len(prev) > 5: # magic values FTW
                        prev.pop() # only keep history of 5 previous
                rel_data['previous'] = prev
            self.rcs.patch('service', svc, {
                "release": rel_data,
                "statuses": {status: when},
                "status": status
            })
def promote(self, lane, svcs=None, meta=None):
    """promote a build so it is ready for an upper lane"""
    svcs, meta, lane = self._prep_for_release(lane, svcs=svcs, meta=meta)
    # Mark this build as each service's future release.
    for svc in svcs:
        self.changes.append("Promoting: {}.release.future={}".format(svc, self.name))
        self.rcs.patch('service', svc, {
            "release": {"future": self.name},  # new way
            "statuses": {"future": time.time()},
        })
    return self
def add_info(self, data):
    """add info to a build; reserved keys are rejected"""
    for key, value in data.items():
        # verboten
        if key in ('status','state','name','id','application','services','release'):
            raise ValueError("Sorry, cannot set build info with key of {}".format(key))
        self.obj[key] = value
    self.changes.append("Adding build info")
    return self
def localize_field(self, value):
    """
    Transform the object's value into its localized string form,
    substituting the configured default for missing or empty values.
    """
    default = self.default
    if default is not None and (value is None or value == ''):
        value = default
    # Normalise any remaining falsy value to the empty string.
    return value or ''
def hitail(E: np.ndarray, diffnumflux: np.ndarray, isimE0: np.ndarray, E0: np.ndarray,
           Bhf: np.ndarray, bh: float, verbose: int = 0):
    """
    High-energy tail: a power law of index -bh anchored at each E0, zero
    below the anchor energy.

    strickland 1993 said 0.2, but 0.145 gives better match to peak flux at 2500 = E0
    """
    # Per-E0 amplitude: scale factor times the flux at the matching bin.
    Bh = np.empty_like(E0)
    for i in range(E0.size):
        Bh[i] = Bhf[i] * diffnumflux[isimE0[i], i]  # 4100.
    # bh = 4 #2.9
    tail = Bh * (E[:, None] / E0) ** -bh
    # The tail only exists at and above its anchor energy.
    tail[E[:, None] < E0] = 0.
    if verbose > 0:
        print('Bh: ' + (' '.join('{:0.1f}'.format(b) for b in Bh)))
    return tail
def diffuse(self, *args):
    """
    Dispatch to the concrete diffuse implementation depending on the
    arguments received.

    Accepts either ``diffuse(element_name, value_a, value_b)`` or
    ``diffuse(values_by_element, other_values_by_element)``.

    :raises TypeError: when the arguments match neither signature
    """
    mode = diffusingModeEnum.unknown
    if (isinstance(args[0], str) and (len(args) == 3)):
        # received diffuse(str, any, any)
        mode = diffusingModeEnum.element
    elif (hasattr(args[0], "__len__") and (len(args) == 2)):
        # received diffuse(dict({str: any}), dict({str: any}))
        mode = diffusingModeEnum.elements
    else:
        # BUGFIX: '+' binds looser than '.format', so only the last string
        # fragment (which has no placeholder) was formatted and '{0}'
        # reached the user literally. Adjacent string literals concatenate
        # before .format, so the whole message is formatted now.
        raise TypeError(
            "Called diffuse method using bad argments, receive this"
            " '{0}', but expected 'str, any, any' or"
            " 'dict(str: any), dict(str: any)'."
            .format(args))
    self._diffuse(mode, *args)
def json_response(data, status=200):
    """Return a JsonResponse. Make sure you have django installed first."""
    # Imported lazily so the module loads without django installed.
    from django.http import JsonResponse
    is_safe = isinstance(data, dict)
    return JsonResponse(data=data, status=status, safe=is_safe)
q43848 | send_email_template | train | def send_email_template(slug, base_url=None, context=None, user=None,
to=None, cc=None, bcc=None, attachments=None, headers=None, connection=None, fail_silently=False):
"""
Shortcut to send an email template.
"""
email_template = EmailTemplate.objects.get_for_slug(slug)
email = email_template.get_email_message(
base_url, context, user,
to=to, cc=cc, bcc=bcc,
attachments=attachments, headers=headers, connection=connection
)
return email.send(fail_silently=fail_silently) | python | {
"resource": ""
} |
q43849 | Trace.from_file | train | def from_file(filename):
"""Read in filename and creates a trace object.
:param filename: path to nu(x|s)mv output file
:type filename: str
:return:
"""
trace = Trace()
reached = False
with open(filename) as fp:
for line in fp.readlines():
if not reached and line.strip() == "Trace Type: Counterexample":
reached = True
continue
elif reached:
trace.parse_line(line)
return trace | python | {
"resource": ""
} |
q43850 | kwargs_helper | train | def kwargs_helper(kwargs):
"""This function preprocesses the kwargs dictionary to sanitize it."""
args = []
for param, value in kwargs.items():
param = kw_subst.get(param, param)
args.append((param, value))
return args | python | {
"resource": ""
} |
q43851 | GetDate | train | def GetDate(text=None, selected=None, **kwargs):
"""Prompt the user for a date.
This will raise a Zenity Calendar Dialog for the user to pick a date.
It will return a datetime.date object with the date or None if the
user hit cancel.
text - Text to be displayed in the calendar dialog.
selected - A datetime.date object that will be the pre-selected date.
kwargs - Optional command line parameters for Zenity such as height,
width, etc."""
args = ['--date-format=%d/%m/%Y']
if text:
args.append('--text=%s' % text)
if selected:
args.append('--day=%d' % selected.day)
args.append('--month=%d' % selected.month)
args.append('--year=%d' % selected.year)
for generic_args in kwargs_helper(kwargs):
args.append('--%s=%s' % generic_args)
p = run_zenity('--calendar', *args)
if p.wait() == 0:
retval = p.stdout.read().strip()
day, month, year = [int(x) for x in retval.split('/')]
return date(year, month, day) | python | {
"resource": ""
} |
q43852 | GetFilename | train | def GetFilename(multiple=False, sep='|', **kwargs):
"""Prompt the user for a filename.
This will raise a Zenity File Selection Dialog. It will return a list with
the selected files or None if the user hit cancel.
multiple - True to allow the user to select multiple files.
sep - Token to use as the path separator when parsing Zenity's return
string.
kwargs - Optional command line parameters for Zenity such as height,
width, etc."""
args = []
if multiple:
args.append('--multiple')
if sep != '|':
args.append('--separator=%s' % sep)
for generic_args in kwargs_helper(kwargs):
args.append('--%s=%s' % generic_args)
p = run_zenity('--file-selection', *args)
if p.wait() == 0:
return p.stdout.read()[:-1].split('|') | python | {
"resource": ""
} |
q43853 | GetDirectory | train | def GetDirectory(multiple=False, selected=None, sep=None, **kwargs):
"""Prompt the user for a directory.
This will raise a Zenity Directory Selection Dialog. It will return a
list with the selected directories or None if the user hit cancel.
multiple - True to allow the user to select multiple directories.
selected - Path to the directory to be selected on startup.
sep - Token to use as the path separator when parsing Zenity's return
string.
kwargs - Optional command line parameters for Zenity such as height,
width, etc."""
args = ['--directory']
if multiple:
args.append('--multiple')
if selected:
if not path.lexists(selected):
raise ValueError("File %s does not exist!" % selected)
args.append('--filename=%s' % selected)
if sep:
args.append('--separator=%s' % sep)
for generic_args in kwargs_helper(kwargs):
args.append('--%s=%s' % generic_args)
p = run_zenity('--file-selection', *args)
if p.wait() == 0:
return p.stdout.read().strip().split('|') | python | {
"resource": ""
} |
q43854 | GetSavename | train | def GetSavename(default=None, **kwargs):
"""Prompt the user for a filename to save as.
This will raise a Zenity Save As Dialog. It will return the name to save
a file as or None if the user hit cancel.
default - The default name that should appear in the save as dialog.
kwargs - Optional command line parameters for Zenity such as height,
width, etc."""
args = ['--save']
if default:
args.append('--filename=%s' % default)
for generic_args in kwargs_helper(kwargs):
args.append('--%s=%s' % generic_args)
p = run_zenity('--file-selection', *args)
if p.wait() == 0:
return p.stdout.read().strip().split('|') | python | {
"resource": ""
} |
q43855 | ErrorMessage | train | def ErrorMessage(text, **kwargs):
"""Show an error message dialog to the user.
This will raise a Zenity Error Dialog with a description of the error.
text - A description of the error.
kwargs - Optional command line parameters for Zenity such as height,
width, etc."""
args = ['--text=%s' % text]
for generic_args in kwargs_helper(kwargs):
args.append('--%s=%s' % generic_args)
run_zenity('--error', *args).wait() | python | {
"resource": ""
} |
q43856 | Progress | train | def Progress(text='', percentage=0, auto_close=False, pulsate=False, **kwargs):
"""Show a progress dialog to the user.
This will raise a Zenity Progress Dialog. It returns a callback that
accepts two arguments. The first is a numeric value of the percent
complete. The second is a message about the progress.
NOTE: This function sends the SIGHUP signal if the user hits the cancel
button. You must connect to this signal if you do not want your
application to exit.
text - The initial message about the progress.
percentage - The initial percentage to set the progress bar to.
auto_close - True if the dialog should close automatically if it reaches
100%.
pulsate - True is the status should pulsate instead of progress.
kwargs - Optional command line parameters for Zenity such as height,
width, etc."""
args = []
if text:
args.append('--text=%s' % text)
if percentage:
args.append('--percentage=%s' % percentage)
if auto_close:
args.append('--auto-close=%s' % auto_close)
if pulsate:
args.append('--pulsate=%s' % pulsate)
for generic_args in kwargs_helper(kwargs):
args.append('--%s=%s' % generic_args)
p = Popen([zen_exec, '--progress'] + args, stdin=PIPE, stdout=PIPE)
def update(percent, message=''):
if type(percent) == float:
percent = int(percent * 100)
p.stdin.write(str(percent) + '\n')
if message:
p.stdin.write('# %s\n' % message)
return p.returncode
return update | python | {
"resource": ""
} |
q43857 | GetText | train | def GetText(text='', entry_text='', password=False, **kwargs):
"""Get some text from the user.
This will raise a Zenity Text Entry Dialog. It returns the text the user
entered or None if the user hit cancel.
text - A description of the text to enter.
entry_text - The initial value of the text entry box.
password - True if text entered should be hidden by stars.
kwargs - Optional command line parameters for Zenity such as height,
width, etc."""
args = []
if text:
args.append('--text=%s' % text)
if entry_text:
args.append('--entry-text=%s' % entry_text)
if password:
args.append('--hide-text')
for generic_args in kwargs_helper(kwargs):
args.append('--%s=%s' % generic_args)
p = run_zenity('--entry', *args)
if p.wait() == 0:
return p.stdout.read()[:-1] | python | {
"resource": ""
} |
q43858 | TextInfo | train | def TextInfo(filename=None, editable=False, **kwargs):
"""Show the text of a file to the user.
This will raise a Zenity Text Information Dialog presenting the user with
the contents of a file. It returns the contents of the text box.
filename - The path to the file to show.
editable - True if the text should be editable.
kwargs - Optional command line parameters for Zenity such as height,
width, etc."""
args = []
if filename:
args.append('--filename=%s' % filename)
if editable:
args.append('--editable')
for generic_args in kwargs_helper(kwargs):
args.append('--%s=%s' % generic_args)
p = run_zenity('--text-info', *args)
if p.wait() == 0:
return p.stdout.read() | python | {
"resource": ""
} |
q43859 | parse | train | def parse(file_contents, file_name):
"""
This takes a list of filenames and their paths of expected yaml files and
tried to parse them, erroring if there are any parsing issues.
Args:
file_contents (str): Contents of a yml file
Raises:
yaml.parser.ParserError: Raises an error if the file contents cannot be
parsed and interpreted as yaml
"""
try:
yaml.load(file_contents)
except Exception:
_, exc_value, _ = sys.exc_info()
return("Cannot Parse: {file_name}: \n {exc_value}"
.format(file_name=file_name, exc_value=exc_value)) | python | {
"resource": ""
} |
q43860 | GhostBase.get_or_create | train | def get_or_create(cls, **kwargs):
'''
If a record matching the instance already exists in the database, then
return it, otherwise create a new record.
'''
q = cls._get_instance(**kwargs)
if q:
return q
q = cls(**kwargs)
_action_and_commit(q, session.add)
return q | python | {
"resource": ""
} |
q43861 | GhostBase.update | train | def update(cls, **kwargs):
'''
If a record matching the instance id already exists in the database,
update it. If a record matching the instance id does not already exist,
create a new record.
'''
q = cls._get_instance(**{'id': kwargs['id']})
if q:
for k, v in kwargs.items():
setattr(q, k, v)
_action_and_commit(q, session.add)
else:
cls.get_or_create(**kwargs) | python | {
"resource": ""
} |
q43862 | ResponseClassLegacyAccessor._get_instance | train | def _get_instance(self, **kwargs):
'''Return the first existing instance of the response record.
'''
return session.query(self.response_class).filter_by(**kwargs).first() | python | {
"resource": ""
} |
q43863 | ResponseClassLegacyAccessor.update | train | def update(self, response, **kwargs):
'''
If a record matching the instance already exists in the database, update
it, else create a new record.
'''
response_cls = self._get_instance(**kwargs)
if response_cls:
setattr(response_cls, self.column, self.accessor(response))
_action_and_commit(response_cls, session.add)
else:
self.get_or_create_from_legacy_response(response, **kwargs) | python | {
"resource": ""
} |
q43864 | LocationResponseClassLegacyAccessor.update | train | def update(self, response, **kwargs):
'''
If a record matching the instance already exists in the database, update
both the column and venue column attributes, else create a new record.
'''
response_cls = super(
LocationResponseClassLegacyAccessor, self)._get_instance(**kwargs)
if response_cls:
setattr(response_cls, self.column, self.accessor(response))
setattr(
response_cls, self.venue_column, self.venue_accessor(response))
_action_and_commit(response_cls, session.add) | python | {
"resource": ""
} |
q43865 | ServerCommon.common_update_sys | train | def common_update_sys(self):
"""
update system package
"""
try:
sudo('apt-get update -y --fix-missing')
except Exception as e:
print(e)
print(green('System package is up to date.'))
print() | python | {
"resource": ""
} |
q43866 | ServerCommon.common_config_nginx_ssl | train | def common_config_nginx_ssl(self):
"""
Convert nginx server from http to https
"""
if prompt(red(' * Change url from http to https (y/n)?'), default='n') == 'y':
if not exists(self.nginx_ssl_dir):
sudo('mkdir -p {0}'.format(self.nginx_ssl_dir))
# generate ssh key
sudo('openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout {0}/cert.key -out {0}/cert.pem'.format(self.nginx_ssl_dir))
# do nginx config config
put(StringIO(self.nginx_web_ssl_config), '/etc/nginx/sites-available/default', use_sudo=True)
sudo('service nginx restart')
print(green(' * Make Nginx from http to https.'))
print(green(' * Done'))
print() | python | {
"resource": ""
} |
q43867 | ServerCommon.common_install_apache2 | train | def common_install_apache2(self):
"""
Install apache2 web server
"""
try:
sudo('apt-get install apache2 -y')
except Exception as e:
print(e)
print(green(' * Installed Apache2 in the system.'))
print(green(' * Done'))
print() | python | {
"resource": ""
} |
q43868 | ServerCommon.common_install_python_env | train | def common_install_python_env(self):
"""
Install python virtualenv
"""
sudo('apt-get install python3 python3-pip -y')
sudo('pip3 install virtualenv')
run('virtualenv {0}'.format(self.python_env_dir))
print(green(' * Installed Python3 virtual environment in the system.'))
print(green(' * Done'))
print() | python | {
"resource": ""
} |
q43869 | storage.new_tmp | train | def new_tmp(self):
""" Create a new temp file allocation """
self.tmp_idx += 1
return p.join(self.tmp_dir, 'tmp_' + str(self.tmp_idx)) | python | {
"resource": ""
} |
q43870 | storage.new_backup | train | def new_backup(self, src):
""" Create a new backup file allocation """
backup_id_file = p.join(self.backup_dir, '.bk_idx')
backup_num = file_or_default(backup_id_file, 1, int)
backup_name = str(backup_num) + "_" + os.path.basename(src)
backup_num += 1
file_put_contents(backup_id_file, str(backup_num))
return p.join(self.backup_dir, backup_name) | python | {
"resource": ""
} |
q43871 | storage.begin | train | def begin(self):
""" Begin a transaction """
if self.journal != None:
raise Exception('Storage is already active, nested begin not supported')
# under normal operation journal is deleted at end of transaction
# if it does exist we need to roll back
if os.path.isfile(self.j_file): self.rollback()
self.journal = open(self.j_file, 'w') | python | {
"resource": ""
} |
q43872 | storage.do_action | train | def do_action(self, command, journal = True):
""" Implementation for declarative file operations. """
cmd = 0; src = 1; path = 1; data = 2; dst = 2
if journal is True:
self.journal.write(json.dumps(command['undo']) + "\n")
self.journal.flush()
d = command['do']
if d[cmd] == 'copy': shutil.copy(d[src], d[dst])
elif d[cmd] == 'move': shutil.move(d[src], d[dst])
elif d[cmd] == 'backup': shutil.move(d[src], self.new_backup(d[src]))
elif d[cmd] == 'write' :
if callable(d[data]): d[data](d[path])
else: file_put_contents(d[path], d[data]) | python | {
"resource": ""
} |
q43873 | storage.rollback | train | def rollback(self):
""" Do journal rollback """
# Close the journal for writing, if this is an automatic rollback following a crash,
# the file descriptor will not be open, so don't need to do anything.
if self.journal != None: self.journal.close()
self.journal = None
# Read the journal
journ_list = []
with open(self.j_file) as fle:
for l in fle: journ_list.append(json.loads(l))
journ_subtract = deque(reversed(journ_list))
for j_itm in reversed(journ_list):
try: self.do_action({'do' : j_itm}, False)
except IOError: pass
# As each item is completed remove it from the journal file, in case
# something fails during the rollback we can pick up where it stopped.
journ_subtract.popleft()
with open(self.j_file, 'w') as f:
for data in list(journ_subtract):
f.write(json.dumps(data) + "\n")
f.flush()
# Rollback is complete so delete the journal file
os.remove(self.j_file) | python | {
"resource": ""
} |
q43874 | storage.commit | train | def commit(self, cont = False):
""" Finish a transaction """
self.journal.close()
self.journal = None
os.remove(self.j_file)
for itm in os.listdir(self.tmp_dir): os.remove(cpjoin(self.tmp_dir, itm))
if cont is True: self.begin() | python | {
"resource": ""
} |
q43875 | storage.file_get_contents | train | def file_get_contents(self, path):
""" Returns contents of file located at 'path', not changing FS so does
not require journaling """
with open(self.get_full_file_path(path), 'r') as f: return f.read() | python | {
"resource": ""
} |
q43876 | storage.move_file | train | def move_file(self, src, dst):
""" Move file from src to dst """
src = self.get_full_file_path(src); dst = self.get_full_file_path(dst)
# record where file moved
if os.path.isfile(src):
# if destination file exists, copy it to tmp first
if os.path.isfile(dst):
tmp_path = self.new_tmp()
self.do_action({
'do' : ['copy', dst, tmp_path],
'undo' : ['move', tmp_path, dst]})
self.do_action(
{'do' : ['move', src, dst],
'undo' : ['move', dst, src]}) | python | {
"resource": ""
} |
q43877 | storage.delete_file | train | def delete_file(self, path):
""" delete a file """
path = self.get_full_file_path(path)
# if file exists, create a temp copy to allow rollback
if os.path.isfile(path):
tmp_path = self.new_tmp()
self.do_action({
'do' : ['move', path, tmp_path],
'undo' : ['move', tmp_path, path]})
else:
raise OSError(errno.ENOENT, 'No such file or directory', path) | python | {
"resource": ""
} |
q43878 | parse_sysctl | train | def parse_sysctl(text):
''' Parse sysctl output. '''
lines = text.splitlines()
results = {}
for line in lines:
key, _, value = line.decode('ascii').partition(': ')
if key == 'hw.memsize':
value = int(value)
elif key == 'vm.swapusage':
values = value.split()[2::3] # every third token
su_unit = values[0][-1].lower() # get unit, 'M'
PAGESIZE = 1024
if su_unit == 'm':
PAGESIZE = 1024 * 1024
value = [ (float(val[:-1]) * PAGESIZE) for val in values ]
results[key] = value
return results | python | {
"resource": ""
} |
q43879 | parse_vmstat | train | def parse_vmstat(text):
''' Parse vmstat output. '''
lines = text.splitlines()
results = Info() # TODO use MemInfo
try:
PAGESIZE = int(lines[0].split()[-2])
except IndexError:
PAGESIZE = 4096
for line in lines[1:]: # dump header
if not line[0] == 80: # b'P' startswith Page...
break
tokens = line.split()
name, value = tokens[1][:-1].decode('ascii'), tokens[-1][:-1]
results[name] = int(value) * PAGESIZE
return results | python | {
"resource": ""
} |
q43880 | get_base_url | train | def get_base_url(html: str) -> str:
"""
Search for login url from VK login page
"""
forms = BeautifulSoup(html, 'html.parser').find_all('form')
if not forms:
raise VVKBaseUrlException('Form for login not found')
elif len(forms) > 1:
raise VVKBaseUrlException('More than one login form found')
login_url = forms[0].get('action')
if not login_url:
raise VVKBaseUrlException('No action tag in form')
return login_url | python | {
"resource": ""
} |
q43881 | get_url_params | train | def get_url_params(url: str, fragment: bool = False) -> dict:
"""
Parse URL params
"""
parsed_url = urlparse(url)
if fragment:
url_query = parse_qsl(parsed_url.fragment)
else:
url_query = parse_qsl(parsed_url.query)
return dict(url_query) | python | {
"resource": ""
} |
q43882 | check_page_for_warnings | train | def check_page_for_warnings(html: str) -> None:
"""
Checks if is any warnings on page if so raises an exception
"""
soup = BeautifulSoup(html, 'html.parser')
warnings = soup.find_all('div', {'class': 'service_msg_warning'})
if warnings:
exception_msg = '; '.join((warning.get_text() for warning in warnings))
raise VVKPageWarningException(exception_msg) | python | {
"resource": ""
} |
q43883 | get_column_keys_and_names | train | def get_column_keys_and_names(table):
"""
Return a generator of tuples k, c such that k is the name of the python attribute for
the column and c is the name of the column in the sql table.
"""
ins = inspect(table)
return ((k, c.name) for k, c in ins.mapper.c.items()) | python | {
"resource": ""
} |
q43884 | is_modified | train | def is_modified(row, dialect):
"""
Has the row data been modified?
This method inspects the row, and iterates over all columns looking for changes
to the (processed) data, skipping over unmodified columns.
:param row: SQLAlchemy model instance
:param dialect: :py:class:`~sqlalchemy.engine.interfaces.Dialect`
:return: True if any columns were modified, else False
"""
ins = inspect(row)
modified_cols = set(get_column_keys(ins.mapper)) - ins.unmodified
for col_name in modified_cols:
current_value = get_column_attribute(row, col_name, dialect=dialect)
previous_value = get_column_attribute(row, col_name, use_dirty=False, dialect=dialect)
if previous_value != current_value:
return True
return False | python | {
"resource": ""
} |
q43885 | registerLoggers | train | def registerLoggers(info, error, debug):
"""
Add logging functions to this module.
Functions will be called on various severities (log, error, or debug
respectively).
Each function must have the signature:
fn(message, **kwargs)
If Python str.format()-style placeholders are in message, kwargs will be
interpolated.
"""
global log_info
global log_error
global log_debug
log_info = info
log_error = error
log_debug = debug | python | {
"resource": ""
} |
q43886 | background | train | def background(cl, proto=EchoProcess, **kw):
"""
Use the reactor to run a process in the background.
Keep the pid around.
``proto'' may be any callable which returns an instance of ProcessProtocol
"""
if isinstance(cl, basestring):
cl = shlex.split(cl)
if not cl[0].startswith('/'):
path = which(cl[0])
assert path, '%s not found' % cl[0]
cl[0] = path[0]
d = Deferred()
proc = reactor.spawnProcess(
proto(name=basename(cl[0]), deferred=d),
cl[0],
cl,
env=os.environ,
**kw)
daycare.add(proc.pid)
return d | python | {
"resource": ""
} |
q43887 | runner | train | def runner(Options, buffering=True):
"""
Return a standard "run" function that wraps an Options class
If buffering=False, turn off stdout/stderr buffering for this process
"""
def run(argv=None):
if not buffering:
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 0)
if argv is None:
argv = sys.argv
o = Options()
try:
o.parseOptions(argv[1:])
except usage.UsageError, e:
if hasattr(o, 'subOptions'):
print str(o.subOptions)
else:
print str(o)
print str(e)
return 1
return 0
return run | python | {
"resource": ""
} |
q43888 | DayCare.killall | train | def killall(self):
"""
Kill all children
"""
for pid in set(self):
try:
os.kill(pid, signal.SIGTERM)
except OSError, e: # pragma: nocover
if e.errno == errno.ESRCH:
"Process previously died on its own"
self.remove(pid) | python | {
"resource": ""
} |
q43889 | EchoProcess.processEnded | train | def processEnded(self, reason):
"""
Connected process shut down
"""
log_debug("{name} process exited", name=self.name)
if self.deferred:
if reason.type == ProcessDone:
self.deferred.callback(reason.value.exitCode)
elif reason.type == ProcessTerminated:
self.deferred.errback(reason)
return self.deferred | python | {
"resource": ""
} |
q43890 | EchoProcess.errReceived | train | def errReceived(self, data):
"""
Connected process wrote to stderr
"""
lines = data.splitlines()
for line in lines:
log_error("*** {name} stderr *** {line}",
name=self.name,
line=self.errFilter(line)) | python | {
"resource": ""
} |
q43891 | EchoProcess.outLineReceived | train | def outLineReceived(self, line):
"""
Handle data via stdout linewise. This is useful if you turned off
buffering.
In your subclass, override this if you want to handle the line as a
protocol line in addition to logging it. (You may upcall this function
safely.)
"""
log_debug('<<< {name} stdout >>> {line}',
name=self.name,
line=self.outFilter(line)) | python | {
"resource": ""
} |
q43892 | Tee.write | train | def write(self, *a, **kw):
"""
Write to both files
If either one has an error, try writing the error to the other one.
"""
fl = None
try:
self.file1.write(*a, **kw)
self.file1.flush()
except IOError:
badFile, fl = 1, failure.Failure()
try:
self.file2.write(*a, **kw)
self.file2.flush()
except IOError:
badFile, fl = 2, failure.Failure()
if fl:
out = self.file2 if badFile == 1 else self.file1
out.write(str(fl) + '\n')
out.flush()
fl.raiseException() | python | {
"resource": ""
} |
q43893 | from_etree | train | def from_etree(
el, node=None, node_cls=None,
tagsub=functools.partial(re.sub, r'\{.+?\}', ''),
Node=Node):
'''Convert the element tree to a tater tree.
'''
node_cls = node_cls or Node
if node is None:
node = node_cls()
tag = tagsub(el.tag)
attrib = dict((tagsub(k), v) for (k, v) in el.attrib.items())
node.update(attrib, tag=tag)
if el.text:
node['text'] = el.text
for child in el:
child = from_etree(child, node_cls=node_cls)
node.append(child)
if el.tail:
node['tail'] = el.tail
return node | python | {
"resource": ""
} |
q43894 | secure | train | def secure(view):
"""
Authentication decorator for views.
If DEBUG is on, we serve the view without authenticating.
Default is 'django.contrib.auth.decorators.login_required'.
Can also be 'django.contrib.admin.views.decorators.staff_member_required'
or a custom decorator.
"""
auth_decorator = import_class(settings.AUTH_DECORATOR)
return (
view if project_settings.DEBUG
else method_decorator(auth_decorator, name='dispatch')(view)
) | python | {
"resource": ""
} |
q43895 | get_netid_categories | train | def get_netid_categories(netid, category_codes):
"""
Return a list of uwnetid.models Category objects
corresponding to the netid and category code or list provided
"""
url = _netid_category_url(netid, category_codes)
response = get_resource(url)
return _json_to_categories(response) | python | {
"resource": ""
} |
q43896 | update_catagory | train | def update_catagory(netid, category_code, status):
"""
Post a subscriptionfor the given netid
and category_code
"""
url = "{0}/category".format(url_version())
body = {
"categoryCode": category_code,
"status": status,
"categoryList": [{"netid": netid}]
}
response = post_resource(url, json.dumps(body))
return json.loads(response) | python | {
"resource": ""
} |
q43897 | _netid_category_url | train | def _netid_category_url(netid, category_codes):
"""
Return UWNetId resource for provided netid and category
code or code list
"""
return "{0}/{1}/category/{2}".format(
url_base(), netid,
(','.join([str(n) for n in category_codes])
if isinstance(category_codes, (list, tuple))
else category_codes)) | python | {
"resource": ""
} |
q43898 | _json_to_categories | train | def _json_to_categories(response_body):
"""
Returns a list of Category objects
"""
data = json.loads(response_body)
categories = []
for category_data in data.get("categoryList", []):
categories.append(Category().from_json(
data.get('uwNetID'), category_data))
return categories | python | {
"resource": ""
} |
q43899 | ParseContext.activate | train | def activate(ctx):
"""Activate the given ParseContext."""
if hasattr(ctx, '_on_context_exit'):
raise ContextError(
'Context actions registered outside this '
'parse context are active')
try:
ParseContext._active.append(ctx)
ctx._on_context_exit = []
yield
finally:
for func, args, kwargs in ctx._on_context_exit:
func(*args, **kwargs)
del ctx._on_context_exit
ParseContext._active.pop() | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.