Dataset columns (dtype and min/max value or string length per column):

column            type            min      max
----------------  --------------  -------  -------
id                int32           0        252k
repo              string length   7        55
path              string length   4        127
func_name         string length   1        88
original_string   string length   75       19.8k
language          string classes  1 value
code              string length   75       19.8k
code_tokens       list            -        -
docstring         string length   3        17.3k
docstring_tokens  list            -        -
sha               string length   40       40
url               string length   87       242
11,600
pklaus/brother_ql
brother_ql/backends/helpers.py
send
def send(instructions, printer_identifier=None, backend_identifier=None, blocking=True):
    """
    Send instruction bytes to a printer.

    :param bytes instructions: The instructions to be sent to the printer.
    :param str printer_identifier: Identifier for the printer.
    :param str backend_identifier: Can enforce the use of a specific backend.
    :param bool blocking: Indicates whether the function call should block while waiting for the completion of the printing.
    """

    status = {
        'instructions_sent': True,    # The instructions were sent to the printer.
        'outcome': 'unknown',         # String description of the outcome of the sending operation like: 'unknown', 'sent', 'printed', 'error'
        'printer_state': None,        # If the selected backend supports reading back the printer state, this key will contain it.
        'did_print': False,           # If True, a print was produced. It defaults to False if the outcome is uncertain (due to a backend without read-back capability).
        'ready_for_next_job': False,  # If True, the printer is ready to receive the next instructions. It defaults to False if the state is unknown.
    }

    selected_backend = None
    if backend_identifier:
        selected_backend = backend_identifier
    else:
        try:
            selected_backend = guess_backend(printer_identifier)
        except:
            logger.info("No backend stated. Selecting the default linux_kernel backend.")
            selected_backend = 'linux_kernel'

    be = backend_factory(selected_backend)
    list_available_devices = be['list_available_devices']
    BrotherQLBackend = be['backend_class']

    printer = BrotherQLBackend(printer_identifier)

    start = time.time()
    logger.info('Sending instructions to the printer. Total: %d bytes.', len(instructions))
    printer.write(instructions)
    status['outcome'] = 'sent'

    if not blocking:
        return status
    if selected_backend == 'network':
        """ No need to wait for completion. The network backend doesn't support readback. """
        return status

    while time.time() - start < 10:
        data = printer.read()
        if not data:
            time.sleep(0.005)
            continue
        try:
            result = interpret_response(data)
        except ValueError:
            logger.error("TIME %.3f - Couln't understand response: %s", time.time()-start, data)
            continue
        status['printer_state'] = result
        logger.debug('TIME %.3f - result: %s', time.time()-start, result)
        if result['errors']:
            logger.error('Errors occured: %s', result['errors'])
            status['outcome'] = 'error'
            break
        if result['status_type'] == 'Printing completed':
            status['did_print'] = True
            status['outcome'] = 'printed'
        if result['status_type'] == 'Phase change' and result['phase_type'] == 'Waiting to receive':
            status['ready_for_next_job'] = True
        if status['did_print'] and status['ready_for_next_job']:
            break

    if not status['did_print']:
        logger.warning("'printing completed' status not received.")
    if not status['ready_for_next_job']:
        logger.warning("'waiting to receive' status not received.")
    if (not status['did_print']) or (not status['ready_for_next_job']):
        logger.warning('Printing potentially not successful?')
    if status['did_print'] and status['ready_for_next_job']:
        logger.info("Printing was successful. Waiting for the next job.")

    return status
python
Send instruction bytes to a printer. :param bytes instructions: The instructions to be sent to the printer. :param str printer_identifier: Identifier for the printer. :param str backend_identifier: Can enforce the use of a specific backend. :param bool blocking: Indicates whether the function call should block while waiting for the completion of the printing.
[ "Send", "instruction", "bytes", "to", "a", "printer", "." ]
b551b1fc944873f3a2ead7032d144dfd81011e79
https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/backends/helpers.py#L26-L103
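A minimal usage sketch for the send() helper above. It assumes the brother_ql package is installed, that instructions.bin already holds raster instructions for the attached printer, and that the usb:// identifier is a placeholder rather than a value taken from this record.

# Sketch: send previously generated raster instructions to a label printer.
# 'instructions.bin' and the usb:// identifier are assumed placeholder values.
from brother_ql.backends.helpers import send

with open('instructions.bin', 'rb') as f:
    instructions = f.read()

status = send(instructions=instructions,
              printer_identifier='usb://0x04f9:0x2015',
              backend_identifier='pyusb',
              blocking=True)
print(status['outcome'], status['did_print'])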
11,601
pklaus/brother_ql
brother_ql/reader.py
merge_specific_instructions
def merge_specific_instructions(chunks, join_preamble=True, join_raster=True):
    """
    Process a list of instructions by merging subsequent instuctions with
    identical opcodes into "large instructions".
    """
    new_instructions = []
    last_opcode = None
    instruction_buffer = b''

    for instruction in chunks:
        opcode = match_opcode(instruction)
        if join_preamble and OPCODES[opcode][0] == 'preamble' and last_opcode == 'preamble':
            instruction_buffer += instruction
        elif join_raster and 'raster' in OPCODES[opcode][0] and 'raster' in last_opcode:
            instruction_buffer += instruction
        else:
            if instruction_buffer:
                new_instructions.append(instruction_buffer)
            instruction_buffer = instruction
        last_opcode = OPCODES[opcode][0]

    if instruction_buffer:
        new_instructions.append(instruction_buffer)
    return new_instructions
python
Process a list of instructions by merging subsequent instuctions with identical opcodes into "large instructions".
[ "Process", "a", "list", "of", "instructions", "by", "merging", "subsequent", "instuctions", "with", "identical", "opcodes", "into", "large", "instructions", "." ]
b551b1fc944873f3a2ead7032d144dfd81011e79
https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/reader.py#L209-L230
11,602
pklaus/brother_ql
brother_ql/cli.py
cli
def cli(ctx, *args, **kwargs):
    """ Command line interface for the brother_ql Python package. """

    backend = kwargs.get('backend', None)
    model = kwargs.get('model', None)
    printer = kwargs.get('printer', None)
    debug = kwargs.get('debug')

    # Store the general CLI options in the context meta dictionary.
    # The name corresponds to the second half of the respective envvar:
    ctx.meta['MODEL'] = model
    ctx.meta['BACKEND'] = backend
    ctx.meta['PRINTER'] = printer

    logging.basicConfig(level='DEBUG' if debug else 'INFO')
python
Command line interface for the brother_ql Python package.
[ "Command", "line", "interface", "for", "the", "brother_ql", "Python", "package", "." ]
b551b1fc944873f3a2ead7032d144dfd81011e79
https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/cli.py#L26-L40
11,603
pklaus/brother_ql
brother_ql/cli.py
env
def env(ctx, *args, **kwargs):
    """ print debug info about running environment """
    import sys, platform, os, shutil
    from pkg_resources import get_distribution, working_set
    print("\n##################\n")
    print("Information about the running environment of brother_ql.")
    print("(Please provide this information when reporting any issue.)\n")
    # computer
    print("About the computer:")
    for attr in ('platform', 'processor', 'release', 'system', 'machine', 'architecture'):
        print(' * '+attr.title()+':', getattr(platform, attr)())
    # Python
    print("About the installed Python version:")
    py_version = str(sys.version).replace('\n', ' ')
    print(" *", py_version)
    # brother_ql
    print("About the brother_ql package:")
    pkg = get_distribution('brother_ql')
    print(" * package location:", pkg.location)
    print(" * package version: ", pkg.version)
    try:
        cli_loc = shutil.which('brother_ql')
    except:
        cli_loc = 'unknown'
    print(" * brother_ql CLI path:", cli_loc)
    # brother_ql's requirements
    print("About the requirements of brother_ql:")
    fmt = " {req:14s} | {spec:10s} | {ins_vers:17s}"
    print(fmt.format(req='requirement', spec='requested', ins_vers='installed version'))
    print(fmt.format(req='-' * 14, spec='-'*10, ins_vers='-'*17))
    requirements = list(pkg.requires())
    requirements.sort(key=lambda x: x.project_name)
    for req in requirements:
        proj = req.project_name
        req_pkg = get_distribution(proj)
        spec = ' '.join(req.specs[0]) if req.specs else 'any'
        print(fmt.format(req=proj, spec=spec, ins_vers=req_pkg.version))
    print("\n##################\n")
python
print debug info about running environment
[ "print", "debug", "info", "about", "running", "environment" ]
b551b1fc944873f3a2ead7032d144dfd81011e79
https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/cli.py#L81-L120
11,604
pklaus/brother_ql
brother_ql/cli.py
print_cmd
def print_cmd(ctx, *args, **kwargs):
    """ Print a label of the provided IMAGE. """
    backend = ctx.meta.get('BACKEND', 'pyusb')
    model = ctx.meta.get('MODEL')
    printer = ctx.meta.get('PRINTER')
    from brother_ql.conversion import convert
    from brother_ql.backends.helpers import send
    from brother_ql.raster import BrotherQLRaster
    qlr = BrotherQLRaster(model)
    qlr.exception_on_warning = True
    kwargs['cut'] = not kwargs['no_cut']
    del kwargs['no_cut']
    instructions = convert(qlr=qlr, **kwargs)
    send(instructions=instructions, printer_identifier=printer, backend_identifier=backend, blocking=True)
python
Print a label of the provided IMAGE.
[ "Print", "a", "label", "of", "the", "provided", "IMAGE", "." ]
b551b1fc944873f3a2ead7032d144dfd81011e79
https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/cli.py#L134-L147
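The print_cmd command above chains BrotherQLRaster, convert() and send(); the same pipeline can be driven directly from Python. In the sketch below the model name, label size, image path and printer identifier are illustrative assumptions, and convert() accepts further keyword arguments that are not shown.

# Sketch: programmatic equivalent of the `brother_ql print` command.
# Model, label size, image path and identifier are assumed example values.
from brother_ql.conversion import convert
from brother_ql.raster import BrotherQLRaster
from brother_ql.backends.helpers import send

qlr = BrotherQLRaster('QL-700')  # printer model (assumption)
instructions = convert(qlr=qlr, images=['label.png'], label='62')  # parameter names are assumptions
send(instructions=instructions,
     printer_identifier='usb://0x04f9:0x2015',  # placeholder
     backend_identifier='pyusb',
     blocking=True)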
11,605
pklaus/brother_ql
brother_ql/backends/pyusb.py
list_available_devices
def list_available_devices():
    """
    List all available devices for the respective backend

    returns: devices: a list of dictionaries with the keys 'identifier' and 'instance': \
        [ {'identifier': 'usb://0x04f9:0x2015/C5Z315686', 'instance': pyusb.core.Device()}, ]
        The 'identifier' is of the format idVendor:idProduct_iSerialNumber.
    """

    class find_class(object):
        def __init__(self, class_):
            self._class = class_
        def __call__(self, device):
            # first, let's check the device
            if device.bDeviceClass == self._class:
                return True
            # ok, transverse all devices to find an interface that matches our class
            for cfg in device:
                # find_descriptor: what's it?
                intf = usb.util.find_descriptor(cfg, bInterfaceClass=self._class)
                if intf is not None:
                    return True
            return False

    # only Brother printers
    printers = usb.core.find(find_all=1, custom_match=find_class(7), idVendor=0x04f9)

    def identifier(dev):
        try:
            serial = usb.util.get_string(dev, 256, dev.iSerialNumber)
            return 'usb://0x{:04x}:0x{:04x}_{}'.format(dev.idVendor, dev.idProduct, serial)
        except:
            return 'usb://0x{:04x}:0x{:04x}'.format(dev.idVendor, dev.idProduct)

    return [{'identifier': identifier(printer), 'instance': printer} for printer in printers]
python
List all available devices for the respective backend returns: devices: a list of dictionaries with the keys 'identifier' and 'instance': \ [ {'identifier': 'usb://0x04f9:0x2015/C5Z315686', 'instance': pyusb.core.Device()}, ] The 'identifier' is of the format idVendor:idProduct_iSerialNumber.
[ "List", "all", "available", "devices", "for", "the", "respective", "backend" ]
b551b1fc944873f3a2ead7032d144dfd81011e79
https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/backends/pyusb.py#L21-L55
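A short sketch of how a caller might enumerate printers with the pyusb backend above. It assumes pyusb plus libusb are available and that backend_factory() can be imported from brother_ql.backends, as the send() helper earlier suggests; the printed identifiers depend entirely on the attached hardware.

# Sketch: list connected Brother label printers via the pyusb backend.
from brother_ql.backends import backend_factory  # import location is an assumption

be = backend_factory('pyusb')
for dev in be['list_available_devices']():
    print(dev['identifier'])  # e.g. usb://0x04f9:0x2015_<serial>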
11,606
pklaus/brother_ql
brother_ql/devicedependent.py
_populate_label_legacy_structures
def _populate_label_legacy_structures():
    """
    We contain this code inside a function so that
    the imports we do in here are not visible at the module level.
    """
    global DIE_CUT_LABEL, ENDLESS_LABEL, ROUND_DIE_CUT_LABEL
    global label_sizes, label_type_specs

    from brother_ql.labels import FormFactor
    DIE_CUT_LABEL = FormFactor.DIE_CUT
    ENDLESS_LABEL = FormFactor.ENDLESS
    ROUND_DIE_CUT_LABEL = FormFactor.ROUND_DIE_CUT

    from brother_ql.labels import LabelsManager
    lm = LabelsManager()
    label_sizes = list(lm.iter_identifiers())
    for label in lm.iter_elements():
        l = {}
        l['name'] = label.name
        l['kind'] = label.form_factor
        l['color'] = label.color
        l['tape_size'] = label.tape_size
        l['dots_total'] = label.dots_total
        l['dots_printable'] = label.dots_printable
        l['right_margin_dots'] = label.offset_r
        l['feed_margin'] = label.feed_margin
        l['restrict_printers'] = label.restricted_to_models
        label_type_specs[label.identifier] = l
python
We contain this code inside a function so that the imports we do in here are not visible at the module level.
[ "We", "contain", "this", "code", "inside", "a", "function", "so", "that", "the", "imports", "we", "do", "in", "here", "are", "not", "visible", "at", "the", "module", "level", "." ]
b551b1fc944873f3a2ead7032d144dfd81011e79
https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/devicedependent.py#L59-L86
11,607
pklaus/brother_ql
brother_ql/backends/__init__.py
guess_backend
def guess_backend(identifier):
    """ guess the backend from a given identifier string for the device """
    if identifier.startswith('usb://') or identifier.startswith('0x'):
        return 'pyusb'
    elif identifier.startswith('file://') or identifier.startswith('/dev/usb/') or identifier.startswith('lp'):
        return 'linux_kernel'
    elif identifier.startswith('tcp://'):
        return 'network'
    else:
        raise ValueError('Cannot guess backend for given identifier: %s' % identifier)
python
guess the backend from a given identifier string for the device
[ "guess", "the", "backend", "from", "a", "given", "identifier", "string", "for", "the", "device" ]
b551b1fc944873f3a2ead7032d144dfd81011e79
https://github.com/pklaus/brother_ql/blob/b551b1fc944873f3a2ead7032d144dfd81011e79/brother_ql/backends/__init__.py#L11-L20
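The mapping implemented by guess_backend() follows directly from its prefix checks; for example:

from brother_ql.backends import guess_backend

assert guess_backend('usb://0x04f9:0x2015') == 'pyusb'
assert guess_backend('/dev/usb/lp0') == 'linux_kernel'
assert guess_backend('tcp://192.168.0.23:9100') == 'network'
# any other identifier raises ValueError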
11,608
gfairchild/yelpapi
yelpapi/yelpapi.py
YelpAPI.featured_event_query
def featured_event_query(self, **kwargs):
    """
    Query the Yelp Featured Event API.

    documentation: https://www.yelp.com/developers/documentation/v3/featured_event

    required parameters:
        * one of either:
            * location - text specifying a location to search for
            * latitude and longitude
    """
    if not kwargs.get('location') and (not kwargs.get('latitude') or not kwargs.get('longitude')):
        raise ValueError('A valid location (parameter "location") or latitude/longitude combination '
                         '(parameters "latitude" and "longitude") must be provided.')

    return self._query(FEATURED_EVENT_API_URL, **kwargs)
python
Query the Yelp Featured Event API. documentation: https://www.yelp.com/developers/documentation/v3/featured_event required parameters: * one of either: * location - text specifying a location to search for * latitude and longitude
[ "Query", "the", "Yelp", "Featured", "Event", "API", "." ]
51e35fbe44ac131630ce5e2f1b6f53711846e2a7
https://github.com/gfairchild/yelpapi/blob/51e35fbe44ac131630ce5e2f1b6f53711846e2a7/yelpapi/yelpapi.py#L174-L189
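A hedged usage sketch for featured_event_query(). It assumes a yelpapi client constructed with a valid API key; the key string and coordinates below are placeholders.

# Sketch: query the Featured Event endpoint by coordinates or by a location string.
from yelpapi import YelpAPI

yelp = YelpAPI('YOUR_API_KEY')  # placeholder key
event = yelp.featured_event_query(latitude=35.89, longitude=-106.30)
# or: event = yelp.featured_event_query(location='Los Alamos, NM')
print(event)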
11,609
gfairchild/yelpapi
yelpapi/yelpapi.py
YelpAPI._get_clean_parameters
def _get_clean_parameters(kwargs):
    """
    Clean the parameters by filtering out any parameters that have a None value.
    """
    return dict((k, v) for k, v in kwargs.items() if v is not None)
python
Clean the parameters by filtering out any parameters that have a None value.
[ "Clean", "the", "parameters", "by", "filtering", "out", "any", "parameters", "that", "have", "a", "None", "value", "." ]
51e35fbe44ac131630ce5e2f1b6f53711846e2a7
https://github.com/gfairchild/yelpapi/blob/51e35fbe44ac131630ce5e2f1b6f53711846e2a7/yelpapi/yelpapi.py#L258-L262
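Its behaviour is easy to show in isolation; the snippet below repeats the same None-filter on example keyword arguments.

# Same None-filter as _get_clean_parameters(), applied to example data.
kwargs = {'location': 'Austin, TX', 'latitude': None, 'longitude': None, 'limit': 5}
cleaned = dict((k, v) for k, v in kwargs.items() if v is not None)
assert cleaned == {'location': 'Austin, TX', 'limit': 5}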
11,610
gfairchild/yelpapi
yelpapi/yelpapi.py
YelpAPI._query
def _query(self, url, **kwargs):
    """
    All query methods have the same logic, so don't repeat it! Query the URL, parse the response
    as JSON, and check for errors. If all goes well, return the parsed JSON.
    """
    parameters = YelpAPI._get_clean_parameters(kwargs)
    response = self._yelp_session.get(
        url,
        headers=self._headers,
        params=parameters,
        timeout=self._timeout_s,
    )
    response_json = response.json()  # shouldn't happen, but this will raise a ValueError if the response isn't JSON

    # Yelp can return one of many different API errors, so check for one of them.
    # The Yelp Fusion API does not yet have a complete list of errors, but this is on the TODO list; see
    # https://github.com/Yelp/yelp-fusion/issues/95 for more info.
    if 'error' in response_json:
        raise YelpAPI.YelpAPIError('{}: {}'.format(response_json['error']['code'],
                                                   response_json['error']['description']))

    # we got a good response, so return
    return response_json
python
All query methods have the same logic, so don't repeat it! Query the URL, parse the response as JSON, and check for errors. If all goes well, return the parsed JSON.
[ "All", "query", "methods", "have", "the", "same", "logic", "so", "don", "t", "repeat", "it!", "Query", "the", "URL", "parse", "the", "response", "as", "JSON", "and", "check", "for", "errors", ".", "If", "all", "goes", "well", "return", "the", "parsed", "JSON", "." ]
51e35fbe44ac131630ce5e2f1b6f53711846e2a7
https://github.com/gfairchild/yelpapi/blob/51e35fbe44ac131630ce5e2f1b6f53711846e2a7/yelpapi/yelpapi.py#L264-L286
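Because every public query method funnels through _query(), API errors surface to callers as YelpAPI.YelpAPIError. The sketch below assumes the library's search_query wrapper and uses a placeholder API key.

from yelpapi import YelpAPI

yelp = YelpAPI('YOUR_API_KEY')  # placeholder key
try:
    results = yelp.search_query(term='coffee', location='Santa Fe, NM', limit=3)  # wrapper name is an assumption
except YelpAPI.YelpAPIError as err:
    # the message is built from the error code and description, e.g. "VALIDATION_ERROR: ..."
    print('Yelp API error:', err)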
11,611
samgiles/slumber
slumber/utils.py
url_join
def url_join(base, *args):
    """
    Helper function to join an arbitrary number of url segments together.
    """
    scheme, netloc, path, query, fragment = urlsplit(base)
    path = path if len(path) else "/"
    path = posixpath.join(path, *[('%s' % x) for x in args])
    return urlunsplit([scheme, netloc, path, query, fragment])
python
Helper function to join an arbitrary number of url segments together.
[ "Helper", "function", "to", "join", "an", "arbitrary", "number", "of", "url", "segments", "together", "." ]
af0f9ef7bd8df8bde6b47088630786c737869bce
https://github.com/samgiles/slumber/blob/af0f9ef7bd8df8bde6b47088630786c737869bce/slumber/utils.py#L9-L16
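For example, the urlsplit/posixpath.join/urlunsplit combination above yields:

from slumber.utils import url_join

print(url_join('http://example.com/api/v1', 'resource', 42, 'detail'))
# -> http://example.com/api/v1/resource/42/detail
print(url_join('http://example.com', 'users'))
# -> http://example.com/users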
11,612
20c/vaping
vaping/plugins/vodka.py
probe_to_graphsrv
def probe_to_graphsrv(probe):
    """
    takes a probe instance and generates a graphsrv data group
    for it using the probe's config
    """

    config = probe.config

    # manual group set up via `group` config key
    if "group" in config:
        source, group = config["group"].split(".")
        group_field = config.get("group_field", "host")
        group_value = config[group_field]
        graphsrv.group.add(source, group, {group_value:{group_field:group_value}}, **config)
        return

    # automatic group setup for fping
    # FIXME: this should be somehow more dynamic
    for k, v in list(config.items()):
        if isinstance(v, dict) and "hosts" in v:
            r = {}
            for host in v.get("hosts"):
                if isinstance(host, dict):
                    r[host["host"]] = host
                else:
                    r[host] = {"host":host}
            graphsrv.group.add(probe.name, k, r, **v)
python
takes a probe instance and generates a graphsrv data group for it using the probe's config
[ "takes", "a", "probe", "instance", "and", "generates", "a", "graphsrv", "data", "group", "for", "it", "using", "the", "probe", "s", "config" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/vodka.py#L20-L49
11,613
20c/vaping
vaping/plugins/__init__.py
PluginBase.new_message
def new_message(self):
    """
    creates a new message, setting `type`, `source`, `ts`, `data`
    - `data` is initialized to an empty array
    """
    msg = {}
    msg['data'] = []
    msg['type'] = self.plugin_type
    msg['source'] = self.name
    msg['ts'] = (datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds()
    return msg
python
creates a new message, setting `type`, `source`, `ts`, `data` - `data` is initialized to an empty array
[ "creates", "a", "new", "message", "setting", "type", "source", "ts", "data", "-", "data", "is", "initialized", "to", "an", "empty", "array" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L51-L61
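A standalone sketch of the message shape new_message() produces; the plugin type and name ('fping', 'latency') are illustrative values, and the timestamp arithmetic is the same epoch calculation used above.

import datetime

# Seconds since 1970-01-01 as a float, exactly as in new_message().
ts = (datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds()
msg = {'data': [], 'type': 'fping', 'source': 'latency', 'ts': ts}
print(msg)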
11,614
20c/vaping
vaping/plugins/__init__.py
PluginBase.popen
def popen(self, args, **kwargs):
    """
    creates a subprocess with passed args
    """
    self.log.debug("popen %s", ' '.join(args))
    return vaping.io.subprocess.Popen(args, **kwargs)
python
creates a subprocess with passed args
[ "creates", "a", "subprocess", "with", "passed", "args" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L63-L68
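A sketch of how a plugin might call this wrapper from one of its own methods. It assumes the returned object mirrors subprocess.Popen and that fping is installed; the command line is an example only.

# Inside a plugin method (sketch); `self` is the plugin instance.
proc = self.popen(['fping', '-c', '1', 'example.com'])
proc.wait()  # assumes the Popen-like object exposes wait()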
11,615
20c/vaping
vaping/plugins/__init__.py
ProbeBase.queue_emission
def queue_emission(self, msg):
    """
    queue an emission of a message for all output plugins
    """
    if not msg:
        return
    for _emitter in self._emit:
        if not hasattr(_emitter, 'emit'):
            continue

        def emit(emitter=_emitter):
            self.log.debug("emit to {}".format(emitter.name))
            emitter.emit(msg)

        self.log.debug("queue emission to {} ({})".format(
            _emitter.name, self._emit_queue.qsize()))
        self._emit_queue.put(emit)
python
queue an emission of a message for all output plugins
[ "queue", "an", "emission", "of", "a", "message", "for", "all", "output", "plugins" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L131-L145
11,616
20c/vaping
vaping/plugins/__init__.py
ProbeBase.send_emission
def send_emission(self):
    """
    emit and remove the first emission in the queue
    """
    if self._emit_queue.empty():
        return
    emit = self._emit_queue.get()
    emit()
python
emit and remove the first emission in the queue
[ "emit", "and", "remove", "the", "first", "emission", "in", "the", "queue" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L147-L154
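Taken together, queue_emission() and send_emission() implement a simple work queue: the probe enqueues one closure per output plugin, and the main loop later drains them one at a time. The standalone sketch below mirrors that pattern with the standard library queue module; it is not vaping's actual event loop.

import queue

emit_queue = queue.Queue()

def queue_emission(msg, emitters):
    # one closure per emitter, each bound to its plugin via a default argument
    for emitter in emitters:
        def emit(emitter=emitter):
            emitter.emit(msg)
        emit_queue.put(emit)

def send_emission():
    # pop the oldest queued emission, if any, and run it
    if emit_queue.empty():
        return
    emit_queue.get()()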
11,617
20c/vaping
vaping/plugins/__init__.py
FileProbe.validate_file_handler
def validate_file_handler(self):
    """
    Here we validate that our filehandler is pointing
    to an existing file.

    If it doesnt, because file has been deleted, we close
    the filehander and try to reopen
    """
    if self.fh.closed:
        try:
            self.fh = open(self.path, "r")
            self.fh.seek(0, 2)
        except OSError as err:
            logging.error("Could not reopen file: {}".format(err))
            return False

    open_stat = os.fstat(self.fh.fileno())
    try:
        file_stat = os.stat(self.path)
    except OSError as err:
        logging.error("Could not stat file: {}".format(err))
        return False

    if open_stat != file_stat:
        self.log
        self.fh.close()
        return False

    return True
python
Here we validate that our filehandler is pointing to an existing file. If it doesnt, because file has been deleted, we close the filehander and try to reopen
[ "Here", "we", "validate", "that", "our", "filehandler", "is", "pointing", "to", "an", "existing", "file", "." ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L245-L273
11,618
20c/vaping
vaping/plugins/__init__.py
FileProbe.probe
def probe(self):
    """
    Probe the file for new lines
    """

    # make sure the filehandler is still valid
    # (e.g. file stat hasnt changed, file exists etc.)
    if not self.validate_file_handler():
        return []

    messages = []

    # read any new lines and push them onto the stack
    for line in self.fh.readlines(self.max_lines):
        data = {"path":self.path}
        msg = self.new_message()

        # process the line - this is where parsing happens
        parsed = self.process_line(line, data)
        if not parsed:
            continue
        data.update(parsed)

        # process the probe - this is where data assignment
        # happens
        data = self.process_probe(data)
        msg["data"] = [data]
        messages.append(msg)

    # process all new messages before returning them
    # for emission
    messages = self.process_messages(messages)

    return messages
python
Probe the file for new lines
[ "Probe", "the", "file", "for", "new", "lines" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L276-L310
11,619
20c/vaping
vaping/plugins/__init__.py
TimeSeriesDB.filename_formatters
def filename_formatters(self, data, row): """ Returns a dict containing the various filename formatter values Values are gotten from the vaping data message as well as the currently processed row in the message - `data`: vaping message - `row`: vaping message data row """ r = { "source" : data.get("source"), "field" : self.field, "type" : data.get("type") } r.update(**row) return r
python
def filename_formatters(self, data, row): """ Returns a dict containing the various filename formatter values Values are gotten from the vaping data message as well as the currently processed row in the message - `data`: vaping message - `row`: vaping message data row """ r = { "source" : data.get("source"), "field" : self.field, "type" : data.get("type") } r.update(**row) return r
[ "def", "filename_formatters", "(", "self", ",", "data", ",", "row", ")", ":", "r", "=", "{", "\"source\"", ":", "data", ".", "get", "(", "\"source\"", ")", ",", "\"field\"", ":", "self", ".", "field", ",", "\"type\"", ":", "data", ".", "get", "(", "\"type\"", ")", "}", "r", ".", "update", "(", "*", "*", "row", ")", "return", "r" ]
Returns a dict containing the various filename formatter values. Values are gotten from the vaping data message as well as the currently processed row in the message - `data`: vaping message - `row`: vaping message data row
[ "Returns", "a", "dict", "containing", "the", "various", "filename", "formatter", "values" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L394-L411
11,620
20c/vaping
vaping/plugins/__init__.py
TimeSeriesDB.format_filename
def format_filename(self, data, row):
    """
    Returns a formatted filename using the template stored
    in self.filename

    - `data`: vaping message
    - `row`: vaping message data row
    """
    return self.filename.format(**self.filename_formatters(data, row))
python
def format_filename(self, data, row):
    """
    Returns a formatted filename using the template stored
    in self.filename

    - `data`: vaping message
    - `row`: vaping message data row
    """
    return self.filename.format(**self.filename_formatters(data, row))
[ "def", "format_filename", "(", "self", ",", "data", ",", "row", ")", ":", "return", "self", ".", "filename", ".", "format", "(", "*", "*", "self", ".", "filename_formatters", "(", "data", ",", "row", ")", ")" ]
Returns a formatted filename using the template stored in self.filename - `data`: vaping message - `row`: vaping message data row
[ "Returns", "a", "formatted", "filename", "using", "the", "template", "stored", "in", "self", ".", "filename" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L413-L421
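To make the template expansion concrete, a tiny illustration of how the formatter dict produced by filename_formatters feeds str.format; the template string and values below are made up, not vaping defaults.

formatters = {"source": "fping", "field": "latency", "type": "fping", "host": "10.0.0.1"}
template = "/var/lib/vaping/{source}-{host}-{field}.rrd"   # hypothetical self.filename value

print(template.format(**formatters))
# /var/lib/vaping/fping-10.0.0.1-latency.rrd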
11,621
20c/vaping
vaping/plugins/__init__.py
TimeSeriesDB.emit
def emit(self, message): """ emit to database """ # handle vaping data that arrives in a list if isinstance(message.get("data"), list): for row in message.get("data"): # format filename from data filename = self.format_filename(message, row) # create database file if it does not exist yet if not os.path.exists(filename): self.create(filename) # update database self.log.debug("storing time:%d, %s:%.5f in %s" % ( message.get("ts"), self.field, row.get(self.field), filename)) self.update(filename, message.get("ts"), row.get(self.field))
python
def emit(self, message): """ emit to database """ # handle vaping data that arrives in a list if isinstance(message.get("data"), list): for row in message.get("data"): # format filename from data filename = self.format_filename(message, row) # create database file if it does not exist yet if not os.path.exists(filename): self.create(filename) # update database self.log.debug("storing time:%d, %s:%.5f in %s" % ( message.get("ts"), self.field, row.get(self.field), filename)) self.update(filename, message.get("ts"), row.get(self.field))
[ "def", "emit", "(", "self", ",", "message", ")", ":", "# handle vaping data that arrives in a list", "if", "isinstance", "(", "message", ".", "get", "(", "\"data\"", ")", ",", "list", ")", ":", "for", "row", "in", "message", ".", "get", "(", "\"data\"", ")", ":", "# format filename from data", "filename", "=", "self", ".", "format_filename", "(", "message", ",", "row", ")", "# create database file if it does not exist yet", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "self", ".", "create", "(", "filename", ")", "# update database", "self", ".", "log", ".", "debug", "(", "\"storing time:%d, %s:%.5f in %s\"", "%", "(", "message", ".", "get", "(", "\"ts\"", ")", ",", "self", ".", "field", ",", "row", ".", "get", "(", "self", ".", "field", ")", ",", "filename", ")", ")", "self", ".", "update", "(", "filename", ",", "message", ".", "get", "(", "\"ts\"", ")", ",", "row", ".", "get", "(", "self", ".", "field", ")", ")" ]
emit to database
[ "emit", "to", "database" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/__init__.py#L423-L442
11,622
20c/vaping
vaping/config.py
parse_interval
def parse_interval(val): """ converts a string to float of seconds .5 = 500ms 90 = 1m30s """ re_intv = re.compile(r"([\d\.]+)([a-zA-Z]+)") val = val.strip() total = 0.0 for match in re_intv.findall(val): unit = match[1] count = float(match[0]) if unit == 's': total += count elif unit == 'm': total += count * 60 elif unit == 'ms': total += count / 1000 elif unit == "h": total += count * 3600 elif unit == 'd': total += count * 86400 else: raise ValueError("unknown unit from interval string '%s'" % val) return total
python
def parse_interval(val): """ converts a string to float of seconds .5 = 500ms 90 = 1m30s """ re_intv = re.compile(r"([\d\.]+)([a-zA-Z]+)") val = val.strip() total = 0.0 for match in re_intv.findall(val): unit = match[1] count = float(match[0]) if unit == 's': total += count elif unit == 'm': total += count * 60 elif unit == 'ms': total += count / 1000 elif unit == "h": total += count * 3600 elif unit == 'd': total += count * 86400 else: raise ValueError("unknown unit from interval string '%s'" % val) return total
[ "def", "parse_interval", "(", "val", ")", ":", "re_intv", "=", "re", ".", "compile", "(", "r\"([\\d\\.]+)([a-zA-Z]+)\"", ")", "val", "=", "val", ".", "strip", "(", ")", "total", "=", "0.0", "for", "match", "in", "re_intv", ".", "findall", "(", "val", ")", ":", "unit", "=", "match", "[", "1", "]", "count", "=", "float", "(", "match", "[", "0", "]", ")", "if", "unit", "==", "'s'", ":", "total", "+=", "count", "elif", "unit", "==", "'m'", ":", "total", "+=", "count", "*", "60", "elif", "unit", "==", "'ms'", ":", "total", "+=", "count", "/", "1000", "elif", "unit", "==", "\"h\"", ":", "total", "+=", "count", "*", "3600", "elif", "unit", "==", "'d'", ":", "total", "+=", "count", "*", "86400", "else", ":", "raise", "ValueError", "(", "\"unknown unit from interval string '%s'\"", "%", "val", ")", "return", "total" ]
converts a string to a float of seconds; .5 = 500ms, 90 = 1m30s
[ "converts", "a", "string", "to", "float", "of", "seconds", ".", "5", "=", "500ms", "90", "=", "1m30s" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/config.py#L8-L33
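A few hedged usage examples for parse_interval, assuming the vaping package is importable; the expected values follow directly from the unit handling shown above.

from vaping.config import parse_interval   # assumes vaping is installed

print(parse_interval("3s"))      # 3.0
print(parse_interval("1m30s"))   # 90.0  (60 + 30)
print(parse_interval("500ms"))   # 0.5
print(parse_interval("2h"))      # 7200.0
print(parse_interval("1d"))      # 86400.0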
11,623
20c/vaping
vaping/plugins/fping.py
FPingBase.hosts_args
def hosts_args(self): """ hosts list can contain strings specifying a host directly or dicts containing a "host" key to specify the host this way we can allow passing further config details (color, name etc.) with each host as well as simply dropping in addresses for quick setup depending on the user's needs """ host_args = [] for row in self.hosts: if isinstance(row, dict): host_args.append(row["host"]) else: host_args.append(row) # using a set changes the order dedupe = list() for each in host_args: if each not in dedupe: dedupe.append(each) return dedupe
python
def hosts_args(self): """ hosts list can contain strings specifying a host directly or dicts containing a "host" key to specify the host this way we can allow passing further config details (color, name etc.) with each host as well as simply dropping in addresses for quick setup depending on the user's needs """ host_args = [] for row in self.hosts: if isinstance(row, dict): host_args.append(row["host"]) else: host_args.append(row) # using a set changes the order dedupe = list() for each in host_args: if each not in dedupe: dedupe.append(each) return dedupe
[ "def", "hosts_args", "(", "self", ")", ":", "host_args", "=", "[", "]", "for", "row", "in", "self", ".", "hosts", ":", "if", "isinstance", "(", "row", ",", "dict", ")", ":", "host_args", ".", "append", "(", "row", "[", "\"host\"", "]", ")", "else", ":", "host_args", ".", "append", "(", "row", ")", "# using a set changes the order", "dedupe", "=", "list", "(", ")", "for", "each", "in", "host_args", ":", "if", "each", "not", "in", "dedupe", ":", "dedupe", ".", "append", "(", "each", ")", "return", "dedupe" ]
hosts list can contain strings specifying a host directly, or dicts containing a "host" key to specify the host. This way we can allow passing further config details (color, name etc.) with each host, as well as simply dropping in addresses for quick setup, depending on the user's needs
[ "hosts", "list", "can", "contain", "strings", "specifying", "a", "host", "directly", "or", "dicts", "containing", "a", "host", "key", "to", "specify", "the", "host" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/fping.py#L39-L61
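A small self-contained sketch of the host extraction and order-preserving de-duplication performed by hosts_args; the host entries below are illustrative.

hosts = ["10.0.0.1", {"host": "10.0.0.2", "color": "red"}, "10.0.0.1"]

# pull the address out of dict entries, keep plain strings as-is
host_args = [row["host"] if isinstance(row, dict) else row for row in hosts]

# order-preserving de-duplication; a set() would not keep the configured order
deduped = []
for host in host_args:
    if host not in deduped:
        deduped.append(host)

print(deduped)   # ['10.0.0.1', '10.0.0.2']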
11,624
20c/vaping
vaping/plugins/fping.py
FPingBase.parse_verbose
def parse_verbose(self, line): """ parse output from verbose format """ try: logging.debug(line) (host, pings) = line.split(' : ') cnt = 0 lost = 0 times = [] pings = pings.strip().split(' ') cnt = len(pings) for latency in pings: if latency == '-': continue times.append(float(latency)) lost = cnt - len(times) if lost: loss = lost / float(cnt) else: loss = 0.0 rv = { 'host': host.strip(), 'cnt': cnt, 'loss': loss, 'data': times, } if times: rv['min'] = min(times) rv['max'] = max(times) rv['avg'] = sum(times) / len(times) rv['last'] = times[-1] return rv except Exception as e: logging.error("failed to get data: {}".format(e))
python
def parse_verbose(self, line): """ parse output from verbose format """ try: logging.debug(line) (host, pings) = line.split(' : ') cnt = 0 lost = 0 times = [] pings = pings.strip().split(' ') cnt = len(pings) for latency in pings: if latency == '-': continue times.append(float(latency)) lost = cnt - len(times) if lost: loss = lost / float(cnt) else: loss = 0.0 rv = { 'host': host.strip(), 'cnt': cnt, 'loss': loss, 'data': times, } if times: rv['min'] = min(times) rv['max'] = max(times) rv['avg'] = sum(times) / len(times) rv['last'] = times[-1] return rv except Exception as e: logging.error("failed to get data: {}".format(e))
[ "def", "parse_verbose", "(", "self", ",", "line", ")", ":", "try", ":", "logging", ".", "debug", "(", "line", ")", "(", "host", ",", "pings", ")", "=", "line", ".", "split", "(", "' : '", ")", "cnt", "=", "0", "lost", "=", "0", "times", "=", "[", "]", "pings", "=", "pings", ".", "strip", "(", ")", ".", "split", "(", "' '", ")", "cnt", "=", "len", "(", "pings", ")", "for", "latency", "in", "pings", ":", "if", "latency", "==", "'-'", ":", "continue", "times", ".", "append", "(", "float", "(", "latency", ")", ")", "lost", "=", "cnt", "-", "len", "(", "times", ")", "if", "lost", ":", "loss", "=", "lost", "/", "float", "(", "cnt", ")", "else", ":", "loss", "=", "0.0", "rv", "=", "{", "'host'", ":", "host", ".", "strip", "(", ")", ",", "'cnt'", ":", "cnt", ",", "'loss'", ":", "loss", ",", "'data'", ":", "times", ",", "}", "if", "times", ":", "rv", "[", "'min'", "]", "=", "min", "(", "times", ")", "rv", "[", "'max'", "]", "=", "max", "(", "times", ")", "rv", "[", "'avg'", "]", "=", "sum", "(", "times", ")", "/", "len", "(", "times", ")", "rv", "[", "'last'", "]", "=", "times", "[", "-", "1", "]", "return", "rv", "except", "Exception", "as", "e", ":", "logging", ".", "error", "(", "\"failed to get data: {}\"", ".", "format", "(", "e", ")", ")" ]
parse output from verbose format
[ "parse", "output", "from", "verbose", "format" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/plugins/fping.py#L63-L100
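To make the verbose-format parsing concrete, a minimal standalone sketch of the same split-and-filter logic applied to a made-up fping output line; the address and latencies are illustrative, not real output.

line = "10.0.0.1 : 1.31 2.07 - 1.94"   # hypothetical fping line; '-' marks a lost probe

host, pings = line.split(" : ")
samples = pings.strip().split(" ")
times = [float(t) for t in samples if t != "-"]   # keep only answered probes
lost = len(samples) - len(times)
loss = lost / float(len(samples)) if lost else 0.0

print(host.strip(), len(samples), loss, times)
# 10.0.0.1 4 0.25 [1.31, 2.07, 1.94]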
11,625
20c/vaping
vaping/cli.py
start
def start(ctx, **kwargs):
    """ start a vaping process """
    update_context(ctx, kwargs)

    daemon = mk_daemon(ctx)
    if ctx.debug or kwargs['no_fork']:
        daemon.run()
    else:
        daemon.start()
python
def start(ctx, **kwargs):
    """ start a vaping process """
    update_context(ctx, kwargs)

    daemon = mk_daemon(ctx)
    if ctx.debug or kwargs['no_fork']:
        daemon.run()
    else:
        daemon.start()
[ "def", "start", "(", "ctx", ",", "*", "*", "kwargs", ")", ":", "update_context", "(", "ctx", ",", "kwargs", ")", "daemon", "=", "mk_daemon", "(", "ctx", ")", "if", "ctx", ".", "debug", "or", "kwargs", "[", "'no_fork'", "]", ":", "daemon", ".", "run", "(", ")", "else", ":", "daemon", ".", "start", "(", ")" ]
start a vaping process
[ "start", "a", "vaping", "process" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/cli.py#L52-L63
11,626
20c/vaping
vaping/cli.py
stop
def stop(ctx, **kwargs):
    """ stop a vaping process """
    update_context(ctx, kwargs)

    daemon = mk_daemon(ctx)
    daemon.stop()
python
def stop(ctx, **kwargs):
    """ stop a vaping process """
    update_context(ctx, kwargs)

    daemon = mk_daemon(ctx)
    daemon.stop()
[ "def", "stop", "(", "ctx", ",", "*", "*", "kwargs", ")", ":", "update_context", "(", "ctx", ",", "kwargs", ")", "daemon", "=", "mk_daemon", "(", "ctx", ")", "daemon", ".", "stop", "(", ")" ]
stop a vaping process
[ "stop", "a", "vaping", "process" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/cli.py#L70-L77
11,627
20c/vaping
vaping/cli.py
restart
def restart(ctx, **kwargs):
    """ restart a vaping process """
    update_context(ctx, kwargs)

    daemon = mk_daemon(ctx)
    daemon.stop()
    daemon.start()
python
def restart(ctx, **kwargs):
    """ restart a vaping process """
    update_context(ctx, kwargs)

    daemon = mk_daemon(ctx)
    daemon.stop()
    daemon.start()
[ "def", "restart", "(", "ctx", ",", "*", "*", "kwargs", ")", ":", "update_context", "(", "ctx", ",", "kwargs", ")", "daemon", "=", "mk_daemon", "(", "ctx", ")", "daemon", ".", "stop", "(", ")", "daemon", ".", "start", "(", ")" ]
restart a vaping process
[ "restart", "a", "vaping", "process" ]
c51f00586c99edb3d51e4abdbdfe3174755533ee
https://github.com/20c/vaping/blob/c51f00586c99edb3d51e4abdbdfe3174755533ee/vaping/cli.py#L84-L92
11,628
aio-libs/aiohttp-debugtoolbar
aiohttp_debugtoolbar/panels/base.py
DebugPanel.render_content
def render_content(self, request): """Return a string containing the HTML to be rendered for the panel. By default this will render the template defined by the :attr:`.template` attribute with a rendering context defined by :attr:`.data` combined with the ``dict`` returned from :meth:`.render_vars`. The ``request`` here is the active request in the toolbar. Not the original request that this panel represents. """ context = self.data.copy() context.update(self.render_vars(request)) return render(self.template, request.app, context, request=request)
python
def render_content(self, request): """Return a string containing the HTML to be rendered for the panel. By default this will render the template defined by the :attr:`.template` attribute with a rendering context defined by :attr:`.data` combined with the ``dict`` returned from :meth:`.render_vars`. The ``request`` here is the active request in the toolbar. Not the original request that this panel represents. """ context = self.data.copy() context.update(self.render_vars(request)) return render(self.template, request.app, context, request=request)
[ "def", "render_content", "(", "self", ",", "request", ")", ":", "context", "=", "self", ".", "data", ".", "copy", "(", ")", "context", ".", "update", "(", "self", ".", "render_vars", "(", "request", ")", ")", "return", "render", "(", "self", ".", "template", ",", "request", ".", "app", ",", "context", ",", "request", "=", "request", ")" ]
Return a string containing the HTML to be rendered for the panel. By default this will render the template defined by the :attr:`.template` attribute with a rendering context defined by :attr:`.data` combined with the ``dict`` returned from :meth:`.render_vars`. The ``request`` here is the active request in the toolbar. Not the original request that this panel represents.
[ "Return", "a", "string", "containing", "the", "HTML", "to", "be", "rendered", "for", "the", "panel", "." ]
a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322
https://github.com/aio-libs/aiohttp-debugtoolbar/blob/a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322/aiohttp_debugtoolbar/panels/base.py#L82-L95
11,629
aio-libs/aiohttp-debugtoolbar
aiohttp_debugtoolbar/toolbar.py
DebugToolbar.inject
def inject(self, request, response): """ Inject the debug toolbar iframe into an HTML response. """ # called in host app if not isinstance(response, Response): return settings = request.app[APP_KEY]['settings'] response_html = response.body route = request.app.router['debugtoolbar.request'] toolbar_url = route.url_for(request_id=request['id']) button_style = settings['button_style'] css_path = request.app.router[STATIC_ROUTE_NAME].url_for( filename='css/toolbar_button.css') toolbar_css = toolbar_css_template % {'css_path': css_path} toolbar_html = toolbar_html_template % { 'button_style': button_style, 'css_path': css_path, 'toolbar_url': toolbar_url} toolbar_html = toolbar_html.encode(response.charset or 'utf-8') toolbar_css = toolbar_css.encode(response.charset or 'utf-8') response_html = replace_insensitive( response_html, b'</head>', toolbar_css + b'</head>') response.body = replace_insensitive( response_html, b'</body>', toolbar_html + b'</body>')
python
def inject(self, request, response): """ Inject the debug toolbar iframe into an HTML response. """ # called in host app if not isinstance(response, Response): return settings = request.app[APP_KEY]['settings'] response_html = response.body route = request.app.router['debugtoolbar.request'] toolbar_url = route.url_for(request_id=request['id']) button_style = settings['button_style'] css_path = request.app.router[STATIC_ROUTE_NAME].url_for( filename='css/toolbar_button.css') toolbar_css = toolbar_css_template % {'css_path': css_path} toolbar_html = toolbar_html_template % { 'button_style': button_style, 'css_path': css_path, 'toolbar_url': toolbar_url} toolbar_html = toolbar_html.encode(response.charset or 'utf-8') toolbar_css = toolbar_css.encode(response.charset or 'utf-8') response_html = replace_insensitive( response_html, b'</head>', toolbar_css + b'</head>') response.body = replace_insensitive( response_html, b'</body>', toolbar_html + b'</body>')
[ "def", "inject", "(", "self", ",", "request", ",", "response", ")", ":", "# called in host app", "if", "not", "isinstance", "(", "response", ",", "Response", ")", ":", "return", "settings", "=", "request", ".", "app", "[", "APP_KEY", "]", "[", "'settings'", "]", "response_html", "=", "response", ".", "body", "route", "=", "request", ".", "app", ".", "router", "[", "'debugtoolbar.request'", "]", "toolbar_url", "=", "route", ".", "url_for", "(", "request_id", "=", "request", "[", "'id'", "]", ")", "button_style", "=", "settings", "[", "'button_style'", "]", "css_path", "=", "request", ".", "app", ".", "router", "[", "STATIC_ROUTE_NAME", "]", ".", "url_for", "(", "filename", "=", "'css/toolbar_button.css'", ")", "toolbar_css", "=", "toolbar_css_template", "%", "{", "'css_path'", ":", "css_path", "}", "toolbar_html", "=", "toolbar_html_template", "%", "{", "'button_style'", ":", "button_style", ",", "'css_path'", ":", "css_path", ",", "'toolbar_url'", ":", "toolbar_url", "}", "toolbar_html", "=", "toolbar_html", ".", "encode", "(", "response", ".", "charset", "or", "'utf-8'", ")", "toolbar_css", "=", "toolbar_css", ".", "encode", "(", "response", ".", "charset", "or", "'utf-8'", ")", "response_html", "=", "replace_insensitive", "(", "response_html", ",", "b'</head>'", ",", "toolbar_css", "+", "b'</head>'", ")", "response", ".", "body", "=", "replace_insensitive", "(", "response_html", ",", "b'</body>'", ",", "toolbar_html", "+", "b'</body>'", ")" ]
Inject the debug toolbar iframe into an HTML response.
[ "Inject", "the", "debug", "toolbar", "iframe", "into", "an", "HTML", "response", "." ]
a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322
https://github.com/aio-libs/aiohttp-debugtoolbar/blob/a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322/aiohttp_debugtoolbar/toolbar.py#L52-L81
11,630
aio-libs/aiohttp-debugtoolbar
aiohttp_debugtoolbar/utils.py
common_segment_count
def common_segment_count(path, value):
    """Return the number of path segments common to both"""
    i = 0
    if len(path) <= len(value):
        for x1, x2 in zip(path, value):
            if x1 == x2:
                i += 1
            else:
                return 0
    return i
python
def common_segment_count(path, value):
    """Return the number of path segments common to both"""
    i = 0
    if len(path) <= len(value):
        for x1, x2 in zip(path, value):
            if x1 == x2:
                i += 1
            else:
                return 0
    return i
[ "def", "common_segment_count", "(", "path", ",", "value", ")", ":", "i", "=", "0", "if", "len", "(", "path", ")", "<=", "len", "(", "value", ")", ":", "for", "x1", ",", "x2", "in", "zip", "(", "path", ",", "value", ")", ":", "if", "x1", "==", "x2", ":", "i", "+=", "1", "else", ":", "return", "0", "return", "i" ]
Return the number of path segments common to both
[ "Return", "the", "number", "of", "path", "segments", "common", "to", "both" ]
a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322
https://github.com/aio-libs/aiohttp-debugtoolbar/blob/a1c3fb2b487bcaaf23eb71ee4c9c3cfc9cb94322/aiohttp_debugtoolbar/utils.py#L83-L92
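A quick usage sketch of common_segment_count, copying the function above so the example runs on its own; the path tuples are illustrative.

def common_segment_count(path, value):   # copied from the snippet above
    i = 0
    if len(path) <= len(value):
        for x1, x2 in zip(path, value):
            if x1 == x2:
                i += 1
            else:
                return 0
    return i

print(common_segment_count(("api", "v1"), ("api", "v1", "users")))   # 2 - path is a prefix of value
print(common_segment_count(("api", "v2"), ("api", "v1", "users")))   # 0 - any mismatch returns 0
print(common_segment_count(("api", "v1", "users"), ("api", "v1")))   # 0 - path longer than value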
11,631
jsocol/pystatsd
statsd/client/base.py
StatsClientBase.timing
def timing(self, stat, delta, rate=1):
    """
    Send new timing information.

    `delta` can be either a number of milliseconds or a timedelta.
    """
    if isinstance(delta, timedelta):
        # Convert timedelta to number of milliseconds.
        delta = delta.total_seconds() * 1000.
    self._send_stat(stat, '%0.6f|ms' % delta, rate)
python
def timing(self, stat, delta, rate=1):
    """
    Send new timing information.

    `delta` can be either a number of milliseconds or a timedelta.
    """
    if isinstance(delta, timedelta):
        # Convert timedelta to number of milliseconds.
        delta = delta.total_seconds() * 1000.
    self._send_stat(stat, '%0.6f|ms' % delta, rate)
[ "def", "timing", "(", "self", ",", "stat", ",", "delta", ",", "rate", "=", "1", ")", ":", "if", "isinstance", "(", "delta", ",", "timedelta", ")", ":", "# Convert timedelta to number of milliseconds.", "delta", "=", "delta", ".", "total_seconds", "(", ")", "*", "1000.", "self", ".", "_send_stat", "(", "stat", ",", "'%0.6f|ms'", "%", "delta", ",", "rate", ")" ]
Send new timing information. `delta` can be either a number of milliseconds or a timedelta.
[ "Send", "new", "timing", "information", "." ]
006a86394c44ff71e6e8e52529daa3c0fdcc93fb
https://github.com/jsocol/pystatsd/blob/006a86394c44ff71e6e8e52529daa3c0fdcc93fb/statsd/client/base.py#L22-L31
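A hedged usage example for timing, assuming the statsd package is installed; the host, port, prefix and stat names below are made up for illustration.

from datetime import timedelta
from statsd import StatsClient

statsd = StatsClient(host="localhost", port=8125, prefix="myapp")   # hypothetical settings

statsd.timing("views.render", 42)                          # 42 ms as a plain number
statsd.timing("views.render", timedelta(milliseconds=42))  # equivalent, via timedelta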
11,632
jsocol/pystatsd
statsd/client/base.py
StatsClientBase.decr
def decr(self, stat, count=1, rate=1):
    """Decrement a stat by `count`."""
    self.incr(stat, -count, rate)
python
def decr(self, stat, count=1, rate=1):
    """Decrement a stat by `count`."""
    self.incr(stat, -count, rate)
[ "def", "decr", "(", "self", ",", "stat", ",", "count", "=", "1", ",", "rate", "=", "1", ")", ":", "self", ".", "incr", "(", "stat", ",", "-", "count", ",", "rate", ")" ]
Decrement a stat by `count`.
[ "Decrement", "a", "stat", "by", "count", "." ]
006a86394c44ff71e6e8e52529daa3c0fdcc93fb
https://github.com/jsocol/pystatsd/blob/006a86394c44ff71e6e8e52529daa3c0fdcc93fb/statsd/client/base.py#L37-L39
11,633
jsocol/pystatsd
statsd/client/base.py
StatsClientBase.gauge
def gauge(self, stat, value, rate=1, delta=False): """Set a gauge value.""" if value < 0 and not delta: if rate < 1: if random.random() > rate: return with self.pipeline() as pipe: pipe._send_stat(stat, '0|g', 1) pipe._send_stat(stat, '%s|g' % value, 1) else: prefix = '+' if delta and value >= 0 else '' self._send_stat(stat, '%s%s|g' % (prefix, value), rate)
python
def gauge(self, stat, value, rate=1, delta=False): """Set a gauge value.""" if value < 0 and not delta: if rate < 1: if random.random() > rate: return with self.pipeline() as pipe: pipe._send_stat(stat, '0|g', 1) pipe._send_stat(stat, '%s|g' % value, 1) else: prefix = '+' if delta and value >= 0 else '' self._send_stat(stat, '%s%s|g' % (prefix, value), rate)
[ "def", "gauge", "(", "self", ",", "stat", ",", "value", ",", "rate", "=", "1", ",", "delta", "=", "False", ")", ":", "if", "value", "<", "0", "and", "not", "delta", ":", "if", "rate", "<", "1", ":", "if", "random", ".", "random", "(", ")", ">", "rate", ":", "return", "with", "self", ".", "pipeline", "(", ")", "as", "pipe", ":", "pipe", ".", "_send_stat", "(", "stat", ",", "'0|g'", ",", "1", ")", "pipe", ".", "_send_stat", "(", "stat", ",", "'%s|g'", "%", "value", ",", "1", ")", "else", ":", "prefix", "=", "'+'", "if", "delta", "and", "value", ">=", "0", "else", "''", "self", ".", "_send_stat", "(", "stat", ",", "'%s%s|g'", "%", "(", "prefix", ",", "value", ")", ",", "rate", ")" ]
Set a gauge value.
[ "Set", "a", "gauge", "value", "." ]
006a86394c44ff71e6e8e52529daa3c0fdcc93fb
https://github.com/jsocol/pystatsd/blob/006a86394c44ff71e6e8e52529daa3c0fdcc93fb/statsd/client/base.py#L41-L52
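A hedged usage example for gauge showing absolute, delta and negative values; the client settings and metric names are made up, and the payload comments describe only the value|type part of each packet, before any prefix is applied.

from statsd import StatsClient

statsd = StatsClient(host="localhost", port=8125, prefix="myapp")   # hypothetical settings

statsd.gauge("queue.depth", 17)              # absolute value -> '17|g'
statsd.gauge("queue.depth", 3, delta=True)   # relative change -> '+3|g'
statsd.gauge("queue.depth", -2, delta=True)  # relative change -> '-2|g'
statsd.gauge("temperature", -5)              # negative absolute -> '0|g' then '-5|g' to reset first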
11,634
jsocol/pystatsd
statsd/client/base.py
StatsClientBase.set
def set(self, stat, value, rate=1):
    """Set a set value."""
    self._send_stat(stat, '%s|s' % value, rate)
python
def set(self, stat, value, rate=1):
    """Set a set value."""
    self._send_stat(stat, '%s|s' % value, rate)
[ "def", "set", "(", "self", ",", "stat", ",", "value", ",", "rate", "=", "1", ")", ":", "self", ".", "_send_stat", "(", "stat", ",", "'%s|s'", "%", "value", ",", "rate", ")" ]
Set a set value.
[ "Set", "a", "set", "value", "." ]
006a86394c44ff71e6e8e52529daa3c0fdcc93fb
https://github.com/jsocol/pystatsd/blob/006a86394c44ff71e6e8e52529daa3c0fdcc93fb/statsd/client/base.py#L54-L56
11,635
jsocol/pystatsd
statsd/client/timer.py
safe_wraps
def safe_wraps(wrapper, *args, **kwargs):
    """Safely wraps partial functions."""
    while isinstance(wrapper, functools.partial):
        wrapper = wrapper.func
    return functools.wraps(wrapper, *args, **kwargs)
python
def safe_wraps(wrapper, *args, **kwargs):
    """Safely wraps partial functions."""
    while isinstance(wrapper, functools.partial):
        wrapper = wrapper.func
    return functools.wraps(wrapper, *args, **kwargs)
[ "def", "safe_wraps", "(", "wrapper", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "while", "isinstance", "(", "wrapper", ",", "functools", ".", "partial", ")", ":", "wrapper", "=", "wrapper", ".", "func", "return", "functools", ".", "wraps", "(", "wrapper", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Safely wraps partial functions.
[ "Safely", "wraps", "partial", "functions", "." ]
006a86394c44ff71e6e8e52529daa3c0fdcc93fb
https://github.com/jsocol/pystatsd/blob/006a86394c44ff71e6e8e52529daa3c0fdcc93fb/statsd/client/timer.py#L14-L18
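A small self-contained example of why safe_wraps exists: a functools.partial does not carry the underlying function's metadata, so safe_wraps unwraps it before applying functools.wraps. The function is copied from the snippet above; greet and timed_shout are made-up names.

import functools

def safe_wraps(wrapper, *args, **kwargs):   # copied from the snippet above
    while isinstance(wrapper, functools.partial):
        wrapper = wrapper.func
    return functools.wraps(wrapper, *args, **kwargs)

def greet(name, greeting="hello"):
    """Return a short greeting."""
    return "{} {}".format(greeting, name)

shout = functools.partial(greet, greeting="HEY")

@safe_wraps(shout)
def timed_shout(*args, **kwargs):
    return shout(*args, **kwargs)

print(timed_shout.__name__)   # 'greet' - metadata comes from the underlying function
print(timed_shout("world"))   # 'HEY world'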
11,636
jorisroovers/gitlint
gitlint/user_rules.py
find_rule_classes
def find_rule_classes(extra_path): """ Searches a given directory or python module for rule classes. This is done by adding the directory path to the python path, importing the modules and then finding any Rule class in those modules. :param extra_path: absolute directory or file path to search for rule classes :return: The list of rule classes that are found in the given directory or module """ files = [] modules = [] if os.path.isfile(extra_path): files = [os.path.basename(extra_path)] directory = os.path.dirname(extra_path) elif os.path.isdir(extra_path): files = os.listdir(extra_path) directory = extra_path else: raise UserRuleError(u"Invalid extra-path: {0}".format(extra_path)) # Filter out files that are not python modules for filename in files: if fnmatch.fnmatch(filename, '*.py'): modules.append(os.path.splitext(filename)[0]) # No need to continue if there are no modules specified if not modules: return [] # Append the extra rules path to python path so that we can import them sys.path.append(directory) # Find all the rule classes in the found python files rule_classes = [] for module in modules: # Import the module try: importlib.import_module(module) except Exception as e: raise UserRuleError(u"Error while importing extra-path module '{0}': {1}".format(module, ustr(e))) # Find all rule classes in the module. We do this my inspecting all members of the module and checking # 1) is it a class, if not, skip # 2) is the parent path the current module. If not, we are dealing with an imported class, skip # 3) is it a subclass of rule rule_classes.extend([clazz for _, clazz in inspect.getmembers(sys.modules[module]) if inspect.isclass(clazz) and # check isclass to ensure clazz.__module__ exists clazz.__module__ == module and # ignore imported classes (issubclass(clazz, rules.LineRule) or issubclass(clazz, rules.CommitRule))]) # validate that the rule classes are valid user-defined rules for rule_class in rule_classes: assert_valid_rule_class(rule_class) return rule_classes
python
def find_rule_classes(extra_path): """ Searches a given directory or python module for rule classes. This is done by adding the directory path to the python path, importing the modules and then finding any Rule class in those modules. :param extra_path: absolute directory or file path to search for rule classes :return: The list of rule classes that are found in the given directory or module """ files = [] modules = [] if os.path.isfile(extra_path): files = [os.path.basename(extra_path)] directory = os.path.dirname(extra_path) elif os.path.isdir(extra_path): files = os.listdir(extra_path) directory = extra_path else: raise UserRuleError(u"Invalid extra-path: {0}".format(extra_path)) # Filter out files that are not python modules for filename in files: if fnmatch.fnmatch(filename, '*.py'): modules.append(os.path.splitext(filename)[0]) # No need to continue if there are no modules specified if not modules: return [] # Append the extra rules path to python path so that we can import them sys.path.append(directory) # Find all the rule classes in the found python files rule_classes = [] for module in modules: # Import the module try: importlib.import_module(module) except Exception as e: raise UserRuleError(u"Error while importing extra-path module '{0}': {1}".format(module, ustr(e))) # Find all rule classes in the module. We do this my inspecting all members of the module and checking # 1) is it a class, if not, skip # 2) is the parent path the current module. If not, we are dealing with an imported class, skip # 3) is it a subclass of rule rule_classes.extend([clazz for _, clazz in inspect.getmembers(sys.modules[module]) if inspect.isclass(clazz) and # check isclass to ensure clazz.__module__ exists clazz.__module__ == module and # ignore imported classes (issubclass(clazz, rules.LineRule) or issubclass(clazz, rules.CommitRule))]) # validate that the rule classes are valid user-defined rules for rule_class in rule_classes: assert_valid_rule_class(rule_class) return rule_classes
[ "def", "find_rule_classes", "(", "extra_path", ")", ":", "files", "=", "[", "]", "modules", "=", "[", "]", "if", "os", ".", "path", ".", "isfile", "(", "extra_path", ")", ":", "files", "=", "[", "os", ".", "path", ".", "basename", "(", "extra_path", ")", "]", "directory", "=", "os", ".", "path", ".", "dirname", "(", "extra_path", ")", "elif", "os", ".", "path", ".", "isdir", "(", "extra_path", ")", ":", "files", "=", "os", ".", "listdir", "(", "extra_path", ")", "directory", "=", "extra_path", "else", ":", "raise", "UserRuleError", "(", "u\"Invalid extra-path: {0}\"", ".", "format", "(", "extra_path", ")", ")", "# Filter out files that are not python modules", "for", "filename", "in", "files", ":", "if", "fnmatch", ".", "fnmatch", "(", "filename", ",", "'*.py'", ")", ":", "modules", ".", "append", "(", "os", ".", "path", ".", "splitext", "(", "filename", ")", "[", "0", "]", ")", "# No need to continue if there are no modules specified", "if", "not", "modules", ":", "return", "[", "]", "# Append the extra rules path to python path so that we can import them", "sys", ".", "path", ".", "append", "(", "directory", ")", "# Find all the rule classes in the found python files", "rule_classes", "=", "[", "]", "for", "module", "in", "modules", ":", "# Import the module", "try", ":", "importlib", ".", "import_module", "(", "module", ")", "except", "Exception", "as", "e", ":", "raise", "UserRuleError", "(", "u\"Error while importing extra-path module '{0}': {1}\"", ".", "format", "(", "module", ",", "ustr", "(", "e", ")", ")", ")", "# Find all rule classes in the module. We do this my inspecting all members of the module and checking", "# 1) is it a class, if not, skip", "# 2) is the parent path the current module. If not, we are dealing with an imported class, skip", "# 3) is it a subclass of rule", "rule_classes", ".", "extend", "(", "[", "clazz", "for", "_", ",", "clazz", "in", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "module", "]", ")", "if", "inspect", ".", "isclass", "(", "clazz", ")", "and", "# check isclass to ensure clazz.__module__ exists", "clazz", ".", "__module__", "==", "module", "and", "# ignore imported classes", "(", "issubclass", "(", "clazz", ",", "rules", ".", "LineRule", ")", "or", "issubclass", "(", "clazz", ",", "rules", ".", "CommitRule", ")", ")", "]", ")", "# validate that the rule classes are valid user-defined rules", "for", "rule_class", "in", "rule_classes", ":", "assert_valid_rule_class", "(", "rule_class", ")", "return", "rule_classes" ]
Searches a given directory or python module for rule classes. This is done by adding the directory path to the python path, importing the modules and then finding any Rule class in those modules. :param extra_path: absolute directory or file path to search for rule classes :return: The list of rule classes that are found in the given directory or module
[ "Searches", "a", "given", "directory", "or", "python", "module", "for", "rule", "classes", ".", "This", "is", "done", "by", "adding", "the", "directory", "path", "to", "the", "python", "path", "importing", "the", "modules", "and", "then", "finding", "any", "Rule", "class", "in", "those", "modules", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/user_rules.py#L16-L73
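A reduced sketch of the class-discovery technique used by find_rule_classes, built only on importlib and inspect; the helper name classes_defined_in and the commented usage are hypothetical, not part of gitlint's API.

import importlib
import inspect
import sys

def classes_defined_in(module_name, base_class):
    """Import a module and return its own subclasses of base_class."""
    importlib.import_module(module_name)
    module = sys.modules[module_name]
    return [clazz for _, clazz in inspect.getmembers(module)
            if inspect.isclass(clazz)
            and clazz.__module__ == module_name   # skip classes merely imported into the module
            and issubclass(clazz, base_class)]

# usage sketch (module and base names are illustrative):
# rule_classes = classes_defined_in("my_gitlint_rules", rules.CommitRule)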
11,637
jorisroovers/gitlint
qa/base.py
ustr
def ustr(obj): """ Python 2 and 3 utility method that converts an obj to unicode in python 2 and to a str object in python 3""" if sys.version_info[0] == 2: # If we are getting a string, then do an explicit decode # else, just call the unicode method of the object if type(obj) in [str, basestring]: # pragma: no cover # noqa return unicode(obj, DEFAULT_ENCODING) # pragma: no cover # noqa else: return unicode(obj) # pragma: no cover # noqa else: if type(obj) in [bytes]: return obj.decode(DEFAULT_ENCODING) else: return str(obj)
python
def ustr(obj): """ Python 2 and 3 utility method that converts an obj to unicode in python 2 and to a str object in python 3""" if sys.version_info[0] == 2: # If we are getting a string, then do an explicit decode # else, just call the unicode method of the object if type(obj) in [str, basestring]: # pragma: no cover # noqa return unicode(obj, DEFAULT_ENCODING) # pragma: no cover # noqa else: return unicode(obj) # pragma: no cover # noqa else: if type(obj) in [bytes]: return obj.decode(DEFAULT_ENCODING) else: return str(obj)
[ "def", "ustr", "(", "obj", ")", ":", "if", "sys", ".", "version_info", "[", "0", "]", "==", "2", ":", "# If we are getting a string, then do an explicit decode", "# else, just call the unicode method of the object", "if", "type", "(", "obj", ")", "in", "[", "str", ",", "basestring", "]", ":", "# pragma: no cover # noqa", "return", "unicode", "(", "obj", ",", "DEFAULT_ENCODING", ")", "# pragma: no cover # noqa", "else", ":", "return", "unicode", "(", "obj", ")", "# pragma: no cover # noqa", "else", ":", "if", "type", "(", "obj", ")", "in", "[", "bytes", "]", ":", "return", "obj", ".", "decode", "(", "DEFAULT_ENCODING", ")", "else", ":", "return", "str", "(", "obj", ")" ]
Python 2 and 3 utility method that converts an obj to unicode in python 2 and to a str object in python 3
[ "Python", "2", "and", "3", "utility", "method", "that", "converts", "an", "obj", "to", "unicode", "in", "python", "2", "and", "to", "a", "str", "object", "in", "python", "3" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/qa/base.py#L21-L34
11,638
jorisroovers/gitlint
gitlint/config.py
LintConfig.get_rule_option
def get_rule_option(self, rule_name_or_id, option_name):
    """ Returns the value of a given option for a given rule.
        LintConfigErrors will be raised if the rule or option don't exist. """
    option = self._get_option(rule_name_or_id, option_name)
    return option.value
python
def get_rule_option(self, rule_name_or_id, option_name):
    """ Returns the value of a given option for a given rule.
        LintConfigErrors will be raised if the rule or option don't exist. """
    option = self._get_option(rule_name_or_id, option_name)
    return option.value
[ "def", "get_rule_option", "(", "self", ",", "rule_name_or_id", ",", "option_name", ")", ":", "option", "=", "self", ".", "_get_option", "(", "rule_name_or_id", ",", "option_name", ")", "return", "option", ".", "value" ]
Returns the value of a given option for a given rule. LintConfigErrors will be raised if the rule or option don't exist.
[ "Returns", "the", "value", "of", "a", "given", "option", "for", "a", "given", "rule", ".", "LintConfigErrors", "will", "be", "raised", "if", "the", "rule", "or", "option", "don", "t", "exist", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/config.py#L207-L211
11,639
jorisroovers/gitlint
gitlint/config.py
LintConfig.set_rule_option
def set_rule_option(self, rule_name_or_id, option_name, option_value): """ Attempts to set a given value for a given option for a given rule. LintConfigErrors will be raised if the rule or option don't exist or if the value is invalid. """ option = self._get_option(rule_name_or_id, option_name) try: option.set(option_value) except options.RuleOptionError as e: msg = u"'{0}' is not a valid value for option '{1}.{2}'. {3}." raise LintConfigError(msg.format(option_value, rule_name_or_id, option_name, ustr(e)))
python
def set_rule_option(self, rule_name_or_id, option_name, option_value): """ Attempts to set a given value for a given option for a given rule. LintConfigErrors will be raised if the rule or option don't exist or if the value is invalid. """ option = self._get_option(rule_name_or_id, option_name) try: option.set(option_value) except options.RuleOptionError as e: msg = u"'{0}' is not a valid value for option '{1}.{2}'. {3}." raise LintConfigError(msg.format(option_value, rule_name_or_id, option_name, ustr(e)))
[ "def", "set_rule_option", "(", "self", ",", "rule_name_or_id", ",", "option_name", ",", "option_value", ")", ":", "option", "=", "self", ".", "_get_option", "(", "rule_name_or_id", ",", "option_name", ")", "try", ":", "option", ".", "set", "(", "option_value", ")", "except", "options", ".", "RuleOptionError", "as", "e", ":", "msg", "=", "u\"'{0}' is not a valid value for option '{1}.{2}'. {3}.\"", "raise", "LintConfigError", "(", "msg", ".", "format", "(", "option_value", ",", "rule_name_or_id", ",", "option_name", ",", "ustr", "(", "e", ")", ")", ")" ]
Attempts to set a given value for a given option for a given rule. LintConfigErrors will be raised if the rule or option don't exist or if the value is invalid.
[ "Attempts", "to", "set", "a", "given", "value", "for", "a", "given", "option", "for", "a", "given", "rule", ".", "LintConfigErrors", "will", "be", "raised", "if", "the", "rule", "or", "option", "don", "t", "exist", "or", "if", "the", "value", "is", "invalid", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/config.py#L213-L221
11,640
jorisroovers/gitlint
gitlint/config.py
LintConfigBuilder.set_from_config_file
def set_from_config_file(self, filename): """ Loads lint config from a ini-style config file """ if not os.path.exists(filename): raise LintConfigError(u"Invalid file path: {0}".format(filename)) self._config_path = os.path.abspath(filename) try: parser = ConfigParser() parser.read(filename) for section_name in parser.sections(): for option_name, option_value in parser.items(section_name): self.set_option(section_name, option_name, ustr(option_value)) except ConfigParserError as e: raise LintConfigError(ustr(e))
python
def set_from_config_file(self, filename): """ Loads lint config from a ini-style config file """ if not os.path.exists(filename): raise LintConfigError(u"Invalid file path: {0}".format(filename)) self._config_path = os.path.abspath(filename) try: parser = ConfigParser() parser.read(filename) for section_name in parser.sections(): for option_name, option_value in parser.items(section_name): self.set_option(section_name, option_name, ustr(option_value)) except ConfigParserError as e: raise LintConfigError(ustr(e))
[ "def", "set_from_config_file", "(", "self", ",", "filename", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "raise", "LintConfigError", "(", "u\"Invalid file path: {0}\"", ".", "format", "(", "filename", ")", ")", "self", ".", "_config_path", "=", "os", ".", "path", ".", "abspath", "(", "filename", ")", "try", ":", "parser", "=", "ConfigParser", "(", ")", "parser", ".", "read", "(", "filename", ")", "for", "section_name", "in", "parser", ".", "sections", "(", ")", ":", "for", "option_name", ",", "option_value", "in", "parser", ".", "items", "(", "section_name", ")", ":", "self", ".", "set_option", "(", "section_name", ",", "option_name", ",", "ustr", "(", "option_value", ")", ")", "except", "ConfigParserError", "as", "e", ":", "raise", "LintConfigError", "(", "ustr", "(", "e", ")", ")" ]
Loads lint config from an ini-style config file
[ "Loads", "lint", "config", "from", "a", "ini", "-", "style", "config", "file" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/config.py#L310-L324
11,641
jorisroovers/gitlint
gitlint/config.py
LintConfigBuilder.build
def build(self, config=None): """ Build a real LintConfig object by normalizing and validating the options that were previously set on this factory. """ # If we are passed a config object, then rebuild that object instead of building a new lintconfig object from # scratch if not config: config = LintConfig() config._config_path = self._config_path # Set general options first as this might change the behavior or validity of the other options general_section = self._config_blueprint.get('general') if general_section: for option_name, option_value in general_section.items(): config.set_general_option(option_name, option_value) for section_name, section_dict in self._config_blueprint.items(): for option_name, option_value in section_dict.items(): # Skip over the general section, as we've already done that above if section_name != "general": config.set_rule_option(section_name, option_name, option_value) return config
python
def build(self, config=None): """ Build a real LintConfig object by normalizing and validating the options that were previously set on this factory. """ # If we are passed a config object, then rebuild that object instead of building a new lintconfig object from # scratch if not config: config = LintConfig() config._config_path = self._config_path # Set general options first as this might change the behavior or validity of the other options general_section = self._config_blueprint.get('general') if general_section: for option_name, option_value in general_section.items(): config.set_general_option(option_name, option_value) for section_name, section_dict in self._config_blueprint.items(): for option_name, option_value in section_dict.items(): # Skip over the general section, as we've already done that above if section_name != "general": config.set_rule_option(section_name, option_name, option_value) return config
[ "def", "build", "(", "self", ",", "config", "=", "None", ")", ":", "# If we are passed a config object, then rebuild that object instead of building a new lintconfig object from", "# scratch", "if", "not", "config", ":", "config", "=", "LintConfig", "(", ")", "config", ".", "_config_path", "=", "self", ".", "_config_path", "# Set general options first as this might change the behavior or validity of the other options", "general_section", "=", "self", ".", "_config_blueprint", ".", "get", "(", "'general'", ")", "if", "general_section", ":", "for", "option_name", ",", "option_value", "in", "general_section", ".", "items", "(", ")", ":", "config", ".", "set_general_option", "(", "option_name", ",", "option_value", ")", "for", "section_name", ",", "section_dict", "in", "self", ".", "_config_blueprint", ".", "items", "(", ")", ":", "for", "option_name", ",", "option_value", "in", "section_dict", ".", "items", "(", ")", ":", "# Skip over the general section, as we've already done that above", "if", "section_name", "!=", "\"general\"", ":", "config", ".", "set_rule_option", "(", "section_name", ",", "option_name", ",", "option_value", ")", "return", "config" ]
Build a real LintConfig object by normalizing and validating the options that were previously set on this factory.
[ "Build", "a", "real", "LintConfig", "object", "by", "normalizing", "and", "validating", "the", "options", "that", "were", "previously", "set", "on", "this", "factory", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/config.py#L326-L349
11,642
jorisroovers/gitlint
gitlint/config.py
LintConfigBuilder.clone
def clone(self):
    """ Creates an exact copy of a LintConfigBuilder. """
    builder = LintConfigBuilder()
    builder._config_blueprint = copy.deepcopy(self._config_blueprint)
    builder._config_path = self._config_path
    return builder
python
def clone(self):
    """ Creates an exact copy of a LintConfigBuilder. """
    builder = LintConfigBuilder()
    builder._config_blueprint = copy.deepcopy(self._config_blueprint)
    builder._config_path = self._config_path
    return builder
[ "def", "clone", "(", "self", ")", ":", "builder", "=", "LintConfigBuilder", "(", ")", "builder", ".", "_config_blueprint", "=", "copy", ".", "deepcopy", "(", "self", ".", "_config_blueprint", ")", "builder", ".", "_config_path", "=", "self", ".", "_config_path", "return", "builder" ]
Creates an exact copy of a LintConfigBuilder.
[ "Creates", "an", "exact", "copy", "of", "a", "LintConfigBuilder", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/config.py#L351-L356
11,643
jorisroovers/gitlint
gitlint/git.py
_git
def _git(*command_parts, **kwargs): """ Convenience function for running git commands. Automatically deals with exceptions and unicode. """ # Special arguments passed to sh: http://amoffat.github.io/sh/special_arguments.html git_kwargs = {'_tty_out': False} git_kwargs.update(kwargs) try: result = sh.git(*command_parts, **git_kwargs) # pylint: disable=unexpected-keyword-arg # If we reach this point and the result has an exit_code that is larger than 0, this means that we didn't # get an exception (which is the default sh behavior for non-zero exit codes) and so the user is expecting # a non-zero exit code -> just return the entire result if hasattr(result, 'exit_code') and result.exit_code > 0: return result return ustr(result) except CommandNotFound: raise GitNotInstalledError() except ErrorReturnCode as e: # Something went wrong while executing the git command error_msg = e.stderr.strip() if '_cwd' in git_kwargs and b"not a git repository" in error_msg.lower(): error_msg = u"{0} is not a git repository.".format(git_kwargs['_cwd']) else: error_msg = u"An error occurred while executing '{0}': {1}".format(e.full_cmd, error_msg) raise GitContextError(error_msg)
python
def _git(*command_parts, **kwargs): """ Convenience function for running git commands. Automatically deals with exceptions and unicode. """ # Special arguments passed to sh: http://amoffat.github.io/sh/special_arguments.html git_kwargs = {'_tty_out': False} git_kwargs.update(kwargs) try: result = sh.git(*command_parts, **git_kwargs) # pylint: disable=unexpected-keyword-arg # If we reach this point and the result has an exit_code that is larger than 0, this means that we didn't # get an exception (which is the default sh behavior for non-zero exit codes) and so the user is expecting # a non-zero exit code -> just return the entire result if hasattr(result, 'exit_code') and result.exit_code > 0: return result return ustr(result) except CommandNotFound: raise GitNotInstalledError() except ErrorReturnCode as e: # Something went wrong while executing the git command error_msg = e.stderr.strip() if '_cwd' in git_kwargs and b"not a git repository" in error_msg.lower(): error_msg = u"{0} is not a git repository.".format(git_kwargs['_cwd']) else: error_msg = u"An error occurred while executing '{0}': {1}".format(e.full_cmd, error_msg) raise GitContextError(error_msg)
[ "def", "_git", "(", "*", "command_parts", ",", "*", "*", "kwargs", ")", ":", "# Special arguments passed to sh: http://amoffat.github.io/sh/special_arguments.html", "git_kwargs", "=", "{", "'_tty_out'", ":", "False", "}", "git_kwargs", ".", "update", "(", "kwargs", ")", "try", ":", "result", "=", "sh", ".", "git", "(", "*", "command_parts", ",", "*", "*", "git_kwargs", ")", "# pylint: disable=unexpected-keyword-arg", "# If we reach this point and the result has an exit_code that is larger than 0, this means that we didn't", "# get an exception (which is the default sh behavior for non-zero exit codes) and so the user is expecting", "# a non-zero exit code -> just return the entire result", "if", "hasattr", "(", "result", ",", "'exit_code'", ")", "and", "result", ".", "exit_code", ">", "0", ":", "return", "result", "return", "ustr", "(", "result", ")", "except", "CommandNotFound", ":", "raise", "GitNotInstalledError", "(", ")", "except", "ErrorReturnCode", "as", "e", ":", "# Something went wrong while executing the git command", "error_msg", "=", "e", ".", "stderr", ".", "strip", "(", ")", "if", "'_cwd'", "in", "git_kwargs", "and", "b\"not a git repository\"", "in", "error_msg", ".", "lower", "(", ")", ":", "error_msg", "=", "u\"{0} is not a git repository.\"", ".", "format", "(", "git_kwargs", "[", "'_cwd'", "]", ")", "else", ":", "error_msg", "=", "u\"An error occurred while executing '{0}': {1}\"", ".", "format", "(", "e", ".", "full_cmd", ",", "error_msg", ")", "raise", "GitContextError", "(", "error_msg", ")" ]
Convenience function for running git commands. Automatically deals with exceptions and unicode.
[ "Convenience", "function", "for", "running", "git", "commands", ".", "Automatically", "deals", "with", "exceptions", "and", "unicode", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/git.py#L21-L42
11,644
jorisroovers/gitlint
gitlint/git.py
git_commentchar
def git_commentchar(): """ Shortcut for retrieving comment char from git config """ commentchar = _git("config", "--get", "core.commentchar", _ok_code=[1]) # git will return an exit code of 1 if it can't find a config value, in this case we fall-back to # as commentchar if hasattr(commentchar, 'exit_code') and commentchar.exit_code == 1: # pylint: disable=no-member commentchar = "#" return ustr(commentchar).replace(u"\n", u"")
python
def git_commentchar(): """ Shortcut for retrieving comment char from git config """ commentchar = _git("config", "--get", "core.commentchar", _ok_code=[1]) # git will return an exit code of 1 if it can't find a config value, in this case we fall-back to # as commentchar if hasattr(commentchar, 'exit_code') and commentchar.exit_code == 1: # pylint: disable=no-member commentchar = "#" return ustr(commentchar).replace(u"\n", u"")
[ "def", "git_commentchar", "(", ")", ":", "commentchar", "=", "_git", "(", "\"config\"", ",", "\"--get\"", ",", "\"core.commentchar\"", ",", "_ok_code", "=", "[", "1", "]", ")", "# git will return an exit code of 1 if it can't find a config value, in this case we fall-back to # as commentchar", "if", "hasattr", "(", "commentchar", ",", "'exit_code'", ")", "and", "commentchar", ".", "exit_code", "==", "1", ":", "# pylint: disable=no-member", "commentchar", "=", "\"#\"", "return", "ustr", "(", "commentchar", ")", ".", "replace", "(", "u\"\\n\"", ",", "u\"\"", ")" ]
Shortcut for retrieving comment char from git config
[ "Shortcut", "for", "retrieving", "comment", "char", "from", "git", "config" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/git.py#L50-L56
11,645
jorisroovers/gitlint
gitlint/git.py
GitCommitMessage.from_full_message
def from_full_message(commit_msg_str): """ Parses a full git commit message by parsing a given string into the different parts of a commit message """ all_lines = commit_msg_str.splitlines() try: cutline_index = all_lines.index(GitCommitMessage.CUTLINE) except ValueError: cutline_index = None lines = [line for line in all_lines[:cutline_index] if not line.startswith(GitCommitMessage.COMMENT_CHAR)] full = "\n".join(lines) title = lines[0] if lines else "" body = lines[1:] if len(lines) > 1 else [] return GitCommitMessage(original=commit_msg_str, full=full, title=title, body=body)
python
def from_full_message(commit_msg_str): """ Parses a full git commit message by parsing a given string into the different parts of a commit message """ all_lines = commit_msg_str.splitlines() try: cutline_index = all_lines.index(GitCommitMessage.CUTLINE) except ValueError: cutline_index = None lines = [line for line in all_lines[:cutline_index] if not line.startswith(GitCommitMessage.COMMENT_CHAR)] full = "\n".join(lines) title = lines[0] if lines else "" body = lines[1:] if len(lines) > 1 else [] return GitCommitMessage(original=commit_msg_str, full=full, title=title, body=body)
[ "def", "from_full_message", "(", "commit_msg_str", ")", ":", "all_lines", "=", "commit_msg_str", ".", "splitlines", "(", ")", "try", ":", "cutline_index", "=", "all_lines", ".", "index", "(", "GitCommitMessage", ".", "CUTLINE", ")", "except", "ValueError", ":", "cutline_index", "=", "None", "lines", "=", "[", "line", "for", "line", "in", "all_lines", "[", ":", "cutline_index", "]", "if", "not", "line", ".", "startswith", "(", "GitCommitMessage", ".", "COMMENT_CHAR", ")", "]", "full", "=", "\"\\n\"", ".", "join", "(", "lines", ")", "title", "=", "lines", "[", "0", "]", "if", "lines", "else", "\"\"", "body", "=", "lines", "[", "1", ":", "]", "if", "len", "(", "lines", ")", ">", "1", "else", "[", "]", "return", "GitCommitMessage", "(", "original", "=", "commit_msg_str", ",", "full", "=", "full", ",", "title", "=", "title", ",", "body", "=", "body", ")" ]
Parses a full git commit message string into the different parts of a commit message
[ "Parses", "a", "full", "git", "commit", "message", "by", "parsing", "a", "given", "string", "into", "the", "different", "parts", "of", "a", "commit", "message" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/git.py#L76-L87
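A compact sketch of the comment-stripping and title/body split performed by from_full_message, applied to a made-up commit message; the cut-line handling is omitted for brevity and the comment character is assumed to be '#'.

# illustrative commit message, as git would hand it to a commit-msg hook
commit_msg = "\n".join([
    "Fix interval parsing",
    "",
    "Accept ms/h/d units in interval strings.",
    "# Please enter the commit message for your changes.",
    "# Lines starting with '#' will be ignored.",
])

comment_char = "#"
lines = [line for line in commit_msg.splitlines() if not line.startswith(comment_char)]
title = lines[0] if lines else ""
body = lines[1:]

print(title)   # Fix interval parsing
print(body)    # ['', 'Accept ms/h/d units in interval strings.']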
11,646
jorisroovers/gitlint
gitlint/lint.py
GitLinter.should_ignore_rule
def should_ignore_rule(self, rule):
    """ Determines whether a rule should be ignored based on the general list of rules to ignore """
    return rule.id in self.config.ignore or rule.name in self.config.ignore
python
def should_ignore_rule(self, rule):
    """ Determines whether a rule should be ignored based on the general list of rules to ignore """
    return rule.id in self.config.ignore or rule.name in self.config.ignore
[ "def", "should_ignore_rule", "(", "self", ",", "rule", ")", ":", "return", "rule", ".", "id", "in", "self", ".", "config", ".", "ignore", "or", "rule", ".", "name", "in", "self", ".", "config", ".", "ignore" ]
Determines whether a rule should be ignored based on the general list of rules to ignore
[ "Determines", "whether", "a", "rule", "should", "be", "ignored", "based", "on", "the", "general", "list", "of", "rules", "to", "ignore" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/lint.py#L19-L21
11,647
jorisroovers/gitlint
gitlint/lint.py
GitLinter._apply_line_rules
def _apply_line_rules(lines, commit, rules, line_nr_start): """ Iterates over the lines in a given list of lines and validates a given list of rules against each line """ all_violations = [] line_nr = line_nr_start for line in lines: for rule in rules: violations = rule.validate(line, commit) if violations: for violation in violations: violation.line_nr = line_nr all_violations.append(violation) line_nr += 1 return all_violations
python
def _apply_line_rules(lines, commit, rules, line_nr_start): """ Iterates over the lines in a given list of lines and validates a given list of rules against each line """ all_violations = [] line_nr = line_nr_start for line in lines: for rule in rules: violations = rule.validate(line, commit) if violations: for violation in violations: violation.line_nr = line_nr all_violations.append(violation) line_nr += 1 return all_violations
[ "def", "_apply_line_rules", "(", "lines", ",", "commit", ",", "rules", ",", "line_nr_start", ")", ":", "all_violations", "=", "[", "]", "line_nr", "=", "line_nr_start", "for", "line", "in", "lines", ":", "for", "rule", "in", "rules", ":", "violations", "=", "rule", ".", "validate", "(", "line", ",", "commit", ")", "if", "violations", ":", "for", "violation", "in", "violations", ":", "violation", ".", "line_nr", "=", "line_nr", "all_violations", ".", "append", "(", "violation", ")", "line_nr", "+=", "1", "return", "all_violations" ]
Iterates over the lines in a given list of lines and validates a given list of rules against each line
[ "Iterates", "over", "the", "lines", "in", "a", "given", "list", "of", "lines", "and", "validates", "a", "given", "list", "of", "rules", "against", "each", "line" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/lint.py#L46-L58
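The only subtle point is that the line counter starts at line_nr_start (1 for the title, 2 for the body, as the lint method further below shows) and is stamped onto every violation. A self-contained sketch with hypothetical stub rule and violation classes, not gitlint's real ones:

class FakeViolation:
    # Hypothetical stand-in for gitlint's violation objects.
    def __init__(self, message):
        self.message = message
        self.line_nr = None

class MaxLengthRule:
    # Hypothetical line rule: flag lines longer than 10 characters.
    def validate(self, line, commit=None):
        return [FakeViolation("line too long")] if len(line) > 10 else []

def apply_line_rules(lines, rules, line_nr_start):
    all_violations = []
    line_nr = line_nr_start
    for line in lines:
        for rule in rules:
            for violation in rule.validate(line):
                violation.line_nr = line_nr
                all_violations.append(violation)
        line_nr += 1
    return all_violations

found = apply_line_rules(["short", "this line is clearly too long"],
                         [MaxLengthRule()], line_nr_start=2)
print([(v.line_nr, v.message) for v in found])  # -> [(3, 'line too long')]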
11,648
jorisroovers/gitlint
gitlint/lint.py
GitLinter._apply_commit_rules
def _apply_commit_rules(rules, commit): """ Applies a set of rules against a given commit and gitcontext """ all_violations = [] for rule in rules: violations = rule.validate(commit) if violations: all_violations.extend(violations) return all_violations
python
def _apply_commit_rules(rules, commit): """ Applies a set of rules against a given commit and gitcontext """ all_violations = [] for rule in rules: violations = rule.validate(commit) if violations: all_violations.extend(violations) return all_violations
[ "def", "_apply_commit_rules", "(", "rules", ",", "commit", ")", ":", "all_violations", "=", "[", "]", "for", "rule", "in", "rules", ":", "violations", "=", "rule", ".", "validate", "(", "commit", ")", "if", "violations", ":", "all_violations", ".", "extend", "(", "violations", ")", "return", "all_violations" ]
Applies a set of rules against a given commit and gitcontext
[ "Applies", "a", "set", "of", "rules", "against", "a", "given", "commit", "and", "gitcontext" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/lint.py#L61-L68
11,649
jorisroovers/gitlint
gitlint/lint.py
GitLinter.lint
def lint(self, commit): """ Lint the last commit in a given git context by applying all ignore, title, body and commit rules. """ LOG.debug("Linting commit %s", commit.sha or "[SHA UNKNOWN]") LOG.debug("Commit Object\n" + ustr(commit)) # Apply config rules for rule in self.configuration_rules: rule.apply(self.config, commit) # Skip linting if this is a special commit type that is configured to be ignored ignore_commit_types = ["merge", "squash", "fixup"] for commit_type in ignore_commit_types: if getattr(commit, "is_{0}_commit".format(commit_type)) and \ getattr(self.config, "ignore_{0}_commits".format(commit_type)): return [] violations = [] # determine violations by applying all rules violations.extend(self._apply_line_rules([commit.message.title], commit, self.title_line_rules, 1)) violations.extend(self._apply_line_rules(commit.message.body, commit, self.body_line_rules, 2)) violations.extend(self._apply_commit_rules(self.commit_rules, commit)) # Sort violations by line number and rule_id. If there's no line nr specified (=common certain commit rules), # we replace None with -1 so that it always get's placed first. Note that we need this to do this to support # python 3, as None is not allowed in a list that is being sorted. violations.sort(key=lambda v: (-1 if v.line_nr is None else v.line_nr, v.rule_id)) return violations
python
def lint(self, commit): """ Lint the last commit in a given git context by applying all ignore, title, body and commit rules. """ LOG.debug("Linting commit %s", commit.sha or "[SHA UNKNOWN]") LOG.debug("Commit Object\n" + ustr(commit)) # Apply config rules for rule in self.configuration_rules: rule.apply(self.config, commit) # Skip linting if this is a special commit type that is configured to be ignored ignore_commit_types = ["merge", "squash", "fixup"] for commit_type in ignore_commit_types: if getattr(commit, "is_{0}_commit".format(commit_type)) and \ getattr(self.config, "ignore_{0}_commits".format(commit_type)): return [] violations = [] # determine violations by applying all rules violations.extend(self._apply_line_rules([commit.message.title], commit, self.title_line_rules, 1)) violations.extend(self._apply_line_rules(commit.message.body, commit, self.body_line_rules, 2)) violations.extend(self._apply_commit_rules(self.commit_rules, commit)) # Sort violations by line number and rule_id. If there's no line nr specified (=common certain commit rules), # we replace None with -1 so that it always get's placed first. Note that we need this to do this to support # python 3, as None is not allowed in a list that is being sorted. violations.sort(key=lambda v: (-1 if v.line_nr is None else v.line_nr, v.rule_id)) return violations
[ "def", "lint", "(", "self", ",", "commit", ")", ":", "LOG", ".", "debug", "(", "\"Linting commit %s\"", ",", "commit", ".", "sha", "or", "\"[SHA UNKNOWN]\"", ")", "LOG", ".", "debug", "(", "\"Commit Object\\n\"", "+", "ustr", "(", "commit", ")", ")", "# Apply config rules", "for", "rule", "in", "self", ".", "configuration_rules", ":", "rule", ".", "apply", "(", "self", ".", "config", ",", "commit", ")", "# Skip linting if this is a special commit type that is configured to be ignored", "ignore_commit_types", "=", "[", "\"merge\"", ",", "\"squash\"", ",", "\"fixup\"", "]", "for", "commit_type", "in", "ignore_commit_types", ":", "if", "getattr", "(", "commit", ",", "\"is_{0}_commit\"", ".", "format", "(", "commit_type", ")", ")", "and", "getattr", "(", "self", ".", "config", ",", "\"ignore_{0}_commits\"", ".", "format", "(", "commit_type", ")", ")", ":", "return", "[", "]", "violations", "=", "[", "]", "# determine violations by applying all rules", "violations", ".", "extend", "(", "self", ".", "_apply_line_rules", "(", "[", "commit", ".", "message", ".", "title", "]", ",", "commit", ",", "self", ".", "title_line_rules", ",", "1", ")", ")", "violations", ".", "extend", "(", "self", ".", "_apply_line_rules", "(", "commit", ".", "message", ".", "body", ",", "commit", ",", "self", ".", "body_line_rules", ",", "2", ")", ")", "violations", ".", "extend", "(", "self", ".", "_apply_commit_rules", "(", "self", ".", "commit_rules", ",", "commit", ")", ")", "# Sort violations by line number and rule_id. If there's no line nr specified (=common certain commit rules),", "# we replace None with -1 so that it always get's placed first. Note that we need this to do this to support", "# python 3, as None is not allowed in a list that is being sorted.", "violations", ".", "sort", "(", "key", "=", "lambda", "v", ":", "(", "-", "1", "if", "v", ".", "line_nr", "is", "None", "else", "v", ".", "line_nr", ",", "v", ".", "rule_id", ")", ")", "return", "violations" ]
Lint the last commit in a given git context by applying all ignore, title, body and commit rules.
[ "Lint", "the", "last", "commit", "in", "a", "given", "git", "context", "by", "applying", "all", "ignore", "title", "body", "and", "commit", "rules", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/lint.py#L70-L96
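The final sort is worth a small aside: commit-level rules produce violations without a line number, and None cannot be compared to integers on Python 3, so None is mapped to -1 so those violations always sort first. A quick illustration with plain (line_nr, rule_id) tuples standing in for violation objects:

violations = [(3, "B1"), (None, "M1"), (1, "T3"), (None, "B5")]
violations.sort(key=lambda v: (-1 if v[0] is None else v[0], v[1]))
print(violations)  # -> [(None, 'B5'), (None, 'M1'), (1, 'T3'), (3, 'B1')]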
11,650
jorisroovers/gitlint
gitlint/lint.py
GitLinter.print_violations
def print_violations(self, violations): """ Print a given set of violations to the standard error output """ for v in violations: line_nr = v.line_nr if v.line_nr else "-" self.display.e(u"{0}: {1}".format(line_nr, v.rule_id), exact=True) self.display.ee(u"{0}: {1} {2}".format(line_nr, v.rule_id, v.message), exact=True) if v.content: self.display.eee(u"{0}: {1} {2}: \"{3}\"".format(line_nr, v.rule_id, v.message, v.content), exact=True) else: self.display.eee(u"{0}: {1} {2}".format(line_nr, v.rule_id, v.message), exact=True)
python
def print_violations(self, violations): """ Print a given set of violations to the standard error output """ for v in violations: line_nr = v.line_nr if v.line_nr else "-" self.display.e(u"{0}: {1}".format(line_nr, v.rule_id), exact=True) self.display.ee(u"{0}: {1} {2}".format(line_nr, v.rule_id, v.message), exact=True) if v.content: self.display.eee(u"{0}: {1} {2}: \"{3}\"".format(line_nr, v.rule_id, v.message, v.content), exact=True) else: self.display.eee(u"{0}: {1} {2}".format(line_nr, v.rule_id, v.message), exact=True)
[ "def", "print_violations", "(", "self", ",", "violations", ")", ":", "for", "v", "in", "violations", ":", "line_nr", "=", "v", ".", "line_nr", "if", "v", ".", "line_nr", "else", "\"-\"", "self", ".", "display", ".", "e", "(", "u\"{0}: {1}\"", ".", "format", "(", "line_nr", ",", "v", ".", "rule_id", ")", ",", "exact", "=", "True", ")", "self", ".", "display", ".", "ee", "(", "u\"{0}: {1} {2}\"", ".", "format", "(", "line_nr", ",", "v", ".", "rule_id", ",", "v", ".", "message", ")", ",", "exact", "=", "True", ")", "if", "v", ".", "content", ":", "self", ".", "display", ".", "eee", "(", "u\"{0}: {1} {2}: \\\"{3}\\\"\"", ".", "format", "(", "line_nr", ",", "v", ".", "rule_id", ",", "v", ".", "message", ",", "v", ".", "content", ")", ",", "exact", "=", "True", ")", "else", ":", "self", ".", "display", ".", "eee", "(", "u\"{0}: {1} {2}\"", ".", "format", "(", "line_nr", ",", "v", ".", "rule_id", ",", "v", ".", "message", ")", ",", "exact", "=", "True", ")" ]
Print a given set of violations to the standard error output
[ "Print", "a", "given", "set", "of", "violations", "to", "the", "standard", "error", "output" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/lint.py#L98-L108
11,651
jorisroovers/gitlint
gitlint/display.py
Display._output
def _output(self, message, verbosity, exact, stream): """ Output a message if the config's verbosity is >= to the given verbosity. If exact == True, the message will only be outputted if the given verbosity exactly matches the config's verbosity. """ if exact: if self.config.verbosity == verbosity: stream.write(message + "\n") else: if self.config.verbosity >= verbosity: stream.write(message + "\n")
python
def _output(self, message, verbosity, exact, stream): """ Output a message if the config's verbosity is >= to the given verbosity. If exact == True, the message will only be outputted if the given verbosity exactly matches the config's verbosity. """ if exact: if self.config.verbosity == verbosity: stream.write(message + "\n") else: if self.config.verbosity >= verbosity: stream.write(message + "\n")
[ "def", "_output", "(", "self", ",", "message", ",", "verbosity", ",", "exact", ",", "stream", ")", ":", "if", "exact", ":", "if", "self", ".", "config", ".", "verbosity", "==", "verbosity", ":", "stream", ".", "write", "(", "message", "+", "\"\\n\"", ")", "else", ":", "if", "self", ".", "config", ".", "verbosity", ">=", "verbosity", ":", "stream", ".", "write", "(", "message", "+", "\"\\n\"", ")" ]
Output a message if the config's verbosity is >= to the given verbosity. If exact == True, the message will only be outputted if the given verbosity exactly matches the config's verbosity.
[ "Output", "a", "message", "if", "the", "config", "s", "verbosity", "is", ">", "=", "to", "the", "given", "verbosity", ".", "If", "exact", "==", "True", "the", "message", "will", "only", "be", "outputted", "if", "the", "given", "verbosity", "exactly", "matches", "the", "config", "s", "verbosity", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/display.py#L20-L28
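The two modes are easy to mix up: with exact=False a message is shown at its verbosity level or above, with exact=True only at exactly that level, which is what lets print_violations above emit a single line per violation at the configured verbosity. A tiny standalone check of both behaviours:

def should_emit(config_verbosity, message_verbosity, exact):
    if exact:
        return config_verbosity == message_verbosity
    return config_verbosity >= message_verbosity

for cfg in (1, 2, 3):
    print(cfg, should_emit(cfg, 2, exact=False), should_emit(cfg, 2, exact=True))
# -> 1 False False
#    2 True  True
#    3 True  False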
11,652
jorisroovers/gitlint
gitlint/cli.py
setup_logging
def setup_logging(): """ Setup gitlint logging """ root_log = logging.getLogger("gitlint") root_log.propagate = False # Don't propagate to child loggers, the gitlint root logger handles everything handler = logging.StreamHandler() formatter = logging.Formatter(LOG_FORMAT) handler.setFormatter(formatter) root_log.addHandler(handler) root_log.setLevel(logging.ERROR)
python
def setup_logging(): """ Setup gitlint logging """ root_log = logging.getLogger("gitlint") root_log.propagate = False # Don't propagate to child loggers, the gitlint root logger handles everything handler = logging.StreamHandler() formatter = logging.Formatter(LOG_FORMAT) handler.setFormatter(formatter) root_log.addHandler(handler) root_log.setLevel(logging.ERROR)
[ "def", "setup_logging", "(", ")", ":", "root_log", "=", "logging", ".", "getLogger", "(", "\"gitlint\"", ")", "root_log", ".", "propagate", "=", "False", "# Don't propagate to child loggers, the gitlint root logger handles everything", "handler", "=", "logging", ".", "StreamHandler", "(", ")", "formatter", "=", "logging", ".", "Formatter", "(", "LOG_FORMAT", ")", "handler", ".", "setFormatter", "(", "formatter", ")", "root_log", ".", "addHandler", "(", "handler", ")", "root_log", ".", "setLevel", "(", "logging", ".", "ERROR", ")" ]
Setup gitlint logging
[ "Setup", "gitlint", "logging" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/cli.py#L39-L47
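Because the handler sits on the 'gitlint' root logger with propagation disabled, the rest of the code base only ever has to raise the level to make debug output visible (as the cli entry point further below does for --debug). A minimal sketch of that interaction; LOG_FORMAT here is an assumed placeholder, not gitlint's actual format string:

import logging

LOG_FORMAT = "%(levelname)s: %(name)s %(message)s"  # assumed placeholder

root_log = logging.getLogger("gitlint")
root_log.propagate = False          # stop at the gitlint logger, never reach the real root
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(LOG_FORMAT))
root_log.addHandler(handler)
root_log.setLevel(logging.ERROR)

logging.getLogger("gitlint.cli").debug("hidden")       # below ERROR: swallowed
logging.getLogger("gitlint").setLevel(logging.DEBUG)   # what --debug effectively does
logging.getLogger("gitlint.cli").debug("now visible")  # emitted by the gitlint handler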
11,653
jorisroovers/gitlint
gitlint/cli.py
build_config
def build_config(ctx, target, config_path, c, extra_path, ignore, verbose, silent, debug): """ Creates a LintConfig object based on a set of commandline parameters. """ config_builder = LintConfigBuilder() try: # Config precedence: # First, load default config or config from configfile if config_path: config_builder.set_from_config_file(config_path) elif os.path.exists(DEFAULT_CONFIG_FILE): config_builder.set_from_config_file(DEFAULT_CONFIG_FILE) # Then process any commandline configuration flags config_builder.set_config_from_string_list(c) # Finally, overwrite with any convenience commandline flags if ignore: config_builder.set_option('general', 'ignore', ignore) if silent: config_builder.set_option('general', 'verbosity', 0) elif verbose > 0: config_builder.set_option('general', 'verbosity', verbose) if extra_path: config_builder.set_option('general', 'extra-path', extra_path) if target: config_builder.set_option('general', 'target', target) if debug: config_builder.set_option('general', 'debug', debug) config = config_builder.build() return config, config_builder except LintConfigError as e: click.echo(u"Config Error: {0}".format(ustr(e))) ctx.exit(CONFIG_ERROR_CODE)
python
def build_config(ctx, target, config_path, c, extra_path, ignore, verbose, silent, debug): """ Creates a LintConfig object based on a set of commandline parameters. """ config_builder = LintConfigBuilder() try: # Config precedence: # First, load default config or config from configfile if config_path: config_builder.set_from_config_file(config_path) elif os.path.exists(DEFAULT_CONFIG_FILE): config_builder.set_from_config_file(DEFAULT_CONFIG_FILE) # Then process any commandline configuration flags config_builder.set_config_from_string_list(c) # Finally, overwrite with any convenience commandline flags if ignore: config_builder.set_option('general', 'ignore', ignore) if silent: config_builder.set_option('general', 'verbosity', 0) elif verbose > 0: config_builder.set_option('general', 'verbosity', verbose) if extra_path: config_builder.set_option('general', 'extra-path', extra_path) if target: config_builder.set_option('general', 'target', target) if debug: config_builder.set_option('general', 'debug', debug) config = config_builder.build() return config, config_builder except LintConfigError as e: click.echo(u"Config Error: {0}".format(ustr(e))) ctx.exit(CONFIG_ERROR_CODE)
[ "def", "build_config", "(", "ctx", ",", "target", ",", "config_path", ",", "c", ",", "extra_path", ",", "ignore", ",", "verbose", ",", "silent", ",", "debug", ")", ":", "config_builder", "=", "LintConfigBuilder", "(", ")", "try", ":", "# Config precedence:", "# First, load default config or config from configfile", "if", "config_path", ":", "config_builder", ".", "set_from_config_file", "(", "config_path", ")", "elif", "os", ".", "path", ".", "exists", "(", "DEFAULT_CONFIG_FILE", ")", ":", "config_builder", ".", "set_from_config_file", "(", "DEFAULT_CONFIG_FILE", ")", "# Then process any commandline configuration flags", "config_builder", ".", "set_config_from_string_list", "(", "c", ")", "# Finally, overwrite with any convenience commandline flags", "if", "ignore", ":", "config_builder", ".", "set_option", "(", "'general'", ",", "'ignore'", ",", "ignore", ")", "if", "silent", ":", "config_builder", ".", "set_option", "(", "'general'", ",", "'verbosity'", ",", "0", ")", "elif", "verbose", ">", "0", ":", "config_builder", ".", "set_option", "(", "'general'", ",", "'verbosity'", ",", "verbose", ")", "if", "extra_path", ":", "config_builder", ".", "set_option", "(", "'general'", ",", "'extra-path'", ",", "extra_path", ")", "if", "target", ":", "config_builder", ".", "set_option", "(", "'general'", ",", "'target'", ",", "target", ")", "if", "debug", ":", "config_builder", ".", "set_option", "(", "'general'", ",", "'debug'", ",", "debug", ")", "config", "=", "config_builder", ".", "build", "(", ")", "return", "config", ",", "config_builder", "except", "LintConfigError", "as", "e", ":", "click", ".", "echo", "(", "u\"Config Error: {0}\"", ".", "format", "(", "ustr", "(", "e", ")", ")", ")", "ctx", ".", "exit", "(", "CONFIG_ERROR_CODE", ")" ]
Creates a LintConfig object based on a set of commandline parameters.
[ "Creates", "a", "LintConfig", "object", "based", "on", "a", "set", "of", "commandline", "parameters", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/cli.py#L57-L93
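The precedence encoded above is a straightforward "later layers win" scheme: defaults, then the config file, then -c options, then the convenience flags. A toy illustration with plain dicts (not gitlint's real LintConfigBuilder API):

defaults    = {"verbosity": 3, "ignore": []}
config_file = {"verbosity": 2}                              # .gitlint / --config
c_options   = {"ignore": ["title-trailing-punctuation"]}    # -c general.ignore=...
cli_flags   = {"verbosity": 0}                              # --silent

effective = {}
for layer in (defaults, config_file, c_options, cli_flags):
    effective.update(layer)   # each later source overwrites earlier keys
print(effective)  # -> {'verbosity': 0, 'ignore': ['title-trailing-punctuation']}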
11,654
jorisroovers/gitlint
gitlint/cli.py
get_stdin_data
def get_stdin_data(): """ Helper function that returns data send to stdin or False if nothing is send """ # STDIN can only be 3 different types of things ("modes") # 1. An interactive terminal device (i.e. a TTY -> sys.stdin.isatty() or stat.S_ISCHR) # 2. A (named) pipe (stat.S_ISFIFO) # 3. A regular file (stat.S_ISREG) # Technically, STDIN can also be other device type like a named unix socket (stat.S_ISSOCK), but we don't # support that in gitlint (at least not today). # # Now, the behavior that we want is the following: # If someone sends something directly to gitlint via a pipe or a regular file, read it. If not, read from the # local repository. # Note that we don't care about whether STDIN is a TTY or not, we only care whether data is via a pipe or regular # file. # However, in case STDIN is not a TTY, it HAS to be one of the 2 other things (pipe or regular file), even if # no-one is actually sending anything to gitlint over them. In this case, we still want to read from the local # repository. # To support this use-case (which is common in CI runners such as Jenkins and Gitlab), we need to actually attempt # to read from STDIN in case it's a pipe or regular file. In case that fails, then we'll fall back to reading # from the local repo. mode = os.fstat(sys.stdin.fileno()).st_mode stdin_is_pipe_or_file = stat.S_ISFIFO(mode) or stat.S_ISREG(mode) if stdin_is_pipe_or_file: input_data = sys.stdin.read() # Only return the input data if there's actually something passed # i.e. don't consider empty piped data if input_data: return ustr(input_data) return False
python
def get_stdin_data(): """ Helper function that returns data send to stdin or False if nothing is send """ # STDIN can only be 3 different types of things ("modes") # 1. An interactive terminal device (i.e. a TTY -> sys.stdin.isatty() or stat.S_ISCHR) # 2. A (named) pipe (stat.S_ISFIFO) # 3. A regular file (stat.S_ISREG) # Technically, STDIN can also be other device type like a named unix socket (stat.S_ISSOCK), but we don't # support that in gitlint (at least not today). # # Now, the behavior that we want is the following: # If someone sends something directly to gitlint via a pipe or a regular file, read it. If not, read from the # local repository. # Note that we don't care about whether STDIN is a TTY or not, we only care whether data is via a pipe or regular # file. # However, in case STDIN is not a TTY, it HAS to be one of the 2 other things (pipe or regular file), even if # no-one is actually sending anything to gitlint over them. In this case, we still want to read from the local # repository. # To support this use-case (which is common in CI runners such as Jenkins and Gitlab), we need to actually attempt # to read from STDIN in case it's a pipe or regular file. In case that fails, then we'll fall back to reading # from the local repo. mode = os.fstat(sys.stdin.fileno()).st_mode stdin_is_pipe_or_file = stat.S_ISFIFO(mode) or stat.S_ISREG(mode) if stdin_is_pipe_or_file: input_data = sys.stdin.read() # Only return the input data if there's actually something passed # i.e. don't consider empty piped data if input_data: return ustr(input_data) return False
[ "def", "get_stdin_data", "(", ")", ":", "# STDIN can only be 3 different types of things (\"modes\")", "# 1. An interactive terminal device (i.e. a TTY -> sys.stdin.isatty() or stat.S_ISCHR)", "# 2. A (named) pipe (stat.S_ISFIFO)", "# 3. A regular file (stat.S_ISREG)", "# Technically, STDIN can also be other device type like a named unix socket (stat.S_ISSOCK), but we don't", "# support that in gitlint (at least not today).", "#", "# Now, the behavior that we want is the following:", "# If someone sends something directly to gitlint via a pipe or a regular file, read it. If not, read from the", "# local repository.", "# Note that we don't care about whether STDIN is a TTY or not, we only care whether data is via a pipe or regular", "# file.", "# However, in case STDIN is not a TTY, it HAS to be one of the 2 other things (pipe or regular file), even if", "# no-one is actually sending anything to gitlint over them. In this case, we still want to read from the local", "# repository.", "# To support this use-case (which is common in CI runners such as Jenkins and Gitlab), we need to actually attempt", "# to read from STDIN in case it's a pipe or regular file. In case that fails, then we'll fall back to reading", "# from the local repo.", "mode", "=", "os", ".", "fstat", "(", "sys", ".", "stdin", ".", "fileno", "(", ")", ")", ".", "st_mode", "stdin_is_pipe_or_file", "=", "stat", ".", "S_ISFIFO", "(", "mode", ")", "or", "stat", ".", "S_ISREG", "(", "mode", ")", "if", "stdin_is_pipe_or_file", ":", "input_data", "=", "sys", ".", "stdin", ".", "read", "(", ")", "# Only return the input data if there's actually something passed", "# i.e. don't consider empty piped data", "if", "input_data", ":", "return", "ustr", "(", "input_data", ")", "return", "False" ]
Helper function that returns data sent to stdin, or False if nothing is sent
[ "Helper", "function", "that", "returns", "data", "sent", "to", "stdin", "or", "False", "if", "nothing", "is", "sent" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/cli.py#L96-L125
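The stat-based check is the interesting part: it reads stdin only when data is actually coming in over a pipe or a regular file, and never blocks waiting on a TTY. A cut-down standalone version (saved as a hypothetical check_stdin.py for the usage comments below):

import os
import stat
import sys

def stdin_has_piped_data():
    # Read stdin only when it is a FIFO (pipe) or a regular file, never a TTY.
    mode = os.fstat(sys.stdin.fileno()).st_mode
    if stat.S_ISFIFO(mode) or stat.S_ISREG(mode):
        data = sys.stdin.read()
        return data if data else False
    return False

if __name__ == "__main__":
    print(repr(stdin_has_piped_data()))

# echo "WIP: my commit" | python check_stdin.py   -> 'WIP: my commit\n'
# python check_stdin.py                           -> False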
11,655
jorisroovers/gitlint
gitlint/cli.py
cli
def cli( # pylint: disable=too-many-arguments ctx, target, config, c, commits, extra_path, ignore, msg_filename, verbose, silent, debug, ): """ Git lint tool, checks your git commit messages for styling issues """ try: if debug: logging.getLogger("gitlint").setLevel(logging.DEBUG) log_system_info() # Get the lint config from the commandline parameters and # store it in the context (click allows storing an arbitrary object in ctx.obj). config, config_builder = build_config(ctx, target, config, c, extra_path, ignore, verbose, silent, debug) LOG.debug(u"Configuration\n%s", ustr(config)) ctx.obj = (config, config_builder, commits, msg_filename) # If no subcommand is specified, then just lint if ctx.invoked_subcommand is None: ctx.invoke(lint) except GitContextError as e: click.echo(ustr(e)) ctx.exit(GIT_CONTEXT_ERROR_CODE)
python
def cli( # pylint: disable=too-many-arguments ctx, target, config, c, commits, extra_path, ignore, msg_filename, verbose, silent, debug, ): """ Git lint tool, checks your git commit messages for styling issues """ try: if debug: logging.getLogger("gitlint").setLevel(logging.DEBUG) log_system_info() # Get the lint config from the commandline parameters and # store it in the context (click allows storing an arbitrary object in ctx.obj). config, config_builder = build_config(ctx, target, config, c, extra_path, ignore, verbose, silent, debug) LOG.debug(u"Configuration\n%s", ustr(config)) ctx.obj = (config, config_builder, commits, msg_filename) # If no subcommand is specified, then just lint if ctx.invoked_subcommand is None: ctx.invoke(lint) except GitContextError as e: click.echo(ustr(e)) ctx.exit(GIT_CONTEXT_ERROR_CODE)
[ "def", "cli", "(", "# pylint: disable=too-many-arguments", "ctx", ",", "target", ",", "config", ",", "c", ",", "commits", ",", "extra_path", ",", "ignore", ",", "msg_filename", ",", "verbose", ",", "silent", ",", "debug", ",", ")", ":", "try", ":", "if", "debug", ":", "logging", ".", "getLogger", "(", "\"gitlint\"", ")", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "log_system_info", "(", ")", "# Get the lint config from the commandline parameters and", "# store it in the context (click allows storing an arbitrary object in ctx.obj).", "config", ",", "config_builder", "=", "build_config", "(", "ctx", ",", "target", ",", "config", ",", "c", ",", "extra_path", ",", "ignore", ",", "verbose", ",", "silent", ",", "debug", ")", "LOG", ".", "debug", "(", "u\"Configuration\\n%s\"", ",", "ustr", "(", "config", ")", ")", "ctx", ".", "obj", "=", "(", "config", ",", "config_builder", ",", "commits", ",", "msg_filename", ")", "# If no subcommand is specified, then just lint", "if", "ctx", ".", "invoked_subcommand", "is", "None", ":", "ctx", ".", "invoke", "(", "lint", ")", "except", "GitContextError", "as", "e", ":", "click", ".", "echo", "(", "ustr", "(", "e", ")", ")", "ctx", ".", "exit", "(", "GIT_CONTEXT_ERROR_CODE", ")" ]
Git lint tool, checks your git commit messages for styling issues
[ "Git", "lint", "tool", "checks", "your", "git", "commit", "messages", "for", "styling", "issues" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/cli.py#L147-L173
11,656
jorisroovers/gitlint
gitlint/cli.py
install_hook
def install_hook(ctx): """ Install gitlint as a git commit-msg hook. """ try: lint_config = ctx.obj[0] hooks.GitHookInstaller.install_commit_msg_hook(lint_config) # declare victory :-) hook_path = hooks.GitHookInstaller.commit_msg_hook_path(lint_config) click.echo(u"Successfully installed gitlint commit-msg hook in {0}".format(hook_path)) ctx.exit(0) except hooks.GitHookInstallerError as e: click.echo(ustr(e), err=True) ctx.exit(GIT_CONTEXT_ERROR_CODE)
python
def install_hook(ctx): """ Install gitlint as a git commit-msg hook. """ try: lint_config = ctx.obj[0] hooks.GitHookInstaller.install_commit_msg_hook(lint_config) # declare victory :-) hook_path = hooks.GitHookInstaller.commit_msg_hook_path(lint_config) click.echo(u"Successfully installed gitlint commit-msg hook in {0}".format(hook_path)) ctx.exit(0) except hooks.GitHookInstallerError as e: click.echo(ustr(e), err=True) ctx.exit(GIT_CONTEXT_ERROR_CODE)
[ "def", "install_hook", "(", "ctx", ")", ":", "try", ":", "lint_config", "=", "ctx", ".", "obj", "[", "0", "]", "hooks", ".", "GitHookInstaller", ".", "install_commit_msg_hook", "(", "lint_config", ")", "# declare victory :-)", "hook_path", "=", "hooks", ".", "GitHookInstaller", ".", "commit_msg_hook_path", "(", "lint_config", ")", "click", ".", "echo", "(", "u\"Successfully installed gitlint commit-msg hook in {0}\"", ".", "format", "(", "hook_path", ")", ")", "ctx", ".", "exit", "(", "0", ")", "except", "hooks", ".", "GitHookInstallerError", "as", "e", ":", "click", ".", "echo", "(", "ustr", "(", "e", ")", ",", "err", "=", "True", ")", "ctx", ".", "exit", "(", "GIT_CONTEXT_ERROR_CODE", ")" ]
Install gitlint as a git commit-msg hook.
[ "Install", "gitlint", "as", "a", "git", "commit", "-", "msg", "hook", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/cli.py#L246-L257
11,657
jorisroovers/gitlint
gitlint/cli.py
uninstall_hook
def uninstall_hook(ctx): """ Uninstall gitlint commit-msg hook. """ try: lint_config = ctx.obj[0] hooks.GitHookInstaller.uninstall_commit_msg_hook(lint_config) # declare victory :-) hook_path = hooks.GitHookInstaller.commit_msg_hook_path(lint_config) click.echo(u"Successfully uninstalled gitlint commit-msg hook from {0}".format(hook_path)) ctx.exit(0) except hooks.GitHookInstallerError as e: click.echo(ustr(e), err=True) ctx.exit(GIT_CONTEXT_ERROR_CODE)
python
def uninstall_hook(ctx): """ Uninstall gitlint commit-msg hook. """ try: lint_config = ctx.obj[0] hooks.GitHookInstaller.uninstall_commit_msg_hook(lint_config) # declare victory :-) hook_path = hooks.GitHookInstaller.commit_msg_hook_path(lint_config) click.echo(u"Successfully uninstalled gitlint commit-msg hook from {0}".format(hook_path)) ctx.exit(0) except hooks.GitHookInstallerError as e: click.echo(ustr(e), err=True) ctx.exit(GIT_CONTEXT_ERROR_CODE)
[ "def", "uninstall_hook", "(", "ctx", ")", ":", "try", ":", "lint_config", "=", "ctx", ".", "obj", "[", "0", "]", "hooks", ".", "GitHookInstaller", ".", "uninstall_commit_msg_hook", "(", "lint_config", ")", "# declare victory :-)", "hook_path", "=", "hooks", ".", "GitHookInstaller", ".", "commit_msg_hook_path", "(", "lint_config", ")", "click", ".", "echo", "(", "u\"Successfully uninstalled gitlint commit-msg hook from {0}\"", ".", "format", "(", "hook_path", ")", ")", "ctx", ".", "exit", "(", "0", ")", "except", "hooks", ".", "GitHookInstallerError", "as", "e", ":", "click", ".", "echo", "(", "ustr", "(", "e", ")", ",", "err", "=", "True", ")", "ctx", ".", "exit", "(", "GIT_CONTEXT_ERROR_CODE", ")" ]
Uninstall gitlint commit-msg hook.
[ "Uninstall", "gitlint", "commit", "-", "msg", "hook", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/cli.py#L262-L273
11,658
jorisroovers/gitlint
gitlint/cli.py
generate_config
def generate_config(ctx): """ Generates a sample gitlint config file. """ path = click.prompt('Please specify a location for the sample gitlint config file', default=DEFAULT_CONFIG_FILE) path = os.path.abspath(path) dir_name = os.path.dirname(path) if not os.path.exists(dir_name): click.echo(u"Error: Directory '{0}' does not exist.".format(dir_name), err=True) ctx.exit(USAGE_ERROR_CODE) elif os.path.exists(path): click.echo(u"Error: File \"{0}\" already exists.".format(path), err=True) ctx.exit(USAGE_ERROR_CODE) LintConfigGenerator.generate_config(path) click.echo(u"Successfully generated {0}".format(path)) ctx.exit(0)
python
def generate_config(ctx): """ Generates a sample gitlint config file. """ path = click.prompt('Please specify a location for the sample gitlint config file', default=DEFAULT_CONFIG_FILE) path = os.path.abspath(path) dir_name = os.path.dirname(path) if not os.path.exists(dir_name): click.echo(u"Error: Directory '{0}' does not exist.".format(dir_name), err=True) ctx.exit(USAGE_ERROR_CODE) elif os.path.exists(path): click.echo(u"Error: File \"{0}\" already exists.".format(path), err=True) ctx.exit(USAGE_ERROR_CODE) LintConfigGenerator.generate_config(path) click.echo(u"Successfully generated {0}".format(path)) ctx.exit(0)
[ "def", "generate_config", "(", "ctx", ")", ":", "path", "=", "click", ".", "prompt", "(", "'Please specify a location for the sample gitlint config file'", ",", "default", "=", "DEFAULT_CONFIG_FILE", ")", "path", "=", "os", ".", "path", ".", "abspath", "(", "path", ")", "dir_name", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "dir_name", ")", ":", "click", ".", "echo", "(", "u\"Error: Directory '{0}' does not exist.\"", ".", "format", "(", "dir_name", ")", ",", "err", "=", "True", ")", "ctx", ".", "exit", "(", "USAGE_ERROR_CODE", ")", "elif", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "click", ".", "echo", "(", "u\"Error: File \\\"{0}\\\" already exists.\"", ".", "format", "(", "path", ")", ",", "err", "=", "True", ")", "ctx", ".", "exit", "(", "USAGE_ERROR_CODE", ")", "LintConfigGenerator", ".", "generate_config", "(", "path", ")", "click", ".", "echo", "(", "u\"Successfully generated {0}\"", ".", "format", "(", "path", ")", ")", "ctx", ".", "exit", "(", "0", ")" ]
Generates a sample gitlint config file.
[ "Generates", "a", "sample", "gitlint", "config", "file", "." ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/cli.py#L278-L292
11,659
jorisroovers/gitlint
gitlint/hooks.py
GitHookInstaller._assert_git_repo
def _assert_git_repo(target): """ Asserts that a given target directory is a git repository """ hooks_dir = os.path.abspath(os.path.join(target, HOOKS_DIR_PATH)) if not os.path.isdir(hooks_dir): raise GitHookInstallerError(u"{0} is not a git repository.".format(target))
python
def _assert_git_repo(target): """ Asserts that a given target directory is a git repository """ hooks_dir = os.path.abspath(os.path.join(target, HOOKS_DIR_PATH)) if not os.path.isdir(hooks_dir): raise GitHookInstallerError(u"{0} is not a git repository.".format(target))
[ "def", "_assert_git_repo", "(", "target", ")", ":", "hooks_dir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "target", ",", "HOOKS_DIR_PATH", ")", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "hooks_dir", ")", ":", "raise", "GitHookInstallerError", "(", "u\"{0} is not a git repository.\"", ".", "format", "(", "target", ")", ")" ]
Asserts that a given target directory is a git repository
[ "Asserts", "that", "a", "given", "target", "directory", "is", "a", "git", "repository" ]
6248bd6cbc20c1be3bb6d196a5ec0425af99733b
https://github.com/jorisroovers/gitlint/blob/6248bd6cbc20c1be3bb6d196a5ec0425af99733b/gitlint/hooks.py#L23-L27
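The check boils down to "does target/.git/hooks exist as a directory". A small sketch, with HOOKS_DIR_PATH assumed to be .git/hooks (the real constant is defined elsewhere in gitlint/hooks.py); under that assumption a worktree or submodule checkout, where .git is a file rather than a directory, would be rejected.

import os

HOOKS_DIR_PATH = os.path.join(".git", "hooks")  # assumed value of the gitlint constant

def looks_like_git_repo(target):
    hooks_dir = os.path.abspath(os.path.join(target, HOOKS_DIR_PATH))
    return os.path.isdir(hooks_dir)

print(looks_like_git_repo("."))     # True inside a normal clone
print(looks_like_git_repo("/tmp"))  # normally False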
11,660
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
get_job_url
def get_job_url(config, hub, group, project): """ Util method to get job url """ if ((config is not None) and ('hub' in config) and (hub is None)): hub = config["hub"] if ((config is not None) and ('group' in config) and (group is None)): group = config["group"] if ((config is not None) and ('project' in config) and (project is None)): project = config["project"] if ((hub is not None) and (group is not None) and (project is not None)): return '/Network/{}/Groups/{}/Projects/{}/jobs'.format(hub, group, project) return '/Jobs'
python
def get_job_url(config, hub, group, project): """ Util method to get job url """ if ((config is not None) and ('hub' in config) and (hub is None)): hub = config["hub"] if ((config is not None) and ('group' in config) and (group is None)): group = config["group"] if ((config is not None) and ('project' in config) and (project is None)): project = config["project"] if ((hub is not None) and (group is not None) and (project is not None)): return '/Network/{}/Groups/{}/Projects/{}/jobs'.format(hub, group, project) return '/Jobs'
[ "def", "get_job_url", "(", "config", ",", "hub", ",", "group", ",", "project", ")", ":", "if", "(", "(", "config", "is", "not", "None", ")", "and", "(", "'hub'", "in", "config", ")", "and", "(", "hub", "is", "None", ")", ")", ":", "hub", "=", "config", "[", "\"hub\"", "]", "if", "(", "(", "config", "is", "not", "None", ")", "and", "(", "'group'", "in", "config", ")", "and", "(", "group", "is", "None", ")", ")", ":", "group", "=", "config", "[", "\"group\"", "]", "if", "(", "(", "config", "is", "not", "None", ")", "and", "(", "'project'", "in", "config", ")", "and", "(", "project", "is", "None", ")", ")", ":", "project", "=", "config", "[", "\"project\"", "]", "if", "(", "(", "hub", "is", "not", "None", ")", "and", "(", "group", "is", "not", "None", ")", "and", "(", "project", "is", "not", "None", ")", ")", ":", "return", "'/Network/{}/Groups/{}/Projects/{}/jobs'", ".", "format", "(", "hub", ",", "group", ",", "project", ")", "return", "'/Jobs'" ]
Util method to get job url
[ "Util", "method", "to", "get", "job", "url" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L22-L34
11,661
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
get_backend_stats_url
def get_backend_stats_url(config, hub, backend_type): """ Util method to get backend stats url """ if ((config is not None) and ('hub' in config) and (hub is None)): hub = config["hub"] if (hub is not None): return '/Network/{}/devices/{}'.format(hub, backend_type) return '/Backends/{}'.format(backend_type)
python
def get_backend_stats_url(config, hub, backend_type): """ Util method to get backend stats url """ if ((config is not None) and ('hub' in config) and (hub is None)): hub = config["hub"] if (hub is not None): return '/Network/{}/devices/{}'.format(hub, backend_type) return '/Backends/{}'.format(backend_type)
[ "def", "get_backend_stats_url", "(", "config", ",", "hub", ",", "backend_type", ")", ":", "if", "(", "(", "config", "is", "not", "None", ")", "and", "(", "'hub'", "in", "config", ")", "and", "(", "hub", "is", "None", ")", ")", ":", "hub", "=", "config", "[", "\"hub\"", "]", "if", "(", "hub", "is", "not", "None", ")", ":", "return", "'/Network/{}/devices/{}'", ".", "format", "(", "hub", ",", "backend_type", ")", "return", "'/Backends/{}'", ".", "format", "(", "backend_type", ")" ]
Util method to get backend stats url
[ "Util", "method", "to", "get", "backend", "stats", "url" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L36-L44
11,662
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
get_backend_url
def get_backend_url(config, hub, group, project): """ Util method to get backend url """ if ((config is not None) and ('hub' in config) and (hub is None)): hub = config["hub"] if ((config is not None) and ('group' in config) and (group is None)): group = config["group"] if ((config is not None) and ('project' in config) and (project is None)): project = config["project"] if ((hub is not None) and (group is not None) and (project is not None)): return '/Network/{}/Groups/{}/Projects/{}/devices'.format(hub, group, project) return '/Backends'
python
def get_backend_url(config, hub, group, project): """ Util method to get backend url """ if ((config is not None) and ('hub' in config) and (hub is None)): hub = config["hub"] if ((config is not None) and ('group' in config) and (group is None)): group = config["group"] if ((config is not None) and ('project' in config) and (project is None)): project = config["project"] if ((hub is not None) and (group is not None) and (project is not None)): return '/Network/{}/Groups/{}/Projects/{}/devices'.format(hub, group, project) return '/Backends'
[ "def", "get_backend_url", "(", "config", ",", "hub", ",", "group", ",", "project", ")", ":", "if", "(", "(", "config", "is", "not", "None", ")", "and", "(", "'hub'", "in", "config", ")", "and", "(", "hub", "is", "None", ")", ")", ":", "hub", "=", "config", "[", "\"hub\"", "]", "if", "(", "(", "config", "is", "not", "None", ")", "and", "(", "'group'", "in", "config", ")", "and", "(", "group", "is", "None", ")", ")", ":", "group", "=", "config", "[", "\"group\"", "]", "if", "(", "(", "config", "is", "not", "None", ")", "and", "(", "'project'", "in", "config", ")", "and", "(", "project", "is", "None", ")", ")", ":", "project", "=", "config", "[", "\"project\"", "]", "if", "(", "(", "hub", "is", "not", "None", ")", "and", "(", "group", "is", "not", "None", ")", "and", "(", "project", "is", "not", "None", ")", ")", ":", "return", "'/Network/{}/Groups/{}/Projects/{}/devices'", ".", "format", "(", "hub", ",", "group", ",", "project", ")", "return", "'/Backends'" ]
Util method to get backend url
[ "Util", "method", "to", "get", "backend", "url" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L47-L59
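get_job_url, get_backend_stats_url and get_backend_url all follow the same pattern: missing hub/group/project arguments are backfilled from the stored config, and the /Network/... form is only used when the whole triple is known, otherwise the generic endpoint is returned. A standalone copy of the job-url variant, with made-up hub, group and project names purely for illustration:

def job_url(config, hub, group, project):
    config = config or {}
    hub = hub if hub is not None else config.get("hub")
    group = group if group is not None else config.get("group")
    project = project if project is not None else config.get("project")
    if hub is not None and group is not None and project is not None:
        return '/Network/{}/Groups/{}/Projects/{}/jobs'.format(hub, group, project)
    return '/Jobs'

print(job_url({"hub": "some-hub", "group": "some-group", "project": "some-project"},
              None, None, None))
# -> /Network/some-hub/Groups/some-group/Projects/some-project/jobs
print(job_url(None, None, None, None))  # -> /Jobs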
11,663
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
_Credentials.obtain_token
def obtain_token(self, config=None): """Obtain the token to access to QX Platform. Raises: CredentialsError: when token is invalid or the user has not accepted the license. ApiError: when the response from the server couldn't be parsed. """ client_application = CLIENT_APPLICATION if self.config and ("client_application" in self.config): client_application += ':' + self.config["client_application"] headers = {'x-qx-client-application': client_application} if self.token_unique: try: response = requests.post(str(self.config.get('url') + "/users/loginWithToken"), data={'apiToken': self.token_unique}, verify=self.verify, headers=headers, **self.extra_args) except requests.RequestException as e: raise ApiError('error during login: %s' % str(e)) elif config and ("email" in config) and ("password" in config): email = config.get('email', None) password = config.get('password', None) credentials = { 'email': email, 'password': password } try: response = requests.post(str(self.config.get('url') + "/users/login"), data=credentials, verify=self.verify, headers=headers, **self.extra_args) except requests.RequestException as e: raise ApiError('error during login: %s' % str(e)) else: raise CredentialsError('invalid token') if response.status_code == 401: error_message = None try: # For 401: ACCEPT_LICENSE_REQUIRED, a detailed message is # present in the response and passed to the exception. error_message = response.json()['error']['message'] except: pass if error_message: raise CredentialsError('error during login: %s' % error_message) else: raise CredentialsError('invalid token') try: response.raise_for_status() self.data_credentials = response.json() except (requests.HTTPError, ValueError) as e: raise ApiError('error during login: %s' % str(e)) if self.get_token() is None: raise CredentialsError('invalid token')
python
def obtain_token(self, config=None): """Obtain the token to access to QX Platform. Raises: CredentialsError: when token is invalid or the user has not accepted the license. ApiError: when the response from the server couldn't be parsed. """ client_application = CLIENT_APPLICATION if self.config and ("client_application" in self.config): client_application += ':' + self.config["client_application"] headers = {'x-qx-client-application': client_application} if self.token_unique: try: response = requests.post(str(self.config.get('url') + "/users/loginWithToken"), data={'apiToken': self.token_unique}, verify=self.verify, headers=headers, **self.extra_args) except requests.RequestException as e: raise ApiError('error during login: %s' % str(e)) elif config and ("email" in config) and ("password" in config): email = config.get('email', None) password = config.get('password', None) credentials = { 'email': email, 'password': password } try: response = requests.post(str(self.config.get('url') + "/users/login"), data=credentials, verify=self.verify, headers=headers, **self.extra_args) except requests.RequestException as e: raise ApiError('error during login: %s' % str(e)) else: raise CredentialsError('invalid token') if response.status_code == 401: error_message = None try: # For 401: ACCEPT_LICENSE_REQUIRED, a detailed message is # present in the response and passed to the exception. error_message = response.json()['error']['message'] except: pass if error_message: raise CredentialsError('error during login: %s' % error_message) else: raise CredentialsError('invalid token') try: response.raise_for_status() self.data_credentials = response.json() except (requests.HTTPError, ValueError) as e: raise ApiError('error during login: %s' % str(e)) if self.get_token() is None: raise CredentialsError('invalid token')
[ "def", "obtain_token", "(", "self", ",", "config", "=", "None", ")", ":", "client_application", "=", "CLIENT_APPLICATION", "if", "self", ".", "config", "and", "(", "\"client_application\"", "in", "self", ".", "config", ")", ":", "client_application", "+=", "':'", "+", "self", ".", "config", "[", "\"client_application\"", "]", "headers", "=", "{", "'x-qx-client-application'", ":", "client_application", "}", "if", "self", ".", "token_unique", ":", "try", ":", "response", "=", "requests", ".", "post", "(", "str", "(", "self", ".", "config", ".", "get", "(", "'url'", ")", "+", "\"/users/loginWithToken\"", ")", ",", "data", "=", "{", "'apiToken'", ":", "self", ".", "token_unique", "}", ",", "verify", "=", "self", ".", "verify", ",", "headers", "=", "headers", ",", "*", "*", "self", ".", "extra_args", ")", "except", "requests", ".", "RequestException", "as", "e", ":", "raise", "ApiError", "(", "'error during login: %s'", "%", "str", "(", "e", ")", ")", "elif", "config", "and", "(", "\"email\"", "in", "config", ")", "and", "(", "\"password\"", "in", "config", ")", ":", "email", "=", "config", ".", "get", "(", "'email'", ",", "None", ")", "password", "=", "config", ".", "get", "(", "'password'", ",", "None", ")", "credentials", "=", "{", "'email'", ":", "email", ",", "'password'", ":", "password", "}", "try", ":", "response", "=", "requests", ".", "post", "(", "str", "(", "self", ".", "config", ".", "get", "(", "'url'", ")", "+", "\"/users/login\"", ")", ",", "data", "=", "credentials", ",", "verify", "=", "self", ".", "verify", ",", "headers", "=", "headers", ",", "*", "*", "self", ".", "extra_args", ")", "except", "requests", ".", "RequestException", "as", "e", ":", "raise", "ApiError", "(", "'error during login: %s'", "%", "str", "(", "e", ")", ")", "else", ":", "raise", "CredentialsError", "(", "'invalid token'", ")", "if", "response", ".", "status_code", "==", "401", ":", "error_message", "=", "None", "try", ":", "# For 401: ACCEPT_LICENSE_REQUIRED, a detailed message is", "# present in the response and passed to the exception.", "error_message", "=", "response", ".", "json", "(", ")", "[", "'error'", "]", "[", "'message'", "]", "except", ":", "pass", "if", "error_message", ":", "raise", "CredentialsError", "(", "'error during login: %s'", "%", "error_message", ")", "else", ":", "raise", "CredentialsError", "(", "'invalid token'", ")", "try", ":", "response", ".", "raise_for_status", "(", ")", "self", ".", "data_credentials", "=", "response", ".", "json", "(", ")", "except", "(", "requests", ".", "HTTPError", ",", "ValueError", ")", "as", "e", ":", "raise", "ApiError", "(", "'error during login: %s'", "%", "str", "(", "e", ")", ")", "if", "self", ".", "get_token", "(", ")", "is", "None", ":", "raise", "CredentialsError", "(", "'invalid token'", ")" ]
Obtain the token to access the QX Platform. Raises: CredentialsError: when token is invalid or the user has not accepted the license. ApiError: when the response from the server couldn't be parsed.
[ "Obtain", "the", "token", "to", "access", "the", "QX", "Platform", "." ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L107-L169
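One detail worth isolating is the 401 handling: the code first tries to surface the server's own error message (for example the accept-license case) and only falls back to the generic 'invalid token' when the body cannot be parsed. A standalone sketch with a stand-in response object, not the real requests.Response:

class FakeResponse:
    # Stand-in for requests.Response, just enough for the sketch.
    status_code = 401
    @staticmethod
    def json():
        return {"error": {"message": "ACCEPT_LICENSE_REQUIRED: please accept the license"}}

def login_error_message(response, default="invalid token"):
    if response.status_code != 401:
        return None
    try:
        return response.json()["error"]["message"]
    except (ValueError, KeyError, TypeError):
        return default

print(login_error_message(FakeResponse()))
# -> ACCEPT_LICENSE_REQUIRED: please accept the license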
11,664
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
_Request.check_token
def check_token(self, respond): """ Check is the user's token is valid """ if respond.status_code == 401: self.credential.obtain_token(config=self.config) return False return True
python
def check_token(self, respond): """ Check is the user's token is valid """ if respond.status_code == 401: self.credential.obtain_token(config=self.config) return False return True
[ "def", "check_token", "(", "self", ",", "respond", ")", ":", "if", "respond", ".", "status_code", "==", "401", ":", "self", ".", "credential", ".", "obtain_token", "(", "config", "=", "self", ".", "config", ")", "return", "False", "return", "True" ]
Check if the user's token is valid
[ "Check", "if", "the", "user", "s", "token", "is", "valid" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L264-L271
11,665
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
_Request.post
def post(self, path, params='', data=None): """ POST Method Wrapper of the REST API """ self.result = None data = data or {} headers = {'Content-Type': 'application/json', 'x-qx-client-application': self.client_application} url = str(self.credential.config['url'] + path + '?access_token=' + self.credential.get_token() + params) retries = self.retries while retries > 0: respond = requests.post(url, data=data, headers=headers, verify=self.verify, **self.extra_args) if not self.check_token(respond): respond = requests.post(url, data=data, headers=headers, verify=self.verify, **self.extra_args) if self._response_good(respond): if self.result: return self.result elif retries < 2: return respond.json() else: retries -= 1 else: retries -= 1 time.sleep(self.timeout_interval) # timed out raise ApiError(usr_msg='Failed to get proper ' + 'response from backend.')
python
def post(self, path, params='', data=None): """ POST Method Wrapper of the REST API """ self.result = None data = data or {} headers = {'Content-Type': 'application/json', 'x-qx-client-application': self.client_application} url = str(self.credential.config['url'] + path + '?access_token=' + self.credential.get_token() + params) retries = self.retries while retries > 0: respond = requests.post(url, data=data, headers=headers, verify=self.verify, **self.extra_args) if not self.check_token(respond): respond = requests.post(url, data=data, headers=headers, verify=self.verify, **self.extra_args) if self._response_good(respond): if self.result: return self.result elif retries < 2: return respond.json() else: retries -= 1 else: retries -= 1 time.sleep(self.timeout_interval) # timed out raise ApiError(usr_msg='Failed to get proper ' + 'response from backend.')
[ "def", "post", "(", "self", ",", "path", ",", "params", "=", "''", ",", "data", "=", "None", ")", ":", "self", ".", "result", "=", "None", "data", "=", "data", "or", "{", "}", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", ",", "'x-qx-client-application'", ":", "self", ".", "client_application", "}", "url", "=", "str", "(", "self", ".", "credential", ".", "config", "[", "'url'", "]", "+", "path", "+", "'?access_token='", "+", "self", ".", "credential", ".", "get_token", "(", ")", "+", "params", ")", "retries", "=", "self", ".", "retries", "while", "retries", ">", "0", ":", "respond", "=", "requests", ".", "post", "(", "url", ",", "data", "=", "data", ",", "headers", "=", "headers", ",", "verify", "=", "self", ".", "verify", ",", "*", "*", "self", ".", "extra_args", ")", "if", "not", "self", ".", "check_token", "(", "respond", ")", ":", "respond", "=", "requests", ".", "post", "(", "url", ",", "data", "=", "data", ",", "headers", "=", "headers", ",", "verify", "=", "self", ".", "verify", ",", "*", "*", "self", ".", "extra_args", ")", "if", "self", ".", "_response_good", "(", "respond", ")", ":", "if", "self", ".", "result", ":", "return", "self", ".", "result", "elif", "retries", "<", "2", ":", "return", "respond", ".", "json", "(", ")", "else", ":", "retries", "-=", "1", "else", ":", "retries", "-=", "1", "time", ".", "sleep", "(", "self", ".", "timeout_interval", ")", "# timed out", "raise", "ApiError", "(", "usr_msg", "=", "'Failed to get proper '", "+", "'response from backend.'", ")" ]
POST Method Wrapper of the REST API
[ "POST", "Method", "Wrapper", "of", "the", "REST", "API" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L273-L305
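Stripped of the HTTP details, post is a bounded retry loop: issue the request, refresh the token and re-issue once if it came back unauthorized, accept a good response, otherwise sleep and try again until the retry budget runs out. A generic sketch of that shape with hypothetical helper callables, not the library's API:

import time

def call_with_retries(do_request, token_ok, refresh_token, response_good,
                      retries=5, pause=1.0):
    while retries > 0:
        response = do_request()
        if not token_ok(response):   # e.g. a 401: refresh credentials, re-issue once
            refresh_token()
            response = do_request()
        if response_good(response):
            return response
        retries -= 1
        time.sleep(pause)
    raise RuntimeError("Failed to get proper response from backend.")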
11,666
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
_Request._parse_response
def _parse_response(self, respond): """parse text of response for HTTP errors This parses the text of the response to decide whether to retry request or raise exception. At the moment this only detects an exception condition. Args: respond (Response): requests.Response object Returns: bool: False if the request should be retried, True if not. Raises: RegisterSizeError """ # convert error messages into exceptions mobj = self._max_qubit_error_re.match(respond.text) if mobj: raise RegisterSizeError( 'device register size must be <= {}'.format(mobj.group(1))) return True
python
def _parse_response(self, respond): """parse text of response for HTTP errors This parses the text of the response to decide whether to retry request or raise exception. At the moment this only detects an exception condition. Args: respond (Response): requests.Response object Returns: bool: False if the request should be retried, True if not. Raises: RegisterSizeError """ # convert error messages into exceptions mobj = self._max_qubit_error_re.match(respond.text) if mobj: raise RegisterSizeError( 'device register size must be <= {}'.format(mobj.group(1))) return True
[ "def", "_parse_response", "(", "self", ",", "respond", ")", ":", "# convert error messages into exceptions", "mobj", "=", "self", ".", "_max_qubit_error_re", ".", "match", "(", "respond", ".", "text", ")", "if", "mobj", ":", "raise", "RegisterSizeError", "(", "'device register size must be <= {}'", ".", "format", "(", "mobj", ".", "group", "(", "1", ")", ")", ")", "return", "True" ]
parse text of response for HTTP errors This parses the text of the response to decide whether to retry request or raise exception. At the moment this only detects an exception condition. Args: respond (Response): requests.Response object Returns: bool: False if the request should be retried, True if not. Raises: RegisterSizeError
[ "parse", "text", "of", "response", "for", "HTTP", "errors" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L421-L443
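The compiled pattern itself (_max_qubit_error_re) is defined elsewhere in the class and is not shown in this record, so the sketch below uses a hypothetical regex purely to illustrate the idea: recognize a known server error string and promote it to a typed exception instead of retrying.

import re

# Hypothetical pattern -- the real _max_qubit_error_re lives elsewhere in the module.
MAX_QUBIT_ERROR_RE = re.compile(r".*exceeds? the number of qubits.*?(\d+)")

class RegisterSizeError(Exception):
    pass

def check_register_size(response_text):
    mobj = MAX_QUBIT_ERROR_RE.match(response_text)
    if mobj:
        raise RegisterSizeError(
            'device register size must be <= {}'.format(mobj.group(1)))
    return True  # no known fatal error in the response text

try:
    check_register_size("Error: register exceeds the number of qubits available: 5")
except RegisterSizeError as err:
    print(err)  # -> device register size must be <= 5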
11,667
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience._check_backend
def _check_backend(self, backend, endpoint): """ Check if the name of a backend is valid to run in QX Platform """ # First check against hacks for old backend names original_backend = backend backend = backend.lower() if endpoint == 'experiment': if backend in self.__names_backend_ibmqxv2: return 'real' elif backend in self.__names_backend_ibmqxv3: return 'ibmqx3' elif backend in self.__names_backend_simulator: return 'sim_trivial_2' # Check for new-style backends backends = self.available_backends() for backend in backends: if backend['name'] == original_backend: return original_backend # backend unrecognized return None
python
def _check_backend(self, backend, endpoint): """ Check if the name of a backend is valid to run in QX Platform """ # First check against hacks for old backend names original_backend = backend backend = backend.lower() if endpoint == 'experiment': if backend in self.__names_backend_ibmqxv2: return 'real' elif backend in self.__names_backend_ibmqxv3: return 'ibmqx3' elif backend in self.__names_backend_simulator: return 'sim_trivial_2' # Check for new-style backends backends = self.available_backends() for backend in backends: if backend['name'] == original_backend: return original_backend # backend unrecognized return None
[ "def", "_check_backend", "(", "self", ",", "backend", ",", "endpoint", ")", ":", "# First check against hacks for old backend names", "original_backend", "=", "backend", "backend", "=", "backend", ".", "lower", "(", ")", "if", "endpoint", "==", "'experiment'", ":", "if", "backend", "in", "self", ".", "__names_backend_ibmqxv2", ":", "return", "'real'", "elif", "backend", "in", "self", ".", "__names_backend_ibmqxv3", ":", "return", "'ibmqx3'", "elif", "backend", "in", "self", ".", "__names_backend_simulator", ":", "return", "'sim_trivial_2'", "# Check for new-style backends", "backends", "=", "self", ".", "available_backends", "(", ")", "for", "backend", "in", "backends", ":", "if", "backend", "[", "'name'", "]", "==", "original_backend", ":", "return", "original_backend", "# backend unrecognized", "return", "None" ]
Check if the name of a backend is valid to run in QX Platform
[ "Check", "if", "the", "name", "of", "a", "backend", "is", "valid", "to", "run", "in", "QX", "Platform" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L480-L501
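_check_backend resolves a user-supplied backend name in two stages: first against hard-coded aliases for legacy device names, then against the live list returned by available_backends(). A minimal standalone sketch of that two-stage lookup, with made-up alias tables (the real ones live in private attributes of the client class):

# Hypothetical alias tables for illustration only.
ALIASES_V2 = {'ibmqx2', 'real'}
ALIASES_SIMULATOR = {'simulator', 'sim_trivial_2'}

def resolve_backend(name, live_backends):
    """Return a canonical backend name, or None if unrecognised."""
    original = name
    lowered = name.lower()
    if lowered in ALIASES_V2:
        return 'real'
    if lowered in ALIASES_SIMULATOR:
        return 'sim_trivial_2'
    # Fall back to the names reported by the service.
    for backend in live_backends:
        if backend['name'] == original:
            return original
    return None

print(resolve_backend('simulator', [{'name': 'ibmqx4'}]))  # -> 'sim_trivial_2'
print(resolve_backend('unknown', [{'name': 'ibmqx4'}]))    # -> None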
11,668
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.get_execution
def get_execution(self, id_execution, access_token=None, user_id=None): """ Get a execution, by its id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') execution = self.req.get('/Executions/' + id_execution) if "codeId" in execution: execution['code'] = self.get_code(execution["codeId"]) return execution
python
def get_execution(self, id_execution, access_token=None, user_id=None): """ Get a execution, by its id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') execution = self.req.get('/Executions/' + id_execution) if "codeId" in execution: execution['code'] = self.get_code(execution["codeId"]) return execution
[ "def", "get_execution", "(", "self", ",", "id_execution", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "raise", "CredentialsError", "(", "'credentials invalid'", ")", "execution", "=", "self", ".", "req", ".", "get", "(", "'/Executions/'", "+", "id_execution", ")", "if", "\"codeId\"", "in", "execution", ":", "execution", "[", "'code'", "]", "=", "self", ".", "get_code", "(", "execution", "[", "\"codeId\"", "]", ")", "return", "execution" ]
Get a execution, by its id
[ "Get", "a", "execution", "by", "its", "id" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L509-L522
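A minimal usage sketch for get_execution, assuming an IBMQuantumExperience client constructed with a valid API token (the constructor and import path are assumed from the package layout shown in these records, not from this record itself):

from IBMQuantumExperience import IBMQuantumExperience  # import path assumed

api = IBMQuantumExperience('MY_API_TOKEN')  # placeholder token

execution = api.get_execution('EXECUTION_ID')  # placeholder id
print(sorted(execution.keys()))
if 'code' in execution:
    # The client attaches the full code object when the execution references one.
    print('execution includes its source code object')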
11,669
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.get_result_from_execution
def get_result_from_execution(self, id_execution, access_token=None, user_id=None): """ Get the result of a execution, by the execution id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') execution = self.req.get('/Executions/' + id_execution) result = {} if "result" in execution and "data" in execution["result"]: if execution["result"]["data"].get('p', None): result["measure"] = execution["result"]["data"]["p"] if execution["result"]["data"].get('valsxyz', None): result["bloch"] = execution["result"]["data"]["valsxyz"] if "additionalData" in execution["result"]["data"]: ad_aux = execution["result"]["data"]["additionalData"] result["extraInfo"] = ad_aux if "calibration" in execution: result["calibration"] = execution["calibration"] if execution["result"]["data"].get('cregLabels', None): result["creg_labels"] = execution["result"]["data"]["cregLabels"] if execution["result"]["data"].get('time', None): result["time_taken"] = execution["result"]["data"]["time"] return result
python
def get_result_from_execution(self, id_execution, access_token=None, user_id=None): """ Get the result of a execution, by the execution id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') execution = self.req.get('/Executions/' + id_execution) result = {} if "result" in execution and "data" in execution["result"]: if execution["result"]["data"].get('p', None): result["measure"] = execution["result"]["data"]["p"] if execution["result"]["data"].get('valsxyz', None): result["bloch"] = execution["result"]["data"]["valsxyz"] if "additionalData" in execution["result"]["data"]: ad_aux = execution["result"]["data"]["additionalData"] result["extraInfo"] = ad_aux if "calibration" in execution: result["calibration"] = execution["calibration"] if execution["result"]["data"].get('cregLabels', None): result["creg_labels"] = execution["result"]["data"]["cregLabels"] if execution["result"]["data"].get('time', None): result["time_taken"] = execution["result"]["data"]["time"] return result
[ "def", "get_result_from_execution", "(", "self", ",", "id_execution", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "raise", "CredentialsError", "(", "'credentials invalid'", ")", "execution", "=", "self", ".", "req", ".", "get", "(", "'/Executions/'", "+", "id_execution", ")", "result", "=", "{", "}", "if", "\"result\"", "in", "execution", "and", "\"data\"", "in", "execution", "[", "\"result\"", "]", ":", "if", "execution", "[", "\"result\"", "]", "[", "\"data\"", "]", ".", "get", "(", "'p'", ",", "None", ")", ":", "result", "[", "\"measure\"", "]", "=", "execution", "[", "\"result\"", "]", "[", "\"data\"", "]", "[", "\"p\"", "]", "if", "execution", "[", "\"result\"", "]", "[", "\"data\"", "]", ".", "get", "(", "'valsxyz'", ",", "None", ")", ":", "result", "[", "\"bloch\"", "]", "=", "execution", "[", "\"result\"", "]", "[", "\"data\"", "]", "[", "\"valsxyz\"", "]", "if", "\"additionalData\"", "in", "execution", "[", "\"result\"", "]", "[", "\"data\"", "]", ":", "ad_aux", "=", "execution", "[", "\"result\"", "]", "[", "\"data\"", "]", "[", "\"additionalData\"", "]", "result", "[", "\"extraInfo\"", "]", "=", "ad_aux", "if", "\"calibration\"", "in", "execution", ":", "result", "[", "\"calibration\"", "]", "=", "execution", "[", "\"calibration\"", "]", "if", "execution", "[", "\"result\"", "]", "[", "\"data\"", "]", ".", "get", "(", "'cregLabels'", ",", "None", ")", ":", "result", "[", "\"creg_labels\"", "]", "=", "execution", "[", "\"result\"", "]", "[", "\"data\"", "]", "[", "\"cregLabels\"", "]", "if", "execution", "[", "\"result\"", "]", "[", "\"data\"", "]", ".", "get", "(", "'time'", ",", "None", ")", ":", "result", "[", "\"time_taken\"", "]", "=", "execution", "[", "\"result\"", "]", "[", "\"data\"", "]", "[", "\"time\"", "]", "return", "result" ]
Get the result of a execution, by the execution id
[ "Get", "the", "result", "of", "a", "execution", "by", "the", "execution", "id" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L524-L551
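get_result_from_execution flattens the server's nested result payload into a small dict whose keys are only present when the corresponding data exists. A hedged usage sketch, reusing the client from the earlier sketch:

# 'api' is the IBMQuantumExperience client constructed in the earlier sketch.
result = api.get_result_from_execution('EXECUTION_ID')  # placeholder id
print(result.get('measure'))      # probability data, if present
print(result.get('time_taken'))   # execution time, if reported
print(result.get('calibration'))  # calibration snapshot, if attached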
11,670
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.get_code
def get_code(self, id_code, access_token=None, user_id=None): """ Get a code, by its id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') code = self.req.get('/Codes/' + id_code) executions = self.req.get('/Codes/' + id_code + '/executions', '&filter={"limit":3}') if isinstance(executions, list): code["executions"] = executions return code
python
def get_code(self, id_code, access_token=None, user_id=None): """ Get a code, by its id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') code = self.req.get('/Codes/' + id_code) executions = self.req.get('/Codes/' + id_code + '/executions', '&filter={"limit":3}') if isinstance(executions, list): code["executions"] = executions return code
[ "def", "get_code", "(", "self", ",", "id_code", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "raise", "CredentialsError", "(", "'credentials invalid'", ")", "code", "=", "self", ".", "req", ".", "get", "(", "'/Codes/'", "+", "id_code", ")", "executions", "=", "self", ".", "req", ".", "get", "(", "'/Codes/'", "+", "id_code", "+", "'/executions'", ",", "'&filter={\"limit\":3}'", ")", "if", "isinstance", "(", "executions", ",", "list", ")", ":", "code", "[", "\"executions\"", "]", "=", "executions", "return", "code" ]
Get a code, by its id
[ "Get", "a", "code", "by", "its", "id" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L553-L568
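get_code fetches a stored code plus up to three of its executions (the limit comes from the filter string in the record). Usage sketch under the same assumptions as above:

# 'api' is the IBMQuantumExperience client constructed in the earlier sketch.
code = api.get_code('CODE_ID')  # placeholder id
for execution in code.get('executions', []):
    # Each entry is a raw execution record; inspect it rather than assuming a schema.
    print(sorted(execution.keys()))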
11,671
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.get_image_code
def get_image_code(self, id_code, access_token=None, user_id=None):
        """
        Get the image of a code, by its id
        """
        if access_token:
            self.req.credential.set_token(access_token)
        if user_id:
            self.req.credential.set_user_id(user_id)
        if not self.check_credentials():
            raise CredentialsError('credentials invalid')
        return self.req.get('/Codes/' + id_code + '/export/png/url')
python
def get_image_code(self, id_code, access_token=None, user_id=None):
        """
        Get the image of a code, by its id
        """
        if access_token:
            self.req.credential.set_token(access_token)
        if user_id:
            self.req.credential.set_user_id(user_id)
        if not self.check_credentials():
            raise CredentialsError('credentials invalid')
        return self.req.get('/Codes/' + id_code + '/export/png/url')
[ "def", "get_image_code", "(", "self", ",", "id_code", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "raise", "CredentialsError", "(", "'credentials invalid'", ")", "return", "self", ".", "req", ".", "get", "(", "'/Codes/'", "+", "id_code", "+", "'/export/png/url'", ")" ]
Get the image of a code, by its id
[ "Get", "the", "image", "of", "a", "code", "by", "its", "id" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L570-L580
11,672
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.get_last_codes
def get_last_codes(self, access_token=None, user_id=None): """ Get the last codes of the user """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') last = '/users/' + self.req.credential.get_user_id() + '/codes/lastest' return self.req.get(last, '&includeExecutions=true')['codes']
python
def get_last_codes(self, access_token=None, user_id=None): """ Get the last codes of the user """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') last = '/users/' + self.req.credential.get_user_id() + '/codes/lastest' return self.req.get(last, '&includeExecutions=true')['codes']
[ "def", "get_last_codes", "(", "self", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "raise", "CredentialsError", "(", "'credentials invalid'", ")", "last", "=", "'/users/'", "+", "self", ".", "req", ".", "credential", ".", "get_user_id", "(", ")", "+", "'/codes/lastest'", "return", "self", ".", "req", ".", "get", "(", "last", ",", "'&includeExecutions=true'", ")", "[", "'codes'", "]" ]
Get the last codes of the user
[ "Get", "the", "last", "codes", "of", "the", "user" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L582-L593
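get_last_codes hits the per-user 'lastest' endpoint (spelling as in the service URL) and returns the 'codes' list with executions included. A short sketch:

# 'api' is the IBMQuantumExperience client constructed in the earlier sketch.
for code in api.get_last_codes():
    # Each entry is a raw code record from the service; inspect its keys.
    print(sorted(code.keys()))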
11,673
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.run_job
def run_job(self, job, backend='simulator', shots=1, max_credits=None, seed=None, hub=None, group=None, project=None, hpc=None, access_token=None, user_id=None): """ Execute a job """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): return {"error": "Not credentials valid"} backend_type = self._check_backend(backend, 'job') if not backend_type: raise BadBackendError(backend) if isinstance(job, (list, tuple)): qasms = job for qasm in qasms: qasm['qasm'] = qasm['qasm'].replace('IBMQASM 2.0;', '') qasm['qasm'] = qasm['qasm'].replace('OPENQASM 2.0;', '') data = {'qasms': qasms, 'shots': shots, 'backend': {}} if max_credits: data['maxCredits'] = max_credits if seed and len(str(seed)) < 11 and str(seed).isdigit(): data['seed'] = seed elif seed: return {"error": "Not seed allowed. Max 10 digits."} data['backend']['name'] = backend_type elif isinstance(job, dict): q_obj = job data = {'qObject': q_obj, 'backend': {}} data['backend']['name'] = backend_type else: return {"error": "Not a valid data to send"} if hpc: data['hpc'] = hpc url = get_job_url(self.config, hub, group, project) job = self.req.post(url, data=json.dumps(data)) return job
python
def run_job(self, job, backend='simulator', shots=1, max_credits=None, seed=None, hub=None, group=None, project=None, hpc=None, access_token=None, user_id=None): """ Execute a job """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): return {"error": "Not credentials valid"} backend_type = self._check_backend(backend, 'job') if not backend_type: raise BadBackendError(backend) if isinstance(job, (list, tuple)): qasms = job for qasm in qasms: qasm['qasm'] = qasm['qasm'].replace('IBMQASM 2.0;', '') qasm['qasm'] = qasm['qasm'].replace('OPENQASM 2.0;', '') data = {'qasms': qasms, 'shots': shots, 'backend': {}} if max_credits: data['maxCredits'] = max_credits if seed and len(str(seed)) < 11 and str(seed).isdigit(): data['seed'] = seed elif seed: return {"error": "Not seed allowed. Max 10 digits."} data['backend']['name'] = backend_type elif isinstance(job, dict): q_obj = job data = {'qObject': q_obj, 'backend': {}} data['backend']['name'] = backend_type else: return {"error": "Not a valid data to send"} if hpc: data['hpc'] = hpc url = get_job_url(self.config, hub, group, project) job = self.req.post(url, data=json.dumps(data)) return job
[ "def", "run_job", "(", "self", ",", "job", ",", "backend", "=", "'simulator'", ",", "shots", "=", "1", ",", "max_credits", "=", "None", ",", "seed", "=", "None", ",", "hub", "=", "None", ",", "group", "=", "None", ",", "project", "=", "None", ",", "hpc", "=", "None", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "return", "{", "\"error\"", ":", "\"Not credentials valid\"", "}", "backend_type", "=", "self", ".", "_check_backend", "(", "backend", ",", "'job'", ")", "if", "not", "backend_type", ":", "raise", "BadBackendError", "(", "backend", ")", "if", "isinstance", "(", "job", ",", "(", "list", ",", "tuple", ")", ")", ":", "qasms", "=", "job", "for", "qasm", "in", "qasms", ":", "qasm", "[", "'qasm'", "]", "=", "qasm", "[", "'qasm'", "]", ".", "replace", "(", "'IBMQASM 2.0;'", ",", "''", ")", "qasm", "[", "'qasm'", "]", "=", "qasm", "[", "'qasm'", "]", ".", "replace", "(", "'OPENQASM 2.0;'", ",", "''", ")", "data", "=", "{", "'qasms'", ":", "qasms", ",", "'shots'", ":", "shots", ",", "'backend'", ":", "{", "}", "}", "if", "max_credits", ":", "data", "[", "'maxCredits'", "]", "=", "max_credits", "if", "seed", "and", "len", "(", "str", "(", "seed", ")", ")", "<", "11", "and", "str", "(", "seed", ")", ".", "isdigit", "(", ")", ":", "data", "[", "'seed'", "]", "=", "seed", "elif", "seed", ":", "return", "{", "\"error\"", ":", "\"Not seed allowed. Max 10 digits.\"", "}", "data", "[", "'backend'", "]", "[", "'name'", "]", "=", "backend_type", "elif", "isinstance", "(", "job", ",", "dict", ")", ":", "q_obj", "=", "job", "data", "=", "{", "'qObject'", ":", "q_obj", ",", "'backend'", ":", "{", "}", "}", "data", "[", "'backend'", "]", "[", "'name'", "]", "=", "backend_type", "else", ":", "return", "{", "\"error\"", ":", "\"Not a valid data to send\"", "}", "if", "hpc", ":", "data", "[", "'hpc'", "]", "=", "hpc", "url", "=", "get_job_url", "(", "self", ".", "config", ",", "hub", ",", "group", ",", "project", ")", "job", "=", "self", ".", "req", ".", "post", "(", "url", ",", "data", "=", "json", ".", "dumps", "(", "data", ")", ")", "return", "job" ]
Execute a job
[ "Execute", "a", "job" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L679-L732
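run_job accepts either a list of QASM snippets (old style, dicts with a 'qasm' key) or a single qobj dict; it strips the OPENQASM/IBMQASM version headers before posting the assembled payload. A hedged example submitting one Bell-state circuit to the simulator:

# 'api' is the IBMQuantumExperience client constructed in the earlier sketch.
bell = """OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
creg c[2];
h q[0];
cx q[0], q[1];
measure q -> c;
"""
job = api.run_job([{'qasm': bell}], backend='simulator', shots=1024, max_credits=3)
print(sorted(job.keys()))  # inspect the created job record rather than assuming its schema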
11,674
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.get_job
def get_job(self, id_job, hub=None, group=None, project=None, access_token=None, user_id=None): """ Get the information about a job, by its id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): respond = {} respond["status"] = 'Error' respond["error"] = "Not credentials valid" return respond if not id_job: respond = {} respond["status"] = 'Error' respond["error"] = "Job ID not specified" return respond url = get_job_url(self.config, hub, group, project) url += '/' + id_job job = self.req.get(url) if 'qasms' in job: for qasm in job['qasms']: if ('result' in qasm) and ('data' in qasm['result']): qasm['data'] = qasm['result']['data'] del qasm['result']['data'] for key in qasm['result']: qasm['data'][key] = qasm['result'][key] del qasm['result'] return job
python
def get_job(self, id_job, hub=None, group=None, project=None, access_token=None, user_id=None): """ Get the information about a job, by its id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): respond = {} respond["status"] = 'Error' respond["error"] = "Not credentials valid" return respond if not id_job: respond = {} respond["status"] = 'Error' respond["error"] = "Job ID not specified" return respond url = get_job_url(self.config, hub, group, project) url += '/' + id_job job = self.req.get(url) if 'qasms' in job: for qasm in job['qasms']: if ('result' in qasm) and ('data' in qasm['result']): qasm['data'] = qasm['result']['data'] del qasm['result']['data'] for key in qasm['result']: qasm['data'][key] = qasm['result'][key] del qasm['result'] return job
[ "def", "get_job", "(", "self", ",", "id_job", ",", "hub", "=", "None", ",", "group", "=", "None", ",", "project", "=", "None", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "respond", "=", "{", "}", "respond", "[", "\"status\"", "]", "=", "'Error'", "respond", "[", "\"error\"", "]", "=", "\"Not credentials valid\"", "return", "respond", "if", "not", "id_job", ":", "respond", "=", "{", "}", "respond", "[", "\"status\"", "]", "=", "'Error'", "respond", "[", "\"error\"", "]", "=", "\"Job ID not specified\"", "return", "respond", "url", "=", "get_job_url", "(", "self", ".", "config", ",", "hub", ",", "group", ",", "project", ")", "url", "+=", "'/'", "+", "id_job", "job", "=", "self", ".", "req", ".", "get", "(", "url", ")", "if", "'qasms'", "in", "job", ":", "for", "qasm", "in", "job", "[", "'qasms'", "]", ":", "if", "(", "'result'", "in", "qasm", ")", "and", "(", "'data'", "in", "qasm", "[", "'result'", "]", ")", ":", "qasm", "[", "'data'", "]", "=", "qasm", "[", "'result'", "]", "[", "'data'", "]", "del", "qasm", "[", "'result'", "]", "[", "'data'", "]", "for", "key", "in", "qasm", "[", "'result'", "]", ":", "qasm", "[", "'data'", "]", "[", "key", "]", "=", "qasm", "[", "'result'", "]", "[", "key", "]", "del", "qasm", "[", "'result'", "]", "return", "job" ]
Get the information about a job, by its id
[ "Get", "the", "information", "about", "a", "job", "by", "its", "id" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L734-L769
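get_job reshapes each returned qasm entry on the client side so that everything under 'result' lands in a flat 'data' dict. Sketch:

# 'api' is the IBMQuantumExperience client constructed in the earlier sketch.
job = api.get_job('JOB_ID')  # placeholder id
for qasm in job.get('qasms', []):
    # After the reshaping done by get_job, per-circuit results live under 'data'.
    print(sorted(qasm.get('data', {}).keys()))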
11,675
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.get_jobs
def get_jobs(self, limit=10, skip=0, backend=None, only_completed=False, filter=None, hub=None, group=None, project=None, access_token=None, user_id=None): """ Get the information about the user jobs """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): return {"error": "Not credentials valid"} url = get_job_url(self.config, hub, group, project) url_filter = '&filter=' query = { "order": "creationDate DESC", "limit": limit, "skip": skip, "where" : {} } if filter is not None: query['where'] = filter else: if backend is not None: query['where']['backend.name'] = backend if only_completed: query['where']['status'] = 'COMPLETED' url_filter = url_filter + json.dumps(query) jobs = self.req.get(url, url_filter) return jobs
python
def get_jobs(self, limit=10, skip=0, backend=None, only_completed=False, filter=None, hub=None, group=None, project=None, access_token=None, user_id=None): """ Get the information about the user jobs """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): return {"error": "Not credentials valid"} url = get_job_url(self.config, hub, group, project) url_filter = '&filter=' query = { "order": "creationDate DESC", "limit": limit, "skip": skip, "where" : {} } if filter is not None: query['where'] = filter else: if backend is not None: query['where']['backend.name'] = backend if only_completed: query['where']['status'] = 'COMPLETED' url_filter = url_filter + json.dumps(query) jobs = self.req.get(url, url_filter) return jobs
[ "def", "get_jobs", "(", "self", ",", "limit", "=", "10", ",", "skip", "=", "0", ",", "backend", "=", "None", ",", "only_completed", "=", "False", ",", "filter", "=", "None", ",", "hub", "=", "None", ",", "group", "=", "None", ",", "project", "=", "None", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "return", "{", "\"error\"", ":", "\"Not credentials valid\"", "}", "url", "=", "get_job_url", "(", "self", ".", "config", ",", "hub", ",", "group", ",", "project", ")", "url_filter", "=", "'&filter='", "query", "=", "{", "\"order\"", ":", "\"creationDate DESC\"", ",", "\"limit\"", ":", "limit", ",", "\"skip\"", ":", "skip", ",", "\"where\"", ":", "{", "}", "}", "if", "filter", "is", "not", "None", ":", "query", "[", "'where'", "]", "=", "filter", "else", ":", "if", "backend", "is", "not", "None", ":", "query", "[", "'where'", "]", "[", "'backend.name'", "]", "=", "backend", "if", "only_completed", ":", "query", "[", "'where'", "]", "[", "'status'", "]", "=", "'COMPLETED'", "url_filter", "=", "url_filter", "+", "json", ".", "dumps", "(", "query", ")", "jobs", "=", "self", ".", "req", ".", "get", "(", "url", ",", "url_filter", ")", "return", "jobs" ]
Get the information about the user jobs
[ "Get", "the", "information", "about", "the", "user", "jobs" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L771-L800
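get_jobs builds a Loopback-style filter (order/limit/skip/where); a raw dict passed via the filter argument replaces the generated 'where' clause entirely. Sketch:

# 'api' is the IBMQuantumExperience client constructed in the earlier sketch.
# Convenience arguments populate the 'where' clause...
completed = api.get_jobs(limit=5, backend='ibmqx4', only_completed=True)

# ...or pass a raw 'where' dict yourself (it replaces the generated one).
mine = api.get_jobs(limit=5, filter={'status': 'COMPLETED', 'backend.name': 'ibmqx4'})
print(len(completed), len(mine))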
11,676
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.get_status_job
def get_status_job(self, id_job, hub=None, group=None, project=None, access_token=None, user_id=None): """ Get the status about a job, by its id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): respond = {} respond["status"] = 'Error' respond["error"] = "Not credentials valid" return respond if not id_job: respond = {} respond["status"] = 'Error' respond["error"] = "Job ID not specified" return respond url = get_job_url(self.config, hub, group, project) url += '/' + id_job + '/status' status = self.req.get(url) return status
python
def get_status_job(self, id_job, hub=None, group=None, project=None, access_token=None, user_id=None): """ Get the status about a job, by its id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): respond = {} respond["status"] = 'Error' respond["error"] = "Not credentials valid" return respond if not id_job: respond = {} respond["status"] = 'Error' respond["error"] = "Job ID not specified" return respond url = get_job_url(self.config, hub, group, project) url += '/' + id_job + '/status' status = self.req.get(url) return status
[ "def", "get_status_job", "(", "self", ",", "id_job", ",", "hub", "=", "None", ",", "group", "=", "None", ",", "project", "=", "None", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "respond", "=", "{", "}", "respond", "[", "\"status\"", "]", "=", "'Error'", "respond", "[", "\"error\"", "]", "=", "\"Not credentials valid\"", "return", "respond", "if", "not", "id_job", ":", "respond", "=", "{", "}", "respond", "[", "\"status\"", "]", "=", "'Error'", "respond", "[", "\"error\"", "]", "=", "\"Job ID not specified\"", "return", "respond", "url", "=", "get_job_url", "(", "self", ".", "config", ",", "hub", ",", "group", ",", "project", ")", "url", "+=", "'/'", "+", "id_job", "+", "'/status'", "status", "=", "self", ".", "req", ".", "get", "(", "url", ")", "return", "status" ]
Get the status about a job, by its id
[ "Get", "the", "status", "about", "a", "job", "by", "its", "id" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L802-L828
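Because get_status_job returns only the job's status object, it is the natural polling target. A simple polling sketch; the 'COMPLETED' value appears in get_jobs above, while other terminal states are not shown in these records and should be handled as well:

import time

# 'api' is the IBMQuantumExperience client constructed in the earlier sketch.
job_id = 'JOB_ID'  # placeholder
deadline = time.time() + 120
while time.time() < deadline:
    status = api.get_status_job(job_id)
    print(status)
    if status.get('status') == 'COMPLETED':  # error/cancelled states should be checked too
        break
    time.sleep(5)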
11,677
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.cancel_job
def cancel_job(self, id_job, hub=None, group=None, project=None, access_token=None, user_id=None): """ Cancel the information about a job, by its id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): respond = {} respond["status"] = 'Error' respond["error"] = "Not credentials valid" return respond if not id_job: respond = {} respond["status"] = 'Error' respond["error"] = "Job ID not specified" return respond url = get_job_url(self.config, hub, group, project) url += '/{}/cancel'.format(id_job) res = self.req.post(url) return res
python
def cancel_job(self, id_job, hub=None, group=None, project=None, access_token=None, user_id=None): """ Cancel the information about a job, by its id """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): respond = {} respond["status"] = 'Error' respond["error"] = "Not credentials valid" return respond if not id_job: respond = {} respond["status"] = 'Error' respond["error"] = "Job ID not specified" return respond url = get_job_url(self.config, hub, group, project) url += '/{}/cancel'.format(id_job) res = self.req.post(url) return res
[ "def", "cancel_job", "(", "self", ",", "id_job", ",", "hub", "=", "None", ",", "group", "=", "None", ",", "project", "=", "None", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "respond", "=", "{", "}", "respond", "[", "\"status\"", "]", "=", "'Error'", "respond", "[", "\"error\"", "]", "=", "\"Not credentials valid\"", "return", "respond", "if", "not", "id_job", ":", "respond", "=", "{", "}", "respond", "[", "\"status\"", "]", "=", "'Error'", "respond", "[", "\"error\"", "]", "=", "\"Job ID not specified\"", "return", "respond", "url", "=", "get_job_url", "(", "self", ".", "config", ",", "hub", ",", "group", ",", "project", ")", "url", "+=", "'/{}/cancel'", ".", "format", "(", "id_job", ")", "res", "=", "self", ".", "req", ".", "post", "(", "url", ")", "return", "res" ]
Cancel the information about a job, by its id
[ "Cancel", "the", "information", "about", "a", "job", "by", "its", "id" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L863-L889
11,678
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.backend_status
def backend_status(self, backend='ibmqx4', access_token=None, user_id=None): """ Get the status of a chip """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) backend_type = self._check_backend(backend, 'status') if not backend_type: raise BadBackendError(backend) status = self.req.get('/Backends/' + backend_type + '/queue/status', with_token=False) ret = {} if 'state' in status: ret['available'] = bool(status['state']) if 'busy' in status: ret['busy'] = bool(status['busy']) if 'lengthQueue' in status: ret['pending_jobs'] = status['lengthQueue'] ret['backend'] = backend_type return ret
python
def backend_status(self, backend='ibmqx4', access_token=None, user_id=None): """ Get the status of a chip """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) backend_type = self._check_backend(backend, 'status') if not backend_type: raise BadBackendError(backend) status = self.req.get('/Backends/' + backend_type + '/queue/status', with_token=False) ret = {} if 'state' in status: ret['available'] = bool(status['state']) if 'busy' in status: ret['busy'] = bool(status['busy']) if 'lengthQueue' in status: ret['pending_jobs'] = status['lengthQueue'] ret['backend'] = backend_type return ret
[ "def", "backend_status", "(", "self", ",", "backend", "=", "'ibmqx4'", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "backend_type", "=", "self", ".", "_check_backend", "(", "backend", ",", "'status'", ")", "if", "not", "backend_type", ":", "raise", "BadBackendError", "(", "backend", ")", "status", "=", "self", ".", "req", ".", "get", "(", "'/Backends/'", "+", "backend_type", "+", "'/queue/status'", ",", "with_token", "=", "False", ")", "ret", "=", "{", "}", "if", "'state'", "in", "status", ":", "ret", "[", "'available'", "]", "=", "bool", "(", "status", "[", "'state'", "]", ")", "if", "'busy'", "in", "status", ":", "ret", "[", "'busy'", "]", "=", "bool", "(", "status", "[", "'busy'", "]", ")", "if", "'lengthQueue'", "in", "status", ":", "ret", "[", "'pending_jobs'", "]", "=", "status", "[", "'lengthQueue'", "]", "ret", "[", "'backend'", "]", "=", "backend_type", "return", "ret" ]
Get the status of a chip
[ "Get", "the", "status", "of", "a", "chip" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L891-L916
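backend_status normalises the queue endpoint's fields into available/busy/pending_jobs. Sketch:

# 'api' is the IBMQuantumExperience client constructed in the earlier sketch.
status = api.backend_status('ibmqx4')
print(status.get('available'), status.get('busy'), status.get('pending_jobs'))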
11,679
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.backend_calibration
def backend_calibration(self, backend='ibmqx4', hub=None, access_token=None, user_id=None): """ Get the calibration of a real chip """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') backend_type = self._check_backend(backend, 'calibration') if not backend_type: raise BadBackendError(backend) if backend_type in self.__names_backend_simulator: ret = {} return ret url = get_backend_stats_url(self.config, hub, backend_type) ret = self.req.get(url + '/calibration') if not bool(ret): ret = {} else: ret["backend"] = backend_type return ret
python
def backend_calibration(self, backend='ibmqx4', hub=None, access_token=None, user_id=None): """ Get the calibration of a real chip """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') backend_type = self._check_backend(backend, 'calibration') if not backend_type: raise BadBackendError(backend) if backend_type in self.__names_backend_simulator: ret = {} return ret url = get_backend_stats_url(self.config, hub, backend_type) ret = self.req.get(url + '/calibration') if not bool(ret): ret = {} else: ret["backend"] = backend_type return ret
[ "def", "backend_calibration", "(", "self", ",", "backend", "=", "'ibmqx4'", ",", "hub", "=", "None", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "raise", "CredentialsError", "(", "'credentials invalid'", ")", "backend_type", "=", "self", ".", "_check_backend", "(", "backend", ",", "'calibration'", ")", "if", "not", "backend_type", ":", "raise", "BadBackendError", "(", "backend", ")", "if", "backend_type", "in", "self", ".", "__names_backend_simulator", ":", "ret", "=", "{", "}", "return", "ret", "url", "=", "get_backend_stats_url", "(", "self", ".", "config", ",", "hub", ",", "backend_type", ")", "ret", "=", "self", ".", "req", ".", "get", "(", "url", "+", "'/calibration'", ")", "if", "not", "bool", "(", "ret", ")", ":", "ret", "=", "{", "}", "else", ":", "ret", "[", "\"backend\"", "]", "=", "backend_type", "return", "ret" ]
Get the calibration of a real chip
[ "Get", "the", "calibration", "of", "a", "real", "chip" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L918-L945
11,680
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.available_backends
def available_backends(self, hub=None, group=None, project=None, access_token=None, user_id=None): """ Get the backends available to use in the QX Platform """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') else: url = get_backend_url(self.config, hub, group, project) ret = self.req.get(url) if (ret is not None) and (isinstance(ret, dict)): return [] return [backend for backend in ret if backend.get('status') == 'on']
python
def available_backends(self, hub=None, group=None, project=None, access_token=None, user_id=None): """ Get the backends available to use in the QX Platform """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') else: url = get_backend_url(self.config, hub, group, project) ret = self.req.get(url) if (ret is not None) and (isinstance(ret, dict)): return [] return [backend for backend in ret if backend.get('status') == 'on']
[ "def", "available_backends", "(", "self", ",", "hub", "=", "None", ",", "group", "=", "None", ",", "project", "=", "None", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "raise", "CredentialsError", "(", "'credentials invalid'", ")", "else", ":", "url", "=", "get_backend_url", "(", "self", ".", "config", ",", "hub", ",", "group", ",", "project", ")", "ret", "=", "self", ".", "req", ".", "get", "(", "url", ")", "if", "(", "ret", "is", "not", "None", ")", "and", "(", "isinstance", "(", "ret", ",", "dict", ")", ")", ":", "return", "[", "]", "return", "[", "backend", "for", "backend", "in", "ret", "if", "backend", ".", "get", "(", "'status'", ")", "==", "'on'", "]" ]
Get the backends available to use in the QX Platform
[ "Get", "the", "backends", "available", "to", "use", "in", "the", "QX", "Platform" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L976-L994
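available_backends already filters out devices whose status is not 'on', and the guard in the record returns an empty list when the service answers with a dict (typically an error object) instead of a list. Listing names is therefore a one-liner:

# 'api' is the IBMQuantumExperience client constructed in the earlier sketch.
names = [backend['name'] for backend in api.available_backends()]
print(names)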
11,681
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.available_backend_simulators
def available_backend_simulators(self, access_token=None, user_id=None): """ Get the backend simulators available to use in the QX Platform """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') else: ret = self.req.get('/Backends') if (ret is not None) and (isinstance(ret, dict)): return [] return [backend for backend in ret if backend.get('status') == 'on' and backend.get('simulator') is True]
python
def available_backend_simulators(self, access_token=None, user_id=None): """ Get the backend simulators available to use in the QX Platform """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') else: ret = self.req.get('/Backends') if (ret is not None) and (isinstance(ret, dict)): return [] return [backend for backend in ret if backend.get('status') == 'on' and backend.get('simulator') is True]
[ "def", "available_backend_simulators", "(", "self", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "raise", "CredentialsError", "(", "'credentials invalid'", ")", "else", ":", "ret", "=", "self", ".", "req", ".", "get", "(", "'/Backends'", ")", "if", "(", "ret", "is", "not", "None", ")", "and", "(", "isinstance", "(", "ret", ",", "dict", ")", ")", ":", "return", "[", "]", "return", "[", "backend", "for", "backend", "in", "ret", "if", "backend", ".", "get", "(", "'status'", ")", "==", "'on'", "and", "backend", ".", "get", "(", "'simulator'", ")", "is", "True", "]" ]
Get the backend simulators available to use in the QX Platform
[ "Get", "the", "backend", "simulators", "available", "to", "use", "in", "the", "QX", "Platform" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L996-L1012
11,682
Qiskit/qiskit-api-py
IBMQuantumExperience/IBMQuantumExperience.py
IBMQuantumExperience.get_my_credits
def get_my_credits(self, access_token=None, user_id=None): """ Get the credits by user to use in the QX Platform """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') else: user_data_url = '/users/' + self.req.credential.get_user_id() user_data = self.req.get(user_data_url) if "credit" in user_data: if "promotionalCodesUsed" in user_data["credit"]: del user_data["credit"]["promotionalCodesUsed"] if "lastRefill" in user_data["credit"]: del user_data["credit"]["lastRefill"] return user_data["credit"] return {}
python
def get_my_credits(self, access_token=None, user_id=None): """ Get the credits by user to use in the QX Platform """ if access_token: self.req.credential.set_token(access_token) if user_id: self.req.credential.set_user_id(user_id) if not self.check_credentials(): raise CredentialsError('credentials invalid') else: user_data_url = '/users/' + self.req.credential.get_user_id() user_data = self.req.get(user_data_url) if "credit" in user_data: if "promotionalCodesUsed" in user_data["credit"]: del user_data["credit"]["promotionalCodesUsed"] if "lastRefill" in user_data["credit"]: del user_data["credit"]["lastRefill"] return user_data["credit"] return {}
[ "def", "get_my_credits", "(", "self", ",", "access_token", "=", "None", ",", "user_id", "=", "None", ")", ":", "if", "access_token", ":", "self", ".", "req", ".", "credential", ".", "set_token", "(", "access_token", ")", "if", "user_id", ":", "self", ".", "req", ".", "credential", ".", "set_user_id", "(", "user_id", ")", "if", "not", "self", ".", "check_credentials", "(", ")", ":", "raise", "CredentialsError", "(", "'credentials invalid'", ")", "else", ":", "user_data_url", "=", "'/users/'", "+", "self", ".", "req", ".", "credential", ".", "get_user_id", "(", ")", "user_data", "=", "self", ".", "req", ".", "get", "(", "user_data_url", ")", "if", "\"credit\"", "in", "user_data", ":", "if", "\"promotionalCodesUsed\"", "in", "user_data", "[", "\"credit\"", "]", ":", "del", "user_data", "[", "\"credit\"", "]", "[", "\"promotionalCodesUsed\"", "]", "if", "\"lastRefill\"", "in", "user_data", "[", "\"credit\"", "]", ":", "del", "user_data", "[", "\"credit\"", "]", "[", "\"lastRefill\"", "]", "return", "user_data", "[", "\"credit\"", "]", "return", "{", "}" ]
Get the credits by user to use in the QX Platform
[ "Get", "the", "credits", "by", "user", "to", "use", "in", "the", "QX", "Platform" ]
2ab240110fb7e653254e44c4833f3643e8ae7f0f
https://github.com/Qiskit/qiskit-api-py/blob/2ab240110fb7e653254e44c4833f3643e8ae7f0f/IBMQuantumExperience/IBMQuantumExperience.py#L1014-L1033
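get_my_credits returns the user's credit object with two bookkeeping fields stripped. Sketch:

# 'api' is the IBMQuantumExperience client constructed in the earlier sketch.
credit_info = api.get_my_credits()
print(credit_info)  # remaining fields depend on the account; this record only removes two keys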
11,683
ionelmc/python-hunter
src/hunter/tracer.py
Tracer.trace
def trace(self, predicate): """ Starts tracing with the given callable. Args: predicate (callable that accepts a single :obj:`hunter.Event` argument): Return: self """ self._handler = predicate if self.threading_support is None or self.threading_support: self._threading_previous = getattr(threading, '_trace_hook', None) threading.settrace(self) self._previous = sys.gettrace() sys.settrace(self) return self
python
def trace(self, predicate): """ Starts tracing with the given callable. Args: predicate (callable that accepts a single :obj:`hunter.Event` argument): Return: self """ self._handler = predicate if self.threading_support is None or self.threading_support: self._threading_previous = getattr(threading, '_trace_hook', None) threading.settrace(self) self._previous = sys.gettrace() sys.settrace(self) return self
[ "def", "trace", "(", "self", ",", "predicate", ")", ":", "self", ".", "_handler", "=", "predicate", "if", "self", ".", "threading_support", "is", "None", "or", "self", ".", "threading_support", ":", "self", ".", "_threading_previous", "=", "getattr", "(", "threading", ",", "'_trace_hook'", ",", "None", ")", "threading", ".", "settrace", "(", "self", ")", "self", ".", "_previous", "=", "sys", ".", "gettrace", "(", ")", "sys", ".", "settrace", "(", "self", ")", "return", "self" ]
Starts tracing with the given callable. Args: predicate (callable that accepts a single :obj:`hunter.Event` argument): Return: self
[ "Starts", "tracing", "with", "the", "given", "callable", "." ]
b3a1310b0593d2c6b6ef430883843896e17d6a81
https://github.com/ionelmc/python-hunter/blob/b3a1310b0593d2c6b6ef430883843896e17d6a81/src/hunter/tracer.py#L81-L96
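Tracer.trace installs the tracer object itself as both the threading trace hook and the sys trace hook, remembering the previous hooks so they can be restored later. A standalone sketch of that install/restore pattern, independent of hunter's own classes:

import sys
import threading

class MiniTracer:
    """Minimal install/restore wrapper around sys.settrace / threading.settrace."""

    def __init__(self):
        self._previous = None
        self._threading_previous = None

    def __call__(self, frame, kind, arg):
        # A real tracer would dispatch on `kind` ('call', 'line', 'return', ...).
        return self

    def start(self):
        self._threading_previous = getattr(threading, '_trace_hook', None)
        threading.settrace(self)
        self._previous = sys.gettrace()
        sys.settrace(self)
        return self

    def stop(self):
        sys.settrace(self._previous)
        threading.settrace(self._threading_previous)

tracer = MiniTracer().start()
sum(range(10))  # runs with the trace hook installed
tracer.stop()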
11,684
ionelmc/python-hunter
src/hunter/__init__.py
And
def And(*predicates, **kwargs):
    """
    `And` predicate. Returns ``False`` at the first sub-predicate that
    returns ``False``.
    """
    if kwargs:
        predicates += Query(**kwargs),
    return _flatten(_And, *predicates)
python
def And(*predicates, **kwargs):
    """
    `And` predicate. Returns ``False`` at the first sub-predicate that
    returns ``False``.
    """
    if kwargs:
        predicates += Query(**kwargs),
    return _flatten(_And, *predicates)
[ "def", "And", "(", "*", "predicates", ",", "*", "*", "kwargs", ")", ":", "if", "kwargs", ":", "predicates", "+=", "Query", "(", "*", "*", "kwargs", ")", ",", "return", "_flatten", "(", "_And", ",", "*", "predicates", ")" ]
`And` predicate. Returns ``False`` at the first sub-predicate that returns ``False``.
[ "And", "predicate", ".", "Returns", "False", "at", "the", "first", "sub", "-", "predicate", "that", "returns", "False", "." ]
b3a1310b0593d2c6b6ef430883843896e17d6a81
https://github.com/ionelmc/python-hunter/blob/b3a1310b0593d2c6b6ef430883843896e17d6a81/src/hunter/__init__.py#L120-L126
11,685
ionelmc/python-hunter
src/hunter/__init__.py
Or
def Or(*predicates, **kwargs):
    """
    `Or` predicate. Returns ``True`` at the first sub-predicate that
    returns ``True``.
    """
    if kwargs:
        predicates += tuple(Query(**{k: v}) for k, v in kwargs.items())
    return _flatten(_Or, *predicates)
python
def Or(*predicates, **kwargs):
    """
    `Or` predicate. Returns ``True`` at the first sub-predicate that
    returns ``True``.
    """
    if kwargs:
        predicates += tuple(Query(**{k: v}) for k, v in kwargs.items())
    return _flatten(_Or, *predicates)
[ "def", "Or", "(", "*", "predicates", ",", "*", "*", "kwargs", ")", ":", "if", "kwargs", ":", "predicates", "+=", "tuple", "(", "Query", "(", "*", "*", "{", "k", ":", "v", "}", ")", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", ")", "return", "_flatten", "(", "_Or", ",", "*", "predicates", ")" ]
`Or` predicate. Returns ``True`` at the first sub-predicate that returns ``True``.
[ "Or", "predicate", ".", "Returns", "True", "at", "the", "first", "sub", "-", "predicate", "that", "returns", "True", "." ]
b3a1310b0593d2c6b6ef430883843896e17d6a81
https://github.com/ionelmc/python-hunter/blob/b3a1310b0593d2c6b6ef430883843896e17d6a81/src/hunter/__init__.py#L129-L135
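And and Or (the two records above) both route keyword arguments through Query and flatten nested predicates of the same type. A hedged usage sketch composing them with hunter's Q alias and handing the result to trace(), mirroring names that appear in these records (the action keyword is assumed from hunter's public options, not from these records):

# Assumes the hunter package is installed.
from hunter import Q, And, Or, trace, CallPrinter

predicate = And(
    Or(Q(module='json'), Q(module='re')),  # either module...
    Q(kind='call'),                        # ...but only call events
)

tracer = trace(predicate, action=CallPrinter)
try:
    import json
    json.dumps({'x': 1})
finally:
    tracer.stop()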
11,686
ionelmc/python-hunter
src/hunter/__init__.py
wrap
def wrap(function_to_trace=None, **trace_options): """ Functions decorated with this will be traced. Use ``local=True`` to only trace local code, eg:: @hunter.wrap(local=True) def my_function(): ... Keyword arguments are allowed, eg:: @hunter.wrap(action=hunter.CallPrinter) def my_function(): ... Or, filters:: @hunter.wrap(module='foobar') def my_function(): ... """ def tracing_decorator(func): @functools.wraps(func) def tracing_wrapper(*args, **kwargs): predicates = [] local = trace_options.pop('local', False) if local: predicates.append(Q(depth_lt=2)) predicates.append(~When(Q(calls_gt=0, depth=0) & ~Q(kind='return'), Stop)) local_tracer = trace(*predicates, **trace_options) try: return func(*args, **kwargs) finally: local_tracer.stop() return tracing_wrapper if function_to_trace is None: return tracing_decorator else: return tracing_decorator(function_to_trace)
python
def wrap(function_to_trace=None, **trace_options): """ Functions decorated with this will be traced. Use ``local=True`` to only trace local code, eg:: @hunter.wrap(local=True) def my_function(): ... Keyword arguments are allowed, eg:: @hunter.wrap(action=hunter.CallPrinter) def my_function(): ... Or, filters:: @hunter.wrap(module='foobar') def my_function(): ... """ def tracing_decorator(func): @functools.wraps(func) def tracing_wrapper(*args, **kwargs): predicates = [] local = trace_options.pop('local', False) if local: predicates.append(Q(depth_lt=2)) predicates.append(~When(Q(calls_gt=0, depth=0) & ~Q(kind='return'), Stop)) local_tracer = trace(*predicates, **trace_options) try: return func(*args, **kwargs) finally: local_tracer.stop() return tracing_wrapper if function_to_trace is None: return tracing_decorator else: return tracing_decorator(function_to_trace)
[ "def", "wrap", "(", "function_to_trace", "=", "None", ",", "*", "*", "trace_options", ")", ":", "def", "tracing_decorator", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "tracing_wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "predicates", "=", "[", "]", "local", "=", "trace_options", ".", "pop", "(", "'local'", ",", "False", ")", "if", "local", ":", "predicates", ".", "append", "(", "Q", "(", "depth_lt", "=", "2", ")", ")", "predicates", ".", "append", "(", "~", "When", "(", "Q", "(", "calls_gt", "=", "0", ",", "depth", "=", "0", ")", "&", "~", "Q", "(", "kind", "=", "'return'", ")", ",", "Stop", ")", ")", "local_tracer", "=", "trace", "(", "*", "predicates", ",", "*", "*", "trace_options", ")", "try", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "local_tracer", ".", "stop", "(", ")", "return", "tracing_wrapper", "if", "function_to_trace", "is", "None", ":", "return", "tracing_decorator", "else", ":", "return", "tracing_decorator", "(", "function_to_trace", ")" ]
Functions decorated with this will be traced. Use ``local=True`` to only trace local code, eg:: @hunter.wrap(local=True) def my_function(): ... Keyword arguments are allowed, eg:: @hunter.wrap(action=hunter.CallPrinter) def my_function(): ... Or, filters:: @hunter.wrap(module='foobar') def my_function(): ...
[ "Functions", "decorated", "with", "this", "will", "be", "traced", "." ]
b3a1310b0593d2c6b6ef430883843896e17d6a81
https://github.com/ionelmc/python-hunter/blob/b3a1310b0593d2c6b6ef430883843896e17d6a81/src/hunter/__init__.py#L210-L250
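The wrap decorator builds its predicate list per call and always appends a stop-after-return guard, so tracing ends when the decorated call returns; the docstring's own examples translate directly into usage. A short sketch:

import hunter

@hunter.wrap(local=True, action=hunter.CallPrinter)
def my_function(n):
    total = 0
    for i in range(n):
        total += i
    return total

my_function(3)  # traced only while the call is active, and only local frames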
11,687
ionelmc/python-hunter
src/hunter/event.py
Event.threadid
def threadid(self):
        """
        Current thread ident. If current thread is main thread then it
        returns ``None``.

        :type: int or None
        """
        current = self.thread.ident
        main = get_main_thread()
        if main is None:
            return current
        else:
            return current if current != main.ident else None
python
def threadid(self):
        """
        Current thread ident. If current thread is main thread then it
        returns ``None``.

        :type: int or None
        """
        current = self.thread.ident
        main = get_main_thread()
        if main is None:
            return current
        else:
            return current if current != main.ident else None
[ "def", "threadid", "(", "self", ")", ":", "current", "=", "self", ".", "thread", ".", "ident", "main", "=", "get_main_thread", "(", ")", "if", "main", "is", "None", ":", "return", "current", "else", ":", "return", "current", "if", "current", "!=", "main", ".", "ident", "else", "None" ]
Current thread ident. If current thread is main thread then it returns ``None``. :type: int or None
[ "Current", "thread", "ident", ".", "If", "current", "thread", "is", "main", "thread", "then", "it", "returns", "None", "." ]
b3a1310b0593d2c6b6ef430883843896e17d6a81
https://github.com/ionelmc/python-hunter/blob/b3a1310b0593d2c6b6ef430883843896e17d6a81/src/hunter/event.py#L101-L112
11,688
ionelmc/python-hunter
src/hunter/event.py
Event.filename
def filename(self, exists=os.path.exists, cython_suffix_re=CYTHON_SUFFIX_RE): """ A string with absolute path to file. :type: str """ filename = self.frame.f_globals.get('__file__', '') if filename is None: filename = '' if filename.endswith(('.pyc', '.pyo')): filename = filename[:-1] elif filename.endswith('$py.class'): # Jython filename = filename[:-9] + ".py" elif filename.endswith(('.so', '.pyd')): basename = cython_suffix_re.sub('', filename) for ext in ('.pyx', '.py'): cyfilename = basename + ext if exists(cyfilename): filename = cyfilename break return filename
python
def filename(self, exists=os.path.exists, cython_suffix_re=CYTHON_SUFFIX_RE): """ A string with absolute path to file. :type: str """ filename = self.frame.f_globals.get('__file__', '') if filename is None: filename = '' if filename.endswith(('.pyc', '.pyo')): filename = filename[:-1] elif filename.endswith('$py.class'): # Jython filename = filename[:-9] + ".py" elif filename.endswith(('.so', '.pyd')): basename = cython_suffix_re.sub('', filename) for ext in ('.pyx', '.py'): cyfilename = basename + ext if exists(cyfilename): filename = cyfilename break return filename
[ "def", "filename", "(", "self", ",", "exists", "=", "os", ".", "path", ".", "exists", ",", "cython_suffix_re", "=", "CYTHON_SUFFIX_RE", ")", ":", "filename", "=", "self", ".", "frame", ".", "f_globals", ".", "get", "(", "'__file__'", ",", "''", ")", "if", "filename", "is", "None", ":", "filename", "=", "''", "if", "filename", ".", "endswith", "(", "(", "'.pyc'", ",", "'.pyo'", ")", ")", ":", "filename", "=", "filename", "[", ":", "-", "1", "]", "elif", "filename", ".", "endswith", "(", "'$py.class'", ")", ":", "# Jython", "filename", "=", "filename", "[", ":", "-", "9", "]", "+", "\".py\"", "elif", "filename", ".", "endswith", "(", "(", "'.so'", ",", "'.pyd'", ")", ")", ":", "basename", "=", "cython_suffix_re", ".", "sub", "(", "''", ",", "filename", ")", "for", "ext", "in", "(", "'.pyx'", ",", "'.py'", ")", ":", "cyfilename", "=", "basename", "+", "ext", "if", "exists", "(", "cyfilename", ")", ":", "filename", "=", "cyfilename", "break", "return", "filename" ]
A string with absolute path to file. :type: str
[ "A", "string", "with", "absolute", "path", "to", "file", "." ]
b3a1310b0593d2c6b6ef430883843896e17d6a81
https://github.com/ionelmc/python-hunter/blob/b3a1310b0593d2c6b6ef430883843896e17d6a81/src/hunter/event.py#L176-L197
11,689
ionelmc/python-hunter
src/hunter/event.py
Event.stdlib
def stdlib(self):
    """
    A boolean flag. ``True`` if frame is in stdlib.

    :type: bool
    """
    if self.module == 'pkg_resources' or self.module.startswith('pkg_resources.'):
        return False
    elif self.filename.startswith(SITE_PACKAGES_PATHS):
        # if it's in site-packages then its definitely not stdlib
        return False
    elif self.filename.startswith(SYS_PREFIX_PATHS):
        return True
    else:
        return False
python
def stdlib(self):
    """
    A boolean flag. ``True`` if frame is in stdlib.

    :type: bool
    """
    if self.module == 'pkg_resources' or self.module.startswith('pkg_resources.'):
        return False
    elif self.filename.startswith(SITE_PACKAGES_PATHS):
        # if it's in site-packages then its definitely not stdlib
        return False
    elif self.filename.startswith(SYS_PREFIX_PATHS):
        return True
    else:
        return False
[ "def", "stdlib", "(", "self", ")", ":", "if", "self", ".", "module", "==", "'pkg_resources'", "or", "self", ".", "module", ".", "startswith", "(", "'pkg_resources.'", ")", ":", "return", "False", "elif", "self", ".", "filename", ".", "startswith", "(", "SITE_PACKAGES_PATHS", ")", ":", "# if it's in site-packages then its definitely not stdlib", "return", "False", "elif", "self", ".", "filename", ".", "startswith", "(", "SYS_PREFIX_PATHS", ")", ":", "return", "True", "else", ":", "return", "False" ]
A boolean flag. ``True`` if frame is in stdlib. :type: bool
[ "A", "boolean", "flag", ".", "True", "if", "frame", "is", "in", "stdlib", "." ]
b3a1310b0593d2c6b6ef430883843896e17d6a81
https://github.com/ionelmc/python-hunter/blob/b3a1310b0593d2c6b6ef430883843896e17d6a81/src/hunter/event.py#L216-L230
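The check above is purely prefix based. A minimal standalone sketch, assuming SITE_PACKAGES_PATHS and SYS_PREFIX_PATHS can be approximated from the running interpreter (in hunter they are precomputed tuples of path prefixes):

import os
import site
import sys

# Assumed approximations of hunter's precomputed prefix tuples.
SITE_PACKAGES_PATHS = tuple(site.getsitepackages()) if hasattr(site, 'getsitepackages') else ()
SYS_PREFIX_PATHS = (sys.prefix, sys.exec_prefix)

def is_stdlib(module, filename):
    # pkg_resources ships with setuptools, so it is never treated as stdlib.
    if module == 'pkg_resources' or module.startswith('pkg_resources.'):
        return False
    # Anything under site-packages is third-party by definition.
    if filename.startswith(SITE_PACKAGES_PATHS):
        return False
    # Remaining files under the interpreter prefix count as stdlib.
    return filename.startswith(SYS_PREFIX_PATHS)

print(is_stdlib('os', os.__file__))          # typically True
print(is_stdlib('pkg_resources', '<any>'))   # False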
11,690
ionelmc/python-hunter
src/hunter/actions.py
VarsPrinter._iter_symbols
def _iter_symbols(code):
    """
    Iterate all the variable names in the given expression.

    Example:

    * ``self.foobar`` yields ``self``
    * ``self[foobar]`` yields ``self`` and ``foobar``
    """
    for node in ast.walk(ast.parse(code)):
        if isinstance(node, ast.Name):
            yield node.id
python
def _iter_symbols(code):
    """
    Iterate all the variable names in the given expression.

    Example:

    * ``self.foobar`` yields ``self``
    * ``self[foobar]`` yields ``self`` and ``foobar``
    """
    for node in ast.walk(ast.parse(code)):
        if isinstance(node, ast.Name):
            yield node.id
[ "def", "_iter_symbols", "(", "code", ")", ":", "for", "node", "in", "ast", ".", "walk", "(", "ast", ".", "parse", "(", "code", ")", ")", ":", "if", "isinstance", "(", "node", ",", "ast", ".", "Name", ")", ":", "yield", "node", ".", "id" ]
Iterate all the variable names in the given expression.

Example:

* ``self.foobar`` yields ``self``
* ``self[foobar]`` yields ``self`` and ``foobar``
[ "Iterate", "all", "the", "variable", "names", "in", "the", "given", "expression", "." ]
b3a1310b0593d2c6b6ef430883843896e17d6a81
https://github.com/ionelmc/python-hunter/blob/b3a1310b0593d2c6b6ef430883843896e17d6a81/src/hunter/actions.py#L437-L448
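A minimal sketch showing what the generator yields for the two docstring examples, run on its own outside of VarsPrinter:

import ast

def _iter_symbols(code):
    # Walk the parsed expression and yield every bare name node.
    for node in ast.walk(ast.parse(code)):
        if isinstance(node, ast.Name):
            yield node.id

print(list(_iter_symbols('self.foobar')))    # ['self']
print(list(_iter_symbols('self[foobar]')))   # ['self', 'foobar']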
11,691
HttpRunner/har2case
har2case/core.py
HarParser.__make_request_url
def __make_request_url(self, teststep_dict, entry_json):
    """ parse HAR entry request url and queryString, and make teststep url and params

    Args:
        entry_json (dict):
            {
                "request": {
                    "url": "https://httprunner.top/home?v=1&w=2",
                    "queryString": [
                        {"name": "v", "value": "1"},
                        {"name": "w", "value": "2"}
                    ],
                },
                "response": {}
            }

    Returns:
        {
            "name": "/home",
            "request": {
                url: "https://httprunner.top/home",
                params: {"v": "1", "w": "2"}
            }
        }

    """
    request_params = utils.convert_list_to_dict(
        entry_json["request"].get("queryString", [])
    )

    url = entry_json["request"].get("url")
    if not url:
        logging.exception("url missed in request.")
        sys.exit(1)

    parsed_object = urlparse.urlparse(url)
    if request_params:
        parsed_object = parsed_object._replace(query='')
        teststep_dict["request"]["url"] = parsed_object.geturl()
        teststep_dict["request"]["params"] = request_params
    else:
        teststep_dict["request"]["url"] = url

    teststep_dict["name"] = parsed_object.path
python
def __make_request_url(self, teststep_dict, entry_json):
    """ parse HAR entry request url and queryString, and make teststep url and params

    Args:
        entry_json (dict):
            {
                "request": {
                    "url": "https://httprunner.top/home?v=1&w=2",
                    "queryString": [
                        {"name": "v", "value": "1"},
                        {"name": "w", "value": "2"}
                    ],
                },
                "response": {}
            }

    Returns:
        {
            "name": "/home",
            "request": {
                url: "https://httprunner.top/home",
                params: {"v": "1", "w": "2"}
            }
        }

    """
    request_params = utils.convert_list_to_dict(
        entry_json["request"].get("queryString", [])
    )

    url = entry_json["request"].get("url")
    if not url:
        logging.exception("url missed in request.")
        sys.exit(1)

    parsed_object = urlparse.urlparse(url)
    if request_params:
        parsed_object = parsed_object._replace(query='')
        teststep_dict["request"]["url"] = parsed_object.geturl()
        teststep_dict["request"]["params"] = request_params
    else:
        teststep_dict["request"]["url"] = url

    teststep_dict["name"] = parsed_object.path
[ "def", "__make_request_url", "(", "self", ",", "teststep_dict", ",", "entry_json", ")", ":", "request_params", "=", "utils", ".", "convert_list_to_dict", "(", "entry_json", "[", "\"request\"", "]", ".", "get", "(", "\"queryString\"", ",", "[", "]", ")", ")", "url", "=", "entry_json", "[", "\"request\"", "]", ".", "get", "(", "\"url\"", ")", "if", "not", "url", ":", "logging", ".", "exception", "(", "\"url missed in request.\"", ")", "sys", ".", "exit", "(", "1", ")", "parsed_object", "=", "urlparse", ".", "urlparse", "(", "url", ")", "if", "request_params", ":", "parsed_object", "=", "parsed_object", ".", "_replace", "(", "query", "=", "''", ")", "teststep_dict", "[", "\"request\"", "]", "[", "\"url\"", "]", "=", "parsed_object", ".", "geturl", "(", ")", "teststep_dict", "[", "\"request\"", "]", "[", "\"params\"", "]", "=", "request_params", "else", ":", "teststep_dict", "[", "\"request\"", "]", "[", "\"url\"", "]", "=", "url", "teststep_dict", "[", "\"name\"", "]", "=", "parsed_object", ".", "path" ]
parse HAR entry request url and queryString, and make teststep url and params

Args:
    entry_json (dict):
        {
            "request": {
                "url": "https://httprunner.top/home?v=1&w=2",
                "queryString": [
                    {"name": "v", "value": "1"},
                    {"name": "w", "value": "2"}
                ],
            },
            "response": {}
        }

Returns:
    {
        "name": "/home",
        "request": {
            url: "https://httprunner.top/home",
            params: {"v": "1", "w": "2"}
        }
    }
[ "parse", "HAR", "entry", "request", "url", "and", "queryString", "and", "make", "teststep", "url", "and", "params" ]
369e576b24b3521832c35344b104828e30742170
https://github.com/HttpRunner/har2case/blob/369e576b24b3521832c35344b104828e30742170/har2case/core.py#L43-L86
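The transformation above can be reproduced with the standard library alone. A minimal sketch, assuming utils.convert_list_to_dict simply turns the name/value pairs into a plain dict:

from urllib.parse import urlparse

entry_json = {
    "request": {
        "url": "https://httprunner.top/home?v=1&w=2",
        "queryString": [{"name": "v", "value": "1"}, {"name": "w", "value": "2"}],
    },
    "response": {},
}

teststep_dict = {"request": {}}
# Inline equivalent of the assumed utils.convert_list_to_dict helper.
request_params = {item["name"]: item["value"] for item in entry_json["request"]["queryString"]}

parsed = urlparse(entry_json["request"]["url"])
if request_params:
    # Drop the query string from the url and carry it separately as params.
    teststep_dict["request"]["url"] = parsed._replace(query='').geturl()
    teststep_dict["request"]["params"] = request_params
else:
    teststep_dict["request"]["url"] = entry_json["request"]["url"]
teststep_dict["name"] = parsed.path

print(teststep_dict)
# {'request': {'url': 'https://httprunner.top/home', 'params': {'v': '1', 'w': '2'}}, 'name': '/home'}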
11,692
HttpRunner/har2case
har2case/core.py
HarParser.__make_request_method
def __make_request_method(self, teststep_dict, entry_json):
    """ parse HAR entry request method, and make teststep method.
    """
    method = entry_json["request"].get("method")
    if not method:
        logging.exception("method missed in request.")
        sys.exit(1)

    teststep_dict["request"]["method"] = method
python
def __make_request_method(self, teststep_dict, entry_json):
    """ parse HAR entry request method, and make teststep method.
    """
    method = entry_json["request"].get("method")
    if not method:
        logging.exception("method missed in request.")
        sys.exit(1)

    teststep_dict["request"]["method"] = method
[ "def", "__make_request_method", "(", "self", ",", "teststep_dict", ",", "entry_json", ")", ":", "method", "=", "entry_json", "[", "\"request\"", "]", ".", "get", "(", "\"method\"", ")", "if", "not", "method", ":", "logging", ".", "exception", "(", "\"method missed in request.\"", ")", "sys", ".", "exit", "(", "1", ")", "teststep_dict", "[", "\"request\"", "]", "[", "\"method\"", "]", "=", "method" ]
parse HAR entry request method, and make teststep method.
[ "parse", "HAR", "entry", "request", "method", "and", "make", "teststep", "method", "." ]
369e576b24b3521832c35344b104828e30742170
https://github.com/HttpRunner/har2case/blob/369e576b24b3521832c35344b104828e30742170/har2case/core.py#L88-L96
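For completeness, a minimal sketch of the same extraction against the entry shape used by the other HarParser helpers:

entry_json = {"request": {"method": "GET"}, "response": {}}
teststep_dict = {"request": {}}

# Copy the HTTP method straight from the HAR entry into the teststep.
method = entry_json["request"].get("method")
if method:
    teststep_dict["request"]["method"] = method

print(teststep_dict)  # {'request': {'method': 'GET'}}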
11,693
HttpRunner/har2case
har2case/core.py
HarParser.__make_request_headers
def __make_request_headers(self, teststep_dict, entry_json):
    """ parse HAR entry request headers, and make teststep headers.
        header in IGNORE_REQUEST_HEADERS will be ignored.

    Args:
        entry_json (dict):
            {
                "request": {
                    "headers": [
                        {"name": "Host", "value": "httprunner.top"},
                        {"name": "Content-Type", "value": "application/json"},
                        {"name": "User-Agent", "value": "iOS/10.3"}
                    ],
                },
                "response": {}
            }

    Returns:
        {
            "request": {
                "headers": {"Content-Type": "application/json"}
            }
        }

    """
    teststep_headers = {}
    for header in entry_json["request"].get("headers", []):
        if header["name"].lower() in IGNORE_REQUEST_HEADERS:
            continue
        teststep_headers[header["name"]] = header["value"]

    if teststep_headers:
        teststep_dict["request"]["headers"] = teststep_headers
python
def __make_request_headers(self, teststep_dict, entry_json):
    """ parse HAR entry request headers, and make teststep headers.
        header in IGNORE_REQUEST_HEADERS will be ignored.

    Args:
        entry_json (dict):
            {
                "request": {
                    "headers": [
                        {"name": "Host", "value": "httprunner.top"},
                        {"name": "Content-Type", "value": "application/json"},
                        {"name": "User-Agent", "value": "iOS/10.3"}
                    ],
                },
                "response": {}
            }

    Returns:
        {
            "request": {
                "headers": {"Content-Type": "application/json"}
            }
        }

    """
    teststep_headers = {}
    for header in entry_json["request"].get("headers", []):
        if header["name"].lower() in IGNORE_REQUEST_HEADERS:
            continue
        teststep_headers[header["name"]] = header["value"]

    if teststep_headers:
        teststep_dict["request"]["headers"] = teststep_headers
[ "def", "__make_request_headers", "(", "self", ",", "teststep_dict", ",", "entry_json", ")", ":", "teststep_headers", "=", "{", "}", "for", "header", "in", "entry_json", "[", "\"request\"", "]", ".", "get", "(", "\"headers\"", ",", "[", "]", ")", ":", "if", "header", "[", "\"name\"", "]", ".", "lower", "(", ")", "in", "IGNORE_REQUEST_HEADERS", ":", "continue", "teststep_headers", "[", "header", "[", "\"name\"", "]", "]", "=", "header", "[", "\"value\"", "]", "if", "teststep_headers", ":", "teststep_dict", "[", "\"request\"", "]", "[", "\"headers\"", "]", "=", "teststep_headers" ]
parse HAR entry request headers, and make teststep headers.
header in IGNORE_REQUEST_HEADERS will be ignored.

Args:
    entry_json (dict):
        {
            "request": {
                "headers": [
                    {"name": "Host", "value": "httprunner.top"},
                    {"name": "Content-Type", "value": "application/json"},
                    {"name": "User-Agent", "value": "iOS/10.3"}
                ],
            },
            "response": {}
        }

Returns:
    {
        "request": {
            "headers": {"Content-Type": "application/json"}
        }
    }
[ "parse", "HAR", "entry", "request", "headers", "and", "make", "teststep", "headers", ".", "header", "in", "IGNORE_REQUEST_HEADERS", "will", "be", "ignored", "." ]
369e576b24b3521832c35344b104828e30742170
https://github.com/HttpRunner/har2case/blob/369e576b24b3521832c35344b104828e30742170/har2case/core.py#L98-L130
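A minimal sketch of the header filtering, assuming IGNORE_REQUEST_HEADERS is a collection of lower-cased header names (the exact list lives elsewhere in har2case; only a plausible subset is shown):

# Assumed subset of the real IGNORE_REQUEST_HEADERS constant.
IGNORE_REQUEST_HEADERS = ["host", "accept", "content-length", "connection", "user-agent"]

entry_json = {
    "request": {
        "headers": [
            {"name": "Host", "value": "httprunner.top"},
            {"name": "Content-Type", "value": "application/json"},
            {"name": "User-Agent", "value": "iOS/10.3"},
        ]
    },
    "response": {},
}

teststep_dict = {"request": {}}
# Keep only headers whose lower-cased name is not in the ignore list.
teststep_headers = {
    header["name"]: header["value"]
    for header in entry_json["request"].get("headers", [])
    if header["name"].lower() not in IGNORE_REQUEST_HEADERS
}
if teststep_headers:
    teststep_dict["request"]["headers"] = teststep_headers

print(teststep_dict)  # {'request': {'headers': {'Content-Type': 'application/json'}}}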
11,694
HttpRunner/har2case
har2case/core.py
HarParser._make_request_data
def _make_request_data(self, teststep_dict, entry_json):
    """ parse HAR entry request data, and make teststep request data

    Args:
        entry_json (dict):
            {
                "request": {
                    "method": "POST",
                    "postData": {
                        "mimeType": "application/x-www-form-urlencoded; charset=utf-8",
                        "params": [
                            {"name": "a", "value": 1},
                            {"name": "b", "value": "2"}
                        ]
                    },
                },
                "response": {...}
            }

    Returns:
        {
            "request": {
                "method": "POST",
                "data": {"v": "1", "w": "2"}
            }
        }

    """
    method = entry_json["request"].get("method")
    if method in ["POST", "PUT", "PATCH"]:
        postData = entry_json["request"].get("postData", {})
        mimeType = postData.get("mimeType")

        # Note that text and params fields are mutually exclusive.
        if "text" in postData:
            post_data = postData.get("text")
        else:
            params = postData.get("params", [])
            post_data = utils.convert_list_to_dict(params)

        request_data_key = "data"
        if not mimeType:
            pass
        elif mimeType.startswith("application/json"):
            try:
                post_data = json.loads(post_data)
                request_data_key = "json"
            except JSONDecodeError:
                pass
        elif mimeType.startswith("application/x-www-form-urlencoded"):
            post_data = utils.convert_x_www_form_urlencoded_to_dict(post_data)
        else:
            # TODO: make compatible with more mimeType
            pass

        teststep_dict["request"][request_data_key] = post_data
python
def _make_request_data(self, teststep_dict, entry_json):
    """ parse HAR entry request data, and make teststep request data

    Args:
        entry_json (dict):
            {
                "request": {
                    "method": "POST",
                    "postData": {
                        "mimeType": "application/x-www-form-urlencoded; charset=utf-8",
                        "params": [
                            {"name": "a", "value": 1},
                            {"name": "b", "value": "2"}
                        ]
                    },
                },
                "response": {...}
            }

    Returns:
        {
            "request": {
                "method": "POST",
                "data": {"v": "1", "w": "2"}
            }
        }

    """
    method = entry_json["request"].get("method")
    if method in ["POST", "PUT", "PATCH"]:
        postData = entry_json["request"].get("postData", {})
        mimeType = postData.get("mimeType")

        # Note that text and params fields are mutually exclusive.
        if "text" in postData:
            post_data = postData.get("text")
        else:
            params = postData.get("params", [])
            post_data = utils.convert_list_to_dict(params)

        request_data_key = "data"
        if not mimeType:
            pass
        elif mimeType.startswith("application/json"):
            try:
                post_data = json.loads(post_data)
                request_data_key = "json"
            except JSONDecodeError:
                pass
        elif mimeType.startswith("application/x-www-form-urlencoded"):
            post_data = utils.convert_x_www_form_urlencoded_to_dict(post_data)
        else:
            # TODO: make compatible with more mimeType
            pass

        teststep_dict["request"][request_data_key] = post_data
[ "def", "_make_request_data", "(", "self", ",", "teststep_dict", ",", "entry_json", ")", ":", "method", "=", "entry_json", "[", "\"request\"", "]", ".", "get", "(", "\"method\"", ")", "if", "method", "in", "[", "\"POST\"", ",", "\"PUT\"", ",", "\"PATCH\"", "]", ":", "postData", "=", "entry_json", "[", "\"request\"", "]", ".", "get", "(", "\"postData\"", ",", "{", "}", ")", "mimeType", "=", "postData", ".", "get", "(", "\"mimeType\"", ")", "# Note that text and params fields are mutually exclusive.", "if", "\"text\"", "in", "postData", ":", "post_data", "=", "postData", ".", "get", "(", "\"text\"", ")", "else", ":", "params", "=", "postData", ".", "get", "(", "\"params\"", ",", "[", "]", ")", "post_data", "=", "utils", ".", "convert_list_to_dict", "(", "params", ")", "request_data_key", "=", "\"data\"", "if", "not", "mimeType", ":", "pass", "elif", "mimeType", ".", "startswith", "(", "\"application/json\"", ")", ":", "try", ":", "post_data", "=", "json", ".", "loads", "(", "post_data", ")", "request_data_key", "=", "\"json\"", "except", "JSONDecodeError", ":", "pass", "elif", "mimeType", ".", "startswith", "(", "\"application/x-www-form-urlencoded\"", ")", ":", "post_data", "=", "utils", ".", "convert_x_www_form_urlencoded_to_dict", "(", "post_data", ")", "else", ":", "# TODO: make compatible with more mimeType", "pass", "teststep_dict", "[", "\"request\"", "]", "[", "request_data_key", "]", "=", "post_data" ]
parse HAR entry request data, and make teststep request data

Args:
    entry_json (dict):
        {
            "request": {
                "method": "POST",
                "postData": {
                    "mimeType": "application/x-www-form-urlencoded; charset=utf-8",
                    "params": [
                        {"name": "a", "value": 1},
                        {"name": "b", "value": "2"}
                    ]
                },
            },
            "response": {...}
        }

Returns:
    {
        "request": {
            "method": "POST",
            "data": {"v": "1", "w": "2"}
        }
    }
[ "parse", "HAR", "entry", "request", "data", "and", "make", "teststep", "request", "data" ]
369e576b24b3521832c35344b104828e30742170
https://github.com/HttpRunner/har2case/blob/369e576b24b3521832c35344b104828e30742170/har2case/core.py#L132-L188
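A minimal sketch of the mimeType dispatch for the two common cases, with the utils helpers replaced by inline standard-library equivalents:

import json
from urllib.parse import unquote

def make_request_data(post_data_text, mime_type):
    # Default key is "data"; a JSON body is promoted to the "json" key instead.
    request_data_key = "data"
    post_data = post_data_text
    if mime_type and mime_type.startswith("application/json"):
        try:
            post_data = json.loads(post_data_text)
            request_data_key = "json"
        except json.JSONDecodeError:
            pass
    elif mime_type and mime_type.startswith("application/x-www-form-urlencoded"):
        # Inline equivalent of the assumed utils.convert_x_www_form_urlencoded_to_dict helper.
        post_data = {k: unquote(v) for k, v in (pair.split("=") for pair in post_data_text.split("&"))}
    return {request_data_key: post_data}

print(make_request_data('{"a": 1}', "application/json; charset=utf-8"))
# {'json': {'a': 1}}
print(make_request_data("a=1&b=2", "application/x-www-form-urlencoded; charset=utf-8"))
# {'data': {'a': '1', 'b': '2'}}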
11,695
HttpRunner/har2case
har2case/core.py
HarParser._make_validate
def _make_validate(self, teststep_dict, entry_json):
    """ parse HAR entry response and make teststep validate.

    Args:
        entry_json (dict):
            {
                "request": {},
                "response": {
                    "status": 200,
                    "headers": [
                        {
                            "name": "Content-Type",
                            "value": "application/json; charset=utf-8"
                        },
                    ],
                    "content": {
                        "size": 71,
                        "mimeType": "application/json; charset=utf-8",
                        "text": "eyJJc1N1Y2Nlc3MiOnRydWUsIkNvZGUiOjIwMCwiTWVzc2FnZSI6bnVsbCwiVmFsdWUiOnsiQmxuUmVzdWx0Ijp0cnVlfX0=",
                        "encoding": "base64"
                    }
                }
            }

    Returns:
        {
            "validate": [
                {"eq": ["status_code", 200]}
            ]
        }

    """
    teststep_dict["validate"].append(
        {"eq": ["status_code", entry_json["response"].get("status")]}
    )

    resp_content_dict = entry_json["response"].get("content")

    headers_mapping = utils.convert_list_to_dict(
        entry_json["response"].get("headers", [])
    )
    if "Content-Type" in headers_mapping:
        teststep_dict["validate"].append(
            {"eq": ["headers.Content-Type", headers_mapping["Content-Type"]]}
        )

    text = resp_content_dict.get("text")
    if not text:
        return

    mime_type = resp_content_dict.get("mimeType")
    if mime_type and mime_type.startswith("application/json"):

        encoding = resp_content_dict.get("encoding")
        if encoding and encoding == "base64":
            content = base64.b64decode(text).decode('utf-8')
        else:
            content = text

        try:
            resp_content_json = json.loads(content)
        except JSONDecodeError:
            logging.warning(
                "response content can not be loaded as json: {}".format(content.encode("utf-8"))
            )
            return

        if not isinstance(resp_content_json, dict):
            return

        for key, value in resp_content_json.items():
            if isinstance(value, (dict, list)):
                continue

            teststep_dict["validate"].append(
                {"eq": ["content.{}".format(key), value]}
            )
python
def _make_validate(self, teststep_dict, entry_json):
    """ parse HAR entry response and make teststep validate.

    Args:
        entry_json (dict):
            {
                "request": {},
                "response": {
                    "status": 200,
                    "headers": [
                        {
                            "name": "Content-Type",
                            "value": "application/json; charset=utf-8"
                        },
                    ],
                    "content": {
                        "size": 71,
                        "mimeType": "application/json; charset=utf-8",
                        "text": "eyJJc1N1Y2Nlc3MiOnRydWUsIkNvZGUiOjIwMCwiTWVzc2FnZSI6bnVsbCwiVmFsdWUiOnsiQmxuUmVzdWx0Ijp0cnVlfX0=",
                        "encoding": "base64"
                    }
                }
            }

    Returns:
        {
            "validate": [
                {"eq": ["status_code", 200]}
            ]
        }

    """
    teststep_dict["validate"].append(
        {"eq": ["status_code", entry_json["response"].get("status")]}
    )

    resp_content_dict = entry_json["response"].get("content")

    headers_mapping = utils.convert_list_to_dict(
        entry_json["response"].get("headers", [])
    )
    if "Content-Type" in headers_mapping:
        teststep_dict["validate"].append(
            {"eq": ["headers.Content-Type", headers_mapping["Content-Type"]]}
        )

    text = resp_content_dict.get("text")
    if not text:
        return

    mime_type = resp_content_dict.get("mimeType")
    if mime_type and mime_type.startswith("application/json"):

        encoding = resp_content_dict.get("encoding")
        if encoding and encoding == "base64":
            content = base64.b64decode(text).decode('utf-8')
        else:
            content = text

        try:
            resp_content_json = json.loads(content)
        except JSONDecodeError:
            logging.warning(
                "response content can not be loaded as json: {}".format(content.encode("utf-8"))
            )
            return

        if not isinstance(resp_content_json, dict):
            return

        for key, value in resp_content_json.items():
            if isinstance(value, (dict, list)):
                continue

            teststep_dict["validate"].append(
                {"eq": ["content.{}".format(key), value]}
            )
[ "def", "_make_validate", "(", "self", ",", "teststep_dict", ",", "entry_json", ")", ":", "teststep_dict", "[", "\"validate\"", "]", ".", "append", "(", "{", "\"eq\"", ":", "[", "\"status_code\"", ",", "entry_json", "[", "\"response\"", "]", ".", "get", "(", "\"status\"", ")", "]", "}", ")", "resp_content_dict", "=", "entry_json", "[", "\"response\"", "]", ".", "get", "(", "\"content\"", ")", "headers_mapping", "=", "utils", ".", "convert_list_to_dict", "(", "entry_json", "[", "\"response\"", "]", ".", "get", "(", "\"headers\"", ",", "[", "]", ")", ")", "if", "\"Content-Type\"", "in", "headers_mapping", ":", "teststep_dict", "[", "\"validate\"", "]", ".", "append", "(", "{", "\"eq\"", ":", "[", "\"headers.Content-Type\"", ",", "headers_mapping", "[", "\"Content-Type\"", "]", "]", "}", ")", "text", "=", "resp_content_dict", ".", "get", "(", "\"text\"", ")", "if", "not", "text", ":", "return", "mime_type", "=", "resp_content_dict", ".", "get", "(", "\"mimeType\"", ")", "if", "mime_type", "and", "mime_type", ".", "startswith", "(", "\"application/json\"", ")", ":", "encoding", "=", "resp_content_dict", ".", "get", "(", "\"encoding\"", ")", "if", "encoding", "and", "encoding", "==", "\"base64\"", ":", "content", "=", "base64", ".", "b64decode", "(", "text", ")", ".", "decode", "(", "'utf-8'", ")", "else", ":", "content", "=", "text", "try", ":", "resp_content_json", "=", "json", ".", "loads", "(", "content", ")", "except", "JSONDecodeError", ":", "logging", ".", "warning", "(", "\"response content can not be loaded as json: {}\"", ".", "format", "(", "content", ".", "encode", "(", "\"utf-8\"", ")", ")", ")", "return", "if", "not", "isinstance", "(", "resp_content_json", ",", "dict", ")", ":", "return", "for", "key", ",", "value", "in", "resp_content_json", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "(", "dict", ",", "list", ")", ")", ":", "continue", "teststep_dict", "[", "\"validate\"", "]", ".", "append", "(", "{", "\"eq\"", ":", "[", "\"content.{}\"", ".", "format", "(", "key", ")", ",", "value", "]", "}", ")" ]
parse HAR entry response and make teststep validate.

Args:
    entry_json (dict):
        {
            "request": {},
            "response": {
                "status": 200,
                "headers": [
                    {
                        "name": "Content-Type",
                        "value": "application/json; charset=utf-8"
                    },
                ],
                "content": {
                    "size": 71,
                    "mimeType": "application/json; charset=utf-8",
                    "text": "eyJJc1N1Y2Nlc3MiOnRydWUsIkNvZGUiOjIwMCwiTWVzc2FnZSI6bnVsbCwiVmFsdWUiOnsiQmxuUmVzdWx0Ijp0cnVlfX0=",
                    "encoding": "base64"
                }
            }
        }

Returns:
    {
        "validate": [
            {"eq": ["status_code", 200]}
        ]
    }
[ "parse", "HAR", "entry", "response", "and", "make", "teststep", "validate", "." ]
369e576b24b3521832c35344b104828e30742170
https://github.com/HttpRunner/har2case/blob/369e576b24b3521832c35344b104828e30742170/har2case/core.py#L190-L266
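A minimal sketch that decodes the base64 body from the docstring example and builds the same kind of "eq" validators the method appends:

import base64
import json

text = "eyJJc1N1Y2Nlc3MiOnRydWUsIkNvZGUiOjIwMCwiTWVzc2FnZSI6bnVsbCwiVmFsdWUiOnsiQmxuUmVzdWx0Ijp0cnVlfX0="
content = base64.b64decode(text).decode("utf-8")
body = json.loads(content)

validators = [{"eq": ["status_code", 200]}]
for key, value in body.items():
    # Nested structures are skipped, exactly as in the function above.
    if isinstance(value, (dict, list)):
        continue
    validators.append({"eq": ["content.{}".format(key), value]})

print(validators)
# [{'eq': ['status_code', 200]}, {'eq': ['content.IsSuccess', True]},
#  {'eq': ['content.Code', 200]}, {'eq': ['content.Message', None]}]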
11,696
HttpRunner/har2case
har2case/utils.py
load_har_log_entries
def load_har_log_entries(file_path):
    """ load HAR file and return log entries list

    Args:
        file_path (str)

    Returns:
        list: entries
            [
                {
                    "request": {},
                    "response": {}
                },
                {
                    "request": {},
                    "response": {}
                }
            ]

    """
    with io.open(file_path, "r+", encoding="utf-8-sig") as f:
        try:
            content_json = json.loads(f.read())
            return content_json["log"]["entries"]
        except (KeyError, TypeError):
            logging.error("HAR file content error: {}".format(file_path))
            sys.exit(1)
python
def load_har_log_entries(file_path):
    """ load HAR file and return log entries list

    Args:
        file_path (str)

    Returns:
        list: entries
            [
                {
                    "request": {},
                    "response": {}
                },
                {
                    "request": {},
                    "response": {}
                }
            ]

    """
    with io.open(file_path, "r+", encoding="utf-8-sig") as f:
        try:
            content_json = json.loads(f.read())
            return content_json["log"]["entries"]
        except (KeyError, TypeError):
            logging.error("HAR file content error: {}".format(file_path))
            sys.exit(1)
[ "def", "load_har_log_entries", "(", "file_path", ")", ":", "with", "io", ".", "open", "(", "file_path", ",", "\"r+\"", ",", "encoding", "=", "\"utf-8-sig\"", ")", "as", "f", ":", "try", ":", "content_json", "=", "json", ".", "loads", "(", "f", ".", "read", "(", ")", ")", "return", "content_json", "[", "\"log\"", "]", "[", "\"entries\"", "]", "except", "(", "KeyError", ",", "TypeError", ")", ":", "logging", ".", "error", "(", "\"HAR file content error: {}\"", ".", "format", "(", "file_path", ")", ")", "sys", ".", "exit", "(", "1", ")" ]
load HAR file and return log entries list

Args:
    file_path (str)

Returns:
    list: entries
        [
            {"request": {}, "response": {}},
            {"request": {}, "response": {}}
        ]
[ "load", "HAR", "file", "and", "return", "log", "entries", "list" ]
369e576b24b3521832c35344b104828e30742170
https://github.com/HttpRunner/har2case/blob/369e576b24b3521832c35344b104828e30742170/har2case/utils.py#L10-L36
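A minimal usage sketch that writes a tiny HAR file to a temporary path and reads it back with the same json / "log" / "entries" access pattern used above:

import io
import json
import tempfile

har_content = {"log": {"entries": [{"request": {}, "response": {}}]}}

# Write a throwaway HAR file so there is something to load.
with tempfile.NamedTemporaryFile("w", suffix=".har", delete=False, encoding="utf-8") as f:
    json.dump(har_content, f)
    har_path = f.name

# Read it back; utf-8-sig also tolerates a BOM, as in the function above.
with io.open(har_path, "r", encoding="utf-8-sig") as f:
    entries = json.loads(f.read())["log"]["entries"]

print(entries)  # [{'request': {}, 'response': {}}]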
11,697
HttpRunner/har2case
har2case/utils.py
x_www_form_urlencoded
def x_www_form_urlencoded(post_data):
    """ convert origin dict to x-www-form-urlencoded

    Args:
        post_data (dict): {"a": 1, "b":2}

    Returns:
        str: a=1&b=2

    """
    if isinstance(post_data, dict):
        return "&".join([
            u"{}={}".format(key, value)
            for key, value in post_data.items()
        ])
    else:
        return post_data
python
def x_www_form_urlencoded(post_data):
    """ convert origin dict to x-www-form-urlencoded

    Args:
        post_data (dict): {"a": 1, "b":2}

    Returns:
        str: a=1&b=2

    """
    if isinstance(post_data, dict):
        return "&".join([
            u"{}={}".format(key, value)
            for key, value in post_data.items()
        ])
    else:
        return post_data
[ "def", "x_www_form_urlencoded", "(", "post_data", ")", ":", "if", "isinstance", "(", "post_data", ",", "dict", ")", ":", "return", "\"&\"", ".", "join", "(", "[", "u\"{}={}\"", ".", "format", "(", "key", ",", "value", ")", "for", "key", ",", "value", "in", "post_data", ".", "items", "(", ")", "]", ")", "else", ":", "return", "post_data" ]
convert origin dict to x-www-form-urlencoded

Args:
    post_data (dict): {"a": 1, "b":2}

Returns:
    str: a=1&b=2
[ "convert", "origin", "dict", "to", "x", "-", "www", "-", "form", "-", "urlencoded" ]
369e576b24b3521832c35344b104828e30742170
https://github.com/HttpRunner/har2case/blob/369e576b24b3521832c35344b104828e30742170/har2case/utils.py#L39-L57
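A minimal usage sketch of the encoder; note that, as written, it joins items in dict order and does not percent-encode values:

def x_www_form_urlencoded(post_data):
    # Dicts are joined as key=value pairs; anything else is returned unchanged.
    if isinstance(post_data, dict):
        return "&".join(u"{}={}".format(key, value) for key, value in post_data.items())
    return post_data

print(x_www_form_urlencoded({"a": 1, "b": 2}))    # a=1&b=2
print(x_www_form_urlencoded("already=encoded"))   # already=encoded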
11,698
HttpRunner/har2case
har2case/utils.py
convert_x_www_form_urlencoded_to_dict
def convert_x_www_form_urlencoded_to_dict(post_data):
    """ convert x_www_form_urlencoded data to dict

    Args:
        post_data (str): a=1&b=2

    Returns:
        dict: {"a":1, "b":2}

    """
    if isinstance(post_data, str):
        converted_dict = {}
        for k_v in post_data.split("&"):
            try:
                key, value = k_v.split("=")
            except ValueError:
                raise Exception(
                    "Invalid x_www_form_urlencoded data format: {}".format(post_data)
                )
            converted_dict[key] = unquote(value)
        return converted_dict
    else:
        return post_data
python
def convert_x_www_form_urlencoded_to_dict(post_data):
    """ convert x_www_form_urlencoded data to dict

    Args:
        post_data (str): a=1&b=2

    Returns:
        dict: {"a":1, "b":2}

    """
    if isinstance(post_data, str):
        converted_dict = {}
        for k_v in post_data.split("&"):
            try:
                key, value = k_v.split("=")
            except ValueError:
                raise Exception(
                    "Invalid x_www_form_urlencoded data format: {}".format(post_data)
                )
            converted_dict[key] = unquote(value)
        return converted_dict
    else:
        return post_data
[ "def", "convert_x_www_form_urlencoded_to_dict", "(", "post_data", ")", ":", "if", "isinstance", "(", "post_data", ",", "str", ")", ":", "converted_dict", "=", "{", "}", "for", "k_v", "in", "post_data", ".", "split", "(", "\"&\"", ")", ":", "try", ":", "key", ",", "value", "=", "k_v", ".", "split", "(", "\"=\"", ")", "except", "ValueError", ":", "raise", "Exception", "(", "\"Invalid x_www_form_urlencoded data format: {}\"", ".", "format", "(", "post_data", ")", ")", "converted_dict", "[", "key", "]", "=", "unquote", "(", "value", ")", "return", "converted_dict", "else", ":", "return", "post_data" ]
convert x_www_form_urlencoded data to dict

Args:
    post_data (str): a=1&b=2

Returns:
    dict: {"a":1, "b":2}
[ "convert", "x_www_form_urlencoded", "data", "to", "dict" ]
369e576b24b3521832c35344b104828e30742170
https://github.com/HttpRunner/har2case/blob/369e576b24b3521832c35344b104828e30742170/har2case/utils.py#L60-L82
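A minimal usage sketch of the decoder, applied to the docstring example plus a percent-encoded value to show the effect of unquote:

from urllib.parse import unquote

def convert_x_www_form_urlencoded_to_dict(post_data):
    # Non-string input is passed through unchanged, as in the function above.
    if not isinstance(post_data, str):
        return post_data
    converted = {}
    for k_v in post_data.split("&"):
        key, value = k_v.split("=")  # a malformed pair raises ValueError
        converted[key] = unquote(value)
    return converted

print(convert_x_www_form_urlencoded_to_dict("a=1&b=2"))          # {'a': '1', 'b': '2'}
print(convert_x_www_form_urlencoded_to_dict("q=hello%20world"))  # {'q': 'hello world'}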
11,699
HttpRunner/har2case
har2case/utils.py
dump_yaml
def dump_yaml(testcase, yaml_file):
    """ dump HAR entries to yaml testcase
    """
    logging.info("dump testcase to YAML format.")

    with io.open(yaml_file, 'w', encoding="utf-8") as outfile:
        yaml.dump(testcase, outfile, allow_unicode=True, default_flow_style=False, indent=4)

    logging.info("Generate YAML testcase successfully: {}".format(yaml_file))
python
def dump_yaml(testcase, yaml_file):
    """ dump HAR entries to yaml testcase
    """
    logging.info("dump testcase to YAML format.")

    with io.open(yaml_file, 'w', encoding="utf-8") as outfile:
        yaml.dump(testcase, outfile, allow_unicode=True, default_flow_style=False, indent=4)

    logging.info("Generate YAML testcase successfully: {}".format(yaml_file))
[ "def", "dump_yaml", "(", "testcase", ",", "yaml_file", ")", ":", "logging", ".", "info", "(", "\"dump testcase to YAML format.\"", ")", "with", "io", ".", "open", "(", "yaml_file", ",", "'w'", ",", "encoding", "=", "\"utf-8\"", ")", "as", "outfile", ":", "yaml", ".", "dump", "(", "testcase", ",", "outfile", ",", "allow_unicode", "=", "True", ",", "default_flow_style", "=", "False", ",", "indent", "=", "4", ")", "logging", ".", "info", "(", "\"Generate YAML testcase successfully: {}\"", ".", "format", "(", "yaml_file", ")", ")" ]
dump HAR entries to yaml testcase
[ "dump", "HAR", "entries", "to", "yaml", "testcase" ]
369e576b24b3521832c35344b104828e30742170
https://github.com/HttpRunner/har2case/blob/369e576b24b3521832c35344b104828e30742170/har2case/utils.py#L106-L114
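A minimal usage sketch with the same yaml.dump options (allow_unicode, block style, 4-space indent), writing to an in-memory buffer instead of a file; it assumes PyYAML is installed:

import io
import yaml  # PyYAML

# A tiny, hypothetical teststep list just to show the serialization options.
testcase = [{"test": {"name": "/home", "request": {"url": "https://httprunner.top/home", "method": "GET"}}}]

buf = io.StringIO()
yaml.dump(testcase, buf, allow_unicode=True, default_flow_style=False, indent=4)
print(buf.getvalue())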