column            dtype          min      max
id                int32          0        252k
repo              stringlengths  7        55
path              stringlengths  4        127
func_name         stringlengths  1        88
original_string   stringlengths  75       19.8k
language          stringclasses  1 value
code              stringlengths  75       19.8k
code_tokens       list
docstring         stringlengths  3        17.3k
docstring_tokens  list
sha               stringlengths  40       40
url               stringlengths  87       242
233,600
bitcraze/crazyflie-lib-python
cflib/crazyflie/mem.py
Memory.get_mems
def get_mems(self, type): """Fetch all the memories of the supplied type""" ret = () for m in self.mems: if m.type == type: ret += (m,) return ret
python
def get_mems(self, type):
    """Fetch all the memories of the supplied type"""
    ret = ()
    for m in self.mems:
        if m.type == type:
            ret += (m,)

    return ret
[ "def", "get_mems", "(", "self", ",", "type", ")", ":", "ret", "=", "(", ")", "for", "m", "in", "self", ".", "mems", ":", "if", "m", ".", "type", "==", "type", ":", "ret", "+=", "(", "m", ",", ")", "return", "ret" ]
Fetch all the memories of the supplied type
[ "Fetch", "all", "the", "memories", "of", "the", "supplied", "type" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/crazyflie/mem.py#L919-L926
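A minimal usage sketch for the get_mems call above, assuming cf is an already-connected Crazyflie instance and that the memory type constants on MemoryElement (e.g. TYPE_DRIVER_LED) keep the names used in this snapshot of cflib:

from cflib.crazyflie.mem import MemoryElement

# 'cf' is assumed to be a connected cflib.crazyflie.Crazyflie instance
led_mems = cf.mem.get_mems(MemoryElement.TYPE_DRIVER_LED)
if len(led_mems) > 0:
    print('Found LED-ring memory with id {}'.format(led_mems[0].id))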
233,601
bitcraze/crazyflie-lib-python
cflib/crazyflie/mem.py
Memory.write
def write(self, memory, addr, data, flush_queue=False): """Write the specified data to the given memory at the given address""" wreq = _WriteRequest(memory, addr, data, self.cf) if memory.id not in self._write_requests: self._write_requests[memory.id] = [] # Workaround until we secure the uplink and change messages for # mems to non-blocking self._write_requests_lock.acquire() if flush_queue: self._write_requests[memory.id] = self._write_requests[ memory.id][:1] self._write_requests[memory.id].insert(len(self._write_requests), wreq) if len(self._write_requests[memory.id]) == 1: wreq.start() self._write_requests_lock.release() return True
python
def write(self, memory, addr, data, flush_queue=False):
    """Write the specified data to the given memory at the given address"""
    wreq = _WriteRequest(memory, addr, data, self.cf)
    if memory.id not in self._write_requests:
        self._write_requests[memory.id] = []

    # Workaround until we secure the uplink and change messages for
    # mems to non-blocking
    self._write_requests_lock.acquire()
    if flush_queue:
        self._write_requests[memory.id] = self._write_requests[
            memory.id][:1]
    self._write_requests[memory.id].insert(len(self._write_requests), wreq)
    if len(self._write_requests[memory.id]) == 1:
        wreq.start()
    self._write_requests_lock.release()

    return True
[ "def", "write", "(", "self", ",", "memory", ",", "addr", ",", "data", ",", "flush_queue", "=", "False", ")", ":", "wreq", "=", "_WriteRequest", "(", "memory", ",", "addr", ",", "data", ",", "self", ".", "cf", ")", "if", "memory", ".", "id", "not", "in", "self", ".", "_write_requests", ":", "self", ".", "_write_requests", "[", "memory", ".", "id", "]", "=", "[", "]", "# Workaround until we secure the uplink and change messages for", "# mems to non-blocking", "self", ".", "_write_requests_lock", ".", "acquire", "(", ")", "if", "flush_queue", ":", "self", ".", "_write_requests", "[", "memory", ".", "id", "]", "=", "self", ".", "_write_requests", "[", "memory", ".", "id", "]", "[", ":", "1", "]", "self", ".", "_write_requests", "[", "memory", ".", "id", "]", ".", "insert", "(", "len", "(", "self", ".", "_write_requests", ")", ",", "wreq", ")", "if", "len", "(", "self", ".", "_write_requests", "[", "memory", ".", "id", "]", ")", "==", "1", ":", "wreq", ".", "start", "(", ")", "self", ".", "_write_requests_lock", ".", "release", "(", ")", "return", "True" ]
Write the specified data to the given memory at the given address
[ "Write", "the", "specified", "data", "to", "the", "given", "memory", "at", "the", "given", "address" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/crazyflie/mem.py#L936-L953
233,602
bitcraze/crazyflie-lib-python
cflib/crazyflie/mem.py
Memory.read
def read(self, memory, addr, length): """ Read the specified amount of bytes from the given memory at the given address """ if memory.id in self._read_requests: logger.warning('There is already a read operation ongoing for ' 'memory id {}'.format(memory.id)) return False rreq = _ReadRequest(memory, addr, length, self.cf) self._read_requests[memory.id] = rreq rreq.start() return True
python
def read(self, memory, addr, length):
    """
    Read the specified amount of bytes from the given memory at the given
    address
    """
    if memory.id in self._read_requests:
        logger.warning('There is already a read operation ongoing for '
                       'memory id {}'.format(memory.id))
        return False

    rreq = _ReadRequest(memory, addr, length, self.cf)
    self._read_requests[memory.id] = rreq

    rreq.start()

    return True
[ "def", "read", "(", "self", ",", "memory", ",", "addr", ",", "length", ")", ":", "if", "memory", ".", "id", "in", "self", ".", "_read_requests", ":", "logger", ".", "warning", "(", "'There is already a read operation ongoing for '", "'memory id {}'", ".", "format", "(", "memory", ".", "id", ")", ")", "return", "False", "rreq", "=", "_ReadRequest", "(", "memory", ",", "addr", ",", "length", ",", "self", ".", "cf", ")", "self", ".", "_read_requests", "[", "memory", ".", "id", "]", "=", "rreq", "rreq", ".", "start", "(", ")", "return", "True" ]
Read the specified amount of bytes from the given memory at the given address
[ "Read", "the", "specified", "amount", "of", "bytes", "from", "the", "given", "memory", "at", "the", "given", "address" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/crazyflie/mem.py#L955-L970
233,603
bitcraze/crazyflie-lib-python
cflib/crazyflie/mem.py
Memory.refresh
def refresh(self, refresh_done_callback): """Start fetching all the detected memories""" self._refresh_callback = refresh_done_callback self._fetch_id = 0 for m in self.mems: try: self.mem_read_cb.remove_callback(m.new_data) m.disconnect() except Exception as e: logger.info( 'Error when removing memory after update: {}'.format(e)) self.mems = [] self.nbr_of_mems = 0 self._getting_count = False logger.debug('Requesting number of memories') pk = CRTPPacket() pk.set_header(CRTPPort.MEM, CHAN_INFO) pk.data = (CMD_INFO_NBR,) self.cf.send_packet(pk, expected_reply=(CMD_INFO_NBR,))
python
def refresh(self, refresh_done_callback):
    """Start fetching all the detected memories"""
    self._refresh_callback = refresh_done_callback
    self._fetch_id = 0
    for m in self.mems:
        try:
            self.mem_read_cb.remove_callback(m.new_data)
            m.disconnect()
        except Exception as e:
            logger.info(
                'Error when removing memory after update: {}'.format(e))
    self.mems = []

    self.nbr_of_mems = 0
    self._getting_count = False

    logger.debug('Requesting number of memories')
    pk = CRTPPacket()
    pk.set_header(CRTPPort.MEM, CHAN_INFO)
    pk.data = (CMD_INFO_NBR,)
    self.cf.send_packet(pk, expected_reply=(CMD_INFO_NBR,))
[ "def", "refresh", "(", "self", ",", "refresh_done_callback", ")", ":", "self", ".", "_refresh_callback", "=", "refresh_done_callback", "self", ".", "_fetch_id", "=", "0", "for", "m", "in", "self", ".", "mems", ":", "try", ":", "self", ".", "mem_read_cb", ".", "remove_callback", "(", "m", ".", "new_data", ")", "m", ".", "disconnect", "(", ")", "except", "Exception", "as", "e", ":", "logger", ".", "info", "(", "'Error when removing memory after update: {}'", ".", "format", "(", "e", ")", ")", "self", ".", "mems", "=", "[", "]", "self", ".", "nbr_of_mems", "=", "0", "self", ".", "_getting_count", "=", "False", "logger", ".", "debug", "(", "'Requesting number of memories'", ")", "pk", "=", "CRTPPacket", "(", ")", "pk", ".", "set_header", "(", "CRTPPort", ".", "MEM", ",", "CHAN_INFO", ")", "pk", ".", "data", "=", "(", "CMD_INFO_NBR", ",", ")", "self", ".", "cf", ".", "send_packet", "(", "pk", ",", "expected_reply", "=", "(", "CMD_INFO_NBR", ",", ")", ")" ]
Start fetching all the detected memories
[ "Start", "fetching", "all", "the", "detected", "memories" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/crazyflie/mem.py#L972-L992
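A hedged sketch of driving the refresh call above; it assumes cf is a connected Crazyflie and that the done-callback is invoked without arguments once the memory list has been re-fetched (the callback is only stored in this function, so the exact signature is an assumption):

def _mem_refresh_done(*args):
    # Signature kept generic on purpose; the memories are now in cf.mem.mems
    print('Refreshed {} memories'.format(len(cf.mem.mems)))

cf.mem.refresh(_mem_refresh_done)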
233,604
bitcraze/crazyflie-lib-python
cflib/bootloader/cloader.py
Cloader.reset_to_bootloader1
def reset_to_bootloader1(self, cpu_id): """ Reset to the bootloader The parameter cpuid shall correspond to the device to reset. Return true if the reset has been done and the contact with the bootloader is established. """ # Send an echo request and wait for the answer # Mainly aim to bypass a bug of the crazyflie firmware that prevents # reset before normal CRTP communication pk = CRTPPacket() pk.port = CRTPPort.LINKCTRL pk.data = (1, 2, 3) + cpu_id self.link.send_packet(pk) pk = None while True: pk = self.link.receive_packet(2) if not pk: return False if pk.port == CRTPPort.LINKCTRL: break # Send the reset to bootloader request pk = CRTPPacket() pk.set_header(0xFF, 0xFF) pk.data = (0xFF, 0xFE) + cpu_id self.link.send_packet(pk) # Wait to ack the reset ... pk = None while True: pk = self.link.receive_packet(2) if not pk: return False if pk.port == 0xFF and tuple(pk.data) == (0xFF, 0xFE) + cpu_id: pk.data = (0xFF, 0xF0) + cpu_id self.link.send_packet(pk) break time.sleep(0.1) self.link.close() self.link = cflib.crtp.get_link_driver(self.clink_address) # time.sleep(0.1) return self._update_info()
python
def reset_to_bootloader1(self, cpu_id):
    """ Reset to the bootloader
    The parameter cpuid shall correspond to the device to reset.

    Return true if the reset has been done and the contact with the
    bootloader is established.
    """
    # Send an echo request and wait for the answer
    # Mainly aim to bypass a bug of the crazyflie firmware that prevents
    # reset before normal CRTP communication
    pk = CRTPPacket()
    pk.port = CRTPPort.LINKCTRL
    pk.data = (1, 2, 3) + cpu_id
    self.link.send_packet(pk)

    pk = None
    while True:
        pk = self.link.receive_packet(2)
        if not pk:
            return False

        if pk.port == CRTPPort.LINKCTRL:
            break

    # Send the reset to bootloader request
    pk = CRTPPacket()
    pk.set_header(0xFF, 0xFF)
    pk.data = (0xFF, 0xFE) + cpu_id
    self.link.send_packet(pk)

    # Wait to ack the reset ...
    pk = None
    while True:
        pk = self.link.receive_packet(2)
        if not pk:
            return False

        if pk.port == 0xFF and tuple(pk.data) == (0xFF, 0xFE) + cpu_id:
            pk.data = (0xFF, 0xF0) + cpu_id
            self.link.send_packet(pk)
            break

    time.sleep(0.1)
    self.link.close()
    self.link = cflib.crtp.get_link_driver(self.clink_address)
    # time.sleep(0.1)

    return self._update_info()
[ "def", "reset_to_bootloader1", "(", "self", ",", "cpu_id", ")", ":", "# Send an echo request and wait for the answer", "# Mainly aim to bypass a bug of the crazyflie firmware that prevents", "# reset before normal CRTP communication", "pk", "=", "CRTPPacket", "(", ")", "pk", ".", "port", "=", "CRTPPort", ".", "LINKCTRL", "pk", ".", "data", "=", "(", "1", ",", "2", ",", "3", ")", "+", "cpu_id", "self", ".", "link", ".", "send_packet", "(", "pk", ")", "pk", "=", "None", "while", "True", ":", "pk", "=", "self", ".", "link", ".", "receive_packet", "(", "2", ")", "if", "not", "pk", ":", "return", "False", "if", "pk", ".", "port", "==", "CRTPPort", ".", "LINKCTRL", ":", "break", "# Send the reset to bootloader request", "pk", "=", "CRTPPacket", "(", ")", "pk", ".", "set_header", "(", "0xFF", ",", "0xFF", ")", "pk", ".", "data", "=", "(", "0xFF", ",", "0xFE", ")", "+", "cpu_id", "self", ".", "link", ".", "send_packet", "(", "pk", ")", "# Wait to ack the reset ...", "pk", "=", "None", "while", "True", ":", "pk", "=", "self", ".", "link", ".", "receive_packet", "(", "2", ")", "if", "not", "pk", ":", "return", "False", "if", "pk", ".", "port", "==", "0xFF", "and", "tuple", "(", "pk", ".", "data", ")", "==", "(", "0xFF", ",", "0xFE", ")", "+", "cpu_id", ":", "pk", ".", "data", "=", "(", "0xFF", ",", "0xF0", ")", "+", "cpu_id", "self", ".", "link", ".", "send_packet", "(", "pk", ")", "break", "time", ".", "sleep", "(", "0.1", ")", "self", ".", "link", ".", "close", "(", ")", "self", ".", "link", "=", "cflib", ".", "crtp", ".", "get_link_driver", "(", "self", ".", "clink_address", ")", "# time.sleep(0.1)", "return", "self", ".", "_update_info", "(", ")" ]
Reset to the bootloader The parameter cpuid shall correspond to the device to reset. Return true if the reset has been done and the contact with the bootloader is established.
[ "Reset", "to", "the", "bootloader", "The", "parameter", "cpuid", "shall", "correspond", "to", "the", "device", "to", "reset", "." ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/bootloader/cloader.py#L137-L184
233,605
bitcraze/crazyflie-lib-python
cflib/bootloader/cloader.py
Cloader.reset_to_firmware
def reset_to_firmware(self, target_id): """ Reset to firmware The parameter cpuid shall correspond to the device to reset. Return true if the reset has been done """ # The fake CPU ID is legacy from the Crazyflie 1.0 # In order to reset the CPU id had to be sent, but this # was removed before launching it. But the length check is # still in the bootloader. So to work around this bug so # some extra data needs to be sent. fake_cpu_id = (1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12) # Send the reset to bootloader request pk = CRTPPacket() pk.set_header(0xFF, 0xFF) pk.data = (target_id, 0xFF) + fake_cpu_id self.link.send_packet(pk) # Wait to ack the reset ... pk = None while True: pk = self.link.receive_packet(2) if not pk: return False if (pk.header == 0xFF and struct.unpack( 'B' * len(pk.data), pk.data)[:2] == (target_id, 0xFF)): # Difference in CF1 and CF2 (CPU ID) if target_id == 0xFE: pk.data = (target_id, 0xF0, 0x01) else: pk.data = (target_id, 0xF0) + fake_cpu_id self.link.send_packet(pk) break time.sleep(0.1)
python
def reset_to_firmware(self, target_id):
    """ Reset to firmware
    The parameter cpuid shall correspond to the device to reset.

    Return true if the reset has been done
    """
    # The fake CPU ID is legacy from the Crazyflie 1.0
    # In order to reset the CPU id had to be sent, but this
    # was removed before launching it. But the length check is
    # still in the bootloader. So to work around this bug so
    # some extra data needs to be sent.
    fake_cpu_id = (1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12)
    # Send the reset to bootloader request
    pk = CRTPPacket()
    pk.set_header(0xFF, 0xFF)
    pk.data = (target_id, 0xFF) + fake_cpu_id
    self.link.send_packet(pk)

    # Wait to ack the reset ...
    pk = None
    while True:
        pk = self.link.receive_packet(2)
        if not pk:
            return False

        if (pk.header == 0xFF and struct.unpack(
                'B' * len(pk.data), pk.data)[:2] == (target_id, 0xFF)):
            # Difference in CF1 and CF2 (CPU ID)
            if target_id == 0xFE:
                pk.data = (target_id, 0xF0, 0x01)
            else:
                pk.data = (target_id, 0xF0) + fake_cpu_id
            self.link.send_packet(pk)
            break

    time.sleep(0.1)
[ "def", "reset_to_firmware", "(", "self", ",", "target_id", ")", ":", "# The fake CPU ID is legacy from the Crazyflie 1.0", "# In order to reset the CPU id had to be sent, but this", "# was removed before launching it. But the length check is", "# still in the bootloader. So to work around this bug so", "# some extra data needs to be sent.", "fake_cpu_id", "=", "(", "1", ",", "2", ",", "4", ",", "5", ",", "6", ",", "7", ",", "8", ",", "9", ",", "10", ",", "11", ",", "12", ")", "# Send the reset to bootloader request", "pk", "=", "CRTPPacket", "(", ")", "pk", ".", "set_header", "(", "0xFF", ",", "0xFF", ")", "pk", ".", "data", "=", "(", "target_id", ",", "0xFF", ")", "+", "fake_cpu_id", "self", ".", "link", ".", "send_packet", "(", "pk", ")", "# Wait to ack the reset ...", "pk", "=", "None", "while", "True", ":", "pk", "=", "self", ".", "link", ".", "receive_packet", "(", "2", ")", "if", "not", "pk", ":", "return", "False", "if", "(", "pk", ".", "header", "==", "0xFF", "and", "struct", ".", "unpack", "(", "'B'", "*", "len", "(", "pk", ".", "data", ")", ",", "pk", ".", "data", ")", "[", ":", "2", "]", "==", "(", "target_id", ",", "0xFF", ")", ")", ":", "# Difference in CF1 and CF2 (CPU ID)", "if", "target_id", "==", "0xFE", ":", "pk", ".", "data", "=", "(", "target_id", ",", "0xF0", ",", "0x01", ")", "else", ":", "pk", ".", "data", "=", "(", "target_id", ",", "0xF0", ")", "+", "fake_cpu_id", "self", ".", "link", ".", "send_packet", "(", "pk", ")", "break", "time", ".", "sleep", "(", "0.1", ")" ]
Reset to firmware The parameter cpuid shall correspond to the device to reset. Return true if the reset has been done
[ "Reset", "to", "firmware", "The", "parameter", "cpuid", "shall", "correspond", "to", "the", "device", "to", "reset", "." ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/bootloader/cloader.py#L186-L221
233,606
bitcraze/crazyflie-lib-python
cflib/bootloader/cloader.py
Cloader.check_link_and_get_info
def check_link_and_get_info(self, target_id=0xFF): """Try to get a connection with the bootloader by requesting info 5 times. This let roughly 10 seconds to boot the copter ...""" for _ in range(0, 5): if self._update_info(target_id): if self._in_boot_cb: self._in_boot_cb.call(True, self.targets[ target_id].protocol_version) if self._info_cb: self._info_cb.call(self.targets[target_id]) return True return False
python
def check_link_and_get_info(self, target_id=0xFF):
    """Try to get a connection with the bootloader by requesting info
    5 times. This let roughly 10 seconds to boot the copter ..."""
    for _ in range(0, 5):
        if self._update_info(target_id):
            if self._in_boot_cb:
                self._in_boot_cb.call(True, self.targets[
                    target_id].protocol_version)
            if self._info_cb:
                self._info_cb.call(self.targets[target_id])
            return True
    return False
[ "def", "check_link_and_get_info", "(", "self", ",", "target_id", "=", "0xFF", ")", ":", "for", "_", "in", "range", "(", "0", ",", "5", ")", ":", "if", "self", ".", "_update_info", "(", "target_id", ")", ":", "if", "self", ".", "_in_boot_cb", ":", "self", ".", "_in_boot_cb", ".", "call", "(", "True", ",", "self", ".", "targets", "[", "target_id", "]", ".", "protocol_version", ")", "if", "self", ".", "_info_cb", ":", "self", ".", "_info_cb", ".", "call", "(", "self", ".", "targets", "[", "target_id", "]", ")", "return", "True", "return", "False" ]
Try to get a connection with the bootloader by requesting info 5 times. This let roughly 10 seconds to boot the copter ...
[ "Try", "to", "get", "a", "connection", "with", "the", "bootloader", "by", "requesting", "info", "5", "times", ".", "This", "let", "roughly", "10", "seconds", "to", "boot", "the", "copter", "..." ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/bootloader/cloader.py#L231-L242
233,607
bitcraze/crazyflie-lib-python
cflib/bootloader/cloader.py
Cloader._update_info
def _update_info(self, target_id): """ Call the command getInfo and fill up the information received in the fields of the object """ # Call getInfo ... pk = CRTPPacket() pk.set_header(0xFF, 0xFF) pk.data = (target_id, 0x10) self.link.send_packet(pk) # Wait for the answer pk = self.link.receive_packet(2) if (pk and pk.header == 0xFF and struct.unpack('<BB', pk.data[0:2]) == (target_id, 0x10)): tab = struct.unpack('BBHHHH', pk.data[0:10]) cpuid = struct.unpack('B' * 12, pk.data[10:22]) if target_id not in self.targets: self.targets[target_id] = Target(target_id) self.targets[target_id].addr = target_id if len(pk.data) > 22: self.targets[target_id].protocol_version = pk.datat[22] self.protocol_version = pk.datat[22] self.targets[target_id].page_size = tab[2] self.targets[target_id].buffer_pages = tab[3] self.targets[target_id].flash_pages = tab[4] self.targets[target_id].start_page = tab[5] self.targets[target_id].cpuid = '%02X' % cpuid[0] for i in cpuid[1:]: self.targets[target_id].cpuid += ':%02X' % i if (self.protocol_version == 0x10 and target_id == TargetTypes.STM32): self._update_mapping(target_id) return True return False
python
def _update_info(self, target_id):
    """ Call the command getInfo and fill up the information received in
    the fields of the object
    """

    # Call getInfo ...
    pk = CRTPPacket()
    pk.set_header(0xFF, 0xFF)
    pk.data = (target_id, 0x10)
    self.link.send_packet(pk)

    # Wait for the answer
    pk = self.link.receive_packet(2)

    if (pk and pk.header == 0xFF and
            struct.unpack('<BB', pk.data[0:2]) == (target_id, 0x10)):
        tab = struct.unpack('BBHHHH', pk.data[0:10])
        cpuid = struct.unpack('B' * 12, pk.data[10:22])
        if target_id not in self.targets:
            self.targets[target_id] = Target(target_id)
        self.targets[target_id].addr = target_id
        if len(pk.data) > 22:
            self.targets[target_id].protocol_version = pk.datat[22]
            self.protocol_version = pk.datat[22]
        self.targets[target_id].page_size = tab[2]
        self.targets[target_id].buffer_pages = tab[3]
        self.targets[target_id].flash_pages = tab[4]
        self.targets[target_id].start_page = tab[5]
        self.targets[target_id].cpuid = '%02X' % cpuid[0]
        for i in cpuid[1:]:
            self.targets[target_id].cpuid += ':%02X' % i

        if (self.protocol_version == 0x10 and
                target_id == TargetTypes.STM32):
            self._update_mapping(target_id)

        return True

    return False
[ "def", "_update_info", "(", "self", ",", "target_id", ")", ":", "# Call getInfo ...", "pk", "=", "CRTPPacket", "(", ")", "pk", ".", "set_header", "(", "0xFF", ",", "0xFF", ")", "pk", ".", "data", "=", "(", "target_id", ",", "0x10", ")", "self", ".", "link", ".", "send_packet", "(", "pk", ")", "# Wait for the answer", "pk", "=", "self", ".", "link", ".", "receive_packet", "(", "2", ")", "if", "(", "pk", "and", "pk", ".", "header", "==", "0xFF", "and", "struct", ".", "unpack", "(", "'<BB'", ",", "pk", ".", "data", "[", "0", ":", "2", "]", ")", "==", "(", "target_id", ",", "0x10", ")", ")", ":", "tab", "=", "struct", ".", "unpack", "(", "'BBHHHH'", ",", "pk", ".", "data", "[", "0", ":", "10", "]", ")", "cpuid", "=", "struct", ".", "unpack", "(", "'B'", "*", "12", ",", "pk", ".", "data", "[", "10", ":", "22", "]", ")", "if", "target_id", "not", "in", "self", ".", "targets", ":", "self", ".", "targets", "[", "target_id", "]", "=", "Target", "(", "target_id", ")", "self", ".", "targets", "[", "target_id", "]", ".", "addr", "=", "target_id", "if", "len", "(", "pk", ".", "data", ")", ">", "22", ":", "self", ".", "targets", "[", "target_id", "]", ".", "protocol_version", "=", "pk", ".", "datat", "[", "22", "]", "self", ".", "protocol_version", "=", "pk", ".", "datat", "[", "22", "]", "self", ".", "targets", "[", "target_id", "]", ".", "page_size", "=", "tab", "[", "2", "]", "self", ".", "targets", "[", "target_id", "]", ".", "buffer_pages", "=", "tab", "[", "3", "]", "self", ".", "targets", "[", "target_id", "]", ".", "flash_pages", "=", "tab", "[", "4", "]", "self", ".", "targets", "[", "target_id", "]", ".", "start_page", "=", "tab", "[", "5", "]", "self", ".", "targets", "[", "target_id", "]", ".", "cpuid", "=", "'%02X'", "%", "cpuid", "[", "0", "]", "for", "i", "in", "cpuid", "[", "1", ":", "]", ":", "self", ".", "targets", "[", "target_id", "]", ".", "cpuid", "+=", "':%02X'", "%", "i", "if", "(", "self", ".", "protocol_version", "==", "0x10", "and", "target_id", "==", "TargetTypes", ".", "STM32", ")", ":", "self", ".", "_update_mapping", "(", "target_id", ")", "return", "True", "return", "False" ]
Call the command getInfo and fill up the information received in the fields of the object
[ "Call", "the", "command", "getInfo", "and", "fill", "up", "the", "information", "received", "in", "the", "fields", "of", "the", "object" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/bootloader/cloader.py#L251-L289
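Two things stand out in the recorded source above: pk.datat[22] looks like a typo for pk.data[22], and the getInfo reply is parsed as a 'BBHHHH' header followed by a 12-byte CPU id. A small self-contained sketch of that unpacking, using a synthetic payload rather than a real radio reply:

import struct

# Synthetic getInfo payload: target_id, command, page_size, buffer_pages,
# flash_pages, start_page, then a 12-byte CPU id (all values are made up)
payload = struct.pack('BBHHHH', 0xFF, 0x10, 1024, 10, 128, 16) + bytes(range(12))

tab = struct.unpack('BBHHHH', payload[0:10])
cpuid = struct.unpack('B' * 12, payload[10:22])
print('page_size={} flash_pages={}'.format(tab[2], tab[4]))
print('cpuid=' + ':'.join('%02X' % b for b in cpuid))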
233,608
bitcraze/crazyflie-lib-python
cflib/bootloader/cloader.py
Cloader.upload_buffer
def upload_buffer(self, target_id, page, address, buff): """Upload data into a buffer on the Crazyflie""" # print len(buff) count = 0 pk = CRTPPacket() pk.set_header(0xFF, 0xFF) pk.data = struct.pack('=BBHH', target_id, 0x14, page, address) for i in range(0, len(buff)): pk.data.append(buff[i]) count += 1 if count > 24: self.link.send_packet(pk) count = 0 pk = CRTPPacket() pk.set_header(0xFF, 0xFF) pk.data = struct.pack('=BBHH', target_id, 0x14, page, i + address + 1) self.link.send_packet(pk)
python
def upload_buffer(self, target_id, page, address, buff):
    """Upload data into a buffer on the Crazyflie"""
    # print len(buff)
    count = 0
    pk = CRTPPacket()
    pk.set_header(0xFF, 0xFF)
    pk.data = struct.pack('=BBHH', target_id, 0x14, page, address)

    for i in range(0, len(buff)):
        pk.data.append(buff[i])

        count += 1

        if count > 24:
            self.link.send_packet(pk)
            count = 0
            pk = CRTPPacket()
            pk.set_header(0xFF, 0xFF)
            pk.data = struct.pack('=BBHH', target_id, 0x14, page,
                                  i + address + 1)

    self.link.send_packet(pk)
[ "def", "upload_buffer", "(", "self", ",", "target_id", ",", "page", ",", "address", ",", "buff", ")", ":", "# print len(buff)", "count", "=", "0", "pk", "=", "CRTPPacket", "(", ")", "pk", ".", "set_header", "(", "0xFF", ",", "0xFF", ")", "pk", ".", "data", "=", "struct", ".", "pack", "(", "'=BBHH'", ",", "target_id", ",", "0x14", ",", "page", ",", "address", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "buff", ")", ")", ":", "pk", ".", "data", ".", "append", "(", "buff", "[", "i", "]", ")", "count", "+=", "1", "if", "count", ">", "24", ":", "self", ".", "link", ".", "send_packet", "(", "pk", ")", "count", "=", "0", "pk", "=", "CRTPPacket", "(", ")", "pk", ".", "set_header", "(", "0xFF", ",", "0xFF", ")", "pk", ".", "data", "=", "struct", ".", "pack", "(", "'=BBHH'", ",", "target_id", ",", "0x14", ",", "page", ",", "i", "+", "address", "+", "1", ")", "self", ".", "link", ".", "send_packet", "(", "pk", ")" ]
Upload data into a buffer on the Crazyflie
[ "Upload", "data", "into", "a", "buffer", "on", "the", "Crazyflie" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/bootloader/cloader.py#L313-L334
233,609
bitcraze/crazyflie-lib-python
cflib/bootloader/cloader.py
Cloader.read_flash
def read_flash(self, addr=0xFF, page=0x00): """Read back a flash page from the Crazyflie and return it""" buff = bytearray() page_size = self.targets[addr].page_size for i in range(0, int(math.ceil(page_size / 25.0))): pk = None retry_counter = 5 while ((not pk or pk.header != 0xFF or struct.unpack('<BB', pk.data[0:2]) != (addr, 0x1C)) and retry_counter >= 0): pk = CRTPPacket() pk.set_header(0xFF, 0xFF) pk.data = struct.pack('<BBHH', addr, 0x1C, page, (i * 25)) self.link.send_packet(pk) pk = self.link.receive_packet(1) retry_counter -= 1 if (retry_counter < 0): return None else: buff += pk.data[6:] # For some reason we get one byte extra here... return buff[0:page_size]
python
def read_flash(self, addr=0xFF, page=0x00):
    """Read back a flash page from the Crazyflie and return it"""
    buff = bytearray()

    page_size = self.targets[addr].page_size

    for i in range(0, int(math.ceil(page_size / 25.0))):
        pk = None
        retry_counter = 5
        while ((not pk or pk.header != 0xFF or
                struct.unpack('<BB', pk.data[0:2]) != (addr, 0x1C))
               and retry_counter >= 0):
            pk = CRTPPacket()
            pk.set_header(0xFF, 0xFF)
            pk.data = struct.pack('<BBHH', addr, 0x1C, page, (i * 25))
            self.link.send_packet(pk)
            pk = self.link.receive_packet(1)
            retry_counter -= 1

        if (retry_counter < 0):
            return None
        else:
            buff += pk.data[6:]

    # For some reason we get one byte extra here...
    return buff[0:page_size]
[ "def", "read_flash", "(", "self", ",", "addr", "=", "0xFF", ",", "page", "=", "0x00", ")", ":", "buff", "=", "bytearray", "(", ")", "page_size", "=", "self", ".", "targets", "[", "addr", "]", ".", "page_size", "for", "i", "in", "range", "(", "0", ",", "int", "(", "math", ".", "ceil", "(", "page_size", "/", "25.0", ")", ")", ")", ":", "pk", "=", "None", "retry_counter", "=", "5", "while", "(", "(", "not", "pk", "or", "pk", ".", "header", "!=", "0xFF", "or", "struct", ".", "unpack", "(", "'<BB'", ",", "pk", ".", "data", "[", "0", ":", "2", "]", ")", "!=", "(", "addr", ",", "0x1C", ")", ")", "and", "retry_counter", ">=", "0", ")", ":", "pk", "=", "CRTPPacket", "(", ")", "pk", ".", "set_header", "(", "0xFF", ",", "0xFF", ")", "pk", ".", "data", "=", "struct", ".", "pack", "(", "'<BBHH'", ",", "addr", ",", "0x1C", ",", "page", ",", "(", "i", "*", "25", ")", ")", "self", ".", "link", ".", "send_packet", "(", "pk", ")", "pk", "=", "self", ".", "link", ".", "receive_packet", "(", "1", ")", "retry_counter", "-=", "1", "if", "(", "retry_counter", "<", "0", ")", ":", "return", "None", "else", ":", "buff", "+=", "pk", ".", "data", "[", "6", ":", "]", "# For some reason we get one byte extra here...", "return", "buff", "[", "0", ":", "page_size", "]" ]
Read back a flash page from the Crazyflie and return it
[ "Read", "back", "a", "flash", "page", "from", "the", "Crazyflie", "and", "return", "it" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/bootloader/cloader.py#L336-L361
233,610
bitcraze/crazyflie-lib-python
cflib/bootloader/cloader.py
Cloader.write_flash
def write_flash(self, addr, page_buffer, target_page, page_count): """Initiate flashing of data in the buffer to flash.""" # print "Write page", flashPage # print "Writing page [%d] and [%d] forward" % (flashPage, nPage) pk = None # Flushing downlink ... pk = self.link.receive_packet(0) while pk is not None: pk = self.link.receive_packet(0) retry_counter = 5 # print "Flasing to 0x{:X}".format(addr) while ((not pk or pk.header != 0xFF or struct.unpack('<BB', pk.data[0:2]) != (addr, 0x18)) and retry_counter >= 0): pk = CRTPPacket() pk.set_header(0xFF, 0xFF) pk.data = struct.pack('<BBHHH', addr, 0x18, page_buffer, target_page, page_count) self.link.send_packet(pk) pk = self.link.receive_packet(1) retry_counter -= 1 if retry_counter < 0: self.error_code = -1 return False self.error_code = pk.data[3] return pk.data[2] == 1
python
def write_flash(self, addr, page_buffer, target_page, page_count):
    """Initiate flashing of data in the buffer to flash."""
    # print "Write page", flashPage
    # print "Writing page [%d] and [%d] forward" % (flashPage, nPage)
    pk = None

    # Flushing downlink ...
    pk = self.link.receive_packet(0)
    while pk is not None:
        pk = self.link.receive_packet(0)

    retry_counter = 5
    # print "Flasing to 0x{:X}".format(addr)
    while ((not pk or pk.header != 0xFF or
            struct.unpack('<BB', pk.data[0:2]) != (addr, 0x18))
           and retry_counter >= 0):
        pk = CRTPPacket()
        pk.set_header(0xFF, 0xFF)
        pk.data = struct.pack('<BBHHH', addr, 0x18, page_buffer,
                              target_page, page_count)
        self.link.send_packet(pk)
        pk = self.link.receive_packet(1)
        retry_counter -= 1

    if retry_counter < 0:
        self.error_code = -1
        return False

    self.error_code = pk.data[3]

    return pk.data[2] == 1
[ "def", "write_flash", "(", "self", ",", "addr", ",", "page_buffer", ",", "target_page", ",", "page_count", ")", ":", "# print \"Write page\", flashPage", "# print \"Writing page [%d] and [%d] forward\" % (flashPage, nPage)", "pk", "=", "None", "# Flushing downlink ...", "pk", "=", "self", ".", "link", ".", "receive_packet", "(", "0", ")", "while", "pk", "is", "not", "None", ":", "pk", "=", "self", ".", "link", ".", "receive_packet", "(", "0", ")", "retry_counter", "=", "5", "# print \"Flasing to 0x{:X}\".format(addr)", "while", "(", "(", "not", "pk", "or", "pk", ".", "header", "!=", "0xFF", "or", "struct", ".", "unpack", "(", "'<BB'", ",", "pk", ".", "data", "[", "0", ":", "2", "]", ")", "!=", "(", "addr", ",", "0x18", ")", ")", "and", "retry_counter", ">=", "0", ")", ":", "pk", "=", "CRTPPacket", "(", ")", "pk", ".", "set_header", "(", "0xFF", ",", "0xFF", ")", "pk", ".", "data", "=", "struct", ".", "pack", "(", "'<BBHHH'", ",", "addr", ",", "0x18", ",", "page_buffer", ",", "target_page", ",", "page_count", ")", "self", ".", "link", ".", "send_packet", "(", "pk", ")", "pk", "=", "self", ".", "link", ".", "receive_packet", "(", "1", ")", "retry_counter", "-=", "1", "if", "retry_counter", "<", "0", ":", "self", ".", "error_code", "=", "-", "1", "return", "False", "self", ".", "error_code", "=", "pk", ".", "data", "[", "3", "]", "return", "pk", ".", "data", "[", "2", "]", "==", "1" ]
Initiate flashing of data in the buffer to flash.
[ "Initiate", "flashing", "of", "data", "in", "the", "buffer", "to", "flash", "." ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/bootloader/cloader.py#L363-L393
233,611
bitcraze/crazyflie-lib-python
cflib/bootloader/cloader.py
Cloader.decode_cpu_id
def decode_cpu_id(self, cpuid): """Decode the CPU id into a string""" ret = () for i in cpuid.split(':'): ret += (eval('0x' + i),) return ret
python
def decode_cpu_id(self, cpuid):
    """Decode the CPU id into a string"""
    ret = ()
    for i in cpuid.split(':'):
        ret += (eval('0x' + i),)

    return ret
[ "def", "decode_cpu_id", "(", "self", ",", "cpuid", ")", ":", "ret", "=", "(", ")", "for", "i", "in", "cpuid", ".", "split", "(", "':'", ")", ":", "ret", "+=", "(", "eval", "(", "'0x'", "+", "i", ")", ",", ")", "return", "ret" ]
Decode the CPU id into a string
[ "Decode", "the", "CPU", "id", "into", "a", "string" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/bootloader/cloader.py#L395-L401
233,612
bitcraze/crazyflie-lib-python
cflib/crtp/crtpstack.py
CRTPPacket.set_header
def set_header(self, port, channel): """ Set the port and channel for this packet. """ self._port = port self.channel = channel self._update_header()
python
def set_header(self, port, channel):
    """
    Set the port and channel for this packet.
    """
    self._port = port
    self.channel = channel
    self._update_header()
[ "def", "set_header", "(", "self", ",", "port", ",", "channel", ")", ":", "self", ".", "_port", "=", "port", "self", ".", "channel", "=", "channel", "self", ".", "_update_header", "(", ")" ]
Set the port and channel for this packet.
[ "Set", "the", "port", "and", "channel", "for", "this", "packet", "." ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/crtp/crtpstack.py#L99-L105
233,613
bitcraze/crazyflie-lib-python
cflib/crtp/crtpstack.py
CRTPPacket._set_data
def _set_data(self, data): """Set the packet data""" if type(data) == bytearray: self._data = data elif type(data) == str: if sys.version_info < (3,): self._data = bytearray(data) else: self._data = bytearray(data.encode('ISO-8859-1')) elif type(data) == list or type(data) == tuple: self._data = bytearray(data) elif sys.version_info >= (3,) and type(data) == bytes: self._data = bytearray(data) else: raise Exception('Data must be bytearray, string, list or tuple,' ' not {}'.format(type(data)))
python
def _set_data(self, data):
    """Set the packet data"""
    if type(data) == bytearray:
        self._data = data
    elif type(data) == str:
        if sys.version_info < (3,):
            self._data = bytearray(data)
        else:
            self._data = bytearray(data.encode('ISO-8859-1'))
    elif type(data) == list or type(data) == tuple:
        self._data = bytearray(data)
    elif sys.version_info >= (3,) and type(data) == bytes:
        self._data = bytearray(data)
    else:
        raise Exception('Data must be bytearray, string, list or tuple,'
                        ' not {}'.format(type(data)))
[ "def", "_set_data", "(", "self", ",", "data", ")", ":", "if", "type", "(", "data", ")", "==", "bytearray", ":", "self", ".", "_data", "=", "data", "elif", "type", "(", "data", ")", "==", "str", ":", "if", "sys", ".", "version_info", "<", "(", "3", ",", ")", ":", "self", ".", "_data", "=", "bytearray", "(", "data", ")", "else", ":", "self", ".", "_data", "=", "bytearray", "(", "data", ".", "encode", "(", "'ISO-8859-1'", ")", ")", "elif", "type", "(", "data", ")", "==", "list", "or", "type", "(", "data", ")", "==", "tuple", ":", "self", ".", "_data", "=", "bytearray", "(", "data", ")", "elif", "sys", ".", "version_info", ">=", "(", "3", ",", ")", "and", "type", "(", "data", ")", "==", "bytes", ":", "self", ".", "_data", "=", "bytearray", "(", "data", ")", "else", ":", "raise", "Exception", "(", "'Data must be bytearray, string, list or tuple,'", "' not {}'", ".", "format", "(", "type", "(", "data", ")", ")", ")" ]
Set the packet data
[ "Set", "the", "packet", "data" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/crtp/crtpstack.py#L119-L134
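A short sketch of what the data setter above accepts, assuming CRTPPacket and CRTPPort are imported from cflib.crtp.crtpstack as in the rest of the library; the port constant is only an illustration:

from cflib.crtp.crtpstack import CRTPPacket, CRTPPort

pk = CRTPPacket()
pk.set_header(CRTPPort.COMMANDER, 0)   # port, channel
pk.data = (0x01, 0x02, 0x03)           # tuples, lists, str and bytes all end up as a bytearray
print(type(pk.data), list(pk.data))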
233,614
bitcraze/crazyflie-lib-python
cflib/positioning/motion_commander.py
MotionCommander.take_off
def take_off(self, height=None, velocity=VELOCITY): """ Takes off, that is starts the motors, goes straigt up and hovers. Do not call this function if you use the with keyword. Take off is done automatically when the context is created. :param height: the height (meters) to hover at. None uses the default height set when constructed. :param velocity: the velocity (meters/second) when taking off :return: """ if self._is_flying: raise Exception('Already flying') if not self._cf.is_connected(): raise Exception('Crazyflie is not connected') self._is_flying = True self._reset_position_estimator() self._thread = _SetPointThread(self._cf) self._thread.start() if height is None: height = self.default_height self.up(height, velocity)
python
def take_off(self, height=None, velocity=VELOCITY):
    """
    Takes off, that is starts the motors, goes straigt up and hovers.
    Do not call this function if you use the with keyword. Take off is
    done automatically when the context is created.

    :param height: the height (meters) to hover at. None uses the default
                   height set when constructed.
    :param velocity: the velocity (meters/second) when taking off
    :return:
    """
    if self._is_flying:
        raise Exception('Already flying')

    if not self._cf.is_connected():
        raise Exception('Crazyflie is not connected')

    self._is_flying = True
    self._reset_position_estimator()

    self._thread = _SetPointThread(self._cf)
    self._thread.start()

    if height is None:
        height = self.default_height

    self.up(height, velocity)
[ "def", "take_off", "(", "self", ",", "height", "=", "None", ",", "velocity", "=", "VELOCITY", ")", ":", "if", "self", ".", "_is_flying", ":", "raise", "Exception", "(", "'Already flying'", ")", "if", "not", "self", ".", "_cf", ".", "is_connected", "(", ")", ":", "raise", "Exception", "(", "'Crazyflie is not connected'", ")", "self", ".", "_is_flying", "=", "True", "self", ".", "_reset_position_estimator", "(", ")", "self", ".", "_thread", "=", "_SetPointThread", "(", "self", ".", "_cf", ")", "self", ".", "_thread", ".", "start", "(", ")", "if", "height", "is", "None", ":", "height", "=", "self", ".", "default_height", "self", ".", "up", "(", "height", ",", "velocity", ")" ]
Takes off, that is starts the motors, goes straigt up and hovers. Do not call this function if you use the with keyword. Take off is done automatically when the context is created. :param height: the height (meters) to hover at. None uses the default height set when constructed. :param velocity: the velocity (meters/second) when taking off :return:
[ "Takes", "off", "that", "is", "starts", "the", "motors", "goes", "straigt", "up", "and", "hovers", ".", "Do", "not", "call", "this", "function", "if", "you", "use", "the", "with", "keyword", ".", "Take", "off", "is", "done", "automatically", "when", "the", "context", "is", "created", "." ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/positioning/motion_commander.py#L81-L107
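Since the docstring above points at the with-keyword usage, here is a hedged end-to-end sketch; the radio URI is a placeholder and the SyncCrazyflie/MotionCommander entry points are assumed to match this snapshot of cflib:

import time

import cflib.crtp
from cflib.crazyflie import Crazyflie
from cflib.crazyflie.syncCrazyflie import SyncCrazyflie
from cflib.positioning.motion_commander import MotionCommander

URI = 'radio://0/80/2M'  # placeholder link URI

cflib.crtp.init_drivers()
with SyncCrazyflie(URI, cf=Crazyflie(rw_cache='./cache')) as scf:
    # Entering the context takes off automatically; leaving it lands.
    with MotionCommander(scf, default_height=0.5) as mc:
        mc.forward(0.3)
        time.sleep(1)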
233,615
bitcraze/crazyflie-lib-python
cflib/positioning/motion_commander.py
MotionCommander.turn_left
def turn_left(self, angle_degrees, rate=RATE): """ Turn to the left, staying on the spot :param angle_degrees: How far to turn (degrees) :param rate: The trurning speed (degrees/second) :return: """ flight_time = angle_degrees / rate self.start_turn_left(rate) time.sleep(flight_time) self.stop()
python
def turn_left(self, angle_degrees, rate=RATE):
    """
    Turn to the left, staying on the spot

    :param angle_degrees: How far to turn (degrees)
    :param rate: The trurning speed (degrees/second)
    :return:
    """
    flight_time = angle_degrees / rate

    self.start_turn_left(rate)
    time.sleep(flight_time)
    self.stop()
[ "def", "turn_left", "(", "self", ",", "angle_degrees", ",", "rate", "=", "RATE", ")", ":", "flight_time", "=", "angle_degrees", "/", "rate", "self", ".", "start_turn_left", "(", "rate", ")", "time", ".", "sleep", "(", "flight_time", ")", "self", ".", "stop", "(", ")" ]
Turn to the left, staying on the spot :param angle_degrees: How far to turn (degrees) :param rate: The trurning speed (degrees/second) :return:
[ "Turn", "to", "the", "left", "staying", "on", "the", "spot" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/positioning/motion_commander.py#L195-L207
233,616
bitcraze/crazyflie-lib-python
cflib/positioning/motion_commander.py
MotionCommander.turn_right
def turn_right(self, angle_degrees, rate=RATE): """ Turn to the right, staying on the spot :param angle_degrees: How far to turn (degrees) :param rate: The trurning speed (degrees/second) :return: """ flight_time = angle_degrees / rate self.start_turn_right(rate) time.sleep(flight_time) self.stop()
python
def turn_right(self, angle_degrees, rate=RATE):
    """
    Turn to the right, staying on the spot

    :param angle_degrees: How far to turn (degrees)
    :param rate: The trurning speed (degrees/second)
    :return:
    """
    flight_time = angle_degrees / rate

    self.start_turn_right(rate)
    time.sleep(flight_time)
    self.stop()
[ "def", "turn_right", "(", "self", ",", "angle_degrees", ",", "rate", "=", "RATE", ")", ":", "flight_time", "=", "angle_degrees", "/", "rate", "self", ".", "start_turn_right", "(", "rate", ")", "time", ".", "sleep", "(", "flight_time", ")", "self", ".", "stop", "(", ")" ]
Turn to the right, staying on the spot :param angle_degrees: How far to turn (degrees) :param rate: The trurning speed (degrees/second) :return:
[ "Turn", "to", "the", "right", "staying", "on", "the", "spot" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/positioning/motion_commander.py#L209-L221
233,617
bitcraze/crazyflie-lib-python
cflib/positioning/motion_commander.py
MotionCommander.circle_left
def circle_left(self, radius_m, velocity=VELOCITY, angle_degrees=360.0): """ Go in circle, counter clock wise :param radius_m: The radius of the circle (meters) :param velocity: The velocity along the circle (meters/second) :param angle_degrees: How far to go in the circle (degrees) :return: """ distance = 2 * radius_m * math.pi * angle_degrees / 360.0 flight_time = distance / velocity self.start_circle_left(radius_m, velocity) time.sleep(flight_time) self.stop()
python
def circle_left(self, radius_m, velocity=VELOCITY, angle_degrees=360.0):
    """
    Go in circle, counter clock wise

    :param radius_m: The radius of the circle (meters)
    :param velocity: The velocity along the circle (meters/second)
    :param angle_degrees: How far to go in the circle (degrees)
    :return:
    """
    distance = 2 * radius_m * math.pi * angle_degrees / 360.0
    flight_time = distance / velocity

    self.start_circle_left(radius_m, velocity)
    time.sleep(flight_time)
    self.stop()
[ "def", "circle_left", "(", "self", ",", "radius_m", ",", "velocity", "=", "VELOCITY", ",", "angle_degrees", "=", "360.0", ")", ":", "distance", "=", "2", "*", "radius_m", "*", "math", ".", "pi", "*", "angle_degrees", "/", "360.0", "flight_time", "=", "distance", "/", "velocity", "self", ".", "start_circle_left", "(", "radius_m", ",", "velocity", ")", "time", ".", "sleep", "(", "flight_time", ")", "self", ".", "stop", "(", ")" ]
Go in circle, counter clock wise :param radius_m: The radius of the circle (meters) :param velocity: The velocity along the circle (meters/second) :param angle_degrees: How far to go in the circle (degrees) :return:
[ "Go", "in", "circle", "counter", "clock", "wise" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/positioning/motion_commander.py#L223-L237
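The timing in circle_left/circle_right follows directly from the arc length, so a quick worked example with arbitrarily chosen numbers: a quarter circle of 0.5 m radius at 0.3 m/s gives a distance of about 0.79 m and a flight time of about 2.6 s:

import math

radius_m, velocity, angle_degrees = 0.5, 0.3, 90.0
distance = 2 * radius_m * math.pi * angle_degrees / 360.0   # ~0.785 m
flight_time = distance / velocity                           # ~2.62 s
print('{:.3f} m over {:.2f} s'.format(distance, flight_time))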
233,618
bitcraze/crazyflie-lib-python
cflib/positioning/motion_commander.py
MotionCommander.circle_right
def circle_right(self, radius_m, velocity=VELOCITY, angle_degrees=360.0): """ Go in circle, clock wise :param radius_m: The radius of the circle (meters) :param velocity: The velocity along the circle (meters/second) :param angle_degrees: How far to go in the circle (degrees) :return: """ distance = 2 * radius_m * math.pi * angle_degrees / 360.0 flight_time = distance / velocity self.start_circle_right(radius_m, velocity) time.sleep(flight_time) self.stop()
python
def circle_right(self, radius_m, velocity=VELOCITY, angle_degrees=360.0):
    """
    Go in circle, clock wise

    :param radius_m: The radius of the circle (meters)
    :param velocity: The velocity along the circle (meters/second)
    :param angle_degrees: How far to go in the circle (degrees)
    :return:
    """
    distance = 2 * radius_m * math.pi * angle_degrees / 360.0
    flight_time = distance / velocity

    self.start_circle_right(radius_m, velocity)
    time.sleep(flight_time)
    self.stop()
[ "def", "circle_right", "(", "self", ",", "radius_m", ",", "velocity", "=", "VELOCITY", ",", "angle_degrees", "=", "360.0", ")", ":", "distance", "=", "2", "*", "radius_m", "*", "math", ".", "pi", "*", "angle_degrees", "/", "360.0", "flight_time", "=", "distance", "/", "velocity", "self", ".", "start_circle_right", "(", "radius_m", ",", "velocity", ")", "time", ".", "sleep", "(", "flight_time", ")", "self", ".", "stop", "(", ")" ]
Go in circle, clock wise :param radius_m: The radius of the circle (meters) :param velocity: The velocity along the circle (meters/second) :param angle_degrees: How far to go in the circle (degrees) :return:
[ "Go", "in", "circle", "clock", "wise" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/positioning/motion_commander.py#L239-L253
233,619
bitcraze/crazyflie-lib-python
cflib/positioning/motion_commander.py
MotionCommander.start_circle_left
def start_circle_left(self, radius_m, velocity=VELOCITY): """ Start a circular motion to the left. This function returns immediately. :param radius_m: The radius of the circle (meters) :param velocity: The velocity of the motion (meters/second) :return: """ circumference = 2 * radius_m * math.pi rate = 360.0 * velocity / circumference self._set_vel_setpoint(velocity, 0.0, 0.0, -rate)
python
def start_circle_left(self, radius_m, velocity=VELOCITY):
    """
    Start a circular motion to the left. This function returns immediately.

    :param radius_m: The radius of the circle (meters)
    :param velocity: The velocity of the motion (meters/second)
    :return:
    """
    circumference = 2 * radius_m * math.pi
    rate = 360.0 * velocity / circumference

    self._set_vel_setpoint(velocity, 0.0, 0.0, -rate)
[ "def", "start_circle_left", "(", "self", ",", "radius_m", ",", "velocity", "=", "VELOCITY", ")", ":", "circumference", "=", "2", "*", "radius_m", "*", "math", ".", "pi", "rate", "=", "360.0", "*", "velocity", "/", "circumference", "self", ".", "_set_vel_setpoint", "(", "velocity", ",", "0.0", ",", "0.0", ",", "-", "rate", ")" ]
Start a circular motion to the left. This function returns immediately. :param radius_m: The radius of the circle (meters) :param velocity: The velocity of the motion (meters/second) :return:
[ "Start", "a", "circular", "motion", "to", "the", "left", ".", "This", "function", "returns", "immediately", "." ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/positioning/motion_commander.py#L364-L375
233,620
bitcraze/crazyflie-lib-python
cflib/positioning/motion_commander.py
MotionCommander.start_linear_motion
def start_linear_motion(self, velocity_x_m, velocity_y_m, velocity_z_m): """ Start a linear motion. This function returns immediately. positive X is forward positive Y is left positive Z is up :param velocity_x_m: The velocity along the X-axis (meters/second) :param velocity_y_m: The velocity along the Y-axis (meters/second) :param velocity_z_m: The velocity along the Z-axis (meters/second) :return: """ self._set_vel_setpoint( velocity_x_m, velocity_y_m, velocity_z_m, 0.0)
python
def start_linear_motion(self, velocity_x_m, velocity_y_m, velocity_z_m):
    """
    Start a linear motion. This function returns immediately.

    positive X is forward
    positive Y is left
    positive Z is up

    :param velocity_x_m: The velocity along the X-axis (meters/second)
    :param velocity_y_m: The velocity along the Y-axis (meters/second)
    :param velocity_z_m: The velocity along the Z-axis (meters/second)
    :return:
    """
    self._set_vel_setpoint(
        velocity_x_m, velocity_y_m, velocity_z_m, 0.0)
[ "def", "start_linear_motion", "(", "self", ",", "velocity_x_m", ",", "velocity_y_m", ",", "velocity_z_m", ")", ":", "self", ".", "_set_vel_setpoint", "(", "velocity_x_m", ",", "velocity_y_m", ",", "velocity_z_m", ",", "0.0", ")" ]
Start a linear motion. This function returns immediately. positive X is forward positive Y is left positive Z is up :param velocity_x_m: The velocity along the X-axis (meters/second) :param velocity_y_m: The velocity along the Y-axis (meters/second) :param velocity_z_m: The velocity along the Z-axis (meters/second) :return:
[ "Start", "a", "linear", "motion", ".", "This", "function", "returns", "immediately", "." ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/positioning/motion_commander.py#L390-L404
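A hedged sketch of the start/sleep/stop pattern with start_linear_motion, mirroring what turn_left and circle_left do internally; mc is assumed to be an active MotionCommander (see the take-off sketch earlier):

import time

# mc is assumed to be an active MotionCommander instance
mc.start_linear_motion(0.2, 0.0, 0.1)   # 0.2 m/s forward, 0.1 m/s up
time.sleep(2.0)
mc.stop()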
233,621
bitcraze/crazyflie-lib-python
cflib/positioning/motion_commander.py
_SetPointThread.set_vel_setpoint
def set_vel_setpoint(self, velocity_x, velocity_y, velocity_z, rate_yaw): """Set the velocity setpoint to use for the future motion""" self._queue.put((velocity_x, velocity_y, velocity_z, rate_yaw))
python
def set_vel_setpoint(self, velocity_x, velocity_y, velocity_z, rate_yaw):
    """Set the velocity setpoint to use for the future motion"""
    self._queue.put((velocity_x, velocity_y, velocity_z, rate_yaw))
[ "def", "set_vel_setpoint", "(", "self", ",", "velocity_x", ",", "velocity_y", ",", "velocity_z", ",", "rate_yaw", ")", ":", "self", ".", "_queue", ".", "put", "(", "(", "velocity_x", ",", "velocity_y", ",", "velocity_z", ",", "rate_yaw", ")", ")" ]
Set the velocity setpoint to use for the future motion
[ "Set", "the", "velocity", "setpoint", "to", "use", "for", "the", "future", "motion" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/positioning/motion_commander.py#L446-L448
233,622
bitcraze/crazyflie-lib-python
examples/basicparam.py
ParamExample._param_callback
def _param_callback(self, name, value): """Generic callback registered for all the groups""" print('{0}: {1}'.format(name, value)) # Remove each parameter from the list and close the link when # all are fetched self._param_check_list.remove(name) if len(self._param_check_list) == 0: print('Have fetched all parameter values.') # First remove all the group callbacks for g in self._param_groups: self._cf.param.remove_update_callback(group=g, cb=self._param_callback) # Create a new random value [0.00,1.00] for pid_attitude.pitch_kd # and set it pkd = random.random() print('') print('Write: pid_attitude.pitch_kd={:.2f}'.format(pkd)) self._cf.param.add_update_callback(group='pid_attitude', name='pitch_kd', cb=self._a_pitch_kd_callback) # When setting a value the parameter is automatically read back # and the registered callbacks will get the updated value self._cf.param.set_value('pid_attitude.pitch_kd', '{:.2f}'.format(pkd))
python
def _param_callback(self, name, value):
    """Generic callback registered for all the groups"""
    print('{0}: {1}'.format(name, value))

    # Remove each parameter from the list and close the link when
    # all are fetched
    self._param_check_list.remove(name)
    if len(self._param_check_list) == 0:
        print('Have fetched all parameter values.')

        # First remove all the group callbacks
        for g in self._param_groups:
            self._cf.param.remove_update_callback(group=g,
                                                  cb=self._param_callback)

        # Create a new random value [0.00,1.00] for pid_attitude.pitch_kd
        # and set it
        pkd = random.random()
        print('')
        print('Write: pid_attitude.pitch_kd={:.2f}'.format(pkd))
        self._cf.param.add_update_callback(group='pid_attitude',
                                           name='pitch_kd',
                                           cb=self._a_pitch_kd_callback)
        # When setting a value the parameter is automatically read back
        # and the registered callbacks will get the updated value
        self._cf.param.set_value('pid_attitude.pitch_kd',
                                 '{:.2f}'.format(pkd))
[ "def", "_param_callback", "(", "self", ",", "name", ",", "value", ")", ":", "print", "(", "'{0}: {1}'", ".", "format", "(", "name", ",", "value", ")", ")", "# Remove each parameter from the list and close the link when", "# all are fetched", "self", ".", "_param_check_list", ".", "remove", "(", "name", ")", "if", "len", "(", "self", ".", "_param_check_list", ")", "==", "0", ":", "print", "(", "'Have fetched all parameter values.'", ")", "# First remove all the group callbacks", "for", "g", "in", "self", ".", "_param_groups", ":", "self", ".", "_cf", ".", "param", ".", "remove_update_callback", "(", "group", "=", "g", ",", "cb", "=", "self", ".", "_param_callback", ")", "# Create a new random value [0.00,1.00] for pid_attitude.pitch_kd", "# and set it", "pkd", "=", "random", ".", "random", "(", ")", "print", "(", "''", ")", "print", "(", "'Write: pid_attitude.pitch_kd={:.2f}'", ".", "format", "(", "pkd", ")", ")", "self", ".", "_cf", ".", "param", ".", "add_update_callback", "(", "group", "=", "'pid_attitude'", ",", "name", "=", "'pitch_kd'", ",", "cb", "=", "self", ".", "_a_pitch_kd_callback", ")", "# When setting a value the parameter is automatically read back", "# and the registered callbacks will get the updated value", "self", ".", "_cf", ".", "param", ".", "set_value", "(", "'pid_attitude.pitch_kd'", ",", "'{:.2f}'", ".", "format", "(", "pkd", ")", ")" ]
Generic callback registered for all the groups
[ "Generic", "callback", "registered", "for", "all", "the", "groups" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/examples/basicparam.py#L99-L125
233,623
bitcraze/crazyflie-lib-python
examples/basicparam.py
ParamExample._a_pitch_kd_callback
def _a_pitch_kd_callback(self, name, value): """Callback for pid_attitude.pitch_kd""" print('Readback: {0}={1}'.format(name, value)) # End the example by closing the link (will cause the app to quit) self._cf.close_link()
python
def _a_pitch_kd_callback(self, name, value):
    """Callback for pid_attitude.pitch_kd"""
    print('Readback: {0}={1}'.format(name, value))

    # End the example by closing the link (will cause the app to quit)
    self._cf.close_link()
[ "def", "_a_pitch_kd_callback", "(", "self", ",", "name", ",", "value", ")", ":", "print", "(", "'Readback: {0}={1}'", ".", "format", "(", "name", ",", "value", ")", ")", "# End the example by closing the link (will cause the app to quit)", "self", ".", "_cf", ".", "close_link", "(", ")" ]
Callback for pid_attitude.pitch_kd
[ "Callback", "for", "pid_attitude", ".", "pitch_kd" ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/examples/basicparam.py#L127-L132
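Outside of this example class, the same read-back pattern can be used directly on a connected Crazyflie; a hedged sketch, assuming cf is connected and that the pid_attitude.pitch_kd parameter exists on the firmware in use:

def _pitch_kd_updated(name, value):
    # Called with the value read back from the Crazyflie
    print('{} is now {}'.format(name, value))

cf.param.add_update_callback(group='pid_attitude', name='pitch_kd',
                             cb=_pitch_kd_updated)
cf.param.set_value('pid_attitude.pitch_kd', '0.25')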
233,624
bitcraze/crazyflie-lib-python
cflib/crtp/radiodriver.py
RadioDriver._scan_radio_channels
def _scan_radio_channels(self, cradio, start=0, stop=125): """ Scan for Crazyflies between the supplied channels. """ return list(cradio.scan_channels(start, stop, (0xff,)))
python
def _scan_radio_channels(self, cradio, start=0, stop=125):
    """ Scan for Crazyflies between the supplied channels. """
    return list(cradio.scan_channels(start, stop, (0xff,)))
[ "def", "_scan_radio_channels", "(", "self", ",", "cradio", ",", "start", "=", "0", ",", "stop", "=", "125", ")", ":", "return", "list", "(", "cradio", ".", "scan_channels", "(", "start", ",", "stop", ",", "(", "0xff", ",", ")", ")", ")" ]
Scan for Crazyflies between the supplied channels.
[ "Scan", "for", "Crazyflies", "between", "the", "supplied", "channels", "." ]
f6ebb4eb315bbe6e02db518936ac17fb615b2af8
https://github.com/bitcraze/crazyflie-lib-python/blob/f6ebb4eb315bbe6e02db518936ac17fb615b2af8/cflib/crtp/radiodriver.py#L290-L292
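The channel scan above is normally reached through the public scanning API; a hedged sketch, assuming a Crazyradio dongle is attached and the cflib.crtp entry points keep the names used in this snapshot:

import cflib.crtp

cflib.crtp.init_drivers()
for uri, description in cflib.crtp.scan_interfaces():
    print(uri, description)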
233,625
googleapis/google-auth-library-python-oauthlib
google_auth_oauthlib/tool/__main__.py
main
def main(client_secrets, scope, save, credentials, headless): """Command-line tool for obtaining authorization and credentials from a user. This tool uses the OAuth 2.0 Authorization Code grant as described in section 1.3.1 of RFC6749: https://tools.ietf.org/html/rfc6749#section-1.3.1 This tool is intended for assist developers in obtaining credentials for testing applications where it may not be possible or easy to run a complete OAuth 2.0 authorization flow, especially in the case of code samples or embedded devices without input / display capabilities. This is not intended for production use where a combination of companion and on-device applications should complete the OAuth 2.0 authorization flow to get authorization from the users. """ flow = google_auth_oauthlib.flow.InstalledAppFlow.from_client_secrets_file( client_secrets, scopes=scope ) if not headless: creds = flow.run_local_server() else: creds = flow.run_console() creds_data = { 'token': creds.token, 'refresh_token': creds.refresh_token, 'token_uri': creds.token_uri, 'client_id': creds.client_id, 'client_secret': creds.client_secret, 'scopes': creds.scopes } if save: del creds_data['token'] config_path = os.path.dirname(credentials) if config_path and not os.path.isdir(config_path): os.makedirs(config_path) with open(credentials, 'w') as outfile: json.dump(creds_data, outfile) click.echo('credentials saved: %s' % credentials) else: click.echo(json.dumps(creds_data))
python
def main(client_secrets, scope, save, credentials, headless): """Command-line tool for obtaining authorization and credentials from a user. This tool uses the OAuth 2.0 Authorization Code grant as described in section 1.3.1 of RFC6749: https://tools.ietf.org/html/rfc6749#section-1.3.1 This tool is intended for assist developers in obtaining credentials for testing applications where it may not be possible or easy to run a complete OAuth 2.0 authorization flow, especially in the case of code samples or embedded devices without input / display capabilities. This is not intended for production use where a combination of companion and on-device applications should complete the OAuth 2.0 authorization flow to get authorization from the users. """ flow = google_auth_oauthlib.flow.InstalledAppFlow.from_client_secrets_file( client_secrets, scopes=scope ) if not headless: creds = flow.run_local_server() else: creds = flow.run_console() creds_data = { 'token': creds.token, 'refresh_token': creds.refresh_token, 'token_uri': creds.token_uri, 'client_id': creds.client_id, 'client_secret': creds.client_secret, 'scopes': creds.scopes } if save: del creds_data['token'] config_path = os.path.dirname(credentials) if config_path and not os.path.isdir(config_path): os.makedirs(config_path) with open(credentials, 'w') as outfile: json.dump(creds_data, outfile) click.echo('credentials saved: %s' % credentials) else: click.echo(json.dumps(creds_data))
[ "def", "main", "(", "client_secrets", ",", "scope", ",", "save", ",", "credentials", ",", "headless", ")", ":", "flow", "=", "google_auth_oauthlib", ".", "flow", ".", "InstalledAppFlow", ".", "from_client_secrets_file", "(", "client_secrets", ",", "scopes", "=", "scope", ")", "if", "not", "headless", ":", "creds", "=", "flow", ".", "run_local_server", "(", ")", "else", ":", "creds", "=", "flow", ".", "run_console", "(", ")", "creds_data", "=", "{", "'token'", ":", "creds", ".", "token", ",", "'refresh_token'", ":", "creds", ".", "refresh_token", ",", "'token_uri'", ":", "creds", ".", "token_uri", ",", "'client_id'", ":", "creds", ".", "client_id", ",", "'client_secret'", ":", "creds", ".", "client_secret", ",", "'scopes'", ":", "creds", ".", "scopes", "}", "if", "save", ":", "del", "creds_data", "[", "'token'", "]", "config_path", "=", "os", ".", "path", ".", "dirname", "(", "credentials", ")", "if", "config_path", "and", "not", "os", ".", "path", ".", "isdir", "(", "config_path", ")", ":", "os", ".", "makedirs", "(", "config_path", ")", "with", "open", "(", "credentials", ",", "'w'", ")", "as", "outfile", ":", "json", ".", "dump", "(", "creds_data", ",", "outfile", ")", "click", ".", "echo", "(", "'credentials saved: %s'", "%", "credentials", ")", "else", ":", "click", ".", "echo", "(", "json", ".", "dumps", "(", "creds_data", ")", ")" ]
Command-line tool for obtaining authorization and credentials from a user. This tool uses the OAuth 2.0 Authorization Code grant as described in section 1.3.1 of RFC6749: https://tools.ietf.org/html/rfc6749#section-1.3.1 This tool is intended to assist developers in obtaining credentials for testing applications where it may not be possible or easy to run a complete OAuth 2.0 authorization flow, especially in the case of code samples or embedded devices without input / display capabilities. This is not intended for production use, where a combination of companion and on-device applications should complete the OAuth 2.0 authorization flow to get authorization from the users.
[ "Command", "-", "line", "tool", "for", "obtaining", "authorization", "and", "credentials", "from", "a", "user", "." ]
ba826565994cf20c073d79f534036747fdef2041
https://github.com/googleapis/google-auth-library-python-oauthlib/blob/ba826565994cf20c073d79f534036747fdef2041/google_auth_oauthlib/tool/__main__.py#L80-L130
233,626
googleapis/google-auth-library-python-oauthlib
google_auth_oauthlib/flow.py
Flow.authorization_url
def authorization_url(self, **kwargs): """Generates an authorization URL. This is the first step in the OAuth 2.0 Authorization Flow. The user's browser should be redirected to the returned URL. This method calls :meth:`requests_oauthlib.OAuth2Session.authorization_url` and specifies the client configuration's authorization URI (usually Google's authorization server) and specifies that "offline" access is desired. This is required in order to obtain a refresh token. Args: kwargs: Additional arguments passed through to :meth:`requests_oauthlib.OAuth2Session.authorization_url` Returns: Tuple[str, str]: The generated authorization URL and state. The user must visit the URL to complete the flow. The state is used when completing the flow to verify that the request originated from your application. If your application is using a different :class:`Flow` instance to obtain the token, you will need to specify the ``state`` when constructing the :class:`Flow`. """ kwargs.setdefault('access_type', 'offline') url, state = self.oauth2session.authorization_url( self.client_config['auth_uri'], **kwargs) return url, state
python
def authorization_url(self, **kwargs): """Generates an authorization URL. This is the first step in the OAuth 2.0 Authorization Flow. The user's browser should be redirected to the returned URL. This method calls :meth:`requests_oauthlib.OAuth2Session.authorization_url` and specifies the client configuration's authorization URI (usually Google's authorization server) and specifies that "offline" access is desired. This is required in order to obtain a refresh token. Args: kwargs: Additional arguments passed through to :meth:`requests_oauthlib.OAuth2Session.authorization_url` Returns: Tuple[str, str]: The generated authorization URL and state. The user must visit the URL to complete the flow. The state is used when completing the flow to verify that the request originated from your application. If your application is using a different :class:`Flow` instance to obtain the token, you will need to specify the ``state`` when constructing the :class:`Flow`. """ kwargs.setdefault('access_type', 'offline') url, state = self.oauth2session.authorization_url( self.client_config['auth_uri'], **kwargs) return url, state
[ "def", "authorization_url", "(", "self", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "'access_type'", ",", "'offline'", ")", "url", ",", "state", "=", "self", ".", "oauth2session", ".", "authorization_url", "(", "self", ".", "client_config", "[", "'auth_uri'", "]", ",", "*", "*", "kwargs", ")", "return", "url", ",", "state" ]
Generates an authorization URL. This is the first step in the OAuth 2.0 Authorization Flow. The user's browser should be redirected to the returned URL. This method calls :meth:`requests_oauthlib.OAuth2Session.authorization_url` and specifies the client configuration's authorization URI (usually Google's authorization server) and specifies that "offline" access is desired. This is required in order to obtain a refresh token. Args: kwargs: Additional arguments passed through to :meth:`requests_oauthlib.OAuth2Session.authorization_url` Returns: Tuple[str, str]: The generated authorization URL and state. The user must visit the URL to complete the flow. The state is used when completing the flow to verify that the request originated from your application. If your application is using a different :class:`Flow` instance to obtain the token, you will need to specify the ``state`` when constructing the :class:`Flow`.
[ "Generates", "an", "authorization", "URL", "." ]
ba826565994cf20c073d79f534036747fdef2041
https://github.com/googleapis/google-auth-library-python-oauthlib/blob/ba826565994cf20c073d79f534036747fdef2041/google_auth_oauthlib/flow.py#L186-L214
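A hedged sketch of the first leg of a web-server flow built on this method; the client-secrets filename, scope, and redirect URI below are illustrative placeholders:

```python
# Sketch only: file name, scope and redirect URI are placeholders.
import google_auth_oauthlib.flow

flow = google_auth_oauthlib.flow.Flow.from_client_secrets_file(
    'client_secrets.json',
    scopes=['https://www.googleapis.com/auth/userinfo.email'])
flow.redirect_uri = 'https://example.com/oauth2callback'

auth_url, state = flow.authorization_url(prompt='consent')
# Redirect the user's browser to auth_url; keep `state` to validate the callback.
```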
233,627
googleapis/google-auth-library-python-oauthlib
google_auth_oauthlib/flow.py
Flow.fetch_token
def fetch_token(self, **kwargs): """Completes the Authorization Flow and obtains an access token. This is the final step in the OAuth 2.0 Authorization Flow. This is called after the user consents. This method calls :meth:`requests_oauthlib.OAuth2Session.fetch_token` and specifies the client configuration's token URI (usually Google's token server). Args: kwargs: Arguments passed through to :meth:`requests_oauthlib.OAuth2Session.fetch_token`. At least one of ``code`` or ``authorization_response`` must be specified. Returns: Mapping[str, str]: The obtained tokens. Typically, you will not use return value of this function and instead and use :meth:`credentials` to obtain a :class:`~google.auth.credentials.Credentials` instance. """ kwargs.setdefault('client_secret', self.client_config['client_secret']) return self.oauth2session.fetch_token( self.client_config['token_uri'], **kwargs)
python
def fetch_token(self, **kwargs): """Completes the Authorization Flow and obtains an access token. This is the final step in the OAuth 2.0 Authorization Flow. This is called after the user consents. This method calls :meth:`requests_oauthlib.OAuth2Session.fetch_token` and specifies the client configuration's token URI (usually Google's token server). Args: kwargs: Arguments passed through to :meth:`requests_oauthlib.OAuth2Session.fetch_token`. At least one of ``code`` or ``authorization_response`` must be specified. Returns: Mapping[str, str]: The obtained tokens. Typically, you will not use return value of this function and instead and use :meth:`credentials` to obtain a :class:`~google.auth.credentials.Credentials` instance. """ kwargs.setdefault('client_secret', self.client_config['client_secret']) return self.oauth2session.fetch_token( self.client_config['token_uri'], **kwargs)
[ "def", "fetch_token", "(", "self", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "'client_secret'", ",", "self", ".", "client_config", "[", "'client_secret'", "]", ")", "return", "self", ".", "oauth2session", ".", "fetch_token", "(", "self", ".", "client_config", "[", "'token_uri'", "]", ",", "*", "*", "kwargs", ")" ]
Completes the Authorization Flow and obtains an access token. This is the final step in the OAuth 2.0 Authorization Flow. This is called after the user consents. This method calls :meth:`requests_oauthlib.OAuth2Session.fetch_token` and specifies the client configuration's token URI (usually Google's token server). Args: kwargs: Arguments passed through to :meth:`requests_oauthlib.OAuth2Session.fetch_token`. At least one of ``code`` or ``authorization_response`` must be specified. Returns: Mapping[str, str]: The obtained tokens. Typically, you will not use the return value of this function and will instead use :meth:`credentials` to obtain a :class:`~google.auth.credentials.Credentials` instance.
[ "Completes", "the", "Authorization", "Flow", "and", "obtains", "an", "access", "token", "." ]
ba826565994cf20c073d79f534036747fdef2041
https://github.com/googleapis/google-auth-library-python-oauthlib/blob/ba826565994cf20c073d79f534036747fdef2041/google_auth_oauthlib/flow.py#L216-L241
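And the corresponding final leg, sketched as a continuation of the `flow` object above; the authorization-response URL is a placeholder for the full redirect your callback endpoint receives:

```python
# Sketch only: paste the complete redirect URL received by the callback endpoint.
flow.fetch_token(
    authorization_response='https://example.com/oauth2callback?state=...&code=...')
credentials = flow.credentials
print(credentials.token, credentials.refresh_token)
```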
233,628
googleapis/google-auth-library-python-oauthlib
google_auth_oauthlib/flow.py
InstalledAppFlow.run_console
def run_console( self, authorization_prompt_message=_DEFAULT_AUTH_PROMPT_MESSAGE, authorization_code_message=_DEFAULT_AUTH_CODE_MESSAGE, **kwargs): """Run the flow using the console strategy. The console strategy instructs the user to open the authorization URL in their browser. Once the authorization is complete the authorization server will give the user a code. The user then must copy & paste this code into the application. The code is then exchanged for a token. Args: authorization_prompt_message (str): The message to display to tell the user to navigate to the authorization URL. authorization_code_message (str): The message to display when prompting the user for the authorization code. kwargs: Additional keyword arguments passed through to :meth:`authorization_url`. Returns: google.oauth2.credentials.Credentials: The OAuth 2.0 credentials for the user. """ kwargs.setdefault('prompt', 'consent') self.redirect_uri = self._OOB_REDIRECT_URI auth_url, _ = self.authorization_url(**kwargs) print(authorization_prompt_message.format(url=auth_url)) code = input(authorization_code_message) self.fetch_token(code=code) return self.credentials
python
def run_console( self, authorization_prompt_message=_DEFAULT_AUTH_PROMPT_MESSAGE, authorization_code_message=_DEFAULT_AUTH_CODE_MESSAGE, **kwargs): """Run the flow using the console strategy. The console strategy instructs the user to open the authorization URL in their browser. Once the authorization is complete the authorization server will give the user a code. The user then must copy & paste this code into the application. The code is then exchanged for a token. Args: authorization_prompt_message (str): The message to display to tell the user to navigate to the authorization URL. authorization_code_message (str): The message to display when prompting the user for the authorization code. kwargs: Additional keyword arguments passed through to :meth:`authorization_url`. Returns: google.oauth2.credentials.Credentials: The OAuth 2.0 credentials for the user. """ kwargs.setdefault('prompt', 'consent') self.redirect_uri = self._OOB_REDIRECT_URI auth_url, _ = self.authorization_url(**kwargs) print(authorization_prompt_message.format(url=auth_url)) code = input(authorization_code_message) self.fetch_token(code=code) return self.credentials
[ "def", "run_console", "(", "self", ",", "authorization_prompt_message", "=", "_DEFAULT_AUTH_PROMPT_MESSAGE", ",", "authorization_code_message", "=", "_DEFAULT_AUTH_CODE_MESSAGE", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "'prompt'", ",", "'consent'", ")", "self", ".", "redirect_uri", "=", "self", ".", "_OOB_REDIRECT_URI", "auth_url", ",", "_", "=", "self", ".", "authorization_url", "(", "*", "*", "kwargs", ")", "print", "(", "authorization_prompt_message", ".", "format", "(", "url", "=", "auth_url", ")", ")", "code", "=", "input", "(", "authorization_code_message", ")", "self", ".", "fetch_token", "(", "code", "=", "code", ")", "return", "self", ".", "credentials" ]
Run the flow using the console strategy. The console strategy instructs the user to open the authorization URL in their browser. Once the authorization is complete the authorization server will give the user a code. The user then must copy & paste this code into the application. The code is then exchanged for a token. Args: authorization_prompt_message (str): The message to display to tell the user to navigate to the authorization URL. authorization_code_message (str): The message to display when prompting the user for the authorization code. kwargs: Additional keyword arguments passed through to :meth:`authorization_url`. Returns: google.oauth2.credentials.Credentials: The OAuth 2.0 credentials for the user.
[ "Run", "the", "flow", "using", "the", "console", "strategy", "." ]
ba826565994cf20c073d79f534036747fdef2041
https://github.com/googleapis/google-auth-library-python-oauthlib/blob/ba826565994cf20c073d79f534036747fdef2041/google_auth_oauthlib/flow.py#L330-L366
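Typical use of the console strategy, as a hedged sketch (file name and scope are placeholders):

```python
# Sketch only: suited to headless machines where no local browser/redirect works.
from google_auth_oauthlib.flow import InstalledAppFlow

flow = InstalledAppFlow.from_client_secrets_file(
    'client_secrets.json',
    scopes=['https://www.googleapis.com/auth/userinfo.email'])
credentials = flow.run_console()
```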
233,629
googleapis/google-auth-library-python-oauthlib
google_auth_oauthlib/flow.py
InstalledAppFlow.run_local_server
def run_local_server( self, host='localhost', port=8080, authorization_prompt_message=_DEFAULT_AUTH_PROMPT_MESSAGE, success_message=_DEFAULT_WEB_SUCCESS_MESSAGE, open_browser=True, **kwargs): """Run the flow using the server strategy. The server strategy instructs the user to open the authorization URL in their browser and will attempt to automatically open the URL for them. It will start a local web server to listen for the authorization response. Once authorization is complete the authorization server will redirect the user's browser to the local web server. The web server will get the authorization code from the response and shutdown. The code is then exchanged for a token. Args: host (str): The hostname for the local redirect server. This will be served over http, not https. port (int): The port for the local redirect server. authorization_prompt_message (str): The message to display to tell the user to navigate to the authorization URL. success_message (str): The message to display in the web browser the authorization flow is complete. open_browser (bool): Whether or not to open the authorization URL in the user's browser. kwargs: Additional keyword arguments passed through to :meth:`authorization_url`. Returns: google.oauth2.credentials.Credentials: The OAuth 2.0 credentials for the user. """ self.redirect_uri = 'http://{}:{}/'.format(host, port) auth_url, _ = self.authorization_url(**kwargs) wsgi_app = _RedirectWSGIApp(success_message) local_server = wsgiref.simple_server.make_server( host, port, wsgi_app, handler_class=_WSGIRequestHandler) if open_browser: webbrowser.open(auth_url, new=1, autoraise=True) print(authorization_prompt_message.format(url=auth_url)) local_server.handle_request() # Note: using https here because oauthlib is very picky that # OAuth 2.0 should only occur over https. authorization_response = wsgi_app.last_request_uri.replace( 'http', 'https') self.fetch_token(authorization_response=authorization_response) return self.credentials
python
def run_local_server( self, host='localhost', port=8080, authorization_prompt_message=_DEFAULT_AUTH_PROMPT_MESSAGE, success_message=_DEFAULT_WEB_SUCCESS_MESSAGE, open_browser=True, **kwargs): """Run the flow using the server strategy. The server strategy instructs the user to open the authorization URL in their browser and will attempt to automatically open the URL for them. It will start a local web server to listen for the authorization response. Once authorization is complete the authorization server will redirect the user's browser to the local web server. The web server will get the authorization code from the response and shutdown. The code is then exchanged for a token. Args: host (str): The hostname for the local redirect server. This will be served over http, not https. port (int): The port for the local redirect server. authorization_prompt_message (str): The message to display to tell the user to navigate to the authorization URL. success_message (str): The message to display in the web browser the authorization flow is complete. open_browser (bool): Whether or not to open the authorization URL in the user's browser. kwargs: Additional keyword arguments passed through to :meth:`authorization_url`. Returns: google.oauth2.credentials.Credentials: The OAuth 2.0 credentials for the user. """ self.redirect_uri = 'http://{}:{}/'.format(host, port) auth_url, _ = self.authorization_url(**kwargs) wsgi_app = _RedirectWSGIApp(success_message) local_server = wsgiref.simple_server.make_server( host, port, wsgi_app, handler_class=_WSGIRequestHandler) if open_browser: webbrowser.open(auth_url, new=1, autoraise=True) print(authorization_prompt_message.format(url=auth_url)) local_server.handle_request() # Note: using https here because oauthlib is very picky that # OAuth 2.0 should only occur over https. authorization_response = wsgi_app.last_request_uri.replace( 'http', 'https') self.fetch_token(authorization_response=authorization_response) return self.credentials
[ "def", "run_local_server", "(", "self", ",", "host", "=", "'localhost'", ",", "port", "=", "8080", ",", "authorization_prompt_message", "=", "_DEFAULT_AUTH_PROMPT_MESSAGE", ",", "success_message", "=", "_DEFAULT_WEB_SUCCESS_MESSAGE", ",", "open_browser", "=", "True", ",", "*", "*", "kwargs", ")", ":", "self", ".", "redirect_uri", "=", "'http://{}:{}/'", ".", "format", "(", "host", ",", "port", ")", "auth_url", ",", "_", "=", "self", ".", "authorization_url", "(", "*", "*", "kwargs", ")", "wsgi_app", "=", "_RedirectWSGIApp", "(", "success_message", ")", "local_server", "=", "wsgiref", ".", "simple_server", ".", "make_server", "(", "host", ",", "port", ",", "wsgi_app", ",", "handler_class", "=", "_WSGIRequestHandler", ")", "if", "open_browser", ":", "webbrowser", ".", "open", "(", "auth_url", ",", "new", "=", "1", ",", "autoraise", "=", "True", ")", "print", "(", "authorization_prompt_message", ".", "format", "(", "url", "=", "auth_url", ")", ")", "local_server", ".", "handle_request", "(", ")", "# Note: using https here because oauthlib is very picky that", "# OAuth 2.0 should only occur over https.", "authorization_response", "=", "wsgi_app", ".", "last_request_uri", ".", "replace", "(", "'http'", ",", "'https'", ")", "self", ".", "fetch_token", "(", "authorization_response", "=", "authorization_response", ")", "return", "self", ".", "credentials" ]
Run the flow using the server strategy. The server strategy instructs the user to open the authorization URL in their browser and will attempt to automatically open the URL for them. It will start a local web server to listen for the authorization response. Once authorization is complete the authorization server will redirect the user's browser to the local web server. The web server will get the authorization code from the response and shut down. The code is then exchanged for a token. Args: host (str): The hostname for the local redirect server. This will be served over http, not https. port (int): The port for the local redirect server. authorization_prompt_message (str): The message to display to tell the user to navigate to the authorization URL. success_message (str): The message to display in the web browser when the authorization flow is complete. open_browser (bool): Whether or not to open the authorization URL in the user's browser. kwargs: Additional keyword arguments passed through to :meth:`authorization_url`. Returns: google.oauth2.credentials.Credentials: The OAuth 2.0 credentials for the user.
[ "Run", "the", "flow", "using", "the", "server", "strategy", "." ]
ba826565994cf20c073d79f534036747fdef2041
https://github.com/googleapis/google-auth-library-python-oauthlib/blob/ba826565994cf20c073d79f534036747fdef2041/google_auth_oauthlib/flow.py#L368-L422
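The equivalent local-server usage, again as a hedged sketch with placeholder file name and scope:

```python
# Sketch only: opens the system browser and waits on http://localhost:8080/.
from google_auth_oauthlib.flow import InstalledAppFlow

flow = InstalledAppFlow.from_client_secrets_file(
    'client_secrets.json',
    scopes=['https://www.googleapis.com/auth/userinfo.email'])
credentials = flow.run_local_server(port=8080)
```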
233,630
PyFilesystem/pyfilesystem2
fs/opener/registry.py
Registry.install
def install(self, opener): # type: (Union[Type[Opener], Opener, Callable[[], Opener]]) -> None """Install an opener. Arguments: opener (`Opener`): an `Opener` instance, or a callable that returns an opener instance. Note: May be used as a class decorator. For example:: registry = Registry() @registry.install class ArchiveOpener(Opener): protocols = ['zip', 'tar'] """ _opener = opener if isinstance(opener, Opener) else opener() assert isinstance(_opener, Opener), "Opener instance required" assert _opener.protocols, "must list one or more protocols" for protocol in _opener.protocols: self._protocols[protocol] = _opener return opener
python
def install(self, opener): # type: (Union[Type[Opener], Opener, Callable[[], Opener]]) -> None """Install an opener. Arguments: opener (`Opener`): an `Opener` instance, or a callable that returns an opener instance. Note: May be used as a class decorator. For example:: registry = Registry() @registry.install class ArchiveOpener(Opener): protocols = ['zip', 'tar'] """ _opener = opener if isinstance(opener, Opener) else opener() assert isinstance(_opener, Opener), "Opener instance required" assert _opener.protocols, "must list one or more protocols" for protocol in _opener.protocols: self._protocols[protocol] = _opener return opener
[ "def", "install", "(", "self", ",", "opener", ")", ":", "# type: (Union[Type[Opener], Opener, Callable[[], Opener]]) -> None", "_opener", "=", "opener", "if", "isinstance", "(", "opener", ",", "Opener", ")", "else", "opener", "(", ")", "assert", "isinstance", "(", "_opener", ",", "Opener", ")", ",", "\"Opener instance required\"", "assert", "_opener", ".", "protocols", ",", "\"must list one or more protocols\"", "for", "protocol", "in", "_opener", ".", "protocols", ":", "self", ".", "_protocols", "[", "protocol", "]", "=", "_opener", "return", "opener" ]
Install an opener. Arguments: opener (`Opener`): an `Opener` instance, or a callable that returns an opener instance. Note: May be used as a class decorator. For example:: registry = Registry() @registry.install class ArchiveOpener(Opener): protocols = ['zip', 'tar']
[ "Install", "an", "opener", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/opener/registry.py#L59-L79
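A hedged sketch of the decorator form shown in the docstring; `HelloOpener`, the `hello` protocol, and the MemoryFS backing are all invented for illustration:

```python
# Sketch only: HelloOpener and the 'hello' protocol are made up for this example.
from fs.memoryfs import MemoryFS
from fs.opener import Opener, registry


@registry.install
class HelloOpener(Opener):
    protocols = ['hello']

    def open_fs(self, fs_url, parse_result, writeable, create, cwd):
        return MemoryFS()
```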
233,631
PyFilesystem/pyfilesystem2
fs/opener/registry.py
Registry.get_opener
def get_opener(self, protocol): # type: (Text) -> Opener """Get the opener class associated to a given protocol. Arguments: protocol (str): A filesystem protocol. Returns: Opener: an opener instance. Raises: ~fs.opener.errors.UnsupportedProtocol: If no opener could be found for the given protocol. EntryPointLoadingError: If the returned entry point is not an `Opener` subclass or could not be loaded successfully. """ protocol = protocol or self.default_opener if self.load_extern: entry_point = next( pkg_resources.iter_entry_points("fs.opener", protocol), None ) else: entry_point = None # If not entry point was loaded from the extensions, try looking # into the registered protocols if entry_point is None: if protocol in self._protocols: opener_instance = self._protocols[protocol] else: raise UnsupportedProtocol( "protocol '{}' is not supported".format(protocol) ) # If an entry point was found in an extension, attempt to load it else: try: opener = entry_point.load() except Exception as exception: raise EntryPointError( "could not load entry point; {}".format(exception) ) if not issubclass(opener, Opener): raise EntryPointError("entry point did not return an opener") try: opener_instance = opener() except Exception as exception: raise EntryPointError( "could not instantiate opener; {}".format(exception) ) return opener_instance
python
def get_opener(self, protocol): # type: (Text) -> Opener """Get the opener class associated to a given protocol. Arguments: protocol (str): A filesystem protocol. Returns: Opener: an opener instance. Raises: ~fs.opener.errors.UnsupportedProtocol: If no opener could be found for the given protocol. EntryPointLoadingError: If the returned entry point is not an `Opener` subclass or could not be loaded successfully. """ protocol = protocol or self.default_opener if self.load_extern: entry_point = next( pkg_resources.iter_entry_points("fs.opener", protocol), None ) else: entry_point = None # If not entry point was loaded from the extensions, try looking # into the registered protocols if entry_point is None: if protocol in self._protocols: opener_instance = self._protocols[protocol] else: raise UnsupportedProtocol( "protocol '{}' is not supported".format(protocol) ) # If an entry point was found in an extension, attempt to load it else: try: opener = entry_point.load() except Exception as exception: raise EntryPointError( "could not load entry point; {}".format(exception) ) if not issubclass(opener, Opener): raise EntryPointError("entry point did not return an opener") try: opener_instance = opener() except Exception as exception: raise EntryPointError( "could not instantiate opener; {}".format(exception) ) return opener_instance
[ "def", "get_opener", "(", "self", ",", "protocol", ")", ":", "# type: (Text) -> Opener", "protocol", "=", "protocol", "or", "self", ".", "default_opener", "if", "self", ".", "load_extern", ":", "entry_point", "=", "next", "(", "pkg_resources", ".", "iter_entry_points", "(", "\"fs.opener\"", ",", "protocol", ")", ",", "None", ")", "else", ":", "entry_point", "=", "None", "# If not entry point was loaded from the extensions, try looking", "# into the registered protocols", "if", "entry_point", "is", "None", ":", "if", "protocol", "in", "self", ".", "_protocols", ":", "opener_instance", "=", "self", ".", "_protocols", "[", "protocol", "]", "else", ":", "raise", "UnsupportedProtocol", "(", "\"protocol '{}' is not supported\"", ".", "format", "(", "protocol", ")", ")", "# If an entry point was found in an extension, attempt to load it", "else", ":", "try", ":", "opener", "=", "entry_point", ".", "load", "(", ")", "except", "Exception", "as", "exception", ":", "raise", "EntryPointError", "(", "\"could not load entry point; {}\"", ".", "format", "(", "exception", ")", ")", "if", "not", "issubclass", "(", "opener", ",", "Opener", ")", ":", "raise", "EntryPointError", "(", "\"entry point did not return an opener\"", ")", "try", ":", "opener_instance", "=", "opener", "(", ")", "except", "Exception", "as", "exception", ":", "raise", "EntryPointError", "(", "\"could not instantiate opener; {}\"", ".", "format", "(", "exception", ")", ")", "return", "opener_instance" ]
Get the opener class associated with a given protocol. Arguments: protocol (str): A filesystem protocol. Returns: Opener: an opener instance. Raises: ~fs.opener.errors.UnsupportedProtocol: If no opener could be found for the given protocol. EntryPointError: If the returned entry point is not an `Opener` subclass or could not be loaded successfully.
[ "Get", "the", "opener", "class", "associated", "to", "a", "given", "protocol", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/opener/registry.py#L96-L151
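A small hedged sketch of looking up a registered opener, handling the unsupported-protocol case named in the docstring:

```python
# Sketch: look up the opener instance registered for a protocol.
from fs.opener import registry
from fs.opener.errors import UnsupportedProtocol

try:
    opener = registry.get_opener('osfs')
except UnsupportedProtocol:
    opener = None
```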
233,632
PyFilesystem/pyfilesystem2
fs/opener/registry.py
Registry.open
def open( self, fs_url, # type: Text writeable=True, # type: bool create=False, # type: bool cwd=".", # type: Text default_protocol="osfs", # type: Text ): # type: (...) -> Tuple[FS, Text] """Open a filesystem from a FS URL. Returns a tuple of a filesystem object and a path. If there is no path in the FS URL, the path value will be `None`. Arguments: fs_url (str): A filesystem URL. writeable (bool, optional): `True` if the filesystem must be writeable. create (bool, optional): `True` if the filesystem should be created if it does not exist. cwd (str): The current working directory. Returns: (FS, str): a tuple of ``(<filesystem>, <path from url>)`` """ if "://" not in fs_url: # URL may just be a path fs_url = "{}://{}".format(default_protocol, fs_url) parse_result = parse_fs_url(fs_url) protocol = parse_result.protocol open_path = parse_result.path opener = self.get_opener(protocol) open_fs = opener.open_fs(fs_url, parse_result, writeable, create, cwd) return open_fs, open_path
python
def open( self, fs_url, # type: Text writeable=True, # type: bool create=False, # type: bool cwd=".", # type: Text default_protocol="osfs", # type: Text ): # type: (...) -> Tuple[FS, Text] """Open a filesystem from a FS URL. Returns a tuple of a filesystem object and a path. If there is no path in the FS URL, the path value will be `None`. Arguments: fs_url (str): A filesystem URL. writeable (bool, optional): `True` if the filesystem must be writeable. create (bool, optional): `True` if the filesystem should be created if it does not exist. cwd (str): The current working directory. Returns: (FS, str): a tuple of ``(<filesystem>, <path from url>)`` """ if "://" not in fs_url: # URL may just be a path fs_url = "{}://{}".format(default_protocol, fs_url) parse_result = parse_fs_url(fs_url) protocol = parse_result.protocol open_path = parse_result.path opener = self.get_opener(protocol) open_fs = opener.open_fs(fs_url, parse_result, writeable, create, cwd) return open_fs, open_path
[ "def", "open", "(", "self", ",", "fs_url", ",", "# type: Text", "writeable", "=", "True", ",", "# type: bool", "create", "=", "False", ",", "# type: bool", "cwd", "=", "\".\"", ",", "# type: Text", "default_protocol", "=", "\"osfs\"", ",", "# type: Text", ")", ":", "# type: (...) -> Tuple[FS, Text]", "if", "\"://\"", "not", "in", "fs_url", ":", "# URL may just be a path", "fs_url", "=", "\"{}://{}\"", ".", "format", "(", "default_protocol", ",", "fs_url", ")", "parse_result", "=", "parse_fs_url", "(", "fs_url", ")", "protocol", "=", "parse_result", ".", "protocol", "open_path", "=", "parse_result", ".", "path", "opener", "=", "self", ".", "get_opener", "(", "protocol", ")", "open_fs", "=", "opener", ".", "open_fs", "(", "fs_url", ",", "parse_result", ",", "writeable", ",", "create", ",", "cwd", ")", "return", "open_fs", ",", "open_path" ]
Open a filesystem from a FS URL. Returns a tuple of a filesystem object and a path. If there is no path in the FS URL, the path value will be `None`. Arguments: fs_url (str): A filesystem URL. writeable (bool, optional): `True` if the filesystem must be writeable. create (bool, optional): `True` if the filesystem should be created if it does not exist. cwd (str): The current working directory. Returns: (FS, str): a tuple of ``(<filesystem>, <path from url>)``
[ "Open", "a", "filesystem", "from", "a", "FS", "URL", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/opener/registry.py#L153-L190
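A hedged sketch of calling this method directly; `mem://` and `temp://` are built-in protocols, and per the docstring the returned path is taken from the FS URL:

```python
# Sketch: open filesystems from FS URLs; bare paths fall back to 'osfs://'.
from fs.opener import registry

mem_fs, path = registry.open('mem://')   # `path` is the in-URL path, if any
tmp_fs, _ = registry.open('temp://')
```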
233,633
PyFilesystem/pyfilesystem2
fs/opener/registry.py
Registry.manage_fs
def manage_fs( self, fs_url, # type: Union[FS, Text] create=False, # type: bool writeable=False, # type: bool cwd=".", # type: Text ): # type: (...) -> Iterator[FS] """Get a context manager to open and close a filesystem. Arguments: fs_url (FS or str): A filesystem instance or a FS URL. create (bool, optional): If `True`, then create the filesystem if it doesn't already exist. writeable (bool, optional): If `True`, then the filesystem must be writeable. cwd (str): The current working directory, if opening a `~fs.osfs.OSFS`. Sometimes it is convenient to be able to pass either a FS object *or* an FS URL to a function. This context manager handles the required logic for that. Example: >>> def print_ls(list_fs): ... '''List a directory.''' ... with manage_fs(list_fs) as fs: ... print(' '.join(fs.listdir())) This function may be used in two ways. You may either pass a ``str``, as follows:: >>> print_list('zip://projects.zip') Or, an filesystem instance:: >>> from fs.osfs import OSFS >>> projects_fs = OSFS('~/') >>> print_list(projects_fs) """ from ..base import FS if isinstance(fs_url, FS): yield fs_url else: _fs = self.open_fs(fs_url, create=create, writeable=writeable, cwd=cwd) try: yield _fs except: raise finally: _fs.close()
python
def manage_fs( self, fs_url, # type: Union[FS, Text] create=False, # type: bool writeable=False, # type: bool cwd=".", # type: Text ): # type: (...) -> Iterator[FS] """Get a context manager to open and close a filesystem. Arguments: fs_url (FS or str): A filesystem instance or a FS URL. create (bool, optional): If `True`, then create the filesystem if it doesn't already exist. writeable (bool, optional): If `True`, then the filesystem must be writeable. cwd (str): The current working directory, if opening a `~fs.osfs.OSFS`. Sometimes it is convenient to be able to pass either a FS object *or* an FS URL to a function. This context manager handles the required logic for that. Example: >>> def print_ls(list_fs): ... '''List a directory.''' ... with manage_fs(list_fs) as fs: ... print(' '.join(fs.listdir())) This function may be used in two ways. You may either pass a ``str``, as follows:: >>> print_list('zip://projects.zip') Or, an filesystem instance:: >>> from fs.osfs import OSFS >>> projects_fs = OSFS('~/') >>> print_list(projects_fs) """ from ..base import FS if isinstance(fs_url, FS): yield fs_url else: _fs = self.open_fs(fs_url, create=create, writeable=writeable, cwd=cwd) try: yield _fs except: raise finally: _fs.close()
[ "def", "manage_fs", "(", "self", ",", "fs_url", ",", "# type: Union[FS, Text]", "create", "=", "False", ",", "# type: bool", "writeable", "=", "False", ",", "# type: bool", "cwd", "=", "\".\"", ",", "# type: Text", ")", ":", "# type: (...) -> Iterator[FS]", "from", ".", ".", "base", "import", "FS", "if", "isinstance", "(", "fs_url", ",", "FS", ")", ":", "yield", "fs_url", "else", ":", "_fs", "=", "self", ".", "open_fs", "(", "fs_url", ",", "create", "=", "create", ",", "writeable", "=", "writeable", ",", "cwd", "=", "cwd", ")", "try", ":", "yield", "_fs", "except", ":", "raise", "finally", ":", "_fs", ".", "close", "(", ")" ]
Get a context manager to open and close a filesystem. Arguments: fs_url (FS or str): A filesystem instance or a FS URL. create (bool, optional): If `True`, then create the filesystem if it doesn't already exist. writeable (bool, optional): If `True`, then the filesystem must be writeable. cwd (str): The current working directory, if opening a `~fs.osfs.OSFS`. Sometimes it is convenient to be able to pass either a FS object *or* an FS URL to a function. This context manager handles the required logic for that. Example: >>> def print_ls(list_fs): ... '''List a directory.''' ... with manage_fs(list_fs) as fs: ... print(' '.join(fs.listdir('/'))) This function may be used in two ways. You may either pass a ``str``, as follows:: >>> print_ls('zip://projects.zip') Or, a filesystem instance:: >>> from fs.osfs import OSFS >>> projects_fs = OSFS('~/') >>> print_ls(projects_fs)
[ "Get", "a", "context", "manager", "to", "open", "and", "close", "a", "filesystem", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/opener/registry.py#L233-L285
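A hedged, runnable sketch of the docstring's pattern, written against the registry instance whose method is shown above:

```python
# Sketch: accept either an FS instance or an FS URL, per the docstring example.
from fs.memoryfs import MemoryFS
from fs.opener import registry


def print_ls(list_fs):
    """List the root directory of an FS object or FS URL."""
    with registry.manage_fs(list_fs) as fs:
        print(' '.join(fs.listdir('/')))


print_ls('mem://')        # URL: opened here and closed on exit
print_ls(MemoryFS())      # instance: yielded as-is and left open for the caller
```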
233,634
PyFilesystem/pyfilesystem2
fs/copy.py
copy_fs
def copy_fs( src_fs, # type: Union[FS, Text] dst_fs, # type: Union[FS, Text] walker=None, # type: Optional[Walker] on_copy=None, # type: Optional[_OnCopy] workers=0, # type: int ): # type: (...) -> None """Copy the contents of one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (URL or instance). dst_fs (FS or str): Destination filesystem (URL or instance). walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable): A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use `worker` threads to copy data, or ``0`` (default) for a single-threaded copy. """ return copy_dir( src_fs, "/", dst_fs, "/", walker=walker, on_copy=on_copy, workers=workers )
python
def copy_fs( src_fs, # type: Union[FS, Text] dst_fs, # type: Union[FS, Text] walker=None, # type: Optional[Walker] on_copy=None, # type: Optional[_OnCopy] workers=0, # type: int ): # type: (...) -> None """Copy the contents of one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (URL or instance). dst_fs (FS or str): Destination filesystem (URL or instance). walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable): A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use `worker` threads to copy data, or ``0`` (default) for a single-threaded copy. """ return copy_dir( src_fs, "/", dst_fs, "/", walker=walker, on_copy=on_copy, workers=workers )
[ "def", "copy_fs", "(", "src_fs", ",", "# type: Union[FS, Text]", "dst_fs", ",", "# type: Union[FS, Text]", "walker", "=", "None", ",", "# type: Optional[Walker]", "on_copy", "=", "None", ",", "# type: Optional[_OnCopy]", "workers", "=", "0", ",", "# type: int", ")", ":", "# type: (...) -> None", "return", "copy_dir", "(", "src_fs", ",", "\"/\"", ",", "dst_fs", ",", "\"/\"", ",", "walker", "=", "walker", ",", "on_copy", "=", "on_copy", ",", "workers", "=", "workers", ")" ]
Copy the contents of one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (URL or instance). dst_fs (FS or str): Destination filesystem (URL or instance). walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable): A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use `worker` threads to copy data, or ``0`` (default) for a single-threaded copy.
[ "Copy", "the", "contents", "of", "one", "filesystem", "to", "another", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/copy.py#L22-L47
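A hedged sketch of copying one filesystem into another, with the `on_copy` callback used for logging; the source and destination URLs are placeholders:

```python
# Sketch: mirror the current directory into an in-memory filesystem,
# logging each file as it is copied.
from fs.copy import copy_fs


def log_copy(src_fs, src_path, dst_fs, dst_path):
    print('copied {} -> {}'.format(src_path, dst_path))


copy_fs('.', 'mem://', on_copy=log_copy)
```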
233,635
PyFilesystem/pyfilesystem2
fs/copy.py
copy_fs_if_newer
def copy_fs_if_newer( src_fs, # type: Union[FS, Text] dst_fs, # type: Union[FS, Text] walker=None, # type: Optional[Walker] on_copy=None, # type: Optional[_OnCopy] workers=0, # type: int ): # type: (...) -> None """Copy the contents of one filesystem to another, checking times. If both source and destination files exist, the copy is executed only if the source file is newer than the destination file. In case modification times of source or destination files are not available, copy file is always executed. Arguments: src_fs (FS or str): Source filesystem (URL or instance). dst_fs (FS or str): Destination filesystem (URL or instance). walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable):A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for a single-threaded copy. """ return copy_dir_if_newer( src_fs, "/", dst_fs, "/", walker=walker, on_copy=on_copy, workers=workers )
python
def copy_fs_if_newer( src_fs, # type: Union[FS, Text] dst_fs, # type: Union[FS, Text] walker=None, # type: Optional[Walker] on_copy=None, # type: Optional[_OnCopy] workers=0, # type: int ): # type: (...) -> None """Copy the contents of one filesystem to another, checking times. If both source and destination files exist, the copy is executed only if the source file is newer than the destination file. In case modification times of source or destination files are not available, copy file is always executed. Arguments: src_fs (FS or str): Source filesystem (URL or instance). dst_fs (FS or str): Destination filesystem (URL or instance). walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable):A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for a single-threaded copy. """ return copy_dir_if_newer( src_fs, "/", dst_fs, "/", walker=walker, on_copy=on_copy, workers=workers )
[ "def", "copy_fs_if_newer", "(", "src_fs", ",", "# type: Union[FS, Text]", "dst_fs", ",", "# type: Union[FS, Text]", "walker", "=", "None", ",", "# type: Optional[Walker]", "on_copy", "=", "None", ",", "# type: Optional[_OnCopy]", "workers", "=", "0", ",", "# type: int", ")", ":", "# type: (...) -> None", "return", "copy_dir_if_newer", "(", "src_fs", ",", "\"/\"", ",", "dst_fs", ",", "\"/\"", ",", "walker", "=", "walker", ",", "on_copy", "=", "on_copy", ",", "workers", "=", "workers", ")" ]
Copy the contents of one filesystem to another, checking times. If both source and destination files exist, the copy is executed only if the source file is newer than the destination file. In case modification times of source or destination files are not available, the copy is always executed. Arguments: src_fs (FS or str): Source filesystem (URL or instance). dst_fs (FS or str): Destination filesystem (URL or instance). walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable): A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for a single-threaded copy.
[ "Copy", "the", "contents", "of", "one", "filesystem", "to", "another", "checking", "times", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/copy.py#L50-L80
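The incremental variant differs only in the per-file modification-time check; a minimal hedged sketch with placeholder URLs:

```python
# Sketch: a second run only copies files whose source mtime is newer
# than the destination's.
from fs.copy import copy_fs_if_newer

copy_fs_if_newer('.', 'mem://')
```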
233,636
PyFilesystem/pyfilesystem2
fs/copy.py
_source_is_newer
def _source_is_newer(src_fs, src_path, dst_fs, dst_path): # type: (FS, Text, FS, Text) -> bool """Determine if source file is newer than destination file. Arguments: src_fs (FS): Source filesystem (instance or URL). src_path (str): Path to a file on the source filesystem. dst_fs (FS): Destination filesystem (instance or URL). dst_path (str): Path to a file on the destination filesystem. Returns: bool: `True` if the source file is newer than the destination file or file modification time cannot be determined, `False` otherwise. """ try: if dst_fs.exists(dst_path): namespace = ("details", "modified") src_modified = src_fs.getinfo(src_path, namespace).modified if src_modified is not None: dst_modified = dst_fs.getinfo(dst_path, namespace).modified return dst_modified is None or src_modified > dst_modified return True except FSError: # pragma: no cover # todo: should log something here return True
python
def _source_is_newer(src_fs, src_path, dst_fs, dst_path): # type: (FS, Text, FS, Text) -> bool """Determine if source file is newer than destination file. Arguments: src_fs (FS): Source filesystem (instance or URL). src_path (str): Path to a file on the source filesystem. dst_fs (FS): Destination filesystem (instance or URL). dst_path (str): Path to a file on the destination filesystem. Returns: bool: `True` if the source file is newer than the destination file or file modification time cannot be determined, `False` otherwise. """ try: if dst_fs.exists(dst_path): namespace = ("details", "modified") src_modified = src_fs.getinfo(src_path, namespace).modified if src_modified is not None: dst_modified = dst_fs.getinfo(dst_path, namespace).modified return dst_modified is None or src_modified > dst_modified return True except FSError: # pragma: no cover # todo: should log something here return True
[ "def", "_source_is_newer", "(", "src_fs", ",", "src_path", ",", "dst_fs", ",", "dst_path", ")", ":", "# type: (FS, Text, FS, Text) -> bool", "try", ":", "if", "dst_fs", ".", "exists", "(", "dst_path", ")", ":", "namespace", "=", "(", "\"details\"", ",", "\"modified\"", ")", "src_modified", "=", "src_fs", ".", "getinfo", "(", "src_path", ",", "namespace", ")", ".", "modified", "if", "src_modified", "is", "not", "None", ":", "dst_modified", "=", "dst_fs", ".", "getinfo", "(", "dst_path", ",", "namespace", ")", ".", "modified", "return", "dst_modified", "is", "None", "or", "src_modified", ">", "dst_modified", "return", "True", "except", "FSError", ":", "# pragma: no cover", "# todo: should log something here", "return", "True" ]
Determine if source file is newer than destination file. Arguments: src_fs (FS): Source filesystem (instance or URL). src_path (str): Path to a file on the source filesystem. dst_fs (FS): Destination filesystem (instance or URL). dst_path (str): Path to a file on the destination filesystem. Returns: bool: `True` if the source file is newer than the destination file or file modification time cannot be determined, `False` otherwise.
[ "Determine", "if", "source", "file", "is", "newer", "than", "destination", "file", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/copy.py#L83-L109
233,637
PyFilesystem/pyfilesystem2
fs/copy.py
copy_file
def copy_file( src_fs, # type: Union[FS, Text] src_path, # type: Text dst_fs, # type: Union[FS, Text] dst_path, # type: Text ): # type: (...) -> None """Copy a file from one filesystem to another. If the destination exists, and is a file, it will be first truncated. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a file on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a file on the destination filesystem. """ with manage_fs(src_fs, writeable=False) as _src_fs: with manage_fs(dst_fs, create=True) as _dst_fs: if _src_fs is _dst_fs: # Same filesystem, so we can do a potentially optimized # copy _src_fs.copy(src_path, dst_path, overwrite=True) else: # Standard copy with _src_fs.lock(), _dst_fs.lock(): if _dst_fs.hassyspath(dst_path): with _dst_fs.openbin(dst_path, "w") as write_file: _src_fs.download(src_path, write_file) else: with _src_fs.openbin(src_path) as read_file: _dst_fs.upload(dst_path, read_file)
python
def copy_file( src_fs, # type: Union[FS, Text] src_path, # type: Text dst_fs, # type: Union[FS, Text] dst_path, # type: Text ): # type: (...) -> None """Copy a file from one filesystem to another. If the destination exists, and is a file, it will be first truncated. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a file on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a file on the destination filesystem. """ with manage_fs(src_fs, writeable=False) as _src_fs: with manage_fs(dst_fs, create=True) as _dst_fs: if _src_fs is _dst_fs: # Same filesystem, so we can do a potentially optimized # copy _src_fs.copy(src_path, dst_path, overwrite=True) else: # Standard copy with _src_fs.lock(), _dst_fs.lock(): if _dst_fs.hassyspath(dst_path): with _dst_fs.openbin(dst_path, "w") as write_file: _src_fs.download(src_path, write_file) else: with _src_fs.openbin(src_path) as read_file: _dst_fs.upload(dst_path, read_file)
[ "def", "copy_file", "(", "src_fs", ",", "# type: Union[FS, Text]", "src_path", ",", "# type: Text", "dst_fs", ",", "# type: Union[FS, Text]", "dst_path", ",", "# type: Text", ")", ":", "# type: (...) -> None", "with", "manage_fs", "(", "src_fs", ",", "writeable", "=", "False", ")", "as", "_src_fs", ":", "with", "manage_fs", "(", "dst_fs", ",", "create", "=", "True", ")", "as", "_dst_fs", ":", "if", "_src_fs", "is", "_dst_fs", ":", "# Same filesystem, so we can do a potentially optimized", "# copy", "_src_fs", ".", "copy", "(", "src_path", ",", "dst_path", ",", "overwrite", "=", "True", ")", "else", ":", "# Standard copy", "with", "_src_fs", ".", "lock", "(", ")", ",", "_dst_fs", ".", "lock", "(", ")", ":", "if", "_dst_fs", ".", "hassyspath", "(", "dst_path", ")", ":", "with", "_dst_fs", ".", "openbin", "(", "dst_path", ",", "\"w\"", ")", "as", "write_file", ":", "_src_fs", ".", "download", "(", "src_path", ",", "write_file", ")", "else", ":", "with", "_src_fs", ".", "openbin", "(", "src_path", ")", "as", "read_file", ":", "_dst_fs", ".", "upload", "(", "dst_path", ",", "read_file", ")" ]
Copy a file from one filesystem to another. If the destination exists, and is a file, it will be first truncated. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a file on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a file on the destination filesystem.
[ "Copy", "a", "file", "from", "one", "filesystem", "to", "another", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/copy.py#L112-L144
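A hedged, self-contained sketch of a single-file copy between two in-memory filesystems; file names and content are placeholders:

```python
# Sketch: copy one file between two filesystems (instances or FS URLs).
from fs.copy import copy_file
from fs.memoryfs import MemoryFS

src = MemoryFS()
src.writetext('report.txt', 'hello')          # placeholder content
dst = MemoryFS()

copy_file(src, 'report.txt', dst, 'report-copy.txt')
print(dst.readtext('report-copy.txt'))
```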
233,638
PyFilesystem/pyfilesystem2
fs/copy.py
copy_file_internal
def copy_file_internal( src_fs, # type: FS src_path, # type: Text dst_fs, # type: FS dst_path, # type: Text ): # type: (...) -> None """Low level copy, that doesn't call manage_fs or lock. If the destination exists, and is a file, it will be first truncated. This method exists to optimize copying in loops. In general you should prefer `copy_file`. Arguments: src_fs (FS): Source filesystem. src_path (str): Path to a file on the source filesystem. dst_fs (FS: Destination filesystem. dst_path (str): Path to a file on the destination filesystem. """ if src_fs is dst_fs: # Same filesystem, so we can do a potentially optimized # copy src_fs.copy(src_path, dst_path, overwrite=True) elif dst_fs.hassyspath(dst_path): with dst_fs.openbin(dst_path, "w") as write_file: src_fs.download(src_path, write_file) else: with src_fs.openbin(src_path) as read_file: dst_fs.upload(dst_path, read_file)
python
def copy_file_internal( src_fs, # type: FS src_path, # type: Text dst_fs, # type: FS dst_path, # type: Text ): # type: (...) -> None """Low level copy, that doesn't call manage_fs or lock. If the destination exists, and is a file, it will be first truncated. This method exists to optimize copying in loops. In general you should prefer `copy_file`. Arguments: src_fs (FS): Source filesystem. src_path (str): Path to a file on the source filesystem. dst_fs (FS: Destination filesystem. dst_path (str): Path to a file on the destination filesystem. """ if src_fs is dst_fs: # Same filesystem, so we can do a potentially optimized # copy src_fs.copy(src_path, dst_path, overwrite=True) elif dst_fs.hassyspath(dst_path): with dst_fs.openbin(dst_path, "w") as write_file: src_fs.download(src_path, write_file) else: with src_fs.openbin(src_path) as read_file: dst_fs.upload(dst_path, read_file)
[ "def", "copy_file_internal", "(", "src_fs", ",", "# type: FS", "src_path", ",", "# type: Text", "dst_fs", ",", "# type: FS", "dst_path", ",", "# type: Text", ")", ":", "# type: (...) -> None", "if", "src_fs", "is", "dst_fs", ":", "# Same filesystem, so we can do a potentially optimized", "# copy", "src_fs", ".", "copy", "(", "src_path", ",", "dst_path", ",", "overwrite", "=", "True", ")", "elif", "dst_fs", ".", "hassyspath", "(", "dst_path", ")", ":", "with", "dst_fs", ".", "openbin", "(", "dst_path", ",", "\"w\"", ")", "as", "write_file", ":", "src_fs", ".", "download", "(", "src_path", ",", "write_file", ")", "else", ":", "with", "src_fs", ".", "openbin", "(", "src_path", ")", "as", "read_file", ":", "dst_fs", ".", "upload", "(", "dst_path", ",", "read_file", ")" ]
Low-level copy that doesn't call manage_fs or lock. If the destination exists, and is a file, it will be first truncated. This method exists to optimize copying in loops. In general you should prefer `copy_file`. Arguments: src_fs (FS): Source filesystem. src_path (str): Path to a file on the source filesystem. dst_fs (FS): Destination filesystem. dst_path (str): Path to a file on the destination filesystem.
[ "Low", "level", "copy", "that", "doesn", "t", "call", "manage_fs", "or", "lock", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/copy.py#L147-L177
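A hedged sketch of the loop-optimised pattern the docstring describes: open and lock the filesystems once, then call the low-level copy per file (filesystems and file names are placeholders):

```python
# Sketch: hold the locks around the loop instead of per copy.
from fs.copy import copy_file_internal
from fs.memoryfs import MemoryFS

src, dst = MemoryFS(), MemoryFS()
src.writetext('a.txt', 'a')
src.writetext('b.txt', 'b')

with src.lock(), dst.lock():
    for name in src.listdir('/'):
        copy_file_internal(src, name, dst, name)
```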
233,639
PyFilesystem/pyfilesystem2
fs/copy.py
copy_file_if_newer
def copy_file_if_newer( src_fs, # type: Union[FS, Text] src_path, # type: Text dst_fs, # type: Union[FS, Text] dst_path, # type: Text ): # type: (...) -> bool """Copy a file from one filesystem to another, checking times. If the destination exists, and is a file, it will be first truncated. If both source and destination files exist, the copy is executed only if the source file is newer than the destination file. In case modification times of source or destination files are not available, copy is always executed. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a file on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a file on the destination filesystem. Returns: bool: `True` if the file copy was executed, `False` otherwise. """ with manage_fs(src_fs, writeable=False) as _src_fs: with manage_fs(dst_fs, create=True) as _dst_fs: if _src_fs is _dst_fs: # Same filesystem, so we can do a potentially optimized # copy if _source_is_newer(_src_fs, src_path, _dst_fs, dst_path): _src_fs.copy(src_path, dst_path, overwrite=True) return True else: return False else: # Standard copy with _src_fs.lock(), _dst_fs.lock(): if _source_is_newer(_src_fs, src_path, _dst_fs, dst_path): copy_file_internal(_src_fs, src_path, _dst_fs, dst_path) return True else: return False
python
def copy_file_if_newer( src_fs, # type: Union[FS, Text] src_path, # type: Text dst_fs, # type: Union[FS, Text] dst_path, # type: Text ): # type: (...) -> bool """Copy a file from one filesystem to another, checking times. If the destination exists, and is a file, it will be first truncated. If both source and destination files exist, the copy is executed only if the source file is newer than the destination file. In case modification times of source or destination files are not available, copy is always executed. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a file on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a file on the destination filesystem. Returns: bool: `True` if the file copy was executed, `False` otherwise. """ with manage_fs(src_fs, writeable=False) as _src_fs: with manage_fs(dst_fs, create=True) as _dst_fs: if _src_fs is _dst_fs: # Same filesystem, so we can do a potentially optimized # copy if _source_is_newer(_src_fs, src_path, _dst_fs, dst_path): _src_fs.copy(src_path, dst_path, overwrite=True) return True else: return False else: # Standard copy with _src_fs.lock(), _dst_fs.lock(): if _source_is_newer(_src_fs, src_path, _dst_fs, dst_path): copy_file_internal(_src_fs, src_path, _dst_fs, dst_path) return True else: return False
[ "def", "copy_file_if_newer", "(", "src_fs", ",", "# type: Union[FS, Text]", "src_path", ",", "# type: Text", "dst_fs", ",", "# type: Union[FS, Text]", "dst_path", ",", "# type: Text", ")", ":", "# type: (...) -> bool", "with", "manage_fs", "(", "src_fs", ",", "writeable", "=", "False", ")", "as", "_src_fs", ":", "with", "manage_fs", "(", "dst_fs", ",", "create", "=", "True", ")", "as", "_dst_fs", ":", "if", "_src_fs", "is", "_dst_fs", ":", "# Same filesystem, so we can do a potentially optimized", "# copy", "if", "_source_is_newer", "(", "_src_fs", ",", "src_path", ",", "_dst_fs", ",", "dst_path", ")", ":", "_src_fs", ".", "copy", "(", "src_path", ",", "dst_path", ",", "overwrite", "=", "True", ")", "return", "True", "else", ":", "return", "False", "else", ":", "# Standard copy", "with", "_src_fs", ".", "lock", "(", ")", ",", "_dst_fs", ".", "lock", "(", ")", ":", "if", "_source_is_newer", "(", "_src_fs", ",", "src_path", ",", "_dst_fs", ",", "dst_path", ")", ":", "copy_file_internal", "(", "_src_fs", ",", "src_path", ",", "_dst_fs", ",", "dst_path", ")", "return", "True", "else", ":", "return", "False" ]
Copy a file from one filesystem to another, checking times. If the destination exists, and is a file, it will be first truncated. If both source and destination files exist, the copy is executed only if the source file is newer than the destination file. In case modification times of source or destination files are not available, copy is always executed. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a file on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a file on the destination filesystem. Returns: bool: `True` if the file copy was executed, `False` otherwise.
[ "Copy", "a", "file", "from", "one", "filesystem", "to", "another", "checking", "times", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/copy.py#L180-L222
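A quick sketch of the boolean contract of copy_file_if_newer, using two in-memory filesystems (file name and contents are illustrative). Whether the second call returns False depends on both filesystems exposing comparable modification times, as the docstring notes.

import fs
from fs.copy import copy_file_if_newer

src = fs.open_fs("mem://")
dst = fs.open_fs("mem://")
src.writetext("notes.txt", "v1")

print(copy_file_if_newer(src, "/notes.txt", dst, "/notes.txt"))  # True: destination missing, so the copy runs
# Second call: the destination now exists and is at least as new as the source,
# so no copy should run (False) -- unless modification times are unavailable,
# in which case the function always copies.
print(copy_file_if_newer(src, "/notes.txt", dst, "/notes.txt"))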
233,640
PyFilesystem/pyfilesystem2
fs/copy.py
copy_dir
def copy_dir( src_fs, # type: Union[FS, Text] src_path, # type: Text dst_fs, # type: Union[FS, Text] dst_path, # type: Text walker=None, # type: Optional[Walker] on_copy=None, # type: Optional[_OnCopy] workers=0, # type: int ): # type: (...) -> None """Copy a directory from one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a directory on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a directory on the destination filesystem. walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable, optional): A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for a single-threaded copy. """ on_copy = on_copy or (lambda *args: None) walker = walker or Walker() _src_path = abspath(normpath(src_path)) _dst_path = abspath(normpath(dst_path)) def src(): return manage_fs(src_fs, writeable=False) def dst(): return manage_fs(dst_fs, create=True) from ._bulk import Copier with src() as _src_fs, dst() as _dst_fs: with _src_fs.lock(), _dst_fs.lock(): _thread_safe = is_thread_safe(_src_fs, _dst_fs) with Copier(num_workers=workers if _thread_safe else 0) as copier: _dst_fs.makedir(_dst_path, recreate=True) for dir_path, dirs, files in walker.walk(_src_fs, _src_path): copy_path = combine(_dst_path, frombase(_src_path, dir_path)) for info in dirs: _dst_fs.makedir(info.make_path(copy_path), recreate=True) for info in files: src_path = info.make_path(dir_path) dst_path = info.make_path(copy_path) copier.copy(_src_fs, src_path, _dst_fs, dst_path) on_copy(_src_fs, src_path, _dst_fs, dst_path)
python
def copy_dir( src_fs, # type: Union[FS, Text] src_path, # type: Text dst_fs, # type: Union[FS, Text] dst_path, # type: Text walker=None, # type: Optional[Walker] on_copy=None, # type: Optional[_OnCopy] workers=0, # type: int ): # type: (...) -> None """Copy a directory from one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a directory on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a directory on the destination filesystem. walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable, optional): A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for a single-threaded copy. """ on_copy = on_copy or (lambda *args: None) walker = walker or Walker() _src_path = abspath(normpath(src_path)) _dst_path = abspath(normpath(dst_path)) def src(): return manage_fs(src_fs, writeable=False) def dst(): return manage_fs(dst_fs, create=True) from ._bulk import Copier with src() as _src_fs, dst() as _dst_fs: with _src_fs.lock(), _dst_fs.lock(): _thread_safe = is_thread_safe(_src_fs, _dst_fs) with Copier(num_workers=workers if _thread_safe else 0) as copier: _dst_fs.makedir(_dst_path, recreate=True) for dir_path, dirs, files in walker.walk(_src_fs, _src_path): copy_path = combine(_dst_path, frombase(_src_path, dir_path)) for info in dirs: _dst_fs.makedir(info.make_path(copy_path), recreate=True) for info in files: src_path = info.make_path(dir_path) dst_path = info.make_path(copy_path) copier.copy(_src_fs, src_path, _dst_fs, dst_path) on_copy(_src_fs, src_path, _dst_fs, dst_path)
[ "def", "copy_dir", "(", "src_fs", ",", "# type: Union[FS, Text]", "src_path", ",", "# type: Text", "dst_fs", ",", "# type: Union[FS, Text]", "dst_path", ",", "# type: Text", "walker", "=", "None", ",", "# type: Optional[Walker]", "on_copy", "=", "None", ",", "# type: Optional[_OnCopy]", "workers", "=", "0", ",", "# type: int", ")", ":", "# type: (...) -> None", "on_copy", "=", "on_copy", "or", "(", "lambda", "*", "args", ":", "None", ")", "walker", "=", "walker", "or", "Walker", "(", ")", "_src_path", "=", "abspath", "(", "normpath", "(", "src_path", ")", ")", "_dst_path", "=", "abspath", "(", "normpath", "(", "dst_path", ")", ")", "def", "src", "(", ")", ":", "return", "manage_fs", "(", "src_fs", ",", "writeable", "=", "False", ")", "def", "dst", "(", ")", ":", "return", "manage_fs", "(", "dst_fs", ",", "create", "=", "True", ")", "from", ".", "_bulk", "import", "Copier", "with", "src", "(", ")", "as", "_src_fs", ",", "dst", "(", ")", "as", "_dst_fs", ":", "with", "_src_fs", ".", "lock", "(", ")", ",", "_dst_fs", ".", "lock", "(", ")", ":", "_thread_safe", "=", "is_thread_safe", "(", "_src_fs", ",", "_dst_fs", ")", "with", "Copier", "(", "num_workers", "=", "workers", "if", "_thread_safe", "else", "0", ")", "as", "copier", ":", "_dst_fs", ".", "makedir", "(", "_dst_path", ",", "recreate", "=", "True", ")", "for", "dir_path", ",", "dirs", ",", "files", "in", "walker", ".", "walk", "(", "_src_fs", ",", "_src_path", ")", ":", "copy_path", "=", "combine", "(", "_dst_path", ",", "frombase", "(", "_src_path", ",", "dir_path", ")", ")", "for", "info", "in", "dirs", ":", "_dst_fs", ".", "makedir", "(", "info", ".", "make_path", "(", "copy_path", ")", ",", "recreate", "=", "True", ")", "for", "info", "in", "files", ":", "src_path", "=", "info", ".", "make_path", "(", "dir_path", ")", "dst_path", "=", "info", ".", "make_path", "(", "copy_path", ")", "copier", ".", "copy", "(", "_src_fs", ",", "src_path", ",", "_dst_fs", ",", "dst_path", ")", "on_copy", "(", "_src_fs", ",", "src_path", ",", "_dst_fs", ",", "dst_path", ")" ]
Copy a directory from one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a directory on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a directory on the destination filesystem. walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable, optional): A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for a single-threaded copy.
[ "Copy", "a", "directory", "from", "one", "filesystem", "to", "another", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/copy.py#L249-L302
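A usage sketch for copy_dir showing the walker and on_copy hooks from the docstring; the package layout, file names and the log_copy callback are made up for illustration.

import fs
from fs.copy import copy_dir
from fs.walk import Walker

src = fs.open_fs("mem://")
src.makedirs("pkg/tests")
src.writetext("pkg/__init__.py", "")
src.writetext("pkg/tests/test_pkg.py", "def test_ok():\n    assert True\n")
src.writetext("pkg/readme.md", "# pkg\n")
dst = fs.open_fs("mem://")

def log_copy(src_fs, src_path, dst_fs, dst_path):
    print("copied", src_path, "->", dst_path)

# Only *.py files are considered because of the walker filter; directories
# are still recreated on the destination side.
copy_dir(src, "/pkg", dst, "/backup", walker=Walker(filter=["*.py"]), on_copy=log_copy)
print(sorted(dst.walk.files()))  # ['/backup/__init__.py', '/backup/tests/test_pkg.py']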
233,641
PyFilesystem/pyfilesystem2
fs/copy.py
copy_dir_if_newer
def copy_dir_if_newer( src_fs, # type: Union[FS, Text] src_path, # type: Text dst_fs, # type: Union[FS, Text] dst_path, # type: Text walker=None, # type: Optional[Walker] on_copy=None, # type: Optional[_OnCopy] workers=0, # type: int ): # type: (...) -> None """Copy a directory from one filesystem to another, checking times. If both source and destination files exist, the copy is executed only if the source file is newer than the destination file. In case modification times of source or destination files are not available, copy is always executed. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a directory on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a directory on the destination filesystem. walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable, optional): A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for a single-threaded copy. """ on_copy = on_copy or (lambda *args: None) walker = walker or Walker() _src_path = abspath(normpath(src_path)) _dst_path = abspath(normpath(dst_path)) def src(): return manage_fs(src_fs, writeable=False) def dst(): return manage_fs(dst_fs, create=True) from ._bulk import Copier with src() as _src_fs, dst() as _dst_fs: with _src_fs.lock(), _dst_fs.lock(): _thread_safe = is_thread_safe(_src_fs, _dst_fs) with Copier(num_workers=workers if _thread_safe else 0) as copier: _dst_fs.makedir(_dst_path, recreate=True) namespace = ("details", "modified") dst_state = { path: info for path, info in walker.info(_dst_fs, _dst_path, namespace) if info.is_file } src_state = [ (path, info) for path, info in walker.info(_src_fs, _src_path, namespace) ] for dir_path, copy_info in src_state: copy_path = combine(_dst_path, frombase(_src_path, dir_path)) if copy_info.is_dir: _dst_fs.makedir(copy_path, recreate=True) elif copy_info.is_file: # dst file is present, try to figure out if copy # is necessary try: src_modified = copy_info.modified dst_modified = dst_state[dir_path].modified except KeyError: do_copy = True else: do_copy = ( src_modified is None or dst_modified is None or src_modified > dst_modified ) if do_copy: copier.copy(_src_fs, dir_path, _dst_fs, copy_path) on_copy(_src_fs, dir_path, _dst_fs, copy_path)
python
def copy_dir_if_newer( src_fs, # type: Union[FS, Text] src_path, # type: Text dst_fs, # type: Union[FS, Text] dst_path, # type: Text walker=None, # type: Optional[Walker] on_copy=None, # type: Optional[_OnCopy] workers=0, # type: int ): # type: (...) -> None """Copy a directory from one filesystem to another, checking times. If both source and destination files exist, the copy is executed only if the source file is newer than the destination file. In case modification times of source or destination files are not available, copy is always executed. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a directory on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a directory on the destination filesystem. walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable, optional): A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for a single-threaded copy. """ on_copy = on_copy or (lambda *args: None) walker = walker or Walker() _src_path = abspath(normpath(src_path)) _dst_path = abspath(normpath(dst_path)) def src(): return manage_fs(src_fs, writeable=False) def dst(): return manage_fs(dst_fs, create=True) from ._bulk import Copier with src() as _src_fs, dst() as _dst_fs: with _src_fs.lock(), _dst_fs.lock(): _thread_safe = is_thread_safe(_src_fs, _dst_fs) with Copier(num_workers=workers if _thread_safe else 0) as copier: _dst_fs.makedir(_dst_path, recreate=True) namespace = ("details", "modified") dst_state = { path: info for path, info in walker.info(_dst_fs, _dst_path, namespace) if info.is_file } src_state = [ (path, info) for path, info in walker.info(_src_fs, _src_path, namespace) ] for dir_path, copy_info in src_state: copy_path = combine(_dst_path, frombase(_src_path, dir_path)) if copy_info.is_dir: _dst_fs.makedir(copy_path, recreate=True) elif copy_info.is_file: # dst file is present, try to figure out if copy # is necessary try: src_modified = copy_info.modified dst_modified = dst_state[dir_path].modified except KeyError: do_copy = True else: do_copy = ( src_modified is None or dst_modified is None or src_modified > dst_modified ) if do_copy: copier.copy(_src_fs, dir_path, _dst_fs, copy_path) on_copy(_src_fs, dir_path, _dst_fs, copy_path)
[ "def", "copy_dir_if_newer", "(", "src_fs", ",", "# type: Union[FS, Text]", "src_path", ",", "# type: Text", "dst_fs", ",", "# type: Union[FS, Text]", "dst_path", ",", "# type: Text", "walker", "=", "None", ",", "# type: Optional[Walker]", "on_copy", "=", "None", ",", "# type: Optional[_OnCopy]", "workers", "=", "0", ",", "# type: int", ")", ":", "# type: (...) -> None", "on_copy", "=", "on_copy", "or", "(", "lambda", "*", "args", ":", "None", ")", "walker", "=", "walker", "or", "Walker", "(", ")", "_src_path", "=", "abspath", "(", "normpath", "(", "src_path", ")", ")", "_dst_path", "=", "abspath", "(", "normpath", "(", "dst_path", ")", ")", "def", "src", "(", ")", ":", "return", "manage_fs", "(", "src_fs", ",", "writeable", "=", "False", ")", "def", "dst", "(", ")", ":", "return", "manage_fs", "(", "dst_fs", ",", "create", "=", "True", ")", "from", ".", "_bulk", "import", "Copier", "with", "src", "(", ")", "as", "_src_fs", ",", "dst", "(", ")", "as", "_dst_fs", ":", "with", "_src_fs", ".", "lock", "(", ")", ",", "_dst_fs", ".", "lock", "(", ")", ":", "_thread_safe", "=", "is_thread_safe", "(", "_src_fs", ",", "_dst_fs", ")", "with", "Copier", "(", "num_workers", "=", "workers", "if", "_thread_safe", "else", "0", ")", "as", "copier", ":", "_dst_fs", ".", "makedir", "(", "_dst_path", ",", "recreate", "=", "True", ")", "namespace", "=", "(", "\"details\"", ",", "\"modified\"", ")", "dst_state", "=", "{", "path", ":", "info", "for", "path", ",", "info", "in", "walker", ".", "info", "(", "_dst_fs", ",", "_dst_path", ",", "namespace", ")", "if", "info", ".", "is_file", "}", "src_state", "=", "[", "(", "path", ",", "info", ")", "for", "path", ",", "info", "in", "walker", ".", "info", "(", "_src_fs", ",", "_src_path", ",", "namespace", ")", "]", "for", "dir_path", ",", "copy_info", "in", "src_state", ":", "copy_path", "=", "combine", "(", "_dst_path", ",", "frombase", "(", "_src_path", ",", "dir_path", ")", ")", "if", "copy_info", ".", "is_dir", ":", "_dst_fs", ".", "makedir", "(", "copy_path", ",", "recreate", "=", "True", ")", "elif", "copy_info", ".", "is_file", ":", "# dst file is present, try to figure out if copy", "# is necessary", "try", ":", "src_modified", "=", "copy_info", ".", "modified", "dst_modified", "=", "dst_state", "[", "dir_path", "]", ".", "modified", "except", "KeyError", ":", "do_copy", "=", "True", "else", ":", "do_copy", "=", "(", "src_modified", "is", "None", "or", "dst_modified", "is", "None", "or", "src_modified", ">", "dst_modified", ")", "if", "do_copy", ":", "copier", ".", "copy", "(", "_src_fs", ",", "dir_path", ",", "_dst_fs", ",", "copy_path", ")", "on_copy", "(", "_src_fs", ",", "dir_path", ",", "_dst_fs", ",", "copy_path", ")" ]
Copy a directory from one filesystem to another, checking times. If both source and destination files exist, the copy is executed only if the source file is newer than the destination file. In case modification times of source or destination files are not available, copy is always executed. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a directory on the source filesystem. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a directory on the destination filesystem. walker (~fs.walk.Walker, optional): A walker object that will be used to scan for files in ``src_fs``. Set this if you only want to consider a sub-set of the resources in ``src_fs``. on_copy (callable, optional): A function callback called after a single file copy is executed. Expected signature is ``(src_fs, src_path, dst_fs, dst_path)``. workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for a single-threaded copy.
[ "Copy", "a", "directory", "from", "one", "filesystem", "to", "another", "checking", "times", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/copy.py#L305-L386
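A short sketch of copy_dir_if_newer on in-memory filesystems (names are illustrative); whether the second pass re-copies depends on the modification times the backends report, per the docstring.

import fs
from fs.copy import copy_dir_if_newer

src = fs.open_fs("mem://")
src.writetext("a.txt", "first")
dst = fs.open_fs("mem://")

copy_dir_if_newer(src, "/", dst, "/")   # a.txt copied: it does not exist on dst yet
src.writetext("a.txt", "second")        # bump the source modification time
copy_dir_if_newer(src, "/", dst, "/")   # a.txt copied again only if the source mtime is now newer
print(dst.readtext("a.txt"))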
233,642
PyFilesystem/pyfilesystem2
fs/ftpfs.py
_parse_ftp_error
def _parse_ftp_error(error): # type: (ftplib.Error) -> Tuple[Text, Text] """Extract code and message from ftp error.""" code, _, message = text_type(error).partition(" ") return code, message
python
def _parse_ftp_error(error): # type: (ftplib.Error) -> Tuple[Text, Text] """Extract code and message from ftp error.""" code, _, message = text_type(error).partition(" ") return code, message
[ "def", "_parse_ftp_error", "(", "error", ")", ":", "# type: (ftplib.Error) -> Tuple[Text, Text]", "code", ",", "_", ",", "message", "=", "text_type", "(", "error", ")", ".", "partition", "(", "\" \"", ")", "return", "code", ",", "message" ]
Extract code and message from ftp error.
[ "Extract", "code", "and", "message", "from", "ftp", "error", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/ftpfs.py#L108-L112
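_parse_ftp_error is a private helper, but its behaviour is just str.partition on the first space; a standalone sketch with a made-up ftplib error:

from ftplib import error_perm

err = error_perm("550 Can't open file")          # sample error text, not from a real server
code, _, message = str(err).partition(" ")       # the same split _parse_ftp_error performs
print(code, message)                             # 550 Can't open file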
233,643
PyFilesystem/pyfilesystem2
fs/ftpfs.py
FTPFile._open_ftp
def _open_ftp(self): # type: () -> FTP """Open an ftp object for the file.""" ftp = self.fs._open_ftp() ftp.voidcmd(str("TYPE I")) return ftp
python
def _open_ftp(self): # type: () -> FTP """Open an ftp object for the file.""" ftp = self.fs._open_ftp() ftp.voidcmd(str("TYPE I")) return ftp
[ "def", "_open_ftp", "(", "self", ")", ":", "# type: () -> FTP", "ftp", "=", "self", ".", "fs", ".", "_open_ftp", "(", ")", "ftp", ".", "voidcmd", "(", "str", "(", "\"TYPE I\"", ")", ")", "return", "ftp" ]
Open an ftp object for the file.
[ "Open", "an", "ftp", "object", "for", "the", "file", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/ftpfs.py#L150-L155
233,644
PyFilesystem/pyfilesystem2
fs/ftpfs.py
FTPFS._parse_features
def _parse_features(cls, feat_response): # type: (Text) -> Dict[Text, Text] """Parse a dict of features from FTP feat response. """ features = {} if feat_response.split("-")[0] == "211": for line in feat_response.splitlines(): if line.startswith(" "): key, _, value = line[1:].partition(" ") features[key] = value return features
python
def _parse_features(cls, feat_response): # type: (Text) -> Dict[Text, Text] """Parse a dict of features from FTP feat response. """ features = {} if feat_response.split("-")[0] == "211": for line in feat_response.splitlines(): if line.startswith(" "): key, _, value = line[1:].partition(" ") features[key] = value return features
[ "def", "_parse_features", "(", "cls", ",", "feat_response", ")", ":", "# type: (Text) -> Dict[Text, Text]", "features", "=", "{", "}", "if", "feat_response", ".", "split", "(", "\"-\"", ")", "[", "0", "]", "==", "\"211\"", ":", "for", "line", "in", "feat_response", ".", "splitlines", "(", ")", ":", "if", "line", ".", "startswith", "(", "\" \"", ")", ":", "key", ",", "_", ",", "value", "=", "line", "[", "1", ":", "]", ".", "partition", "(", "\" \"", ")", "features", "[", "key", "]", "=", "value", "return", "features" ]
Parse a dict of features from FTP feat response.
[ "Parse", "a", "dict", "of", "features", "from", "FTP", "feat", "response", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/ftpfs.py#L397-L407
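The parsing logic above can be exercised standalone on a sample FEAT reply (the reply text below is made up; real servers vary):

feat_response = "211-Features:\n MDTM\n SIZE\n UTF8\n211 End"   # illustrative FEAT reply
features = {}
if feat_response.split("-")[0] == "211":
    for line in feat_response.splitlines():
        if line.startswith(" "):
            key, _, value = line[1:].partition(" ")
            features[key] = value
print(features)   # {'MDTM': '', 'SIZE': '', 'UTF8': ''}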
233,645
PyFilesystem/pyfilesystem2
fs/ftpfs.py
FTPFS._open_ftp
def _open_ftp(self): # type: () -> FTP """Open a new ftp object. """ _ftp = FTP() _ftp.set_debuglevel(0) with ftp_errors(self): _ftp.connect(self.host, self.port, self.timeout) _ftp.login(self.user, self.passwd, self.acct) self._features = {} try: feat_response = _decode(_ftp.sendcmd("FEAT"), "latin-1") except error_perm: # pragma: no cover self.encoding = "latin-1" else: self._features = self._parse_features(feat_response) self.encoding = "utf-8" if "UTF8" in self._features else "latin-1" if not PY2: _ftp.file = _ftp.sock.makefile( # type: ignore "r", encoding=self.encoding ) _ftp.encoding = self.encoding self._welcome = _ftp.welcome return _ftp
python
def _open_ftp(self): # type: () -> FTP """Open a new ftp object. """ _ftp = FTP() _ftp.set_debuglevel(0) with ftp_errors(self): _ftp.connect(self.host, self.port, self.timeout) _ftp.login(self.user, self.passwd, self.acct) self._features = {} try: feat_response = _decode(_ftp.sendcmd("FEAT"), "latin-1") except error_perm: # pragma: no cover self.encoding = "latin-1" else: self._features = self._parse_features(feat_response) self.encoding = "utf-8" if "UTF8" in self._features else "latin-1" if not PY2: _ftp.file = _ftp.sock.makefile( # type: ignore "r", encoding=self.encoding ) _ftp.encoding = self.encoding self._welcome = _ftp.welcome return _ftp
[ "def", "_open_ftp", "(", "self", ")", ":", "# type: () -> FTP", "_ftp", "=", "FTP", "(", ")", "_ftp", ".", "set_debuglevel", "(", "0", ")", "with", "ftp_errors", "(", "self", ")", ":", "_ftp", ".", "connect", "(", "self", ".", "host", ",", "self", ".", "port", ",", "self", ".", "timeout", ")", "_ftp", ".", "login", "(", "self", ".", "user", ",", "self", ".", "passwd", ",", "self", ".", "acct", ")", "self", ".", "_features", "=", "{", "}", "try", ":", "feat_response", "=", "_decode", "(", "_ftp", ".", "sendcmd", "(", "\"FEAT\"", ")", ",", "\"latin-1\"", ")", "except", "error_perm", ":", "# pragma: no cover", "self", ".", "encoding", "=", "\"latin-1\"", "else", ":", "self", ".", "_features", "=", "self", ".", "_parse_features", "(", "feat_response", ")", "self", ".", "encoding", "=", "\"utf-8\"", "if", "\"UTF8\"", "in", "self", ".", "_features", "else", "\"latin-1\"", "if", "not", "PY2", ":", "_ftp", ".", "file", "=", "_ftp", ".", "sock", ".", "makefile", "(", "# type: ignore", "\"r\"", ",", "encoding", "=", "self", ".", "encoding", ")", "_ftp", ".", "encoding", "=", "self", ".", "encoding", "self", ".", "_welcome", "=", "_ftp", ".", "welcome", "return", "_ftp" ]
Open a new ftp object.
[ "Open", "a", "new", "ftp", "object", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/ftpfs.py#L409-L432
233,646
PyFilesystem/pyfilesystem2
fs/ftpfs.py
FTPFS.ftp_url
def ftp_url(self): # type: () -> Text """Get the FTP url this filesystem will open.""" url = ( "ftp://{}".format(self.host) if self.port == 21 else "ftp://{}:{}".format(self.host, self.port) ) return url
python
def ftp_url(self): # type: () -> Text """Get the FTP url this filesystem will open.""" url = ( "ftp://{}".format(self.host) if self.port == 21 else "ftp://{}:{}".format(self.host, self.port) ) return url
[ "def", "ftp_url", "(", "self", ")", ":", "# type: () -> Text", "url", "=", "(", "\"ftp://{}\"", ".", "format", "(", "self", ".", "host", ")", "if", "self", ".", "port", "==", "21", "else", "\"ftp://{}:{}\"", ".", "format", "(", "self", ".", "host", ",", "self", ".", "port", ")", ")", "return", "url" ]
Get the FTP url this filesystem will open.
[ "Get", "the", "FTP", "url", "this", "filesystem", "will", "open", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/ftpfs.py#L440-L448
233,647
PyFilesystem/pyfilesystem2
fs/ftpfs.py
FTPFS._parse_ftp_time
def _parse_ftp_time(cls, time_text): # type: (Text) -> Optional[int] """Parse a time from an ftp directory listing. """ try: tm_year = int(time_text[0:4]) tm_month = int(time_text[4:6]) tm_day = int(time_text[6:8]) tm_hour = int(time_text[8:10]) tm_min = int(time_text[10:12]) tm_sec = int(time_text[12:14]) except ValueError: return None epoch_time = calendar.timegm( (tm_year, tm_month, tm_day, tm_hour, tm_min, tm_sec) ) return epoch_time
python
def _parse_ftp_time(cls, time_text): # type: (Text) -> Optional[int] """Parse a time from an ftp directory listing. """ try: tm_year = int(time_text[0:4]) tm_month = int(time_text[4:6]) tm_day = int(time_text[6:8]) tm_hour = int(time_text[8:10]) tm_min = int(time_text[10:12]) tm_sec = int(time_text[12:14]) except ValueError: return None epoch_time = calendar.timegm( (tm_year, tm_month, tm_day, tm_hour, tm_min, tm_sec) ) return epoch_time
[ "def", "_parse_ftp_time", "(", "cls", ",", "time_text", ")", ":", "# type: (Text) -> Optional[int]", "try", ":", "tm_year", "=", "int", "(", "time_text", "[", "0", ":", "4", "]", ")", "tm_month", "=", "int", "(", "time_text", "[", "4", ":", "6", "]", ")", "tm_day", "=", "int", "(", "time_text", "[", "6", ":", "8", "]", ")", "tm_hour", "=", "int", "(", "time_text", "[", "8", ":", "10", "]", ")", "tm_min", "=", "int", "(", "time_text", "[", "10", ":", "12", "]", ")", "tm_sec", "=", "int", "(", "time_text", "[", "12", ":", "14", "]", ")", "except", "ValueError", ":", "return", "None", "epoch_time", "=", "calendar", ".", "timegm", "(", "(", "tm_year", ",", "tm_month", ",", "tm_day", ",", "tm_hour", ",", "tm_min", ",", "tm_sec", ")", ")", "return", "epoch_time" ]
Parse a time from an ftp directory listing.
[ "Parse", "a", "time", "from", "an", "ftp", "directory", "listing", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/ftpfs.py#L506-L522
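A standalone sketch of the same parse on an MDTM-style timestamp (the timestamp below is made up); like the method, it falls back to None if the field is not numeric.

import calendar

time_text = "20180317123045"   # YYYYMMDDHHMMSS, as returned by FTP MDTM
try:
    fields = (int(time_text[0:4]), int(time_text[4:6]), int(time_text[6:8]),
              int(time_text[8:10]), int(time_text[10:12]), int(time_text[12:14]))
    epoch_time = calendar.timegm(fields)
except ValueError:
    epoch_time = None
print(epoch_time)   # seconds since the epoch (UTC), or None on a bad field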
233,648
PyFilesystem/pyfilesystem2
fs/compress.py
write_zip
def write_zip( src_fs, # type: FS file, # type: Union[Text, BinaryIO] compression=zipfile.ZIP_DEFLATED, # type: int encoding="utf-8", # type: Text walker=None, # type: Optional[Walker] ): # type: (...) -> None """Write the contents of a filesystem to a zip file. Arguments: src_fs (~fs.base.FS): The source filesystem to compress. file (str or io.IOBase): Destination file, may be a file name or an open file object. compression (int): Compression to use (one of the constants defined in the `zipfile` module in the stdlib). Defaults to `zipfile.ZIP_DEFLATED`. encoding (str): The encoding to use for filenames. The default is ``"utf-8"``, use ``"CP437"`` if compatibility with WinZip is desired. walker (~fs.walk.Walker, optional): A `Walker` instance, or `None` to use default walker. You can use this to specify which files you want to compress. """ _zip = zipfile.ZipFile(file, mode="w", compression=compression, allowZip64=True) walker = walker or Walker() with _zip: gen_walk = walker.info(src_fs, namespaces=["details", "stat", "access"]) for path, info in gen_walk: # Zip names must be relative, directory names must end # with a slash. zip_name = relpath(path + "/" if info.is_dir else path) if not six.PY3: # Python2 expects bytes filenames zip_name = zip_name.encode(encoding, "replace") if info.has_namespace("stat"): # If the file has a stat namespace, get the # zip time directory from the stat structure st_mtime = info.get("stat", "st_mtime", None) _mtime = time.localtime(st_mtime) zip_time = _mtime[0:6] # type: ZipTime else: # Otherwise, use the modified time from details # namespace. mt = info.modified or datetime.utcnow() zip_time = (mt.year, mt.month, mt.day, mt.hour, mt.minute, mt.second) # NOTE(@althonos): typeshed's `zipfile.py` on declares # ZipInfo.__init__ for Python < 3 ?! zip_info = zipfile.ZipInfo(zip_name, zip_time) # type: ignore try: if info.permissions is not None: zip_info.external_attr = info.permissions.mode << 16 except MissingInfoNamespace: pass if info.is_dir: zip_info.external_attr |= 0x10 # This is how to record directories with zipfile _zip.writestr(zip_info, b"") else: # Get a syspath if possible try: sys_path = src_fs.getsyspath(path) except NoSysPath: # Write from bytes _zip.writestr(zip_info, src_fs.readbytes(path)) else: # Write from a file which is (presumably) # more memory efficient _zip.write(sys_path, zip_name)
python
def write_zip( src_fs, # type: FS file, # type: Union[Text, BinaryIO] compression=zipfile.ZIP_DEFLATED, # type: int encoding="utf-8", # type: Text walker=None, # type: Optional[Walker] ): # type: (...) -> None """Write the contents of a filesystem to a zip file. Arguments: src_fs (~fs.base.FS): The source filesystem to compress. file (str or io.IOBase): Destination file, may be a file name or an open file object. compression (int): Compression to use (one of the constants defined in the `zipfile` module in the stdlib). Defaults to `zipfile.ZIP_DEFLATED`. encoding (str): The encoding to use for filenames. The default is ``"utf-8"``, use ``"CP437"`` if compatibility with WinZip is desired. walker (~fs.walk.Walker, optional): A `Walker` instance, or `None` to use default walker. You can use this to specify which files you want to compress. """ _zip = zipfile.ZipFile(file, mode="w", compression=compression, allowZip64=True) walker = walker or Walker() with _zip: gen_walk = walker.info(src_fs, namespaces=["details", "stat", "access"]) for path, info in gen_walk: # Zip names must be relative, directory names must end # with a slash. zip_name = relpath(path + "/" if info.is_dir else path) if not six.PY3: # Python2 expects bytes filenames zip_name = zip_name.encode(encoding, "replace") if info.has_namespace("stat"): # If the file has a stat namespace, get the # zip time directory from the stat structure st_mtime = info.get("stat", "st_mtime", None) _mtime = time.localtime(st_mtime) zip_time = _mtime[0:6] # type: ZipTime else: # Otherwise, use the modified time from details # namespace. mt = info.modified or datetime.utcnow() zip_time = (mt.year, mt.month, mt.day, mt.hour, mt.minute, mt.second) # NOTE(@althonos): typeshed's `zipfile.py` on declares # ZipInfo.__init__ for Python < 3 ?! zip_info = zipfile.ZipInfo(zip_name, zip_time) # type: ignore try: if info.permissions is not None: zip_info.external_attr = info.permissions.mode << 16 except MissingInfoNamespace: pass if info.is_dir: zip_info.external_attr |= 0x10 # This is how to record directories with zipfile _zip.writestr(zip_info, b"") else: # Get a syspath if possible try: sys_path = src_fs.getsyspath(path) except NoSysPath: # Write from bytes _zip.writestr(zip_info, src_fs.readbytes(path)) else: # Write from a file which is (presumably) # more memory efficient _zip.write(sys_path, zip_name)
[ "def", "write_zip", "(", "src_fs", ",", "# type: FS", "file", ",", "# type: Union[Text, BinaryIO]", "compression", "=", "zipfile", ".", "ZIP_DEFLATED", ",", "# type: int", "encoding", "=", "\"utf-8\"", ",", "# type: Text", "walker", "=", "None", ",", "# type: Optional[Walker]", ")", ":", "# type: (...) -> None", "_zip", "=", "zipfile", ".", "ZipFile", "(", "file", ",", "mode", "=", "\"w\"", ",", "compression", "=", "compression", ",", "allowZip64", "=", "True", ")", "walker", "=", "walker", "or", "Walker", "(", ")", "with", "_zip", ":", "gen_walk", "=", "walker", ".", "info", "(", "src_fs", ",", "namespaces", "=", "[", "\"details\"", ",", "\"stat\"", ",", "\"access\"", "]", ")", "for", "path", ",", "info", "in", "gen_walk", ":", "# Zip names must be relative, directory names must end", "# with a slash.", "zip_name", "=", "relpath", "(", "path", "+", "\"/\"", "if", "info", ".", "is_dir", "else", "path", ")", "if", "not", "six", ".", "PY3", ":", "# Python2 expects bytes filenames", "zip_name", "=", "zip_name", ".", "encode", "(", "encoding", ",", "\"replace\"", ")", "if", "info", ".", "has_namespace", "(", "\"stat\"", ")", ":", "# If the file has a stat namespace, get the", "# zip time directory from the stat structure", "st_mtime", "=", "info", ".", "get", "(", "\"stat\"", ",", "\"st_mtime\"", ",", "None", ")", "_mtime", "=", "time", ".", "localtime", "(", "st_mtime", ")", "zip_time", "=", "_mtime", "[", "0", ":", "6", "]", "# type: ZipTime", "else", ":", "# Otherwise, use the modified time from details", "# namespace.", "mt", "=", "info", ".", "modified", "or", "datetime", ".", "utcnow", "(", ")", "zip_time", "=", "(", "mt", ".", "year", ",", "mt", ".", "month", ",", "mt", ".", "day", ",", "mt", ".", "hour", ",", "mt", ".", "minute", ",", "mt", ".", "second", ")", "# NOTE(@althonos): typeshed's `zipfile.py` on declares", "# ZipInfo.__init__ for Python < 3 ?!", "zip_info", "=", "zipfile", ".", "ZipInfo", "(", "zip_name", ",", "zip_time", ")", "# type: ignore", "try", ":", "if", "info", ".", "permissions", "is", "not", "None", ":", "zip_info", ".", "external_attr", "=", "info", ".", "permissions", ".", "mode", "<<", "16", "except", "MissingInfoNamespace", ":", "pass", "if", "info", ".", "is_dir", ":", "zip_info", ".", "external_attr", "|=", "0x10", "# This is how to record directories with zipfile", "_zip", ".", "writestr", "(", "zip_info", ",", "b\"\"", ")", "else", ":", "# Get a syspath if possible", "try", ":", "sys_path", "=", "src_fs", ".", "getsyspath", "(", "path", ")", "except", "NoSysPath", ":", "# Write from bytes", "_zip", ".", "writestr", "(", "zip_info", ",", "src_fs", ".", "readbytes", "(", "path", ")", ")", "else", ":", "# Write from a file which is (presumably)", "# more memory efficient", "_zip", ".", "write", "(", "sys_path", ",", "zip_name", ")" ]
Write the contents of a filesystem to a zip file. Arguments: src_fs (~fs.base.FS): The source filesystem to compress. file (str or io.IOBase): Destination file, may be a file name or an open file object. compression (int): Compression to use (one of the constants defined in the `zipfile` module in the stdlib). Defaults to `zipfile.ZIP_DEFLATED`. encoding (str): The encoding to use for filenames. The default is ``"utf-8"``, use ``"CP437"`` if compatibility with WinZip is desired. walker (~fs.walk.Walker, optional): A `Walker` instance, or `None` to use default walker. You can use this to specify which files you want to compress.
[ "Write", "the", "contents", "of", "a", "filesystem", "to", "a", "zip", "file", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/compress.py#L32-L105
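A usage sketch for write_zip that compresses an in-memory filesystem into an in-memory zip (file names and contents are illustrative); `file` may equally be a plain path on disk.

import io
import zipfile
import fs
from fs.compress import write_zip

src = fs.open_fs("mem://")
src.makedir("docs")
src.writetext("docs/index.txt", "hello\n")
src.writetext("readme.txt", "top level\n")

buf = io.BytesIO()
write_zip(src, buf)                      # accepts a filename or an open binary file
buf.seek(0)
print(zipfile.ZipFile(buf).namelist())   # e.g. ['readme.txt', 'docs/', 'docs/index.txt']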
233,649
PyFilesystem/pyfilesystem2
fs/compress.py
write_tar
def write_tar( src_fs, # type: FS file, # type: Union[Text, BinaryIO] compression=None, # type: Optional[Text] encoding="utf-8", # type: Text walker=None, # type: Optional[Walker] ): # type: (...) -> None """Write the contents of a filesystem to a tar file. Arguments: file (str or io.IOBase): Destination file, may be a file name or an open file object. compression (str, optional): Compression to use, or `None` for a plain Tar archive without compression. encoding(str): The encoding to use for filenames. The default is ``"utf-8"``. walker (~fs.walk.Walker, optional): A `Walker` instance, or `None` to use default walker. You can use this to specify which files you want to compress. """ type_map = { ResourceType.block_special_file: tarfile.BLKTYPE, ResourceType.character: tarfile.CHRTYPE, ResourceType.directory: tarfile.DIRTYPE, ResourceType.fifo: tarfile.FIFOTYPE, ResourceType.file: tarfile.REGTYPE, ResourceType.socket: tarfile.AREGTYPE, # no type for socket ResourceType.symlink: tarfile.SYMTYPE, ResourceType.unknown: tarfile.AREGTYPE, # no type for unknown } tar_attr = [("uid", "uid"), ("gid", "gid"), ("uname", "user"), ("gname", "group")] mode = "w:{}".format(compression or "") if isinstance(file, (six.text_type, six.binary_type)): _tar = tarfile.open(file, mode=mode) else: _tar = tarfile.open(fileobj=file, mode=mode) current_time = time.time() walker = walker or Walker() with _tar: gen_walk = walker.info(src_fs, namespaces=["details", "stat", "access"]) for path, info in gen_walk: # Tar names must be relative tar_name = relpath(path) if not six.PY3: # Python2 expects bytes filenames tar_name = tar_name.encode(encoding, "replace") tar_info = tarfile.TarInfo(tar_name) if info.has_namespace("stat"): mtime = info.get("stat", "st_mtime", current_time) else: mtime = info.modified or current_time if isinstance(mtime, datetime): mtime = datetime_to_epoch(mtime) if isinstance(mtime, float): mtime = int(mtime) tar_info.mtime = mtime for tarattr, infoattr in tar_attr: if getattr(info, infoattr, None) is not None: setattr(tar_info, tarattr, getattr(info, infoattr, None)) if info.has_namespace("access"): tar_info.mode = getattr(info.permissions, "mode", 0o420) if info.is_dir: tar_info.type = tarfile.DIRTYPE _tar.addfile(tar_info) else: tar_info.type = type_map.get(info.type, tarfile.REGTYPE) tar_info.size = info.size with src_fs.openbin(path) as bin_file: _tar.addfile(tar_info, bin_file)
python
def write_tar( src_fs, # type: FS file, # type: Union[Text, BinaryIO] compression=None, # type: Optional[Text] encoding="utf-8", # type: Text walker=None, # type: Optional[Walker] ): # type: (...) -> None """Write the contents of a filesystem to a tar file. Arguments: file (str or io.IOBase): Destination file, may be a file name or an open file object. compression (str, optional): Compression to use, or `None` for a plain Tar archive without compression. encoding(str): The encoding to use for filenames. The default is ``"utf-8"``. walker (~fs.walk.Walker, optional): A `Walker` instance, or `None` to use default walker. You can use this to specify which files you want to compress. """ type_map = { ResourceType.block_special_file: tarfile.BLKTYPE, ResourceType.character: tarfile.CHRTYPE, ResourceType.directory: tarfile.DIRTYPE, ResourceType.fifo: tarfile.FIFOTYPE, ResourceType.file: tarfile.REGTYPE, ResourceType.socket: tarfile.AREGTYPE, # no type for socket ResourceType.symlink: tarfile.SYMTYPE, ResourceType.unknown: tarfile.AREGTYPE, # no type for unknown } tar_attr = [("uid", "uid"), ("gid", "gid"), ("uname", "user"), ("gname", "group")] mode = "w:{}".format(compression or "") if isinstance(file, (six.text_type, six.binary_type)): _tar = tarfile.open(file, mode=mode) else: _tar = tarfile.open(fileobj=file, mode=mode) current_time = time.time() walker = walker or Walker() with _tar: gen_walk = walker.info(src_fs, namespaces=["details", "stat", "access"]) for path, info in gen_walk: # Tar names must be relative tar_name = relpath(path) if not six.PY3: # Python2 expects bytes filenames tar_name = tar_name.encode(encoding, "replace") tar_info = tarfile.TarInfo(tar_name) if info.has_namespace("stat"): mtime = info.get("stat", "st_mtime", current_time) else: mtime = info.modified or current_time if isinstance(mtime, datetime): mtime = datetime_to_epoch(mtime) if isinstance(mtime, float): mtime = int(mtime) tar_info.mtime = mtime for tarattr, infoattr in tar_attr: if getattr(info, infoattr, None) is not None: setattr(tar_info, tarattr, getattr(info, infoattr, None)) if info.has_namespace("access"): tar_info.mode = getattr(info.permissions, "mode", 0o420) if info.is_dir: tar_info.type = tarfile.DIRTYPE _tar.addfile(tar_info) else: tar_info.type = type_map.get(info.type, tarfile.REGTYPE) tar_info.size = info.size with src_fs.openbin(path) as bin_file: _tar.addfile(tar_info, bin_file)
[ "def", "write_tar", "(", "src_fs", ",", "# type: FS", "file", ",", "# type: Union[Text, BinaryIO]", "compression", "=", "None", ",", "# type: Optional[Text]", "encoding", "=", "\"utf-8\"", ",", "# type: Text", "walker", "=", "None", ",", "# type: Optional[Walker]", ")", ":", "# type: (...) -> None", "type_map", "=", "{", "ResourceType", ".", "block_special_file", ":", "tarfile", ".", "BLKTYPE", ",", "ResourceType", ".", "character", ":", "tarfile", ".", "CHRTYPE", ",", "ResourceType", ".", "directory", ":", "tarfile", ".", "DIRTYPE", ",", "ResourceType", ".", "fifo", ":", "tarfile", ".", "FIFOTYPE", ",", "ResourceType", ".", "file", ":", "tarfile", ".", "REGTYPE", ",", "ResourceType", ".", "socket", ":", "tarfile", ".", "AREGTYPE", ",", "# no type for socket", "ResourceType", ".", "symlink", ":", "tarfile", ".", "SYMTYPE", ",", "ResourceType", ".", "unknown", ":", "tarfile", ".", "AREGTYPE", ",", "# no type for unknown", "}", "tar_attr", "=", "[", "(", "\"uid\"", ",", "\"uid\"", ")", ",", "(", "\"gid\"", ",", "\"gid\"", ")", ",", "(", "\"uname\"", ",", "\"user\"", ")", ",", "(", "\"gname\"", ",", "\"group\"", ")", "]", "mode", "=", "\"w:{}\"", ".", "format", "(", "compression", "or", "\"\"", ")", "if", "isinstance", "(", "file", ",", "(", "six", ".", "text_type", ",", "six", ".", "binary_type", ")", ")", ":", "_tar", "=", "tarfile", ".", "open", "(", "file", ",", "mode", "=", "mode", ")", "else", ":", "_tar", "=", "tarfile", ".", "open", "(", "fileobj", "=", "file", ",", "mode", "=", "mode", ")", "current_time", "=", "time", ".", "time", "(", ")", "walker", "=", "walker", "or", "Walker", "(", ")", "with", "_tar", ":", "gen_walk", "=", "walker", ".", "info", "(", "src_fs", ",", "namespaces", "=", "[", "\"details\"", ",", "\"stat\"", ",", "\"access\"", "]", ")", "for", "path", ",", "info", "in", "gen_walk", ":", "# Tar names must be relative", "tar_name", "=", "relpath", "(", "path", ")", "if", "not", "six", ".", "PY3", ":", "# Python2 expects bytes filenames", "tar_name", "=", "tar_name", ".", "encode", "(", "encoding", ",", "\"replace\"", ")", "tar_info", "=", "tarfile", ".", "TarInfo", "(", "tar_name", ")", "if", "info", ".", "has_namespace", "(", "\"stat\"", ")", ":", "mtime", "=", "info", ".", "get", "(", "\"stat\"", ",", "\"st_mtime\"", ",", "current_time", ")", "else", ":", "mtime", "=", "info", ".", "modified", "or", "current_time", "if", "isinstance", "(", "mtime", ",", "datetime", ")", ":", "mtime", "=", "datetime_to_epoch", "(", "mtime", ")", "if", "isinstance", "(", "mtime", ",", "float", ")", ":", "mtime", "=", "int", "(", "mtime", ")", "tar_info", ".", "mtime", "=", "mtime", "for", "tarattr", ",", "infoattr", "in", "tar_attr", ":", "if", "getattr", "(", "info", ",", "infoattr", ",", "None", ")", "is", "not", "None", ":", "setattr", "(", "tar_info", ",", "tarattr", ",", "getattr", "(", "info", ",", "infoattr", ",", "None", ")", ")", "if", "info", ".", "has_namespace", "(", "\"access\"", ")", ":", "tar_info", ".", "mode", "=", "getattr", "(", "info", ".", "permissions", ",", "\"mode\"", ",", "0o420", ")", "if", "info", ".", "is_dir", ":", "tar_info", ".", "type", "=", "tarfile", ".", "DIRTYPE", "_tar", ".", "addfile", "(", "tar_info", ")", "else", ":", "tar_info", ".", "type", "=", "type_map", ".", "get", "(", "info", ".", "type", ",", "tarfile", ".", "REGTYPE", ")", "tar_info", ".", "size", "=", "info", ".", "size", "with", "src_fs", ".", "openbin", "(", "path", ")", "as", "bin_file", ":", "_tar", ".", "addfile", "(", "tar_info", ",", "bin_file", ")" ]
Write the contents of a filesystem to a tar file. Arguments: file (str or io.IOBase): Destination file, may be a file name or an open file object. compression (str, optional): Compression to use, or `None` for a plain Tar archive without compression. encoding (str): The encoding to use for filenames. The default is ``"utf-8"``. walker (~fs.walk.Walker, optional): A `Walker` instance, or `None` to use default walker. You can use this to specify which files you want to compress.
[ "Write", "the", "contents", "of", "a", "filesystem", "to", "a", "tar", "file", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/compress.py#L108-L187
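The tar counterpart works the same way; a sketch writing a gzip-compressed archive to memory (names are illustrative). The compression string is passed straight into tarfile's mode, so "gz", "bz2" or "xz" are the usual choices.

import io
import tarfile
import fs
from fs.compress import write_tar

src = fs.open_fs("mem://")
src.writetext("notes.txt", "hello\n")

buf = io.BytesIO()
write_tar(src, buf, compression="gz")    # the mode becomes "w:gz" under the hood
buf.seek(0)
with tarfile.open(fileobj=buf, mode="r:gz") as tar:
    print(tar.getnames())                # ['notes.txt']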
233,650
PyFilesystem/pyfilesystem2
fs/glob.py
Globber.count_lines
def count_lines(self): # type: () -> LineCounts """Count the lines in the matched files. Returns: `~LineCounts`: A named tuple containing line counts. Example: >>> import fs >>> fs.open_fs('~/projects').glob('**/*.py').count_lines() LineCounts(lines=5767102, non_blank=4915110) """ lines = 0 non_blank = 0 for path, info in self._make_iter(): if info.is_file: for line in self.fs.open(path, "rb"): lines += 1 if line.rstrip(): non_blank += 1 return LineCounts(lines=lines, non_blank=non_blank)
python
def count_lines(self): # type: () -> LineCounts """Count the lines in the matched files. Returns: `~LineCounts`: A named tuple containing line counts. Example: >>> import fs >>> fs.open_fs('~/projects').glob('**/*.py').count_lines() LineCounts(lines=5767102, non_blank=4915110) """ lines = 0 non_blank = 0 for path, info in self._make_iter(): if info.is_file: for line in self.fs.open(path, "rb"): lines += 1 if line.rstrip(): non_blank += 1 return LineCounts(lines=lines, non_blank=non_blank)
[ "def", "count_lines", "(", "self", ")", ":", "# type: () -> LineCounts", "lines", "=", "0", "non_blank", "=", "0", "for", "path", ",", "info", "in", "self", ".", "_make_iter", "(", ")", ":", "if", "info", ".", "is_file", ":", "for", "line", "in", "self", ".", "fs", ".", "open", "(", "path", ",", "\"rb\"", ")", ":", "lines", "+=", "1", "if", "line", ".", "rstrip", "(", ")", ":", "non_blank", "+=", "1", "return", "LineCounts", "(", "lines", "=", "lines", ",", "non_blank", "=", "non_blank", ")" ]
Count the lines in the matched files. Returns: `~LineCounts`: A named tuple containing line counts. Example: >>> import fs >>> fs.open_fs('~/projects').glob('**/*.py').count_lines() LineCounts(lines=5767102, non_blank=4915110)
[ "Count", "the", "lines", "in", "the", "matched", "files", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/glob.py#L190-L212
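The doctest in the record uses a real ~/projects folder; a self-contained variant on an in-memory filesystem (file names and contents are made up) behaves the same way.

import fs

proj = fs.open_fs("mem://")
proj.makedir("src")
proj.writetext("src/app.py", "import sys\n\nprint(sys.argv)\n")
proj.writetext("notes.txt", "not matched by the pattern below\n")

# Three lines in app.py, one of them blank.
print(proj.glob("**/*.py").count_lines())   # LineCounts(lines=3, non_blank=2)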
233,651
PyFilesystem/pyfilesystem2
fs/glob.py
Globber.remove
def remove(self): # type: () -> int """Removed all matched paths. Returns: int: Number of file and directories removed. Example: >>> import fs >>> fs.open_fs('~/projects/my_project').glob('**/*.pyc').remove() 29 """ removes = 0 for path, info in self._make_iter(search="depth"): if info.is_dir: self.fs.removetree(path) else: self.fs.remove(path) removes += 1 return removes
python
def remove(self): # type: () -> int """Removed all matched paths. Returns: int: Number of file and directories removed. Example: >>> import fs >>> fs.open_fs('~/projects/my_project').glob('**/*.pyc').remove() 29 """ removes = 0 for path, info in self._make_iter(search="depth"): if info.is_dir: self.fs.removetree(path) else: self.fs.remove(path) removes += 1 return removes
[ "def", "remove", "(", "self", ")", ":", "# type: () -> int", "removes", "=", "0", "for", "path", ",", "info", "in", "self", ".", "_make_iter", "(", "search", "=", "\"depth\"", ")", ":", "if", "info", ".", "is_dir", ":", "self", ".", "fs", ".", "removetree", "(", "path", ")", "else", ":", "self", ".", "fs", ".", "remove", "(", "path", ")", "removes", "+=", "1", "return", "removes" ]
Removed all matched paths. Returns: int: Number of files and directories removed. Example: >>> import fs >>> fs.open_fs('~/projects/my_project').glob('**/*.pyc').remove() 29
[ "Removed", "all", "matched", "paths", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/glob.py#L214-L234
233,652
PyFilesystem/pyfilesystem2
fs/move.py
move_file
def move_file( src_fs, # type: Union[Text, FS] src_path, # type: Text dst_fs, # type: Union[Text, FS] dst_path, # type: Text ): # type: (...) -> None """Move a file from one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a file on ``src_fs``. dst_fs (FS or str); Destination filesystem (instance or URL). dst_path (str): Path to a file on ``dst_fs``. """ with manage_fs(src_fs) as _src_fs: with manage_fs(dst_fs, create=True) as _dst_fs: if _src_fs is _dst_fs: # Same filesystem, may be optimized _src_fs.move(src_path, dst_path, overwrite=True) else: # Standard copy and delete with _src_fs.lock(), _dst_fs.lock(): copy_file(_src_fs, src_path, _dst_fs, dst_path) _src_fs.remove(src_path)
python
def move_file( src_fs, # type: Union[Text, FS] src_path, # type: Text dst_fs, # type: Union[Text, FS] dst_path, # type: Text ): # type: (...) -> None """Move a file from one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a file on ``src_fs``. dst_fs (FS or str); Destination filesystem (instance or URL). dst_path (str): Path to a file on ``dst_fs``. """ with manage_fs(src_fs) as _src_fs: with manage_fs(dst_fs, create=True) as _dst_fs: if _src_fs is _dst_fs: # Same filesystem, may be optimized _src_fs.move(src_path, dst_path, overwrite=True) else: # Standard copy and delete with _src_fs.lock(), _dst_fs.lock(): copy_file(_src_fs, src_path, _dst_fs, dst_path) _src_fs.remove(src_path)
[ "def", "move_file", "(", "src_fs", ",", "# type: Union[Text, FS]", "src_path", ",", "# type: Text", "dst_fs", ",", "# type: Union[Text, FS]", "dst_path", ",", "# type: Text", ")", ":", "# type: (...) -> None", "with", "manage_fs", "(", "src_fs", ")", "as", "_src_fs", ":", "with", "manage_fs", "(", "dst_fs", ",", "create", "=", "True", ")", "as", "_dst_fs", ":", "if", "_src_fs", "is", "_dst_fs", ":", "# Same filesystem, may be optimized", "_src_fs", ".", "move", "(", "src_path", ",", "dst_path", ",", "overwrite", "=", "True", ")", "else", ":", "# Standard copy and delete", "with", "_src_fs", ".", "lock", "(", ")", ",", "_dst_fs", ".", "lock", "(", ")", ":", "copy_file", "(", "_src_fs", ",", "src_path", ",", "_dst_fs", ",", "dst_path", ")", "_src_fs", ".", "remove", "(", "src_path", ")" ]
Move a file from one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a file on ``src_fs``. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a file on ``dst_fs``.
[ "Move", "a", "file", "from", "one", "filesystem", "to", "another", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/move.py#L32-L57
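A minimal sketch of move_file between two in-memory filesystems (paths are illustrative); cross-filesystem moves are a copy followed by a delete, as the code above shows.

import fs
from fs.move import move_file

src = fs.open_fs("mem://")
dst = fs.open_fs("mem://")
src.writetext("draft.txt", "moving day")

move_file(src, "/draft.txt", dst, "/final.txt")
print(src.exists("/draft.txt"))      # False: the source copy is removed
print(dst.readtext("/final.txt"))    # moving day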
233,653
PyFilesystem/pyfilesystem2
fs/move.py
move_dir
def move_dir( src_fs, # type: Union[Text, FS] src_path, # type: Text dst_fs, # type: Union[Text, FS] dst_path, # type: Text workers=0, # type: int ): # type: (...) -> None """Move a directory from one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a directory on ``src_fs`` dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a directory on ``dst_fs``. workers (int): Use `worker` threads to copy data, or ``0`` (default) for a single-threaded copy. """ def src(): return manage_fs(src_fs, writeable=False) def dst(): return manage_fs(dst_fs, create=True) with src() as _src_fs, dst() as _dst_fs: with _src_fs.lock(), _dst_fs.lock(): _dst_fs.makedir(dst_path, recreate=True) copy_dir(src_fs, src_path, dst_fs, dst_path, workers=workers) _src_fs.removetree(src_path)
python
def move_dir( src_fs, # type: Union[Text, FS] src_path, # type: Text dst_fs, # type: Union[Text, FS] dst_path, # type: Text workers=0, # type: int ): # type: (...) -> None """Move a directory from one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a directory on ``src_fs`` dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a directory on ``dst_fs``. workers (int): Use `worker` threads to copy data, or ``0`` (default) for a single-threaded copy. """ def src(): return manage_fs(src_fs, writeable=False) def dst(): return manage_fs(dst_fs, create=True) with src() as _src_fs, dst() as _dst_fs: with _src_fs.lock(), _dst_fs.lock(): _dst_fs.makedir(dst_path, recreate=True) copy_dir(src_fs, src_path, dst_fs, dst_path, workers=workers) _src_fs.removetree(src_path)
[ "def", "move_dir", "(", "src_fs", ",", "# type: Union[Text, FS]", "src_path", ",", "# type: Text", "dst_fs", ",", "# type: Union[Text, FS]", "dst_path", ",", "# type: Text", "workers", "=", "0", ",", "# type: int", ")", ":", "# type: (...) -> None", "def", "src", "(", ")", ":", "return", "manage_fs", "(", "src_fs", ",", "writeable", "=", "False", ")", "def", "dst", "(", ")", ":", "return", "manage_fs", "(", "dst_fs", ",", "create", "=", "True", ")", "with", "src", "(", ")", "as", "_src_fs", ",", "dst", "(", ")", "as", "_dst_fs", ":", "with", "_src_fs", ".", "lock", "(", ")", ",", "_dst_fs", ".", "lock", "(", ")", ":", "_dst_fs", ".", "makedir", "(", "dst_path", ",", "recreate", "=", "True", ")", "copy_dir", "(", "src_fs", ",", "src_path", ",", "dst_fs", ",", "dst_path", ",", "workers", "=", "workers", ")", "_src_fs", ".", "removetree", "(", "src_path", ")" ]
Move a directory from one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a directory on ``src_fs``. dst_fs (FS or str): Destination filesystem (instance or URL). dst_path (str): Path to a directory on ``dst_fs``. workers (int): Use `worker` threads to copy data, or ``0`` (default) for a single-threaded copy.
[ "Move", "a", "directory", "from", "one", "filesystem", "to", "another", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/move.py#L60-L90
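And the directory version, again on in-memory filesystems with made-up paths; the source tree is removed once the copy completes.

import fs
from fs.move import move_dir

src = fs.open_fs("mem://")
src.makedirs("logs/2018")
src.writetext("logs/2018/jan.log", "...\n")
dst = fs.open_fs("mem://")

move_dir(src, "/logs", dst, "/archive")
print(src.exists("/logs"))                     # False
print(dst.exists("/archive/2018/jan.log"))     # True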
233,654
PyFilesystem/pyfilesystem2
fs/path.py
recursepath
def recursepath(path, reverse=False): # type: (Text, bool) -> List[Text] """Get intermediate paths from the root to the given path. Arguments: path (str): A PyFilesystem path reverse (bool): Reverses the order of the paths (default `False`). Returns: list: A list of paths. Example: >>> recursepath('a/b/c') ['/', '/a', '/a/b', '/a/b/c'] """ if path in "/": return ["/"] path = abspath(normpath(path)) + "/" paths = ["/"] find = path.find append = paths.append pos = 1 len_path = len(path) while pos < len_path: pos = find("/", pos) append(path[:pos]) pos += 1 if reverse: return paths[::-1] return paths
python
def recursepath(path, reverse=False): # type: (Text, bool) -> List[Text] """Get intermediate paths from the root to the given path. Arguments: path (str): A PyFilesystem path reverse (bool): Reverses the order of the paths (default `False`). Returns: list: A list of paths. Example: >>> recursepath('a/b/c') ['/', '/a', '/a/b', '/a/b/c'] """ if path in "/": return ["/"] path = abspath(normpath(path)) + "/" paths = ["/"] find = path.find append = paths.append pos = 1 len_path = len(path) while pos < len_path: pos = find("/", pos) append(path[:pos]) pos += 1 if reverse: return paths[::-1] return paths
[ "def", "recursepath", "(", "path", ",", "reverse", "=", "False", ")", ":", "# type: (Text, bool) -> List[Text]", "if", "path", "in", "\"/\"", ":", "return", "[", "\"/\"", "]", "path", "=", "abspath", "(", "normpath", "(", "path", ")", ")", "+", "\"/\"", "paths", "=", "[", "\"/\"", "]", "find", "=", "path", ".", "find", "append", "=", "paths", ".", "append", "pos", "=", "1", "len_path", "=", "len", "(", "path", ")", "while", "pos", "<", "len_path", ":", "pos", "=", "find", "(", "\"/\"", ",", "pos", ")", "append", "(", "path", "[", ":", "pos", "]", ")", "pos", "+=", "1", "if", "reverse", ":", "return", "paths", "[", ":", ":", "-", "1", "]", "return", "paths" ]
Get intermediate paths from the root to the given path. Arguments: path (str): A PyFilesystem path. reverse (bool): Reverses the order of the paths (default `False`). Returns: list: A list of paths. Example: >>> recursepath('a/b/c') ['/', '/a', '/a/b', '/a/b/c']
[ "Get", "intermediate", "paths", "from", "the", "root", "to", "the", "given", "path", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/path.py#L114-L149
233,655
PyFilesystem/pyfilesystem2
fs/path.py
join
def join(*paths): # type: (*Text) -> Text """Join any number of paths together. Arguments: *paths (str): Paths to join, given as positional arguments. Returns: str: The joined path. Example: >>> join('foo', 'bar', 'baz') 'foo/bar/baz' >>> join('foo/bar', '../baz') 'foo/baz' >>> join('foo/bar', '/baz') '/baz' """ absolute = False relpaths = [] # type: List[Text] for p in paths: if p: if p[0] == "/": del relpaths[:] absolute = True relpaths.append(p) path = normpath("/".join(relpaths)) if absolute: path = abspath(path) return path
python
def join(*paths): # type: (*Text) -> Text """Join any number of paths together. Arguments: *paths (str): Paths to join, given as positional arguments. Returns: str: The joined path. Example: >>> join('foo', 'bar', 'baz') 'foo/bar/baz' >>> join('foo/bar', '../baz') 'foo/baz' >>> join('foo/bar', '/baz') '/baz' """ absolute = False relpaths = [] # type: List[Text] for p in paths: if p: if p[0] == "/": del relpaths[:] absolute = True relpaths.append(p) path = normpath("/".join(relpaths)) if absolute: path = abspath(path) return path
[ "def", "join", "(", "*", "paths", ")", ":", "# type: (*Text) -> Text", "absolute", "=", "False", "relpaths", "=", "[", "]", "# type: List[Text]", "for", "p", "in", "paths", ":", "if", "p", ":", "if", "p", "[", "0", "]", "==", "\"/\"", ":", "del", "relpaths", "[", ":", "]", "absolute", "=", "True", "relpaths", ".", "append", "(", "p", ")", "path", "=", "normpath", "(", "\"/\"", ".", "join", "(", "relpaths", ")", ")", "if", "absolute", ":", "path", "=", "abspath", "(", "path", ")", "return", "path" ]
Join any number of paths together. Arguments: *paths (str): Paths to join, given as positional arguments. Returns: str: The joined path. Example: >>> join('foo', 'bar', 'baz') 'foo/bar/baz' >>> join('foo/bar', '../baz') 'foo/baz' >>> join('foo/bar', '/baz') '/baz'
[ "Join", "any", "number", "of", "paths", "together", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/path.py#L208-L239
233,656
PyFilesystem/pyfilesystem2
fs/path.py
combine
def combine(path1, path2): # type: (Text, Text) -> Text """Join two paths together. This is faster than :func:`~fs.path.join`, but only works when the second path is relative, and there are no back references in either path. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: str: The joint path. Example: >>> combine("foo/bar", "baz") 'foo/bar/baz' """ if not path1: return path2.lstrip() return "{}/{}".format(path1.rstrip("/"), path2.lstrip("/"))
python
def combine(path1, path2): # type: (Text, Text) -> Text """Join two paths together. This is faster than :func:`~fs.path.join`, but only works when the second path is relative, and there are no back references in either path. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: str: The joint path. Example: >>> combine("foo/bar", "baz") 'foo/bar/baz' """ if not path1: return path2.lstrip() return "{}/{}".format(path1.rstrip("/"), path2.lstrip("/"))
[ "def", "combine", "(", "path1", ",", "path2", ")", ":", "# type: (Text, Text) -> Text", "if", "not", "path1", ":", "return", "path2", ".", "lstrip", "(", ")", "return", "\"{}/{}\"", ".", "format", "(", "path1", ".", "rstrip", "(", "\"/\"", ")", ",", "path2", ".", "lstrip", "(", "\"/\"", ")", ")" ]
Join two paths together. This is faster than :func:`~fs.path.join`, but only works when the second path is relative, and there are no back references in either path. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: str: The joint path. Example: >>> combine("foo/bar", "baz") 'foo/bar/baz'
[ "Join", "two", "paths", "together", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/path.py#L242-L264
233,657
PyFilesystem/pyfilesystem2
fs/path.py
parts
def parts(path): # type: (Text) -> List[Text] """Split a path in to its component parts. Arguments: path (str): Path to split in to parts. Returns: list: List of components Example: >>> parts('/foo/bar/baz') ['/', 'foo', 'bar', 'baz'] """ _path = normpath(path) components = _path.strip("/") _parts = ["/" if _path.startswith("/") else "./"] if components: _parts += components.split("/") return _parts
python
def parts(path): # type: (Text) -> List[Text] """Split a path in to its component parts. Arguments: path (str): Path to split in to parts. Returns: list: List of components Example: >>> parts('/foo/bar/baz') ['/', 'foo', 'bar', 'baz'] """ _path = normpath(path) components = _path.strip("/") _parts = ["/" if _path.startswith("/") else "./"] if components: _parts += components.split("/") return _parts
[ "def", "parts", "(", "path", ")", ":", "# type: (Text) -> List[Text]", "_path", "=", "normpath", "(", "path", ")", "components", "=", "_path", ".", "strip", "(", "\"/\"", ")", "_parts", "=", "[", "\"/\"", "if", "_path", ".", "startswith", "(", "\"/\"", ")", "else", "\"./\"", "]", "if", "components", ":", "_parts", "+=", "components", ".", "split", "(", "\"/\"", ")", "return", "_parts" ]
Split a path in to its component parts. Arguments: path (str): Path to split in to parts. Returns: list: List of components Example: >>> parts('/foo/bar/baz') ['/', 'foo', 'bar', 'baz']
[ "Split", "a", "path", "in", "to", "its", "component", "parts", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/path.py#L267-L288
233,658
PyFilesystem/pyfilesystem2
fs/path.py
splitext
def splitext(path): # type: (Text) -> Tuple[Text, Text] """Split the extension from the path. Arguments: path (str): A path to split. Returns: (str, str): A tuple containing the path and the extension. Example: >>> splitext('baz.txt') ('baz', '.txt') >>> splitext('foo/bar/baz.txt') ('foo/bar/baz', '.txt') >>> splitext('foo/bar/.foo') ('foo/bar/.foo', '') """ parent_path, pathname = split(path) if pathname.startswith(".") and pathname.count(".") == 1: return path, "" if "." not in pathname: return path, "" pathname, ext = pathname.rsplit(".", 1) path = join(parent_path, pathname) return path, "." + ext
python
def splitext(path): # type: (Text) -> Tuple[Text, Text] """Split the extension from the path. Arguments: path (str): A path to split. Returns: (str, str): A tuple containing the path and the extension. Example: >>> splitext('baz.txt') ('baz', '.txt') >>> splitext('foo/bar/baz.txt') ('foo/bar/baz', '.txt') >>> splitext('foo/bar/.foo') ('foo/bar/.foo', '') """ parent_path, pathname = split(path) if pathname.startswith(".") and pathname.count(".") == 1: return path, "" if "." not in pathname: return path, "" pathname, ext = pathname.rsplit(".", 1) path = join(parent_path, pathname) return path, "." + ext
[ "def", "splitext", "(", "path", ")", ":", "# type: (Text) -> Tuple[Text, Text]", "parent_path", ",", "pathname", "=", "split", "(", "path", ")", "if", "pathname", ".", "startswith", "(", "\".\"", ")", "and", "pathname", ".", "count", "(", "\".\"", ")", "==", "1", ":", "return", "path", ",", "\"\"", "if", "\".\"", "not", "in", "pathname", ":", "return", "path", ",", "\"\"", "pathname", ",", "ext", "=", "pathname", ".", "rsplit", "(", "\".\"", ",", "1", ")", "path", "=", "join", "(", "parent_path", ",", "pathname", ")", "return", "path", ",", "\".\"", "+", "ext" ]
Split the extension from the path. Arguments: path (str): A path to split. Returns: (str, str): A tuple containing the path and the extension. Example: >>> splitext('baz.txt') ('baz', '.txt') >>> splitext('foo/bar/baz.txt') ('foo/bar/baz', '.txt') >>> splitext('foo/bar/.foo') ('foo/bar/.foo', '')
[ "Split", "the", "extension", "from", "the", "path", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/path.py#L319-L345
233,659
PyFilesystem/pyfilesystem2
fs/path.py
isbase
def isbase(path1, path2): # type: (Text, Text) -> bool """Check if ``path1`` is a base of ``path2``. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: bool: `True` if ``path2`` starts with ``path1`` Example: >>> isbase('foo/bar', 'foo/bar/baz/egg.txt') True """ _path1 = forcedir(abspath(path1)) _path2 = forcedir(abspath(path2)) return _path2.startswith(_path1)
python
def isbase(path1, path2): # type: (Text, Text) -> bool """Check if ``path1`` is a base of ``path2``. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: bool: `True` if ``path2`` starts with ``path1`` Example: >>> isbase('foo/bar', 'foo/bar/baz/egg.txt') True """ _path1 = forcedir(abspath(path1)) _path2 = forcedir(abspath(path2)) return _path2.startswith(_path1)
[ "def", "isbase", "(", "path1", ",", "path2", ")", ":", "# type: (Text, Text) -> bool", "_path1", "=", "forcedir", "(", "abspath", "(", "path1", ")", ")", "_path2", "=", "forcedir", "(", "abspath", "(", "path2", ")", ")", "return", "_path2", ".", "startswith", "(", "_path1", ")" ]
Check if ``path1`` is a base of ``path2``. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: bool: `True` if ``path2`` starts with ``path1`` Example: >>> isbase('foo/bar', 'foo/bar/baz/egg.txt') True
[ "Check", "if", "path1", "is", "a", "base", "of", "path2", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/path.py#L441-L459
233,660
PyFilesystem/pyfilesystem2
fs/path.py
isparent
def isparent(path1, path2): # type: (Text, Text) -> bool """Check if ``path1`` is a parent directory of ``path2``. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: bool: `True` if ``path1`` is a parent directory of ``path2`` Example: >>> isparent("foo/bar", "foo/bar/spam.txt") True >>> isparent("foo/bar/", "foo/bar") True >>> isparent("foo/barry", "foo/baz/bar") False >>> isparent("foo/bar/baz/", "foo/baz/bar") False """ bits1 = path1.split("/") bits2 = path2.split("/") while bits1 and bits1[-1] == "": bits1.pop() if len(bits1) > len(bits2): return False for (bit1, bit2) in zip(bits1, bits2): if bit1 != bit2: return False return True
python
def isparent(path1, path2): # type: (Text, Text) -> bool """Check if ``path1`` is a parent directory of ``path2``. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: bool: `True` if ``path1`` is a parent directory of ``path2`` Example: >>> isparent("foo/bar", "foo/bar/spam.txt") True >>> isparent("foo/bar/", "foo/bar") True >>> isparent("foo/barry", "foo/baz/bar") False >>> isparent("foo/bar/baz/", "foo/baz/bar") False """ bits1 = path1.split("/") bits2 = path2.split("/") while bits1 and bits1[-1] == "": bits1.pop() if len(bits1) > len(bits2): return False for (bit1, bit2) in zip(bits1, bits2): if bit1 != bit2: return False return True
[ "def", "isparent", "(", "path1", ",", "path2", ")", ":", "# type: (Text, Text) -> bool", "bits1", "=", "path1", ".", "split", "(", "\"/\"", ")", "bits2", "=", "path2", ".", "split", "(", "\"/\"", ")", "while", "bits1", "and", "bits1", "[", "-", "1", "]", "==", "\"\"", ":", "bits1", ".", "pop", "(", ")", "if", "len", "(", "bits1", ")", ">", "len", "(", "bits2", ")", ":", "return", "False", "for", "(", "bit1", ",", "bit2", ")", "in", "zip", "(", "bits1", ",", "bits2", ")", ":", "if", "bit1", "!=", "bit2", ":", "return", "False", "return", "True" ]
Check if ``path1`` is a parent directory of ``path2``. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: bool: `True` if ``path1`` is a parent directory of ``path2`` Example: >>> isparent("foo/bar", "foo/bar/spam.txt") True >>> isparent("foo/bar/", "foo/bar") True >>> isparent("foo/barry", "foo/baz/bar") False >>> isparent("foo/bar/baz/", "foo/baz/bar") False
[ "Check", "if", "path1", "is", "a", "parent", "directory", "of", "path2", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/path.py#L462-L493
233,661
PyFilesystem/pyfilesystem2
fs/path.py
frombase
def frombase(path1, path2): # type: (Text, Text) -> Text """Get the final path of ``path2`` that isn't in ``path1``. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: str: the final part of ``path2``. Example: >>> frombase('foo/bar/', 'foo/bar/baz/egg') 'baz/egg' """ if not isparent(path1, path2): raise ValueError("path1 must be a prefix of path2") return path2[len(path1) :]
python
def frombase(path1, path2): # type: (Text, Text) -> Text """Get the final path of ``path2`` that isn't in ``path1``. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: str: the final part of ``path2``. Example: >>> frombase('foo/bar/', 'foo/bar/baz/egg') 'baz/egg' """ if not isparent(path1, path2): raise ValueError("path1 must be a prefix of path2") return path2[len(path1) :]
[ "def", "frombase", "(", "path1", ",", "path2", ")", ":", "# type: (Text, Text) -> Text", "if", "not", "isparent", "(", "path1", ",", "path2", ")", ":", "raise", "ValueError", "(", "\"path1 must be a prefix of path2\"", ")", "return", "path2", "[", "len", "(", "path1", ")", ":", "]" ]
Get the final path of ``path2`` that isn't in ``path1``. Arguments: path1 (str): A PyFilesytem path. path2 (str): A PyFilesytem path. Returns: str: the final part of ``path2``. Example: >>> frombase('foo/bar/', 'foo/bar/baz/egg') 'baz/egg'
[ "Get", "the", "final", "path", "of", "path2", "that", "isn", "t", "in", "path1", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/path.py#L520-L538
233,662
PyFilesystem/pyfilesystem2
fs/path.py
relativefrom
def relativefrom(base, path): # type: (Text, Text) -> Text """Return a path relative from a given base path. Insert backrefs as appropriate to reach the path from the base. Arguments: base (str): Path to a directory. path (str): Path to make relative. Returns: str: the path to ``base`` from ``path``. >>> relativefrom("foo/bar", "baz/index.html") '../../baz/index.html' """ base_parts = list(iteratepath(base)) path_parts = list(iteratepath(path)) common = 0 for component_a, component_b in zip(base_parts, path_parts): if component_a != component_b: break common += 1 return "/".join([".."] * (len(base_parts) - common) + path_parts[common:])
python
def relativefrom(base, path): # type: (Text, Text) -> Text """Return a path relative from a given base path. Insert backrefs as appropriate to reach the path from the base. Arguments: base (str): Path to a directory. path (str): Path to make relative. Returns: str: the path to ``base`` from ``path``. >>> relativefrom("foo/bar", "baz/index.html") '../../baz/index.html' """ base_parts = list(iteratepath(base)) path_parts = list(iteratepath(path)) common = 0 for component_a, component_b in zip(base_parts, path_parts): if component_a != component_b: break common += 1 return "/".join([".."] * (len(base_parts) - common) + path_parts[common:])
[ "def", "relativefrom", "(", "base", ",", "path", ")", ":", "# type: (Text, Text) -> Text", "base_parts", "=", "list", "(", "iteratepath", "(", "base", ")", ")", "path_parts", "=", "list", "(", "iteratepath", "(", "path", ")", ")", "common", "=", "0", "for", "component_a", ",", "component_b", "in", "zip", "(", "base_parts", ",", "path_parts", ")", ":", "if", "component_a", "!=", "component_b", ":", "break", "common", "+=", "1", "return", "\"/\"", ".", "join", "(", "[", "\"..\"", "]", "*", "(", "len", "(", "base_parts", ")", "-", "common", ")", "+", "path_parts", "[", "common", ":", "]", ")" ]
Return a path relative from a given base path. Insert backrefs as appropriate to reach the path from the base. Arguments: base (str): Path to a directory. path (str): Path to make relative. Returns: str: the path to ``base`` from ``path``. >>> relativefrom("foo/bar", "baz/index.html") '../../baz/index.html'
[ "Return", "a", "path", "relative", "from", "a", "given", "base", "path", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/path.py#L541-L567
233,663
PyFilesystem/pyfilesystem2
fs/error_tools.py
unwrap_errors
def unwrap_errors(path_replace): # type: (Union[Text, Mapping[Text, Text]]) -> Iterator[None] """Get a context to map OS errors to their `fs.errors` counterpart. The context will re-write the paths in resource exceptions to be in the same context as the wrapped filesystem. The only parameter may be the path from the parent, if only one path is to be unwrapped. Or it may be a dictionary that maps wrapped paths on to unwrapped paths. """ try: yield except errors.ResourceError as e: if hasattr(e, "path"): if isinstance(path_replace, Mapping): e.path = path_replace.get(e.path, e.path) else: e.path = path_replace reraise(type(e), e)
python
def unwrap_errors(path_replace): # type: (Union[Text, Mapping[Text, Text]]) -> Iterator[None] """Get a context to map OS errors to their `fs.errors` counterpart. The context will re-write the paths in resource exceptions to be in the same context as the wrapped filesystem. The only parameter may be the path from the parent, if only one path is to be unwrapped. Or it may be a dictionary that maps wrapped paths on to unwrapped paths. """ try: yield except errors.ResourceError as e: if hasattr(e, "path"): if isinstance(path_replace, Mapping): e.path = path_replace.get(e.path, e.path) else: e.path = path_replace reraise(type(e), e)
[ "def", "unwrap_errors", "(", "path_replace", ")", ":", "# type: (Union[Text, Mapping[Text, Text]]) -> Iterator[None]", "try", ":", "yield", "except", "errors", ".", "ResourceError", "as", "e", ":", "if", "hasattr", "(", "e", ",", "\"path\"", ")", ":", "if", "isinstance", "(", "path_replace", ",", "Mapping", ")", ":", "e", ".", "path", "=", "path_replace", ".", "get", "(", "e", ".", "path", ",", "e", ".", "path", ")", "else", ":", "e", ".", "path", "=", "path_replace", "reraise", "(", "type", "(", "e", ")", ",", "e", ")" ]
Get a context to map OS errors to their `fs.errors` counterpart. The context will re-write the paths in resource exceptions to be in the same context as the wrapped filesystem. The only parameter may be the path from the parent, if only one path is to be unwrapped. Or it may be a dictionary that maps wrapped paths on to unwrapped paths.
[ "Get", "a", "context", "to", "map", "OS", "errors", "to", "their", "fs", ".", "errors", "counterpart", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/error_tools.py#L99-L119
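A hedged usage sketch for unwrap_errors: it is a context manager, and the snippet assumes the rewritten ``path`` attribute is observable on the re-raised ResourceError (the "/outer/..." path is illustrative):

import fs
from fs import errors
from fs.error_tools import unwrap_errors

inner = fs.open_fs("mem://")
try:
    with unwrap_errors("/outer/missing.txt"):
        inner.remove("/missing.txt")   # raises ResourceNotFound for the inner path
except errors.ResourceNotFound as error:
    print(error.path)                  # /outer/missing.txt -- rewritten on re-raise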
233,664
PyFilesystem/pyfilesystem2
fs/wildcard.py
match
def match(pattern, name): # type: (Text, Text) -> bool """Test whether a name matches a wildcard pattern. Arguments: pattern (str): A wildcard pattern, e.g. ``"*.py"``. name (str): A filename. Returns: bool: `True` if the filename matches the pattern. """ try: re_pat = _PATTERN_CACHE[(pattern, True)] except KeyError: res = "(?ms)" + _translate(pattern) + r'\Z' _PATTERN_CACHE[(pattern, True)] = re_pat = re.compile(res) return re_pat.match(name) is not None
python
def match(pattern, name): # type: (Text, Text) -> bool """Test whether a name matches a wildcard pattern. Arguments: pattern (str): A wildcard pattern, e.g. ``"*.py"``. name (str): A filename. Returns: bool: `True` if the filename matches the pattern. """ try: re_pat = _PATTERN_CACHE[(pattern, True)] except KeyError: res = "(?ms)" + _translate(pattern) + r'\Z' _PATTERN_CACHE[(pattern, True)] = re_pat = re.compile(res) return re_pat.match(name) is not None
[ "def", "match", "(", "pattern", ",", "name", ")", ":", "# type: (Text, Text) -> bool", "try", ":", "re_pat", "=", "_PATTERN_CACHE", "[", "(", "pattern", ",", "True", ")", "]", "except", "KeyError", ":", "res", "=", "\"(?ms)\"", "+", "_translate", "(", "pattern", ")", "+", "r'\\Z'", "_PATTERN_CACHE", "[", "(", "pattern", ",", "True", ")", "]", "=", "re_pat", "=", "re", ".", "compile", "(", "res", ")", "return", "re_pat", ".", "match", "(", "name", ")", "is", "not", "None" ]
Test whether a name matches a wildcard pattern. Arguments: pattern (str): A wildcard pattern, e.g. ``"*.py"``. name (str): A filename. Returns: bool: `True` if the filename matches the pattern.
[ "Test", "whether", "a", "name", "matches", "a", "wildcard", "pattern", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/wildcard.py#L21-L38
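A few illustrative calls to fs.wildcard.match, showing that '*' does not cross directory separators (a consequence of the '[^/]*' translation performed by _translate further down):

from fs import wildcard

print(wildcard.match("*.py", "module.py"))      # True
print(wildcard.match("*.py", "module.pyc"))     # False
print(wildcard.match("*.py", "pkg/module.py"))  # False -- '*' stops at '/'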
233,665
PyFilesystem/pyfilesystem2
fs/wildcard.py
match_any
def match_any(patterns, name): # type: (Iterable[Text], Text) -> bool """Test if a name matches any of a list of patterns. Will return `True` if ``patterns`` is an empty list. Arguments: patterns (list): A list of wildcard pattern, e.g ``["*.py", "*.pyc"]`` name (str): A filename. Returns: bool: `True` if the name matches at least one of the patterns. """ if not patterns: return True return any(match(pattern, name) for pattern in patterns)
python
def match_any(patterns, name): # type: (Iterable[Text], Text) -> bool """Test if a name matches any of a list of patterns. Will return `True` if ``patterns`` is an empty list. Arguments: patterns (list): A list of wildcard pattern, e.g ``["*.py", "*.pyc"]`` name (str): A filename. Returns: bool: `True` if the name matches at least one of the patterns. """ if not patterns: return True return any(match(pattern, name) for pattern in patterns)
[ "def", "match_any", "(", "patterns", ",", "name", ")", ":", "# type: (Iterable[Text], Text) -> bool", "if", "not", "patterns", ":", "return", "True", "return", "any", "(", "match", "(", "pattern", ",", "name", ")", "for", "pattern", "in", "patterns", ")" ]
Test if a name matches any of a list of patterns. Will return `True` if ``patterns`` is an empty list. Arguments: patterns (list): A list of wildcard pattern, e.g ``["*.py", "*.pyc"]`` name (str): A filename. Returns: bool: `True` if the name matches at least one of the patterns.
[ "Test", "if", "a", "name", "matches", "any", "of", "a", "list", "of", "patterns", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/wildcard.py#L61-L78
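Illustrative calls to match_any; note the documented behaviour that an empty pattern list matches everything:

from fs.wildcard import match_any

print(match_any(["*.py", "*.pyc"], "module.pyc"))  # True
print(match_any(["*.py", "*.pyc"], "notes.txt"))   # False
print(match_any([], "anything.bin"))               # True -- empty list matches all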
233,666
PyFilesystem/pyfilesystem2
fs/wildcard.py
get_matcher
def get_matcher(patterns, case_sensitive): # type: (Iterable[Text], bool) -> Callable[[Text], bool] """Get a callable that matches names against the given patterns. Arguments: patterns (list): A list of wildcard pattern. e.g. ``["*.py", "*.pyc"]`` case_sensitive (bool): If ``True``, then the callable will be case sensitive, otherwise it will be case insensitive. Returns: callable: a matcher that will return `True` if the name given as an argument matches any of the given patterns. Example: >>> from fs import wildcard >>> is_python = wildcard.get_matcher(['*.py'], True) >>> is_python('__init__.py') True >>> is_python('foo.txt') False """ if not patterns: return lambda name: True if case_sensitive: return partial(match_any, patterns) else: return partial(imatch_any, patterns)
python
def get_matcher(patterns, case_sensitive): # type: (Iterable[Text], bool) -> Callable[[Text], bool] """Get a callable that matches names against the given patterns. Arguments: patterns (list): A list of wildcard pattern. e.g. ``["*.py", "*.pyc"]`` case_sensitive (bool): If ``True``, then the callable will be case sensitive, otherwise it will be case insensitive. Returns: callable: a matcher that will return `True` if the name given as an argument matches any of the given patterns. Example: >>> from fs import wildcard >>> is_python = wildcard.get_matcher(['*.py'], True) >>> is_python('__init__.py') True >>> is_python('foo.txt') False """ if not patterns: return lambda name: True if case_sensitive: return partial(match_any, patterns) else: return partial(imatch_any, patterns)
[ "def", "get_matcher", "(", "patterns", ",", "case_sensitive", ")", ":", "# type: (Iterable[Text], bool) -> Callable[[Text], bool]", "if", "not", "patterns", ":", "return", "lambda", "name", ":", "True", "if", "case_sensitive", ":", "return", "partial", "(", "match_any", ",", "patterns", ")", "else", ":", "return", "partial", "(", "imatch_any", ",", "patterns", ")" ]
Get a callable that matches names against the given patterns. Arguments: patterns (list): A list of wildcard pattern. e.g. ``["*.py", "*.pyc"]`` case_sensitive (bool): If ``True``, then the callable will be case sensitive, otherwise it will be case insensitive. Returns: callable: a matcher that will return `True` if the name given as an argument matches any of the given patterns. Example: >>> from fs import wildcard >>> is_python = wildcard.get_matcher(['*.py'], True) >>> is_python('__init__.py') True >>> is_python('foo.txt') False
[ "Get", "a", "callable", "that", "matches", "names", "against", "the", "given", "patterns", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/wildcard.py#L101-L129
233,667
PyFilesystem/pyfilesystem2
fs/wildcard.py
_translate
def _translate(pattern, case_sensitive=True): # type: (Text, bool) -> Text """Translate a wildcard pattern to a regular expression. There is no way to quote meta-characters. Arguments: pattern (str): A wildcard pattern. case_sensitive (bool): Set to `False` to use a case insensitive regex (default `True`). Returns: str: A regex equivalent to the given pattern. """ if not case_sensitive: pattern = pattern.lower() i, n = 0, len(pattern) res = "" while i < n: c = pattern[i] i = i + 1 if c == "*": res = res + "[^/]*" elif c == "?": res = res + "." elif c == "[": j = i if j < n and pattern[j] == "!": j = j + 1 if j < n and pattern[j] == "]": j = j + 1 while j < n and pattern[j] != "]": j = j + 1 if j >= n: res = res + "\\[" else: stuff = pattern[i:j].replace("\\", "\\\\") i = j + 1 if stuff[0] == "!": stuff = "^" + stuff[1:] elif stuff[0] == "^": stuff = "\\" + stuff res = "%s[%s]" % (res, stuff) else: res = res + re.escape(c) return res
python
def _translate(pattern, case_sensitive=True): # type: (Text, bool) -> Text """Translate a wildcard pattern to a regular expression. There is no way to quote meta-characters. Arguments: pattern (str): A wildcard pattern. case_sensitive (bool): Set to `False` to use a case insensitive regex (default `True`). Returns: str: A regex equivalent to the given pattern. """ if not case_sensitive: pattern = pattern.lower() i, n = 0, len(pattern) res = "" while i < n: c = pattern[i] i = i + 1 if c == "*": res = res + "[^/]*" elif c == "?": res = res + "." elif c == "[": j = i if j < n and pattern[j] == "!": j = j + 1 if j < n and pattern[j] == "]": j = j + 1 while j < n and pattern[j] != "]": j = j + 1 if j >= n: res = res + "\\[" else: stuff = pattern[i:j].replace("\\", "\\\\") i = j + 1 if stuff[0] == "!": stuff = "^" + stuff[1:] elif stuff[0] == "^": stuff = "\\" + stuff res = "%s[%s]" % (res, stuff) else: res = res + re.escape(c) return res
[ "def", "_translate", "(", "pattern", ",", "case_sensitive", "=", "True", ")", ":", "# type: (Text, bool) -> Text", "if", "not", "case_sensitive", ":", "pattern", "=", "pattern", ".", "lower", "(", ")", "i", ",", "n", "=", "0", ",", "len", "(", "pattern", ")", "res", "=", "\"\"", "while", "i", "<", "n", ":", "c", "=", "pattern", "[", "i", "]", "i", "=", "i", "+", "1", "if", "c", "==", "\"*\"", ":", "res", "=", "res", "+", "\"[^/]*\"", "elif", "c", "==", "\"?\"", ":", "res", "=", "res", "+", "\".\"", "elif", "c", "==", "\"[\"", ":", "j", "=", "i", "if", "j", "<", "n", "and", "pattern", "[", "j", "]", "==", "\"!\"", ":", "j", "=", "j", "+", "1", "if", "j", "<", "n", "and", "pattern", "[", "j", "]", "==", "\"]\"", ":", "j", "=", "j", "+", "1", "while", "j", "<", "n", "and", "pattern", "[", "j", "]", "!=", "\"]\"", ":", "j", "=", "j", "+", "1", "if", "j", ">=", "n", ":", "res", "=", "res", "+", "\"\\\\[\"", "else", ":", "stuff", "=", "pattern", "[", "i", ":", "j", "]", ".", "replace", "(", "\"\\\\\"", ",", "\"\\\\\\\\\"", ")", "i", "=", "j", "+", "1", "if", "stuff", "[", "0", "]", "==", "\"!\"", ":", "stuff", "=", "\"^\"", "+", "stuff", "[", "1", ":", "]", "elif", "stuff", "[", "0", "]", "==", "\"^\"", ":", "stuff", "=", "\"\\\\\"", "+", "stuff", "res", "=", "\"%s[%s]\"", "%", "(", "res", ",", "stuff", ")", "else", ":", "res", "=", "res", "+", "re", ".", "escape", "(", "c", ")", "return", "res" ]
Translate a wildcard pattern to a regular expression. There is no way to quote meta-characters. Arguments: pattern (str): A wildcard pattern. case_sensitive (bool): Set to `False` to use a case insensitive regex (default `True`). Returns: str: A regex equivalent to the given pattern.
[ "Translate", "a", "wildcard", "pattern", "to", "a", "regular", "expression", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/wildcard.py#L132-L178
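_translate is a private helper, so calling it directly here is purely illustrative; the expected outputs follow from the translation rules in the code above:

from fs.wildcard import _translate

print(_translate("*.py"))       # [^/]*\.py   -- '*' becomes a no-slash run
print(_translate("file?.txt"))  # file.\.txt  -- '?' becomes '.'
print(_translate("[!ab]*"))     # [^ab][^/]*  -- '!' negates a character set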
233,668
PyFilesystem/pyfilesystem2
fs/mountfs.py
MountFS._delegate
def _delegate(self, path): # type: (Text) -> Tuple[FS, Text] """Get the delegate FS for a given path. Arguments: path (str): A path. Returns: (FS, str): a tuple of ``(<fs>, <path>)`` for a mounted filesystem, or ``(None, None)`` if no filesystem is mounted on the given ``path``. """ _path = forcedir(abspath(normpath(path))) is_mounted = _path.startswith for mount_path, fs in self.mounts: if is_mounted(mount_path): return fs, _path[len(mount_path) :].rstrip("/") return self.default_fs, path
python
def _delegate(self, path): # type: (Text) -> Tuple[FS, Text] """Get the delegate FS for a given path. Arguments: path (str): A path. Returns: (FS, str): a tuple of ``(<fs>, <path>)`` for a mounted filesystem, or ``(None, None)`` if no filesystem is mounted on the given ``path``. """ _path = forcedir(abspath(normpath(path))) is_mounted = _path.startswith for mount_path, fs in self.mounts: if is_mounted(mount_path): return fs, _path[len(mount_path) :].rstrip("/") return self.default_fs, path
[ "def", "_delegate", "(", "self", ",", "path", ")", ":", "# type: (Text) -> Tuple[FS, Text]", "_path", "=", "forcedir", "(", "abspath", "(", "normpath", "(", "path", ")", ")", ")", "is_mounted", "=", "_path", ".", "startswith", "for", "mount_path", ",", "fs", "in", "self", ".", "mounts", ":", "if", "is_mounted", "(", "mount_path", ")", ":", "return", "fs", ",", "_path", "[", "len", "(", "mount_path", ")", ":", "]", ".", "rstrip", "(", "\"/\"", ")", "return", "self", ".", "default_fs", ",", "path" ]
Get the delegate FS for a given path. Arguments: path (str): A path. Returns: (FS, str): a tuple of ``(<fs>, <path>)`` for a mounted filesystem, or ``(None, None)`` if no filesystem is mounted on the given ``path``.
[ "Get", "the", "delegate", "FS", "for", "a", "given", "path", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/mountfs.py#L80-L100
233,669
PyFilesystem/pyfilesystem2
fs/mountfs.py
MountFS.mount
def mount(self, path, fs): # type: (Text, Union[FS, Text]) -> None """Mounts a host FS object on a given path. Arguments: path (str): A path within the MountFS. fs (FS or str): A filesystem (instance or URL) to mount. """ if isinstance(fs, text_type): from .opener import open_fs fs = open_fs(fs) if not isinstance(fs, FS): raise TypeError("fs argument must be an FS object or a FS URL") if fs is self: raise ValueError("Unable to mount self") _path = forcedir(abspath(normpath(path))) for mount_path, _ in self.mounts: if _path.startswith(mount_path): raise MountError("mount point overlaps existing mount") self.mounts.append((_path, fs)) self.default_fs.makedirs(_path, recreate=True)
python
def mount(self, path, fs): # type: (Text, Union[FS, Text]) -> None """Mounts a host FS object on a given path. Arguments: path (str): A path within the MountFS. fs (FS or str): A filesystem (instance or URL) to mount. """ if isinstance(fs, text_type): from .opener import open_fs fs = open_fs(fs) if not isinstance(fs, FS): raise TypeError("fs argument must be an FS object or a FS URL") if fs is self: raise ValueError("Unable to mount self") _path = forcedir(abspath(normpath(path))) for mount_path, _ in self.mounts: if _path.startswith(mount_path): raise MountError("mount point overlaps existing mount") self.mounts.append((_path, fs)) self.default_fs.makedirs(_path, recreate=True)
[ "def", "mount", "(", "self", ",", "path", ",", "fs", ")", ":", "# type: (Text, Union[FS, Text]) -> None", "if", "isinstance", "(", "fs", ",", "text_type", ")", ":", "from", ".", "opener", "import", "open_fs", "fs", "=", "open_fs", "(", "fs", ")", "if", "not", "isinstance", "(", "fs", ",", "FS", ")", ":", "raise", "TypeError", "(", "\"fs argument must be an FS object or a FS URL\"", ")", "if", "fs", "is", "self", ":", "raise", "ValueError", "(", "\"Unable to mount self\"", ")", "_path", "=", "forcedir", "(", "abspath", "(", "normpath", "(", "path", ")", ")", ")", "for", "mount_path", ",", "_", "in", "self", ".", "mounts", ":", "if", "_path", ".", "startswith", "(", "mount_path", ")", ":", "raise", "MountError", "(", "\"mount point overlaps existing mount\"", ")", "self", ".", "mounts", ".", "append", "(", "(", "_path", ",", "fs", ")", ")", "self", ".", "default_fs", ".", "makedirs", "(", "_path", ",", "recreate", "=", "True", ")" ]
Mounts a host FS object on a given path. Arguments: path (str): A path within the MountFS. fs (FS or str): A filesystem (instance or URL) to mount.
[ "Mounts", "a", "host", "FS", "object", "on", "a", "given", "path", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/mountfs.py#L102-L128
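A minimal MountFS.mount sketch, assuming a MemoryFS as the mounted filesystem; the file names are illustrative:

from fs.memoryfs import MemoryFS
from fs.mountfs import MountFS

combined = MountFS()
media = MemoryFS()
combined.mount("/media", media)            # an FS instance or an FS URL is accepted

combined.writetext("/media/note.txt", "hi")
print(media.readtext("/note.txt"))         # hi -- the write went through the mount

# combined.mount("/media/sub", MemoryFS())  # would raise MountError (overlapping mount)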
233,670
PyFilesystem/pyfilesystem2
fs/_bulk.py
Copier.start
def start(self): """Start the workers.""" if self.num_workers: self.queue = Queue(maxsize=self.num_workers) self.workers = [_Worker(self) for _ in range(self.num_workers)] for worker in self.workers: worker.start() self.running = True
python
def start(self): """Start the workers.""" if self.num_workers: self.queue = Queue(maxsize=self.num_workers) self.workers = [_Worker(self) for _ in range(self.num_workers)] for worker in self.workers: worker.start() self.running = True
[ "def", "start", "(", "self", ")", ":", "if", "self", ".", "num_workers", ":", "self", ".", "queue", "=", "Queue", "(", "maxsize", "=", "self", ".", "num_workers", ")", "self", ".", "workers", "=", "[", "_Worker", "(", "self", ")", "for", "_", "in", "range", "(", "self", ".", "num_workers", ")", "]", "for", "worker", "in", "self", ".", "workers", ":", "worker", ".", "start", "(", ")", "self", ".", "running", "=", "True" ]
Start the workers.
[ "Start", "the", "workers", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/_bulk.py#L87-L94
233,671
PyFilesystem/pyfilesystem2
fs/_bulk.py
Copier.copy
def copy(self, src_fs, src_path, dst_fs, dst_path): # type: (FS, Text, FS, Text) -> None """Copy a file from one fs to another.""" if self.queue is None: # This should be the most performant for a single-thread copy_file_internal(src_fs, src_path, dst_fs, dst_path) else: src_file = src_fs.openbin(src_path, "r") try: dst_file = dst_fs.openbin(dst_path, "w") except Exception: src_file.close() raise task = _CopyTask(src_file, dst_file) self.queue.put(task)
python
def copy(self, src_fs, src_path, dst_fs, dst_path): # type: (FS, Text, FS, Text) -> None """Copy a file from one fs to another.""" if self.queue is None: # This should be the most performant for a single-thread copy_file_internal(src_fs, src_path, dst_fs, dst_path) else: src_file = src_fs.openbin(src_path, "r") try: dst_file = dst_fs.openbin(dst_path, "w") except Exception: src_file.close() raise task = _CopyTask(src_file, dst_file) self.queue.put(task)
[ "def", "copy", "(", "self", ",", "src_fs", ",", "src_path", ",", "dst_fs", ",", "dst_path", ")", ":", "# type: (FS, Text, FS, Text) -> None", "if", "self", ".", "queue", "is", "None", ":", "# This should be the most performant for a single-thread", "copy_file_internal", "(", "src_fs", ",", "src_path", ",", "dst_fs", ",", "dst_path", ")", "else", ":", "src_file", "=", "src_fs", ".", "openbin", "(", "src_path", ",", "\"r\"", ")", "try", ":", "dst_file", "=", "dst_fs", ".", "openbin", "(", "dst_path", ",", "\"w\"", ")", "except", "Exception", ":", "src_file", ".", "close", "(", ")", "raise", "task", "=", "_CopyTask", "(", "src_file", ",", "dst_file", ")", "self", ".", "queue", ".", "put", "(", "task", ")" ]
Copy a file from one fs to another.
[ "Copy", "a", "file", "from", "one", "fs", "to", "another", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/_bulk.py#L126-L140
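Copier lives in the private fs._bulk module, so this sketch rests on assumptions: the num_workers constructor argument and the context-manager behaviour (start on enter, drain and stop on exit) are inferred from how fs.copy drives the class, not guaranteed API:

import fs
from fs._bulk import Copier

src = fs.open_fs("mem://")
dst = fs.open_fs("mem://")
src.writetext("/a.txt", "alpha")
src.writetext("/b.txt", "beta")

with Copier(num_workers=2) as copier:        # 0 would mean single-threaded copies
    for name in ("/a.txt", "/b.txt"):
        copier.copy(src, name, dst, name)

print(sorted(dst.listdir("/")))              # ['a.txt', 'b.txt']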
233,672
PyFilesystem/pyfilesystem2
fs/multifs.py
MultiFS.add_fs
def add_fs(self, name, fs, write=False, priority=0): # type: (Text, FS, bool, int) -> None """Add a filesystem to the MultiFS. Arguments: name (str): A unique name to refer to the filesystem being added. fs (FS or str): The filesystem (instance or URL) to add. write (bool): If this value is True, then the ``fs`` will be used as the writeable FS (defaults to False). priority (int): An integer that denotes the priority of the filesystem being added. Filesystems will be searched in descending priority order and then by the reverse order they were added. So by default, the most recently added filesystem will be looked at first. """ if isinstance(fs, text_type): fs = open_fs(fs) if not isinstance(fs, FS): raise TypeError("fs argument should be an FS object or FS URL") self._filesystems[name] = _PrioritizedFS( priority=(priority, self._sort_index), fs=fs ) self._sort_index += 1 self._resort() if write: self.write_fs = fs self._write_fs_name = name
python
def add_fs(self, name, fs, write=False, priority=0): # type: (Text, FS, bool, int) -> None """Add a filesystem to the MultiFS. Arguments: name (str): A unique name to refer to the filesystem being added. fs (FS or str): The filesystem (instance or URL) to add. write (bool): If this value is True, then the ``fs`` will be used as the writeable FS (defaults to False). priority (int): An integer that denotes the priority of the filesystem being added. Filesystems will be searched in descending priority order and then by the reverse order they were added. So by default, the most recently added filesystem will be looked at first. """ if isinstance(fs, text_type): fs = open_fs(fs) if not isinstance(fs, FS): raise TypeError("fs argument should be an FS object or FS URL") self._filesystems[name] = _PrioritizedFS( priority=(priority, self._sort_index), fs=fs ) self._sort_index += 1 self._resort() if write: self.write_fs = fs self._write_fs_name = name
[ "def", "add_fs", "(", "self", ",", "name", ",", "fs", ",", "write", "=", "False", ",", "priority", "=", "0", ")", ":", "# type: (Text, FS, bool, int) -> None", "if", "isinstance", "(", "fs", ",", "text_type", ")", ":", "fs", "=", "open_fs", "(", "fs", ")", "if", "not", "isinstance", "(", "fs", ",", "FS", ")", ":", "raise", "TypeError", "(", "\"fs argument should be an FS object or FS URL\"", ")", "self", ".", "_filesystems", "[", "name", "]", "=", "_PrioritizedFS", "(", "priority", "=", "(", "priority", ",", "self", ".", "_sort_index", ")", ",", "fs", "=", "fs", ")", "self", ".", "_sort_index", "+=", "1", "self", ".", "_resort", "(", ")", "if", "write", ":", "self", ".", "write_fs", "=", "fs", "self", ".", "_write_fs_name", "=", "name" ]
Add a filesystem to the MultiFS. Arguments: name (str): A unique name to refer to the filesystem being added. fs (FS or str): The filesystem (instance or URL) to add. write (bool): If this value is True, then the ``fs`` will be used as the writeable FS (defaults to False). priority (int): An integer that denotes the priority of the filesystem being added. Filesystems will be searched in descending priority order and then by the reverse order they were added. So by default, the most recently added filesystem will be looked at first.
[ "Add", "a", "filesystem", "to", "the", "MultiFS", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/multifs.py#L79-L110
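A sketch of MultiFS.add_fs combining a read-only base layer with a writeable overlay (the names and contents are illustrative):

from fs.memoryfs import MemoryFS
from fs.multifs import MultiFS

base = MemoryFS()
base.writetext("/config.ini", "defaults")
overlay = MemoryFS()

multi = MultiFS()
multi.add_fs("base", base)
multi.add_fs("overlay", overlay, write=True)   # new files go to this filesystem

multi.writetext("/user.ini", "custom")
print(multi.readtext("/config.ini"))   # defaults -- found on the 'base' layer
print(overlay.exists("/user.ini"))     # True -- the write went to the overlay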
233,673
PyFilesystem/pyfilesystem2
fs/multifs.py
MultiFS._delegate
def _delegate(self, path): # type: (Text) -> Optional[FS] """Get a filesystem which has a given path. """ for _name, fs in self.iterate_fs(): if fs.exists(path): return fs return None
python
def _delegate(self, path): # type: (Text) -> Optional[FS] """Get a filesystem which has a given path. """ for _name, fs in self.iterate_fs(): if fs.exists(path): return fs return None
[ "def", "_delegate", "(", "self", ",", "path", ")", ":", "# type: (Text) -> Optional[FS]", "for", "_name", ",", "fs", "in", "self", ".", "iterate_fs", "(", ")", ":", "if", "fs", ".", "exists", "(", "path", ")", ":", "return", "fs", "return", "None" ]
Get a filesystem which has a given path.
[ "Get", "a", "filesystem", "which", "has", "a", "given", "path", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/multifs.py#L147-L154
233,674
PyFilesystem/pyfilesystem2
fs/multifs.py
MultiFS._delegate_required
def _delegate_required(self, path): # type: (Text) -> FS """Check that there is a filesystem with the given ``path``. """ fs = self._delegate(path) if fs is None: raise errors.ResourceNotFound(path) return fs
python
def _delegate_required(self, path): # type: (Text) -> FS """Check that there is a filesystem with the given ``path``. """ fs = self._delegate(path) if fs is None: raise errors.ResourceNotFound(path) return fs
[ "def", "_delegate_required", "(", "self", ",", "path", ")", ":", "# type: (Text) -> FS", "fs", "=", "self", ".", "_delegate", "(", "path", ")", "if", "fs", "is", "None", ":", "raise", "errors", ".", "ResourceNotFound", "(", "path", ")", "return", "fs" ]
Check that there is a filesystem with the given ``path``.
[ "Check", "that", "there", "is", "a", "filesystem", "with", "the", "given", "path", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/multifs.py#L156-L163
233,675
PyFilesystem/pyfilesystem2
fs/multifs.py
MultiFS._writable_required
def _writable_required(self, path): # type: (Text) -> FS """Check that ``path`` is writeable. """ if self.write_fs is None: raise errors.ResourceReadOnly(path) return self.write_fs
python
def _writable_required(self, path): # type: (Text) -> FS """Check that ``path`` is writeable. """ if self.write_fs is None: raise errors.ResourceReadOnly(path) return self.write_fs
[ "def", "_writable_required", "(", "self", ",", "path", ")", ":", "# type: (Text) -> FS", "if", "self", ".", "write_fs", "is", "None", ":", "raise", "errors", ".", "ResourceReadOnly", "(", "path", ")", "return", "self", ".", "write_fs" ]
Check that ``path`` is writeable.
[ "Check", "that", "path", "is", "writeable", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/multifs.py#L165-L171
233,676
PyFilesystem/pyfilesystem2
fs/iotools.py
make_stream
def make_stream( name, # type: Text bin_file, # type: RawIOBase mode="r", # type: Text buffering=-1, # type: int encoding=None, # type: Optional[Text] errors=None, # type: Optional[Text] newline="", # type: Optional[Text] line_buffering=False, # type: bool **kwargs # type: Any ): # type: (...) -> IO """Take a Python 2.x binary file and return an IO Stream. """ reading = "r" in mode writing = "w" in mode appending = "a" in mode binary = "b" in mode if "+" in mode: reading = True writing = True encoding = None if binary else (encoding or "utf-8") io_object = RawWrapper(bin_file, mode=mode, name=name) # type: io.IOBase if buffering >= 0: if reading and writing: io_object = io.BufferedRandom( typing.cast(io.RawIOBase, io_object), buffering or io.DEFAULT_BUFFER_SIZE, ) elif reading: io_object = io.BufferedReader( typing.cast(io.RawIOBase, io_object), buffering or io.DEFAULT_BUFFER_SIZE, ) elif writing or appending: io_object = io.BufferedWriter( typing.cast(io.RawIOBase, io_object), buffering or io.DEFAULT_BUFFER_SIZE, ) if not binary: io_object = io.TextIOWrapper( io_object, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, ) return io_object
python
def make_stream( name, # type: Text bin_file, # type: RawIOBase mode="r", # type: Text buffering=-1, # type: int encoding=None, # type: Optional[Text] errors=None, # type: Optional[Text] newline="", # type: Optional[Text] line_buffering=False, # type: bool **kwargs # type: Any ): # type: (...) -> IO """Take a Python 2.x binary file and return an IO Stream. """ reading = "r" in mode writing = "w" in mode appending = "a" in mode binary = "b" in mode if "+" in mode: reading = True writing = True encoding = None if binary else (encoding or "utf-8") io_object = RawWrapper(bin_file, mode=mode, name=name) # type: io.IOBase if buffering >= 0: if reading and writing: io_object = io.BufferedRandom( typing.cast(io.RawIOBase, io_object), buffering or io.DEFAULT_BUFFER_SIZE, ) elif reading: io_object = io.BufferedReader( typing.cast(io.RawIOBase, io_object), buffering or io.DEFAULT_BUFFER_SIZE, ) elif writing or appending: io_object = io.BufferedWriter( typing.cast(io.RawIOBase, io_object), buffering or io.DEFAULT_BUFFER_SIZE, ) if not binary: io_object = io.TextIOWrapper( io_object, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, ) return io_object
[ "def", "make_stream", "(", "name", ",", "# type: Text", "bin_file", ",", "# type: RawIOBase", "mode", "=", "\"r\"", ",", "# type: Text", "buffering", "=", "-", "1", ",", "# type: int", "encoding", "=", "None", ",", "# type: Optional[Text]", "errors", "=", "None", ",", "# type: Optional[Text]", "newline", "=", "\"\"", ",", "# type: Optional[Text]", "line_buffering", "=", "False", ",", "# type: bool", "*", "*", "kwargs", "# type: Any", ")", ":", "# type: (...) -> IO", "reading", "=", "\"r\"", "in", "mode", "writing", "=", "\"w\"", "in", "mode", "appending", "=", "\"a\"", "in", "mode", "binary", "=", "\"b\"", "in", "mode", "if", "\"+\"", "in", "mode", ":", "reading", "=", "True", "writing", "=", "True", "encoding", "=", "None", "if", "binary", "else", "(", "encoding", "or", "\"utf-8\"", ")", "io_object", "=", "RawWrapper", "(", "bin_file", ",", "mode", "=", "mode", ",", "name", "=", "name", ")", "# type: io.IOBase", "if", "buffering", ">=", "0", ":", "if", "reading", "and", "writing", ":", "io_object", "=", "io", ".", "BufferedRandom", "(", "typing", ".", "cast", "(", "io", ".", "RawIOBase", ",", "io_object", ")", ",", "buffering", "or", "io", ".", "DEFAULT_BUFFER_SIZE", ",", ")", "elif", "reading", ":", "io_object", "=", "io", ".", "BufferedReader", "(", "typing", ".", "cast", "(", "io", ".", "RawIOBase", ",", "io_object", ")", ",", "buffering", "or", "io", ".", "DEFAULT_BUFFER_SIZE", ",", ")", "elif", "writing", "or", "appending", ":", "io_object", "=", "io", ".", "BufferedWriter", "(", "typing", ".", "cast", "(", "io", ".", "RawIOBase", ",", "io_object", ")", ",", "buffering", "or", "io", ".", "DEFAULT_BUFFER_SIZE", ",", ")", "if", "not", "binary", ":", "io_object", "=", "io", ".", "TextIOWrapper", "(", "io_object", ",", "encoding", "=", "encoding", ",", "errors", "=", "errors", ",", "newline", "=", "newline", ",", "line_buffering", "=", "line_buffering", ",", ")", "return", "io_object" ]
Take a Python 2.x binary file and return an IO Stream.
[ "Take", "a", "Python", "2", ".", "x", "binary", "file", "and", "return", "an", "IO", "Stream", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/iotools.py#L153-L204
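An illustrative call to make_stream, wrapping an io.BytesIO in place of a real binary file to obtain a text-mode stream (the name argument is only used as metadata):

import io
from fs.iotools import make_stream

raw = io.BytesIO(b"first line\nsecond line\n")
stream = make_stream("example.txt", raw, mode="r", encoding="utf-8")
print(repr(stream.readline()))   # 'first line\n'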
233,677
PyFilesystem/pyfilesystem2
fs/iotools.py
line_iterator
def line_iterator(readable_file, size=None): # type: (IO[bytes], Optional[int]) -> Iterator[bytes] """Iterate over the lines of a file. Implementation reads each char individually, which is not very efficient. Yields: str: a single line in the file. """ read = readable_file.read line = [] byte = b"1" if size is None or size < 0: while byte: byte = read(1) line.append(byte) if byte in b"\n": yield b"".join(line) del line[:] else: while byte and size: byte = read(1) size -= len(byte) line.append(byte) if byte in b"\n" or not size: yield b"".join(line) del line[:]
python
def line_iterator(readable_file, size=None): # type: (IO[bytes], Optional[int]) -> Iterator[bytes] """Iterate over the lines of a file. Implementation reads each char individually, which is not very efficient. Yields: str: a single line in the file. """ read = readable_file.read line = [] byte = b"1" if size is None or size < 0: while byte: byte = read(1) line.append(byte) if byte in b"\n": yield b"".join(line) del line[:] else: while byte and size: byte = read(1) size -= len(byte) line.append(byte) if byte in b"\n" or not size: yield b"".join(line) del line[:]
[ "def", "line_iterator", "(", "readable_file", ",", "size", "=", "None", ")", ":", "# type: (IO[bytes], Optional[int]) -> Iterator[bytes]", "read", "=", "readable_file", ".", "read", "line", "=", "[", "]", "byte", "=", "b\"1\"", "if", "size", "is", "None", "or", "size", "<", "0", ":", "while", "byte", ":", "byte", "=", "read", "(", "1", ")", "line", ".", "append", "(", "byte", ")", "if", "byte", "in", "b\"\\n\"", ":", "yield", "b\"\"", ".", "join", "(", "line", ")", "del", "line", "[", ":", "]", "else", ":", "while", "byte", "and", "size", ":", "byte", "=", "read", "(", "1", ")", "size", "-=", "len", "(", "byte", ")", "line", ".", "append", "(", "byte", ")", "if", "byte", "in", "b\"\\n\"", "or", "not", "size", ":", "yield", "b\"\"", ".", "join", "(", "line", ")", "del", "line", "[", ":", "]" ]
Iterate over the lines of a file. Implementation reads each char individually, which is not very efficient. Yields: str: a single line in the file.
[ "Iterate", "over", "the", "lines", "of", "a", "file", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/iotools.py#L207-L236
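line_iterator at work on an in-memory byte stream; the second call shows the size cap cutting the iteration short mid-line:

import io
from fs.iotools import line_iterator

data = io.BytesIO(b"one\ntwo\nthree")
print(list(line_iterator(data)))          # [b'one\n', b'two\n', b'three']

data.seek(0)
print(list(line_iterator(data, size=6)))  # [b'one\n', b'tw'] -- stops after 6 bytes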
233,678
PyFilesystem/pyfilesystem2
fs/mode.py
validate_openbin_mode
def validate_openbin_mode(mode, _valid_chars=frozenset("rwxab+")): # type: (Text, Union[Set[Text], FrozenSet[Text]]) -> None """Check ``mode`` parameter of `~fs.base.FS.openbin` is valid. Arguments: mode (str): Mode parameter. Raises: `ValueError` if mode is not valid. """ if "t" in mode: raise ValueError("text mode not valid in openbin") if not mode: raise ValueError("mode must not be empty") if mode[0] not in "rwxa": raise ValueError("mode must start with 'r', 'w', 'a' or 'x'") if not _valid_chars.issuperset(mode): raise ValueError("mode '{}' contains invalid characters".format(mode))
python
def validate_openbin_mode(mode, _valid_chars=frozenset("rwxab+")): # type: (Text, Union[Set[Text], FrozenSet[Text]]) -> None """Check ``mode`` parameter of `~fs.base.FS.openbin` is valid. Arguments: mode (str): Mode parameter. Raises: `ValueError` if mode is not valid. """ if "t" in mode: raise ValueError("text mode not valid in openbin") if not mode: raise ValueError("mode must not be empty") if mode[0] not in "rwxa": raise ValueError("mode must start with 'r', 'w', 'a' or 'x'") if not _valid_chars.issuperset(mode): raise ValueError("mode '{}' contains invalid characters".format(mode))
[ "def", "validate_openbin_mode", "(", "mode", ",", "_valid_chars", "=", "frozenset", "(", "\"rwxab+\"", ")", ")", ":", "# type: (Text, Union[Set[Text], FrozenSet[Text]]) -> None", "if", "\"t\"", "in", "mode", ":", "raise", "ValueError", "(", "\"text mode not valid in openbin\"", ")", "if", "not", "mode", ":", "raise", "ValueError", "(", "\"mode must not be empty\"", ")", "if", "mode", "[", "0", "]", "not", "in", "\"rwxa\"", ":", "raise", "ValueError", "(", "\"mode must start with 'r', 'w', 'a' or 'x'\"", ")", "if", "not", "_valid_chars", ".", "issuperset", "(", "mode", ")", ":", "raise", "ValueError", "(", "\"mode '{}' contains invalid characters\"", ".", "format", "(", "mode", ")", ")" ]
Check ``mode`` parameter of `~fs.base.FS.openbin` is valid. Arguments: mode (str): Mode parameter. Raises: `ValueError` if mode is not valid.
[ "Check", "mode", "parameter", "of", "~fs", ".", "base", ".", "FS", ".", "openbin", "is", "valid", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/mode.py#L229-L247
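Illustrative calls to validate_openbin_mode: valid modes pass silently, the rest raise ValueError with the messages from the code above:

from fs.mode import validate_openbin_mode

validate_openbin_mode("rb")     # fine -- binary read
validate_openbin_mode("w+b")    # fine -- read/write binary

for bad in ("rt", "", "z", "r$"):
    try:
        validate_openbin_mode(bad)
    except ValueError as error:
        print(bad, "->", error)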
233,679
PyFilesystem/pyfilesystem2
fs/mirror.py
_compare
def _compare(info1, info2): # type: (Info, Info) -> bool """Compare two `Info` objects to see if they should be copied. Returns: bool: `True` if the `Info` are different in size or mtime. """ # Check filesize has changed if info1.size != info2.size: return True # Check modified dates date1 = info1.modified date2 = info2.modified return date1 is None or date2 is None or date1 > date2
python
def _compare(info1, info2): # type: (Info, Info) -> bool """Compare two `Info` objects to see if they should be copied. Returns: bool: `True` if the `Info` are different in size or mtime. """ # Check filesize has changed if info1.size != info2.size: return True # Check modified dates date1 = info1.modified date2 = info2.modified return date1 is None or date2 is None or date1 > date2
[ "def", "_compare", "(", "info1", ",", "info2", ")", ":", "# type: (Info, Info) -> bool", "# Check filesize has changed", "if", "info1", ".", "size", "!=", "info2", ".", "size", ":", "return", "True", "# Check modified dates", "date1", "=", "info1", ".", "modified", "date2", "=", "info2", ".", "modified", "return", "date1", "is", "None", "or", "date2", "is", "None", "or", "date1", ">", "date2" ]
Compare two `Info` objects to see if they should be copied. Returns: bool: `True` if the `Info` are different in size or mtime.
[ "Compare", "two", "Info", "objects", "to", "see", "if", "they", "should", "be", "copied", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/mirror.py#L38-L52
233,680
PyFilesystem/pyfilesystem2
fs/opener/parse.py
parse_fs_url
def parse_fs_url(fs_url): # type: (Text) -> ParseResult """Parse a Filesystem URL and return a `ParseResult`. Arguments: fs_url (str): A filesystem URL. Returns: ~fs.opener.parse.ParseResult: a parse result instance. Raises: ~fs.errors.ParseError: if the FS URL is not valid. """ match = _RE_FS_URL.match(fs_url) if match is None: raise ParseError("{!r} is not a fs2 url".format(fs_url)) fs_name, credentials, url1, url2, path = match.groups() if not credentials: username = None # type: Optional[Text] password = None # type: Optional[Text] url = url2 else: username, _, password = credentials.partition(":") username = unquote(username) password = unquote(password) url = url1 url, has_qs, qs = url.partition("?") resource = unquote(url) if has_qs: _params = parse_qs(qs, keep_blank_values=True) params = {k: unquote(v[0]) for k, v in six.iteritems(_params)} else: params = {} return ParseResult(fs_name, username, password, resource, params, path)
python
def parse_fs_url(fs_url): # type: (Text) -> ParseResult """Parse a Filesystem URL and return a `ParseResult`. Arguments: fs_url (str): A filesystem URL. Returns: ~fs.opener.parse.ParseResult: a parse result instance. Raises: ~fs.errors.ParseError: if the FS URL is not valid. """ match = _RE_FS_URL.match(fs_url) if match is None: raise ParseError("{!r} is not a fs2 url".format(fs_url)) fs_name, credentials, url1, url2, path = match.groups() if not credentials: username = None # type: Optional[Text] password = None # type: Optional[Text] url = url2 else: username, _, password = credentials.partition(":") username = unquote(username) password = unquote(password) url = url1 url, has_qs, qs = url.partition("?") resource = unquote(url) if has_qs: _params = parse_qs(qs, keep_blank_values=True) params = {k: unquote(v[0]) for k, v in six.iteritems(_params)} else: params = {} return ParseResult(fs_name, username, password, resource, params, path)
[ "def", "parse_fs_url", "(", "fs_url", ")", ":", "# type: (Text) -> ParseResult", "match", "=", "_RE_FS_URL", ".", "match", "(", "fs_url", ")", "if", "match", "is", "None", ":", "raise", "ParseError", "(", "\"{!r} is not a fs2 url\"", ".", "format", "(", "fs_url", ")", ")", "fs_name", ",", "credentials", ",", "url1", ",", "url2", ",", "path", "=", "match", ".", "groups", "(", ")", "if", "not", "credentials", ":", "username", "=", "None", "# type: Optional[Text]", "password", "=", "None", "# type: Optional[Text]", "url", "=", "url2", "else", ":", "username", ",", "_", ",", "password", "=", "credentials", ".", "partition", "(", "\":\"", ")", "username", "=", "unquote", "(", "username", ")", "password", "=", "unquote", "(", "password", ")", "url", "=", "url1", "url", ",", "has_qs", ",", "qs", "=", "url", ".", "partition", "(", "\"?\"", ")", "resource", "=", "unquote", "(", "url", ")", "if", "has_qs", ":", "_params", "=", "parse_qs", "(", "qs", ",", "keep_blank_values", "=", "True", ")", "params", "=", "{", "k", ":", "unquote", "(", "v", "[", "0", "]", ")", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "_params", ")", "}", "else", ":", "params", "=", "{", "}", "return", "ParseResult", "(", "fs_name", ",", "username", ",", "password", ",", "resource", ",", "params", ",", "path", ")" ]
Parse a Filesystem URL and return a `ParseResult`. Arguments: fs_url (str): A filesystem URL. Returns: ~fs.opener.parse.ParseResult: a parse result instance. Raises: ~fs.errors.ParseError: if the FS URL is not valid.
[ "Parse", "a", "Filesystem", "URL", "and", "return", "a", "ParseResult", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/opener/parse.py#L62-L97
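A short usage sketch for parse_fs_url above; the example URL is made up and the ParseResult attribute names (protocol, username, resource, params) are assumed from the library's namedtuple, not taken from this record:

    from fs.opener.parse import parse_fs_url

    # Hypothetical FS URL with credentials and a query string.
    result = parse_fs_url("ftp://user:secret@ftp.example.org/pub?mode=b")
    print(result.protocol)   # 'ftp'
    print(result.username)   # 'user'
    print(result.resource)   # 'ftp.example.org/pub'
    print(result.params)     # {'mode': 'b'}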
233,681
PyFilesystem/pyfilesystem2
fs/zipfs.py
_ZipExtFile.seek
def seek(self, offset, whence=Seek.set): # type: (int, SupportsInt) -> int """Change stream position. Change the stream position to the given byte offset. The offset is interpreted relative to the position indicated by ``whence``. Arguments: offset (int): the offset to the new position, in bytes. whence (int): the position reference. Possible values are: * `Seek.set`: start of stream (the default). * `Seek.current`: current position; offset may be negative. * `Seek.end`: end of stream; offset must be negative. Returns: int: the new absolute position. Raises: ValueError: when ``whence`` is not known, or ``offset`` is invalid. Note: Zip compression does not support seeking, so the seeking is emulated. Seeking somewhere else than the current position will need to either: * reopen the file and restart decompression * read and discard data to advance in the file """ _whence = int(whence) if _whence == Seek.current: offset += self._pos if _whence == Seek.current or _whence == Seek.set: if offset < 0: raise ValueError("Negative seek position {}".format(offset)) elif _whence == Seek.end: if offset > 0: raise ValueError("Positive seek position {}".format(offset)) offset += self._end else: raise ValueError( "Invalid whence ({}, should be {}, {} or {})".format( _whence, Seek.set, Seek.current, Seek.end ) ) if offset < self._pos: self._f = self._zip.open(self.name) # type: ignore self._pos = 0 self.read(offset - self._pos) return self._pos
python
def seek(self, offset, whence=Seek.set): # type: (int, SupportsInt) -> int """Change stream position. Change the stream position to the given byte offset. The offset is interpreted relative to the position indicated by ``whence``. Arguments: offset (int): the offset to the new position, in bytes. whence (int): the position reference. Possible values are: * `Seek.set`: start of stream (the default). * `Seek.current`: current position; offset may be negative. * `Seek.end`: end of stream; offset must be negative. Returns: int: the new absolute position. Raises: ValueError: when ``whence`` is not known, or ``offset`` is invalid. Note: Zip compression does not support seeking, so the seeking is emulated. Seeking somewhere else than the current position will need to either: * reopen the file and restart decompression * read and discard data to advance in the file """ _whence = int(whence) if _whence == Seek.current: offset += self._pos if _whence == Seek.current or _whence == Seek.set: if offset < 0: raise ValueError("Negative seek position {}".format(offset)) elif _whence == Seek.end: if offset > 0: raise ValueError("Positive seek position {}".format(offset)) offset += self._end else: raise ValueError( "Invalid whence ({}, should be {}, {} or {})".format( _whence, Seek.set, Seek.current, Seek.end ) ) if offset < self._pos: self._f = self._zip.open(self.name) # type: ignore self._pos = 0 self.read(offset - self._pos) return self._pos
[ "def", "seek", "(", "self", ",", "offset", ",", "whence", "=", "Seek", ".", "set", ")", ":", "# type: (int, SupportsInt) -> int", "_whence", "=", "int", "(", "whence", ")", "if", "_whence", "==", "Seek", ".", "current", ":", "offset", "+=", "self", ".", "_pos", "if", "_whence", "==", "Seek", ".", "current", "or", "_whence", "==", "Seek", ".", "set", ":", "if", "offset", "<", "0", ":", "raise", "ValueError", "(", "\"Negative seek position {}\"", ".", "format", "(", "offset", ")", ")", "elif", "_whence", "==", "Seek", ".", "end", ":", "if", "offset", ">", "0", ":", "raise", "ValueError", "(", "\"Positive seek position {}\"", ".", "format", "(", "offset", ")", ")", "offset", "+=", "self", ".", "_end", "else", ":", "raise", "ValueError", "(", "\"Invalid whence ({}, should be {}, {} or {})\"", ".", "format", "(", "_whence", ",", "Seek", ".", "set", ",", "Seek", ".", "current", ",", "Seek", ".", "end", ")", ")", "if", "offset", "<", "self", ".", "_pos", ":", "self", ".", "_f", "=", "self", ".", "_zip", ".", "open", "(", "self", ".", "name", ")", "# type: ignore", "self", ".", "_pos", "=", "0", "self", ".", "read", "(", "offset", "-", "self", ".", "_pos", ")", "return", "self", ".", "_pos" ]
Change stream position. Change the stream position to the given byte offset. The offset is interpreted relative to the position indicated by ``whence``. Arguments: offset (int): the offset to the new position, in bytes. whence (int): the position reference. Possible values are: * `Seek.set`: start of stream (the default). * `Seek.current`: current position; offset may be negative. * `Seek.end`: end of stream; offset must be negative. Returns: int: the new absolute position. Raises: ValueError: when ``whence`` is not known, or ``offset`` is invalid. Note: Zip compression does not support seeking, so the seeking is emulated. Seeking somewhere else than the current position will need to either: * reopen the file and restart decompression * read and discard data to advance in the file
[ "Change", "stream", "position", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/zipfs.py#L65-L116
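A minimal sketch of how the emulated seek behaves when reading a ZIP member through ZipFS; the archive and member names are hypothetical:

    from fs.zipfs import ZipFS

    with ZipFS("archive.zip") as zip_fs:            # hypothetical archive on disk
        with zip_fs.openbin("data.bin") as member:  # hypothetical member file
            member.seek(1024)   # forward seek: decompresses and discards 1024 bytes
            head = member.read(16)
            member.seek(0)      # backward seek: reopens the member and restarts decompression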
233,682
PyFilesystem/pyfilesystem2
fs/walk.py
Walker._iter_walk
def _iter_walk( self, fs, # type: FS path, # type: Text namespaces=None, # type: Optional[Collection[Text]] ): # type: (...) -> Iterator[Tuple[Text, Optional[Info]]] """Get the walk generator.""" if self.search == "breadth": return self._walk_breadth(fs, path, namespaces=namespaces) else: return self._walk_depth(fs, path, namespaces=namespaces)
python
def _iter_walk( self, fs, # type: FS path, # type: Text namespaces=None, # type: Optional[Collection[Text]] ): # type: (...) -> Iterator[Tuple[Text, Optional[Info]]] """Get the walk generator.""" if self.search == "breadth": return self._walk_breadth(fs, path, namespaces=namespaces) else: return self._walk_depth(fs, path, namespaces=namespaces)
[ "def", "_iter_walk", "(", "self", ",", "fs", ",", "# type: FS", "path", ",", "# type: Text", "namespaces", "=", "None", ",", "# type: Optional[Collection[Text]]", ")", ":", "# type: (...) -> Iterator[Tuple[Text, Optional[Info]]]", "if", "self", ".", "search", "==", "\"breadth\"", ":", "return", "self", ".", "_walk_breadth", "(", "fs", ",", "path", ",", "namespaces", "=", "namespaces", ")", "else", ":", "return", "self", ".", "_walk_depth", "(", "fs", ",", "path", ",", "namespaces", "=", "namespaces", ")" ]
Get the walk generator.
[ "Get", "the", "walk", "generator", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/walk.py#L188-L199
233,683
PyFilesystem/pyfilesystem2
fs/walk.py
Walker._check_open_dir
def _check_open_dir(self, fs, path, info): # type: (FS, Text, Info) -> bool """Check if a directory should be considered in the walk. """ if self.exclude_dirs is not None and fs.match(self.exclude_dirs, info.name): return False if self.filter_dirs is not None and not fs.match(self.filter_dirs, info.name): return False return self.check_open_dir(fs, path, info)
python
def _check_open_dir(self, fs, path, info): # type: (FS, Text, Info) -> bool """Check if a directory should be considered in the walk. """ if self.exclude_dirs is not None and fs.match(self.exclude_dirs, info.name): return False if self.filter_dirs is not None and not fs.match(self.filter_dirs, info.name): return False return self.check_open_dir(fs, path, info)
[ "def", "_check_open_dir", "(", "self", ",", "fs", ",", "path", ",", "info", ")", ":", "# type: (FS, Text, Info) -> bool", "if", "self", ".", "exclude_dirs", "is", "not", "None", "and", "fs", ".", "match", "(", "self", ".", "exclude_dirs", ",", "info", ".", "name", ")", ":", "return", "False", "if", "self", ".", "filter_dirs", "is", "not", "None", "and", "not", "fs", ".", "match", "(", "self", ".", "filter_dirs", ",", "info", ".", "name", ")", ":", "return", "False", "return", "self", ".", "check_open_dir", "(", "fs", ",", "path", ",", "info", ")" ]
Check if a directory should be considered in the walk.
[ "Check", "if", "a", "directory", "should", "be", "considered", "in", "the", "walk", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/walk.py#L201-L209
233,684
PyFilesystem/pyfilesystem2
fs/walk.py
Walker._check_scan_dir
def _check_scan_dir(self, fs, path, info, depth): # type: (FS, Text, Info, int) -> bool """Check if a directory contents should be scanned.""" if self.max_depth is not None and depth >= self.max_depth: return False return self.check_scan_dir(fs, path, info)
python
def _check_scan_dir(self, fs, path, info, depth): # type: (FS, Text, Info, int) -> bool """Check if a directory contents should be scanned.""" if self.max_depth is not None and depth >= self.max_depth: return False return self.check_scan_dir(fs, path, info)
[ "def", "_check_scan_dir", "(", "self", ",", "fs", ",", "path", ",", "info", ",", "depth", ")", ":", "# type: (FS, Text, Info, int) -> bool", "if", "self", ".", "max_depth", "is", "not", "None", "and", "depth", ">=", "self", ".", "max_depth", ":", "return", "False", "return", "self", ".", "check_scan_dir", "(", "fs", ",", "path", ",", "info", ")" ]
Check if a directory contents should be scanned.
[ "Check", "if", "a", "directory", "contents", "should", "be", "scanned", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/walk.py#L228-L233
233,685
PyFilesystem/pyfilesystem2
fs/walk.py
Walker.check_file
def check_file(self, fs, info): # type: (FS, Info) -> bool """Check if a filename should be included. Override to exclude files from the walk. Arguments: fs (FS): A filesystem instance. info (Info): A resource info object. Returns: bool: `True` if the file should be included. """ if self.exclude is not None and fs.match(self.exclude, info.name): return False return fs.match(self.filter, info.name)
python
def check_file(self, fs, info): # type: (FS, Info) -> bool """Check if a filename should be included. Override to exclude files from the walk. Arguments: fs (FS): A filesystem instance. info (Info): A resource info object. Returns: bool: `True` if the file should be included. """ if self.exclude is not None and fs.match(self.exclude, info.name): return False return fs.match(self.filter, info.name)
[ "def", "check_file", "(", "self", ",", "fs", ",", "info", ")", ":", "# type: (FS, Info) -> bool", "if", "self", ".", "exclude", "is", "not", "None", "and", "fs", ".", "match", "(", "self", ".", "exclude", ",", "info", ".", "name", ")", ":", "return", "False", "return", "fs", ".", "match", "(", "self", ".", "filter", ",", "info", ".", "name", ")" ]
Check if a filename should be included. Override to exclude files from the walk. Arguments: fs (FS): A filesystem instance. info (Info): A resource info object. Returns: bool: `True` if the file should be included.
[ "Check", "if", "a", "filename", "should", "be", "included", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/walk.py#L254-L271
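Since check_file is the documented override point for excluding files, here is a minimal subclass sketch; the walker name and the skip rule are made up for illustration:

    from fs.walk import Walker

    class NoBackupWalker(Walker):
        """A Walker that also skips editor backup files ending in '~'."""

        def check_file(self, fs, info):
            if info.name.endswith("~"):
                return False
            return super(NoBackupWalker, self).check_file(fs, info)

    # walker = NoBackupWalker(filter=["*.py"])
    # for path in walker.files(some_fs): ...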
233,686
PyFilesystem/pyfilesystem2
fs/walk.py
Walker._scan
def _scan( self, fs, # type: FS dir_path, # type: Text namespaces=None, # type: Optional[Collection[Text]] ): # type: (...) -> Iterator[Info] """Get an iterator of `Info` objects for a directory path. Arguments: fs (FS): A filesystem instance. dir_path (str): A path to a directory on the filesystem. namespaces (list): A list of additional namespaces to include in the `Info` objects. Returns: ~collections.Iterator: iterator of `Info` objects for resources within the given path. """ try: for info in fs.scandir(dir_path, namespaces=namespaces): yield info except FSError as error: if not self.on_error(dir_path, error): six.reraise(type(error), error)
python
def _scan( self, fs, # type: FS dir_path, # type: Text namespaces=None, # type: Optional[Collection[Text]] ): # type: (...) -> Iterator[Info] """Get an iterator of `Info` objects for a directory path. Arguments: fs (FS): A filesystem instance. dir_path (str): A path to a directory on the filesystem. namespaces (list): A list of additional namespaces to include in the `Info` objects. Returns: ~collections.Iterator: iterator of `Info` objects for resources within the given path. """ try: for info in fs.scandir(dir_path, namespaces=namespaces): yield info except FSError as error: if not self.on_error(dir_path, error): six.reraise(type(error), error)
[ "def", "_scan", "(", "self", ",", "fs", ",", "# type: FS", "dir_path", ",", "# type: Text", "namespaces", "=", "None", ",", "# type: Optional[Collection[Text]]", ")", ":", "# type: (...) -> Iterator[Info]", "try", ":", "for", "info", "in", "fs", ".", "scandir", "(", "dir_path", ",", "namespaces", "=", "namespaces", ")", ":", "yield", "info", "except", "FSError", "as", "error", ":", "if", "not", "self", ".", "on_error", "(", "dir_path", ",", "error", ")", ":", "six", ".", "reraise", "(", "type", "(", "error", ")", ",", "error", ")" ]
Get an iterator of `Info` objects for a directory path. Arguments: fs (FS): A filesystem instance. dir_path (str): A path to a directory on the filesystem. namespaces (list): A list of additional namespaces to include in the `Info` objects. Returns: ~collections.Iterator: iterator of `Info` objects for resources within the given path.
[ "Get", "an", "iterator", "of", "Info", "objects", "for", "a", "directory", "path", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/walk.py#L273-L298
233,687
PyFilesystem/pyfilesystem2
fs/walk.py
BoundWalker._make_walker
def _make_walker(self, *args, **kwargs): # type: (*Any, **Any) -> Walker """Create a walker instance. """ walker = self.walker_class(*args, **kwargs) return walker
python
def _make_walker(self, *args, **kwargs): # type: (*Any, **Any) -> Walker """Create a walker instance. """ walker = self.walker_class(*args, **kwargs) return walker
[ "def", "_make_walker", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# type: (*Any, **Any) -> Walker", "walker", "=", "self", ".", "walker_class", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "walker" ]
Create a walker instance.
[ "Create", "a", "walker", "instance", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/walk.py#L529-L534
233,688
PyFilesystem/pyfilesystem2
fs/walk.py
BoundWalker.dirs
def dirs(self, path="/", **kwargs): # type: (Text, **Any) -> Iterator[Text] """Walk a filesystem, yielding absolute paths to directories. Arguments: path (str): A path to a directory. Keyword Arguments: ignore_errors (bool): If `True`, any errors reading a directory will be ignored, otherwise exceptions will be raised. on_error (callable): If ``ignore_errors`` is `False`, then this callable will be invoked with a path and the exception object. It should return `True` to ignore the error, or `False` to re-raise it. search (str): If ``'breadth'`` then the directory will be walked *top down*. Set to ``'depth'`` to walk *bottom up*. filter_dirs (list, optional): A list of patterns that will be used to match directories paths. The walk will only open directories that match at least one of these patterns. exclude_dirs (list): A list of patterns that will be used to filter out directories from the walk, e.g. ``['*.svn', '*.git']``. max_depth (int, optional): Maximum directory depth to walk. Returns: ~collections.Iterator: an iterator over directory paths (absolute from the filesystem root). This method invokes `Walker.dirs` with the bound `FS` object. """ walker = self._make_walker(**kwargs) return walker.dirs(self.fs, path=path)
python
def dirs(self, path="/", **kwargs): # type: (Text, **Any) -> Iterator[Text] """Walk a filesystem, yielding absolute paths to directories. Arguments: path (str): A path to a directory. Keyword Arguments: ignore_errors (bool): If `True`, any errors reading a directory will be ignored, otherwise exceptions will be raised. on_error (callable): If ``ignore_errors`` is `False`, then this callable will be invoked with a path and the exception object. It should return `True` to ignore the error, or `False` to re-raise it. search (str): If ``'breadth'`` then the directory will be walked *top down*. Set to ``'depth'`` to walk *bottom up*. filter_dirs (list, optional): A list of patterns that will be used to match directories paths. The walk will only open directories that match at least one of these patterns. exclude_dirs (list): A list of patterns that will be used to filter out directories from the walk, e.g. ``['*.svn', '*.git']``. max_depth (int, optional): Maximum directory depth to walk. Returns: ~collections.Iterator: an iterator over directory paths (absolute from the filesystem root). This method invokes `Walker.dirs` with the bound `FS` object. """ walker = self._make_walker(**kwargs) return walker.dirs(self.fs, path=path)
[ "def", "dirs", "(", "self", ",", "path", "=", "\"/\"", ",", "*", "*", "kwargs", ")", ":", "# type: (Text, **Any) -> Iterator[Text]", "walker", "=", "self", ".", "_make_walker", "(", "*", "*", "kwargs", ")", "return", "walker", ".", "dirs", "(", "self", ".", "fs", ",", "path", "=", "path", ")" ]
Walk a filesystem, yielding absolute paths to directories. Arguments: path (str): A path to a directory. Keyword Arguments: ignore_errors (bool): If `True`, any errors reading a directory will be ignored, otherwise exceptions will be raised. on_error (callable): If ``ignore_errors`` is `False`, then this callable will be invoked with a path and the exception object. It should return `True` to ignore the error, or `False` to re-raise it. search (str): If ``'breadth'`` then the directory will be walked *top down*. Set to ``'depth'`` to walk *bottom up*. filter_dirs (list, optional): A list of patterns that will be used to match directories paths. The walk will only open directories that match at least one of these patterns. exclude_dirs (list): A list of patterns that will be used to filter out directories from the walk, e.g. ``['*.svn', '*.git']``. max_depth (int, optional): Maximum directory depth to walk. Returns: ~collections.Iterator: an iterator over directory paths (absolute from the filesystem root). This method invokes `Walker.dirs` with the bound `FS` object.
[ "Walk", "a", "filesystem", "yielding", "absolute", "paths", "to", "directories", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/walk.py#L641-L674
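A hedged usage sketch for the bound walker's dirs(); the filesystem location, exclude patterns, and depth are arbitrary choices:

    from fs import open_fs

    project_fs = open_fs(".")  # hypothetical: the current directory as an OSFS
    for dir_path in project_fs.walk.dirs(exclude_dirs=["*.git"], max_depth=2):
        print(dir_path)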
233,689
PyFilesystem/pyfilesystem2
fs/walk.py
BoundWalker.info
def info( self, path="/", # type: Text namespaces=None, # type: Optional[Collection[Text]] **kwargs # type: Any ): # type: (...) -> Iterator[Tuple[Text, Info]] """Walk a filesystem, yielding path and `Info` of resources. Arguments: path (str): A path to a directory. namespaces (list, optional): A list of namespaces to include in the resource information, e.g. ``['basic', 'access']`` (defaults to ``['basic']``). Keyword Arguments: ignore_errors (bool): If `True`, any errors reading a directory will be ignored, otherwise exceptions will be raised. on_error (callable): If ``ignore_errors`` is `False`, then this callable will be invoked with a path and the exception object. It should return `True` to ignore the error, or `False` to re-raise it. search (str): If ``'breadth'`` then the directory will be walked *top down*. Set to ``'depth'`` to walk *bottom up*. filter (list): If supplied, this parameter should be a list of file name patterns, e.g. ``['*.py']``. Files will only be returned if the final component matches one of the patterns. exclude (list, optional): If supplied, this parameter should be a list of filename patterns, e.g. ``['~*', '.*']``. Files matching any of these patterns will be removed from the walk. filter_dirs (list, optional): A list of patterns that will be used to match directories paths. The walk will only open directories that match at least one of these patterns. exclude_dirs (list): A list of patterns that will be used to filter out directories from the walk, e.g. ``['*.svn', '*.git']``. max_depth (int, optional): Maximum directory depth to walk. Returns: ~collections.Iterable: an iterable yielding tuples of ``(<absolute path>, <resource info>)``. This method invokes `Walker.info` with the bound `FS` object. """ walker = self._make_walker(**kwargs) return walker.info(self.fs, path=path, namespaces=namespaces)
python
def info( self, path="/", # type: Text namespaces=None, # type: Optional[Collection[Text]] **kwargs # type: Any ): # type: (...) -> Iterator[Tuple[Text, Info]] """Walk a filesystem, yielding path and `Info` of resources. Arguments: path (str): A path to a directory. namespaces (list, optional): A list of namespaces to include in the resource information, e.g. ``['basic', 'access']`` (defaults to ``['basic']``). Keyword Arguments: ignore_errors (bool): If `True`, any errors reading a directory will be ignored, otherwise exceptions will be raised. on_error (callable): If ``ignore_errors`` is `False`, then this callable will be invoked with a path and the exception object. It should return `True` to ignore the error, or `False` to re-raise it. search (str): If ``'breadth'`` then the directory will be walked *top down*. Set to ``'depth'`` to walk *bottom up*. filter (list): If supplied, this parameter should be a list of file name patterns, e.g. ``['*.py']``. Files will only be returned if the final component matches one of the patterns. exclude (list, optional): If supplied, this parameter should be a list of filename patterns, e.g. ``['~*', '.*']``. Files matching any of these patterns will be removed from the walk. filter_dirs (list, optional): A list of patterns that will be used to match directories paths. The walk will only open directories that match at least one of these patterns. exclude_dirs (list): A list of patterns that will be used to filter out directories from the walk, e.g. ``['*.svn', '*.git']``. max_depth (int, optional): Maximum directory depth to walk. Returns: ~collections.Iterable: an iterable yielding tuples of ``(<absolute path>, <resource info>)``. This method invokes `Walker.info` with the bound `FS` object. """ walker = self._make_walker(**kwargs) return walker.info(self.fs, path=path, namespaces=namespaces)
[ "def", "info", "(", "self", ",", "path", "=", "\"/\"", ",", "# type: Text", "namespaces", "=", "None", ",", "# type: Optional[Collection[Text]]", "*", "*", "kwargs", "# type: Any", ")", ":", "# type: (...) -> Iterator[Tuple[Text, Info]]", "walker", "=", "self", ".", "_make_walker", "(", "*", "*", "kwargs", ")", "return", "walker", ".", "info", "(", "self", ".", "fs", ",", "path", "=", "path", ",", "namespaces", "=", "namespaces", ")" ]
Walk a filesystem, yielding path and `Info` of resources. Arguments: path (str): A path to a directory. namespaces (list, optional): A list of namespaces to include in the resource information, e.g. ``['basic', 'access']`` (defaults to ``['basic']``). Keyword Arguments: ignore_errors (bool): If `True`, any errors reading a directory will be ignored, otherwise exceptions will be raised. on_error (callable): If ``ignore_errors`` is `False`, then this callable will be invoked with a path and the exception object. It should return `True` to ignore the error, or `False` to re-raise it. search (str): If ``'breadth'`` then the directory will be walked *top down*. Set to ``'depth'`` to walk *bottom up*. filter (list): If supplied, this parameter should be a list of file name patterns, e.g. ``['*.py']``. Files will only be returned if the final component matches one of the patterns. exclude (list, optional): If supplied, this parameter should be a list of filename patterns, e.g. ``['~*', '.*']``. Files matching any of these patterns will be removed from the walk. filter_dirs (list, optional): A list of patterns that will be used to match directories paths. The walk will only open directories that match at least one of these patterns. exclude_dirs (list): A list of patterns that will be used to filter out directories from the walk, e.g. ``['*.svn', '*.git']``. max_depth (int, optional): Maximum directory depth to walk. Returns: ~collections.Iterable: an iterable yielding tuples of ``(<absolute path>, <resource info>)``. This method invokes `Walker.info` with the bound `FS` object.
[ "Walk", "a", "filesystem", "yielding", "path", "and", "Info", "of", "resources", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/walk.py#L676-L724
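And the info() variant, reusing project_fs from the previous sketch; the 'details' namespace is requested so that info.size is populated (namespace and pattern choices are assumptions):

    for path, info in project_fs.walk.info(namespaces=["details"], filter=["*.py"]):
        if not info.is_dir:
            print(path, info.size)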
233,690
PyFilesystem/pyfilesystem2
fs/tools.py
remove_empty
def remove_empty(fs, path): # type: (FS, Text) -> None """Remove all empty parents. Arguments: fs (FS): A filesystem instance. path (str): Path to a directory on the filesystem. """ path = abspath(normpath(path)) try: while path not in ("", "/"): fs.removedir(path) path = dirname(path) except DirectoryNotEmpty: pass
python
def remove_empty(fs, path): # type: (FS, Text) -> None """Remove all empty parents. Arguments: fs (FS): A filesystem instance. path (str): Path to a directory on the filesystem. """ path = abspath(normpath(path)) try: while path not in ("", "/"): fs.removedir(path) path = dirname(path) except DirectoryNotEmpty: pass
[ "def", "remove_empty", "(", "fs", ",", "path", ")", ":", "# type: (FS, Text) -> None", "path", "=", "abspath", "(", "normpath", "(", "path", ")", ")", "try", ":", "while", "path", "not", "in", "(", "\"\"", ",", "\"/\"", ")", ":", "fs", ".", "removedir", "(", "path", ")", "path", "=", "dirname", "(", "path", ")", "except", "DirectoryNotEmpty", ":", "pass" ]
Remove all empty parents. Arguments: fs (FS): A filesystem instance. path (str): Path to a directory on the filesystem.
[ "Remove", "all", "empty", "parents", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/tools.py#L23-L38
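A self-contained sketch of remove_empty using an in-memory filesystem; the directory names are arbitrary:

    from fs import open_fs
    from fs.tools import remove_empty

    with open_fs("mem://") as mem_fs:
        mem_fs.makedirs("a/b/c")
        remove_empty(mem_fs, "a/b/c")   # removes c, then b, then a, stopping at the root
        print(mem_fs.listdir("/"))      # []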
233,691
PyFilesystem/pyfilesystem2
fs/tools.py
copy_file_data
def copy_file_data(src_file, dst_file, chunk_size=None): # type: (IO, IO, Optional[int]) -> None """Copy data from one file object to another. Arguments: src_file (io.IOBase): File open for reading. dst_file (io.IOBase): File open for writing. chunk_size (int): Number of bytes to copy at a time (or `None` to use sensible default). """ _chunk_size = 1024 * 1024 if chunk_size is None else chunk_size read = src_file.read write = dst_file.write # The 'or None' is so that it works with binary and text files for chunk in iter(lambda: read(_chunk_size) or None, None): write(chunk)
python
def copy_file_data(src_file, dst_file, chunk_size=None): # type: (IO, IO, Optional[int]) -> None """Copy data from one file object to another. Arguments: src_file (io.IOBase): File open for reading. dst_file (io.IOBase): File open for writing. chunk_size (int): Number of bytes to copy at a time (or `None` to use sensible default). """ _chunk_size = 1024 * 1024 if chunk_size is None else chunk_size read = src_file.read write = dst_file.write # The 'or None' is so that it works with binary and text files for chunk in iter(lambda: read(_chunk_size) or None, None): write(chunk)
[ "def", "copy_file_data", "(", "src_file", ",", "dst_file", ",", "chunk_size", "=", "None", ")", ":", "# type: (IO, IO, Optional[int]) -> None", "_chunk_size", "=", "1024", "*", "1024", "if", "chunk_size", "is", "None", "else", "chunk_size", "read", "=", "src_file", ".", "read", "write", "=", "dst_file", ".", "write", "# The 'or None' is so that it works with binary and text files", "for", "chunk", "in", "iter", "(", "lambda", ":", "read", "(", "_chunk_size", ")", "or", "None", ",", "None", ")", ":", "write", "(", "chunk", ")" ]
Copy data from one file object to another. Arguments: src_file (io.IOBase): File open for reading. dst_file (io.IOBase): File open for writing. chunk_size (int): Number of bytes to copy at a time (or `None` to use sensible default).
[ "Copy", "data", "from", "one", "file", "object", "to", "another", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/tools.py#L41-L57
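A minimal sketch with in-memory file objects; the data size and chunk size are chosen arbitrarily:

    from io import BytesIO
    from fs.tools import copy_file_data

    src = BytesIO(b"x" * (3 * 1024 * 1024))
    dst = BytesIO()
    copy_file_data(src, dst, chunk_size=1024 * 1024)  # three 1 MiB reads/writes
    assert dst.getvalue() == src.getvalue()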
233,692
PyFilesystem/pyfilesystem2
fs/tools.py
get_intermediate_dirs
def get_intermediate_dirs(fs, dir_path): # type: (FS, Text) -> List[Text] """Get a list of non-existing intermediate directories. Arguments: fs (FS): A filesystem instance. dir_path (str): A path to a new directory on the filesystem. Returns: list: A list of non-existing paths. Raises: ~fs.errors.DirectoryExpected: If a path component references a file and not a directory. """ intermediates = [] with fs.lock(): for path in recursepath(abspath(dir_path), reverse=True): try: resource = fs.getinfo(path) except ResourceNotFound: intermediates.append(abspath(path)) else: if resource.is_dir: break raise errors.DirectoryExpected(dir_path) return intermediates[::-1][:-1]
python
def get_intermediate_dirs(fs, dir_path): # type: (FS, Text) -> List[Text] """Get a list of non-existing intermediate directories. Arguments: fs (FS): A filesystem instance. dir_path (str): A path to a new directory on the filesystem. Returns: list: A list of non-existing paths. Raises: ~fs.errors.DirectoryExpected: If a path component references a file and not a directory. """ intermediates = [] with fs.lock(): for path in recursepath(abspath(dir_path), reverse=True): try: resource = fs.getinfo(path) except ResourceNotFound: intermediates.append(abspath(path)) else: if resource.is_dir: break raise errors.DirectoryExpected(dir_path) return intermediates[::-1][:-1]
[ "def", "get_intermediate_dirs", "(", "fs", ",", "dir_path", ")", ":", "# type: (FS, Text) -> List[Text]", "intermediates", "=", "[", "]", "with", "fs", ".", "lock", "(", ")", ":", "for", "path", "in", "recursepath", "(", "abspath", "(", "dir_path", ")", ",", "reverse", "=", "True", ")", ":", "try", ":", "resource", "=", "fs", ".", "getinfo", "(", "path", ")", "except", "ResourceNotFound", ":", "intermediates", ".", "append", "(", "abspath", "(", "path", ")", ")", "else", ":", "if", "resource", ".", "is_dir", ":", "break", "raise", "errors", ".", "DirectoryExpected", "(", "dir_path", ")", "return", "intermediates", "[", ":", ":", "-", "1", "]", "[", ":", "-", "1", "]" ]
Get a list of non-existing intermediate directories. Arguments: fs (FS): A filesystem instance. dir_path (str): A path to a new directory on the filesystem. Returns: list: A list of non-existing paths. Raises: ~fs.errors.DirectoryExpected: If a path component references a file and not a directory.
[ "Get", "a", "list", "of", "non", "-", "existing", "intermediate", "directories", "." ]
047f3593f297d1442194cda3da7a7335bcc9c14a
https://github.com/PyFilesystem/pyfilesystem2/blob/047f3593f297d1442194cda3da7a7335bcc9c14a/fs/tools.py#L60-L87
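A sketch of what get_intermediate_dirs returns on an in-memory filesystem; the paths are arbitrary and the expected output follows from tracing the function above:

    from fs import open_fs
    from fs.tools import get_intermediate_dirs

    with open_fs("mem://") as mem_fs:
        mem_fs.makedir("/foo")
        print(get_intermediate_dirs(mem_fs, "/foo/bar/baz"))
        # ['/foo/bar'] -- '/foo' already exists and '/foo/bar/baz' itself is excluded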
233,693
soynatan/django-easy-audit
easyaudit/admin_helpers.py
prettify_json
def prettify_json(json_string): """Given a JSON string, it returns it as a safe formatted HTML""" try: data = json.loads(json_string) html = '<pre>' + json.dumps(data, sort_keys=True, indent=4) + '</pre>' except: html = json_string return mark_safe(html)
python
def prettify_json(json_string): """Given a JSON string, it returns it as a safe formatted HTML""" try: data = json.loads(json_string) html = '<pre>' + json.dumps(data, sort_keys=True, indent=4) + '</pre>' except: html = json_string return mark_safe(html)
[ "def", "prettify_json", "(", "json_string", ")", ":", "try", ":", "data", "=", "json", ".", "loads", "(", "json_string", ")", "html", "=", "'<pre>'", "+", "json", ".", "dumps", "(", "data", ",", "sort_keys", "=", "True", ",", "indent", "=", "4", ")", "+", "'</pre>'", "except", ":", "html", "=", "json_string", "return", "mark_safe", "(", "html", ")" ]
Given a JSON string, it returns it as a safe formatted HTML
[ "Given", "a", "JSON", "string", "it", "returns", "it", "as", "a", "safe", "formatted", "HTML" ]
03e05bc94beb29fc3e4ff86e313a6fef4b766b4b
https://github.com/soynatan/django-easy-audit/blob/03e05bc94beb29fc3e4ff86e313a6fef4b766b4b/easyaudit/admin_helpers.py#L21-L29
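A tiny sketch of the helper above; the input JSON is made up, and the import assumes it runs inside a configured Django project:

    from easyaudit.admin_helpers import prettify_json

    html = prettify_json('{"b": 2, "a": 1}')
    # -> '<pre>{\n    "a": 1,\n    "b": 2\n}</pre>', marked safe for rendering in the admin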
233,694
soynatan/django-easy-audit
easyaudit/admin_helpers.py
EasyAuditModelAdmin.purge_objects
def purge_objects(self, request): """ Removes all objects in this table. This action first displays a confirmation page; next, it deletes all objects and redirects back to the change list. """ def truncate_table(model): if settings.TRUNCATE_TABLE_SQL_STATEMENT: from django.db import connection sql = settings.TRUNCATE_TABLE_SQL_STATEMENT.format(db_table=model._meta.db_table) cursor = connection.cursor() cursor.execute(sql) else: model.objects.all().delete() modeladmin = self opts = modeladmin.model._meta # Check that the user has delete permission for the actual model if not request.user.is_superuser: raise PermissionDenied if not modeladmin.has_delete_permission(request): raise PermissionDenied # If the user has already confirmed or cancelled the deletion, # (eventually) do the deletion and return to the change list view again. if request.method == 'POST': if 'btn-confirm' in request.POST: try: n = modeladmin.model.objects.count() truncate_table(modeladmin.model) modeladmin.message_user(request, _("Successfully removed %d rows" % n), messages.SUCCESS); except Exception as e: modeladmin.message_user(request, _(u'ERROR') + ': %r' % e, messages.ERROR) else: modeladmin.message_user(request, _("Action cancelled by user"), messages.SUCCESS); return HttpResponseRedirect(reverse('admin:%s_%s_changelist' % (opts.app_label, opts.model_name))) context = { "title": _("Purge all %s ... are you sure?") % opts.verbose_name_plural, "opts": opts, "app_label": opts.app_label, } # Display the confirmation page return render( request, 'admin/easyaudit/purge_confirmation.html', context )
python
def purge_objects(self, request): """ Removes all objects in this table. This action first displays a confirmation page; next, it deletes all objects and redirects back to the change list. """ def truncate_table(model): if settings.TRUNCATE_TABLE_SQL_STATEMENT: from django.db import connection sql = settings.TRUNCATE_TABLE_SQL_STATEMENT.format(db_table=model._meta.db_table) cursor = connection.cursor() cursor.execute(sql) else: model.objects.all().delete() modeladmin = self opts = modeladmin.model._meta # Check that the user has delete permission for the actual model if not request.user.is_superuser: raise PermissionDenied if not modeladmin.has_delete_permission(request): raise PermissionDenied # If the user has already confirmed or cancelled the deletion, # (eventually) do the deletion and return to the change list view again. if request.method == 'POST': if 'btn-confirm' in request.POST: try: n = modeladmin.model.objects.count() truncate_table(modeladmin.model) modeladmin.message_user(request, _("Successfully removed %d rows" % n), messages.SUCCESS); except Exception as e: modeladmin.message_user(request, _(u'ERROR') + ': %r' % e, messages.ERROR) else: modeladmin.message_user(request, _("Action cancelled by user"), messages.SUCCESS); return HttpResponseRedirect(reverse('admin:%s_%s_changelist' % (opts.app_label, opts.model_name))) context = { "title": _("Purge all %s ... are you sure?") % opts.verbose_name_plural, "opts": opts, "app_label": opts.app_label, } # Display the confirmation page return render( request, 'admin/easyaudit/purge_confirmation.html', context )
[ "def", "purge_objects", "(", "self", ",", "request", ")", ":", "def", "truncate_table", "(", "model", ")", ":", "if", "settings", ".", "TRUNCATE_TABLE_SQL_STATEMENT", ":", "from", "django", ".", "db", "import", "connection", "sql", "=", "settings", ".", "TRUNCATE_TABLE_SQL_STATEMENT", ".", "format", "(", "db_table", "=", "model", ".", "_meta", ".", "db_table", ")", "cursor", "=", "connection", ".", "cursor", "(", ")", "cursor", ".", "execute", "(", "sql", ")", "else", ":", "model", ".", "objects", ".", "all", "(", ")", ".", "delete", "(", ")", "modeladmin", "=", "self", "opts", "=", "modeladmin", ".", "model", ".", "_meta", "# Check that the user has delete permission for the actual model", "if", "not", "request", ".", "user", ".", "is_superuser", ":", "raise", "PermissionDenied", "if", "not", "modeladmin", ".", "has_delete_permission", "(", "request", ")", ":", "raise", "PermissionDenied", "# If the user has already confirmed or cancelled the deletion,", "# (eventually) do the deletion and return to the change list view again.", "if", "request", ".", "method", "==", "'POST'", ":", "if", "'btn-confirm'", "in", "request", ".", "POST", ":", "try", ":", "n", "=", "modeladmin", ".", "model", ".", "objects", ".", "count", "(", ")", "truncate_table", "(", "modeladmin", ".", "model", ")", "modeladmin", ".", "message_user", "(", "request", ",", "_", "(", "\"Successfully removed %d rows\"", "%", "n", ")", ",", "messages", ".", "SUCCESS", ")", "except", "Exception", "as", "e", ":", "modeladmin", ".", "message_user", "(", "request", ",", "_", "(", "u'ERROR'", ")", "+", "': %r'", "%", "e", ",", "messages", ".", "ERROR", ")", "else", ":", "modeladmin", ".", "message_user", "(", "request", ",", "_", "(", "\"Action cancelled by user\"", ")", ",", "messages", ".", "SUCCESS", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'admin:%s_%s_changelist'", "%", "(", "opts", ".", "app_label", ",", "opts", ".", "model_name", ")", ")", ")", "context", "=", "{", "\"title\"", ":", "_", "(", "\"Purge all %s ... are you sure?\"", ")", "%", "opts", ".", "verbose_name_plural", ",", "\"opts\"", ":", "opts", ",", "\"app_label\"", ":", "opts", ".", "app_label", ",", "}", "# Display the confirmation page", "return", "render", "(", "request", ",", "'admin/easyaudit/purge_confirmation.html'", ",", "context", ")" ]
Removes all objects in this table. This action first displays a confirmation page; next, it deletes all objects and redirects back to the change list.
[ "Removes", "all", "objects", "in", "this", "table", ".", "This", "action", "first", "displays", "a", "confirmation", "page", ";", "next", "it", "deletes", "all", "objects", "and", "redirects", "back", "to", "the", "change", "list", "." ]
03e05bc94beb29fc3e4ff86e313a6fef4b766b4b
https://github.com/soynatan/django-easy-audit/blob/03e05bc94beb29fc3e4ff86e313a6fef4b766b4b/easyaudit/admin_helpers.py#L66-L116
233,695
soynatan/django-easy-audit
easyaudit/settings.py
get_model_list
def get_model_list(class_list): """ Receives a list of strings with app_name.model_name format and turns them into classes. If an item is already a class it ignores it. """ for idx, item in enumerate(class_list): if isinstance(item, six.string_types): model_class = apps.get_model(item) class_list[idx] = model_class
python
def get_model_list(class_list): """ Receives a list of strings with app_name.model_name format and turns them into classes. If an item is already a class it ignores it. """ for idx, item in enumerate(class_list): if isinstance(item, six.string_types): model_class = apps.get_model(item) class_list[idx] = model_class
[ "def", "get_model_list", "(", "class_list", ")", ":", "for", "idx", ",", "item", "in", "enumerate", "(", "class_list", ")", ":", "if", "isinstance", "(", "item", ",", "six", ".", "string_types", ")", ":", "model_class", "=", "apps", ".", "get_model", "(", "item", ")", "class_list", "[", "idx", "]", "=", "model_class" ]
Receives a list of strings with app_name.model_name format and turns them into classes. If an item is already a class it ignores it.
[ "Receives", "a", "list", "of", "strings", "with", "app_name", ".", "model_name", "format", "and", "turns", "them", "into", "classes", ".", "If", "an", "item", "is", "already", "a", "class", "it", "ignores", "it", "." ]
03e05bc94beb29fc3e4ff86e313a6fef4b766b4b
https://github.com/soynatan/django-easy-audit/blob/03e05bc94beb29fc3e4ff86e313a6fef4b766b4b/easyaudit/settings.py#L15-L24
233,696
soynatan/django-easy-audit
easyaudit/signals/model_signals.py
should_audit
def should_audit(instance): """Returns True or False to indicate whether the instance should be audited or not, depending on the project settings.""" # do not audit any model listed in UNREGISTERED_CLASSES for unregistered_class in UNREGISTERED_CLASSES: if isinstance(instance, unregistered_class): return False # only audit models listed in REGISTERED_CLASSES (if it's set) if len(REGISTERED_CLASSES) > 0: for registered_class in REGISTERED_CLASSES: if isinstance(instance, registered_class): break else: return False # all good return True
python
def should_audit(instance): """Returns True or False to indicate whether the instance should be audited or not, depending on the project settings.""" # do not audit any model listed in UNREGISTERED_CLASSES for unregistered_class in UNREGISTERED_CLASSES: if isinstance(instance, unregistered_class): return False # only audit models listed in REGISTERED_CLASSES (if it's set) if len(REGISTERED_CLASSES) > 0: for registered_class in REGISTERED_CLASSES: if isinstance(instance, registered_class): break else: return False # all good return True
[ "def", "should_audit", "(", "instance", ")", ":", "# do not audit any model listed in UNREGISTERED_CLASSES", "for", "unregistered_class", "in", "UNREGISTERED_CLASSES", ":", "if", "isinstance", "(", "instance", ",", "unregistered_class", ")", ":", "return", "False", "# only audit models listed in REGISTERED_CLASSES (if it's set)", "if", "len", "(", "REGISTERED_CLASSES", ")", ">", "0", ":", "for", "registered_class", "in", "REGISTERED_CLASSES", ":", "if", "isinstance", "(", "instance", ",", "registered_class", ")", ":", "break", "else", ":", "return", "False", "# all good", "return", "True" ]
Returns True or False to indicate whether the instance should be audited or not, depending on the project settings.
[ "Returns", "True", "or", "False", "to", "indicate", "whether", "the", "instance", "should", "be", "audited", "or", "not", "depending", "on", "the", "project", "settings", "." ]
03e05bc94beb29fc3e4ff86e313a6fef4b766b4b
https://github.com/soynatan/django-easy-audit/blob/03e05bc94beb29fc3e4ff86e313a6fef4b766b4b/easyaudit/signals/model_signals.py#L23-L41
233,697
soynatan/django-easy-audit
easyaudit/signals/model_signals.py
_m2m_rev_field_name
def _m2m_rev_field_name(model1, model2): """Gets the name of the reverse m2m accessor from `model1` to `model2` For example, if User has a ManyToManyField connected to Group, `_m2m_rev_field_name(Group, User)` retrieves the name of the field on Group that lists a group's Users. (By default, this field is called `user_set`, but the name can be overridden). """ m2m_field_names = [ rel.get_accessor_name() for rel in model1._meta.get_fields() if rel.many_to_many and rel.auto_created and rel.related_model == model2 ] return m2m_field_names[0]
python
def _m2m_rev_field_name(model1, model2): """Gets the name of the reverse m2m accessor from `model1` to `model2` For example, if User has a ManyToManyField connected to Group, `_m2m_rev_field_name(Group, User)` retrieves the name of the field on Group that lists a group's Users. (By default, this field is called `user_set`, but the name can be overridden). """ m2m_field_names = [ rel.get_accessor_name() for rel in model1._meta.get_fields() if rel.many_to_many and rel.auto_created and rel.related_model == model2 ] return m2m_field_names[0]
[ "def", "_m2m_rev_field_name", "(", "model1", ",", "model2", ")", ":", "m2m_field_names", "=", "[", "rel", ".", "get_accessor_name", "(", ")", "for", "rel", "in", "model1", ".", "_meta", ".", "get_fields", "(", ")", "if", "rel", ".", "many_to_many", "and", "rel", ".", "auto_created", "and", "rel", ".", "related_model", "==", "model2", "]", "return", "m2m_field_names", "[", "0", "]" ]
Gets the name of the reverse m2m accessor from `model1` to `model2` For example, if User has a ManyToManyField connected to Group, `_m2m_rev_field_name(Group, User)` retrieves the name of the field on Group that lists a group's Users. (By default, this field is called `user_set`, but the name can be overridden).
[ "Gets", "the", "name", "of", "the", "reverse", "m2m", "accessor", "from", "model1", "to", "model2" ]
03e05bc94beb29fc3e4ff86e313a6fef4b766b4b
https://github.com/soynatan/django-easy-audit/blob/03e05bc94beb29fc3e4ff86e313a6fef4b766b4b/easyaudit/signals/model_signals.py#L174-L188
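The docstring's Group/User example written out; this assumes Django's stock auth models and a configured project:

    from django.contrib.auth.models import Group, User
    from easyaudit.signals.model_signals import _m2m_rev_field_name

    # User.groups is the forward M2M, so the auto-created reverse accessor on Group
    # is 'user_set' (unless overridden), which is what the helper returns.
    print(_m2m_rev_field_name(Group, User))  # 'user_set'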
233,698
wookayin/gpustat
gpustat/core.py
GPUStatCollection.new_query
def new_query(): """Query the information of all the GPUs on local machine""" N.nvmlInit() def _decode(b): if isinstance(b, bytes): return b.decode() # for python3, to unicode return b def get_gpu_info(handle): """Get one GPU information specified by nvml handle""" def get_process_info(nv_process): """Get the process information of specific pid""" process = {} ps_process = psutil.Process(pid=nv_process.pid) process['username'] = ps_process.username() # cmdline returns full path; # as in `ps -o comm`, get short cmdnames. _cmdline = ps_process.cmdline() if not _cmdline: # sometimes, zombie or unknown (e.g. [kworker/8:2H]) process['command'] = '?' else: process['command'] = os.path.basename(_cmdline[0]) # Bytes to MBytes process['gpu_memory_usage'] = nv_process.usedGpuMemory // MB process['pid'] = nv_process.pid return process name = _decode(N.nvmlDeviceGetName(handle)) uuid = _decode(N.nvmlDeviceGetUUID(handle)) try: temperature = N.nvmlDeviceGetTemperature( handle, N.NVML_TEMPERATURE_GPU ) except N.NVMLError: temperature = None # Not supported try: memory = N.nvmlDeviceGetMemoryInfo(handle) # in Bytes except N.NVMLError: memory = None # Not supported try: utilization = N.nvmlDeviceGetUtilizationRates(handle) except N.NVMLError: utilization = None # Not supported try: power = N.nvmlDeviceGetPowerUsage(handle) except N.NVMLError: power = None try: power_limit = N.nvmlDeviceGetEnforcedPowerLimit(handle) except N.NVMLError: power_limit = None try: nv_comp_processes = \ N.nvmlDeviceGetComputeRunningProcesses(handle) except N.NVMLError: nv_comp_processes = None # Not supported try: nv_graphics_processes = \ N.nvmlDeviceGetGraphicsRunningProcesses(handle) except N.NVMLError: nv_graphics_processes = None # Not supported if nv_comp_processes is None and nv_graphics_processes is None: processes = None else: processes = [] nv_comp_processes = nv_comp_processes or [] nv_graphics_processes = nv_graphics_processes or [] for nv_process in nv_comp_processes + nv_graphics_processes: # TODO: could be more information such as system memory # usage, CPU percentage, create time etc. try: process = get_process_info(nv_process) processes.append(process) except psutil.NoSuchProcess: # TODO: add some reminder for NVML broken context # e.g. nvidia-smi reset or reboot the system pass index = N.nvmlDeviceGetIndex(handle) gpu_info = { 'index': index, 'uuid': uuid, 'name': name, 'temperature.gpu': temperature, 'utilization.gpu': utilization.gpu if utilization else None, 'power.draw': power // 1000 if power is not None else None, 'enforced.power.limit': power_limit // 1000 if power_limit is not None else None, # Convert bytes into MBytes 'memory.used': memory.used // MB if memory else None, 'memory.total': memory.total // MB if memory else None, 'processes': processes, } return gpu_info # 1. get the list of gpu and status gpu_list = [] device_count = N.nvmlDeviceGetCount() for index in range(device_count): handle = N.nvmlDeviceGetHandleByIndex(index) gpu_info = get_gpu_info(handle) gpu_stat = GPUStat(gpu_info) gpu_list.append(gpu_stat) # 2. additional info (driver version, etc). try: driver_version = _decode(N.nvmlSystemGetDriverVersion()) except N.NVMLError: driver_version = None # N/A N.nvmlShutdown() return GPUStatCollection(gpu_list, driver_version=driver_version)
python
def new_query(): """Query the information of all the GPUs on local machine""" N.nvmlInit() def _decode(b): if isinstance(b, bytes): return b.decode() # for python3, to unicode return b def get_gpu_info(handle): """Get one GPU information specified by nvml handle""" def get_process_info(nv_process): """Get the process information of specific pid""" process = {} ps_process = psutil.Process(pid=nv_process.pid) process['username'] = ps_process.username() # cmdline returns full path; # as in `ps -o comm`, get short cmdnames. _cmdline = ps_process.cmdline() if not _cmdline: # sometimes, zombie or unknown (e.g. [kworker/8:2H]) process['command'] = '?' else: process['command'] = os.path.basename(_cmdline[0]) # Bytes to MBytes process['gpu_memory_usage'] = nv_process.usedGpuMemory // MB process['pid'] = nv_process.pid return process name = _decode(N.nvmlDeviceGetName(handle)) uuid = _decode(N.nvmlDeviceGetUUID(handle)) try: temperature = N.nvmlDeviceGetTemperature( handle, N.NVML_TEMPERATURE_GPU ) except N.NVMLError: temperature = None # Not supported try: memory = N.nvmlDeviceGetMemoryInfo(handle) # in Bytes except N.NVMLError: memory = None # Not supported try: utilization = N.nvmlDeviceGetUtilizationRates(handle) except N.NVMLError: utilization = None # Not supported try: power = N.nvmlDeviceGetPowerUsage(handle) except N.NVMLError: power = None try: power_limit = N.nvmlDeviceGetEnforcedPowerLimit(handle) except N.NVMLError: power_limit = None try: nv_comp_processes = \ N.nvmlDeviceGetComputeRunningProcesses(handle) except N.NVMLError: nv_comp_processes = None # Not supported try: nv_graphics_processes = \ N.nvmlDeviceGetGraphicsRunningProcesses(handle) except N.NVMLError: nv_graphics_processes = None # Not supported if nv_comp_processes is None and nv_graphics_processes is None: processes = None else: processes = [] nv_comp_processes = nv_comp_processes or [] nv_graphics_processes = nv_graphics_processes or [] for nv_process in nv_comp_processes + nv_graphics_processes: # TODO: could be more information such as system memory # usage, CPU percentage, create time etc. try: process = get_process_info(nv_process) processes.append(process) except psutil.NoSuchProcess: # TODO: add some reminder for NVML broken context # e.g. nvidia-smi reset or reboot the system pass index = N.nvmlDeviceGetIndex(handle) gpu_info = { 'index': index, 'uuid': uuid, 'name': name, 'temperature.gpu': temperature, 'utilization.gpu': utilization.gpu if utilization else None, 'power.draw': power // 1000 if power is not None else None, 'enforced.power.limit': power_limit // 1000 if power_limit is not None else None, # Convert bytes into MBytes 'memory.used': memory.used // MB if memory else None, 'memory.total': memory.total // MB if memory else None, 'processes': processes, } return gpu_info # 1. get the list of gpu and status gpu_list = [] device_count = N.nvmlDeviceGetCount() for index in range(device_count): handle = N.nvmlDeviceGetHandleByIndex(index) gpu_info = get_gpu_info(handle) gpu_stat = GPUStat(gpu_info) gpu_list.append(gpu_stat) # 2. additional info (driver version, etc). try: driver_version = _decode(N.nvmlSystemGetDriverVersion()) except N.NVMLError: driver_version = None # N/A N.nvmlShutdown() return GPUStatCollection(gpu_list, driver_version=driver_version)
[ "def", "new_query", "(", ")", ":", "N", ".", "nvmlInit", "(", ")", "def", "_decode", "(", "b", ")", ":", "if", "isinstance", "(", "b", ",", "bytes", ")", ":", "return", "b", ".", "decode", "(", ")", "# for python3, to unicode", "return", "b", "def", "get_gpu_info", "(", "handle", ")", ":", "\"\"\"Get one GPU information specified by nvml handle\"\"\"", "def", "get_process_info", "(", "nv_process", ")", ":", "\"\"\"Get the process information of specific pid\"\"\"", "process", "=", "{", "}", "ps_process", "=", "psutil", ".", "Process", "(", "pid", "=", "nv_process", ".", "pid", ")", "process", "[", "'username'", "]", "=", "ps_process", ".", "username", "(", ")", "# cmdline returns full path;", "# as in `ps -o comm`, get short cmdnames.", "_cmdline", "=", "ps_process", ".", "cmdline", "(", ")", "if", "not", "_cmdline", ":", "# sometimes, zombie or unknown (e.g. [kworker/8:2H])", "process", "[", "'command'", "]", "=", "'?'", "else", ":", "process", "[", "'command'", "]", "=", "os", ".", "path", ".", "basename", "(", "_cmdline", "[", "0", "]", ")", "# Bytes to MBytes", "process", "[", "'gpu_memory_usage'", "]", "=", "nv_process", ".", "usedGpuMemory", "//", "MB", "process", "[", "'pid'", "]", "=", "nv_process", ".", "pid", "return", "process", "name", "=", "_decode", "(", "N", ".", "nvmlDeviceGetName", "(", "handle", ")", ")", "uuid", "=", "_decode", "(", "N", ".", "nvmlDeviceGetUUID", "(", "handle", ")", ")", "try", ":", "temperature", "=", "N", ".", "nvmlDeviceGetTemperature", "(", "handle", ",", "N", ".", "NVML_TEMPERATURE_GPU", ")", "except", "N", ".", "NVMLError", ":", "temperature", "=", "None", "# Not supported", "try", ":", "memory", "=", "N", ".", "nvmlDeviceGetMemoryInfo", "(", "handle", ")", "# in Bytes", "except", "N", ".", "NVMLError", ":", "memory", "=", "None", "# Not supported", "try", ":", "utilization", "=", "N", ".", "nvmlDeviceGetUtilizationRates", "(", "handle", ")", "except", "N", ".", "NVMLError", ":", "utilization", "=", "None", "# Not supported", "try", ":", "power", "=", "N", ".", "nvmlDeviceGetPowerUsage", "(", "handle", ")", "except", "N", ".", "NVMLError", ":", "power", "=", "None", "try", ":", "power_limit", "=", "N", ".", "nvmlDeviceGetEnforcedPowerLimit", "(", "handle", ")", "except", "N", ".", "NVMLError", ":", "power_limit", "=", "None", "try", ":", "nv_comp_processes", "=", "N", ".", "nvmlDeviceGetComputeRunningProcesses", "(", "handle", ")", "except", "N", ".", "NVMLError", ":", "nv_comp_processes", "=", "None", "# Not supported", "try", ":", "nv_graphics_processes", "=", "N", ".", "nvmlDeviceGetGraphicsRunningProcesses", "(", "handle", ")", "except", "N", ".", "NVMLError", ":", "nv_graphics_processes", "=", "None", "# Not supported", "if", "nv_comp_processes", "is", "None", "and", "nv_graphics_processes", "is", "None", ":", "processes", "=", "None", "else", ":", "processes", "=", "[", "]", "nv_comp_processes", "=", "nv_comp_processes", "or", "[", "]", "nv_graphics_processes", "=", "nv_graphics_processes", "or", "[", "]", "for", "nv_process", "in", "nv_comp_processes", "+", "nv_graphics_processes", ":", "# TODO: could be more information such as system memory", "# usage, CPU percentage, create time etc.", "try", ":", "process", "=", "get_process_info", "(", "nv_process", ")", "processes", ".", "append", "(", "process", ")", "except", "psutil", ".", "NoSuchProcess", ":", "# TODO: add some reminder for NVML broken context", "# e.g. 
nvidia-smi reset or reboot the system", "pass", "index", "=", "N", ".", "nvmlDeviceGetIndex", "(", "handle", ")", "gpu_info", "=", "{", "'index'", ":", "index", ",", "'uuid'", ":", "uuid", ",", "'name'", ":", "name", ",", "'temperature.gpu'", ":", "temperature", ",", "'utilization.gpu'", ":", "utilization", ".", "gpu", "if", "utilization", "else", "None", ",", "'power.draw'", ":", "power", "//", "1000", "if", "power", "is", "not", "None", "else", "None", ",", "'enforced.power.limit'", ":", "power_limit", "//", "1000", "if", "power_limit", "is", "not", "None", "else", "None", ",", "# Convert bytes into MBytes", "'memory.used'", ":", "memory", ".", "used", "//", "MB", "if", "memory", "else", "None", ",", "'memory.total'", ":", "memory", ".", "total", "//", "MB", "if", "memory", "else", "None", ",", "'processes'", ":", "processes", ",", "}", "return", "gpu_info", "# 1. get the list of gpu and status", "gpu_list", "=", "[", "]", "device_count", "=", "N", ".", "nvmlDeviceGetCount", "(", ")", "for", "index", "in", "range", "(", "device_count", ")", ":", "handle", "=", "N", ".", "nvmlDeviceGetHandleByIndex", "(", "index", ")", "gpu_info", "=", "get_gpu_info", "(", "handle", ")", "gpu_stat", "=", "GPUStat", "(", "gpu_info", ")", "gpu_list", ".", "append", "(", "gpu_stat", ")", "# 2. additional info (driver version, etc).", "try", ":", "driver_version", "=", "_decode", "(", "N", ".", "nvmlSystemGetDriverVersion", "(", ")", ")", "except", "N", ".", "NVMLError", ":", "driver_version", "=", "None", "# N/A", "N", ".", "nvmlShutdown", "(", ")", "return", "GPUStatCollection", "(", "gpu_list", ",", "driver_version", "=", "driver_version", ")" ]
Query the information of all the GPUs on local machine
[ "Query", "the", "information", "of", "all", "the", "GPUs", "on", "local", "machine" ]
28299cdcf55dd627fdd9800cf344988b43188ee8
https://github.com/wookayin/gpustat/blob/28299cdcf55dd627fdd9800cf344988b43188ee8/gpustat/core.py#L262-L385
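A hedged usage sketch: new_query() handles NVML init and shutdown itself, and the returned collection can be printed with the same helpers the CLI uses; the import path follows this record's gpustat/core.py location:

    import sys
    from gpustat.core import GPUStatCollection

    stats = GPUStatCollection.new_query()   # initializes NVML, queries every GPU, shuts NVML down
    stats.print_formatted(sys.stdout)       # human-readable output, as in `gpustat`
    stats.print_json(sys.stdout)            # machine-readable variant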
233,699
wookayin/gpustat
gpustat/__main__.py
print_gpustat
def print_gpustat(json=False, debug=False, **kwargs): ''' Display the GPU query results into standard output. ''' try: gpu_stats = GPUStatCollection.new_query() except Exception as e: sys.stderr.write('Error on querying NVIDIA devices.' ' Use --debug flag for details\n') if debug: try: import traceback traceback.print_exc(file=sys.stderr) except Exception: # NVMLError can't be processed by traceback: # https://bugs.python.org/issue28603 # as a workaround, simply re-throw the exception raise e sys.exit(1) if json: gpu_stats.print_json(sys.stdout) else: gpu_stats.print_formatted(sys.stdout, **kwargs)
python
def print_gpustat(json=False, debug=False, **kwargs): ''' Display the GPU query results into standard output. ''' try: gpu_stats = GPUStatCollection.new_query() except Exception as e: sys.stderr.write('Error on querying NVIDIA devices.' ' Use --debug flag for details\n') if debug: try: import traceback traceback.print_exc(file=sys.stderr) except Exception: # NVMLError can't be processed by traceback: # https://bugs.python.org/issue28603 # as a workaround, simply re-throw the exception raise e sys.exit(1) if json: gpu_stats.print_json(sys.stdout) else: gpu_stats.print_formatted(sys.stdout, **kwargs)
[ "def", "print_gpustat", "(", "json", "=", "False", ",", "debug", "=", "False", ",", "*", "*", "kwargs", ")", ":", "try", ":", "gpu_stats", "=", "GPUStatCollection", ".", "new_query", "(", ")", "except", "Exception", "as", "e", ":", "sys", ".", "stderr", ".", "write", "(", "'Error on querying NVIDIA devices.'", "' Use --debug flag for details\\n'", ")", "if", "debug", ":", "try", ":", "import", "traceback", "traceback", ".", "print_exc", "(", "file", "=", "sys", ".", "stderr", ")", "except", "Exception", ":", "# NVMLError can't be processed by traceback:", "# https://bugs.python.org/issue28603", "# as a workaround, simply re-throw the exception", "raise", "e", "sys", ".", "exit", "(", "1", ")", "if", "json", ":", "gpu_stats", ".", "print_json", "(", "sys", ".", "stdout", ")", "else", ":", "gpu_stats", ".", "print_formatted", "(", "sys", ".", "stdout", ",", "*", "*", "kwargs", ")" ]
Display the GPU query results into standard output.
[ "Display", "the", "GPU", "query", "results", "into", "standard", "output", "." ]
28299cdcf55dd627fdd9800cf344988b43188ee8
https://github.com/wookayin/gpustat/blob/28299cdcf55dd627fdd9800cf344988b43188ee8/gpustat/__main__.py#L14-L37
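Called programmatically, the entry point above behaves like the CLI; the flag values below are only an illustration:

    from gpustat.__main__ import print_gpustat

    # Roughly equivalent to running `gpustat --json` in a shell.
    print_gpustat(json=True, debug=False)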