code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def GetRootFileEntry(self):
  """Retrieves the root file entry.

  Returns:
    TSKPartitionFileEntry: a file entry or None if not available.
  """
  # The root of a partition file system is addressed by the virtual
  # root location with the same parent as this file system's path spec.
  path_spec = tsk_partition_path_spec.TSKPartitionPathSpec(
      location=self.LOCATION_ROOT, parent=self._path_spec.parent)
  return self.GetFileEntryByPathSpec(path_spec)
3.141803
3.261007
0.963446
def _EntriesGenerator(self):
  """Retrieves directory entries.

  Since a directory can contain a vast number of entries using
  a generator is more memory efficient.

  Yields:
    APFSPathSpec: APFS path specification.
  """
  try:
    fsapfs_file_entry = self._file_system.GetAPFSFileEntryByPathSpec(
        self.path_spec)
  except errors.PathSpecError:
    # An unresolvable path specification yields no entries.
    return

  location = getattr(self.path_spec, 'location', None)

  for fsapfs_sub_file_entry in fsapfs_file_entry.sub_file_entries:
    directory_entry = fsapfs_sub_file_entry.name

    if location == self._file_system.PATH_SEPARATOR:
      directory_entry = self._file_system.JoinPath([directory_entry])
    else:
      directory_entry = self._file_system.JoinPath([
          location, directory_entry])

    yield apfs_path_spec.APFSPathSpec(
        identifier=fsapfs_sub_file_entry.identifier,
        location=directory_entry, parent=self.path_spec.parent)
2.277086
2.057644
1.106647
if self._fsapfs_file_entry.number_of_sub_file_entries <= 0: return None return APFSDirectory(self._file_system, self.path_spec)
def _GetDirectory(self)
Retrieves a directory. Returns: APFSDirectory: directory or None if not available.
8.50092
5.76159
1.475447
def _GetLink(self):
  """Retrieves the link.

  Returns:
    str: path of the linked file.
  """
  # The link target is resolved lazily and cached in self._link.
  if self._link is None:
    self._link = ''

    if self.entry_type != definitions.FILE_ENTRY_TYPE_LINK:
      return self._link

    link = self._fsapfs_file_entry.symbolic_link_target
    if link and link[0] != self._file_system.PATH_SEPARATOR:
      # TODO: make link absolute.
      self._link = '/{0:s}'.format(link)

  return self._link
4.751534
4.515022
1.052384
def _GetStat(self):
  """Retrieves information about the file entry.

  Returns:
    VFSStat: a stat object.
  """
  stat_object = super(APFSFileEntry, self)._GetStat()

  # File data stat information.
  stat_object.size = self._fsapfs_file_entry.size

  # Ownership and permissions stat information.
  # Mask off the file-type bits; only the permission bits are exposed.
  stat_object.mode = self._fsapfs_file_entry.file_mode & 0x0fff
  stat_object.uid = self._fsapfs_file_entry.owner_identifier
  stat_object.gid = self._fsapfs_file_entry.group_identifier

  # File entry type stat information.
  stat_object.type = self.entry_type

  # Other stat information.
  stat_object.ino = self._fsapfs_file_entry.identifier
  stat_object.fs_type = 'APFS'
  stat_object.is_allocated = True

  return stat_object
2.980058
2.89256
1.030249
def access_time(self):
  """dfdatetime.DateTimeValues: access time or None if not available."""
  timestamp = self._fsapfs_file_entry.get_access_time_as_integer()
  return dfdatetime_apfs_time.APFSTime(timestamp=timestamp)
11.642938
6.599026
1.764342
def change_time(self):
  """dfdatetime.DateTimeValues: change time or None if not available."""
  timestamp = self._fsapfs_file_entry.get_inode_change_time_as_integer()
  return dfdatetime_apfs_time.APFSTime(timestamp=timestamp)
14.270202
7.032938
2.029053
def creation_time(self):
  """dfdatetime.DateTimeValues: creation time or None if not available."""
  timestamp = self._fsapfs_file_entry.get_creation_time_as_integer()
  return dfdatetime_apfs_time.APFSTime(timestamp=timestamp)
11.594923
5.999073
1.932786
def modification_time(self):
  """dfdatetime.DateTimeValues: modification time or None if not available."""
  timestamp = self._fsapfs_file_entry.get_modification_time_as_integer()
  return dfdatetime_apfs_time.APFSTime(timestamp=timestamp)
11.093778
6.064204
1.829387
def GetLinkedFileEntry(self):
  """Retrieves the linked file entry, e.g. for a symbolic link.

  Returns:
    APFSFileEntry: linked file entry or None if not available.
  """
  link = self._GetLink()
  if not link:
    return None

  # TODO: is there a way to determine the identifier here?
  link_identifier = None

  parent_path_spec = getattr(self.path_spec, 'parent', None)
  path_spec = apfs_path_spec.APFSPathSpec(
      location=link, parent=parent_path_spec)

  is_root = bool(
      link == self._file_system.LOCATION_ROOT or
      link_identifier == self._file_system.ROOT_DIRECTORY_IDENTIFIER)

  return APFSFileEntry(
      self._resolver_context, self._file_system, path_spec, is_root=is_root)
3.223154
3.009563
1.070971
def GetParentFileEntry(self):
  """Retrieves the parent file entry.

  Returns:
    APFSFileEntry: parent file entry or None if not available.
  """
  parent_location = None

  location = getattr(self.path_spec, 'location', None)
  if location is not None:
    parent_location = self._file_system.DirnamePath(location)
    if parent_location == '':
      parent_location = self._file_system.PATH_SEPARATOR

  parent_identifier = self._fsapfs_file_entry.parent_identifier
  if parent_identifier is None:
    return None

  parent_path_spec = getattr(self.path_spec, 'parent', None)
  path_spec = apfs_path_spec.APFSPathSpec(
      location=parent_location, identifier=parent_identifier,
      parent=parent_path_spec)

  is_root = bool(
      parent_location == self._file_system.LOCATION_ROOT or
      parent_identifier == self._file_system.ROOT_DIRECTORY_IDENTIFIER)

  return APFSFileEntry(
      self._resolver_context, self._file_system, path_spec, is_root=is_root)
2.678343
2.437697
1.098719
def _Open(self, path_spec=None, mode='rb'):
  """Opens the file-like object defined by path specification.

  Args:
    path_spec (PathSpec): path specification.
    mode (Optional[str]): file access mode.

  Raises:
    AccessError: if the access to open the file was denied.
    IOError: if the file-like object could not be opened.
    OSError: if the file-like object could not be opened.
    PathSpecError: if the path specification is incorrect.
    ValueError: if the path specification is invalid.
  """
  if not path_spec:
    # Fixed typo in the original error message ("specfication").
    raise ValueError('Missing path specification.')

  volume_index = lvm.LVMPathSpecGetVolumeIndex(path_spec)
  if volume_index is None:
    raise errors.PathSpecError(
        'Unable to retrieve volume index from path specification.')

  self._file_system = resolver.Resolver.OpenFileSystem(
      path_spec, resolver_context=self._resolver_context)

  vslvm_volume_group = self._file_system.GetLVMVolumeGroup()
  if (volume_index < 0 or
      volume_index >= vslvm_volume_group.number_of_logical_volumes):
    raise errors.PathSpecError((
        'Unable to retrieve LVM logical volume index: {0:d} from path '
        'specification.').format(volume_index))

  self._vslvm_logical_volume = vslvm_volume_group.get_logical_volume(
      volume_index)
2.368453
2.447619
0.967656
def read(self, size=None):
  """Reads a byte string from the file-like object at the current offset.

  The function will read a byte string of the specified size or
  all of the remaining data if no size was specified.

  Args:
    size (Optional[int]): number of bytes to read, where None is all
        remaining data.

  Returns:
    bytes: data read.

  Raises:
    IOError: if the read failed.
    OSError: if the read failed.
  """
  if not self._is_open:
    raise IOError('Not opened.')

  return self._vslvm_logical_volume.read(size)
14.147816
20.529716
0.689138
def seek(self, offset, whence=os.SEEK_SET):
  """Seeks to an offset within the file-like object.

  Args:
    offset (int): offset to seek to.
    whence (Optional(int)): value that indicates whether offset is an
        absolute or relative position within the file.

  Raises:
    IOError: if the seek failed.
    OSError: if the seek failed.
  """
  if not self._is_open:
    raise IOError('Not opened.')

  self._vslvm_logical_volume.seek(offset, whence)
11.94401
13.592314
0.878733
def Decrypt(self, encrypted_data):
  """Decrypts the encrypted data.

  Args:
    encrypted_data (bytes): encrypted data.

  Returns:
    tuple[bytes, bytes]: decrypted data and remaining encrypted data.
  """
  # Only whole cipher blocks can be decrypted; keep the trailing partial
  # block as remaining data for the next call.
  index_split = -(len(encrypted_data) % DES3.block_size)
  if index_split:
    remaining_encrypted_data = encrypted_data[index_split:]
    encrypted_data = encrypted_data[:index_split]
  else:
    remaining_encrypted_data = b''

  decrypted_data = self._des3_cipher.decrypt(encrypted_data)
  return decrypted_data, remaining_encrypted_data
3.021871
2.673215
1.130426
def _OpenFileObject(self, path_spec):
  """Opens the file-like object defined by path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    pyqcow.file: a file-like object.

  Raises:
    PathSpecError: if the path specification is incorrect.
  """
  if not path_spec.HasParent():
    raise errors.PathSpecError(
        'Unsupported path specification without parent.')

  file_object = resolver.Resolver.OpenFileObject(
      path_spec.parent, resolver_context=self._resolver_context)
  qcow_file = pyqcow.file()
  qcow_file.open_file_object(file_object)
  return qcow_file
2.385911
2.066404
1.15462
def Main():
  """The main program function.

  Returns:
    bool: True if successful or False if not.
  """
  argument_parser = argparse.ArgumentParser(description=(
      'Calculates a message digest hash for every file in a directory or '
      'storage media image.'))

  argument_parser.add_argument(
      'source', nargs='?', action='store', metavar='image.raw',
      default=None, help=(
          'path of the directory or filename of a storage media image '
          'containing the file.'))

  argument_parser.add_argument(
      '--no-auto-recurse', '--no_auto_recurse', dest='no_auto_recurse',
      action='store_true', default=False, help=(
          'Indicate that the source scanner should not auto-recurse.'))

  options = argument_parser.parse_args()

  if not options.source:
    print('Source value is missing.')
    print('')
    argument_parser.print_help()
    print('')
    return False

  logging.basicConfig(
      level=logging.INFO, format='[%(levelname)s] %(message)s')

  output_writer = StdoutWriter()
  if not output_writer.Open():
    print('Unable to open output writer.')
    print('')
    return False

  return_value = True
  source_analyzer = SourceAnalyzer(auto_recurse=not options.no_auto_recurse)

  try:
    source_analyzer.Analyze(options.source, output_writer)
    print('Completed.')

  except KeyboardInterrupt:
    return_value = False
    print('Aborted by user.')

  output_writer.Close()

  return return_value
2.799157
2.767791
1.011333
try: # Note that encode() will first convert string into a Unicode string # if necessary. encoded_string = string.encode( self._preferred_encoding, errors=self._encode_errors) except UnicodeEncodeError: if self._encode_errors == 'strict': logging.error( 'Unable to properly write output due to encoding error. ' 'Switching to error tolerant encoding which can result in ' 'non Basic Latin (C0) characters to be replaced with "?" or ' '"\\ufffd".') self._encode_errors = 'replace' encoded_string = string.encode( self._preferred_encoding, errors=self._encode_errors) return encoded_string
def _EncodeString(self, string)
Encodes a string in the preferred encoding. Returns: bytes: encoded string.
3.791426
3.551155
1.06766
def _PromptUserForEncryptedVolumeCredential(
    self, scan_context, locked_scan_node, output_writer):
  """Prompts the user to provide a credential for an encrypted volume.

  Args:
    scan_context (SourceScannerContext): the source scanner context.
    locked_scan_node (SourceScanNode): the locked scan node.
    output_writer (StdoutWriter): the output writer.
  """
  credentials = credentials_manager.CredentialsManager.GetCredentials(
      locked_scan_node.path_spec)

  # TODO: print volume description.
  if locked_scan_node.type_indicator == (
      definitions.TYPE_INDICATOR_APFS_CONTAINER):
    line = 'Found an APFS encrypted volume.'
  elif locked_scan_node.type_indicator == definitions.TYPE_INDICATOR_BDE:
    line = 'Found a BitLocker encrypted volume.'
  elif locked_scan_node.type_indicator == definitions.TYPE_INDICATOR_FVDE:
    line = 'Found a CoreStorage (FVDE) encrypted volume.'
  else:
    line = 'Found an encrypted volume.'

  output_writer.WriteLine(line)

  credentials_list = list(credentials.CREDENTIALS)
  credentials_list.append('skip')

  # TODO: check which credentials are available.
  output_writer.WriteLine('Supported credentials:')
  output_writer.WriteLine('')
  for index, name in enumerate(credentials_list):
    output_writer.WriteLine(' {0:d}. {1:s}'.format(index + 1, name))
  output_writer.WriteLine('')

  result = False
  while not result:
    output_writer.WriteString('Select a credential to unlock the volume: ')
    # TODO: add an input reader.
    input_line = sys.stdin.readline()
    input_line = input_line.strip()

    if input_line in credentials_list:
      credential_identifier = input_line
    else:
      # A numeric selection indexes into the credentials list (1-based).
      try:
        credential_identifier = int(input_line, 10)
        credential_identifier = credentials_list[credential_identifier - 1]
      except (IndexError, ValueError):
        output_writer.WriteLine(
            'Unsupported credential: {0:s}'.format(input_line))
        continue

    if credential_identifier == 'skip':
      break

    getpass_string = 'Enter credential data: '
    if sys.platform.startswith('win') and sys.version_info[0] < 3:
      # For Python 2 on Windows getpass (win_getpass) requires an encoded
      # byte string. For Python 3 we need it to be a Unicode string.
      getpass_string = self._EncodeString(getpass_string)

    credential_data = getpass.getpass(getpass_string)
    output_writer.WriteLine('')

    result = self._source_scanner.Unlock(
        scan_context, locked_scan_node.path_spec, credential_identifier,
        credential_data)

    if not result:
      output_writer.WriteLine('Unable to unlock volume.')
      output_writer.WriteLine('')
2.473137
2.509188
0.985632
def Analyze(self, source_path, output_writer):
  """Analyzes the source.

  Args:
    source_path (str): the source path.
    output_writer (StdoutWriter): the output writer.

  Raises:
    RuntimeError: if the source path does not exists, or if the source path
        is not a file or directory, or if the format of or within the source
        file is not supported.
  """
  if not os.path.exists(source_path):
    raise RuntimeError('No such source: {0:s}.'.format(source_path))

  scan_context = source_scanner.SourceScannerContext()
  scan_path_spec = None
  scan_step = 0

  scan_context.OpenSourcePath(source_path)

  while True:
    self._source_scanner.Scan(
        scan_context, auto_recurse=self._auto_recurse,
        scan_path_spec=scan_path_spec)

    if not scan_context.updated:
      break

    if not self._auto_recurse:
      output_writer.WriteScanContext(scan_context, scan_step=scan_step)
    scan_step += 1

    # The source is a directory or file.
    if scan_context.source_type in [
        definitions.SOURCE_TYPE_DIRECTORY, definitions.SOURCE_TYPE_FILE]:
      break

    # The source scanner found a locked volume, e.g. an encrypted volume,
    # and we need a credential to unlock the volume.
    for locked_scan_node in scan_context.locked_scan_nodes:
      self._PromptUserForEncryptedVolumeCredential(
          scan_context, locked_scan_node, output_writer)

    if not self._auto_recurse:
      scan_node = scan_context.GetUnscannedScanNode()
      if not scan_node:
        return
      scan_path_spec = scan_node.path_spec

  if self._auto_recurse:
    output_writer.WriteScanContext(scan_context)
3.086782
3.138067
0.983657
def WriteScanContext(self, scan_context, scan_step=None):
  """Writes the source scanner context to stdout.

  Args:
    scan_context (SourceScannerContext): the source scanner context.
    scan_step (Optional[int]): the scan step, where None represents no step.
  """
  if scan_step is not None:
    print('Scan step: {0:d}'.format(scan_step))

  print('Source type\t\t: {0:s}'.format(scan_context.source_type))
  print('')

  scan_node = scan_context.GetRootScanNode()
  self.WriteScanNode(scan_context, scan_node)
  print('')
3.393623
3.103657
1.093427
def WriteScanNode(self, scan_context, scan_node, indentation=''):
  """Writes the source scanner node to stdout.

  Args:
    scan_context (SourceScannerContext): the source scanner context.
    scan_node (SourceScanNode): the scan node.
    indentation (Optional[str]): indentation.
  """
  if not scan_node:
    return

  # Collect the optional path specification attributes that are set.
  values = []

  part_index = getattr(scan_node.path_spec, 'part_index', None)
  if part_index is not None:
    values.append('{0:d}'.format(part_index))

  store_index = getattr(scan_node.path_spec, 'store_index', None)
  if store_index is not None:
    values.append('{0:d}'.format(store_index))

  start_offset = getattr(scan_node.path_spec, 'start_offset', None)
  if start_offset is not None:
    values.append('start offset: {0:d} (0x{0:08x})'.format(start_offset))

  location = getattr(scan_node.path_spec, 'location', None)
  if location is not None:
    values.append('location: {0:s}'.format(location))

  values = ', '.join(values)

  flags = ''
  if scan_node in scan_context.locked_scan_nodes:
    flags = ' [LOCKED]'

  print('{0:s}{1:s}: {2:s}{3:s}'.format(
      indentation, scan_node.path_spec.type_indicator, values, flags))

  indentation = '  {0:s}'.format(indentation)
  for sub_scan_node in scan_node.sub_nodes:
    self.WriteScanNode(scan_context, sub_scan_node, indentation=indentation)
1.848245
1.832625
1.008523
def Main():
  """The main program function.

  Returns:
    bool: True if successful or False if not.
  """
  argument_parser = argparse.ArgumentParser(description=(
      'Calculates a message digest hash for every file in a directory or '
      'storage media image.'))

  argument_parser.add_argument(
      '--output_file', '--output-file', dest='output_file', action='store',
      metavar='source.hashes', default=None, help=(
          'path of the output file, default is to output to stdout.'))

  argument_parser.add_argument(
      'source', nargs='?', action='store', metavar='image.raw',
      default=None, help='path of the directory or storage media image.')

  options = argument_parser.parse_args()

  if not options.source:
    print('Source value is missing.')
    print('')
    argument_parser.print_help()
    print('')
    return False

  logging.basicConfig(
      level=logging.INFO, format='[%(levelname)s] %(message)s')

  if options.output_file:
    output_writer = FileOutputWriter(options.output_file)
  else:
    output_writer = StdoutWriter()

  try:
    output_writer.Open()
  except IOError as exception:
    print('Unable to open output writer with error: {0!s}.'.format(
        exception))
    print('')
    return False

  return_value = True
  mediator = command_line.CLIVolumeScannerMediator()
  recursive_hasher = RecursiveHasher(mediator=mediator)

  try:
    base_path_specs = recursive_hasher.GetBasePathSpecs(options.source)
    if not base_path_specs:
      print('No supported file system found in source.')
      print('')
      return False

    recursive_hasher.CalculateHashes(base_path_specs, output_writer)

    print('')
    print('Completed.')

  except errors.ScannerError as exception:
    return_value = False
    print('')
    print('[ERROR] {0!s}'.format(exception))

  except errors.UserAbort as exception:
    return_value = False
    print('')
    print('Aborted.')

  output_writer.Close()

  return return_value
2.565132
2.533497
1.012487
hash_context = hashlib.sha256() try: file_object = file_entry.GetFileObject(data_stream_name=data_stream_name) except IOError as exception: logging.warning(( 'Unable to open path specification:\n{0:s}' 'with error: {1!s}').format( file_entry.path_spec.comparable, exception)) return None if not file_object: return None try: data = file_object.read(self._READ_BUFFER_SIZE) while data: hash_context.update(data) data = file_object.read(self._READ_BUFFER_SIZE) except IOError as exception: logging.warning(( 'Unable to read from path specification:\n{0:s}' 'with error: {1!s}').format( file_entry.path_spec.comparable, exception)) return None finally: file_object.close() return hash_context.hexdigest()
def _CalculateHashDataStream(self, file_entry, data_stream_name)
Calculates a message digest hash of the data of the file entry. Args: file_entry (dfvfs.FileEntry): file entry. data_stream_name (str): name of the data stream. Returns: bytes: digest hash or None.
1.722671
1.796983
0.958646
def _CalculateHashesFileEntry(
    self, file_system, file_entry, parent_full_path, output_writer):
  """Recursive calculates hashes starting with the file entry.

  Args:
    file_system (dfvfs.FileSystem): file system.
    file_entry (dfvfs.FileEntry): file entry.
    parent_full_path (str): full path of the parent file entry.
    output_writer (StdoutWriter): output writer.
  """
  # Since every file system implementation can have their own path
  # segment separator we are using JoinPath to be platform and file system
  # type independent.
  full_path = file_system.JoinPath([parent_full_path, file_entry.name])
  for data_stream in file_entry.data_streams:
    hash_value = self._CalculateHashDataStream(file_entry, data_stream.name)
    display_path = self._GetDisplayPath(
        file_entry.path_spec, full_path, data_stream.name)
    output_writer.WriteFileHash(display_path, hash_value or 'N/A')

  for sub_file_entry in file_entry.sub_file_entries:
    self._CalculateHashesFileEntry(
        file_system, sub_file_entry, full_path, output_writer)
2.991379
3.150283
0.949559
display_path = '' if path_spec.HasParent(): parent_path_spec = path_spec.parent if parent_path_spec and parent_path_spec.type_indicator == ( dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION): display_path = ''.join([display_path, parent_path_spec.location]) display_path = ''.join([display_path, full_path]) if data_stream_name: display_path = ':'.join([display_path, data_stream_name]) return display_path
def _GetDisplayPath(self, path_spec, full_path, data_stream_name)
Retrieves a path to display. Args: path_spec (dfvfs.PathSpec): path specification of the file entry. full_path (str): full path of the file entry. data_stream_name (str): name of the data stream. Returns: str: path to display.
1.845781
1.977726
0.933285
def CalculateHashes(self, base_path_specs, output_writer):
  """Recursive calculates hashes starting with the base path specification.

  Args:
    base_path_specs (list[dfvfs.PathSpec]): source path specification.
    output_writer (StdoutWriter): output writer.
  """
  for base_path_spec in base_path_specs:
    file_system = resolver.Resolver.OpenFileSystem(base_path_spec)
    file_entry = resolver.Resolver.OpenFileEntry(base_path_spec)
    if file_entry is None:
      logging.warning('Unable to open base path specification:\n{0:s}'.format(
          base_path_spec.comparable))
      continue

    self._CalculateHashesFileEntry(file_system, file_entry, '', output_writer)
2.056608
2.218661
0.926959
try: # Note that encode() will first convert string into a Unicode string # if necessary. encoded_string = string.encode(self._encoding, errors=self._errors) except UnicodeEncodeError: if self._errors == 'strict': logging.error( 'Unable to properly write output due to encoding error. ' 'Switching to error tolerant encoding which can result in ' 'non Basic Latin (C0) characters to be replaced with "?" or ' '"\\ufffd".') self._errors = 'replace' encoded_string = string.encode(self._encoding, errors=self._errors) return encoded_string
def _EncodeString(self, string)
Encodes the string. Args: string (str): string to encode. Returns: bytes: encoded string.
4.049692
3.937237
1.028562
def WriteFileHash(self, path, hash_value):
  """Writes the file path and hash to file.

  Args:
    path (str): path of the file.
    hash_value (str): message digest hash calculated over the file data.
  """
  string = '{0:s}\t{1:s}\n'.format(hash_value, path)

  encoded_string = self._EncodeString(string)
  self._file_object.write(encoded_string)
4.465779
4.738357
0.942474
def WriteFileHash(self, path, hash_value):
  """Writes the file path and hash to stdout.

  Args:
    path (str): path of the file.
    hash_value (str): message digest hash calculated over the file data.
  """
  string = '{0:s}\t{1:s}'.format(hash_value, path)

  # NOTE(review): on Python 3 printing the encoded bytes emits the b'...'
  # representation; presumably this code predates the Python 3 port —
  # confirm whether the encoded or the Unicode string should be printed.
  encoded_string = self._EncodeString(string)
  print(encoded_string)
5.436069
5.932796
0.916274
def _EntriesGenerator(self):
  """Retrieves directory entries.

  Since a directory can contain a vast number of entries using
  a generator is more memory efficient.

  Yields:
    OSPathSpec: a path specification.

  Raises:
    AccessError: if the access to list the directory was denied.
    BackEndError: if the directory could not be listed.
  """
  location = getattr(self.path_spec, 'location', None)
  if location is not None:
    # Windows will raise WindowsError, which can be caught by OSError,
    # if the process has not access to list the directory. The os.access()
    # function cannot be used since it will return true even when
    # os.listdir() fails.
    try:
      for directory_entry in os.listdir(location):
        directory_entry_location = self._file_system.JoinPath([
            location, directory_entry])
        yield os_path_spec.OSPathSpec(location=directory_entry_location)

    except OSError as exception:
      if exception.errno == errno.EACCES:
        exception_string = str(exception)
        if not isinstance(exception_string, py2to3.UNICODE_TYPE):
          exception_string = py2to3.UNICODE_TYPE(
              exception_string, errors='replace')

        raise errors.AccessError(
            'Access to directory denied with error: {0!s}'.format(
                exception_string))

      else:
        raise errors.BackEndError(
            'Unable to list directory: {0:s} with error: {1!s}'.format(
                location, exception))
3.150122
2.954145
1.06634
def _GetDirectory(self):
  """Retrieves a directory.

  Returns:
    OSDirectory: a directory object or None if not available.
  """
  if self.entry_type != definitions.FILE_ENTRY_TYPE_DIRECTORY:
    return None
  return OSDirectory(self._file_system, self.path_spec)
7.153149
4.463995
1.602409
def _GetStat(self):
  """Retrieves information about the file entry.

  Returns:
    VFSStat: a stat object or None if not available.
  """
  stat_object = super(OSFileEntry, self)._GetStat()

  # Windows device paths have no backing os.stat() information.
  if not self._is_windows_device:
    # File data stat information.
    stat_object.size = self._stat_info.st_size

    # Ownership and permissions stat information.
    stat_object.mode = stat.S_IMODE(self._stat_info.st_mode)
    stat_object.uid = self._stat_info.st_uid
    stat_object.gid = self._stat_info.st_gid

    # Other stat information.
    stat_object.ino = self._stat_info.st_ino
    # stat_info.st_dev
    # stat_info.st_nlink

  return stat_object
2.762218
2.794401
0.988483
def access_time(self):
  """dfdatetime.DateTimeValues: access time or None if not available."""
  if self._stat_info is None:
    return None

  # Truncates sub-second precision of st_atime to whole seconds.
  timestamp = int(self._stat_info.st_atime)
  return dfdatetime_posix_time.PosixTime(timestamp=timestamp)
4.43721
2.989406
1.484312
def change_time(self):
  """dfdatetime.DateTimeValues: change time or None if not available."""
  if self._stat_info is None:
    return None

  # Truncates sub-second precision of st_ctime to whole seconds.
  timestamp = int(self._stat_info.st_ctime)
  return dfdatetime_posix_time.PosixTime(timestamp=timestamp)
5.053205
3.308027
1.527559
def link(self):
  """str: full path of the linked file entry."""
  # The link target is resolved lazily and cached in self._link.
  if self._link is None:
    self._link = ''

    if not self.IsLink():
      return self._link

    location = getattr(self.path_spec, 'location', None)
    if location is None:
      return self._link

    self._link = os.readlink(location)
    self._link = os.path.abspath(self._link)

  return self._link
3.37488
2.83309
1.191236
def modification_time(self):
  """dfdatetime.DateTimeValues: modification time or None if not available."""
  if self._stat_info is None:
    return None

  # Truncates sub-second precision of st_mtime to whole seconds.
  timestamp = int(self._stat_info.st_mtime)
  return dfdatetime_posix_time.PosixTime(timestamp=timestamp)
4.127273
2.740135
1.50623
def GetLinkedFileEntry(self):
  """Retrieves the linked file entry, for example for a symbolic link.

  Returns:
    OSFileEntry: linked file entry or None if not available.
  """
  link = self._GetLink()
  if not link:
    return None

  path_spec = os_path_spec.OSPathSpec(location=link)
  return OSFileEntry(self._resolver_context, self._file_system, path_spec)
3.537051
3.272847
1.080726
def GetParentFileEntry(self):
  """Retrieves the parent file entry.

  Returns:
    OSFileEntry: parent file entry or None if not available.
  """
  location = getattr(self.path_spec, 'location', None)
  if location is None:
    return None

  parent_location = self._file_system.DirnamePath(location)
  if parent_location is None:
    return None

  # An empty dirname means the parent is the file system root.
  if parent_location == '':
    parent_location = self._file_system.PATH_SEPARATOR

  path_spec = os_path_spec.OSPathSpec(location=parent_location)
  return OSFileEntry(self._resolver_context, self._file_system, path_spec)
2.782192
2.518908
1.104523
if not self._file_object_set_in_init: try: # TODO: fix close being called for the same object multiple times. self._file_object.close() except IOError: pass self._file_object = None
def _Close(self)
Closes the file-like object. If the file-like object was passed in the init function the file object-based file-like object does not control the file-like object and should not actually close it.
6.440842
5.496744
1.171756
if not self._file_object_set_in_init and not path_spec: raise ValueError('Missing path specification.') if self._file_object_set_in_init: return self._file_object = self._OpenFileObject(path_spec) if not self._file_object: raise IOError('Unable to open missing file-like object.')
def _Open(self, path_spec=None, mode='rb')
Opens the file-like object defined by path specification. Args: path_spec (Optional[PathSpec]): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the file-like object could not be opened. OSError: if the file-like object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
3.173188
3.016014
1.052113
def read(self, size=None):
  """Reads a byte string from the file-like object at the current offset.

  The function will read a byte string of the specified size or
  all of the remaining data if no size was specified.

  Args:
    size (Optional[int]): number of bytes to read, where None is all
        remaining data.

  Returns:
    bytes: data read.

  Raises:
    IOError: if the read failed.
    OSError: if the read failed.
  """
  if not self._is_open:
    raise IOError('Not opened.')

  # Do not pass the size argument as a keyword argument since it breaks
  # some file-like object implementations.
  return self._file_object.read(size)
7.314011
7.832591
0.933792
def get_offset(self):
  """Retrieves the current offset into the file-like object.

  Returns:
    int: current offset into the file-like object.

  Raises:
    IOError: if the file-like object has not been opened.
    OSError: if the file-like object has not been opened.
  """
  if not self._is_open:
    raise IOError('Not opened.')

  # Fall back to the standard tell() when the wrapped object does not
  # provide the pyf* style get_offset().
  if not hasattr(self._file_object, 'get_offset'):
    return self._file_object.tell()
  return self._file_object.get_offset()
3.922353
3.330264
1.177791
def get_size(self):
  """Retrieves the size of the file-like object.

  Returns:
    int: size of the file-like object data.

  Raises:
    IOError: if the file-like object has not been opened.
    OSError: if the file-like object has not been opened.
  """
  if not self._is_open:
    raise IOError('Not opened.')

  if not hasattr(self._file_object, 'get_size'):
    # Determine the size by seeking to the end, then restore the offset.
    if not self._size:
      current_offset = self.get_offset()
      self.seek(0, os.SEEK_END)
      self._size = self.get_offset()
      self.seek(current_offset, os.SEEK_SET)
    return self._size

  return self._file_object.get_size()
2.61112
2.419287
1.079293
def _Open(self, path_spec, mode='rb'):
  """Opens the file system defined by path specification.

  Args:
    path_spec (PathSpec): a path specification.
    mode (Optional[str]): file access mode. The default is 'rb' which
        represents read-only binary.

  Raises:
    AccessError: if the access to open the file was denied.
    IOError: if the file system could not be opened.
    PathSpecError: if the path specification is incorrect.
    ValueError: if the path specification is invalid.
  """
  if not path_spec.HasParent():
    raise errors.PathSpecError(
        'Unsupported path specification without parent.')

  # Bug fix: the original error messages both said "without encoding
  # method" (copied from the encoded stream file system); they now name
  # the missing attribute.
  range_offset = getattr(path_spec, 'range_offset', None)
  if range_offset is None:
    raise errors.PathSpecError(
        'Unsupported path specification without range offset.')

  range_size = getattr(path_spec, 'range_size', None)
  if range_size is None:
    raise errors.PathSpecError(
        'Unsupported path specification without range size.')

  self._range_offset = range_offset
  self._range_size = range_size
1.847463
2.013415
0.917577
def GetFileEntryByPathSpec(self, path_spec):
  """Retrieves a file entry for a path specification.

  Args:
    path_spec (PathSpec): a path specification.

  Returns:
    DataRangeFileEntry: a file entry or None if not available.
  """
  # A data range file system contains a single virtual root file entry.
  return data_range_file_entry.DataRangeFileEntry(
      self._resolver_context, self, path_spec,
      is_root=True, is_virtual=True)
3.751185
3.913929
0.958419
def GetRootFileEntry(self):
  """Retrieves the root file entry.

  Returns:
    DataRangeFileEntry: a file entry or None if not available.
  """
  path_spec = data_range_path_spec.DataRangePathSpec(
      range_offset=self._range_offset, range_size=self._range_size,
      parent=self._path_spec.parent)
  return self.GetFileEntryByPathSpec(path_spec)
3.256198
3.217407
1.012056
def _Open(self, path_spec, mode='rb'):
  """Opens the file system defined by path specification.

  Args:
    path_spec (PathSpec): a path specification.
    mode (Optional[str]): file access mode. The default is 'rb' read-only
        binary.

  Raises:
    AccessError: if the access to open the file was denied.
    IOError: if the file system object could not be opened.
    PathSpecError: if the path specification is incorrect.
    ValueError: if the path specification is invalid.
  """
  if not path_spec.HasParent():
    raise errors.PathSpecError(
        'Unsupported path specification without parent.')

  file_object = resolver.Resolver.OpenFileObject(
      path_spec.parent, resolver_context=self._resolver_context)

  try:
    fsapfs_container = pyfsapfs.container()
    fsapfs_container.open_file_object(file_object)
  except:
    # Close the file object on any failure, then re-raise so the caller
    # sees the original error; only assign state on success.
    file_object.close()
    raise

  self._file_object = file_object
  self._fsapfs_container = fsapfs_container
2.246852
2.474567
0.907978
def FileEntryExistsByPathSpec(self, path_spec):
  """Determines if a file entry for a path specification exists.

  Args:
    path_spec (PathSpec): a path specification.

  Returns:
    bool: True if the file entry exists.
  """
  volume_index = apfs_helper.APFSContainerPathSpecGetVolumeIndex(path_spec)

  # The virtual root file has no corresponding volume index but should
  # have a location.
  if volume_index is None:
    location = getattr(path_spec, 'location', None)
    return location is not None and location == self.LOCATION_ROOT

  return 0 <= volume_index < self._fsapfs_container.number_of_volumes
6.465952
6.825408
0.947336
def GetAPFSVolumeByPathSpec(self, path_spec):
  """Retrieves an APFS volume for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    pyfsapfs.volume: an APFS volume or None if not available.
  """
  volume_index = apfs_helper.APFSContainerPathSpecGetVolumeIndex(path_spec)
  if volume_index is not None:
    return self._fsapfs_container.get_volume(volume_index)
  return None
5.645082
5.770116
0.978331
def GetFileEntryByPathSpec(self, path_spec):
  """Retrieves a file entry for a path specification.

  Args:
    path_spec (PathSpec): a path specification.

  Returns:
    APFSContainerFileEntry: a file entry or None if not exists.
  """
  volume_index = apfs_helper.APFSContainerPathSpecGetVolumeIndex(path_spec)

  # The virtual root file has no corresponding volume index but should
  # have a location.
  if volume_index is None:
    location = getattr(path_spec, 'location', None)
    if location != self.LOCATION_ROOT:
      return None

    return apfs_container_file_entry.APFSContainerFileEntry(
        self._resolver_context, self, path_spec, is_root=True,
        is_virtual=True)

  if 0 <= volume_index < self._fsapfs_container.number_of_volumes:
    return apfs_container_file_entry.APFSContainerFileEntry(
        self._resolver_context, self, path_spec)

  return None
3.055007
3.075577
0.993312
def GetRootFileEntry(self):
  """Retrieves the root file entry.

  Returns:
    APFSContainerFileEntry: a file entry.
  """
  root_path_spec = apfs_container_path_spec.APFSContainerPathSpec(
      parent=self._path_spec.parent, location=self.LOCATION_ROOT)
  return self.GetFileEntryByPathSpec(root_path_spec)
3.636549
3.431786
1.059667
def Glob2Regex(glob_pattern):
  """Converts a glob pattern to a regular expression.

  This function supports basic glob patterns that consist of:
  *       matches everything
  ?       matches any single character
  [seq]   matches any character in sequence
  [!seq]  matches any character not in sequence

  Args:
    glob_pattern (str): glob pattern.

  Returns:
    str: regular expression pattern.

  Raises:
    ValueError: if the glob pattern cannot be converted.
  """
  if not glob_pattern:
    raise ValueError('Missing glob pattern.')

  regex_pattern = []

  glob_pattern_index = 0
  glob_pattern_length = len(glob_pattern)
  while glob_pattern_index < glob_pattern_length:
    character = glob_pattern[glob_pattern_index]
    glob_pattern_index += 1

    if character == '*':
      regex_pattern.append('.*')

    elif character == '?':
      regex_pattern.append('.')

    elif character != '[':
      regex_pattern.append(re.escape(character))

    else:
      # Scan for the closing ']', skipping a leading '!' (negation) and a
      # leading ']' (a literal member of the sequence).
      glob_group_index = glob_pattern_index
      if (glob_group_index < glob_pattern_length and
          glob_pattern[glob_group_index] == '!'):
        glob_group_index += 1

      if (glob_group_index < glob_pattern_length and
          glob_pattern[glob_group_index] == ']'):
        glob_group_index += 1

      while (glob_group_index < glob_pattern_length and
             glob_pattern[glob_group_index] != ']'):
        glob_group_index += 1

      if glob_group_index >= glob_pattern_length:
        # No closing ']' found, treat '[' as a literal character.
        regex_pattern.append('\\[')
        continue

      glob_group = glob_pattern[glob_pattern_index:glob_group_index]
      glob_pattern_index = glob_group_index + 1

      glob_group = glob_group.replace('\\', '\\\\')
      # Escape '|' unconditionally instead of only on Python 3.7 and later
      # (previously gated on a project version-check helper). Inside a
      # character class '\\|' and '|' match the same character, and
      # re.escape() no longer escapes '|' as of Python 3.7, so the output
      # is unchanged on all supported Python versions.
      glob_group = glob_group.replace('|', '\\|')

      regex_pattern.append('[')

      if glob_group[0] == '!':
        regex_pattern.append('^')
        glob_group = glob_group[1:]
      elif glob_group[0] == '^':
        regex_pattern.append('\\')

      regex_pattern.append(glob_group)
      regex_pattern.append(']')

  return ''.join(regex_pattern)
1.664803
1.696802
0.981141
def comparable(self):
  """str: comparable representation of the path specification."""
  sub_comparable_parts = []
  if self.location is not None:
    sub_comparable_parts.append('location: {0:s}'.format(self.location))
  if self.part_index is not None:
    sub_comparable_parts.append('part index: {0:d}'.format(self.part_index))
  if self.start_offset is not None:
    sub_comparable_parts.append(
        'start offset: 0x{0:08x}'.format(self.start_offset))

  return self._GetComparable(
      sub_comparable_string=', '.join(sub_comparable_parts))
2.495534
2.264896
1.101832
def _Open(self, path_spec, mode='rb'):
  """Opens the file system defined by path specification.

  Args:
    path_spec (PathSpec): a path specification.
    mode (Optional[str]): file access mode. The default is 'rb' which
        represents read-only binary.

  Raises:
    AccessError: if the access to open the file was denied.
    IOError: if the file system could not be opened.
    PathSpecError: if the path specification is incorrect.
    ValueError: if the path specification is invalid.
  """
  if not path_spec.HasParent():
    raise errors.PathSpecError(
        'Unsupported path specification without parent.')

  # Credentials are extracted before the encryption method is validated.
  resolver.Resolver.key_chain.ExtractCredentialsFromPathSpec(path_spec)

  method = getattr(path_spec, 'encryption_method', None)
  if not method:
    raise errors.PathSpecError(
        'Unsupported path specification without encryption method.')

  self._encryption_method = method
2.591601
3.063764
0.845888
def GetFileEntryByPathSpec(self, path_spec):
  """Retrieves a file entry for a path specification.

  Args:
    path_spec (PathSpec): a path specification.

  Returns:
    EncryptedStreamFileEntry: a file entry or None if not available.
  """
  file_entry = encrypted_stream_file_entry.EncryptedStreamFileEntry(
      self._resolver_context, self, path_spec, is_root=True, is_virtual=True)
  return file_entry
3.878259
3.968199
0.977335
def GetRootFileEntry(self):
  """Retrieves the root file entry.

  Returns:
    EncryptedStreamFileEntry: a file entry or None if not available.
  """
  root_path_spec = encrypted_stream_path_spec.EncryptedStreamPathSpec(
      parent=self._path_spec.parent,
      encryption_method=self._encryption_method)
  return self.GetFileEntryByPathSpec(root_path_spec)
3.653936
3.158874
1.156721
def _ConvertDictToObject(self, json_dict):
  """Converts a JSON dict into a path specification object.

  The dictionary of the JSON serialized objects consists of:
  {
      '__type__': 'PathSpec'
      'type_indicator': 'OS'
      'parent': { ... }
      ...
  }

  Here '__type__' indicates the object base type in this case this should
  be 'PathSpec'. The rest of the elements of the dictionary make up the
  path specification object properties.

  Note that json_dict is a dict of dicts and the _ConvertDictToObject
  method will be called for every dict. That is how the path specification
  parent objects are created.

  Args:
    json_dict (dict[str, object]): JSON serialized objects.

  Returns:
    PathSpec: a path specification.

  Raises:
    TypeError: if the JSON serialized object does not contain a '__type__'
        attribute that contains 'PathSpec'.
  """
  # Use __type__ to indicate the object class type.
  class_type = json_dict.get('__type__', None)
  if class_type not in self._CLASS_TYPES:
    raise TypeError('Missing path specification object type.')

  # Remove the class type from the JSON dict since we cannot pass it.
  del json_dict['__type__']

  # Convert row_condition back to a tuple, JSON serializes it as a list.
  if 'row_condition' in json_dict:
    json_dict['row_condition'] = tuple(json_dict['row_condition'])

  type_indicator = json_dict.get('type_indicator', None)
  if type_indicator:
    del json_dict['type_indicator']

  return path_spec_factory.Factory.NewPathSpec(type_indicator, **json_dict)
3.766556
2.992011
1.258871
def default(self, path_spec_object):
  """Converts a path specification object into a JSON dictionary.

  The resulting dictionary of the JSON serialized objects consists of:
  {
      '__type__': 'PathSpec'
      'type_indicator': 'OS'
      'parent': { ... }
      ...
  }

  Here '__type__' indicates the object base type in this case this should
  be 'PathSpec'. The rest of the elements of the dictionary make up the
  path specification object properties. The supported property names are
  defined in path_spec_factory.Factory.PROPERTY_NAMES.

  Note that this method is called recursively for every path specification
  object and creates a dict of dicts in the process that is transformed
  into a JSON string by the JSON encoder.

  Args:
    path_spec_object (PathSpec): a path specification.

  Returns:
    dict[str, object]: JSON serialized objects.

  Raises:
    TypeError: if not an instance of PathSpec.
  """
  if not isinstance(path_spec_object, path_spec.PathSpec):
    raise TypeError

  json_dict = {'__type__': 'PathSpec'}
  for property_name in path_spec_factory.Factory.PROPERTY_NAMES:
    property_value = getattr(path_spec_object, property_name, None)
    if property_value is None:
      continue

    if property_name == 'row_condition':
      # JSON has no tuple type, store row_condition as a list.
      json_dict[property_name] = list(property_value)
    else:
      json_dict[property_name] = property_value

  if path_spec_object.HasParent():
    json_dict['parent'] = self.default(path_spec_object.parent)

  json_dict['type_indicator'] = path_spec_object.type_indicator
  location = getattr(path_spec_object, 'location', None)
  if location:
    json_dict['location'] = location

  return json_dict
2.204024
1.93443
1.139366
def CacheObject(self, identifier, vfs_object):
  """Caches a VFS object.

  This method ignores the cache value reference count.

  Args:
    identifier (str): VFS object identifier.
    vfs_object (object): VFS object to cache.

  Raises:
    CacheFullError: if the maximum number of cached values is reached.
    KeyError: if the VFS object already is cached.
  """
  if identifier in self._values:
    raise KeyError('Object already cached for identifier: {0:s}'.format(
        identifier))

  # Use >= instead of == so the guard also holds if the cache is ever
  # over-full, e.g. when the maximum is lowered after creation.
  if len(self._values) >= self._maximum_number_of_cached_values:
    raise errors.CacheFullError('Maximum number of cached values reached.')

  self._values[identifier] = ObjectsCacheValue(vfs_object)
3.45486
2.779127
1.243146
def GetCacheValueByObject(self, vfs_object):
  """Retrieves the cache value for the cached object.

  Args:
    vfs_object (object): VFS object that was cached.

  Returns:
    tuple[str, ObjectsCacheValue]: identifier and cache value object or
        (None, None) if not cached.

  Raises:
    RuntimeError: if the cache value is missing.
  """
  for cached_identifier, cached_value in self._values.items():
    if not cached_value:
      raise RuntimeError('Missing cache value.')

    if cached_value.vfs_object == vfs_object:
      return cached_identifier, cached_value

  return None, None
4.406831
3.370348
1.30753
def GetObject(self, identifier):
  """Retrieves a cached object based on the identifier.

  This method ignores the cache value reference count.

  Args:
    identifier (str): VFS object identifier.

  Returns:
    object: cached VFS object or None if not cached.
  """
  cache_value = self._values.get(identifier, None)
  if cache_value:
    return cache_value.vfs_object
  return None
7.166958
5.540999
1.293441
def GrabObject(self, identifier):
  """Grabs a cached object based on the identifier.

  This method increments the cache value reference count.

  Args:
    identifier (str): VFS object identifier.

  Raises:
    KeyError: if the VFS object is not found in the cache.
    RuntimeError: if the cache value is missing.
  """
  try:
    cache_value = self._values[identifier]
  except KeyError:
    raise KeyError('Missing cached object for identifier: {0:s}'.format(
        identifier))

  if not cache_value:
    raise RuntimeError('Missing cache value for identifier: {0:s}'.format(
        identifier))

  cache_value.IncrementReferenceCount()
3.662727
2.508293
1.460247
def ReleaseObject(self, identifier):
  """Releases a cached object based on the identifier.

  This method decrements the cache value reference count.

  Args:
    identifier (str): VFS object identifier.

  Raises:
    KeyError: if the VFS object is not found in the cache.
    RuntimeError: if the cache value is missing.
  """
  try:
    cache_value = self._values[identifier]
  except KeyError:
    raise KeyError('Missing cached object for identifier: {0:s}'.format(
        identifier))

  if not cache_value:
    raise RuntimeError('Missing cache value for identifier: {0:s}'.format(
        identifier))

  cache_value.DecrementReferenceCount()
3.645035
2.487547
1.465313
def RemoveObject(self, identifier):
  """Removes a cached object based on the identifier.

  This method ignores the cache value reference count.

  Args:
    identifier (str): VFS object identifier.

  Raises:
    KeyError: if the VFS object is not found in the cache.
  """
  try:
    del self._values[identifier]
  except KeyError:
    raise KeyError('Missing cached object for identifier: {0:s}'.format(
        identifier))
5.603631
4.381579
1.278907
def Decode(self, encoded_data):
  """Decode the encoded data.

  Args:
    encoded_data (byte): encoded data.

  Returns:
    tuple(bytes, bytes): decoded data and remaining encoded data.

  Raises:
    BackEndError: if the base64 stream cannot be decoded.
  """
  try:
    # TODO: replace by libuna implementation or equivalent. The behavior of
    # base64.b64decode() does not raise TypeError for certain invalid base64
    # data e.g. b'\x01\x02\x03\x04\x05\x06\x07\x08' these are silently
    # ignored.
    decoded_data = base64.b64decode(encoded_data)
  except (TypeError, binascii.Error) as exception:
    raise errors.BackEndError(
        'Unable to decode base64 stream with error: {0!s}.'.format(
            exception))

  # All of the encoded data is consumed in a single pass, hence no
  # remaining encoded data is returned.
  return decoded_data, b''
4.554519
3.996203
1.139711
def access_time(self):
  """dfdatetime.Filetime: access time or None if not set."""
  return dfdatetime_filetime.Filetime(
      timestamp=self._fsntfs_attribute.get_access_time_as_integer())
11.292279
5.181029
2.179544
def creation_time(self):
  """dfdatetime.Filetime: creation time or None if not set."""
  return dfdatetime_filetime.Filetime(
      timestamp=self._fsntfs_attribute.get_creation_time_as_integer())
11.017941
4.611635
2.389162
def entry_modification_time(self):
  """dfdatetime.Filetime: entry modification time or None if not set."""
  return dfdatetime_filetime.Filetime(
      timestamp=self._fsntfs_attribute.get_entry_modification_time_as_integer())
9.212349
4.332352
2.126408
def modification_time(self):
  """dfdatetime.Filetime: modification time."""
  return dfdatetime_filetime.Filetime(
      timestamp=self._fsntfs_attribute.get_modification_time_as_integer())
9.958584
4.13407
2.408905
def security_descriptor(self):
  """pyfwnt.security_descriptor: security descriptor."""
  security_descriptor = pyfwnt.security_descriptor()
  security_descriptor.copy_from_byte_stream(self._fsntfs_attribute.data)
  return security_descriptor
8.02941
5.077661
1.581321
def _EntriesGenerator(self):
  """Retrieves directory entries.

  Since a directory can contain a vast number of entries using
  a generator is more memory efficient.

  Yields:
    NTFSPathSpec: NTFS path specification.
  """
  try:
    fsntfs_file_entry = self._file_system.GetNTFSFileEntryByPathSpec(
        self.path_spec)
  except errors.PathSpecError:
    fsntfs_file_entry = None

  if not fsntfs_file_entry:
    return

  location = getattr(self.path_spec, 'location', None)

  for fsntfs_sub_file_entry in fsntfs_file_entry.sub_file_entries:
    sub_name = fsntfs_sub_file_entry.name

    # Ignore references to self or parent.
    if sub_name in ('.', '..'):
      continue

    sub_mft_entry = (
        fsntfs_sub_file_entry.file_reference &
        _FILE_REFERENCE_MFT_ENTRY_BITMASK)

    if location == self._file_system.PATH_SEPARATOR:
      sub_location = self._file_system.JoinPath([sub_name])
    else:
      sub_location = self._file_system.JoinPath([location, sub_name])

    yield ntfs_path_spec.NTFSPathSpec(
        location=sub_location,
        mft_attribute=fsntfs_sub_file_entry.name_attribute_index,
        mft_entry=sub_mft_entry, parent=self.path_spec.parent)
2.471886
2.323597
1.063819
def _GetAttributes(self):
  """Retrieves the attributes.

  Returns:
    list[NTFSAttribute]: attributes.
  """
  if self._attributes is None:
    self._attributes = []
    for fsntfs_attribute in self._fsntfs_file_entry.attributes:
      # Fall back to the generic NTFSAttribute when no specific class is
      # mapped for the attribute type.
      attribute_class = self._ATTRIBUTE_TYPE_CLASS_MAPPINGS.get(
          fsntfs_attribute.attribute_type, NTFSAttribute)
      self._attributes.append(attribute_class(fsntfs_attribute))

  return self._attributes
3.400178
2.845578
1.194899
def _GetDataStreams(self):
  """Retrieves the data streams.

  Returns:
    list[NTFSDataStream]: data streams.
  """
  if self._data_streams is None:
    self._data_streams = []

    # The default (nameless) data stream is represented by None.
    if self._fsntfs_file_entry.has_default_data_stream():
      self._data_streams.append(NTFSDataStream(None))

    for fsntfs_data_stream in self._fsntfs_file_entry.alternate_data_streams:
      self._data_streams.append(NTFSDataStream(fsntfs_data_stream))

  return self._data_streams
2.700747
2.404819
1.123056
def _GetDirectory(self):
  """Retrieves a directory.

  Returns:
    NTFSDirectory: directory or None if not available.
  """
  if self._fsntfs_file_entry.number_of_sub_file_entries > 0:
    return NTFSDirectory(self._file_system, self.path_spec)
  return None
6.319524
4.874073
1.296559
if self._link is None: self._link = '' if not self._IsLink(self._fsntfs_file_entry.file_attribute_flags): return self._link link = self._fsntfs_file_entry.reparse_point_print_name if link: # Strip off the drive letter, we assume the link is within # the same volume. _, _, self._link = link.rpartition(':') return self._link
def _GetLink(self)
Retrieves the link. Returns: str: path of the linked file.
5.86695
5.789512
1.013376
def _GetStat(self):
  """Retrieves information about the file entry.

  Returns:
    VFSStat: a stat object.
  """
  stat_object = super(NTFSFileEntry, self)._GetStat()

  # File data stat information.
  if self._fsntfs_file_entry.has_default_data_stream():
    stat_object.size = self._fsntfs_file_entry.get_size()

  # Ownership and permissions stat information.
  # TODO: stat_object.mode
  # TODO: stat_object.uid
  # TODO: stat_object.gid

  # File entry type stat information.
  if self._IsLink(self._fsntfs_file_entry.file_attribute_flags):
    stat_object.type = stat_object.TYPE_LINK
  elif self._fsntfs_file_entry.has_directory_entries_index():
    stat_object.type = stat_object.TYPE_DIRECTORY
  else:
    stat_object.type = stat_object.TYPE_FILE

  # Other stat information.
  stat_object.ino = (
      self._fsntfs_file_entry.file_reference &
      _FILE_REFERENCE_MFT_ENTRY_BITMASK)
  stat_object.fs_type = 'NTFS'
  stat_object.is_allocated = self._fsntfs_file_entry.is_allocated()

  return stat_object
3.04475
2.988714
1.018749
def access_time(self):
  """dfdatetime.DateTimeValues: access time or None if not available."""
  return dfdatetime_filetime.Filetime(
      timestamp=self._fsntfs_file_entry.get_access_time_as_integer())
9.174862
4.71709
1.945026
def change_time(self):
  """dfdatetime.DateTimeValues: change time or None if not available."""
  # NTFS tracks the change time as the entry modification time.
  return dfdatetime_filetime.Filetime(
      timestamp=self._fsntfs_file_entry.get_entry_modification_time_as_integer())
11.109146
5.315935
2.089782
def creation_time(self):
  """dfdatetime.DateTimeValues: creation time or None if not available."""
  return dfdatetime_filetime.Filetime(
      timestamp=self._fsntfs_file_entry.get_creation_time_as_integer())
9.125611
4.389006
2.079198
def name(self):
  """str: name of the file entry, which does not include the full path."""
  # The root directory file name is typically '.', dfVFS however uses ''.
  if self._is_root:
    return ''

  mft_attribute = getattr(self.path_spec, 'mft_attribute', None)
  if mft_attribute is None:
    return self._fsntfs_file_entry.get_name()
  return self._fsntfs_file_entry.get_name_by_attribute_index(mft_attribute)
8.058198
6.80011
1.18501
def modification_time(self):
  """dfdatetime.DateTimeValues: modification time or None if not available."""
  return dfdatetime_filetime.Filetime(
      timestamp=self._fsntfs_file_entry.get_modification_time_as_integer())
8.461287
4.2878
1.97334
def GetFileObject(self, data_stream_name=''):
  """Retrieves the file-like object.

  Args:
    data_stream_name (Optional[str]): data stream name, where an empty
        string represents the default data stream.

  Returns:
    NTFSFileIO: file-like object or None.
  """
  # Without a default data stream there is nothing to open for the default
  # (nameless) data stream.
  if (not data_stream_name and
      not self._fsntfs_file_entry.has_default_data_stream()):
    return None

  # Make sure to make the changes on a copy of the path specification, so we
  # do not alter self.path_spec.
  path_spec = copy.deepcopy(self.path_spec)
  if data_stream_name:
    setattr(path_spec, 'data_stream', data_stream_name)

  return resolver.Resolver.OpenFileObject(
      path_spec, resolver_context=self._resolver_context)
4.178751
3.685982
1.133687
def GetLinkedFileEntry(self):
  """Retrieves the linked file entry, e.g. for a symbolic link.

  Returns:
    NTFSFileEntry: linked file entry or None.
  """
  link = self._GetLink()
  if not link:
    return None

  # TODO: is there a way to determine the MFT entry here?
  link_mft_entry = None

  path_spec = ntfs_path_spec.NTFSPathSpec(
      location=link, parent=getattr(self.path_spec, 'parent', None))

  is_root = bool(
      link == self._file_system.LOCATION_ROOT or
      link_mft_entry == self._file_system.MFT_ENTRY_ROOT_DIRECTORY)

  return NTFSFileEntry(
      self._resolver_context, self._file_system, path_spec, is_root=is_root)
3.10592
2.910531
1.067132
def GetParentFileEntry(self):
  """Retrieves the parent file entry.

  Returns:
    NTFSFileEntry: parent file entry or None if not available.
  """
  # NOTE(review): if the path specification has no location,
  # parent_location is left undefined and the NTFSPathSpec construction
  # below would raise — presumably path specifications always carry a
  # location here; verify against callers.
  location = getattr(self.path_spec, 'location', None)
  if location is not None:
    parent_location = self._file_system.DirnamePath(location)
    if parent_location == '':
      parent_location = self._file_system.PATH_SEPARATOR

  parent_file_reference = None
  mft_attribute = getattr(self.path_spec, 'mft_attribute', None)
  if mft_attribute is not None:
    parent_file_reference = (
        self._fsntfs_file_entry.get_parent_file_reference_by_attribute_index(
            mft_attribute))
  else:
    parent_file_reference = (
        self._fsntfs_file_entry.get_parent_file_reference())

  if parent_file_reference is None:
    return None

  parent_mft_entry = (
      parent_file_reference & _FILE_REFERENCE_MFT_ENTRY_BITMASK)

  parent_path_spec = getattr(self.path_spec, 'parent', None)

  # TODO: determine and pass the mft_attribute of the parent
  # for a faster resolve of the file entry.
  path_spec = ntfs_path_spec.NTFSPathSpec(
      location=parent_location, mft_entry=parent_mft_entry,
      parent=parent_path_spec)

  # TODO: handle parent correctly use attribute index?
  is_root = bool(
      parent_location == self._file_system.LOCATION_ROOT or
      parent_mft_entry == self._file_system.MFT_ENTRY_ROOT_DIRECTORY)

  return NTFSFileEntry(
      self._resolver_context, self._file_system, path_spec, is_root=is_root)
2.778017
2.680321
1.036449
def GetSecurityDescriptor(self):
  """Retrieves the security descriptor.

  Returns:
    pyfwnt.security_descriptor: security descriptor.
  """
  security_descriptor = pyfwnt.security_descriptor()
  security_descriptor.copy_from_byte_stream(
      self._fsntfs_file_entry.security_descriptor_data)
  return security_descriptor
7.009508
5.332721
1.314434
def _ReadFileEntries(self, file_object):
  """Reads the file entries from the cpio archive.

  Args:
    file_object (FileIO): file-like object.
  """
  self._file_entries = {}

  file_offset = 0
  # A file size of 0 means the total size is unknown; rely on the trailer
  # entry to terminate the loop in that case.
  while self._file_size == 0 or file_offset < self._file_size:
    file_entry = self._ReadFileEntry(file_object, file_offset)
    file_offset += file_entry.size

    # The trailer entry marks the end of the archive.
    if file_entry.path == 'TRAILER!!!':
      break

    # TODO: alert on file entries with duplicate paths?
    if file_entry.path not in self._file_entries:
      self._file_entries[file_entry.path] = file_entry
3.037774
3.134935
0.969007
def GetFileEntries(self, path_prefix=''):
  """Retrieves the file entries.

  Args:
    path_prefix (str): path prefix.

  Yields:
    CPIOArchiveFileEntry: a CPIO archive file entry.
  """
  file_entries = self._file_entries or {}
  for path, file_entry in file_entries.items():
    if path.startswith(path_prefix):
      yield file_entry
3.281434
3.614686
0.907806
def Open(self, file_object):
  """Opens the CPIO archive file.

  Args:
    file_object (FileIO): a file-like object.

  Raises:
    IOError: if the file format signature is not supported.
    OSError: if the file format signature is not supported.
  """
  file_object.seek(0, os.SEEK_SET)
  signature_data = file_object.read(6)

  # The binary formats are identified by a 2-byte signature, the ASCII
  # formats by a 6-byte signature.
  two_byte_formats = (
      (self._CPIO_SIGNATURE_BINARY_BIG_ENDIAN, 'bin-big-endian'),
      (self._CPIO_SIGNATURE_BINARY_LITTLE_ENDIAN, 'bin-little-endian'))
  six_byte_formats = (
      (self._CPIO_SIGNATURE_PORTABLE_ASCII, 'odc'),
      (self._CPIO_SIGNATURE_NEW_ASCII, 'newc'),
      (self._CPIO_SIGNATURE_NEW_ASCII_WITH_CHECKSUM, 'crc'))

  self.file_format = None
  if len(signature_data) > 2:
    for signature, file_format in two_byte_formats:
      if signature_data[:2] == signature:
        self.file_format = file_format
        break
    else:
      for signature, file_format in six_byte_formats:
        if signature_data == signature:
          self.file_format = file_format
          break

  if self.file_format is None:
    raise IOError('Unsupported CPIO format.')

  self._file_object = file_object
  self._file_size = file_object.get_size()

  self._ReadFileEntries(self._file_object)
2.394493
2.371437
1.009722
def ReadDataAtOffset(self, file_offset, size):
  """Reads a byte string from the file-like object at a specific offset.

  Args:
    file_offset (int): file offset.
    size (int): number of bytes to read.

  Returns:
    bytes: data read.

  Raises:
    IOError: if the read failed.
    OSError: if the read failed.
  """
  file_object = self._file_object
  file_object.seek(file_offset, os.SEEK_SET)
  return file_object.read(size)
2.629948
3.142566
0.836879
location = getattr(path_spec, 'location', None) if location is None: return False is_device = False if platform.system() == 'Windows': # Note that os.path.exists() returns False for Windows device files so # instead use libsmdev to do the check. try: is_device = pysmdev.check_device(location) except IOError as exception: # Since pysmdev will raise IOError when it has no access to the device # we check if the exception message contains ' access denied ' and # return true. # Note that exception.message no longer works in Python 3. exception_string = str(exception) if not isinstance(exception_string, py2to3.UNICODE_TYPE): exception_string = py2to3.UNICODE_TYPE( exception_string, errors='replace') if ' access denied ' in exception_string: is_device = True # Note that os.path.exists() returns False for broken symbolic links hence # an additional check using os.path.islink() is necessary. return is_device or os.path.exists(location) or os.path.islink(location)
def FileEntryExistsByPathSpec(self, path_spec)
Determines if a file entry for a path specification exists. Args: path_spec (PathSpec): a path specification. Returns: bool: True if the file entry exists, false otherwise.
3.928294
4.11483
0.954667
def GetFileEntryByPathSpec(self, path_spec):
  """Retrieves a file entry for a path specification.

  Args:
    path_spec (PathSpec): a path specification.

  Returns:
    OSFileEntry: a file entry or None if not available.
  """
  if self.FileEntryExistsByPathSpec(path_spec):
    return os_file_entry.OSFileEntry(self._resolver_context, self, path_spec)
  return None
3.217764
3.670492
0.876657
def GetRootFileEntry(self):
  """Retrieves the root file entry.

  Returns:
    OSFileEntry: a file entry or None if not available.
  """
  if platform.system() == 'Windows':
    # Return the root with the drive letter of the volume the current
    # working directory is on.
    location = os.getcwd()
    location, _, _ = location.partition('\\')
    location = '{0:s}\\'.format(location)
  else:
    location = '/'

  # Presumably guards against a missing or inaccessible root volume —
  # TODO confirm against callers.
  if not os.path.exists(location):
    return None

  path_spec = os_path_spec.OSPathSpec(location=location)
  return self.GetFileEntryByPathSpec(path_spec)
3.509005
3.313903
1.058874
def JoinPath(self, path_segments):
  """Joins the path segments into a path.

  Args:
    path_segments (list[str]): path segments.

  Returns:
    str: joined path segments prefixed with the path separator.
  """
  # For paths on Windows we need to make sure to handle the first path
  # segment correctly.
  first_path_segment = None
  if path_segments and platform.system() == 'Windows':
    # Check if the first path segment contains a "special" path definition.
    first_path_segment = path_segments[0]
    first_path_segment_length = len(first_path_segment)
    first_path_segment_prefix = None

    # In case the path start with: \\.\C:\
    if (first_path_segment_length >= 7 and
        first_path_segment.startswith('\\\\.\\') and
        first_path_segment[5:7] == ':\\'):
      first_path_segment_prefix = first_path_segment[4:6]
      first_path_segment = first_path_segment[7:]

    # In case the path start with: \\.\ or \\?\
    elif (first_path_segment_length >= 4 and
          first_path_segment[:4] in ['\\\\.\\', '\\\\?\\']):
      first_path_segment_prefix = first_path_segment[:4]
      first_path_segment = first_path_segment[4:]

    # In case the path start with: C:
    elif first_path_segment_length >= 2 and first_path_segment[1] == ':':
      first_path_segment_prefix = first_path_segment[:2]
      first_path_segment = first_path_segment[2:]

    # In case the path start with: \\server\share (UNC).
    elif first_path_segment.startswith('\\\\'):
      prefix, _, remainder = first_path_segment[2:].partition(
          self.PATH_SEPARATOR)
      first_path_segment_prefix = '\\\\{0:s}'.format(prefix)
      first_path_segment = '\\{0:s}'.format(remainder)

    if first_path_segment_prefix:
      first_path_segment, _, remainder = first_path_segment.partition(
          self.PATH_SEPARATOR)

      # NOTE: this mutates the caller-provided path_segments list.
      if not remainder:
        _ = path_segments.pop(0)
      else:
        path_segments[0] = remainder

      first_path_segment = ''.join([
          first_path_segment_prefix, first_path_segment])
    else:
      first_path_segment = None

  # We are not using os.path.join() here since it will not remove all
  # variations of successive path separators.

  # Split all the path segments based on the path (segment) separator.
  path_segments = [
      segment.split(self.PATH_SEPARATOR) for segment in path_segments]

  # Flatten the sublists into one list.
  path_segments = [
      element for sublist in path_segments for element in sublist]

  # Remove empty path segments.
  path_segments = list(filter(None, path_segments))

  if first_path_segment is None:
    path = '{0:s}{1:s}'.format(
        self.PATH_SEPARATOR, self.PATH_SEPARATOR.join(path_segments))
  else:
    path = first_path_segment
    if path_segments:
      path = '{0:s}{1:s}{2:s}'.format(
          path, self.PATH_SEPARATOR, self.PATH_SEPARATOR.join(path_segments))

  return path
2.251276
2.260657
0.995851
def _Open(self, path_spec, mode='rb'):
  """Opens the file system object defined by path specification.

  Args:
    path_spec (PathSpec): path specification.
    mode (Optional[str]): file access mode. The default is 'rb' which
        represents read-only binary.

  Raises:
    AccessError: if the access to open the file was denied.
    IOError: if the file system object could not be opened.
    PathSpecError: if the path specification is incorrect.
    ValueError: if the path specification is invalid.
  """
  if not path_spec.HasParent():
    raise errors.PathSpecError(
        'Unsupported path specification without parent.')

  file_object = resolver.Resolver.OpenFileObject(
      path_spec.parent, resolver_context=self._resolver_context)

  try:
    fsntfs_volume = pyfsntfs.volume()
    fsntfs_volume.open_file_object(file_object)
  except:
    # Close the file object on any failure so it does not leak; the
    # exception is always re-raised.
    file_object.close()
    raise

  self._file_object = file_object
  self._fsntfs_volume = fsntfs_volume
2.318463
2.540156
0.912725
def FileEntryExistsByPathSpec(self, path_spec):
  """Determines if a file entry for a path specification exists.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    bool: True if the file entry exists.

  Raises:
    BackEndError: if the file entry cannot be opened.
  """
  location = getattr(path_spec, 'location', None)
  mft_attribute = getattr(path_spec, 'mft_attribute', None)
  mft_entry = getattr(path_spec, 'mft_entry', None)

  fsntfs_file_entry = None
  try:
    # Opening a file by MFT entry is faster than opening a file by
    # location, but requires the index of the corresponding $FILE_NAME
    # MFT attribute.
    if mft_attribute is not None and mft_entry is not None:
      fsntfs_file_entry = self._fsntfs_volume.get_file_entry(mft_entry)
    elif location is not None:
      fsntfs_file_entry = self._fsntfs_volume.get_file_entry_by_path(location)
  except IOError as exception:
    raise errors.BackEndError(exception)

  return fsntfs_file_entry is not None
2.859802
2.947927
0.970106
def GetFileEntryByPathSpec(self, path_spec):
  """Retrieves a file entry for a path specification.

  Args:
    path_spec (PathSpec): path specification.

  Returns:
    NTFSFileEntry: file entry or None if not available.

  Raises:
    BackEndError: if the file entry cannot be opened.
  """
  location = getattr(path_spec, 'location', None)
  mft_attribute = getattr(path_spec, 'mft_attribute', None)
  mft_entry = getattr(path_spec, 'mft_entry', None)

  if (location == self.LOCATION_ROOT or
      mft_entry == self.MFT_ENTRY_ROOT_DIRECTORY):
    fsntfs_file_entry = self._fsntfs_volume.get_root_directory()
    return ntfs_file_entry.NTFSFileEntry(
        self._resolver_context, self, path_spec,
        fsntfs_file_entry=fsntfs_file_entry, is_root=True)

  fsntfs_file_entry = None
  try:
    # Opening a file by MFT entry is faster than opening a file by
    # location, but requires the index of the corresponding $FILE_NAME
    # MFT attribute.
    if mft_attribute is not None and mft_entry is not None:
      fsntfs_file_entry = self._fsntfs_volume.get_file_entry(mft_entry)
    elif location is not None:
      fsntfs_file_entry = self._fsntfs_volume.get_file_entry_by_path(location)
  except IOError as exception:
    raise errors.BackEndError(exception)

  if fsntfs_file_entry is None:
    return None

  return ntfs_file_entry.NTFSFileEntry(
      self._resolver_context, self, path_spec,
      fsntfs_file_entry=fsntfs_file_entry)
2.26448
2.304046
0.982828
# Opening a file by MFT entry is faster than opening a file by location. # However we need the index of the corresponding $FILE_NAME MFT attribute. location = getattr(path_spec, 'location', None) mft_attribute = getattr(path_spec, 'mft_attribute', None) mft_entry = getattr(path_spec, 'mft_entry', None) if mft_attribute is not None and mft_entry is not None: fsntfs_file_entry = self._fsntfs_volume.get_file_entry(mft_entry) elif location is not None: fsntfs_file_entry = self._fsntfs_volume.get_file_entry_by_path(location) else: raise errors.PathSpecError( 'Path specification missing location and MFT entry.') return fsntfs_file_entry
def GetNTFSFileEntryByPathSpec(self, path_spec)
Retrieves the NTFS file entry for a path specification. Args: path_spec (PathSpec): a path specification. Returns: pyfsntfs.file_entry: NTFS file entry. Raises: PathSpecError: if the path specification is missing location and MFT entry.
2.850328
2.691396
1.059052
path_spec = ntfs_path_spec.NTFSPathSpec( location=self.LOCATION_ROOT, mft_entry=self.MFT_ENTRY_ROOT_DIRECTORY, parent=self._path_spec.parent) return self.GetFileEntryByPathSpec(path_spec)
def GetRootFileEntry(self)
Retrieves the root file entry. Returns: NTFSFileEntry: file entry.
3.60743
3.87793
0.930246
# A parent is mandatory: this file system is layered on top of another
# path specification (such as a raw or partition image).
if not path_spec.HasParent():
  raise errors.PathSpecError(
      'Unsupported path specification without parent.')

self._file_object = resolver.Resolver.OpenFileObject(
    path_spec.parent, resolver_context=self._resolver_context)
def _Open(self, path_spec, mode='rb')
Opens the file system object defined by path specification. Args: path_spec (PathSpec): path specification. mode (Optional[str]): file access mode. The default is 'rb' which represents read-only binary. Raises: AccessError: if the access to open the file was denied. IOError: if the file system object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
2.060433
2.375479
0.867376