id
int64
0
458k
file_name
stringlengths
4
119
file_path
stringlengths
14
227
content
stringlengths
24
9.96M
size
int64
24
9.96M
language
stringclasses
1 value
extension
stringclasses
14 values
total_lines
int64
1
219k
avg_line_length
float64
2.52
4.63M
max_line_length
int64
5
9.91M
alphanum_fraction
float64
0
1
repo_name
stringlengths
7
101
repo_stars
int64
100
139k
repo_forks
int64
0
26.4k
repo_open_issues
int64
0
2.27k
repo_license
stringclasses
12 values
repo_extraction_date
stringclasses
433 values
19,700
pifv.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/misc/pifv.py
""" EFI Platform Initialization Firmware Volume parser. Author: Alexandre Boeglin Creation date: 08 jul 2007 """ from hachoir_parser import Parser from hachoir_core.field import (FieldSet, UInt8, UInt16, UInt24, UInt32, UInt64, Enum, CString, String, PaddingBytes, RawBytes, NullBytes) from hachoir_core.endian import LITTLE_ENDIAN from hachoir_core.tools import paddingSize, humanFilesize from hachoir_parser.common.win32 import GUID EFI_SECTION_COMPRESSION = 0x1 EFI_SECTION_GUID_DEFINED = 0x2 EFI_SECTION_PE32 = 0x10 EFI_SECTION_PIC = 0x11 EFI_SECTION_TE = 0x12 EFI_SECTION_DXE_DEPEX = 0x13 EFI_SECTION_VERSION = 0x14 EFI_SECTION_USER_INTERFACE = 0x15 EFI_SECTION_COMPATIBILITY16 = 0x16 EFI_SECTION_FIRMWARE_VOLUME_IMAGE = 0x17 EFI_SECTION_FREEFORM_SUBTYPE_GUID = 0x18 EFI_SECTION_RAW = 0x19 EFI_SECTION_PEI_DEPEX = 0x1b EFI_SECTION_TYPE = { EFI_SECTION_COMPRESSION: "Encapsulation section where other sections" \ + " are compressed", EFI_SECTION_GUID_DEFINED: "Encapsulation section where other sections" \ + " have format defined by a GUID", EFI_SECTION_PE32: "PE32+ Executable image", EFI_SECTION_PIC: "Position-Independent Code", EFI_SECTION_TE: "Terse Executable image", EFI_SECTION_DXE_DEPEX: "DXE Dependency Expression", EFI_SECTION_VERSION: "Version, Text and Numeric", EFI_SECTION_USER_INTERFACE: "User-Friendly name of the driver", EFI_SECTION_COMPATIBILITY16: "DOS-style 16-bit EXE", EFI_SECTION_FIRMWARE_VOLUME_IMAGE: "PI Firmware Volume image", EFI_SECTION_FREEFORM_SUBTYPE_GUID: "Raw data with GUID in header to" \ + " define format", EFI_SECTION_RAW: "Raw data", EFI_SECTION_PEI_DEPEX: "PEI Dependency Expression", } EFI_FV_FILETYPE_RAW = 0x1 EFI_FV_FILETYPE_FREEFORM = 0x2 EFI_FV_FILETYPE_SECURITY_CORE = 0x3 EFI_FV_FILETYPE_PEI_CORE = 0x4 EFI_FV_FILETYPE_DXE_CORE = 0x5 EFI_FV_FILETYPE_PEIM = 0x6 EFI_FV_FILETYPE_DRIVER = 0x7 EFI_FV_FILETYPE_COMBINED_PEIM_DRIVER = 0x8 EFI_FV_FILETYPE_APPLICATION = 0x9 EFI_FV_FILETYPE_FIRMWARE_VOLUME_IMAGE = 0xb EFI_FV_FILETYPE_FFS_PAD = 0xf0 
EFI_FV_FILETYPE = { EFI_FV_FILETYPE_RAW: "Binary data", EFI_FV_FILETYPE_FREEFORM: "Sectioned data", EFI_FV_FILETYPE_SECURITY_CORE: "Platform core code used during the SEC" \ + " phase", EFI_FV_FILETYPE_PEI_CORE: "PEI Foundation", EFI_FV_FILETYPE_DXE_CORE: "DXE Foundation", EFI_FV_FILETYPE_PEIM: "PEI module (PEIM)", EFI_FV_FILETYPE_DRIVER: "DXE driver", EFI_FV_FILETYPE_COMBINED_PEIM_DRIVER: "Combined PEIM/DXE driver", EFI_FV_FILETYPE_APPLICATION: "Application", EFI_FV_FILETYPE_FIRMWARE_VOLUME_IMAGE: "Firmware volume image", EFI_FV_FILETYPE_FFS_PAD: "Pad File For FFS", } for x in xrange(0xc0, 0xe0): EFI_FV_FILETYPE[x] = "OEM File" for x in xrange(0xe0, 0xf0): EFI_FV_FILETYPE[x] = "Debug/Test File" for x in xrange(0xf1, 0x100): EFI_FV_FILETYPE[x] = "Firmware File System Specific File" class BlockMap(FieldSet): static_size = 8*8 def createFields(self): yield UInt32(self, "num_blocks") yield UInt32(self, "len") def createDescription(self): return "%d blocks of %s" % ( self["num_blocks"].value, humanFilesize(self["len"].value)) class FileSection(FieldSet): COMPRESSION_TYPE = { 0: 'Not Compressed', 1: 'Standard Compression', } def __init__(self, *args, **kw): FieldSet.__init__(self, *args, **kw) self._size = self["size"].value * 8 section_type = self["type"].value if section_type in (EFI_SECTION_DXE_DEPEX, EFI_SECTION_PEI_DEPEX): # These sections can sometimes be longer than what their size # claims! It's so nice to have so detailled specs and not follow # them ... 
if self.stream.readBytes(self.absolute_address + self._size, 1) == '\0': self._size = self._size + 16 def createFields(self): # Header yield UInt24(self, "size") yield Enum(UInt8(self, "type"), EFI_SECTION_TYPE) section_type = self["type"].value if section_type == EFI_SECTION_COMPRESSION: yield UInt32(self, "uncomp_len") yield Enum(UInt8(self, "comp_type"), self.COMPRESSION_TYPE) elif section_type == EFI_SECTION_FREEFORM_SUBTYPE_GUID: yield GUID(self, "sub_type_guid") elif section_type == EFI_SECTION_GUID_DEFINED: yield GUID(self, "section_definition_guid") yield UInt16(self, "data_offset") yield UInt16(self, "attributes") elif section_type == EFI_SECTION_USER_INTERFACE: yield CString(self, "file_name", charset="UTF-16-LE") elif section_type == EFI_SECTION_VERSION: yield UInt16(self, "build_number") yield CString(self, "version", charset="UTF-16-LE") # Content content_size = (self.size - self.current_size) // 8 if content_size == 0: return if section_type == EFI_SECTION_COMPRESSION: compression_type = self["comp_type"].value if compression_type == 1: while not self.eof: yield RawBytes(self, "compressed_content", content_size) else: while not self.eof: yield FileSection(self, "section[]") elif section_type == EFI_SECTION_FIRMWARE_VOLUME_IMAGE: yield FirmwareVolume(self, "firmware_volume") else: yield RawBytes(self, "content", content_size, EFI_SECTION_TYPE.get(self["type"].value, "Unknown Section Type")) def createDescription(self): return EFI_SECTION_TYPE.get(self["type"].value, "Unknown Section Type") class File(FieldSet): def __init__(self, *args, **kw): FieldSet.__init__(self, *args, **kw) self._size = self["size"].value * 8 def createFields(self): # Header yield GUID(self, "name") yield UInt16(self, "integrity_check") yield Enum(UInt8(self, "type"), EFI_FV_FILETYPE) yield UInt8(self, "attributes") yield UInt24(self, "size") yield UInt8(self, "state") # Content while not self.eof: yield FileSection(self, "section[]") def createDescription(self): return "%s: %s 
containing %d section(s)" % ( self["name"].value, self["type"].display, len(self.array("section"))) class FirmwareVolume(FieldSet): def __init__(self, *args, **kw): FieldSet.__init__(self, *args, **kw) if not self._size: self._size = self["volume_len"].value * 8 def createFields(self): # Header yield NullBytes(self, "zero_vector", 16) yield GUID(self, "fs_guid") yield UInt64(self, "volume_len") yield String(self, "signature", 4) yield UInt32(self, "attributes") yield UInt16(self, "header_len") yield UInt16(self, "checksum") yield UInt16(self, "ext_header_offset") yield UInt8(self, "reserved") yield UInt8(self, "revision") while True: bm = BlockMap(self, "block_map[]") yield bm if bm['num_blocks'].value == 0 and bm['len'].value == 0: break # TODO must handle extended header # Content while not self.eof: padding = paddingSize(self.current_size // 8, 8) if padding: yield PaddingBytes(self, "padding[]", padding) yield File(self, "file[]") def createDescription(self): return "Firmware Volume containing %d file(s)" % len(self.array("file")) class PIFVFile(Parser): endian = LITTLE_ENDIAN MAGIC = '_FVH' PARSER_TAGS = { "id": "pifv", "category": "program", "file_ext": ("bin", ""), "min_size": 64*8, # smallest possible header "magic_regex": (("\0{16}.{24}%s" % MAGIC, 0), ), "description": "EFI Platform Initialization Firmware Volume", } def validate(self): if self.stream.readBytes(40*8, 4) != self.MAGIC: return "Invalid magic number" if self.stream.readBytes(0, 16) != "\0"*16: return "Invalid zero vector" return True def createFields(self): while not self.eof: yield FirmwareVolume(self, "firmware_volume[]")
8,472
Python
.py
209
32.990431
80
0.633459
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,701
word_doc.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/misc/word_doc.pyc
—Ú Œ »Mc@s”dZddklZddklZlZlZlZlZl Z l Z l Z l Z l Z lZlZlZlZddklZeZdfdÑÉYZdeefdÑÉYZd eefd ÑÉYZd S( s« Documents: * libwx source code: see fib.c source code * "Microsoft Word 97 Binary File Format" http://bio.gsi.de/DOCS/AIX/wword8.html Microsoft Word 97 (aka Version 8) for Windows and Macintosh. From the Office book, found in the Microsoft Office Development section in the MSDN Online Library. HTMLified June 1998. Revised Aug 1 1998, added missing Definitions section. Revised Dec 21 1998, added missing Document Properties (section). iˇˇˇˇ(tParser(tFieldSettBittBitstUInt8tInt16tUInt16tUInt32tInt32t NullBytestBytestRawBytestPascalString16tDateTimeMSDOS32tTimeDateMSDOS32(t LITTLE_ENDIANtBaseWordDocumentcBseZdÑZRS(ccsê t|ddÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|d ÉVt|d ÉVt|d d ÉVt|d ÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|ddÉVt|dÉVt|d ÉVt|d!ÉVt|d"ÉVt|d#d$d%ÉVt|d&d'ÉVt|d(ÉVt|d)ÉVt|d*ÉVt|d+ÉVt |d,ÉVt|d-ÉVt|d.ÉVt|d/ÉVt|d0ÉVt|d1ÉVt|d2ÉVt|d3ÉVt|d4ÉVt|d5ÉVt|d6ÉVt|d7ÉVt|d8ÉVt|d9ÉVt|d:ÉVt|d;ÉVt|d<ÉVt|d=ÉVt|d>ÉVt|d?ÉVt|d@ÉVt|dAÉVt|dBÉVt|dCÉVt|dDÉVt|dEÉVt|dFÉVt|dGÉVt|dHÉVt|dIÉVt|dJÉVt|dKÉVt|dLÉVt|dMÉVt|dNÉVt|dOÉVt|dPÉVt|dQÉVt|dRÉVt|dSÉVt|dTÉVt|dUÉVt|dVÉVt|dWÉVt|dXÉVt|dYÉVt|dZÉVt|d[ÉVt|d\ÉVt|d]ÉVt|d^ÉVt|d_ÉVt|d`ÉVt|daÉVt|dbÉVt|dcÉVt|ddÉVt|deÉVt|dfÉVt|dgÉVt|dhÉVt|diÉVt|djÉVt|dkÉVt|dlÉVt|dmÉVt|dnÉVt|doÉVt|dpÉVt|dqÉVt|drÉVt|dsÉVt|dtÉVt|duÉVt|dvÉVt|dwÉVt|dxÉVt|dyÉVt|dzÉVt|d{ÉVt|d|ÉVt|d}ÉVt|d~ÉVt|dÉVt|dÄÉVt|dÅÉVt|dÇÉVt|dÉÉVt|dÑÉVt|dÖÉVt|dÜÉVt|dáÉVt|dàÉVt|dâÉVt|däÉVt|dãÉVt|dåÉVt|dçÉVt|déÉVt|dèÉVt|dêÉVt|dëÉVt|díÉVt|dìÉVt|dîÉVt|dïÉVt|dñÉVt|dóÉVt|dòÉVt|dôÉVt|döÉVt|dõÉVt|dúÉVt|dùÉVt|dûÉVt|düÉVt|d†ÉVt|d°ÉVt|d¢ÉVt|d£ÉVt|d§ÉVt|d•ÉVt|d¶ÉVt|dßÉVt|d®ÉVt|d©ÉVt|d™ÉVt|d´ÉVt|d¨ÉVt|d≠ÉVt|dÆÉVt|dØÉVt|d∞ÉVt|d±ÉVt|d≤ÉVt|d≥ÉVt|d¥ÉVt|dµÉVt|d∂ÉVt|d∑ÉVt|d∏ÉVt|dπÉVt|d∫ÉVt|dªÉVt|dºÉVt|dΩÉVt|dæÉVt|døÉVt|d¿ÉVt|d¡ÉVt|d¬ÉVt|d√ÉVt|dƒÉVt|d≈ÉVt|d∆ÉVt|d«ÉVt|d»ÉVt|d…ÉVt|d 
ÉVt|dÀÉVt|dÃÉVt|dÕÉVt|dŒÉVt|dœÉVt|d–ÉVt|d—ÉVt|d“ÉVt|d”ÉVt|d‘ÉVt|d’ÉVt|d÷ÉVt|d◊ÉVt|dÿÉVt|dŸÉVt|d⁄ÉVt|d€ÉVt|d‹ÉVt|d›ÉVt|dfiÉVt|dflÉVt|d‡ÉVt|d·ÉVt|d‚ÉVt|d„ÉVt|d‰ÉVt|dÂÉVt|dÊÉVt|dÁÉVt|dËÉVt|dÈÉVt|dÍÉVt|dÎÉVt|dÏÉVt|dÌÉVt|dÓÉVt |dÔÉVt|dÉVt|dÒÉVt|dÚÉVt|dÛÉVt|dÙÉVt|dıÉVt|dˆÉVt|d˜ÉVt|d¯ÉVt|d˘ÉV|i |i d˙}|ot |d˚|ÉVndS(¸NtwIdentitnFibtnProducttlidtpnNexttfDottfGlsytfComplextfHasPict cQuickSavesit fEncryptedt fWhichTblStmtfReadOnlyRecommandedtfWriteReservationtfExtChart fLoadOverridet fFarEeasttfCryptotnFibBacktlKeytenvrtfMact fEmptySpecialtfLoadOverridePagetfFutureSavedUndot fWord97SavetfSpare0itchset chsTablestfcMintfcMact file_creatortstripts reserved[]i tlidFEtclwtcbMactlProductCreatedtlProductRevisedtccpTexttccpFtntccpHdrtccpMcrtccpAtntccpEdntccpTxbxt ccpHdrTxbxt pnFbpChpFirstt pnChpFirstt cpnBteChpt pnFbpPapFirstt pnPapFirstt cpnBtePapt pnFbpLvcFirstt pnLvcFirstt cpnBteLvct fcIslandFirstt fcIslandLimtcfclcbt fcStshfOrigt lcbStshfOrigtfcStshftlcbStshft fcPlcffndReft lcbPlcffndReft fcPlcffndTxtt lcbPlcffndTxtt fcPlcfandReft lcbPlcfandReft fcPlcfandTxtt lcbPlcfandTxtt fcPlcfsedt lcbPlcfsedtfcPlcpadt lcbPlcpadt fcPlcfphet lcbPlcfphet fcSttbfglsyt lcbSttbfglsyt fcPlcfglsyt lcbPlcfglsyt fcPlcfhddt lcbPlcfhddt fcPlcfbteChpxtlcbPlcfbteChpxt fcPlcfbtePapxtlcbPlcfbtePapxt fcPlcfseat lcbPlcfseat fcSttbfffnt lcbSttbfffnt fcPlcffldMomt lcbPlcffldMomt fcPlcffldHdrt lcbPlcffldHdrt fcPlcffldFtnt lcbPlcffldFtnt fcPlcffldAtnt lcbPlcffldAtnt fcPlcffldMcrt lcbPlcffldMcrt fcSttbfbkmkt lcbSttbfbkmkt fcPlcfbkft lcbPlcfbkft fcPlcfbklt lcbPlcfbkltfcCmdstlcbCmdstfcPlcmcrt lcbPlcmcrt fcSttbfmcrt lcbSttbfmcrtfcPrDrvrt lcbPrDrvrt fcPrEnvPortt lcbPrEnvPortt fcPrEnvLandt lcbPrEnvLandtfcWsstlcbWsstfcDoptlcbDopt fcSttbfAssoct lcbSttbfAssoctfcClxtlcbClxt fcPlcfpgdFtnt lcbPlcfpgdFtntfcAutosaveSourcetlcbAutosaveSourcetfcGrpXstAtnOwnerstlcbGrpXstAtnOwnerstfcSttbfAtnbkmktlcbSttbfAtnbkmkt fcPlcdoaMomt lcbPlcdoaMomt fcPlcdoaHdrt lcbPlcdoaHdrt fcPlcspaMomt lcbPlcspaMomt fcPlcspaHdrt lcbPlcspaHdrt fcPlcfAtnbkft 
lcbPlcfAtnbkft fcPlcfAtnbklt lcbPlcfAtnbkltfcPmstlcbPmstfcFormFldSttbstlcbFormFldSttbst fcPlcfendReft lcbPlcfendReft fcPlcfendTxtt lcbPlcfendTxtt fcPlcffldEdnt lcbPlcffldEdnt fcPlcfpgdEdnt lcbPlcfpgdEdnt fcDggInfot lcbDggInfot fcSttbfRMarkt lcbSttbfRMarkt fcSttbCaptiontlcbSttbCaptiontfcSttbAutoCaptiontlcbSttbAutoCaptiont fcPlcfwkbt lcbPlcfwkbt fcPlcfsplt lcbPlcfsplt fcPlcftxbxTxttlcbPlcftxbxTxtt fcPlcffldTxbxtlcbPlcffldTxbxtfcPlcfhdrtxbxTxttlcbPlcfhdrtxbxTxttfcPlcffldHdrTxbxtlcbPlcffldHdrTxbxt fcStwUsert lcbStwUsert fcSttbttmbdt cbSttbttmbdtfcUnusedt lcbUnusedt fcPgdMothert lcbPgdMothert fcBkdMothert lcbBkdMothertfcPgdFtnt lcbPgdFtntfcBkdFtnt lcbBkdFtntfcPgdEdnt lcbPgdEdntfcBkdEdnt lcbBkdEdntfcSttbfIntlFldtlcbSttbfIntlFldt fcRouteSlipt lcbRouteSlipt fcSttbSavedBytlcbSttbSavedByt fcSttbFnmt lcbSttbFnmt fcPlcfLstt lcbPlcfLsttfcPlfLfot lcbPlfLfot fcPlcftxbxBkdtlcbPlcftxbxBkdtfcPlcftxbxHdrBkdtlcbPlcftxbxHdrBkdt fcDocUndot lcbDocUndotfcRgbuset lcbRgbusetfcUsptlcbUsptfcUskftlcbUskftfcPlcupcRgbusetlcbPlcupcRgbuset fcPlcupcUspt lcbPlcupcUsptfcSttbGlsyStyletlcbSttbGlsyStyletfcPlgoslt lcbPlgosltfcPlcocxt lcbPlcocxt fcPlcfbteLvct lcbPlcfbteLvct ftModifiedt fcPlcflvct lcbPlcflvct fcPlcasumyt lcbPlcasumyt fcPlcfgramt lcbPlcfgramtfcSttbListNamestlcbSttbListNamest fcSttbfUssrt lcbSttbfUssrittail( RRRRRRR R Rt TIMESTAMPtsizet current_sizeR (tselfR((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/word_doc.pyt createFieldssÏ(t__name__t __module__R (((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/word_doc.pyRstWordDocumentFieldSetcBseZRS((R R (((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/word_doc.pyR stWordDocumentParsercBs;eZhdd6dd6dd6ZeZdÑZdÑZRS(t word_documenttiditmin_sizesMicrosoft Office Word documentt descriptioncKsti|||çdS(N(Rt__init__(R tstreamtkw((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/word_doc.pyR'scCstS(N(tTrue(R ((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/word_doc.pytvalidate*s(R R t 
PARSER_TAGSRtendianRR(((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/word_doc.pyRs  N(t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRRR R R R R Rthachoir_core.endianRRRR R(((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/word_doc.pyt<module> s^ˇ
10,442
Python
.py
46
225.630435
3,301
0.51621
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,702
chm.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/misc/chm.pyc
Ñò Î ÈMc @sˆdZddklZddklZlZlZlZlZl Z l Z l Z l Z l Z ddklZddklZddklZddklZlZlZdefd „ƒYZd efd „ƒYZd efd „ƒYZdefd„ƒYZdefd„ƒYZdefd„ƒYZdefd„ƒYZdefd„ƒYZdefd„ƒYZ defd„ƒYZ!dS(s InfoTech Storage Format (ITSF) parser, used by Microsoft's HTML Help (.chm) Document: - Microsoft's HTML Help (.chm) format http://www.wotsit.org (search "chm") - chmlib library http://www.jedrea.com/chmlib/ Author: Victor Stinner Creation date: 2007-03-04 iÿÿÿÿ(tParser( tFieldtFieldSett ParserErrortInt32tUInt32tUInt64tRawBytest PaddingBytestEnumtString(t LITTLE_ENDIAN(tGUID(t LANGUAGE_ID(t textHandlert hexadecimaltfilesizeHandlertCWordcBseZdZdd„ZRS(s Compressed double-word csîti|||d|ƒ|ii}|ii}|i}d‰|i|d|ƒ}xv|d@ojˆdK‰ˆ|d@7‰|id7_d|ijotdƒ‚n|d7}|i|d|ƒ}qXWˆ|7‰‡fd†|_ dS( Niii€iii@s CHM: CWord is limited to 64 bitscsˆS((((tvalue(sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyt<lambda>.s( Rt__init__t_parenttendiantstreamtabsolute_addresstreadBitst_sizeRt createValue(tselftparenttnamet descriptionRRtaddrtbyte((RsA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyRs"       N(t__name__t __module__t__doc__tNoneR(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyRstFilesize_HeadercBseZd„ZRS(ccs€tt|ddƒtƒVtt|ddƒtƒVtt|dƒƒVtt|ddƒtƒVtt|ddƒtƒVdS(Ns unknown[]t0x01FEt0x0t file_size(RRRRR(R((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyt createFields1s (R"R#R*(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR&0stITSPcBseZd„Zd„ZRS(cGs(ti||Œ|did|_dS(Ntsizei(RRRR(Rtargs((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR9sccsZt|dddddƒVt|ddƒVtt|dd ƒƒVt|d d ƒVtt|d d ƒƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|d dƒVt|ddƒVtt|ddƒtƒVt|ddƒVtt|dd ƒƒVt|d dƒVt|d dƒVt|d dƒVdS(!NtmagiciR+tcharsettASCIItversions Version (=1)R,s.Length (in bytes) of the directory header (84)s unknown[]s(=10)t block_sizesDirectory block sizetdensitys&Density of quickref section, usually 2t index_depthsDepth of the index treetnb_dirs Chunk number of root 
index chunkt first_pmgls*Chunk number of first PMGL (listing) chunkt last_pmgls)Chunk number of last PMGL (listing) chunks-1t nb_dir_chunks"Number of directory chunks (total)tlang_idsWindows language IDt system_uuids&{5D02926A-212E-11D0-9DF9-00A0C922E6EC}tsize2sSame value than size(R RRRR R R (R((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR*=s$(R"R#RR*(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR+8s tITSFcBseZd„ZRS(ccsÿt|dddddƒVt|dƒVt|ddƒVt|d ƒVt|d ƒVtt|d d ƒtƒVt|d dƒVt|ddƒVt|dƒVtt|dƒƒVt|dƒVtt|dƒƒVd|dijot|dƒVndS(NR.iR<R/R0R1t header_sizesTotal header length (in bytes)tonet last_modifiedR9sWindows Language IDtdir_uuids({7C01FD10-7BAA-11D0-9E0C-00A0-C922-E6EC}t stream_uuids({7C01FD11-7BAA-11D0-9E0C-00A0-C922-E6EC}tfilesize_offsett filesize_lent dir_offsettdir_lenit data_offset(R RR R R RRR(R((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR*Rs(R"R#R*(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR<Qst PMGL_EntrycBseZd„Zd„ZRS(ccs`t|dƒVt|d|diddƒVt|dƒVt|dƒVtt|dƒƒVdS(Ntname_lenRR/sUTF-8tspacetstarttlength(RR RR(R((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR*cs cCsd|di|difS(Ns%s (%s)RRK(Rtdisplay(R((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pytcreateDescriptionjs(R"R#R*RM(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyRGbs tPMGLcBseZd„ZRS(ccsÓt|dddddƒVtt|ddƒƒVt|dƒVt|d d ƒVt|d d ƒV|i|did }x#|i|jot|d ƒVq|W|i|id }|ot|d|ƒVndS(NR.iRNR/R0t free_spacesCLength of free space and/or quickref area at end of directory chunktunknowntpreviouss&Chunk number of previous listing chunktnextisentry[]tpadding(R RRR,Rt current_sizeRGR(RtstopRS((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR*ns  (R"R#R*(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyRNmst PMGI_EntrycBseZd„Zd„ZRS(ccs>t|dƒVt|d|diddƒVt|dƒVdS(NRHRR/sUTF-8tpage(RR 
R(R((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR*‚scCsd|di|difS(Ns %s (page #%u)RRW(R(R((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyRM‡s(R"R#R*RM(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyRV�s tPMGIcBseZd„ZRS(ccs£t|dddddƒVtt|ddƒƒV|i|did}x#|i|jot|d ƒVqLW|i|id}|ot|d |ƒVndS( NR.iRXR/R0ROsCLength of free space and/or quickref area at end of directory chunkisentry[]RS(R RRR,RRTRVR(RRURS((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR*‹s  (R"R#R*(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyRXŠst DirectorycBseZd„ZRS(ccs�t|dƒV|did}|di}|djo d}nx(t|ƒD]}t|dd|ƒVqPW|i|ijot|d d|ƒVndS( Ntitspsitsp/block_sizeis itsp/nb_diriispmgl[]R,tpmgi(R+RtxrangeRNRTR,RX(RR2R5tindex((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR*™s    (R"R#R*(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyRY˜stChmFilecBsYeZhdd6dd6dd6dd6dd 6d d 6ZeZd „Zd„Zd„ZRS(tchmtidtmisctcategorytfile_extiitmin_sizesITSFiR.sMicrosoft's HTML Help (.chm)RcCs>|iiddƒdjodS|didjodStS(NiiR<s Invalid magics itsf/versionisInvalid version(Rt readBytesRtTrue(R((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pytvalidate²s ccs¦t|dƒVt|dd|didƒV|i|diƒ}|o |Vnt|dd|didƒV|i|id}|ot|d |ƒVndS( NtitsfR)R,sitsf/filesize_lenisitsf/dir_offsettdirs itsf/dir_lentraw_end(R<R&RtseekByteRYR,RTR(RRSR,((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR*¹s cCs|didS(Nsfile_size/file_sizei(R(R((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pytcreateContentSizeÆs(schmi (sITSFi((sITSFi(R"R#t PARSER_TAGSR RRgR*Rl(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyR^§s   N("R$thachoir_parserRthachoir_core.fieldRRRRRRRRR R thachoir_core.endianR thachoir_parser.common.win32R t#hachoir_parser.common.win32_lang_idR 
thachoir_core.text_handlerRRRRR&R+R<RGRNRVRXRYR^(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/chm.pyt<module> s F  
10,687
Python
.py
53
200.377358
735
0.451989
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,703
msoffice.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/misc/msoffice.py
""" Parsers for the different streams and fragments found in an OLE2 file. Documents: - goffice source code Author: Robert Xiao, Victor Stinner Creation: 2006-04-23 """ from hachoir_parser import HachoirParser from hachoir_core.field import FieldSet, RootSeekableFieldSet, RawBytes from hachoir_core.endian import LITTLE_ENDIAN from hachoir_core.stream import StringInputStream from hachoir_parser.misc.msoffice_summary import SummaryFieldSet, CompObj from hachoir_parser.misc.word_doc import WordDocumentFieldSet PROPERTY_NAME = { u"\5DocumentSummaryInformation": "doc_summary", u"\5SummaryInformation": "summary", u"WordDocument": "word_doc", } class OfficeRootEntry(HachoirParser, RootSeekableFieldSet): PARSER_TAGS = { "description": "Microsoft Office document subfragments", } endian = LITTLE_ENDIAN def __init__(self, stream, **args): RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self)) HachoirParser.__init__(self, stream, **args) def validate(self): return True def createFields(self): for index, property in enumerate(self.ole2.properties): if index == 0: continue try: name = PROPERTY_NAME[property["name"].value] except LookupError: name = property.name+"content" for field in self.parseProperty(index, property, name): yield field def parseProperty(self, property_index, property, name_prefix): ole2 = self.ole2 if not property["size"].value: return if property["size"].value >= ole2["header/threshold"].value: return name = "%s[]" % name_prefix first = None previous = None size = 0 start = property["start"].value chain = ole2.getChain(start, True) blocksize = ole2.ss_size desc_format = "Small blocks %s..%s (%s)" fragment_group = None while True: try: block = chain.next() contiguous = False if not first: first = block contiguous = True if previous and block == (previous+1): contiguous = True if contiguous: previous = block size += blocksize continue except StopIteration: block = None self.seekSBlock(first) desc = desc_format % (first, 
previous, previous-first+1) size = min(size, property["size"].value*8) if name_prefix in ("summary", "doc_summary"): yield SummaryFieldSet(self, name, desc, size=size) elif name_prefix == "word_doc": yield WordDocumentFieldSet(self, name, desc, size=size) elif property_index == 1: yield CompObj(self, "comp_obj", desc, size=size) else: yield RawBytes(self, name, size//8, desc) if block is None: break first = block previous = block size = ole2.sector_size def seekSBlock(self, block): self.seekBit(block * self.ole2.ss_size) class FragmentGroup: def __init__(self, parser): self.items = [] self.parser = parser def add(self, item): self.items.append(item) def createInputStream(self): # FIXME: Use lazy stream creation data = [] for item in self.items: data.append( item["rawdata"].value ) data = "".join(data) # FIXME: Use smarter code to send arguments args = {"ole2": self.items[0].root} tags = {"class": self.parser, "args": args} tags = tags.iteritems() return StringInputStream(data, "<fragment group>", tags=tags) class CustomFragment(FieldSet): def __init__(self, parent, name, size, parser, description=None, group=None): FieldSet.__init__(self, parent, name, description, size=size) if not group: group = FragmentGroup(parser) self.group = group self.group.add(self) def createFields(self): yield RawBytes(self, "rawdata", self.size//8) def _createInputStream(self, **args): return self.group.createInputStream()
4,411
Python
.py
114
28.964912
93
0.598271
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,704
ole2.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/misc/ole2.pyc
—Ú Œ »Mc @s≠dZddklZddklZlZlZlZlZl Z l Z l Z l Z l Z lZlZlZlZlZddklZlZlZddklZlZddklZddklZlZlZddk l!Z!dd k"l#Z#d Z$d Z%d Z&d e fdÑÉYZ'defdÑÉYZ(defdÑÉYZ)defdÑÉYZ*de*i+e&e'i+Z,defdÑÉYZ-deefdÑÉYZ.dS(sS Microsoft Office documents parser. Informations: * wordole.c of AntiWord program (v0.35) Copyright (C) 1998-2003 A.J. van Os Released under GNU GPL http://www.winfield.demon.nl/ * File gsf-infile-msole.c of libgsf library (v1.14.0) Copyright (C) 2002-2004 Jody Goldberg (jody@gnome.org) Released under GNU LGPL 2.1 http://freshmeat.net/projects/libgsf/ * PDF from AAF Association Copyright (C) 2004 AAF Association Copyright (C) 1991-2003 Microsoft Corporation http://www.aafassociation.org/html/specs/aafcontainerspec-v1.0.1.pdf Author: Victor Stinner Creation: 2006-04-23 iˇˇˇˇ(t HachoirParser(tFieldSett ParserErrortSeekableFieldSettRootSeekableFieldSettUInt8tUInt16tInt32tUInt32tUInt64tTimestampWin64tEnumtBytestRawBytest NullBytestString(t textHandlert hexadecimaltfilesizeHandler(t LITTLE_ENDIANt BIG_ENDIAN(tGUID(tCustomFragmenttOfficeRootEntryt PROPERTY_NAME(tWordDocumentParser(t SummaryParseriiimtSECTcBsoeZdZdZdZdZeeeeefÉZhde6de6de6de6Zd dÑZ d ÑZ RS( lˇˇl˛ˇl˝ˇl¸ˇtunusedsend of a chainsBFAT sector (in a FAT)sDIFAT sector (in a FAT)cCsti||||ÉdS(N(Rt__init__(tselftparenttnamet description((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyR6scCs"|i}tii|t|ÉÉS(N(tvalueRtspecial_value_nametgettstr(Rtval((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyt createDisplay9s N( t__name__t __module__tUNUSEDt END_OF_CHAINt BFAT_SECTORt DIFAT_SECTORtsettSPECIALSR#tNoneRR'(((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyR(s  tPropertycBsceZdZhdd6dd6dd6dd6d d6Zhd d 6d d6ZdZdÑZdÑZRS(itstorageitstreamit ILockBytesitIPropertyStorageitroottreditblackiÄiccs}|ii|idÉ}|djo d}nd}t|ddd|dd ÉVt|d d ÉVtt|d d É|iÉVtt|ddÉ|iÉVt |dÉVt |dÉVt |ddÉVt |ddÉVt |dddÉVt |ddÉVt |ddÉVt |ddÉV|di djo,tt|d d!ÉÉVt |d"dÉVntt|d d!ÉÉVdS(#NitRs UTF-16-BEs UTF-16-LER 
i@tcharsetttruncatettnamelensLength of the namettypes Property typet decoratort Decoratortlefttrighttchilds-Child node (valid for storage and root types)tclsids8CLSID of this storage (valid for storage and root types)tflagss User flagstcreations4Creation timestamp(valid for storage and root types)tlastmods3Modify timestamp (valid for storage and root types)tstarts=Starting SECT of the stream (valid for stream and root types)s/header/bb_shifti tsizes/Size in bytes (valid for stream and root types)tpadding(R3t readBytestabsolute_addressRRR Rt TYPE_NAMEtDECORATOR_NAMERRRR R"RRR (RtbytesR:((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyt createFieldsLs(  cCs(|di}|di}d||fS(NR RIsProperty: %s (%s)(tdisplay(RR RI((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pytcreateDescriptionds  i(R(R)t TYPE_ROOTRMRNt static_sizeRPRR(((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyR1=s   tDIFatcBseZddÑZdÑZRS(cCs,ti||||É||_||_dS(N(RRRHtcount(RRR tdb_starttdb_countR!((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRjs ccs¨x&ttÉD]}t|d|ÉVq Wx|t|iÉD]k}|itti|ii|i|Éx9tt|dt|dÉD]}t|d|ÉVqàWq9WdS(Ns index[%u]ii( txrangetNB_DIFATRRVtseekBitRTRt sector_sizeRH(Rtindext sect_index((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRPos ) N(R(R)R0RRP(((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRUis tHeadercBseZdZdÑZRS(iDiccst|ddÉVt|ddÉVt|ddÉVt|dddÉVt|d d ÉVt|d d ÉVt|d ddÉVt|ddÉVt|ddÉVt|ddÉVt|dddÉVt|ddÉVt|ddÉVt|dÉVt|ddÉVt|d d!ÉVdS("NRDs16 bytes GUID used by some appstver_mins Minor versiontver_majtendianisEndian (0xFFFE for Intel)tbb_shifts"Log, base 2, of the big block sizetsb_shifts$Log, base 2, of the small block sizes reserved[]is (reserved)tcsectdirs?Number of SECTs in directory chain for 4 KB sectors (version 4)tbb_counts Number of Big Block Depot blockstbb_startsRoot start blockt transactionis.Signature used for transactions (must be zero)t thresholds5Maximum size for 
a mini stream (typically 4096 bytes)tsb_startsSmall Block Depot start blocktsb_countRWsFirst block of DIFATRXsNumber of SECTs in DIFAT(RRR RRR(R((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRP{s i (R(R)RTRP(((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyR_ysi@tSectFatcBseZddÑZdÑZRS(cCs6ti||||d|dÉ||_||_dS(NRIi (RRRVRH(RRR RHRVR!((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRës  ccs=x6t|i|i|iÉD]}t|d|ÉVqWdS(Ns index[%u](RYRHRVR(Rti((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRPñsN(R(R)R0RRP(((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRlês t OLE2_Filec BsôeZhdd6dd6d#d 6d$d6d%d6dd6d'd6ZeZdÑZdÑZdÑZdÑZe dÑZ dÑZ d ÑZ d!ÑZ d"ÑZRS((tole2tidtmisctcategorytdoctdottppttppztppstpottxlstxlatmsitfile_extuapplication/msworduapplication/msexceluapplication/mspowerpointtmimeiitmin_sizesMicrosoft Office documentR!s–œ‡°±·itmagiccKs<ti|dd|d|i|ÉÉti|||çdS(NR6(RRR0taskSizeR(RR3targs((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRØs%cCs“|didjodS|didjod|diS|didjod |diSt|d ijo tjnpd |d iS|d i|dijod|di|d ifStS(Ntole_ids–œ‡°±·s Invalid magicsheader/ver_majiisUnknown major version (%s)s header/endiansˇ˛s˛ˇsUnknown endian (%s)sheader/bb_shifts&Invalid (log 2 of) big block size (%s)sheader/sb_shiftsCSmall block size (log2=%s) is bigger than big block size (log2=%s)!(ii(sˇ˛s˛ˇ(R"t raw_displaytMIN_BIG_BLOCK_LOG2tMAX_BIG_BLOCK_LOG2tTrue(R((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pytvalidate≥s%c cs˘t|dddÉVt|dÉ}|Vd|di>|_|di|_|iti|_d|di>|_|i|_ t |d|d i|d id ÉVx|i ÉD] }|Vq±Wx|i ÉD] }|VqÕW|i |d iÉ}|iti}g|_xV|D]N}|i|Éx8t|ÉD]*}t|d É}|V|ii|Éq2WqWxét|iÉD]}\}}|djo d}n9yt|di}Wn tj o|id}nXx|i||ÉD] }|Vq‚WqtWdS(NRÇisOLE object signaturetheaderRcRfRdtdifatRWRXsDouble Indirection FATsheader/bb_starts property[]iR6R tcontent(R R_R"R\t fat_countRRTtitems_per_bbfattss_sizetitems_per_ssfatRUtreadBFATtreadSFATtgetChainR1t propertiest seekBlockRYtappendt 
enumerateRt LookupErrorR t parseProperty( RRàtfieldtchaintprop_per_sectortblockR]tpropertyR ((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRP¡sJ %          c cs'|dipdS|idjo |di|dijodSd|}d}d}d}d}|i|diÉ}x•toùyp|iÉ} t} |p| }t} n|o| |djo t} n| o| }||i7}w~nWntj o d} nX|djoPn|i |Éd||||df} | d |id 7} |t dÉjol|d jo t } n|djo t } nt } t|||| | |É} | V|p | i}q˜nt|||d | ÉV| djoPn| }| }|i}q~WdS(NRIs property[0]sheader/thresholds%s[]iRHisBig blocks %s..%s (%s)s of %s bytesiR6tsummaryt doc_summarytword_doc(srootRùRûRü(R"R R0RëRÜtnexttFalseR\t StopIterationRìR.RRRRtgroupR (RRút name_prefixR tfirsttpreviousRItfragment_groupRôRõt contiguoustdesctparserRò((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRóÒs`              c cs|o|i}|i}d}n|i}|i}d}|}tÉ}|}x¿|tijoØ|tijotd|||fÉÇn||jotd|||fÉÇn|i |É|V|}||} y|| d|i }WqSt j o } PqSXqSWdS(Ns SFAT chains BFAT chains-%s: Invalid block index (0x%08x), previous=%ss%s: Found a loop (%s=>%s)s index[%u]( tss_fatRétbb_fatRåR.RR+R/RtaddR"Rñ( RRHtuse_sfattfatt items_per_fatt err_prefixRõt block_setR¶R]terr((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRë%s0         ccs¬g|_d}|i}x£t|idÉÉD]å\}}|i}|tijoPndd||di|f}|i|Ét|d|||É}|V|ii |É||7}q.WdS(Nis difat/indexsFAT %u/%u at block %uisheader/bb_countsbbfat[]( R¨RåRïtarrayR"RR*RìRlRî(RRHRVR]RõR©Rò((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRè?s     c cs´|i|diÉ}d}g|_|i}xvt|ÉD]h\}}|i|Ét|d||dd||di|fÉ}|V|ii|É||7}q;WdS(Nsheader/sb_startissfat[]sSFAT %u/%u at block %uisheader/sb_count(RëR"R´RéRïRìRlRî(RRôRHRVR]RõRò((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRêRs     cCsâd}xT|idÉD]C}x:|D]2}|i}|tijot||É}q#q#WqW|tijodSt|d|iSdS(Nitbbfati(R¥R"RR/tmaxR0t HEADER_SIZER\(Rt max_blockRØtentryRõ((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pytcreateContentSizeas cCs|it||iÉdS(N(R[R∑R\(RRõ((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRìms( 
sdocsdotspptRvRwRxsxlsRzR{(uapplication/msworduapplication/msexceluapplication/mspowerpointi(s–œ‡°±·i((s–œ‡°±·i(R(R)t PARSER_TAGSRRbRRáRPRóR°RëRèRêR∫Rì(((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyRnös.    0 4    N(/t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRRR R R R R RRthachoir_core.text_handlerRRRthachoir_core.endianRRthachoir_parser.common.win32Rthachoir_parser.misc.msofficeRRRthachoir_parser.misc.word_docRt$hachoir_parser.misc.msoffice_summaryRRÑRÖRZRR1RUR_RTR∑RlRn(((sB/pentest/enumeration/google/metagoofil/hachoir_parser/misc/ole2.pyt<module>s$d,
15,040
Python
.py
82
182.158537
1,511
0.453798
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,705
bplist.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/misc/bplist.py
""" Apple/NeXT Binary Property List (BPLIST) parser. Also includes a .createXML() function which produces an XML representation of the object. Note that it will discard unknown objects, nulls and fill values, but should work for most files. Documents: - CFBinaryPList.c http://src.gnu-darwin.org/DarwinSourceArchive/expanded/CF/CF-299/Parsing.subproj/CFBinaryPList.c - ForFoundationOnly.h (for structure formats) http://src.gnu-darwin.org/DarwinSourceArchive/expanded/CF/CF-299/Base.subproj/ForFoundationOnly.h - XML <-> BPList converter http://scw.us/iPhone/plutil/plutil.pl Author: Robert Xiao Created: 2008-09-21 """ from hachoir_parser import HachoirParser from hachoir_core.field import (RootSeekableFieldSet, FieldSet, Enum, Bits, GenericInteger, Float32, Float64, UInt8, UInt64, Bytes, NullBytes, RawBytes, String) from hachoir_core.endian import BIG_ENDIAN from hachoir_core.text_handler import displayHandler from hachoir_core.tools import humanDatetime from datetime import datetime, timedelta class BPListTrailer(FieldSet): def createFields(self): yield NullBytes(self, "unused", 6) yield UInt8(self, "offsetIntSize", "Size (in bytes) of offsets in the offset table") yield UInt8(self, "objectRefSize", "Size (in bytes) of object numbers in object references") yield UInt64(self, "numObjects", "Number of objects in this file") yield UInt64(self, "topObject", "Top-level object reference") yield UInt64(self, "offsetTableOffset", "File offset to the offset table") def createDescription(self): return "Binary PList trailer" class BPListOffsetTable(FieldSet): def createFields(self): size = self["../trailer/offsetIntSize"].value*8 for i in range(self["../trailer/numObjects"].value): yield Bits(self, "offset[]", size) class BPListSize(FieldSet): def createFields(self): yield Bits(self, "size", 4) if self['size'].value == 0xF: yield BPListObject(self, "fullsize") def createValue(self): if 'fullsize' in self: return self['fullsize'].value else: return self['size'].value class 
BPListObjectRef(GenericInteger): def __init__(self, parent, name, description=None): size = parent['/trailer/objectRefSize'].value*8 GenericInteger.__init__(self, parent, name, False, size, description) def getRef(self): return self.parent['/object[' + str(self.value) + ']'] def createDisplay(self): return self.getRef().display def createXML(self, prefix=''): return self.getRef().createXML(prefix) class BPListArray(FieldSet): def __init__(self, parent, name, size, description=None): FieldSet.__init__(self, parent, name, description=description) self.numels = size def createFields(self): for i in range(self.numels): yield BPListObjectRef(self, "ref[]") def createValue(self): return self.array('ref') def createDisplay(self): return '[' + ', '.join([x.display for x in self.value]) + ']' def createXML(self,prefix=''): return prefix + '<array>\n' + ''.join([x.createXML(prefix + '\t' ) + '\n' for x in self.value]) + prefix + '</array>' class BPListDict(FieldSet): def __init__(self, parent, name, size, description=None): FieldSet.__init__(self, parent, name, description=description) self.numels = size def createFields(self): for i in range(self.numels): yield BPListObjectRef(self, "keyref[]") for i in range(self.numels): yield BPListObjectRef(self, "valref[]") def createValue(self): return zip(self.array('keyref'),self.array('valref')) def createDisplay(self): return '{' + ', '.join(['%s: %s'%(k.display,v.display) for k,v in self.value]) + '}' def createXML(self, prefix=''): return prefix + '<dict>\n' + ''.join(['%s\t<key>%s</key>\n%s\n'%(prefix,k.getRef().value.encode('utf-8'),v.createXML(prefix + '\t')) for k,v in self.value]) + prefix + '</dict>' class BPListObject(FieldSet): def createFields(self): yield Enum(Bits(self, "marker_type", 4), {0: "Simple", 1: "Int", 2: "Real", 3: "Date", 4: "Data", 5: "ASCII String", 6: "UTF-16-BE String", 8: "UID", 10: "Array", 13: "Dict",}) markertype = self['marker_type'].value if markertype == 0: # Simple (Null) yield Enum(Bits(self, 
"value", 4), {0: "Null", 8: "False", 9: "True", 15: "Fill Byte",}) if self['value'].display == "False": self.xml=lambda prefix:prefix + "<false/>" elif self['value'].display == "True": self.xml=lambda prefix:prefix + "<true/>" else: self.xml=lambda prefix:prefix + "" elif markertype == 1: # Int yield Bits(self, "size", 4, "log2 of number of bytes") size=self['size'].value # 8-bit (size=0), 16-bit (size=1) and 32-bit (size=2) numbers are unsigned # 64-bit (size=3) numbers are signed yield GenericInteger(self, "value", (size>=3), (2**size)*8) self.xml=lambda prefix:prefix + "<integer>%s</integer>"%self['value'].value elif markertype == 2: # Real yield Bits(self, "size", 4, "log2 of number of bytes") if self['size'].value == 2: # 2**2 = 4 byte float yield Float32(self, "value") elif self['size'].value == 3: # 2**3 = 8 byte float yield Float64(self, "value") else: # FIXME: What is the format of the real? yield Bits(self, "value", (2**self['size'].value)*8) self.xml=lambda prefix:prefix + "<real>%s</real>"%self['value'].value elif markertype == 3: # Date yield Bits(self, "extra", 4, "Extra value, should be 3") cvt_time=lambda v:datetime(2001,1,1) + timedelta(seconds=v) yield displayHandler(Float64(self, "value"),lambda x:humanDatetime(cvt_time(x))) self.xml=lambda prefix:prefix + "<date>%s</date>"%(cvt_time(self['value'].value).isoformat()) elif markertype == 4: # Data yield BPListSize(self, "size") yield Bytes(self, "value", self['size'].value) self.xml=lambda prefix:prefix + "<data>\n%s\n%s</data>"%(self['value'].value.encode('base64').strip(),prefix) elif markertype == 5: # ASCII String yield BPListSize(self, "size") yield String(self, "value", self['size'].value, charset="ASCII") self.xml=lambda prefix:prefix + "<string>%s</string>"%(self['value'].value.encode('iso-8859-1')) elif markertype == 6: # UTF-16-BE String yield BPListSize(self, "size") yield String(self, "value", self['size'].value*2, charset="UTF-16-BE") self.xml=lambda prefix:prefix + 
"<string>%s</string>"%(self['value'].value.encode('utf-8')) elif markertype == 8: # UID yield Bits(self, "size", 4, "Number of bytes minus 1") yield GenericInteger(self, "value", False, (self['size'].value + 1)*8) self.xml=lambda prefix:prefix + "" # no equivalent? elif markertype == 10: # Array yield BPListSize(self, "size") size = self['size'].value if size: yield BPListArray(self, "value", size) self.xml=lambda prefix:self['value'].createXML(prefix) elif markertype == 13: # Dict yield BPListSize(self, "size") yield BPListDict(self, "value", self['size'].value) self.xml=lambda prefix:self['value'].createXML(prefix) else: yield Bits(self, "value", 4) self.xml=lambda prefix:'' def createValue(self): if 'value' in self: return self['value'].value else: return None def createDisplay(self): if 'value' in self: return unicode(self['value'].display) else: return None def createXML(self, prefix=''): if 'value' in self: try: return self.xml(prefix) except AttributeError: return '' return '' def getFieldType(self): return '%s<%s>'%(FieldSet.getFieldType(self), self['marker_type'].display) class BPList(HachoirParser, RootSeekableFieldSet): endian = BIG_ENDIAN MAGIC = "bplist00" PARSER_TAGS = { "id": "bplist", "category": "misc", "file_ext": ("plist",), "magic": ((MAGIC, 0),), "min_size": 8 + 32, # bplist00 + 32-byte trailer "description": "Apple/NeXT Binary Property List", } def __init__(self, stream, **args): RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self)) HachoirParser.__init__(self, stream, **args) def validate(self): if self["magic"].value != self.MAGIC: return "Invalid magic." 
return True def createFields(self): yield Bytes(self, "magic", 8, "File magic (bplist00)") if self.size: self.seekByte(self.size//8-32, True) else: # FIXME: UNTESTED while True: try: self.seekByte(1024) except: break self.seekByte(self.size//8-32) yield BPListTrailer(self, "trailer") self.seekByte(self['trailer/offsetTableOffset'].value) yield BPListOffsetTable(self, "offset_table") for i in self.array("offset_table/offset"): if self.current_size > i.value*8: self.seekByte(i.value) elif self.current_size < i.value*8: # try to detect files with gaps or unparsed content yield RawBytes(self, "padding[]", i.value-self.current_size//8) yield BPListObject(self, "object[]") def createXML(self, prefix=''): return '''<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> ''' + self['/object[' + str(self['/trailer/topObject'].value) + ']'].createXML(prefix) + ''' </plist>'''
10,777
Python
.py
236
35.457627
185
0.583159
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,706
hlp.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/misc/hlp.pyc
Ñò Î ÈMc@s¸dZddklZddklZlZlZlZlZl Z l Z l Z l Z ddk lZddklZlZlZlZdefd„ƒYZdefd „ƒYZd S( s  Microsoft Windows Help (HLP) parser for Hachoir project. Documents: - Windows Help File Format / Annotation File Format / SHG and MRB File Format written by M. Winterhoff (100326.2776@compuserve.com) found on http://www.wotsit.org/ Author: Victor Stinner Creation date: 2007-09-03 iÿÿÿÿ(tParser( tFieldSettBitstInt32tUInt16tUInt32t NullBytestRawBytest PaddingBytestString(t LITTLE_ENDIAN(t textHandlert hexadecimaltdisplayHandlert humanFilesizet FileEntrycBseZd„Zd„ZRS(cOs+ti|||Ž|did|_dS(Nt res_spacei(Rt__init__tvaluet_size(tselftargstkw((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/hlp.pyRsccsXtt|ddƒtƒVtt|ddƒtƒVt|dddƒVtt|dƒtƒVt|d d ƒVtt|d d ƒtƒVt|d d ddddƒVt|ddƒVt|ddƒVt|ddƒVt |ddddƒVt|ddƒVt|ddƒVt|dd ƒV|i |i d}|ot |d!|ƒVndS("NRsReserved spacet used_spaces Used spacet file_flagsis(=4)tmagictflagsit page_sizesPage size in bytest structuretstripttcharsettASCIItzeroitnb_page_splitss*Number of page splits B+ tree has sufferedt root_pages Page number of B+ tree root pagetonetpatternsÿtnb_pagesNumber of B+ tree pagestnb_levelsNumber of levels of B+ treetnb_entrysNumber of entries in B+ treetreserved_space( R RRRR RR R RRtsizet current_size(RR*((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/hlp.pyt createFieldss"(t__name__t __module__RR,(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/hlp.pyRs tHlpFilecBsIeZhdd6dd6d d6dd6dd6ZeZd „Zd „ZRS( thlptidtmisctcategorytfile_exti tmin_sizesMicrosoft Windows Help (HLP)t descriptioncCs@|didjodS|di|iidjodStS(NRi?_s Invalid magictfilesizei(RtstreamR*tTrue(R((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/hlp.pytvalidate9s ccs¡tt|dƒtƒVt|ddƒVt|ddƒVt|ddƒV|i|diƒVt|dƒV|i|id }|ot |d |ƒVndS( NRt dir_startsDirectory starttfirst_free_blocksFirst free blockR7sFile size in bytessfile[]itend( R RR RtseekByteRRR*R+R(RR*((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/hlp.pyR,@s(R0(R-R.t 
PARSER_TAGSR tendianR:R,(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/hlp.pyR//s  N(t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRRR thachoir_core.endianR thachoir_core.text_handlerR R R RRR/(((sA/pentest/enumeration/google/metagoofil/hachoir_parser/misc/hlp.pyt<module> s @"
3,868
Python
.py
27
142.037037
703
0.486979
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,707
common.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/misc/common.py
from hachoir_core.field import StaticFieldSet, Float32 class Vertex(StaticFieldSet): format = ((Float32, "x"), (Float32, "y"), (Float32, "z")) def createValue(self): return (self["x"].value, self["y"].value, self["z"].value) class MapUV(StaticFieldSet): format = ((Float32, "u"), (Float32, "v")) def createValue(self): return (self["u"].value, self["v"].value)
397
Python
.py
9
39.111111
66
0.640625
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,708
file_3ds.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/misc/file_3ds.py
""" 3D Studio Max file (.3ds) parser. Author: Victor Stinner """ from hachoir_parser import Parser from hachoir_core.field import (StaticFieldSet, FieldSet, UInt16, UInt32, RawBytes, Enum, CString) from hachoir_parser.image.common import RGB from hachoir_core.endian import LITTLE_ENDIAN from hachoir_core.text_handler import textHandler, hexadecimal from hachoir_parser.misc.common import Vertex, MapUV def readObject(parent): yield CString(parent, "name", "Object name") size = parent["size"].value * 8 while parent.current_size < size: yield Chunk(parent, "chunk[]") def readTextureFilename(parent): yield CString(parent, "filename", "Texture filename") def readVersion(parent): yield UInt32(parent, "version", "3DS file format version") def readMaterialName(parent): yield CString(parent, "name", "Material name") class Polygon(StaticFieldSet): format = ( (UInt16, "a", "Vertex A"), (UInt16, "b", "Vertex B"), (UInt16, "c", "Vertex C"), (UInt16, "flags", "Flags")) def readMapList(parent): yield UInt16(parent, "count", "Map count") for index in xrange(parent["count"].value): yield MapUV(parent, "map_uv[]", "Mapping UV") def readColor(parent): yield RGB(parent, "color") def readVertexList(parent): yield UInt16(parent, "count", "Vertex count") for index in range(0, parent["count"].value): yield Vertex(parent, "vertex[]", "Vertex") def readPolygonList(parent): count = UInt16(parent, "count", "Vertex count") yield count for i in range(0, count.value): yield Polygon(parent, "polygon[]") size = parent["size"].value * 8 while parent.current_size < size: yield Chunk(parent, "chunk[]") class Chunk(FieldSet): # List of chunk type name type_name = { 0x0011: "Color", 0x4D4D: "Main chunk", 0x0002: "File version", 0x3D3D: "Materials and objects", 0x4000: "Object", 0x4100: "Mesh (triangular)", 0x4110: "Vertices list", 0x4120: "Polygon (faces) list", 0x4140: "Map UV list", 0x4130: "Object material", 0xAFFF: "New material", 0xA000: "Material name", 0xA010: "Material ambient", 0xA020: 
"Material diffuse", 0xA030: "Texture specular", 0xA200: "Texture", 0xA300: "Texture filename", # Key frames 0xB000: "Keyframes", 0xB002: "Object node tag", 0xB006: "Light target node tag", 0xB007: "Spot light node tag", 0xB00A: "Keyframes header", 0xB009: "Keyframe current time", 0xB030: "Node identifier", 0xB010: "Node header", 0x7001: "Viewport layout" } chunk_id_by_type = { 0x4d4d: "main", 0x0002: "version", 0x3d3d: "obj_mat", 0xb000: "keyframes", 0xafff: "material[]", 0x4000: "object[]", 0x4110: "vertices_list", 0x4120: "polygon_list", 0x4140: "mapuv_list", 0x4100: "mesh" } # List of chunks which contains other chunks sub_chunks = \ (0x4D4D, 0x4100, 0x3D3D, 0xAFFF, 0xA200, 0xB002, 0xB006, 0xB007, 0xA010, 0xA030, 0xA020, 0xB000) # List of chunk type handlers handlers = { 0xA000: readMaterialName, 0x4000: readObject, 0xA300: readTextureFilename, 0x0011: readColor, 0x0002: readVersion, 0x4110: readVertexList, 0x4120: readPolygonList, 0x4140: readMapList } def __init__(self, *args): FieldSet.__init__(self, *args) # Set description self._description = "Chunk: %s" % self["type"].display # Set name based on type field type = self["type"].value if type in Chunk.chunk_id_by_type: self._name = Chunk.chunk_id_by_type[type] else: self._name = "chunk_%04x" % type # Guess chunk size self._size = self["size"].value * 8 def createFields(self): yield Enum(textHandler(UInt16(self, "type", "Chunk type"), hexadecimal), Chunk.type_name) yield UInt32(self, "size", "Chunk size (in bytes)") content_size = self["size"].value - 6 if content_size == 0: return type = self["type"].value if type in Chunk.sub_chunks: while self.current_size < self.size: yield Chunk(self, "chunk[]") else: if type in Chunk.handlers: fields = Chunk.handlers[type] (self) for field in fields: yield field else: yield RawBytes(self, "data", content_size) class File3ds(Parser): endian = LITTLE_ENDIAN PARSER_TAGS = { "id": "3ds", "category": "misc", "file_ext": ("3ds",), "mime": (u"image/x-3ds",), "min_size": 16*8, 
"description": "3D Studio Max model" } def validate(self): if self.stream.readBytes(0, 2) != "MM": return "Wrong signature" if self["main/version/version"].value not in (2, 3): return "Unknown format version" return True def createFields(self): while not self.eof: yield Chunk(self, "chunk[]")
5,305
Python
.py
153
26.993464
97
0.600819
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,709
file_3do.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/misc/file_3do.pyc
Ñò Î ÈMc@s*dZddklZddklZlZlZlZlZl Z l Z ddk l Z l Z ddklZlZdefd„ƒYZdefd „ƒYZd efd „ƒYZd efd „ƒYZdefd„ƒYZdefd„ƒYZdefd„ƒYZdefd„ƒYZdS(sI 3do model parser. Author: Cyril Zorin Creation date: 28 september 2006 iÿÿÿÿ(tParser(tFieldSettUInt32tInt32tStringtFloat32tRawBytest PaddingBytes(t LITTLE_ENDIANt BIG_ENDIAN(tVertextMapUVtVectorcBs eZddd„Zd„ZRS(c Csžti||||ƒ||_||_|d|_||_y|ii|i|iƒ}Wntj o|ii}nX|o||i|_ndS(Ns[]( Rt__init__tcountttypetenametedesct static_sizet TypeErrort_size( tselftparenttnameRRRRt descriptiont item_size((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR s    ccs8x1t|iƒD] }|i||i|iƒVqWdS(N(txrangeRRRR(Rtindex((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyt createFields sN(t__name__t __module__tNoneR R(((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR s tFacecBseZd„Zd„ZRS(ccsFt|dƒVt|dƒVt|dƒVt|dƒVt|dƒVt|dƒVt|ddƒVt|d d ƒVt|d d ƒVt|dƒVt|d ƒVt|dƒVt|dƒV|dio"t|d|ditdƒVn|d io"t|d|ditdƒVn|d iot|ddƒVndS(NtidRt geometry_modet lighting_modet texture_modet nverticess unknown[]tunknownt has_textures Has texture?t has_materials Has material?t extra_lighttnormaltvertex_indicestvertexttexture_vertex_indicesttexture_vertextmaterial_indexsmaterial index(RRR tvalueR (R((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR%s*  cCsd|diS(Ns Face: id=%sR!(R0(R((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pytcreateDescription<s(RRRR1(((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR $s tMeshcBs#eZd„Zd„Zd„ZRS(cGsti||ŒdS(N(RR (Rtargs((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR @sccs­t|ddddƒVt|dƒVt|dƒVt|dƒVt|dƒVt|d ƒVt|d ƒVt|d ƒV|d i}|ot|d |td ƒVn|d io"t|d|d itdƒVn|o2t|d|tdƒVt|d|tdƒVn|d io"t|d|d itdƒVn|ot|d|tdƒVnt|dƒVt|dƒVt|dƒVt|dƒVt|dƒVdS(NRi tstriptR!R"R#R$tnmesh_verticestntexture_verticestnfacestverticesR,stexture verticesR.slight verticesR)s unknown[]R&tfacestfacesvertex normalsR*t 
has_shadowtradius(RRR0R R R RR (Rtnb_vert((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyRCs<     " cCsd|di|difS(NsMesh "%s" (id %s)RR!(R0(R((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR1es(RRR RR1(((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR2?s  "tGeosetcBseZd„Zd„ZRS(ccs>t|dƒVx)t|diƒD]}t|dƒVq"WdS(NRsmesh[](RRR0R2(RR((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyRiscCsd|diS(NsSet of %s meshesR(R0(R((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR1ns(RRRR1(((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR?hs tNodecBs#eZd„Zd„Zd„ZRS(cGs‰ti||Œd }|didjo|d7}n|didjo|d7}n|didjo|d7}n||_dS( Ni¼iit parent_offsetii tfirst_child_offsettnext_sibling_offseti¸iÀ(RR R0R(RR3tsize((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR rsccs�t|ddddƒVt|ddddƒVt|dƒVt|d ƒVt|d ƒVt|d ƒVt|d ƒVt|d ƒVt|dƒVt|dƒVt|dƒVt|dƒVt|dƒVt|dƒVt|dƒVt|dƒVx"tdƒD]}t|dƒVqÿW|d idjot|dƒVn|didjot|dƒVn|didjot|dƒVndS(NRi R4R5s unknown[]tpatternsÌtflagsR!Rtmesh_idtdepthRAt nchildrenRBRCtpivottpositiontpitchtyawtrollisunknown_vertex[]it parent_idtfirst_child_idtnext_sibling_id(RRRRR RRR0(RR((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR}s2 cCsd|diS(Ns Node "%s"R(R0(R((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR1—s(RRR RR1(((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR@qs tNodescBseZd„Zd„ZRS(ccs>t|dƒVx)t|diƒD]}t|dƒVq"WdS(NRsnode[](RRR0R@(RR((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR›scCsd|diS(Ns Nodes (%s)R(R0(R((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR1 s(RRRR1(((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyRRšs t MaterialscBs#eZd„Zd„Zd„ZRS(cGs5ti||Œ|d}|i|id|_dS(NRi ii(RR RDR0R(RR3R((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR ¤s 
ccsJt|dƒVx5t|diƒD] }t|dddddƒVq"WdS(NRs filename[]i sMaterial file nameR4R5(RRR0R(RR((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR©scCsd|diS(NsMaterial file names (%s)R(R0(R((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyR1®s(RRR RR1(((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyRS£s  tFile3docBsPeZhdd6dd6dd6dd6dd 6d d 6ZeZd „Zd „ZRS(t3doR!tmisctcategorytfile_extu image/x-3dotmimeiitmin_sizesrenderdroid 3d model.RcCs|iiddƒ}|djS(NiitLDOMtMODL(R[R\(tstreamt readBytes(Rt signature((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pytvalidate½sccst|dddddƒV|didjo t|_nt|dƒVt|dd d d d ƒVt|d dƒVt|dƒVx)t|diƒD]}t|dƒVq–Wt|d dƒVt |dƒVt |dƒVt |dƒV|i |i jo|i|i dƒVndS(Ntfile_sigisFile signaturetcharsettASCIIR\t materialst model_namei smodel file nameR4R5s unknown[]tngeosetssgeoset[]tnodest model_radiustinsertion_offsettend(RR0R tendianRSRRRR?RRRR t current_sizeRtseekBit(RR((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyRÁs  (RU(u image/x-3doi (RRt PARSER_TAGSRRkR`R(((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyRT±s  N(t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRthachoir_core.endianRR thachoir_parser.misc.commonR R R R R2R?R@RRRSRT(((sF/pentest/enumeration/google/metagoofil/hachoir_parser/misc/file_3do.pyt<module>s4) ) 
10,191
Python
.py
32
317.4375
1,298
0.427011
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,710
python.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/python.py
""" Python compiled source code parser. Informations: - Python 2.4.2 source code: files Python/marshal.c and Python/import.c Author: Victor Stinner Creation: 25 march 2005 """ DISASSEMBLE = False from hachoir_parser import Parser from hachoir_core.field import (FieldSet, UInt16, Int32, UInt32, Int64, ParserError, Float64, Enum, Character, Bytes, RawBytes, PascalString8, TimestampUnix32) from hachoir_core.endian import LITTLE_ENDIAN from hachoir_core.bits import long2raw from hachoir_core.text_handler import textHandler, hexadecimal from hachoir_core.i18n import ngettext if DISASSEMBLE: from dis import dis def disassembleBytecode(field): bytecode = field.value dis(bytecode) # --- String and string reference --- def parseString(parent): yield UInt32(parent, "length", "Length") length = parent["length"].value if 0 < length: yield RawBytes(parent, "text", length, "Content") if DISASSEMBLE and parent.name == "compiled_code": disassembleBytecode(parent["text"]) def parseStringRef(parent): yield textHandler(UInt32(parent, "ref"), hexadecimal) def createStringRefDesc(parent): return "String ref: %s" % parent["ref"].display # --- Integers --- def parseInt32(parent): yield Int32(parent, "value") def parseInt64(parent): yield Int64(parent, "value") def parseLong(parent): yield Int32(parent, "digit_count") for index in xrange( abs(parent["digit_count"].value) ): yield UInt16(parent, "digit[]") # --- Float and complex --- def parseFloat(parent): yield PascalString8(parent, "value") def parseBinaryFloat(parent): yield Float64(parent, "value") def parseComplex(parent): yield PascalString8(parent, "real") yield PascalString8(parent, "complex") def parseBinaryComplex(parent): yield Float64(parent, "real") yield Float64(parent, "complex") # --- Tuple and list --- def parseTuple(parent): yield Int32(parent, "count", "Item count") count = parent["count"].value if count < 0: raise ParserError("Invalid tuple/list count") for index in xrange(count): yield Object(parent, "item[]") def 
createTupleDesc(parent): count = parent["count"].value items = ngettext("%s item", "%s items", count) % count return "%s: %s" % (parent.code_info[2], items) # --- Dict --- def parseDict(parent): """ Format is: (key1, value1, key2, value2, ..., keyn, valuen, NULL) where each keyi and valuei is an object. """ parent.count = 0 while True: key = Object(parent, "key[]") yield key if key["bytecode"].value == "0": break yield Object(parent, "value[]") parent.count += 1 def createDictDesc(parent): return "Dict: %s" % (ngettext("%s key", "%s keys", parent.count) % parent.count) # --- Code --- def parseCode(parent): if 0x3000000 <= parent.root.getVersion(): yield UInt32(parent, "arg_count", "Argument count") yield UInt32(parent, "kwonlyargcount", "Keyword only argument count") yield UInt32(parent, "nb_locals", "Number of local variables") yield UInt32(parent, "stack_size", "Stack size") yield UInt32(parent, "flags") elif 0x2030000 <= parent.root.getVersion(): yield UInt32(parent, "arg_count", "Argument count") yield UInt32(parent, "nb_locals", "Number of local variables") yield UInt32(parent, "stack_size", "Stack size") yield UInt32(parent, "flags") else: yield UInt16(parent, "arg_count", "Argument count") yield UInt16(parent, "nb_locals", "Number of local variables") yield UInt16(parent, "stack_size", "Stack size") yield UInt16(parent, "flags") yield Object(parent, "compiled_code") yield Object(parent, "consts") yield Object(parent, "names") yield Object(parent, "varnames") if 0x2000000 <= parent.root.getVersion(): yield Object(parent, "freevars") yield Object(parent, "cellvars") yield Object(parent, "filename") yield Object(parent, "name") if 0x2030000 <= parent.root.getVersion(): yield UInt32(parent, "firstlineno", "First line number") else: yield UInt16(parent, "firstlineno", "First line number") yield Object(parent, "lnotab") class Object(FieldSet): bytecode_info = { # Don't contains any data '0': ("null", None, "NULL", None), 'N': ("none", None, "None", None), 'F': 
("false", None, "False", None), 'T': ("true", None, "True", None), 'S': ("stop_iter", None, "StopIter", None), '.': ("ellipsis", None, "ELLIPSIS", None), '?': ("unknown", None, "Unknown", None), 'i': ("int32", parseInt32, "Int32", None), 'I': ("int64", parseInt64, "Int64", None), 'f': ("float", parseFloat, "Float", None), 'g': ("bin_float", parseBinaryFloat, "Binary float", None), 'x': ("complex", parseComplex, "Complex", None), 'y': ("bin_complex", parseBinaryComplex, "Binary complex", None), 'l': ("long", parseLong, "Long", None), 's': ("string", parseString, "String", None), 't': ("interned", parseString, "Interned", None), 'u': ("unicode", parseString, "Unicode", None), 'R': ("string_ref", parseStringRef, "String ref", createStringRefDesc), '(': ("tuple", parseTuple, "Tuple", createTupleDesc), '[': ("list", parseTuple, "List", createTupleDesc), '<': ("set", parseTuple, "Set", createTupleDesc), '>': ("frozenset", parseTuple, "Frozen set", createTupleDesc), '{': ("dict", parseDict, "Dict", createDictDesc), 'c': ("code", parseCode, "Code", None), } def __init__(self, parent, name, **kw): FieldSet.__init__(self, parent, name, **kw) code = self["bytecode"].value if code not in self.bytecode_info: raise ParserError('Unknown bytecode: "%s"' % code) self.code_info = self.bytecode_info[code] if not name: self._name = self.code_info[0] if code == "l": self.createValue = self.createValueLong elif code in ("i", "I", "f", "g"): self.createValue = lambda: self["value"].value elif code == "T": self.createValue = lambda: True elif code == "F": self.createValue = lambda: False elif code in ("x", "y"): self.createValue = self.createValueComplex elif code in ("s", "t", "u"): self.createValue = self.createValueString self.createDisplay = self.createDisplayString def createValueString(self): if "text" in self: return self["text"].value else: return "" def createDisplayString(self): if "text" in self: return self["text"].display else: return "(empty)" def createValueLong(self): 
is_negative = self["digit_count"].value < 0 count = abs(self["digit_count"].value) total = 0 for index in xrange(count-1, -1, -1): total <<= 15 total += self["digit[%u]" % index].value if is_negative: total = -total return total def createValueComplex(self): return complex( float(self["real"].value), float(self["complex"].value)) def createFields(self): yield Character(self, "bytecode", "Bytecode") parser = self.code_info[1] if parser: for field in parser(self): yield field def createDescription(self): create = self.code_info[3] if create: return create(self) else: return self.code_info[2] class PythonCompiledFile(Parser): PARSER_TAGS = { "id": "python", "category": "program", "file_ext": ("pyc", "pyo"), "min_size": 9*8, "description": "Compiled Python script (.pyc/.pyo files)" } endian = LITTLE_ENDIAN # Dictionnary which associate the pyc signature (32-bit integer) # to a Python version string (eg. "m\xf2\r\n" => "Python 2.4b1"). # This list comes from CPython source code, see "MAGIC" # and "pyc_magic" in file Python/import.c MAGIC = { # Python 1.x 20121: ("1.5", 0x1050000), # Python 2.x 50823: ("2.0", 0x2000000), 60202: ("2.1", 0x2010000), 60717: ("2.2", 0x2020000), 62011: ("2.3a0", 0x2030000), 62021: ("2.3a0", 0x2030000), 62041: ("2.4a0", 0x2040000), 62051: ("2.4a3", 0x2040000), 62061: ("2.4b1", 0x2040000), 62071: ("2.5a0", 0x2050000), 62081: ("2.5a0 (ast-branch)", 0x2050000), 62091: ("2.5a0 (with)", 0x2050000), 62092: ("2.5a0 (WITH_CLEANUP opcode)", 0x2050000), 62101: ("2.5b3", 0x2050000), 62111: ("2.5b3", 0x2050000), 62121: ("2.5c1", 0x2050000), 62131: ("2.5c2", 0x2050000), # Python 3.x 3000: ("3.0 (3000)", 0x3000000), 3010: ("3.0 (3010)", 0x3000000), 3020: ("3.0 (3020)", 0x3000000), 3030: ("3.0 (3030)", 0x3000000), 3040: ("3.0 (3040)", 0x3000000), 3050: ("3.0 (3050)", 0x3000000), 3060: ("3.0 (3060)", 0x3000000), 3070: ("3.0 (3070)", 0x3000000), 3080: ("3.0 (3080)", 0x3000000), 3090: ("3.0 (3090)", 0x3000000), 3100: ("3.0 (3100)", 0x3000000), 3102: ("3.0 
(3102)", 0x3000000), 3110: ("3.0a4", 0x3000000), 3130: ("3.0a5", 0x3000000), 3131: ("3.0a5 unicode", 0x3000000), } # Dictionnary which associate the pyc signature (4-byte long string) # to a Python version string (eg. "m\xf2\r\n" => "2.4b1") STR_MAGIC = dict( \ (long2raw(magic | (ord('\r')<<16) | (ord('\n')<<24), LITTLE_ENDIAN), value[0]) \ for magic, value in MAGIC.iteritems()) def validate(self): signature = self.stream.readBits(0, 16, self.endian) if signature not in self.MAGIC: return "Unknown version (%s)" % signature if self.stream.readBytes(2*8, 2) != "\r\n": return r"Wrong signature (\r\n)" if self.stream.readBytes(8*8, 1) != 'c': return "First object bytecode is not code" return True def getVersion(self): if not hasattr(self, "version"): signature = self.stream.readBits(0, 16, self.endian) self.version = self.MAGIC[signature][1] return self.version def createFields(self): yield Enum(Bytes(self, "signature", 4, "Python file signature and version"), self.STR_MAGIC) yield TimestampUnix32(self, "timestamp", "Timestamp") yield Object(self, "content")
10,832
Python
.py
272
32.444853
100
0.597833
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,711
elf.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/elf.py
""" ELF (Unix/BSD executable file format) parser. Author: Victor Stinner Creation date: 08 may 2006 """ from hachoir_parser import Parser from hachoir_core.field import (FieldSet, ParserError, UInt8, UInt16, UInt32, Enum, String, Bytes) from hachoir_core.text_handler import textHandler, hexadecimal from hachoir_core.endian import LITTLE_ENDIAN, BIG_ENDIAN class ElfHeader(FieldSet): static_size = 52*8 LITTLE_ENDIAN_ID = 1 BIG_ENDIAN_ID = 2 MACHINE_NAME = { 1: u"AT&T WE 32100", 2: u"SPARC", 3: u"Intel 80386", 4: u"Motorola 68000", 5: u"Motorola 88000", 7: u"Intel 80860", 8: u"MIPS RS3000" } CLASS_NAME = { 1: u"32 bits", 2: u"64 bits" } TYPE_NAME = { 0: u"No file type", 1: u"Relocatable file", 2: u"Executable file", 3: u"Shared object file", 4: u"Core file", 0xFF00: u"Processor-specific (0xFF00)", 0xFFFF: u"Processor-specific (0xFFFF)" } ENDIAN_NAME = { LITTLE_ENDIAN_ID: "Little endian", BIG_ENDIAN_ID: "Big endian", } def createFields(self): yield Bytes(self, "signature", 4, r'ELF signature ("\x7fELF")') yield Enum(UInt8(self, "class", "Class"), self.CLASS_NAME) yield Enum(UInt8(self, "endian", "Endian"), self.ENDIAN_NAME) yield UInt8(self, "file_version", "File version") yield String(self, "pad", 8, "Pad") yield UInt8(self, "nb_ident", "Size of ident[]") yield Enum(UInt16(self, "type", "File type"), self.TYPE_NAME) yield Enum(UInt16(self, "machine", "Machine type"), self.MACHINE_NAME) yield UInt32(self, "version", "ELF format version") yield UInt32(self, "entry", "Number of entries") yield UInt32(self, "phoff", "Program header offset") yield UInt32(self, "shoff", "Section header offset") yield UInt32(self, "flags", "Flags") yield UInt16(self, "ehsize", "Elf header size (this header)") yield UInt16(self, "phentsize", "Program header entry size") yield UInt16(self, "phnum", "Program header entry count") yield UInt16(self, "shentsize", "Section header entry size") yield UInt16(self, "shnum", "Section header entre count") yield UInt16(self, "shstrndx", "Section header 
strtab index") def isValid(self): if self["signature"].value != "\x7FELF": return "Wrong ELF signature" if self["class"].value not in self.CLASS_NAME: return "Unknown class" if self["endian"].value not in self.ENDIAN_NAME: return "Unknown endian (%s)" % self["endian"].value return "" class SectionHeader32(FieldSet): static_size = 40*8 TYPE_NAME = { 8: "BSS" } def createFields(self): yield UInt32(self, "name", "Name") yield Enum(UInt32(self, "type", "Type"), self.TYPE_NAME) yield UInt32(self, "flags", "Flags") yield textHandler(UInt32(self, "VMA", "Virtual memory address"), hexadecimal) yield textHandler(UInt32(self, "LMA", "Logical memory address (in file)"), hexadecimal) yield textHandler(UInt32(self, "size", "Size"), hexadecimal) yield UInt32(self, "link", "Link") yield UInt32(self, "info", "Information") yield UInt32(self, "addr_align", "Address alignment") yield UInt32(self, "entry_size", "Entry size") def createDescription(self): return "Section header (name: %s, type: %s)" % \ (self["name"].value, self["type"].display) class ProgramHeader32(FieldSet): TYPE_NAME = { 3: "Dynamic library" } static_size = 32*8 def createFields(self): yield Enum(UInt16(self, "type", "Type"), ProgramHeader32.TYPE_NAME) yield UInt16(self, "flags", "Flags") yield UInt32(self, "offset", "Offset") yield textHandler(UInt32(self, "vaddr", "V. address"), hexadecimal) yield textHandler(UInt32(self, "paddr", "P. 
address"), hexadecimal) yield UInt32(self, "file_size", "File size") yield UInt32(self, "mem_size", "Memory size") yield UInt32(self, "align", "Alignment") yield UInt32(self, "xxx", "???") def createDescription(self): return "Program Header (%s)" % self["type"].display def sortSection(a, b): return int(a["offset"] - b["offset"]) #class Sections(FieldSet): # def createFields?(self, stream, parent, sections): # for section in sections: # ofs = section["offset"] # size = section["file_size"] # if size != 0: # sub = stream.createSub(ofs, size) # #yield DeflateFilter(self, "section[]", sub, size, Section, "Section")) # chunk = self.doRead("section[]", "Section", (Section,), {"stream": sub}) # else: # chunk = self.doRead("section[]", "Section", (FormatChunk, "string[0]")) # chunk.description = "ELF section (in file: %s..%s)" % (ofs, ofs+size) class ElfFile(Parser): PARSER_TAGS = { "id": "elf", "category": "program", "file_ext": ("so", ""), "min_size": ElfHeader.static_size, # At least one program header "mime": ( u"application/x-executable", u"application/x-object", u"application/x-sharedlib", u"application/x-executable-file", u"application/x-coredump"), "magic": (("\x7FELF", 0),), "description": "ELF Unix/BSD program/library" } endian = LITTLE_ENDIAN def validate(self): err = self["header"].isValid() if err: return err return True def createFields(self): # Choose the right endian depending on endian specified in header if self.stream.readBits(5*8, 8, BIG_ENDIAN) == ElfHeader.BIG_ENDIAN_ID: self.endian = BIG_ENDIAN else: self.endian = LITTLE_ENDIAN # Parse header and program headers yield ElfHeader(self, "header", "Header") for index in xrange(self["header/phnum"].value): yield ProgramHeader32(self, "prg_header[]") if False: raise ParserError("TODO: Parse sections...") #sections = self.array("prg_header") #size = self["header/shoff"].value - self.current_size//8 #chunk = self.doRead("data", "Data", (DeflateFilter, stream, size, Sections, sections)) #chunk.description = 
"Sections (use an evil hack to manage share same data on differents parts)" #assert self.current_size//8 == self["header/shoff"].value else: raw = self.seekByte(self["header/shoff"].value, "raw[]", relative=False) if raw: yield raw for index in xrange(self["header/shnum"].value): yield SectionHeader32(self, "section_header[]") def createDescription(self): return "ELF Unix/BSD program/library: %s" % ( self["header/class"].display)
7,104
Python
.py
166
34.939759
108
0.599393
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,712
java.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/java.pyc
—Ú Œ »Mc@s-dZddklZddklZlZlZlZlZl Z l Z l Z l Z l Z lZlZlZlZlZddklZddklZlZeddÑZh dd 6d d 6d d 6dd6dd6dd6dd6dd6dd6adÑZd6dÑZd6dÑZd6dÑZdefdÑÉYZ d efd!ÑÉYZ!d"e fd#ÑÉYZ"d$efd%ÑÉYZ#d&efd'ÑÉYZ$d(efd)ÑÉYZ%d*efd+ÑÉYZ&d,efd-ÑÉYZ'd.efd/ÑÉYZ(d0efd1ÑÉYZ)d2efd3ÑÉYZ*d4efd5ÑÉYZ+d6S(7sS Compiled Java classes parser. Author: Thomas de Grenier de Latour (TGL) <degrenier@easyconnect.fr> Creation: 2006/11/01 Last-update: 2006/11/06 Introduction: * This parser is for compiled Java classes, aka .class files. What is nice with this format is that it is well documented in the official Java VM specs. * Some fields, and most field sets, have dynamic sizes, and there is no offset to directly jump from an header to a given section, or anything like that. It means that accessing a field at the end of the file requires that you've already parsed almost the whole file. That's not very efficient, but it's okay given the usual size of .class files (usually a few KB). * Most fields are just indexes of some "constant pool" entries, which holds most constant datas of the class. And constant pool entries reference other constant pool entries, etc. Hence, a raw display of this fields only shows integers and is not really understandable. Because of that, this parser comes with two important custom field classes: - CPInfo are constant pool entries. They have a type ("Utf8", "Methodref", etc.), and some contents fields depending on this type. They also have a "__str__()" method, which returns a syntetic view of this contents. - CPIndex are constant pool indexes (UInt16). It is possible to specify what type of CPInfo they are allowed to points to. They also have a custom display method, usually printing something like "-> foo", where foo is the str() of their target CPInfo. References: * The Java Virtual Machine Specification, 2nd edition, chapter 4, in HTML: http://java.sun.com/docs/books/vmspec/2nd-edition/html/ClassFile.doc.html => That's the spec i've been implementing so far. 
I think it is format version 46.0 (JDK 1.2). * The Java Virtual Machine Specification, 2nd edition, chapter 4, in PDF: http://java.sun.com/docs/books/vmspec/2nd-edition/ClassFileFormat.pdf => don't trust the URL, this PDF version is more recent than the HTML one. It highligths some recent additions to the format (i don't know the exact version though), which are not yet implemented in this parser. * The Java Virtual Machine Specification, chapter 4: http://java.sun.com/docs/books/vmspec/html/ClassFile.doc.html => describes an older format, probably version 45.3 (JDK 1.1). TODO/FIXME: * Google for some existing free .class files parsers, to get more infos on the various formats differences, etc. * Write/compile some good tests cases. * Rework pretty-printing of CPIndex fields. This str() thing sinks. * Add support of formats other than 46.0 (45.3 seems to already be ok, but there are things to add for later formats). * Make parsing robust: currently, the parser will die on asserts as soon as something seems wrong. It should rather be tolerant, print errors/warnings, and try its best to continue. Check how error-handling is done in other parsers. * Gettextize the whole thing. * Check whether Float32/64 are really the same as Java floats/double. PEP-0754 says that handling of +/-infinity and NaN is very implementation-dependent. Also check how this values are displayed. * Make the parser edition-proof. For instance, editing a constant-pool string should update the length field of it's entry, etc. Sounds like a huge work. iˇˇˇˇ(tParser(t ParserErrortFieldSettStaticFieldSettEnumtRawBytestPascalString16tFloat32tFloat64tUInt8tUInt16tInt32tUInt32tInt64tBittNullBits(t BIG_ENDIAN(t textHandlert hexadecimalt cCsÅg}d}xe||joW||@o>||jo|i||Éqe|o|idÉqen|d>}qW|i|ÉS(së Parses an integer representing a set of flags. The known flags are stored with their bit-mask in a dictionnary. Returns a string. 
is???(tappendtjoin(tflagst flags_dicttshow_unknown_flagst separatort flags_listtmask((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyt parse_flagsHs   tbytetBtchartCtdoubletDtfloattFtinttItlongtJtshorttStbooleantZtvoidtVcCsÊd}x*|ddjo|d7}|d}q W|ddjoHy|idÉ}Wntd|ÉÇnX|d|!}||}n9yt|d}Wn#tj otd|ÉÇnX|idd É|d |dfS( s› Read head of a field/method descriptor. Returns a pair of strings, where the first one is a human-readable string representation of the first found type, and the second one is the tail of the parameter. it[itLt;sNot a valid descriptor string: s!Not a valid descriptor string: %st/t.s[](tfindRtcode_to_type_nametKeyErrortreplace(tdescrt array_dimtendttype((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyteat_descriptorfs"  cCsK|ptÇt|É\}}| ptÇ|o |d|S|SdS(sm Parse a field descriptor (single type), and returns it as human-readable string representation. RN(tAssertionErrorR<(R8tnameR;ttail((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pytparse_field_descriptor}s  cCs∆|o|ddjptÇ|d}g}x5|ddjo#t|É\}}|i|Éq2Wt|dÉ\}}| ptÇdi|É}|od|||fSd||fSdS( s~ Parse a method descriptor (params type and return type), and returns it as human-readable string representation. it(it)s, s %s %s(%s)s%s (%s)N(R=R<RR(R8R>t params_listtparamR;R?tparams((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pytparse_method_descriptoräs cCs>|ptÇ|ddjot||ÉSt||ÉSdS(sq Parse either a field or method descriptor, and returns it as human- readable string representation. iRAN(R=RFR@(R8R>((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pytparse_any_descriptorùst FieldArraycBs eZdZdÑZdÑZRS(s≤ Holds a fixed length array of fields which all have the same type. This type may be variable-length. Each field will be named "foo[x]" (with x starting at 0). cKs2ti|||É||_||_||_dS(sΩCreate a FieldArray of <length> fields of class <elements_class>, named "<name>[x]". 
The **elements_extra_args will be passed to the constructor of each field when yielded.N(Rt__init__tarray_elements_classt array_lengthtarray_elements_extra_args(tselftparentR>telements_classtlengthtelements_extra_args((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRI∞s  ccsEx>td|iÉD]*}|i|d|i|f|içVqWdS(Nis%s[%d](trangeRKRJR>RL(RMti((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyt createFields∫s(t__name__t __module__t__doc__RIRT(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRH™s t ConstantPoolcBs eZdZdÑZdÑZRS(s≤ ConstantPool is similar to a FieldArray of CPInfo fields, but: - numbering starts at 1 instead of zero - some indexes are skipped (after Long or Double entries) cCs ti|||É||_dS(N(RRItconstant_pool_length(RMRNR>RP((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRI≈sccsod}xb||ijoQd|i|f}t||ÉV|d7}||idjo|d7}q q WdS(Nis%s[%d]tLongtDouble(sLongsDouble(RYR>tCPInfot constant_type(RMRSR>((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRT»s (RURVRWRIRT(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRXøs tCPIndexcBs8eZdZdddÑedÑZdÑZdÑZRS(s/ Holds index of a constant pool entry. cCs|S(((tx((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyt<lambda>ÿscsgtià|||Ét|tÉo|fà_n |à_|à_|à_áfdÜà_dS(sñ Initialize a CPIndex. 
- target_type is the tuple of expected type for the target CPInfo (if None, then there will be no type check) - target_text_handler is a string transformation function used for pretty printing the target str() result - allow_zero states whether null index is allowed (sometimes, constant pool index is optionnal) csàiS((tvalue((RM(sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyR`ÈsN(R RIt isinstancetstrt target_typest allow_zerottarget_text_handlertgetOriginalDisplay(RMRNR>t descriptionRdRfRe((RMsE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRI◊s    cCsH|iÉ}|io | odS|ptÇd|it|ÉÉS(NtZEROs-> (t get_cp_entryReR=RfRc(RMtcp_entry((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyt createDisplayÎs  cCså|i|dijptÇ|io|i odS|d|i}t|tÉptÇ|io|i|ijptÇn|S(s2 Returns the target CPInfo field. s/constant_pool_counts /constant_pool/constant_pool[%d]N(RaR=RetNoneRbR\RdR](RMRk((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRjÚs N(RURVRWRmtFalseRIRlRj(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyR^”s  R\cBs eZdZdÑZdÑZRS(sy Holds a constant pool entry. Entries all have a type, and various contents fields depending on their type. 
ccsÉtt|dÉ|iiÉV|di|iijotd|diÉÇn|ii|di|_|idjot|dddÉVnÏ|idjot|dÉVn |idjot |dÉVn®|id jot |dÉVnÜ|id jot |dÉVnd|id jot |d d ddÉVn9|idjot |dddÉVn|idjo/t |dddd ÉVt |dddÉVn“|idjo/t |dddd ÉVt |dddÉVnì|idjo/t |dddd ÉVt |dddÉVnT|idjo,t |d ddÉVt |dddÉVntd|diÉÇdS(Nttags Java: unknown constant type (%s)tUtf8tbytestcharsetsUTF-8tIntegertFloatRZR[tClasst name_indexsClass or interface nameRdtStringt string_indextFieldreft class_indexsField class or interface nametname_and_type_indext NameAndTypet MethodrefsMethod class nametInterfaceMethodrefsMethod interface nametdescriptor_indexs(Not a valid constant pool element type: ( RR troottCONSTANT_TYPESRaRR]RR RR RR^(RM((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRTs@cCse|idjo |diS|idjo |diS|idjo't|diÉÉ}|id d ÉS|id jot|d iÉÉS|id jod|d|dfS|idjod|d|dfS|idjod|d|dfS|idjo1tt|diÉÉdt|diÉÉÉStd|diÉÇdS(s≤ Returns a human-readable string representation of the constant pool entry. It is used for pretty-printing of the CPIndex fields pointing to it. 
RpRqRsRtRZR[RuRvR2R3RwRxRys %s (from %s)R{RzR}R~R|RR>s(Not a valid constant pool element type: RoN(sIntegersFloatsLongsDouble(R]RatdisplayRcRjR7RGR(RMt class_name((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyt__str__(s*  (RURVRWRTRÑ(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyR\s #t FieldInfocBseZdÑZRS(ccst|ddÉVt|dÉVt|dÉVt|ddÉVt|dÉVt|dÉVt|dÉVt|d ÉVt|d ÉVt|d d d dÉVt|ddd ddtÉVt|ddÉV|didjot|dt|diÉVndS(Ns reserved[]it transienttvolatileitfinaltstatict protectedtprivatetpublicRvs Field nameRdRpRsField descriptorRftattributes_countsNumber of field attributesit attributes(RRR^R@R RaRHt AttributeInfo(RM((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRTPs  (RURVRT(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRÖOst MethodInfocBseZdÑZRS(ccs-t|ddÉVt|dÉVt|dÉVt|ddÉVt|dÉVt|ddÉVt|dÉVt|d ÉVt|d ÉVt|d ÉVt|d ÉVt|d ÉVt|ddddÉVt|dddddtÉVt|ddÉV|didjot|dt|diÉVndS(Ns reserved[]itstricttabstractitnativeit synchronizedRàRâRäRãRåRvs Method nameRdRpRsMethod descriptorRfRçsNumber of method attributesiRé(RRR^RFR RaRHRè(RM((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRTns( (RURVRT(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRêmsRècBseZdÑZdÑZRS(cGs,ti||å|didd|_dS(Ntattribute_lengthii(RRIRat_size(RMtargs((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRIèsc cs2t|ddddÉVt|ddÉVt|diÉÉ}|djoP|didjo$td |i|difÉÇnt|d dd/ÉVnî|djo‡t|dÉVt|dÉVt|dÉV|didjot|d|diÉVnt|dÉV|didjot |dt |diÉVnt|dÉV|didjot |dt |diÉVq.nß|djo]t|dÉVt |dt|diddÉV|did|didjpt Çn=|djoot|dÉV|didjot |d t |diÉVn|did|did!jpt Çn¡|d"jo|didjpt Çnï|d#jo3|didjpt Çt|d$ddÉVnU|d%joot|d&ÉV|d&idjot |d't|d&iÉVn|did|d&id(jpt ÇnŸ|d)joot|d*ÉV|d*idjot |d+t|d*iÉVn|did|d*id,jpt Çn]|d-jo|didjpt Çn1|didjot|d.|diÉVndS(0Ntattribute_name_indexsAttribute nameRdRpRïsLength of the attributet ConstantValueis&Java: Invalid attribute %s length (%s)tconstantvalue_indexRZRtR[RsRwtCodet 
max_stackt max_localst code_lengthitcodetexception_table_lengthtexception_tableRçRét Exceptionstnumber_of_exceptionstexception_index_tableRut InnerClassestnumber_of_classestclassesit Synthetict SourceFiletsourcefile_indextLineNumberTabletline_number_table_lengthtline_number_tableitLocalVariableTabletlocal_variable_table_lengthtlocal_variable_tablei t Deprecatedtinfo(sLongsFloatsDoublesIntegersString(R^R RcRjRaRtpathR RRHtExceptionTableEntryRèR=tInnerClassesEntrytLineNumberTableEntrytLocalVariableTableEntry(RMt attr_name((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRTìsz         $   $     $   $ (RURVRIRT(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRèés R¥cBseZdeiZdÑZRS(i0ccs]tt|dÉtÉVtt|dÉtÉVtt|dÉtÉVt|dddÉVdS(Ntstart_pctend_pct handler_pct catch_typeRdRu(RR RR^(RM((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRT&s(RURVR^t static_sizeRT(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyR¥#s RµcBsºeZedhdd6ed6fedhdd6ed6fedhdd6ed6feddfed fed fedd fed fed fedfedfedfedff ZRS(tinner_class_info_indexRuRdRetouter_class_info_indextinner_name_indexRps reserved[]iRít interfaceitsuperRàRâRäRãRå(RURVR^tTrueRRtformat(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRµ,s          R∂cBs eZedfedffZRS(Rπt line_number(RURVR Rƒ(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyR∂Bs R∑cBsVeZedfedfedhdd6fedhdd6ed6fedffZRS(RπRPRvRpRdRRftindex(RURVR R^R@Rƒ(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyR∑Hs    tJavaCompiledClassFilecBsÂeZdZeZhdd6dd6d2d6d3d6d d4d 6d d6ZdZhdd6dd6dd6dd6dd6Zh dd6dd 6dd6dd 6d!d"6d#d$6d%d&6d'd(6d)d*6d+d,6d-d.6Zd/ÑZ d0ÑZ d1ÑZ RS(5s$ Root of the .class parser. 
t java_classtidtprogramtcategorytclasstfile_extuapplication/java-vmtmimei iitmin_sizesCompiled Java classRhlæ:˝sJDK 1.1s45.3sJDK 1.2s46.0sJDK 1.3s47.0sJDK 1.4s48.0sJDK 1.5s49.0RpiRsRtiRZiR[iRuiRwiRyi R}i R~i R|i cCsW|di|ijodSd|di|dif}||ijo d|StS(NtmagicsWrong magic signature!s%d.%dt major_versiont minor_versionsUnknown version (%s)(RatMAGICtKNOWN_VERSIONSR√(RMtversion((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pytvalidateês  cCsJd|di|dif}||ijod|i|Sd|SdS(Ns%d.%dR—R“sCompiled Java class, %ssCompiled Java class, version %s(RaR‘(RMR’((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pytcreateDescriptionòsccsWtt|ddÉtÉVt|ddÉVt|ddÉVt|ddÉV|did jot|d |diÉVnt|d d ÉVt|d ÉVt|dÉVt|d dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|ddddÉVt|ddddÉVt|ddÉV|didjo%t |dt|diddÉVnt|d d!ÉV|d idjot |d"t |d iÉVnt|d#d$ÉV|d#idjot |d%t |d#iÉVnt|d&d'ÉV|d&idjot |d(t |d&iÉVndS()NR–sJava compiled class signatureR“sClass format minor versionR—sClass format major versiontconstant_pool_countsSize of the constant poolit constant_pools reserved[]iRíR¡iR¬RàRâRäRãRåt this_classs Class nameRdRut super_classsSuper class nametinterfaces_counts Number of implemented interfacesit interfacest fields_countsNumber of fieldstfieldst methods_countsNumber of methodstmethodsRçsNumber of attributesRé( RR RR RaRXRRR^RHRÖRêRè(RM((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyRTüsH     (sclass(uapplication/java-vmi0( RURVRWRtendiant PARSER_TAGSR”R‘RÅR÷R◊RT(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyR«fs>      N(,RWthachoir_parserRthachoir_core.fieldRRRRRRRRR R R R R RRthachoir_core.endianRthachoir_core.text_handlerRRR√RR5R<RmR@RFRGRHRXR^R\RÖRêRèR¥RµR∂R∑R«(((sE/pentest/enumeration/google/metagoofil/hachoir_parser/program/java.pyt<module><s>d    -O!ï 
24,322
Python
.py
176
134.863636
1,003
0.503334
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,713
exe.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/exe.pyc
—Ú Œ »Mc @sÎdZddklZddklZddklZlZlZl Z l Z l Z l Z ddk lZlZddklZddklZlZlZddklZlZd Zd efd ÑÉYZd eefd ÑÉYZdS(s  Microsoft Windows Portable Executable (PE) file parser. Informations: - Microsoft Portable Executable and Common Object File Format Specification: http://www.microsoft.com/whdc/system/platform/firmware/PECOFF.mspx Author: Victor Stinner Creation date: 2006-08-13 iˇˇˇˇ(t HachoirParser(t LITTLE_ENDIAN(tFieldSettRootSeekableFieldSettUInt16tUInt32tStringtRawBytest PaddingBytes(t textHandlert hexadecimal(t NE_Header(t PE_Headert PE_OptHeadert SectionHeader(t PE_ResourcetNE_VersionInfoNodei2t MSDosHeadercBs eZdZdÑZdÑZRS(i@iccsFt|dddddÉVt|ddÉVt|dd ÉVt|d d ÉVt|d d ÉVt|ddÉVt|ddÉVtt|ddÉtÉVt|ddÉVtt|ddÉtÉVt|ddÉVt|ddÉVt|dddÉVt|dd ÉVt|d!d"ÉVt|dd#dÉVt|d$d%ÉVdS(&NtheaderisFile header (MZ)tcharsettASCIIt size_mod_512sFile size in bytes modulo 512t size_div_512s File size in bytes divide by 512t reloc_entriessNumber of relocation entriest code_offsets.Offset to the code in the file (divided by 16)t needed_memorys$Memory needed to run (divided by 16)t max_memorys,Maximum memory needed to run (divided by 16)t init_ss_sps Initial value of SP:SS registerstchecksumtChecksumt init_cs_ips Initial value of CS:IP registerst reloc_offsets"Offset in file to relocation tabletoverlay_numbersOverlay numbers reserved[]itReservedtoem_idsOEM idtoem_infosOEM infoit next_offsets Offset to next header (PE or NE)(RRR RR R(tself((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyt createFieldss"cCsód|dijodS|didjodS|didj}|oG|did jod Sd |d ijo d jnpdSndS(NiRs"Invalid field 'size_mod_512' valueRisInvalid code offsetRRisInvalid value of checksumiPR$isInvalid value of next_offsett(tvalue(R%tlooks_pe((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pytisValid.s% i(t__name__t __module__t static_sizeR&R*(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyRs tExeFilecBsüeZhdd6dd6dd6dd6dd 6dd6dd6ZeZdÑZdÑZdÑZdÑZdÑZ dÑZ dÑZ dÑZ dÑZ dÑZRS( 
texetidtprogramtcategorytdlltocxtfile_extuapplication/x-dosexectmimei@itmin_sizesMZ.[].{4}[^]it magic_regexs%Microsoft Windows Portable Executablet descriptioncKs<ti|dd|d|i|ÉÉti|||çdS(Ntroot(Rt__init__tNonetaskSizeR(R%tstreamtargs((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyR;Hs%cCsz|iiddÉdjodS|diÉ}|o d|S|iÉo(t|dijod|diSntS( NiitMZs Wrong headertmsdossInvalid MSDOS header: spe_header/nb_sectionsInvalid number of section (%s)(R>t readBytesR*tisPEtMAX_NB_SECTIONR(tTrue(R%terr((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pytvalidateLs  ccs—t|ddÉV|iÉp |iÉo$|di}|i|dtÉn|iÉo xn|iÉD] }|VqiWnR|iÉo xA|iÉD] }|VqñWn%|did}|i|dtÉdS(NRAsMS-DOS program headersmsdos/next_offsettrelativesmsdos/code_offseti(RRCtisNER(tseekBytetFalsetparsePortableExecutabletparseNE_Executable(R%toffsettfield((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyR&Xs       ccsZt|dÉV|i}|iid|É}|o#|i|dÉt|dÉVndS(Nt ne_headertVS_VERSION_INFOi tinfo(R t current_sizeR>t searchBytestseekBitR(R%tstarttaddr((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyRMis  c csjt|dÉV|di}d}|o:t|dd|dÉVd|jo|di}qbng}xNt|diÉD]9}t|dÉ}|V|d io|i|Éq|q|W|id d ÑÉxó|D]è}|i|d iÉ|d i}|oa|i É}|dj o3|d i|jot |||d|dÉVqbt |||ÉVq”q”WdS(Nt pe_headerspe_header/opt_hdr_sizet pe_opt_headertsizeispe_opt_header/resource/rvaspe_header/nb_sections section_hdr[]t phys_sizetkeycSs |diS(tphys_off(R((RO((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyt<lambda>âsR]trva( R R(R<R txrangeRtappendtsortRJtcreateSectionNameRR(R%RZtrsrc_rvatsectionstindextsectiontname((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyRLts0    !cCsÇt|dÉpkt|_|did}d|jo@|ti|ijo)|ii|dÉdjo t |_q{n|iS(Nt_is_pesmsdos/next_offsetiiitPEi( thasattrRKRiR(R R-RZR>RBRE(R%RN((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyRCîs  cCsÇt|dÉpkt|_|did}d|jo@|ti|ijo)|ii|dÉdjo t |_q{n|iS(Nt_is_nesmsdos/next_offsetii@itNEi( RkRKRlR(R 
R-RZR>RBRE(R%RN((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyRIûs  cCsP|iÉpdSd|jo|dipdSnd|jo |dSdS(Nspe_opt_header/resource/sizet section_rsrc(RCR<R((R%((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyt getResource®s     cCsª|iÉoî|dio d}nd}|dig}d|jo"|d}|i|diÉn|dio|idÉnd |d i|ÉfS|iÉod Sd SdS( Nspe_header/is_dlluMicrosoft Windows DLLu%Microsoft Windows Portable Executables pe_header/cpuRYt subsystemspe_header/is_strippedustrippedu%s: %ss, u6New-style Executable (NE) for Microsoft MS Windows 3.xuMS-DOS executable(RCR(tdisplayRatjoinRI(R%ttextRRthdr((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pytcreateDescriptionµs     cCsÔ|iÉoñd}xyt|diÉD]d}|d|}|di}|pq'n||did}|ot||É}q'|}q'W|o|SdSnE|iÉodS|di|did d }|djodS|dS( Nispe_header/nb_sectionssection_hdr[%u]R[R]ismsdos/size_mod_512smsdos/size_div_512ii(RCR`R(tmaxR<RI(R%RZRfRgt section_size((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pytcreateContentSize«s*      (R/R3R4(uapplication/x-dosexeci(sMZ.[].{4}[^]i((sMZ.[].{4}[^]i(R+R,t PARSER_TAGSRtendianR;RGR&RMRLRCRIRoRuRx(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyR.;s&    N(t__doc__thachoir_parserRthachoir_core.endianRthachoir_core.fieldRRRRRRRthachoir_core.text_handlerR R thachoir_parser.program.exe_neR thachoir_parser.program.exe_peR R Rthachoir_parser.program.exe_resRRRDRR.(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe.pyt<module> s4#
9,430
Python
.py
38
246.789474
2,012
0.470977
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,714
python.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/python.pyc
—Ú Œ »Mc @sïdZeZddklZddklZlZlZl Z l Z l Z l Z l Z lZlZlZlZlZddklZddklZddklZlZddklZeoddklZd ÑZnd ÑZd ÑZd ÑZ d ÑZ!dÑZ"dÑZ#dÑZ$dÑZ%dÑZ&dÑZ'dÑZ(dÑZ)dÑZ*dÑZ+dÑZ,defdÑÉYZ-defdÑÉYZ.dS(s≠ Python compiled source code parser. Informations: - Python 2.4.2 source code: files Python/marshal.c and Python/import.c Author: Victor Stinner Creation: 25 march 2005 iˇˇˇˇ(tParser( tFieldSettUInt16tInt32tUInt32tInt64t ParserErrortFloat64tEnumt CharactertBytestRawBytest PascalString8tTimestampUnix32(t LITTLE_ENDIAN(tlong2raw(t textHandlert hexadecimal(tngettext(tdiscCs|i}t|ÉdS(N(tvalueR(tfieldtbytecode((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pytdisassembleBytecodes ccspt|ddÉV|di}d|jot|d|dÉVnto"|idjot|dÉndS(NtlengthtLengthittexttContentt compiled_code(RRR t DISASSEMBLEtnameR(tparentR((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt parseStrings   ccstt|dÉtÉVdS(Ntref(RRR(R((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pytparseStringRef&scCsd|diS(NsString ref: %sR!(tdisplay(R((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pytcreateStringRefDesc(sccst|dÉVdS(NR(R(R((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt parseInt32,sccst|dÉVdS(NR(R(R((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt parseInt64/sccsDt|dÉVx/tt|diÉÉD]}t|dÉVq(WdS(Nt digit_countsdigit[](RtxrangetabsRR(Rtindex((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt parseLong2sccst|dÉVdS(NR(R (R((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt parseFloat9sccst|dÉVdS(NR(R(R((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pytparseBinaryFloat;sccs t|dÉVt|dÉVdS(Ntrealtcomplex(R (R((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt parseComplex=sccs 
t|dÉVt|dÉVdS(NR.R/(R(R((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pytparseBinaryComplex@sccsdt|ddÉV|di}|djotdÉÇnx"t|ÉD]}t|dÉVqHWdS(Ntcounts Item countisInvalid tuple/list countsitem[](RRRR(tObject(RR2R*((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt parseTupleFs   cCs8|di}tdd|É|}d|id|fS(NR2s%s items%s itemss%s: %si(RRt code_info(RR2titems((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pytcreateTupleDescNs ccsfd|_xVtoNt|dÉ}|V|didjoPnt|dÉV|id7_q WdS(sw Format is: (key1, value1, key2, value2, ..., keyn, valuen, NULL) where each keyi and valuei is an object. iskey[]Rt0svalue[]iN(R2tTrueR3R(Rtkey((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt parseDictUs cCsdtdd|iÉ|iS(NsDict: %ss%s keys%s keys(RR2(R((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pytcreateDictDesccsccs‡d|iiÉjoVt|ddÉVt|ddÉVt|ddÉVt|dd ÉVt|d ÉVnùd |iiÉjoEt|ddÉVt|ddÉVt|dd ÉVt|d ÉVnBt|ddÉVt|ddÉVt|dd ÉVt|d ÉVt|d ÉVt|d ÉVt|dÉVt|dÉVd|iiÉjo t|dÉVt|dÉVnt|dÉVt|dÉVd |iiÉjot|ddÉVnt|ddÉVt|dÉVdS(Nit arg_countsArgument counttkwonlyargcountsKeyword only argument countt nb_localssNumber of local variablest stack_sizes Stack sizetflagsiRtconststnamestvarnamesitfreevarstcellvarstfilenameRt firstlinenosFirst line numbertlnotab(troott getVersionRRR3(R((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt parseCodegs8R3cBs¡eZhdPd6dQd6dRd6dSd 6dTd6dUd6dVd6deddOfd6deddOfd6deddOfd6deddOfd 6d!ed"dOfd#6d$ed%dOfd&6d'e d(dOfd)6d*e d+dOfd,6d-e d.dOfd/6d0e d1dOfd26d3e d4e fd56d6e d7efd86d9e d:efd;6d<e d=efd>6d?e d@efdA6dBedCefdD6dEedFdOfdG6ZdHÑZdIÑZdJÑZdKÑZdLÑZdMÑZdNÑZRS(WtnulltNULLR8tnonetNonetNtfalsetFalsetFttrueR9tTt stop_itertStopItertStellipsistELLIPSISt.tunknowntUnknownt?tint32Rtitint64RtItfloattFloattft bin_floats Binary floattgR/tComplextxt bin_complexsBinary complextytlongtLongtltstringtStringtstinternedtInternedtttunicodetUnicodetut string_refs String 
reftRttupletTuplet(tlisttListt[tsettSett<t frozensets Frozen sett>tdicttDictt{tcodetCodetcc s6tià|||çàdi}|àijotd|ÉÇnài|à_|pàidà_n|djoàià_n§|djoáfd Üà_nÅ|d jod Ñà_nd|d jod Ñà_nG|djoài à_n*|djoài à_ài à_ ndS(NRsUnknown bytecode: "%s"iRoRaRcRfRhcs àdiS(R(R((tself(sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt<lambda>∞sRVcSstS((R9(((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyRç≤sRTcSstS((RS(((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyRç¥sRjRlRrRuRx(RaRcRfRh(RjRl(RrRuRx( Rt__init__Rt bytecode_infoRR5t_nametcreateValueLongt createValuetcreateValueComplextcreateValueStringtcreateDisplayStringt createDisplay(RåRRtkwRâ((RåsG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyRé•s(        cCs!d|jo |diSdSdS(NRt(R(Rå((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyRîªs  cCs!d|jo |diSdSdS(NRs(empty)(R#(Rå((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyRï¡s  cCsÇ|didj}t|diÉ}d}x=t|dddÉD]%}|dK}||d|i7}qCW|o | }n|S(NR'iiiˇˇˇˇis digit[%u](RR)R((Råt is_negativeR2ttotalR*((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyRë«s  cCs'tt|diÉt|diÉÉS(NR.R/(R/RdR(Rå((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyRì“sccsIt|ddÉV|id}|o x||ÉD] }|Vq2WndS(NRtBytecodei(R R5(RåtparserR((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt createFields◊s   cCs.|id}|o ||ÉS|idSdS(Nii(R5(Råtcreate((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pytcreateDescriptionfis  N(snullNsNULLN(snoneNsNoneN(sfalseNsFalseN(strueNsTrueN(RWNRXN(RZNR[N(sunknownNsUnknownN(t__name__t __module__RPR%R&R,R-R0R1R+R R"R$R4R7R;R<RLRèRéRîRïRëRìRùRü(((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyR3ás@     tPythonCompiledFilecBsTeZhdd6dd6dVd6dWd 6d d 6ZeZh dXd6dYd6dZd6d[d6d\d6d]d6d^d6d_d 
6d`d"6dad%6dbd'6dcd)6ddd+6ded-6dfd.6dgd06dhd26did56djd76dkd96dld;6dmd=6dnd?6dodA6dpdC6dqdE6drdG6dsdI6dtdK6dudM6dvdO6dwdQ6ZedRÑeiÉDÉÉZdSÑZ dTÑZ dUÑZ RS(xtpythontidtprogramtcategorytpyctpyotfile_exti itmin_sizes(Compiled Python script (.pyc/.pyo files)t descriptions1.5iiôNs2.0iiá∆s2.1ii*Îs2.2ii-Ìs2.3a0ii;ÚiEÚs2.4a0iiYÚs2.4a3icÚs2.4b1imÚs2.5a0iiwÚs2.5a0 (ast-branch)iÅÚs 2.5a0 (with)iãÚs2.5a0 (WITH_CLEANUP opcode)iåÚs2.5b3iïÚiüÚs2.5c1i©Ús2.5c2i≥Ús 3.0 (3000)ii∏ s 3.0 (3010)i¬ s 3.0 (3020)ià s 3.0 (3030)i÷ s 3.0 (3040)i‡ s 3.0 (3050)iÍ s 3.0 (3060)iÙ s 3.0 (3070)i˛ s 3.0 (3080)i s 3.0 (3090)i s 3.0 (3100)i s 3.0 (3102)i s3.0a4i& s3.0a5i: s 3.0a5 unicodei; ccsNxG|]@\}}t|tdÉd>BtdÉd>BtÉ|dfVqWdS(s is iiN(RtordR(t.0tmagicR((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pys <genexpr>s cCsz|iidd|iÉ}||ijo d|S|iid dÉdjodS|iid dÉd jod StS( NiisUnknown version (%s)iis sWrong signature (\r\n)iRãs!First object bytecode is not codeii@(tstreamtreadBitstendiantMAGICt readBytesR9(Råt signature((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pytvalidate!s cCsJt|dÉp3|iidd|iÉ}|i|d|_n|iS(Ntversioniii(thasattrRØR∞R±R≤R∂(RåR¥((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyRK+sccsCtt|dddÉ|iÉVt|ddÉVt|dÉVdS(NR¥is!Python file signature and versiont timestampt Timestamptcontent(RR t STR_MAGICR R3(Rå((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyRù1s (RßR®iH(s1.5i(s2.0i(s2.1i(s2.2i(s2.3a0i(s2.3a0i(s2.4a0i(s2.4a3i(s2.4b1i(s2.5a0i(s2.5a0 (ast-branch)i(s 2.5a0 (with)i(s2.5a0 (WITH_CLEANUP opcode)i(s2.5b3i(s2.5b3i(s2.5c1i(s2.5c2i(s 3.0 (3000)i(s 3.0 (3010)i(s 3.0 (3020)i(s 3.0 (3030)i(s 3.0 (3040)i(s 3.0 (3050)i(s 3.0 (3060)i(s 3.0 (3070)i(s 3.0 (3080)i(s 3.0 (3090)i(s 3.0 (3100)i(s 3.0 (3102)i(s3.0a4i(s3.0a5i(s 3.0a5 unicodei( R†R°t PARSER_TAGSRR±R≤RÜt iteritemsRªRµRKRù(((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyR¢Âs\   
N(/t__doc__RSRthachoir_parserRthachoir_core.fieldRRRRRRRRR R R R R thachoir_core.endianRthachoir_core.bitsRthachoir_core.text_handlerRRthachoir_core.i18nRRRR R"R$R%R&R+R,R-R0R1R4R7R;R<RLR3R¢(((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/python.pyt<module> s6X                ^
14,498
Python
.py
72
200.097222
1,799
0.455875
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,715
exe_res.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/exe_res.pyc
—Ú Œ »Mc @sédZddklZlZlZlZlZlZlZl Z l Z l Z l Z l Z lZlZlZddklZlZlZddklZlZlZlZddklZddklZdZdZeZ d efd ÑÉYZ!hd d 6d d6dd6dd6Z"dZ#hdd6dd 6dd6dd6dd6Z$dZ%dZ&hdd 6dd6dd6dd6dd6dd6Z'h d d 6d!d6d"d6d#d6d$d6d%d&6d'd6d(d)6d*d+6d,d-6Z(hd.d 6d/d6d0d6Z)d1efd2ÑÉYZ*d3efd4ÑÉYZ+d5ÑZ,d6ÑZ-d7efd8ÑÉYZ.d9ÑZ/hdld 6dmd6d>d?e-fd6dnd6dod6dDdEe/fd&6dpd6dqd)6drd+6dsd-6dtdO6dudR6dvdU6dVdWe,fdX6Z1dYefdZÑÉYZ2d[efd\ÑÉYZ3d]efd^ÑÉYZ4d_efd`ÑÉYZ5daefdbÑÉYZ6dcefddÑÉYZ7deefdfÑÉYZ8dgefdhÑÉYZ9diefdjÑÉYZ:dkS(ws… Parser for resource of Microsoft Windows Portable Executable (PE). Documentation: - Wine project VS_FIXEDFILEINFO structure, file include/winver.h Author: Victor Stinner Creation date: 2007-01-19 iˇˇˇˇ(tFieldSett ParserErrortEnumtBittBitstSeekableFieldSettUInt16tUInt32tTimestampUnix32tRawBytest PaddingBytest NullBytestNullBitstCStringtString(t textHandlertfilesizeHandlert hexadecimal(t createDictt paddingSizet alignValuet makePrintable(tHACHOIR_ERRORS(tBitmapInfoHeaderii,tVersioncBs eZdZdÑZdÑZRS(i ccs8tt|ddÉtÉVtt|ddÉtÉVdS(NtminorsMinor version numbertmajorsMajor version number(RRR(tself((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyt createFieldsscCs |dit|diÉdS(NRRi'(tvaluetfloat(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyt createValues(t__name__t __module__t static_sizeRR(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRs tDOSis OS/2 16-bitis OS/2 32-bitis Windows NTiitBasesWindows 16-bitsPresentation Manager 16-bitsPresentation Manager 32-bitsWindows 32-bitt ApplicationtDLLtDrivertFonttVXDsStatic libraryitPrintertKeyboardtLanguagetDisplaytMousetNetworkitSystemt InstallableitSoundi tCommunicationsi tRastertVectortTrueTypetVersionInfoBinarycBseZdÑZRS(ccsÚtt|ddÉtÉV|didjotdÉÇnt|ddÉVt|ddÉVt|d d ÉVt|d d ÉVt|d dÉVtt|dÉtÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|dÉVt|ddÉVttt |dÉtÉt ÉVttt |dÉtÉt ÉVttt|dÉtÉt ÉVtt|dÉtÉ}|it jot|tÉ}n$|itjot|tÉ}n|Vt|dÉVt|dÉVdS(Ntmagics#File information magic (0xFEEF04BD)lΩfi}s%EXE resource: invalid 
file info magict struct_versStructure version (1.0)t file_ver_mssFile version MSt file_ver_lssFile version LStproduct_ver_mssProduct version MStproduct_ver_lssProduct version LStfile_flags_masktdebugt prereleasetpatchedt private_buildt info_inferredt special_buildtreservedit file_os_majort file_os_minort file_typet file_subfiletdate_mstdate_ls(RRRRRRRR RRt MAJOR_OS_NAMEt MINOR_OS_NAMEt FILETYPE_NAMEtFILETYPE_DRIVERtDRIVER_SUBTYPE_NAMEt FILETYPE_FONTtFONT_SUBTYPE_NAMER(Rtfield((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRQs6   (R R!R(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyR7PstVersionInfoNodecBs@eZdZhdd6dd6ZedÑZdÑZdÑZRS(itbinaryitstringcCs=ti|||Ét|didÉd|_||_dS(Ntsizeii(Rt__init__RRt_sizetis_32bit(RtparenttnameRZ((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRXwsccs¥t|ddÉVt|dÉVtt|dÉ|iÉVt|dddÉVt|idd É}|ot|d |ÉVn|di}|o∂|di|ijo9|i o|d 9}nt |d |ddd dÉVqI|didjo=t |d d|dÉV|didjo t |_ qEqIt |d |ÉVnx4d|i|idjot|d|i ÉVqLW|i|id}|ot|d |ÉVndS(NRWsNode size (in bytes)t data_sizettypeR\tcharsets UTF-16-LEiis padding[]iRttruncatettVS_VERSION_INFOsvalue/file_flags_maskii snode[](RRt TYPE_NAMER Rt current_sizeR Rt TYPE_STRINGRZRR7tFalseR RWRT(RRW((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyR|s0  !cCsRd|di}|di|ijo&d|jo|d|di7}n|S(NsVersion info node: %sR\R^Rs=%s(RRe(Rttext((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pytcreateDescriptionòs$(R R!ReRctTrueRXRRh(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRTps   ccst|dÉVdS(Nsnode[](RT(R[((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pytparseVersionInfoûsccsBt|dÉV|i|id}|ot|d|ÉVndS(Nt bmp_headeritraw(RRWRdR (R[RW((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyt parseIcon°st WindowsStringcBs#eZdÑZdÑZdÑZRS(ccsHt|ddÉV|did}|ot|d|ddÉVndS(NtlengthsNumber of 16-bit charactersiRgR_s 
UTF-16-LE(RRR(RRW((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyR®scCs!d|jo |diSdSdS(NRgu(R(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRÆs  cCst|iddtddÉS(NsUTF-8t to_unicodetquotet"(RRRi(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyt createDisplay¥s(R R!RRRs(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRnßs  ccs$x|ipt|dÉVqWdS(Nsstring[](teofRn(R[((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pytparseStringTable∑s scursor[]tCursorsbitmap[]tBitmapsicon[]tIconsmenu[]tMenusdialog[]tDialogsstring_table[]s String tables font_dir[]sFont directorysfont[]saccelerators[]t Acceleratorss raw_res[]sUnformatted resource datasmessage_table[]s Message tablei sgroup_cursor[]s Group cursori s group_icon[]s Group iconit version_infosVersion informationitEntrycBs,eZdZddÑZdÑZdÑZRS(iicCs ti|||É||_dS(N(RRXtinode(RR[R\R~((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRXœsccsNtt|dÉtÉVtt|dÉÉVt|dÉVt|ddÉVdS(NtrvaRWtcodepageREi(RRRRR (R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyR”scCs)d|idi|di|difS(NsEntry #%u: offset=%s size=%stoffsetRRW(R~Rtdisplay(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRhŸsiÄN(R R!R"tNoneRXRRh(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyR}Ãs  t NameOffsetcBseZdÑZRS(ccs1t|dÉVt|ddÉVt|dÉVdS(NR\RÅitis_name(RRR(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRfis(R R!R(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRÑ›st IndexOffsetcBs5eZeedÉZddÑZdÑZdÑZRS(icCs ti|||É||_dS(N(RRXtres_type(RR[R\Rá((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRXÊsccs=tt|dÉ|iÉVt|ddÉVt|dÉVdS(NR^RÅit is_subdir(RRt 
TYPE_DESCRR(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRÍscCsK|diod|di|difSd|di|difSdS(NRàsSub-directory: %s at %sR^RÅsIndex: ID %s at %s(RRÇ(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRhÔsN( R R!Rt RESOURCE_TYPERâRÉRXRRh(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRÜ„s  tResourceContentcBs8eZddÑZdÑZdÑZdÑZdÑZRS(cCspti|||d|didÉ||_|iÉ}|tjot|\|_}|_n d|_dS(NRWi( RRXRtentryt getResTypeRät_namet_parserRÉ(RR[R\RåRWRát description((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRXˆs $   cCs|iidiS(NRÅ(RåR~R(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pytgetResIDˇscCs |iiiS(N(RåR~Rá(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRçsccsI|io#x8|i|ÉD] }|VqWnt|d|idÉVdS(Ntcontenti(RèR RW(RRS((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRs   cCsd|iÉ|iÉfS(NsResource #%u content: type=%s(RëRç(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRh sN(R R!RÉRXRëRçRRh(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRãıs    tHeadercBs eZdZdÑZdÑZRS(iiccsgt|ddÉVt|dÉVt|ddÉVt|ddÉVt|dd ÉVt|d d ÉVdS( Ntoptionsit creation_datetmaj_vers Major versiontmin_vers Minor versiontnb_namesNumber of named entriestnb_indexsNumber of indexed entries(R RR(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRs cCs≠d}g}|dio|id|diÉn|dio|id|diÉn|dio|i|diÉn|od|di|ÉfS|SdS( NsResource headerRòs%u nameRôs%u indexRïs%s: %ss, (RtappendRÇtjoin(RRgtinfo((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRhsiÄ(R R!R"RRh(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRìs tNamecBseZdÑZRS(ccsJt|dÉVt|didÉ}|ot|d|ddÉVndS(NRoiˇR\R_sUTF-16LE(RtminRR(RRW((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyR)s(R 
R!R(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRù(st DirectorycBs&eZddÑZdÑZdÑZRS(cCsLti|||É|di|di}ti|d|_||_dS(Nsheader/nb_namesheader/nb_indexi@(RRXRRìR"RYRá(RR[R\Rát nb_entries((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRX0sccsÿt|dÉVt|dijotd|diÉÇnt|dijotd|diÉÇn|d}x)t|diÉD]}t|dÉVqäWx/t|diÉD]}t|d |iÉVq∂WdS( Ntheadersheader/nb_names)EXE resource: invalid number of name (%s)sheader/nb_indexs*EXE resource: invalid number of index (%s)Ròsname[]Rôsindex[]( RìtMAX_NAME_PER_HEADERRRtMAX_INDEX_PER_HEADERtxrangeRÑRÜRá(Rthdrtindex((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyR6s cCs |diS(NR°(Rê(R((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRhFsN(R R!RÉRXRRh(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRü/s  t PE_ResourcecBs#eZdÑZdÑZdÑZRS(cCs&ti|||d|É||_dS(NRW(RRXtsection(RR[R\R®RW((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRXJsccs†g}x6|idÉD]%}|dio|i|ÉqqWxZ|D]R}|i|diÉ|djo|di}n |i}t|||ÉVqFWdS(NR¶RàRÅiR^(tarrayRRötseekByteRáRü(Rt directoryR\tdepthtindexesR¶Rá((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pytparseSubNs  c cs¿d}t|dÉ}|V|g}|g}x¯|o|d7}t|jo|id|ÉPng}x£t|ÉD]ï\}}d||f}yJxC|i|||ÉD],}|itjo|i|Én|Vq´WWqvtj o } |id|| fÉqvXqvW|}|i|Éq/Wg} xG|D]?} x6| i dÉD]%}|di p| i|ÉqJqJWq4Wg} xc| D][} | d i }|djoqÑn|i |Ét |d d | É}|V| i|ÉqÑW| id d ÑÉxì| D]ã}yS|ii|di É}|i |dtÉ}|o |Vnt|d|ÉVWq˝tj o#} |id|i| fÉq˝Xq˝W|i|id}|ot|d|ÉVndS(Nitrootis;EXE resource: depth too high (%s), stop parsing directoriessdirectory[%u][%u][]s!Unable to create directory %s: %sR¶RàRÅsentry[]R~tkeycSs |diS(R(R(Rå((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyt<lambda>ásRtrelatives content[]sError when parsing entry %s: %sit padding_end(Rüt MAX_DEPTHterrort enumerateRÆt __class__RöRtextendR©RRÉR™R}tsortR®trva2fileRfRãtwarningtpathRWRdR (RR¨tsubdirtsubdirstalldirst 
newsubdirsR¶R\RSterrt resourcesR´tentriestresourceRÅRåtpaddingRW((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyR]sp           #(R R!RXRÆR(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRßIs  tNE_VersionInfoNodecBs=eZdZhdd6dd6ZdÑZdÑZdÑZRS(iRUiRVcCs4ti|||Ét|didÉd|_dS(NRWii(RRXRRRY(RR[R\((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRXüsccs)t|ddÉVt|dÉVt|dddÉVt|iddÉ}|ot|d |ÉVn|di}|oK|did jot|d d|dÉVqƒt|d |ddÉVnx.d |i|idjot |d ÉVq«W|i|id}|ot|d |ÉVndS(NRWsNode size (in bytes)R]R\R_s ISO-8859-1iis padding[]RbRi snode[]( RR RRdR RR7RRWR∆(RRW((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyR£s" cCsd|di}|S(NsVersion info node: %sR\(R(RRg((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyRh∏s(R R!ReRcRXRRh(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyR∆òs   N(scursor[]RvN(sbitmap[]sBitmapN(smenu[]RyN(sdialog[]RzN(s font_dir[]sFont directoryN(sfont[]R(N(saccelerators[]s AcceleratorsN(s raw_res[]sUnformatted resource dataN(smessage_table[]s Message tableN(sgroup_cursor[]s Group cursorN(s group_icon[]s Group iconN(;t__doc__thachoir_core.fieldRRRRRRRRRR R R R R Rthachoir_core.text_handlerRRRthachoir_core.toolsRRRRthachoir_core.errorRthachoir_parser.common.win32RR¥R£R¢RRLt MINOR_OS_BASERMRORQRNRPRRR7RTRjRmRnRuRÉRäR}RÑRÜRãRìRùRüRßR∆(((sH/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_res.pyt<module> sñd"      .   O
20,544
Python
.py
68
301.058824
1,719
0.448694
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,716
exe_ne.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/exe_ne.py
from hachoir_core.field import (FieldSet,
    Bit, UInt8, UInt16, UInt32, Bytes,
    PaddingBits, PaddingBytes, NullBits, NullBytes)
from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler

class NE_Header(FieldSet):
    """
    Fixed-size header of a "New Executable" (NE) file.

    The header is exactly 64 bytes; createFields() yields each field in the
    on-disk order, so the sequence and widths below ARE the format definition
    and must not be reordered.
    """
    # Header size in bits: 64 bytes * 8.
    static_size = 64*8

    def createFields(self):
        """Yield the NE header fields in their on-disk order."""
        # 2-byte magic ("NE") followed by linker version info.
        yield Bytes(self, "signature", 2, "New executable signature (NE)")
        yield UInt8(self, "link_ver", "Linker version number")
        yield UInt8(self, "link_rev", "Linker revision number")
        yield UInt16(self, "entry_table_ofst", "Offset to the entry table")
        yield UInt16(self, "entry_table_size", "Length (in bytes) of the entry table")
        yield PaddingBytes(self, "reserved[]", 4)

        # Program flags: one 16-bit word decoded bit by bit.
        yield Bit(self, "is_dll", "Is a dynamic-link library (DLL)?")
        yield Bit(self, "is_win_app", "Is a Windows application?")
        yield PaddingBits(self, "reserved[]", 9)
        yield Bit(self, "first_seg_code", "First segment contains code that loads the application?")
        yield NullBits(self, "reserved[]", 1)
        yield Bit(self, "link_error", "Load even if linker detects errors?")
        yield NullBits(self, "reserved[]", 1)
        yield Bit(self, "is_lib", "Is a library module?")

        yield UInt16(self, "auto_data_seg", "Automatic data segment number")
        # filesizeHandler() only changes how the value is displayed (as a
        # human-readable size); the underlying field is still a UInt16.
        yield filesizeHandler(UInt16(self, "local_heap_size", "Initial size (in bytes) of the local heap"))
        yield filesizeHandler(UInt16(self, "stack_size", "Initial size (in bytes) of the stack"))
        # Initial CS:IP / SS:SP register pairs, shown in hexadecimal.
        yield textHandler(UInt32(self, "cs_ip", "Value of CS:IP"), hexadecimal)
        yield textHandler(UInt32(self, "ss_sp", "Value of SS:SP"), hexadecimal)

        # Table entry counts and offsets (offsets are relative to this header).
        yield UInt16(self, "nb_entry_seg_tab", "Number of entries in the segment table")
        yield UInt16(self, "nb_entry_modref_tab", "Number of entries in the module-reference table")
        yield filesizeHandler(UInt16(self, "size_nonres_name_tab", "Number of bytes in the nonresident-name table"))
        yield UInt16(self, "seg_tab_ofs", "Segment table offset")
        yield UInt16(self, "rsrc_ofs", "Resource offset")
        yield UInt16(self, "res_name_tab_ofs", "Resident-name table offset")
        yield UInt16(self, "mod_ref_tab_ofs", "Module-reference table offset")
        yield UInt16(self, "import_tab_ofs", "Imported-name table offset")
        yield UInt32(self, "non_res_name_tab_ofs", "Nonresident-name table offset")
        yield UInt16(self, "nb_mov_ent_pt", "Number of movable entry points")
        yield UInt16(self, "log2_sector_size", "Log2 of the segment sector size")
        yield UInt16(self, "nb_rsrc_seg", "Number of resource segments")

        # OS / loader flags: another bit-decoded 16-bit word.
        yield Bit(self, "unknown_os_format", "Operating system format is unknown")
        yield PaddingBits(self, "reserved[]", 1)
        yield Bit(self, "os_windows", "Operating system is Microsoft Windows")
        yield NullBits(self, "reserved[]", 6)
        yield Bit(self, "is_win20_prot", "Is Windows 2.x application running in version 3.x protected mode")
        yield Bit(self, "is_win20_font", "Is Windows 2.x application supporting proportional fonts")
        yield Bit(self, "fast_load", "Contains a fast-load area?")
        yield NullBits(self, "reserved[]", 4)

        # Fast-load area location, both expressed in sectors (see
        # log2_sector_size above for the sector size).
        yield UInt16(self, "fastload_ofs", "Fast-load area offset (in sector)")
        yield UInt16(self, "fastload_size", "Fast-load area length (in sector)")
        yield NullBytes(self, "reserved[]", 2)
        yield textHandler(UInt16(self, "win_version", "Expected Windows version number"), hexadecimal)
3,543
Python
.py
50
62.46
116
0.674419
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,717
java.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/java.py
""" Compiled Java classes parser. Author: Thomas de Grenier de Latour (TGL) <degrenier@easyconnect.fr> Creation: 2006/11/01 Last-update: 2006/11/06 Introduction: * This parser is for compiled Java classes, aka .class files. What is nice with this format is that it is well documented in the official Java VM specs. * Some fields, and most field sets, have dynamic sizes, and there is no offset to directly jump from an header to a given section, or anything like that. It means that accessing a field at the end of the file requires that you've already parsed almost the whole file. That's not very efficient, but it's okay given the usual size of .class files (usually a few KB). * Most fields are just indexes of some "constant pool" entries, which holds most constant datas of the class. And constant pool entries reference other constant pool entries, etc. Hence, a raw display of this fields only shows integers and is not really understandable. Because of that, this parser comes with two important custom field classes: - CPInfo are constant pool entries. They have a type ("Utf8", "Methodref", etc.), and some contents fields depending on this type. They also have a "__str__()" method, which returns a syntetic view of this contents. - CPIndex are constant pool indexes (UInt16). It is possible to specify what type of CPInfo they are allowed to points to. They also have a custom display method, usually printing something like "-> foo", where foo is the str() of their target CPInfo. References: * The Java Virtual Machine Specification, 2nd edition, chapter 4, in HTML: http://java.sun.com/docs/books/vmspec/2nd-edition/html/ClassFile.doc.html => That's the spec i've been implementing so far. I think it is format version 46.0 (JDK 1.2). * The Java Virtual Machine Specification, 2nd edition, chapter 4, in PDF: http://java.sun.com/docs/books/vmspec/2nd-edition/ClassFileFormat.pdf => don't trust the URL, this PDF version is more recent than the HTML one. 
It highligths some recent additions to the format (i don't know the exact version though), which are not yet implemented in this parser. * The Java Virtual Machine Specification, chapter 4: http://java.sun.com/docs/books/vmspec/html/ClassFile.doc.html => describes an older format, probably version 45.3 (JDK 1.1). TODO/FIXME: * Google for some existing free .class files parsers, to get more infos on the various formats differences, etc. * Write/compile some good tests cases. * Rework pretty-printing of CPIndex fields. This str() thing sinks. * Add support of formats other than 46.0 (45.3 seems to already be ok, but there are things to add for later formats). * Make parsing robust: currently, the parser will die on asserts as soon as something seems wrong. It should rather be tolerant, print errors/warnings, and try its best to continue. Check how error-handling is done in other parsers. * Gettextize the whole thing. * Check whether Float32/64 are really the same as Java floats/double. PEP-0754 says that handling of +/-infinity and NaN is very implementation-dependent. Also check how this values are displayed. * Make the parser edition-proof. For instance, editing a constant-pool string should update the length field of it's entry, etc. Sounds like a huge work. """ from hachoir_parser import Parser from hachoir_core.field import ( ParserError, FieldSet, StaticFieldSet, Enum, RawBytes, PascalString16, Float32, Float64, UInt8, UInt16, Int32, UInt32, Int64, Bit, NullBits ) from hachoir_core.endian import BIG_ENDIAN from hachoir_core.text_handler import textHandler, hexadecimal ############################################################################### def parse_flags(flags, flags_dict, show_unknown_flags=True, separator=" "): """ Parses an integer representing a set of flags. The known flags are stored with their bit-mask in a dictionnary. Returns a string. 
""" flags_list = [] mask = 0x01 while mask <= flags: if flags & mask: if mask in flags_dict: flags_list.append(flags_dict[mask]) elif show_unknown_flags: flags_list.append("???") mask = mask << 1 return separator.join(flags_list) ############################################################################### code_to_type_name = { 'B': "byte", 'C': "char", 'D': "double", 'F': "float", 'I': "int", 'J': "long", 'S': "short", 'Z': "boolean", 'V': "void", } def eat_descriptor(descr): """ Read head of a field/method descriptor. Returns a pair of strings, where the first one is a human-readable string representation of the first found type, and the second one is the tail of the parameter. """ array_dim = 0 while descr[0] == '[': array_dim += 1 descr = descr[1:] if (descr[0] == 'L'): try: end = descr.find(';') except: raise ParserError("Not a valid descriptor string: " + descr) type = descr[1:end] descr = descr[end:] else: global code_to_type_name try: type = code_to_type_name[descr[0]] except KeyError: raise ParserError("Not a valid descriptor string: %s" % descr) return (type.replace("/", ".") + array_dim * "[]", descr[1:]) def parse_field_descriptor(descr, name=None): """ Parse a field descriptor (single type), and returns it as human-readable string representation. """ assert descr (type, tail) = eat_descriptor(descr) assert not tail if name: return type + " " + name else: return type def parse_method_descriptor(descr, name=None): """ Parse a method descriptor (params type and return type), and returns it as human-readable string representation. 
""" assert descr and (descr[0] == '(') descr = descr[1:] params_list = [] while descr[0] != ')': (param, descr) = eat_descriptor(descr) params_list.append(param) (type, tail) = eat_descriptor(descr[1:]) assert not tail params = ", ".join(params_list) if name: return "%s %s(%s)" % (type, name, params) else: return "%s (%s)" % (type, params) def parse_any_descriptor(descr, name=None): """ Parse either a field or method descriptor, and returns it as human- readable string representation. """ assert descr if descr[0] == '(': return parse_method_descriptor(descr, name) else: return parse_field_descriptor(descr, name) ############################################################################### class FieldArray(FieldSet): """ Holds a fixed length array of fields which all have the same type. This type may be variable-length. Each field will be named "foo[x]" (with x starting at 0). """ def __init__(self, parent, name, elements_class, length, **elements_extra_args): """Create a FieldArray of <length> fields of class <elements_class>, named "<name>[x]". 
The **elements_extra_args will be passed to the constructor of each field when yielded.""" FieldSet.__init__(self, parent, name) self.array_elements_class = elements_class self.array_length = length self.array_elements_extra_args = elements_extra_args def createFields(self): for i in range(0, self.array_length): yield self.array_elements_class(self, "%s[%d]" % (self.name, i), **self.array_elements_extra_args) class ConstantPool(FieldSet): """ ConstantPool is similar to a FieldArray of CPInfo fields, but: - numbering starts at 1 instead of zero - some indexes are skipped (after Long or Double entries) """ def __init__(self, parent, name, length): FieldSet.__init__(self, parent, name) self.constant_pool_length = length def createFields(self): i = 1 while i < self.constant_pool_length: name = "%s[%d]" % (self.name, i) yield CPInfo(self, name) i += 1 if self[name].constant_type in ("Long", "Double"): i += 1 ############################################################################### class CPIndex(UInt16): """ Holds index of a constant pool entry. """ def __init__(self, parent, name, description=None, target_types=None, target_text_handler=(lambda x: x), allow_zero=False): """ Initialize a CPIndex. 
- target_type is the tuple of expected type for the target CPInfo (if None, then there will be no type check) - target_text_handler is a string transformation function used for pretty printing the target str() result - allow_zero states whether null index is allowed (sometimes, constant pool index is optionnal) """ UInt16.__init__(self, parent, name, description) if isinstance(target_types, str): self.target_types = (target_types,) else: self.target_types = target_types self.allow_zero = allow_zero self.target_text_handler = target_text_handler self.getOriginalDisplay = lambda: self.value def createDisplay(self): cp_entry = self.get_cp_entry() if self.allow_zero and not cp_entry: return "ZERO" assert cp_entry return "-> " + self.target_text_handler(str(cp_entry)) def get_cp_entry(self): """ Returns the target CPInfo field. """ assert self.value < self["/constant_pool_count"].value if self.allow_zero and not self.value: return None cp_entry = self["/constant_pool/constant_pool[%d]" % self.value] assert isinstance(cp_entry, CPInfo) if self.target_types: assert cp_entry.constant_type in self.target_types return cp_entry ############################################################################### class CPInfo(FieldSet): """ Holds a constant pool entry. Entries all have a type, and various contents fields depending on their type. 
""" def createFields(self): yield Enum(UInt8(self, "tag"), self.root.CONSTANT_TYPES) if self["tag"].value not in self.root.CONSTANT_TYPES: raise ParserError("Java: unknown constant type (%s)" % self["tag"].value) self.constant_type = self.root.CONSTANT_TYPES[self["tag"].value] if self.constant_type == "Utf8": yield PascalString16(self, "bytes", charset="UTF-8") elif self.constant_type == "Integer": yield Int32(self, "bytes") elif self.constant_type == "Float": yield Float32(self, "bytes") elif self.constant_type == "Long": yield Int64(self, "bytes") elif self.constant_type == "Double": yield Float64(self, "bytes") elif self.constant_type == "Class": yield CPIndex(self, "name_index", "Class or interface name", target_types="Utf8") elif self.constant_type == "String": yield CPIndex(self, "string_index", target_types="Utf8") elif self.constant_type == "Fieldref": yield CPIndex(self, "class_index", "Field class or interface name", target_types="Class") yield CPIndex(self, "name_and_type_index", target_types="NameAndType") elif self.constant_type == "Methodref": yield CPIndex(self, "class_index", "Method class name", target_types="Class") yield CPIndex(self, "name_and_type_index", target_types="NameAndType") elif self.constant_type == "InterfaceMethodref": yield CPIndex(self, "class_index", "Method interface name", target_types="Class") yield CPIndex(self, "name_and_type_index", target_types="NameAndType") elif self.constant_type == "NameAndType": yield CPIndex(self, "name_index", target_types="Utf8") yield CPIndex(self, "descriptor_index", target_types="Utf8") else: raise ParserError("Not a valid constant pool element type: " + self["tag"].value) def __str__(self): """ Returns a human-readable string representation of the constant pool entry. It is used for pretty-printing of the CPIndex fields pointing to it. 
""" if self.constant_type == "Utf8": return self["bytes"].value elif self.constant_type in ("Integer", "Float", "Long", "Double"): return self["bytes"].display elif self.constant_type == "Class": class_name = str(self["name_index"].get_cp_entry()) return class_name.replace("/",".") elif self.constant_type == "String": return str(self["string_index"].get_cp_entry()) elif self.constant_type == "Fieldref": return "%s (from %s)" % (self["name_and_type_index"], self["class_index"]) elif self.constant_type == "Methodref": return "%s (from %s)" % (self["name_and_type_index"], self["class_index"]) elif self.constant_type == "InterfaceMethodref": return "%s (from %s)" % (self["name_and_type_index"], self["class_index"]) elif self.constant_type == "NameAndType": return parse_any_descriptor( str(self["descriptor_index"].get_cp_entry()), name=str(self["name_index"].get_cp_entry())) else: # FIXME: Return "<error>" instead of raising an exception? raise ParserError("Not a valid constant pool element type: " + self["tag"].value) ############################################################################### # field_info { # u2 access_flags; # u2 name_index; # u2 descriptor_index; # u2 attributes_count; # attribute_info attributes[attributes_count]; # } class FieldInfo(FieldSet): def createFields(self): # Access flags (16 bits) yield NullBits(self, "reserved[]", 8) yield Bit(self, "transient") yield Bit(self, "volatile") yield NullBits(self, "reserved[]", 1) yield Bit(self, "final") yield Bit(self, "static") yield Bit(self, "protected") yield Bit(self, "private") yield Bit(self, "public") yield CPIndex(self, "name_index", "Field name", target_types="Utf8") yield CPIndex(self, "descriptor_index", "Field descriptor", target_types="Utf8", target_text_handler=parse_field_descriptor) yield UInt16(self, "attributes_count", "Number of field attributes") if self["attributes_count"].value > 0: yield FieldArray(self, "attributes", AttributeInfo, self["attributes_count"].value) 
############################################################################### # method_info { # u2 access_flags; # u2 name_index; # u2 descriptor_index; # u2 attributes_count; # attribute_info attributes[attributes_count]; # } class MethodInfo(FieldSet): def createFields(self): # Access flags (16 bits) yield NullBits(self, "reserved[]", 4) yield Bit(self, "strict") yield Bit(self, "abstract") yield NullBits(self, "reserved[]", 1) yield Bit(self, "native") yield NullBits(self, "reserved[]", 2) yield Bit(self, "synchronized") yield Bit(self, "final") yield Bit(self, "static") yield Bit(self, "protected") yield Bit(self, "private") yield Bit(self, "public") yield CPIndex(self, "name_index", "Method name", target_types="Utf8") yield CPIndex(self, "descriptor_index", "Method descriptor", target_types="Utf8", target_text_handler=parse_method_descriptor) yield UInt16(self, "attributes_count", "Number of method attributes") if self["attributes_count"].value > 0: yield FieldArray(self, "attributes", AttributeInfo, self["attributes_count"].value) ############################################################################### # attribute_info { # u2 attribute_name_index; # u4 attribute_length; # u1 info[attribute_length]; # } # [...] 
class AttributeInfo(FieldSet): def __init__(self, *args): FieldSet.__init__(self, *args) self._size = (self["attribute_length"].value + 6) * 8 def createFields(self): yield CPIndex(self, "attribute_name_index", "Attribute name", target_types="Utf8") yield UInt32(self, "attribute_length", "Length of the attribute") attr_name = str(self["attribute_name_index"].get_cp_entry()) # ConstantValue_attribute { # u2 attribute_name_index; # u4 attribute_length; # u2 constantvalue_index; # } if attr_name == "ConstantValue": if self["attribute_length"].value != 2: raise ParserError("Java: Invalid attribute %s length (%s)" \ % (self.path, self["attribute_length"].value)) yield CPIndex(self, "constantvalue_index", target_types=("Long","Float","Double","Integer","String")) # Code_attribute { # u2 attribute_name_index; # u4 attribute_length; # u2 max_stack; # u2 max_locals; # u4 code_length; # u1 code[code_length]; # u2 exception_table_length; # { u2 start_pc; # u2 end_pc; # u2 handler_pc; # u2 catch_type; # } exception_table[exception_table_length]; # u2 attributes_count; # attribute_info attributes[attributes_count]; # } elif attr_name == "Code": yield UInt16(self, "max_stack") yield UInt16(self, "max_locals") yield UInt32(self, "code_length") if self["code_length"].value > 0: yield RawBytes(self, "code", self["code_length"].value) yield UInt16(self, "exception_table_length") if self["exception_table_length"].value > 0: yield FieldArray(self, "exception_table", ExceptionTableEntry, self["exception_table_length"].value) yield UInt16(self, "attributes_count") if self["attributes_count"].value > 0: yield FieldArray(self, "attributes", AttributeInfo, self["attributes_count"].value) # Exceptions_attribute { # u2 attribute_name_index; # u4 attribute_length; # u2 number_of_exceptions; # u2 exception_index_table[number_of_exceptions]; # } elif (attr_name == "Exceptions"): yield UInt16(self, "number_of_exceptions") yield FieldArray(self, "exception_index_table", CPIndex, 
self["number_of_exceptions"].value, target_types="Class") assert self["attribute_length"].value == \ 2 + self["number_of_exceptions"].value * 2 # InnerClasses_attribute { # u2 attribute_name_index; # u4 attribute_length; # u2 number_of_classes; # { u2 inner_class_info_index; # u2 outer_class_info_index; # u2 inner_name_index; # u2 inner_class_access_flags; # } classes[number_of_classes]; # } elif (attr_name == "InnerClasses"): yield UInt16(self, "number_of_classes") if self["number_of_classes"].value > 0: yield FieldArray(self, "classes", InnerClassesEntry, self["number_of_classes"].value) assert self["attribute_length"].value == \ 2 + self["number_of_classes"].value * 8 # Synthetic_attribute { # u2 attribute_name_index; # u4 attribute_length; # } elif (attr_name == "Synthetic"): assert self["attribute_length"].value == 0 # SourceFile_attribute { # u2 attribute_name_index; # u4 attribute_length; # u2 sourcefile_index; # } elif (attr_name == "SourceFile"): assert self["attribute_length"].value == 2 yield CPIndex(self, "sourcefile_index", target_types="Utf8") # LineNumberTable_attribute { # u2 attribute_name_index; # u4 attribute_length; # u2 line_number_table_length; # { u2 start_pc; # u2 line_number; # } line_number_table[line_number_table_length]; # } elif (attr_name == "LineNumberTable"): yield UInt16(self, "line_number_table_length") if self["line_number_table_length"].value > 0: yield FieldArray(self, "line_number_table", LineNumberTableEntry, self["line_number_table_length"].value) assert self["attribute_length"].value == \ 2 + self["line_number_table_length"].value * 4 # LocalVariableTable_attribute { # u2 attribute_name_index; # u4 attribute_length; # u2 local_variable_table_length; # { u2 start_pc; # u2 length; # u2 name_index; # u2 descriptor_index; # u2 index; # } local_variable_table[local_variable_table_length]; # } elif (attr_name == "LocalVariableTable"): yield UInt16(self, "local_variable_table_length") if self["local_variable_table_length"].value > 
0: yield FieldArray(self, "local_variable_table", LocalVariableTableEntry, self["local_variable_table_length"].value) assert self["attribute_length"].value == \ 2 + self["local_variable_table_length"].value * 10 # Deprecated_attribute { # u2 attribute_name_index; # u4 attribute_length; # } elif (attr_name == "Deprecated"): assert self["attribute_length"].value == 0 # Unkown attribute type. They are allowed by the JVM specs, but we # can't say much about them... elif self["attribute_length"].value > 0: yield RawBytes(self, "info", self["attribute_length"].value) class ExceptionTableEntry(FieldSet): static_size = 48 + CPIndex.static_size def createFields(self): yield textHandler(UInt16(self, "start_pc"), hexadecimal) yield textHandler(UInt16(self, "end_pc"), hexadecimal) yield textHandler(UInt16(self, "handler_pc"), hexadecimal) yield CPIndex(self, "catch_type", target_types="Class") class InnerClassesEntry(StaticFieldSet): format = ( (CPIndex, "inner_class_info_index", {"target_types": "Class", "allow_zero": True}), (CPIndex, "outer_class_info_index", {"target_types": "Class", "allow_zero": True}), (CPIndex, "inner_name_index", {"target_types": "Utf8", "allow_zero": True}), # Inner class access flags (16 bits) (NullBits, "reserved[]", 5), (Bit, "abstract"), (Bit, "interface"), (NullBits, "reserved[]", 3), (Bit, "super"), (Bit, "final"), (Bit, "static"), (Bit, "protected"), (Bit, "private"), (Bit, "public"), ) class LineNumberTableEntry(StaticFieldSet): format = ( (UInt16, "start_pc"), (UInt16, "line_number") ) class LocalVariableTableEntry(StaticFieldSet): format = ( (UInt16, "start_pc"), (UInt16, "length"), (CPIndex, "name_index", {"target_types": "Utf8"}), (CPIndex, "descriptor_index", {"target_types": "Utf8", "target_text_handler": parse_field_descriptor}), (UInt16, "index") ) ############################################################################### # ClassFile { # u4 magic; # u2 minor_version; # u2 major_version; # u2 constant_pool_count; # cp_info 
constant_pool[constant_pool_count-1]; # u2 access_flags; # u2 this_class; # u2 super_class; # u2 interfaces_count; # u2 interfaces[interfaces_count]; # u2 fields_count; # field_info fields[fields_count]; # u2 methods_count; # method_info methods[methods_count]; # u2 attributes_count; # attribute_info attributes[attributes_count]; # } class JavaCompiledClassFile(Parser): """ Root of the .class parser. """ endian = BIG_ENDIAN PARSER_TAGS = { "id": "java_class", "category": "program", "file_ext": ("class",), "mime": (u"application/java-vm",), "min_size": (32 + 3*16), "description": "Compiled Java class" } MAGIC = 0xCAFEBABE KNOWN_VERSIONS = { "45.3": "JDK 1.1", "46.0": "JDK 1.2", "47.0": "JDK 1.3", "48.0": "JDK 1.4", "49.0": "JDK 1.5" } # Constants go here since they will probably depend on the detected format # version at some point. Though, if they happen to be really backward # compatible, they may become module globals. CONSTANT_TYPES = { 1: "Utf8", 3: "Integer", 4: "Float", 5: "Long", 6: "Double", 7: "Class", 8: "String", 9: "Fieldref", 10: "Methodref", 11: "InterfaceMethodref", 12: "NameAndType" } def validate(self): if self["magic"].value != self.MAGIC: return "Wrong magic signature!" 
version = "%d.%d" % (self["major_version"].value, self["minor_version"].value) if version not in self.KNOWN_VERSIONS: return "Unknown version (%s)" % version return True def createDescription(self): version = "%d.%d" % (self["major_version"].value, self["minor_version"].value) if version in self.KNOWN_VERSIONS: return "Compiled Java class, %s" % self.KNOWN_VERSIONS[version] else: return "Compiled Java class, version %s" % version def createFields(self): yield textHandler(UInt32(self, "magic", "Java compiled class signature"), hexadecimal) yield UInt16(self, "minor_version", "Class format minor version") yield UInt16(self, "major_version", "Class format major version") yield UInt16(self, "constant_pool_count", "Size of the constant pool") if self["constant_pool_count"].value > 1: #yield FieldArray(self, "constant_pool", CPInfo, # (self["constant_pool_count"].value - 1), first_index=1) # Mmmh... can't use FieldArray actually, because ConstantPool # requires some specific hacks (skipping some indexes after Long # and Double entries). 
yield ConstantPool(self, "constant_pool", (self["constant_pool_count"].value)) # Inner class access flags (16 bits) yield NullBits(self, "reserved[]", 5) yield Bit(self, "abstract") yield Bit(self, "interface") yield NullBits(self, "reserved[]", 3) yield Bit(self, "super") yield Bit(self, "final") yield Bit(self, "static") yield Bit(self, "protected") yield Bit(self, "private") yield Bit(self, "public") yield CPIndex(self, "this_class", "Class name", target_types="Class") yield CPIndex(self, "super_class", "Super class name", target_types="Class") yield UInt16(self, "interfaces_count", "Number of implemented interfaces") if self["interfaces_count"].value > 0: yield FieldArray(self, "interfaces", CPIndex, self["interfaces_count"].value, target_types="Class") yield UInt16(self, "fields_count", "Number of fields") if self["fields_count"].value > 0: yield FieldArray(self, "fields", FieldInfo, self["fields_count"].value) yield UInt16(self, "methods_count", "Number of methods") if self["methods_count"].value > 0: yield FieldArray(self, "methods", MethodInfo, self["methods_count"].value) yield UInt16(self, "attributes_count", "Number of attributes") if self["attributes_count"].value > 0: yield FieldArray(self, "attributes", AttributeInfo, self["attributes_count"].value) # vim: set expandtab tabstop=4 shiftwidth=4 autoindent smartindent:
28,940
Python
.py
657
35.677321
101
0.591574
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,718
elf.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/elf.pyc
—Ú Œ »Mc@s·dZddklZddklZlZlZlZlZl Z l Z l Z ddk l Z lZddklZlZdefdÑÉYZdefd ÑÉYZd efd ÑÉYZd ÑZd efdÑÉYZdS(sb ELF (Unix/BSD executable file format) parser. Author: Victor Stinner Creation date: 08 may 2006 iˇˇˇˇ(tParser(tFieldSett ParserErrortUInt8tUInt16tUInt32tEnumtStringtBytes(t textHandlert hexadecimal(t LITTLE_ENDIANt BIG_ENDIANt ElfHeadercBs¬eZdZdZdZhdd6dd6dd6dd 6d d 6d d 6dd6Zhdd6dd6Zhdd6dd6dd6dd6dd 6dd6dd6Zhde6de6ZdÑZ dÑZ RS( i4iiiu AT&T WE 32100uSPARCu Intel 80386iuMotorola 68000iuMotorola 88000iu Intel 80860iu MIPS RS3000u32 bitsu64 bitsu No file typeiuRelocatable fileuExecutable fileuShared object fileu Core fileuProcessor-specific (0xFF00)iˇuProcessor-specific (0xFFFF)iˇˇs Little endians Big endianccs}t|dddÉVtt|ddÉ|iÉVtt|ddÉ|iÉVt|dd ÉVt|d d d ÉVt|d dÉVtt|ddÉ|iÉVtt|ddÉ|iÉVt |ddÉVt |ddÉVt |ddÉVt |ddÉVt |ddÉVt|ddÉVt|dd ÉVt|d!d"ÉVt|d#d$ÉVt|d%d&ÉVt|d'd(ÉVdS()Nt signatureisELF signature ("\x7fELF")tclasstClasstendiantEndiant file_versions File versiontpaditPadtnb_identsSize of ident[]ttypes File typetmachines Machine typetversionsELF format versiontentrysNumber of entriestphoffsProgram header offsettshoffsSection header offsettflagstFlagstehsizesElf header size (this header)t phentsizesProgram header entry sizetphnumsProgram header entry countt shentsizesSection header entry sizetshnumsSection header entre counttshstrndxsSection header strtab index( RRRt CLASS_NAMEt ENDIAN_NAMERRt TYPE_NAMEt MACHINE_NAMER(tself((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyt createFields.s&cCs`|didjodS|di|ijodS|di|ijod|diSdS( NRsELFsWrong ELF signatureRs Unknown classRsUnknown endian (%s)t(tvalueR%R&(R)((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pytisValidCsi†( t__name__t __module__t static_sizetLITTLE_ENDIAN_IDt BIG_ENDIAN_IDR(R%R'R&R*R-(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyR s6     tSectionHeader32cBs-eZdZhdd6ZdÑZdÑZRS(i(itBSSccs’t|ddÉVtt|ddÉ|iÉVt|ddÉVtt|ddÉtÉVtt|d d ÉtÉVtt|d d ÉtÉVt|d 
dÉVt|ddÉVt|ddÉVt|ddÉVdS(NtnametNameRtTypeRRtVMAsVirtual memory addresstLMAs Logical memory address (in file)tsizetSizetlinktLinktinfot Informationt addr_alignsAddress alignmentt entry_sizes Entry size(RRR'R R (R)((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyR*RscCsd|di|difS(Ns#Section header (name: %s, type: %s)R5R(R,tdisplay(R)((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pytcreateDescription^si@(R.R/R0R'R*RC(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyR3Ls   tProgramHeader32cBs-eZhdd6ZdZdÑZdÑZRS(sDynamic libraryii iccsªtt|ddÉtiÉVt|ddÉVt|ddÉVtt|ddÉtÉVtt|d d ÉtÉVt|d d ÉVt|d dÉVt|ddÉVt|ddÉVdS(NRR7RRtoffsettOffsettvaddrs V. addresstpaddrs P. addresst file_sizes File sizetmem_sizes Memory sizetalignt Alignmenttxxxs???(RRRDR'RR R (R)((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyR*hscCsd|diS(NsProgram Header (%s)R(RB(R)((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyRCssi(R.R/R'R0R*RC(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyRDbs   cCst|d|dÉS(NRE(tint(tatb((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyt sortSectionvstElfFilecBsceZhdd6dd6dd6eid6dd 6dd6dd6ZeZdÑZdÑZdÑZ RS(telftidtprogramtcategorytsoR+tfile_exttmin_sizeuapplication/x-executableuapplication/x-objectuapplication/x-sharedlibuapplication/x-executable-fileuapplication/x-coredumptmimesELFitmagicsELF Unix/BSD program/libraryt descriptioncCs |diÉ}|o|StS(Ntheader(R-tTrue(R)terr((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pytvalidateósccsÎ|iid dtÉtijo t|_n t|_t|ddÉVx)t|diÉD]}t |dÉVq]Wt ot dÉÇn0|i |did d t É}|o |Vnx)t|d iÉD]}t |d ÉVqœWdS(NiiR]tHeaders header/phnums prg_header[]sTODO: Parse sections...s header/shoffsraw[]trelatives header/shnumssection_header[]i((tstreamtreadBitsR R R2RR txrangeR,RDtFalseRtseekByteR3(R)tindextraw((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyR*ùs"   
cCsd|diS(Ns ELF Unix/BSD program/library: %ss header/class(RB(R)((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyRC∏s(RWR+(uapplication/x-executableuapplication/x-objectuapplication/x-sharedlibuapplication/x-executable-fileuapplication/x-coredump(sELFi((sELFi( R.R/R R0t PARSER_TAGSR RR`R*RC(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyRRÜs     N(t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRRthachoir_core.text_handlerR R thachoir_core.endianR R R R3RDRQRR(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/program/elf.pyt<module>s:= 
7,858
Python
.py
42
186.071429
936
0.490276
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,719
exe_ne.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/exe_ne.pyc
Ñò Î ÈMc@s|ddklZlZlZlZlZlZlZlZl Z l Z ddk l Z l Z lZdefd„ƒYZdS(iÿÿÿÿ( tFieldSettBittUInt8tUInt16tUInt32tBytest PaddingBitst PaddingBytestNullBitst NullBytes(t textHandlert hexadecimaltfilesizeHandlert NE_HeadercBseZdZd„ZRS(i@iccst|dddƒVt|ddƒVt|ddƒVt|dd ƒVt|d d ƒVt|d d ƒVt|ddƒVt|ddƒVt|d dƒVt|ddƒVt|d dƒVt|ddƒVt|d dƒVt|ddƒVt|ddƒVtt|ddƒƒVtt|ddƒƒVtt |d d!ƒt ƒVtt |d"d#ƒt ƒVt|d$d%ƒVt|d&d'ƒVtt|d(d)ƒƒVt|d*d+ƒVt|d,d-ƒVt|d.d/ƒVt|d0d1ƒVt|d2d3ƒVt |d4d5ƒVt|d6d7ƒVt|d8d9ƒVt|d:d;ƒVt|d<d=ƒVt|d dƒVt|d>d?ƒVt|d d@ƒVt|dAdBƒVt|dCdDƒVt|dEdFƒVt|d d ƒVt|dGdHƒVt|dIdJƒVt |d dƒVtt|dKdLƒt ƒVdS(MNt signatureisNew executable signature (NE)tlink_versLinker version numbertlink_revsLinker revision numbertentry_table_ofstsOffset to the entry tabletentry_table_sizes$Length (in bytes) of the entry tables reserved[]itis_dlls Is a dynamic-link library (DLL)?t is_win_appsIs a Windows application?i tfirst_seg_codes7First segment contains code that loads the application?it link_errors#Load even if linker detects errors?tis_libsIs a library module?t auto_data_segsAutomatic data segment numbertlocal_heap_sizes)Initial size (in bytes) of the local heapt stack_sizes$Initial size (in bytes) of the stacktcs_ipsValue of CS:IPtss_spsValue of SS:SPtnb_entry_seg_tabs&Number of entries in the segment tabletnb_entry_modref_tabs/Number of entries in the module-reference tabletsize_nonres_name_tabs-Number of bytes in the nonresident-name tablet seg_tab_ofssSegment table offsettrsrc_ofssResource offsettres_name_tab_ofssResident-name table offsettmod_ref_tab_ofssModule-reference table offsettimport_tab_ofssImported-name table offsettnon_res_name_tab_ofssNonresident-name table offsett nb_mov_ent_ptsNumber of movable entry pointstlog2_sector_sizesLog2 of the segment sector sizet nb_rsrc_segsNumber of resource segmentstunknown_os_formats"Operating system format is unknownt os_windowss%Operating system is Microsoft Windowsit is_win20_prots@Is Windows 2.x application running in version 3.x protected modet is_win20_fonts8Is 
Windows 2.x application supporting proportional fontst fast_loadsContains a fast-load area?t fastload_ofss!Fast-load area offset (in sector)t fastload_sizes!Fast-load area length (in sector)t win_versionsExpected Windows version number( RRRRRRRR R RR R (tself((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_ne.pyt createFieldssVi(t__name__t __module__t static_sizeR2(((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_ne.pyR sN(thachoir_core.fieldRRRRRRRRRR thachoir_core.text_handlerR R R R (((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_ne.pyt<module>sF
3,854
Python
.py
17
225.705882
836
0.563314
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,720
exe.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/exe.py
""" Microsoft Windows Portable Executable (PE) file parser. Informations: - Microsoft Portable Executable and Common Object File Format Specification: http://www.microsoft.com/whdc/system/platform/firmware/PECOFF.mspx Author: Victor Stinner Creation date: 2006-08-13 """ from hachoir_parser import HachoirParser from hachoir_core.endian import LITTLE_ENDIAN from hachoir_core.field import (FieldSet, RootSeekableFieldSet, UInt16, UInt32, String, RawBytes, PaddingBytes) from hachoir_core.text_handler import textHandler, hexadecimal from hachoir_parser.program.exe_ne import NE_Header from hachoir_parser.program.exe_pe import PE_Header, PE_OptHeader, SectionHeader from hachoir_parser.program.exe_res import PE_Resource, NE_VersionInfoNode MAX_NB_SECTION = 50 class MSDosHeader(FieldSet): static_size = 64*8 def createFields(self): yield String(self, "header", 2, "File header (MZ)", charset="ASCII") yield UInt16(self, "size_mod_512", "File size in bytes modulo 512") yield UInt16(self, "size_div_512", "File size in bytes divide by 512") yield UInt16(self, "reloc_entries", "Number of relocation entries") yield UInt16(self, "code_offset", "Offset to the code in the file (divided by 16)") yield UInt16(self, "needed_memory", "Memory needed to run (divided by 16)") yield UInt16(self, "max_memory", "Maximum memory needed to run (divided by 16)") yield textHandler(UInt32(self, "init_ss_sp", "Initial value of SP:SS registers"), hexadecimal) yield UInt16(self, "checksum", "Checksum") yield textHandler(UInt32(self, "init_cs_ip", "Initial value of CS:IP registers"), hexadecimal) yield UInt16(self, "reloc_offset", "Offset in file to relocation table") yield UInt16(self, "overlay_number", "Overlay number") yield PaddingBytes(self, "reserved[]", 8, "Reserved") yield UInt16(self, "oem_id", "OEM id") yield UInt16(self, "oem_info", "OEM info") yield PaddingBytes(self, "reserved[]", 20, "Reserved") yield UInt32(self, "next_offset", "Offset to next header (PE or NE)") def isValid(self): if 512 
<= self["size_mod_512"].value: return "Invalid field 'size_mod_512' value" if self["code_offset"].value < 4: return "Invalid code offset" looks_pe = self["size_div_512"].value < 4 if looks_pe: if self["checksum"].value != 0: return "Invalid value of checksum" if not (80 <= self["next_offset"].value <= 1024): return "Invalid value of next_offset" return "" class ExeFile(HachoirParser, RootSeekableFieldSet): PARSER_TAGS = { "id": "exe", "category": "program", "file_ext": ("exe", "dll", "ocx"), "mime": (u"application/x-dosexec",), "min_size": 64*8, #"magic": (("MZ", 0),), "magic_regex": (("MZ.[\0\1].{4}[^\0\1\2\3]", 0),), "description": "Microsoft Windows Portable Executable" } endian = LITTLE_ENDIAN def __init__(self, stream, **args): RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self)) HachoirParser.__init__(self, stream, **args) def validate(self): if self.stream.readBytes(0, 2) != 'MZ': return "Wrong header" err = self["msdos"].isValid() if err: return "Invalid MSDOS header: "+err if self.isPE(): if MAX_NB_SECTION < self["pe_header/nb_section"].value: return "Invalid number of section (%s)" \ % self["pe_header/nb_section"].value return True def createFields(self): yield MSDosHeader(self, "msdos", "MS-DOS program header") if self.isPE() or self.isNE(): offset = self["msdos/next_offset"].value self.seekByte(offset, relative=False) if self.isPE(): for field in self.parsePortableExecutable(): yield field elif self.isNE(): for field in self.parseNE_Executable(): yield field else: offset = self["msdos/code_offset"].value * 16 self.seekByte(offset, relative=False) def parseNE_Executable(self): yield NE_Header(self, "ne_header") # FIXME: Compute resource offset instead of using searchBytes() # Ugly hack to get find version info structure start = self.current_size addr = self.stream.searchBytes('VS_VERSION_INFO', start) if addr: self.seekBit(addr-32) yield NE_VersionInfoNode(self, "info") def parsePortableExecutable(self): # Read PE header 
yield PE_Header(self, "pe_header") # Read PE optional header size = self["pe_header/opt_hdr_size"].value rsrc_rva = None if size: yield PE_OptHeader(self, "pe_opt_header", size=size*8) if "pe_opt_header/resource/rva" in self: rsrc_rva = self["pe_opt_header/resource/rva"].value # Read section headers sections = [] for index in xrange(self["pe_header/nb_section"].value): section = SectionHeader(self, "section_hdr[]") yield section if section["phys_size"].value: sections.append(section) # Read sections sections.sort(key=lambda field: field["phys_off"].value) for section in sections: self.seekByte(section["phys_off"].value) size = section["phys_size"].value if size: name = section.createSectionName() if rsrc_rva is not None and section["rva"].value == rsrc_rva: yield PE_Resource(self, name, section, size=size*8) else: yield RawBytes(self, name, size) def isPE(self): if not hasattr(self, "_is_pe"): self._is_pe = False offset = self["msdos/next_offset"].value * 8 if 2*8 <= offset \ and (offset+PE_Header.static_size) <= self.size \ and self.stream.readBytes(offset, 4) == 'PE\0\0': self._is_pe = True return self._is_pe def isNE(self): if not hasattr(self, "_is_ne"): self._is_ne = False offset = self["msdos/next_offset"].value * 8 if 64*8 <= offset \ and (offset+NE_Header.static_size) <= self.size \ and self.stream.readBytes(offset, 2) == 'NE': self._is_ne = True return self._is_ne def getResource(self): # MS-DOS program: no resource if not self.isPE(): return None # Check if PE has resource or not if "pe_opt_header/resource/size" in self: if not self["pe_opt_header/resource/size"].value: return None if "section_rsrc" in self: return self["section_rsrc"] return None def createDescription(self): if self.isPE(): if self["pe_header/is_dll"].value: text = u"Microsoft Windows DLL" else: text = u"Microsoft Windows Portable Executable" info = [self["pe_header/cpu"].display] if "pe_opt_header" in self: hdr = self["pe_opt_header"] info.append(hdr["subsystem"].display) if 
self["pe_header/is_stripped"].value: info.append(u"stripped") return u"%s: %s" % (text, ", ".join(info)) elif self.isNE(): return u"New-style Executable (NE) for Microsoft MS Windows 3.x" else: return u"MS-DOS executable" def createContentSize(self): if self.isPE(): size = 0 for index in xrange(self["pe_header/nb_section"].value): section = self["section_hdr[%u]" % index] section_size = section["phys_size"].value if not section_size: continue section_size = (section_size + section["phys_off"].value) * 8 if size: size = max(size, section_size) else: size = section_size if size: return size else: return None elif self.isNE(): # TODO: Guess NE size return None else: size = self["msdos/size_mod_512"].value + (self["msdos/size_div_512"].value-1) * 512 if size < 0: return None return size*8
8,673
Python
.py
198
33.247475
102
0.583028
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,721
__init__.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/__init__.py
from hachoir_parser.program.elf import ElfFile from hachoir_parser.program.exe import ExeFile from hachoir_parser.program.python import PythonCompiledFile from hachoir_parser.program.java import JavaCompiledClassFile
218
Python
.py
4
53.25
61
0.887324
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,722
exe_res.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/exe_res.py
""" Parser for resource of Microsoft Windows Portable Executable (PE). Documentation: - Wine project VS_FIXEDFILEINFO structure, file include/winver.h Author: Victor Stinner Creation date: 2007-01-19 """ from hachoir_core.field import (FieldSet, ParserError, Enum, Bit, Bits, SeekableFieldSet, UInt16, UInt32, TimestampUnix32, RawBytes, PaddingBytes, NullBytes, NullBits, CString, String) from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal from hachoir_core.tools import createDict, paddingSize, alignValue, makePrintable from hachoir_core.error import HACHOIR_ERRORS from hachoir_parser.common.win32 import BitmapInfoHeader MAX_DEPTH = 5 MAX_INDEX_PER_HEADER = 300 MAX_NAME_PER_HEADER = MAX_INDEX_PER_HEADER class Version(FieldSet): static_size = 32 def createFields(self): yield textHandler(UInt16(self, "minor", "Minor version number"), hexadecimal) yield textHandler(UInt16(self, "major", "Major version number"), hexadecimal) def createValue(self): return self["major"].value + float(self["minor"].value) / 10000 MAJOR_OS_NAME = { 1: "DOS", 2: "OS/2 16-bit", 3: "OS/2 32-bit", 4: "Windows NT", } MINOR_OS_BASE = 0 MINOR_OS_NAME = { 0: "Base", 1: "Windows 16-bit", 2: "Presentation Manager 16-bit", 3: "Presentation Manager 32-bit", 4: "Windows 32-bit", } FILETYPE_DRIVER = 3 FILETYPE_FONT = 4 FILETYPE_NAME = { 1: "Application", 2: "DLL", 3: "Driver", 4: "Font", 5: "VXD", 7: "Static library", } DRIVER_SUBTYPE_NAME = { 1: "Printer", 2: "Keyboard", 3: "Language", 4: "Display", 5: "Mouse", 6: "Network", 7: "System", 8: "Installable", 9: "Sound", 10: "Communications", } FONT_SUBTYPE_NAME = { 1: "Raster", 2: "Vector", 3: "TrueType", } class VersionInfoBinary(FieldSet): def createFields(self): yield textHandler(UInt32(self, "magic", "File information magic (0xFEEF04BD)"), hexadecimal) if self["magic"].value != 0xFEEF04BD: raise ParserError("EXE resource: invalid file info magic") yield Version(self, "struct_ver", "Structure version (1.0)") yield Version(self, 
"file_ver_ms", "File version MS") yield Version(self, "file_ver_ls", "File version LS") yield Version(self, "product_ver_ms", "Product version MS") yield Version(self, "product_ver_ls", "Product version LS") yield textHandler(UInt32(self, "file_flags_mask"), hexadecimal) yield Bit(self, "debug") yield Bit(self, "prerelease") yield Bit(self, "patched") yield Bit(self, "private_build") yield Bit(self, "info_inferred") yield Bit(self, "special_build") yield NullBits(self, "reserved", 26) yield Enum(textHandler(UInt16(self, "file_os_major"), hexadecimal), MAJOR_OS_NAME) yield Enum(textHandler(UInt16(self, "file_os_minor"), hexadecimal), MINOR_OS_NAME) yield Enum(textHandler(UInt32(self, "file_type"), hexadecimal), FILETYPE_NAME) field = textHandler(UInt32(self, "file_subfile"), hexadecimal) if field.value == FILETYPE_DRIVER: field = Enum(field, DRIVER_SUBTYPE_NAME) elif field.value == FILETYPE_FONT: field = Enum(field, FONT_SUBTYPE_NAME) yield field yield TimestampUnix32(self, "date_ms") yield TimestampUnix32(self, "date_ls") class VersionInfoNode(FieldSet): TYPE_STRING = 1 TYPE_NAME = { 0: "binary", 1: "string", } def __init__(self, parent, name, is_32bit=True): FieldSet.__init__(self, parent, name) self._size = alignValue(self["size"].value, 4) * 8 self.is_32bit = is_32bit def createFields(self): yield UInt16(self, "size", "Node size (in bytes)") yield UInt16(self, "data_size") yield Enum(UInt16(self, "type"), self.TYPE_NAME) yield CString(self, "name", charset="UTF-16-LE") size = paddingSize(self.current_size//8, 4) if size: yield NullBytes(self, "padding[]", size) size = self["data_size"].value if size: if self["type"].value == self.TYPE_STRING: if self.is_32bit: size *= 2 yield String(self, "value", size, charset="UTF-16-LE", truncate="\0") elif self["name"].value == "VS_VERSION_INFO": yield VersionInfoBinary(self, "value", size=size*8) if self["value/file_flags_mask"].value == 0: self.is_32bit = False else: yield RawBytes(self, "value", size) while 12 <= 
(self.size - self.current_size) // 8: yield VersionInfoNode(self, "node[]", self.is_32bit) size = (self.size - self.current_size) // 8 if size: yield NullBytes(self, "padding[]", size) def createDescription(self): text = "Version info node: %s" % self["name"].value if self["type"].value == self.TYPE_STRING and "value" in self: text += "=%s" % self["value"].value return text def parseVersionInfo(parent): yield VersionInfoNode(parent, "node[]") def parseIcon(parent): yield BitmapInfoHeader(parent, "bmp_header") size = (parent.size - parent.current_size) // 8 if size: yield RawBytes(parent, "raw", size) class WindowsString(FieldSet): def createFields(self): yield UInt16(self, "length", "Number of 16-bit characters") size = self["length"].value * 2 if size: yield String(self, "text", size, charset="UTF-16-LE") def createValue(self): if "text" in self: return self["text"].value else: return u"" def createDisplay(self): return makePrintable(self.value, "UTF-8", to_unicode=True, quote='"') def parseStringTable(parent): while not parent.eof: yield WindowsString(parent, "string[]") RESOURCE_TYPE = { 1: ("cursor[]", "Cursor", None), 2: ("bitmap[]", "Bitmap", None), 3: ("icon[]", "Icon", parseIcon), 4: ("menu[]", "Menu", None), 5: ("dialog[]", "Dialog", None), 6: ("string_table[]", "String table", parseStringTable), 7: ("font_dir[]", "Font directory", None), 8: ("font[]", "Font", None), 9: ("accelerators[]", "Accelerators", None), 10: ("raw_res[]", "Unformatted resource data", None), 11: ("message_table[]", "Message table", None), 12: ("group_cursor[]", "Group cursor", None), 14: ("group_icon[]", "Group icon", None), 16: ("version_info", "Version information", parseVersionInfo), } class Entry(FieldSet): static_size = 16*8 def __init__(self, parent, name, inode=None): FieldSet.__init__(self, parent, name) self.inode = inode def createFields(self): yield textHandler(UInt32(self, "rva"), hexadecimal) yield filesizeHandler(UInt32(self, "size")) yield UInt32(self, "codepage") 
yield NullBytes(self, "reserved", 4) def createDescription(self): return "Entry #%u: offset=%s size=%s" % ( self.inode["offset"].value, self["rva"].display, self["size"].display) class NameOffset(FieldSet): def createFields(self): yield UInt32(self, "name") yield Bits(self, "offset", 31) yield Bit(self, "is_name") class IndexOffset(FieldSet): TYPE_DESC = createDict(RESOURCE_TYPE, 1) def __init__(self, parent, name, res_type=None): FieldSet.__init__(self, parent, name) self.res_type = res_type def createFields(self): yield Enum(UInt32(self, "type"), self.TYPE_DESC) yield Bits(self, "offset", 31) yield Bit(self, "is_subdir") def createDescription(self): if self["is_subdir"].value: return "Sub-directory: %s at %s" % (self["type"].display, self["offset"].value) else: return "Index: ID %s at %s" % (self["type"].display, self["offset"].value) class ResourceContent(FieldSet): def __init__(self, parent, name, entry, size=None): FieldSet.__init__(self, parent, name, size=entry["size"].value*8) self.entry = entry res_type = self.getResType() if res_type in RESOURCE_TYPE: self._name, description, self._parser = RESOURCE_TYPE[res_type] else: self._parser = None def getResID(self): return self.entry.inode["offset"].value def getResType(self): return self.entry.inode.res_type def createFields(self): if self._parser: for field in self._parser(self): yield field else: yield RawBytes(self, "content", self.size//8) def createDescription(self): return "Resource #%u content: type=%s" % ( self.getResID(), self.getResType()) class Header(FieldSet): static_size = 16*8 def createFields(self): yield NullBytes(self, "options", 4) yield TimestampUnix32(self, "creation_date") yield UInt16(self, "maj_ver", "Major version") yield UInt16(self, "min_ver", "Minor version") yield UInt16(self, "nb_name", "Number of named entries") yield UInt16(self, "nb_index", "Number of indexed entries") def createDescription(self): text = "Resource header" info = [] if self["nb_name"].value: info.append("%u name" 
% self["nb_name"].value) if self["nb_index"].value: info.append("%u index" % self["nb_index"].value) if self["creation_date"].value: info.append(self["creation_date"].display) if info: return "%s: %s" % (text, ", ".join(info)) else: return text class Name(FieldSet): def createFields(self): yield UInt16(self, "length") size = min(self["length"].value, 255) if size: yield String(self, "name", size, charset="UTF-16LE") class Directory(FieldSet): def __init__(self, parent, name, res_type=None): FieldSet.__init__(self, parent, name) nb_entries = self["header/nb_name"].value + self["header/nb_index"].value self._size = Header.static_size + nb_entries * 64 self.res_type = res_type def createFields(self): yield Header(self, "header") if MAX_NAME_PER_HEADER < self["header/nb_name"].value: raise ParserError("EXE resource: invalid number of name (%s)" % self["header/nb_name"].value) if MAX_INDEX_PER_HEADER < self["header/nb_index"].value: raise ParserError("EXE resource: invalid number of index (%s)" % self["header/nb_index"].value) hdr = self["header"] for index in xrange(hdr["nb_name"].value): yield NameOffset(self, "name[]") for index in xrange(hdr["nb_index"].value): yield IndexOffset(self, "index[]", self.res_type) def createDescription(self): return self["header"].description class PE_Resource(SeekableFieldSet): def __init__(self, parent, name, section, size): SeekableFieldSet.__init__(self, parent, name, size=size) self.section = section def parseSub(self, directory, name, depth): indexes = [] for index in directory.array("index"): if index["is_subdir"].value: indexes.append(index) #indexes.sort(key=lambda index: index["offset"].value) for index in indexes: self.seekByte(index["offset"].value) if depth == 1: res_type = index["type"].value else: res_type = directory.res_type yield Directory(self, name, res_type) def createFields(self): # Parse directories depth = 0 subdir = Directory(self, "root") yield subdir subdirs = [subdir] alldirs = [subdir] while subdirs: depth 
+= 1 if MAX_DEPTH < depth: self.error("EXE resource: depth too high (%s), stop parsing directories" % depth) break newsubdirs = [] for index, subdir in enumerate(subdirs): name = "directory[%u][%u][]" % (depth, index) try: for field in self.parseSub(subdir, name, depth): if field.__class__ == Directory: newsubdirs.append(field) yield field except HACHOIR_ERRORS, err: self.error("Unable to create directory %s: %s" % (name, err)) subdirs = newsubdirs alldirs.extend(subdirs) # Create resource list resources = [] for directory in alldirs: for index in directory.array("index"): if not index["is_subdir"].value: resources.append(index) # Parse entries entries = [] for resource in resources: offset = resource["offset"].value if offset is None: continue self.seekByte(offset) entry = Entry(self, "entry[]", inode=resource) yield entry entries.append(entry) entries.sort(key=lambda entry: entry["rva"].value) # Parse resource content for entry in entries: try: offset = self.section.rva2file(entry["rva"].value) padding = self.seekByte(offset, relative=False) if padding: yield padding yield ResourceContent(self, "content[]", entry) except HACHOIR_ERRORS, err: self.warning("Error when parsing entry %s: %s" % (entry.path, err)) size = (self.size - self.current_size) // 8 if size: yield PaddingBytes(self, "padding_end", size) class NE_VersionInfoNode(FieldSet): TYPE_STRING = 1 TYPE_NAME = { 0: "binary", 1: "string", } def __init__(self, parent, name): FieldSet.__init__(self, parent, name) self._size = alignValue(self["size"].value, 4) * 8 def createFields(self): yield UInt16(self, "size", "Node size (in bytes)") yield UInt16(self, "data_size") yield CString(self, "name", charset="ISO-8859-1") size = paddingSize(self.current_size//8, 4) if size: yield NullBytes(self, "padding[]", size) size = self["data_size"].value if size: if self["name"].value == "VS_VERSION_INFO": yield VersionInfoBinary(self, "value", size=size*8) else: yield String(self, "value", size, charset="ISO-8859-1") 
while 12 <= (self.size - self.current_size) // 8: yield NE_VersionInfoNode(self, "node[]") size = (self.size - self.current_size) // 8 if size: yield NullBytes(self, "padding[]", size) def createDescription(self): text = "Version info node: %s" % self["name"].value # if self["type"].value == self.TYPE_STRING and "value" in self: # text += "=%s" % self["value"].value return text
15,292
Python
.py
382
31.403141
100
0.593319
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,723
exe_pe.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/exe_pe.pyc
Ñò Î ÈMc@sæddklZlZlZlZlZlZlZlZl Z l Z l Z l Z l Z lZddklZlZlZddklZdefd„ƒYZdefd„ƒYZdefd „ƒYZd efd „ƒYZd S( iÿÿÿÿ(tFieldSett ParserErrortBittUInt8tUInt16tUInt32tTimestampUnix32tBytestStringtEnumt PaddingBytest PaddingBitst NullBytestNullBits(t textHandlert hexadecimaltfilesizeHandler(tHACHOIR_ERRORSt SectionHeadercBs2eZdZd„Zd„Zd„Zd„ZRS(i(iccst|ddddddƒVtt|ddƒƒVtt|d d ƒtƒVtt|d d ƒƒVtt|d dƒƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|d d!ƒVt|d"d#ƒVt|ddƒVt|ddƒVt|d$d%ƒVt|d&d'ƒVt|d(d)ƒVt|d*d+ƒVt|d,d-ƒVt|d.d/ƒVt|d0d1ƒVt|d2d3ƒVdS(4NtnameitcharsettASCIItstript tmem_sizesSize in memorytrvasRVA (location) in memoryt phys_sizesPhysical size (on disk)tphys_offsPhysical location (on disk)treservedi s reserved[]iithas_codes Contains codet has_init_datasContains initialized datathas_uninit_datasContains uninitialized datat has_commentsContains comments?tremoves&Contents will not become part of imaget has_comdatasContains comdat?tno_defer_spec_excs=Reset speculative exceptions handling bits in the TLB entriestgp_rels&Content can be accessed relative to GPt ext_relocsContains extended relocations?t discardedsCan be discarded?t is_not_cachedsIs not cachable?t is_not_pagedsIs not pageable?t is_shareables Is shareable?t is_executablesIs executable?t is_readables Is readable?t is_writables Is writable?(RRRRRR R R(tself((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pyt createFields s:cCs|di||diS(NRR(tvalue(R-R((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pytrva2file2scCsº|di}|di}d|||fd|dig}|dio|idƒn|dio|idƒn|d io|id ƒnd |d id i|ƒfS(NRRsrva=0x%08x..0x%08xssize=%sR*texecR+treadR,twritesSection "%s": %sRs, (R/tdisplaytappendtjoin(R-Rtsizetinfo((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pytcreateDescription5s  cCs`y0t|diidƒƒ}|o d|SWn)tj o}|it|ƒƒdSXdS(NRt.s section_%ss 
section[](tstrR/RRtwarningtunicode(R-Rterr((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pytcreateSectionNameDs i@(t__name__t __module__t static_sizeR.R0R9R?(((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pyRs  (  t DataDirectorycBseZd„Zd„ZRS(ccs2tt|ddƒtƒVtt|dƒƒVdS(NRsVirtual addressR7(RRRR(R-((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pyR.NscCs3|diod|di|difSdSdS(NR7sDirectory at %s (%s)Rs(empty directory)(R/R4(R-((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pyR9Rs(R@RAR.R9(((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pyRCMs t PE_HeadercBs¾eZd,Zhdd6dd6dd6dd 6d d 6d d 6dd6dd6dd6dd6dd6dd6dd6dd6dd6d d!6d"d#6d$d%6d&d6d'd6d(d6d)d6d*d6Zd+„ZRS(-iiu Alpha AXPi„uARMiÀu Intel 80386iLu Intel 80486iMu Intel PentiumiNu Intel IA64iuMotorola 68000ihuMIPSifuAlpha AXP 64 bitsi„u MIPS with FPUifuMIPS16 with FPUifuPowerPC little endianiðuR3000ibuMIPS little endian (R4000)ifuR10000ihu Hitachi SH3i¢u Hitachi SH4i¦uR3000 (MIPS), big endiani`uR3000 (MIPS), little endianuR4000 (MIPS), little endianuR10000 (MIPS), little endianu DEC Alpha AXPuIBM Power PC, little endianccs¾t|dddƒV|didjotdƒ‚ntt|ddƒ|iƒVt|dd ƒVt|d d ƒVt|d d ƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt |ddƒVt|d d!ƒVt|d"d#ƒVt|d$d%ƒVt|d&d'ƒVt |d(dƒVt|d)d*ƒVt|d+d,ƒVt|d-d.ƒVt|d/d0ƒVdS(1NtheaderisPE header signature (PE\0\0)tPEsInvalid PE header signaturetcpusCPU typet nb_sectionsNumber of sectionst creation_dates Creation datet ptr_to_symsPointer to symbol tablet nb_symbolssNumber of symbolst opt_hdr_sizesOptional header sizetreloc_strippeds(If true, don't contain base relocations.t exec_imagesExecutable image?tline_nb_strippedsCOFF line numbers stripped?tlocal_sym_strippeds#COFF symbol table entries stripped?taggr_wssAggressively trim working sett large_addrs2Application can handle addresses greater than 2 GBRit reverse_los)Little endian: LSB precedes MSB in memoryt32bits)Machine based on 
32-bit-word architecturet is_strippedsDebugging information removed?tswaps;If image is on removable media, copy and run from swap filet reserved2t is_systemsIt's a system filetis_dlls!It's a dynamic-link library (DLL)tups'File should be run only on a UP machinet reverse_his&Big endian: MSB precedes LSB in memory( RR/RR Rtcpu_nameRRRR R (R-((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pyR.us2iÀ(R@RARBR\R.(((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pyRDYs4 t PE_OptHeadercBsÜeZh dd6dd6dd6dd6dd 6d d 6d d 6dd6dd6dd6dd6dd6dd6Zh dd6dd6dd6dd6dd 6d!d6d"d#6d$d 6d%d 6d&d 6d'd6d(d6d)d6Zd*„Zd+„ZRS(,uNativeiu Windows GUIiu Windows CUIiuOS/2 CUIiu POSIX CUIiuNative WindowsiuWindows CE GUIi uEFI applicationi uEFI boot service driveri uEFI runtime driveri uEFI ROMi uXBOXiuWindows boot applicationitexportitimporttresourcet exceptiont certificateit relocationtdebugit descriptiont global_ptrttlst load_configt bound_importtimport_addressccsæt|ddƒV|didjotdƒ‚nt|ddƒVt|ddƒVtt|d d ƒƒVtt|d d ƒƒVtt|d dƒƒVtt|ddƒtƒVtt|ddƒtƒVtt|ddƒtƒVtt|ddƒtƒVtt|ddƒƒVtt|ddƒƒVt|ddƒVt|ddƒVt|dd ƒVt|d!d"ƒVt|d#d$ƒVt|d%d&ƒVt|d'd(ƒVtt|d)d*ƒƒVtt|d+d,ƒƒVtt|d-ƒtƒVt t|d.ƒ|i ƒVt|d/ƒVtt|d0ƒƒVtt|d1ƒƒVtt|d2ƒƒVtt|d3ƒƒVt|d4ƒVt|d5d6ƒVxZt |d5iƒD]E}y|i |}Wnt j od7|}nXt||ƒVq™WdS(8Nt signatures%PE optional header signature (0x010b)i s$Invalid PE optional header signaturet maj_lnk_versMajor linker versiont min_lnk_versMinor linker versiont size_codes Size of codetsize_init_datasSize of initialized datatsize_uninit_datasSize of uninitialized datat entry_points%Address (RVA) of the code entry pointt base_codesBase (RVA) of codet base_datasBase (RVA) of datat image_basesImage base (RVA)t sect_alignsSection alignmentt file_alignsFile alignmentt maj_os_versMajor OS versiont min_os_versMinor OS versiont maj_img_versMajor image versiont min_img_versMinor image versiontmaj_subsys_versMajor subsystem versiontmin_subsys_versMinor subsystem versionRitsize_imgs Size of imagetsize_hdrsSize 
of headerstchecksumt subsystemt dll_flagstsize_stack_reservetsize_stack_committsize_heap_reservetsize_heap_committ loader_flagst nb_directorysNumber of RVA and sizess data_dir[%u](RR/RRRRRRR R tSUBSYSTEM_NAMEtxrangetDIRECTORY_NAMEtKeyErrorRC(R-tindexR((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pyR.°sNcCsd|di|difS(Ns&PE optional header: %s, entry point %sR€Rq(R4(R-((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pyR9Ùs (R@RARˆRŠR.R9(((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pyR]‘s<   )N(thachoir_core.fieldRRRRRRRRRR R R R R thachoir_core.text_handlerRRRthachoir_core.errorRRRCRDR](((sG/pentest/enumeration/google/metagoofil/hachoir_parser/program/exe_pe.pyt<module>s ^E 8
10,890
Python
.py
42
258.285714
1,048
0.506314
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,724
exe_pe.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/program/exe_pe.py
from hachoir_core.field import (FieldSet, ParserError, Bit, UInt8, UInt16, UInt32, TimestampUnix32, Bytes, String, Enum, PaddingBytes, PaddingBits, NullBytes, NullBits) from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir_core.error import HACHOIR_ERRORS class SectionHeader(FieldSet): static_size = 40 * 8 def createFields(self): yield String(self, "name", 8, charset="ASCII", strip="\0 ") yield filesizeHandler(UInt32(self, "mem_size", "Size in memory")) yield textHandler(UInt32(self, "rva", "RVA (location) in memory"), hexadecimal) yield filesizeHandler(UInt32(self, "phys_size", "Physical size (on disk)")) yield filesizeHandler(UInt32(self, "phys_off", "Physical location (on disk)")) yield PaddingBytes(self, "reserved", 12) # 0x0000000# yield NullBits(self, "reserved[]", 4) # 0x000000#0 yield NullBits(self, "reserved[]", 1) yield Bit(self, "has_code", "Contains code") yield Bit(self, "has_init_data", "Contains initialized data") yield Bit(self, "has_uninit_data", "Contains uninitialized data") # 0x00000#00 yield NullBits(self, "reserved[]", 1) yield Bit(self, "has_comment", "Contains comments?") yield NullBits(self, "reserved[]", 1) yield Bit(self, "remove", "Contents will not become part of image") # 0x0000#000 yield Bit(self, "has_comdata", "Contains comdat?") yield NullBits(self, "reserved[]", 1) yield Bit(self, "no_defer_spec_exc", "Reset speculative exceptions handling bits in the TLB entries") yield Bit(self, "gp_rel", "Content can be accessed relative to GP") # 0x000#0000 yield NullBits(self, "reserved[]", 4) # 0x00#00000 yield NullBits(self, "reserved[]", 4) # 0x0#000000 yield Bit(self, "ext_reloc", "Contains extended relocations?") yield Bit(self, "discarded", "Can be discarded?") yield Bit(self, "is_not_cached", "Is not cachable?") yield Bit(self, "is_not_paged", "Is not pageable?") # 0x#0000000 yield Bit(self, "is_shareable", "Is shareable?") yield Bit(self, "is_executable", "Is executable?") yield Bit(self, 
"is_readable", "Is readable?") yield Bit(self, "is_writable", "Is writable?") def rva2file(self, rva): return self["phys_off"].value + (rva - self["rva"].value) def createDescription(self): rva = self["rva"].value size = self["mem_size"].value info = [ "rva=0x%08x..0x%08x" % (rva, rva+size), "size=%s" % self["mem_size"].display, ] if self["is_executable"].value: info.append("exec") if self["is_readable"].value: info.append("read") if self["is_writable"].value: info.append("write") return 'Section "%s": %s' % (self["name"].value, ", ".join(info)) def createSectionName(self): try: name = str(self["name"].value.strip(".")) if name: return "section_%s" % name except HACHOIR_ERRORS, err: self.warning(unicode(err)) return "section[]" class DataDirectory(FieldSet): def createFields(self): yield textHandler(UInt32(self, "rva", "Virtual address"), hexadecimal) yield filesizeHandler(UInt32(self, "size")) def createDescription(self): if self["size"].value: return "Directory at %s (%s)" % ( self["rva"].display, self["size"].display) else: return "(empty directory)" class PE_Header(FieldSet): static_size = 24*8 cpu_name = { 0x0184: u"Alpha AXP", 0x01c0: u"ARM", 0x014C: u"Intel 80386", 0x014D: u"Intel 80486", 0x014E: u"Intel Pentium", 0x0200: u"Intel IA64", 0x0268: u"Motorola 68000", 0x0266: u"MIPS", 0x0284: u"Alpha AXP 64 bits", 0x0366: u"MIPS with FPU", 0x0466: u"MIPS16 with FPU", 0x01f0: u"PowerPC little endian", 0x0162: u"R3000", 0x0166: u"MIPS little endian (R4000)", 0x0168: u"R10000", 0x01a2: u"Hitachi SH3", 0x01a6: u"Hitachi SH4", 0x0160: u"R3000 (MIPS), big endian", 0x0162: u"R3000 (MIPS), little endian", 0x0166: u"R4000 (MIPS), little endian", 0x0168: u"R10000 (MIPS), little endian", 0x0184: u"DEC Alpha AXP", 0x01F0: u"IBM Power PC, little endian", } def createFields(self): yield Bytes(self, "header", 4, r"PE header signature (PE\0\0)") if self["header"].value != "PE\0\0": raise ParserError("Invalid PE header signature") yield Enum(UInt16(self, "cpu", "CPU type"), 
self.cpu_name) yield UInt16(self, "nb_section", "Number of sections") yield TimestampUnix32(self, "creation_date", "Creation date") yield UInt32(self, "ptr_to_sym", "Pointer to symbol table") yield UInt32(self, "nb_symbols", "Number of symbols") yield UInt16(self, "opt_hdr_size", "Optional header size") yield Bit(self, "reloc_stripped", "If true, don't contain base relocations.") yield Bit(self, "exec_image", "Executable image?") yield Bit(self, "line_nb_stripped", "COFF line numbers stripped?") yield Bit(self, "local_sym_stripped", "COFF symbol table entries stripped?") yield Bit(self, "aggr_ws", "Aggressively trim working set") yield Bit(self, "large_addr", "Application can handle addresses greater than 2 GB") yield NullBits(self, "reserved", 1) yield Bit(self, "reverse_lo", "Little endian: LSB precedes MSB in memory") yield Bit(self, "32bit", "Machine based on 32-bit-word architecture") yield Bit(self, "is_stripped", "Debugging information removed?") yield Bit(self, "swap", "If image is on removable media, copy and run from swap file") yield PaddingBits(self, "reserved2", 1) yield Bit(self, "is_system", "It's a system file") yield Bit(self, "is_dll", "It's a dynamic-link library (DLL)") yield Bit(self, "up", "File should be run only on a UP machine") yield Bit(self, "reverse_hi", "Big endian: MSB precedes LSB in memory") class PE_OptHeader(FieldSet): SUBSYSTEM_NAME = { 1: u"Native", 2: u"Windows GUI", 3: u"Windows CUI", 5: u"OS/2 CUI", 7: u"POSIX CUI", 8: u"Native Windows", 9: u"Windows CE GUI", 10: u"EFI application", 11: u"EFI boot service driver", 12: u"EFI runtime driver", 13: u"EFI ROM", 14: u"XBOX", 16: u"Windows boot application", } DIRECTORY_NAME = { 0: "export", 1: "import", 2: "resource", 3: "exception", 4: "certificate", 5: "relocation", 6: "debug", 7: "description", 8: "global_ptr", 9: "tls", # Thread local storage 10: "load_config", 11: "bound_import", 12: "import_address", } def createFields(self): yield UInt16(self, "signature", "PE optional 
header signature (0x010b)") # TODO: Support PE32+ (signature=0x020b) if self["signature"].value != 0x010b: raise ParserError("Invalid PE optional header signature") yield UInt8(self, "maj_lnk_ver", "Major linker version") yield UInt8(self, "min_lnk_ver", "Minor linker version") yield filesizeHandler(UInt32(self, "size_code", "Size of code")) yield filesizeHandler(UInt32(self, "size_init_data", "Size of initialized data")) yield filesizeHandler(UInt32(self, "size_uninit_data", "Size of uninitialized data")) yield textHandler(UInt32(self, "entry_point", "Address (RVA) of the code entry point"), hexadecimal) yield textHandler(UInt32(self, "base_code", "Base (RVA) of code"), hexadecimal) yield textHandler(UInt32(self, "base_data", "Base (RVA) of data"), hexadecimal) yield textHandler(UInt32(self, "image_base", "Image base (RVA)"), hexadecimal) yield filesizeHandler(UInt32(self, "sect_align", "Section alignment")) yield filesizeHandler(UInt32(self, "file_align", "File alignment")) yield UInt16(self, "maj_os_ver", "Major OS version") yield UInt16(self, "min_os_ver", "Minor OS version") yield UInt16(self, "maj_img_ver", "Major image version") yield UInt16(self, "min_img_ver", "Minor image version") yield UInt16(self, "maj_subsys_ver", "Major subsystem version") yield UInt16(self, "min_subsys_ver", "Minor subsystem version") yield NullBytes(self, "reserved", 4) yield filesizeHandler(UInt32(self, "size_img", "Size of image")) yield filesizeHandler(UInt32(self, "size_hdr", "Size of headers")) yield textHandler(UInt32(self, "checksum"), hexadecimal) yield Enum(UInt16(self, "subsystem"), self.SUBSYSTEM_NAME) yield UInt16(self, "dll_flags") yield filesizeHandler(UInt32(self, "size_stack_reserve")) yield filesizeHandler(UInt32(self, "size_stack_commit")) yield filesizeHandler(UInt32(self, "size_heap_reserve")) yield filesizeHandler(UInt32(self, "size_heap_commit")) yield UInt32(self, "loader_flags") yield UInt32(self, "nb_directory", "Number of RVA and sizes") for index in 
xrange(self["nb_directory"].value): try: name = self.DIRECTORY_NAME[index] except KeyError: name = "data_dir[%u]" % index yield DataDirectory(self, name) def createDescription(self): return "PE optional header: %s, entry point %s" % ( self["subsystem"].display, self["entry_point"].display)
9,929
Python
.py
208
38.745192
109
0.613515
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,725
cab.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/cab.py
""" Microsoft Cabinet (CAB) archive. Author: Victor Stinner Creation date: 31 january 2007 """ from hachoir_parser import Parser from hachoir_core.field import (FieldSet, Enum, CString, String, UInt16, UInt32, Bit, Bits, PaddingBits, NullBits, DateTimeMSDOS32, RawBytes) from hachoir_parser.common.msdos import MSDOSFileAttr16 from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir_core.endian import LITTLE_ENDIAN MAX_NB_FOLDER = 30 COMPRESSION_NONE = 0 COMPRESSION_NAME = { 0: "Uncompressed", 1: "Deflate", 2: "Quantum", 3: "LZX", } class Folder(FieldSet): def createFields(self): yield UInt32(self, "off_data", "Offset of data") yield UInt16(self, "cf_data") yield Enum(Bits(self, "compr_method", 4, "Compression method"), COMPRESSION_NAME) yield Bits(self, "compr_level", 5, "Compression level") yield PaddingBits(self, "padding", 7) def createDescription(self): text= "Folder: compression %s" % self["compr_method"].display if self["compr_method"].value != COMPRESSION_NONE: text += " (level %u)" % self["compr_level"].value return text class File(FieldSet): def createFields(self): yield filesizeHandler(UInt32(self, "filesize", "Uncompressed file size")) yield UInt32(self, "offset", "File offset after decompression") yield UInt16(self, "iFolder", "file control id") yield DateTimeMSDOS32(self, "timestamp") yield MSDOSFileAttr16(self, "attributes") yield CString(self, "filename", charset="ASCII") def createDescription(self): return "File %s (%s)" % ( self["filename"].display, self["filesize"].display) class Reserved(FieldSet): def createFields(self): yield UInt32(self, "size") size = self["size"].value if size: yield RawBytes(self, "data", size) class Flags(FieldSet): static_size = 16 def createFields(self): yield Bit(self, "has_previous") yield Bit(self, "has_next") yield Bit(self, "has_reserved") yield NullBits(self, "padding", 13) class CabFile(Parser): endian = LITTLE_ENDIAN MAGIC = "MSCF" PARSER_TAGS = { "id": "cab", "category": 
"archive", "file_ext": ("cab",), "mime": (u"application/vnd.ms-cab-compressed",), "magic": ((MAGIC, 0),), "min_size": 1*8, # header + file entry "description": "Microsoft Cabinet archive" } def validate(self): if self.stream.readBytes(0, 4) != self.MAGIC: return "Invalid magic" if self["cab_version"].value != 0x0103: return "Unknown version (%s)" % self["cab_version"].display if not (1 <= self["nb_folder"].value <= MAX_NB_FOLDER): return "Invalid number of folder (%s)" % self["nb_folder"].value return True def createFields(self): yield String(self, "magic", 4, "Magic (MSCF)", charset="ASCII") yield textHandler(UInt32(self, "hdr_checksum", "Header checksum (0 if not used)"), hexadecimal) yield filesizeHandler(UInt32(self, "filesize", "Cabinet file size")) yield textHandler(UInt32(self, "fld_checksum", "Folders checksum (0 if not used)"), hexadecimal) yield UInt32(self, "off_file", "Offset of first file") yield textHandler(UInt32(self, "files_checksum", "Files checksum (0 if not used)"), hexadecimal) yield textHandler(UInt16(self, "cab_version", "Cabinet version"), hexadecimal) yield UInt16(self, "nb_folder", "Number of folders") yield UInt16(self, "nb_files", "Number of files") yield Flags(self, "flags") yield UInt16(self, "setid") yield UInt16(self, "number", "Zero-based cabinet number") # --- TODO: Support flags if self["flags/has_reserved"].value: yield Reserved(self, "reserved") #(3) Previous cabinet name, if CAB_HEADER.flags & CAB_FLAG_HASPREV #(4) Previous disk name, if CAB_HEADER.flags & CAB_FLAG_HASPREV #(5) Next cabinet name, if CAB_HEADER.flags & CAB_FLAG_HASNEXT #(6) Next disk name, if CAB_HEADER.flags & CAB_FLAG_HASNEXT # ---- for index in xrange(self["nb_folder"].value): yield Folder(self, "folder[]") for index in xrange(self["nb_files"].value): yield File(self, "file[]") end = self.seekBit(self.size, "endraw") if end: yield end def createContentSize(self): return self["filesize"].value * 8
4,607
Python
.py
107
35.719626
104
0.637439
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,726
zip.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/zip.pyc
Ñò Î ÈMc @sŒdZddklZddklZlZlZlZlZl Z l Z l Z l Z l Z lZlZlZlZl Z ddklZlZlZddklZddklZddklZddklZdLZd Zhd d 6dd6dd6dd6dd6dd6dd6dd6dd 6dd6dd 6d!d"6d#d$6d!d%6d&d'6d!d(6d!d)6d!d*6d+d,6d-d.6d/d06Z d1„Z!d2efd3„ƒYZ"d4efd5„ƒYZ#d6efd7„ƒYZ$d8„Z%d9„Z&d:efd;„ƒYZ'd<efd=„ƒYZ(d>efd?„ƒYZ)d@efdA„ƒYZ*dBefdC„ƒYZ+dDefdE„ƒYZ,dFefdG„ƒYZ-dHefdI„ƒYZ.dJS(Msg Zip splitter. Status: can read most important headers Authors: Christophe Gisquet and Victor Stinner iÿÿÿÿ(tParser(tFieldSett ParserErrortBittBitstEnumtTimeDateMSDOS32tSubFiletUInt8tUInt16tUInt32tUInt64tStringtPascalString16tRawBytesR(t textHandlertfilesizeHandlert hexadecimal(tHACHOIR_ERRORS(t makeUnicode(t LITTLE_ENDIAN(tDeflateièiiuno compressioniuShrunkiuReduced (factor 1)iuReduced (factor 2)iuReduced (factor 3)iuReduced (factor 4)iuImplodediu TokenizingiuDeflateu Deflate64i uPKWARE Implodingi uReserved by PKWAREi u(File is compressed using BZIP2 algorithmi i u LZMA (EFS)iiiiu(File is compressed using IBM TERSE (new)iuIBM LZ77 z Architecture (PFS)iuPPMd version I, Rev 1ibcCsdt|idƒS(Ns%u.%ui (tdivmodtvalue(tfield((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyt ZipRevision0st ZipVersioncBs›eZdZhdd6dd6dd6dd6d d 6d d 6d d6dd6dd6dd6dd6dd6dd6dd6dd6dd 6d!d6d"d#6Zd$„ZRS(%iuFAT file system (DOS, OS/2, NT)iuAmigaiuVMS (VAX or Alpha AXP)iuUnixiuVM/CMSiuAtariiuHPFS file system (OS/2, NT 3.x)iu MacintoshiuZ-SystemiuCP/Mi uTOPS-20i uNTFS file system (NT)i uSMS/QDOSi u Acorn RISC OSi uVFAT file system (Win95, NT)iuMVSiuBeOS (BeBox or PowerMac)uTandemiccs;tt|ddƒtƒVtt|ddƒ|iƒVdS(Nt zip_versions ZIP versionthost_oss ZIP Host OS(RRRRtHOST_OS(tself((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyt createFieldsIs(t__name__t __module__t static_sizeRR(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyR3s* tZipGeneralFlagscBseZdZd„ZRS(iccs”|ii|iddtƒ}t|dddƒVt|ddƒVt|ddƒVt|d d ƒVt|dd dƒVt|d d ƒVt|ddƒVt|ddƒVt|ddƒV|djo&t|ddƒVt|ddƒVn�|d)jo@hdd6dd6dd6d 
d!6}tt|d"dƒ|ƒVnB|d#jo#t|d$d%ƒVt|dƒVnt|d&dƒVt|d'd(ƒVdS(*Nisunused[]itUnusedtencrypted_central_dirs3Selected data values in the Local Header are maskedt incompletes,Reserved by PKWARE for enhanced compression.t uses_unicodes"Filename and comments are in UTF-8itstrong_encrypts!Strong encryption (version >= 50)t is_patcheds%File is compressed with patched data?tenhanced_deflatesReserved for use with method 8thas_descriptors'Compressed data followed by descriptor?ituse_8k_slidings)Use 8K sliding dictionary (instead of 4K)t use_3shannons5Use a 3 Shannon-Fano tree (instead of 2 Shannon-Fano)ii sNormal compressionisMaximum compressionisFast compressionsSuper Fast compressionitmethoditlzma_eoss.LZMA stream is ended with a EndOfStream markertcompression_infot is_encryptedsFile is encrypted?(ii (tstreamtreadBitstabsolute_addressRRRR(RR.tNAME((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyROs4     (R R!R"R(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyR#Mst ExtraFieldcBs¿eZhdd6dd6dd6dd6dd 6d d 6d d 6dd6dd6dd6dd6dd6dd6dd6dd6dd6d d!6d"d#6d$d%6d&d'6d(d)6d*d+6d,d-6d.d/6Zd0„ZRS(1sAV Infois(OS/2 extended attributes (also Info-ZIP)i sPKWARE Win95/WinNT FileTimesi sPKWARE VAX/VMS (also Info-ZIP)i s PKWARE Unixi sPatch Descriptoris Info-ZIP Macintosh (old, J. Lee)iÈsZipIt Macintosh (first version)i&s5ZipIt Macintosh v 1.3.5 and newer (w/o full filename)i's-Info-ZIP Macintosh (new, D. 
Haase Mac3 field)iM3sAcorn/SparkFS (David Pilling)iACs+Windows NT security descriptor (binary ACL)iSDsVM/CMSiGtMVSiGs"FWKCS MD5 (third party, see below)iFKs#OS/2 access control list (text ACL)iALsInfo-ZIP VMS (VAX or Alpha)iIMsAOS/VS (binary ACL)iVSsextended timestampiUTs-Info-ZIP Unix (original; also OS/2, NT, etc.)iUXsBeOS (BeBox, PowerMac, etc.)iBesASi UnixinusInfo-ZIP Unix (new)iUxsSMS/QDOSiJûccs`tt|ddƒ|iƒVt|ddƒ}|V|idjot|d|dƒVndS(Ntfield_idsExtra field IDtfield_data_sizesExtra field data sizeit field_datasUnknown field data(RR tEXTRA_FIELD_IDRR(Rtsize((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRŠs  (R R!R;R(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyR6os4 ccs¯t|ddƒVt|ddƒVtt|ddƒtƒVt|ddƒVtt|d d ƒtƒVt|d d ƒVt|d dƒVt|ddƒVt|ddƒVdS(Ntversion_neededsVersion neededtflagssGeneral purpose flagt compressionsCompression methodtlast_modsLast modification file timetcrc32sCRC-32tcompressed_sizesCompressed sizetuncompressed_sizesUncompressed sizetfilename_lengthsFilename lengtht extra_lengthsExtra fields length( RR#RR tCOMPRESSION_METHODRRR R(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pytZipStartCommonFields’scCs|diodSdSdS(Nsflags/uses_unicodesUTF-8s ISO-8859-15(R(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyt zipGetCharsetžstZipCentralDirectorycBs eZdZd„Zd„ZRS(iPKc cst|ddƒVxt|ƒD] }|VqWt|ƒ}t|ddƒVt|ddƒVt|ddƒVt|d d ƒVt|d d ƒVt|d |didd|ƒVd|dijot|d|didƒVnd|dijo%t|d|didd|ƒVndS(Ntversion_made_bysVersion made bytcomment_lengthsComment lengthtdisk_number_startsDisk number startt internal_attrsInternal file attributest external_attrsExternal file attributest offset_headersRelative offset of local headertfilenameRDtFilenametcharsetiREtextras Extra fieldstcommenttComment(RRGRHR R R RR(RRRR((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyR¦s$    cCsd|diS(NsCentral directory: 
%sRP(tdisplay(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pytcreateDescription¼s(R R!tHEADERRRW(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRI¤s tZip64EndCentralDirectorycBseZdZd„ZRS(iPKccsÐt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|d d ƒVt|d d ƒVt|d dƒVt|ddƒVt|ddƒVd|dijot|d|didƒVndS(Ntzip64_end_sizes-Size of zip64 end of central directory recordRJsVersion made byR=sVersion needed to extractt number_disksNumber of this diskt number_disk2s:Number of the disk with the start of the central directorytnumber_entriess=Total number of entries in the central directory on this disktnumber_entries2s0Total number of entries in the central directoryR<sSize of the central directorytoffsets$Offset of start of central directoryit data_sectorszip64 extensible data sector(R RR RR(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRÁs     (R R!RXR(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRY¿stZipEndCentralDirectorycBseZdZd„ZRS(iPKccs{t|ddƒVt|ddƒVt|ddƒVt|ddƒVt|d d ƒVt|d d ƒVt|d dƒVdS(NR[sNumber of this diskR\sNumber in the central dirttotal_number_disks$Total number of entries in this diskttotal_number_disk2s*Total number of entries in the central dirR<sSize of the central directoryR_s$Offset of start of central directoryRTs ZIP comment(R R R (R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRÕs  (R R!RXR(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRaÓstZipDataDescriptorcBs#eZdZdZdZd„ZRS(sPKiPKi`ccsLtt|ddƒtƒVtt|ddƒƒVtt|ddƒƒVdS(Nt file_crc32sChecksum (CRC32)tfile_compressed_sizesCompressed size (bytes)tfile_uncompressed_sizesUncompressed size (bytes)(RR RR(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRäs     (R R!t HEADER_STRINGRXR"R(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRdàst FileEntrycBsAeZdZdZd„Zd„Zd„Zd„Zd„Z RS(iPKcCso|di}|djot|d|d|iƒSt|d|d|iƒ}|tjo 
t|ƒS|SdS(NR?itdataRPtcompressed_data(RRRPtCOMPRESSION_DEFLATER(RR<R?t compressed((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRjðs    ccsÕ|iitit|i|iƒ}|djotdtiƒ‚n|i|ƒVt t |ddƒt ƒVt|ddƒ}|V|di djo5|di |jo!td ||di fƒ‚ndS( NisCouldn't resync to %ssheader[]tHeadert data_descsData descriptorRARfs*Bad resync: position=>%i but data_desc=>%i( R2tsearchBytesLengthRdRhtFalseR4t current_sizeRRjRR RR(RR<Ro((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pytresyncús c csxt|ƒD] }|Vq W|di}|o6t|d|ddt|ƒƒ}|V|i|_n|diot|d|didƒVn|di}|d jo|i|ƒVn/|d io x|iƒD] }|VqÚWn|d iot|d d ƒVndS(NRDRPRQRRRERStExtraRBisflags/incompletesflags/has_descriptorRosData descriptor( RGRR RHRPRRjRsRd(RRtlengthRPR<((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyR s(       cCsd|di|difS(NsFile entry: %s (%s)RPRB(RRV(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRW#scCs(|ditjod|diSdS(NR?sUnknown compression method (%u)t(RRF(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pytvalidate'sN( R R!RXtNoneRPRjRsRRWRw(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRiìs   t ZipSignaturecBseZdZd„ZRS(iPKccst|ddƒVdS(Nt signaturet Signature(R (R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyR.s(R R!RXR(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRy,stZip64EndCentralDirectoryLocatorcBseZdZd„ZRS(iPKccs7t|ddƒVt|ddƒVt|ddƒVdS(Nt disk_numbersGNumber of the disk with the start of the zip64 end of central directorytrelative_offsets<Relative offset of the zip64 end of central directory recordtdisk_total_numbersTotal number of disks(R R (R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyR3s   (R R!RXR(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyR|1stZipFilecBsTeZeZhdd6dd6dd6dd6dd6dd 6d d 6d d 6dd6dd6dd6dd6dd6dd6dd6dd6dd6d 
d!6d"d#6d$d%6d&d'6d(d)6d*d+6d,d-6d.d/6d0d16d2d36Zhdd46d5d66eeiƒƒd76eeiƒƒd86dJd;6d<d=6dLdA6dBdC6ZdD„Z dE„Z dF„Z dG„Z dH„Z RS(Mtzipuapplication/zipuapplication/x-ziptjaruapplication/x-jaruapplication/java-archivetsxcuapplication/vnd.sun.xml.calctsxduapplication/vnd.sun.xml.drawtsxiuapplication/vnd.sun.xml.impresstsxwuapplication/vnd.sun.xml.writertsxmuapplication/vnd.sun.xml.mathtstcu%application/vnd.sun.xml.calc.templatetstdu%application/vnd.sun.xml.draw.templatetstiu(application/vnd.sun.xml.impress.templatetstwu'application/vnd.sun.xml.writer.templatetsxgu%application/vnd.sun.xml.writer.globaltodcu(application/vnd.oasis.opendocument.charttodiu(application/vnd.oasis.opendocument.imagetodbu+application/vnd.oasis.opendocument.databasetodfu*application/vnd.oasis.opendocument.formulatodgu+application/vnd.oasis.opendocument.graphicstodpu/application/vnd.oasis.opendocument.presentationtodsu.application/vnd.oasis.opendocument.spreadsheettodtu'application/vnd.oasis.opendocument.texttodmu.application/vnd.oasis.opendocument.text-mastertotgu4application/vnd.oasis.opendocument.graphics-templatetotpu8application/vnd.oasis.opendocument.presentation-templatetotsu7application/vnd.oasis.opendocument.spreadsheet-templatetottu0application/vnd.oasis.opendocument.text-templatetidtarchivetcategorytfile_exttmimesPKitmagictskiptsubfileiiitmin_sizes ZIP archivet descriptioncCsc|ditijodSy|d}Wntj o }dSX|iƒ}|o d|StS(Ns header[0]s Invalid magicsfile[0]sUnable to get file #0s File #0: %s(RRiRXRRwtTrue(Rtfile0terr((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyRwps  ccszd|_g|_xa|ipVtt|ddƒtƒ}|V|i}|ti jot|dƒVq|t i jot |dƒVq|djot |dƒVq|t i jot |dƒVq|t i jot |dd ƒVq|t i jot |d d ƒVq|ti jot|d d ƒVq|ti jot|ddƒVqtd|ƒ‚qWdS(Nsheader[]Rnsfile[]s spanning[]iPK00stemporary_spanning[]scentral_directory[]tend_central_directorysEnd of central directorytend64_central_directorysZIP64 end of central directoryRzR{t end_locators&ZIP64 Enf of central directory locators#Error, unknown ZIP 
header (0x%08X).(RxRztcentral_directoryteofRR RRRiRXRdRIRaRYRyR|R(Rtheader((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyR|s0     cCs.|didjot|diƒSdSdS(Nsfile[0]/filenametmimetypes file[0]/datauapplication/zip(RR(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pytcreateMimeType—scCsI|didjo1|di}||ijod|i|SndS(Nsfile[0]/filenameR­sfile[0]/compressed_datat.s.zip(Rt MIME_TYPES(RRž((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pytcreateFilenameSuffix�s  cCsBd}td}|iid||ƒ}|dj o |dSdS(NiisPKii°(t MAX_FILESIZER2t searchBytesRx(Rtstarttend((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pytcreateContentSize¤s    (sPKi((sPKiiið(R R!RtendianR°ttuplet itervaluestiterkeyst PARSER_TAGSRwRR®R±R¶(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyR€;sV     Ni i€>(/t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRRR R R R R Rthachoir_core.text_handlerRRRthachoir_core.errorRthachoir_core.toolsRthachoir_core.endianRthachoir_parser.common.deflateRR²RlRFRRR#R6RGRHRIRYRaRdRiRyR|R€(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/zip.pyt<module>sZd  "#   @
20,083
Python
.py
68
294.279412
1,606
0.51439
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,727
sevenzip.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/sevenzip.py
""" 7zip file parser Informations: - File 7zformat.txt of 7-zip SDK: http://www.7-zip.org/sdk.html Author: Olivier SCHWAB Creation date: 6 december 2006 """ from hachoir_parser import Parser from hachoir_core.field import (Field, FieldSet, ParserError, GenericVector, Enum, UInt8, UInt32, UInt64, Bytes, RawBytes) from hachoir_core.endian import LITTLE_ENDIAN from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler class SZUInt64(Field): """ Variable length UInt64, where the first byte gives both the number of bytes needed and the upper byte value. """ def __init__(self, parent, name, max_size=None, description=None): Field.__init__(self, parent, name, size=8, description=description) value = 0 addr = self.absolute_address mask = 0x80 firstByte = parent.stream.readBits(addr, 8, LITTLE_ENDIAN) for i in xrange(8): addr += 8 if not (firstByte & mask): value += ((firstByte & (mask-1)) << (8*i)) break value |= (parent.stream.readBits(addr, 8, LITTLE_ENDIAN) << (8*i)) mask >>= 1 self._size += 8 self.createValue = lambda: value ID_END, ID_HEADER, ID_ARCHIVE_PROPS, ID_ADD_STREAM_INFO, ID_MAIN_STREAM_INFO, \ ID_FILES_INFO, ID_PACK_INFO, ID_UNPACK_INFO, ID_SUBSTREAMS_INFO, ID_SIZE, \ ID_CRC, ID_FOLDER, ID_CODERS_UNPACK_SIZE, ID_NUM_UNPACK_STREAMS, \ ID_EMPTY_STREAM, ID_EMPTY_FILE, ID_ANTI, ID_NAME, ID_CREATION_TIME, \ ID_LAST_ACCESS_TIME, ID_LAST_WRITE_TIME, ID_WIN_ATTR, ID_COMMENT, \ ID_ENCODED_HEADER = xrange(24) ID_INFO = { ID_END : "End", ID_HEADER : "Header embedding another one", ID_ARCHIVE_PROPS : "Archive Properties", ID_ADD_STREAM_INFO : "Additional Streams Info", ID_MAIN_STREAM_INFO : "Main Streams Info", ID_FILES_INFO : "Files Info", ID_PACK_INFO : "Pack Info", ID_UNPACK_INFO : "Unpack Info", ID_SUBSTREAMS_INFO : "Substreams Info", ID_SIZE : "Size", ID_CRC : "CRC", ID_FOLDER : "Folder", ID_CODERS_UNPACK_SIZE: "Coders Unpacked size", ID_NUM_UNPACK_STREAMS: "Number of Unpacked Streams", ID_EMPTY_STREAM : "Empty Stream", ID_EMPTY_FILE : "Empty 
File", ID_ANTI : "Anti", ID_NAME : "Name", ID_CREATION_TIME : "Creation Time", ID_LAST_ACCESS_TIME : "Last Access Time", ID_LAST_WRITE_TIME : "Last Write Time", ID_WIN_ATTR : "Win Attributes", ID_COMMENT : "Comment", ID_ENCODED_HEADER : "Header holding encoded data info", } class SkippedData(FieldSet): def createFields(self): yield Enum(UInt8(self, "id[]"), ID_INFO) size = SZUInt64(self, "size") yield size if size.value > 0: yield RawBytes(self, "data", size.value) def waitForID(s, wait_id, wait_name="waited_id[]"): while not s.eof: addr = s.absolute_address+s.current_size uid = s.stream.readBits(addr, 8, LITTLE_ENDIAN) if uid == wait_id: yield Enum(UInt8(s, wait_name), ID_INFO) s.info("Found ID %s (%u)" % (ID_INFO[uid], uid)) return s.info("Skipping ID %u!=%u" % (uid, wait_id)) yield SkippedData(s, "skipped_id[]", "%u != %u" % (uid, wait_id)) class HashDigest(FieldSet): def __init__(self, parent, name, num_digests, desc=None): FieldSet.__init__(self, parent, name, desc) self.num_digests = num_digests def createFields(self): yield Enum(UInt8(self, "id"), ID_INFO) bytes = self.stream.readBytes(self.absolute_address, self.num_digests) if self.num_digests > 0: yield GenericVector(self, "defined[]", self.num_digests, UInt8, "bool") for index in xrange(self.num_digests): if bytes[index]: yield textHandler(UInt32(self, "hash[]", "Hash for digest %u" % index), hexadecimal) class PackInfo(FieldSet): def createFields(self): yield Enum(UInt8(self, "id"), ID_INFO) # Very important, helps determine where the data is yield SZUInt64(self, "pack_pos", "Position of the packs") num = SZUInt64(self, "num_pack_streams") yield num num = num.value for field in waitForID(self, ID_SIZE, "size_marker"): yield field for size in xrange(num): yield SZUInt64(self, "pack_size[]") while not self.eof: addr = self.absolute_address+self.current_size uid = self.stream.readBits(addr, 8, LITTLE_ENDIAN) if uid == ID_END: yield Enum(UInt8(self, "end_marker"), ID_INFO) break elif uid == ID_CRC: yield 
HashDigest(self, "hash_digest", size) else: yield SkippedData(self, "skipped_data") def lzmaParams(value): param = value.value remainder = param / 9 # Literal coder context bits lc = param % 9 # Position state bits pb = remainder / 5 # Literal coder position bits lp = remainder % 5 return "lc=%u pb=%u lp=%u" % (lc, lp, pb) class CoderID(FieldSet): CODECS = { # Only 2 methods ... and what about PPMD ? "\0" : "copy", "\3\1\1": "lzma", } def createFields(self): byte = UInt8(self, "id_size") yield byte byte = byte.value self.info("ID=%u" % byte) size = byte & 0xF if size > 0: name = self.stream.readBytes(self.absolute_address+self.current_size, size) if name in self.CODECS: name = self.CODECS[name] self.info("Codec is %s" % name) else: self.info("Undetermined codec %s" % name) name = "unknown" yield RawBytes(self, name, size) #yield textHandler(Bytes(self, "id", size), lambda: name) if byte & 0x10: yield SZUInt64(self, "num_stream_in") yield SZUInt64(self, "num_stream_out") self.info("Streams: IN=%u OUT=%u" % \ (self["num_stream_in"].value, self["num_stream_out"].value)) if byte & 0x20: size = SZUInt64(self, "properties_size[]") yield size if size.value == 5: #LzmaDecodeProperties@LZMAStateDecode.c yield textHandler(UInt8(self, "parameters"), lzmaParams) yield filesizeHandler(UInt32(self, "dictionary_size")) elif size.value > 0: yield RawBytes(self, "properties[]", size.value) class CoderInfo(FieldSet): def __init__(self, parent, name, desc=None): FieldSet.__init__(self, parent, name, desc) self.in_streams = 1 self.out_streams = 1 def createFields(self): # The real ID addr = self.absolute_address + self.current_size b = self.parent.stream.readBits(addr, 8, LITTLE_ENDIAN) cid = CoderID(self, "coder_id") yield cid if b&0x10: # Work repeated, ... 
self.in_streams = cid["num_stream_in"].value self.out_streams = cid["num_stream_out"].value # Skip other IDs while b&0x80: addr = self.absolute_address + self.current_size b = self.parent.stream.readBits(addr, 8, LITTLE_ENDIAN) yield CoderID(self, "unused_codec_id[]") class BindPairInfo(FieldSet): def createFields(self): # 64 bits values then cast to 32 in fact yield SZUInt64(self, "in_index") yield SZUInt64(self, "out_index") self.info("Indexes: IN=%u OUT=%u" % \ (self["in_index"].value, self["out_index"].value)) class FolderItem(FieldSet): def __init__(self, parent, name, desc=None): FieldSet.__init__(self, parent, name, desc) self.in_streams = 0 self.out_streams = 0 def createFields(self): yield SZUInt64(self, "num_coders") num = self["num_coders"].value self.info("Folder: %u codecs" % num) # Coders info for index in xrange(num): ci = CoderInfo(self, "coder_info[]") yield ci self.in_streams += ci.in_streams self.out_streams += ci.out_streams # Bin pairs self.info("out streams: %u" % self.out_streams) for index in xrange(self.out_streams-1): yield BindPairInfo(self, "bind_pair[]") # Packed streams # @todo: Actually find mapping packed_streams = self.in_streams - self.out_streams + 1 if packed_streams == 1: pass else: for index in xrange(packed_streams): yield SZUInt64(self, "pack_stream[]") class UnpackInfo(FieldSet): def createFields(self): yield Enum(UInt8(self, "id"), ID_INFO) # Wait for synch for field in waitForID(self, ID_FOLDER, "folder_marker"): yield field yield SZUInt64(self, "num_folders") # Get generic info num = self["num_folders"].value self.info("%u folders" % num) yield UInt8(self, "is_external") # Read folder items for folder_index in xrange(num): yield FolderItem(self, "folder_item[]") # Get unpack sizes for each coder of each folder for field in waitForID(self, ID_CODERS_UNPACK_SIZE, "coders_unpsize_marker"): yield field for folder_index in xrange(num): folder_item = self["folder_item[%u]" % folder_index] for index in 
xrange(folder_item.out_streams): #yield UInt8(self, "unpack_size[]") yield SZUInt64(self, "unpack_size[]") # Extract digests while not self.eof: addr = self.absolute_address+self.current_size uid = self.stream.readBits(addr, 8, LITTLE_ENDIAN) if uid == ID_END: yield Enum(UInt8(self, "end_marker"), ID_INFO) break elif uid == ID_CRC: yield HashDigest(self, "hash_digest", num) else: yield SkippedData(self, "skip_data") class SubStreamInfo(FieldSet): def createFields(self): yield Enum(UInt8(self, "id"), ID_INFO) raise ParserError("SubStreamInfo not implemented yet") class EncodedHeader(FieldSet): def createFields(self): yield Enum(UInt8(self, "id"), ID_INFO) while not self.eof: addr = self.absolute_address+self.current_size uid = self.stream.readBits(addr, 8, LITTLE_ENDIAN) if uid == ID_END: yield Enum(UInt8(self, "end_marker"), ID_INFO) break elif uid == ID_PACK_INFO: yield PackInfo(self, "pack_info", ID_INFO[ID_PACK_INFO]) elif uid == ID_UNPACK_INFO: yield UnpackInfo(self, "unpack_info", ID_INFO[ID_UNPACK_INFO]) elif uid == ID_SUBSTREAMS_INFO: yield SubStreamInfo(self, "substreams_info", ID_INFO[ID_SUBSTREAMS_INFO]) else: self.info("Unexpected ID (%i)" % uid) break class IDHeader(FieldSet): def createFields(self): yield Enum(UInt8(self, "id"), ID_INFO) ParserError("IDHeader not implemented") class NextHeader(FieldSet): def __init__(self, parent, name, desc="Next header"): FieldSet.__init__(self, parent, name, desc) self._size = 8*self["/signature/start_hdr/next_hdr_size"].value # Less work, as much interpretable information as the other # version... 
what an obnoxious format def createFields2(self): yield Enum(UInt8(self, "header_type"), ID_INFO) yield RawBytes(self, "header_data", self._size-1) def createFields(self): uid = self.stream.readBits(self.absolute_address, 8, LITTLE_ENDIAN) if uid == ID_HEADER: yield IDHeader(self, "header", ID_INFO[ID_HEADER]) elif uid == ID_ENCODED_HEADER: yield EncodedHeader(self, "encoded_hdr", ID_INFO[ID_ENCODED_HEADER]) # Game Over: this is usually encoded using LZMA, not copy # See SzReadAndDecodePackedStreams/SzDecode being called with the # data position from "/next_hdr/encoded_hdr/pack_info/pack_pos" # We should process further, yet we can't... else: ParserError("Unexpected ID %u" % uid) size = self._size - self.current_size if size > 0: yield RawBytes(self, "next_hdr_data", size//8, "Next header's data") class Body(FieldSet): def __init__(self, parent, name, desc="Body data"): FieldSet.__init__(self, parent, name, desc) self._size = 8*self["/signature/start_hdr/next_hdr_offset"].value def createFields(self): if "encoded_hdr" in self["/next_hdr/"]: pack_size = sum([s.value for s in self.array("/next_hdr/encoded_hdr/pack_info/pack_size")]) body_size = self["/next_hdr/encoded_hdr/pack_info/pack_pos"].value yield RawBytes(self, "compressed_data", body_size, "Compressed data") # Here we could check if copy method was used to "compress" it, # but this never happens, so just output "compressed file info" yield RawBytes(self, "compressed_file_info", pack_size, "Compressed file information") size = (self._size//8) - pack_size - body_size if size > 0: yield RawBytes(self, "unknown_data", size) elif "header" in self["/next_hdr"]: yield RawBytes(self, "compressed_data", self._size//8, "Compressed data") class StartHeader(FieldSet): static_size = 160 def createFields(self): yield textHandler(UInt64(self, "next_hdr_offset", "Next header offset"), hexadecimal) yield UInt64(self, "next_hdr_size", "Next header size") yield textHandler(UInt32(self, "next_hdr_crc", "Next header CRC"), 
hexadecimal) class SignatureHeader(FieldSet): static_size = 96 + StartHeader.static_size def createFields(self): yield Bytes(self, "signature", 6, "Signature Header") yield UInt8(self, "major_ver", "Archive major version") yield UInt8(self, "minor_ver", "Archive minor version") yield textHandler(UInt32(self, "start_hdr_crc", "Start header CRC"), hexadecimal) yield StartHeader(self, "start_hdr", "Start header") class SevenZipParser(Parser): PARSER_TAGS = { "id": "7zip", "category": "archive", "file_ext": ("7z",), "mime": (u"application/x-7z-compressed",), "min_size": 32*8, "magic": (("7z\xbc\xaf\x27\x1c", 0),), "description": "Compressed archive in 7z format" } endian = LITTLE_ENDIAN def createFields(self): yield SignatureHeader(self, "signature", "Signature Header") yield Body(self, "body_data") yield NextHeader(self, "next_hdr") def validate(self): if self.stream.readBytes(0,6) != "7z\xbc\xaf'\x1c": return "Invalid signature" return True def createContentSize(self): size = self["/signature/start_hdr/next_hdr_offset"].value size += self["/signature/start_hdr/next_hdr_size"].value size += 12 # Signature size size += 20 # Start header size return size*8
15,592
Python
.py
361
34.022161
103
0.592785
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,728
bzip2_parser.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/bzip2_parser.py
""" BZIP2 archive file Author: Victor Stinner """ from hachoir_parser import Parser from hachoir_core.field import (ParserError, String, Bytes, Character, UInt8, UInt32, CompressedField) from hachoir_core.endian import LITTLE_ENDIAN from hachoir_core.text_handler import textHandler, hexadecimal try: from bz2 import BZ2Decompressor class Bunzip2: def __init__(self, stream): self.bzip2 = BZ2Decompressor() def __call__(self, size, data=''): try: return self.bzip2.decompress(data) except EOFError: return '' has_deflate = True except ImportError: has_deflate = False class Bzip2Parser(Parser): PARSER_TAGS = { "id": "bzip2", "category": "archive", "file_ext": ("bz2",), "mime": (u"application/x-bzip2",), "min_size": 10*8, "magic": (('BZh', 0),), "description": "bzip2 archive" } endian = LITTLE_ENDIAN def validate(self): if self.stream.readBytes(0, 3) != 'BZh': return "Wrong file signature" if not("1" <= self["blocksize"].value <= "9"): return "Wrong blocksize" return True def createFields(self): yield String(self, "id", 3, "Identifier (BZh)", charset="ASCII") yield Character(self, "blocksize", "Block size (KB of memory needed to uncompress)") yield UInt8(self, "blockheader", "Block header") if self["blockheader"].value == 0x17: yield String(self, "id2", 4, "Identifier2 (re8P)", charset="ASCII") yield UInt8(self, "id3", "Identifier3 (0x90)") elif self["blockheader"].value == 0x31: yield String(self, "id2", 5, "Identifier 2 (AY&SY)", charset="ASCII") if self["id2"].value != "AY&SY": raise ParserError("Invalid identifier 2 (AY&SY)!") else: raise ParserError("Invalid block header!") yield textHandler(UInt32(self, "crc32", "CRC32"), hexadecimal) if self._size is None: # TODO: is it possible to handle piped input? 
raise NotImplementedError size = (self._size - self.current_size)/8 if size: for tag, filename in self.stream.tags: if tag == "filename" and filename.endswith(".bz2"): filename = filename[:-4] break else: filename = None data = Bytes(self, "file", size) if has_deflate: CompressedField(self, Bunzip2) def createInputStream(**args): if filename: args.setdefault("tags",[]).append(("filename", filename)) return self._createInputStream(**args) data._createInputStream = createInputStream yield data
2,880
Python
.py
72
29.930556
92
0.576029
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,729
bzip2_parser.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/bzip2_parser.pyc
—Ú Œ »Mc @s”dZddklZddklZlZlZlZlZl Z l Z ddk l Z ddk lZlZy-ddklZdd dÑÉYZeZWnej o eZnXd efd ÑÉYZd S( s, BZIP2 archive file Author: Victor Stinner iˇˇˇˇ(tParser(t ParserErrortStringtBytest CharactertUInt8tUInt32tCompressedField(t LITTLE_ENDIAN(t textHandlert hexadecimal(tBZ2DecompressortBunzip2cBseZdÑZddÑZRS(cCstÉ|_dS(N(R tbzip2(tselftstream((sM/pentest/enumeration/google/metagoofil/hachoir_parser/archive/bzip2_parser.pyt__init__stcCs/y|ii|ÉSWntj odSXdS(NR(R t decompresstEOFError(Rtsizetdata((sM/pentest/enumeration/google/metagoofil/hachoir_parser/archive/bzip2_parser.pyt__call__s(t__name__t __module__RR(((sM/pentest/enumeration/google/metagoofil/hachoir_parser/archive/bzip2_parser.pyR s t Bzip2ParsercBsWeZhdd6dd6dd6dd6dd 6dd 6dd6ZeZdÑZdÑZRS(R tidtarchivetcategorytbz2tfile_extuapplication/x-bzip2tmimei itmin_sizetBZhitmagics bzip2 archivet descriptioncCsO|iiddÉdjodSd|dijo djnpdStS( NiiR!sWrong file signaturet1t blocksizet9sWrong blocksize(Rt readBytestvaluetTrue(R((sM/pentest/enumeration/google/metagoofil/hachoir_parser/archive/bzip2_parser.pytvalidate*s %c#s—tàdddddÉVtàddÉVtàdd ÉVàdid jo/tàd d d ddÉVtàddÉVncàdidjoBtàd ddddÉVàd idjotdÉÇq·n tdÉÇttàddÉtÉVàidjo t Çnàiài d}|oùxLài i D]8\}â|djoàidÉoàd âPq=q=Wdâtàd|É}to,tàtÉááfdÜ}||_n|VndS(NRisIdentifier (BZh)tcharsettASCIIR%s.Block size (KB of memory needed to uncompress)t blockheaders Block headeritid2isIdentifier2 (re8P)tid3sIdentifier3 (0x90)i1isIdentifier 2 (AY&SY)sAY&SYsInvalid identifier 2 (AY&SY)!sInvalid block header!tcrc32tCRC32itfilenames.bz2i¸ˇˇˇtfilecs7ào#|idgÉidàfÉnài|çS(NttagsR2(t setdefaulttappendt_createInputStream(targs(RR2(sM/pentest/enumeration/google/metagoofil/hachoir_parser/archive/bzip2_parser.pytcreateInputStreamOs#(RRRR(RR RR t_sizetNonetNotImplementedErrort current_sizeRR4tendswithRt has_deflateRR R7(RRttagRR9((RR2sM/pentest/enumeration/google/metagoofil/hachoir_parser/archive/bzip2_parser.pyt createFields1s8       (sbz2(uapplication/x-bzip2iP(sBZhi((sBZhi(RRt 
PARSER_TAGSRtendianR*RA(((sM/pentest/enumeration/google/metagoofil/hachoir_parser/archive/bzip2_parser.pyRs  N((t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRthachoir_core.endianRthachoir_core.text_handlerR R RR R R)R?t ImportErrortFalseR(((sM/pentest/enumeration/google/metagoofil/hachoir_parser/archive/bzip2_parser.pyt<module>s4  
4,080
Python
.py
26
155.846154
685
0.480878
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,730
tar.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/tar.py
""" Tar archive parser. Author: Victor Stinner """ from hachoir_parser import Parser from hachoir_core.field import (FieldSet, Enum, UInt8, SubFile, String, NullBytes) from hachoir_core.tools import humanFilesize, paddingSize, timestampUNIX from hachoir_core.endian import BIG_ENDIAN import re class FileEntry(FieldSet): type_name = { # 48 is "0", 49 is "1", ... 0: u"Normal disk file (old format)", 48: u"Normal disk file", 49: u"Link to previously dumped file", 50: u"Symbolic link", 51: u"Character special file", 52: u"Block special file", 53: u"Directory", 54: u"FIFO special file", 55: u"Contiguous file" } def getOctal(self, name): return self.octal2int(self[name].value) def getDatetime(self): """ Create modification date as Unicode string, may raise ValueError. """ timestamp = self.getOctal("mtime") return timestampUNIX(timestamp) def createFields(self): yield String(self, "name", 100, "Name", strip="\0", charset="ISO-8859-1") yield String(self, "mode", 8, "Mode", strip=" \0", charset="ASCII") yield String(self, "uid", 8, "User ID", strip=" \0", charset="ASCII") yield String(self, "gid", 8, "Group ID", strip=" \0", charset="ASCII") yield String(self, "size", 12, "Size", strip=" \0", charset="ASCII") yield String(self, "mtime", 12, "Modification time", strip=" \0", charset="ASCII") yield String(self, "check_sum", 8, "Check sum", strip=" \0", charset="ASCII") yield Enum(UInt8(self, "type", "Type"), self.type_name) yield String(self, "lname", 100, "Link name", strip=" \0", charset="ISO-8859-1") yield String(self, "magic", 8, "Magic", strip=" \0", charset="ASCII") yield String(self, "uname", 32, "User name", strip=" \0", charset="ISO-8859-1") yield String(self, "gname", 32, "Group name", strip=" \0", charset="ISO-8859-1") yield String(self, "devmajor", 8, "Dev major", strip=" \0", charset="ASCII") yield String(self, "devminor", 8, "Dev minor", strip=" \0", charset="ASCII") yield NullBytes(self, "padding", 167, "Padding (zero)") filesize = self.getOctal("size") if 
filesize: yield SubFile(self, "content", filesize, filename=self["name"].value) size = paddingSize(self.current_size//8, 512) if size: yield NullBytes(self, "padding_end", size, "Padding (512 align)") def convertOctal(self, chunk): return self.octal2int(chunk.value) def isEmpty(self): return self["name"].value == "" def octal2int(self, text): try: return int(text, 8) except ValueError: return 0 def createDescription(self): if self.isEmpty(): desc = "(terminator, empty header)" else: filename = self["name"].value filesize = humanFilesize(self.getOctal("size")) desc = "(%s: %s, %s)" % \ (filename, self["type"].display, filesize) return "Tar File " + desc class TarFile(Parser): endian = BIG_ENDIAN PARSER_TAGS = { "id": "tar", "category": "archive", "file_ext": ("tar",), "mime": (u"application/x-tar", u"application/x-gtar"), "min_size": 512*8, "magic": (("ustar \0", 257*8),), "subfile": "skip", "description": "TAR archive", } _sign = re.compile("ustar *\0|[ \0]*$") def validate(self): if not self._sign.match(self.stream.readBytes(257*8, 8)): return "Invalid magic number" if self[0].name == "terminator": return "Don't contain any file" try: int(self["file[0]/uid"].value, 8) int(self["file[0]/gid"].value, 8) int(self["file[0]/size"].value, 8) except ValueError: return "Invalid file size" return True def createFields(self): while not self.eof: field = FileEntry(self, "file[]") if field.isEmpty(): yield NullBytes(self, "terminator", 512) break yield field if self.current_size < self._size: yield self.seekBit(self._size, "end") def createContentSize(self): return self["terminator"].address + self["terminator"].size
4,443
Python
.py
107
32.915888
90
0.581616
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,731
gzip_parser.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/gzip_parser.pyc
—Ú Œ »Mc@s∏dZddklZddklZlZlZlZlZl Z l Z l Z l Z l Z lZddklZlZlZddklZddklZdefdÑÉYZd S( s. GZIP archive parser. Author: Victor Stinner iˇˇˇˇ(tParser( tUInt8tUInt16tUInt32tEnumtTimestampUnix32tBittCStringtSubFiletNullBitstBytestRawBytes(t textHandlert hexadecimaltfilesizeHandler(t LITTLE_ENDIAN(tDeflatet GzipParsercBs’eZeZhdd6dd6d.d6d/d6d0d 6d2d 6dd6Zhdd 6dd6dd6dd6dd6dd6dd6dd6dd 6d d!6d"d#6d$d%6d&d'6d(d)6Zhd*d 6Zd+ÑZd,ÑZd-ÑZ RS(3tgziptidtarchivetcategorytgztfile_extuapplication/x-gziptmimeiitmin_sizesã.{5}[][- ]it magic_regexu gzip archivet descriptionuFAT filesystemuAmigaiuVMS (or OpenVMS)iuUnixiuVM/CMSiu Atari TOSiuHPFS filesystem (OS/2, NT)iu MacintoshiuZ-SystemuCP/Mi uTOPS-20i uNTFS filesystem (NT)i uQDOSi u Acorn RISCOSi udeflatecCsè|didjodS|di|ijod|diS|didjodS|d idjod S|d idjod StS( Nt signaturesãsInvalid signaturet compressionsUnknown compression method (%u)s reserved[0]isInvalid reserved[0] values reserved[1]sInvalid reserved[1] values reserved[2]sInvalid reserved[2] value(tvaluetCOMPRESSION_NAMEtTrue(tself((sL/pentest/enumeration/google/metagoofil/hachoir_parser/archive/gzip_parser.pytvalidate3sc cs≈t|dddÉVtt|ddÉ|iÉVt|ddÉVt|dd ÉVt|d d ÉVt|d d ÉVt|ddÉVt|ddÉVt|ddÉVt|ddÉVt|ddÉVt|ddÉVt|ddÉVtt|ddÉ|iÉV|d io0t |ddÉVt |d|didÉVn|d iot |d d!d"d#ÉVn|diot |d$d%ÉVn|diot t |d&d'Ét ÉVn|idjo tÉÇn|i|id(d(}d)|joè|d io|d i}nPxL|iiD]8\}}|d jo|id*Éo|d+ }Pq-q-Wd}tt|d,|d |ÉÉVnt t|d-d.Ét ÉVtt|d/d0ÉÉVdS(1NRisGZip file signature (\x1F\x8B)RsCompression methodtis_texts#File content is probably ASCII textt has_crc16s Header CRC16t has_extras"Extra informations (variable size)t has_filenamesContains filename?t has_commentsContains comment?s reserved[]itmtimesModification timeitslowests-Compressor used maximum compression (slowest)tfastests'Compressor used the fastest compressionitossOperating systemt extra_lengths Extra lengthtextratExtratfilenametFilenametcharsets ISO-8859-1tcommenttCommentt hdr_crc16sCRC16 of the headeriis.gzi˝ˇˇˇtfiletcrc32sUncompressed data content 
CRC32tsizesUncompressed size(R RRRRR Rtos_nameRRR RR R t_sizetNonetNotImplementedErrort current_sizetstreamttagstendswithRRRR(R!R7R/ttag((sL/pentest/enumeration/google/metagoofil/hachoir_parser/archive/gzip_parser.pyt createFields@sP      ! cCsõd}g}d|jo|id|diÉnd|jo|id|diÉn|dio|i|diÉnd|di|ÉfS( Nu gzip archiveR/s filename "%s"R7swas %sR(s%s: %ss, (tappendRtdisplaytjoin(R!tdesctinfo((sL/pentest/enumeration/google/metagoofil/hachoir_parser/archive/gzip_parser.pytcreateDescriptionws  (R(uapplication/x-gzipiê(sã.{5}[][- ]i((sã.{5}[][- ]i( t__name__t __module__Rtendiant PARSER_TAGSR8RR"RARG(((sL/pentest/enumeration/google/metagoofil/hachoir_parser/archive/gzip_parser.pyRs:    7N(t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRRR R R thachoir_core.text_handlerR R Rthachoir_core.endianRthachoir_parser.common.deflateRR(((sL/pentest/enumeration/google/metagoofil/hachoir_parser/archive/gzip_parser.pyt<module>s L
5,001
Python
.py
25
199
705
0.4791
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,732
rpm.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/rpm.pyc
Ñò Î ÈMc@sdZddklZddklZlZlZlZlZl Z l Z l Z l Z l Z lZlZlZlZddklZddklZddklZdefd„ƒYZd efd „ƒYZd efd „ƒYZd „Zdefd„ƒYZdefd„ƒYZdS(sA RPM archive parser. Author: Victor Stinner, 1st December 2005. iÿÿÿÿ(tParser(tFieldSett ParserErrortUInt8tUInt16tUInt32tUInt64tEnumt NullBytestBytestRawBytestSubFilet CharactertCStringtString(t BIG_ENDIAN(t GzipParser(t Bzip2Parsert ItemContentcBsfeZh ed6ed6ed6ed6ed6ed6ed6ed6ed6ed 6Z d „Z d „Z RS( iiiiiiiiii cCs6ti||||iƒ||_d|i|_dS(Ns content_%s(Rt__init__t descriptiont related_itemtnamet_name(tselftparentRtitem((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyRs ccsÂ|i}|di}|i|}|di}|tjo6|tjo|d|f}n |df}d}n*d|jo|df}n |df}xt|ƒD]}||ŒVq©WdS(Nttypetcounttvalueisvalue[](RRt format_typeR txrange(RRRtclsRtargstindex((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyt createFields#s           ( t__name__t __module__RR RRRR R RRR#(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyRs  tItemcBsÖeZh dd6dd6dd6dd6dd 6d d 6d d 6dd6dd6dd6Zh dd6dd6dd6dd6dd6dd6dd 6d!d"6d#d-6d#d.6d#d/6d&d06d(d16Zd2d2d*„Zd+„Zd,„ZRS(3tNULLitCHARitINT8itINT16itINT32itINT64itCSTRINGitBINit CSTRING_ARRAYisCSTRING?i s File sizeiès(Broken) MD5 signatureiésPGP 2.6.3 signatureiêiës MD5 signatureiìsGnuPG signatureiísPGP5 signatureiîs!Uncompressed payload size (bytes)iïsBroken SHA1 header digestii sDSA header signaturei sRSA header signaturei cCs=ti||||ƒ|djo ti}n||_dS(N(RRtNoneR&ttag_namet tag_name_dict(RRRRR2((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyRTs  ccs`tt|ddƒ|iƒVtt|ddƒtiƒVt|ddƒVt|ddƒVdS( NttagtTagRtTypetoffsettOffsetRtCount(RRR2R&t type_name(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyR#ZscCsd|di|difS(Ns Item: %s (%s)R3R(tdisplay(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pytcreateDescription`sii i i i N(R$R%R9R1R0RR#R;(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyR&7s8   t ItemHeadercBseZhIdd6dd6dd6dd6dd 
6d d 6d d 6dd6dd6dd6dd6dd6dd6dd6dd6dd6d d!6d"d#6d$d%6d&d'6d(d)6d*d+6d,d-6d.d/6d0d16d2d36d4d56d6d76d8d96d:d;6d<d=6d>d?6d@dA6dBdC6dDdE6dFdG6dHdI6dJdK6dLdM6dNdO6dPdQ6dRdS6dTdU6dVdW6dXdY6dZd[6d\d]6d^d_6d`da6dbdc6ddde6dfdg6dhdi6djdk6dldm6dndo6dpdq6drds6dtdu6dvdw6dxdy6dzd{6d|d}6d~d6d€d�6d‚dƒ6d„d…6d†d‡6dˆd‰6dŠd‹6dŒd�6dŽd�6d�d‘6Zd“d’„ZRS(”s Current imagei=t Signaturesi>t Immutablei?tRegionsi@sI18N string localesidtNameiètVersioniétReleaseiêtEpochiëtSummaryiìt Descriptioniís Build timeiîs Build hostiïs Install timeiðtSizeiñt DistributioniòtVendoriótGifiôtXpmiõtLicenceiötPackageri÷tGroupiøt ChangelogiùtSourceiútPatchiûtUrliütOSiýtArchiþtPreiniÿtPostinitPreunitPostunis Old filenamesis File sizesis File statesis File modesis File uidsis File gidsis File rdevsi s File mtimesi s File MD5si sFile link to'si s File flagsi tRootis File usernameisFile groupnameitIconis Source rpmisFile verify flagsis Archive sizeis Provide nameis Require flagsis Require nameisRequire versionis No sourceisNo patchisConflict flagsis Conflict nameisConflict versionisDefault prefixi s Build rooti!sInstall prefixi"s Exclude archi#s Exclude OSi$sExclusive archi%s Exclusive OSi&s RPM versioni(sTrigger scriptsi)s Trigger namei*sTrigger versioni+s Trigger flagsi,s Trigger indexi-s Verify scripti7cCs ti|||||iƒdS(N(R&RR1(RRRR((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyR±sN(R$R%R1R0R(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyR<cs– cCst|di|diƒS(NR6(tintR(tatb((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyt sortRpmItem´st PropertySetcBseZd„Zd„ZRS(cGs9ti||||Œ|di|did|_dS(Nscontent_item[1]tsizei(RRtaddressRt_size(RRRR!((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyR¸sccs‰t|dddƒV|didjotdƒ‚nt|ddƒVt|dd d ƒVt|d d ƒVt|d dƒVg}x?td|d iƒD]'}t|dƒ}|V|i|ƒqœW|i t ƒ|i d}xc|D][}|di}||i d|}d|jot|d|ƒVnt |d|ƒVqèW||d i|i d}d|jot|d|ƒVndS(Nt signatureis!Property signature (\x8E\xAD\xE8)sŽ­èsInvalid property 
signaturetversionsSignature versiontreserveditReservedRR8R_RFisitem[]iR6s padding[]s content[]( R RRRRRtrangeR<tappendtsortR]t current_sizeR(RtitemstiRtstartR6tdiffR_((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyR#¼s2     (R$R%RR#(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyR^·s tRpmFilecBskeZhdd6dd6dd6dd6dd 6dd 6dd6Zhdd 6dd6ZeZd„Zd„ZRS(trpmtidtarchivetcategorytfile_extuapplication/x-rpmtmimei`iitmin_sizesí«îÛitmagics RPM packageRtBinaryROicCs]|didjodS|didjod|diS|di|ijodStS( NRbsí«îÛsInvalid signaturet major_verisUnknown major version (%u)RsInvalid RPM type(Rt TYPE_NAMEtTrue(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pytvalidateìsc cs}t|dddƒVt|ddƒVt|ddƒVtt|dd ƒtiƒVt|d d ƒVt|d d dddddƒVt|ddƒVt|ddƒVt|dddƒVt|ddƒVt|ddƒV|i djo t ‚n|i |i d}|ohd|jo=|i i|i dƒd jot|d!|d"d#tƒVqyt|d!|d$d#tƒVndS(%NRbis%RPM file signature (\xED\xAB\xEE\xDB)Rxs Major versiont minor_vers Minor versionRsRPM typet architecturet ArchitectureRiBs Archive nametstripttcharsettASCIItosRRtsignature_typesType of signatureRdiRetchecksumsChecksum (signature)theadertHeaderiitBZhtcontents bzip2 contenttparsers gzip content(R RRRRnRyRRR^RaR0tNotImplementedErrorRitstreamt readBytesR RR(RR_((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyR#õs$  ,(Ro(uapplication/x-rpmipi€i(sí«îÛi((sí«îÛi(R$R%t PARSER_TAGSRyRtendianR{R#(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyRnÜs   N(t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRRR R R R R Rthachoir_core.endianRt"hachoir_parser.archive.gzip_parserRt#hachoir_parser.archive.bzip2_parserRRR&R<R]R^Rn(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rpm.pyt<module>s^',Q %
10,359
Python
.py
46
224.173913
1,682
0.460002
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,733
mar.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/mar.pyc
Ñò Î ÈMc@sšdZdZddklZddklZlZlZlZddk l Z ddk l Z l Z lZdefd„ƒYZd efd „ƒYZd S( sL Microsoft Archive parser Author: Victor Stinner Creation date: 2007-03-04 i †iÿÿÿÿ(tParser(tFieldSettStringtUInt32tSubFile(t LITTLE_ENDIAN(t textHandlertfilesizeHandlert hexadecimalt FileIndexcBs eZdZd„Zd„ZRS(iDiccsZt|ddddddƒVtt|dƒƒVtt|dƒtƒVt|d ƒVdS( Ntfilenamei8ttruncatettcharsettASCIItfilesizetcrc32toffset(RRRRR(tself((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/mar.pyt createFieldsscCs&d|di|di|difS(NsFile %s (%s) at %sR RR(tvaluetdisplay(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/mar.pytcreateDescriptionsi (t__name__t __module__t static_sizeRR(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/mar.pyR s tMarFilecBs_eZdZhdd6dd6dd6dd6ed ffd 6d d 6ZeZd „Zd„ZRS(tMARCtmartidtarchivetcategorytfile_extiPitmin_sizeitmagicsMicrosoft Archivet descriptioncCsk|iiddƒ|ijodS|didjodSd|dijo tjnpd StS( Niis Invalid magictversionisInvalid versionitnb_filesInvalid number of file(tstreamt readBytestMAGICRt MAX_NB_FILEtTrue(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/mar.pytvalidate(s%c cst|dddddƒVt|dƒVt|dƒVg}xNt|diƒD]9}t|dƒ}|V|d io|i|ƒqPqPW|id d „ƒxs|D]k}|i|d iƒ}|o |Vn|d i}d |di}t|d||d|diƒVq§WdS(NR"isFile signature (MARC)R RR$R%sfile[]RtkeycSs |diS(R(R(titem((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/mar.pyt<lambda>;sRsFile %sR sdata[]( RRtxrangeRR tappendtsorttseekByteR(RtfilestindexR-tpaddingtsizetdesc((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/mar.pyR1s&  (smari€(RRR(t PARSER_TAGSRtendianR+R(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/mar.pyRs  N(t__doc__R)thachoir_parserRthachoir_core.fieldRRRRthachoir_core.endianRthachoir_core.text_handlerRRRR R(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/mar.pyt<module>s"
3,400
Python
.py
18
187.777778
486
0.459491
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,734
__init__.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/__init__.pyc
Ñò Î ÈMc @s´ddklZddklZddklZddklZddkl Z ddk l Z ddk l Z ddklZdd klZdd klZdd klZd S( iÿÿÿÿ(tAceFile(t ArchiveFile(t Bzip2Parser(tCabFile(t GzipParser(tTarFile(tZipFile(tRarFile(tRpmFile(tSevenZipParser(tMarFileN(thachoir_parser.archive.aceRthachoir_parser.archive.arRt#hachoir_parser.archive.bzip2_parserRthachoir_parser.archive.cabRt"hachoir_parser.archive.gzip_parserRthachoir_parser.archive.tarRthachoir_parser.archive.zipRthachoir_parser.archive.rarRthachoir_parser.archive.rpmRthachoir_parser.archive.sevenzipR thachoir_parser.archive.marR (((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/__init__.pyt<module>s
983
Python
.py
6
162.833333
541
0.552147
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,735
rar.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/rar.pyc
Ñò Î ÈMc@sÇdZddklZddklZlZlZlZlZl Z l Z l Z l Z l Z lZlZlZlZddklZlZlZddklZddklZdZZh d d 6d d 6d d6dd6dd6dd6dd6dd6dd6dd6Zhdd6dd 6d!d"6d#d$6d%d&6d'd(6Zd)Zd*Zhd+d)6d,d-6d.d*6d/d06Zhd1d)6d2d-6d3d*6d4d06d5d66d7d86Z d9„Z!d:„Z"d;efd<„ƒYZ#d=„Z$d>„Z%d?„Z&d@„Z'dA„Z(dB„Z)dC„Z*dD„Z+dEefdF„ƒYZ,dG„Z-dHefdI„ƒYZ.dJ„Z/dK„Z0dL„Z1dM„Z2dN„Z3dOefdP„ƒYZ4dQ„Z5dRefdS„ƒYZ6dTefdU„ƒYZ7dVefdW„ƒYZ8dXS([sW RAR parser Status: can only read higher-level attructures Author: Christophe Gisquet iÿÿÿÿ(tParser(tStaticFieldSettFieldSettBittBitstEnumtUInt8tUInt16tUInt32tUInt64tStringtTimeDateMSDOS32t NullBytestNullBitstRawBytes(t textHandlertfilesizeHandlert hexadecimal(t LITTLE_ENDIAN(tMSDOSFileAttr32ièitMarkerirtArchiveistFileittCommentius Extra infoivtSubblockiwsRecovery recordixsArchive authenticityiysNew-format subblockizs Archive endi{tStoringi0sFastest compressioni1sFast compressioni2sNormal compressioni3sGood compressioni4sBest compressioni5iisMS DOSsOS/2itWin32tUnixisDictionary size 64 KbsDictionary size 128 KbsDictionary size 256 KbsDictionary size 512 KbsDictionary size 1024 KbisFile is a directoryicCsdt|idƒS(s2 Decodes the RAR version stored on 1 byte s%u.%ui (tdivmodtvalue(tfield((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pytformatRARVersion<sccs&t|ddƒVt|ddƒVdS(Nthas_added_sizes+Additional field indicating additional sizet is_ignorables>Old versions of RAR should ignore this block when copying data(R(ts((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt commonFlagsBst ArchiveFlagscBs•eZeddfeddfeddfeddfeddfed d fed d fed d feddfeddfedddff ZRS(tvolsArchive volumet has_commentsWhether there is a commentt is_lockedtis_solids)Whether files can be extracted separatelyt new_numberings$New numbering, or compressed commentthas_authenticity_informations8The integrity/authenticity of the archive can be checkedt is_protectedt is_passwordeds Needs a password to be decryptedt is_first_volsWhether it is the first volumet is_encrypteds)Whether the encryption 
version is presenttinternalisReserved for 'internal use'(t__name__t __module__RR tformat(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR$Fs          ccst|ddƒVdS(NtflagssArchiver block flags(R$(R"((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt archiveFlagsUsccs,t|dddƒVt|dddƒVdS(Ns reserved[]is Reserved wordisReserved dword(R (R"((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt archiveHeaderXsccsett|ddƒƒVtt|ddƒƒVt|ddƒVt|ddƒVt|d d ƒVdS( Nt total_sizes"Comment header size + comment sizetuncompressed_sizesUncompressed comment sizetrequired_versions%RAR version needed to extract commenttpacking_methodsComment packing methodt comment_crc16s Comment CRC(RRR(R"((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt commentHeader\s ccs=|di|i}|djot|d|dƒVndS(NR6it comment_datasCompressed comment data(Rt current_sizeR(R"tsize((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt commentBodycs ccs:t|dƒVtt|dƒƒVtt|dƒƒVdS(Nt creation_timet arc_name_sizetuser_name_size(R RR(R"((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pytsignatureHeaderhsccs\tt|dƒƒVtt|dƒtƒVt|dƒVt|dƒVt|ddƒVdS(NR6tversiont rec_sectorst total_blockstmarki(RRRRRRR(R"((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pytrecoveryHeaderms ccswtt|ddƒƒVt|dddtƒVt|dddtƒVt|dd dtƒVt|d d dtƒVdS( NR6sTotal block sizeRDsVersion needed to decompressthandlertmethodsCompression methodt av_versionsVersion for AVtav_crcs AV info CRC32(RRRRR(R"((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt avInfoHeaderts ccs=|di|i}|djot|d|dƒVndS(NR6it av_info_datasAV info(RR=R(R"R>((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt avInfoBody{s t FileFlagscBseZdZd„ZRS(iccsøt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|d d ƒVtt|d d d ƒtƒVxt|ƒD] }|VqWt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVt|ddƒVdS(Ntcontinued_froms#File continued from previous volumet continued_insFile continued 
in next volumeR.sFile encrypted with passwordR&sFile comment presentR(s4Information from previous files is used (solid flag)tdictionary_sizeisDictionary sizetis_largesfile64 operations neededt is_unicodes#Filename also encoded using Unicodethas_saltsHas salt for encryptiontuses_file_versionsFile versioning is usedt has_ext_times Extra time ??t has_ext_flagss Extra flag ??(RRRtDICTIONARY_SIZER#(tselftbit((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt createFields‚s  (R0R1t static_sizeR](((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyRP€sccst|ddƒVdS(NR3sFile block flags(RP(R"((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt fileFlags“stExtTimecBseZd„ZRS(ccs¦tt|ddƒtƒV|di}xxtdƒD]j}|d|d?}|d@oG|ot|ddƒVn|d@ot|d|d@d ƒVqžq4q4WdS( Nt time_flagssFlags for extended timeiiis dos_time[]sDOS Times remainder[]sTime remainder(RRRRtxrangeR R(R[R3tindextrmode((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR]—s    (R0R1R](((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR`–sccsätt|ddƒƒVtt|ddƒƒVtt|ddƒtƒVtt|ddƒtƒVt|d d ƒVtt|d d ƒtƒVtt|d dƒt ƒVtt |ddƒƒV|di t t fjot|ddƒVntt|ddƒtƒV|di ott|ddƒƒVn|di }|djo<|di o d}nd}t|d|dd|ƒVn|oS|di ott|ddƒtƒVn|d i ot|d!d"ƒVqàndS(#Ntcompressed_sizesCompressed size (bytes)R7sUncompressed size (bytes)thost_oss#Operating system used for archivingtcrc32s File CRC32tftimesDate and time (MS DOS format)RDs"RAR version needed to extract fileRJsPacking methodtfilename_lengthsFile name sizet file_attrsFile attributessflags/is_larget large_sizesExtended 64bits filesizeisflags/is_unicodesUTF-8s ISO-8859-15tfilenametFilenametcharsetsflags/has_salttsalttSaltsflags/has_ext_timet extra_timesExtra time info(RRRRtOS_NAMERRR RtCOMPRESSION_NAMERRtOS_MSDOStOS_WIN32RR R R`(R"tis_fileR>Rn((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt specialHeader¢s0   cCs 
t|tƒS(N(RwtTrue(R"((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt fileHeaderÃsccsY|di}|dio||di7}n|djot|d|dƒVndS(NResflags/is_largeRkitcompressed_datasFile compressed data(RR(R"R>((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pytfileBodyÆs   cCsd|di|difS(NsFile entry: %s (%s)RlRe(tdisplay(R"((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pytfileDescriptionÎscCs t|tƒS(N(RwtFalse(R"((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt newSubHeaderÒstEndFlagsc BskeZeddfeddfedfeddfeddfed d fed d fedd ffZRS(t has_next_vols$Whether there is another next volumet has_data_crcsWhether a CRC value is presentt rev_spacethas_vol_numbers$Whether the volume number is presentsunused[]iR s+Additional field indicating additional sizeR!s>Old versions of RAR should ignore this block when copying datai(R0R1RRR2(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR€Õs       ccst|ddƒVdS(NR3sEnd block flags(R€(R"((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pytendFlagsást BlockFlagscBseZdZd„ZRS(iccsTtt|dddƒtƒVt|ddƒVt|ddƒVt|ddƒVdS( Nsunused[]isUnused flag bitsR s+Additional field indicating additional sizeR!s>Old versions of RAR should ignore this block when copying datai(RRRR(R[((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR]çs(R0R1R^R](((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR†ästBlockcBseZh d$d6ddeed#fd6deeeefd6dd d#e e fd 6d d d#e e fd 6ddd#e efd6ddd#ed#fd6ddd#ed#fd6ddee efd6dded#d#fd6Zd„Zd„Zd„Zd „Zd!„Zd"„ZRS(%tmarkersArchive headerirt archive_starts Archive infoissfile[]its comment[]s Stray commentius av_info[]sExtra informationivs sub_block[]sStray subblockiws recovery[]sRecovery blockixt signaturesSignature blockiysnew_sub_block[]sStray new-format subblockizt archive_endsArchive end blocki{c sÀtiˆ||ƒˆdi}|ˆijo½ˆi|\ˆ_‰‰‰‰tˆƒo‡‡fd†ˆ_nˆo ˆˆ_nˆo‡‡fd†ˆ_nˆo‡‡fd†ˆ_ nˆo‡‡fd†ˆ_ qþnˆi dt ƒdˆdiˆ_ |d jp |d 
joeˆi dˆd i7_ ˆd }d ˆd jo,ˆdioˆi dˆdi7_ q¼n,dˆjoˆi dˆdi7_ ndS(Nt block_typecs ˆˆƒS((((R[tdesc(sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt<lambda>scs ˆˆƒS((((t parseFlagsR[(sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyRŽscs ˆˆƒS((((t parseHeaderR[(sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyRŽscs ˆˆƒS((((R[t parseBody(sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyRŽss,Processing as unknown block block of type %uit block_sizeitizReR3RTsflags/is_largeRkR t added_size(Rt__init__Rt BLOCK_INFOt_nametcallabletcreateDescriptiont _descriptionR�R�R‘tinfottypet_size(R[tparenttnamettR3((R�R�R[R‘R�sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR”ýs.    " c csàtt|ddƒtƒVtt|ddƒtƒVx|iƒD] }|VqAWtt|ddƒƒVx|iƒD] }|VqtW|di|id}|djot |d |d ƒVnx|i ƒD] }|VqÍWdS( Ntcrc16s Block CRC16RŒs Block typeR’s Block sizeiitunknownsUnknow data (UInt32 probably)( RRRRR�RR�RR=RR‘(R[RR>((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR]s      cCsd|diS(NsBlock entry: %sR›(R|(R[((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR˜.sccst|ddƒVdS(NR3sBlock header flags(R†(R[((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR�1sccs>d|djo)|diott|ddƒƒVndS(NR R3sflags/has_added_sizeR“sSupplementary block size(RRR(R[((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR�4s ccsu|di|id}d|djo#|dio||di7}n|djot|d|d ƒVnd S( s1 Parse what is left of the block R’iR R3sflags/has_added_sizeR“itbodys Body dataN(RR=R(R[R>((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR‘:s  N(smarkersArchive headerNNN(R0R1tNoneR4R5R}R_RyR{R;R?RMRORRHRCR…R•R”R]R˜R�R�R‘(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR‡ís$       tRarFilecBsoeZdZhdd6dd6dd6dd6dd 6ed ffd 6d d6ZeZd„Zd„Zd„ZRS(sRar!trartidtarchivetcategorytfile_extuapplication/x-rar-compressedtmimeiitmin_sizeitmagicsRoshal archive 
(RAR)t descriptioncCs4|i}|iidt|ƒƒ|jodStS(Nis Invalid magic(tMAGICtstreamt readBytestlenRx(R[R¬((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pytvalidateQs "ccs$x|ipt|dƒVqWdS(Nsblock[](teofR‡(R[((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR]Ws cCsBd}td}|iid||ƒ}|dj o |dSdS(NiisÄ={@ii8(t MAX_FILESIZER¯t searchBytesR£(R[tstarttendtpos((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pytcreateContentSize[s    (R¥(uapplication/x-rar-compressedi8( R0R1R®t PARSER_TAGSRtendianR²R]R¹(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyR¤Ds   Ni i€>(9t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRRR R R R R Rthachoir_core.text_handlerRRRthachoir_core.endianRthachoir_parser.common.msdosRR´t BLOCK_NAMERsRtRuRrRZRR#R$R4R5R;R?RCRHRMRORPR_R`RwRyR{R}RR€R…R†R‡R¤(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/rar.pyt<module>s|^                !      W
18,323
Python
.py
78
233.589744
1,089
0.492409
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,736
gzip_parser.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/gzip_parser.py
""" GZIP archive parser. Author: Victor Stinner """ from hachoir_parser import Parser from hachoir_core.field import ( UInt8, UInt16, UInt32, Enum, TimestampUnix32, Bit, CString, SubFile, NullBits, Bytes, RawBytes) from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir_core.endian import LITTLE_ENDIAN from hachoir_parser.common.deflate import Deflate class GzipParser(Parser): endian = LITTLE_ENDIAN PARSER_TAGS = { "id": "gzip", "category": "archive", "file_ext": ("gz",), "mime": (u"application/x-gzip",), "min_size": 18*8, #"magic": (('\x1F\x8B\x08', 0),), "magic_regex": ( # (magic, compression=deflate, <flags>, <mtime>, ) ('\x1F\x8B\x08.{5}[\0\2\4\6][\x00-\x0D]', 0), ), "description": u"gzip archive", } os_name = { 0: u"FAT filesystem", 1: u"Amiga", 2: u"VMS (or OpenVMS)", 3: u"Unix", 4: u"VM/CMS", 5: u"Atari TOS", 6: u"HPFS filesystem (OS/2, NT)", 7: u"Macintosh", 8: u"Z-System", 9: u"CP/M", 10: u"TOPS-20", 11: u"NTFS filesystem (NT)", 12: u"QDOS", 13: u"Acorn RISCOS", } COMPRESSION_NAME = { 8: u"deflate", } def validate(self): if self["signature"].value != '\x1F\x8B': return "Invalid signature" if self["compression"].value not in self.COMPRESSION_NAME: return "Unknown compression method (%u)" % self["compression"].value if self["reserved[0]"].value != 0: return "Invalid reserved[0] value" if self["reserved[1]"].value != 0: return "Invalid reserved[1] value" if self["reserved[2]"].value != 0: return "Invalid reserved[2] value" return True def createFields(self): # Gzip header yield Bytes(self, "signature", 2, r"GZip file signature (\x1F\x8B)") yield Enum(UInt8(self, "compression", "Compression method"), self.COMPRESSION_NAME) # Flags yield Bit(self, "is_text", "File content is probably ASCII text") yield Bit(self, "has_crc16", "Header CRC16") yield Bit(self, "has_extra", "Extra informations (variable size)") yield Bit(self, "has_filename", "Contains filename?") yield Bit(self, "has_comment", "Contains comment?") yield 
NullBits(self, "reserved[]", 3) yield TimestampUnix32(self, "mtime", "Modification time") # Extra flags yield NullBits(self, "reserved[]", 1) yield Bit(self, "slowest", "Compressor used maximum compression (slowest)") yield Bit(self, "fastest", "Compressor used the fastest compression") yield NullBits(self, "reserved[]", 5) yield Enum(UInt8(self, "os", "Operating system"), self.os_name) # Optional fields if self["has_extra"].value: yield UInt16(self, "extra_length", "Extra length") yield RawBytes(self, "extra", self["extra_length"].value, "Extra") if self["has_filename"].value: yield CString(self, "filename", "Filename", charset="ISO-8859-1") if self["has_comment"].value: yield CString(self, "comment", "Comment") if self["has_crc16"].value: yield textHandler(UInt16(self, "hdr_crc16", "CRC16 of the header"), hexadecimal) if self._size is None: # TODO: is it possible to handle piped input? raise NotImplementedError() # Read file size = (self._size - self.current_size) // 8 - 8 # -8: crc32+size if 0 < size: if self["has_filename"].value: filename = self["filename"].value else: for tag, filename in self.stream.tags: if tag == "filename" and filename.endswith(".gz"): filename = filename[:-3] break else: filename = None yield Deflate(SubFile(self, "file", size, filename=filename)) # Footer yield textHandler(UInt32(self, "crc32", "Uncompressed data content CRC32"), hexadecimal) yield filesizeHandler(UInt32(self, "size", "Uncompressed size")) def createDescription(self): desc = u"gzip archive" info = [] if "filename" in self: info.append('filename "%s"' % self["filename"].value) if "size" in self: info.append("was %s" % self["size"].display) if self["mtime"].value: info.append(self["mtime"].display) return "%s: %s" % (desc, ", ".join(info))
4,742
Python
.py
116
31.37069
91
0.571645
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,737
ar.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/ar.py
""" GNU ar archive : archive file (.a) and Debian (.deb) archive. """ from hachoir_parser import Parser from hachoir_core.field import (FieldSet, ParserError, String, RawBytes, UnixLine) from hachoir_core.endian import BIG_ENDIAN class ArchiveFileEntry(FieldSet): def createFields(self): yield UnixLine(self, "header", "Header") info = self["header"].value.split() if len(info) != 7: raise ParserError("Invalid file entry header") size = int(info[5]) if 0 < size: yield RawBytes(self, "content", size, "File data") def createDescription(self): return "File entry (%s)" % self["header"].value.split()[0] class ArchiveFile(Parser): endian = BIG_ENDIAN MAGIC = '!<arch>\n' PARSER_TAGS = { "id": "unix_archive", "category": "archive", "file_ext": ("a", "deb"), "mime": (u"application/x-debian-package", u"application/x-archive", u"application/x-dpkg"), "min_size": (8 + 13)*8, # file signature + smallest file as possible "magic": ((MAGIC, 0),), "description": "Unix archive" } def validate(self): if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC: return "Invalid magic string" return True def createFields(self): yield String(self, "id", 8, "Unix archive identifier (\"<!arch>\")", charset="ASCII") while not self.eof: data = self.stream.readBytes(self.current_size, 1) if data == "\n": yield UnixLine(self, "empty_line[]", "Empty line") else: yield ArchiveFileEntry(self, "file[]", "File")
1,719
Python
.py
45
30.066667
93
0.588482
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,738
ace.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/ace.py
""" ACE parser From wotsit.org and the SDK header (bitflags) Partial study of a new block type (5) I've called "new_recovery", as its syntax is very close to the former one (of type 2). Status: can only read totally file and header blocks. Author: Christophe Gisquet <christophe.gisquet@free.fr> Creation date: 19 january 2006 """ from hachoir_parser import Parser from hachoir_core.field import (StaticFieldSet, FieldSet, Bit, Bits, NullBits, RawBytes, Enum, UInt8, UInt16, UInt32, PascalString8, PascalString16, String, TimeDateMSDOS32) from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal from hachoir_core.endian import LITTLE_ENDIAN from hachoir_parser.common.msdos import MSDOSFileAttr32 MAGIC = "**ACE**" OS_MSDOS = 0 OS_WIN32 = 2 HOST_OS = { 0: "MS-DOS", 1: "OS/2", 2: "Win32", 3: "Unix", 4: "MAC-OS", 5: "Win NT", 6: "Primos", 7: "APPLE GS", 8: "ATARI", 9: "VAX VMS", 10: "AMIGA", 11: "NEXT", } COMPRESSION_TYPE = { 0: "Store", 1: "Lempel-Ziv 77", 2: "ACE v2.0", } COMPRESSION_MODE = { 0: "fastest", 1: "fast", 2: "normal", 3: "good", 4: "best", } # TODO: Computing the CRC16 would also prove useful #def markerValidate(self): # return not self["extend"].value and self["signature"].value == MAGIC and \ # self["host_os"].value<12 class MarkerFlags(StaticFieldSet): format = ( (Bit, "extend", "Whether the header is extended"), (Bit, "has_comment", "Whether the archive has a comment"), (NullBits, "unused", 7, "Reserved bits"), (Bit, "sfx", "SFX"), (Bit, "limited_dict", "Junior SFX with 256K dictionary"), (Bit, "multi_volume", "Part of a set of ACE archives"), (Bit, "has_av_string", "This header holds an AV-string"), (Bit, "recovery_record", "Recovery record preset"), (Bit, "locked", "Archive is locked"), (Bit, "solid", "Archive uses solid compression") ) def markerFlags(self): yield MarkerFlags(self, "flags", "Marker flags") def markerHeader(self): yield String(self, "signature", 7, "Signature") yield UInt8(self, "ver_extract", "Version needed to 
extract archive") yield UInt8(self, "ver_created", "Version used to create archive") yield Enum(UInt8(self, "host_os", "OS where the files were compressed"), HOST_OS) yield UInt8(self, "vol_num", "Volume number") yield TimeDateMSDOS32(self, "time", "Date and time (MS DOS format)") yield Bits(self, "reserved", 64, "Reserved size for future extensions") flags = self["flags"] if flags["has_av_string"].value: yield PascalString8(self, "av_string", "AV String") if flags["has_comment"].value: size = filesizeHandler(UInt16(self, "comment_size", "Comment size")) yield size if size.value > 0: yield RawBytes(self, "compressed_comment", size.value, \ "Compressed comment") class FileFlags(StaticFieldSet): format = ( (Bit, "extend", "Whether the header is extended"), (Bit, "has_comment", "Presence of file comment"), (Bits, "unused", 10, "Unused bit flags"), (Bit, "encrypted", "File encrypted with password"), (Bit, "previous", "File continued from previous volume"), (Bit, "next", "File continues on the next volume"), (Bit, "solid", "File compressed using previously archived files") ) def fileFlags(self): yield FileFlags(self, "flags", "File flags") def fileHeader(self): yield filesizeHandler(UInt32(self, "compressed_size", "Size of the compressed file")) yield filesizeHandler(UInt32(self, "uncompressed_size", "Uncompressed file size")) yield TimeDateMSDOS32(self, "ftime", "Date and time (MS DOS format)") if self["/header/host_os"].value in (OS_MSDOS, OS_WIN32): yield MSDOSFileAttr32(self, "file_attr", "File attributes") else: yield textHandler(UInt32(self, "file_attr", "File attributes"), hexadecimal) yield textHandler(UInt32(self, "file_crc32", "CRC32 checksum over the compressed file)"), hexadecimal) yield Enum(UInt8(self, "compression_type", "Type of compression"), COMPRESSION_TYPE) yield Enum(UInt8(self, "compression_mode", "Quality of compression"), COMPRESSION_MODE) yield textHandler(UInt16(self, "parameters", "Compression parameters"), hexadecimal) yield 
textHandler(UInt16(self, "reserved", "Reserved data"), hexadecimal) # Filename yield PascalString16(self, "filename", "Filename") # Comment if self["flags/has_comment"].value: yield filesizeHandler(UInt16(self, "comment_size", "Size of the compressed comment")) if self["comment_size"].value > 0: yield RawBytes(self, "comment_data", self["comment_size"].value, "Comment data") def fileBody(self): size = self["compressed_size"].value if size > 0: yield RawBytes(self, "compressed_data", size, "Compressed data") def fileDesc(self): return "File entry: %s (%s)" % (self["filename"].value, self["compressed_size"].display) def recoveryHeader(self): yield filesizeHandler(UInt32(self, "rec_blk_size", "Size of recovery data")) self.body_size = self["rec_blk_size"].size yield String(self, "signature", 7, "Signature, normally '**ACE**'") yield textHandler(UInt32(self, "relative_start", "Relative start (to this block) of the data this block is mode of"), hexadecimal) yield UInt32(self, "num_blocks", "Number of blocks the data is split into") yield UInt32(self, "size_blocks", "Size of these blocks") yield UInt16(self, "crc16_blocks", "CRC16 over recovery data") # size_blocks blocks of size size_blocks follow # The ultimate data is the xor data of all those blocks size = self["size_blocks"].value for index in xrange(self["num_blocks"].value): yield RawBytes(self, "data[]", size, "Recovery block %i" % index) yield RawBytes(self, "xor_data", size, "The XOR value of the above data blocks") def recoveryDesc(self): return "Recovery block, size=%u" % self["body_size"].display def newRecoveryHeader(self): """ This header is described nowhere """ if self["flags/extend"].value: yield filesizeHandler(UInt32(self, "body_size", "Size of the unknown body following")) self.body_size = self["body_size"].value yield textHandler(UInt32(self, "unknown[]", "Unknown field, probably 0"), hexadecimal) yield String(self, "signature", 7, "Signature, normally '**ACE**'") yield textHandler(UInt32(self, 
"relative_start", "Offset (=crc16's) of this block in the file"), hexadecimal) yield textHandler(UInt32(self, "unknown[]", "Unknown field, probably 0"), hexadecimal) class BaseFlags(StaticFieldSet): format = ( (Bit, "extend", "Whether the header is extended"), (NullBits, "unused", 15, "Unused bit flags") ) def parseFlags(self): yield BaseFlags(self, "flags", "Unknown flags") def parseHeader(self): if self["flags/extend"].value: yield filesizeHandler(UInt32(self, "body_size", "Size of the unknown body following")) self.body_size = self["body_size"].value def parseBody(self): if self.body_size > 0: yield RawBytes(self, "body_data", self.body_size, "Body data, unhandled") class Block(FieldSet): TAG_INFO = { 0: ("header", "Archiver header", markerFlags, markerHeader, None), 1: ("file[]", fileDesc, fileFlags, fileHeader, fileBody), 2: ("recovery[]", recoveryDesc, recoveryHeader, None, None), 5: ("new_recovery[]", None, None, newRecoveryHeader, None) } def __init__(self, parent, name, description=None): FieldSet.__init__(self, parent, name, description) self.body_size = 0 self.desc_func = None type = self["block_type"].value if type in self.TAG_INFO: self._name, desc, self.parseFlags, self.parseHeader, self.parseBody = self.TAG_INFO[type] if desc: if isinstance(desc, str): self._description = desc else: self.desc_func = desc else: self.warning("Processing as unknown block block of type %u" % type) if not self.parseFlags: self.parseFlags = parseFlags if not self.parseHeader: self.parseHeader = parseHeader if not self.parseBody: self.parseBody = parseBody def createFields(self): yield textHandler(UInt16(self, "crc16", "Archive CRC16 (from byte 4 on)"), hexadecimal) yield filesizeHandler(UInt16(self, "head_size", "Block size (from byte 4 on)")) yield UInt8(self, "block_type", "Block type") # Flags for flag in self.parseFlags(self): yield flag # Rest of the header for field in self.parseHeader(self): yield field size = self["head_size"].value - (self.current_size//8) + (2+2) 
if size > 0: yield RawBytes(self, "extra_data", size, "Extra header data, unhandled") # Body in itself for field in self.parseBody(self): yield field def createDescription(self): if self.desc_func: return self.desc_func(self) else: return "Block: %s" % self["type"].display class AceFile(Parser): endian = LITTLE_ENDIAN PARSER_TAGS = { "id": "ace", "category": "archive", "file_ext": ("ace",), "mime": (u"application/x-ace-compressed",), "min_size": 50*8, "description": "ACE archive" } def validate(self): if self.stream.readBytes(7*8, len(MAGIC)) != MAGIC: return "Invalid magic" return True def createFields(self): while not self.eof: yield Block(self, "block[]")
9,944
Python
.py
232
36.293103
106
0.646585
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,739
ar.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/ar.pyc
Ñò Î ÈMc@s~dZddklZddklZlZlZlZlZddk l Z defd„ƒYZ defd„ƒYZ d S( s? GNU ar archive : archive file (.a) and Debian (.deb) archive. iÿÿÿÿ(tParser(tFieldSett ParserErrortStringtRawBytestUnixLine(t BIG_ENDIANtArchiveFileEntrycBseZd„Zd„ZRS(ccs€t|ddƒV|diiƒ}t|ƒdjotdƒ‚nt|dƒ}d|jot|d|dƒVndS( NtheadertHeaderisInvalid file entry headeriitcontents File data(RtvaluetsplittlenRtintR(tselftinfotsize((sC/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ar.pyt createFields s cCsd|diiƒdS(NsFile entry (%s)Ri(R R (R((sC/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ar.pytcreateDescriptions(t__name__t __module__RR(((sC/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ar.pyR s t ArchiveFilecBsfeZeZdZhdd6dd6dd6dd 6dd6edffd6dd6Zd„Zd„ZRS(s!<arch> t unix_archivetidtarchivetcategorytatdebtfile_extuapplication/x-debian-packageuapplication/x-archiveuapplication/x-dpkgtmimeii tmin_sizeitmagics Unix archivet descriptioncCs1|iidt|iƒƒ|ijodStS(NisInvalid magic string(tstreamt readBytesR tMAGICtTrue(R((sC/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ar.pytvalidate's(ccs{t|dddddƒVxZ|ipO|ii|idƒ}|djot|dd ƒVqt|d d ƒVqWdS( NRis#Unix archive identifier ("<!arch>")tcharsettASCIIis s empty_line[]s Empty linesfile[]tFile(RteofR"R#t current_sizeRR(Rtdata((sC/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ar.pyR,s  (RR(uapplication/x-debian-packageuapplication/x-archiveuapplication/x-dpkgii¨(RRRtendianR$t PARSER_TAGSR&R(((sC/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ar.pyRs  N( t__doc__thachoir_parserRthachoir_core.fieldRRRRRthachoir_core.endianRRR(((sC/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ar.pyt<module>s (
2,943
Python
.py
17
172.058824
788
0.48343
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,740
rar.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/rar.py
""" RAR parser Status: can only read higher-level attructures Author: Christophe Gisquet """ from hachoir_parser import Parser from hachoir_core.field import (StaticFieldSet, FieldSet, Bit, Bits, Enum, UInt8, UInt16, UInt32, UInt64, String, TimeDateMSDOS32, NullBytes, NullBits, RawBytes) from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal from hachoir_core.endian import LITTLE_ENDIAN from hachoir_parser.common.msdos import MSDOSFileAttr32 MAX_FILESIZE = 1000 * 1024 * 1024 BLOCK_NAME = { 0x72: "Marker", 0x73: "Archive", 0x74: "File", 0x75: "Comment", 0x76: "Extra info", 0x77: "Subblock", 0x78: "Recovery record", 0x79: "Archive authenticity", 0x7A: "New-format subblock", 0x7B: "Archive end", } COMPRESSION_NAME = { 0x30: "Storing", 0x31: "Fastest compression", 0x32: "Fast compression", 0x33: "Normal compression", 0x34: "Good compression", 0x35: "Best compression" } OS_MSDOS = 0 OS_WIN32 = 2 OS_NAME = { 0: "MS DOS", 1: "OS/2", 2: "Win32", 3: "Unix", } DICTIONARY_SIZE = { 0: "Dictionary size 64 Kb", 1: "Dictionary size 128 Kb", 2: "Dictionary size 256 Kb", 3: "Dictionary size 512 Kb", 4: "Dictionary size 1024 Kb", 7: "File is a directory", } def formatRARVersion(field): """ Decodes the RAR version stored on 1 byte """ return "%u.%u" % divmod(field.value, 10) def commonFlags(s): yield Bit(s, "has_added_size", "Additional field indicating additional size") yield Bit(s, "is_ignorable", "Old versions of RAR should ignore this block when copying data") class ArchiveFlags(StaticFieldSet): format = ( (Bit, "vol", "Archive volume"), (Bit, "has_comment", "Whether there is a comment"), (Bit, "is_locked", "Archive volume"), (Bit, "is_solid", "Whether files can be extracted separately"), (Bit, "new_numbering", "New numbering, or compressed comment"), # From unrar (Bit, "has_authenticity_information", "The integrity/authenticity of the archive can be checked"), (Bit, "is_protected", "The integrity/authenticity of the archive can be checked"), (Bit, 
"is_passworded", "Needs a password to be decrypted"), (Bit, "is_first_vol", "Whether it is the first volume"), (Bit, "is_encrypted", "Whether the encryption version is present"), (NullBits, "internal", 6, "Reserved for 'internal use'") ) def archiveFlags(s): yield ArchiveFlags(s, "flags", "Archiver block flags") def archiveHeader(s): yield NullBytes(s, "reserved[]", 2, "Reserved word") yield NullBytes(s, "reserved[]", 4, "Reserved dword") def commentHeader(s): yield filesizeHandler(UInt16(s, "total_size", "Comment header size + comment size")) yield filesizeHandler(UInt16(s, "uncompressed_size", "Uncompressed comment size")) yield UInt8(s, "required_version", "RAR version needed to extract comment") yield UInt8(s, "packing_method", "Comment packing method") yield UInt16(s, "comment_crc16", "Comment CRC") def commentBody(s): size = s["total_size"].value - s.current_size if size > 0: yield RawBytes(s, "comment_data", size, "Compressed comment data") def signatureHeader(s): yield TimeDateMSDOS32(s, "creation_time") yield filesizeHandler(UInt16(s, "arc_name_size")) yield filesizeHandler(UInt16(s, "user_name_size")) def recoveryHeader(s): yield filesizeHandler(UInt32(s, "total_size")) yield textHandler(UInt8(s, "version"), hexadecimal) yield UInt16(s, "rec_sectors") yield UInt32(s, "total_blocks") yield RawBytes(s, "mark", 8) def avInfoHeader(s): yield filesizeHandler(UInt16(s, "total_size", "Total block size")) yield UInt8(s, "version", "Version needed to decompress", handler=hexadecimal) yield UInt8(s, "method", "Compression method", handler=hexadecimal) yield UInt8(s, "av_version", "Version for AV", handler=hexadecimal) yield UInt32(s, "av_crc", "AV info CRC32", handler=hexadecimal) def avInfoBody(s): size = s["total_size"].value - s.current_size if size > 0: yield RawBytes(s, "av_info_data", size, "AV info") class FileFlags(FieldSet): static_size = 16 def createFields(self): yield Bit(self, "continued_from", "File continued from previous volume") yield Bit(self, 
"continued_in", "File continued in next volume") yield Bit(self, "is_encrypted", "File encrypted with password") yield Bit(self, "has_comment", "File comment present") yield Bit(self, "is_solid", "Information from previous files is used (solid flag)") # The 3 following lines are what blocks more staticity yield Enum(Bits(self, "dictionary_size", 3, "Dictionary size"), DICTIONARY_SIZE) for bit in commonFlags(self): yield bit yield Bit(self, "is_large", "file64 operations needed") yield Bit(self, "is_unicode", "Filename also encoded using Unicode") yield Bit(self, "has_salt", "Has salt for encryption") yield Bit(self, "uses_file_version", "File versioning is used") yield Bit(self, "has_ext_time", "Extra time ??") yield Bit(self, "has_ext_flags", "Extra flag ??") def fileFlags(s): yield FileFlags(s, "flags", "File block flags") class ExtTime(FieldSet): def createFields(self): yield textHandler(UInt16(self, "time_flags", "Flags for extended time"), hexadecimal) flags = self["time_flags"].value for index in xrange(4): rmode = flags >> ((3-index)*4) if rmode & 8: if index: yield TimeDateMSDOS32(self, "dos_time[]", "DOS Time") if rmode & 3: yield RawBytes(self, "remainder[]", rmode & 3, "Time remainder") def specialHeader(s, is_file): yield filesizeHandler(UInt32(s, "compressed_size", "Compressed size (bytes)")) yield filesizeHandler(UInt32(s, "uncompressed_size", "Uncompressed size (bytes)")) yield Enum(UInt8(s, "host_os", "Operating system used for archiving"), OS_NAME) yield textHandler(UInt32(s, "crc32", "File CRC32"), hexadecimal) yield TimeDateMSDOS32(s, "ftime", "Date and time (MS DOS format)") yield textHandler(UInt8(s, "version", "RAR version needed to extract file"), formatRARVersion) yield Enum(UInt8(s, "method", "Packing method"), COMPRESSION_NAME) yield filesizeHandler(UInt16(s, "filename_length", "File name size")) if s["host_os"].value in (OS_MSDOS, OS_WIN32): yield MSDOSFileAttr32(s, "file_attr", "File attributes") else: yield textHandler(UInt32(s, 
"file_attr", "File attributes"), hexadecimal) # Start additional field from unrar if s["flags/is_large"].value: yield filesizeHandler(UInt64(s, "large_size", "Extended 64bits filesize")) # End additional field size = s["filename_length"].value if size > 0: if s["flags/is_unicode"].value: charset = "UTF-8" else: charset = "ISO-8859-15" yield String(s, "filename", size, "Filename", charset=charset) # Start additional fields from unrar - file only if is_file: if s["flags/has_salt"].value: yield textHandler(UInt8(s, "salt", "Salt"), hexadecimal) if s["flags/has_ext_time"].value: yield ExtTime(s, "extra_time", "Extra time info") def fileHeader(s): return specialHeader(s, True) def fileBody(s): # File compressed data size = s["compressed_size"].value if s["flags/is_large"].value: size += s["large_size"].value if size > 0: yield RawBytes(s, "compressed_data", size, "File compressed data") def fileDescription(s): return "File entry: %s (%s)" % \ (s["filename"].display, s["compressed_size"].display) def newSubHeader(s): return specialHeader(s, False) class EndFlags(StaticFieldSet): format = ( (Bit, "has_next_vol", "Whether there is another next volume"), (Bit, "has_data_crc", "Whether a CRC value is present"), (Bit, "rev_space"), (Bit, "has_vol_number", "Whether the volume number is present"), (Bits, "unused[]", 4), (Bit, "has_added_size", "Additional field indicating additional size"), (Bit, "is_ignorable", "Old versions of RAR should ignore this block when copying data"), (Bits, "unused[]", 6), ) def endFlags(s): yield EndFlags(s, "flags", "End block flags") class BlockFlags(FieldSet): static_size = 16 def createFields(self): yield textHandler(Bits(self, "unused[]", 8, "Unused flag bits"), hexadecimal) yield Bit(self, "has_added_size", "Additional field indicating additional size") yield Bit(self, "is_ignorable", "Old versions of RAR should ignore this block when copying data") yield Bits(self, "unused[]", 6) class Block(FieldSet): BLOCK_INFO = { # None means 'use default 
function' 0x72: ("marker", "Archive header", None, None, None), 0x73: ("archive_start", "Archive info", archiveFlags, archiveHeader, None), 0x74: ("file[]", fileDescription, fileFlags, fileHeader, fileBody), 0x75: ("comment[]", "Stray comment", None, commentHeader, commentBody), 0x76: ("av_info[]", "Extra information", None, avInfoHeader, avInfoBody), 0x77: ("sub_block[]", "Stray subblock", None, newSubHeader, fileBody), 0x78: ("recovery[]", "Recovery block", None, recoveryHeader, None), 0x79: ("signature", "Signature block", None, signatureHeader, None), 0x7A: ("new_sub_block[]", "Stray new-format subblock", fileFlags, newSubHeader, fileBody), 0x7B: ("archive_end", "Archive end block", endFlags, None, None), } def __init__(self, parent, name): FieldSet.__init__(self, parent, name) t = self["block_type"].value if t in self.BLOCK_INFO: self._name, desc, parseFlags, parseHeader, parseBody = self.BLOCK_INFO[t] if callable(desc): self.createDescription = lambda: desc(self) elif desc: self._description = desc if parseFlags : self.parseFlags = lambda: parseFlags(self) if parseHeader : self.parseHeader = lambda: parseHeader(self) if parseBody : self.parseBody = lambda: parseBody(self) else: self.info("Processing as unknown block block of type %u" % type) self._size = 8*self["block_size"].value if t == 0x74 or t == 0x7A: self._size += 8*self["compressed_size"].value flags = self["flags"] if "is_large" in self["flags"] and self["flags/is_large"].value: self._size += 8*self["large_size"].value elif "has_added_size" in self: self._size += 8*self["added_size"].value # TODO: check if any other member is needed here def createFields(self): yield textHandler(UInt16(self, "crc16", "Block CRC16"), hexadecimal) yield textHandler(UInt8(self, "block_type", "Block type"), hexadecimal) # Parse flags for field in self.parseFlags(): yield field # Get block size yield filesizeHandler(UInt16(self, "block_size", "Block size")) # Parse remaining header for field in self.parseHeader(): yield 
field # Finish header with stuff of unknow size size = self["block_size"].value - (self.current_size//8) if size > 0: yield RawBytes(self, "unknown", size, "Unknow data (UInt32 probably)") # Parse body for field in self.parseBody(): yield field def createDescription(self): return "Block entry: %s" % self["type"].display def parseFlags(self): yield BlockFlags(self, "flags", "Block header flags") def parseHeader(self): if "has_added_size" in self["flags"] and \ self["flags/has_added_size"].value: yield filesizeHandler(UInt32(self, "added_size", "Supplementary block size")) def parseBody(self): """ Parse what is left of the block """ size = self["block_size"].value - (self.current_size//8) if "has_added_size" in self["flags"] and self["flags/has_added_size"].value: size += self["added_size"].value if size > 0: yield RawBytes(self, "body", size, "Body data") class RarFile(Parser): MAGIC = "Rar!\x1A\x07\x00" PARSER_TAGS = { "id": "rar", "category": "archive", "file_ext": ("rar",), "mime": (u"application/x-rar-compressed", ), "min_size": 7*8, "magic": ((MAGIC, 0),), "description": "Roshal archive (RAR)", } endian = LITTLE_ENDIAN def validate(self): magic = self.MAGIC if self.stream.readBytes(0, len(magic)) != magic: return "Invalid magic" return True def createFields(self): while not self.eof: yield Block(self, "block[]") def createContentSize(self): start = 0 end = MAX_FILESIZE * 8 pos = self.stream.searchBytes("\xC4\x3D\x7B\x00\x40\x07\x00", start, end) if pos is not None: return pos + 7*8 return None
13,398
Python
.py
304
36.904605
106
0.636385
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,741
mar.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/mar.py
""" Microsoft Archive parser Author: Victor Stinner Creation date: 2007-03-04 """ MAX_NB_FILE = 100000 from hachoir_parser import Parser from hachoir_core.field import FieldSet, String, UInt32, SubFile from hachoir_core.endian import LITTLE_ENDIAN from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal class FileIndex(FieldSet): static_size = 68*8 def createFields(self): yield String(self, "filename", 56, truncate="\0", charset="ASCII") yield filesizeHandler(UInt32(self, "filesize")) yield textHandler(UInt32(self, "crc32"), hexadecimal) yield UInt32(self, "offset") def createDescription(self): return "File %s (%s) at %s" % ( self["filename"].value, self["filesize"].display, self["offset"].value) class MarFile(Parser): MAGIC = "MARC" PARSER_TAGS = { "id": "mar", "category": "archive", "file_ext": ("mar",), "min_size": 80*8, # At least one file index "magic": ((MAGIC, 0),), "description": "Microsoft Archive", } endian = LITTLE_ENDIAN def validate(self): if self.stream.readBytes(0, 4) != self.MAGIC: return "Invalid magic" if self["version"].value != 3: return "Invalid version" if not(1 <= self["nb_file"].value <= MAX_NB_FILE): return "Invalid number of file" return True def createFields(self): yield String(self, "magic", 4, "File signature (MARC)", charset="ASCII") yield UInt32(self, "version") yield UInt32(self, "nb_file") files = [] for index in xrange(self["nb_file"].value): item = FileIndex(self, "file[]") yield item if item["filesize"].value: files.append(item) files.sort(key=lambda item: item["offset"].value) for index in files: padding = self.seekByte(index["offset"].value) if padding: yield padding size = index["filesize"].value desc = "File %s" % index["filename"].value yield SubFile(self, "data[]", size, desc, filename=index["filename"].value)
2,204
Python
.py
57
30.684211
87
0.608797
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,742
tar.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/tar.pyc
Ñò Î ÈMc@s¬dZddklZddklZlZlZlZlZl Z ddk l Z l Z l Z ddklZddkZdefd„ƒYZd efd „ƒYZdS( s- Tar archive parser. Author: Victor Stinner iÿÿÿÿ(tParser(tFieldSettEnumtUInt8tSubFiletStringt NullBytes(t humanFilesizet paddingSizet timestampUNIX(t BIG_ENDIANNt FileEntrycBsŒeZh dd6dd6dd6dd6dd 6d d 6d d 6dd6dd6Zd„Zd„Zd„Zd„Zd„Zd„Zd„Z RS(uNormal disk file (old format)iuNormal disk filei0uLink to previously dumped filei1u Symbolic linki2uCharacter special filei3uBlock special filei4u Directoryi5uFIFO special filei6uContiguous filei7cCs|i||iƒS(N(t octal2inttvalue(tselftname((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pytgetOctalscCs|idƒ}t|ƒS(sS Create modification date as Unicode string, may raise ValueError. tmtime(RR (Rt timestamp((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pyt getDatetimesc csBt|dddddddƒVt|dd d dd dd ƒVt|d d ddd dd ƒVt|dd ddd dd ƒVt|ddddd dd ƒVt|ddddd dd ƒVt|dd ddd dd ƒVtt|ddƒ|iƒVt|ddddd ddƒVt|dd ddd dd ƒVt|ddd dd ddƒVt|d!dd"dd ddƒVt|d#d d$dd dd ƒVt|d%d d&dd dd ƒVt|d'd(d)ƒV|idƒ}|o"t|d*|d+|diƒVnt|i d d,ƒ}|ot|d-|d.ƒVndS(/NRidtNametstripttcharsets ISO-8859-1tmodeitModes tASCIItuidsUser IDtgidsGroup IDtsizei tSizeRsModification timet check_sums Check sumttypetTypetlnames Link nametmagictMagictunamei s User nametgnames Group nametdevmajors Dev majortdevminors Dev minortpaddingi§sPadding (zero)tcontenttfilenameit padding_endsPadding (512 align)( RRRt type_nameRRRR Rt current_size(RtfilesizeR((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pyt createFields&s*             "cCs|i|iƒS(N(R R (Rtchunk((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pyt convertOctal?scCs|didjS(NRt(R (R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pytisEmptyBscCs,yt|dƒSWntj odSXdS(Nii(tintt ValueError(Rttext((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pyR EscCs[|iƒo d}n=|di}t|idƒƒ}d||di|f}d|S(Ns(terminator, empty header)RRs (%s: %s, %s)R s Tar File (R4R 
RRtdisplay(RtdescR+R/((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pytcreateDescriptionKs   ( t__name__t __module__R-RRR0R2R4R R:(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pyR s"       tTarFilecBseZeZhdd6dd6dd6dd6dd 6d dffd 6dd6dd6ZeidƒZd„Zd„Z d„Z RS(ttartidtarchivetcategorytfile_extuapplication/x-taruapplication/x-gtartmimeiitmin_sizesustar iR#tskiptsubfiles TAR archivet descriptionsustar *|[ ]*$cCs›|ii|iid dƒƒpdS|didjodSy@t|didƒt|didƒt|d idƒWntj od SXtS( NiisInvalid magic numberit terminatorsDon't contain any files file[0]/uids file[0]/gids file[0]/sizesInvalid file sizei( t_signtmatchtstreamt readBytesRR5R R6tTrue(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pytvalidatecs"ccsxxF|ip;t|dƒ}|iƒot|ddƒVPn|VqW|i|ijo|i|idƒVndS(Nsfile[]RHitend(teofR R4RR.t_sizetseekBit(Rtfield((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pyR0ps   cCs|di|diS(NRH(taddressR(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pytcreateContentSizezs(R>(uapplication/x-taruapplication/x-gtarii( R;R<R tendiant PARSER_TAGStretcompileRIRNR0RU(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pyR=Us  (t__doc__thachoir_parserRthachoir_core.fieldRRRRRRthachoir_core.toolsRRR thachoir_core.endianR RXR R=(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/tar.pyt<module>s. G
6,045
Python
.py
33
181.575758
685
0.465902
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,743
zip.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/zip.py
""" Zip splitter. Status: can read most important headers Authors: Christophe Gisquet and Victor Stinner """ from hachoir_parser import Parser from hachoir_core.field import (FieldSet, ParserError, Bit, Bits, Enum, TimeDateMSDOS32, SubFile, UInt8, UInt16, UInt32, UInt64, String, PascalString16, RawBytes, SubFile) from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal from hachoir_core.error import HACHOIR_ERRORS from hachoir_core.tools import makeUnicode from hachoir_core.endian import LITTLE_ENDIAN from hachoir_parser.common.deflate import Deflate MAX_FILESIZE = 1000 * 1024 * 1024 COMPRESSION_DEFLATE = 8 COMPRESSION_METHOD = { 0: u"no compression", 1: u"Shrunk", 2: u"Reduced (factor 1)", 3: u"Reduced (factor 2)", 4: u"Reduced (factor 3)", 5: u"Reduced (factor 4)", 6: u"Imploded", 7: u"Tokenizing", 8: u"Deflate", 9: u"Deflate64", 10: u"PKWARE Imploding", 11: u"Reserved by PKWARE", 12: u"File is compressed using BZIP2 algorithm", 13: u"Reserved by PKWARE", 14: u"LZMA (EFS)", 15: u"Reserved by PKWARE", 16: u"Reserved by PKWARE", 17: u"Reserved by PKWARE", 18: u"File is compressed using IBM TERSE (new)", 19: u"IBM LZ77 z Architecture (PFS)", 98: u"PPMd version I, Rev 1", } def ZipRevision(field): return "%u.%u" % divmod(field.value, 10) class ZipVersion(FieldSet): static_size = 16 HOST_OS = { 0: u"FAT file system (DOS, OS/2, NT)", 1: u"Amiga", 2: u"VMS (VAX or Alpha AXP)", 3: u"Unix", 4: u"VM/CMS", 5: u"Atari", 6: u"HPFS file system (OS/2, NT 3.x)", 7: u"Macintosh", 8: u"Z-System", 9: u"CP/M", 10: u"TOPS-20", 11: u"NTFS file system (NT)", 12: u"SMS/QDOS", 13: u"Acorn RISC OS", 14: u"VFAT file system (Win95, NT)", 15: u"MVS", 16: u"BeOS (BeBox or PowerMac)", 17: u"Tandem", } def createFields(self): yield textHandler(UInt8(self, "zip_version", "ZIP version"), ZipRevision) yield Enum(UInt8(self, "host_os", "ZIP Host OS"), self.HOST_OS) class ZipGeneralFlags(FieldSet): static_size = 16 def createFields(self): # Need the compression info from the 
parent, and that is the byte following method = self.stream.readBits(self.absolute_address+16, 16, LITTLE_ENDIAN) yield Bits(self, "unused[]", 2, "Unused") yield Bit(self, "encrypted_central_dir", "Selected data values in the Local Header are masked") yield Bit(self, "incomplete", "Reserved by PKWARE for enhanced compression.") yield Bit(self, "uses_unicode", "Filename and comments are in UTF-8") yield Bits(self, "unused[]", 4, "Unused") yield Bit(self, "strong_encrypt", "Strong encryption (version >= 50)") yield Bit(self, "is_patched", "File is compressed with patched data?") yield Bit(self, "enhanced_deflate", "Reserved for use with method 8") yield Bit(self, "has_descriptor", "Compressed data followed by descriptor?") if method == 6: yield Bit(self, "use_8k_sliding", "Use 8K sliding dictionary (instead of 4K)") yield Bit(self, "use_3shannon", "Use a 3 Shannon-Fano tree (instead of 2 Shannon-Fano)") elif method in (8, 9): NAME = { 0: "Normal compression", 1: "Maximum compression", 2: "Fast compression", 3: "Super Fast compression" } yield Enum(Bits(self, "method", 2), NAME) elif method == 14: #LZMA yield Bit(self, "lzma_eos", "LZMA stream is ended with a EndOfStream marker") yield Bit(self, "unused[]") else: yield Bits(self, "compression_info", 2) yield Bit(self, "is_encrypted", "File is encrypted?") class ExtraField(FieldSet): EXTRA_FIELD_ID = { 0x0007: "AV Info", 0x0009: "OS/2 extended attributes (also Info-ZIP)", 0x000a: "PKWARE Win95/WinNT FileTimes", # undocumented! 0x000c: "PKWARE VAX/VMS (also Info-ZIP)", 0x000d: "PKWARE Unix", 0x000f: "Patch Descriptor", 0x07c8: "Info-ZIP Macintosh (old, J. Lee)", 0x2605: "ZipIt Macintosh (first version)", 0x2705: "ZipIt Macintosh v 1.3.5 and newer (w/o full filename)", 0x334d: "Info-ZIP Macintosh (new, D. 
Haase Mac3 field)", 0x4341: "Acorn/SparkFS (David Pilling)", 0x4453: "Windows NT security descriptor (binary ACL)", 0x4704: "VM/CMS", 0x470f: "MVS", 0x4b46: "FWKCS MD5 (third party, see below)", 0x4c41: "OS/2 access control list (text ACL)", 0x4d49: "Info-ZIP VMS (VAX or Alpha)", 0x5356: "AOS/VS (binary ACL)", 0x5455: "extended timestamp", 0x5855: "Info-ZIP Unix (original; also OS/2, NT, etc.)", 0x6542: "BeOS (BeBox, PowerMac, etc.)", 0x756e: "ASi Unix", 0x7855: "Info-ZIP Unix (new)", 0xfb4a: "SMS/QDOS", } def createFields(self): yield Enum(UInt16(self, "field_id", "Extra field ID"), self.EXTRA_FIELD_ID) size = UInt16(self, "field_data_size", "Extra field data size") yield size if size.value > 0: yield RawBytes(self, "field_data", size, "Unknown field data") def ZipStartCommonFields(self): yield ZipVersion(self, "version_needed", "Version needed") yield ZipGeneralFlags(self, "flags", "General purpose flag") yield Enum(UInt16(self, "compression", "Compression method"), COMPRESSION_METHOD) yield TimeDateMSDOS32(self, "last_mod", "Last modification file time") yield textHandler(UInt32(self, "crc32", "CRC-32"), hexadecimal) yield UInt32(self, "compressed_size", "Compressed size") yield UInt32(self, "uncompressed_size", "Uncompressed size") yield UInt16(self, "filename_length", "Filename length") yield UInt16(self, "extra_length", "Extra fields length") def zipGetCharset(self): if self["flags/uses_unicode"].value: return "UTF-8" else: return "ISO-8859-15" class ZipCentralDirectory(FieldSet): HEADER = 0x02014b50 def createFields(self): yield ZipVersion(self, "version_made_by", "Version made by") for field in ZipStartCommonFields(self): yield field # Check unicode status charset = zipGetCharset(self) yield UInt16(self, "comment_length", "Comment length") yield UInt16(self, "disk_number_start", "Disk number start") yield UInt16(self, "internal_attr", "Internal file attributes") yield UInt32(self, "external_attr", "External file attributes") yield UInt32(self, 
"offset_header", "Relative offset of local header") yield String(self, "filename", self["filename_length"].value, "Filename", charset=charset) if 0 < self["extra_length"].value: yield RawBytes(self, "extra", self["extra_length"].value, "Extra fields") if 0 < self["comment_length"].value: yield String(self, "comment", self["comment_length"].value, "Comment", charset=charset) def createDescription(self): return "Central directory: %s" % self["filename"].display class Zip64EndCentralDirectory(FieldSet): HEADER = 0x06064b50 def createFields(self): yield UInt64(self, "zip64_end_size", "Size of zip64 end of central directory record") yield ZipVersion(self, "version_made_by", "Version made by") yield ZipVersion(self, "version_needed", "Version needed to extract") yield UInt32(self, "number_disk", "Number of this disk") yield UInt32(self, "number_disk2", "Number of the disk with the start of the central directory") yield UInt64(self, "number_entries", "Total number of entries in the central directory on this disk") yield UInt64(self, "number_entries2", "Total number of entries in the central directory") yield UInt64(self, "size", "Size of the central directory") yield UInt64(self, "offset", "Offset of start of central directory") if 0 < self["zip64_end_size"].value: yield RawBytes(self, "data_sector", self["zip64_end_size"].value, "zip64 extensible data sector") class ZipEndCentralDirectory(FieldSet): HEADER = 0x06054b50 def createFields(self): yield UInt16(self, "number_disk", "Number of this disk") yield UInt16(self, "number_disk2", "Number in the central dir") yield UInt16(self, "total_number_disk", "Total number of entries in this disk") yield UInt16(self, "total_number_disk2", "Total number of entries in the central dir") yield UInt32(self, "size", "Size of the central directory") yield UInt32(self, "offset", "Offset of start of central directory") yield PascalString16(self, "comment", "ZIP comment") class ZipDataDescriptor(FieldSet): HEADER_STRING = 
"\x50\x4B\x07\x08" HEADER = 0x08074B50 static_size = 96 def createFields(self): yield textHandler(UInt32(self, "file_crc32", "Checksum (CRC32)"), hexadecimal) yield filesizeHandler(UInt32(self, "file_compressed_size", "Compressed size (bytes)")) yield filesizeHandler(UInt32(self, "file_uncompressed_size", "Uncompressed size (bytes)")) class FileEntry(FieldSet): HEADER = 0x04034B50 filename = None def data(self, size): compression = self["compression"].value if compression == 0: return SubFile(self, "data", size, filename=self.filename) compressed = SubFile(self, "compressed_data", size, filename=self.filename) if compression == COMPRESSION_DEFLATE: return Deflate(compressed) else: return compressed def resync(self): # Non-seekable output, search the next data descriptor size = self.stream.searchBytesLength(ZipDataDescriptor.HEADER_STRING, False, self.absolute_address+self.current_size) if size <= 0: raise ParserError("Couldn't resync to %s" % ZipDataDescriptor.HEADER_STRING) yield self.data(size) yield textHandler(UInt32(self, "header[]", "Header"), hexadecimal) data_desc = ZipDataDescriptor(self, "data_desc", "Data descriptor") #self.info("Resynced!") yield data_desc # The above could be checked anytime, but we prefer trying parsing # than aborting if self["crc32"].value == 0 and \ data_desc["file_compressed_size"].value != size: raise ParserError("Bad resync: position=>%i but data_desc=>%i" % (size, data_desc["file_compressed_size"].value)) def createFields(self): for field in ZipStartCommonFields(self): yield field length = self["filename_length"].value if length: filename = String(self, "filename", length, "Filename", charset=zipGetCharset(self)) yield filename self.filename = filename.value if self["extra_length"].value: yield RawBytes(self, "extra", self["extra_length"].value, "Extra") size = self["compressed_size"].value if size > 0: yield self.data(size) elif self["flags/incomplete"].value: for field in self.resync(): yield field if 
self["flags/has_descriptor"].value: yield ZipDataDescriptor(self, "data_desc", "Data descriptor") def createDescription(self): return "File entry: %s (%s)" % \ (self["filename"].value, self["compressed_size"].display) def validate(self): if self["compression"].value not in COMPRESSION_METHOD: return "Unknown compression method (%u)" % self["compression"].value return "" class ZipSignature(FieldSet): HEADER = 0x05054B50 def createFields(self): yield PascalString16(self, "signature", "Signature") class Zip64EndCentralDirectoryLocator(FieldSet): HEADER = 0x07064b50 def createFields(self): yield UInt32(self, "disk_number", \ "Number of the disk with the start of the zip64 end of central directory") yield UInt64(self, "relative_offset", \ "Relative offset of the zip64 end of central directory record") yield UInt32(self, "disk_total_number", "Total number of disks") class ZipFile(Parser): endian = LITTLE_ENDIAN MIME_TYPES = { # Default ZIP archive u"application/zip": "zip", u"application/x-zip": "zip", # Java archive (JAR) u"application/x-jar": "jar", u"application/java-archive": "jar", # OpenOffice 1.0 u"application/vnd.sun.xml.calc": "sxc", u"application/vnd.sun.xml.draw": "sxd", u"application/vnd.sun.xml.impress": "sxi", u"application/vnd.sun.xml.writer": "sxw", u"application/vnd.sun.xml.math": "sxm", # OpenOffice 1.0 (template) u"application/vnd.sun.xml.calc.template": "stc", u"application/vnd.sun.xml.draw.template": "std", u"application/vnd.sun.xml.impress.template": "sti", u"application/vnd.sun.xml.writer.template": "stw", u"application/vnd.sun.xml.writer.global": "sxg", # OpenDocument u"application/vnd.oasis.opendocument.chart": "odc", u"application/vnd.oasis.opendocument.image": "odi", u"application/vnd.oasis.opendocument.database": "odb", u"application/vnd.oasis.opendocument.formula": "odf", u"application/vnd.oasis.opendocument.graphics": "odg", u"application/vnd.oasis.opendocument.presentation": "odp", u"application/vnd.oasis.opendocument.spreadsheet": "ods", 
u"application/vnd.oasis.opendocument.text": "odt", u"application/vnd.oasis.opendocument.text-master": "odm", # OpenDocument (template) u"application/vnd.oasis.opendocument.graphics-template": "otg", u"application/vnd.oasis.opendocument.presentation-template": "otp", u"application/vnd.oasis.opendocument.spreadsheet-template": "ots", u"application/vnd.oasis.opendocument.text-template": "ott", } PARSER_TAGS = { "id": "zip", "category": "archive", "file_ext": tuple(MIME_TYPES.itervalues()), "mime": tuple(MIME_TYPES.iterkeys()), "magic": (("PK\3\4", 0),), "subfile": "skip", "min_size": (4 + 26)*8, # header + file entry "description": "ZIP archive" } def validate(self): if self["header[0]"].value != FileEntry.HEADER: return "Invalid magic" try: file0 = self["file[0]"] except HACHOIR_ERRORS, err: return "Unable to get file #0" err = file0.validate() if err: return "File #0: %s" % err return True def createFields(self): # File data self.signature = None self.central_directory = [] while not self.eof: header = textHandler(UInt32(self, "header[]", "Header"), hexadecimal) yield header header = header.value if header == FileEntry.HEADER: yield FileEntry(self, "file[]") elif header == ZipDataDescriptor.HEADER: yield ZipDataDescriptor(self, "spanning[]") elif header == 0x30304b50: yield ZipDataDescriptor(self, "temporary_spanning[]") elif header == ZipCentralDirectory.HEADER: yield ZipCentralDirectory(self, "central_directory[]") elif header == ZipEndCentralDirectory.HEADER: yield ZipEndCentralDirectory(self, "end_central_directory", "End of central directory") elif header == Zip64EndCentralDirectory.HEADER: yield Zip64EndCentralDirectory(self, "end64_central_directory", "ZIP64 end of central directory") elif header == ZipSignature.HEADER: yield ZipSignature(self, "signature", "Signature") elif header == Zip64EndCentralDirectoryLocator.HEADER: yield Zip64EndCentralDirectoryLocator(self, "end_locator", "ZIP64 Enf of central directory locator") else: raise ParserError("Error, 
unknown ZIP header (0x%08X)." % header) def createMimeType(self): if self["file[0]/filename"].value == "mimetype": return makeUnicode(self["file[0]/data"].value) else: return u"application/zip" def createFilenameSuffix(self): if self["file[0]/filename"].value == "mimetype": mime = self["file[0]/compressed_data"].value if mime in self.MIME_TYPES: return "." + self.MIME_TYPES[mime] return ".zip" def createContentSize(self): start = 0 end = MAX_FILESIZE * 8 end = self.stream.searchBytes("PK\5\6", start, end) if end is not None: return end + 22*8 return None
17,503
Python
.py
386
35.981865
116
0.618705
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,744
__init__.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/__init__.py
from hachoir_parser.archive.ace import AceFile from hachoir_parser.archive.ar import ArchiveFile from hachoir_parser.archive.bzip2_parser import Bzip2Parser from hachoir_parser.archive.cab import CabFile from hachoir_parser.archive.gzip_parser import GzipParser from hachoir_parser.archive.tar import TarFile from hachoir_parser.archive.zip import ZipFile from hachoir_parser.archive.rar import RarFile from hachoir_parser.archive.rpm import RpmFile from hachoir_parser.archive.sevenzip import SevenZipParser from hachoir_parser.archive.mar import MarFile
557
Python
.py
11
49.545455
59
0.875229
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,745
sevenzip.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/sevenzip.pyc
—Ú Œ »Mc@sdZddklZddklZlZlZlZlZl Z l Z l Z l Z l Z ddklZddklZlZlZdefdÑÉYZedÉ\ZZZZZZZZZZZ Z!Z"Z#Z$Z%Z&Z'Z(Z)Z*Z+Z,Z-hd e6d e6d e6d e6d e6de6de6de6de6de6de 6de!6de"6de#6de$6de%6de&6de'6de(6de)6de*6de+6de,6d e-6Z.d!efd"ÑÉYZ/d#d$ÑZ0d%efd&ÑÉYZ1d'efd(ÑÉYZ2d)ÑZ3d*efd+ÑÉYZ4d,efd-ÑÉYZ5d.efd/ÑÉYZ6d0efd1ÑÉYZ7d2efd3ÑÉYZ8d4efd5ÑÉYZ9d6efd7ÑÉYZ:d8efd9ÑÉYZ;d:efd;ÑÉYZ<d<efd=ÑÉYZ=d>efd?ÑÉYZ>d@efdAÑÉYZ?dBefdCÑÉYZ@dDS(Esö 7zip file parser Informations: - File 7zformat.txt of 7-zip SDK: http://www.7-zip.org/sdk.html Author: Olivier SCHWAB Creation date: 6 december 2006 iˇˇˇˇ(tParser( tFieldtFieldSett ParserErrort GenericVectortEnumtUInt8tUInt32tUInt64tBytestRawBytes(t LITTLE_ENDIAN(t textHandlert hexadecimaltfilesizeHandlertSZUInt64cBseZdZdddÑZRS(sz Variable length UInt64, where the first byte gives both the number of bytes needed and the upper byte value. c sÍti|||ddd|Édâ|i}d}|ii|dtÉ}xÖtdÉD]w}|d7}||@pà||d@d|>7âPnà|ii|dtÉd|>Oâ|dL}|id7_qYWáfdÜ|_dS(Ntsizeit descriptioniiÄicsàS((((tvalue(sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyt<lambda>'s( Rt__init__tabsolute_addresststreamtreadBitsR txranget_sizet createValue( tselftparenttnametmax_sizeRtaddrtmaskt firstByteti((RsI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRs    $ N(t__name__t __module__t__doc__tNoneR(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRsitEndsHeader embedding another onesArchive PropertiessAdditional Streams InfosMain Streams Infos Files Infos Pack Infos Unpack InfosSubstreams InfotSizetCRCtFoldersCoders Unpacked sizesNumber of Unpacked Streamss Empty Streams Empty FiletAntitNames Creation TimesLast Access TimesLast Write TimesWin AttributestComments Header holding encoded data infot SkippedDatacBseZdÑZRS(ccsWtt|dÉtÉVt|dÉ}|V|idjot|d|iÉVndS(Nsid[]Ritdata(RRtID_INFORRR (RR((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyt createFieldsLs 
(R#R$R1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR.Kss waited_id[]ccs¥x≠|ip¢|i|i}|ii|dtÉ}||jo7tt||ÉtÉV|i dt||fÉdS|i d||fÉt |dd||fÉVqWdS(NisFound ID %s (%u)sSkipping ID %u!=%us skipped_id[]s%u != %u( teofRt current_sizeRRR RRR0tinfoR.(tstwait_idt wait_nameRtuid((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyt waitForIDSs  t HashDigestcBseZddÑZdÑZRS(cCs#ti||||É||_dS(N(RRt num_digests(RRRR;tdesc((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR_sccs´tt|dÉtÉV|ii|i|iÉ}|idjoet|d|itdÉVxHt|iÉD]3}||o"t t |dd|Ét ÉVqlqlWndS(Ntidis defined[]tboolshash[]sHash for digest %u( RRR0Rt readBytesRR;RRR RR (Rtbytestindex((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1bs  N(R#R$R&RR1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR:^s tPackInfocBseZdÑZRS(ccs#tt|dÉtÉVt|ddÉVt|dÉ}|V|i}xt|tdÉD] }|VqXWx"t|ÉD]}t|dÉVqtWxê|ipÖ|i |i }|i i |dt É}|tjott|dÉtÉVPqè|tjot|d |ÉVqèt|d ÉVqèWdS( NR=tpack_possPosition of the packstnum_pack_streamst size_markers pack_size[]it end_markert hash_digestt skipped_data(RRR0RRR9tID_SIZERR2RR3RRR tID_ENDtID_CRCR:R.(RtnumtfieldRRR8((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1ms*      (R#R$R1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRBlscCsB|i}|d}|d}|d}|d}d|||fS(Ni islc=%u pb=%u lp=%u(R(Rtparamt remaindertlctpbtlp((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyt lzmaParamsÜs      tCoderIDcBs%eZhdd6dd6ZdÑZRS(tcopyttlzmasccs†t|dÉ}|V|i}|id|É|d@}|djo}|ii|i|i|É}||ijo"|i|}|id|Én|id|Éd}t|||ÉVn|d@oEt |d ÉVt |d ÉV|id |d i|d ifÉn|d @ot |d É}|V|idjo/t t|dÉt ÉVt t |dÉÉVqú|idjot|d|iÉVqúndS(Ntid_sizesID=%uiis Codec is %ssUndetermined codec %stunknownit num_stream_intnum_stream_outsStreams: IN=%u OUT=%ui sproperties_size[]it parameterstdictionary_sizes properties[](RRR4RR?RR3tCODECSR RR 
RSRR(RtbyteRR((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1ós4        (R#R$R^R1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRTës t CoderInfocBseZddÑZdÑZRS(cCs,ti||||Éd|_d|_dS(Ni(RRt in_streamst out_streams(RRRR<((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR∑s ccsæ|i|i}|iii|dtÉ}t|dÉ}|V|d@o$|di|_|di|_ nxI|d@o=|i|i}|iii|dtÉ}t|dÉVqqWdS(Nitcoder_idiRZR[iÄsunused_codec_id[]( RR3RRRR RTRRaRb(RRtbtcid((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1ªs  N(R#R$R&RR1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR`∂s t BindPairInfocBseZdÑZRS(ccsEt|dÉVt|dÉV|id|di|difÉdS(Ntin_indext out_indexsIndexes: IN=%u OUT=%u(RR4R(R((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1Ãs (R#R$R1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRfÀst FolderItemcBseZddÑZdÑZRS(cCs,ti||||Éd|_d|_dS(Ni(RRRaRb(RRRR<((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR‘s ccs t|dÉV|di}|id|ÉxLt|ÉD]>}t|dÉ}|V|i|i7_|i|i7_q9W|id|iÉx)t|idÉD]}t|dÉVq£W|i|id}|djon&x"t|ÉD]}t|dÉVqÌWdS(Nt num_coderssFolder: %u codecss coder_info[]sout streams: %uis bind_pair[]s pack_stream[](RRR4RR`RaRbRf(RRLRAtcitpacked_streams((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1Ÿs&    N(R#R$R&RR1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRi”s t UnpackInfocBseZdÑZRS(c csûtt|dÉtÉVxt|tdÉD] }|Vq*Wt|dÉV|di}|id|Ét|dÉVx"t|ÉD]}t |dÉVqÄWxt|t dÉD] }|Vq´WxJt|ÉD]<}|d|}x%t|i ÉD]}t|d ÉVqÎWq«Wxê|i pÖ|i |i}|ii|d tÉ}|tjott|d ÉtÉVPq |tjot|d |ÉVq t|d ÉVq WdS(NR=t folder_markert num_folderss %u folderst is_externals folder_item[]tcoders_unpsize_markersfolder_item[%u]s unpack_size[]iRFRGt skip_data(RRR0R9t ID_FOLDERRRR4RRitID_CODERS_UNPACK_SIZERbR2RR3RRR RJRKR:R.(RRMRLt folder_indext 
folder_itemRARR8((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1ıs<        (R#R$R1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRmÙst SubStreamInfocBseZdÑZRS(ccs'tt|dÉtÉVtdÉÇdS(NR=s!SubStreamInfo not implemented yet(RRR0R(R((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1s(R#R$R1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRwst EncodedHeadercBseZdÑZRS(ccstt|dÉtÉVx‰|ipŸ|i|i}|ii|dtÉ}|t jott|dÉtÉVPq|t jot |dtt ÉVq|t jot |dtt ÉVq|tjot|dttÉVq|id|ÉPqWdS(NR=iRFt pack_infot unpack_infotsubstreams_infosUnexpected ID (%i)(RRR0R2RR3RRR RJt ID_PACK_INFORBtID_UNPACK_INFORmtID_SUBSTREAMS_INFORwR4(RRR8((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1 s      (R#R$R1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRxstIDHeadercBseZdÑZRS(ccs%tt|dÉtÉVtdÉdS(NR=sIDHeader not implemented(RRR0R(R((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR13s(R#R$R1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR2st NextHeadercBs&eZddÑZdÑZdÑZRS(s Next headercCs.ti||||Éd|di|_dS(Nis"/signature/start_hdr/next_hdr_size(RRRR(RRRR<((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR8sccs3tt|dÉtÉVt|d|idÉVdS(Nt header_typet header_datai(RRR0R R(R((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyt createFields2=sccs≤|ii|idtÉ}|tjot|dttÉVn5|tjot|dttÉVnt d|É|i |i }|djot |d|ddÉVndS(Nitheadert encoded_hdrsUnexpected ID %uit next_hdr_datasNext header's data( RRRR t ID_HEADERRR0tID_ENCODED_HEADERRxRRR3R (RR8R((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1@s   (R#R$RRÉR1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRÄ7s  tBodycBseZddÑZdÑZRS(s Body 
datacCs.ti||||Éd|di|_dS(Nis$/signature/start_hdr/next_hdr_offset(RRRR(RRRR<((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRQsccsËd|djo£tg}|idÉD]}||iq(~É}|di}t|d|dÉVt|d|dÉV|id ||}|d jot|d |ÉVq‰n1d |d jot|d|id dÉVndS(NRÖs /next_hdr/s)/next_hdr/encoded_hdr/pack_info/pack_sizes(/next_hdr/encoded_hdr/pack_info/pack_postcompressed_datasCompressed datatcompressed_file_infosCompressed file informationiit unknown_dataRÑs /next_hdr(tsumtarrayRR R(Rt_[1]R5t pack_sizet body_sizeR((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1Ts3   (R#R$RR1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRâPs t StartHeadercBseZdZdÑZRS(i†ccsItt|ddÉtÉVt|ddÉVtt|ddÉtÉVdS(Ntnext_hdr_offsetsNext header offsett next_hdr_sizesNext header sizet next_hdr_crcsNext header CRC(R RR R(R((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1es   (R#R$t static_sizeR1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRícstSignatureHeadercBseZdeiZdÑZRS(i`ccset|dddÉVt|ddÉVt|ddÉVtt|dd ÉtÉVt|d d ÉVdS( Nt signatureisSignature Headert major_versArchive major versiont minor_versArchive minor versiont start_hdr_crcsStart header CRCt start_hdrs Start header(R RR RR Rí(R((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1ns  (R#R$RíRñR1(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRóls tSevenZipParsercBs`eZhdd6dd6dd6dd6dd 6dd 6dd6ZeZdÑZdÑZdÑZRS(t7zipR=tarchivetcategoryt7ztfile_extuapplication/x-7z-compressedtmimei itmin_sizes7zºØ'itmagicsCompressed archive in 7z formatRccs1t|ddÉVt|dÉVt|dÉVdS(NRòsSignature Headert body_datatnext_hdr(RóRâRÄ(R((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyR1ÇscCs%|iiddÉdjodStS(Niis7zºØ'sInvalid 
signature(RR?tTrue(R((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pytvalidateáscCs:|di}||di7}|d7}|d7}|dS(Ns$/signature/start_hdr/next_hdr_offsets"/signature/start_hdr/next_hdr_sizei ii(R(RR((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pytcreateContentSizeås    (R°(uapplication/x-7z-compressedi(s7zºØ'i((s7zºØ'i(R#R$t PARSER_TAGSR tendianR1R©R™(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyRùvs   N(AR%thachoir_parserRthachoir_core.fieldRRRRRRRRR R thachoir_core.endianR thachoir_core.text_handlerR R RRRRJRátID_ARCHIVE_PROPStID_ADD_STREAM_INFOtID_MAIN_STREAM_INFOt ID_FILES_INFOR|R}R~RIRKRsRttID_NUM_UNPACK_STREAMStID_EMPTY_STREAMt ID_EMPTY_FILEtID_ANTItID_NAMEtID_CREATION_TIMEtID_LAST_ACCESS_TIMEtID_LAST_WRITE_TIMEt ID_WIN_ATTRt ID_COMMENTRàR0R.R9R:RBRSRTR`RfRiRmRwRxRRÄRâRíRóRù(((sI/pentest/enumeration/google/metagoofil/hachoir_parser/archive/sevenzip.pyt<module> sbFT   %!& 
18,891
Python
.py
76
247.342105
1,096
0.4524
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,746
ace.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/ace.pyc
—Ú Œ »Mc@s>dZddklZddklZlZlZlZlZl Z l Z l Z l Z l Z lZlZlZlZddklZlZlZddklZddklZdZdZd Zh d d6d d 6d d 6dd6dd6dd6dd6dd6dd6dd6dd6dd6Zhd d6d!d 6d"d 6Zhd#d6d$d 6d%d 6d&d6d'd6Zd(efd)ÑÉYZ d*ÑZ!d+ÑZ"d,efd-ÑÉYZ#d.ÑZ$d/ÑZ%d0ÑZ&d1ÑZ'd2ÑZ(d3ÑZ)d4ÑZ*d5efd6ÑÉYZ+d7ÑZ,d8ÑZ-d9ÑZ.d:efd;ÑÉYZ/d<efd=ÑÉYZ0d>S(?sG ACE parser From wotsit.org and the SDK header (bitflags) Partial study of a new block type (5) I've called "new_recovery", as its syntax is very close to the former one (of type 2). Status: can only read totally file and header blocks. Author: Christophe Gisquet <christophe.gisquet@free.fr> Creation date: 19 january 2006 iˇˇˇˇ(tParser(tStaticFieldSettFieldSettBittBitstNullBitstRawBytestEnumtUInt8tUInt16tUInt32t PascalString8tPascalString16tStringtTimeDateMSDOS32(t textHandlertfilesizeHandlert hexadecimal(t LITTLE_ENDIAN(tMSDOSFileAttr32s**ACE**iisMS-DOSsOS/2itWin32tUnixisMAC-OSisWin NTitPrimosisAPPLE GSitATARIisVAX VMSi tAMIGAi tNEXTi tStores Lempel-Ziv 77sACE v2.0tfastesttfasttnormaltgoodtbestt MarkerFlagsc BsâeZeddfeddfedddfeddfed d fed d fed dfeddfeddfeddff ZRS(textendsWhether the header is extendedt has_comments!Whether the archive has a commenttunusedis Reserved bitstsfxtSFXt limited_dictsJunior SFX with 256K dictionaryt multi_volumesPart of a set of ACE archivest has_av_stringsThis header holds an AV-stringtrecovery_recordsRecovery record presettlockedsArchive is lockedtsolidsArchive uses solid compression(t__name__t __module__RRtformat(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyR >s        ccst|ddÉVdS(Ntflagss Marker flags(R (tself((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyt markerFlagsLsccst|dddÉVt|ddÉVt|ddÉVtt|dd ÉtÉVt|d d ÉVt|d d ÉVt|dddÉV|d}|diot|ddÉVn|dioLtt |ddÉÉ}|V|idjot |d|idÉVq ndS(Nt signatureit Signaturet ver_extracts!Version needed to extract archivet ver_createdsVersion used to create archivethost_oss"OS where the files were compressedtvol_nums Volume numberttimesDate and time (MS DOS 
format)treservedi@s#Reserved size for future extensionsR/R(t av_strings AV StringR"t comment_sizes Comment sizeitcompressed_commentsCompressed comment( R RRtHOST_OSRRtvalueR RR R(R0R/tsize((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyt markerHeaderOs  t FileFlagsc BseeZeddfeddfedddfeddfed d fed d fed dffZRS(R!sWhether the header is extendedR"sPresence of file commentR#i sUnused bit flagst encryptedsFile encrypted with passwordtpreviouss#File continued from previous volumetnexts!File continues on the next volumeR+s/File compressed using previously archived files(R,R-RRR.(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyRAas     ccst|ddÉVdS(NR/s File flags(RA(R0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyt fileFlagslsccs{tt|ddÉÉVtt|ddÉÉVt|ddÉV|dittfjot|dd ÉVntt|dd ÉtÉVtt|d d ÉtÉVt t |d d Ét ÉVt t |ddÉt ÉVtt |ddÉtÉVtt |ddÉtÉVt|ddÉV|dioNtt |ddÉÉV|didjot|d|didÉVqwndS(Ntcompressed_sizesSize of the compressed filetuncompressed_sizesUncompressed file sizetftimesDate and time (MS DOS format)s/header/host_ost file_attrsFile attributest file_crc32s(CRC32 checksum over the compressed file)tcompression_typesType of compressiontcompression_modesQuality of compressiont parameterssCompression parametersR9s Reserved datatfilenametFilenamesflags/has_commentR;sSize of the compressed commentit comment_datas Comment data(RR RR>tOS_MSDOStOS_WIN32RRRRRtCOMPRESSION_TYPEtCOMPRESSION_MODER R R(R0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyt fileHeaderos ccs6|di}|djot|d|dÉVndS(NRFitcompressed_datasCompressed data(R>R(R0R?((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pytfileBodyÑs  cCsd|di|difS(NsFile entry: %s (%s)RNRF(R>tdisplay(R0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pytfileDescâsccs„tt|ddÉÉV|di|_t|dddÉVtt|ddÉtÉVt|dd ÉVt|d d ÉVt|d d ÉV|d i}x3t |diÉD]}t |d|d|ÉVq©Wt |d|dÉVdS(Nt rec_blk_sizesSize of recovery dataR2isSignature, 
normally '**ACE**'trelative_starts@Relative start (to this block) of the data this block is mode oft num_blockss'Number of blocks the data is split intot size_blockssSize of these blockst crc16_blockssCRC16 over recovery datasdata[]sRecovery block %itxor_datas&The XOR value of the above data blocks( RR R?t body_sizeR RRR R>txrangeR(R0R?tindex((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pytrecoveryHeaderås  cCsd|diS(NsRecovery block, size=%uR`(RX(R0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyt recoveryDescùsccsü|dio+tt|ddÉÉV|di|_ntt|ddÉtÉVt|dddÉVtt|d d ÉtÉVtt|ddÉtÉVd S( s* This header is described nowhere s flags/extendR`s"Size of the unknown body followings unknown[]sUnknown field, probably 0R2isSignature, normally '**ACE**'R[s+Offset (=crc16's) of this block in the fileN(R>RR R`RRR (R0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pytnewRecoveryHeader†s  t BaseFlagscBs)eZeddfedddffZRS(R!sWhether the header is extendedR#isUnused bit flags(R,R-RRR.(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyRfØs ccst|ddÉVdS(NR/s Unknown flags(Rf(R0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyt parseFlagsµsccs=|dio+tt|ddÉÉV|di|_ndS(Ns flags/extendR`s"Size of the unknown body following(R>RR R`(R0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyt parseHeader∏sccs/|idjot|d|idÉVndS(Nit body_datasBody data, unhandled(R`R(R0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyt parseBodyΩstBlockcBsÑeZhddeed fd6deeeefd6de e d d fd6dd d e d fd6Z d d ÑZ d ÑZd ÑZRS( theadersArchiver headerisfile[]is recovery[]isnew_recovery[]icCs¸ti||||Éd|_d|_|di}||ijo]|i|\|_}|_|_ |_ |o*t |t Éo ||_ qû||_q≥n|id|É|ip t|_n|i p t |_ n|i p t |_ ndS(Nit block_types,Processing as unknown block block of type %u(Rt__init__R`tNonet desc_funcR>tTAG_INFOt_nameRgRhRjt isinstancetstrt _descriptiontwarning(R0tparenttnamet 
descriptionttypetdesc((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyRn…s"   (      c cs‰tt|ddÉtÉVtt|ddÉÉVt|ddÉVx|i|ÉD] }|VqRWx|i|ÉD] }|VqqW|di|idd }|d jot |d |d ÉVnx|i |ÉD] }|Vq—WdS( Ntcrc16sArchive CRC16 (from byte 4 on)t head_sizesBlock size (from byte 4 on)Rms Block typeiiit extra_datasExtra header data, unhandledi( RR RRRRgRhR>t current_sizeRRj(R0tflagtfieldR?((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyt createFieldsfis   cCs+|io|i|ÉSd|diSdS(Ns Block: %sRz(RpRX(R0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pytcreateDescriptionÚs N(R,R-R1R@RoRYRERURWRdRcReRqRnRÇRÉ(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyRk¡s  tAceFilecBsPeZeZhdd6dd6dd6dd6dd 6d d 6Zd ÑZd ÑZRS(tacetidtarchivetcategorytfile_extuapplication/x-ace-compressedtmimei2itmin_sizes ACE archiveRycCs+|iidttÉÉtjodStS(Niis Invalid magici8(tstreamt readBytestlentMAGICtTrue(R0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pytvalidates"ccs$x|ipt|dÉVqWdS(Nsblock[](teofRk(R0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyRÇs (RÖ(uapplication/x-ace-compressediê(R,R-Rtendiant PARSER_TAGSRëRÇ(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyRѯs  N(1t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRRR R R R R Rthachoir_core.text_handlerRRRthachoir_core.endianRthachoir_parser.common.msdosRRèRQRRR=RSRTR R1R@RARERURWRYRcRdReRfRgRhRjRkRÑ(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/ace.pyt<module> s`^               7
12,452
Python
.py
66
187.5
708
0.499112
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,747
rpm.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/rpm.py
""" RPM archive parser. Author: Victor Stinner, 1st December 2005. """ from hachoir_parser import Parser from hachoir_core.field import (FieldSet, ParserError, UInt8, UInt16, UInt32, UInt64, Enum, NullBytes, Bytes, RawBytes, SubFile, Character, CString, String) from hachoir_core.endian import BIG_ENDIAN from hachoir_parser.archive.gzip_parser import GzipParser from hachoir_parser.archive.bzip2_parser import Bzip2Parser class ItemContent(FieldSet): format_type = { 0: UInt8, 1: Character, 2: UInt8, 3: UInt16, 4: UInt32, 5: UInt64, 6: CString, 7: RawBytes, 8: CString, 9: CString } def __init__(self, parent, name, item): FieldSet.__init__(self, parent, name, item.description) self.related_item = item self._name = "content_%s" % item.name def createFields(self): item = self.related_item type = item["type"].value cls = self.format_type[type] count = item["count"].value if cls is RawBytes: # or type == 8: if cls is RawBytes: args = (self, "value", count) else: args = (self, "value") # cls is CString count = 1 else: if 1 < count: args = (self, "value[]") else: args = (self, "value") for index in xrange(count): yield cls(*args) class Item(FieldSet): type_name = { 0: "NULL", 1: "CHAR", 2: "INT8", 3: "INT16", 4: "INT32", 5: "INT64", 6: "CSTRING", 7: "BIN", 8: "CSTRING_ARRAY", 9: "CSTRING?" 
} tag_name = { 1000: "File size", 1001: "(Broken) MD5 signature", 1002: "PGP 2.6.3 signature", 1003: "(Broken) MD5 signature", 1004: "MD5 signature", 1005: "GnuPG signature", 1006: "PGP5 signature", 1007: "Uncompressed payload size (bytes)", 256+8: "Broken SHA1 header digest", 256+9: "Broken SHA1 header digest", 256+13: "Broken SHA1 header digest", 256+11: "DSA header signature", 256+12: "RSA header signature" } def __init__(self, parent, name, description=None, tag_name_dict=None): FieldSet.__init__(self, parent, name, description) if tag_name_dict is None: tag_name_dict = Item.tag_name self.tag_name_dict = tag_name_dict def createFields(self): yield Enum(UInt32(self, "tag", "Tag"), self.tag_name_dict) yield Enum(UInt32(self, "type", "Type"), Item.type_name) yield UInt32(self, "offset", "Offset") yield UInt32(self, "count", "Count") def createDescription(self): return "Item: %s (%s)" % (self["tag"].display, self["type"].display) class ItemHeader(Item): tag_name = { 61: "Current image", 62: "Signatures", 63: "Immutable", 64: "Regions", 100: "I18N string locales", 1000: "Name", 1001: "Version", 1002: "Release", 1003: "Epoch", 1004: "Summary", 1005: "Description", 1006: "Build time", 1007: "Build host", 1008: "Install time", 1009: "Size", 1010: "Distribution", 1011: "Vendor", 1012: "Gif", 1013: "Xpm", 1014: "Licence", 1015: "Packager", 1016: "Group", 1017: "Changelog", 1018: "Source", 1019: "Patch", 1020: "Url", 1021: "OS", 1022: "Arch", 1023: "Prein", 1024: "Postin", 1025: "Preun", 1026: "Postun", 1027: "Old filenames", 1028: "File sizes", 1029: "File states", 1030: "File modes", 1031: "File uids", 1032: "File gids", 1033: "File rdevs", 1034: "File mtimes", 1035: "File MD5s", 1036: "File link to's", 1037: "File flags", 1038: "Root", 1039: "File username", 1040: "File groupname", 1043: "Icon", 1044: "Source rpm", 1045: "File verify flags", 1046: "Archive size", 1047: "Provide name", 1048: "Require flags", 1049: "Require name", 1050: "Require version", 1051: "No 
source", 1052: "No patch", 1053: "Conflict flags", 1054: "Conflict name", 1055: "Conflict version", 1056: "Default prefix", 1057: "Build root", 1058: "Install prefix", 1059: "Exclude arch", 1060: "Exclude OS", 1061: "Exclusive arch", 1062: "Exclusive OS", 1064: "RPM version", 1065: "Trigger scripts", 1066: "Trigger name", 1067: "Trigger version", 1068: "Trigger flags", 1069: "Trigger index", 1079: "Verify script", #TODO: Finish the list (id 1070..1162 using rpm library source code) } def __init__(self, parent, name, description=None): Item.__init__(self, parent, name, description, self.tag_name) def sortRpmItem(a,b): return int( a["offset"].value - b["offset"].value ) class PropertySet(FieldSet): def __init__(self, parent, name, *args): FieldSet.__init__(self, parent, name, *args) self._size = self["content_item[1]"].address + self["size"].value * 8 def createFields(self): # Read chunk header yield Bytes(self, "signature", 3, r"Property signature (\x8E\xAD\xE8)") if self["signature"].value != "\x8E\xAD\xE8": raise ParserError("Invalid property signature") yield UInt8(self, "version", "Signature version") yield NullBytes(self, "reserved", 4, "Reserved") yield UInt32(self, "count", "Count") yield UInt32(self, "size", "Size") # Read item header items = [] for i in range(0, self["count"].value): item = ItemHeader(self, "item[]") yield item items.append(item) # Sort items by their offset items.sort( sortRpmItem ) # Read item content start = self.current_size/8 for item in items: offset = item["offset"].value diff = offset - (self.current_size/8 - start) if 0 < diff: yield NullBytes(self, "padding[]", diff) yield ItemContent(self, "content[]", item) size = start + self["size"].value - self.current_size/8 if 0 < size: yield NullBytes(self, "padding[]", size) class RpmFile(Parser): PARSER_TAGS = { "id": "rpm", "category": "archive", "file_ext": ("rpm",), "mime": (u"application/x-rpm",), "min_size": (96 + 16 + 16)*8, # file header + checksum + content header "magic": 
(('\xED\xAB\xEE\xDB', 0),), "description": "RPM package" } TYPE_NAME = { 0: "Binary", 1: "Source" } endian = BIG_ENDIAN def validate(self): if self["signature"].value != '\xED\xAB\xEE\xDB': return "Invalid signature" if self["major_ver"].value != 3: return "Unknown major version (%u)" % self["major_ver"].value if self["type"].value not in self.TYPE_NAME: return "Invalid RPM type" return True def createFields(self): yield Bytes(self, "signature", 4, r"RPM file signature (\xED\xAB\xEE\xDB)") yield UInt8(self, "major_ver", "Major version") yield UInt8(self, "minor_ver", "Minor version") yield Enum(UInt16(self, "type", "RPM type"), RpmFile.TYPE_NAME) yield UInt16(self, "architecture", "Architecture") yield String(self, "name", 66, "Archive name", strip="\0", charset="ASCII") yield UInt16(self, "os", "OS") yield UInt16(self, "signature_type", "Type of signature") yield NullBytes(self, "reserved", 16, "Reserved") yield PropertySet(self, "checksum", "Checksum (signature)") yield PropertySet(self, "header", "Header") if self._size is None: # TODO: is it possible to handle piped input? raise NotImplementedError size = (self._size - self.current_size) // 8 if size: if 3 <= size and self.stream.readBytes(self.current_size, 3) == "BZh": yield SubFile(self, "content", size, "bzip2 content", parser=Bzip2Parser) else: yield SubFile(self, "content", size, "gzip content", parser=GzipParser)
8,568
Python
.py
243
26.506173
89
0.555114
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,748
cab.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_parser/archive/cab.pyc
—Ú Œ »Mc@sDdZddklZddklZlZlZlZlZl Z l Z l Z l Z l Z lZlZddklZddklZlZlZddklZdZdZhd d6d d 6d d 6dd6ZdefdÑÉYZdefdÑÉYZdefdÑÉYZdefdÑÉYZdefdÑÉYZdS(sY Microsoft Cabinet (CAB) archive. Author: Victor Stinner Creation date: 31 january 2007 iˇˇˇˇ(tParser( tFieldSettEnumtCStringtStringtUInt16tUInt32tBittBitst PaddingBitstNullBitstDateTimeMSDOS32tRawBytes(tMSDOSFileAttr16(t textHandlert hexadecimaltfilesizeHandler(t LITTLE_ENDIANiit UncompressedtDeflateitQuantumitLZXitFoldercBseZdÑZdÑZRS(ccset|ddÉVt|dÉVtt|dddÉtÉVt|ddd ÉVt|d d ÉVdS( Ntoff_datasOffset of datatcf_datat compr_methodisCompression methodt compr_levelisCompression leveltpaddingi(RRRRtCOMPRESSION_NAMER (tself((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyt createFieldss cCsBd|di}|ditjo|d|di7}n|S(NsFolder: compression %sRs (level %u)R(tdisplaytvaluetCOMPRESSION_NONE(Rttext((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pytcreateDescription#s(t__name__t __module__RR#(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyRs tFilecBseZdÑZdÑZRS(ccsmtt|ddÉÉVt|ddÉVt|ddÉVt|dÉVt|dÉVt|d d d ÉVdS( NtfilesizesUncompressed file sizetoffsetsFile offset after decompressiontiFoldersfile control idt timestampt attributestfilenametcharsettASCII(RRRR R R(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyR*s cCsd|di|difS(Ns File %s (%s)R,R'(R(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyR#2s(R$R%RR#(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyR&)s tReservedcBseZdÑZRS(ccs;t|dÉV|di}|ot|d|ÉVndS(Ntsizetdata(RR R (RR0((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyR7s (R$R%R(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyR/6stFlagscBseZdZdÑZRS(iccs?t|dÉVt|dÉVt|dÉVt|ddÉVdS(Nt has_previousthas_nextt has_reservedRi (RR (R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyR?s(R$R%t 
static_sizeR(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyR2=stCabFilecBsoeZeZdZhdd6dd6dd6dd6edffd 6dd 6d d6ZdÑZdÑZdÑZRS(tMSCFtcabtidtarchivetcategorytfile_extu!application/vnd.ms-cab-compressedtmimeitmagiciitmin_sizesMicrosoft Cabinet archivet descriptioncCsÅ|iiddÉ|ijodS|didjod|diSd|dijo tjnpd |diStS( Niis Invalid magict cab_versionisUnknown version (%s)it nb_foldersInvalid number of folder (%s)(tstreamt readBytestMAGICR Rt MAX_NB_FOLDERtTrue(R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pytvalidateRs%ccsöt|dddddÉVtt|ddÉtÉVtt|dd ÉÉVtt|d d ÉtÉVt|d d ÉVtt|ddÉtÉVtt|ddÉtÉVt|ddÉVt|ddÉVt|dÉVt|dÉVt|ddÉV|diot|dÉVnx)t |diÉD]}t |dÉVq-Wx)t |diÉD]}t |dÉVqYW|i |i dÉ}|o |VndS(NR?is Magic (MSCF)R-R.t hdr_checksumsHeader checksum (0 if not used)R'sCabinet file sizet fld_checksums Folders checksum (0 if not used)toff_filesOffset of first filetfiles_checksumsFiles checksum (0 if not used)RBsCabinet versionRCsNumber of folderstnb_filessNumber of filestflagstsetidtnumbersZero-based cabinet numbersflags/has_reservedtreservedsfolder[]sfile[]tendraw(RRRRRRR2R R/txrangeRR&tseekBitR0(Rtindextend((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyR[s.cCs|didS(NR'i(R (R((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pytcreateContentSize{s(R9(u!application/vnd.ms-cab-compressedi( R$R%RtendianRFt PARSER_TAGSRIRRX(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyR7Es  N( t__doc__thachoir_parserRthachoir_core.fieldRRRRRRRRR R R R thachoir_parser.common.msdosR thachoir_core.text_handlerRRRthachoir_core.endianRRGR!RRR&R/R2R7(((sD/pentest/enumeration/google/metagoofil/hachoir_parser/archive/cab.pyt<module>s"R  
6,469
Python
.py
22
293
1,282
0.478517
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,749
metadataExtractor.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/extractors/metadataExtractor.py
#!/usr/bin/env python
"""Extract author names and filesystem paths from a document's metadata
by running the external ``extract`` tool and parsing its stdout."""
import sys, re, os, subprocess

class metaExtractor:
    """Wrapper around the external ``extract`` command.

    Usage: construct with a file name, call runExtract() to capture the
    tool's output into ``self.data``, then getData() to populate the
    deduplicated ``users`` and ``paths`` lists.
    """

    def __init__(self, fname):
        self.fname = fname
        # External binary used for extraction; if it is not on $PATH,
        # put the full path here.
        self.command = "extract"
        self.data = ""
        self.paths = []
        self.users = []

    def runExtract(self):
        """Run ``extract`` on the file.

        Returns "ok" on success, "error" on any failure (missing binary,
        permission problem, ...) — callers check the string marker.
        """
        try:
            # shell=False + argument list: the file name is never
            # interpreted by a shell.
            process = subprocess.Popen([self.command, self.fname],
                                       shell=False, stdout=subprocess.PIPE)
            res = process.communicate()
            self.data = res[0]
            return "ok"
        except Exception:
            # Was a bare "except:"; narrowed, but still best-effort.
            return "error"

    def getData(self):
        """Parse ``self.data`` for paths and author names (deduplicated)."""
        pathre = re.compile('worked on .*')
        pathre2 = re.compile('template -.*')
        for reg in (pathre, pathre2):
            for match in reg.findall(self.data):
                # The interesting value is between single quotes; skip
                # matches that carry none.
                try:
                    temp = match.split('\'')[1]
                except IndexError:
                    continue
                if temp not in self.paths:
                    self.paths.append(temp)
        author = re.compile(': Author \'.*\'')
        for match in author.findall(self.data):
            # The regex guarantees at least one quoted segment.
            temp = match.split('\'')[1].replace('\'', '')
            if temp not in self.users:
                self.users.append(temp)

    def getUsers(self):
        """Return the list of author names found so far."""
        return self.users

    def getPaths(self):
        """Return the list of filesystem paths found so far."""
        return self.paths
1,128
Python
.py
43
22.302326
93
0.657699
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,750
metadataMSOffice.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/extractors/metadataMSOffice.pyc
—Ú Œ »Mc @sìddklZddklZddklZddklZddkl Z ddk l Z ddk l Z lZlZdd d ÑÉYZd S( iˇˇˇˇ(t HachoirError(tunicodeFilename(t createParser(t makePrintable(textractMetadata(tgetTerminalCharset(targvtstderrtexittmetaMs2kcBs>eZdÑZdÑZdÑZdÑZdÑZdÑZRS(cCsL||_g|_g|_g|_g|_g|_g|_d|_dS(Nt(tfilenametuserstpathstsoftwaret modificationt creationDatet lastPrintedtraw(tselfR ((sE/pentest/enumeration/google/metagoofil/extractors/metadataMSOffice.pyt__init__ s       c Cst|iÉ|i}}t||É}yt|É}Wn*tj o}dt|ÉGHd}nX|pd|iGHnï|iÉ}tÉ}x||D]t}|i dÉ} | ddjo|i i | dÉn.| ddjo|i i | dÉn| dd jo|i i | dÉn‹| dd jo|i i | dÉn≥| dd jo|ii | dÉnä| dd jo&|id dÉ} |ii | ÉnS| ddjo|i i | dÉn*| ddjo|ii | dÉn||_qòWdS(NsMetadata extraction error: %ss$Unable to extract metadata on file: t:is- Authoris Author:is - Producers- Creation dates- Last modifications Templates- Comment: Template:R s LastSavedBys LastPrintedtok(RR RRRtunicodetNonetexportPlaintextRtsplitR tappendRRRtreplaceR RR( RR trealnametparsertmetadataterrttexttcharsettlinetrestxres((sE/pentest/enumeration/google/metagoofil/extractors/metadataMSOffice.pytgetDatasB    cCs|iS(N(R (R((sE/pentest/enumeration/google/metagoofil/extractors/metadataMSOffice.pytgetUsers8scCs|iS(N(R(R((sE/pentest/enumeration/google/metagoofil/extractors/metadataMSOffice.pyt getSoftware:scCs|iS(N(R (R((sE/pentest/enumeration/google/metagoofil/extractors/metadataMSOffice.pytgetPaths<scCs|iS(N(R(R((sE/pentest/enumeration/google/metagoofil/extractors/metadataMSOffice.pytgetRaw>s(t__name__t __module__RR&R'R(R)R*(((sE/pentest/enumeration/google/metagoofil/extractors/metadataMSOffice.pyR s  $   N((thachoir_core.errorRthachoir_core.cmd_lineRthachoir_parserRthachoir_core.toolsRthachoir_metadataRthachoir_core.i18nRtsysRRRR (((sE/pentest/enumeration/google/metagoofil/extractors/metadataMSOffice.pyt<module>s
3,234
Python
.py
17
189.117647
1,138
0.45028
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,751
metadataPDF.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/extractors/metadataPDF.pyc
Ñò Î ÈMc@s}ddkZddkZddklZlZddklZlZddkl Z l Z l Z l Z ddd„ƒYZ dS(iÿÿÿÿN(t PSKeywordt PSLiteral(t PDFDocumentt PDFParser(t PDFStreamt PDFObjReftresolve1t stream_valuetmetapdfcBsJeZdd„Zd„Zd„Zd„Zd„Zd„Zd„ZRS(tcCsL||_||_d|_g|_g|_g|_d|_g|_dS(NR (tfnametpasswordtmetadatatuserstsoftwaretpathstrawtcompany(tselfR R ((s@/pentest/enumeration/google/metagoofil/extractors/metadataPDF.pyt__init__s       cCsÞyÏtƒ}t|idƒ}t|ƒ}|i|ƒ|i|ƒ|i|iƒt|i dƒ}|i ƒ|i ƒxL|i D]A}|i i dƒ}|ot|ƒ}n||_||_q…WdSWndSXdS(NtrbtMetadatatInfotokterror(RtfileR Rt set_documentt set_parsert initializeR RtcatalogtclosetxrefsttrailertgetR R(RtdoctfptparserR txreftinfo_reftinfo((s@/pentest/enumeration/google/metagoofil/extractors/metadataPDF.pytgetDatas(         cCs5|iidƒo|ii|idƒn|iS(NtAuthor(R thas_keyR tappend(R((s@/pentest/enumeration/google/metagoofil/extractors/metadataPDF.pytgetUsers-scCs5|iidƒo|ii|idƒn|iS(NtCompany(R R*R R+R(R((s@/pentest/enumeration/google/metagoofil/extractors/metadataPDF.pyt getCompany2scCsc|iidƒo|ii|idƒn|iidƒo|ii|idƒn|iS(NtProducertCreator(R R*RR+(R((s@/pentest/enumeration/google/metagoofil/extractors/metadataPDF.pyt getSoftware8s cCs|iS(N(R(R((s@/pentest/enumeration/google/metagoofil/extractors/metadataPDF.pytgetPaths?scCs|iS(N(R(R((s@/pentest/enumeration/google/metagoofil/extractors/metadataPDF.pytgetRawBs( t__name__t __module__RR(R,R.R1R2R3(((s@/pentest/enumeration/google/metagoofil/extractors/metadataPDF.pyR s     ((tsystretpdfminer.psparserRRtpdfminer.pdfparserRRtpdfminer.pdftypesRRRRR(((s@/pentest/enumeration/google/metagoofil/extractors/metadataPDF.pyt<module>s"
2,950
Python
.py
17
172.411765
515
0.433538
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,752
metadataMSOffice.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/extractors/metadataMSOffice.py
"""Extract metadata from legacy MS Office documents using hachoir.

Python 2 module: parses the plaintext metadata report produced by
hachoir_metadata and collects authors, software, dates and template paths.
"""
from hachoir_core.error import HachoirError
from hachoir_core.cmd_line import unicodeFilename
from hachoir_parser import createParser
from hachoir_core.tools import makePrintable
from hachoir_metadata import extractMetadata
from hachoir_core.i18n import getTerminalCharset
from sys import argv, stderr, exit

class metaMs2k:
    """Metadata extractor for MS Office 97-2003 (OLE) documents."""

    def __init__(self,filename):
        self.filename=filename
        self.users=[]           # author / LastSavedBy names
        self.paths=[]           # template paths
        self.software=[]        # producer application strings
        self.modification=[]    # last-modification dates
        self.creationDate=[]    # creation dates
        self.lastPrinted=[]     # last-printed dates
        self.raw=""             # full plaintext metadata report

    def getData(self):
        """Run hachoir on the file and fill the result lists.

        Parses each "key: value" line of the plaintext export.  NOTE(review):
        lines without a ':' would make res[1] raise IndexError in the elif
        chain — presumably hachoir always emits colon-separated lines;
        behavior kept as-is.
        """
        filename, realname = unicodeFilename(self.filename), self.filename
        parser = createParser(filename, realname)
        try:
            metadata = extractMetadata(parser)
        except HachoirError, err:
            print "Metadata extraction error: %s" % unicode(err)
            metadata = None
        if not metadata:
            print "Unable to extract metadata on file: " + self.filename
        else:
            text = metadata.exportPlaintext()
            charset = getTerminalCharset()   # NOTE(review): unused; kept as-is
            for line in text:
                res=line.split(":")
                if res[0]=="- Author":
                    self.users.append(res[1])
                elif res[1]==" Author:":
                    self.users.append(res[2])
                elif res[0]=="- Producer":
                    self.software.append(res[1])
                elif res[0]=="- Creation date":
                    self.creationDate.append(res[1])
                elif res[0]=="- Last modification":
                    self.modification.append(res[1])
                elif res[1]==" Template":
                    # Strip the fixed prefix to keep only the template path.
                    xres= line.replace("- Comment: Template:","")
                    self.paths.append(xres)
                elif res[1]==" LastSavedBy":
                    # print res[1] + res[2]
                    self.users.append(res[2])
                elif res[1]==" LastPrinted":
                    self.lastPrinted.append(res[2])
            self.raw=text
        return "ok"

    def getUsers(self):
        """Return collected author / last-saved-by names."""
        return self.users

    def getSoftware(self):
        """Return collected producer software strings."""
        return self.software

    def getPaths(self):
        """Return collected template paths."""
        return self.paths

    def getRaw(self):
        """Return the raw plaintext metadata report."""
        return self.raw
1,818
Python
.py
60
26.533333
68
0.710541
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,753
metadataMSOfficeXML.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/extractors/metadataMSOfficeXML.pyc
—Ú Œ »Mc @s_ddkZddkZddkZddkZddkZddkZdddÑÉYZdS(iˇˇˇˇNt metaInfoMScBskeZdÑZdÑZdÑZdÑZdÑZdÑZdÑZdÑZdÑZ d ÑZ d ÑZ RS( cCs˜d|_d|_d|_d|_d|_d|_d|_d|_d|_d|_ d|_ d|_ d|_ d|_ d|_d|_d|_d|_d|_d|_d|_d|_d|_d|_d|_d|_d|_dS(Nttok(ttemplatet totalTimetpagestwordst characterst applicationt docSecuritytlinest paragraphst scaleCroptcompanyt linksUpToDatetcharactersWithSpacestshareDocthyperlinksChangedt appVersionttitletsubjecttcreatortkeywordstlastModifiedBytrevisiont createdDatet modifiedDatet userscommentst thumbnailPathtcomments(tself((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pyt__init__ s6                          c Cs∆|GHd|_d|_d|_d|_d|_d|_d|_d|_d|_d|_ d|_ d|_ d|_ d|_ d|_d|_d|_d|_d|_d|_d|_d|_d|_d|_d|_ttidddÉÉ}ti|dÉ}td|ddÉi|i d ÉÉtd |ddÉi|i d ÉÉy4td |ddÉi|i d ÉÉd|_!Wnd|_!nXd}|i"Ét#d|ddÉ}|i É}|i$|É|i"É|i!djo>t#d |ddÉ}|i É}|i%|É|i"Ént#d |ddÉ}|i É}|i&|É||_|i"Ét'i(d|dÉt'i(d |dÉt'i(d |dÉdS(NRiiÈitrtapps.xmltwsdocProps/app.xmltcoresdocProps/core.xmlRsword/comments.xmlRterror()RRRRRRR R R R R RRRRRRRRRRRRRRtstrtrandomt randrangetzipfiletZipFiletfiletwritetreadRtclosetopentcargaAppt cargaCommt cargaCoretostremove( RtfilepathtrndtzipRtfR!tcommR#((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pyR&sl                         '''            cCs–dGHdt|iÉGHdt|iÉGHdt|iÉGHdt|iÉGHdt|iÉGHdt|iÉGHdt|iÉGHd t|iÉGHd t|i ÉGHd t|i ÉGHd t|i ÉGHd t|i ÉGHdt|i ÉGHdt|iÉGHdt|iÉGHdt|iÉGHdGHdt|iÉGHdt|iÉGHdt|iÉGHdt|iÉGHdt|iÉGHdt|iÉGHdt|iÉGHdt|iÉGHdt|iÉGHdS(Ns--- Metadata app ---s template: s totalTime: s pages: s words: s characters: s application: s docSecurity: s lines: s paragraphs: s scaleCrop: s company: s linksUpToDate: s charactersWithSpaces: s shareDoc:s hyperlinksChanged:s appVersion:s --- Metadata core ---s title:s subject:s creator:s keywords:s lastModifiedBy:s revision:s createdDate:s modifiedDate:s thumbnailPath:(R%RRRRRRR R R R R 
RRRRRRRRRRRRRR(R((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pyttoStringqs6cCs3y%tidÉ}|i|É|_WnnXdS(Nsw:author="(.*?)" w(tretcompiletfindallR(Rtdatostp((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pyR0ês c3Csîy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_ WnnXy/tidÉ}t|i|ÉdÉ|_ WnnXy/tid É}t|i|ÉdÉ|_ WnnXy/tid É}t|i|ÉdÉ|_ WnnXy/tid É}t|i|ÉdÉ|_ WnnXy/tid É}t|i|ÉdÉ|_WnnXy/tid É}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXdS(Ns<Template>(.*)</Template>is<TotalTime>(.*)</TotalTime>s<Pages>(.*)</Pages>s<Words>(.*)</Words>s<Characters>(.*)</Characters>s<Application>(.*)</Application>s<DocSecurity>(.*)</DocSecurity>s<Lines>(.*)</Lines>s<Paragraphs>(.*)</Paragraphs>s<ScaleCrop>(.*)</ScaleCrop>s<Company>(.*)</Company>s#<LinksUpToDate>(.*)</LinksUpToDate>s1<CharactersWithSpaces>(.*)</CharactersWithSpaces>s<SharedDoc>(.*)</SharedDoc>s+<HyperlinksChanged>(.*)</HyperlinksChanged>s<AppVersion>(.*)</AppVersion>(R:R;R%R<RRRRRRR R R R R RRt sharedDocRR(RR=R>((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pyR/òs†                cCsÃy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_WnnXy/tidÉ}t|i|ÉdÉ|_ WnnXy/tidÉ}t|i|ÉdÉ|_ WnnXy/tid É}t|i|ÉdÉ|_ WnnXdS( Ns<dc:title>(.*)</dc:title>is<dc:subject>(.*)</dc:subject>s<dc:creator>(.*)</dc:creator>s<cp:keywords>(.*)</cp:keywords>s+<cp:lastModifiedBy>(.*)</cp:lastModifiedBy>s<cp:revision>(.*)</cp:revision>s5<dcterms:created xsi:type=".*">(.*)</dcterms:created>s7<dcterms:modified xsi:type=".*">(.*)</dcterms:modified>( R:R;R%R<RRRRRRRR(RR=R>((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pyR1˘sP        cCsdS(NR((R((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pytgetData*scCs d}|S(NsNot implemented 
yet((Rtraw((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pytgetRaw-scCsåg}g}|i|iÉ|i|iÉ|idjo|i|iÉnx5|D]-}|i|Édjo|i|ÉqWqWW|S(NRi(tappendRRRtextendRtcount(Rtresttemporaltx((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pytgetUsers1scCs g}|S(N((RRF((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pytgetPathsAscCsg}|i|iÉ|S(N(RCR(RRF((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pyt getSoftwareFs( t__name__t __module__RR9R0R/R1R@RBRIRJRK(((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pyRs  K   a 1    ((tunzipR(tsysR:R2R&R(((sH/pentest/enumeration/google/metagoofil/extractors/metadataMSOfficeXML.pyt<module>s      
8,682
Python
.py
35
247
1,063
0.397086
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,754
metadataOpenOffice.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/extractors/metadataOpenOffice.py
# NOTE(review): "unzip" is an unused project-local legacy module; guard the
# import so this extractor still loads when it is absent.
try:
    import unzip
except ImportError:
    pass
import zipfile
import sys
import re
import os
import random


class metaInfoOO:
    """Extract OpenOffice/ODF document metadata from the archive's meta.xml.

    Every metadata field is exposed as a string attribute (empty string when
    the field is absent from the document).
    """

    # (attribute name, regex, compile flags) table driving carga().
    # This replaces 21 near-identical try/except blocks from the original.
    # BUG FIX: the original called p.findall(datos, re.DOTALL) for
    # initial-creator and dc:creator -- the second findall() argument is the
    # start *position*, not a flag, so matching silently skipped the first 16
    # characters.  The flag now goes to re.compile() where it belongs.
    _FIELDS = (
        ("version", r'office:version="([\d.]*)"><office:meta>', 0),
        ("generator", r'<meta:generator>(.*)</meta:generator>', 0),
        ("creationDate", r'<meta:creation-date>(.*)</meta:creation-date>', 0),
        ("date", r'<dc:date>(.*)</dc:date>', 0),
        ("language", r'<dc:language>(.*)</dc:language>', 0),
        ("editingCycles", r'<meta:editing-cycles>(.*)</meta:editing-cycles>', 0),
        ("editingDuration", r'<meta:editing-duration>(.*)</meta:editing-duration>', 0),
        ("tableCount", r'meta:table-count="(\d*)"', 0),
        ("imageCount", r'meta:image-count="(\d*)"', 0),
        ("objectCount", r'meta:object-count="(\d*)"', 0),
        ("pageCount", r'meta:page-count="(\d*)"', 0),
        ("paragraphCount", r'meta:paragraph-count="(\d*)"', 0),
        ("wordCount", r'meta:word-count="(\d*)"', 0),
        ("characterCount", r'meta:character-count="(\d*)"', 0),
        ("initialCreator", r'<meta:initial-creator>(.*)</meta:initial-creator>', re.DOTALL),
        ("creator", r'<dc:creator>(.*)</dc:creator>', re.DOTALL),
        ("title", r'<dc:title>(.*)</dc:title>', 0),
        ("description", r'<dc:description>(.*)</dc:description>', 0),
        ("subject", r'<dc:subject>(.*)</dc:subject>', 0),
        ("printedBy", r'<meta:printed-by>(.*)</meta:printed-by>', 0),
        ("printDate", r'<meta:print-date>(.*)</meta:print-date>', 0),
    )

    def __init__(self, filepath):
        """Open *filepath* as a ZIP archive and parse its meta.xml entry.

        Raises whatever zipfile raises for a bad archive or a missing
        meta.xml member (same failure mode as the original code).
        """
        # Default every field to "" so toString() is always safe.
        for name, _pattern, _flags in self._FIELDS:
            setattr(self, name, "")
        # Read meta.xml straight from the archive.  The original wrote a
        # temp file 'meta<rnd>.xml' into the CWD only to re-read and delete
        # it, leaking the file on any error in between.
        zip = zipfile.ZipFile(filepath, 'r')
        try:
            meta = zip.read('meta.xml')
        finally:
            zip.close()
        # Python 3 compatibility: ZipFile.read() returns bytes there, while
        # the regex table holds text patterns.  (Python 2 returns str and is
        # left untouched.)
        if not isinstance(meta, str):
            meta = meta.decode('utf-8', 'replace')
        self.carga(meta)

    def toString(self):
        """Print every extracted field to stdout (legacy report format)."""
        # print(single_string) is identical under Python 2 and 3.
        print("--- Metadata ---")
        print(" version: " + str(self.version))
        print(" generator: " + str(self.generator))
        print(" creation-date: " + str(self.creationDate))
        print(" date: " + str(self.date))
        print(" language: " + str(self.language))
        print(" editing cycles: " + str(self.editingCycles))
        print(" editing duration: " + str(self.editingDuration))
        print(" table count: " + str(self.tableCount))
        print(" image count: " + str(self.imageCount))
        print(" object count: " + str(self.objectCount))
        print(" page count: " + str(self.pageCount))
        print(" paragraph count: " + str(self.paragraphCount))
        print(" word count: " + str(self.wordCount))
        print(" character count:" + str(self.characterCount))
        print(" initial creator:" + str(self.initialCreator))
        print(" creator:" + str(self.creator))
        print(" title:" + str(self.title))
        print(" description:" + str(self.description))
        print(" subject:" + str(self.subject))
        print(" printed by:" + str(self.printedBy))
        print(" print date:" + str(self.printDate))

    def carga(self, datos):
        """Populate the metadata attributes by pattern-matching *datos*.

        Best-effort: any field whose pattern fails or is missing simply
        keeps its previous value, mirroring the original try/except/pass
        behaviour.
        """
        for name, pattern, flags in self._FIELDS:
            try:
                found = re.compile(pattern, flags).findall(datos)
                if found:
                    setattr(self, name, str(found[0]))
            except Exception:
                # A malformed document must never abort extraction.
                pass
5,203
Python
.py
190
23.4
72
0.646594
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,755
metadataOpenOffice.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/extractors/metadataOpenOffice.pyc
Ñò Î ÈMc @s_ddkZddkZddkZddkZddkZddkZddd„ƒYZdS(iÿÿÿÿNt metaInfoOOcBs,eZd„Zd„Zd„Zd„ZRS(cCsÁd|_d|_d|_d|_d|_d|_d|_d|_d|_d|_ d|_ d|_ d|_ d|_ d|_d|_d|_d|_d|_d|_d|_dS(Nt(tversiont generatort creationDatetdatetlanguaget editingCyclesteditingDurationt tableCountt imageCountt objectCountt pageCounttparagraphCountt wordCounttcharacterCounttinitialCreatortcreatorttitlet descriptiontsubjectt printedByt printDate(tself((sG/pentest/enumeration/google/metagoofil/extractors/metadataOpenOffice.pyt__init__ s*                    cCsnd|_d|_d|_d|_d|_d|_d|_d|_d|_d|_ d|_ d|_ d|_ d|_ d|_d|_d|_d|_d|_d|_d|_ttidddƒƒ}ti|dƒ}td|ddƒi|id ƒƒ|iƒtd|ddƒ}|iƒ}|i|ƒ|iƒt i!d|dƒdS( NRiiéitrtmetas.xmltwsmeta.xml("RRRRRRRR R R R R RRRRRRRRRtstrtrandomt randrangetzipfiletZipFiletfiletwritetreadtclosetopentcargatostremove(RtfilepathtrndtziptfR((sG/pentest/enumeration/google/metagoofil/extractors/metadataOpenOffice.pyR s<                     '    cCsƒdGHdt|iƒGHdt|iƒGHdt|iƒGHdt|iƒGHdt|iƒGHdt|iƒGHdt|iƒGHd t|iƒGHd t|i ƒGHd t|i ƒGHd t|i ƒGHd t|i ƒGHdt|i ƒGHdt|iƒGHdt|iƒGHdt|iƒGHdt|iƒGHdt|iƒGHdt|iƒGHdt|iƒGHdt|iƒGHdS(Ns--- Metadata ---s version: s generator: s creation-date: s date: s language: s editing cycles: s editing duration: s table count: s image count: s object count: s page count: s paragraph count: s word count: s character count:s initial creator:s creator:s title:s description:s subject:s printed by:s print date:(RRRRRRRRR R R R R RRRRRRRRR(R((sG/pentest/enumeration/google/metagoofil/extractors/metadataOpenOffice.pyttoStringEs,cBCs½y/tidƒ}t|i|ƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_ WnnXy/tidƒ}t|i|ƒdƒ|_ WnnXy/tid ƒ}t|i|ƒdƒ|_ WnnXy/tid ƒ}t|i|ƒdƒ|_ WnnXy/tid ƒ}t|i|ƒdƒ|_ WnnXy/tid ƒ}t|i|ƒdƒ|_WnnXy/tid 
ƒ}t|i|ƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_WnnXy5tidƒ}t|i|tiƒdƒ|_WnnXy5tidƒ}t|i|tiƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_WnnXy/tidƒ}t|i|ƒdƒ|_WnnXdS(Ns'office:version="([\d.]*)"><office:meta>is%<meta:generator>(.*)</meta:generator>s-<meta:creation-date>(.*)</meta:creation-date>s<dc:date>(.*)</dc:date>s<dc:language>(.*)</dc:language>s/<meta:editing-cycles>(.*)</meta:editing-cycles>s3<meta:editing-duration>(.*)</meta:editing-duration>smeta:table-count="(\d*)"smeta:image-count="(\d*)"smeta:object-count="(\d*)"smeta:page-count="(\d*)"smeta:paragraph-count="(\d*)"smeta:word-count="(\d*)"smeta:character-count="(\d*)"s1<meta:initial-creator>(.*)</meta:initial-creator>s<dc:creator>(.*)</dc:creator>s<dc:title>(.*)</dc:title>s%<dc:description>(.*)</dc:description>s<dc:subject>(.*)</dc:subject>s'<meta:printed-by>(.*)</meta:printed-by>s'<meta:print-date>(.*)</meta:print-date>(tretcompileRtfindallRRRRRRRR R R R R RRtDOTALLRRRRRRR(Rtdatostp((sG/pentest/enumeration/google/metagoofil/extractors/metadataOpenOffice.pyR&]sÒ              &&     (t__name__t __module__RR-R&(((sG/pentest/enumeration/google/metagoofil/extractors/metadataOpenOffice.pyRs  % ((tunzipRtsysR.R'RR(((sG/pentest/enumeration/google/metagoofil/extractors/metadataOpenOffice.pyt<module>s      
5,809
Python
.py
19
304.736842
1,639
0.389225
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,756
metadataPDF.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/extractors/metadataPDF.py
#!/usr/bin/env python
#
# metadataPDF.py - dump pdf metadata
#
# Copy of Yusuke's dumppdf to add dumpmeta
import sys, re


# dumpmeta
class metapdf:
    """Extract the document-information (Info) dictionary from a PDF.

    Usage: call getData() first; on success ("ok") the getUsers()/
    getCompany()/getSoftware() accessors read fields out of the captured
    Info dictionary.
    """

    def __init__(self, fname, password=''):
        self.fname = fname
        self.password = password
        self.metadata = ''   # trailer Info dict after a successful getData()
        self.users = []
        self.software = []
        self.paths = []
        self.raw = ""
        self.company = []

    def getData(self):
        """Parse the PDF and capture its trailer Info dictionary.

        Returns "ok" on success, "error" on any failure (unreadable file,
        wrong password, missing pdfminer, no XMP Metadata entry, ...).
        """
        try:
            # Imported lazily so the accessors remain usable on a
            # pre-populated instance even when pdfminer is unavailable;
            # an import failure is reported as "error" like any other.
            from pdfminer.pdfparser import PDFDocument, PDFParser
            from pdfminer.pdftypes import resolve1

            doc = PDFDocument()
            fp = open(self.fname, 'rb')
            try:
                parser = PDFParser(fp)
                parser.set_document(doc)
                doc.set_parser(parser)
                doc.initialize(self.password)
                # Kept from the original: forces a KeyError (-> "error")
                # when the catalog has no /Metadata entry.
                metadata = resolve1(doc.catalog['Metadata'])
                parser.close()
            finally:
                # BUG FIX: the original leaked fp whenever parsing raised.
                fp.close()
            for xref in doc.xrefs:
                info_ref = xref.trailer.get('Info')
                if info_ref:
                    info = resolve1(info_ref)
                    self.metadata = info
                    self.raw = info
            return "ok"
        except Exception:
            return "error"

    def getUsers(self):
        """Return author names found in the Info dictionary."""
        if 'Author' in self.metadata:
            self.users.append(self.metadata['Author'])
        return self.users

    def getCompany(self):
        """Return company names found in the Info dictionary.

        BUG FIX: the original appended the Company value to self.users and
        then returned the always-empty self.company list.
        """
        if 'Company' in self.metadata:
            self.company.append(self.metadata['Company'])
        return self.company

    def getSoftware(self):
        """Return producing-software names (Producer, then Creator)."""
        if 'Producer' in self.metadata:
            self.software.append(self.metadata['Producer'])
        if 'Creator' in self.metadata:
            self.software.append(self.metadata['Creator'])
        return self.software

    def getPaths(self):
        # PDFs carry no path metadata here; kept for interface parity with
        # the other extractors.
        return self.paths

    def getRaw(self):
        return self.raw
1,554
Python
.py
58
23.655172
74
0.731002
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,757
metadataExtractor.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/extractors/metadataExtractor.pyc
Ñò Î ÈMc@sGddkZddkZddkZddkZddd„ƒYZdS(iÿÿÿÿNt metaExtractorcBs5eZd„Zd„Zd„Zd„Zd„ZRS(cCs1||_d|_d|_g|_g|_dS(Ntextractt(tfnametcommandtdatatpathstusers(tselfR((sF/pentest/enumeration/google/metagoofil/extractors/metadataExtractor.pyt__init__s     c Csn|id|i}yKti|i|igdtdtiƒ}|iƒ}|d|_dSWndSXdS(Nt tshelltstdoutitokterror(RRt subprocesstPopentFalsetPIPEt communicateR(Rtcommtprocesstres((sF/pentest/enumeration/google/metagoofil/extractors/metadataExtractor.pyt runExtract s*  c CsPtidƒ}tidƒ}x–||fD]ˆ}|i|iƒ}|gjocx`|D]T}yD|idƒd}|ii|ƒdjo|ii|ƒnWqWqWXqWWq+q+Wtidƒ}|i|iƒ}|gjogxd|D]X}|idƒd}|iddƒ}|i i|ƒdjo|i i|ƒqìqìWndS(Ns worked on .*s template -.*s'iis : Author '.*'R( tretcompiletfindallRtsplitRtcounttappendtreplaceR( Rtpathretpathre2tregtpathtxttemptauthortauthors((sF/pentest/enumeration/google/metagoofil/extractors/metadataExtractor.pytgetDatas.   cCs|iS(N(R(R((sF/pentest/enumeration/google/metagoofil/extractors/metadataExtractor.pytgetUsers.scCs|iS(N(R(R((sF/pentest/enumeration/google/metagoofil/extractors/metadataExtractor.pytgetPaths1s(t__name__t __module__R RR'R(R)(((sF/pentest/enumeration/google/metagoofil/extractors/metadataExtractor.pyRs    ((tsysRtosRR(((sF/pentest/enumeration/google/metagoofil/extractors/metadataExtractor.pyt<module>s0
2,352
Python
.py
9
260.111111
679
0.436007
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,758
metadataMSOfficeXML.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/extractors/metadataMSOfficeXML.py
# NOTE(review): "unzip" is an unused project-local legacy module; guard the
# import so this extractor still loads when it is absent.
try:
    import unzip
except ImportError:
    pass
import zipfile
import sys
import re
import os
import random


class metaInfoMS:
    """Extract metadata from MS Office OpenXML archives (docx/xlsx/pptx).

    Reads docProps/app.xml and docProps/core.xml, plus word/comments.xml
    when present, exposing each field as a string attribute.
    """

    # docProps/app.xml fields: attribute name -> XML tag.  Replaces 16
    # copy-pasted try/except blocks from the original cargaApp().
    # BUG FIX: the original declared self.shareDoc but cargaApp() stored
    # into self.sharedDoc, so the parsed value was never reported; the
    # attribute is now consistently "sharedDoc".
    _APP_FIELDS = (
        ("template", "Template"),
        ("totalTime", "TotalTime"),
        ("pages", "Pages"),
        ("words", "Words"),
        ("characters", "Characters"),
        ("application", "Application"),
        ("docSecurity", "DocSecurity"),
        ("lines", "Lines"),
        ("paragraphs", "Paragraphs"),
        ("scaleCrop", "ScaleCrop"),
        ("company", "Company"),
        ("linksUpToDate", "LinksUpToDate"),
        ("charactersWithSpaces", "CharactersWithSpaces"),
        ("sharedDoc", "SharedDoc"),
        ("hyperlinksChanged", "HyperlinksChanged"),
        ("appVersion", "AppVersion"),
    )
    # docProps/core.xml fields: attribute name -> full regex.
    _CORE_FIELDS = (
        ("title", r'<dc:title>(.*)</dc:title>'),
        ("subject", r'<dc:subject>(.*)</dc:subject>'),
        ("creator", r'<dc:creator>(.*)</dc:creator>'),
        ("keywords", r'<cp:keywords>(.*)</cp:keywords>'),
        ("lastModifiedBy", r'<cp:lastModifiedBy>(.*)</cp:lastModifiedBy>'),
        ("revision", r'<cp:revision>(.*)</cp:revision>'),
        ("createdDate", r'<dcterms:created xsi:type=".*">(.*)</dcterms:created>'),
        ("modifiedDate", r'<dcterms:modified xsi:type=".*">(.*)</dcterms:modified>'),
    )

    @staticmethod
    def _text(data):
        """Return *data* as text (Python 3 ZipFile.read() yields bytes)."""
        if not isinstance(data, str):
            return data.decode('utf-8', 'replace')
        return data

    def __init__(self, filepath):
        """Open *filepath* as an OpenXML archive and parse its metadata."""
        print(filepath)
        # Default every field to "" so toString() is always safe.
        for name, _tag in self._APP_FIELDS:
            setattr(self, name, "")
        for name, _pattern in self._CORE_FIELDS:
            setattr(self, name, "")
        # BUG FIX: the original never initialised userscomments on this
        # path, so getUsers() could hit an AttributeError when the
        # comments regex failed.
        self.userscomments = []
        # Thumbnail extraction was disabled (commented out) in the
        # original; the attribute is kept for the report interface.
        self.thumbnailPath = ""
        # Read the XML members straight from the archive.  The original
        # wrote app<rnd>.xml/core<rnd>.xml/comments<rnd>.xml temp files
        # into the CWD only to re-read and delete them.
        zip = zipfile.ZipFile(filepath, 'r')
        try:
            self.cargaApp(self._text(zip.read('docProps/app.xml')))
            self.cargaCore(self._text(zip.read('docProps/core.xml')))
            try:
                # word/comments.xml only exists for commented .docx files.
                self.cargaComm(self._text(zip.read('word/comments.xml')))
                self.comments = "ok"
            except Exception:
                self.comments = "error"
        finally:
            zip.close()

    def toString(self):
        """Print every extracted field to stdout (legacy report format)."""
        # print(single_string) is identical under Python 2 and 3.
        print("--- Metadata app ---")
        print(" template: " + str(self.template))
        print(" totalTime: " + str(self.totalTime))
        print(" pages: " + str(self.pages))
        print(" words: " + str(self.words))
        print(" characters: " + str(self.characters))
        print(" application: " + str(self.application))
        print(" docSecurity: " + str(self.docSecurity))
        print(" lines: " + str(self.lines))
        print(" paragraphs: " + str(self.paragraphs))
        print(" scaleCrop: " + str(self.scaleCrop))
        print(" company: " + str(self.company))
        print(" linksUpToDate: " + str(self.linksUpToDate))
        print(" charactersWithSpaces: " + str(self.charactersWithSpaces))
        print(" shareDoc:" + str(self.sharedDoc))
        print(" hyperlinksChanged:" + str(self.hyperlinksChanged))
        print(" appVersion:" + str(self.appVersion))
        print("\n --- Metadata core ---")
        print(" title:" + str(self.title))
        print(" subject:" + str(self.subject))
        print(" creator:" + str(self.creator))
        print(" keywords:" + str(self.keywords))
        print(" lastModifiedBy:" + str(self.lastModifiedBy))
        print(" revision:" + str(self.revision))
        print(" createdDate:" + str(self.createdDate))
        print(" modifiedDate:" + str(self.modifiedDate))
        print("\n thumbnailPath:" + str(self.thumbnailPath))

    def cargaComm(self, datos):
        """Collect comment author names from word/comments.xml."""
        try:
            self.userscomments = re.compile(r'w:author="(.*?)" w').findall(datos)
        except Exception:
            pass

    def cargaApp(self, datos):
        """Populate statistics fields from docProps/app.xml content."""
        for name, tag in self._APP_FIELDS:
            try:
                found = re.compile('<%s>(.*)</%s>' % (tag, tag)).findall(datos)
                if found:
                    setattr(self, name, str(found[0]))
            except Exception:
                # Best-effort: a missing/odd field must not abort extraction.
                pass

    def cargaCore(self, datos):
        """Populate authorship fields from docProps/core.xml content."""
        for name, pattern, in self._CORE_FIELDS:
            try:
                found = re.compile(pattern).findall(datos)
                if found:
                    setattr(self, name, str(found[0]))
            except Exception:
                pass

    def getData(self):
        # Parsing already happened in __init__; kept for interface parity.
        return "ok"

    def getRaw(self):
        raw = "Not implemented yet"
        return raw

    def getUsers(self):
        """Return unique user names: creator, last editor, commenters."""
        candidates = [self.creator, self.lastModifiedBy]
        if self.comments == "ok":
            candidates.extend(self.userscomments)
        unique = []
        for user in candidates:
            if user not in unique:
                unique.append(user)
        return unique

    def getPaths(self):
        # OpenXML metadata carries no filesystem paths; interface parity.
        return []

    def getSoftware(self):
        """Return the producing application name as a one-element list."""
        return [self.application]
7,512
Python
.py
283
22.855124
81
0.653247
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,759
program.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/program.py
from hachoir_metadata.metadata import RootMetadata, registerExtractor
from hachoir_parser.program import ExeFile
from hachoir_metadata.safe import fault_tolerant, getValue


class ExeMetadata(RootMetadata):
    """Metadata extractor for Windows executables parsed by ExeFile.

    Handles both PE (Win32+) and NE (16-bit "new-style") binaries,
    pulling format/CPU/date information from the headers and author/
    title/version strings from the version-info resource.
    """

    # Version-info string keys copied into metadata attributes.
    KEY_TO_ATTR = {
        u"ProductName": "title",
        u"LegalCopyright": "copyright",
        u"LegalTrademarks": "copyright",
        u"LegalTrademarks1": "copyright",
        u"LegalTrademarks2": "copyright",
        u"CompanyName": "author",
        u"BuildDate": "creation_date",
        u"FileDescription": "title",
        u"ProductVersion": "version",
    }
    # Version-info keys deliberately ignored (not useful as metadata).
    SKIP_KEY = set((u"InternalName", u"OriginalFilename", u"FileVersion", u"BuildVersion"))

    def extract(self, exe):
        # Dispatch on executable flavour as detected by the parser.
        if exe.isPE():
            self.extractPE(exe)
        elif exe.isNE():
            self.extractNE(exe)

    def extractNE(self, exe):
        # NE: read the header and, when present, the info resource tree.
        if "ne_header" in exe:
            self.useNE_Header(exe["ne_header"])
        if "info" in exe:
            self.useNEInfo(exe["info"])

    @fault_tolerant
    def useNEInfo(self, info):
        # Only the StringFileInfo node carries the key/value string table.
        for node in info.array("node"):
            if node["name"].value == "StringFileInfo":
                self.readVersionInfo(node["node[0]"])

    def extractPE(self, exe):
        # Read information from headers
        if "pe_header" in exe:
            self.usePE_Header(exe["pe_header"])
        if "pe_opt_header" in exe:
            self.usePE_OptHeader(exe["pe_opt_header"])

        # Use PE resource
        resource = exe.getResource()
        if resource and "version_info/node[0]" in resource:
            for node in resource.array("version_info/node[0]/node"):
                if getValue(node, "name") == "StringFileInfo" \
                and "node[0]" in node:
                    self.readVersionInfo(node["node[0]"])

    @fault_tolerant
    def useNE_Header(self, hdr):
        # Classify the NE binary: DLL, Windows 3.x app, or other.
        if hdr["is_dll"].value:
            self.format_version = u"New-style executable: Dynamic-link library (DLL)"
        elif hdr["is_win_app"].value:
            self.format_version = u"New-style executable: Windows 3.x application"
        else:
            self.format_version = u"New-style executable for Windows 3.x"

    @fault_tolerant
    def usePE_Header(self, hdr):
        # PE header: link timestamp, target CPU, and DLL/EXE flavour.
        self.creation_date = hdr["creation_date"].value
        self.comment = "CPU: %s" % hdr["cpu"].display
        if hdr["is_dll"].value:
            self.format_version = u"Portable Executable: Dynamic-link library (DLL)"
        else:
            self.format_version = u"Portable Executable: Windows application"

    @fault_tolerant
    def usePE_OptHeader(self, hdr):
        # Optional header contributes only the subsystem (GUI/console/...).
        self.comment = "Subsystem: %s" % hdr["subsystem"].display

    def readVersionInfo(self, info):
        """Copy a StringFileInfo key/value table into metadata attributes.

        Empty values are dropped; keys in KEY_TO_ATTR map to named
        attributes, keys in SKIP_KEY are ignored, everything else is
        recorded as a "key=value" comment.
        """
        values = {}
        for node in info.array("node"):
            if "value" not in node or "name" not in node:
                continue
            value = node["value"].value.strip(" \0")
            if not value:
                continue
            key = node["name"].value
            values[key] = value

        if "ProductName" in values and "FileDescription" in values:
            # Make sure that FileDescription is set before ProductName
            # as title value
            self.title = values["FileDescription"]
            self.title = values["ProductName"]
            del values["FileDescription"]
            del values["ProductName"]

        # NOTE(review): dict.iteritems() is Python 2 only -- consistent
        # with the rest of this (Python 2) code base.
        for key, value in values.iteritems():
            if key in self.KEY_TO_ATTR:
                setattr(self, self.KEY_TO_ATTR[key], value)
            elif key not in self.SKIP_KEY:
                self.comment = "%s=%s" % (key, value)


registerExtractor(ExeFile, ExeMetadata)
3,646
Python
.py
86
32.395349
91
0.598421
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,760
riff.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/riff.pyc
Ñò Î ÈMc @sËdZddklZlZlZddklZlZddkl Z ddk l Z ddk l Z lZlZddklZddklZdd klZd efd „ƒYZee eƒd S( sB Extract metadata from RIFF file format: AVI video and WAV sound. iÿÿÿÿ(tMetadatatMultipleMetadatatregisterExtractor(tfault_toleranttgetValue(tRiffFile(tUNCOMPRESSED_AUDIO(t humanFilesizet makeUnicodettimedelta2seconds(t_(tcomputeComprRate(t timedeltat RiffMetadatacBsÄeZhdd6dd6dd6dd6dd 6d d 6d d 6d d6Zd„Zd„Zed„ƒZd„Zed„ƒZed„ƒZ ed„ƒZ ed„ƒZ d„Z ed„ƒZ RS(ttitletINAMtartisttIARTtcommenttICMTt copyrighttICOPtauthortIENGtproducertISFTt creation_datetICRDtIDITcCsÞ|di}|djo<|i|ƒt|dƒ}|ot||dƒq¸nc|djo7d|jo&|i|dƒ|i|dƒq¸n|djo|i|ƒnd|jo|i|dƒndS( NttypetWAVEsaudio_data/sizeisAVI theaderstACONtinfo(tvaluet extractWAVERtcomputeAudioComprRatet extractAVIt extractInfot extractAnim(tselftriffRtsize((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pytextracts       cCsyd|jodS|di}|di}||ijo|id||fƒdS|i|}t|||ƒdS(NttextttagsSkip RIFF metadata %s: %s(R"t TAG_TO_KEYtwarningtsetattr(R(tchunkR"R-tkey((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pyt processChunk*s    cCsC|d}|di|_|di|_|di|_|di|_d|joBd|dijo.tdt|diƒ|diƒ|_n|dit jo’|di|di|di|_ |i d ƒ oWd |joJ|i d ƒo:t|d iƒd |i d ƒ}td|ƒ|_q?ndS( Ntformatt nb_channeltbit_per_sampletsample_per_sectcodecsnb_sample/nb_sampleitsecondstdurationsaudio_data/sizetbit_ratei( R"R5tbits_per_samplet sample_ratetdisplayt compressionR tfloatR:RR;thastget(R(twavR4R:((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pyR#5s  .& $cCsixb|D]Z}|ipqnd|jo6|didjo|i|ƒqa|i|ƒqqWdS(NR-tLIST(t is_field_setR"R&R3(R(tfieldsettfield((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pyR&Ks  cCs.d|dit|diƒf|_|diot|dioft|diƒ|di}||_d|jo.tdt|diƒ|ƒ|_|_q©nd|jo>|d }|d i|_|d i|_ |d i|_ n7|d i|di|_|di|di|_ dS(Ns%s (fourcc:"%s")tfourcctratetscaleiR9tlengths../stream_fmt/widths ../stream_fmttwidththeighttdepthtrighttlefttbottomttop( R>RR"R?R@t frame_rateR 
R:RLRMtbits_per_pixel(R(theadertmetatfpsR4((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pytextractAVIVideoUs$  2  cCs@|di|_|di|_|did|_|dio|di|_nd|jo¨|d}|dioS|dioEt|diƒ|di}td t|d iƒ|ƒ|_n|d id jo%d |di|d if|_ q n|i dƒp|di|_ n|i |ƒdS(NtchannelR=R;iR<s ../stream_hdrRIRJR9RKRHts%s (fourcc:"%s")R8R?( R"R5R=R;R<R@R R:R>R?RAR$(R(R4RVRURS((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pytextractAVIAudiohs   '&cCsp|iddƒ}|pdS|idƒ|idƒ|idddƒ}|pdSt|ƒ||_dS(NR;iR5R=R<tdefaulti(RBR@t compr_rate(R(RVtuncomprtcompr((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pyR$|s/cCsŠ|di}|oRd||_t|dƒ}|o+|idƒ otd||ƒ|_qfn|di|_|di|_dS(Ntmicrosec_per_frameg€„.At total_frameR:t microsecondsRLRM(R"RSRRAR R:RLRM(R(RUtmicrosecRa((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pyt useAviHeader†s  cCs¢d}xð|idƒD]ß}d|joqn|di}|djoHd|jo7t|ƒ}|i|d|ƒ|id|dƒqõq|djoVd |joEt|ƒ}|i|d |ƒ|id ||d ƒ|d7}qõqqWd |jo|i|d ƒn|id ƒo>d|jo1t|diƒdt |i d ƒƒ|_ nd|jo(t dƒt |didƒ|_ndS(Nitstreamsstream_hdr/stream_typetvidst stream_hdrtvideos Video streamtaudst stream_fmts audio[%u]s Audio streamtavi_hdrR:s /movie/sizeis/indexsHas audio/video index (%s)(tarrayR"RRXtaddGroupR[RdRAR@R RBR;R RR*R(R(Rt audio_indexRet stream_typeRV((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pyR%‘s0         1  cCs½d|jovd}d}xD|idƒD]3}|d7}d|joPn||id7}q)W|o|o|||_qƒn|idƒ o%d|jod|di|_ndS( Nsanim_rate/rate[0]isanim_rate/rateiidgN@RSsanim_hdr/jiffie_rate(RlR"RSRA(R(R)tcountttotalRI((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pyR'®s   (t__name__t __module__R.R+R3RR#R&RXR[R$RdR%R'(((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pyR s&      N(t__doc__thachoir_metadata.metadataRRRthachoir_metadata.safeRRthachoir_parser.container.riffRthachoir_parser.video.fourccRthachoir_core.toolsRRR thachoir_core.i18nR thachoir_metadata.audioR R$tdatetimeR R (((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/riff.pyt<module>s¯
7,488
Python
.py
35
212.914286
833
0.405152
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,761
audio.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/audio.py
from hachoir_metadata.metadata import (registerExtractor, Metadata, RootMetadata, MultipleMetadata) from hachoir_parser.audio import AuFile, MpegAudioFile, RealAudioFile, AiffFile, FlacParser from hachoir_parser.container import OggFile, RealMediaFile from hachoir_core.i18n import _ from hachoir_core.tools import makePrintable, timedelta2seconds, humanBitRate from datetime import timedelta, date from hachoir_metadata.metadata_item import QUALITY_FAST, QUALITY_NORMAL, QUALITY_BEST from hachoir_metadata.safe import fault_tolerant, getValue def computeComprRate(meta, size): if not meta.has("duration") \ or not meta.has("sample_rate") \ or not meta.has("bits_per_sample") \ or not meta.has("nb_channel") \ or not size: return orig_size = timedelta2seconds(meta.get("duration")) * meta.get('sample_rate') * meta.get('bits_per_sample') * meta.get('nb_channel') meta.compr_rate = float(orig_size) / size def computeBitRate(meta): if not meta.has("bits_per_sample") \ or not meta.has("nb_channel") \ or not meta.has("sample_rate"): return meta.bit_rate = meta.get('bits_per_sample') * meta.get('nb_channel') * meta.get('sample_rate') VORBIS_KEY_TO_ATTR = { "ARTIST": "artist", "ALBUM": "album", "TRACKNUMBER": "track_number", "TRACKTOTAL": "track_total", "ENCODER": "producer", "TITLE": "title", "LOCATION": "location", "DATE": "creation_date", "ORGANIZATION": "organization", "GENRE": "music_genre", "": "comment", "COMPOSER": "music_composer", "DESCRIPTION": "comment", "COMMENT": "comment", "WWW": "url", "WOAF": "url", "LICENSE": "copyright", } @fault_tolerant def readVorbisComment(metadata, comment): metadata.producer = getValue(comment, "vendor") for item in comment.array("metadata"): if "=" in item.value: key, value = item.value.split("=", 1) key = key.upper() if key in VORBIS_KEY_TO_ATTR: key = VORBIS_KEY_TO_ATTR[key] setattr(metadata, key, value) elif value: metadata.warning("Skip Vorbis comment %s: %s" % (key, value)) class OggMetadata(MultipleMetadata): def extract(self, ogg): 
granule_quotient = None for index, page in enumerate(ogg.array("page")): if "segments" not in page: continue page = page["segments"] if "vorbis_hdr" in page: meta = Metadata(self) self.vorbisHeader(page["vorbis_hdr"], meta) self.addGroup("audio[]", meta, "Audio") if not granule_quotient and meta.has("sample_rate"): granule_quotient = meta.get('sample_rate') if "theora_hdr" in page: meta = Metadata(self) self.theoraHeader(page["theora_hdr"], meta) self.addGroup("video[]", meta, "Video") if "video_hdr" in page: meta = Metadata(self) self.videoHeader(page["video_hdr"], meta) self.addGroup("video[]", meta, "Video") if not granule_quotient and meta.has("frame_rate"): granule_quotient = meta.get('frame_rate') if "comment" in page: readVorbisComment(self, page["comment"]) if 3 <= index: # Only process pages 0..3 break # Compute duration if granule_quotient and QUALITY_NORMAL <= self.quality: page = ogg.createLastPage() if page and "abs_granule_pos" in page: try: self.duration = timedelta(seconds=float(page["abs_granule_pos"].value) / granule_quotient) except OverflowError: pass def videoHeader(self, header, meta): meta.compression = header["fourcc"].display meta.width = header["width"].value meta.height = header["height"].value meta.bits_per_pixel = header["bits_per_sample"].value if header["time_unit"].value: meta.frame_rate = 10000000.0 / header["time_unit"].value def theoraHeader(self, header, meta): meta.compression = "Theora" meta.format_version = "Theora version %u.%u (revision %u)" % (\ header["version_major"].value, header["version_minor"].value, header["version_revision"].value) meta.width = header["frame_width"].value meta.height = header["frame_height"].value if header["fps_den"].value: meta.frame_rate = float(header["fps_num"].value) / header["fps_den"].value if header["aspect_ratio_den"].value: meta.aspect_ratio = float(header["aspect_ratio_num"].value) / header["aspect_ratio_den"].value meta.pixel_format = header["pixel_format"].display meta.comment = 
"Quality: %s" % header["quality"].value def vorbisHeader(self, header, meta): meta.compression = u"Vorbis" meta.sample_rate = header["audio_sample_rate"].value meta.nb_channel = header["audio_channels"].value meta.format_version = u"Vorbis version %s" % header["vorbis_version"].value meta.bit_rate = header["bitrate_nominal"].value class AuMetadata(RootMetadata): def extract(self, audio): self.sample_rate = audio["sample_rate"].value self.nb_channel = audio["channels"].value self.compression = audio["codec"].display if "info" in audio: self.comment = audio["info"].value self.bits_per_sample = audio.getBitsPerSample() computeBitRate(self) if "audio_data" in audio: if self.has("bit_rate"): self.duration = timedelta(seconds=float(audio["audio_data"].size) / self.get('bit_rate')) computeComprRate(self, audio["audio_data"].size) class RealAudioMetadata(RootMetadata): FOURCC_TO_BITRATE = { u"28_8": 15200, # 28.8 kbit/sec (audio bit rate: 15.2 kbit/s) u"14_4": 8000, # 14.4 kbit/sec u"lpcJ": 8000, # 14.4 kbit/sec } def extract(self, real): version = real["version"].value if "metadata" in real: self.useMetadata(real["metadata"]) self.useRoot(real) self.format_version = "Real audio version %s" % version if version == 3: size = getValue(real, "data_size") elif "filesize" in real and "headersize" in real: size = (real["filesize"].value + 40) - (real["headersize"].value + 16) else: size = None if size: size *= 8 if self.has("bit_rate"): sec = float(size) / self.get('bit_rate') self.duration = timedelta(seconds=sec) computeComprRate(self, size) @fault_tolerant def useMetadata(self, info): self.title = info["title"].value self.author = info["author"].value self.copyright = info["copyright"].value self.comment = info["comment"].value @fault_tolerant def useRoot(self, real): self.bits_per_sample = 16 # FIXME: Is that correct? 
if real["version"].value != 3: self.sample_rate = real["sample_rate"].value self.nb_channel = real["channels"].value else: self.sample_rate = 8000 self.nb_channel = 1 fourcc = getValue(real, "FourCC") if fourcc: self.compression = fourcc try: self.bit_rate = self.FOURCC_TO_BITRATE[fourcc] except LookupError: pass class RealMediaMetadata(MultipleMetadata): KEY_TO_ATTR = { "generated by": "producer", "creation date": "creation_date", "modification date": "last_modification", "description": "comment", } def extract(self, media): if "file_prop" in media: self.useFileProp(media["file_prop"]) if "content_desc" in media: self.useContentDesc(media["content_desc"]) for index, stream in enumerate(media.array("stream_prop")): self.useStreamProp(stream, index) @fault_tolerant def useFileInfoProp(self, prop): key = prop["name"].value.lower() value = prop["value"].value if key in self.KEY_TO_ATTR: setattr(self, self.KEY_TO_ATTR[key], value) elif value: self.warning("Skip %s: %s" % (prop["name"].value, value)) @fault_tolerant def useFileProp(self, prop): self.bit_rate = prop["avg_bit_rate"].value self.duration = timedelta(milliseconds=prop["duration"].value) @fault_tolerant def useContentDesc(self, content): self.title = content["title"].value self.author = content["author"].value self.copyright = content["copyright"].value self.comment = content["comment"].value @fault_tolerant def useStreamProp(self, stream, index): meta = Metadata(self) meta.comment = "Start: %s" % stream["stream_start"].value if getValue(stream, "mime_type") == "logical-fileinfo": for prop in stream.array("file_info/prop"): self.useFileInfoProp(prop) else: meta.bit_rate = stream["avg_bit_rate"].value meta.duration = timedelta(milliseconds=stream["duration"].value) meta.mime_type = getValue(stream, "mime_type") meta.title = getValue(stream, "desc") self.addGroup("stream[%u]" % index, meta, "Stream #%u" % (1+index)) class MpegAudioMetadata(RootMetadata): TAG_TO_KEY = { # ID3 version 2.2 "TP1": "author", "COM": 
"comment", "TEN": "producer", "TRK": "track_number", "TAL": "album", "TT2": "title", "TYE": "creation_date", "TCO": "music_genre", # ID3 version 2.3+ "TPE1": "author", "COMM": "comment", "TENC": "producer", "TRCK": "track_number", "TALB": "album", "TIT2": "title", "TYER": "creation_date", "WXXX": "url", "TCON": "music_genre", "TLAN": "language", "TCOP": "copyright", "TDAT": "creation_date", "TRDA": "creation_date", "TORY": "creation_date", "TIT1": "title", } def processID3v2(self, field): # Read value if "content" not in field: return content = field["content"] if "text" not in content: return if "title" in content and content["title"].value: value = "%s: %s" % (content["title"].value, content["text"].value) else: value = content["text"].value # Known tag? tag = field["tag"].value if tag not in self.TAG_TO_KEY: if tag: if isinstance(tag, str): tag = makePrintable(tag, "ISO-8859-1", to_unicode=True) self.warning("Skip ID3v2 tag %s: %s" % (tag, value)) return key = self.TAG_TO_KEY[tag] setattr(self, key, value) def readID3v2(self, id3): for field in id3: if field.is_field_set and "tag" in field: self.processID3v2(field) def extract(self, mp3): if "/frames/frame[0]" in mp3: frame = mp3["/frames/frame[0]"] self.nb_channel = (frame.getNbChannel(), frame["channel_mode"].display) self.format_version = u"MPEG version %s layer %s" % \ (frame["version"].display, frame["layer"].display) self.sample_rate = frame.getSampleRate() self.bits_per_sample = 16 if mp3["frames"].looksConstantBitRate(): self.computeBitrate(frame) else: self.computeVariableBitrate(mp3) if "id3v1" in mp3: id3 = mp3["id3v1"] self.comment = id3["comment"].value self.author = id3["author"].value self.title = id3["song"].value self.album = id3["album"].value if id3["year"].value != "0": self.creation_date = id3["year"].value if "track_nb" in id3: self.track_number = id3["track_nb"].value if "id3v2" in mp3: self.readID3v2(mp3["id3v2"]) if "frames" in mp3: computeComprRate(self, mp3["frames"].size) def 
computeBitrate(self, frame): bit_rate = frame.getBitRate() # may returns None on error if not bit_rate: return self.bit_rate = (bit_rate, _("%s (constant)") % humanBitRate(bit_rate)) self.duration = timedelta(seconds=float(frame["/frames"].size) / bit_rate) def computeVariableBitrate(self, mp3): if self.quality <= QUALITY_FAST: return count = 0 if QUALITY_BEST <= self.quality: self.warning("Process all MPEG audio frames to compute exact duration") max_count = None else: max_count = 500 * self.quality total_bit_rate = 0.0 for index, frame in enumerate(mp3.array("frames/frame")): if index < 3: continue bit_rate = frame.getBitRate() if bit_rate: total_bit_rate += float(bit_rate) count += 1 if max_count and max_count <= count: break if not count: return bit_rate = total_bit_rate / count self.bit_rate = (bit_rate, _("%s (Variable bit rate)") % humanBitRate(bit_rate)) duration = timedelta(seconds=float(mp3["frames"].size) / bit_rate) self.duration = duration class AiffMetadata(RootMetadata): def extract(self, aiff): if "common" in aiff: self.useCommon(aiff["common"]) computeBitRate(self) @fault_tolerant def useCommon(self, info): self.nb_channel = info["nb_channel"].value self.bits_per_sample = info["sample_size"].value self.sample_rate = getValue(info, "sample_rate") if self.has("sample_rate"): rate = self.get("sample_rate") if rate: sec = float(info["nb_sample"].value) / rate self.duration = timedelta(seconds=sec) if "codec" in info: self.compression = info["codec"].display class FlacMetadata(RootMetadata): def extract(self, flac): if "metadata/stream_info/content" in flac: self.useStreamInfo(flac["metadata/stream_info/content"]) if "metadata/comment/content" in flac: readVorbisComment(self, flac["metadata/comment/content"]) @fault_tolerant def useStreamInfo(self, info): self.nb_channel = info["nb_channel"].value + 1 self.bits_per_sample = info["bits_per_sample"].value + 1 self.sample_rate = info["sample_hertz"].value sec = info["total_samples"].value if sec: sec = 
float(sec) / info["sample_hertz"].value self.duration = timedelta(seconds=sec) registerExtractor(AuFile, AuMetadata) registerExtractor(MpegAudioFile, MpegAudioMetadata) registerExtractor(OggFile, OggMetadata) registerExtractor(RealMediaFile, RealMediaMetadata) registerExtractor(RealAudioFile, RealAudioMetadata) registerExtractor(AiffFile, AiffMetadata) registerExtractor(FlacParser, FlacMetadata)
15,473
Python
.py
372
31.846774
136
0.588306
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,762
jpeg.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/jpeg.py
from hachoir_metadata.metadata import RootMetadata, registerExtractor from hachoir_metadata.image import computeComprRate from hachoir_parser.image.exif import ExifEntry from hachoir_parser.image.jpeg import ( JpegFile, JpegChunk, QUALITY_HASH_COLOR, QUALITY_SUM_COLOR, QUALITY_HASH_GRAY, QUALITY_SUM_GRAY) from hachoir_core.field import MissingField from hachoir_core.i18n import _ from hachoir_core.tools import makeUnicode from hachoir_metadata.safe import fault_tolerant, getValue from datetime import datetime import operator def deg2float(degree, minute, second): return degree + (float(minute) + float(second) / 60.0) / 60.0 class JpegMetadata(RootMetadata): EXIF_KEY = { # Exif metadatas ExifEntry.TAG_CAMERA_MANUFACTURER: "camera_manufacturer", ExifEntry.TAG_CAMERA_MODEL: "camera_model", ExifEntry.TAG_ORIENTATION: "image_orientation", ExifEntry.TAG_EXPOSURE: "camera_exposure", ExifEntry.TAG_FOCAL: "camera_focal", ExifEntry.TAG_BRIGHTNESS: "camera_brightness", ExifEntry.TAG_APERTURE: "camera_aperture", # Generic metadatas ExifEntry.TAG_IMG_TITLE: "title", ExifEntry.TAG_SOFTWARE: "producer", ExifEntry.TAG_FILE_TIMESTAMP: "creation_date", ExifEntry.TAG_WIDTH: "width", ExifEntry.TAG_HEIGHT: "height", ExifEntry.TAG_USER_COMMENT: "comment", } IPTC_KEY = { 80: "author", 90: "city", 101: "country", 116: "copyright", 120: "title", 231: "comment", } orientation_name = { 1: _('Horizontal (normal)'), 2: _('Mirrored horizontal'), 3: _('Rotated 180'), 4: _('Mirrored vertical'), 5: _('Mirrored horizontal then rotated 90 counter-clock-wise'), 6: _('Rotated 90 clock-wise'), 7: _('Mirrored horizontal then rotated 90 clock-wise'), 8: _('Rotated 90 counter clock-wise'), } def extract(self, jpeg): if "start_frame/content" in jpeg: self.startOfFrame(jpeg["start_frame/content"]) elif "start_scan/content/nr_components" in jpeg: self.bits_per_pixel = 8 * jpeg["start_scan/content/nr_components"].value if "app0/content" in jpeg: self.extractAPP0(jpeg["app0/content"]) if "exif/content" in 
jpeg: for ifd in jpeg.array("exif/content/ifd"): for entry in ifd.array("entry"): self.processIfdEntry(ifd, entry) self.readGPS(ifd) if "photoshop/content" in jpeg: psd = jpeg["photoshop/content"] if "version/content/reader_name" in psd: self.producer = psd["version/content/reader_name"].value if "iptc/content" in psd: self.parseIPTC(psd["iptc/content"]) for field in jpeg.array("comment"): if "content/comment" in field: self.comment = field["content/comment"].value self.computeQuality(jpeg) if "data" in jpeg: computeComprRate(self, jpeg["data"].size) if not self.has("producer") and "photoshop" in jpeg: self.producer = u"Adobe Photoshop" if self.has("compression"): self.compression = "JPEG" @fault_tolerant def startOfFrame(self, sof): # Set compression method key = sof["../type"].value self.compression = "JPEG (%s)" % JpegChunk.START_OF_FRAME[key] # Read image size and bits/pixel self.width = sof["width"].value self.height = sof["height"].value nb_components = sof["nr_components"].value self.bits_per_pixel = 8 * nb_components if nb_components == 3: self.pixel_format = _("YCbCr") elif nb_components == 1: self.pixel_format = _("Grayscale") self.nb_colors = 256 @fault_tolerant def computeQuality(self, jpeg): # This function is an adaption to Python of ImageMagick code # to compute JPEG quality using quantization tables # Read quantization tables qtlist = [] for dqt in jpeg.array("quantization"): for qt in dqt.array("content/qt"): # TODO: Take care of qt["index"].value? 
qtlist.append(qt) if not qtlist: return # Compute sum of all coefficients sumcoeff = 0 for qt in qtlist: coeff = qt.array("coeff") for index in xrange(64): sumcoeff += coeff[index].value # Choose the right quality table and compute hash value try: hashval= qtlist[0]["coeff[2]"].value + qtlist[0]["coeff[53]"].value if 2 <= len(qtlist): hashval += qtlist[1]["coeff[0]"].value + qtlist[1]["coeff[63]"].value hashtable = QUALITY_HASH_COLOR sumtable = QUALITY_SUM_COLOR else: hashtable = QUALITY_HASH_GRAY sumtable = QUALITY_SUM_GRAY except (MissingField, IndexError): # A coefficient is missing, so don't compute JPEG quality return # Find the JPEG quality for index in xrange(100): if (hashval >= hashtable[index]) or (sumcoeff >= sumtable[index]): quality = "%s%%" % (index + 1) if (hashval > hashtable[index]) or (sumcoeff > sumtable[index]): quality += " " + _("(approximate)") self.comment = "JPEG quality: %s" % quality return @fault_tolerant def extractAPP0(self, app0): self.format_version = u"JFIF %u.%02u" \ % (app0["ver_maj"].value, app0["ver_min"].value) if "y_density" in app0: self.width_dpi = app0["x_density"].value self.height_dpi = app0["y_density"].value @fault_tolerant def processIfdEntry(self, ifd, entry): # Skip unknown tags tag = entry["tag"].value if tag not in self.EXIF_KEY: return key = self.EXIF_KEY[tag] if key in ("width", "height") and self.has(key): # EXIF "valid size" are sometimes not updated when the image is scaled # so we just ignore it return # Read value rational = False if "value" in entry: value = entry["value"].value else: value = ifd["value_%s" % entry.name].value # Convert value to string if tag == ExifEntry.TAG_ORIENTATION: value = self.orientation_name.get(value, value) elif tag == ExifEntry.TAG_EXPOSURE: if not value: return if isinstance(value, float): value = (value, u"1/%g" % (1/value)) elif entry["type"].value in (ExifEntry.TYPE_RATIONAL, ExifEntry.TYPE_SIGNED_RATIONAL): value = (value, u"%.3g" % value) # Store information setattr(self, 
key, value) @fault_tolerant def readGPS(self, ifd): # Read latitude and longitude latitude_ref = None longitude_ref = None latitude = None longitude = None altitude_ref = 1 altitude = None timestamp = None datestamp = None for entry in ifd.array("entry"): tag = entry["tag"].value if tag == ExifEntry.TAG_GPS_LATITUDE_REF: if entry["value"].value == "N": latitude_ref = 1 else: latitude_ref = -1 elif tag == ExifEntry.TAG_GPS_LONGITUDE_REF: if entry["value"].value == "E": longitude_ref = 1 else: longitude_ref = -1 elif tag == ExifEntry.TAG_GPS_ALTITUDE_REF: if entry["value"].value == 1: altitude_ref = -1 else: altitude_ref = 1 elif tag == ExifEntry.TAG_GPS_LATITUDE: latitude = [ifd["value_%s[%u]" % (entry.name, index)].value for index in xrange(3)] elif tag == ExifEntry.TAG_GPS_LONGITUDE: longitude = [ifd["value_%s[%u]" % (entry.name, index)].value for index in xrange(3)] elif tag == ExifEntry.TAG_GPS_ALTITUDE: altitude = ifd["value_%s" % entry.name].value elif tag == ExifEntry.TAG_GPS_DATESTAMP: datestamp = ifd["value_%s" % entry.name].value elif tag == ExifEntry.TAG_GPS_TIMESTAMP: items = [ifd["value_%s[%u]" % (entry.name, index)].value for index in xrange(3)] items = map(int, items) items = map(str, items) timestamp = ":".join(items) if latitude_ref and latitude: value = deg2float(*latitude) if latitude_ref < 0: value = -value self.latitude = value if longitude and longitude_ref: value = deg2float(*longitude) if longitude_ref < 0: value = -value self.longitude = value if altitude: value = altitude if altitude_ref < 0: value = -value self.altitude = value if datestamp: if timestamp: datestamp += " " + timestamp self.creation_date = datestamp def parseIPTC(self, iptc): datestr = hourstr = None for field in iptc: # Skip incomplete field if "tag" not in field or "content" not in field: continue # Get value value = field["content"].value if isinstance(value, (str, unicode)): value = value.replace("\r", " ") value = value.replace("\n", " ") # Skip unknown tag tag = 
field["tag"].value if tag == 55: datestr = value continue if tag == 60: hourstr = value continue if tag not in self.IPTC_KEY: if tag != 0: self.warning("Skip IPTC key %s: %s" % ( field["tag"].display, makeUnicode(value))) continue setattr(self, self.IPTC_KEY[tag], value) if datestr and hourstr: try: year = int(datestr[0:4]) month = int(datestr[4:6]) day = int(datestr[6:8]) hour = int(hourstr[0:2]) min = int(hourstr[2:4]) sec = int(hourstr[4:6]) self.creation_date = datetime(year, month, day, hour, min, sec) except ValueError: pass registerExtractor(JpegFile, JpegMetadata)
10,855
Python
.py
266
29.293233
100
0.552632
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,763
image.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/image.pyc
—Ú Œ »Mc @s#ddklZlZlZlZddklZlZlZl Z l Z l Z l Z l Z lZlZddklZddklZddklZddklZddklZdÑZd efd ÑÉYZd efd ÑÉYZd efdÑÉYZdefdÑÉYZdefdÑÉYZ defdÑÉYZ!defdÑÉYZ"defdÑÉYZ#defdÑÉYZ$defdÑÉYZ%eeeÉee e"Éee e Éee e#ÉeeeÉeeeÉee e!Éee eÉeee$Éeee%ÉdS(iˇˇˇˇ(tregisterExtractortMetadatat RootMetadatatMultipleMetadata( tBmpFiletIcoFiletPcxFiletGifFiletPngFiletTiffFiletXcfFilet TargaFiletWMF_FiletPsdFile(tgetBitsPerPixel(t XcfProperty(t_(tHACHOIR_ERRORS(tfault_tolerantcCsÑ|idÉ p"|idÉ p|idÉ odS|pdS|idÉ|idÉ|idÉ}t|É||_dS(sÁ Compute image compression rate. Skip size of color palette, focus on image pixels. Original size is width x height x bpp. Compressed size is an argument (in bits). Set "compr_data" with a string like "1.52x". twidththeighttbits_per_pixelN(thastgettfloatt compr_rate(tmetat compr_sizet orig_size((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pytcomputeComprRate s)t BmpMetadatacBseZdÑZRS(cCs˜d|jodS|d}|di|_|di|_|di}|o;|djo!d|jo|di|_n||_n|di|_d|iÉ|_|d i|_ |d i|_ d |jot ||d i ÉndS( NtheaderRRtbppit used_colorst compressionuMicrosoft Bitmap version %sthorizontal_dpit vertical_dpitpixels( tvalueRRt nb_colorsRtdisplayR"tgetFormatVersiontformat_versiont width_dpit height_dpiRtsize(tselftimagethdrR ((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pytextracts      (t__name__t __module__R1(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRst TiffMetadatacBs.eZhdd6dd6ZdÑZdÑZRS(Rt img_widtht img_heightcCs&d|jo|i|dÉndS(Ntifd(tuseIFD(R.ttiff((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR1<s cCs|xu|D]m}|i}y|i|i}Wntj o qnXd|joqn|di}t|||ÉqWdS(NR&(tnamet key_to_attrtKeyErrorR&tsetattr(R.R7tfieldtkeytattrnameR&((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR8@s   (R2R3R;R1R8(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR42s   t IcoMetadatacBs,eZhdd6dd6dd6ZdÑZRS(iiiiiicCsÇx{t|idÉÉD]d\}}t|É}|di|_|di|_|di}|di}|djo;||_|djo!||ijo|i|}q«n|djo d}n||_|i t dÉd ||i dd É|i dd ÉfÉd |}||jo||i 
|_ nd |}||jot|||iÉn|id ||ÉqWdS(Nt icon_headerRRR tnb_coloriisIcon #%u (%sx%s)it?sicon_data[%u]/header/codecsicon_data[%u]/pixelss image[%u](t enumeratetarrayRR&RRR't color_to_bppRt setHeaderRRR(R"RR-taddGroup(R.ticontindexRR/R R'R?((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR1Ss.         -    (R2R3RGR1(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRALs  t PcxMetadatacBseZedÑÉZRS(cCs·d|di|_d|di|_|di|_|di|_|di|_d|dijo djnod|di|_ntd É|_d |d i |_ d |jot ||d i ÉndS( Nitxmaxtymaxt horiz_dpitvert_dpiR iisRun-length encoding (RLE)sPCX: %stversiont image_data( R&RRR+R,RR'RR"R(R*RR-(R.tpcx((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR1rs% (R2R3RR1(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRLqst XcfMetadatacBsDeZhdd6dd6dd6ZdÑZedÑÉZdÑZRS(iiiiicCsr|di|_|di|_y|i|di|_Wntj onX|di|_|i|ÉdS(NRRttype( R&RRt TYPE_TO_BPPRR<R(R*treadProperties(R.txcf((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR1ÑscCsÁ|di}|tijobx√|dD]O}d|jp d|joq(n|didjo|di|_q(q(Wne|tijo|di|_nA|tijo0t|diÉ|_ t|diÉ|_ ndS(NRUtdataR:s gimp-commentsdata/compressions data/xress data/yres( R&RtPROP_PARASITEStcommenttPROP_COMPRESSIONR(R"tPROP_RESOLUTIONtintR+R,(R.tpropRUR>((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pytprocessPropertyés  cCs+x$|idÉD]}|i|ÉqWdS(Ntproperty(RFR`(R.RXR_((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRWùs(R2R3RVR1RR`RW(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRTÄs t PngMetadatacBsKeZhdd6ZdÑZedÑÉZedÑÉZedÑÉZRS(tproducertsoftwarecCsNd|jo|i|dÉnd|jo|i|dÉnd|jo|i|dÉnxµ|idÉD]§}d|joqvn|di}|di}y'|i|iÉ}t|||ÉWqvtj o:|iÉdjod||f|_ q||_ qvXqvWt dÑ|id ÉDÉÉ}t ||ÉdS( NRttimetphysicalttexttkeywordR[s%s=%scssx|]}|iVqWdS(N(R-(t.0RY((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pys <genexpr>∫s RY( t useHeadertuseTimet usePhysicalRFR&t 
TEXT_TO_ATTRtlowerR=R<R[tsumR(R.tpngR[RhRgR?R((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR1¶s*      cCs|i|_dS(N(R&t creation_date(R.R>((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRkΩscCs$|di|_|di|_dS(Ntpixel_per_unit_xtpixel_per_unit_y(R&R+R,(R.R>((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRl¡scCs|di|_|di|_d|jo|did}nd}|dip4|diotdÉ|_qŒtdÉ|_nEd |jo(td É|_|o|d 8}qŒntd É|_t|É|_|o ||_n|d i |_ dS(NRRs /palette/sizeit has_palettet has_alphatRGBAtRGBs /transparencysColor index with transparencyis Color indexR"( R&RRtNoneRt pixel_formattpngBitsPerPixelRR'R(R"(R.RR'((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRj∆s$   (R2R3RmR1RRkRlRj(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRb°s   t GifMetadatacBs eZdÑZedÑÉZRS(cCsÊ|i|dÉ|idÉod|idÉ>|_ntdÉ|_d|di|_xD|idÉD]3}x*|i|i dÉD]}|i|_ qãWqnWd |jo!|d iotd É|_ ntd É|_ dS( Ns/screenRitLZWsGIF version %sRQtcommentss/commentsgraphic_ctl/has_transpsColor index with transparencys Color index( t useScreenRRR'RR"R&R*RFR:R[Ry(R.tgifR}R[((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR1„scCs8|di|_|di|_d|di|_dS(NRRiR (R&RRR(R.tscreen((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR~Òs(R2R3R1RR~(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR{‚s t TargaMetadatacBseZdÑZRS(cCsã|di|_|di|_|di|_|dio|di|_n|di|_d|jot||diÉndS(NRRR RCtcodecR%( R&RRRR'R(R"RR-(R.ttga((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR1¯s (R2R3R1(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRŘst WmfMetadatacBseZdÑZRS(cCs4|iÉo^d|joD|d}|di|di|_|di|di|_nd|_n∆|iÉo∏|d}d|joI|di}d |jo"|id d É\|_|_qÿ||_n|d io|d i|_ d |_n d|_|d i|_|di|_ndS(Nsamf_header/recttrighttlefttbottomttopit emf_headert descriptiontiR'itwidth_pxt height_px( tisAPMR&RRRtisEMFtsplitRcttitleR'(R.twmftrecttemftdesc((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR1s&         "   
(R2R3R1(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRÑst PsdMetadatacBseZedÑÉZRS(cCs_|di|_|di|_|di|di|_|di|_|di|_dS(NRRtdeptht nb_channelst color_modeR"(R&RRRR(RyR"(R.tpsd((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyR1s (R2R3RR1(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyRñsN(&thachoir_metadata.metadataRRRRthachoir_parser.imageRRRRRR R R R R thachoir_parser.image.pngRRzthachoir_parser.image.xcfRthachoir_core.i18nRthachoir_core.errorRthachoir_metadata.safeRRRR4RARLRTRbR{RÅRÑRñ(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/image.pyt<module>s6"F %!A          
12,653
Python
.py
58
216.775862
1,620
0.40786
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,764
setter.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/setter.pyc
—Ú Œ »Mc @sddklZlZddkZddklZddklZlZddkl Z ddk l Z ddk l Z eidÉZeid ÉZeid ÉZeid ÉZeid ÉZeid ÉZdZdZdZdÑZdÑZdÑZdÑZdÑZdÑZdS(iˇˇˇˇ(tdatetdatetimeN(tLanguage(t setlocaletLC_ALL(tstrptime(tcreateTimezone(tconfigs[-/.: ]+s ^([0-9]{4})$s#^([0-9]{4})~([01][0-9])~([0-9]{2})$sF^([0-9]{4})~([01][0-9])~([0-9]{2})~([0-9]{1,2})~([0-9]{2})~([0-9]{2})$sG^([01]?[0-9])~([0-9]{2})~([0-9]{4})~([0-9]{1,2})~([0-9]{2})~([0-9]{2})$s^(.*)~([+-][0-9]{2})00$s%B~%Ys%a~%b~%d~%H~%M~%S~%Ys%a,~%d~%b~%Y~%H~%M~%ScCs˙tid|iÉÉ}ti|É}|oOy5t|idÉÉ}t|ddÉt|ÉfSWq}t j oq}Xnt i|É}|omySt|idÉÉ}t|idÉÉ}t|idÉÉ}t|||ÉSWqt j oqXnt i|É}|oµyõt|idÉÉ}t|idÉÉ}t|idÉÉ}t|idÉÉ}t|idÉÉ}t|idÉÉ}t ||||||ÉSWqÀt j oqÀXnt i|É}|oµyõt|idÉÉ}t|idÉÉ}t|idÉÉ}t|idÉÉ}t|idÉÉ}t|idÉÉ}t ||||||ÉSWqñt j oqñXnttdÉ}z@ti|É} | o4| idÉ} t| idÉÉ} t| É} n |} d } y:t| tÉ} t| d d!Éd | g} t | åSWnt j onXy:t| tÉ} t| d d!Éd | g} t | åSWnt j onXy0t|tÉ} t| d d!É} t| åSWnt j onXWd tt|ÉXd S( s≠ Year and date: >>> parseDatetime("2000") (datetime.date(2000, 1, 1), u'2000') >>> parseDatetime("2004-01-02") datetime.date(2004, 1, 2) Timestamp: >>> parseDatetime("2004-01-02 18:10:45") datetime.datetime(2004, 1, 2, 18, 10, 45) >>> parseDatetime("2004-01-02 18:10:45") datetime.datetime(2004, 1, 2, 18, 10, 45) Timestamp with timezone: >>> parseDatetime(u'Thu, 19 Jul 2007 09:03:57 +0000') datetime.datetime(2007, 7, 19, 9, 3, 57, tzinfo=<TimezoneUTC delta=0, name=u'UTC'>) >>> parseDatetime(u'Thu, 19 Jul 2007 09:03:57 +0200') datetime.datetime(2007, 7, 19, 9, 3, 57, tzinfo=<Timezone delta=2:00:00, name='+0200'>) t~iiiiiitCiN(tNORMALIZE_REGEXtsubtstript YEAR_REGEX1tmatchtinttgroupRtunicodet ValueErrort DATE_REGEX1tDATETIME_REGEX1RtDATETIME_REGEX2RRtTIMEZONE_REGEXRtNoneRt ISO_TIMESTAMPtlisttRIFF_TIMESTAMPt MONTH_YEAR(tvaluetregstyeartmonthtdaythourtmintsectcurrent_localeRtwithout_timezonetdeltat timestampt arguments((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/setter.pyt parseDatetime!så      cCs@t|ttfÉo t|ÉSt|ttfÉo|SdS(N(t 
isinstancetstrRR)RRR(tmetatkeyR((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/setter.pyt setDatetimeÄs  cCs t|ÉS(sú >>> setLanguage(None, None, "fre") <Language 'French', code='fre'> >>> setLanguage(None, None, u"ger") <Language 'German', code='ger'> (R(R,R-R((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/setter.pyt setLanguageáscCs:yt|ÉSWn%tj o|id|ÉdSXdS(s4 >>> setTrackTotal(None, None, "10") 10 sInvalid track total: %rN(RRtwarningR(R,R-ttotal((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/setter.pyt setTrackTotalês cCsát|ttfÉo|Sd|jo%|iddÉ\}}||_nyt|ÉSWn%tj o|id|ÉdSXdS(Nt/isInvalid track number: %r(R*Rtlongtsplitt track_totalRR0R(R,R-tnumberR1((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/setter.pytsetTrackNumberõs  cCstio|S|idÉS(Ns (Rt RAW_OUTPUTR (ttext((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/setter.pytnormalizeStringßs (RRtrethachoir_core.languageRtlocaleRRttimeRthachoir_metadata.timezoneRthachoir_metadataRtcompileR R RRRRRRRR)R.R/R2R8R;(((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/setter.pyt<module>s*  _ 
5,253
Python
.py
55
92.545455
711
0.426015
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,765
misc.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/misc.pyc
—Ú Œ »Mc @sKddklZlZddklZddklZddklZl Z l Z l Z ddk l Z ddklZddklZddklZd efd ÑÉYZd efd ÑÉYZd efdÑÉYZdefdÑÉYZdefdÑÉYZeeeÉee eÉee eÉee eÉeeeÉdS(iˇˇˇˇ(t RootMetadatatregisterExtractor(tfault_tolerant(tSwfFile(t TorrentFiletTrueTypeFontFilet OLE2_FiletPcfFile(tisString(twarning(t guessParser(tnormalizeStringtTorrentMetadatacBs^eZhdd6dd6dd6Zhdd6dd 6Zd ÑZed ÑÉZed ÑÉZRS( turluannouncetcommentucommentt creation_dateu creation_datet file_sizeulengthtfilenameunamecCs&x|dD]}|i|Éq WdS(Ni(t processRoot(tselfttorrenttfield((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pytextracts cCsá|i|ijo-|i|i}|i}t|||ÉnD|idjo3d|jo&x#|dD]}|i|ÉqhWndS(Ntinfotvalue(tnamet KEY_TO_ATTRRtsetattrt processInfo(RRtkeyR((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyRs  cCsh|i|ijo-|i|i}|i}t|||Én%|idjod|i|_ndS(Nt piece_lengthsPiece length: %s(Rt INFO_TO_ATTRRRtdisplayR(RRRR((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR#s  (t__name__t __module__RRRRRR(((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR s    t TTF_MetadatacBs_eZhdd6dd6dd6dd6dd 6dd 6Zd ÑZed ÑÉZed ÑÉZRS(t copyrightittitleitversionitauthoriR i icCsHd|jo|i|dÉnd|jo|i|dÉndS(Ntheadertnames(t extractHeadert extractNames(Rtttf((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR6s  cCsL|di|_|di|_d|di|_d|di|_dS(Ntcreatedtmodifiedu+Smallest readable size in pixels: %s pixelstlowestuFont direction: %stfont_dir(RRtlast_modificationRR (RR(((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR*<scCs·|di}xÕ|idÉD]º}|di}||di}|i|dÉ}| pt|É oqn|i}||ijoqn|i|}|djo|idÉo|d}nt|||ÉqWdS(NtoffsetR(tnameIDiR&uVersion (RtarraytgetFieldByAddressRtNAMEID_TO_ATTRt startswithR(RR)R2R(RtfoffsetRR((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR+Cs    (R!R"R6RRR*R+(((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR#,s  t OLE2_MetadatacBsÃeZhdd6dd6dd6dd6dd6d d 6d d 6d d6ZedÉZhdd6dd 6ZedÉZdÑZedÑZ e dÑÉZ dÑZ e dÑÉZ e dÑÉZe dÑÉZRS(R%iR'iRiiRi 
R1i tnb_pageitproduceriiicCs|i|ÉdS(N(t_extract(Rtole2((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyRlscCs›|o0|iÉd|jo|i|dÉq7n|i||dÉ}|o|i|tÉn|i||dÉ}|o|i|Én|i||dÉ}|o|i|tÉn|i||dÉ}dS(Nsroot[0]sdoc_summary[0]s word_doc[0]s summary[0]s table1[0](t_feedAlltuseRoottgetFieldt useSummarytTruetuseWordDocumenttFalse(Rtfieldsett main_documentt doc_summarytword_doctsummarytrevision((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR<os  cCsE|iÉ}t|É}|ptdÉdS|i|dtÉdS(Ns-Unable to create the OLE2 mini stream parser!RF(t getSubIStreamR R R<RD(Rtroottstreamt ministream((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR?Äs    cCsg||jodS|iÉ||}|o6|iÉ}t|É}|ptd|ÉdSn|S(Ns(Unable to create the OLE2 parser for %s!(tNoneR>RKR R (RRERFRRRM((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR@âs      cCsnd|jo|di|_nd|jodS|d}x*|idÉD]}|i|||ÉqMWdS(Ntoss section[0]tproperty_index(R RPR4t useProperty(RRItis_doc_summarytproperty((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyRAòs   cCsd|di|_dS(Ns Encrypted: %st fEncrypted(RR(Rtdoc((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyRC¢sc Cs|i|didÉ}| p d|jodS|d}|iÉpdS|i}t|ttfÉot|É}|pdSn|di}|o|i}|i}n|i }|i }||jodSy||} t } Wnt j od} t } nX| o;|di} | d jo | odSd| |f}n| d jo | odSt|| |ÉdS( NR2iRtidRtTotalEditingTimet LastPrinteds%s: %sR1(sTotalEditingTimes LastPrinted(R5RthasValuet isinstancetstrtunicodeR tDOC_SUMMARY_ID_TO_ATTRtIGNORE_DOC_SUMMARYtSUMMARY_ID_TO_ATTRtIGNORE_SUMMARYRDt LookupErrorRBR R( RRIRTRSRRtprop_idt id_to_attrtignoreRt use_prefixtprefix((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyRR¶sF                 (i(i(R!R"R`tsetRaR^R_RRBR<RR?R@RARCRR(((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR9Us.        
t PcfMetadatacBsJeZhdd6dd6dd6dd6dd 6d d 6Zd ÑZd ÑZRS(tcharsettCHARSET_REGISTRYR$t COPYRIGHTt font_weightt WEIGHT_NAMER'tFOUNDRYR%tFONTR;t _XMBDFED_INFOcCs&d|jo|i|dÉndS(Nt properties(t useProperties(Rtpcf((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR‡s cCs¯|d}|i|i}x◊|idÉD]∆}|i||didÉ}|pq*n|i}|pq*n|i||didÉ}|pq*n|i}||ijotd||fÉq*n|i|}t|||Éq*WdS(Nttotal_str_lengthRTt value_offsetit name_offsets Skip %s=%r(taddresstsizeR4R5Rt PROP_TO_KEYR R(RRrtlasttoffset0tindexRRR((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyRs‰s&    (R!R"RzRRs(((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyRi÷s  t SwfMetadatacBseZdÑZRS(cCs\|di|_|di|_d|di|_|di|_d|di|_dS(Ns rect/ymaxs rect/xmaxsflash version %sR&t frame_ratesFrame count: %st frame_count(Rtheighttwidthtformat_versionRR(Rtswf((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR˙s (R!R"R(((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyR~˘sN(thachoir_metadata.metadataRRthachoir_metadata.safeRthachoir_parser.containerRthachoir_parser.miscRRRRthachoir_core.fieldRthachoir_core.errorR thachoir_parserR thachoir_metadata.setterR R R#R9RiR~(((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/misc.pyt<module>s""")Å#    
9,567
Python
.py
55
172.945455
1,168
0.424262
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,766
metadata_item.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/metadata_item.pyc
—Ú Œ »Mc@söddklZlZddklZddklZddklZdZ dZ dZ dZ d Z d Zd Zd dd ÑÉYZdddÑÉYZdS(iˇˇˇˇ(t makeUnicodetnormalizeNewline(tHACHOIR_ERRORS(tconfig(tnormalizeStringidiÁgg–?g‡?gË?g?t DataValuecBseZdÑZRS(cCs||_||_dS(N(tvaluettext(tselfRR((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pyt__init__s (t__name__t __module__R (((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pyRstDatacBsVeZdddddÑZddÑZdÑZdÑZdÑZdÑZdÑZ RS(cCsºt|jo tjnptÇt|tÉptÇd|_||_||_g|_ |o$t|t t fÉ o |f}n||_ ||_ ||_||_||_dS(sÇ handler is only used if value is not string nor unicode, prototype: def handler(value) -> str/unicode N(t MIN_PRIORITYt MAX_PRIORITYtAssertionErrort isinstancetunicodetNonetmetadatatkeyt descriptiontvaluesttupletlistttypet text_handlertfiltertpriorityt conversion(RRRRRRRR((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pyR s%         cCsx|djo^t|tÉo |}qk|io*|i|É}t|tÉptÇqkt|É}nt||ÉS(N(RRRRRRR(RRR((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pyt _createItem)s   c Csnt|tÉo3t|ÉdjotdÉÇn|\}}nd}|djodSt|ttfÉot|É}|pdSn|ioÓy|i|i |i |É}Wn3t j o'}|i i d|i |fÉdSX|djoMdi dÑ|iDÉÉ}|i i d|i |t|Éi|fÉdSt|tÉo%|o|d}qÅ|\}}q®|}n$t|tÉot|dÉ}n|ioat||iÉ oMdi d Ñ|iDÉÉ}|i i d |i |t|Éi|fÉdSt|tÉoEt|É}tio+tit|Éjo|ti d }qhn||jodS|io3|i|É o"|i i d |i |fÉdSt|tÉoáxÑt|iÉD]o\}}|i}t|tÉpq◊n|i|Éo|i||É|i|<dS|i|ÉodSq◊Wn|ii|i||ÉÉdS( Nis8Data.add() only accept tuple of 2 elements: (value,text)s'Error during conversion of %r value: %ss or css"x|]}t|iÉVqWdS(N(tstrR (t.0titem((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pys <genexpr>Ns s"Unable to convert %s=%r (%s) to %sis ISO-8859-1css"x|]}t|iÉVqWdS(N(RR (R R!((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pys <genexpr>]s s$Key %r: value %r type (%s) is not %ss(...)sSkip value %s=%r (filter)(RRtlent ValueErrorRRRRRRRRtwarningtjoinRR RRtMAX_STR_LENGTHRt enumerateRRt startswithRtappend(RRRt new_valueterrt 
dest_typestindexR!((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pytadd4sr                 cCs t|iÉS(N(R"R(R((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pyt__len__ÑscCs |i|S(N(R(RR-((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pyt __getitem__áscCs-x&|iD]}||ijotSq WtS(N(RRtTruetFalse(RRR!((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pyt __contains__äs   cCst|i|iÉS(N(tcmpR(Rtother((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pyt__cmp__êsN( R R RR RR.R/R0R3R6(((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pyR s P   N(((thachoir_core.toolsRRthachoir_core.errorRthachoir_metadataRthachoir_metadata.setterRR RtQUALITY_FASTESTt QUALITY_FASTtQUALITY_NORMALt QUALITY_GOODt QUALITY_BESTRR (((sH/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata_item.pyt<module>s
5,407
Python
.py
38
140.578947
552
0.41825
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,767
archive.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/archive.pyc
Ñò Î ÈMc @s„ddklZlZddklZlZddklZlZl Z l Z ddk l Z l Z lZlZlZlZddklZddklZd„Zd„Zd efd „ƒYZd efd „ƒYZd e fd„ƒYZde fd„ƒYZde fd„ƒYZde fd„ƒYZe e eƒe eeƒe e eƒe eeƒe eeƒe eeƒdS(iÿÿÿÿ(t QUALITY_BESTtQUALITY_FASTEST(tfault_toleranttgetValue(t RootMetadatatMetadatatMultipleMetadatatregisterExtractor(t Bzip2ParsertCabFilet GzipParsertTarFiletZipFiletMarFile(thumanUnixAttributes(t_cCs?|itjodSt|ijodSdtd|iƒS(Niii (tqualityRRtNonetint(tmeta((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyt maxNbFile s cCse|idƒ p|iddƒ odS|idƒ}|pdSt|ƒ|idƒ|_dS(s= Compute compression rate, sizes have to be in byte. t file_sizet compr_sizeiN(thastgettfloatt compr_rate(RR((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pytcomputeCompressionRatest Bzip2MetadatacBseZd„ZRS(cCs)d|jo|did|_ndS(Ntfilei(tsizeR(tselftzip((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pytextracts (t__name__t __module__R!(((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyRst GzipMetadatacBs eZd„Zed„ƒZRS(cCs|i|ƒt|ƒdS(N(t useHeaderR(Rtgzip((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyR!#s cCs¯|di|_|do|di|_n|di|_|diot|dƒ|_n|diot|dƒ|_n|did |_ |d i|_ dS( Nt compressiontmtimetost has_filenametfilenamet has_commenttcommentRiR( tdisplayR'tvaluetlast_modificationR)RR+R-RRR(RR&((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyR%'s (R"R#R!RR%(((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyR$"s t ZipMetadatacBs eZd„Zed„ƒZRS(cCsst|ƒ}x`t|idƒƒD]I\}}|dj o#||jo|id|ƒPn|i|ƒq"WdS(NRsFZIP archive contains many files, but only first %s files are processed(Rt enumeratetarrayRtwarningt processFile(RR tmax_nbtindextfield((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyR!5s  cCsât|ƒ}|di|_|di|_|di|_d|jo6|di|_|dio|di|_q±n3|di|_|dio|di|_nt|ƒ|i |i |d |i dƒƒdS( NR+tlast_modR't data_descs data_desc/file_uncompressed_sizesdata_desc/file_compressed_sizetuncompressed_sizetcompressed_sizes File "%s"( 
RR/R+t creation_dateR.R'RRRtaddGrouptnameR(RR8R((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyR5=s   (R"R#R!RR5(((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyR14s t TarMetadatacBs eZd„Zed„ƒZRS(cCsÑt|ƒ}x¾t|idƒƒD]§\}}|dj o#||jo|id|ƒPnt|ƒ}|i||ƒ|idƒotdƒ|i dƒ}n tdƒ}|i |i ||ƒq"WdS(NRsFTAR archive contains many files, but only first %s files are processedR+s File "%s"tFile( RR2R3RR4Rt extractFileRRtgetTextR>R?(RttarR6R7R8Rttitle((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyR!Os    c CsÄ|di|_t|idƒƒ|_|idƒ|_y'|idƒo|iƒ|_nWntj onX|di |_ d|di|idƒ|d i|id ƒf|_ dS( NR?tmodeRR(ttypes%s (uid=%s), group %s (gid=%s)tunametuidtgnametgid( R/R+RtgetOctalt file_attrRt getDatetimeR0t ValueErrorR.t file_typetauthor(RR8R((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyRB]s(R"R#R!RRB(((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyR@Ns t CabMetadatacBs/eZd„Zed„ƒZed„ƒZRS(cCsÊd|jo|i|dƒnd|di|_d|di|dif|_t|ƒ}x`t|idƒƒD]I\}}|dj o#||jo|i d|ƒPn|i |ƒqyWdS( Ns folder[0]sMicrosoft Cabinet version %st cab_versions%s folders, %s filest nb_foldertnb_filesRsFCAB archive contains many files, but only first %s files are processed( t useFolderR.tformat_versionR/R-RR2R3RR4tuseFile(RtcabR6R7R8((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyR!ns   cCsG|di}|didjo|d|di7}n||_dS(Nt compr_methodis (level %u)t compr_level(R.R/R'(Rtfoldertcompr((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyRV{s cCs¶t|ƒ}|di|_|di|_|di|_|di}|djo ||_n|idƒotdƒ|idƒ}n tdƒ}|i |i ||ƒdS(NR+tfilesizet timestampt attributess(none)s File "%s"RA( RR/R+RR=RMRRRCR>R?(RR8RtattrRE((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyRX‚s     (R"R#R!RRVRX(((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyRRms t MarMetadatacBseZd„ZRS(cCsæd|di|_d|di|_t|ƒ}x«t|idƒƒD]”\}}|dj o#||jo|id|ƒPnt|ƒ}|di|_ d|_ |d i|_ |i |i |d |idƒƒqJWdS( NsContains %s filestnb_filesMicrosoft 
Archive version %stversionRsFMAR archive contains many files, but only first %s files are processedR+RR^s File "%s"(R/R-RWRR2R3RR4RR+R'RR>R?RC(RtmarR6R7R8R((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyR!’s    (R"R#R!(((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyRb‘sN(thachoir_metadata.metadata_itemRRthachoir_metadata.safeRRthachoir_metadata.metadataRRRRthachoir_parser.archiveRR R R R R thachoir_core.toolsRthachoir_core.i18nRRRRR$R1R@RRRb(((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/archive.pyt<module>s&".  $     
8,473
Python
.py
36
234.138889
1,209
0.429367
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,768
filter.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/filter.py
from hachoir_metadata.timezone import UTC
from datetime import date, datetime, timedelta

# Accepted year range for date/datetime metadata values: 1850..2030.
# Values outside this window are almost certainly corrupt timestamps.
MIN_YEAR = 1850
MAX_YEAR = 2030

class Filter:
    """Validate a value: accept it only when it lies in [min, max].

    Values that are not an instance of *valid_types* are always accepted
    (the filter simply does not apply to them).
    """
    def __init__(self, valid_types, min=None, max=None):
        self.types = valid_types
        self.min = min
        self.max = max

    def __call__(self, value):
        """Return True to accept *value*, False to reject it."""
        if not isinstance(value, self.types):
            # Filter only applies to the declared types
            return True
        if self.min is not None and value < self.min:
            return False
        if self.max is not None and self.max < value:
            return False
        return True

class NumberFilter(Filter):
    """Range filter for numeric values (int, long, float)."""
    def __init__(self, min=None, max=None):
        Filter.__init__(self, (int, long, float), min, max)

class DatetimeFilter(Filter):
    """Range filter for date and datetime values.

    Keeps three pairs of bounds so that timezone-aware datetimes, naive
    datetimes and plain dates are each compared against a bound of a
    comparable type (Python forbids mixing naive and aware datetimes).
    """
    def __init__(self, min=None, max=None):
        # BUGFIX: caller-supplied min/max used to be silently ignored;
        # they are now honoured, with the MIN_YEAR..MAX_YEAR bounds kept
        # as defaults so existing call sites behave exactly as before.
        if min is None:
            min = datetime(MIN_YEAR, 1, 1)
        if max is None:
            max = datetime(MAX_YEAR, 12, 31)
        Filter.__init__(self, (date, datetime), min, max)
        # Bounds for plain dates and for timezone-aware datetimes
        self.min_date = date(MIN_YEAR, 1, 1)
        self.max_date = date(MAX_YEAR, 12, 31)
        self.min_tz = datetime(MIN_YEAR, 1, 1, tzinfo=UTC)
        self.max_tz = datetime(MAX_YEAR, 12, 31, tzinfo=UTC)

    def __call__(self, value):
        """
        Use different min/max values depending on value type
        (datetime with timezone, datetime or date).
        """
        if not isinstance(value, self.types):
            return True
        if hasattr(value, "tzinfo") and value.tzinfo:
            return (self.min_tz <= value <= self.max_tz)
        elif isinstance(value, datetime):
            return (self.min <= value <= self.max)
        else:
            return (self.min_date <= value <= self.max_date)

# Shared default filter instance using the standard year range
DATETIME_FILTER = DatetimeFilter()
1,675
Python
.py
44
30.159091
60
0.601356
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,769
__init__.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/__init__.pyc
Ñò Î ÈMc @s„ddklZddklZddkZddkZddkZddk Zddk Zddk Zddk Zddk ZdS(iÿÿÿÿ(tVERSION(textractMetadataN(thachoir_metadata.versionRt __version__thachoir_metadata.metadataRthachoir_metadata.archivethachoir_metadatathachoir_metadata.audiothachoir_metadata.imagethachoir_metadata.jpegthachoir_metadata.miscthachoir_metadata.programthachoir_metadata.riffthachoir_metadata.video(((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/__init__.pyt<module>s       
670
Python
.py
3
222.333333
552
0.580838
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,770
safe.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/safe.py
from hachoir_core.error import HACHOIR_ERRORS, warning, error

def fault_tolerant(func, *args):
    """Decorator: run *func* but log HACHOIR errors as warnings instead
    of letting them propagate to the caller. The wrapper returns None."""
    def safe_func(*args, **kw):
        try:
            func(*args, **kw)
        except HACHOIR_ERRORS as err:
            warning("Error when calling function %s(): %s" % (
                func.__name__, err))
    return safe_func

def getFieldAttribute(fieldset, key, attrname):
    """Return attribute *attrname* of field fieldset[key].

    Return None when the field is missing, has no value, or reading it
    raises a HACHOIR error (the error is logged as a warning).
    """
    try:
        field = fieldset[key]
        if field.hasValue():
            return getattr(field, attrname)
    except HACHOIR_ERRORS as err:
        warning("Unable to get %s of field %s/%s: %s" % (
            attrname, fieldset.path, key, err))
    return None

def getValue(fieldset, key):
    """Shortcut: value of fieldset[key], or None on error."""
    return getFieldAttribute(fieldset, key, "value")

def getDisplay(fieldset, key):
    """Shortcut: display text of fieldset[key], or None on error."""
    return getFieldAttribute(fieldset, key, "display")
814
Python
.py
22
29.772727
62
0.631512
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,771
archive.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/archive.py
from hachoir_metadata.metadata_item import QUALITY_BEST, QUALITY_FASTEST
from hachoir_metadata.safe import fault_tolerant, getValue
from hachoir_metadata.metadata import (
    RootMetadata, Metadata, MultipleMetadata, registerExtractor)
from hachoir_parser.archive import (Bzip2Parser, CabFile, GzipParser,
    TarFile, ZipFile, MarFile)
from hachoir_core.tools import humanUnixAttributes
from hachoir_core.i18n import _

def maxNbFile(meta):
    """Maximum number of archive members to process for the requested
    quality: 0 at the fastest setting, None (unlimited) at the best,
    otherwise a count that grows with the quality."""
    quality = meta.quality
    if quality <= QUALITY_FASTEST:
        return 0
    if QUALITY_BEST <= quality:
        return None
    return 1 + int(10 * quality)

def computeCompressionRate(meta):
    """
    Compute compression rate, sizes have to be in byte.
    """
    # Need both a compressed and an uncompressed size, and neither may
    # be zero (avoid a division by zero below).
    if not meta.has("file_size") \
    or not meta.get("compr_size", 0):
        return
    file_size = meta.get("file_size")
    if not file_size:
        return
    meta.compr_rate = float(file_size) / meta.get("compr_size")

class Bzip2Metadata(RootMetadata):
    def extract(self, zip):
        if "file" in zip:
            # field size is expressed in bits
            self.compr_size = zip["file"].size/8

class GzipMetadata(RootMetadata):
    def extract(self, gzip):
        self.useHeader(gzip)
        computeCompressionRate(self)

    @fault_tolerant
    def useHeader(self, gzip):
        """Read compression, timestamps, names and sizes from the header."""
        self.compression = gzip["compression"].display
        if gzip["mtime"]:
            self.last_modification = gzip["mtime"].value
        self.os = gzip["os"].display
        if gzip["has_filename"].value:
            self.filename = getValue(gzip, "filename")
        if gzip["has_comment"].value:
            self.comment = getValue(gzip, "comment")
        self.compr_size = gzip["file"].size/8
        self.file_size = gzip["size"].value

class ZipMetadata(MultipleMetadata):
    def extract(self, zip):
        max_nb = maxNbFile(self)
        for index, field in enumerate(zip.array("file")):
            if max_nb is not None and max_nb <= index:
                self.warning("ZIP archive contains many files, but only first %s files are processed" % max_nb)
                break
            self.processFile(field)

    @fault_tolerant
    def processFile(self, field):
        """Create one sub-metadata group for a single ZIP entry."""
        meta = Metadata(self)
        meta.filename = field["filename"].value
        meta.creation_date = field["last_mod"].value
        meta.compression = field["compression"].display
        # Sizes may live in the trailing data descriptor instead of the
        # local header; pick the key pair accordingly.
        if "data_desc" in field:
            size_key = "data_desc/file_uncompressed_size"
            compr_key = "data_desc/file_compressed_size"
        else:
            size_key = "uncompressed_size"
            compr_key = "compressed_size"
        meta.file_size = field[size_key].value
        if field[compr_key].value:
            meta.compr_size = field[compr_key].value
        computeCompressionRate(meta)
        self.addGroup(field.name, meta, "File \"%s\"" % meta.get('filename'))

class TarMetadata(MultipleMetadata):
    def extract(self, tar):
        max_nb = maxNbFile(self)
        for index, field in enumerate(tar.array("file")):
            if max_nb is not None and max_nb <= index:
                self.warning("TAR archive contains many files, but only first %s files are processed" % max_nb)
                break
            meta = Metadata(self)
            self.extractFile(field, meta)
            if meta.has("filename"):
                title = _('File "%s"') % meta.getText('filename')
            else:
                title = _("File")
            self.addGroup(field.name, meta, title)

    @fault_tolerant
    def extractFile(self, field, meta):
        """Fill *meta* with name, mode, size, times and ownership of one
        TAR member."""
        meta.filename = field["name"].value
        meta.file_attr = humanUnixAttributes(field.getOctal("mode"))
        meta.file_size = field.getOctal("size")
        try:
            if field.getOctal("mtime"):
                meta.last_modification = field.getDatetime()
        except ValueError:
            # invalid timestamp: keep the other attributes
            pass
        meta.file_type = field["type"].display
        meta.author = "%s (uid=%s), group %s (gid=%s)" %\
            (field["uname"].value, field.getOctal("uid"),
             field["gname"].value, field.getOctal("gid"))

class CabMetadata(MultipleMetadata):
    def extract(self, cab):
        if "folder[0]" in cab:
            self.useFolder(cab["folder[0]"])
        self.format_version = "Microsoft Cabinet version %s" % cab["cab_version"].display
        self.comment = "%s folders, %s files" % (
            cab["nb_folder"].value, cab["nb_files"].value)
        max_nb = maxNbFile(self)
        for index, field in enumerate(cab.array("file")):
            if max_nb is not None and max_nb <= index:
                self.warning("CAB archive contains many files, but only first %s files are processed" % max_nb)
                break
            self.useFile(field)

    @fault_tolerant
    def useFolder(self, folder):
        """Read the compression method (and level, if any) of a folder."""
        compr = folder["compr_method"].display
        if folder["compr_method"].value != 0:
            compr += " (level %u)" % folder["compr_level"].value
        self.compression = compr

    @fault_tolerant
    def useFile(self, field):
        """Create one sub-metadata group for a single CAB entry."""
        meta = Metadata(self)
        meta.filename = field["filename"].value
        meta.file_size = field["filesize"].value
        meta.creation_date = field["timestamp"].value
        attr = field["attributes"].value
        if attr != "(none)":
            meta.file_attr = attr
        if meta.has("filename"):
            title = _("File \"%s\"") % meta.getText('filename')
        else:
            title = _("File")
        self.addGroup(field.name, meta, title)

class MarMetadata(MultipleMetadata):
    def extract(self, mar):
        self.comment = "Contains %s files" % mar["nb_file"].value
        self.format_version = "Microsoft Archive version %s" % mar["version"].value
        max_nb = maxNbFile(self)
        for index, field in enumerate(mar.array("file")):
            if max_nb is not None and max_nb <= index:
                self.warning("MAR archive contains many files, but only first %s files are processed" % max_nb)
                break
            meta = Metadata(self)
            meta.filename = field["filename"].value
            # MAR members are stored without compression
            meta.compression = "None"
            meta.file_size = field["filesize"].value
            self.addGroup(field.name, meta, "File \"%s\"" % meta.getText('filename'))

registerExtractor(CabFile, CabMetadata)
registerExtractor(GzipParser, GzipMetadata)
registerExtractor(Bzip2Parser, Bzip2Metadata)
registerExtractor(TarFile, TarMetadata)
registerExtractor(ZipFile, ZipMetadata)
registerExtractor(MarFile, MarMetadata)
6,542
Python
.py
150
34.74
111
0.624216
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,772
metadata_item.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/metadata_item.py
from hachoir_core.tools import makeUnicode, normalizeNewline
from hachoir_core.error import HACHOIR_ERRORS
from hachoir_metadata import config
from hachoir_metadata.setter import normalizeString

# Valid range for Data.priority
MIN_PRIORITY = 100
MAX_PRIORITY = 999

# Quality settings: speed/completeness trade-off for extractors
QUALITY_FASTEST = 0.0
QUALITY_FAST = 0.25
QUALITY_NORMAL = 0.5
QUALITY_GOOD = 0.75
QUALITY_BEST = 1.0

class DataValue:
    """One stored metadata value together with its human readable text."""
    def __init__(self, value, text):
        self.value = value
        self.text = text

class Data:
    """One metadata slot (e.g. "title"): holds zero or more validated,
    de-duplicated DataValue items."""
    def __init__(self, key, priority, description,
    text_handler=None, type=None, filter=None, conversion=None):
        """
        handler is only used if value is not string nor unicode, prototype:
           def handler(value) -> str/unicode
        """
        assert MIN_PRIORITY <= priority <= MAX_PRIORITY
        assert isinstance(description, unicode)
        self.metadata = None
        self.key = key
        self.description = description
        self.values = []
        # Normalize the accepted type(s) to a tuple for isinstance()
        if type and not isinstance(type, (tuple, list)):
            type = (type,)
        self.type = type
        self.text_handler = text_handler
        self.filter = filter
        self.priority = priority
        self.conversion = conversion

    def _createItem(self, value, text=None):
        """Wrap *value* in a DataValue, computing the text if needed."""
        if text is None:
            if isinstance(value, unicode):
                text = value
            elif self.text_handler:
                text = self.text_handler(value)
                assert isinstance(text, unicode)
            else:
                text = makeUnicode(value)
        return DataValue(value, text)

    def add(self, value):
        """Validate, convert, filter and store *value*.

        *value* may be a raw value or a (value, text) tuple. Invalid or
        duplicate values are silently dropped (with a warning when
        conversion, typing or filtering rejects them).
        """
        if isinstance(value, tuple):
            if len(value) != 2:
                raise ValueError("Data.add() only accept tuple of 2 elements: (value,text)")
            value, text = value
        else:
            text = None

        # Skip value 'None'
        if value is None:
            return

        if isinstance(value, (str, unicode)):
            value = normalizeString(value)
            if not value:
                return

        # Convert string to Unicode string using charset ISO-8859-1
        if self.conversion:
            try:
                converted = self.conversion(self.metadata, self.key, value)
            except HACHOIR_ERRORS as err:
                self.metadata.warning("Error during conversion of %r value: %s" % (
                    self.key, err))
                return
            if converted is None:
                dest_types = " or ".join(str(item.__name__) for item in self.type)
                self.metadata.warning("Unable to convert %s=%r (%s) to %s" % (
                    self.key, value, type(value).__name__, dest_types))
                return
            if isinstance(converted, tuple):
                # Explicit text wins over the converter's text
                if text:
                    value = converted[0]
                else:
                    value, text = converted
            else:
                value = converted
        elif isinstance(value, str):
            value = unicode(value, "ISO-8859-1")

        if self.type and not isinstance(value, self.type):
            dest_types = " or ".join(str(item.__name__) for item in self.type)
            self.metadata.warning("Key %r: value %r type (%s) is not %s" % (
                self.key, value, type(value).__name__, dest_types))
            return

        # Skip empty strings
        if isinstance(value, unicode):
            value = normalizeNewline(value)
            if config.MAX_STR_LENGTH \
            and config.MAX_STR_LENGTH < len(value):
                value = value[:config.MAX_STR_LENGTH] + "(...)"

        # Skip duplicates
        if value in self:
            return

        # Use filter
        if self.filter and not self.filter(value):
            self.metadata.warning("Skip value %s=%r (filter)" % (self.key, value))
            return

        # For string, if you have "verlongtext" and "verylo",
        # keep the longer value
        if isinstance(value, unicode):
            for index, existing in enumerate(self.values):
                existing = existing.value
                if not isinstance(existing, unicode):
                    continue
                if value.startswith(existing):
                    # Find longer value, replace the old one
                    self.values[index] = self._createItem(value, text)
                    return
                if existing.startswith(value):
                    # Find truncated value, skip it
                    return

        # Add new value
        self.values.append(self._createItem(value, text))

    def __len__(self):
        return len(self.values)

    def __getitem__(self, index):
        return self.values[index]

    def __contains__(self, value):
        # Compare against stored raw values, not their text
        for item in self.values:
            if value == item.value:
                return True
        return False

    def __cmp__(self, other):
        # Order Data slots by priority (Python 2 rich-comparison fallback)
        return cmp(self.priority, other.priority)
4,900
Python
.py
126
27.611111
92
0.557005
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,773
register.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/register.pyc
—Ú Œ »Mc@swddklZddklZlZlZlZlZlZl Z ddk l Z ddk l Z lZlZddklZlZlZddklZlZlZlZlZlZddklZlZlZlZddkl Z d Z!d Z"d Z#d Z$dZ%e$Z&dZ'e'Z(dZ)dZ*dZ+d Z,dZ-dZ.dZ/e eeddÉeddÉÉZ0dÑZ1dS( iˇˇˇˇ(t_(t humanDurationt makePrintablet humanBitRatethumanFrequencyt humanBitSizet humanFilesizet humanDatetime(tLanguage(tFiltert NumberFiltertDATETIME_FILTER(tdatetdatetimet timedelta(thumanAudioChannelthumanFrameRatethumanComprRatet humanAltitudethumanPixelSizethumanDPI(t setDatetimetsetTrackNumbert setTrackTotalt setLanguage(tDataiËiÓii NiÙii'iiiiñg@è@g¸©Ò“MbP?iÁt millisecondsitdaysimc Cs5 |itddtdÉdtÉÉ|itddtdÉdtÉÉ|itdd td ÉdtÉÉ|itd d td ÉdtÉÉ|itddtdÉdtÉÉ|itddtdÉdtdtdtÉÉ|itddtdÉdtdtÉÉÉ|itddtdÉdtÉÉ|itddtdÉd t dt ÉÉ|itd!d"td#Éd t dtdt Édt tfÉÉ|itd$d%td&Éd tdtdt Édt tfÉÉ|itd'd(td)ÉdtÉÉ|itd*d+td,ÉÉÉ|itd-d.td/ÉdtdtÉdt tfdtÉÉ|itd0d1td2ÉdtdtÉdt tfdtÉÉ|itd3d4td5ÉdtdtdtÉdt tfÉÉ|itd6d7td8ÉdtdtttÉdt ttfÉÉ|itd9d:td;Édtdtdd<Édt tfÉÉ|itd=d>td?ÉÉÉ|itd@dAtdBÉdtdtÉdt tfÉÉ|itdCdDtdEÉdtdtÉdt tfÉÉ|itdFdGtdHÉdtÉÉ|itdIdJtdKÉdtdt tfÉÉ|itdLdMtdNÉÉÉ|itdOdPtdQÉdtdt tfÉÉ|itdRdStdTÉdtdtttÉdt ttfÉÉ|itdUdVtdWÉdtdt Édt tfdt!ÉÉ|itdXdYtdZÉdtdt"Édt tfdt!ÉÉ|itd[d\td]ÉÉÉ|itd^d_td`ÉÉÉ|itdadbtdcÉdtÉÉ|itdddetdfÉdt#dt$dt%t&fd t'ÉÉ|itdgdhtdiÉdt#dt$dt%t&fd t'ÉÉ|itdjdktdlÉdtÉÉ|itdmdntdoÉdtÉÉ|itdpdntdqÉdtdt(ÉÉ|itdrdstdtÉdtÉÉ|itdudvtdwÉdtÉÉ|itdxdytdzÉdtÉÉ|itd{d|td}ÉdtÉÉ|itd~dtdÄÉÉÉ|itdÅdÇtdÉÉÉÉ|itdÑdÖtdÜÉÉÉ|itdádàtdâÉÉÉ|itdädstdãÉÉÉ|itdådvtdçÉdtÉÉ|itdédytdèÉdtÉÉ|itdêdëtdíÉÉÉ|itdìdîtdïÉdtÉÉ|itdñdótdòÉdtÉÉ|itdôdötdõÉdt)dtdt*Édt ttfÉÉ|itdúdùtdûÉdt+dtdt,Édt ttfÉÉ|itdüdùtd†Édt ttfÉÉ|itd°d¢td£ÉdtÉÉ|itd§d•td¶ÉdtÉÉ|itdßd®td©ÉdtÉÉ|itd™d´td¨ÉdtÉÉ|itd≠dÆtdØÉdtÉÉ|itd∞d±td≤ÉdtÉÉdS(≥NttitleidtTitlettypetartistietArtisttauthoriftAuthortmusic_composerigsMusic composertalbumi»tAlbumtdurationi…tDurationt text_handlertfiltertnb_pagei sNb pageit music_genreiÀs Music genretlanguageiÃRt conversiont track_numberiÕs Track numbert track_totaliŒs Track totalt organizationi“t Organizationtversioni‹tVersiontwidthi-s Image widththeighti.s Image heightt 
nb_channeli/tChannelt sample_ratei0s Sample ratetbits_per_samplei1s Bits/samplei@timage_orientationi2sImage orientationt nb_colorsi3sNumber of colorstbits_per_pixeli4s Bits/pixeltfilenamei5s File namet file_sizei6s File sizet pixel_formati7s Pixel formatt compr_sizei8sCompressed file sizet compr_ratei9sCompression ratet width_dpii@sImage DPI widtht height_dpiiAsImage DPI heightt file_attriêsFile attributest file_typeiës File typetsubtitle_authoriísSubtitle authort creation_dateiÙs Creation datetlast_modificationiısLast modificationtlatitudei˛tLatitudet longitudeiˇt LongitudetaltitudetAltitudetlocationitLocationtcityitCitytcountryitCountrytcharsetitCharsett font_weighti&s Font weighttcamera_apertureisCamera aperturet camera_focali s Camera focaltcamera_exposurei sCamera exposuretcamera_brightnesssCamera brightnesst camera_models Camera modeltcamera_manufacturersCamera manufacturert compressioniXt Compressiont copyrightiYt CopyrightturliZtURLt frame_ratei[s Frame ratetbit_ratei\sBit ratet aspect_ratios Aspect ratiotosiÑtOStproduceriÖtProducertcommentiÜtCommenttformat_versioni∂sFormat versiont mime_typei∑s MIME typetendiani∏tEndian(-tregisterRRtunicodeRRtDURATION_FILTERR t MAX_NB_PAGERRRt MAX_TRACKtinttlongRt MAX_WIDTHRt MAX_HEIGHTRtMAX_NB_CHANNELRtMIN_SAMPLE_RATEtMAX_SAMPLE_RATEtfloatRt MAX_NB_COLORtMAX_BITS_PER_PIXELRRtMIN_COMPR_RATEtMAX_COMPR_RATEt MAX_DPI_WIDTHRtMAX_DPI_HEIGHTRR R R RRRtMAX_FRAME_RATERt MAX_BIT_RATE(tmeta((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/register.pytregisterAllItems$sÑ%%%%%.%+!!%@@@C@::%11C@@%!!%%+%%%%%%%%!"!".%%%%%Ni–i@i(2thachoir_core.i18nRthachoir_core.toolsRRRRRRRthachoir_core.languageRthachoir_metadata.filterR R R R R Rthachoir_metadata.formatterRRRRRRthachoir_metadata.setterRRRRthachoir_metadata.metadata_itemRR{R|RzRxRÖRyRÇRÉR~RRÑRtRÅRÄRuRsRá(((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/register.pyt<module>s44." 
7,059
Python
.py
19
370.526316
2,577
0.479477
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,774
jpeg.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/jpeg.pyc
—Ú Œ »Mc @sˆddklZlZddklZddklZddklZl Z l Z l Z l Z l Z ddklZddklZddklZddklZlZdd klZdd kZd ÑZd efd ÑÉYZeeeÉd S(iˇˇˇˇ(t RootMetadatatregisterExtractor(tcomputeComprRate(t ExifEntry(tJpegFilet JpegChunktQUALITY_HASH_COLORtQUALITY_SUM_COLORtQUALITY_HASH_GRAYtQUALITY_SUM_GRAY(t MissingField(t_(t makeUnicode(tfault_toleranttgetValue(tdatetimeNcCs |t|Ét|ÉddS(NgN@(tfloat(tdegreetminutetsecond((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/jpeg.pyt deg2floatst JpegMetadatacBsãeZh dei6dei6dei6dei6dei6dei6dei 6dei 6dei 6d ei 6d ei 6d ei6d ei6Zhd d6dd6dd6dd6dd6d d6ZhedÉd6edÉd6edÉd6edÉd6edÉd 6ed!Éd"6ed#Éd$6ed%Éd&6Zd'ÑZed(ÑÉZed)ÑÉZed*ÑÉZed+ÑÉZed,ÑÉZd-ÑZRS(.tcamera_manufacturert camera_modeltimage_orientationtcamera_exposuret camera_focaltcamera_brightnesstcamera_aperturettitletproducert creation_datetwidththeighttcommenttauthoriPtcityiZtcountryiet copyrightitixiÁsHorizontal (normal)isMirrored horizontalis Rotated 180isMirrored verticalis6Mirrored horizontal then rotated 90 counter-clock-wiseisRotated 90 clock-wiseis.Mirrored horizontal then rotated 90 clock-wiseisRotated 90 counter clock-wiseicCs‚d|jo|i|dÉn&d|jod|di|_nd|jo|i|dÉnd|joUxR|idÉD]=}x'|idÉD]}|i||ÉqúW|i|ÉqÜWnd|joQ|d}d |jo|d i|_nd |jo|i|d Éq)nx8|id ÉD]'}d |jo|d i|_ q9q9W|i |Éd |jot ||d i Én|i dÉ od|jo d|_n|i dÉo d|_ndS(Nsstart_frame/contents start_scan/content/nr_componentsis app0/contents exif/contentsexif/content/ifdtentrysphotoshop/contentsversion/content/reader_names iptc/contentR"scontent/commenttdataRt photoshopuAdobe Photoshopt compressiontJPEG(t startOfFrametvaluetbits_per_pixelt extractAPP0tarraytprocessIfdEntrytreadGPSRt parseIPTCR"tcomputeQualityRtsizethasR*(tselftjpegtifdR'tpsdtfield((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/jpeg.pytextract:s<            cCs®|di}dti||_|di|_|di|_|di}d||_|djotdÉ|_n*|d jotd É|_d |_ ndS( Ns../types JPEG (%s)R R!t nr_componentsiitYCbCrit Grayscalei( R-RtSTART_OF_FRAMER*R R!R.R t pixel_formatt nb_colors(R7tsoftkeyt 
nb_components((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/jpeg.pyR,Xs     c CsÃg}x>|idÉD]-}x$|idÉD]}|i|Éq,WqW|pdSd}xE|D]=}|idÉ}x%tdÉD]}|||i7}qÇWq`Wyw|ddi|ddi}dt|Éjo4||d d i|d d i7}t} t} n t} t} Wnt t fj odSXxêtd ÉD]Ç}|| |jp|| |joZd |d } || |jp|| |jo| dt dÉ7} nd| |_ dSqBWdS(Nt quantizations content/qtitcoeffi@scoeff[2]s coeff[53]iiscoeff[0]s coeff[63]ids%s%%t s (approximate)sJPEG quality: %s( R0tappendtxrangeR-tlenRRRR R t IndexErrorR R"( R7R8tqtlisttdqttqttsumcoeffRGtindexthashvalt hashtabletsumtabletquality((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/jpeg.pyR4isB  $   "" cCsVd|di|dif|_d|jo$|di|_|di|_ndS(Nu JFIF %u.%02utver_majtver_mint y_densityt x_density(R-tformat_versiont width_dpit height_dpi(R7tapp0((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/jpeg.pyR/ïs  cCsB|di}||ijodS|i|}|d jo|i|ÉodSt}d|jo|di}n|d|ii}|tijo|ii||É}n}|ti jo8|pdSt |t Éo|dd|f}q.n5|diti ti fjo|d |f}nt|||ÉdS( NttagR R!R-svalue_%su1/%gittypeu%.3g(swidthsheight(R-tEXIF_KEYR6tFalsetnameRtTAG_ORIENTATIONtorientation_nametgett TAG_EXPOSUREt isinstanceRt TYPE_RATIONALtTYPE_SIGNED_RATIONALtsetattr(R7R9R'R^RDtrationalR-((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/jpeg.pyR1ùs(    cCsJd}d}d}d}d}d}d}d} x6|idÉD]%} | di} | tijo(| didjo d}qed}q@| tijo(| didjo d}qed}q@| tijo(| didjo d}qed}q@| tijo?g} tdÉD]!} | |d | i | fiq~ }q@| ti jo?g}tdÉD]!} ||d | i | fiqk~}q@| ti jo|d | i i}q@| ti jo|d | i i} q@| ti jolg}tdÉD]!} ||d | i | fiq ~}tt|É}tt|É}d i|É}q@q@W|o8|o1t|å}|d jo | }n||_n|o8|o1t|å}|d jo | }n||_n|o+|}|d jo | }n||_n| o&|o| d |7} n| |_ndS(NiR'R^R-tNiˇˇˇˇtEis value_%s[%u]svalue_%st:iRH(tNoneR0R-RtTAG_GPS_LATITUDE_REFtTAG_GPS_LONGITUDE_REFtTAG_GPS_ALTITUDE_REFtTAG_GPS_LATITUDERJRbtTAG_GPS_LONGITUDEtTAG_GPS_ALTITUDEtTAG_GPS_DATESTAMPtTAG_GPS_TIMESTAMPtmaptinttstrtjoinRtlatitudet longitudetaltitudeR(R7R9t latitude_reft longitude_refR|R}t altitude_refR~t timestampt 
datestampR'R^t_[1]RQt_[2]t_[3]titemsR-((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/jpeg.pyR2æsn       ??;           c CsÎd}}x|D]}d|jp d|joqn|di}t|ttfÉo(|iddÉ}|iddÉ}n|di}|djo |}qn|djo |}qn||ijo<|djo(|id |dit |ÉfÉqqnt ||i||ÉqW|oµ|oÆyît |dd !É}t |d d !É}t |d d !É} t |dd !É} t |d d !É} t |d d !É} t ||| | | | É|_ WqÁtj oqÁXndS(NR^tcontents RHs i7i<isSkip IPTC key %s: %siiii(RoR-RgRztunicodetreplacetIPTC_KEYtwarningtdisplayR RjRyRRt ValueError( R7tiptctdatestrthourstrR;R-R^tyeartmonthtdaythourtmintsec((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/jpeg.pyR3˚sB       "(t__name__t __module__RtTAG_CAMERA_MANUFACTURERtTAG_CAMERA_MODELRcRft TAG_FOCALtTAG_BRIGHTNESSt TAG_APERTUREt TAG_IMG_TITLEt TAG_SOFTWAREtTAG_FILE_TIMESTAMPt TAG_WIDTHt TAG_HEIGHTtTAG_USER_COMMENTR`RãR RdR<R R,R4R/R1R2R3(((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/jpeg.pyRsJ                      ,!=(thachoir_metadata.metadataRRthachoir_metadata.imageRthachoir_parser.image.exifRthachoir_parser.image.jpegRRRRRR thachoir_core.fieldR thachoir_core.i18nR thachoir_core.toolsR thachoir_metadata.safeR RRtoperatorRR(((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/jpeg.pyt<module>s.  ˇ
9,392
Python
.py
81
114.950617
1,159
0.419674
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,775
formatter.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/formatter.py
from hachoir_core.i18n import _, ngettext NB_CHANNEL_NAME = {1: _("mono"), 2: _("stereo")} def humanAudioChannel(value): return NB_CHANNEL_NAME.get(value, unicode(value)) def humanFrameRate(value): if isinstance(value, (int, long, float)): return _("%.1f fps") % value else: return value def humanComprRate(rate): return u"%.1fx" % rate def humanAltitude(value): return ngettext("%.1f meter", "%.1f meters", value) % value def humanPixelSize(value): return ngettext("%s pixel", "%s pixels", value) % value def humanDPI(value): return u"%s DPI" % value
604
Python
.py
17
31.470588
63
0.673575
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,776
timezone.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/timezone.pyc
Ñò Î ÈMc@sXddklZlZdefd„ƒYZdefd„ƒYZeƒZd„ZdS(iÿÿÿÿ(ttzinfot timedeltat TimezoneUTCcBs>eZdZedƒZd„Zd„Zd„Zd„ZRS(s UTC timezoneicCstiS(N(RtZERO(tselftdt((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pyt utcoffsetscCsdS(NuUTC((RR((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pyttzname scCstiS(N(RR(RR((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pytdst scCsdS(Ns"<TimezoneUTC delta=0, name=u'UTC'>((R((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pyt__repr__s( t__name__t __module__t__doc__RRRRRR (((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pyRs     tTimezonecBs2eZdZd„Zd„Zd„Zd„ZRS(sFixed offset in hour from UTC.cCs'td|dƒ|_d||_dS(Ntminutesi<u%+03u00(Rt_offsett_name(Rtoffset((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pyt__init__scCs|iS(N(R(RR((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pyRscCs|iS(N(R(RR((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pyRscCsd|i|ifS(Ns<Timezone delta=%s, name='%s'>(RR(R((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pyR s(R R R RRRR (((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pyR s    cCs|o t|ƒStSdS(N(R tUTC(R((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pytcreateTimezone%s N(tdatetimeRRRR RR(((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/timezone.pyt<module>s 
2,598
Python
.py
6
432
1,080
0.460856
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,777
safe.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/safe.pyc
Ñò Î ÈMc@sDddklZlZlZd„Zd„Zd„Zd„ZdS(iÿÿÿÿ(tHACHOIR_ERRORStwarningterrorcs‡fd†}|S(NcsDyˆ||ŽWn,tj o }tdˆi|fƒnXdS(Ns$Error when calling function %s(): %s(RRt__name__(targstkwterr(tfunc(s?/pentest/enumeration/google/metagoofil/hachoir_metadata/safe.pyt safe_funcs ((RRR((Rs?/pentest/enumeration/google/metagoofil/hachoir_metadata/safe.pytfault_tolerantscCsby)||}|iƒot||ƒSWn2tj o&}td||i||fƒnXdS(Ns#Unable to get %s of field %s/%s: %s(thasValuetgetattrRRtpathtNone(tfieldsettkeytattrnametfieldR((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/safe.pytgetFieldAttribute s  cCst||dƒS(Ntvalue(R(RR((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/safe.pytgetValuescCst||dƒS(Ntdisplay(R(RR((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/safe.pyt getDisplaysN(thachoir_core.errorRRRR RRR(((s?/pentest/enumeration/google/metagoofil/hachoir_metadata/safe.pyt<module>s 
1,601
Python
.py
8
198.625
352
0.452949
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,778
audio.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/audio.pyc
—Ú Œ »Mc @skddklZlZlZlZddklZlZlZl Z l Z ddk l Z l Z ddklZddklZlZlZddklZlZddklZlZlZddklZlZd ÑZd ÑZhd d 6d d6dd6dd6dd6dd6dd6dd6dd6dd6dd 6d!d"6dd#6dd$6d%d&6d%d'6d(d)6Z ed*ÑÉZ!d+efd,ÑÉYZ"d-efd.ÑÉYZ#d/efd0ÑÉYZ$d1efd2ÑÉYZ%d3efd4ÑÉYZ&d5efd6ÑÉYZ'd7efd8ÑÉYZ(eee#Éeee&Éee e"Éee e%Éeee$Éee e'Éee e(Éd9S(:iˇˇˇˇ(tregisterExtractortMetadatat RootMetadatatMultipleMetadata(tAuFilet MpegAudioFilet RealAudioFiletAiffFilet FlacParser(tOggFilet RealMediaFile(t_(t makePrintablettimedelta2secondst humanBitRate(t timedeltatdate(t QUALITY_FASTtQUALITY_NORMALt QUALITY_BEST(tfault_toleranttgetValuecCs§|idÉ p;|idÉ p*|idÉ p|idÉ p| odSt|idÉÉ|idÉ|idÉ|idÉ}t|É||_dS(Ntdurationt sample_ratetbits_per_samplet nb_channel(thasR tgettfloatt compr_rate(tmetatsizet orig_size((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pytcomputeComprRate s<cCsh|idÉ p"|idÉ p|idÉ odS|idÉ|idÉ|idÉ|_dS(NRRR(RRtbit_rate(R((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pytcomputeBitRates tartisttARTISTtalbumtALBUMt track_numbert TRACKNUMBERt track_totalt TRACKTOTALtproducertENCODERttitletTITLEtlocationtLOCATIONt creation_datetDATEt organizationt ORGANIZATIONt music_genretGENREtcommentttmusic_composertCOMPOSERt DESCRIPTIONtCOMMENTturltWWWtWOAFt copyrighttLICENSEcCs∏t|dÉ|_xü|idÉD]é}d|ijox|iiddÉ\}}|iÉ}|tjot|}t|||Éq∞|o|id||fÉq∞q"q"WdS(Ntvendortmetadatat=isSkip Vorbis comment %s: %s( RR,tarraytvaluetsplittuppertVORBIS_KEY_TO_ATTRtsetattrtwarning(RDR8titemtkeyRG((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pytreadVorbisComment0s   t OggMetadatacBs,eZdÑZdÑZdÑZdÑZRS(c Cs d}xót|idÉÉD]Ä\}}d|joqn|d}d|jobt|É}|i|d|É|id|dÉ| o#|idÉo|idÉ}qµnd|jo7t|É}|i|d|É|id|d Énd |jobt|É}|i |d |É|id|d É| o#|id Éo|id É}qhnd |jot ||d Énd |joPqqW|out |i joe|i É}|oNd|joAy'tdt|diÉ|É|_Wqtj oqXqndS(Ntpagetsegmentst vorbis_hdrsaudio[]tAudioRt theora_hdrsvideo[]tVideot video_hdrt frame_rateR8itabs_granule_postseconds(tNonet enumerateRFRt vorbisHeadertaddGroupRRt theoraHeadert 
videoHeaderRORtqualitytcreateLastPageRRRGRt OverflowError(tselftoggtgranule_quotienttindexRQR((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pytextract>sB             'cCsj|di|_|di|_|di|_|di|_|diod|di|_ndS(NtfourcctwidththeightRt time_unitg–cA(tdisplayt compressionRGRjRktbits_per_pixelRX(RdtheaderR((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyR`cs cCs‚d|_d|di|di|dif|_|di|_|di|_|dio%t|d iÉ|di|_n|d io%t|d iÉ|d i|_n|d i|_ d |di|_ dS(NtTheoras"Theora version %u.%u (revision %u)t version_majort version_minortversion_revisiont frame_widtht frame_heighttfps_dentfps_numtaspect_ratio_dentaspect_ratio_numt pixel_formats Quality: %sRa( RnRGtformat_versionRjRkRRXt aspect_ratioRmR{R8(RdRpR((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyR_ks   %%cCsQd|_|di|_|di|_d|di|_|di|_dS(NuVorbistaudio_sample_ratetaudio_channelsuVorbis version %stvorbis_versiontbitrate_nominal(RnRGRRR|R"(RdRpR((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyR]zs  (t__name__t __module__RhR`R_R](((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRP=s %  t AuMetadatacBseZdÑZRS(cCs”|di|_|di|_|di|_d|jo|di|_n|iÉ|_t|Éd|joX|i dÉo0t dt |di É|i dÉÉ|_nt||di ÉndS(NRtchannelstcodectinfot audio_dataR"RZ(RGRRRmRnR8tgetBitsPerSampleRR#RRRRRRR!(Rdtaudio((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRhÇs   0(RÇRÉRh(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRÑÅstRealAudioMetadatacBsJeZhdd6dd6dd6ZdÑZedÑÉZedÑÉZRS(i`;u28_8i@u14_4ulpcJcCs|di}d|jo|i|dÉn|i|Éd||_|djot|dÉ}nEd|jo1d|jo$|did|did }nd}|oZ|d 9}|id Éo/t|É|id É}t d |É|_ nt ||ÉndS( NtversionRDsReal audio version %sit data_sizetfilesizet headersizei(iiR"RZ( RGt useMetadatatuseRootR|RR[RRRRRR!(RdtrealRåRtsec((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRhñs      $ 
cCsD|di|_|di|_|di|_|di|_dS(NR.tauthorRAR8(RGR.RîRAR8(RdRá((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRê©scCs§d|_|didjo$|di|_|di|_nd|_d|_t|dÉ}|o7||_y|i||_Wq†tj oq†XndS( NiRåiRRÖi@itFourCC( RRGRRRRntFOURCC_TO_BITRATER"t LookupError(RdRíRi((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRë∞s    (RÇRÉRñRhRRêRë(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRãès  tRealMediaMetadatacBsoeZhdd6dd6dd6dd6ZdÑZed ÑÉZed ÑÉZed ÑÉZed ÑÉZRS( R,s generated byR2s creation datetlast_modificationsmodification dateR8t descriptioncCs~d|jo|i|dÉnd|jo|i|dÉnx3t|idÉÉD]\}}|i||ÉqZWdS(Nt file_propt content_desct stream_prop(t useFileProptuseContentDescR\RFt useStreamProp(RdtmediaRgtstream((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRh…s   cCsx|diiÉ}|di}||ijot||i||Én*|o"|id|di|fÉndS(NtnameRGs Skip %s: %s(RGtlowert KEY_TO_ATTRRKRL(RdtpropRNRG((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pytuseFileInfoProp—s  cCs-|di|_td|diÉ|_dS(Nt avg_bit_ratet millisecondsR(RGR"RR(RdR¶((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRû⁄scCsD|di|_|di|_|di|_|di|_dS(NR.RîRAR8(RGR.RîRAR8(Rdtcontent((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRüflscCs—t|É}d|di|_t|dÉdjo+xc|idÉD]}|i|ÉqFWn<|di|_td|diÉ|_t|dÉ|_ t|d É|_ |i d ||d d |ÉdS( Ns Start: %st stream_startt mime_typeslogical-fileinfosfile_info/propR®R©Rtdescs stream[%u]s Stream #%ui( RRGR8RRFRßR"RRR¨R.R^(RdR¢RgRR¶((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyR†Ês ( RÇRÉR•RhRRßRûRüR†(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRò¡s   tMpegAudioMetadatacBs‹eZhdd6dd6dd6dd6dd 6d d 6d d 6dd6dd6dd6dd6dd6dd6d d6d d6dd6dd6dd6dd6d d6d d6d d 6d 
d!6Zd"ÑZd#ÑZd$ÑZd%ÑZd&ÑZRS('RîtTP1R8tCOMR,tTENR(tTRKR&tTALR.tTT2R2tTYER6tTCOtTPE1tCOMMtTENCtTRCKtTALBtTIT2tTYERR>tWXXXtTCONtlanguagetTLANRAtTCOPtTDATtTRDAtTORYtTIT1cCsd|jodS|d}d|jodSd|jo0|dio"d|di|dif}n|di}|di}||ijoP|oDt|tÉot|ddtÉ}n|id||fÉndS|i|}t|||ÉdS( NR™ttextR.s%s: %sttags ISO-8859-1t to_unicodesSkip ID3v2 tag %s: %s(RGt TAG_TO_KEYt isinstancetstrR tTrueRLRK(RdtfieldR™RGR»RN((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyt processID3v2s"   "   cCs=x6|D].}|iod|jo|i|ÉqqWdS(NR»(t is_field_setRœ(Rdtid3RŒ((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyt readID3v2)scCséd|joí|d}|iÉ|dif|_d|di|dif|_|iÉ|_d|_|diÉo|i|Éqü|i |Énd|joó|d}|d i |_ |d i |_ |d i |_ |d i |_|d i djo|d i |_nd|jo|di |_qCnd|jo|i|dÉnd|jot||diÉndS(Ns/frames/frame[0]t channel_modeuMPEG version %s layer %sRåtlayeritframestid3v1R8RîtsongR&tyeart0ttrack_nbtid3v2(t getNbChannelRmRR|t getSampleRateRRtlooksConstantBitRatetcomputeBitratetcomputeVariableBitrateRGR8RîR.R&R2R(R“R!R(Rdtmp3tframeR—((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRh.s0        cCs^|iÉ}|pdS|tdÉt|Éf|_tdt|diÉ|É|_dS(Ns %s (constant)RZs/frames(t getBitRateR RR"RRRR(RdR‚R"((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRflIs  c Cs;|itjodSd}t|ijo|idÉd}nd|i}d}xÅt|idÉÉD]j\}}|djoqkn|iÉ}|o7|t|É7}|d7}|o||joPq’qkqkW|pdS||}|t dÉt |Éf|_ t d t|d i É|É}||_dS( Nis7Process all MPEG audio frames to compute exact durationiÙgs frames/frameiis%s (Variable bit rate)RZR’(RaRRRLR[R\RFR„RR RR"RRR( RdR·tcountt max_countttotal_bit_rateRgR‚R"R((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyR‡Ps2          (RÇRÉR RœR“RhRflR‡(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRÆÙs:     t AiffMetadatacBs eZdÑZedÑÉZRS(cCs0d|jo|i|dÉnt|ÉdS(Ntcommon(t useCommonR#(Rdtaiff((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRhls cCsÆ|di|_|di|_t|dÉ|_|idÉoG|idÉ}|o-t|diÉ|}td|É|_ qând|jo|di |_ ndS(NRt sample_sizeRt nb_sampleRZRÜ( 
RGRRRRRRRRRRmRn(RdRátrateRì((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRÈqs (RÇRÉRhRRÈ(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRÁks t FlacMetadatacBs eZdÑZedÑÉZRS(cCsHd|jo|i|dÉnd|jot||dÉndS(Nsmetadata/stream_info/contentsmetadata/comment/content(t useStreamInfoRO(Rdtflac((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRhs  cCs}|did|_|did|_|di|_|di}|o-t|É|di}td|É|_ndS(NRiRt sample_hertzt total_samplesRZ(RGRRRRRR(RdRáRì((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRÔÖs (RÇRÉRhRRÔ(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyRÓ~s N()thachoir_metadata.metadataRRRRthachoir_parser.audioRRRRRthachoir_parser.containerR R thachoir_core.i18nR thachoir_core.toolsR R RtdatetimeRRthachoir_metadata.metadata_itemRRRthachoir_metadata.safeRRR!R#RJRORPRÑRãRòRÆRÁRÓ(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/audio.pyt<module>sT"(   D23w      
16,466
Python
.py
74
221.486486
1,654
0.414872
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,779
timezone.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/timezone.py
from datetime import tzinfo, timedelta class TimezoneUTC(tzinfo): """UTC timezone""" ZERO = timedelta(0) def utcoffset(self, dt): return TimezoneUTC.ZERO def tzname(self, dt): return u"UTC" def dst(self, dt): return TimezoneUTC.ZERO def __repr__(self): return "<TimezoneUTC delta=0, name=u'UTC'>" class Timezone(TimezoneUTC): """Fixed offset in hour from UTC.""" def __init__(self, offset): self._offset = timedelta(minutes=offset*60) self._name = u"%+03u00" % offset def utcoffset(self, dt): return self._offset def tzname(self, dt): return self._name def __repr__(self): return "<Timezone delta=%s, name='%s'>" % ( self._offset, self._name) UTC = TimezoneUTC() def createTimezone(offset): if offset: return Timezone(offset) else: return UTC
907
Python
.py
30
23.766667
51
0.616185
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,780
metadata.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/metadata.py
# -*- coding: utf-8 -*- from hachoir_core.compatibility import any, sorted from hachoir_core.endian import endian_name from hachoir_core.tools import makePrintable, makeUnicode from hachoir_core.dict import Dict from hachoir_core.error import error, HACHOIR_ERRORS from hachoir_core.i18n import _ from hachoir_core.log import Logger from hachoir_metadata.metadata_item import ( Data, MIN_PRIORITY, MAX_PRIORITY, QUALITY_NORMAL) from hachoir_metadata.register import registerAllItems import re extractors = {} class Metadata(Logger): header = u"Metadata" def __init__(self, parent, quality=QUALITY_NORMAL): assert isinstance(self.header, unicode) # Limit to 0.0 .. 1.0 if parent: quality = parent.quality else: quality = min(max(0.0, quality), 1.0) object.__init__(self) object.__setattr__(self, "_Metadata__data", {}) object.__setattr__(self, "quality", quality) header = self.__class__.header object.__setattr__(self, "_Metadata__header", header) registerAllItems(self) def _logger(self): pass def __setattr__(self, key, value): """ Add a new value to data with name 'key'. Skip duplicates. """ # Invalid key? if key not in self.__data: raise KeyError(_("%s has no metadata '%s'") % (self.__class__.__name__, key)) # Skip duplicates self.__data[key].add(value) def setHeader(self, text): object.__setattr__(self, "header", text) def getItems(self, key): try: return self.__data[key] except LookupError: raise ValueError("Metadata has no value '%s'" % key) def getItem(self, key, index): try: return self.getItems(key)[index] except (LookupError, ValueError): return None def has(self, key): return 1 <= len(self.getItems(key)) def get(self, key, default=None, index=0): """ Read first value of tag with name 'key'. 
>>> from datetime import timedelta >>> a = RootMetadata() >>> a.duration = timedelta(seconds=2300) >>> a.get('duration') datetime.timedelta(0, 2300) >>> a.get('author', u'Anonymous') u'Anonymous' """ item = self.getItem(key, index) if item is None: if default is None: raise ValueError("Metadata has no value '%s' (index %s)" % (key, index)) else: return default return item.value def getValues(self, key): try: data = self.__data[key] except LookupError: raise ValueError("Metadata has no value '%s'" % key) return [ item.value for item in data ] def getText(self, key, default=None, index=0): """ Read first value, as unicode string, of tag with name 'key'. >>> from datetime import timedelta >>> a = RootMetadata() >>> a.duration = timedelta(seconds=2300) >>> a.getText('duration') u'38 min 20 sec' >>> a.getText('titre', u'Unknown') u'Unknown' """ item = self.getItem(key, index) if item is not None: return item.text else: return default def register(self, data): assert data.key not in self.__data data.metadata = self self.__data[data.key] = data def __iter__(self): return self.__data.itervalues() def __str__(self): r""" Create a multi-line ASCII string (end of line is "\n") which represents all datas. >>> a = RootMetadata() >>> a.author = "haypo" >>> a.copyright = unicode("© Hachoir", "UTF-8") >>> print a Metadata: - Author: haypo - Copyright: \xa9 Hachoir @see __unicode__() and exportPlaintext() """ text = self.exportPlaintext() return "\n".join( makePrintable(line, "ASCII") for line in text ) def __unicode__(self): r""" Create a multi-line Unicode string (end of line is "\n") which represents all datas. 
>>> a = RootMetadata() >>> a.copyright = unicode("© Hachoir", "UTF-8") >>> print repr(unicode(a)) u'Metadata:\n- Copyright: \xa9 Hachoir' @see __str__() and exportPlaintext() """ return "\n".join(self.exportPlaintext()) def exportPlaintext(self, priority=None, human=True, line_prefix=u"- ", title=None): r""" Convert metadata to multi-line Unicode string and skip datas with priority lower than specified priority. Default priority is Metadata.MAX_PRIORITY. If human flag is True, data key are translated to better human name (eg. "bit_rate" becomes "Bit rate") which may be translated using gettext. If priority is too small, metadata are empty and so None is returned. >>> print RootMetadata().exportPlaintext() None >>> meta = RootMetadata() >>> meta.copyright = unicode("© Hachoir", "UTF-8") >>> print repr(meta.exportPlaintext()) [u'Metadata:', u'- Copyright: \xa9 Hachoir'] @see __str__() and __unicode__() """ if priority is not None: priority = max(priority, MIN_PRIORITY) priority = min(priority, MAX_PRIORITY) else: priority = MAX_PRIORITY if not title: title = self.header text = ["%s:" % title] for data in sorted(self): if priority < data.priority: break if not data.values: continue if human: title = data.description else: title = data.key for item in data.values: if human: value = item.text else: value = makeUnicode(item.value) text.append("%s%s: %s" % (line_prefix, title, value)) if 1 < len(text): return text else: return None def __nonzero__(self): return any(item for item in self.__data.itervalues()) class RootMetadata(Metadata): def __init__(self, quality=QUALITY_NORMAL): Metadata.__init__(self, None, quality) class MultipleMetadata(RootMetadata): header = _("Common") def __init__(self, quality=QUALITY_NORMAL): RootMetadata.__init__(self, quality) object.__setattr__(self, "_MultipleMetadata__groups", Dict()) object.__setattr__(self, "_MultipleMetadata__key_counter", {}) def __contains__(self, key): return key in self.__groups def __getitem__(self, key): return 
self.__groups[key] def iterGroups(self): return self.__groups.itervalues() def __nonzero__(self): if RootMetadata.__nonzero__(self): return True return any(bool(group) for group in self.__groups) def addGroup(self, key, metadata, header=None): """ Add a new group (metadata of a sub-document). Returns False if the group is skipped, True if it has been added. """ if not metadata: self.warning("Skip empty group %s" % key) return False if key.endswith("[]"): key = key[:-2] if key in self.__key_counter: self.__key_counter[key] += 1 else: self.__key_counter[key] = 1 key += "[%u]" % self.__key_counter[key] if header: metadata.setHeader(header) self.__groups.append(key, metadata) return True def exportPlaintext(self, priority=None, human=True, line_prefix=u"- "): common = Metadata.exportPlaintext(self, priority, human, line_prefix) if common: text = common else: text = [] for key, metadata in self.__groups.iteritems(): if not human: title = key else: title = None value = metadata.exportPlaintext(priority, human, line_prefix, title=title) if value: text.extend(value) if len(text): return text else: return None def registerExtractor(parser, extractor): assert parser not in extractors assert issubclass(extractor, RootMetadata) extractors[parser] = extractor def extractMetadata(parser, quality=QUALITY_NORMAL): """ Create a Metadata class from a parser. Returns None if no metadata extractor does exist for the parser class. """ try: extractor = extractors[parser.__class__] except KeyError: return None metadata = extractor(quality) try: metadata.extract(parser) except HACHOIR_ERRORS, err: error("Error during metadata extraction: %s" % unicode(err)) if metadata: metadata.mime_type = parser.mime_type metadata.endian = endian_name[parser.endian] return metadata
9,138
Python
.py
248
27.673387
89
0.577632
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,781
__init__.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/__init__.py
from hachoir_metadata.version import VERSION as __version__ from hachoir_metadata.metadata import extractMetadata # Just import the module, # each module use registerExtractor() method import hachoir_metadata.archive import hachoir_metadata.audio import hachoir_metadata.image import hachoir_metadata.jpeg import hachoir_metadata.misc import hachoir_metadata.program import hachoir_metadata.riff import hachoir_metadata.video
428
Python
.py
12
34.5
59
0.869565
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,782
setter.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/setter.py
from datetime import date, datetime import re from hachoir_core.language import Language from locale import setlocale, LC_ALL from time import strptime from hachoir_metadata.timezone import createTimezone from hachoir_metadata import config NORMALIZE_REGEX = re.compile("[-/.: ]+") YEAR_REGEX1 = re.compile("^([0-9]{4})$") # Date regex: YYYY-MM-DD (US format) DATE_REGEX1 = re.compile("^([0-9]{4})~([01][0-9])~([0-9]{2})$") # Date regex: YYYY-MM-DD HH:MM:SS (US format) DATETIME_REGEX1 = re.compile("^([0-9]{4})~([01][0-9])~([0-9]{2})~([0-9]{1,2})~([0-9]{2})~([0-9]{2})$") # Datetime regex: "MM-DD-YYYY HH:MM:SS" (FR format) DATETIME_REGEX2 = re.compile("^([01]?[0-9])~([0-9]{2})~([0-9]{4})~([0-9]{1,2})~([0-9]{2})~([0-9]{2})$") # Timezone regex: "(...) +0200" TIMEZONE_REGEX = re.compile("^(.*)~([+-][0-9]{2})00$") # Timestmap: 'February 2007' MONTH_YEAR = "%B~%Y" # Timestmap: 'Sun Feb 24 15:51:09 2008' RIFF_TIMESTAMP = "%a~%b~%d~%H~%M~%S~%Y" # Timestmap: 'Thu, 19 Jul 2007 09:03:57' ISO_TIMESTAMP = "%a,~%d~%b~%Y~%H~%M~%S" def parseDatetime(value): """ Year and date: >>> parseDatetime("2000") (datetime.date(2000, 1, 1), u'2000') >>> parseDatetime("2004-01-02") datetime.date(2004, 1, 2) Timestamp: >>> parseDatetime("2004-01-02 18:10:45") datetime.datetime(2004, 1, 2, 18, 10, 45) >>> parseDatetime("2004-01-02 18:10:45") datetime.datetime(2004, 1, 2, 18, 10, 45) Timestamp with timezone: >>> parseDatetime(u'Thu, 19 Jul 2007 09:03:57 +0000') datetime.datetime(2007, 7, 19, 9, 3, 57, tzinfo=<TimezoneUTC delta=0, name=u'UTC'>) >>> parseDatetime(u'Thu, 19 Jul 2007 09:03:57 +0200') datetime.datetime(2007, 7, 19, 9, 3, 57, tzinfo=<Timezone delta=2:00:00, name='+0200'>) """ value = NORMALIZE_REGEX.sub("~", value.strip()) regs = YEAR_REGEX1.match(value) if regs: try: year = int(regs.group(1)) return (date(year, 1, 1), unicode(year)) except ValueError: pass regs = DATE_REGEX1.match(value) if regs: try: year = int(regs.group(1)) month = int(regs.group(2)) day = int(regs.group(3)) return 
date(year, month, day) except ValueError: pass regs = DATETIME_REGEX1.match(value) if regs: try: year = int(regs.group(1)) month = int(regs.group(2)) day = int(regs.group(3)) hour = int(regs.group(4)) min = int(regs.group(5)) sec = int(regs.group(6)) return datetime(year, month, day, hour, min, sec) except ValueError: pass regs = DATETIME_REGEX2.match(value) if regs: try: month = int(regs.group(1)) day = int(regs.group(2)) year = int(regs.group(3)) hour = int(regs.group(4)) min = int(regs.group(5)) sec = int(regs.group(6)) return datetime(year, month, day, hour, min, sec) except ValueError: pass current_locale = setlocale(LC_ALL, "C") try: match = TIMEZONE_REGEX.match(value) if match: without_timezone = match.group(1) delta = int(match.group(2)) delta = createTimezone(delta) else: without_timezone = value delta = None try: timestamp = strptime(without_timezone, ISO_TIMESTAMP) arguments = list(timestamp[0:6]) + [0, delta] return datetime(*arguments) except ValueError: pass try: timestamp = strptime(without_timezone, RIFF_TIMESTAMP) arguments = list(timestamp[0:6]) + [0, delta] return datetime(*arguments) except ValueError: pass try: timestamp = strptime(value, MONTH_YEAR) arguments = list(timestamp[0:3]) return date(*arguments) except ValueError: pass finally: setlocale(LC_ALL, current_locale) return None def setDatetime(meta, key, value): if isinstance(value, (str, unicode)): return parseDatetime(value) elif isinstance(value, (date, datetime)): return value return None def setLanguage(meta, key, value): """ >>> setLanguage(None, None, "fre") <Language 'French', code='fre'> >>> setLanguage(None, None, u"ger") <Language 'German', code='ger'> """ return Language(value) def setTrackTotal(meta, key, total): """ >>> setTrackTotal(None, None, "10") 10 """ try: return int(total) except ValueError: meta.warning("Invalid track total: %r" % total) return None def setTrackNumber(meta, key, number): if isinstance(number, (int, long)): return number if "/" in number: number, 
total = number.split("/", 1) meta.track_total = total try: return int(number) except ValueError: meta.warning("Invalid track number: %r" % number) return None def normalizeString(text): if config.RAW_OUTPUT: return text return text.strip(" \t\v\n\r\0")
5,203
Python
.py
152
26.921053
103
0.579293
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,783
formatter.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/formatter.pyc
Ñò Î ÈMc@spddklZlZhedƒd6edƒd6Zd„Zd„Zd„Zd „Zd „Zd „Z d S( iÿÿÿÿ(t_tngettexttmonoitstereoicCsti|t|ƒƒS(N(tNB_CHANNEL_NAMEtgettunicode(tvalue((sD/pentest/enumeration/google/metagoofil/hachoir_metadata/formatter.pythumanAudioChannelscCs0t|tttfƒotdƒ|S|SdS(Ns%.1f fps(t isinstancetinttlongtfloatR(R((sD/pentest/enumeration/google/metagoofil/hachoir_metadata/formatter.pythumanFrameRatescCsd|S(Nu%.1fx((trate((sD/pentest/enumeration/google/metagoofil/hachoir_metadata/formatter.pythumanComprRatescCstdd|ƒ|S(Ns %.1f meters %.1f meters(R(R((sD/pentest/enumeration/google/metagoofil/hachoir_metadata/formatter.pyt humanAltitudescCstdd|ƒ|S(Ns%s pixels %s pixels(R(R((sD/pentest/enumeration/google/metagoofil/hachoir_metadata/formatter.pythumanPixelSizescCsd|S(Nu%s DPI((R((sD/pentest/enumeration/google/metagoofil/hachoir_metadata/formatter.pythumanDPIsN( thachoir_core.i18nRRRRR RRRR(((sD/pentest/enumeration/google/metagoofil/hachoir_metadata/formatter.pyt<module>s      
1,663
Python
.py
6
276.166667
540
0.486128
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,784
filter.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/filter.pyc
Ñò Î ÈMc@s„ddklZddklZlZlZdZdZdd d„ƒYZdefd„ƒYZd efd „ƒYZ e ƒZ d S( iÿÿÿÿ(tUTC(tdatetdatetimet timedeltai:iîtFiltercBs eZddd„Zd„ZRS(cCs||_||_||_dS(N(ttypestmintmax(tselft valid_typesRR((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/filter.pyt__init__ s  cCsft||iƒptS|idj o||ijotS|idj o|i|jotStS(N(t isinstanceRtTrueRtNonetFalseR(Rtvalue((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/filter.pyt__call__s  N(t__name__t __module__R R R(((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/filter.pyRst NumberFiltercBseZddd„ZRS(cCs#ti|tttf||ƒdS(N(RR tinttlongtfloat(RRR((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/filter.pyR sN(RRR R (((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/filter.pyRstDatetimeFiltercBs eZddd„Zd„ZRS(cCs˜ti|ttfttddƒttddƒƒttddƒ|_ttddƒ|_ttdddtƒ|_ ttdddtƒ|_ dS(Nii ittzinfo( RR RRtMIN_YEARtMAX_YEARtmin_datetmax_dateRtmin_tztmax_tz(RRR((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/filter.pyR scCs¥t||iƒptSt|dƒo*|io |i|jo |ijSSt|tƒo |i|jo |i jSS|i |jo |i jSSdS(sz Use different min/max values depending on value type (datetime with timezone, datetime or date). RN( R RR thasattrRRRRRRRR(RR((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/filter.pyR%s  N(RRR R R(((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/filter.pyRs N(( thachoir_metadata.timezoneRRRRRRRRRtDATETIME_FILTER(((sA/pentest/enumeration/google/metagoofil/hachoir_metadata/filter.pyt<module>s
2,837
Python
.py
18
155.277778
523
0.421631
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,785
riff.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/riff.py
""" Extract metadata from RIFF file format: AVI video and WAV sound. """ from hachoir_metadata.metadata import Metadata, MultipleMetadata, registerExtractor from hachoir_metadata.safe import fault_tolerant, getValue from hachoir_parser.container.riff import RiffFile from hachoir_parser.video.fourcc import UNCOMPRESSED_AUDIO from hachoir_core.tools import humanFilesize, makeUnicode, timedelta2seconds from hachoir_core.i18n import _ from hachoir_metadata.audio import computeComprRate as computeAudioComprRate from datetime import timedelta class RiffMetadata(MultipleMetadata): TAG_TO_KEY = { "INAM": "title", "IART": "artist", "ICMT": "comment", "ICOP": "copyright", "IENG": "author", # (engineer) "ISFT": "producer", "ICRD": "creation_date", "IDIT": "creation_date", } def extract(self, riff): type = riff["type"].value if type == "WAVE": self.extractWAVE(riff) size = getValue(riff, "audio_data/size") if size: computeAudioComprRate(self, size*8) elif type == "AVI ": if "headers" in riff: self.extractAVI(riff["headers"]) self.extractInfo(riff["headers"]) elif type == "ACON": self.extractAnim(riff) if "info" in riff: self.extractInfo(riff["info"]) def processChunk(self, chunk): if "text" not in chunk: return value = chunk["text"].value tag = chunk["tag"].value if tag not in self.TAG_TO_KEY: self.warning("Skip RIFF metadata %s: %s" % (tag, value)) return key = self.TAG_TO_KEY[tag] setattr(self, key, value) @fault_tolerant def extractWAVE(self, wav): format = wav["format"] # Number of channel, bits/sample, sample rate self.nb_channel = format["nb_channel"].value self.bits_per_sample = format["bit_per_sample"].value self.sample_rate = format["sample_per_sec"].value self.compression = format["codec"].display if "nb_sample/nb_sample" in wav \ and 0 < format["sample_per_sec"].value: self.duration = timedelta(seconds=float(wav["nb_sample/nb_sample"].value) / format["sample_per_sec"].value) if format["codec"].value in UNCOMPRESSED_AUDIO: # Codec with fixed bit rate self.bit_rate = 
format["nb_channel"].value * format["bit_per_sample"].value * format["sample_per_sec"].value if not self.has("duration") \ and "audio_data/size" in wav \ and self.has("bit_rate"): duration = float(wav["audio_data/size"].value)*8 / self.get('bit_rate') self.duration = timedelta(seconds=duration) def extractInfo(self, fieldset): for field in fieldset: if not field.is_field_set: continue if "tag" in field: if field["tag"].value == "LIST": self.extractInfo(field) else: self.processChunk(field) @fault_tolerant def extractAVIVideo(self, header, meta): meta.compression = "%s (fourcc:\"%s\")" \ % (header["fourcc"].display, makeUnicode(header["fourcc"].value)) if header["rate"].value and header["scale"].value: fps = float(header["rate"].value) / header["scale"].value meta.frame_rate = fps if 0 < fps: self.duration = meta.duration = timedelta(seconds=float(header["length"].value) / fps) if "../stream_fmt/width" in header: format = header["../stream_fmt"] meta.width = format["width"].value meta.height = format["height"].value meta.bits_per_pixel = format["depth"].value else: meta.width = header["right"].value - header["left"].value meta.height = header["bottom"].value - header["top"].value @fault_tolerant def extractAVIAudio(self, format, meta): meta.nb_channel = format["channel"].value meta.sample_rate = format["sample_rate"].value meta.bit_rate = format["bit_rate"].value * 8 if format["bits_per_sample"].value: meta.bits_per_sample = format["bits_per_sample"].value if "../stream_hdr" in format: header = format["../stream_hdr"] if header["rate"].value and header["scale"].value: frame_rate = float(header["rate"].value) / header["scale"].value meta.duration = timedelta(seconds=float(header["length"].value) / frame_rate) if header["fourcc"].value != "": meta.compression = "%s (fourcc:\"%s\")" \ % (format["codec"].display, header["fourcc"].value) if not meta.has("compression"): meta.compression = format["codec"].display self.computeAudioComprRate(meta) @fault_tolerant def 
computeAudioComprRate(self, meta): uncompr = meta.get('bit_rate', 0) if not uncompr: return compr = meta.get('nb_channel') * meta.get('sample_rate') * meta.get('bits_per_sample', default=16) if not compr: return meta.compr_rate = float(compr) / uncompr @fault_tolerant def useAviHeader(self, header): microsec = header["microsec_per_frame"].value if microsec: self.frame_rate = 1000000.0 / microsec total_frame = getValue(header, "total_frame") if total_frame and not self.has("duration"): self.duration = timedelta(microseconds=total_frame * microsec) self.width = header["width"].value self.height = header["height"].value def extractAVI(self, headers): audio_index = 1 for stream in headers.array("stream"): if "stream_hdr/stream_type" not in stream: continue stream_type = stream["stream_hdr/stream_type"].value if stream_type == "vids": if "stream_hdr" in stream: meta = Metadata(self) self.extractAVIVideo(stream["stream_hdr"], meta) self.addGroup("video", meta, "Video stream") elif stream_type == "auds": if "stream_fmt" in stream: meta = Metadata(self) self.extractAVIAudio(stream["stream_fmt"], meta) self.addGroup("audio[%u]" % audio_index, meta, "Audio stream") audio_index += 1 if "avi_hdr" in headers: self.useAviHeader(headers["avi_hdr"]) # Compute global bit rate if self.has("duration") and "/movie/size" in headers: self.bit_rate = float(headers["/movie/size"].value) * 8 / timedelta2seconds(self.get('duration')) # Video has index? if "/index" in headers: self.comment = _("Has audio/video index (%s)") \ % humanFilesize(headers["/index"].size/8) @fault_tolerant def extractAnim(self, riff): if "anim_rate/rate[0]" in riff: count = 0 total = 0 for rate in riff.array("anim_rate/rate"): count += 1 if 100 < count: break total += rate.value / 60.0 if count and total: self.frame_rate = count / total if not self.has("frame_rate") and "anim_hdr/jiffie_rate" in riff: self.frame_rate = 60.0 / riff["anim_hdr/jiffie_rate"].value registerExtractor(RiffFile, RiffMetadata)
7,679
Python
.py
170
34.170588
120
0.579116
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,786
video.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/video.py
from hachoir_core.field import MissingField from hachoir_core.error import HachoirError from hachoir_metadata.metadata import (registerExtractor, Metadata, RootMetadata, MultipleMetadata) from hachoir_metadata.metadata_item import QUALITY_GOOD from hachoir_metadata.safe import fault_tolerant from hachoir_parser.video import MovFile, AsfFile, FlvFile from hachoir_parser.video.asf import Descriptor as ASF_Descriptor from hachoir_parser.container import MkvFile from hachoir_parser.container.mkv import dateToDatetime from hachoir_core.i18n import _ from hachoir_core.tools import makeUnicode, makePrintable, timedelta2seconds from hachoir_core.error import warning from datetime import timedelta class MkvMetadata(MultipleMetadata): tag_key = { "TITLE": "title", "URL": "url", "COPYRIGHT": "copyright", # TODO: use maybe another name? # Its value may be different than (...)/Info/DateUTC/date "DATE_RECORDED": "creation_date", # TODO: Extract subtitle metadata "SUBTITLE": "subtitle_author", } def extract(self, mkv): for segment in mkv.array("Segment"): self.processSegment(segment) def processSegment(self, segment): for field in segment: if field.name.startswith("Info["): self.processInfo(field) elif field.name.startswith("Tags["): for tag in field.array("Tag"): self.processTag(tag) elif field.name.startswith("Tracks["): self.processTracks(field) elif field.name.startswith("Cluster["): if self.quality < QUALITY_GOOD: return def processTracks(self, tracks): for entry in tracks.array("TrackEntry"): self.processTrack(entry) def processTrack(self, track): if "TrackType/enum" not in track: return if track["TrackType/enum"].display == "video": self.processVideo(track) elif track["TrackType/enum"].display == "audio": self.processAudio(track) elif track["TrackType/enum"].display == "subtitle": self.processSubtitle(track) def trackCommon(self, track, meta): if "Name/unicode" in track: meta.title = track["Name/unicode"].value if "Language/string" in track \ and 
track["Language/string"].value not in ("mis", "und"): meta.language = track["Language/string"].value def processVideo(self, track): video = Metadata(self) try: self.trackCommon(track, video) video.compression = track["CodecID/string"].value if "Video" in track: video.width = track["Video/PixelWidth/unsigned"].value video.height = track["Video/PixelHeight/unsigned"].value except MissingField: pass self.addGroup("video[]", video, "Video stream") def processAudio(self, track): audio = Metadata(self) try: self.trackCommon(track, audio) if "Audio" in track: audio.sample_rate = track["Audio/SamplingFrequency/float"].value audio.nb_channel = track["Audio/Channels/unsigned"].value audio.compression = track["CodecID/string"].value except MissingField: pass self.addGroup("audio[]", audio, "Audio stream") def processSubtitle(self, track): sub = Metadata(self) try: self.trackCommon(track, sub) sub.compression = track["CodecID/string"].value except MissingField: pass self.addGroup("subtitle[]", sub, "Subtitle") def processTag(self, tag): for field in tag.array("SimpleTag"): self.processSimpleTag(field) def processSimpleTag(self, tag): if "TagName/unicode" not in tag \ or "TagString/unicode" not in tag: return name = tag["TagName/unicode"].value if name not in self.tag_key: return key = self.tag_key[name] value = tag["TagString/unicode"].value setattr(self, key, value) def processInfo(self, info): if "Duration/float" in info \ and "TimecodeScale/unsigned" in info \ and 0 < info["Duration/float"].value: try: seconds = info["Duration/float"].value * info["TimecodeScale/unsigned"].value * 1e-9 self.duration = timedelta(seconds=seconds) except OverflowError: # Catch OverflowError for timedelta # (long int too large to convert to int) pass if "DateUTC/date" in info: try: self.creation_date = dateToDatetime(info["DateUTC/date"].value) except OverflowError: pass if "WritingApp/unicode" in info: self.producer = info["WritingApp/unicode"].value if "MuxingApp/unicode" in info: 
self.producer = info["MuxingApp/unicode"].value if "Title/unicode" in info: self.title = info["Title/unicode"].value class FlvMetadata(MultipleMetadata): def extract(self, flv): if "video[0]" in flv: meta = Metadata(self) self.extractVideo(flv["video[0]"], meta) self.addGroup("video", meta, "Video stream") if "audio[0]" in flv: meta = Metadata(self) self.extractAudio(flv["audio[0]"], meta) self.addGroup("audio", meta, "Audio stream") # TODO: Computer duration # One technic: use last video/audio chunk and use timestamp # But this is very slow self.format_version = flv.description if "metadata/entry[1]" in flv: self.extractAMF(flv["metadata/entry[1]"]) if self.has('duration'): self.bit_rate = flv.size / timedelta2seconds(self.get('duration')) @fault_tolerant def extractAudio(self, audio, meta): if audio["codec"].display == "MP3" and "music_data" in audio: meta.compression = audio["music_data"].description else: meta.compression = audio["codec"].display meta.sample_rate = audio.getSampleRate() if audio["is_16bit"].value: meta.bits_per_sample = 16 else: meta.bits_per_sample = 8 if audio["is_stereo"].value: meta.nb_channel = 2 else: meta.nb_channel = 1 @fault_tolerant def extractVideo(self, video, meta): meta.compression = video["codec"].display def extractAMF(self, amf): for entry in amf.array("item"): self.useAmfEntry(entry) @fault_tolerant def useAmfEntry(self, entry): key = entry["key"].value if key == "duration": self.duration = timedelta(seconds=entry["value"].value) elif key == "creator": self.producer = entry["value"].value elif key == "audiosamplerate": self.sample_rate = entry["value"].value elif key == "framerate": self.frame_rate = entry["value"].value elif key == "metadatacreator": self.producer = entry["value"].value elif key == "metadatadate": self.creation_date = entry.value elif key == "width": self.width = int(entry["value"].value) elif key == "height": self.height = int(entry["value"].value) class MovMetadata(RootMetadata): def extract(self, mov): for 
atom in mov: if "movie" in atom: self.processMovie(atom["movie"]) @fault_tolerant def processMovieHeader(self, hdr): self.creation_date = hdr["creation_date"].value self.last_modification = hdr["lastmod_date"].value self.duration = timedelta(seconds=float(hdr["duration"].value) / hdr["time_scale"].value) self.comment = _("Play speed: %.1f%%") % (hdr["play_speed"].value*100) self.comment = _("User volume: %.1f%%") % (float(hdr["volume"].value)*100//255) @fault_tolerant def processTrackHeader(self, hdr): width = int(hdr["frame_size_width"].value) height = int(hdr["frame_size_height"].value) if width and height: self.width = width self.height = height def processTrack(self, atom): for field in atom: if "track_hdr" in field: self.processTrackHeader(field["track_hdr"]) def processMovie(self, atom): for field in atom: if "track" in field: self.processTrack(field["track"]) if "movie_hdr" in field: self.processMovieHeader(field["movie_hdr"]) class AsfMetadata(MultipleMetadata): EXT_DESC_TO_ATTR = { "Encoder": "producer", "ToolName": "producer", "AlbumTitle": "album", "Track": "track_number", "TrackNumber": "track_total", "Year": "creation_date", "AlbumArtist": "author", } SKIP_EXT_DESC = set(( # Useless informations "WMFSDKNeeded", "WMFSDKVersion", "Buffer Average", "VBR Peak", "EncodingTime", "MediaPrimaryClassID", "UniqueFileIdentifier", )) def extract(self, asf): if "header/content" in asf: self.processHeader(asf["header/content"]) def processHeader(self, header): compression = [] bit_rates = [] is_vbr = None if "ext_desc/content" in header: # Extract all data from ext_desc data = {} for desc in header.array("ext_desc/content/descriptor"): self.useExtDescItem(desc, data) # Have ToolName and ToolVersion? 
If yes, group them to producer key if "ToolName" in data and "ToolVersion" in data: self.producer = "%s (version %s)" % (data["ToolName"], data["ToolVersion"]) del data["ToolName"] del data["ToolVersion"] # "IsVBR" key if "IsVBR" in data: is_vbr = (data["IsVBR"] == 1) del data["IsVBR"] # Store data for key, value in data.iteritems(): if key in self.EXT_DESC_TO_ATTR: key = self.EXT_DESC_TO_ATTR[key] else: if isinstance(key, str): key = makePrintable(key, "ISO-8859-1", to_unicode=True) value = "%s=%s" % (key, value) key = "comment" setattr(self, key, value) if "file_prop/content" in header: self.useFileProp(header["file_prop/content"], is_vbr) if "codec_list/content" in header: for codec in header.array("codec_list/content/codec"): if "name" in codec: text = codec["name"].value if "desc" in codec and codec["desc"].value: text = "%s (%s)" % (text, codec["desc"].value) compression.append(text) audio_index = 1 video_index = 1 for index, stream_prop in enumerate(header.array("stream_prop")): if "content/audio_header" in stream_prop: meta = Metadata(self) self.streamProperty(header, index, meta) self.streamAudioHeader(stream_prop["content/audio_header"], meta) if self.addGroup("audio[%u]" % audio_index, meta, "Audio stream #%u" % audio_index): audio_index += 1 elif "content/video_header" in stream_prop: meta = Metadata(self) self.streamProperty(header, index, meta) self.streamVideoHeader(stream_prop["content/video_header"], meta) if self.addGroup("video[%u]" % video_index, meta, "Video stream #%u" % video_index): video_index += 1 if "metadata/content" in header: info = header["metadata/content"] try: self.title = info["title"].value self.author = info["author"].value self.copyright = info["copyright"].value except MissingField: pass @fault_tolerant def streamAudioHeader(self, audio, meta): if not meta.has("compression"): meta.compression = audio["twocc"].display meta.nb_channel = audio["channels"].value meta.sample_rate = audio["sample_rate"].value meta.bits_per_sample = 
audio["bits_per_sample"].value @fault_tolerant def streamVideoHeader(self, video, meta): meta.width = video["width"].value meta.height = video["height"].value if "bmp_info" in video: bmp_info = video["bmp_info"] if not meta.has("compression"): meta.compression = bmp_info["codec"].display meta.bits_per_pixel = bmp_info["bpp"].value @fault_tolerant def useExtDescItem(self, desc, data): if desc["type"].value == ASF_Descriptor.TYPE_BYTE_ARRAY: # Skip binary data return key = desc["name"].value if "/" in key: # Replace "WM/ToolName" with "ToolName" key = key.split("/", 1)[1] if key in self.SKIP_EXT_DESC: # Skip some keys return value = desc["value"].value if not value: return value = makeUnicode(value) data[key] = value @fault_tolerant def useFileProp(self, prop, is_vbr): self.creation_date = prop["creation_date"].value self.duration = prop["play_duration"].value if prop["seekable"].value: self.comment = u"Is seekable" value = prop["max_bitrate"].value text = prop["max_bitrate"].display if is_vbr is True: text = "VBR (%s max)" % text elif is_vbr is False: text = "%s (CBR)" % text else: text = "%s (max)" % text self.bit_rate = (value, text) def streamProperty(self, header, index, meta): key = "bit_rates/content/bit_rate[%u]/avg_bitrate" % index if key in header: meta.bit_rate = header[key].value # TODO: Use codec list # It doesn't work when the video uses /header/content/bitrate_mutex # since the codec list are shared between streams but... how is it # shared? # key = "codec_list/content/codec[%u]" % index # if key in header: # codec = header[key] # if "name" in codec: # text = codec["name"].value # if "desc" in codec and codec["desc"].value: # meta.compression = "%s (%s)" % (text, codec["desc"].value) # else: # meta.compression = text registerExtractor(MovFile, MovMetadata) registerExtractor(AsfFile, AsfMetadata) registerExtractor(FlvFile, FlvMetadata) registerExtractor(MkvFile, MkvMetadata)
15,100
Python
.py
358
31.756983
100
0.581751
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,787
metadata.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/metadata.pyc
Ñò Î ÈMc @s!ddklZlZddklZddklZlZddkl Z ddk l Z l Z ddk lZddklZddklZlZlZlZdd klZdd kZhZd efd „ƒYZd efd„ƒYZdefd„ƒYZd„Zed„Zd S(iÿÿÿÿ(tanytsorted(t endian_name(t makePrintablet makeUnicode(tDict(terrortHACHOIR_ERRORS(t_(tLogger(tDatat MIN_PRIORITYt MAX_PRIORITYtQUALITY_NORMAL(tregisterAllItemsNtMetadatacBs¹eZdZed„Zd„Zd„Zd„Zd„Zd„Z d„Z ddd „Z d „Z ddd „Zd „Zd „Zd„Zd„Zdeddd„Zd„ZRS(uMetadatacCs¦t|itƒpt‚|o |i}nttd|ƒdƒ}ti|ƒti |dhƒti |d|ƒ|i i}ti |d|ƒt |ƒdS(Nggð?t_Metadata__datatqualityt_Metadata__header( t isinstancetheadertunicodetAssertionErrorRtmintmaxtobjectt__init__t __setattr__t __class__R(tselftparentRR((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyRs   cCsdS(N((R((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyt_logger$scCsN||ijo&ttdƒ|ii|fƒ‚n|i|i|ƒdS(sK Add a new value to data with name 'key'. Skip duplicates. s%s has no metadata '%s'N(RtKeyErrorRRt__name__tadd(Rtkeytvalue((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyR's&cCsti|d|ƒdS(NR(RR(Rttext((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyt setHeader2scCs9y|i|SWn#tj otd|ƒ‚nXdS(NsMetadata has no value '%s'(Rt LookupErrort ValueError(RR#((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pytgetItems5scCs6y|i|ƒ|SWnttfj odSXdS(N(R)R'R(tNone(RR#tindex((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pytgetItem;scCsdt|i|ƒƒjS(Ni(tlenR)(RR#((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pythasAsicCsU|i||ƒ}|djo/|djotd||fƒ‚qN|Sn|iS(s7 Read first value of tag with name 'key'. 
>>> from datetime import timedelta >>> a = RootMetadata() >>> a.duration = timedelta(seconds=2300) >>> a.get('duration') datetime.timedelta(0, 2300) >>> a.get('author', u'Anonymous') u'Anonymous' s%Metadata has no value '%s' (index %s)N(R,R*R(R$(RR#tdefaultR+titem((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pytgetDs   cCsYy|i|}Wn#tj otd|ƒ‚nXg}|D]}||iqB~S(NsMetadata has no value '%s'(RR'R(R$(RR#tdatat_[1]R0((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyt getValuesXs cCs/|i||ƒ}|dj o|iS|SdS(sC Read first value, as unicode string, of tag with name 'key'. >>> from datetime import timedelta >>> a = RootMetadata() >>> a.duration = timedelta(seconds=2300) >>> a.getText('duration') u'38 min 20 sec' >>> a.getText('titre', u'Unknown') u'Unknown' N(R,R*R%(RR#R/R+R0((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pytgetText_s  cCs7|i|ijpt‚||_||i|i<dS(N(R#RRtmetadata(RR2((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pytregisterqs cCs |iiƒS(N(Rt itervalues(R((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyt__iter__vscCs#|iƒ}did„|DƒƒS(sv Create a multi-line ASCII string (end of line is "\n") which represents all datas. >>> a = RootMetadata() >>> a.author = "haypo" >>> a.copyright = unicode("© Hachoir", "UTF-8") >>> print a Metadata: - Author: haypo - Copyright: \xa9 Hachoir @see __unicode__() and exportPlaintext() s css"x|]}t|dƒVqWdS(tASCIIN(R(t.0tline((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pys <genexpr>‰s (texportPlaintexttjoin(RR%((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyt__str__ys cCsdi|iƒƒS(sH Create a multi-line Unicode string (end of line is "\n") which represents all datas. 
>>> a = RootMetadata() >>> a.copyright = unicode("© Hachoir", "UTF-8") >>> print repr(unicode(a)) u'Metadata:\n- Copyright: \xa9 Hachoir' @see __str__() and exportPlaintext() s (R>R=(R((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyt __unicode__‹s u- c Cs!|dj o"t|tƒ}t|tƒ}nt}|p |i}nd|g}x¨t|ƒD]š}||ijoPn|ipqcn|o |i }n |i }xN|iD]C}|o |i }nt |i ƒ}|id|||fƒq¶WqcWdt|ƒjo|SdSdS(sÒ Convert metadata to multi-line Unicode string and skip datas with priority lower than specified priority. Default priority is Metadata.MAX_PRIORITY. If human flag is True, data key are translated to better human name (eg. "bit_rate" becomes "Bit rate") which may be translated using gettext. If priority is too small, metadata are empty and so None is returned. >>> print RootMetadata().exportPlaintext() None >>> meta = RootMetadata() >>> meta.copyright = unicode("© Hachoir", "UTF-8") >>> print repr(meta.exportPlaintext()) [u'Metadata:', u'- Copyright: \xa9 Hachoir'] @see __str__() and __unicode__() s%s:s%s%s: %siN(R*RR RR RRtprioritytvaluest descriptionR#R%RR$tappendR-( RRAthumant line_prefixttitleR%R2R0R$((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyR=™s2         "cCstd„|iiƒDƒƒS(Ncssx|] }|VqWdS(N((R;R0((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pys <genexpr>Ês (RRR8(R((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyt __nonzero__ÉsN(R!t __module__RR RRRR&R)R,R.R*R1R4R5R7R9R?R@tTrueR=RH(((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyRs"           0t RootMetadatacBseZed„ZRS(cCsti|d|ƒdS(N(RRR*(RR((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyRÍs(R!RIR R(((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyRKÌstMultipleMetadatacBsbeZedƒZed„Zd„Zd„Zd„Zd„Z d d„Z d e dd„Z RS( tCommoncCs=ti||ƒti|dtƒƒti|dhƒdS(Nt_MultipleMetadata__groupst_MultipleMetadata__key_counter(RKRRRR(RR((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyRÒscCs 
||ijS(N(RN(RR#((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyt __contains__×scCs |i|S(N(RN(RR#((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyt __getitem__ÚscCs |iiƒS(N(RNR8(R((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyt iterGroupsÝscCs,ti|ƒotStd„|iDƒƒS(Ncssx|]}t|ƒVqWdS(N(tbool(R;tgroup((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pys <genexpr>ãs (RKRHRJRRN(R((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyRHàscCs³|p|id|ƒtS|idƒoW|d }||ijo|i|cd7<nd|i|<|d|i|7}n|o|i|ƒn|ii||ƒtS(sŠ Add a new group (metadata of a sub-document). Returns False if the group is skipped, True if it has been added. sSkip empty group %ss[]iþÿÿÿis[%u](twarningtFalsetendswithROR&RNRDRJ(RR#R6R((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pytaddGroupås  u- c Cs³ti||||ƒ}|o |}ng}xg|iiƒD]V\}}|p |}nd}|i|||d|ƒ} | o|i| ƒq?q?Wt|ƒo|SdSdS(NRG(RR=RNt iteritemsR*textendR-( RRARERFtcommonR%R#R6RGR$((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyR=ús    N(R!RIRRR RRPRQRRRHR*RXRJR=(((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyRLÐs       cCs9|tjpt‚t|tƒpt‚|t|<dS(N(t extractorsRt issubclassRK(tparsert extractor((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pytregisterExtractor scCsœyt|i}Wntj odSX||ƒ}y|i|ƒWn)tj o}tdt|ƒƒnX|o |i|_t |i |_ n|S(s{ Create a Metadata class from a parser. Returns None if no metadata extractor does exist for the parser class. 
s$Error during metadata extraction: %sN( R\RR R*textractRRRt mime_typeRtendian(R^RR_R6terr((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pytextractMetadatas  (thachoir_core.compatibilityRRthachoir_core.endianRthachoir_core.toolsRRthachoir_core.dictRthachoir_core.errorRRthachoir_core.i18nRthachoir_core.logR thachoir_metadata.metadata_itemR R R R thachoir_metadata.registerRtreR\RRKRLR`Re(((sC/pentest/enumeration/google/metagoofil/hachoir_metadata/metadata.pyt<module>s" ¼= 
12,613
Python
.py
97
124.175258
813
0.455781
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,788
misc.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/misc.py
from hachoir_metadata.metadata import RootMetadata, registerExtractor from hachoir_metadata.safe import fault_tolerant from hachoir_parser.container import SwfFile from hachoir_parser.misc import TorrentFile, TrueTypeFontFile, OLE2_File, PcfFile from hachoir_core.field import isString from hachoir_core.error import warning from hachoir_parser import guessParser from hachoir_metadata.setter import normalizeString class TorrentMetadata(RootMetadata): KEY_TO_ATTR = { u"announce": "url", u"comment": "comment", u"creation_date": "creation_date", } INFO_TO_ATTR = { u"length": "file_size", u"name": "filename", } def extract(self, torrent): for field in torrent[0]: self.processRoot(field) @fault_tolerant def processRoot(self, field): if field.name in self.KEY_TO_ATTR: key = self.KEY_TO_ATTR[field.name] value = field.value setattr(self, key, value) elif field.name == "info" and "value" in field: for field in field["value"]: self.processInfo(field) @fault_tolerant def processInfo(self, field): if field.name in self.INFO_TO_ATTR: key = self.INFO_TO_ATTR[field.name] value = field.value setattr(self, key, value) elif field.name == "piece_length": self.comment = "Piece length: %s" % field.display class TTF_Metadata(RootMetadata): NAMEID_TO_ATTR = { 0: "copyright", # Copyright notice 3: "title", # Unique font identifier 5: "version", # Version string 8: "author", # Manufacturer name 11: "url", # URL Vendor 14: "copyright", # License info URL } def extract(self, ttf): if "header" in ttf: self.extractHeader(ttf["header"]) if "names" in ttf: self.extractNames(ttf["names"]) @fault_tolerant def extractHeader(self, header): self.creation_date = header["created"].value self.last_modification = header["modified"].value self.comment = u"Smallest readable size in pixels: %s pixels" % header["lowest"].value self.comment = u"Font direction: %s" % header["font_dir"].display @fault_tolerant def extractNames(self, names): offset = names["offset"].value for header in names.array("header"): key = 
header["nameID"].value foffset = offset + header["offset"].value field = names.getFieldByAddress(foffset*8) if not field or not isString(field): continue value = field.value if key not in self.NAMEID_TO_ATTR: continue key = self.NAMEID_TO_ATTR[key] if key == "version" and value.startswith(u"Version "): # "Version 1.2" => "1.2" value = value[8:] setattr(self, key, value) class OLE2_Metadata(RootMetadata): SUMMARY_ID_TO_ATTR = { 2: "title", # Title 4: "author", 6: "comment", 8: "author", # Last saved by 12: "creation_date", 13: "last_modification", 14: "nb_page", 18: "producer", } IGNORE_SUMMARY = set(( 1, # Code page )) DOC_SUMMARY_ID_TO_ATTR = { 3: "title", # Subject 14: "author", # Manager } IGNORE_DOC_SUMMARY = set(( 1, # Code page )) def extract(self, ole2): self._extract(ole2) def _extract(self, fieldset, main_document=True): if main_document: # _feedAll() is needed to make sure that we get all root[*] fragments fieldset._feedAll() if "root[0]" in fieldset: self.useRoot(fieldset["root[0]"]) doc_summary = self.getField(fieldset, main_document, "doc_summary[0]") if doc_summary: self.useSummary(doc_summary, True) word_doc = self.getField(fieldset, main_document, "word_doc[0]") if word_doc: self.useWordDocument(word_doc) summary = self.getField(fieldset, main_document, "summary[0]") if summary: self.useSummary(summary, False) revision = self.getField(fieldset, main_document, "table1[0]") @fault_tolerant def useRoot(self, root): stream = root.getSubIStream() ministream = guessParser(stream) if not ministream: warning("Unable to create the OLE2 mini stream parser!") return self._extract(ministream, main_document=False) def getField(self, fieldset, main_document, name): if name not in fieldset: return None # _feedAll() is needed to make sure that we get all fragments # eg. 
summary[0], summary[1], ..., summary[n] fieldset._feedAll() field = fieldset[name] if main_document: stream = field.getSubIStream() field = guessParser(stream) if not field: warning("Unable to create the OLE2 parser for %s!" % name) return None return field @fault_tolerant def useSummary(self, summary, is_doc_summary): if "os" in summary: self.os = summary["os"].display if "section[0]" not in summary: return summary = summary["section[0]"] for property in summary.array("property_index"): self.useProperty(summary, property, is_doc_summary) @fault_tolerant def useWordDocument(self, doc): self.comment = "Encrypted: %s" % doc["fEncrypted"].value @fault_tolerant def useProperty(self, summary, property, is_doc_summary): field = summary.getFieldByAddress(property["offset"].value*8) if not field \ or "value" not in field: return field = field["value"] if not field.hasValue(): return # Get value value = field.value if isinstance(value, (str, unicode)): value = normalizeString(value) if not value: return # Get property identifier prop_id = property["id"].value if is_doc_summary: id_to_attr = self.DOC_SUMMARY_ID_TO_ATTR ignore = self.IGNORE_DOC_SUMMARY else: id_to_attr = self.SUMMARY_ID_TO_ATTR ignore = self.IGNORE_SUMMARY if prop_id in ignore: return # Get Hachoir metadata key try: key = id_to_attr[prop_id] use_prefix = False except LookupError: key = "comment" use_prefix = True if use_prefix: prefix = property["id"].display if (prefix in ("TotalEditingTime", "LastPrinted")) \ and (not field): # Ignore null time delta return value = "%s: %s" % (prefix, value) else: if (key == "last_modification") and (not field): # Ignore null timestamp return setattr(self, key, value) class PcfMetadata(RootMetadata): PROP_TO_KEY = { 'CHARSET_REGISTRY': 'charset', 'COPYRIGHT': 'copyright', 'WEIGHT_NAME': 'font_weight', 'FOUNDRY': 'author', 'FONT': 'title', '_XMBDFED_INFO': 'producer', } def extract(self, pcf): if "properties" in pcf: self.useProperties(pcf["properties"]) def 
useProperties(self, properties): last = properties["total_str_length"] offset0 = last.address + last.size for index in properties.array("property"): # Search name and value value = properties.getFieldByAddress(offset0+index["value_offset"].value*8) if not value: continue value = value.value if not value: continue name = properties.getFieldByAddress(offset0+index["name_offset"].value*8) if not name: continue name = name.value if name not in self.PROP_TO_KEY: warning("Skip %s=%r" % (name, value)) continue key = self.PROP_TO_KEY[name] setattr(self, key, value) class SwfMetadata(RootMetadata): def extract(self, swf): self.height = swf["rect/ymax"].value # twips self.width = swf["rect/xmax"].value # twips self.format_version = "flash version %s" % swf["version"].value self.frame_rate = swf["frame_rate"].value self.comment = "Frame count: %s" % swf["frame_count"].value registerExtractor(TorrentFile, TorrentMetadata) registerExtractor(TrueTypeFontFile, TTF_Metadata) registerExtractor(OLE2_File, OLE2_Metadata) registerExtractor(PcfFile, PcfMetadata) registerExtractor(SwfFile, SwfMetadata)
8,940
Python
.py
236
28.326271
94
0.585965
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,789
video.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/video.pyc
—Ú Œ »Mc@säddklZddklZddklZlZlZlZddk l Z ddk l Z ddk lZlZlZddklZddklZdd klZdd klZdd klZlZlZdd klZdd kl Z defdÑÉYZ!defdÑÉYZ"defdÑÉYZ#defdÑÉYZ$eee#Éeee$Éeee"Éeee!ÉdS(iˇˇˇˇ(t MissingField(t HachoirError(tregisterExtractortMetadatat RootMetadatatMultipleMetadata(t QUALITY_GOOD(tfault_tolerant(tMovFiletAsfFiletFlvFile(t Descriptor(tMkvFile(tdateToDatetime(t_(t makeUnicodet makePrintablettimedelta2seconds(twarning(t timedeltat MkvMetadatacBsîeZhdd6dd6dd6dd6dd 6Zd ÑZd ÑZd ÑZd ÑZdÑZdÑZdÑZ dÑZ dÑZ dÑZ dÑZ RS(ttitletTITLEturltURLt copyrightt COPYRIGHTt creation_datet DATE_RECORDEDtsubtitle_authortSUBTITLEcCs+x$|idÉD]}|i|ÉqWdS(NtSegment(tarraytprocessSegment(tselftmkvtsegment((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pytextractscCs«x¿|D]∏}|iidÉo|i|Éq|iidÉo+xx|idÉD]}|i|ÉqTWq|iidÉo|i|Éq|iidÉo|itjodSqqWdS(NsInfo[sTags[tTagsTracks[sCluster[(tnamet startswitht processInfoR t processTagt processTrackstqualityR(R"R$tfieldttag((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR!"scCs+x$|idÉD]}|i|ÉqWdS(Nt TrackEntry(R t processTrack(R"ttrackstentry((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR+/scCsÖd|jodS|didjo|i|ÉnK|didjo|i|Én&|didjo|i|ÉndS(NsTrackType/enumtvideotaudiotsubtitle(tdisplayt processVideot processAudiotprocessSubtitle(R"ttrack((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR03s cCsZd|jo|di|_nd|jo(|didjo|di|_ndS(Ns Name/unicodesLanguage/stringtmistund(smissund(tvalueRtlanguage(R"R:tmeta((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyt trackCommon=s   cCsét|É}yU|i||É|di|_d|jo$|di|_|di|_nWntj onX|id|dÉdS(NsCodecID/stringtVideosVideo/PixelWidth/unsignedsVideo/PixelHeight/unsignedsvideo[]s Video stream(RR@R=t compressiontwidththeightRtaddGroup(R"R:R3((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR7Ds  cCsét|É}yU|i||Éd|jo$|di|_|di|_n|di|_Wntj onX|id|dÉdS(NtAudiosAudio/SamplingFrequency/floatsAudio/Channels/unsignedsCodecID/stringsaudio[]s Audio 
stream(RR@R=t sample_ratet nb_channelRBRRE(R"R:R4((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR8Ps  cCs]t|É}y$|i||É|di|_Wntj onX|id|dÉdS(NsCodecID/strings subtitle[]tSubtitle(RR@R=RBRRE(R"R:tsub((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR9\s cCs+x$|idÉD]}|i|ÉqWdS(Nt SimpleTag(R tprocessSimpleTag(R"R.R-((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR*escCsod|jp d|jodS|di}||ijodS|i|}|di}t|||ÉdS(NsTagName/unicodesTagString/unicode(R=ttag_keytsetattr(R"R.R'tkeyR=((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyRLis     c Cs"d|jomd|jo`d|dijoLy2|di|did}td|É|_Wqztj oqzXnd|jo4yt|diÉ|_Wqªtj oqªXnd|jo|di|_nd|jo|di|_nd |jo|d i|_ndS( NsDuration/floatsTimecodeScale/unsignedigï÷&Ë .>tsecondss DateUTC/datesWritingApp/unicodesMuxingApp/unicodes Title/unicode(R=Rtdurationt OverflowErrorR RtproducerR(R"tinfoRP((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR)ts&        (t__name__t __module__RMR%R!R+R0R@R7R8R9R*RLR)(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyRs"      t FlvMetadatacBsGeZdÑZedÑÉZedÑÉZdÑZedÑÉZRS(cCsÌd|jo7t|É}|i|d|É|id|dÉnd|jo7t|É}|i|d|É|id|dÉn|i|_d|jo|i|dÉn|idÉo#|it |i dÉÉ|_ ndS( Nsvideo[0]R3s Video streamsaudio[0]R4s Audio streamsmetadata/entry[1]RQ( Rt extractVideoREt extractAudiot descriptiontformat_versiont extractAMFthastsizeRtgettbit_rate(R"tflvR?((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR%ås      cCs†|didjo!d|jo|di|_n|di|_|iÉ|_|dio d|_n d|_|dio d|_n d |_dS( NtcodectMP3t music_datatis_16bitiit is_stereoii(R6RZRBt getSampleRateRGR=tbits_per_sampleRH(R"R4R?((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyRYüs!   
cCs|di|_dS(NRb(R6RB(R"R3R?((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyRXØscCs+x$|idÉD]}|i|ÉqWdS(Ntitem(R t useAmfEntry(R"tamfR2((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR\≥scCs*|di}|djotd|diÉ|_n|djo|di|_nœ|djo|di|_nÆ|djo|di|_nç|djo|di|_nl|d jo|i|_nO|d jot|diÉ|_n(|d jot|diÉ|_ ndS( NRORQRPR=tcreatortaudiosampleratet frameratetmetadatacreatort metadatadateRCRD( R=RRQRSRGt frame_rateRtintRCRD(R"R2RO((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyRj∑s"         (RURVR%RRYRXR\Rj(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyRWãs   t MovMetadatacBsAeZdÑZedÑÉZedÑÉZdÑZdÑZRS(cCs7x0|D](}d|jo|i|dÉqqWdS(Ntmovie(t processMovie(R"tmovtatom((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR%Ãs cCsî|di|_|di|_tdt|diÉ|diÉ|_tdÉ|did|_td Ét|d iÉdd |_dS( NRt lastmod_dateRPRQt time_scalesPlay speed: %.1f%%t play_speedidsUser volume: %.1f%%tvolumeiˇ(R=Rtlast_modificationRtfloatRQRtcomment(R"thdr((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pytprocessMovieHeader—s *cCsNt|diÉ}t|diÉ}|o|o||_||_ndS(Ntframe_size_widthtframe_size_height(RrR=RCRD(R"RRCRD((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pytprocessTrackHeaderŸs  cCs7x0|D](}d|jo|i|dÉqqWdS(Nt track_hdr(RÉ(R"RwR-((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR0·s cCsYxR|D]J}d|jo|i|dÉnd|jo|i|dÉqqWdS(NR:t movie_hdr(R0RÄ(R"RwR-((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyRuÊs   (RURVR%RRÄRÉR0Ru(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyRsÀs   t AsfMetadatacBs¢eZhdd6dd6dd6dd6dd6d d 6d d 6ZedÉZdÑZdÑZedÑÉZedÑÉZ edÑÉZ edÑÉZ dÑZ RS(RStEncodertToolNametalbumt AlbumTitlet track_numbertTrackt track_totalt TrackNumberRtYeartauthort AlbumArtistt WMFSDKNeededt WMFSDKVersionsBuffer AveragesVBR Peakt EncodingTimetMediaPrimaryClassIDtUniqueFileIdentifiercCs&d|jo|i|dÉndS(Nsheader/content(t 
processHeader(R"tasf((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR%ˇs c Cs{g}g}d}d|jo0h}x'|idÉD]}|i||Éq5Wd|jo:d|jo-d|d|df|_|d=|d=nd|jo|ddj}|d=nxé|iÉD]|\}}||ijo|i|}n@t|tÉot|dd t É}nd ||f}d }t |||ÉqÀWnd |jo|i |d |Énd |jox||idÉD]g} d| joT| di } d| jo)| di od| | di f} n|i | ÉqëqëWnd} d} xt|idÉÉD]Ï\} }d|joct|É}|i|| |É|i|d|É|id| |d| Éo| d7} qq"d|joct|É}|i|| |É|i|d|É|id| |d| Éo| d7} qq"q"Wd|joX|d}y4|di |_|di |_|di |_Wqwtj oqwXndS(Nsext_desc/contentsext_desc/content/descriptorRàt ToolVersions%s (version %s)tIsVBRis ISO-8859-1t to_unicodes%s=%sR~sfile_prop/contentscodec_list/contentscodec_list/content/codecR'tdescs%s (%s)t stream_propscontent/audio_headers audio[%u]sAudio stream #%uscontent/video_headers video[%u]sVideo stream #%usmetadata/contentRRêR(tNoneR tuseExtDescItemRSt iteritemstEXT_DESC_TO_ATTRt isinstancetstrRtTrueRNt useFilePropR=tappendt enumerateRtstreamPropertytstreamAudioHeaderREtstreamVideoHeaderRRêRR(R"theaderRBt bit_ratestis_vbrtdataRúROR=Rbttextt audio_indext video_indextindexRùR?RT((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyRóst                cCsX|idÉp|di|_n|di|_|di|_|di|_dS(NRBttwocctchannelsRGRh(R]R6RBR=RHRGRh(R"R4R?((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR©Hs cCss|di|_|di|_d|joB|d}|idÉp|di|_n|di|_ndS(NRCRDtbmp_infoRBRbtbpp(R=RCRDR]R6RBtbits_per_pixel(R"R3R?Rµ((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR™Ps  cCsò|ditijodS|di}d|jo|iddÉd}n||ijodS|di}|pdSt|É}|||<dS(NttypeR't/iR=(R=tASF_DescriptortTYPE_BYTE_ARRAYtsplitt SKIP_EXT_DESCR(R"RúRÆROR=((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyRüZs    cCs®|di|_|di|_|dio d|_n|di}|di}|tjod|}n&|tjod|}n d|}||f|_dS( NRt play_durationtseekableu Is seekablet max_bitrates VBR (%s max)s%s (CBR)s%s (max)(R=RRQR~R6R§tFalseR`(R"tpropR≠R=RØ((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR•ls      
cCs/d|}||jo||i|_ndS(Ns*bit_rates/content/bit_rate[%u]/avg_bitrate(R=R`(R"R´R≤R?RO((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyR®|s  (RíRìsBuffer AveragesVBR PeakRîRïRñ( RURVR°tsetRΩR%RóRR©R™RüR•R®(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyRÜÓs&    E N(%thachoir_core.fieldRthachoir_core.errorRthachoir_metadata.metadataRRRRthachoir_metadata.metadata_itemRthachoir_metadata.safeRthachoir_parser.videoRR R thachoir_parser.video.asfR R∫thachoir_parser.containerR thachoir_parser.container.mkvR thachoir_core.i18nRthachoir_core.toolsRRRRtdatetimeRRRWRsRÜ(((s@/pentest/enumeration/google/metagoofil/hachoir_metadata/video.pyt<module>s("{@#°   
15,514
Python
.py
60
257.533333
950
0.42627
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,790
register.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/register.py
from hachoir_core.i18n import _ from hachoir_core.tools import ( humanDuration, makePrintable, humanBitRate, humanFrequency, humanBitSize, humanFilesize, humanDatetime) from hachoir_core.language import Language from hachoir_metadata.filter import Filter, NumberFilter, DATETIME_FILTER from datetime import date, datetime, timedelta from hachoir_metadata.formatter import ( humanAudioChannel, humanFrameRate, humanComprRate, humanAltitude, humanPixelSize, humanDPI) from hachoir_metadata.setter import ( setDatetime, setTrackNumber, setTrackTotal, setLanguage) from hachoir_metadata.metadata_item import Data MIN_SAMPLE_RATE = 1000 # 1 kHz MAX_SAMPLE_RATE = 192000 # 192 kHz MAX_NB_CHANNEL = 8 # 8 channels MAX_WIDTH = 20000 # 20 000 pixels MAX_BIT_RATE = 500 * 1024 * 1024 # 500 Mbit/s MAX_HEIGHT = MAX_WIDTH MAX_DPI_WIDTH = 10000 MAX_DPI_HEIGHT = MAX_DPI_WIDTH MAX_NB_COLOR = 2 ** 24 # 16 million of color MAX_BITS_PER_PIXEL = 256 # 256 bits/pixel MAX_FRAME_RATE = 150 # 150 frame/sec MAX_NB_PAGE = 20000 MAX_COMPR_RATE = 1000.0 MIN_COMPR_RATE = 0.001 MAX_TRACK = 999 DURATION_FILTER = Filter(timedelta, timedelta(milliseconds=1), timedelta(days=365)) def registerAllItems(meta): meta.register(Data("title", 100, _("Title"), type=unicode)) meta.register(Data("artist", 101, _("Artist"), type=unicode)) meta.register(Data("author", 102, _("Author"), type=unicode)) meta.register(Data("music_composer", 103, _("Music composer"), type=unicode)) meta.register(Data("album", 200, _("Album"), type=unicode)) meta.register(Data("duration", 201, _("Duration"), # integer in milliseconde type=timedelta, text_handler=humanDuration, filter=DURATION_FILTER)) meta.register(Data("nb_page", 202, _("Nb page"), filter=NumberFilter(1, MAX_NB_PAGE))) meta.register(Data("music_genre", 203, _("Music genre"), type=unicode)) meta.register(Data("language", 204, _("Language"), conversion=setLanguage, type=Language)) meta.register(Data("track_number", 205, _("Track number"), conversion=setTrackNumber, 
filter=NumberFilter(1, MAX_TRACK), type=(int, long))) meta.register(Data("track_total", 206, _("Track total"), conversion=setTrackTotal, filter=NumberFilter(1, MAX_TRACK), type=(int, long))) meta.register(Data("organization", 210, _("Organization"), type=unicode)) meta.register(Data("version", 220, _("Version"))) meta.register(Data("width", 301, _("Image width"), filter=NumberFilter(1, MAX_WIDTH), type=(int, long), text_handler=humanPixelSize)) meta.register(Data("height", 302, _("Image height"), filter=NumberFilter(1, MAX_HEIGHT), type=(int, long), text_handler=humanPixelSize)) meta.register(Data("nb_channel", 303, _("Channel"), text_handler=humanAudioChannel, filter=NumberFilter(1, MAX_NB_CHANNEL), type=(int, long))) meta.register(Data("sample_rate", 304, _("Sample rate"), text_handler=humanFrequency, filter=NumberFilter(MIN_SAMPLE_RATE, MAX_SAMPLE_RATE), type=(int, long, float))) meta.register(Data("bits_per_sample", 305, _("Bits/sample"), text_handler=humanBitSize, filter=NumberFilter(1, 64), type=(int, long))) meta.register(Data("image_orientation", 306, _("Image orientation"))) meta.register(Data("nb_colors", 307, _("Number of colors"), filter=NumberFilter(1, MAX_NB_COLOR), type=(int, long))) meta.register(Data("bits_per_pixel", 308, _("Bits/pixel"), filter=NumberFilter(1, MAX_BITS_PER_PIXEL), type=(int, long))) meta.register(Data("filename", 309, _("File name"), type=unicode)) meta.register(Data("file_size", 310, _("File size"), text_handler=humanFilesize, type=(int, long))) meta.register(Data("pixel_format", 311, _("Pixel format"))) meta.register(Data("compr_size", 312, _("Compressed file size"), text_handler=humanFilesize, type=(int, long))) meta.register(Data("compr_rate", 313, _("Compression rate"), text_handler=humanComprRate, filter=NumberFilter(MIN_COMPR_RATE, MAX_COMPR_RATE), type=(int, long, float))) meta.register(Data("width_dpi", 320, _("Image DPI width"), filter=NumberFilter(1, MAX_DPI_WIDTH), type=(int, long), text_handler=humanDPI)) 
meta.register(Data("height_dpi", 321, _("Image DPI height"), filter=NumberFilter(1, MAX_DPI_HEIGHT), type=(int, long), text_handler=humanDPI)) meta.register(Data("file_attr", 400, _("File attributes"))) meta.register(Data("file_type", 401, _("File type"))) meta.register(Data("subtitle_author", 402, _("Subtitle author"), type=unicode)) meta.register(Data("creation_date", 500, _("Creation date"), text_handler=humanDatetime, filter=DATETIME_FILTER, type=(datetime, date), conversion=setDatetime)) meta.register(Data("last_modification", 501, _("Last modification"), text_handler=humanDatetime, filter=DATETIME_FILTER, type=(datetime, date), conversion=setDatetime)) meta.register(Data("latitude", 510, _("Latitude"), type=float)) meta.register(Data("longitude", 511, _("Longitude"), type=float)) meta.register(Data("altitude", 511, _("Altitude"), type=float, text_handler=humanAltitude)) meta.register(Data("location", 530, _("Location"), type=unicode)) meta.register(Data("city", 531, _("City"), type=unicode)) meta.register(Data("country", 532, _("Country"), type=unicode)) meta.register(Data("charset", 540, _("Charset"), type=unicode)) meta.register(Data("font_weight", 550, _("Font weight"))) meta.register(Data("camera_aperture", 520, _("Camera aperture"))) meta.register(Data("camera_focal", 521, _("Camera focal"))) meta.register(Data("camera_exposure", 522, _("Camera exposure"))) meta.register(Data("camera_brightness", 530, _("Camera brightness"))) meta.register(Data("camera_model", 531, _("Camera model"), type=unicode)) meta.register(Data("camera_manufacturer", 532, _("Camera manufacturer"), type=unicode)) meta.register(Data("compression", 600, _("Compression"))) meta.register(Data("copyright", 601, _("Copyright"), type=unicode)) meta.register(Data("url", 602, _("URL"), type=unicode)) meta.register(Data("frame_rate", 603, _("Frame rate"), text_handler=humanFrameRate, filter=NumberFilter(1, MAX_FRAME_RATE), type=(int, long, float))) meta.register(Data("bit_rate", 604, _("Bit 
rate"), text_handler=humanBitRate, filter=NumberFilter(1, MAX_BIT_RATE), type=(int, long, float))) meta.register(Data("aspect_ratio", 604, _("Aspect ratio"), type=(int, long, float))) meta.register(Data("os", 900, _("OS"), type=unicode)) meta.register(Data("producer", 901, _("Producer"), type=unicode)) meta.register(Data("comment", 902, _("Comment"), type=unicode)) meta.register(Data("format_version", 950, _("Format version"), type=unicode)) meta.register(Data("mime_type", 951, _("MIME type"), type=unicode)) meta.register(Data("endian", 952, _("Endian"), type=unicode))
6,986
Python
.py
99
66.161616
172
0.691737
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,791
version.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/version.py
PACKAGE = "hachoir-metadata" VERSION = "1.2.1" WEBSITE = "http://hachoir.org/wiki/hachoir-metadata" LICENSE = "GNU GPL v2"
124
Python
.py
4
29.75
52
0.731092
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,792
program.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/program.pyc
Ñò Î ÈMc@scddklZlZddklZddklZlZdefd„ƒYZeeeƒdS(iÿÿÿÿ(t RootMetadatatregisterExtractor(tExeFile(tfault_toleranttgetValuet ExeMetadatacBs¹eZh dd6dd6dd6dd6dd6dd6d d 6dd 6d d 6ZedƒZd„Zd„Zed„ƒZd„Z ed„ƒZ ed„ƒZ ed„ƒZ d„Z RS(ttitleu ProductNamet copyrightuLegalCopyrightuLegalTrademarksuLegalTrademarks1uLegalTrademarks2tauthoru CompanyNamet creation_dateu BuildDateuFileDescriptiontversionuProductVersionu InternalNameuOriginalFilenameu FileVersionu BuildVersioncCs@|iƒo|i|ƒn|iƒo|i|ƒndS(N(tisPEt extractPEtisNEt extractNE(tselftexe((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/program.pytextracts  cCsHd|jo|i|dƒnd|jo|i|dƒndS(Nt ne_headertinfo(t useNE_Headert useNEInfo(RR((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/program.pyRs  cCsGx@|idƒD]/}|didjo|i|dƒqqWdS(NtnodetnametStringFileInfosnode[0](tarraytvaluetreadVersionInfo(RRR((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/program.pyRscCs¾d|jo|i|dƒnd|jo|i|dƒn|iƒ}|ocd|joVxS|idƒD]>}t|dƒdjo"d|jo|i|dƒqtqtWndS(Nt pe_headert pe_opt_headersversion_info/node[0]sversion_info/node[0]/nodeRRsnode[0](t usePE_HeadertusePE_OptHeadert getResourceRRR(RRtresourceR((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/program.pyR %s    cCsC|dio d|_n%|dio d|_n d|_dS(Ntis_dllu0New-style executable: Dynamic-link library (DLL)t is_win_appu-New-style executable: Windows 3.x applicationu$New-style executable for Windows 3.x(Rtformat_version(Rthdr((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/program.pyR4s   cCsL|di|_d|di|_|dio d|_n d|_dS(NR sCPU: %stcpuR"u/Portable Executable: Dynamic-link library (DLL)u(Portable Executable: Windows application(RR tdisplaytcommentR$(RR%((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/program.pyR=s  cCsd|di|_dS(Ns Subsystem: %st subsystem(R'R((RR%((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/program.pyRFscCs5h}xs|idƒD]b}d|jp d|joqn|diidƒ}|pqn|di}|||<qWd|jo9d|jo,|d|_|d|_|d=|d=nxl|iƒD]^\}}||ijot||i||ƒqÏ||ijod||f|_qÏqÏWdS(NRRRs t 
ProductNametFileDescriptions%s=%s( RRtstripRt iteritemst KEY_TO_ATTRtsetattrtSKIP_KEYR((RRtvaluesRRtkey((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/program.pyRJs*     (u InternalNameuOriginalFilenameu FileVersionu BuildVersion(t__name__t __module__R.tsetR0RRRRR RRRR(((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/program.pyRs&       N( thachoir_metadata.metadataRRthachoir_parser.programRthachoir_metadata.safeRRR(((sB/pentest/enumeration/google/metagoofil/hachoir_metadata/program.pyt<module>s^
4,429
Python
.py
11
401.636364
1,840
0.468432
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,793
image.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/hachoir_metadata/image.py
from hachoir_metadata.metadata import (registerExtractor, Metadata, RootMetadata, MultipleMetadata) from hachoir_parser.image import ( BmpFile, IcoFile, PcxFile, GifFile, PngFile, TiffFile, XcfFile, TargaFile, WMF_File, PsdFile) from hachoir_parser.image.png import getBitsPerPixel as pngBitsPerPixel from hachoir_parser.image.xcf import XcfProperty from hachoir_core.i18n import _ from hachoir_core.error import HACHOIR_ERRORS from hachoir_metadata.safe import fault_tolerant def computeComprRate(meta, compr_size): """ Compute image compression rate. Skip size of color palette, focus on image pixels. Original size is width x height x bpp. Compressed size is an argument (in bits). Set "compr_data" with a string like "1.52x". """ if not meta.has("width") \ or not meta.has("height") \ or not meta.has("bits_per_pixel"): return if not compr_size: return orig_size = meta.get('width') * meta.get('height') * meta.get('bits_per_pixel') meta.compr_rate = float(orig_size) / compr_size class BmpMetadata(RootMetadata): def extract(self, image): if "header" not in image: return hdr = image["header"] self.width = hdr["width"].value self.height = hdr["height"].value bpp = hdr["bpp"].value if bpp: if bpp <= 8 and "used_colors" in hdr: self.nb_colors = hdr["used_colors"].value self.bits_per_pixel = bpp self.compression = hdr["compression"].display self.format_version = u"Microsoft Bitmap version %s" % hdr.getFormatVersion() self.width_dpi = hdr["horizontal_dpi"].value self.height_dpi = hdr["vertical_dpi"].value if "pixels" in image: computeComprRate(self, image["pixels"].size) class TiffMetadata(RootMetadata): key_to_attr = { "img_width": "width", "img_height": "width", # TODO: Enable that (need link to value) # "description": "comment", # "doc_name": "title", # "orientation": "image_orientation", } def extract(self, tiff): if "ifd" in tiff: self.useIFD(tiff["ifd"]) def useIFD(self, ifd): for field in ifd: key = field.name try: attrname = self.key_to_attr[field.name] except KeyError: 
continue if "value" not in field: continue value = field["value"].value setattr(self, attrname, value) class IcoMetadata(MultipleMetadata): color_to_bpp = { 2: 1, 16: 4, 256: 8 } def extract(self, icon): for index, header in enumerate(icon.array("icon_header")): image = Metadata(self) # Read size and colors from header image.width = header["width"].value image.height = header["height"].value bpp = header["bpp"].value nb_colors = header["nb_color"].value if nb_colors != 0: image.nb_colors = nb_colors if bpp == 0 and nb_colors in self.color_to_bpp: bpp = self.color_to_bpp[nb_colors] elif bpp == 0: bpp = 8 image.bits_per_pixel = bpp image.setHeader(_("Icon #%u (%sx%s)") % (1+index, image.get("width", "?"), image.get("height", "?"))) # Read compression from data (if available) key = "icon_data[%u]/header/codec" % index if key in icon: image.compression = icon[key].display key = "icon_data[%u]/pixels" % index if key in icon: computeComprRate(image, icon[key].size) # Store new image self.addGroup("image[%u]" % index, image) class PcxMetadata(RootMetadata): @fault_tolerant def extract(self, pcx): self.width = 1 + pcx["xmax"].value self.height = 1 + pcx["ymax"].value self.width_dpi = pcx["horiz_dpi"].value self.height_dpi = pcx["vert_dpi"].value self.bits_per_pixel = pcx["bpp"].value if 1 <= pcx["bpp"].value <= 8: self.nb_colors = 2 ** pcx["bpp"].value self.compression = _("Run-length encoding (RLE)") self.format_version = "PCX: %s" % pcx["version"].display if "image_data" in pcx: computeComprRate(self, pcx["image_data"].size) class XcfMetadata(RootMetadata): # Map image type to bits/pixel TYPE_TO_BPP = {0: 24, 1: 8, 2: 8} def extract(self, xcf): self.width = xcf["width"].value self.height = xcf["height"].value try: self.bits_per_pixel = self.TYPE_TO_BPP[ xcf["type"].value ] except KeyError: pass self.format_version = xcf["type"].display self.readProperties(xcf) @fault_tolerant def processProperty(self, prop): type = prop["type"].value if type == 
XcfProperty.PROP_PARASITES: for field in prop["data"]: if "name" not in field or "data" not in field: continue if field["name"].value == "gimp-comment": self.comment = field["data"].value elif type == XcfProperty.PROP_COMPRESSION: self.compression = prop["data/compression"].display elif type == XcfProperty.PROP_RESOLUTION: self.width_dpi = int(prop["data/xres"].value) self.height_dpi = int(prop["data/yres"].value) def readProperties(self, xcf): for prop in xcf.array("property"): self.processProperty(prop) class PngMetadata(RootMetadata): TEXT_TO_ATTR = { "software": "producer", } def extract(self, png): if "header" in png: self.useHeader(png["header"]) if "time" in png: self.useTime(png["time"]) if "physical" in png: self.usePhysical(png["physical"]) for comment in png.array("text"): if "text" not in comment: continue keyword = comment["keyword"].value text = comment["text"].value try: key = self.TEXT_TO_ATTR[keyword.lower()] setattr(self, key, text) except KeyError: if keyword.lower() != "comment": self.comment = "%s=%s" % (keyword, text) else: self.comment = text compr_size = sum( data.size for data in png.array("data") ) computeComprRate(self, compr_size) @fault_tolerant def useTime(self, field): self.creation_date = field.value @fault_tolerant def usePhysical(self, field): self.width_dpi = field["pixel_per_unit_x"].value self.height_dpi = field["pixel_per_unit_y"].value @fault_tolerant def useHeader(self, header): self.width = header["width"].value self.height = header["height"].value # Read number of colors and pixel format if "/palette/size" in header: nb_colors = header["/palette/size"].value // 3 else: nb_colors = None if not header["has_palette"].value: if header["has_alpha"].value: self.pixel_format = _("RGBA") else: self.pixel_format = _("RGB") elif "/transparency" in header: self.pixel_format = _("Color index with transparency") if nb_colors: nb_colors -= 1 else: self.pixel_format = _("Color index") self.bits_per_pixel = pngBitsPerPixel(header) if 
nb_colors: self.nb_colors = nb_colors # Read compression, timestamp, etc. self.compression = header["compression"].display class GifMetadata(RootMetadata): def extract(self, gif): self.useScreen(gif["/screen"]) if self.has("bits_per_pixel"): self.nb_colors = (1 << self.get('bits_per_pixel')) self.compression = _("LZW") self.format_version = "GIF version %s" % gif["version"].value for comments in gif.array("comments"): for comment in gif.array(comments.name + "/comment"): self.comment = comment.value if "graphic_ctl/has_transp" in gif and gif["graphic_ctl/has_transp"].value: self.pixel_format = _("Color index with transparency") else: self.pixel_format = _("Color index") @fault_tolerant def useScreen(self, screen): self.width = screen["width"].value self.height = screen["height"].value self.bits_per_pixel = (1 + screen["bpp"].value) class TargaMetadata(RootMetadata): def extract(self, tga): self.width = tga["width"].value self.height = tga["height"].value self.bits_per_pixel = tga["bpp"].value if tga["nb_color"].value: self.nb_colors = tga["nb_color"].value self.compression = tga["codec"].display if "pixels" in tga: computeComprRate(self, tga["pixels"].size) class WmfMetadata(RootMetadata): def extract(self, wmf): if wmf.isAPM(): if "amf_header/rect" in wmf: rect = wmf["amf_header/rect"] self.width = (rect["right"].value - rect["left"].value) self.height = (rect["bottom"].value - rect["top"].value) self.bits_per_pixel = 24 elif wmf.isEMF(): emf = wmf["emf_header"] if "description" in emf: desc = emf["description"].value if "\0" in desc: self.producer, self.title = desc.split("\0", 1) else: self.producer = desc if emf["nb_colors"].value: self.nb_colors = emf["nb_colors"].value self.bits_per_pixel = 8 else: self.bits_per_pixel = 24 self.width = emf["width_px"].value self.height = emf["height_px"].value class PsdMetadata(RootMetadata): @fault_tolerant def extract(self, psd): self.width = psd["width"].value self.height = psd["height"].value self.bits_per_pixel = 
psd["depth"].value * psd["nb_channels"].value self.pixel_format = psd["color_mode"].display self.compression = psd["compression"].display registerExtractor(IcoFile, IcoMetadata) registerExtractor(GifFile, GifMetadata) registerExtractor(XcfFile, XcfMetadata) registerExtractor(TargaFile, TargaMetadata) registerExtractor(PcxFile, PcxMetadata) registerExtractor(BmpFile, BmpMetadata) registerExtractor(PngFile, PngMetadata) registerExtractor(TiffFile, TiffMetadata) registerExtractor(WMF_File, WmfMetadata) registerExtractor(PsdFile, PsdMetadata)
10,863
Python
.py
269
30.684015
85
0.584454
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,794
arcfour.pyc
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/pdfminer/arcfour.pyc
Ñò Î ÈMc@sGdZdefd„ƒYZedjoddkZeiƒndS(s] Python implementation of Arcfour encryption algorithm. This code is in the public domain. tArcfourcBs eZdZd„Zd„ZRS(s >>> Arcfour('Key').process('Plaintext').encode('hex') 'bbf316e8d940af0ad3' >>> Arcfour('Wiki').process('pedia').encode('hex') '1021bf0420' >>> Arcfour('Secret').process('Attack at dawn').encode('hex') '45a01f645fc35b383552544b9bf5' cCs•tdƒ}d}t|ƒ}xUtdƒD]G}|||t|||ƒd}||||||<||<q+W||_d\|_|_dS(Nii(ii(trangetlentxrangetordtstitj(tselftkeyRRtklenR((s:/pentest/enumeration/google/metagoofil/pdfminer/arcfour.pyt__init__s   $! cCs»|i|i}}|i}d}x|D]w}|dd}|||d}||||||<||<|||||d}|tt|ƒ|Aƒ7}q)W|||_|_|S(Ntii(RRRtchrR(RtdataRRRtrtctk((s:/pentest/enumeration/google/metagoofil/pdfminer/arcfour.pytprocess!s (t__name__t __module__t__doc__R R(((s:/pentest/enumeration/google/metagoofil/pdfminer/arcfour.pyR s  t__main__iÿÿÿÿN(RtobjectRRtdoctestttestmod(((s:/pentest/enumeration/google/metagoofil/pdfminer/arcfour.pyt<module>s$  
1,732
Python
.py
12
140.833333
879
0.40954
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,795
pdfcolor.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/pdfminer/pdfcolor.py
#!/usr/bin/env python2 from psparser import LIT ## PDFColorSpace ## LITERAL_DEVICE_GRAY = LIT('DeviceGray') LITERAL_DEVICE_RGB = LIT('DeviceRGB') LITERAL_DEVICE_CMYK = LIT('DeviceCMYK') class PDFColorSpace(object): def __init__(self, name, ncomponents): self.name = name self.ncomponents = ncomponents return def __repr__(self): return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents) PREDEFINED_COLORSPACE = dict( (name, PDFColorSpace(name,n)) for (name,n) in { 'CalRGB': 3, 'CalGray': 1, 'Lab': 3, 'DeviceRGB': 3, 'DeviceCMYK': 4, 'DeviceGray': 1, 'Separation': 1, 'Indexed': 1, 'Pattern': 1, }.iteritems())
701
Python
.py
26
23.307692
84
0.66018
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,796
runlength.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/pdfminer/runlength.py
#!/usr/bin/env python2 # # RunLength decoder (Adobe version) implementation based on PDF Reference # version 1.4 section 3.3.4. # # * public domain * # import sys def rldecode(data): """ RunLength decoder (Adobe version) implementation based on PDF Reference version 1.4 section 3.3.4: The RunLengthDecode filter decodes data that has been encoded in a simple byte-oriented format based on run length. The encoded data is a sequence of runs, where each run consists of a length byte followed by 1 to 128 bytes of data. If the length byte is in the range 0 to 127, the following length + 1 (1 to 128) bytes are copied literally during decompression. If length is in the range 129 to 255, the following single byte is to be copied 257 - length (2 to 128) times during decompression. A length value of 128 denotes EOD. >>> s = "\x05123456\xfa7\x04abcde\x80junk" >>> rldecode(s) '1234567777777abcde' """ decoded = [] i=0 while i < len(data): #print "data[%d]=:%d:" % (i,ord(data[i])) length = ord(data[i]) if length == 128: break if length >= 0 and length < 128: run = data[i+1:(i+1)+(length+1)] #print "length=%d, run=%s" % (length+1,run) decoded.append(run) i = (i+1) + (length+1) if length > 128: run = data[i+1]*(257-length) #print "length=%d, run=%s" % (257-length,run) decoded.append(run) i = (i+1) + 1 return ''.join(decoded) if __name__ == '__main__': import doctest doctest.testmod()
1,666
Python
.py
46
29.217391
75
0.604579
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,797
pdfinterp.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/pdfminer/pdfinterp.py
#!/usr/bin/env python2 import sys import re try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from cmapdb import CMapDB, CMap from psparser import PSException, PSTypeError, PSEOF from psparser import PSKeyword, literal_name, keyword_name from psparser import PSStackParser from psparser import LIT, KWD, STRICT from pdftypes import PDFException, PDFStream, PDFObjRef from pdftypes import resolve1 from pdftypes import int_value, float_value, num_value from pdftypes import str_value, list_value, dict_value, stream_value from pdffont import PDFFontError from pdffont import PDFType1Font, PDFTrueTypeFont, PDFType3Font from pdffont import PDFCIDFont from pdfparser import PDFDocument, PDFParser from pdfparser import PDFPasswordIncorrect from pdfcolor import PDFColorSpace from pdfcolor import PREDEFINED_COLORSPACE from pdfcolor import LITERAL_DEVICE_GRAY, LITERAL_DEVICE_RGB from pdfcolor import LITERAL_DEVICE_CMYK from utils import choplist from utils import mult_matrix, MATRIX_IDENTITY ## Exceptions ## class PDFResourceError(PDFException): pass class PDFInterpreterError(PDFException): pass ## Constants ## LITERAL_PDF = LIT('PDF') LITERAL_TEXT = LIT('Text') LITERAL_FONT = LIT('Font') LITERAL_FORM = LIT('Form') LITERAL_IMAGE = LIT('Image') ## PDFTextState ## class PDFTextState(object): def __init__(self): self.font = None self.fontsize = 0 self.charspace = 0 self.wordspace = 0 self.scaling = 100 self.leading = 0 self.render = 0 self.rise = 0 self.reset() # self.matrix is set # self.linematrix is set return def __repr__(self): return ('<PDFTextState: font=%r, fontsize=%r, charspace=%r, wordspace=%r, ' ' scaling=%r, leading=%r, render=%r, rise=%r, ' ' matrix=%r, linematrix=%r>' % (self.font, self.fontsize, self.charspace, self.wordspace, self.scaling, self.leading, self.render, self.rise, self.matrix, self.linematrix)) def copy(self): obj = PDFTextState() obj.font = self.font obj.fontsize = self.fontsize obj.charspace = self.charspace 
obj.wordspace = self.wordspace obj.scaling = self.scaling obj.leading = self.leading obj.render = self.render obj.rise = self.rise obj.matrix = self.matrix obj.linematrix = self.linematrix return obj def reset(self): self.matrix = MATRIX_IDENTITY self.linematrix = (0, 0) return ## PDFGraphicState ## class PDFGraphicState(object): def __init__(self): self.linewidth = 0 self.linecap = None self.linejoin = None self.miterlimit = None self.dash = None self.intent = None self.flatness = None return def copy(self): obj = PDFGraphicState() obj.linewidth = self.linewidth obj.linecap = self.linecap obj.linejoin = self.linejoin obj.miterlimit = self.miterlimit obj.dash = self.dash obj.intent = self.intent obj.flatness = self.flatness return obj def __repr__(self): return ('<PDFGraphicState: linewidth=%r, linecap=%r, linejoin=%r, ' ' miterlimit=%r, dash=%r, intent=%r, flatness=%r>' % (self.linewidth, self.linecap, self.linejoin, self.miterlimit, self.dash, self.intent, self.flatness)) ## Resource Manager ## class PDFResourceManager(object): """Repository of shared resources. ResourceManager facilitates reuse of shared resources such as fonts and images so that large objects are not allocated multiple times. """ debug = 0 def __init__(self, caching=True): self.caching = caching self._cached_fonts = {} return def get_procset(self, procs): for proc in procs: if proc is LITERAL_PDF: pass elif proc is LITERAL_TEXT: pass else: #raise PDFResourceError('ProcSet %r is not supported.' % proc) pass return def get_cmap(self, cmapname, strict=False): try: return CMapDB.get_cmap(cmapname) except CMapDB.CMapNotFound: if strict: raise return CMap() def get_font(self, objid, spec): if objid and objid in self._cached_fonts: font = self._cached_fonts[objid] else: if 2 <= self.debug: print >>sys.stderr, 'get_font: create: objid=%r, spec=%r' % (objid, spec) if STRICT: if spec['Type'] is not LITERAL_FONT: raise PDFFontError('Type is not /Font') # Create a Font object. 
if 'Subtype' in spec: subtype = literal_name(spec['Subtype']) else: if STRICT: raise PDFFontError('Font Subtype is not specified.') subtype = 'Type1' if subtype in ('Type1', 'MMType1'): # Type1 Font font = PDFType1Font(self, spec) elif subtype == 'TrueType': # TrueType Font font = PDFTrueTypeFont(self, spec) elif subtype == 'Type3': # Type3 Font font = PDFType3Font(self, spec) elif subtype in ('CIDFontType0', 'CIDFontType2'): # CID Font font = PDFCIDFont(self, spec) elif subtype == 'Type0': # Type0 Font dfonts = list_value(spec['DescendantFonts']) assert dfonts subspec = dict_value(dfonts[0]).copy() for k in ('Encoding', 'ToUnicode'): if k in spec: subspec[k] = resolve1(spec[k]) font = self.get_font(None, subspec) else: if STRICT: raise PDFFontError('Invalid Font spec: %r' % spec) font = PDFType1Font(self, spec) # this is so wrong! if objid and self.caching: self._cached_fonts[objid] = font return font ## PDFContentParser ## class PDFContentParser(PSStackParser): def __init__(self, streams): self.streams = streams self.istream = 0 PSStackParser.__init__(self, None) return def fillfp(self): if not self.fp: if self.istream < len(self.streams): strm = stream_value(self.streams[self.istream]) self.istream += 1 else: raise PSEOF('Unexpected EOF, file truncated?') self.fp = StringIO(strm.get_data()) return def seek(self, pos): self.fillfp() PSStackParser.seek(self, pos) return def fillbuf(self): if self.charpos < len(self.buf): return while 1: self.fillfp() self.bufpos = self.fp.tell() self.buf = self.fp.read(self.BUFSIZ) if self.buf: break self.fp = None self.charpos = 0 return def get_inline_data(self, pos, target='EI'): self.seek(pos) i = 0 data = '' while i <= len(target): self.fillbuf() if i: c = self.buf[self.charpos] data += c self.charpos += 1 if len(target) <= i and c.isspace(): i += 1 elif i < len(target) and c == target[i]: i += 1 else: i = 0 else: try: j = self.buf.index(target[0], self.charpos) #print 'found', (0, self.buf[j:j+10]) data += 
self.buf[self.charpos:j+1] self.charpos = j+1 i = 1 except ValueError: data += self.buf[self.charpos:] self.charpos = len(self.buf) data = data[:-(len(target)+1)] # strip the last part data = re.sub(r'(\x0d\x0a|[\x0d\x0a])$', '', data) return (pos, data) def flush(self): self.add_results(*self.popall()) return KEYWORD_BI = KWD('BI') KEYWORD_ID = KWD('ID') KEYWORD_EI = KWD('EI') def do_keyword(self, pos, token): if token is self.KEYWORD_BI: # inline image within a content stream self.start_type(pos, 'inline') elif token is self.KEYWORD_ID: try: (_, objs) = self.end_type('inline') if len(objs) % 2 != 0: raise PSTypeError('Invalid dictionary construct: %r' % objs) d = dict( (literal_name(k), v) for (k,v) in choplist(2, objs) ) (pos, data) = self.get_inline_data(pos+len('ID ')) obj = PDFStream(d, data) self.push((pos, obj)) self.push((pos, self.KEYWORD_EI)) except PSTypeError: if STRICT: raise else: self.push((pos, token)) return ## Interpreter ## class PDFPageInterpreter(object): debug = 0 def __init__(self, rsrcmgr, device): self.rsrcmgr = rsrcmgr self.device = device return def dup(self): return PDFPageInterpreter(self.rsrcmgr, self.device) # init_resources(resources): # Prepare the fonts and XObjects listed in the Resource attribute. 
def init_resources(self, resources): self.resources = resources self.fontmap = {} self.xobjmap = {} self.csmap = PREDEFINED_COLORSPACE.copy() if not resources: return def get_colorspace(spec): if isinstance(spec, list): name = literal_name(spec[0]) else: name = literal_name(spec) if name == 'ICCBased' and isinstance(spec, list) and 2 <= len(spec): return PDFColorSpace(name, stream_value(spec[1])['N']) elif name == 'DeviceN' and isinstance(spec, list) and 2 <= len(spec): return PDFColorSpace(name, len(list_value(spec[1]))) else: return PREDEFINED_COLORSPACE[name] for (k,v) in dict_value(resources).iteritems(): if 2 <= self.debug: print >>sys.stderr, 'Resource: %r: %r' % (k,v) if k == 'Font': for (fontid,spec) in dict_value(v).iteritems(): objid = None if isinstance(spec, PDFObjRef): objid = spec.objid spec = dict_value(spec) self.fontmap[fontid] = self.rsrcmgr.get_font(objid, spec) elif k == 'ColorSpace': for (csid,spec) in dict_value(v).iteritems(): self.csmap[csid] = get_colorspace(resolve1(spec)) elif k == 'ProcSet': self.rsrcmgr.get_procset(list_value(v)) elif k == 'XObject': for (xobjid,xobjstrm) in dict_value(v).iteritems(): self.xobjmap[xobjid] = xobjstrm return # init_state(ctm) # Initialize the text and graphic states for rendering a page. def init_state(self, ctm): # gstack: stack for graphical states. self.gstack = [] self.ctm = ctm self.device.set_ctm(self.ctm) self.textstate = PDFTextState() self.graphicstate = PDFGraphicState() self.curpath = [] # argstack: stack for command arguments. self.argstack = [] # set some global states. 
self.scs = self.ncs = None if self.csmap: self.scs = self.ncs = self.csmap.values()[0] return def push(self, obj): self.argstack.append(obj) return def pop(self, n): if n == 0: return [] x = self.argstack[-n:] self.argstack = self.argstack[:-n] return x def get_current_state(self): return (self.ctm, self.textstate.copy(), self.graphicstate.copy()) def set_current_state(self, state): (self.ctm, self.textstate, self.graphicstate) = state self.device.set_ctm(self.ctm) return # gsave def do_q(self): self.gstack.append(self.get_current_state()) return # grestore def do_Q(self): if self.gstack: self.set_current_state(self.gstack.pop()) return # concat-matrix def do_cm(self, a1, b1, c1, d1, e1, f1): self.ctm = mult_matrix((a1,b1,c1,d1,e1,f1), self.ctm) self.device.set_ctm(self.ctm) return # setlinewidth def do_w(self, linewidth): self.graphicstate.linewidth = linewidth return # setlinecap def do_J(self, linecap): self.graphicstate.linecap = linecap return # setlinejoin def do_j(self, linejoin): self.graphicstate.linejoin = linejoin return # setmiterlimit def do_M(self, miterlimit): self.graphicstate.miterlimit = miterlimit return # setdash def do_d(self, dash, phase): self.graphicstate.dash = (dash, phase) return # setintent def do_ri(self, intent): self.graphicstate.intent = intent return # setflatness def do_i(self, flatness): self.graphicstate.flatness = flatness return # load-gstate def do_gs(self, name): #XXX return # moveto def do_m(self, x, y): self.curpath.append(('m',x,y)) return # lineto def do_l(self, x, y): self.curpath.append(('l',x,y)) return # curveto def do_c(self, x1, y1, x2, y2, x3, y3): self.curpath.append(('c',x1,y1,x2,y2,x3,y3)) return # urveto def do_v(self, x2, y2, x3, y3): self.curpath.append(('v',x2,y2,x3,y3)) return # rveto def do_y(self, x1, y1, x3, y3): self.curpath.append(('y',x1,y1,x3,y3)) return # closepath def do_h(self): self.curpath.append(('h',)) return # rectangle def do_re(self, x, y, w, h): self.curpath.append(('m',x,y)) 
self.curpath.append(('l',x+w,y)) self.curpath.append(('l',x+w,y+h)) self.curpath.append(('l',x,y+h)) self.curpath.append(('h',)) return # stroke def do_S(self): self.device.paint_path(self.graphicstate, True, False, False, self.curpath) self.curpath = [] return # close-and-stroke def do_s(self): self.do_h() self.do_S() return # fill def do_f(self): self.device.paint_path(self.graphicstate, False, True, False, self.curpath) self.curpath = [] return # fill (obsolete) do_F = do_f # fill-even-odd def do_f_a(self): self.device.paint_path(self.graphicstate, False, True, True, self.curpath) self.curpath = [] return # fill-and-stroke def do_B(self): self.device.paint_path(self.graphicstate, True, True, False, self.curpath) self.curpath = [] return # fill-and-stroke-even-odd def do_B_a(self): self.device.paint_path(self.graphicstate, True, True, True, self.curpath) self.curpath = [] return # close-fill-and-stroke def do_b(self): self.do_h() self.do_B() return # close-fill-and-stroke-even-odd def do_b_a(self): self.do_h() self.do_B_a() return # close-only def do_n(self): self.curpath = [] return # clip def do_W(self): return # clip-even-odd def do_W_a(self): return # setcolorspace-stroking def do_CS(self, name): self.scs = self.csmap[literal_name(name)] return # setcolorspace-non-strokine def do_cs(self, name): self.ncs = self.csmap[literal_name(name)] return # setgray-stroking def do_G(self, gray): #self.do_CS(LITERAL_DEVICE_GRAY) return # setgray-non-stroking def do_g(self, gray): #self.do_cs(LITERAL_DEVICE_GRAY) return # setrgb-stroking def do_RG(self, r, g, b): #self.do_CS(LITERAL_DEVICE_RGB) return # setrgb-non-stroking def do_rg(self, r, g, b): #self.do_cs(LITERAL_DEVICE_RGB) return # setcmyk-stroking def do_K(self, c, m, y, k): #self.do_CS(LITERAL_DEVICE_CMYK) return # setcmyk-non-stroking def do_k(self, c, m, y, k): #self.do_cs(LITERAL_DEVICE_CMYK) return # setcolor def do_SCN(self): if self.scs: n = self.scs.ncomponents else: if STRICT: raise PDFInterpreterError('No 
colorspace specified!') n = 1 self.pop(n) return def do_scn(self): if self.ncs: n = self.ncs.ncomponents else: if STRICT: raise PDFInterpreterError('No colorspace specified!') n = 1 self.pop(n) return def do_SC(self): self.do_SCN() return def do_sc(self): self.do_scn() return # sharing-name def do_sh(self, name): return # begin-text def do_BT(self): self.textstate.reset() return # end-text def do_ET(self): return # begin-compat def do_BX(self): return # end-compat def do_EX(self): return # marked content operators def do_MP(self, tag): self.device.do_tag(tag) return def do_DP(self, tag, props): self.device.do_tag(tag, props) return def do_BMC(self, tag): self.device.begin_tag(tag) return def do_BDC(self, tag, props): self.device.begin_tag(tag, props) return def do_EMC(self): self.device.end_tag() return # setcharspace def do_Tc(self, space): self.textstate.charspace = space return # setwordspace def do_Tw(self, space): self.textstate.wordspace = space return # textscale def do_Tz(self, scale): self.textstate.scaling = scale return # setleading def do_TL(self, leading): self.textstate.leading = -leading return # selectfont def do_Tf(self, fontid, fontsize): try: self.textstate.font = self.fontmap[literal_name(fontid)] except KeyError: raise if STRICT: raise PDFInterpreterError('Undefined Font id: %r' % fontid) return self.textstate.fontsize = fontsize return # setrendering def do_Tr(self, render): self.textstate.render = render return # settextrise def do_Ts(self, rise): self.textstate.rise = rise return # text-move def do_Td(self, tx, ty): (a,b,c,d,e,f) = self.textstate.matrix self.textstate.matrix = (a,b,c,d,tx*a+ty*c+e,tx*b+ty*d+f) self.textstate.linematrix = (0, 0) #print >>sys.stderr, 'Td(%r,%r): %r' % (tx,ty,self.textstate) return # text-move def do_TD(self, tx, ty): (a,b,c,d,e,f) = self.textstate.matrix self.textstate.matrix = (a,b,c,d,tx*a+ty*c+e,tx*b+ty*d+f) self.textstate.leading = ty self.textstate.linematrix = (0, 0) #print >>sys.stderr, 'TD(%r,%r): %r' 
% (tx,ty,self.textstate) return # textmatrix def do_Tm(self, a,b,c,d,e,f): self.textstate.matrix = (a,b,c,d,e,f) self.textstate.linematrix = (0, 0) return # nextline def do_T_a(self): (a,b,c,d,e,f) = self.textstate.matrix self.textstate.matrix = (a,b,c,d,self.textstate.leading*c+e,self.textstate.leading*d+f) self.textstate.linematrix = (0, 0) return # show-pos def do_TJ(self, seq): #print >>sys.stderr, 'TJ(%r): %r' % (seq,self.textstate) if self.textstate.font is None: if STRICT: raise PDFInterpreterError('No font specified!') return self.device.render_string(self.textstate, seq) return # show def do_Tj(self, s): self.do_TJ([s]) return # quote def do__q(self, s): self.do_T_a() self.do_TJ([s]) return # doublequote def do__w(self, aw, ac, s): self.do_Tw(aw) self.do_Tc(ac) self.do_TJ([s]) return # inline image def do_BI(self): # never called return def do_ID(self): # never called return def do_EI(self, obj): if 'W' in obj and 'H' in obj: iobjid = str(id(obj)) self.device.begin_figure(iobjid, (0,0,1,1), MATRIX_IDENTITY) self.device.render_image(iobjid, obj) self.device.end_figure(iobjid) return # invoke an XObject def do_Do(self, xobjid): xobjid = literal_name(xobjid) try: xobj = stream_value(self.xobjmap[xobjid]) except KeyError: if STRICT: raise PDFInterpreterError('Undefined xobject id: %r' % xobjid) return if 1 <= self.debug: print >>sys.stderr, 'Processing xobj: %r' % xobj subtype = xobj.get('Subtype') if subtype is LITERAL_FORM and 'BBox' in xobj: interpreter = self.dup() bbox = list_value(xobj['BBox']) matrix = list_value(xobj.get('Matrix', MATRIX_IDENTITY)) # According to PDF reference 1.7 section 4.9.1, XObjects in # earlier PDFs (prior to v1.2) use the page's Resources entry # instead of having their own Resources entry. 
resources = dict_value(xobj.get('Resources')) or self.resources.copy() self.device.begin_figure(xobjid, bbox, matrix) interpreter.render_contents(resources, [xobj], ctm=mult_matrix(matrix, self.ctm)) self.device.end_figure(xobjid) elif subtype is LITERAL_IMAGE and 'Width' in xobj and 'Height' in xobj: self.device.begin_figure(xobjid, (0,0,1,1), MATRIX_IDENTITY) self.device.render_image(xobjid, xobj) self.device.end_figure(xobjid) else: # unsupported xobject type. pass return def process_page(self, page): if 1 <= self.debug: print >>sys.stderr, 'Processing page: %r' % page (x0,y0,x1,y1) = page.mediabox if page.rotate == 90: ctm = (0,-1,1,0, -y0,x1) elif page.rotate == 180: ctm = (-1,0,0,-1, x1,y1) elif page.rotate == 270: ctm = (0,1,-1,0, y1,-x0) else: ctm = (1,0,0,1, -x0,-y0) self.device.begin_page(page, ctm) self.render_contents(page.resources, page.contents, ctm=ctm) self.device.end_page(page) return # render_contents(resources, streams, ctm) # Render the content streams. # This method may be called recursively. 
def render_contents(self, resources, streams, ctm=MATRIX_IDENTITY): if 1 <= self.debug: print >>sys.stderr, ('render_contents: resources=%r, streams=%r, ctm=%r' % (resources, streams, ctm)) self.init_resources(resources) self.init_state(ctm) self.execute(list_value(streams)) return def execute(self, streams): try: parser = PDFContentParser(streams) except PSEOF: # empty page return while 1: try: (_,obj) = parser.nextobject() except PSEOF: break if isinstance(obj, PSKeyword): name = keyword_name(obj) method = 'do_%s' % name.replace('*','_a').replace('"','_w').replace("'",'_q') if hasattr(self, method): func = getattr(self, method) nargs = func.func_code.co_argcount-1 if nargs: args = self.pop(nargs) if 2 <= self.debug: print >>sys.stderr, 'exec: %s %r' % (name, args) if len(args) == nargs: func(*args) else: if 2 <= self.debug: print >>sys.stderr, 'exec: %s' % (name) func() else: if STRICT: raise PDFInterpreterError('Unknown operator: %r' % name) else: self.push(obj) return ## process_pdf ## class PDFTextExtractionNotAllowed(PDFInterpreterError): pass def process_pdf(rsrcmgr, device, fp, pagenos=None, maxpages=0, password='', caching=True, check_extractable=True): # Create a PDF parser object associated with the file object. parser = PDFParser(fp) # Create a PDF document object that stores the document structure. doc = PDFDocument(caching=caching) # Connect the parser and document objects. parser.set_document(doc) doc.set_parser(parser) # Supply the document password for initialization. # (If no password is set, give an empty string.) doc.initialize(password) # Check if the document allows text extraction. If not, abort. if check_extractable and not doc.is_extractable: raise PDFTextExtractionNotAllowed('Text extraction is not allowed: %r' % fp) # Create a PDF interpreter object. interpreter = PDFPageInterpreter(rsrcmgr, device) # Process each page contained in the document. 
for (pageno,page) in enumerate(doc.get_pages()): if pagenos and (pageno not in pagenos): continue interpreter.process_page(page) if maxpages and maxpages <= pageno+1: break return
26,147
Python
.py
770
24.503896
95
0.563001
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,798
lzw.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/pdfminer/lzw.py
#!/usr/bin/env python2 import sys try: from cStringIO import StringIO except ImportError: from StringIO import StringIO ## LZWDecoder ## class LZWDecoder(object): debug = 0 def __init__(self, fp): self.fp = fp self.buff = 0 self.bpos = 8 self.nbits = 9 self.table = None self.prevbuf = None return def readbits(self, bits): v = 0 while 1: # the number of remaining bits we can get from the current buffer. r = 8-self.bpos if bits <= r: # |-----8-bits-----| # |-bpos-|-bits-| | # | |----r----| v = (v<<bits) | ((self.buff>>(r-bits)) & ((1<<bits)-1)) self.bpos += bits break else: # |-----8-bits-----| # |-bpos-|---bits----... # | |----r----| v = (v<<r) | (self.buff & ((1<<r)-1)) bits -= r x = self.fp.read(1) if not x: raise EOFError self.buff = ord(x) self.bpos = 0 return v def feed(self, code): x = '' if code == 256: self.table = [ chr(c) for c in xrange(256) ] # 0-255 self.table.append(None) # 256 self.table.append(None) # 257 self.prevbuf = '' self.nbits = 9 elif code == 257: pass elif not self.prevbuf: x = self.prevbuf = self.table[code] else: if code < len(self.table): x = self.table[code] self.table.append(self.prevbuf+x[0]) else: self.table.append(self.prevbuf+self.prevbuf[0]) x = self.table[code] l = len(self.table) if l == 511: self.nbits = 10 elif l == 1023: self.nbits = 11 elif l == 2047: self.nbits = 12 self.prevbuf = x return x def run(self): while 1: try: code = self.readbits(self.nbits) except EOFError: break x = self.feed(code) yield x if self.debug: print >>sys.stderr, ('nbits=%d, code=%d, output=%r, table=%r' % (self.nbits, code, x, self.table[258:])) return # lzwdecode def lzwdecode(data): """ >>> lzwdecode('\x80\x0b\x60\x50\x22\x0c\x0c\x85\x01') '\x2d\x2d\x2d\x2d\x2d\x41\x2d\x2d\x2d\x42' """ fp = StringIO(data) return ''.join(LZWDecoder(fp).run()) if __name__ == '__main__': import doctest doctest.testmod()
2,787
Python
.py
92
19.228261
79
0.441176
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)
19,799
pdfdevice.py
pwnieexpress_raspberry_pwn/src/pentest/metagoofil/pdfminer/pdfdevice.py
#!/usr/bin/env python2 import sys from utils import mult_matrix, translate_matrix from utils import enc, bbox2str from pdffont import PDFUnicodeNotDefined ## PDFDevice ## class PDFDevice(object): debug = 0 def __init__(self, rsrcmgr): self.rsrcmgr = rsrcmgr self.ctm = None return def __repr__(self): return '<PDFDevice>' def close(self): return def set_ctm(self, ctm): self.ctm = ctm return def begin_tag(self, tag, props=None): return def end_tag(self): return def do_tag(self, tag, props=None): return def begin_page(self, page, ctm): return def end_page(self, page): return def begin_figure(self, name, bbox, matrix): return def end_figure(self, name): return def paint_path(self, graphicstate, stroke, fill, evenodd, path): return def render_image(self, name, stream): return def render_string(self, textstate, seq): return ## PDFTextDevice ## class PDFTextDevice(PDFDevice): def render_string(self, textstate, seq): matrix = mult_matrix(textstate.matrix, self.ctm) font = textstate.font fontsize = textstate.fontsize scaling = textstate.scaling * .01 charspace = textstate.charspace * scaling wordspace = textstate.wordspace * scaling rise = textstate.rise if font.is_multibyte(): wordspace = 0 dxscale = .001 * fontsize * scaling if font.is_vertical(): textstate.linematrix = self.render_string_vertical( seq, matrix, textstate.linematrix, font, fontsize, scaling, charspace, wordspace, rise, dxscale) else: textstate.linematrix = self.render_string_horizontal( seq, matrix, textstate.linematrix, font, fontsize, scaling, charspace, wordspace, rise, dxscale) return def render_string_horizontal(self, seq, matrix, (x,y), font, fontsize, scaling, charspace, wordspace, rise, dxscale): needcharspace = False for obj in seq: if isinstance(obj, int) or isinstance(obj, float): x -= obj*dxscale needcharspace = True else: for cid in font.decode(obj): if needcharspace: x += charspace x += self.render_char(translate_matrix(matrix, (x,y)), font, fontsize, scaling, rise, cid) if cid == 32 and 
wordspace: x += wordspace needcharspace = True return (x, y) def render_string_vertical(self, seq, matrix, (x,y), font, fontsize, scaling, charspace, wordspace, rise, dxscale): needcharspace = False for obj in seq: if isinstance(obj, int) or isinstance(obj, float): y -= obj*dxscale needcharspace = True else: for cid in font.decode(obj): if needcharspace: y += charspace y += self.render_char(translate_matrix(matrix, (x,y)), font, fontsize, scaling, rise, cid) if cid == 32 and wordspace: y += wordspace needcharspace = True return (x, y) def render_char(self, matrix, font, fontsize, scaling, rise, cid): return 0 ## TagExtractor ## class TagExtractor(PDFDevice): def __init__(self, rsrcmgr, outfp, codec='utf-8', debug=0): PDFDevice.__init__(self, rsrcmgr) self.outfp = outfp self.codec = codec self.debug = debug self.pageno = 0 self._stack = [] return def render_string(self, textstate, seq): font = textstate.font text = '' for obj in seq: if not isinstance(obj, str): continue chars = font.decode(obj) for cid in chars: try: char = font.to_unichr(cid) text += char except PDFUnicodeNotDefined: pass self.outfp.write(enc(text, self.codec)) return def begin_page(self, page, ctm): self.outfp.write('<page id="%s" bbox="%s" rotate="%d">' % (self.pageno, bbox2str(page.mediabox), page.rotate)) return def end_page(self, page): self.outfp.write('</page>\n') self.pageno += 1 return def begin_tag(self, tag, props=None): s = '' if isinstance(props, dict): s = ''.join( ' %s="%s"' % (enc(k), enc(str(v))) for (k,v) in sorted(props.iteritems()) ) self.outfp.write('<%s%s>' % (enc(tag.name), s)) self._stack.append(tag) return def end_tag(self): assert self._stack tag = self._stack.pop(-1) self.outfp.write('</%s>' % enc(tag.name)) return def do_tag(self, tag, props=None): self.begin_tag(tag, props) self._stack.pop(-1) return
5,319
Python
.py
149
24.671141
95
0.540362
pwnieexpress/raspberry_pwn
1,024
184
8
GPL-3.0
9/5/2024, 5:12:22 PM (Europe/Amsterdam)