id int64 0 458k | file_name stringlengths 4 119 | file_path stringlengths 14 227 | content stringlengths 24 9.96M | size int64 24 9.96M | language stringclasses 1 value | extension stringclasses 14 values | total_lines int64 1 219k | avg_line_length float64 2.52 4.63M | max_line_length int64 5 9.91M | alphanum_fraction float64 0 1 | repo_name stringlengths 7 101 | repo_stars int64 100 139k | repo_forks int64 0 26.4k | repo_open_issues int64 0 2.27k | repo_license stringclasses 12 values | repo_extraction_date stringclasses 433 values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
22,000 | __init__.py | OctoPrint_OctoPrint/src/octoprint/filemanager/__init__.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import logging
import os
import time
from collections import namedtuple
import octoprint.plugin
import octoprint.util
from octoprint.events import Events, eventManager
from octoprint.util import get_fully_qualified_classname as fqcn
from octoprint.util import yaml
from .analysis import AnalysisQueue, QueueEntry # noqa: F401
from .destinations import FileDestinations # noqa: F401
from .storage import LocalFileStorage # noqa: F401
from .util import AbstractFileWrapper, DiskFileWrapper, StreamWrapper # noqa: F401
# Leaf node types of the extension tree: a mapping binds a list of file
# extensions to a fixed content type, a detector binds them to a callable
# that derives the content type from the file itself.
ContentTypeMapping = namedtuple("ContentTypeMapping", ["extensions", "content_type"])
ContentTypeDetector = namedtuple("ContentTypeDetector", ["extensions", "detector"])

extensions = {}
def full_extension_tree():
    """Build the complete extension tree: the builtin gcode entries plus
    whatever registered ``SlicerPlugin`` implementations and
    ``octoprint.filemanager.extension_tree`` hooks contribute.

    Returns a nested dict mapping type names to sub-dicts or to leaf nodes
    (``ContentTypeMapping``, ``ContentTypeDetector`` or plain extension lists).
    """
    # builtin baseline: machinecode/gcode with the standard gcode extensions
    result = {
        "machinecode": {"gcode": ContentTypeMapping(["gcode", "gco", "g"], "text/plain")}
    }

    def leaf_merger(a, b):
        """Merge two leaf nodes of the tree; raises ValueError to signal
        dict_merge that the pair cannot be merged (unsupported types or a
        mapping/detector mix)."""
        supported_leaf_types = (ContentTypeMapping, ContentTypeDetector, list)
        if not isinstance(a, supported_leaf_types) or not isinstance(
            b, supported_leaf_types
        ):
            raise ValueError()
        # a mapping and a detector cannot be merged with each other, in
        # either order
        if isinstance(a, ContentTypeDetector) and isinstance(b, ContentTypeMapping):
            raise ValueError()
        if isinstance(a, ContentTypeMapping) and isinstance(b, ContentTypeDetector):
            raise ValueError()
        # concatenate the extension lists of both leaves
        a_list = a if isinstance(a, list) else a.extensions
        b_list = b if isinstance(b, list) else b.extensions
        merged = a_list + b_list
        # b (the newer entry) wins for content_type/detector, falling back to a
        content_type = None
        if isinstance(b, ContentTypeMapping):
            content_type = b.content_type
        elif isinstance(a, ContentTypeMapping):
            content_type = a.content_type
        detector = None
        if isinstance(b, ContentTypeDetector):
            detector = b.detector
        elif isinstance(a, ContentTypeDetector):
            detector = a.detector
        # rebuild the most specific leaf type we can
        if content_type is not None:
            return ContentTypeMapping(merged, content_type)
        elif detector is not None:
            return ContentTypeDetector(merged, detector)
        else:
            return merged

    # merge in extension trees provided by slicer plugins
    slicer_plugins = octoprint.plugin.plugin_manager().get_implementations(
        octoprint.plugin.SlicerPlugin
    )
    for plugin in slicer_plugins:
        try:
            plugin_result = plugin.get_slicer_extension_tree()
            if plugin_result is None or not isinstance(plugin_result, dict):
                continue
            octoprint.util.dict_merge(
                result, plugin_result, leaf_merger=leaf_merger, in_place=True
            )
        except Exception:
            # a misbehaving plugin must not break the whole tree
            logging.getLogger(__name__).exception(
                "Exception while retrieving additional extension "
                "tree entries from SlicerPlugin {name}".format(name=plugin._identifier),
                extra={"plugin": plugin._identifier},
            )

    # merge in extension trees provided via the dedicated hook
    extension_tree_hooks = octoprint.plugin.plugin_manager().get_hooks(
        "octoprint.filemanager.extension_tree"
    )
    for name, hook in extension_tree_hooks.items():
        try:
            hook_result = hook()
            if hook_result is None or not isinstance(hook_result, dict):
                continue
            result = octoprint.util.dict_merge(
                result, hook_result, leaf_merger=leaf_merger, in_place=True
            )
        except Exception:
            logging.getLogger(__name__).exception(
                "Exception while retrieving additional extension "
                "tree entries from hook {name}".format(name=name),
                extra={"plugin": name},
            )

    return result
def get_extensions(type, subtree=None):
    """Collect all extensions registered beneath the tree node named ``type``.

    Performs a depth-first search of the extension tree (or the provided
    ``subtree``) and flattens everything below the first node whose key equals
    ``type``. Returns ``None`` if no such node exists.
    """
    if subtree is None:
        subtree = full_extension_tree()

    for node, child in subtree.items():
        if node == type:
            # found the requested node, flatten its whole subtree
            return get_all_extensions(subtree=child)
        if isinstance(child, dict):
            found = get_extensions(type, subtree=child)
            if found:
                return found

    return None
def get_all_extensions(subtree=None):
    """Flatten the given extension (sub)tree into a list of all extensions."""
    if subtree is None:
        subtree = full_extension_tree()

    # leaf nodes are returned directly, matching the original semantics
    if isinstance(subtree, (ContentTypeMapping, ContentTypeDetector)):
        return subtree.extensions
    if isinstance(subtree, (list, tuple)):
        return subtree
    if not isinstance(subtree, dict):
        return []

    collected = []
    for child in subtree.values():
        if isinstance(child, dict):
            collected += get_all_extensions(child)
        elif isinstance(child, (ContentTypeMapping, ContentTypeDetector)):
            collected += child.extensions
        elif isinstance(child, (list, tuple)):
            collected += child
    return collected
def get_path_for_extension(extension, subtree=None):
    """Determine the path (list of node keys, root first) in the extension
    tree under which ``extension`` is registered, or ``None`` if unknown."""
    if subtree is None:
        subtree = full_extension_tree()

    for node, child in subtree.items():
        if isinstance(child, (ContentTypeMapping, ContentTypeDetector)):
            if extension in child.extensions:
                return [node]
        elif isinstance(child, (list, tuple)):
            if extension in child:
                return [node]
        elif isinstance(child, dict):
            sub_path = get_path_for_extension(extension, subtree=child)
            if sub_path:
                return [node] + sub_path

    return None
def get_content_type_mapping_for_extension(extension, subtree=None):
    """Find the leaf node (``ContentTypeMapping``, ``ContentTypeDetector`` or
    plain extension list) in the extension tree that covers ``extension``,
    or ``None`` if it is not registered anywhere."""
    if subtree is None:
        subtree = full_extension_tree()

    for child in subtree.values():
        if isinstance(child, (ContentTypeMapping, ContentTypeDetector)):
            if extension in child.extensions:
                return child
        elif isinstance(child, (list, tuple)):
            if extension in child:
                return child
        elif isinstance(child, dict):
            nested = get_content_type_mapping_for_extension(extension, subtree=child)
            if nested is not None:
                return nested

    return None
def valid_extension(extension, type=None, tree=None):
    """Check whether ``extension`` is registered in the extension tree.

    If ``type`` is given, the extension must be registered beneath that node
    of the tree; otherwise any registered extension is accepted.

    Returns a boolean. Previously this fell off the end and implicitly
    returned ``None`` when the requested ``type`` had no registered
    extensions at all; that is now an explicit ``False``.
    """
    if not type:
        return extension in get_all_extensions(subtree=tree)

    extensions = get_extensions(type, subtree=tree)
    if extensions:
        return extension in extensions
    # no extensions registered under this type -> nothing can be valid
    return False
def valid_file_type(filename, type=None, tree=None):
    """Check whether ``filename``'s extension is valid, optionally limited to
    the tree node ``type``."""
    # strip the leading dot from the extension and normalize case
    ext = os.path.splitext(filename)[1][1:].lower()
    return valid_extension(ext, type=type, tree=tree)
def get_file_type(filename):
    """Return the extension-tree path for ``filename``'s extension, or
    ``None`` if the extension is unknown."""
    ext = os.path.splitext(filename)[1][1:].lower()
    return get_path_for_extension(ext)
def get_mime_type(filename):
    """Determine the MIME type for ``filename``, falling back to
    ``application/octet-stream`` if it cannot be determined."""
    ext = os.path.splitext(filename)[1][1:].lower()
    entry = get_content_type_mapping_for_extension(ext)

    if isinstance(entry, ContentTypeMapping) and entry.content_type is not None:
        return entry.content_type

    if isinstance(entry, ContentTypeDetector) and callable(entry.detector):
        detected = entry.detector(filename)
        if detected is not None:
            return detected

    return "application/octet-stream"
class NoSuchStorage(Exception):
    """Raised when a storage destination is requested that isn't registered."""
class FileManager:
    """Facade coordinating the registered storage adapters, the analysis
    queue, the slicing manager and the printer profile manager.

    All file operations (add/remove/move/copy of files and folders, metadata
    access, slicing, print recovery data) are routed through this class,
    which also fires the corresponding OctoPrint events.

    Fix vs. the previous revision: :meth:`get_additional_metadata` now
    actually returns the storage's result instead of discarding it.
    """

    def __init__(
        self,
        analysis_queue,
        slicing_manager,
        printer_profile_manager,
        initial_storage_managers=None,
    ):
        self._logger = logging.getLogger(__name__)

        self._analysis_queue = analysis_queue
        self._analysis_queue.register_finish_callback(self._on_analysis_finished)

        # maps storage type (e.g. "local") to its storage manager
        self._storage_managers = {}
        if initial_storage_managers:
            self._storage_managers.update(initial_storage_managers)

        self._slicing_manager = slicing_manager
        self._printer_profile_manager = printer_profile_manager

        import threading

        # maps (location, path) job keys to (slicer, source, temp) tuples
        self._slicing_jobs = {}
        self._slicing_jobs_mutex = threading.Lock()

        self._slicing_progress_callbacks = []
        self._last_slicing_progress = None

        self._progress_plugins = []
        self._preprocessor_hooks = {}

        import octoprint.settings

        self._recovery_file = os.path.join(
            octoprint.settings.settings().getBaseFolder("data"),
            "print_recovery_data.yaml",
        )

        # "never", "idle" or "always" - controls backlog processing
        self._analyzeGcode = octoprint.settings.settings().get(["gcodeAnalysis", "runAt"])

    def initialize(self, process_backlog=False):
        """Reload plugin hooks and optionally kick off backlog analysis."""
        self.reload_plugins()
        if process_backlog:
            self.process_backlog()

    def process_backlog(self):
        """Asynchronously enqueue all un-analysed files from every storage."""
        # only check for a backlog if gcodeAnalysis is 'idle' or 'always'
        if self._analyzeGcode == "never":
            return

        def worker():
            self._logger.info(
                "Adding backlog items from all storage types to analysis queue..."
            )
            for storage_type, storage_manager in self._storage_managers.items():
                self._determine_analysis_backlog(storage_type, storage_manager)

        import threading

        thread = threading.Thread(target=worker)
        thread.daemon = True
        thread.start()

    def reload_plugins(self):
        """Refresh the cached progress plugin and preprocessor hook lists."""
        self._progress_plugins = octoprint.plugin.plugin_manager().get_implementations(
            octoprint.plugin.ProgressPlugin
        )
        self._preprocessor_hooks = octoprint.plugin.plugin_manager().get_hooks(
            "octoprint.filemanager.preprocessor"
        )

    def register_slicingprogress_callback(self, callback):
        """Register a callback to be notified about slicing progress."""
        self._slicing_progress_callbacks.append(callback)

    def unregister_slicingprogress_callback(self, callback):
        """Unregister a previously registered slicing progress callback."""
        try:
            self._slicing_progress_callbacks.remove(callback)
        except ValueError:
            # callback was not registered
            pass

    def _determine_analysis_backlog(
        self, storage_type, storage_manager, root=None, high_priority=False
    ):
        """Enqueue the storage's analysis backlog (optionally below ``root``)."""
        counter = 0

        backlog_generator = storage_manager.analysis_backlog
        if root is not None:
            backlog_generator = storage_manager.analysis_backlog_for_path(path=root)

        for entry, path, _ in backlog_generator:
            file_type = get_file_type(path)[-1]
            file_name = storage_manager.split_path(path)

            # we'll use the default printer profile for the backlog since we don't know better
            queue_entry = QueueEntry(
                file_name,
                entry,
                file_type,
                storage_type,
                path,
                self._printer_profile_manager.get_default(),
                None,
            )
            if self._analysis_queue.enqueue(queue_entry, high_priority=high_priority):
                counter += 1

        if root:
            self._logger.info(
                f'Added {counter} items from storage type "{storage_type}" and root "{root}" to analysis queue'
            )
        else:
            self._logger.info(
                f'Added {counter} items from storage type "{storage_type}" to analysis queue'
            )

    def add_storage(self, storage_type, storage_manager):
        """Register a storage manager and enqueue its analysis backlog."""
        self._storage_managers[storage_type] = storage_manager
        self._determine_analysis_backlog(storage_type, storage_manager)

    def remove_storage(self, type):
        """Unregister the storage manager for ``type`` (no-op if unknown)."""
        if type not in self._storage_managers:
            return
        del self._storage_managers[type]

    @property
    def registered_storages(self):
        """List of currently registered storage type identifiers."""
        return list(self._storage_managers.keys())

    @property
    def slicing_enabled(self):
        """Whether the slicing manager currently has slicing enabled."""
        return self._slicing_manager.slicing_enabled

    @property
    def registered_slicers(self):
        """The slicers registered with the slicing manager."""
        return self._slicing_manager.registered_slicers

    @property
    def default_slicer(self):
        """The slicing manager's default slicer."""
        return self._slicing_manager.default_slicer

    def analyse(self, destination, path, printer_profile_id=None):
        """(Re)queue analysis of ``path`` on ``destination``, high priority.

        Returns the enqueue result, or ``False``/``None`` if the file does
        not exist or no queue entry could be built.
        """
        if not self.file_exists(destination, path):
            return

        if printer_profile_id is None:
            printer_profile = self._printer_profile_manager.get_current_or_default()
        else:
            printer_profile = self._printer_profile_manager.get(printer_profile_id)
            if printer_profile is None:
                printer_profile = self._printer_profile_manager.get_current_or_default()

        # drop any stale entry before re-enqueueing with the selected profile
        queue_entry = self._analysis_queue_entry(destination, path)
        self._analysis_queue.dequeue(queue_entry)

        queue_entry = self._analysis_queue_entry(
            destination, path, printer_profile=printer_profile
        )
        if queue_entry:
            return self._analysis_queue.enqueue(queue_entry, high_priority=True)
        return False

    def slice(
        self,
        slicer_name,
        source_location,
        source_path,
        dest_location,
        dest_path,
        position=None,
        profile=None,
        printer_profile_id=None,
        overrides=None,
        display=None,
        callback=None,
        callback_args=None,
    ):
        """Slice ``source_path`` with ``slicer_name``, storing the result at
        ``dest_path``; fires SLICING_* events and runs ``callback`` on success."""
        absolute_source_path = self.path_on_disk(source_location, source_path)

        def stlProcessed(
            source_location,
            source_path,
            tmp_path,
            dest_location,
            dest_path,
            start_time,
            printer_profile_id,
            callback,
            callback_args,
            _error=None,
            _cancelled=False,
            _analysis=None,
        ):
            # completion callback of the slicing manager: fires the
            # corresponding event and, on success, adds the produced file
            try:
                if _error:
                    eventManager().fire(
                        Events.SLICING_FAILED,
                        {
                            "slicer": slicer_name,
                            "stl": source_path,
                            "stl_location": source_location,
                            "gcode": dest_path,
                            "gcode_location": dest_location,
                            "reason": _error,
                        },
                    )
                elif _cancelled:
                    eventManager().fire(
                        Events.SLICING_CANCELLED,
                        {
                            "slicer": slicer_name,
                            "stl": source_path,
                            "stl_location": source_location,
                            "gcode": dest_path,
                            "gcode_location": dest_location,
                        },
                    )
                else:
                    source_meta = self.get_metadata(source_location, source_path)
                    # renamed from "hash" to avoid shadowing the builtin
                    source_hash = source_meta.get("hash", "n/a")

                    import io

                    links = [("model", {"name": source_path})]
                    _, stl_name = self.split_path(source_location, source_path)
                    # prepend a provenance comment line to the sliced gcode
                    file_obj = StreamWrapper(
                        os.path.basename(dest_path),
                        io.BytesIO(
                            f";Generated from {stl_name} (hash: {source_hash})\n".encode(
                                "ascii", "replace"
                            )
                        ),
                        io.FileIO(tmp_path, "rb"),
                    )

                    printer_profile = self._printer_profile_manager.get(
                        printer_profile_id
                    )
                    self.add_file(
                        dest_location,
                        dest_path,
                        file_obj,
                        display=display,
                        links=links,
                        allow_overwrite=True,
                        printer_profile=printer_profile,
                        analysis=_analysis,
                    )

                    end_time = time.monotonic()
                    eventManager().fire(
                        Events.SLICING_DONE,
                        {
                            "slicer": slicer_name,
                            "stl": source_path,
                            "stl_location": source_location,
                            "gcode": dest_path,
                            "gcode_location": dest_location,
                            "time": end_time - start_time,
                        },
                    )

                    if callback is not None:
                        if callback_args is None:
                            callback_args = ()
                        callback(*callback_args)
            finally:
                # always clean up the temporary slicing output and drop the
                # job bookkeeping entries
                os.remove(tmp_path)

                source_job_key = (source_location, source_path)
                dest_job_key = (dest_location, dest_path)

                with self._slicing_jobs_mutex:
                    if source_job_key in self._slicing_jobs:
                        del self._slicing_jobs[source_job_key]
                    if dest_job_key in self._slicing_jobs:
                        del self._slicing_jobs[dest_job_key]

        slicer = self._slicing_manager.get_slicer(slicer_name)

        start_time = time.monotonic()
        eventManager().fire(
            Events.SLICING_STARTED,
            {
                "slicer": slicer_name,
                "stl": source_path,
                "stl_location": source_location,
                "gcode": dest_path,
                "gcode_location": dest_location,
                "progressAvailable": (
                    slicer.get_slicer_properties().get("progress_report", False)
                    if slicer
                    else False
                ),
            },
        )

        import tempfile

        # slicing output goes to a temp file first, added to storage on success
        f = tempfile.NamedTemporaryFile(suffix=".gco", delete=False)
        temp_path = f.name
        f.close()

        with self._slicing_jobs_mutex:
            source_job_key = (source_location, source_path)
            dest_job_key = (dest_location, dest_path)

            # a still running job targeting the same destination gets cancelled
            if dest_job_key in self._slicing_jobs:
                (
                    job_slicer_name,
                    job_absolute_source_path,
                    job_temp_path,
                ) = self._slicing_jobs[dest_job_key]

                self._slicing_manager.cancel_slicing(
                    job_slicer_name, job_absolute_source_path, job_temp_path
                )
                del self._slicing_jobs[dest_job_key]

            self._slicing_jobs[dest_job_key] = self._slicing_jobs[source_job_key] = (
                slicer_name,
                absolute_source_path,
                temp_path,
            )

        args = (
            source_location,
            source_path,
            temp_path,
            dest_location,
            dest_path,
            start_time,
            printer_profile_id,
            callback,
            callback_args,
        )
        self._slicing_manager.slice(
            slicer_name,
            absolute_source_path,
            temp_path,
            profile,
            stlProcessed,
            position=position,
            callback_args=args,
            overrides=overrides,
            printer_profile_id=printer_profile_id,
            on_progress=self.on_slicing_progress,
            on_progress_args=(
                slicer_name,
                source_location,
                source_path,
                dest_location,
                dest_path,
            ),
        )

    def on_slicing_progress(
        self,
        slicer,
        source_location,
        source_path,
        dest_location,
        dest_path,
        _progress=None,
    ):
        """Push slicing progress to registered callbacks and progress plugins."""
        if not _progress:
            return

        progress_int = int(_progress * 100)
        # only push when the integer percentage actually changed
        if self._last_slicing_progress != progress_int:
            self._last_slicing_progress = progress_int
            for callback in self._slicing_progress_callbacks:
                try:
                    callback.sendSlicingProgress(
                        slicer,
                        source_location,
                        source_path,
                        dest_location,
                        dest_path,
                        progress_int,
                    )
                except Exception:
                    self._logger.exception(
                        "Exception while pushing slicing progress",
                        extra={"callback": fqcn(callback)},
                    )

            if progress_int:

                def call_plugins(
                    slicer,
                    source_location,
                    source_path,
                    dest_location,
                    dest_path,
                    progress,
                ):
                    for plugin in self._progress_plugins:
                        try:
                            plugin.on_slicing_progress(
                                slicer,
                                source_location,
                                source_path,
                                dest_location,
                                dest_path,
                                progress,
                            )
                        except Exception:
                            self._logger.exception(
                                "Exception while sending slicing progress to plugin %s"
                                % plugin._identifier,
                                extra={"plugin": plugin._identifier},
                            )

                import threading

                # plugin notification runs off-thread so slow plugins don't
                # stall the slicing progress reporting
                thread = threading.Thread(
                    target=call_plugins,
                    args=(
                        slicer,
                        source_location,
                        source_path,
                        dest_location,
                        dest_path,
                        progress_int,
                    ),
                )
                thread.daemon = False
                thread.start()

    def get_busy_files(self):
        """Job keys (location, path) of all files currently being sliced."""
        return self._slicing_jobs.keys()

    def file_in_path(self, destination, path, file):
        """Whether ``file`` lies within ``path`` on ``destination``."""
        return self._storage(destination).file_in_path(path, file)

    def file_exists(self, destination, path):
        """Whether the file at ``path`` exists on ``destination``."""
        return self._storage(destination).file_exists(path)

    def folder_exists(self, destination, path):
        """Whether the folder at ``path`` exists on ``destination``."""
        return self._storage(destination).folder_exists(path)

    def list_files(
        self,
        locations=None,
        path=None,
        filter=None,
        recursive=None,
        level=0,
        force_refresh=False,
    ):
        """List files per location; defaults to all registered storages."""
        if not locations:
            locations = list(self._storage_managers.keys())
        if isinstance(locations, str):
            locations = [locations]

        result = {}
        for loc in locations:
            result[loc] = self._storage_managers[loc].list_files(
                path=path,
                filter=filter,
                recursive=recursive,
                level=level,
                force_refresh=force_refresh,
            )
        return result

    def add_file(
        self,
        location,
        path,
        file_object,
        links=None,
        allow_overwrite=False,
        printer_profile=None,
        analysis=None,
        display=None,
    ):
        """Add ``file_object`` at ``path`` on ``location``, running
        preprocessor hooks, (re)queueing analysis and firing FILE_ADDED.

        Returns the canonical path in storage of the added file.
        """
        if printer_profile is None:
            printer_profile = self._printer_profile_manager.get_current_or_default()

        path_in_storage = self._storage(location).path_in_storage(path)

        # preprocessor hooks may replace the file object (e.g. to filter content)
        for name, hook in self._preprocessor_hooks.items():
            try:
                hook_file_object = hook(
                    path_in_storage,
                    file_object,
                    links=links,
                    printer_profile=printer_profile,
                    allow_overwrite=allow_overwrite,
                )
            except Exception:
                self._logger.exception(
                    "Error when calling preprocessor hook for plugin {}, ignoring".format(
                        name
                    ),
                    extra={"plugin": name},
                )
                continue

            if hook_file_object is not None:
                file_object = hook_file_object

        # drop any pending analysis for the path before overwriting it
        queue_entry = self._analysis_queue_entry(location, path_in_storage)
        if queue_entry is not None:
            self._analysis_queue.dequeue(queue_entry)

        path_in_storage = self._storage(location).add_file(
            path_in_storage,
            file_object,
            links=links,
            printer_profile=printer_profile,
            allow_overwrite=allow_overwrite,
            display=display,
        )

        queue_entry = self._analysis_queue_entry(
            location,
            path_in_storage,
            printer_profile=printer_profile,
            analysis=analysis,
        )
        if queue_entry:
            self._analysis_queue.enqueue(queue_entry, high_priority=True)

        _, name = self._storage(location).split_path(path_in_storage)
        eventManager().fire(
            Events.FILE_ADDED,
            {
                "storage": location,
                "path": path_in_storage,
                "name": name,
                "type": get_file_type(name),
                "operation": "add",
            },
        )
        eventManager().fire(Events.UPDATED_FILES, {"type": "printables"})
        return path_in_storage

    def remove_file(self, location, path):
        """Remove the file at ``path``, dequeueing any pending analysis."""
        path_in_storage = self._storage(location).path_in_storage(path)

        queue_entry = self._analysis_queue_entry(location, path_in_storage)
        self._analysis_queue.dequeue(queue_entry)
        self._storage(location).remove_file(path_in_storage)

        _, name = self._storage(location).split_path(path_in_storage)
        eventManager().fire(
            Events.FILE_REMOVED,
            {
                "storage": location,
                "path": path,
                "name": name,
                "type": get_file_type(name),
                "operation": "remove",
            },
        )
        eventManager().fire(Events.UPDATED_FILES, {"type": "printables"})

    def copy_file(self, location, source, destination):
        """Copy ``source`` to ``destination`` within ``location``."""
        path_in_storage = self._storage(location).copy_file(source, destination)

        if not self.has_analysis(location, path_in_storage):
            queue_entry = self._analysis_queue_entry(location, path_in_storage)
            if queue_entry:
                self._analysis_queue.enqueue(queue_entry)

        _, name = self._storage(location).split_path(path_in_storage)
        eventManager().fire(
            Events.FILE_ADDED,
            {
                "storage": location,
                "path": path_in_storage,
                "name": name,
                "type": get_file_type(name),
                "operation": "copy",
            },
        )
        eventManager().fire(Events.UPDATED_FILES, {"type": "printables"})

    def move_file(self, location, source, destination):
        """Move ``source`` to ``destination`` within ``location``, firing
        FILE_REMOVED, FILE_ADDED and FILE_MOVED."""
        source_in_storage = self._storage(location).path_in_storage(source)
        destination_in_storage = self._storage(location).path_in_storage(destination)

        queue_entry = self._analysis_queue_entry(location, source_in_storage)
        self._analysis_queue.dequeue(queue_entry)
        path = self._storage(location).move_file(
            source_in_storage, destination_in_storage
        )

        if not self.has_analysis(location, path):
            queue_entry = self._analysis_queue_entry(location, path)
            if queue_entry:
                self._analysis_queue.enqueue(queue_entry)

        _, source_name = self._storage(location).split_path(source_in_storage)
        _, destination_name = self._storage(location).split_path(destination_in_storage)
        eventManager().fire(
            Events.FILE_REMOVED,
            {
                "storage": location,
                "path": source_in_storage,
                "name": source_name,
                "type": get_file_type(source_name),
                "operation": "move",
            },
        )
        eventManager().fire(
            Events.FILE_ADDED,
            {
                "storage": location,
                "path": destination_in_storage,
                "name": destination_name,
                "type": get_file_type(destination_name),
                "operation": "move",
            },
        )
        eventManager().fire(
            Events.FILE_MOVED,
            {
                "storage": location,
                "source_path": source_in_storage,
                "source_name": source_name,
                "source_type": get_file_type(source_name),
                "destination_path": destination_in_storage,
                "destination_name": destination_name,
                "destination_type": get_file_type(destination_name),
            },
        )
        eventManager().fire(Events.UPDATED_FILES, {"type": "printables"})

    def add_folder(self, location, path, ignore_existing=True, display=None):
        """Create a folder at ``path`` and return its path in storage."""
        path_in_storage = self._storage(location).add_folder(
            path, ignore_existing=ignore_existing, display=display
        )

        _, name = self._storage(location).split_path(path_in_storage)
        eventManager().fire(
            Events.FOLDER_ADDED,
            {"storage": location, "path": path_in_storage, "name": name},
        )
        eventManager().fire(Events.UPDATED_FILES, {"type": "printables"})
        return path_in_storage

    def remove_folder(self, location, path, recursive=True):
        """Remove the folder at ``path``, pausing analysis while deleting."""
        path_in_storage = self._storage(location).path_in_storage(path)

        self._analysis_queue.dequeue_folder(location, path_in_storage)
        self._analysis_queue.pause()
        self._storage(location).remove_folder(path_in_storage, recursive=recursive)
        self._analysis_queue.resume()

        _, name = self._storage(location).split_path(path_in_storage)
        eventManager().fire(
            Events.FOLDER_REMOVED,
            {"storage": location, "path": path_in_storage, "name": name},
        )
        eventManager().fire(Events.UPDATED_FILES, {"type": "printables"})

    def copy_folder(self, location, source, destination):
        """Copy a folder and enqueue analysis for its new contents."""
        path_in_storage = self._storage(location).copy_folder(source, destination)

        self._determine_analysis_backlog(
            location, self._storage(location), root=path_in_storage
        )

        _, name = self._storage(location).split_path(path_in_storage)
        eventManager().fire(
            Events.FOLDER_ADDED,
            {"storage": location, "path": path_in_storage, "name": name},
        )
        eventManager().fire(Events.UPDATED_FILES, {"type": "printables"})

    def move_folder(self, location, source, destination):
        """Move a folder, re-enqueueing analysis for the moved contents."""
        source_in_storage = self._storage(location).path_in_storage(source)
        destination_in_storage = self._storage(location).path_in_storage(destination)

        self._analysis_queue.dequeue_folder(location, source_in_storage)
        self._analysis_queue.pause()
        destination_in_storage = self._storage(location).move_folder(
            source_in_storage, destination_in_storage
        )
        self._determine_analysis_backlog(
            location, self._storage(location), root=destination_in_storage
        )
        self._analysis_queue.resume()

        _, source_name = self._storage(location).split_path(source_in_storage)
        _, destination_name = self._storage(location).split_path(destination_in_storage)
        eventManager().fire(
            Events.FOLDER_REMOVED,
            {"storage": location, "path": source_in_storage, "name": source_name},
        )
        eventManager().fire(
            Events.FOLDER_ADDED,
            {
                "storage": location,
                "path": destination_in_storage,
                "name": destination_name,
            },
        )
        eventManager().fire(
            Events.FOLDER_MOVED,
            {
                "storage": location,
                "source_path": source_in_storage,
                "source_name": source_name,
                "destination_path": destination_in_storage,
                "destination_name": destination_name,
            },
        )
        eventManager().fire(Events.UPDATED_FILES, {"type": "printables"})

    def get_size(self, location, path):
        """Size of ``path`` on ``location``; -1 on any error."""
        try:
            return self._storage(location).get_size(path)
        except Exception:
            return -1

    def get_lastmodified(self, location: str, path: str) -> int:
        """Last modification date of ``path`` on ``location``; -1 on error."""
        try:
            return self._storage(location).get_lastmodified(path)
        except Exception:
            return -1

    def has_analysis(self, location, path):
        """Whether analysis results exist for ``path`` on ``location``."""
        return self._storage(location).has_analysis(path)

    def get_metadata(self, location, path):
        """Metadata stored for ``path`` on ``location``."""
        return self._storage(location).get_metadata(path)

    def add_link(self, location, path, rel, data):
        """Add a link with relation ``rel`` to ``path`` on ``location``."""
        self._storage(location).add_link(path, rel, data)

    def remove_link(self, location, path, rel, data):
        """Remove a link with relation ``rel`` from ``path`` on ``location``."""
        self._storage(location).remove_link(path, rel, data)

    def log_print(self, location, path, timestamp, print_time, success, printer_profile):
        """Record a print job (success or failure) in the file's history."""
        try:
            if success:
                self._storage(location).add_history(
                    path,
                    {
                        "timestamp": timestamp,
                        "printTime": print_time,
                        "success": success,
                        "printerProfile": printer_profile,
                    },
                )
            else:
                # failed prints get no printTime recorded
                self._storage(location).add_history(
                    path,
                    {
                        "timestamp": timestamp,
                        "success": success,
                        "printerProfile": printer_profile,
                    },
                )
            eventManager().fire(
                Events.METADATA_STATISTICS_UPDATED, {"storage": location, "path": path}
            )
        except NoSuchStorage:
            # if there's no storage configured where to log the print, we'll just not log it
            pass

    def save_recovery_data(self, location, path, pos):
        """Persist the current print position for crash/power-loss recovery."""
        import time

        from octoprint.util import atomic_write

        data = {
            "origin": location,
            "path": self.path_in_storage(location, path),
            "pos": pos,
            "date": time.time(),
        }

        try:
            with atomic_write(self._recovery_file, mode="wt", max_permissions=0o666) as f:
                yaml.save_to_file(data, file=f, pretty=True)
        except Exception:
            self._logger.exception(
                f"Could not write recovery data to file {self._recovery_file}"
            )

    def delete_recovery_data(self):
        """Delete the recovery data file, if present."""
        if not os.path.isfile(self._recovery_file):
            return

        try:
            os.remove(self._recovery_file)
        except Exception:
            self._logger.exception(
                f"Error deleting recovery data file {self._recovery_file}"
            )

    def get_recovery_data(self):
        """Load and validate recovery data; returns ``None`` if absent.

        Invalid or unreadable recovery files are deleted on the spot.
        """
        if not os.path.isfile(self._recovery_file):
            return None

        try:
            data = yaml.load_from_file(path=self._recovery_file)
            if not isinstance(data, dict) or not all(
                map(lambda x: x in data, ("origin", "path", "pos", "date"))
            ):
                raise ValueError("Invalid recovery data structure")
            return data
        except Exception:
            self._logger.exception(
                f"Could not read recovery data from file {self._recovery_file}"
            )
            self.delete_recovery_data()

    def get_additional_metadata(self, location, path, key):
        """Fetch additional metadata ``key`` for ``path`` on ``location``.

        Fixed: the previous revision called the storage getter but never
        returned its result, so this method always yielded ``None``.
        """
        return self._storage(location).get_additional_metadata(path, key)

    def set_additional_metadata(
        self, location, path, key, data, overwrite=False, merge=False
    ):
        """Store additional metadata ``key`` for ``path`` on ``location``."""
        self._storage(location).set_additional_metadata(
            path, key, data, overwrite=overwrite, merge=merge
        )

    def remove_additional_metadata(self, location, path, key):
        """Remove additional metadata ``key`` from ``path`` on ``location``."""
        self._storage(location).remove_additional_metadata(path, key)

    def path_on_disk(self, location, path):
        """Absolute path on disk corresponding to ``path`` on ``location``."""
        return self._storage(location).path_on_disk(path)

    def canonicalize(self, location, path):
        """Canonical form of ``path`` on ``location``."""
        return self._storage(location).canonicalize(path)

    def sanitize(self, location, path):
        """Sanitize ``path`` according to ``location``'s rules."""
        return self._storage(location).sanitize(path)

    def sanitize_name(self, location, name):
        """Sanitize a file name according to ``location``'s rules."""
        return self._storage(location).sanitize_name(name)

    def sanitize_path(self, location, path):
        """Sanitize a folder path according to ``location``'s rules."""
        return self._storage(location).sanitize_path(path)

    def split_path(self, location, path):
        """Split ``path`` into (folder, name) according to ``location``."""
        return self._storage(location).split_path(path)

    def join_path(self, location, *path):
        """Join path segments according to ``location``'s rules."""
        return self._storage(location).join_path(*path)

    def path_in_storage(self, location, path):
        """Storage-relative form of ``path`` on ``location``."""
        return self._storage(location).path_in_storage(path)

    def last_modified(self, location, path=None, recursive=False):
        """Last modification date of ``path`` (or storage root) on ``location``."""
        return self._storage(location).last_modified(path=path, recursive=recursive)

    def _storage(self, location):
        """Resolve ``location`` to its storage manager or raise NoSuchStorage."""
        if location not in self._storage_managers:
            raise NoSuchStorage(f"No storage configured for destination {location}")
        return self._storage_managers[location]

    def _add_analysis_result(self, location, path, result):
        """Persist an analysis result as additional metadata on the file."""
        if location not in self._storage_managers:
            return
        if not result:
            return

        storage_manager = self._storage_managers[location]
        storage_manager.set_additional_metadata(path, "analysis", result, overwrite=True)

    def _on_analysis_finished(self, entry, result):
        """Analysis queue finish callback: store the result on the file."""
        self._add_analysis_result(entry.location, entry.path, result)

    def _analysis_queue_entry(self, location, path, printer_profile=None, analysis=None):
        """Build a QueueEntry for ``path``; ``None`` if its type is unknown."""
        if printer_profile is None:
            printer_profile = self._printer_profile_manager.get_current_or_default()

        path_in_storage = self._storage(location).path_in_storage(path)
        absolute_path = self._storage(location).path_on_disk(path)
        _, file_name = self._storage(location).split_path(path)
        file_type = get_file_type(absolute_path)

        if file_type:
            return QueueEntry(
                file_name,
                path_in_storage,
                file_type[-1],
                location,
                absolute_path,
                printer_profile,
                analysis,
            )
        else:
            return None
| 38,693 | Python | .py | 942 | 28.169851 | 111 | 0.558515 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,001 | storage.py | OctoPrint_OctoPrint/src/octoprint/filemanager/storage.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import copy
import logging
import os
import shutil
from contextlib import contextmanager
from os import scandir, walk
import pylru
import octoprint.filemanager
from octoprint.util import (
atomic_write,
is_hidden_path,
time_this,
to_bytes,
to_unicode,
yaml,
)
from octoprint.util.files import sanitize_filename
class StorageInterface:
"""
Interface of storage adapters for OctoPrint.
"""
# noinspection PyUnreachableCode
@property
def analysis_backlog(self):
"""
Get an iterator over all items stored in the storage that need to be analysed by the :class:`~octoprint.filemanager.AnalysisQueue`.
The yielded elements are expected as storage specific absolute paths to the respective files. Don't forget
to recurse into folders if your storage adapter supports those.
:return: an iterator yielding all un-analysed files in the storage
"""
# empty generator pattern, yield is intentionally unreachable
return
yield
    # noinspection PyUnreachableCode
    def analysis_backlog_for_path(self, path=None):
        """
        Get an iterator over the items stored under ``path`` that still need to be
        analysed by the :class:`~octoprint.filemanager.AnalysisQueue`.

        Like :attr:`analysis_backlog`, but restricted to the subtree beneath
        ``path`` (or the whole storage if ``path`` is ``None``).

        :return: an iterator yielding all un-analysed files under ``path``
        """
        # empty generator pattern, yield is intentionally unreachable
        return
        yield
    def last_modified(self, path=None, recursive=False):
        """
        Get the last modification date of the specified ``path`` or ``path``'s subtree.

        Args:
            path (str or None): Path for which to determine the subtree's last modification date. If left out or
                set to None, defaults to storage root.
            recursive (bool): Whether to determine only the date of the specified ``path`` (False, default) or
                the whole ``path``'s subtree (True).

        Returns: (float) The last modification date of the indicated subtree
        """
        raise NotImplementedError()
    def get_size(self, path=None, recursive=False) -> int:
        """
        Get the size of the specified ``path`` or ``path``'s subtree.

        Args:
            path (str or None): Path for which to determine the subtree's size. If left out or
                set to None, defaults to storage root.
            recursive (bool): Whether to determine only the size of the specified ``path`` (False, default) or
                the whole ``path``'s subtree (True).

        Returns: (int) The size of the indicated file or subtree
        """
        raise NotImplementedError()
    def get_lastmodified(self, path: str = None, recursive: bool = False) -> int:
        """
        Get the modification date of the specified ``path`` or ``path``'s subtree.

        Args:
            path (str or None): Path for which to determine the modification date. If left out or
                set to None, defaults to storage root.
            recursive (bool): Whether to determine only the date of the specified ``path`` (False, default) or
                the whole ``path``'s subtree (True).

        Returns: (int) The modification date of the indicated file or subtree
        """
        raise NotImplementedError()
def file_in_path(self, path, filepath):
"""
Returns whether the file indicated by ``file`` is inside ``path`` or not.
:param string path: the path to check
:param string filepath: path to the file
:return: ``True`` if the file is inside the path, ``False`` otherwise
"""
return NotImplementedError()
    def file_exists(self, path):
        """
        Returns whether the file indicated by ``path`` exists or not.

        :param string path: the path to check for existence
        :return: ``True`` if the file exists, ``False`` otherwise
        :raises NotImplementedError: if not overridden by the storage implementation
        """
        raise NotImplementedError()
    def folder_exists(self, path):
        """
        Returns whether the folder indicated by ``path`` exists or not.

        :param string path: the path to check for existence
        :return: ``True`` if the folder exists, ``False`` otherwise
        :raises NotImplementedError: if not overridden by the storage implementation
        """
        raise NotImplementedError()
def list_files(
    self, path=None, filter=None, recursive=True, level=0, force_refresh=False
):
    """
    List all files in storage starting at ``path``. If ``recursive`` is set to True (the default), also dives into
    subfolders.

    An optional filter function can be supplied which will be called with a file name and file data and which has
    to return True if the file is to be included in the result or False if not.

    The data structure of the returned result will be a dictionary mapping from file names to entry data. File nodes
    will contain their metadata here, folder nodes will contain their contained files and folders. Example::

        {
            "some_folder": {
                "name": "some_folder",
                "path": "some_folder",
                "type": "folder",
                "children": {
                    "some_sub_folder": {
                        "name": "some_sub_folder",
                        "path": "some_folder/some_sub_folder",
                        "type": "folder",
                        "typePath": ["folder"],
                        "children": { ... }
                    },
                    "some_file.gcode": {
                        "name": "some_file.gcode",
                        "path": "some_folder/some_file.gcode",
                        "type": "machinecode",
                        "typePath": ["machinecode", "gcode"],
                        "hash": "<sha1 hash>",
                        "links": [ ... ],
                        ...
                    },
                    ...
                }
            "test.gcode": {
                "name": "test.gcode",
                "path": "test.gcode",
                "type": "machinecode",
                "typePath": ["machinecode", "gcode"],
                "hash": "<sha1 hash>",
                "links": [...],
                ...
            },
            "test.stl": {
                "name": "test.stl",
                "path": "test.stl",
                "type": "model",
                "typePath": ["model", "stl"],
                "hash": "<sha1 hash>",
                "links": [...],
                ...
            },
            ...
        }

    :param string path: base path from which to recursively list all files, optional, if not supplied listing will start
        from root of base folder
    :param function filter: a filter that matches the files that are to be returned, may be left out in which case no
        filtering will take place
    :param bool recursive: will also step into sub folders for building the complete list if set to True, otherwise will only
        do one step down into sub folders to be able to populate the ``children``.
    :param int level: recursion depth indicator used by implementations when limiting a non-recursive
        listing -- TODO confirm exact semantics with implementations
    :param bool force_refresh: if set to True, implementations should bypass any caching and re-read
        the file list from storage
    :return: a dictionary mapping entry names to entry data that represents the whole file list
    """
    raise NotImplementedError()
def add_folder(self, path, ignore_existing=True, display=None):
    """
    Adds a folder as ``path``

    The ``path`` will be sanitized.

    :param string path: the path of the new folder
    :param bool ignore_existing: if set to True, no error will be raised if the folder to be added already exists
    :param str display: display name of the folder
    :return: the sanitized name of the new folder to be used for future references to the folder
    :raises StorageError: if the folder already exists and ``ignore_existing`` is False
    """
    raise NotImplementedError()
def remove_folder(self, path, recursive=True):
    """
    Removes the folder at ``path``

    :param string path: the path of the folder to remove
    :param bool recursive: if set to True, contained folders and files will also be removed, otherwise an error will
        be raised if the folder is not empty (apart from any metadata files) when it's to be removed
    :raises StorageError: if the folder is not empty and ``recursive`` is not set
    """
    raise NotImplementedError()
def copy_folder(self, source, destination):
    """
    Copies the folder ``source`` to ``destination``

    :param string source: path to the source folder
    :param string destination: path to destination
    :return: the path in the storage to the copy of the folder
    :raises StorageError: if the copy could not be performed
    """
    raise NotImplementedError()
def move_folder(self, source, destination):
    """
    Moves the folder ``source`` to ``destination``

    :param string source: path to the source folder
    :param string destination: path to destination
    :return: the new path in the storage to the folder
    :raises StorageError: if the move could not be performed
    """
    raise NotImplementedError()
def add_file(
    self,
    path,
    file_object,
    printer_profile=None,
    links=None,
    allow_overwrite=False,
    display=None,
):
    """
    Adds the file ``file_object`` as ``path``

    :param string path: the file's new path, will be sanitized
    :param object file_object: a file object that provides a ``save`` method which will be called with the destination path
        where the object should then store its contents
    :param object printer_profile: the printer profile associated with this file (if any)
    :param list links: any links to add with the file
    :param bool allow_overwrite: if set to True no error will be raised if the file already exists and the existing file
        and its metadata will just be silently overwritten
    :param str display: display name of the file
    :return: the sanitized name of the file to be used for future references to it
    :raises StorageError: if the file type is not supported or the file already exists and ``allow_overwrite`` is not set
    """
    raise NotImplementedError()
def remove_file(self, path):
    """
    Removes the file at ``path``

    Will also take care of deleting the corresponding entries
    in the metadata and deleting all links pointing to the file.

    :param string path: path of the file to remove
    :raises StorageError: if the file could not be removed
    """
    raise NotImplementedError()
def copy_file(self, source, destination):
    """
    Copies the file ``source`` to ``destination``

    :param string source: path to the source file
    :param string destination: path to destination
    :return: the path in the storage to the copy of the file
    :raises StorageError: if the copy could not be performed
    """
    raise NotImplementedError()
def move_file(self, source, destination):
    """
    Moves the file ``source`` to ``destination``

    :param string source: path to the source file
    :param string destination: path to destination
    :return: the new path in the storage to the file
    :raises StorageError: if the move could not be performed
    """
    raise NotImplementedError()
def has_analysis(self, path):
    """
    Returns whether the file at path has been analysed yet

    :param path: virtual path to the file for which to retrieve the metadata
    :return: ``True`` if an analysis result is stored for the file, ``False`` otherwise
    """
    raise NotImplementedError()
def get_metadata(self, path):
    """
    Retrieves the metadata for the file ``path``.

    :param path: virtual path to the file for which to retrieve the metadata
    :return: the metadata associated with the file
    """
    raise NotImplementedError()
def add_link(self, path, rel, data):
    """
    Adds a link of relation ``rel`` to file ``path`` with the given ``data``.

    The following relation types are currently supported:

    * ``model``: adds a link to a model from which the file was created/sliced, expected additional data is the ``name``
      and optionally the ``hash`` of the file to link to. If the link can be resolved against another file on the
      current ``path``, not only will it be added to the links of ``name`` but a reverse link of type ``machinecode``
      referring to ``name`` and its hash will also be added to the linked ``model`` file
    * ``machinecode``: adds a link to a file containing machine code created from the current file (model), expected
      additional data is the ``name`` and optionally the ``hash`` of the file to link to. If the link can be resolved
      against another file on the current ``path``, not only will it be added to the links of ``name`` but a reverse
      link of type ``model`` referring to ``name`` and its hash will also be added to the linked ``machinecode`` file.
    * ``web``: adds a location on the web associated with this file (e.g. a website where to download a model),
      expected additional data is a ``href`` attribute holding the website's URL and optionally a ``retrieved``
      attribute describing when the content was retrieved

    Note that adding ``model`` links to files identifying as models or ``machinecode`` links to files identifying
    as machine code will be refused.

    :param path: path of the file for which to add a link
    :param rel: type of relation of the link to add (currently ``model``, ``machinecode`` and ``web`` are supported)
    :param data: additional data of the link to add
    """
    raise NotImplementedError()
def remove_link(self, path, rel, data):
    """
    Removes the link consisting of ``rel`` and ``data`` from the file at ``path``.

    :param path: path of the file from which to remove the link
    :param rel: type of relation of the link to remove (currently ``model``, ``machinecode`` and ``web`` are supported)
    :param data: additional data of the link to remove, must match existing link
    """
    raise NotImplementedError()
def get_additional_metadata(self, path, key):
    """
    Fetches additional metadata at ``key`` from the metadata of ``path``.

    :param path: the virtual path to the file for which to fetch additional metadata
    :param key: key of metadata to fetch
    :return: the metadata stored under ``key``, if any
    """
    raise NotImplementedError()
def set_additional_metadata(self, path, key, data, overwrite=False, merge=False):
    """
    Adds additional metadata to the metadata of ``path``. Metadata in ``data`` will be saved under ``key``.

    If ``overwrite`` is set and ``key`` already exists in ``path``'s metadata, the current value will be overwritten.

    If ``merge`` is set and ``key`` already exists and both ``data`` and the existing data under ``key`` are dictionaries,
    the two dictionaries will be merged recursively.

    :param path: the virtual path to the file for which to add additional metadata
    :param key: key of metadata to add
    :param data: metadata to add
    :param overwrite: if True and ``key`` already exists, it will be overwritten
    :param merge: if True and ``key`` already exists and both ``data`` and the existing data are dictionaries, they
        will be merged
    """
    raise NotImplementedError()
def remove_additional_metadata(self, path, key):
    """
    Removes additional metadata under ``key`` from the metadata of ``path``

    :param path: the virtual path to the file for which to remove the metadata under ``key``
    :param key: the key to remove
    """
    raise NotImplementedError()
def canonicalize(self, path):
    """
    Canonicalizes the given ``path``. The ``path`` may consist of both folder and file name, the underlying
    implementation must separate those if necessary.

    By default, this calls :func:`~octoprint.filemanager.StorageInterface.sanitize`, which also takes care
    of stripping any invalid characters.

    Args:
        path: the path to canonicalize

    Returns:
        a 2-tuple containing the canonicalized path and file name
    """
    return self.sanitize(path)
def sanitize(self, path):
    """
    Sanitizes the given ``path``, stripping it of all invalid characters. The ``path`` may consist of both
    folder and file name, the underlying implementation must separate those if necessary and sanitize individually.

    :param string path: the path to sanitize
    :return: a 2-tuple containing the sanitized path and file name
    """
    raise NotImplementedError()
def sanitize_path(self, path):
    """
    Sanitizes the given folder-only ``path``, stripping it of all invalid characters.

    :param string path: the path to sanitize
    :return: the sanitized path
    """
    raise NotImplementedError()
def sanitize_name(self, name):
    """
    Sanitizes the given file ``name``, stripping it of all invalid characters.

    :param string name: the file name to sanitize
    :return: the sanitized name
    """
    raise NotImplementedError()
def split_path(self, path):
    """
    Split ``path`` into base directory and file name.

    The base directory is empty for entries that live directly at the storage root.

    :param path: the path to split
    :return: a tuple (base directory, file name)
    """
    raise NotImplementedError()
def join_path(self, *path):
    """
    Join path elements together

    :param path: path elements to join
    :return: joined representation of the path to be usable as fully qualified path for further operations
    """
    raise NotImplementedError()
def path_on_disk(self, path):
    """
    Retrieves the path on disk for ``path``.

    Note: if the storage is not on disk and there exists no path on disk to refer to it, this method should
    raise an :class:`io.UnsupportedOperation`

    Opposite of :func:`path_in_storage`.

    :param string path: the virtual path for which to retrieve the path on disk
    :return: the path on disk to ``path``
    """
    raise NotImplementedError()
def path_in_storage(self, path):
    """
    Retrieves the equivalent in the storage adapter for ``path``.

    Opposite of :func:`path_on_disk`.

    :param string path: the path for which to retrieve the storage path
    :return: the path in storage to ``path``
    """
    raise NotImplementedError()
class StorageError(Exception):
    """
    Raised by storage implementations on failed operations.

    Carries a machine readable error ``code`` (one of the class level
    constants below) and optionally the causing exception in ``cause``.
    """

    # machine readable error codes identifying the kind of failure
    UNKNOWN = "unknown"
    INVALID_DIRECTORY = "invalid_directory"
    INVALID_FILE = "invalid_file"
    INVALID_SOURCE = "invalid_source"
    INVALID_DESTINATION = "invalid_destination"
    DOES_NOT_EXIST = "does_not_exist"
    ALREADY_EXISTS = "already_exists"
    SOURCE_EQUALS_DESTINATION = "source_equals_destination"
    NOT_EMPTY = "not_empty"

    def __init__(self, message, code=None, cause=None):
        """
        :param str message: human readable error message
        :param str code: one of the class level error codes, defaults to ``UNKNOWN``
        :param Exception cause: the causing exception, if any
        """
        # cooperative super call instead of hardcoding the Exception base
        super().__init__(message)
        self.cause = cause

        if code is None:
            code = StorageError.UNKNOWN
        self.code = code
class LocalFileStorage(StorageInterface):
"""
The ``LocalFileStorage`` is a storage implementation which holds all files, folders and metadata on disk.
Metadata is managed inside ``.metadata.json`` files in the respective folders, indexed by the sanitized filenames
stored within the folder. Metadata access is managed through an LRU cache to minimize access overhead.
This storage type implements :func:`path_on_disk`.
"""
def __init__(self, basefolder, create=False, really_universal=False):
    """
    Initializes a ``LocalFileStorage`` instance under the given ``basefolder``, creating the necessary folder
    if necessary and ``create`` is set to ``True``.

    :param string basefolder: the path to the folder under which to create the storage
    :param bool create: ``True`` if the folder should be created if it doesn't exist yet, ``False`` otherwise
    :param bool really_universal: ``True`` if the file names should be forced to really universal, ``False`` otherwise
    :raises StorageError: if the base folder is missing (and could not be created) or is not a directory
    """
    self._logger = logging.getLogger(__name__)

    # resolve the base folder to an absolute, symlink-free path - all later
    # containment checks (see sanitize_path) compare against this value
    self.basefolder = os.path.realpath(os.path.abspath(to_unicode(basefolder)))
    if not os.path.exists(self.basefolder) and create:
        os.makedirs(self.basefolder)
    if not os.path.exists(self.basefolder) or not os.path.isdir(self.basefolder):
        raise StorageError(
            f"{basefolder} is not a valid directory",
            code=StorageError.INVALID_DIRECTORY,
        )

    self._really_universal = really_universal

    import threading

    # locks guarding in-memory metadata and metadata persisted to disk,
    # managed per path via the two mutex-protected dicts
    self._metadata_lock_mutex = threading.RLock()
    self._metadata_locks = {}
    self._persisted_metadata_lock_mutex = threading.RLock()
    self._persisted_metadata_locks = {}

    # LRU cache of per-folder metadata, bounded to 100 folders
    self._metadata_cache = pylru.lrucache(100)

    # cache of folder listings, guarded by its own mutex
    self._filelist_cache = {}
    self._filelist_cache_mutex = threading.RLock()

    # holds legacy metadata.yaml content during migration, see _initialize_metadata
    self._old_metadata = None
    self._initialize_metadata()
def _initialize_metadata(self):
    """
    Primes the per-folder metadata by listing the base folder once, migrating
    any legacy ``metadata.yaml`` and renaming it to ``metadata.yaml.backup``
    afterwards.
    """
    self._logger.info(f"Initializing the file metadata for {self.basefolder}...")

    old_metadata_path = os.path.join(self.basefolder, "metadata.yaml")
    backup_path = os.path.join(self.basefolder, "metadata.yaml.backup")

    if os.path.exists(old_metadata_path):
        # load the old metadata file
        try:
            self._old_metadata = yaml.load_from_file(path=old_metadata_path)
        except Exception:
            self._logger.exception("Error while loading old metadata file")

        # make sure the metadata is initialized as far as possible
        self._list_folder(self.basefolder)

        # rename the old metadata file
        self._old_metadata = None
        try:
            import shutil

            shutil.move(old_metadata_path, backup_path)
        except Exception:
            self._logger.exception("Could not rename old metadata.yaml file")

    else:
        # make sure the metadata is initialized as far as possible
        self._list_folder(self.basefolder)

    self._logger.info(
        f"... file metadata for {self.basefolder} initialized successfully."
    )
@property
def analysis_backlog(self):
    # convenience accessor: backlog of files missing analysis for the whole storage
    return self.analysis_backlog_for_path()
def analysis_backlog_for_path(self, path=None):
    """
    Yields ``(name, path, printer profile id)`` tuples for all files below
    ``path`` that still lack analysis results.

    NOTE: this is a generator function, so sanitization only happens once
    iteration actually starts, not at call time.
    """
    if path:
        path = self.sanitize_path(path)

    yield from self._analysis_backlog_generator(path)
def _analysis_backlog_generator(self, path=None):
    """
    Recursively yields ``(name, path, printer profile id)`` 3-tuples for all
    valid files below ``path`` (defaults to the storage root) whose metadata
    does not contain an ``analysis`` entry yet.
    """
    if path is None:
        path = self.basefolder

    metadata = self._get_metadata(path)
    if not metadata:
        metadata = {}
    for entry in scandir(path):
        if is_hidden_path(entry.name):
            continue

        if entry.is_file() and octoprint.filemanager.valid_file_type(entry.name):
            if (
                entry.name not in metadata
                or not isinstance(metadata[entry.name], dict)
                or "analysis" not in metadata[entry.name]
            ):
                # not analysed yet - if a printer profile is linked to the
                # file, report its id alongside
                printer_profile_rels = self.get_link(entry.path, "printerprofile")
                if printer_profile_rels:
                    printer_profile_id = printer_profile_rels[0]["id"]
                else:
                    printer_profile_id = None

                yield entry.name, entry.path, printer_profile_id
        elif os.path.isdir(entry.path):
            # recurse into subfolders, prefixing yielded names with the folder name
            for sub_entry in self._analysis_backlog_generator(entry.path):
                yield (
                    self.join_path(entry.name, sub_entry[0]),
                    sub_entry[1],
                    sub_entry[2],
                )
def last_modified(self, path=None, recursive=False):
    """
    Returns the last modification timestamp of ``path`` (defaults to the
    storage root), also considering the folder's ``.metadata.json`` and,
    with ``recursive`` set, the whole subtree.
    """
    if path is None:
        path = self.basefolder
    else:
        path = os.path.join(self.basefolder, path)

    def last_modified_for_path(p):
        # a folder counts as modified whenever its metadata file was updated, too
        metadata = os.path.join(p, ".metadata.json")
        if os.path.exists(metadata):
            return max(os.stat(p).st_mtime, os.stat(metadata).st_mtime)
        else:
            return os.stat(p).st_mtime

    if recursive:
        return max(last_modified_for_path(root) for root, _, _ in walk(path))
    else:
        return last_modified_for_path(path)
def get_size(self, path=None, recursive=False):
    """
    Returns the size in bytes of ``path`` (defaults to the storage root);
    with ``recursive`` set, subfolders are included as well.
    """
    if path is None:
        path = self.basefolder
    folder, name = self.sanitize(path)
    target = os.path.join(folder, name)

    # individual files can be answered without scanning anything
    if os.path.isfile(target):
        return os.stat(target).st_size

    total = 0
    for entry in os.scandir(target):
        if entry.is_file():
            total += entry.stat().st_size
        elif recursive and entry.is_dir():
            total += self.get_size(entry.path, recursive=recursive)
    return total
def get_lastmodified(self, path: str = None, recursive: bool = False) -> int:
    """
    Returns the last modification timestamp (whole seconds) of ``path``
    (defaults to the storage root); with ``recursive`` set, subfolders are
    taken into account as well.
    """
    if path is None:
        path = self.basefolder
    folder, name = self.sanitize(path)
    target = os.path.join(folder, name)

    # individual files can be answered without scanning anything
    if os.path.isfile(target):
        return int(os.stat(target).st_mtime)

    newest = 0
    for entry in os.scandir(target):
        if entry.is_file():
            newest = max(newest, entry.stat().st_mtime)
        elif recursive and entry.is_dir():
            newest = max(
                newest,
                self.get_lastmodified(entry.path, recursive=recursive),
            )
    return int(newest)
def file_in_path(self, path, filepath):
    """Returns whether ``filepath`` equals ``path`` or lies below it."""
    candidate = self.sanitize_path(filepath)
    base = self.sanitize_path(path)
    return candidate == base or candidate.startswith(base + os.sep)
def file_exists(self, path):
    """Returns whether a plain file exists at ``path``."""
    folder, name = self.sanitize(path)
    # os.path.isfile already implies existence
    return os.path.isfile(os.path.join(folder, name))
def folder_exists(self, path):
    """Returns whether a folder exists at ``path``."""
    folder, name = self.sanitize(path)
    # os.path.isdir already implies existence
    return os.path.isdir(os.path.join(folder, name))
def list_files(
    self, path=None, filter=None, recursive=True, level=0, force_refresh=False
):
    """
    Returns the dict of files and folders below ``path`` (or the storage
    root), see :func:`StorageInterface.list_files` for the data structure.

    ``filter`` is applied to file nodes only; folder nodes are always kept
    but have their children filtered recursively. With ``recursive`` unset,
    children (``level`` == 0) or grandchildren (``level`` > 0) are stripped
    from the result. ``force_refresh`` bypasses the file list cache.
    """
    if path:
        # sanitize the path and derive the storage relative prefix for entries
        path = self.sanitize_path(to_unicode(path))
        base = self.path_in_storage(path)
        if base:
            base += "/"
    else:
        path = self.basefolder
        base = ""

    def strip_children(nodes):
        # removes all children from folder nodes (non-recursive top level)
        result = {}
        for key, node in nodes.items():
            if node["type"] == "folder":
                node = copy.copy(node)
                node["children"] = {}
            result[key] = node
        return result

    def strip_grandchildren(nodes):
        # keeps direct children but empties out their children in turn
        result = {}
        for key, node in nodes.items():
            if node["type"] == "folder":
                node = copy.copy(node)
                node["children"] = strip_children(node["children"])
            result[key] = node
        return result

    def apply_filter(nodes, filter_func):
        # keeps matching file nodes, recurses into folder nodes
        result = {}
        for key, node in nodes.items():
            if filter_func(node) or node["type"] == "folder":
                if node["type"] == "folder":
                    node = copy.copy(node)
                    node["children"] = apply_filter(
                        node.get("children", {}), filter_func
                    )
                result[key] = node
        return result

    result = self._list_folder(path, base=base, force_refresh=force_refresh)

    if not recursive:
        if level > 0:
            result = strip_grandchildren(result)
        else:
            result = strip_children(result)

    if callable(filter):
        result = apply_filter(result, filter)

    return result
def add_folder(self, path, ignore_existing=True, display=None):
    """
    Creates the folder ``path`` (sanitized), optionally storing a custom
    display name in the parent folder's metadata.

    :raises StorageError: if the folder exists and ``ignore_existing`` is False
    :return: the storage path of the new folder
    """
    display_path, display_name = self.canonicalize(path)
    path = self.sanitize_path(display_path)
    name = self.sanitize_name(display_name)

    if display is not None:
        display_name = display

    folder_path = os.path.join(path, name)
    if os.path.exists(folder_path):
        if not ignore_existing:
            raise StorageError(
                f"{name} does already exist in {path}",
                code=StorageError.ALREADY_EXISTS,
            )
    else:
        os.mkdir(folder_path)

    if display_name != name:
        # remember the custom display name in the metadata
        metadata = self._get_metadata_entry(path, name, default={})
        metadata["display"] = display_name
        self._update_metadata_entry(path, name, metadata)

    return self.path_in_storage((path, name))
def remove_folder(self, path, recursive=True):
    """
    Removes the folder at ``path`` including its metadata entry.

    :raises StorageError: if the folder is not empty (apart from metadata
        files) and ``recursive`` is not set
    """
    path, name = self.sanitize(path)

    folder_path = os.path.join(path, name)
    if not os.path.exists(folder_path):
        return

    # a folder only containing metadata files still counts as empty
    empty = True
    for entry in scandir(folder_path):
        if entry.name == ".metadata.json" or entry.name == ".metadata.yaml":
            continue
        empty = False
        break

    if not empty and not recursive:
        raise StorageError(
            f"{name} in {path} is not empty",
            code=StorageError.NOT_EMPTY,
        )

    import shutil

    shutil.rmtree(folder_path)

    self._remove_metadata_entry(path, name)
def _get_source_destination_data(self, source, destination, must_not_equal=False):
    """
    Prepares data dicts about source and destination for copy/move.

    Each returned dict carries the sanitized ``path`` and ``name``, the
    ``display`` name and the absolute ``fullpath`` of the respective entry.

    :raises StorageError: if the source does not exist, the destination
        folder is invalid, the destination already exists, or source and
        destination resolve to the same entry (with ``must_not_equal`` set
        or matching display names)
    :return: 2-tuple of (source data, destination data)
    """
    source_path, source_name = self.sanitize(source)
    destination_canon_path, destination_canon_name = self.canonicalize(destination)
    destination_path = self.sanitize_path(destination_canon_path)
    destination_name = self.sanitize_name(destination_canon_name)

    source_fullpath = os.path.join(source_path, source_name)
    destination_fullpath = os.path.join(destination_path, destination_name)

    if not os.path.exists(source_fullpath):
        raise StorageError(
            f"{source_name} in {source_path} does not exist",
            code=StorageError.INVALID_SOURCE,
        )

    if not os.path.isdir(destination_path):
        raise StorageError(
            "Destination path {} does not exist or is not a folder".format(
                destination_path
            ),
            code=StorageError.INVALID_DESTINATION,
        )

    # an existing destination is only acceptable when it is the source itself
    # (pure display rename)
    if (
        os.path.exists(destination_fullpath)
        and source_fullpath != destination_fullpath
    ):
        raise StorageError(
            f"{destination_name} does already exist in {destination_path}",
            code=StorageError.ALREADY_EXISTS,
        )

    source_meta = self._get_metadata_entry(source_path, source_name)
    if source_meta:
        source_display = source_meta.get("display", source_name)
    else:
        source_display = source_name

    if (
        must_not_equal or source_display == destination_canon_name
    ) and source_fullpath == destination_fullpath:
        raise StorageError(
            "Source {} and destination {} are the same folder".format(
                source_path, destination_path
            ),
            code=StorageError.SOURCE_EQUALS_DESTINATION,
        )

    source_data = {
        "path": source_path,
        "name": source_name,
        "display": source_display,
        "fullpath": source_fullpath,
    }
    destination_data = {
        "path": destination_path,
        "name": destination_name,
        "display": destination_canon_name,
        "fullpath": destination_fullpath,
    }
    return source_data, destination_data
def _set_display_metadata(self, destination_data, source_data=None):
    """
    Carries over or updates the "display" metadata entry for
    ``destination_data``, removing the entry again when the display name and
    actual name coincide.
    """
    if (
        source_data
        and destination_data["name"] == source_data["name"]
        and source_data["name"] != source_data["display"]
    ):
        # same file name as the source and the source had a custom display
        # name -> keep that display name
        display = source_data["display"]
    elif destination_data["name"] != destination_data["display"]:
        display = destination_data["display"]
    else:
        display = None

    destination_meta = self._get_metadata_entry(
        destination_data["path"], destination_data["name"], default={}
    )
    if display:
        destination_meta["display"] = display
        self._update_metadata_entry(
            destination_data["path"], destination_data["name"], destination_meta
        )
    elif "display" in destination_meta:
        del destination_meta["display"]
        self._update_metadata_entry(
            destination_data["path"], destination_data["name"], destination_meta
        )
def copy_folder(self, source, destination):
    """
    Copies the folder ``source`` to ``destination``, carrying its display
    metadata along.

    :raises StorageError: if the copy failed
    :return: the storage path of the copy
    """
    source_data, destination_data = self._get_source_destination_data(
        source, destination, must_not_equal=True
    )

    try:
        shutil.copytree(source_data["fullpath"], destination_data["fullpath"])
    except Exception as e:
        raise StorageError(
            "Could not copy %s in %s to %s in %s"
            % (
                source_data["name"],
                source_data["path"],
                destination_data["name"],
                destination_data["path"],
            ),
            cause=e,
        )

    self._set_display_metadata(destination_data, source_data=source_data)

    return self.path_in_storage(destination_data["fullpath"])
def move_folder(self, source, destination):
    """
    Moves the folder ``source`` to ``destination``, migrating its metadata.

    If source and destination resolve to the same file system path, the call
    is treated as a pure display rename and only updates metadata.

    :raises StorageError: if the move failed
    :return: the new storage path of the folder
    """
    source_data, destination_data = self._get_source_destination_data(
        source, destination
    )

    # only a display rename? Update that and bail early
    if source_data["fullpath"] == destination_data["fullpath"]:
        self._set_display_metadata(destination_data)
        return self.path_in_storage(destination_data["fullpath"])

    try:
        shutil.move(source_data["fullpath"], destination_data["fullpath"])
    except Exception as e:
        raise StorageError(
            "Could not move %s in %s to %s in %s"
            % (
                source_data["name"],
                source_data["path"],
                destination_data["name"],
                destination_data["path"],
            ),
            cause=e,
        )

    self._set_display_metadata(destination_data, source_data=source_data)
    self._remove_metadata_entry(source_data["path"], source_data["name"])
    self._delete_metadata(source_data["fullpath"])

    return self.path_in_storage(destination_data["fullpath"])
def add_file(
    self,
    path,
    file_object,
    printer_profile=None,
    links=None,
    allow_overwrite=False,
    display=None,
):
    """
    Saves ``file_object`` under the sanitized ``path``, updating the folder
    metadata (file hash, optional display name, links) accordingly.

    :raises StorageError: if the file type is not recognized, or the target
        already exists and either is not a file or ``allow_overwrite`` is False
    :return: the storage path of the added file
    """
    display_path, display_name = self.canonicalize(path)
    path = self.sanitize_path(display_path)
    name = self.sanitize_name(display_name)

    if display:
        display_name = display

    if not octoprint.filemanager.valid_file_type(name):
        raise StorageError(
            f"{name} is an unrecognized file type",
            code=StorageError.INVALID_FILE,
        )

    file_path = os.path.join(path, name)
    if os.path.exists(file_path) and not os.path.isfile(file_path):
        raise StorageError(
            f"{name} does already exist in {path} and is not a file",
            code=StorageError.ALREADY_EXISTS,
        )
    if os.path.exists(file_path) and not allow_overwrite:
        raise StorageError(
            f"{name} does already exist in {path} and overwriting is prohibited",
            code=StorageError.ALREADY_EXISTS,
        )

    # make sure folders exist
    if not os.path.exists(path):
        # TODO persist display names of path segments!
        os.makedirs(path)

    # save the file
    file_object.save(file_path)

    # save the file's hash to the metadata of the folder
    file_hash = self._create_hash(file_path)
    metadata = self._get_metadata_entry(path, name, default={})
    metadata_dirty = False
    if "hash" not in metadata or metadata["hash"] != file_hash:
        # hash changed -> throw away old metadata
        metadata = {"hash": file_hash}
        metadata_dirty = True

    if "display" not in metadata and display_name != name:
        # display name is not the same as file name -> store in metadata
        metadata["display"] = display_name
        metadata_dirty = True

    if metadata_dirty:
        self._update_metadata_entry(path, name, metadata)

    # process any links that were also provided for adding to the file
    if not links:
        links = []
    if printer_profile is not None:
        links.append(
            (
                "printerprofile",
                {"id": printer_profile["id"], "name": printer_profile["name"]},
            )
        )
    self._add_links(name, path, links)

    # touch the file to set last access and modification time to now
    os.utime(file_path, None)

    return self.path_in_storage((path, name))
def remove_file(self, path):
    """
    Deletes the file at ``path`` along with its metadata entry.

    A missing file is silently ignored.

    :raises StorageError: if ``path`` exists but is not a file, or if the
        deletion itself failed
    """
    folder, name = self.sanitize(path)

    full_path = os.path.join(folder, name)
    if not os.path.exists(full_path):
        return
    if not os.path.isfile(full_path):
        raise StorageError(
            f"{name} in {folder} is not a file",
            code=StorageError.INVALID_FILE,
        )

    try:
        os.remove(full_path)
    except Exception as e:
        raise StorageError(f"Could not delete {name} in {folder}", cause=e)

    self._remove_metadata_entry(folder, name)
def copy_file(self, source, destination):
    """
    Copies the file ``source`` to ``destination`` including its metadata entry.

    :raises StorageError: if the destination file type is invalid or the copy failed
    :return: the storage path of the copy
    """
    source_data, destination_data = self._get_source_destination_data(
        source, destination, must_not_equal=True
    )

    if not octoprint.filemanager.valid_file_type(destination_data["name"]):
        raise StorageError(
            f"{destination_data['name']} is an unrecognized file type",
            code=StorageError.INVALID_FILE,
        )

    try:
        shutil.copy2(source_data["fullpath"], destination_data["fullpath"])
    except Exception as e:
        raise StorageError(
            "Could not copy %s in %s to %s in %s"
            % (
                source_data["name"],
                source_data["path"],
                destination_data["name"],
                destination_data["path"],
            ),
            cause=e,
        )

    self._copy_metadata_entry(
        source_data["path"],
        source_data["name"],
        destination_data["path"],
        destination_data["name"],
    )
    self._set_display_metadata(destination_data, source_data=source_data)

    return self.path_in_storage(destination_data["fullpath"])
def move_file(self, source, destination, allow_overwrite=False):
    """
    Moves the file ``source`` to ``destination``, carrying its metadata along.

    If source and destination resolve to the same file system path, the call
    is treated as a pure display rename and only updates metadata.

    .. note::

       ``allow_overwrite`` is currently not evaluated here; whether an
       existing destination is accepted is decided by
       :func:`_get_source_destination_data`.

    :raises StorageError: if the destination file type is invalid or the move failed
    :return: the new storage path of the file
    """
    source_data, destination_data = self._get_source_destination_data(
        source, destination
    )

    if not octoprint.filemanager.valid_file_type(destination_data["name"]):
        raise StorageError(
            f"{destination_data['name']} is an unrecognized file type",
            code=StorageError.INVALID_FILE,
        )

    # only a display rename? Update that and bail early
    if source_data["fullpath"] == destination_data["fullpath"]:
        self._set_display_metadata(destination_data)
        return self.path_in_storage(destination_data["fullpath"])

    try:
        shutil.move(source_data["fullpath"], destination_data["fullpath"])
    except Exception as e:
        raise StorageError(
            "Could not move %s in %s to %s in %s"
            % (
                source_data["name"],
                source_data["path"],
                destination_data["name"],
                destination_data["path"],
            ),
            cause=e,
        )

    self._copy_metadata_entry(
        source_data["path"],
        source_data["name"],
        destination_data["path"],
        destination_data["name"],
        delete_source=True,
    )
    self._set_display_metadata(destination_data, source_data=source_data)

    return self.path_in_storage(destination_data["fullpath"])
def has_analysis(self, path):
    """Returns whether an analysis result is present in ``path``'s metadata."""
    return "analysis" in self.get_metadata(path)
def get_metadata(self, path):
    """Returns the metadata entry stored for ``path``, if any."""
    folder, name = self.sanitize(path)
    return self._get_metadata_entry(folder, name)
def get_link(self, path, rel):
    """Returns all links of relation ``rel`` stored for ``path``."""
    folder, name = self.sanitize(path)
    return self._get_links(name, folder, rel)
def add_link(self, path, rel, data):
    """Attaches a single link of relation ``rel`` with ``data`` to ``path``."""
    folder, name = self.sanitize(path)
    self._add_links(name, folder, [(rel, data)])
def remove_link(self, path, rel, data):
    """Removes the link of relation ``rel`` matching ``data`` from ``path``."""
    folder, name = self.sanitize(path)
    self._remove_links(name, folder, [(rel, data)])
def add_history(self, path, data):
    """Appends ``data`` to the history recorded for ``path``."""
    folder, name = self.sanitize(path)
    self._add_history(name, folder, data)
def update_history(self, path, index, data):
    """Replaces the history entry at ``index`` for ``path`` with ``data``."""
    folder, name = self.sanitize(path)
    self._update_history(name, folder, index, data)
def remove_history(self, path, index):
    """Deletes the history entry at ``index`` for ``path``."""
    folder, name = self.sanitize(path)
    self._delete_history(name, folder, index)
def get_additional_metadata(self, path, key):
    """Fetches the additional metadata stored under ``key`` for ``path``, if any."""
    folder, name = self.sanitize(path)
    entries = self._get_metadata(folder)

    if name not in entries:
        return None

    return entries[name].get(key)
def set_additional_metadata(self, path, key, data, overwrite=False, merge=False):
    """
    Stores ``data`` under ``key`` in ``path``'s metadata. Existing values
    are only replaced with ``overwrite`` set, or recursively combined with
    ``merge`` set (the latter only when both old and new value are dicts).
    No-op if there is no metadata entry for ``path`` yet.
    """
    path, name = self.sanitize(path)
    metadata = self._get_metadata(path)
    metadata_dirty = False

    if name not in metadata:
        return

    # NOTE(review): _copied_metadata presumably shields the cached dict from
    # in-place edits - confirm against its definition
    metadata = self._copied_metadata(metadata, name)

    if key not in metadata[name] or overwrite:
        metadata[name][key] = data
        metadata_dirty = True
    elif (
        key in metadata[name]
        and isinstance(metadata[name][key], dict)
        and isinstance(data, dict)
        and merge
    ):
        import octoprint.util

        metadata[name][key] = octoprint.util.dict_merge(
            metadata[name][key], data, in_place=True
        )
        metadata_dirty = True

    if metadata_dirty:
        self._save_metadata(path, metadata)
def remove_additional_metadata(self, path, key):
    """Drops the additional metadata stored under ``key`` for ``path``, if present."""
    folder, name = self.sanitize(path)
    entries = self._get_metadata(folder)

    if name not in entries or key not in entries[name]:
        return

    entries = self._copied_metadata(entries, name)
    del entries[name][key]

    self._save_metadata(folder, entries)
def split_path(self, path):
    """Splits ``path`` into a ``(base directory, file name)`` 2-tuple."""
    parts = to_unicode(path).split("/")

    if len(parts) == 1:
        # root level entry, no base directory
        return "", parts[0]

    return self.path_in_storage(self.join_path(*parts[:-1])), parts[-1]
def join_path(self, *path):
    """Joins the given elements with ``/`` and normalizes to a storage path."""
    joined = "/".join(to_unicode(element) for element in path)
    return self.path_in_storage(joined)
def sanitize(self, path):
    """
    Returns a ``(path, name)`` tuple derived from the provided ``path``.

    ``path`` may be:

    * a storage path
    * an absolute file system path
    * a tuple or list containing all individual path elements
    * a string representation of the path
    * with or without a file name

    Note that for a ``path`` without a trailing slash the last part will be considered a file name and
    hence be returned at second position. If you only need to convert a folder path, be sure to
    include a trailing slash for a string ``path`` or an empty last element for a list ``path``.
    """
    path, name = self.canonicalize(path)
    name = self.sanitize_name(name)
    path = self.sanitize_path(path)
    return path, name
def canonicalize(self, path):
    """
    Splits ``path`` (a storage path, an absolute disk path, or a list/tuple
    of path segments) into an unsanitized ``(path, name)`` 2-tuple. The
    returned path is ``""`` for root level entries.
    """
    name = None
    if isinstance(path, str):
        path = to_unicode(path)
        if path.startswith(self.basefolder):
            # absolute path below the base folder -> make it storage relative
            path = path[len(self.basefolder) :]
        path = path.replace(os.path.sep, "/")
        path = path.split("/")
    if isinstance(path, (list, tuple)):
        if len(path) == 1:
            name = to_unicode(path[0])
            path = ""
        else:
            name = to_unicode(path[-1])
            path = self.join_path(*map(to_unicode, path[:-1]))
    if not path:
        path = ""

    return path, name
def sanitize_name(self, name):
    """
    Raises a :class:`ValueError` for a ``name`` containing ``/`` or ``\\``. Otherwise
    sanitizes the given ``name`` using ``octoprint.files.sanitize_filename``. Also
    strips any leading ``.``.
    """
    # the actual validation and sanitization rules live in sanitize_filename
    return sanitize_filename(name, really_universal=self._really_universal)
def sanitize_path(self, path):
"""
Ensures that the on disk representation of ``path`` is located under the configured basefolder. Resolves all
relative path elements (e.g. ``..``) and sanitizes folder names using :func:`sanitize_name`. Final path is the
absolute path including leading ``basefolder`` path.
"""
path = to_unicode(path)
if len(path):
if path[0] == "/":
path = path[1:]
elif path[0] == "." and path[1] == "/":
path = path[2:]
path_elements = path.split("/")
joined_path = self.basefolder
for path_element in path_elements:
if path_element == ".." or path_element == ".":
joined_path = os.path.join(joined_path, path_element)
else:
joined_path = os.path.join(joined_path, self.sanitize_name(path_element))
path = os.path.realpath(joined_path)
if not path.startswith(self.basefolder):
raise ValueError(f"path not contained in base folder: {path}")
return path
    def _sanitize_entry(self, entry, path, entry_path):
        """
        Ensures the directory entry ``entry`` (located at ``entry_path`` inside
        ``path``) has a sanitized name on disk, renaming the file if necessary.
        Returns the resulting ``(name, path)`` tuple; raises if the rename fails.
        """
        entry = to_unicode(entry)
        sanitized = self.sanitize_name(entry)
        if sanitized != entry:
            # entry is not sanitized yet, let's take care of that
            sanitized_path = os.path.join(path, sanitized)
            sanitized_name, sanitized_ext = os.path.splitext(sanitized)
            counter = 1
            # append "_(<counter>)" until we find a name that's still free
            while os.path.exists(sanitized_path):
                counter += 1
                sanitized = self.sanitize_name(
                    f"{sanitized_name}_({counter}){sanitized_ext}"
                )
                sanitized_path = os.path.join(path, sanitized)
            try:
                shutil.move(entry_path, sanitized_path)
                self._logger.info(f'Sanitized "{entry_path}" to "{sanitized_path}"')
                return sanitized, sanitized_path
            except Exception:
                self._logger.exception(
                    'Error while trying to rename "{}" to "{}", ignoring file'.format(
                        entry_path, sanitized_path
                    )
                )
                raise
        return entry, entry_path
def path_in_storage(self, path):
if isinstance(path, (tuple, list)):
path = self.join_path(*path)
if isinstance(path, str):
path = to_unicode(path)
if path.startswith(self.basefolder):
path = path[len(self.basefolder) :]
path = path.replace(os.path.sep, "/")
while path.startswith("/"):
path = path[1:]
return path
def path_on_disk(self, path):
path, name = self.sanitize(path)
return os.path.join(path, name)
##~~ internals
def _add_history(self, name, path, data):
metadata = self._copied_metadata(self._get_metadata(path), name)
if "hash" not in metadata[name]:
metadata[name]["hash"] = self._create_hash(os.path.join(path, name))
if "history" not in metadata[name]:
metadata[name]["history"] = []
metadata[name]["history"].append(data)
self._calculate_stats_from_history(name, path, metadata=metadata, save=False)
self._save_metadata(path, metadata)
def _update_history(self, name, path, index, data):
metadata = self._get_metadata(path)
if name not in metadata or "history" not in metadata[name]:
return
metadata = self._copied_metadata(metadata, name)
try:
metadata[name]["history"][index].update(data)
self._calculate_stats_from_history(name, path, metadata=metadata, save=False)
self._save_metadata(path, metadata)
except IndexError:
pass
def _delete_history(self, name, path, index):
metadata = self._get_metadata(path)
if name not in metadata or "history" not in metadata[name]:
return
metadata = self._copied_metadata(metadata, name)
try:
del metadata[name]["history"][index]
self._calculate_stats_from_history(name, path, metadata=metadata, save=False)
self._save_metadata(path, metadata)
except IndexError:
pass
    def _calculate_stats_from_history(self, name, path, metadata=None, save=True):
        """
        Recomputes the per-printer-profile ``averagePrintTime`` and
        ``lastPrintTime`` statistics for ``name`` from its print history and
        stores them in the file's metadata entry.
        """
        if metadata is None:
            metadata = self._copied_metadata(self._get_metadata(path), name)
        if "history" not in metadata[name]:
            return
        # collect data from history
        former_print_times = {}
        last_print = {}
        for history_entry in metadata[name]["history"]:
            # only successful prints with a print time and printer profile count
            if (
                "printTime" not in history_entry
                or "success" not in history_entry
                or not history_entry["success"]
                or "printerProfile" not in history_entry
            ):
                continue
            printer_profile = history_entry["printerProfile"]
            if not printer_profile:
                continue
            print_time = history_entry["printTime"]
            try:
                print_time = float(print_time)
            except Exception:
                self._logger.warning(
                    "Invalid print time value found in print history for {} in {}/.metadata.json: {!r}".format(
                        name, path, print_time
                    )
                )
                continue
            if printer_profile not in former_print_times:
                former_print_times[printer_profile] = []
            former_print_times[printer_profile].append(print_time)
            # NOTE(review): if a stored entry lacks "timestamp", the comparison
            # below raises a KeyError for the next timestamped entry of the
            # same profile - confirm history entries always carry a timestamp
            if (
                printer_profile not in last_print
                or last_print[printer_profile] is None
                or (
                    "timestamp" in history_entry
                    and history_entry["timestamp"]
                    > last_print[printer_profile]["timestamp"]
                )
            ):
                last_print[printer_profile] = history_entry
        # calculate stats
        statistics = {"averagePrintTime": {}, "lastPrintTime": {}}
        for printer_profile in former_print_times:
            if not former_print_times[printer_profile]:
                continue
            statistics["averagePrintTime"][printer_profile] = sum(
                former_print_times[printer_profile]
            ) / len(former_print_times[printer_profile])
        for printer_profile in last_print:
            if not last_print[printer_profile]:
                continue
            statistics["lastPrintTime"][printer_profile] = last_print[printer_profile][
                "printTime"
            ]
        metadata[name]["statistics"] = statistics
        if save:
            self._save_metadata(path, metadata)
def _get_links(self, name, path, searched_rel):
metadata = self._get_metadata(path)
result = []
if name not in metadata:
return result
if "links" not in metadata[name]:
return result
for data in metadata[name]["links"]:
if "rel" not in data or not data["rel"] == searched_rel:
continue
result.append(data)
return result
    def _add_links(self, name, path, links):
        """
        Adds the given ``links`` to the metadata of file ``name`` in ``path``,
        creating the matching reverse links on referenced model/machinecode
        counterpart files.
        """
        file_type = octoprint.filemanager.get_file_type(name)
        if file_type:
            file_type = file_type[0]
        metadata = self._copied_metadata(self._get_metadata(path), name)
        metadata_dirty = False
        if "hash" not in metadata[name]:
            metadata[name]["hash"] = self._create_hash(os.path.join(path, name))
        if "links" not in metadata[name]:
            metadata[name]["links"] = []
        for rel, data in links:
            if (rel == "model" or rel == "machinecode") and "name" in data:
                if file_type == "model" and rel == "model":
                    # adding a model link to a model doesn't make sense
                    # NOTE(review): this return aborts processing of ALL
                    # remaining links, not just this one - confirm intended
                    return
                elif file_type == "machinecode" and rel == "machinecode":
                    # adding a machinecode link to a machinecode doesn't make sense
                    return
                ref_path = os.path.join(path, data["name"])
                if not os.path.exists(ref_path):
                    # file doesn't exist, we won't create the link
                    continue
                # fetch hash of target file
                if data["name"] in metadata and "hash" in metadata[data["name"]]:
                    hash = metadata[data["name"]]["hash"]
                else:
                    hash = self._create_hash(ref_path)
                if data["name"] not in metadata:
                    metadata[data["name"]] = {"hash": hash, "links": []}
                else:
                    metadata[data["name"]]["hash"] = hash
                if "hash" in data and not data["hash"] == hash:
                    # file doesn't have the correct hash, we won't create the link
                    continue
                if "links" not in metadata[data["name"]]:
                    metadata[data["name"]]["links"] = []
                # add reverse link to link target file
                metadata[data["name"]]["links"].append(
                    {
                        "rel": "machinecode" if rel == "model" else "model",
                        "name": name,
                        "hash": metadata[name]["hash"],
                    }
                )
                metadata_dirty = True
                link_dict = {"rel": rel, "name": data["name"], "hash": hash}
            elif rel == "web" and "href" in data:
                link_dict = {"rel": rel, "href": data["href"]}
                if "retrieved" in data:
                    link_dict["retrieved"] = data["retrieved"]
            else:
                continue
            if link_dict:
                metadata[name]["links"].append(link_dict)
                metadata_dirty = True
        if metadata_dirty:
            self._save_metadata(path, metadata)
def _remove_links(self, name, path, links):
metadata = self._copied_metadata(self._get_metadata(path), name)
metadata_dirty = False
hash = metadata[name].get("hash", self._create_hash(os.path.join(path, name)))
for rel, data in links:
if (rel == "model" or rel == "machinecode") and "name" in data:
if data["name"] in metadata and "links" in metadata[data["name"]]:
ref_rel = "model" if rel == "machinecode" else "machinecode"
for link in metadata[data["name"]]["links"]:
if (
link["rel"] == ref_rel
and "name" in link
and link["name"] == name
and "hash" in link
and link["hash"] == hash
):
metadata[data["name"]] = copy.deepcopy(metadata[data["name"]])
metadata[data["name"]]["links"].remove(link)
metadata_dirty = True
if "links" in metadata[name]:
for link in metadata[name]["links"]:
if not link["rel"] == rel:
continue
matches = True
for k, v in data.items():
if k not in link or not link[k] == v:
matches = False
break
if not matches:
continue
metadata[name]["links"].remove(link)
metadata_dirty = True
if metadata_dirty:
self._save_metadata(path, metadata)
    @time_this(
        logtarget=__name__ + ".timings",
        message="{func}({func_args},{func_kwargs}) took {timing:.2f}ms",
        incl_func_args=True,
        log_enter=True,
    )
    def _list_folder(self, path, base="", force_refresh=False, **kwargs):
        """
        Builds the recursive file/folder tree for ``path``, keyed by entry name.
        Folder entries carry their children (recursively) plus an aggregated
        size. Results are cached per path and invalidated via the folder's
        recursive last-modified time unless ``force_refresh`` is set. Entry
        names get sanitized on disk as a side effect; newly discovered entries
        get basic metadata created for them.
        """
        def get_size(nodes):
            # aggregated size of all direct children that carry a "size"
            total_size = 0
            for node in nodes.values():
                if "size" in node:
                    total_size += node["size"]
            return total_size
        def enrich_folders(nodes):
            # recurse into folder nodes, attaching children and their total size
            nodes = copy.copy(nodes)
            for key, value in nodes.items():
                if value["type"] == "folder":
                    value = copy.copy(value)
                    value["children"] = self._list_folder(
                        os.path.join(path, key),
                        base=value["path"] + "/",
                        force_refresh=force_refresh,
                    )
                    value["size"] = get_size(value["children"])
                    nodes[key] = value
            return nodes
        metadata_dirty = False
        try:
            with self._filelist_cache_mutex:
                cache = self._filelist_cache.get(path)
                lm = self.last_modified(path, recursive=True)
                if not force_refresh and cache and cache[0] >= lm:
                    # cached result is still fresh, only folders get re-enriched
                    return enrich_folders(cache[1])
                metadata = self._get_metadata(path)
                if not metadata:
                    metadata = {}
                result = {}
                for entry in scandir(path):
                    if is_hidden_path(entry.name):
                        # no hidden files and folders
                        continue
                    try:
                        entry_name = entry_display = entry.name
                        entry_path = entry.path
                        entry_is_file = entry.is_file()
                        entry_is_dir = entry.is_dir()
                        entry_stat = entry.stat()
                    except Exception:
                        # error while trying to fetch file metadata, that might be thanks to file already having
                        # been moved or deleted - ignore it and continue
                        continue
                    try:
                        # rename on disk if the name isn't sanitized yet
                        new_entry_name, new_entry_path = self._sanitize_entry(
                            entry_name, path, entry_path
                        )
                        if entry_name != new_entry_name or entry_path != new_entry_path:
                            entry_display = to_unicode(entry_name)
                            entry_name = new_entry_name
                            entry_path = new_entry_path
                            entry_stat = os.stat(entry_path)
                    except Exception:
                        # error while trying to rename the file, we'll continue here and ignore it
                        continue
                    path_in_location = entry_name if not base else base + entry_name
                    try:
                        # file handling
                        if entry_is_file:
                            type_path = octoprint.filemanager.get_file_type(entry_name)
                            if not type_path:
                                # only supported extensions
                                continue
                            else:
                                file_type = type_path[0]
                            if entry_name in metadata and isinstance(
                                metadata[entry_name], dict
                            ):
                                entry_metadata = metadata[entry_name]
                                if (
                                    "display" not in entry_metadata
                                    and entry_display != entry_name
                                ):
                                    # migrate the original display name into metadata
                                    if not metadata_dirty:
                                        metadata = self._copied_metadata(
                                            metadata, entry_name
                                        )
                                    metadata[entry_name]["display"] = entry_display
                                    entry_metadata["display"] = entry_display
                                    metadata_dirty = True
                            else:
                                # entry unknown so far, create its basic metadata
                                if not metadata_dirty:
                                    metadata = self._copied_metadata(metadata, entry_name)
                                entry_metadata = self._add_basic_metadata(
                                    path,
                                    entry_name,
                                    display_name=entry_display,
                                    save=False,
                                    metadata=metadata,
                                )
                                metadata_dirty = True
                            extended_entry_data = {}
                            extended_entry_data.update(entry_metadata)
                            extended_entry_data["name"] = entry_name
                            extended_entry_data["display"] = entry_metadata.get(
                                "display", entry_name
                            )
                            extended_entry_data["path"] = path_in_location
                            extended_entry_data["type"] = file_type
                            extended_entry_data["typePath"] = type_path
                            stat = entry_stat
                            if stat:
                                extended_entry_data["size"] = stat.st_size
                                extended_entry_data["date"] = int(stat.st_mtime)
                            result[entry_name] = extended_entry_data
                        # folder recursion
                        elif entry_is_dir:
                            if entry_name in metadata and isinstance(
                                metadata[entry_name], dict
                            ):
                                entry_metadata = metadata[entry_name]
                                if (
                                    "display" not in entry_metadata
                                    and entry_display != entry_name
                                ):
                                    if not metadata_dirty:
                                        metadata = self._copied_metadata(
                                            metadata, entry_name
                                        )
                                    metadata[entry_name]["display"] = entry_display
                                    entry_metadata["display"] = entry_display
                                    metadata_dirty = True
                            elif entry_name != entry_display:
                                if not metadata_dirty:
                                    metadata = self._copied_metadata(metadata, entry_name)
                                entry_metadata = self._add_basic_metadata(
                                    path,
                                    entry_name,
                                    display_name=entry_display,
                                    save=False,
                                    metadata=metadata,
                                )
                                metadata_dirty = True
                            else:
                                entry_metadata = {}
                            entry_data = {
                                "name": entry_name,
                                "display": entry_metadata.get("display", entry_name),
                                "path": path_in_location,
                                "type": "folder",
                                "typePath": ["folder"],
                            }
                            if entry_stat:
                                entry_data["date"] = int(entry_stat.st_mtime)
                            result[entry_name] = entry_data
                    except Exception:
                        # So something went wrong somewhere while processing this file entry - log that and continue
                        self._logger.exception(
                            f"Error while processing entry {entry_path}"
                        )
                        continue
                self._filelist_cache[path] = (
                    lm,
                    result,
                )
                return enrich_folders(result)
        finally:
            # save metadata
            if metadata_dirty:
                self._save_metadata(path, metadata)
    def _add_basic_metadata(
        self,
        path,
        entry,
        display_name=None,
        additional_metadata=None,
        save=True,
        metadata=None,
    ):
        """
        Creates the initial metadata entry for ``entry`` in ``path``: hash plus
        empty links/notes for files, an empty dict for folders. Returns the new
        entry data, or ``None`` if ``entry`` is neither file nor folder.
        """
        if additional_metadata is None:
            additional_metadata = {}
        if metadata is None:
            metadata = self._get_metadata(path)
        entry_path = os.path.join(path, entry)
        if os.path.isfile(entry_path):
            entry_data = {
                "hash": self._create_hash(os.path.join(path, entry)),
                "links": [],
                "notes": [],
            }
            if (
                path == self.basefolder
                and self._old_metadata is not None
                and entry in self._old_metadata
                and "gcodeAnalysis" in self._old_metadata[entry]
            ):
                # if there is still old metadata available and that contains an analysis for this file, use it!
                entry_data["analysis"] = self._old_metadata[entry]["gcodeAnalysis"]
        elif os.path.isdir(entry_path):
            entry_data = {}
        else:
            return
        if display_name is not None and not display_name == entry:
            # only persist a display name when it actually differs
            entry_data["display"] = display_name
        entry_data.update(additional_metadata)
        metadata = copy.copy(metadata)
        metadata[entry] = entry_data
        if save:
            self._save_metadata(path, metadata)
        return entry_data
def _create_hash(self, path):
import hashlib
blocksize = 65536
hash = hashlib.sha1()
with open(path, "rb") as f:
buffer = f.read(blocksize)
while len(buffer) > 0:
hash.update(buffer)
buffer = f.read(blocksize)
return hash.hexdigest()
def _get_metadata_entry(self, path, name, default=None):
with self._get_metadata_lock(path):
metadata = self._get_metadata(path)
return metadata.get(name, default)
    def _remove_metadata_entry(self, path, name):
        """
        Deletes the metadata entry for ``name`` in ``path`` and strips any
        model/machinecode links in other entries pointing at its hash.
        """
        with self._get_metadata_lock(path):
            metadata = self._get_metadata(path)
            if name not in metadata:
                return
            metadata = copy.copy(metadata)
            if "hash" in metadata[name]:
                hash = metadata[name]["hash"]
                for m in metadata.values():
                    if "links" not in m:
                        continue
                    links_hash = (
                        lambda link: "hash" in link
                        and link["hash"] == hash
                        and "rel" in link
                        and (link["rel"] == "model" or link["rel"] == "machinecode")
                    )
                    # NOTE(review): metadata was only shallow-copied above, so
                    # this rebind mutates entry dicts shared with the cached
                    # metadata - confirm that's intended (other entries use
                    # _copied_metadata's per-entry deepcopy instead)
                    m["links"] = [link for link in m["links"] if not links_hash(link)]
            del metadata[name]
            self._save_metadata(path, metadata)
def _update_metadata_entry(self, path, name, data):
with self._get_metadata_lock(path):
metadata = copy.copy(self._get_metadata(path))
metadata[name] = data
self._save_metadata(path, metadata)
def _copy_metadata_entry(
self,
source_path,
source_name,
destination_path,
destination_name,
delete_source=False,
updates=None,
):
with self._get_metadata_lock(source_path):
source_data = self._get_metadata_entry(source_path, source_name, default={})
if not source_data:
return
if delete_source:
self._remove_metadata_entry(source_path, source_name)
if updates is not None:
source_data.update(updates)
with self._get_metadata_lock(destination_path):
self._update_metadata_entry(destination_path, destination_name, source_data)
    def _get_metadata(self, path, force=False):
        """
        Returns the metadata dict for ``path``, served from the in-memory cache
        unless ``force`` is set. On a cache miss reads ``.metadata.json``,
        pruning entries that can't be re-serialized or whose backing file no
        longer exists.
        """
        import json
        if not force:
            metadata = self._metadata_cache.get(path)
            if metadata:
                return metadata
        # make sure any legacy .metadata.yaml has been converted first
        self._migrate_metadata(path)
        metadata_path = os.path.join(path, ".metadata.json")
        metadata = None
        with self._get_persisted_metadata_lock(path):
            if os.path.exists(metadata_path):
                with open(metadata_path, encoding="utf-8") as f:
                    try:
                        metadata = json.load(f)
                    except Exception:
                        self._logger.exception(
                            f"Error while reading .metadata.json from {path}"
                        )
            def valid_json(value):
                # entries that can't be serialized (e.g. NaN values) get dropped
                try:
                    json.dumps(value, allow_nan=False)
                    return True
                except Exception:
                    return False
            if isinstance(metadata, dict):
                old_size = len(metadata)
                metadata = {k: v for k, v in metadata.items() if valid_json(v)}
                metadata = {
                    k: v for k, v in metadata.items() if os.path.exists(os.path.join(path, k))
                }
                new_size = len(metadata)
                if new_size != old_size:
                    self._logger.info(
                        "Deleted {} stale or invalid entries from metadata for path {}".format(
                            old_size - new_size, path
                        )
                    )
                    # _save_metadata also refreshes the cache for us
                    self._save_metadata(path, metadata)
                else:
                    with self._get_metadata_lock(path):
                        self._metadata_cache[path] = metadata
                return metadata
            else:
                # missing or unreadable metadata file - note: nothing is cached here
                return {}
def _save_metadata(self, path, metadata):
import json
with self._get_metadata_lock(path):
self._metadata_cache[path] = metadata
with self._get_persisted_metadata_lock(path):
metadata_path = os.path.join(path, ".metadata.json")
try:
with atomic_write(metadata_path, mode="wb") as f:
f.write(
to_bytes(json.dumps(metadata, indent=2, separators=(",", ": ")))
)
except Exception:
self._logger.exception(f"Error while writing .metadata.json to {path}")
def _delete_metadata(self, path):
with self._get_metadata_lock(path):
if path in self._metadata_cache:
del self._metadata_cache[path]
with self._get_persisted_metadata_lock(path):
metadata_files = (".metadata.json", ".metadata.yaml")
for metadata_file in metadata_files:
metadata_path = os.path.join(path, metadata_file)
if os.path.exists(metadata_path):
try:
os.remove(metadata_path)
except Exception:
self._logger.exception(
f"Error while deleting {metadata_file} from {path}"
)
@staticmethod
def _copied_metadata(metadata, name):
metadata = copy.copy(metadata)
metadata[name] = copy.deepcopy(metadata.get(name, {}))
return metadata
    def _migrate_metadata(self, path):
        """
        One-shot migration of a legacy ``.metadata.yaml`` in ``path`` to
        ``.metadata.json``; a no-op once the yaml file is gone.
        """
        # we switched to json in 1.3.9 - if we still have yaml here, migrate it now
        import json
        with self._get_persisted_metadata_lock(path):
            metadata_path_yaml = os.path.join(path, ".metadata.yaml")
            metadata_path_json = os.path.join(path, ".metadata.json")
            if not os.path.exists(metadata_path_yaml):
                # nothing to migrate
                return
            if os.path.exists(metadata_path_json):
                # already migrated
                try:
                    os.remove(metadata_path_yaml)
                except Exception:
                    self._logger.exception(
                        f"Error while removing .metadata.yaml from {path}"
                    )
                return
            try:
                metadata = yaml.load_from_file(path=metadata_path_yaml)
            except Exception:
                self._logger.exception(f"Error while reading .metadata.yaml from {path}")
                return
            if not isinstance(metadata, dict):
                # looks invalid, ignore it
                return
            # write the json variant atomically, then drop the yaml original
            with atomic_write(metadata_path_json, mode="wb") as f:
                f.write(to_bytes(json.dumps(metadata, indent=2, separators=(",", ": "))))
            try:
                os.remove(metadata_path_yaml)
            except Exception:
                self._logger.exception(f"Error while removing .metadata.yaml from {path}")
@contextmanager
def _get_metadata_lock(self, path):
with self._metadata_lock_mutex:
if path not in self._metadata_locks:
import threading
self._metadata_locks[path] = (0, threading.RLock())
counter, lock = self._metadata_locks[path]
counter += 1
self._metadata_locks[path] = (counter, lock)
yield lock
with self._metadata_lock_mutex:
counter = self._metadata_locks[path][0]
counter -= 1
if counter <= 0:
del self._metadata_locks[path]
else:
self._metadata_locks[path] = (counter, lock)
@contextmanager
def _get_persisted_metadata_lock(self, path):
with self._persisted_metadata_lock_mutex:
if path not in self._persisted_metadata_locks:
import threading
self._persisted_metadata_locks[path] = (0, threading.RLock())
counter, lock = self._persisted_metadata_locks[path]
counter += 1
self._persisted_metadata_locks[path] = (counter, lock)
yield lock
with self._persisted_metadata_lock_mutex:
counter = self._persisted_metadata_locks[path][0]
counter -= 1
if counter <= 0:
del self._persisted_metadata_locks[path]
else:
self._persisted_metadata_locks[path] = (counter, lock)
| 76,616 | Python | .py | 1,677 | 31.954681 | 139 | 0.548429 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,002 | conf.py | OctoPrint_OctoPrint/docs/conf.py | #
# OctoPrint documentation build configuration file, created by
# sphinx-quickstart on Mon Dec 02 17:08:50 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath("../src/"))
sys.path.append(os.path.abspath("sphinxext"))
from datetime import date
import octoprint._version
# copyright year range for the footer: project start .. current year
year_since = 2013
year_current = date.today().year
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "1.3"
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    "codeblockext",
    "onlineinclude",
    "sphinx.ext.todo",
    "sphinx.ext.autodoc",
    "sphinxcontrib.httpdomain",
    "sphinx.ext.napoleon",
    "sphinxcontrib.mermaid",
    "sphinx.ext.intersphinx",
    "sphinx_rtd_theme",
]
todo_include_todos = True
intersphinx_mapping = {
    "python": ("https://docs.python.org/3", None),
    "pyserial": ("https://pythonhosted.org/pyserial", None),
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "OctoPrint"
copyright = (
    "%d-%d, Gina Häußge" % (year_since, year_current)
    if year_current > year_since
    else "%d, Gina Häußge" % year_since
)
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = octoprint.__version__
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "stata-dark"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
numfig = True
# empty version string: presumably suppresses the extension's CDN include in
# favor of the local mermaid.min.js registered in setup() - confirm against
# the sphinxcontrib-mermaid docs
mermaid_version = ""
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of built-in themes.
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the built-in static files,
# so a file named "default.css" will overwrite the built-in "default.css".
html_static_path = ["_static"]
def setup(app):
    """Sphinx setup hook: register custom CSS overrides and the local mermaid JS."""
    app.add_css_file("theme_overrides.css")
    app.add_js_file("mermaid.min.js")
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "OctoPrintDoc"
# -- Options for LaTeX output --------------------------------------------------
# latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# }
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
# latex_documents = [
#  ('index', 'OctoPrint.tex', 'OctoPrint Documentation',
#   'Gina Häußge', 'manual'),
# ]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then top-level headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [("index", "octoprint", "OctoPrint Documentation", ["Gina Häußge"], 1)]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        "index",
        "OctoPrint",
        "OctoPrint Documentation",
        "Gina Häußge",
        "OctoPrint",
        "One line description of project.",
        "Miscellaneous",
    ),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
| 8,894 | Python | .py | 213 | 39.760563 | 83 | 0.718041 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,003 | codeblockext.py | OctoPrint_OctoPrint/docs/sphinxext/codeblockext.py | from __future__ import annotations
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "The MIT License <http://opensource.org/licenses/MIT>"
__copyright__ = "Copyright (C) 2015 Gina Häußge - Released under terms of the MIT License"
from typing import Any
import sphinx.highlighting
from docutils import nodes
from docutils.parsers.rst import directives
from pygments import highlight
from pygments.filters import ErrorToken, VisibleWhitespaceFilter
from pygments.lexers.python import PythonConsoleLexer
from pygments.util import ClassNotFound
from six import text_type
from sphinx.directives.code import CodeBlock
from sphinx.ext import doctest
def _merge_dict(a, b):
"""
Little helper to merge two dicts a and b on the fly.
"""
result = dict(a)
result.update(b)
return result
class literal_block_ext(nodes.General, nodes.FixedTextElement):
"""
Custom node which is basically the same as a :class:`literal_block`, just with whitespace support and introduced
in order to be able to have a custom visitor.
"""
@classmethod
def from_literal_block(cls, block):
"""
Factory method constructing an instance exactly copying all attributes over from ``block`` and settings a
custom ``tagname``.
"""
new = literal_block_ext()
for a in (
"attributes",
"basic_attributes",
"child_text_separator",
"children",
"document",
"known_attributes",
"line",
"list_attributes",
"local_attributes",
"parent",
"rawsource",
"source",
):
setattr(new, a, getattr(block, a))
new.tagname = "literal_block_ext"
return new
class CodeBlockExt(CodeBlock):
"""
This is basically an extension of a regular :class:`CodeBlock` directive which just supports an additional option
``whitespace`` which if present will enable (together with everything else in here) to render whitespace in
code blocks.
"""
option_spec = _merge_dict(CodeBlock.option_spec, {"whitespace": directives.flag})
def run(self) -> list[nodes.Node]:
# get result from parent implementation
code_block = CodeBlock.run(self)
def find_and_wrap_literal_block(node):
"""
Recursive method to turn all literal blocks located within a node into :class:`literal_block_ext`.
"""
if isinstance(node, nodes.container):
# container node => handle all children
children = []
for child in node.children:
children.append(find_and_wrap_literal_block(child))
node.children = children
return node
elif isinstance(node, nodes.literal_block):
# literal block => replace it
return self._wrap_literal_block(node)
else:
# no idea what that is => leave it alone
return node
# replace all created literal_blocks with literal_block_ext instances
return list(map(find_and_wrap_literal_block, code_block))
def _wrap_literal_block(self, node):
literal = literal_block_ext.from_literal_block(node)
literal["whitespace"] = "whitespace" in self.options
return literal
class PygmentsBridgeExt:
"""
Wrapper for :class:`PygmentsBridge`, delegates everything to the wrapped ``bridge`` but :method:`highlight_block`,
which calls the parent implementation for lexer selection, then
"""
    def __init__(self, bridge, whitespace):
        # Wrap an existing PygmentsBridge; ``whitespace`` toggles the visible
        # whitespace rendering in highlight_block.
        self._bridge = bridge
        self._whitespace = whitespace
def __getattr__(self, item):
return getattr(self._bridge, item)
def highlight_block(self, source, lang, opts=None, warn=None, force=False, **kwargs):
if not self._whitespace:
return self._bridge.highlight_block(
source, lang, opts=opts, warn=warn, force=force, **kwargs
)
# We are still here => we need to basically do everything the parent implementation does (and does so in a very
# inextensible way...), but inject the whitespace filter into the used lexer just before the highlighting run
# and remove it afterwards so the lexer can be safely reused.
#
# For this we define a context manager that will allow us to wrap a lexer and modify its filters on the fly to
# include the whitespace filter.
class whitespace:
def __init__(self, lexer):
self._lexer = lexer
self._orig_filters = lexer.filters
self._orig_tabsize = lexer.tabsize
def __enter__(self):
new_filters = list(self._orig_filters)
new_filters.append(
VisibleWhitespaceFilter(
spaces=True, tabs=True, tabsize=self._lexer.tabsize
)
)
self._lexer.filters = new_filters
self._lexer.tabsize = 0
return self._lexer
def __exit__(self, type, value, traceback):
self._lexer.filters = self._orig_filters
self._lexer.tabsize = self._orig_tabsize
# Then a ton of copy-pasted code follows. Sadly, we need to do this since we have no way to inject ourselves
# into the highlighting call otherwise - lexer selection and actual call are tightly coupled in the original
# "highlight_block" method, with no means for external code to inject different functionality.
#
# Unless otherwise marked ("MODIFIED"), any code in this method after this line is copied verbatim from the
# implementation of sphinx.highlighting.PygmentsBridge, released under the Simplified BSD License, the copyright
# lies with the respective authors.
if not isinstance(source, str):
source = source.decode()
# find out which lexer to use
if lang in ("py", "python"):
if source.startswith(">>>"):
# interactive session
lexer = sphinx.highlighting.lexers["pycon"]
elif not force:
# maybe Python -- try parsing it
if self.try_parse(source):
lexer = sphinx.highlighting.lexers["python"]
else:
lexer = sphinx.highlighting.lexers["none"]
else:
lexer = sphinx.highlighting.lexers["python"]
elif lang in ("python3", "py3") and source.startswith(">>>"):
# for py3, recognize interactive sessions, but do not try parsing...
lexer = sphinx.highlighting.lexers["pycon3"]
elif lang == "guess":
# try:
lexer = sphinx.highlighting.guess_lexer(source)
# except Exception:
# lexer = sphinx.highlighting.lexers['none']
else:
if lang in sphinx.highlighting.lexers:
lexer = sphinx.highlighting.lexers[lang]
else:
try:
lexer = sphinx.highlighting.lexers[
lang
] = sphinx.highlighting.get_lexer_by_name(lang, **opts or {})
except ClassNotFound:
if warn:
warn("Pygments lexer name %r is not known" % lang)
lexer = sphinx.highlighting.lexers["none"]
else:
raise
else:
lexer.add_filter("raiseonerror")
if not isinstance(source, str):
source = source.decode()
# trim doctest options if wanted
if isinstance(lexer, PythonConsoleLexer) and self._bridge.trim_doctest_flags:
source = doctest.blankline_re.sub("", source)
source = doctest.doctestopt_re.sub("", source)
# highlight via Pygments
formatter = self._bridge.get_formatter(**kwargs)
try:
# MODIFIED: replaced by whitespace wrapped call
with whitespace(lexer) as l:
hlsource = highlight(source, l, formatter)
# /MODIFIED
except ErrorToken:
# this is most probably not the selected language,
# so let it pass unhighlighted
# MODIFIED: replaced by whitespace wrapped call
with whitespace(sphinx.highlighting.lexers["none"]) as l:
hlsource = highlight(source, l, formatter)
# /MODIFIED
return hlsource
class whitespace_highlighter:
    """
    Context manager that temporarily swaps the highlighter on a translator for
    a whitespace-rendering variant, depending on the ``whitespace`` property of
    the given node. On exit the original highlighter is restored.
    """

    def __init__(self, translator, node):
        self.translator = translator
        self.node = node
        self._orig_highlighter = translator.highlighter

    def __enter__(self):
        ws = self.node["whitespace"] if "whitespace" in self.node else False
        if ws:
            # node requests whitespace rendering => install the wrapping bridge
            self.translator.highlighter = PygmentsBridgeExt(self._orig_highlighter, ws)
        return self.translator

    def __exit__(self, exc_type, exc_val, exc_tb):
        # always restore whatever highlighter was active before we entered
        self.translator.highlighter = self._orig_highlighter
def visit_literal_block_ext(translator, node):
    """
    Visitor for :class:`literal_block_ext` nodes.

    Temporarily exchanges the translator's highlighter (via
    :class:`whitespace_highlighter`), then delegates to the regular literal
    block visitor; the original highlighter is restored afterwards.
    """
    with whitespace_highlighter(translator, node) as t:
        t.visit_literal_block(node)
def depart_literal_block_ext(translator, node):
    """
    Departure handler for :class:`literal_block_ext` nodes.

    Delegates to the regular literal block departure handler with the
    whitespace-aware highlighter swapped in for the duration of the call.
    """
    with whitespace_highlighter(translator, node) as t:
        t.depart_literal_block(node)
def setup(app):
    """Sphinx extension entry point: register the directive and the node type."""
    # custom directive
    app.add_directive("code-block-ext", CodeBlockExt)
    # custom node type, same visitor pair for all supported builders
    visitors = (visit_literal_block_ext, depart_literal_block_ext)
    app.add_node(literal_block_ext, html=visitors, latex=visitors, text=visitors)
    return {"version": "0.1"}
| 10,273 | Python | .py | 227 | 34.77533 | 120 | 0.620272 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,004 | onlineinclude.py | OctoPrint_OctoPrint/docs/sphinxext/onlineinclude.py | from __future__ import annotations
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "The MIT License <http://opensource.org/licenses/MIT>"
__copyright__ = "Copyright (C) 2015 Gina Häußge - Released under terms of the MIT License"
import codecs
from contextlib import closing
from typing import Any
import requests
from sphinx.directives.code import (
LiteralInclude,
LiteralIncludeReader,
container_wrapper,
dedent_lines,
logger,
nodes,
parselinenos,
)
from sphinx.util.nodes import set_source_info
# module-level cache of downloaded includes, keyed by URL; values are lists of
# lines, kept for the lifetime of the build so each URL is fetched only once
cache = {}
class OnlineIncludeReader(LiteralIncludeReader):
    """
    A :class:`LiteralIncludeReader` that fetches its "file" from a URL instead
    of the local file system, caching the downloaded lines in the module level
    ``cache`` so each URL is only retrieved once per build.
    """

    def read_file(self, filename: str, location: Any = None) -> list[str]:
        """
        Download (or fetch from cache) the document at URL ``filename`` and
        return its content as a list of lines (line endings preserved).

        A ``tab-width`` directive option, if present, is applied to the
        returned lines. Raises :exc:`OSError` or :exc:`UnicodeError` with a
        user friendly message on read or decoding problems, analogous to the
        parent implementation.
        """
        try:
            # single cache lookup instead of `in` + indexing; the needless
            # `global cache` statement of the old version is gone (the dict is
            # only mutated, never rebound)
            lines = cache.get(filename)
            if lines is None:
                with closing(requests.get(filename, stream=True)) as r:
                    r.encoding = self.encoding
                    lines = r.text.splitlines(True)
                cache[filename] = lines
            if "tab-width" in self.options:
                lines = [line.expandtabs(self.options["tab-width"]) for line in lines]
            return lines
        except OSError:
            raise OSError("Include file %r not found or reading it failed" % filename)
        except UnicodeError:
            raise UnicodeError(
                "Encoding %r used for reading included file %r seems to "
                "be wrong, try giving an :encoding: option" % (self.encoding, filename)
            )
class OnlineIncludeDirective(LiteralInclude):
    """
    A variant of :class:`LiteralInclude` whose argument is a URL rather than a
    local path; the content is fetched through :class:`OnlineIncludeReader`.
    """
    def run(self) -> list[nodes.Node]:
        """
        Fetch the URL given as the directive argument and build a literal block
        node from it, honoring the usual literalinclude options (``diff``,
        ``language``, ``linenos``, ``emphasize-lines``, ``caption``, ...).

        Any error during processing is reported as a document warning instead
        of aborting the build.
        """
        document = self.state.document
        if not document.settings.file_insertion_enabled:
            return [
                document.reporter.warning("File insertion disabled", line=self.lineno)
            ]
        # convert options['diff'] to absolute path
        if "diff" in self.options:
            _, path = self.env.relfn2path(self.options["diff"])
            self.options["diff"] = path
        try:
            location = self.state_machine.get_source_and_line(self.lineno)
            url = self.arguments[0]
            reader = OnlineIncludeReader(url, self.options, self.config)
            text, lines = reader.read(location=location)
            retnode = nodes.literal_block(text, text, source=url)
            set_source_info(self, retnode)
            if self.options.get("diff"):  # if diff is set, set udiff
                retnode["language"] = "udiff"
            elif "language" in self.options:
                retnode["language"] = self.options["language"]
            retnode["linenos"] = (
                "linenos" in self.options
                or "lineno-start" in self.options
                or "lineno-match" in self.options
            )
            retnode["classes"] += self.options.get("class", [])
            extra_args = retnode["highlight_args"] = {}
            if "emphasize-lines" in self.options:
                hl_lines = parselinenos(self.options["emphasize-lines"], lines)
                if any(i >= lines for i in hl_lines):
                    logger.warning(
                        "line number spec is out of range(1-%d): %r"
                        % (lines, self.options["emphasize-lines"]),
                        location=location,
                    )
                # parselinenos yields 0-based indices, highlighting wants 1-based
                extra_args["hl_lines"] = [x + 1 for x in hl_lines if x < lines]
            extra_args["linenostart"] = reader.lineno_start
            if "caption" in self.options:
                caption = self.options["caption"] or self.arguments[0]
                retnode = container_wrapper(self, retnode, caption)
            # retnode will be note_implicit_target that is linked from caption and numref.
            # when options['name'] is provided, it should be primary ID.
            self.add_name(retnode)
            return [retnode]
        except Exception as exc:
            return [document.reporter.warning(str(exc), line=self.lineno)]
def visit_onlineinclude(translator, node):
    """Delegate to the translator's standard literal block visitor."""
    visit = translator.visit_literal_block
    visit(node)
def depart_onlineinclude(translator, node):
    """Delegate to the translator's standard literal block departure handler."""
    depart = translator.depart_literal_block
    depart(node)
def setup(app):
    """Sphinx extension entry point: register the onlineinclude directive."""
    app.add_directive("onlineinclude", OnlineIncludeDirective)
    visitors = (visit_onlineinclude, depart_onlineinclude)
    # NOTE(review): this registers the *directive* class as a node type; it
    # looks like a dedicated node class was intended instead -- confirm
    app.add_node(OnlineIncludeDirective, html=visitors, latex=visitors, text=visitors)
| 4,405 | Python | .py | 96 | 34.84375 | 90 | 0.607802 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,005 | test_events.py | OctoPrint_OctoPrint/tests/test_events.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2022 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import ddt
import octoprint.events
@ddt.ddt
class TestEvents(unittest.TestCase):
    """Tests for ``octoprint.events.Events._to_identifier``."""
    @ddt.data(
        # (event name, expected identifier constant)
        ("plugin_example_event", "PLUGIN_EXAMPLE_EVENT"),
        ("plugin_Example_event", "PLUGIN_EXAMPLE_EVENT"),
        ("plugin_ExAmple_Event", "PLUGIN_EX_AMPLE_EVENT"),
        ("plugin_exAmple_EvEnt", "PLUGIN_EX_AMPLE_EV_ENT"),
    )
    @ddt.unpack
    def test_to_identifier(self, value, expected):
        # per the data above: names are upper-cased, camel case humps inside a
        # segment are split with underscores
        actual = octoprint.events.Events._to_identifier(value)
        self.assertEqual(actual, expected)
| 723 | Python | .py | 17 | 37.470588 | 103 | 0.703281 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,006 | test_octoprint_setuptools.py | OctoPrint_OctoPrint/tests/test_octoprint_setuptools.py | import unittest
import ddt
import octoprint_setuptools
@ddt.ddt
class OctoPrintSetuptoolsTest(unittest.TestCase):
    """Tests for ``octoprint_setuptools.has_requirement``."""
    @ddt.data(
        # (requirement to look for, requirements list, expected result);
        # per the data: version specifiers are ignored and matching is
        # case-insensitive
        ("OctoPrint", ["OctoPrint", "flask"], True),
        ("OctoPrint", ["OctoPrint<1.3.7", "flask"], True),
        ("OctoPrint", ["OctoPrint<=1.3.7", "flask"], True),
        ("OctoPrint", ["OctoPrint==1.3.7", "flask"], True),
        ("OctoPrint", ["OctoPrint!=1.3.7", "flask"], True),
        ("OctoPrint", ["OctoPrint>=1.3.7", "flask"], True),
        ("OctoPrint", ["OctoPrint>1.3.7", "flask"], True),
        ("OctoPrint", ["OctoPrint~=1.3.7", "flask"], True),
        ("OctoPrint", ["OctoPrint===1.3.7", "flask"], True),
        ("OctoPrint", ["oCTOpRINT>=1.3.7", "flask"], True),
        ("OctoPrint", ["flask"], False),
        ("OctoPrint", [], False),
        ("OctoPrint", None, False),
        (None, [], False),
    )
    @ddt.unpack
    def test_has_requirement(self, requirement, requirements, expected):
        actual = octoprint_setuptools.has_requirement(requirement, requirements)
        self.assertEqual(actual, expected)
| 1,082 | Python | .py | 25 | 36.36 | 80 | 0.586895 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,007 | test_daemon.py | OctoPrint_OctoPrint/tests/test_daemon.py | import unittest
from unittest import mock
import octoprint.daemon
class ExpectedExit(BaseException):
    """Raised by mocked ``sys.exit`` calls so tests can assert an exit happened."""
class DaemonTest(unittest.TestCase):
    """
    Tests for :class:`octoprint.daemon.Daemon` lifecycle handling (fork/daemonize,
    start/stop/restart/status and pidfile management), exercised through a
    subclass whose abstract hooks delegate to mocks.
    """
    def setUp(self):
        """Create a Daemon subclass whose run/echo/error delegate to mocks."""
        run_method = mock.MagicMock()
        echo_method = mock.MagicMock()
        error_method = mock.MagicMock()
        class TestDaemon(octoprint.daemon.Daemon):
            def run(self):
                run_method()
            def echo(self, line):
                echo_method(line)
            def error(self, line):
                error_method(line)
        self.pidfile = "/my/pid/file"
        self.daemon = TestDaemon(self.pidfile)
        self.run_method = run_method
        self.echo_method = echo_method
        self.error_method = error_method
    @mock.patch("os.fork", create=True)
    @mock.patch("os.chdir")
    @mock.patch("os.setsid", create=True)
    @mock.patch("os.umask")
    @mock.patch("sys.exit")
    def test_double_fork(self, mock_exit, mock_umask, mock_setsid, mock_chdir, mock_fork):
        """_double_fork forks twice, exiting both parents and detaching the session."""
        # setup
        pid1 = 1234
        pid2 = 2345
        mock_fork.side_effect = [pid1, pid2]
        # test
        self.daemon._double_fork()
        # assert
        self.assertListEqual(mock_fork.mock_calls, [mock.call(), mock.call()])
        self.assertListEqual(mock_exit.mock_calls, [mock.call(0), mock.call(0)])
        mock_chdir.assert_called_once_with("/")
        mock_setsid.assert_called_once_with()
        mock_umask.assert_called_once_with(0o002)
    @mock.patch("os.fork", create=True)
    @mock.patch("sys.exit")
    def test_double_fork_failed_first(self, mock_exit, mock_fork):
        """A failing first fork reports an error and exits with status 1."""
        # setup
        mock_fork.side_effect = OSError()
        mock_exit.side_effect = ExpectedExit()
        # test
        try:
            self.daemon._double_fork()
            self.fail("Expected an exit")
        except ExpectedExit:
            pass
        # assert
        self.assertListEqual(mock_fork.mock_calls, [mock.call()])
        self.assertListEqual(mock_exit.mock_calls, [mock.call(1)])
        self.assertEqual(len(self.error_method.mock_calls), 1)
    @mock.patch("os.fork", create=True)
    @mock.patch("os.chdir")
    @mock.patch("os.setsid", create=True)
    @mock.patch("os.umask")
    @mock.patch("sys.exit")
    def test_double_fork_failed_second(
        self, mock_exit, mock_umask, mock_setsid, mock_chdir, mock_fork
    ):
        """A failing second fork reports an error and exits with status 1."""
        # setup
        mock_fork.side_effect = [1234, OSError()]
        mock_exit.side_effect = [None, ExpectedExit()]
        # test
        try:
            self.daemon._double_fork()
            self.fail("Expected an exit")
        except ExpectedExit:
            pass
        # assert
        self.assertEqual(mock_fork.call_count, 2)
        self.assertListEqual(mock_exit.mock_calls, [mock.call(0), mock.call(1)])
        self.assertEqual(self.error_method.call_count, 1)
        mock_chdir.assert_called_once_with("/")
        mock_setsid.assert_called_once_with()
        mock_umask.assert_called_once_with(0o002)
    @mock.patch("sys.stdin")
    @mock.patch("sys.stdout")
    @mock.patch("sys.stderr")
    @mock.patch("os.devnull")
    @mock.patch("builtins.open")
    @mock.patch("os.dup2")
    def test_redirect_io(
        self, mock_dup2, mock_open, mock_devnull, mock_stderr, mock_stdout, mock_stdin
    ):
        """_redirect_io flushes stdout/stderr and rebinds all three std streams to os.devnull."""
        # setup
        mock_stdin.fileno.return_value = "stdin"
        mock_stdout.fileno.return_value = "stdout"
        mock_stderr.fileno.return_value = "stderr"
        new_stdin = mock.MagicMock()
        new_stdout = mock.MagicMock()
        new_stderr = mock.MagicMock()
        new_stdin.fileno.return_value = "new_stdin"
        new_stdout.fileno.return_value = "new_stdout"
        new_stderr.fileno.return_value = "new_stderr"
        mock_open.side_effect = [new_stdin, new_stdout, new_stderr]
        # test
        self.daemon._redirect_io()
        # assert
        mock_stdout.flush.assert_called_once_with()
        mock_stderr.flush.assert_called_once_with()
        self.assertListEqual(
            mock_open.mock_calls,
            [
                mock.call(mock_devnull, encoding="utf-8"),
                mock.call(mock_devnull, "a+", encoding="utf-8"),
                mock.call(mock_devnull, "a+", encoding="utf-8"),
            ],
        )
        self.assertListEqual(
            mock_dup2.mock_calls,
            [
                mock.call("new_stdin", "stdin"),
                mock.call("new_stdout", "stdout"),
                mock.call("new_stderr", "stderr"),
            ],
        )
    @mock.patch("os.getpid")
    @mock.patch("signal.signal")
    def test_daemonize(self, mock_signal, mock_getpid):
        """start() double-forks, redirects IO and records the current pid."""
        # setup
        self.daemon._double_fork = mock.MagicMock()
        self.daemon._redirect_io = mock.MagicMock()
        self.daemon.set_pid = mock.MagicMock()
        pid = 1234
        mock_getpid.return_value = pid
        # test
        self.daemon.start()
        # assert
        self.daemon._double_fork.assert_called_once_with()
        self.daemon._redirect_io.assert_called_once_with()
        self.daemon.set_pid.assert_called_once_with(str(pid))
    def test_terminated(self):
        """terminated() removes the pidfile."""
        # setup
        self.daemon.remove_pidfile = mock.MagicMock()
        # test
        self.daemon.terminated()
        # assert
        self.daemon.remove_pidfile.assert_called_once_with()
    def test_start(self):
        """start() daemonizes and calls run() when no pidfile exists yet."""
        # setup
        self.daemon._daemonize = mock.MagicMock()
        self.daemon.get_pid = mock.MagicMock()
        self.daemon.get_pid.return_value = None
        # test
        self.daemon.start()
        # assert
        self.daemon._daemonize.assert_called_once_with()
        self.daemon.get_pid.assert_called_once_with()
        self.echo_method.assert_called_once_with("Starting daemon...")
        self.assertTrue(self.run_method.called)
    @mock.patch("sys.exit")
    def test_start_running(self, mock_exit):
        """start() reports an error and exits if a pidfile already exists."""
        # setup
        pid = "1234"
        self.daemon.get_pid = mock.MagicMock()
        self.daemon.get_pid.return_value = pid
        mock_exit.side_effect = ExpectedExit()
        # test
        try:
            self.daemon.start()
            self.fail("Expected an exit")
        except ExpectedExit:
            pass
        # assert
        self.daemon.get_pid.assert_called_once_with()
        self.assertTrue(self.error_method.called)
        mock_exit.assert_called_once_with(1)
    @mock.patch("os.kill")
    @mock.patch("time.sleep")
    def test_stop(self, mock_sleep, mock_kill):
        """stop() SIGTERMs the process until it is gone, then removes the pidfile."""
        import signal
        # setup
        pid = "1234"
        self.daemon.get_pid = mock.MagicMock()
        self.daemon.get_pid.return_value = pid
        self.daemon.remove_pidfile = mock.MagicMock()
        # second kill raising "No such process" signals the process is gone
        mock_kill.side_effect = [None, OSError("No such process")]
        # test
        self.daemon.stop()
        # assert
        self.daemon.get_pid.assert_called_once_with()
        self.assertListEqual(
            mock_kill.mock_calls,
            [mock.call(pid, signal.SIGTERM), mock.call(pid, signal.SIGTERM)],
        )
        mock_sleep.assert_called_once_with(0.1)
        self.daemon.remove_pidfile.assert_called_once_with()
    @mock.patch("sys.exit")
    def test_stop_not_running(self, mock_exit):
        """stop() reports an error and exits if there is no pidfile."""
        # setup
        self.daemon.get_pid = mock.MagicMock()
        self.daemon.get_pid.return_value = None
        mock_exit.side_effect = ExpectedExit()
        # test
        try:
            self.daemon.stop()
            self.fail("Expected an exit")
        except ExpectedExit:
            pass
        # assert
        self.daemon.get_pid.assert_called_once_with()
        self.assertEqual(self.error_method.call_count, 1)
        mock_exit.assert_called_once_with(1)
    @mock.patch("sys.exit")
    def test_stop_not_running_no_error(self, mock_exit):
        """stop(check_running=False) is a silent no-op without a pidfile."""
        # setup
        self.daemon.get_pid = mock.MagicMock()
        self.daemon.get_pid.return_value = None
        # test
        self.daemon.stop(check_running=False)
        # assert
        self.daemon.get_pid.assert_called_once_with()
        self.assertFalse(mock_exit.called)
    @mock.patch("os.kill")
    @mock.patch("sys.exit")
    def test_stop_unknown_error(self, mock_exit, mock_kill):
        """Unexpected os.kill errors during stop() are reported and exit with 1."""
        # setup
        pid = "1234"
        self.daemon.get_pid = mock.MagicMock()
        self.daemon.get_pid.return_value = pid
        mock_exit.side_effect = ExpectedExit()
        mock_kill.side_effect = OSError("Unknown")
        # test
        try:
            self.daemon.stop()
            self.fail("Expected an exit")
        except ExpectedExit:
            pass
        # assert
        self.assertTrue(self.error_method.called)
        mock_exit.assert_called_once_with(1)
    def test_restart(self):
        """restart() stops (without running-check) and then starts again."""
        # setup
        self.daemon.start = mock.MagicMock()
        self.daemon.stop = mock.MagicMock()
        # test
        self.daemon.restart()
        # assert
        self.daemon.stop.assert_called_once_with(check_running=False)
        self.daemon.start.assert_called_once_with()
    def test_status_running(self):
        """status() echoes that the daemon is running."""
        # setup
        self.daemon.is_running = mock.MagicMock()
        self.daemon.is_running.return_value = True
        # test
        self.daemon.status()
        # assert
        self.echo_method.assert_called_once_with("Daemon is running")
    def test_status_not_running(self):
        """status() echoes that the daemon is not running."""
        # setup
        self.daemon.is_running = mock.MagicMock()
        self.daemon.is_running.return_value = False
        # test
        self.daemon.status()
        # assert
        self.echo_method.assert_called_once_with("Daemon is not running")
    @mock.patch("os.kill")
    def test_is_running_true(self, mock_kill):
        """is_running() is True when the recorded pid responds to signal 0."""
        # setup
        pid = "1234"
        self.daemon.get_pid = mock.MagicMock()
        self.daemon.get_pid.return_value = pid
        self.daemon.remove_pidfile = mock.MagicMock()
        # test
        result = self.daemon.is_running()
        # assert
        self.assertTrue(result)
        mock_kill.assert_called_once_with(pid, 0)
        self.assertFalse(self.daemon.remove_pidfile.called)
        self.assertFalse(self.error_method.called)
    def test_is_running_false_no_pid(self):
        """is_running() is False when no pidfile exists."""
        # setup
        self.daemon.get_pid = mock.MagicMock()
        self.daemon.get_pid.return_value = None
        # test
        result = self.daemon.is_running()
        # assert
        self.assertFalse(result)
    @mock.patch("os.kill")
    def test_is_running_false_pidfile_removed(self, mock_kill):
        """A stale pidfile is cleaned up when the recorded process is gone."""
        # setup
        pid = "1234"
        self.daemon.get_pid = mock.MagicMock()
        self.daemon.get_pid.return_value = pid
        mock_kill.side_effect = OSError()
        self.daemon.remove_pidfile = mock.MagicMock()
        # test
        result = self.daemon.is_running()
        # assert
        self.assertFalse(result)
        mock_kill.assert_called_once_with(pid, 0)
        self.daemon.remove_pidfile.assert_called_once_with()
        self.assertFalse(self.error_method.called)
    @mock.patch("os.kill")
    def test_is_running_false_pidfile_error(self, mock_kill):
        """Errors while removing a stale pidfile are reported via error()."""
        # setup
        pid = "1234"
        self.daemon.get_pid = mock.MagicMock()
        self.daemon.get_pid.return_value = pid
        mock_kill.side_effect = OSError()
        self.daemon.remove_pidfile = mock.MagicMock()
        self.daemon.remove_pidfile.side_effect = IOError()
        # test
        result = self.daemon.is_running()
        # assert
        self.assertFalse(result)
        mock_kill.assert_called_once_with(pid, 0)
        self.daemon.remove_pidfile.assert_called_once_with()
        self.assertTrue(self.error_method.called)
    def test_get_pid(self):
        """get_pid() parses the integer pid from the pidfile."""
        # setup
        pid = 1234
        # test
        with mock.patch(
            "builtins.open",
            mock.mock_open(read_data=f"{pid}\n"),
            create=True,
        ) as m:
            result = self.daemon.get_pid()
        # assert
        self.assertEqual(result, pid)
        m.assert_called_once_with(self.pidfile, encoding="utf-8")
    def test_get_pid_ioerror(self):
        """get_pid() returns None if the pidfile cannot be read."""
        # setup
        handle = mock.MagicMock()
        handle.__enter__.side_effect = IOError()
        # test
        with mock.patch("builtins.open", mock.mock_open(), create=True) as m:
            result = self.daemon.get_pid()
        # assert
        self.assertIsNone(result)
        m.assert_called_once_with(self.pidfile, encoding="utf-8")
    def test_get_pid_valueerror(self):
        """get_pid() returns None if the pidfile content is not an integer."""
        # setup
        pid = "not an integer"
        # test
        with mock.patch(
            "builtins.open",
            mock.mock_open(read_data=f"{pid}\n"),
            create=True,
        ) as m:
            result = self.daemon.get_pid()
        # assert
        self.assertIsNone(result)
        m.assert_called_once_with(self.pidfile, encoding="utf-8")
    def test_set_pid(self):
        """set_pid() writes the pid (string) followed by a newline."""
        # setup
        pid = "1234"
        # test
        with mock.patch("builtins.open", mock.mock_open(), create=True) as m:
            self.daemon.set_pid(pid)
        # assert
        m.assert_called_once_with(self.pidfile, "w+", encoding="utf-8")
        handle = m()
        handle.write.assert_called_once_with(f"{pid}\n")
    def test_set_pid_int(self):
        """set_pid() also accepts an integer pid."""
        # setup
        pid = 1234
        # test
        with mock.patch("builtins.open", mock.mock_open(), create=True) as m:
            self.daemon.set_pid(pid)
        # assert
        m.assert_called_once_with(self.pidfile, "w+", encoding="utf-8")
        handle = m()
        handle.write.assert_called_once_with(f"{pid}\n")
    @mock.patch("os.path.isfile")
    @mock.patch("os.remove")
    def test_remove_pidfile_exists(self, mock_remove, mock_isfile):
        """remove_pidfile() deletes an existing pidfile."""
        # setup
        mock_isfile.return_value = True
        # test
        self.daemon.remove_pidfile()
        # assert
        mock_isfile.assert_called_once_with(self.pidfile)
        mock_remove.assert_called_once_with(self.pidfile)
    @mock.patch("os.path.isfile")
    @mock.patch("os.remove")
    def test_remove_pidfile_doesnt_exist(self, mock_remove, mock_isfile):
        """remove_pidfile() does nothing if there is no pidfile."""
        # setup
        mock_isfile.return_value = False
        # test
        self.daemon.remove_pidfile()
        # assert
        mock_isfile.assert_called_once_with(self.pidfile)
        self.assertFalse(mock_remove.called)
| 14,295 | Python | .py | 390 | 27.589744 | 90 | 0.60142 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,008 | test_script.py | OctoPrint_OctoPrint/tests/printer/test_script.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2021 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
from unittest.mock import ANY, MagicMock, call, patch
from octoprint.printer.standard import Printer
class ScriptsTestCase(unittest.TestCase):
    """
    Tests for the event side effects of :meth:`octoprint.printer.standard.Printer.script`:
    event names derived from the script name, event ordering, and payload extraction
    from the script context.
    """
    def setUp(self):
        """Build a Printer with comm, event manager, plugin manager and settings mocked out."""
        # mock comm
        self.comm = MagicMock()
        # mock event manager
        self.event_manager = MagicMock()
        self.event_manager_patcher = patch("octoprint.printer.standard.eventManager")
        self.event_manager_getter = self.event_manager_patcher.start()
        self.event_manager_getter.return_value = self.event_manager
        # mock plugin manager
        self.plugin_manager = MagicMock()
        self.plugin_manager.get_hooks.return_value = {}
        self.plugin_manager_patcher = patch("octoprint.printer.standard.plugin_manager")
        self.plugin_manager_getter = self.plugin_manager_patcher.start()
        self.plugin_manager_getter.return_value = self.plugin_manager
        # mock settings
        self.settings = MagicMock()
        self.settings.getInt.return_value = 1
        self.settings.getBoolean.return_value = False
        self.settings_patcher = patch("octoprint.printer.standard.settings")
        self.settings_getter = self.settings_patcher.start()
        self.settings_getter.return_value = self.settings
        self.printer = Printer(MagicMock(), MagicMock(), MagicMock())
        self.printer._comm = self.comm
    def tearDown(self):
        """Undo all patchers installed in setUp."""
        self.settings_patcher.stop()
        self.plugin_manager_patcher.stop()
        self.event_manager_patcher.stop()
    def test_event_name(self):
        """The script name is title-cased into the fired event name."""
        self.printer.script("testEvent")
        self.event_manager.fire.assert_any_call("GcodeScriptTestEventRunning", ANY)
    def test_event_order(self):
        """The Running event is fired before the Finished event."""
        self.printer.script("EventOrder")
        expected_order = [
            call("GcodeScriptEventOrderRunning", ANY),
            call("GcodeScriptEventOrderFinished", ANY),
        ]
        self.event_manager.fire.assert_has_calls(expected_order)
    def test_payload_handling(self):
        """The event payload comes from context["event"]; malformed contexts yield None."""
        # fixed typo in the local name: expected_paylod -> expected_payload
        expected_payload = {"payloadKey": "payloadValue"}
        context = {"event": expected_payload}
        self.printer.script("GetPayload", context)
        self.event_manager.fire.assert_called_with(ANY, expected_payload)
        # a non-dict context is ignored
        self.printer.script("WrongPayload", [])
        self.event_manager.fire.assert_called_with(ANY, None)
        # a dict context without an "event" key is ignored as well
        self.printer.script("WrongContext", {"noEvent": {}})
        self.event_manager.fire.assert_called_with(ANY, None)
| 2,653 | Python | .py | 52 | 42.711538 | 103 | 0.691114 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,009 | test_estimation.py | OctoPrint_OctoPrint/tests/printer/test_estimation.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
from ddt import data, ddt, unpack
from octoprint.printer.estimation import TimeEstimationHelper
@ddt
class EstimationTestCase(unittest.TestCase):
    """Tests for :class:`octoprint.printer.estimation.TimeEstimationHelper` statistics."""
    def setUp(self):
        # dynamically create and instantiate a TimeEstimationHelper subclass
        # with small threshold/window/countdown class attributes so the test
        # data sets can stay compact
        self.estimation_helper = type(TimeEstimationHelper)(
            TimeEstimationHelper.__name__,
            (TimeEstimationHelper,),
            {"STABLE_THRESHOLD": 0.1, "STABLE_ROLLING_WINDOW": 3, "STABLE_COUNTDOWN": 1},
        )()
    @data(
        # (sequence of estimates to feed, expected average over all estimates)
        ((1.0, 2.0, 3.0, 4.0, 5.0), 3.0),
        ((1.0, 2.0, 0.0, 1.0, 2.0), 1.2),
        ((1.0, -2.0, -1.0, -2.0, 3.0), -0.2),
    )
    @unpack
    def test_average_total(self, estimates, expected):
        """average_total is the plain mean of all estimates seen so far."""
        for estimate in estimates:
            self.estimation_helper.update(estimate)
        self.assertEqual(self.estimation_helper.average_total, expected)
    @data(
        ((1.0, 2.0), -1),  # not enough values, have 1, need 3
        ((1.0, 2.0, 3.0), -1),  # not enough values, have 2, need 3
        (
            (1.0, 2.0, 3.0, 4.0),
            0.5,
        ),  # average totals: 1.0, 1.5, 2.0, 2.5 => (3 * 0.5 / 3 = 0.5
        (
            (1.0, 2.0, 3.0, 4.0, 5.0),
            0.5,
        ),  # average totals: 1.0, 1.5, 2.0, 2.5, 3.0 => (0.5 + 0.5 + 0.5) / 3 = 0.5
        (
            (1.0, 2.0, 0.0, 1.0, 2.0),
            0.7 / 3,
        ),  # average totals: 1.0, 1.5, 1.0, 1.0, 1.2 => (0.5 + 0.0 + 0.2) / 3 = 0.7 / 3
    )
    @unpack
    def test_average_distance(self, estimates, expected):
        """average_distance averages the movement of average_total over the rolling window."""
        for estimate in estimates:
            self.estimation_helper.update(estimate)
        self.assertEqual(self.estimation_helper.average_distance, expected)
    @data(
        # (sequence of estimates, expected rolling average; -1 = not enough data)
        ((1.0, 1.0), -1),
        ((1.0, 1.0, 1.0), 1.0),
        ((1.0, 2.0, 3.0, 4.0, 5.0), 4.0),
    )
    @unpack
    def test_average_total_rolling(self, estimates, expected):
        """average_total_rolling is the mean over the last STABLE_ROLLING_WINDOW estimates."""
        for estimate in estimates:
            self.estimation_helper.update(estimate)
        self.assertEqual(self.estimation_helper.average_total_rolling, expected)
    @data(
        (
            (1.0, 1.0, 1.0, 1.0),
            False,
        ),  # average totals: 1.0, 1.0, 1.0, 1.0 => 3.0 / 3 = 1.0
        (
            (1.0, 1.0, 1.0, 1.0, 1.0),
            True,
        ),  # average totals: 1.0, 1.0, 1.0, 1.0, 1.0 => 0.0 / 3 = 0.0
        (
            (1.0, 2.0, 3.0, 4.0, 5.0),
            False,
        ),  # average totals: 1.0, 1.5, 2.0, 2.5, 3.0 => 1.5 / 3 = 0.5
        (
            (0.0, 0.09, 0.18, 0.27, 0.36),
            True,
        ),  # average totals: 0.0, 0.045, 0.09, 0.135, 0.18 => (0.045 + 0.045 + 0.045) / 3 = 0.045
    )
    @unpack
    def test_is_stable(self, estimates, expected):
        """is_stable() flips to True once the averages drift less than STABLE_THRESHOLD."""
        for estimate in estimates:
            self.estimation_helper.update(estimate)
        self.assertEqual(self.estimation_helper.is_stable(), expected)
| 3,049 | Python | .py | 78 | 30.5 | 103 | 0.526565 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,010 | __init__.py | OctoPrint_OctoPrint/tests/printer/__init__.py | """
Unit tests for ``octoprint.printer``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 285 | Python | .py | 6 | 46 | 103 | 0.721014 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,011 | __init__.py | OctoPrint_OctoPrint/tests/settings/__init__.py | """
Unit tests for ``octoprint.settings``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 286 | Python | .py | 6 | 46.166667 | 103 | 0.722022 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,012 | test_settings.py | OctoPrint_OctoPrint/tests/settings/test_settings.py | """
Tests for OctoPrint's Settings class
.. todo::
* tests for base folder management
* tests for script management
* tests for settings migration
"""
import contextlib
import hashlib
import os
import re
import shutil
import tempfile
import time
import unittest
import unittest.mock
import ddt
import pytest
import yaml
import octoprint.settings
from octoprint.util import dict_merge
# directory containing the YAML fixtures (config/overlay/defaults) used below
base_path = os.path.join(os.path.dirname(__file__), "_files")
def _load_yaml(fname):
    """Read and parse the YAML file at ``fname``, returning its data."""
    with open(fname, encoding="utf-8") as fh:
        return yaml.safe_load(fh)
def _dump_yaml(fname, config):
    """Serialize ``config`` as YAML into the file at ``fname``."""
    with open(fname, "w", encoding="utf-8") as fh:
        yaml.safe_dump(config, fh)
@ddt.ddt
class SettingsTest(unittest.TestCase):
    def setUp(self):
        """Load the YAML fixtures and precompute the expected effective config."""
        self.config_path = os.path.realpath(os.path.join(base_path, "config.yaml"))
        self.overlay_path = os.path.realpath(os.path.join(base_path, "overlay.yaml"))
        self.defaults_path = os.path.realpath(os.path.join(base_path, "defaults.yaml"))
        self.config = _load_yaml(self.config_path)
        self.overlay = _load_yaml(self.overlay_path)
        self.defaults = _load_yaml(self.defaults_path)
        # effective config = defaults, overridden by overlay, overridden by config
        self.expected_effective = dict_merge(
            dict_merge(self.defaults, self.overlay), self.config
        )
        self.expected_effective[octoprint.settings.Settings.OVERLAY_KEY] = "overlay"
    def test_basedir_initialization(self):
        """A fresh Settings instance creates the basedir, a config.yaml and an API key."""
        with self.mocked_basedir() as basedir:
            # construct settings
            settings = octoprint.settings.Settings()
            # verify
            self.assertTrue(os.path.isdir(basedir))
            self.assertTrue(os.path.isfile(os.path.join(basedir, "config.yaml")))
            self.assertIsNotNone(settings.get(["api", "key"]))
    def test_basedir_folder_creation(self):
        """getBaseFolder creates and returns the expected subfolders of the basedir."""
        with self.mocked_basedir() as basedir:
            # construct settings
            settings = octoprint.settings.Settings()
            expected_upload_folder = os.path.join(basedir, "uploads")
            expected_timelapse_folder = os.path.join(basedir, "timelapse")
            expected_timelapse_tmp_folder = os.path.join(basedir, "timelapse", "tmp")
            # test
            upload_folder = settings.getBaseFolder("uploads")
            timelapse_folder = settings.getBaseFolder("timelapse")
            timelapse_tmp_folder = settings.getBaseFolder("timelapse_tmp")
            for folder, expected in (
                (upload_folder, expected_upload_folder),
                (timelapse_folder, expected_timelapse_folder),
                (timelapse_tmp_folder, expected_timelapse_tmp_folder),
            ):
                self.assertIsNotNone(folder)
                self.assertEqual(folder, expected)
                self.assertTrue(os.path.isdir(folder))
    def test_basedir_initialization_with_custom_basedir(self):
        """An explicitly passed basedir is used instead of the default one."""
        with self.mocked_basedir() as default_basedir:
            my_basedir = None
            try:
                my_basedir = tempfile.mkdtemp("octoprint-settings-test-custom")
                self.assertNotEqual(my_basedir, default_basedir)
                octoprint.settings.Settings(basedir=my_basedir)
                # config.yaml must end up in the custom basedir, not the default one
                self.assertFalse(
                    os.path.isfile(os.path.join(default_basedir, "config.yaml"))
                )
                self.assertTrue(os.path.isfile(os.path.join(my_basedir, "config.yaml")))
            finally:
                try:
                    shutil.rmtree(my_basedir)
                except Exception:
                    self.fail("Could not remove temporary custom basedir")
    def test_basedir_initialization_with_custom_config(self):
        """A custom configfile is used while folders still derive from the default basedir."""
        config_path = os.path.realpath(
            os.path.join(os.path.dirname(__file__), "_files", "config.yaml")
        )
        with self.mocked_basedir() as basedir:
            my_configdir = None
            try:
                my_configdir = tempfile.mkdtemp("octoprint-settings-test-custom")
                my_configfile = os.path.join(my_configdir, "config.yaml")
                shutil.copy(config_path, my_configfile)
                expected_upload_folder = os.path.join(basedir, "uploads")
                settings = octoprint.settings.Settings(configfile=my_configfile)
                upload_folder = settings.getBaseFolder("uploads")
                # no config.yaml may be created in the default basedir
                self.assertFalse(os.path.isfile(os.path.join(basedir, "config.yaml")))
                self.assertTrue(os.path.isfile(my_configfile))
                self.assertIsNotNone(upload_folder)
                self.assertTrue(os.path.isdir(upload_folder))
                self.assertEqual(expected_upload_folder, upload_folder)
            finally:
                try:
                    shutil.rmtree(my_configdir)
                except Exception:
                    self.fail("Could not remove temporary custom basedir")
    ##~~ regexes
    def test_should_have_regex_filters(self):
        """The default settings must ship at least one terminal filter."""
        # we don't want the mocked_config, because we're testing the actual value.
        # with self.mocked_config():
        filters = octoprint.settings.Settings().get(["terminalFilters"])
        # we *should* have at least three, but we'll ensure there's at least one as a sanity check.
        self.assertGreater(len(filters), 0)
    def test_should_have_suppress_temperature_regex(self):
        """Exactly one shipped filter must be the temperature suppressor."""
        # we don't want the mocked_config, because we're testing the actual value.
        # with self.mocked_config():
        filters = octoprint.settings.Settings().get(["terminalFilters"])
        temperature_regex_filters = [
            x for x in filters if x.get("name") == "Suppress temperature messages"
        ]
        self.assertEqual(len(temperature_regex_filters), 1)
        # we know there's a 'name' by now, so just ensure we have the regex key
        temperature_regex_filter = temperature_regex_filters[0]
        self.assertIn("regex", temperature_regex_filter)
    def test_temperature_regex_should_not_match(self):
        """random entries that aren't temperature regex entries"""
        # we don't want the mocked_config, because we're testing the actual value.
        # with self.mocked_config():
        bad_terminal_entries = [
            "Send: N71667 G1 X163.151 Y35.424 E0.02043*83",
            "Send: N85343 G1 Z29.880 F10800.000*15",
            "Recv: ok",
            "Recv: FIRMWARE_NAME:Marlin 1.1.7-C2 (Github) SOURCE_CODE_URL:https://github.com/Robo3D/Marlin-C2 PROTOCOL_VERSION:C2 MACHINE_TYPE:RoboC2 EXTRUDER_COUNT:1 UUID:cede2a2f-41a2-4748-9b12-c55c62f367ff EMERGENCY_CODES:M108,M112,M410",
        ]
        filters = octoprint.settings.Settings().get(["terminalFilters"])
        # pull the regex of the (verified above) temperature suppressor filter
        temperature_pattern = [
            x for x in filters if x.get("name") == "Suppress temperature messages"
        ][0]["regex"]
        matcher = re.compile(temperature_pattern)
        for terminal_string in bad_terminal_entries:
            match_result = matcher.match(terminal_string)
            # can switch to assertIsNone after 3.x upgrade.
            self.assertFalse(
                match_result,
                f"string matched and it shouldn't have: {terminal_string!r}",
            )
    def test_temperature_regex_matches(self):
        """Typical real-world temperature report lines must match the filter."""
        # we don't want the mocked_config, because we're testing the actual value.
        # with self.mocked_config():
        common_terminal_entries = [
            "Send: M105",
            "Send: N123 M105*456",
            "Recv: ok N5993 P15 B15 T:59.2 /0.0 B:31.8 /0.0 T0:59.2 /0.0 @:0 B@:100:", # monoprice mini delta
            "Recv: ok T:210.3 /210.0 B:60.3 /60.0 T0:210.3 /210.0 @:79 B@:0 P:35.9 A:40.0", # Prusa mk3
            "Recv: T:210.3 /210.0",
        ]
        filters = octoprint.settings.Settings().get(["terminalFilters"])
        temperature_pattern = [
            x for x in filters if x.get("name") == "Suppress temperature messages"
        ][0]["regex"]
        matcher = re.compile(temperature_pattern)
        for terminal_string in common_terminal_entries:
            match_result = matcher.match(terminal_string)
            # can switch to assertIsNotNone after 3.x upgrade.
            self.assertTrue(
                match_result,
                f"string did not match and it should have: {terminal_string!r}",
            )
    ##~~ test getters
    def test_get(self):
        """get must return the value stored in the mocked config."""
        with self.settings() as settings:
            expected_api_key = "test"
            api_key = settings.get(["api", "key"])
            self.assertIsNotNone(api_key)
            self.assertEqual(api_key, expected_api_key)
    def test_get_int(self):
        """get must return ints from the config as-is."""
        with self.settings() as settings:
            expected_server_port = 8080
            server_port = settings.get(["server", "port"])
            self.assertIsNotNone(server_port)
            self.assertEqual(server_port, expected_server_port)
    def test_get_int_converted(self):
        """getInt must coerce convertible values to int."""
        with self.settings() as settings:
            value = settings.getInt(["serial", "timeout", "connection"])
            self.assertEqual(5, value)
    def test_get_int_invalid(self):
        """getInt must return None for values that can't be converted."""
        with self.settings() as settings:
            value = settings.getInt(["server", "host"])
            self.assertIsNone(value)
    def test_get_float(self):
        """get must return floats from the config as-is."""
        with self.settings() as settings:
            expected_serial_timeout = 1.0
            serial_timeout = settings.get(["serial", "timeout", "detection"])
            self.assertIsNotNone(serial_timeout)
            self.assertEqual(serial_timeout, expected_serial_timeout)
    def test_get_float_converted(self):
        """getFloat must coerce convertible values to float."""
        with self.settings() as settings:
            value = settings.getFloat(["serial", "timeout", "connection"])
            self.assertEqual(5.0, value)
    def test_get_float_invalid(self):
        """getFloat must return None for values that can't be converted."""
        with self.settings() as settings:
            value = settings.getFloat(["server", "host"])
            self.assertIsNone(value)
    def test_get_boolean(self):
        """get must return booleans from the config as-is."""
        with self.settings() as settings:
            value = settings.get(["devel", "virtualPrinter", "enabled"])
            self.assertTrue(value)
    def test_get_list(self):
        """get must return list values unchanged."""
        with self.settings() as settings:
            data = settings.get(["serial", "additionalPorts"])
            self.assertEqual(len(data), 2)
            self.assertListEqual(["/dev/portA", "/dev/portB"], data)
    def test_get_map(self):
        """get without merged must return only the locally configured subtree."""
        with self.settings() as settings:
            data = settings.get(["devel", "virtualPrinter"])
            self.assertDictEqual(self.config["devel"]["virtualPrinter"], data)
    def test_get_map_merged(self):
        """get with merged=True must merge overlay values into the config."""
        with self.settings() as settings:
            data = settings.get(["devel", "virtualPrinter"], merged=True)
            expected = dict_merge(
                self.overlay["devel"]["virtualPrinter"],
                self.config["devel"]["virtualPrinter"],
            )
            self.assertEqual(expected, data)
    def test_get_multiple(self):
        """A nested list in the path must fan out into a list of values."""
        with self.settings() as settings:
            data = settings.get(["serial", ["timeout", "additionalPorts"]])
            self.assertIsInstance(data, list)
            self.assertEqual(len(data), 2)
            self.assertIsInstance(data[0], dict)
            self.assertIsInstance(data[1], list)
    def test_get_multiple_asdict(self):
        """With asdict=True the fan-out must be keyed by the sub-path names."""
        with self.settings() as settings:
            data = settings.get(["serial", ["timeout", "additionalPorts"]], asdict=True)
            self.assertIsInstance(data, dict)
            self.assertEqual(len(data), 2)
            self.assertTrue("timeout" in data)
            self.assertTrue("additionalPorts" in data)
    def test_get_invalid(self):
        """A non-existing path must yield None by default."""
        with self.settings() as settings:
            value = settings.get(["i", "do", "not", "exist"])
            self.assertIsNone(value)
def test_get_invalid_error(self):
with self.settings() as settings:
try:
settings.get(["i", "do", "not", "exist"], error_on_path=True)
self.fail("Expected NoSuchSettingsPath")
except octoprint.settings.NoSuchSettingsPath:
pass
    def test_get_custom_config(self):
        """A config dict passed to get must override the stored config."""
        with self.settings() as settings:
            server_port = settings.getInt(
                ["server", "port"], config={"server": {"port": 9090}}
            )
            self.assertEqual(9090, server_port)
    def test_get_custom_defaults(self):
        """A defaults dict passed to get must override the built-in defaults."""
        with self.settings() as settings:
            api_enabled = settings.getBoolean(
                ["api", "enabled"], defaults={"api": {"enabled": False}}
            )
            self.assertFalse(api_enabled)
def test_get_empty_path(self):
with self.settings() as settings:
self.assertIsNone(settings.get([]))
try:
settings.get([], error_on_path=True)
self.fail("Expected NoSuchSettingsPath")
except octoprint.settings.NoSuchSettingsPath:
pass
    def test_set(self):
        """set must write the value into the local config dict."""
        with self.settings() as settings:
            settings.set(["server", "host"], "127.0.0.1")
            self.assertEqual("127.0.0.1", settings._config["server"]["host"])
    def test_set_int(self):
        """setInt must store ints as-is."""
        with self.settings() as settings:
            settings.setInt(["server", "port"], 8181)
            self.assertEqual(8181, settings._config["server"]["port"])
    def test_set_int_convert(self):
        """setInt must convert numeric strings to int before storing."""
        with self.settings() as settings:
            settings.setInt(["server", "port"], "8181")
            self.assertEqual(8181, settings._config["server"]["port"])
    def test_set_float(self):
        """setFloat must store floats as-is."""
        with self.settings() as settings:
            settings.setFloat(["serial", "timeout", "detection"], 1.2)
            self.assertEqual(1.2, settings._config["serial"]["timeout"]["detection"])
    def test_set_float_convert(self):
        """setFloat must convert numeric strings to float before storing."""
        with self.settings() as settings:
            settings.setFloat(["serial", "timeout", "detection"], "1.2")
            self.assertEqual(1.2, settings._config["serial"]["timeout"]["detection"])
    def test_set_boolean(self):
        """setBoolean must store booleans as-is."""
        with self.settings() as settings:
            settings.setBoolean(["devel", "virtualPrinter", "sendWait"], False)
            self.assertEqual(
                False, settings._config["devel"]["virtualPrinter"]["sendWait"]
            )
    @ddt.data("1", "yes", "true", "TrUe", "y", "Y", "YES")
    def test_set_boolean_convert_string_true(self, value):
        """Truthy strings (case insensitive) must be converted to True."""
        with self.settings() as settings:
            settings.setBoolean(
                ["devel", "virtualPrinter", "repetierStyleResends"], value
            )
            self.assertEqual(
                True, settings._config["devel"]["virtualPrinter"]["repetierStyleResends"]
            )
    @ddt.data("0", "no", "false", ["some", "list"], {"a": "dictionary"}, lambda: None)
    def test_set_boolean_convert_any_false(self, value):
        """Any value that isn't a recognized truthy string must become False."""
        with self.settings() as settings:
            settings.setBoolean(["api", "enabled"], value)
            self.assertEqual(False, settings._config["api"]["enabled"])
    def test_set_default(self):
        """Setting a value back to its default must remove it from the config."""
        with self.settings() as settings:
            self.assertEqual(8080, settings._config["server"]["port"])
            settings.set(["server", "port"], 5000)
            self.assertNotIn("server", settings._config)
            self.assertEqual(5000, settings.get(["server", "port"]))
    def test_set_default_subtree(self):
        """Setting a whole subtree to its defaults must remove it from the config."""
        with self.settings() as settings:
            default = {"host": "0.0.0.0", "port": 5000}
            self.assertEqual(
                {"host": "0.0.0.0", "port": 8080}, settings.get(["server"], merged=True)
            )
            settings.set(["server"], default)
            self.assertNotIn("server", settings._config)
            self.assertEqual(default, settings.get(["server"], merged=True))
    def test_set_none(self):
        """Setting None must store None, not delete the key."""
        with self.settings() as settings:
            self.assertTrue("port" in settings._config["server"])
            settings.set(["server", "port"], None)
            self.assertIs(settings.get(["server", "port"]), None)
@ddt.data(
[], ["api", "lock"], ["api", "lock", "door"], ["serial", "additionalPorts", "key"]
)
def test_set_invalid(self, path):
with self.settings() as settings:
try:
settings.set(path, "value", error_on_path=True)
self.fail("Expected NoSuchSettingsPath")
except octoprint.settings.NoSuchSettingsPath:
pass
    ##~~ test remove
    def test_remove(self):
        """remove must delete the local value so the default shines through."""
        with self.settings() as settings:
            self.assertTrue("port" in settings._config["server"])
            settings.remove(["server", "port"])
            self.assertFalse(
                "server" in settings._config and "port" in settings._config["server"]
            )
            # default value (5000) must now be returned again
            self.assertEqual(5000, settings.get(["server", "port"]))
@ddt.data([], ["server", "lock"], ["serial", "additionalPorts", "key"])
def test_remove_invalid(self, path):
with self.settings() as settings:
try:
settings.remove(path, error_on_path=True)
self.fail("Expected NoSuchSettingsPath")
except octoprint.settings.NoSuchSettingsPath:
pass
    ##~~ test has
    def test_has(self):
        """has must report existing paths as True and unknown ones as False."""
        with self.settings() as settings:
            self.assertTrue(settings.has(["api", "key"]))
            self.assertFalse(settings.has(["api", "lock"]))
    ##~~ test properties
    def test_effective(self):
        """effective must equal the precomputed fully merged configuration."""
        with self.settings() as settings:
            effective = settings.effective
            self.assertDictEqual(self.expected_effective, effective)
def test_effective_hash(self):
with self.settings() as settings:
hash = hashlib.md5()
hash.update(yaml.safe_dump(self.expected_effective).encode("utf-8"))
expected_effective_hash = hash.hexdigest()
print(yaml.safe_dump(self.expected_effective))
effective_hash = settings.effective_hash
print(yaml.safe_dump(settings.effective))
self.assertEqual(expected_effective_hash, effective_hash)
    def test_config_hash(self):
        """config_hash must be the md5 of the YAML dump of the local config."""
        with self.settings() as settings:
            hash = hashlib.md5()
            hash.update(yaml.safe_dump(self.config).encode("utf-8"))
            expected_config_hash = hash.hexdigest()
            config_hash = settings.config_hash
            self.assertEqual(expected_config_hash, config_hash)
    def test_last_modified(self):
        """last_modified must mirror the config file's mtime on disk."""
        with self.settings() as settings:
            configfile = settings._configfile
            last_modified = os.stat(configfile).st_mtime
            self.assertEqual(settings.last_modified, last_modified)
    ##~~ test preprocessors
    def test_get_preprocessor(self):
        """A get preprocessor must be applied to the returned value."""
        with self.settings() as settings:
            config = {}
            defaults = {"test_preprocessor": "some string"}
            preprocessors = {"test_preprocessor": lambda x: x.upper()}
            value = settings.get(
                ["test_preprocessor"],
                config=config,
                defaults=defaults,
                preprocessors=preprocessors,
            )
            self.assertEqual("SOME STRING", value)
    def test_set_preprocessor(self):
        """A set preprocessor must be applied before the value is stored."""
        with self.settings() as settings:
            config = {}
            defaults = {"foo_preprocessor": {"bar": "fnord"}}
            preprocessors = {"foo_preprocessor": {"bar": lambda x: x.upper()}}
            settings.set(
                ["foo_preprocessor", "bar"],
                "value",
                config=config,
                defaults=defaults,
                preprocessors=preprocessors,
            )
            self.assertEqual("VALUE", config["foo_preprocessor"]["bar"])
    def test_set_external_modification(self):
        """set must reload externally modified config files before writing."""
        with self.settings() as settings:
            configfile = settings._configfile
            # Make sure the config files last modified time changes
            time.sleep(1.0)
            self.assertEqual("0.0.0.0", settings.get(["server", "host"]))
            # modify yaml file externally
            config = _load_yaml(configfile)
            config["server"]["host"] = "127.0.0.1"
            _dump_yaml(configfile, config)
            # set some value, should also reload file before setting new api key
            settings.set(["api", "key"], "key")
            # verify updated values
            self.assertEqual("127.0.0.1", settings.get(["server", "host"]))
            self.assertEqual("key", settings.get(["api", "key"]))
    ##~~ test update callbacks
    def test_update_callback_change(
        self,
    ):
        """A registered path callback must fire with (path, old, new)."""
        with self.settings() as settings:
            callback = unittest.mock.Mock()
            settings.add_path_update_callback(["api", "key"], callback)
            # set a new value
            settings.set(["api", "key"], "newkey")
            # verify callback was called
            callback.assert_called_once_with(["api", "key"], "test", "newkey")
    def test_update_callback_reset_to_default(self):
        """Resetting a value to its default must still fire the callback."""
        with self.settings() as settings:
            callback = unittest.mock.Mock()
            settings.add_path_update_callback(["server", "port"], callback)
            # set back to default
            settings.set(["server", "port"], 5000)
            # verify callback was called
            callback.assert_called_once_with(["server", "port"], 8080, 5000)
    def test_update_callback_wrong_path(self):
        """Callbacks registered on other paths must not fire."""
        with self.settings() as settings:
            callback = unittest.mock.Mock()
            settings.add_path_update_callback(["wrong", "path"], callback)
            # set a new value
            settings.set(["api", "key"], "newkey")
            # verify callback was not called
            callback.assert_not_called()
    def test_update_callback_removal(self):
        """A removed callback must no longer fire on later changes."""
        with self.settings() as settings:
            callback = unittest.mock.Mock()
            settings.add_path_update_callback(["api", "key"], callback)
            # set a new value
            settings.set(["api", "key"], "newkey")
            # verify callback was called
            callback.assert_called_once_with(["api", "key"], "test", "newkey")
            # remove callback
            settings.remove_path_update_callback(["api", "key"], callback)
            # set a new value
            settings.set(["api", "key"], "newkey2")
            # verify callback was not called again
            callback.assert_called_once_with(["api", "key"], "test", "newkey")
    ##~~ test overlays
    def test_overlay_add_and_remove(self):
        """Adding/removing an overlay must change/restore visible values."""
        with self.settings() as settings:
            # add overlay
            overlay = {"server": {"host": "1.1.1.1"}}
            overlay_key = settings.add_overlay(overlay)
            # verify overlay was added
            self.assertEqual(settings.get(["server", "host"]), "1.1.1.1")
            # remove overlay
            settings.remove_overlay(overlay_key)
            # verify overlay was removed
            self.assertEqual(settings.get(["server", "host"]), "0.0.0.0")
    def test_overlay_add_and_remove_deprecated(self):
        """A deprecated overlay must (un)mark its paths as deprecated too."""
        with self.settings() as settings:
            # add overlay
            overlay = {"server": {"host": "1.1.1.1"}}
            overlay_key = settings.add_overlay(overlay, deprecated="test")
            # verify overlay was added and path marked as deprecated
            self.assertEqual(settings.get(["server", "host"]), "1.1.1.1")
            self.assertTrue(settings._is_deprecated_path(["server", "host"]))
            # remove overlay
            settings.remove_overlay(overlay_key)
            # verify overlay was removed and path no longer marked as deprecated
            self.assertEqual(settings.get(["server", "host"]), "0.0.0.0")
            self.assertFalse(settings._is_deprecated_path(["server", "host"]))
    ##~~ test save
    def test_save(self):
        """Changes must only hit disk once save() is called."""
        with self.settings() as settings:
            config_path = settings._configfile
            # current modification date of config.yaml
            current_modified = os.stat(config_path).st_mtime
            # sleep a bit to make sure we do have a change in the timestamp
            time.sleep(1.0)
            # set a new value
            settings.set(["api", "key"], "newkey")
            # should not be written automatically
            self.assertEqual(current_modified, os.stat(config_path).st_mtime)
            # should be updated after calling save though
            settings.save()
            self.assertNotEqual(current_modified, os.stat(config_path).st_mtime)
    def test_save_unmodified(self):
        """save() without changes must be a no-op unless force=True."""
        with self.settings() as settings:
            last_modified = settings.last_modified
            # sleep a bit to make sure we do have a change in the timestamp
            time.sleep(1.0)
            settings.save()
            self.assertEqual(settings.last_modified, last_modified)
            settings.save(force=True)
            self.assertGreater(settings.last_modified, last_modified)
    ##~~ helpers
    @contextlib.contextmanager
    def mocked_basedir(self):
        """Yield a temp dir monkeypatched in as OctoPrint's default basedir."""
        orig_default_basedir = octoprint.settings._default_basedir
        directory = None
        try:
            directory = tempfile.mkdtemp("octoprint-settings-test")
            octoprint.settings._default_basedir = lambda *args, **kwargs: directory
            yield directory
        finally:
            # always restore the patched module function first, then clean up
            octoprint.settings._default_basedir = orig_default_basedir
            if directory is not None:
                try:
                    shutil.rmtree(directory)
                except Exception:
                    self.fail("Could not remove temporary basedir")
    @contextlib.contextmanager
    def mocked_config(self):
        """Run with a fresh copy of the test config and mocked defaults."""
        orig_defaults = octoprint.settings.default_settings
        with self.mocked_basedir() as basedir:
            fresh_config_path = os.path.join(basedir, "config.yaml")
            shutil.copy(self.config_path, fresh_config_path)
            try:
                octoprint.settings.default_settings = self.defaults
                yield
            finally:
                octoprint.settings.default_settings = orig_defaults
    @contextlib.contextmanager
    def settings(self):
        """Yield a Settings instance backed by the mocked config + overlay."""
        with self.mocked_config():
            settings = octoprint.settings.Settings()
            settings.add_overlay(self.overlay, key="overlay")
            yield settings
@ddt.ddt
class HelpersTest(unittest.TestCase):
    """Tests for module level helpers in octoprint.settings."""

    @ddt.data(
        (True, True),
        ("true", True),
        ("True", True),
        ("tRuE", True),
        ("yes", True),
        ("YES", True),
        ("y", True),
        ("Y", True),
        ("1", True),
        (1, True),
        (False, False),
        ("Truuuuuuuuue", False),
        ("Nope", False),
        (None, False),
    )
    @ddt.unpack
    def test_valid_boolean_trues(self, value, expected):
        """Membership in valid_boolean_trues must match the expectation."""
        self.assertEqual(expected, value in octoprint.settings.valid_boolean_trues)
    @ddt.data(
        (
            {"a": {"b": "c"}, "d": 1, "e": {"f": {"g": {"h": 1, "i": 1, "j": 1}}}},
            [
                ["a", "b"],
                [
                    "d",
                ],
                ["e", "f", "g", "h"],
                ["e", "f", "g", "i"],
                ["e", "f", "g", "j"],
            ],
        ),
    )
    @ddt.unpack
    def test_recursive_paths(self, value, expected):
        """_paths must enumerate all leaf paths of a nested dict, in order."""
        self.assertEqual(expected, list(octoprint.settings._paths([], value)))
def _key(*path):
    """Join the given path segments into a flat chainmap key."""
    separator = octoprint.settings._CHAINMAP_SEP
    return separator.join(path)
def _prefix(*path):
    """Build the subtree prefix for *path*: the flat key plus a trailing separator."""
    separator = octoprint.settings._CHAINMAP_SEP
    return separator.join(path) + separator
@ddt.ddt
class ChainmapTest(unittest.TestCase):
    """Tests for octoprint.settings.HierarchicalChainMap.

    The chainmap is layered config -> overlay -> defaults; several tests poke
    at the private flattened representation (``_chainmap.maps[0]``) and the
    prefix cache (``_prefixed_keys``) on purpose.
    """

    def setUp(self):
        # load the three fixture layers and stack them into a chainmap
        self.config_path = os.path.realpath(os.path.join(base_path, "config.yaml"))
        self.overlay_path = os.path.realpath(os.path.join(base_path, "overlay.yaml"))
        self.defaults_path = os.path.realpath(os.path.join(base_path, "defaults.yaml"))
        self.config = _load_yaml(self.config_path)
        self.overlay = _load_yaml(self.overlay_path)
        self.defaults = _load_yaml(self.defaults_path)
        self.chainmap = octoprint.settings.HierarchicalChainMap(
            self.config, self.overlay, self.defaults
        )
    def test_has_path(self):
        """has_path must see keys from any layer, including inner nodes."""
        self.assertTrue(self.chainmap.has_path(["api", "key"]))
        self.assertTrue(self.chainmap.has_path(["devel"]))
        self.assertTrue(self.chainmap.has_path(["devel", "virtualPrinter"]))
        self.assertTrue(self.chainmap.has_path(["devel", "virtualPrinter", "enabled"]))
        self.assertTrue(self.chainmap.has_path(["plugins", "foo", "bar"]))
        self.assertFalse(self.chainmap.has_path(["api", "lock"]))
    def test_get_by_path(self):
        """get_by_path must honor only_defaults, only_local and merged."""
        self.assertEqual(
            True, self.chainmap.get_by_path(["devel", "virtualPrinter", "enabled"])
        )
        self.assertEqual(
            False,
            self.chainmap.get_by_path(
                ["devel", "virtualPrinter", "enabled"], only_defaults=True
            ),
        )
        # "test" only exists in overlay/defaults, so only_local must raise
        with pytest.raises(KeyError):
            self.assertEqual(None, self.chainmap.get_by_path(["test"], only_local=True))
        self.assertEqual(self.overlay["test"], self.chainmap.get_by_path(["test"]))
        self.assertEqual(
            dict_merge(self.defaults["test"], self.overlay["test"]),
            self.chainmap.get_by_path(["test"], merged=True),
        )
        self.assertEqual(
            self.config["plugins"]["foo"]["bar"],
            self.chainmap.get_by_path(["plugins", "foo", "bar"]),
        )
        self.assertEqual(
            self.config["plugins"]["fnord"]["bar"],
            self.chainmap.get_by_path(["plugins", "fnord", "bar"]),
        )
        self.assertEqual(
            dict_merge(
                self.overlay["plugins"]["fnord"]["bar"],
                self.config["plugins"]["fnord"]["bar"],
            ),
            self.chainmap.get_by_path(["plugins", "fnord", "bar"], merged=True),
        )
    def test_set_by_path(self):
        """set_by_path must update the flattened top layer in place."""
        self.chainmap.set_by_path(["devel", "virtualPrinter", "sendWait"], False)
        updated = dict(self.config)
        updated["devel"]["virtualPrinter"]["sendWait"] = False
        flattened = octoprint.settings.HierarchicalChainMap._flatten(updated)
        self.assertEqual(flattened, self.chainmap._chainmap.maps[0])
    def test_set_empty_dict(self):
        """Setting a node to {} must drop its flattened children."""
        self.assertTrue(_key("empty", "value", "a") in self.chainmap._chainmap.maps[0])
        self.chainmap.set_by_path(["empty", "value"], {})
        self.assertFalse(_key("empty", "value", "a") in self.chainmap._chainmap.maps[0])
    def test_del_by_path(self):
        """Deleting a leaf must also prune now-empty ancestor nodes."""
        self.chainmap.del_by_path(
            ["devel", "virtualPrinter", "capabilities", "autoreport_temp"]
        )
        # make sure we only see the empty default now
        self.assertEqual(
            {}, self.chainmap.get_by_path(["devel", "virtualPrinter", "capabilities"])
        )
        # make sure the whole (empty) tree is gone from top layer
        path = ["devel", "virtualPrinter", "capabilities", "autoreport_temp"]
        while len(path):
            self.assertFalse(_key(*path) in self.chainmap._chainmap.maps[0])
            path = path[:-1]
    def test_del_by_path_with_subtree(self):
        """Deleting an inner node must remove its whole subtree."""
        self.chainmap.del_by_path(["devel", "virtualPrinter", "capabilities"])
        # make sure we only see the empty default now
        self.assertEqual(
            {}, self.chainmap.get_by_path(["devel", "virtualPrinter", "capabilities"])
        )
        # make sure the whole (empty) tree is gone from top layer
        path = ["devel", "virtualPrinter", "capabilities", "autoreport_temp"]
        while len(path):
            self.assertFalse(_key(*path) in self.chainmap._chainmap.maps[0])
            path = path[:-1]
    @ddt.data(
        (
            {"a": 1},
            {_key("a"): 1},
        ),
        ({"a": {"b": "b"}}, {_key("a", "b"): "b"}),
        (
            {"a": {"b": "b", "c": "c", "d": {"e": "e"}}},
            {_key("a", "b"): "b", _key("a", "c"): "c", _key("a", "d", "e"): "e"},
        ),
    )
    @ddt.unpack
    def test_flatten(self, value, expected):
        """_flatten must turn a nested dict into separator-joined keys."""
        self.assertEqual(
            expected, octoprint.settings.HierarchicalChainMap._flatten(value)
        )
    @ddt.data(
        (
            {_key("a"): 1},
            {"a": 1},
        ),
        (
            {_key("a", "b"): "b"},
            {"a": {"b": "b"}},
        ),
        (
            {_key("a", "b"): "b", _key("a", "c"): "c", _key("a", "d", "e"): "e"},
            {"a": {"b": "b", "c": "c", "d": {"e": "e"}}},
        ),
        (
            {_key("a"): None, _key("a", "b"): "b"},
            {"a": {"b": "b"}},
        ),
        (
            {_key("a"): "", _key("a", "b"): "b"},
            {"a": {"b": "b"}},
        ),
    )
    @ddt.unpack
    def test_unflatten(self, value, expected):
        """_unflatten must rebuild the nested dict, ignoring empty node markers."""
        self.assertEqual(
            expected, octoprint.settings.HierarchicalChainMap._unflatten(value)
        )
    def test_prefix_caching_has_populates(self):
        """has_path on a subtree must populate the prefix cache."""
        # this should populate the prefix cache
        self.chainmap.has_path(["plugins", "foo"])
        # validate that
        self.assertTrue(len(self.chainmap._prefixed_keys) == 1)
        self.assertTrue(_prefix("plugins", "foo") in self.chainmap._prefixed_keys)
    def test_prefix_caching_get_populates(self):
        """get_by_path on a subtree must populate the prefix cache."""
        # this should populate the prefix cache
        self.chainmap.get_by_path(["plugins", "foo"])
        # validate that
        self.assertTrue(len(self.chainmap._prefixed_keys) == 1)
        self.assertTrue(_prefix("plugins", "foo") in self.chainmap._prefixed_keys)
    def test_prefix_caching_scalars_ignored(self):
        """Scalar lookups must not create prefix cache entries."""
        # this shouldn't populate the prefix cache
        self.chainmap.has_path(["api", "key"])
        # validate that
        self.assertTrue(len(self.chainmap._prefixed_keys) == 0)
    def test_prefix_caching_set_invalidates(self):
        """set_by_path on a subtree must invalidate its cached prefixes."""
        # this should populate the prefix cache
        self.chainmap.has_path(["plugins", "foo"])
        # validate that
        self.assertTrue(len(self.chainmap._prefixed_keys) == 1)
        self.assertTrue(_prefix("plugins", "foo") in self.chainmap._prefixed_keys)
        # this should extend the prefix cache
        self.chainmap.get_by_path(["plugins", "foo", "bar"])
        # validate that
        self.assertTrue(len(self.chainmap._prefixed_keys) == 2)
        self.chainmap.has_path(["plugins", "foo", "bar"])
        # this should remove all plugins.foo keys in the prefix cache
        self.chainmap.set_by_path(["plugins", "foo"], {})
        # validate that
        self.assertTrue(len(self.chainmap._prefixed_keys) == 0)
    def test_prefix_caching_del_invalidates(self):
        """del_by_path must invalidate cached prefixes below the deleted node."""
        # this should populate the prefix cache
        self.chainmap.has_path(["plugins", "baz", "d"])
        # validate that
        self.assertTrue(len(self.chainmap._prefixed_keys) == 1)
        self.assertTrue(_prefix("plugins", "baz", "d") in self.chainmap._prefixed_keys)
        # this should remove all plugins.baz keys in the prefix cache
        self.chainmap.del_by_path(["plugins", "baz"])
        # validate that
        keys = [
            key
            for key in self.chainmap._prefixed_keys
            if key.startswith(_prefix("plugins", "baz"))
        ]
        self.assertTrue(len(keys) == 0)
    def test_prefix_caching_custom_defaults(self):
        """Writes through with_config_defaults must show up in later merged reads."""
        # this should populate the prefix cache
        self.chainmap.has_path(["swu", "checks"])
        # validate that
        self.assertTrue(len(self.chainmap._prefixed_keys) == 1)
        self.assertTrue(_prefix("swu", "checks") in self.chainmap._prefixed_keys)
        # this should save a new key for some custom defaults
        path = ["swu", "checks", "yetanother"]
        defaults = {"swu": {"checks": {"yetanother": {}}}}
        self.chainmap.with_config_defaults(defaults=defaults).set_by_path(
            path, {"foo": 3}
        )
        # getting the first path now should include our new data
        data = self.chainmap.get_by_path(["swu", "checks"], merged=True)
        # validate that
        self.assertTrue("yetanother" in data)
| 36,275 | Python | .py | 785 | 35.340127 | 241 | 0.588619 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,013 | test_types_blueprint.py | OctoPrint_OctoPrint/tests/plugin/test_types_blueprint.py | import unittest
from unittest import mock
import octoprint.plugin
class BlueprintPluginTest(unittest.TestCase):
    """Tests for octoprint.plugin.BlueprintPlugin's routing and blueprint setup."""

    def setUp(self):
        # plain BlueprintPlugin plus two variants mixing in Asset/TemplatePlugin,
        # which override the static/template folder resolution respectively
        self.basefolder = "/some/funny/basefolder"
        self.plugin = octoprint.plugin.BlueprintPlugin()
        self.plugin._basefolder = self.basefolder
        class MyAssetPlugin(
            octoprint.plugin.BlueprintPlugin, octoprint.plugin.AssetPlugin
        ):
            def get_asset_folder(self):
                return "/some/asset/folder"
        class MyTemplatePlugin(
            octoprint.plugin.BlueprintPlugin, octoprint.plugin.TemplatePlugin
        ):
            def get_template_folder(self):
                return "/some/template/folder"
        self.assetplugin = MyAssetPlugin()
        self.assetplugin._basefolder = self.basefolder
        self.templateplugin = MyTemplatePlugin()
        self.templateplugin._basefolder = self.basefolder
    def test_route(self):
        """The route decorator must collect all rules on the decorated function."""
        def test_method():
            pass
        octoprint.plugin.BlueprintPlugin.route("/test/method", methods=["GET"])(
            test_method
        )
        octoprint.plugin.BlueprintPlugin.route("/test/method/{foo}", methods=["PUT"])(
            test_method
        )
        self.assertTrue(hasattr(test_method, "_blueprint_rules"))
        self.assertTrue("test_method" in test_method._blueprint_rules)
        self.assertTrue(len(test_method._blueprint_rules["test_method"]) == 2)
        self.assertListEqual(
            test_method._blueprint_rules["test_method"],
            [
                ("/test/method", {"methods": ["GET"]}),
                ("/test/method/{foo}", {"methods": ["PUT"]}),
            ],
        )
    def test_errorhandler(self):
        """The errorhandler decorator must record the handled status codes."""
        def test_method():
            pass
        octoprint.plugin.BlueprintPlugin.errorhandler(404)(test_method)
        self.assertTrue(hasattr(test_method, "_blueprint_error_handler"))
        self.assertTrue("test_method" in test_method._blueprint_error_handler)
        self.assertTrue(len(test_method._blueprint_error_handler["test_method"]) == 1)
        self.assertListEqual(test_method._blueprint_error_handler["test_method"], [404])
    def test_get_blueprint_kwargs(self):
        """Default static/template folders must derive from the plugin basefolder."""
        import os
        expected = {
            "static_folder": os.path.join(self.basefolder, "static"),
            "template_folder": os.path.join(self.basefolder, "templates"),
        }
        result = self.plugin.get_blueprint_kwargs()
        self.assertEqual(result, expected)
    def test_get_blueprint_kwargs_assetplugin(self):
        """An AssetPlugin mixin must override the static folder."""
        import os
        expected = {
            "static_folder": self.assetplugin.get_asset_folder(),
            "template_folder": os.path.join(self.basefolder, "templates"),
        }
        result = self.assetplugin.get_blueprint_kwargs()
        self.assertEqual(result, expected)
    def test_get_blueprint_kwargs_templateplugin(self):
        """A TemplatePlugin mixin must override the template folder."""
        import os
        expected = {
            "static_folder": os.path.join(self.basefolder, "static"),
            "template_folder": self.templateplugin.get_template_folder(),
        }
        result = self.templateplugin.get_blueprint_kwargs()
        self.assertEqual(result, expected)
    def test_get_blueprint(self):
        """get_blueprint must register routes and error handlers, skipping _private methods."""
        import os
        expected_kwargs = {
            "static_folder": os.path.join(self.basefolder, "static"),
            "template_folder": os.path.join(self.basefolder, "templates"),
        }
        class MyPlugin(octoprint.plugin.BlueprintPlugin):
            @octoprint.plugin.BlueprintPlugin.route("/some/path", methods=["GET"])
            def route_method(self):
                pass
            @octoprint.plugin.BlueprintPlugin.errorhandler(404)
            def errorhandler_method(self):
                pass
            @octoprint.plugin.BlueprintPlugin.route("/hidden/path", methods=["GET"])
            def _hidden_method(self):
                pass
        plugin = MyPlugin()
        plugin._basefolder = self.basefolder
        plugin._identifier = "myplugin"
        with mock.patch("flask.Blueprint") as MockBlueprint:
            blueprint = mock.MagicMock()
            MockBlueprint.return_value = blueprint
            errorhandler = mock.MagicMock()
            blueprint.errorhandler.return_value = errorhandler
            result = plugin.get_blueprint()
        self.assertEqual(result, blueprint)
        MockBlueprint.assert_called_once_with("myplugin", "myplugin", **expected_kwargs)
        # only the public route must have been registered, not _hidden_method
        blueprint.add_url_rule.assert_called_once_with(
            "/some/path", "route_method", view_func=plugin.route_method, methods=["GET"]
        )
        blueprint.errorhandler.assert_called_once_with(404)
        errorhandler.assert_called_once_with(plugin.errorhandler_method)
    def test_get_blueprint_cached(self):
        """A previously built blueprint must be returned from the cache."""
        blueprint = mock.MagicMock()
        self.plugin._blueprint = blueprint
        result = self.plugin.get_blueprint()
        self.assertEqual(blueprint, result)
| 4,969 | Python | .py | 110 | 34.745455 | 88 | 0.633665 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,014 | test_types_settings.py | OctoPrint_OctoPrint/tests/plugin/test_types_settings.py | import unittest
from unittest import mock
import octoprint.plugin
class TestSettingsPlugin(unittest.TestCase):
def setUp(self):
self.settings = mock.MagicMock()
self.plugin = octoprint.plugin.SettingsPlugin()
self.plugin._settings = self.settings
def test_on_settings_cleanup(self):
"""Tests that after cleanup only minimal config is left in storage."""
### setup
# settings defaults
defaults = {
"foo": {"a": 1, "b": 2, "l1": ["some", "list"], "l2": ["another", "list"]},
"bar": True,
"fnord": None,
}
self.plugin.get_settings_defaults = mock.MagicMock()
self.plugin.get_settings_defaults.return_value = defaults
# stored config, containing one redundant entry (bar=True, same as default)
in_config = {
"foo": {
"l1": ["some", "other", "list"],
"l2": ["another", "list"],
"l3": ["a", "third", "list"],
},
"bar": True,
"fnord": {"c": 3, "d": 4},
}
self.settings.get_all_data.return_value = in_config
### execute
self.plugin.on_settings_cleanup()
### assert
# minimal config (current without redundant value) should have been set
expected = {
"foo": {"l1": ["some", "other", "list"], "l3": ["a", "third", "list"]},
"fnord": {"c": 3, "d": 4},
}
self.settings.set.assert_called_once_with([], expected)
def test_on_settings_cleanup_configversion(self):
"""Tests that set config version is always left stored."""
### setup
defaults = {"foo": "fnord"}
self.plugin.get_settings_defaults = mock.MagicMock()
self.plugin.get_settings_defaults.return_value = defaults
in_config = {"_config_version": 1, "foo": "fnord"}
self.settings.get_all_data.return_value = in_config
### execute
self.plugin.on_settings_cleanup()
### assert
# minimal config incl. config version should have been set
self.settings.set.assert_called_once_with([], {"_config_version": 1})
def test_on_settings_cleanup_noconfigversion(self):
"""Tests that config versions of None are cleaned from stored data."""
### setup
defaults = {"foo": "bar"}
self.plugin.get_settings_defaults = mock.MagicMock()
self.plugin.get_settings_defaults.return_value = defaults
# stored config version is None
in_config = {"_config_version": None, "foo": "fnord"}
self.settings.get_all_data.return_value = in_config
### execute
self.plugin.on_settings_cleanup()
### assert
# minimal config without config version should have been set
self.settings.set.assert_called_once_with([], {"foo": "fnord"})
def test_on_settings_cleanup_emptydiff(self):
"""Tests that settings are cleaned up if the diff data <-> defaults is empty."""
### setup
defaults = {"foo": "bar"}
self.plugin.get_settings_defaults = mock.MagicMock()
self.plugin.get_settings_defaults.return_value = defaults
# current stored config, same as defaults
in_config = {"foo": "bar"}
self.settings.get_all_data.return_value = in_config
### execute
self.plugin.on_settings_cleanup()
### assert
# should have been cleared
self.settings.clean_all_data.assert_called_once_with()
def test_on_settings_cleanup_nosuchpath(self):
"""Tests that no processing is done if nothing is stored in settings."""
from octoprint.settings import NoSuchSettingsPath
### setup
# simulate no settings stored in config.yaml
self.settings.get_all_data.side_effect = NoSuchSettingsPath()
### execute
self.plugin.on_settings_cleanup()
### assert
# only get_all_data should have been called
self.settings.get_all_data.assert_called_once_with(
merged=False, incl_defaults=False, error_on_path=True
)
self.assertTrue(len(self.settings.method_calls) == 1)
    def test_on_settings_cleanup_none(self):
        """Tests that None entries in config get cleaned up."""
        ### setup
        # simulate a None entry for this plugin in config.yaml
        self.settings.get_all_data.return_value = None
        ### execute
        self.plugin.on_settings_cleanup()
        ### assert
        # the empty stored entry should have been removed completely
        self.settings.clean_all_data.assert_called_once_with()
def test_on_settings_save(self):
"""Tests that only the diff is saved."""
### setup
current = {"foo": "bar"}
self.settings.get_all_data.return_value = current
defaults = {"foo": "foo", "bar": {"a": 1, "b": 2}}
self.plugin.get_settings_defaults = mock.MagicMock()
self.plugin.get_settings_defaults.return_value = defaults
### execute
data = {"foo": "fnord", "bar": {"a": 1, "b": 2}}
diff = self.plugin.on_settings_save(data)
### assert
# the minimal diff should have been saved
expected = {"foo": "fnord"}
self.settings.set.assert_called_once_with([], expected)
self.assertEqual(diff, expected)
def test_on_settings_save_nodiff(self):
"""Tests that data is cleaned if there's not difference between data and defaults."""
### setup
self.settings.get_all_data.return_value = None
defaults = {"foo": "bar", "bar": {"a": 1, "b": 2, "l": ["some", "list"]}}
self.plugin.get_settings_defaults = mock.MagicMock()
self.plugin.get_settings_defaults.return_value = defaults
### execute
data = {"foo": "bar"}
diff = self.plugin.on_settings_save(data)
### assert
self.settings.clean_all_data.assert_called_once_with()
self.assertEqual(diff, {})
def test_on_settings_save_configversion(self):
"""Tests that saved data gets stripped config version and set correct one."""
### setup
self.settings.get_all_data.return_value = None
defaults = {"foo": "bar"}
self.plugin.get_settings_defaults = mock.MagicMock()
self.plugin.get_settings_defaults.return_value = defaults
version = 1
self.plugin.get_settings_version = mock.MagicMock()
self.plugin.get_settings_version.return_value = version
### execute
data = {"_config_version": None, "foo": "bar"}
diff = self.plugin.on_settings_save(data)
### assert
expected_diff = {}
expected_set = {"_config_version": version}
# while there was no diff, we should still have saved the new config version
self.settings.set.assert_called_once_with([], expected_set)
self.assertEqual(diff, expected_diff)
def test_on_settings_load(self):
"""Tests that on_settings_load returns what's stored in the config, without config version."""
### setup
# current data incl. config version
current = {
"_config_version": 3,
"foo": "bar",
"fnord": {"a": 1, "b": 2, "l": ["some", "list"]},
}
# expected is current without _config_version - we make the copy now
# since our current dict will be modified by the test
expected = dict(current)
del expected["_config_version"]
self.settings.get_all_data.return_value = expected
### execute
result = self.plugin.on_settings_load()
### assert
self.assertEqual(result, expected)
| 7,719 | Python | .py | 170 | 35.894118 | 102 | 0.604262 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,015 | __init__.py | OctoPrint_OctoPrint/tests/plugin/__init__.py | """
Unit tests for ``octoprint.plugin``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 284 | Python | .py | 6 | 45.833333 | 103 | 0.72 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,016 | test_core.py | OctoPrint_OctoPrint/tests/plugin/test_core.py | import unittest
from unittest import mock
import ddt
import octoprint.plugin
import octoprint.plugin.core
##~~ Helpers for testing mixin type extraction
# Class hierarchies for PluginManager.mixins_matching_bases (see
# test_mixins_matching_bases below): A has three direct subclasses
# (A_1..A_3) plus one grandchild (A1_1), B and C have one subclass
# each, and D is unrelated to everything else.
class A:
    pass
class A_1(A):
    pass
class A_2(A):
    pass
class A_3(A):
    pass
class A1_1(A_1):
    pass
class B:
    pass
class B_1(B):
    pass
class C:
    pass
class C_1(C):
    pass
class D:
    pass
@ddt.ddt
class PluginTestCase(unittest.TestCase):
    def setUp(self):
        """Builds a PluginManager over the bundled ``_plugins`` fixture folder."""
        import logging
        logging.basicConfig(level=logging.DEBUG)
        # TODO mock pkg_resources to return some defined entry_points
        import os
        self.plugin_folder = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "_plugins"
        )
        # prepare compiled files -- compiles one valid plugin and one non-plugin
        # to .pyc, presumably so the loader also sees compiled-only candidates
        import py_compile
        py_compile.compile(
            os.path.join(self.plugin_folder, "asset_plugin.py"),
            os.path.join(self.plugin_folder, "asset_plugin_2.pyc"),
        )
        py_compile.compile(
            os.path.join(self.plugin_folder, "not_a_plugin.py"),
            os.path.join(self.plugin_folder, "not_a_plugin_either.pyc"),
        )
        plugin_folders = [self.plugin_folder]
        plugin_bases = [octoprint.plugin.OctoPrintPlugin]
        plugin_entry_points = None
        self.plugin_manager = octoprint.plugin.core.PluginManager(
            plugin_folders,
            plugin_bases,
            plugin_entry_points,
            plugin_disabled_list=[],
            logging_prefix="logging_prefix.",
        )
        # This may warn about __plugin_implementations__
        import warnings
        with warnings.catch_warnings(record=True) as w:
            self.plugin_manager.reload_plugins(
                startup=True, initialize_implementations=False
            )
            self.plugin_manager.initialize_implementations()
        if len(w):
            # if the deprecation warning fired, it must be the only one and
            # mention the renamed __plugin_implementation__ control property
            assert len(w) == 1
            assert issubclass(w[-1].category, DeprecationWarning)
            assert "__plugin_implementation__" in str(w[-1].message)
def test_plugin_loading(self):
self.assertEqual(7, len(self.plugin_manager.enabled_plugins))
self.assertEqual(8, len(self.plugin_manager.plugins))
self.assertEqual(2, len(self.plugin_manager.plugin_hooks))
self.assertEqual(4, len(self.plugin_manager.plugin_implementations))
self.assertEqual(3, len(self.plugin_manager.plugin_implementations_by_type))
# hook_plugin
self.assertTrue("octoprint.core.startup" in self.plugin_manager.plugin_hooks)
self.assertEqual(
1, len(self.plugin_manager.plugin_hooks["octoprint.core.startup"])
)
# ordered hook plugins
self.assertTrue("some.ordered.callback" in self.plugin_manager.plugin_hooks)
self.assertEqual(
3, len(self.plugin_manager.plugin_hooks["some.ordered.callback"])
)
# TestStartupPlugin & TestMixedPlugin
self.assertTrue(
octoprint.plugin.StartupPlugin
in self.plugin_manager.plugin_implementations_by_type
)
self.assertEqual(
2,
len(
self.plugin_manager.plugin_implementations_by_type[
octoprint.plugin.StartupPlugin
]
),
)
# TestSettingsPlugin & TestMixedPlugin
self.assertTrue(
octoprint.plugin.SettingsPlugin
in self.plugin_manager.plugin_implementations_by_type
)
self.assertEqual(
2,
len(
self.plugin_manager.plugin_implementations_by_type[
octoprint.plugin.SettingsPlugin
]
),
)
# TestAssetPlugin
self.assertTrue(
octoprint.plugin.AssetPlugin
in self.plugin_manager.plugin_implementations_by_type
)
self.assertEqual(
1,
len(
self.plugin_manager.plugin_implementations_by_type[
octoprint.plugin.AssetPlugin
]
),
)
def test_plugin_initializing(self):
def test_factory(name, implementation):
return {"test_factory": "test_factory_%s" % name}
def verify_injection_order(name, implementation):
self.assertTrue(hasattr(implementation, "_basefolder"))
return {}
additional_injects = {"additional_inject": "additional_inject"}
additional_inject_factories = [test_factory, verify_injection_order]
self.plugin_manager.initialize_implementations(
additional_injects=additional_injects,
additional_inject_factories=additional_inject_factories,
)
all_implementations = self.plugin_manager.plugin_implementations
self.assertEqual(4, len(all_implementations))
for name, impl in all_implementations.items():
self.assertTrue(name in self.plugin_manager.enabled_plugins)
plugin = self.plugin_manager.enabled_plugins[name]
# test that the standard fields were properly initialized
self.assertTrue(hasattr(impl, "_identifier"))
self.assertEqual(name, impl._identifier)
self.assertTrue(hasattr(impl, "_plugin_name"))
self.assertEqual(plugin.name, impl._plugin_name)
self.assertTrue(hasattr(impl, "_plugin_version"))
self.assertEqual(plugin.version, impl._plugin_version)
self.assertTrue(hasattr(impl, "_logger"))
self.assertIsNotNone(impl._logger)
self.assertEqual("logging_prefix.%s" % name, impl._logger.name)
self.assertTrue(hasattr(impl, "_basefolder"))
self.assertTrue(impl._basefolder.startswith(self.plugin_folder))
# test that the additional injects were properly injected
self.assertTrue(hasattr(impl, "_additional_inject"))
self.assertEqual("additional_inject", impl._additional_inject)
# test that the injection factory was properly executed and the result injected
self.assertTrue(hasattr(impl, "_test_factory"))
self.assertEqual("test_factory_%s" % name, impl._test_factory)
def test_get_plugin(self):
plugin = self.plugin_manager.get_plugin("hook_plugin")
self.assertIsNotNone(plugin)
self.assertEqual("Hook Plugin", plugin.__plugin_name__)
plugin = self.plugin_manager.get_plugin("mixed_plugin")
self.assertIsNotNone(plugin)
self.assertEqual("Mixed Plugin", plugin.__plugin_name__)
plugin = self.plugin_manager.get_plugin("unknown_plugin")
self.assertIsNone(plugin)
def test_get_plugin_info(self):
plugin_info = self.plugin_manager.get_plugin_info("hook_plugin")
self.assertIsNotNone(plugin_info)
self.assertEqual("Hook Plugin", plugin_info.name)
plugin_info = self.plugin_manager.get_plugin_info("unknown_plugin")
self.assertIsNone(plugin_info)
def test_get_hooks(self):
hooks = self.plugin_manager.get_hooks("octoprint.core.startup")
self.assertEqual(1, len(hooks))
self.assertTrue("hook_plugin" in hooks)
self.assertEqual("success", hooks["hook_plugin"]())
hooks = self.plugin_manager.get_hooks("octoprint.printing.print")
self.assertEqual(0, len(hooks))
def test_sorted_hooks(self):
hooks = self.plugin_manager.get_hooks("some.ordered.callback")
self.assertEqual(3, len(hooks))
self.assertListEqual(
["one_ordered_hook_plugin", "another_ordered_hook_plugin", "hook_plugin"],
list(hooks.keys()),
)
def test_get_implementations(self):
implementations = self.plugin_manager.get_implementations(
octoprint.plugin.StartupPlugin
)
self.assertListEqual(
["mixed_plugin", "startup_plugin"],
list(map(lambda x: x._identifier, implementations)),
)
implementations = self.plugin_manager.get_implementations(
octoprint.plugin.SettingsPlugin
)
self.assertListEqual(
["mixed_plugin", "settings_plugin"],
list(map(lambda x: x._identifier, implementations)),
)
implementations = self.plugin_manager.get_implementations(
octoprint.plugin.StartupPlugin, octoprint.plugin.SettingsPlugin
)
self.assertListEqual(
["mixed_plugin"], list(map(lambda x: x._identifier, implementations))
)
implementations = self.plugin_manager.get_implementations(
octoprint.plugin.AssetPlugin
)
self.assertListEqual(
["asset_plugin"],
list(map(lambda x: x._identifier, implementations)),
)
def test_get_filtered_implementations(self):
implementations = self.plugin_manager.get_filtered_implementations(
lambda x: x._identifier.startswith("startup"), octoprint.plugin.StartupPlugin
)
self.assertEqual(1, len(implementations))
def test_get_sorted_implementations(self):
implementations = self.plugin_manager.get_implementations(
octoprint.plugin.StartupPlugin, sorting_context="sorting_test"
)
self.assertListEqual(
["startup_plugin", "mixed_plugin"],
list(map(lambda x: x._identifier, implementations)),
)
def test_client_registration(self):
def test_client(*args, **kwargs):
pass
self.assertEqual(0, len(self.plugin_manager.registered_clients))
self.plugin_manager.register_message_receiver(test_client)
self.assertEqual(1, len(self.plugin_manager.registered_clients))
self.assertIn(test_client, self.plugin_manager.registered_clients)
self.plugin_manager.unregister_message_receiver(test_client)
self.assertEqual(0, len(self.plugin_manager.registered_clients))
self.assertNotIn(test_client, self.plugin_manager.registered_clients)
def test_send_plugin_message(self):
client1 = mock.Mock()
client2 = mock.Mock()
self.plugin_manager.register_message_receiver(client1.on_plugin_message)
self.plugin_manager.register_message_receiver(client2.on_plugin_message)
plugin = "some plugin"
data = "some data"
self.plugin_manager.send_plugin_message(plugin, data)
client1.on_plugin_message.assert_called_once_with(plugin, data, permissions=None)
client2.on_plugin_message.assert_called_once_with(plugin, data, permissions=None)
def test_broken_plugin(self):
self.assertTrue("not_a_plugin" in self.plugin_manager.plugins)
plugin = self.plugin_manager.plugins["not_a_plugin"]
self.assertFalse(plugin.looks_like_plugin)
self.assertFalse(plugin.loaded)
self.assertFalse(plugin.enabled)
@ddt.data(
(
["octoprint.some_hook"],
["octoprint.some_hook", "octoprint.another_hook"],
True,
),
(["octoprint.*"], ["octoprint.some_hook", "octoprint.another_hook"], True),
(["octoprint.some_hook"], ["octoprint.another_hook"], False),
(["octoprint.some_hook"], [], False),
([], ["octoprint.some_hook"], False),
)
@ddt.unpack
def test_has_any_of_hooks(self, hooks_to_test_for, plugin_hooks, expected):
plugin = mock.MagicMock()
plugin.hooks = {hook: hook for hook in plugin_hooks}
actual = octoprint.plugin.core.PluginManager.has_any_of_hooks(
plugin, hooks_to_test_for
)
self.assertEqual(actual, expected)
def test_has_any_of_hooks_varargs(self):
plugin = mock.MagicMock()
plugin.hooks = {
hook: hook for hook in ["octoprint.some_hook", "octoprint.another_hook"]
}
result = octoprint.plugin.core.PluginManager.has_any_of_hooks(
plugin, "octoprint.some_hook", "octoprint.some_other_hook"
)
self.assertTrue(result)
def test_has_any_of_hooks_nohooks(self):
plugin = mock.MagicMock()
result = octoprint.plugin.core.PluginManager.has_any_of_hooks(
plugin, "octoprint.some_hook", "octoprint.some_other_hook"
)
self.assertFalse(result)
@ddt.data(
("octoprint.some_hook", ["octoprint.another_hook", "octoprint.some_hook"], True),
("octoprint.some_hook", ["octoprint.*"], True),
("octoprint.some_hook", ["octoprint.some_hook*"], True),
("octoprint.some_hook", ["octoprint.*_hook"], True),
("octoprint.some_hook", ["octoprint.another_hook.*"], False),
("", ["octoprint.some_hook"], False),
(None, ["octoprint.some_hook"], False),
("octoprint.some_hook", [], False),
("octoprint.some_hook", None, False),
("octoprint.some_hook", [None], False),
)
@ddt.unpack
def test_hook_matches_hooks(self, hook, hooks, expected):
actual = octoprint.plugin.core.PluginManager.hook_matches_hooks(hook, hooks)
self.assertEqual(actual, expected)
def test_hook_matches_hooks_varargs(self):
result = octoprint.plugin.core.PluginManager.hook_matches_hooks(
"octoprint.some_hook", "octoprint.another_hook", "octoprint.some_hook"
)
self.assertTrue(result)
@ddt.data(
(
[octoprint.plugin.RestartNeedingPlugin],
[octoprint.plugin.Plugin, octoprint.plugin.RestartNeedingPlugin],
True,
),
([octoprint.plugin.RestartNeedingPlugin], [octoprint.plugin.Plugin], False),
([], [octoprint.plugin.Plugin], False),
([octoprint.plugin.RestartNeedingPlugin], [], False),
)
@ddt.unpack
def test_has_any_of_mixins(self, mixins_to_test_for, plugin_mixins, expected):
plugin = mock.MagicMock()
plugin.implementation = mock.MagicMock()
for mixin in plugin_mixins:
plugin.implementation.mock_add_spec(mixin)
actual = octoprint.plugin.core.PluginManager.has_any_of_mixins(
plugin, mixins_to_test_for
)
self.assertEqual(actual, expected)
def test_has_any_of_mixins_varargs(self):
plugin = mock.MagicMock()
plugin.implementation = mock.MagicMock()
plugin.implementation.mock_add_spec(octoprint.plugin.Plugin)
plugin.implementation.mock_add_spec(octoprint.plugin.RestartNeedingPlugin)
result = octoprint.plugin.core.PluginManager.has_any_of_mixins(
plugin, octoprint.plugin.RestartNeedingPlugin
)
self.assertTrue(result)
def test_has_any_of_mixins_noimplementation(self):
plugin = mock.MagicMock()
result = octoprint.plugin.core.PluginManager.has_any_of_mixins(
plugin, octoprint.plugin.RestartNeedingPlugin
)
self.assertFalse(result)
@ddt.data(
((A1_1, A_2, B_1, C_1), (A, C), (A_1, A1_1, A_2, C_1)),
((A1_1, A_2, B_1, C_1), (B,), (B_1,)),
# not a subclass
((A1_1, A_2, B_1, C_1), (D,), ()),
# subclass only of base
((A,), (A,), ()),
)
@ddt.unpack
def test_mixins_matching_bases(self, bases_to_set, bases_to_check, expected):
Foo = type("Foo", bases_to_set, {})
actual = octoprint.plugin.core.PluginManager.mixins_matching_bases(
Foo, *bases_to_check
)
self.assertSetEqual(actual, set(expected))
| 15,515 | Python | .py | 358 | 33.703911 | 91 | 0.636894 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,017 | test_settings.py | OctoPrint_OctoPrint/tests/plugin/test_settings.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
from unittest import mock
import pytest
import octoprint.plugin
import octoprint.settings
# identifier under which the plugin's settings are nested ("plugins.<key>")
plugin_key = "test_plugin"
# default settings tree handed to PluginSettings in the fixtures below
defaults = {
    "some_raw_key": "some_raw_value",
    "some_int_key": 1,
    "some_float_key": 2.5,
    "some_boolean_key": True,
    "preprocessed": {"get": "PreProcessed", "set": "PreProcessed"},
}
# read/write preprocessors applied to the "preprocessed" key
get_preprocessors = {"preprocessed": {"get": lambda x: x.upper()}}
set_preprocessors = {"preprocessed": {"set": lambda x: x.lower()}}
@pytest.fixture()
def settings():
    """Autospecced ``octoprint.settings.Settings`` mock, fresh for every test."""
    mocked_settings = mock.create_autospec(octoprint.settings.Settings)
    yield mocked_settings
@pytest.fixture()
def plugin_settings(settings):
    """PluginSettings under test, wired to the mocked Settings instance."""
    instance = octoprint.plugin.PluginSettings(
        settings,
        plugin_key,
        defaults=defaults,
        get_preprocessors=get_preprocessors,
        set_preprocessors=set_preprocessors,
    )
    yield instance
@pytest.mark.parametrize(
    "getter, getter_args, getter_kwargs, forwarded",
    [
        ("get", (["some_raw_key"],), {}, "get"),
        ("get", (["some_raw_key"],), {"merged": True}, "get"),
        ("get", (["some_raw_key"],), {"asdict": True}, "get"),
        ("get", (["some_raw_key"],), {"merged": True, "asdict": True}, "get"),
        # fixed: this used to read "some_int_key," (stray comma INSIDE the
        # string), so the path did not match the key defined in ``defaults``
        ("get_int", (["some_int_key"],), {}, "getInt"),
        ("get_float", (["some_float_key"],), {}, "getFloat"),
        ("get_boolean", (["some_boolean_key"],), {}, "getBoolean"),
    ],
)
def test_forwarded_getter(
    plugin_settings, settings, getter, getter_args, getter_kwargs, forwarded
):
    """Plugin-scoped getters forward to Settings with a prefixed path plus the
    plugin's defaults and get-preprocessors injected."""
    method_under_test = getattr(plugin_settings, getter)
    assert callable(method_under_test)
    method_under_test(*getter_args, **getter_kwargs)
    forwarded_method = getattr(settings, forwarded)
    forwarded_args = (["plugins", plugin_key] + getter_args[0],)
    # work on a copy instead of mutating the parametrize-supplied dict, so no
    # state leaks into reruns of the same parameter set
    forwarded_kwargs = dict(getter_kwargs)
    forwarded_kwargs.update(
        {
            "defaults": {"plugins": {"test_plugin": defaults}},
            "preprocessors": {"plugins": {"test_plugin": get_preprocessors}},
        }
    )
    forwarded_method.assert_called_once_with(*forwarded_args, **forwarded_kwargs)
@pytest.mark.parametrize(
"getter, getter_args, getter_kwargs, forwarded",
[
("global_get", (["some_raw_key"],), {}, "get"),
("global_get", (["some_raw_key"],), {"merged": True}, "get"),
("global_get", (["some_raw_key"],), {"asdict": True}, "get"),
("global_get", (["some_raw_key"],), {"merged": True, "asdict": True}, "get"),
("global_get_int", (["some_int_key"],), {}, "getInt"),
("global_get_float", (["some_float_key"],), {}, "getFloat"),
("global_get_boolean", (["some_boolean_key"],), {}, "getBoolean"),
],
)
def test_global_getter(
plugin_settings, settings, getter, getter_args, getter_kwargs, forwarded
):
method_under_test = getattr(plugin_settings, getter)
assert callable(method_under_test)
method_under_test(*getter_args, **getter_kwargs)
forwarded_method = getattr(settings, forwarded)
forwarded_method.assert_called_once_with(*getter_args, **getter_kwargs)
@pytest.mark.parametrize(
"deprecated, current, forwarded",
[
("getInt", "get_int", "getInt"),
("getFloat", "get_float", "getFloat"),
("getBoolean", "get_boolean", "getBoolean"),
],
)
def test_deprecated_forwarded_getter(
plugin_settings, settings, deprecated, current, forwarded
):
called_method = getattr(settings, forwarded)
# further mock out our mocked function so things work as they should
called_method.__name__ = forwarded
called_method.__qualname__ = forwarded
called_method.__annotations__ = {}
called_method.__type_params__ = ()
method = getattr(plugin_settings, deprecated)
assert callable(method)
with pytest.warns(
DeprecationWarning, match=f"{deprecated} has been renamed to {current}"
):
method(["some_raw_key"])
called_method.assert_called_once_with(
["plugins", plugin_key, "some_raw_key"],
defaults={"plugins": {"test_plugin": defaults}},
preprocessors={"plugins": {"test_plugin": get_preprocessors}},
)
@pytest.mark.parametrize(
"setter, setter_args, setter_kwargs, forwarded",
[
(
"set",
(
["some_raw_key"],
"some_value",
),
{},
"set",
),
(
"set",
(
["some_raw_key"],
"some_value",
),
{"force": True},
"set",
),
(
"set_int",
(
["some_int_key"],
23,
),
{},
"setInt",
),
(
"set_int",
(
["some_int_key"],
23,
),
{"force": True},
"setInt",
),
(
"set_float",
(
["some_float_key"],
2.3,
),
{},
"setFloat",
),
(
"set_float",
(
["some_float_key"],
2.3,
),
{"force": True},
"setFloat",
),
(
"set_boolean",
(
["some_boolean_key"],
True,
),
{},
"setBoolean",
),
(
"set_boolean",
(
["some_boolean_key"],
True,
),
{"force": True},
"setBoolean",
),
],
)
def test_forwarded_setter(
plugin_settings, settings, setter, setter_args, setter_kwargs, forwarded
):
method_under_test = getattr(plugin_settings, setter)
assert callable(method_under_test)
method_under_test(*setter_args, **setter_kwargs)
forwarded_method = getattr(settings, forwarded)
forwarded_args = (["plugins", plugin_key] + setter_args[0], setter_args[1])
forwarded_kwargs = setter_kwargs
forwarded_kwargs.update(
{
"defaults": {"plugins": {"test_plugin": defaults}},
"preprocessors": {"plugins": {"test_plugin": set_preprocessors}},
}
)
forwarded_method.assert_called_once_with(*forwarded_args, **forwarded_kwargs)
@pytest.mark.parametrize(
"setter, setter_args, setter_kwargs, forwarded",
[
(
"global_set",
(
["some_raw_key"],
"some_value",
),
{},
"set",
),
(
"global_set",
(
["some_raw_key"],
"some_value",
),
{"force": True},
"set",
),
(
"global_set_int",
(
["some_int_key"],
23,
),
{},
"setInt",
),
(
"global_set_int",
(
["some_int_key"],
23,
),
{"force": True},
"setInt",
),
(
"global_set_float",
(
["some_float_key"],
2.3,
),
{},
"setFloat",
),
(
"global_set_float",
(
["some_float_key"],
2.3,
),
{"force": True},
"setFloat",
),
(
"global_set_boolean",
(
["some_boolean_key"],
True,
),
{},
"setBoolean",
),
(
"global_set_boolean",
(
["some_boolean_key"],
True,
),
{"force": True},
"setBoolean",
),
],
)
def test_global_setter(
plugin_settings, settings, setter, setter_args, setter_kwargs, forwarded
):
method_under_test = getattr(plugin_settings, setter)
assert callable(method_under_test)
method_under_test(*setter_args, **setter_kwargs)
forwarded_method = getattr(settings, forwarded)
forwarded_method.assert_called_once_with(*setter_args, **setter_kwargs)
@pytest.mark.parametrize(
"deprecated, current, forwarded, value",
[
("setInt", "set_int", "setInt", 1),
("setFloat", "set_float", "setFloat", 2.5),
("setBoolean", "set_boolean", "setBoolean", True),
],
)
def test_deprecated_forwarded_setter(
plugin_settings, settings, deprecated, current, forwarded, value
):
called_method = getattr(settings, forwarded)
# further mock out our mocked function so things work as they should
called_method.__name__ = forwarded
called_method.__qualname__ = forwarded
called_method.__annotations__ = {}
called_method.__type_params__ = ()
method = getattr(plugin_settings, deprecated)
assert callable(method)
with pytest.warns(
DeprecationWarning, match=f"{deprecated} has been renamed to {current}"
):
method(["some_raw_key"], value)
called_method.assert_called_once_with(
["plugins", plugin_key, "some_raw_key"],
value,
defaults={"plugins": {"test_plugin": defaults}},
preprocessors={"plugins": {"test_plugin": set_preprocessors}},
)
def test_global_get_basefolder(plugin_settings, settings):
    """global_get_basefolder is forwarded verbatim to Settings.getBaseFolder."""
    folder = "some_folder"
    plugin_settings.global_get_basefolder(folder)
    settings.getBaseFolder.assert_called_once_with(folder)
def test_logfile_path(plugin_settings, settings):
    """The plugin logfile lives under the configured logs base folder."""
    import os

    settings.getBaseFolder.return_value = "/some/folder"

    result = plugin_settings.get_plugin_logfile_path()

    settings.getBaseFolder.assert_called_once_with("logs")
    # normalize the OS specific separator so the comparison works everywhere
    assert result.replace(os.sep, "/") == f"/some/folder/plugin_{plugin_key}.log"
def test_logfile_path_with_postfix(plugin_settings, settings):
    """A postfix is appended to the plugin logfile name before the extension."""
    import os

    settings.getBaseFolder.return_value = "/some/folder"

    result = plugin_settings.get_plugin_logfile_path(postfix="mypostfix")

    settings.getBaseFolder.assert_called_once_with("logs")
    # normalize the OS specific separator so the comparison works everywhere
    assert (
        result.replace(os.sep, "/")
        == f"/some/folder/plugin_{plugin_key}_mypostfix.log"
    )
def test_unhandled_method(plugin_settings):
    """Attributes PluginSettings doesn't handle raise AttributeError from the wrapped mock."""
    with pytest.raises(
        AttributeError, match="Mock object has no attribute 'some_method'"
    ):
        plugin_settings.some_method("some_parameter")
| 10,653 | Python | .py | 332 | 23.385542 | 103 | 0.543241 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,018 | startup_plugin.py | OctoPrint_OctoPrint/tests/plugin/_plugins/startup_plugin.py | import octoprint.plugin
class TestStartupPlugin(octoprint.plugin.StartupPlugin):
    """Startup plugin fixture that only takes part in 'sorting_test' ordering."""

    def get_sorting_key(self, context=None):
        # sorts at 10 in the dedicated test context, opts out of all others
        return 10 if context == "sorting_test" else None
__plugin_name__ = "Startup Plugin"
__plugin_description__ = "Test startup plugin"
__plugin_implementation__ = TestStartupPlugin()
__plugin_pythoncompat__ = ">=2.7,<4"
| 395 | Python | .py | 11 | 30.545455 | 56 | 0.681579 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,019 | hook_plugin.py | OctoPrint_OctoPrint/tests/plugin/_plugins/hook_plugin.py | def hook_startup():
return "success"
__plugin_name__ = "Hook Plugin"
__plugin_description__ = "Test hook plugin"
__plugin_hooks__ = {
"octoprint.core.startup": hook_startup,
"some.ordered.callback": hook_startup,
}
__plugin_pythoncompat__ = ">=2.7,<4"
| 266 | Python | .py | 9 | 27 | 43 | 0.670588 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,020 | asset_plugin.py | OctoPrint_OctoPrint/tests/plugin/_plugins/asset_plugin.py | import octoprint.plugin
class TestAssetPlugin(octoprint.plugin.AssetPlugin):
    # deliberately empty: the mere presence of the AssetPlugin mixin is what
    # the plugin manager tests look for
    pass
__plugin_name__ = "Asset Plugin"
__plugin_description__ = "Test asset plugin"
__plugin_implementation__ = TestAssetPlugin()
__plugin_pythoncompat__ = ">=2.7,<4"
| 251 | Python | .py | 7 | 33.714286 | 52 | 0.75 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,021 | one_ordered_hook_plugin.py | OctoPrint_OctoPrint/tests/plugin/_plugins/one_ordered_hook_plugin.py | def callback(*args, **kwargs):
pass
__plugin_hooks__ = {"some.ordered.callback": (callback, 10)}
__plugin_pythoncompat__ = ">=2.7,<4"
| 140 | Python | .py | 4 | 32.5 | 60 | 0.641791 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,022 | another_ordered_hook_plugin.py | OctoPrint_OctoPrint/tests/plugin/_plugins/another_ordered_hook_plugin.py | def callback(*args, **kwargs):
pass
__plugin_hooks__ = {"some.ordered.callback": (callback, 100)}
__plugin_pythoncompat__ = ">=2.7,<4"
| 141 | Python | .py | 4 | 32.75 | 61 | 0.644444 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,023 | settings_plugin.py | OctoPrint_OctoPrint/tests/plugin/_plugins/settings_plugin.py | import octoprint.plugin
class TestSettingsPlugin(octoprint.plugin.SettingsPlugin):
    # deliberately empty: the mere presence of the SettingsPlugin mixin is
    # what the plugin manager tests look for
    pass
__plugin_name__ = "Settings Plugin"
__plugin_description__ = "Test settings plugin"
__plugin_implementation__ = TestSettingsPlugin()
__plugin_pythoncompat__ = ">=2.7,<4"
| 266 | Python | .py | 7 | 35.857143 | 58 | 0.764706 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,024 | __init__.py | OctoPrint_OctoPrint/tests/plugin/_plugins/mixed_plugin/__init__.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import octoprint.plugin
class TestMixedPlugin(octoprint.plugin.StartupPlugin, octoprint.plugin.SettingsPlugin):
    """Plugin fixture implementing two mixins, sorted late in 'sorting_test'."""

    def get_sorting_key(self, context=None):
        # sorts at 100 in the dedicated test context, opts out of all others
        return 100 if context == "sorting_test" else None
__plugin_name__ = "Mixed Plugin"
__plugin_description__ = "Test mixed plugin"
__plugin_implementation__ = TestMixedPlugin()
__plugin_pythoncompat__ = ">=2.7,<4"
| 660 | Python | .py | 14 | 42.5 | 103 | 0.707355 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,025 | always_update.py | OctoPrint_OctoPrint/tests/manual_tests/always_update.py | """
Place in ~/.octoprint/plugins & restart server to test:
* python_checker and python_updater mechanism
* demotion of pip and python setup.py clean output that
gets written to stderr but isn't as severe as that would
look
Plugin will always demand to update itself, multiple
consecutive runs are not a problem.
"""
import time
NAME = "Always Update"
OLD_VERSION = "1.0.0"
NEW_VERSION = "2.0.0"
class Foo:
    """Combined version checker and updater stub.

    Implements the duck-typed ``python_checker``/``python_updater`` interface
    used by the software update plugin: ``get_latest`` always reports a newer
    remote version (so an update is offered on every check) and
    ``perform_update`` only emits fake log output to exercise the log
    demotion of pip/setup.py stderr noise.
    """
    def get_latest(self, target, check, full_data=None):
        # always report the remote as NEW_VERSION while we stay at OLD_VERSION
        information = {
            "local": {"name": OLD_VERSION, "value": OLD_VERSION},
            "remote": {"name": NEW_VERSION, "value": NEW_VERSION},
        }
        # never "current" -> an update is always offered
        current = False
        return information, current
    def can_perform_update(self, target, check):
        # the fake update can always run
        return True
    def perform_update(self, target, check, target_version, log_cb=None):
        # fall back to a simple stdout/stderr logger when no callback is given
        if not callable(log_cb):
            import sys
            def log_cb(lines, prefix=None, stream=None, strip=True):
                if stream == "stdout":
                    f = sys.stdout
                elif stream == "stderr":
                    f = sys.stderr
                else:
                    f = None
                for line in lines:
                    print(line, file=f)
        log_cb(["Updating Always Update..."])
        time.sleep(1)
        # typical "python setup.py clean" output on stdout...
        log_cb(
            ["running clean", "recursively removing *.pyc from 'src'"], stream="stdout"
        )
        # ...and its harmless noise on stderr, which should be demoted
        log_cb(
            [
                "'build/lib' does not exist -- can't clean it",
                "'build/bdist.win32' does not exist -- can't clean it",
                "'build/scripts-2.7' does not exist -- can't clean it",
            ],
            stream="stderr",
        )
        log_cb(
            [
                "removing 'Development\\OctoPrint\\OctoPrint\\src\\octoprint_setuptools\\__init__.pyc'"
            ],
            stream="stdout",
        )
        time.sleep(1)
        # genuine stderr output that should stay highlighted
        log_cb(["This should be red"], stream="stderr")
        # pip's upgrade nag lands on stderr but should be demoted as well
        log_cb(
            [
                "You are using pip version 7.1.2, however version 9.0.1 is available.",
                "You should consider upgrading via the 'python -m pip install --upgrade pip' command.",
            ],
            stream="stderr",
        )
        time.sleep(3)
        log_cb(["Done!"])
def get_update_information():
    """softwareupdate hook: register this plugin with one shared checker/updater."""
    checker_and_updater = Foo()
    return {
        "always_update": {
            "displayName": NAME,
            "displayVersion": OLD_VERSION,
            "type": "python_checker",
            "python_checker": checker_and_updater,
            "python_updater": checker_and_updater,
        }
    }
__plugin_name__ = NAME
__plugin_hooks__ = {
"octoprint.plugin.softwareupdate.check_config": get_update_information,
}
| 2,734 | Python | .py | 80 | 24.4375 | 103 | 0.541493 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,026 | __init__.py | OctoPrint_OctoPrint/tests/access/__init__.py | """
Unit tests for octoprint.access
"""
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2015 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 233 | Python | .py | 5 | 45.4 | 103 | 0.748899 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,027 | test_permissions.py | OctoPrint_OctoPrint/tests/access/test_permissions.py | """
Unit tests for octoprint.access.permissions
"""
import unittest
from octoprint.access.permissions import Permissions
class PermissionsTest(unittest.TestCase):
    """Tests for looking up permissions by key via ``Permissions.find``."""

    def test_find(self):
        """A known key resolves to the matching permission object."""
        found = Permissions.find("ADMIN")
        self.assertIsNotNone(found)
        self.assertEqual("Admin", found.get_name())

    def test_find_fail(self):
        """An unknown key yields None instead of raising."""
        self.assertIsNone(Permissions.find("doesntexist"))
| 459 | Python | .py | 13 | 30.230769 | 56 | 0.741497 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,028 | test_usermanager.py | OctoPrint_OctoPrint/tests/access/users/test_usermanager.py | """
Unit tests for octoprint.access.users.UserManager
"""
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2017 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import ddt
import octoprint.access.users
@ddt.ddt
class UserManagerTest(unittest.TestCase):
    """Tests for ``UserManager.create_password_hash``."""

    def test_createPasswordHash_nonascii(self):
        """Test for issue #1891"""
        # hashing a password containing non-ASCII characters must not raise
        octoprint.access.users.UserManager.create_password_hash(
            "password with ümläutß"
        )

    def test_createPasswordHash_is_valid(self):
        # a freshly created hash must verify against the original password
        plain = "test1234"
        hashed = octoprint.access.users.UserManager.create_password_hash(plain)
        user = octoprint.access.users.User(
            "username",
            hashed,
            True,
            permissions=[],
            apikey="apikey",
            settings={"key": "value"},
        )
        self.assertTrue(user.check_password(plain))
| 1,053 | Python | .py | 27 | 31.888889 | 103 | 0.681145 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,029 | __init__.py | OctoPrint_OctoPrint/tests/access/users/__init__.py | """
Unit tests for octoprint.access.users
"""
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2017 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 239 | Python | .py | 5 | 46.6 | 103 | 0.751073 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,030 | test_users.py | OctoPrint_OctoPrint/tests/access/users/test_users.py | """
Unit tests for octoprint.access.users
"""
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2017 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import octoprint.access.users
class SessionUserTestCase(unittest.TestCase):
    """Tests for ``SessionUser``, the session-scoped wrapper around ``User``."""

    def setUp(self):
        # one shared backing user; each test wraps it in one or more sessions
        self.user = octoprint.access.users.User(
            "username",
            "passwordHash",
            True,
            permissions=[],
            apikey="apikey",
            settings={"key": "value"},
        )
    def test_two_sessions(self):
        session1 = octoprint.access.users.SessionUser(self.user)
        session2 = octoprint.access.users.SessionUser(self.user)
        # session should be different, wrapped object should be identical
        self.assertNotEqual(session1.session, session2.session)
        self.assertEqual(session1.__wrapped__, session2.__wrapped__)
        self.assertEqual(session1.get_name(), session2.get_name())
    def test_settings_change_propagates(self):
        session1 = octoprint.access.users.SessionUser(self.user)
        session2 = octoprint.access.users.SessionUser(self.user)
        # change should propagate from User to SessionUser
        self.user.set_setting("otherkey", "othervalue")
        self.assertDictEqual(
            {"key": "value", "otherkey": "othervalue"}, session1.get_all_settings()
        )
        # change should propagate from SessionUser to SessionUser
        session2.set_setting("otherkey", "yetanothervalue")
        self.assertDictEqual(
            {"key": "value", "otherkey": "yetanothervalue"}, session1.get_all_settings()
        )
    def test_repr(self):
        # repr must include the wrapped user, the session id and creation time
        user = octoprint.access.users.SessionUser(self.user)
        expected = "SessionUser({!r},session={},created={})".format(
            self.user, user.session, user.created
        )
        self.assertEqual(expected, repr(user))
    def test_isinstance(self):
        session = octoprint.access.users.SessionUser(self.user)
        # needs to be detected as User instance
        self.assertTrue(isinstance(session, octoprint.access.users.User))
        # also needs to be detected as SessionUser instance
        self.assertTrue(isinstance(session, octoprint.access.users.SessionUser))
        # but wrapped user should NOT be detected as SessionUser instance of course
        self.assertFalse(isinstance(self.user, octoprint.access.users.SessionUser))
| 2,487 | Python | .py | 51 | 40.411765 | 103 | 0.68071 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,031 | permissions_plugin.py | OctoPrint_OctoPrint/tests/access/_plugins/permissions_plugin.py | def plugin_permissions(components):
return [
{
"name": "fancy permission",
"description": "My Fancy new Permission",
"roles": ["fancy"],
},
{
"name": "fancy permission with two roles",
"description": "My Fancy new Permission with two roles",
"roles": ["fancy1", "fancy2"],
},
]
# Plugin metadata and hook registration for this test plugin
__plugin_name__ = "Permissions Plugin"
__plugin_description__ = "Test permissions plugin"
__plugin_hooks__ = {
    "octoprint.access.permissions": plugin_permissions,
}
| 561 | Python | .py | 18 | 23.611111 | 68 | 0.565619 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,032 | test_groupmanager.py | OctoPrint_OctoPrint/tests/access/groups/test_groupmanager.py | """
Unit tests for octoprint.access.groups.GroupManager
"""
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2017 The OctoPrint Project - Released under terms of the AGPLv3 License"
import contextlib
import os
import tempfile
import unittest
import octoprint.access.groups
from octoprint.access.permissions import OctoPrintPermission
# permission fixtures shared by the test cases below
TEST_PERMISSION_1 = OctoPrintPermission("Test 1", "Test permission 1", "p1")
TEST_PERMISSION_2 = OctoPrintPermission("Test 2", "Test permission 2", "p2")
@contextlib.contextmanager
def group_manager_with_temp_file():
    """Yield a ``FilebasedGroupManager`` backed by a fresh temporary file path.

    The ``NamedTemporaryFile`` is used solely to obtain a unique path:
    leaving the ``with`` block closes the handle and (with the default
    ``delete=True``) removes the file again, so the manager starts from a
    non-existing file. The redundant ``f.close()`` of the original
    implementation has been dropped — the file is already closed at that
    point. The path is cleaned up afterwards in case the manager recreated
    the file.
    """
    with tempfile.NamedTemporaryFile() as f:
        path = f.name
    try:
        group_manager = octoprint.access.groups.FilebasedGroupManager(path=path)
        yield group_manager
    finally:
        if os.path.exists(path):
            os.remove(path)
class GroupManagerTestCase(unittest.TestCase):
    """Tests for ``FilebasedGroupManager`` group administration."""

    def test_add_remove_group(self):
        # a newly added group must be findable, a removed one must not
        with group_manager_with_temp_file() as group_manager:
            group_manager.add_group(
                "fancy",
                "Fancy Group",
                "My Fancy New Group",
                permissions=[TEST_PERMISSION_1],
                subgroups=[],
                save=False,
            )
            self.assertIsNotNone(group_manager.find_group("fancy"))
            group_manager.remove_group("fancy")
            self.assertIsNone(group_manager.find_group("fancy"))
| 1,514 | Python | .py | 38 | 31.763158 | 103 | 0.660532 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,033 | __init__.py | OctoPrint_OctoPrint/tests/access/groups/__init__.py | """
Unit tests for octoprint.access.groups
"""
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2017 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 240 | Python | .py | 5 | 46.8 | 103 | 0.752137 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,034 | __init__.py | OctoPrint_OctoPrint/tests/slicing/__init__.py | """
Unit tests for ``octoprint.slicing``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 285 | Python | .py | 6 | 46 | 103 | 0.721014 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,035 | test_slicingmanager.py | OctoPrint_OctoPrint/tests/slicing/test_slicingmanager.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
from unittest import mock
import octoprint.slicing
class TestSlicingManager(unittest.TestCase):
    """Tests for ``octoprint.slicing.SlicingManager`` against a fully mocked
    slicer plugin, plugin manager, printer profile manager and settings."""

    def setUp(self):
        self.addCleanup(self.cleanUp)
        import tempfile
        self.profile_path = tempfile.mkdtemp()
        # mocked slicer plugin registered under the identifier "mock"
        self.slicer_plugin = mock.MagicMock()
        self.slicer_plugin.get_slicer_properties.return_value = {
            "type": "mock",
            "name": "Mock",
            "same_device": True,
        }
        self.slicer_plugin.is_slicer_configured.return_value = True
        # mock plugin manager
        self.plugin_manager_patcher = mock.patch("octoprint.plugin.plugin_manager")
        self.plugin_manager = self.plugin_manager_patcher.start()
        self._mock_slicer_plugins(self.slicer_plugin)
        # mock profile manager
        import octoprint.printer.profile
        self.printer_profile_manager = mock.MagicMock(
            spec=octoprint.printer.profile.PrinterProfileManager
        )
        # mock settings
        self.settings_patcher = mock.patch("octoprint.slicing.settings")
        settings = self.settings_patcher.start()
        self.settings = settings.return_value
        self.slicing_manager = octoprint.slicing.SlicingManager(
            self.profile_path, self.printer_profile_manager
        )
        self.slicing_manager.initialize()
    def tearDown(self):
        import shutil
        shutil.rmtree(self.profile_path)
    def cleanUp(self):
        # stop patchers (registered via addCleanup so it also runs on setUp errors)
        self.settings_patcher.stop()
        self.plugin_manager_patcher.stop()
    def _mock_slicer_plugins(self, *plugins):
        # make the mocked plugin manager report the given plugins as slicers
        def get_implementations(*types):
            import octoprint.plugin
            if octoprint.plugin.SlicerPlugin in types:
                return plugins
            return {}
        self.plugin_manager.return_value.get_implementations.side_effect = (
            get_implementations
        )
    def test_registered_slicers(self):
        self.assertEqual(["mock"], self.slicing_manager.registered_slicers)
    def test_slicing_enabled(self):
        self.assertTrue(self.slicing_manager.slicing_enabled)
    def test_default_slicer(self):
        # a configured default slicer that is registered must be reported
        def get(path):
            if path == ["slicing", "defaultSlicer"]:
                return "mock"
            else:
                return None
        self.settings.get.side_effect = get
        self.assertEqual("mock", self.slicing_manager.default_slicer)
    def test_default_slicer_unknown(self):
        # a configured default slicer that is NOT registered yields None
        def get(path):
            if path == ["slicing", "defaultSlicer"]:
                return "unknown"
            else:
                return None
        self.settings.get.side_effect = get
        self.assertIsNone(self.slicing_manager.default_slicer)
    @mock.patch("threading.Thread")
    @mock.patch("tempfile.NamedTemporaryFile")
    @mock.patch("os.remove")
    def test_slice(self, mocked_os_remove, mocked_tempfile, mocked_thread):
        """End-to-end happy path of ``SlicingManager.slice``: temp profile
        creation, thread dispatch, slicer invocation, cleanup, callback."""
        # mock temporary file
        temp_file = mock.MagicMock()
        temp_file.name = "tmp.file"
        mocked_tempfile.return_value = temp_file
        # mock retrieval of default profile
        def get(path):
            return {}
        self.settings.get.side_effect = get
        default_profile = octoprint.slicing.SlicingProfile(
            "mock", "default", {"layer_height": 0.2, "fill_density": 40}
        )
        self.slicer_plugin.get_slicer_default_profile.return_value = default_profile
        # mock threading: constructor captures target/args, start() runs inline
        class MockThread:
            def __init__(self):
                self.target = None
                self.args = None
                self.mock = None
            def constructor(self, target=None, args=None):
                self.target = target
                self.args = args
                self.mock = mock.MagicMock()
                self.mock.start.side_effect = self.start
                return self.mock
            def start(self):
                self.target(*self.args)
        mock_thread = MockThread()
        mocked_thread.side_effect = mock_thread.constructor
        # mock slicing
        self.slicer_plugin.do_slice.return_value = True, None
        # mock printer profile manager
        printer_profile = {"_id": "mock_printer", "_name": "Mock Printer Profile"}
        def get_printer_profile(printer_profile_id):
            self.assertEqual("mock_printer", printer_profile_id)
            return printer_profile
        self.printer_profile_manager.get.side_effect = get_printer_profile
        ##~~ call tested method
        slicer_name = "mock"
        source_path = "prefix/source.file"
        dest_path = "prefix/dest.file"
        profile_name = "dummy_profile"
        printer_profile_id = "mock_printer"
        position = {"x": 10, "y": 20}
        callback = mock.MagicMock()
        callback_args = ("one", "two", "three")
        callback_kwargs = {"foo": "bar"}
        overrides = {"layer_height": 0.5}
        self.slicing_manager.slice(
            slicer_name,
            source_path,
            dest_path,
            profile_name,
            callback,
            printer_profile_id=printer_profile_id,
            position=position,
            callback_args=callback_args,
            callback_kwargs=callback_kwargs,
            overrides=overrides,
        )
        # assert that temporary profile was created properly
        self.slicer_plugin.save_slicer_profile.assert_called_once_with(
            "tmp.file", default_profile, overrides=overrides
        )
        # assert that slicing thread was created properly
        mocked_thread.assert_called_once_with(
            target=mock.ANY,
            args=(
                self.slicer_plugin,
                source_path,
                dest_path,
                profile_name,
                overrides,
                printer_profile,
                position,
                callback,
                callback_args,
                callback_kwargs,
            ),
        )
        self.assertTrue(mock_thread.mock.daemon)
        self.assertEqual(mock_thread.mock.start.call_count, 1)
        # assert that slicer was called correctly
        self.slicer_plugin.do_slice.assert_called_once_with(
            source_path,
            printer_profile,
            machinecode_path=dest_path,
            profile_path="tmp.file",
            position=position,
            on_progress=None,
            on_progress_args=None,
            on_progress_kwargs=None,
        )
        # assert that temporary profile was deleted again
        mocked_os_remove.assert_called_once_with("tmp.file")
        # assert that callback was called property
        callback.assert_called_once_with(*callback_args, **callback_kwargs)
| 6,937 | Python | .py | 170 | 30.029412 | 103 | 0.60735 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,036 | __init__.py | OctoPrint_OctoPrint/tests/server/__init__.py | """
Unit tests for ``octoprint.server``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 284 | Python | .py | 6 | 45.833333 | 103 | 0.72 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,037 | __init__.py | OctoPrint_OctoPrint/tests/server/api/__init__.py | """
Unit tests for ``octoprint.server.api``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 289 | Python | .py | 6 | 46.5 | 103 | 0.72043 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,038 | test_system.py | OctoPrint_OctoPrint/tests/server/api/test_system.py | """
Unit tests for ``octoprint.server.api`` system.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
from unittest import mock
class GetFolderUsageTest(unittest.TestCase):
    """Tests for the disk usage reporting helper of the system API."""

    def test_readUsageForFolder(self):
        from octoprint.server.api.system import _usageForFolders

        # fake psutil disk usage numbers and a settings object that maps
        # the "uploads" folder to a mocked path
        fake_usage = mock.MagicMock()
        fake_usage.free = 50
        fake_usage.total = 512

        fake_settings = mock.MagicMock()
        fake_settings.get.return_value = {"uploads": "mocked"}
        fake_settings.getBaseFolder.return_value = "mocked"

        with mock.patch("psutil.disk_usage") as disk_usage_mock, mock.patch(
            "octoprint.server.api.system.s"
        ) as settings_mock:
            disk_usage_mock.return_value = fake_usage
            settings_mock.return_value = fake_settings

            data = _usageForFolders()

        self.assertEqual(50, data["uploads"]["free"])
        self.assertEqual(512, data["uploads"]["total"])
| 1,197 | Python | .py | 24 | 40.416667 | 103 | 0.64716 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,039 | test_flask.py | OctoPrint_OctoPrint/tests/server/util/test_flask.py | """
Unit tests for ``octoprint.server.util.flask``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
from unittest import mock
import flask
from ddt import data, ddt, unpack
from octoprint.server.util.flask import (
OctoPrintFlaskRequest,
OctoPrintFlaskResponse,
ReverseProxiedEnvironment,
)
# Baseline WSGI environment used by all tests in this module; individual
# tests overlay their own keys on a copy of this dict.
standard_environ = {
    "HTTP_HOST": "localhost:5000",
    "SERVER_NAME": "localhost",
    "SERVER_PORT": "5000",
    "SCRIPT_NAME": "",
    "PATH_INFO": "/",
    "wsgi.url_scheme": "http",
}
@ddt
class ReverseProxiedEnvironmentTest(unittest.TestCase):
    """Tests for ``ReverseProxiedEnvironment``, which rewrites a WSGI environ
    based on ``X-Forwarded-*`` style headers and configured fallbacks.

    Each data tuple pairs the environ overrides applied on top of
    ``standard_environ`` with the additional changes expected in the output.
    """

    @data(
        # defaults
        ({}, {}),
        # prefix set, path info not prefixed
        (
            {"HTTP_X_SCRIPT_NAME": "/octoprint", "PATH_INFO": "/static/online.gif"},
            {"SCRIPT_NAME": "/octoprint"},
        ),
        # prefix set, path info prefixed
        (
            {
                "HTTP_X_SCRIPT_NAME": "/octoprint",
                "PATH_INFO": "/octoprint/static/online.gif",
            },
            {"SCRIPT_NAME": "/octoprint", "PATH_INFO": "/static/online.gif"},
        ),
        # host set
        (
            {"HTTP_X_FORWARDED_HOST": "example.com"},
            {
                "HTTP_HOST": "example.com",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "80",
            },
        ),
        # host set with port
        (
            {"HTTP_X_FORWARDED_HOST": "example.com:1234"},
            {
                "HTTP_HOST": "example.com:1234",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "1234",
            },
        ),
        # host and scheme set
        (
            {"HTTP_X_FORWARDED_HOST": "example.com", "HTTP_X_FORWARDED_PROTO": "https"},
            {
                "HTTP_HOST": "example.com",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "443",
                "wsgi.url_scheme": "https",
            },
        ),
        # host and scheme 2 set
        (
            {"HTTP_X_FORWARDED_HOST": "example.com", "HTTP_X_SCHEME": "https"},
            {
                "HTTP_HOST": "example.com",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "443",
                "wsgi.url_scheme": "https",
            },
        ),
        # host, server and port headers set -> only host wins
        (
            {
                "HTTP_X_FORWARDED_HOST": "example.com",
                "HTTP_X_FORWARDED_SERVER": "example2.com",
                "HTTP_X_FORWARDED_PORT": "444",
                "HTTP_X_FORWARDED_PROTO": "https",
            },
            {
                "HTTP_HOST": "example.com",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "443",
                "wsgi.url_scheme": "https",
            },
        ),
        # host set, server and port differ -> updated, standard port
        (
            {
                "HTTP_HOST": "example.com",
                "wsgi.url_scheme": "https",
                "SERVER_NAME": "localhost",
                "SERVER_PORT": "80",
            },
            {
                "HTTP_HOST": "example.com",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "443",
            },
        ),
        # host set, server and port differ -> updated, non standard port
        (
            {
                "HTTP_HOST": "example.com:444",
                "wsgi.url_scheme": "https",
                "SERVER_NAME": "localhost",
                "SERVER_PORT": "80",
            },
            {
                "HTTP_HOST": "example.com:444",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "444",
            },
        ),
        # multiple scheme entries -> only use first one
        (
            {
                "HTTP_X_FORWARDED_PROTO": "https,http",
            },
            {"wsgi.url_scheme": "https"},
        ),
        # host = none (should never happen but you never know) -> server & port used for reconstruction
        (
            {
                "HTTP_HOST": None,
                "HTTP_X_FORWARDED_SERVER": "example.com",
                "HTTP_X_FORWARDED_PORT": "80",
            },
            {
                "HTTP_HOST": "example.com",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "80",
            },
        ),
        # host = none, default port -> server & port used for reconstruction (ipv4)
        (
            {"HTTP_HOST": None, "SERVER_NAME": "127.0.0.1", "SERVER_PORT": "80"},
            {"HTTP_HOST": "127.0.0.1", "SERVER_NAME": "127.0.0.1", "SERVER_PORT": "80"},
        ),
        # host = none, non standard port -> server & port used for reconstruction (ipv4)
        (
            {"HTTP_HOST": None, "SERVER_NAME": "127.0.0.1", "SERVER_PORT": "444"},
            {
                "HTTP_HOST": "127.0.0.1:444",
                "SERVER_NAME": "127.0.0.1",
                "SERVER_PORT": "444",
            },
        ),
        # host = none, default port -> server & port used for reconstruction (ipv6)
        (
            {"HTTP_HOST": None, "SERVER_NAME": "fec1::1", "SERVER_PORT": "80"},
            {"HTTP_HOST": "fec1::1", "SERVER_NAME": "fec1::1", "SERVER_PORT": "80"},
        ),
        # host = none, non standard port -> server & port used for reconstruction (ipv6)
        (
            {"HTTP_HOST": None, "SERVER_NAME": "fec1::1", "SERVER_PORT": "444"},
            {
                "HTTP_HOST": "[fec1::1]:444",
                "SERVER_NAME": "fec1::1",
                "SERVER_PORT": "444",
            },
        ),
        # host set, server and port not, default port -> server & port derived from host (ipv4)
        (
            {"HTTP_HOST": "127.0.0.1", "SERVER_NAME": None, "SERVER_PORT": None},
            {"HTTP_HOST": "127.0.0.1", "SERVER_NAME": "127.0.0.1", "SERVER_PORT": "80"},
        ),
        # host set, server and port not, non standard port -> server & port derived from host (ipv4)
        (
            {"HTTP_HOST": "127.0.0.1:444", "SERVER_NAME": None, "SERVER_PORT": None},
            {
                "HTTP_HOST": "127.0.0.1:444",
                "SERVER_NAME": "127.0.0.1",
                "SERVER_PORT": "444",
            },
        ),
        # host set, server and port not, default port -> server & port derived from host (ipv6)
        (
            {"HTTP_HOST": "fec1::1", "SERVER_NAME": None, "SERVER_PORT": None},
            {"HTTP_HOST": "fec1::1", "SERVER_NAME": "fec1::1", "SERVER_PORT": "80"},
        ),
        # host set, server and port not, non standard port -> server & port derived from host (ipv6)
        (
            {"HTTP_HOST": "[fec1::1]:444", "SERVER_NAME": None, "SERVER_PORT": None},
            {
                "HTTP_HOST": "[fec1::1]:444",
                "SERVER_NAME": "fec1::1",
                "SERVER_PORT": "444",
            },
        ),
    )
    @unpack
    def test_stock(self, environ, expected):
        """Default configuration: forwarded headers only, no fallbacks."""
        reverse_proxied = ReverseProxiedEnvironment()
        merged_environ = dict(standard_environ)
        merged_environ.update(environ)
        actual = reverse_proxied(merged_environ)
        merged_expected = dict(standard_environ)
        merged_expected.update(environ)
        merged_expected.update(expected)
        self.assertDictEqual(merged_expected, actual)
    @data(
        # server and port headers set -> host derived with port
        (
            {
                "SERVER_NAME": "example2.com",
                "SERVER_PORT": "444",
                "HTTP_X_FORWARDED_PROTO": "https",
            },
            {
                "HTTP_HOST": "example2.com:444",
                "SERVER_NAME": "example2.com",
                "SERVER_PORT": "444",
                "wsgi.url_scheme": "https",
            },
        ),
        # server and port headers set, standard port -> host derived, no port
        (
            {
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "80",
            },
            {
                "HTTP_HOST": "example.com",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "80",
            },
        ),
        # server and port forwarded headers set -> host derived with port
        (
            {
                "HTTP_X_FORWARDED_SERVER": "example2.com",
                "HTTP_X_FORWARDED_PORT": "444",
                "HTTP_X_FORWARDED_PROTO": "https",
            },
            {
                "HTTP_HOST": "example2.com:444",
                "SERVER_NAME": "example2.com",
                "SERVER_PORT": "444",
                "wsgi.url_scheme": "https",
            },
        ),
        # server and port forwarded headers set, standard port -> host derived, no port
        (
            {
                "HTTP_X_FORWARDED_SERVER": "example.com",
                "HTTP_X_FORWARDED_PORT": "80",
            },
            {
                "HTTP_HOST": "example.com",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "80",
            },
        ),
    )
    @unpack
    def test_nohost(self, environ, expected):
        """Same as test_stock but without an incoming Host header."""
        reverse_proxied = ReverseProxiedEnvironment()
        merged_environ = dict(standard_environ)
        merged_environ.update(environ)
        del merged_environ["HTTP_HOST"]
        actual = reverse_proxied(merged_environ)
        merged_expected = dict(standard_environ)
        merged_expected.update(environ)
        merged_expected.update(expected)
        self.assertDictEqual(merged_expected, actual)
    @data(
        # prefix overridden
        (
            {"prefix": "fallback_prefix"},
            {},
            {
                "SCRIPT_NAME": "fallback_prefix",
            },
        ),
        # scheme overridden
        ({"scheme": "https"}, {}, {"wsgi.url_scheme": "https"}),
        # host overridden, default port
        (
            {"host": "example.com"},
            {},
            {
                "HTTP_HOST": "example.com",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "80",
            },
        ),
        # host overridden, included port
        (
            {"host": "example.com:81"},
            {},
            {
                "HTTP_HOST": "example.com:81",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "81",
            },
        ),
        # prefix not really overridden, forwarded headers take precedence
        ({"prefix": "/octoprint"}, {"HTTP_X_SCRIPT_NAME": ""}, {}),
        # scheme not really overridden, forwarded headers take precedence
        ({"scheme": "https"}, {"HTTP_X_FORWARDED_PROTO": "http"}, {}),
        # scheme 2 not really overridden, forwarded headers take precedence
        ({"scheme": "https"}, {"HTTP_X_SCHEME": "http"}, {}),
        # host not really overridden, forwarded headers take precedence
        ({"host": "example.com:444"}, {"HTTP_X_FORWARDED_HOST": "localhost:5000"}, {}),
        # server not really overridden, forwarded headers take precedence
        ({"server": "example.com"}, {"HTTP_X_FORWARDED_SERVER": "localhost"}, {}),
        # port not really overridden, forwarded headers take precedence
        ({"port": "444"}, {"HTTP_X_FORWARDED_PORT": "5000"}, {}),
        # server and port not really overridden, Host header wins
        ({"server": "example.com", "port": "80"}, {}, {}),
    )
    @unpack
    def test_fallbacks(self, fallbacks, environ, expected):
        """Configured fallbacks apply only when no forwarded header is present."""
        reverse_proxied = ReverseProxiedEnvironment(**fallbacks)
        merged_environ = dict(standard_environ)
        merged_environ.update(environ)
        actual = reverse_proxied(merged_environ)
        merged_expected = dict(standard_environ)
        merged_expected.update(environ)
        merged_expected.update(expected)
        self.assertDictEqual(merged_expected, actual)
    @data(
        # server overridden
        (
            {"server": "example.com"},
            {},
            {
                "HTTP_HOST": "example.com:5000",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "5000",
            },
        ),
        # port overridden, standard port
        ({"port": "80"}, {}, {"HTTP_HOST": "localhost", "SERVER_PORT": "80"}),
        # port overridden, non standard port
        ({"port": "81"}, {}, {"HTTP_HOST": "localhost:81", "SERVER_PORT": "81"}),
        # server and port overridden, default port
        (
            {"server": "example.com", "port": "80"},
            {},
            {
                "HTTP_HOST": "example.com",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "80",
            },
        ),
        # server and port overridden, non default port
        (
            {"server": "example.com", "port": "81"},
            {},
            {
                "HTTP_HOST": "example.com:81",
                "SERVER_NAME": "example.com",
                "SERVER_PORT": "81",
            },
        ),
    )
    @unpack
    def test_fallbacks_nohost(self, fallbacks, environ, expected):
        """Fallback handling when no Host header is present at all."""
        reverse_proxied = ReverseProxiedEnvironment(**fallbacks)
        merged_environ = dict(standard_environ)
        merged_environ.update(environ)
        del merged_environ["HTTP_HOST"]
        actual = reverse_proxied(merged_environ)
        merged_expected = dict(standard_environ)
        merged_expected.update(environ)
        merged_expected.update(expected)
        self.assertDictEqual(merged_expected, actual)
    def test_header_config_ok(self):
        # header names are translated to their WSGI environ representation
        result = ReverseProxiedEnvironment.to_header_candidates(
            ["prefix-header1", "prefix-header2"]
        )
        self.assertSetEqual(set(result), {"HTTP_PREFIX_HEADER1", "HTTP_PREFIX_HEADER2"})
    def test_header_config_string(self):
        # a single string is treated like a one-element list
        result = ReverseProxiedEnvironment.to_header_candidates("prefix-header")
        self.assertSetEqual(set(result), {"HTTP_PREFIX_HEADER"})
    def test_header_config_none(self):
        # None yields no candidates
        result = ReverseProxiedEnvironment.to_header_candidates(None)
        self.assertEqual(result, [])
##~~
class OctoPrintFlaskRequestTest(unittest.TestCase):
    """Tests for ``OctoPrintFlaskRequest``'s environment wrapping and
    suffix-aware cookie handling."""

    def setUp(self):
        self.orig_environment_wrapper = OctoPrintFlaskRequest.environment_wrapper
        self.app = flask.Flask("testapp")
        self.app.config["SECRET_KEY"] = "secret"
    def tearDown(self):
        # restore the class-level wrapper changed by test_environment_wrapper
        OctoPrintFlaskRequest.environment_wrapper = staticmethod(
            self.orig_environment_wrapper
        )
    def test_environment_wrapper(self):
        # the configured wrapper must be applied to the request's environ
        def environment_wrapper(environ):
            environ.update({"TEST": "yes"})
            return environ
        OctoPrintFlaskRequest.environment_wrapper = staticmethod(environment_wrapper)
        request = OctoPrintFlaskRequest(standard_environ)
        self.assertTrue("TEST" in request.environ)
    def test_server_name(self):
        request = OctoPrintFlaskRequest(standard_environ)
        self.assertEqual("localhost", request.server_name)
    def test_server_port(self):
        request = OctoPrintFlaskRequest(standard_environ)
        self.assertEqual("5000", request.server_port)
    def test_cookie_suffix(self):
        # the cookie suffix encodes the server port
        request = OctoPrintFlaskRequest(standard_environ)
        self.assertEqual("_P5000", request.cookie_suffix)
    def test_cookie_suffix_with_root(self):
        # ... and the script root, with "/" mapped to "|"
        script_root_environ = dict(standard_environ)
        script_root_environ["SCRIPT_NAME"] = "/path/to/octoprint"
        request = OctoPrintFlaskRequest(script_root_environ)
        self.assertEqual("_P5000_R|path|to|octoprint", request.cookie_suffix)
    def test_cookies(self):
        # suffixed cookies win over unsuffixed ones and are returned under
        # their plain name; cookies with a foreign suffix stay untouched
        environ = dict(standard_environ)
        environ["HTTP_COOKIE"] = (
            "postfixed_P5000=postfixed_value; "
            "postfixed_wrong_P5001=postfixed_wrong_value; "
            "unpostfixed=unpostfixed_value; "
            "both_P5000=both_postfixed_value; "
            "both=both_unpostfixed_value;"
        )
        request = OctoPrintFlaskRequest(environ)
        with self.app.app_context():
            cookies = request.cookies
        self.assertDictEqual(
            {
                "postfixed": "postfixed_value",
                "postfixed_wrong_P5001": "postfixed_wrong_value",
                "unpostfixed": "unpostfixed_value",
                "both": "both_postfixed_value",
            },
            cookies,
        )
##~~
@ddt
class OctoPrintFlaskResponseTest(unittest.TestCase):
    """Tests for ``OctoPrintFlaskResponse``'s suffixed and path-scoped
    set_cookie/delete_cookie behavior."""

    def setUp(self):
        # mock settings
        self.settings_patcher = mock.patch("octoprint.settings.settings")
        self.settings_getter = self.settings_patcher.start()
        self.settings = mock.MagicMock()
        self.settings_getter.return_value = self.settings
        self.app = flask.Flask("testapp")
        self.app.config["SECRET_KEY"] = "secret"
    def tearDown(self):
        self.settings_patcher.stop()
    @data(
        [None, None, False, None, None],
        [None, None, False, "none", "None"],
        [None, None, False, "lax", "lax"],
        [None, None, False, "StRiCt", "strict"],
        [None, None, False, "INVALID", None],
        [None, None, True, None, None],
        ["/subfolder/", None, False, None, None],
        [None, "/some/other/script/root", False, None, None],
        ["/subfolder/", "/some/other/script/root", False, None, None],
    )
    @unpack
    def test_cookie_set_and_delete(
        self, path, scriptroot, secure, samesite, expected_samesite
    ):
        # parameters: explicit cookie path, request script root, secure flag,
        # configured SameSite value and the value expected to be passed on
        environ = dict(standard_environ)
        expected_suffix = "_P5000"
        if scriptroot is not None:
            environ.update({"SCRIPT_NAME": scriptroot})
            expected_suffix += "_R" + scriptroot.replace("/", "|")
        request = OctoPrintFlaskRequest(environ)
        if path:
            expected_path_set = expected_path_delete = path
        else:
            expected_path_set = expected_path_delete = "/"
        if scriptroot:
            expected_path_set = scriptroot + expected_path_set
        if path is not None:
            kwargs = {"path": path}
        else:
            kwargs = {}
        with mock.patch("flask.request", new=request):
            with mock.patch("octoprint.server.util.flask.settings") as settings_mock:
                settings = mock.MagicMock()
                settings.getBoolean.return_value = secure
                settings.get.return_value = samesite
                settings_mock.return_value = settings
                response = OctoPrintFlaskResponse()
                # test set_cookie
                with mock.patch("flask.Response.set_cookie") as set_cookie_mock:
                    with self.app.app_context():
                        response.set_cookie("some_key", "some_value", **kwargs)
                    # set_cookie should have key and path values adjusted
                    set_cookie_mock.assert_called_once_with(
                        response,
                        "some_key" + expected_suffix,
                        value="some_value",
                        path=expected_path_set,
                        secure=secure,
                        samesite=expected_samesite,
                    )
                # test delete_cookie
                with mock.patch("flask.Response.set_cookie") as set_cookie_mock:
                    with mock.patch("flask.Response.delete_cookie") as delete_cookie_mock:
                        with self.app.app_context():
                            response.delete_cookie("some_key", **kwargs)
                        # delete_cookie internally calls set_cookie - so our delete_cookie call still uses the non modified
                        # key and path values, set_cookie will translate those (as tested above)
                        delete_cookie_mock.assert_called_once_with(
                            response, "some_key", path=expected_path_delete, domain=None
                        )
                        # we also test if an additional set_cookie call for the non modified versions happens, as
                        # implemented to ensure any old cookies from before introduction of the suffixes and path handling
                        # are deleted as well
                        set_cookie_mock.assert_called_once_with(
                            response,
                            "some_key",
                            expires=0,
                            max_age=0,
                            path=expected_path_delete,
                            domain=None,
                        )
| 20,755 | Python | .py | 525 | 27.546667 | 123 | 0.520236 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,040 | test_webassets.py | OctoPrint_OctoPrint/tests/server/util/test_webassets.py | """
Unit tests for ``octoprint.server.util.flask``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import ddt
from octoprint.server.util.webassets import replace_url
@ddt.ddt
class UrlReplaceTest(unittest.TestCase):
    """Tests for the webassets URL rewriting helper ``replace_url``."""

    @ddt.data(
        (
            "mytest/some/path/",
            "mytest/another/longer/path/",
            "http://example.com/foo.html",
            "http://example.com/foo.html",
        ),
        (
            "mytest/some/path/",
            "mytest/another/longer/path/",
            "/path/foo.html",
            "/path/foo.html",
        ),
        (
            "http://example.com/mytest/some/path/",
            "mytest/another/longer/path/",
            "../foo.html",
            "http://example.com/mytest/some/foo.html",
        ),
        (
            "mytest/some/path/",
            "mytest/another/longer/path/",
            "../foo.html",
            "../../../some/foo.html",
        ),
    )
    @ddt.unpack
    def test_replace_url(self, source_url, output_url, url, expected):
        # absolute and root-relative urls pass through unchanged; relative
        # urls are rebased from source_url onto output_url
        self.assertEqual(expected, replace_url(source_url, output_url, url))
| 1,343 | Python | .py | 41 | 24.512195 | 103 | 0.566899 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,041 | test_tornado.py | OctoPrint_OctoPrint/tests/server/util/test_tornado.py | """
Unit tests for ``octoprint.server.util.tornado``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
from ddt import data, ddt, unpack
##~~ _parse_header
@ddt
class ParseHeaderTest(unittest.TestCase):
    """Tests for the header parameter parser ``_parse_header``."""

    @data(
        ("form-data; filename=test.gco", "form-data", {"filename": "test.gco"}),
        ('form-data; filename="test.gco"', "form-data", {"filename": "test.gco"}),
        ("form-data; filename=test\\\\.gco", "form-data", {"filename": "test\\\\.gco"}),
        ('form-data; filename="test\\\\.gco"', "form-data", {"filename": "test\\.gco"}),
    )
    @unpack
    def test_parse_header_strip_quotes(self, value, expected_key, expected_dict):
        # default mode: surrounding quotes are stripped from parameter values
        from octoprint.server.util.tornado import _parse_header

        key, params = _parse_header(value)

        self.assertEqual(expected_key, key)
        self.assertDictEqual(expected_dict, params)

    @data(
        ("form-data; filename=test.gco", "form-data", {"filename": "test.gco"}),
        ('form-data; filename="test.gco"', "form-data", {"filename": '"test.gco"'}),
        ("form-data; filename=test\\\\.gco", "form-data", {"filename": "test\\\\.gco"}),
        (
            'form-data; filename="test\\\\.gco"',
            "form-data",
            {"filename": '"test\\\\.gco"'},
        ),
        (
            "form-data; filename=iso-8859-1'en'test.gco",
            "form-data",
            {"filename": "iso-8859-1'en'test.gco"},
        ),
    )
    @unpack
    def test_parse_header_leave_quotes(self, value, expected_key, expected_dict):
        # with strip_quotes=False any quoting is kept verbatim
        from octoprint.server.util.tornado import _parse_header

        key, params = _parse_header(value, strip_quotes=False)

        self.assertEqual(expected_key, key)
        self.assertDictEqual(expected_dict, params)
##~~ _strip_value_quotes
@ddt
class StripValueQuotesTest(unittest.TestCase):
    """Tests for ``_strip_value_quotes``."""

    @data(
        ("", ""),
        (None, None),
        ('"test.gco"', "test.gco"),
        ('"test".gco', '"test".gco'),
        ("test\\\\.gco", "test\\\\.gco"),
        ('"test\\\\.gco"', "test\\.gco"),
    )
    @unpack
    def test_strip_value_quotes(self, value, expected):
        # quotes are only stripped when they fully enclose the value
        from octoprint.server.util.tornado import _strip_value_quotes

        self.assertEqual(expected, _strip_value_quotes(value))
##~~ _extended_header_value
@ddt
class ExtendedHeaderValueTest(unittest.TestCase):
    """Tests for ``_extended_header_value`` (RFC 5987 style encoded values).

    The original data set contained every non-trivial row twice verbatim,
    which only generated redundant duplicate tests; the duplicates were
    removed.
    """

    @data(
        ("", ""),
        (None, None),
        ('"quoted-string"', "quoted-string"),
        ('"qüöted-string"', "qüöted-string"),
        ("iso-8859-1'en'%A3%20rates", "£ rates"),
        ("UTF-8''%c2%a3%20and%20%e2%82%ac%20rates", "£ and € rates"),
    )
    @unpack
    def test_extended_header_value(self, value, expected):
        from octoprint.server.util.tornado import _extended_header_value

        actual = _extended_header_value(value)
        self.assertEqual(expected, actual)
| 3,356 | Python | .py | 80 | 34.625 | 103 | 0.601732 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,042 | __init__.py | OctoPrint_OctoPrint/tests/server/util/__init__.py | """
Unit tests for ``octoprint.server.util``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 289 | Python | .py | 6 | 46.666667 | 103 | 0.721429 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,043 | test_util.py | OctoPrint_OctoPrint/tests/server/util/test_util.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2022 The OctoPrint Project - Released under terms of the AGPLv3 License"
import pytest
DEFAULT_ALLOWED_PATHS = ["/", "/recovery/", "/plugin/appkeys/auth/*"]
PREFIXED_ALLOWED_PATHS = list(map(lambda x: "/octoprint" + x, DEFAULT_ALLOWED_PATHS))
@pytest.mark.parametrize(
    "url,paths,expected",
    [
        # various default UI URLs
        ("/", DEFAULT_ALLOWED_PATHS, True),
        ("/?", DEFAULT_ALLOWED_PATHS, True),
        ("/?l10n=de", DEFAULT_ALLOWED_PATHS, True),
        ("/?l10n=de&", DEFAULT_ALLOWED_PATHS, True),
        ("/octoprint/", PREFIXED_ALLOWED_PATHS, True),
        # various recovery URLs
        ("/recovery/", DEFAULT_ALLOWED_PATHS, True),
        ("/recovery/?", DEFAULT_ALLOWED_PATHS, True),
        ("/recovery/?l10n=de", DEFAULT_ALLOWED_PATHS, True),
        ("/octoprint/recovery/?l10n=de", PREFIXED_ALLOWED_PATHS, True),
        # various appkeys URLs
        ("/plugin/appkeys/auth/1234567890", DEFAULT_ALLOWED_PATHS, True),
        ("/plugin/appkeys/auth/1234567890?", DEFAULT_ALLOWED_PATHS, True),
        ("/plugin/appkeys/auth/1234567890?l10n=de", DEFAULT_ALLOWED_PATHS, True),
        ("/octoprint/plugin/appkeys/auth/1234567890", PREFIXED_ALLOWED_PATHS, True),
        # various external URLs
        ("http://example.com", DEFAULT_ALLOWED_PATHS, False),
        ("https://example.com", DEFAULT_ALLOWED_PATHS, False),
        ("//example.com", DEFAULT_ALLOWED_PATHS, False),
        ("/\\/\\example.com", DEFAULT_ALLOWED_PATHS, False),
        (" /\\/\\example.com", DEFAULT_ALLOWED_PATHS, False),
        ("\\/\\/example.com", DEFAULT_ALLOWED_PATHS, False),
        (" \\/\\/example.com", DEFAULT_ALLOWED_PATHS, False),
        # other stuff
        ("javascript:alert(document.cookie)", DEFAULT_ALLOWED_PATHS, False),
    ],
)
def test_validate_local_redirect(url, paths, expected):
    """Only local urls matching one of the allowed paths may be redirect targets."""
    from octoprint.server.util import validate_local_redirect

    result = validate_local_redirect(url, paths)
    assert result == expected
| 2,072 | Python | .py | 39 | 46.153846 | 103 | 0.651381 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,044 | __init__.py | OctoPrint_OctoPrint/tests/timelapse/__init__.py | """
Unit tests for ``octoprint.timelapse``.
"""
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 241 | Python | .py | 5 | 47 | 103 | 0.73617 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,045 | test_timelapse_helpers.py | OctoPrint_OctoPrint/tests/timelapse/test_timelapse_helpers.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
import os
import time
import unittest
from collections import OrderedDict, namedtuple
from unittest import mock
import ddt
import octoprint.plugin
import octoprint.settings
import octoprint.timelapse
_stat = namedtuple("StatResult", "st_size, st_ctime, st_mtime")
_entry = namedtuple("DirEntry", "name, path, is_file, is_dir, stat")
@ddt.ddt
class TimelapseTest(unittest.TestCase):
    """Tests for the timelapse helpers in ``octoprint.timelapse``.

    ``octoprint.timelapse.settings`` and ``octoprint.timelapse.plugin_manager``
    are patched in ``setUp`` so the helpers only ever see mocked data.
    """

    def setUp(self):
        # mock settings
        self.settings_patcher = mock.patch("octoprint.timelapse.settings")
        self.settings_getter = self.settings_patcher.start()

        self.settings = mock.create_autospec(octoprint.settings.Settings)
        self.settings_getter.return_value = self.settings

        # mock plugin manager
        self.plugin_manager_patcher = mock.patch("octoprint.timelapse.plugin_manager")
        self.plugin_manager_getter = self.plugin_manager_patcher.start()

        self.plugin_manager = mock.create_autospec(octoprint.plugin.PluginManager)
        self.plugin_manager_getter.return_value = self.plugin_manager

        def extension_factory(*args, **kwargs):
            return ["gif"]

        # hook registers "gif" as an additional valid timelapse extension
        hooks = {"test": extension_factory}
        self.plugin_manager.get_hooks.return_value = hooks

        self.now = time.time()

    def tearDown(self):
        # NOTE: this hook was formerly named "cleanUp", which unittest never
        # calls - the patchers started in setUp were never stopped and leaked
        # the mocked settings/plugin_manager into any later tests
        self.settings_patcher.stop()
        self.plugin_manager_patcher.stop()

    @ddt.data(("test.mpg", True), ("test.dat", False), ("test.gif", True))
    @ddt.unpack
    def test_valid_timelapse(self, input, expected):
        ## test
        actual = octoprint.timelapse.valid_timelapse(input)

        ## verify
        self.assertEqual(expected, actual)

    @mock.patch("os.remove")
    @mock.patch("os.scandir")
    def test_delete_unrendered_timelapse(self, mock_scandir, mock_remove):
        ## prepare
        mocked_path = "/path/to/timelapse/tmp"
        mocked_files = self._generate_scandir(
            mocked_path,
            [
                "a-0.jpg",
                "a-1.jpg",
                "a-2.jpg",
                "b-0.jpg",
                "b-1.jpg",
                "tmp_00000.jpg",
                "tmp_00001.jpg",
            ],
        )

        self.settings.getBaseFolder.return_value = mocked_path
        mock_scandir.return_value = mocked_files.values()

        ## test
        octoprint.timelapse.delete_unrendered_timelapse("b")

        ## verify: only the frames belonging to prefix "b" get removed
        expected_deletions = map(
            lambda x: os.path.join(mocked_path, x), ["b-0.jpg", "b-1.jpg"]
        )
        expected_deletion_calls = list(map(mock.call, expected_deletions))
        self.assertListEqual(mock_remove.mock_calls, expected_deletion_calls)

    @mock.patch("time.time")
    @mock.patch("os.remove")
    @mock.patch("os.scandir")
    def test_delete_old_unrendered_timelapses(self, mock_scandir, mock_remove, mock_time):
        ## prepare
        mocked_path = "/path/to/timelapse/tmp"
        files = [
            "old-0.jpg",
            "old-1.jpg",
            "old-2.jpg",
            "prefix-0.jpg",
            "prefix-1.jpg",
            "tmp_00000.jpg",
            "tmp_00001.jpg",
        ]
        files = {f: None for f in files}
        # the "old" prefix gets a timestamp older than the cutoff
        files["old-0.jpg"] = _stat(st_size=10, st_ctime=0, st_mtime=0)

        now = self.now
        days = 1

        self.settings.getBaseFolder.return_value = mocked_path
        self.settings.getInt.return_value = days
        mock_time.return_value = now
        mock_scandir.return_value = self._generate_scandir(mocked_path, files).values()

        ## test
        octoprint.timelapse.delete_old_unrendered_timelapses()

        ## verify: tmp_* frames and all frames of the outdated "old" prefix go
        expected_deletions = map(
            lambda x: os.path.join(mocked_path, x),
            ["tmp_00000.jpg", "tmp_00001.jpg", "old-0.jpg", "old-1.jpg", "old-2.jpg"],
        )
        expected_deletion_calls = list(map(mock.call, expected_deletions))
        self.assertListEqual(mock_remove.mock_calls, expected_deletion_calls)

    @mock.patch("os.scandir")
    def test_get_finished_timelapses(self, mock_listdir):
        ## prepare
        mocked_path = "/path/to/timelapse"
        files = {}
        files["one.mpg"] = _stat(st_size=1024, st_ctime=self.now, st_mtime=self.now)
        files["nope.jpg"] = _stat(st_size=100, st_ctime=self.now, st_mtime=self.now)
        files["two.mpg"] = _stat(st_size=2048, st_ctime=self.now, st_mtime=self.now)

        self.settings.getBaseFolder.return_value = mocked_path
        mock_listdir.return_value = self._generate_scandir(mocked_path, files).values()

        ## test
        result = octoprint.timelapse.get_finished_timelapses()

        ## verify: only the movie files are listed, not the jpg
        self.assertEqual(len(result), 2)
        self.assertEqual(result[0]["name"], "one.mpg")
        self.assertEqual(result[0]["bytes"], 1024)
        self.assertEqual(result[1]["name"], "two.mpg")
        self.assertEqual(result[1]["bytes"], 2048)

    @mock.patch("os.scandir")
    def test_unrendered_timelapses(self, mock_scandir):
        ## prepare
        files = {}
        files["one-0.jpg"] = _stat(
            st_size=1, st_ctime=self.now - 1, st_mtime=self.now - 1
        )
        files["one-1.jpg"] = _stat(st_size=2, st_ctime=self.now, st_mtime=self.now)
        files["one-2.jpg"] = _stat(st_size=3, st_ctime=self.now, st_mtime=self.now)
        files["nope.mpg"] = _stat(st_size=2048, st_ctime=self.now, st_mtime=self.now)
        files["two-0.jpg"] = _stat(st_size=4, st_ctime=self.now, st_mtime=self.now)
        files["two-1.jpg"] = _stat(st_size=5, st_ctime=self.now, st_mtime=self.now)

        mocked_path = "/path/to/timelapse/tmp"
        self.settings.getBaseFolder.return_value = mocked_path
        mock_scandir.return_value = self._generate_scandir(mocked_path, files).values()

        ## test
        result = octoprint.timelapse.get_unrendered_timelapses()

        ## verify: frames are grouped by prefix, sizes summed per group
        self.assertEqual(len(result), 2)

        self.assertEqual(result[0]["name"], "one")
        self.assertEqual(result[0]["count"], 3)
        self.assertEqual(result[0]["bytes"], 6)

        self.assertEqual(result[1]["name"], "two")
        self.assertEqual(result[1]["count"], 2)
        self.assertEqual(result[1]["bytes"], 9)

    def _generate_scandir(self, path, files):
        """Build an ordered dict of fake ``os.scandir`` entries for *files*.

        *files* may be a dict mapping name to an optional ``_stat`` tuple or a
        plain list/tuple of names (which then get a default stat).
        """
        result = OrderedDict()

        def add_to_result(name, stat=None):
            if stat is None:
                stat = _stat(st_size=10, st_ctime=self.now, st_mtime=self.now)
            result[name] = _entry(
                name=name,
                path=os.path.join(path, name),
                is_file=True,
                is_dir=False,
                stat=lambda: stat,
            )

        if isinstance(files, dict):
            for f in sorted(files.keys()):
                stat = files[f]
                add_to_result(f, stat)
        elif isinstance(files, (list, tuple)):
            for f in files:
                add_to_result(f)
        else:
            raise ValueError("files must be either dict or list/tuple")

        return result

    def test_ffmpeg_parse(self):
        # Test strings
        noChange1Str = " built on Jan 7 2014 22:07:02 with gcc 4.8.2 (GCC)"
        durationStr = (
            " Duration: 00:00:18.60, start: 0.000000, bitrate: "  # 18s duration
        )
        noChange2Str = (
            "0: Video: mpeg2video, yuv420p, 640x480, q=2-31, 10000 kb/s, 90k tbn, 25 tbc"
        )
        progress1Str = "frame= 134 fps=0.0 q=1.6 size= 1528kB time=00:00:05.28 bitrate=2370.7kbits/s dup=80 drop=0 "  # 5s elapsed
        expectedProgress1 = 5 / 18 * 100
        progress2Str = "frame= 274 fps=270 q=2.0 size= 2748kB time=00:00:10.88 bitrate=2069.1kbits/s dup=164 drop=0 "  # 10s elapsed
        expectedProgress2 = 10 / 18 * 100

        # Callback mock
        callback = mock.MagicMock()
        callback.sendRenderProgress = mock.MagicMock()

        # Register mock callback
        # NOTE(review): the callback is never unregistered and thus stays
        # registered for later tests - confirm whether an unregister helper
        # should be invoked in cleanup
        octoprint.timelapse.register_callback(callback)

        r = octoprint.timelapse.TimelapseRenderJob("", "", "", "")
        self.assertEqual(r._parsed_duration, 0)

        # progress lines before a duration was seen must not report progress
        r._process_ffmpeg_output(noChange1Str)
        self.assertEqual(r._parsed_duration, 0)

        r._process_ffmpeg_output(progress1Str)
        self.assertEqual(r._parsed_duration, 0)

        r._process_ffmpeg_output(durationStr)
        self.assertEqual(r._parsed_duration, 18)

        r._process_ffmpeg_output(noChange2Str)
        self.assertEqual(r._parsed_duration, 18)

        # once the duration is known, progress is reported as a percentage
        r._process_ffmpeg_output(progress1Str)
        self.assertEqual(r._parsed_duration, 18)
        self.assertAlmostEqual(
            callback.sendRenderProgress.call_args_list[0][0][0], expectedProgress1
        )

        r._process_ffmpeg_output(progress2Str)
        self.assertEqual(r._parsed_duration, 18)
        self.assertAlmostEqual(
            callback.sendRenderProgress.call_args_list[1][0][0], expectedProgress2
        )
| 9,040 | Python | .py | 202 | 35.277228 | 136 | 0.614149 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,046 | test_timelapse_renderjob.py | OctoPrint_OctoPrint/tests/timelapse/test_timelapse_renderjob.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
from ddt import data, ddt, unpack
from octoprint.timelapse import TimelapseRenderJob
@ddt
class TimelapseRenderJobTest(unittest.TestCase):
    """Tests for the ffmpeg command and filter string assembly helpers of
    ``TimelapseRenderJob``."""

    @data(
        # each case: (positional args for _create_ffmpeg_command_string,
        #             keyword args, expected full command line)
        (
            (
                '{ffmpeg} -r {fps} -i "{input}" -vcodec {videocodec} -threads {threads} -b {bitrate} -f {containerformat} -y {filters} "{output}"',
                "/path/to/ffmpeg",
                25,
                "10000k",
                1,
                "/path/to/input/files_%d.jpg",
                "/path/to/output.mpg",
                "mpeg2video",
            ),
            {},
            '/path/to/ffmpeg -r 25 -i "/path/to/input/files_%d.jpg" -vcodec mpeg2video -threads 1 -b 10000k -f vob -y -vf \'[in] format=yuv420p [out]\' "/path/to/output.mpg"',
        ),
        # extra literal options (-g 5) in the template are preserved
        (
            (
                '{ffmpeg} -r {fps} -i "{input}" -vcodec {videocodec} -threads {threads} -b {bitrate} -f {containerformat} -y -g 5 {filters} "{output}"',
                "/path/to/ffmpeg",
                25,
                "10000k",
                1,
                "/path/to/input/files_%d.jpg",
                "/path/to/output.mpg",
                "mpeg2video",
            ),
            {},
            '/path/to/ffmpeg -r 25 -i "/path/to/input/files_%d.jpg" -vcodec mpeg2video -threads 1 -b 10000k -f vob -y -g 5 -vf \'[in] format=yuv420p [out]\' "/path/to/output.mpg"',
        ),
        # .mp4 output with libx264 selects the mp4 container format
        (
            (
                '{ffmpeg} -r {fps} -i "{input}" -vcodec {videocodec} -threads {threads} -b {bitrate} -f {containerformat} -y {filters} "{output}"',
                "/path/to/ffmpeg",
                25,
                "10000k",
                1,
                "/path/to/input/files_%d.jpg",
                "/path/to/output.mp4",
                "libx264",
            ),
            {},
            '/path/to/ffmpeg -r 25 -i "/path/to/input/files_%d.jpg" -vcodec libx264 -threads 1 -b 10000k -f mp4 -y -vf \'[in] format=yuv420p [out]\' "/path/to/output.mp4"',
        ),
        # hflip option adds a hflip step to the filter chain
        (
            (
                '{ffmpeg} -r {fps} -i "{input}" -vcodec {videocodec} -threads {threads} -b {bitrate} -f {containerformat} -y {filters} "{output}"',
                "/path/to/ffmpeg",
                25,
                "10000k",
                1,
                "/path/to/input/files_%d.jpg",
                "/path/to/output.mpg",
                "mpeg2video",
            ),
            {"hflip": True},
            '/path/to/ffmpeg -r 25 -i "/path/to/input/files_%d.jpg" -vcodec mpeg2video -threads 1 -b 10000k -f vob -y -vf \'[in] format=yuv420p,hflip [out]\' "/path/to/output.mpg"',
        ),
        # rotate + watermark combine transpose and overlay filters
        (
            (
                '{ffmpeg} -r {fps} -i "{input}" -vcodec {videocodec} -threads {threads} -b {bitrate} -f {containerformat} -y {filters} "{output}"',
                "/path/to/ffmpeg",
                25,
                "20000k",
                4,
                "/path/to/input/files_%d.jpg",
                "/path/to/output.mpg",
                "mpeg2video",
            ),
            {"rotate": True, "watermark": "/path/to/watermark.png"},
            '/path/to/ffmpeg -r 25 -i "/path/to/input/files_%d.jpg" -vcodec mpeg2video -threads 4 -b 20000k -f vob -y -vf \'[in] format=yuv420p,transpose=2 [postprocessed]; movie=/path/to/watermark.png [wm]; [postprocessed][wm] overlay=10:main_h-overlay_h-10 [out]\' "/path/to/output.mpg"',
        ),
    )
    @unpack
    def test_create_ffmpeg_command_string(self, args, kwargs, expected):
        actual = TimelapseRenderJob._create_ffmpeg_command_string(*args, **kwargs)
        self.assertEqual(expected, actual)

    @data(
        # (keyword args, expected -vf filter chain)
        ({}, "[in] format=yuv420p [out]"),
        ({"pixfmt": "test"}, "[in] format=test [out]"),
        ({"hflip": True}, "[in] format=yuv420p,hflip [out]"),
        ({"vflip": True}, "[in] format=yuv420p,vflip [out]"),
        ({"rotate": True}, "[in] format=yuv420p,transpose=2 [out]"),
        ({"vflip": True, "rotate": True}, "[in] format=yuv420p,vflip,transpose=2 [out]"),
        (
            {"vflip": True, "hflip": True, "rotate": True},
            "[in] format=yuv420p,hflip,vflip,transpose=2 [out]",
        ),
        (
            {"watermark": "/path/to/watermark.png"},
            "[in] format=yuv420p [postprocessed]; movie=/path/to/watermark.png [wm]; [postprocessed][wm] overlay=10:main_h-overlay_h-10 [out]",
        ),
        (
            {"hflip": True, "watermark": "/path/to/watermark.png"},
            "[in] format=yuv420p,hflip [postprocessed]; movie=/path/to/watermark.png [wm]; [postprocessed][wm] overlay=10:main_h-overlay_h-10 [out]",
        ),
    )
    @unpack
    def test_create_filter_string(self, kwargs, expected):
        actual = TimelapseRenderJob._create_filter_string(**kwargs)
        self.assertEqual(actual, expected)
| 4,986 | Python | .py | 107 | 34.401869 | 290 | 0.512826 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,047 | test_net.py | OctoPrint_OctoPrint/tests/util/test_net.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2017 The OctoPrint Project - Released under terms of the AGPLv3 License"
import socket
from unittest import mock
import pytest
import octoprint.util.net
def patched_interfaces():
    """Stand-in for ``netifaces.interfaces``: a single ethernet interface."""
    return ["eth0"]
def patched_ifaddresses(addr, netmask):
    """Stand-in for ``netifaces.ifaddresses``.

    Reports fixed v4/v6 addresses for ``eth0`` and nothing for any other
    interface; *netmask* selects the key name ("mask" vs "netmask") under
    which the netmask is stored, mirroring netifaces version differences.
    """
    if addr != "eth0":
        return {}

    v4_entries = [
        {"addr": "192.168.123.10", netmask: "255.255.255.0"},
        {"addr": "12.1.1.10", netmask: "255.0.0.0"},
    ]
    v6_entries = [
        {"addr": "2a01:4f8:1c0c:6958::1", netmask: "ffff:ffff:ffff:ffff::/64"}
    ]
    return {socket.AF_INET: v4_entries, socket.AF_INET6: v6_entries}
def patched_ifaddresses_mask(addr):
    """ifaddresses variant using the legacy "mask" key name."""
    return patched_ifaddresses(addr, netmask="mask")
def patched_ifaddresses_netmask(addr):
    """ifaddresses variant using the "netmask" key name."""
    return patched_ifaddresses(addr, netmask="netmask")
@pytest.mark.parametrize(
    "input_address,input_additional,expected",
    [
        ("127.0.0.1", [], True),
        ("192.168.123.234", [], True),
        ("172.24.0.1", [], True),
        ("10.1.2.3", [], True),
        ("fc00::1", [], True),
        ("::1", [], True),
        ("::ffff:192.168.1.1", [], True),
        ("::ffff:8.8.8.8", [], False),
        ("11.1.2.3", [], False),
        ("11.1.2.3", ["11/8"], True),
        ("12.1.1.123", [], True),
        ("2a01:4f8:1c0c:6958::1:23", [], True),
        ("fe80::89f3:31bb:ced0:2093%wlan0", [], True),
        (None, [], True),
    ],
)
def test_is_lan_address(input_address, input_additional, expected):
    """``is_lan_address`` against mocked netifaces interface data.

    Runs every case twice, once with interface data that uses the "mask" key
    and once with "netmask", to cover both netifaces key variants.
    """
    with mock.patch(
        "netifaces.interfaces", side_effect=patched_interfaces
    ), mock.patch.object(octoprint.util.net, "HAS_V6", True):
        for side_effect in (patched_ifaddresses_mask, patched_ifaddresses_netmask):
            with mock.patch("netifaces.ifaddresses", side_effect=side_effect):
                assert (
                    octoprint.util.net.is_lan_address(
                        input_address, additional_private=input_additional
                    )
                    == expected
                )
@pytest.mark.parametrize(
    "address,expected",
    [
        ("fe80::89f3:31bb:ced0:2093%wlan0", "fe80::89f3:31bb:ced0:2093"),
        ("2a01:4f8:1c0c:6958::1:23", "2a01:4f8:1c0c:6958::1:23"),
        ("10.1.2.3", "10.1.2.3"),
    ],
)
def test_strip_interface_tag(address, expected):
    """A ``%interface`` suffix is stripped, other addresses pass through."""
    stripped = octoprint.util.net.strip_interface_tag(address)
    assert stripped == expected
@pytest.mark.parametrize(
    "address,expected",
    [
        ("::ffff:192.168.1.1", "192.168.1.1"),
        ("::ffff:2a01:4f8", "::ffff:2a01:4f8"),
        ("2a01:4f8:1c0c:6958::1:23", "2a01:4f8:1c0c:6958::1:23"),
        ("11.1.2.3", "11.1.2.3"),
    ],
)
def test_unmap_v4_in_v6(address, expected):
    """v4-mapped-in-v6 addresses are unmapped, everything else is untouched."""
    unmapped = octoprint.util.net.unmap_v4_as_v6(address)
    assert unmapped == expected
@pytest.mark.parametrize(
    "address,expected",
    [
        ({"mask": "192.168.0.0/24"}, "192.168.0.0/24"),
        ({"netmask": "192.168.0.0/24"}, "192.168.0.0/24"),
    ],
)
def test_get_netmask(address, expected):
    """Both the "mask" and the "netmask" key are accepted as netmask source."""
    netmask = octoprint.util.net.get_netmask(address)
    assert netmask == expected
def test_get_netmask_broken_address():
    """A dict lacking both "mask" and "netmask" must raise ``ValueError``."""
    broken = {"nm": "192.168.0.0/24"}
    with pytest.raises(ValueError):
        octoprint.util.net.get_netmask(broken)
@pytest.mark.parametrize(
    "remote_addr,header,trusted_proxies,expected",
    [
        (
            "127.0.0.1",
            None,
            ["127.0.0.1", "::1"],
            "127.0.0.1",
        ),  # direct access via ipv4 localhost
        (
            "::1",
            None,
            ["127.0.0.1", "::1"],
            "::1",
        ),  # direct access via ipv6 localhost
        (
            "192.168.1.10",
            None,
            ["127.0.0.1", "::1"],
            "192.168.1.10",
        ),  # direct access via lan
        (
            "127.0.0.1",
            "192.168.1.10",
            ["127.0.0.1", "::1"],
            "192.168.1.10",
        ),  # access through reverse proxy on 127.0.0.1
        (
            "127.0.0.1",
            "10.1.2.3, 192.168.1.10",
            ["127.0.0.1", "::1", "192.168.1.10"],
            "10.1.2.3",
        ),  # access through trusted reverse proxies on 127.0.0.1 and 192.168.1.10
        (
            "127.0.0.1",
            "10.1.2.3, 192.168.1.10",
            ["127.0.0.1", "::1", "192.168.1.0/24"],
            "10.1.2.3",
        ),  # access through trusted reverse proxies on 127.0.0.1 and something on 192.168.1.0/24
        (
            "127.0.0.1",
            "10.1.2.3, 192.168.1.10",
            ["127.0.0.1", "::1", "unknown", "192.168.1.0/24"],
            "10.1.2.3",
        ),  # access through trusted reverse proxies on 127.0.0.1 and something on 192.168.1.0/24, invalid proxy in between
        (
            "::1",
            "fd12:3456:789a:2::1, fd12:3456:789a:1::1",
            ["127.0.0.1", "::1", "fd12:3456:789a:1::/64"],
            "fd12:3456:789a:2::1",
        ),  # access through trusted reverse proxies on ::1 and something on fd12:3456:789a:1::/64
        (
            "127.100.100.1",
            "10.1.2.3, 192.168.1.10",
            ["0.0.0.0/0"],
            "127.100.100.1",
        ),  # everything is trusted (BAD IDEA!)
        (
            "192.168.1.10",
            "127.0.0.1",
            ["127.0.0.1", "::1"],
            "192.168.1.10",
        ),  # spoofing attempt #1: direct access via lan, spoofed to 127.0.0.1
        (
            "::ffff:192.168.1.10",
            "::1",
            ["127.0.0.1", "::1"],
            "192.168.1.10",
        ),  # spoofing attempt #2: direct access via lan, spoofed to ::1
        (
            "127.0.0.1",
            "127.0.0.1, 192.168.1.10",
            ["127.0.0.1", "::1"],
            "192.168.1.10",
        ),  # spoofing attempt #3: access through reverse proxy on 127.0.0.1, real ip 192.168.1.10, spoofed to 127.0.0.1
        (
            "::1",
            "::1, ::ffff:192.168.1.10",
            ["127.0.0.1", "::1"],
            "192.168.1.10",
        ),  # spoofing attempt #4: access through reverse proxy on ::1, real ip 192.168.1.10, spoofed to ::1
        (
            "127.0.0.1",
            "127.0.0.1, 10.1.2.3, 192.168.1.10",
            ["127.0.0.1", "::1", "192.168.1.10"],
            "10.1.2.3",
        ),  # spoofing attempt #5: access through trusted reverse proxies on 127.0.0.1 and 192.168.1.10, real ip 10.1.2.3, spoofed to 127.0.0.1
        (
            "::1",
            "::1, fd12:3456:789a:2::1, fd12:3456:789a:1::1",
            ["127.0.0.1", "::1", "fd12:3456:789a:1::/64"],
            "fd12:3456:789a:2::1",
        ),  # spoofing attempt #6: access through trusted reverse proxies on ::1 and something on fd12:3456:789a:1::/64, spoofed to ::1
    ],
)
def test_get_http_client_ip(remote_addr, header, trusted_proxies, expected):
    """``get_http_client_ip`` resolves the actual client ip from the
    remote address, an X-Forwarded-For style header value and the list of
    trusted proxies; the spoofing cases assert header entries claiming to be
    a trusted proxy are not believed."""
    assert (
        octoprint.util.net.get_http_client_ip(remote_addr, header, trusted_proxies)
        == expected
    )
22,048 | test_comm_helpers.py | OctoPrint_OctoPrint/tests/util/test_comm_helpers.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
from ddt import data, ddt, unpack
@ddt
class TestCommHelpers(unittest.TestCase):
@data(
("M117 Test", "M117 Test"),
("M117 Test ; foo", "M117 Test "),
("M117 Test \\; foo", "M117 Test \\; foo"),
("M117 Test \\\\; foo", "M117 Test \\\\"),
("M117 Test \\\\\\; foo", "M117 Test \\\\\\; foo"),
("; foo", ""),
)
@unpack
def test_strip_comment(self, input, expected):
from octoprint.util import comm
self.assertEqual(expected, comm.strip_comment(input))
@data(
("M117 Test", None, None, "M117 Test"),
("", None, None, None),
(" \t \r \n", None, None, None),
("M117 Test", {}, 0, "M117 Test"),
)
@unpack
def test_process_gcode_line(self, input, offsets, current_tool, expected):
from octoprint.util import comm
self.assertEqual(
expected,
comm.process_gcode_line(input, offsets=offsets, current_tool=current_tool),
)
@data(
("M104 S200", None, None, None),
("M117 Test", {}, None, None),
("M104 T0", {}, None, None),
("M104 S220", {"tool0": 10, "tool1": 20, "bed": 30}, 0, 230.0),
("M104 T1 S220", {"tool0": 10, "tool1": 20, "bed": 30}, 0, 240.0),
("M104 S220", {"tool0": 10, "tool1": 20, "bed": 30}, 1, 240.0),
("M140 S100", {"tool0": 10, "tool1": 20, "bed": 30}, 1, 130.0),
("M190 S100", {"tool0": 10, "tool1": 20, "bed": 30}, 1, 130.0),
("M109 S220", {"tool0": 10, "tool1": 20, "bed": 30}, 0, 230.0),
("M109 S220", {}, 0, None),
("M140 S100", {}, 0, None),
("M104 S220", {"tool0": 0}, 0, None),
("M104 S220", {"tool0": 20}, None, None),
("M104 S0", {"tool0": 20}, 0, None),
)
@unpack
def test_apply_temperature_offsets(self, input, offsets, current_tool, expected):
from octoprint.util import comm
actual = comm.apply_temperature_offsets(input, offsets, current_tool=current_tool)
if expected is None:
self.assertEqual(input, actual)
else:
import re
match = re.search(r"S(\d+(\.\d+)?)", actual)
if not match:
self.fail("No temperature found")
temperature = float(match.group(1))
self.assertEqual(expected, temperature)
self.assertEqual(input[: match.start(1)], actual[: match.start(1)])
self.assertEqual(input[match.end(1) :], actual[match.end(1) :])
def test_convert_pause_triggers(self):
configured_triggers = [
{"regex": "pause1", "type": "enable"},
{"regex": "pause2", "type": "enable"},
{"regex": "resume", "type": "disable"},
{"regex": "toggle", "type": "toggle"},
{"type": "enable"},
{"regex": "regex"},
{"regex": "regex", "type": "unknown"},
]
from octoprint.util import comm
trigger_matchers = comm.convert_pause_triggers(configured_triggers)
self.assertIsNotNone(trigger_matchers)
self.assertIn("enable", trigger_matchers)
self.assertEqual("(pause1)|(pause2)", trigger_matchers["enable"].pattern)
self.assertIn("disable", trigger_matchers)
self.assertEqual("(resume)", trigger_matchers["disable"].pattern)
self.assertIn("toggle", trigger_matchers)
self.assertEqual("(toggle)", trigger_matchers["toggle"].pattern)
self.assertNotIn("unknown", trigger_matchers)
def test_convert_feedback_controls(self):
def md5sum(input):
import hashlib
m = hashlib.md5()
m.update(input)
return m.hexdigest()
# rb'' doesn't exist in Python2
temp_regex = rb"T:((\d*\.)\d+)"
temp_template = b"Temp: {}"
temp2_template = b"Temperature: {}"
temp_key = md5sum(temp_regex)
temp_template_key = md5sum(temp_template)
temp2_template_key = md5sum(temp2_template)
x_regex = rb"X:(?P<x>\d+)"
x_template = b"X: {x}"
x_key = md5sum(x_regex)
x_template_key = md5sum(x_template)
configured_controls = [
{
"key": temp_key,
"regex": temp_regex,
"template": temp_template,
"template_key": temp_template_key,
},
{"command": "M117 Hello World", "name": "Test"},
{
"children": [
{
"key": x_key,
"regex": x_regex,
"template": x_template,
"template_key": x_template_key,
},
{
"key": temp_key,
"regex": temp_regex,
"template": temp2_template,
"template_key": temp2_template_key,
},
]
},
]
from octoprint.util import comm
controls, matcher = comm.convert_feedback_controls(configured_controls)
self.assertEqual(2, len(controls))
# temp_regex is used twice, so we should have two templates for it
self.assertIn(temp_key, controls)
temp = controls[temp_key]
self.assertIsNotNone(temp["matcher"])
self.assertEqual(temp_regex, temp["matcher"].pattern)
self.assertEqual(temp_regex, temp["pattern"])
self.assertEqual(2, len(temp["templates"]))
self.assertIn(temp_template_key, temp["templates"])
self.assertEqual(temp_template, temp["templates"][temp_template_key])
self.assertIn(temp2_template_key, temp["templates"])
self.assertEqual(temp2_template, temp["templates"][temp2_template_key])
# x_regex is used once, so we should have only one template for it
self.assertIn(x_key, controls)
x = controls[x_key]
self.assertIsNotNone(x["matcher"])
self.assertEqual(x_regex, x["matcher"].pattern)
self.assertEqual(x_regex, x["pattern"])
self.assertEqual(1, len(x["templates"]))
self.assertIn(x_template_key, x["templates"])
self.assertEqual(x_template, x["templates"][x_template_key])
self.assertEqual(
f"(?P<group{temp_key}>{temp_regex})|(?P<group{x_key}>{x_regex})",
matcher.pattern,
)
@data(
("G4 P2.0", "floatP", True, "2.0"),
("M109 S220.0", "floatS", True, "220.0"),
("G1 X10.0 Y10.0 Z0.2", "floatZ", True, "0.2"),
("G1X10.0Y10.0Z0.2", "floatZ", True, "0.2"),
("g1x10.0y10.0z0.2", "floatZ", True, "0.2"),
("M110 N0", "intN", True, "0"),
("M104 S220.0 T1", "intT", True, "1"),
("M104 T1 S220.0", "intT", True, "1"),
("N100 M110", "intN", True, "100"),
("NP100", "floatP", False, None),
)
@unpack
def test_parameter_regexes(self, line, parameter, should_match, expected_value):
from octoprint.util.comm import regexes_parameters
regex = regexes_parameters[parameter]
match = regex.search(line)
if should_match:
self.assertIsNotNone(match)
self.assertEqual(expected_value, match.group("value"))
else:
self.assertIsNone(match)
@data(
("G0 X0", "G0"),
("G28 X0 Y0", "G28"),
("M109 S220.0 T1", "M109"),
("M117 Hello World", "M117"),
("T0", "T"),
("T3", "T"),
(None, None),
("No match", None),
)
@unpack
def test_gcode_command_for_cmd(self, cmd, expected):
from octoprint.util.comm import gcode_command_for_cmd
result = gcode_command_for_cmd(cmd)
self.assertEqual(expected, result)
@data(
("G0 X0", "G0", None),
("M105", "M105", None),
("T2", "T", None),
("M80.1", "M80", "1"),
("G28.2", "G28", "2"),
("T0.3", "T", None),
("M80.nosubcode", "M80", None),
(None, None, None),
("No match", None, None),
)
@unpack
def test_gcode_and_subcode_for_cmd(self, cmd, expected_gcode, expected_subcode):
from octoprint.util.comm import gcode_and_subcode_for_cmd
actual_gcode, actual_subcode = gcode_and_subcode_for_cmd(cmd)
self.assertEqual(expected_gcode, actual_gcode)
self.assertEqual(expected_subcode, actual_subcode)
@data(
("T:23.0 B:60.0", 0, {"T0": (23.0, None), "B": (60.0, None)}, 0),
("T:23.0 B:60.0", 1, {"T1": (23.0, None), "B": (60.0, None)}, 1),
("T:23.0/220.0 B:60.0/70.0", 0, {"T0": (23.0, 220.0), "B": (60.0, 70.0)}, 0),
(
"ok T:23.0/220.0 T0:23.0/220.0 T1:50.2/210.0 T2:39.4/220.0 B:60.0",
0,
{
"T0": (23.0, 220.0),
"T1": (50.2, 210.0),
"T2": (39.4, 220.0),
"B": (60.0, None),
},
2,
),
(
"ok T:50.2/210.0 T0:23.0/220.0 T1:50.2/210.0 T2:39.4/220.0 B:60.0",
1,
{
"T0": (23.0, 220.0),
"T1": (50.2, 210.0),
"T2": (39.4, 220.0),
"B": (60.0, None),
},
2,
),
(
"ok T:-55.7/0 T0:-55.7/0 T1:150.0/210.0",
0,
{"T0": (-55.7, 0), "T1": (150.0, 210.0)},
1,
),
(
"ok T:150.0/210.0 T0:-55.7/0 T1:150.0/210.0",
1,
{"T0": (-55.7, 0), "T1": (150.0, 210.0)},
1,
),
(
"T:210.04 /210.00 B:52.00 /52.00 @:85 B@:31pS_XYZ:5",
0,
{
"T0": (210.04, 210.0),
"B": (52.00, 52.0),
},
0,
),
(
"T:210.04 /210.00 B:52.00 /52.00 @:85 31pS_XYZ:5",
0,
{
"T0": (210.04, 210.0),
"B": (52.00, 52.0),
"31pS_XYZ": (5, None),
},
0,
),
(
"T:210.04 /210.00 B:52.00 /52.00 @:85 F0:255.0 /255.0",
0,
{
"T0": (210.04, 210.0),
"B": (52.00, 52.0),
"F0": (255.0, 255.0),
},
0,
),
(
"T:210.04 /210.00 @B:52.00 /52.00",
0,
{
"T0": (210.04, 210.0),
},
0,
),
(
"T:210.04 /210.00 @B:52.00 /52.00 TXYZ:2",
0,
{
"T0": (210.04, 210.0),
"TXYZ": (2, None),
},
0,
),
(
# Only first occurrence of a sensor should be used, second B gets ignored
"T:210.04 /210.00 B:52.00 /52.00 @:85 B:1234.0 /1234.0",
0,
{
"T0": (210.04, 210.0),
"B": (52.00, 52.0),
},
0,
),
)
@unpack
def test_process_temperature_line(self, line, current, expected_result, expected_max):
from octoprint.util.comm import parse_temperature_line
maxtool, result = parse_temperature_line(line, current)
self.assertDictEqual(expected_result, result)
self.assertEqual(expected_max, maxtool)
    @data(
        # (parsed sensor dict, current tool, expected canonical dict)
        # T => T0
        ({"T": (23.0, None)}, 0, {"T0": (23.0, None)}),
        # T => T1
        ({"T": (23.0, None)}, 1, {"T1": (23.0, None)}),
        # T and Tn present => Tn wins
        (
            {"T": (23.0, None), "T0": (23.0, None), "T1": (42.0, None)},
            0,
            {"T0": (23.0, None), "T1": (42.0, None)},
        ),
        (
            {"T": (42.0, None), "T0": (23.0, None), "T1": (42.0, None)},
            1,
            {"T0": (23.0, None), "T1": (42.0, None)},
        ),
        # same even when the generic T value differs from the current tool's Tn
        (
            {"T": (21.0, None), "T0": (23.0, None), "T1": (42.0, None)},
            0,
            {"T0": (23.0, None), "T1": (42.0, None)},
        ),
        (
            {"T": (41.0, None), "T0": (23.0, None), "T1": (42.0, None)},
            1,
            {"T0": (23.0, None), "T1": (42.0, None)},
        ),
        # T and no T0 => Smoothieware, T = T0
        (
            {"T": (23.0, None), "T1": (42.0, None)},
            1,
            {"T0": (23.0, None), "T1": (42.0, None)},
        ),
        # no T => as-is
        (
            {"T0": (23.0, None), "T1": (42.0, None)},
            1,
            {"T0": (23.0, None), "T1": (42.0, None)},
        ),
    )
    @unpack
    def test_canonicalize_temperatures(self, parsed, current, expected):
        """Generic "T" entries get mapped onto the concrete tool entry for the current extruder."""
        from octoprint.util.comm import canonicalize_temperatures

        result = canonicalize_temperatures(parsed, current)
        self.assertDictEqual(expected, result)
    @data(
        # (firmware info line, expected key/value dict)
        (
            "KEY1:Value 1 FIRMWARE_NAME:Some Firmware With Spaces KEY2:Value 2",
            {
                "KEY1": "Value 1",
                "KEY2": "Value 2",
                "FIRMWARE_NAME": "Some Firmware With Spaces",
            },
        ),
        # Malyan variants use "KEY: value" with a space (and even "NAME." typos)
        (
            "NAME: Malyan VER: 2.9 MODEL: M200 HW: HA02",
            {"NAME": "Malyan", "VER": "2.9", "MODEL": "M200", "HW": "HA02"},
        ),
        (
            "NAME. Malyan VER: 3.8 MODEL: M100 HW: HB02",
            {"NAME": "Malyan", "VER": "3.8", "MODEL": "M100", "HW": "HB02"},
        ),
        (
            "NAME. Malyan VER: 3.7 MODEL: M300 HW: HG01",
            {"NAME": "Malyan", "VER": "3.7", "MODEL": "M300", "HW": "HG01"},
        ),
        (
            "FIRMWARE_NAME:Marlin 1.1.0 From Archive SOURCE_CODE_URL:http:// ... PROTOCOL_VERSION:1.0 MACHINE_TYPE:www.cxsw3d.com EXTRUDER_COUNT:1 UUID:00000000-0000-0000-0000-000000000000",
            {
                "FIRMWARE_NAME": "Marlin 1.1.0 From Archive",
                "SOURCE_CODE_URL": "http:// ...",
                "PROTOCOL_VERSION": "1.0",
                "MACHINE_TYPE": "www.cxsw3d.com",
                "EXTRUDER_COUNT": "1",
                "UUID": "00000000-0000-0000-0000-000000000000",
            },
        ),
        # Test firmware name with time created
        (
            "FIRMWARE_NAME:Marlin 2.0.7.2 (Nov 27 2020 14:30:11) SOURCE_CODE_URL:https://github.com/MarlinFirmware/Marlin PROTOCOL_VERSION:1.0 MACHINE_TYPE:Ender 5 Pro EXTRUDER_COUNT:1 UUID:cede2a2f-41a2-4748-9b12-c55c62f367ff",
            {
                "FIRMWARE_NAME": "Marlin 2.0.7.2 (Nov 27 2020 14:30:11)",
                "SOURCE_CODE_URL": "https://github.com/MarlinFirmware/Marlin",
                "PROTOCOL_VERSION": "1.0",
                "MACHINE_TYPE": "Ender 5 Pro",
                "EXTRUDER_COUNT": "1",
                "UUID": "cede2a2f-41a2-4748-9b12-c55c62f367ff",
            },
        ),
        # Test that keys beginning with _ or number are ignored
        (
            "KEY1:VALUE1 _KEY2:INVALID 123:INVALID 1KEY:INVALID KEY2:VALUE2",
            {"KEY1": "VALUE1 _KEY2:INVALID 123:INVALID 1KEY:INVALID", "KEY2": "VALUE2"},
        ),
    )
    @unpack
    def test_parse_firmware_line(self, line, expected):
        """parse_firmware_line splits a firmware info line into a key/value dict."""
        from octoprint.util.comm import parse_firmware_line

        result = parse_firmware_line(line)
        self.assertDictEqual(expected, result)
@data(
("Cap:EEPROM:1", ("EEPROM", True)),
("Cap:EEPROM:0", ("EEPROM", False)),
("AUTOREPORT_TEMP:1", ("AUTOREPORT_TEMP", True)),
("AUTOREPORT_TEMP:0", ("AUTOREPORT_TEMP", False)),
("TOO:MANY:FIELDS", None),
("Cap:", None),
("TOOLITTLEFIELDS", None),
("WRONG:FLAG", None),
)
@unpack
def test_parse_capability_line(self, line, expected):
from octoprint.util.comm import parse_capability_line
result = parse_capability_line(line)
self.assertEqual(expected, result)
@data(
("Resend:23", 23),
("Resend: N23", 23),
("Resend: N:23", 23),
("rs 23", 23),
("rs N23", 23),
("rs N:23", 23),
("rs N23 expected checksum 109", 23), # teacup, see #300
)
@unpack
def test_parse_resend_line(self, line, expected):
from octoprint.util.comm import parse_resend_line
result = parse_resend_line(line)
self.assertEqual(expected, result)
    @data(
        # (position report line, expected parsed dict with lowercased axes, or None)
        # Marlin
        (
            "ok X:62.417 Y:64.781 Z:0.2 E:2.72328 Count: A:6241 B:6478 C:20",
            {"x": 62.417, "y": 64.781, "z": 0.2, "e": 2.72328},
        ),
        (
            "X:62.417 Y:64.781 Z:0.2 E:2.72328 Count: A:6241 B:6478 C:20",
            {"x": 62.417, "y": 64.781, "z": 0.2, "e": 2.72328},
        ),
        # RepRapFirmware
        (
            "X:96.99 Y:88.31 Z:0.30 E0:0.0 E1:0.0 E2:0.0 E3:0.0 E4:0.0 E5:0.0",
            {
                "x": 96.99,
                "y": 88.31,
                "z": 0.3,
                "e0": 0.0,
                "e1": 0.0,
                "e2": 0.0,
                "e3": 0.0,
                "e4": 0.0,
                "e5": 0.0,
            },
        ),
        # whitespace after the :, e.g. AlfaWise U20, see #2839
        ("X:150.0 Y:150.0 Z: 0.7 E: 0.0", {"x": 150.0, "y": 150.0, "z": 0.7, "e": 0.0}),
        # invalid
        ("", None),
        ("X:62.417 Y:64.781 Z:0.2", None),  # lacks E, expected to be rejected
    )
    @unpack
    def test_parse_position_line(self, line, expected):
        """parse_position_line extracts lowercased axis coordinates, or None for invalid input."""
        from octoprint.util.comm import parse_position_line

        result = parse_position_line(line)
        if expected is None:
            self.assertIsNone(result)
        else:
            self.assertDictEqual(expected, result)
class TestPositionRecord(unittest.TestCase):
    """Tests for PositionRecord's dict conversion and copy semantics."""

    def test_as_dict_regular(self):
        """Standard axes plus unset f/t show up in the dict."""
        coords = {"x": 1, "y": 2, "z": 3, "e": 4}
        position = self._create_position(**coords)
        expected = dict(coords)
        expected.update({"f": None, "t": None})
        self.assertDictEqual(position.as_dict(), expected)

    def test_as_dict_extra_e(self):
        """Per-extruder e0/e1 values are kept, plain e stays None."""
        coords = {"x": 1, "y": 2, "z": 3, "e0": 4, "e1": 5}
        position = self._create_position(**coords)
        expected = dict(coords)
        expected.update({"e": None, "f": None, "t": None})
        self.assertDictEqual(position.as_dict(), expected)

    def test_copy_from_regular(self):
        """copy_from transfers all standard axis values."""
        coords = {"x": 1, "y": 2, "z": 3, "e": 4}
        position1 = self._create_position(**coords)
        position2 = self._create_position()
        position2.copy_from(position1)
        expected = dict(coords)
        expected.update({"f": None, "t": None})
        self.assertDictEqual(position2.as_dict(), expected)

    def test_copy_from_extra_e(self):
        """copy_from also transfers per-extruder e0/e1 values."""
        coords = {"x": 1, "y": 2, "z": 3, "e0": 4, "e1": 5}
        position1 = self._create_position(**coords)
        position2 = self._create_position()
        position2.copy_from(position1)
        expected = dict(coords)
        expected.update({"e": None, "f": None, "t": None})
        self.assertDictEqual(position2.as_dict(), expected)

    def test_copy_from_extra_e_changed(self):
        """copy_from replaces the target's extruder set, dropping the extra e2."""
        coords1 = {"x": 1, "y": 2, "z": 3, "e0": 4, "e1": 5}
        position1 = self._create_position(**coords1)
        coords2 = {"x": 2, "y": 4, "z": 6, "e0": 8, "e1": 10, "e2": 12}
        position2 = self._create_position(**coords2)
        # sanity check before copying
        expected_before = dict(coords2)
        expected_before.update({"e": None, "f": None, "t": None})
        self.assertDictEqual(position2.as_dict(), expected_before)
        position2.copy_from(position1)
        expected_after = dict(coords1)
        expected_after.update({"e": None, "f": None, "t": None})
        self.assertDictEqual(position2.as_dict(), expected_after)

    def _create_position(self, **kwargs):
        # helper keeping the octoprint import local to test execution
        from octoprint.util.comm import PositionRecord

        return PositionRecord(**kwargs)
@ddt
class TestTemperatureRecord(unittest.TestCase):
    """Tests for TemperatureRecord.set_custom identifier validation."""

    @data("TX", "B2", "BX", "SOMETHING_CUSTOM", "1234B456", "blub", "fnord", "C1", "CX")
    def test_set_custom(self, identifier):
        """Identifiers that don't collide with builtin sensors are accepted."""
        temperature = self._create_temperature()
        temperature.set_custom(identifier, 1, 2)
        self.assertTrue(identifier in temperature.custom)

    @data("T", "T1", "T42", "B", "C")
    def test_set_custom_reserved(self, identifier):
        """Reserved identifiers (T, Tn, B, C) must be rejected with a ValueError."""
        temperature = self._create_temperature()
        try:
            temperature.set_custom(identifier, 1, 2)
            self.fail(f"Expected ValueError for reserved identifier {identifier}")
        except ValueError as ex:
            self.assertTrue("is a reserved identifier" in str(ex))

    def _create_temperature(self, **kwargs):
        # helper keeping the octoprint import local to test execution
        from octoprint.util.comm import TemperatureRecord

        return TemperatureRecord(**kwargs)
| 20,946 | Python | .py | 536 | 28.475746 | 228 | 0.503026 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,049 | test_counted_event.py | OctoPrint_OctoPrint/tests/util/test_counted_event.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2015 The OctoPrint Project - Released under terms of the AGPLv3 License"
import threading
import time
import unittest
from octoprint.util import CountedEvent
class CountedEventTest(unittest.TestCase):
    """Tests for octoprint.util.CountedEvent, an event guarded by a bounded counter."""

    def test_set_once(self):
        """The counter should go from 0 to 1."""
        event = CountedEvent()
        self.assertEqual(0, event._counter)
        self.assertFalse(event._event.is_set())
        event.set()
        self.assertEqual(1, event._counter)
        self.assertTrue(event._event.is_set())

    def test_set_more_than_max(self):
        """The counter should never rise above max."""
        event = CountedEvent(max=1)
        self.assertEqual(0, event._counter)
        self.assertFalse(event._event.is_set())
        event.set()
        self.assertEqual(1, event._counter)
        self.assertTrue(event._event.is_set())
        # a second set must not push the counter past max
        event.set()
        self.assertEqual(1, event._counter)
        self.assertTrue(event._event.is_set())

    def test_clear_once(self):
        """The counter should go from 1 to 0."""
        event = CountedEvent(1)
        self.assertEqual(1, event._counter)
        self.assertTrue(event._event.is_set())
        event.clear()
        self.assertEqual(0, event._counter)
        self.assertFalse(event._event.is_set())

    def test_clear_all(self):
        """The counter should go from 10 to 0."""
        event = CountedEvent(10)
        self.assertEqual(10, event._counter)
        self.assertTrue(event._event.is_set())
        # completely=True resets in one go instead of decrementing once
        event.clear(completely=True)
        self.assertEqual(0, event._counter)
        self.assertFalse(event._event.is_set())

    def test_clear_more_than_available(self):
        """The counter should never sink below 0."""
        event = CountedEvent(1)
        self.assertEqual(1, event._counter)
        self.assertTrue(event._event.is_set())
        event.clear()
        self.assertEqual(0, event._counter)
        self.assertFalse(event._event.is_set())
        event.clear()
        self.assertEqual(0, event._counter)
        self.assertFalse(event._event.is_set())

    def test_clear_more_than_available_without_minimum(self):
        """The counter may sink below zero if initialized without a minimum."""
        event = CountedEvent(1, minimum=None)
        self.assertEqual(1, event._counter)
        self.assertTrue(event._event.is_set())
        event.clear()
        self.assertEqual(0, event._counter)
        self.assertFalse(event._event.is_set())
        event.clear()
        self.assertEqual(-1, event._counter)
        self.assertFalse(event._event.is_set())

    def test_blocked(self):
        """Blocked should only be true if the counter is 0."""
        event = CountedEvent(0)
        self.assertTrue(event.blocked())
        event.set()
        self.assertFalse(event.blocked())
        event.clear()
        self.assertTrue(event.blocked())

    def test_wait_immediately(self):
        """Unblocked wait should immediately return."""
        event = CountedEvent(1)
        start = time.time()
        event.wait(timeout=2)
        duration = time.time() - start
        self.assertLess(duration, 1)

    def test_wait_blocking(self):
        """Set should immediately have blocked wait return."""
        event = CountedEvent(0)

        def set_event():
            # fire set() from a second thread after ~1s
            time.sleep(1)
            event.set()

        thread = threading.Thread(target=set_event)
        thread.daemon = True
        thread.start()
        start = time.time()
        event.wait(timeout=2)
        duration = time.time() - start
        # woken by set(), so we return well before the 2s timeout
        self.assertLess(duration, 2)

    def test_wait_timeout(self):
        """Blocked should only wait until timeout."""
        event = CountedEvent(0)
        start = time.time()
        event.wait(timeout=2)
        duration = time.time() - start
        self.assertGreaterEqual(duration, 2)
        self.assertLess(duration, 3)
| 4,076 | Python | .py | 101 | 31.712871 | 103 | 0.632184 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,050 | test_platform.py | OctoPrint_OctoPrint/tests/util/test_platform.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2017 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
from unittest import mock
import ddt
@ddt.ddt
class PlatformUtilTest(unittest.TestCase):
    """Tests for octoprint.util.platform's OS detection and compatibility matching."""

    @data(
        # (sys.platform value, expected mapped OS identifier)
        ("win32", "windows"),
        ("linux2", "linux"),
        ("darwin", "macos"),
        ("linux", "linux"),
        ("linux3", "linux"),
        ("freebsd", "freebsd"),
        ("freebsd2342", "freebsd"),
        ("os2", "unmapped"),
        ("sunos5", "unmapped"),
    )
    @ddt.unpack
    def test_get_os(self, sys_platform, expected):
        """get_os maps sys.platform (including versioned variants) to an OS identifier."""
        with mock.patch("sys.platform", sys_platform):
            from octoprint.util.platform import get_os

            actual = get_os()
            self.assertEqual(actual, expected)

    @data(
        # (mapped OS, sys.platform, compatibility entries, expected verdict)
        ("linux", "linux2", [], True),
        ("linux", "linux2", ["linux", "freebsd"], True),
        ("windows", "win32", ["linux", "freebsd"], False),
        ("linux", "linux2", ["!windows"], True),
        ("windows", "win32", ["!windows"], False),
        ("unmapped", "os2", [], True),
        ("unmapped", "os2", ["linux", "freebsd"], False),
        ("unmapped", "os2", ["!os2"], False),
        ("unmapped", "sunos5", ["linux", "freebsd", "sunos"], True),
        ("unmapped", "sunos5", ["!sunos", "!os2"], False),
        # both black and white listing at the same time usually doesn't
        # make a whole lot of sense, but let's test it anyhow
        ("linux", "linux2", ["!windows", "linux", "freebsd"], True),
        ("linux", "linux2", ["!windows", "freebsd"], False),
        ("windows", "win32", ["!windows", "linux", "freebsd"], False),
        ("unmapped", "sunos5", ["!windows", "linux", "freebsd"], False),
    )
    @ddt.unpack
    def test_is_os_compatible(self, current_os, sys_platform, entries, expected):
        """is_os_compatible honors whitelist entries and "!"-prefixed blacklist entries."""
        with mock.patch("sys.platform", sys_platform):
            from octoprint.util.platform import is_os_compatible

            actual = is_os_compatible(entries, current_os=current_os)
            self.assertEqual(actual, expected)
| 2,143 | Python | .py | 48 | 36.833333 | 103 | 0.576149 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,051 | test_comm.py | OctoPrint_OctoPrint/tests/util/test_comm.py | import unittest
from unittest import mock
import ddt
import pytest
import octoprint.util.comm
from octoprint.util.files import m20_timestamp_to_unix_timestamp
@ddt.ddt
class TestCommErrorHandling(unittest.TestCase):
    """Feeds firmware error lines through MachineCom._handle_errors and checks the reaction."""

    def setUp(self):
        self._comm = mock.create_autospec(octoprint.util.comm.MachineCom)

        # mocks - bind the real error handling implementations back onto the autospec mock
        self._comm._handle_errors = (
            lambda *args, **kwargs: octoprint.util.comm.MachineCom._handle_errors(
                self._comm, *args, **kwargs
            )
        )
        self._comm._trigger_error = (
            lambda *args, **kwargs: octoprint.util.comm.MachineCom._trigger_error(
                self._comm, *args, **kwargs
            )
        )
        self._comm._recoverable_communication_errors = (
            octoprint.util.comm.MachineCom._recoverable_communication_errors
        )
        self._comm._resend_request_communication_errors = (
            octoprint.util.comm.MachineCom._resend_request_communication_errors
        )
        self._comm._sd_card_errors = octoprint.util.comm.MachineCom._sd_card_errors
        self._comm._lastCommError = None
        self._comm._errorValue = None
        self._comm._clear_to_send = mock.Mock()
        self._comm._error_message_hooks = {}
        self._comm._trigger_emergency_stop = mock.Mock()
        self._comm._callback = mock.Mock()

        # settings - defaults simulate an active serial print with strict error handling
        self._comm._ignore_errors = False
        self._comm._disconnect_on_errors = True
        self._comm._send_m112_on_error = True
        self._comm.isPrinting.return_value = True
        self._comm.isSdPrinting.return_value = False
        self._comm.isError.return_value = False

    @data(
        # Marlin
        "Error: Line Number is not Last Line Number+1, Last Line: 1",
        # Repetier
        "Error: Expected Line 1 got 2",
        # !! error type for good measure
        "!! expected line 1 got 2",
    )
    def test_lineno_mismatch(self, line):
        """Should prepare receiving a resend request"""
        result = self._comm._handle_errors(line)
        self.assertEqual(line, result)
        self.assert_resend()

    @data(
        # Marlin
        "Error: No Line Number with checksum, Last Line: 1",
    )
    def test_lineno_missing(self, line):
        """Should simulate OK to force resend request"""
        result = self._comm._handle_errors(line)
        self.assertEqual(line, result)
        self.assert_recoverable()

    @data(
        # Marlin
        "Error: checksum mismatch",
        # Repetier
        "Error: Wrong checksum",
    )
    def test_checksum_mismatch(self, line):
        """Should prepare receiving resend request"""
        result = self._comm._handle_errors(line)
        self.assertEqual(line, result)
        self.assert_resend()

    @data(
        # Marlin
        "Error: No Checksum with line number, Last Line: 1",
        # Repetier
        "Error: Missing checksum",
    )
    def test_checksum_missing(self, line):
        """Should prepare receiving resend request"""
        result = self._comm._handle_errors(line)
        self.assertEqual(line, result)
        self.assert_resend()

    @data(
        # Marlin
        "Error: volume.init failed",
        "Error: openRoot failed",
        "Error: workDir open failed",
        "Error: Cannot enter subdir: folder",
        # Repetier
        "Error: file.open failed",
        # Marlin & Repetier (halleluja!)
        "Error: error writing to file",
        "Error: open failed, File: foo.gco",
        # Legacy?
        "Error: Cannot open foo.gco",
    )
    def test_sd_error(self, line):
        """Should pass"""
        result = self._comm._handle_errors(line)
        self.assertEqual(line, result)
        self.assert_nop()

    @data(
        # Marlin
        'Error: Unknown command: "ABC"',
        # Repetier
        "Error: Unknown command:ABC",
    )
    def test_unknown_command(self, line):
        """Should pass"""
        result = self._comm._handle_errors(line)
        self.assertEqual(line, result)
        self.assert_nop()

    @data("Error: This should get handled", "!! This should also get handled")
    def test_unknown_handled(self, line):
        """Should pass"""

        def handler(comm, message, *args, **kwargs):
            # registered hook claims the message as handled
            return "handled" in message

        self._comm._error_message_hooks["test"] = handler
        result = self._comm._handle_errors(line)
        self.assertEqual(line, result)
        self.assert_nop()

    @data("Error: Printer on fire")
    def test_other_error_disconnect(self, line):
        """Should trigger escalation"""
        result = self._comm._handle_errors(line)
        self.assertEqual(line, result)
        # what should have happened
        self.assert_m112_sent()
        self.assert_disconnected()
        # what should not have happened
        self.assert_not_handle_ok()
        self.assert_not_last_comm_error()
        self.assert_not_print_cancelled()
        self.assert_not_cleared_to_send()

    @data("Error: Printer on fire")
    def test_other_error_no_m112(self, line):
        """Should trigger escalation"""
        self._comm._send_m112_on_error = False
        result = self._comm._handle_errors(line)
        self.assertEqual(line, result)
        # what should have happened
        self.assert_disconnected()
        # what should not have happened
        self.assert_not_handle_ok()
        self.assert_not_last_comm_error()
        self.assert_not_print_cancelled()
        self.assert_not_cleared_to_send()
        self.assert_not_m112_sent()

    @data("Error: Printer on fire")
    def test_other_error_cancel(self, line):
        """Should trigger print cancel"""
        self._comm._disconnect_on_errors = False
        result = self._comm._handle_errors(line)
        self.assertEqual(line, result)
        # what should have happened
        self.assert_print_cancelled()
        self.assert_cleared_to_send()
        # what should not have happened
        self.assert_not_handle_ok()
        self.assert_not_last_comm_error()
        self.assert_not_m112_sent()
        self.assert_not_disconnected()

    @data("Error: Printer on fire")
    def test_other_error_ignored(self, line):
        """Should only log"""
        self._comm._ignore_errors = True
        result = self._comm._handle_errors(line)
        self.assertEqual(line, result)
        # what should have happened
        self.assert_cleared_to_send()
        # what should not have happened
        self.assert_not_handle_ok()
        self.assert_not_last_comm_error()
        self.assert_not_print_cancelled()
        self.assert_not_m112_sent()
        self.assert_not_disconnected()

    def test_not_an_error(self):
        """Should pass"""
        result = self._comm._handle_errors("Not an error")
        self.assertEqual("Not an error", result)
        self.assert_nop()

    def test_already_error(self):
        """Should pass"""
        self._comm.isError.return_value = True
        result = self._comm._handle_errors("Error: Printer on fire")
        self.assertEqual("Error: Printer on fire", result)
        self.assert_nop()

    def test_line_none(self):
        """Should pass"""
        self.assertIsNone(self._comm._handle_errors(None))

    ##~~ assertion helpers

    def assert_handle_ok(self):
        self._comm._handle_ok.assert_called_once()

    def assert_not_handle_ok(self):
        self._comm._handle_ok.assert_not_called()

    def assert_last_comm_error(self):
        self.assertIsNotNone(self._comm._lastCommError)

    def assert_not_last_comm_error(self):
        self.assertIsNone(self._comm._lastCommError)

    def assert_m112_sent(self):
        self._comm._trigger_emergency_stop.assert_called_once_with(close=False)

    def assert_not_m112_sent(self):
        self._comm._trigger_emergency_stop.assert_not_called()

    def assert_disconnected(self):
        self.assertIsNotNone(self._comm._errorValue)
        self._comm._changeState.assert_called_with(self._comm.STATE_ERROR)
        self._comm.close.assert_called_once_with(is_error=True)

    def assert_not_disconnected(self):
        self.assertIsNone(self._comm._errorValue)
        self._comm._changeState.assert_not_called()
        self._comm.close.assert_not_called()

    def assert_print_cancelled(self):
        self._comm.cancelPrint.assert_called_once()

    def assert_not_print_cancelled(self):
        self._comm.cancelPrint.assert_not_called()

    def assert_cleared_to_send(self):
        self._comm._clear_to_send.set.assert_called_once()

    def assert_not_cleared_to_send(self):
        self._comm._clear_to_send.set.assert_not_called()

    def assert_nop(self):
        # nothing beyond passing the line through happened
        self.assert_not_handle_ok()
        self.assert_not_last_comm_error()
        self.assert_not_disconnected()
        self.assert_not_print_cancelled()
        self.assert_not_cleared_to_send()

    def assert_recoverable(self):
        # an ok was simulated, no other reaction took place
        self.assert_handle_ok()
        self.assert_not_last_comm_error()
        self.assert_not_disconnected()
        self.assert_not_print_cancelled()
        self.assert_not_cleared_to_send()

    def assert_resend(self):
        # the comm error was recorded, no other reaction took place
        self.assert_last_comm_error()
        self.assert_not_handle_ok()
        self.assert_not_disconnected()
        self.assert_not_print_cancelled()
        self.assert_not_cleared_to_send()
@pytest.mark.parametrize(
    "val,expected",
    [
        ("aaa", False),
        ("1234", False),
        ("0x21bf7d", True),
        ("0xghijk", False),
        ("0x28210800", True),
    ],
)
def test__validate_m20_timestamp(val, expected):
    """Only strings shaped like hexadecimal M20 timestamps should validate."""
    is_valid = octoprint.util.comm._validate_m20_timestamp(val)
    assert is_valid == expected
@pytest.mark.parametrize(
    "val,expected",
    [
        # expected tuple is (name, size, unix timestamp, long name)
        (
            "line that makes little sense",
            ("line that makes little sense", None, None, None),
        ),
        ("name.gco", ("name.gco", None, None, None)),
        ("name.gco invalid-size", ("name.gco invalid-size", None, None, None)),
        (
            "name.gco 3424324",
            ("name.gco", 3424324, None, None),
        ),
        (
            "name.gco 3424324 longname.gcode",
            ("name.gco", 3424324, None, "longname.gcode"),
        ),
        (
            "name.gco 3424324 0x21bf7d",
            (
                "name.gco",
                3424324,
                m20_timestamp_to_unix_timestamp("0x21bf7d"),
                None,
            ),
        ),
        # something that merely starts with 0x is treated as long name, not timestamp
        (
            "name.gco 3424324 0xinvalid_timestamp_as_longname",
            ("name.gco", 3424324, None, "0xinvalid_timestamp_as_longname"),
        ),
        (
            "longname.gcode 3424324 0x21bf7d",
            (
                "longname.gcode",
                3424324,
                m20_timestamp_to_unix_timestamp("0x21bf7d"),
                None,
            ),
        ),
        (
            "longname.gcode 3424324 longname.gcode",
            ("longname.gcode", 3424324, None, "longname.gcode"),
        ),
        (
            "name.gco 32424 0x21bf7d long name without quoting",
            (
                "name.gco",
                32424,
                m20_timestamp_to_unix_timestamp("0x21bf7d"),
                "long name without quoting",
            ),
        ),
        # NOTE(review): exact duplicate of the previous case - possibly meant to
        # differ in whitespace and got normalized; confirm against history
        (
            "name.gco 32424 0x21bf7d long name without quoting",
            (
                "name.gco",
                32424,
                m20_timestamp_to_unix_timestamp("0x21bf7d"),
                "long name without quoting",
            ),
        ),
        (
            'name.gco 32424 0x21bf7d "long name with quoting"',
            (
                "name.gco",
                32424,
                m20_timestamp_to_unix_timestamp("0x21bf7d"),
                "long name with quoting",
            ),
        ),
        # NOTE(review): exact duplicate of the previous case - confirm against history
        (
            'name.gco 32424 0x21bf7d "long name with quoting"',
            (
                "name.gco",
                32424,
                m20_timestamp_to_unix_timestamp("0x21bf7d"),
                "long name with quoting",
            ),
        ),
    ],
)
def test_parse_file_list_line(val, expected):
    """parse_file_list_line splits an SD listing line into (name, size, timestamp, long name)."""
    assert octoprint.util.comm.parse_file_list_line(val) == expected
| 12,195 | Python | .py | 334 | 27.308383 | 83 | 0.591021 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,052 | test_misc.py | OctoPrint_OctoPrint/tests/util/test_misc.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2017 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import ddt
from frozendict import frozendict
import octoprint.util
@ddt.ddt
class MiscTestCase(unittest.TestCase):
    """Tests for assorted octoprint.util helpers: get_class, utmify, thaw_frozendict."""

    def test_get_class(self):
        # resolving an existing class must not raise
        octoprint.util.get_class("octoprint.access.users.FilebasedUserManager")

    def test_get_class_wrongmodule(self):
        """A nonexistent module must raise ImportError."""
        try:
            octoprint.util.get_class("octoprint2.users.FilebasedUserManager")
            self.fail("This should have thrown an ImportError")
        except ImportError:
            # success
            pass

    def test_get_class_wrongclass(self):
        """A nonexistent class in an existing module must raise ImportError."""
        try:
            octoprint.util.get_class(
                "octoprint.access.users.FilebasedUserManagerBzzztWrong"
            )
            self.fail("This should have thrown an ImportError")
        except ImportError:
            # success
            pass

    @ddt.data(
        # (link, utm kwargs, expected tagged link)
        (
            "http://example.com",
            {"source": "source"},
            "http://example.com?utm_source=source",
        ),
        (
            "http://example.com?q=1",
            {"source": "source"},
            "http://example.com?q=1&utm_source=source",
        ),
        (
            "http://example.com",
            {"source": "source", "medium": "medium"},
            "http://example.com?utm_source=source&utm_medium=medium",
        ),
        (
            "http://example.com",
            {"source": "source", "medium": "medium", "content": "content with spaces"},
            "http://example.com?utm_source=source&utm_medium=medium&utm_content=content+with+spaces",
        ),
        # no handling
        ("http://example.com", {}, "http://example.com"),
    )
    @ddt.unpack
    def test_utmify(self, link, kwargs, expected):
        """utmify appends url-encoded utm_* query parameters for the given kwargs."""
        actual = octoprint.util.utmify(link, **kwargs)
        self.assertEqual(actual, expected)

    @ddt.data(
        # (input with frozendicts at various nesting levels, expected plain dict)
        (frozendict(a=1, b=2, c=3), {"a": 1, "b": 2, "c": 3}),
        (
            frozendict(a=1, b=2, c=frozendict(c1=1, c2=2)),
            {"a": 1, "b": 2, "c": {"c1": 1, "c2": 2}},
        ),
        ({"a": 1, "b": 2, "c": 3}, {"a": 1, "b": 2, "c": 3}),
        (
            {"a": 1, "b": 2, "c": frozendict(c1=1, c2=2)},
            {"a": 1, "b": 2, "c": {"c1": 1, "c2": 2}},
        ),
        (
            {
                "a": 1,
                "b": 2,
                "c": {"c1": 1, "c2": 2, "c3": frozendict(c11=11, c12=12)},
            },
            {"a": 1, "b": 2, "c": {"c1": 1, "c2": 2, "c3": {"c11": 11, "c12": 12}}},
        ),
    )
    @ddt.unpack
    def test_unfreeze_frozendict(self, input, expected):
        """thaw_frozendict recursively converts frozendicts into plain dicts."""
        result = octoprint.util.thaw_frozendict(input)
        self.assertIsInstance(result, dict)
        self.assertDictEqual(result, expected)

    @ddt.data(None, "invalid", 3, [1, 2], (3, 4))
    def test_unfreeze_frozendict_invalid(self, input):
        """Anything that isn't dict-like must raise a ValueError."""
        try:
            octoprint.util.thaw_frozendict(input)
            self.fail("expected ValueError")
        except ValueError:
            # expected
            pass
| 3,171 | Python | .py | 87 | 26.816092 | 103 | 0.527823 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,053 | test_commandline.py | OctoPrint_OctoPrint/tests/util/test_commandline.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2022 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import ddt
import pytest
import octoprint.util.commandline
@ddt.ddt
class CommandlineTest(unittest.TestCase):
    """Tests for octoprint.util.commandline.clean_ansi."""

    @data(
        # color sequences get stripped
        (
            "Some text with some \x1b[31mred words\x1b[39m in it",
            "Some text with some red words in it",
        ),
        # cursor visibility sequences get stripped
        (
            "We \x1b[?25lhide the cursor here and then \x1b[?25hshow it again here",
            "We hide the cursor here and then show it again here",
        ),
        # non-ANSI unicode (e.g. pip progress bars) must pass through untouched
        (
            "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 6.5/6.5 MB 1.1 MB/s eta 0:00:00",
            "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 6.5/6.5 MB 1.1 MB/s eta 0:00:00",
        ),
    )
    @ddt.unpack
    def test_clean_ansi(self, input, expected):
        """ANSI escape sequences are removed from str input."""
        actual = octoprint.util.commandline.clean_ansi(input)
        self.assertEqual(expected, actual)

    def test_clean_ansi_deprecated(self):
        """bytes input still works but triggers a deprecation warning."""
        with pytest.deprecated_call():
            actual = octoprint.util.commandline.clean_ansi(
                b"Some bytes with some \x1b[31mred words\x1b[39m in it"
            )
            self.assertEqual(b"Some bytes with some red words in it", actual)
| 1,488 | Python | .py | 32 | 33.5625 | 103 | 0.587597 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,054 | test_files.py | OctoPrint_OctoPrint/tests/util/test_files.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2021 The OctoPrint Project - Released under terms of the AGPLv3 License"
import datetime
import os
import re
import unittest
import pytest
from ddt import data, ddt, unpack
from octoprint.util.files import (
m20_timestamp_to_unix_timestamp,
sanitize_filename,
search_through_file,
search_through_file_python,
unix_timestamp_to_m20_timestamp,
)
@ddt
class FilesUtilTest(unittest.TestCase):
    """Tests for filename sanitization and file content search helpers."""

    @data(
        # (input, expected output, really_universal flag)
        ("some_file.gcode", "some_file.gcode", False),
        # reserved DOS device names get suffixed
        ("NUL.gcode", "NUL_.gcode", False),
        ("LPT1", "LPT1_", False),
        # leading dots get stripped
        (".test.gcode", "test.gcode", False),
        ("..test.gcode", "test.gcode", False),
        ("file with space.gcode", "file with space.gcode", False),
        ("W√∂lfe üê∫.gcode", "W√∂lfe üê∫.gcode", False),
        # really_universal additionally replaces spaces and transliterates non-ascii
        ("file with space.gcode", "file_with_space.gcode", True),
        ("W√∂lfe üê∫.gcode", "Wolfe_wolf.gcode", True),
    )
    @unpack
    def test_sanitize_filename(self, filename, expected, really_universal):
        """sanitize_filename cleans names, optionally down to a fully universal form."""
        actual = sanitize_filename(filename, really_universal=really_universal)
        self.assertEqual(actual, expected)

    @data("file/with/slash.gcode", "file\\with\\backslash.gcode")
    def test_sanitize_filename_invalid(self, filename):
        """Path separators in the name must raise a ValueError."""
        try:
            sanitize_filename(filename)
            self.fail("expected ValueError")
        except ValueError as ex:
            self.assertEqual(str(ex), "name must not contain / or \\")

    @data(
        # (term, regex flag, expected hit)
        ("umlaut", False, True),
        ("BOM", False, True),
        (r"^[^#]*BOM", True, False),
    )
    @unpack
    def test_search_through_file(self, term, regex, expected):
        """search_through_file finds plain terms and regexes in a utf-8 file."""
        path = os.path.join(
            os.path.abspath(os.path.dirname(__file__)), "_files", "utf8_without_bom.txt"
        )
        actual = search_through_file(path, term, regex=regex)
        self.assertEqual(actual, expected)

    @data(
        # (term, expected hit) - pure python fallback implementation
        ("umlaut", True),
        ("BOM", True),
        (r"^[^#]*BOM", False),
    )
    @unpack
    def test_search_through_file_python(self, term, expected):
        """The pure python fallback gets both the raw term and a precompiled pattern."""
        compiled = re.compile(term)
        path = os.path.join(
            os.path.abspath(os.path.dirname(__file__)), "_files", "utf8_without_bom.txt"
        )
        actual = search_through_file_python(path, term, compiled)
        self.assertEqual(actual, expected)
# based on https://github.com/nathanhi/pyfatfs/blob/master/tests/test_DosDateTime.py
# (hex M20 timestamp, matching unix timestamp) pairs shared by both conversion tests
m20_timestamp_tests = [
    (m20, datetime.datetime(*fields).timestamp())
    for m20, fields in (
        ("0x210000", (1980, 1, 1)),
        ("0x21bf7d", (1980, 1, 1, 23, 59, 58)),
        ("0x549088aa", (2022, 4, 16, 17, 5, 20)),
        ("0x28210800", (2000, 1, 1, 1, 0)),
    )
]

# 32bit time_t systems will fail with:
# "OverflowError: timestamp out of range for platform time_t"
# for this date.
try:
    m20_timestamp_tests.append(
        ("0xff9f0000", datetime.datetime(2107, 12, 31).timestamp())
    )
except OverflowError:
    pass
@pytest.mark.parametrize("val,expected", m20_timestamp_tests)
def test_m20_timestamp_to_unix_timestamp(val, expected):
    """Each hex M20 timestamp converts to its matching unix timestamp."""
    assert expected == m20_timestamp_to_unix_timestamp(val)
@pytest.mark.parametrize("expected,val", m20_timestamp_tests)
def test_unix_timestamp_to_m20_timestamp(expected, val):
    """Each unix timestamp converts back to its matching hex M20 timestamp."""
    assert expected == unix_timestamp_to_m20_timestamp(val)
| 3,447 | Python | .py | 86 | 34.209302 | 103 | 0.655409 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,055 | test_string_helpers.py | OctoPrint_OctoPrint/tests/util/test_string_helpers.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2019 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import pytest
import octoprint.util
class StringHelperTest(unittest.TestCase):
    """Tests for octoprint.util.to_unicode/to_bytes and their deprecated aliases."""

    def test_to_unicode_unicode(self):
        # str input passes through unchanged
        result = octoprint.util.to_unicode("test")
        self.assertEqual(result, "test")
        self.assertIsInstance(result, str)

    def test_to_unicode_bytes(self):
        # bytes input gets decoded
        result = octoprint.util.to_unicode(b"test")
        self.assertEqual(result, "test")
        self.assertIsInstance(result, str)

    def test_to_unicode_bytes_utf8(self):
        data = "äöüß"
        result = octoprint.util.to_unicode(data.encode("utf-8"), encoding="utf-8")
        self.assertEqual(result, data)
        self.assertIsInstance(result, str)

    def test_to_unicode_bytes_utf8_vs_ascii(self):
        # utf-8 encoded umlauts cannot be decoded as strict ascii
        self.assertRaises(
            UnicodeDecodeError,
            octoprint.util.to_unicode,
            "äöüß".encode(),
            encoding="ascii",
        )

    def test_to_unicode_bytes_utf8_vs_ascii_replace(self):
        # errors="replace" is forwarded to the decoder
        data = "äöüß"
        result = octoprint.util.to_unicode(
            data.encode("utf-8"), encoding="ascii", errors="replace"
        )
        self.assertEqual(result, data.encode("utf-8").decode("ascii", errors="replace"))
        self.assertIsInstance(result, str)

    def test_to_bytes_bytes(self):
        # bytes input passes through unchanged
        result = octoprint.util.to_bytes(b"test")
        self.assertEqual(result, b"test")
        self.assertIsInstance(result, bytes)

    def test_to_bytes_str(self):
        # str input gets encoded
        result = octoprint.util.to_bytes("test")
        self.assertEqual(result, b"test")
        self.assertIsInstance(result, bytes)

    def test_to_bytes_str_utf8(self):
        data = "äöüß"
        result = octoprint.util.to_bytes(data, encoding="utf-8")
        self.assertEqual(result, data.encode("utf-8"))
        self.assertIsInstance(result, bytes)

    def test_to_bytes_str_utf8_vs_ascii(self):
        # umlauts cannot be encoded as strict ascii
        self.assertRaises(
            UnicodeEncodeError, octoprint.util.to_bytes, "äöüß", encoding="ascii"
        )

    def test_to_bytes_str_utf8_vs_ascii_replace(self):
        # errors="replace" is forwarded to the encoder
        data = "äöüß"
        result = octoprint.util.to_bytes(data, encoding="ascii", errors="replace")
        self.assertEqual(result, data.encode("ascii", errors="replace"))
        self.assertIsInstance(result, bytes)

    def test_to_str(self):
        # deprecated helper, expected to warn and behave like to_bytes
        with pytest.deprecated_call():
            result = octoprint.util.to_str("test")
            self.assertEqual(result, b"test")
            self.assertIsInstance(result, bytes)

    def test_to_native_str(self):
        # deprecated helper, expected to warn and behave like to_unicode
        with pytest.deprecated_call():
            result = octoprint.util.to_native_str(b"test")
            self.assertEqual(result, "test")
            self.assertIsInstance(result, str)
| 2,903 | Python | .py | 65 | 35.784615 | 103 | 0.651537 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,056 | test_repeated_timer.py | OctoPrint_OctoPrint/tests/util/test_repeated_timer.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2015 The OctoPrint Project - Released under terms of the AGPLv3 License"
import time
import unittest
from unittest import mock
from octoprint.util import RepeatedTimer
class Countdown:
def __init__(self, start):
self._counter = start
def step(self):
self._counter -= 1
@property
def counter(self):
return self._counter
class IncreasingInterval(Countdown):
def __init__(self, start, factor):
Countdown.__init__(self, start)
self._start = start
self._factor = factor
def interval(self):
result = (self._start - self._counter + 1) * self._factor
return result
class RepeatedTimerTest(unittest.TestCase):
def setUp(self):
pass
def test_condition(self):
countdown = Countdown(5)
timer_task = mock.MagicMock()
timer_task.side_effect = countdown.step
timer = RepeatedTimer(0.1, timer_task, condition=lambda: countdown.counter > 0)
timer.start()
# wait for it
timer.join()
self.assertEqual(5, timer_task.call_count)
def test_finished_callback(self):
countdown = Countdown(5)
timer_task = mock.MagicMock()
timer_task.side_effect = countdown.step
on_finished = mock.MagicMock()
timer = RepeatedTimer(
0.1,
timer_task,
condition=lambda: countdown.counter > 0,
on_finish=on_finished,
)
timer.start()
# wait for it
timer.join()
self.assertEqual(1, on_finished.call_count)
def test_condition_callback(self):
countdown = Countdown(5)
timer_task = mock.MagicMock()
timer_task.side_effect = countdown.step
on_cancelled = mock.MagicMock()
on_condition_false = mock.MagicMock()
timer = RepeatedTimer(
0.1,
timer_task,
condition=lambda: countdown.counter > 0,
on_condition_false=on_condition_false,
on_cancelled=on_cancelled,
)
timer.start()
# wait for it
timer.join()
self.assertEqual(1, on_condition_false.call_count)
self.assertEqual(0, on_cancelled.call_count)
def test_cancelled_callback(self):
countdown = Countdown(5)
timer_task = mock.MagicMock()
timer_task.side_effect = countdown.step
on_cancelled = mock.MagicMock()
on_condition_false = mock.MagicMock()
timer = RepeatedTimer(
10,
timer_task,
condition=lambda: countdown.counter > 0,
on_condition_false=on_condition_false,
on_cancelled=on_cancelled,
)
timer.start()
# give it some time to run
time.sleep(1)
# then cancel it and wait for the thread to really finish
timer.cancel()
timer.join()
self.assertEqual(0, on_condition_false.call_count)
self.assertEqual(1, on_cancelled.call_count)
def test_run_first(self):
timer_task = mock.MagicMock()
timer = RepeatedTimer(60, timer_task, run_first=True)
timer.start()
# give it some time to run
time.sleep(1)
# then cancel it and wait for the thread to really finish
timer.cancel()
timer.join()
# should have run once
self.assertEqual(1, timer_task.call_count)
def test_not_run_first(self):
timer_task = mock.MagicMock()
timer = RepeatedTimer(60, timer_task)
timer.start()
# give it some time to run - should hang in the sleep phase though
time.sleep(1)
# then cancel it and wait for the thread to really finish
timer.cancel()
timer.join()
self.assertEqual(0, timer_task.call_count)
def test_adjusted_interval(self):
increasing_interval = IncreasingInterval(3, 1)
timer_task = mock.MagicMock()
timer_task.side_effect = increasing_interval.step
timer = RepeatedTimer(
increasing_interval.interval,
timer_task,
condition=lambda: increasing_interval.counter > 0,
)
# this should take 1 + 2 + 3 = 6s
start_time = time.time()
timer.start()
timer.join()
duration = time.time() - start_time
self.assertEqual(3, timer_task.call_count)
self.assertGreaterEqual(duration, 6)
self.assertLess(duration, 7)
def test_condition_change_during_task(self):
def sleep():
time.sleep(2)
timer_task = mock.MagicMock()
timer_task.side_effect = sleep
timer = RepeatedTimer(0.1, timer_task, run_first=True)
timer.start()
time.sleep(1)
timer.condition = lambda: False
timer.join()
self.assertEqual(1, timer_task.call_count)
| 5,035 | Python | .py | 138 | 27.594203 | 103 | 0.616846 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,057 | test_json.py | OctoPrint_OctoPrint/tests/util/test_json.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2022 The OctoPrint Project - Released under terms of the AGPLv3 License"
import datetime
import time
import warnings
import pytest
from frozendict import frozendict
from octoprint.util import json
class SomeClass:
...
class SomeSubclass(frozendict):
...
def test_deprecated_dump():
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
assert json.dump({"foo": "bar"}) == '{"foo":"bar"}'
assert len(w) == 1
assert issubclass(w[-1].category, DeprecationWarning)
assert "dump has been renamed to dumps" in str(w[-1].message)
@pytest.mark.parametrize(
"val,expected",
[
pytest.param({"foo": "bar"}, '{"foo":"bar"}', id="dict"),
pytest.param(frozendict({"foo": "bar"}), '{"foo":"bar"}', id="frozendict"),
pytest.param(b"foo", '"foo"', id="bytes"),
],
)
def test_encoding_dumps(val, expected):
assert json.encoding.dumps(val) == expected
def test_encoding_loads():
assert json.encoding.loads('{"foo":"bar"}') == {"foo": "bar"}
def test_encoding_dumps_typeerror():
with pytest.raises(TypeError):
json.encoding.dumps(SomeClass())
@pytest.mark.parametrize(
"val",
[
pytest.param({"foo": "bar"}, id="dict"),
pytest.param(b"\x00\x01\x02\x03", id="bytes"),
# invalid bytes from https://stackoverflow.com/a/3886015
pytest.param(
b"\xc3\x28\xa0\xa1\xe2\x28\xa1\xe2\x82\x28\xf0"
b"\x28\x8c\xbc\xf0\x90\x28\xbc\xf0\x28\x8c\x28",
id="invalid utf-8",
),
pytest.param(frozendict({"foo": "bar"}), id="frozendict"),
pytest.param(time.struct_time((2018, 1, 1, 0, 0, 0, 0, 0, 0)), id="struct_time"),
pytest.param(
datetime.datetime(2022, 3, 21, 5, 24, 0, 0, tzinfo=datetime.timezone.utc),
id="datetime",
),
pytest.param(frozendict({"a": b"b"}), id="nested"),
pytest.param(SomeSubclass({"a": b"b"}), id="subclass"),
],
)
def test_serializing_roundtrips(val):
assert json.serializing.loads(json.serializing.dumps(val)) == val
def test_serializing_dumps_typeerror():
with pytest.raises(TypeError):
json.serializing.dumps(SomeClass())
| 2,354 | Python | .py | 60 | 33.233333 | 103 | 0.636324 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,058 | __init__.py | OctoPrint_OctoPrint/tests/util/__init__.py | """
Unit tests for ``octoprint.util``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 282 | Python | .py | 6 | 45.5 | 103 | 0.717949 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,059 | test_file_helpers.py | OctoPrint_OctoPrint/tests/util/test_file_helpers.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2015 The OctoPrint Project - Released under terms of the AGPLv3 License"
import os
import sys
import unittest
from unittest import mock
import ddt
import octoprint.util
class BomAwareOpenTest(unittest.TestCase):
"""
Tests for :func:`octoprint.util.bom_aware_open`.
"""
def setUp(self):
self.filename_utf8_with_bom = os.path.join(
os.path.abspath(os.path.dirname(__file__)), "_files", "utf8_with_bom.txt"
)
self.filename_utf8_without_bom = os.path.join(
os.path.abspath(os.path.dirname(__file__)), "_files", "utf8_without_bom.txt"
)
def test_bom_aware_open_with_bom(self):
"""Tests that the contents of a UTF8 file with BOM are loaded correctly (without the BOM)."""
# test
with octoprint.util.bom_aware_open(
self.filename_utf8_with_bom, encoding="utf-8"
) as f:
contents = f.readlines()
# assert
self.assertEqual(len(contents), 3)
self.assertTrue(contents[0].startswith("#"))
def test_bom_aware_open_without_bom(self):
"""Tests that the contents of a UTF8 file without BOM are loaded correctly."""
# test
with octoprint.util.bom_aware_open(
self.filename_utf8_without_bom, encoding="utf-8"
) as f:
contents = f.readlines()
# assert
self.assertEqual(len(contents), 3)
self.assertTrue(contents[0].startswith("#"))
def test_bom_aware_open_ascii(self):
"""Tests that the contents of a UTF8 file loaded as ASCII are replaced correctly if "replace" is specified on errors."""
# test
with octoprint.util.bom_aware_open(
self.filename_utf8_with_bom, errors="replace"
) as f:
contents = f.readlines()
# assert
self.assertEqual(len(contents), 3)
self.assertTrue(contents[0].startswith("\ufffd" * 3 + "#"))
self.assertTrue(contents[2].endswith("\ufffd\ufffd" * 6))
def test_bom_aware_open_encoding_error(self):
"""Tests that an encoding error is thrown if not suppressed when opening a UTF8 file as ASCII."""
try:
with octoprint.util.bom_aware_open(self.filename_utf8_without_bom) as f:
f.readlines()
self.fail("Expected an exception")
except UnicodeDecodeError:
pass
def test_bom_aware_open_parameters_text_mode(self):
"""Tests that the parameters are propagated properly in text mode."""
with mock.patch("builtins.open", wraps=open) as mock_open:
with octoprint.util.bom_aware_open(
self.filename_utf8_without_bom,
mode="rt",
encoding="utf-8",
errors="ignore",
) as f:
f.readlines()
calls = [
mock.call(self.filename_utf8_without_bom, mode="rb"),
mock.call(
self.filename_utf8_without_bom,
encoding="utf-8",
mode="rt",
errors="ignore",
),
]
mock_open.assert_has_calls(calls)
def test_bom_aware_open_parameters_binary_mode(self):
"""Tests that binary mode raises an AssertionError."""
self.assertRaises(
AssertionError,
octoprint.util.bom_aware_open,
self.filename_utf8_without_bom,
mode="rb",
encoding="utf-8",
errors="ignore",
)
class GetBomTest(unittest.TestCase):
"""
Tests for :func:`octoprint.util.get_bom`.
"""
def setUp(self):
self.filename_utf8_with_bom = os.path.join(
os.path.abspath(os.path.dirname(__file__)), "_files", "utf8_with_bom.txt"
)
self.filename_utf8_without_bom = os.path.join(
os.path.abspath(os.path.dirname(__file__)), "_files", "utf8_without_bom.txt"
)
def test_get_bom_no_bom(self):
"""Tests that no BOM is returned if no BOM is present."""
# test
bom = octoprint.util.get_bom(self.filename_utf8_without_bom, "utf-8-sig")
# assert
self.assertIsNone(bom)
def test_get_bom_utf8_bom(self):
"""Tests that a UTF8 BOM is returned if present."""
# test
bom = octoprint.util.get_bom(self.filename_utf8_with_bom, "utf-8-sig")
# assert
self.assertEqual(bom, b"\xef\xbb\xbf")
def test_get_bom_wrong_encoding(self):
"""Tests that an UTF8 BOM is only returned if proper encoding was set."""
# test
bom = octoprint.util.get_bom(self.filename_utf8_with_bom, "utf-16-le")
# assert
self.assertIsNone(bom)
class TestAtomicWrite(unittest.TestCase):
"""
Tests for :func:`octoprint.util.atomic_write`.
"""
def setUp(self):
pass
@mock.patch("shutil.move")
@mock.patch("tempfile.NamedTemporaryFile")
@mock.patch("os.chmod")
@mock.patch("os.path.exists")
def test_atomic_write(self, mock_exists, mock_chmod, mock_ntf, mock_move):
"""Tests the regular basic "good" case."""
# setup
path = "tempfile.tmp"
umask = 0o026
mock_file = mock.MagicMock()
mock_file.name = path
mock_ntf.return_value = mock_file
mock_exists.return_value = False
# test
with mock.patch("octoprint.util.UMASK", umask):
with octoprint.util.atomic_write("somefile.yaml") as f:
f.write("test")
# assert
mock_ntf.assert_called_once_with(
mode="w+b", prefix="tmp", suffix="", dir="", delete=False
)
mock_file.write.assert_called_once_with("test")
mock_file.close.assert_called_once_with()
mock_chmod.assert_called_once_with(path, 0o644 & ~umask)
mock_move.assert_called_once_with(path, "somefile.yaml")
@mock.patch("shutil.move")
@mock.patch("tempfile.NamedTemporaryFile")
@mock.patch("os.chmod") # not used but needs to be mocked
@mock.patch("os.path.exists")
def test_atomic_write_path_aware(self, mock_exists, mock_chmod, mock_ntf, mock_move):
"""Tests whether the tempoary file is to created in the same directory as the target file."""
# setup
tmpdirpath = "/testpath/with/subdirectories"
path = os.path.join(tmpdirpath, "tempfile.tmp")
targetpath = os.path.join(tmpdirpath, "somefile.yaml")
mock_file = mock.MagicMock()
mock_file.name = path
mock_ntf.return_value = mock_file
mock_exists.return_value = False
# test
with octoprint.util.atomic_write(targetpath) as f:
f.write("test")
# assert
mock_ntf.assert_called_once_with(
mode="w+b",
prefix="tmp",
suffix="",
dir=tmpdirpath,
delete=False,
)
mock_move.assert_called_once_with(path, targetpath)
@mock.patch("shutil.move")
@mock.patch("tempfile.NamedTemporaryFile")
@mock.patch("os.chmod") # not used but needs to be mocked
@mock.patch("os.path.exists")
def test_atomic_write_rel_path_aware(
self, mock_exists, mock_chmod, mock_ntf, mock_move
):
"""Tests whether the tempoary file is to created in the same directory as the target file. This time submitting a relative path."""
# setup
tmpdirpath = "../test"
path = os.path.join(tmpdirpath, "tempfile.tmp")
targetpath = os.path.join(tmpdirpath, "somefile.yaml")
mock_file = mock.MagicMock()
mock_file.name = path
mock_ntf.return_value = mock_file
mock_exists.return_value = False
# test
with octoprint.util.atomic_write(targetpath) as f:
f.write("test")
# assert
mock_ntf.assert_called_once_with(
mode="w+b",
prefix="tmp",
suffix="",
dir=tmpdirpath,
delete=False,
)
mock_move.assert_called_once_with(path, targetpath)
@mock.patch("shutil.move")
@mock.patch("tempfile.NamedTemporaryFile")
@mock.patch("os.chmod")
@mock.patch("os.path.exists")
def test_atomic_write_error_on_write(
self, mock_exists, mock_chmod, mock_ntf, mock_move
):
"""Tests the error case where something in the wrapped code fails."""
# setup
path = "tempfile.tmp"
mock_file = mock.MagicMock()
mock_file.name = path
mock_file.write.side_effect = RuntimeError()
mock_ntf.return_value = mock_file
mock_exists.return_value = False
# test
try:
with octoprint.util.atomic_write("somefile.yaml") as f:
f.write("test")
self.fail("Expected an exception")
except RuntimeError:
pass
# assert
mock_ntf.assert_called_once_with(
mode="w+b", prefix="tmp", suffix="", dir="", delete=False
)
mock_file.close.assert_called_once_with()
self.assertFalse(mock_move.called)
self.assertFalse(mock_chmod.called)
@mock.patch("shutil.move")
@mock.patch("tempfile.NamedTemporaryFile")
@mock.patch("os.chmod")
@mock.patch("os.path.exists")
def test_atomic_write_error_on_move(
self, mock_exists, mock_chmod, mock_ntf, mock_move
):
"""Tests the error case where the final move fails."""
# setup
path = "tempfile.tmp"
mock_file = mock.MagicMock()
mock_file.name = path
mock_ntf.return_value = mock_file
mock_move.side_effect = RuntimeError()
mock_exists.return_value = False
# test
try:
with octoprint.util.atomic_write("somefile.yaml") as f:
f.write("test")
self.fail("Expected an exception")
except RuntimeError:
pass
# assert
mock_ntf.assert_called_once_with(
mode="w+b", prefix="tmp", suffix="", dir="", delete=False
)
mock_file.close.assert_called_once_with()
self.assertTrue(mock_move.called)
self.assertTrue(mock_chmod.called)
@mock.patch("shutil.move")
@mock.patch("tempfile.NamedTemporaryFile")
@mock.patch("os.chmod")
@mock.patch("os.path.exists")
def test_atomic_write_parameters(self, mock_exists, mock_chmod, mock_ntf, mock_move):
"""Tests that the open parameters are propagated properly."""
# setup
path = "tempfile.tmp"
umask = 0o026
mock_file = mock.MagicMock()
mock_file.name = path
mock_ntf.return_value = mock_file
mock_exists.return_value = False
# test
with mock.patch("octoprint.util.UMASK", umask):
with octoprint.util.atomic_write(
"somefile.yaml", mode="w", prefix="foo", suffix="bar"
) as f:
f.write("test")
# assert
mock_ntf.assert_called_once_with(
mode="w", prefix="foo", suffix="bar", dir="", delete=False, encoding="utf-8"
)
mock_file.close.assert_called_once_with()
mock_chmod.assert_called_once_with(path, 0o664 & ~umask)
mock_move.assert_called_once_with(path, "somefile.yaml")
@mock.patch("shutil.move")
@mock.patch("tempfile.NamedTemporaryFile")
@mock.patch("os.chmod")
@mock.patch("os.path.exists")
def test_atomic_write_custom_permissions(
self, mock_exists, mock_chmod, mock_ntf, mock_move
):
"""Tests that custom permissions may be set."""
# setup
path = "tempfile.tmp"
mock_file = mock.MagicMock()
mock_file.name = path
mock_ntf.return_value = mock_file
mock_exists.return_value = False
# test
with octoprint.util.atomic_write(
"somefile.yaml", mode="wt", permissions=0o755
) as f:
f.write("test")
# assert
mock_ntf.assert_called_once_with(
mode="wt", prefix="tmp", suffix="", dir="", delete=False, encoding="utf-8"
)
mock_file.close.assert_called_once_with()
mock_chmod.assert_called_once_with(path, 0o755)
mock_move.assert_called_once_with(path, "somefile.yaml")
@mock.patch("shutil.move")
@mock.patch("tempfile.NamedTemporaryFile")
@mock.patch("os.chmod")
@mock.patch("os.path.exists")
@mock.patch("os.stat")
def test_atomic_permissions_combined(
self,
mock_stat,
mock_exists,
mock_chmod,
mock_ntf,
mock_move,
):
"""Tests that the permissions of an existing file are combined with the requested permissions."""
# setup
path = "tempfile.tmp"
mock_file = mock.MagicMock()
mock_file.name = path
mock_ntf.return_value = mock_file
mock_exists.return_value = True
mock_stat_result = mock.MagicMock()
mock_stat_result.st_mode = 0o666
mock_stat.return_value = mock_stat_result
# test
with octoprint.util.atomic_write(
"somefile.yaml", mode="wt", permissions=0o755
) as f:
f.write("test")
# assert
mock_ntf.assert_called_once_with(
mode="wt", prefix="tmp", suffix="", dir="", delete=False, encoding="utf-8"
)
mock_file.close.assert_called_once_with()
mock_chmod.assert_called_once_with(path, 0o777) # 0o755 | 0o666
mock_move.assert_called_once_with(path, "somefile.yaml")
@mock.patch("shutil.move")
@mock.patch("tempfile.NamedTemporaryFile")
@mock.patch("os.chmod")
@mock.patch("os.path.exists")
@mock.patch("os.stat")
def test_atomic_permissions_limited(
self,
mock_stat,
mock_exists,
mock_chmod,
mock_ntf,
mock_move,
):
"""Tests that max_permissions limit the combined file permissions."""
# setup
path = "tempfile.tmp"
mock_file = mock.MagicMock()
mock_file.name = path
mock_ntf.return_value = mock_file
mock_exists.return_value = True
mock_stat_result = mock.MagicMock()
mock_stat_result.st_mode = 0o755
mock_stat.return_value = mock_stat_result
# test
with octoprint.util.atomic_write(
"somefile.yaml", mode="wt", permissions=0o600, max_permissions=0o666
) as f:
f.write("test")
# assert
mock_ntf.assert_called_once_with(
mode="wt", prefix="tmp", suffix="", dir="", delete=False, encoding="utf-8"
)
mock_file.close.assert_called_once_with()
mock_chmod.assert_called_once_with(
path, 0o644
) # (0o600 | 0o755) & 0o666 = 0o755 & 0o666 = 0o644
mock_move.assert_called_once_with(path, "somefile.yaml")
class TempDirTest(unittest.TestCase):
@mock.patch("shutil.rmtree")
@mock.patch("tempfile.mkdtemp")
def test_tempdir(self, mock_mkdtemp, mock_rmtree):
"""Tests regular "good" case."""
# setup
path = "/path/to/tmpdir"
mock_mkdtemp.return_value = path
# test
with octoprint.util.tempdir() as td:
self.assertEqual(td, path)
# assert
mock_mkdtemp.assert_called_once_with()
mock_rmtree.assert_called_once_with(path, ignore_errors=False, onerror=None)
@mock.patch("shutil.rmtree")
@mock.patch("tempfile.mkdtemp")
def test_tempdir_parameters_mkdtemp(self, mock_mkdtemp, mock_rmtree):
"""Tests that parameters for mkdtemp are properly propagated."""
# setup
path = "/path/to/tmpdir"
mock_mkdtemp.return_value = path
# test
with octoprint.util.tempdir(prefix="prefix", suffix="suffix", dir="dir") as td:
self.assertEqual(td, path)
# assert
mock_mkdtemp.assert_called_once_with(prefix="prefix", suffix="suffix", dir="dir")
mock_rmtree.assert_called_once_with(path, ignore_errors=False, onerror=None)
@mock.patch("shutil.rmtree")
@mock.patch("tempfile.mkdtemp")
def test_tempdir_parameters_rmtree(self, mock_mkdtemp, mock_rmtree):
"""Tests that parameters for rmtree are properly propagated."""
# setup
path = "/path/to/tmpdir"
mock_mkdtemp.return_value = path
onerror = mock.MagicMock()
# test
with octoprint.util.tempdir(ignore_errors=True, onerror=onerror) as td:
self.assertEqual(td, path)
# assert
mock_mkdtemp.assert_called_once_with()
mock_rmtree.assert_called_once_with(path, ignore_errors=True, onerror=onerror)
@ddt.ddt
class IsHiddenPathTest(unittest.TestCase):
def setUp(self):
import tempfile
self.basepath = tempfile.mkdtemp()
self.path_always_visible = os.path.join(self.basepath, "always_visible.txt")
self.path_hidden_on_windows = os.path.join(self.basepath, "hidden_on_windows.txt")
self.path_always_hidden = os.path.join(self.basepath, ".always_hidden.txt")
import sys
for attr in (
"path_always_visible",
"path_hidden_on_windows",
"path_always_hidden",
):
path = getattr(self, attr)
with open(path, "w+", encoding="utf-8") as f:
f.write(attr)
if sys.platform == "win32":
# we use ctypes and the windows API to set the hidden attribute on the file
# only hidden on windows
import ctypes
ctypes.windll.kernel32.SetFileAttributesW(str(self.path_hidden_on_windows), 2)
def tearDown(self):
import shutil
shutil.rmtree(self.basepath)
@ddt.data(
(None, False),
("path_always_visible", False),
("path_always_hidden", True),
("path_hidden_on_windows", sys.platform == "win32"),
)
@ddt.unpack
def test_is_hidden_path(self, path_id, expected):
path = getattr(self, path_id) if path_id is not None else None
self.assertEqual(octoprint.util.is_hidden_path(path), expected)
| 18,278 | Python | .py | 460 | 30.606522 | 139 | 0.606903 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,060 | test_fast_deepcopy.py | OctoPrint_OctoPrint/tests/util/test_fast_deepcopy.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2021 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import octoprint.util
class FastDeepcopyTest(unittest.TestCase):
def test_clean(self):
data = {"a": 1, "b": 2, "c": 3}
self.assertEqual(data, octoprint.util.fast_deepcopy(data))
def test_function(self):
data = {"a": 1, "b": 2, "c": 3, "f": lambda x: x + 1}
self.assertEqual(data, octoprint.util.fast_deepcopy(data))
| 569 | Python | .py | 11 | 46.636364 | 103 | 0.676311 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,061 | test_caseinsensitive_set.py | OctoPrint_OctoPrint/tests/util/test_caseinsensitive_set.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2018 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import ddt
import octoprint.util
@ddt.ddt
class TestCaseInsensitiveSet(unittest.TestCase):
def setUp(self):
self.set = octoprint.util.CaseInsensitiveSet("A", "FoO", True, 23)
@ddt.data("A", "a", "foo", True, 23)
def test_contained(self, value):
self.assertIn(value, self.set)
@ddt.data("b", "fnord", False, 42)
def test_not_contained(self, value):
self.assertNotIn(value, self.set)
| 642 | Python | .py | 15 | 38.333333 | 103 | 0.704362 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,062 | test_pip.py | OctoPrint_OctoPrint/tests/util/test_pip.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2017 The OctoPrint Project - Released under terms of the AGPLv3 License"
import os
import site
import unittest
from unittest import mock
import ddt
import pkg_resources
import octoprint.util.pip
@ddt.ddt
class PipCallerTest(unittest.TestCase):
@ddt.data(
# remove --process-dependency-links for versions < 1.5
(
["install", "--process-dependency-links", "http://example.com/foo.zip"],
"1.1",
True,
False,
False,
True,
["install", "http://example.com/foo.zip"],
),
# keep --process-dependency-links for versions >= 1.5, --no-use-wheel for ==1.5.0
(
["install", "--process-dependency-links", "http://example.com/foo.zip"],
"1.5",
True,
False,
False,
True,
[
"install",
"--process-dependency-links",
"http://example.com/foo.zip",
"--no-use-wheel",
],
),
# keep --process-dependency-links for versions >= 1.5
(
["install", "--process-dependency-links", "http://example.com/foo.zip"],
"9.0.1",
True,
False,
False,
True,
["install", "--process-dependency-links", "http://example.com/foo.zip"],
),
# remove --user in virtual env
(
["install", "--user", "http://example.com/foo.zip"],
"9.0.1",
True,
False,
False,
True,
["install", "http://example.com/foo.zip"],
),
# ignore use_user in virtual env
(
["install", "http://example.com/foo.zip"],
"9.0.1",
True,
True,
False,
True,
["install", "http://example.com/foo.zip"],
),
# ignore force_user in virtual env
(
["install", "http://example.com/foo.zip"],
"9.0.1",
True,
False,
True,
True,
["install", "http://example.com/foo.zip"],
),
# remove --user with disabled user_site
(
["install", "--user", "http://example.com/foo.zip"],
"9.0.1",
False,
False,
False,
False,
["install", "http://example.com/foo.zip"],
),
# add --user when not in virtual env and use_user is True
(
["install", "http://example.com/foo.zip"],
"9.0.1",
False,
True,
False,
True,
["install", "http://example.com/foo.zip", "--user"],
),
# ignore use_user with disabled user_site
(
["install", "http://example.com/foo.zip"],
"9.0.1",
False,
True,
False,
False,
["install", "http://example.com/foo.zip"],
),
# add --user when not in virtual env and force_user is True
(
["install", "http://example.com/foo.zip"],
"9.0.1",
False,
False,
True,
True,
["install", "http://example.com/foo.zip", "--user"],
),
# ignore force_user with disabled user_site
(
["install", "http://example.com/foo.zip"],
"9.0.1",
False,
False,
True,
False,
["install", "http://example.com/foo.zip"],
),
)
@ddt.unpack
def test_clean_install_command(
self, args, version, virtual_env, use_user, force_user, user_site, expected
):
with mock.patch.object(site, "ENABLE_USER_SITE", user_site):
parsed = pkg_resources.parse_version(version)
actual = octoprint.util.pip.PipCaller.clean_install_command(
args, parsed, virtual_env, use_user, force_user
)
self.assertEqual(expected, actual)
def test_check_setup(self):
"""Initialization against local pip should work, including testballoon"""
caller = octoprint.util.pip.PipCaller()
self.assertIsNotNone(caller._command)
self.assertIsNotNone(caller._version)
@ddt.ddt
class PipUtilTest(unittest.TestCase):
def setUp(self):
self._test_data = os.path.join(
os.path.dirname(__file__), "_files", "pip_test_data"
)
def _get_lines(self, file):
with open(os.path.join(self._test_data, file), encoding="utf-8") as f:
lines = list(map(str.rstrip, f.readlines()))
return lines
@ddt.data(
("already_installed_1.txt", True),
("already_installed_2.txt", True),
("successful_install_1.txt", False),
("dependency_error.txt", False),
)
@ddt.unpack
def test_is_already_installed(self, file, expected):
lines = self._get_lines(file)
actual = octoprint.util.pip.is_already_installed(lines)
self.assertEqual(expected, actual)
@ddt.data(
("egg_problem_1.txt", True),
("egg_problem_2.txt", True),
("successful_install_1.txt", False),
("already_installed_1.txt", False),
)
@ddt.unpack
def test_is_egg_problem(self, file, expected):
lines = self._get_lines(file)
actual = octoprint.util.pip.is_egg_problem(lines)
self.assertEqual(expected, actual)
@ddt.data(
("python_mismatch_1.txt", True),
("python_mismatch_2.txt", True),
("successful_install_1.txt", False),
("already_installed_1.txt", False),
)
@ddt.unpack
def test_is_python_mismatch(self, file, expected):
lines = self._get_lines(file)
actual = octoprint.util.pip.is_python_mismatch(lines)
self.assertEqual(expected, actual)
@ddt.data(
(
"successful_install_1.txt",
"Successfully installed Better-Grbl-Support-2.1.0-rc1.3",
),
("already_installed_1.txt", ""),
)
@ddt.unpack
def test_get_result_line(self, file, expected):
lines = self._get_lines(file)
actual = octoprint.util.pip.get_result_line(lines)
self.assertEqual(expected, actual)
| 6,488 | Python | .py | 198 | 22.792929 | 103 | 0.521427 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,063 | test_resettable_timer.py | OctoPrint_OctoPrint/tests/util/test_resettable_timer.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2015 The OctoPrint Project - Released under terms of the AGPLv3 License"
import time
import unittest
from unittest import mock
from octoprint.util import ResettableTimer
class ResettableTimerTest(unittest.TestCase):
def setUp(self):
pass
def test_function(self):
timer_task = mock.MagicMock()
timer = ResettableTimer(10, timer_task)
timer.start()
# wait for it
timer.join()
self.assertEqual(1, timer_task.call_count)
def test_reset_callback(self):
timer_task = mock.MagicMock()
on_reset_cb = mock.MagicMock()
timer = ResettableTimer(10, timer_task, on_reset=on_reset_cb)
timer.start()
timer.reset()
# wait for it
timer.join()
self.assertEqual(1, timer_task.call_count)
self.assertEqual(1, on_reset_cb.call_count)
def test_canceled_callback(self):
timer_task = mock.MagicMock()
on_cancelled_cb = mock.MagicMock()
timer = ResettableTimer(10, timer_task, on_cancelled=on_cancelled_cb)
timer.start()
time.sleep(5)
timer.cancel()
time.sleep(10)
self.assertEqual(0, timer_task.call_count)
self.assertEqual(1, on_cancelled_cb.call_count)
| 1,431 | Python | .py | 37 | 31.27027 | 103 | 0.662782 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,064 | test_jinja.py | OctoPrint_OctoPrint/tests/util/test_jinja.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2015 The OctoPrint Project - Released under terms of the AGPLv3 License"
import os
import unittest
import jinja2
from ddt import data, ddt, unpack
import octoprint.util.jinja
NONE_FILTER = None
HIDDEN_FILTER = lambda x: not os.path.basename(x).startswith(".")
NO_TXT_FILTER = lambda x: x.endswith(".txt")
COMBINED_FILTER = lambda x: HIDDEN_FILTER(x) and NO_TXT_FILTER(x)
@ddt
class FilteredFileSystemLoaderTest(unittest.TestCase):
    """Tests for octoprint.util.jinja.FilteredFileSystemLoader against the
    fixture templates in ``_files/jinja_test_data``."""

    def setUp(self):
        # Fixture folder contains: .hidden_everywhere.txt, normal_text.txt,
        # not_a_text.dat (per the expected lists in the data tables below).
        self.basepath = os.path.join(
            os.path.abspath(os.path.dirname(__file__)), "_files", "jinja_test_data"
        )
        self.environment = jinja2.Environment()

    def loader_factory(self, path_filter):
        # Builds a loader over the fixture folder with the given path filter.
        return octoprint.util.jinja.FilteredFileSystemLoader(
            self.basepath, path_filter=path_filter
        )

    @data(
        (NONE_FILTER, [".hidden_everywhere.txt", "normal_text.txt", "not_a_text.dat"]),
        (HIDDEN_FILTER, ["normal_text.txt", "not_a_text.dat"]),
        (NO_TXT_FILTER, [".hidden_everywhere.txt", "normal_text.txt"]),
        (COMBINED_FILTER, ["normal_text.txt"]),
    )
    @unpack
    def test_list_templates(self, path_filter, expected):
        """list_templates must only report templates passing the filter."""
        loader = self.loader_factory(path_filter=path_filter)
        templates = loader.list_templates()
        self.assertListEqual(templates, expected)

    @data(
        (
            NONE_FILTER,
            (
                (".hidden_everywhere.txt", True),
                ("normal_text.txt", True),
                ("not_a_text.dat", True),
            ),
        ),
        (
            HIDDEN_FILTER,
            (
                (".hidden_everywhere.txt", False),
                ("normal_text.txt", True),
                ("not_a_text.dat", True),
            ),
        ),
        (
            NO_TXT_FILTER,
            (
                (".hidden_everywhere.txt", True),
                ("normal_text.txt", True),
                ("not_a_text.dat", False),
            ),
        ),
        (
            COMBINED_FILTER,
            (
                (".hidden_everywhere.txt", False),
                ("normal_text.txt", True),
                ("not_a_text.dat", False),
            ),
        ),
    )
    @unpack
    def test_get_source_none_filter(self, path_filter, param_sets):
        """get_source must succeed for filtered-in and raise for filtered-out
        templates.

        NOTE(review): despite the name this exercises every filter variant,
        not just the None filter.
        """
        loader = self.loader_factory(path_filter=path_filter)
        for param_set in param_sets:
            template, success = param_set

            if success:
                self._test_get_source_success(loader, template)
            else:
                self._test_get_source_notfound(loader, template)

    def _test_get_source_success(self, loader, template):
        # Filtered-in templates must load without raising.
        loader.get_source(self.environment, template)

    def _test_get_source_notfound(self, loader, template):
        # Filtered-out templates must raise TemplateNotFound.
        try:
            loader.get_source(self.environment, template)
            self.fail("Expected an exception")
        except jinja2.TemplateNotFound:
            pass
| 3,051 | Python | .py | 84 | 26.702381 | 103 | 0.571912 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,065 | test_version.py | OctoPrint_OctoPrint/tests/util/test_version.py | __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2021 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import ddt
import pkg_resources
@ddt.ddt
class VersionUtilTest(unittest.TestCase):
    """Tests for octoprint.util.version helpers.

    NOTE(review): pkg_resources is deprecated upstream (setuptools) in favor
    of ``packaging.version`` — consider migrating these comparisons.
    """

    @data(
        # (version string, cut level, expected parsed version or exception type)
        ("1.6.0.dev303+g328853170.dirty", None, "1.6.0.dev303+g328853170.dirty"),
        ("1.6.0.dev303+g328853170.dirty", 0, "1.6.0"),
        ("1.6.0.dev303+g328853170.dirty", 1, "1.6"),
        ("1.6.0", 0, "1.6.0"),
        ("1.6.0", 23, "1.6.0"),
        ("1.6.0", -1, ValueError),
    )
    @ddt.unpack
    def test_get_comparable_version(self, version, cut, expected):
        """get_comparable_version must cut version segments or raise for
        invalid cut values."""
        from octoprint.util.version import get_comparable_version

        try:
            actual = get_comparable_version(version, cut=cut)
        except Exception as exc:
            # An exception is only acceptable if the data row expects exactly
            # that exception type; anything else is a genuine failure.
            if isinstance(expected, type) and isinstance(exc, expected):
                pass
            else:
                raise
        else:
            self.assertEqual(actual, pkg_resources.parse_version(expected))

    def test_get_comparable_version_base(self):
        """base=True must strip dev/local segments down to the release."""
        from octoprint.util.version import get_comparable_version

        actual = get_comparable_version("1.6.0.dev303+g328853170.dirty", base=True)
        self.assertEqual(actual, pkg_resources.parse_version("1.6.0"))

    @data(
        ("1.6.0", "1.6.0"),
        ("v1.6.0", "1.6.0"),
        ("V1.6.0", "1.6.0"),
        ("1.6.0+", "1.6.0"),
        ("\t1.6.0 \r\n", "1.6.0"),
    )
    @ddt.unpack
    def test_normalize_version(self, version, expected):
        """normalize_version must strip v/V prefixes, trailing '+' and
        surrounding whitespace."""
        from octoprint.util.version import normalize_version

        actual = normalize_version(version)
        self.assertEqual(actual, expected)
| 1,753 | Python | .py | 43 | 32.930233 | 103 | 0.614706 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,066 | __init__.py | OctoPrint_OctoPrint/tests/plugins/__init__.py | """
Unit tests for bundled plugins.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2017 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 279 | Python | .py | 6 | 45 | 103 | 0.72963 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,067 | test_announcements.py | OctoPrint_OctoPrint/tests/plugins/test_announcements.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2021 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
from unittest import mock
from octoprint.util import TemporaryDirectory
RSS_EXAMPLE = """<?xml version="1.0" encoding="UTF-8" ?>
<rss version="2.0">
<channel>
<title>RSS Title</title>
<description>This is an example of an RSS feed</description>
<link>https://www.example.com/main.html</link>
<copyright>2020 Example.com All rights reserved</copyright>
<lastBuildDate>Mon, 06 Sep 2010 00:01:00 +0000</lastBuildDate>
<pubDate>Sun, 06 Sep 2009 16:20:00 +0000</pubDate>
<ttl>1800</ttl>
<item>
<title>Example entry</title>
<description>Here is some text containing an interesting description.</description>
<link>https://www.example.com/blog/post/1</link>
<guid isPermaLink="false">7bd204c6-1655-4c27-aeee-53f933c5395f</guid>
<pubDate>Sun, 06 Sep 2009 16:20:00 +0000</pubDate>
</item>
</channel>
</rss>
"""
class TestAnnouncements(unittest.TestCase):
    """Tests for the bundled announcements plugin's channel caching."""

    def test_caches(self):
        """Fetching channel data from the network must populate the cache so
        that a subsequent cache read returns identical data."""
        from octoprint.plugins.announcements import AnnouncementPlugin

        plugin = AnnouncementPlugin()
        plugin._logger = mock.MagicMock()

        with TemporaryDirectory() as data_folder:
            plugin._data_folder = data_folder

            # Serve the canned RSS document instead of hitting the network.
            with mock.patch("requests.get") as get_mock:
                response = mock.MagicMock(status_code=200)
                response.text = RSS_EXAMPLE
                get_mock.return_value = response

                from_network = plugin._get_channel_data_from_network(
                    "test", {"url": "https://example.com/feed.xml"}
                )

            from_cache = plugin._get_channel_data_from_cache("test", {"ttl": 1000})

            self.maxDiff = 100000
            self.assertDictEqual(from_network, from_cache)
| 1,938 | Python | .py | 42 | 40.119048 | 103 | 0.691614 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,068 | test_notification_filter.py | OctoPrint_OctoPrint/tests/plugins/pluginmanager/test_notification_filter.py | import unittest
import ddt
@ddt.ddt
class NotificationFilterTest(unittest.TestCase):
    """Tests for the plugin manager's notification relevance filter.

    Each data row is (notification dict, installed plugin version,
    OctoPrint version, expected relevance).
    """

    @data(
        # no version constraints at all -> always relevant
        (
            {"plugin": "foo", "text": "text", "date": "2020-09-15 00:00:00Z"},
            "1.0.0",
            "1.5.0",
            True,
        ),
        # pluginversions specifier matching the installed version
        (
            {
                "plugin": "foo",
                "text": "text",
                "date": "2020-09-15 00:00:00Z",
                "pluginversions": [">=1.0.0"],
            },
            "1.0.0",
            "1.5.0",
            True,
        ),
        # pluginversions may mix specifiers and exact versions; one match suffices
        (
            {
                "plugin": "foo",
                "text": "text",
                "date": "2020-09-15 00:00:00Z",
                "pluginversions": [">=2.0.0", "0.9.0", "0.9.1", "1.0.0"],
            },
            "1.0.0",
            "1.5.0",
            True,
        ),
        # installed version matches no entry -> not relevant
        (
            {
                "plugin": "foo",
                "text": "text",
                "date": "2020-09-15 00:00:00Z",
                "pluginversions": [">=2.0.0", "0.9.0", "0.9.1", "1.0.0"],
            },
            "1.1.0",
            "1.5.0",
            False,
        ),
        # "versions" lists exact plugin versions only
        (
            {
                "plugin": "foo",
                "text": "text",
                "date": "2020-09-15 00:00:00Z",
                "versions": ["0.9.0", "0.9.1", "1.0.0", "1.0.1"],
            },
            "1.0.0",
            "1.5.0",
            True,
        ),
        (
            {
                "plugin": "foo",
                "text": "text",
                "date": "2020-09-15 00:00:00Z",
                "versions": ["0.9.0", "0.9.1", "1.0.0", "1.0.1"],
            },
            "1.1.0",
            "1.5.0",
            False,
        ),
        # "octoversions" constrains the OctoPrint version instead
        (
            {
                "plugin": "foo",
                "text": "text",
                "date": "2020-09-15 00:00:00Z",
                "octoversions": ["1.5.0"],
            },
            "1.0.0",
            "1.5.0",
            True,
        ),
        (
            {
                "plugin": "foo",
                "text": "text",
                "date": "2020-09-15 00:00:00Z",
                "octoversions": ["==1.4.2"],
            },
            "1.0.0",
            "1.5.0",
            False,
        ),
    )
    @ddt.unpack
    def test_notification_filter(
        self, notification, plugin_version, octoprint_version, expected
    ):
        """_filter_relevant_notification must honor pluginversions, versions
        and octoversions constraints."""
        from octoprint.plugins.pluginmanager import _filter_relevant_notification

        result = _filter_relevant_notification(
            notification, plugin_version, octoprint_version
        )
        self.assertEqual(expected, result)
| 2,622 | Python | .py | 98 | 14.316327 | 81 | 0.334657 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,069 | test_localstorage.py | OctoPrint_OctoPrint/tests/filemanager/test_localstorage.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import os
import os.path
import unittest
from contextlib import contextmanager
from unittest import mock
from ddt import data, ddt, unpack
from octoprint.filemanager.storage import LocalFileStorage, StorageError
class FileWrapper:
    """Wraps a fixture file from the ``_files`` directory, recording its path
    and SHA1 hash so tests can compare stored metadata against it."""

    def __init__(self, filename):
        import hashlib

        self.path = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "_files", filename
        )

        # Hash the fixture in fixed-size chunks to keep memory usage flat.
        chunk_size = 65536
        digest = hashlib.sha1()
        with open(self.path, "rb") as fh:
            for chunk in iter(lambda: fh.read(chunk_size), b""):
                digest.update(chunk)
        self.hash = digest.hexdigest()

    def save(self, destination):
        """Copies the wrapped fixture file to *destination*."""
        import shutil

        shutil.copy(self.path, destination)
# Shared fixture files, hashed once at import time and reused by all tests.
FILE_BP_CASE_STL = FileWrapper("bp_case.stl")
FILE_BP_CASE_GCODE = FileWrapper("bp_case.gcode")
FILE_CRAZYRADIO_STL = FileWrapper("crazyradio.stl")
@ddt
class LocalStorageTest(unittest.TestCase):
def setUp(self):
import tempfile
self.basefolder = os.path.realpath(os.path.abspath(tempfile.mkdtemp()))
self.storage = LocalFileStorage(self.basefolder)
# mock file manager module
self.filemanager_patcher = mock.patch("octoprint.filemanager")
self.filemanager = self.filemanager_patcher.start()
self.filemanager.valid_file_type.return_value = True
def get_file_type(name):
if name.lower().endswith(".stl"):
return ["model", "stl"]
elif (
name.lower().endswith(".gco")
or name.lower().endswith(".gcode")
or name.lower.endswith(".g")
):
return ["machinecode", "gcode"]
else:
return None
self.filemanager.get_file_type.side_effect = get_file_type
def tearDown(self):
import shutil
shutil.rmtree(self.basefolder)
self.filemanager_patcher.stop()
    def test_add_file(self):
        """Adding a file keeps its (already sanitized) name unchanged."""
        self._add_and_verify_file("bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL)
def test_add_file_overwrite(self):
self._add_and_verify_file("bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL)
from octoprint.filemanager.storage import StorageError
self.assertRaises(
StorageError,
self._add_and_verify_file,
"bp_case.stl",
"bp_case.stl",
FILE_BP_CASE_STL,
overwrite=False,
)
self._add_and_verify_file(
"bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL, overwrite=True
)
    def test_add_file_with_display(self):
        """A custom display name passed on add must be stored in metadata."""
        stl_name = self._add_and_verify_file(
            "bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL, display="bp_cäse.stl"
        )
        stl_metadata = self.storage.get_metadata(stl_name)

        self.assertIsNotNone(stl_metadata)
        self.assertIn("display", stl_metadata)
        self.assertEqual("bp_cäse.stl", stl_metadata["display"])
def test_add_file_with_web(self):
import time
href = "http://www.example.com"
retrieved = time.time()
stl_name = self._add_and_verify_file(
"bp_case.stl",
"bp_case.stl",
FILE_BP_CASE_STL,
links=[("web", {"href": href, "retrieved": retrieved})],
)
stl_metadata = self.storage.get_metadata(stl_name)
self.assertIsNotNone(stl_metadata)
self.assertEqual(1, len(stl_metadata["links"]))
link = stl_metadata["links"][0]
self.assertTrue("web", link["rel"])
self.assertTrue("href" in link)
self.assertEqual(href, link["href"])
self.assertTrue("retrieved" in link)
self.assertEqual(retrieved, link["retrieved"])
    def test_add_file_with_association(self):
        """A model link passed on add must create forward (gcode->model) and
        reverse (model->machinecode) links carrying the peer's hash."""
        stl_name = self._add_and_verify_file(
            "bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL
        )
        gcode_name = self._add_and_verify_file(
            "bp_case.gcode",
            "bp_case.gcode",
            FILE_BP_CASE_GCODE,
            links=[("model", {"name": stl_name})],
        )

        stl_metadata = self.storage.get_metadata(stl_name)
        gcode_metadata = self.storage.get_metadata(gcode_name)

        # forward link
        self.assertEqual(1, len(gcode_metadata["links"]))
        link = gcode_metadata["links"][0]
        self.assertEqual("model", link["rel"])
        self.assertTrue("name" in link)
        self.assertEqual(stl_name, link["name"])
        self.assertTrue("hash" in link)
        self.assertEqual(FILE_BP_CASE_STL.hash, link["hash"])

        # reverse link
        self.assertEqual(1, len(stl_metadata["links"]))
        link = stl_metadata["links"][0]
        self.assertEqual("machinecode", link["rel"])
        self.assertTrue("name" in link)
        self.assertEqual(gcode_name, link["name"])
        self.assertTrue("hash" in link)
        self.assertEqual(FILE_BP_CASE_GCODE.hash, link["hash"])
    def test_remove_file(self):
        """Removing a file must delete it, drop its metadata and strip the
        reverse link from associated files."""
        stl_name = self._add_and_verify_file(
            "bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL
        )
        gcode_name = self._add_and_verify_file(
            "bp_case.gcode",
            "bp_case.gcode",
            FILE_BP_CASE_GCODE,
            links=[("model", {"name": stl_name})],
        )

        stl_metadata = self.storage.get_metadata(stl_name)
        gcode_metadata = self.storage.get_metadata(gcode_name)

        self.assertIsNotNone(stl_metadata)
        self.assertIsNotNone(gcode_metadata)

        self.storage.remove_file(stl_name)

        self.assertFalse(os.path.exists(os.path.join(self.basefolder, stl_name)))

        stl_metadata = self.storage.get_metadata(stl_name)
        gcode_metadata = self.storage.get_metadata(gcode_name)

        self.assertIsNone(stl_metadata)
        self.assertIsNotNone(gcode_metadata)

        # the gcode's link back to the removed model must be gone
        self.assertEqual(0, len(gcode_metadata["links"]))
    def test_copy_file(self):
        """Copying a file must keep the source and clone its metadata."""
        self._add_file("bp_case.stl", FILE_BP_CASE_STL)
        self._add_folder("test")

        self.assertTrue(os.path.isfile(os.path.join(self.basefolder, "bp_case.stl")))
        self.assertTrue(os.path.isdir(os.path.join(self.basefolder, "test")))

        self.storage.copy_file("bp_case.stl", "test/copied.stl")

        self.assertTrue(os.path.isfile(os.path.join(self.basefolder, "bp_case.stl")))
        self.assertTrue(
            os.path.isfile(os.path.join(self.basefolder, "test", "copied.stl"))
        )

        stl_metadata = self.storage.get_metadata("bp_case.stl")
        copied_metadata = self.storage.get_metadata("test/copied.stl")

        self.assertIsNotNone(stl_metadata)
        self.assertIsNotNone(copied_metadata)
        self.assertDictEqual(stl_metadata, copied_metadata)
    def test_move_file(self):
        """Moving a file must remove the source and carry its metadata over."""
        self._add_file("bp_case.stl", FILE_BP_CASE_STL)
        self._add_folder("test")

        self.assertTrue(os.path.isfile(os.path.join(self.basefolder, "bp_case.stl")))
        self.assertTrue(os.path.isdir(os.path.join(self.basefolder, "test")))

        before_stl_metadata = self.storage.get_metadata("bp_case.stl")

        self.storage.move_file("bp_case.stl", "test/copied.stl")

        self.assertFalse(os.path.isfile(os.path.join(self.basefolder, "bp_case.stl")))
        self.assertTrue(
            os.path.isfile(os.path.join(self.basefolder, "test", "copied.stl"))
        )

        after_stl_metadata = self.storage.get_metadata("bp_case.stl")
        copied_metadata = self.storage.get_metadata("test/copied.stl")

        self.assertIsNotNone(before_stl_metadata)
        self.assertIsNone(after_stl_metadata)
        self.assertIsNotNone(copied_metadata)
        self.assertDictEqual(before_stl_metadata, copied_metadata)
def test_copy_file_same_name(self):
self._add_file("bp_case.stl", FILE_BP_CASE_STL)
try:
self.storage.copy_file("bp_case.stl", "bp_case.stl")
self.fail("Expected an exception")
except StorageError as e:
self.assertEqual(e.code, StorageError.SOURCE_EQUALS_DESTINATION)
    @data("copy_file", "move_file")
    def test_copy_move_file_different_display(self, operation):
        """Copy/move to a new name must not carry the old display name over."""
        self._add_file("bp_case.stl", FILE_BP_CASE_STL, display="bp_cäse.stl")

        before_metadata = self.storage.get_metadata("bp_case.stl")
        # operation is the storage method name supplied by ddt
        getattr(self.storage, operation)("bp_case.stl", "test.stl")
        after_metadata = self.storage.get_metadata("test.stl")

        self.assertIsNotNone(before_metadata)
        self.assertIsNotNone(after_metadata)
        self.assertNotIn("display", after_metadata)
@data("copy_file", "move_file")
def test_copy_move_file_same(self, operation):
self._add_file("bp_case.stl", FILE_BP_CASE_STL)
try:
getattr(self.storage, operation)("bp_case.stl", "bp_case.stl")
self.fail("Expected an exception")
except StorageError as e:
self.assertEqual(e.code, StorageError.SOURCE_EQUALS_DESTINATION)
@data("copy_file", "move_file")
def test_copy_move_file_missing_source(self, operation):
try:
getattr(self.storage, operation)("bp_case.stl", "test/copied.stl")
self.fail("Expected an exception")
except StorageError as e:
self.assertEqual(e.code, StorageError.INVALID_SOURCE)
@data("copy_file", "move_file")
def test_copy_move_file_missing_destination_folder(self, operation):
self._add_file("bp_case.stl", FILE_BP_CASE_STL)
try:
getattr(self.storage, operation)("bp_case.stl", "test/copied.stl")
self.fail("Expected an exception")
except StorageError as e:
self.assertEqual(e.code, StorageError.INVALID_DESTINATION)
@data("copy_file", "move_file")
def test_copy_move_file_existing_destination_path(self, operation):
self._add_file("bp_case.stl", FILE_BP_CASE_STL)
self._add_folder("test")
self._add_file("test/crazyradio.stl", FILE_CRAZYRADIO_STL)
try:
getattr(self.storage, operation)("bp_case.stl", "test/crazyradio.stl")
self.fail("Expected an exception")
except StorageError as e:
self.assertEqual(e.code, StorageError.ALREADY_EXISTS)
    def test_add_folder(self):
        """Adding a folder keeps its (already sanitized) name unchanged."""
        self._add_and_verify_folder("test", "test")
    def test_add_folder_with_display(self):
        """A custom display name passed on folder add must be stored."""
        self._add_and_verify_folder("test", "test", display="täst")
        metadata = self.storage.get_metadata("test")
        self.assertIsNotNone(metadata)
        self.assertIn("display", metadata)
        self.assertEqual("täst", metadata["display"])
    def test_add_subfolder(self):
        """Folders, subfolders and files within them must all be created on
        disk under the base folder."""
        folder_name = self._add_and_verify_folder("folder", "folder")
        subfolder_name = self._add_and_verify_folder(
            (folder_name, "subfolder"), folder_name + "/subfolder"
        )
        stl_name = self._add_and_verify_file(
            (subfolder_name, "bp_case.stl"),
            subfolder_name + "/bp_case.stl",
            FILE_BP_CASE_STL,
        )

        self.assertTrue(os.path.exists(os.path.join(self.basefolder, folder_name)))
        self.assertTrue(os.path.exists(os.path.join(self.basefolder, subfolder_name)))
        self.assertTrue(os.path.exists(os.path.join(self.basefolder, stl_name)))
def test_remove_folder(self):
content_folder = self._add_and_verify_folder("content", "content")
other_stl_name = self._add_and_verify_file(
(content_folder, "crazyradio.stl"),
content_folder + "/crazyradio.stl",
FILE_CRAZYRADIO_STL,
)
empty_folder = self._add_and_verify_folder("empty", "empty")
try:
self.storage.remove_folder(content_folder, recursive=False)
except Exception:
self.assertTrue(os.path.exists(os.path.join(self.basefolder, content_folder)))
self.assertTrue(os.path.isdir(os.path.join(self.basefolder, content_folder)))
self.assertTrue(os.path.exists(os.path.join(self.basefolder, other_stl_name)))
self.assertIsNotNone(self.storage.get_metadata(other_stl_name))
self.storage.remove_folder(content_folder, recursive=True)
self.assertFalse(os.path.exists(os.path.join(self.basefolder, content_folder)))
self.assertFalse(os.path.isdir(os.path.join(self.basefolder, content_folder)))
self.storage.remove_folder(empty_folder, recursive=False)
self.assertFalse(os.path.exists(os.path.join(self.basefolder, empty_folder)))
self.assertFalse(os.path.isdir(os.path.join(self.basefolder, empty_folder)))
    def test_remove_folder_with_display(self):
        """Removing a folder must also delete its display-name metadata."""
        self._add_folder("folder", display="földer")
        before_metadata = self.storage.get_metadata("folder")
        self.storage.remove_folder("folder")
        after_metadata = self.storage.get_metadata("folder")

        self.assertIsNotNone(before_metadata)
        self.assertDictEqual(before_metadata, {"display": "földer"})
        self.assertIsNone(after_metadata)
    def test_copy_folder(self):
        """Copying a folder must keep the source and clone contents plus the
        per-folder .metadata.json."""
        self._add_folder("source")
        self._add_folder("destination")
        self._add_file("source/crazyradio.stl", FILE_CRAZYRADIO_STL)

        source_metadata = self.storage.get_metadata("source/crazyradio.stl")
        self.storage.copy_folder("source", "destination/copied")
        copied_metadata = self.storage.get_metadata("destination/copied/crazyradio.stl")

        self.assertTrue(os.path.isdir(os.path.join(self.basefolder, "source")))
        self.assertTrue(
            os.path.isfile(os.path.join(self.basefolder, "source", "crazyradio.stl"))
        )
        self.assertTrue(os.path.isdir(os.path.join(self.basefolder, "destination")))
        self.assertTrue(
            os.path.isdir(os.path.join(self.basefolder, "destination", "copied"))
        )
        self.assertTrue(
            os.path.isfile(
                os.path.join(self.basefolder, "destination", "copied", ".metadata.json")
            )
        )
        self.assertTrue(
            os.path.isfile(
                os.path.join(self.basefolder, "destination", "copied", "crazyradio.stl")
            )
        )

        self.assertIsNotNone(source_metadata)
        self.assertIsNotNone(copied_metadata)
        self.assertDictEqual(source_metadata, copied_metadata)
    def test_move_folder(self):
        """Moving a folder must remove the source and carry contents plus the
        per-folder .metadata.json to the destination."""
        self._add_folder("source")
        self._add_folder("destination")
        self._add_file("source/crazyradio.stl", FILE_CRAZYRADIO_STL)

        before_source_metadata = self.storage.get_metadata("source/crazyradio.stl")
        self.storage.move_folder("source", "destination/copied")
        after_source_metadata = self.storage.get_metadata("source/crazyradio.stl")
        copied_metadata = self.storage.get_metadata("destination/copied/crazyradio.stl")

        self.assertFalse(os.path.isdir(os.path.join(self.basefolder, "source")))
        self.assertFalse(
            os.path.isfile(os.path.join(self.basefolder, "source", "crazyradio.stl"))
        )
        self.assertTrue(os.path.isdir(os.path.join(self.basefolder, "destination")))
        self.assertTrue(
            os.path.isdir(os.path.join(self.basefolder, "destination", "copied"))
        )
        self.assertTrue(
            os.path.isfile(
                os.path.join(self.basefolder, "destination", "copied", ".metadata.json")
            )
        )
        self.assertTrue(
            os.path.isfile(
                os.path.join(self.basefolder, "destination", "copied", "crazyradio.stl")
            )
        )

        self.assertIsNotNone(before_source_metadata)
        self.assertIsNone(after_source_metadata)
        self.assertIsNotNone(copied_metadata)
        self.assertDictEqual(before_source_metadata, copied_metadata)
def test_copy_folder_same_name(self):
self._add_folder("folder")
try:
self.storage.copy_folder("folder", "folder")
self.fail("Expected an exception")
except StorageError as e:
self.assertEqual(e.code, StorageError.SOURCE_EQUALS_DESTINATION)
    @data("copy_folder", "move_folder")
    def test_copy_move_folder_different_display(self, operation):
        """Copy/move to a new name must not carry the folder's display name
        over to the destination."""
        self._add_folder("folder", display="földer")

        before_metadata = self.storage.get_metadata("folder")
        # operation is the storage method name supplied by ddt
        getattr(self.storage, operation)("folder", "test")
        after_metadata = self.storage.get_metadata("test")

        self.assertIsNotNone(before_metadata)
        self.assertDictEqual(before_metadata, {"display": "földer"})
        self.assertIsNone(after_metadata)
@data("copy_folder", "move_folder")
def test_copy_move_folder_same(self, operation):
self._add_folder("folder")
try:
getattr(self.storage, operation)("folder", "folder")
self.fail("Expected an exception")
except StorageError as e:
self.assertEqual(e.code, StorageError.SOURCE_EQUALS_DESTINATION)
@data("copy_folder", "move_folder")
def test_copy_move_folder_missing_source(self, operation):
try:
getattr(self.storage, operation)("source", "destination/copied")
self.fail("Expected an exception")
except StorageError as e:
self.assertEqual(e.code, StorageError.INVALID_SOURCE)
@data("copy_folder", "move_folder")
def test_copy_move_folder_missing_destination_folder(self, operation):
self._add_folder("source")
self._add_file("source/crazyradio.stl", FILE_CRAZYRADIO_STL)
try:
getattr(self.storage, operation)("source", "destination/copied")
self.fail("Expected an exception")
except StorageError as e:
self.assertEqual(e.code, StorageError.INVALID_DESTINATION)
@data("copy_folder", "move_folder")
def test_copy_move_folder_existing_destination_path(self, operation):
self._add_folder("source")
self._add_file("source/crazyradio.stl", FILE_CRAZYRADIO_STL)
self._add_folder("destination")
self._add_folder("destination/copied")
try:
getattr(self.storage, operation)("source", "destination/copied")
self.fail("Expected an exception")
except StorageError as e:
self.assertEqual(e.code, StorageError.ALREADY_EXISTS)
    def test_list(self):
        """Recursive listing must report files, folders and folder children
        with their types and hashes."""
        bp_case_stl = self._add_and_verify_file(
            "bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL
        )
        self._add_and_verify_file(
            "bp_case.gcode",
            "bp_case.gcode",
            FILE_BP_CASE_GCODE,
            links=[("model", {"name": bp_case_stl})],
        )
        content_folder = self._add_and_verify_folder("content", "content")
        self._add_and_verify_file(
            (content_folder, "crazyradio.stl"),
            content_folder + "/crazyradio.stl",
            FILE_CRAZYRADIO_STL,
        )
        self._add_and_verify_folder("empty", "empty")

        file_list = self.storage.list_files()
        self.assertEqual(4, len(file_list))
        self.assertTrue("bp_case.stl" in file_list)
        self.assertTrue("bp_case.gcode" in file_list)
        self.assertTrue("content" in file_list)
        self.assertTrue("empty" in file_list)

        self.assertEqual("model", file_list["bp_case.stl"]["type"])
        self.assertEqual(FILE_BP_CASE_STL.hash, file_list["bp_case.stl"]["hash"])

        self.assertEqual("machinecode", file_list["bp_case.gcode"]["type"])
        self.assertEqual(FILE_BP_CASE_GCODE.hash, file_list["bp_case.gcode"]["hash"])

        self.assertEqual("folder", file_list[content_folder]["type"])
        self.assertEqual(1, len(file_list[content_folder]["children"]))
        self.assertTrue("crazyradio.stl" in file_list["content"]["children"])
        self.assertEqual(
            "model", file_list["content"]["children"]["crazyradio.stl"]["type"]
        )
        self.assertEqual(
            FILE_CRAZYRADIO_STL.hash,
            file_list["content"]["children"]["crazyradio.stl"]["hash"],
        )

        self.assertEqual("folder", file_list["empty"]["type"])
        self.assertEqual(0, len(file_list["empty"]["children"]))
def test_list_with_filter(self):
bp_case_stl = self._add_and_verify_file(
"bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL
)
self._add_and_verify_file(
"bp_case.gcode",
"bp_case.gcode",
FILE_BP_CASE_GCODE,
links=[("model", {"name": bp_case_stl})],
)
content_folder = self._add_and_verify_folder("content", "content")
self._add_and_verify_file(
(content_folder, "crazyradio.stl"),
content_folder + "/crazyradio.stl",
FILE_CRAZYRADIO_STL,
)
self._add_and_verify_file(
(content_folder, "bp_case.gcode"),
content_folder + "/bp_case.gcode",
FILE_BP_CASE_GCODE,
)
self._add_and_verify_folder("empty", "empty")
def filter_machinecode(node):
return node["type"] == "machinecode"
file_list = self.storage.list_files(filter=filter_machinecode)
self.assertTrue(3, len(file_list))
self.assertTrue("bp_case.gcode" in file_list)
self.assertTrue("content" in file_list)
self.assertTrue("empty" in file_list)
self.assertEqual("folder", file_list[content_folder]["type"])
self.assertEqual(1, len(file_list[content_folder]["children"]))
self.assertTrue("bp_case.gcode" in file_list[content_folder]["children"])
self.assertEqual("folder", file_list["empty"]["type"])
self.assertEqual(0, len(file_list["empty"]["children"]))
def test_list_without_recursive(self):
bp_case_stl = self._add_and_verify_file(
"bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL
)
self._add_and_verify_file(
"bp_case.gcode",
"bp_case.gcode",
FILE_BP_CASE_GCODE,
links=[("model", {"name": bp_case_stl})],
)
content_folder = self._add_and_verify_folder("content", "content")
self._add_and_verify_file(
(content_folder, "crazyradio.stl"),
content_folder + "/crazyradio.stl",
FILE_CRAZYRADIO_STL,
)
self._add_and_verify_folder("empty", "empty")
file_list = self.storage.list_files(recursive=False)
self.assertTrue(3, len(file_list))
self.assertTrue("bp_case.gcode" in file_list)
self.assertTrue("content" in file_list)
self.assertTrue("empty" in file_list)
self.assertEqual("folder", file_list[content_folder]["type"])
self.assertEqual(0, len(file_list[content_folder]["children"]))
self.assertNotEqual(0, file_list[content_folder]["size"])
self.assertEqual("folder", file_list["empty"]["type"])
self.assertEqual(0, len(file_list["empty"]["children"]))
    def test_add_link_model(self):
        """add_link with rel "model" on a gcode file must create matching
        forward and reverse links including the peer's hash."""
        stl_name = self._add_and_verify_file(
            "bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL
        )
        gcode_name = self._add_and_verify_file(
            "bp_case.gcode", "bp_case.gcode", FILE_BP_CASE_GCODE
        )

        self.storage.add_link(gcode_name, "model", {"name": stl_name})

        stl_metadata = self.storage.get_metadata(stl_name)
        gcode_metadata = self.storage.get_metadata(gcode_name)

        # forward link
        self.assertEqual(1, len(gcode_metadata["links"]))
        link = gcode_metadata["links"][0]
        self.assertEqual("model", link["rel"])
        self.assertTrue("name" in link)
        self.assertEqual(stl_name, link["name"])
        self.assertTrue("hash" in link)
        self.assertEqual(FILE_BP_CASE_STL.hash, link["hash"])

        # reverse link
        self.assertEqual(1, len(stl_metadata["links"]))
        link = stl_metadata["links"][0]
        self.assertEqual("machinecode", link["rel"])
        self.assertTrue("name" in link)
        self.assertEqual(gcode_name, link["name"])
        self.assertTrue("hash" in link)
        self.assertEqual(FILE_BP_CASE_GCODE.hash, link["hash"])
    def test_add_link_machinecode(self):
        """add_link with rel "machinecode" on a model must create the same
        link pair as linking from the gcode side."""
        stl_name = self._add_and_verify_file(
            "bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL
        )
        gcode_name = self._add_and_verify_file(
            "bp_case.gcode", "bp_case.gcode", FILE_BP_CASE_GCODE
        )

        self.storage.add_link(stl_name, "machinecode", {"name": gcode_name})

        stl_metadata = self.storage.get_metadata(stl_name)
        gcode_metadata = self.storage.get_metadata(gcode_name)

        # forward link
        self.assertEqual(1, len(gcode_metadata["links"]))
        link = gcode_metadata["links"][0]
        self.assertEqual("model", link["rel"])
        self.assertTrue("name" in link)
        self.assertEqual(stl_name, link["name"])
        self.assertTrue("hash" in link)
        self.assertEqual(FILE_BP_CASE_STL.hash, link["hash"])

        # reverse link
        self.assertEqual(1, len(stl_metadata["links"]))
        link = stl_metadata["links"][0]
        self.assertEqual("machinecode", link["rel"])
        self.assertTrue("name" in link)
        self.assertEqual(gcode_name, link["name"])
        self.assertTrue("hash" in link)
        self.assertEqual(FILE_BP_CASE_GCODE.hash, link["hash"])
    def test_remove_link(self):
        """remove_link must delete exactly the matching link and ignore
        non-matching ones."""
        stl_name = self._add_and_verify_file(
            "bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL
        )

        self.storage.add_link(stl_name, "web", {"href": "http://www.example.com"})
        self.storage.add_link(stl_name, "web", {"href": "http://www.example2.com"})

        stl_metadata = self.storage.get_metadata(stl_name)
        self.assertEqual(2, len(stl_metadata["links"]))

        self.storage.remove_link(stl_name, "web", {"href": "http://www.example.com"})

        stl_metadata = self.storage.get_metadata(stl_name)
        self.assertEqual(1, len(stl_metadata["links"]))

        # non-matching href: nothing must be removed
        self.storage.remove_link(stl_name, "web", {"href": "wrong_href"})

        stl_metadata = self.storage.get_metadata(stl_name)
        self.assertEqual(1, len(stl_metadata["links"]))
    def test_remove_link_bidirectional(self):
        """Removing one side of a model/machinecode link must also remove the
        reverse link, leaving unrelated links intact."""
        stl_name = self._add_and_verify_file(
            "bp_case.stl", "bp_case.stl", FILE_BP_CASE_STL
        )
        gcode_name = self._add_and_verify_file(
            "bp_case.gcode", "bp_case.gcode", FILE_BP_CASE_GCODE
        )

        self.storage.add_link(stl_name, "machinecode", {"name": gcode_name})
        self.storage.add_link(stl_name, "web", {"href": "http://www.example.com"})

        stl_metadata = self.storage.get_metadata(stl_name)
        gcode_metadata = self.storage.get_metadata(gcode_name)

        self.assertEqual(1, len(gcode_metadata["links"]))
        self.assertEqual(2, len(stl_metadata["links"]))

        self.storage.remove_link(
            gcode_name, "model", {"name": stl_name, "hash": FILE_BP_CASE_STL.hash}
        )

        stl_metadata = self.storage.get_metadata(stl_name)
        gcode_metadata = self.storage.get_metadata(gcode_name)

        # machinecode link gone on both sides, web link survives on the stl
        self.assertEqual(0, len(gcode_metadata["links"]))
        self.assertEqual(1, len(stl_metadata["links"]))
    @data(
        ("", ("", "")),
        ("/", ("", "")),
        ("some_file.gco", ("", "some_file.gco")),
        ("/some_file.gco", ("", "some_file.gco")),
        ("some/folder/and/some file.gco", ("some/folder/and", "some file.gco")),
        ("/some/folder/and/some file.gco", ("some/folder/and", "some file.gco")),
    )
    @unpack
    def test_split_path(self, input, expected):
        """split_path must split into (folder, name), stripping leading
        slashes."""
        actual = self.storage.split_path(input)
        self.assertEqual(expected, actual)
    @data(
        (("", ""), ""),
        (("", "some_file.gco"), "some_file.gco"),
        (("/", "some_file.gco"), "some_file.gco"),
        (("some/folder/and", "some file.gco"), "some/folder/and/some file.gco"),
        (("/some/folder/and", "some file.gco"), "some/folder/and/some file.gco"),
    )
    @unpack
    def test_join_path(self, input, expected):
        """join_path must join segments with '/', dropping leading slashes."""
        actual = self.storage.join_path(*input)
        self.assertEqual(expected, actual)
    @data(
        # (input, expected, really_universal flag)
        ("some_file.gco", "some_file.gco", False),
        ("some file.gco", "some file.gco", False),
        (
            "some_file with (parentheses) and ümläuts and digits 123.gco",
            "some_file with (parentheses) and ümläuts and digits 123.gco",
            False,
        ),
        ("there is no b in häußge.gco", "there is no b in häußge.gco", False),
        ("some file.gco", "some_file.gco", True),
        (
            "some_file with (parentheses) and ümläuts and digits 123.gco",
            "some_file_with_(parentheses)_and_umlauts_and_digits_123.gco",
            True,
        ),
        ("there is no b in häußge.gco", "there_is_no_b_in_haussge.gco", True),
    )
    @unpack
    def test_sanitize_name(self, input, expected, really_universal):
        """sanitize_name must leave names untouched unless really_universal is
        active, in which case it ASCII-fies and replaces spaces."""
        with _set_really_universal(self.storage, really_universal):
            actual = self.storage.sanitize_name(input)
            self.assertEqual(expected, actual)
        # NOTE(review): likely redundant if _set_really_universal restores the
        # flag on exit — confirm against its definition further down the file.
        self.storage._really_universal = False
@data("some/folder/still/left.gco", "also\\no\\backslashes.gco")
def test_sanitize_name_invalid(self, input):
try:
self.storage.sanitize_name(input)
self.fail("expected a ValueError")
except ValueError as e:
self.assertEqual("name must not contain / or \\", e.args[0])
    @data(
        ("folder/with/subfolder", "/folder/with/subfolder"),
        ("folder/with/subfolder/../other/folder", "/folder/with/other/folder"),
        ("/folder/with/leading/slash", "/folder/with/leading/slash"),
        ("folder/with/leading/dot", "/folder/with/leading/dot"),
    )
    @unpack
    def test_sanitize_path(self, input, expected):
        """sanitize_path must resolve '..' segments and anchor the result
        inside the base folder."""
        actual = self.storage.sanitize_path(input)
        self.assertTrue(actual.startswith(self.basefolder))
        # compare the part below the base folder, normalized to '/'
        self.assertEqual(
            expected, actual[len(self.basefolder) :].replace(os.path.sep, "/")
        )
@data("../../folder/out/of/the/basefolder", "some/folder/../../../and/then/back")
def test_sanitize_path_invalid(self, input):
try:
self.storage.sanitize_path(input)
self.fail("expected a ValueError")
except ValueError as e:
self.assertTrue(e.args[0].startswith("path not contained in base folder: "))
@data(
("", "/", "", False),
(
"some/folder/with/trailing/slash/",
"/some/folder/with/trailing/slash",
"",
False,
),
(("some", "folder", ""), "/some/folder", "", False),
("some/folder/and/some file.gco", "/some/folder/and", "some file.gco", False),
(
("some", "folder", "and", "some file.gco"),
"/some/folder/and",
"some file.gco",
False,
),
("some file.gco", "/", "some file.gco", False),
(("some file.gco",), "/", "some file.gco", False),
("some/folder/and/some file.gco", "/some/folder/and", "some_file.gco", True),
(
("some", "folder", "and", "some file.gco"),
"/some/folder/and",
"some_file.gco",
True,
),
("some file.gco", "/", "some_file.gco", True),
(("some file.gco",), "/", "some_file.gco", True),
)
@unpack
def test_sanitize(self, input, expected_path, expected_name, really_universal):
with _set_really_universal(self.storage, really_universal):
actual = self.storage.sanitize(input)
self.assertTrue(isinstance(actual, tuple))
self.assertEqual(2, len(actual))
actual_path, actual_name = actual
self.assertTrue(actual_path.startswith(self.basefolder))
actual_path = actual_path[len(self.basefolder) :].replace(os.path.sep, "/")
if not actual_path.startswith("/"):
# if the actual path originally was just the base folder, we just stripped
# away everything, so let's add a / again so the behaviour matches the
# other preprocessing of our test data here
actual_path = "/" + actual_path
self.assertEqual(expected_path, actual_path)
self.assertEqual(expected_name, actual_name)
def _add_and_verify_file(
self, path, expected_path, file_object, links=None, overwrite=False, display=None
):
"""Adds a file to the storage and verifies the sanitized path."""
sanitized_path = self._add_file(
path, file_object, links=links, overwrite=overwrite, display=display
)
self.assertEqual(expected_path, sanitized_path)
return sanitized_path
def test_migrate_metadata_to_json(self):
metadata = {"test.gco": {"hash": "aabbccddeeff", "links": [], "notes": []}}
yaml_path = os.path.join(self.basefolder, ".metadata.yaml")
json_path = os.path.join(self.basefolder, ".metadata.json")
# prepare
import yaml
with open(yaml_path, "w") as f:
yaml.safe_dump(metadata, f)
# migrate
self.storage._migrate_metadata(self.basefolder)
# verify
self.assertTrue(os.path.exists(json_path))
self.assertFalse(os.path.exists(yaml_path)) # TODO 1.3.10 change to assertFalse
import json
with open(json_path, encoding="utf-8") as f:
json_metadata = json.load(f)
self.assertDictEqual(metadata, json_metadata)
def _add_file(self, path, file_object, links=None, overwrite=False, display=None):
"""
Adds a file to the storage.
Ensures file is present, metadata is present, hash and links (if applicable)
are populated correctly.
Returns sanitized path.
"""
sanitized_path = self.storage.add_file(
path, file_object, links=links, allow_overwrite=overwrite, display=display
)
split_path = sanitized_path.split("/")
if len(split_path) == 1:
file_path = os.path.join(self.basefolder, split_path[0])
folder_path = self.basefolder
else:
file_path = os.path.join(self.basefolder, os.path.join(*split_path))
folder_path = os.path.join(self.basefolder, os.path.join(*split_path[:-1]))
self.assertTrue(os.path.isfile(file_path))
self.assertTrue(os.path.isfile(os.path.join(folder_path, ".metadata.json")))
metadata = self.storage.get_metadata(sanitized_path)
self.assertIsNotNone(metadata)
# assert hash
self.assertTrue("hash" in metadata)
self.assertEqual(file_object.hash, metadata["hash"])
# assert presence of links if supplied
if links:
self.assertTrue("links" in metadata)
return sanitized_path
def _add_and_verify_folder(self, path, expected_path, display=None):
"""Adds a folder to the storage and verifies sanitized path."""
sanitized_path = self._add_folder(path, display=display)
self.assertEqual(expected_path, sanitized_path)
return sanitized_path
def _add_folder(self, path, display=None):
"""
Adds a folder to the storage.
Verifies existence of folder.
Returns sanitized path.
"""
sanitized_path = self.storage.add_folder(path, display=display)
self.assertTrue(
os.path.isdir(
os.path.join(self.basefolder, os.path.join(*sanitized_path.split("/")))
)
)
return sanitized_path
@contextmanager
def _set_really_universal(storage, value):
orig = storage._really_universal
try:
storage._really_universal = value
yield
finally:
storage._really_universal = orig
| 36,140 | Python | .py | 771 | 37.230869 | 103 | 0.61968 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,070 | __init__.py | OctoPrint_OctoPrint/tests/filemanager/__init__.py | """
Unit tests for ``octoprint.filemanager.``.
"""
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
| 290 | Python | .py | 6 | 46.833333 | 103 | 0.72242 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,071 | test_filemanager.py | OctoPrint_OctoPrint/tests/filemanager/test_filemanager.py | __author__ = "Gina Häußge <osd@foosel.net>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import io
import unittest
from unittest import mock
import octoprint.filemanager
import octoprint.filemanager.util
import octoprint.settings
class FilemanagerMethodTest(unittest.TestCase):
def setUp(self):
# mock plugin manager
self.plugin_manager_patcher = mock.patch("octoprint.plugin.plugin_manager")
self.plugin_manager_getter = self.plugin_manager_patcher.start()
self.plugin_manager = mock.MagicMock()
hook_extensions = {
"some_plugin": lambda: dict({"machinecode": {"foo": ["foo", "f"]}}),
"other_plugin": lambda: dict({"model": {"amf": ["amf"]}}),
"mime_map": lambda: {
"mime_map": {
"mime_map_yes": octoprint.filemanager.ContentTypeMapping(
["mime_map_yes"], "application/mime_map_yes"
)
}
},
"mime_detect": lambda: dict(
{
"machinecode": {
"mime_detect_yes": octoprint.filemanager.ContentTypeDetector(
["mime_detect_yes"], lambda x: "application/mime_detect_yes"
),
"mime_detect_no": octoprint.filemanager.ContentTypeDetector(
["mime_detect_no"], lambda x: None
),
}
}
),
}
self.plugin_manager.get_hooks.return_value = hook_extensions
self.plugin_manager_getter.return_value = self.plugin_manager
def tearDown(self):
self.plugin_manager_patcher.stop()
def test_full_extension_tree(self):
full = octoprint.filemanager.full_extension_tree()
self.assertTrue("machinecode" in full)
self.assertTrue("gcode" in full["machinecode"])
self.assertTrue(
isinstance(
full["machinecode"]["gcode"], octoprint.filemanager.ContentTypeMapping
)
)
self.assertSetEqual(
{"gcode", "gco", "g"}, set(full["machinecode"]["gcode"].extensions)
)
self.assertTrue("foo" in full["machinecode"])
self.assertTrue(isinstance(full["machinecode"]["foo"], list))
self.assertSetEqual({"f", "foo"}, set(full["machinecode"]["foo"]))
self.assertTrue("model" in full)
self.assertTrue("amf" in full["model"])
self.assertTrue(isinstance(full["model"]["amf"], list))
self.assertSetEqual({"amf"}, set(full["model"]["amf"]))
def test_get_mimetype(self):
self.assertEqual(octoprint.filemanager.get_mime_type("foo.gcode"), "text/plain")
self.assertEqual(
octoprint.filemanager.get_mime_type("foo.unknown"), "application/octet-stream"
)
self.assertEqual(
octoprint.filemanager.get_mime_type("foo.mime_map_yes"),
"application/mime_map_yes",
)
self.assertEqual(
octoprint.filemanager.get_mime_type("foo.mime_map_no"),
"application/octet-stream",
)
self.assertEqual(
octoprint.filemanager.get_mime_type("foo.mime_detect_yes"),
"application/mime_detect_yes",
)
self.assertEqual(
octoprint.filemanager.get_mime_type("foo.mime_detect_no"),
"application/octet-stream",
)
def test_valid_file_type(self):
self.assertTrue(octoprint.filemanager.valid_file_type("foo.amf", type="model"))
self.assertTrue(octoprint.filemanager.valid_file_type("foo.amf", type="amf"))
self.assertFalse(
octoprint.filemanager.valid_file_type("foo.stl", type="machinecode")
)
self.assertTrue(
octoprint.filemanager.valid_file_type("foo.foo", type="machinecode")
)
self.assertTrue(octoprint.filemanager.valid_file_type("foo.foo", type="foo"))
self.assertTrue(octoprint.filemanager.valid_file_type("foo.foo"))
self.assertTrue(octoprint.filemanager.valid_file_type("foo.mime_map_yes"))
self.assertTrue(octoprint.filemanager.valid_file_type("foo.mime_detect_yes"))
self.assertFalse(octoprint.filemanager.valid_file_type("foo.unknown"))
extension_tree = {
"machinecode": {
"gcode": octoprint.filemanager.ContentTypeMapping(
["gcode", "gco", "g"], "text/plain"
),
"foo": ["foo", "f"],
}
}
# With cached extension tree
self.assertTrue(
octoprint.filemanager.valid_file_type("foo.foo", tree=extension_tree)
)
self.assertFalse(
octoprint.filemanager.valid_file_type("foo.amf", tree=extension_tree)
)
def test_get_file_type(self):
self.assertEqual(
["machinecode", "gcode"], octoprint.filemanager.get_file_type("foo.gcode")
)
self.assertEqual(
["machinecode", "gcode"], octoprint.filemanager.get_file_type("foo.gco")
)
self.assertEqual(
["machinecode", "foo"], octoprint.filemanager.get_file_type("foo.f")
)
self.assertEqual(["model", "amf"], octoprint.filemanager.get_file_type("foo.amf"))
self.assertIsNone(octoprint.filemanager.get_file_type("foo.unknown"))
def test_hook_failure(self):
def hook():
raise RuntimeError("Boo!")
self.plugin_manager.get_hooks.return_value = {"hook": hook}
with mock.patch("octoprint.filemanager.logging") as patched_logging:
logger = mock.MagicMock()
patched_logging.getLogger.return_value = logger
octoprint.filemanager.get_all_extensions()
self.assertEqual(1, len(logger.mock_calls))
class FileManagerTest(unittest.TestCase):
def setUp(self):
import octoprint.filemanager.storage
import octoprint.printer.profile
import octoprint.slicing
self.addCleanup(self.cleanUp)
# mock event manager
self.event_manager_patcher = mock.patch("octoprint.filemanager.eventManager")
event_manager = self.event_manager_patcher.start()
event_manager.return_value.fire = mock.MagicMock()
self.fire_event = event_manager.return_value.fire
# mock plugin manager
self.plugin_manager_patcher = mock.patch("octoprint.plugin.plugin_manager")
self.plugin_manager = self.plugin_manager_patcher.start()
# mock settings
self.settings_patcher = mock.patch("octoprint.settings.settings")
self.settings_getter = self.settings_patcher.start()
self.settings = mock.create_autospec(octoprint.settings.Settings)
self.settings.getBaseFolder.return_value = "/path/to/a/base_folder"
self.settings_getter.return_value = self.settings
self.analysis_queue = mock.MagicMock(spec=octoprint.filemanager.AnalysisQueue)
self.slicing_manager = mock.MagicMock(spec=octoprint.slicing.SlicingManager)
self.printer_profile_manager = mock.MagicMock(
spec=octoprint.printer.profile.PrinterProfileManager
)
self.local_storage = mock.MagicMock(
spec=octoprint.filemanager.storage.LocalFileStorage
)
self.local_storage.analysis_backlog = iter([])
self.storage_managers = {}
self.storage_managers[
octoprint.filemanager.FileDestinations.LOCAL
] = self.local_storage
self.file_manager = octoprint.filemanager.FileManager(
self.analysis_queue,
self.slicing_manager,
self.printer_profile_manager,
initial_storage_managers=self.storage_managers,
)
def cleanUp(self):
self.event_manager_patcher.stop()
self.plugin_manager_patcher.stop()
self.settings_patcher.stop()
def test_add_file(self):
wrapper = object()
self.local_storage.add_file.return_value = ("", "test.gcode")
self.local_storage.path_in_storage.return_value = "test.gcode"
self.local_storage.path_on_disk.return_value = "prefix/test.gcode"
self.local_storage.split_path.return_value = ("", "test.gcode")
test_profile = {"id": "_default", "name": "My Default Profile"}
self.printer_profile_manager.get_current_or_default.return_value = test_profile
file_path = self.file_manager.add_file(
octoprint.filemanager.FileDestinations.LOCAL, "test.gcode", wrapper
)
self.assertEqual(("", "test.gcode"), file_path)
self.local_storage.add_file.assert_called_once_with(
"test.gcode",
wrapper,
printer_profile=test_profile,
allow_overwrite=False,
links=None,
display=None,
)
expected_events = [
mock.call(
octoprint.filemanager.Events.FILE_ADDED,
{
"storage": octoprint.filemanager.FileDestinations.LOCAL,
"name": "test.gcode",
"path": "test.gcode",
"type": ["machinecode", "gcode"],
},
),
mock.call(octoprint.filemanager.Events.UPDATED_FILES, {"type": "printables"}),
]
self.fire_event.call_args_list = expected_events
def test_add_file_display(self):
wrapper = object()
self.local_storage.add_file.return_value = ("", "test.gcode")
self.local_storage.path_in_storage.return_value = "test.gcode"
self.local_storage.path_on_disk.return_value = "prefix/test.gcode"
self.local_storage.split_path.return_value = ("", "test.gcode")
test_profile = {"id": "_default", "name": "My Default Profile"}
self.printer_profile_manager.get_current_or_default.return_value = test_profile
file_path = self.file_manager.add_file(
octoprint.filemanager.FileDestinations.LOCAL,
"test.gcode",
wrapper,
display="täst.gcode",
)
self.assertEqual(("", "test.gcode"), file_path)
self.local_storage.add_file.assert_called_once_with(
"test.gcode",
wrapper,
printer_profile=test_profile,
allow_overwrite=False,
links=None,
display="täst.gcode",
)
def test_remove_file(self):
self.local_storage.path_in_storage.return_value = "test.gcode"
self.local_storage.path_on_disk.return_value = "prefix/test.gcode"
self.local_storage.split_path.return_value = ("", "test.gcode")
self.file_manager.remove_file(
octoprint.filemanager.FileDestinations.LOCAL, "test.gcode"
)
self.local_storage.remove_file.assert_called_once_with("test.gcode")
self.analysis_queue.dequeue.assert_called_once()
expected_events = [
mock.call(
octoprint.filemanager.Events.FILE_REMOVED,
{
"storage": octoprint.filemanager.FileDestinations.LOCAL,
"name": "test.gcode",
"path": "test.gcode",
"type": ["machinecode", "gcode"],
},
),
mock.call(octoprint.filemanager.Events.UPDATED_FILES, {"type": "printables"}),
]
self.fire_event.call_args_list = expected_events
def test_add_folder(self):
self.local_storage.add_folder.return_value = ("", "test_folder")
self.local_storage.split_path.return_value = ("", "test_folder")
folder_path = self.file_manager.add_folder(
octoprint.filemanager.FileDestinations.LOCAL, "test_folder"
)
self.assertEqual(("", "test_folder"), folder_path)
self.local_storage.add_folder.assert_called_once_with(
"test_folder", ignore_existing=True, display=None
)
expected_events = [
mock.call(
octoprint.filemanager.Events.FOLDER_ADDED,
{
"storage": octoprint.filemanager.FileDestinations.LOCAL,
"name": "test_folder",
"path": "test_folder",
},
),
mock.call(octoprint.filemanager.Events.UPDATED_FILES, {"type": "printables"}),
]
self.fire_event.call_args_list = expected_events
def test_add_folder_not_ignoring_existing(self):
self.local_storage.add_folder.side_effect = RuntimeError("already there")
with self.assertRaises(RuntimeError, msg="already there"):
self.file_manager.add_folder(
octoprint.filemanager.FileDestinations.LOCAL,
"test_folder",
ignore_existing=False,
)
self.fail("Expected an exception to occur!")
self.local_storage.add_folder.assert_called_once_with(
"test_folder", ignore_existing=False, display=None
)
def test_add_folder_display(self):
self.local_storage.add_folder.side_effect = RuntimeError("already there")
with self.assertRaises(RuntimeError, msg="already there"):
self.file_manager.add_folder(
octoprint.filemanager.FileDestinations.LOCAL,
"test_folder",
display="täst_folder",
)
self.fail("Expected an exception to occur!")
self.local_storage.add_folder.assert_called_once_with(
"test_folder", ignore_existing=True, display="täst_folder"
)
def test_remove_folder(self):
self.local_storage.path_in_storage.return_value = "test_folder"
self.local_storage.split_path.return_value = ("", "test_folder")
self.file_manager.remove_folder(
octoprint.filemanager.FileDestinations.LOCAL, "test_folder"
)
self.local_storage.remove_folder.assert_called_once_with(
"test_folder", recursive=True
)
self.analysis_queue.dequeue_folder.assert_called_once_with(
octoprint.filemanager.FileDestinations.LOCAL, "test_folder"
)
expected_events = [
mock.call(
octoprint.filemanager.Events.FOLDER_REMOVED,
{
"storage": octoprint.filemanager.FileDestinations.LOCAL,
"name": "test_folder",
"path": "test_folder",
},
),
mock.call(octoprint.filemanager.Events.UPDATED_FILES, {"type": "printables"}),
]
self.fire_event.call_args_list = expected_events
def test_remove_folder_nonrecursive(self):
self.local_storage.path_in_storage.return_value = "test_folder"
self.local_storage.split_path.return_value = ("", "test_folder")
self.file_manager.remove_folder(
octoprint.filemanager.FileDestinations.LOCAL, "test_folder", recursive=False
)
self.local_storage.remove_folder.assert_called_once_with(
"test_folder", recursive=False
)
self.analysis_queue.dequeue_folder.assert_called_once_with(
octoprint.filemanager.FileDestinations.LOCAL, "test_folder"
)
@mock.patch("octoprint.util.atomic_write", create=True)
@mock.patch("octoprint.util.yaml.save_to_file", create=True)
@mock.patch("time.time")
def test_save_recovery_data(
self, mock_time, mock_yaml_save_to_file, mock_atomic_write
):
import os
now = 123456789
path = "some_file.gco"
pos = 1234
recovery_file = os.path.join("/path/to/a/base_folder", "print_recovery_data.yaml")
mock_atomic_write_handle = mock_atomic_write.return_value.__enter__.return_value
mock_time.return_value = now
self.local_storage.path_in_storage.return_value = path
with mock.patch("builtins.open", mock.mock_open(), create=True):
self.file_manager.save_recovery_data(
octoprint.filemanager.FileDestinations.LOCAL, path, pos
)
mock_atomic_write.assert_called_with(
recovery_file, max_permissions=0o666, mode="wt"
)
expected = {
"origin": octoprint.filemanager.FileDestinations.LOCAL,
"path": path,
"pos": pos,
"date": now,
}
mock_yaml_save_to_file.assert_called_with(
expected,
file=mock_atomic_write_handle,
pretty=True,
)
@mock.patch("octoprint.util.atomic_write", create=True)
@mock.patch("octoprint.util.yaml.save_to_file", create=True)
@mock.patch("time.time")
def test_save_recovery_data_with_error(
self, mock_time, mock_yaml_safe_dump, mock_atomic_write
):
path = "some_file.gco"
pos = 1234
self.local_storage.path_in_storage.return_value = path
mock_yaml_safe_dump.side_effect = RuntimeError
with mock.patch("builtins.open", mock.mock_open(), create=True):
self.file_manager.save_recovery_data(
octoprint.filemanager.FileDestinations.LOCAL, path, pos
)
@mock.patch("os.path.isfile")
@mock.patch("os.remove")
def test_delete_recovery_data(self, mock_remove, mock_isfile):
import os
recovery_file = os.path.join("/path/to/a/base_folder", "print_recovery_data.yaml")
mock_isfile.return_value = True
self.file_manager.delete_recovery_data()
mock_remove.assert_called_with(recovery_file)
@mock.patch("os.path.isfile")
@mock.patch("os.remove")
def test_delete_recovery_data_no_file(self, mock_remove, mock_isfile):
mock_isfile.return_value = False
self.file_manager.delete_recovery_data()
self.assertFalse(mock_remove.called)
@mock.patch("os.path.isfile")
@mock.patch("os.remove")
def test_delete_recovery_data_error(self, mock_remove, mock_isfile):
mock_isfile.return_value = True
mock_remove.side_effect = RuntimeError
self.file_manager.delete_recovery_data()
@mock.patch("os.path.isfile", return_value=True)
def test_get_recovery_data(self, mock_isfile):
import os
recovery_file = os.path.join("/path/to/a/base_folder", "print_recovery_data.yaml")
data = {
"path": "some_path.gco",
"origin": "local",
"pos": 1234,
"date": 123456789,
}
# moved safe_load to here so we could mock up the return value properly
with mock.patch("octoprint.util.yaml.load_from_file", return_value=data) as n:
result = self.file_manager.get_recovery_data()
self.assertDictEqual(data, result)
n.assert_called_with(path=recovery_file)
mock_isfile.assert_called_with(recovery_file)
@mock.patch("os.path.isfile")
def test_get_recovery_data_no_file(self, mock_isfile):
mock_isfile.return_value = False
result = self.file_manager.get_recovery_data()
self.assertIsNone(result)
@mock.patch("os.path.isfile")
@mock.patch("octoprint.util.yaml.load_from_file")
@mock.patch("os.remove")
def test_get_recovery_data_broken_file(
self, mock_remove, mock_yaml_load, mock_isfile
):
import os
recovery_file = os.path.join("/path/to/a/base_folder", "print_recovery_data.yaml")
mock_isfile.return_value = True
mock_yaml_load.side_effect = RuntimeError
result = self.file_manager.get_recovery_data()
self.assertIsNone(result)
mock_remove.assert_called_with(recovery_file)
def test_get_metadata(self):
expected = {"key": "value"}
self.local_storage.get_metadata.return_value = expected
metadata = self.file_manager.get_metadata(
octoprint.filemanager.FileDestinations.LOCAL, "test.file"
)
self.assertEqual(metadata, expected)
self.local_storage.get_metadata.assert_called_once_with("test.file")
@mock.patch("octoprint.filemanager.util.atomic_write")
@mock.patch("io.FileIO")
@mock.patch("shutil.copyfileobj")
@mock.patch("os.remove")
@mock.patch("tempfile.NamedTemporaryFile")
@mock.patch("os.chmod")
def test_slice(
self,
mocked_chmod,
mocked_tempfile,
mocked_os,
mocked_shutil,
mocked_fileio,
mocked_atomic_write,
):
callback = mock.MagicMock()
callback_args = ("one", "two", "three")
# mock temporary file
temp_file = mock.MagicMock()
temp_file.name = "tmp.file"
mocked_tempfile.return_value = temp_file
# mock metadata on local storage
metadata = {"hash": "aabbccddeeff"}
self.local_storage.get_metadata.return_value = metadata
# mock printer profile
expected_printer_profile = {"id": "_default", "name": "My Default Profile"}
self.printer_profile_manager.get_current_or_default.return_value = (
expected_printer_profile
)
self.printer_profile_manager.get.return_value = None
# mock path_in_storage method on local storage
def path_in_storage(path):
if isinstance(path, tuple):
path = "/".join(path)
while path.startswith("/"):
path = path[1:]
return path
self.local_storage.path_in_storage.side_effect = path_in_storage
# mock path_on_disk method on local storage
def path_on_disk(path):
if isinstance(path, tuple):
import os
joined_path = ""
for part in path:
joined_path = os.path.join(joined_path, part)
path = joined_path
return "prefix/" + path
self.local_storage.path_on_disk.side_effect = path_on_disk
# mock split_path method on local storage - no folder support
def split_path(path):
return "", path
self.local_storage.split_path.side_effect = split_path
# mock add_file method on local storage
def add_file(
path,
file_obj,
printer_profile=None,
links=None,
allow_overwrite=False,
display=None,
):
file_obj.save("prefix/" + path)
return path
self.local_storage.add_file.side_effect = add_file
# mock slice method on slicing manager
def slice(
slicer_name,
source_path,
dest_path,
profile,
done_cb,
printer_profile_id=None,
position=None,
callback_args=None,
overrides=None,
on_progress=None,
on_progress_args=None,
on_progress_kwargs=None,
):
self.assertEqual("some_slicer", slicer_name)
self.assertEqual("prefix/source.file", source_path)
self.assertEqual("tmp.file", dest_path)
self.assertIsNone(profile)
self.assertIsNone(overrides)
self.assertIsNone(printer_profile_id)
self.assertIsNone(position)
self.assertIsNotNone(on_progress)
self.assertIsNotNone(on_progress_args)
self.assertTupleEqual(
(
"some_slicer",
octoprint.filemanager.FileDestinations.LOCAL,
"source.file",
octoprint.filemanager.FileDestinations.LOCAL,
"dest.file",
),
on_progress_args,
)
self.assertIsNone(on_progress_kwargs)
if not callback_args:
callback_args = ()
done_cb(*callback_args)
self.slicing_manager.slice.side_effect = slice
##~~ execute tested method
self.file_manager.slice(
"some_slicer",
octoprint.filemanager.FileDestinations.LOCAL,
"source.file",
octoprint.filemanager.FileDestinations.LOCAL,
"dest.file",
callback=callback,
callback_args=callback_args,
)
# assert that events where fired
expected_events = [
mock.call(
octoprint.filemanager.Events.SLICING_STARTED,
{"stl": "source.file", "gcode": "dest.file", "progressAvailable": False},
),
mock.call(
octoprint.filemanager.Events.SLICING_DONE,
{"stl": "source.file", "gcode": "dest.file", "time": 15.694000005722046},
),
mock.call(
octoprint.filemanager.Events.FILE_ADDED,
{
"storage": octoprint.filemanager.FileDestinations.LOCAL,
"name": "dest.file",
"path": "dest.file",
"type": None,
},
),
]
self.fire_event.call_args_list = expected_events
# assert that model links were added
expected_links = [("model", {"name": "source.file"})]
self.local_storage.add_file.assert_called_once_with(
"dest.file",
mock.ANY,
printer_profile=expected_printer_profile,
allow_overwrite=True,
links=expected_links,
display=None,
)
# assert that the generated gcode was manipulated as required
expected_atomic_write_calls = [mock.call("prefix/dest.file", mode="wb")]
self.assertEqual(mocked_atomic_write.call_args_list, expected_atomic_write_calls)
# mocked_open.return_value.write.assert_called_once_with(";Generated from source.file aabbccddeeff\r")
# assert that shutil was asked to copy the concatenated multistream
self.assertEqual(2, len(mocked_shutil.call_args_list))
self.assertTrue(isinstance(mocked_shutil.call_args_list[0][0][0], io.BytesIO))
# assert that the temporary file was deleted
mocked_os.assert_called_once_with("tmp.file")
# assert that our callback was called with the supplied arguments
callback.assert_called_once_with(*callback_args)
@mock.patch("os.remove")
@mock.patch("tempfile.NamedTemporaryFile")
def test_slice_error(self, mocked_tempfile, mocked_os):
callback = mock.MagicMock()
callback_args = ("one", "two", "three")
# mock temporary file
temp_file = mock.MagicMock()
temp_file.name = "tmp.file"
mocked_tempfile.return_value = temp_file
# mock path_on_disk method on local storage
def path_on_disk(path):
if isinstance(path, tuple):
import os
joined_path = ""
for part in path:
joined_path = os.path.join(joined_path, part)
path = joined_path
return "prefix/" + path
self.local_storage.path_on_disk.side_effect = path_on_disk
# mock slice method on slicing manager
def slice(
slicer_name,
source_path,
dest_path,
profile,
done_cb,
printer_profile_id=None,
position=None,
callback_args=None,
overrides=None,
on_progress=None,
on_progress_args=None,
on_progress_kwargs=None,
):
self.assertEqual("some_slicer", slicer_name)
self.assertEqual("prefix/source.file", source_path)
self.assertEqual("tmp.file", dest_path)
self.assertIsNone(profile)
self.assertIsNone(overrides)
self.assertIsNone(printer_profile_id)
self.assertIsNone(position)
self.assertIsNotNone(on_progress)
self.assertIsNotNone(on_progress_args)
self.assertTupleEqual(
(
"some_slicer",
octoprint.filemanager.FileDestinations.LOCAL,
"source.file",
octoprint.filemanager.FileDestinations.LOCAL,
"dest.file",
),
on_progress_args,
)
self.assertIsNone(on_progress_kwargs)
if not callback_args:
callback_args = ()
done_cb(*callback_args, _error="Something went wrong")
self.slicing_manager.slice.side_effect = slice
##~~ execute tested method
self.file_manager.slice(
"some_slicer",
octoprint.filemanager.FileDestinations.LOCAL,
"source.file",
octoprint.filemanager.FileDestinations.LOCAL,
"dest.file",
callback=callback,
callback_args=callback_args,
)
# assert that events where fired
expected_events = [
mock.call(
octoprint.filemanager.Events.SLICING_STARTED,
{"stl": "source.file", "gcode": "dest.file"},
),
mock.call(
octoprint.filemanager.Events.SLICING_FAILED,
{
"stl": "source.file",
"gcode": "dest.file",
"reason": "Something went wrong",
},
),
]
self.fire_event.call_args_list = expected_events
# assert that the temporary file was deleted
mocked_os.assert_called_once_with("tmp.file")
| 29,816 | Python | .py | 684 | 32.111111 | 110 | 0.596682 | OctoPrint/OctoPrint | 8,222 | 1,667 | 264 | AGPL-3.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,072 | setup.py | wummel_linkchecker/setup.py | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Setup file for the distutils module.
It includes the following features:
- creation and installation of configuration files with installation data
- automatic detection and usage of GNU99 standard for C compiler
- automatic MANIFEST.in check
- automatic generation of .mo locale files
- automatic permission setting on POSIX systems for installed files
Because of all the features, this script is nasty and big.
Change it very carefully.
"""
from __future__ import print_function
import sys
# Abort on interpreters older than 2.7. The previous condition
# `not (hasattr(sys, 'version_info') or version < (2, 7, ...))` was always
# False (hasattr is True on every supported interpreter), so the guard could
# never fire; it must trip when version_info is missing OR too old.
if not hasattr(sys, 'version_info') or sys.version_info < (2, 7, 0, 'final', 0):
    raise SystemExit("This program requires Python 2.7 or later.")
import os
import re
import codecs
import subprocess
import stat
import glob
import shutil
try:
unicode
except NameError:
unicode = lambda x: x
# import Distutils stuff
from setuptools import setup
from distutils.core import Extension
from distutils.command.install_lib import install_lib
from distutils.command.build_ext import build_ext
from distutils.command.sdist import sdist
from distutils.command.clean import clean
from distutils.command.install_data import install_data
from distutils.dir_util import remove_tree
from distutils.file_util import write_file
from distutils import util, log
from distutils.core import Distribution
from distutils.command.build import build
# the application version
AppVersion = "9.4"
# the application name
AppName = "LinkChecker"
# the short one-line description used by setup() and as README fallback
Description = "check links in web documents or full websites"
def get_long_description():
    """Return the contents of README.rst, falling back to the short description.

    Only I/O errors (missing or unreadable README) trigger the fallback; the
    previous bare ``except`` silently swallowed every exception, hiding real
    bugs.
    """
    try:
        with open('README.rst') as f:
            return f.read()
    except EnvironmentError:
        return Description
def normpath(path):
    """Normalize a path name to platform specific notation."""
    return os.path.normpath(path)
def cnormpath(path):
    """Normalize a path to platform notation; on Windows also make it absolute."""
    result = normpath(path)
    if os.name != 'nt':
        return result
    # on Windows use backslashes and anchor relative paths at sys.prefix
    result = result.replace("/", "\\")
    if os.path.isabs(result):
        return result
    return normpath(os.path.join(sys.prefix, result))
# matches e.g. "(released 17.7.2014)" on the changelog's first line
release_ro = re.compile(r"\(released (.+)\)")
def get_release_date():
    """Parse and return the release date as a string from doc/changelog.txt.

    Returns "unknown" if the first line carries no release date.
    """
    fname = os.path.join("doc", "changelog.txt")
    release_date = "unknown"
    with open(fname) as fd:
        # the release date is on the first line
        line = fd.readline()
        mo = release_ro.search(line)
        if mo:
            # bug fix: group(1) yields the matched date string; the previous
            # groups(1) returned the whole tuple of groups instead of a string
            release_date = mo.group(1)
    return release_date
def get_portable():
    """Return the LINKCHECKER_PORTABLE environment flag, defaulting to '0'."""
    portable = os.environ.get('LINKCHECKER_PORTABLE')
    return portable if portable is not None else '0'
class MyInstallLib (install_lib, object):
    """Custom library installation."""
    def install (self):
        """Install the generated config file."""
        outs = super(MyInstallLib, self).install()
        infile = self.create_conf_file()
        outfile = os.path.join(self.install_dir, os.path.basename(infile))
        self.copy_file(infile, outfile)
        outs.append(outfile)
        return outs
    def create_conf_file (self):
        """Create the _linkchecker_configdata-style configuration file and
        return its path.

        The file records the resolved install_* directories (and the derived
        config_dir) so the installed program can locate its data at runtime.
        """
        cmd_obj = self.distribution.get_command_obj("install")
        cmd_obj.ensure_finalized()
        # we have to write a configuration file because we need the
        # <install_data> directory (and other stuff like author, url, ...)
        # all paths are made absolute by cnormpath()
        data = []
        for d in ['purelib', 'platlib', 'lib', 'headers', 'scripts', 'data']:
            attr = 'install_%s' % d
            if cmd_obj.root:
                # cut off root path prefix
                cutoff = len(cmd_obj.root)
                # don't strip the path separator
                if cmd_obj.root.endswith(os.sep):
                    cutoff -= 1
                val = getattr(cmd_obj, attr)[cutoff:]
            else:
                val = getattr(cmd_obj, attr)
            if attr == 'install_data':
                # the runtime config dir lives below the data directory
                cdir = os.path.join(val, "share", "linkchecker")
                data.append('config_dir = %r' % cnormpath(cdir))
            elif attr == 'install_lib':
                # remember the (root-prefixed) lib dir - the conf file is
                # written there and later byte-compiled from there
                if cmd_obj.root:
                    _drive, tail = os.path.splitdrive(val)
                    if tail.startswith(os.sep):
                        tail = tail[1:]
                    self.install_lib = os.path.join(cmd_obj.root, tail)
                else:
                    self.install_lib = val
            data.append("%s = %r" % (attr, cnormpath(val)))
        self.distribution.create_conf_file(data, directory=self.install_lib)
        return self.get_conf_output()
    def get_conf_output (self):
        """Get name of configuration file."""
        return self.distribution.get_conf_filename(self.install_lib)
    def get_outputs (self):
        """Add the generated config file to the list of outputs."""
        outs = super(MyInstallLib, self).get_outputs()
        conf_output = self.get_conf_output()
        outs.append(conf_output)
        if self.compile:
            # also account for the byte-compiled form of the conf file
            outs.extend(self._bytecode_filenames([conf_output]))
        return outs
class MyInstallData (install_data, object):
    """Fix file permissions of installed data files."""
    def run (self):
        """Adjust permissions on POSIX systems."""
        super(MyInstallData, self).run()
        self.fix_permissions()
    def fix_permissions (self):
        """Set correct read permissions on POSIX systems. Might also
        be possible by setting umask?"""
        if os.name == 'posix' and not self.dry_run:
            # Make the data files we just installed world-readable,
            # and the directories world-executable as well.
            for path in self.get_outputs():
                mode = os.stat(path)[stat.ST_MODE]
                if stat.S_ISDIR(mode):
                    mode |= 0o11  # add group+other execute bits
                mode |= 0o44  # add group+other read bits
                os.chmod(path, mode)
class MyDistribution (Distribution, object):
    """Custom distribution class generating config file."""
    def __init__ (self, attrs):
        """Set console and windows scripts."""
        super(MyDistribution, self).__init__(attrs)
        self.console = ['linkchecker']
    def run_commands (self):
        """Generate a config file pointing at the source tree (for
        running in place), then run commands."""
        cwd = os.getcwd()
        data = []
        data.append('config_dir = %r' % os.path.join(cwd, "config"))
        data.append("install_data = %r" % cwd)
        data.append("install_scripts = %r" % cwd)
        self.create_conf_file(data)
        super(MyDistribution, self).run_commands()
    def get_conf_filename (self, directory):
        """Get name for config file."""
        return os.path.join(directory, "_%s_configdata.py" % self.get_name())
    def create_conf_file (self, data, directory=None):
        """Create local config file from given data (list of lines) in
        the directory (or current directory if not given)."""
        data.insert(0, "# this file is automatically created by setup.py")
        data.insert(0, "# -*- coding: iso-8859-1 -*-")
        if directory is None:
            directory = os.getcwd()
        filename = self.get_conf_filename(directory)
        # add metadata
        metanames = ("name", "version", "author", "author_email",
                     "maintainer", "maintainer_email", "url",
                     "license", "description", "long_description",
                     "keywords", "platforms", "fullname", "contact",
                     "contact_email")
        for name in metanames:
            method = "get_" + name
            val = getattr(self.metadata, method)()
            if isinstance(val, str):
                # Python 2 only: promote byte strings to unicode text
                val = unicode(val)
            cmd = "%s = %r" % (name, val)
            data.append(cmd)
        data.append('release_date = "%s"' % get_release_date())
        data.append('portable = %s' % get_portable())
        # write the config file
        util.execute(write_file, (filename, data),
                     "creating %s" % filename, self.verbose >= 1, self.dry_run)
def cc_run (args):
    """Run the C compiler with a simple main program.
    @return: successful exit flag
    @rtype: bool
    """
    prog = b"int main(){}\n"
    pipe = subprocess.Popen(args,
        stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True)
    # feed the test program on stdin and wait for the compiler to finish
    pipe.communicate(input=prog)
    # NOTE(review): Popen.returncode is already a decoded exit code
    # (negative on signal), while os.WIFEXITED expects a raw wait
    # status -- confirm this works as intended on the target platforms.
    if os.WIFEXITED(pipe.returncode):
        return os.WEXITSTATUS(pipe.returncode) == 0
    return False
def cc_supports_option (cc, option):
    """Check if the given C compiler supports the given option.
    @return: True if the compiler supports the option, else False
    @rtype: bool
    """
    # run only the preprocessor (-E) with the program read from stdin ("-")
    return cc_run([cc[0], "-E", option, "-"])
class MyBuildExt (build_ext, object):
    """Custom build extension command."""
    def build_extensions (self):
        """Add -std=gnu99 to build options if supported."""
        # For gcc >= 3 we can add -std=gnu99 to get rid of warnings.
        extra = []
        if self.compiler.compiler_type == 'unix':
            option = "-std=gnu99"
            if cc_supports_option(self.compiler.compiler, option):
                extra.append(option)
        # First, sanity-check the 'extensions' list
        self.check_extensions_list(self.extensions)
        for ext in self.extensions:
            # avoid adding the same flag twice on repeated builds
            for opt in extra:
                if opt not in ext.extra_compile_args:
                    ext.extra_compile_args.append(opt)
            self.build_extension(ext)
def list_message_files (package, suffix=".mo"):
    """Yield (message file, installation path) pairs for all found
    message files."""
    # the gettext domain is derived from the lowercased package name
    domainname = "%s.mo" % package.lower()
    for fname in glob.glob("po/*" + suffix):
        # basename (without extension) is a locale name
        localename = os.path.splitext(os.path.basename(fname))[0]
        target = os.path.join(
            "share", "locale", localename, "LC_MESSAGES", domainname)
        yield (fname, target)
def check_manifest ():
    """Snatched from roundup.sf.net.
    Check that the files listed in the MANIFEST are present when the
    source is unpacked.  Prints warnings only; always returns None.
    """
    try:
        # 'with' guarantees the file handle is closed (the original used a
        # manual try/finally around readlines)
        with open('MANIFEST') as f:
            manifest = [line.strip() for line in f
                        if not line.startswith('#')]
    except EnvironmentError:
        print('\n*** SOURCE WARNING: The MANIFEST file is missing!')
        return
    missing = [entry for entry in manifest if not os.path.exists(entry)]
    if missing:
        total = len(manifest)
        print('\n*** SOURCE WARNING: There are files missing (%d/%d found)!' %
              (total - len(missing), total))
        # bugfix: the original "'\nMissing: '.join(err)" left the first
        # missing file without the "Missing: " prefix
        print('Missing: ' + '\nMissing: '.join(missing))
class MyBuild (build, object):
    """Custom build command that verifies the MANIFEST first."""
    def run (self):
        """Warn about missing MANIFEST entries, then build as usual."""
        check_manifest()
        super(MyBuild, self).run()
class MyClean (clean, object):
    """Custom clean command."""
    def run (self):
        """Remove the generated build/share directory on 'clean --all'."""
        if self.all:
            # remove share directory
            directory = os.path.join("build", "share")
            if os.path.exists(directory):
                remove_tree(directory, dry_run=self.dry_run)
            else:
                log.warn("'%s' does not exist -- can't clean it", directory)
        clean.run(self)
class MySdist (sdist, object):
    """Custom sdist command."""
    def get_file_list (self):
        """Add MANIFEST itself to the file list so it is shipped in the
        source tarball."""
        super(MySdist, self).get_file_list()
        self.filelist.append("MANIFEST")
# global include dirs
include_dirs = []
# global macros
define_macros = []
# compiler args
extra_compile_args = []
# library directories
library_dirs = []
# libraries
libraries = []
# scripts
scripts = ['linkchecker']
if os.name == 'nt':
    # windows does not have unistd.h
    define_macros.append(('YY_NO_UNISTD_H', None))
else:
    extra_compile_args.append("-pedantic")
    if sys.platform == 'darwin':
        # macOS libc already provides strlcpy/strlcat
        define_macros.extend([('HAVE_STRLCPY', None), ('HAVE_STRLCAT', None)])
myname = "Bastian Kleineidam"
myemail = "bastian.kleineidam@web.de"
# static data files: configuration, help collection and CGI examples
data_files = [
    ('share/linkchecker',
     ['config/linkcheckerrc',
      'doc/html/lccollection.qhc', 'doc/html/lcdoc.qch']),
    ('share/linkchecker/examples',
     ['cgi-bin/lconline/leer.html.en',
      'cgi-bin/lconline/leer.html.de',
      'cgi-bin/lconline/index.html',
      'cgi-bin/lconline/lc_cgi.html.en',
      'cgi-bin/lconline/lc_cgi.html.de',
      'cgi-bin/lconline/check.js',
      'cgi-bin/lc.wsgi',
      'config/linkchecker.apache2.conf',
     ]),
]
# add compiled translation catalogs found under po/
for (src, dst) in list_message_files(AppName):
    data_files.append((dst, [src]))
if os.name == 'posix':
    # man pages (English and German) and shell helper examples
    data_files.append(('share/man/man1', ['doc/en/linkchecker.1']))
    data_files.append(('share/man/man5', ['doc/en/linkcheckerrc.5']))
    data_files.append(('share/man/de/man1', ['doc/de/linkchecker.1']))
    data_files.append(('share/man/de/man5', ['doc/de/linkcheckerrc.5']))
    data_files.append(('share/linkchecker/examples',
        ['config/linkchecker-completion',
         'doc/examples/check_blacklist.sh',
         'doc/examples/check_for_x_errors.sh',
         'doc/examples/check_urls.sh']))
    data_files.append(('share/applications', ['doc/linkchecker.desktop']))
# keyword arguments for the distutils setup() call below; custom command
# classes defined above are wired in via distclass/cmdclass
args = dict(
    name = AppName,
    version = AppVersion,
    description = Description,
    keywords = "link,url,site,checking,crawling,verification,validation",
    author = myname,
    author_email = myemail,
    maintainer = myname,
    maintainer_email = myemail,
    url = "http://wummel.github.io/linkchecker/",
    license = "GPL",
    long_description = get_long_description(),
    distclass = MyDistribution,
    cmdclass = {
        'install_lib': MyInstallLib,
        'install_data': MyInstallData,
        'build_ext': MyBuildExt,
        'build': MyBuild,
        'clean': MyClean,
        'sdist': MySdist,
    },
    # the bundled dnspython lives under third_party/
    package_dir = {
        'linkcheck_dns.dns': 'third_party/dnspython/dns',
    },
    packages = [
        'linkcheck',
        'linkcheck.bookmarks',
        'linkcheck.cache',
        'linkcheck.checker',
        'linkcheck.configuration',
        'linkcheck.director',
        'linkcheck.htmlutil',
        'linkcheck.HtmlParser',
        'linkcheck.logger',
        'linkcheck.network',
        'linkcheck.parser',
        'linkcheck.plugins',
        'linkcheck_dns.dns',
        'linkcheck_dns.dns.rdtypes',
        'linkcheck_dns.dns.rdtypes.ANY',
        'linkcheck_dns.dns.rdtypes.IN',
    ],
    ext_modules = [
        Extension('linkcheck.HtmlParser.htmlsax',
            sources = [
                'linkcheck/HtmlParser/htmllex.c',
                'linkcheck/HtmlParser/htmlparse.c',
                'linkcheck/HtmlParser/s_util.c',
            ],
            extra_compile_args = extra_compile_args,
            library_dirs = library_dirs,
            libraries = libraries,
            define_macros = define_macros + [('YY_NO_INPUT', None)],
            include_dirs = include_dirs + [normpath("linkcheck/HtmlParser")],
        ),
        Extension("linkcheck.network._network",
            sources = ["linkcheck/network/_network.c"],
            extra_compile_args = extra_compile_args,
            library_dirs = library_dirs,
            libraries = libraries,
            define_macros = define_macros,
            include_dirs = include_dirs,
        ),
    ],
    scripts = scripts,
    data_files = data_files,
    classifiers = [
        'Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking',
        'Development Status :: 5 - Production/Stable',
        'License :: OSI Approved :: GNU General Public License (GPL)',
        'Programming Language :: Python',
        'Programming Language :: C',
    ],
    options = {
    },
    # Requirements, usable with setuptools or the new Python packaging module.
    # Commented out since they are untested and not officially supported.
    # See also doc/install.txt for more detailed dependency documentation.
    #extra_requires = {
    #    "IP country info": ['GeoIP'], # http://www.maxmind.com/app/python
    #    "GNOME proxies": ['pygtk'], # http://www.pygtk.org/downloads.html
    #    "Bash completion": ['argcomplete'], # https://pypi.python.org/pypi/argcomplete
    #    "Memory debugging": ['meliae'], # https://launchpad.net/meliae
    #}
)
setup(**args)
| 17,166 | Python | .py | 433 | 32.145497 | 87 | 0.626125 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,073 | .pydevproject | wummel_linkchecker/.pydevproject | <?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?>
<pydev_project>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
<path>/linkchecker-git</path>
</pydev_pathproperty>
</pydev_project>
| 421 | Python | .py | 9 | 45.666667 | 91 | 0.776156 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,074 | chat_demo.py | wummel_linkchecker/third_party/miniboa-r42/chat_demo.py | #!/usr/bin/env python
#------------------------------------------------------------------------------
# chat_demo.py
# Copyright 2009 Jim Storch
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#------------------------------------------------------------------------------
"""
Chat Room Demo for Miniboa.
"""
from miniboa import TelnetServer
# seconds of inactivity before kick_idle() deactivates a client
IDLE_TIMEOUT = 300
# all currently connected clients
CLIENT_LIST = []
# main-loop flag; the 'shutdown' chat command sets it to False
SERVER_RUN = True
def on_connect(client):
    """
    Sample on_connect function.
    Greets the new connection and registers it in the client list.
    """
    who = client.addrport()
    print("++ Opened connection to %s" % who)
    broadcast('%s joins the conversation.\n' % who)
    CLIENT_LIST.append(client)
    client.send("Welcome to the Chat Server, %s.\n" % who)
def on_disconnect(client):
    """
    Sample on_disconnect function.
    Deregisters the lost connection and announces the departure.
    """
    who = client.addrport()
    print("-- Lost connection to %s" % who)
    CLIENT_LIST.remove(client)
    broadcast('%s leaves the conversation.\n' % who)
def kick_idle():
    """
    Deactivate clients that have been idle longer than IDLE_TIMEOUT;
    the server disconnects them on its next poll.
    """
    for client in CLIENT_LIST:
        ## skip anyone who has been typing recently
        if client.idle() <= IDLE_TIMEOUT:
            continue
        print('-- Kicking idle lobby client from %s' % client.addrport())
        client.active = False
def process_clients():
    """
    Check each client; if client.cmd_ready is True there is a line of
    input available via client.get_command(), handled by chat().
    """
    for client in CLIENT_LIST:
        if not (client.active and client.cmd_ready):
            continue
        ## echo the pending input line to the chat room
        chat(client)
def broadcast(msg):
    """
    Send msg to every connected client.
    """
    for guest in CLIENT_LIST:
        guest.send(msg)
def chat(client):
    """
    Echo one line of client input to everyone and handle the special
    'bye' (disconnect) and 'shutdown' (stop server) commands.
    """
    global SERVER_RUN
    msg = client.get_command()
    print('%s says, "%s"' % (client.addrport(), msg))
    for guest in CLIENT_LIST:
        if guest == client:
            guest.send('You say, %s\n' % msg)
        else:
            guest.send('%s says, %s\n' % (client.addrport(), msg))
    cmd = msg.lower()
    if cmd == 'bye':
        ## bye = disconnect
        client.active = False
    elif cmd == 'shutdown':
        ## shutdown = stop the server
        SERVER_RUN = False
#------------------------------------------------------------------------------
# Main
#------------------------------------------------------------------------------
if __name__ == '__main__':
    ## Simple chat server to demonstrate connection handling via the
    ## async and telnet modules.
    ## Create a telnet server with a port, address,
    ## a function to call with new connections
    ## and one to call with lost connections.
    telnet_server = TelnetServer(
        port=7777,
        address='',
        on_connect=on_connect,
        on_disconnect=on_disconnect,
        ## timeout: presumably the seconds poll() blocks waiting for
        ## socket activity -- confirm against TelnetServer
        timeout = .05
        )
    print(">> Listening for connections on port %d. CTRL-C to break."
        % telnet_server.port)
    ## Server Loop
    while SERVER_RUN:
        telnet_server.poll() ## Send, Recv, and look for new connections
        kick_idle() ## Check for idle clients
        process_clients() ## Check for client input
    print(">> Server shutdown.")
| 3,854 | Python | .py | 104 | 31.846154 | 79 | 0.589689 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,075 | handler_demo.py | wummel_linkchecker/third_party/miniboa-r42/handler_demo.py | #!/usr/bin/env python
#------------------------------------------------------------------------------
# handler_demo.py
# Copyright 2009 Jim Storch
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#------------------------------------------------------------------------------
"""
Example of using on_connect and on_disconnect handlers.
"""
from miniboa import TelnetServer
# all currently connected clients, in connection order
CLIENTS = []
def my_on_connect(client):
    """
    Example on_connect handler: greet the new client, list who else is
    connected, then register the client.
    """
    client.send('You connected from %s\n' % client.addrport())
    if not CLIENTS:
        client.send('Sadly, you are alone.\n')
    else:
        client.send('Also connected are:\n')
        for neighbor in CLIENTS:
            client.send('%s\n' % neighbor.addrport())
    CLIENTS.append(client)
def my_on_disconnect(client):
    """
    Example on_disconnect handler.
    Forget the client so it is no longer listed to newcomers.
    """
    CLIENTS.remove(client)
server = TelnetServer()
# replace the server's placeholder handlers with our own
server.on_connect=my_on_connect
server.on_disconnect=my_on_disconnect
print "\n\nStarting server on port %d. CTRL-C to interrupt.\n" % server.port
# poll() handles new connections plus client send/recv each pass
while True:
    server.poll()
| 1,583 | Python | .py | 41 | 35.292683 | 79 | 0.64337 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,076 | hello_demo.py | wummel_linkchecker/third_party/miniboa-r42/hello_demo.py | #!/usr/bin/env python
#------------------------------------------------------------------------------
# hello_demo.py
# Copyright 2009 Jim Storch
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#------------------------------------------------------------------------------
"""
As simple as it gets.
Launch the Telnet server on the default port and greet visitors using the
placeholder 'on_connect()' function. Does nothing else.
"""
from miniboa import TelnetServer
# create the server on the default port and enter the polling loop;
# poll() handles new connections plus client send/recv each pass
server = TelnetServer()
print "\n\nStarting server on port %d. CTRL-C to interrupt.\n" % server.port
while True:
    server.poll()
| 1,110 | Python | .py | 23 | 46.913043 | 79 | 0.65097 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,077 | error.py | wummel_linkchecker/third_party/miniboa-r42/miniboa/error.py | # -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# miniboa/error.py
# Copyright 2009 Jim Storch
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#------------------------------------------------------------------------------
class BogConnectionLost(Exception):
    """
    Custom exception raised to signal that a client's connection to the
    Telnet server was lost.
    """
| 918 | Python | .py | 18 | 49.055556 | 79 | 0.614016 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,078 | telnet.py | wummel_linkchecker/third_party/miniboa-r42/miniboa/telnet.py | # -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# miniboa/telnet.py
# Copyright 2009 Jim Storch
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#------------------------------------------------------------------------------
"""
Manage one Telnet client connected via a TCP/IP socket.
"""
import socket
import time
from miniboa.error import BogConnectionLost
from miniboa.xterm import colorize
from miniboa.xterm import word_wrap
#---[ Telnet Notes ]-----------------------------------------------------------
# (See RFC 854 for more information)
#
# Negotiating a Local Option
# --------------------------
#
# Side A begins with:
#
# "IAC WILL/WONT XX" Meaning "I would like to [use|not use] option XX."
#
# Side B replies with either:
#
# "IAC DO XX" Meaning "OK, you may use option XX."
# "IAC DONT XX" Meaning "No, you cannot use option XX."
#
#
# Negotiating a Remote Option
# ----------------------------
#
# Side A begins with:
#
# "IAC DO/DONT XX" Meaning "I would like YOU to [use|not use] option XX."
#
# Side B replies with either:
#
# "IAC WILL XX" Meaning "I will begin using option XX"
# "IAC WONT XX" Meaning "I will not begin using option XX"
#
#
# The syntax is designed so that if both parties receive simultaneous requests
# for the same option, each will see the other's request as a positive
# acknowledgement of it's own.
#
# If a party receives a request to enter a mode that it is already in, the
# request should not be acknowledged.
## Where you see DE in my comments I mean 'Distant End', e.g. the client.
# option state that has been neither requested nor reported yet
UNKNOWN = -1
#--[ Telnet Commands ]---------------------------------------------------------
SE      = chr(240)      # End of subnegotiation parameters
NOP     = chr(241)      # No operation
DATMK   = chr(242)      # Data stream portion of a sync.
BREAK   = chr(243)      # NVT Character BRK
IP      = chr(244)      # Interrupt Process
AO      = chr(245)      # Abort Output
AYT     = chr(246)      # Are you there
EC      = chr(247)      # Erase Character
EL      = chr(248)      # Erase Line
GA      = chr(249)      # The Go Ahead Signal
SB      = chr(250)      # Sub-option to follow
WILL    = chr(251)      # Will; request or confirm option begin
WONT    = chr(252)      # Wont; deny option request
DO      = chr(253)      # Do = Request or confirm remote option
DONT    = chr(254)      # Don't = Demand or confirm option halt
IAC     = chr(255)      # Interpret as Command
# NOTE: 001 and 000 below are Python-2 octal literals
SEND    = chr(001)      # Sub-process negotiation SEND command
IS      = chr(000)      # Sub-process negotiation IS command
#--[ Telnet Options ]----------------------------------------------------------
BINARY  = chr(  0)      # Transmit Binary
ECHO    = chr(  1)      # Echo characters back to sender
RECON   = chr(  2)      # Reconnection
SGA     = chr(  3)      # Suppress Go-Ahead
TTYPE   = chr( 24)      # Terminal Type
NAWS    = chr( 31)      # Negotiate About Window Size
LINEMO  = chr( 34)      # Line Mode
#-----------------------------------------------------------------Telnet Option
class TelnetOption(object):
    """
    Simple class used to track the status of an extended Telnet option.
    Both sides of the option start out in the UNKNOWN state with no
    reply outstanding.
    """
    def __init__(self):
        self.local_option = UNKNOWN     # Local state of an option
        self.remote_option = UNKNOWN    # Remote state of an option
        self.reply_pending = False      # Are we expecting a reply?
#------------------------------------------------------------------------Telnet
class TelnetClient(object):
    """
    Represents a client connection via Telnet.
    First argument is the socket discovered by the Telnet Server.
    Second argument is the tuple (ip address, port number).
    The server drives this object via socket_send()/socket_recv().
    """
    def __init__(self, sock, addr_tup):
        """Initialize per-connection state.

        sock is the already-accepted socket object; addr_tup is the
        (ip address, port number) pair belonging to it.
        """
        self.protocol = 'telnet'
        self.active = True          # Turns False when the connection is lost
        self.sock = sock            # The connection's socket
        self.fileno = sock.fileno() # The socket's file descriptor
        self.address = addr_tup[0]  # The client's remote TCP/IP address
        self.port = addr_tup[1]     # The client's remote port
        self.terminal_type = 'unknown client' # set via request_terminal_type()
        self.use_ansi = True
        self.columns = 80
        self.rows = 24
        self.send_pending = False
        self.send_buffer = ''
        self.recv_buffer = ''
        self.bytes_sent = 0
        self.bytes_received = 0
        self.cmd_ready = False
        self.command_list = []
        self.connect_time = time.time()
        self.last_input_time = time.time()
        ## State variables for interpreting incoming telnet commands
        self.telnet_got_iac = False # Are we inside an IAC sequence?
        self.telnet_got_cmd = None  # Did we get a telnet command?
        self.telnet_got_sb = False  # Are we inside a subnegotiation?
        self.telnet_opt_dict = {}   # Mapping for up to 256 TelnetOptions
        self.telnet_echo = False    # Echo input back to the client?
        self.telnet_echo_password = False # Echo back '*' for passwords?
        self.telnet_sb_buffer = ''  # Buffer for sub-negotiations
# def __del__(self):
# print "Telnet destructor called"
# pass
def get_command(self):
"""
Get a line of text that was received from the DE. The class's
cmd_ready attribute will be true if lines are available.
"""
cmd = None
count = len(self.command_list)
if count > 0:
cmd = self.command_list.pop(0)
## If that was the last line, turn off lines_pending
if count == 1:
self.cmd_ready = False
return cmd
def send(self, text):
"""
Send raw text to the distant end.
"""
if text:
self.send_buffer += text.replace('\n', '\r\n')
self.send_pending = True
    def send_cc(self, text):
        """
        Send text with caret codes converted to ansi, honoring the
        client's use_ansi preference.
        """
        self.send(colorize(text, self.use_ansi))
def send_wrapped(self, text):
"""
Send text padded and wrapped to the user's screen width.
"""
lines = word_wrap(text, self.columns)
for line in lines:
self.send_cc(line + '\n')
    def deactivate(self):
        """
        Set the client to disconnect on the next server poll.
        """
        ## NOTE(review): no immediate close happens here -- presumably the
        ## TelnetServer performs the actual disconnect; confirm
        self.active = False
def addrport(self):
"""
Return the DE's IP address and port number as a string.
"""
return "%s:%s" % (self.address, self.port)
    def idle(self):
        """
        Returns the number of seconds that have elapsed since the DE
        last sent us some input.
        """
        return time.time() - self.last_input_time
    def duration(self):
        """
        Returns the number of seconds the DE has been connected.
        """
        return time.time() - self.connect_time
    def request_do_sga(self):
        """
        Request DE to Suppress Go-Ahead. See RFC 858.
        """
        self._iac_do(SGA)
        ## remember that a reply for SGA is outstanding
        self._note_reply_pending(SGA, True)
    def request_will_echo(self):
        """
        Tell the DE that we would like to echo their text. See RFC 857.
        """
        self._iac_will(ECHO)
        self._note_reply_pending(ECHO, True)
        ## start echoing right away, without waiting for the DE's answer
        self.telnet_echo = True
    def request_wont_echo(self):
        """
        Tell the DE that we would like to stop echoing their text.
        See RFC 857.
        """
        self._iac_wont(ECHO)
        self._note_reply_pending(ECHO, True)
        ## stop echoing right away, without waiting for the DE's answer
        self.telnet_echo = False
    def password_mode_on(self):
        """
        Tell DE we will echo (but don't) so typed passwords don't show.
        """
        ## NOTE(review): telnet_echo_password is never set to True here,
        ## and telnet_echo is not touched -- confirm this is intentional
        self._iac_will(ECHO)
        self._note_reply_pending(ECHO, True)
    def password_mode_off(self):
        """
        Tell DE we are done echoing (we lied) and show typing again.
        """
        self._iac_wont(ECHO)
        self._note_reply_pending(ECHO, True)
    def request_naws(self):
        """
        Request to Negotiate About Window Size. See RFC 1073.
        """
        self._iac_do(NAWS)
        self._note_reply_pending(NAWS, True)
    def request_terminal_type(self):
        """
        Begins the Telnet negotiations to request the terminal type from
        the client. See RFC 1091 (the Terminal-Type option).
        """
        self._iac_do(TTYPE)
        self._note_reply_pending(TTYPE, True)
    def socket_send(self):
        """
        Called by TelnetServer when send data is ready.
        Pushes as much of send_buffer as the OS accepts; on a socket
        error the connection is marked inactive.
        """
        if len(self.send_buffer):
            try:
                sent = self.sock.send(self.send_buffer)
            except socket.error, err:
                print("!! SEND error '%d:%s' from %s" % (err[0], err[1],
                    self.addrport()))
                self.active = False
                return
            self.bytes_sent += sent
            ## keep the unsent remainder for the next poll
            self.send_buffer = self.send_buffer[sent:]
        else:
            self.send_pending = False
    def socket_recv(self):
        """
        Called by TelnetServer when recv data is ready.
        Raises BogConnectionLost on a socket error or when the DE has
        closed the connection.
        """
        try:
            data = self.sock.recv(2048)
        except socket.error, ex:
            print ("?? socket.recv() error '%d:%s' from %s" %
                (ex[0], ex[1], self.addrport()))
            raise BogConnectionLost()
        ## Did they close the connection?
        size = len(data)
        if size == 0:
            raise BogConnectionLost()
        ## Update some trackers
        self.last_input_time = time.time()
        self.bytes_received += size
        ## Test for telnet commands; IAC sequences are stripped here
        for byte in data:
            self._iac_sniffer(byte)
        ## Look for newline characters to get whole lines from the buffer
        while True:
            mark = self.recv_buffer.find('\n')
            if mark == -1:
                break
            cmd = self.recv_buffer[:mark].strip()
            self.command_list.append(cmd)
            self.cmd_ready = True
            self.recv_buffer = self.recv_buffer[mark+1:]
    def _recv_byte(self, byte):
        """
        Buffer one byte of ordinary (non-IAC) input, echoing it back
        to the client when echo is enabled.
        Non-printable filtering currently disabled because it did not play
        well with extended character sets.
        """
        ## Filter out non-printing characters
        #if (byte >= ' ' and byte <= '~') or byte == '\n':
        if self.telnet_echo:
            self._echo_byte(byte)
        self.recv_buffer += byte
    def _echo_byte(self, byte):
        """
        Echo a character back to the client and convert LF into CR\LF.
        """
        if byte == '\n':
            self.send_buffer += '\r'
        if self.telnet_echo_password:
            ## NOTE(review): in password mode a newline is echoed as
            ## '\r' followed by '*' instead of '\r\n' -- looks unintended;
            ## confirm before changing
            self.send_buffer += '*'
        else:
            self.send_buffer += byte
    def _iac_sniffer(self, byte):
        """
        Watches incoming data for Telnet IAC sequences.
        Passes the data, if any, with the IAC commands stripped to
        _recv_byte().  State is tracked across calls via telnet_got_iac,
        telnet_got_sb and telnet_got_cmd.
        """
        ## Are we not currently in an IAC sequence coming from the DE?
        if self.telnet_got_iac is False:
            if byte == IAC:
                ## Well, we are now
                self.telnet_got_iac = True
                return
            ## Are we currenty in a sub-negotion?
            elif self.telnet_got_sb is True:
                ## Sanity check on length
                if len(self.telnet_sb_buffer) < 64:
                    self.telnet_sb_buffer += byte
                else:
                    ## buffer overrun: abandon the sub-negotiation
                    self.telnet_got_sb = False
                    self.telnet_sb_buffer = ""
                return
            else:
                ## Just a normal NVT character
                self._recv_byte(byte)
                return
        ## Byte handling when already in an IAC sequence sent from the DE
        else:
            ## Did we get sent a second IAC?
            if byte == IAC and self.telnet_got_sb is True:
                ## Must be an escaped 255 (IAC + IAC)
                self.telnet_sb_buffer += byte
                self.telnet_got_iac = False
                return
            ## Do we already have an IAC + CMD?
            elif self.telnet_got_cmd:
                ## Yes, so handle the option
                self._three_byte_cmd(byte)
                return
            ## We have IAC but no CMD
            else:
                ## Is this the middle byte of a three-byte command?
                if byte == DO:
                    self.telnet_got_cmd = DO
                    return
                elif byte == DONT:
                    self.telnet_got_cmd = DONT
                    return
                elif byte == WILL:
                    self.telnet_got_cmd = WILL
                    return
                elif byte == WONT:
                    self.telnet_got_cmd = WONT
                    return
                else:
                    ## Nope, must be a two-byte command
                    self._two_byte_cmd(byte)
def _two_byte_cmd(self, cmd):
"""
Handle incoming Telnet commands that are two bytes long.
"""
#print "got two byte cmd %d" % ord(cmd)
if cmd == SB:
## Begin capturing a sub-negotiation string
self.telnet_got_sb = True
self.telnet_sb_buffer = ''
elif cmd == SE:
## Stop capturing a sub-negotiation string
self.telnet_got_sb = False
self._sb_decoder()
elif cmd == NOP:
pass
elif cmd == DATMK:
pass
elif cmd == IP:
pass
elif cmd == AO:
pass
elif cmd == AYT:
pass
elif cmd == EC:
pass
elif cmd == EL:
pass
elif cmd == GA:
pass
else:
print "2BC: Should not be here."
self.telnet_got_iac = False
self.telnet_got_cmd = None
def _three_byte_cmd(self, option):
"""
Handle incoming Telnet commmands that are three bytes long.
"""
cmd = self.telnet_got_cmd
#print "got three byte cmd %d:%d" % (ord(cmd), ord(option))
## Incoming DO's and DONT's refer to the status of this end
#---[ DO ]-------------------------------------------------------------
if cmd == DO:
if option == BINARY:
if self._check_reply_pending(BINARY):
self._note_reply_pending(BINARY, False)
self._note_local_option(BINARY, True)
elif (self._check_local_option(BINARY) is False or
self._check_local_option(BINARY) is UNKNOWN):
self._note_local_option(BINARY, True)
self._iac_will(BINARY)
## Just nod
elif option == ECHO:
if self._check_reply_pending(ECHO):
self._note_reply_pending(ECHO, False)
self._note_local_option(ECHO, True)
elif (self._check_local_option(ECHO) is False or
self._check_local_option(ECHO) is UNKNOWN):
self._note_local_option(ECHO, True)
self._iac_will(ECHO)
self.telnet_echo = True
elif option == SGA:
if self._check_reply_pending(SGA):
self._note_reply_pending(SGA, False)
self._note_local_option(SGA, True)
elif (self._check_local_option(SGA) is False or
self._check_local_option(SGA) is UNKNOWN):
self._note_local_option(SGA, True)
self._iac_will(SGA)
## Just nod
else:
## ALL OTHER OTHERS = Default to refusing once
if self._check_local_option(option) is UNKNOWN:
self._note_local_option(option, False)
self._iac_wont(option)
#---[ DONT ]-----------------------------------------------------------
elif cmd == DONT:
if option == BINARY:
if self._check_reply_pending(BINARY):
self._note_reply_pending(BINARY, False)
self._note_local_option(BINARY, False)
elif (self._check_local_option(BINARY) is True or
self._check_local_option(BINARY) is UNKNOWN):
self._note_local_option(BINARY, False)
self._iac_wont(BINARY)
## Just nod
elif option == ECHO:
if self._check_reply_pending(ECHO):
self._note_reply_pending(ECHO, False)
self._note_local_option(ECHO, True)
self.telnet_echo = False
elif (self._check_local_option(BINARY) is True or
self._check_local_option(BINARY) is UNKNOWN):
self._note_local_option(ECHO, False)
self._iac_wont(ECHO)
self.telnet_echo = False
elif option == SGA:
if self._check_reply_pending(SGA):
self._note_reply_pending(SGA, False)
self._note_local_option(SGA, False)
elif (self._check_remote_option(SGA) is True or
self._check_remote_option(SGA) is UNKNOWN):
self._note_local_option(SGA, False)
self._iac_will(SGA)
## Just nod
else:
## ALL OTHER OPTIONS = Default to ignoring
pass
## Incoming WILL's and WONT's refer to the status of the DE
#---[ WILL ]-----------------------------------------------------------
elif cmd == WILL:
if option == ECHO:
## Nutjob DE offering to echo the server...
if self._check_remote_option(ECHO) is UNKNOWN:
self._note_remote_option(ECHO, False)
# No no, bad DE!
self._iac_dont(ECHO)
elif option == NAWS:
if self._check_reply_pending(NAWS):
self._note_reply_pending(NAWS, False)
self._note_remote_option(NAWS, True)
## Nothing else to do, client follow with SB
elif (self._check_remote_option(NAWS) is False or
self._check_remote_option(NAWS) is UNKNOWN):
self._note_remote_option(NAWS, True)
self._iac_do(NAWS)
## Client should respond with SB
elif option == SGA:
if self._check_reply_pending(SGA):
self._note_reply_pending(SGA, False)
self._note_remote_option(SGA, True)
elif (self._check_remote_option(SGA) is False or
self._check_remote_option(SGA) is UNKNOWN):
self._note_remote_option(SGA, True)
self._iac_do(SGA)
## Just nod
elif option == TTYPE:
if self._check_reply_pending(TTYPE):
self._note_reply_pending(TTYPE, False)
self._note_remote_option(TTYPE, True)
## Tell them to send their terminal type
self.send('%c%c%c%c%c%c' % (IAC, SB, TTYPE, SEND, IAC, SE))
elif (self._check_remote_option(TTYPE) is False or
self._check_remote_option(TTYPE) is UNKNOWN):
self._note_remote_option(TTYPE, True)
self._iac_do(TTYPE)
#---[ WONT ]-----------------------------------------------------------
elif cmd == WONT:
if option == ECHO:
## DE states it wont echo us -- good, they're not suppose to.
if self._check_remote_option(ECHO) is UNKNOWN:
self._note_remote_option(ECHO, False)
self._iac_dont(ECHO)
elif option == SGA:
if self._check_reply_pending(SGA):
self._note_reply_pending(SGA, False)
self._note_remote_option(SGA, False)
elif (self._check_remote_option(SGA) is True or
self._check_remote_option(SGA) is UNKNOWN):
self._note_remote_option(SGA, False)
self._iac_dont(SGA)
if self._check_reply_pending(TTYPE):
self._note_reply_pending(TTYPE, False)
self._note_remote_option(TTYPE, False)
elif (self._check_remote_option(TTYPE) is True or
self._check_remote_option(TTYPE) is UNKNOWN):
self._note_remote_option(TTYPE, False)
self._iac_dont(TTYPE)
else:
print "3BC: Should not be here."
self.telnet_got_iac = False
self.telnet_got_cmd = None
def _sb_decoder(self):
"""
Figures out what to do with a received sub-negotiation block.
"""
#print "at decoder"
bloc = self.telnet_sb_buffer
if len(bloc) > 2:
if bloc[0] == TTYPE and bloc[1] == IS:
self.terminal_type = bloc[2:]
#print "Terminal type = '%s'" % self.terminal_type
if bloc[0] == NAWS:
if len(bloc) != 5:
print "Bad length on NAWS SB:", len(bloc)
else:
self.columns = (256 * ord(bloc[1])) + ord(bloc[2])
self.rows = (256 * ord(bloc[3])) + ord(bloc[4])
#print "Screen is %d x %d" % (self.columns, self.rows)
self.telnet_sb_buffer = ''
#---[ State Juggling for Telnet Options ]----------------------------------
## Sometimes verbiage is tricky. I use 'note' rather than 'set' here
## because (to me) set infers something happened.
def _check_local_option(self, option):
"""Test the status of local negotiated Telnet options."""
if not self.telnet_opt_dict.has_key(option):
self.telnet_opt_dict[option] = TelnetOption()
return self.telnet_opt_dict[option].local_option
def _note_local_option(self, option, state):
"""Record the status of local negotiated Telnet options."""
if not self.telnet_opt_dict.has_key(option):
self.telnet_opt_dict[option] = TelnetOption()
self.telnet_opt_dict[option].local_option = state
def _check_remote_option(self, option):
"""Test the status of remote negotiated Telnet options."""
if not self.telnet_opt_dict.has_key(option):
self.telnet_opt_dict[option] = TelnetOption()
return self.telnet_opt_dict[option].remote_option
def _note_remote_option(self, option, state):
"""Record the status of local negotiated Telnet options."""
if not self.telnet_opt_dict.has_key(option):
self.telnet_opt_dict[option] = TelnetOption()
self.telnet_opt_dict[option].remote_option = state
def _check_reply_pending(self, option):
"""Test the status of requested Telnet options."""
if not self.telnet_opt_dict.has_key(option):
self.telnet_opt_dict[option] = TelnetOption()
return self.telnet_opt_dict[option].reply_pending
def _note_reply_pending(self, option, state):
"""Record the status of requested Telnet options."""
if not self.telnet_opt_dict.has_key(option):
self.telnet_opt_dict[option] = TelnetOption()
self.telnet_opt_dict[option].reply_pending = state
#---[ Telnet Command Shortcuts ]-------------------------------------------
def _iac_do(self, option):
"""Send a Telnet IAC "DO" sequence."""
self.send('%c%c%c' % (IAC, DO, option))
def _iac_dont(self, option):
"""Send a Telnet IAC "DONT" sequence."""
self.send('%c%c%c' % (IAC, DONT, option))
def _iac_will(self, option):
"""Send a Telnet IAC "WILL" sequence."""
self.send('%c%c%c' % (IAC, WILL, option))
def _iac_wont(self, option):
"""Send a Telnet IAC "WONT" sequence."""
self.send('%c%c%c' % (IAC, WONT, option))
| 24,890 | Python | .py | 587 | 31.415673 | 79 | 0.529996 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,079 | xterm.py | wummel_linkchecker/third_party/miniboa-r42/miniboa/xterm.py | # -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# mudlib/usr/xterm.py
# Copyright 2009 Jim Storch
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#------------------------------------------------------------------------------
"""
Support for color and formatting for Xterm style clients.
"""
import re
## A paragraph break is a blank (possibly whitespace-only) line.  The
## capturing group makes re.split() keep the separators, which callers
## then recognize and skip with str.isspace().
_PARA_BREAK = re.compile(r"(\n\s*\n)", re.MULTILINE)
#--[ Caret Code to ANSI TABLE ]------------------------------------------------
## Two-character caret tokens and the Xterm/ANSI escape sequence each one
## expands to.  colorize() substitutes them in table order; lookups are a
## simple linear scan.
## NOTE(review): there is no '^7' (white background) entry -- presumably
## intentional, but worth confirming.
_ANSI_CODES = (
    ( '^k', '\x1b[22;30m' ), # black
    ( '^K', '\x1b[1;30m' ), # bright black (grey)
    ( '^r', '\x1b[22;31m' ), # red
    ( '^R', '\x1b[1;31m' ), # bright red
    ( '^g', '\x1b[22;32m' ), # green
    ( '^G', '\x1b[1;32m' ), # bright green
    ( '^y', '\x1b[22;33m' ), # yellow
    ( '^Y', '\x1b[1;33m' ), # bright yellow
    ( '^b', '\x1b[22;34m' ), # blue
    ( '^B', '\x1b[1;34m' ), # bright blue
    ( '^m', '\x1b[22;35m' ), # magenta
    ( '^M', '\x1b[1;35m' ), # bright magenta
    ( '^c', '\x1b[22;36m' ), # cyan
    ( '^C', '\x1b[1;36m' ), # bright cyan
    ( '^w', '\x1b[22;37m' ), # white
    ( '^W', '\x1b[1;37m' ), # bright white
    ( '^0', '\x1b[40m' ), # black background
    ( '^1', '\x1b[41m' ), # red background
    ( '^2', '\x1b[42m' ), # green background
    ( '^3', '\x1b[43m' ), # yellow background
    ( '^4', '\x1b[44m' ), # blue background
    ( '^5', '\x1b[45m' ), # magenta background
    ( '^6', '\x1b[46m' ), # cyan background
    ( '^d', '\x1b[39m' ), # default (should be white on black)
    ( '^I', '\x1b[7m' ), # inverse text on
    ( '^i', '\x1b[27m' ), # inverse text off
    ( '^~', '\x1b[0m' ), # reset all
    ( '^U', '\x1b[4m' ), # underline on
    ( '^u', '\x1b[24m' ), # underline off
    ( '^!', '\x1b[1m' ), # bold on
    ( '^.', '\x1b[22m'), # bold off
    ( '^s', '\x1b[2J'), # clear screen
    ( '^l', '\x1b[2K'), # clear to end of line
    )
def strip_caret_codes(text):
    """
    Return *text* with every caret formatting code removed.

    A literal caret written as '^^' survives as a single '^'.
    """
    ## Protect escaped carets so the token pass below cannot eat them
    text = text.replace('^^', '\x00')
    for token, _ in _ANSI_CODES:
        text = text.replace(token, '')
    ## Restore the escaped carets
    return text.replace('\x00', '^')
def colorize(text, ansi=True):
    """
    Translate caret codes embedded in *text*.

    When *ansi* is true each token is replaced by its escape sequence;
    otherwise the tokens are simply stripped.  '^^' always yields '^'.
    """
    if not ansi:
        return strip_caret_codes(text)
    ## Protect escaped carets, substitute every token, then restore
    text = text.replace('^^', '\x00')
    for token, sequence in _ANSI_CODES:
        text = text.replace(token, sequence)
    return text.replace('\x00', '^')
def word_wrap(text, columns=80, indent=4, padding=2):
    """
    Break *text* into a list of word-wrapped lines.

    The first line of each paragraph is indented by *indent* spaces and
    continuation lines by *padding* spaces; *padding* is also reserved on
    the right, so usable width is ``columns - padding``.  Paragraphs are
    separated by blank lines.

    Fixes an off-by-one in the original, which appended the first word of
    a paragraph as ``' ' + word`` and so produced ``indent + 1`` leading
    spaces.
    """
    ## Split on blank lines; the capturing group keeps the separators,
    ## which are skipped below (same pattern as the module's _PARA_BREAK).
    paragraphs = re.split(r"(\n\s*\n)", text)
    lines = []
    columns -= padding
    for para in paragraphs:
        if para.isspace():
            continue
        line = ' ' * indent
        fresh = True  # True until a word has been placed on this line
        for word in para.split():
            if (len(line) + 1 + len(word)) > columns:
                ## Current line is full -- emit it and start a new one
                lines.append(line)
                line = ' ' * padding + word
                fresh = False
            elif fresh:
                ## First word sits directly after the indent, no separator
                line += word
                fresh = False
            else:
                line += ' ' + word
        if not line.isspace():
            lines.append(line)
    return lines
| 3,911 | Python | .py | 98 | 34.459184 | 79 | 0.497238 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,080 | __init__.py | wummel_linkchecker/third_party/miniboa-r42/miniboa/__init__.py | # -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# miniboa/__init__.py
# Copyright 2009 Jim Storch
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#------------------------------------------------------------------------------
from miniboa.async import TelnetServer
| 831 | Python | .py | 14 | 58.071429 | 79 | 0.608856 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,081 | async.py | wummel_linkchecker/third_party/miniboa-r42/miniboa/async.py | # -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# miniboa/async.py
# Copyright 2009 Jim Storch
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#------------------------------------------------------------------------------
"""
Handle Asynchronous Telnet Connections.
"""
import socket
import select
import sys
from miniboa.telnet import TelnetClient
from miniboa.error import BogConnectionLost
## Cap sockets to 512 on Windows because winsock can only process 512 at time
if sys.platform == 'win32':
MAX_CONNECTIONS = 512
## Cap sockets to 1000 on Linux because you can only have 1024 file descriptors
else:
MAX_CONNECTIONS = 1000
#-----------------------------------------------------Dummy Connection Handlers
def _on_connect(client):
    """
    Default new-connection handler: log the arrival and send a greeting.

    Real applications replace this via TelnetServer(on_connect=...).
    """
    print("++ Opened connection to %s, sending greeting..." % client.addrport())
    greeting = "Greetings from Miniboa!  Now it's time to add your code.\n"
    client.send(greeting)
def _on_disconnect(client):
    """
    Default lost-connection handler: just log the departure.

    Real applications replace this via TelnetServer(on_disconnect=...).
    """
    print("-- Lost connection to %s" % client.addrport())
#-----------------------------------------------------------------Telnet Server
class TelnetServer(object):
    """
    Poll sockets for new connections and sending/receiving data from clients.

    Construct one instance and call poll() regularly from the
    application's main loop; connection lifecycle events are reported
    through the on_connect/on_disconnect callbacks.

    NOTE(review): this class uses Python-2-only constructs
    (``except E, err``, ``print >> sys.stderr``, print statements).
    """
    def __init__(self, port=7777, host='', on_connect=_on_connect,
            on_disconnect=_on_disconnect, timeout=0.005):
        """
        Create a new Telnet Server.

        port -- Port to listen for new connections on.  On UNIX-like
            platforms, you may need root access to use ports under 1025.
        host -- Address of the LOCAL network interface to listen on.  You
            can usually leave this blank unless you want to restrict
            traffic to a specific network device.  This will usually NOT
            be the same as the Internet address of your server.
        on_connect -- function to call with new telnet connections.
        on_disconnect -- function to call when a client's connection dies,
            either through a terminated session or client.active being set
            to False.
        timeout -- amount of time that poll() will wait for activity
            before returning.  Also frees a slice of CPU time.
        """
        self.port = port
        self.host = host
        self.on_connect = on_connect
        self.on_disconnect = on_disconnect
        self.timeout = timeout
        server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        ## SO_REUSEADDR lets the server restart without waiting for old
        ## sockets in TIME_WAIT to expire.
        server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        server_socket.bind((host, port))
        self.address = server_socket.getsockname()
        server_socket.listen(5)
        self.server_socket = server_socket
        self.server_fileno = server_socket.fileno()
        ## Dictionary of active clients,
        ## key = file descriptor, value = TelnetClient (see miniboa.telnet)
        self.clients = {}
    def client_count(self):
        """
        Returns the number of active connections.
        """
        return len(self.clients)
    def client_list(self):
        """
        Returns a list of connected clients.
        """
        return self.clients.values()
    def poll(self):
        """
        Perform a non-blocking scan of recv and send states on the server
        and client connection sockets.  Process new connection requests,
        read incoming data, and send outgoing data.  Sends and receives
        may be partial.
        """
        #print len(self.connections)
        ## Build a list of connections to test for receive data pending
        recv_list = [self.server_fileno] # always add the server
        ## NOTE: Python 2 dict.values() returns a list copy, so deleting
        ## entries from self.clients inside this loop is safe.
        for client in self.clients.values():
            if client.active:
                recv_list.append(client.fileno)
            ## Delete inactive connections from the dictionary
            else:
                #print "-- Lost connection to %s" % client.addrport()
                #client.sock.close()
                self.on_disconnect(client)
                del self.clients[client.fileno]
        ## Build a list of connections that need to send data
        send_list = []
        for client in self.clients.values():
            if client.send_pending:
                send_list.append(client.fileno)
        ## Get active socket file descriptors from select.select()
        try:
            rlist, slist, elist = select.select(recv_list, send_list, [],
                self.timeout)
        except select.error, err:
            ## If we can't even use select(), game over man, game over
            print >> sys.stderr, ("!! FATAL SELECT error '%d:%s'!"
                % (err[0], err[1]))
            sys.exit(1)
        ## Process socket file descriptors with data to receive
        for sock_fileno in rlist:
            ## If it's coming from the server's socket then this is a new
            ## connection request.
            if sock_fileno == self.server_fileno:
                try:
                    sock, addr_tup = self.server_socket.accept()
                except socket.error, err:
                    print >> sys.stderr, ("!! ACCEPT error '%d:%s'." %
                        (err[0], err[1]))
                    continue
                ## Check for maximum connections
                if self.client_count() >= MAX_CONNECTIONS:
                    print '?? Refusing new connection; maximum in use.'
                    sock.close()
                    continue
                new_client = TelnetClient(sock, addr_tup)
                #print "++ Opened connection to %s" % new_client.addrport()
                ## Add the connection to our dictionary and call handler
                self.clients[new_client.fileno] = new_client
                self.on_connect(new_client)
            else:
                ## Call the connection's receive method
                try:
                    self.clients[sock_fileno].socket_recv()
                except BogConnectionLost:
                    self.clients[sock_fileno].deactivate()
        ## Process sockets with data to send
        for sock_fileno in slist:
            ## Call the connection's send method
            self.clients[sock_fileno].socket_send()
| 6,849 | Python | .py | 151 | 35.794702 | 79 | 0.593905 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,082 | message.py | wummel_linkchecker/third_party/dnspython/tests/message.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import cStringIO
import os
import unittest
import dns.exception
import dns.message
## Text form of a simple recursive EDNS query; the wire-format fixtures
## below must round-trip against it.
query_text = """id 1234
opcode QUERY
rcode NOERROR
flags RD
edns 0
eflags DO
payload 4096
;QUESTION
wwww.dnspython.org. IN A
;ANSWER
;AUTHORITY
;ADDITIONAL"""
## Expected wire encoding of query_text.
## NOTE: str.decode('hex_codec') is Python 2 only (bytes.fromhex in 3.x).
goodhex = '04d201000001000000000001047777777709646e73707974686f6e' \
          '036f726700000100010000291000000080000000'
goodwire = goodhex.decode('hex_codec')
## Text form of an authoritative SOA answer with NS authority records and
## an A record in the additional section.
answer_text = """id 1234
opcode QUERY
rcode NOERROR
flags QR AA RD
;QUESTION
dnspython.org. IN SOA
;ANSWER
dnspython.org. 3600 IN SOA woof.dnspython.org. hostmaster.dnspython.org. 2003052700 3600 1800 604800 3600
;AUTHORITY
dnspython.org. 3600 IN NS ns1.staff.nominum.org.
dnspython.org. 3600 IN NS ns2.staff.nominum.org.
dnspython.org. 3600 IN NS woof.play-bow.org.
;ADDITIONAL
woof.play-bow.org. 3600 IN A 204.152.186.150
"""
## Expected wire encoding of answer_text; spaces are only for human
## readability and are stripped before hex-decoding.
goodhex2 = '04d2 8500 0001 0001 0003 0001' \
           '09646e73707974686f6e036f726700 0006 0001' \
           'c00c 0006 0001 00000e10 0028 ' \
           '04776f6f66c00c 0a686f73746d6173746572c00c' \
           '7764289c 00000e10 00000708 00093a80 00000e10' \
           'c00c 0002 0001 00000e10 0014' \
           '036e7331057374616666076e6f6d696e756dc016' \
           'c00c 0002 0001 00000e10 0006 036e7332c063' \
           'c00c 0002 0001 00000e10 0010 04776f6f6608706c61792d626f77c016' \
           'c091 0001 0001 00000e10 0004 cc98ba96'
goodwire2 = goodhex2.replace(' ', '').decode('hex_codec')
## Same query as query_text but with the maximum extended rcode (4095),
## which requires EDNS to encode.
query_text_2 = """id 1234
opcode QUERY
rcode 4095
flags RD
edns 0
eflags DO
payload 4096
;QUESTION
wwww.dnspython.org. IN A
;ANSWER
;AUTHORITY
;ADDITIONAL"""
goodhex3 = '04d2010f0001000000000001047777777709646e73707974686f6e' \
           '036f726700000100010000291000ff0080000000'
goodwire3 = goodhex3.decode('hex_codec')
class MessageTestCase(unittest.TestCase):
    """
    Tests for dns.message: equality semantics, EDNS wire round-tripping,
    size limits and malformed-wire handling.
    """
    def test_comparison_eq1(self):
        q1 = dns.message.from_text(query_text)
        q2 = dns.message.from_text(query_text)
        self.failUnless(q1 == q2)
    def test_comparison_ne1(self):
        q1 = dns.message.from_text(query_text)
        q2 = dns.message.from_text(query_text)
        q2.id = 10
        self.failUnless(q1 != q2)
    def test_comparison_ne2(self):
        q1 = dns.message.from_text(query_text)
        q2 = dns.message.from_text(query_text)
        q2.question = []
        self.failUnless(q1 != q2)
    def test_comparison_ne3(self):
        ## A message never compares equal to a non-message object.
        q1 = dns.message.from_text(query_text)
        self.failUnless(q1 != 1)
    def test_EDNS_to_wire1(self):
        q = dns.message.from_text(query_text)
        w = q.to_wire()
        self.failUnless(w == goodwire)
    def test_EDNS_from_wire1(self):
        m = dns.message.from_wire(goodwire)
        self.failUnless(str(m) == query_text)
    def test_EDNS_to_wire2(self):
        ## The extended rcode (4095) must be carried in the EDNS OPT record.
        q = dns.message.from_text(query_text_2)
        w = q.to_wire()
        self.failUnless(w == goodwire3)
    def test_EDNS_from_wire2(self):
        m = dns.message.from_wire(goodwire3)
        self.failUnless(str(m) == query_text_2)
    def test_TooBig(self):
        ## Rendering 25 additional A records into a 512-byte budget must
        ## raise dns.exception.TooBig.
        def bad():
            q = dns.message.from_text(query_text)
            for i in xrange(0, 25):
                rrset = dns.rrset.from_text('foo%d.' % i, 3600,
                                            dns.rdataclass.IN,
                                            dns.rdatatype.A,
                                            '10.0.0.%d' % i)
                q.additional.append(rrset)
            w = q.to_wire(max_size=512)
        self.failUnlessRaises(dns.exception.TooBig, bad)
    def test_answer1(self):
        ## want_shuffle=False keeps record order deterministic so the wire
        ## image can be compared byte-for-byte against the fixture.
        a = dns.message.from_text(answer_text)
        wire = a.to_wire(want_shuffle=False)
        self.failUnless(wire == goodwire2)
    def test_TrailingJunk(self):
        ## Any byte beyond the encoded message must be rejected.
        def bad():
            badwire = goodwire + '\x00'
            m = dns.message.from_wire(badwire)
        self.failUnlessRaises(dns.message.TrailingJunk, bad)
    def test_ShortHeader(self):
        ## A DNS header is 12 bytes; 11 bytes must be rejected.
        def bad():
            badwire = '\x00' * 11
            m = dns.message.from_wire(badwire)
        self.failUnlessRaises(dns.message.ShortHeader, bad)
    def test_RespondingToResponse(self):
        ## Building a response to a response is a form error.
        def bad():
            q = dns.message.make_query('foo', 'A')
            r1 = dns.message.make_response(q)
            r2 = dns.message.make_response(r1)
        self.failUnlessRaises(dns.exception.FormError, bad)
    def test_ExtendedRcodeSetting(self):
        m = dns.message.make_query('foo', 'A')
        m.set_rcode(4095)
        self.failUnless(m.rcode() == 4095)
        m.set_rcode(2)
        self.failUnless(m.rcode() == 2)
    def test_EDNSVersionCoherence(self):
        ## use_edns(1) must store the EDNS version in bits 16-23 of
        ## ednsflags.
        m = dns.message.make_query('foo', 'A')
        m.use_edns(1)
        self.failUnless((m.ednsflags >> 16) & 0xFF == 1)
## Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 5,594 | Python | .py | 150 | 30.666667 | 105 | 0.665005 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,083 | bugs.py | wummel_linkchecker/third_party/dnspython/tests/bugs.py | # Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.rdata
import dns.rdataclass
import dns.rdatatype
import dns.ttl
class BugsTestCase(unittest.TestCase):
    """Regression tests for previously reported dnspython bugs."""
    def test_float_LOC(self):
        ## LOC coordinates must convert to signed decimal degrees.
        loc = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.LOC,
                                  "30 30 0.000 N 100 30 0.000 W 10.00m 20m 2000m 20m")
        self.failUnless(loc.float_latitude == 30.5)
        self.failUnless(loc.float_longitude == -100.5)
    def test_SOA_BIND8_TTL(self):
        ## BIND 8 TTL syntax ("1s 1m 1h 1d") and plain seconds must parse
        ## to equal rdata.
        soa_units = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.SOA,
                                        "a b 100 1s 1m 1h 1d")
        soa_plain = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.SOA,
                                        "a b 100 1 60 3600 86400")
        self.failUnless(soa_units == soa_plain)
    def test_TTL_bounds_check(self):
        ## TTLs are 31-bit; 2**31 must be rejected.
        def overflow():
            ttl = dns.ttl.from_text("2147483648")
        self.failUnlessRaises(dns.ttl.BadTTL, overflow)
## Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 1,782 | Python | .py | 37 | 41.513514 | 88 | 0.691024 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,084 | set.py | wummel_linkchecker/third_party/dnspython/tests/set.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.set
# for convenience: short alias used by every test case below
S = dns.set.Set
class SimpleSetTestCase(unittest.TestCase):
    """
    Exercises dns.set.Set: sizing, the algebraic operators, subset /
    superset predicates, update(), and the sequence protocol.

    The only behavioral code change versus the original is in
    testGetslice, where the local variable ``slice`` shadowed the
    builtin of the same name; it is now called ``window``.
    """
    def testLen1(self):
        s1 = S()
        self.failUnless(len(s1) == 0)
    def testLen2(self):
        s1 = S([1, 2, 3])
        self.failUnless(len(s1) == 3)
    def testLen3(self):
        ## Duplicates must collapse.
        s1 = S([1, 2, 3, 3, 3])
        self.failUnless(len(s1) == 3)
    def testUnion1(self):
        s1 = S([1, 2, 3])
        s2 = S([1, 2, 3])
        e = S([1, 2, 3])
        self.failUnless(s1 | s2 == e)
    def testUnion2(self):
        s1 = S([1, 2, 3])
        s2 = S([])
        e = S([1, 2, 3])
        self.failUnless(s1 | s2 == e)
    def testUnion3(self):
        s1 = S([1, 2, 3])
        s2 = S([3, 4])
        e = S([1, 2, 3, 4])
        self.failUnless(s1 | s2 == e)
    def testIntersection1(self):
        s1 = S([1, 2, 3])
        s2 = S([1, 2, 3])
        e = S([1, 2, 3])
        self.failUnless(s1 & s2 == e)
    def testIntersection2(self):
        s1 = S([0, 1, 2, 3])
        s2 = S([1, 2, 3, 4])
        e = S([1, 2, 3])
        self.failUnless(s1 & s2 == e)
    def testIntersection3(self):
        s1 = S([1, 2, 3])
        s2 = S([])
        e = S([])
        self.failUnless(s1 & s2 == e)
    def testIntersection4(self):
        s1 = S([1, 2, 3])
        s2 = S([5, 4])
        e = S([])
        self.failUnless(s1 & s2 == e)
    def testDifference1(self):
        s1 = S([1, 2, 3])
        s2 = S([5, 4])
        e = S([1, 2, 3])
        self.failUnless(s1 - s2 == e)
    def testDifference2(self):
        s1 = S([1, 2, 3])
        s2 = S([])
        e = S([1, 2, 3])
        self.failUnless(s1 - s2 == e)
    def testDifference3(self):
        s1 = S([1, 2, 3])
        s2 = S([3, 2])
        e = S([1])
        self.failUnless(s1 - s2 == e)
    def testDifference4(self):
        s1 = S([1, 2, 3])
        s2 = S([3, 2, 1])
        e = S([])
        self.failUnless(s1 - s2 == e)
    def testSubset1(self):
        s1 = S([1, 2, 3])
        s2 = S([3, 2, 1])
        self.failUnless(s1.issubset(s2))
    def testSubset2(self):
        s1 = S([1, 2, 3])
        self.failUnless(s1.issubset(s1))
    def testSubset3(self):
        s1 = S([])
        s2 = S([1, 2, 3])
        self.failUnless(s1.issubset(s2))
    def testSubset4(self):
        s1 = S([1])
        s2 = S([1, 2, 3])
        self.failUnless(s1.issubset(s2))
    def testSubset5(self):
        s1 = S([])
        s2 = S([])
        self.failUnless(s1.issubset(s2))
    def testSubset6(self):
        s1 = S([1, 4])
        s2 = S([1, 2, 3])
        self.failUnless(not s1.issubset(s2))
    def testSuperset1(self):
        s1 = S([1, 2, 3])
        s2 = S([3, 2, 1])
        self.failUnless(s1.issuperset(s2))
    def testSuperset2(self):
        s1 = S([1, 2, 3])
        self.failUnless(s1.issuperset(s1))
    def testSuperset3(self):
        s1 = S([1, 2, 3])
        s2 = S([])
        self.failUnless(s1.issuperset(s2))
    def testSuperset4(self):
        s1 = S([1, 2, 3])
        s2 = S([1])
        self.failUnless(s1.issuperset(s2))
    def testSuperset5(self):
        s1 = S([])
        s2 = S([])
        self.failUnless(s1.issuperset(s2))
    def testSuperset6(self):
        s1 = S([1, 2, 3])
        s2 = S([1, 4])
        self.failUnless(not s1.issuperset(s2))
    def testUpdate1(self):
        s1 = S([1, 2, 3])
        u = (4, 5, 6)
        e = S([1, 2, 3, 4, 5, 6])
        s1.update(u)
        self.failUnless(s1 == e)
    def testUpdate2(self):
        s1 = S([1, 2, 3])
        u = []
        e = S([1, 2, 3])
        s1.update(u)
        self.failUnless(s1 == e)
    def testGetitem(self):
        ## Indexing is order-agnostic: rebuilding a set from its three
        ## items must reproduce the original.
        s1 = S([1, 2, 3])
        i0 = s1[0]
        i1 = s1[1]
        i2 = s1[2]
        s2 = S([i0, i1, i2])
        self.failUnless(s1 == s2)
    def testGetslice(self):
        ## BUGFIX (style): renamed the local from "slice", which shadowed
        ## the builtin of the same name.
        s1 = S([1, 2, 3])
        window = s1[0:2]
        self.failUnless(len(window) == 2)
        item = s1[2]
        window.append(item)
        s2 = S(window)
        self.failUnless(s1 == s2)
    def testDelitem(self):
        s1 = S([1, 2, 3])
        del s1[0]
        i1 = s1[0]
        i2 = s1[1]
        self.failUnless(i1 != i2)
        self.failUnless(i1 == 1 or i1 == 2 or i1 == 3)
        self.failUnless(i2 == 1 or i2 == 2 or i2 == 3)
    def testDelslice(self):
        s1 = S([1, 2, 3])
        del s1[0:2]
        i1 = s1[0]
        self.failUnless(i1 == 1 or i1 == 2 or i1 == 3)
## Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 5,255 | Python | .py | 171 | 23.222222 | 72 | 0.513176 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,085 | flags.py | wummel_linkchecker/third_party/dnspython/tests/flags.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.flags
import dns.rcode
import dns.opcode
class FlagsTestCase(unittest.TestCase):
    """
    Tests for the dns.rcode and dns.flags conversion helpers.

    Fix: the original defined ``test_rcode6`` twice; the second
    definition shadowed the first, so the from_flags/BADVERS check never
    ran.  The first occurrence is renamed ``test_rcode5``.
    """
    def test_rcode1(self):
        self.failUnless(dns.rcode.from_text('FORMERR') == dns.rcode.FORMERR)
    def test_rcode2(self):
        self.failUnless(dns.rcode.to_text(dns.rcode.FORMERR) == "FORMERR")
    def test_rcode3(self):
        self.failUnless(dns.rcode.to_flags(dns.rcode.FORMERR) == (1, 0))
    def test_rcode4(self):
        ## BADVERS only fits in the EDNS extended-rcode bits.
        self.failUnless(dns.rcode.to_flags(dns.rcode.BADVERS) == \
                        (0, 0x01000000))
    def test_rcode5(self):
        ## Renamed from a duplicate "test_rcode6" so it actually runs.
        self.failUnless(dns.rcode.from_flags(0, 0x01000000) == \
                        dns.rcode.BADVERS)
    def test_rcode6(self):
        self.failUnless(dns.rcode.from_flags(5, 0) == dns.rcode.REFUSED)
    def test_rcode7(self):
        ## rcodes above 4095 cannot be encoded and must be rejected.
        def bad():
            dns.rcode.to_flags(4096)
        self.failUnlessRaises(ValueError, bad)
    def test_flags1(self):
        self.failUnless(dns.flags.from_text("RA RD AA QR") == \
                        dns.flags.QR|dns.flags.AA|dns.flags.RD|dns.flags.RA)
    def test_flags2(self):
        flags = dns.flags.QR|dns.flags.AA|dns.flags.RD|dns.flags.RA
        self.failUnless(dns.flags.to_text(flags) == "QR AA RD RA")
## Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 2,092 | Python | .py | 45 | 40.466667 | 77 | 0.695032 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,086 | namedict.py | wummel_linkchecker/third_party/dnspython/tests/namedict.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.name
import dns.namedict
class NameTestCase(unittest.TestCase):
    """Tests for dns.namedict.NameDict lookups with absolute keys
    (``ndict``) and relative keys (``rndict``).

    Uses assertEqual/assertRaises in place of the deprecated
    failUnless/failUnlessRaises aliases.
    """

    def setUp(self):
        # Absolute-name dictionary: foo.bar. -> 1, bar. -> 2
        self.ndict = dns.namedict.NameDict()
        n1 = dns.name.from_text('foo.bar.')
        n2 = dns.name.from_text('bar.')
        self.ndict[n1] = 1
        self.ndict[n2] = 2
        # Relative-name dictionary: foo.bar -> 1, bar -> 2
        self.rndict = dns.namedict.NameDict()
        n1 = dns.name.from_text('foo.bar', None)
        n2 = dns.name.from_text('bar', None)
        self.rndict[n1] = 1
        self.rndict[n2] = 2

    def testDepth(self):
        # 'foo.bar.' has three labels: foo, bar, and the root label.
        self.assertEqual(self.ndict.max_depth, 3)

    def testLookup1(self):
        k = dns.name.from_text('foo.bar.')
        self.assertEqual(self.ndict[k], 1)

    def testLookup2(self):
        k = dns.name.from_text('foo.bar.')
        self.assertEqual(self.ndict.get_deepest_match(k)[1], 1)

    def testLookup3(self):
        # Subdomains of foo.bar. match the foo.bar. entry.
        k = dns.name.from_text('a.b.c.foo.bar.')
        self.assertEqual(self.ndict.get_deepest_match(k)[1], 1)

    def testLookup4(self):
        # Subdomains of bar. (but not foo.bar.) match the bar. entry.
        k = dns.name.from_text('a.b.c.bar.')
        self.assertEqual(self.ndict.get_deepest_match(k)[1], 2)

    def testLookup5(self):
        def bad():
            n = dns.name.from_text('a.b.c.')
            self.ndict.get_deepest_match(n)
        self.assertRaises(KeyError, bad)

    def testLookup6(self):
        def bad():
            self.ndict.get_deepest_match(dns.name.empty)
        self.assertRaises(KeyError, bad)

    def testLookup7(self):
        # An entry for the empty name acts as a catch-all match.
        self.ndict[dns.name.empty] = 100
        n = dns.name.from_text('a.b.c.')
        (k, v) = self.ndict.get_deepest_match(n)
        self.assertEqual(v, 100)

    def testLookup8(self):
        # Keys must be dns.name.Name objects, not plain strings.
        def bad():
            self.ndict['foo'] = 100
        self.assertRaises(ValueError, bad)

    def testRelDepth(self):
        # 'foo.bar' (relative) has two labels.
        self.assertEqual(self.rndict.max_depth, 2)

    def testRelLookup1(self):
        k = dns.name.from_text('foo.bar', None)
        self.assertEqual(self.rndict[k], 1)

    def testRelLookup2(self):
        k = dns.name.from_text('foo.bar', None)
        self.assertEqual(self.rndict.get_deepest_match(k)[1], 1)

    def testRelLookup3(self):
        k = dns.name.from_text('a.b.c.foo.bar', None)
        self.assertEqual(self.rndict.get_deepest_match(k)[1], 1)

    def testRelLookup4(self):
        k = dns.name.from_text('a.b.c.bar', None)
        self.assertEqual(self.rndict.get_deepest_match(k)[1], 2)

    def testRelLookup7(self):
        self.rndict[dns.name.empty] = 100
        n = dns.name.from_text('a.b.c', None)
        (k, v) = self.rndict.get_deepest_match(n)
        self.assertEqual(v, 100)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 3,459 | Python | .py | 82 | 35.426829 | 72 | 0.64641 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,087 | name.py | wummel_linkchecker/third_party/dnspython/tests/name.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import cStringIO
import socket
import dns.name
import dns.reversename
import dns.e164
class NameTestCase(unittest.TestCase):
def setUp(self):
self.origin = dns.name.from_text('example.')
def testFromTextRel1(self):
n = dns.name.from_text('foo.bar')
self.failUnless(n.labels == ('foo', 'bar', ''))
def testFromTextRel2(self):
n = dns.name.from_text('foo.bar', origin=self.origin)
self.failUnless(n.labels == ('foo', 'bar', 'example', ''))
def testFromTextRel3(self):
n = dns.name.from_text('foo.bar', origin=None)
self.failUnless(n.labels == ('foo', 'bar'))
def testFromTextRel4(self):
n = dns.name.from_text('@', origin=None)
self.failUnless(n == dns.name.empty)
def testFromTextRel5(self):
n = dns.name.from_text('@', origin=self.origin)
self.failUnless(n == self.origin)
def testFromTextAbs1(self):
n = dns.name.from_text('foo.bar.')
self.failUnless(n.labels == ('foo', 'bar', ''))
def testTortureFromText(self):
good = [
r'.',
r'a',
r'a.',
r'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
r'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
r'\000.\008.\010.\032.\046.\092.\099.\255',
r'\\',
r'\..\.',
r'\\.\\',
r'!"#%&/()=+-',
r'\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255.\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255.\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255.\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255',
]
bad = [
r'..',
r'.a',
r'\\..',
'\\', # yes, we don't want the 'r' prefix!
r'\0',
r'\00',
r'\00Z',
r'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
r'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
r'\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255.\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255.\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255.\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255',
]
for t in good:
try:
n = dns.name.from_text(t)
except:
self.fail("good test '%s' raised an exception" % t)
for t in bad:
caught = False
try:
n = dns.name.from_text(t)
except:
caught = True
if not caught:
self.fail("bad test '%s' did not raise an exception" % t)
def testImmutable1(self):
def bad():
self.origin.labels = ()
self.failUnlessRaises(TypeError, bad)
def testImmutable2(self):
def bad():
self.origin.labels[0] = 'foo'
self.failUnlessRaises(TypeError, bad)
def testAbs1(self):
self.failUnless(dns.name.root.is_absolute())
def testAbs2(self):
self.failUnless(not dns.name.empty.is_absolute())
def testAbs3(self):
self.failUnless(self.origin.is_absolute())
def testAbs3(self):
n = dns.name.from_text('foo', origin=None)
self.failUnless(not n.is_absolute())
def testWild1(self):
n = dns.name.from_text('*.foo', origin=None)
self.failUnless(n.is_wild())
def testWild2(self):
n = dns.name.from_text('*a.foo', origin=None)
self.failUnless(not n.is_wild())
def testWild3(self):
n = dns.name.from_text('a.*.foo', origin=None)
self.failUnless(not n.is_wild())
def testWild4(self):
self.failUnless(not dns.name.root.is_wild())
def testWild5(self):
self.failUnless(not dns.name.empty.is_wild())
def testHash1(self):
n1 = dns.name.from_text('fOo.COM')
n2 = dns.name.from_text('foo.com')
self.failUnless(hash(n1) == hash(n2))
def testCompare1(self):
n1 = dns.name.from_text('a')
n2 = dns.name.from_text('b')
self.failUnless(n1 < n2)
self.failUnless(n2 > n1)
def testCompare2(self):
n1 = dns.name.from_text('')
n2 = dns.name.from_text('b')
self.failUnless(n1 < n2)
self.failUnless(n2 > n1)
def testCompare3(self):
self.failUnless(dns.name.empty < dns.name.root)
self.failUnless(dns.name.root > dns.name.empty)
def testCompare4(self):
self.failUnless(dns.name.root != 1)
def testCompare5(self):
self.failUnless(dns.name.root < 1 or dns.name.root > 1)
def testSubdomain1(self):
self.failUnless(not dns.name.empty.is_subdomain(dns.name.root))
def testSubdomain2(self):
self.failUnless(not dns.name.root.is_subdomain(dns.name.empty))
def testSubdomain3(self):
n = dns.name.from_text('foo', origin=self.origin)
self.failUnless(n.is_subdomain(self.origin))
def testSubdomain4(self):
n = dns.name.from_text('foo', origin=self.origin)
self.failUnless(n.is_subdomain(dns.name.root))
def testSubdomain5(self):
n = dns.name.from_text('foo', origin=self.origin)
self.failUnless(n.is_subdomain(n))
def testSuperdomain1(self):
self.failUnless(not dns.name.empty.is_superdomain(dns.name.root))
def testSuperdomain2(self):
self.failUnless(not dns.name.root.is_superdomain(dns.name.empty))
def testSuperdomain3(self):
n = dns.name.from_text('foo', origin=self.origin)
self.failUnless(self.origin.is_superdomain(n))
def testSuperdomain4(self):
n = dns.name.from_text('foo', origin=self.origin)
self.failUnless(dns.name.root.is_superdomain(n))
def testSuperdomain5(self):
n = dns.name.from_text('foo', origin=self.origin)
self.failUnless(n.is_superdomain(n))
def testCanonicalize1(self):
n = dns.name.from_text('FOO.bar', origin=self.origin)
c = n.canonicalize()
self.failUnless(c.labels == ('foo', 'bar', 'example', ''))
def testToText1(self):
n = dns.name.from_text('FOO.bar', origin=self.origin)
t = n.to_text()
self.failUnless(t == 'FOO.bar.example.')
def testToText2(self):
n = dns.name.from_text('FOO.bar', origin=self.origin)
t = n.to_text(True)
self.failUnless(t == 'FOO.bar.example')
def testToText3(self):
n = dns.name.from_text('FOO.bar', origin=None)
t = n.to_text()
self.failUnless(t == 'FOO.bar')
def testToText4(self):
t = dns.name.empty.to_text()
self.failUnless(t == '@')
def testToText5(self):
t = dns.name.root.to_text()
self.failUnless(t == '.')
def testToText6(self):
n = dns.name.from_text('FOO bar', origin=None)
t = n.to_text()
self.failUnless(t == r'FOO\032bar')
def testToText7(self):
n = dns.name.from_text(r'FOO\.bar', origin=None)
t = n.to_text()
self.failUnless(t == r'FOO\.bar')
def testToText8(self):
n = dns.name.from_text(r'\070OO\.bar', origin=None)
t = n.to_text()
self.failUnless(t == r'FOO\.bar')
def testSlice1(self):
n = dns.name.from_text(r'a.b.c.', origin=None)
s = n[:]
self.failUnless(s == ('a', 'b', 'c', ''))
def testSlice2(self):
n = dns.name.from_text(r'a.b.c.', origin=None)
s = n[:2]
self.failUnless(s == ('a', 'b'))
def testSlice3(self):
n = dns.name.from_text(r'a.b.c.', origin=None)
s = n[2:]
self.failUnless(s == ('c', ''))
def testEmptyLabel1(self):
def bad():
n = dns.name.Name(['a', '', 'b'])
self.failUnlessRaises(dns.name.EmptyLabel, bad)
def testEmptyLabel2(self):
def bad():
n = dns.name.Name(['', 'b'])
self.failUnlessRaises(dns.name.EmptyLabel, bad)
def testEmptyLabel3(self):
n = dns.name.Name(['b', ''])
self.failUnless(n)
def testLongLabel(self):
n = dns.name.Name(['a' * 63])
self.failUnless(n)
def testLabelTooLong(self):
def bad():
n = dns.name.Name(['a' * 64, 'b'])
self.failUnlessRaises(dns.name.LabelTooLong, bad)
def testLongName(self):
n = dns.name.Name(['a' * 63, 'a' * 63, 'a' * 63, 'a' * 62])
self.failUnless(n)
def testNameTooLong(self):
def bad():
n = dns.name.Name(['a' * 63, 'a' * 63, 'a' * 63, 'a' * 63])
self.failUnlessRaises(dns.name.NameTooLong, bad)
def testConcat1(self):
n1 = dns.name.Name(['a', 'b'])
n2 = dns.name.Name(['c', 'd'])
e = dns.name.Name(['a', 'b', 'c', 'd'])
r = n1 + n2
self.failUnless(r == e)
def testConcat2(self):
n1 = dns.name.Name(['a', 'b'])
n2 = dns.name.Name([])
e = dns.name.Name(['a', 'b'])
r = n1 + n2
self.failUnless(r == e)
def testConcat2(self):
n1 = dns.name.Name([])
n2 = dns.name.Name(['a', 'b'])
e = dns.name.Name(['a', 'b'])
r = n1 + n2
self.failUnless(r == e)
def testConcat3(self):
n1 = dns.name.Name(['a', 'b', ''])
n2 = dns.name.Name([])
e = dns.name.Name(['a', 'b', ''])
r = n1 + n2
self.failUnless(r == e)
def testConcat4(self):
n1 = dns.name.Name(['a', 'b'])
n2 = dns.name.Name(['c', ''])
e = dns.name.Name(['a', 'b', 'c', ''])
r = n1 + n2
self.failUnless(r == e)
def testConcat5(self):
def bad():
n1 = dns.name.Name(['a', 'b', ''])
n2 = dns.name.Name(['c'])
r = n1 + n2
self.failUnlessRaises(dns.name.AbsoluteConcatenation, bad)
def testBadEscape(self):
def bad():
n = dns.name.from_text(r'a.b\0q1.c.')
print n
self.failUnlessRaises(dns.name.BadEscape, bad)
def testDigestable1(self):
n = dns.name.from_text('FOO.bar')
d = n.to_digestable()
self.failUnless(d == '\x03foo\x03bar\x00')
def testDigestable2(self):
n1 = dns.name.from_text('FOO.bar')
n2 = dns.name.from_text('foo.BAR.')
d1 = n1.to_digestable()
d2 = n2.to_digestable()
self.failUnless(d1 == d2)
def testDigestable3(self):
d = dns.name.root.to_digestable()
self.failUnless(d == '\x00')
def testDigestable4(self):
n = dns.name.from_text('FOO.bar', None)
d = n.to_digestable(dns.name.root)
self.failUnless(d == '\x03foo\x03bar\x00')
def testBadDigestable(self):
def bad():
n = dns.name.from_text('FOO.bar', None)
d = n.to_digestable()
self.failUnlessRaises(dns.name.NeedAbsoluteNameOrOrigin, bad)
def testToWire1(self):
n = dns.name.from_text('FOO.bar')
f = cStringIO.StringIO()
compress = {}
n.to_wire(f, compress)
self.failUnless(f.getvalue() == '\x03FOO\x03bar\x00')
def testToWire2(self):
n = dns.name.from_text('FOO.bar')
f = cStringIO.StringIO()
compress = {}
n.to_wire(f, compress)
n.to_wire(f, compress)
self.failUnless(f.getvalue() == '\x03FOO\x03bar\x00\xc0\x00')
def testToWire3(self):
n1 = dns.name.from_text('FOO.bar')
n2 = dns.name.from_text('foo.bar')
f = cStringIO.StringIO()
compress = {}
n1.to_wire(f, compress)
n2.to_wire(f, compress)
self.failUnless(f.getvalue() == '\x03FOO\x03bar\x00\xc0\x00')
def testToWire4(self):
n1 = dns.name.from_text('FOO.bar')
n2 = dns.name.from_text('a.foo.bar')
f = cStringIO.StringIO()
compress = {}
n1.to_wire(f, compress)
n2.to_wire(f, compress)
self.failUnless(f.getvalue() == '\x03FOO\x03bar\x00\x01\x61\xc0\x00')
def testToWire5(self):
n1 = dns.name.from_text('FOO.bar')
n2 = dns.name.from_text('a.foo.bar')
f = cStringIO.StringIO()
compress = {}
n1.to_wire(f, compress)
n2.to_wire(f, None)
self.failUnless(f.getvalue() == \
'\x03FOO\x03bar\x00\x01\x61\x03foo\x03bar\x00')
def testToWire6(self):
n = dns.name.from_text('FOO.bar')
v = n.to_wire()
self.failUnless(v == '\x03FOO\x03bar\x00')
def testBadToWire(self):
def bad():
n = dns.name.from_text('FOO.bar', None)
f = cStringIO.StringIO()
compress = {}
n.to_wire(f, compress)
self.failUnlessRaises(dns.name.NeedAbsoluteNameOrOrigin, bad)
def testSplit1(self):
n = dns.name.from_text('foo.bar.')
(prefix, suffix) = n.split(2)
ep = dns.name.from_text('foo', None)
es = dns.name.from_text('bar.', None)
self.failUnless(prefix == ep and suffix == es)
def testSplit2(self):
n = dns.name.from_text('foo.bar.')
(prefix, suffix) = n.split(1)
ep = dns.name.from_text('foo.bar', None)
es = dns.name.from_text('.', None)
self.failUnless(prefix == ep and suffix == es)
def testSplit3(self):
n = dns.name.from_text('foo.bar.')
(prefix, suffix) = n.split(0)
ep = dns.name.from_text('foo.bar.', None)
es = dns.name.from_text('', None)
self.failUnless(prefix == ep and suffix == es)
def testSplit4(self):
n = dns.name.from_text('foo.bar.')
(prefix, suffix) = n.split(3)
ep = dns.name.from_text('', None)
es = dns.name.from_text('foo.bar.', None)
self.failUnless(prefix == ep and suffix == es)
def testBadSplit1(self):
def bad():
n = dns.name.from_text('foo.bar.')
(prefix, suffix) = n.split(-1)
self.failUnlessRaises(ValueError, bad)
def testBadSplit2(self):
def bad():
n = dns.name.from_text('foo.bar.')
(prefix, suffix) = n.split(4)
self.failUnlessRaises(ValueError, bad)
def testRelativize1(self):
n = dns.name.from_text('a.foo.bar.', None)
o = dns.name.from_text('bar.', None)
e = dns.name.from_text('a.foo', None)
self.failUnless(n.relativize(o) == e)
def testRelativize2(self):
n = dns.name.from_text('a.foo.bar.', None)
o = n
e = dns.name.empty
self.failUnless(n.relativize(o) == e)
def testRelativize3(self):
n = dns.name.from_text('a.foo.bar.', None)
o = dns.name.from_text('blaz.', None)
e = n
self.failUnless(n.relativize(o) == e)
def testRelativize4(self):
n = dns.name.from_text('a.foo', None)
o = dns.name.root
e = n
self.failUnless(n.relativize(o) == e)
def testDerelativize1(self):
n = dns.name.from_text('a.foo', None)
o = dns.name.from_text('bar.', None)
e = dns.name.from_text('a.foo.bar.', None)
self.failUnless(n.derelativize(o) == e)
def testDerelativize2(self):
n = dns.name.empty
o = dns.name.from_text('a.foo.bar.', None)
e = o
self.failUnless(n.derelativize(o) == e)
def testDerelativize3(self):
n = dns.name.from_text('a.foo.bar.', None)
o = dns.name.from_text('blaz.', None)
e = n
self.failUnless(n.derelativize(o) == e)
def testChooseRelativity1(self):
n = dns.name.from_text('a.foo.bar.', None)
o = dns.name.from_text('bar.', None)
e = dns.name.from_text('a.foo', None)
self.failUnless(n.choose_relativity(o, True) == e)
def testChooseRelativity2(self):
n = dns.name.from_text('a.foo.bar.', None)
o = dns.name.from_text('bar.', None)
e = n
self.failUnless(n.choose_relativity(o, False) == e)
def testChooseRelativity3(self):
n = dns.name.from_text('a.foo', None)
o = dns.name.from_text('bar.', None)
e = dns.name.from_text('a.foo.bar.', None)
self.failUnless(n.choose_relativity(o, False) == e)
def testChooseRelativity4(self):
n = dns.name.from_text('a.foo', None)
o = None
e = n
self.failUnless(n.choose_relativity(o, True) == e)
def testChooseRelativity5(self):
n = dns.name.from_text('a.foo', None)
o = None
e = n
self.failUnless(n.choose_relativity(o, False) == e)
def testChooseRelativity6(self):
n = dns.name.from_text('a.foo.', None)
o = None
e = n
self.failUnless(n.choose_relativity(o, True) == e)
def testChooseRelativity7(self):
n = dns.name.from_text('a.foo.', None)
o = None
e = n
self.failUnless(n.choose_relativity(o, False) == e)
def testFromWire1(self):
w = '\x03foo\x00\xc0\x00'
(n1, cused1) = dns.name.from_wire(w, 0)
(n2, cused2) = dns.name.from_wire(w, cused1)
en1 = dns.name.from_text('foo.')
en2 = en1
ecused1 = 5
ecused2 = 2
self.failUnless(n1 == en1 and cused1 == ecused1 and \
n2 == en2 and cused2 == ecused2)
def testFromWire1(self):
w = '\x03foo\x00\x01a\xc0\x00\x01b\xc0\x05'
current = 0
(n1, cused1) = dns.name.from_wire(w, current)
current += cused1
(n2, cused2) = dns.name.from_wire(w, current)
current += cused2
(n3, cused3) = dns.name.from_wire(w, current)
en1 = dns.name.from_text('foo.')
en2 = dns.name.from_text('a.foo.')
en3 = dns.name.from_text('b.a.foo.')
ecused1 = 5
ecused2 = 4
ecused3 = 4
self.failUnless(n1 == en1 and cused1 == ecused1 and \
n2 == en2 and cused2 == ecused2 and \
n3 == en3 and cused3 == ecused3)
def testBadFromWire1(self):
def bad():
w = '\x03foo\xc0\x04'
(n, cused) = dns.name.from_wire(w, 0)
self.failUnlessRaises(dns.name.BadPointer, bad)
def testBadFromWire2(self):
def bad():
w = '\x03foo\xc0\x05'
(n, cused) = dns.name.from_wire(w, 0)
self.failUnlessRaises(dns.name.BadPointer, bad)
def testBadFromWire3(self):
def bad():
w = '\xbffoo'
(n, cused) = dns.name.from_wire(w, 0)
self.failUnlessRaises(dns.name.BadLabelType, bad)
def testBadFromWire4(self):
def bad():
w = '\x41foo'
(n, cused) = dns.name.from_wire(w, 0)
self.failUnlessRaises(dns.name.BadLabelType, bad)
def testParent1(self):
n = dns.name.from_text('foo.bar.')
self.failUnless(n.parent() == dns.name.from_text('bar.'))
self.failUnless(n.parent().parent() == dns.name.root)
def testParent2(self):
n = dns.name.from_text('foo.bar', None)
self.failUnless(n.parent() == dns.name.from_text('bar', None))
self.failUnless(n.parent().parent() == dns.name.empty)
def testParent3(self):
def bad():
n = dns.name.root
n.parent()
self.failUnlessRaises(dns.name.NoParent, bad)
def testParent4(self):
def bad():
n = dns.name.empty
n.parent()
self.failUnlessRaises(dns.name.NoParent, bad)
def testFromUnicode1(self):
n = dns.name.from_text(u'foo.bar')
self.failUnless(n.labels == ('foo', 'bar', ''))
def testFromUnicode2(self):
n = dns.name.from_text(u'foo\u1234bar.bar')
self.failUnless(n.labels == ('xn--foobar-r5z', 'bar', ''))
def testFromUnicodeAlternateDot1(self):
n = dns.name.from_text(u'foo\u3002bar')
self.failUnless(n.labels == ('foo', 'bar', ''))
def testFromUnicodeAlternateDot2(self):
n = dns.name.from_text(u'foo\uff0ebar')
self.failUnless(n.labels == ('foo', 'bar', ''))
def testFromUnicodeAlternateDot3(self):
n = dns.name.from_text(u'foo\uff61bar')
self.failUnless(n.labels == ('foo', 'bar', ''))
def testToUnicode1(self):
n = dns.name.from_text(u'foo.bar')
s = n.to_unicode()
self.failUnless(s == u'foo.bar.')
def testToUnicode2(self):
n = dns.name.from_text(u'foo\u1234bar.bar')
s = n.to_unicode()
self.failUnless(s == u'foo\u1234bar.bar.')
def testToUnicode3(self):
n = dns.name.from_text('foo.bar')
s = n.to_unicode()
self.failUnless(s == u'foo.bar.')
def testReverseIPv4(self):
e = dns.name.from_text('1.0.0.127.in-addr.arpa.')
n = dns.reversename.from_address('127.0.0.1')
self.failUnless(e == n)
def testReverseIPv6(self):
e = dns.name.from_text('1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.ip6.arpa.')
n = dns.reversename.from_address('::1')
self.failUnless(e == n)
def testBadReverseIPv4(self):
def bad():
n = dns.reversename.from_address('127.0.foo.1')
self.failUnlessRaises(socket.error, bad)
def testBadReverseIPv6(self):
def bad():
n = dns.reversename.from_address('::1::1')
self.failUnlessRaises(socket.error, bad)
def testForwardIPv4(self):
n = dns.name.from_text('1.0.0.127.in-addr.arpa.')
e = '127.0.0.1'
text = dns.reversename.to_address(n)
self.failUnless(text == e)
def testForwardIPv6(self):
n = dns.name.from_text('1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.ip6.arpa.')
e = '::1'
text = dns.reversename.to_address(n)
self.failUnless(text == e)
def testE164ToEnum(self):
text = '+1 650 555 1212'
e = dns.name.from_text('2.1.2.1.5.5.5.0.5.6.1.e164.arpa.')
n = dns.e164.from_e164(text)
self.failUnless(n == e)
def testEnumToE164(self):
n = dns.name.from_text('2.1.2.1.5.5.5.0.5.6.1.e164.arpa.')
e = '+16505551212'
text = dns.e164.to_e164(n)
self.failUnless(text == e)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 24,929 | Python | .py | 574 | 34.731707 | 1,023 | 0.594751 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,088 | ntoaaton.py | wummel_linkchecker/third_party/dnspython/tests/ntoaaton.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.exception
import dns.ipv6
class NtoAAtoNTestCase(unittest.TestCase):
def test_aton1(self):
a = dns.ipv6.inet_aton('::')
self.failUnless(a == '\x00' * 16)
def test_aton2(self):
a = dns.ipv6.inet_aton('::1')
self.failUnless(a == '\x00' * 15 + '\x01')
def test_aton3(self):
a = dns.ipv6.inet_aton('::10.0.0.1')
self.failUnless(a == '\x00' * 12 + '\x0a\x00\x00\x01')
def test_aton4(self):
a = dns.ipv6.inet_aton('abcd::dcba')
self.failUnless(a == '\xab\xcd' + '\x00' * 12 + '\xdc\xba')
def test_aton5(self):
a = dns.ipv6.inet_aton('1:2:3:4:5:6:7:8')
self.failUnless(a == \
'00010002000300040005000600070008'.decode('hex_codec'))
def test_bad_aton1(self):
def bad():
a = dns.ipv6.inet_aton('abcd:dcba')
self.failUnlessRaises(dns.exception.SyntaxError, bad)
def test_bad_aton2(self):
def bad():
a = dns.ipv6.inet_aton('abcd::dcba::1')
self.failUnlessRaises(dns.exception.SyntaxError, bad)
def test_bad_aton3(self):
def bad():
a = dns.ipv6.inet_aton('1:2:3:4:5:6:7:8:9')
self.failUnlessRaises(dns.exception.SyntaxError, bad)
def test_aton1(self):
a = dns.ipv6.inet_aton('::')
self.failUnless(a == '\x00' * 16)
def test_aton2(self):
a = dns.ipv6.inet_aton('::1')
self.failUnless(a == '\x00' * 15 + '\x01')
def test_aton3(self):
a = dns.ipv6.inet_aton('::10.0.0.1')
self.failUnless(a == '\x00' * 12 + '\x0a\x00\x00\x01')
def test_aton4(self):
a = dns.ipv6.inet_aton('abcd::dcba')
self.failUnless(a == '\xab\xcd' + '\x00' * 12 + '\xdc\xba')
def test_ntoa1(self):
b = '00010002000300040005000600070008'.decode('hex_codec')
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '1:2:3:4:5:6:7:8')
def test_ntoa2(self):
b = '\x00' * 16
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '::')
def test_ntoa3(self):
b = '\x00' * 15 + '\x01'
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '::1')
def test_ntoa4(self):
b = '\x80' + '\x00' * 15
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '8000::')
def test_ntoa5(self):
b = '\x01\xcd' + '\x00' * 12 + '\x03\xef'
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '1cd::3ef')
def test_ntoa6(self):
b = 'ffff00000000ffff000000000000ffff'.decode('hex_codec')
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == 'ffff:0:0:ffff::ffff')
def test_ntoa7(self):
b = '00000000ffff000000000000ffffffff'.decode('hex_codec')
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '0:0:ffff::ffff:ffff')
def test_ntoa8(self):
b = 'ffff0000ffff00000000ffff00000000'.decode('hex_codec')
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == 'ffff:0:ffff::ffff:0:0')
def test_ntoa9(self):
b = '0000000000000000000000000a000001'.decode('hex_codec')
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '::10.0.0.1')
def test_ntoa10(self):
b = '0000000000000000000000010a000001'.decode('hex_codec')
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '::1:a00:1')
def test_ntoa11(self):
b = '00000000000000000000ffff0a000001'.decode('hex_codec')
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '::ffff:10.0.0.1')
def test_ntoa12(self):
b = '000000000000000000000000ffffffff'.decode('hex_codec')
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '::255.255.255.255')
def test_ntoa13(self):
b = '00000000000000000000ffffffffffff'.decode('hex_codec')
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '::ffff:255.255.255.255')
def test_ntoa14(self):
b = '0000000000000000000000000001ffff'.decode('hex_codec')
t = dns.ipv6.inet_ntoa(b)
self.failUnless(t == '::0.1.255.255')
def test_bad_ntoa1(self):
def bad():
a = dns.ipv6.inet_ntoa('')
self.failUnlessRaises(ValueError, bad)
def test_bad_ntoa2(self):
def bad():
a = dns.ipv6.inet_ntoa('\x00' * 17)
self.failUnlessRaises(ValueError, bad)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 5,198 | Python | .py | 124 | 34.532258 | 79 | 0.608092 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,089 | rrset.py | wummel_linkchecker/third_party/dnspython/tests/rrset.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.rrset
class RRsetTestCase(unittest.TestCase):
    """Equality semantics of dns.rrset: as exercised below, owner-name
    comparison is case-insensitive, rdata order and TTL are ignored, and
    the rdata sets must match exactly.

    Uses assertEqual/assertNotEqual in place of the deprecated
    failUnless(x == y) / failUnless(x != y) idiom.
    """

    def testEqual1(self):
        # Same rdatas in a different order; owner differs only in case.
        r1 = dns.rrset.from_text('foo', 300, 'in', 'a', '10.0.0.1', '10.0.0.2')
        r2 = dns.rrset.from_text('FOO', 300, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertEqual(r1, r2)

    def testEqual2(self):
        # TTL does not participate in equality.
        r1 = dns.rrset.from_text('foo', 300, 'in', 'a', '10.0.0.1', '10.0.0.2')
        r2 = dns.rrset.from_text('FOO', 600, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertEqual(r1, r2)

    def testNotEqual1(self):
        # Different owner names ('fooa' vs 'foo').
        r1 = dns.rrset.from_text('fooa', 30, 'in', 'a', '10.0.0.1', '10.0.0.2')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)

    def testNotEqual2(self):
        # Same-size rdata sets with one differing address.
        r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1', '10.0.0.3')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)

    def testNotEqual3(self):
        # Superset of rdatas on the left.
        r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1', '10.0.0.2',
                                 '10.0.0.3')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)

    def testNotEqual4(self):
        # Subset of rdatas on the left.
        r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 2,274 | Python | .py | 44 | 45.795455 | 79 | 0.612613 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,090 | zone.py | wummel_linkchecker/third_party/dnspython/tests/zone.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import cStringIO
import filecmp
import os
import unittest
import dns.exception
import dns.rdata
import dns.rdataclass
import dns.rdatatype
import dns.rrset
import dns.zone
# Master-file fixtures for the zone tests.  The exact text (TTLs, origins,
# record order) is part of the expected behaviour -- do not reformat.
# A small zone with a $TTL change and an $ORIGIN change partway through.
example_text = """$TTL 3600
$ORIGIN example.
@ soa foo bar 1 2 3 4 5
@ ns ns1
@ ns ns2
ns1 a 10.0.0.1
ns2 a 10.0.0.2
$TTL 300
$ORIGIN foo.example.
bar mx 0 blaz
"""
# Canonical (sorted, relativized) rendering of example_text.
example_text_output = """@ 3600 IN SOA foo bar 1 2 3 4 5
@ 3600 IN NS ns1
@ 3600 IN NS ns2
bar.foo 300 IN MX 0 blaz.foo
ns1 3600 IN A 10.0.0.1
ns2 3600 IN A 10.0.0.2
"""
# Differs from example_text_output only in the ns2 address (inequality test).
something_quite_similar = """@ 3600 IN SOA foo bar 1 2 3 4 5
@ 3600 IN NS ns1
@ 3600 IN NS ns2
bar.foo 300 IN MX 0 blaz.foo
ns1 3600 IN A 10.0.0.1
ns2 3600 IN A 10.0.0.3
"""
# Differs from example_text in every name and address (inequality test).
something_different = """@ 3600 IN SOA fooa bar 1 2 3 4 5
@ 3600 IN NS ns11
@ 3600 IN NS ns21
bar.fooa 300 IN MX 0 blaz.fooa
ns11 3600 IN A 10.0.0.11
ns21 3600 IN A 10.0.0.21
"""
# Exercises TTL suffix parsing ('1h', '1d1s', '1w1D1h1m1S', mixed case).
ttl_example_text = """$TTL 1h
$ORIGIN example.
@ soa foo bar 1 2 3 4 5
@ ns ns1
@ ns ns2
ns1 1d1s a 10.0.0.1
ns2 1w1D1h1m1S a 10.0.0.2
"""
# Missing SOA record: dns.zone.NoSOA expected on parse.
no_soa_text = """$TTL 1h
$ORIGIN example.
@ ns ns1
@ ns ns2
ns1 1d1s a 10.0.0.1
ns2 1w1D1h1m1S a 10.0.0.2
"""
# Missing NS records: dns.zone.NoNS expected on parse.
no_ns_text = """$TTL 1h
$ORIGIN example.
@ soa foo bar 1 2 3 4 5
"""
# $INCLUDE pulling in the on-disk 'example' master file (testInclude).
include_text = """$INCLUDE "example"
"""
# Unknown $FOO directive: dns.exception.SyntaxError expected on parse.
bad_directive_text = """$FOO bar
$ORIGIN example.
@ soa foo bar 1 2 3 4 5
@ ns ns1
@ ns ns2
ns1 1d1s a 10.0.0.1
ns2 1w1D1h1m1S a 10.0.0.2
"""
# Set to True while debugging to keep the generated *.out files around.
_keep_output = False
class ZoneTestCase(unittest.TestCase):
    """Tests for dns.zone: file/text round-trips, equality, rdataset and
    node accessors, iteration, TTL handling, and error cases.

    NOTE(review): the *FromFile*, torture and include tests read the
    master files 'example', 'example1.good' and 'example2.good' from the
    current working directory -- presumably the suite is run from the
    tests/ directory; confirm before relocating these tests.
    """
    def testFromFile1(self):
        # Parse the on-disk zone, write it back relativized, and compare
        # byte-for-byte against the known-good rendering.
        z = dns.zone.from_file('example', 'example')
        ok = False
        try:
            z.to_file('example1.out', nl='\x0a')
            ok = filecmp.cmp('example1.out', 'example1.good')
        finally:
            if not _keep_output:
                os.unlink('example1.out')
        self.failUnless(ok)
    def testFromFile2(self):
        # Same round-trip, but with absolute (non-relativized) names.
        z = dns.zone.from_file('example', 'example', relativize=False)
        ok = False
        try:
            z.to_file('example2.out', relativize=False, nl='\x0a')
            ok = filecmp.cmp('example2.out', 'example2.good')
        finally:
            if not _keep_output:
                os.unlink('example2.out')
        self.failUnless(ok)
    def testFromText(self):
        # Rendering node-by-node in sorted name order must reproduce the
        # canonical output fixture exactly.
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        f = cStringIO.StringIO()
        names = z.nodes.keys()
        names.sort()
        for n in names:
            print >> f, z[n].to_text(n)
        self.failUnless(f.getvalue() == example_text_output)
    def testTorture1(self):
        #
        # Read a zone containing all our supported RR types, and
        # for each RR in the zone, convert the rdata into wire format
        # and then back out, and see if we get equal rdatas.
        #
        f = cStringIO.StringIO()
        o = dns.name.from_text('example.')
        z = dns.zone.from_file('example', o)
        for (name, node) in z.iteritems():
            for rds in node:
                for rd in rds:
                    f.seek(0)
                    f.truncate()
                    rd.to_wire(f, origin=o)
                    wire = f.getvalue()
                    rd2 = dns.rdata.from_wire(rds.rdclass, rds.rdtype,
                                              wire, 0, len(wire),
                                              origin = o)
                    self.failUnless(rd == rd2)
    # -- zone equality / inequality --
    def testEqual(self):
        z1 = dns.zone.from_text(example_text, 'example.', relativize=True)
        z2 = dns.zone.from_text(example_text_output, 'example.',
                                relativize=True)
        self.failUnless(z1 == z2)
    def testNotEqual1(self):
        z1 = dns.zone.from_text(example_text, 'example.', relativize=True)
        z2 = dns.zone.from_text(something_quite_similar, 'example.',
                                relativize=True)
        self.failUnless(z1 != z2)
    def testNotEqual2(self):
        z1 = dns.zone.from_text(example_text, 'example.', relativize=True)
        z2 = dns.zone.from_text(something_different, 'example.',
                                relativize=True)
        self.failUnless(z1 != z2)
    def testNotEqual3(self):
        # Different origins must compare unequal as well.
        z1 = dns.zone.from_text(example_text, 'example.', relativize=True)
        z2 = dns.zone.from_text(something_different, 'example2.',
                                relativize=True)
        self.failUnless(z1 != z2)
    # -- zone-level rdataset / rrset accessors: find_* raises KeyError on a
    # missing type, get_* returns None instead --
    def testFindRdataset1(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        rds = z.find_rdataset('@', 'soa')
        exrds = dns.rdataset.from_text('IN', 'SOA', 300, 'foo bar 1 2 3 4 5')
        self.failUnless(rds == exrds)
    def testFindRdataset2(self):
        def bad():
            z = dns.zone.from_text(example_text, 'example.', relativize=True)
            rds = z.find_rdataset('@', 'loc')
        self.failUnlessRaises(KeyError, bad)
    def testFindRRset1(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        rrs = z.find_rrset('@', 'soa')
        exrrs = dns.rrset.from_text('@', 300, 'IN', 'SOA', 'foo bar 1 2 3 4 5')
        self.failUnless(rrs == exrrs)
    def testFindRRset2(self):
        def bad():
            z = dns.zone.from_text(example_text, 'example.', relativize=True)
            rrs = z.find_rrset('@', 'loc')
        self.failUnlessRaises(KeyError, bad)
    def testGetRdataset1(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        rds = z.get_rdataset('@', 'soa')
        exrds = dns.rdataset.from_text('IN', 'SOA', 300, 'foo bar 1 2 3 4 5')
        self.failUnless(rds == exrds)
    def testGetRdataset2(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        rds = z.get_rdataset('@', 'loc')
        self.failUnless(rds == None)
    def testGetRRset1(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        rrs = z.get_rrset('@', 'soa')
        exrrs = dns.rrset.from_text('@', 300, 'IN', 'SOA', 'foo bar 1 2 3 4 5')
        self.failUnless(rrs == exrrs)
    def testGetRRset2(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        rrs = z.get_rrset('@', 'loc')
        self.failUnless(rrs == None)
    # -- zone editing --
    def testReplaceRdataset1(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        rdataset = dns.rdataset.from_text('in', 'ns', 300, 'ns3', 'ns4')
        z.replace_rdataset('@', rdataset)
        rds = z.get_rdataset('@', 'ns')
        self.failUnless(rds is rdataset)
    def testReplaceRdataset2(self):
        # replace_rdataset also installs a type that was not present before.
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        rdataset = dns.rdataset.from_text('in', 'txt', 300, '"foo"')
        z.replace_rdataset('@', rdataset)
        rds = z.get_rdataset('@', 'txt')
        self.failUnless(rds is rdataset)
    def testDeleteRdataset1(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        z.delete_rdataset('@', 'ns')
        rds = z.get_rdataset('@', 'ns')
        self.failUnless(rds is None)
    def testDeleteRdataset2(self):
        # Deleting a node's last rdataset removes the node itself.
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        z.delete_rdataset('ns1', 'a')
        node = z.get_node('ns1')
        self.failUnless(node is None)
    # -- node-level accessors (same find/get/delete contract as above) --
    def testNodeFindRdataset1(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        node = z['@']
        rds = node.find_rdataset(dns.rdataclass.IN, dns.rdatatype.SOA)
        exrds = dns.rdataset.from_text('IN', 'SOA', 300, 'foo bar 1 2 3 4 5')
        self.failUnless(rds == exrds)
    def testNodeFindRdataset2(self):
        def bad():
            z = dns.zone.from_text(example_text, 'example.', relativize=True)
            node = z['@']
            rds = node.find_rdataset(dns.rdataclass.IN, dns.rdatatype.LOC)
        self.failUnlessRaises(KeyError, bad)
    def testNodeGetRdataset1(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        node = z['@']
        rds = node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.SOA)
        exrds = dns.rdataset.from_text('IN', 'SOA', 300, 'foo bar 1 2 3 4 5')
        self.failUnless(rds == exrds)
    def testNodeGetRdataset2(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        node = z['@']
        rds = node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.LOC)
        self.failUnless(rds == None)
    def testNodeDeleteRdataset1(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        node = z['@']
        rds = node.delete_rdataset(dns.rdataclass.IN, dns.rdatatype.SOA)
        rds = node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.SOA)
        self.failUnless(rds == None)
    def testNodeDeleteRdataset2(self):
        # Deleting a type that is absent is a silent no-op.
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        node = z['@']
        rds = node.delete_rdataset(dns.rdataclass.IN, dns.rdatatype.LOC)
        rds = node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.LOC)
        self.failUnless(rds == None)
    # -- iteration --
    def testIterateRdatasets(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        ns = [n for n, r in z.iterate_rdatasets('A')]
        ns.sort()
        self.failUnless(ns == [dns.name.from_text('ns1', None),
                               dns.name.from_text('ns2', None)])
    def testIterateAllRdatasets(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        ns = [n for n, r in z.iterate_rdatasets()]
        ns.sort()
        self.failUnless(ns == [dns.name.from_text('@', None),
                               dns.name.from_text('@', None),
                               dns.name.from_text('bar.foo', None),
                               dns.name.from_text('ns1', None),
                               dns.name.from_text('ns2', None)])
    def testIterateRdatas(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        l = list(z.iterate_rdatas('A'))
        l.sort()
        exl = [(dns.name.from_text('ns1', None),
                3600,
                dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A,
                                    '10.0.0.1')),
               (dns.name.from_text('ns2', None),
                3600,
                dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A,
                                    '10.0.0.2'))]
        self.failUnless(l == exl)
    def testIterateAllRdatas(self):
        z = dns.zone.from_text(example_text, 'example.', relativize=True)
        l = list(z.iterate_rdatas())
        l.sort()
        exl = [(dns.name.from_text('@', None),
                3600,
                dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.NS,
                                    'ns1')),
               (dns.name.from_text('@', None),
                3600,
                dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.NS,
                                    'ns2')),
               (dns.name.from_text('@', None),
                3600,
                dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.SOA,
                                    'foo bar 1 2 3 4 5')),
               (dns.name.from_text('bar.foo', None),
                300,
                dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.MX,
                                    '0 blaz.foo')),
               (dns.name.from_text('ns1', None),
                3600,
                dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A,
                                    '10.0.0.1')),
               (dns.name.from_text('ns2', None),
                3600,
                dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A,
                                    '10.0.0.2'))]
        self.failUnless(l == exl)
    # -- TTL suffix parsing (see ttl_example_text) --
    def testTTLs(self):
        z = dns.zone.from_text(ttl_example_text, 'example.', relativize=True)
        n = z['@']
        rds = n.get_rdataset(dns.rdataclass.IN, dns.rdatatype.SOA)
        self.failUnless(rds.ttl == 3600)
        n = z['ns1']
        rds = n.get_rdataset(dns.rdataclass.IN, dns.rdatatype.A)
        self.failUnless(rds.ttl == 86401)
        n = z['ns2']
        rds = n.get_rdataset(dns.rdataclass.IN, dns.rdatatype.A)
        self.failUnless(rds.ttl == 694861)
    # -- error cases --
    def testNoSOA(self):
        def bad():
            z = dns.zone.from_text(no_soa_text, 'example.',
                                   relativize=True)
        self.failUnlessRaises(dns.zone.NoSOA, bad)
    def testNoNS(self):
        def bad():
            z = dns.zone.from_text(no_ns_text, 'example.',
                                   relativize=True)
        self.failUnlessRaises(dns.zone.NoNS, bad)
    def testInclude(self):
        # $INCLUDE of the on-disk file must yield the same zone as reading
        # that file directly.
        z1 = dns.zone.from_text(include_text, 'example.', relativize=True,
                                allow_include=True)
        z2 = dns.zone.from_file('example', 'example.', relativize=True)
        self.failUnless(z1 == z2)
    def testBadDirective(self):
        def bad():
            z = dns.zone.from_text(bad_directive_text, 'example.',
                                   relativize=True)
        self.failUnlessRaises(dns.exception.SyntaxError, bad)
# Allow running this module's tests directly.
if __name__ == '__main__':
    unittest.main()
| 14,036 | Python | .py | 340 | 31.826471 | 79 | 0.579468 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,091 | dnssec.py | wummel_linkchecker/third_party/dnspython/tests/dnssec.py | # Copyright (C) 2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.dnssec
import dns.name
import dns.rdata
import dns.rdataclass
import dns.rdatatype
import dns.rrset
# Captured signing keys, signed rrsets and expected DS digests for the
# dnspython.org and example zones.  The `when`/`when2` timestamps freeze
# validation time inside the signatures' validity windows, so these tests
# keep passing regardless of the real clock.
abs_dnspython_org = dns.name.from_text('dnspython.org')
# RSASHA1 KSK (flags 257) and ZSK (flags 256) for dnspython.org, keyed by
# absolute owner name.
abs_keys = { abs_dnspython_org :
             dns.rrset.from_text('dnspython.org.', 3600, 'IN', 'DNSKEY',
                                 '257 3 5 AwEAAenVTr9L1OMlL1/N2ta0Qj9LLLnnmFWIr1dJoAsWM9BQfsbV7kFZ XbAkER/FY9Ji2o7cELxBwAsVBuWn6IUUAJXLH74YbC1anY0lifjgt29z SwDzuB7zmC7yVYZzUunBulVW4zT0tg1aePbpVL2EtTL8VzREqbJbE25R KuQYHZtFwG8S4iBxJUmT2Bbd0921LLxSQgVoFXlQx/gFV2+UERXcJ5ce iX6A6wc02M/pdg/YbJd2rBa0MYL3/Fz/Xltre0tqsImZGxzi6YtYDs45 NC8gH+44egz82e2DATCVM1ICPmRDjXYTLldQiWA2ZXIWnK0iitl5ue24 7EsWJefrIhE=',
                                 '256 3 5 AwEAAdSSghOGjU33IQZgwZM2Hh771VGXX05olJK49FxpSyuEAjDBXY58 LGU9R2Zgeecnk/b9EAhFu/vCV9oECtiTCvwuVAkt9YEweqYDluQInmgP NGMJCKdSLlnX93DkjDw8rMYv5dqXCuSGPlKChfTJOLQxIAxGloS7lL+c 0CTZydAF')
             }
# Same keys, but keyed by the empty (relative) name for the relative tests.
rel_keys = { dns.name.empty :
             dns.rrset.from_text('@', 3600, 'IN', 'DNSKEY',
                                 '257 3 5 AwEAAenVTr9L1OMlL1/N2ta0Qj9LLLnnmFWIr1dJoAsWM9BQfsbV7kFZ XbAkER/FY9Ji2o7cELxBwAsVBuWn6IUUAJXLH74YbC1anY0lifjgt29z SwDzuB7zmC7yVYZzUunBulVW4zT0tg1aePbpVL2EtTL8VzREqbJbE25R KuQYHZtFwG8S4iBxJUmT2Bbd0921LLxSQgVoFXlQx/gFV2+UERXcJ5ce iX6A6wc02M/pdg/YbJd2rBa0MYL3/Fz/Xltre0tqsImZGxzi6YtYDs45 NC8gH+44egz82e2DATCVM1ICPmRDjXYTLldQiWA2ZXIWnK0iitl5ue24 7EsWJefrIhE=',
                                 '256 3 5 AwEAAdSSghOGjU33IQZgwZM2Hh771VGXX05olJK49FxpSyuEAjDBXY58 LGU9R2Zgeecnk/b9EAhFu/vCV9oECtiTCvwuVAkt9YEweqYDluQInmgP NGMJCKdSLlnX93DkjDw8rMYv5dqXCuSGPlKChfTJOLQxIAxGloS7lL+c 0CTZydAF')
             }
# Fixed validation time for the RSA test vectors.
when = 1290250287
# Signed SOA, a mutated SOA that must fail validation, and the RRSIG.
abs_soa = dns.rrset.from_text('dnspython.org.', 3600, 'IN', 'SOA',
                              'howl.dnspython.org. hostmaster.dnspython.org. 2010020047 3600 1800 604800 3600')
abs_other_soa = dns.rrset.from_text('dnspython.org.', 3600, 'IN', 'SOA',
                                    'foo.dnspython.org. hostmaster.dnspython.org. 2010020047 3600 1800 604800 3600')
abs_soa_rrsig = dns.rrset.from_text('dnspython.org.', 3600, 'IN', 'RRSIG',
                                    'SOA 5 2 3600 20101127004331 20101119213831 61695 dnspython.org. sDUlltRlFTQw5ITFxOXW3TgmrHeMeNpdqcZ4EXxM9FHhIlte6V9YCnDw t6dvM9jAXdIEi03l9H/RAd9xNNW6gvGMHsBGzpvvqFQxIBR2PoiZA1mX /SWHZFdbt4xjYTtXqpyYvrMK0Dt7bUYPadyhPFCJ1B+I8Zi7B5WJEOd0 8vs=')
# Relative-name variants of the same three rrsets.
rel_soa = dns.rrset.from_text('@', 3600, 'IN', 'SOA',
                              'howl hostmaster 2010020047 3600 1800 604800 3600')
rel_other_soa = dns.rrset.from_text('@', 3600, 'IN', 'SOA',
                                    'foo hostmaster 2010020047 3600 1800 604800 3600')
rel_soa_rrsig = dns.rrset.from_text('@', 3600, 'IN', 'RRSIG',
                                    'SOA 5 2 3600 20101127004331 20101119213831 61695 @ sDUlltRlFTQw5ITFxOXW3TgmrHeMeNpdqcZ4EXxM9FHhIlte6V9YCnDw t6dvM9jAXdIEi03l9H/RAd9xNNW6gvGMHsBGzpvvqFQxIBR2PoiZA1mX /SWHZFdbt4xjYTtXqpyYvrMK0Dt7bUYPadyhPFCJ1B+I8Zi7B5WJEOd0 8vs=')
# The SEP (KSK) key alone plus the expected SHA256 DS for it.
sep_key = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.DNSKEY,
                              '257 3 5 AwEAAenVTr9L1OMlL1/N2ta0Qj9LLLnnmFWIr1dJoAsWM9BQfsbV7kFZ XbAkER/FY9Ji2o7cELxBwAsVBuWn6IUUAJXLH74YbC1anY0lifjgt29z SwDzuB7zmC7yVYZzUunBulVW4zT0tg1aePbpVL2EtTL8VzREqbJbE25R KuQYHZtFwG8S4iBxJUmT2Bbd0921LLxSQgVoFXlQx/gFV2+UERXcJ5ce iX6A6wc02M/pdg/YbJd2rBa0MYL3/Fz/Xltre0tqsImZGxzi6YtYDs45 NC8gH+44egz82e2DATCVM1ICPmRDjXYTLldQiWA2ZXIWnK0iitl5ue24 7EsWJefrIhE=')
good_ds = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.DS,
                              '57349 5 2 53A79A3E7488AB44FFC56B2D1109F0699D1796DD977E72108B841F96 E47D7013')
# Fixed validation time for the DSA test vectors.
when2 = 1290425644
# DSA (algorithm 3) fixtures for the 'example' zone.
abs_example = dns.name.from_text('example')
abs_dsa_keys = { abs_example :
                 dns.rrset.from_text('example.', 86400, 'IN', 'DNSKEY',
                                     '257 3 3 CI3nCqyJsiCJHTjrNsJOT4RaszetzcJPYuoH3F9ZTVt3KJXncCVR3bwn 1w0iavKljb9hDlAYSfHbFCp4ic/rvg4p1L8vh5s8ToMjqDNl40A0hUGQ Ybx5hsECyK+qHoajilUX1phYSAD8d9WAGO3fDWzUPBuzR7o85NiZCDxz yXuNVfni0uhj9n1KYhEO5yAbbruDGN89wIZcxMKuQsdUY2GYD93ssnBv a55W6XRABYWayKZ90WkRVODLVYLSn53Pj/wwxGH+XdhIAZJXimrZL4yl My7rtBsLMqq8Ihs4Tows7LqYwY7cp6y/50tw6pj8tFqMYcPUjKZV36l1 M/2t5BVg3i7IK61Aidt6aoC3TDJtzAxg3ZxfjZWJfhHjMJqzQIfbW5b9 q1mjFsW5EUv39RaNnX+3JWPRLyDqD4pIwDyqfutMsdk/Py3paHn82FGp CaOg+nicqZ9TiMZURN/XXy5JoXUNQ3RNvbHCUiPUe18KUkY6mTfnyHld 1l9YCWmzXQVClkx/hOYxjJ4j8Ife58+Obu5X',
                                     '256 3 3 CJE1yb9YRQiw5d2xZrMUMR+cGCTt1bp1KDCefmYKmS+Z1+q9f42ETVhx JRiQwXclYwmxborzIkSZegTNYIV6mrYwbNB27Q44c3UGcspb3PiOw5TC jNPRYEcdwGvDZ2wWy+vkSV/S9tHXY8O6ODiE6abZJDDg/RnITyi+eoDL R3KZ5n/V1f1T1b90rrV6EewhBGQJpQGDogaXb2oHww9Tm6NfXyo7SoMM pbwbzOckXv+GxRPJIQNSF4D4A9E8XCksuzVVdE/0lr37+uoiAiPia38U 5W2QWe/FJAEPLjIp2eTzf0TrADc1pKP1wrA2ASpdzpm/aX3IB5RPp8Ew S9U72eBFZJAUwg635HxJVxH1maG6atzorR566E+e0OZSaxXS9o1o6QqN 3oPlYLGPORDiExilKfez3C/x/yioOupW9K5eKF0gmtaqrHX0oq9s67f/ RIM2xVaKHgG9Vf2cgJIZkhv7sntujr+E4htnRmy9P9BxyFxsItYxPI6Z bzygHAZpGhlI/7ltEGlIwKxyTK3ZKBm67q7B')
                 }
abs_dsa_soa = dns.rrset.from_text('example.', 86400, 'IN', 'SOA',
                                  'ns1.example. hostmaster.example. 2 10800 3600 604800 86400')
abs_other_dsa_soa = dns.rrset.from_text('example.', 86400, 'IN', 'SOA',
                                        'ns1.example. hostmaster.example. 2 10800 3600 604800 86401')
abs_dsa_soa_rrsig = dns.rrset.from_text('example.', 86400, 'IN', 'RRSIG',
                                        'SOA 3 1 86400 20101129143231 20101122112731 42088 example. CGul9SuBofsktunV8cJs4eRs6u+3NCS3yaPKvBbD+pB2C76OUXDZq9U=')
# SEP key for 'example' and its expected SHA1/SHA256 DS records.
example_sep_key = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.DNSKEY,
                                      '257 3 3 CI3nCqyJsiCJHTjrNsJOT4RaszetzcJPYuoH3F9ZTVt3KJXncCVR3bwn 1w0iavKljb9hDlAYSfHbFCp4ic/rvg4p1L8vh5s8ToMjqDNl40A0hUGQ Ybx5hsECyK+qHoajilUX1phYSAD8d9WAGO3fDWzUPBuzR7o85NiZCDxz yXuNVfni0uhj9n1KYhEO5yAbbruDGN89wIZcxMKuQsdUY2GYD93ssnBv a55W6XRABYWayKZ90WkRVODLVYLSn53Pj/wwxGH+XdhIAZJXimrZL4yl My7rtBsLMqq8Ihs4Tows7LqYwY7cp6y/50tw6pj8tFqMYcPUjKZV36l1 M/2t5BVg3i7IK61Aidt6aoC3TDJtzAxg3ZxfjZWJfhHjMJqzQIfbW5b9 q1mjFsW5EUv39RaNnX+3JWPRLyDqD4pIwDyqfutMsdk/Py3paHn82FGp CaOg+nicqZ9TiMZURN/XXy5JoXUNQ3RNvbHCUiPUe18KUkY6mTfnyHld 1l9YCWmzXQVClkx/hOYxjJ4j8Ife58+Obu5X')
example_ds_sha1 = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.DS,
                                      '18673 3 1 71b71d4f3e11bbd71b4eff12cde69f7f9215bbe7')
example_ds_sha256 = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.DS,
                                        '18673 3 2 eb8344cbbf07c9d3d3d6c81d10c76653e28d8611a65e639ef8f716e4e4e5d913')
class DNSSECValidatorTestCase(unittest.TestCase):
    """Signature validation tests for dns.dnssec.

    Validation is performed at the fixed `when`/`when2` timestamps defined
    above, so the RRSIG inception/expiration windows always hold.
    """
    def testAbsoluteRSAGood(self):
        dns.dnssec.validate(abs_soa, abs_soa_rrsig, abs_keys, None, when)
    def testAbsoluteRSABad(self):
        # A mutated SOA must not verify against the original signature.
        def bad():
            dns.dnssec.validate(abs_other_soa, abs_soa_rrsig, abs_keys, None,
                                when)
        self.failUnlessRaises(dns.dnssec.ValidationFailure, bad)
    def testRelativeRSAGood(self):
        # Relative rrsets validate when the origin is supplied explicitly.
        dns.dnssec.validate(rel_soa, rel_soa_rrsig, rel_keys,
                            abs_dnspython_org, when)
    def testRelativeRSABad(self):
        def bad():
            dns.dnssec.validate(rel_other_soa, rel_soa_rrsig, rel_keys,
                                abs_dnspython_org, when)
        self.failUnlessRaises(dns.dnssec.ValidationFailure, bad)
    def testMakeSHA256DS(self):
        ds = dns.dnssec.make_ds(abs_dnspython_org, sep_key, 'SHA256')
        self.failUnless(ds == good_ds)
    def testAbsoluteDSAGood(self):
        dns.dnssec.validate(abs_dsa_soa, abs_dsa_soa_rrsig, abs_dsa_keys, None,
                            when2)
    def testAbsoluteDSABad(self):
        def bad():
            dns.dnssec.validate(abs_other_dsa_soa, abs_dsa_soa_rrsig,
                                abs_dsa_keys, None, when2)
        self.failUnlessRaises(dns.dnssec.ValidationFailure, bad)
    def testMakeExampleSHA1DS(self):
        ds = dns.dnssec.make_ds(abs_example, example_sep_key, 'SHA1')
        self.failUnless(ds == example_ds_sha1)
    def testMakeExampleSHA256DS(self):
        ds = dns.dnssec.make_ds(abs_example, example_sep_key, 'SHA256')
        self.failUnless(ds == example_ds_sha256)
if __name__ == '__main__':
    # The validation tests need pycrypto's number-theory helpers, so skip
    # the whole module gracefully when pycrypto is not installed.
    import_ok = False
    try:
        import Crypto.Util.number
        import_ok = True
    except ImportError:
        # Was a bare `except:`, which would also have swallowed SystemExit
        # and KeyboardInterrupt; only a missing pycrypto should skip.
        pass
    if import_ok:
        unittest.main()
    else:
        print('skipping DNSSEC tests because pycrypto is not installed')
| 9,344 | Python | .py | 112 | 71.571429 | 598 | 0.741466 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,092 | resolver.py | wummel_linkchecker/third_party/dnspython/tests/resolver.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import cStringIO
import select
import sys
import time
import unittest
import dns.name
import dns.message
import dns.name
import dns.rdataclass
import dns.rdatatype
import dns.resolver
resolv_conf = """
/t/t
# comment 1
; comment 2
domain foo
nameserver 10.0.0.1
nameserver 10.0.0.2
"""
message_text = """id 1234
opcode QUERY
rcode NOERROR
flags QR AA RD
;QUESTION
example. IN A
;ANSWER
example. 1 IN A 10.0.0.1
;AUTHORITY
;ADDITIONAL
"""
class BaseResolverTests(object):
    """Resolver tests shared by the polling-backend test cases below.

    Deliberately derived from object rather than unittest.TestCase so the
    tests only run through the concrete subclasses at the end of the file.
    NOTE(review): the zone_for_name tests issue real DNS queries for
    dnspython.org -- they require working network access.
    """
    if sys.platform != 'win32':
        # resolv.conf parsing is only exercised off Windows.
        def testRead(self):
            f = cStringIO.StringIO(resolv_conf)
            r = dns.resolver.Resolver(f)
            self.failUnless(r.nameservers == ['10.0.0.1', '10.0.0.2'] and
                            r.domain == dns.name.from_text('foo'))
    def testCacheExpiration(self):
        # The answer's TTL is 1 second (see message_text); after sleeping
        # 2 seconds a get() must treat the entry as expired.
        message = dns.message.from_text(message_text)
        name = dns.name.from_text('example.')
        answer = dns.resolver.Answer(name, dns.rdatatype.A, dns.rdataclass.IN,
                                     message)
        cache = dns.resolver.Cache()
        cache.put((name, dns.rdatatype.A, dns.rdataclass.IN), answer)
        time.sleep(2)
        self.failUnless(cache.get((name, dns.rdatatype.A, dns.rdataclass.IN))
                        is None)
    def testCacheCleaning(self):
        # Same scenario, but with a 1-second cleaning interval so the
        # expired entry is purged rather than merely hidden.
        message = dns.message.from_text(message_text)
        name = dns.name.from_text('example.')
        answer = dns.resolver.Answer(name, dns.rdatatype.A, dns.rdataclass.IN,
                                     message)
        cache = dns.resolver.Cache(cleaning_interval=1.0)
        cache.put((name, dns.rdatatype.A, dns.rdataclass.IN), answer)
        time.sleep(2)
        self.failUnless(cache.get((name, dns.rdatatype.A, dns.rdataclass.IN))
                        is None)
    def testZoneForName1(self):
        name = dns.name.from_text('www.dnspython.org.')
        ezname = dns.name.from_text('dnspython.org.')
        zname = dns.resolver.zone_for_name(name)
        self.failUnless(zname == ezname)
    def testZoneForName2(self):
        name = dns.name.from_text('a.b.www.dnspython.org.')
        ezname = dns.name.from_text('dnspython.org.')
        zname = dns.resolver.zone_for_name(name)
        self.failUnless(zname == ezname)
    def testZoneForName3(self):
        name = dns.name.from_text('dnspython.org.')
        ezname = dns.name.from_text('dnspython.org.')
        zname = dns.resolver.zone_for_name(name)
        self.failUnless(zname == ezname)
    def testZoneForName4(self):
        # zone_for_name requires an absolute name.
        def bad():
            name = dns.name.from_text('dnspython.org', None)
            zname = dns.resolver.zone_for_name(name)
        self.failUnlessRaises(dns.resolver.NotAbsolute, bad)
class PollingMonkeyPatchMixin(object):
    """Mixin that pins dns.query to a specific polling backend per test.

    Concrete subclasses supply ``polling_backend()``; it is installed in
    setUp and the previously active backend is restored in tearDown, so
    the shared resolver tests run once per available backend.
    """
    def setUp(self):
        # Remember the currently installed backend so tearDown can put it back.
        self._saved_polling_backend = dns.query._polling_backend
        dns.query._set_polling_backend(self.polling_backend())
        unittest.TestCase.setUp(self)

    def tearDown(self):
        dns.query._set_polling_backend(self._saved_polling_backend)
        unittest.TestCase.tearDown(self)
# Concrete case: run the shared resolver tests with the select()-based backend.
class SelectResolverTestCase(PollingMonkeyPatchMixin, BaseResolverTests, unittest.TestCase):
    def polling_backend(self):
        return dns.query._select_for
# poll() is not available on every platform, so the poll-backend variant is
# only defined when select.poll exists.
if hasattr(select, 'poll'):
    class PollResolverTestCase(PollingMonkeyPatchMixin, BaseResolverTests, unittest.TestCase):
        def polling_backend(self):
            return dns.query._poll_for
# Allow running this module's tests directly.
if __name__ == '__main__':
    unittest.main()
| 4,279 | Python | .py | 108 | 33.240741 | 94 | 0.682563 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,093 | tokenizer.py | wummel_linkchecker/third_party/dnspython/tests/tokenizer.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.exception
import dns.tokenizer
# Local shorthand: the assertions below construct many expected Token values.
Token = dns.tokenizer.Token
class TokenizerTestCase(unittest.TestCase):
    """Tests for dns.tokenizer: quoting, whitespace, comments, multiline
    parentheses, unget buffering, EOL handling, and escape processing."""
    # -- quoted strings --
    def testQuotedString1(self):
        tok = dns.tokenizer.Tokenizer(r'"foo"')
        token = tok.get()
        self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, 'foo'))
    def testQuotedString2(self):
        tok = dns.tokenizer.Tokenizer(r'""')
        token = tok.get()
        self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, ''))
    def testQuotedString3(self):
        # Escaped quotes inside a quoted string.
        tok = dns.tokenizer.Tokenizer(r'"\"foo\""')
        token = tok.get()
        self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, '"foo"'))
    def testQuotedString4(self):
        # Decimal escape \010 decodes to the byte 0x0a inside quotes.
        tok = dns.tokenizer.Tokenizer(r'"foo\010bar"')
        token = tok.get()
        self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, 'foo\x0abar'))
    def testQuotedString5(self):
        # Unterminated quoted string.
        def bad():
            tok = dns.tokenizer.Tokenizer(r'"foo')
            token = tok.get()
        self.failUnlessRaises(dns.exception.UnexpectedEnd, bad)
    def testQuotedString6(self):
        # Truncated decimal escape.
        def bad():
            tok = dns.tokenizer.Tokenizer(r'"foo\01')
            token = tok.get()
        self.failUnlessRaises(dns.exception.SyntaxError, bad)
    def testQuotedString7(self):
        # A raw newline inside quotes is a syntax error.
        def bad():
            tok = dns.tokenizer.Tokenizer('"foo\nbar"')
            token = tok.get()
        self.failUnlessRaises(dns.exception.SyntaxError, bad)
    # -- end of input, EOL and whitespace --
    def testEmpty1(self):
        tok = dns.tokenizer.Tokenizer('')
        token = tok.get()
        self.failUnless(token.is_eof())
    def testEmpty2(self):
        # EOF is sticky: repeated get() keeps returning EOF.
        tok = dns.tokenizer.Tokenizer('')
        token1 = tok.get()
        token2 = tok.get()
        self.failUnless(token1.is_eof() and token2.is_eof())
    def testEOL(self):
        tok = dns.tokenizer.Tokenizer('\n')
        token1 = tok.get()
        token2 = tok.get()
        self.failUnless(token1.is_eol() and token2.is_eof())
    def testWS1(self):
        # Leading whitespace is skipped by default...
        tok = dns.tokenizer.Tokenizer(' \n')
        token1 = tok.get()
        self.failUnless(token1.is_eol())
    def testWS2(self):
        # ...but returned when want_leading is set.
        tok = dns.tokenizer.Tokenizer(' \n')
        token1 = tok.get(want_leading=True)
        self.failUnless(token1.is_whitespace())
    # -- comments --
    def testComment1(self):
        # Comments are dropped by default.
        tok = dns.tokenizer.Tokenizer(' ;foo\n')
        token1 = tok.get()
        self.failUnless(token1.is_eol())
    def testComment2(self):
        # want_comment returns the comment text without the ';'.
        tok = dns.tokenizer.Tokenizer(' ;foo\n')
        token1 = tok.get(want_comment = True)
        token2 = tok.get()
        self.failUnless(token1 == Token(dns.tokenizer.COMMENT, 'foo') and
                        token2.is_eol())
    def testComment3(self):
        tok = dns.tokenizer.Tokenizer(' ;foo bar\n')
        token1 = tok.get(want_comment = True)
        token2 = tok.get()
        self.failUnless(token1 == Token(dns.tokenizer.COMMENT, 'foo bar') and
                        token2.is_eol())
    # -- multiline parentheses --
    def testMultiline1(self):
        # Newlines inside ( ) are swallowed.
        tok = dns.tokenizer.Tokenizer('( foo\n\n bar\n)')
        tokens = list(iter(tok))
        self.failUnless(tokens == [Token(dns.tokenizer.IDENTIFIER, 'foo'),
                                   Token(dns.tokenizer.IDENTIFIER, 'bar')])
    def testMultiline2(self):
        # The EOL after the closing ')' is reported normally.
        tok = dns.tokenizer.Tokenizer('( foo\n\n bar\n)\n')
        tokens = list(iter(tok))
        self.failUnless(tokens == [Token(dns.tokenizer.IDENTIFIER, 'foo'),
                                   Token(dns.tokenizer.IDENTIFIER, 'bar'),
                                   Token(dns.tokenizer.EOL, '\n')])
    def testMultiline3(self):
        # Unbalanced ')'.
        def bad():
            tok = dns.tokenizer.Tokenizer('foo)')
            tokens = list(iter(tok))
        self.failUnlessRaises(dns.exception.SyntaxError, bad)
    def testMultiline4(self):
        # Unbalanced '('.
        def bad():
            tok = dns.tokenizer.Tokenizer('((foo)')
            tokens = list(iter(tok))
        self.failUnlessRaises(dns.exception.SyntaxError, bad)
    # -- unget buffering (one token deep) --
    def testUnget1(self):
        tok = dns.tokenizer.Tokenizer('foo')
        t1 = tok.get()
        tok.unget(t1)
        t2 = tok.get()
        self.failUnless(t1 == t2 and t1.ttype == dns.tokenizer.IDENTIFIER and \
                        t1.value == 'foo')
    def testUnget2(self):
        # A second unget without an intervening get overflows the buffer.
        def bad():
            tok = dns.tokenizer.Tokenizer('foo')
            t1 = tok.get()
            tok.unget(t1)
            tok.unget(t1)
        self.failUnlessRaises(dns.tokenizer.UngetBufferFull, bad)
    def testGetEOL1(self):
        tok = dns.tokenizer.Tokenizer('\n')
        t = tok.get_eol()
        self.failUnless(t == '\n')
    def testGetEOL2(self):
        # get_eol also accepts EOF, returning the empty string.
        tok = dns.tokenizer.Tokenizer('')
        t = tok.get_eol()
        self.failUnless(t == '')
    # -- escaped delimiters: kept verbatim by get(), decoded by unescape() --
    def testEscapedDelimiter1(self):
        tok = dns.tokenizer.Tokenizer(r'ch\ ld')
        t = tok.get()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'ch\ ld')
    def testEscapedDelimiter2(self):
        tok = dns.tokenizer.Tokenizer(r'ch\032ld')
        t = tok.get()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'ch\032ld')
    def testEscapedDelimiter3(self):
        tok = dns.tokenizer.Tokenizer(r'ch\ild')
        t = tok.get()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'ch\ild')
    def testEscapedDelimiter1u(self):
        tok = dns.tokenizer.Tokenizer(r'ch\ ld')
        t = tok.get().unescape()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'ch ld')
    def testEscapedDelimiter2u(self):
        # \032 is decimal 32, i.e. a space.
        tok = dns.tokenizer.Tokenizer(r'ch\032ld')
        t = tok.get().unescape()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == 'ch ld')
    def testEscapedDelimiter3u(self):
        tok = dns.tokenizer.Tokenizer(r'ch\ild')
        t = tok.get().unescape()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'child')
# Allow running this module's tests directly.
if __name__ == '__main__':
    unittest.main()
| 6,789 | Python | .py | 157 | 34.636943 | 87 | 0.623882 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,094 | update.py | wummel_linkchecker/third_party/dnspython/tests/update.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.update
import dns.rdata
import dns.rdataset
# Known-good wire encoding of the UPDATE message assembled by the tests
# below, spelled out as spaced hex for readability (header, then zone,
# prerequisite and update sections in order).
goodhex = '0001 2800 0001 0005 0007 0000' \
          '076578616d706c6500 0006 0001' \
          '03666f6fc00c 00ff 00ff 00000000 0000' \
          'c019 0001 00ff 00000000 0000' \
          '03626172c00c 0001 0001 00000000 0004 0a000005' \
          '05626c617a32c00c 00ff 00fe 00000000 0000' \
          'c049 0001 00fe 00000000 0000' \
          'c019 0001 00ff 00000000 0000' \
          'c019 0001 0001 0000012c 0004 0a000001' \
          'c019 0001 0001 0000012c 0004 0a000002' \
          'c035 0001 0001 0000012c 0004 0a000003' \
          'c035 0001 00fe 00000000 0004 0a000004' \
          '04626c617ac00c 0001 00ff 00000000 0000' \
          'c049 00ff 00ff 00000000 0000'
# Python 2 only: decode('hex_codec') converts the hex text to raw bytes.
goodwire = goodhex.replace(' ', '').decode('hex_codec')
# The same UPDATE message in dns.message text form; parsed by
# test_from_text1 and rendered back to wire for comparison.
update_text="""id 1
opcode UPDATE
rcode NOERROR
;ZONE
example. IN SOA
;PREREQ
foo ANY ANY
foo ANY A
bar 0 IN A 10.0.0.5
blaz2 NONE ANY
blaz2 NONE A
;UPDATE
foo ANY A
foo 300 IN A 10.0.0.1
foo 300 IN A 10.0.0.2
bar 300 IN A 10.0.0.3
bar 0 NONE A 10.0.0.4
blaz ANY A
blaz2 ANY ANY
"""
class UpdateTestCase(unittest.TestCase):
    """Tests for building and rendering DNS UPDATE messages (RFC 2136).

    Each to_wire test constructs the same message -- zone 'example', five
    prerequisites and seven update operations -- and compares the rendered
    bytes with the known-good ``goodwire`` fixture.  The three tests differ
    only in which calling convention of ``Update.add()`` supplies the
    'bar' A record, so the shared construction lives in ``_build_update``.
    """

    def _build_update(self, add_args):
        """Build the canonical test UPDATE message.

        *add_args* is the positional-argument tuple forwarded to
        ``update.add('bar', ...)``.
        """
        update = dns.update.Update('example')
        update.id = 1
        update.present('foo')
        update.present('foo', 'a')
        update.present('bar', 'a', '10.0.0.5')
        update.absent('blaz2')
        update.absent('blaz2', 'a')
        update.replace('foo', 300, 'a', '10.0.0.1', '10.0.0.2')
        update.add('bar', *add_args)
        update.delete('bar', 'a', '10.0.0.4')
        update.delete('blaz', 'a')
        update.delete('blaz2')
        return update

    def test_to_wire1(self):
        # add() with (ttl, rdtype, value-string) arguments.
        update = self._build_update((300, 'a', '10.0.0.3'))
        self.assertEqual(update.to_wire(), goodwire)

    def test_to_wire2(self):
        # add() with (ttl, rdata) arguments.
        update = self._build_update(
            (300, dns.rdata.from_text(1, 1, '10.0.0.3')))
        self.assertEqual(update.to_wire(), goodwire)

    def test_to_wire3(self):
        # add() with a single rdataset argument.
        update = self._build_update(
            (dns.rdataset.from_text(1, 1, 300, '10.0.0.3'),))
        self.assertEqual(update.to_wire(), goodwire)

    def test_from_text1(self):
        # The text form parses to the same message; shuffling is disabled
        # so the section ordering matches goodwire deterministically.
        # NOTE(review): dns.message and dns.name are reached here as
        # attributes of the dns package (pulled in transitively by the
        # modules imported above) rather than imported directly.
        update = dns.message.from_text(update_text)
        w = update.to_wire(origin=dns.name.from_text('example'),
                           want_shuffle=False)
        self.assertEqual(w, goodwire)
# Allow running this module's tests directly.
if __name__ == '__main__':
    unittest.main()
| 3,919 | Python | .py | 103 | 31.961165 | 72 | 0.634954 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,095 | rdtypeandclass.py | wummel_linkchecker/third_party/dnspython/tests/rdtypeandclass.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.rdataclass
import dns.rdatatype
class RdTypeAndClassTestCase(unittest.TestCase):
    """Tests for dns.rdataclass and dns.rdatatype text/value conversion."""

    # Classes

    def test_class_meta1(self):
        self.failUnless(dns.rdataclass.is_metaclass(dns.rdataclass.ANY))

    def test_class_meta2(self):
        self.failUnless(not dns.rdataclass.is_metaclass(dns.rdataclass.IN))

    def test_class_bytext1(self):
        self.failUnless(dns.rdataclass.from_text('IN') == dns.rdataclass.IN)

    def test_class_bytext2(self):
        self.failUnless(dns.rdataclass.from_text('CLASS1') ==
                        dns.rdataclass.IN)

    def test_class_bytext_bounds1(self):
        self.failUnless(dns.rdataclass.from_text('CLASS0') == 0)
        self.failUnless(dns.rdataclass.from_text('CLASS65535') == 65535)

    def test_class_bytext_bounds2(self):
        def bad():
            dns.rdataclass.from_text('CLASS65536')
        self.failUnlessRaises(ValueError, bad)

    def test_class_bytext_unknown(self):
        def bad():
            dns.rdataclass.from_text('XXX')
        self.failUnlessRaises(dns.rdataclass.UnknownRdataclass, bad)

    def test_class_totext1(self):
        self.failUnless(dns.rdataclass.to_text(dns.rdataclass.IN) == 'IN')

    # BUG FIX: was also named test_class_totext1, which shadowed the
    # method above so only one of the two tests ever ran.
    def test_class_totext2(self):
        self.failUnless(dns.rdataclass.to_text(999) == 'CLASS999')

    def test_class_totext_bounds1(self):
        def bad():
            dns.rdataclass.to_text(-1)
        self.failUnlessRaises(ValueError, bad)

    def test_class_totext_bounds2(self):
        def bad():
            dns.rdataclass.to_text(65536)
        self.failUnlessRaises(ValueError, bad)

    # Types

    def test_type_meta1(self):
        self.failUnless(dns.rdatatype.is_metatype(dns.rdatatype.ANY))

    def test_type_meta2(self):
        self.failUnless(dns.rdatatype.is_metatype(dns.rdatatype.OPT))

    def test_type_meta3(self):
        self.failUnless(not dns.rdatatype.is_metatype(dns.rdatatype.A))

    def test_type_singleton1(self):
        self.failUnless(dns.rdatatype.is_singleton(dns.rdatatype.SOA))

    def test_type_singleton2(self):
        self.failUnless(not dns.rdatatype.is_singleton(dns.rdatatype.A))

    def test_type_bytext1(self):
        self.failUnless(dns.rdatatype.from_text('A') == dns.rdatatype.A)

    def test_type_bytext2(self):
        self.failUnless(dns.rdatatype.from_text('TYPE1') ==
                        dns.rdatatype.A)

    def test_type_bytext_bounds1(self):
        self.failUnless(dns.rdatatype.from_text('TYPE0') == 0)
        self.failUnless(dns.rdatatype.from_text('TYPE65535') == 65535)

    def test_type_bytext_bounds2(self):
        def bad():
            dns.rdatatype.from_text('TYPE65536')
        self.failUnlessRaises(ValueError, bad)

    def test_type_bytext_unknown(self):
        def bad():
            dns.rdatatype.from_text('XXX')
        self.failUnlessRaises(dns.rdatatype.UnknownRdatatype, bad)

    def test_type_totext1(self):
        self.failUnless(dns.rdatatype.to_text(dns.rdatatype.A) == 'A')

    # BUG FIX: was also named test_type_totext1 (duplicate name shadowed
    # the test above, so it silently never ran).
    def test_type_totext2(self):
        self.failUnless(dns.rdatatype.to_text(999) == 'TYPE999')

    def test_type_totext_bounds1(self):
        def bad():
            dns.rdatatype.to_text(-1)
        self.failUnlessRaises(ValueError, bad)

    def test_type_totext_bounds2(self):
        def bad():
            dns.rdatatype.to_text(65536)
        self.failUnlessRaises(ValueError, bad)
# Allow this test module to be run directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 4,284 | Python | .py | 92 | 39.402174 | 76 | 0.69118 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,096 | inet.py | wummel_linkchecker/third_party/dnspython/dns/inet.py | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Generic Internet address helper functions."""
import socket
import dns.ipv4
import dns.ipv6
# We assume that AF_INET is always defined.
AF_INET = socket.AF_INET
# AF_INET6 might not be defined in the socket module, but we need it.
# We'll try to use the socket module's value, and if it doesn't work,
# we'll use our own value.
try:
    AF_INET6 = socket.AF_INET6
except AttributeError:
    # Arbitrary sentinel: this module only compares against AF_INET6,
    # so any value distinct from AF_INET works on IPv6-less platforms.
    AF_INET6 = 9999
def inet_pton(family, text):
    """Convert the textual form of a network address into its binary form.

    @param family: the address family (AF_INET or AF_INET6)
    @type family: int
    @param text: the textual address
    @type text: string
    @raises NotImplementedError: the address family specified is not
    implemented.
    @rtype: string
    """
    # Dispatch on the family with early returns; anything else is
    # unsupported.
    if family == AF_INET:
        return dns.ipv4.inet_aton(text)
    if family == AF_INET6:
        return dns.ipv6.inet_aton(text)
    raise NotImplementedError
def inet_ntop(family, address):
    """Convert the binary form of a network address into its textual form.

    @param family: the address family (AF_INET or AF_INET6)
    @type family: int
    @param address: the binary address
    @type address: string
    @raises NotImplementedError: the address family specified is not
    implemented.
    @rtype: string
    """
    if family == AF_INET:
        return dns.ipv4.inet_ntoa(address)
    if family == AF_INET6:
        return dns.ipv6.inet_ntoa(address)
    raise NotImplementedError
def af_for_address(text):
    """Determine the address family of a textual-form network address.

    Tries an IPv4 parse first, then IPv6; whichever succeeds decides
    the family.

    @param text: the textual address
    @type text: string
    @raises ValueError: the address family cannot be determined from the input.
    @rtype: int
    """
    try:
        dns.ipv4.inet_aton(text)
        return AF_INET
    except Exception:
        pass
    try:
        dns.ipv6.inet_aton(text)
        return AF_INET6
    except Exception:
        raise ValueError
def is_multicast(text):
    """Is the textual-form network address a multicast address?

    IPv4 multicast is 224.0.0.0/4 (first octet 224-239); IPv6
    multicast is ff00::/8 (first octet 255).

    @param text: the textual address
    @raises ValueError: the address family cannot be determined from the input.
    @rtype: bool
    """
    try:
        octet = ord(dns.ipv4.inet_aton(text)[0])
        return 224 <= octet <= 239
    except Exception:
        pass
    try:
        return ord(dns.ipv6.inet_aton(text)[0]) == 255
    except Exception:
        raise ValueError
| 3,267 | Python | .py | 90 | 31.233333 | 79 | 0.70497 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,097 | rcode.py | wummel_linkchecker/third_party/dnspython/dns/rcode.py | # Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Result Codes."""
import dns.exception
# Standard DNS rcode values, addressable both by value and by mnemonic.
NOERROR = 0
FORMERR = 1
SERVFAIL = 2
NXDOMAIN = 3
NOTIMP = 4
REFUSED = 5
YXDOMAIN = 6
YXRRSET = 7
NXRRSET = 8
NOTAUTH = 9
NOTZONE = 10
BADVERS = 16
# Mnemonic -> value mapping; see _by_value below for the inverse.
_by_text = {
    'NOERROR' : NOERROR,
    'FORMERR' : FORMERR,
    'SERVFAIL' : SERVFAIL,
    'NXDOMAIN' : NXDOMAIN,
    'NOTIMP' : NOTIMP,
    'REFUSED' : REFUSED,
    'YXDOMAIN' : YXDOMAIN,
    'YXRRSET' : YXRRSET,
    'NXRRSET' : NXRRSET,
    'NOTAUTH' : NOTAUTH,
    'NOTZONE' : NOTZONE,
    'BADVERS' : BADVERS
}
# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mapping not to be a true inverse.
# NOTE(review): iteritems() is Python-2-only.
_by_value = dict([(y, x) for x, y in _by_text.iteritems()])
class UnknownRcode(dns.exception.DNSException):
    """Raised when an rcode value or mnemonic cannot be recognized."""
def from_text(text):
    """Convert text into an rcode.

    Accepts either a decimal value in the range 0-4095 or one of the
    mnemonics from _by_text (case-insensitive).

    @param text: the texual rcode
    @type text: string
    @raises UnknownRcode: the rcode is unknown
    @rtype: int
    """
    if text.isdigit():
        numeric = int(text)
        if 0 <= numeric <= 4095:
            return numeric
    value = _by_text.get(text.upper())
    if value is None:
        raise UnknownRcode
    return value
def from_flags(flags, ednsflags):
    """Return the rcode value encoded by flags and ednsflags.

    The low 4 bits come from the DNS header flags; the extended
    high bits come from bits 20-27 of the EDNS flags.

    @param flags: the DNS flags
    @type flags: int
    @param ednsflags: the EDNS flags
    @type ednsflags: int
    @raises ValueError: rcode is < 0 or > 4095
    @rtype: int
    """
    low = flags & 0x000f
    high = (ednsflags >> 20) & 0xff0
    value = low | high
    if not 0 <= value <= 4095:
        raise ValueError('rcode must be >= 0 and <= 4095')
    return value
def to_flags(value):
    """Return a (flags, ednsflags) tuple which encodes the rcode.

    The low 4 bits of the rcode go into the DNS header flags; the
    high 8 bits go into bits 20-27 of the EDNS flags.

    @param value: the rcode
    @type value: int
    @raises ValueError: rcode is < 0 or > 4095
    @rtype: (int, int) tuple
    """
    if value < 0 or value > 4095:
        raise ValueError('rcode must be >= 0 and <= 4095')
    v = value & 0xf
    # BUG FIX: the previous code wrapped this in long(), a Python-2-only
    # builtin.  The wrapper was redundant even on Python 2 (ints promote
    # automatically) and raised NameError on Python 3.
    ev = (value & 0xff0) << 20
    return (v, ev)
def to_text(value):
    """Convert an rcode into its mnemonic text, falling back to the
    decimal string for values with no known mnemonic.

    @param value: the rcode
    @type value: int
    @rtype: string
    """
    mnemonic = _by_value.get(value)
    return str(value) if mnemonic is None else mnemonic
| 3,105 | Python | .py | 99 | 27.414141 | 72 | 0.67783 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,098 | rdataclass.py | wummel_linkchecker/third_party/dnspython/dns/rdataclass.py | # Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Rdata Classes.
@var _by_text: The rdata class textual name to value mapping
@type _by_text: dict
@var _by_value: The rdata class value to textual name mapping
@type _by_value: dict
@var _metaclasses: If an rdataclass is a metaclass, there will be a mapping
whose key is the rdatatype value and whose value is True in this dictionary.
@type _metaclasses: dict"""
import re
import dns.exception
# DNS rdata class values, addressable by value and by mnemonic.
RESERVED0 = 0
IN = 1
CH = 3
HS = 4
NONE = 254
ANY = 255

# Mnemonic -> value mapping for rdata classes.
_by_text = {
    'RESERVED0' : RESERVED0,
    'IN' : IN,
    'CH' : CH,
    'HS' : HS,
    'NONE' : NONE,
    'ANY' : ANY
}

# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mapping not to be true inverse.
# NOTE(review): iteritems() is Python-2-only.
_by_value = dict([(y, x) for x, y in _by_text.iteritems()])

# Now that we've built the inverse map, we can add class aliases to
# the _by_text mapping (aliases must not appear in _by_value).
_by_text.update({
    'INTERNET' : IN,
    'CHAOS' : CH,
    'HESIOD' : HS
})

# Metaclasses; see is_metaclass().
_metaclasses = {
    NONE : True,
    ANY : True
}

# Matches the generic 'CLASSnnn' form (fixed: stray trailing semicolon
# removed).
_unknown_class_pattern = re.compile('CLASS([0-9]+)$', re.I)
class UnknownRdataclass(dns.exception.DNSException):
    """Raised when an rdata class value or mnemonic cannot be recognized."""
def from_text(text):
    """Convert text into a DNS rdata class value.

    Accepts either a mnemonic ('IN', 'CHAOS', ...) or the generic
    'CLASSnnn' form matched by _unknown_class_pattern.

    @param text: the text
    @type text: string
    @rtype: int
    @raises dns.rdataclass.UnknownRdataclass: the class is unknown
    @raises ValueError: the rdata class value is not >= 0 and <= 65535
    """
    value = _by_text.get(text.upper())
    if value is None:
        match = _unknown_class_pattern.match(text)
        # BUG FIX (idiom): was `match == None`; identity comparison is
        # the correct test for a possibly-None match object.
        if match is None:
            raise UnknownRdataclass
        value = int(match.group(1))
        if value < 0 or value > 65535:
            raise ValueError("class must be between >= 0 and <= 65535")
    return value
def to_text(value):
    """Convert a DNS rdata class to text.

    Known classes render as their mnemonic; anything else renders in
    the generic 'CLASSnnn' form.

    @param value: the rdata class value
    @type value: int
    @rtype: string
    @raises ValueError: the rdata class value is not >= 0 and <= 65535
    """
    if not 0 <= value <= 65535:
        raise ValueError("class must be between >= 0 and <= 65535")
    known = _by_value.get(value)
    if known is not None:
        return known
    return 'CLASS' + repr(value)
def is_metaclass(rdclass):
    """True if the class is a metaclass.

    @param rdclass: the rdata class
    @type rdclass: int
    @rtype: bool"""
    return rdclass in _metaclasses
| 3,300 | Python | .py | 95 | 30.926316 | 76 | 0.694288 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |
22,099 | message.py | wummel_linkchecker/third_party/dnspython/dns/message.py | # Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Messages"""
import cStringIO
import random
import struct
import sys
import time
import dns.edns
import dns.exception
import dns.flags
import dns.name
import dns.opcode
import dns.entropy
import dns.rcode
import dns.rdata
import dns.rdataclass
import dns.rdatatype
import dns.rrset
import dns.renderer
import dns.tsig
import dns.wiredata
class ShortHeader(dns.exception.FormError):
    """The DNS packet passed to from_wire() is too short."""
class TrailingJunk(dns.exception.FormError):
    """The DNS packet passed to from_wire() has extra junk at the end
    of it."""
class UnknownHeaderField(dns.exception.DNSException):
    """A header field name was not recognized when converting from text
    into a message."""
class BadEDNS(dns.exception.FormError):
    """An OPT record occurred somewhere other than the start of the
    additional data section."""
class BadTSIG(dns.exception.FormError):
    """A TSIG record occurred somewhere other than the end of the
    additional data section."""
class UnknownTSIGKey(dns.exception.DNSException):
    """A TSIG-signed message arrived but the key is not in the keyring."""
class Message(object):
"""A DNS message.
@ivar id: The query id; the default is a randomly chosen id.
@type id: int
@ivar flags: The DNS flags of the message. @see: RFC 1035 for an
explanation of these flags.
@type flags: int
@ivar question: The question section.
@type question: list of dns.rrset.RRset objects
@ivar answer: The answer section.
@type answer: list of dns.rrset.RRset objects
@ivar authority: The authority section.
@type authority: list of dns.rrset.RRset objects
@ivar additional: The additional data section.
@type additional: list of dns.rrset.RRset objects
@ivar edns: The EDNS level to use. The default is -1, no Edns.
@type edns: int
@ivar ednsflags: The EDNS flags
@type ednsflags: long
@ivar payload: The EDNS payload size. The default is 0.
@type payload: int
@ivar options: The EDNS options
@type options: list of dns.edns.Option objects
@ivar request_payload: The associated request's EDNS payload size.
@type request_payload: int
@ivar keyring: The TSIG keyring to use. The default is None.
@type keyring: dict
@ivar keyname: The TSIG keyname to use. The default is None.
@type keyname: dns.name.Name object
@ivar keyalgorithm: The TSIG algorithm to use; defaults to
dns.tsig.default_algorithm. Constants for TSIG algorithms are defined
in dns.tsig, and the currently implemented algorithms are
HMAC_MD5, HMAC_SHA1, HMAC_SHA224, HMAC_SHA256, HMAC_SHA384, and
HMAC_SHA512.
@type keyalgorithm: string
@ivar request_mac: The TSIG MAC of the request message associated with
this message; used when validating TSIG signatures. @see: RFC 2845 for
more information on TSIG fields.
@type request_mac: string
@ivar fudge: TSIG time fudge; default is 300 seconds.
@type fudge: int
@ivar original_id: TSIG original id; defaults to the message's id
@type original_id: int
@ivar tsig_error: TSIG error code; default is 0.
@type tsig_error: int
@ivar other_data: TSIG other data.
@type other_data: string
@ivar mac: The TSIG MAC for this message.
@type mac: string
@ivar xfr: Is the message being used to contain the results of a DNS
zone transfer? The default is False.
@type xfr: bool
@ivar origin: The origin of the zone in messages which are used for
zone transfers or for DNS dynamic updates. The default is None.
@type origin: dns.name.Name object
@ivar tsig_ctx: The TSIG signature context associated with this
message. The default is None.
@type tsig_ctx: hmac.HMAC object
@ivar had_tsig: Did the message decoded from wire format have a TSIG
signature?
@type had_tsig: bool
@ivar multi: Is this message part of a multi-message sequence? The
default is false. This variable is used when validating TSIG signatures
on messages which are part of a zone transfer.
@type multi: bool
@ivar first: Is this message standalone, or the first of a multi
message sequence? This variable is used when validating TSIG signatures
on messages which are part of a zone transfer.
@type first: bool
@ivar index: An index of rrsets in the message. The index key is
(section, name, rdclass, rdtype, covers, deleting). Indexing can be
disabled by setting the index to None.
@type index: dict
"""
    def __init__(self, id=None):
        if id is None:
            # No id supplied: pick a random 16-bit id via dns.entropy.
            self.id = dns.entropy.random_16()
        else:
            self.id = id
        self.flags = 0
        # The four message sections, each a list of RRsets.
        self.question = []
        self.answer = []
        self.authority = []
        self.additional = []
        # EDNS state; edns == -1 means EDNS is not in use.
        self.edns = -1
        self.ednsflags = 0
        self.payload = 0
        self.options = []
        self.request_payload = 0
        # TSIG state (see the class docstring for field meanings).
        self.keyring = None
        self.keyname = None
        self.keyalgorithm = dns.tsig.default_algorithm
        self.request_mac = ''
        self.other_data = ''
        self.tsig_error = 0
        self.fudge = 300
        self.original_id = self.id
        self.mac = ''
        self.xfr = False
        self.origin = None
        self.tsig_ctx = None
        self.had_tsig = False
        self.multi = False
        self.first = True
        # RRset index keyed by (section, name, rdclass, rdtype, covers,
        # deleting); set to None to disable indexing.
        self.index = {}
def __repr__(self):
return '<DNS message, ID ' + repr(self.id) + '>'
    def __str__(self):
        # Delegate to the full multi-line text rendering.
        return self.to_text()
    def to_text(self, origin=None, relativize=True, **kw):
        """Convert the message to text.
        The I{origin}, I{relativize}, and any other keyword
        arguments are passed to the rrset to_wire() method.
        @rtype: string
        """
        s = cStringIO.StringIO()
        # Header fields first.
        print >> s, 'id %d' % self.id
        print >> s, 'opcode %s' % \
              dns.opcode.to_text(dns.opcode.from_flags(self.flags))
        rc = dns.rcode.from_flags(self.flags, self.ednsflags)
        print >> s, 'rcode %s' % dns.rcode.to_text(rc)
        print >> s, 'flags %s' % dns.flags.to_text(self.flags)
        # EDNS details are only printed when EDNS is in use.
        if self.edns >= 0:
            print >> s, 'edns %s' % self.edns
            if self.ednsflags != 0:
                print >> s, 'eflags %s' % \
                      dns.flags.edns_to_text(self.ednsflags)
            print >> s, 'payload', self.payload
        # Dynamic updates relabel the sections (ZONE/PREREQ/UPDATE)
        # per the UPDATE message layout.
        is_update = dns.opcode.is_update(self.flags)
        if is_update:
            print >> s, ';ZONE'
        else:
            print >> s, ';QUESTION'
        for rrset in self.question:
            print >> s, rrset.to_text(origin, relativize, **kw)
        if is_update:
            print >> s, ';PREREQ'
        else:
            print >> s, ';ANSWER'
        for rrset in self.answer:
            print >> s, rrset.to_text(origin, relativize, **kw)
        if is_update:
            print >> s, ';UPDATE'
        else:
            print >> s, ';AUTHORITY'
        for rrset in self.authority:
            print >> s, rrset.to_text(origin, relativize, **kw)
        print >> s, ';ADDITIONAL'
        for rrset in self.additional:
            print >> s, rrset.to_text(origin, relativize, **kw)
        #
        # We strip off the final \n so the caller can print the result without
        # doing weird things to get around eccentricities in Python print
        # formatting
        #
        return s.getvalue()[:-1]
def __hash__(self):
return hash((self.id, self.flags, self.question, self.answer,
self.authority))
def __eq__(self, other):
"""Two messages are equal if they have the same content in the
header, question, answer, and authority sections.
@rtype: bool"""
if not isinstance(other, Message):
return False
if self.id != other.id:
return False
if self.flags != other.flags:
return False
for n in self.question:
if n not in other.question:
return False
for n in other.question:
if n not in self.question:
return False
for n in self.answer:
if n not in other.answer:
return False
for n in other.answer:
if n not in self.answer:
return False
for n in self.authority:
if n not in other.authority:
return False
for n in other.authority:
if n not in self.authority:
return False
return True
    def __ne__(self, other):
        """Are two messages not equal?
        @rtype: bool"""
        # Python 2 does not derive __ne__ from __eq__, so it is spelled
        # out explicitly here.
        return not self.__eq__(other)
    def is_response(self, other):
        """Is other a response to self?
        @rtype: bool"""
        # A response must have QR set, the same id, and the same opcode.
        if other.flags & dns.flags.QR == 0 or \
           self.id != other.id or \
           dns.opcode.from_flags(self.flags) != \
           dns.opcode.from_flags(other.flags):
            return False
        # Non-NOERROR replies are accepted without comparing question
        # sections (presumably because error responses need not echo
        # the question -- TODO confirm).
        if dns.rcode.from_flags(other.flags, other.ednsflags) != \
               dns.rcode.NOERROR:
            return True
        # Update responses are likewise accepted without comparison.
        if dns.opcode.is_update(self.flags):
            return True
        # Otherwise the question sections must match membership-wise.
        for n in self.question:
            if n not in other.question:
                return False
        for n in other.question:
            if n not in self.question:
                return False
        return True
def section_number(self, section):
if section is self.question:
return 0
elif section is self.answer:
return 1
elif section is self.authority:
return 2
elif section is self.additional:
return 3
else:
raise ValueError('unknown section')
    def find_rrset(self, section, name, rdclass, rdtype,
                   covers=dns.rdatatype.NONE, deleting=None, create=False,
                   force_unique=False):
        """Find the RRset with the given attributes in the specified section.
        @param section: the section of the message to look in, e.g.
        self.answer.
        @type section: list of dns.rrset.RRset objects
        @param name: the name of the RRset
        @type name: dns.name.Name object
        @param rdclass: the class of the RRset
        @type rdclass: int
        @param rdtype: the type of the RRset
        @type rdtype: int
        @param covers: the covers value of the RRset
        @type covers: int
        @param deleting: the deleting value of the RRset
        @type deleting: int
        @param create: If True, create the RRset if it is not found.
        The created RRset is appended to I{section}.
        @type create: bool
        @param force_unique: If True and create is also True, create a
        new RRset regardless of whether a matching RRset exists already.
        @type force_unique: bool
        @raises KeyError: the RRset was not found and create was False
        @rtype: dns.rrset.RRset object"""
        key = (self.section_number(section),
               name, rdclass, rdtype, covers, deleting)
        if not force_unique:
            if not self.index is None:
                # Fast path: constant-time lookup in the rrset index.
                rrset = self.index.get(key)
                if not rrset is None:
                    return rrset
            else:
                # Indexing disabled (self.index is None): linear scan.
                for rrset in section:
                    if rrset.match(name, rdclass, rdtype, covers, deleting):
                        return rrset
        if not create:
            raise KeyError
        rrset = dns.rrset.RRset(name, rdclass, rdtype, covers, deleting)
        section.append(rrset)
        # Keep the index in sync with the section contents.
        if not self.index is None:
            self.index[key] = rrset
        return rrset
def get_rrset(self, section, name, rdclass, rdtype,
covers=dns.rdatatype.NONE, deleting=None, create=False,
force_unique=False):
"""Get the RRset with the given attributes in the specified section.
If the RRset is not found, None is returned.
@param section: the section of the message to look in, e.g.
self.answer.
@type section: list of dns.rrset.RRset objects
@param name: the name of the RRset
@type name: dns.name.Name object
@param rdclass: the class of the RRset
@type rdclass: int
@param rdtype: the type of the RRset
@type rdtype: int
@param covers: the covers value of the RRset
@type covers: int
@param deleting: the deleting value of the RRset
@type deleting: int
@param create: If True, create the RRset if it is not found.
The created RRset is appended to I{section}.
@type create: bool
@param force_unique: If True and create is also True, create a
new RRset regardless of whether a matching RRset exists already.
@type force_unique: bool
@rtype: dns.rrset.RRset object or None"""
try:
rrset = self.find_rrset(section, name, rdclass, rdtype, covers,
deleting, create, force_unique)
except KeyError:
rrset = None
return rrset
    def to_wire(self, origin=None, max_size=0, **kw):
        """Return a string containing the message in DNS compressed wire
        format.
        Additional keyword arguments are passed to the rrset to_wire()
        method.
        @param origin: The origin to be appended to any relative names.
        @type origin: dns.name.Name object
        @param max_size: The maximum size of the wire format output; default
        is 0, which means 'the message's request payload, if nonzero, or
        65535'.
        @type max_size: int
        @raises dns.exception.TooBig: max_size was exceeded
        @rtype: string
        """
        if max_size == 0:
            if self.request_payload != 0:
                max_size = self.request_payload
            else:
                max_size = 65535
        # Clamp to the sizes representable on the wire.
        if max_size < 512:
            max_size = 512
        elif max_size > 65535:
            max_size = 65535
        r = dns.renderer.Renderer(self.id, self.flags, max_size, origin)
        for rrset in self.question:
            r.add_question(rrset.name, rrset.rdtype, rrset.rdclass)
        for rrset in self.answer:
            r.add_rrset(dns.renderer.ANSWER, rrset, **kw)
        for rrset in self.authority:
            r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw)
        # The OPT record leads the additional section.
        if self.edns >= 0:
            r.add_edns(self.edns, self.ednsflags, self.payload, self.options)
        for rrset in self.additional:
            r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw)
        r.write_header()
        # TSIG, if configured, is appended last, after the header is
        # written; the computed MAC is saved for request/response
        # validation.
        if not self.keyname is None:
            r.add_tsig(self.keyname, self.keyring[self.keyname],
                       self.fudge, self.original_id, self.tsig_error,
                       self.other_data, self.request_mac,
                       self.keyalgorithm)
            self.mac = r.mac
        return r.get_wire()
    def use_tsig(self, keyring, keyname=None, fudge=300,
                 original_id=None, tsig_error=0, other_data='',
                 algorithm=dns.tsig.default_algorithm):
        """When sending, a TSIG signature using the specified keyring
        and keyname should be added.
        @param keyring: The TSIG keyring to use; defaults to None.
        @type keyring: dict
        @param keyname: The name of the TSIG key to use; defaults to None.
        The key must be defined in the keyring. If a keyring is specified
        but a keyname is not, then the key used will be the first key in the
        keyring. Note that the order of keys in a dictionary is not defined,
        so applications should supply a keyname when a keyring is used, unless
        they know the keyring contains only one key.
        @type keyname: dns.name.Name or string
        @param fudge: TSIG time fudge; default is 300 seconds.
        @type fudge: int
        @param original_id: TSIG original id; defaults to the message's id
        @type original_id: int
        @param tsig_error: TSIG error code; default is 0.
        @type tsig_error: int
        @param other_data: TSIG other data.
        @type other_data: string
        @param algorithm: The TSIG algorithm to use; defaults to
        dns.tsig.default_algorithm
        """
        self.keyring = keyring
        if keyname is None:
            # Arbitrary pick: dict ordering is not defined (see the
            # docstring), so callers with multi-key keyrings should pass
            # keyname explicitly.
            self.keyname = self.keyring.keys()[0]
        else:
            # Accept textual key names for convenience.
            if isinstance(keyname, (str, unicode)):
                keyname = dns.name.from_text(keyname)
            self.keyname = keyname
        self.keyalgorithm = algorithm
        self.fudge = fudge
        if original_id is None:
            self.original_id = self.id
        else:
            self.original_id = original_id
        self.tsig_error = tsig_error
        self.other_data = other_data
    def use_edns(self, edns=0, ednsflags=0, payload=1280, request_payload=None, options=None):
        """Configure EDNS behavior.
        @param edns: The EDNS level to use. Specifying None, False, or -1
        means 'do not use EDNS', and in this case the other parameters are
        ignored. Specifying True is equivalent to specifying 0, i.e. 'use
        EDNS0'.
        @type edns: int or bool or None
        @param ednsflags: EDNS flag values.
        @type ednsflags: int
        @param payload: The EDNS sender's payload field, which is the maximum
        size of UDP datagram the sender can handle.
        @type payload: int
        @param request_payload: The EDNS payload size to use when sending
        this message. If not specified, defaults to the value of payload.
        @type request_payload: int or None
        @param options: The EDNS options
        @type options: None or list of dns.edns.Option objects
        @see: RFC 2671
        """
        # Normalize the edns argument: None/False -> disabled (-1),
        # True -> EDNS0.
        if edns is None or edns is False:
            edns = -1
        if edns is True:
            edns = 0
        if request_payload is None:
            request_payload = payload
        if edns < 0:
            # EDNS disabled: clear all related state.
            ednsflags = 0
            payload = 0
            request_payload = 0
            options = []
        else:
            # make sure the EDNS version in ednsflags agrees with edns
            ednsflags &= 0xFF00FFFFL
            ednsflags |= (edns << 16)
            if options is None:
                options = []
        self.edns = edns
        self.ednsflags = ednsflags
        self.payload = payload
        self.options = options
        self.request_payload = request_payload
def want_dnssec(self, wanted=True):
"""Enable or disable 'DNSSEC desired' flag in requests.
@param wanted: Is DNSSEC desired? If True, EDNS is enabled if
required, and then the DO bit is set. If False, the DO bit is
cleared if EDNS is enabled.
@type wanted: bool
"""
if wanted:
if self.edns < 0:
self.use_edns()
self.ednsflags |= dns.flags.DO
elif self.edns >= 0:
self.ednsflags &= ~dns.flags.DO
    def rcode(self):
        """Return the rcode.
        @rtype: int
        """
        # Combines the 4 header rcode bits with the extended EDNS bits.
        return dns.rcode.from_flags(self.flags, self.ednsflags)
    def set_rcode(self, rcode):
        """Set the rcode.
        @param rcode: the rcode
        @type rcode: int
        """
        (value, evalue) = dns.rcode.to_flags(rcode)
        # Replace the low 4 rcode bits in the header flags...
        self.flags &= 0xFFF0
        self.flags |= value
        # ...and the extended rcode bits (24-31) in the EDNS flags.
        self.ednsflags &= 0x00FFFFFFL
        self.ednsflags |= evalue
        if self.ednsflags != 0 and self.edns < 0:
            # Extended rcode bits require EDNS; enable it implicitly.
            self.edns = 0
    def opcode(self):
        """Return the opcode.
        @rtype: int
        """
        # The opcode is extracted from the header flags by dns.opcode.
        return dns.opcode.from_flags(self.flags)
    def set_opcode(self, opcode):
        """Set the opcode.
        @param opcode: the opcode
        @type opcode: int
        """
        # 0x87FF keeps every flag except the opcode field (bits 11-14).
        self.flags &= 0x87FF
        self.flags |= dns.opcode.to_flags(opcode)
class _WireReader(object):
"""Wire format reader.
@ivar wire: the wire-format message.
@type wire: string
@ivar message: The message object being built
@type message: dns.message.Message object
@ivar current: When building a message object from wire format, this
variable contains the offset from the beginning of wire of the next octet
to be read.
@type current: int
@ivar updating: Is the message a dynamic update?
@type updating: bool
@ivar one_rr_per_rrset: Put each RR into its own RRset?
@type one_rr_per_rrset: bool
@ivar zone_rdclass: The class of the zone in messages which are
DNS dynamic updates.
@type zone_rdclass: int
"""
    def __init__(self, wire, message, question_only=False,
                 one_rr_per_rrset=False):
        # Normalize the raw bytes via dns.wiredata.maybe_wrap().
        self.wire = dns.wiredata.maybe_wrap(wire)
        self.message = message
        # Offset of the next octet to read from self.wire.
        self.current = 0
        self.updating = False
        # Class of the zone; overwritten while parsing the question
        # section of a dynamic update.
        self.zone_rdclass = dns.rdataclass.IN
        self.question_only = question_only
        self.one_rr_per_rrset = one_rr_per_rrset
    def _get_question(self, qcount):
        """Read the next I{qcount} records from the wire data and add them to
        the question section.
        @param qcount: the number of questions in the message
        @type qcount: int"""
        if self.updating and qcount > 1:
            # A dynamic update carries at most one "zone" question.
            raise dns.exception.FormError
        for i in xrange(0, qcount):
            (qname, used) = dns.name.from_wire(self.wire, self.current)
            if not self.message.origin is None:
                qname = qname.relativize(self.message.origin)
            self.current = self.current + used
            # Each question entry is the name followed by type and class.
            (rdtype, rdclass) = \
                struct.unpack('!HH',
                              self.wire[self.current:self.current + 4])
            self.current = self.current + 4
            self.message.find_rrset(self.message.question, qname,
                                    rdclass, rdtype, create=True,
                                    force_unique=True)
            if self.updating:
                # Remember the zone's class: later sections use it to
                # restore the class of ANY/NONE deletion records.
                self.zone_rdclass = rdclass
def _get_section(self, section, count):
"""Read the next I{count} records from the wire data and add them to
the specified section.
@param section: the section of the message to which to add records
@type section: list of dns.rrset.RRset objects
@param count: the number of records to read
@type count: int"""
if self.updating or self.one_rr_per_rrset:
force_unique = True
else:
force_unique = False
seen_opt = False
for i in xrange(0, count):
rr_start = self.current
(name, used) = dns.name.from_wire(self.wire, self.current)
absolute_name = name
if not self.message.origin is None:
name = name.relativize(self.message.origin)
self.current = self.current + used
(rdtype, rdclass, ttl, rdlen) = \
struct.unpack('!HHIH',
self.wire[self.current:self.current + 10])
self.current = self.current + 10
if rdtype == dns.rdatatype.OPT:
if not section is self.message.additional or seen_opt:
raise BadEDNS
self.message.payload = rdclass
self.message.ednsflags = ttl
self.message.edns = (ttl & 0xff0000) >> 16
self.message.options = []
current = self.current
optslen = rdlen
while optslen > 0:
(otype, olen) = \
struct.unpack('!HH',
self.wire[current:current + 4])
current = current + 4
opt = dns.edns.option_from_wire(otype, self.wire, current, olen)
self.message.options.append(opt)
current = current + olen
optslen = optslen - 4 - olen
seen_opt = True
elif rdtype == dns.rdatatype.TSIG:
if not (section is self.message.additional and
i == (count - 1)):
raise BadTSIG
if self.message.keyring is None:
raise UnknownTSIGKey('got signed message without keyring')
secret = self.message.keyring.get(absolute_name)
if secret is None:
raise UnknownTSIGKey("key '%s' unknown" % name)
self.message.tsig_ctx = \
dns.tsig.validate(self.wire,
absolute_name,
secret,
int(time.time()),
self.message.request_mac,
rr_start,
self.current,
rdlen,
self.message.tsig_ctx,
self.message.multi,
self.message.first)
self.message.had_tsig = True
else:
if ttl < 0:
ttl = 0
if self.updating and \
(rdclass == dns.rdataclass.ANY or
rdclass == dns.rdataclass.NONE):
deleting = rdclass
rdclass = self.zone_rdclass
else:
deleting = None
if deleting == dns.rdataclass.ANY or \
(deleting == dns.rdataclass.NONE and \
section is self.message.answer):
covers = dns.rdatatype.NONE
rd = None
else:
rd = dns.rdata.from_wire(rdclass, rdtype, self.wire,
self.current, rdlen,
self.message.origin)
covers = rd.covers()
if self.message.xfr and rdtype == dns.rdatatype.SOA:
force_unique = True
rrset = self.message.find_rrset(section, name,
rdclass, rdtype, covers,
deleting, True, force_unique)
if not rd is None:
rrset.add(rd, ttl)
self.current = self.current + rdlen
def read(self):
"""Read a wire format DNS message and build a dns.message.Message
object."""
l = len(self.wire)
if l < 12:
raise ShortHeader
(self.message.id, self.message.flags, qcount, ancount,
aucount, adcount) = struct.unpack('!HHHHHH', self.wire[:12])
self.current = 12
if dns.opcode.is_update(self.message.flags):
self.updating = True
self._get_question(qcount)
if self.question_only:
return
self._get_section(self.message.answer, ancount)
self._get_section(self.message.authority, aucount)
self._get_section(self.message.additional, adcount)
if self.current != l:
raise TrailingJunk
if self.message.multi and self.message.tsig_ctx and \
not self.message.had_tsig:
self.message.tsig_ctx.update(self.wire)
def from_wire(wire, keyring=None, request_mac='', xfr=False, origin=None,
              tsig_ctx=None, multi=False, first=True,
              question_only=False, one_rr_per_rrset=False):
    """Convert a DNS wire format message into a message
    object.
    @param keyring: The keyring to use if the message is signed.
    @type keyring: dict
    @param request_mac: If the message is a response to a TSIG-signed request,
    I{request_mac} should be set to the MAC of that request.
    @type request_mac: string
    @param xfr: Is this message part of a zone transfer?
    @type xfr: bool
    @param origin: If the message is part of a zone transfer, I{origin}
    should be the origin name of the zone.
    @type origin: dns.name.Name object
    @param tsig_ctx: The ongoing TSIG context, used when validating zone
    transfers.
    @type tsig_ctx: hmac.HMAC object
    @param multi: Is this message part of a multiple message sequence?
    @type multi: bool
    @param first: Is this message standalone, or the first of a multi
    message sequence?
    @type first: bool
    @param question_only: Read only up to the end of the question section?
    @type question_only: bool
    @param one_rr_per_rrset: Put each RR into its own RRset
    @type one_rr_per_rrset: bool
    @raises ShortHeader: The message is less than 12 octets long.
    @raises TrailingJunk: There were octets in the message past the end
    of the proper DNS message.
    @raises BadEDNS: An OPT record was in the wrong section, or occurred more
    than once.
    @raises BadTSIG: A TSIG record was not the last record of the additional
    data section.
    @rtype: dns.message.Message object"""
    # Stash the TSIG/transfer context on the message; _WireReader reads
    # these attributes while parsing.
    message = Message(id=0)
    message.keyring = keyring
    message.request_mac = request_mac
    message.xfr = xfr
    message.origin = origin
    message.tsig_ctx = tsig_ctx
    message.multi = multi
    message.first = first
    _WireReader(wire, message, question_only, one_rr_per_rrset).read()
    return message
class _TextReader(object):
    """Text format reader.
    @ivar tok: the tokenizer
    @type tok: dns.tokenizer.Tokenizer object
    @ivar message: The message object being built
    @type message: dns.message.Message object
    @ivar updating: Is the message a dynamic update?
    @type updating: bool
    @ivar zone_rdclass: The class of the zone in messages which are
    DNS dynamic updates.
    @type zone_rdclass: int
    @ivar last_name: The most recently read name when building a message object
    from text format.
    @type last_name: dns.name.Name object
    """
    def __init__(self, text, message):
        self.message = message
        self.tok = dns.tokenizer.Tokenizer(text)
        self.last_name = None
        self.zone_rdclass = dns.rdataclass.IN
        self.updating = False
    def _header_line(self, section):
        """Process one line from the text format header section."""
        token = self.tok.get()
        what = token.value
        if what == 'id':
            self.message.id = self.tok.get_int()
        elif what == 'flags':
            # Consume flag mnemonics until the next token is no longer an
            # identifier (e.g. end of line).
            while True:
                token = self.tok.get()
                if not token.is_identifier():
                    self.tok.unget(token)
                    break
                self.message.flags = self.message.flags | \
                                     dns.flags.from_text(token.value)
            if dns.opcode.is_update(self.message.flags):
                self.updating = True
        elif what == 'edns':
            self.message.edns = self.tok.get_int()
            # The EDNS version occupies bits 16-23 of ednsflags.
            self.message.ednsflags = self.message.ednsflags | \
                                     (self.message.edns << 16)
        elif what == 'eflags':
            # Seeing EDNS flags implies at least EDNS version 0.
            if self.message.edns < 0:
                self.message.edns = 0
            while True:
                token = self.tok.get()
                if not token.is_identifier():
                    self.tok.unget(token)
                    break
                self.message.ednsflags = self.message.ednsflags | \
                                         dns.flags.edns_from_text(token.value)
        elif what == 'payload':
            self.message.payload = self.tok.get_int()
            # A payload also implies at least EDNS version 0.
            if self.message.edns < 0:
                self.message.edns = 0
        elif what == 'opcode':
            text = self.tok.get_string()
            self.message.flags = self.message.flags | \
                                 dns.opcode.to_flags(dns.opcode.from_text(text))
        elif what == 'rcode':
            text = self.tok.get_string()
            self.message.set_rcode(dns.rcode.from_text(text))
        else:
            raise UnknownHeaderField
        self.tok.get_eol()
    def _question_line(self, section):
        """Process one line from the text format question section."""
        # A leading-whitespace token means "same name as the previous line".
        token = self.tok.get(want_leading = True)
        if not token.is_whitespace():
            self.last_name = dns.name.from_text(token.value, None)
        name = self.last_name
        token = self.tok.get()
        if not token.is_identifier():
            raise dns.exception.SyntaxError
        # Class
        try:
            rdclass = dns.rdataclass.from_text(token.value)
            token = self.tok.get()
            if not token.is_identifier():
                raise dns.exception.SyntaxError
        except dns.exception.SyntaxError:
            raise dns.exception.SyntaxError
        except Exception:
            # The token wasn't a class, so it must be the type; the class
            # defaults to IN.
            rdclass = dns.rdataclass.IN
        # Type
        rdtype = dns.rdatatype.from_text(token.value)
        self.message.find_rrset(self.message.question, name,
                                rdclass, rdtype, create=True,
                                force_unique=True)
        if self.updating:
            # Remember the zone's class for later ANY/NONE restoration.
            self.zone_rdclass = rdclass
        self.tok.get_eol()
    def _rr_line(self, section):
        """Process one line from the text format answer, authority, or
        additional data sections.
        """
        deleting = None
        # Name; leading whitespace means "same name as the previous line".
        token = self.tok.get(want_leading = True)
        if not token.is_whitespace():
            self.last_name = dns.name.from_text(token.value, None)
        name = self.last_name
        token = self.tok.get()
        if not token.is_identifier():
            raise dns.exception.SyntaxError
        # TTL
        try:
            ttl = int(token.value, 0)
            token = self.tok.get()
            if not token.is_identifier():
                raise dns.exception.SyntaxError
        except dns.exception.SyntaxError:
            raise dns.exception.SyntaxError
        except Exception:
            # The token wasn't a number, so the TTL was omitted; use 0.
            ttl = 0
        # Class
        try:
            rdclass = dns.rdataclass.from_text(token.value)
            token = self.tok.get()
            if not token.is_identifier():
                raise dns.exception.SyntaxError
            if rdclass == dns.rdataclass.ANY or rdclass == dns.rdataclass.NONE:
                # In dynamic updates, ANY/NONE mark deletion operations;
                # note the deletion kind and restore the zone's class.
                deleting = rdclass
                rdclass = self.zone_rdclass
        except dns.exception.SyntaxError:
            raise dns.exception.SyntaxError
        except Exception:
            # The token wasn't a class either; the class defaults to IN.
            rdclass = dns.rdataclass.IN
        # Type
        rdtype = dns.rdatatype.from_text(token.value)
        token = self.tok.get()
        if not token.is_eol_or_eof():
            self.tok.unget(token)
            rd = dns.rdata.from_text(rdclass, rdtype, self.tok, None)
            covers = rd.covers()
        else:
            # No rdata on the line (e.g. an update deletion).
            rd = None
            covers = dns.rdatatype.NONE
        rrset = self.message.find_rrset(section, name,
                                        rdclass, rdtype, covers,
                                        deleting, True, self.updating)
        if not rd is None:
            rrset.add(rd, ttl)
    def read(self):
        """Read a text format DNS message and build a dns.message.Message
        object."""
        line_method = self._header_line
        section = None
        while 1:
            token = self.tok.get(True, True)
            if token.is_eol_or_eof():
                break
            if token.is_comment():
                # Comment tokens such as ";QUESTION" or ";UPDATE" switch
                # which section the following lines belong to.
                u = token.value.upper()
                if u == 'HEADER':
                    line_method = self._header_line
                elif u == 'QUESTION' or u == 'ZONE':
                    line_method = self._question_line
                    section = self.message.question
                elif u == 'ANSWER' or u == 'PREREQ':
                    line_method = self._rr_line
                    section = self.message.answer
                elif u == 'AUTHORITY' or u == 'UPDATE':
                    line_method = self._rr_line
                    section = self.message.authority
                elif u == 'ADDITIONAL':
                    line_method = self._rr_line
                    section = self.message.additional
                self.tok.get_eol()
                continue
            self.tok.unget(token)
            line_method(section)
def from_text(text):
    """Convert the text format message into a message object.
    @param text: The text format message.
    @type text: string
    @raises UnknownHeaderField:
    @raises dns.exception.SyntaxError:
    @rtype: dns.message.Message object"""
    # 'text' may also be a file object, but that is an implementation
    # detail; the published file interface is from_file().
    message = Message()
    _TextReader(text, message).read()
    return message
def from_file(f):
    """Read the next text format message from the specified file.
    @param f: file or string.  If I{f} is a string, it is treated
    as the name of a file to open.
    @raises UnknownHeaderField:
    @raises dns.exception.SyntaxError:
    @rtype: dns.message.Message object"""
    if sys.hexversion >= 0x02030000:
        # allow Unicode filenames; turn on universal newline support
        str_type = basestring
        opts = 'rU'
    else:
        str_type = str
        opts = 'r'
    if isinstance(f, str_type):
        # Use open() instead of the deprecated file() builtin (file() was
        # removed in Python 3; open() behaves identically on Python 2).
        f = open(f, opts)
        want_close = True
    else:
        want_close = False
    try:
        m = from_text(f)
    finally:
        # Only close files we opened ourselves; caller-supplied file
        # objects remain the caller's responsibility.
        if want_close:
            f.close()
    return m
def make_query(qname, rdtype, rdclass=dns.rdataclass.IN, use_edns=None,
               want_dnssec=False):
    """Make a query message.
    The query name, type, and class may each be given either as an object
    of the appropriate type or as a string.
    The query will have a randomly chosen query id, and its DNS flags
    will be set to dns.flags.RD.
    @param qname: The query name.
    @type qname: dns.name.Name object or string
    @param rdtype: The desired rdata type.
    @type rdtype: int
    @param rdclass: The desired rdata class; the default is class IN.
    @type rdclass: int
    @param use_edns: The EDNS level to use; the default is None (no EDNS).
    See the description of dns.message.Message.use_edns() for the possible
    values for use_edns and their meanings.
    @type use_edns: int or bool or None
    @param want_dnssec: Should the query indicate that DNSSEC is desired?
    @type want_dnssec: bool
    @rtype: dns.message.Message object"""
    # Accept text forms for each of the three query parameters.
    if isinstance(qname, (str, unicode)):
        qname = dns.name.from_text(qname)
    if isinstance(rdtype, (str, unicode)):
        rdtype = dns.rdatatype.from_text(rdtype)
    if isinstance(rdclass, (str, unicode)):
        rdclass = dns.rdataclass.from_text(rdclass)
    query = Message()
    query.flags |= dns.flags.RD
    query.find_rrset(query.question, qname, rdclass, rdtype,
                     create=True, force_unique=True)
    query.use_edns(use_edns)
    query.want_dnssec(want_dnssec)
    return query
def make_response(query, recursion_available=False, our_payload=8192):
    """Make a message which is a response for the specified query.
    The message returned is really a response skeleton; it has all
    of the infrastructure required of a response, but none of the
    content.
    The response's question section is a shallow copy of the query's
    question section, so the query's question RRsets should not be
    changed.
    @param query: the query to respond to
    @type query: dns.message.Message object
    @param recursion_available: should RA be set in the response?
    @type recursion_available: bool
    @param our_payload: payload size to advertise in EDNS responses; default
    is 8192.
    @type our_payload: int
    @rtype: dns.message.Message object"""
    # Refuse to build a "response to a response".
    if query.flags & dns.flags.QR:
        raise dns.exception.FormError('specified query message is not a query')
    reply = dns.message.Message(query.id)
    # Set QR and mirror the query's RD flag.
    reply.flags = dns.flags.QR | (query.flags & dns.flags.RD)
    if recursion_available:
        reply.flags |= dns.flags.RA
    reply.set_opcode(query.opcode())
    # Shallow copy: the RRsets themselves are shared with the query.
    reply.question = list(query.question)
    if query.edns >= 0:
        reply.use_edns(0, 0, our_payload, query.payload)
    if query.keyname is not None:
        # Sign the response with the same key the query used.
        reply.keyname = query.keyname
        reply.keyring = query.keyring
        reply.request_mac = query.mac
    return reply
| 41,384 | Python | .py | 1,001 | 30.962038 | 94 | 0.595155 | wummel/linkchecker | 1,417 | 234 | 200 | GPL-2.0 | 9/5/2024, 5:13:10 PM (Europe/Amsterdam) |