"""
fractal_json/decoder.py
Recursive Pattern Reconstruction and Fractal Decoding Engine
"""
import json
from typing import Any, Dict, List, Optional, Union
class FractalDecoder:
    """
    Decodes fractal.json format back to standard JSON while preserving
    recursive patterns.

    The fractal format marks structural roles with unicode symbols embedded
    as key prefixes (see SYMBOLIC_MARKERS). Decoding strips those markers,
    expands (pattern, seed) pairs back into full structures, and resolves
    anchor references against a pattern registry populated from "$fractal"
    metadata.
    """

    # Symbol -> semantic role of the marker prefix found on fractal keys.
    SYMBOLIC_MARKERS = {
        '🜏': 'root',
        '∴': 'seed',
        '⇌': 'bidirectional',
        '⧖': 'compression',
        '☍': 'anchor'
    }

    # Role name -> symbol, built once so _get_marker is an O(1) lookup
    # instead of a linear scan on every call. (The outermost iterable of a
    # class-body comprehension is evaluated in class scope, so referencing
    # SYMBOLIC_MARKERS here is legal.)
    _MARKERS_BY_NAME = {name: symbol for symbol, name in SYMBOLIC_MARKERS.items()}

    def __init__(self) -> None:
        # Patterns registered from "$fractal" metadata (interpretability_map).
        self.pattern_registry: Dict[str, Any] = {}
        # Memoized anchor-reference expansions, keyed by anchor string.
        self.expansion_cache: Dict[str, Any] = {}
        # Current depth of the in-flight recursive decode (0 when idle).
        self.recursion_depth = 0
        # Deepest level reached across all decodes; reported in stats.
        self.max_depth_reached = 0
        # Hard ceiling guarding against pathological / cyclic structures.
        self.max_recursion = 100

    def decode(self, fractal_data: Union[Dict, List, Any]) -> Any:
        """
        Convert fractal-format data back to standard JSON-compatible objects.

        Args:
            fractal_data: Any JSON-compatible value. Dicts may carry a
                "$fractal" metadata envelope with the payload under "content".

        Returns:
            The decoded structure with markers stripped and patterns expanded.

        Raises:
            RecursionError: If nesting exceeds ``max_recursion`` levels.
        """
        # Primitives pass through untouched.
        if not isinstance(fractal_data, (dict, list)):
            return fractal_data
        # Peel off the metadata envelope, registering any patterns it carries.
        if isinstance(fractal_data, dict) and "$fractal" in fractal_data:
            self._process_metadata(fractal_data["$fractal"])
            fractal_data = fractal_data.get("content", {})
        return self._decode_recursive(fractal_data)

    def _decode_recursive(self, data: Any) -> Any:
        """
        Depth-tracked dispatch over dicts, lists, and primitives.
        """
        self.recursion_depth += 1
        try:
            # BUGFIX: the limit check now sits inside try/finally, so the
            # depth counter unwinds correctly while RecursionError propagates
            # (previously the raising frame was never decremented).
            if self.recursion_depth > self.max_recursion:
                raise RecursionError("Maximum recursion depth exceeded in fractal decoding")
            # BUGFIX: record the deepest level actually reached. Previously
            # get_decoding_stats() reported recursion_depth, which this
            # finally block had already wound back to zero.
            if self.recursion_depth > self.max_depth_reached:
                self.max_depth_reached = self.recursion_depth
            if isinstance(data, dict):
                return self._decode_dict(data)
            if isinstance(data, list):
                return self._decode_list(data)
            return data
        finally:
            self.recursion_depth -= 1

    def _decode_dict(self, data: Dict) -> Union[Dict, Any]:
        """
        Decode a fractal dictionary: resolve anchors, expand seeds, then
        recurse into remaining children with markers stripped from keys.
        """
        if self._is_fractal_node(data):
            # Anchor references point back into the pattern registry.
            anchor_key = f"{self._get_marker('anchor')}anchor"
            if anchor_key in data:
                return self._resolve_anchor(data[anchor_key], data)
            # A (pattern, seed) pair can be expanded without walking children.
            pattern_key = f"{self._get_marker('root')}pattern"
            seed_key = f"{self._get_marker('seed')}seed"
            pattern_id = data.get(pattern_key)
            seed = data.get(seed_key)
            if pattern_id and seed:
                expanded = self._expand_from_seed(pattern_id, seed, data)
                if expanded is not None:
                    return expanded
        # Fall through: plain dict (or unexpandable node) — decode children.
        decoded = {}
        for key, value in data.items():
            # depth/pattern/anchor bookkeeping keys carry no payload.
            if self._is_metadata_key(key):
                continue
            decoded[self._clean_key(key)] = self._decode_recursive(value)
        return decoded

    def _decode_list(self, data: List) -> List:
        """
        Decode each list element recursively.
        """
        return [self._decode_recursive(item) for item in data]

    def _is_fractal_node(self, data: Dict) -> bool:
        """
        A fractal node carries both a compression-marked ('⧖') and a
        root-marked ('🜏') key.
        """
        if not isinstance(data, dict):
            return False
        compression = self._get_marker('compression')
        root = self._get_marker('root')
        # Guard against non-str keys, which have no startswith().
        keys = [k for k in data if isinstance(k, str)]
        has_depth = any(k.startswith(compression) for k in keys)
        has_pattern = any(k.startswith(root) for k in keys)
        return has_depth and has_pattern

    def _get_marker(self, marker_name: str) -> str:
        """
        Return the symbol for a marker role name, or '' if unknown.
        """
        return self._MARKERS_BY_NAME.get(marker_name, '')

    def _clean_key(self, key: str) -> str:
        """
        Strip a leading symbolic marker (if any) from a key.
        """
        for marker in self.SYMBOLIC_MARKERS:
            if key.startswith(marker):
                return key[len(marker):]
        return key

    def _is_metadata_key(self, key: str) -> bool:
        """
        True for bookkeeping keys (depth/pattern/anchor) that are not payload.
        """
        return self._clean_key(key) in ('depth', 'pattern', 'anchor')

    def _resolve_anchor(self, anchor: str, context: Dict) -> Any:
        """
        Resolve an anchor reference ("#/patterns/<id>") via the pattern
        registry, memoizing the expansion. Unresolvable anchors return the
        node unchanged.
        """
        if anchor in self.expansion_cache:
            return self.expansion_cache[anchor]
        if anchor.startswith("#/patterns/"):
            pattern_id = anchor.split("/")[-1]
            if pattern_id in self.pattern_registry:
                expanded = self._expand_pattern(self.pattern_registry[pattern_id], context)
                self.expansion_cache[anchor] = expanded
                return expanded
        # Cannot resolve — return the raw node as-is.
        return context

    def _expand_from_seed(self, pattern_id: str, seed: Any, context: Dict) -> Optional[Any]:
        """
        Rebuild a full structure from a compressed seed.

        Seed values ending in "expand" are placeholders: the real payload
        lives under the node's bidirectional-children key and is decoded
        recursively. Returns None when the seed is not a dict, signalling
        the caller to fall back to plain decoding.
        """
        if not isinstance(seed, dict):
            return None
        expanded = {}
        # Loop-invariant: the children container key never changes per seed.
        children_key = f"{self._get_marker('bidirectional')}children"
        for key, value in seed.items():
            if isinstance(value, str) and value.endswith("expand"):
                if children_key in context:
                    children = context[children_key]
                    expanded_key = f"{self._get_marker('bidirectional')}{key}"
                    if expanded_key in children:
                        expanded[key] = self._decode_recursive(children[expanded_key])
                    else:
                        expanded[key] = None
                # NOTE(review): when the children container is absent, the
                # placeholder key is silently dropped — behavior preserved
                # from the original; confirm this is intended.
            else:
                expanded[key] = value
        return expanded

    def _expand_pattern(self, pattern: Dict, context: Dict) -> Any:
        """
        Expand a registered pattern with context-specific values.

        Currently a passthrough; hook point for pattern-type-specific
        expansion.
        """
        return pattern

    def _process_metadata(self, metadata: Dict) -> None:
        """
        Ingest "$fractal" metadata: register interpretability patterns for
        later anchor resolution.
        """
        if "interpretability_map" in metadata:
            self.pattern_registry.update(metadata["interpretability_map"])

    def get_decoding_stats(self) -> Dict:
        """
        Return decoding statistics accumulated since construction.
        """
        return {
            "patterns_resolved": len(self.expansion_cache),
            "max_recursion_depth": self.max_depth_reached,
            "pattern_registry_size": len(self.pattern_registry)
        }