code : string
signature : string
docstring : string
loss_without_docstring : float64
loss_with_docstring : float64
factor : float64
self._validate(writing=True)
self._write_superbox(fptr, b'ftbl')
def write(self, fptr)
Write a fragment table box to file.
36.799492
16.182119
2.274084
# Must seek to end of box.
nbytes = offset + length - fptr.tell()
fptr.read(nbytes)
return cls(length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse JPX free box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- FreeBox Instance of the current free box.
6.263519
7.89535
0.793317
fptr.write(struct.pack('>I4s', 22, b'ihdr'))

# signedness and bps are stored together in a single byte
bit_depth_signedness = 0x80 if self.signed else 0x00
bit_depth_signedness |= self.bits_per_component - 1
read_buffer = struct.pack('>IIHBBBB',
                          self.height, self.width, self.num_components,
                          bit_depth_signedness, self.compression,
                          1 if self.colorspace_unknown else 0,
                          1 if self.ip_provided else 0)
fptr.write(read_buffer)
def write(self, fptr)
Write an Image Header box to file.
4.445174
3.879015
1.145954
# Read the box information
read_buffer = fptr.read(14)
params = struct.unpack('>IIHBBBB', read_buffer)

height = params[0]
width = params[1]
num_components = params[2]
bits_per_component = (params[3] & 0x7f) + 1
signed = (params[3] & 0x80) > 0
compression = params[4]
colorspace_unknown = True if params[5] else False
ip_provided = True if params[6] else False

return cls(height, width, num_components=num_components,
           bits_per_component=bits_per_component, signed=signed,
           compression=compression, colorspace_unknown=colorspace_unknown,
           ip_provided=ip_provided, length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse JPEG 2000 image header box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- ImageHeaderBox Instance of the current image header box.
3.033928
2.735413
1.10913
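The write and parse routines above pack the component signedness flag and the bit depth into a single byte. A minimal standalone sketch of that packing; the helper names and example values are illustrative only and not part of the box classes:

# Minimal sketch: the high bit flags signedness, the low 7 bits hold
# (bit depth - 1), mirroring bit_depth_signedness in write()/parse() above.
def pack_bpc(bits_per_component, signed):
    return (0x80 if signed else 0x00) | (bits_per_component - 1)

def unpack_bpc(value):
    return (value & 0x7f) + 1, (value & 0x80) > 0

assert pack_bpc(8, False) == 0x07
assert unpack_bpc(0x87) == (8, True)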
nbytes = length - 8 data = fptr.read(nbytes) bpc = tuple(((x & 0x7f) + 1) for x in bytearray(data)) signed = tuple(((x & 0x80) > 0) for x in bytearray(data)) return cls(bpc, signed, length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse bits per component box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- BitsPerComponent Instance of the current bits per component box.
3.690512
3.932758
0.938403
box = cls(length=length, offset=offset)

# The JP2 header box is a superbox, so go ahead and parse its child boxes.
box.box = box.parse_superbox(fptr)
return box
def parse(cls, fptr, offset, length)
Parse JPEG 2000 header box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- JP2HeaderBox Instance of the current JP2 header box.
8.102286
7.950745
1.01906
fptr.write(struct.pack('>I4s', 12, b'jP ')) fptr.write(struct.pack('>BBBB', *self.signature))
def write(self, fptr)
Write a JPEG 2000 Signature box to file.
5.856055
3.554577
1.647469
read_buffer = fptr.read(4) signature = struct.unpack('>BBBB', read_buffer) return cls(signature=signature, length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse JPEG 2000 signature box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- JPEG2000SignatureBox Instance of the current JPEG2000 signature box.
4.643933
5.008365
0.927235
if ((len(self.bits_per_component) != len(self.signed)) or
        (len(self.signed) != self.palette.shape[1])):
    msg = ("The length of the 'bits_per_component' and the 'signed' "
           "members must equal the number of columns of the palette.")
    self._dispatch_validation_error(msg, writing=writing)

bps = self.bits_per_component
if writing and not all(b == bps[0] for b in bps):
    # We don't support writing palettes with bit depths that are different.
    msg = "Writing palettes with varying bit depths is not supported."
    self._dispatch_validation_error(msg, writing=writing)
def _validate(self, writing=False)
Verify that the box obeys the specifications.
3.677655
3.655208
1.006141
self._validate(writing=True)
bytes_per_row = sum(self.bits_per_component) / 8
bytes_per_palette = bytes_per_row * self.palette.shape[0]
box_length = 8 + 3 + self.palette.shape[1] + bytes_per_palette

# Write the usual (L, T) header.
write_buffer = struct.pack('>I4s', int(box_length), b'pclr')
fptr.write(write_buffer)

# NE, NPC
write_buffer = struct.pack('>HB',
                           self.palette.shape[0], self.palette.shape[1])
fptr.write(write_buffer)

# Bits Per Sample.  Signed components aren't supported.
bps_signed = [x - 1 for x in self.bits_per_component]
write_buffer = struct.pack('>' + 'B' * self.palette.shape[1], *bps_signed)
fptr.write(write_buffer)

# C(i,j)
fptr.write(memoryview(self.palette))
def write(self, fptr)
Write a Palette box to file.
4.570248
4.095945
1.115798
num_bytes = offset + length - fptr.tell()
read_buffer = fptr.read(num_bytes)

nrows, ncols = struct.unpack_from('>HB', read_buffer, offset=0)
bps_signed = struct.unpack_from('>' + 'B' * ncols, read_buffer, offset=3)
bps = [((x & 0x7f) + 1) for x in bps_signed]
signed = [((x & 0x80) > 0) for x in bps_signed]

# Are any components signed or differently sized?  We don't handle that.
if any(signed) or len(set(bps)) != 1:
    msg = ("Palettes with signed components or differently sized "
           "components are not supported.")
    raise IOError(msg)

# The palette is unsigned and all components have the same width.
# This should cover all but a vanishingly small share of palettes.
b = bps[0]
dtype = np.uint8 if b <= 8 else np.uint16 if b <= 16 else np.uint32
palette = np.frombuffer(read_buffer[3 + ncols:], dtype=dtype)
palette = np.reshape(palette, (nrows, ncols))

return cls(palette, bps, signed, length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse palette box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- PaletteBox Instance of the current palette box.
3.897194
3.965351
0.982812
num_bytes = offset + length - fptr.tell()
read_buffer = fptr.read(num_bytes)
mask_length, = struct.unpack_from('>B', read_buffer, offset=0)

# Fully Understands Aspect Mask
# Decodes Completely Mask
fuam = dcm = standard_flag = standard_mask = []
vendor_feature = vendor_mask = []

# The mask length tells us the format string to use when unpacking
# from the buffer read from file.
try:
    mask_format = {1: 'B', 2: 'H', 4: 'I', 8: 'Q'}[mask_length]
    fuam, dcm = struct.unpack_from('>' + mask_format * 2, read_buffer,
                                   offset=1)

    std_flg_offset = 1 + 2 * mask_length
    data = _parse_standard_flag(read_buffer[std_flg_offset:], mask_length)
    standard_flag, standard_mask = data

    nflags = len(standard_flag)
    vndr_offset = 1 + 2 * mask_length + 2 + (2 + mask_length) * nflags
    data = _parse_vendor_features(read_buffer[vndr_offset:], mask_length)
    vendor_feature, vendor_mask = data

except KeyError:
    msg = ('The ReaderRequirements box (rreq) has a mask length of '
           '{length} bytes, but only values of 1, 2, 4, or 8 are '
           'supported. The box contents will not be interpreted.')
    warnings.warn(msg.format(length=mask_length), UserWarning)

return cls(fuam, dcm, standard_flag, standard_mask,
           vendor_feature, vendor_mask, length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse reader requirements box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- ReaderRequirementsBox Instance of the current reader requirements box.
4.254651
4.13754
1.028304
read_buffer = fptr.read(10) (rn1, rd1, rn2, rd2, re1, re2) = struct.unpack('>HHHHBB', read_buffer) vres = rn1 / rd1 * math.pow(10, re1) hres = rn2 / rd2 * math.pow(10, re2) return cls(vres, hres, length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse CaptureResolutionBox. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- CaptureResolutionBox Instance of the current capture resolution box.
3.699134
3.895888
0.949497
length = 8 + len(self.label.encode()) fptr.write(struct.pack('>I4s', length, b'lbl ')) fptr.write(self.label.encode())
def write(self, fptr)
Write a Label box to file.
4.475803
3.574996
1.251974
num_bytes = offset + length - fptr.tell() read_buffer = fptr.read(num_bytes) label = read_buffer.decode('utf-8') return cls(label, length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse Label box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- LabelBox Instance of the current label box.
3.695872
4.066183
0.908929
num_bytes = offset + length - fptr.tell() raw_data = fptr.read(num_bytes) num_associations = int(len(raw_data) / 4) lst = struct.unpack('>' + 'I' * num_associations, raw_data) return cls(lst, length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse number list box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- NumberListBox Instance of the current number list box.
3.476343
4.042283
0.859995
fptr.write(struct.pack('>I4s', len(self.associations) * 4 + 8, b'nlst')) fmt = '>' + 'I' * len(self.associations) write_buffer = struct.pack(fmt, *self.associations) fptr.write(write_buffer)
def write(self, fptr)
Write a NumberList box to file.
4.318263
3.830586
1.127311
read_buffer = ET.tostring(self.xml.getroot(), encoding='utf-8') fptr.write(struct.pack('>I4s', len(read_buffer) + 8, b'xml ')) fptr.write(read_buffer)
def write(self, fptr)
Write an XML box to file.
4.515563
4.273833
1.056561
num_bytes = offset + length - fptr.tell()
read_buffer = fptr.read(num_bytes)

if sys.hexversion < 0x03000000 and codecs.BOM_UTF8 in read_buffer:
    # Python3 with utf-8 handles this just fine.  Actually so does Python2
    # right here since we decode using utf-8.  The real problem comes when
    # __str__ is used on the XML box, and that is where Python2 falls short
    # because of the ascii codec.
    msg = ('A BOM (byte order marker) was detected and removed from the '
           'XML contents in the box starting at byte offset {offset:d}.')
    msg = msg.format(offset=offset)
    warnings.warn(msg, UserWarning)
    read_buffer = read_buffer.replace(codecs.BOM_UTF8, b'')

try:
    text = read_buffer.decode('utf-8')
except UnicodeDecodeError as err:
    # Possibly bad string of bytes to begin with.
    # Try to search for <?xml and go from there.
    decl_start = read_buffer.find(b'<?xml')
    if decl_start <= -1:
        # Nope, that's not it.  All is lost.
        msg = ('A problem was encountered while parsing an XML box:'
               '\n\n\t"{error}"\n\nNo XML was retrieved.')
        warnings.warn(msg.format(error=str(err)), UserWarning)
        return XMLBox(xml=None, length=length, offset=offset)

    text = read_buffer[decl_start:].decode('utf-8')

    # Let the user know that the XML box was problematic.
    msg = ('A UnicodeDecodeError was encountered parsing an XML box '
           'at byte position {offset:d} ({reason}), but the XML was '
           'still recovered.')
    msg = msg.format(offset=offset, reason=err.reason)
    warnings.warn(msg, UserWarning)

# Strip out any trailing nulls, as they can foul up XML parsing.
text = text.rstrip(chr(0))
bfptr = io.BytesIO(text.encode('utf-8'))

try:
    xml = ET.parse(bfptr)
except ET.ParseError as err:
    msg = ('A problem was encountered while parsing an XML box:'
           '\n\n\t"{reason}"\n\nNo XML was retrieved.')
    msg = msg.format(reason=str(err))
    warnings.warn(msg, UserWarning)
    xml = None

return cls(xml=xml, length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse XML box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- XMLBox Instance of the current XML box.
3.943589
3.887644
1.014391
num_uuids = len(self.ulst) length = 4 + 4 + 2 + num_uuids * 16 write_buffer = struct.pack('>I4sH', length, b'ulst', num_uuids) fptr.write(write_buffer) for j in range(num_uuids): fptr.write(self.ulst[j].bytes)
def write(self, fptr)
Write a UUID list box to file.
3.469026
2.921505
1.18741
num_bytes = offset + length - fptr.tell() read_buffer = fptr.read(num_bytes) num_uuids, = struct.unpack_from('>H', read_buffer) ulst = [] for j in range(num_uuids): uuid_buffer = read_buffer[2 + j * 16:2 + (j + 1) * 16] ulst.append(UUID(bytes=uuid_buffer)) return cls(ulst, length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse UUIDList box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- UUIDListBox Instance of the current UUID list box.
2.806219
2.93039
0.957627
# Make sure it is written out as null-terminated.
url = self.url
if self.url[-1] != chr(0):
    url = url + chr(0)
url = url.encode()

length = 8 + 1 + 3 + len(url)
write_buffer = struct.pack('>I4sBBBB', length, b'url ',
                           self.version,
                           self.flag[0], self.flag[1], self.flag[2])
fptr.write(write_buffer)
fptr.write(url)
def write(self, fptr)
Write a data entry url box to file.
4.359691
4.07187
1.070685
num_bytes = offset + length - fptr.tell() read_buffer = fptr.read(num_bytes) data = struct.unpack_from('>BBBB', read_buffer) version = data[0] flag = data[1:4] url = read_buffer[4:].decode('utf-8').rstrip(chr(0)) return cls(version, flag, url, length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse data entry URL box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- DataEntryURLbox Instance of the current data entry URL box.
3.509732
3.80373
0.922708
if self.uuid == _XMP_UUID:
    txt = self.raw_data.decode('utf-8')
    elt = ET.fromstring(txt)
    self.data = ET.ElementTree(elt)
elif self.uuid == _GEOTIFF_UUID:
    self.data = tiff_header(self.raw_data)
elif self.uuid == _EXIF_UUID:
    # Cut off 'EXIF\0\0' part.
    self.data = tiff_header(self.raw_data[6:])
else:
    self.data = self.raw_data
def _parse_raw_data(self)
Private function for parsing UUID payloads if possible.
3.561779
3.427016
1.039324
if self.data is None: return "corrupt" in_mem_name = '/vsimem/geo.tif' gdal.FileFromMemBuffer(in_mem_name, self.raw_data) gtif = gdal.Open(in_mem_name) # Report projection proj_ref = gtif.GetProjectionRef() sref = osr.SpatialReference() sref.ImportFromWkt(proj_ref) psz_pretty_wkt = sref.ExportToPrettyWkt(False) # report geotransform geo_transform = gtif.GetGeoTransform(can_return_null=True) fmt = ('Origin = ({origin_x:.15f},{origin_y:.15f})\n' 'Pixel Size = ({pixel_x:.15f},{pixel_y:.15f})') geotransform_str = fmt.format(origin_x=geo_transform[0], origin_y=geo_transform[3], pixel_x=geo_transform[1], pixel_y=geo_transform[5]) # setup projected to lat/long transform if appropriate if proj_ref is not None and len(proj_ref) > 0: hProj = osr.SpatialReference(proj_ref) if hProj is not None: hLatLong = hProj.CloneGeogCS() if hLatLong is not None: gdal.PushErrorHandler('CPLQuietErrorHandler') hTransform = osr.CoordinateTransformation(hProj, hLatLong) gdal.PopErrorHandler() msg = 'Unable to load PROJ.4 library' # report corners uleft = self.GDALInfoReportCorner(gtif, hTransform, "Upper Left", 0, 0) lleft = self.GDALInfoReportCorner(gtif, hTransform, "Lower Left", 0, gtif.RasterYSize) uright = self.GDALInfoReportCorner(gtif, hTransform, "Upper Right", gtif.RasterXSize, 0) lright = self.GDALInfoReportCorner(gtif, hTransform, "Lower Right", gtif.RasterXSize, gtif.RasterYSize) center = self.GDALInfoReportCorner(gtif, hTransform, "Center", gtif.RasterXSize / 2.0, gtif.RasterYSize / 2.0) gdal.Unlink(in_mem_name) fmt = ("Coordinate System =\n" "{coordinate_system}\n" "{geotransform}\n" "Corner Coordinates:\n" "{upper_left}\n" "{lower_left}\n" "{upper_right}\n" "{lower_right}\n" "{center}") msg = fmt.format(coordinate_system=self._indent(psz_pretty_wkt), geotransform=geotransform_str, upper_left=uleft, upper_right=uright, lower_left=lleft, lower_right=lright, center=center) return msg
def _print_geotiff(self)
Print geotiff information. Shamelessly ripped off from gdalinfo.py Returns ------- str String representation of the degenerate geotiff.
2.284284
2.272527
1.005173
length = 4 + 4 + 16 + len(self.raw_data) write_buffer = struct.pack('>I4s', length, b'uuid') fptr.write(write_buffer) fptr.write(self.uuid.bytes) fptr.write(self.raw_data)
def write(self, fptr)
Write a UUID box to file.
3.329885
2.923724
1.138919
num_bytes = offset + length - fptr.tell() read_buffer = fptr.read(num_bytes) the_uuid = UUID(bytes=read_buffer[0:16]) return cls(the_uuid, read_buffer[16:], length=length, offset=offset)
def parse(cls, fptr, offset, length)
Parse UUID box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- UUIDBox Instance of the current UUID box.
3.432842
3.631781
0.945223
spcod = np.frombuffer(spcod, dtype=np.uint8)

precinct_size = []
for item in spcod:
    ep2 = (item & 0xF0) >> 4
    ep1 = item & 0x0F
    precinct_size.append((2 ** ep1, 2 ** ep2))

return tuple(precinct_size)
def _parse_precinct_size(spcod)
Compute precinct size from SPcod or SPcoc.
2.881117
2.773558
1.03878
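Each SPcod/SPcoc byte carries two 4-bit exponents, and the precinct dimensions are the corresponding powers of two. A small illustrative decode with an assumed byte value:

item = 0x53                # assumed example byte
ep2 = (item & 0xF0) >> 4   # high nibble -> 5
ep1 = item & 0x0F          # low nibble  -> 3
assert (2 ** ep1, 2 ** ep2) == (8, 32)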
msg = 'Code block context:\n '
lines = ['Selective arithmetic coding bypass: {0}',
         'Reset context probabilities on coding pass boundaries: {1}',
         'Termination on each coding pass: {2}',
         'Vertically stripe causal context: {3}',
         'Predictable termination: {4}',
         'Segmentation symbols: {5}']
msg += '\n '.join(lines)
msg = msg.format(((context & 0x01) > 0),
                 ((context & 0x02) > 0),
                 ((context & 0x04) > 0),
                 ((context & 0x08) > 0),
                 ((context & 0x10) > 0),
                 ((context & 0x20) > 0))
return msg
def _context_string(context)
Produce a string to represent the code block context
4.641468
4.245909
1.093162
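The six code-block style flags reported above are individual bits of the context byte. A quick sketch with an assumed example value:

context = 0x25   # assumed example: bits 0, 2 and 5 set
flags = [(context & bit) > 0 for bit in (0x01, 0x02, 0x04, 0x08, 0x10, 0x20)]
# bypass, reset, termination, stripe causal, predictable termination, segmentation symbols
assert flags == [True, False, True, False, False, True]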
numbytes = len(read_buffer)

exponent = []
mantissa = []
if sqcd & 0x1f == 0:
    # no quantization
    data = struct.unpack('>' + 'B' * numbytes, read_buffer)
    for j in range(len(data)):
        exponent.append(data[j] >> 3)
        mantissa.append(0)
else:
    fmt = '>' + 'H' * int(numbytes / 2)
    data = struct.unpack(fmt, read_buffer)
    for j in range(len(data)):
        exponent.append(data[j] >> 11)
        mantissa.append(data[j] & 0x07ff)

return mantissa, exponent
def parse_quantization(read_buffer, sqcd)
Tease out the quantization values. Parameters ---------- read_buffer : sequence of bytes from the QCC and QCD segments. sqcd : int Quantization style byte. Returns ------- tuple Mantissa and exponents from quantization buffer.
2.333806
2.493913
0.935801
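When quantization is present, each 16-bit word read from the QCD/QCC payload splits into a 5-bit exponent and an 11-bit mantissa, as in parse_quantization above. A short sketch with an assumed word value:

word = 0x4a35                 # assumed example value
exponent = word >> 11         # top 5 bits  -> 9
mantissa = word & 0x07ff      # low 11 bits -> 565
assert (exponent, mantissa) == (9, 565)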
msg = '\n Quantization style: ' if sqcc & 0x1f == 0: msg += 'no quantization, ' elif sqcc & 0x1f == 1: msg += 'scalar implicit, ' elif sqcc & 0x1f == 2: msg += 'scalar explicit, ' return msg
def _print_quantization_style(sqcc)
Only to be used with QCC and QCD segments.
3.429068
3.088276
1.11035
msg = ("Unrecognized codestream marker 0x{marker_id:x} encountered at " "byte offset {offset}.") msg = msg.format(marker_id=self._marker_id, offset=fptr.tell()) warnings.warn(msg, UserWarning) cpos = fptr.tell() read_buffer = fptr.read(2) next_item, = struct.unpack('>H', read_buffer) fptr.seek(cpos) if ((next_item & 0xff00) >> 8) == 255: # No segment associated with this marker, so reset # to two bytes after it. segment = Segment(id='0x{0:x}'.format(self._marker_id), offset=self._offset, length=0) else: segment = self._parse_reserved_segment(fptr) return segment
def _parse_unrecognized_segment(self, fptr)
Looks like a valid marker, but not sure from reading the specs.
4.235684
4.035488
1.049609
offset = fptr.tell() - 2 read_buffer = fptr.read(2) length, = struct.unpack('>H', read_buffer) if length > 0: data = fptr.read(length - 2) else: data = None segment = Segment(marker_id='0x{0:x}'.format(self._marker_id), offset=offset, length=length, data=data) return segment
def _parse_reserved_segment(self, fptr)
Parse valid marker segment, segment description is unknown. Parameters ---------- fptr : file object The file to parse. Returns ------- Segment The current segment.
3.076641
3.275245
0.939362
read_buffer = fptr.read(tile_length)

# The tile length could possibly be too large and extend past
# the end of file.  We need to be a bit resilient.
count = min(tile_length, len(read_buffer))
packet = np.frombuffer(read_buffer, dtype=np.uint8, count=count)

indices = np.where(packet == 0xff)
for idx in indices[0]:
    try:
        if packet[idx + 1] == 0x91 and (idx < (len(packet) - 5)):
            offset = sod_marker.offset + 2 + idx
            length = 4
            nsop = packet[(idx + 4):(idx + 6)].view('uint16')[0]
            if sys.byteorder == 'little':
                nsop = nsop.byteswap()
            segment = SOPsegment(nsop, length, offset)
            self.segment.append(segment)
        elif packet[idx + 1] == 0x92:
            offset = sod_marker.offset + 2 + idx
            length = 0
            segment = EPHsegment(length, offset)
            self.segment.append(segment)
    except IndexError:
        continue
def _parse_tile_part_bit_stream(self, fptr, sod_marker, tile_length)
Parse the tile part bit stream for SOP, EPH marker segments.
3.55167
3.308641
1.073453
offset = fptr.tell() - 2 read_buffer = fptr.read(4) data = struct.unpack('>HH', read_buffer) length = data[0] rcme = data[1] ccme = fptr.read(length - 4) return CMEsegment(rcme, ccme, length, offset)
def _parse_cme_segment(self, fptr)
Parse the CME marker segment. Parameters ---------- fptr : file Open file object. Returns ------- CMESegment The current CME segment.
4.469002
4.844839
0.922425
kwargs = {}
offset = fptr.tell() - 2
kwargs['offset'] = offset

read_buffer = fptr.read(2)
length, = struct.unpack('>H', read_buffer)
kwargs['length'] = length

fmt = '>B' if self._csiz <= 255 else '>H'
nbytes = 1 if self._csiz <= 255 else 2
read_buffer = fptr.read(nbytes)
ccoc, = struct.unpack(fmt, read_buffer)

read_buffer = fptr.read(1)
scoc, = struct.unpack('>B', read_buffer)

numbytes = offset + 2 + length - fptr.tell()
read_buffer = fptr.read(numbytes)
spcoc = np.frombuffer(read_buffer, dtype=np.uint8)

return COCsegment(ccoc, scoc, spcoc, length, offset)
def _parse_coc_segment(self, fptr)
Parse the COC marker segment. Parameters ---------- fptr : file Open file object. Returns ------- COCSegment The current COC segment.
2.875211
2.947467
0.975485
offset = fptr.tell() - 2 read_buffer = fptr.read(2) length, = struct.unpack('>H', read_buffer) read_buffer = fptr.read(length - 2) lst = struct.unpack_from('>BBHBBBBBB', read_buffer, offset=0) scod, prog, nlayers, mct, nr, xcb, ycb, cstyle, xform = lst if len(read_buffer) > 10: precinct_size = _parse_precinct_size(read_buffer[10:]) else: precinct_size = None sop = (scod & 2) > 0 eph = (scod & 4) > 0 if sop or eph: cls._parse_tpart_flag = True else: cls._parse_tpart_flag = False pargs = (scod, prog, nlayers, mct, nr, xcb, ycb, cstyle, xform, precinct_size) return CODsegment(*pargs, length=length, offset=offset)
def _parse_cod_segment(cls, fptr)
Parse the COD segment. Parameters ---------- fptr : file Open file object. Returns ------- CODSegment The current COD segment.
4.03923
4.039896
0.999835
offset = fptr.tell() - 2 read_buffer = fptr.read(2) length, = struct.unpack('>H', read_buffer) read_buffer = fptr.read(4 * self._csiz) data = struct.unpack('>' + 'HH' * self._csiz, read_buffer) xcrg = data[0::2] ycrg = data[1::2] return CRGsegment(xcrg, ycrg, length, offset)
def _parse_crg_segment(self, fptr)
Parse the CRG marker segment. Parameters ---------- fptr : file Open file object. Returns ------- CRGSegment The current CRG segment.
3.323476
3.52378
0.943156
offset = fptr.tell() - 2 length = 0 return EOCsegment(length, offset)
def _parse_eoc_segment(self, fptr)
Parse the EOC (end-of-codestream) marker segment. Parameters ---------- fptr : file Open file object. Returns ------- EOCSegment The current EOC segment.
9.446766
13.854305
0.681865
offset = fptr.tell() - 2

read_buffer = fptr.read(3)
length, zplt = struct.unpack('>HB', read_buffer)

numbytes = length - 3
read_buffer = fptr.read(numbytes)
iplt = np.frombuffer(read_buffer, dtype=np.uint8)

packet_len = []
plen = 0
for byte in iplt:
    plen |= (byte & 0x7f)
    if byte & 0x80:
        # Continue by or-ing in the next byte.
        plen <<= 7
    else:
        packet_len.append(plen)
        plen = 0

iplt = packet_len
return PLTsegment(zplt, iplt, length, offset)
def _parse_plt_segment(self, fptr)
Parse the PLT segment. The packet headers are not parsed, i.e. they remain uninterpreted raw data buffers. Parameters ---------- fptr : file Open file object. Returns ------- PLTSegment The current PLT segment.
4.015817
4.098565
0.979811
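The Iplt packet lengths in the PLT segment use a 7-bit continuation encoding: the low 7 bits of each byte carry payload and the high bit says another byte follows. A small sketch decoding assumed example bytes with the same loop as above:

iplt = [0x81, 0x05, 0x20]     # assumed bytes: one two-byte length, one one-byte length
packet_len, plen = [], 0
for byte in iplt:
    plen |= (byte & 0x7f)
    if byte & 0x80:
        plen <<= 7            # continuation bit set: more bytes follow
    else:
        packet_len.append(plen)
        plen = 0
assert packet_len == [133, 32]   # (1 << 7) | 5 == 133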
offset = fptr.tell() - 2 read_buffer = fptr.read(2) length, = struct.unpack('>H', read_buffer) n = ((length - 2) / 7) if self._csiz < 257 else ((length - 2) / 9) n = int(n) nbytes = n * 7 if self._csiz < 257 else n * 9 read_buffer = fptr.read(nbytes) fmt = '>' + 'BBHBBB' * n if self._csiz < 257 else '>' + 'BHHBHB' * n pod_params = struct.unpack(fmt, read_buffer) return PODsegment(pod_params, length, offset)
def _parse_pod_segment(self, fptr)
Parse the POD segment. Parameters ---------- fptr : file Open file object. Returns ------- PODSegment The current POD segment.
3.346999
3.485214
0.960343
offset = fptr.tell() - 2 read_buffer = fptr.read(3) length, zppm = struct.unpack('>HB', read_buffer) numbytes = length - 3 read_buffer = fptr.read(numbytes) return PPMsegment(zppm, read_buffer, length, offset)
def _parse_ppm_segment(self, fptr)
Parse the PPM segment. Parameters ---------- fptr : file Open file object. Returns ------- PPMSegment The current PPM segment.
4.559151
5.242672
0.869624
offset = fptr.tell() - 2

read_buffer = fptr.read(3)
length, zppt = struct.unpack('>HB', read_buffer)

numbytes = length - 3
ippt = fptr.read(numbytes)

return PPTsegment(zppt, ippt, length, offset)
def _parse_ppt_segment(self, fptr)
Parse the PPT segment. The packet headers are not parsed, i.e. they remain "uninterpreted" raw data buffers. Parameters ---------- fptr : file object The file to parse. Returns ------- PPTSegment The current PPT segment.
5.951349
6.717725
0.885917
offset = fptr.tell() - 2 read_buffer = fptr.read(2) length, = struct.unpack('>H', read_buffer) read_buffer = fptr.read(length - 2) fmt = '>HB' if cls._csiz > 256 else '>BB' mantissa_exponent_offset = 3 if cls._csiz > 256 else 2 cqcc, sqcc = struct.unpack_from(fmt, read_buffer) if cqcc >= cls._csiz: msg = ("Invalid QCC component number ({invalid_comp_no}), " "the actual number of components is only {valid_comp_no}.") msg = msg.format(invalid_comp_no=cqcc, valid_comp_no=cls._csiz) warnings.warn(msg, UserWarning) spqcc = read_buffer[mantissa_exponent_offset:] return QCCsegment(cqcc, sqcc, spqcc, length, offset)
def _parse_qcc_segment(cls, fptr)
Parse the QCC segment. Parameters ---------- fptr : file object The file to parse. Returns ------- QCCSegment The current QCC segment.
3.987456
4.123902
0.966913
offset = fptr.tell() - 2 read_buffer = fptr.read(3) length, sqcd = struct.unpack('>HB', read_buffer) spqcd = fptr.read(length - 3) return QCDsegment(sqcd, spqcd, length, offset)
def _parse_qcd_segment(self, fptr)
Parse the QCD segment. Parameters ---------- fptr : file Open file object. Returns ------- QCDSegment The current QCD segment.
5.827024
6.666397
0.874089
offset = fptr.tell() - 2

read_buffer = fptr.read(2)
length, = struct.unpack('>H', read_buffer)

nbytes = 3 if cls._csiz < 257 else 4
fmt = '>BBB' if cls._csiz < 257 else '>HBB'
read_buffer = fptr.read(nbytes)
data = struct.unpack(fmt, read_buffer)

crgn = data[0]
srgn = data[1]
sprgn = data[2]

return RGNsegment(crgn, srgn, sprgn, length, offset)
def _parse_rgn_segment(cls, fptr)
Parse the RGN segment. Parameters ---------- fptr : file Open file object. Returns ------- RGNSegment The current RGN segment.
3.539211
3.755099
0.942508
offset = fptr.tell() - 2

read_buffer = fptr.read(2)
length, = struct.unpack('>H', read_buffer)

read_buffer = fptr.read(length - 2)
data = struct.unpack_from('>HIIIIIIIIH', read_buffer)

rsiz = data[0]
if rsiz not in _KNOWN_PROFILES:
    msg = "Invalid profile: (Rsiz={rsiz}).".format(rsiz=rsiz)
    warnings.warn(msg, UserWarning)

xysiz = (data[1], data[2])
xyosiz = (data[3], data[4])
xytsiz = (data[5], data[6])
xytosiz = (data[7], data[8])

# Csiz is the number of components
Csiz = data[9]

data = struct.unpack_from('>' + 'B' * (length - 36 - 2), read_buffer,
                          offset=36)
bitdepth = tuple(((x & 0x7f) + 1) for x in data[0::3])
signed = tuple(((x & 0x80) > 0) for x in data[0::3])
xrsiz = data[1::3]
yrsiz = data[2::3]

for j, subsampling in enumerate(zip(xrsiz, yrsiz)):
    if 0 in subsampling:
        msg = ("Invalid subsampling value for component {comp}: "
               "dx={dx}, dy={dy}.")
        msg = msg.format(comp=j, dx=subsampling[0], dy=subsampling[1])
        warnings.warn(msg, UserWarning)

try:
    num_tiles_x = (xysiz[0] - xyosiz[0]) / (xytsiz[0] - xytosiz[0])
    num_tiles_y = (xysiz[1] - xyosiz[1]) / (xytsiz[1] - xytosiz[1])
except ZeroDivisionError:
    msg = ("Invalid tile specification: "
           "size of {num_tile_rows} x {num_tile_cols}, "
           "offset of {row_offset} x {col_offset}.")
    msg = msg.format(num_tile_rows=xytsiz[1], num_tile_cols=xytsiz[0],
                     row_offset=xytosiz[1], col_offset=xytosiz[0])
    warnings.warn(msg, UserWarning)
else:
    numtiles = math.ceil(num_tiles_x) * math.ceil(num_tiles_y)
    if numtiles > 65535:
        msg = "Invalid number of tiles: ({numtiles})."
        msg = msg.format(numtiles=numtiles)
        warnings.warn(msg, UserWarning)

kwargs = {
    'rsiz': rsiz,
    'xysiz': xysiz,
    'xyosiz': xyosiz,
    'xytsiz': xytsiz,
    'xytosiz': xytosiz,
    'Csiz': Csiz,
    'bitdepth': bitdepth,
    'signed': signed,
    'xyrsiz': (xrsiz, yrsiz),
    'length': length,
    'offset': offset,
}
segment = SIZsegment(**kwargs)

# Need to keep track of the number of components from SIZ for
# other segments.
cls._csiz = Csiz

return segment
def _parse_siz_segment(cls, fptr)
Parse the SIZ segment. Parameters ---------- fptr : file Open file object. Returns ------- SIZSegment The current SIZ segment.
2.438271
2.418645
1.008114
offset = fptr.tell() - 2 length = 0 return SODsegment(length, offset)
def _parse_sod_segment(self, fptr)
Parse the SOD (start-of-data) segment. Parameters ---------- fptr : file Open file object. Returns ------- SODSegment The current SOD segment.
10.369824
13.664239
0.758902
offset = fptr.tell() - 2

read_buffer = fptr.read(10)
data = struct.unpack('>HHIBB', read_buffer)

length = data[0]
isot = data[1]
psot = data[2]
tpsot = data[3]
tnsot = data[4]

segment = SOTsegment(isot, psot, tpsot, tnsot, length, offset)

# Need to keep easy access to tile offsets and lengths for when
# we encounter start-of-data marker segments.
self._tile_offset.append(segment.offset)
if segment.psot == 0:
    tile_part_length = (self.offset + self.length - segment.offset - 2)
else:
    tile_part_length = segment.psot
self._tile_length.append(tile_part_length)

return segment
def _parse_sot_segment(self, fptr)
Parse the SOT segment. Parameters ---------- fptr : file Open file object. Returns ------- SOTSegment The current SOT segment.
4.354477
4.623061
0.941904
offset = fptr.tell() - 2 read_buffer = fptr.read(2) length, = struct.unpack('>H', read_buffer) read_buffer = fptr.read(length - 2) ztlm, stlm = struct.unpack_from('>BB', read_buffer) ttlm_st = (stlm >> 4) & 0x3 ptlm_sp = (stlm >> 6) & 0x1 nbytes = length - 4 if ttlm_st == 0: ntiles = nbytes / ((ptlm_sp + 1) * 2) else: ntiles = nbytes / (ttlm_st + (ptlm_sp + 1) * 2) if ttlm_st == 0: ttlm = None fmt = '' elif ttlm_st == 1: fmt = 'B' elif ttlm_st == 2: fmt = 'H' if ptlm_sp == 0: fmt += 'H' else: fmt += 'I' data = struct.unpack_from('>' + fmt * int(ntiles), read_buffer, offset=2) if ttlm_st == 0: ttlm = None ptlm = data else: ttlm = data[0::2] ptlm = data[1::2] return TLMsegment(ztlm, ttlm, ptlm, length, offset)
def _parse_tlm_segment(self, fptr)
Parse the TLM segment. Parameters ---------- fptr : file Open file object. Returns ------- TLMSegment The current TLM segment.
2.77214
2.807718
0.987329
the_id = '0x{0:x}'.format(self._marker_id) segment = Segment(marker_id=the_id, offset=self._offset, length=0) return segment
def _parse_reserved_marker(self, fptr)
Marker range between 0xff30 and 0xff39.
6.975397
6.18914
1.127038
event_as_dict = copy.deepcopy(self.event_body)

if self.timestamp:
    if "keen" in event_as_dict:
        event_as_dict["keen"]["timestamp"] = self.timestamp.isoformat()
    else:
        event_as_dict["keen"] = {"timestamp": self.timestamp.isoformat()}

return json.dumps(event_as_dict)
def to_json(self)
Serializes the event to JSON. :returns: a string
2.857398
3.058136
0.934359
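to_json nests the ISO-formatted timestamp under a "keen" key before serializing the body. A minimal usage sketch with a hypothetical event body; setdefault is used here as a shorthand for the same branch above:

import copy
import datetime
import json

event_body = {"purchase": {"amount": 10}}           # hypothetical body
timestamp = datetime.datetime(2020, 1, 1, 12, 0, 0)
event_as_dict = copy.deepcopy(event_body)
event_as_dict.setdefault("keen", {})["timestamp"] = timestamp.isoformat()
print(json.dumps(event_as_dict))
# {"purchase": {"amount": 10}, "keen": {"timestamp": "2020-01-01T12:00:00"}}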
event = Event(self.project_id, event_collection, event_body, timestamp=timestamp) self.persistence_strategy.persist(event)
def add_event(self, event_collection, event_body, timestamp=None)
Adds an event. Depending on the persistence strategy of the client, this will either result in the event being uploaded to Keen immediately or will result in saving the event to some local cache. :param event_collection: the name of the collection to insert the event to :param event_body: dict, the body of the event to insert :param timestamp: datetime, optional, the timestamp of the event
5.273162
6.523705
0.808308
event = Event(self.project_id, event_collection, event_body, timestamp=timestamp) event_json = event.to_json() return "{0}/{1}/projects/{2}/events/{3}?api_key={4}&data={5}".format( self.api.base_url, self.api.api_version, self.project_id, self._url_escape(event_collection), self.api.write_key.decode(sys.getdefaultencoding()), self._base64_encode(event_json) )
def generate_image_beacon(self, event_collection, event_body, timestamp=None)
Generates an image beacon URL. :param event_collection: the name of the collection to insert the event to :param event_body: dict, the body of the event to insert :param timestamp: datetime, optional, the timestamp of the event
3.463385
3.733136
0.927741
params = self.get_params(timeframe=timeframe, timezone=timezone, filters=filters) return self.api.delete_events(event_collection, params)
def delete_events(self, event_collection, timeframe=None, timezone=None, filters=None)
Deletes events. :param event_collection: string, the event collection from which events are being deleted :param timeframe: string or dict, the timeframe in which the events happened example: "previous_7_days" :param timezone: int, the timezone you'd like to use for the timeframe and interval in seconds :param filters: array of dict, contains the filters you'd like to apply to the data example: [{"property_name":"device", "operator":"eq", "property_value":"iPhone"}]
2.663973
2.890662
0.921579
return self.api.create_access_key(name=name, is_active=is_active, permitted=permitted, options=options)
def create_access_key(self, name, is_active=True, permitted=[], options={})
Creates a new access key. A master key must be set first. :param name: the name of the access key to create :param is_active: Boolean value dictating whether this key is currently active (default True) :param permitted: list of strings describing which operation types this key will permit Legal values include "writes", "queries", "saved_queries", "cached_queries", "datasets", and "schema". :param options: dictionary containing more details about the key's permitted and restricted functionality
2.57395
3.222757
0.798679
return self.api.update_access_key_full(access_key_id, name, is_active, permitted, options)
def update_access_key_full(self, access_key_id, name, is_active, permitted, options)
Replaces the 'name', 'is_active', 'permitted', and 'options' values of a given key. A master key must be set first. :param access_key_id: the 'key' value of the access key for which the values will be replaced :param name: the new name desired for this access key :param is_active: whether the key should become enabled (True) or revoked (False) :param permitted: the new list of permissions desired for this access key :param options: the new dictionary of options for this access key
2.124354
2.926569
0.725886
try:
    # python 2
    return base64.b64encode(string_to_encode)
except TypeError:
    # python 3
    encoding = sys.getdefaultencoding()
    base64_bytes = base64.b64encode(bytes(string_to_encode, encoding))
    return base64_bytes.decode(encoding)
def _base64_encode(self, string_to_encode)
Base64 encodes a string, with either Python 2 or 3. :param string_to_encode: the string to encode
1.957528
2.03267
0.963033
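On Python 3 the bytes() constructor path is taken, so the round trip reduces to encoding the string with the default codec and decoding the Base64 bytes back to text. A minimal sketch:

import base64
import sys

encoding = sys.getdefaultencoding()     # normally 'utf-8'
encoded = base64.b64encode(bytes("hello", encoding)).decode(encoding)
assert encoded == "aGVsbG8="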
params = self.get_params(event_collection=event_collection, timeframe=timeframe, timezone=timezone, interval=interval, filters=filters, group_by=group_by, order_by=order_by, target_property=target_property, max_age=max_age, limit=limit) return self.api.query("select_unique", params)
def select_unique(self, event_collection, target_property, timeframe=None, timezone=None, interval=None, filters=None, group_by=None, order_by=None, max_age=None, limit=None)
Performs a select unique query Returns an array of the unique values of a target property for events that meet the given criteria. :param event_collection: string, the name of the collection to query :param target_property: string, the name of the event property you would like to use :param timeframe: string or dict, the timeframe in which the events happened example: "previous_7_days" :param timezone: int, the timezone you'd like to use for the timeframe and interval in seconds :param interval: string, the time interval used for measuring data over time example: "daily" :param filters: array of dict, contains the filters you'd like to apply to the data example: [{"property_name":"device", "operator":"eq", "property_value":"iPhone"}] :param group_by: string or array of strings, the name(s) of the properties you would like to group your results by. example: "customer.id" or ["browser","operating_system"] :param order_by: dictionary or list of dictionary objects containing the property_name(s) to order by and the desired direction(s) of sorting. Example: {"property_name":"result", "direction":keen.direction.DESCENDING} May not be used without a group_by specified. :param limit: positive integer limiting the displayed results of a query using order_by :param max_age: an integer, greater than 30 seconds, the maximum 'staleness' you're willing to trade for increased query performance, in seconds
1.875002
2.304409
0.813659
params = self.get_params(event_collection=event_collection, timeframe=timeframe, timezone=timezone, filters=filters, latest=latest, email=email, property_names=property_names) return self.api.query("extraction", params)
def extraction(self, event_collection, timeframe=None, timezone=None, filters=None, latest=None, email=None, property_names=None)
Performs a data extraction Returns either a JSON object of events or a response indicating an email will be sent to you with data. :param event_collection: string, the name of the collection to query :param timeframe: string or dict, the timeframe in which the events happened example: "previous_7_days" :param timezone: int, the timezone you'd like to use for the timeframe and interval in seconds :param filters: array of dict, contains the filters you'd like to apply to the data example: [{"property_name":"device", "operator":"eq", "property_value":"iPhone"}] :param latest: int, the number of most recent records you'd like to return :param email: string, optional string containing an email address to email results to :param property_names: string or list of strings, used to limit the properties returned
2.228215
2.972006
0.749734
params = self.get_params( steps=steps, timeframe=timeframe, timezone=timezone, max_age=max_age, ) return self.api.query("funnel", params, all_keys=all_keys)
def funnel(self, steps, timeframe=None, timezone=None, max_age=None, all_keys=False)
Performs a Funnel query Returns an object containing the results for each step of the funnel. :param steps: array of dictionaries, one for each step. example: [{"event_collection":"signup","actor_property":"user.id"}, {"event_collection":"purchase","actor_property":"user.id"}] :param timeframe: string or dict, the timeframe in which the events happened example: "previous_7_days" :param timezone: int, the timezone you'd like to use for the timeframe and interval in seconds :param max_age: an integer, greater than 30 seconds, the maximum 'staleness' you're willing to trade for increased query performance, in seconds :param all_keys: set to true to return all keys on response (i.e. "result", "actors", "steps")
2.618564
3.145093
0.832587
OPENJPEG.opj_version.restype = ctypes.c_char_p library_version = OPENJPEG.opj_version() if sys.hexversion >= 0x03000000: return library_version.decode('utf-8') else: return library_version
def version()
Wrapper for opj_version library routine.
3.159983
2.891171
1.092977
argtypes = [ctypes.POINTER(CommonStructType), ctypes.c_char_p, ctypes.c_int] OPENJPEG.opj_cio_open.argtypes = argtypes OPENJPEG.opj_cio_open.restype = ctypes.POINTER(CioType) if src is None: length = 0 else: length = len(src) cio = OPENJPEG.opj_cio_open(ctypes.cast(cinfo, ctypes.POINTER(CommonStructType)), src, length) return cio
def cio_open(cinfo, src=None)
Wrapper for openjpeg library function opj_cio_open.
3.064531
2.494007
1.228758
OPENJPEG.opj_cio_close.argtypes = [ctypes.POINTER(CioType)] OPENJPEG.opj_cio_close(cio)
def cio_close(cio)
Wraps openjpeg library function cio_close.
5.948994
5.026667
1.183487
OPENJPEG.cio_tell.argtypes = [ctypes.POINTER(CioType)] OPENJPEG.cio_tell.restype = ctypes.c_int pos = OPENJPEG.cio_tell(cio) return pos
def cio_tell(cio)
Get position in byte stream.
3.602913
3.228205
1.116073
OPENJPEG.opj_create_compress.argtypes = [ctypes.c_int] OPENJPEG.opj_create_compress.restype = ctypes.POINTER(CompressionInfoType) cinfo = OPENJPEG.opj_create_compress(fmt) return cinfo
def create_compress(fmt)
Wrapper for openjpeg library function opj_create_compress. Creates a J2K/JPT/JP2 compression structure.
3.500527
3.284184
1.065874
OPENJPEG.opj_create_decompress.argtypes = [ctypes.c_int] restype = ctypes.POINTER(DecompressionInfoType) OPENJPEG.opj_create_decompress.restype = restype dinfo = OPENJPEG.opj_create_decompress(fmt) return dinfo
def create_decompress(fmt)
Wraps openjpeg library function opj_create_decompress.
3.767837
3.012287
1.250823
argtypes = [ctypes.POINTER(DecompressionInfoType), ctypes.POINTER(CioType)] OPENJPEG.opj_decode.argtypes = argtypes OPENJPEG.opj_decode.restype = ctypes.POINTER(ImageType) image = OPENJPEG.opj_decode(dinfo, cio) return image
def decode(dinfo, cio)
Wrapper for opj_decode.
4.542125
3.732548
1.216897
argtypes = [ctypes.POINTER(CompressionInfoType)] OPENJPEG.opj_destroy_compress.argtypes = argtypes OPENJPEG.opj_destroy_compress(cinfo)
def destroy_compress(cinfo)
Wrapper for openjpeg library function opj_destroy_compress. Release resources for a compressor handle.
5.501136
4.572337
1.203134
argtypes = [ctypes.POINTER(CompressionInfoType), ctypes.POINTER(CioType), ctypes.POINTER(ImageType)] OPENJPEG.opj_encode.argtypes = argtypes OPENJPEG.opj_encode.restype = ctypes.c_int status = OPENJPEG.opj_encode(cinfo, cio, image) return status
def encode(cinfo, cio, image)
Wrapper for openjpeg library function opj_encode. Encodes an image into a JPEG-2000 codestream. Parameters ---------- cinfo : compression handle cio : output buffer stream image : image to encode
3.145663
3.660648
0.859318
argtypes = [ctypes.POINTER(DecompressionInfoType)] OPENJPEG.opj_destroy_decompress.argtypes = argtypes OPENJPEG.opj_destroy_decompress(dinfo)
def destroy_decompress(dinfo)
Wraps openjpeg library function opj_destroy_decompress.
5.577543
3.707014
1.504592
lst = [ctypes.c_int, ctypes.POINTER(ImageComptParmType), ctypes.c_int] OPENJPEG.opj_image_create.argtypes = lst OPENJPEG.opj_image_create.restype = ctypes.POINTER(ImageType) image = OPENJPEG.opj_image_create(len(cmptparms), cmptparms, cspace) return(image)
def image_create(cmptparms, cspace)
Wrapper for openjpeg library function opj_image_create.
3.777342
3.13692
1.204156
OPENJPEG.opj_image_destroy.argtypes = [ctypes.POINTER(ImageType)] OPENJPEG.opj_image_destroy(image)
def image_destroy(image)
Wraps openjpeg library function opj_image_destroy.
5.88014
3.376229
1.741629
cparams = CompressionParametersType() argtypes = [ctypes.POINTER(CompressionParametersType)] OPENJPEG.opj_set_default_encoder_parameters.argtypes = argtypes OPENJPEG.opj_set_default_encoder_parameters(ctypes.byref(cparams)) return cparams
def set_default_encoder_parameters()
Wrapper for openjpeg library function opj_set_default_encoder_parameters.
4.167906
3.130628
1.331332
argtypes = [ctypes.POINTER(DecompressionParametersType)] OPENJPEG.opj_set_default_decoder_parameters.argtypes = argtypes OPENJPEG.opj_set_default_decoder_parameters(dparams_p)
def set_default_decoder_parameters(dparams_p)
Wrapper for opj_set_default_decoder_parameters.
4.433886
3.78309
1.172027
argtypes = [ctypes.POINTER(CommonStructType), ctypes.POINTER(EventMgrType), ctypes.c_void_p] OPENJPEG.opj_set_event_mgr.argtypes = argtypes OPENJPEG.opj_set_event_mgr(ctypes.cast(dinfo, ctypes.POINTER(CommonStructType)), event_mgr, context)
def set_event_mgr(dinfo, event_mgr, context=None)
Wrapper for openjpeg library function opj_set_event_mgr.
3.447581
2.828352
1.218937
argtypes = [ctypes.POINTER(CompressionInfoType), ctypes.POINTER(CompressionParametersType), ctypes.POINTER(ImageType)] OPENJPEG.opj_setup_encoder.argtypes = argtypes OPENJPEG.opj_setup_encoder(cinfo, cparameters, image)
def setup_encoder(cinfo, cparameters, image)
Wrapper for openjpeg library function opj_setup_encoder.
3.944243
3.151027
1.251732
argtypes = [ctypes.POINTER(DecompressionInfoType), ctypes.POINTER(DecompressionParametersType)] OPENJPEG.opj_setup_decoder.argtypes = argtypes OPENJPEG.opj_setup_decoder(dinfo, dparams)
def setup_decoder(dinfo, dparams)
Wrapper for openjpeg library function opj_setup_decoder.
5.13906
3.376858
1.521847
# Print column header
# Assumes first row contains all needed headers
first = sorted(matrix.keys())[0]
print('\t', end=' ')
for i in matrix[first]:
    print('{}\t'.format(i), end=' ')
print()

indent_count = 0
for i in matrix:
    # Print line header
    print('{}\t'.format(i), end=' ')
    if indent_count:
        print('\t' * indent_count, end=' ')
    for j in sorted(matrix[i]):
        # required because dict doesn't guarantee insertion order
        print('{}\t'.format(matrix[i][j]), end=' ')
    print()
    indent_count = indent_count + 1
def print_upper_triangular_matrix(matrix)
Prints a CVRP data dict matrix Arguments --------- matrix : dict Description Notes ----- It is assumed that the first row of matrix contains all needed headers.
3.630487
3.422722
1.060702
for i in sorted(matrix.keys()):
    for j in sorted(matrix.keys()):
        a, b = i, j
        if a > b:
            a, b = b, a
        print(matrix[a][b], end=' ')
    print()
def print_upper_triangular_matrix_as_complete(matrix)
Prints a CVRP data dict upper triangular matrix as a normal matrix Doesn't print headers. Arguments --------- matrix : dict Description
2.714718
3.164247
0.857935
total_cost = 0
for route in solution.routes():
    cost = route.length()
    total_cost = total_cost + cost
    print('{}: {}'.format(route, cost))
print('Total cost: {}'.format(total_cost))
def print_solution(solution)
Prints a solution Arguments --------- solution : BaseSolution Example ------- :: [8, 9, 10, 7]: 160 [5, 6]: 131 [3, 4, 2]: 154 Total cost: 445
4.472735
4.621461
0.967818
'''Validate if total generation of a grid in a pkl file is what expected. Parameters ---------- session : sqlalchemy.orm.session.Session Database session nw: The network Returns ------- DataFrame compare_by_level DataFrame compare_by_type ''' #config network intern variables nw._config = nw.import_config() nw._pf_config = nw.import_pf_config() nw._static_data = nw.import_static_data() nw._orm = nw.import_orm() #rescue generation from input table generation_input = nw.list_generators(session) #make table of generators that are in the grid gen_idx = 0 gen_dict = {} for mv_district in nw.mv_grid_districts(): #search over MV grid for node in mv_district.mv_grid.graph_nodes_sorted(): if isinstance(node, GeneratorDing0): gen_idx+=1 subtype = node.subtype if subtype == None: subtype = 'other' type = node.type if type == None: type = 'other' gen_dict[gen_idx] = { 'v_level':node.v_level, 'type':type, 'subtype':subtype, 'GenCap':node.capacity, } #search over LV grids for LA in mv_district.lv_load_areas(): for lv_district in LA.lv_grid_districts(): # generation capacity for g in lv_district.lv_grid.generators(): gen_idx+=1 subtype = g.subtype if subtype == None: subtype = 'other' type = g.type if type == None: type = 'other' gen_dict[gen_idx] = { 'v_level':g.v_level, 'type':type, 'subtype':subtype, 'GenCap':g.capacity, } generation_effective = pd.DataFrame.from_dict(gen_dict, orient='index') #compare by voltage level input_by_level = generation_input.groupby('v_level').sum()['GenCap'].apply(lambda x: np.round(x,3)) effective_by_level = generation_effective.groupby('v_level').sum()['GenCap'].apply(lambda x: np.round(x,3)) compare_by_level = pd.concat([input_by_level,effective_by_level,input_by_level==effective_by_level],axis=1) compare_by_level.columns = ['table','ding0','equal?'] #compare by type/subtype generation_input['type'] =generation_input['type']+'/'+generation_input['subtype'] generation_effective['type'] =generation_effective['type']+'/'+generation_effective['subtype'] input_by_type = generation_input.groupby('type').sum()['GenCap'].apply(lambda x: np.round(x,3)) effective_by_type = generation_effective.groupby('type').sum()['GenCap'].apply(lambda x: np.round(x,3)) compare_by_type = pd.concat([input_by_type,effective_by_type,input_by_type==effective_by_type],axis=1) compare_by_type.columns = ['table','ding0','equal?'] compare_by_type.index.names = ['type/subtype'] return compare_by_level, compare_by_type
def validate_generation(session, nw)
Validate if total generation of a grid in a pkl file matches what is expected. Parameters ---------- session : sqlalchemy.orm.session.Session Database session nw : The network Returns ------- DataFrame compare_by_level DataFrame compare_by_type
3.037123
2.614145
1.161804
'''Validate if total load of a grid in a pkl file is what expected from load areas Parameters ---------- session : sqlalchemy.orm.session.Session Database session nw The network Returns ------- DataFrame compare_by_la Bool True if data base IDs of LAs are the same as the IDs in the grid ''' #config network intern variables nw._config = nw.import_config() nw._pf_config = nw.import_pf_config() nw._static_data = nw.import_static_data() nw._orm = nw.import_orm() #rescue peak load from input table load_input = nw.list_load_areas(session, nw.mv_grid_districts()) la_input = sorted(load_input.index) load_input = load_input.sum(axis=0).apply(lambda x: np.round(x,3)) load_input.sort_index(inplace=True) #search for LA in the grid la_idx = 0 la_dict = {} for mv_district in nw.mv_grid_districts(): for LA in mv_district.lv_load_areas(): la_idx +=1 la_dict[la_idx] = { 'id_db':LA.id_db, 'peak_load_residential':LA.peak_load_residential, 'peak_load_retail':LA.peak_load_retail, 'peak_load_industrial':LA.peak_load_industrial, 'peak_load_agricultural':LA.peak_load_agricultural, } #compare by LA load_effective = pd.DataFrame.from_dict(la_dict,orient='index').set_index('id_db') la_effective = sorted(load_effective.index) load_effective = load_effective.sum(axis=0).apply(lambda x: np.round(x,3)) load_effective.sort_index(inplace=True) compare_by_la = pd.concat([load_input,load_effective,load_input==load_effective],axis=1) compare_by_la.columns = ['table','ding0','equal?'] compare_by_la.index.names = ['sector'] return compare_by_la, la_input==la_effective
def validate_load_areas(session, nw)
Validate if total load of a grid in a pkl file matches what is expected from load areas Parameters ---------- session : sqlalchemy.orm.session.Session Database session nw : The network Returns ------- DataFrame compare_by_la Bool True if data base IDs of LAs are the same as the IDs in the grid
4.293168
3.033305
1.415343
crit_nodes = {}

if mode == 'MV':
    # load max. voltage difference for load and feedin case
    mv_max_v_level_lc_diff_normal = float(cfg_ding0.get(
        'mv_routing_tech_constraints', 'mv_max_v_level_lc_diff_normal'))
    mv_max_v_level_fc_diff_normal = float(cfg_ding0.get(
        'mv_routing_tech_constraints', 'mv_max_v_level_fc_diff_normal'))

    # check nodes' voltages
    voltage_station = grid._station.voltage_res
    for node in grid.graph_nodes_sorted():
        try:
            # compare node's voltage with max. allowed voltage difference
            # for load and feedin case
            if (abs(voltage_station[0] - node.voltage_res[0]) >
                    mv_max_v_level_lc_diff_normal) or \
               (abs(voltage_station[1] - node.voltage_res[1]) >
                    mv_max_v_level_fc_diff_normal):
                crit_nodes[node] = {
                    'node': node,
                    'v_diff': max([abs(v2 - v1) for v1, v2 in
                                   zip(node.voltage_res, voltage_station)])}
        except:
            pass

elif mode == 'LV':
    raise NotImplementedError

if crit_nodes:
    logger.info('==> {} nodes have voltage issues.'.format(len(crit_nodes)))

return [_['node'] for _ in sorted(crit_nodes.values(),
                                  key=lambda _: _['v_diff'], reverse=True)]
def check_voltage(grid, mode)
Checks for voltage stability issues at all nodes for MV or LV grid Parameters ---------- grid : GridDing0 Grid identifier. mode : str Kind of grid ('MV' or 'LV'). Returns ------- :any:`list` of :any:`GridDing0` List of critical nodes, sorted descending by voltage difference. Notes ----- The examination is done in two steps, according to [#]_ : 1. It is checked #TODO: what? 2. #TODO: what's next? References ---------- .. [#] dena VNS
4.094612
3.761922
1.088436
cos_phi_load = cfg_ding0.get('assumptions', 'cos_phi_load')
cos_phi_feedin = cfg_ding0.get('assumptions', 'cos_phi_gen')
lf_trafo_load = cfg_ding0.get('assumptions', "load_factor_lv_trans_lc_normal")
lf_trafo_gen = cfg_ding0.get('assumptions', "load_factor_lv_trans_fc_normal")

critical_branches = []
critical_stations = []

# Convert grid to a tree (is a directed graph);
# based on this tree, descendants of each node are accessible
station = grid._station
tree = nx.dfs_tree(grid._graph, station)

for node in tree.nodes():
    # list of descendant nodes including the node itself
    descendants = list(nx.descendants(tree, node))
    descendants.append(node)

    if isinstance(node, LVStationDing0):
        # determine cumulative peak load at node and assign to branch
        peak_load, peak_gen = peak_load_generation_at_node(descendants)

        if grid.id_db == 61107:
            if isinstance(node, LVStationDing0):
                print(node)

        # get trafos cumulative apparent power
        s_max_trafos = sum([_.s_max_a for _ in node._transformers])

        # compare with load and generation connected to
        if (((peak_load / cos_phi_load) > s_max_trafos * lf_trafo_load) or
                ((peak_gen / cos_phi_feedin) > s_max_trafos * lf_trafo_gen)):
            critical_stations.append(
                {'station': node,
                 's_max': [peak_load / cos_phi_load,
                           peak_gen / cos_phi_feedin]})

    else:
        # preceding node of node
        predecessors = list(tree.predecessors(node))

        # a non-meshed grid topology returns a list with only 1 item
        predecessor = predecessors[0]

        # get preceding branch
        branches = grid.graph_branches_from_node(node)
        preceeding_branch = [branch for branch in branches
                             if branch[0] is predecessor][0]

        # determine cumulative peak load at node and assign to branch
        peak_load, peak_gen = peak_load_generation_at_node(descendants)

        s_max_th = 3 ** 0.5 * preceeding_branch[1]['branch'].type['U_n'] * \
            preceeding_branch[1]['branch'].type['I_max_th'] / 1e3

        if (((peak_load / cos_phi_load) > s_max_th) or
                ((peak_gen / cos_phi_feedin) > s_max_th)):
            critical_branches.append(
                {'branch': preceeding_branch[1]['branch'],
                 's_max': [peak_load / cos_phi_load,
                           peak_gen / cos_phi_feedin]})

return critical_branches, critical_stations
def get_critical_line_loading(grid)
Assign line loading to each branch determined by peak load and peak generation of descendant branches The attribute `s_res` is a list of two elements 1. apparent power in load case 2. apparent power in feed-in case Parameters ---------- grid : ding0.core.network.grids.LVGridDing0 Ding0 LV grid object Returns ------- :any:`list` List of critical branches incl. its line loading :any:`list` List of critical stations incl. its transformer loading
4.439156
3.913928
1.134195
loads = [node.peak_load for node in nodes if isinstance(node, LVLoadDing0)] peak_load = sum(loads) generation = [node.capacity for node in nodes if isinstance(node, GeneratorDing0)] peak_generation = sum(generation) return peak_load, peak_generation
def peak_load_generation_at_node(nodes)
Get maximum occurring load and generation at a certain node Summarizes peak loads and nominal generation power of descendant nodes of a branch Parameters ---------- nodes : :any:`list` Any LV grid Ding0 node object that is part of the grid topology Returns ------- :any:`float` peak_load : Sum of peak loads of descendant nodes :any:`float` peak_generation : Sum of nominal power of generation at descendant nodes
5.62212
3.51105
1.601264
delta_v = (s_max * (
    r * cos_phi + x * math.sin(math.acos(cos_phi)))) / v_nom ** 2
return delta_v
def voltage_delta_vde(v_nom, s_max, r, x, cos_phi)
Estimate voltage drop/increase. The VDE [#]_ proposes a simplified method to estimate voltage drop or increase in radial grids. Parameters ---------- v_nom : int Nominal voltage s_max : float Apparent power r : float Short-circuit resistance from node to HV/MV substation (in ohm) x : float Short-circuit reactance from node to HV/MV substation (in ohm). Must be a signed number indicating (+) inductive reactive consumer (load case) or (-) inductive reactive supplier (generation case) cos_phi : float Power factor Returns ------- :any:`float` Voltage drop or increase References ---------- .. [#] VDE Anwenderrichtlinie: Erzeugungsanlagen am Niederspannungsnetz – Technische Mindestanforderungen für Anschluss und Parallelbetrieb von Erzeugungsanlagen am Niederspannungsnetz, 2011
4.163304
6.218809
0.66947
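Written out, `voltage_delta_vde` implements the VDE estimate

\[
\Delta v = \frac{S_{\mathrm{max}} \left( R \cos\varphi + X \sin\varphi \right)}{V_{\mathrm{nom}}^{2}},
\qquad \sin\varphi = \sin(\arccos(\cos\varphi)),
\]

where the sign of X selects the case: positive reactance for the load case (voltage drop), negative for the feed-in case (voltage rise).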
generation = 0
peak_load = 0

for cus_1 in graph.successors(node):
    for cus_2 in graph.successors(cus_1):
        if not isinstance(cus_2, list):
            cus_2 = [cus_2]
        generation += sum([gen.capacity for gen in cus_2
                           if isinstance(gen, GeneratorDing0)])
        peak_load += sum([load.peak_load for load in cus_2
                          if isinstance(load, LVLoadDing0)])

return [peak_load, generation]
def get_house_conn_gen_load(graph, node)
Get generation capacity and peak load of neighboring houses connected to the main branch. Parameters ---------- graph : :networkx:`NetworkX Graph Obj< >` Directed graph node : graph node Node of the main branch of LV grid Returns ------- :any:`list` A list containing two items: 1. peak load of connected house branch 2. generation capacity of connected generators
4.370119
4.223135
1.034804
cos_phi_load = cfg_ding0.get('assumptions', 'cos_phi_load')
cos_phi_feedin = cfg_ding0.get('assumptions', 'cos_phi_gen')
v_nom = cfg_ding0.get('assumptions', 'lv_nominal_voltage')
omega = 2 * math.pi * 50

# add resistance/reactance of the preceding branch
in_edge = [_ for _ in grid.graph_branches_from_node(node)
           if _[0] in list(tree.predecessors(node))][0][1]
r = r_preceeding + (in_edge['branch'].type['R'] *
                    in_edge['branch'].length)
x = x_preceeding + (in_edge['branch'].type['L'] / 1e3 *
                    omega * in_edge['branch'].length)

# get apparent power for load and generation case
peak_load, gen_capacity = get_house_conn_gen_load(tree, node)
s_max_load = peak_load / cos_phi_load
s_max_feedin = gen_capacity / cos_phi_feedin

# determine voltage increase/drop at node
voltage_delta_load = voltage_delta_vde(v_nom, s_max_load, r, x,
                                       cos_phi_load)
voltage_delta_gen = voltage_delta_vde(v_nom, s_max_feedin, r, -x,
                                      cos_phi_feedin)

return [voltage_delta_load, voltage_delta_gen, r, x]
def get_voltage_delta_branch(grid, tree, node, r_preceeding, x_preceeding)
Determine voltage drop for the preceding branch (edge) of a node. Parameters ---------- grid : LVGridDing0 Ding0 grid object tree : :networkx:`NetworkX Graph Obj< >` Tree of grid topology node : graph node Node to determine voltage level at r_preceeding : float Resistance of preceding grid x_preceeding : float Reactance of preceding grid Returns ------- :any:`list` Voltage delta at node in load case, voltage delta in feed-in case, and the cumulative resistance and reactance up to the node
5.058012
4.869014
1.038816
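A minimal usage sketch (not from the ding0 source; `lv_grid` and `main_route` are hypothetical names) showing how the returned resistance and reactance are fed back in when walking along a feeder:

# Hedged sketch: accumulate series impedance node by node along a main route.
# `lv_grid` is assumed to be an LVGridDing0 instance, `main_route` a list of
# its main-branch nodes ordered from the station outwards (both hypothetical).
import networkx as nx

tree = nx.dfs_tree(lv_grid._graph, lv_grid._station)

# one plausible starting point: impedance of the upstream MV grid
r_cum, x_cum = get_mv_impedance(lv_grid)

for node in main_route:
    v_delta_load, v_delta_gen, r_cum, x_cum = get_voltage_delta_branch(
        lv_grid, tree, node, r_cum, x_cum)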
omega = 2 * math.pi * 50

mv_grid = grid.grid_district.lv_load_area.mv_grid_district.mv_grid
edges = mv_grid.find_path(grid._station, mv_grid._station, type='edges')
r_mv_grid = sum([e[2]['branch'].type['R'] * e[2]['branch'].length / 1e3
                 for e in edges])
x_mv_grid = sum([e[2]['branch'].type['L'] / 1e3 * omega *
                 e[2]['branch'].length / 1e3 for e in edges])

return [r_mv_grid, x_mv_grid]
def get_mv_impedance(grid)
Determine MV grid impedance (resistance and reactance separately) Parameters ---------- grid : LVGridDing0 Returns ------- :any:`list` List containing resistance and reactance of MV grid
5.667569
5.508829
1.028816
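Transcribing the sums above (with per-length values R' and L' taken from the branch type data and branch lengths l_i):

\[
R_{\mathrm{MV}} = \sum_i R'_i\, \frac{l_i}{10^{3}},
\qquad
X_{\mathrm{MV}} = \sum_i \frac{\omega L'_i}{10^{3}}\, \frac{l_i}{10^{3}},
\qquad
\omega = 2\pi \cdot 50\,\mathrm{s^{-1}}.
\]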
cos_phi_load = cfg_ding0.get('assumptions', 'cos_phi_load')
cos_phi_feedin = cfg_ding0.get('assumptions', 'cos_phi_gen')
v_nom = cfg_ding0.get('assumptions', 'lv_nominal_voltage')
omega = 2 * math.pi * 50

stub_branch = [_ for _ in grid.graph_branches_from_node(main_branch_node)
               if _[0] == stub_node][0][1]
r_stub = stub_branch['branch'].type['R'] * stub_branch['branch'].length / 1e3
x_stub = stub_branch['branch'].type['L'] / 1e3 * omega * \
    stub_branch['branch'].length / 1e3

s_max_gen = [_.capacity / cos_phi_feedin
             for _ in tree.successors(stub_node)
             if isinstance(_, GeneratorDing0)]
if s_max_gen:
    s_max_gen = s_max_gen[0]
    v_delta_stub_gen = voltage_delta_vde(v_nom, s_max_gen,
                                         r_stub + r_preceeding,
                                         x_stub + x_preceeding,
                                         cos_phi_feedin)
else:
    v_delta_stub_gen = 0

s_max_load = [_.peak_load / cos_phi_load
              for _ in tree.successors(stub_node)
              if isinstance(_, LVLoadDing0)]
if s_max_load:
    s_max_load = s_max_load[0]
    v_delta_stub_load = voltage_delta_vde(v_nom, s_max_load,
                                          r_stub + r_preceeding,
                                          x_stub + x_preceeding,
                                          cos_phi_load)
else:
    v_delta_stub_load = 0

return [v_delta_stub_load, v_delta_stub_gen]
def voltage_delta_stub(grid, tree, main_branch_node, stub_node, r_preceeding, x_preceeding)
Determine voltage deltas for stub branches (house connections). Parameters ---------- grid : LVGridDing0 Ding0 grid object tree : :networkx:`NetworkX Graph Obj< >` Tree of grid topology main_branch_node : graph node Node of main branch that the stub branch is connected to stub_node : graph node Node of the stub branch r_preceeding : float Resistance of preceding grid x_preceeding : float Reactance of preceding grid Returns ------- :any:`list` Voltage delta at the stub node in load case and in feed-in case
2.934404
2.790995
1.051383
# voltage at substation bus bar
r_mv_grid, x_mv_grid = get_mv_impedance(grid)

r_trafo = sum([tr.r for tr in grid._station._transformers])
x_trafo = sum([tr.x for tr in grid._station._transformers])

cos_phi_load = cfg_ding0.get('assumptions', 'cos_phi_load')
cos_phi_feedin = cfg_ding0.get('assumptions', 'cos_phi_gen')
v_nom = cfg_ding0.get('assumptions', 'lv_nominal_voltage')

# loads and generators connected to bus bar
bus_bar_load = sum(
    [node.peak_load for node in tree.successors(grid._station)
     if isinstance(node, LVLoadDing0)]) / cos_phi_load
bus_bar_generation = sum(
    [node.capacity for node in tree.successors(grid._station)
     if isinstance(node, GeneratorDing0)]) / cos_phi_feedin

v_delta_load_case_bus_bar = voltage_delta_vde(v_nom,
                                              bus_bar_load,
                                              (r_mv_grid + r_trafo),
                                              (x_mv_grid + x_trafo),
                                              cos_phi_load)
v_delta_gen_case_bus_bar = voltage_delta_vde(v_nom,
                                             bus_bar_generation,
                                             (r_mv_grid + r_trafo),
                                             -(x_mv_grid + x_trafo),
                                             cos_phi_feedin)

return v_delta_load_case_bus_bar, v_delta_gen_case_bus_bar
def get_voltage_at_bus_bar(grid, tree)
Determine voltage level at bus bar of MV-LV substation. Parameters ---------- grid : LVGridDing0 Ding0 grid object tree : :networkx:`NetworkX Graph Obj< >` Tree of grid topology Returns ------- :any:`list` Voltage delta at bus bar. First item refers to load case, second item refers to voltage in feed-in (generation) case
3.418229
3.081486
1.109279
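At the bus bar the upstream impedance is the MV grid impedance in series with the substation transformers, so the deltas computed above correspond to

\[
\Delta v_{\mathrm{load}} = \frac{S_{\mathrm{load}}\big((R_{\mathrm{MV}}+R_{T})\cos\varphi + (X_{\mathrm{MV}}+X_{T})\sin\varphi\big)}{V_{\mathrm{nom}}^{2}},
\qquad
\Delta v_{\mathrm{gen}} = \frac{S_{\mathrm{gen}}\big((R_{\mathrm{MV}}+R_{T})\cos\varphi - (X_{\mathrm{MV}}+X_{T})\sin\varphi\big)}{V_{\mathrm{nom}}^{2}}.
\]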
# TODO: check docstring
self._lv_load_areas.append(lv_load_area)
if not isinstance(lv_load_area, MVCableDistributorDing0):
    self.peak_load += lv_load_area.peak_load
def add_lv_load_area(self, lv_load_area)
Adds a LV load area to _lv_load_areas and increases the peak load accordingly Args ---- lv_load_area: LVLoadAreaDing0 LV load area to be added (may also be a MVCableDistributorDing0, which does not contribute to the peak load)
10.68629
12.011903
0.889642
# TODO: check docstring

# get power factor for loads
cos_phi_load = cfg_ding0.get('assumptions', 'cos_phi_load')

lv_load_area = node.lv_load_area

if lv_load_area not in self.lv_load_areas():  # and isinstance(lv_load_area, LVLoadAreaDing0):
    path_length_to_root = lv_load_area.mv_grid_district.mv_grid.graph_path_length(
        self.root_node, node)
    if ((path_length_to_root <= self.branch_length_max) and
            (lv_load_area.peak_load + self.peak_load) / cos_phi_load <=
            self.peak_load_max):
        return True
    else:
        return False
def can_add_lv_load_area(self, node)
Checks if the LV load area of `node` can be added to this satellite string. The criteria are the maximum branch length and the maximum peak load (apparent power) of the string. Args ---- node: GridDing0 Node whose LV load area is checked Returns ------- bool True if the LV load area can be added (branch length and peak load limits are not exceeded), False otherwise
6.598901
6.300644
1.047338
# TODO: check docstring

# shorter var names for loop
dm = graph._matrix
dn = graph._nodes
def operator_cross(self, graph, solution, op_diff_round_digits)
Applies Cross inter-route operator to solution Takes every node from every route and calculates savings when inserted into all possible positions in other routes. Insertion is done at position with max. saving and procedure starts over again with newly created graph as input. Stops when no improvement is found. Args ---- graph: :networkx:`NetworkX Graph Obj< >` Graph representing the routing problem solution: BaseSolution Current solution to be improved op_diff_round_digits: float Precision (floating point digits) for rounding route length differences. *Details*: In some cases when an exchange is performed on two routes with one node each, the difference between the two solutions (before and after the exchange) is not zero. This is due to internal rounding errors of float type. So that the loop won't keep alternating between these two solutions, we need an additional criterion: a threshold to handle values very close to zero as if they were zero (for a more detailed description of the matter see http://floating-point-gui.de or https://docs.python.org/3.5/tutorial/floatingpoint.html) Returns ------- LocalSearchSolution A solution (LocalSearchSolution class) Todo ---- * allow moves of a 2-node chain * Remove ugly nested loops, convert to more efficient matrix operations
47.920998
60.166374
0.796475
operators = {self.operator_exchange: 'exchange',
             self.operator_relocate: 'relocate',
             self.operator_oropt: 'oropt'}

for op in it.permutations(operators):
    solution = solution.clone()
    solution = op[0](graph, solution, op_diff_round_digits)
    solution = op[1](graph, solution, op_diff_round_digits)
    solution = op[2](graph, solution, op_diff_round_digits)

    logger.info("{0} {1} {2} => Length: {3}".format(
        operators[op[0]],
        operators[op[1]],
        operators[op[2]],
        solution.length()))
def benchmark_operator_order(self, graph, solution, op_diff_round_digits)
Performs all possible permutations of route improvement operators and logs the resulting graph length Args ---- graph: :networkx:`NetworkX Graph Obj< >` A NetworkX graph is used. solution: BaseSolution BaseSolution instance op_diff_round_digits: float Precision (floating point digits) for rounding route length differences. *Details*: In some cases when an exchange is performed on two routes with one node each, the difference between the two solutions (before and after the exchange) is not zero. This is due to internal rounding errors of float type. So that the loop won't keep alternating between these two solutions, we need an additional criterion: a threshold to handle values very close to zero as if they were zero (for a more detailed description of the matter see http://floating-point-gui.de or https://docs.python.org/3.5/tutorial/floatingpoint.html)
3.340123
3.128343
1.067697
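A small, self-contained illustration of the permutation pattern used above (dummy operators stand in for exchange/relocate/oropt; this is not ding0 code):

import itertools as it

# dummy operators standing in for operator_exchange / operator_relocate / operator_oropt
def exchange(solution): return solution + ['exchange']
def relocate(solution): return solution + ['relocate']
def oropt(solution): return solution + ['oropt']

operators = {exchange: 'exchange', relocate: 'relocate', oropt: 'oropt'}

# iterate over all 3! orderings of the operators
for op in it.permutations(operators):
    solution = []                       # fresh solution for each ordering
    for operator in op:
        solution = operator(solution)   # apply operators in this order
    print([operators[o] for o in op], '=>', solution)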
# TODO: If necessary, use timeout to set max processing time of local search

# load threshold for operator (see exchange or relocate operator's
# description for more information)
op_diff_round_digits = int(cfg_ding0.get('mv_routing',
                                         'operator_diff_round_digits'))

solution = LocalSearchSolution(graph, savings_solution)

# FOR BENCHMARKING OF OPERATOR'S ORDER:
# self.benchmark_operator_order(graph, savings_solution, op_diff_round_digits)

for run in range(10):
    start = time.time()
    solution = self.operator_exchange(graph, solution,
                                      op_diff_round_digits, anim)
    time1 = time.time()
    if debug:
        logger.debug('Elapsed time (exchange, run {1}): {0}, '
                     'Solution\'s length: {2}'.format(
                         time1 - start, str(run), solution.length()))

    solution = self.operator_relocate(graph, solution,
                                      op_diff_round_digits, anim)
    time2 = time.time()
    if debug:
        logger.debug('Elapsed time (relocate, run {1}): {0}, '
                     'Solution\'s length: {2}'.format(
                         time2 - time1, str(run), solution.length()))

    solution = self.operator_oropt(graph, solution,
                                   op_diff_round_digits, anim)
    time3 = time.time()
    if debug:
        logger.debug('Elapsed time (oropt, run {1}): {0}, '
                     'Solution\'s length: {2}'.format(
                         time3 - time2, str(run), solution.length()))

return solution
def solve(self, graph, savings_solution, timeout, debug=False, anim=None)
Improve initial savings solution using local search Parameters ---------- graph: :networkx:`NetworkX Graph Obj< >` Graph instance savings_solution: SavingsSolution initial solution of CVRP problem (instance of `SavingsSolution` class) timeout: int max processing time in seconds debug: bool, defaults to False If True, information is printed while routing anim: AnimationDing0 AnimationDing0 object Returns ------- LocalSearchSolution A solution (LocalSearchSolution class)
3.786432
3.573021
1.059728
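A hedged usage sketch (class and variable names are assumptions, not taken verbatim from the ding0 API): the savings solution is computed first and then handed to the local search `solve` shown above.

# Hypothetical names: `LocalSearch`, `graph` and `savings_solution` stand in
# for whatever the routing code actually instantiates.
searcher = LocalSearch()
improved = searcher.solve(graph, savings_solution, timeout=30,
                          debug=True, anim=None)
print('improved route length:', improved.length())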
station = grid._station
tree = nx.dfs_tree(grid._graph, station)

# TODO: idea
# 1. build tree from lv_grid station as root -> directions should point to
#    descendant leaves
# 2. for analysis of current issues get list of descendants with
#    nx.descendants(tree, station). Sum peak load / gen capacity
# 3. Extract nodes belonging to main route of a branch by checking all
#    successors if these are LVCableDistributors

# notes and hints:
# 1. associated edges can be accessed via grid._graph.in_edges(<node>)
#    respectively grid._graph.out_edges(<node>)
# 2. when using nx.descendants(tree, station) make sure the order of nodes
#    is maintained as this is important to properly assess voltage and over-
#    loading issues

# first_cbl_dists = [x for x in grid._graph.neighbors(station)
#                    if isinstance(x, LVCableDistributorDing0)]
# if len(first_cbl_dists) > 0:
#     ancestors = nx.ancestors(grid._graph, first_cbl_dists[0])
# else:
#     ancestors = None
# return ancestors

branch_heads = list(nx.neighbors(tree, station))
descendants = {branch_head: list(nx.descendants(tree, branch_head))
               for branch_head in branch_heads}

return descendants
def get_branches(grid)
Determine the branches of a LV grid as sub-trees of the grid topology :param grid: LVGridDing0 object :return: dict mapping each branch head (direct neighbor of the station) to a list of its descendant nodes
10.050244
10.075967
0.997447
# TODO: use setter method here (make attribute '_mv_grid_districts' private)
if mv_grid_district not in self.mv_grid_districts():
    self._mv_grid_districts.append(mv_grid_district)
def add_mv_grid_district(self, mv_grid_district)
Adds a MV grid_district to _mv_grid_districts if not already existing
4.638049
3.955123
1.172669
mv_grid_districts_dict = {}
lv_load_areas_dict = {}
lv_grid_districts_dict = {}
lv_stations_dict = {}

for mv_grid_district in self.mv_grid_districts():
    mv_grid_districts_dict[mv_grid_district.id_db] = mv_grid_district
    for lv_load_area in mv_grid_district.lv_load_areas():
        lv_load_areas_dict[lv_load_area.id_db] = lv_load_area
        for lv_grid_district in lv_load_area.lv_grid_districts():
            lv_grid_districts_dict[lv_grid_district.id_db] = lv_grid_district
            lv_stations_dict[lv_grid_district.lv_grid.station().id_db] = \
                lv_grid_district.lv_grid.station()

return mv_grid_districts_dict, lv_load_areas_dict, lv_grid_districts_dict, lv_stations_dict
def get_mvgd_lvla_lvgd_obj_from_id(self)
Build dict with mapping from LVLoadAreaDing0 id to LVLoadAreaDing0 object, MVGridDistrictDing0 id to MVGridDistrictDing0 object, LVGridDistrictDing0 id to LVGridDistrictDing0 object and LVStationDing0 id to LVStationDing0 object Returns ------- :obj:`dict` mv_grid_districts_dict:: { mv_grid_district_id_1: mv_grid_district_obj_1, ..., mv_grid_district_id_n: mv_grid_district_obj_n } :obj:`dict` lv_load_areas_dict:: { lv_load_area_id_1: lv_load_area_obj_1, ..., lv_load_area_id_n: lv_load_area_obj_n } :obj:`dict` lv_grid_districts_dict:: { lv_grid_district_id_1: lv_grid_district_obj_1, ..., lv_grid_district_id_n: lv_grid_district_obj_n } :obj:`dict` lv_stations_dict:: { lv_station_id_1: lv_station_obj_1, ..., lv_station_id_n: lv_station_obj_n }
1.817368
1.420613
1.279284
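A hedged usage sketch (assuming `nd` is the network object providing this method and `station_id` is a known LV station id, both hypothetical names):

# Resolve Ding0 objects from their id_db values via the lookup dicts.
mvgd_dict, lvla_dict, lvgd_dict, lvst_dict = nd.get_mvgd_lvla_lvgd_obj_from_id()

lv_station = lvst_dict.get(station_id)   # None if the id is unknown
if lv_station is not None:
    print(lv_station)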