| query | document | negatives | metadata |
|---|---|---|---|
Check if `self` is entirely within `distance` from `right`. | def d_fully_within(
self,
right: GeoSpatialValue,
distance: ir.FloatingValue,
) -> ir.BooleanValue:
return ops.GeoDFullyWithin(self, right, distance).to_expr() | [
"def d_within(\n self,\n right: GeoSpatialValue,\n distance: ir.FloatingValue,\n ) -> ir.BooleanValue:\n return ops.GeoDWithin(self, right, distance).to_expr()",
"def is_to_the_right(self, other_rect):\n distance = other_rect.distance_x(self.rect)\n return distance != ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Check if `self` is partially within `distance` from `right`. | def d_within(
self,
right: GeoSpatialValue,
distance: ir.FloatingValue,
) -> ir.BooleanValue:
return ops.GeoDWithin(self, right, distance).to_expr() | [
"def d_fully_within(\n self,\n right: GeoSpatialValue,\n distance: ir.FloatingValue,\n ) -> ir.BooleanValue:\n return ops.GeoDFullyWithin(self, right, distance).to_expr()",
"def within_distance(self, point, distance):\n return all(distance >= seg.shortest_distance_to(point)\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
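The two predicates above differ only in quantification: `d_fully_within` is true when every part of `self` lies within `distance` of `right` (cf. PostGIS `ST_DFullyWithin`), while `d_within` is satisfied as soon as any part does (`ST_DWithin`). A minimal usage sketch, assuming a recent ibis with geospatial support; the table name and the `"geometry"` schema strings are assumptions, not part of the source:

```python
import ibis

# Hypothetical unbound table with two geometry columns (an assumption).
t = ibis.table({"a": "geometry", "b": "geometry"}, name="shapes")

fully = t.a.d_fully_within(t.b, distance=10.0)  # all of `a` within 10 units of `b`
partly = t.a.d_within(t.b, distance=10.0)       # any part of `a` within 10 units
```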
Get the 1-based Nth geometry of a multi geometry. | def geometry_n(self, n: int | ir.IntegerValue) -> GeoSpatialValue:
return ops.GeoGeometryN(self, n).to_expr() | [
"def getMultiGeometry(geometry):\n geom = arcpy.Array()\n for feature in geometry:\n array = arcpy.Array()\n for point in feature:\n point = arcpy.Point(float(point[0]), float(point[1]))\n array.add(point)\n geom.add(array)\n return geom",
"def _multigeometry(se... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get the type of a geometry. Returns StringValue: a string representing the type of `self`. | def geometry_type(self) -> ir.StringValue:
return ops.GeoGeometryType(self).to_expr() | [
"def get_geometry_type(self):\n return self.geometry_type",
"def get_geometry_type(self):\n return self._geometry_type",
"def _get_type_geometry_object(self):\n # FeatureModel.get_geometry_object() retorna\n # self.geometry_object relacionado ao objeto\n # FeatureModel (o padr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Compute the distance between two geospatial expressions. | def distance(self, right: GeoSpatialValue) -> ir.FloatingValue:
return ops.GeoDistance(self, right).to_expr() | [
"def get_geo_distance(c_1: City, c_2: City) -> float:\n return great_circle(c_1.coordinates, c_2.coordinates).km",
"def distance(self, other):\n if not isinstance(other, GEOSGeometry):\n raise TypeError(\"distance() works only on other GEOS Geometries.\")\n return capi.geos_distance(se... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Compute the length of a geospatial expression. Returns FloatingValue: the length of `self`. | def length(self) -> ir.FloatingValue:
return ops.GeoLength(self).to_expr() | [
"def length(self):\n return capi.geos_length(self.ptr, byref(c_double()))",
"def length(self):\n return self.magnitude()",
"def length(self):\n def func(x):\n return np.sqrt(1.0 + self.interp_slope(x)**2)\n return quad(func, self.x[0], self.x[-1])[0]",
"def length(self) ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Compute the perimeter of a geospatial expression. Returns FloatingValue: the perimeter of `self`. | def perimeter(self) -> ir.FloatingValue:
return ops.GeoPerimeter(self).to_expr() | [
"def perimeter(self) -> float:\n return double_of(multiply_by_pi(self._radius()))",
"def perimeter(self):\n return math.pi * (3 * (self.r + self.b) - ((3 * self.r + self.b) *\n (self.r + 3 * self.b)) ** 0.5)",
"def perimeter(self):\n return self._radius * 2 * math.pi",
"def per... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the 2-dimensional maximum distance between two geometries in projected units. If `self` and `right` are the same geometry, the function will return the distance between the two vertices farthest from each other in that geometry. | def max_distance(self, right: GeoSpatialValue) -> ir.FloatingValue:
return ops.GeoMaxDistance(self, right).to_expr() | [
"def distance(self, right: GeoSpatialValue) -> ir.FloatingValue:\n return ops.GeoDistance(self, right).to_expr()",
"def calc_distance(self, other):\n longitude_diff = (self.__longitude - other.__longitude) * \\\n WayPoint.DISTANCE_PER_DEGREE_LONGITUDE\n latitude_diff = (self.__latitude... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the X minima of a geometry. Returns FloatingValue: X minima. | def x_min(self) -> ir.FloatingValue:
return ops.GeoXMin(self).to_expr() | [
"def x_min(self):\n return self.get_min_value(self.X_INDEX)",
"def x_min(self):\n return self.x[self.nodes].values.min()",
"def min_values_float(self):\n vals = np.zeros((self.n_turbines, 2), dtype=FC.DTYPE)\n vals[:] = self.boundary.p_min()[None, :]\n return vals.reshape(self... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the X maxima of a geometry. Returns FloatingValue: X maxima. | def x_max(self) -> ir.FloatingValue:
return ops.GeoXMax(self).to_expr() | [
"def x_max(self):\n return self.x[self.nodes].values.max()",
"def x_max(self):\n return self.get_max_value(self.X_INDEX)",
"def max_x(self):\n return max(point[0] for point in self.points)",
"def xminmax ( self ) :\n return self.xvar.minmax()",
"def max_values_float(self):\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the Y minima of a geometry. Returns FloatingValue: Y minima. | def y_min(self) -> ir.FloatingValue:
return ops.GeoYMin(self).to_expr() | [
"def get_min_y(self, format=Array):\n try:\n yy = self.get_data_y(format)\n low, high = self.calc_indexes()\n yy = yy[low:high + 1]\n y = [float(e) for e in yy]\n except:\n y = []\n if len(y) == 0:\n return \"-\"\n return ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the Y maxima of a geometry. Returns FloatingValue: Y maxima. | def y_max(self) -> ir.FloatingValue:
return ops.GeoYMax(self).to_expr() | [
"def get_max_y(self, format=Array):\n try:\n yy = self.get_data_y(format)\n low, high = self.calc_indexes()\n yy = yy[low:high + 1]\n y = [float(e) for e in yy]\n except:\n y = []\n if len(y) == 0:\n return \"-\"\n return ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the first point of a `LINESTRING` geometry as a `POINT`. Return `NULL` if the input parameter is not a `LINESTRING`. Returns PointValue: start point. | def start_point(self) -> PointValue:
return ops.GeoStartPoint(self).to_expr() | [
"def get_point_from_linestring(geom_row, X=0, behaviour='last'):\n\n lat = None\n lng = None\n try:\n X = round(X)\n except Exception as e:\n raise TypeError(\"Please enter a number for the index of the point within the linestring (X)\")\n\n if behaviour in ['last', 'ignore']:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the last point of a `LINESTRING` geometry as a `POINT`. Return `NULL` if the input parameter is not a `LINESTRING`. Returns PointValue: end point. | def end_point(self) -> PointValue:
return ops.GeoEndPoint(self).to_expr() | [
"def get_point_from_linestring(geom_row, X=0, behaviour='last'):\n\n lat = None\n lng = None\n try:\n X = round(X)\n except Exception as e:\n raise TypeError(\"Please enter a number for the index of the point within the linestring (X)\")\n\n if behaviour in ['last', 'ignore']:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the number of points in a geometry. Works for all geometries. Returns IntegerValue: number of points. | def n_points(self) -> ir.IntegerValue:
return ops.GeoNPoints(self).to_expr() | [
"def GetNumberOfPoints(self):\n return self.GetNumberOfElements(ArrayAssociation.POINT)",
"def GetPointCount(self) -> int:\n ...",
"def get_num_points(self):\n dimensions = self.data.shape\n return dimensions[0]",
"def n_vertices(self):\n return self.shape_model.template_ins... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the spatial reference identifier for the ST_Geometry. Returns IntegerValue: SRID. | def srid(self) -> ir.IntegerValue:
return ops.GeoSRID(self).to_expr() | [
"def SRID():\r\n return SurveyPointMixin._SRID",
"def srs(self):\n if HAS_GDAL:\n if hasattr(self, '_srs'):\n # Returning a clone of the cached SpatialReference object.\n return self._srs.clone()\n else:\n # Attempting to cache a Spa... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the centroid of the geometry. Returns PointValue: the centroid. | def centroid(self) -> PointValue:
return ops.GeoCentroid(self).to_expr() | [
"def centroid(self):\n return self._topology(capi.geos_centroid(self.ptr))",
"def centroid(self) -> Point:\n return self._context.polygon_centroid(self)",
"def centroid(self) -> Point[Scalar]:\n return self._context.multipoint_centroid(self)",
"def centroid(self):\n A = 1 / (6*self... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Transform a geometry into a new SRID. | def transform(self, srid: ir.IntegerValue) -> GeoSpatialValue:
return ops.GeoTransform(self, srid).to_expr() | [
"def smart_transform(geom, srid, clone=True):\r\n if not geom.srs:\r\n geom.srid = 4326\r\n return geom.transform(srid, clone=clone)",
"def smart_transform(geom, srid, clone=True):\n if not geom.srs:\n geom.srid = 4326\n return geom.transform(srid, clone=clone)",
"def warp_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Clip a substring from a LineString. Returns a linestring that is a substring of the input one, starting and ending at the given fractions of the total 2D length. The second and third arguments are floating point values between zero and one. This only works with linestrings. | def line_substring(
self, start: ir.FloatingValue, end: ir.FloatingValue
) -> ir.LineStringValue:
return ops.GeoLineSubstring(self, start, end).to_expr() | [
"def clip(st,length):\n if len(st) > length:\n return st[:length] + \"...\"\n else:\n return st",
"def linestring_segment(linestring: LineString, dist: float, threshold_length: float):\n coord_1 = linestring.interpolate(dist).coords[0]\n coord_2 = linestring.interpolate(dist + threshold_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
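Since both bounds are fractions of the total 2D length, `line_substring(0.0, 0.5)` yields the first half of a linestring and `line_substring(0.25, 0.75)` the middle half. A sketch in the same hypothetical unbound-table style as above (the `"linestring"` schema string is an assumption):

```python
import ibis

roads = ibis.table({"geom": "linestring"}, name="roads")  # hypothetical table

middle_half = roads.geom.line_substring(0.25, 0.75)  # middle 50% of each line
```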
Aggregate a set of geometries into a union. This corresponds to the aggregate version of the PostGIS ST_Union. We give it a different name (following the corresponding method in GeoPandas) to avoid name conflicts with the non-aggregate version. Returns GeoSpatialScalar: union of geometries. | def unary_union(self) -> ir.GeoSpatialScalar:
return ops.GeoUnaryUnion(self).to_expr().name("union") | [
"def unary_union(self):\n return GeoSeries(arctern.ST_Union_Aggr(self))",
"def union(self, other):\n return self._geomgen(capi.geom_union, other)",
"def union(feature):\n\n mp = MultiPolygon([Polygon([tuple(z) for z in y]) for y in feature.coord])\n union = ops.unary_union(mp)\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
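Being the aggregate version, `unary_union` collapses a whole column (or group) of geometries into one scalar geometry. A hedged sketch of how it might be used in a grouped aggregation; the table and column names are assumptions:

```python
import ibis

parcels = ibis.table({"county": "string", "geom": "geometry"}, name="parcels")

# One merged shape per county, cf. GeoPandas' unary_union / PostGIS ST_Union:
merged = parcels.group_by("county").aggregate(shape=parcels.geom.unary_union())
```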
Build an image-augmentation pipeline using the Augmentor library; takes no parameters. | def augment():
print("augmenting......")
path1 = '../trainp1/'
path2 = '../trainp2/'
# path of pair1 and pair2 similar to img & mask task for segmentation
p = Augmentor.Pipeline(path1) # pair1
p.ground_truth(path2) # pair2
p.rotate(probability=0.3, max_left_rotation=3, max_right_rotation=3... | [
"def augment(self, image):\n pass",
"def get_image_augmentation_pipeline():\n\n return imgaug.augmenters.Sequential(\n children=[\n imgaug.augmenters.SomeOf(\n n=(0, None),\n children=[\n imgaug.augmenters.Grayscale(alpha=(0.2, 1)),\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Command line interface for the ``rsync-system-backup`` program. | def main():
# Initialize logging to the terminal and system log.
coloredlogs.install(syslog=True)
# Parse the command line arguments.
context_opts = dict()
program_opts = dict()
dest_opts = dict()
try:
options, arguments = getopt.gnu_getopt(sys.argv[1:], 'bsrm:c:t:i:unx:fvqhVQp', [
... | [
"def command_backup(self):\n required_arguments = {\n 'nice': self.BINARY_PATHS['nice'],\n 'nice_value': self.NICE_VALUE,\n 'rdiff': self.BINARY_PATHS['rdiff-backup'],\n 'cwd': self.env['cwd'],\n 'bwd': self.env['bwd']\n }\n\n self._pre... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Explicitly enable an action and disable other implicit actions. | def enable_explicit_action(options, explicit_action):
options[explicit_action] = True
for implicit_action in 'backup_enabled', 'snapshot_enabled', 'rotate_enabled':
if implicit_action != explicit_action:
options.setdefault(implicit_action, False) | [
"def disable_other_actions(self, disable_other_actions):\n\n self._disable_other_actions = disable_other_actions",
"async def handle_action(**kwargs):\n name = kwargs[NAME]\n automation = automations[name]\n enabled = kwargs.get(ENABLED, not automation.enabled)\n if enabled:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
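A quick, self-contained demonstration of the interplay above, assuming the function is in scope: `setdefault` only disables an implicit action the user has not already configured.

```python
options = {"rotate_enabled": True}  # the user already asked for rotation
enable_explicit_action(options, "snapshot_enabled")
print(options)
# {'rotate_enabled': True, 'snapshot_enabled': True, 'backup_enabled': False}
```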
Return the Courses that the given user is an instructor for. Returns a QuerySet. | def faculty_courses_for_user(user):
return Course.objects.filter(faculty_group__in=user.groups.all()) | [
"def get_courses(user):\n if user.is_staff: return Course.objects.all()\n \n return Course.objects.filter(published=True, users__pk=user.pk)",
"def get_enrollments_for_courses_in_program(user, program):\n course_keys = [\n CourseKey.from_string(key)\n for key in course_run_ke... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return True if the given user is a faculty member on any courses. | def is_faculty(user):
return Affil.objects.filter(user=user).exists() or \
faculty_courses_for_user(user).exists() | [
"def is_faculty():\n return _is_member('uw_faculty')",
"def has_member(self, user):\n return user.data in set(dbm.member for dbm in self.board_members)",
"def is_member(user: User) -> bool:\n if not user:\n raise TypeError('user should not be None')\n return user.name.startswith('L')",
... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Handles GET /caps/mba request. | def get():
mba_info = caps.mba_info()
res = {
'clos_num': mba_info['clos_num'],
'mba_enabled': mba_info['enabled'],
'mba_bw_enabled': mba_info['ctrl_enabled']
}
return res, 200 | [
"def get():\n\n mba_ctrl_info = caps.mba_ctrl_info()\n\n res = {\n 'supported': mba_ctrl_info['supported'],\n 'enabled': mba_ctrl_info['enabled']\n }\n return res, 200",
"def capa(self):\n def _parsecap(line):\n lst = line.decode('ascii').split()... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Handles HTTP /caps/mba_ctrl request. Retrieve MBA CTRL capability and current state details | def get():
mba_ctrl_info = caps.mba_ctrl_info()
res = {
'supported': mba_ctrl_info['supported'],
'enabled': mba_ctrl_info['enabled']
}
return res, 200 | [
"def get():\n\n mba_info = caps.mba_info()\n\n res = {\n 'clos_num': mba_info['clos_num'],\n 'mba_enabled': mba_info['enabled'],\n 'mba_bw_enabled': mba_info['ctrl_enabled']\n }\n return res, 200",
"def put():\n json_data = request.get_json()\n\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Handles PUT /caps/mba_ctrl request. Raises BadRequest, InternalError | def put():
json_data = request.get_json()
# validate request
try:
schema, resolver = ConfigStore.load_json_schema('modify_mba_ctrl.json')
jsonschema.validate(json_data, schema, resolver=resolver)
except (jsonschema.ValidationError, OverflowError) as error:
... | [
"def get():\n\n mba_ctrl_info = caps.mba_ctrl_info()\n\n res = {\n 'supported': mba_ctrl_info['supported'],\n 'enabled': mba_ctrl_info['enabled']\n }\n return res, 200",
"def put():\n json_data = request.get_json()\n\n # validate request\n try... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Handles PUT /caps/rdt_iface request. Raises BadRequest, InternalError | def put():
json_data = request.get_json()
# validate request
try:
schema, resolver = ConfigStore.load_json_schema('modify_rdt_iface.json')
jsonschema.validate(json_data, schema, resolver=resolver)
except (jsonschema.ValidationError, OverflowError) as error:
... | [
"def post_logical_interface_update(self, resource_id, resource_dict):\n pass",
"def update(self):\n # Modified variable\n modified = False\n\n ospf_interface_data = {}\n\n ospf_interface_data = utils.get_attrs(self, self.config_attrs)\n\n # Get port uri\n if self.p... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Handles GET /caps/l3ca request. | def get():
l3ca_info = caps.l3ca_info()
res = {
'cache_size': l3ca_info['cache_size'],
'cw_size': l3ca_info['cache_way_size'],
'cw_num': l3ca_info['cache_ways_num'],
'clos_num': l3ca_info['clos_num'],
'cdp_supported': l3ca_info['cdp_supported... | [
"def get():\n\n l2ca_info = caps.l2ca_info()\n\n res = {\n 'cache_size': l2ca_info['cache_size'],\n 'cw_size': l2ca_info['cache_way_size'],\n 'cw_num': l2ca_info['cache_ways_num'],\n 'clos_num': l2ca_info['clos_num'],\n 'cdp_supported': l2ca_info[... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Handles GET /caps/l2ca request. | def get():
l2ca_info = caps.l2ca_info()
res = {
'cache_size': l2ca_info['cache_size'],
'cw_size': l2ca_info['cache_way_size'],
'cw_num': l2ca_info['cache_ways_num'],
'clos_num': l2ca_info['clos_num'],
'cdp_supported': l2ca_info['cdp_supported... | [
"def get():\n\n l3ca_info = caps.l3ca_info()\n\n res = {\n 'cache_size': l3ca_info['cache_size'],\n 'cw_size': l3ca_info['cache_way_size'],\n 'cw_num': l3ca_info['cache_ways_num'],\n 'clos_num': l3ca_info['clos_num'],\n 'cdp_supported': l3ca_info[... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Makes a call to regulations.gov and retrieves the docket data. | def get_docket_data(api_key, docket_id):
LOGGER.info('Requesting docket from regulations.gov')
response = requests.get("https://api.data.gov:443/" +
"regulations/v3/docket.json?api_key=" +
api_key +
"&docketId=" +
... | [
"def fetch_data():\n url = \"http://iresearch.worldbank.org/PovcalNet/PovcalNetAPI.ashx?\"\n smy_params = {\n \"Countries\": \"all\",\n \"GroupedBy\": \"WB\",\n \"PovertyLine\": \"1.9\",\n \"RefYears\": \"all\",\n \"Display\": \"C\",\n \"format\": \"csv\"\n }\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Delete a page in a wiki | def delete(self, request, slug, page_name):
try:
wiki = Wiki.objects.get(slug=slug)
except Wiki.DoesNotExist:
error_msg = "Wiki not found."
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
username = request.user.username
if wiki.username != use... | [
"def wiki_delete(self, page_id):\n return self._get('wiki_pages/{0}.json'.format(page_id), auth=True,\n method='DELETE')",
"def _delete_page(self, pageId):",
"def wiki_page_deleted(self, page):\n if 'wiki' not in self.sources:\n return\n gnp = GrowlNotific... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Given a stream to read from, return the parsed representation. Should return parsed data, or a `DataAndFiles` object consisting of the parsed data and files. | def parse(self, stream, media_type=None, parser_context=None):
encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
decoded_stream = codecs.getreader(encoding)(stream)
raw_body = decoded_stream.read()
request = parser_context.get('request')
setattr(request, 'raw_bo... | [
"def parse_stream(self, stream, **contextkw):\n context = Container(**contextkw)\n context._parsing = True\n context._building = False\n context._sizing = False\n context._params = context\n try:\n return self._parsereport(stream, context, \"(parsing)\")\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Creates an email with no attachment. | def generate_withno_attachement(sender, recipient, subject, body):
# Basic Email formatting
message = email.message.EmailMessage()
message["From"] = sender
message["To"] = recipient
message["Subject"] = subject
message.set_content(body)
return message | [
"def _createEmail(self, address_to, message, emailSubject):\r\n\t\tfrom_email = Email(self.sender)\r\n\t\tto_email = To(address_to)\r\n\t\tsubject = emailSubject\r\n\t\tcontent = Content(\"text/plain\", message)\r\n\t\t#creates Mail object from sendgrid api\r\n\t\tmail = Mail(from_email, to_email, subject, content)... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sends the message to the configured SMTP server. | def send(message):
mail_server = smtplib.SMTP('localhost')
mail_server.send_message(message)
mail_server.quit() | [
"def send_email(self):\n server = smtplib.SMTP(self.server, self.port)\n server.ehlo()\n server.starttls()\n msg = self.build_msg()\n try:\n server.login(self.from_addr, self.password)\n server.sendmail(self.from_addr, self.to_addr, msg)\n print('S... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Write interesting attributes from a ServiceInfo to the log. The information written depends on the log level: basic info is written with log level INFO; if the log level is DEBUG, the basic info plus more (all properties) is written with log level DEBUG. | def log_serviceinfo(logger, info):
try:
debugging = logger.isEnabledFor(logging.DEBUG)
log_level = logging.INFO
log_info = {'name': info.name,
'address': socket.inet_ntoa(info.addresses[0]),
'port': info.port}
log_hdr = "\n {address}:{port} ... | [
"def write_info(self, line_info):\n try:\n with open(self.log2file, \"a+\") as objfile:\n # Open to append\n objfile.write(line_info)\n except IOError as ioe:\n with open(self.log2file, \"w+\") as objfile:\n # Create to write\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Announce our shares via Zeroconf. | def __init__(self, logger):
self.share_names = []
self.share_info = []
self.logger = logger
self.rz = zeroconf.Zeroconf()
self.renamed = {}
old_titles = self.scan()
address = socket.inet_aton(config.get_ip())
port = int(config.getPort())
logger.inf... | [
"def announce(self, title: str, msg: str) -> None:\n ...",
"def test_show_nas_share(self):\n pass",
"def test_show_nas_share_by_nas(self):\n pass",
"def announce(self, *args):\n return _yarp.NameStore_announce(self, *args)",
"def test_update_nas_share(self):\n pass",
"de... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Look for TiVos using Zeroconf. | def scan(self):
VIDS = '_tivo-videos._tcp.local.'
names = []
self.logger.info('Scanning for TiVos...\n')
# Get the names of servers offering TiVo videos
browser = zeroconf.ServiceBrowser(self.rz, VIDS, None, ZCListener(names, logger=self.logger))
# Give them a second (... | [
"def test_numvfs_preconfigured(self):\n\n os.makedirs(sriov_config._SYS_CLASS_NET + \"/p2p1/device\")\n f = open(sriov_config._SYS_CLASS_NET + \"/p2p1/device/sriov_numvfs\",\n \"w+\")\n f.write(\"10\")\n f.close()\n self.assertEqual(None, sriov_config.main(['ARG0',... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Exchange beacons, and extract the machine name. | def get_name(self, address):
our_beacon = self.format_beacon('connected', False)
machine_name = re.compile('machine=(.*)\n').search
try:
tsock = socket.socket()
tsock.connect((address, 2190))
self.send_packet(tsock, our_beacon)
tivo_beacon = self.... | [
"async def get_beacons(self):\n try:\n return await self._get_gist_data(comm_type='beacon')\n except Exception:\n self.log.debug('Receiving beacons over c2 (%s) failed!' % self.__class__.__name__)\n return []",
"def shiftNameToApplication():",
"async def handle_bea... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the current power in watts on a desired channel | def get_power(self, channel):
power = self.device.query(f':POW{channel}:VAL?')
return float(power) | [
"def current_power_w(self):\n return self.smartplug.power",
"def current_power_mwh(self):\n return self.consumption",
"def get_power(self):\r\n return self._api.get_power()",
"def cooling_power(self):\n temp_status = self._driver.query_temp_status(self._handle)\n return temp... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Use configparser to load ini_file into self.config | def __init__(self, ini_file):
self.config = configparser.ConfigParser()
self.config.read(ini_file)
#print(self.config) | [
"def __init__(self, ini_file):\n self.config = configparser.ConfigParser()\n self.config.read(ini_file)",
"def load_config(self):\n\n parser = SafeConfigParser()\n parser.read(self.ini, encoding=\"utf-8\")\n\n # Lecture de parser et copie dans un dictionnaire\n # chercher... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return the number of sections in the ini file. | def number_of_sections(self):
#print (len(self.config.sections()))
return len(self.config.sections()) | [
"def number_of_sections(self):\n sections = self.config.sections()\n return len(sections)",
"def number_of_sections(self):\n return len(self.config.sections())",
"def getN_Sections(self):\n return len(self.sections_dict)",
"def sections(self) -> int:\n return len(self.string... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return a list of environments (= "envlist" attribute of [tox] section) | def environments(self):
envs = self.config["tox"]["envlist"]
#result = re.split("[^a-zA-Z0-9]", envs)
result = re.split(r'\n| ,|,', envs)
#print ([string for string in result if string != ""])
result = (([string.strip() for string in result if string != ""]))
print(list(d... | [
"def environments(self):\n env_txt = self.config[\"tox\"][\"envlist\"]\n env_lst_raw = env_txt.strip().replace(\"\\n\",\",\").split(\",\")\n env_lst = [x.strip() for x in env_lst_raw if x != \"\"]\n return env_lst",
"def environments(self):\n envs = self.config[\"tox\"][\"envlis... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
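The regex split in the document above accepts newlines, comma-space, and plain commas as separators. A self-contained check of that parsing logic, independent of any tox.ini on disk:

```python
import configparser
import re

cfg = configparser.ConfigParser()
cfg.read_string("[tox]\nenvlist = py38, py39\n    lint\n")

envs = [s.strip() for s in re.split(r"\n| ,|,", cfg["tox"]["envlist"]) if s.strip()]
print(envs)  # ['py38', 'py39', 'lint']
```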
Return a list of all basepython values across the ini file. | def base_python_versions(self):
result = []
#print("HELLO?")
#print (len(self.config.sections()))
for section in self.config.sections():
#print(self.config.options(section))
for part in self.config[section]:
if part == "basepython":
... | [
"def base_python_versions(self):\n basepython = {\n self.config[section].get(\"basepython\")\n for section in self.config\n if self.config[section].get(\"basepython\")\n }\n return list(basepython)",
"def base_python_versions(self):\n sections = self.co... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Update and return an object. This is a thin wrapper around the findAndModify command. The positional arguments are designed to match the first three arguments | def find_and_modify(self, query={}, update=None, upsert=False, sort=None,
**kwargs):
if (not update and not kwargs.get('remove', None)):
raise ValueError("Must either update or remove")
if (update and kwargs.get('remove', None)):
raise ValueError("Can't do both update and... | [
"def do_object_update(ident, model, attrs, data=None):\n \n obj = get_object(ident, model, attrs=attrs)\n if not obj:\n print_notfound(ident, model, attrs)\n return 1\n \n return update_object_bydata(obj, data)",
"def update_object(self, name: str) -> None:",
"def update(self, reque... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get a list of distinct values for `key` among all documents in this collection. | def distinct(self, key):
return self.database.command({'distinct': self.name,
'key': key})['values'] | [
"def find_distinct(self, collection, key):\n obj = getattr(self.db, collection)\n result = obj.distinct(key)\n return result",
"def getall(self, key):\n return self.values.get(key, [])",
"def list_values(key):\n return meta.list_values(key=key)",
"def get_keys(self, ckey=None):\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Insert a document or documents into this collection. | def insert(self, doc_or_docs):
return self.database.connection.request.insert_documents(
self.database.name, self.name, doc_or_docs) | [
"def insert_documents(self, database, collection, doc_or_docs):\n validators.check_documents_to_insert(doc_or_docs)\n r = self.__get_response(settings.INS_DOCS,\n {\"db\": database, \"col\": collection}, data=doc_or_docs)\n if r[\"status\"] == 200:\n return r[\"result\"]\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Update a document or documents in this collection. | def update(self, spec, document, upsert=False, multi=False):
return self.database.connection.request.update_documents(
self.database.name, self.name, spec, document, upsert, multi) | [
"def update(self, *args, **kargs):\r\n collection = kargs.pop('collection', self.collection)\r\n if not collection:\r\n collection = self.collection\r\n return self.db[collection].update(*args, **kargs)",
"def update(self, collection, query, document):\n Database.replaceObje... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Remove a document or documents from this collection. | def remove(self, spec_or_id=None):
if isinstance(spec_or_id, ObjectId) or \
isinstance(spec_or_id, basestring):
return self.database.connection.request.delete_document(
self.database.name, self.name, spec_or_id)
if not spec_or_id:
spec_or_id = {}
... | [
"def remove_document(self, spec_or_id):\n return self._coll.remove(spec_or_id=spec_or_id)",
"async def remove_doc(self, *args, **kwargs):\n pass",
"def remove(self, query: dict, limit: Optional[int] = 0) -> None:\n\n matches = self.find(query, limit)\n for match in matches:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a "real" Project object. The returned object is "writable" too that is its state can be changed etc. | def real_obj(self):
return RemoteProject(xml_data=etree.tostring(self)) | [
"def project_obj(self, *args, **kwargs):\n if self.project_path is None:\n return None\n return Project(self.project_path, *args, **kwargs)",
"def ex_get_project(self):\n response = self.connection.request('', method='GET').object\n return self._to_project(response)",
"def... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a Collection with objects which match the xpath. path is the remote path which is used for the http request. xp is the xpath which is used for the search (either an Expression object or a string). | def _find(path, xp, tag_class={}, **kwargs):
request = Osc.get_osc().get_reqobj()
xpath = xp
if hasattr(xp, 'tostring'):
xpath = xp.tostring()
f = request.get(path, match=xpath, **kwargs)
return fromstring(f.read(), **tag_class) | [
"def get_nodes_by_path(self, path, actions=\"ACDR\"):\n\n nodes = []\n for a in actions:\n upath = (a, path)\n if self.__nodes.has_key(upath):\n nodes.append(self.__nodes[upath])\n return nodes",
"def findall(self, path):\n if self.parser == XML_nod... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a RequestCollection with objects which match the xpath. xp is the xpath which is used for the search (either an Expression object or a string). | def find_request(xp, **kwargs):
path = '/search/request'
if 'schema' not in kwargs:
kwargs['schema'] = RequestCollection.SCHEMA
tag_class = {'collection': RequestCollection, 'request': RORequest}
return _find(path, xp, tag_class, **kwargs) | [
"def get_all_values(dom, xpath_request, namespaces={}):\n if namespaces != {}:\n xpath_list_result = dom.xpath(xpath_request, namespaces=namespaces)\n else:\n xpath_list_result = dom.xpath(xpath_request)\n\n return xpath_list_result",
"def findall(self, xpath):\n return self.xml.find... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a ProjectCollection with objects which match the xpath. xp is the xpath which is used for the search (either an Expression object or a string). | def find_project(xp, **kwargs):
path = '/search/project'
if 'schema' not in kwargs:
kwargs['schema'] = ProjectCollection.SCHEMA
tag_class = {'collection': ProjectCollection, 'project': ROProject}
return _find(path, xp, tag_class, **kwargs) | [
"def xpath(self, xpath):\n if self.parser == XML_node.etree:\n # etree doesn't currently have native support for xpath.\n # Make my own, using regex for xpath expressions like \"reaction[@label='2']\":\n import re\n import itertools\n regex = re.compile(... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a PackageCollection with objects which match the xpath. xp is the xpath which is used for the search (either an Expression object or a string). | def find_package(xp, **kwargs):
path = '/search/package'
if 'schema' not in kwargs:
kwargs['schema'] = PackageCollection.SCHEMA
tag_class = {'collection': PackageCollection, 'package': ROPackage}
return _find(path, xp, tag_class, **kwargs) | [
"def get_packages(xml, ns):\r\n execs = []\r\n for x in xml.iter():\r\n if x.xpath('./SSIS:PackageMetaData', namespaces = ns):\r\n for x_exec in x:\r\n execs.append(x_exec)\r\n\r\n return execs",
"def findall(self, xpath):\n return self.xml.findall(xpath)",
"def ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
test al_contacts.view.View.notify() with non-list data. | def testNotifyWithNonListData(self):
self.assertRaises(ViewException, self.view.notify, self.mockViews, 'Invalid Data') | [
"def testNotifyWithValidListDate(self):\r\n self.assertEqual(self.view.notify(self.mockViews, []), None)",
"async def notify_view(self):\n await self.game_view.notify()",
"def notifyPut(self, data):\n if self.notify_hidden:\n self.notify_gui.dataQ.put('*SHOW*') # if gui is hi... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
test al_contacts.view.View.notify() with valid list data. | def testNotifyWithValidListDate(self):
self.assertEqual(self.view.notify(self.mockViews, []), None) | [
"def testNotifyWithNonListData(self):\r\n self.assertRaises(ViewException, self.view.notify, self.mockViews, 'Invalid Data')",
"def test_admin_contact_view_list(self):\n response = self.client.get(\"/admin/dialer_contact/contact/\")\n self.failUnlessEqual(response.status_code, 200)",
"def t... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
test String Representation On Instantiation | def testStringRepresentationOnInstantiation(self):
self.assertEqual(str(self.tv), 'table') | [
"def test_string_representation(self):\n entry = Meal(name=\"Foo Meal\")\n self.assertEqual(str(entry), 'Foo Meal')",
"def test_str_method(self):\n s1 = Square(5)\n self.assertEqual(str(s1), \"[Square] (1) 0/0 - 5\")",
"def test_string_representation(self):\n entry = DeliveryM... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns information about the dependencies required by this repository. The return value should be an OrderedDict if the repository supports multiple configurations (aka is configurable) or a single Configuration if not. | def GetDependencies():
# To support a single (unnamed) configuration...
return Configuration(
"Standard Build Environment",
[
Dependency(
"5C7E1B3369B74BC098141FAD290288DA", # Id for Common_Environment; found in <Common_Environment>/_... | [
"def dependencies(self):\n return self.config.get('dependencies')",
"def get_dependency_configurations(self):\n deps = []\n\n for variant in self.resolve_variants():\n # Note: the variants have already been resolved\n # This for loop simply needs to resolve the dependencies one\n # by ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns an action or list of actions that should be invoked as part of the setup process. Actions are generic command line statements defined in /Libraries/Python/CommonEnvironment/v1.0/CommonEnvironment/Shell/Commands/__init__.py that are converted into statements appropriate for the current scripting language (in mos... | def GetCustomActions(debug, verbose, explicit_configurations):
return [] | [
"def list_actions_cmd():\n command = [cmd()]\n if not min_version('2.1'):\n command.append('list-actions')\n elif not min_version('2.0'):\n command.extend(['action', 'defined'])\n return command",
"def run_action_cmd():\n command = [cmd()]\n if not min_version('2.1'):\n comm... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a list of each transaction profiled along with the time spent. | def showTransactions(self):
self.scanTransactions()
txns = []
# Summarize the stats
for x in range(len(self._trans)):
stats = self._trans[x]
trans_time = 0
remote_calls = 0
for name, stat in stats:
trans_time += stat.total_... | [
"def list_profile_times(endTime=None, maxResults=None, nextToken=None, orderBy=None, period=None, profilingGroupName=None, startTime=None):\n pass",
"def timeprofile():\n\n return TimeProfiler()",
"def get_users_with_transactions(transcript):\n df = transcript.copy(deep=True)\n idx = pd.IndexSlice\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
counts the number of infinities within a 1D matrix or list | def countInfinites(mat):
isFinite = np.all(np.isfinite(mat))
if not isFinite:
count = 0
indices = []
for i in range(0,len(mat)):
if mat[i] in [-np.inf,np.inf]:
count+=1
indices.append(i) | [
"def numSpecial(self, mat: list[list[int]]) -> int:\n ans = 0\n col_cache = {}\n for row in mat:\n # print(row)\n ones = []\n for i, n in enumerate(row):\n if n == 1:\n ones.append(i)\n # print(ones)\n if l... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
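The loop above has a vectorised numpy equivalent; a self-contained check:

```python
import numpy as np

mat = np.array([1.0, -np.inf, 2.0, np.inf, 3.0])
count = int(np.isinf(mat).sum())          # counts both +inf and -inf entries
indices = np.flatnonzero(np.isinf(mat))   # their positions
print(count, indices)  # 2 [1 3]
```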
removes infinities from a matrix; returns a matrix with the infinities replaced by the average of the matrix values | def removeInfinities(mat):
isFinite = np.all(np.isfinite(mat))
if not isFinite:
nrow, ncol = mat.shape
matCopy = mat.copy()
matReshaped = matCopy.reshape(-1)
minVal = np.nanmin(matReshaped[matReshaped != -np.inf])
maxVal = max(matReshaped)
#count inf... | [
"def inf_to_mean(X):\n X = np.nan_to_num(X, nan = np.nan, posinf = np.nan)\n col_mean = np.nanmean(X, axis = 0)\n inds = np.where(np.isnan(X)) \n X[inds] = np.take(col_mean, inds[1]) \n return X",
"def infmean(arr, axis):\n masked = ma.masked_invalid(arr)\n masked = masked... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
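The document cell above is truncated, so the following is a sketch of the described behaviour (replace each infinity with the average of the finite values) rather than the original code:

```python
import numpy as np

def remove_infinities(mat):
    # Replace +/-inf entries with the mean of the finite entries.
    out = np.asarray(mat, dtype=float).copy()
    finite = np.isfinite(out)
    out[~finite] = out[finite].mean()
    return out

print(remove_infinities([1.0, np.inf, 3.0]))  # [1. 2. 3.]
```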
Converts the matrix to the equivalent matrix of the unsigned 8-bit integer datatype. Returns the equivalent uint8 matrix | def make8UC(mat):
mat_256 = mat[:,:]# *255
mat_256 = mat_256.round()  # ndarray.round() returns a new array
mat_8UC = np.uint8(mat_256)
return mat_8UC | [
"def make8UC3(mat):\n mat_8UC = make8UC(mat)\n mat_8UC3 = np.stack((mat_8UC,)*3, axis = -1)\n \n return mat_8UC3",
"def to_uint8(image):\n\tnp.clip(image, 0, 255, out=image)\n\treturn image.astype(np.uint8)",
"def _to_uint8(chan: np.ndarray):\n # Normalize sample\n chan = 0.25 * (chan ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Converts the matrix to the equivalent matrix of the unsigned 8-bit integer datatype with 3 channels. Returns the equivalent uint8 matrix | def make8UC3(mat):
mat_8UC = make8UC(mat)
mat_8UC3 = np.stack((mat_8UC,)*3, axis = -1)
return mat_8UC3 | [
"def make8UC(mat):\n mat_256 = mat[:,:]# *255\n mat_256.round()\n mat_8UC = np.uint8(mat_256)\n \n return mat_8UC",
"def _to_uint8(chan: np.ndarray):\n # Normalize sample\n chan = 0.25 * (chan - chan.mean()) / (chan.std() + K.epsilon())\n # Clip to [0, 1]\n chan = np.cli... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
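A quick shape check of the channel stacking used by `make8UC3` (self-contained, with hypothetical input):

```python
import numpy as np

gray = np.zeros((4, 5), dtype=np.uint8)  # single-channel image
rgb = np.stack((gray,) * 3, axis=-1)     # replicate into 3 channels
print(rgb.shape)  # (4, 5, 3)
```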
Do watershed segmentation on a non-noisy binary image. Returns the image with the nuclei segmented. | def watershed(mask, img, plotImage = False, kernelSize = None):
imgCopy = img.copy()
maskCopy = np.array(mask.copy(), dtype=np.uint8)
if kernelSize is None:
kernelSize = 2
# Finding sure foreground area
#dist_transform = cv2.distanceTransform(mask, cv2.DIST_L2, 5)
#ret, sure_fg = c... | [
"def manuel_segmentation():\n image_segmented = sk.imread(\"Result_Pictures/Seeded_Region_Growing/NIH3T3/dna-42_merged_0.056_200.tif\")\n second_background = np.where(image_segmented == 19)\n image_segmented[second_background] = 1\n return image_segmented",
"def segment_nuclei3D_5(instack, sigma1=3, s... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
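The implementation above is truncated; the usual OpenCV recipe it gestures at (distance transform, sure-foreground markers, then `cv2.watershed`) can be sketched as follows. This is an assumption about the intended pipeline, written for a uint8 mask with values 0/255 and a 3-channel uint8 image:

```python
import cv2
import numpy as np

def watershed_sketch(mask_255, img):
    # Sure foreground: pixels far from the mask boundary.
    dist = cv2.distanceTransform(mask_255, cv2.DIST_L2, 5)
    _, sure_fg = cv2.threshold(dist, 0.5 * dist.max(), 255, 0)
    sure_fg = np.uint8(sure_fg)
    unknown = cv2.subtract(mask_255, sure_fg)

    # Label markers; shift so the background is 1 and unknown pixels are 0.
    _, markers = cv2.connectedComponents(sure_fg)
    markers = markers + 1
    markers[unknown == 255] = 0

    return cv2.watershed(img, markers)  # boundary pixels come back as -1
```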
Parses an XML file and returns an object containing all the vertices | def parseXML(xmlFile, pattern):
tree = ET.parse(xmlFile) # Convert XML file into tree representation
root = tree.getroot()
regions = root.iter('Region') # Extract all Regions
vertices = {pattern: []} # Store all vertices in a dictionary
for region in regions:
label = region.get('Text... | [
"def loadVesselucida_xml(self):\n\n\t\txmlFilePath, ext = os.path.splitext(self.tifPath)\n\t\txmlFilePath += '.xml'\n\t\tif not os.path.isfile(xmlFilePath):\n\t\t\t#print('bSlabList.loadVesselucida_xml() warning, did not find', xmlFilePath)\n\t\t\treturn False\n\n\t\tprint('loadVesselucida_xml() file', xmlFilePath)... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calculates the ratio between the highest resolution image and lowest resolution image. Returns the ratio as a tuple (Xratio, Yratio). | def calculateRatio(levelDims):
highestReso = np.asarray(levelDims[0])
lowestReso = np.asarray(levelDims[-1])
Xratio, Yratio = highestReso/lowestReso
return (Xratio, Yratio) | [
"def determine_real_to_pixel_ratio(\n image_shape: Tuple[int, int],\n min_x: float,\n min_y: float,\n max_x: float,\n max_y: float,\n):\n image_x = image_shape[1]\n image_y = image_shape[0]\n diff_x = max_x - min_x\n diff_y = max_y - min_y\n\n resolution_x = image_x / diff_x\n resol... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
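A quick check of the arithmetic, assuming the function above (and its numpy import) is in scope, with hypothetical pyramid levels:

```python
level_dims = [(40000, 30000), (10000, 7500), (2500, 1875)]
print(calculateRatio(level_dims))  # (16.0, 16.0): 40000/2500 and 30000/1875
```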
Parses XML File to get mask vertices and returns matrix masks where 1 indicates the pixel is inside the mask, and 0 indicates outside the mask. | def getMask(xmlFile, svsFile, pattern):
vertices = parseXML(xmlFile, pattern) # Parse XML to get vertices of mask
if not len(vertices[pattern]):
slide = 0
mask = 0
return slide, mask
slide = open_slide(svsFile)
levelDims = slide.level_dimensions
mask = createMask(levelD... | [
"def mask(self):\n mask = np.zeros((self.height, self.width))\n pts = [\n np.array(anno).reshape(-1, 2).round().astype(int)\n for anno in self.segmentation\n ]\n mask = cv2.fillPoly(mask, pts, 1)\n return mask",
"def get_mask(self):\n mask = np.zeros... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns an [x, y] numpy array of a random pixel. mask {numpy matrix}: the mask from which to choose the random pixel. | def chooseRandPixel(mask):
array = np.transpose(np.nonzero(mask)) # Get the indices of nonzero elements of mask.
index = random.randint(0,len(array)-1) # Select a random index
return array[index] | [
"def _random_pixel(reference_image):\n\n x = random.randint(0, reference_image.width - 1)\n y = random.randint(0, reference_image.height - 1)\n\n return (x, y)",
"def random_image(x, y, out):\n\n pixels = []\n for pixel in range(0, x*y):\n pixels.append(random_pixel())\n\n new_image(x, y,... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Generates and saves 'numPatches' patches with dimension 'dims' from image 'slide' contained within 'mask'. | def getPatches(slide, mask, numPatches=0, dims=(0,0), dirPath='', slideNum='', plot=False, plotMask=False):
# extractPatchByXMLLabeling
w,h = dims
levelDims = slide.level_dimensions
Xratio, Yratio = calculateRatio(levelDims)
i = 0
while i < numPatches:
firstLoop = True # Boolean to en... | [
"def extractPatches(output,filename,maskname, classes, level, patchSize,j, background):\n # Opening the files\n im = op.OpenSlide(filename)\n imload = im.read_region((0,0), level, im.level_dimensions[level])\n print(\"Image dimension : \", im.level_dimensions[level])\n mask = Image.open(maskname)\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return a unique name. | def unique_name():
return "unique-{0}".format(uuid.uuid4()) | [
"def _get_unique_name(self):\n s = self.model.replace(' ', '') + '-' + self.experiment.replace(' ', '')\n s = s.replace('#', '-')\n if hasattr(self, 'ens_member'):\n s += '-' + str(self.ens_member)\n return s",
"def generate_unique_job_name(self, name='no_name_job'):\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Mock the nuke library. | def nuke_mocker(request):
m = mock.patch.dict("sys.modules", {"nuke": mock.Mock()})
m.start()
request.addfinalizer(m.stop) | [
"def _mockme(self):\n pass",
"def mock_remote_unit(monkeypatch):\n monkeypatch.setattr(\"libgitlab.hookenv.remote_unit\", lambda: \"unit-mock/0\")",
"def test_recharge_packages_get(self):\n pass",
"def test_unit_get(self):\n pass",
"def setUp(self):\r\n self.mock_model = Mock(... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Mock the PySide2 library. | def pyside_mocker(request):
m = mock.patch.dict("sys.modules", {"PySide2": mock.Mock()})
m.start()
request.addfinalizer(m.stop) | [
"def is_pyside2():\n\n return __binding__ == 'PySide2'",
"def load_PySide_QtCore(finder, module):\r\n finder.IncludeModule(\"atexit\")",
"def load_PySide_QtCore(finder, module):\n finder.IncludeModule(\"atexit\")",
"def is_pyside():\n\n return __binding__ == 'PySide'",
"def setup_apiv2():\n #... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Lifts a function into the elevated world | def lift(cls, func):
raise NotImplementedError | [
"def elevateUAC( func ):\n # Define Imports Required for Elevation\n import ctypes, sys\n import time\n # Define Inner Test to Evaluate\n def is_admin():\n try:\n return ctypes.windll.shell32.IsUserAnAdmin()\n except:\n return False\n # Define Inner Wrapper Func... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
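`lift` is left abstract here. For an Option/Maybe-like "elevated world" it would wrap a plain function into one that maps over the container; a minimal sketch in which the `Maybe` class is an assumption, not part of the source:

```python
class Maybe:
    def __init__(self, value=None, empty=False):
        self.value, self.empty = value, empty

    @classmethod
    def lift(cls, func):
        # Turn a plain a -> b function into Maybe a -> Maybe b.
        def lifted(m):
            return cls(empty=True) if m.empty else cls(func(m.value))
        return lifted

inc = Maybe.lift(lambda x: x + 1)
print(inc(Maybe(41)).value)          # 42
print(inc(Maybe(empty=True)).empty)  # True
```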
Get year with links | def get_year_with_links():
response = get_response(MAIN_PAGE)
if response.ok:
soup = BeautifulSoup(response.text, 'html.parser')
years_li = soup.find_all(
'md-card-footer'
)
years_dict = {}
# Not including the last <a> tag because that is not relevant.
... | [
"def get_year_with_link():\n response = get_response(MAIN_PAGE)\n if response.ok:\n soup = BeautifulSoup(response.text, 'html.parser')\n years_li = soup.find_all(\n 'li', 'mdl-list__item mdl-list__item--one-line')\n years_dict = {}\n for years_html in years_li:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Calculate the x/y projection of RA1/Dec1 in a system with center at RAcen, Deccen. Input in radians. | def gnomonic_project_toxy(RA1, Dec1, RAcen, Deccen):
# also used in Global Telescope Network website
cosc = np.sin(Deccen) * np.sin(Dec1) + np.cos(Deccen) * np.cos(Dec1) * np.cos(RA1-RAcen)
x = np.cos(Dec1) * np.sin(RA1-RAcen) / cosc
y = (np.cos(Deccen)*np.sin(Dec1) - np.sin(Deccen)*np.cos(Dec1)*np.cos(... | [
"def project_rad(self, ra, dec, projection=None):\n if projection not in CelestialCoord._valid_projections:\n raise ValueError('Unknown projection: %s'%projection)\n\n self._set_aux()\n\n cosra = np.cos(ra)\n sinra = np.sin(ra)\n cosdec = np.cos(dec)\n sindec = n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
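The document cell truncates the `y` line, but it follows the standard gnomonic formulas; reconstructed from those, with a self-contained numeric check (a point 1 degree north of the field centre should land at y close to tan of 1 degree):

```python
import numpy as np

def gnomonic_project_toxy(ra1, dec1, ra_cen, dec_cen):
    cosc = (np.sin(dec_cen) * np.sin(dec1)
            + np.cos(dec_cen) * np.cos(dec1) * np.cos(ra1 - ra_cen))
    x = np.cos(dec1) * np.sin(ra1 - ra_cen) / cosc
    y = (np.cos(dec_cen) * np.sin(dec1)
         - np.sin(dec_cen) * np.cos(dec1) * np.cos(ra1 - ra_cen)) / cosc
    return x, y

x, y = gnomonic_project_toxy(0.0, np.radians(1.0), 0.0, 0.0)
print(x, y)  # 0.0 0.017455... (= tan of 1 degree)
```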
Project the stars to the x,y plane for a given visit. | def starsProject(stars, visit):
names=['x','y','radius']
types=[float,float,float]
xtemp,ytemp = gnomonic_project_toxy(np.radians(stars['ra']),np.radians(stars['decl']),
visit['ra'], visit['dec'])
# Rotate the field using the visit rotSkyPos. Hope I got th... | [
"def proj(self, x, c):",
"def move_and_draw_stars(self):\r\n origin_x = self.screen.get_width() / 2\r\n origin_y = self.screen.get_height() / 2\r\n \r\n for star in self.stars:\r\n # The Z component is decreased on each frame.\r\n star[2] -= 0.19\r\n\r\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Assign PatchIDs to everything. Assume that stars have already been projected to x,y. | def assignPatches(stars, visit, nPatches=16, radiusFoV=1.8):
maxx, maxy = gnomonic_project_toxy(0., np.radians(radiusFoV), 0., 0.)
nsides = nPatches**0.5
# This should move all coords to 0 < x < nsides-1
px = np.floor((stars['x'] + maxy)/(2.*maxy)*nsides)
py = np.floor((stars['y'] + maxy)/(2.*max... | [
"def create_patches(self):\n # reclassify q_disc ras to ints\n self.logger.info(\"Creating rating curve fit patches...\")\n int_ras = Reclassify(self.q_disc_ras, 'Value', RemapValue(list(zip(self.discharges, range(len(self.discharges))))))\n # convert to polygon\n patch_poly = os.... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Find the number of jumps before reaching the exit (rule 1) | def puzzle1(offsets):
return find_jumps_to_exit(offsets, lambda o: o + 1) | [
"def get_jumps_nops(instruction_list):\n done_instructions = []\n jumps_and_nops = []\n inst_num = 0\n while True:\n if inst_num in done_instructions:\n break\n if instruction_list[inst_num][0] == \"nop\":\n jumps_and_nops.append(inst_num)\n pass\n e... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
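`find_jumps_to_exit` is not shown in the source; the standard reading (follow each offset, mutate it with the supplied rule, count jumps until the index leaves the list) can be sketched as an assumption:

```python
def find_jumps_to_exit(offsets, update):
    offsets = list(offsets)  # work on a copy
    i = jumps = 0
    while 0 <= i < len(offsets):
        step = offsets[i]
        offsets[i] = update(step)  # rule 1 passes update = lambda o: o + 1
        i += step
        jumps += 1
    return jumps

print(find_jumps_to_exit([0, 3, 0, 1, -3], lambda o: o + 1))  # 5
```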
Get GFS sounding data for the given timeslot. | def get_gfs_sounding(timeslot, lat, lon, interp=None, step=200.):
fname = fetch_gfs_data(timeslot, lat, lon)
return read_gfs_data(fname, lat, lon, interp=interp, step=200.) | [
"def get_sfx_data(player):\n return api.ChainballCentralAPI.central_api_get(\n sub_api=\"api\", path=f\"players/{player}/get_sfx_data\"\n )",
"def get_snap(self, chan, freq): #MHz\n\n#\tif freq != None:\n\n#\t\tself.syn.output_on()\n#\t\tself.syn.set_freq(freq*1e6)\n#\t\ttime.sleep(1)\n\n\treg = self... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
>>> m = PSYCOP() >>> m.encode_proposition("Aac") (A(x_0) > C(x_0)) >>> m.encode_proposition("Iac") (A(a_1) AND C(a_1)) | def encode_proposition(self, p, hat=False):
i = self.get_fresh_id()
if p[0] == "A":
# A(x) -> B(x)
return self.Prop(self.PT.implies,
self.get_atomic_proposition(p[1].upper(), i, False, hat),
self.get_atomic_propo... | [
"def prove_and_commutativity() -> Proof:\n all_lines = []\n all_lines.append(Proof.Line(Formula.parse('(p&q)')))\n all_lines.append(Proof.Line(Formula.parse('q'), AE1_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('p'), AE2_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('(q&p)'), A_... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Encode premises as propositions, possibly adding implicatures | def encode_premises(self, syllogism, ex_implicatures=True, grice_implicatures=False):
to = sylutil.term_order(syllogism[2])
premises = []
pr = []
for i in [0, 1]:
pr.append(syllogism[i] + to[i])
pr = sylutil.add_implicatures(pr, existential=ex_implicatures, gric... | [
"def premises_encoding(self):\n return self.premises",
"def encode(self, peptides):\n raise NotImplementedError",
"def to_prenex_normal_form(formula: Formula) -> Tuple[Formula, Proof]:\r\n # Task 11.10\r\n\r\n prover = Prover(Prover.AXIOMS.union(ADDITIONAL_QUANTIFICATION_AXIOMS))\r\n\r\n\r\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
same_nameness = True "notational variant", see p. 197 >>> m = PSYCOP() >>> a0 = m.Prop(m.PT.atomic, m.Atom("A", 0, False, False), None) >>> a1 = m.Prop(m.PT.atomic, m.Atom("A", 1, False, False), None) >>> b = m.Prop(m.PT.atomic, m.Atom("B", 2, False, False), None) >>> p1 = m.Prop(m.PT.implies, a0, b) >>> p2 = m.Prop(m.... | def isomorphic(self, p1, p2, same_nameness=False):
if p1 is None and p2 is None:
return True
if p1 is None or p2 is None:
return False
if type(p1) is self.Atom and type(p2) is self.Atom:
if p1.predicate == p2.predicate:
if same_namen... | [
"def test_is_isomorphic(self):\n mol1 = converter.s_bonds_mol_from_xyz(self.xyz1['dict'])\n mol2 = converter.s_bonds_mol_from_xyz(self.xyz1['dict_diff_order'])\n self.assertTrue(mol1.is_isomorphic(mol2, save_order=True, strict=False))\n\n mol1 = converter.s_bonds_mol_from_xyz(self.xyz11[... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PSYCOP transitivity rule >>> m = PSYCOP() >>> i = m.get_fresh_id() >>> a = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None) >>> b = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None) >>> c = m.Prop(m.PT.atomic, m.Atom("C", i, False, False), None) >>> p1 = m.Prop(m.PT.implies, a, b) >>> p2 = m.Prop(m.PT.impl... | def rule_transitivity(self, p1, p2, domain):
if p1.type == self.PT.implies and p2.type == self.PT.implies:
if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic and \
p2.v1.type == self.PT.atomic and p2.v2.type == self.PT.atomic:
if p1.v1.v1.arg_id ... | [
"def rule_exclusivity(self, p1, p2, domain):\r\n\r\n if p1.type == self.PT.implies and p2.type == self.PT.negation:\r\n if p2.v1.type == self.PT.conjunction:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic:\r\n if p2.v1.v1.type == self.PT.atom... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PSYCOP exclusivity rule >>> m = PSYCOP() >>> i = m.get_fresh_id() >>> j = m.get_fresh_id() >>> ai = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None) >>> bi = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None) >>> bj = m.Prop(m.PT.atomic, m.Atom("B", j, False, False), None) >>> cj = m.Prop(m.PT.atomic, m.Ato... | def rule_exclusivity(self, p1, p2, domain):
if p1.type == self.PT.implies and p2.type == self.PT.negation:
if p2.v1.type == self.PT.conjunction:
if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic:
if p2.v1.v1.type == self.PT.atomic and p2.v1.v2... | [
"def rule_transitivity(self, p1, p2, domain):\r\n if p1.type == self.PT.implies and p2.type == self.PT.implies:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic and \\\r\n p2.v1.type == self.PT.atomic and p2.v2.type == self.PT.atomic:\r\n if p1... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PSYCOP conversion rule >>> m = PSYCOP() >>> i = m.get_fresh_id() >>> a = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None) >>> b = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None) >>> p = m.Prop(m.PT.negation, m.Prop(m.PT.conjunction, a, b), None) >>> m.rule_conversion(p, set()) [NOT ((B(x_1) AND A(x_1)))] | def rule_conversion(self, p, domain):
if p.type == self.PT.negation:
if p.v1.type == self.PT.conjunction:
if p.v1.v1.type == self.PT.atomic and p.v1.v2.type == self.PT.atomic:
i = self.get_fresh_id()
p_new = self.Prop(self.PT.negation,
... | [
"def rule_transitivity(self, p1, p2, domain):\r\n if p1.type == self.PT.implies and p2.type == self.PT.implies:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic and \\\r\n p2.v1.type == self.PT.atomic and p2.v2.type == self.PT.atomic:\r\n if p1... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
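Conversion is the simplest of the rules: it commutes the conjuncts under the negation, NOT(A AND B) ⊢ NOT(B AND A), exactly as the doctest shows. Sketch:

```python
def conversion(p):
    if p[0] == "not" and p[1][0] == "and":
        _, (_, a, b) = p            # unpack NOT(A AND B)
        return ("not", ("and", b, a))
    return None

print(conversion(("not", ("and", "A", "B"))))  # -> ('not', ('and', 'B', 'A'))
```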
Returns leftmost atom in p. | def get_leftmost_atom(self, p):
if p.type == self.PT.atomic:
return p.v1
else:
return self.get_leftmost_atom(p.v1) | [
"def left(self, p):\n node = self._validate(p)\n return self._make_position(node.left)",
"def left(self, p):\n node = self._validate(p)\n return self._make_position(node._left)",
"def getLeftmost(self, root):\n current = root\n while current.left is not None:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
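get_leftmost_atom is a straight recursion down the v1 spine until it bottoms out at an atomic node. A runnable miniature with a hypothetical node type:

```python
from collections import namedtuple

Prop = namedtuple("Prop", "type v1 v2")

def leftmost_atom(p):
    # Follow v1 until the node is atomic, then return its payload.
    return p.v1 if p.type == "atomic" else leftmost_atom(p.v1)

tree = Prop("implies", Prop("atomic", "A", None), Prop("atomic", "B", None))
print(leftmost_atom(tree))  # -> 'A'
```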
a = m.Prop(m.PT.atomic, v1='a', v2=None) b = m.Prop(m.PT.atomic, v1='b', v2=None) >>> m = PSYCOP() >>> i = m.get_fresh_id() >>> a = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None) >>> b = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None) >>> prop = m.Prop(m.PT.negation, m.Prop(m.PT.conjunction, a, b), Non... | def rule_backward_conjunctive_syllogism(self, p, g):
if g.type == self.PT.negation and p.type == self.PT.negation:
# g = NOT(A(x))
if p.v1.type == self.PT.conjunction:
# p = NOT(A(x) AND B(x))
if self.matching(p.v1.v1, g.v1):
re... | [
"def rule_exclusivity(self, p1, p2, domain):\r\n\r\n if p1.type == self.PT.implies and p2.type == self.PT.negation:\r\n if p2.v1.type == self.PT.conjunction:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic:\r\n if p2.v1.v1.type == self.PT.atom... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
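Read backwards, conjunctive syllogism turns the goal NOT A, given the premise NOT(A AND B), into the subgoal B: if B holds and the conjunction is false, A must be false. A sketch with the matching step flattened to plain equality:

```python
def backward_conj_syllogism(premise, goal):
    if goal[0] == "not" and premise[0] == "not" and premise[1][0] == "and":
        a, b = premise[1][1], premise[1][2]
        if a == goal[1]:   # negated goal matches the first conjunct
            return [b]     # new subgoal: the other conjunct
    return []

print(backward_conj_syllogism(("not", ("and", "A", "B")), ("not", "A")))
# -> ['B']
```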
>>> m = PSYCOP() >>> i = m.get_fresh_id() >>> a = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None) >>> b = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None) >>> m.rule_backward_if_elimination(m.Prop(m.PT.implies, a, b), b) [A(x_0)] | def rule_backward_if_elimination(self, p, g):
if p.type == self.PT.implies:
# p = IF A(x) THEN B(x)
if self.matching(p.v2, g):
return [self.atom_prop_replace_properties(p.v1, new_arg_id=g.v1.arg_id,
new_is_n... | [
"def rule_backward_conjunctive_syllogism(self, p, g):\r\n\r\n if g.type == self.PT.negation and p.type == self.PT.negation:\r\n # g = NOT(A(x))\r\n if p.v1.type == self.PT.conjunction:\r\n # p = NOT(A(x) AND B(x))\r\n if self.matching(p.v1.v1, g.v1):\r\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
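Backward IF-elimination is modus ponens run in reverse: to prove goal B with (A → B) among the premises, it suffices to prove A. Sketch, again with matching reduced to equality:

```python
def backward_if_elimination(premise, goal):
    if premise[0] == "implies" and premise[2] == goal:
        return [premise[1]]  # new subgoal: the antecedent
    return []

print(backward_if_elimination(("implies", "A", "B"), "B"))  # -> ['A']
```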
Removes isomorphic propositions where both involve variables | def remove_duplicates(self, propositions):
propositions_copy = list(propositions)
uniques = []
while True:
duplicates = []
if len(propositions_copy) == 0:
return uniques
p1 = propositions_copy[0]
for p2 in propositions_copy:... | [
"def _variable_elimination(self, variables, operation, evidence=None, elimination_order=None):\n if isinstance(variables, string_types):\n raise TypeError(\"variables must be a list of strings\")\n if isinstance(evidence, string_types):\n raise TypeError(\"evidence must be a list... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
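Because the duplicate test is isomorphism rather than equality, the propositions cannot be hashed into a set, so the record falls back to a pairwise scan. The generic shape of that dedup, with an arbitrary equivalence predicate:

```python
def remove_duplicates(items, equivalent):
    uniques = []
    for x in items:
        # keep x only if no already-kept item is equivalent to it
        if not any(equivalent(x, u) for u in uniques):
            uniques.append(x)
    return uniques

# e.g. keep one representative per residue class modulo 3
print(remove_duplicates([1, 4, 2, 7, 5], lambda a, b: a % 3 == b % 3))
# -> [1, 2]
```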
>>> m = PSYCOP() >>> i = m.get_fresh_id() >>> a = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None) >>> b = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None) >>> c = m.Prop(m.PT.atomic, m.Atom("C", i, False, False), None) >>> p1 = m.Prop(m.PT.implies, a, b) >>> p2 = m.Prop(m.PT.implies, b, c) >>> p3 = m.Pro... | def extract_ac_conclusions(self, propositions):
prop_ac = []
for p in propositions:
s = self.proposition_to_string(p)
if s is not None:
if {s[1], s[2]} == {"a", "c"}:
prop_ac.append(s)
return prop_ac | [
"def get_available_cops():\n allIncidents = Incident.get_all()\n cops = []\n \n for i in allIncidents:\n if(inicioAmostragem <= i.reporting_date and i.reporting_date <=terminoAmostragem):\n# cops.append(i['operations_center']['id'])\n#conf\n cops.append(i['operations_center'... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Removes megahit intermediates in place. Renames contigs to contig.fa | def megahit(path_in):
shutil.rmtree("{0}/{1}".format(path_in, "intermediate_contigs/")) #
os.rename("{0}/final.contigs.fa".format(path_in), "{0}/contig.fa".format(path_in))
os.listdir(path_in)
return | [
"def cleanup_precluster_intermediate_files(batch_index):\n files = [\"seed{0}.S.fasta\".format(batch_index),\n \"seed{0}.orphans.fasta\".format(batch_index),\n \"batch{0}.fasta\".format(batch_index),\n \"batch{0}.remains.fasta\".format(batch_index),\n \"batch{0}.re... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
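A pathlib rendering of the same housekeeping; the directory and file names mirror the record and are assumptions about the megahit output layout (the record's dead os.listdir call is dropped):

```python
import shutil
from pathlib import Path

def tidy_megahit(path_in: str) -> None:
    out = Path(path_in)
    shutil.rmtree(out / "intermediate_contigs")           # remove intermediates
    (out / "final.contigs.fa").rename(out / "contig.fa")  # canonical contig name
```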
1. Pop the last element in the heap 2. If it isn't the only element, then the root is the max. Insert the last element at the root and sift up the max element 3. If it was the only element, just return it | def heap_pop_max(heap):
last = heap.pop()
if heap:
return_item = heap[0]
heap[0] = last
heapq._siftup_max(heap, 0)
else:
return_item = last
return return_item | [
"def _heappop_max(heap):\n lastelt = heap.pop() # raises appropriate IndexError if heap is empty\n if heap:\n returnitem = heap[0]\n heap[0] = lastelt\n _siftup_max(heap, 0)\n return returnitem\n return lastelt",
"def delete_top_from_max_heap(x):\n last = x[-1]\n x = ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
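The record reaches into heapq's private _siftup_max. A version that stays on the public API stores negated keys (numeric items assumed) and un-negates on the way out:

```python
import heapq

def heap_push_max(heap, item):
    heapq.heappush(heap, -item)  # negate so the min-heap behaves as a max-heap

def heap_pop_max(heap):
    return -heapq.heappop(heap)  # un-negate on the way out

h = []
for v in (3, 1, 4, 1, 5):
    heap_push_max(h, v)
print(heap_pop_max(h))  # -> 5
```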
Test the probas_to_classes class method | def test_RecurrentNeuralNetwork_probas_to_classes():
arr1 = np.asarray([0.1, 0.2, 0.7], dtype=np.float32)
arr2 = np.asarray([0.1], dtype=np.float32)
assert RecurrentNeuralNetwork.probas_to_classes(arr1) == 2
assert RecurrentNeuralNetwork.probas_to_classes(arr2) == 0 | [
"def class_probabilities(self, sample):\n\t\tpass",
"def classProbs(observation, tree, classes):\n res = classify(observation, tree) #res = results\n total = sum(res.values())\n probs = []\n for c in classes:\n if c in res.keys():\n probs.append(float(res[c])/total)\n else:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
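Given the two assertions, probas_to_classes behaves like an argmax with a 0.5-threshold fallback for a single output. A plausible reimplementation consistent with the test (not necessarily the library's actual code):

```python
import numpy as np

def probas_to_classes(probas):
    if probas.shape[-1] > 1:
        return int(np.argmax(probas))  # multi-class: index of the largest probability
    return int(probas[0] > 0.5)        # single output: threshold at 0.5

print(probas_to_classes(np.array([0.1, 0.2, 0.7], dtype=np.float32)))  # -> 2
print(probas_to_classes(np.array([0.1], dtype=np.float32)))            # -> 0
```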
Determines whether the ship at a specific marker has been sunk, for visually representing the board | def is_ship_sunk(self, x, y):
marker = self.markers[x][y]
total_hits = self.ship_hits[marker]
return total_hits == MarkerType.MAX_HITS[marker] | [
"def is_sunk(ship_cells):\n return all(v[2] for v in ship_cells)",
"def hit(self):\n\n self.units.pop()\n return (len(self.units) == 0) # Returns True if the ship has been sunk",
"def is_game_over():\n game_over = False\n number_of_ships_sunk = 0\n for ship in Ship.list_of_ships:\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
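The sunk test is a hit-count-equals-capacity comparison. A stripped-down self-contained version, with a hypothetical MAX_HITS table standing in for MarkerType:

```python
MAX_HITS = {"destroyer": 2, "cruiser": 3}  # hypothetical per-ship capacities

def is_ship_sunk(markers, ship_hits, x, y):
    marker = markers[x][y]                        # which ship occupies the cell
    return ship_hits[marker] == MAX_HITS[marker]  # sunk once fully hit

markers = [["destroyer"]]
print(is_ship_sunk(markers, {"destroyer": 2}, 0, 0))  # -> True
```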
Perform translation on data according to desc and language | def translate(desc, data, language = None):
# sanity checks
if desc is None or data is None:
return data
# create a translation configuration
prepared = translator.prepare(desc, language)
# perform the translation
if isinstance(data, types.DictType):
# translate single entry
... | [
"def translate(self, language=None):",
"def translate():\n pass",
"def help_translate(self):\n print_say(\"translates from one language to another.\", self)",
"def question_new_translate():",
"def translate(self, message, languages, context=None, name=None):\n raise NotImplementedError"... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get the list of cached code tables | def list():
return cache.codeTableList() | [
"def get_cached_sources(self):\n\t\tsources = []\n\t\tsource_pattern = \"%s_source_\" % self.cache_prefix\n\t\tfor cache_name in sorted(self.cache.keys()):\n\t\t\tif re.search(source_pattern, cache_name):\n\t\t\t\tcache_name = cache_name.replace(source_pattern, \"\")\n\t\t\t\tsources.append(cache_name.split(\"_\"))... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Move file(s) to a given destination. | def move_files(file: str, destination: str):
try:
result = _process_files("mv", "-v", file, destination)
except FileNotFoundError:
print("ERROR: '{}' does not exist.".format(file))
except FolderNotFoundError:
print(
"ERROR: '{}' destination does not exist.".format(destin... | [
"def move_files(src, dst, filenames):\n for filename in filenames:\n os.rename(os.path.join(src, filename), os.path.join(dst, filename))",
"def move_files(*args, **kwargs):\n source_dir = kwargs[\"source_location\"]\n logging.info('Accessing source directory at: {}'.format(source_dir))\n target... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
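A pure-stdlib take on the same move-with-diagnostics contract, assuming the destination must be an existing directory (the record's _process_files helper is replaced by shutil.move; error messages mirror the original):

```python
import os
import shutil

def move_files(file: str, destination: str):
    if not os.path.exists(file):
        print("ERROR: '{}' does not exist.".format(file))
    elif not os.path.isdir(destination):
        print("ERROR: '{}' destination does not exist.".format(destination))
    else:
        shutil.move(file, destination)
```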
Draws the restart menu state. | def draw(screen):
MY.restart_button.draw(screen)
MY.display_text.draw(screen) | [
"def redraw_start_window():\n # Create title text\n text_title, text_rect = draw_title(\"Sudoku\", 60, LIGHT_BLUE)\n\n # Center title rectangle and add to center of display\n text_rect.center = (win_width // 2, win_height // 5)\n window.blit(text_title, text_rect)\n\n # Draw start menu buttons\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |