id
int32 0
252k
| repo
stringlengths 7
55
| path
stringlengths 4
127
| func_name
stringlengths 1
88
| original_string
stringlengths 75
19.8k
| language
stringclasses 1
value | code
stringlengths 75
19.8k
| code_tokens
list | docstring
stringlengths 3
17.3k
| docstring_tokens
list | sha
stringlengths 40
40
| url
stringlengths 87
242
|
|---|---|---|---|---|---|---|---|---|---|---|---|
15,800
|
mitodl/edx-api-client
|
edx_api/enrollments/__init__.py
|
CourseEnrollments._get_enrollments_list_page
|
def _get_enrollments_list_page(self, params=None):
"""
Submit request to retrieve enrollments list.
Args:
params (dict): Query parameters to use in the request. Valid parameters are:
* course_id: Filters the result to course enrollments for the course
corresponding to the given course ID. The value must be URL encoded.
Optional.
* username: username: List of comma-separated usernames. Filters the result to the
course enrollments of the given users. Optional.
"""
req_url = urljoin(self.base_url, self.enrollment_list_url)
resp = self.requester.get(req_url, params=params)
resp.raise_for_status()
resp_json = resp.json()
results = resp_json['results']
next_url_str = resp_json.get('next')
cursor = None
qstr_cursor = None
if next_url_str:
next_url = urlparse(next_url_str)
qstr = parse_qs(next_url.query)
qstr_cursor = qstr.get('cursor')
if qstr_cursor and isinstance(qstr_cursor, list):
cursor = qstr_cursor[0]
return results, cursor
|
python
|
def _get_enrollments_list_page(self, params=None):
"""
Submit request to retrieve enrollments list.
Args:
params (dict): Query parameters to use in the request. Valid parameters are:
* course_id: Filters the result to course enrollments for the course
corresponding to the given course ID. The value must be URL encoded.
Optional.
* username: username: List of comma-separated usernames. Filters the result to the
course enrollments of the given users. Optional.
"""
req_url = urljoin(self.base_url, self.enrollment_list_url)
resp = self.requester.get(req_url, params=params)
resp.raise_for_status()
resp_json = resp.json()
results = resp_json['results']
next_url_str = resp_json.get('next')
cursor = None
qstr_cursor = None
if next_url_str:
next_url = urlparse(next_url_str)
qstr = parse_qs(next_url.query)
qstr_cursor = qstr.get('cursor')
if qstr_cursor and isinstance(qstr_cursor, list):
cursor = qstr_cursor[0]
return results, cursor
|
[
"def",
"_get_enrollments_list_page",
"(",
"self",
",",
"params",
"=",
"None",
")",
":",
"req_url",
"=",
"urljoin",
"(",
"self",
".",
"base_url",
",",
"self",
".",
"enrollment_list_url",
")",
"resp",
"=",
"self",
".",
"requester",
".",
"get",
"(",
"req_url",
",",
"params",
"=",
"params",
")",
"resp",
".",
"raise_for_status",
"(",
")",
"resp_json",
"=",
"resp",
".",
"json",
"(",
")",
"results",
"=",
"resp_json",
"[",
"'results'",
"]",
"next_url_str",
"=",
"resp_json",
".",
"get",
"(",
"'next'",
")",
"cursor",
"=",
"None",
"qstr_cursor",
"=",
"None",
"if",
"next_url_str",
":",
"next_url",
"=",
"urlparse",
"(",
"next_url_str",
")",
"qstr",
"=",
"parse_qs",
"(",
"next_url",
".",
"query",
")",
"qstr_cursor",
"=",
"qstr",
".",
"get",
"(",
"'cursor'",
")",
"if",
"qstr_cursor",
"and",
"isinstance",
"(",
"qstr_cursor",
",",
"list",
")",
":",
"cursor",
"=",
"qstr_cursor",
"[",
"0",
"]",
"return",
"results",
",",
"cursor"
] |
Submit request to retrieve enrollments list.
Args:
params (dict): Query parameters to use in the request. Valid parameters are:
* course_id: Filters the result to course enrollments for the course
corresponding to the given course ID. The value must be URL encoded.
Optional.
* username: username: List of comma-separated usernames. Filters the result to the
course enrollments of the given users. Optional.
|
[
"Submit",
"request",
"to",
"retrieve",
"enrollments",
"list",
"."
] |
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
|
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/enrollments/__init__.py#L31-L59
|
15,801
|
mitodl/edx-api-client
|
edx_api/enrollments/__init__.py
|
CourseEnrollments.get_enrollments
|
def get_enrollments(self, course_id=None, usernames=None):
"""
List all course enrollments.
Args:
course_id (str, optional): If used enrollments will be filtered to the specified
course id.
usernames (list, optional): List of usernames to filter enrollments.
Notes:
- This method returns an iterator to avoid going through the entire pagination at once.
- The :class:`Enrollments` instance returned for each generated item will not have any
course details.
Examples:
Get all enrollments for a specific course id
>>> api = EdxApi({'access_token': 'token'}, 'http://base_url')
>>> enrollments = api.enrollments.get_enrollments(course_id='course_id')
>>> for enrollment in enrollments:
do_something(enrollment)
Get all enrollments for a set of usernames
>>> api = EdxApi({'access_token': 'token'}, 'http://base_url')
>>> enrollments = api.enrollments.get_enrollments(usernames=['user1', 'user2'])
>>> for enrollment in enrollments:
do_something(enrollment)
Returns:
Generator with an instance of :class:`Enrollments` for each item.
"""
params = {}
if course_id is not None:
params['course_id'] = course_id
if usernames is not None and isinstance(usernames, list):
params['username'] = ','.join(usernames)
done = False
while not done:
enrollments, next_cursor = self._get_enrollments_list_page(params)
for enrollment in enrollments:
yield Enrollment(enrollment)
if next_cursor:
params['cursor'] = next_cursor
else:
done = True
|
python
|
def get_enrollments(self, course_id=None, usernames=None):
"""
List all course enrollments.
Args:
course_id (str, optional): If used enrollments will be filtered to the specified
course id.
usernames (list, optional): List of usernames to filter enrollments.
Notes:
- This method returns an iterator to avoid going through the entire pagination at once.
- The :class:`Enrollments` instance returned for each generated item will not have any
course details.
Examples:
Get all enrollments for a specific course id
>>> api = EdxApi({'access_token': 'token'}, 'http://base_url')
>>> enrollments = api.enrollments.get_enrollments(course_id='course_id')
>>> for enrollment in enrollments:
do_something(enrollment)
Get all enrollments for a set of usernames
>>> api = EdxApi({'access_token': 'token'}, 'http://base_url')
>>> enrollments = api.enrollments.get_enrollments(usernames=['user1', 'user2'])
>>> for enrollment in enrollments:
do_something(enrollment)
Returns:
Generator with an instance of :class:`Enrollments` for each item.
"""
params = {}
if course_id is not None:
params['course_id'] = course_id
if usernames is not None and isinstance(usernames, list):
params['username'] = ','.join(usernames)
done = False
while not done:
enrollments, next_cursor = self._get_enrollments_list_page(params)
for enrollment in enrollments:
yield Enrollment(enrollment)
if next_cursor:
params['cursor'] = next_cursor
else:
done = True
|
[
"def",
"get_enrollments",
"(",
"self",
",",
"course_id",
"=",
"None",
",",
"usernames",
"=",
"None",
")",
":",
"params",
"=",
"{",
"}",
"if",
"course_id",
"is",
"not",
"None",
":",
"params",
"[",
"'course_id'",
"]",
"=",
"course_id",
"if",
"usernames",
"is",
"not",
"None",
"and",
"isinstance",
"(",
"usernames",
",",
"list",
")",
":",
"params",
"[",
"'username'",
"]",
"=",
"','",
".",
"join",
"(",
"usernames",
")",
"done",
"=",
"False",
"while",
"not",
"done",
":",
"enrollments",
",",
"next_cursor",
"=",
"self",
".",
"_get_enrollments_list_page",
"(",
"params",
")",
"for",
"enrollment",
"in",
"enrollments",
":",
"yield",
"Enrollment",
"(",
"enrollment",
")",
"if",
"next_cursor",
":",
"params",
"[",
"'cursor'",
"]",
"=",
"next_cursor",
"else",
":",
"done",
"=",
"True"
] |
List all course enrollments.
Args:
course_id (str, optional): If used enrollments will be filtered to the specified
course id.
usernames (list, optional): List of usernames to filter enrollments.
Notes:
- This method returns an iterator to avoid going through the entire pagination at once.
- The :class:`Enrollments` instance returned for each generated item will not have any
course details.
Examples:
Get all enrollments for a specific course id
>>> api = EdxApi({'access_token': 'token'}, 'http://base_url')
>>> enrollments = api.enrollments.get_enrollments(course_id='course_id')
>>> for enrollment in enrollments:
do_something(enrollment)
Get all enrollments for a set of usernames
>>> api = EdxApi({'access_token': 'token'}, 'http://base_url')
>>> enrollments = api.enrollments.get_enrollments(usernames=['user1', 'user2'])
>>> for enrollment in enrollments:
do_something(enrollment)
Returns:
Generator with an instance of :class:`Enrollments` for each item.
|
[
"List",
"all",
"course",
"enrollments",
"."
] |
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
|
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/enrollments/__init__.py#L61-L106
|
15,802
|
mitodl/edx-api-client
|
edx_api/enrollments/__init__.py
|
CourseEnrollments.get_student_enrollments
|
def get_student_enrollments(self):
"""
Returns an Enrollments object with the user enrollments
Returns:
Enrollments: object representing the student enrollments
"""
# the request is done in behalf of the current logged in user
resp = self.requester.get(
urljoin(self.base_url, self.enrollment_url))
resp.raise_for_status()
return Enrollments(resp.json())
|
python
|
def get_student_enrollments(self):
"""
Returns an Enrollments object with the user enrollments
Returns:
Enrollments: object representing the student enrollments
"""
# the request is done in behalf of the current logged in user
resp = self.requester.get(
urljoin(self.base_url, self.enrollment_url))
resp.raise_for_status()
return Enrollments(resp.json())
|
[
"def",
"get_student_enrollments",
"(",
"self",
")",
":",
"# the request is done in behalf of the current logged in user",
"resp",
"=",
"self",
".",
"requester",
".",
"get",
"(",
"urljoin",
"(",
"self",
".",
"base_url",
",",
"self",
".",
"enrollment_url",
")",
")",
"resp",
".",
"raise_for_status",
"(",
")",
"return",
"Enrollments",
"(",
"resp",
".",
"json",
"(",
")",
")"
] |
Returns an Enrollments object with the user enrollments
Returns:
Enrollments: object representing the student enrollments
|
[
"Returns",
"an",
"Enrollments",
"object",
"with",
"the",
"user",
"enrollments"
] |
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
|
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/enrollments/__init__.py#L108-L119
|
15,803
|
mitodl/edx-api-client
|
edx_api/enrollments/__init__.py
|
CourseEnrollments.create_audit_student_enrollment
|
def create_audit_student_enrollment(self, course_id):
"""
Creates an audit enrollment for the user in a given course
Args:
course_id (str): an edX course id
Returns:
Enrollment: object representing the student enrollment in the provided course
"""
audit_enrollment = {
"mode": "audit",
"course_details": {"course_id": course_id}
}
# the request is done in behalf of the current logged in user
resp = self.requester.post(
urljoin(self.base_url, self.enrollment_url),
json=audit_enrollment
)
resp.raise_for_status()
return Enrollment(resp.json())
|
python
|
def create_audit_student_enrollment(self, course_id):
"""
Creates an audit enrollment for the user in a given course
Args:
course_id (str): an edX course id
Returns:
Enrollment: object representing the student enrollment in the provided course
"""
audit_enrollment = {
"mode": "audit",
"course_details": {"course_id": course_id}
}
# the request is done in behalf of the current logged in user
resp = self.requester.post(
urljoin(self.base_url, self.enrollment_url),
json=audit_enrollment
)
resp.raise_for_status()
return Enrollment(resp.json())
|
[
"def",
"create_audit_student_enrollment",
"(",
"self",
",",
"course_id",
")",
":",
"audit_enrollment",
"=",
"{",
"\"mode\"",
":",
"\"audit\"",
",",
"\"course_details\"",
":",
"{",
"\"course_id\"",
":",
"course_id",
"}",
"}",
"# the request is done in behalf of the current logged in user",
"resp",
"=",
"self",
".",
"requester",
".",
"post",
"(",
"urljoin",
"(",
"self",
".",
"base_url",
",",
"self",
".",
"enrollment_url",
")",
",",
"json",
"=",
"audit_enrollment",
")",
"resp",
".",
"raise_for_status",
"(",
")",
"return",
"Enrollment",
"(",
"resp",
".",
"json",
"(",
")",
")"
] |
Creates an audit enrollment for the user in a given course
Args:
course_id (str): an edX course id
Returns:
Enrollment: object representing the student enrollment in the provided course
|
[
"Creates",
"an",
"audit",
"enrollment",
"for",
"the",
"user",
"in",
"a",
"given",
"course"
] |
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
|
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/enrollments/__init__.py#L121-L141
|
15,804
|
mitodl/edx-api-client
|
edx_api/certificates/__init__.py
|
UserCertificates.get_student_certificate
|
def get_student_certificate(self, username, course_id):
"""
Returns an Certificate object with the user certificates
Args:
username (str): an edx user's username
course_id (str): an edX course id.
Returns:
Certificate: object representing the student certificate for a course
"""
# the request is done in behalf of the current logged in user
resp = self.requester.get(
urljoin(
self.base_url,
'/api/certificates/v0/certificates/{username}/courses/{course_key}/'.format(
username=username,
course_key=course_id
)
)
)
resp.raise_for_status()
return Certificate(resp.json())
|
python
|
def get_student_certificate(self, username, course_id):
"""
Returns an Certificate object with the user certificates
Args:
username (str): an edx user's username
course_id (str): an edX course id.
Returns:
Certificate: object representing the student certificate for a course
"""
# the request is done in behalf of the current logged in user
resp = self.requester.get(
urljoin(
self.base_url,
'/api/certificates/v0/certificates/{username}/courses/{course_key}/'.format(
username=username,
course_key=course_id
)
)
)
resp.raise_for_status()
return Certificate(resp.json())
|
[
"def",
"get_student_certificate",
"(",
"self",
",",
"username",
",",
"course_id",
")",
":",
"# the request is done in behalf of the current logged in user",
"resp",
"=",
"self",
".",
"requester",
".",
"get",
"(",
"urljoin",
"(",
"self",
".",
"base_url",
",",
"'/api/certificates/v0/certificates/{username}/courses/{course_key}/'",
".",
"format",
"(",
"username",
"=",
"username",
",",
"course_key",
"=",
"course_id",
")",
")",
")",
"resp",
".",
"raise_for_status",
"(",
")",
"return",
"Certificate",
"(",
"resp",
".",
"json",
"(",
")",
")"
] |
Returns an Certificate object with the user certificates
Args:
username (str): an edx user's username
course_id (str): an edX course id.
Returns:
Certificate: object representing the student certificate for a course
|
[
"Returns",
"an",
"Certificate",
"object",
"with",
"the",
"user",
"certificates"
] |
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
|
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/certificates/__init__.py#L25-L49
|
15,805
|
mitodl/edx-api-client
|
edx_api/certificates/__init__.py
|
UserCertificates.get_student_certificates
|
def get_student_certificates(self, username, course_ids=None):
"""
Returns an Certificates object with the user certificates
Args:
username (str): an edx user's username
course_ids (list): a list of edX course ids.
Returns:
Certificates: object representing the student certificates for a course
"""
# if no course ids are provided, let's get the user enrollments
if course_ids is None:
enrollments_client = CourseEnrollments(self.requester, self.base_url)
enrollments = enrollments_client.get_student_enrollments()
course_ids = list(enrollments.get_enrolled_course_ids())
all_certificates = []
for course_id in course_ids:
try:
all_certificates.append(self.get_student_certificate(username, course_id))
except HTTPError as error:
if error.response.status_code >= 500:
raise
return Certificates(all_certificates)
|
python
|
def get_student_certificates(self, username, course_ids=None):
"""
Returns an Certificates object with the user certificates
Args:
username (str): an edx user's username
course_ids (list): a list of edX course ids.
Returns:
Certificates: object representing the student certificates for a course
"""
# if no course ids are provided, let's get the user enrollments
if course_ids is None:
enrollments_client = CourseEnrollments(self.requester, self.base_url)
enrollments = enrollments_client.get_student_enrollments()
course_ids = list(enrollments.get_enrolled_course_ids())
all_certificates = []
for course_id in course_ids:
try:
all_certificates.append(self.get_student_certificate(username, course_id))
except HTTPError as error:
if error.response.status_code >= 500:
raise
return Certificates(all_certificates)
|
[
"def",
"get_student_certificates",
"(",
"self",
",",
"username",
",",
"course_ids",
"=",
"None",
")",
":",
"# if no course ids are provided, let's get the user enrollments",
"if",
"course_ids",
"is",
"None",
":",
"enrollments_client",
"=",
"CourseEnrollments",
"(",
"self",
".",
"requester",
",",
"self",
".",
"base_url",
")",
"enrollments",
"=",
"enrollments_client",
".",
"get_student_enrollments",
"(",
")",
"course_ids",
"=",
"list",
"(",
"enrollments",
".",
"get_enrolled_course_ids",
"(",
")",
")",
"all_certificates",
"=",
"[",
"]",
"for",
"course_id",
"in",
"course_ids",
":",
"try",
":",
"all_certificates",
".",
"append",
"(",
"self",
".",
"get_student_certificate",
"(",
"username",
",",
"course_id",
")",
")",
"except",
"HTTPError",
"as",
"error",
":",
"if",
"error",
".",
"response",
".",
"status_code",
">=",
"500",
":",
"raise",
"return",
"Certificates",
"(",
"all_certificates",
")"
] |
Returns an Certificates object with the user certificates
Args:
username (str): an edx user's username
course_ids (list): a list of edX course ids.
Returns:
Certificates: object representing the student certificates for a course
|
[
"Returns",
"an",
"Certificates",
"object",
"with",
"the",
"user",
"certificates"
] |
083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6
|
https://github.com/mitodl/edx-api-client/blob/083fd23a48b3ef0d39602fc3e7e53ef02f4ad6d6/edx_api/certificates/__init__.py#L51-L76
|
15,806
|
metakirby5/colorz
|
colorz.py
|
get_colors
|
def get_colors(img):
"""
Returns a list of all the image's colors.
"""
w, h = img.size
return [color[:3] for count, color in img.convert('RGB').getcolors(w * h)]
|
python
|
def get_colors(img):
"""
Returns a list of all the image's colors.
"""
w, h = img.size
return [color[:3] for count, color in img.convert('RGB').getcolors(w * h)]
|
[
"def",
"get_colors",
"(",
"img",
")",
":",
"w",
",",
"h",
"=",
"img",
".",
"size",
"return",
"[",
"color",
"[",
":",
"3",
"]",
"for",
"count",
",",
"color",
"in",
"img",
".",
"convert",
"(",
"'RGB'",
")",
".",
"getcolors",
"(",
"w",
"*",
"h",
")",
"]"
] |
Returns a list of all the image's colors.
|
[
"Returns",
"a",
"list",
"of",
"all",
"the",
"image",
"s",
"colors",
"."
] |
11fd47a28d7a4af5b91d29978524335c8fef8cc9
|
https://github.com/metakirby5/colorz/blob/11fd47a28d7a4af5b91d29978524335c8fef8cc9/colorz.py#L56-L61
|
15,807
|
metakirby5/colorz
|
colorz.py
|
clamp
|
def clamp(color, min_v, max_v):
"""
Clamps a color such that the value is between min_v and max_v.
"""
h, s, v = rgb_to_hsv(*map(down_scale, color))
min_v, max_v = map(down_scale, (min_v, max_v))
v = min(max(min_v, v), max_v)
return tuple(map(up_scale, hsv_to_rgb(h, s, v)))
|
python
|
def clamp(color, min_v, max_v):
"""
Clamps a color such that the value is between min_v and max_v.
"""
h, s, v = rgb_to_hsv(*map(down_scale, color))
min_v, max_v = map(down_scale, (min_v, max_v))
v = min(max(min_v, v), max_v)
return tuple(map(up_scale, hsv_to_rgb(h, s, v)))
|
[
"def",
"clamp",
"(",
"color",
",",
"min_v",
",",
"max_v",
")",
":",
"h",
",",
"s",
",",
"v",
"=",
"rgb_to_hsv",
"(",
"*",
"map",
"(",
"down_scale",
",",
"color",
")",
")",
"min_v",
",",
"max_v",
"=",
"map",
"(",
"down_scale",
",",
"(",
"min_v",
",",
"max_v",
")",
")",
"v",
"=",
"min",
"(",
"max",
"(",
"min_v",
",",
"v",
")",
",",
"max_v",
")",
"return",
"tuple",
"(",
"map",
"(",
"up_scale",
",",
"hsv_to_rgb",
"(",
"h",
",",
"s",
",",
"v",
")",
")",
")"
] |
Clamps a color such that the value is between min_v and max_v.
|
[
"Clamps",
"a",
"color",
"such",
"that",
"the",
"value",
"is",
"between",
"min_v",
"and",
"max_v",
"."
] |
11fd47a28d7a4af5b91d29978524335c8fef8cc9
|
https://github.com/metakirby5/colorz/blob/11fd47a28d7a4af5b91d29978524335c8fef8cc9/colorz.py#L64-L71
|
15,808
|
metakirby5/colorz
|
colorz.py
|
order_by_hue
|
def order_by_hue(colors):
"""
Orders colors by hue.
"""
hsvs = [rgb_to_hsv(*map(down_scale, color)) for color in colors]
hsvs.sort(key=lambda t: t[0])
return [tuple(map(up_scale, hsv_to_rgb(*hsv))) for hsv in hsvs]
|
python
|
def order_by_hue(colors):
"""
Orders colors by hue.
"""
hsvs = [rgb_to_hsv(*map(down_scale, color)) for color in colors]
hsvs.sort(key=lambda t: t[0])
return [tuple(map(up_scale, hsv_to_rgb(*hsv))) for hsv in hsvs]
|
[
"def",
"order_by_hue",
"(",
"colors",
")",
":",
"hsvs",
"=",
"[",
"rgb_to_hsv",
"(",
"*",
"map",
"(",
"down_scale",
",",
"color",
")",
")",
"for",
"color",
"in",
"colors",
"]",
"hsvs",
".",
"sort",
"(",
"key",
"=",
"lambda",
"t",
":",
"t",
"[",
"0",
"]",
")",
"return",
"[",
"tuple",
"(",
"map",
"(",
"up_scale",
",",
"hsv_to_rgb",
"(",
"*",
"hsv",
")",
")",
")",
"for",
"hsv",
"in",
"hsvs",
"]"
] |
Orders colors by hue.
|
[
"Orders",
"colors",
"by",
"hue",
"."
] |
11fd47a28d7a4af5b91d29978524335c8fef8cc9
|
https://github.com/metakirby5/colorz/blob/11fd47a28d7a4af5b91d29978524335c8fef8cc9/colorz.py#L74-L80
|
15,809
|
metakirby5/colorz
|
colorz.py
|
brighten
|
def brighten(color, brightness):
"""
Adds or subtracts value to a color.
"""
h, s, v = rgb_to_hsv(*map(down_scale, color))
return tuple(map(up_scale, hsv_to_rgb(h, s, v + down_scale(brightness))))
|
python
|
def brighten(color, brightness):
"""
Adds or subtracts value to a color.
"""
h, s, v = rgb_to_hsv(*map(down_scale, color))
return tuple(map(up_scale, hsv_to_rgb(h, s, v + down_scale(brightness))))
|
[
"def",
"brighten",
"(",
"color",
",",
"brightness",
")",
":",
"h",
",",
"s",
",",
"v",
"=",
"rgb_to_hsv",
"(",
"*",
"map",
"(",
"down_scale",
",",
"color",
")",
")",
"return",
"tuple",
"(",
"map",
"(",
"up_scale",
",",
"hsv_to_rgb",
"(",
"h",
",",
"s",
",",
"v",
"+",
"down_scale",
"(",
"brightness",
")",
")",
")",
")"
] |
Adds or subtracts value to a color.
|
[
"Adds",
"or",
"subtracts",
"value",
"to",
"a",
"color",
"."
] |
11fd47a28d7a4af5b91d29978524335c8fef8cc9
|
https://github.com/metakirby5/colorz/blob/11fd47a28d7a4af5b91d29978524335c8fef8cc9/colorz.py#L83-L88
|
15,810
|
metakirby5/colorz
|
colorz.py
|
colorz
|
def colorz(fd, n=DEFAULT_NUM_COLORS, min_v=DEFAULT_MINV, max_v=DEFAULT_MAXV,
bold_add=DEFAULT_BOLD_ADD, order_colors=True):
"""
Get the n most dominant colors of an image.
Clamps value to between min_v and max_v.
Creates bold colors using bold_add.
Total number of colors returned is 2*n, optionally ordered by hue.
Returns as a list of pairs of RGB triples.
For terminal colors, the hue order is:
red, yellow, green, cyan, blue, magenta
"""
img = Image.open(fd)
img.thumbnail(THUMB_SIZE)
obs = get_colors(img)
clamped = [clamp(color, min_v, max_v) for color in obs]
clusters, _ = kmeans(array(clamped).astype(float), n)
colors = order_by_hue(clusters) if order_colors else clusters
return list(zip(colors, [brighten(c, bold_add) for c in colors]))
|
python
|
def colorz(fd, n=DEFAULT_NUM_COLORS, min_v=DEFAULT_MINV, max_v=DEFAULT_MAXV,
bold_add=DEFAULT_BOLD_ADD, order_colors=True):
"""
Get the n most dominant colors of an image.
Clamps value to between min_v and max_v.
Creates bold colors using bold_add.
Total number of colors returned is 2*n, optionally ordered by hue.
Returns as a list of pairs of RGB triples.
For terminal colors, the hue order is:
red, yellow, green, cyan, blue, magenta
"""
img = Image.open(fd)
img.thumbnail(THUMB_SIZE)
obs = get_colors(img)
clamped = [clamp(color, min_v, max_v) for color in obs]
clusters, _ = kmeans(array(clamped).astype(float), n)
colors = order_by_hue(clusters) if order_colors else clusters
return list(zip(colors, [brighten(c, bold_add) for c in colors]))
|
[
"def",
"colorz",
"(",
"fd",
",",
"n",
"=",
"DEFAULT_NUM_COLORS",
",",
"min_v",
"=",
"DEFAULT_MINV",
",",
"max_v",
"=",
"DEFAULT_MAXV",
",",
"bold_add",
"=",
"DEFAULT_BOLD_ADD",
",",
"order_colors",
"=",
"True",
")",
":",
"img",
"=",
"Image",
".",
"open",
"(",
"fd",
")",
"img",
".",
"thumbnail",
"(",
"THUMB_SIZE",
")",
"obs",
"=",
"get_colors",
"(",
"img",
")",
"clamped",
"=",
"[",
"clamp",
"(",
"color",
",",
"min_v",
",",
"max_v",
")",
"for",
"color",
"in",
"obs",
"]",
"clusters",
",",
"_",
"=",
"kmeans",
"(",
"array",
"(",
"clamped",
")",
".",
"astype",
"(",
"float",
")",
",",
"n",
")",
"colors",
"=",
"order_by_hue",
"(",
"clusters",
")",
"if",
"order_colors",
"else",
"clusters",
"return",
"list",
"(",
"zip",
"(",
"colors",
",",
"[",
"brighten",
"(",
"c",
",",
"bold_add",
")",
"for",
"c",
"in",
"colors",
"]",
")",
")"
] |
Get the n most dominant colors of an image.
Clamps value to between min_v and max_v.
Creates bold colors using bold_add.
Total number of colors returned is 2*n, optionally ordered by hue.
Returns as a list of pairs of RGB triples.
For terminal colors, the hue order is:
red, yellow, green, cyan, blue, magenta
|
[
"Get",
"the",
"n",
"most",
"dominant",
"colors",
"of",
"an",
"image",
".",
"Clamps",
"value",
"to",
"between",
"min_v",
"and",
"max_v",
"."
] |
11fd47a28d7a4af5b91d29978524335c8fef8cc9
|
https://github.com/metakirby5/colorz/blob/11fd47a28d7a4af5b91d29978524335c8fef8cc9/colorz.py#L91-L111
|
15,811
|
ansible/pytest-ansible
|
setup.py
|
long_description
|
def long_description(*paths):
'''Returns a RST formated string.
'''
result = ''
# attempt to import pandoc
try:
import pypandoc
except (ImportError, OSError) as e:
print("Unable to import pypandoc - %s" % e)
return result
# attempt md -> rst conversion
try:
for path in paths:
result += '\n' + pypandoc.convert(
path, 'rst', format='markdown'
)
except (OSError, IOError) as e:
print("Failed to convert with pypandoc - %s" % e)
return result
return result
|
python
|
def long_description(*paths):
'''Returns a RST formated string.
'''
result = ''
# attempt to import pandoc
try:
import pypandoc
except (ImportError, OSError) as e:
print("Unable to import pypandoc - %s" % e)
return result
# attempt md -> rst conversion
try:
for path in paths:
result += '\n' + pypandoc.convert(
path, 'rst', format='markdown'
)
except (OSError, IOError) as e:
print("Failed to convert with pypandoc - %s" % e)
return result
return result
|
[
"def",
"long_description",
"(",
"*",
"paths",
")",
":",
"result",
"=",
"''",
"# attempt to import pandoc",
"try",
":",
"import",
"pypandoc",
"except",
"(",
"ImportError",
",",
"OSError",
")",
"as",
"e",
":",
"print",
"(",
"\"Unable to import pypandoc - %s\"",
"%",
"e",
")",
"return",
"result",
"# attempt md -> rst conversion",
"try",
":",
"for",
"path",
"in",
"paths",
":",
"result",
"+=",
"'\\n'",
"+",
"pypandoc",
".",
"convert",
"(",
"path",
",",
"'rst'",
",",
"format",
"=",
"'markdown'",
")",
"except",
"(",
"OSError",
",",
"IOError",
")",
"as",
"e",
":",
"print",
"(",
"\"Failed to convert with pypandoc - %s\"",
"%",
"e",
")",
"return",
"result",
"return",
"result"
] |
Returns a RST formated string.
|
[
"Returns",
"a",
"RST",
"formated",
"string",
"."
] |
0f7eea80887715fb290a425b8ea4a1c1cfad1ecf
|
https://github.com/ansible/pytest-ansible/blob/0f7eea80887715fb290a425b8ea4a1c1cfad1ecf/setup.py#L78-L100
|
15,812
|
wmayner/pyphi
|
pyphi/cache.py
|
memory_full
|
def memory_full():
"""Check if the memory is too full for further caching."""
current_process = psutil.Process(os.getpid())
return (current_process.memory_percent() >
config.MAXIMUM_CACHE_MEMORY_PERCENTAGE)
|
python
|
def memory_full():
"""Check if the memory is too full for further caching."""
current_process = psutil.Process(os.getpid())
return (current_process.memory_percent() >
config.MAXIMUM_CACHE_MEMORY_PERCENTAGE)
|
[
"def",
"memory_full",
"(",
")",
":",
"current_process",
"=",
"psutil",
".",
"Process",
"(",
"os",
".",
"getpid",
"(",
")",
")",
"return",
"(",
"current_process",
".",
"memory_percent",
"(",
")",
">",
"config",
".",
"MAXIMUM_CACHE_MEMORY_PERCENTAGE",
")"
] |
Check if the memory is too full for further caching.
|
[
"Check",
"if",
"the",
"memory",
"is",
"too",
"full",
"for",
"further",
"caching",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L26-L30
|
15,813
|
wmayner/pyphi
|
pyphi/cache.py
|
cache
|
def cache(cache={}, maxmem=config.MAXIMUM_CACHE_MEMORY_PERCENTAGE,
typed=False):
"""Memory-limited cache decorator.
``maxmem`` is a float between 0 and 100, inclusive, specifying the maximum
percentage of physical memory that the cache can use.
If ``typed`` is ``True``, arguments of different types will be cached
separately. For example, f(3.0) and f(3) will be treated as distinct calls
with distinct results.
Arguments to the cached function must be hashable.
View the cache statistics named tuple (hits, misses, currsize)
with f.cache_info(). Clear the cache and statistics with f.cache_clear().
Access the underlying function with f.__wrapped__.
"""
# Constants shared by all lru cache instances:
# Unique object used to signal cache misses.
sentinel = object()
# Build a key from the function arguments.
make_key = _make_key
def decorating_function(user_function, hits=0, misses=0):
full = False
# Bound method to look up a key or return None.
cache_get = cache.get
if not maxmem:
def wrapper(*args, **kwds):
# Simple caching without memory limit.
nonlocal hits, misses
key = make_key(args, kwds, typed)
result = cache_get(key, sentinel)
if result is not sentinel:
hits += 1
return result
result = user_function(*args, **kwds)
cache[key] = result
misses += 1
return result
else:
def wrapper(*args, **kwds):
# Memory-limited caching.
nonlocal hits, misses, full
key = make_key(args, kwds, typed)
result = cache_get(key)
if result is not None:
hits += 1
return result
result = user_function(*args, **kwds)
if not full:
cache[key] = result
# Cache is full if the total recursive usage is greater
# than the maximum allowed percentage.
current_process = psutil.Process(os.getpid())
full = current_process.memory_percent() > maxmem
misses += 1
return result
def cache_info():
"""Report cache statistics."""
return _CacheInfo(hits, misses, len(cache))
def cache_clear():
"""Clear the cache and cache statistics."""
nonlocal hits, misses, full
cache.clear()
hits = misses = 0
full = False
wrapper.cache_info = cache_info
wrapper.cache_clear = cache_clear
return update_wrapper(wrapper, user_function)
return decorating_function
|
python
|
def cache(cache={}, maxmem=config.MAXIMUM_CACHE_MEMORY_PERCENTAGE,
          typed=False):
    """Memory-limited cache decorator.

    ``maxmem`` is a float between 0 and 100, inclusive, specifying the maximum
    percentage of physical memory that the cache can use.

    If ``typed`` is ``True``, arguments of different types will be cached
    separately. For example, f(3.0) and f(3) will be treated as distinct calls
    with distinct results.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, currsize)
    with f.cache_info(). Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.
    """
    # Constants shared by all lru cache instances:
    # Unique object used to signal cache misses.
    sentinel = object()
    # Build a key from the function arguments.
    make_key = _make_key

    def decorating_function(user_function, hits=0, misses=0):
        full = False
        # Bound method to look up a key, or the default on a miss.
        cache_get = cache.get

        if not maxmem:
            def wrapper(*args, **kwds):
                # Simple caching without memory limit.
                nonlocal hits, misses
                key = make_key(args, kwds, typed)
                result = cache_get(key, sentinel)
                if result is not sentinel:
                    hits += 1
                    return result
                result = user_function(*args, **kwds)
                cache[key] = result
                misses += 1
                return result
        else:
            def wrapper(*args, **kwds):
                # Memory-limited caching.
                nonlocal hits, misses, full
                key = make_key(args, kwds, typed)
                # BUG FIX: compare against the sentinel rather than None so
                # that a cached ``None`` result counts as a hit; previously
                # it was recomputed (and re-stored) on every call.
                result = cache_get(key, sentinel)
                if result is not sentinel:
                    hits += 1
                    return result
                result = user_function(*args, **kwds)
                if not full:
                    cache[key] = result
                    # Cache is full if the process's total memory usage is
                    # greater than the maximum allowed percentage.
                    current_process = psutil.Process(os.getpid())
                    full = current_process.memory_percent() > maxmem
                misses += 1
                return result

        def cache_info():
            """Report cache statistics."""
            return _CacheInfo(hits, misses, len(cache))

        def cache_clear():
            """Clear the cache and cache statistics."""
            nonlocal hits, misses, full
            cache.clear()
            hits = misses = 0
            full = False

        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return update_wrapper(wrapper, user_function)

    return decorating_function
|
[
"def",
"cache",
"(",
"cache",
"=",
"{",
"}",
",",
"maxmem",
"=",
"config",
".",
"MAXIMUM_CACHE_MEMORY_PERCENTAGE",
",",
"typed",
"=",
"False",
")",
":",
"# Constants shared by all lru cache instances:",
"# Unique object used to signal cache misses.",
"sentinel",
"=",
"object",
"(",
")",
"# Build a key from the function arguments.",
"make_key",
"=",
"_make_key",
"def",
"decorating_function",
"(",
"user_function",
",",
"hits",
"=",
"0",
",",
"misses",
"=",
"0",
")",
":",
"full",
"=",
"False",
"# Bound method to look up a key or return None.",
"cache_get",
"=",
"cache",
".",
"get",
"if",
"not",
"maxmem",
":",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
":",
"# Simple caching without memory limit.",
"nonlocal",
"hits",
",",
"misses",
"key",
"=",
"make_key",
"(",
"args",
",",
"kwds",
",",
"typed",
")",
"result",
"=",
"cache_get",
"(",
"key",
",",
"sentinel",
")",
"if",
"result",
"is",
"not",
"sentinel",
":",
"hits",
"+=",
"1",
"return",
"result",
"result",
"=",
"user_function",
"(",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
"cache",
"[",
"key",
"]",
"=",
"result",
"misses",
"+=",
"1",
"return",
"result",
"else",
":",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
":",
"# Memory-limited caching.",
"nonlocal",
"hits",
",",
"misses",
",",
"full",
"key",
"=",
"make_key",
"(",
"args",
",",
"kwds",
",",
"typed",
")",
"result",
"=",
"cache_get",
"(",
"key",
")",
"if",
"result",
"is",
"not",
"None",
":",
"hits",
"+=",
"1",
"return",
"result",
"result",
"=",
"user_function",
"(",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
"if",
"not",
"full",
":",
"cache",
"[",
"key",
"]",
"=",
"result",
"# Cache is full if the total recursive usage is greater",
"# than the maximum allowed percentage.",
"current_process",
"=",
"psutil",
".",
"Process",
"(",
"os",
".",
"getpid",
"(",
")",
")",
"full",
"=",
"current_process",
".",
"memory_percent",
"(",
")",
">",
"maxmem",
"misses",
"+=",
"1",
"return",
"result",
"def",
"cache_info",
"(",
")",
":",
"\"\"\"Report cache statistics.\"\"\"",
"return",
"_CacheInfo",
"(",
"hits",
",",
"misses",
",",
"len",
"(",
"cache",
")",
")",
"def",
"cache_clear",
"(",
")",
":",
"\"\"\"Clear the cache and cache statistics.\"\"\"",
"nonlocal",
"hits",
",",
"misses",
",",
"full",
"cache",
".",
"clear",
"(",
")",
"hits",
"=",
"misses",
"=",
"0",
"full",
"=",
"False",
"wrapper",
".",
"cache_info",
"=",
"cache_info",
"wrapper",
".",
"cache_clear",
"=",
"cache_clear",
"return",
"update_wrapper",
"(",
"wrapper",
",",
"user_function",
")",
"return",
"decorating_function"
] |
Memory-limited cache decorator.
``maxmem`` is a float between 0 and 100, inclusive, specifying the maximum
percentage of physical memory that the cache can use.
If ``typed`` is ``True``, arguments of different types will be cached
separately. For example, f(3.0) and f(3) will be treated as distinct calls
with distinct results.
Arguments to the cached function must be hashable.
View the cache statistics named tuple (hits, misses, currsize)
with f.cache_info(). Clear the cache and statistics with f.cache_clear().
Access the underlying function with f.__wrapped__.
|
[
"Memory",
"-",
"limited",
"cache",
"decorator",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L78-L156
|
15,814
|
wmayner/pyphi
|
pyphi/cache.py
|
MICECache
|
def MICECache(subsystem, parent_cache=None):
"""Construct a |MICE| cache.
Uses either a Redis-backed cache or a local dict cache on the object.
Args:
subsystem (Subsystem): The subsystem that this is a cache for.
Kwargs:
parent_cache (MICECache): The cache generated by the uncut
version of ``subsystem``. Any cached |MICE| which are
unaffected by the cut are reused in this cache. If None,
the cache is initialized empty.
"""
if config.REDIS_CACHE:
cls = RedisMICECache
else:
cls = DictMICECache
return cls(subsystem, parent_cache=parent_cache)
|
python
|
def MICECache(subsystem, parent_cache=None):
    """Construct a |MICE| cache.

    Uses either a Redis-backed cache or a local dict cache on the object.

    Args:
        subsystem (Subsystem): The subsystem that this is a cache for.

    Kwargs:
        parent_cache (MICECache): The cache generated by the uncut
            version of ``subsystem``. Any cached |MICE| which are
            unaffected by the cut are reused in this cache. If None,
            the cache is initialized empty.
    """
    # Choose the backend according to the global configuration flag.
    backend = RedisMICECache if config.REDIS_CACHE else DictMICECache
    return backend(subsystem, parent_cache=parent_cache)
|
[
"def",
"MICECache",
"(",
"subsystem",
",",
"parent_cache",
"=",
"None",
")",
":",
"if",
"config",
".",
"REDIS_CACHE",
":",
"cls",
"=",
"RedisMICECache",
"else",
":",
"cls",
"=",
"DictMICECache",
"return",
"cls",
"(",
"subsystem",
",",
"parent_cache",
"=",
"parent_cache",
")"
] |
Construct a |MICE| cache.
Uses either a Redis-backed cache or a local dict cache on the object.
Args:
subsystem (Subsystem): The subsystem that this is a cache for.
Kwargs:
parent_cache (MICECache): The cache generated by the uncut
version of ``subsystem``. Any cached |MICE| which are
unaffected by the cut are reused in this cache. If None,
the cache is initialized empty.
|
[
"Construct",
"a",
"|MICE|",
"cache",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L396-L414
|
15,815
|
wmayner/pyphi
|
pyphi/cache.py
|
method
|
def method(cache_name, key_prefix=None):
"""Caching decorator for object-level method caches.
Cache key generation is delegated to the cache.
Args:
cache_name (str): The name of the (already-instantiated) cache
on the decorated object which should be used to store results
of this method.
*key_prefix: A constant to use as part of the cache key in addition
to the method arguments.
"""
def decorator(func):
if (func.__name__ in ['cause_repertoire', 'effect_repertoire'] and
not config.CACHE_REPERTOIRES):
return func
@wraps(func)
def wrapper(obj, *args, **kwargs):
cache = getattr(obj, cache_name)
# Delegate key generation
key = cache.key(*args, _prefix=key_prefix, **kwargs)
# Get cached value, or compute
value = cache.get(key)
if value is None: # miss
value = func(obj, *args, **kwargs)
cache.set(key, value)
return value
return wrapper
return decorator
|
python
|
def method(cache_name, key_prefix=None):
    """Caching decorator for object-level method caches.

    Cache key generation is delegated to the cache.

    Args:
        cache_name (str): The name of the (already-instantiated) cache
            on the decorated object which should be used to store results
            of this method.
        *key_prefix: A constant to use as part of the cache key in addition
            to the method arguments.
    """
    def decorator(func):
        # Repertoire caching can be disabled globally via configuration;
        # in that case the function is returned undecorated.
        uncacheable = ['cause_repertoire', 'effect_repertoire']
        if func.__name__ in uncacheable and not config.CACHE_REPERTOIRES:
            return func

        @wraps(func)
        def wrapper(obj, *args, **kwargs):
            method_cache = getattr(obj, cache_name)
            # The cache itself knows how to build a key from the arguments.
            cache_key = method_cache.key(*args, _prefix=key_prefix, **kwargs)
            value = method_cache.get(cache_key)
            if value is None:  # miss: compute and remember
                value = func(obj, *args, **kwargs)
                method_cache.set(cache_key, value)
            return value
        return wrapper
    return decorator
|
[
"def",
"method",
"(",
"cache_name",
",",
"key_prefix",
"=",
"None",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"if",
"(",
"func",
".",
"__name__",
"in",
"[",
"'cause_repertoire'",
",",
"'effect_repertoire'",
"]",
"and",
"not",
"config",
".",
"CACHE_REPERTOIRES",
")",
":",
"return",
"func",
"@",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"obj",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"cache",
"=",
"getattr",
"(",
"obj",
",",
"cache_name",
")",
"# Delegate key generation",
"key",
"=",
"cache",
".",
"key",
"(",
"*",
"args",
",",
"_prefix",
"=",
"key_prefix",
",",
"*",
"*",
"kwargs",
")",
"# Get cached value, or compute",
"value",
"=",
"cache",
".",
"get",
"(",
"key",
")",
"if",
"value",
"is",
"None",
":",
"# miss",
"value",
"=",
"func",
"(",
"obj",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"cache",
".",
"set",
"(",
"key",
",",
"value",
")",
"return",
"value",
"return",
"wrapper",
"return",
"decorator"
] |
Caching decorator for object-level method caches.
Cache key generation is delegated to the cache.
Args:
cache_name (str): The name of the (already-instantiated) cache
on the decorated object which should be used to store results
of this method.
*key_prefix: A constant to use as part of the cache key in addition
to the method arguments.
|
[
"Caching",
"decorator",
"for",
"object",
"-",
"level",
"method",
"caches",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L426-L458
|
15,816
|
wmayner/pyphi
|
pyphi/cache.py
|
DictCache.get
|
def get(self, key):
"""Get a value out of the cache.
Returns None if the key is not in the cache. Updates cache
statistics.
"""
if key in self.cache:
self.hits += 1
return self.cache[key]
self.misses += 1
return None
|
python
|
def get(self, key):
    """Get a value out of the cache.

    Returns None if the key is not in the cache. Updates cache
    statistics.
    """
    try:
        value = self.cache[key]
    except KeyError:
        self.misses += 1
        return None
    self.hits += 1
    return value
|
[
"def",
"get",
"(",
"self",
",",
"key",
")",
":",
"if",
"key",
"in",
"self",
".",
"cache",
":",
"self",
".",
"hits",
"+=",
"1",
"return",
"self",
".",
"cache",
"[",
"key",
"]",
"self",
".",
"misses",
"+=",
"1",
"return",
"None"
] |
Get a value out of the cache.
Returns None if the key is not in the cache. Updates cache
statistics.
|
[
"Get",
"a",
"value",
"out",
"of",
"the",
"cache",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L183-L193
|
15,817
|
wmayner/pyphi
|
pyphi/cache.py
|
DictCache.key
|
def key(self, *args, _prefix=None, **kwargs):
"""Get the cache key for the given function args.
Kwargs:
prefix: A constant to prefix to the key.
"""
if kwargs:
raise NotImplementedError(
'kwarg cache keys not implemented')
return (_prefix,) + tuple(args)
|
python
|
def key(self, *args, _prefix=None, **kwargs):
    """Get the cache key for the given function args.

    Kwargs:
        prefix: A constant to prefix to the key.
    """
    if kwargs:
        raise NotImplementedError('kwarg cache keys not implemented')
    # The key is the prefix followed by the positional arguments.
    return (_prefix, *args)
|
[
"def",
"key",
"(",
"self",
",",
"*",
"args",
",",
"_prefix",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"kwargs",
":",
"raise",
"NotImplementedError",
"(",
"'kwarg cache keys not implemented'",
")",
"return",
"(",
"_prefix",
",",
")",
"+",
"tuple",
"(",
"args",
")"
] |
Get the cache key for the given function args.
Kwargs:
prefix: A constant to prefix to the key.
|
[
"Get",
"the",
"cache",
"key",
"for",
"the",
"given",
"function",
"args",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L201-L210
|
15,818
|
wmayner/pyphi
|
pyphi/cache.py
|
RedisCache.info
|
def info(self):
"""Return cache information.
.. note:: This is not the cache info for the entire Redis key space.
"""
info = redis_conn.info()
return _CacheInfo(info['keyspace_hits'],
info['keyspace_misses'],
self.size())
|
python
|
def info(self):
    """Return cache information.

    .. note:: This is not the cache info for the entire Redis key space.
    """
    stats = redis_conn.info()
    hits = stats['keyspace_hits']
    misses = stats['keyspace_misses']
    return _CacheInfo(hits, misses, self.size())
|
[
"def",
"info",
"(",
"self",
")",
":",
"info",
"=",
"redis_conn",
".",
"info",
"(",
")",
"return",
"_CacheInfo",
"(",
"info",
"[",
"'keyspace_hits'",
"]",
",",
"info",
"[",
"'keyspace_misses'",
"]",
",",
"self",
".",
"size",
"(",
")",
")"
] |
Return cache information.
.. note:: This is not the cache info for the entire Redis key space.
|
[
"Return",
"cache",
"information",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L251-L259
|
15,819
|
wmayner/pyphi
|
pyphi/cache.py
|
RedisMICECache.set
|
def set(self, key, value):
"""Only need to set if the subsystem is uncut.
Caches are only inherited from uncut subsystems.
"""
if not self.subsystem.is_cut:
super().set(key, value)
|
python
|
def set(self, key, value):
    """Only need to set if the subsystem is uncut.

    Caches are only inherited from uncut subsystems.
    """
    if self.subsystem.is_cut:
        return
    super().set(key, value)
|
[
"def",
"set",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"if",
"not",
"self",
".",
"subsystem",
".",
"is_cut",
":",
"super",
"(",
")",
".",
"set",
"(",
"key",
",",
"value",
")"
] |
Only need to set if the subsystem is uncut.
Caches are only inherited from uncut subsystems.
|
[
"Only",
"need",
"to",
"set",
"if",
"the",
"subsystem",
"is",
"uncut",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L334-L340
|
15,820
|
wmayner/pyphi
|
pyphi/cache.py
|
DictMICECache._build
|
def _build(self, parent_cache):
"""Build the initial cache from the parent.
Only include the |MICE| which are unaffected by the subsystem cut.
A |MICE| is affected if either the cut splits the mechanism
or splits the connections between the purview and mechanism
"""
for key, mice in parent_cache.cache.items():
if not mice.damaged_by_cut(self.subsystem):
self.cache[key] = mice
|
python
|
def _build(self, parent_cache):
"""Build the initial cache from the parent.
Only include the |MICE| which are unaffected by the subsystem cut.
A |MICE| is affected if either the cut splits the mechanism
or splits the connections between the purview and mechanism
"""
for key, mice in parent_cache.cache.items():
if not mice.damaged_by_cut(self.subsystem):
self.cache[key] = mice
|
[
"def",
"_build",
"(",
"self",
",",
"parent_cache",
")",
":",
"for",
"key",
",",
"mice",
"in",
"parent_cache",
".",
"cache",
".",
"items",
"(",
")",
":",
"if",
"not",
"mice",
".",
"damaged_by_cut",
"(",
"self",
".",
"subsystem",
")",
":",
"self",
".",
"cache",
"[",
"key",
"]",
"=",
"mice"
] |
Build the initial cache from the parent.
Only include the |MICE| which are unaffected by the subsystem cut.
A |MICE| is affected if either the cut splits the mechanism
or splits the connections between the purview and mechanism
|
[
"Build",
"the",
"initial",
"cache",
"from",
"the",
"parent",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L362-L371
|
15,821
|
wmayner/pyphi
|
pyphi/cache.py
|
PurviewCache.set
|
def set(self, key, value):
"""Only set if purview caching is enabled"""
if config.CACHE_POTENTIAL_PURVIEWS:
self.cache[key] = value
|
python
|
def set(self, key, value):
    """Only set if purview caching is enabled"""
    if not config.CACHE_POTENTIAL_PURVIEWS:
        return
    self.cache[key] = value
|
[
"def",
"set",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"if",
"config",
".",
"CACHE_POTENTIAL_PURVIEWS",
":",
"self",
".",
"cache",
"[",
"key",
"]",
"=",
"value"
] |
Only set if purview caching is enabled
|
[
"Only",
"set",
"if",
"purview",
"caching",
"is",
"enabled"
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/cache.py#L420-L423
|
15,822
|
wmayner/pyphi
|
pyphi/connectivity.py
|
apply_boundary_conditions_to_cm
|
def apply_boundary_conditions_to_cm(external_indices, cm):
"""Remove connections to or from external nodes."""
cm = cm.copy()
cm[external_indices, :] = 0 # Zero-out row
cm[:, external_indices] = 0 # Zero-out column
return cm
|
python
|
def apply_boundary_conditions_to_cm(external_indices, cm):
    """Remove connections to or from external nodes."""
    bounded = cm.copy()
    bounded[external_indices, :] = 0  # zero-out rows (outgoing connections)
    bounded[:, external_indices] = 0  # zero-out columns (incoming connections)
    return bounded
|
[
"def",
"apply_boundary_conditions_to_cm",
"(",
"external_indices",
",",
"cm",
")",
":",
"cm",
"=",
"cm",
".",
"copy",
"(",
")",
"cm",
"[",
"external_indices",
",",
":",
"]",
"=",
"0",
"# Zero-out row",
"cm",
"[",
":",
",",
"external_indices",
"]",
"=",
"0",
"# Zero-out column",
"return",
"cm"
] |
Remove connections to or from external nodes.
|
[
"Remove",
"connections",
"to",
"or",
"from",
"external",
"nodes",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L13-L18
|
15,823
|
wmayner/pyphi
|
pyphi/connectivity.py
|
get_inputs_from_cm
|
def get_inputs_from_cm(index, cm):
"""Return indices of inputs to the node with the given index."""
return tuple(i for i in range(cm.shape[0]) if cm[i][index])
|
python
|
def get_inputs_from_cm(index, cm):
    """Return indices of inputs to the node with the given index."""
    senders = []
    for source in range(cm.shape[0]):
        if cm[source][index]:
            senders.append(source)
    return tuple(senders)
|
[
"def",
"get_inputs_from_cm",
"(",
"index",
",",
"cm",
")",
":",
"return",
"tuple",
"(",
"i",
"for",
"i",
"in",
"range",
"(",
"cm",
".",
"shape",
"[",
"0",
"]",
")",
"if",
"cm",
"[",
"i",
"]",
"[",
"index",
"]",
")"
] |
Return indices of inputs to the node with the given index.
|
[
"Return",
"indices",
"of",
"inputs",
"to",
"the",
"node",
"with",
"the",
"given",
"index",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L21-L23
|
15,824
|
wmayner/pyphi
|
pyphi/connectivity.py
|
get_outputs_from_cm
|
def get_outputs_from_cm(index, cm):
"""Return indices of the outputs of node with the given index."""
return tuple(i for i in range(cm.shape[0]) if cm[index][i])
|
python
|
def get_outputs_from_cm(index, cm):
    """Return indices of the outputs of node with the given index."""
    row = cm[index]
    receivers = [target for target in range(cm.shape[0]) if row[target]]
    return tuple(receivers)
|
[
"def",
"get_outputs_from_cm",
"(",
"index",
",",
"cm",
")",
":",
"return",
"tuple",
"(",
"i",
"for",
"i",
"in",
"range",
"(",
"cm",
".",
"shape",
"[",
"0",
"]",
")",
"if",
"cm",
"[",
"index",
"]",
"[",
"i",
"]",
")"
] |
Return indices of the outputs of node with the given index.
|
[
"Return",
"indices",
"of",
"the",
"outputs",
"of",
"node",
"with",
"the",
"given",
"index",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L26-L28
|
15,825
|
wmayner/pyphi
|
pyphi/connectivity.py
|
causally_significant_nodes
|
def causally_significant_nodes(cm):
"""Return indices of nodes that have both inputs and outputs."""
inputs = cm.sum(0)
outputs = cm.sum(1)
nodes_with_inputs_and_outputs = np.logical_and(inputs > 0, outputs > 0)
return tuple(np.where(nodes_with_inputs_and_outputs)[0])
|
python
|
def causally_significant_nodes(cm):
    """Return indices of nodes that have both inputs and outputs."""
    has_inputs = cm.sum(0) > 0    # column sums: incoming connections
    has_outputs = cm.sum(1) > 0   # row sums: outgoing connections
    significant = has_inputs & has_outputs
    return tuple(np.where(significant)[0])
|
[
"def",
"causally_significant_nodes",
"(",
"cm",
")",
":",
"inputs",
"=",
"cm",
".",
"sum",
"(",
"0",
")",
"outputs",
"=",
"cm",
".",
"sum",
"(",
"1",
")",
"nodes_with_inputs_and_outputs",
"=",
"np",
".",
"logical_and",
"(",
"inputs",
">",
"0",
",",
"outputs",
">",
"0",
")",
"return",
"tuple",
"(",
"np",
".",
"where",
"(",
"nodes_with_inputs_and_outputs",
")",
"[",
"0",
"]",
")"
] |
Return indices of nodes that have both inputs and outputs.
|
[
"Return",
"indices",
"of",
"nodes",
"that",
"have",
"both",
"inputs",
"and",
"outputs",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L31-L36
|
15,826
|
wmayner/pyphi
|
pyphi/connectivity.py
|
relevant_connections
|
def relevant_connections(n, _from, to):
"""Construct a connectivity matrix.
Args:
n (int): The dimensions of the matrix
_from (tuple[int]): Nodes with outgoing connections to ``to``
to (tuple[int]): Nodes with incoming connections from ``_from``
Returns:
np.ndarray: An |n x n| connectivity matrix with the |i,jth| entry is
``1`` if |i| is in ``_from`` and |j| is in ``to``, and 0 otherwise.
"""
cm = np.zeros((n, n))
# Don't try and index with empty arrays. Older versions of NumPy
# (at least up to 1.9.3) break with empty array indices.
if not _from or not to:
return cm
cm[np.ix_(_from, to)] = 1
return cm
|
python
|
def relevant_connections(n, _from, to):
    """Construct a connectivity matrix.

    Args:
        n (int): The dimensions of the matrix
        _from (tuple[int]): Nodes with outgoing connections to ``to``
        to (tuple[int]): Nodes with incoming connections from ``_from``

    Returns:
        np.ndarray: An |n x n| connectivity matrix with the |i,jth| entry is
        ``1`` if |i| is in ``_from`` and |j| is in ``to``, and 0 otherwise.
    """
    connections = np.zeros((n, n))
    # Empty array indices break older NumPy versions (at least up to 1.9.3),
    # so only index when both node sets are non-empty.
    if _from and to:
        connections[np.ix_(_from, to)] = 1
    return connections
|
[
"def",
"relevant_connections",
"(",
"n",
",",
"_from",
",",
"to",
")",
":",
"cm",
"=",
"np",
".",
"zeros",
"(",
"(",
"n",
",",
"n",
")",
")",
"# Don't try and index with empty arrays. Older versions of NumPy",
"# (at least up to 1.9.3) break with empty array indices.",
"if",
"not",
"_from",
"or",
"not",
"to",
":",
"return",
"cm",
"cm",
"[",
"np",
".",
"ix_",
"(",
"_from",
",",
"to",
")",
"]",
"=",
"1",
"return",
"cm"
] |
Construct a connectivity matrix.
Args:
n (int): The dimensions of the matrix
_from (tuple[int]): Nodes with outgoing connections to ``to``
to (tuple[int]): Nodes with incoming connections from ``_from``
Returns:
np.ndarray: An |n x n| connectivity matrix with the |i,jth| entry is
``1`` if |i| is in ``_from`` and |j| is in ``to``, and 0 otherwise.
|
[
"Construct",
"a",
"connectivity",
"matrix",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L40-L60
|
15,827
|
wmayner/pyphi
|
pyphi/connectivity.py
|
block_cm
|
def block_cm(cm):
"""Return whether ``cm`` can be arranged as a block connectivity matrix.
If so, the corresponding mechanism/purview is trivially reducible.
Technically, only square matrices are "block diagonal", but the notion of
connectivity carries over.
We test for block connectivity by trying to grow a block of nodes such
that:
- 'source' nodes only input to nodes in the block
- 'sink' nodes only receive inputs from source nodes in the block
For example, the following connectivity matrix represents connections from
``nodes1 = A, B, C`` to ``nodes2 = D, E, F, G`` (without loss of
generality, note that ``nodes1`` and ``nodes2`` may share elements)::
D E F G
A [1, 1, 0, 0]
B [1, 1, 0, 0]
C [0, 0, 1, 1]
Since nodes |AB| only connect to nodes |DE|, and node |C| only connects to
nodes |FG|, the subgraph is reducible, because the cut ::
A,B C
─── ✕ ───
D,E F,G
does not change the structure of the graph.
"""
if np.any(cm.sum(1) == 0):
return True
if np.all(cm.sum(1) == 1):
return True
outputs = list(range(cm.shape[1]))
# CM helpers:
def outputs_of(nodes):
"""Return all nodes that `nodes` connect to (output to)."""
return np.where(cm[nodes, :].sum(0))[0]
def inputs_to(nodes):
"""Return all nodes which connect to (input to) `nodes`."""
return np.where(cm[:, nodes].sum(1))[0]
# Start: source node with most outputs
sources = [np.argmax(cm.sum(1))]
sinks = outputs_of(sources)
sink_inputs = inputs_to(sinks)
while True:
if np.array_equal(sink_inputs, sources):
# sources exclusively connect to sinks.
# There are no other nodes which connect sink nodes,
# hence set(sources) + set(sinks) form a component
# which is not connected to the rest of the graph
return True
# Recompute sources, sinks, and sink_inputs
sources = sink_inputs
sinks = outputs_of(sources)
sink_inputs = inputs_to(sinks)
# Considering all output nodes?
if np.array_equal(sinks, outputs):
return False
|
python
|
def block_cm(cm):
    """Return whether ``cm`` can be arranged as a block connectivity matrix.

    If so, the corresponding mechanism/purview is trivially reducible.
    Technically, only square matrices are "block diagonal", but the notion of
    connectivity carries over.

    We test for block connectivity by trying to grow a block of nodes such
    that:

    - 'source' nodes only input to nodes in the block
    - 'sink' nodes only receive inputs from source nodes in the block

    For example, the following connectivity matrix represents connections from
    ``nodes1 = A, B, C`` to ``nodes2 = D, E, F, G`` (without loss of
    generality, note that ``nodes1`` and ``nodes2`` may share elements)::

         D  E  F  G
      A [1, 1, 0, 0]
      B [1, 1, 0, 0]
      C [0, 0, 1, 1]

    Since nodes |AB| only connect to nodes |DE|, and node |C| only connects to
    nodes |FG|, the subgraph is reducible, because the cut ::

      A,B   C
      ─── ✕ ───
      D,E   F,G

    does not change the structure of the graph.
    """
    # A source node with no outputs at all trivially forms its own block.
    if np.any(cm.sum(1) == 0):
        return True
    # Every source has exactly one output: each edge is its own block.
    if np.all(cm.sum(1) == 1):
        return True

    outputs = list(range(cm.shape[1]))

    # CM helpers:
    def outputs_of(nodes):
        """Return all nodes that `nodes` connect to (output to)."""
        return np.where(cm[nodes, :].sum(0))[0]

    def inputs_to(nodes):
        """Return all nodes which connect to (input to) `nodes`."""
        return np.where(cm[:, nodes].sum(1))[0]

    # Start: source node with most outputs
    sources = [np.argmax(cm.sum(1))]
    sinks = outputs_of(sources)
    sink_inputs = inputs_to(sinks)

    # Grow the candidate block to a fixed point: alternately add all sinks
    # reachable from the current sources, then all sources feeding those
    # sinks, until the set stabilizes or covers every output node.
    while True:
        if np.array_equal(sink_inputs, sources):
            # sources exclusively connect to sinks.
            # There are no other nodes which connect sink nodes,
            # hence set(sources) + set(sinks) form a component
            # which is not connected to the rest of the graph
            return True

        # Recompute sources, sinks, and sink_inputs
        sources = sink_inputs
        sinks = outputs_of(sources)
        sink_inputs = inputs_to(sinks)

        # Considering all output nodes?
        if np.array_equal(sinks, outputs):
            return False
|
[
"def",
"block_cm",
"(",
"cm",
")",
":",
"if",
"np",
".",
"any",
"(",
"cm",
".",
"sum",
"(",
"1",
")",
"==",
"0",
")",
":",
"return",
"True",
"if",
"np",
".",
"all",
"(",
"cm",
".",
"sum",
"(",
"1",
")",
"==",
"1",
")",
":",
"return",
"True",
"outputs",
"=",
"list",
"(",
"range",
"(",
"cm",
".",
"shape",
"[",
"1",
"]",
")",
")",
"# CM helpers:",
"def",
"outputs_of",
"(",
"nodes",
")",
":",
"\"\"\"Return all nodes that `nodes` connect to (output to).\"\"\"",
"return",
"np",
".",
"where",
"(",
"cm",
"[",
"nodes",
",",
":",
"]",
".",
"sum",
"(",
"0",
")",
")",
"[",
"0",
"]",
"def",
"inputs_to",
"(",
"nodes",
")",
":",
"\"\"\"Return all nodes which connect to (input to) `nodes`.\"\"\"",
"return",
"np",
".",
"where",
"(",
"cm",
"[",
":",
",",
"nodes",
"]",
".",
"sum",
"(",
"1",
")",
")",
"[",
"0",
"]",
"# Start: source node with most outputs",
"sources",
"=",
"[",
"np",
".",
"argmax",
"(",
"cm",
".",
"sum",
"(",
"1",
")",
")",
"]",
"sinks",
"=",
"outputs_of",
"(",
"sources",
")",
"sink_inputs",
"=",
"inputs_to",
"(",
"sinks",
")",
"while",
"True",
":",
"if",
"np",
".",
"array_equal",
"(",
"sink_inputs",
",",
"sources",
")",
":",
"# sources exclusively connect to sinks.",
"# There are no other nodes which connect sink nodes,",
"# hence set(sources) + set(sinks) form a component",
"# which is not connected to the rest of the graph",
"return",
"True",
"# Recompute sources, sinks, and sink_inputs",
"sources",
"=",
"sink_inputs",
"sinks",
"=",
"outputs_of",
"(",
"sources",
")",
"sink_inputs",
"=",
"inputs_to",
"(",
"sinks",
")",
"# Considering all output nodes?",
"if",
"np",
".",
"array_equal",
"(",
"sinks",
",",
"outputs",
")",
":",
"return",
"False"
] |
Return whether ``cm`` can be arranged as a block connectivity matrix.
If so, the corresponding mechanism/purview is trivially reducible.
Technically, only square matrices are "block diagonal", but the notion of
connectivity carries over.
We test for block connectivity by trying to grow a block of nodes such
that:
- 'source' nodes only input to nodes in the block
- 'sink' nodes only receive inputs from source nodes in the block
For example, the following connectivity matrix represents connections from
``nodes1 = A, B, C`` to ``nodes2 = D, E, F, G`` (without loss of
generality, note that ``nodes1`` and ``nodes2`` may share elements)::
D E F G
A [1, 1, 0, 0]
B [1, 1, 0, 0]
C [0, 0, 1, 1]
Since nodes |AB| only connect to nodes |DE|, and node |C| only connects to
nodes |FG|, the subgraph is reducible, because the cut ::
A,B C
─── ✕ ───
D,E F,G
does not change the structure of the graph.
|
[
"Return",
"whether",
"cm",
"can",
"be",
"arranged",
"as",
"a",
"block",
"connectivity",
"matrix",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L63-L130
|
15,828
|
wmayner/pyphi
|
pyphi/connectivity.py
|
block_reducible
|
def block_reducible(cm, nodes1, nodes2):
"""Return whether connections from ``nodes1`` to ``nodes2`` are reducible.
Args:
cm (np.ndarray): The network's connectivity matrix.
nodes1 (tuple[int]): Source nodes
nodes2 (tuple[int]): Sink nodes
"""
# Trivial case
if not nodes1 or not nodes2:
return True
cm = cm[np.ix_(nodes1, nodes2)]
# Validate the connectivity matrix.
if not cm.sum(0).all() or not cm.sum(1).all():
return True
if len(nodes1) > 1 and len(nodes2) > 1:
return block_cm(cm)
return False
|
python
|
def block_reducible(cm, nodes1, nodes2):
    """Return whether connections from ``nodes1`` to ``nodes2`` are reducible.

    Args:
        cm (np.ndarray): The network's connectivity matrix.
        nodes1 (tuple[int]): Source nodes
        nodes2 (tuple[int]): Sink nodes
    """
    # Trivially reducible when either side is empty.
    if not (nodes1 and nodes2):
        return True
    submatrix = cm[np.ix_(nodes1, nodes2)]
    # A source with no outputs or a sink with no inputs is reducible.
    if not submatrix.sum(0).all() or not submatrix.sum(1).all():
        return True
    # Only a genuinely multi-node subgraph can be block-structured.
    if min(len(nodes1), len(nodes2)) > 1:
        return block_cm(submatrix)
    return False
|
[
"def",
"block_reducible",
"(",
"cm",
",",
"nodes1",
",",
"nodes2",
")",
":",
"# Trivial case",
"if",
"not",
"nodes1",
"or",
"not",
"nodes2",
":",
"return",
"True",
"cm",
"=",
"cm",
"[",
"np",
".",
"ix_",
"(",
"nodes1",
",",
"nodes2",
")",
"]",
"# Validate the connectivity matrix.",
"if",
"not",
"cm",
".",
"sum",
"(",
"0",
")",
".",
"all",
"(",
")",
"or",
"not",
"cm",
".",
"sum",
"(",
"1",
")",
".",
"all",
"(",
")",
":",
"return",
"True",
"if",
"len",
"(",
"nodes1",
")",
">",
"1",
"and",
"len",
"(",
"nodes2",
")",
">",
"1",
":",
"return",
"block_cm",
"(",
"cm",
")",
"return",
"False"
] |
Return whether connections from ``nodes1`` to ``nodes2`` are reducible.
Args:
cm (np.ndarray): The network's connectivity matrix.
nodes1 (tuple[int]): Source nodes
nodes2 (tuple[int]): Sink nodes
|
[
"Return",
"whether",
"connections",
"from",
"nodes1",
"to",
"nodes2",
"are",
"reducible",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L135-L154
|
15,829
|
wmayner/pyphi
|
pyphi/connectivity.py
|
_connected
|
def _connected(cm, nodes, connection):
"""Test connectivity for the connectivity matrix."""
if nodes is not None:
cm = cm[np.ix_(nodes, nodes)]
num_components, _ = connected_components(cm, connection=connection)
return num_components < 2
|
python
|
def _connected(cm, nodes, connection):
"""Test connectivity for the connectivity matrix."""
if nodes is not None:
cm = cm[np.ix_(nodes, nodes)]
num_components, _ = connected_components(cm, connection=connection)
return num_components < 2
|
[
"def",
"_connected",
"(",
"cm",
",",
"nodes",
",",
"connection",
")",
":",
"if",
"nodes",
"is",
"not",
"None",
":",
"cm",
"=",
"cm",
"[",
"np",
".",
"ix_",
"(",
"nodes",
",",
"nodes",
")",
"]",
"num_components",
",",
"_",
"=",
"connected_components",
"(",
"cm",
",",
"connection",
"=",
"connection",
")",
"return",
"num_components",
"<",
"2"
] |
Test connectivity for the connectivity matrix.
|
[
"Test",
"connectivity",
"for",
"the",
"connectivity",
"matrix",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L157-L163
|
15,830
|
wmayner/pyphi
|
pyphi/connectivity.py
|
is_full
|
def is_full(cm, nodes1, nodes2):
"""Test connectivity of one set of nodes to another.
Args:
cm (``np.ndarrray``): The connectivity matrix
nodes1 (tuple[int]): The nodes whose outputs to ``nodes2`` will be
tested.
nodes2 (tuple[int]): The nodes whose inputs from ``nodes1`` will
be tested.
Returns:
bool: ``True`` if all elements in ``nodes1`` output to some element in
``nodes2`` and all elements in ``nodes2`` have an input from some
element in ``nodes1``, or if either set of nodes is empty; ``False``
otherwise.
"""
if not nodes1 or not nodes2:
return True
cm = cm[np.ix_(nodes1, nodes2)]
# Do all nodes have at least one connection?
return cm.sum(0).all() and cm.sum(1).all()
|
python
|
def is_full(cm, nodes1, nodes2):
"""Test connectivity of one set of nodes to another.
Args:
cm (``np.ndarrray``): The connectivity matrix
nodes1 (tuple[int]): The nodes whose outputs to ``nodes2`` will be
tested.
nodes2 (tuple[int]): The nodes whose inputs from ``nodes1`` will
be tested.
Returns:
bool: ``True`` if all elements in ``nodes1`` output to some element in
``nodes2`` and all elements in ``nodes2`` have an input from some
element in ``nodes1``, or if either set of nodes is empty; ``False``
otherwise.
"""
if not nodes1 or not nodes2:
return True
cm = cm[np.ix_(nodes1, nodes2)]
# Do all nodes have at least one connection?
return cm.sum(0).all() and cm.sum(1).all()
|
[
"def",
"is_full",
"(",
"cm",
",",
"nodes1",
",",
"nodes2",
")",
":",
"if",
"not",
"nodes1",
"or",
"not",
"nodes2",
":",
"return",
"True",
"cm",
"=",
"cm",
"[",
"np",
".",
"ix_",
"(",
"nodes1",
",",
"nodes2",
")",
"]",
"# Do all nodes have at least one connection?",
"return",
"cm",
".",
"sum",
"(",
"0",
")",
".",
"all",
"(",
")",
"and",
"cm",
".",
"sum",
"(",
"1",
")",
".",
"all",
"(",
")"
] |
Test connectivity of one set of nodes to another.
Args:
cm (``np.ndarrray``): The connectivity matrix
nodes1 (tuple[int]): The nodes whose outputs to ``nodes2`` will be
tested.
nodes2 (tuple[int]): The nodes whose inputs from ``nodes1`` will
be tested.
Returns:
bool: ``True`` if all elements in ``nodes1`` output to some element in
``nodes2`` and all elements in ``nodes2`` have an input from some
element in ``nodes1``, or if either set of nodes is empty; ``False``
otherwise.
|
[
"Test",
"connectivity",
"of",
"one",
"set",
"of",
"nodes",
"to",
"another",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/connectivity.py#L192-L214
|
15,831
|
wmayner/pyphi
|
pyphi/models/cuts.py
|
_CutBase.apply_cut
|
def apply_cut(self, cm):
"""Return a modified connectivity matrix with all connections that are
severed by this cut removed.
Args:
cm (np.ndarray): A connectivity matrix.
"""
# Invert the cut matrix, creating a matrix of preserved connections
inverse = np.logical_not(self.cut_matrix(cm.shape[0])).astype(int)
return cm * inverse
|
python
|
def apply_cut(self, cm):
"""Return a modified connectivity matrix with all connections that are
severed by this cut removed.
Args:
cm (np.ndarray): A connectivity matrix.
"""
# Invert the cut matrix, creating a matrix of preserved connections
inverse = np.logical_not(self.cut_matrix(cm.shape[0])).astype(int)
return cm * inverse
|
[
"def",
"apply_cut",
"(",
"self",
",",
"cm",
")",
":",
"# Invert the cut matrix, creating a matrix of preserved connections",
"inverse",
"=",
"np",
".",
"logical_not",
"(",
"self",
".",
"cut_matrix",
"(",
"cm",
".",
"shape",
"[",
"0",
"]",
")",
")",
".",
"astype",
"(",
"int",
")",
"return",
"cm",
"*",
"inverse"
] |
Return a modified connectivity matrix with all connections that are
severed by this cut removed.
Args:
cm (np.ndarray): A connectivity matrix.
|
[
"Return",
"a",
"modified",
"connectivity",
"matrix",
"with",
"all",
"connections",
"that",
"are",
"severed",
"by",
"this",
"cut",
"removed",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/cuts.py#L48-L57
|
15,832
|
wmayner/pyphi
|
pyphi/models/cuts.py
|
_CutBase.cuts_connections
|
def cuts_connections(self, a, b):
"""Check if this cut severs any connections from ``a`` to ``b``.
Args:
a (tuple[int]): A set of nodes.
b (tuple[int]): A set of nodes.
"""
n = max(self.indices) + 1
return self.cut_matrix(n)[np.ix_(a, b)].any()
|
python
|
def cuts_connections(self, a, b):
"""Check if this cut severs any connections from ``a`` to ``b``.
Args:
a (tuple[int]): A set of nodes.
b (tuple[int]): A set of nodes.
"""
n = max(self.indices) + 1
return self.cut_matrix(n)[np.ix_(a, b)].any()
|
[
"def",
"cuts_connections",
"(",
"self",
",",
"a",
",",
"b",
")",
":",
"n",
"=",
"max",
"(",
"self",
".",
"indices",
")",
"+",
"1",
"return",
"self",
".",
"cut_matrix",
"(",
"n",
")",
"[",
"np",
".",
"ix_",
"(",
"a",
",",
"b",
")",
"]",
".",
"any",
"(",
")"
] |
Check if this cut severs any connections from ``a`` to ``b``.
Args:
a (tuple[int]): A set of nodes.
b (tuple[int]): A set of nodes.
|
[
"Check",
"if",
"this",
"cut",
"severs",
"any",
"connections",
"from",
"a",
"to",
"b",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/cuts.py#L59-L67
|
15,833
|
wmayner/pyphi
|
pyphi/models/cuts.py
|
_CutBase.all_cut_mechanisms
|
def all_cut_mechanisms(self):
"""Return all mechanisms with elements on both sides of this cut.
Yields:
tuple[int]: The next cut mechanism.
"""
for mechanism in utils.powerset(self.indices, nonempty=True):
if self.splits_mechanism(mechanism):
yield mechanism
|
python
|
def all_cut_mechanisms(self):
"""Return all mechanisms with elements on both sides of this cut.
Yields:
tuple[int]: The next cut mechanism.
"""
for mechanism in utils.powerset(self.indices, nonempty=True):
if self.splits_mechanism(mechanism):
yield mechanism
|
[
"def",
"all_cut_mechanisms",
"(",
"self",
")",
":",
"for",
"mechanism",
"in",
"utils",
".",
"powerset",
"(",
"self",
".",
"indices",
",",
"nonempty",
"=",
"True",
")",
":",
"if",
"self",
".",
"splits_mechanism",
"(",
"mechanism",
")",
":",
"yield",
"mechanism"
] |
Return all mechanisms with elements on both sides of this cut.
Yields:
tuple[int]: The next cut mechanism.
|
[
"Return",
"all",
"mechanisms",
"with",
"elements",
"on",
"both",
"sides",
"of",
"this",
"cut",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/cuts.py#L81-L89
|
15,834
|
wmayner/pyphi
|
pyphi/models/cuts.py
|
Cut.cut_matrix
|
def cut_matrix(self, n):
"""Compute the cut matrix for this cut.
The cut matrix is a square matrix which represents connections severed
by the cut.
Args:
n (int): The size of the network.
Example:
>>> cut = Cut((1,), (2,))
>>> cut.cut_matrix(3)
array([[0., 0., 0.],
[0., 0., 1.],
[0., 0., 0.]])
"""
return connectivity.relevant_connections(n, self.from_nodes,
self.to_nodes)
|
python
|
def cut_matrix(self, n):
"""Compute the cut matrix for this cut.
The cut matrix is a square matrix which represents connections severed
by the cut.
Args:
n (int): The size of the network.
Example:
>>> cut = Cut((1,), (2,))
>>> cut.cut_matrix(3)
array([[0., 0., 0.],
[0., 0., 1.],
[0., 0., 0.]])
"""
return connectivity.relevant_connections(n, self.from_nodes,
self.to_nodes)
|
[
"def",
"cut_matrix",
"(",
"self",
",",
"n",
")",
":",
"return",
"connectivity",
".",
"relevant_connections",
"(",
"n",
",",
"self",
".",
"from_nodes",
",",
"self",
".",
"to_nodes",
")"
] |
Compute the cut matrix for this cut.
The cut matrix is a square matrix which represents connections severed
by the cut.
Args:
n (int): The size of the network.
Example:
>>> cut = Cut((1,), (2,))
>>> cut.cut_matrix(3)
array([[0., 0., 0.],
[0., 0., 1.],
[0., 0., 0.]])
|
[
"Compute",
"the",
"cut",
"matrix",
"for",
"this",
"cut",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/cuts.py#L153-L170
|
15,835
|
wmayner/pyphi
|
pyphi/models/cuts.py
|
KCut.cut_matrix
|
def cut_matrix(self, n):
"""The matrix of connections that are severed by this cut."""
cm = np.zeros((n, n))
for part in self.partition:
from_, to = self.direction.order(part.mechanism, part.purview)
# All indices external to this part
external = tuple(set(self.indices) - set(to))
cm[np.ix_(from_, external)] = 1
return cm
|
python
|
def cut_matrix(self, n):
"""The matrix of connections that are severed by this cut."""
cm = np.zeros((n, n))
for part in self.partition:
from_, to = self.direction.order(part.mechanism, part.purview)
# All indices external to this part
external = tuple(set(self.indices) - set(to))
cm[np.ix_(from_, external)] = 1
return cm
|
[
"def",
"cut_matrix",
"(",
"self",
",",
"n",
")",
":",
"cm",
"=",
"np",
".",
"zeros",
"(",
"(",
"n",
",",
"n",
")",
")",
"for",
"part",
"in",
"self",
".",
"partition",
":",
"from_",
",",
"to",
"=",
"self",
".",
"direction",
".",
"order",
"(",
"part",
".",
"mechanism",
",",
"part",
".",
"purview",
")",
"# All indices external to this part",
"external",
"=",
"tuple",
"(",
"set",
"(",
"self",
".",
"indices",
")",
"-",
"set",
"(",
"to",
")",
")",
"cm",
"[",
"np",
".",
"ix_",
"(",
"from_",
",",
"external",
")",
"]",
"=",
"1",
"return",
"cm"
] |
The matrix of connections that are severed by this cut.
|
[
"The",
"matrix",
"of",
"connections",
"that",
"are",
"severed",
"by",
"this",
"cut",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/cuts.py#L204-L214
|
15,836
|
wmayner/pyphi
|
pyphi/compute/distance.py
|
concept_distance
|
def concept_distance(c1, c2):
"""Return the distance between two concepts in concept space.
Args:
c1 (Concept): The first concept.
c2 (Concept): The second concept.
Returns:
float: The distance between the two concepts in concept space.
"""
# Calculate the sum of the cause and effect EMDs, expanding the repertoires
# to the combined purview of the two concepts, so that the EMD signatures
# are the same size.
cause_purview = tuple(set(c1.cause.purview + c2.cause.purview))
effect_purview = tuple(set(c1.effect.purview + c2.effect.purview))
# Take the sum
return (repertoire_distance(c1.expand_cause_repertoire(cause_purview),
c2.expand_cause_repertoire(cause_purview)) +
repertoire_distance(c1.expand_effect_repertoire(effect_purview),
c2.expand_effect_repertoire(effect_purview)))
|
python
|
def concept_distance(c1, c2):
"""Return the distance between two concepts in concept space.
Args:
c1 (Concept): The first concept.
c2 (Concept): The second concept.
Returns:
float: The distance between the two concepts in concept space.
"""
# Calculate the sum of the cause and effect EMDs, expanding the repertoires
# to the combined purview of the two concepts, so that the EMD signatures
# are the same size.
cause_purview = tuple(set(c1.cause.purview + c2.cause.purview))
effect_purview = tuple(set(c1.effect.purview + c2.effect.purview))
# Take the sum
return (repertoire_distance(c1.expand_cause_repertoire(cause_purview),
c2.expand_cause_repertoire(cause_purview)) +
repertoire_distance(c1.expand_effect_repertoire(effect_purview),
c2.expand_effect_repertoire(effect_purview)))
|
[
"def",
"concept_distance",
"(",
"c1",
",",
"c2",
")",
":",
"# Calculate the sum of the cause and effect EMDs, expanding the repertoires",
"# to the combined purview of the two concepts, so that the EMD signatures",
"# are the same size.",
"cause_purview",
"=",
"tuple",
"(",
"set",
"(",
"c1",
".",
"cause",
".",
"purview",
"+",
"c2",
".",
"cause",
".",
"purview",
")",
")",
"effect_purview",
"=",
"tuple",
"(",
"set",
"(",
"c1",
".",
"effect",
".",
"purview",
"+",
"c2",
".",
"effect",
".",
"purview",
")",
")",
"# Take the sum",
"return",
"(",
"repertoire_distance",
"(",
"c1",
".",
"expand_cause_repertoire",
"(",
"cause_purview",
")",
",",
"c2",
".",
"expand_cause_repertoire",
"(",
"cause_purview",
")",
")",
"+",
"repertoire_distance",
"(",
"c1",
".",
"expand_effect_repertoire",
"(",
"effect_purview",
")",
",",
"c2",
".",
"expand_effect_repertoire",
"(",
"effect_purview",
")",
")",
")"
] |
Return the distance between two concepts in concept space.
Args:
c1 (Concept): The first concept.
c2 (Concept): The second concept.
Returns:
float: The distance between the two concepts in concept space.
|
[
"Return",
"the",
"distance",
"between",
"two",
"concepts",
"in",
"concept",
"space",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/distance.py#L16-L35
|
15,837
|
wmayner/pyphi
|
pyphi/compute/distance.py
|
small_phi_ces_distance
|
def small_phi_ces_distance(C1, C2):
"""Return the difference in |small_phi| between |CauseEffectStructure|."""
return sum(c.phi for c in C1) - sum(c.phi for c in C2)
|
python
|
def small_phi_ces_distance(C1, C2):
"""Return the difference in |small_phi| between |CauseEffectStructure|."""
return sum(c.phi for c in C1) - sum(c.phi for c in C2)
|
[
"def",
"small_phi_ces_distance",
"(",
"C1",
",",
"C2",
")",
":",
"return",
"sum",
"(",
"c",
".",
"phi",
"for",
"c",
"in",
"C1",
")",
"-",
"sum",
"(",
"c",
".",
"phi",
"for",
"c",
"in",
"C2",
")"
] |
Return the difference in |small_phi| between |CauseEffectStructure|.
|
[
"Return",
"the",
"difference",
"in",
"|small_phi|",
"between",
"|CauseEffectStructure|",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/distance.py#L149-L151
|
15,838
|
wmayner/pyphi
|
pyphi/node.py
|
generate_nodes
|
def generate_nodes(tpm, cm, network_state, indices, node_labels=None):
"""Generate |Node| objects for a subsystem.
Args:
tpm (np.ndarray): The system's TPM
cm (np.ndarray): The corresponding CM.
network_state (tuple): The state of the network.
indices (tuple[int]): Indices to generate nodes for.
Keyword Args:
node_labels (|NodeLabels|): Textual labels for each node.
Returns:
tuple[Node]: The nodes of the system.
"""
if node_labels is None:
node_labels = NodeLabels(None, indices)
node_state = utils.state_of(indices, network_state)
return tuple(Node(tpm, cm, index, state, node_labels)
for index, state in zip(indices, node_state))
|
python
|
def generate_nodes(tpm, cm, network_state, indices, node_labels=None):
"""Generate |Node| objects for a subsystem.
Args:
tpm (np.ndarray): The system's TPM
cm (np.ndarray): The corresponding CM.
network_state (tuple): The state of the network.
indices (tuple[int]): Indices to generate nodes for.
Keyword Args:
node_labels (|NodeLabels|): Textual labels for each node.
Returns:
tuple[Node]: The nodes of the system.
"""
if node_labels is None:
node_labels = NodeLabels(None, indices)
node_state = utils.state_of(indices, network_state)
return tuple(Node(tpm, cm, index, state, node_labels)
for index, state in zip(indices, node_state))
|
[
"def",
"generate_nodes",
"(",
"tpm",
",",
"cm",
",",
"network_state",
",",
"indices",
",",
"node_labels",
"=",
"None",
")",
":",
"if",
"node_labels",
"is",
"None",
":",
"node_labels",
"=",
"NodeLabels",
"(",
"None",
",",
"indices",
")",
"node_state",
"=",
"utils",
".",
"state_of",
"(",
"indices",
",",
"network_state",
")",
"return",
"tuple",
"(",
"Node",
"(",
"tpm",
",",
"cm",
",",
"index",
",",
"state",
",",
"node_labels",
")",
"for",
"index",
",",
"state",
"in",
"zip",
"(",
"indices",
",",
"node_state",
")",
")"
] |
Generate |Node| objects for a subsystem.
Args:
tpm (np.ndarray): The system's TPM
cm (np.ndarray): The corresponding CM.
network_state (tuple): The state of the network.
indices (tuple[int]): Indices to generate nodes for.
Keyword Args:
node_labels (|NodeLabels|): Textual labels for each node.
Returns:
tuple[Node]: The nodes of the system.
|
[
"Generate",
"|Node|",
"objects",
"for",
"a",
"subsystem",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/node.py#L156-L177
|
15,839
|
wmayner/pyphi
|
pyphi/node.py
|
expand_node_tpm
|
def expand_node_tpm(tpm):
"""Broadcast a node TPM over the full network.
This is different from broadcasting the TPM of a full system since the last
dimension (containing the state of the node) contains only the probability
of *this* node being on, rather than the probabilities for each node.
"""
uc = np.ones([2 for node in tpm.shape])
return uc * tpm
|
python
|
def expand_node_tpm(tpm):
"""Broadcast a node TPM over the full network.
This is different from broadcasting the TPM of a full system since the last
dimension (containing the state of the node) contains only the probability
of *this* node being on, rather than the probabilities for each node.
"""
uc = np.ones([2 for node in tpm.shape])
return uc * tpm
|
[
"def",
"expand_node_tpm",
"(",
"tpm",
")",
":",
"uc",
"=",
"np",
".",
"ones",
"(",
"[",
"2",
"for",
"node",
"in",
"tpm",
".",
"shape",
"]",
")",
"return",
"uc",
"*",
"tpm"
] |
Broadcast a node TPM over the full network.
This is different from broadcasting the TPM of a full system since the last
dimension (containing the state of the node) contains only the probability
of *this* node being on, rather than the probabilities for each node.
|
[
"Broadcast",
"a",
"node",
"TPM",
"over",
"the",
"full",
"network",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/node.py#L180-L188
|
15,840
|
wmayner/pyphi
|
pyphi/tpm.py
|
condition_tpm
|
def condition_tpm(tpm, fixed_nodes, state):
"""Return a TPM conditioned on the given fixed node indices, whose states
are fixed according to the given state-tuple.
The dimensions of the new TPM that correspond to the fixed nodes are
collapsed onto their state, making those dimensions singletons suitable for
broadcasting. The number of dimensions of the conditioned TPM will be the
same as the unconditioned TPM.
"""
conditioning_indices = [[slice(None)]] * len(state)
for i in fixed_nodes:
# Preserve singleton dimensions with `np.newaxis`
conditioning_indices[i] = [state[i], np.newaxis]
# Flatten the indices.
conditioning_indices = list(chain.from_iterable(conditioning_indices))
# Obtain the actual conditioned TPM by indexing with the conditioning
# indices.
return tpm[tuple(conditioning_indices)]
|
python
|
def condition_tpm(tpm, fixed_nodes, state):
"""Return a TPM conditioned on the given fixed node indices, whose states
are fixed according to the given state-tuple.
The dimensions of the new TPM that correspond to the fixed nodes are
collapsed onto their state, making those dimensions singletons suitable for
broadcasting. The number of dimensions of the conditioned TPM will be the
same as the unconditioned TPM.
"""
conditioning_indices = [[slice(None)]] * len(state)
for i in fixed_nodes:
# Preserve singleton dimensions with `np.newaxis`
conditioning_indices[i] = [state[i], np.newaxis]
# Flatten the indices.
conditioning_indices = list(chain.from_iterable(conditioning_indices))
# Obtain the actual conditioned TPM by indexing with the conditioning
# indices.
return tpm[tuple(conditioning_indices)]
|
[
"def",
"condition_tpm",
"(",
"tpm",
",",
"fixed_nodes",
",",
"state",
")",
":",
"conditioning_indices",
"=",
"[",
"[",
"slice",
"(",
"None",
")",
"]",
"]",
"*",
"len",
"(",
"state",
")",
"for",
"i",
"in",
"fixed_nodes",
":",
"# Preserve singleton dimensions with `np.newaxis`",
"conditioning_indices",
"[",
"i",
"]",
"=",
"[",
"state",
"[",
"i",
"]",
",",
"np",
".",
"newaxis",
"]",
"# Flatten the indices.",
"conditioning_indices",
"=",
"list",
"(",
"chain",
".",
"from_iterable",
"(",
"conditioning_indices",
")",
")",
"# Obtain the actual conditioned TPM by indexing with the conditioning",
"# indices.",
"return",
"tpm",
"[",
"tuple",
"(",
"conditioning_indices",
")",
"]"
] |
Return a TPM conditioned on the given fixed node indices, whose states
are fixed according to the given state-tuple.
The dimensions of the new TPM that correspond to the fixed nodes are
collapsed onto their state, making those dimensions singletons suitable for
broadcasting. The number of dimensions of the conditioned TPM will be the
same as the unconditioned TPM.
|
[
"Return",
"a",
"TPM",
"conditioned",
"on",
"the",
"given",
"fixed",
"node",
"indices",
"whose",
"states",
"are",
"fixed",
"according",
"to",
"the",
"given",
"state",
"-",
"tuple",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/tpm.py#L29-L46
|
15,841
|
wmayner/pyphi
|
pyphi/tpm.py
|
expand_tpm
|
def expand_tpm(tpm):
"""Broadcast a state-by-node TPM so that singleton dimensions are expanded
over the full network.
"""
unconstrained = np.ones([2] * (tpm.ndim - 1) + [tpm.shape[-1]])
return tpm * unconstrained
|
python
|
def expand_tpm(tpm):
"""Broadcast a state-by-node TPM so that singleton dimensions are expanded
over the full network.
"""
unconstrained = np.ones([2] * (tpm.ndim - 1) + [tpm.shape[-1]])
return tpm * unconstrained
|
[
"def",
"expand_tpm",
"(",
"tpm",
")",
":",
"unconstrained",
"=",
"np",
".",
"ones",
"(",
"[",
"2",
"]",
"*",
"(",
"tpm",
".",
"ndim",
"-",
"1",
")",
"+",
"[",
"tpm",
".",
"shape",
"[",
"-",
"1",
"]",
"]",
")",
"return",
"tpm",
"*",
"unconstrained"
] |
Broadcast a state-by-node TPM so that singleton dimensions are expanded
over the full network.
|
[
"Broadcast",
"a",
"state",
"-",
"by",
"-",
"node",
"TPM",
"so",
"that",
"singleton",
"dimensions",
"are",
"expanded",
"over",
"the",
"full",
"network",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/tpm.py#L49-L54
|
15,842
|
wmayner/pyphi
|
pyphi/tpm.py
|
marginalize_out
|
def marginalize_out(node_indices, tpm):
"""Marginalize out nodes from a TPM.
Args:
node_indices (list[int]): The indices of nodes to be marginalized out.
tpm (np.ndarray): The TPM to marginalize the node out of.
Returns:
np.ndarray: A TPM with the same number of dimensions, with the nodes
marginalized out.
"""
return tpm.sum(tuple(node_indices), keepdims=True) / (
np.array(tpm.shape)[list(node_indices)].prod())
|
python
|
def marginalize_out(node_indices, tpm):
"""Marginalize out nodes from a TPM.
Args:
node_indices (list[int]): The indices of nodes to be marginalized out.
tpm (np.ndarray): The TPM to marginalize the node out of.
Returns:
np.ndarray: A TPM with the same number of dimensions, with the nodes
marginalized out.
"""
return tpm.sum(tuple(node_indices), keepdims=True) / (
np.array(tpm.shape)[list(node_indices)].prod())
|
[
"def",
"marginalize_out",
"(",
"node_indices",
",",
"tpm",
")",
":",
"return",
"tpm",
".",
"sum",
"(",
"tuple",
"(",
"node_indices",
")",
",",
"keepdims",
"=",
"True",
")",
"/",
"(",
"np",
".",
"array",
"(",
"tpm",
".",
"shape",
")",
"[",
"list",
"(",
"node_indices",
")",
"]",
".",
"prod",
"(",
")",
")"
] |
Marginalize out nodes from a TPM.
Args:
node_indices (list[int]): The indices of nodes to be marginalized out.
tpm (np.ndarray): The TPM to marginalize the node out of.
Returns:
np.ndarray: A TPM with the same number of dimensions, with the nodes
marginalized out.
|
[
"Marginalize",
"out",
"nodes",
"from",
"a",
"TPM",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/tpm.py#L57-L69
|
15,843
|
wmayner/pyphi
|
pyphi/tpm.py
|
infer_edge
|
def infer_edge(tpm, a, b, contexts):
"""Infer the presence or absence of an edge from node A to node B.
Let |S| be the set of all nodes in a network. Let |A' = S - {A}|. We call
the state of |A'| the context |C| of |A|. There is an edge from |A| to |B|
if there exists any context |C(A)| such that |Pr(B | C(A), A=0) != Pr(B |
C(A), A=1)|.
Args:
tpm (np.ndarray): The TPM in state-by-node, multidimensional form.
a (int): The index of the putative source node.
b (int): The index of the putative sink node.
Returns:
bool: ``True`` if the edge |A -> B| exists, ``False`` otherwise.
"""
def a_in_context(context):
"""Given a context C(A), return the states of the full system with A
OFF and ON, respectively.
"""
a_off = context[:a] + OFF + context[a:]
a_on = context[:a] + ON + context[a:]
return (a_off, a_on)
def a_affects_b_in_context(context):
"""Return ``True`` if A has an effect on B, given a context."""
a_off, a_on = a_in_context(context)
return tpm[a_off][b] != tpm[a_on][b]
return any(a_affects_b_in_context(context) for context in contexts)
|
python
|
def infer_edge(tpm, a, b, contexts):
"""Infer the presence or absence of an edge from node A to node B.
Let |S| be the set of all nodes in a network. Let |A' = S - {A}|. We call
the state of |A'| the context |C| of |A|. There is an edge from |A| to |B|
if there exists any context |C(A)| such that |Pr(B | C(A), A=0) != Pr(B |
C(A), A=1)|.
Args:
tpm (np.ndarray): The TPM in state-by-node, multidimensional form.
a (int): The index of the putative source node.
b (int): The index of the putative sink node.
Returns:
bool: ``True`` if the edge |A -> B| exists, ``False`` otherwise.
"""
def a_in_context(context):
"""Given a context C(A), return the states of the full system with A
OFF and ON, respectively.
"""
a_off = context[:a] + OFF + context[a:]
a_on = context[:a] + ON + context[a:]
return (a_off, a_on)
def a_affects_b_in_context(context):
"""Return ``True`` if A has an effect on B, given a context."""
a_off, a_on = a_in_context(context)
return tpm[a_off][b] != tpm[a_on][b]
return any(a_affects_b_in_context(context) for context in contexts)
|
[
"def",
"infer_edge",
"(",
"tpm",
",",
"a",
",",
"b",
",",
"contexts",
")",
":",
"def",
"a_in_context",
"(",
"context",
")",
":",
"\"\"\"Given a context C(A), return the states of the full system with A\n OFF and ON, respectively.\n \"\"\"",
"a_off",
"=",
"context",
"[",
":",
"a",
"]",
"+",
"OFF",
"+",
"context",
"[",
"a",
":",
"]",
"a_on",
"=",
"context",
"[",
":",
"a",
"]",
"+",
"ON",
"+",
"context",
"[",
"a",
":",
"]",
"return",
"(",
"a_off",
",",
"a_on",
")",
"def",
"a_affects_b_in_context",
"(",
"context",
")",
":",
"\"\"\"Return ``True`` if A has an effect on B, given a context.\"\"\"",
"a_off",
",",
"a_on",
"=",
"a_in_context",
"(",
"context",
")",
"return",
"tpm",
"[",
"a_off",
"]",
"[",
"b",
"]",
"!=",
"tpm",
"[",
"a_on",
"]",
"[",
"b",
"]",
"return",
"any",
"(",
"a_affects_b_in_context",
"(",
"context",
")",
"for",
"context",
"in",
"contexts",
")"
] |
Infer the presence or absence of an edge from node A to node B.
Let |S| be the set of all nodes in a network. Let |A' = S - {A}|. We call
the state of |A'| the context |C| of |A|. There is an edge from |A| to |B|
if there exists any context |C(A)| such that |Pr(B | C(A), A=0) != Pr(B |
C(A), A=1)|.
Args:
tpm (np.ndarray): The TPM in state-by-node, multidimensional form.
a (int): The index of the putative source node.
b (int): The index of the putative sink node.
Returns:
bool: ``True`` if the edge |A -> B| exists, ``False`` otherwise.
|
[
"Infer",
"the",
"presence",
"or",
"absence",
"of",
"an",
"edge",
"from",
"node",
"A",
"to",
"node",
"B",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/tpm.py#L72-L101
|
15,844
|
wmayner/pyphi
|
pyphi/tpm.py
|
infer_cm
|
def infer_cm(tpm):
"""Infer the connectivity matrix associated with a state-by-node TPM in
multidimensional form.
"""
network_size = tpm.shape[-1]
all_contexts = tuple(all_states(network_size - 1))
cm = np.empty((network_size, network_size), dtype=int)
for a, b in np.ndindex(cm.shape):
cm[a][b] = infer_edge(tpm, a, b, all_contexts)
return cm
|
python
|
def infer_cm(tpm):
"""Infer the connectivity matrix associated with a state-by-node TPM in
multidimensional form.
"""
network_size = tpm.shape[-1]
all_contexts = tuple(all_states(network_size - 1))
cm = np.empty((network_size, network_size), dtype=int)
for a, b in np.ndindex(cm.shape):
cm[a][b] = infer_edge(tpm, a, b, all_contexts)
return cm
|
[
"def",
"infer_cm",
"(",
"tpm",
")",
":",
"network_size",
"=",
"tpm",
".",
"shape",
"[",
"-",
"1",
"]",
"all_contexts",
"=",
"tuple",
"(",
"all_states",
"(",
"network_size",
"-",
"1",
")",
")",
"cm",
"=",
"np",
".",
"empty",
"(",
"(",
"network_size",
",",
"network_size",
")",
",",
"dtype",
"=",
"int",
")",
"for",
"a",
",",
"b",
"in",
"np",
".",
"ndindex",
"(",
"cm",
".",
"shape",
")",
":",
"cm",
"[",
"a",
"]",
"[",
"b",
"]",
"=",
"infer_edge",
"(",
"tpm",
",",
"a",
",",
"b",
",",
"all_contexts",
")",
"return",
"cm"
] |
Infer the connectivity matrix associated with a state-by-node TPM in
multidimensional form.
|
[
"Infer",
"the",
"connectivity",
"matrix",
"associated",
"with",
"a",
"state",
"-",
"by",
"-",
"node",
"TPM",
"in",
"multidimensional",
"form",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/tpm.py#L104-L113
|
15,845
|
wmayner/pyphi
|
pyphi/compute/parallel.py
|
get_num_processes
|
def get_num_processes():
"""Return the number of processes to use in parallel."""
cpu_count = multiprocessing.cpu_count()
if config.NUMBER_OF_CORES == 0:
raise ValueError(
'Invalid NUMBER_OF_CORES; value may not be 0.')
if config.NUMBER_OF_CORES > cpu_count:
log.info('Requesting %s cores; only %s available',
config.NUMBER_OF_CORES, cpu_count)
return cpu_count
if config.NUMBER_OF_CORES < 0:
num = cpu_count + config.NUMBER_OF_CORES + 1
if num <= 0:
raise ValueError(
'Invalid NUMBER_OF_CORES; negative value is too negative: '
'requesting {} cores, {} available.'.format(num, cpu_count))
return num
return config.NUMBER_OF_CORES
|
python
|
def get_num_processes():
"""Return the number of processes to use in parallel."""
cpu_count = multiprocessing.cpu_count()
if config.NUMBER_OF_CORES == 0:
raise ValueError(
'Invalid NUMBER_OF_CORES; value may not be 0.')
if config.NUMBER_OF_CORES > cpu_count:
log.info('Requesting %s cores; only %s available',
config.NUMBER_OF_CORES, cpu_count)
return cpu_count
if config.NUMBER_OF_CORES < 0:
num = cpu_count + config.NUMBER_OF_CORES + 1
if num <= 0:
raise ValueError(
'Invalid NUMBER_OF_CORES; negative value is too negative: '
'requesting {} cores, {} available.'.format(num, cpu_count))
return num
return config.NUMBER_OF_CORES
|
[
"def",
"get_num_processes",
"(",
")",
":",
"cpu_count",
"=",
"multiprocessing",
".",
"cpu_count",
"(",
")",
"if",
"config",
".",
"NUMBER_OF_CORES",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"'Invalid NUMBER_OF_CORES; value may not be 0.'",
")",
"if",
"config",
".",
"NUMBER_OF_CORES",
">",
"cpu_count",
":",
"log",
".",
"info",
"(",
"'Requesting %s cores; only %s available'",
",",
"config",
".",
"NUMBER_OF_CORES",
",",
"cpu_count",
")",
"return",
"cpu_count",
"if",
"config",
".",
"NUMBER_OF_CORES",
"<",
"0",
":",
"num",
"=",
"cpu_count",
"+",
"config",
".",
"NUMBER_OF_CORES",
"+",
"1",
"if",
"num",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"'Invalid NUMBER_OF_CORES; negative value is too negative: '",
"'requesting {} cores, {} available.'",
".",
"format",
"(",
"num",
",",
"cpu_count",
")",
")",
"return",
"num",
"return",
"config",
".",
"NUMBER_OF_CORES"
] |
Return the number of processes to use in parallel.
|
[
"Return",
"the",
"number",
"of",
"processes",
"to",
"use",
"in",
"parallel",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L24-L46
|
15,846
|
wmayner/pyphi
|
pyphi/compute/parallel.py
|
MapReduce.init_progress_bar
|
def init_progress_bar(self):
"""Initialize and return a progress bar."""
# Forked worker processes can't show progress bars.
disable = MapReduce._forked or not config.PROGRESS_BARS
# Don't materialize iterable unless we have to: huge iterables
# (e.g. of `KCuts`) eat memory.
if disable:
total = None
else:
self.iterable = list(self.iterable)
total = len(self.iterable)
return tqdm(total=total, disable=disable, leave=False,
desc=self.description)
|
python
|
def init_progress_bar(self):
"""Initialize and return a progress bar."""
# Forked worker processes can't show progress bars.
disable = MapReduce._forked or not config.PROGRESS_BARS
# Don't materialize iterable unless we have to: huge iterables
# (e.g. of `KCuts`) eat memory.
if disable:
total = None
else:
self.iterable = list(self.iterable)
total = len(self.iterable)
return tqdm(total=total, disable=disable, leave=False,
desc=self.description)
|
[
"def",
"init_progress_bar",
"(",
"self",
")",
":",
"# Forked worker processes can't show progress bars.",
"disable",
"=",
"MapReduce",
".",
"_forked",
"or",
"not",
"config",
".",
"PROGRESS_BARS",
"# Don't materialize iterable unless we have to: huge iterables",
"# (e.g. of `KCuts`) eat memory.",
"if",
"disable",
":",
"total",
"=",
"None",
"else",
":",
"self",
".",
"iterable",
"=",
"list",
"(",
"self",
".",
"iterable",
")",
"total",
"=",
"len",
"(",
"self",
".",
"iterable",
")",
"return",
"tqdm",
"(",
"total",
"=",
"total",
",",
"disable",
"=",
"disable",
",",
"leave",
"=",
"False",
",",
"desc",
"=",
"self",
".",
"description",
")"
] |
Initialize and return a progress bar.
|
[
"Initialize",
"and",
"return",
"a",
"progress",
"bar",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L144-L158
|
15,847
|
wmayner/pyphi
|
pyphi/compute/parallel.py
|
MapReduce.worker
|
def worker(compute, task_queue, result_queue, log_queue, complete,
*context):
"""A worker process, run by ``multiprocessing.Process``."""
try:
MapReduce._forked = True
log.debug('Worker process starting...')
configure_worker_logging(log_queue)
for obj in iter(task_queue.get, POISON_PILL):
if complete.is_set():
log.debug('Worker received signal - exiting early')
break
log.debug('Worker got %s', obj)
result_queue.put(compute(obj, *context))
log.debug('Worker finished %s', obj)
result_queue.put(POISON_PILL)
log.debug('Worker process exiting')
except Exception as e: # pylint: disable=broad-except
result_queue.put(ExceptionWrapper(e))
|
python
|
def worker(compute, task_queue, result_queue, log_queue, complete,
*context):
"""A worker process, run by ``multiprocessing.Process``."""
try:
MapReduce._forked = True
log.debug('Worker process starting...')
configure_worker_logging(log_queue)
for obj in iter(task_queue.get, POISON_PILL):
if complete.is_set():
log.debug('Worker received signal - exiting early')
break
log.debug('Worker got %s', obj)
result_queue.put(compute(obj, *context))
log.debug('Worker finished %s', obj)
result_queue.put(POISON_PILL)
log.debug('Worker process exiting')
except Exception as e: # pylint: disable=broad-except
result_queue.put(ExceptionWrapper(e))
|
[
"def",
"worker",
"(",
"compute",
",",
"task_queue",
",",
"result_queue",
",",
"log_queue",
",",
"complete",
",",
"*",
"context",
")",
":",
"try",
":",
"MapReduce",
".",
"_forked",
"=",
"True",
"log",
".",
"debug",
"(",
"'Worker process starting...'",
")",
"configure_worker_logging",
"(",
"log_queue",
")",
"for",
"obj",
"in",
"iter",
"(",
"task_queue",
".",
"get",
",",
"POISON_PILL",
")",
":",
"if",
"complete",
".",
"is_set",
"(",
")",
":",
"log",
".",
"debug",
"(",
"'Worker received signal - exiting early'",
")",
"break",
"log",
".",
"debug",
"(",
"'Worker got %s'",
",",
"obj",
")",
"result_queue",
".",
"put",
"(",
"compute",
"(",
"obj",
",",
"*",
"context",
")",
")",
"log",
".",
"debug",
"(",
"'Worker finished %s'",
",",
"obj",
")",
"result_queue",
".",
"put",
"(",
"POISON_PILL",
")",
"log",
".",
"debug",
"(",
"'Worker process exiting'",
")",
"except",
"Exception",
"as",
"e",
":",
"# pylint: disable=broad-except",
"result_queue",
".",
"put",
"(",
"ExceptionWrapper",
"(",
"e",
")",
")"
] |
A worker process, run by ``multiprocessing.Process``.
|
[
"A",
"worker",
"process",
"run",
"by",
"multiprocessing",
".",
"Process",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L161-L183
|
15,848
|
wmayner/pyphi
|
pyphi/compute/parallel.py
|
MapReduce.start_parallel
|
def start_parallel(self):
"""Initialize all queues and start the worker processes and the log
thread.
"""
self.num_processes = get_num_processes()
self.task_queue = multiprocessing.Queue(maxsize=Q_MAX_SIZE)
self.result_queue = multiprocessing.Queue()
self.log_queue = multiprocessing.Queue()
# Used to signal worker processes when a result is found that allows
# the computation to terminate early.
self.complete = multiprocessing.Event()
args = (self.compute, self.task_queue, self.result_queue,
self.log_queue, self.complete) + self.context
self.processes = [
multiprocessing.Process(target=self.worker, args=args, daemon=True)
for i in range(self.num_processes)]
for process in self.processes:
process.start()
self.log_thread = LogThread(self.log_queue)
self.log_thread.start()
self.initialize_tasks()
|
python
|
def start_parallel(self):
"""Initialize all queues and start the worker processes and the log
thread.
"""
self.num_processes = get_num_processes()
self.task_queue = multiprocessing.Queue(maxsize=Q_MAX_SIZE)
self.result_queue = multiprocessing.Queue()
self.log_queue = multiprocessing.Queue()
# Used to signal worker processes when a result is found that allows
# the computation to terminate early.
self.complete = multiprocessing.Event()
args = (self.compute, self.task_queue, self.result_queue,
self.log_queue, self.complete) + self.context
self.processes = [
multiprocessing.Process(target=self.worker, args=args, daemon=True)
for i in range(self.num_processes)]
for process in self.processes:
process.start()
self.log_thread = LogThread(self.log_queue)
self.log_thread.start()
self.initialize_tasks()
|
[
"def",
"start_parallel",
"(",
"self",
")",
":",
"self",
".",
"num_processes",
"=",
"get_num_processes",
"(",
")",
"self",
".",
"task_queue",
"=",
"multiprocessing",
".",
"Queue",
"(",
"maxsize",
"=",
"Q_MAX_SIZE",
")",
"self",
".",
"result_queue",
"=",
"multiprocessing",
".",
"Queue",
"(",
")",
"self",
".",
"log_queue",
"=",
"multiprocessing",
".",
"Queue",
"(",
")",
"# Used to signal worker processes when a result is found that allows",
"# the computation to terminate early.",
"self",
".",
"complete",
"=",
"multiprocessing",
".",
"Event",
"(",
")",
"args",
"=",
"(",
"self",
".",
"compute",
",",
"self",
".",
"task_queue",
",",
"self",
".",
"result_queue",
",",
"self",
".",
"log_queue",
",",
"self",
".",
"complete",
")",
"+",
"self",
".",
"context",
"self",
".",
"processes",
"=",
"[",
"multiprocessing",
".",
"Process",
"(",
"target",
"=",
"self",
".",
"worker",
",",
"args",
"=",
"args",
",",
"daemon",
"=",
"True",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"num_processes",
")",
"]",
"for",
"process",
"in",
"self",
".",
"processes",
":",
"process",
".",
"start",
"(",
")",
"self",
".",
"log_thread",
"=",
"LogThread",
"(",
"self",
".",
"log_queue",
")",
"self",
".",
"log_thread",
".",
"start",
"(",
")",
"self",
".",
"initialize_tasks",
"(",
")"
] |
Initialize all queues and start the worker processes and the log
thread.
|
[
"Initialize",
"all",
"queues",
"and",
"start",
"the",
"worker",
"processes",
"and",
"the",
"log",
"thread",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L185-L211
|
15,849
|
wmayner/pyphi
|
pyphi/compute/parallel.py
|
MapReduce.initialize_tasks
|
def initialize_tasks(self):
"""Load the input queue to capacity.
Overfilling causes a deadlock when `queue.put` blocks when
full, so further tasks are enqueued as results are returned.
"""
# Add a poison pill to shutdown each process.
self.tasks = chain(self.iterable, [POISON_PILL] * self.num_processes)
for task in islice(self.tasks, Q_MAX_SIZE):
log.debug('Putting %s on queue', task)
self.task_queue.put(task)
|
python
|
def initialize_tasks(self):
"""Load the input queue to capacity.
Overfilling causes a deadlock when `queue.put` blocks when
full, so further tasks are enqueued as results are returned.
"""
# Add a poison pill to shutdown each process.
self.tasks = chain(self.iterable, [POISON_PILL] * self.num_processes)
for task in islice(self.tasks, Q_MAX_SIZE):
log.debug('Putting %s on queue', task)
self.task_queue.put(task)
|
[
"def",
"initialize_tasks",
"(",
"self",
")",
":",
"# Add a poison pill to shutdown each process.",
"self",
".",
"tasks",
"=",
"chain",
"(",
"self",
".",
"iterable",
",",
"[",
"POISON_PILL",
"]",
"*",
"self",
".",
"num_processes",
")",
"for",
"task",
"in",
"islice",
"(",
"self",
".",
"tasks",
",",
"Q_MAX_SIZE",
")",
":",
"log",
".",
"debug",
"(",
"'Putting %s on queue'",
",",
"task",
")",
"self",
".",
"task_queue",
".",
"put",
"(",
"task",
")"
] |
Load the input queue to capacity.
Overfilling causes a deadlock when `queue.put` blocks when
full, so further tasks are enqueued as results are returned.
|
[
"Load",
"the",
"input",
"queue",
"to",
"capacity",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L213-L223
|
15,850
|
wmayner/pyphi
|
pyphi/compute/parallel.py
|
MapReduce.maybe_put_task
|
def maybe_put_task(self):
"""Enqueue the next task, if there are any waiting."""
try:
task = next(self.tasks)
except StopIteration:
pass
else:
log.debug('Putting %s on queue', task)
self.task_queue.put(task)
|
python
|
def maybe_put_task(self):
"""Enqueue the next task, if there are any waiting."""
try:
task = next(self.tasks)
except StopIteration:
pass
else:
log.debug('Putting %s on queue', task)
self.task_queue.put(task)
|
[
"def",
"maybe_put_task",
"(",
"self",
")",
":",
"try",
":",
"task",
"=",
"next",
"(",
"self",
".",
"tasks",
")",
"except",
"StopIteration",
":",
"pass",
"else",
":",
"log",
".",
"debug",
"(",
"'Putting %s on queue'",
",",
"task",
")",
"self",
".",
"task_queue",
".",
"put",
"(",
"task",
")"
] |
Enqueue the next task, if there are any waiting.
|
[
"Enqueue",
"the",
"next",
"task",
"if",
"there",
"are",
"any",
"waiting",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L225-L233
|
15,851
|
wmayner/pyphi
|
pyphi/compute/parallel.py
|
MapReduce.run_parallel
|
def run_parallel(self):
"""Perform the computation in parallel, reading results from the output
queue and passing them to ``process_result``.
"""
try:
self.start_parallel()
result = self.empty_result(*self.context)
while self.num_processes > 0:
r = self.result_queue.get()
self.maybe_put_task()
if r is POISON_PILL:
self.num_processes -= 1
elif isinstance(r, ExceptionWrapper):
r.reraise()
else:
result = self.process_result(r, result)
self.progress.update(1)
# Did `process_result` decide to terminate early?
if self.done:
self.complete.set()
self.finish_parallel()
except Exception:
raise
finally:
log.debug('Removing progress bar')
self.progress.close()
return result
|
python
|
def run_parallel(self):
"""Perform the computation in parallel, reading results from the output
queue and passing them to ``process_result``.
"""
try:
self.start_parallel()
result = self.empty_result(*self.context)
while self.num_processes > 0:
r = self.result_queue.get()
self.maybe_put_task()
if r is POISON_PILL:
self.num_processes -= 1
elif isinstance(r, ExceptionWrapper):
r.reraise()
else:
result = self.process_result(r, result)
self.progress.update(1)
# Did `process_result` decide to terminate early?
if self.done:
self.complete.set()
self.finish_parallel()
except Exception:
raise
finally:
log.debug('Removing progress bar')
self.progress.close()
return result
|
[
"def",
"run_parallel",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"start_parallel",
"(",
")",
"result",
"=",
"self",
".",
"empty_result",
"(",
"*",
"self",
".",
"context",
")",
"while",
"self",
".",
"num_processes",
">",
"0",
":",
"r",
"=",
"self",
".",
"result_queue",
".",
"get",
"(",
")",
"self",
".",
"maybe_put_task",
"(",
")",
"if",
"r",
"is",
"POISON_PILL",
":",
"self",
".",
"num_processes",
"-=",
"1",
"elif",
"isinstance",
"(",
"r",
",",
"ExceptionWrapper",
")",
":",
"r",
".",
"reraise",
"(",
")",
"else",
":",
"result",
"=",
"self",
".",
"process_result",
"(",
"r",
",",
"result",
")",
"self",
".",
"progress",
".",
"update",
"(",
"1",
")",
"# Did `process_result` decide to terminate early?",
"if",
"self",
".",
"done",
":",
"self",
".",
"complete",
".",
"set",
"(",
")",
"self",
".",
"finish_parallel",
"(",
")",
"except",
"Exception",
":",
"raise",
"finally",
":",
"log",
".",
"debug",
"(",
"'Removing progress bar'",
")",
"self",
".",
"progress",
".",
"close",
"(",
")",
"return",
"result"
] |
Perform the computation in parallel, reading results from the output
queue and passing them to ``process_result``.
|
[
"Perform",
"the",
"computation",
"in",
"parallel",
"reading",
"results",
"from",
"the",
"output",
"queue",
"and",
"passing",
"them",
"to",
"process_result",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L235-L269
|
15,852
|
wmayner/pyphi
|
pyphi/compute/parallel.py
|
MapReduce.finish_parallel
|
def finish_parallel(self):
"""Orderly shutdown of workers."""
for process in self.processes:
process.join()
# Shutdown the log thread
log.debug('Joining log thread')
self.log_queue.put(POISON_PILL)
self.log_thread.join()
self.log_queue.close()
# Close all queues
log.debug('Closing queues')
self.task_queue.close()
self.result_queue.close()
|
python
|
def finish_parallel(self):
"""Orderly shutdown of workers."""
for process in self.processes:
process.join()
# Shutdown the log thread
log.debug('Joining log thread')
self.log_queue.put(POISON_PILL)
self.log_thread.join()
self.log_queue.close()
# Close all queues
log.debug('Closing queues')
self.task_queue.close()
self.result_queue.close()
|
[
"def",
"finish_parallel",
"(",
"self",
")",
":",
"for",
"process",
"in",
"self",
".",
"processes",
":",
"process",
".",
"join",
"(",
")",
"# Shutdown the log thread",
"log",
".",
"debug",
"(",
"'Joining log thread'",
")",
"self",
".",
"log_queue",
".",
"put",
"(",
"POISON_PILL",
")",
"self",
".",
"log_thread",
".",
"join",
"(",
")",
"self",
".",
"log_queue",
".",
"close",
"(",
")",
"# Close all queues",
"log",
".",
"debug",
"(",
"'Closing queues'",
")",
"self",
".",
"task_queue",
".",
"close",
"(",
")",
"self",
".",
"result_queue",
".",
"close",
"(",
")"
] |
Orderly shutdown of workers.
|
[
"Orderly",
"shutdown",
"of",
"workers",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L271-L285
|
15,853
|
wmayner/pyphi
|
pyphi/compute/parallel.py
|
MapReduce.run_sequential
|
def run_sequential(self):
"""Perform the computation sequentially, only holding two computed
objects in memory at a time.
"""
try:
result = self.empty_result(*self.context)
for obj in self.iterable:
r = self.compute(obj, *self.context)
result = self.process_result(r, result)
self.progress.update(1)
# Short-circuited?
if self.done:
break
except Exception as e:
raise e
finally:
self.progress.close()
return result
|
python
|
def run_sequential(self):
"""Perform the computation sequentially, only holding two computed
objects in memory at a time.
"""
try:
result = self.empty_result(*self.context)
for obj in self.iterable:
r = self.compute(obj, *self.context)
result = self.process_result(r, result)
self.progress.update(1)
# Short-circuited?
if self.done:
break
except Exception as e:
raise e
finally:
self.progress.close()
return result
|
[
"def",
"run_sequential",
"(",
"self",
")",
":",
"try",
":",
"result",
"=",
"self",
".",
"empty_result",
"(",
"*",
"self",
".",
"context",
")",
"for",
"obj",
"in",
"self",
".",
"iterable",
":",
"r",
"=",
"self",
".",
"compute",
"(",
"obj",
",",
"*",
"self",
".",
"context",
")",
"result",
"=",
"self",
".",
"process_result",
"(",
"r",
",",
"result",
")",
"self",
".",
"progress",
".",
"update",
"(",
"1",
")",
"# Short-circuited?",
"if",
"self",
".",
"done",
":",
"break",
"except",
"Exception",
"as",
"e",
":",
"raise",
"e",
"finally",
":",
"self",
".",
"progress",
".",
"close",
"(",
")",
"return",
"result"
] |
Perform the computation sequentially, only holding two computed
objects in memory at a time.
|
[
"Perform",
"the",
"computation",
"sequentially",
"only",
"holding",
"two",
"computed",
"objects",
"in",
"memory",
"at",
"a",
"time",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L287-L307
|
15,854
|
wmayner/pyphi
|
pyphi/conf.py
|
configure_logging
|
def configure_logging(conf):
"""Reconfigure PyPhi logging based on the current configuration."""
logging.config.dictConfig({
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': '%(asctime)s [%(name)s] %(levelname)s '
'%(processName)s: %(message)s'
}
},
'handlers': {
'file': {
'level': conf.LOG_FILE_LEVEL,
'filename': conf.LOG_FILE,
'class': 'logging.FileHandler',
'formatter': 'standard',
},
'stdout': {
'level': conf.LOG_STDOUT_LEVEL,
'class': 'pyphi.log.TqdmHandler',
'formatter': 'standard',
}
},
'root': {
'level': 'DEBUG',
'handlers': (['file'] if conf.LOG_FILE_LEVEL else []) +
(['stdout'] if conf.LOG_STDOUT_LEVEL else [])
}
})
|
python
|
def configure_logging(conf):
"""Reconfigure PyPhi logging based on the current configuration."""
logging.config.dictConfig({
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': '%(asctime)s [%(name)s] %(levelname)s '
'%(processName)s: %(message)s'
}
},
'handlers': {
'file': {
'level': conf.LOG_FILE_LEVEL,
'filename': conf.LOG_FILE,
'class': 'logging.FileHandler',
'formatter': 'standard',
},
'stdout': {
'level': conf.LOG_STDOUT_LEVEL,
'class': 'pyphi.log.TqdmHandler',
'formatter': 'standard',
}
},
'root': {
'level': 'DEBUG',
'handlers': (['file'] if conf.LOG_FILE_LEVEL else []) +
(['stdout'] if conf.LOG_STDOUT_LEVEL else [])
}
})
|
[
"def",
"configure_logging",
"(",
"conf",
")",
":",
"logging",
".",
"config",
".",
"dictConfig",
"(",
"{",
"'version'",
":",
"1",
",",
"'disable_existing_loggers'",
":",
"False",
",",
"'formatters'",
":",
"{",
"'standard'",
":",
"{",
"'format'",
":",
"'%(asctime)s [%(name)s] %(levelname)s '",
"'%(processName)s: %(message)s'",
"}",
"}",
",",
"'handlers'",
":",
"{",
"'file'",
":",
"{",
"'level'",
":",
"conf",
".",
"LOG_FILE_LEVEL",
",",
"'filename'",
":",
"conf",
".",
"LOG_FILE",
",",
"'class'",
":",
"'logging.FileHandler'",
",",
"'formatter'",
":",
"'standard'",
",",
"}",
",",
"'stdout'",
":",
"{",
"'level'",
":",
"conf",
".",
"LOG_STDOUT_LEVEL",
",",
"'class'",
":",
"'pyphi.log.TqdmHandler'",
",",
"'formatter'",
":",
"'standard'",
",",
"}",
"}",
",",
"'root'",
":",
"{",
"'level'",
":",
"'DEBUG'",
",",
"'handlers'",
":",
"(",
"[",
"'file'",
"]",
"if",
"conf",
".",
"LOG_FILE_LEVEL",
"else",
"[",
"]",
")",
"+",
"(",
"[",
"'stdout'",
"]",
"if",
"conf",
".",
"LOG_STDOUT_LEVEL",
"else",
"[",
"]",
")",
"}",
"}",
")"
] |
Reconfigure PyPhi logging based on the current configuration.
|
[
"Reconfigure",
"PyPhi",
"logging",
"based",
"on",
"the",
"current",
"configuration",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L328-L357
|
15,855
|
wmayner/pyphi
|
pyphi/conf.py
|
Option._validate
|
def _validate(self, value):
"""Validate the new value."""
if self.values and value not in self.values:
raise ValueError(
'{} is not a valid value for {}'.format(value, self.name))
|
python
|
def _validate(self, value):
"""Validate the new value."""
if self.values and value not in self.values:
raise ValueError(
'{} is not a valid value for {}'.format(value, self.name))
|
[
"def",
"_validate",
"(",
"self",
",",
"value",
")",
":",
"if",
"self",
".",
"values",
"and",
"value",
"not",
"in",
"self",
".",
"values",
":",
"raise",
"ValueError",
"(",
"'{} is not a valid value for {}'",
".",
"format",
"(",
"value",
",",
"self",
".",
"name",
")",
")"
] |
Validate the new value.
|
[
"Validate",
"the",
"new",
"value",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L201-L205
|
15,856
|
wmayner/pyphi
|
pyphi/conf.py
|
Config.options
|
def options(cls):
"""Return a dictionary of the ``Option`` objects for this config."""
return {k: v for k, v in cls.__dict__.items() if isinstance(v, Option)}
|
python
|
def options(cls):
"""Return a dictionary of the ``Option`` objects for this config."""
return {k: v for k, v in cls.__dict__.items() if isinstance(v, Option)}
|
[
"def",
"options",
"(",
"cls",
")",
":",
"return",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"cls",
".",
"__dict__",
".",
"items",
"(",
")",
"if",
"isinstance",
"(",
"v",
",",
"Option",
")",
"}"
] |
Return a dictionary of the ``Option`` objects for this config.
|
[
"Return",
"a",
"dictionary",
"of",
"the",
"Option",
"objects",
"for",
"this",
"config",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L262-L264
|
15,857
|
wmayner/pyphi
|
pyphi/conf.py
|
Config.defaults
|
def defaults(self):
"""Return the default values of this configuration."""
return {k: v.default for k, v in self.options().items()}
|
python
|
def defaults(self):
"""Return the default values of this configuration."""
return {k: v.default for k, v in self.options().items()}
|
[
"def",
"defaults",
"(",
"self",
")",
":",
"return",
"{",
"k",
":",
"v",
".",
"default",
"for",
"k",
",",
"v",
"in",
"self",
".",
"options",
"(",
")",
".",
"items",
"(",
")",
"}"
] |
Return the default values of this configuration.
|
[
"Return",
"the",
"default",
"values",
"of",
"this",
"configuration",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L266-L268
|
15,858
|
wmayner/pyphi
|
pyphi/conf.py
|
Config.load_dict
|
def load_dict(self, dct):
"""Load a dictionary of configuration values."""
for k, v in dct.items():
setattr(self, k, v)
|
python
|
def load_dict(self, dct):
"""Load a dictionary of configuration values."""
for k, v in dct.items():
setattr(self, k, v)
|
[
"def",
"load_dict",
"(",
"self",
",",
"dct",
")",
":",
"for",
"k",
",",
"v",
"in",
"dct",
".",
"items",
"(",
")",
":",
"setattr",
"(",
"self",
",",
"k",
",",
"v",
")"
] |
Load a dictionary of configuration values.
|
[
"Load",
"a",
"dictionary",
"of",
"configuration",
"values",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L270-L273
|
15,859
|
wmayner/pyphi
|
pyphi/conf.py
|
Config.load_file
|
def load_file(self, filename):
"""Load config from a YAML file."""
filename = os.path.abspath(filename)
with open(filename) as f:
self.load_dict(yaml.load(f))
self._loaded_files.append(filename)
|
python
|
def load_file(self, filename):
"""Load config from a YAML file."""
filename = os.path.abspath(filename)
with open(filename) as f:
self.load_dict(yaml.load(f))
self._loaded_files.append(filename)
|
[
"def",
"load_file",
"(",
"self",
",",
"filename",
")",
":",
"filename",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"filename",
")",
"with",
"open",
"(",
"filename",
")",
"as",
"f",
":",
"self",
".",
"load_dict",
"(",
"yaml",
".",
"load",
"(",
"f",
")",
")",
"self",
".",
"_loaded_files",
".",
"append",
"(",
"filename",
")"
] |
Load config from a YAML file.
|
[
"Load",
"config",
"from",
"a",
"YAML",
"file",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L275-L282
|
15,860
|
wmayner/pyphi
|
pyphi/conf.py
|
PyphiConfig.log
|
def log(self):
"""Log current settings."""
log.info('PyPhi v%s', __about__.__version__)
if self._loaded_files:
log.info('Loaded configuration from %s', self._loaded_files)
else:
log.info('Using default configuration (no configuration file '
'provided)')
log.info('Current PyPhi configuration:\n %s', str(self))
|
python
|
def log(self):
"""Log current settings."""
log.info('PyPhi v%s', __about__.__version__)
if self._loaded_files:
log.info('Loaded configuration from %s', self._loaded_files)
else:
log.info('Using default configuration (no configuration file '
'provided)')
log.info('Current PyPhi configuration:\n %s', str(self))
|
[
"def",
"log",
"(",
"self",
")",
":",
"log",
".",
"info",
"(",
"'PyPhi v%s'",
",",
"__about__",
".",
"__version__",
")",
"if",
"self",
".",
"_loaded_files",
":",
"log",
".",
"info",
"(",
"'Loaded configuration from %s'",
",",
"self",
".",
"_loaded_files",
")",
"else",
":",
"log",
".",
"info",
"(",
"'Using default configuration (no configuration file '",
"'provided)'",
")",
"log",
".",
"info",
"(",
"'Current PyPhi configuration:\\n %s'",
",",
"str",
"(",
"self",
")",
")"
] |
Log current settings.
|
[
"Log",
"current",
"settings",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L636-L644
|
15,861
|
wmayner/pyphi
|
pyphi/convert.py
|
be2le_state_by_state
|
def be2le_state_by_state(tpm):
"""Convert a state-by-state TPM from big-endian to little-endian or vice
versa.
Args:
tpm (np.ndarray): A state-by-state TPM.
Returns:
np.ndarray: The state-by-state TPM in the other indexing format.
Example:
>>> tpm = np.arange(16).reshape([4, 4])
>>> be2le_state_by_state(tpm)
array([[ 0., 1., 2., 3.],
[ 8., 9., 10., 11.],
[ 4., 5., 6., 7.],
[12., 13., 14., 15.]])
"""
le = np.empty(tpm.shape)
N = tpm.shape[0]
n = int(log2(N))
for i in range(N):
le[i, :] = tpm[be2le(i, n), :]
return le
|
python
|
def be2le_state_by_state(tpm):
"""Convert a state-by-state TPM from big-endian to little-endian or vice
versa.
Args:
tpm (np.ndarray): A state-by-state TPM.
Returns:
np.ndarray: The state-by-state TPM in the other indexing format.
Example:
>>> tpm = np.arange(16).reshape([4, 4])
>>> be2le_state_by_state(tpm)
array([[ 0., 1., 2., 3.],
[ 8., 9., 10., 11.],
[ 4., 5., 6., 7.],
[12., 13., 14., 15.]])
"""
le = np.empty(tpm.shape)
N = tpm.shape[0]
n = int(log2(N))
for i in range(N):
le[i, :] = tpm[be2le(i, n), :]
return le
|
[
"def",
"be2le_state_by_state",
"(",
"tpm",
")",
":",
"le",
"=",
"np",
".",
"empty",
"(",
"tpm",
".",
"shape",
")",
"N",
"=",
"tpm",
".",
"shape",
"[",
"0",
"]",
"n",
"=",
"int",
"(",
"log2",
"(",
"N",
")",
")",
"for",
"i",
"in",
"range",
"(",
"N",
")",
":",
"le",
"[",
"i",
",",
":",
"]",
"=",
"tpm",
"[",
"be2le",
"(",
"i",
",",
"n",
")",
",",
":",
"]",
"return",
"le"
] |
Convert a state-by-state TPM from big-endian to little-endian or vice
versa.
Args:
tpm (np.ndarray): A state-by-state TPM.
Returns:
np.ndarray: The state-by-state TPM in the other indexing format.
Example:
>>> tpm = np.arange(16).reshape([4, 4])
>>> be2le_state_by_state(tpm)
array([[ 0., 1., 2., 3.],
[ 8., 9., 10., 11.],
[ 4., 5., 6., 7.],
[12., 13., 14., 15.]])
|
[
"Convert",
"a",
"state",
"-",
"by",
"-",
"state",
"TPM",
"from",
"big",
"-",
"endian",
"to",
"little",
"-",
"endian",
"or",
"vice",
"versa",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/convert.py#L147-L170
|
15,862
|
wmayner/pyphi
|
pyphi/convert.py
|
to_multidimensional
|
def to_multidimensional(tpm):
"""Reshape a state-by-node TPM to the multidimensional form.
See documentation for the |Network| object for more information on TPM
formats.
"""
# Cast to np.array.
tpm = np.array(tpm)
# Get the number of nodes.
N = tpm.shape[-1]
# Reshape. We use Fortran ordering here so that the rows use the
# little-endian convention (least-significant bits correspond to low-index
# nodes). Note that this does not change the actual memory layout (C- or
# Fortran-contiguous), so there is no performance loss.
return tpm.reshape([2] * N + [N], order="F").astype(float)
|
python
|
def to_multidimensional(tpm):
"""Reshape a state-by-node TPM to the multidimensional form.
See documentation for the |Network| object for more information on TPM
formats.
"""
# Cast to np.array.
tpm = np.array(tpm)
# Get the number of nodes.
N = tpm.shape[-1]
# Reshape. We use Fortran ordering here so that the rows use the
# little-endian convention (least-significant bits correspond to low-index
# nodes). Note that this does not change the actual memory layout (C- or
# Fortran-contiguous), so there is no performance loss.
return tpm.reshape([2] * N + [N], order="F").astype(float)
|
[
"def",
"to_multidimensional",
"(",
"tpm",
")",
":",
"# Cast to np.array.",
"tpm",
"=",
"np",
".",
"array",
"(",
"tpm",
")",
"# Get the number of nodes.",
"N",
"=",
"tpm",
".",
"shape",
"[",
"-",
"1",
"]",
"# Reshape. We use Fortran ordering here so that the rows use the",
"# little-endian convention (least-significant bits correspond to low-index",
"# nodes). Note that this does not change the actual memory layout (C- or",
"# Fortran-contiguous), so there is no performance loss.",
"return",
"tpm",
".",
"reshape",
"(",
"[",
"2",
"]",
"*",
"N",
"+",
"[",
"N",
"]",
",",
"order",
"=",
"\"F\"",
")",
".",
"astype",
"(",
"float",
")"
] |
Reshape a state-by-node TPM to the multidimensional form.
See documentation for the |Network| object for more information on TPM
formats.
|
[
"Reshape",
"a",
"state",
"-",
"by",
"-",
"node",
"TPM",
"to",
"the",
"multidimensional",
"form",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/convert.py#L176-L190
|
15,863
|
wmayner/pyphi
|
pyphi/convert.py
|
state_by_state2state_by_node
|
def state_by_state2state_by_node(tpm):
"""Convert a state-by-state TPM to a state-by-node TPM.
.. danger::
Many nondeterministic state-by-state TPMs can be represented by a
single a state-by-state TPM. However, the mapping can be made to be
one-to-one if we assume the state-by-state TPM is conditionally
independent, as this function does. **If the given TPM is not
conditionally independent, the conditional dependencies will be
silently lost.**
.. note::
The indices of the rows and columns of the state-by-state TPM are
assumed to follow the little-endian convention. The indices of the rows
of the resulting state-by-node TPM also follow the little-endian
convention. See the documentation on PyPhi the :ref:`tpm-conventions`
more information.
Args:
tpm (list[list] or np.ndarray): A square state-by-state TPM with row
and column indices following the little-endian convention.
Returns:
np.ndarray: A state-by-node TPM, with row indices following the
little-endian convention.
Example:
>>> tpm = np.array([[0.5, 0.5, 0.0, 0.0],
... [0.0, 1.0, 0.0, 0.0],
... [0.0, 0.2, 0.0, 0.8],
... [0.0, 0.3, 0.7, 0.0]])
>>> state_by_state2state_by_node(tpm)
array([[[0.5, 0. ],
[1. , 0.8]],
<BLANKLINE>
[[1. , 0. ],
[0.3, 0.7]]])
"""
# Cast to np.array.
tpm = np.array(tpm)
# Get the number of states from the length of one side of the TPM.
S = tpm.shape[-1]
# Get the number of nodes from the number of states.
N = int(log2(S))
# Initialize the new state-by node TPM.
sbn_tpm = np.zeros(([2] * N + [N]))
# Map indices to state-tuples with the little-endian convention.
states = {i: le_index2state(i, N) for i in range(S)}
# Get an array for each node with 1 in positions that correspond to that
# node being on in the next state, and a 0 otherwise.
node_on = np.array([[states[i][n] for i in range(S)] for n in range(N)])
on_probabilities = [tpm * node_on[n] for n in range(N)]
for i, state in states.items():
# Get the probability of each node being on given the previous state i,
# i.e., a row of the state-by-node TPM.
# Assign that row to the ith state in the state-by-node TPM.
sbn_tpm[state] = [np.sum(on_probabilities[n][i]) for n in range(N)]
return sbn_tpm
|
python
|
def state_by_state2state_by_node(tpm):
"""Convert a state-by-state TPM to a state-by-node TPM.
.. danger::
Many nondeterministic state-by-state TPMs can be represented by a
single a state-by-state TPM. However, the mapping can be made to be
one-to-one if we assume the state-by-state TPM is conditionally
independent, as this function does. **If the given TPM is not
conditionally independent, the conditional dependencies will be
silently lost.**
.. note::
The indices of the rows and columns of the state-by-state TPM are
assumed to follow the little-endian convention. The indices of the rows
of the resulting state-by-node TPM also follow the little-endian
convention. See the documentation on PyPhi the :ref:`tpm-conventions`
more information.
Args:
tpm (list[list] or np.ndarray): A square state-by-state TPM with row
and column indices following the little-endian convention.
Returns:
np.ndarray: A state-by-node TPM, with row indices following the
little-endian convention.
Example:
>>> tpm = np.array([[0.5, 0.5, 0.0, 0.0],
... [0.0, 1.0, 0.0, 0.0],
... [0.0, 0.2, 0.0, 0.8],
... [0.0, 0.3, 0.7, 0.0]])
>>> state_by_state2state_by_node(tpm)
array([[[0.5, 0. ],
[1. , 0.8]],
<BLANKLINE>
[[1. , 0. ],
[0.3, 0.7]]])
"""
# Cast to np.array.
tpm = np.array(tpm)
# Get the number of states from the length of one side of the TPM.
S = tpm.shape[-1]
# Get the number of nodes from the number of states.
N = int(log2(S))
# Initialize the new state-by node TPM.
sbn_tpm = np.zeros(([2] * N + [N]))
# Map indices to state-tuples with the little-endian convention.
states = {i: le_index2state(i, N) for i in range(S)}
# Get an array for each node with 1 in positions that correspond to that
# node being on in the next state, and a 0 otherwise.
node_on = np.array([[states[i][n] for i in range(S)] for n in range(N)])
on_probabilities = [tpm * node_on[n] for n in range(N)]
for i, state in states.items():
# Get the probability of each node being on given the previous state i,
# i.e., a row of the state-by-node TPM.
# Assign that row to the ith state in the state-by-node TPM.
sbn_tpm[state] = [np.sum(on_probabilities[n][i]) for n in range(N)]
return sbn_tpm
|
[
"def",
"state_by_state2state_by_node",
"(",
"tpm",
")",
":",
"# Cast to np.array.",
"tpm",
"=",
"np",
".",
"array",
"(",
"tpm",
")",
"# Get the number of states from the length of one side of the TPM.",
"S",
"=",
"tpm",
".",
"shape",
"[",
"-",
"1",
"]",
"# Get the number of nodes from the number of states.",
"N",
"=",
"int",
"(",
"log2",
"(",
"S",
")",
")",
"# Initialize the new state-by node TPM.",
"sbn_tpm",
"=",
"np",
".",
"zeros",
"(",
"(",
"[",
"2",
"]",
"*",
"N",
"+",
"[",
"N",
"]",
")",
")",
"# Map indices to state-tuples with the little-endian convention.",
"states",
"=",
"{",
"i",
":",
"le_index2state",
"(",
"i",
",",
"N",
")",
"for",
"i",
"in",
"range",
"(",
"S",
")",
"}",
"# Get an array for each node with 1 in positions that correspond to that",
"# node being on in the next state, and a 0 otherwise.",
"node_on",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"states",
"[",
"i",
"]",
"[",
"n",
"]",
"for",
"i",
"in",
"range",
"(",
"S",
")",
"]",
"for",
"n",
"in",
"range",
"(",
"N",
")",
"]",
")",
"on_probabilities",
"=",
"[",
"tpm",
"*",
"node_on",
"[",
"n",
"]",
"for",
"n",
"in",
"range",
"(",
"N",
")",
"]",
"for",
"i",
",",
"state",
"in",
"states",
".",
"items",
"(",
")",
":",
"# Get the probability of each node being on given the previous state i,",
"# i.e., a row of the state-by-node TPM.",
"# Assign that row to the ith state in the state-by-node TPM.",
"sbn_tpm",
"[",
"state",
"]",
"=",
"[",
"np",
".",
"sum",
"(",
"on_probabilities",
"[",
"n",
"]",
"[",
"i",
"]",
")",
"for",
"n",
"in",
"range",
"(",
"N",
")",
"]",
"return",
"sbn_tpm"
] |
Convert a state-by-state TPM to a state-by-node TPM.
.. danger::
Many nondeterministic state-by-state TPMs can be represented by a
single a state-by-state TPM. However, the mapping can be made to be
one-to-one if we assume the state-by-state TPM is conditionally
independent, as this function does. **If the given TPM is not
conditionally independent, the conditional dependencies will be
silently lost.**
.. note::
The indices of the rows and columns of the state-by-state TPM are
assumed to follow the little-endian convention. The indices of the rows
of the resulting state-by-node TPM also follow the little-endian
convention. See the documentation on PyPhi the :ref:`tpm-conventions`
more information.
Args:
tpm (list[list] or np.ndarray): A square state-by-state TPM with row
and column indices following the little-endian convention.
Returns:
np.ndarray: A state-by-node TPM, with row indices following the
little-endian convention.
Example:
>>> tpm = np.array([[0.5, 0.5, 0.0, 0.0],
... [0.0, 1.0, 0.0, 0.0],
... [0.0, 0.2, 0.0, 0.8],
... [0.0, 0.3, 0.7, 0.0]])
>>> state_by_state2state_by_node(tpm)
array([[[0.5, 0. ],
[1. , 0.8]],
<BLANKLINE>
[[1. , 0. ],
[0.3, 0.7]]])
|
[
"Convert",
"a",
"state",
"-",
"by",
"-",
"state",
"TPM",
"to",
"a",
"state",
"-",
"by",
"-",
"node",
"TPM",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/convert.py#L207-L264
|
15,864
|
wmayner/pyphi
|
pyphi/convert.py
|
state_by_node2state_by_state
|
def state_by_node2state_by_state(tpm):
"""Convert a state-by-node TPM to a state-by-state TPM.
.. important::
A nondeterministic state-by-node TPM can have more than one
representation as a state-by-state TPM. However, the mapping can be
made to be one-to-one if we assume the TPMs to be conditionally
independent. Therefore, **this function returns the corresponding
conditionally independent state-by-state TPM.**
.. note::
The indices of the rows of the state-by-node TPM are assumed to follow
the little-endian convention, while the indices of the columns follow
the big-endian convention. The indices of the rows and columns of the
resulting state-by-state TPM both follow the big-endian convention. See
the documentation on PyPhi :ref:`tpm-conventions` for more info.
Args:
tpm (list[list] or np.ndarray): A state-by-node TPM with row indices
following the little-endian convention and column indices following
the big-endian convention.
Returns:
np.ndarray: A state-by-state TPM, with both row and column indices
following the big-endian convention.
>>> tpm = np.array([[1, 1, 0],
... [0, 0, 1],
... [0, 1, 1],
... [1, 0, 0],
... [0, 0, 1],
... [1, 0, 0],
... [1, 1, 1],
... [1, 0, 1]])
>>> state_by_node2state_by_state(tpm)
array([[0., 0., 0., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 0.],
[0., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 0., 0., 0.],
[0., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 1.],
[0., 0., 0., 0., 0., 1., 0., 0.]])
"""
# Cast to np.array.
tpm = np.array(tpm)
# Convert to multidimensional form.
tpm = to_multidimensional(tpm)
# Get the number of nodes from the last dimension of the TPM.
N = tpm.shape[-1]
# Get the number of states.
S = 2**N
# Initialize the state-by-state TPM.
sbs_tpm = np.zeros((S, S))
if not np.any(np.logical_and(tpm < 1, tpm > 0)):
# TPM is deterministic.
for previous_state_index in range(S):
# Use the little-endian convention to get the row and column
# indices.
previous_state = le_index2state(previous_state_index, N)
current_state_index = state2le_index(tpm[previous_state])
sbs_tpm[previous_state_index, current_state_index] = 1
else:
# TPM is nondeterministic.
for previous_state_index in range(S):
# Use the little-endian convention to get the row and column
# indices.
previous_state = le_index2state(previous_state_index, N)
marginal_tpm = tpm[previous_state]
for current_state_index in range(S):
current_state = np.array(
[i for i in le_index2state(current_state_index, N)])
sbs_tpm[previous_state_index, current_state_index] = (
np.prod(marginal_tpm[current_state == 1]) *
np.prod(1 - marginal_tpm[current_state == 0]))
return sbs_tpm
|
python
|
def state_by_node2state_by_state(tpm):
"""Convert a state-by-node TPM to a state-by-state TPM.
.. important::
A nondeterministic state-by-node TPM can have more than one
representation as a state-by-state TPM. However, the mapping can be
made to be one-to-one if we assume the TPMs to be conditionally
independent. Therefore, **this function returns the corresponding
conditionally independent state-by-state TPM.**
.. note::
The indices of the rows of the state-by-node TPM are assumed to follow
the little-endian convention, while the indices of the columns follow
the big-endian convention. The indices of the rows and columns of the
resulting state-by-state TPM both follow the big-endian convention. See
the documentation on PyPhi :ref:`tpm-conventions` for more info.
Args:
tpm (list[list] or np.ndarray): A state-by-node TPM with row indices
following the little-endian convention and column indices following
the big-endian convention.
Returns:
np.ndarray: A state-by-state TPM, with both row and column indices
following the big-endian convention.
>>> tpm = np.array([[1, 1, 0],
... [0, 0, 1],
... [0, 1, 1],
... [1, 0, 0],
... [0, 0, 1],
... [1, 0, 0],
... [1, 1, 1],
... [1, 0, 1]])
>>> state_by_node2state_by_state(tpm)
array([[0., 0., 0., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 0.],
[0., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 0., 0., 0.],
[0., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 1.],
[0., 0., 0., 0., 0., 1., 0., 0.]])
"""
# Cast to np.array.
tpm = np.array(tpm)
# Convert to multidimensional form.
tpm = to_multidimensional(tpm)
# Get the number of nodes from the last dimension of the TPM.
N = tpm.shape[-1]
# Get the number of states.
S = 2**N
# Initialize the state-by-state TPM.
sbs_tpm = np.zeros((S, S))
if not np.any(np.logical_and(tpm < 1, tpm > 0)):
# TPM is deterministic.
for previous_state_index in range(S):
# Use the little-endian convention to get the row and column
# indices.
previous_state = le_index2state(previous_state_index, N)
current_state_index = state2le_index(tpm[previous_state])
sbs_tpm[previous_state_index, current_state_index] = 1
else:
# TPM is nondeterministic.
for previous_state_index in range(S):
# Use the little-endian convention to get the row and column
# indices.
previous_state = le_index2state(previous_state_index, N)
marginal_tpm = tpm[previous_state]
for current_state_index in range(S):
current_state = np.array(
[i for i in le_index2state(current_state_index, N)])
sbs_tpm[previous_state_index, current_state_index] = (
np.prod(marginal_tpm[current_state == 1]) *
np.prod(1 - marginal_tpm[current_state == 0]))
return sbs_tpm
|
[
"def",
"state_by_node2state_by_state",
"(",
"tpm",
")",
":",
"# Cast to np.array.",
"tpm",
"=",
"np",
".",
"array",
"(",
"tpm",
")",
"# Convert to multidimensional form.",
"tpm",
"=",
"to_multidimensional",
"(",
"tpm",
")",
"# Get the number of nodes from the last dimension of the TPM.",
"N",
"=",
"tpm",
".",
"shape",
"[",
"-",
"1",
"]",
"# Get the number of states.",
"S",
"=",
"2",
"**",
"N",
"# Initialize the state-by-state TPM.",
"sbs_tpm",
"=",
"np",
".",
"zeros",
"(",
"(",
"S",
",",
"S",
")",
")",
"if",
"not",
"np",
".",
"any",
"(",
"np",
".",
"logical_and",
"(",
"tpm",
"<",
"1",
",",
"tpm",
">",
"0",
")",
")",
":",
"# TPM is deterministic.",
"for",
"previous_state_index",
"in",
"range",
"(",
"S",
")",
":",
"# Use the little-endian convention to get the row and column",
"# indices.",
"previous_state",
"=",
"le_index2state",
"(",
"previous_state_index",
",",
"N",
")",
"current_state_index",
"=",
"state2le_index",
"(",
"tpm",
"[",
"previous_state",
"]",
")",
"sbs_tpm",
"[",
"previous_state_index",
",",
"current_state_index",
"]",
"=",
"1",
"else",
":",
"# TPM is nondeterministic.",
"for",
"previous_state_index",
"in",
"range",
"(",
"S",
")",
":",
"# Use the little-endian convention to get the row and column",
"# indices.",
"previous_state",
"=",
"le_index2state",
"(",
"previous_state_index",
",",
"N",
")",
"marginal_tpm",
"=",
"tpm",
"[",
"previous_state",
"]",
"for",
"current_state_index",
"in",
"range",
"(",
"S",
")",
":",
"current_state",
"=",
"np",
".",
"array",
"(",
"[",
"i",
"for",
"i",
"in",
"le_index2state",
"(",
"current_state_index",
",",
"N",
")",
"]",
")",
"sbs_tpm",
"[",
"previous_state_index",
",",
"current_state_index",
"]",
"=",
"(",
"np",
".",
"prod",
"(",
"marginal_tpm",
"[",
"current_state",
"==",
"1",
"]",
")",
"*",
"np",
".",
"prod",
"(",
"1",
"-",
"marginal_tpm",
"[",
"current_state",
"==",
"0",
"]",
")",
")",
"return",
"sbs_tpm"
] |
Convert a state-by-node TPM to a state-by-state TPM.
.. important::
A nondeterministic state-by-node TPM can have more than one
representation as a state-by-state TPM. However, the mapping can be
made to be one-to-one if we assume the TPMs to be conditionally
independent. Therefore, **this function returns the corresponding
conditionally independent state-by-state TPM.**
.. note::
The indices of the rows of the state-by-node TPM are assumed to follow
the little-endian convention, while the indices of the columns follow
the big-endian convention. The indices of the rows and columns of the
resulting state-by-state TPM both follow the big-endian convention. See
the documentation on PyPhi :ref:`tpm-conventions` for more info.
Args:
tpm (list[list] or np.ndarray): A state-by-node TPM with row indices
following the little-endian convention and column indices following
the big-endian convention.
Returns:
np.ndarray: A state-by-state TPM, with both row and column indices
following the big-endian convention.
>>> tpm = np.array([[1, 1, 0],
... [0, 0, 1],
... [0, 1, 1],
... [1, 0, 0],
... [0, 0, 1],
... [1, 0, 0],
... [1, 1, 1],
... [1, 0, 1]])
>>> state_by_node2state_by_state(tpm)
array([[0., 0., 0., 1., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 1., 0.],
[0., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 1., 0., 0., 0.],
[0., 1., 0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0., 0., 1.],
[0., 0., 0., 0., 0., 1., 0., 0.]])
|
[
"Convert",
"a",
"state",
"-",
"by",
"-",
"node",
"TPM",
"to",
"a",
"state",
"-",
"by",
"-",
"state",
"TPM",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/convert.py#L270-L345
|
15,865
|
wmayner/pyphi
|
profiling/code_to_profile.py
|
load_json_network
|
def load_json_network(json_dict):
"""Load a network from a json file"""
network = pyphi.Network.from_json(json_dict['network'])
state = json_dict['state']
return (network, state)
|
python
|
def load_json_network(json_dict):
"""Load a network from a json file"""
network = pyphi.Network.from_json(json_dict['network'])
state = json_dict['state']
return (network, state)
|
[
"def",
"load_json_network",
"(",
"json_dict",
")",
":",
"network",
"=",
"pyphi",
".",
"Network",
".",
"from_json",
"(",
"json_dict",
"[",
"'network'",
"]",
")",
"state",
"=",
"json_dict",
"[",
"'state'",
"]",
"return",
"(",
"network",
",",
"state",
")"
] |
Load a network from a json file
|
[
"Load",
"a",
"network",
"from",
"a",
"json",
"file"
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/profiling/code_to_profile.py#L35-L39
|
15,866
|
wmayner/pyphi
|
profiling/code_to_profile.py
|
all_network_files
|
def all_network_files():
"""All network files"""
# TODO: list explicitly since some are missing?
network_types = [
'AND-circle',
'MAJ-specialized',
'MAJ-complete',
'iit-3.0-modular'
]
network_sizes = range(5, 8)
network_files = []
for n in network_sizes:
for t in network_types:
network_files.append('{}-{}'.format(n, t))
return network_files
|
python
|
def all_network_files():
"""All network files"""
# TODO: list explicitly since some are missing?
network_types = [
'AND-circle',
'MAJ-specialized',
'MAJ-complete',
'iit-3.0-modular'
]
network_sizes = range(5, 8)
network_files = []
for n in network_sizes:
for t in network_types:
network_files.append('{}-{}'.format(n, t))
return network_files
|
[
"def",
"all_network_files",
"(",
")",
":",
"# TODO: list explicitly since some are missing?",
"network_types",
"=",
"[",
"'AND-circle'",
",",
"'MAJ-specialized'",
",",
"'MAJ-complete'",
",",
"'iit-3.0-modular'",
"]",
"network_sizes",
"=",
"range",
"(",
"5",
",",
"8",
")",
"network_files",
"=",
"[",
"]",
"for",
"n",
"in",
"network_sizes",
":",
"for",
"t",
"in",
"network_types",
":",
"network_files",
".",
"append",
"(",
"'{}-{}'",
".",
"format",
"(",
"n",
",",
"t",
")",
")",
"return",
"network_files"
] |
All network files
|
[
"All",
"network",
"files"
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/profiling/code_to_profile.py#L42-L56
|
15,867
|
wmayner/pyphi
|
profiling/code_to_profile.py
|
profile_network
|
def profile_network(filename):
"""Profile a network.
Saves PyPhi results, pstats, and logs to respective directories.
"""
log = logging.getLogger(filename)
logfile = os.path.join(LOGS, filename + '.log')
os.makedirs(os.path.dirname(logfile), exist_ok=True)
handler = logging.FileHandler(logfile)
handler.setFormatter(formatter)
log.addHandler(handler)
log.setLevel(logging.INFO)
try:
with open(os.path.join(NETWORKS, filename + '.json')) as f:
network, state = load_json_network(json.load(f))
log.info('Profiling %s...', filename)
log.info('PyPhi configuration:\n%s',
pyphi.config.get_config_string())
start = time()
pr = cProfile.Profile()
pr.enable()
results = tuple(pyphi.compute.complexes(network, state))
pr.disable()
end = time()
pstatsfile = os.path.join(PSTATS, filename + '.pstats')
os.makedirs(os.path.dirname(pstatsfile), exist_ok=True)
pr.dump_stats(pstatsfile)
log.info('Finished in %i seconds.', end - start)
resultfile = os.path.join(RESULTS, filename + '-results.pkl')
os.makedirs(os.path.dirname(resultfile), exist_ok=True)
with open(resultfile, 'wb') as f:
pickle.dump(results, f)
except Exception as e:
log.error(e)
raise e
|
python
|
def profile_network(filename):
"""Profile a network.
Saves PyPhi results, pstats, and logs to respective directories.
"""
log = logging.getLogger(filename)
logfile = os.path.join(LOGS, filename + '.log')
os.makedirs(os.path.dirname(logfile), exist_ok=True)
handler = logging.FileHandler(logfile)
handler.setFormatter(formatter)
log.addHandler(handler)
log.setLevel(logging.INFO)
try:
with open(os.path.join(NETWORKS, filename + '.json')) as f:
network, state = load_json_network(json.load(f))
log.info('Profiling %s...', filename)
log.info('PyPhi configuration:\n%s',
pyphi.config.get_config_string())
start = time()
pr = cProfile.Profile()
pr.enable()
results = tuple(pyphi.compute.complexes(network, state))
pr.disable()
end = time()
pstatsfile = os.path.join(PSTATS, filename + '.pstats')
os.makedirs(os.path.dirname(pstatsfile), exist_ok=True)
pr.dump_stats(pstatsfile)
log.info('Finished in %i seconds.', end - start)
resultfile = os.path.join(RESULTS, filename + '-results.pkl')
os.makedirs(os.path.dirname(resultfile), exist_ok=True)
with open(resultfile, 'wb') as f:
pickle.dump(results, f)
except Exception as e:
log.error(e)
raise e
|
[
"def",
"profile_network",
"(",
"filename",
")",
":",
"log",
"=",
"logging",
".",
"getLogger",
"(",
"filename",
")",
"logfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"LOGS",
",",
"filename",
"+",
"'.log'",
")",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"logfile",
")",
",",
"exist_ok",
"=",
"True",
")",
"handler",
"=",
"logging",
".",
"FileHandler",
"(",
"logfile",
")",
"handler",
".",
"setFormatter",
"(",
"formatter",
")",
"log",
".",
"addHandler",
"(",
"handler",
")",
"log",
".",
"setLevel",
"(",
"logging",
".",
"INFO",
")",
"try",
":",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"NETWORKS",
",",
"filename",
"+",
"'.json'",
")",
")",
"as",
"f",
":",
"network",
",",
"state",
"=",
"load_json_network",
"(",
"json",
".",
"load",
"(",
"f",
")",
")",
"log",
".",
"info",
"(",
"'Profiling %s...'",
",",
"filename",
")",
"log",
".",
"info",
"(",
"'PyPhi configuration:\\n%s'",
",",
"pyphi",
".",
"config",
".",
"get_config_string",
"(",
")",
")",
"start",
"=",
"time",
"(",
")",
"pr",
"=",
"cProfile",
".",
"Profile",
"(",
")",
"pr",
".",
"enable",
"(",
")",
"results",
"=",
"tuple",
"(",
"pyphi",
".",
"compute",
".",
"complexes",
"(",
"network",
",",
"state",
")",
")",
"pr",
".",
"disable",
"(",
")",
"end",
"=",
"time",
"(",
")",
"pstatsfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"PSTATS",
",",
"filename",
"+",
"'.pstats'",
")",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"pstatsfile",
")",
",",
"exist_ok",
"=",
"True",
")",
"pr",
".",
"dump_stats",
"(",
"pstatsfile",
")",
"log",
".",
"info",
"(",
"'Finished in %i seconds.'",
",",
"end",
"-",
"start",
")",
"resultfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"RESULTS",
",",
"filename",
"+",
"'-results.pkl'",
")",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"resultfile",
")",
",",
"exist_ok",
"=",
"True",
")",
"with",
"open",
"(",
"resultfile",
",",
"'wb'",
")",
"as",
"f",
":",
"pickle",
".",
"dump",
"(",
"results",
",",
"f",
")",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"error",
"(",
"e",
")",
"raise",
"e"
] |
Profile a network.
Saves PyPhi results, pstats, and logs to respective directories.
|
[
"Profile",
"a",
"network",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/profiling/code_to_profile.py#L59-L102
|
15,868
|
wmayner/pyphi
|
pyphi/timescale.py
|
run_tpm
|
def run_tpm(tpm, time_scale):
"""Iterate a TPM by the specified number of time steps.
Args:
tpm (np.ndarray): A state-by-node tpm.
time_scale (int): The number of steps to run the tpm.
Returns:
np.ndarray
"""
sbs_tpm = convert.state_by_node2state_by_state(tpm)
if sparse(tpm):
tpm = sparse_time(sbs_tpm, time_scale)
else:
tpm = dense_time(sbs_tpm, time_scale)
return convert.state_by_state2state_by_node(tpm)
|
python
|
def run_tpm(tpm, time_scale):
"""Iterate a TPM by the specified number of time steps.
Args:
tpm (np.ndarray): A state-by-node tpm.
time_scale (int): The number of steps to run the tpm.
Returns:
np.ndarray
"""
sbs_tpm = convert.state_by_node2state_by_state(tpm)
if sparse(tpm):
tpm = sparse_time(sbs_tpm, time_scale)
else:
tpm = dense_time(sbs_tpm, time_scale)
return convert.state_by_state2state_by_node(tpm)
|
[
"def",
"run_tpm",
"(",
"tpm",
",",
"time_scale",
")",
":",
"sbs_tpm",
"=",
"convert",
".",
"state_by_node2state_by_state",
"(",
"tpm",
")",
"if",
"sparse",
"(",
"tpm",
")",
":",
"tpm",
"=",
"sparse_time",
"(",
"sbs_tpm",
",",
"time_scale",
")",
"else",
":",
"tpm",
"=",
"dense_time",
"(",
"sbs_tpm",
",",
"time_scale",
")",
"return",
"convert",
".",
"state_by_state2state_by_node",
"(",
"tpm",
")"
] |
Iterate a TPM by the specified number of time steps.
Args:
tpm (np.ndarray): A state-by-node tpm.
time_scale (int): The number of steps to run the tpm.
Returns:
np.ndarray
|
[
"Iterate",
"a",
"TPM",
"by",
"the",
"specified",
"number",
"of",
"time",
"steps",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/timescale.py#L28-L43
|
15,869
|
wmayner/pyphi
|
pyphi/timescale.py
|
run_cm
|
def run_cm(cm, time_scale):
"""Iterate a connectivity matrix the specified number of steps.
Args:
cm (np.ndarray): A connectivity matrix.
time_scale (int): The number of steps to run.
Returns:
np.ndarray: The connectivity matrix at the new timescale.
"""
cm = np.linalg.matrix_power(cm, time_scale)
# Round non-unitary values back to 1
cm[cm > 1] = 1
return cm
|
python
|
def run_cm(cm, time_scale):
"""Iterate a connectivity matrix the specified number of steps.
Args:
cm (np.ndarray): A connectivity matrix.
time_scale (int): The number of steps to run.
Returns:
np.ndarray: The connectivity matrix at the new timescale.
"""
cm = np.linalg.matrix_power(cm, time_scale)
# Round non-unitary values back to 1
cm[cm > 1] = 1
return cm
|
[
"def",
"run_cm",
"(",
"cm",
",",
"time_scale",
")",
":",
"cm",
"=",
"np",
".",
"linalg",
".",
"matrix_power",
"(",
"cm",
",",
"time_scale",
")",
"# Round non-unitary values back to 1",
"cm",
"[",
"cm",
">",
"1",
"]",
"=",
"1",
"return",
"cm"
] |
Iterate a connectivity matrix the specified number of steps.
Args:
cm (np.ndarray): A connectivity matrix.
time_scale (int): The number of steps to run.
Returns:
np.ndarray: The connectivity matrix at the new timescale.
|
[
"Iterate",
"a",
"connectivity",
"matrix",
"the",
"specified",
"number",
"of",
"steps",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/timescale.py#L46-L59
|
15,870
|
wmayner/pyphi
|
pyphi/compute/network.py
|
_reachable_subsystems
|
def _reachable_subsystems(network, indices, state):
"""A generator over all subsystems in a valid state."""
validate.is_network(network)
# Return subsystems largest to smallest to optimize parallel
# resource usage.
for subset in utils.powerset(indices, nonempty=True, reverse=True):
try:
yield Subsystem(network, state, subset)
except exceptions.StateUnreachableError:
pass
|
python
|
def _reachable_subsystems(network, indices, state):
"""A generator over all subsystems in a valid state."""
validate.is_network(network)
# Return subsystems largest to smallest to optimize parallel
# resource usage.
for subset in utils.powerset(indices, nonempty=True, reverse=True):
try:
yield Subsystem(network, state, subset)
except exceptions.StateUnreachableError:
pass
|
[
"def",
"_reachable_subsystems",
"(",
"network",
",",
"indices",
",",
"state",
")",
":",
"validate",
".",
"is_network",
"(",
"network",
")",
"# Return subsystems largest to smallest to optimize parallel",
"# resource usage.",
"for",
"subset",
"in",
"utils",
".",
"powerset",
"(",
"indices",
",",
"nonempty",
"=",
"True",
",",
"reverse",
"=",
"True",
")",
":",
"try",
":",
"yield",
"Subsystem",
"(",
"network",
",",
"state",
",",
"subset",
")",
"except",
"exceptions",
".",
"StateUnreachableError",
":",
"pass"
] |
A generator over all subsystems in a valid state.
|
[
"A",
"generator",
"over",
"all",
"subsystems",
"in",
"a",
"valid",
"state",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/network.py#L21-L31
|
15,871
|
wmayner/pyphi
|
pyphi/compute/network.py
|
all_complexes
|
def all_complexes(network, state):
"""Return a generator for all complexes of the network.
.. note::
Includes reducible, zero-|big_phi| complexes (which are not, strictly
speaking, complexes at all).
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Yields:
SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the
|Network|.
"""
engine = FindAllComplexes(subsystems(network, state))
return engine.run(config.PARALLEL_COMPLEX_EVALUATION)
|
python
|
def all_complexes(network, state):
"""Return a generator for all complexes of the network.
.. note::
Includes reducible, zero-|big_phi| complexes (which are not, strictly
speaking, complexes at all).
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Yields:
SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the
|Network|.
"""
engine = FindAllComplexes(subsystems(network, state))
return engine.run(config.PARALLEL_COMPLEX_EVALUATION)
|
[
"def",
"all_complexes",
"(",
"network",
",",
"state",
")",
":",
"engine",
"=",
"FindAllComplexes",
"(",
"subsystems",
"(",
"network",
",",
"state",
")",
")",
"return",
"engine",
".",
"run",
"(",
"config",
".",
"PARALLEL_COMPLEX_EVALUATION",
")"
] |
Return a generator for all complexes of the network.
.. note::
Includes reducible, zero-|big_phi| complexes (which are not, strictly
speaking, complexes at all).
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Yields:
SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the
|Network|.
|
[
"Return",
"a",
"generator",
"for",
"all",
"complexes",
"of",
"the",
"network",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/network.py#L93-L109
|
15,872
|
wmayner/pyphi
|
pyphi/compute/network.py
|
complexes
|
def complexes(network, state):
"""Return all irreducible complexes of the network.
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Yields:
SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the
|Network|, excluding those with |big_phi = 0|.
"""
engine = FindIrreducibleComplexes(possible_complexes(network, state))
return engine.run(config.PARALLEL_COMPLEX_EVALUATION)
|
python
|
def complexes(network, state):
"""Return all irreducible complexes of the network.
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Yields:
SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the
|Network|, excluding those with |big_phi = 0|.
"""
engine = FindIrreducibleComplexes(possible_complexes(network, state))
return engine.run(config.PARALLEL_COMPLEX_EVALUATION)
|
[
"def",
"complexes",
"(",
"network",
",",
"state",
")",
":",
"engine",
"=",
"FindIrreducibleComplexes",
"(",
"possible_complexes",
"(",
"network",
",",
"state",
")",
")",
"return",
"engine",
".",
"run",
"(",
"config",
".",
"PARALLEL_COMPLEX_EVALUATION",
")"
] |
Return all irreducible complexes of the network.
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Yields:
SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the
|Network|, excluding those with |big_phi = 0|.
|
[
"Return",
"all",
"irreducible",
"complexes",
"of",
"the",
"network",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/network.py#L121-L133
|
15,873
|
wmayner/pyphi
|
pyphi/compute/network.py
|
major_complex
|
def major_complex(network, state):
"""Return the major complex of the network.
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Returns:
SystemIrreducibilityAnalysis: The |SIA| for the |Subsystem| with
maximal |big_phi|.
"""
log.info('Calculating major complex...')
result = complexes(network, state)
if result:
result = max(result)
else:
empty_subsystem = Subsystem(network, state, ())
result = _null_sia(empty_subsystem)
log.info("Finished calculating major complex.")
return result
|
python
|
def major_complex(network, state):
"""Return the major complex of the network.
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Returns:
SystemIrreducibilityAnalysis: The |SIA| for the |Subsystem| with
maximal |big_phi|.
"""
log.info('Calculating major complex...')
result = complexes(network, state)
if result:
result = max(result)
else:
empty_subsystem = Subsystem(network, state, ())
result = _null_sia(empty_subsystem)
log.info("Finished calculating major complex.")
return result
|
[
"def",
"major_complex",
"(",
"network",
",",
"state",
")",
":",
"log",
".",
"info",
"(",
"'Calculating major complex...'",
")",
"result",
"=",
"complexes",
"(",
"network",
",",
"state",
")",
"if",
"result",
":",
"result",
"=",
"max",
"(",
"result",
")",
"else",
":",
"empty_subsystem",
"=",
"Subsystem",
"(",
"network",
",",
"state",
",",
"(",
")",
")",
"result",
"=",
"_null_sia",
"(",
"empty_subsystem",
")",
"log",
".",
"info",
"(",
"\"Finished calculating major complex.\"",
")",
"return",
"result"
] |
Return the major complex of the network.
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Returns:
SystemIrreducibilityAnalysis: The |SIA| for the |Subsystem| with
maximal |big_phi|.
|
[
"Return",
"the",
"major",
"complex",
"of",
"the",
"network",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/network.py#L136-L158
|
15,874
|
wmayner/pyphi
|
pyphi/compute/network.py
|
condensed
|
def condensed(network, state):
"""Return a list of maximal non-overlapping complexes.
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Returns:
list[SystemIrreducibilityAnalysis]: A list of |SIA| for non-overlapping
complexes with maximal |big_phi| values.
"""
result = []
covered_nodes = set()
for c in reversed(sorted(complexes(network, state))):
if not any(n in covered_nodes for n in c.subsystem.node_indices):
result.append(c)
covered_nodes = covered_nodes | set(c.subsystem.node_indices)
return result
|
python
|
def condensed(network, state):
"""Return a list of maximal non-overlapping complexes.
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Returns:
list[SystemIrreducibilityAnalysis]: A list of |SIA| for non-overlapping
complexes with maximal |big_phi| values.
"""
result = []
covered_nodes = set()
for c in reversed(sorted(complexes(network, state))):
if not any(n in covered_nodes for n in c.subsystem.node_indices):
result.append(c)
covered_nodes = covered_nodes | set(c.subsystem.node_indices)
return result
|
[
"def",
"condensed",
"(",
"network",
",",
"state",
")",
":",
"result",
"=",
"[",
"]",
"covered_nodes",
"=",
"set",
"(",
")",
"for",
"c",
"in",
"reversed",
"(",
"sorted",
"(",
"complexes",
"(",
"network",
",",
"state",
")",
")",
")",
":",
"if",
"not",
"any",
"(",
"n",
"in",
"covered_nodes",
"for",
"n",
"in",
"c",
".",
"subsystem",
".",
"node_indices",
")",
":",
"result",
".",
"append",
"(",
"c",
")",
"covered_nodes",
"=",
"covered_nodes",
"|",
"set",
"(",
"c",
".",
"subsystem",
".",
"node_indices",
")",
"return",
"result"
] |
Return a list of maximal non-overlapping complexes.
Args:
network (Network): The |Network| of interest.
state (tuple[int]): The state of the network (a binary tuple).
Returns:
list[SystemIrreducibilityAnalysis]: A list of |SIA| for non-overlapping
complexes with maximal |big_phi| values.
|
[
"Return",
"a",
"list",
"of",
"maximal",
"non",
"-",
"overlapping",
"complexes",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/network.py#L161-L180
|
15,875
|
wmayner/pyphi
|
pyphi/examples.py
|
basic_network
|
def basic_network(cm=False):
"""A 3-node network of logic gates.
Diagram::
+~~~~~~~~+
+~~~~>| A |<~~~~+
| | (OR) +~~~+ |
| +~~~~~~~~+ | |
| | |
| v |
+~+~~~~~~+ +~~~~~+~+
| B |<~~~~~~+ C |
| (COPY) +~~~~~~>| (XOR) |
+~~~~~~~~+ +~~~~~~~+
TPM:
+----------------+---------------+
| Previous state | Current state |
+----------------+---------------+
| A, B, C | A, B, C |
+================+===============+
| 0, 0, 0 | 0, 0, 0 |
+----------------+---------------+
| 1, 0, 0 | 0, 0, 1 |
+----------------+---------------+
| 0, 1, 0 | 1, 0, 1 |
+----------------+---------------+
| 1, 1, 0 | 1, 0, 0 |
+----------------+---------------+
| 0, 0, 1 | 1, 1, 0 |
+----------------+---------------+
| 1, 0, 1 | 1, 1, 1 |
+----------------+---------------+
| 0, 1, 1 | 1, 1, 1 |
+----------------+---------------+
| 1, 1, 1 | 1, 1, 0 |
+----------------+---------------+
Connectivity matrix:
+---+---+---+---+
| . | A | B | C |
+---+---+---+---+
| A | 0 | 0 | 1 |
+---+---+---+---+
| B | 1 | 0 | 1 |
+---+---+---+---+
| C | 1 | 1 | 0 |
+---+---+---+---+
.. note::
|CM[i][j] = 1| means that there is a directed edge |(i,j)| from node
|i| to node |j| and |CM[i][j] = 0| means there is no edge from |i| to
|j|.
"""
tpm = np.array([
[0, 0, 0],
[0, 0, 1],
[1, 0, 1],
[1, 0, 0],
[1, 1, 0],
[1, 1, 1],
[1, 1, 1],
[1, 1, 0]
])
if cm is False:
cm = np.array([
[0, 0, 1],
[1, 0, 1],
[1, 1, 0]
])
else:
cm = None
return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
|
python
|
def basic_network(cm=False):
"""A 3-node network of logic gates.
Diagram::
+~~~~~~~~+
+~~~~>| A |<~~~~+
| | (OR) +~~~+ |
| +~~~~~~~~+ | |
| | |
| v |
+~+~~~~~~+ +~~~~~+~+
| B |<~~~~~~+ C |
| (COPY) +~~~~~~>| (XOR) |
+~~~~~~~~+ +~~~~~~~+
TPM:
+----------------+---------------+
| Previous state | Current state |
+----------------+---------------+
| A, B, C | A, B, C |
+================+===============+
| 0, 0, 0 | 0, 0, 0 |
+----------------+---------------+
| 1, 0, 0 | 0, 0, 1 |
+----------------+---------------+
| 0, 1, 0 | 1, 0, 1 |
+----------------+---------------+
| 1, 1, 0 | 1, 0, 0 |
+----------------+---------------+
| 0, 0, 1 | 1, 1, 0 |
+----------------+---------------+
| 1, 0, 1 | 1, 1, 1 |
+----------------+---------------+
| 0, 1, 1 | 1, 1, 1 |
+----------------+---------------+
| 1, 1, 1 | 1, 1, 0 |
+----------------+---------------+
Connectivity matrix:
+---+---+---+---+
| . | A | B | C |
+---+---+---+---+
| A | 0 | 0 | 1 |
+---+---+---+---+
| B | 1 | 0 | 1 |
+---+---+---+---+
| C | 1 | 1 | 0 |
+---+---+---+---+
.. note::
|CM[i][j] = 1| means that there is a directed edge |(i,j)| from node
|i| to node |j| and |CM[i][j] = 0| means there is no edge from |i| to
|j|.
"""
tpm = np.array([
[0, 0, 0],
[0, 0, 1],
[1, 0, 1],
[1, 0, 0],
[1, 1, 0],
[1, 1, 1],
[1, 1, 1],
[1, 1, 0]
])
if cm is False:
cm = np.array([
[0, 0, 1],
[1, 0, 1],
[1, 1, 0]
])
else:
cm = None
return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
|
[
"def",
"basic_network",
"(",
"cm",
"=",
"False",
")",
":",
"tpm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
",",
"1",
"]",
",",
"[",
"1",
",",
"0",
",",
"1",
"]",
",",
"[",
"1",
",",
"0",
",",
"0",
"]",
",",
"[",
"1",
",",
"1",
",",
"0",
"]",
",",
"[",
"1",
",",
"1",
",",
"1",
"]",
",",
"[",
"1",
",",
"1",
",",
"1",
"]",
",",
"[",
"1",
",",
"1",
",",
"0",
"]",
"]",
")",
"if",
"cm",
"is",
"False",
":",
"cm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0",
",",
"0",
",",
"1",
"]",
",",
"[",
"1",
",",
"0",
",",
"1",
"]",
",",
"[",
"1",
",",
"1",
",",
"0",
"]",
"]",
")",
"else",
":",
"cm",
"=",
"None",
"return",
"Network",
"(",
"tpm",
",",
"cm",
"=",
"cm",
",",
"node_labels",
"=",
"LABELS",
"[",
":",
"tpm",
".",
"shape",
"[",
"1",
"]",
"]",
")"
] |
A 3-node network of logic gates.
Diagram::
+~~~~~~~~+
+~~~~>| A |<~~~~+
| | (OR) +~~~+ |
| +~~~~~~~~+ | |
| | |
| v |
+~+~~~~~~+ +~~~~~+~+
| B |<~~~~~~+ C |
| (COPY) +~~~~~~>| (XOR) |
+~~~~~~~~+ +~~~~~~~+
TPM:
+----------------+---------------+
| Previous state | Current state |
+----------------+---------------+
| A, B, C | A, B, C |
+================+===============+
| 0, 0, 0 | 0, 0, 0 |
+----------------+---------------+
| 1, 0, 0 | 0, 0, 1 |
+----------------+---------------+
| 0, 1, 0 | 1, 0, 1 |
+----------------+---------------+
| 1, 1, 0 | 1, 0, 0 |
+----------------+---------------+
| 0, 0, 1 | 1, 1, 0 |
+----------------+---------------+
| 1, 0, 1 | 1, 1, 1 |
+----------------+---------------+
| 0, 1, 1 | 1, 1, 1 |
+----------------+---------------+
| 1, 1, 1 | 1, 1, 0 |
+----------------+---------------+
Connectivity matrix:
+---+---+---+---+
| . | A | B | C |
+---+---+---+---+
| A | 0 | 0 | 1 |
+---+---+---+---+
| B | 1 | 0 | 1 |
+---+---+---+---+
| C | 1 | 1 | 0 |
+---+---+---+---+
.. note::
|CM[i][j] = 1| means that there is a directed edge |(i,j)| from node
|i| to node |j| and |CM[i][j] = 0| means there is no edge from |i| to
|j|.
|
[
"A",
"3",
"-",
"node",
"network",
"of",
"logic",
"gates",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L24-L99
|
15,876
|
wmayner/pyphi
|
pyphi/examples.py
|
basic_noisy_selfloop_network
|
def basic_noisy_selfloop_network():
"""Based on the basic_network, but with added selfloops and noisy edges.
Nodes perform deterministic functions of their inputs, but those inputs
may be flipped (i.e. what should be a 0 becomes a 1, and vice versa) with
probability epsilon (eps = 0.1 here).
Diagram::
+~~+
| v
+~~~~~~~~+
+~~~~>| A |<~~~~+
| | (OR) +~~~+ |
| +~~~~~~~~+ | |
| | |
| v |
+~+~~~~~~+ +~~~~~+~+
| B |<~~~~~~+ C |
+>| (COPY) +~~~~~~>| (XOR) |<+
| +~~~~~~~~+ +~~~~~~~+ |
| | | |
+~~~+ +~~~+
"""
tpm = np.array([
[0.271, 0.19, 0.244],
[0.919, 0.19, 0.756],
[0.919, 0.91, 0.756],
[0.991, 0.91, 0.244],
[0.919, 0.91, 0.756],
[0.991, 0.91, 0.244],
[0.991, 0.99, 0.244],
[0.999, 0.99, 0.756]
])
cm = np.array([
[1, 0, 1],
[1, 1, 1],
[1, 1, 1]
])
return Network(tpm, cm=cm)
|
python
|
def basic_noisy_selfloop_network():
"""Based on the basic_network, but with added selfloops and noisy edges.
Nodes perform deterministic functions of their inputs, but those inputs
may be flipped (i.e. what should be a 0 becomes a 1, and vice versa) with
probability epsilon (eps = 0.1 here).
Diagram::
+~~+
| v
+~~~~~~~~+
+~~~~>| A |<~~~~+
| | (OR) +~~~+ |
| +~~~~~~~~+ | |
| | |
| v |
+~+~~~~~~+ +~~~~~+~+
| B |<~~~~~~+ C |
+>| (COPY) +~~~~~~>| (XOR) |<+
| +~~~~~~~~+ +~~~~~~~+ |
| | | |
+~~~+ +~~~+
"""
tpm = np.array([
[0.271, 0.19, 0.244],
[0.919, 0.19, 0.756],
[0.919, 0.91, 0.756],
[0.991, 0.91, 0.244],
[0.919, 0.91, 0.756],
[0.991, 0.91, 0.244],
[0.991, 0.99, 0.244],
[0.999, 0.99, 0.756]
])
cm = np.array([
[1, 0, 1],
[1, 1, 1],
[1, 1, 1]
])
return Network(tpm, cm=cm)
|
[
"def",
"basic_noisy_selfloop_network",
"(",
")",
":",
"tpm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0.271",
",",
"0.19",
",",
"0.244",
"]",
",",
"[",
"0.919",
",",
"0.19",
",",
"0.756",
"]",
",",
"[",
"0.919",
",",
"0.91",
",",
"0.756",
"]",
",",
"[",
"0.991",
",",
"0.91",
",",
"0.244",
"]",
",",
"[",
"0.919",
",",
"0.91",
",",
"0.756",
"]",
",",
"[",
"0.991",
",",
"0.91",
",",
"0.244",
"]",
",",
"[",
"0.991",
",",
"0.99",
",",
"0.244",
"]",
",",
"[",
"0.999",
",",
"0.99",
",",
"0.756",
"]",
"]",
")",
"cm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"1",
",",
"0",
",",
"1",
"]",
",",
"[",
"1",
",",
"1",
",",
"1",
"]",
",",
"[",
"1",
",",
"1",
",",
"1",
"]",
"]",
")",
"return",
"Network",
"(",
"tpm",
",",
"cm",
"=",
"cm",
")"
] |
Based on the basic_network, but with added selfloops and noisy edges.
Nodes perform deterministic functions of their inputs, but those inputs
may be flipped (i.e. what should be a 0 becomes a 1, and vice versa) with
probability epsilon (eps = 0.1 here).
Diagram::
+~~+
| v
+~~~~~~~~+
+~~~~>| A |<~~~~+
| | (OR) +~~~+ |
| +~~~~~~~~+ | |
| | |
| v |
+~+~~~~~~+ +~~~~~+~+
| B |<~~~~~~+ C |
+>| (COPY) +~~~~~~>| (XOR) |<+
| +~~~~~~~~+ +~~~~~~~+ |
| | | |
+~~~+ +~~~+
|
[
"Based",
"on",
"the",
"basic_network",
"but",
"with",
"added",
"selfloops",
"and",
"noisy",
"edges",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L116-L158
|
15,877
|
wmayner/pyphi
|
pyphi/examples.py
|
residue_network
|
def residue_network():
"""The network for the residue example.
Current and previous state are all nodes OFF.
Diagram::
+~~~~~~~+ +~~~~~~~+
| A | | B |
+~~>| (AND) | | (AND) |<~~+
| +~~~~~~~+ +~~~~~~~+ |
| ^ ^ |
| | | |
| +~~~~~+ +~~~~~+ |
| | | |
+~~~+~~~+ +~+~~~+~+ +~~~+~~~+
| C | | D | | E |
| | | | | |
+~~~~~~~+ +~~~~~~~+ +~~~~~~~+
Connectivity matrix:
+---+---+---+---+---+---+
| . | A | B | C | D | E |
+---+---+---+---+---+---+
| A | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| B | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| C | 1 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| D | 1 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| E | 0 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+
"""
tpm = np.array([
[int(s) for s in bin(x)[2:].zfill(5)[::-1]] for x in range(32)
])
tpm[np.where(np.sum(tpm[0:, 2:4], 1) == 2), 0] = 1
tpm[np.where(np.sum(tpm[0:, 3:5], 1) == 2), 1] = 1
tpm[np.where(np.sum(tpm[0:, 2:4], 1) < 2), 0] = 0
tpm[np.where(np.sum(tpm[0:, 3:5], 1) < 2), 1] = 0
cm = np.zeros((5, 5))
cm[2:4, 0] = 1
cm[3:, 1] = 1
return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
|
python
|
def residue_network():
"""The network for the residue example.
Current and previous state are all nodes OFF.
Diagram::
+~~~~~~~+ +~~~~~~~+
| A | | B |
+~~>| (AND) | | (AND) |<~~+
| +~~~~~~~+ +~~~~~~~+ |
| ^ ^ |
| | | |
| +~~~~~+ +~~~~~+ |
| | | |
+~~~+~~~+ +~+~~~+~+ +~~~+~~~+
| C | | D | | E |
| | | | | |
+~~~~~~~+ +~~~~~~~+ +~~~~~~~+
Connectivity matrix:
+---+---+---+---+---+---+
| . | A | B | C | D | E |
+---+---+---+---+---+---+
| A | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| B | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| C | 1 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| D | 1 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| E | 0 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+
"""
tpm = np.array([
[int(s) for s in bin(x)[2:].zfill(5)[::-1]] for x in range(32)
])
tpm[np.where(np.sum(tpm[0:, 2:4], 1) == 2), 0] = 1
tpm[np.where(np.sum(tpm[0:, 3:5], 1) == 2), 1] = 1
tpm[np.where(np.sum(tpm[0:, 2:4], 1) < 2), 0] = 0
tpm[np.where(np.sum(tpm[0:, 3:5], 1) < 2), 1] = 0
cm = np.zeros((5, 5))
cm[2:4, 0] = 1
cm[3:, 1] = 1
return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
|
[
"def",
"residue_network",
"(",
")",
":",
"tpm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"int",
"(",
"s",
")",
"for",
"s",
"in",
"bin",
"(",
"x",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"5",
")",
"[",
":",
":",
"-",
"1",
"]",
"]",
"for",
"x",
"in",
"range",
"(",
"32",
")",
"]",
")",
"tpm",
"[",
"np",
".",
"where",
"(",
"np",
".",
"sum",
"(",
"tpm",
"[",
"0",
":",
",",
"2",
":",
"4",
"]",
",",
"1",
")",
"==",
"2",
")",
",",
"0",
"]",
"=",
"1",
"tpm",
"[",
"np",
".",
"where",
"(",
"np",
".",
"sum",
"(",
"tpm",
"[",
"0",
":",
",",
"3",
":",
"5",
"]",
",",
"1",
")",
"==",
"2",
")",
",",
"1",
"]",
"=",
"1",
"tpm",
"[",
"np",
".",
"where",
"(",
"np",
".",
"sum",
"(",
"tpm",
"[",
"0",
":",
",",
"2",
":",
"4",
"]",
",",
"1",
")",
"<",
"2",
")",
",",
"0",
"]",
"=",
"0",
"tpm",
"[",
"np",
".",
"where",
"(",
"np",
".",
"sum",
"(",
"tpm",
"[",
"0",
":",
",",
"3",
":",
"5",
"]",
",",
"1",
")",
"<",
"2",
")",
",",
"1",
"]",
"=",
"0",
"cm",
"=",
"np",
".",
"zeros",
"(",
"(",
"5",
",",
"5",
")",
")",
"cm",
"[",
"2",
":",
"4",
",",
"0",
"]",
"=",
"1",
"cm",
"[",
"3",
":",
",",
"1",
"]",
"=",
"1",
"return",
"Network",
"(",
"tpm",
",",
"cm",
"=",
"cm",
",",
"node_labels",
"=",
"LABELS",
"[",
":",
"tpm",
".",
"shape",
"[",
"1",
"]",
"]",
")"
] |
The network for the residue example.
Current and previous state are all nodes OFF.
Diagram::
+~~~~~~~+ +~~~~~~~+
| A | | B |
+~~>| (AND) | | (AND) |<~~+
| +~~~~~~~+ +~~~~~~~+ |
| ^ ^ |
| | | |
| +~~~~~+ +~~~~~+ |
| | | |
+~~~+~~~+ +~+~~~+~+ +~~~+~~~+
| C | | D | | E |
| | | | | |
+~~~~~~~+ +~~~~~~~+ +~~~~~~~+
Connectivity matrix:
+---+---+---+---+---+---+
| . | A | B | C | D | E |
+---+---+---+---+---+---+
| A | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| B | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| C | 1 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| D | 1 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+
| E | 0 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+
|
[
"The",
"network",
"for",
"the",
"residue",
"example",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L170-L218
|
15,878
|
wmayner/pyphi
|
pyphi/examples.py
|
propagation_delay_network
|
def propagation_delay_network():
"""A version of the primary example from the IIT 3.0 paper with
deterministic COPY gates on each connection. These copy gates essentially
function as propagation delays on the signal between OR, AND and XOR gates
from the original system.
The current and previous states of the network are also selected to mimic
the corresponding states from the IIT 3.0 paper.
Diagram::
+----------+
+------------------+ C (COPY) +<----------------+
v +----------+ |
+-------+-+ +-+-------+
| | +----------+ | |
| A (OR) +--------------->+ B (COPY) +-------------->+ D (XOR) |
| | +----------+ | |
+-+-----+-+ +-+-----+-+
| ^ ^ |
| | | |
| | +----------+ +----------+ | |
| +---+ H (COPY) +<----+ +---->+ F (COPY) +---+ |
| +----------+ | | +----------+ |
| | | |
| +-+-----+-+ |
| +----------+ | | +----------+ |
+-------->+ I (COPY) +-->| G (AND) |<--+ E (COPY) +<--------+
+----------+ | | +----------+
+---------+
Connectivity matrix:
+---+---+---+---+---+---+---+---+---+---+
| . | A | B | C | D | E | F | G | H | I |
+---+---+---+---+---+---+---+---+---+---+
| A | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
+---+---+---+---+---+---+---+---+---+---+
| B | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| C | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| D | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| E | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| F | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| G | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| H | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| I | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
States:
In the IIT 3.0 paper example, the previous state of the system has only the
XOR gate ON. For the propagation delay network, this corresponds to a state
of
``(0, 0, 0, 1, 0, 0, 0, 0, 0)``.
The current state of the IIT 3.0 example has only the OR gate ON. By
advancing the propagation delay system two time steps, the current state
``(1, 0, 0, 0, 0, 0, 0, 0, 0)`` is achieved, with corresponding previous
state ``(0, 0, 1, 0, 1, 0, 0, 0, 0)``.
"""
num_nodes = 9
num_states = 2 ** num_nodes
tpm = np.zeros((num_states, num_nodes))
for previous_state_index, previous in enumerate(all_states(num_nodes)):
current_state = [0 for i in range(num_nodes)]
if previous[2] == 1 or previous[7] == 1:
current_state[0] = 1
if previous[0] == 1:
current_state[1] = 1
current_state[8] = 1
if previous[3] == 1:
current_state[2] = 1
current_state[4] = 1
if previous[1] == 1 ^ previous[5] == 1:
current_state[3] = 1
if previous[4] == 1 and previous[8] == 1:
current_state[6] = 1
if previous[6] == 1:
current_state[5] = 1
current_state[7] = 1
tpm[previous_state_index, :] = current_state
cm = np.array([[0, 1, 0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 1, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 1, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0]])
return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
|
python
|
def propagation_delay_network():
"""A version of the primary example from the IIT 3.0 paper with
deterministic COPY gates on each connection. These copy gates essentially
function as propagation delays on the signal between OR, AND and XOR gates
from the original system.
The current and previous states of the network are also selected to mimic
the corresponding states from the IIT 3.0 paper.
Diagram::
+----------+
+------------------+ C (COPY) +<----------------+
v +----------+ |
+-------+-+ +-+-------+
| | +----------+ | |
| A (OR) +--------------->+ B (COPY) +-------------->+ D (XOR) |
| | +----------+ | |
+-+-----+-+ +-+-----+-+
| ^ ^ |
| | | |
| | +----------+ +----------+ | |
| +---+ H (COPY) +<----+ +---->+ F (COPY) +---+ |
| +----------+ | | +----------+ |
| | | |
| +-+-----+-+ |
| +----------+ | | +----------+ |
+-------->+ I (COPY) +-->| G (AND) |<--+ E (COPY) +<--------+
+----------+ | | +----------+
+---------+
Connectivity matrix:
+---+---+---+---+---+---+---+---+---+---+
| . | A | B | C | D | E | F | G | H | I |
+---+---+---+---+---+---+---+---+---+---+
| A | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
+---+---+---+---+---+---+---+---+---+---+
| B | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| C | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| D | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| E | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| F | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| G | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| H | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| I | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
States:
In the IIT 3.0 paper example, the previous state of the system has only the
XOR gate ON. For the propagation delay network, this corresponds to a state
of
``(0, 0, 0, 1, 0, 0, 0, 0, 0)``.
The current state of the IIT 3.0 example has only the OR gate ON. By
advancing the propagation delay system two time steps, the current state
``(1, 0, 0, 0, 0, 0, 0, 0, 0)`` is achieved, with corresponding previous
state ``(0, 0, 1, 0, 1, 0, 0, 0, 0)``.
"""
num_nodes = 9
num_states = 2 ** num_nodes
tpm = np.zeros((num_states, num_nodes))
for previous_state_index, previous in enumerate(all_states(num_nodes)):
current_state = [0 for i in range(num_nodes)]
if previous[2] == 1 or previous[7] == 1:
current_state[0] = 1
if previous[0] == 1:
current_state[1] = 1
current_state[8] = 1
if previous[3] == 1:
current_state[2] = 1
current_state[4] = 1
if previous[1] == 1 ^ previous[5] == 1:
current_state[3] = 1
if previous[4] == 1 and previous[8] == 1:
current_state[6] = 1
if previous[6] == 1:
current_state[5] = 1
current_state[7] = 1
tpm[previous_state_index, :] = current_state
cm = np.array([[0, 1, 0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 1, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 1, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0]])
return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
|
[
"def",
"propagation_delay_network",
"(",
")",
":",
"num_nodes",
"=",
"9",
"num_states",
"=",
"2",
"**",
"num_nodes",
"tpm",
"=",
"np",
".",
"zeros",
"(",
"(",
"num_states",
",",
"num_nodes",
")",
")",
"for",
"previous_state_index",
",",
"previous",
"in",
"enumerate",
"(",
"all_states",
"(",
"num_nodes",
")",
")",
":",
"current_state",
"=",
"[",
"0",
"for",
"i",
"in",
"range",
"(",
"num_nodes",
")",
"]",
"if",
"previous",
"[",
"2",
"]",
"==",
"1",
"or",
"previous",
"[",
"7",
"]",
"==",
"1",
":",
"current_state",
"[",
"0",
"]",
"=",
"1",
"if",
"previous",
"[",
"0",
"]",
"==",
"1",
":",
"current_state",
"[",
"1",
"]",
"=",
"1",
"current_state",
"[",
"8",
"]",
"=",
"1",
"if",
"previous",
"[",
"3",
"]",
"==",
"1",
":",
"current_state",
"[",
"2",
"]",
"=",
"1",
"current_state",
"[",
"4",
"]",
"=",
"1",
"if",
"previous",
"[",
"1",
"]",
"==",
"1",
"^",
"previous",
"[",
"5",
"]",
"==",
"1",
":",
"current_state",
"[",
"3",
"]",
"=",
"1",
"if",
"previous",
"[",
"4",
"]",
"==",
"1",
"and",
"previous",
"[",
"8",
"]",
"==",
"1",
":",
"current_state",
"[",
"6",
"]",
"=",
"1",
"if",
"previous",
"[",
"6",
"]",
"==",
"1",
":",
"current_state",
"[",
"5",
"]",
"=",
"1",
"current_state",
"[",
"7",
"]",
"=",
"1",
"tpm",
"[",
"previous_state_index",
",",
":",
"]",
"=",
"current_state",
"cm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0",
",",
"1",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"1",
"]",
",",
"[",
"0",
",",
"0",
",",
"0",
",",
"1",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
"]",
",",
"[",
"1",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
",",
"1",
",",
"0",
",",
"1",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"1",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
",",
"0",
",",
"1",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"1",
",",
"0",
",",
"1",
",",
"0",
"]",
",",
"[",
"1",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"1",
",",
"0",
",",
"0",
"]",
"]",
")",
"return",
"Network",
"(",
"tpm",
",",
"cm",
"=",
"cm",
",",
"node_labels",
"=",
"LABELS",
"[",
":",
"tpm",
".",
"shape",
"[",
"1",
"]",
"]",
")"
] |
A version of the primary example from the IIT 3.0 paper with
deterministic COPY gates on each connection. These copy gates essentially
function as propagation delays on the signal between OR, AND and XOR gates
from the original system.
The current and previous states of the network are also selected to mimic
the corresponding states from the IIT 3.0 paper.
Diagram::
+----------+
+------------------+ C (COPY) +<----------------+
v +----------+ |
+-------+-+ +-+-------+
| | +----------+ | |
| A (OR) +--------------->+ B (COPY) +-------------->+ D (XOR) |
| | +----------+ | |
+-+-----+-+ +-+-----+-+
| ^ ^ |
| | | |
| | +----------+ +----------+ | |
| +---+ H (COPY) +<----+ +---->+ F (COPY) +---+ |
| +----------+ | | +----------+ |
| | | |
| +-+-----+-+ |
| +----------+ | | +----------+ |
+-------->+ I (COPY) +-->| G (AND) |<--+ E (COPY) +<--------+
+----------+ | | +----------+
+---------+
Connectivity matrix:
+---+---+---+---+---+---+---+---+---+---+
| . | A | B | C | D | E | F | G | H | I |
+---+---+---+---+---+---+---+---+---+---+
| A | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
+---+---+---+---+---+---+---+---+---+---+
| B | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| C | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| D | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| E | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| F | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| G | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| H | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
| I | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 |
+---+---+---+---+---+---+---+---+---+---+
States:
In the IIT 3.0 paper example, the previous state of the system has only the
XOR gate ON. For the propagation delay network, this corresponds to a state
of
``(0, 0, 0, 1, 0, 0, 0, 0, 0)``.
The current state of the IIT 3.0 example has only the OR gate ON. By
advancing the propagation delay system two time steps, the current state
``(1, 0, 0, 0, 0, 0, 0, 0, 0)`` is achieved, with corresponding previous
state ``(0, 0, 1, 0, 1, 0, 0, 0, 0)``.
|
[
"A",
"version",
"of",
"the",
"primary",
"example",
"from",
"the",
"IIT",
"3",
".",
"0",
"paper",
"with",
"deterministic",
"COPY",
"gates",
"on",
"each",
"connection",
".",
"These",
"copy",
"gates",
"essentially",
"function",
"as",
"propagation",
"delays",
"on",
"the",
"signal",
"between",
"OR",
"AND",
"and",
"XOR",
"gates",
"from",
"the",
"original",
"system",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L395-L496
|
15,879
|
wmayner/pyphi
|
pyphi/examples.py
|
macro_network
|
def macro_network():
"""A network of micro elements which has greater integrated information
after coarse graining to a macro scale.
"""
tpm = np.array([[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 1.0, 1.0],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 1.0, 1.0],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 1.0, 1.0],
[1.0, 1.0, 0.3, 0.3],
[1.0, 1.0, 0.3, 0.3],
[1.0, 1.0, 0.3, 0.3],
[1.0, 1.0, 1.0, 1.0]])
return Network(tpm, node_labels=LABELS[:tpm.shape[1]])
|
python
|
def macro_network():
"""A network of micro elements which has greater integrated information
after coarse graining to a macro scale.
"""
tpm = np.array([[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 1.0, 1.0],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 1.0, 1.0],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 0.3, 0.3],
[0.3, 0.3, 1.0, 1.0],
[1.0, 1.0, 0.3, 0.3],
[1.0, 1.0, 0.3, 0.3],
[1.0, 1.0, 0.3, 0.3],
[1.0, 1.0, 1.0, 1.0]])
return Network(tpm, node_labels=LABELS[:tpm.shape[1]])
|
[
"def",
"macro_network",
"(",
")",
":",
"tpm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0.3",
",",
"0.3",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"0.3",
",",
"0.3",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"0.3",
",",
"0.3",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"0.3",
",",
"0.3",
",",
"1.0",
",",
"1.0",
"]",
",",
"[",
"0.3",
",",
"0.3",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"0.3",
",",
"0.3",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"0.3",
",",
"0.3",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"0.3",
",",
"0.3",
",",
"1.0",
",",
"1.0",
"]",
",",
"[",
"0.3",
",",
"0.3",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"0.3",
",",
"0.3",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"0.3",
",",
"0.3",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"0.3",
",",
"0.3",
",",
"1.0",
",",
"1.0",
"]",
",",
"[",
"1.0",
",",
"1.0",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"1.0",
",",
"1.0",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"1.0",
",",
"1.0",
",",
"0.3",
",",
"0.3",
"]",
",",
"[",
"1.0",
",",
"1.0",
",",
"1.0",
",",
"1.0",
"]",
"]",
")",
"return",
"Network",
"(",
"tpm",
",",
"node_labels",
"=",
"LABELS",
"[",
":",
"tpm",
".",
"shape",
"[",
"1",
"]",
"]",
")"
] |
A network of micro elements which has greater integrated information
after coarse graining to a macro scale.
|
[
"A",
"network",
"of",
"micro",
"elements",
"which",
"has",
"greater",
"integrated",
"information",
"after",
"coarse",
"graining",
"to",
"a",
"macro",
"scale",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L499-L519
|
15,880
|
wmayner/pyphi
|
pyphi/examples.py
|
blackbox_network
|
def blackbox_network():
"""A micro-network to demonstrate blackboxing.
Diagram::
+----------+
+-------------------->+ A (COPY) + <---------------+
| +----------+ |
| +----------+ |
| +-----------+ B (COPY) + <-------------+ |
v v +----------+ | |
+-+-----+-+ +-+-----+-+
| | | |
| C (AND) | | F (AND) |
| | | |
+-+-----+-+ +-+-----+-+
| | ^ ^
| | +----------+ | |
| +---------> + D (COPY) +---------------+ |
| +----------+ |
| +----------+ |
+-------------------> + E (COPY) +-----------------+
+----------+
Connectivity Matrix:
+---+---+---+---+---+---+---+
| . | A | B | C | D | E | F |
+---+---+---+---+---+---+---+
| A | 0 | 0 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+
| B | 0 | 0 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+
| C | 0 | 0 | 0 | 1 | 1 | 0 |
+---+---+---+---+---+---+---+
| D | 0 | 0 | 0 | 0 | 0 | 1 |
+---+---+---+---+---+---+---+
| E | 0 | 0 | 0 | 0 | 0 | 1 |
+---+---+---+---+---+---+---+
| F | 1 | 1 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+
In the documentation example, the state is (0, 0, 0, 0, 0, 0).
"""
num_nodes = 6
num_states = 2 ** num_nodes
tpm = np.zeros((num_states, num_nodes))
for index, previous_state in enumerate(all_states(num_nodes)):
current_state = [0 for i in range(num_nodes)]
if previous_state[5] == 1:
current_state[0] = 1
current_state[1] = 1
if previous_state[0] == 1 and previous_state[1]:
current_state[2] = 1
if previous_state[2] == 1:
current_state[3] = 1
current_state[4] = 1
if previous_state[3] == 1 and previous_state[4] == 1:
current_state[5] = 1
tpm[index, :] = current_state
cm = np.array([
[0, 0, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 1],
[1, 1, 0, 0, 0, 0]
])
return Network(tpm, cm, node_labels=LABELS[:tpm.shape[1]])
|
python
|
def blackbox_network():
"""A micro-network to demonstrate blackboxing.
Diagram::
+----------+
+-------------------->+ A (COPY) + <---------------+
| +----------+ |
| +----------+ |
| +-----------+ B (COPY) + <-------------+ |
v v +----------+ | |
+-+-----+-+ +-+-----+-+
| | | |
| C (AND) | | F (AND) |
| | | |
+-+-----+-+ +-+-----+-+
| | ^ ^
| | +----------+ | |
| +---------> + D (COPY) +---------------+ |
| +----------+ |
| +----------+ |
+-------------------> + E (COPY) +-----------------+
+----------+
Connectivity Matrix:
+---+---+---+---+---+---+---+
| . | A | B | C | D | E | F |
+---+---+---+---+---+---+---+
| A | 0 | 0 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+
| B | 0 | 0 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+
| C | 0 | 0 | 0 | 1 | 1 | 0 |
+---+---+---+---+---+---+---+
| D | 0 | 0 | 0 | 0 | 0 | 1 |
+---+---+---+---+---+---+---+
| E | 0 | 0 | 0 | 0 | 0 | 1 |
+---+---+---+---+---+---+---+
| F | 1 | 1 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+
In the documentation example, the state is (0, 0, 0, 0, 0, 0).
"""
num_nodes = 6
num_states = 2 ** num_nodes
tpm = np.zeros((num_states, num_nodes))
for index, previous_state in enumerate(all_states(num_nodes)):
current_state = [0 for i in range(num_nodes)]
if previous_state[5] == 1:
current_state[0] = 1
current_state[1] = 1
if previous_state[0] == 1 and previous_state[1]:
current_state[2] = 1
if previous_state[2] == 1:
current_state[3] = 1
current_state[4] = 1
if previous_state[3] == 1 and previous_state[4] == 1:
current_state[5] = 1
tpm[index, :] = current_state
cm = np.array([
[0, 0, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 1],
[1, 1, 0, 0, 0, 0]
])
return Network(tpm, cm, node_labels=LABELS[:tpm.shape[1]])
|
[
"def",
"blackbox_network",
"(",
")",
":",
"num_nodes",
"=",
"6",
"num_states",
"=",
"2",
"**",
"num_nodes",
"tpm",
"=",
"np",
".",
"zeros",
"(",
"(",
"num_states",
",",
"num_nodes",
")",
")",
"for",
"index",
",",
"previous_state",
"in",
"enumerate",
"(",
"all_states",
"(",
"num_nodes",
")",
")",
":",
"current_state",
"=",
"[",
"0",
"for",
"i",
"in",
"range",
"(",
"num_nodes",
")",
"]",
"if",
"previous_state",
"[",
"5",
"]",
"==",
"1",
":",
"current_state",
"[",
"0",
"]",
"=",
"1",
"current_state",
"[",
"1",
"]",
"=",
"1",
"if",
"previous_state",
"[",
"0",
"]",
"==",
"1",
"and",
"previous_state",
"[",
"1",
"]",
":",
"current_state",
"[",
"2",
"]",
"=",
"1",
"if",
"previous_state",
"[",
"2",
"]",
"==",
"1",
":",
"current_state",
"[",
"3",
"]",
"=",
"1",
"current_state",
"[",
"4",
"]",
"=",
"1",
"if",
"previous_state",
"[",
"3",
"]",
"==",
"1",
"and",
"previous_state",
"[",
"4",
"]",
"==",
"1",
":",
"current_state",
"[",
"5",
"]",
"=",
"1",
"tpm",
"[",
"index",
",",
":",
"]",
"=",
"current_state",
"cm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0",
",",
"0",
",",
"1",
",",
"0",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
",",
"1",
",",
"0",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
",",
"0",
",",
"1",
",",
"1",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"1",
"]",
",",
"[",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"1",
"]",
",",
"[",
"1",
",",
"1",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
"]",
"]",
")",
"return",
"Network",
"(",
"tpm",
",",
"cm",
",",
"node_labels",
"=",
"LABELS",
"[",
":",
"tpm",
".",
"shape",
"[",
"1",
"]",
"]",
")"
] |
A micro-network to demonstrate blackboxing.
Diagram::
+----------+
+-------------------->+ A (COPY) + <---------------+
| +----------+ |
| +----------+ |
| +-----------+ B (COPY) + <-------------+ |
v v +----------+ | |
+-+-----+-+ +-+-----+-+
| | | |
| C (AND) | | F (AND) |
| | | |
+-+-----+-+ +-+-----+-+
| | ^ ^
| | +----------+ | |
| +---------> + D (COPY) +---------------+ |
| +----------+ |
| +----------+ |
+-------------------> + E (COPY) +-----------------+
+----------+
Connectivity Matrix:
+---+---+---+---+---+---+---+
| . | A | B | C | D | E | F |
+---+---+---+---+---+---+---+
| A | 0 | 0 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+
| B | 0 | 0 | 1 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+
| C | 0 | 0 | 0 | 1 | 1 | 0 |
+---+---+---+---+---+---+---+
| D | 0 | 0 | 0 | 0 | 0 | 1 |
+---+---+---+---+---+---+---+
| E | 0 | 0 | 0 | 0 | 0 | 1 |
+---+---+---+---+---+---+---+
| F | 1 | 1 | 0 | 0 | 0 | 0 |
+---+---+---+---+---+---+---+
In the documentation example, the state is (0, 0, 0, 0, 0, 0).
|
[
"A",
"micro",
"-",
"network",
"to",
"demonstrate",
"blackboxing",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L531-L603
|
15,881
|
wmayner/pyphi
|
pyphi/examples.py
|
actual_causation
|
def actual_causation():
"""The actual causation example network, consisting of an ``OR`` and
``AND`` gate with self-loops.
"""
tpm = np.array([
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 0, 0, 1]
])
cm = np.array([
[1, 1],
[1, 1]
])
return Network(tpm, cm, node_labels=('OR', 'AND'))
|
python
|
def actual_causation():
"""The actual causation example network, consisting of an ``OR`` and
``AND`` gate with self-loops.
"""
tpm = np.array([
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 0, 0, 1]
])
cm = np.array([
[1, 1],
[1, 1]
])
return Network(tpm, cm, node_labels=('OR', 'AND'))
|
[
"def",
"actual_causation",
"(",
")",
":",
"tpm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"1",
",",
"0",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"1",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"1",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
",",
"0",
",",
"1",
"]",
"]",
")",
"cm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"1",
",",
"1",
"]",
",",
"[",
"1",
",",
"1",
"]",
"]",
")",
"return",
"Network",
"(",
"tpm",
",",
"cm",
",",
"node_labels",
"=",
"(",
"'OR'",
",",
"'AND'",
")",
")"
] |
The actual causation example network, consisting of an ``OR`` and
``AND`` gate with self-loops.
|
[
"The",
"actual",
"causation",
"example",
"network",
"consisting",
"of",
"an",
"OR",
"and",
"AND",
"gate",
"with",
"self",
"-",
"loops",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L1063-L1077
|
15,882
|
wmayner/pyphi
|
pyphi/examples.py
|
prevention
|
def prevention():
"""The |Transition| for the prevention example from Actual Causation
Figure 5D.
"""
tpm = np.array([
[0.5, 0.5, 1],
[0.5, 0.5, 0],
[0.5, 0.5, 1],
[0.5, 0.5, 1],
[0.5, 0.5, 1],
[0.5, 0.5, 0],
[0.5, 0.5, 1],
[0.5, 0.5, 1]
])
cm = np.array([
[0, 0, 1],
[0, 0, 1],
[0, 0, 0]
])
network = Network(tpm, cm, node_labels=['A', 'B', 'F'])
x_state = (1, 1, 1)
y_state = (1, 1, 1)
return Transition(network, x_state, y_state, (0, 1), (2,))
|
python
|
def prevention():
"""The |Transition| for the prevention example from Actual Causation
Figure 5D.
"""
tpm = np.array([
[0.5, 0.5, 1],
[0.5, 0.5, 0],
[0.5, 0.5, 1],
[0.5, 0.5, 1],
[0.5, 0.5, 1],
[0.5, 0.5, 0],
[0.5, 0.5, 1],
[0.5, 0.5, 1]
])
cm = np.array([
[0, 0, 1],
[0, 0, 1],
[0, 0, 0]
])
network = Network(tpm, cm, node_labels=['A', 'B', 'F'])
x_state = (1, 1, 1)
y_state = (1, 1, 1)
return Transition(network, x_state, y_state, (0, 1), (2,))
|
[
"def",
"prevention",
"(",
")",
":",
"tpm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0.5",
",",
"0.5",
",",
"1",
"]",
",",
"[",
"0.5",
",",
"0.5",
",",
"0",
"]",
",",
"[",
"0.5",
",",
"0.5",
",",
"1",
"]",
",",
"[",
"0.5",
",",
"0.5",
",",
"1",
"]",
",",
"[",
"0.5",
",",
"0.5",
",",
"1",
"]",
",",
"[",
"0.5",
",",
"0.5",
",",
"0",
"]",
",",
"[",
"0.5",
",",
"0.5",
",",
"1",
"]",
",",
"[",
"0.5",
",",
"0.5",
",",
"1",
"]",
"]",
")",
"cm",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0",
",",
"0",
",",
"1",
"]",
",",
"[",
"0",
",",
"0",
",",
"1",
"]",
",",
"[",
"0",
",",
"0",
",",
"0",
"]",
"]",
")",
"network",
"=",
"Network",
"(",
"tpm",
",",
"cm",
",",
"node_labels",
"=",
"[",
"'A'",
",",
"'B'",
",",
"'F'",
"]",
")",
"x_state",
"=",
"(",
"1",
",",
"1",
",",
"1",
")",
"y_state",
"=",
"(",
"1",
",",
"1",
",",
"1",
")",
"return",
"Transition",
"(",
"network",
",",
"x_state",
",",
"y_state",
",",
"(",
"0",
",",
"1",
")",
",",
"(",
"2",
",",
")",
")"
] |
The |Transition| for the prevention example from Actual Causation
Figure 5D.
|
[
"The",
"|Transition|",
"for",
"the",
"prevention",
"example",
"from",
"Actual",
"Causation",
"Figure",
"5D",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L1113-L1136
|
15,883
|
wmayner/pyphi
|
benchmarks/benchmarks/subsystem.py
|
clear_subsystem_caches
|
def clear_subsystem_caches(subsys):
"""Clear subsystem caches"""
try:
# New-style caches
subsys._repertoire_cache.clear()
subsys._mice_cache.clear()
except TypeError:
try:
# Pre cache.clear() implementation
subsys._repertoire_cache.cache = {}
subsys._mice_cache.cache = {}
except AttributeError:
# Old school, pre cache refactor
subsys._repertoire_cache = {}
subsys._repertoire_cache_info = [0, 0]
subsys._mice_cache = {}
|
python
|
def clear_subsystem_caches(subsys):
"""Clear subsystem caches"""
try:
# New-style caches
subsys._repertoire_cache.clear()
subsys._mice_cache.clear()
except TypeError:
try:
# Pre cache.clear() implementation
subsys._repertoire_cache.cache = {}
subsys._mice_cache.cache = {}
except AttributeError:
# Old school, pre cache refactor
subsys._repertoire_cache = {}
subsys._repertoire_cache_info = [0, 0]
subsys._mice_cache = {}
|
[
"def",
"clear_subsystem_caches",
"(",
"subsys",
")",
":",
"try",
":",
"# New-style caches",
"subsys",
".",
"_repertoire_cache",
".",
"clear",
"(",
")",
"subsys",
".",
"_mice_cache",
".",
"clear",
"(",
")",
"except",
"TypeError",
":",
"try",
":",
"# Pre cache.clear() implementation",
"subsys",
".",
"_repertoire_cache",
".",
"cache",
"=",
"{",
"}",
"subsys",
".",
"_mice_cache",
".",
"cache",
"=",
"{",
"}",
"except",
"AttributeError",
":",
"# Old school, pre cache refactor",
"subsys",
".",
"_repertoire_cache",
"=",
"{",
"}",
"subsys",
".",
"_repertoire_cache_info",
"=",
"[",
"0",
",",
"0",
"]",
"subsys",
".",
"_mice_cache",
"=",
"{",
"}"
] |
Clear subsystem caches
|
[
"Clear",
"subsystem",
"caches"
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/benchmarks/benchmarks/subsystem.py#L24-L39
|
15,884
|
wmayner/pyphi
|
pyphi/utils.py
|
all_states
|
def all_states(n, big_endian=False):
"""Return all binary states for a system.
Args:
n (int): The number of elements in the system.
big_endian (bool): Whether to return the states in big-endian order
instead of little-endian order.
Yields:
tuple[int]: The next state of an ``n``-element system, in little-endian
order unless ``big_endian`` is ``True``.
"""
if n == 0:
return
for state in product((0, 1), repeat=n):
if big_endian:
yield state
else:
yield state[::-1]
|
python
|
def all_states(n, big_endian=False):
"""Return all binary states for a system.
Args:
n (int): The number of elements in the system.
big_endian (bool): Whether to return the states in big-endian order
instead of little-endian order.
Yields:
tuple[int]: The next state of an ``n``-element system, in little-endian
order unless ``big_endian`` is ``True``.
"""
if n == 0:
return
for state in product((0, 1), repeat=n):
if big_endian:
yield state
else:
yield state[::-1]
|
[
"def",
"all_states",
"(",
"n",
",",
"big_endian",
"=",
"False",
")",
":",
"if",
"n",
"==",
"0",
":",
"return",
"for",
"state",
"in",
"product",
"(",
"(",
"0",
",",
"1",
")",
",",
"repeat",
"=",
"n",
")",
":",
"if",
"big_endian",
":",
"yield",
"state",
"else",
":",
"yield",
"state",
"[",
":",
":",
"-",
"1",
"]"
] |
Return all binary states for a system.
Args:
n (int): The number of elements in the system.
big_endian (bool): Whether to return the states in big-endian order
instead of little-endian order.
Yields:
tuple[int]: The next state of an ``n``-element system, in little-endian
order unless ``big_endian`` is ``True``.
|
[
"Return",
"all",
"binary",
"states",
"for",
"a",
"system",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/utils.py#L27-L46
|
15,885
|
wmayner/pyphi
|
pyphi/utils.py
|
np_hash
|
def np_hash(a):
"""Return a hash of a NumPy array."""
if a is None:
return hash(None)
# Ensure that hashes are equal whatever the ordering in memory (C or
# Fortran)
a = np.ascontiguousarray(a)
# Compute the digest and return a decimal int
return int(hashlib.sha1(a.view(a.dtype)).hexdigest(), 16)
|
python
|
def np_hash(a):
"""Return a hash of a NumPy array."""
if a is None:
return hash(None)
# Ensure that hashes are equal whatever the ordering in memory (C or
# Fortran)
a = np.ascontiguousarray(a)
# Compute the digest and return a decimal int
return int(hashlib.sha1(a.view(a.dtype)).hexdigest(), 16)
|
[
"def",
"np_hash",
"(",
"a",
")",
":",
"if",
"a",
"is",
"None",
":",
"return",
"hash",
"(",
"None",
")",
"# Ensure that hashes are equal whatever the ordering in memory (C or",
"# Fortran)",
"a",
"=",
"np",
".",
"ascontiguousarray",
"(",
"a",
")",
"# Compute the digest and return a decimal int",
"return",
"int",
"(",
"hashlib",
".",
"sha1",
"(",
"a",
".",
"view",
"(",
"a",
".",
"dtype",
")",
")",
".",
"hexdigest",
"(",
")",
",",
"16",
")"
] |
Return a hash of a NumPy array.
|
[
"Return",
"a",
"hash",
"of",
"a",
"NumPy",
"array",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/utils.py#L55-L63
|
15,886
|
wmayner/pyphi
|
pyphi/utils.py
|
powerset
|
def powerset(iterable, nonempty=False, reverse=False):
"""Generate the power set of an iterable.
Args:
iterable (Iterable): The iterable from which to generate the power set.
Keyword Args:
nonempty (boolean): If True, don't include the empty set.
reverse (boolean): If True, reverse the order of the powerset.
Returns:
Iterable: An iterator over the power set.
Example:
>>> ps = powerset(np.arange(2))
>>> list(ps)
[(), (0,), (1,), (0, 1)]
>>> ps = powerset(np.arange(2), nonempty=True)
>>> list(ps)
[(0,), (1,), (0, 1)]
>>> ps = powerset(np.arange(2), nonempty=True, reverse=True)
>>> list(ps)
[(1, 0), (1,), (0,)]
"""
iterable = list(iterable)
if nonempty: # Don't include 0-length subsets
start = 1
else:
start = 0
seq_sizes = range(start, len(iterable) + 1)
if reverse:
seq_sizes = reversed(seq_sizes)
iterable.reverse()
return chain.from_iterable(combinations(iterable, r) for r in seq_sizes)
|
python
|
def powerset(iterable, nonempty=False, reverse=False):
"""Generate the power set of an iterable.
Args:
iterable (Iterable): The iterable from which to generate the power set.
Keyword Args:
nonempty (boolean): If True, don't include the empty set.
reverse (boolean): If True, reverse the order of the powerset.
Returns:
Iterable: An iterator over the power set.
Example:
>>> ps = powerset(np.arange(2))
>>> list(ps)
[(), (0,), (1,), (0, 1)]
>>> ps = powerset(np.arange(2), nonempty=True)
>>> list(ps)
[(0,), (1,), (0, 1)]
>>> ps = powerset(np.arange(2), nonempty=True, reverse=True)
>>> list(ps)
[(1, 0), (1,), (0,)]
"""
iterable = list(iterable)
if nonempty: # Don't include 0-length subsets
start = 1
else:
start = 0
seq_sizes = range(start, len(iterable) + 1)
if reverse:
seq_sizes = reversed(seq_sizes)
iterable.reverse()
return chain.from_iterable(combinations(iterable, r) for r in seq_sizes)
|
[
"def",
"powerset",
"(",
"iterable",
",",
"nonempty",
"=",
"False",
",",
"reverse",
"=",
"False",
")",
":",
"iterable",
"=",
"list",
"(",
"iterable",
")",
"if",
"nonempty",
":",
"# Don't include 0-length subsets",
"start",
"=",
"1",
"else",
":",
"start",
"=",
"0",
"seq_sizes",
"=",
"range",
"(",
"start",
",",
"len",
"(",
"iterable",
")",
"+",
"1",
")",
"if",
"reverse",
":",
"seq_sizes",
"=",
"reversed",
"(",
"seq_sizes",
")",
"iterable",
".",
"reverse",
"(",
")",
"return",
"chain",
".",
"from_iterable",
"(",
"combinations",
"(",
"iterable",
",",
"r",
")",
"for",
"r",
"in",
"seq_sizes",
")"
] |
Generate the power set of an iterable.
Args:
iterable (Iterable): The iterable from which to generate the power set.
Keyword Args:
nonempty (boolean): If True, don't include the empty set.
reverse (boolean): If True, reverse the order of the powerset.
Returns:
Iterable: An iterator over the power set.
Example:
>>> ps = powerset(np.arange(2))
>>> list(ps)
[(), (0,), (1,), (0, 1)]
>>> ps = powerset(np.arange(2), nonempty=True)
>>> list(ps)
[(0,), (1,), (0, 1)]
>>> ps = powerset(np.arange(2), nonempty=True, reverse=True)
>>> list(ps)
[(1, 0), (1,), (0,)]
|
[
"Generate",
"the",
"power",
"set",
"of",
"an",
"iterable",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/utils.py#L146-L183
|
15,887
|
wmayner/pyphi
|
pyphi/utils.py
|
load_data
|
def load_data(directory, num):
"""Load numpy data from the data directory.
The files should stored in ``../data/<dir>`` and named
``0.npy, 1.npy, ... <num - 1>.npy``.
Returns:
list: A list of loaded data, such that ``list[i]`` contains the the
contents of ``i.npy``.
"""
root = os.path.abspath(os.path.dirname(__file__))
def get_path(i): # pylint: disable=missing-docstring
return os.path.join(root, 'data', directory, str(i) + '.npy')
return [np.load(get_path(i)) for i in range(num)]
|
python
|
def load_data(directory, num):
"""Load numpy data from the data directory.
The files should stored in ``../data/<dir>`` and named
``0.npy, 1.npy, ... <num - 1>.npy``.
Returns:
list: A list of loaded data, such that ``list[i]`` contains the the
contents of ``i.npy``.
"""
root = os.path.abspath(os.path.dirname(__file__))
def get_path(i): # pylint: disable=missing-docstring
return os.path.join(root, 'data', directory, str(i) + '.npy')
return [np.load(get_path(i)) for i in range(num)]
|
[
"def",
"load_data",
"(",
"directory",
",",
"num",
")",
":",
"root",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
")",
"def",
"get_path",
"(",
"i",
")",
":",
"# pylint: disable=missing-docstring",
"return",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"'data'",
",",
"directory",
",",
"str",
"(",
"i",
")",
"+",
"'.npy'",
")",
"return",
"[",
"np",
".",
"load",
"(",
"get_path",
"(",
"i",
")",
")",
"for",
"i",
"in",
"range",
"(",
"num",
")",
"]"
] |
Load numpy data from the data directory.
The files should stored in ``../data/<dir>`` and named
``0.npy, 1.npy, ... <num - 1>.npy``.
Returns:
list: A list of loaded data, such that ``list[i]`` contains the the
contents of ``i.npy``.
|
[
"Load",
"numpy",
"data",
"from",
"the",
"data",
"directory",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/utils.py#L186-L201
|
15,888
|
wmayner/pyphi
|
pyphi/utils.py
|
time_annotated
|
def time_annotated(func, *args, **kwargs):
"""Annotate the decorated function or method with the total execution
time.
The result is annotated with a `time` attribute.
"""
start = time()
result = func(*args, **kwargs)
end = time()
result.time = round(end - start, config.PRECISION)
return result
|
python
|
def time_annotated(func, *args, **kwargs):
"""Annotate the decorated function or method with the total execution
time.
The result is annotated with a `time` attribute.
"""
start = time()
result = func(*args, **kwargs)
end = time()
result.time = round(end - start, config.PRECISION)
return result
|
[
"def",
"time_annotated",
"(",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"start",
"=",
"time",
"(",
")",
"result",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"end",
"=",
"time",
"(",
")",
"result",
".",
"time",
"=",
"round",
"(",
"end",
"-",
"start",
",",
"config",
".",
"PRECISION",
")",
"return",
"result"
] |
Annotate the decorated function or method with the total execution
time.
The result is annotated with a `time` attribute.
|
[
"Annotate",
"the",
"decorated",
"function",
"or",
"method",
"with",
"the",
"total",
"execution",
"time",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/utils.py#L207-L217
|
15,889
|
wmayner/pyphi
|
pyphi/models/mechanism.py
|
_null_ria
|
def _null_ria(direction, mechanism, purview, repertoire=None, phi=0.0):
"""The irreducibility analysis for a reducible mechanism."""
# TODO Use properties here to infer mechanism and purview from
# partition yet access them with .mechanism and .partition
return RepertoireIrreducibilityAnalysis(
direction=direction,
mechanism=mechanism,
purview=purview,
partition=None,
repertoire=repertoire,
partitioned_repertoire=None,
phi=phi
)
|
python
|
def _null_ria(direction, mechanism, purview, repertoire=None, phi=0.0):
"""The irreducibility analysis for a reducible mechanism."""
# TODO Use properties here to infer mechanism and purview from
# partition yet access them with .mechanism and .partition
return RepertoireIrreducibilityAnalysis(
direction=direction,
mechanism=mechanism,
purview=purview,
partition=None,
repertoire=repertoire,
partitioned_repertoire=None,
phi=phi
)
|
[
"def",
"_null_ria",
"(",
"direction",
",",
"mechanism",
",",
"purview",
",",
"repertoire",
"=",
"None",
",",
"phi",
"=",
"0.0",
")",
":",
"# TODO Use properties here to infer mechanism and purview from",
"# partition yet access them with .mechanism and .partition",
"return",
"RepertoireIrreducibilityAnalysis",
"(",
"direction",
"=",
"direction",
",",
"mechanism",
"=",
"mechanism",
",",
"purview",
"=",
"purview",
",",
"partition",
"=",
"None",
",",
"repertoire",
"=",
"repertoire",
",",
"partitioned_repertoire",
"=",
"None",
",",
"phi",
"=",
"phi",
")"
] |
The irreducibility analysis for a reducible mechanism.
|
[
"The",
"irreducibility",
"analysis",
"for",
"a",
"reducible",
"mechanism",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/mechanism.py#L135-L147
|
15,890
|
wmayner/pyphi
|
pyphi/models/mechanism.py
|
MaximallyIrreducibleCauseOrEffect.damaged_by_cut
|
def damaged_by_cut(self, subsystem):
"""Return ``True`` if this MICE is affected by the subsystem's cut.
The cut affects the MICE if it either splits the MICE's mechanism
or splits the connections between the purview and mechanism.
"""
return (subsystem.cut.splits_mechanism(self.mechanism) or
np.any(self._relevant_connections(subsystem) *
subsystem.cut.cut_matrix(subsystem.network.size) == 1))
|
python
|
def damaged_by_cut(self, subsystem):
"""Return ``True`` if this MICE is affected by the subsystem's cut.
The cut affects the MICE if it either splits the MICE's mechanism
or splits the connections between the purview and mechanism.
"""
return (subsystem.cut.splits_mechanism(self.mechanism) or
np.any(self._relevant_connections(subsystem) *
subsystem.cut.cut_matrix(subsystem.network.size) == 1))
|
[
"def",
"damaged_by_cut",
"(",
"self",
",",
"subsystem",
")",
":",
"return",
"(",
"subsystem",
".",
"cut",
".",
"splits_mechanism",
"(",
"self",
".",
"mechanism",
")",
"or",
"np",
".",
"any",
"(",
"self",
".",
"_relevant_connections",
"(",
"subsystem",
")",
"*",
"subsystem",
".",
"cut",
".",
"cut_matrix",
"(",
"subsystem",
".",
"network",
".",
"size",
")",
"==",
"1",
")",
")"
] |
Return ``True`` if this MICE is affected by the subsystem's cut.
The cut affects the MICE if it either splits the MICE's mechanism
or splits the connections between the purview and mechanism.
|
[
"Return",
"True",
"if",
"this",
"MICE",
"is",
"affected",
"by",
"the",
"subsystem",
"s",
"cut",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/mechanism.py#L277-L285
|
15,891
|
wmayner/pyphi
|
pyphi/models/mechanism.py
|
Concept.eq_repertoires
|
def eq_repertoires(self, other):
"""Return whether this concept has the same repertoires as another.
.. warning::
This only checks if the cause and effect repertoires are equal as
arrays; mechanisms, purviews, or even the nodes that the mechanism
and purview indices refer to, might be different.
"""
return (
np.array_equal(self.cause_repertoire, other.cause_repertoire) and
np.array_equal(self.effect_repertoire, other.effect_repertoire))
|
python
|
def eq_repertoires(self, other):
"""Return whether this concept has the same repertoires as another.
.. warning::
This only checks if the cause and effect repertoires are equal as
arrays; mechanisms, purviews, or even the nodes that the mechanism
and purview indices refer to, might be different.
"""
return (
np.array_equal(self.cause_repertoire, other.cause_repertoire) and
np.array_equal(self.effect_repertoire, other.effect_repertoire))
|
[
"def",
"eq_repertoires",
"(",
"self",
",",
"other",
")",
":",
"return",
"(",
"np",
".",
"array_equal",
"(",
"self",
".",
"cause_repertoire",
",",
"other",
".",
"cause_repertoire",
")",
"and",
"np",
".",
"array_equal",
"(",
"self",
".",
"effect_repertoire",
",",
"other",
".",
"effect_repertoire",
")",
")"
] |
Return whether this concept has the same repertoires as another.
.. warning::
This only checks if the cause and effect repertoires are equal as
arrays; mechanisms, purviews, or even the nodes that the mechanism
and purview indices refer to, might be different.
|
[
"Return",
"whether",
"this",
"concept",
"has",
"the",
"same",
"repertoires",
"as",
"another",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/mechanism.py#L431-L441
|
15,892
|
wmayner/pyphi
|
pyphi/models/mechanism.py
|
Concept.emd_eq
|
def emd_eq(self, other):
"""Return whether this concept is equal to another in the context of
an EMD calculation.
"""
return (self.phi == other.phi and
self.mechanism == other.mechanism and
self.eq_repertoires(other))
|
python
|
def emd_eq(self, other):
"""Return whether this concept is equal to another in the context of
an EMD calculation.
"""
return (self.phi == other.phi and
self.mechanism == other.mechanism and
self.eq_repertoires(other))
|
[
"def",
"emd_eq",
"(",
"self",
",",
"other",
")",
":",
"return",
"(",
"self",
".",
"phi",
"==",
"other",
".",
"phi",
"and",
"self",
".",
"mechanism",
"==",
"other",
".",
"mechanism",
"and",
"self",
".",
"eq_repertoires",
"(",
"other",
")",
")"
] |
Return whether this concept is equal to another in the context of
an EMD calculation.
|
[
"Return",
"whether",
"this",
"concept",
"is",
"equal",
"to",
"another",
"in",
"the",
"context",
"of",
"an",
"EMD",
"calculation",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/mechanism.py#L443-L449
|
15,893
|
wmayner/pyphi
|
pyphi/actual.py
|
directed_account
|
def directed_account(transition, direction, mechanisms=False, purviews=False,
allow_neg=False):
"""Return the set of all |CausalLinks| of the specified direction."""
if mechanisms is False:
mechanisms = utils.powerset(transition.mechanism_indices(direction),
nonempty=True)
links = [
transition.find_causal_link(direction, mechanism, purviews=purviews,
allow_neg=allow_neg)
for mechanism in mechanisms]
# Filter out causal links with zero alpha
return DirectedAccount(filter(None, links))
|
python
|
def directed_account(transition, direction, mechanisms=False, purviews=False,
allow_neg=False):
"""Return the set of all |CausalLinks| of the specified direction."""
if mechanisms is False:
mechanisms = utils.powerset(transition.mechanism_indices(direction),
nonempty=True)
links = [
transition.find_causal_link(direction, mechanism, purviews=purviews,
allow_neg=allow_neg)
for mechanism in mechanisms]
# Filter out causal links with zero alpha
return DirectedAccount(filter(None, links))
|
[
"def",
"directed_account",
"(",
"transition",
",",
"direction",
",",
"mechanisms",
"=",
"False",
",",
"purviews",
"=",
"False",
",",
"allow_neg",
"=",
"False",
")",
":",
"if",
"mechanisms",
"is",
"False",
":",
"mechanisms",
"=",
"utils",
".",
"powerset",
"(",
"transition",
".",
"mechanism_indices",
"(",
"direction",
")",
",",
"nonempty",
"=",
"True",
")",
"links",
"=",
"[",
"transition",
".",
"find_causal_link",
"(",
"direction",
",",
"mechanism",
",",
"purviews",
"=",
"purviews",
",",
"allow_neg",
"=",
"allow_neg",
")",
"for",
"mechanism",
"in",
"mechanisms",
"]",
"# Filter out causal links with zero alpha",
"return",
"DirectedAccount",
"(",
"filter",
"(",
"None",
",",
"links",
")",
")"
] |
Return the set of all |CausalLinks| of the specified direction.
|
[
"Return",
"the",
"set",
"of",
"all",
"|CausalLinks|",
"of",
"the",
"specified",
"direction",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L440-L452
|
15,894
|
wmayner/pyphi
|
pyphi/actual.py
|
account
|
def account(transition, direction=Direction.BIDIRECTIONAL):
"""Return the set of all causal links for a |Transition|.
Args:
transition (Transition): The transition of interest.
Keyword Args:
direction (Direction): By default the account contains actual causes
and actual effects.
"""
if direction != Direction.BIDIRECTIONAL:
return directed_account(transition, direction)
return Account(directed_account(transition, Direction.CAUSE) +
directed_account(transition, Direction.EFFECT))
|
python
|
def account(transition, direction=Direction.BIDIRECTIONAL):
"""Return the set of all causal links for a |Transition|.
Args:
transition (Transition): The transition of interest.
Keyword Args:
direction (Direction): By default the account contains actual causes
and actual effects.
"""
if direction != Direction.BIDIRECTIONAL:
return directed_account(transition, direction)
return Account(directed_account(transition, Direction.CAUSE) +
directed_account(transition, Direction.EFFECT))
|
[
"def",
"account",
"(",
"transition",
",",
"direction",
"=",
"Direction",
".",
"BIDIRECTIONAL",
")",
":",
"if",
"direction",
"!=",
"Direction",
".",
"BIDIRECTIONAL",
":",
"return",
"directed_account",
"(",
"transition",
",",
"direction",
")",
"return",
"Account",
"(",
"directed_account",
"(",
"transition",
",",
"Direction",
".",
"CAUSE",
")",
"+",
"directed_account",
"(",
"transition",
",",
"Direction",
".",
"EFFECT",
")",
")"
] |
Return the set of all causal links for a |Transition|.
Args:
transition (Transition): The transition of interest.
Keyword Args:
direction (Direction): By default the account contains actual causes
and actual effects.
|
[
"Return",
"the",
"set",
"of",
"all",
"causal",
"links",
"for",
"a",
"|Transition|",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L455-L469
|
15,895
|
wmayner/pyphi
|
pyphi/actual.py
|
_evaluate_cut
|
def _evaluate_cut(transition, cut, unpartitioned_account,
direction=Direction.BIDIRECTIONAL):
"""Find the |AcSystemIrreducibilityAnalysis| for a given cut."""
cut_transition = transition.apply_cut(cut)
partitioned_account = account(cut_transition, direction)
log.debug("Finished evaluating %s.", cut)
alpha = account_distance(unpartitioned_account, partitioned_account)
return AcSystemIrreducibilityAnalysis(
alpha=round(alpha, config.PRECISION),
direction=direction,
account=unpartitioned_account,
partitioned_account=partitioned_account,
transition=transition,
cut=cut)
|
python
|
def _evaluate_cut(transition, cut, unpartitioned_account,
direction=Direction.BIDIRECTIONAL):
"""Find the |AcSystemIrreducibilityAnalysis| for a given cut."""
cut_transition = transition.apply_cut(cut)
partitioned_account = account(cut_transition, direction)
log.debug("Finished evaluating %s.", cut)
alpha = account_distance(unpartitioned_account, partitioned_account)
return AcSystemIrreducibilityAnalysis(
alpha=round(alpha, config.PRECISION),
direction=direction,
account=unpartitioned_account,
partitioned_account=partitioned_account,
transition=transition,
cut=cut)
|
[
"def",
"_evaluate_cut",
"(",
"transition",
",",
"cut",
",",
"unpartitioned_account",
",",
"direction",
"=",
"Direction",
".",
"BIDIRECTIONAL",
")",
":",
"cut_transition",
"=",
"transition",
".",
"apply_cut",
"(",
"cut",
")",
"partitioned_account",
"=",
"account",
"(",
"cut_transition",
",",
"direction",
")",
"log",
".",
"debug",
"(",
"\"Finished evaluating %s.\"",
",",
"cut",
")",
"alpha",
"=",
"account_distance",
"(",
"unpartitioned_account",
",",
"partitioned_account",
")",
"return",
"AcSystemIrreducibilityAnalysis",
"(",
"alpha",
"=",
"round",
"(",
"alpha",
",",
"config",
".",
"PRECISION",
")",
",",
"direction",
"=",
"direction",
",",
"account",
"=",
"unpartitioned_account",
",",
"partitioned_account",
"=",
"partitioned_account",
",",
"transition",
"=",
"transition",
",",
"cut",
"=",
"cut",
")"
] |
Find the |AcSystemIrreducibilityAnalysis| for a given cut.
|
[
"Find",
"the",
"|AcSystemIrreducibilityAnalysis|",
"for",
"a",
"given",
"cut",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L492-L507
|
15,896
|
wmayner/pyphi
|
pyphi/actual.py
|
_get_cuts
|
def _get_cuts(transition, direction):
"""A list of possible cuts to a transition."""
n = transition.network.size
if direction is Direction.BIDIRECTIONAL:
yielded = set()
for cut in chain(_get_cuts(transition, Direction.CAUSE),
_get_cuts(transition, Direction.EFFECT)):
cm = utils.np_hashable(cut.cut_matrix(n))
if cm not in yielded:
yielded.add(cm)
yield cut
else:
mechanism = transition.mechanism_indices(direction)
purview = transition.purview_indices(direction)
for partition in mip_partitions(mechanism, purview,
transition.node_labels):
yield ActualCut(direction, partition, transition.node_labels)
|
python
|
def _get_cuts(transition, direction):
"""A list of possible cuts to a transition."""
n = transition.network.size
if direction is Direction.BIDIRECTIONAL:
yielded = set()
for cut in chain(_get_cuts(transition, Direction.CAUSE),
_get_cuts(transition, Direction.EFFECT)):
cm = utils.np_hashable(cut.cut_matrix(n))
if cm not in yielded:
yielded.add(cm)
yield cut
else:
mechanism = transition.mechanism_indices(direction)
purview = transition.purview_indices(direction)
for partition in mip_partitions(mechanism, purview,
transition.node_labels):
yield ActualCut(direction, partition, transition.node_labels)
|
[
"def",
"_get_cuts",
"(",
"transition",
",",
"direction",
")",
":",
"n",
"=",
"transition",
".",
"network",
".",
"size",
"if",
"direction",
"is",
"Direction",
".",
"BIDIRECTIONAL",
":",
"yielded",
"=",
"set",
"(",
")",
"for",
"cut",
"in",
"chain",
"(",
"_get_cuts",
"(",
"transition",
",",
"Direction",
".",
"CAUSE",
")",
",",
"_get_cuts",
"(",
"transition",
",",
"Direction",
".",
"EFFECT",
")",
")",
":",
"cm",
"=",
"utils",
".",
"np_hashable",
"(",
"cut",
".",
"cut_matrix",
"(",
"n",
")",
")",
"if",
"cm",
"not",
"in",
"yielded",
":",
"yielded",
".",
"add",
"(",
"cm",
")",
"yield",
"cut",
"else",
":",
"mechanism",
"=",
"transition",
".",
"mechanism_indices",
"(",
"direction",
")",
"purview",
"=",
"transition",
".",
"purview_indices",
"(",
"direction",
")",
"for",
"partition",
"in",
"mip_partitions",
"(",
"mechanism",
",",
"purview",
",",
"transition",
".",
"node_labels",
")",
":",
"yield",
"ActualCut",
"(",
"direction",
",",
"partition",
",",
"transition",
".",
"node_labels",
")"
] |
A list of possible cuts to a transition.
|
[
"A",
"list",
"of",
"possible",
"cuts",
"to",
"a",
"transition",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L511-L529
|
15,897
|
wmayner/pyphi
|
pyphi/actual.py
|
sia
|
def sia(transition, direction=Direction.BIDIRECTIONAL):
"""Return the minimal information partition of a transition in a specific
direction.
Args:
transition (Transition): The candidate system.
Returns:
AcSystemIrreducibilityAnalysis: A nested structure containing all the
data from the intermediate calculations. The top level contains the
basic irreducibility information for the given subsystem.
"""
validate.direction(direction, allow_bi=True)
log.info("Calculating big-alpha for %s...", transition)
if not transition:
log.info('Transition %s is empty; returning null SIA '
'immediately.', transition)
return _null_ac_sia(transition, direction)
if not connectivity.is_weak(transition.network.cm,
transition.node_indices):
log.info('%s is not strongly/weakly connected; returning null SIA '
'immediately.', transition)
return _null_ac_sia(transition, direction)
log.debug("Finding unpartitioned account...")
unpartitioned_account = account(transition, direction)
log.debug("Found unpartitioned account.")
if not unpartitioned_account:
log.info('Empty unpartitioned account; returning null AC SIA '
'immediately.')
return _null_ac_sia(transition, direction)
cuts = _get_cuts(transition, direction)
engine = ComputeACSystemIrreducibility(
cuts, transition, direction, unpartitioned_account)
result = engine.run_sequential()
log.info("Finished calculating big-ac-phi data for %s.", transition)
log.debug("RESULT: \n%s", result)
return result
|
python
|
def sia(transition, direction=Direction.BIDIRECTIONAL):
"""Return the minimal information partition of a transition in a specific
direction.
Args:
transition (Transition): The candidate system.
Returns:
AcSystemIrreducibilityAnalysis: A nested structure containing all the
data from the intermediate calculations. The top level contains the
basic irreducibility information for the given subsystem.
"""
validate.direction(direction, allow_bi=True)
log.info("Calculating big-alpha for %s...", transition)
if not transition:
log.info('Transition %s is empty; returning null SIA '
'immediately.', transition)
return _null_ac_sia(transition, direction)
if not connectivity.is_weak(transition.network.cm,
transition.node_indices):
log.info('%s is not strongly/weakly connected; returning null SIA '
'immediately.', transition)
return _null_ac_sia(transition, direction)
log.debug("Finding unpartitioned account...")
unpartitioned_account = account(transition, direction)
log.debug("Found unpartitioned account.")
if not unpartitioned_account:
log.info('Empty unpartitioned account; returning null AC SIA '
'immediately.')
return _null_ac_sia(transition, direction)
cuts = _get_cuts(transition, direction)
engine = ComputeACSystemIrreducibility(
cuts, transition, direction, unpartitioned_account)
result = engine.run_sequential()
log.info("Finished calculating big-ac-phi data for %s.", transition)
log.debug("RESULT: \n%s", result)
return result
|
[
"def",
"sia",
"(",
"transition",
",",
"direction",
"=",
"Direction",
".",
"BIDIRECTIONAL",
")",
":",
"validate",
".",
"direction",
"(",
"direction",
",",
"allow_bi",
"=",
"True",
")",
"log",
".",
"info",
"(",
"\"Calculating big-alpha for %s...\"",
",",
"transition",
")",
"if",
"not",
"transition",
":",
"log",
".",
"info",
"(",
"'Transition %s is empty; returning null SIA '",
"'immediately.'",
",",
"transition",
")",
"return",
"_null_ac_sia",
"(",
"transition",
",",
"direction",
")",
"if",
"not",
"connectivity",
".",
"is_weak",
"(",
"transition",
".",
"network",
".",
"cm",
",",
"transition",
".",
"node_indices",
")",
":",
"log",
".",
"info",
"(",
"'%s is not strongly/weakly connected; returning null SIA '",
"'immediately.'",
",",
"transition",
")",
"return",
"_null_ac_sia",
"(",
"transition",
",",
"direction",
")",
"log",
".",
"debug",
"(",
"\"Finding unpartitioned account...\"",
")",
"unpartitioned_account",
"=",
"account",
"(",
"transition",
",",
"direction",
")",
"log",
".",
"debug",
"(",
"\"Found unpartitioned account.\"",
")",
"if",
"not",
"unpartitioned_account",
":",
"log",
".",
"info",
"(",
"'Empty unpartitioned account; returning null AC SIA '",
"'immediately.'",
")",
"return",
"_null_ac_sia",
"(",
"transition",
",",
"direction",
")",
"cuts",
"=",
"_get_cuts",
"(",
"transition",
",",
"direction",
")",
"engine",
"=",
"ComputeACSystemIrreducibility",
"(",
"cuts",
",",
"transition",
",",
"direction",
",",
"unpartitioned_account",
")",
"result",
"=",
"engine",
".",
"run_sequential",
"(",
")",
"log",
".",
"info",
"(",
"\"Finished calculating big-ac-phi data for %s.\"",
",",
"transition",
")",
"log",
".",
"debug",
"(",
"\"RESULT: \\n%s\"",
",",
"result",
")",
"return",
"result"
] |
Return the minimal information partition of a transition in a specific
direction.
Args:
transition (Transition): The candidate system.
Returns:
AcSystemIrreducibilityAnalysis: A nested structure containing all the
data from the intermediate calculations. The top level contains the
basic irreducibility information for the given subsystem.
|
[
"Return",
"the",
"minimal",
"information",
"partition",
"of",
"a",
"transition",
"in",
"a",
"specific",
"direction",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L532-L573
|
15,898
|
wmayner/pyphi
|
pyphi/actual.py
|
nexus
|
def nexus(network, before_state, after_state,
direction=Direction.BIDIRECTIONAL):
"""Return a tuple of all irreducible nexus of the network."""
validate.is_network(network)
sias = (sia(transition, direction) for transition in
transitions(network, before_state, after_state))
return tuple(sorted(filter(None, sias), reverse=True))
|
python
|
def nexus(network, before_state, after_state,
direction=Direction.BIDIRECTIONAL):
"""Return a tuple of all irreducible nexus of the network."""
validate.is_network(network)
sias = (sia(transition, direction) for transition in
transitions(network, before_state, after_state))
return tuple(sorted(filter(None, sias), reverse=True))
|
[
"def",
"nexus",
"(",
"network",
",",
"before_state",
",",
"after_state",
",",
"direction",
"=",
"Direction",
".",
"BIDIRECTIONAL",
")",
":",
"validate",
".",
"is_network",
"(",
"network",
")",
"sias",
"=",
"(",
"sia",
"(",
"transition",
",",
"direction",
")",
"for",
"transition",
"in",
"transitions",
"(",
"network",
",",
"before_state",
",",
"after_state",
")",
")",
"return",
"tuple",
"(",
"sorted",
"(",
"filter",
"(",
"None",
",",
"sias",
")",
",",
"reverse",
"=",
"True",
")",
")"
] |
Return a tuple of all irreducible nexus of the network.
|
[
"Return",
"a",
"tuple",
"of",
"all",
"irreducible",
"nexus",
"of",
"the",
"network",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L626-L633
|
15,899
|
wmayner/pyphi
|
pyphi/actual.py
|
causal_nexus
|
def causal_nexus(network, before_state, after_state,
direction=Direction.BIDIRECTIONAL):
"""Return the causal nexus of the network."""
validate.is_network(network)
log.info("Calculating causal nexus...")
result = nexus(network, before_state, after_state, direction)
if result:
result = max(result)
else:
null_transition = Transition(
network, before_state, after_state, (), ())
result = _null_ac_sia(null_transition, direction)
log.info("Finished calculating causal nexus.")
log.debug("RESULT: \n%s", result)
return result
|
python
|
def causal_nexus(network, before_state, after_state,
direction=Direction.BIDIRECTIONAL):
"""Return the causal nexus of the network."""
validate.is_network(network)
log.info("Calculating causal nexus...")
result = nexus(network, before_state, after_state, direction)
if result:
result = max(result)
else:
null_transition = Transition(
network, before_state, after_state, (), ())
result = _null_ac_sia(null_transition, direction)
log.info("Finished calculating causal nexus.")
log.debug("RESULT: \n%s", result)
return result
|
[
"def",
"causal_nexus",
"(",
"network",
",",
"before_state",
",",
"after_state",
",",
"direction",
"=",
"Direction",
".",
"BIDIRECTIONAL",
")",
":",
"validate",
".",
"is_network",
"(",
"network",
")",
"log",
".",
"info",
"(",
"\"Calculating causal nexus...\"",
")",
"result",
"=",
"nexus",
"(",
"network",
",",
"before_state",
",",
"after_state",
",",
"direction",
")",
"if",
"result",
":",
"result",
"=",
"max",
"(",
"result",
")",
"else",
":",
"null_transition",
"=",
"Transition",
"(",
"network",
",",
"before_state",
",",
"after_state",
",",
"(",
")",
",",
"(",
")",
")",
"result",
"=",
"_null_ac_sia",
"(",
"null_transition",
",",
"direction",
")",
"log",
".",
"info",
"(",
"\"Finished calculating causal nexus.\"",
")",
"log",
".",
"debug",
"(",
"\"RESULT: \\n%s\"",
",",
"result",
")",
"return",
"result"
] |
Return the causal nexus of the network.
|
[
"Return",
"the",
"causal",
"nexus",
"of",
"the",
"network",
"."
] |
deeca69a084d782a6fde7bf26f59e93b593c5d77
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L636-L652
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.