| function_name (string, 1–63 chars) | docstring (string, 50–5.89k chars) | masked_code (string, 50–882k chars) | implementation (string, 169–12.9k chars) | start_line (int32, 1–14.6k) | end_line (int32, 16–14.6k) | file_content (string, 274–882k chars) |
|---|---|---|---|---|---|---|
reorder_categories | Reorders categories as specified in new_categories.
`new_categories` needs to include all old categories and no new category
items.
Raises
------
ValueError
If the new categories do not contain all old category items or if they
contain any new ones
Parameters
----------
new_categories : Index-like
The categories in new or... | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def reorder_categories(self, new_categories, ordered=None, inplace=False):
""" Reorders categories as specified in new_categories.
`new_categories` need to include all old categories and no new category
items.
Raises
------
ValueError
If the new categori... | 945 | 985 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
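The row above documents pandas' `Categorical.reorder_categories`; a minimal usage sketch (assuming a pandas version matching the `ordered`/`inplace` signature shown in the row):

```python
import pandas as pd

cat = pd.Categorical(["b", "a", "c", "a"], categories=["a", "b", "c"])
# Same category items, new order; ordered=True also makes the result ordered.
reordered = cat.reorder_categories(["c", "b", "a"], ordered=True)
print(list(reordered.categories))  # ['c', 'b', 'a']
# cat.reorder_categories(["a", "b"]) would raise ValueError: items don't match.
```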
add_categories | Add new categories.
`new_categories` will be included at the last/highest place in the
categories and will be unused directly after this call.
Raises
------
ValueError
If the new categories include old categories or do not validate as
categories
Parameters
----------
new_categories : category or list-like of... | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def add_categories(self, new_categories, inplace=False):
""" Add new categories.
`new_categories` will be included at the last/highest place in the
categories and will be unused directly after this call.
Raises
------
ValueError
If the new categories inc... | 987 | 1,034 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
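For `add_categories`, the new items land at the end of the category list and start out unused; a short sketch:

```python
import pandas as pd

cat = pd.Categorical(["a", "b", "a"])
extended = cat.add_categories(["c"])
print(list(extended.categories))  # ['a', 'b', 'c'] -- 'c' is present but unused
# extended.add_categories(["a"]) would raise ValueError: 'a' already exists.
```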
remove_categories | Removes the specified categories.
`removals` must be included in the old categories. Values which were in
the removed categories will be set to NaN
Raises
------
ValueError
If the removals are not contained in the categories
Parameters
----------
removals : category or list of categories
The categories which ... | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def remove_categories(self, removals, inplace=False):
""" Removes the specified categories.
`removals` must be included in the old categories. Values which were in
the removed categories will be set to NaN
Raises
------
ValueError
If the removals are not... | 1,036 | 1,086 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
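`remove_categories` is the inverse operation: values that belonged to a removed category become NaN, as a quick sketch shows:

```python
import pandas as pd

cat = pd.Categorical(["a", "b", "c", "a"])
trimmed = cat.remove_categories(["c"])
print(trimmed)  # ['a', 'b', NaN, 'a']; Categories (2, object): ['a', 'b']
```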
remove_unused_categories | Removes categories which are not used.
Parameters
----------
inplace : boolean (default: False)
Whether or not to drop unused categories inplace or return a copy of
this categorical with unused categories dropped.
Returns
-------
cat : Categorical with unused categories dropped or None if inplace.
See also
---... | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def remove_unused_categories(self, inplace=False):
""" Removes categories which are not used.
Parameters
----------
inplace : boolean (default: False)
Whether or not to drop unused categories inplace or return a copy of
this categorical with unused categories d... | 1,088 | 1,123 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
value_counts | Returns a Series containing counts of each category.
Every category will have an entry, even those with a count of 0.
Parameters
----------
dropna : boolean, default True
Don't include counts of NaN.
Returns
-------
counts : Series
See Also
--------
Series.value_counts | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def value_counts(self, dropna=True):
"""
Returns a Series containing counts of each category.
Every category will have an entry, even those with a count of 0.
Parameters
----------
dropna : boolean, default True
Don't include counts of NaN.
Retu... | 1,419 | 1,456 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
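The `value_counts` row promises an entry for every category, including zero counts; illustrated here through a categorical `Series`, which stays on the public API:

```python
import pandas as pd

s = pd.Series(pd.Categorical(["a", "a", "b"], categories=["a", "b", "c"]))
print(s.value_counts())
# a    2
# b    1
# c    0   <- unused category still gets an entry
```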
get_values | Return the values.
For internal compatibility with pandas formatting.
Returns
-------
values : numpy array
A numpy array of the same dtype as categorical.categories.dtype or
Index if datetime / periods | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def get_values(self):
""" Return the values.
For internal compatibility with pandas formatting.
Returns
-------
values : numpy array
A numpy array of the same dtype as categorical.categories.dtype or
Index if datetime / periods
"""
# ... | 1,458 | 1,472 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
sort_values | Sorts the Categorical by category value returning a new
Categorical by default.
While an ordering is applied to the category values, sorting in this
context refers more to organizing and grouping together based on
matching category values. Thus, this function can be called on an
unordered Categorical instance unlike t... | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def sort_values(self, inplace=False, ascending=True, na_position='last'):
""" Sorts the Categorical by category value returning a new
Categorical by default.
While an ordering is applied to the category values, sorting in this
context refers more to organizing and grouping together ... | 1,530 | 1,634 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
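`sort_values` works on unordered categoricals too, since in this context sorting only groups matching category values; a sketch of the `na_position` behaviour:

```python
import pandas as pd

cat = pd.Categorical(["b", None, "a"], categories=["a", "b"])
print(cat.sort_values(ascending=True, na_position="last"))
# ['a', 'b', NaN]; Categories (2, object): ['a', 'b']
```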
_values_for_rank | For correctly ranking ordered categorical data. See GH#15420
Ordered categorical data should be ranked on the basis of
codes with -1 translated to NaN.
Returns
-------
numpy array | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def _values_for_rank(self):
"""
For correctly ranking ordered categorical data. See GH#15420
Ordered categorical data should be ranked on the basis of
codes with -1 translated to NaN.
Returns
-------
numpy array
"""
from pandas import Series... | 1,636 | 1,663 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
_reverse_indexer | Compute the inverse of a categorical, returning
a dict of categories -> indexers.
*This is an internal function*
Returns
-------
dict of categories -> indexers
Example
-------
In [1]: c = pd.Categorical(list('aabca'))
In [2]: c
Out[2]:
[a, a, b, c, a]
Categories (3, object): [a, b, c]
In [3]: c.categories
Out[3]: ... | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def _reverse_indexer(self):
"""
Compute the inverse of a categorical, returning
a dict of categories -> indexers.
*This is an internal function*
Returns
-------
dict of categories -> indexers
Example
-------
In [1]: c = pd.Categorica... | 2,032 | 2,069 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
min | The minimum value of the object.
Only ordered `Categoricals` have a minimum!
Raises
------
TypeError
If the `Categorical` is not `ordered`.
Returns
-------
min : the minimum of this `Categorical` | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def min(self, numeric_only=None, **kwargs):
""" The minimum value of the object.
Only ordered `Categoricals` have a minimum!
Raises
------
TypeError
If the `Categorical` is not `ordered`.
Returns
-------
min : the minimum of this `Catego... | 2,079 | 2,102 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
max | The maximum value of the object.
Only ordered `Categoricals` have a maximum!
Raises
------
TypeError
If the `Categorical` is not `ordered`.
Returns
-------
max : the maximum of this `Categorical` | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def max(self, numeric_only=None, **kwargs):
""" The maximum value of the object.
Only ordered `Categoricals` have a maximum!
Raises
------
TypeError
If the `Categorical` is not `ordered`.
Returns
-------
max : the maximum of this `Catego... | 2,104 | 2,127 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
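The `min` and `max` rows both hinge on orderedness; a sketch of the contrast:

```python
import pandas as pd

ordered = pd.Categorical(["b", "a", "c"], categories=["a", "b", "c"], ordered=True)
print(ordered.min(), ordered.max())  # a c

unordered = pd.Categorical(["b", "a"])
# unordered.min() raises TypeError: Categorical is not ordered for operation min
```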
mode | Returns the mode(s) of the Categorical.
Always returns `Categorical` even if only one value.
Parameters
----------
dropna : boolean, default True
Don't consider counts of NaN/NaT.
.. versionadded:: 0.24.0
Returns
-------
modes : `Categorical` (sorted) | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def mode(self, dropna=True):
"""
Returns the mode(s) of the Categorical.
Always returns `Categorical` even if only one value.
Parameters
----------
dropna : boolean, default True
Don't consider counts of NaN/NaT.
.. versionadded:: 0.24.0
... | 2,129 | 2,153 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
unique | Return the ``Categorical`` whose ``categories`` and ``codes`` are
unique. Unused categories are NOT returned.
- unordered category: values and categories are sorted by appearance
order.
- ordered category: values are sorted by appearance order; categories
keep their existing order.
Returns
-------
unique values : ``Ca... | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def unique(self):
"""
Return the ``Categorical`` which ``categories`` and ``codes`` are
unique. Unused categories are NOT returned.
- unordered category: values and categories are sorted by appearance
order.
- ordered category: values are sorted by appearance order... | 2,155 | 2,209 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
isin | Check whether `values` are contained in Categorical.
Return a boolean NumPy Array showing whether each element in
the Categorical matches an element in the passed sequence of
`values` exactly.
Parameters
----------
values : set or list-like
The sequence of values to test. Passing in a single string will
raise... | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... | def isin(self, values):
"""
Check whether `values` are contained in Categorical.
Return a boolean NumPy Array showing whether each element in
the Categorical matches an element in the passed sequence of
`values` exactly.
Parameters
----------
values ... | 2,308 | 2,359 | # pylint: disable=E1101,W0232
import numpy as np
from warnings import warn
import textwrap
from pandas import compat
from pandas.compat import u, lzip
from pandas._libs import lib, algos as libalgos
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndexClass, ABCCategoricalIndex)
from pandas.core.dtypes.mi... |
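`isin` does exact element-wise membership tests and rejects bare strings, per the truncated note in the row; a sketch:

```python
import pandas as pd

cat = pd.Categorical(["a", "b", "c"])
print(cat.isin(["a", "c"]))  # [ True False  True]
# cat.isin("a") raises TypeError: only list-likes are accepted, not a single string
```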
submit | Submits a transaction to Pay.ir.
When called, the method submits the necessary information about the transaction to Pay.ir and returns an
HttpResponseRedirect object that can redirect the user to the gateway, if nothing goes wrong. In case of an
error, a GatewayError is raised, containing the error_code and error_messa... | from django.db import models
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from django.shortcuts import redirect
from django.urls import reverse
from django.utils import timezone
import requests
from . import exceptions
class Gateway(models.Model):
label = models.CharFie... | def submit(self, request, transaction, mobile: str = None, valid_card_number: str = None, callback: str = None):
"""Submits a transaction to Pay.ir.
When called, the method submits the necessary information about the transaction to Pay.ir and returns a
HttpResponseRedirect object that can r... | 37 | 57 | from django.db import models
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from django.shortcuts import redirect
from django.urls import reverse
from django.utils import timezone
import requests
from . import exceptions
class Gateway(models.Model):
label = models.CharFie... |
create_and_submit | Creates a transaction object and submits the transaction to Pay.ir.
When called, the method submits the necessary information about the transaction to Pay.ir and returns an
HttpResponseRedirect object that can redirect the user to the gateway, if nothing goes wrong. In case of an
error, a GatewayError is raised, contai... | from django.db import models
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from django.shortcuts import redirect
from django.urls import reverse
from django.utils import timezone
import requests
from . import exceptions
class Gateway(models.Model):
label = models.CharFie... | def create_and_submit(self, request, account, amount: int, mobile: str = None, valid_card_number: str = None, callback: str = None):
"""Creates a transaction object and submits the transaction to Pay.ir.
When called, the method submits the necessary information about the transaction to Pay.ir and r... | 59 | 75 | from django.db import models
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from django.shortcuts import redirect
from django.urls import reverse
from django.utils import timezone
import requests
from . import exceptions
class Gateway(models.Model):
label = models.CharFie... |
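The two Pay.ir rows describe methods on a Django `Gateway` model. A hypothetical view tying them together; the `Gateway.objects.first()` lookup, the `exceptions.GatewayError` name (suggested by the `from . import exceptions` line and the docstring), the import paths, and passing `request.user` as the `account` argument are all illustrative assumptions:

```python
from django.http import HttpResponse
# Hypothetical import paths for the app's model and exceptions modules:
# from payment.models import Gateway
# from payment import exceptions

def start_payment(request):
    gateway = Gateway.objects.first()  # illustrative: choose your gateway record
    try:
        # On success, create_and_submit returns an HttpResponseRedirect to Pay.ir.
        return gateway.create_and_submit(request, request.user, amount=10000)
    except exceptions.GatewayError as exc:
        # Per the docstring, the error carries error_code and error_message.
        return HttpResponse("payment failed: %s" % exc, status=502)
```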
__call__ | params:
s: [B*T, x]
visual_s: [B*T, y]
cell_state: Tuple([B, z],)
return:
feat: [B, a]
cell_state: Tuple([B, z],) |
import numpy as np
import tensorflow as tf
from copy import deepcopy
from abc import ABC, abstractmethod
from tensorflow.keras import Model as M
from rls.utils.indexs import OutputNetworkType
from rls.nn.networks import get_visual_network_from_type
from rls.nn.models import get_output_network_from_type
from rls.nn.... | def __call__(self, s, visual_s, cell_state, *, need_split=False):
'''
params:
s: [B*T, x]
visual_s: [B*T, y]
cell_state: Tuple([B, z],)
return:
feat: [B, a]
cell_state: Tuple([B, z],)
'''
batch_size = tf.shape(s)[0]
... | 99 | 126 |
import numpy as np
import tensorflow as tf
from copy import deepcopy
from abc import ABC, abstractmethod
from tensorflow.keras import Model as M
from rls.utils.indexs import OutputNetworkType
from rls.nn.networks import get_visual_network_from_type
from rls.nn.models import get_output_network_from_type
... |
get_encoder_feature | params:
s: [B, x]
visual_s: [B, y]
return:
feat: [B, z] |
import numpy as np
import tensorflow as tf
from copy import deepcopy
from abc import ABC, abstractmethod
from tensorflow.keras import Model as M
from rls.utils.indexs import OutputNetworkType
from rls.nn.networks import get_visual_network_from_type
from rls.nn.models import get_output_network_from_type
from rls.nn.... | def get_encoder_feature(self, s, visual_s):
'''
params:
s: [B, x]
visual_s: [B, y]
return:
feat: [B, z]
'''
if self.vector_net.use_vector and self.visual_net.use_visual:
feat = self.get_vec_feature(s)
vis_feat = sel... | 148 | 168 |
import numpy as np
import tensorflow as tf
from copy import deepcopy
from abc import ABC, abstractmethod
from tensorflow.keras import Model as M
from rls.utils.indexs import OutputNetworkType
from rls.nn.networks import get_visual_network_from_type
from rls.nn.models import get_output_network_from_type
... |
__init__ | Initialize a new Clients
object with the provided RestSession.
Args:
session(RestSession): The RESTful session object to be used for
API calls to the DNA Center service.
Raises:
TypeError: If the parameter types are incorrect. | # -*- coding: utf-8 -*-
"""Cisco DNA Center Clients API wrapper.
Copyright (c) 2019-2021 Cisco Systems.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limi... | def __init__(self, session, object_factory, request_validator):
"""Initialize a new Clients
object with the provided RestSession.
Args:
session(RestSession): The RESTful session object to be used for
API calls to the DNA Center service.
Raises:
... | 55 | 73 | # -*- coding: utf-8 -*-
"""Cisco DNA Center Clients API wrapper.
Copyright (c) 2019-2021 Cisco Systems.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limi... |
get_client_enrichment_details | Enriches a given network End User context (a network user-id or
end user's device Mac Address) with details about the
user, the devices that the user is connected to and the
assurance issues that the user is impacted by.
Args:
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**reque... | # -*- coding: utf-8 -*-
"""Cisco DNA Center Clients API wrapper.
Copyright (c) 2019-2021 Cisco Systems.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limi... | def get_client_enrichment_details(self,
headers=None,
**request_parameters):
"""Enriches a given network End User context (a network user-id or
end user's device Mac Address) with details about the
user, the devices ... | 75 | 136 | # -*- coding: utf-8 -*-
"""Cisco DNA Center Clients API wrapper.
Copyright (c) 2019-2021 Cisco Systems.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limi... |
get_overall_client_health | Returns Overall Client Health information by Client type (Wired
and Wireless) for any given point of time.
Args:
timestamp(basestring, int): Epoch time(in milliseconds) when the Client health data is required.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters... | # -*- coding: utf-8 -*-
"""Cisco DNA Center Clients API wrapper.
Copyright (c) 2019-2021 Cisco Systems.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limi... | def get_overall_client_health(self,
timestamp=None,
headers=None,
**request_parameters):
"""Returns Overall Client Health information by Client type (Wired
and Wireless) for any given point of time.... | 138 | 196 | # -*- coding: utf-8 -*-
"""Cisco DNA Center Clients API wrapper.
Copyright (c) 2019-2021 Cisco Systems.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limi... |
get_client_detail | Returns detailed Client information retrieved by Mac Address for
any given point of time.
Args:
timestamp(basestring, int): Epoch time(in milliseconds) when the Client health data is required.
mac_address(basestring): MAC Address of the client.
headers(dict): Dictionary of HTTP Headers to send with the R... | # -*- coding: utf-8 -*-
"""Cisco DNA Center Clients API wrapper.
Copyright (c) 2019-2021 Cisco Systems.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limi... | def get_client_detail(self,
mac_address,
timestamp=None,
headers=None,
**request_parameters):
"""Returns detailed Client information retrieved by Mac Address for
any given point of time. .
... | 198 | 262 | # -*- coding: utf-8 -*-
"""Cisco DNA Center Clients API wrapper.
Copyright (c) 2019-2021 Cisco Systems.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limi... |
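The four Clients rows are thin wrappers from the Cisco DNA Center SDK (`dnacentersdk`); a hedged usage sketch (the `DNACenterAPI` entry point and credentials follow the SDK's usual construction pattern rather than anything shown in the rows, and the enrichment headers follow DNA Center's documented entity_type/entity_value convention):

```python
from dnacentersdk import api

dnac = api.DNACenterAPI(base_url="https://dnac.example.com",
                        username="admin", password="secret", verify=False)

health = dnac.clients.get_overall_client_health()  # timestamp is optional
detail = dnac.clients.get_client_detail(mac_address="00:11:22:33:44:55")
enrich = dnac.clients.get_client_enrichment_details(
    headers={"entity_type": "mac_address",
             "entity_value": "00:11:22:33:44:55"})
```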
download | Downloads the file.
:param directory: the directory to download the file to. This must exist.
:return: the path to the downloaded file. | import json
import os
import re
from yandeley.models.annotations import Annotation
from yandeley.response import SessionResponseObject
class File(SessionResponseObject):
"""
A file attached to a document.
.. attribute:: id
.. attribute:: size
.. attribute:: file_name
.. attribute:: mime_type... | def download(self, directory):
"""
Downloads the file.
:param directory: the directory to download the file to. This must exist.
:return: the path to the downloaded file.
"""
rsp = self.session.get('/files/%s' % self.id, stream=True)
filename = self.filename... | 46 | 64 | import json
import os
import re
from yandeley.models.annotations import Annotation
from yandeley.response import SessionResponseObject
class File(SessionResponseObject):
"""
A file attached to a document.
.. attribute:: id
.. attribute:: size
.. attribute:: file_name
.. attribute:: mime_type... |
add_sticky_note | Adds a sticky note to this file.
:param text: the text of the sticky_note.
:param x_position: the x position on the file of the sticky_note.
:param y_position: the y position on the file of the stick_note.
:param page_number: the page_number on the file of the sticky_note.
:return: a :class:`Annotation <yandeley.model... | import json
import os
import re
from yandeley.models.annotations import Annotation
from yandeley.response import SessionResponseObject
class File(SessionResponseObject):
"""
A file attached to a document.
.. attribute:: id
.. attribute:: size
.. attribute:: file_name
.. attribute:: mime_type... | def add_sticky_note(self, text, x_position, y_position, page_number):
"""
Adds a sticky note to this file.
:param text: the text of the sticky_note.
:param x_position: the x position on the file of the sticky_note.
:param y_position: the y position on the file of the stick_n... | 72 | 96 | import json
import os
import re
from yandeley.models.annotations import Annotation
from yandeley.response import SessionResponseObject
class File(SessionResponseObject):
"""
A file attached to a document.
.. attribute:: id
.. attribute:: size
.. attribute:: file_name
.. attribute:: mime_type... |
add_highlight | Adds a highlight to this file.
:param bounding_boxes: the area the highlight covers on the file.
:param color: the color of the highlight.
:return: a :class:`Annotation <yandeley.models.annotations.Annotation>`. | import json
import os
import re
from yandeley.models.annotations import Annotation
from yandeley.response import SessionResponseObject
class File(SessionResponseObject):
"""
A file attached to a document.
.. attribute:: id
.. attribute:: size
.. attribute:: file_name
.. attribute:: mime_type... | def add_highlight(self, bounding_boxes, color):
"""
Adds a highlight to this file.
:param bounding_boxes: the area the highlight covers on the file.
:param color: the color of the highlight.
:return: a :class:`Annotation <yandeley.models.annotations.Annotation>`.
"""... | 98 | 118 | import json
import os
import re
from yandeley.models.annotations import Annotation
from yandeley.response import SessionResponseObject
class File(SessionResponseObject):
"""
A file attached to a document.
.. attribute:: id
.. attribute:: size
.. attribute:: file_name
.. attribute:: mime_type... |
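The three `File` rows share one session-backed object; a sketch assuming you already hold a `File` instance `f` from an authenticated yandeley session (how you obtain it is outside these rows):

```python
# `f` is a yandeley File object obtained elsewhere (e.g. from a document's files).
path = f.download("/tmp")  # the target directory must already exist
note = f.add_sticky_note("check eq. 3", x_position=100,
                         y_position=200, page_number=1)
# add_highlight takes the covered bounding boxes and a colour,
# and likewise returns an Annotation.
```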
__init__ | Args:
year (int): If provided, NUTS regions for this year will be used (if available)
scale (int): If provided, NUTS regions at this resolution will be used (if available) | """
nuts_finder
-----------
You give it a point, it tells you all the EU NUTS regions
"""
import geojson
import requests
import re
from io import BytesIO
from zipfile import ZipFile
from shapely import geometry
from functools import lru_cache
import logging
YEAR_REGEX = "NUTS ([0-9]+)"
SCALE_REGEX = "1:([0-9]+) Milli... | def __init__(self, year=None, scale=None):
"""
Args:
year (int): If provided, NUTS regions for this year will be used (if available)
scale (int): If provided, NUTS regions at this resolution will be used (if available)
"""
self.years = list(_get_available(YEAR... | 65 | 75 | """
nuts_finder
-----------
You give it a point, it tells you all the EU NUTS regions
"""
import geojson
import requests
import re
from io import BytesIO
from zipfile import ZipFile
from shapely import geometry
from functools import lru_cache
import logging
YEAR_REGEX = "NUTS ([0-9]+)"
SCALE_REGEX = "1:([0-9]+) Milli... |
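The `nuts_finder` row shows only the constructor; a hedged usage sketch (the `find(lat=..., lon=...)` lookup and the result keys come from the package's README, not from the row itself):

```python
from nuts_finder import NutsFinder

nf = NutsFinder(year=2021, scale=20)  # both arguments optional, per the docstring
for region in nf.find(lat=53.406, lon=-2.978):  # a point in Liverpool
    print(region["NUTS_ID"], region["LEVL_CODE"])
```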
bleu_score | Approximate BLEU score computation between labels and predictions.
An approximate BLEU scoring method since we do not glue word pieces or
decode the ids and tokenize the output. By default, we use ngram order of 4
and use brevity penalty. Also, this does not have beam search.
Args:
logits: Tensor of size [batch_siz... | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def bleu_score(logits, labels):
"""Approximate BLEU score computation between labels and predictions.
An approximate BLEU scoring method since we do not glue word pieces or
decode the ids and tokenize the output. By default, we use ngram order of 4
and use brevity penalty. Also, this does not have beam search.... | 190 | 207 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
rouge_2_fscore | ROUGE-2 F1 score computation between labels and predictions.
This is an approximate ROUGE scoring method since we do not glue word pieces
or decode the ids and tokenize the output.
Args:
logits: tensor, model predictions
labels: tensor, gold output.
Returns:
rouge2_fscore: approx rouge-2 f1 score. | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def rouge_2_fscore(logits, labels):
"""ROUGE-2 F1 score computation between labels and predictions.
This is an approximate ROUGE scoring method since we do not glue word pieces
or decode the ids and tokenize the output.
Args:
logits: tensor, model predictions
labels: tensor, gold output.
Returns:
... | 296 | 312 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
_get_ngrams | Calculates n-grams.
Args:
n: which n-grams to calculate
text: An array of tokens
Returns:
A set of n-grams | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def _get_ngrams(n, text):
"""Calculates n-grams.
Args:
n: which n-grams to calculate
text: An array of tokens
Returns:
A set of n-grams
"""
ngram_set = set()
text_length = len(text)
max_index_ngram_start = text_length - n
for i in range(max_index_ngram_start + 1):
ngram_set.add(tuple(t... | 315 | 330 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
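`_get_ngrams` is fully visible apart from its final line; a self-contained completion of the same logic:

```python
def get_ngrams(n, text):
    """Return the set of n-grams (as tuples) in a list of tokens."""
    ngram_set = set()
    max_index_ngram_start = len(text) - n
    for i in range(max_index_ngram_start + 1):
        ngram_set.add(tuple(text[i:i + n]))
    return ngram_set

assert get_ngrams(2, ["a", "b", "c"]) == {("a", "b"), ("b", "c")}
```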
rouge_l_fscore | ROUGE scores computation between labels and predictions.
This is an approximate ROUGE scoring method since we do not glue word pieces
or decode the ids and tokenize the output.
Args:
predictions: tensor, model predictions
labels: tensor, gold output.
Returns:
rouge_l_fscore: approx rouge-l f1 score. | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def rouge_l_fscore(predictions, labels):
"""ROUGE scores computation between labels and predictions.
This is an approximate ROUGE scoring method since we do not glue word pieces
or decode the ids and tokenize the output.
Args:
predictions: tensor, model predictions
labels: tensor, gold output.
Retu... | 373 | 389 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
_len_lcs | Returns the length of the Longest Common Subsequence between two seqs.
Source: http://www.algorithmist.com/index.php/Longest_Common_Subsequence
Args:
x: sequence of words
y: sequence of words
Returns
integer: Length of LCS between x and y | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def _len_lcs(x, y):
"""Returns the length of the Longest Common Subsequence between two seqs.
Source: http://www.algorithmist.com/index.php/Longest_Common_Subsequence
Args:
x: sequence of words
y: sequence of words
Returns
integer: Length of LCS between x and y
"""
table = _lcs(x, y)
n, m =... | 426 | 440 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
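`_len_lcs` reads the bottom-right cell of a DP table built by an `_lcs` helper that the snippet truncates; a self-contained sketch of the standard recurrence it relies on:

```python
def lcs_length(x, y):
    """Length of the longest common subsequence, classic O(n*m) DP."""
    n, m = len(x), len(y)
    table = [[0] * (m + 1) for _ in range(n + 1)]
    for i in range(1, n + 1):
        for j in range(1, m + 1):
            if x[i - 1] == y[j - 1]:
                table[i][j] = table[i - 1][j - 1] + 1
            else:
                table[i][j] = max(table[i - 1][j], table[i][j - 1])
    return table[n][m]

assert lcs_length("police", "ice") == 3
```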
_f_lcs | Computes the LCS-based F-measure score.
Source: http://research.microsoft.com/en-us/um/people/cyl/download/papers/
rouge-working-note-v1.3.1.pdf
Args:
llcs: Length of LCS
m: number of words in reference summary
n: number of words in candidate summary
Returns:
Float. LCS-based F-measure score | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def _f_lcs(llcs, m, n):
"""Computes the LCS-based F-measure score.
Source: http://research.microsoft.com/en-us/um/people/cyl/download/papers/
rouge-working-note-v1.3.1.pdf
Args:
llcs: Length of LCS
m: number of words in reference summary
n: number of words in candidate summary
Returns:
Floa... | 470 | 490 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
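`_f_lcs` combines the LCS length with the two summary lengths into the ROUGE-L F-measure of Lin's working note; a sketch using the conventional beta = P/R weighting (the truncated body presumably matches, but the epsilon constants here are assumptions):

```python
def f_lcs(llcs, m, n):
    """ROUGE-L F-measure: R = llcs/m, P = llcs/n, F = (1+b^2)RP / (R + b^2*P)."""
    r_lcs = llcs / m                # recall against the reference (m words)
    p_lcs = llcs / n                # precision against the candidate (n words)
    beta = p_lcs / (r_lcs + 1e-12)  # conventional weighting; epsilon avoids /0
    num = (1 + beta ** 2) * r_lcs * p_lcs
    denom = r_lcs + beta ** 2 * p_lcs
    return num / (denom + 1e-12)
```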
text_to_sequence | Converts a string of text to a sequence of IDs corresponding to the symbols in the text.
The text can optionally have ARPAbet sequences enclosed in curly braces embedded
in it. For example, "Turn left on {HH AW1 S S T AH0 N} Street."
Args:
text: string to convert to a sequence
cleaner_names: names of the cleaner ... | # -*- coding: utf-8 -*-
import re
from packaging import version
import phonemizer
from phonemizer.phonemize import phonemize
from TTS.utils.text import cleaners
from TTS.utils.text.symbols import make_symbols, symbols, phonemes, _phoneme_punctuations, _bos, \
_eos
# Mappings from symbol to numeric ID and vice ver... | def text_to_sequence(text, cleaner_names, tp=None):
'''Converts a string of text to a sequence of IDs corresponding to the symbols in the text.
The text can optionally have ARPAbet sequences enclosed in curly braces embedded
in it. For example, "Turn left on {HH AW1 S S T AH0 N} Street."
Args:
... | 110 | 140 | # -*- coding: utf-8 -*-
import re
from packaging import version
import phonemizer
from phonemizer.phonemize import phonemize
from TTS.utils.text import cleaners
from TTS.utils.text.symbols import make_symbols, symbols, phonemes, _phoneme_punctuations, _bos, \
_eos
# Mappings from symbol to numeric ID and vice ver... |
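A hedged call sketch for `text_to_sequence` (the module path matches the row's imports, and `english_cleaners` is assumed to be one of the cleaners shipped in `TTS.utils.text.cleaners`):

```python
from TTS.utils.text import text_to_sequence

seq = text_to_sequence("Turn left on {HH AW1 S S T AH0 N} Street.",
                       ["english_cleaners"])
# Curly-brace spans are treated as ARPAbet and mapped straight to phoneme IDs.
```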
plot_contours | Plot the decision boundaries for a classifier.
Parameters
----------
ax: matplotlib axes object
clf: a classifier
xx: meshgrid ndarray
yy: meshgrid ndarray
params: dictionary of params to pass to contourf, optional | import cv2.cv2 as cv2
import skimage.io as io
from skimage.transform import downscale_local_mean
import numpy as np
from model import *
from sklearn.naive_bayes import GaussianNB
from sklearn.model_selection import train_test_split
import numpy as np
from sklearn.naive_bayes import GaussianNB
from sklearn.linear_mod... | def plot_contours(ax, clf, xx, yy, proba=False, **params):
"""Plot the decision boundaries for a classifier.
Parameters
----------
ax: matplotlib axes object
clf: a classifier
xx: meshgrid ndarray
yy: meshgrid ndarray
params: dictionary of params to p... | 749 | 766 | import cv2.cv2 as cv2
import skimage.io as io
from skimage.transform import downscale_local_mean
import numpy as np
from model import *
from sklearn.naive_bayes import GaussianNB
from sklearn.model_selection import train_test_split
import numpy as np
from sklearn.naive_bayes import GaussianNB
from sklear... |
__init__ | Initializes the CASE document.
Args:
graph: The graph to populate (instance of rdflib.Graph)
If not provided, a graph in memory will be used. | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... | def __init__(self, graph=None):
"""
Initializes the CASE document.
Args:
graph: The graph to populate (instance of rdflib.Graph)
If not provided, a graph in memory will be used.
"""
if not graph:
graph = rdflib.Graph()
graph... | 28 | 39 | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... |
serialize | Serializes the document's graph to a destination.
(Follows same arguments as rdflib.Graph().serialize()) | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... | def serialize(self, format='json-ld', **kwargs):
"""Serializes the document's graph to a destination.
(Follows same arguments as rdflib.Graph().serialize())"""
if format == 'json-ld':
if 'context' not in kwargs:
kwargs['context'] = self._json_ld_context()
... | 89 | 97 | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... |
__init__ | Initializes and adds a node to the graph.
NOTE: At least the type or a property must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rdflib.Graph)
uri: Optional string to set the URI to. (If not provided a UUID will be generated.)
bnode: Whether to c... | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... | def __init__(self, graph, uri=None, bnode=False, rdf_type=None, **kwargs):
"""Initializes and adds a node to the graph.
NOTE: At least the type or a property must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rd... | 161 | 194 | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... |
__init__ | Initializes and adds a node to the graph.
NOTE: At least the type or a property must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rdflib.Graph)
rdf_type: The RDF type to set this node to.
properties: Extra properties to add to this node.
(More... | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... | def __init__(self, graph, rdf_type=None, **kwargs):
"""Initializes and adds a node to the graph.
NOTE: At least the type or a property must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rdflib.Graph)
... | 230 | 246 | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... |
create_PropertyBundle | Convenience function for adding property bundles to this Trace.
Args:
type: The @type of property bundle (can be of type rdflib.URIRef or string).
properties: Properties to add to the created property bundle.
Returns:
The property bundle created (instance of PropertyBundle). | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... | def create_PropertyBundle(self, prop_type=None, **kwargs):
"""Convenience function for adding property bundles to this Trace.
Args:
type: The @type of property bundle (can be of type rdflib.URIRef or string).
properties: Properties to add to the created property bundle.
... | 249 | 263 | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... |
__init__ | Initializes and adds a node to the graph.
NOTE: At least the type or a property must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rdflib.Graph)
rdf_type: The RDF type to set this node to.
properties: Extra properties to add to this node.
(More... | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... | def __init__(self, graph, rdf_type=None, **kwargs):
"""Initializes and adds a node to the graph.
NOTE: At least the type or a property must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rdflib.Graph)
... | 270 | 289 | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... |
__init__ | Initializes and adds a node to the graph.
NOTE: At least the type must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rdflib.Graph)
rdf_type: The RDF type to set this node to.
properties: Extra properties to add to this node.
(More properties ca... | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... | def __init__(self, graph, rdf_type=None, **kwargs):
"""Initializes and adds a node to the graph.
NOTE: At least the type must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rdflib.Graph)
rdf_type: The ... | 296 | 311 | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... |
__init__ | Initializes and adds a node to the graph.
NOTE: At least the type must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rdflib.Graph)
rdf_type: The RDF type to set this node to.
properties: Extra properties to add to this node.
(More properties ca... | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... | def __init__(self, graph, rdf_type=None, **kwargs):
"""Initializes and adds a node to the graph.
NOTE: At least the type must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rdflib.Graph)
rdf_type: The ... | 318 | 333 | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... |
__init__ | Initializes and adds a node to the graph.
NOTE: At least the type must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rdflib.Graph)
rdf_type: The RDF type to set this node to.
properties: Extra properties to add to this node.
(More properties ca... | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... | def __init__(self, graph, rdf_type=None, **kwargs):
"""Initializes and adds a node to the graph.
NOTE: At least the type must be supplied for the Node
to exist in the graph.
Args:
graph: The graph to add this node to. (instance of rdflib.Graph)
rdf_type: The ... | 340 | 355 | # NOTICE
#
# This software was produced for the U.S. Government under
# contract SB-1341-14-CQ-0010, and is subject to the Rights
# in Data-General Clause 52.227-14, Alt. IV (DEC 2007)
#
# (c) 2018 The MITRE Corporation. All Rights Reserved.
#====================================================
# CASE API
#!/usr/bin... |
put_object_from_filelike | Store the byte contents of a file in the repository.
:param handle: filelike object with the byte content to be stored.
:return: the generated fully qualified identifier for the object within the repository.
:raises TypeError: if the handle is not a byte stream. | # -*- coding: utf-8 -*-
"""Class that defines the abstract interface for an object repository.
The scope of this class is intentionally very narrow. Any backend implementation should merely provide the methods to
store binary blobs, or "objects", and return a string-based key that uniquely identifies the object that was... | def put_object_from_filelike(self, handle: BinaryIO) -> str:
"""Store the byte contents of a file in the repository.
:param handle: filelike object with the byte content to be stored.
:return: the generated fully qualified identifier for the object within the repository.
:raises Typ... | 71 | 80 | # -*- coding: utf-8 -*-
"""Class that defines the abstract interface for an object repository.
The scope of this class is intentionally very narrow. Any backend implementation should merely provide the methods to
store binary blobs, or "objects", and return a string-based key that uniquely identifies the object that was... |
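Since `put_object_from_filelike` is an abstract interface method, a toy in-memory backend illustrates the contract (the class name and the content-hash key scheme are illustrative, not the project's):

```python
import hashlib
from typing import BinaryIO

class InMemoryRepository:
    """Toy backend honouring the interface: store bytes, return a unique key."""
    def __init__(self):
        self._objects = {}

    def put_object_from_filelike(self, handle: BinaryIO) -> str:
        content = handle.read()
        if not isinstance(content, bytes):
            raise TypeError("handle must be a byte stream")
        key = hashlib.sha256(content).hexdigest()  # content-addressed key
        self._objects[key] = content
        return key
```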
__init__ | Generative Adversarial Imitation Learning that accepts Image Obs
Most parameters are described in and passed to `AdversarialTrainer.__init__`.
Additional parameters that `CNNGAIL` adds on top of its superclass initializer are
as follows:
Args:
discrim_kwargs: Optional keyword arguments to use while constructing t... | import logging
from typing import Iterable, Mapping, Optional, Union
import gym
import numpy as np
import torch as th
from stable_baselines3.common import on_policy_algorithm, vec_env
from imitation.data import types
from imitation.rewards import discrim_nets
from imitation.algorithms.adversarial import AdversarialT... | def __init__(
self,
venv: vec_env.VecEnv,
expert_data: Union[Iterable[Mapping], types.Transitions],
expert_batch_size: int,
gen_algo: on_policy_algorithm.OnPolicyAlgorithm,
discrim=None,
*,
discrim_kwargs: Optional[Mapping] = None,
**kwargs,
... | 18 | 53 | import logging
from typing import Iterable, Mapping, Optional, Union
import gym
import numpy as np
import torch as th
from stable_baselines3.common import on_policy_algorithm, vec_env
from imitation.data import types
from imitation.rewards import discrim_nets
from imitation.algorithms.adversarial import AdversarialT... |
workspace | Workspace Factory Fixture.
Yields:
directory(Workspace): Workspace Created. | """ Orlov Module : workspace module fixture. """
import os
import logging
import pytest
from orlov.libs.workspace import Workspace
logger = logging.getLogger(__name__)
# MASKED: workspace function (lines 11-29) | @pytest.fixture(scope='session')
def workspace(request) -> Workspace:
""" Workspace Factory Fixture.
Yields:
directory(Workspace): Workspace Created.
"""
logger.debug('Setup of test structure.')
# create screenshot directory
if request.config.getoption('workspace'):
result_dir ... | 11 | 29 | """ Orlov Module : workspace module fixture. """
import os
import logging
import pytest
from orlov.libs.workspace import Workspace
logger = logging.getLogger(__name__)
@pytest.fixture(scope='session')
def workspace(request) -> Workspace:
""" Workspace Factory Fixture.
Yields:
directory(Workspace): ... |
pytest_collection_modifyitems | Skip tests marked with '@pytest.mark.skip_for_nginx_oss' for Nginx OSS runs.
Skip tests marked with '@pytest.mark.appprotect' for non AP images.
:param config: pytest config
:param items: pytest collected test-items
:return: | """Describe overall framework configuration."""
import os
import pytest
from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
from settings import (
DEFAULT_IMAGE,
DEFAULT_PULL_POLICY,
DEFAULT_IC_TYPE,
DEFAULT_SERVICE,
DEFAULT_DEPLOYMENT_TYPE,
NUM_REPLICAS,
BATCH_START,
... | def pytest_collection_modifyitems(config, items) -> None:
"""
Skip tests marked with '@pytest.mark.skip_for_nginx_oss' for Nginx OSS runs.
Skip tests marked with '@pytest.mark.appprotect' for non AP images.
:param config: pytest config
:param items: pytest collected test-items
:return:
"""
... | 103 | 131 | """Describe overall framework configuration."""
import os
import pytest
from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
from settings import (
DEFAULT_IMAGE,
DEFAULT_PULL_POLICY,
DEFAULT_IC_TYPE,
DEFAULT_SERVICE,
DEFAULT_DEPLOYMENT_TYPE,
NUM_REPLICAS,
BATCH_START,
... |
pytest_runtest_makereport | Print out IC Pod logs on test failure.
Only look at actual failing test calls, not setup/teardown.
Only show the logs if commandline argument `--show-ic-logs` is set to 'yes'
:param item:
:return: | """Describe overall framework configuration."""
import os
import pytest
from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
from settings import (
DEFAULT_IMAGE,
DEFAULT_PULL_POLICY,
DEFAULT_IC_TYPE,
DEFAULT_SERVICE,
DEFAULT_DEPLOYMENT_TYPE,
NUM_REPLICAS,
BATCH_START,
... | @pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item) -> None:
"""
Print out IC Pod logs on test failure.
Only look at actual failing test calls, not setup/teardown.
Only show the logs if commandline argument `--show-ic-logs` is set to 'yes'
:param item:
:return... | 134 | 163 | """Describe overall framework configuration."""
import os
import pytest
from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
from settings import (
DEFAULT_IMAGE,
DEFAULT_PULL_POLICY,
DEFAULT_IC_TYPE,
DEFAULT_SERVICE,
DEFAULT_DEPLOYMENT_TYPE,
NUM_REPLICAS,
BATCH_START,
... |
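A minimal sketch of the standard hookwrapper pattern this hook relies on: let pytest build the report, then inspect it and react only to failing `call` phases. The log-fetching body is a placeholder assumption:

```python
import pytest


@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item) -> None:
    # Let pytest build the report first, then inspect the outcome.
    outcome = yield
    report = outcome.get_result()
    # React only to genuine test failures, not setup/teardown errors.
    if report.when == "call" and report.failed:
        if item.config.getoption("--show-ic-logs") == "yes":
            # Placeholder: the real hook fetches and prints IC pod logs here.
            print(f"would fetch IC pod logs for failed test {item.name}")
```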
read_args_with_defaults | Look up parameters starting in the driver's private parameter space, but also searching outer namespaces.
Defining them in a higher namespace allows the axis_ptz.py script to share parameters with the driver. | #!/usr/bin/env python
"""
Axis camera video driver. Inspired by:
https://code.ros.org/svn/wg-ros-pkg/branches/trunk_cturtle/sandbox/axis_camera/axis.py
Communication with the camera is done using the Axis VAPIX API described at
http://www.axis.com/global/en/support/developer-support/vapix
.. note::
This is a ma... | def read_args_with_defaults(arg_defaults):
"""Look up parameters starting in the driver's private parameter space, but also searching outer namespaces.
Defining them in a higher namespace allows the axis_ptz.py script to share parameters with the driver."""
args = {}
for name, val in arg_defaults.iterit... | 628 | 647 | #!/usr/bin/env python
"""
Axis camera video driver. Inspired by:
https://code.ros.org/svn/wg-ros-pkg/branches/trunk_cturtle/sandbox/axis_camera/axis.py
Communication with the camera is done using the Axis VAPIX API described at
http://www.axis.com/global/en/support/developer-support/vapix
.. note::
This is a ma... |
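A sketch of the outward parameter search this docstring describes, using rospy's `search_param`; written for Python 3 (`items()` instead of the original's `iteritems()`):

```python
import rospy


def read_args_with_defaults(arg_defaults):
    """Resolve each parameter via rospy's outward parameter search."""
    args = {}
    for name, default in arg_defaults.items():
        # search_param starts in the private namespace (~name) and walks
        # outward; it returns None when the parameter is set nowhere.
        full_name = rospy.search_param(name)
        args[name] = default if full_name is None else rospy.get_param(full_name, default)
    return args
```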
set_resolution | Request a new resolution for the video stream.
:param resolution_value: The string of type `width`x`height` or a :py:class:`VideoResolution` object.
:type resolution_value: basestring|VideoResolution
:raises: :py:exc:`ValueError` if the resolution is unknown/unsupported. | #!/usr/bin/env python
"""
Axis camera video driver. Inspired by:
https://code.ros.org/svn/wg-ros-pkg/branches/trunk_cturtle/sandbox/axis_camera/axis.py
Communication with the camera is done using the Axis VAPIX API described at
http://www.axis.com/global/en/support/developer-support/vapix
.. note::
This is a ma... | def set_resolution(self, resolution_value):
"""Request a new resolution for the video stream.
:param resolution_value: The string of type `width`x`height` or a :py:class:`VideoResolution` object.
:type resolution_value: basestring|VideoResolution
:raises: :py:exc:`ValueError` if the... | 288 | 309 | #!/usr/bin/env python
"""
Axis camera video driver. Inspired by:
https://code.ros.org/svn/wg-ros-pkg/branches/trunk_cturtle/sandbox/axis_camera/axis.py
Communication with the camera is done using the Axis VAPIX API described at
http://www.axis.com/global/en/support/developer-support/vapix
.. note::
This is a ma... |
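A small sketch of the `'WIDTHxHEIGHT'` parsing such a setter needs, with the `ValueError` behaviour the docstring promises (the helper name is hypothetical):

```python
def parse_resolution(resolution_value):
    """Hypothetical helper: turn 'WIDTHxHEIGHT' into an (int, int) pair."""
    try:
        width, height = (int(part) for part in str(resolution_value).lower().split('x'))
    except ValueError:
        raise ValueError("unknown/unsupported resolution: %r" % (resolution_value,))
    return width, height


assert parse_resolution('1280x720') == (1280, 720)
```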
__init__ | Create a representation of the resolution.
:param width: Width of the resolution in pixels.
:type width: int
:param height: Height of the resolution in pixels.
:type height: int | #!/usr/bin/env python
"""
Axis camera video driver. Inspired by:
https://code.ros.org/svn/wg-ros-pkg/branches/trunk_cturtle/sandbox/axis_camera/axis.py
Communication with the camera is done using the Axis VAPIX API described at
http://www.axis.com/global/en/support/developer-support/vapix
.. note::
This is a ma... | def __init__(self, width, height):
"""Create a representation of the resolution.
:param width: Width of the resolution in pixels.
:type width: int
:param height: Height of the resolution in pixels.
:type height: int
"""
super(VideoResolution, self).__init__()... | 492 | 506 | #!/usr/bin/env python
"""
Axis camera video driver. Inspired by:
https://code.ros.org/svn/wg-ros-pkg/branches/trunk_cturtle/sandbox/axis_camera/axis.py
Communication with the camera is done using the Axis VAPIX API described at
http://www.axis.com/global/en/support/developer-support/vapix
.. note::
This is a ma... |
set_application | Set ``CommandLineInterface`` instance for this connection.
(This can be replaced any time.)
:param cli: CommandLineInterface instance.
:param callback: Callable that takes the result of the CLI. | """
Telnet server.
Example usage::
class MyTelnetApplication(TelnetApplication):
def client_connected(self, telnet_connection):
# Set CLI with simple prompt.
telnet_connection.set_application(
telnet_connection.create_prompt_application(...))
def handle_com... | def set_application(self, app, callback=None):
"""
Set ``CommandLineInterface`` instance for this connection.
(This can be replaced any time.)
:param cli: CommandLineInterface instance.
:param callback: Callable that takes the result of the CLI.
"""
assert is... | 139 | 185 | """
Telnet server.
Example usage::
class MyTelnetApplication(TelnetApplication):
def client_connected(self, telnet_connection):
# Set CLI with simple prompt.
telnet_connection.set_application(
telnet_connection.create_prompt_application(...))
def... |
display_auth_cache | Writes to the screen the state of the authentication cache. (For debugging
authentication issues.) BEWARE: DO NOT email the output of this command!!!
You must keep the tokens secure. Treat them as passwords. | #!/usr/bin/env python
# Copyright 2015 Coursera
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or ag... | def display_auth_cache(args):
'''
Writes to the screen the state of the authentication cache. (For debugging
authentication issues.) BEWARE: DO NOT email the output of this command!!!
You must keep the tokens secure. Treat them as passwords.
'''
oauth2_instance = oauth2.build_oauth2(args)
if... | 70 | 93 | #!/usr/bin/env python
# Copyright 2015 Coursera
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or ag... |
_log_ref | Log a reference to an in-memory object.
Return True if this object is new and was assigned
a new ID. Otherwise return False. | # Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009-2018 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
from __future__ import absolute_import, division... | def _log_ref(self, obj):
"""
Log a reference to an in-memory object.
Return True if this object is new and was assigned
a new ID. Otherwise return False.
"""
objid = id(obj)
is_new = objid not in self._objs
if is_new:
new_id = len(self._obj... | 196 | 207 | # Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009-2018 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
from __future__ import absolute_import, division... |
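A self-contained sketch of the id-based bookkeeping `_log_ref` describes: the first sighting of an object assigns it the next sequential ID and returns True; later sightings return False:

```python
class RefLogger:
    """Sketch of id-based reference logging."""

    def __init__(self):
        self._objs = {}  # maps id(obj) -> sequential reference ID

    def _log_ref(self, obj):
        objid = id(obj)
        is_new = objid not in self._objs
        if is_new:
            # First sighting: assign the next sequential ID.
            self._objs[objid] = len(self._objs)
        return is_new


log = RefLogger()
x = [1, 2]
assert log._log_ref(x) is True    # new object
assert log._log_ref(x) is False   # seen before
```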
flatten | Takes an object and returns a JSON-safe representation of it.
Simply returns any of the basic builtin datatypes
>>> p = Pickler()
>>> p.flatten('hello world') == 'hello world'
True
>>> p.flatten(49)
49
>>> p.flatten(350.0)
350.0
>>> p.flatten(True)
True
>>> p.flatten(False)
False
>>> r = p.flatten(None)
>>> r is None... | # Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009-2018 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
from __future__ import absolute_import, division... | def flatten(self, obj, reset=True):
"""Takes an object and returns a JSON-safe representation of it.
Simply returns any of the basic builtin datatypes
>>> p = Pickler()
>>> p.flatten('hello world') == 'hello world'
True
>>> p.flatten(49)
49
>>> p.fla... | 222 | 252 | # Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009-2018 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
from __future__ import absolute_import, division... |
download_pdc_id | Download a PDC dataset by its PDC study id.
Returns:
pandas.DataFrame: The clinical table for the study id.
pandas.DataFrame: The quantitative table for the study id. | # Copyright 2018 Samuel Payne sam_payne@byu.edu
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law ... | def download_pdc_id(pdc_id, _download_msg=True):
"""Download a PDC dataset by its PDC study id.
Returns:
pandas.DataFrame: The clinical table for the study id.
pandas.DataFrame: The quantitative table for the study id.
"""
if _download_msg:
clin_msg = f"Downloading clinical table f... | 147 | 204 | # Copyright 2018 Samuel Payne sam_payne@byu.edu
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law ... |
run_on_kubernetes | Run a dask function inside a pod using the given config.
Create a pod, using the local kubernetes configuration that starts a Dask Cluster
using dask-kubernetes and runs a function specified within the `config` dictionary.
Args:
config (dict):
Config dictionary.
namespace (str):
Kubernetes nam... | # -*- coding: utf-8 -*-
import argparse
import importlib
import json
import logging
import os
import re
import sys
from io import StringIO
import boto3
import tabulate
import yaml
from dask.distributed import Client
from dask_kubernetes import KubeCluster
from kubernetes.client import Configuration
from kubernetes.cli... | def run_on_kubernetes(config, namespace='default'):
"""Run dask function inside a pod using the given config.
Create a pod, using the local kubernetes configuration that starts a Dask Cluster
using dask-kubernetes and runs a function specified within the `config` dictionary.
Args:
config (di... | 197 | 218 | # -*- coding: utf-8 -*-
import argparse
import importlib
import json
import logging
import os
import re
import sys
from io import StringIO
import boto3
import tabulate
import yaml
from dask.distributed import Client
from dask_kubernetes import KubeCluster
from kubernetes.client import Configuration
from kubernetes.cli... |
get_event_categories | ## Example Usage
List the event categories of all the RDS resources.
```python
import pulumi
import pulumi_aws as aws
example_event_categories = aws.rds.get_event_categories()
pulumi.export("example", example_event_categories.event_categories)
```
List the event categories specific to the RDS resource `db-snapshot`... | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import ... | def get_event_categories(source_type: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetEventCategoriesResult:
"""
## Example Usage
List the event categories of all the RDS resources.
```python
import pulumi
import pulumi_aws as aws
... | 66 | 105 | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import ... |
_parse | Parse the test output.
See also https://github.com/axboe/fio/blob/master/HOWTO | from collections import defaultdict
import requests
from logger import logger
from perfrunner.helpers.misc import pretty_dict
from perfrunner.helpers.remote import RemoteHelper
from perfrunner.tests import PerfTest
class FIOTest(PerfTest):
TRACKER = 'fio.sc.couchbase.com'
TEMPLATE = {
'group': '{}... | @staticmethod
def _parse(results):
"""Parse the test output.
See also https://github.com/axboe/fio/blob/master/HOWTO
"""
stats = defaultdict(int)
for host, output in results.items():
for job in output.split():
stats[host] += int(job.split(';')... | 29 | 40 | from collections import defaultdict
import requests
from logger import logger
from perfrunner.helpers.misc import pretty_dict
from perfrunner.helpers.remote import RemoteHelper
from perfrunner.tests import PerfTest
class FIOTest(PerfTest):
TRACKER = 'fio.sc.couchbase.com'
TEMPLATE = {
'group': '{}... |
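A sketch of summing one field of fio's semicolon-separated terse output per host, as the snippet above does; the field index used here is an arbitrary assumption (see the fio HOWTO for the terse layout):

```python
from collections import defaultdict


def parse_fio(results, field=5):
    """Sum one terse-format field across all jobs, per host.

    `field=5` is an assumed index; consult the fio HOWTO for the layout.
    """
    stats = defaultdict(int)
    for host, output in results.items():
        for job in output.split():          # one terse line per job
            stats[host] += int(job.split(';')[field])
    return stats
```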
predict | Predict non-zero elements of coo sparse matrix X according to the fitted model.
Parameters
----------
X {array-like, sparse coo matrix} shape (m, n)
Data matrix in coo format. Values are ignored.
Returns
-------
{array-like, sparse coo matrix} shape (m, n)
Predicted values. | # - * - encoding : utf - 8 - * -
# pylint: disable=fixme, line-too-long
"""
Matrix factorization solver.
:copyright: 2017-2019 H2O.ai, Inc.
:license: Apache License Version 2.0 (see LICENSE for details)
"""
import numpy as np
import scipy
import scipy.sparse
def _get_sparse_matrixes(X):
'''Create csc, csr and ... | def predict(self, X):
'''Predict non-zero elements of coo sparse matrix X according to the fitted model.
Parameters
----------
X {array-like, sparse coo matrix} shape (m, n)
Data matrix in coo format. Values are ignored.
Returns
-------
... | 294 | 316 | # - * - encoding : utf - 8 - * -
# pylint: disable=fixme, line-too-long
"""
Matrix factorization solver.
:copyright: 2017-2019 H2O.ai, Inc.
:license: Apache License Version 2.0 (see LICENSE for details)
"""
import numpy as np
import scipy
import scipy.sparse
def _get_sparse_matrixes(X):
'''Create csc, csr and ... |
ascii_escaped | If val is pure ascii, returns it as a str(). Otherwise, escapes
bytes objects into a sequence of escaped bytes:
b'\xc3\xb4\xc5\xd6' -> '\\xc3\\xb4\\xc5\\xd6'
and escapes unicode objects into a sequence of escaped unicode
ids, e.g.:
'4\\nV\\U00043efa\\x0eMXWB\\x1e\\u3028\\u15fd\\xcd\\U0007d944'
note:
the obvious "v.decode('unicode-esca... | """
python version compatibility code
"""
import functools
import inspect
import io
import re
import sys
from contextlib import contextmanager
from inspect import Parameter
from inspect import signature
import attr
import py
import _pytest
from _pytest._io.saferepr import saferepr
from _pytest.outcomes import fail
fr... | def ascii_escaped(val):
"""If val is pure ascii, returns it as a str(). Otherwise, escapes
bytes objects into a sequence of escaped bytes:
b'\xc3\xb4\xc5\xd6' -> '\\xc3\\xb4\\xc5\\xd6'
and escapes unicode objects into a sequence of escaped unicode
ids, e.g.:
'4\\nV\\U00043efa\\x0eMXWB\\x1e\\... | 187 | 209 | """
python version compatibility code
"""
import functools
import inspect
import io
import re
import sys
from contextlib import contextmanager
from inspect import Parameter
from inspect import signature
import attr
import py
import _pytest
from _pytest._io.saferepr import saferepr
from _pytest.outcomes import fail
fr... |
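A sketch of the escaping rules the docstring describes, using the standard `backslashreplace` and `unicode_escape` codecs rather than pytest's exact helper:

```python
def ascii_escaped_sketch(val):
    """Sketch: escape non-ASCII content so the result is pure ASCII."""
    if isinstance(val, bytes):
        # Non-ASCII bytes become \xNN escape sequences.
        return val.decode("ascii", errors="backslashreplace")
    # Non-ASCII code points become \xNN / \uNNNN / \UNNNNNNNN escapes.
    return val.encode("unicode_escape").decode("ascii")


assert ascii_escaped_sketch(b"\xc3\xb4") == "\\xc3\\xb4"
assert ascii_escaped_sketch("\u3028") == "\\u3028"
```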
resnet18 | Constructs a ResNet-18 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet | #!/usr/bin/env python
# coding: utf-8
#
# This code is based on torchvision resnet
# URL: https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
'resnet1... | def resnet18(pretrained=False, **kwargs):
"""Constructs a ResNet-18 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet18']))... | 171 | 179 | #!/usr/bin/env python
# coding: utf-8
#
# This code is based on torchvision resnet
# URL: https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
... |
resnet34 | Constructs a ResNet-34 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet | #!/usr/bin/env python
# coding: utf-8
#
# This code is based on torchvision resnet
# URL: https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
'resnet1... | def resnet34(pretrained=False, **kwargs):
"""Constructs a ResNet-34 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet34']))... | 182 | 190 | #!/usr/bin/env python
# coding: utf-8
#
# This code is based on torchvision resnet
# URL: https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
... |
resnet50 | Constructs a ResNet-50 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet | #!/usr/bin/env python
# coding: utf-8
#
# This code is based on torchvision resnet
# URL: https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
'resnet1... | def resnet50(pretrained=False, **kwargs):
"""Constructs a ResNet-50 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet50']))... | 193 | 201 | #!/usr/bin/env python
# coding: utf-8
#
# This code is based on torchvision resnet
# URL: https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
... |
resnet101 | Constructs a ResNet-101 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet | #!/usr/bin/env python
# coding: utf-8
#
# This code is based on torchvision resnet
# URL: https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
'resnet1... | def resnet101(pretrained=False, **kwargs):
"""Constructs a ResNet-101 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet101... | 204 | 212 | #!/usr/bin/env python
# coding: utf-8
#
# This code is based on torchvision resnet
# URL: https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
... |
resnet152 | Constructs a ResNet-152 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet | #!/usr/bin/env python
# coding: utf-8
#
# This code is based on torchvision resnet
# URL: https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
'resnet1... | def resnet152(pretrained=False, **kwargs):
"""Constructs a ResNet-152 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet152... | 215 | 223 | #!/usr/bin/env python
# coding: utf-8
#
# This code is based on torchvision resnet
# URL: https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
... |
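A usage sketch for the constructors above (`resnet18` through `resnet152`), assuming the file is importable as a module named `resnet` (a hypothetical name):

```python
import torch
import torch.nn as nn

from resnet import resnet18  # hypothetical module name for the file above

# pretrained=True downloads ImageNet weights via torch.utils.model_zoo.
model = resnet18(pretrained=True)
# Swap the classifier head for a new task, e.g. 10 classes.
model.fc = nn.Linear(model.fc.in_features, 10)

model.eval()
with torch.no_grad():
    logits = model(torch.randn(1, 3, 224, 224))
print(logits.shape)  # torch.Size([1, 10])
```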
set_random_seed | function: Set random seed.
Args:
seed (int): Seed to be used.
deterministic (bool): Whether to set the deterministic option for
CUDNN backend, i.e., set `torch.backends.cudnn.deterministic`
to True and `torch.backends.cudnn.benchmark` to False.
Default: False. | import sys
import time
import torch
import random
import argparse
import numpy as np
import torch.nn as nn
import torchvision.transforms as transforms
from torchvision import datasets
from torch.utils.data import DataLoader
# new #
import torch.cuda.amp as amp
def printParaNum(model):
'''
function: pri... | def set_random_seed(seed, deterministic=False):
'''
function: Set random seed.
Args:
seed (int): Seed to be used.
deterministic (bool): Whether to set the deterministic option for
CUDNN backend, i.e., set `torch.backends.cudnn.deterministic`
to True and `torch.backen... | 28 | 46 | import sys
import time
import torch
import random
import argparse
import numpy as np
import torch.nn as nn
import torchvision.transforms as transforms
from torchvision import datasets
from torch.utils.data import DataLoader
# new #
import torch.cuda.amp as amp
def printParaNum(model):
'''
function: pri... |
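A sketch of such a seeding helper, touching each RNG a typical training loop uses; these are standard `torch` calls, though the quoted module may set more or fewer of them:

```python
import random

import numpy as np
import torch


def set_random_seed(seed, deterministic=False):
    """Sketch: seed every RNG the training loop touches."""
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)            # CPU (and, in recent torch, CUDA) RNG
    torch.cuda.manual_seed_all(seed)   # be explicit for all GPUs
    if deterministic:
        # Trade speed for reproducibility in cuDNN convolutions.
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False
```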
demo | Step 1: User Authorization.
Redirect the user/resource owner to the OAuth provider (i.e. Github)
using a URL with a few key OAuth parameters. | import os
import pathlib
from flask import Flask
from flask import request
from flask import redirect
from flask import url_for
from flask import session
from flask import render_template
from flask.json import jsonify
from td.app.auth import FlaskTDAuth
from configparser import ConfigParser
# Define the templates f... | @app.route("/login")
def demo():
"""Step 1: User Authorization.
Redirect the user/resource owner to the OAuth provider (i.e. Github)
using a URL with a few key OAuth parameters.
"""
# Build the authorization URL.
auth_tuple = app.config['auth_client'].authorization_url()
# State is used ... | 33 | 47 | import os
import pathlib
from flask import Flask
from flask import request
from flask import redirect
from flask import url_for
from flask import session
from flask import render_template
from flask.json import jsonify
from td.app.auth import FlaskTDAuth
from configparser import ConfigParser
# Define the templates f... |
_test_repr_or_str | Test Queue's repr or str.
fn is repr or str. expect_id is True if we expect the Queue's id to
appear in fn(Queue()). | """Tests for queues.py"""
import sys
import unittest
from unittest import mock
import asyncio
from .. import utils as test_utils
class _QueueTestBase(test_utils.TestCase):
def setUp(self):
super().setUp()
self.loop = self.new_test_loop()
class QueueBasicTests(_QueueTestBase):
# MASKED: _test_r... | def _test_repr_or_str(self, fn, expect_id):
"""Test Queue's repr or str.
fn is repr or str. expect_id is True if we expect the Queue's id to
appear in fn(Queue()).
"""
def gen():
when = yield
self.assertAlmostEqual(0.1, when)
when = yield... | 18 | 68 | """Tests for queues.py"""
import sys
import unittest
from unittest import mock
import asyncio
from .. import utils as test_utils
class _QueueTestBase(test_utils.TestCase):
def setUp(self):
super().setUp()
self.loop = self.new_test_loop()
class QueueBasicTests(_QueueTestBase):
def _test_rep... |
get_git_version | Gets application version in the format [last-tag]-[last-commit-sha].
:param strip_v_in_version: If the version tag starts with 'v' (like 'v1.2.3'),
this controls whether the 'v' is stripped, so the resulting tag is '1.2.3'.
If there's a "-", "." or "_" separator after "v", it is removed as well.
:return: The version st... | """Module with git related utilities."""
import git
class GitRepoVersionInfo:
"""
Provides application versions information based on the tags and commits in the repo
"""
def __init__(self, path: str):
"""
Create an instance of GitRepoVersionInfo
:param path: The path to search... | def get_git_version(self, strip_v_in_version: bool = True) -> str:
"""
Gets application version in the format [last-tag]-[last-commit-sha].
:param strip_v_in_version: If the version tag starts with 'v' (like 'v1.2.3'),
this chooses if the 'v' should be stripped, so the resulting tag i... | 31 | 52 | """Module with git related utilities."""
import git
class GitRepoVersionInfo:
"""
Provides application versions information based on the tags and commits in the repo
"""
def __init__(self, path: str):
"""
Create an instance of GitRepoVersionInfo
:param path: The path to search... |
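A hedged sketch of the tag-plus-sha format with GitPython; the tag-selection rule (latest by commit date) and the 8-character sha are assumptions, not necessarily the module's exact behaviour:

```python
import git  # GitPython


def get_git_version(path, strip_v_in_version=True):
    """Sketch: build '[last-tag]-[last-commit-sha]' for the repo at `path`."""
    repo = git.Repo(path, search_parent_directories=True)
    tags = sorted(repo.tags, key=lambda t: t.commit.committed_date)
    tag = tags[-1].name if tags else "0.0.0"
    if strip_v_in_version and tag.startswith("v"):
        tag = tag[1:].lstrip("-._")  # drop 'v' plus an optional separator
    return f"{tag}-{repo.head.commit.hexsha[:8]}"
```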
test | Tests the model.
Parameters:
env - the environment to test the policy on
actor_model - the actor model to load in
Return:
None | """
The file contains the PPO class to train with.
NOTE: All "ALG STEP"s are following the numbers from the original PPO pseudocode.
It can be found here: https://spinningup.openai.com/en/latest/_images/math/e62a8971472597f4b014c2da064f636ffe365ba3.svg
"""
import gym
import numpy as np
import torch
import torch... | def test(env, actor_model, is_discrete):
"""
Tests the model.
Parameters:
env - the environment to test the policy on
actor_model - the actor model to load in
Return:
None
"""
print(f"Testing {actor_model}", flush=True)
# If the actor model is not specified, then exit
if actor_model == '':
print(... | 491 | 525 | """
The file contains the PPO class to train with.
NOTE: All "ALG STEP"s are following the numbers from the original PPO pseudocode.
It can be found here: https://spinningup.openai.com/en/latest/_images/math/e62a8971472597f4b014c2da064f636ffe365ba3.svg
"""
import gym
import numpy as np
import torch
i... |
__init__ | Initializes the PPO model, including hyperparameters.
Parameters:
policy_class - the policy class to use for our actor/critic networks.
env - the environment to train on.
hyperparameters - all extra arguments passed into PPO that should be hyperparameters.
Returns:
None | """
The file contains the PPO class to train with.
NOTE: All "ALG STEP"s are following the numbers from the original PPO pseudocode.
It can be found here: https://spinningup.openai.com/en/latest/_images/math/e62a8971472597f4b014c2da064f636ffe365ba3.svg
"""
import gym
import numpy as np
import torch
import torch... | def __init__(self, env, **hyperparameters):
"""
Initializes the PPO model, including hyperparameters.
Parameters:
policy_class - the policy class to use for our actor/critic networks.
env - the environment to train on.
hyperparameters - all extra arguments passed into PPO that should be hyperparam... | 30 | 86 | """
The file contains the PPO class to train with.
NOTE: All "ALG STEP"s are following the numbers from the original PPO pseudocode.
It can be found here: https://spinningup.openai.com/en/latest/_images/math/e62a8971472597f4b014c2da064f636ffe365ba3.svg
"""
import gym
import numpy as np
import torch
i... |
compute_rtgs | Compute the Reward-To-Go of each timestep in a batch given the rewards.
Parameters:
batch_rews - the rewards in a batch, Shape: (number of episodes, number of timesteps per episode)
Return:
batch_rtgs - the rewards to go, Shape: (number of timesteps in batch) | """
The file contains the PPO class to train with.
NOTE: All "ALG STEP"s are following the numbers from the original PPO pseudocode.
It can be found here: https://spinningup.openai.com/en/latest/_images/math/e62a8971472597f4b014c2da064f636ffe365ba3.svg
"""
import gym
import numpy as np
import torch
import torch... | def compute_rtgs(self, batch_rews):
"""
Compute the Reward-To-Go of each timestep in a batch given the rewards.
Parameters:
batch_rews - the rewards in a batch, Shape: (number of episodes, number of timesteps per episode)
Return:
batch_rtgs - the rewards to go, Shape: (number of timesteps in batch... | 288 | 316 | """
The file contains the PPO class to train with.
NOTE: All "ALG STEP"s are following the numbers from the original PPO pseudocode.
It can be found here: https://spinningup.openai.com/en/latest/_images/math/e62a8971472597f4b014c2da064f636ffe365ba3.svg
"""
import gym
import numpy as np
import torch
i... |
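The reward-to-go computation is short enough to sketch in full; iterating both the episodes and each episode's rewards in reverse keeps the discounted running sum correct and the output aligned with the flattened batch:

```python
import torch


def compute_rtgs(batch_rews, gamma=0.95):
    """Sketch: discounted reward-to-go, computed backwards per episode."""
    batch_rtgs = []
    # Reverse order so each running sum only depends on later rewards.
    for ep_rews in reversed(batch_rews):
        discounted = 0.0
        for rew in reversed(ep_rews):
            discounted = rew + gamma * discounted
            batch_rtgs.insert(0, discounted)
    return torch.tensor(batch_rtgs, dtype=torch.float)


# Two episodes of rewards -> one flat tensor of returns.
print(compute_rtgs([[1.0, 1.0], [2.0]], gamma=0.5))  # tensor([1.5, 1.0, 2.0])
```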
_init_hyperparameters | Initialize default and custom values for hyperparameters
Parameters:
hyperparameters - the extra arguments included when creating the PPO model, should only include
hyperparameters defined below with custom values.
Return:
None | """
The file contains the PPO class to train with.
NOTE: All "ALG STEP"s are following the numbers from the original PPO pseudocode.
It can be found here: https://spinningup.openai.com/en/latest/_images/math/e62a8971472597f4b014c2da064f636ffe365ba3.svg
"""
import gym
import numpy as np
import torch
import torch... | def _init_hyperparameters(self, hyperparameters):
"""
Initialize default and custom values for hyperparameters
Parameters:
hyperparameters - the extra arguments included when creating the PPO model, should only include
hyperparameters defined below with custom values.
Return:
None
"""
... | 399 | 437 | """
The file contains the PPO class to train with.
NOTE: All "ALG STEP"s are following the numbers from the original PPO pseudocode.
It can be found here: https://spinningup.openai.com/en/latest/_images/math/e62a8971472597f4b014c2da064f636ffe365ba3.svg
"""
import gym
import numpy as np
import torch
i... |
_log_summary | Print to stdout what we've logged so far in the most recent batch.
Parameters:
None
Return:
None | """
The file contains the PPO class to train with.
NOTE: All "ALG STEP"s are following the numbers from the original PPO pseudocode.
It can be found here: https://spinningup.openai.com/en/latest/_images/math/e62a8971472597f4b014c2da064f636ffe365ba3.svg
"""
import gym
import numpy as np
import torch
import torch... | def _log_summary(self):
"""
Print to stdout what we've logged so far in the most recent batch.
Parameters:
None
Return:
None
"""
# Calculate logging values. I use a few python shortcuts to calculate each value
# without explaining since it's not too important to PPO; feel free to look it ove... | 439 | 488 | """
The file contains the PPO class to train with.
NOTE: All "ALG STEP"s are following the numbers from the original PPO pseudocode.
It can be found here: https://spinningup.openai.com/en/latest/_images/math/e62a8971472597f4b014c2da064f636ffe365ba3.svg
"""
import gym
import numpy as np
import torch
i... |
baseline_re_single_analysis | Analyze edge cases of relative errors on a single network
Uses the samp^{th} sample network of size N in test data test_data_id.
Relative errors in the range (0, 2^{cap}) are considered edge cases.
Returns the number of edge cases divided by the difference |T-B| - |B-T| as a percent.
T and B are as defined in (Katz and... | """
Methods for assessing treatment of finite-precision issues
"""
import os
import sys
import time
import multiprocessing as mp
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.markers as mrk
import plotter as ptr
import rnn_fxpts as rfx
import fxpt_experiments as fe
import... | def baseline_re_single_analysis(test_data_id, N, samp, cap=10):
"""
Analyze edge cases of relative errors on a single network
Uses the samp^{th} sample network of size N in test data test_data_id.
Relative errors in the range (0, 2^{cap}) are considered edge cases.
Returns the number of edge cases d... | 78 | 91 | """
Methods for assessing treatment of finite-precision issues
"""
import os
import sys
import time
import multiprocessing as mp
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.markers as mrk
import plotter as ptr
import rnn_fxpts as rfx
import fxpt_experiments as fe
import... |
run_baseline_rd | Run get_baseline_rd on all networks in test_data_id whose size is in the list Ns.
Multiprocessing is used to run on multiple networks in parallel.
num_procs is the number of processors to use. | """
Methods for assessing treatment of finite-precision issues
"""
import os
import sys
import time
import multiprocessing as mp
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.markers as mrk
import plotter as ptr
import rnn_fxpts as rfx
import fxpt_experiments as fe
import... | def run_baseline_rd(test_data_id, Ns, num_procs):
"""
Run get_baseline_rd on all networks in test_data_id whose size is in the list Ns.
Multiprocessing is used to run on multiple networks in parallel.
num_procs is the number of processors to use.
"""
cpu_count = mp.cpu_count()
print('%d cpus... | 178 | 204 | """
Methods for assessing treatment of finite-precision issues
"""
import os
import sys
import time
import multiprocessing as mp
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.markers as mrk
import plotter as ptr
import rnn_fxpts as rfx
import fxpt_experiments as fe
import... |
run_traverse_rd | Run get_traverse_rd on all networks in test_data_id whose size is in the list Ns.
Multiprocessing is used to run on multiple networks in parallel.
num_procs is the number of processors to use. | """
Methods for assessing treatment of finite-precision issues
"""
import os
import sys
import time
import multiprocessing as mp
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.markers as mrk
import plotter as ptr
import rnn_fxpts as rfx
import fxpt_experiments as fe
import... | def run_traverse_rd(test_data_id, Ns, num_procs):
"""
Run get_traverse_rd on all networks in test_data_id whose size is in the list Ns.
Multiprocessing is used to run on multiple networks in parallel.
num_procs is the number of processors to use.
"""
cpu_count = mp.cpu_count()
print('%d cpu... | 244 | 271 | """
Methods for assessing treatment of finite-precision issues
"""
import os
import sys
import time
import multiprocessing as mp
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.markers as mrk
import plotter as ptr
import rnn_fxpts as rfx
import fxpt_experiments as fe
import... |
run_simple_rd | Run get_simple_rd on all networks in test_data_id whose size is in the list Ns.
Multiprocessing is used to run on multiple networks in parallel.
num_procs is the number of processors to use. | """
Methods for assessing treatment of finite-precision issues
"""
import os
import sys
import time
import multiprocessing as mp
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.markers as mrk
import plotter as ptr
import rnn_fxpts as rfx
import fxpt_experiments as fe
import... | def run_simple_rd(test_data_id, Ns, num_procs):
"""
Run get_simple_rd on all networks in test_data_id whose size is in the list Ns.
Multiprocessing is used to run on multiple networks in parallel.
num_procs is the number of processors to use.
"""
cpu_count = mp.cpu_count()
print('%d cpus, u... | 316 | 343 | """
Methods for assessing treatment of finite-precision issues
"""
import os
import sys
import time
import multiprocessing as mp
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.markers as mrk
import plotter as ptr
import rnn_fxpts as rfx
import fxpt_experiments as fe
import... |
get_interaction_table | Get interaction_table that is used for fetching user-item interaction label in LS regularization.
Args:
user_id(torch.Tensor): the user id in user-item interactions, shape: [n_interactions, 1]
item_id(torch.Tensor): the item id in user-item interactions, shape: [n_interactions, 1]
y(torch.Tensor): the labe... | # -*- coding: utf-8 -*-
# @Time : 2020/10/3
# @Author : Changxin Tian
# @Email : cx.tian@outlook.com
r"""
KGNNLS
################################################
Reference:
Hongwei Wang et al. "Knowledge-aware Graph Neural Networks with Label Smoothness Regularization
for Recommender Systems." in KDD 2019.... | def get_interaction_table(self, user_id, item_id, y):
r"""Get interaction_table that is used for fetching user-item interaction label in LS regularization.
Args:
user_id(torch.Tensor): the user id in user-item interactions, shape: [n_interactions, 1]
item_id(torch.Tensor): t... | 90 | 110 | # -*- coding: utf-8 -*-
# @Time : 2020/10/3
# @Author : Changxin Tian
# @Email : cx.tian@outlook.com
r"""
KGNNLS
################################################
Reference:
Hongwei Wang et al. "Knowledge-aware Graph Neural Networks with Label Smoothness Regularization
for Recommender Systems." in KDD 2019.... |
sample_neg_interaction | Sample neg_interaction to construct train data.
Args:
pos_interaction_table(dict): the interaction_table that only contains pos_interaction.
offset(int): The offset that is used for calculating the key(index) in interaction_table
Returns:
interaction_table(dict): key: user_id * 10^offset + item_id; value:... | # -*- coding: utf-8 -*-
# @Time : 2020/10/3
# @Author : Changxin Tian
# @Email : cx.tian@outlook.com
r"""
KGNNLS
################################################
Reference:
Hongwei Wang et al. "Knowledge-aware Graph Neural Networks with Label Smoothness Regularization
for Recommender Systems." in KDD 2019.... | def sample_neg_interaction(self, pos_interaction_table, offset):
r"""Sample neg_interaction to construct train data.
Args:
pos_interaction_table(dict): the interaction_table that only contains pos_interaction.
offset(int): The offset that is used for calculating the key(inde... | 112 | 133 | # -*- coding: utf-8 -*-
# @Time : 2020/10/3
# @Author : Changxin Tian
# @Email : cx.tian@outlook.com
r"""
KGNNLS
################################################
Reference:
Hongwei Wang et al. "Knowledge-aware Graph Neural Networks with Label Smoothness Regularization
for Recommender Systems." in KDD 2019.... |
construct_adj | Get neighbors and corresponding relations for each entity in the KG.
Args:
kg_graph(scipy.sparse.coo_matrix): an undirected graph
Returns:
tuple:
- adj_entity (torch.LongTensor): each line stores the sampled neighbor entities for a given entity,
shape: [n_entities, neighbor_sample_size]
... | # -*- coding: utf-8 -*-
# @Time : 2020/10/3
# @Author : Changxin Tian
# @Email : cx.tian@outlook.com
r"""
KGNNLS
################################################
Reference:
Hongwei Wang et al. "Knowledge-aware Graph Neural Networks with Label Smoothness Regularization
for Recommender Systems." in KDD 2019.... | def construct_adj(self, kg_graph):
r"""Get neighbors and corresponding relations for each entity in the KG.
Args:
kg_graph(scipy.sparse.coo_matrix): an undirected graph
Returns:
tuple:
- adj_entity (torch.LongTensor): each line stores the sampled nei... | 135 | 191 | # -*- coding: utf-8 -*-
# @Time : 2020/10/3
# @Author : Changxin Tian
# @Email : cx.tian@outlook.com
r"""
KGNNLS
################################################
Reference:
Hongwei Wang et al. "Knowledge-aware Graph Neural Networks with Label Smoothness Regularization
for Recommender Systems." in KDD 2019.... |
calculate_ls_loss | Calculate label smoothness loss.
Args:
user(torch.FloatTensor): the index of users, shape: [batch_size*2],
item(torch.FloatTensor): the index of items, shape: [batch_size*2],
target(torch.FloatTensor): the label of user-item, shape: [batch_size*2],
Returns:
ls_loss: label smoothness loss | # -*- coding: utf-8 -*-
# @Time : 2020/10/3
# @Author : Changxin Tian
# @Email : cx.tian@outlook.com
r"""
KGNNLS
################################################
Reference:
Hongwei Wang et al. "Knowledge-aware Graph Neural Networks with Label Smoothness Regularization
for Recommender Systems." in KDD 2019.... | def calculate_ls_loss(self, user, item, target):
r"""Calculate label smoothness loss.
Args:
user(torch.FloatTensor): the index of users, shape: [batch_size*2],
item(torch.FloatTensor): the index of items, shape: [batch_size*2],
target(torch.FloatTensor): the labe... | 397 | 414 | # -*- coding: utf-8 -*-
# @Time : 2020/10/3
# @Author : Changxin Tian
# @Email : cx.tian@outlook.com
r"""
KGNNLS
################################################
Reference:
Hongwei Wang et al. "Knowledge-aware Graph Neural Networks with Label Smoothness Regularization
for Recommender Systems." in KDD 2019.... |
load_data | Generate the train and val dataloaders; you can change this for your specific task
Args:
traindir (str): train dataset dir
valdir (str): validation dataset dir
Returns:
tuple: the train dataset and validation dataset | import argparse
import os
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
from ignite.metrics import IoU, Precision, Recall
import torchsat.transforms.transforms_cd as T
from torchsat.datasets.f... | def load_data(traindir, valdir, **kwargs):
"""generate the train and val dataloader, you can change this for your specific task
Args:
traindir (str): train dataset dir
valdir (str): validation dataset dir
Returns:
tuple: the train dataset and validation dataset
"""
train_tr... | 72 | 96 | import argparse
import os
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
from ignite.metrics import IoU, Precision, Recall
import torchsat.transforms.transforms_cd as T
from torchsat.datasets.f... |
build_collective_reduce | Build a subgraph that does one full all-reduce, using the collective Op.
Args:
input_tensors: tensors within a single worker graph that are to be reduced
together; must be one per device.
num_workers: total number of workers with identical independent graphs that
will be doing this same reduction. The red... | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def build_collective_reduce(input_tensors,
num_workers,
collective_keys,
reduction_op='Add',
unary_op='Id'):
"""Build a subgraph that does one full all-reduce, using the collective Op.
Args:
input_te... | 326 | 364 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
sum_gradients_all_reduce | Apply all-reduce algorithm over specified gradient tensors.
Args:
dev_prefixes: list of prefix strings to use to generate PS device names.
replica_grads: the gradients to reduce.
num_workers: number of worker processes across entire job.
alg: the all-reduce algorithm to apply.
num_shards: alg-specific shardi... | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def sum_gradients_all_reduce(dev_prefixes, replica_grads, num_workers, alg,
num_shards, gpu_indices):
"""Apply all-reduce algorithm over specified gradient tensors.
Args:
dev_prefixes: list of prefix strings to use to generate PS device names.
replica_grads: the gradients to re... | 408 | 447 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
pack_range | Form the concatenation of a specified range of gradient tensors.
Args:
key: Value under which to store meta-data in packing that will be used
later to restore the grad_var list structure.
packing: Dict holding data describing packed ranges of small tensors.
grad_vars: List of (grad, var) pairs for one replic... | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def pack_range(key, packing, grad_vars, rng):
"""Form the concatenation of a specified range of gradient tensors.
Args:
key: Value under which to store meta-data in packing that will be used
later to restore the grad_var list structure.
packing: Dict holding data describing packed ranges of small ten... | 490 | 519 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
unpack_grad_tuple | Unpack a previously packed collection of gradient tensors.
Args:
gv: A (grad, var) pair to be unpacked.
gpt: A GradPackTuple describing the packing operation that produced gv.
Returns:
A list of (grad, var) pairs corresponding to the values that were
originally packed into gv, maybe following subsequent oper... | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def unpack_grad_tuple(gv, gpt):
"""Unpack a previously packed collection of gradient tensors.
Args:
gv: A (grad, var) pair to be unpacked.
gpt: A GradPackTuple describing the packing operation that produced gv.
Returns:
A list of (grad, var) pairs corresponding to the values that were
originall... | 522 | 542 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
unpack_small_tensors | Undo the structure alterations to replica_grads done by pack_small_tensors.
Args:
replica_grads: List of List of (grad, var) tuples.
packing: A dict generated by pack_small_tensors describing the changes
it made to replica_grads.
Returns:
new_replica_grads: identical to replica_grads except that concatenati... | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def unpack_small_tensors(replica_grads, packing):
"""Undo the structure alterations to replica_grads done by pack_small_tensors.
Args:
replica_grads: List of List of (grad, var) tuples.
packing: A dict generated by pack_small_tensors describing the changes
it made to replica_grads.
Returns:
ne... | 601 | 630 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
__init__ | Initializes the object.
Args:
group_key_start: the starting integer of group key.
instance_key_start: the starting integer of instance key.
instance_key_with_id_start: the starting integer of instance key that is
recorded with an id. | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def __init__(self,
group_key_start=1,
instance_key_start=100,
instance_key_with_id_start=10000):
"""Initializes the object.
Args:
group_key_start: the starting integer of group key.
instance_key_start: the starting integer of instance key.
instan... | 252 | 272 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
get_group_key | Returns a group key for the set of devices.
Args:
devices: list of strings naming devices in a collective group.
Returns:
int key uniquely identifying the set of device names. | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def get_group_key(self, devices):
"""Returns a group key for the set of devices.
Args:
devices: list of strings naming devices in a collective group.
Returns:
int key uniquely identifying the set of device names.
"""
parsed = [pydev.DeviceSpec.from_string(d) for d in devices]
# I... | 281 | 303 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
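A simplified sketch of the key registry these two methods describe: canonicalize the device set, then hand out sequential integers. (The real TF code parses `DeviceSpec`s and also manages instance keys, omitted here.)

```python
class GroupKeyRegistry:
    """Sketch: map a *set* of device names to a stable integer key."""

    def __init__(self, group_key_start=1):
        self._group_key = group_key_start
        self._table = {}

    def get_group_key(self, devices):
        # Canonicalize so ordering and case don't change the key.
        names = tuple(sorted(d.lower() for d in devices))
        if names not in self._table:
            self._table[names] = self._group_key
            self._group_key += 1
        return self._table[names]


reg = GroupKeyRegistry()
key = reg.get_group_key(['/GPU:0', '/gpu:1'])
assert key == reg.get_group_key(['/gpu:1', '/gpu:0'])  # same set, same key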
export_environment | Exports an environment to a local folder. Hosts of the environment
are exported also.
@param env: The environment to export.
@type env: L{Environment}
@param path: Path to local directory.
@type path: string | # coding: utf-8
"""
Provides the exporter tool. The exporter can be used to export ComodIT entities
to local directories.
"""
from __future__ import print_function
from builtins import object
import os
from comodit_client.api.collection import EntityNotFoundException
from comodit_client.api.exceptions import PythonAp... | def export_environment(self, env, path):
"""
Exports an environment to a local folder. Hosts of the environment
are exported also.
@param env: The environment to export.
@type env: L{Environment}
@param path: Path to local directory.
@type path: string
... | 132 | 147 | # coding: utf-8
"""
Provides the exporter tool. The exporter can be used to export ComodIT entities
to local directories.
"""
from __future__ import print_function
from builtins import object
import os
from comodit_client.api.collection import EntityNotFoundException
from comodit_client.api.exceptions import PythonAp... |
export_host | Exports a host to a local folder. Contexts and instance are exported
also.
@param host: The host to export.
@type host: L{Host}
@param path: Path to local directory.
@type path: string | # coding: utf-8
"""
Provides the exporter tool. The exporter can be used to export ComodIT entities
to local directories.
"""
from __future__ import print_function
from builtins import object
import os
from comodit_client.api.collection import EntityNotFoundException
from comodit_client.api.exceptions import PythonAp... | def export_host(self, host, path):
"""
Exports a host to a local folder. Contexts and instance are exported
also.
@param host: The host to export.
@type host: L{Host}
@param path: Path to local directory.
@type path: string
"""
self._export_e... | 185 | 222 | # coding: utf-8
"""
Provides the exporter tool. The exporter can be used to export ComodIT entities
to local directories.
"""
from __future__ import print_function
from builtins import object
import os
from comodit_client.api.collection import EntityNotFoundException
from comodit_client.api.exceptions import PythonAp... |
export_organization | Exports an organization to a local folder. Environments, applications,
distributions and platforms are exported also.
@param org: The organization to export.
@type org: L{Organization}
@param path: Path to local directory.
@type path: string | # coding: utf-8
"""
Provides the exporter tool. The exporter can be used to export ComodIT entities
to local directories.
"""
from __future__ import print_function
from builtins import object
import os
from comodit_client.api.collection import EntityNotFoundException
from comodit_client.api.exceptions import PythonAp... | def export_organization(self, org, path):
"""
Exports an organization to a local folder. Environments, applications,
distributions and platforms are exported also.
@param org: The organization to export.
@type org: L{Organization}
@param path: Path to local directory... | 224 | 253 | # coding: utf-8
"""
Provides the exporter tool. The exporter can be used to export ComodIT entities
to local directories.
"""
from __future__ import print_function
from builtins import object
import os
from comodit_client.api.collection import EntityNotFoundException
from comodit_client.api.exceptions import PythonAp... |
_ensure_tf_install | Attempt to import tensorflow, and ensure its version is sufficient.
Raises:
ImportError: if either tensorflow is not importable or its version is
inadequate. | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | def _ensure_tf_install(): # pylint: disable=g-statement-before-imports
"""Attempt to import tensorflow, and ensure its version is sufficient.
Raises:
ImportError: if either tensorflow is not importable or its version is
inadequate.
"""
try:
import tensorflow as tf
except ImportError:
# Print... | 38 | 71 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
_try_cast | Convert input to numpy ndarray and optionally cast to a given dtype.
Parameters
----------
arr : ndarray, scalar, list, tuple, iterator (catchall)
Excludes: ExtensionArray, Series, Index.
dtype : np.dtype, ExtensionDtype or None
copy : bool
If False, don't copy the data if not needed.
raise_cast_failure : bool... | """
Constructor functions intended to be shared by pd.array, Series.__init__,
and Index.__new__.
These should not depend on core.internals.
"""
from __future__ import annotations
from collections import abc
from typing import TYPE_CHECKING, Any, Optional, Sequence, Union, cast
import numpy as np
import numpy.ma as m... | def _try_cast(arr, dtype: Optional[DtypeObj], copy: bool, raise_cast_failure: bool):
"""
Convert input to numpy ndarray and optionally cast to a given dtype.
Parameters
----------
arr : ndarray, scalar, list, tuple, iterator (catchall)
Excludes: ExtensionArray, Series, Index.
dtype : np... | 538 | 593 | """
Constructor functions intended to be shared by pd.array, Series.__init__,
and Index.__new__.
These should not depend on core.internals.
"""
from __future__ import annotations
from collections import abc
from typing import TYPE_CHECKING, Any, Optional, Sequence, Union, cast
import numpy as np
import numpy.ma as m... |
is_empty_data | Utility to check if a Series is instantiated with empty data,
which does not contain dtype information.
Parameters
----------
data : array-like, Iterable, dict, or scalar value
Contains data stored in Series.
Returns
-------
bool | """
Constructor functions intended to be shared by pd.array, Series.__init__,
and Index.__new__.
These should not depend on core.internals.
"""
from __future__ import annotations
from collections import abc
from typing import TYPE_CHECKING, Any, Optional, Sequence, Union, cast
import numpy as np
import numpy.ma as m... | def is_empty_data(data: Any) -> bool:
"""
Utility to check if a Series is instantiated with empty data,
which does not contain dtype information.
Parameters
----------
data : array-like, Iterable, dict, or scalar value
Contains data stored in Series.
Returns
-------
bool
... | 596 | 613 | """
Constructor functions intended to be shared by pd.array, Series.__init__,
and Index.__new__.
These should not depend on core.internals.
"""
from __future__ import annotations
from collections import abc
from typing import TYPE_CHECKING, Any, Optional, Sequence, Union, cast
import numpy as np
import numpy.ma as m... |
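A short sketch matching the `is_empty_data` docstring: data counts as empty when it is None, or when it is a falsy list-like that carries no dtype attribute (so pandas has nothing to infer a dtype from). `is_list_like` is pandas' public helper; the function name is a stand-in:

```python
from pandas.api.types import is_list_like

def is_empty_data_sketch(data) -> bool:
    # None, or an empty list-like without a dtype (e.g. [] or ()),
    # carries no dtype information.
    if data is None:
        return True
    return is_list_like(data) and not hasattr(data, "dtype") and not data

is_empty_data_sketch([])            # True
is_empty_data_sketch([1, 2, 3])     # False
```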
load_raw_mask | Load one of the two kinds of masks in the VOC dataset.
image_id: id of mask
class_or_object: 'class_mask' or 'object_mask' for SegmentationClass or SegmentationObject
Returns:
image: numpy of mask image. | import os
import sys
import json
import datetime
import numpy as np
import skimage.draw
from bs4 import BeautifulSoup as bs
import cv2
import imgaug
from utils import *
# Root directory of the project
ROOT_DIR = os.path.abspath("../../")
# Inference result directory
RESULTS_DIR = os.path.abspath("./inference/")
# Impo... | def load_raw_mask(self, image_id, class_or_object):
'''Load one of the two kinds of masks in the VOC dataset.
image_id: id of mask
class_or_object: 'class_mask' or 'object_mask' for SegmentationClass or SegmentationObject
Returns:
image: numpy of mask image.
'''
assert class_... | 132 | 146 | import os
import sys
import json
import datetime
import numpy as np
import skimage.draw
from bs4 import BeautifulSoup as bs
import cv2
import imgaug
from utils import *
# Root directory of the project
ROOT_DIR = os.path.abspath("../../")
# Inference result directory
RESULTS_DIR = os.path.abspath("./inference/")
# Impo... |
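The mask loader above is truncated after its assert. A sketch of the lookup it describes, assuming the standard VOC layout where masks live under SegmentationClass/ or SegmentationObject/ as PNGs named by image id; `voc_root` and the filename pattern are assumptions:

```python
import os
import skimage.io

def load_raw_mask_sketch(voc_root, image_id, class_or_object):
    # 'class_mask' -> SegmentationClass, 'object_mask' -> SegmentationObject.
    assert class_or_object in ("class_mask", "object_mask")
    folder = ("SegmentationClass" if class_or_object == "class_mask"
              else "SegmentationObject")
    # VOC stores one indexed-PNG mask per image id.
    return skimage.io.imread(os.path.join(voc_root, folder,
                                          image_id + ".png"))
```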
set | Set a value on a host.
@param hostKeyId: The key id for the destination host to set the
given key. This could be the local host, in which case the hostKeyId
will be the same as this C{Peer}'s keyStore keyId.
@param storagePath: The path to the key to set. For instance, this
could be something like /chat/<somekey>/inb... | from tint.ssl.context import PFSContextFactory
from tint.log import Logger
from tint.protocols.tintp import ConnectionPool
from tint.protocols.tintp import TintProtocolFactory
from tint.friends import FriendsList
class Peer(object):
def __init__(self, keyStore, storage, resolver):
self.keyStore = keyStor... | def set(self, hostKeyId, storagePath, storageValue):
"""
Set a value on a host.
@param hostKeyId: The key id for the destination host to set the
given key. This could be the local host, in which case the hostKeyId
will be the same as this C{Peer}'s keyStore keyId.
@p... | 35 | 50 | from tint.ssl.context import PFSContextFactory
from tint.log import Logger
from tint.protocols.tintp import ConnectionPool
from tint.protocols.tintp import TintProtocolFactory
from tint.friends import FriendsList
class Peer(object):
def __init__(self, keyStore, storage, resolver):
self.keyStore = keyStor... |
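A rough sketch of the local-vs-remote dispatch the `set` docstring implies: write straight to local storage when the destination key id is this peer's own, otherwise forward over the connection pool. The attribute names (`getKeyId()`, `pool.send(...)`) are hypothetical stand-ins, not the tint API:

```python
class PeerSketch(object):
    """Hypothetical stand-in for tint's Peer; names are assumptions."""

    def __init__(self, keyStore, storage, pool):
        self.keyStore = keyStore
        self.storage = storage
        self.pool = pool

    def set(self, hostKeyId, storagePath, storageValue):
        # Local write: the destination is this peer itself.
        if hostKeyId == self.keyStore.getKeyId():  # hypothetical accessor
            return self.storage.set(storagePath, storageValue)
        # Remote write: forward the request to the host over the pool.
        return self.pool.send(hostKeyId, "set", storagePath, storageValue)
```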
_test_null_distribution_basic | Test if de.wald() generates a uniform p-value distribution
if it is given data simulated based on the null model. Returns the p-value
of the two-sided Kolmogorov-Smirnov test for equality of the observed
p-value distribution and a uniform distribution.
:param n_cells: Number of cells to simulate (number of observation... | import logging
import unittest
import numpy as np
import pandas as pd
import scipy.stats as stats
import diffxpy.api as de
class _TestPairwiseNull:
noise_model: str
def _prepate_data(
self,
n_cells: int,
n_genes: int,
n_groups: int
):
if self.nois... | def _test_null_distribution_basic(
self,
test: str,
lazy: bool,
quick_scale: bool = False,
n_cells: int = 3000,
n_genes: int = 200,
n_groups: int = 3
):
"""
Test if de.wald() generates a uniform p-value distribut... | 44 | 86 | import logging
import unittest
import numpy as np
import pandas as pd
import scipy.stats as stats
import diffxpy.api as de
class _TestPairwiseNull:
noise_model: str
def _prepate_data(
self,
n_cells: int,
n_genes: int,
n_groups: int
):
if self.nois... |
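The core check this docstring describes stands on its own: p-values computed on data simulated under the null model should be uniformly distributed, which a two-sided Kolmogorov-Smirnov test can verify. A minimal sketch using simulated stand-in p-values (no diffxpy call, since the row's implementation is truncated):

```python
import numpy as np
import scipy.stats as stats

# Stand-in for the p-values de.wald() would return on null-model data.
rng = np.random.default_rng(0)
pvals = rng.uniform(size=200)  # one p-value per simulated gene

# Two-sided KS test of the observed p-values against Uniform(0, 1).
ks = stats.kstest(pvals, "uniform")
print(ks.pvalue)  # a large p-value means no evidence against uniformity
assert ks.pvalue > 0.05
```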