_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q18600
|
Logger.warning
|
train
|
def warning(self, msg, *args, **kwargs) -> Task: # type: ignore
    """
    Log msg with severity 'WARNING'.

    To pass exception information, use the keyword argument exc_info with
    a true value, e.g.

    await logger.warning("Houston, we have a bit of a problem", exc_info=1)

    Returns:
        Task: task wrapping the log call (see ``_make_log_task``), so the
        caller can await completion of the log write.
    """
    # NOTE: `args` is passed positionally as a tuple (not unpacked);
    # _make_log_task forwards it to the underlying logging machinery.
    return self._make_log_task(logging.WARNING, msg, args, **kwargs)
|
python
|
{
"resource": ""
}
|
q18601
|
Logger.error
|
train
|
def error(self, msg, *args, **kwargs) -> Task: # type: ignore
    """
    Log msg with severity 'ERROR'.

    To pass exception information, use the keyword argument exc_info with
    a true value, e.g.

    await logger.error("Houston, we have a major problem", exc_info=1)

    Returns:
        Task: task wrapping the log call (see ``_make_log_task``), so the
        caller can await completion of the log write.
    """
    # NOTE: `args` is passed positionally as a tuple (not unpacked).
    return self._make_log_task(logging.ERROR, msg, args, **kwargs)
|
python
|
{
"resource": ""
}
|
q18602
|
Logger.critical
|
train
|
def critical(self, msg, *args, **kwargs) -> Task: # type: ignore
    """
    Log msg with severity 'CRITICAL'.

    To pass exception information, use the keyword argument exc_info with
    a true value, e.g.

    await logger.critical("Houston, we have a major disaster", exc_info=1)

    Returns:
        Task: task wrapping the log call (see ``_make_log_task``), so the
        caller can await completion of the log write.
    """
    # NOTE: `args` is passed positionally as a tuple (not unpacked).
    return self._make_log_task(logging.CRITICAL, msg, args, **kwargs)
|
python
|
{
"resource": ""
}
|
q18603
|
Logger.exception
|
train
|
def exception( # type: ignore
    self, msg, *args, exc_info=True, **kwargs
) -> Task:
    """
    Convenience method for logging an ERROR with exception information.

    ``exc_info`` defaults to True so the currently handled exception is
    attached to the record. Delegates to :meth:`error`.
    """
    return self.error(msg, *args, exc_info=exc_info, **kwargs)
|
python
|
{
"resource": ""
}
|
q18604
|
validate_aggregation
|
train
|
def validate_aggregation(agg):
    """Validate an aggregation for use in Vega-Lite.

    Translates ``agg`` into one of the supported named aggregations
    ``['mean', 'sum', 'median', 'min', 'max', 'count']``.

    Parameters
    ----------
    agg : string, callable, or None
        A supported aggregation name, a numpy reduction (np.mean, np.sum,
        np.median, np.min, np.max), or one of the builtins min/max/sum.
        ``None`` is passed through unchanged.

    Returns
    -------
    str or None
        The canonical aggregation name.

    Raises
    ------
    ValueError
        If ``agg`` is not recognized.
    """
    if agg is None:
        return agg
    supported = ['mean', 'sum', 'median', 'min', 'max', 'count']
    # Map callables (numpy reductions and Python builtins) to their names.
    aliases = {getattr(np, name): name
               for name in ('mean', 'sum', 'median', 'min', 'max')}
    aliases.update({min: 'min', max: 'max', sum: 'sum'})
    canonical = aliases.get(agg, agg)
    if canonical not in supported:
        raise ValueError("Unrecognized Vega-Lite aggregation: {0}".format(canonical))
    return canonical
|
python
|
{
"resource": ""
}
|
q18605
|
andrews_curves
|
train
|
def andrews_curves(
    data, class_column, samples=200, alpha=None, width=450, height=300, **kwds
):
    """
    Generates an Andrews curves visualization for visualising clusters of
    multivariate data.

    Andrews curves have the functional form:

        f(t) = x_1/sqrt(2) + x_2 sin(t) + x_3 cos(t) +
               x_4 sin(2t) + x_5 cos(2t) + ...

    Where x coefficients correspond to the values of each dimension and t is
    linearly spaced between -pi and +pi. Each row of frame then corresponds to
    a single curve.

    Parameters:
    -----------
    data : DataFrame
        Data to be plotted, preferably normalized to (0.0, 1.0)
    class_column : string
        Name of the column containing class names
    samples : integer
        Number of points to plot in each curve
    alpha: float, optional
        The transparency of the lines
    width : int, optional
        the width of the plot in pixels
    height : int, optional
        the height of the plot in pixels
    **kwds: keywords
        Additional options

    Returns:
    --------
    chart: alt.Chart object
    """
    # Extra keywords are accepted for API compatibility but are ignored.
    if kwds:
        warnings.warn(
            "Unrecognized keywords in pdvega.andrews_curves(): {0}"
            "".format(list(kwds.keys()))
        )
    # Evaluation grid for f(t), shared by every curve.
    t = np.linspace(-np.pi, np.pi, samples)
    # Transpose so vals[i] is the i-th feature across all rows.
    vals = data.drop(class_column, axis=1).values.T
    # Constant term, broadcast across the t grid.
    # NOTE(review): the docstring says x_1/sqrt(2) but the code does not
    # scale vals[0] by 1/sqrt(2) — confirm which is intended.
    curves = np.outer(vals[0], np.ones_like(t))
    for i in range(1, len(vals)):
        # Alternate sin/cos terms with increasing frequency, per the
        # Andrews series above.
        ft = ((i + 1) // 2) * t
        if i % 2 == 1:
            curves += np.outer(vals[i], np.sin(ft))
        else:
            curves += np.outer(vals[i], np.cos(ft))
    # Long-form frame: one row per (curve, t) pair; the " " column holds
    # the curve value and "sample" identifies the originating data row.
    df = pd.DataFrame(
        {
            "t": np.tile(t, curves.shape[0]),
            "sample": np.repeat(np.arange(curves.shape[0]), curves.shape[1]),
            " ": curves.ravel(),
            class_column: np.repeat(data[class_column], samples),
        }
    )
    chart = alt.Chart(df).properties(width=width, height=height).mark_line()
    chart = chart.encode(
        x=alt.X(field="t", type="quantitative"),
        y=alt.Y(field=" ", type="quantitative"),
        color=alt.Color(field=class_column, type=infer_vegalite_type(df[class_column])),
        detail=alt.Detail(field='sample', type="quantitative")
    )
    # Default to semi-transparent lines when there are many classes.
    if alpha is None and df[class_column].nunique() > 20:
        alpha = 0.5
    if alpha is not None:
        assert 0 <= alpha <= 1
        return chart.encode(opacity=alt.value(alpha))
    return chart
|
python
|
{
"resource": ""
}
|
q18606
|
dict_hash
|
train
|
def dict_hash(dct):
    """Return an MD5 hex digest of the dictionary's JSON serialization.

    Keys are sorted before hashing so that dictionaries with the same
    contents hash identically regardless of insertion order.
    """
    serialized = json.dumps(dct, sort_keys=True)
    try:
        digest = md5(serialized)
    except TypeError:
        # Python 3 md5 requires bytes; fall back to encoding the string.
        digest = md5(serialized.encode())
    return digest.hexdigest()
|
python
|
{
"resource": ""
}
|
q18607
|
exec_then_eval
|
train
|
def exec_then_eval(code, namespace=None):
    """Execute a code block and return the evaluation of its last line.

    The final statement must be an expression; everything before it is
    exec'd into ``namespace`` first.

    NOTE: uses exec/eval — never call this on untrusted input.
    """
    # TODO: make this less brittle.
    namespace = namespace or {}
    tree = ast.parse(code, mode='exec')
    # Pop the trailing expression off the module body so it can be
    # evaluated separately for its value.
    final_expr = ast.Expression(tree.body.pop().value)
    exec(compile(tree, '<string>', mode='exec'), namespace)
    return eval(compile(final_expr, '<string>', mode='eval'), namespace)
|
python
|
{
"resource": ""
}
|
q18608
|
import_obj
|
train
|
def import_obj(clsname, default_module=None):
    """
    Import and return the object named by ``clsname``.

    If ``default_module`` is given and ``clsname`` is not already
    qualified with it, the lookup happens inside that module.

    Raises:
        ImportError: if the attribute does not exist on the module.
    """
    if default_module is not None and not clsname.startswith(default_module + '.'):
        clsname = '{0}.{1}'.format(default_module, clsname)
    # Split "pkg.mod.Name" into module path and attribute name.
    mod, clsname = clsname.rsplit('.', 1)
    mod = importlib.import_module(mod)
    try:
        return getattr(mod, clsname)
    except AttributeError:
        raise ImportError('Cannot import {0} from {1}'.format(clsname, mod))
|
python
|
{
"resource": ""
}
|
q18609
|
get_scheme_cartocss
|
train
|
def get_scheme_cartocss(column, scheme_info):
    """Build a TurboCARTO ``ramp`` CartoCSS expression from scheme parameters."""
    # An explicit color list wins over a named cartocolor scheme.
    if 'colors' in scheme_info:
        color_scheme = '({})'.format(','.join(scheme_info['colors']))
    else:
        color_scheme = 'cartocolor({})'.format(scheme_info['name'])
    bins = scheme_info['bins']
    if not isinstance(bins, int):
        # Iterable of breakpoints -> comma-separated list.
        bins = ','.join(str(b) for b in bins)
    bin_method = scheme_info['bin_method']
    # Fall back to '>=' when the bin method has no registered comparator.
    comparison = ', {}'.format(BinMethod.mapping.get(bin_method, '>='))
    template = ('ramp([{column}], {color_scheme}, '
                '{bin_method}({bins}){comparison})')
    return template.format(column=column,
                           color_scheme=color_scheme,
                           bin_method=bin_method,
                           bins=bins,
                           comparison=comparison)
|
python
|
{
"resource": ""
}
|
q18610
|
custom
|
train
|
def custom(colors, bins=None, bin_method=BinMethod.quantiles):
    """Create a custom scheme.

    Args:
        colors (list of str): Hex color values used for styling data.
        bins (int, optional): Number of bins to style by. Defaults to the
            number of colors supplied.
        bin_method (str, optional): Classification method, one of the
            values in :obj:`BinMethod`. Defaults to `quantiles`, which
            only works with quantitative data.
    """
    if bins is None:
        bins = len(colors)
    return {
        'colors': colors,
        'bins': bins,
        'bin_method': bin_method,
    }
|
python
|
{
"resource": ""
}
|
q18611
|
scheme
|
train
|
def scheme(name, bins, bin_method='quantiles'):
    """Return a custom scheme based on CARTOColors.

    Args:
        name (str): Name of a CARTOColor.
        bins (int or iterable): If an `int`, the number of bins for
            classifying data (CARTOColors have 7 bins max for quantitative
            data, 11 max for qualitative data). If an iterable, the
            explicit upper bounds for classification, e.g.
            ``(10, 20, 30, 40, 50)``.
        bin_method (str, optional): One of the methods in :obj:`BinMethod`.
            Defaults to ``quantiles``. Ignored (stored as ``''``) when
            `bins` is an iterable, since the breaks are then explicit.

    .. Warning::
        Input types are particularly sensitive in this function, and little
        feedback is given for errors. ``name`` and ``bin_method`` arguments
        are case-sensitive.
    """
    method = bin_method if isinstance(bins, int) else ''
    return {'name': name, 'bins': bins, 'bin_method': method}
|
python
|
{
"resource": ""
}
|
q18612
|
CartoContext._is_authenticated
|
train
|
def _is_authenticated(self):
    """Checks if credentials allow for authenticated carto access

    Raises:
        CartoException: if the stored API key is not valid for the user
            in ``self.creds``.
    """
    # The auth API client performs the actual key validation round trip.
    if not self.auth_api_client.is_valid_api_key():
        raise CartoException(
            'Cannot authenticate user `{}`. Check credentials.'.format(
                self.creds.username()))
|
python
|
{
"resource": ""
}
|
q18613
|
CartoContext.read
|
train
|
def read(self, table_name, limit=None, decode_geom=False, shared_user=None, retry_times=3):
    """Read a table from CARTO into a pandas DataFrame.

    Args:
        table_name (str): Name of table in user's CARTO account.
        limit (int, optional): Maximum number of rows to read. Defaults to
            ``None``, which reads the full table.
        decode_geom (bool, optional): Decode CARTO's geometries into
            `Shapely <https://github.com/Toblerity/Shapely>`__ objects
            that can be used, for example, in `GeoPandas
            <http://geopandas.org/>`__.
        shared_user (str, optional): User name (schema) of the account
            that shared the table with you, if reading a shared table.
        retry_times (int, optional): Number of retries to make if the
            read call is rate limited.

    Returns:
        pandas.DataFrame: DataFrame representation of `table_name` from
        CARTO.

    Example:
    .. code:: python

        import cartoframes
        cc = cartoframes.CartoContext(BASEURL, APIKEY)
        df = cc.read('acadia_biodiversity')
    """
    # Schema resolution: organization accounts read from the sharing
    # user's schema (or their own); standalone accounts use 'public'.
    if self.is_org:
        schema = shared_user or self.creds.username()
    else:
        schema = 'public'
    return Dataset(self, table_name, schema).download(
        limit, decode_geom, retry_times)
|
python
|
{
"resource": ""
}
|
q18614
|
CartoContext.tables
|
train
|
def tables(self):
    """List all tables in user's CARTO account

    Returns:
        :obj:`list` of :py:class:`Table <cartoframes.analysis.Table>`
    """
    # Request the bare minimum from the Dataset API: every expensive or
    # unneeded field is explicitly disabled.
    flags = (
        'show_table_size_and_row_count',
        'show_table',
        'show_stats',
        'show_likes',
        'show_liked',
        'show_permission',
        'show_uses_builder_features',
        'show_synchronization',
        'load_totals',
    )
    params = {flag: 'false' for flag in flags}
    datasets = DatasetManager(self.auth_client).filter(**params)
    return [Table.from_dataset(dataset) for dataset in datasets]
|
python
|
{
"resource": ""
}
|
q18615
|
CartoContext.write
|
train
|
def write(self, df, table_name, temp_dir=CACHE_DIR, overwrite=False,
          lnglat=None, encode_geom=False, geom_col=None, **kwargs):
    """Write a DataFrame to a CARTO table.

    Examples:
        Write a pandas DataFrame to CARTO.

        .. code:: python

            cc.write(df, 'brooklyn_poverty', overwrite=True)

        Scrape an HTML table from Wikipedia and send to CARTO with content
        guessing to create a geometry from the country column. This uses
        a CARTO Import API param `content_guessing` parameter.

        .. code:: python

            url = 'https://en.wikipedia.org/wiki/List_of_countries_by_life_expectancy'
            # retrieve first HTML table from that page
            df = pd.read_html(url, header=0)[0]
            # send to carto, let it guess polygons based on the 'country'
            # column. Also set privacy to 'public'
            cc.write(df, 'life_expectancy',
                     content_guessing=True,
                     privacy='public')
            cc.map(layers=Layer('life_expectancy',
                                color='both_sexes_life_expectancy'))

    Args:
        df (pandas.DataFrame): DataFrame to write to ``table_name`` in user
            CARTO account
        table_name (str): Table to write ``df`` to in CARTO.
        temp_dir (str, optional): Directory for temporary storage of data
            that is sent to CARTO. Defaults are defined by `appdirs
            <https://github.com/ActiveState/appdirs/blob/master/README.rst>`__.
        overwrite (bool, optional): Behavior for overwriting ``table_name``
            if it exists on CARTO. Defaults to ``False``.
        lnglat (tuple, optional): lng/lat pair that can be used for
            creating a geometry on CARTO. Defaults to ``None``. In some
            cases, geometry will be created without specifying this. See
            CARTO's `Import API
            <https://carto.com/developers/import-api/reference/#tag/Standard-Tables>`__
            for more information.
        encode_geom (bool, optional): Deprecated; no longer used.
        geom_col (str, optional): Deprecated; no longer used.
        **kwargs: Deprecated; no longer used.

    Returns:
        :py:class:`Dataset <cartoframes.datasets.Dataset>`

    .. note::
        DataFrame indexes are changed to ordinary columns. CARTO creates
        an index called `cartodb_id` for every table that runs from 1 to
        the length of the DataFrame.
    """ # noqa
    # encode_geom, geom_col and **kwargs are kept only for backward
    # compatibility of the signature; warn that they are ignored.
    tqdm.write('Params: encode_geom, geom_col and everything in kwargs are deprecated and not being used any more')
    dataset = Dataset(self, table_name, df=df)
    # Fail the upload if the table exists, unless overwrite was requested.
    if_exists = Dataset.FAIL
    if overwrite:
        if_exists = Dataset.REPLACE
    dataset = dataset.upload(with_lonlat=lnglat, if_exists=if_exists)
    tqdm.write('Table successfully written to CARTO: {table_url}'.format(
        table_url=utils.join_url(self.creds.base_url(),
                                 'dataset',
                                 dataset.table_name)))
    return dataset
|
python
|
{
"resource": ""
}
|
q18616
|
CartoContext._get_privacy
|
train
|
def _get_privacy(self, table_name):
    """gets current privacy of a table

    Returns the privacy as a lowercase string, or ``None`` if the table
    does not exist.
    """
    manager = DatasetManager(self.auth_client)
    try:
        return manager.get(table_name).privacy.lower()
    except NotFoundException:
        return None
|
python
|
{
"resource": ""
}
|
q18617
|
CartoContext._update_privacy
|
train
|
def _update_privacy(self, table_name, privacy):
    """Updates the privacy of a dataset"""
    manager = DatasetManager(self.auth_client)
    target = manager.get(table_name)
    target.privacy = privacy
    target.save()
|
python
|
{
"resource": ""
}
|
q18618
|
CartoContext.fetch
|
train
|
def fetch(self, query, decode_geom=False):
    """Pull the result from an arbitrary SELECT SQL query from a CARTO account
    into a pandas DataFrame.

    Args:
        query (str): SELECT query to run against CARTO user database. This data
            will then be converted into a pandas DataFrame.
        decode_geom (bool, optional): Decodes CARTO's geometries into a
            `Shapely <https://github.com/Toblerity/Shapely>`__
            object that can be used, for example, in `GeoPandas
            <http://geopandas.org/>`__.

    Returns:
        pandas.DataFrame: DataFrame representation of query supplied.
        Pandas data types are inferred from PostgreSQL data types.
        In the case of PostgreSQL date types, dates are attempted to be
        converted, but on failure a data type 'object' is used.

    Examples:
        This query gets the 10 highest values from a table and
        returns a dataframe.

        .. code:: python

            topten_df = cc.fetch(
                '''
                SELECT * FROM
                my_table
                ORDER BY value_column DESC
                LIMIT 10
                '''
            )

        This query joins points to polygons based on intersection, and
        aggregates by summing the values of the points in each polygon. The
        query returns a dataframe, with a geometry column that contains
        polygons.

        .. code:: python

            points_aggregated_to_polygons = cc.fetch(
                '''
                SELECT polygons.*, sum(points.values)
                FROM polygons JOIN points
                ON ST_Intersects(points.the_geom, polygons.the_geom)
                GROUP BY polygons.the_geom, polygons.cartodb_id
                ''',
                decode_geom=True
            )
    """
    # Export the query result via the SQL COPY API as CSV.
    copy_query = 'COPY ({query}) TO stdout WITH (FORMAT csv, HEADER true)'.format(query=query)
    # Column metadata drives the dtype mapping and date parsing below.
    query_columns = get_columns(self, query)
    result = recursive_read(self, copy_query)
    # Dates are excluded from the dtype map so read_csv can parse them.
    df_types = dtypes(query_columns, exclude_dates=True)
    df = pd.read_csv(result, dtype=df_types,
                     parse_dates=date_columns_names(query_columns),
                     true_values=['t'],
                     false_values=['f'],
                     index_col='cartodb_id' if 'cartodb_id' in df_types.keys() else False,
                     converters={'the_geom': lambda x: _decode_geom(x) if decode_geom else x})
    if decode_geom:
        df.rename({'the_geom': 'geometry'}, axis='columns', inplace=True)
    return df
|
python
|
{
"resource": ""
}
|
q18619
|
CartoContext.query
|
train
|
def query(self, query, table_name=None, decode_geom=False, is_select=None):
    """Pull the result from an arbitrary SQL SELECT query from a CARTO account
    into a pandas DataFrame. This is the default behavior, when `is_select=True`

    Can also be used to perform database operations (creating/dropping tables,
    adding columns, updates, etc.). In this case, you have to explicitly
    specify `is_select=False`

    This method is a helper for the `CartoContext.fetch` and `CartoContext.execute`
    methods. We strongly encourage you to use any of those methods depending on the
    type of query you want to run. If you want to get the results of a `SELECT` query
    into a pandas DataFrame, then use `CartoContext.fetch`. For any other query that
    performs an operation into the CARTO database, use `CartoContext.execute`

    Args:
        query (str): Query to run against CARTO user database. This data
            will then be converted into a pandas DataFrame.
        table_name (str, optional): If set (and `is_select=True`), this will create a new
            table in the user's CARTO account that is the result of the SELECT
            query provided. Defaults to None (no table created).
        decode_geom (bool, optional): Decodes CARTO's geometries into a
            `Shapely <https://github.com/Toblerity/Shapely>`__
            object that can be used, for example, in `GeoPandas
            <http://geopandas.org/>`__. It only works for SELECT queries when `is_select=True`
        is_select (bool, optional): This argument has to be set depending on the query
            performed. True for SELECT queries, False for any other query.
            For the case of a SELECT SQL query (`is_select=True`) the result will be stored into a
            pandas DataFrame.
            When an arbitrary SQL query (`is_select=False`) it will perform a database
            operation (UPDATE, DROP, INSERT, etc.)
            By default `is_select=None` that means that the method will return a dataframe if
            the `query` starts with a `select` clause, otherwise it will just execute the query
            and return `None`

    Returns:
        pandas.DataFrame: When `is_select=True` and the query is actually a SELECT query
        this method returns a pandas DataFrame representation of query supplied otherwise
        returns None.
        Pandas data types are inferred from PostgreSQL data types.
        In the case of PostgreSQL date types, dates are attempted to be
        converted, but on failure a data type 'object' is used.

    Raises:
        CartoException: If there's any error when executing the query

    Examples:
        Query a table in CARTO and write a new table that is result of
        query. This query gets the 10 highest values from a table and
        returns a dataframe, as well as creating a new table called
        'top_ten' in the CARTO account.

        .. code:: python

            topten_df = cc.query(
                '''
                SELECT * FROM
                my_table
                ORDER BY value_column DESC
                LIMIT 10
                ''',
                table_name='top_ten'
            )

        This query joins points to polygons based on intersection, and
        aggregates by summing the values of the points in each polygon. The
        query returns a dataframe, with a geometry column that contains
        polygons and also creates a new table called
        'points_aggregated_to_polygons' in the CARTO account.

        .. code:: python

            points_aggregated_to_polygons = cc.query(
                '''
                SELECT polygons.*, sum(points.values)
                FROM polygons JOIN points
                ON ST_Intersects(points.the_geom, polygons.the_geom)
                GROUP BY polygons.the_geom, polygons.cartodb_id
                ''',
                table_name='points_aggregated_to_polygons',
                decode_geom=True
            )

        Drops `my_table`

        .. code:: python

            cc.query(
                '''
                DROP TABLE my_table
                '''
            )

        Updates the column `my_column` in the table `my_table`

        .. code:: python

            cc.query(
                '''
                UPDATE my_table SET my_column = 1
                '''
            )
    """
    dataframe = None
    # Treat the query as a SELECT either when explicitly requested or,
    # with is_select=None, when the query text starts with 'select'.
    is_select_query = is_select or (is_select is None and query.strip().lower().startswith('select'))
    if is_select_query:
        if table_name:
            # Materialize the result as a new table, then download it.
            dataset = Dataset.create_from_query(self, query, table_name)
            dataframe = dataset.download(decode_geom=decode_geom)
        else:
            dataframe = self.fetch(query, decode_geom=decode_geom)
    else:
        # Non-SELECT statements are executed for their side effects only.
        self.execute(query)
    return dataframe
|
python
|
{
"resource": ""
}
|
q18620
|
CartoContext._check_query
|
train
|
def _check_query(self, query, style_cols=None):
"""Checks if query from Layer or QueryLayer is valid"""
try:
self.sql_client.send(
utils.minify_sql((
'EXPLAIN',
'SELECT',
' {style_cols}{comma}',
' the_geom, the_geom_webmercator',
'FROM ({query}) _wrap;',
)).format(query=query,
comma=',' if style_cols else '',
style_cols=(','.join(style_cols)
if style_cols else '')),
do_post=False)
except Exception as err:
raise ValueError(('Layer query `{query}` and/or style column(s) '
'{cols} are not valid: {err}.'
'').format(query=query,
cols=', '.join(['`{}`'.format(c)
for c in style_cols]),
err=err))
|
python
|
{
"resource": ""
}
|
q18621
|
CartoContext._get_bounds
|
train
|
def _get_bounds(self, layers):
    """Return the bounds of all data layers involved in a cartoframes map.

    Args:
        layers (list): List of cartoframes layers. See `cartoframes.layers`
            for all types.

    Returns:
        dict: Dictionary of northern, southern, eastern, and western bounds
            of the superset of data layers. Keys are `north`, `south`,
            `east`, and `west`. Units are in WGS84.
    """
    # Per-layer extent, computed server-side; basemap layers are skipped.
    extent_query = ('SELECT ST_EXTENT(the_geom) AS the_geom '
                    'FROM ({query}) AS t{idx}\n')
    union_query = 'UNION ALL\n'.join(
        [extent_query.format(query=layer.orig_query, idx=idx)
         for idx, layer in enumerate(layers)
         if not layer.is_basemap])
    # Collapse the per-layer extents into one bounding box and unpack
    # its corners into the four cardinal directions.
    extent = self.sql_client.send(
        utils.minify_sql((
            'SELECT',
            ' ST_XMIN(ext) AS west,',
            ' ST_YMIN(ext) AS south,',
            ' ST_XMAX(ext) AS east,',
            ' ST_YMAX(ext) AS north',
            'FROM (',
            ' SELECT ST_Extent(the_geom) AS ext',
            ' FROM ({union_query}) AS _wrap1',
            ') AS _wrap2',
        )).format(union_query=union_query),
        do_post=False)
    return extent['rows'][0]
|
python
|
{
"resource": ""
}
|
q18622
|
vmap
|
train
|
def vmap(layers,
         context,
         size=None,
         basemap=BaseMaps.voyager,
         bounds=None,
         viewport=None,
         **kwargs):
    """CARTO VL-powered interactive map

    Args:
        layers (list of Layer-types): List of layers. One or more of
            :py:class:`Layer <cartoframes.contrib.vector.Layer>`,
            :py:class:`QueryLayer <cartoframes.contrib.vector.QueryLayer>`, or
            :py:class:`LocalLayer <cartoframes.contrib.vector.LocalLayer>`.
        context (:py:class:`CartoContext <cartoframes.context.CartoContext>`):
            A :py:class:`CartoContext <cartoframes.context.CartoContext>`
            instance
        size (tuple of int or str): a (width, height) pair for the size of the map.
            Default is None, which makes the map 100% wide and 640px tall. If specified as int,
            will be used as pixels, but you can also use string values for the CSS attributes.
            So, you could specify it as size=('75%', 250).
        basemap (str):
            - if a `str`, name of a CARTO vector basemap. One of `positron`,
              `voyager`, or `darkmatter` from the :obj:`BaseMaps` class
            - if a `dict`, Mapbox or other style as the value of the `style` key.
              If a Mapbox style, the access token is the value of the `token`
              key.
        bounds (dict or list): a dict with `east`,`north`,`west`,`south`
            properties, or a list of floats in the following order: [west,
            south, east, north]. If not provided the bounds will be automatically
            calculated to fit all features.
        viewport (dict): Configure where and how map will be centered. If not specified, or
            specified without lat / lng, automatic bounds or the bounds argument will be used
            to center the map. You can specify only zoom, bearing or pitch if you desire
            automatic bounds but want to tweak the viewport.

            - lng (float): Longitude to center the map on. Must specify lat as well.
            - lat (float): Latitude to center the map on. Must specify lng as well.
            - zoom (float): Zoom level.
            - bearing (float): A bearing, or heading, is the direction you're facing,
              measured clockwise as an angle from true north on a compass.
              (north is 0, east is 90, south is 180, and west is 270).
            - pitch (float): The angle towards the horizon measured in degrees, with a
              range between 0 and 60 degrees. Zero degrees results in a two-dimensional
              map, as if your line of sight forms a perpendicular angle with
              the earth's surface.

    Example:
        .. code::

            from cartoframes.contrib import vector
            from cartoframes import CartoContext
            cc = CartoContext(
                base_url='https://your_user_name.carto.com',
                api_key='your api key'
            )
            vector.vmap([vector.Layer('table in your account'), ], cc)

        CARTO basemap style.

        .. code::

            from cartoframes.contrib import vector
            from cartoframes import CartoContext
            cc = CartoContext(
                base_url='https://your_user_name.carto.com',
                api_key='your api key'
            )
            vector.vmap(
                [vector.Layer('table in your account'), ],
                context=cc,
                basemap=vector.BaseMaps.darkmatter
            )

        Custom basemap style. Here we use the Mapbox streets style, which
        requires an access token.

        .. code::

            from cartoframes.contrib import vector
            from cartoframes import CartoContext
            cc = CartoContext(
                base_url='https://<username>.carto.com',
                api_key='your api key'
            )
            vector.vmap(
                [vector.Layer('table in your account'), ],
                context=cc,
                basemap={
                    'style': 'mapbox://styles/mapbox/streets-v9',
                    'token': '<your mapbox token>'
                }
            )

        Custom bounds

        .. code::

            from cartoframes.contrib import vector
            from cartoframes import CartoContext
            cc = CartoContext(
                base_url='https://<username>.carto.com',
                api_key='your api key'
            )
            vector.vmap(
                [vector.Layer('table in your account'), ],
                context=cc,
                bounds={'west': -10, 'east': 10, 'north': 10, 'south': -10}
            )

        Adjusting the map's viewport.

        .. code::

            from cartoframes.contrib import vector
            from cartoframes import CartoContext
            cc = CartoContext(
                base_url='https://<username>.carto.com',
                api_key='your api key'
            )
            vector.vmap(
                [vector.Layer('table in your account'), ],
                context=cc,
                viewport={'lng': 10, 'lat': 15, 'zoom': 10, 'bearing': 90, 'pitch': 45}
            )
    """
    # Normalize user-supplied bounds, or compute them from the layers.
    if bounds:
        bounds = _format_bounds(bounds)
    else:
        bounds = _get_super_bounds(layers, context)
    # Serialize each layer into the plain-dict form the HTML template needs.
    jslayers = []
    for layer in layers:
        is_local = isinstance(layer, LocalLayer)
        intera = (
            dict(event=layer.interactivity, header=layer.header)
            if layer.interactivity is not None
            else None
        )
        jslayers.append({
            'is_local': is_local,
            'styling': layer.styling,
            # Local layers embed their GeoJSON; remote layers send a query.
            'source': layer._geojson_str if is_local else layer.query,
            'interactivity': intera,
            'legend': layer.legend
        })
    # Private kwargs used for testing / pinning asset versions.
    _carto_vl_path = kwargs.get('_carto_vl_path', _DEFAULT_CARTO_VL_PATH)
    _airship_path = kwargs.get('_airship_path', None)
    html = _get_html_doc(
        size,
        jslayers,
        bounds,
        creds=context.creds,
        viewport=viewport,
        basemap=basemap,
        _carto_vl_path=_carto_vl_path,
        _airship_path=_airship_path)
    return HTML(html)
|
python
|
{
"resource": ""
}
|
q18623
|
_get_bounds_local
|
train
|
def _get_bounds_local(layers):
"""Aggregates bounding boxes of all local layers
return: dict of bounding box of all bounds in layers
"""
if not layers:
return {'west': None, 'south': None, 'east': None, 'north': None}
bounds = layers[0].bounds
for layer in layers[1:]:
bounds = np.concatenate(
(
np.minimum(
bounds[:2],
layer.bounds[:2]
),
np.maximum(
bounds[2:],
layer.bounds[2:]
)
)
)
return dict(zip(['west', 'south', 'east', 'north'], bounds))
|
python
|
{
"resource": ""
}
|
q18624
|
_combine_bounds
|
train
|
def _combine_bounds(bbox1, bbox2):
"""Takes two bounding boxes dicts and gives a new bbox that encompasses
them both"""
WORLD = {'west': -180, 'south': -85.1, 'east': 180, 'north': 85.1}
ALL_KEYS = set(WORLD.keys())
def dict_all_nones(bbox_dict):
"""Returns True if all dict values are None"""
return all(v is None for v in bbox_dict.values())
# if neither are defined, use the world
if not bbox1 and not bbox2:
return WORLD
# if all nones, use the world
if dict_all_nones(bbox1) and dict_all_nones(bbox2):
return WORLD
assert ALL_KEYS == set(bbox1.keys()) and ALL_KEYS == set(bbox2.keys()),\
'Input bounding boxes must have the same dictionary keys'
# create dict with cardinal directions and None-valued keys
outbbox = dict.fromkeys(['west', 'south', 'east', 'north'])
def conv2nan(val):
"""convert Nones to np.nans"""
return np.nan if val is None else val
# set values and/or defaults
for coord in ('north', 'east'):
outbbox[coord] = np.nanmax([
conv2nan(bbox1[coord]),
conv2nan(bbox2[coord])
])
for coord in ('south', 'west'):
outbbox[coord] = np.nanmin([
conv2nan(bbox1[coord]),
conv2nan(bbox2[coord])
])
return outbbox
|
python
|
{
"resource": ""
}
|
q18625
|
QueryLayer._compose_style
|
train
|
def _compose_style(self):
"""Appends `prop` with `style` to layer styling"""
valid_styles = (
'color', 'width', 'filter', 'strokeWidth', 'strokeColor',
)
self.styling = '\n'.join(
'{prop}: {style}'.format(prop=s, style=getattr(self, s))
for s in valid_styles
if getattr(self, s) is not None
)
|
python
|
{
"resource": ""
}
|
q18626
|
QueryLayer._set_interactivity
|
train
|
def _set_interactivity(self, interactivity):
"""Adds interactivity syntax to the styling"""
event_default = 'hover'
if interactivity is None:
return
if isinstance(interactivity, (tuple, list)):
self.interactivity = event_default
interactive_cols = '\n'.join(
'@{0}: ${0}'.format(col) for col in interactivity
)
elif isinstance(interactivity, str):
self.interactivity = event_default
interactive_cols = '@{0}: ${0}'.format(interactivity)
elif isinstance(interactivity, dict):
self.interactivity = interactivity.get('event', event_default)
self.header = interactivity.get('header')
interactive_cols = '\n'.join(
'@{0}: ${0}'.format(col) for col in interactivity['cols']
)
else:
raise ValueError('`interactivity` must be a str, a list of str, '
'or a dict with a `cols` key')
self.styling = '\n'.join([interactive_cols, self.styling])
|
python
|
{
"resource": ""
}
|
q18627
|
get_map_name
|
train
|
def get_map_name(layers, has_zoom):
    """Creates a map name based on supplied parameters"""
    version = '20170406'
    # TODO: Remove the baseid component once baselayer urls can be passed
    # in named map config.
    basemap_id = {'light': 0, 'dark': 1, 'voyager': 2}[layers[0].source]
    num_layers = len(non_basemap_layers(layers))
    time_flag = '1' if has_time_layer(layers) else '0'
    labels_flag = '1' if (len(layers) > 1 and layers[-1].is_basemap) else '0'
    zoom_flag = '1' if has_zoom else '0'
    return ('cartoframes_ver{version}'
            '_layers{layers}'
            '_time{has_time}'
            '_baseid{baseid}'
            '_labels{has_labels}'
            '_zoom{has_zoom}').format(
                version=version,
                layers=num_layers,
                has_time=time_flag,
                baseid=basemap_id,
                has_labels=labels_flag,
                has_zoom=zoom_flag
            )
|
python
|
{
"resource": ""
}
|
q18628
|
get_map_template
|
train
|
def get_map_template(layers, has_zoom):
    """Build a CARTO named-map template (as a JSON string) for `layers`.

    The template contains a basemap layer, one mapnik/torque layer per
    data layer (with `cartocss_N`/`sql_N` placeholders), an optional
    label layer on top, and either a zoom/center or a bounds-based view
    depending on `has_zoom`.
    """
    num_layers = len(non_basemap_layers(layers))
    has_time = has_time_layer(layers)
    name = get_map_name(layers, has_zoom=has_zoom)
    # Add basemap layer
    layers_field = [{
        'type': 'http',
        'options': {
            # TODO: Remove this once baselayer urls can be passed in named map
            # config
            'urlTemplate': layers[0].url,
            # 'urlTemplate': '<%= basemap_url %>',
            'subdomains': "abcd",
        },
    }]
    # [BUG] Remove this once baselayer urls can be passed in named map config
    placeholders = {}
    # placeholders = {
    #     'basemap_url': {
    #         'type': 'sql_ident',
    #         'default': ('https://cartodb-basemaps-{s}.global.ssl.fastly.net/'
    #                     'dark_all/{z}/{x}/{y}.png'),
    #     },
    # }
    # One templated layer per data layer; only the LAST layer may be a
    # torque (time-animated) layer.
    for idx in range(num_layers):
        layers_field.extend([{
            'type': ('torque' if (has_time and idx == (num_layers - 1))
                     else 'mapnik'),
            'options': {
                'cartocss_version': '2.1.1',
                'cartocss': '<%= cartocss_{idx} %>'.format(idx=idx),
                'sql': '<%= sql_{idx} %>'.format(idx=idx),
                # [BUG] No [] for templating
                # 'interactivity': '<%= interactivity_{idx} %>'.format(
                #     idx=idx),
            }
        }])
        # Placeholders get harmless defaults (a red-dot style and a
        # single point at the origin) so the template always renders.
        placeholders.update({
            'cartocss_{idx}'.format(idx=idx): {
                'type': 'sql_ident',
                'default': ('#layer {'
                            ' marker-fill: red;'
                            ' marker-width: 5;'
                            ' marker-allow-overlap: true;'
                            ' marker-line-color: #000; }'),
            },
            'sql_{idx}'.format(idx=idx): {
                'type': 'sql_ident',
                'default': (
                    "SELECT "
                    "ST_PointFromText('POINT(0 0)', 4326) AS the_geom, "
                    "1 AS cartodb_id, "
                    "ST_PointFromText('Point(0 0)', 3857) AS "
                    "the_geom_webmercator"
                ),
            },
            # [BUG] No [] for templating
            # 'interactivity_{idx}'.format(idx=idx): {
            #     'type': 'sql_ident',
            #     'default': '["cartodb_id"]',
            # },
        })
    # Add labels if they're in front
    if num_layers > 0 and layers[-1].is_basemap:
        layers_field.extend([{
            'type': 'http',
            'options': {
                # TODO: Remove this once baselayer urls can be passed in named
                # map config
                'urlTemplate': layers[-1].url,
                # 'urlTemplate': '<%= basemap_url %>',
                'subdomains': "abcd",
            },
        }])
    # View: either zoom/center placeholders or a bounding-box view.
    if has_zoom:
        view = {
            'zoom': '<%= zoom %>',
            'center': {
                'lng': '<%= lng %>',
                'lat': '<%= lat %>',
            },
        }
        placeholders.update({
            'zoom': {
                'type': 'number',
                'default': 3,
            },
            'lng': {
                'type': 'number',
                'default': 0,
            },
            'lat': {
                'type': 'number',
                'default': 0,
            },
        })
    else:
        view = {
            'bounds': {
                'west': '<%= west %>',
                'south': '<%= south %>',
                'east': '<%= east %>',
                'north': '<%= north %>',
            },
        }
        placeholders.update({
            'west': {
                'type': 'number',
                'default': -45,
            },
            'south': {
                'type': 'number',
                'default': -45,
            },
            'east': {
                'type': 'number',
                'default': 45,
            },
            'north': {
                'type': 'number',
                'default': 45,
            },
        })
    return json.dumps({
        'version': '0.0.1',
        'name': name,
        'placeholders': placeholders,
        'layergroup': {
            'version': '1.0.1',
            'layers': layers_field,
        },
        'view': view,
    })
|
python
|
{
"resource": ""
}
|
q18629
|
QueryLayer._parse_color
|
train
|
def _parse_color(self, color):
"""Setup the color scheme"""
# If column was specified, force a scheme
# It could be that there is a column named 'blue' for example
if isinstance(color, dict):
if 'column' not in color:
raise ValueError("Color must include a 'column' value")
# get scheme if exists. if not, one will be chosen later if needed
scheme = color.get('scheme')
color = color['column']
self.style_cols[color] = None
elif (color and
color[0] != '#' and
color not in webcolors.CSS3_NAMES_TO_HEX):
# color specified that is not a web color or hex value so its
# assumed to be a column name
color = color
self.style_cols[color] = None
scheme = None
else:
# assume it's a color (hex, rgb(...), or webcolor name)
color = color
scheme = None
return color, scheme
|
python
|
{
"resource": ""
}
|
q18630
|
QueryLayer._parse_time
|
train
|
def _parse_time(self, time):
"""Parse time inputs"""
if time is None:
return None
if isinstance(time, dict):
if 'column' not in time:
raise ValueError("`time` must include a 'column' key/value")
time_column = time['column']
time_options = time
elif isinstance(time, str):
time_column = time
time_options = {}
else:
raise ValueError(
'`time` should be a column name or dictionary of '
'styling options.')
self.style_cols[time_column] = None
time = {
'column': time_column,
'method': 'count',
'cumulative': False,
'frames': 256,
'duration': 30,
'trails': 2,
}
time.update(time_options)
return time
|
python
|
{
"resource": ""
}
|
q18631
|
QueryLayer._parse_size
|
train
|
def _parse_size(self, size, has_time=False):
    """Normalize `size` into either a fixed size or a dict of options.

    Args:
        size: ``None``, a number (fixed size), a column name (str), or a
            dict with a required 'column' key and optional 'range' (or
            'min'/'max'), 'bins', and 'bin_method' keys.
        has_time (bool): Whether the layer is time-animated. Animated
            layers only accept fixed sizes.

    Returns:
        The fixed size (number) or a dict of data-driven size options.

    Raises:
        ValueError: If a dict without a 'column' key is given, or a
            data-driven size is combined with a time layer.
    """
    # default fixed size depends on whether the map is animated
    size = size or (4 if has_time else 10)
    if isinstance(size, str):
        size = {'column': size}
    if isinstance(size, dict):
        if 'column' not in size:
            raise ValueError("`size` must include a 'column' key/value")
        if has_time:
            raise ValueError("When time is specified, size can "
                             "only be a fixed size")
        # work on a copy so the caller's dict is not mutated
        # (the previous implementation popped 'min'/'max' and rewrote
        # 'range' in place on the user's input)
        user_opts = dict(size)
        # defaults for data-driven sizing
        size = {
            'range': [5, 25],
            'bins': 5,
            'bin_method': BinMethod.quantiles,
        }
        # 'min'/'max' override the corresponding end of 'range'
        size_range = list(user_opts.get('range', size['range']))
        if 'min' in user_opts:
            size_range[0] = user_opts.pop('min')
        if 'max' in user_opts:
            size_range[1] = user_opts.pop('max')
        user_opts['range'] = size_range
        # user options take precedence over the defaults
        size.update(user_opts)
        self.style_cols[size['column']] = None
    return size
|
python
|
{
"resource": ""
}
|
q18632
|
QueryLayer._validate_columns
|
train
|
def _validate_columns(self):
"""Validate the options in the styles"""
geom_cols = {'the_geom', 'the_geom_webmercator', }
col_overlap = set(self.style_cols) & geom_cols
if col_overlap:
raise ValueError('Style columns cannot be geometry '
'columns. `{col}` was chosen.'.format(
col=','.join(col_overlap)))
|
python
|
{
"resource": ""
}
|
q18633
|
QueryLayer._setup
|
train
|
def _setup(self, layers, layer_idx):
    """Finalize layer styling once geometry and column types are known.

    Called when a map render is requested from zero or more data layers:
    picks default colors/sizes, chooses a color scheme if needed, and
    builds the CartoCSS (or torque setup for animated layers).
    """
    basemap = layers[0]
    # if color not specified, choose a default
    if self.time:
        # default time/torque color
        self.color = self.color or '#2752ff'
    else:
        self.color = self.color or DEFAULT_COLORS[layer_idx]
    if isinstance(self.size, (int, float)):
        default_size = 4 if self.geom_type == 'point' else 1.5
        self.size = self.size or default_size
    # choose appropriate scheme if not already specified
    if not self.scheme and self.color in self.style_cols:
        self._choose_scheme()
    if self.time:
        self._setup_time(basemap)
    else:
        # use turbo-carto for non-animated maps
        self.cartocss = self._get_cartocss(basemap)
|
python
|
{
"resource": ""
}
|
q18634
|
QueryLayer._choose_scheme
|
train
|
def _choose_scheme(self):
    """Pick a default color scheme based on the color column's type."""
    col_type = self.style_cols[self.color]
    if col_type in ('string', 'boolean', ):
        # categorical palette
        self.scheme = antique(10)
    elif col_type in ('number', ):
        # sequential palette
        self.scheme = mint(5)
    elif col_type in ('date', 'geometry', ):
        raise ValueError(
            'Cannot style column `{col}` of type `{type}`. It must be '
            'numeric, text, or boolean.'.format(
                col=self.color, type=col_type))
|
python
|
{
"resource": ""
}
|
q18635
|
normalize_names
|
train
|
def normalize_names(column_names):
    """SQL-normalize each column name the way CARTO's Import API does.

    Examples
        * 'Field: 2' -> 'field_2'
        * '2 Items' -> '_2_items'
        * 'Unnamed: 0' -> 'unnamed_0'
        * '201moore' -> '_201moore' (and '_201moore_1' on collision)
        * 'Acadia 1.2.3' -> 'acadia_1_2_3'
        * '_testingTesting' -> '_testingtesting'
        * 1 -> '_1', 1.0 -> '_1_0'
        * 'SELECT' -> '_select', 'all' -> '_all' (reserved words)
        * 'à' -> 'a'
        * over-long names are truncated (and de-duplicated with a
          numeric suffix)

    Args:
        column_names (list): List of column names that will be SQL normalized

    Returns:
        list: List of SQL-normalized column names
    """
    normalized = []
    for name in column_names:
        # pass already-normalized names so collisions get a suffix
        column = Column(name).normalize(forbidden_column_names=normalized)
        normalized.append(column.name)
    return normalized
|
python
|
{
"resource": ""
}
|
q18636
|
cssify
|
train
|
def cssify(css_dict):
    """Render a nested dict (selector -> {property: value}) as CartoCSS."""
    pieces = []
    for selector, props in dict_items(css_dict):
        pieces.append('{key} {{ '.format(key=selector))
        for prop, val in dict_items(props):
            pieces.append(' {field}: {field_value};'.format(
                field=prop, field_value=val))
        pieces.append('} ')
    return ''.join(pieces).strip()
|
python
|
{
"resource": ""
}
|
q18637
|
temp_ignore_warnings
|
train
|
def temp_ignore_warnings(func):
    """Decorator that silences warnings (such as those emitted by the
    carto python sdk) for the duration of each call to `func`, restoring
    the warning filters afterward."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        """Call `func` with all warnings filtered out."""
        with catch_warnings():
            filterwarnings('ignore')
            return func(*args, **kwargs)
    return wrapper
|
python
|
{
"resource": ""
}
|
q18638
|
get_columns
|
train
|
def get_columns(context, query):
    """Return a list of cartoframes.columns.Column describing `query`'s
    result set, or None if the SQL API response has no field metadata."""
    response = context.sql_client.send(query)
    if 'fields' not in response:
        return None
    return Column.from_sql_api_fields(response['fields'])
|
python
|
{
"resource": ""
}
|
q18639
|
get_column_names
|
train
|
def get_column_names(context, query):
    """Return the SQL API field metadata (column names and types) for
    `query`, or None when the response carries no 'fields' entry."""
    response = context.sql_client.send(query)
    return response['fields'] if 'fields' in response else None
|
python
|
{
"resource": ""
}
|
q18640
|
_encode_decode_decorator
|
train
|
def _encode_decode_decorator(func):
"""decorator for encoding and decoding geoms"""
def wrapper(*args):
"""error catching"""
try:
processed_geom = func(*args)
return processed_geom
except ImportError as err:
raise ImportError('The Python package `shapely` needs to be '
'installed to encode or decode geometries. '
'({})'.format(err))
return wrapper
|
python
|
{
"resource": ""
}
|
q18641
|
_decode_geom
|
train
|
def _decode_geom(ewkb):
"""Decode encoded wkb into a shapely geometry
"""
# it's already a shapely object
if hasattr(ewkb, 'geom_type'):
return ewkb
from shapely import wkb
from shapely import wkt
if ewkb:
try:
return wkb.loads(ba.unhexlify(ewkb))
except Exception:
try:
return wkb.loads(ba.unhexlify(ewkb), hex=True)
except Exception:
try:
return wkb.loads(ewkb, hex=True)
except Exception:
try:
return wkb.loads(ewkb)
except Exception:
try:
return wkt.loads(ewkb)
except Exception:
pass
return None
|
python
|
{
"resource": ""
}
|
q18642
|
Dataset.exists
|
train
|
def exists(self):
    """Return True if `self.table_name` exists on the CARTO server.

    Probes with an EXPLAIN query; a CartoException from the SQL API is
    interpreted as "table does not exist".
    """
    probe = 'EXPLAIN SELECT * FROM "{table_name}"'.format(
        table_name=self.table_name)
    try:
        self.cc.sql_client.send(probe, do_post=False)
    except CartoException as err:
        # If table doesn't exist, we get an error from the SQL API
        self.cc._debug_print(err=err)
        return False
    return True
|
python
|
{
"resource": ""
}
|
q18643
|
Credentials.save
|
train
|
def save(self, config_loc=None):
"""Saves current user credentials to user directory.
Args:
config_loc (str, optional): Location where credentials are to be
stored. If no argument is provided, it will be send to the
default location.
Example:
.. code::
from cartoframes import Credentials
creds = Credentials(username='eschbacher', key='abcdefg')
creds.save() # save to default location
"""
if not os.path.exists(_USER_CONFIG_DIR):
"""create directory if not exists"""
os.makedirs(_USER_CONFIG_DIR)
with open(_DEFAULT_PATH, 'w') as f:
json.dump({'key': self._key, 'base_url': self._base_url,
'username': self._username}, f)
|
python
|
{
"resource": ""
}
|
q18644
|
Credentials._retrieve
|
train
|
def _retrieve(self, config_file=None):
"""Retrives credentials from a file. Defaults to the user config
directory"""
with open(config_file or _DEFAULT_PATH, 'r') as f:
creds = json.load(f)
self._key = creds.get('key')
self._base_url = creds.get('base_url')
self._username = creds.get('username')
|
python
|
{
"resource": ""
}
|
q18645
|
Credentials.delete
|
train
|
def delete(self, config_file=None):
"""Deletes the credentials file specified in `config_file`. If no
file is specified, it deletes the default user credential file.
Args:
config_file (str): Path to configuration file. Defaults to delete
the user default location if `None`.
.. Tip::
To see if there is a default user credential file stored, do the
following::
>>> creds = Credentials()
>>> print(creds)
Credentials(username=eschbacher, key=abcdefg,
base_url=https://eschbacher.carto.com/)
"""
path_to_remove = config_file or _DEFAULT_PATH
try:
os.remove(path_to_remove)
print('Credentials at {} successfully removed.'.format(
path_to_remove))
except OSError as err:
warnings.warn('No credential file found at {}.'.format(
path_to_remove))
|
python
|
{
"resource": ""
}
|
q18646
|
Credentials.set
|
train
|
def set(self, key=None, username=None, base_url=None):
    """Update the credentials of a Credentials instance instead with new
    values.
    Args:
        key (str): API key of user account. Defaults to previous value if
            not specified.
        username (str): User name of account. This parameter is optional if
            `base_url` is not specified, but defaults to the previous
            value if not set.
        base_url (str): Base URL of user account. This parameter is
            optional if `username` is specified and on CARTO's
            cloud-based account. Generally of the form
            ``https://your_user_name.carto.com/`` for cloud-based accounts.
            If on-prem or otherwise, contact your admin.
    Example:
        .. code::
            from cartoframes import Credentials
            # load credentials saved in previous session
            creds = Credentials()
            # set new API key
            creds.set(key='new_api_key')
            # save new creds to default user config directory
            creds.save()
    Note:
        If the `username` is specified but the `base_url` is not, the
        `base_url` will be updated to ``https://<username>.carto.com/``.
    """
    # Delegate to __init__ so all validation/derivation happens in one
    # place. key/username fall back to the current values; base_url is
    # passed through unchanged (per the Note, __init__ re-derives it
    # from username when it is None).
    self.__init__(key=(key or self._key),
                  username=(username or self._username),
                  base_url=base_url)
|
python
|
{
"resource": ""
}
|
q18647
|
Credentials.base_url
|
train
|
def base_url(self, base_url=None):
"""Return or set `base_url`.
Args:
base_url (str, optional): If set, updates the `base_url`. Otherwise
returns current `base_url`.
Note:
This does not update the `username` attribute. Separately update
the username with ``Credentials.username`` or update `base_url` and
`username` at the same time with ``Credentials.set``.
Example:
.. code::
>>> from cartoframes import Credentials
# load credentials saved in previous session
>>> creds = Credentials()
# returns current base_url
>>> creds.base_url()
'https://eschbacher.carto.com/'
# updates base_url with new value
>>> creds.base_url('new_base_url')
"""
if base_url:
# POSTs need to be over HTTPS (e.g., Import API reverts to a GET)
if urlparse(base_url).scheme != 'https':
raise ValueError(
'`base_url`s need to be over `https`. Update your '
'`base_url`.'
)
self._base_url = base_url
else:
return self._base_url
|
python
|
{
"resource": ""
}
|
q18648
|
chat
|
train
|
def chat(room=None, stream=None, **kwargs):
    """Quick setup for a chatroom.
    :param str room: Roomname, if not given, a random sequence is generated and printed.
    :param MediaStream stream: The media stream to share, if not given a CameraStream will be created.
    :rtype: WebRTCRoom
    """
    # NOTE(review): **kwargs is accepted but never used — confirm intent.
    if room is None:
        # no room given: generate one and print it so others can join
        room = _random_room()
        print("room =", room)
    if stream is None:
        # default to sharing the local camera
        stream = CameraStream()
    room = WebRTCRoomMqtt(stream=stream, room=room)
    # show the room's streams in an HBox kept in sync by a js-side link
    box = widgets.HBox(children=[])
    widgets.jslink((room, 'streams'), (box, 'children'))
    display(box)
    return room
|
python
|
{
"resource": ""
}
|
q18649
|
SceneGraph.add_child
|
train
|
def add_child(self, child):
"""Adds an object as a child in the scene graph."""
if not issubclass(child.__class__, SceneGraph):
raise TypeError("child must have parent/child iteration implemented to be a node in a SceneGraph.")
# if not hasattr(child, 'update'):
# raise TypeError("child must have an attribute update()")
child._parent = self
self._children.append(child)
|
python
|
{
"resource": ""
}
|
q18650
|
ProjectionBase.copy
|
train
|
def copy(self):
"""Returns a copy of the projection matrix"""
params = {}
for key, val in self.__dict__.items():
if 'matrix' not in key:
k = key[1:] if key[0] == '_' else key
params[k] = val
# params = {param: params[param] for param in params}
return self.__class__(**params)
|
python
|
{
"resource": ""
}
|
q18651
|
PerspectiveProjection.match_aspect_to_viewport
|
train
|
def match_aspect_to_viewport(self):
"""Updates Camera.aspect to match the viewport's aspect ratio."""
viewport = self.viewport
self.aspect = float(viewport.width) / viewport.height
|
python
|
{
"resource": ""
}
|
q18652
|
Camera.to_pickle
|
train
|
def to_pickle(self, filename):
"""Save Camera to a pickle file, given a filename."""
with open(filename, 'wb') as f:
pickle.dump(self, f)
|
python
|
{
"resource": ""
}
|
q18653
|
Camera.from_pickle
|
train
|
def from_pickle(cls, filename):
    """Load a pickled Camera from `filename` and rebuild it as a fresh
    instance of `cls` (copying projection, position, and rotation)."""
    with open(filename, 'rb') as infile:
        saved = pickle.load(infile)
    rotation = saved.rotation.__class__(*saved.rotation[:])
    return cls(projection=saved.projection.copy(),
               position=saved.position.xyz,
               rotation=rotation)
|
python
|
{
"resource": ""
}
|
q18654
|
CameraGroup.look_at
|
train
|
def look_at(self, x, y, z):
"""Converges the two cameras to look at the specific point"""
for camera in self.cameras:
camera.look_at(x, y, z)
|
python
|
{
"resource": ""
}
|
q18655
|
FBO.bind
|
train
|
def bind(self):
    """Bind the FBO. Anything drawn afterward will be stored in the FBO's texture.

    Saves the current viewport so unbind() can restore it.
    """
    # This is called simply to deal with anything that might be currently bound (for example, Pyglet objects),
    gl.glBindTexture(gl.GL_TEXTURE_2D, 0)
    # Store current viewport size for later
    self._old_viewport = get_viewport()
    # Bind the FBO, and change the viewport to fit its texture.
    gl.glBindFramebufferEXT(gl.GL_FRAMEBUFFER_EXT, self.id)  # Rendering off-screen
    gl.glViewport(0, 0, self.texture.width, self.texture.height)
|
python
|
{
"resource": ""
}
|
q18656
|
FBO.unbind
|
train
|
def unbind(self):
    """Unbind the FBO, regenerating mipmaps if the texture uses them and
    restoring the viewport saved by bind()."""
    # Unbind the FBO
    if self.texture.mipmap:
        # mipmap levels must be rebuilt from the freshly-rendered base level
        with self.texture:
            self.texture.generate_mipmap()
    gl.glBindFramebufferEXT(gl.GL_FRAMEBUFFER_EXT, 0)
    # Restore the old viewport size
    gl.glViewport(*self._old_viewport)
|
python
|
{
"resource": ""
}
|
q18657
|
create_opengl_object
|
train
|
def create_opengl_object(gl_gen_function, n=1):
    """Create `n` OpenGL objects via a glGen* function and return their
    handle(s).

    Args:
        gl_gen_function: a glGen* function (e.g. glGenTextures) that
            fills an array of `n` GLuints with new object names.
        n (int): number of objects to generate.

    Returns:
        int when n == 1, otherwise a list of ints.
    """
    # Allocate a real array of n GLuints. The previous implementation
    # passed a single GLuint by reference, which lets glGen* write past
    # the end of the buffer when n > 1, and then assumed the returned
    # ids were contiguous (handle.value + i) — OpenGL makes no such
    # guarantee.
    handles = (gl.GLuint * n)()
    gl_gen_function(n, handles)  # Create n empty objects
    if n > 1:
        return [handle for handle in handles]
    return handles[0]
|
python
|
{
"resource": ""
}
|
q18658
|
vec
|
train
|
def vec(data, dtype=float):
    """ Makes GLfloat or GLuint vector containing float or uint args.
    By default, dtype is float; pass int to build a GLuint vector. """
    try:
        gl_dtype = {float: gl.GLfloat, int: gl.GLuint}[dtype]
    except KeyError:
        raise TypeError('dtype not recognized. Recognized types are int and float')
    if gl_dtype == gl.GLuint and any(el < 0 for el in data):
        # GLuint vectors cannot represent negative values
        raise ValueError("integer ratcave.vec arrays are unsigned--negative values are not supported.")
    return (gl_dtype * len(data))(*data)
|
python
|
{
"resource": ""
}
|
q18659
|
calculate_normals
|
train
|
def calculate_normals(vertices):
    """Return an Nx3 array of per-vertex normals for an Nx3 vertex array
    laid out as consecutive triangles (every 3 rows form one face)."""
    verts = np.array(vertices, dtype=float)
    normals = np.zeros_like(verts)
    for start in range(0, verts.shape[0], 3):
        # two edge vectors of this triangle, each normalized
        edges = np.vstack((verts[start + 1] - verts[start],
                           verts[start + 2] - verts[start]))
        edges /= np.linalg.norm(edges, axis=1, keepdims=True)
        # the face normal is the cross product of the edges
        face_normal = np.cross(*edges)
        normals[start:start + 3, :] = face_normal / np.linalg.norm(face_normal)
    return normals
|
python
|
{
"resource": ""
}
|
q18660
|
Scene.draw
|
train
|
def draw(self, clear=True):
    """Draw each visible mesh in the scene from the perspective of the scene's camera and lit by its light.

    Args:
        clear (bool): whether to clear the framebuffer before drawing.
    """
    if clear:
        self.clear()
    # camera/light are bound as context managers so their uniforms are
    # active for every mesh drawn inside the block
    with self.gl_states, self.camera, self.light:
        for mesh in self.meshes:
            try:
                mesh.draw()
            except AttributeError:
                # deliberately best-effort: non-drawable entries in
                # self.meshes are skipped silently
                pass
|
python
|
{
"resource": ""
}
|
q18661
|
Scene.draw360_to_texture
|
train
|
def draw360_to_texture(self, cubetexture, **kwargs):
    """
    Draw each visible mesh in the scene from the perspective of the scene's camera and lit by its light, and
    applies it to each face of cubetexture, which should be currently bound to an FBO.
    """
    # a cube face render requires a square 90-degree frustum
    assert self.camera.projection.aspect == 1. and self.camera.projection.fov_y == 90  # todo: fix aspect property, which currently reads from viewport.
    if not isinstance(cubetexture, TextureCube):
        raise ValueError("Must render to TextureCube")
    # for face, rotation in enumerate([[180, 90, 0], [180, -90, 0], [90, 0, 0], [-90, 0, 0], [180, 0, 0], [0, 0, 180]]):
    # temporarily switch to an euler rotation so each face can be set
    # by absolute xyz angles; the original rotation is restored at the end
    old_rotation = self.camera.rotation
    self.camera.rotation = self.camera.rotation.to_euler(units='deg')
    # NOTE: the face order/rotations below are load-bearing (first two
    # were deliberately switched) — do not reorder without re-testing.
    for face, rotation in enumerate([[180, -90, 0], [180, 90, 0], [90, 0, 0], [-90, 0, 0], [180, 0, 0], [0, 0, 180]]):  # first 2 switched
        self.camera.rotation.xyz = rotation
        cubetexture.attach_to_fbo(face)
        self.draw(**kwargs)
    self.camera.rotation = old_rotation
|
python
|
{
"resource": ""
}
|
q18662
|
VAO.assign_vertex_attrib_location
|
train
|
def assign_vertex_attrib_location(self, vbo, location):
    """Bind `vbo`'s data to vertex attribute `location` of this VAO.

    The first VBO assigned fixes the VAO's vertex count; later VBOs
    must match it.
    """
    with vbo:
        if self.n_verts:
            # all attribute arrays must have the same number of vertices
            assert vbo.data.shape[0] == self.n_verts
        else:
            self.n_verts = vbo.data.shape[0]
            # vbo.buffer_data()
        gl.glVertexAttribPointer(location, vbo.data.shape[1], gl.GL_FLOAT, gl.GL_FALSE, 0, 0)
        gl.glEnableVertexAttribArray(location)
|
python
|
{
"resource": ""
}
|
q18663
|
Mesh.copy
|
train
|
def copy(self):
    """Returns a copy of the Mesh.

    Arrays, mean center, and gl_states are deep-copied; position,
    rotation, and scale are rebuilt from their component values.
    """
    # NOTE(review): the keyword is `texture=` but the value passed is
    # self.textures (plural, not deep-copied) — confirm against Mesh.__init__.
    return Mesh(arrays=deepcopy([arr.copy() for arr in [self.vertices, self.normals, self.texcoords]]), texture=self.textures, mean_center=deepcopy(self._mean_center),
                position=self.position.xyz, rotation=self.rotation.__class__(*self.rotation[:]), scale=self.scale.xyz,
                drawmode=self.drawmode, point_size=self.point_size, dynamic=self.dynamic, visible=self.visible,
                gl_states=deepcopy(self.gl_states))
|
python
|
{
"resource": ""
}
|
q18664
|
Mesh.from_pickle
|
train
|
def from_pickle(cls, filename):
    """Load a pickled Mesh from `filename` and return a fresh copy of it."""
    with open(filename, 'rb') as infile:
        return pickle.load(infile).copy()
|
python
|
{
"resource": ""
}
|
q18665
|
Mesh.reset_uniforms
|
train
|
def reset_uniforms(self):
    """ Resets the uniforms to the Mesh object to the ""global"" coordinate system"""
    # views (not copies) keep the uniforms in sync with later matrix updates
    self.uniforms['model_matrix'] = self.model_matrix_global.view()
    self.uniforms['normal_matrix'] = self.normal_matrix_global.view()
|
python
|
{
"resource": ""
}
|
q18666
|
Mesh._fill_vao
|
train
|
def _fill_vao(self):
    """Put array location in VAO for shader in same order as arrays given to Mesh.

    Creates one VBO per vertex array and binds each to the matching
    attribute location (0, 1, 2, ...) while the VAO is bound.
    """
    with self.vao:
        self.vbos = []
        for loc, verts in enumerate(self.arrays):
            vbo = VBO(verts)
            self.vbos.append(vbo)
            self.vao.assign_vertex_attrib_location(vbo, loc)
|
python
|
{
"resource": ""
}
|
q18667
|
Mesh.draw
|
train
|
def draw(self):
    """ Draw the Mesh if it's visible, from the perspective of the camera and lit by the light. The function sends the uniforms"""
    # lazily build the VAO on first draw (requires a live GL context)
    if not self.vao:
        self.vao = VAO(indices=self.array_indices)
        self._fill_vao()
    if self.visible:
        if self.dynamic:
            # re-upload vertex data for meshes whose arrays change per frame
            for vbo in self.vbos:
                vbo._buffer_subdata()
        if self.drawmode == gl.GL_POINTS:
            gl.glPointSize(self.point_size)
        # textures must be bound before the draw call and unbound after
        for texture in self.textures:
            texture.bind()
        with self.vao as vao:
            self.uniforms.send()
            vao.draw(mode=self.drawmode)
        for texture in self.textures:
            texture.unbind()
|
python
|
{
"resource": ""
}
|
q18668
|
cross_product_matrix
|
train
|
def cross_product_matrix(vec):
    """Return the 3x3 skew-symmetric matrix M of a 3-vector, such that
    ``M @ u`` equals ``np.cross(vec, u)`` for any 3-vector u."""
    x, y, z = vec[0], vec[1], vec[2]
    return np.array([[0, -z, y],
                     [z, 0, -x],
                     [-y, x, 0]])
|
python
|
{
"resource": ""
}
|
q18669
|
Texture.max_texture_limit
|
train
|
def max_texture_limit(self):
    """The maximum number of textures available for this graphic card's fragment shader."""
    # glGetIntegerv writes into a 1-element GLint array
    max_unit_array = (gl.GLint * 1)()
    gl.glGetIntegerv(gl.GL_MAX_TEXTURE_IMAGE_UNITS, max_unit_array)
    return max_unit_array[0]
|
python
|
{
"resource": ""
}
|
q18670
|
Texture._apply_filter_settings
|
train
|
def _apply_filter_settings(self):
    """Applies some hard-coded texture filtering settings.

    Linear min/mag filtering (trilinear when mipmapped) and
    clamp-to-edge wrapping on both axes.
    """
    # TODO: Allow easy customization of filters
    if self.mipmap:
        gl.glTexParameterf(self.target, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR_MIPMAP_LINEAR)
    else:
        gl.glTexParameterf(self.target, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR)
    gl.glTexParameterf(self.target, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR)
    gl.glTexParameterf(self.target, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP_TO_EDGE)
    gl.glTexParameterf(self.target, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP_TO_EDGE)
|
python
|
{
"resource": ""
}
|
q18671
|
Texture.attach_to_fbo
|
train
|
def attach_to_fbo(self):
    """Attach the texture to a bound FBO object, for rendering to texture."""
    # level 0 (base image) of this texture becomes the FBO's attachment
    gl.glFramebufferTexture2DEXT(gl.GL_FRAMEBUFFER_EXT, self.attachment_point, self.target0, self.id, 0)
|
python
|
{
"resource": ""
}
|
q18672
|
Texture.from_image
|
train
|
def from_image(cls, img_filename, mipmap=False, **kwargs):
    """Uses Pyglet's image.load function to generate a Texture from an image file. If 'mipmap', then texture will
    have mipmap layers calculated."""
    img = pyglet.image.load(img_filename)
    tex = img.get_mipmapped_texture() if mipmap else img.get_texture()
    # pyglet leaves its texture bound; unbind so later GL state is clean
    gl.glBindTexture(gl.GL_TEXTURE_2D, 0)
    return cls(id=tex.id, data=tex, mipmap=mipmap, **kwargs)
|
python
|
{
"resource": ""
}
|
q18673
|
TextureCube._genTex2D
|
train
|
def _genTex2D(self):
    """Generate an empty texture in OpenGL

    Allocates uninitialized storage (data pointer 0) for all six cube
    faces, targeted as consecutive offsets from target0.
    """
    for face in range(6):
        gl.glTexImage2D(self.target0 + face, 0, self.internal_fmt, self.width, self.height, 0,
                        self.pixel_fmt, gl.GL_UNSIGNED_BYTE, 0)
|
python
|
{
"resource": ""
}
|
q18674
|
WavefrontReader.get_mesh
|
train
|
def get_mesh(self, body_name, **kwargs):
    """Builds Mesh from geom name in the wavefront file. Takes all keyword arguments that Mesh takes.

    Material properties from the .obj's .mtl data are applied either as
    textures, Mesh attributes, or shader uniforms depending on type.
    """
    body = self.bodies[body_name]
    vertices = body['v']
    normals = body['vn'] if 'vn' in body else None
    texcoords = body['vt'] if 'vt' in body else None
    mesh = Mesh.from_incomplete_data(vertices=vertices, normals=normals, texcoords=texcoords, **kwargs)
    # NOTE(review): `uniforms` is computed here but never used below —
    # material uniforms are set directly on mesh.uniforms; confirm intent.
    uniforms = kwargs['uniforms'] if 'uniforms' in kwargs else {}
    if 'material' in body:
        # translate .mtl keys to Mesh-friendly names
        material_props = {self.material_property_map[key]: value for key, value in iteritems(body['material'])}
        for key, value in iteritems(material_props):
            if isinstance(value, str):
                if key == 'map_Kd':
                    # diffuse texture map: load once and cache by filename
                    if not value in self.textures:
                        self.textures[value] = Texture.from_image(value)
                    mesh.textures.append(self.textures[value])
                else:
                    setattr(mesh, key, value)
            elif hasattr(value, '__len__'):  # iterable materials
                mesh.uniforms[key] = value
            elif key in ['d', 'illum']:  # integer materials
                mesh.uniforms[key] = value
            elif key in ['spec_weight', 'Ni']:  # float materials: should be specially converted to float if not already done.
                mesh.uniforms[key] = float(value)
            else:
                print('Warning: Not applying uniform {}: {}'.format(key, value))
    return mesh
|
python
|
{
"resource": ""
}
|
q18675
|
UniformCollection.send
|
train
|
def send(self):
    """
    Sends all the key-value pairs to the graphics card.
    These uniform variables will be available in the currently-bound shader.

    Raises:
        UnboundLocalError: if no shader program is currently bound.
    """
    for name, array in iteritems(self):
        # query the currently-bound shader program id
        shader_id = c_int(0)
        gl.glGetIntegerv(gl.GL_CURRENT_PROGRAM, byref(shader_id))
        if shader_id.value == 0:
            raise UnboundLocalError("""Shader not bound to OpenGL context--uniform cannot be sent.
            ------------ Tip -------------
            with ratcave.default_shader:
                mesh.draw()
            ------------------------------
            """)
        # Attach a shader location value to the array, for quick memory lookup. (gl calls are expensive, for some reason)
        # The cached location is (loc, shader_id); it is recomputed when
        # the array has no cache yet OR the bound shader has changed
        # (the raised Exception is caught immediately below on purpose).
        try:
            loc, shader_id_for_array = array.loc
            if shader_id.value != shader_id_for_array:
                raise Exception('Uniform location bound to a different shader')
        except (AttributeError, Exception) as e:
            array.loc = (gl.glGetUniformLocation(shader_id.value, name.encode('ascii')), shader_id.value)
        if array.ndim == 2:  # Assuming a 4x4 float32 matrix (common for graphics operations)
            # cache a ctypes pointer into the array's buffer so repeated
            # sends avoid re-wrapping the memory each frame
            try:
                pointer = array.pointer
            except AttributeError:
                array.pointer = array.ctypes.data_as(POINTER(c_float * 16)).contents
                pointer = array.pointer
            gl.glUniformMatrix4fv(array.loc[0], 1, True, pointer)
        else:
            sendfun = self._sendfuns[array.dtype.kind][len(array) - 1]  # Find correct glUniform function
            sendfun(array.loc[0], *array)
|
python
|
{
"resource": ""
}
|
q18676
|
Shader.bind
|
train
|
def bind(self):
    """Activate this Shader, making it the currently-bound program.

    Any Mesh.draw() calls after bind() will have their data processed by this Shader. To unbind, call Shader.unbind().

    Example::

        shader.bind()
        mesh.draw()
        shader.unbind()

    .. note:: Shader.bind() and Shader.unbind() can be also be called implicitly by using the 'with' statement.

    Example of with statement with Shader::

        with shader:
            mesh.draw()
    """
    # compile and link lazily on first bind
    if not self.is_linked:
        if not self.is_compiled:
            self.compile()
        self.link()
    # Use the explicit class in super(): the previous
    # super(self.__class__, self) resolves self.__class__ at runtime,
    # which causes infinite recursion when a subclass inherits bind().
    super(Shader, self).bind()
|
python
|
{
"resource": ""
}
|
q18677
|
Shader.from_file
|
train
|
def from_file(cls, vert, frag, **kwargs):
    """
    Reads the shader programs, given the vert and frag filenames

    Arguments:
       - vert (str): The filename of the vertex shader program (ex: 'vertshader.vert')
       - frag (str): The filename of the fragment shader program (ex: 'fragshader.frag')

    Returns:
       - shader (Shader): The Shader using these files.
    """
    # Use context managers so the file handles are closed promptly
    # instead of leaking until garbage collection.
    with open(vert) as vert_file:
        vert_program = vert_file.read()
    with open(frag) as frag_file:
        frag_program = frag_file.read()
    return cls(vert=vert_program, frag=frag_program, **kwargs)
|
python
|
{
"resource": ""
}
|
q18678
|
Shader.link
|
train
|
def link(self):
    """link the program, making it the active shader.

    .. note:: Shader.bind() is preferred here, because link() Requires the Shader to be compiled already.

    On failure the info log is printed to the console; no exception is
    raised.
    """
    gl.glLinkProgram(self.id)
    # Check if linking was successful. If not, print the log.
    link_status = c_int(0)
    gl.glGetProgramiv(self.id, gl.GL_LINK_STATUS, byref(link_status))
    if not link_status:
        # NOTE: link_status is deliberately reused to hold the log length
        gl.glGetProgramiv(self.id, gl.GL_INFO_LOG_LENGTH, byref(link_status))  # retrieve the log length
        buffer = create_string_buffer(link_status.value)  # create a buffer for the log
        gl.glGetProgramInfoLog(self.id, link_status, None, buffer)  # retrieve the log text
        print(buffer.value)  # print the log to the console
    self.is_linked = True
|
python
|
{
"resource": ""
}
|
q18679
|
PhysicalGraph.add_child
|
train
|
def add_child(self, child, modify=False):
    """ Adds an object as a child in the scene graph. With modify=True, model_matrix_transform gets change from identity and prevents the changes of the coordinates of the child"""
    SceneGraph.add_child(self, child)
    # propagate the structural change to observers
    self.notify()
    if modify:
        # cancel out this node's global transform so the child keeps its
        # current world-space coordinates after being re-parented
        child._model_matrix_transform[:] = trans.inverse_matrix(self.model_matrix_global)
        child._normal_matrix_transform[:] = trans.inverse_matrix(self.normal_matrix_global)
|
python
|
{
"resource": ""
}
|
q18680
|
Example.setup_tree
|
train
|
def setup_tree(self):
"""Setup an example Treeview"""
self.tree.insert("", tk.END, text="Example 1", iid="1")
self.tree.insert("", tk.END, text="Example 2", iid="2")
self.tree.insert("2", tk.END, text="Example Child")
self.tree.heading("#0", text="Example heading")
|
python
|
{
"resource": ""
}
|
q18681
|
Example.grid_widgets
|
train
|
def grid_widgets(self):
"""Put widgets in the grid"""
sticky = {"sticky": "nswe"}
self.label.grid(row=1, column=1, columnspan=2, **sticky)
self.dropdown.grid(row=2, column=1, **sticky)
self.entry.grid(row=2, column=2, **sticky)
self.button.grid(row=3, column=1, columnspan=2, **sticky)
self.radio_one.grid(row=4, column=1, **sticky)
self.radio_two.grid(row=4, column=2, **sticky)
self.checked.grid(row=5, column=1, **sticky)
self.unchecked.grid(row=5, column=2, **sticky)
self.scroll.grid(row=1, column=3, rowspan=8, padx=5, **sticky)
self.tree.grid(row=6, column=1, columnspan=2, **sticky)
self.scale_entry.grid(row=7, column=1, columnspan=2, **sticky)
self.combo.grid(row=8, column=1, columnspan=2, **sticky)
self.progress.grid(row=9, column=1, columnspan=2, padx=5, pady=5, **sticky)
|
python
|
{
"resource": ""
}
|
q18682
|
Example.screenshot
|
train
|
def screenshot(self, *args):
    """Grab this window's screen area and save it as a PNG.

    The file is written to ``screenshots/<theme>.png``, named after the
    currently active ttk theme. Extra ``*args`` are ignored so the method
    can be bound directly to Tk events.
    """
    from mss import mss  # imported lazily: only needed when screenshotting
    if not os.path.exists("screenshots"):
        os.makedirs("screenshots")
    # Bounding box of this window in screen coordinates
    box = {
        "top": self.winfo_y(),
        "left": self.winfo_x(),
        "width": self.winfo_width(),
        "height": self.winfo_height()
    }
    screenshot = mss().grab(box)
    # mss yields raw RGB bytes; wrap them in a PIL image so it can be saved
    screenshot = Image.frombytes("RGB", screenshot.size, screenshot.rgb)
    screenshot.save("screenshots/{}.png".format(ttk.Style(self).theme_use()))
|
python
|
{
"resource": ""
}
|
q18683
|
Example.screenshot_themes
|
train
|
def screenshot_themes(self, *args):
    """Cycle through all available themes and take a screenshot of each."""
    from time import sleep
    # NOTE(review): this relies on a module-level `example` instance rather
    # than `self` — presumably they are the same object; confirm.
    for theme in THEMES:
        example.set_theme(theme)
        example.update()
        sleep(0.05)  # give Tk a moment to redraw before grabbing
        self.screenshot()
|
python
|
{
"resource": ""
}
|
q18684
|
ThemedWidget._load_themes
|
train
|
def _load_themes(self):
    """Load the bundled themes into the Tkinter interpreter.

    Sources the generic themes plus either the PNG- or GIF-based image
    themes, depending on whether the running Tk supports PNG images.
    """
    with utils.temporary_chdir(utils.get_file_directory()):
        self._append_theme_dir("themes")
        self.tk.eval("source themes/pkgIndex.tcl")
        # Fall back to the GIF-based images when PNG is unsupported
        theme_dir = "gif" if not self.png_support else "png"
        self._append_theme_dir(theme_dir)
        self.tk.eval("source {}/pkgIndex.tcl".format(theme_dir))
        # scid is required explicitly — presumably so its sub-themes get
        # registered as well; confirm against the theme package.
        self.tk.call("package", "require", "ttk::theme::scid")
|
python
|
{
"resource": ""
}
|
q18685
|
ThemedWidget._append_theme_dir
|
train
|
def _append_theme_dir(self, name):
    """Append the theme folder *name* to the Tk interpreter's ``auto_path``."""
    folder = "[{}/{}]".format(get_file_directory(), name)
    self.tk.call("lappend", "auto_path", folder)
|
python
|
{
"resource": ""
}
|
q18686
|
ThemedWidget.set_theme
|
train
|
def set_theme(self, theme_name):
    """Activate *theme_name* after loading its Tcl package.

    Uses direct Tk calls so the themes supplied with this package can be
    activated as well.

    :param theme_name: name of theme to activate
    """
    # Some themes live in a differently-named Tcl package
    package = self.PACKAGES.get(theme_name, theme_name)
    self.tk.call("package", "require", "ttk::theme::{}".format(package))
    self.tk.call("ttk::setTheme", theme_name)
|
python
|
{
"resource": ""
}
|
q18687
|
ThemedWidget.set_theme_advanced
|
train
|
def set_theme_advanced(self, theme_name, brightness=1.0,
                       saturation=1.0, hue=1.0,
                       preserve_transparency=True, output_dir=None,
                       advanced_name="advanced"):
    """
    Load an advanced theme that is dynamically created.

    Applies the given modifiers to the images of the theme given and
    then creates a theme from these new images with the name
    *advanced_name* and then applies this theme.

    :param theme_name: pixmap theme to derive the new theme from
    :param brightness: brightness multiplier (1.0 = unchanged)
    :param saturation: saturation multiplier (1.0 = unchanged)
    :param hue: hue shift factor (1.0 = unchanged)
    :param preserve_transparency: post-process images for transparency
    :param output_dir: directory to generate the theme in; defaults to a
        subfolder of the package temp directory
    :param advanced_name: name under which the new theme is registered
    :raises RuntimeError: if PNG themes are unsupported, or the name was
        already used for an advanced theme
    :raises ValueError: if *theme_name* is not an available pixmap theme
    """
    if not self.png_support:
        raise RuntimeError("PNG-based themes are not supported in the environment")
    # Check if the theme is a pixmap theme
    if theme_name not in self.pixmap_themes:
        raise ValueError("Theme is not a valid pixmap theme")
    # Check if theme is available in the first place
    if theme_name not in self.themes:
        raise ValueError("Theme to create new theme from is not available: {}".format(theme_name))
    # The same advanced-theme name cannot be registered twice in one interpreter
    if advanced_name in self.themes:
        raise RuntimeError("The same name for an advanced theme cannot be used twice")
    # Copy the source theme's files into the output directory under the new name
    output_dir = os.path.join(utils.get_temp_directory(), advanced_name) if output_dir is None else output_dir
    self._setup_advanced_theme(theme_name, output_dir, advanced_name)
    # Perform image operations
    image_directory = os.path.join(output_dir, advanced_name, advanced_name)
    self._setup_images(image_directory, brightness, saturation, hue, preserve_transparency)
    # Load the new theme
    with utils.temporary_chdir(output_dir):
        self.tk.call("lappend", "auto_path", "[{}]".format(output_dir))
        self.tk.eval("source pkgIndex.tcl")
    self.set_theme(advanced_name)
|
python
|
{
"resource": ""
}
|
q18688
|
ThemedWidget._setup_advanced_theme
|
train
|
def _setup_advanced_theme(self, theme_name, output_dir, advanced_name):
    """
    Setup all the files required to enable an advanced theme.

    Copies all the files over and creates the required directories
    if they do not exist. The theme's TCL files are rewritten on the
    fly: occurrences of the source theme's name are replaced with
    *advanced_name* and GIF image references with PNG ones.

    :param theme_name: theme to copy the files over from
    :param output_dir: output directory to place the files in
    :param advanced_name: name of the new theme being generated
    """
    """Directories"""
    output_theme_dir = os.path.join(output_dir, advanced_name)
    output_images_dir = os.path.join(output_theme_dir, advanced_name)
    input_theme_dir = os.path.join(
        utils.get_themes_directory(theme_name, self.png_support), theme_name)
    input_images_dir = os.path.join(input_theme_dir, theme_name)
    advanced_pkg_dir = os.path.join(utils.get_file_directory(), "advanced")
    """Directory creation"""
    for directory in [output_dir, output_theme_dir]:
        utils.create_directory(directory)
    """Theme TCL file"""
    file_name = theme_name + ".tcl"
    theme_input = os.path.join(input_theme_dir, file_name)
    theme_output = os.path.join(output_theme_dir, "{}.tcl".format(advanced_name))
    with open(theme_input, "r") as fi, open(theme_output, "w") as fo:
        for line in fi:
            # Setup new theme
            line = line.replace(theme_name, advanced_name)
            # Setup new image format ("gif89" must be replaced before "gif")
            line = line.replace("gif89", "png")
            line = line.replace("gif", "png")
            # Write processed line
            fo.write(line)
    """pkgIndex.tcl file"""
    theme_pkg_input = os.path.join(advanced_pkg_dir, "pkgIndex.tcl")
    theme_pkg_output = os.path.join(output_theme_dir, "pkgIndex.tcl")
    with open(theme_pkg_input, "r") as fi, open(theme_pkg_output, "w") as fo:
        for line in fi:
            fo.write(line.replace("advanced", advanced_name))
    """pkgIndex_package.tcl -> pkgIndex.tcl"""
    theme_pkg_input = os.path.join(advanced_pkg_dir, "pkgIndex_package.tcl")
    theme_pkg_output = os.path.join(output_dir, "pkgIndex.tcl")
    with open(theme_pkg_input, "r") as fi, open(theme_pkg_output, "w") as fo:
        for line in fi:
            fo.write(line.replace("advanced", advanced_name))
    """Images"""
    # Replace any stale image directory with a fresh copy of the source images
    if os.path.exists(output_images_dir):
        rmtree(output_images_dir)
    copytree(input_images_dir, output_images_dir)
|
python
|
{
"resource": ""
}
|
q18689
|
ThemedWidget._setup_images
|
train
|
# NOTE(review): defined without `self` — presumably decorated as a
# @staticmethod in the enclosing class; confirm.
def _setup_images(directory, brightness, saturation, hue, preserve_transparency):
    """
    Apply modifiers to the images of a theme.

    Modifies the images using the PIL.ImageEnhance module. Using
    this function, theme images are modified to give them a
    unique look and feel. Works best with PNG-based images.

    Images are re-saved with a ``.png`` name; any remaining ``.gif``
    files in the directory are deleted afterwards.

    :param directory: directory containing the theme's image files
    :param brightness: brightness multiplier (1.0 = unchanged)
    :param saturation: saturation multiplier (1.0 = unchanged)
    :param hue: hue shift factor (1.0 = unchanged)
    :param preserve_transparency: run imgops.make_transparent on each image
    """
    for file_name in os.listdir(directory):
        with open(os.path.join(directory, file_name), "rb") as fi:
            image = Image.open(fi).convert("RGBA")
            # Only perform required operations
            if brightness != 1.0:
                enhancer = ImageEnhance.Brightness(image)
                image = enhancer.enhance(brightness)
            if saturation != 1.0:
                enhancer = ImageEnhance.Color(image)
                image = enhancer.enhance(saturation)
            if hue != 1.0:
                image = imgops.shift_hue(image, hue)
            if preserve_transparency is True:
                image = imgops.make_transparent(image)
            # Save the new image under a .png name
            image.save(os.path.join(directory, file_name.replace("gif", "png")))
            image.close()
    # Remove the now-obsolete GIF originals
    for file_name in (item for item in os.listdir(directory) if item.endswith(".gif")):
        os.remove(os.path.join(directory, file_name))
|
python
|
{
"resource": ""
}
|
q18690
|
ThemedTk.set_theme
|
train
|
def set_theme(self, theme_name, toplevel=None, themebg=None):
    """Redirect the set_theme call to also set Tk background color.

    :param theme_name: name of theme to activate
    :param toplevel: also hook Toplevel windows to get the theme
        background; None falls back to the instance default
    :param themebg: also set this window's background to the theme
        background; None falls back to the instance default
    """
    # None means "use whatever was configured on the instance"
    if self._toplevel is not None and toplevel is None:
        toplevel = self._toplevel
    if self._themebg is not None and themebg is None:
        themebg = self._themebg
    ThemedWidget.set_theme(self, theme_name)
    color = self._get_bg_color()
    if themebg is True:
        self.config(background=color)
    if toplevel is True:
        self._setup_toplevel_hook(color)
|
python
|
{
"resource": ""
}
|
q18691
|
ThemedTk.config
|
train
|
def config(self, kw=None, **kwargs):
    """configure redirect to support additional options.

    Handles the extra ``themebg``, ``toplevel`` and ``theme`` options
    before delegating the remainder to ``tk.Tk.config``.
    """
    themebg = kwargs.pop("themebg", self._themebg)
    toplevel = kwargs.pop("toplevel", self._toplevel)
    theme = kwargs.pop("theme", self.current_theme)
    color = self._get_bg_color()
    if themebg != self._themebg:
        # Toggle between the theme background and plain white
        if themebg is False:
            self.configure(bg="white")
        else:
            self.configure(bg=color)
        self._themebg = themebg
    if toplevel != self._toplevel:
        if toplevel is True:
            self._setup_toplevel_hook(color)
        else:
            # Restore the original Toplevel initializer saved earlier.
            # NOTE(review): `self.__init__toplevel` is name-mangled inside
            # the class body — confirm the attribute is set by the hook.
            tk.Toplevel.__init__ = self.__init__toplevel
        self._toplevel = toplevel
    if theme != self.current_theme:
        self.set_theme(theme)
    return tk.Tk.config(self, kw, **kwargs)
|
python
|
{
"resource": ""
}
|
q18692
|
ThemedTk.cget
|
train
|
def cget(self, k):
    """cget redirect that also serves the extra ThemedTk options."""
    if k == "theme":
        return self.current_theme
    if k == "toplevel":
        return self._toplevel
    if k == "themebg":
        return self._themebg
    # Anything else is a regular Tk option
    return tk.Tk.cget(self, k)
|
python
|
{
"resource": ""
}
|
q18693
|
build_and_install_wheel
|
train
|
def build_and_install_wheel(python):
    """Build a distribution (wheel or sdist) and install it with pip.

    :param python: path or name of the Python interpreter to run
        setup.py and pip with
    Exits the process with the failing return code on any error.
    """
    # Module-level SDIST flag switches to a source distribution build
    dist_type = "bdist_wheel" if not SDIST else "sdist"
    return_code = run_command("{} setup.py {}".format(python, dist_type))
    if return_code != 0:
        print("Building and installing wheel failed.")
        exit(return_code)
    # Check if an artifact exists
    assert check_wheel_existence()
    print("Wheel file exists.")
    # Install the first wheel/sdist artifact found in dist/
    wheel = [file for file in os.listdir("dist") if file.endswith((".whl", ".tar.gz"))][0]
    wheel = os.path.join("dist", wheel)
    print("Wheel file:", wheel)
    return_code = run_command("{} -m pip install --ignore-installed {}".format(python, wheel))
    if return_code != 0:
        print("Installation of wheel failed.")
        exit(return_code)
    print("Wheel file installed.")
|
python
|
{
"resource": ""
}
|
q18694
|
ci
|
train
|
def ci(python="python", codecov="codecov", coverage_file="coverage.xml", wheel=True):
"""
Run the most common CI tasks
"""
# Import pip
from pip import __version__ as pip_version
if Version(pip_version) >= Version("10.0.0"):
import pip._internal as pip
else:
import pip
# Install requirements with pip
pip.main(["install"] + DEPENDENCIES + REQUIREMENTS + ["-U"])
# Build the installation wheel
if wheel is True:
build_and_install_wheel(python)
# Remove all non-essential files
for to_delete in TO_DELETE:
rmtree(to_delete)
# Run the tests on the installed ttkthemes
return_code = run_command("{} -m nose --with-coverage --cover-xml --cover-package=ttkthemes".format(python))
if return_code != 0:
print("Tests failed.")
exit(return_code)
print("Tests successful.")
# Run codecov
return_code = run_command("{} -f {}".format(codecov, coverage_file))
if return_code != 0:
print("Codecov failed.")
exit(return_code)
# Successfully finished CI
exit(0)
|
python
|
{
"resource": ""
}
|
q18695
|
ci_macos
|
train
|
def ci_macos():
    """
    Setup Travis-CI macOS for wheel building.

    Installs Python and dependencies via Homebrew/pip (using the
    $PYTHON/$PIP environment variables), builds a wheel and asserts
    that the artifact exists. Always exits the process.
    """
    run_command("brew install $PYTHON pipenv || echo \"Installed PipEnv\"")
    # Build one long pip install command for all dependencies
    command_string = "sudo -H $PIP install "
    for element in DEPENDENCIES + REQUIREMENTS + ["-U"]:
        command_string += element + " "
    run_command(command_string)
    # Build a wheel
    run_command("sudo -H $PYTHON setup.py bdist_wheel")
    assert check_wheel_existence()
    exit(0)
|
python
|
{
"resource": ""
}
|
q18696
|
ThemedStyle.theme_use
|
train
|
def theme_use(self, theme_name=None):
    """Set a new theme to use, or query the active theme.

    :param theme_name: name of theme to use, or None to only query
    :returns: name of the currently active theme
    """
    if theme_name is None:
        return ttk.Style.theme_use(self)
    self.set_theme(theme_name)
    return ttk.Style.theme_use(self)
|
python
|
{
"resource": ""
}
|
q18697
|
get_temp_directory
|
train
|
def get_temp_directory():
    """Return an absolute path to an existing ttkthemes temporary directory.

    The directory lives under tempfile.gettempdir(), so all platforms
    supported by tempfile work. It is created on first use.

    :returns: absolute path to an existing directory
    """
    directory = os.path.join(gettempdir(), "ttkthemes")
    # exist_ok avoids the check-then-create race when several processes
    # call this concurrently (os.path.exists + makedirs was racy).
    os.makedirs(directory, exist_ok=True)
    return directory
|
python
|
{
"resource": ""
}
|
q18698
|
create_directory
|
train
|
def create_directory(directory):
    """Return *directory* after (re)creating it empty.

    Any existing directory at the path is removed first, so the
    returned path is guaranteed to be an empty directory.
    """
    already_there = os.path.exists(directory)
    if already_there:
        rmtree(directory)  # wipe previous contents
    os.makedirs(directory)
    return directory
|
python
|
{
"resource": ""
}
|
q18699
|
make_transparent
|
train
|
def make_transparent(image):
    """Replace pixels matched by _check_pixel with white, in place.

    :param image: PIL image in RGBA mode; it is mutated and returned.
    """
    data = image.copy().getdata()
    modified = []
    for item in data:
        if _check_pixel(item) is True:
            # NOTE(review): alpha 255 is fully opaque, so this writes
            # opaque white, not a transparent pixel — confirm whether
            # alpha 0 was intended given the function's name.
            modified.append((255, 255, 255, 255))
            continue
        modified.append(item)
    image.putdata(modified)
    return image
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.