| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, nullable) |
|---|---|---|---|---|
| stonebig/winpython | refs/heads/master | winpython/_vendor/qtpy/tests/test_qtwebsockets.py | 7 |
from __future__ import absolute_import
import pytest
from qtpy import PYQT5, PYSIDE2
@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings")
def test_qtwebsockets():
"""Test the qtpy.QtWebSockets namespace"""
from qtpy import QtWebSockets
assert QtWebSockets.QMaskGenerator is not None
assert QtWebSockets.QWebSocket is not None
assert QtWebSockets.QWebSocketCorsAuthenticator is not None
assert QtWebSockets.QWebSocketProtocol is not None
assert QtWebSockets.QWebSocketServer is not None
|
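The test above only checks that the QtWebSockets names resolve. As a companion illustration, here is a minimal sketch of the namespace in use; the server name and port are hypothetical, and it assumes a Qt5 binding (PyQt5 or PySide2) is installed.

```python
# Minimal echo-server sketch using qtpy.QtWebSockets (hypothetical usage;
# requires an installed Qt5 binding and a running Qt event loop).
from qtpy.QtCore import QCoreApplication
from qtpy.QtNetwork import QHostAddress
from qtpy.QtWebSockets import QWebSocketServer

app = QCoreApplication([])
server = QWebSocketServer('echo', QWebSocketServer.NonSecureMode)
clients = []  # keep references so accepted sockets aren't garbage collected

def on_new_connection():
    socket = server.nextPendingConnection()
    clients.append(socket)
    # Echo every text frame straight back to the sender.
    socket.textMessageReceived.connect(socket.sendTextMessage)

if server.listen(QHostAddress.LocalHost, 8765):
    server.newConnection.connect(on_new_connection)
    app.exec_()
```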
| eufarn7sp/egads-eufar | refs/heads/master | egads/thirdparty/quantities/constants/electron.py | 4 |
# -*- coding: utf-8 -*-
"""
"""
from __future__ import absolute_import
from ._utils import _cd
from ..unitquantity import UnitConstant
e = elementary_charge = UnitConstant(
'elementary_charge',
_cd('elementary charge'),
symbol='e'
)
elementary_charge_over_h = UnitConstant(
'elementary_charge_over_h',
_cd('elementary charge over h'),
symbol='e/h'
)
Faraday_constant = UnitConstant(
'Faraday_constant',
_cd('Faraday constant'),
symbol='F'
)
# F_star = Faraday_constant_for_conventional_electric_current = UnitConstant(
#     'Faraday_constant_for_conventional_electric_current',
#     _cd('Faraday constant for conventional electric current')
# )  # TODO: unclear what unit the conventional coulomb C_90 should map to
r_e = classical_electron_radius = UnitConstant(
'classical_electron_radius',
_cd('classical electron radius'),
symbol='r_e',
u_symbol='rₑ'
)
m_e = electron_mass = UnitConstant(
'electron_mass',
_cd('electron mass'),
symbol='m_e',
u_symbol='mₑ'
)
lambda_C = Compton_wavelength = UnitConstant(
'Compton_wavelength',
_cd('Compton wavelength'),
symbol='lambda_C',
u_symbol='λ_C'
)
Compton_wavelength_over_2_pi = UnitConstant(
'Compton_wavelength_over_2_pi',
_cd('Compton wavelength over 2 pi'),
symbol='lambdabar_C',
u_symbol='ƛ_C'
)
electron_charge_to_mass_quotient = UnitConstant(
'electron_charge_to_mass_quotient',
_cd('electron charge to mass quotient'),
symbol='(-e/m_e)',
u_symbol='(-e/mₑ)'
)
g_e = electron_g_factor = UnitConstant(
'electron_g_factor',
_cd('electron g factor'),
symbol='g_e',
u_symbol='gₑ'
)
gamma_e = electron_gyromagnetic_ratio = UnitConstant(
'electron_gyromagnetic_ratio',
_cd('electron gyromagnetic ratio'),
symbol='gamma_e',
u_symbol='γₑ'
)
electron_gyromagnetic_ratio_over_2_pi = UnitConstant(
'electron_gyromagnetic_ratio_over_2_pi',
_cd('electron gyromagnetic ratio over 2 pi'),
symbol='gamma_e/(2*pi)',
u_symbol='γₑ/(2·π)'
)
mu_e = electron_magnetic_moment = UnitConstant(
'electron_magnetic_moment',
_cd('electron magnetic moment'),
symbol='mu_e',
u_symbol='μₑ'
)
a_e = electron_magnetic_moment_anomaly = UnitConstant(
'electron_magnetic_moment_anomaly',
_cd('electron magnetic moment anomaly'),
symbol='a_e',
u_symbol='aₑ'
)
eV = electron_volt = UnitConstant(
'electron_volt',
_cd('electron volt'),
symbol='eV'
)
sigma_e = Thomson_cross_section = UnitConstant(
'Thomson_cross_section',
_cd('Thomson cross section'),
symbol='sigma_e',
u_symbol='σₑ'
)
mu_B = Bohr_magneton = UnitConstant(
'Bohr_magneton',
_cd('Bohr magneton'),
symbol='mu_B',
u_symbol='μ_B'
)
Bohr_magneton_in_Hz_per_T = UnitConstant(
'Bohr_magneton_in_Hz_per_T',
_cd('Bohr magneton in Hz/T')
)
Bohr_magneton_in_inverse_meters_per_tesla = UnitConstant(
'Bohr_magneton_in_inverse_meters_per_tesla',
_cd('Bohr magneton in inverse meters per tesla')
)
Bohr_magneton_in_K_per_T = UnitConstant(
'Bohr_magneton_in_K_per_T',
_cd('Bohr magneton in K/T')
)
electron_mass_energy_equivalent = UnitConstant(
'electron_mass_energy_equivalent',
_cd('electron mass energy equivalent'),
symbol='(m_e*c**2)',
u_symbol='(mₑ·c²)'
)
electron_mass_energy_equivalent_in_MeV = UnitConstant(
'electron_mass_energy_equivalent_in_MeV',
_cd('electron mass energy equivalent in MeV')
)
electron_mass_in_u = UnitConstant(
'electron_mass_in_u',
_cd('electron mass in u')
)
electron_molar_mass = UnitConstant(
'electron_molar_mass',
_cd('electron molar mass'),
symbol='M_e',
u_symbol='Mₑ'
)
electron_deuteron_mass_ratio = UnitConstant(
'electron_deuteron_mass_ratio',
_cd('electron-deuteron mass ratio'),
symbol='(m_e/m_d)',
u_symbol='(mₑ/m_d)'
)
electron_muon_mass_ratio = UnitConstant(
'electron_muon_mass_ratio',
_cd('electron-muon mass ratio'),
symbol='(m_e/m_mu)',
u_symbol='(mₑ/m_μ)'
)
electron_neutron_mass_ratio = UnitConstant(
'electron_neutron_mass_ratio',
_cd('electron-neutron mass ratio'),
symbol='(m_e/m_n)',
u_symbol='(mₑ/m_n)'
)
electron_proton_mass_ratio = UnitConstant(
'electron_proton_mass_ratio',
_cd('electron-proton mass ratio'),
symbol='(m_e/m_p)',
u_symbol='(mₑ/m_p)'
)
electron_tau_mass_ratio = UnitConstant(
'electron_tau_mass_ratio',
_cd('electron-tau mass ratio'),
symbol='(m_e/m_tau)',
u_symbol='(mₑ/m_τ)'
)
electron_to_alpha_particle_mass_ratio = UnitConstant(
'electron_to_alpha_particle_mass_ratio',
_cd('electron to alpha particle mass ratio'),
symbol='(m_e/m_alpha)',
u_symbol='(mₑ/m_α)'
)
electron_deuteron_magnetic_moment_ratio = UnitConstant(
'electron_deuteron_magnetic_moment_ratio',
_cd('electron-deuteron magnetic moment ratio'),
symbol='(mu_e/mu_d)',
u_symbol='(μₑ/μ_d)'
)
electron_magnetic_moment_to_Bohr_magneton_ratio = UnitConstant(
'electron_magnetic_moment_to_Bohr_magneton_ratio',
_cd('electron magnetic moment to Bohr magneton ratio'),
symbol='(mu_e/mu_B)',
u_symbol='(μₑ/μ_B)'
)
electron_magnetic_moment_to_nuclear_magneton_ratio = UnitConstant(
'electron_magnetic_moment_to_nuclear_magneton_ratio',
_cd('electron magnetic moment to nuclear magneton ratio'),
symbol='(mu_e/mu_N)',
u_symbol='(μₑ/μ_N)'
)
electron_muon_magnetic_moment_ratio = UnitConstant(
'electron_muon_magnetic_moment_ratio',
_cd('electron-muon magnetic moment ratio'),
symbol='(mu_e/mu_mu)',
u_symbol='(μₑ/μ_μ)'
)
electron_neutron_magnetic_moment_ratio = UnitConstant(
'electron_neutron_magnetic_moment_ratio',
_cd('electron-neutron magnetic moment ratio'),
symbol='(mu_e/mu_n)',
u_symbol='(μₑ/μ_n)'
)
electron_proton_magnetic_moment_ratio = UnitConstant(
'electron_proton_magnetic_moment_ratio',
_cd('electron-proton magnetic moment ratio'),
symbol='(mu_e/mu_p)',
u_symbol='(μₑ/μ_p)'
)
electron_to_shielded_helion_magnetic_moment_ratio = UnitConstant(
'electron_to_shielded_helion_magnetic_moment_ratio',
_cd('electron to shielded helion magnetic moment ratio'),
symbol='(mu_e/muprime_h)',
u_symbol='(μₑ/μ′_h)'
)
electron_to_shielded_proton_magnetic_moment_ratio = UnitConstant(
'electron_to_shielded_proton_magnetic_moment_ratio',
_cd('electron to shielded proton magnetic moment ratio'),
symbol='(mu_e/muprime_p)',
u_symbol='(μₑ/μ′_p)'
)
electron_volt_atomic_mass_unit_relationship = UnitConstant(
'electron_volt_atomic_mass_unit_relationship',
_cd('electron volt-atomic mass unit relationship')
)
electron_volt_hartree_relationship = UnitConstant(
'electron_volt_hartree_relationship',
_cd('electron volt-hartree relationship')
)
electron_volt_hertz_relationship = UnitConstant(
'electron_volt_hertz_relationship',
_cd('electron volt-hertz relationship')
)
electron_volt_inverse_meter_relationship = UnitConstant(
'electron_volt_inverse_meter_relationship',
_cd('electron volt-inverse meter relationship')
)
electron_volt_joule_relationship = UnitConstant(
'electron_volt_joule_relationship',
_cd('electron volt-joule relationship')
)
electron_volt_kelvin_relationship = UnitConstant(
'electron_volt_kelvin_relationship',
_cd('electron volt-kelvin relationship')
)
electron_volt_kilogram_relationship = UnitConstant(
'electron_volt_kilogram_relationship',
_cd('electron volt-kilogram relationship')
)
hertz_electron_volt_relationship = UnitConstant(
'hertz_electron_volt_relationship',
_cd('hertz-electron volt relationship')
)
inverse_meter_electron_volt_relationship = UnitConstant(
'inverse_meter_electron_volt_relationship',
_cd('inverse meter-electron volt relationship')
)
joule_electron_volt_relationship = UnitConstant(
'joule_electron_volt_relationship',
_cd('joule-electron volt relationship')
)
kelvin_electron_volt_relationship = UnitConstant(
'kelvin_electron_volt_relationship',
_cd('kelvin-electron volt relationship')
)
kilogram_electron_volt_relationship = UnitConstant(
'kilogram_electron_volt_relationship',
_cd('kilogram-electron volt relationship')
)
del UnitConstant, _cd
|
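Once imported, the constants above behave like ordinary quantities. A minimal sketch, assuming the `quantities` package is installed and imported as `pq`:

```python
# Hypothetical usage of the electron constants defined above (assumes the
# `quantities` package is installed and importable as `pq`).
import quantities as pq

m_e = pq.constants.electron_mass   # a UnitConstant, usable like a Quantity
print(m_e.simplified)              # electron mass in SI base units (kg)
print(pq.constants.electron_volt.rescale('J'))  # 1 eV expressed in joules
```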
| infobloxopen/infoblox-netmri | refs/heads/master | infoblox_netmri/api/broker/v2_7_0/adv_setting_option_broker.py | 16 |
from ..broker import Broker
class AdvSettingOptionBroker(Broker):
controller = "adv_setting_options"
def show(self, **kwargs):
"""Shows the details for the specified adv setting option.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param id: The internal NetMRI identifier for this setting option.
:type id: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return adv_setting_option: The adv setting option identified by the specified id.
:rtype adv_setting_option: AdvSettingOption
"""
return self.api_request(self._get_method_fullname("show"), kwargs)
def index(self, **kwargs):
"""Lists the available adv setting options. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param id: The internal NetMRI identifier for this setting option.
:type id: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param id: The internal NetMRI identifier for this setting option.
:type id: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit parameter for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` id
:param sort: The data field(s) to use for sorting the output. Default is id. Valid values are id, value, adv_setting_def_id, label.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each AdvSettingOption. Valid values are id, value, adv_setting_def_id, label. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return adv_setting_options: An array of the AdvSettingOption objects that match the specified input criteria.
:rtype adv_setting_options: Array of AdvSettingOption
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
def search(self, **kwargs):
"""Lists the available adv setting options matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.
**Inputs**
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param adv_setting_def_id: The internal NetMRI identifier for setting definition.
:type adv_setting_def_id: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param adv_setting_def_id: The internal NetMRI identifier for setting definition.
:type adv_setting_def_id: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param id: The internal NetMRI identifier for this setting option.
:type id: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param id: The internal NetMRI identifier for this setting option.
:type id: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param label: The label for this setting option.
:type label: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param label: The label for this setting option.
:type label: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param value: The value for this setting option.
:type value: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param value: The value for this setting option.
:type value: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit parameter for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` id
:param sort: The data field(s) to use for sorting the output. Default is id. Valid values are id, value, adv_setting_def_id, label.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each AdvSettingOption. Valid values are id, value, adv_setting_def_id, label. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: This value will be matched against adv setting options, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: adv_setting_def_id, id, label, value.
:type query: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if not combined with database-level filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return adv_setting_options: An array of the AdvSettingOption objects that match the specified input criteria.
:rtype adv_setting_options: Array of AdvSettingOption
"""
return self.api_list_request(self._get_method_fullname("search"), kwargs)
def find(self, **kwargs):
"""Lists the available adv setting options matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: adv_setting_def_id, id, label, value.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_adv_setting_def_id: The operator to apply to the field adv_setting_def_id. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. adv_setting_def_id: The internal NetMRI identifier for setting definition. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_adv_setting_def_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_adv_setting_def_id: If op_adv_setting_def_id is specified, the field named in this input will be compared to the value in adv_setting_def_id using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_adv_setting_def_id must be specified if op_adv_setting_def_id is specified.
:type val_f_adv_setting_def_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_adv_setting_def_id: If op_adv_setting_def_id is specified, this value will be compared to the value in adv_setting_def_id using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_adv_setting_def_id must be specified if op_adv_setting_def_id is specified.
:type val_c_adv_setting_def_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_id: The operator to apply to the field id. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. id: The internal NetMRI identifier for this setting option. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_id: If op_id is specified, the field named in this input will be compared to the value in id using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_id must be specified if op_id is specified.
:type val_f_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_id: If op_id is specified, this value will be compared to the value in id using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_id must be specified if op_id is specified.
:type val_c_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_label: The operator to apply to the field label. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. label: The label for this setting option. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_label: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_label: If op_label is specified, the field named in this input will be compared to the value in label using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_label must be specified if op_label is specified.
:type val_f_label: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_label: If op_label is specified, this value will be compared to the value in label using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_label must be specified if op_label is specified.
:type val_c_label: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_value: The operator to apply to the field value. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. value: The value for this setting option. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_value: If op_value is specified, the field named in this input will be compared to the value in value using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_value must be specified if op_value is specified.
:type val_f_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_value: If op_value is specified, this value will be compared to the value in value using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_value must be specified if op_value is specified.
:type val_c_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit parameter for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` id
:param sort: The data field(s) to use for sorting the output. Default is id. Valid values are id, value, adv_setting_def_id, label.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each AdvSettingOption. Valid values are id, value, adv_setting_def_id, label. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if not combined with database-level filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return adv_setting_options: An array of the AdvSettingOption objects that match the specified input criteria.
:rtype adv_setting_options: Array of AdvSettingOption
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
|
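The show/index/search/find methods above trade efficiency for flexibility: index is the fastest, find the most expressive. A minimal sketch of driving this broker through the library's client follows; the host and credentials are placeholders, and the client construction shown is an assumption that may differ between library versions.

```python
# Hypothetical usage sketch (placeholder host/credentials; assumes the
# infoblox-netmri client exposes get_broker() for named broker classes).
from infoblox_netmri.client import InfobloxNetMRI

client = InfobloxNetMRI('netmri.example.com', 'admin', 'password')
broker = client.get_broker('AdvSettingOption')

# index: fastest, exact-match filtering on the listed fields only.
options = broker.index(sort=['label'], dir=['asc'], limit=100)

# find: comparison operators via op_*/val_c_* pairs, per the docstring above,
# e.g. label LIKE '%timeout%'.
matches = broker.find(op_label='like', val_c_label='%timeout%')
```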
| balint256/gnuradio | refs/heads/master | gr-analog/python/analog/standard_squelch.py | 58 |
#
# Copyright 2005,2007,2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import math
from gnuradio import gr
from gnuradio import blocks
from gnuradio import filter
class standard_squelch(gr.hier_block2):
def __init__(self, audio_rate):
gr.hier_block2.__init__(self, "standard_squelch",
gr.io_signature(1, 1, gr.sizeof_float), # Input signature
gr.io_signature(1, 1, gr.sizeof_float)) # Output signature
self.input_node = blocks.add_const_ff(0) # FIXME kludge
self.low_iir = filter.iir_filter_ffd((0.0193,0,-0.0193),(1,1.9524,-0.9615))
self.low_square = blocks.multiply_ff()
self.low_smooth = filter.single_pole_iir_filter_ff(1/(0.01*audio_rate)) # 100ms time constant
self.hi_iir = filter.iir_filter_ffd((0.0193,0,-0.0193),(1,1.3597,-0.9615))
self.hi_square = blocks.multiply_ff()
self.hi_smooth = filter.single_pole_iir_filter_ff(1/(0.01*audio_rate))
self.sub = blocks.sub_ff()
self.add = blocks.add_ff()
self.gate = blocks.threshold_ff(0.3,0.43,0)
self.squelch_lpf = filter.single_pole_iir_filter_ff(1/(0.01*audio_rate))
self.div = blocks.divide_ff()
self.squelch_mult = blocks.multiply_ff()
self.connect(self, self.input_node)
self.connect(self.input_node, (self.squelch_mult, 0))
self.connect(self.input_node,self.low_iir)
self.connect(self.low_iir,(self.low_square,0))
self.connect(self.low_iir,(self.low_square,1))
self.connect(self.low_square,self.low_smooth,(self.sub,0))
self.connect(self.low_smooth, (self.add,0))
self.connect(self.input_node,self.hi_iir)
self.connect(self.hi_iir,(self.hi_square,0))
self.connect(self.hi_iir,(self.hi_square,1))
self.connect(self.hi_square,self.hi_smooth,(self.sub,1))
self.connect(self.hi_smooth, (self.add,1))
self.connect(self.sub, (self.div, 0))
self.connect(self.add, (self.div, 1))
self.connect(self.div, self.gate, self.squelch_lpf, (self.squelch_mult,1))
self.connect(self.squelch_mult, self)
def set_threshold(self, threshold):
self.gate.set_hi(threshold)
def threshold(self):
return self.gate.hi()
def squelch_range(self):
return (0.0, 1.0, 1.0/100)
|
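The hier block above drops into a flowgraph like any other block. A minimal sketch, assuming the same GNU Radio 3.7-era Python API the file itself uses and a working sound card:

```python
# Hypothetical flowgraph wiring for the squelch above (GNU Radio 3.7-era API).
from gnuradio import gr, analog, audio

class squelch_demo(gr.top_block):
    def __init__(self, audio_rate=32000):
        gr.top_block.__init__(self)
        src = audio.source(audio_rate)             # sound-card input
        sql = analog.standard_squelch(audio_rate)  # the hier block defined above
        sql.set_threshold(0.5)                     # gate threshold, range (0, 1)
        snk = audio.sink(audio_rate)
        self.connect(src, sql, snk)

if __name__ == '__main__':
    squelch_demo().run()
```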
| ptoraskar/django | refs/heads/master | tests/auth_tests/test_auth_backends.py | 200 |
from __future__ import unicode_literals
from datetime import date
from django.contrib.auth import (
BACKEND_SESSION_KEY, SESSION_KEY, authenticate, get_user,
)
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.hashers import MD5PasswordHasher
from django.contrib.auth.models import AnonymousUser, Group, Permission, User
from django.contrib.auth.tests.custom_user import CustomUser, ExtensionUser
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.http import HttpRequest
from django.test import (
SimpleTestCase, TestCase, modify_settings, override_settings,
)
from .models import CustomPermissionsUser, UUIDUser
class CountingMD5PasswordHasher(MD5PasswordHasher):
"""Hasher that counts how many times it computes a hash."""
calls = 0
def encode(self, *args, **kwargs):
type(self).calls += 1
return super(CountingMD5PasswordHasher, self).encode(*args, **kwargs)
class BaseModelBackendTest(object):
"""
A base class for tests that need to validate the ModelBackend
with different User models. Subclasses should define a class
level UserModel attribute, and a create_users() method to
construct two users for test purposes.
"""
backend = 'django.contrib.auth.backends.ModelBackend'
def setUp(self):
self.patched_settings = modify_settings(
AUTHENTICATION_BACKENDS={'append': self.backend},
)
self.patched_settings.enable()
self.create_users()
def tearDown(self):
self.patched_settings.disable()
# The custom_perms test messes with ContentTypes, which will
# be cached; flush the cache to ensure there are no side effects
# Refs #14975, #14925
ContentType.objects.clear_cache()
def test_has_perm(self):
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(user.has_perm('auth.test'), False)
user.is_staff = True
user.save()
self.assertEqual(user.has_perm('auth.test'), False)
user.is_superuser = True
user.save()
self.assertEqual(user.has_perm('auth.test'), True)
user.is_staff = True
user.is_superuser = True
user.is_active = False
user.save()
self.assertEqual(user.has_perm('auth.test'), False)
def test_custom_perms(self):
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
user.user_permissions.add(perm)
# reloading user to purge the _perm_cache
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(user.get_all_permissions() == {'auth.test'}, True)
self.assertEqual(user.get_group_permissions(), set())
self.assertEqual(user.has_module_perms('Group'), False)
self.assertEqual(user.has_module_perms('auth'), True)
perm = Permission.objects.create(name='test2', content_type=content_type, codename='test2')
user.user_permissions.add(perm)
perm = Permission.objects.create(name='test3', content_type=content_type, codename='test3')
user.user_permissions.add(perm)
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(user.get_all_permissions(), {'auth.test2', 'auth.test', 'auth.test3'})
self.assertEqual(user.has_perm('test'), False)
self.assertEqual(user.has_perm('auth.test'), True)
self.assertEqual(user.has_perms(['auth.test2', 'auth.test3']), True)
perm = Permission.objects.create(name='test_group', content_type=content_type, codename='test_group')
group = Group.objects.create(name='test_group')
group.permissions.add(perm)
user.groups.add(group)
user = self.UserModel._default_manager.get(pk=self.user.pk)
exp = {'auth.test2', 'auth.test', 'auth.test3', 'auth.test_group'}
self.assertEqual(user.get_all_permissions(), exp)
self.assertEqual(user.get_group_permissions(), {'auth.test_group'})
self.assertEqual(user.has_perms(['auth.test3', 'auth.test_group']), True)
user = AnonymousUser()
self.assertEqual(user.has_perm('test'), False)
self.assertEqual(user.has_perms(['auth.test2', 'auth.test3']), False)
def test_has_no_object_perm(self):
"""Regressiontest for #12462"""
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
user.user_permissions.add(perm)
self.assertEqual(user.has_perm('auth.test', 'object'), False)
self.assertEqual(user.get_all_permissions('object'), set())
self.assertEqual(user.has_perm('auth.test'), True)
self.assertEqual(user.get_all_permissions(), {'auth.test'})
def test_anonymous_has_no_permissions(self):
"""
#17903 -- Anonymous users shouldn't have permissions in
ModelBackend.get_(all|user|group)_permissions().
"""
backend = ModelBackend()
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
user_perm = Permission.objects.create(name='test', content_type=content_type, codename='test_user')
group_perm = Permission.objects.create(name='test2', content_type=content_type, codename='test_group')
user.user_permissions.add(user_perm)
group = Group.objects.create(name='test_group')
user.groups.add(group)
group.permissions.add(group_perm)
self.assertEqual(backend.get_all_permissions(user), {'auth.test_user', 'auth.test_group'})
self.assertEqual(backend.get_user_permissions(user), {'auth.test_user', 'auth.test_group'})
self.assertEqual(backend.get_group_permissions(user), {'auth.test_group'})
user.is_anonymous = lambda: True
self.assertEqual(backend.get_all_permissions(user), set())
self.assertEqual(backend.get_user_permissions(user), set())
self.assertEqual(backend.get_group_permissions(user), set())
def test_inactive_has_no_permissions(self):
"""
#17903 -- Inactive users shouldn't have permissions in
ModelBackend.get_(all|user|group)_permissions().
"""
backend = ModelBackend()
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
user_perm = Permission.objects.create(name='test', content_type=content_type, codename='test_user')
group_perm = Permission.objects.create(name='test2', content_type=content_type, codename='test_group')
user.user_permissions.add(user_perm)
group = Group.objects.create(name='test_group')
user.groups.add(group)
group.permissions.add(group_perm)
self.assertEqual(backend.get_all_permissions(user), {'auth.test_user', 'auth.test_group'})
self.assertEqual(backend.get_user_permissions(user), {'auth.test_user', 'auth.test_group'})
self.assertEqual(backend.get_group_permissions(user), {'auth.test_group'})
user.is_active = False
user.save()
self.assertEqual(backend.get_all_permissions(user), set())
self.assertEqual(backend.get_user_permissions(user), set())
self.assertEqual(backend.get_group_permissions(user), set())
def test_get_all_superuser_permissions(self):
"""A superuser has all permissions. Refs #14795."""
user = self.UserModel._default_manager.get(pk=self.superuser.pk)
self.assertEqual(len(user.get_all_permissions()), len(Permission.objects.all()))
@override_settings(PASSWORD_HASHERS=['auth_tests.test_auth_backends.CountingMD5PasswordHasher'])
def test_authentication_timing(self):
"""Hasher is run once regardless of whether the user exists. Refs #20760."""
# Re-set the password, because this test overrides PASSWORD_HASHERS
self.user.set_password('test')
self.user.save()
CountingMD5PasswordHasher.calls = 0
username = getattr(self.user, self.UserModel.USERNAME_FIELD)
authenticate(username=username, password='test')
self.assertEqual(CountingMD5PasswordHasher.calls, 1)
CountingMD5PasswordHasher.calls = 0
authenticate(username='no_such_user', password='test')
self.assertEqual(CountingMD5PasswordHasher.calls, 1)
class ModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the default User model.
"""
UserModel = User
def create_users(self):
self.user = User.objects.create_user(
username='test',
email='test@example.com',
password='test',
)
self.superuser = User.objects.create_superuser(
username='test2',
email='test2@example.com',
password='test',
)
@override_settings(AUTH_USER_MODEL='auth.ExtensionUser')
class ExtensionUserModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the custom ExtensionUser model.
This isn't a perfect test, because both the User and ExtensionUser are
synchronized to the database, which wouldn't ordinarily happen in
production. As a result, it doesn't catch errors caused by the
non-existence of the User table.
The specific problem is queries on .filter(groups__user) et al, which
make an implicit assumption that the user model is called 'User'. In
production, the auth.User table won't exist, so the requested join
won't exist either; in testing, the auth.User *does* exist, and
so does the join. However, the join table won't contain any useful
data; for testing, we check that the data we expect actually does exist.
"""
UserModel = ExtensionUser
def create_users(self):
self.user = ExtensionUser._default_manager.create_user(
username='test',
email='test@example.com',
password='test',
date_of_birth=date(2006, 4, 25)
)
self.superuser = ExtensionUser._default_manager.create_superuser(
username='test2',
email='test2@example.com',
password='test',
date_of_birth=date(1976, 11, 8)
)
@override_settings(AUTH_USER_MODEL='auth.CustomPermissionsUser')
class CustomPermissionsUserModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the CustomPermissionsUser model.
As with the ExtensionUser test, this isn't a perfect test, because both
the User and CustomPermissionsUser are synchronized to the database,
which wouldn't ordinarily happen in production.
"""
UserModel = CustomPermissionsUser
def create_users(self):
self.user = CustomPermissionsUser._default_manager.create_user(
email='test@example.com',
password='test',
date_of_birth=date(2006, 4, 25)
)
self.superuser = CustomPermissionsUser._default_manager.create_superuser(
email='test2@example.com',
password='test',
date_of_birth=date(1976, 11, 8)
)
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CustomUserModelBackendAuthenticateTest(TestCase):
"""
Tests that the model backend can accept a credentials kwarg labeled with
the custom user model's USERNAME_FIELD.
"""
def test_authenticate(self):
test_user = CustomUser._default_manager.create_user(
email='test@example.com',
password='test',
date_of_birth=date(2006, 4, 25)
)
authenticated_user = authenticate(email='test@example.com', password='test')
self.assertEqual(test_user, authenticated_user)
@override_settings(AUTH_USER_MODEL='auth.UUIDUser')
class UUIDUserTests(TestCase):
def test_login(self):
"""
A custom user with a UUID primary key should be able to login.
"""
user = UUIDUser.objects.create_user(username='uuid', password='test')
self.assertTrue(self.client.login(username='uuid', password='test'))
self.assertEqual(UUIDUser.objects.get(pk=self.client.session[SESSION_KEY]), user)
class TestObj(object):
pass
class SimpleRowlevelBackend(object):
def has_perm(self, user, perm, obj=None):
if not obj:
return # We only support row level perms
if isinstance(obj, TestObj):
if user.username == 'test2':
return True
elif user.is_anonymous() and perm == 'anon':
return True
elif not user.is_active and perm == 'inactive':
return True
return False
def has_module_perms(self, user, app_label):
if not user.is_anonymous() and not user.is_active:
return False
return app_label == "app1"
def get_all_permissions(self, user, obj=None):
if not obj:
return [] # We only support row level perms
if not isinstance(obj, TestObj):
return ['none']
if user.is_anonymous():
return ['anon']
if user.username == 'test2':
return ['simple', 'advanced']
else:
return ['simple']
def get_group_permissions(self, user, obj=None):
if not obj:
return # We only support row level perms
if not isinstance(obj, TestObj):
return ['none']
if 'test_group' in [group.name for group in user.groups.all()]:
return ['group_perm']
else:
return ['none']
@modify_settings(AUTHENTICATION_BACKENDS={
'append': 'auth_tests.test_auth_backends.SimpleRowlevelBackend',
})
class RowlevelBackendTest(TestCase):
"""
Tests for auth backend that supports object level permissions
"""
def setUp(self):
self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
self.user2 = User.objects.create_user('test2', 'test2@example.com', 'test')
self.user3 = User.objects.create_user('test3', 'test3@example.com', 'test')
def tearDown(self):
# The get_group_permissions test messes with ContentTypes, which will
# be cached; flush the cache to ensure there are no side effects
# Refs #14975, #14925
ContentType.objects.clear_cache()
def test_has_perm(self):
self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
self.assertEqual(self.user2.has_perm('perm', TestObj()), True)
self.assertEqual(self.user2.has_perm('perm'), False)
self.assertEqual(self.user2.has_perms(['simple', 'advanced'], TestObj()), True)
self.assertEqual(self.user3.has_perm('perm', TestObj()), False)
self.assertEqual(self.user3.has_perm('anon', TestObj()), False)
self.assertEqual(self.user3.has_perms(['simple', 'advanced'], TestObj()), False)
def test_get_all_permissions(self):
self.assertEqual(self.user1.get_all_permissions(TestObj()), {'simple'})
self.assertEqual(self.user2.get_all_permissions(TestObj()), {'simple', 'advanced'})
self.assertEqual(self.user2.get_all_permissions(), set())
def test_get_group_permissions(self):
group = Group.objects.create(name='test_group')
self.user3.groups.add(group)
self.assertEqual(self.user3.get_group_permissions(TestObj()), {'group_perm'})
@override_settings(
AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend'],
)
class AnonymousUserBackendTest(SimpleTestCase):
"""
Tests for AnonymousUser delegating to backend.
"""
def setUp(self):
self.user1 = AnonymousUser()
def test_has_perm(self):
self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
self.assertEqual(self.user1.has_perm('anon', TestObj()), True)
def test_has_perms(self):
self.assertEqual(self.user1.has_perms(['anon'], TestObj()), True)
self.assertEqual(self.user1.has_perms(['anon', 'perm'], TestObj()), False)
def test_has_module_perms(self):
self.assertEqual(self.user1.has_module_perms("app1"), True)
self.assertEqual(self.user1.has_module_perms("app2"), False)
def test_get_all_permissions(self):
self.assertEqual(self.user1.get_all_permissions(TestObj()), {'anon'})
@override_settings(AUTHENTICATION_BACKENDS=[])
class NoBackendsTest(TestCase):
"""
Tests that an appropriate error is raised if no auth backends are provided.
"""
def setUp(self):
self.user = User.objects.create_user('test', 'test@example.com', 'test')
def test_raises_exception(self):
self.assertRaises(ImproperlyConfigured, self.user.has_perm, ('perm', TestObj(),))
@override_settings(AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend'])
class InActiveUserBackendTest(TestCase):
"""
Tests for an inactive user
"""
def setUp(self):
self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
self.user1.is_active = False
self.user1.save()
def test_has_perm(self):
self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
self.assertEqual(self.user1.has_perm('inactive', TestObj()), True)
def test_has_module_perms(self):
self.assertEqual(self.user1.has_module_perms("app1"), False)
self.assertEqual(self.user1.has_module_perms("app2"), False)
class PermissionDeniedBackend(object):
"""
Always raises PermissionDenied in `authenticate`, `has_perm` and `has_module_perms`.
"""
supports_object_permissions = True
supports_anonymous_user = True
supports_inactive_user = True
def authenticate(self, username=None, password=None):
raise PermissionDenied
def has_perm(self, user_obj, perm, obj=None):
raise PermissionDenied
def has_module_perms(self, user_obj, app_label):
raise PermissionDenied
class PermissionDeniedBackendTest(TestCase):
"""
Tests that other backends are not checked once a backend raises PermissionDenied
"""
backend = 'auth_tests.test_auth_backends.PermissionDeniedBackend'
def setUp(self):
self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
self.user1.save()
@modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend})
def test_permission_denied(self):
"user is not authenticated after a backend raises permission denied #2550"
self.assertEqual(authenticate(username='test', password='test'), None)
@modify_settings(AUTHENTICATION_BACKENDS={'append': backend})
def test_authenticates(self):
self.assertEqual(authenticate(username='test', password='test'), self.user1)
@modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend})
def test_has_perm_denied(self):
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
self.user1.user_permissions.add(perm)
self.assertIs(self.user1.has_perm('auth.test'), False)
self.assertIs(self.user1.has_module_perms('auth'), False)
@modify_settings(AUTHENTICATION_BACKENDS={'append': backend})
def test_has_perm(self):
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
self.user1.user_permissions.add(perm)
self.assertIs(self.user1.has_perm('auth.test'), True)
self.assertIs(self.user1.has_module_perms('auth'), True)
class NewModelBackend(ModelBackend):
pass
class ChangedBackendSettingsTest(TestCase):
"""
Tests for changes in the settings.AUTHENTICATION_BACKENDS
"""
backend = 'auth_tests.test_auth_backends.NewModelBackend'
TEST_USERNAME = 'test_user'
TEST_PASSWORD = 'test_password'
TEST_EMAIL = 'test@example.com'
def setUp(self):
User.objects.create_user(self.TEST_USERNAME,
self.TEST_EMAIL,
self.TEST_PASSWORD)
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_changed_backend_settings(self):
"""
Tests that removing a backend configured in AUTHENTICATION_BACKENDS
make already logged-in users disconnect.
"""
# Get a session for the test user
self.assertTrue(self.client.login(
username=self.TEST_USERNAME,
password=self.TEST_PASSWORD)
)
# Prepare a request object
request = HttpRequest()
request.session = self.client.session
# Remove NewModelBackend
with self.settings(AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend']):
# Get the user from the request
user = get_user(request)
# Assert that the user retrieval is successful and the user is
# anonymous, as the backend is no longer available.
self.assertIsNotNone(user)
self.assertTrue(user.is_anonymous())
class TypeErrorBackend(object):
"""
Always raises TypeError.
"""
supports_object_permissions = True
supports_anonymous_user = True
supports_inactive_user = True
def authenticate(self, username=None, password=None):
raise TypeError
class TypeErrorBackendTest(TestCase):
"""
Tests that a TypeError within a backend is propagated properly.
Regression test for ticket #18171
"""
backend = 'auth_tests.test_auth_backends.TypeErrorBackend'
def setUp(self):
self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_type_error_raised(self):
self.assertRaises(TypeError, authenticate, username='test', password='test')
class ImproperlyConfiguredUserModelTest(TestCase):
"""
Tests that an exception from within get_user_model is propagated and doesn't
raise an UnboundLocalError.
Regression test for ticket #21439
"""
def setUp(self):
self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
self.client.login(
username='test',
password='test'
)
@override_settings(AUTH_USER_MODEL='thismodel.doesntexist')
def test_does_not_shadow_exception(self):
# Prepare a request object
request = HttpRequest()
request.session = self.client.session
self.assertRaises(ImproperlyConfigured, get_user, request)
class ImportedModelBackend(ModelBackend):
pass
class ImportedBackendTests(TestCase):
"""
#23925 - The backend path added to the session should be the same
as the one defined in AUTHENTICATION_BACKENDS setting.
"""
backend = 'auth_tests.backend_alias.ImportedModelBackend'
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_backend_path(self):
username = 'username'
password = 'password'
User.objects.create_user(username, 'email', password)
self.assertTrue(self.client.login(username=username, password=password))
request = HttpRequest()
request.session = self.client.session
self.assertEqual(request.session[BACKEND_SESSION_KEY], self.backend)
|
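The backends exercised above are activated through settings. A minimal sketch of the ordering semantics these tests rely on; the settings fragment below is hypothetical.

```python
# Hypothetical settings fragment. Backends are tried in order: authenticate()
# returns the first user a backend accepts, and a backend raising
# PermissionDenied stops the chain (see PermissionDeniedBackendTest above).
AUTHENTICATION_BACKENDS = [
    'django.contrib.auth.backends.ModelBackend',            # DB-backed auth
    'auth_tests.test_auth_backends.SimpleRowlevelBackend',  # object-level perms
]
```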
| rodekruis/digidoc | refs/heads/master | node_modules/npm-check-updates/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py | 526 |
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import filecmp
import gyp.common
import gyp.xcodeproj_file
import errno
import os
import sys
import posixpath
import re
import shutil
import subprocess
import tempfile
# Project files generated by this module will use _intermediate_var as a
# custom Xcode setting whose value is a DerivedSources-like directory that's
# project-specific and configuration-specific. The normal choice,
# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
# as it is likely that multiple targets within a single project file will want
# to access the same set of generated files. The other option,
# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
# it is not configuration-specific. INTERMEDIATE_DIR is defined as
# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
_intermediate_var = 'INTERMEDIATE_DIR'
# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
# targets that share the same BUILT_PRODUCTS_DIR.
_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'
_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'SHARED_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_SUFFIX': '.dylib',
# INTERMEDIATE_DIR is a place for targets to build up intermediate products.
# It is specific to each build environment. It is only guaranteed to exist
# and be constant within the context of a project, corresponding to a single
# input file. Some build environments may allow their intermediate directory
# to be shared on a wider scale, but this is not guaranteed.
'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
'OS': 'mac',
'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
'CONFIGURATION_NAME': '$(CONFIGURATION)',
}
# The Xcode-specific sections that hold paths.
generator_additional_path_sections = [
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
# 'mac_framework_dirs', input already handles _dirs endings.
]
# The Xcode-specific keys that exist on targets and aren't moved down to
# configurations.
generator_additional_non_configuration_keys = [
'mac_bundle',
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
'mac_xctest_bundle',
'xcode_create_dependents_test_runner',
]
# We want to let any rules apply to files that are resources also.
generator_extra_sources_for_rules = [
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
]
# Xcode's standard set of library directories, which don't need to be duplicated
# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
xcode_standard_library_dirs = frozenset([
'$(SDKROOT)/usr/lib',
'$(SDKROOT)/usr/local/lib',
])
def CreateXCConfigurationList(configuration_names):
xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
if len(configuration_names) == 0:
configuration_names = ['Default']
for configuration_name in configuration_names:
xcbc = gyp.xcodeproj_file.XCBuildConfiguration({
'name': configuration_name})
xccl.AppendProperty('buildConfigurations', xcbc)
xccl.SetProperty('defaultConfigurationName', configuration_names[0])
return xccl
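# Hypothetical usage sketch (illustration only, not part of the generator):
# CreateXCConfigurationList(['Debug', 'Release']) returns an
# XCConfigurationList holding one XCBuildConfiguration per name, with
# 'defaultConfigurationName' set to the first entry ('Debug' here).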
class XcodeProject(object):
def __init__(self, gyp_path, path, build_file_dict):
self.gyp_path = gyp_path
self.path = path
self.project = gyp.xcodeproj_file.PBXProject(path=path)
projectDirPath = gyp.common.RelativePath(
os.path.dirname(os.path.abspath(self.gyp_path)),
os.path.dirname(path) or '.')
self.project.SetProperty('projectDirPath', projectDirPath)
self.project_file = \
gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
self.build_file_dict = build_file_dict
# TODO(mark): add destructor that cleans up self.path if created_dir is
# True and things didn't complete successfully. Or do something even
# better with "try"?
self.created_dir = False
try:
os.makedirs(self.path)
self.created_dir = True
except OSError, e:
if e.errno != errno.EEXIST:
raise
def Finalize1(self, xcode_targets, serialize_all_tests):
# Collect a list of all of the build configuration names used by the
# various targets in the file. It is very heavily advised to keep each
# target in an entire project (even across multiple project files) using
# the same set of configuration names.
configurations = []
for xct in self.project.GetProperty('targets'):
xccl = xct.GetProperty('buildConfigurationList')
xcbcs = xccl.GetProperty('buildConfigurations')
for xcbc in xcbcs:
name = xcbc.GetProperty('name')
if name not in configurations:
configurations.append(name)
# Replace the XCConfigurationList attached to the PBXProject object with
# a new one specifying all of the configuration names used by the various
# targets.
try:
xccl = CreateXCConfigurationList(configurations)
self.project.SetProperty('buildConfigurationList', xccl)
except:
sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
raise
# The need for this setting is explained above where _intermediate_var is
# defined. The comments below about wanting to avoid project-wide build
# settings apply here too, but this needs to be set on a project-wide basis
# so that files relative to the _intermediate_var setting can be displayed
# properly in the Xcode UI.
#
# Note that for configuration-relative files such as anything relative to
# _intermediate_var, for the purposes of UI tree view display, Xcode will
# only resolve the configuration name once, when the project file is
# opened. If the active build configuration is changed, the project file
# must be closed and reopened if it is desired for the tree view to update.
# This is filed as Apple radar 6588391.
xccl.SetBuildSetting(_intermediate_var,
'$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
xccl.SetBuildSetting(_shared_intermediate_var,
'$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
# Set user-specified project-wide build settings and config files. This
# is intended to be used very sparingly. Really, almost everything should
# go into target-specific build settings sections. The project-wide
# settings are only intended to be used in cases where Xcode attempts to
# resolve variable references in a project context as opposed to a target
# context, such as when resolving sourceTree references while building up
# the tree view for UI display.
# Any values set globally are applied to all configurations, then any
# per-configuration values are applied.
for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
xccl.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in self.build_file_dict:
config_ref = self.project.AddOrGetFileInRootGroup(
self.build_file_dict['xcode_config_file'])
xccl.SetBaseConfiguration(config_ref)
build_file_configurations = self.build_file_dict.get('configurations', {})
if build_file_configurations:
for config_name in configurations:
build_file_configuration_named = \
build_file_configurations.get(config_name, {})
if build_file_configuration_named:
xcc = xccl.ConfigurationNamed(config_name)
for xck, xcv in build_file_configuration_named.get('xcode_settings',
{}).iteritems():
xcc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in build_file_configuration_named:
config_ref = self.project.AddOrGetFileInRootGroup(
build_file_configurations[config_name]['xcode_config_file'])
xcc.SetBaseConfiguration(config_ref)
# Sort the targets based on how they appeared in the input.
# TODO(mark): Like a lot of other things here, this assumes internal
# knowledge of PBXProject - in this case, of its "targets" property.
# ordinary_targets are ordinary targets that are already in the project
# file. run_test_targets are the targets that run unittests and should be
# used for the Run All Tests target. support_targets are the action/rule
# targets used by GYP file targets, just kept for the assert check.
ordinary_targets = []
run_test_targets = []
support_targets = []
# targets is the full list of targets in the project.
targets = []
# does the project define its own "all" target?
has_custom_all = False
# targets_for_all is the list of ordinary_targets that should be listed
# in this project's "All" target. It includes each non_runtest_target
# that does not have suppress_wildcard set.
targets_for_all = []
for target in self.build_file_dict['targets']:
target_name = target['target_name']
toolset = target['toolset']
qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
toolset)
xcode_target = xcode_targets[qualified_target]
# Make sure that the target being added to the sorted list is already in
# the unsorted list.
assert xcode_target in self.project._properties['targets']
targets.append(xcode_target)
ordinary_targets.append(xcode_target)
if xcode_target.support_target:
support_targets.append(xcode_target.support_target)
targets.append(xcode_target.support_target)
if not int(target.get('suppress_wildcard', False)):
targets_for_all.append(xcode_target)
if target_name.lower() == 'all':
has_custom_all = True
# If this target has a 'run_as' attribute, add its target to the
# targets, and add it to the test targets.
if target.get('run_as'):
# Make a target to run something. It should have one
# dependency, the parent xcode target.
xccl = CreateXCConfigurationList(configurations)
run_target = gyp.xcodeproj_file.PBXAggregateTarget({
'name': 'Run ' + target_name,
'productName': xcode_target.GetProperty('productName'),
'buildConfigurationList': xccl,
},
parent=self.project)
run_target.AddDependency(xcode_target)
command = target['run_as']
script = ''
if command.get('working_directory'):
script = script + 'cd "%s"\n' % \
gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
command.get('working_directory'))
if command.get('environment'):
script = script + "\n".join(
['export %s="%s"' %
(key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
for (key, val) in command.get('environment').iteritems()]) + "\n"
# Some tests end up using sockets, files on disk, etc. and can get
# confused if more than one test runs at a time. The generator
# flag 'xcode_serialize_all_test_runs' controls whether all tests are
# forced to run serially; it defaults to True. To get serial runs,
# this little bit of python does the same as the linux flock utility
# to make sure only one test runs at a time.
command_prefix = ''
if serialize_all_tests:
command_prefix = \
"""python -c "import fcntl, subprocess, sys
file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
fcntl.flock(file.fileno(), fcntl.LOCK_EX)
sys.exit(subprocess.call(sys.argv[1:]))" """
# If we were unable to exec for some reason, we want to exit
# with an error, and fix up variable references to be shell
# syntax instead of Xcode syntax.
script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
gyp.common.EncodePOSIXShellList(command.get('action')))
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'shellScript': script,
'showEnvVarsInLog': 0,
})
run_target.AppendProperty('buildPhases', ssbp)
# Add the run target to the project file.
targets.append(run_target)
run_test_targets.append(run_target)
xcode_target.test_runner = run_target
# Make sure that the list of targets being replaced is the same length as
# the one replacing it, but allow for the added test runner targets.
assert len(self.project._properties['targets']) == \
len(ordinary_targets) + len(support_targets)
self.project._properties['targets'] = targets
# Get rid of unnecessary levels of depth in groups like the Source group.
self.project.RootGroupsTakeOverOnlyChildren(True)
# Sort the groups nicely. Do this after sorting the targets, because the
# Products group is sorted based on the order of the targets.
self.project.SortGroups()
# Create an "All" target if there's more than one target in this project
# file and the project didn't define its own "All" target. Put a generated
# "All" target first so that people opening up the project for the first
# time will build everything by default.
if len(targets_for_all) > 1 and not has_custom_all:
xccl = CreateXCConfigurationList(configurations)
all_target = gyp.xcodeproj_file.PBXAggregateTarget(
{
'buildConfigurationList': xccl,
'name': 'All',
},
parent=self.project)
for target in targets_for_all:
all_target.AddDependency(target)
# TODO(mark): This is evil because it relies on internal knowledge of
# PBXProject._properties. It's important to get the "All" target first,
# though.
self.project._properties['targets'].insert(0, all_target)
# The same, but for run_test_targets.
if len(run_test_targets) > 1:
xccl = CreateXCConfigurationList(configurations)
run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
{
'buildConfigurationList': xccl,
'name': 'Run All Tests',
},
parent=self.project)
for run_test_target in run_test_targets:
run_all_tests_target.AddDependency(run_test_target)
# Insert after the "All" target, which must exist if there is more than
# one run_test_target.
self.project._properties['targets'].insert(1, run_all_tests_target)
def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
# Finalize2 needs to happen in a separate step because the process of
# updating references to other projects depends on the ordering of targets
# within remote project files. Finalize1 is responsible for sorting duty,
# and once all project files are sorted, Finalize2 can come in and update
# these references.
# To support making a "test runner" target that will run all the tests
# that are direct dependents of any given target, we look for
# xcode_create_dependents_test_runner being set on an Aggregate target,
# and generate a second target that will run the test runners found under
# the marked target.
for bf_tgt in self.build_file_dict['targets']:
if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
tgt_name = bf_tgt['target_name']
toolset = bf_tgt['toolset']
qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
tgt_name, toolset)
xcode_target = xcode_targets[qualified_target]
if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
# Collect all the run test targets.
all_run_tests = []
pbxtds = xcode_target.GetProperty('dependencies')
for pbxtd in pbxtds:
pbxcip = pbxtd.GetProperty('targetProxy')
dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
if hasattr(dependency_xct, 'test_runner'):
all_run_tests.append(dependency_xct.test_runner)
# Directly depend on all the runners as they depend on the target
# that builds them.
if len(all_run_tests) > 0:
run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
'name': 'Run %s Tests' % tgt_name,
'productName': tgt_name,
},
parent=self.project)
for run_test_target in all_run_tests:
run_all_target.AddDependency(run_test_target)
# Insert the test runner after the related target.
idx = self.project._properties['targets'].index(xcode_target)
self.project._properties['targets'].insert(idx + 1, run_all_target)
# Update all references to other projects, to make sure that the lists of
# remote products are complete. Otherwise, Xcode will fill them in when
# it opens the project file, which will result in unnecessary diffs.
# TODO(mark): This is evil because it relies on internal knowledge of
# PBXProject._other_pbxprojects.
for other_pbxproject in self.project._other_pbxprojects.keys():
self.project.AddOrGetProjectReference(other_pbxproject)
self.project.SortRemoteProductReferences()
# Give everything an ID.
self.project_file.ComputeIDs()
# Make sure that no two objects in the project file have the same ID. If
# multiple objects wind up with the same ID, upon loading the file, Xcode
# will only recognize one object (the last one in the file?) and the
# results are unpredictable.
self.project_file.EnsureNoIDCollisions()
def Write(self):
# Write the project file to a temporary location first. Xcode watches for
# changes to the project file and presents a UI sheet offering to reload
# the project when it does change. However, in some cases, especially when
# multiple projects are open or when Xcode is busy, things don't work so
# seamlessly. Sometimes, Xcode is able to detect that a project file has
# changed but can't unload it because something else is referencing it.
# To mitigate this problem, and to avoid even having Xcode present the UI
# sheet when an open project is rewritten for inconsequential changes, the
# project file is written to a temporary file in the xcodeproj directory
# first. The new temporary file is then compared to the existing project
# file, if any. If they differ, the new file replaces the old; otherwise,
# the new project file is simply deleted. Xcode properly detects a file
# being renamed over an open project file as a change and so it remains
# able to present the "project file changed" sheet under this system.
# Writing to a temporary file first also avoids the possible problem of
# Xcode rereading an incomplete project file.
(output_fd, new_pbxproj_path) = \
tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
dir=self.path)
try:
output_file = os.fdopen(output_fd, 'wb')
self.project_file.Print(output_file)
output_file.close()
pbxproj_path = os.path.join(self.path, 'project.pbxproj')
same = False
try:
same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if same:
# The new file is identical to the old one, just get rid of the new
# one.
os.unlink(new_pbxproj_path)
else:
# The new file is different from the old one, or there is no old one.
# Rename the new file to the permanent name.
#
# tempfile.mkstemp uses an overly restrictive mode, resulting in a
# file that can only be read by the owner, regardless of the umask.
# There's no reason to not respect the umask here, which means that
# an extra hoop is required to fetch it and reset the new file's mode.
#
# No way to get the umask without setting a new one? Set a safe one
# and then set it back to the old value.
umask = os.umask(077)
os.umask(umask)
os.chmod(new_pbxproj_path, 0666 & ~umask)
os.rename(new_pbxproj_path, pbxproj_path)
except Exception:
# Don't leave turds behind. In fact, if this code was responsible for
# creating the xcodeproj directory, get rid of that too.
os.unlink(new_pbxproj_path)
if self.created_dir:
shutil.rmtree(self.path, True)
raise
def AddSourceToTarget(source, type, pbxp, xct):
# TODO(mark): Perhaps source_extensions and library_extensions can be made a
# little bit fancier.
source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's']
# .o is conceptually more of a "source" than a "library," but Xcode thinks
# of "sources" as things to compile and "libraries" (or "frameworks") as
# things to link with. Adding an object file to an Xcode target's frameworks
# phase works properly.
library_extensions = ['a', 'dylib', 'framework', 'o']
basename = posixpath.basename(source)
(root, ext) = posixpath.splitext(basename)
if ext:
ext = ext[1:].lower()
if ext in source_extensions and type != 'none':
xct.SourcesPhase().AddFile(source)
elif ext in library_extensions and type != 'none':
xct.FrameworksPhase().AddFile(source)
else:
# Files that aren't added to a sources or frameworks build phase can still
# go into the project file, just not as part of a build phase.
pbxp.AddOrGetFileInRootGroup(source)
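# A quick behavior sketch (the pbxp and xct instances are hypothetical):
#   AddSourceToTarget('foo.cc', 'executable', pbxp, xct)    # Compile Sources
#   AddSourceToTarget('libbar.a', 'executable', pbxp, xct)  # Frameworks phase
#   AddSourceToTarget('README', 'executable', pbxp, xct)    # root group only
# A type of 'none' always takes the root-group path, since there is nothing
# to compile or link.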
def AddResourceToTarget(resource, pbxp, xct):
# TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
# where it's used.
xct.ResourcesPhase().AddFile(resource)
def AddHeaderToTarget(header, pbxp, xct, is_public):
# TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
# where it's used.
settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
xct.HeadersPhase().AddFile(header, settings)
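# Note that the tuple indexing above picks 'Public' when is_public is true
# and 'Private' otherwise, so a public header gets settings of
# '{ATTRIBUTES = (Public, ); }'.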
_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
def ExpandXcodeVariables(string, expansions):
"""Expands Xcode-style $(VARIABLES) in string per the expansions dict.
In some rare cases, it is appropriate to expand Xcode variables when a
project file is generated. For any substring $(VAR) in string, if VAR is a
key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
Any $(VAR) substring in string for which VAR is not a key in the expansions
dict will remain in the returned string.
"""
matches = _xcode_variable_re.findall(string)
if not matches:
return string
matches.reverse()
for match in matches:
(to_replace, variable) = match
if variable not in expansions:
continue
replacement = expansions[variable]
string = re.sub(re.escape(to_replace), replacement, string)
return string
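# A minimal illustration of the expansion behavior (values are made up):
#   ExpandXcodeVariables('$(INPUT_FILE_BASE).cc $(UNDEFINED)',
#                        {'INPUT_FILE_BASE': 'one'})
# returns 'one.cc $(UNDEFINED)'; known variables are substituted and
# unknown ones pass through unchanged.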
_xcode_define_re = re.compile(r'([\\\"\' ])')
def EscapeXcodeDefine(s):
"""We must escape the defines that we give to XCode so that it knows not to
split on spaces and to respect backslash and quote literals. However, we
must not quote the define, or Xcode will incorrectly intepret variables
especially $(inherited)."""
return re.sub(_xcode_define_re, r'\\\1', s)
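# For example (sketch), EscapeXcodeDefine('NAME="a b"') yields NAME=\"a\ b\"
# -- each quote, space, and backslash in the input gains a preceding
# backslash so Xcode treats the whole define as a single token.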
def PerformBuild(data, configurations, params):
options = params['options']
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
if options.generator_output:
xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
for config in configurations:
arguments = ['xcodebuild', '-project', xcodeproj_path]
arguments += ['-configuration', config]
print "Building [%s]: %s" % (config, arguments)
subprocess.check_call(arguments)
def GenerateOutput(target_list, target_dicts, data, params):
options = params['options']
generator_flags = params.get('generator_flags', {})
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
serialize_all_tests = \
generator_flags.get('xcode_serialize_all_test_runs', True)
project_version = generator_flags.get('xcode_project_version', None)
skip_excluded_files = \
not generator_flags.get('xcode_list_excluded_files', True)
xcode_projects = {}
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
if options.generator_output:
xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
xcode_projects[build_file] = xcp
pbxp = xcp.project
if parallel_builds:
pbxp.SetProperty('attributes',
{'BuildIndependentTargetsInParallel': 'YES'})
if project_version:
xcp.project_file.SetXcodeVersion(project_version)
# Add gyp/gypi files to project
if not generator_flags.get('standalone'):
main_group = pbxp.GetProperty('mainGroup')
build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
main_group.AppendChild(build_group)
for included_file in build_file_dict['included_files']:
build_group.AddOrGetFileByPath(included_file, False)
xcode_targets = {}
xcode_target_to_target_dict = {}
for qualified_target in target_list:
[build_file, target_name, toolset] = \
gyp.common.ParseQualifiedTarget(qualified_target)
spec = target_dicts[qualified_target]
if spec['toolset'] != 'target':
raise Exception(
'Multiple toolsets not supported in xcode build (target %s)' %
qualified_target)
configuration_names = [spec['default_configuration']]
for configuration_name in sorted(spec['configurations'].keys()):
if configuration_name not in configuration_names:
configuration_names.append(configuration_name)
xcp = xcode_projects[build_file]
pbxp = xcp.project
# Set up the configurations for the target according to the list of names
# supplied.
xccl = CreateXCConfigurationList(configuration_names)
# Create an XCTarget subclass object for the target. The type with
# "+bundle" appended will be used if the target has "mac_bundle" set.
# loadable_modules not in a mac_bundle are mapped to
# com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
# to create a single-file mh_bundle.
_types = {
'executable': 'com.apple.product-type.tool',
'loadable_module': 'com.googlecode.gyp.xcode.bundle',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
'executable+bundle': 'com.apple.product-type.application',
'loadable_module+bundle': 'com.apple.product-type.bundle',
'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
'shared_library+bundle': 'com.apple.product-type.framework',
}
target_properties = {
'buildConfigurationList': xccl,
'name': target_name,
}
type = spec['type']
is_xctest = int(spec.get('mac_xctest_bundle', 0))
is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
if type != 'none':
type_bundle_key = type
if is_xctest:
type_bundle_key += '+xctest'
assert type == 'loadable_module', (
'mac_xctest_bundle targets must have type loadable_module '
'(target %s)' % target_name)
elif is_bundle:
type_bundle_key += '+bundle'
xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
try:
target_properties['productType'] = _types[type_bundle_key]
except KeyError, e:
gyp.common.ExceptionAppend(e, "-- unknown product type while "
"writing target %s" % target_name)
raise
else:
xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
assert not is_bundle, (
'mac_bundle targets cannot have type none (target "%s")' %
target_name)
assert not is_xctest, (
'mac_xctest_bundle targets cannot have type none (target "%s")' %
target_name)
target_product_name = spec.get('product_name')
if target_product_name is not None:
target_properties['productName'] = target_product_name
xct = xctarget_type(target_properties, parent=pbxp,
force_outdir=spec.get('product_dir'),
force_prefix=spec.get('product_prefix'),
force_extension=spec.get('product_extension'))
pbxp.AppendProperty('targets', xct)
xcode_targets[qualified_target] = xct
xcode_target_to_target_dict[xct] = spec
spec_actions = spec.get('actions', [])
spec_rules = spec.get('rules', [])
# Xcode has some "issues" with checking dependencies for the "Compile
# sources" step with any source files/headers generated by actions/rules.
# To work around this, if a target is building anything directly (not
# type "none"), then a second target is used to run the GYP actions/rules
# and is made a dependency of this target. This way the work is done
# before the dependency checks for what should be recompiled.
support_xct = None
if type != 'none' and (spec_actions or spec_rules):
support_xccl = CreateXCConfigurationList(configuration_names)
support_target_properties = {
'buildConfigurationList': support_xccl,
'name': target_name + ' Support',
}
if target_product_name:
support_target_properties['productName'] = \
target_product_name + ' Support'
support_xct = \
gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
parent=pbxp)
pbxp.AppendProperty('targets', support_xct)
xct.AddDependency(support_xct)
# Hang the support target off the main target so it can be tested/found
# by the generator during Finalize.
xct.support_target = support_xct
prebuild_index = 0
# Add custom shell script phases for "actions" sections.
for action in spec_actions:
# There's no need to write anything into the script to ensure that the
# output directories already exist, because Xcode will look at the
# declared outputs and automatically ensure that they exist for us.
# Do we have a message to print when this action runs?
message = action.get('message')
if message:
message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
else:
message = ''
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(action['action'])
# Convert Xcode-type variable references to sh-compatible environment
# variable references.
message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
action_string)
script = ''
# Include the optional message
if message_sh:
script += message_sh + '\n'
# Be sure the script runs in exec, and that if exec fails, the script
# exits signalling an error.
script += 'exec ' + action_string_sh + '\nexit 1\n'
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': action['inputs'],
'name': 'Action "' + action['action_name'] + '"',
'outputPaths': action['outputs'],
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# TODO(mark): Should verify that at most one of these is specified.
if int(action.get('process_outputs_as_sources', False)):
for output in action['outputs']:
AddSourceToTarget(output, type, pbxp, xct)
if int(action.get('process_outputs_as_mac_bundle_resources', False)):
for output in action['outputs']:
AddResourceToTarget(output, pbxp, xct)
# tgt_mac_bundle_resources holds the list of bundle resources so
# the rule processing can check against it.
if is_bundle:
tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
else:
tgt_mac_bundle_resources = []
# Add custom shell script phases driving "make" for "rules" sections.
#
# Xcode's built-in rule support is almost powerful enough to use directly,
# but there are a few significant deficiencies that render them unusable.
# There are workarounds for some of its inadequacies, but in aggregate,
# the workarounds added complexity to the generator, and some workarounds
# actually require input files to be crafted more carefully than I'd like.
# Consequently, until Xcode rules are made more capable, "rules" input
# sections will be handled in Xcode output by shell script build phases
# performed prior to the compilation phase.
#
# The following problems with Xcode rules were found. The numbers are
# Apple radar IDs. I hope that these shortcomings are addressed; I really
# liked having the rules handled directly in Xcode during the period that
# I was prototyping this.
#
# 6588600 Xcode compiles custom script rule outputs too soon, compilation
# fails. This occurs when rule outputs from distinct inputs are
# interdependent. The only workaround is to put rules and their
# inputs in a separate target from the one that compiles the rule
# outputs. This requires input file cooperation and it means that
# process_outputs_as_sources is unusable.
# 6584932 Need to declare that custom rule outputs should be excluded from
# compilation. A possible workaround is to lie to Xcode about a
# rule's output, giving it a dummy file it doesn't know how to
# compile. The rule action script would need to touch the dummy.
# 6584839 I need a way to declare additional inputs to a custom rule.
# A possible workaround is a shell script phase prior to
# compilation that touches a rule's primary input files if any
# would-be additional inputs are newer than the output. Modifying
# the source tree - even just modification times - feels dirty.
# 6564240 Xcode "custom script" build rules always dump all environment
# variables. This is a low-priority problem and is not a
# show-stopper.
rules_by_ext = {}
for rule in spec_rules:
rules_by_ext[rule['extension']] = rule
# First, some definitions:
#
# A "rule source" is a file that was listed in a target's "sources"
# list and will have a rule applied to it on the basis of matching the
# rule's "extensions" attribute. Rule sources are direct inputs to
# rules.
#
# Rule definitions may specify additional inputs in their "inputs"
# attribute. These additional inputs are used for dependency tracking
# purposes.
#
# A "concrete output" is a rule output with input-dependent variables
# resolved. For example, given a rule with:
# 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
# if the target's "sources" list contained "one.ext" and "two.ext",
# the "concrete output" for rule input "two.ext" would be "two.cc". If
# a rule specifies multiple outputs, each input file that the rule is
# applied to will have the same number of concrete outputs.
#
# If any concrete outputs are outdated or missing relative to their
# corresponding rule_source or to any specified additional input, the
# rule action must be performed to generate the concrete outputs.
# concrete_outputs_by_rule_source will have an item at the same index
# as the rule['rule_sources'] that it corresponds to. Each item is a
# list of all of the concrete outputs for the rule_source.
concrete_outputs_by_rule_source = []
# concrete_outputs_all is a flat list of all concrete outputs that this
# rule is able to produce, given the known set of input files
# (rule_sources) that apply to it.
concrete_outputs_all = []
# messages & actions are keyed by the same indices as rule['rule_sources']
# and concrete_outputs_by_rule_source. They contain the message and
# action to perform after resolving input-dependent variables. The
# message is optional, in which case None is stored for each rule source.
messages = []
actions = []
for rule_source in rule.get('rule_sources', []):
rule_source_dirname, rule_source_basename = \
posixpath.split(rule_source)
(rule_source_root, rule_source_ext) = \
posixpath.splitext(rule_source_basename)
# These are the same variable names that Xcode uses for its own native
# rule support. Because Xcode's rule engine is not being used, they
# need to be expanded as they are written to the makefile.
rule_input_dict = {
'INPUT_FILE_BASE': rule_source_root,
'INPUT_FILE_SUFFIX': rule_source_ext,
'INPUT_FILE_NAME': rule_source_basename,
'INPUT_FILE_PATH': rule_source,
'INPUT_FILE_DIRNAME': rule_source_dirname,
}
concrete_outputs_for_this_rule_source = []
for output in rule.get('outputs', []):
# Fortunately, Xcode and make both use $(VAR) format for their
# variables, so the expansion is the only transformation necessary.
# Any remaining $(VAR)-type variables in the string can be given
# directly to make, which will pick up the correct settings from
# what Xcode puts into the environment.
concrete_output = ExpandXcodeVariables(output, rule_input_dict)
concrete_outputs_for_this_rule_source.append(concrete_output)
# Add all concrete outputs to the project.
pbxp.AddOrGetFileInRootGroup(concrete_output)
concrete_outputs_by_rule_source.append(
concrete_outputs_for_this_rule_source)
concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
# TODO(mark): Should verify that at most one of these is specified.
if int(rule.get('process_outputs_as_sources', False)):
for output in concrete_outputs_for_this_rule_source:
AddSourceToTarget(output, type, pbxp, xct)
# If the file came from the mac_bundle_resources list or if the rule
# is marked to process outputs as bundle resource, do so.
was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
if was_mac_bundle_resource or \
int(rule.get('process_outputs_as_mac_bundle_resources', False)):
for output in concrete_outputs_for_this_rule_source:
AddResourceToTarget(output, pbxp, xct)
# Do we have a message to print when this rule runs?
message = rule.get('message')
if message:
message = gyp.common.EncodePOSIXShellArgument(message)
message = ExpandXcodeVariables(message, rule_input_dict)
messages.append(message)
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(rule['action'])
action = ExpandXcodeVariables(action_string, rule_input_dict)
actions.append(action)
if len(concrete_outputs_all) > 0:
# TODO(mark): There's a possibility for collision here. Consider
# target "t" rule "A_r" and target "t_A" rule "r".
makefile_name = '%s.make' % re.sub(
'[^a-zA-Z0-9_]', '_', '%s_%s' % (target_name, rule['rule_name']))
makefile_path = os.path.join(xcode_projects[build_file].path,
makefile_name)
# TODO(mark): try/close? Write to a temporary file and swap it only
# if it's got changes?
makefile = open(makefile_path, 'wb')
# make will build the first target in the makefile by default. By
# convention, it's called "all". List all (or at least one)
# concrete output for each rule source as a prerequisite of the "all"
# target.
makefile.write('all: \\\n')
for concrete_output_index in \
xrange(0, len(concrete_outputs_by_rule_source)):
# Only list the first (index [0]) concrete output of each input
# in the "all" target. Otherwise, a parallel make (-j > 1) would
# attempt to process each input multiple times simultaneously.
# Were that not an issue, "all" could just contain the entire list
# of concrete_outputs_all.
concrete_output = \
concrete_outputs_by_rule_source[concrete_output_index][0]
if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (concrete_output, eol))
for (rule_source, concrete_outputs, message, action) in \
zip(rule['rule_sources'], concrete_outputs_by_rule_source,
messages, actions):
makefile.write('\n')
# Add a rule that declares it can build each concrete output of a
# rule source. Collect the names of the directories that are
# required.
concrete_output_dirs = []
for concrete_output_index in xrange(0, len(concrete_outputs)):
concrete_output = concrete_outputs[concrete_output_index]
if concrete_output_index == 0:
bol = ''
else:
bol = ' '
makefile.write('%s%s \\\n' % (bol, concrete_output))
concrete_output_dir = posixpath.dirname(concrete_output)
if (concrete_output_dir and
concrete_output_dir not in concrete_output_dirs):
concrete_output_dirs.append(concrete_output_dir)
makefile.write(' : \\\n')
# The prerequisites for this rule are the rule source itself and
# the set of additional rule inputs, if any.
prerequisites = [rule_source]
prerequisites.extend(rule.get('inputs', []))
for prerequisite_index in xrange(0, len(prerequisites)):
prerequisite = prerequisites[prerequisite_index]
if prerequisite_index == len(prerequisites) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (prerequisite, eol))
# Make sure that output directories exist before executing the rule
# action.
if len(concrete_output_dirs) > 0:
makefile.write('\t@mkdir -p "%s"\n' %
'" "'.join(concrete_output_dirs))
# The rule message and action have already had the necessary variable
# substitutions performed.
if message:
# Mark it with note: so Xcode picks it up in build output.
makefile.write('\t@echo note: %s\n' % message)
makefile.write('\t%s\n' % action)
makefile.close()
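# Roughly, for a rule with sources one.ext and two.ext and outputs of
# ['$(INPUT_FILE_BASE).cc'], the makefile written above comes out as
# (sketch; recipe lines are tab-indented, and the mkdir and action lines
# depend on the rule):
#
#   all: \
#       one.cc \
#       two.cc
#
#   one.cc \
#       : \
#       one.ext
#           @mkdir -p "..."
#           <action expanded for one.ext>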
# It might be nice to ensure that needed output directories exist
# here rather than in each target in the Makefile, but that wouldn't
# work if there ever was a concrete output that had an input-dependent
# variable anywhere other than in the leaf position.
# Don't declare any inputPaths or outputPaths. If they're present,
# Xcode will provide a slight optimization by only running the script
# phase if any output is missing or outdated relative to any input.
# Unfortunately, it will also assume that all outputs are touched by
# the script, and if the outputs serve as files in a compilation
# phase, they will be unconditionally rebuilt. Since make might not
# rebuild everything that could be declared here as an output, this
# extra compilation activity is unnecessary. With inputPaths and
# outputPaths not supplied, make will always be called, but it knows
# enough to not do anything when everything is up-to-date.
# To help speed things up, pass -j COUNT to make so it does some work
# in parallel. Don't use ncpus because Xcode will build ncpus targets
# in parallel and if each target happens to have a rules step, there
# would be ncpus^2 things going. With a machine that has 2 quad-core
# Xeons, a build can quickly run out of processes based on
# scheduling/other tasks, and randomly failing builds are no good.
script = \
"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
if [ "${JOB_COUNT}" -gt 4 ]; then
JOB_COUNT=4
fi
exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
exit 1
""" % makefile_name
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'name': 'Rule "' + rule['rule_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# Extra rule inputs also go into the project file. Concrete outputs were
# already added when they were computed.
groups = ['inputs', 'inputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for group in groups:
for item in rule.get(group, []):
pbxp.AddOrGetFileInRootGroup(item)
# Add "sources".
for source in spec.get('sources', []):
(source_root, source_extension) = posixpath.splitext(source)
if source_extension[1:] not in rules_by_ext:
# AddSourceToTarget will add the file to a root group if it's not
# already there.
AddSourceToTarget(source, type, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(source)
# Add "mac_bundle_resources" and "mac_framework_private_headers" if
# it's a bundle of any type.
if is_bundle:
for resource in tgt_mac_bundle_resources:
(resource_root, resource_extension) = posixpath.splitext(resource)
if resource_extension[1:] not in rules_by_ext:
AddResourceToTarget(resource, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(resource)
for header in spec.get('mac_framework_private_headers', []):
AddHeaderToTarget(header, pbxp, xct, False)
# Add "mac_framework_headers". These can be valid for both frameworks
# and static libraries.
if is_bundle or type == 'static_library':
for header in spec.get('mac_framework_headers', []):
AddHeaderToTarget(header, pbxp, xct, True)
# Add "copies".
pbxcp_dict = {}
for copy_group in spec.get('copies', []):
dest = copy_group['destination']
if dest[0] not in ('/', '$'):
# Relative paths are relative to $(SRCROOT).
dest = '$(SRCROOT)/' + dest
# Coalesce multiple "copies" sections in the same target with the same
# "destination" property into the same PBXCopyFilesBuildPhase, otherwise
# they'll wind up with ID collisions.
pbxcp = pbxcp_dict.get(dest, None)
if pbxcp is None:
pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
'name': 'Copy to ' + copy_group['destination']
},
parent=xct)
pbxcp.SetDestination(dest)
# TODO(mark): The usual comment about this knowing too much about
# gyp.xcodeproj_file internals applies.
xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
pbxcp_dict[dest] = pbxcp
for file in copy_group['files']:
pbxcp.AddFile(file)
# Excluded files can also go into the project file.
if not skip_excluded_files:
for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
'mac_framework_private_headers']:
excluded_key = key + '_excluded'
for item in spec.get(excluded_key, []):
pbxp.AddOrGetFileInRootGroup(item)
# So can "inputs" and "outputs" sections of "actions" groups.
groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for action in spec.get('actions', []):
for group in groups:
for item in action.get(group, []):
# Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
# sources.
if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
pbxp.AddOrGetFileInRootGroup(item)
for postbuild in spec.get('postbuilds', []):
action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
script = 'exec ' + action_string_sh + '\nexit 1\n'
# Make the postbuild step depend on the output of ld or ar from this
# target. Apparently putting the script step after the link step isn't
# sufficient to ensure proper ordering in all cases. With an input
# declared but no outputs, the script step should run every time, as
# desired.
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
xct.AppendProperty('buildPhases', ssbp)
# Add dependencies before libraries, because adding a dependency may imply
# adding a library. It's preferable to keep dependencies listed first
# during a link phase so that they can override symbols that would
# otherwise be provided by libraries, which will usually include system
# libraries. On some systems, ld is finicky and even requires the
# libraries to be ordered in such a way that unresolved symbols in
# earlier-listed libraries may only be resolved by later-listed libraries.
# The Mac linker doesn't work that way, but other platforms do, and so
# their linker invocations need to be constructed in this way. There's
# no compelling reason for Xcode's linker invocations to differ.
if 'dependencies' in spec:
for dependency in spec['dependencies']:
xct.AddDependency(xcode_targets[dependency])
# The support project also gets the dependencies (in case they are
# needed for the actions/rules to work).
if support_xct:
support_xct.AddDependency(xcode_targets[dependency])
if 'libraries' in spec:
for library in spec['libraries']:
xct.FrameworksPhase().AddFile(library)
# Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
# I wish Xcode handled this automatically.
library_dir = posixpath.dirname(library)
if library_dir not in xcode_standard_library_dirs and (
not xct.HasBuildSetting(_library_search_paths_var) or
library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
xct.AppendBuildSetting(_library_search_paths_var, library_dir)
for configuration_name in configuration_names:
configuration = spec['configurations'][configuration_name]
xcbc = xct.ConfigurationNamed(configuration_name)
for include_dir in configuration.get('mac_framework_dirs', []):
xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
for include_dir in configuration.get('include_dirs', []):
xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
for library_dir in configuration.get('library_dirs', []):
if library_dir not in xcode_standard_library_dirs and (
not xcbc.HasBuildSetting(_library_search_paths_var) or
library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
if 'defines' in configuration:
for define in configuration['defines']:
set_define = EscapeXcodeDefine(define)
xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
if 'xcode_settings' in configuration:
for xck, xcv in configuration['xcode_settings'].iteritems():
xcbc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in configuration:
config_ref = pbxp.AddOrGetFileInRootGroup(
configuration['xcode_config_file'])
xcbc.SetBaseConfiguration(config_ref)
build_files = []
for build_file, build_file_dict in data.iteritems():
if build_file.endswith('.gyp'):
build_files.append(build_file)
for build_file in build_files:
xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
for build_file in build_files:
xcode_projects[build_file].Finalize2(xcode_targets,
xcode_target_to_target_dict)
for build_file in build_files:
xcode_projects[build_file].Write()
|
Guneet-Dhillon/mxnet
|
refs/heads/master
|
python/mxnet/gluon/trainer.py
|
7
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=
"""Parameter optimizer."""
from .. import optimizer as opt
from ..model import _create_kvstore
from .parameter import ParameterDict, Parameter
class Trainer(object):
"""Applies an `Optimizer` on a set of Parameters. Trainer should
be used together with `autograd`.
Parameters
----------
params : ParameterDict
The set of parameters to optimize.
optimizer : str or Optimizer
The optimizer to use. See
`help <http://mxnet.io/api/python/optimization.html#the-mxnet-optimizer-package>`_
on Optimizer for a list of available optimizers.
optimizer_params : dict
Keyword arguments to be passed to the optimizer constructor. For example,
`{'learning_rate': 0.1}`. All optimizers accept learning_rate, wd (weight decay),
clip_gradient, and lr_scheduler. See each optimizer's
constructor for a list of additional supported arguments.
kvstore : str or KVStore
kvstore type for multi-gpu and distributed training. See help on
:any:`mxnet.kvstore.create` for more information.
"""
def __init__(self, params, optimizer, optimizer_params=None, kvstore='device'):
if isinstance(params, (dict, ParameterDict)):
params = list(params.values())
if not isinstance(params, (list, tuple)):
raise ValueError(
"First argument must be a list or dict of Parameters, " \
"got %s."%(type(params)))
self._params = []
for param in params:
if not isinstance(param, Parameter):
raise ValueError(
"First argument must be a list or dict of Parameters, " \
"got list of %s."%(type(param)))
self._params.append(param)
optimizer_params = optimizer_params if optimizer_params else {}
self._scale = optimizer_params.get('rescale_grad', 1.0)
self._contexts = self._check_contexts()
self._init_optimizer(optimizer, optimizer_params)
self._kv_initialized = False
self._kvstore = kvstore
def _check_contexts(self):
contexts = None
for param in self._params:
ctx = param.list_ctx()
assert contexts is None or contexts == ctx, \
"All Parameters must be initialized on the same set of contexts, " \
"but Parameter %s is initialized on %s while previous Parameters " \
"are initialized on %s."%(param.name, str(ctx), str(contexts))
contexts = ctx
return contexts
def _init_optimizer(self, optimizer, optimizer_params):
param_dict = {i: param for i, param in enumerate(self._params)}
if isinstance(optimizer, opt.Optimizer):
assert not optimizer_params, \
"optimizer_params must be None if optimizer is an instance of " \
"Optimizer instead of str"
self._optimizer = optimizer
self._optimizer.param_dict = param_dict
else:
self._optimizer = opt.create(optimizer, param_dict=param_dict,
**optimizer_params)
self._updaters = [opt.get_updater(self._optimizer) \
for _ in self._contexts]
def _init_kvstore(self):
arg_arrays = {param.name: param.data(self._contexts[0]) for param in self._params}
kvstore, update_on_kvstore = _create_kvstore(self._kvstore, len(self._contexts),
arg_arrays)
if kvstore:
if 'dist' in kvstore.type:
update_on_kvstore = False
for i, param in enumerate(self._params):
param_arrays = param.list_data()
kvstore.init(i, param_arrays[0])
kvstore.pull(i, param_arrays, priority=-i)
if update_on_kvstore:
kvstore.set_optimizer(self._optimizer)
self._kvstore = kvstore
self._update_on_kvstore = update_on_kvstore
else:
self._kvstore = None
self._update_on_kvstore = None
self._kv_initialized = True
def step(self, batch_size, ignore_stale_grad=False):
"""Makes one step of parameter update. Should be called after
`autograd.compute_gradient` and outside of `record()` scope.
Parameters
----------
batch_size : int
Batch size of data processed. Gradient will be normalized by `1/batch_size`.
Set this to 1 if you normalized loss manually with `loss = mean(loss)`.
ignore_stale_grad : bool, optional, default=False
If true, ignores Parameters with stale gradient (gradient that has not
been updated by `backward` after last step) and skip update.
"""
if not self._kv_initialized:
self._init_kvstore()
self._optimizer.rescale_grad = self._scale / batch_size
for i, param in enumerate(self._params):
if param.grad_req == 'null':
continue
if not ignore_stale_grad:
for data in param.list_data():
if not data._fresh_grad:
raise UserWarning(
"Gradient of Parameter `%s` on context %s has not been updated "
"by backward since last `step`. This could mean a bug in your "
"model that maked it only use a subset of the Parameters (Blocks) "
"for this iteration. If you are intentionally only using a subset, "
"call step with ignore_stale_grad=True to suppress this "
"warning and skip updating of Parameters with stale gradient" \
%(param.name, str(data.context)))
if self._kvstore:
self._kvstore.push(i, param.list_grad(), priority=-i)
if self._update_on_kvstore:
self._kvstore.pull(i, param.list_data(), priority=-i)
continue
else:
self._kvstore.pull(i, param.list_grad(), priority=-i)
for upd, arr, grad in zip(self._updaters, param.list_data(), param.list_grad()):
if not ignore_stale_grad or arr._fresh_grad:
upd(i, grad, arr)
arr._fresh_grad = False
|
msparapa/das
|
refs/heads/master
|
examples/Hypersonics/lazythings.py
|
1
|
from sympy.core import Symbol
from sympy.functions import exp, sin, cos, tan
from sympy.solvers import solve
from das.legacy.Analytics.Mathematics import PoissonManifold
from das.utils.keyboard import keyboard
M = PoissonManifold(name='basic',coords=['h','theta','phi','v','gam','psi','lambda_h','lambda_theta','lambda_phi','lambda_v','lambda_gam','lambda_psi'], independent=['t'], verbose=2)
coords = M.coords
h = coords[0]
theta = coords[1]
phi = coords[2]
v = coords[3]
gam = coords[4]
psi = coords[5]
lambda_h = coords[6]
lambda_theta = coords[7]
lambda_phi = coords[8]
lambda_v = coords[9]
lambda_gam = coords[10]
lambda_psi = coords[11]
re = Symbol('re')
Aref = Symbol('Aref')
rho0 = Symbol('rho0')
H = Symbol('H')
alpha = Symbol('alpha')
beta = Symbol('beta')
mass = Symbol('mass')
mu = Symbol('mu')
rho = rho0*exp(-h/H)
Cl = 1.5658*alpha
Cd = 1.6537*alpha**2 + 0.0612
D = 0.5*rho*v**2*Cd*Aref
L = 0.5*rho*v**2*Cl*Aref
r = re+h
X = [0 for _ in range(6)]
X[0] = v*sin(gam)
X[1] = v*cos(gam)*cos(psi)/(r*cos(phi))
X[2] = v*cos(gam)*sin(psi)/r
X[3] = -D/mass - mu*sin(gam)/(r**2)
X[4] = L*cos(beta)/(mass*v) - mu/(v*r**2)*cos(gam) + v/r*cos(gam)
X[5] = L*sin(beta)/(mass*cos(gam)*v) - v/r*cos(gam)*cos(psi)*tan(phi)
H = 1 + X[0]*lambda_h + X[1]*lambda_theta + X[2]*lambda_phi + X[3]*lambda_v + X[4]*lambda_gam + X[5]*lambda_psi
dHdu = [H.diff(alpha), H.diff(beta)]
p = solve(dHdu, alpha, beta)
print(p)
keyboard()
|
zaolij/libaudioverse
|
refs/heads/master
|
bindings/python/examples/sim3d.py
|
1
|
#demonstrates how to use the 3d simulation.
import libaudioverse
import collections
libaudioverse.initialize()
sim = libaudioverse.Simulation()
sim.set_output_device(-1)
world = libaudioverse.EnvironmentNode(sim, "default")
source = libaudioverse.SourceNode(sim, world)
print "Enter a path to a sound file."
filepath = raw_input()
n = libaudioverse.BufferNode(sim)
b = libaudioverse.Buffer(sim)
b.load_from_file(filepath)
n.buffer = b
n.connect(0, source, 0)
n.looping.value = True
world.connect_simulation(0)
print """Enter python expressions that evaluate to 3-tuples (x, y, z).
Positive x is to your right, positive y is above you, and positive z is behind you.
Enter quit to quit."""
while True:
command = raw_input()
if command == 'quit':
break
vect = eval(command)
if not isinstance(vect, collections.Sized) or len(vect) != 3:
print "Must evaluate to a 3-tuple. Try again"
continue
source.position.value = vect
libaudioverse.shutdown()
|
maleficarium/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/xtube.py
|
18
|
from __future__ import unicode_literals
import itertools
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
orderedSet,
parse_duration,
sanitized_Request,
str_to_int,
)
class XTubeIE(InfoExtractor):
_VALID_URL = r'''(?x)
(?:
xtube:|
https?://(?:www\.)?xtube\.com/(?:watch\.php\?.*\bv=|video-watch/(?P<display_id>[^/]+)-)
)
(?P<id>[^/?&#]+)
'''
_TESTS = [{
# old URL schema
'url': 'http://www.xtube.com/watch.php?v=kVTUy_G222_',
'md5': '092fbdd3cbe292c920ef6fc6a8a9cdab',
'info_dict': {
'id': 'kVTUy_G222_',
'ext': 'mp4',
'title': 'strange erotica',
'description': 'contains:an ET kind of thing',
'uploader': 'greenshowers',
'duration': 450,
'view_count': int,
'comment_count': int,
'age_limit': 18,
}
}, {
# new URL schema
'url': 'http://www.xtube.com/video-watch/strange-erotica-625837',
'only_matching': True,
}, {
'url': 'xtube:625837',
'only_matching': True,
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('display_id')
if not display_id:
display_id = video_id
url = 'http://www.xtube.com/watch.php?v=%s' % video_id
req = sanitized_Request(url)
req.add_header('Cookie', 'age_verified=1; cookiesAccepted=1')
webpage = self._download_webpage(req, display_id)
sources = self._parse_json(self._search_regex(
r'sources\s*:\s*({.+?}),', webpage, 'sources'), video_id)
formats = []
for format_id, format_url in sources.items():
formats.append({
'url': format_url,
'format_id': format_id,
'height': int_or_none(format_id),
})
self._sort_formats(formats)
title = self._search_regex(
(r'<h1>(?P<title>[^<]+)</h1>', r'videoTitle\s*:\s*(["\'])(?P<title>.+?)\1'),
webpage, 'title', group='title')
description = self._search_regex(
r'</h1>\s*<p>([^<]+)', webpage, 'description', fatal=False)
uploader = self._search_regex(
(r'<input[^>]+name="contentOwnerId"[^>]+value="([^"]+)"',
r'<span[^>]+class="nickname"[^>]*>([^<]+)'),
webpage, 'uploader', fatal=False)
duration = parse_duration(self._search_regex(
r'<dt>Runtime:</dt>\s*<dd>([^<]+)</dd>',
webpage, 'duration', fatal=False))
view_count = str_to_int(self._search_regex(
r'<dt>Views:</dt>\s*<dd>([\d,\.]+)</dd>',
webpage, 'view count', fatal=False))
comment_count = str_to_int(self._html_search_regex(
r'>Comments? \(([\d,\.]+)\)<',
webpage, 'comment count', fatal=False))
return {
'id': video_id,
'display_id': display_id,
'title': title,
'description': description,
'uploader': uploader,
'duration': duration,
'view_count': view_count,
'comment_count': comment_count,
'age_limit': 18,
'formats': formats,
}
class XTubeUserIE(InfoExtractor):
IE_DESC = 'XTube user profile'
_VALID_URL = r'https?://(?:www\.)?xtube\.com/profile/(?P<id>[^/]+-\d+)'
_TEST = {
'url': 'http://www.xtube.com/profile/greenshowers-4056496',
'info_dict': {
'id': 'greenshowers-4056496',
'age_limit': 18,
},
'playlist_mincount': 155,
}
def _real_extract(self, url):
user_id = self._match_id(url)
entries = []
for pagenum in itertools.count(1):
request = sanitized_Request(
'http://www.xtube.com/profile/%s/videos/%d' % (user_id, pagenum),
headers={
'Cookie': 'popunder=4',
'X-Requested-With': 'XMLHttpRequest',
'Referer': url,
})
page = self._download_json(
request, user_id, 'Downloading videos JSON page %d' % pagenum)
html = page.get('html')
if not html:
break
for video_id in orderedSet([video_id for _, video_id in re.findall(
r'data-plid=(["\'])(.+?)\1', html)]):
entries.append(self.url_result('xtube:%s' % video_id, XTubeIE.ie_key()))
page_count = int_or_none(page.get('pageCount'))
if not page_count or pagenum == page_count:
break
playlist = self.playlist_result(entries, user_id)
playlist['age_limit'] = 18
return playlist
|
shoelzer/buildbot
|
refs/heads/master
|
master/buildbot/test/regressions/test_bad_change_properties_rows.py
|
10
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from twisted.trial import unittest
from buildbot.db import changes
from buildbot.test.fake import fakedb
from buildbot.test.util import connector_component
class TestBadRows(connector_component.ConnectorComponentMixin,
unittest.TestCase):
# See bug #1952 for details. This checks that users who used a development
# version between 0.8.3 and 0.8.4 get reasonable behavior even though some
# rows in the change_properties database do not contain a proper [value,
# source] tuple.
def setUp(self):
d = self.setUpConnectorComponent(
table_names=['patches', 'sourcestamps', 'changes',
'change_properties', 'change_files'])
@d.addCallback
def finish_setup(_):
self.db.changes = changes.ChangesConnectorComponent(self.db)
return d
def tearDown(self):
return self.tearDownConnectorComponent()
def test_bogus_row_no_source(self):
d = self.insertTestData([
fakedb.SourceStamp(id=10),
fakedb.ChangeProperty(changeid=13, property_name='devel',
property_value='"no source"'),
fakedb.Change(changeid=13, sourcestampid=10),
])
@d.addCallback
def get13(_):
return self.db.changes.getChange(13)
@d.addCallback
def check13(c):
self.assertEqual(c['properties'],
dict(devel=('no source', 'Change')))
return d
def test_bogus_row_jsoned_list(self):
d = self.insertTestData([
fakedb.SourceStamp(id=10),
fakedb.ChangeProperty(changeid=13, property_name='devel',
property_value='[1, 2]'),
fakedb.Change(changeid=13, sourcestampid=10),
])
@d.addCallback
def get13(_):
return self.db.changes.getChange(13)
@d.addCallback
def check13(c):
self.assertEqual(c['properties'],
dict(devel=([1, 2], 'Change')))
return d
|
dorotan/pythontraining
|
refs/heads/master
|
env/Lib/site-packages/_pytest/doctest.py
|
19
|
""" discover and run doctests in modules and test files."""
from __future__ import absolute_import
import traceback
import pytest
from _pytest._code.code import ExceptionInfo, ReprFileLocation, TerminalRepr
from _pytest.fixtures import FixtureRequest
DOCTEST_REPORT_CHOICE_NONE = 'none'
DOCTEST_REPORT_CHOICE_CDIFF = 'cdiff'
DOCTEST_REPORT_CHOICE_NDIFF = 'ndiff'
DOCTEST_REPORT_CHOICE_UDIFF = 'udiff'
DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = 'only_first_failure'
DOCTEST_REPORT_CHOICES = (
DOCTEST_REPORT_CHOICE_NONE,
DOCTEST_REPORT_CHOICE_CDIFF,
DOCTEST_REPORT_CHOICE_NDIFF,
DOCTEST_REPORT_CHOICE_UDIFF,
DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE,
)
def pytest_addoption(parser):
parser.addini('doctest_optionflags', 'option flags for doctests',
type="args", default=["ELLIPSIS"])
group = parser.getgroup("collect")
group.addoption("--doctest-modules",
action="store_true", default=False,
help="run doctests in all .py modules",
dest="doctestmodules")
group.addoption("--doctest-report",
type=str.lower, default="udiff",
help="choose another output format for diffs on doctest failure",
choices=DOCTEST_REPORT_CHOICES,
dest="doctestreport")
group.addoption("--doctest-glob",
action="append", default=[], metavar="pat",
help="doctests file matching pattern, default: test*.txt",
dest="doctestglob")
group.addoption("--doctest-ignore-import-errors",
action="store_true", default=False,
help="ignore doctest ImportErrors",
dest="doctest_ignore_import_errors")
def pytest_collect_file(path, parent):
config = parent.config
if path.ext == ".py":
if config.option.doctestmodules:
return DoctestModule(path, parent)
elif _is_doctest(config, path, parent):
return DoctestTextfile(path, parent)
def _is_doctest(config, path, parent):
if path.ext in ('.txt', '.rst') and parent.session.isinitpath(path):
return True
globs = config.getoption("doctestglob") or ['test*.txt']
for glob in globs:
if path.check(fnmatch=glob):
return True
return False
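# Illustrative outcomes (paths are hypothetical): an 'intro.rst' passed
# directly on the command line collects as a doctest file, and with the
# default glob a 'test_api.txt' matches 'test*.txt' while 'notes.txt'
# does not.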
class ReprFailDoctest(TerminalRepr):
def __init__(self, reprlocation, lines):
self.reprlocation = reprlocation
self.lines = lines
def toterminal(self, tw):
for line in self.lines:
tw.line(line)
self.reprlocation.toterminal(tw)
class DoctestItem(pytest.Item):
def __init__(self, name, parent, runner=None, dtest=None):
super(DoctestItem, self).__init__(name, parent)
self.runner = runner
self.dtest = dtest
self.obj = None
self.fixture_request = None
def setup(self):
if self.dtest is not None:
self.fixture_request = _setup_fixtures(self)
globs = dict(getfixture=self.fixture_request.getfixturevalue)
for name, value in self.fixture_request.getfixturevalue('doctest_namespace').items():
globs[name] = value
self.dtest.globs.update(globs)
def runtest(self):
_check_all_skipped(self.dtest)
self.runner.run(self.dtest)
def repr_failure(self, excinfo):
import doctest
if excinfo.errisinstance((doctest.DocTestFailure,
doctest.UnexpectedException)):
doctestfailure = excinfo.value
example = doctestfailure.example
test = doctestfailure.test
filename = test.filename
if test.lineno is None:
lineno = None
else:
lineno = test.lineno + example.lineno + 1
message = excinfo.type.__name__
reprlocation = ReprFileLocation(filename, lineno, message)
checker = _get_checker()
report_choice = _get_report_choice(self.config.getoption("doctestreport"))
if lineno is not None:
lines = doctestfailure.test.docstring.splitlines(False)
# add line numbers to the left of the error message
lines = ["%03d %s" % (i + test.lineno + 1, x)
for (i, x) in enumerate(lines)]
# trim docstring error lines to 10
lines = lines[example.lineno - 9:example.lineno + 1]
else:
lines = ['EXAMPLE LOCATION UNKNOWN, not showing all tests of that example']
indent = '>>>'
for line in example.source.splitlines():
lines.append('??? %s %s' % (indent, line))
indent = '...'
if excinfo.errisinstance(doctest.DocTestFailure):
lines += checker.output_difference(example,
doctestfailure.got, report_choice).split("\n")
else:
inner_excinfo = ExceptionInfo(excinfo.value.exc_info)
lines += ["UNEXPECTED EXCEPTION: %s" %
repr(inner_excinfo.value)]
lines += traceback.format_exception(*excinfo.value.exc_info)
return ReprFailDoctest(reprlocation, lines)
else:
return super(DoctestItem, self).repr_failure(excinfo)
def reportinfo(self):
return self.fspath, None, "[doctest] %s" % self.name
def _get_flag_lookup():
import doctest
return dict(DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1,
DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE,
NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE,
ELLIPSIS=doctest.ELLIPSIS,
IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL,
COMPARISON_FLAGS=doctest.COMPARISON_FLAGS,
ALLOW_UNICODE=_get_allow_unicode_flag(),
ALLOW_BYTES=_get_allow_bytes_flag(),
)
def get_optionflags(parent):
optionflags_str = parent.config.getini("doctest_optionflags")
flag_lookup_table = _get_flag_lookup()
flag_acc = 0
for flag in optionflags_str:
flag_acc |= flag_lookup_table[flag]
return flag_acc
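# For illustration, the flags accumulate by bitwise OR. With an assumed ini
# file such as
#
#     [pytest]
#     doctest_optionflags = NORMALIZE_WHITESPACE ELLIPSIS
#
# get_optionflags() returns
# doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS.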
class DoctestTextfile(pytest.Module):
obj = None
def collect(self):
import doctest
# inspired by doctest.testfile; ideally we would use it directly,
# but it doesn't support passing a custom checker
text = self.fspath.read()
filename = str(self.fspath)
name = self.fspath.basename
globs = {'__name__': '__main__'}
optionflags = get_optionflags(self)
runner = doctest.DebugRunner(verbose=0, optionflags=optionflags,
checker=_get_checker())
parser = doctest.DocTestParser()
test = parser.get_doctest(text, globs, name, filename, 0)
if test.examples:
yield DoctestItem(test.name, self, runner, test)
def _check_all_skipped(test):
"""raises pytest.skip() if all examples in the given DocTest have the SKIP
option set.
"""
import doctest
all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples)
if all_skipped:
pytest.skip('all tests skipped by +SKIP option')
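# For reference, an individual example opts out with the standard doctest
# directive (illustrative snippet, not part of this module):
#
#     >>> expensive_network_call()  # doctest: +SKIP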
class DoctestModule(pytest.Module):
def collect(self):
import doctest
if self.fspath.basename == "conftest.py":
module = self.config.pluginmanager._importconftest(self.fspath)
else:
try:
module = self.fspath.pyimport()
except ImportError:
if self.config.getvalue('doctest_ignore_import_errors'):
pytest.skip('unable to import module %r' % self.fspath)
else:
raise
# uses internal doctest module parsing mechanism
finder = doctest.DocTestFinder()
optionflags = get_optionflags(self)
runner = doctest.DebugRunner(verbose=0, optionflags=optionflags,
checker=_get_checker())
for test in finder.find(module, module.__name__):
if test.examples: # skip empty doctests
yield DoctestItem(test.name, self, runner, test)
def _setup_fixtures(doctest_item):
"""
    Used by DoctestTextfile and DoctestItem to set up fixture information.
"""
def func():
pass
doctest_item.funcargs = {}
fm = doctest_item.session._fixturemanager
doctest_item._fixtureinfo = fm.getfixtureinfo(node=doctest_item, func=func,
cls=None, funcargs=False)
fixture_request = FixtureRequest(doctest_item)
fixture_request._fillfixtures()
return fixture_request
def _get_checker():
"""
    Returns a doctest.OutputChecker subclass that takes into account the
ALLOW_UNICODE option to ignore u'' prefixes in strings and ALLOW_BYTES
to strip b'' prefixes.
Useful when the same doctest should run in Python 2 and Python 3.
An inner class is used to avoid importing "doctest" at the module
level.
"""
if hasattr(_get_checker, 'LiteralsOutputChecker'):
return _get_checker.LiteralsOutputChecker()
import doctest
import re
class LiteralsOutputChecker(doctest.OutputChecker):
"""
Copied from doctest_nose_plugin.py from the nltk project:
https://github.com/nltk/nltk
Further extended to also support byte literals.
"""
_unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
_bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
def check_output(self, want, got, optionflags):
res = doctest.OutputChecker.check_output(self, want, got,
optionflags)
if res:
return True
allow_unicode = optionflags & _get_allow_unicode_flag()
allow_bytes = optionflags & _get_allow_bytes_flag()
if not allow_unicode and not allow_bytes:
return False
else: # pragma: no cover
def remove_prefixes(regex, txt):
return re.sub(regex, r'\1\2', txt)
if allow_unicode:
want = remove_prefixes(self._unicode_literal_re, want)
got = remove_prefixes(self._unicode_literal_re, got)
if allow_bytes:
want = remove_prefixes(self._bytes_literal_re, want)
got = remove_prefixes(self._bytes_literal_re, got)
res = doctest.OutputChecker.check_output(self, want, got,
optionflags)
return res
_get_checker.LiteralsOutputChecker = LiteralsOutputChecker
return _get_checker.LiteralsOutputChecker()
def _get_allow_unicode_flag():
"""
Registers and returns the ALLOW_UNICODE flag.
"""
import doctest
return doctest.register_optionflag('ALLOW_UNICODE')
def _get_allow_bytes_flag():
"""
Registers and returns the ALLOW_BYTES flag.
"""
import doctest
return doctest.register_optionflag('ALLOW_BYTES')
def _get_report_choice(key):
"""
    This function returns the actual `doctest` module flag value. We want to do
    this as late as possible to avoid importing `doctest` and all its
    dependencies when parsing options, as it adds overhead and breaks tests.
"""
import doctest
return {
DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF,
DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF,
DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF,
DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE,
DOCTEST_REPORT_CHOICE_NONE: 0,
}[key]
@pytest.fixture(scope='session')
def doctest_namespace():
"""
Inject names into the doctest namespace.
"""
return dict()
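# Typical use of this fixture (a sketch for a user's conftest.py, not part of
# this plugin): inject a name so doctests can use it without importing it.
#
#     import numpy
#     import pytest
#
#     @pytest.fixture(autouse=True)
#     def add_np(doctest_namespace):
#         doctest_namespace['np'] = numpy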
|
CAES-Python/Coffee_maker
|
refs/heads/master
|
Keurig_K55/is_ready_to_brew.py
|
1
|
#!/usr/bin/python
import RPi.GPIO as GPIO
import time
input_pin = 2  # BCM pin wired to the brew-button LED (avoids shadowing the builtin 'input')
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(input_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# when the Keurig K55 is ready to brew it flashes
# the brew button LEDs on and off each second,
# so sample the LED state three times over one
# second to see if it is flashing
first = GPIO.input(input_pin)
time.sleep(.5)
second = GPIO.input(input_pin)
time.sleep(.5)
third = GPIO.input(input_pin)
if((first == second) and (second == third)):
print('NO')
else:
print('YES')
GPIO.cleanup()
#print first, second, third
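# A slightly more general sketch of the same idea (hypothetical helper, not
# used above): take N readings and report flashing if any two consecutive
# samples differ.
#
#     def is_flashing(pin, samples=4, interval=0.5):
#         readings = []
#         for _ in range(samples):
#             readings.append(GPIO.input(pin))
#             time.sleep(interval)
#         return any(a != b for a, b in zip(readings, readings[1:]))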
|
suda/micropython
|
refs/heads/master
|
tests/float/true_value.py
|
102
|
# Test true-ish value handling
if not 0.0:
print("float 0")
if not 0+0j:
print("complex 0")
|
ff94315/hiwifi-openwrt-HC5661-HC5761
|
refs/heads/master
|
staging_dir/host/lib/python2.7/lib-tk/Tkinter.py
|
18
|
"""Wrapper functions for Tcl/Tk.
Tkinter provides classes which allow the display, positioning and
control of widgets. Toplevel widgets are Tk and Toplevel. Other
widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton,
Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox,
LabelFrame and PanedWindow.
Properties of the widgets are specified with keyword arguments.
Keyword arguments have the same name as the corresponding resource
under Tk.
Widgets are positioned with one of the geometry managers Place, Pack
or Grid. These managers can be called with methods place, pack, grid
available in every Widget.
Actions are bound to events by resources (e.g. keyword argument
command) or with the method bind.
Example (Hello, World):
import Tkinter
from Tkconstants import *
tk = Tkinter.Tk()
frame = Tkinter.Frame(tk, relief=RIDGE, borderwidth=2)
frame.pack(fill=BOTH,expand=1)
label = Tkinter.Label(frame, text="Hello, World")
label.pack(fill=X, expand=1)
button = Tkinter.Button(frame,text="Exit",command=tk.destroy)
button.pack(side=BOTTOM)
tk.mainloop()
"""
__version__ = "$Revision: 81008 $"
import sys
if sys.platform == "win32":
# Attempt to configure Tcl/Tk without requiring PATH
import FixTk
import _tkinter # If this fails your Python may not be configured for Tk
tkinter = _tkinter # b/w compat for export
TclError = _tkinter.TclError
from types import *
from Tkconstants import *
wantobjects = 1
TkVersion = float(_tkinter.TK_VERSION)
TclVersion = float(_tkinter.TCL_VERSION)
READABLE = _tkinter.READABLE
WRITABLE = _tkinter.WRITABLE
EXCEPTION = _tkinter.EXCEPTION
# These are not always defined, e.g. not on Win32 with Tk 8.0 :-(
try: _tkinter.createfilehandler
except AttributeError: _tkinter.createfilehandler = None
try: _tkinter.deletefilehandler
except AttributeError: _tkinter.deletefilehandler = None
def _flatten(tuple):
"""Internal function."""
res = ()
for item in tuple:
if type(item) in (TupleType, ListType):
res = res + _flatten(item)
elif item is not None:
res = res + (item,)
return res
try: _flatten = _tkinter._flatten
except AttributeError: pass
def _cnfmerge(cnfs):
"""Internal function."""
if type(cnfs) is DictionaryType:
return cnfs
elif type(cnfs) in (NoneType, StringType):
return cnfs
else:
cnf = {}
for c in _flatten(cnfs):
try:
cnf.update(c)
except (AttributeError, TypeError), msg:
print "_cnfmerge: fallback due to:", msg
for k, v in c.items():
cnf[k] = v
return cnf
try: _cnfmerge = _tkinter._cnfmerge
except AttributeError: pass
class Event:
"""Container for the properties of an event.
Instances of this type are generated if one of the following events occurs:
KeyPress, KeyRelease - for keyboard events
ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events
Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate,
Colormap, Gravity, Reparent, Property, Destroy, Activate,
Deactivate - for window events.
If a callback function for one of these events is registered
using bind, bind_all, bind_class, or tag_bind, the callback is
called with an Event as first argument. It will have the
following attributes (in braces are the event types for which
the attribute is valid):
serial - serial number of event
num - mouse button pressed (ButtonPress, ButtonRelease)
focus - whether the window has the focus (Enter, Leave)
height - height of the exposed window (Configure, Expose)
width - width of the exposed window (Configure, Expose)
keycode - keycode of the pressed key (KeyPress, KeyRelease)
state - state of the event as a number (ButtonPress, ButtonRelease,
Enter, KeyPress, KeyRelease,
Leave, Motion)
state - state as a string (Visibility)
time - when the event occurred
x - x-position of the mouse
y - y-position of the mouse
x_root - x-position of the mouse on the screen
(ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
y_root - y-position of the mouse on the screen
(ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
char - pressed character (KeyPress, KeyRelease)
send_event - see X/Windows documentation
keysym - keysym of the event as a string (KeyPress, KeyRelease)
keysym_num - keysym of the event as a number (KeyPress, KeyRelease)
type - type of the event as a number
widget - widget in which the event occurred
delta - delta of wheel movement (MouseWheel)
"""
pass
_support_default_root = 1
_default_root = None
def NoDefaultRoot():
"""Inhibit setting of default root window.
    Call this function to prevent the first instance of
    Tk from being used for windows without an explicit parent window.
"""
global _support_default_root
_support_default_root = 0
global _default_root
_default_root = None
del _default_root
def _tkerror(err):
"""Internal function."""
pass
def _exit(code='0'):
"""Internal function. Calling it will throw the exception SystemExit."""
raise SystemExit, code
_varnum = 0
class Variable:
"""Class to define value holders for e.g. buttons.
Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
that constrain the type of the value returned from get()."""
_default = ""
def __init__(self, master=None, value=None, name=None):
"""Construct a variable
MASTER can be given as master widget.
VALUE is an optional value (defaults to "")
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
global _varnum
if not master:
master = _default_root
self._master = master
self._tk = master.tk
if name:
self._name = name
else:
self._name = 'PY_VAR' + repr(_varnum)
_varnum += 1
if value is not None:
self.set(value)
elif not self._tk.call("info", "exists", self._name):
self.set(self._default)
def __del__(self):
"""Unset the variable in Tcl."""
self._tk.globalunsetvar(self._name)
def __str__(self):
"""Return the name of the variable in Tcl."""
return self._name
def set(self, value):
"""Set the variable to VALUE."""
return self._tk.globalsetvar(self._name, value)
def get(self):
"""Return value of variable."""
return self._tk.globalgetvar(self._name)
def trace_variable(self, mode, callback):
"""Define a trace callback for the variable.
MODE is one of "r", "w", "u" for read, write, undefine.
CALLBACK must be a function which is called when
the variable is read, written or undefined.
Return the name of the callback.
"""
cbname = self._master._register(callback)
self._tk.call("trace", "variable", self._name, mode, cbname)
return cbname
trace = trace_variable
def trace_vdelete(self, mode, cbname):
"""Delete the trace callback for a variable.
MODE is one of "r", "w", "u" for read, write, undefine.
CBNAME is the name of the callback returned from trace_variable or trace.
"""
self._tk.call("trace", "vdelete", self._name, mode, cbname)
self._master.deletecommand(cbname)
def trace_vinfo(self):
"""Return all trace callback information."""
return map(self._tk.split, self._tk.splitlist(
self._tk.call("trace", "vinfo", self._name)))
def __eq__(self, other):
"""Comparison for equality (==).
Note: if the Variable's master matters to behavior
also compare self._master == other._master
"""
return self.__class__.__name__ == other.__class__.__name__ \
and self._name == other._name
class StringVar(Variable):
"""Value holder for strings variables."""
_default = ""
def __init__(self, master=None, value=None, name=None):
"""Construct a string variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to "")
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def get(self):
"""Return value of variable as string."""
value = self._tk.globalgetvar(self._name)
if isinstance(value, basestring):
return value
return str(value)
class IntVar(Variable):
"""Value holder for integer variables."""
_default = 0
def __init__(self, master=None, value=None, name=None):
"""Construct an integer variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to 0)
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def set(self, value):
"""Set the variable to value, converting booleans to integers."""
if isinstance(value, bool):
value = int(value)
return Variable.set(self, value)
def get(self):
"""Return the value of the variable as an integer."""
return getint(self._tk.globalgetvar(self._name))
class DoubleVar(Variable):
"""Value holder for float variables."""
_default = 0.0
def __init__(self, master=None, value=None, name=None):
"""Construct a float variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to 0.0)
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def get(self):
"""Return the value of the variable as a float."""
return getdouble(self._tk.globalgetvar(self._name))
class BooleanVar(Variable):
"""Value holder for boolean variables."""
_default = False
def __init__(self, master=None, value=None, name=None):
"""Construct a boolean variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to False)
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def get(self):
"""Return the value of the variable as a bool."""
return self._tk.getboolean(self._tk.globalgetvar(self._name))
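# Minimal usage sketch for the Variable classes (illustrative; assumes a
# running Tk instance):
#
#     root = Tk()
#     name = StringVar(master=root, value='guest')
#     def report(*args):
#         print 'name is now', name.get()
#     cbname = name.trace_variable('w', report)
#     name.set('admin')                 # triggers report()
#     name.trace_vdelete('w', cbname)   # detach the trace again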
def mainloop(n=0):
"""Run the main loop of Tcl."""
_default_root.tk.mainloop(n)
getint = int
getdouble = float
def getboolean(s):
"""Convert true and false to integer values 1 and 0."""
return _default_root.tk.getboolean(s)
# Methods defined on both toplevel and interior widgets
class Misc:
"""Internal class.
Base class which defines methods common for interior widgets."""
# XXX font command?
_tclCommands = None
def destroy(self):
"""Internal function.
Delete all Tcl commands created for
this widget in the Tcl interpreter."""
if self._tclCommands is not None:
for name in self._tclCommands:
#print '- Tkinter: deleted command', name
self.tk.deletecommand(name)
self._tclCommands = None
def deletecommand(self, name):
"""Internal function.
Delete the Tcl command provided in NAME."""
#print '- Tkinter: deleted command', name
self.tk.deletecommand(name)
try:
self._tclCommands.remove(name)
except ValueError:
pass
def tk_strictMotif(self, boolean=None):
"""Set Tcl internal variable, whether the look and feel
should adhere to Motif.
A parameter of 1 means adhere to Motif (e.g. no color
change if mouse passes over slider).
Returns the set value."""
return self.tk.getboolean(self.tk.call(
'set', 'tk_strictMotif', boolean))
def tk_bisque(self):
"""Change the color scheme to light brown as used in Tk 3.6 and before."""
self.tk.call('tk_bisque')
def tk_setPalette(self, *args, **kw):
"""Set a new color scheme for all widget elements.
        A single color given as argument will cause all colors of Tk
        widget elements to be derived from it.
Alternatively several keyword parameters and its associated
colors can be given. The following keywords are valid:
activeBackground, foreground, selectColor,
activeForeground, highlightBackground, selectBackground,
background, highlightColor, selectForeground,
disabledForeground, insertBackground, troughColor."""
self.tk.call(('tk_setPalette',)
+ _flatten(args) + _flatten(kw.items()))
def tk_menuBar(self, *args):
"""Do not use. Needed in Tk 3.6 and earlier."""
pass # obsolete since Tk 4.0
def wait_variable(self, name='PY_VAR'):
"""Wait until the variable is modified.
A parameter of type IntVar, StringVar, DoubleVar or
BooleanVar must be given."""
self.tk.call('tkwait', 'variable', name)
waitvar = wait_variable # XXX b/w compat
def wait_window(self, window=None):
"""Wait until a WIDGET is destroyed.
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'window', window._w)
def wait_visibility(self, window=None):
"""Wait until the visibility of a WIDGET changes
(e.g. it appears).
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'visibility', window._w)
def setvar(self, name='PY_VAR', value='1'):
"""Set Tcl variable NAME to VALUE."""
self.tk.setvar(name, value)
def getvar(self, name='PY_VAR'):
"""Return value of Tcl variable NAME."""
return self.tk.getvar(name)
getint = int
getdouble = float
def getboolean(self, s):
"""Return a boolean value for Tcl boolean values true and false given as parameter."""
return self.tk.getboolean(s)
def focus_set(self):
"""Direct input focus to this widget.
If the application currently does not have the focus
this widget will get the focus if the application gets
the focus through the window manager."""
self.tk.call('focus', self._w)
focus = focus_set # XXX b/w compat?
def focus_force(self):
"""Direct input focus to this widget even if the
application does not have the focus. Use with
caution!"""
self.tk.call('focus', '-force', self._w)
def focus_get(self):
"""Return the widget which has currently the focus in the
application.
Use focus_displayof to allow working with several
displays. Return None if application does not have
the focus."""
name = self.tk.call('focus')
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_displayof(self):
"""Return the widget which has currently the focus on the
display where this widget is located.
Return None if the application does not have the focus."""
name = self.tk.call('focus', '-displayof', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_lastfor(self):
"""Return the widget which would have the focus if top level
for this widget gets the focus from the window manager."""
name = self.tk.call('focus', '-lastfor', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
def tk_focusFollowsMouse(self):
"""The widget under mouse will get automatically focus. Can not
be disabled easily."""
self.tk.call('tk_focusFollowsMouse')
def tk_focusNext(self):
"""Return the next widget in the focus order which follows
widget which has currently the focus.
The focus order first goes to the next child, then to
the children of the child recursively and then to the
next sibling which is higher in the stacking order. A
widget is omitted if it has the takefocus resource set
to 0."""
name = self.tk.call('tk_focusNext', self._w)
if not name: return None
return self._nametowidget(name)
def tk_focusPrev(self):
"""Return previous widget in the focus order. See tk_focusNext for details."""
name = self.tk.call('tk_focusPrev', self._w)
if not name: return None
return self._nametowidget(name)
def after(self, ms, func=None, *args):
"""Call function once after given time.
MS specifies the time in milliseconds. FUNC gives the
function which shall be called. Additional parameters
are given as parameters to the function call. Return
identifier to cancel scheduling with after_cancel."""
if not func:
# I'd rather use time.sleep(ms*0.001)
self.tk.call('after', ms)
else:
def callit():
try:
func(*args)
finally:
try:
self.deletecommand(name)
except TclError:
pass
name = self._register(callit)
return self.tk.call('after', ms, name)
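    # Usage sketch (illustrative): schedule a callback and cancel it again.
    #
    #     def tick():
    #         print 'tick'
    #     timer_id = widget.after(1000, tick)   # fire once after 1000 ms
    #     widget.after_cancel(timer_id)         # or cancel before it fires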
def after_idle(self, func, *args):
"""Call FUNC once if the Tcl main loop has no event to
process.
Return an identifier to cancel the scheduling with
after_cancel."""
return self.after('idle', func, *args)
def after_cancel(self, id):
"""Cancel scheduling of function identified with ID.
Identifier returned by after or after_idle must be
given as first parameter."""
try:
data = self.tk.call('after', 'info', id)
# In Tk 8.3, splitlist returns: (script, type)
# In Tk 8.4, splitlist may return (script, type) or (script,)
script = self.tk.splitlist(data)[0]
self.deletecommand(script)
except TclError:
pass
self.tk.call('after', 'cancel', id)
def bell(self, displayof=0):
"""Ring a display's bell."""
self.tk.call(('bell',) + self._displayof(displayof))
# Clipboard handling:
def clipboard_get(self, **kw):
"""Retrieve data from the clipboard on window's display.
The window keyword defaults to the root window of the Tkinter
application.
The type keyword specifies the form in which the data is
to be returned and should be an atom name such as STRING
or FILE_NAME. Type defaults to STRING.
This command is equivalent to:
selection_get(CLIPBOARD)
"""
return self.tk.call(('clipboard', 'get') + self._options(kw))
def clipboard_clear(self, **kw):
"""Clear the data in the Tk clipboard.
A widget specified for the optional displayof keyword
argument specifies the target display."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('clipboard', 'clear') + self._options(kw))
def clipboard_append(self, string, **kw):
"""Append STRING to the Tk clipboard.
A widget specified at the optional displayof keyword
argument specifies the target display. The clipboard
can be retrieved with selection_get."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('clipboard', 'append') + self._options(kw)
+ ('--', string))
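    # Usage sketch (illustrative): replace the clipboard contents, then read
    # them back.
    #
    #     widget.clipboard_clear()
    #     widget.clipboard_append('hello')
    #     text = widget.clipboard_get()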
# XXX grab current w/o window argument
def grab_current(self):
"""Return widget which has currently the grab in this application
or None."""
name = self.tk.call('grab', 'current', self._w)
if not name: return None
return self._nametowidget(name)
def grab_release(self):
"""Release grab for this widget if currently set."""
self.tk.call('grab', 'release', self._w)
def grab_set(self):
"""Set grab for this widget.
A grab directs all events to this and descendant
widgets in the application."""
self.tk.call('grab', 'set', self._w)
def grab_set_global(self):
"""Set global grab for this widget.
A global grab directs all events to this and
descendant widgets on the display. Use with caution -
other applications do not get events anymore."""
self.tk.call('grab', 'set', '-global', self._w)
def grab_status(self):
"""Return None, "local" or "global" if this widget has
no, a local or a global grab."""
status = self.tk.call('grab', 'status', self._w)
if status == 'none': status = None
return status
def option_add(self, pattern, value, priority = None):
"""Set a VALUE (second parameter) for an option
PATTERN (first parameter).
An optional third parameter gives the numeric priority
(defaults to 80)."""
self.tk.call('option', 'add', pattern, value, priority)
def option_clear(self):
"""Clear the option database.
It will be reloaded if option_add is called."""
self.tk.call('option', 'clear')
def option_get(self, name, className):
"""Return the value for an option NAME for this widget
with CLASSNAME.
Values with higher priority override lower values."""
return self.tk.call('option', 'get', self._w, name, className)
def option_readfile(self, fileName, priority = None):
"""Read file FILENAME into the option database.
An optional second parameter gives the numeric
priority."""
self.tk.call('option', 'readfile', fileName, priority)
def selection_clear(self, **kw):
"""Clear the current X selection."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('selection', 'clear') + self._options(kw))
def selection_get(self, **kw):
"""Return the contents of the current X selection.
A keyword parameter selection specifies the name of
the selection and defaults to PRIMARY. A keyword
parameter displayof specifies a widget on the display
to use."""
if 'displayof' not in kw: kw['displayof'] = self._w
return self.tk.call(('selection', 'get') + self._options(kw))
def selection_handle(self, command, **kw):
"""Specify a function COMMAND to call if the X
selection owned by this widget is queried by another
application.
This function must return the contents of the
selection. The function will be called with the
arguments OFFSET and LENGTH which allows the chunking
of very long selections. The following keyword
parameters can be provided:
selection - name of the selection (default PRIMARY),
type - type of the selection (e.g. STRING, FILE_NAME)."""
name = self._register(command)
self.tk.call(('selection', 'handle') + self._options(kw)
+ (self._w, name))
def selection_own(self, **kw):
"""Become owner of X selection.
A keyword parameter selection specifies the name of
the selection (default PRIMARY)."""
self.tk.call(('selection', 'own') +
self._options(kw) + (self._w,))
def selection_own_get(self, **kw):
"""Return owner of X selection.
The following keyword parameter can
be provided:
selection - name of the selection (default PRIMARY),
type - type of the selection (e.g. STRING, FILE_NAME)."""
if 'displayof' not in kw: kw['displayof'] = self._w
name = self.tk.call(('selection', 'own') + self._options(kw))
if not name: return None
return self._nametowidget(name)
def send(self, interp, cmd, *args):
"""Send Tcl command CMD to different interpreter INTERP to be executed."""
return self.tk.call(('send', interp, cmd) + args)
def lower(self, belowThis=None):
"""Lower this widget in the stacking order."""
self.tk.call('lower', self._w, belowThis)
def tkraise(self, aboveThis=None):
"""Raise this widget in the stacking order."""
self.tk.call('raise', self._w, aboveThis)
lift = tkraise
def colormodel(self, value=None):
"""Useless. Not implemented in Tk."""
return self.tk.call('tk', 'colormodel', self._w, value)
def winfo_atom(self, name, displayof=0):
"""Return integer which represents atom NAME."""
args = ('winfo', 'atom') + self._displayof(displayof) + (name,)
return getint(self.tk.call(args))
def winfo_atomname(self, id, displayof=0):
"""Return name of atom with identifier ID."""
args = ('winfo', 'atomname') \
+ self._displayof(displayof) + (id,)
return self.tk.call(args)
def winfo_cells(self):
"""Return number of cells in the colormap for this widget."""
return getint(
self.tk.call('winfo', 'cells', self._w))
def winfo_children(self):
"""Return a list of all widgets which are children of this widget."""
result = []
for child in self.tk.splitlist(
self.tk.call('winfo', 'children', self._w)):
try:
# Tcl sometimes returns extra windows, e.g. for
# menus; those need to be skipped
result.append(self._nametowidget(child))
except KeyError:
pass
return result
def winfo_class(self):
"""Return window class name of this widget."""
return self.tk.call('winfo', 'class', self._w)
def winfo_colormapfull(self):
"""Return true if at the last color request the colormap was full."""
return self.tk.getboolean(
self.tk.call('winfo', 'colormapfull', self._w))
def winfo_containing(self, rootX, rootY, displayof=0):
"""Return the widget which is at the root coordinates ROOTX, ROOTY."""
args = ('winfo', 'containing') \
+ self._displayof(displayof) + (rootX, rootY)
name = self.tk.call(args)
if not name: return None
return self._nametowidget(name)
def winfo_depth(self):
"""Return the number of bits per pixel."""
return getint(self.tk.call('winfo', 'depth', self._w))
def winfo_exists(self):
"""Return true if this widget exists."""
return getint(
self.tk.call('winfo', 'exists', self._w))
def winfo_fpixels(self, number):
"""Return the number of pixels for the given distance NUMBER
(e.g. "3c") as float."""
return getdouble(self.tk.call(
'winfo', 'fpixels', self._w, number))
def winfo_geometry(self):
"""Return geometry string for this widget in the form "widthxheight+X+Y"."""
return self.tk.call('winfo', 'geometry', self._w)
def winfo_height(self):
"""Return height of this widget."""
return getint(
self.tk.call('winfo', 'height', self._w))
def winfo_id(self):
"""Return identifier ID for this widget."""
return self.tk.getint(
self.tk.call('winfo', 'id', self._w))
def winfo_interps(self, displayof=0):
"""Return the name of all Tcl interpreters for this display."""
args = ('winfo', 'interps') + self._displayof(displayof)
return self.tk.splitlist(self.tk.call(args))
def winfo_ismapped(self):
"""Return true if this widget is mapped."""
return getint(
self.tk.call('winfo', 'ismapped', self._w))
def winfo_manager(self):
"""Return the window mananger name for this widget."""
return self.tk.call('winfo', 'manager', self._w)
def winfo_name(self):
"""Return the name of this widget."""
return self.tk.call('winfo', 'name', self._w)
def winfo_parent(self):
"""Return the name of the parent of this widget."""
return self.tk.call('winfo', 'parent', self._w)
def winfo_pathname(self, id, displayof=0):
"""Return the pathname of the widget given by ID."""
args = ('winfo', 'pathname') \
+ self._displayof(displayof) + (id,)
return self.tk.call(args)
def winfo_pixels(self, number):
"""Rounded integer value of winfo_fpixels."""
return getint(
self.tk.call('winfo', 'pixels', self._w, number))
def winfo_pointerx(self):
"""Return the x coordinate of the pointer on the root window."""
return getint(
self.tk.call('winfo', 'pointerx', self._w))
def winfo_pointerxy(self):
"""Return a tuple of x and y coordinates of the pointer on the root window."""
return self._getints(
self.tk.call('winfo', 'pointerxy', self._w))
def winfo_pointery(self):
"""Return the y coordinate of the pointer on the root window."""
return getint(
self.tk.call('winfo', 'pointery', self._w))
def winfo_reqheight(self):
"""Return requested height of this widget."""
return getint(
self.tk.call('winfo', 'reqheight', self._w))
def winfo_reqwidth(self):
"""Return requested width of this widget."""
return getint(
self.tk.call('winfo', 'reqwidth', self._w))
def winfo_rgb(self, color):
"""Return tuple of decimal values for red, green, blue for
COLOR in this widget."""
return self._getints(
self.tk.call('winfo', 'rgb', self._w, color))
def winfo_rootx(self):
"""Return x coordinate of upper left corner of this widget on the
root window."""
return getint(
self.tk.call('winfo', 'rootx', self._w))
def winfo_rooty(self):
"""Return y coordinate of upper left corner of this widget on the
root window."""
return getint(
self.tk.call('winfo', 'rooty', self._w))
def winfo_screen(self):
"""Return the screen name of this widget."""
return self.tk.call('winfo', 'screen', self._w)
def winfo_screencells(self):
"""Return the number of the cells in the colormap of the screen
of this widget."""
return getint(
self.tk.call('winfo', 'screencells', self._w))
def winfo_screendepth(self):
"""Return the number of bits per pixel of the root window of the
screen of this widget."""
return getint(
self.tk.call('winfo', 'screendepth', self._w))
def winfo_screenheight(self):
"""Return the number of pixels of the height of the screen of this widget
in pixel."""
return getint(
self.tk.call('winfo', 'screenheight', self._w))
def winfo_screenmmheight(self):
"""Return the number of pixels of the height of the screen of
this widget in mm."""
return getint(
self.tk.call('winfo', 'screenmmheight', self._w))
def winfo_screenmmwidth(self):
"""Return the number of pixels of the width of the screen of
this widget in mm."""
return getint(
self.tk.call('winfo', 'screenmmwidth', self._w))
def winfo_screenvisual(self):
"""Return one of the strings directcolor, grayscale, pseudocolor,
staticcolor, staticgray, or truecolor for the default
colormodel of this screen."""
return self.tk.call('winfo', 'screenvisual', self._w)
def winfo_screenwidth(self):
"""Return the number of pixels of the width of the screen of
this widget in pixel."""
return getint(
self.tk.call('winfo', 'screenwidth', self._w))
def winfo_server(self):
"""Return information of the X-Server of the screen of this widget in
the form "XmajorRminor vendor vendorVersion"."""
return self.tk.call('winfo', 'server', self._w)
def winfo_toplevel(self):
"""Return the toplevel widget of this widget."""
return self._nametowidget(self.tk.call(
'winfo', 'toplevel', self._w))
def winfo_viewable(self):
"""Return true if the widget and all its higher ancestors are mapped."""
return getint(
self.tk.call('winfo', 'viewable', self._w))
def winfo_visual(self):
"""Return one of the strings directcolor, grayscale, pseudocolor,
staticcolor, staticgray, or truecolor for the
colormodel of this widget."""
return self.tk.call('winfo', 'visual', self._w)
def winfo_visualid(self):
"""Return the X identifier for the visual for this widget."""
return self.tk.call('winfo', 'visualid', self._w)
def winfo_visualsavailable(self, includeids=0):
"""Return a list of all visuals available for the screen
of this widget.
        Each item in the list consists of a visual name (see winfo_visual), a
        depth and, if INCLUDEIDS=1 is given, also the X identifier."""
data = self.tk.split(
self.tk.call('winfo', 'visualsavailable', self._w,
includeids and 'includeids' or None))
if type(data) is StringType:
data = [self.tk.split(data)]
return map(self.__winfo_parseitem, data)
def __winfo_parseitem(self, t):
"""Internal function."""
return t[:1] + tuple(map(self.__winfo_getint, t[1:]))
def __winfo_getint(self, x):
"""Internal function."""
return int(x, 0)
def winfo_vrootheight(self):
"""Return the height of the virtual root window associated with this
widget in pixels. If there is no virtual root window return the
height of the screen."""
return getint(
self.tk.call('winfo', 'vrootheight', self._w))
def winfo_vrootwidth(self):
"""Return the width of the virtual root window associated with this
        widget in pixels. If there is no virtual root window return the
width of the screen."""
return getint(
self.tk.call('winfo', 'vrootwidth', self._w))
def winfo_vrootx(self):
"""Return the x offset of the virtual root relative to the root
window of the screen of this widget."""
return getint(
self.tk.call('winfo', 'vrootx', self._w))
def winfo_vrooty(self):
"""Return the y offset of the virtual root relative to the root
window of the screen of this widget."""
return getint(
self.tk.call('winfo', 'vrooty', self._w))
def winfo_width(self):
"""Return the width of this widget."""
return getint(
self.tk.call('winfo', 'width', self._w))
def winfo_x(self):
"""Return the x coordinate of the upper left corner of this widget
in the parent."""
return getint(
self.tk.call('winfo', 'x', self._w))
def winfo_y(self):
"""Return the y coordinate of the upper left corner of this widget
in the parent."""
return getint(
self.tk.call('winfo', 'y', self._w))
def update(self):
"""Enter event loop until all pending events have been processed by Tcl."""
self.tk.call('update')
def update_idletasks(self):
"""Enter event loop until all idle callbacks have been called. This
will update the display of windows but not process events caused by
the user."""
self.tk.call('update', 'idletasks')
def bindtags(self, tagList=None):
"""Set or get the list of bindtags for this widget.
With no argument return the list of all bindtags associated with
this widget. With a list of strings as argument the bindtags are
set to this list. The bindtags determine in which order events are
processed (see bind)."""
if tagList is None:
return self.tk.splitlist(
self.tk.call('bindtags', self._w))
else:
self.tk.call('bindtags', self._w, tagList)
def _bind(self, what, sequence, func, add, needcleanup=1):
"""Internal function."""
if type(func) is StringType:
self.tk.call(what + (sequence, func))
elif func:
funcid = self._register(func, self._substitute,
needcleanup)
cmd = ('%sif {"[%s %s]" == "break"} break\n'
%
(add and '+' or '',
funcid, self._subst_format_str))
self.tk.call(what + (sequence, cmd))
return funcid
elif sequence:
return self.tk.call(what + (sequence,))
else:
return self.tk.splitlist(self.tk.call(what))
def bind(self, sequence=None, func=None, add=None):
"""Bind to this widget at event SEQUENCE a call to function FUNC.
SEQUENCE is a string of concatenated event
patterns. An event pattern is of the form
<MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one
of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4,
Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3,
B3, Alt, Button4, B4, Double, Button5, B5 Triple,
Mod1, M1. TYPE is one of Activate, Enter, Map,
ButtonPress, Button, Expose, Motion, ButtonRelease
FocusIn, MouseWheel, Circulate, FocusOut, Property,
Colormap, Gravity Reparent, Configure, KeyPress, Key,
Unmap, Deactivate, KeyRelease Visibility, Destroy,
Leave and DETAIL is the button number for ButtonPress,
ButtonRelease and DETAIL is the Keysym for KeyPress and
KeyRelease. Examples are
<Control-Button-1> for pressing Control and mouse button 1 or
<Alt-A> for pressing A and the Alt key (KeyPress can be omitted).
An event pattern can also be a virtual event of the form
<<AString>> where AString can be arbitrary. This
event can be generated by event_generate.
If events are concatenated they must appear shortly
after each other.
FUNC will be called if the event sequence occurs with an
instance of Event as argument. If the return value of FUNC is
"break" no further bound function is invoked.
An additional boolean parameter ADD specifies whether FUNC will
be called additionally to the other bound function or whether
it will replace the previous function.
Bind will return an identifier to allow deletion of the bound function with
unbind without memory leak.
If FUNC or SEQUENCE is omitted the bound function or list
of bound events are returned."""
return self._bind(('bind', self._w), sequence, func, add)
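    # Usage sketch (illustrative): bind a handler and read Event attributes.
    #
    #     def on_click(event):
    #         print 'clicked at', event.x, event.y, 'in', event.widget
    #     funcid = widget.bind('<Button-1>', on_click)
    #     widget.unbind('<Button-1>', funcid)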
def unbind(self, sequence, funcid=None):
"""Unbind for this widget for event SEQUENCE the
function identified with FUNCID."""
self.tk.call('bind', self._w, sequence, '')
if funcid:
self.deletecommand(funcid)
def bind_all(self, sequence=None, func=None, add=None):
"""Bind to all widgets at an event SEQUENCE a call to function FUNC.
An additional boolean parameter ADD specifies whether FUNC will
be called additionally to the other bound function or whether
it will replace the previous function. See bind for the return value."""
return self._bind(('bind', 'all'), sequence, func, add, 0)
def unbind_all(self, sequence):
"""Unbind for all widgets for event SEQUENCE all functions."""
self.tk.call('bind', 'all' , sequence, '')
def bind_class(self, className, sequence=None, func=None, add=None):
"""Bind to widgets with bindtag CLASSNAME at event
SEQUENCE a call of function FUNC. An additional
boolean parameter ADD specifies whether FUNC will be
called additionally to the other bound function or
whether it will replace the previous function. See bind for
the return value."""
return self._bind(('bind', className), sequence, func, add, 0)
def unbind_class(self, className, sequence):
"""Unbind for a all widgets with bindtag CLASSNAME for event SEQUENCE
all functions."""
self.tk.call('bind', className , sequence, '')
def mainloop(self, n=0):
"""Call the mainloop of Tk."""
self.tk.mainloop(n)
def quit(self):
"""Quit the Tcl interpreter. All widgets will be destroyed."""
self.tk.quit()
def _getints(self, string):
"""Internal function."""
if string:
return tuple(map(getint, self.tk.splitlist(string)))
def _getdoubles(self, string):
"""Internal function."""
if string:
return tuple(map(getdouble, self.tk.splitlist(string)))
def _getboolean(self, string):
"""Internal function."""
if string:
return self.tk.getboolean(string)
def _displayof(self, displayof):
"""Internal function."""
if displayof:
return ('-displayof', displayof)
if displayof is None:
return ('-displayof', self._w)
return ()
def _options(self, cnf, kw = None):
"""Internal function."""
if kw:
cnf = _cnfmerge((cnf, kw))
else:
cnf = _cnfmerge(cnf)
res = ()
for k, v in cnf.items():
if v is not None:
if k[-1] == '_': k = k[:-1]
if hasattr(v, '__call__'):
v = self._register(v)
elif isinstance(v, (tuple, list)):
nv = []
for item in v:
if not isinstance(item, (basestring, int)):
break
elif isinstance(item, int):
nv.append('%d' % item)
else:
# format it to proper Tcl code if it contains space
nv.append(('{%s}' if ' ' in item else '%s') % item)
else:
v = ' '.join(nv)
res = res + ('-'+k, v)
return res
def nametowidget(self, name):
"""Return the Tkinter instance of a widget identified by
its Tcl name NAME."""
name = str(name).split('.')
w = self
if not name[0]:
w = w._root()
name = name[1:]
for n in name:
if not n:
break
w = w.children[n]
return w
_nametowidget = nametowidget
def _register(self, func, subst=None, needcleanup=1):
"""Return a newly created Tcl function. If this
function is called, the Python function FUNC will
be executed. An optional function SUBST can
be given which will be executed before FUNC."""
f = CallWrapper(func, subst, self).__call__
name = repr(id(f))
try:
func = func.im_func
except AttributeError:
pass
try:
name = name + func.__name__
except AttributeError:
pass
self.tk.createcommand(name, f)
if needcleanup:
if self._tclCommands is None:
self._tclCommands = []
self._tclCommands.append(name)
return name
register = _register
def _root(self):
"""Internal function."""
w = self
while w.master: w = w.master
return w
_subst_format = ('%#', '%b', '%f', '%h', '%k',
'%s', '%t', '%w', '%x', '%y',
'%A', '%E', '%K', '%N', '%W', '%T', '%X', '%Y', '%D')
_subst_format_str = " ".join(_subst_format)
def _substitute(self, *args):
"""Internal function."""
if len(args) != len(self._subst_format): return args
getboolean = self.tk.getboolean
getint = int
def getint_event(s):
"""Tk changed behavior in 8.4.2, returning "??" rather more often."""
try:
return int(s)
except ValueError:
return s
nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args
# Missing: (a, c, d, m, o, v, B, R)
e = Event()
        # serial field: valid for all events
# number of button: ButtonPress and ButtonRelease events only
# height field: Configure, ConfigureRequest, Create,
# ResizeRequest, and Expose events only
# keycode field: KeyPress and KeyRelease events only
# time field: "valid for events that contain a time field"
# width field: Configure, ConfigureRequest, Create, ResizeRequest,
# and Expose events only
# x field: "valid for events that contain a x field"
# y field: "valid for events that contain a y field"
# keysym as decimal: KeyPress and KeyRelease events only
# x_root, y_root fields: ButtonPress, ButtonRelease, KeyPress,
        # KeyRelease, and Motion events
e.serial = getint(nsign)
e.num = getint_event(b)
try: e.focus = getboolean(f)
except TclError: pass
e.height = getint_event(h)
e.keycode = getint_event(k)
e.state = getint_event(s)
e.time = getint_event(t)
e.width = getint_event(w)
e.x = getint_event(x)
e.y = getint_event(y)
e.char = A
try: e.send_event = getboolean(E)
except TclError: pass
e.keysym = K
e.keysym_num = getint_event(N)
e.type = T
try:
e.widget = self._nametowidget(W)
except KeyError:
e.widget = W
e.x_root = getint_event(X)
e.y_root = getint_event(Y)
try:
e.delta = getint(D)
except ValueError:
e.delta = 0
return (e,)
def _report_exception(self):
"""Internal function."""
import sys
exc, val, tb = sys.exc_type, sys.exc_value, sys.exc_traceback
root = self._root()
root.report_callback_exception(exc, val, tb)
def _configure(self, cmd, cnf, kw):
"""Internal function."""
if kw:
cnf = _cnfmerge((cnf, kw))
elif cnf:
cnf = _cnfmerge(cnf)
if cnf is None:
cnf = {}
for x in self.tk.split(
self.tk.call(_flatten((self._w, cmd)))):
cnf[x[0][1:]] = (x[0][1:],) + x[1:]
return cnf
if type(cnf) is StringType:
x = self.tk.split(
self.tk.call(_flatten((self._w, cmd, '-'+cnf))))
return (x[0][1:],) + x[1:]
self.tk.call(_flatten((self._w, cmd)) + self._options(cnf))
# These used to be defined in Widget:
def configure(self, cnf=None, **kw):
"""Configure resources of a widget.
The values for resources are specified as keyword
arguments. To get an overview about
the allowed keyword arguments call the method keys.
"""
return self._configure('configure', cnf, kw)
config = configure
def cget(self, key):
"""Return the resource value for a KEY given as string."""
return self.tk.call(self._w, 'cget', '-' + key)
__getitem__ = cget
def __setitem__(self, key, value):
self.configure({key: value})
def __contains__(self, key):
raise TypeError("Tkinter objects don't support 'in' tests.")
def keys(self):
"""Return a list of all resource names of this widget."""
return map(lambda x: x[0][1:],
self.tk.split(self.tk.call(self._w, 'configure')))
def __str__(self):
"""Return the window path name of this widget."""
return self._w
# Pack methods that apply to the master
_noarg_ = ['_noarg_']
def pack_propagate(self, flag=_noarg_):
"""Set or get the status for propagation of geometry information.
A boolean argument specifies whether the geometry information
of the slaves will determine the size of this widget. If no argument
is given the current setting will be returned.
"""
if flag is Misc._noarg_:
return self._getboolean(self.tk.call(
'pack', 'propagate', self._w))
else:
self.tk.call('pack', 'propagate', self._w, flag)
propagate = pack_propagate
def pack_slaves(self):
"""Return a list of all slaves of this widget
in its packing order."""
return map(self._nametowidget,
self.tk.splitlist(
self.tk.call('pack', 'slaves', self._w)))
slaves = pack_slaves
# Place method that applies to the master
def place_slaves(self):
"""Return a list of all slaves of this widget
in its packing order."""
return map(self._nametowidget,
self.tk.splitlist(
self.tk.call(
'place', 'slaves', self._w)))
# Grid methods that apply to the master
def grid_bbox(self, column=None, row=None, col2=None, row2=None):
"""Return a tuple of integer coordinates for the bounding
box of this widget controlled by the geometry manager grid.
If COLUMN, ROW is given the bounding box applies from
the cell with row and column 0 to the specified
cell. If COL2 and ROW2 are given the bounding box
starts at that cell.
The returned integers specify the offset of the upper left
corner in the master widget and the width and height.
"""
args = ('grid', 'bbox', self._w)
if column is not None and row is not None:
args = args + (column, row)
if col2 is not None and row2 is not None:
args = args + (col2, row2)
return self._getints(self.tk.call(*args)) or None
bbox = grid_bbox
def _grid_configure(self, command, index, cnf, kw):
"""Internal function."""
if type(cnf) is StringType and not kw:
if cnf[-1:] == '_':
cnf = cnf[:-1]
if cnf[:1] != '-':
cnf = '-'+cnf
options = (cnf,)
else:
options = self._options(cnf, kw)
if not options:
res = self.tk.call('grid',
command, self._w, index)
words = self.tk.splitlist(res)
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if not value:
value = None
elif '.' in value:
value = getdouble(value)
else:
value = getint(value)
dict[key] = value
return dict
res = self.tk.call(
('grid', command, self._w, index)
+ options)
if len(options) == 1:
if not res: return None
# In Tk 7.5, -width can be a float
if '.' in res: return getdouble(res)
return getint(res)
def grid_columnconfigure(self, index, cnf={}, **kw):
"""Configure column INDEX of a grid.
        Valid resources are minsize (minimum size of the column),
        weight (how much additional space propagates to this column)
        and pad (how much additional space to add)."""
return self._grid_configure('columnconfigure', index, cnf, kw)
columnconfigure = grid_columnconfigure
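    # Usage sketch (illustrative): let column 0 absorb extra space and give
    # row 1 a minimum height.
    #
    #     master.grid_columnconfigure(0, weight=1)
    #     master.grid_rowconfigure(1, minsize=50)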
def grid_location(self, x, y):
"""Return a tuple of column and row which identify the cell
at which the pixel at position X and Y inside the master
widget is located."""
return self._getints(
self.tk.call(
'grid', 'location', self._w, x, y)) or None
def grid_propagate(self, flag=_noarg_):
"""Set or get the status for propagation of geometry information.
A boolean argument specifies whether the geometry information
of the slaves will determine the size of this widget. If no argument
is given, the current setting will be returned.
"""
if flag is Misc._noarg_:
return self._getboolean(self.tk.call(
'grid', 'propagate', self._w))
else:
self.tk.call('grid', 'propagate', self._w, flag)
def grid_rowconfigure(self, index, cnf={}, **kw):
"""Configure row INDEX of a grid.
        Valid resources are minsize (minimum size of the row),
        weight (how much additional space propagates to this row)
        and pad (how much additional space to add)."""
return self._grid_configure('rowconfigure', index, cnf, kw)
rowconfigure = grid_rowconfigure
def grid_size(self):
"""Return a tuple of the number of column and rows in the grid."""
return self._getints(
self.tk.call('grid', 'size', self._w)) or None
size = grid_size
def grid_slaves(self, row=None, column=None):
"""Return a list of all slaves of this widget
in its packing order."""
args = ()
if row is not None:
args = args + ('-row', row)
if column is not None:
args = args + ('-column', column)
return map(self._nametowidget,
self.tk.splitlist(self.tk.call(
('grid', 'slaves', self._w) + args)))
# Support for the "event" command, new in Tk 4.2.
# By Case Roole.
def event_add(self, virtual, *sequences):
"""Bind a virtual event VIRTUAL (of the form <<Name>>)
to an event SEQUENCE such that the virtual event is triggered
whenever SEQUENCE occurs."""
args = ('event', 'add', virtual) + sequences
self.tk.call(args)
def event_delete(self, virtual, *sequences):
"""Unbind a virtual event VIRTUAL from SEQUENCE."""
args = ('event', 'delete', virtual) + sequences
self.tk.call(args)
def event_generate(self, sequence, **kw):
"""Generate an event SEQUENCE. Additional
        keyword arguments specify parameters of the event
(e.g. x, y, rootx, rooty)."""
args = ('event', 'generate', self._w, sequence)
for k, v in kw.items():
args = args + ('-%s' % k, str(v))
self.tk.call(args)
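    # Usage sketch (illustrative): define a virtual event, bind to it, and
    # fire it programmatically; save_document is a hypothetical handler.
    #
    #     widget.event_add('<<Save>>', '<Control-s>')
    #     widget.bind('<<Save>>', lambda e: save_document())
    #     widget.event_generate('<<Save>>')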
def event_info(self, virtual=None):
"""Return a list of all virtual events or the information
about the SEQUENCE bound to the virtual event VIRTUAL."""
return self.tk.splitlist(
self.tk.call('event', 'info', virtual))
# Image related commands
def image_names(self):
"""Return a list of all existing image names."""
return self.tk.call('image', 'names')
def image_types(self):
"""Return a list of all available image types (e.g. phote bitmap)."""
return self.tk.call('image', 'types')
class CallWrapper:
"""Internal class. Stores function to call when some user
defined Tcl function is called e.g. after an event occurred."""
def __init__(self, func, subst, widget):
"""Store FUNC, SUBST and WIDGET as members."""
self.func = func
self.subst = subst
self.widget = widget
def __call__(self, *args):
"""Apply first function SUBST to arguments, than FUNC."""
try:
if self.subst:
args = self.subst(*args)
return self.func(*args)
except SystemExit, msg:
raise SystemExit, msg
except:
self.widget._report_exception()
class XView:
"""Mix-in class for querying and changing the horizontal position
of a widget's window."""
def xview(self, *args):
"""Query and change the horizontal position of the view."""
res = self.tk.call(self._w, 'xview', *args)
if not args:
return self._getdoubles(res)
def xview_moveto(self, fraction):
"""Adjusts the view in the window so that FRACTION of the
total width of the canvas is off-screen to the left."""
self.tk.call(self._w, 'xview', 'moveto', fraction)
def xview_scroll(self, number, what):
"""Shift the x-view according to NUMBER which is measured in "units"
or "pages" (WHAT)."""
self.tk.call(self._w, 'xview', 'scroll', number, what)
class YView:
"""Mix-in class for querying and changing the vertical position
of a widget's window."""
def yview(self, *args):
"""Query and change the vertical position of the view."""
res = self.tk.call(self._w, 'yview', *args)
if not args:
return self._getdoubles(res)
def yview_moveto(self, fraction):
"""Adjusts the view in the window so that FRACTION of the
total height of the canvas is off-screen to the top."""
self.tk.call(self._w, 'yview', 'moveto', fraction)
def yview_scroll(self, number, what):
"""Shift the y-view according to NUMBER which is measured in
"units" or "pages" (WHAT)."""
self.tk.call(self._w, 'yview', 'scroll', number, what)
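# Usage sketch (illustrative): a Scrollbar is wired to a scrollable widget
# through these view methods.
#
#     scrollbar.config(command=listbox.yview)
#     listbox.config(yscrollcommand=scrollbar.set)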
class Wm:
"""Provides functions for the communication with the window manager."""
def wm_aspect(self,
minNumer=None, minDenom=None,
maxNumer=None, maxDenom=None):
"""Instruct the window manager to set the aspect ratio (width/height)
of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple
of the actual values if no argument is given."""
return self._getints(
self.tk.call('wm', 'aspect', self._w,
minNumer, minDenom,
maxNumer, maxDenom))
aspect = wm_aspect
def wm_attributes(self, *args):
"""This subcommand returns or sets platform specific attributes
The first form returns a list of the platform specific flags and
their values. The second form returns the value for the specific
option. The third form sets one or more of the values. The values
are as follows:
On Windows, -disabled gets or sets whether the window is in a
disabled state. -toolwindow gets or sets the style of the window
to toolwindow (as defined in the MSDN). -topmost gets or sets
whether this is a topmost window (displays above all other
windows).
On Macintosh, XXXXX
On Unix, there are currently no special attribute values.
"""
args = ('wm', 'attributes', self._w) + args
return self.tk.call(args)
attributes=wm_attributes
def wm_client(self, name=None):
"""Store NAME in WM_CLIENT_MACHINE property of this widget. Return
current value."""
return self.tk.call('wm', 'client', self._w, name)
client = wm_client
def wm_colormapwindows(self, *wlist):
"""Store list of window names (WLIST) into WM_COLORMAPWINDOWS property
of this widget. This list contains windows whose colormaps differ from their
parents. Return current list of widgets if WLIST is empty."""
if len(wlist) > 1:
wlist = (wlist,) # Tk needs a list of windows here
args = ('wm', 'colormapwindows', self._w) + wlist
return map(self._nametowidget, self.tk.call(args))
colormapwindows = wm_colormapwindows
def wm_command(self, value=None):
"""Store VALUE in WM_COMMAND property. It is the command
which shall be used to invoke the application. Return current
command if VALUE is None."""
return self.tk.call('wm', 'command', self._w, value)
command = wm_command
def wm_deiconify(self):
"""Deiconify this widget. If it was never mapped it will not be mapped.
On Windows it will raise this widget and give it the focus."""
return self.tk.call('wm', 'deiconify', self._w)
deiconify = wm_deiconify
def wm_focusmodel(self, model=None):
"""Set focus model to MODEL. "active" means that this widget will claim
the focus itself, "passive" means that the window manager shall give
the focus. Return current focus model if MODEL is None."""
return self.tk.call('wm', 'focusmodel', self._w, model)
focusmodel = wm_focusmodel
def wm_frame(self):
"""Return identifier for decorative frame of this widget if present."""
return self.tk.call('wm', 'frame', self._w)
frame = wm_frame
def wm_geometry(self, newGeometry=None):
"""Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
current value if None is given."""
return self.tk.call('wm', 'geometry', self._w, newGeometry)
geometry = wm_geometry
def wm_grid(self,
baseWidth=None, baseHeight=None,
widthInc=None, heightInc=None):
"""Instruct the window manager that this widget shall only be
resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and
height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the
number of grid units requested in Tk_GeometryRequest."""
return self._getints(self.tk.call(
'wm', 'grid', self._w,
baseWidth, baseHeight, widthInc, heightInc))
grid = wm_grid
def wm_group(self, pathName=None):
"""Set the group leader widgets for related widgets to PATHNAME. Return
the group leader of this widget if None is given."""
return self.tk.call('wm', 'group', self._w, pathName)
group = wm_group
def wm_iconbitmap(self, bitmap=None, default=None):
"""Set bitmap for the iconified widget to BITMAP. Return
the bitmap if None is given.
Under Windows, the DEFAULT parameter can be used to set the icon
        for the widget and any descendants that don't have an icon set
explicitly. DEFAULT can be the relative path to a .ico file
(example: root.iconbitmap(default='myicon.ico') ). See Tk
documentation for more information."""
if default:
return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
else:
return self.tk.call('wm', 'iconbitmap', self._w, bitmap)
iconbitmap = wm_iconbitmap
def wm_iconify(self):
"""Display widget as icon."""
return self.tk.call('wm', 'iconify', self._w)
iconify = wm_iconify
def wm_iconmask(self, bitmap=None):
"""Set mask for the icon bitmap of this widget. Return the
mask if None is given."""
return self.tk.call('wm', 'iconmask', self._w, bitmap)
iconmask = wm_iconmask
def wm_iconname(self, newName=None):
"""Set the name of the icon for this widget. Return the name if
None is given."""
return self.tk.call('wm', 'iconname', self._w, newName)
iconname = wm_iconname
def wm_iconposition(self, x=None, y=None):
"""Set the position of the icon of this widget to X and Y. Return
        a tuple of the current values of X and Y if None is given."""
return self._getints(self.tk.call(
'wm', 'iconposition', self._w, x, y))
iconposition = wm_iconposition
def wm_iconwindow(self, pathName=None):
"""Set widget PATHNAME to be displayed instead of icon. Return the current
value if None is given."""
return self.tk.call('wm', 'iconwindow', self._w, pathName)
iconwindow = wm_iconwindow
def wm_maxsize(self, width=None, height=None):
"""Set max WIDTH and HEIGHT for this widget. If the window is gridded
the values are given in grid units. Return the current values if None
is given."""
return self._getints(self.tk.call(
'wm', 'maxsize', self._w, width, height))
maxsize = wm_maxsize
def wm_minsize(self, width=None, height=None):
"""Set min WIDTH and HEIGHT for this widget. If the window is gridded
the values are given in grid units. Return the current values if None
is given."""
return self._getints(self.tk.call(
'wm', 'minsize', self._w, width, height))
minsize = wm_minsize
def wm_overrideredirect(self, boolean=None):
"""Instruct the window manager to ignore this widget
if BOOLEAN is given with 1. Return the current value if None
is given."""
return self._getboolean(self.tk.call(
'wm', 'overrideredirect', self._w, boolean))
overrideredirect = wm_overrideredirect
def wm_positionfrom(self, who=None):
"""Instruct the window manager that the position of this widget shall
be defined by the user if WHO is "user", and by its own policy if WHO is
"program"."""
return self.tk.call('wm', 'positionfrom', self._w, who)
positionfrom = wm_positionfrom
def wm_protocol(self, name=None, func=None):
"""Bind function FUNC to command NAME for this widget.
Return the function bound to NAME if None is given. NAME could be
e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW"."""
if hasattr(func, '__call__'):
command = self._register(func)
else:
command = func
return self.tk.call(
'wm', 'protocol', self._w, name, command)
protocol = wm_protocol
def wm_resizable(self, width=None, height=None):
"""Instruct the window manager whether this width can be resized
in WIDTH or HEIGHT. Both values are boolean values."""
return self.tk.call('wm', 'resizable', self._w, width, height)
resizable = wm_resizable
def wm_sizefrom(self, who=None):
"""Instruct the window manager that the size of this widget shall
be defined by the user if WHO is "user", and by its own policy if WHO is
"program"."""
return self.tk.call('wm', 'sizefrom', self._w, who)
sizefrom = wm_sizefrom
def wm_state(self, newstate=None):
"""Query or set the state of this widget as one of normal, icon,
iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only)."""
return self.tk.call('wm', 'state', self._w, newstate)
state = wm_state
def wm_title(self, string=None):
"""Set the title of this widget."""
return self.tk.call('wm', 'title', self._w, string)
title = wm_title
def wm_transient(self, master=None):
"""Instruct the window manager that this widget is transient
with regard to widget MASTER."""
return self.tk.call('wm', 'transient', self._w, master)
transient = wm_transient
def wm_withdraw(self):
"""Withdraw this widget from the screen such that it is unmapped
and forgotten by the window manager. Re-draw it with wm_deiconify."""
return self.tk.call('wm', 'withdraw', self._w)
withdraw = wm_withdraw
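# A minimal usage sketch of the Wm mix-in on a toplevel window; the title,
# geometry string and protocol handler below are illustrative.
#
#     from Tkinter import *
#     root = Tk()
#     root.wm_title('Demo')                       # alias: root.title(...)
#     root.wm_geometry('300x200+50+50')           # =widthxheight+x+y
#     root.wm_protocol('WM_DELETE_WINDOW', root.destroy)
#     root.mainloop()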
class Tk(Misc, Wm):
"""Toplevel widget of Tk which represents mostly the main window
of an application. It has an associated Tcl interpreter."""
_w = '.'
def __init__(self, screenName=None, baseName=None, className='Tk',
useTk=1, sync=0, use=None):
"""Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will
be created. BASENAME will be used for the identification of the profile file (see
readprofile).
It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME
is the name of the widget class."""
self.master = None
self.children = {}
self._tkloaded = 0
# to avoid recursions in the getattr code in case of failure, we
# ensure that self.tk is always _something_.
self.tk = None
if baseName is None:
import sys, os
baseName = os.path.basename(sys.argv[0])
baseName, ext = os.path.splitext(baseName)
if ext not in ('.py', '.pyc', '.pyo'):
baseName = baseName + ext
interactive = 0
self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use)
if useTk:
self._loadtk()
self.readprofile(baseName, className)
def loadtk(self):
if not self._tkloaded:
self.tk.loadtk()
self._loadtk()
def _loadtk(self):
self._tkloaded = 1
global _default_root
# Version sanity checks
tk_version = self.tk.getvar('tk_version')
if tk_version != _tkinter.TK_VERSION:
raise RuntimeError, \
"tk.h version (%s) doesn't match libtk.a version (%s)" \
% (_tkinter.TK_VERSION, tk_version)
# Under unknown circumstances, tcl_version gets coerced to float
tcl_version = str(self.tk.getvar('tcl_version'))
if tcl_version != _tkinter.TCL_VERSION:
raise RuntimeError, \
"tcl.h version (%s) doesn't match libtcl.a version (%s)" \
% (_tkinter.TCL_VERSION, tcl_version)
if TkVersion < 4.0:
raise RuntimeError, \
"Tk 4.0 or higher is required; found Tk %s" \
% str(TkVersion)
# Create and register the tkerror and exit commands
        # We need to inline parts of _register here; _register
# would register differently-named commands.
if self._tclCommands is None:
self._tclCommands = []
self.tk.createcommand('tkerror', _tkerror)
self.tk.createcommand('exit', _exit)
self._tclCommands.append('tkerror')
self._tclCommands.append('exit')
if _support_default_root and not _default_root:
_default_root = self
self.protocol("WM_DELETE_WINDOW", self.destroy)
def destroy(self):
"""Destroy this and all descendants widgets. This will
end the application of this Tcl interpreter."""
for c in self.children.values(): c.destroy()
self.tk.call('destroy', self._w)
Misc.destroy(self)
global _default_root
if _support_default_root and _default_root is self:
_default_root = None
def readprofile(self, baseName, className):
"""Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into
the Tcl Interpreter and calls execfile on BASENAME.py and CLASSNAME.py if
such a file exists in the home directory."""
import os
if 'HOME' in os.environ: home = os.environ['HOME']
else: home = os.curdir
class_tcl = os.path.join(home, '.%s.tcl' % className)
class_py = os.path.join(home, '.%s.py' % className)
base_tcl = os.path.join(home, '.%s.tcl' % baseName)
base_py = os.path.join(home, '.%s.py' % baseName)
dir = {'self': self}
exec 'from Tkinter import *' in dir
if os.path.isfile(class_tcl):
self.tk.call('source', class_tcl)
if os.path.isfile(class_py):
execfile(class_py, dir)
if os.path.isfile(base_tcl):
self.tk.call('source', base_tcl)
if os.path.isfile(base_py):
execfile(base_py, dir)
def report_callback_exception(self, exc, val, tb):
"""Internal function. It reports exception on sys.stderr."""
import traceback, sys
sys.stderr.write("Exception in Tkinter callback\n")
sys.last_type = exc
sys.last_value = val
sys.last_traceback = tb
traceback.print_exception(exc, val, tb)
def __getattr__(self, attr):
"Delegate attribute access to the interpreter object"
return getattr(self.tk, attr)
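# A minimal usage sketch of the Tk lifecycle; mainloop() is inherited from
# Misc and runs until the root window is destroyed.
#
#     from Tkinter import *
#     root = Tk(className='Demo')   # creates the Tcl interpreter and '.'
#     Label(root, text='hello').pack()
#     root.mainloop()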
# Ideally, the classes Pack, Place and Grid disappear, the
# pack/place/grid methods are defined on the Widget class, and
# everybody uses w.pack_whatever(...) instead of Pack.whatever(w,
# ...), with pack(), place() and grid() being short for
# pack_configure(), place_configure() and grid_configure(), and
# forget() being short for pack_forget(). As a practical matter, I'm
# afraid that there is too much code out there that may be using the
# Pack, Place or Grid class, so I leave them intact -- but only as
# backwards compatibility features. Also note that those methods that
# take a master as argument (e.g. pack_propagate) have been moved to
# the Misc class (which now incorporates all methods common between
# toplevel and interior widgets). Again, for compatibility, these are
# copied into the Pack, Place or Grid class.
def Tcl(screenName=None, baseName=None, className='Tk', useTk=0):
return Tk(screenName, baseName, className, useTk)
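# A minimal usage sketch of Tcl(): a Tcl interpreter without a Tk main
# window (useTk=0); eval/getvar reach the interpreter object through
# Tk.__getattr__ delegation.
#
#     tcl = Tcl()
#     tcl.eval('set greeting "hello from Tcl"')
#     print(tcl.getvar('greeting'))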
class Pack:
"""Geometry manager Pack.
Base class to use the methods pack_* in every widget."""
def pack_configure(self, cnf={}, **kw):
"""Pack a widget in the parent widget. Use as options:
after=widget - pack it after you have packed widget
anchor=NSEW (or subset) - position widget according to
given direction
before=widget - pack it before you will pack widget
expand=bool - expand widget if parent size grows
fill=NONE or X or Y or BOTH - fill widget if widget grows
in=master - use master to contain this widget
in_=master - see 'in' option description
ipadx=amount - add internal padding in x direction
ipady=amount - add internal padding in y direction
padx=amount - add padding in x direction
pady=amount - add padding in y direction
side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget.
"""
self.tk.call(
('pack', 'configure', self._w)
+ self._options(cnf, kw))
pack = configure = config = pack_configure
def pack_forget(self):
"""Unmap this widget and do not use it for the packing order."""
self.tk.call('pack', 'forget', self._w)
forget = pack_forget
def pack_info(self):
"""Return information about the packing options
for this widget."""
words = self.tk.splitlist(
self.tk.call('pack', 'info', self._w))
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if value[:1] == '.':
value = self._nametowidget(value)
dict[key] = value
return dict
info = pack_info
propagate = pack_propagate = Misc.pack_propagate
slaves = pack_slaves = Misc.pack_slaves
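# A minimal usage sketch of the pack options documented above; the widgets
# and option values are illustrative.
#
#     from Tkinter import *
#     root = Tk()
#     Label(root, text='top').pack(side=TOP, fill=X, padx=4, pady=4)
#     Label(root, text='left').pack(side=LEFT, expand=1, fill=BOTH)
#     root.mainloop()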
class Place:
"""Geometry manager Place.
Base class to use the methods place_* in every widget."""
def place_configure(self, cnf={}, **kw):
"""Place a widget in the parent widget. Use as options:
in=master - master relative to which the widget is placed
in_=master - see 'in' option description
x=amount - locate anchor of this widget at position x of master
y=amount - locate anchor of this widget at position y of master
relx=amount - locate anchor of this widget between 0.0 and 1.0
relative to width of master (1.0 is right edge)
rely=amount - locate anchor of this widget between 0.0 and 1.0
relative to height of master (1.0 is bottom edge)
anchor=NSEW (or subset) - position anchor according to given direction
width=amount - width of this widget in pixel
height=amount - height of this widget in pixel
relwidth=amount - width of this widget between 0.0 and 1.0
relative to width of master (1.0 is the same width
as the master)
relheight=amount - height of this widget between 0.0 and 1.0
relative to height of master (1.0 is the same
height as the master)
bordermode="inside" or "outside" - whether to take border width of
master widget into account
"""
self.tk.call(
('place', 'configure', self._w)
+ self._options(cnf, kw))
place = configure = config = place_configure
def place_forget(self):
"""Unmap this widget."""
self.tk.call('place', 'forget', self._w)
forget = place_forget
def place_info(self):
"""Return information about the placing options
for this widget."""
words = self.tk.splitlist(
self.tk.call('place', 'info', self._w))
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if value[:1] == '.':
value = self._nametowidget(value)
dict[key] = value
return dict
info = place_info
slaves = place_slaves = Misc.place_slaves
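# A minimal usage sketch of place with relative coordinates: the anchor of
# the button is pinned to the center of its master.
#
#     from Tkinter import *
#     root = Tk()
#     b = Button(root, text='centered')
#     b.place(relx=0.5, rely=0.5, anchor=CENTER)
#     root.mainloop()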
class Grid:
"""Geometry manager Grid.
Base class to use the methods grid_* in every widget."""
# Thanks to Masazumi Yoshikawa (yosikawa@isi.edu)
def grid_configure(self, cnf={}, **kw):
"""Position a widget in the parent widget in a grid. Use as options:
column=number - use cell identified with given column (starting with 0)
columnspan=number - this widget will span several columns
in=master - use master to contain this widget
in_=master - see 'in' option description
ipadx=amount - add internal padding in x direction
ipady=amount - add internal padding in y direction
padx=amount - add padding in x direction
pady=amount - add padding in y direction
row=number - use cell identified with given row (starting with 0)
rowspan=number - this widget will span several rows
        sticky=NSEW - if the cell is larger, stick this widget to the
                      given sides of the cell boundary
"""
self.tk.call(
('grid', 'configure', self._w)
+ self._options(cnf, kw))
grid = configure = config = grid_configure
bbox = grid_bbox = Misc.grid_bbox
columnconfigure = grid_columnconfigure = Misc.grid_columnconfigure
def grid_forget(self):
"""Unmap this widget."""
self.tk.call('grid', 'forget', self._w)
forget = grid_forget
def grid_remove(self):
"""Unmap this widget but remember the grid options."""
self.tk.call('grid', 'remove', self._w)
def grid_info(self):
"""Return information about the options
for positioning this widget in a grid."""
words = self.tk.splitlist(
self.tk.call('grid', 'info', self._w))
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if value[:1] == '.':
value = self._nametowidget(value)
dict[key] = value
return dict
info = grid_info
location = grid_location = Misc.grid_location
propagate = grid_propagate = Misc.grid_propagate
rowconfigure = grid_rowconfigure = Misc.grid_rowconfigure
size = grid_size = Misc.grid_size
slaves = grid_slaves = Misc.grid_slaves
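# A minimal usage sketch of grid with a stretchable column; row/column
# numbers and the weight are illustrative.
#
#     from Tkinter import *
#     root = Tk()
#     Label(root, text='Name:').grid(row=0, column=0, sticky=E)
#     Entry(root).grid(row=0, column=1, sticky=EW)
#     root.grid_columnconfigure(1, weight=1)   # column 1 absorbs extra width
#     root.mainloop()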
class BaseWidget(Misc):
"""Internal class."""
def _setup(self, master, cnf):
"""Internal function. Sets up information about children."""
if _support_default_root:
global _default_root
if not master:
if not _default_root:
_default_root = Tk()
master = _default_root
self.master = master
self.tk = master.tk
name = None
if 'name' in cnf:
name = cnf['name']
del cnf['name']
if not name:
name = repr(id(self))
self._name = name
        if master._w == '.':
self._w = '.' + name
else:
self._w = master._w + '.' + name
self.children = {}
if self._name in self.master.children:
self.master.children[self._name].destroy()
self.master.children[self._name] = self
def __init__(self, master, widgetName, cnf={}, kw={}, extra=()):
"""Construct a widget with the parent widget MASTER, a name WIDGETNAME
and appropriate options."""
if kw:
cnf = _cnfmerge((cnf, kw))
self.widgetName = widgetName
BaseWidget._setup(self, master, cnf)
if self._tclCommands is None:
self._tclCommands = []
classes = []
for k in cnf.keys():
if type(k) is ClassType:
classes.append((k, cnf[k]))
del cnf[k]
self.tk.call(
(widgetName, self._w) + extra + self._options(cnf))
for k, v in classes:
k.configure(self, v)
def destroy(self):
"""Destroy this and all descendants widgets."""
for c in self.children.values(): c.destroy()
self.tk.call('destroy', self._w)
if self._name in self.master.children:
del self.master.children[self._name]
Misc.destroy(self)
def _do(self, name, args=()):
# XXX Obsolete -- better use self.tk.call directly!
return self.tk.call((self._w, name) + args)
class Widget(BaseWidget, Pack, Place, Grid):
"""Internal class.
Base class for a widget which can be positioned with the geometry managers
Pack, Place or Grid."""
pass
class Toplevel(BaseWidget, Wm):
"""Toplevel widget, e.g. for dialogs."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a toplevel widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, class,
colormap, container, cursor, height, highlightbackground,
highlightcolor, highlightthickness, menu, relief, screen, takefocus,
use, visual, width."""
if kw:
cnf = _cnfmerge((cnf, kw))
extra = ()
for wmkey in ['screen', 'class_', 'class', 'visual',
'colormap']:
if wmkey in cnf:
val = cnf[wmkey]
# TBD: a hack needed because some keys
# are not valid as keyword arguments
if wmkey[-1] == '_': opt = '-'+wmkey[:-1]
else: opt = '-'+wmkey
extra = extra + (opt, val)
del cnf[wmkey]
BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)
root = self._root()
self.iconname(root.iconname())
self.title(root.title())
self.protocol("WM_DELETE_WINDOW", self.destroy)
class Button(Widget):
"""Button widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a button widget with the parent MASTER.
STANDARD OPTIONS
activebackground, activeforeground, anchor,
background, bitmap, borderwidth, cursor,
disabledforeground, font, foreground
highlightbackground, highlightcolor,
highlightthickness, image, justify,
padx, pady, relief, repeatdelay,
repeatinterval, takefocus, text,
textvariable, underline, wraplength
WIDGET-SPECIFIC OPTIONS
command, compound, default, height,
overrelief, state, width
"""
Widget.__init__(self, master, 'button', cnf, kw)
def tkButtonEnter(self, *dummy):
self.tk.call('tkButtonEnter', self._w)
def tkButtonLeave(self, *dummy):
self.tk.call('tkButtonLeave', self._w)
def tkButtonDown(self, *dummy):
self.tk.call('tkButtonDown', self._w)
def tkButtonUp(self, *dummy):
self.tk.call('tkButtonUp', self._w)
def tkButtonInvoke(self, *dummy):
self.tk.call('tkButtonInvoke', self._w)
def flash(self):
"""Flash the button.
This is accomplished by redisplaying
the button several times, alternating between active and
normal colors. At the end of the flash the button is left
in the same normal/active state as when the command was
invoked. This command is ignored if the button's state is
disabled.
"""
self.tk.call(self._w, 'flash')
def invoke(self):
"""Invoke the command associated with the button.
The return value is the return value from the command,
or an empty string if there is no command associated with
the button. This command is ignored if the button's state
is disabled.
"""
return self.tk.call(self._w, 'invoke')
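# A minimal usage sketch of Button: the command resource names a Python
# callable that is invoked on activation; the callback is illustrative.
#
#     from Tkinter import *
#     root = Tk()
#     def on_click():
#         print('clicked')
#     Button(root, text='Press me', command=on_click).pack()
#     root.mainloop()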
# Indices:
# XXX I don't like these -- take them away
def AtEnd():
return 'end'
def AtInsert(*args):
s = 'insert'
for a in args:
if a: s = s + (' ' + a)
return s
def AtSelFirst():
return 'sel.first'
def AtSelLast():
return 'sel.last'
def At(x, y=None):
if y is None:
return '@%r' % (x,)
else:
return '@%r,%r' % (x, y)
class Canvas(Widget, XView, YView):
"""Canvas widget to display graphical elements like lines or text."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a canvas widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, closeenough,
confine, cursor, height, highlightbackground, highlightcolor,
highlightthickness, insertbackground, insertborderwidth,
insertofftime, insertontime, insertwidth, offset, relief,
scrollregion, selectbackground, selectborderwidth, selectforeground,
state, takefocus, width, xscrollcommand, xscrollincrement,
yscrollcommand, yscrollincrement."""
Widget.__init__(self, master, 'canvas', cnf, kw)
def addtag(self, *args):
"""Internal function."""
self.tk.call((self._w, 'addtag') + args)
def addtag_above(self, newtag, tagOrId):
"""Add tag NEWTAG to all items above TAGORID."""
self.addtag(newtag, 'above', tagOrId)
def addtag_all(self, newtag):
"""Add tag NEWTAG to all items."""
self.addtag(newtag, 'all')
def addtag_below(self, newtag, tagOrId):
"""Add tag NEWTAG to all items below TAGORID."""
self.addtag(newtag, 'below', tagOrId)
def addtag_closest(self, newtag, x, y, halo=None, start=None):
"""Add tag NEWTAG to item which is closest to pixel at X, Y.
        If several match take the top-most.
        All items closer than HALO are considered overlapping (all are
        closest). If START is specified the next below this tag is taken."""
self.addtag(newtag, 'closest', x, y, halo, start)
def addtag_enclosed(self, newtag, x1, y1, x2, y2):
"""Add tag NEWTAG to all items in the rectangle defined
by X1,Y1,X2,Y2."""
self.addtag(newtag, 'enclosed', x1, y1, x2, y2)
def addtag_overlapping(self, newtag, x1, y1, x2, y2):
"""Add tag NEWTAG to all items which overlap the rectangle
defined by X1,Y1,X2,Y2."""
self.addtag(newtag, 'overlapping', x1, y1, x2, y2)
def addtag_withtag(self, newtag, tagOrId):
"""Add tag NEWTAG to all items with TAGORID."""
self.addtag(newtag, 'withtag', tagOrId)
def bbox(self, *args):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
which encloses all items with tags specified as arguments."""
return self._getints(
self.tk.call((self._w, 'bbox') + args)) or None
def tag_unbind(self, tagOrId, sequence, funcid=None):
"""Unbind for all items with TAGORID for event SEQUENCE the
function identified with FUNCID."""
self.tk.call(self._w, 'bind', tagOrId, sequence, '')
if funcid:
self.deletecommand(funcid)
def tag_bind(self, tagOrId, sequence=None, func=None, add=None):
"""Bind to all items with TAGORID at event SEQUENCE a call to function FUNC.
An additional boolean parameter ADD specifies whether FUNC will be
called additionally to the other bound function or whether it will
replace the previous function. See bind for the return value."""
return self._bind((self._w, 'bind', tagOrId),
sequence, func, add)
def canvasx(self, screenx, gridspacing=None):
"""Return the canvas x coordinate of pixel position SCREENX rounded
to nearest multiple of GRIDSPACING units."""
return getdouble(self.tk.call(
self._w, 'canvasx', screenx, gridspacing))
def canvasy(self, screeny, gridspacing=None):
"""Return the canvas y coordinate of pixel position SCREENY rounded
to nearest multiple of GRIDSPACING units."""
return getdouble(self.tk.call(
self._w, 'canvasy', screeny, gridspacing))
def coords(self, *args):
"""Return a list of coordinates for the item given in ARGS."""
# XXX Should use _flatten on args
return map(getdouble,
self.tk.splitlist(
self.tk.call((self._w, 'coords') + args)))
def _create(self, itemType, args, kw): # Args: (val, val, ..., cnf={})
"""Internal function."""
args = _flatten(args)
cnf = args[-1]
if type(cnf) in (DictionaryType, TupleType):
args = args[:-1]
else:
cnf = {}
return getint(self.tk.call(
self._w, 'create', itemType,
*(args + self._options(cnf, kw))))
def create_arc(self, *args, **kw):
"""Create arc shaped region with coordinates x1,y1,x2,y2."""
return self._create('arc', args, kw)
def create_bitmap(self, *args, **kw):
"""Create bitmap with coordinates x1,y1."""
return self._create('bitmap', args, kw)
def create_image(self, *args, **kw):
"""Create image item with coordinates x1,y1."""
return self._create('image', args, kw)
def create_line(self, *args, **kw):
"""Create line with coordinates x1,y1,...,xn,yn."""
return self._create('line', args, kw)
def create_oval(self, *args, **kw):
"""Create oval with coordinates x1,y1,x2,y2."""
return self._create('oval', args, kw)
def create_polygon(self, *args, **kw):
"""Create polygon with coordinates x1,y1,...,xn,yn."""
return self._create('polygon', args, kw)
def create_rectangle(self, *args, **kw):
"""Create rectangle with coordinates x1,y1,x2,y2."""
return self._create('rectangle', args, kw)
def create_text(self, *args, **kw):
"""Create text with coordinates x1,y1."""
return self._create('text', args, kw)
def create_window(self, *args, **kw):
"""Create window with coordinates x1,y1,x2,y2."""
return self._create('window', args, kw)
def dchars(self, *args):
"""Delete characters of text items identified by tag or id in ARGS (possibly
        several times) from FIRST to LAST character (inclusive)."""
self.tk.call((self._w, 'dchars') + args)
def delete(self, *args):
"""Delete items identified by all tag or ids contained in ARGS."""
self.tk.call((self._w, 'delete') + args)
def dtag(self, *args):
"""Delete tag or id given as last arguments in ARGS from items
identified by first argument in ARGS."""
self.tk.call((self._w, 'dtag') + args)
def find(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'find') + args)) or ()
def find_above(self, tagOrId):
"""Return items above TAGORID."""
return self.find('above', tagOrId)
def find_all(self):
"""Return all items."""
return self.find('all')
def find_below(self, tagOrId):
"""Return all items below TAGORID."""
return self.find('below', tagOrId)
def find_closest(self, x, y, halo=None, start=None):
"""Return item which is closest to pixel at X, Y.
        If several match take the top-most.
        All items closer than HALO are considered overlapping (all are
        closest). If START is specified the next below this tag is taken."""
return self.find('closest', x, y, halo, start)
def find_enclosed(self, x1, y1, x2, y2):
"""Return all items in rectangle defined
by X1,Y1,X2,Y2."""
return self.find('enclosed', x1, y1, x2, y2)
def find_overlapping(self, x1, y1, x2, y2):
"""Return all items which overlap the rectangle
defined by X1,Y1,X2,Y2."""
return self.find('overlapping', x1, y1, x2, y2)
def find_withtag(self, tagOrId):
"""Return all items with TAGORID."""
return self.find('withtag', tagOrId)
def focus(self, *args):
"""Set focus to the first item specified in ARGS."""
return self.tk.call((self._w, 'focus') + args)
def gettags(self, *args):
"""Return tags associated with the first item specified in ARGS."""
return self.tk.splitlist(
self.tk.call((self._w, 'gettags') + args))
def icursor(self, *args):
"""Set cursor at position POS in the item identified by TAGORID.
In ARGS TAGORID must be first."""
self.tk.call((self._w, 'icursor') + args)
def index(self, *args):
"""Return position of cursor as integer in item specified in ARGS."""
return getint(self.tk.call((self._w, 'index') + args))
def insert(self, *args):
"""Insert TEXT in item TAGORID at position POS. ARGS must
be TAGORID POS TEXT."""
self.tk.call((self._w, 'insert') + args)
def itemcget(self, tagOrId, option):
"""Return the resource value for an OPTION for item TAGORID."""
return self.tk.call(
(self._w, 'itemcget') + (tagOrId, '-'+option))
def itemconfigure(self, tagOrId, cnf=None, **kw):
"""Configure resources of an item TAGORID.
The values for resources are specified as keyword
arguments. To get an overview about
the allowed keyword arguments call the method without arguments.
"""
return self._configure(('itemconfigure', tagOrId), cnf, kw)
itemconfig = itemconfigure
# lower, tkraise/lift hide Misc.lower, Misc.tkraise/lift,
# so the preferred name for them is tag_lower, tag_raise
# (similar to tag_bind, and similar to the Text widget);
# unfortunately can't delete the old ones yet (maybe in 1.6)
def tag_lower(self, *args):
"""Lower an item TAGORID given in ARGS
(optional below another item)."""
self.tk.call((self._w, 'lower') + args)
lower = tag_lower
def move(self, *args):
"""Move an item TAGORID given in ARGS."""
self.tk.call((self._w, 'move') + args)
def postscript(self, cnf={}, **kw):
"""Print the contents of the canvas to a postscript
file. Valid options: colormap, colormode, file, fontmap,
height, pageanchor, pageheight, pagewidth, pagex, pagey,
rotate, witdh, x, y."""
return self.tk.call((self._w, 'postscript') +
self._options(cnf, kw))
def tag_raise(self, *args):
"""Raise an item TAGORID given in ARGS
(optional above another item)."""
self.tk.call((self._w, 'raise') + args)
lift = tkraise = tag_raise
def scale(self, *args):
"""Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE."""
self.tk.call((self._w, 'scale') + args)
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y, gain=10):
"""Adjust the view of the canvas to GAIN times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y, gain)
def select_adjust(self, tagOrId, index):
"""Adjust the end of the selection near the cursor of an item TAGORID to index."""
self.tk.call(self._w, 'select', 'adjust', tagOrId, index)
def select_clear(self):
"""Clear the selection if it is in this widget."""
self.tk.call(self._w, 'select', 'clear')
def select_from(self, tagOrId, index):
"""Set the fixed end of a selection in item TAGORID to INDEX."""
self.tk.call(self._w, 'select', 'from', tagOrId, index)
def select_item(self):
"""Return the item which has the selection."""
return self.tk.call(self._w, 'select', 'item') or None
def select_to(self, tagOrId, index):
"""Set the variable end of a selection in item TAGORID to INDEX."""
self.tk.call(self._w, 'select', 'to', tagOrId, index)
def type(self, tagOrId):
"""Return the type of the item TAGORID."""
return self.tk.call(self._w, 'type', tagOrId) or None
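# A minimal usage sketch of Canvas items and tags; coordinates and colors
# are illustrative.
#
#     from Tkinter import *
#     root = Tk()
#     c = Canvas(root, width=200, height=100)
#     c.pack()
#     c.create_rectangle(10, 10, 90, 60, fill='red', tags='shape')
#     c.create_text(100, 80, text='hello')
#     c.move('shape', 20, 0)    # moves every item tagged 'shape'
#     root.mainloop()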
class Checkbutton(Widget):
"""Checkbutton widget which is either in on- or off-state."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a checkbutton widget with the parent MASTER.
Valid resource names: activebackground, activeforeground, anchor,
background, bd, bg, bitmap, borderwidth, command, cursor,
disabledforeground, fg, font, foreground, height,
highlightbackground, highlightcolor, highlightthickness, image,
indicatoron, justify, offvalue, onvalue, padx, pady, relief,
selectcolor, selectimage, state, takefocus, text, textvariable,
underline, variable, width, wraplength."""
Widget.__init__(self, master, 'checkbutton', cnf, kw)
def deselect(self):
"""Put the button in off-state."""
self.tk.call(self._w, 'deselect')
def flash(self):
"""Flash the button."""
self.tk.call(self._w, 'flash')
def invoke(self):
"""Toggle the button and invoke a command if given as resource."""
return self.tk.call(self._w, 'invoke')
def select(self):
"""Put the button in on-state."""
self.tk.call(self._w, 'select')
def toggle(self):
"""Toggle the button."""
self.tk.call(self._w, 'toggle')
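# A minimal usage sketch of Checkbutton tied to an IntVar (defined earlier
# in this module): the variable holds 1 in on-state and 0 in off-state.
#
#     from Tkinter import *
#     root = Tk()
#     flag = IntVar(root)
#     Checkbutton(root, text='Enable', variable=flag).pack()
#     root.mainloop()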
class Entry(Widget, XView):
"""Entry widget which allows to display simple text."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct an entry widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, cursor,
exportselection, fg, font, foreground, highlightbackground,
highlightcolor, highlightthickness, insertbackground,
insertborderwidth, insertofftime, insertontime, insertwidth,
invalidcommand, invcmd, justify, relief, selectbackground,
selectborderwidth, selectforeground, show, state, takefocus,
textvariable, validate, validatecommand, vcmd, width,
xscrollcommand."""
Widget.__init__(self, master, 'entry', cnf, kw)
def delete(self, first, last=None):
"""Delete text from FIRST to LAST (not included)."""
self.tk.call(self._w, 'delete', first, last)
def get(self):
"""Return the text."""
return self.tk.call(self._w, 'get')
def icursor(self, index):
"""Insert cursor at INDEX."""
self.tk.call(self._w, 'icursor', index)
def index(self, index):
"""Return position of cursor."""
return getint(self.tk.call(
self._w, 'index', index))
def insert(self, index, string):
"""Insert STRING at INDEX."""
self.tk.call(self._w, 'insert', index, string)
def scan_mark(self, x):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x)
def scan_dragto(self, x):
"""Adjust the view of the canvas to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x)
def selection_adjust(self, index):
"""Adjust the end of the selection near the cursor to INDEX."""
self.tk.call(self._w, 'selection', 'adjust', index)
select_adjust = selection_adjust
def selection_clear(self):
"""Clear the selection if it is in this widget."""
self.tk.call(self._w, 'selection', 'clear')
select_clear = selection_clear
def selection_from(self, index):
"""Set the fixed end of a selection to INDEX."""
self.tk.call(self._w, 'selection', 'from', index)
select_from = selection_from
def selection_present(self):
"""Return True if there are characters selected in the entry, False
otherwise."""
return self.tk.getboolean(
self.tk.call(self._w, 'selection', 'present'))
select_present = selection_present
def selection_range(self, start, end):
"""Set the selection from START to END (not included)."""
self.tk.call(self._w, 'selection', 'range', start, end)
select_range = selection_range
def selection_to(self, index):
"""Set the variable end of a selection to INDEX."""
self.tk.call(self._w, 'selection', 'to', index)
select_to = selection_to
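# A minimal usage sketch of Entry editing and selection calls; the inserted
# text is illustrative.
#
#     from Tkinter import *
#     root = Tk()
#     e = Entry(root)
#     e.pack()
#     e.insert(0, 'initial text')    # insert STRING at INDEX
#     e.selection_range(0, END)      # select everything
#     print(e.get())
#     root.mainloop()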
class Frame(Widget):
"""Frame widget which may contain other widgets and can have a 3D border."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a frame widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, class,
colormap, container, cursor, height, highlightbackground,
highlightcolor, highlightthickness, relief, takefocus, visual, width."""
cnf = _cnfmerge((cnf, kw))
extra = ()
if 'class_' in cnf:
extra = ('-class', cnf['class_'])
del cnf['class_']
elif 'class' in cnf:
extra = ('-class', cnf['class'])
del cnf['class']
Widget.__init__(self, master, 'frame', cnf, {}, extra)
class Label(Widget):
"""Label widget which can display text and bitmaps."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a label widget with the parent MASTER.
STANDARD OPTIONS
activebackground, activeforeground, anchor,
background, bitmap, borderwidth, cursor,
disabledforeground, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, image, justify,
padx, pady, relief, takefocus, text,
textvariable, underline, wraplength
WIDGET-SPECIFIC OPTIONS
height, state, width
"""
Widget.__init__(self, master, 'label', cnf, kw)
class Listbox(Widget, XView, YView):
"""Listbox widget which can display a list of strings."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a listbox widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, cursor,
exportselection, fg, font, foreground, height, highlightbackground,
highlightcolor, highlightthickness, relief, selectbackground,
selectborderwidth, selectforeground, selectmode, setgrid, takefocus,
width, xscrollcommand, yscrollcommand, listvariable."""
Widget.__init__(self, master, 'listbox', cnf, kw)
def activate(self, index):
"""Activate item identified by INDEX."""
self.tk.call(self._w, 'activate', index)
def bbox(self, *args):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
which encloses the item identified by index in ARGS."""
return self._getints(
self.tk.call((self._w, 'bbox') + args)) or None
def curselection(self):
"""Return list of indices of currently selected item."""
# XXX Ought to apply self._getints()...
return self.tk.splitlist(self.tk.call(
self._w, 'curselection'))
def delete(self, first, last=None):
"""Delete items from FIRST to LAST (not included)."""
self.tk.call(self._w, 'delete', first, last)
def get(self, first, last=None):
"""Get list of items from FIRST to LAST (not included)."""
if last:
return self.tk.splitlist(self.tk.call(
self._w, 'get', first, last))
else:
return self.tk.call(self._w, 'get', first)
def index(self, index):
"""Return index of item identified with INDEX."""
i = self.tk.call(self._w, 'index', index)
if i == 'none': return None
return getint(i)
def insert(self, index, *elements):
"""Insert ELEMENTS at INDEX."""
self.tk.call((self._w, 'insert', index) + elements)
def nearest(self, y):
"""Get index of item which is nearest to y coordinate Y."""
return getint(self.tk.call(
self._w, 'nearest', y))
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y):
"""Adjust the view of the listbox to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y)
def see(self, index):
"""Scroll such that INDEX is visible."""
self.tk.call(self._w, 'see', index)
def selection_anchor(self, index):
"""Set the fixed end oft the selection to INDEX."""
self.tk.call(self._w, 'selection', 'anchor', index)
select_anchor = selection_anchor
def selection_clear(self, first, last=None):
"""Clear the selection from FIRST to LAST (not included)."""
self.tk.call(self._w,
'selection', 'clear', first, last)
select_clear = selection_clear
def selection_includes(self, index):
"""Return 1 if INDEX is part of the selection."""
return self.tk.getboolean(self.tk.call(
self._w, 'selection', 'includes', index))
select_includes = selection_includes
def selection_set(self, first, last=None):
"""Set the selection from FIRST to LAST (not included) without
changing the currently selected elements."""
self.tk.call(self._w, 'selection', 'set', first, last)
select_set = selection_set
def size(self):
"""Return the number of elements in the listbox."""
return getint(self.tk.call(self._w, 'size'))
def itemcget(self, index, option):
"""Return the resource value for an ITEM and an OPTION."""
return self.tk.call(
(self._w, 'itemcget') + (index, '-'+option))
def itemconfigure(self, index, cnf=None, **kw):
"""Configure resources of an ITEM.
The values for resources are specified as keyword arguments.
To get an overview about the allowed keyword arguments
call the method without arguments.
Valid resource names: background, bg, foreground, fg,
selectbackground, selectforeground."""
return self._configure(('itemconfigure', index), cnf, kw)
itemconfig = itemconfigure
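# A minimal usage sketch of Listbox: fill it, select an entry, and read the
# selection back; the items are illustrative.
#
#     from Tkinter import *
#     root = Tk()
#     lb = Listbox(root, selectmode=BROWSE)
#     lb.pack()
#     for item in ('red', 'green', 'blue'):
#         lb.insert(END, item)
#     lb.selection_set(0)
#     print(lb.get(lb.curselection()[0]))
#     root.mainloop()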
class Menu(Widget):
"""Menu widget which allows to display menu bars, pull-down menus and pop-up menus."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct menu widget with the parent MASTER.
Valid resource names: activebackground, activeborderwidth,
activeforeground, background, bd, bg, borderwidth, cursor,
disabledforeground, fg, font, foreground, postcommand, relief,
selectcolor, takefocus, tearoff, tearoffcommand, title, type."""
Widget.__init__(self, master, 'menu', cnf, kw)
def tk_bindForTraversal(self):
pass # obsolete since Tk 4.0
def tk_mbPost(self):
self.tk.call('tk_mbPost', self._w)
def tk_mbUnpost(self):
self.tk.call('tk_mbUnpost')
def tk_traverseToMenu(self, char):
self.tk.call('tk_traverseToMenu', self._w, char)
def tk_traverseWithinMenu(self, char):
self.tk.call('tk_traverseWithinMenu', self._w, char)
def tk_getMenuButtons(self):
return self.tk.call('tk_getMenuButtons', self._w)
def tk_nextMenu(self, count):
self.tk.call('tk_nextMenu', count)
def tk_nextMenuEntry(self, count):
self.tk.call('tk_nextMenuEntry', count)
def tk_invokeMenu(self):
self.tk.call('tk_invokeMenu', self._w)
def tk_firstMenu(self):
self.tk.call('tk_firstMenu', self._w)
def tk_mbButtonDown(self):
self.tk.call('tk_mbButtonDown', self._w)
def tk_popup(self, x, y, entry=""):
"""Post the menu at position X,Y with entry ENTRY."""
self.tk.call('tk_popup', self._w, x, y, entry)
def activate(self, index):
"""Activate entry at INDEX."""
self.tk.call(self._w, 'activate', index)
def add(self, itemType, cnf={}, **kw):
"""Internal function."""
self.tk.call((self._w, 'add', itemType) +
self._options(cnf, kw))
def add_cascade(self, cnf={}, **kw):
"""Add hierarchical menu item."""
self.add('cascade', cnf or kw)
def add_checkbutton(self, cnf={}, **kw):
"""Add checkbutton menu item."""
self.add('checkbutton', cnf or kw)
def add_command(self, cnf={}, **kw):
"""Add command menu item."""
self.add('command', cnf or kw)
def add_radiobutton(self, cnf={}, **kw):
"""Addd radio menu item."""
self.add('radiobutton', cnf or kw)
def add_separator(self, cnf={}, **kw):
"""Add separator."""
self.add('separator', cnf or kw)
def insert(self, index, itemType, cnf={}, **kw):
"""Internal function."""
self.tk.call((self._w, 'insert', index, itemType) +
self._options(cnf, kw))
def insert_cascade(self, index, cnf={}, **kw):
"""Add hierarchical menu item at INDEX."""
self.insert(index, 'cascade', cnf or kw)
def insert_checkbutton(self, index, cnf={}, **kw):
"""Add checkbutton menu item at INDEX."""
self.insert(index, 'checkbutton', cnf or kw)
def insert_command(self, index, cnf={}, **kw):
"""Add command menu item at INDEX."""
self.insert(index, 'command', cnf or kw)
def insert_radiobutton(self, index, cnf={}, **kw):
"""Addd radio menu item at INDEX."""
self.insert(index, 'radiobutton', cnf or kw)
def insert_separator(self, index, cnf={}, **kw):
"""Add separator at INDEX."""
self.insert(index, 'separator', cnf or kw)
def delete(self, index1, index2=None):
"""Delete menu items between INDEX1 and INDEX2 (included)."""
if index2 is None:
index2 = index1
num_index1, num_index2 = self.index(index1), self.index(index2)
if (num_index1 is None) or (num_index2 is None):
num_index1, num_index2 = 0, -1
for i in range(num_index1, num_index2 + 1):
if 'command' in self.entryconfig(i):
c = str(self.entrycget(i, 'command'))
if c:
self.deletecommand(c)
self.tk.call(self._w, 'delete', index1, index2)
def entrycget(self, index, option):
"""Return the resource value of an menu item for OPTION at INDEX."""
return self.tk.call(self._w, 'entrycget', index, '-' + option)
def entryconfigure(self, index, cnf=None, **kw):
"""Configure a menu item at INDEX."""
return self._configure(('entryconfigure', index), cnf, kw)
entryconfig = entryconfigure
def index(self, index):
"""Return the index of a menu item identified by INDEX."""
i = self.tk.call(self._w, 'index', index)
if i == 'none': return None
return getint(i)
def invoke(self, index):
"""Invoke a menu item identified by INDEX and execute
the associated command."""
return self.tk.call(self._w, 'invoke', index)
def post(self, x, y):
"""Display a menu at position X,Y."""
self.tk.call(self._w, 'post', x, y)
def type(self, index):
"""Return the type of the menu item at INDEX."""
return self.tk.call(self._w, 'type', index)
def unpost(self):
"""Unmap a menu."""
self.tk.call(self._w, 'unpost')
def yposition(self, index):
"""Return the y-position of the topmost pixel of the menu item at INDEX."""
return getint(self.tk.call(
self._w, 'yposition', index))
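# A minimal usage sketch of a menu bar built from cascaded Menu widgets;
# the labels and command are illustrative.
#
#     from Tkinter import *
#     root = Tk()
#     menubar = Menu(root)
#     filemenu = Menu(menubar, tearoff=0)
#     filemenu.add_command(label='Quit', command=root.destroy)
#     menubar.add_cascade(label='File', menu=filemenu)
#     root.config(menu=menubar)    # install the bar on the toplevel
#     root.mainloop()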
class Menubutton(Widget):
"""Menubutton widget, obsolete since Tk8.0."""
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'menubutton', cnf, kw)
class Message(Widget):
"""Message widget to display multiline text. Obsolete since Label does it too."""
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'message', cnf, kw)
class Radiobutton(Widget):
"""Radiobutton widget which shows only one of several buttons in on-state."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a radiobutton widget with the parent MASTER.
Valid resource names: activebackground, activeforeground, anchor,
background, bd, bg, bitmap, borderwidth, command, cursor,
disabledforeground, fg, font, foreground, height,
highlightbackground, highlightcolor, highlightthickness, image,
indicatoron, justify, padx, pady, relief, selectcolor, selectimage,
state, takefocus, text, textvariable, underline, value, variable,
width, wraplength."""
Widget.__init__(self, master, 'radiobutton', cnf, kw)
def deselect(self):
"""Put the button in off-state."""
self.tk.call(self._w, 'deselect')
def flash(self):
"""Flash the button."""
self.tk.call(self._w, 'flash')
def invoke(self):
"""Toggle the button and invoke a command if given as resource."""
return self.tk.call(self._w, 'invoke')
def select(self):
"""Put the button in on-state."""
self.tk.call(self._w, 'select')
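# A minimal usage sketch of Radiobuttons sharing one variable, so that at
# most one of them is in on-state; the values are illustrative.
#
#     from Tkinter import *
#     root = Tk()
#     choice = StringVar(root)
#     choice.set('a')
#     Radiobutton(root, text='A', variable=choice, value='a').pack()
#     Radiobutton(root, text='B', variable=choice, value='b').pack()
#     root.mainloop()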
class Scale(Widget):
"""Scale widget which can display a numerical scale."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a scale widget with the parent MASTER.
Valid resource names: activebackground, background, bigincrement, bd,
bg, borderwidth, command, cursor, digits, fg, font, foreground, from,
highlightbackground, highlightcolor, highlightthickness, label,
length, orient, relief, repeatdelay, repeatinterval, resolution,
showvalue, sliderlength, sliderrelief, state, takefocus,
tickinterval, to, troughcolor, variable, width."""
Widget.__init__(self, master, 'scale', cnf, kw)
def get(self):
"""Get the current value as integer or float."""
value = self.tk.call(self._w, 'get')
try:
return getint(value)
except ValueError:
return getdouble(value)
def set(self, value):
"""Set the value to VALUE."""
self.tk.call(self._w, 'set', value)
def coords(self, value=None):
"""Return a tuple (X,Y) of the point along the centerline of the
trough that corresponds to VALUE or the current value if None is
given."""
return self._getints(self.tk.call(self._w, 'coords', value))
def identify(self, x, y):
"""Return where the point X,Y lies. Valid return values are "slider",
"though1" and "though2"."""
return self.tk.call(self._w, 'identify', x, y)
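# A minimal usage sketch of Scale; the range and orientation are
# illustrative.
#
#     from Tkinter import *
#     root = Tk()
#     s = Scale(root, from_=0, to=100, orient=HORIZONTAL)
#     s.pack()
#     s.set(25)          # move the slider
#     print(s.get())     # -> 25
#     root.mainloop()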
class Scrollbar(Widget):
"""Scrollbar widget which displays a slider at a certain position."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a scrollbar widget with the parent MASTER.
Valid resource names: activebackground, activerelief,
background, bd, bg, borderwidth, command, cursor,
elementborderwidth, highlightbackground,
highlightcolor, highlightthickness, jump, orient,
relief, repeatdelay, repeatinterval, takefocus,
troughcolor, width."""
Widget.__init__(self, master, 'scrollbar', cnf, kw)
def activate(self, index):
"""Display the element at INDEX with activebackground and activerelief.
INDEX can be "arrow1","slider" or "arrow2"."""
self.tk.call(self._w, 'activate', index)
def delta(self, deltax, deltay):
"""Return the fractional change of the scrollbar setting if it
would be moved by DELTAX or DELTAY pixels."""
return getdouble(
self.tk.call(self._w, 'delta', deltax, deltay))
def fraction(self, x, y):
"""Return the fractional value which corresponds to a slider
position of X,Y."""
return getdouble(self.tk.call(self._w, 'fraction', x, y))
def identify(self, x, y):
"""Return the element under position X,Y as one of
"arrow1","slider","arrow2" or ""."""
return self.tk.call(self._w, 'identify', x, y)
def get(self):
"""Return the current fractional values (upper and lower end)
of the slider position."""
return self._getdoubles(self.tk.call(self._w, 'get'))
def set(self, *args):
"""Set the fractional values of the slider position (upper and
lower ends as value between 0 and 1)."""
self.tk.call((self._w, 'set') + args)
class Text(Widget, XView, YView):
"""Text widget which can display text in various forms."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a text widget with the parent MASTER.
STANDARD OPTIONS
background, borderwidth, cursor,
exportselection, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, insertbackground,
insertborderwidth, insertofftime,
insertontime, insertwidth, padx, pady,
relief, selectbackground,
selectborderwidth, selectforeground,
setgrid, takefocus,
xscrollcommand, yscrollcommand,
WIDGET-SPECIFIC OPTIONS
autoseparators, height, maxundo,
spacing1, spacing2, spacing3,
state, tabs, undo, width, wrap,
"""
Widget.__init__(self, master, 'text', cnf, kw)
def bbox(self, *args):
"""Return a tuple of (x,y,width,height) which gives the bounding
box of the visible part of the character at the index in ARGS."""
return self._getints(
self.tk.call((self._w, 'bbox') + args)) or None
def tk_textSelectTo(self, index):
self.tk.call('tk_textSelectTo', self._w, index)
def tk_textBackspace(self):
self.tk.call('tk_textBackspace', self._w)
def tk_textIndexCloser(self, a, b, c):
self.tk.call('tk_textIndexCloser', self._w, a, b, c)
def tk_textResetAnchor(self, index):
self.tk.call('tk_textResetAnchor', self._w, index)
def compare(self, index1, op, index2):
"""Return whether between index INDEX1 and index INDEX2 the
relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=."""
return self.tk.getboolean(self.tk.call(
self._w, 'compare', index1, op, index2))
def debug(self, boolean=None):
"""Turn on the internal consistency checks of the B-Tree inside the text
widget according to BOOLEAN."""
return self.tk.getboolean(self.tk.call(
self._w, 'debug', boolean))
def delete(self, index1, index2=None):
"""Delete the characters between INDEX1 and INDEX2 (not included)."""
self.tk.call(self._w, 'delete', index1, index2)
def dlineinfo(self, index):
"""Return tuple (x,y,width,height,baseline) giving the bounding box
and baseline position of the visible part of the line containing
the character at INDEX."""
return self._getints(self.tk.call(self._w, 'dlineinfo', index))
def dump(self, index1, index2=None, command=None, **kw):
"""Return the contents of the widget between index1 and index2.
        The type of contents returned is filtered based on the keyword
parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are
given and true, then the corresponding items are returned. The result
is a list of triples of the form (key, value, index). If none of the
keywords are true then 'all' is used by default.
If the 'command' argument is given, it is called once for each element
of the list of triples, with the values of each triple serving as the
arguments to the function. In this case the list is not returned."""
args = []
func_name = None
result = None
if not command:
# Never call the dump command without the -command flag, since the
# output could involve Tcl quoting and would be a pain to parse
# right. Instead just set the command to build a list of triples
# as if we had done the parsing.
result = []
def append_triple(key, value, index, result=result):
result.append((key, value, index))
command = append_triple
try:
if not isinstance(command, str):
func_name = command = self._register(command)
args += ["-command", command]
for key in kw:
if kw[key]: args.append("-" + key)
args.append(index1)
if index2:
args.append(index2)
self.tk.call(self._w, "dump", *args)
return result
finally:
if func_name:
self.deletecommand(func_name)
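    # A minimal usage sketch of dump(): with no command it returns
    # (key, value, index) triples for the requested range; the keyword
    # filter below is illustrative.
    #
    #     t = Text(); t.insert('1.0', 'hello world')
    #     for key, value, index in t.dump('1.0', 'end', text=1):
    #         print(key, value, index)    # e.g. ('text', 'hello world\n', '1.0')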
## new in tk8.4
def edit(self, *args):
"""Internal method
This method controls the undo mechanism and
the modified flag. The exact behavior of the
command depends on the option argument that
follows the edit argument. The following forms
of the command are currently supported:
edit_modified, edit_redo, edit_reset, edit_separator
and edit_undo
"""
return self.tk.call(self._w, 'edit', *args)
def edit_modified(self, arg=None):
"""Get or Set the modified flag
If arg is not specified, returns the modified
flag of the widget. The insert, delete, edit undo and
edit redo commands or the user can set or clear the
modified flag. If boolean is specified, sets the
modified flag of the widget to arg.
"""
return self.edit("modified", arg)
def edit_redo(self):
"""Redo the last undone edit
When the undo option is true, reapplies the last
undone edits provided no other edits were done since
then. Generates an error when the redo stack is empty.
Does nothing when the undo option is false.
"""
return self.edit("redo")
def edit_reset(self):
"""Clears the undo and redo stacks
"""
return self.edit("reset")
def edit_separator(self):
"""Inserts a separator (boundary) on the undo stack.
Does nothing when the undo option is false
"""
return self.edit("separator")
def edit_undo(self):
"""Undoes the last edit action
If the undo option is true. An edit action is defined
as all the insert and delete commands that are recorded
on the undo stack in between two separators. Generates
an error when the undo stack is empty. Does nothing
when the undo option is false
"""
return self.edit("undo")
def get(self, index1, index2=None):
"""Return the text from INDEX1 to INDEX2 (not included)."""
return self.tk.call(self._w, 'get', index1, index2)
# (Image commands are new in 8.0)
def image_cget(self, index, option):
"""Return the value of OPTION of an embedded image at INDEX."""
if option[:1] != "-":
option = "-" + option
if option[-1:] == "_":
option = option[:-1]
return self.tk.call(self._w, "image", "cget", index, option)
def image_configure(self, index, cnf=None, **kw):
"""Configure an embedded image at INDEX."""
return self._configure(('image', 'configure', index), cnf, kw)
def image_create(self, index, cnf={}, **kw):
"""Create an embedded image at INDEX."""
return self.tk.call(
self._w, "image", "create", index,
*self._options(cnf, kw))
def image_names(self):
"""Return all names of embedded images in this widget."""
return self.tk.call(self._w, "image", "names")
def index(self, index):
"""Return the index in the form line.char for INDEX."""
return str(self.tk.call(self._w, 'index', index))
def insert(self, index, chars, *args):
"""Insert CHARS before the characters at INDEX. An additional
tag can be given in ARGS. Additional CHARS and tags can follow in ARGS."""
self.tk.call((self._w, 'insert', index, chars) + args)
def mark_gravity(self, markName, direction=None):
"""Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT).
Return the current value if None is given for DIRECTION."""
return self.tk.call(
(self._w, 'mark', 'gravity', markName, direction))
def mark_names(self):
"""Return all mark names."""
return self.tk.splitlist(self.tk.call(
self._w, 'mark', 'names'))
def mark_set(self, markName, index):
"""Set mark MARKNAME before the character at INDEX."""
self.tk.call(self._w, 'mark', 'set', markName, index)
def mark_unset(self, *markNames):
"""Delete all marks in MARKNAMES."""
self.tk.call((self._w, 'mark', 'unset') + markNames)
def mark_next(self, index):
"""Return the name of the next mark after INDEX."""
return self.tk.call(self._w, 'mark', 'next', index) or None
def mark_previous(self, index):
"""Return the name of the previous mark before INDEX."""
return self.tk.call(self._w, 'mark', 'previous', index) or None
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y):
"""Adjust the view of the text to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y)
def search(self, pattern, index, stopindex=None,
forwards=None, backwards=None, exact=None,
regexp=None, nocase=None, count=None, elide=None):
"""Search PATTERN beginning from INDEX until STOPINDEX.
Return the index of the first character of a match or an
empty string."""
args = [self._w, 'search']
if forwards: args.append('-forwards')
if backwards: args.append('-backwards')
if exact: args.append('-exact')
if regexp: args.append('-regexp')
if nocase: args.append('-nocase')
if elide: args.append('-elide')
if count: args.append('-count'); args.append(count)
if pattern and pattern[0] == '-': args.append('--')
args.append(pattern)
args.append(index)
if stopindex: args.append(stopindex)
return str(self.tk.call(tuple(args)))
def see(self, index):
"""Scroll such that the character at INDEX is visible."""
self.tk.call(self._w, 'see', index)
def tag_add(self, tagName, index1, *args):
"""Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS.
Additional pairs of indices may follow in ARGS."""
self.tk.call(
(self._w, 'tag', 'add', tagName, index1) + args)
def tag_unbind(self, tagName, sequence, funcid=None):
"""Unbind for all characters with TAGNAME for event SEQUENCE the
function identified with FUNCID."""
self.tk.call(self._w, 'tag', 'bind', tagName, sequence, '')
if funcid:
self.deletecommand(funcid)
def tag_bind(self, tagName, sequence, func, add=None):
"""Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC.
        An additional boolean parameter ADD specifies whether FUNC will be
        called in addition to the other bound function or whether it will
        replace the previous function. See bind for the return value."""
return self._bind((self._w, 'tag', 'bind', tagName),
sequence, func, add)
def tag_cget(self, tagName, option):
"""Return the value of OPTION for tag TAGNAME."""
if option[:1] != '-':
option = '-' + option
if option[-1:] == '_':
option = option[:-1]
return self.tk.call(self._w, 'tag', 'cget', tagName, option)
def tag_configure(self, tagName, cnf=None, **kw):
"""Configure a tag TAGNAME."""
return self._configure(('tag', 'configure', tagName), cnf, kw)
tag_config = tag_configure
def tag_delete(self, *tagNames):
"""Delete all tags in TAGNAMES."""
self.tk.call((self._w, 'tag', 'delete') + tagNames)
def tag_lower(self, tagName, belowThis=None):
"""Change the priority of tag TAGNAME such that it is lower
than the priority of BELOWTHIS."""
self.tk.call(self._w, 'tag', 'lower', tagName, belowThis)
def tag_names(self, index=None):
"""Return a list of all tag names."""
return self.tk.splitlist(
self.tk.call(self._w, 'tag', 'names', index))
def tag_nextrange(self, tagName, index1, index2=None):
"""Return a list of start and end index for the first sequence of
characters between INDEX1 and INDEX2 which all have tag TAGNAME.
The text is searched forward from INDEX1."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'nextrange', tagName, index1, index2))
def tag_prevrange(self, tagName, index1, index2=None):
"""Return a list of start and end index for the first sequence of
characters between INDEX1 and INDEX2 which all have tag TAGNAME.
The text is searched backwards from INDEX1."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'prevrange', tagName, index1, index2))
def tag_raise(self, tagName, aboveThis=None):
"""Change the priority of tag TAGNAME such that it is higher
than the priority of ABOVETHIS."""
self.tk.call(
self._w, 'tag', 'raise', tagName, aboveThis)
def tag_ranges(self, tagName):
"""Return a list of ranges of text which have tag TAGNAME."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'ranges', tagName))
def tag_remove(self, tagName, index1, index2=None):
"""Remove tag TAGNAME from all characters between INDEX1 and INDEX2."""
self.tk.call(
self._w, 'tag', 'remove', tagName, index1, index2)
def window_cget(self, index, option):
"""Return the value of OPTION of an embedded window at INDEX."""
if option[:1] != '-':
option = '-' + option
if option[-1:] == '_':
option = option[:-1]
return self.tk.call(self._w, 'window', 'cget', index, option)
def window_configure(self, index, cnf=None, **kw):
"""Configure an embedded window at INDEX."""
return self._configure(('window', 'configure', index), cnf, kw)
window_config = window_configure
def window_create(self, index, cnf={}, **kw):
"""Create a window at INDEX."""
self.tk.call(
(self._w, 'window', 'create', index)
+ self._options(cnf, kw))
def window_names(self):
"""Return all names of embedded windows in this widget."""
return self.tk.splitlist(
self.tk.call(self._w, 'window', 'names'))
def yview_pickplace(self, *what):
"""Obsolete function, use see."""
self.tk.call((self._w, 'yview', '-pickplace') + what)
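# Illustrative usage sketch for the Text widget above (assumes a running
# Tk root; the widget, tag and mark names are made up for the example):
#
#     text = Text(root)
#     text.insert('1.0', 'Hello world', ('greeting',))
#     text.tag_configure('greeting', foreground='blue')
#     text.mark_set('here', '1.5')
#     pos = text.search('world', '1.0', stopindex=END)  # -> '1.6'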
class _setit:
"""Internal class. It wraps the command in the widget OptionMenu."""
def __init__(self, var, value, callback=None):
self.__value = value
self.__var = var
self.__callback = callback
def __call__(self, *args):
self.__var.set(self.__value)
if self.__callback:
self.__callback(self.__value, *args)
class OptionMenu(Menubutton):
"""OptionMenu which allows the user to select a value from a menu."""
def __init__(self, master, variable, value, *values, **kwargs):
"""Construct an optionmenu widget with the parent MASTER, with
the resource textvariable set to VARIABLE, the initially selected
value VALUE, the other menu values VALUES and an additional
keyword argument command."""
kw = {"borderwidth": 2, "textvariable": variable,
"indicatoron": 1, "relief": RAISED, "anchor": "c",
"highlightthickness": 2}
Widget.__init__(self, master, "menubutton", kw)
self.widgetName = 'tk_optionMenu'
menu = self.__menu = Menu(self, name="menu", tearoff=0)
self.menuname = menu._w
# 'command' is the only supported keyword
callback = kwargs.get('command')
if 'command' in kwargs:
del kwargs['command']
if kwargs:
raise TclError, 'unknown option -'+kwargs.keys()[0]
menu.add_command(label=value,
command=_setit(variable, value, callback))
for v in values:
menu.add_command(label=v,
command=_setit(variable, v, callback))
self["menu"] = menu
def __getitem__(self, name):
if name == 'menu':
return self.__menu
return Widget.__getitem__(self, name)
def destroy(self):
"""Destroy this widget and the associated menu."""
Menubutton.destroy(self)
self.__menu = None
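# Minimal usage sketch for OptionMenu (assumes a running Tk root; the
# names below are illustrative):
#
#     var = StringVar(root)
#     var.set('red')
#     om = OptionMenu(root, var, 'red', 'green', 'blue',
#                     command=lambda value: None)
#     om.pack()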
class Image:
"""Base class for images."""
_last_id = 0
def __init__(self, imgtype, name=None, cnf={}, master=None, **kw):
self.name = None
if not master:
master = _default_root
if not master:
raise RuntimeError, 'Too early to create image'
self.tk = master.tk
if not name:
Image._last_id += 1
name = "pyimage%r" % (Image._last_id,) # tk itself would use image<x>
# The following is needed for systems where id(x)
# can return a negative number, such as Linux/m68k:
if name[0] == '-': name = '_' + name[1:]
if kw and cnf: cnf = _cnfmerge((cnf, kw))
elif kw: cnf = kw
options = ()
for k, v in cnf.items():
if hasattr(v, '__call__'):
v = self._register(v)
options = options + ('-'+k, v)
self.tk.call(('image', 'create', imgtype, name,) + options)
self.name = name
def __str__(self): return self.name
def __del__(self):
if self.name:
try:
self.tk.call('image', 'delete', self.name)
except TclError:
# May happen if the root was destroyed
pass
def __setitem__(self, key, value):
self.tk.call(self.name, 'configure', '-'+key, value)
def __getitem__(self, key):
return self.tk.call(self.name, 'configure', '-'+key)
def configure(self, **kw):
"""Configure the image."""
res = ()
for k, v in _cnfmerge(kw).items():
if v is not None:
if k[-1] == '_': k = k[:-1]
if hasattr(v, '__call__'):
v = self._register(v)
res = res + ('-'+k, v)
self.tk.call((self.name, 'config') + res)
config = configure
def height(self):
"""Return the height of the image."""
return getint(
self.tk.call('image', 'height', self.name))
def type(self):
"""Return the type of the imgage, e.g. "photo" or "bitmap"."""
return self.tk.call('image', 'type', self.name)
def width(self):
"""Return the width of the image."""
return getint(
self.tk.call('image', 'width', self.name))
class PhotoImage(Image):
"""Widget which can display colored images in GIF, PPM/PGM format."""
def __init__(self, name=None, cnf={}, master=None, **kw):
"""Create an image with NAME.
Valid resource names: data, format, file, gamma, height, palette,
width."""
Image.__init__(self, 'photo', name, cnf, master, **kw)
def blank(self):
"""Display a transparent image."""
self.tk.call(self.name, 'blank')
def cget(self, option):
"""Return the value of OPTION."""
return self.tk.call(self.name, 'cget', '-' + option)
# XXX config
def __getitem__(self, key):
return self.tk.call(self.name, 'cget', '-' + key)
# XXX copy -from, -to, ...?
def copy(self):
"""Return a new PhotoImage with the same image as this widget."""
destImage = PhotoImage()
self.tk.call(destImage, 'copy', self.name)
return destImage
def zoom(self,x,y=''):
"""Return a new PhotoImage with the same image as this widget
but zoom it with X and Y."""
destImage = PhotoImage()
if y=='': y=x
self.tk.call(destImage, 'copy', self.name, '-zoom',x,y)
return destImage
def subsample(self,x,y=''):
"""Return a new PhotoImage based on the same image as this widget
but use only every Xth or Yth pixel."""
destImage = PhotoImage()
if y=='': y=x
self.tk.call(destImage, 'copy', self.name, '-subsample',x,y)
return destImage
def get(self, x, y):
"""Return the color (red, green, blue) of the pixel at X,Y."""
return self.tk.call(self.name, 'get', x, y)
def put(self, data, to=None):
"""Put row formatted colors to image starting from
position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6))"""
args = (self.name, 'put', data)
if to:
if to[0] == '-to':
to = to[1:]
args = args + ('-to',) + tuple(to)
self.tk.call(args)
# XXX read
def write(self, filename, format=None, from_coords=None):
"""Write image to file FILENAME in FORMAT starting from
position FROM_COORDS."""
args = (self.name, 'write', filename)
if format:
args = args + ('-format', format)
if from_coords:
args = args + ('-from',) + tuple(from_coords)
self.tk.call(args)
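# Illustrative PhotoImage usage (assumes a GIF file path; the names are
# made up for the example):
#
#     img = PhotoImage(file='icon.gif')
#     big = img.zoom(2)          # 2x in both directions
#     small = img.subsample(2)   # keep every 2nd pixel
#     img.put('{red green} {blue yellow}', to=(4, 6))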
class BitmapImage(Image):
"""Widget which can display a bitmap."""
def __init__(self, name=None, cnf={}, master=None, **kw):
"""Create a bitmap with NAME.
Valid resource names: background, data, file, foreground, maskdata, maskfile."""
Image.__init__(self, 'bitmap', name, cnf, master, **kw)
def image_names(): return _default_root.tk.call('image', 'names')
def image_types(): return _default_root.tk.call('image', 'types')
class Spinbox(Widget, XView):
"""spinbox widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a spinbox widget with the parent MASTER.
STANDARD OPTIONS
activebackground, background, borderwidth,
cursor, exportselection, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, insertbackground,
insertborderwidth, insertofftime,
insertontime, insertwidth, justify, relief,
repeatdelay, repeatinterval,
            selectbackground, selectborderwidth,
            selectforeground, takefocus, textvariable,
            xscrollcommand.
WIDGET-SPECIFIC OPTIONS
buttonbackground, buttoncursor,
buttondownrelief, buttonuprelief,
command, disabledbackground,
disabledforeground, format, from,
invalidcommand, increment,
readonlybackground, state, to,
            validate, validatecommand, values,
width, wrap,
"""
Widget.__init__(self, master, 'spinbox', cnf, kw)
def bbox(self, index):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a
rectangle which encloses the character given by index.
The first two elements of the list give the x and y
coordinates of the upper-left corner of the screen
area covered by the character (in pixels relative
to the widget) and the last two elements give the
width and height of the character, in pixels. The
bounding box may refer to a region outside the
visible area of the window.
"""
return self.tk.call(self._w, 'bbox', index)
def delete(self, first, last=None):
"""Delete one or more elements of the spinbox.
First is the index of the first character to delete,
and last is the index of the character just after
the last one to delete. If last isn't specified it
defaults to first+1, i.e. a single character is
deleted. This command returns an empty string.
"""
return self.tk.call(self._w, 'delete', first, last)
def get(self):
"""Returns the spinbox's string"""
return self.tk.call(self._w, 'get')
def icursor(self, index):
"""Alter the position of the insertion cursor.
The insertion cursor will be displayed just before
the character given by index. Returns an empty string
"""
return self.tk.call(self._w, 'icursor', index)
def identify(self, x, y):
"""Returns the name of the widget at position x, y
Return value is one of: none, buttondown, buttonup, entry
"""
return self.tk.call(self._w, 'identify', x, y)
def index(self, index):
"""Returns the numerical index corresponding to index
"""
return self.tk.call(self._w, 'index', index)
def insert(self, index, s):
"""Insert string s at index
Returns an empty string.
"""
return self.tk.call(self._w, 'insert', index, s)
def invoke(self, element):
"""Causes the specified element to be invoked
The element could be buttondown or buttonup
triggering the action associated with it.
"""
return self.tk.call(self._w, 'invoke', element)
def scan(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'scan') + args)) or ()
def scan_mark(self, x):
"""Records x and the current view in the spinbox window;
used in conjunction with later scan dragto commands.
Typically this command is associated with a mouse button
press in the widget. It returns an empty string.
"""
return self.scan("mark", x)
def scan_dragto(self, x):
"""Compute the difference between the given x argument
and the x argument to the last scan mark command
It then adjusts the view left or right by 10 times the
difference in x-coordinates. This command is typically
associated with mouse motion events in the widget, to
produce the effect of dragging the spinbox at high speed
through the window. The return value is an empty string.
"""
return self.scan("dragto", x)
def selection(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'selection') + args)) or ()
def selection_adjust(self, index):
"""Locate the end of the selection nearest to the character
given by index,
Then adjust that end of the selection to be at index
(i.e including but not going beyond index). The other
end of the selection is made the anchor point for future
select to commands. If the selection isn't currently in
the spinbox, then a new selection is created to include
the characters between index and the most recent selection
anchor point, inclusive. Returns an empty string.
"""
return self.selection("adjust", index)
def selection_clear(self):
"""Clear the selection
If the selection isn't in this widget then the
command has no effect. Returns an empty string.
"""
return self.selection("clear")
def selection_element(self, element=None):
"""Sets or gets the currently selected element.
If a spinbutton element is specified, it will be
displayed depressed
"""
return self.selection("element", element)
###########################################################################
class LabelFrame(Widget):
"""labelframe widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a labelframe widget with the parent MASTER.
STANDARD OPTIONS
borderwidth, cursor, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, padx, pady, relief,
takefocus, text
WIDGET-SPECIFIC OPTIONS
background, class, colormap, container,
height, labelanchor, labelwidget,
visual, width
"""
Widget.__init__(self, master, 'labelframe', cnf, kw)
########################################################################
class PanedWindow(Widget):
"""panedwindow widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a panedwindow widget with the parent MASTER.
STANDARD OPTIONS
background, borderwidth, cursor, height,
orient, relief, width
WIDGET-SPECIFIC OPTIONS
handlepad, handlesize, opaqueresize,
sashcursor, sashpad, sashrelief,
sashwidth, showhandle,
"""
Widget.__init__(self, master, 'panedwindow', cnf, kw)
def add(self, child, **kw):
"""Add a child widget to the panedwindow in a new pane.
The child argument is the name of the child widget
followed by pairs of arguments that specify how to
manage the windows. The possible options and values
are the ones accepted by the paneconfigure method.
"""
self.tk.call((self._w, 'add', child) + self._options(kw))
def remove(self, child):
"""Remove the pane containing child from the panedwindow
All geometry management options for child will be forgotten.
"""
self.tk.call(self._w, 'forget', child)
forget=remove
def identify(self, x, y):
"""Identify the panedwindow component at point x, y
If the point is over a sash or a sash handle, the result
is a two element list containing the index of the sash or
handle, and a word indicating whether it is over a sash
or a handle, such as {0 sash} or {2 handle}. If the point
is over any other part of the panedwindow, the result is
an empty list.
"""
return self.tk.call(self._w, 'identify', x, y)
def proxy(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'proxy') + args)) or ()
def proxy_coord(self):
"""Return the x and y pair of the most recent proxy location
"""
return self.proxy("coord")
def proxy_forget(self):
"""Remove the proxy from the display.
"""
return self.proxy("forget")
def proxy_place(self, x, y):
"""Place the proxy at the given x and y coordinates.
"""
return self.proxy("place", x, y)
def sash(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'sash') + args)) or ()
def sash_coord(self, index):
"""Return the current x and y pair for the sash given by index.
Index must be an integer between 0 and 1 less than the
number of panes in the panedwindow. The coordinates given are
those of the top left corner of the region containing the sash.
pathName sash dragto index x y This command computes the
difference between the given coordinates and the coordinates
given to the last sash coord command for the given sash. It then
moves that sash the computed difference. The return value is the
empty string.
"""
return self.sash("coord", index)
def sash_mark(self, index):
"""Records x and y for the sash given by index;
Used in conjunction with later dragto commands to move the sash.
"""
return self.sash("mark", index)
def sash_place(self, index, x, y):
"""Place the sash given by index at the given coordinates
"""
return self.sash("place", index, x, y)
def panecget(self, child, option):
"""Query a management option for window.
Option may be any value allowed by the paneconfigure subcommand
"""
return self.tk.call(
(self._w, 'panecget') + (child, '-'+option))
def paneconfigure(self, tagOrId, cnf=None, **kw):
"""Query or modify the management options for window.
If no option is specified, returns a list describing all
of the available options for pathName. If option is
specified with no value, then the command returns a list
describing the one named option (this list will be identical
to the corresponding sublist of the value returned if no
option is specified). If one or more option-value pairs are
specified, then the command modifies the given widget
option(s) to have the given value(s); in this case the
command returns an empty string. The following options
are supported:
after window
Insert the window after the window specified. window
should be the name of a window already managed by pathName.
before window
Insert the window before the window specified. window
should be the name of a window already managed by pathName.
height size
Specify a height for the window. The height will be the
outer dimension of the window including its border, if
any. If size is an empty string, or if -height is not
specified, then the height requested internally by the
window will be used initially; the height may later be
adjusted by the movement of sashes in the panedwindow.
Size may be any value accepted by Tk_GetPixels.
minsize n
Specifies that the size of the window cannot be made
less than n. This constraint only affects the size of
the widget in the paned dimension -- the x dimension
for horizontal panedwindows, the y dimension for
vertical panedwindows. May be any value accepted by
Tk_GetPixels.
padx n
Specifies a non-negative value indicating how much
extra space to leave on each side of the window in
the X-direction. The value may have any of the forms
accepted by Tk_GetPixels.
pady n
Specifies a non-negative value indicating how much
extra space to leave on each side of the window in
the Y-direction. The value may have any of the forms
accepted by Tk_GetPixels.
sticky style
If a window's pane is larger than the requested
dimensions of the window, this option may be used
to position (or stretch) the window within its pane.
Style is a string that contains zero or more of the
characters n, s, e or w. The string can optionally
            contain spaces or commas, but they are ignored. Each
letter refers to a side (north, south, east, or west)
that the window will "stick" to. If both n and s
(or e and w) are specified, the window will be
stretched to fill the entire height (or width) of
its cavity.
width size
Specify a width for the window. The width will be
the outer dimension of the window including its
border, if any. If size is an empty string, or
if -width is not specified, then the width requested
internally by the window will be used initially; the
width may later be adjusted by the movement of sashes
in the panedwindow. Size may be any value accepted by
Tk_GetPixels.
"""
if cnf is None and not kw:
cnf = {}
for x in self.tk.split(
self.tk.call(self._w,
'paneconfigure', tagOrId)):
cnf[x[0][1:]] = (x[0][1:],) + x[1:]
return cnf
if type(cnf) == StringType and not kw:
x = self.tk.split(self.tk.call(
self._w, 'paneconfigure', tagOrId, '-'+cnf))
return (x[0][1:],) + x[1:]
self.tk.call((self._w, 'paneconfigure', tagOrId) +
self._options(cnf, kw))
paneconfig = paneconfigure
def panes(self):
"""Returns an ordered list of the child panes."""
return self.tk.call(self._w, 'panes')
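# Illustrative PanedWindow usage (assumes a running Tk root and two
# child widgets; the names are made up):
#
#     pw = PanedWindow(root, orient=HORIZONTAL)
#     pw.add(left_frame, minsize=100)
#     pw.add(right_frame, sticky='nsew')
#     pw.paneconfigure(left_frame, padx=4)
#     pw.pack(fill=BOTH, expand=1)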
######################################################################
# Extensions:
class Studbutton(Button):
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'studbutton', cnf, kw)
self.bind('<Any-Enter>', self.tkButtonEnter)
self.bind('<Any-Leave>', self.tkButtonLeave)
self.bind('<1>', self.tkButtonDown)
self.bind('<ButtonRelease-1>', self.tkButtonUp)
class Tributton(Button):
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'tributton', cnf, kw)
self.bind('<Any-Enter>', self.tkButtonEnter)
self.bind('<Any-Leave>', self.tkButtonLeave)
self.bind('<1>', self.tkButtonDown)
self.bind('<ButtonRelease-1>', self.tkButtonUp)
self['fg'] = self['bg']
self['activebackground'] = self['bg']
######################################################################
# Test:
def _test():
root = Tk()
text = "This is Tcl/Tk version %s" % TclVersion
if TclVersion >= 8.1:
try:
text = text + unicode("\nThis should be a cedilla: \347",
"iso-8859-1")
except NameError:
pass # no unicode support
label = Label(root, text=text)
label.pack()
test = Button(root, text="Click me!",
command=lambda root=root: root.test.configure(
text="[%s]" % root.test['text']))
test.pack()
root.test = test
quit = Button(root, text="QUIT", command=root.destroy)
quit.pack()
# The following three commands are needed so the window pops
# up on top on Windows...
root.iconify()
root.update()
root.deiconify()
root.mainloop()
if __name__ == '__main__':
_test()
|
lucafavatella/intellij-community
|
refs/heads/cli-wip
|
python/helpers/py3only/docutils/languages/__init__.py
|
170
|
# $Id: __init__.py 7648 2013-04-18 07:36:22Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
# Internationalization details are documented in
# <http://docutils.sf.net/docs/howto/i18n.html>.
"""
This package contains modules for language-dependent features of Docutils.
"""
__docformat__ = 'reStructuredText'
import sys
from docutils.utils import normalize_language_tag
if sys.version_info < (2,5):
from docutils._compat import __import__
_languages = {}
def get_language(language_code, reporter=None):
"""Return module with language localizations.
`language_code` is a "BCP 47" language tag.
If there is no matching module, warn and fall back to English.
"""
    # TODO: use a dummy module returning empty strings? configurable?
for tag in normalize_language_tag(language_code):
tag = tag.replace('-','_') # '-' not valid in module names
if tag in _languages:
return _languages[tag]
try:
module = __import__(tag, globals(), locals(), level=1)
except ImportError:
try:
module = __import__(tag, globals(), locals(), level=0)
except ImportError:
continue
_languages[tag] = module
return module
if reporter is not None:
reporter.warning(
'language "%s" not supported: ' % language_code +
'Docutils-generated text will be in English.')
module = __import__('en', globals(), locals(), level=1)
_languages[tag] = module # warn only one time!
return module
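# Illustrative usage (tags per BCP 47; results depend on the language
# modules actually shipped with Docutils):
#
#     de = get_language('de')         # docutils.languages.de
#     en = get_language('x-unknown')  # falls back to the 'en' module,
#                                     # warning via `reporter` if given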
|
aioue/ansible
|
refs/heads/devel
|
lib/ansible/utils/listify.py
|
118
|
# (c) 2014 Michael DeHaan, <michael@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from collections import Iterable
from ansible.module_utils.six import string_types
from ansible.template.safe_eval import safe_eval
__all__ = ['listify_lookup_plugin_terms']
def listify_lookup_plugin_terms(terms, templar, loader, fail_on_undefined=True, convert_bare=False):
if isinstance(terms, string_types):
terms = templar.template(terms.strip(), convert_bare=convert_bare, fail_on_undefined=fail_on_undefined)
else:
terms = templar.template(terms, fail_on_undefined=fail_on_undefined)
if isinstance(terms, string_types) or not isinstance(terms, Iterable):
terms = [terms]
return terms
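# Behavioural sketch (`templar` and `loader` come from Ansible internals
# and are assumed to exist; values are illustrative):
#
#     listify_lookup_plugin_terms('{{ some_list }}', templar, loader)
#     # -> the templated list, or a templated scalar wrapped as [value]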
|
JimCircadian/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/aos/_aos_blueprint.py
|
55
|
#!/usr/bin/python
#
# (c) 2017 Apstra Inc, <community@apstra.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aos_blueprint
author: jeremy@apstra.com (@jeremyschulman)
version_added: "2.3"
short_description: Manage AOS blueprint instance
deprecated:
removed_in: "2.9"
why: This module does not support AOS 2.1 or later
alternative: See new modules at U(https://www.ansible.com/ansible-apstra).
description:
    - Apstra AOS Blueprint module lets you manage your Blueprints easily. You can
      create and delete Blueprints by name or ID. You can also use it to retrieve
      all data from a blueprint. This module is idempotent
      and supports the I(check) mode. It uses the AOS REST API.
requirements:
- "aos-pyez >= 0.6.0"
options:
session:
description:
- An existing AOS session as obtained by M(aos_login) module.
required: true
name:
description:
- Name of the Blueprint to manage.
Only one of I(name) or I(id) can be set.
id:
description:
      - AOS Id of the Blueprint to manage (can't be used to create a new Blueprint).
        Only one of I(name) or I(id) can be set.
state:
description:
- Indicate what is the expected state of the Blueprint.
choices: ['present', 'absent', 'build-ready']
default: present
timeout:
description:
      - When I(state=build-ready), this value sets the timeout in seconds to wait
        before declaring a failure.
default: 5
template:
description:
- When creating a blueprint, this value identifies, by name, an existing engineering
design template within the AOS-server.
reference_arch:
description:
- When creating a blueprint, this value identifies a known AOS reference
architecture value. I(Refer to AOS-server documentation for available values).
'''
EXAMPLES = '''
- name: Creating blueprint
aos_blueprint:
session: "{{ aos_session }}"
name: "my-blueprint"
template: "my-template"
reference_arch: two_stage_l3clos
state: present
- name: Access a blueprint and get content
aos_blueprint:
session: "{{ aos_session }}"
name: "{{ blueprint_name }}"
template: "{{ blueprint_template }}"
state: present
register: bp
- name: Delete a blueprint
aos_blueprint:
session: "{{ aos_session }}"
name: "my-blueprint"
state: absent
- name: Await blueprint build-ready, and obtain contents
aos_blueprint:
session: "{{ aos_session }}"
name: "{{ blueprint_name }}"
state: build-ready
register: bp
'''
RETURN = '''
name:
description: Name of the Blueprint
returned: always
type: str
sample: My-Blueprint
id:
description: AOS unique ID assigned to the Blueprint
returned: always
type: str
sample: fcc4ac1c-e249-4fe7-b458-2138bfb44c06
value:
description: Information about the Blueprint
returned: always
type: dict
sample: {'...'}
contents:
description: Blueprint contents data-dictionary
returned: always
type: dict
sample: { ... }
build_errors:
description: When state='build-ready', and build errors exist, this contains list of errors
returned: only when build-ready returns fail
type: list
sample: [{...}, {...}]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aos.aos import get_aos_session, check_aos_version, find_collection_item
def create_blueprint(module, aos, name):
margs = module.params
try:
template_id = aos.DesignTemplates[margs['template']].id
# Create a new Object based on the name
blueprint = aos.Blueprints[name]
blueprint.create(template_id, reference_arch=margs['reference_arch'])
except Exception as exc:
msg = "Unable to create blueprint: %s" % exc.message
if 'UNPROCESSABLE ENTITY' in exc.message:
msg += ' (likely missing dependencies)'
module.fail_json(msg=msg)
return blueprint
def ensure_absent(module, aos, blueprint):
if blueprint.exists is False:
module.exit_json(changed=False)
else:
if not module.check_mode:
try:
blueprint.delete()
except Exception as exc:
module.fail_json(msg='Unable to delete blueprint, %s' % exc.message)
module.exit_json(changed=True,
id=blueprint.id,
name=blueprint.name)
def ensure_present(module, aos, blueprint):
margs = module.params
if blueprint.exists:
module.exit_json(changed=False,
id=blueprint.id,
name=blueprint.name,
value=blueprint.value,
contents=blueprint.contents)
else:
# Check if template is defined and is valid
if margs['template'] is None:
module.fail_json(msg="You must define a 'template' name to create a new blueprint, currently missing")
elif aos.DesignTemplates.find(label=margs['template']) is None:
module.fail_json(msg="You must define a Valid 'template' name to create a new blueprint, %s is not valid" % margs['template'])
# Check if reference_arch
if margs['reference_arch'] is None:
module.fail_json(msg="You must define a 'reference_arch' to create a new blueprint, currently missing")
if not module.check_mode:
blueprint = create_blueprint(module, aos, margs['name'])
module.exit_json(changed=True,
id=blueprint.id,
name=blueprint.name,
value=blueprint.value,
contents=blueprint.contents)
else:
module.exit_json(changed=True,
name=margs['name'])
def ensure_build_ready(module, aos, blueprint):
margs = module.params
if not blueprint.exists:
module.fail_json(msg='blueprint %s does not exist' % blueprint.name)
if blueprint.await_build_ready(timeout=margs['timeout'] * 1000):
module.exit_json(contents=blueprint.contents)
else:
        module.fail_json(msg='blueprint %s has build errors' % blueprint.name,
                         build_errors=blueprint.build_errors)
def aos_blueprint(module):
margs = module.params
try:
aos = get_aos_session(module, margs['session'])
except:
module.fail_json(msg="Unable to login to the AOS server")
item_name = False
item_id = False
if margs['name'] is not None:
item_name = margs['name']
elif margs['id'] is not None:
item_id = margs['id']
# ----------------------------------------------------
# Find Object if available based on ID or Name
# ----------------------------------------------------
try:
my_blueprint = find_collection_item(aos.Blueprints,
item_name=item_name,
item_id=item_id)
except:
module.fail_json(msg="Unable to find the Blueprint based on name or ID, something went wrong")
# ----------------------------------------------------
# Proceed based on State value
# ----------------------------------------------------
if margs['state'] == 'absent':
ensure_absent(module, aos, my_blueprint)
elif margs['state'] == 'present':
ensure_present(module, aos, my_blueprint)
elif margs['state'] == 'build-ready':
ensure_build_ready(module, aos, my_blueprint)
def main():
module = AnsibleModule(
argument_spec=dict(
session=dict(required=True, type="dict"),
name=dict(required=False),
id=dict(required=False),
state=dict(choices=[
'present', 'absent', 'build-ready'],
default='present'),
timeout=dict(type="int", default=5),
template=dict(required=False),
reference_arch=dict(required=False)
),
mutually_exclusive=[('name', 'id')],
required_one_of=[('name', 'id')],
supports_check_mode=True
)
# Check if aos-pyez is present and match the minimum version
check_aos_version(module, '0.6.0')
aos_blueprint(module)
if __name__ == '__main__':
main()
|
sarthakmeh03/django
|
refs/heads/master
|
tests/utils_tests/test_datastructures.py
|
7
|
"""
Tests for stuff in django.utils.datastructures.
"""
import copy
from django.test import SimpleTestCase
from django.utils import six
from django.utils.datastructures import (
DictWrapper, ImmutableList, MultiValueDict, MultiValueDictKeyError,
OrderedSet,
)
class OrderedSetTests(SimpleTestCase):
def test_bool(self):
# Refs #23664
s = OrderedSet()
self.assertFalse(s)
s.add(1)
self.assertTrue(s)
def test_len(self):
s = OrderedSet()
self.assertEqual(len(s), 0)
s.add(1)
s.add(2)
s.add(2)
self.assertEqual(len(s), 2)
class MultiValueDictTests(SimpleTestCase):
def test_multivaluedict(self):
d = MultiValueDict({'name': ['Adrian', 'Simon'],
'position': ['Developer']})
self.assertEqual(d['name'], 'Simon')
self.assertEqual(d.get('name'), 'Simon')
self.assertEqual(d.getlist('name'), ['Adrian', 'Simon'])
self.assertEqual(
sorted(six.iteritems(d)),
[('name', 'Simon'), ('position', 'Developer')]
)
self.assertEqual(
sorted(six.iterlists(d)),
[('name', ['Adrian', 'Simon']), ('position', ['Developer'])]
)
with self.assertRaisesMessage(MultiValueDictKeyError, 'lastname'):
d.__getitem__('lastname')
self.assertIsNone(d.get('lastname'))
self.assertEqual(d.get('lastname', 'nonexistent'), 'nonexistent')
self.assertEqual(d.getlist('lastname'), [])
self.assertEqual(d.getlist('doesnotexist', ['Adrian', 'Simon']),
['Adrian', 'Simon'])
d.setlist('lastname', ['Holovaty', 'Willison'])
self.assertEqual(d.getlist('lastname'), ['Holovaty', 'Willison'])
self.assertEqual(sorted(six.itervalues(d)),
['Developer', 'Simon', 'Willison'])
def test_appendlist(self):
d = MultiValueDict()
d.appendlist('name', 'Adrian')
d.appendlist('name', 'Simon')
self.assertEqual(d.getlist('name'), ['Adrian', 'Simon'])
def test_copy(self):
for copy_func in [copy.copy, lambda d: d.copy()]:
d1 = MultiValueDict({
"developers": ["Carl", "Fred"]
})
self.assertEqual(d1["developers"], "Fred")
d2 = copy_func(d1)
d2.update({"developers": "Groucho"})
self.assertEqual(d2["developers"], "Groucho")
self.assertEqual(d1["developers"], "Fred")
d1 = MultiValueDict({
"key": [[]]
})
self.assertEqual(d1["key"], [])
d2 = copy_func(d1)
d2["key"].append("Penguin")
self.assertEqual(d1["key"], ["Penguin"])
self.assertEqual(d2["key"], ["Penguin"])
def test_dict_translation(self):
mvd = MultiValueDict({
'devs': ['Bob', 'Joe'],
'pm': ['Rory'],
})
d = mvd.dict()
self.assertEqual(sorted(six.iterkeys(d)), sorted(six.iterkeys(mvd)))
for key in six.iterkeys(mvd):
self.assertEqual(d[key], mvd[key])
self.assertEqual({}, MultiValueDict().dict())
def test_getlist_doesnt_mutate(self):
x = MultiValueDict({'a': ['1', '2'], 'b': ['3']})
values = x.getlist('a')
values += x.getlist('b')
self.assertEqual(x.getlist('a'), ['1', '2'])
def test_internal_getlist_does_mutate(self):
x = MultiValueDict({'a': ['1', '2'], 'b': ['3']})
values = x._getlist('a')
values += x._getlist('b')
self.assertEqual(x._getlist('a'), ['1', '2', '3'])
def test_getlist_default(self):
x = MultiValueDict({'a': [1]})
MISSING = object()
values = x.getlist('b', default=MISSING)
self.assertIs(values, MISSING)
def test_getlist_none_empty_values(self):
x = MultiValueDict({'a': None, 'b': []})
self.assertIsNone(x.getlist('a'))
self.assertEqual(x.getlist('b'), [])
class ImmutableListTests(SimpleTestCase):
def test_sort(self):
d = ImmutableList(range(10))
# AttributeError: ImmutableList object is immutable.
with self.assertRaisesMessage(AttributeError, 'ImmutableList object is immutable.'):
d.sort()
self.assertEqual(repr(d), '(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)')
def test_custom_warning(self):
d = ImmutableList(range(10), warning="Object is immutable!")
self.assertEqual(d[1], 1)
# AttributeError: Object is immutable!
with self.assertRaisesMessage(AttributeError, 'Object is immutable!'):
d.__setitem__(1, 'test')
class DictWrapperTests(SimpleTestCase):
def test_dictwrapper(self):
def f(x):
return "*%s" % x
d = DictWrapper({'a': 'a'}, f, 'xx_')
self.assertEqual(
"Normal: %(a)s. Modified: %(xx_a)s" % d,
'Normal: a. Modified: *a'
)
|
cvegaj/ElectriCERT
|
refs/heads/master
|
venv3/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/requirements.py
|
454
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import string
import re
from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
from pkg_resources.extern.pyparsing import Literal as L # noqa
from pkg_resources.extern.six.moves.urllib import parse as urlparse
from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet
class InvalidRequirement(ValueError):
"""
    An invalid requirement was found; users should refer to PEP 508.
"""
ALPHANUM = Word(string.ascii_letters + string.digits)
LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()
PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER
URI = Regex(r'[^ ]+')("url")
URL = (AT + URI)
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
joinString=",", adjacent=False)("_raw_spec")
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
lambda s, l, t: Marker(s[t._original_start:t._original_end])
)
MARKER_SEPARATOR = SEMICOLON
MARKER = MARKER_SEPARATOR + MARKER_EXPR
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)
NAMED_REQUIREMENT = \
NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
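# Illustrative strings matched by NAMED_REQUIREMENT above (PEP 508):
#
#     name
#     name[extra1,extra2]>=1.0,<2.0
#     name @ https://example.com/pkg.tar.gz ; os_name == "posix"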
class Requirement(object):
"""Parse a requirement.
Parse a given requirement string into its parts, such as name, specifier,
URL, and extras. Raises InvalidRequirement on a badly-formed requirement
string.
"""
# TODO: Can we test whether something is contained within a requirement?
# If so how do we do that? Do we need to test against the _name_ of
# the thing as well as the version? What about the markers?
# TODO: Can we normalize the name and extra name?
def __init__(self, requirement_string):
try:
req = REQUIREMENT.parseString(requirement_string)
except ParseException as e:
raise InvalidRequirement(
"Invalid requirement, parse error at \"{0!r}\"".format(
requirement_string[e.loc:e.loc + 8]))
self.name = req.name
if req.url:
parsed_url = urlparse.urlparse(req.url)
if not (parsed_url.scheme and parsed_url.netloc) or (
not parsed_url.scheme and not parsed_url.netloc):
raise InvalidRequirement("Invalid URL given")
self.url = req.url
else:
self.url = None
self.extras = set(req.extras.asList() if req.extras else [])
self.specifier = SpecifierSet(req.specifier)
self.marker = req.marker if req.marker else None
def __str__(self):
parts = [self.name]
if self.extras:
parts.append("[{0}]".format(",".join(sorted(self.extras))))
if self.specifier:
parts.append(str(self.specifier))
if self.url:
parts.append("@ {0}".format(self.url))
if self.marker:
parts.append("; {0}".format(self.marker))
return "".join(parts)
def __repr__(self):
return "<Requirement({0!r})>".format(str(self))
|
codebam/linux
|
refs/heads/master
|
scripts/analyze_suspend.py
|
171
|
#!/usr/bin/python
#
# Tool for analyzing suspend/resume timing
# Copyright (c) 2013, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors:
# Todd Brandt <todd.e.brandt@linux.intel.com>
#
# Links:
# Home Page
# https://01.org/suspendresume
# Source repo
# https://github.com/01org/suspendresume
# Documentation
# Getting Started
# https://01.org/suspendresume/documentation/getting-started
# Command List:
# https://01.org/suspendresume/documentation/command-list
#
# Description:
# This tool is designed to assist kernel and OS developers in optimizing
# their linux stack's suspend/resume time. Using a kernel image built
# with a few extra options enabled, the tool will execute a suspend and
# will capture dmesg and ftrace data until resume is complete. This data
# is transformed into a device timeline and a callgraph to give a quick
# and detailed view of which devices and callbacks are taking the most
# time in suspend/resume. The output is a single html file which can be
# viewed in firefox or chrome.
#
# The following kernel build options are required:
# CONFIG_PM_DEBUG=y
# CONFIG_PM_SLEEP_DEBUG=y
# CONFIG_FTRACE=y
# CONFIG_FUNCTION_TRACER=y
# CONFIG_FUNCTION_GRAPH_TRACER=y
# CONFIG_KPROBES=y
# CONFIG_KPROBES_ON_FTRACE=y
#
# For kernel versions older than 3.15:
# The following additional kernel parameters are required:
# (e.g. in file /etc/default/grub)
# GRUB_CMDLINE_LINUX_DEFAULT="... initcall_debug log_buf_len=16M ..."
#
# ----------------- LIBRARIES --------------------
import sys
import time
import os
import string
import re
import platform
from datetime import datetime
import struct
import ConfigParser
# ----------------- CLASSES --------------------
# Class: SystemValues
# Description:
# A global, single-instance container used to
# store system values and test parameters
class SystemValues:
ansi = False
version = '4.2'
verbose = False
addlogs = False
mindevlen = 0.001
mincglen = 1.0
srgap = 0
cgexp = False
outdir = ''
testdir = '.'
tpath = '/sys/kernel/debug/tracing/'
fpdtpath = '/sys/firmware/acpi/tables/FPDT'
epath = '/sys/kernel/debug/tracing/events/power/'
traceevents = [
'suspend_resume',
'device_pm_callback_end',
'device_pm_callback_start'
]
testcommand = ''
mempath = '/dev/mem'
powerfile = '/sys/power/state'
suspendmode = 'mem'
hostname = 'localhost'
prefix = 'test'
teststamp = ''
dmesgstart = 0.0
dmesgfile = ''
ftracefile = ''
htmlfile = ''
embedded = False
rtcwake = False
rtcwaketime = 10
rtcpath = ''
devicefilter = []
stamp = 0
execcount = 1
x2delay = 0
usecallgraph = False
usetraceevents = False
usetraceeventsonly = False
usetracemarkers = True
usekprobes = True
usedevsrc = False
notestrun = False
devprops = dict()
postresumetime = 0
devpropfmt = '# Device Properties: .*'
tracertypefmt = '# tracer: (?P<t>.*)'
firmwarefmt = '# fwsuspend (?P<s>[0-9]*) fwresume (?P<r>[0-9]*)$'
postresumefmt = '# post resume time (?P<t>[0-9]*)$'
stampfmt = '# suspend-(?P<m>[0-9]{2})(?P<d>[0-9]{2})(?P<y>[0-9]{2})-'+\
'(?P<H>[0-9]{2})(?P<M>[0-9]{2})(?P<S>[0-9]{2})'+\
' (?P<host>.*) (?P<mode>.*) (?P<kernel>.*)$'
kprobecolor = 'rgba(204,204,204,0.5)'
synccolor = 'rgba(204,204,204,0.5)'
debugfuncs = []
tracefuncs = {
'sys_sync': dict(),
'pm_prepare_console': dict(),
'pm_notifier_call_chain': dict(),
'freeze_processes': dict(),
'freeze_kernel_threads': dict(),
'pm_restrict_gfp_mask': dict(),
'acpi_suspend_begin': dict(),
'suspend_console': dict(),
'acpi_pm_prepare': dict(),
'syscore_suspend': dict(),
'arch_enable_nonboot_cpus_end': dict(),
'syscore_resume': dict(),
'acpi_pm_finish': dict(),
'resume_console': dict(),
'acpi_pm_end': dict(),
'pm_restore_gfp_mask': dict(),
'thaw_processes': dict(),
'pm_restore_console': dict(),
'CPU_OFF': {
'func':'_cpu_down',
'args_x86_64': {'cpu':'%di:s32'},
'format': 'CPU_OFF[{cpu}]',
'mask': 'CPU_.*_DOWN'
},
'CPU_ON': {
'func':'_cpu_up',
'args_x86_64': {'cpu':'%di:s32'},
'format': 'CPU_ON[{cpu}]',
'mask': 'CPU_.*_UP'
},
}
dev_tracefuncs = {
# general wait/delay/sleep
'msleep': { 'args_x86_64': {'time':'%di:s32'} },
'udelay': { 'func':'__const_udelay', 'args_x86_64': {'loops':'%di:s32'} },
'acpi_os_stall': dict(),
# ACPI
'acpi_resume_power_resources': dict(),
'acpi_ps_parse_aml': dict(),
# filesystem
'ext4_sync_fs': dict(),
# ATA
'ata_eh_recover': { 'args_x86_64': {'port':'+36(%di):s32'} },
# i915
'i915_gem_restore_gtt_mappings': dict(),
'intel_opregion_setup': dict(),
'intel_dp_detect': dict(),
'intel_hdmi_detect': dict(),
'intel_opregion_init': dict(),
}
kprobes_postresume = [
{
'name': 'ataportrst',
'func': 'ata_eh_recover',
'args': {'port':'+36(%di):s32'},
'format': 'ata{port}_port_reset',
'mask': 'ata.*_port_reset'
}
]
kprobes = dict()
timeformat = '%.3f'
def __init__(self):
# if this is a phoronix test run, set some default options
if('LOG_FILE' in os.environ and 'TEST_RESULTS_IDENTIFIER' in os.environ):
self.embedded = True
self.addlogs = True
self.htmlfile = os.environ['LOG_FILE']
self.hostname = platform.node()
if(self.hostname == ''):
self.hostname = 'localhost'
rtc = "rtc0"
if os.path.exists('/dev/rtc'):
rtc = os.readlink('/dev/rtc')
rtc = '/sys/class/rtc/'+rtc
if os.path.exists(rtc) and os.path.exists(rtc+'/date') and \
os.path.exists(rtc+'/time') and os.path.exists(rtc+'/wakealarm'):
self.rtcpath = rtc
if (hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()):
self.ansi = True
def setPrecision(self, num):
if num < 0 or num > 6:
return
self.timeformat = '%.{0}f'.format(num)
def setOutputFile(self):
if((self.htmlfile == '') and (self.dmesgfile != '')):
m = re.match('(?P<name>.*)_dmesg\.txt$', self.dmesgfile)
if(m):
self.htmlfile = m.group('name')+'.html'
if((self.htmlfile == '') and (self.ftracefile != '')):
m = re.match('(?P<name>.*)_ftrace\.txt$', self.ftracefile)
if(m):
self.htmlfile = m.group('name')+'.html'
if(self.htmlfile == ''):
self.htmlfile = 'output.html'
def initTestOutput(self, subdir, testpath=''):
self.prefix = self.hostname
v = open('/proc/version', 'r').read().strip()
kver = string.split(v)[2]
n = datetime.now()
testtime = n.strftime('suspend-%m%d%y-%H%M%S')
if not testpath:
testpath = n.strftime('suspend-%y%m%d-%H%M%S')
if(subdir != "."):
self.testdir = subdir+"/"+testpath
else:
self.testdir = testpath
self.teststamp = \
'# '+testtime+' '+self.prefix+' '+self.suspendmode+' '+kver
if(self.embedded):
self.dmesgfile = \
'/tmp/'+testtime+'_'+self.suspendmode+'_dmesg.txt'
self.ftracefile = \
'/tmp/'+testtime+'_'+self.suspendmode+'_ftrace.txt'
return
self.dmesgfile = \
self.testdir+'/'+self.prefix+'_'+self.suspendmode+'_dmesg.txt'
self.ftracefile = \
self.testdir+'/'+self.prefix+'_'+self.suspendmode+'_ftrace.txt'
self.htmlfile = \
self.testdir+'/'+self.prefix+'_'+self.suspendmode+'.html'
if not os.path.isdir(self.testdir):
os.mkdir(self.testdir)
def setDeviceFilter(self, devnames):
self.devicefilter = string.split(devnames)
def rtcWakeAlarmOn(self):
os.system('echo 0 > '+self.rtcpath+'/wakealarm')
outD = open(self.rtcpath+'/date', 'r').read().strip()
outT = open(self.rtcpath+'/time', 'r').read().strip()
mD = re.match('^(?P<y>[0-9]*)-(?P<m>[0-9]*)-(?P<d>[0-9]*)', outD)
mT = re.match('^(?P<h>[0-9]*):(?P<m>[0-9]*):(?P<s>[0-9]*)', outT)
if(mD and mT):
# get the current time from hardware
utcoffset = int((datetime.now() - datetime.utcnow()).total_seconds())
dt = datetime(\
int(mD.group('y')), int(mD.group('m')), int(mD.group('d')),
int(mT.group('h')), int(mT.group('m')), int(mT.group('s')))
nowtime = int(dt.strftime('%s')) + utcoffset
else:
# if hardware time fails, use the software time
nowtime = int(datetime.now().strftime('%s'))
alarm = nowtime + self.rtcwaketime
os.system('echo %d > %s/wakealarm' % (alarm, self.rtcpath))
def rtcWakeAlarmOff(self):
os.system('echo 0 > %s/wakealarm' % self.rtcpath)
def initdmesg(self):
# get the latest time stamp from the dmesg log
fp = os.popen('dmesg')
ktime = '0'
for line in fp:
line = line.replace('\r\n', '')
idx = line.find('[')
if idx > 1:
line = line[idx:]
m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
if(m):
ktime = m.group('ktime')
fp.close()
self.dmesgstart = float(ktime)
def getdmesg(self):
# store all new dmesg lines since initdmesg was called
fp = os.popen('dmesg')
op = open(self.dmesgfile, 'a')
for line in fp:
line = line.replace('\r\n', '')
idx = line.find('[')
if idx > 1:
line = line[idx:]
m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
if(not m):
continue
ktime = float(m.group('ktime'))
if ktime > self.dmesgstart:
op.write(line)
fp.close()
op.close()
def addFtraceFilterFunctions(self, file):
fp = open(file)
list = fp.read().split('\n')
fp.close()
for i in list:
if len(i) < 2:
continue
self.tracefuncs[i] = dict()
def getFtraceFilterFunctions(self, current):
rootCheck(True)
if not current:
os.system('cat '+self.tpath+'available_filter_functions')
return
fp = open(self.tpath+'available_filter_functions')
master = fp.read().split('\n')
fp.close()
if len(self.debugfuncs) > 0:
for i in self.debugfuncs:
if i in master:
print i
else:
print self.colorText(i)
else:
for i in self.tracefuncs:
if 'func' in self.tracefuncs[i]:
i = self.tracefuncs[i]['func']
if i in master:
print i
else:
print self.colorText(i)
def setFtraceFilterFunctions(self, list):
fp = open(self.tpath+'available_filter_functions')
master = fp.read().split('\n')
fp.close()
flist = ''
for i in list:
if i not in master:
continue
if ' [' in i:
flist += i.split(' ')[0]+'\n'
else:
flist += i+'\n'
fp = open(self.tpath+'set_graph_function', 'w')
fp.write(flist)
fp.close()
def kprobeMatch(self, name, target):
if name not in self.kprobes:
return False
if re.match(self.kprobes[name]['mask'], target):
return True
return False
def basicKprobe(self, name):
self.kprobes[name] = {'name': name,'func': name,'args': dict(),'format': name,'mask': name}
def defaultKprobe(self, name, kdata):
k = kdata
for field in ['name', 'format', 'mask', 'func']:
if field not in k:
k[field] = name
archargs = 'args_'+platform.machine()
if archargs in k:
k['args'] = k[archargs]
else:
k['args'] = dict()
k['format'] = name
self.kprobes[name] = k
def kprobeColor(self, name):
if name not in self.kprobes or 'color' not in self.kprobes[name]:
return ''
return self.kprobes[name]['color']
def kprobeDisplayName(self, name, dataraw):
if name not in self.kprobes:
self.basicKprobe(name)
data = ''
quote=0
		# first remove any spaces inside quotes, and the quotes themselves
for c in dataraw:
if c == '"':
quote = (quote + 1) % 2
if quote and c == ' ':
data += '_'
elif c != '"':
data += c
fmt, args = self.kprobes[name]['format'], self.kprobes[name]['args']
arglist = dict()
# now process the args
for arg in sorted(args):
arglist[arg] = ''
			m = re.match('.* '+arg+'=(?P<arg>.*) ', data)
if m:
arglist[arg] = m.group('arg')
else:
				m = re.match('.* '+arg+'=(?P<arg>.*)', data)
if m:
arglist[arg] = m.group('arg')
out = fmt.format(**arglist)
out = out.replace(' ', '_').replace('"', '')
return out
def kprobeText(self, kprobe):
name, fmt, func, args = kprobe['name'], kprobe['format'], kprobe['func'], kprobe['args']
if re.findall('{(?P<n>[a-z,A-Z,0-9]*)}', func):
doError('Kprobe "%s" has format info in the function name "%s"' % (name, func), False)
for arg in re.findall('{(?P<n>[a-z,A-Z,0-9]*)}', fmt):
if arg not in args:
doError('Kprobe "%s" is missing argument "%s"' % (name, arg), False)
val = 'p:%s_cal %s' % (name, func)
for i in sorted(args):
val += ' %s=%s' % (i, args[i])
val += '\nr:%s_ret %s $retval\n' % (name, func)
return val
def addKprobes(self):
# first test each kprobe
print('INITIALIZING KPROBES...')
rejects = []
for name in sorted(self.kprobes):
if not self.testKprobe(self.kprobes[name]):
rejects.append(name)
# remove all failed ones from the list
for name in rejects:
vprint('Skipping KPROBE: %s' % name)
self.kprobes.pop(name)
self.fsetVal('', 'kprobe_events')
kprobeevents = ''
# set the kprobes all at once
for kp in self.kprobes:
val = self.kprobeText(self.kprobes[kp])
vprint('Adding KPROBE: %s\n%s' % (kp, val.strip()))
kprobeevents += self.kprobeText(self.kprobes[kp])
self.fsetVal(kprobeevents, 'kprobe_events')
# verify that the kprobes were set as ordered
check = self.fgetVal('kprobe_events')
linesout = len(kprobeevents.split('\n'))
linesack = len(check.split('\n'))
if linesack < linesout:
# if not, try appending the kprobes 1 by 1
for kp in self.kprobes:
kprobeevents = self.kprobeText(self.kprobes[kp])
self.fsetVal(kprobeevents, 'kprobe_events', 'a')
self.fsetVal('1', 'events/kprobes/enable')
def testKprobe(self, kprobe):
kprobeevents = self.kprobeText(kprobe)
if not kprobeevents:
return False
try:
self.fsetVal(kprobeevents, 'kprobe_events')
check = self.fgetVal('kprobe_events')
except:
return False
linesout = len(kprobeevents.split('\n'))
linesack = len(check.split('\n'))
if linesack < linesout:
return False
return True
def fsetVal(self, val, path, mode='w'):
file = self.tpath+path
if not os.path.exists(file):
return False
try:
fp = open(file, mode)
fp.write(val)
fp.close()
except:
pass
return True
def fgetVal(self, path):
file = self.tpath+path
res = ''
if not os.path.exists(file):
return res
try:
fp = open(file, 'r')
res = fp.read()
fp.close()
except:
pass
return res
def cleanupFtrace(self):
if(self.usecallgraph or self.usetraceevents):
self.fsetVal('0', 'events/kprobes/enable')
self.fsetVal('', 'kprobe_events')
def setupAllKprobes(self):
for name in self.tracefuncs:
self.defaultKprobe(name, self.tracefuncs[name])
for name in self.dev_tracefuncs:
self.defaultKprobe(name, self.dev_tracefuncs[name])
def isCallgraphFunc(self, name):
if len(self.debugfuncs) < 1 and self.suspendmode == 'command':
return True
if name in self.debugfuncs:
return True
funclist = []
for i in self.tracefuncs:
if 'func' in self.tracefuncs[i]:
funclist.append(self.tracefuncs[i]['func'])
else:
funclist.append(i)
if name in funclist:
return True
return False
def initFtrace(self, testing=False):
tp = self.tpath
print('INITIALIZING FTRACE...')
# turn trace off
self.fsetVal('0', 'tracing_on')
self.cleanupFtrace()
# set the trace clock to global
self.fsetVal('global', 'trace_clock')
# set trace buffer to a huge value
self.fsetVal('nop', 'current_tracer')
self.fsetVal('100000', 'buffer_size_kb')
# go no further if this is just a status check
if testing:
return
if self.usekprobes:
			# add tracefunc kprobes so long as we're not using full callgraph
if(not self.usecallgraph or len(self.debugfuncs) > 0):
for name in self.tracefuncs:
self.defaultKprobe(name, self.tracefuncs[name])
if self.usedevsrc:
for name in self.dev_tracefuncs:
self.defaultKprobe(name, self.dev_tracefuncs[name])
else:
self.usedevsrc = False
self.addKprobes()
# initialize the callgraph trace, unless this is an x2 run
if(self.usecallgraph):
# set trace type
self.fsetVal('function_graph', 'current_tracer')
self.fsetVal('', 'set_ftrace_filter')
# set trace format options
self.fsetVal('print-parent', 'trace_options')
self.fsetVal('funcgraph-abstime', 'trace_options')
self.fsetVal('funcgraph-cpu', 'trace_options')
self.fsetVal('funcgraph-duration', 'trace_options')
self.fsetVal('funcgraph-proc', 'trace_options')
self.fsetVal('funcgraph-tail', 'trace_options')
self.fsetVal('nofuncgraph-overhead', 'trace_options')
self.fsetVal('context-info', 'trace_options')
self.fsetVal('graph-time', 'trace_options')
self.fsetVal('0', 'max_graph_depth')
if len(self.debugfuncs) > 0:
self.setFtraceFilterFunctions(self.debugfuncs)
elif self.suspendmode == 'command':
self.fsetVal('', 'set_graph_function')
else:
cf = ['dpm_run_callback']
if(self.usetraceeventsonly):
cf += ['dpm_prepare', 'dpm_complete']
for fn in self.tracefuncs:
if 'func' in self.tracefuncs[fn]:
cf.append(self.tracefuncs[fn]['func'])
else:
cf.append(fn)
self.setFtraceFilterFunctions(cf)
if(self.usetraceevents):
# turn trace events on
for e in self.traceevents:
self.fsetVal('1', 'events/power/'+e+'/enable')
# clear the trace buffer
self.fsetVal('', 'trace')
def verifyFtrace(self):
# files needed for any trace data
files = ['buffer_size_kb', 'current_tracer', 'trace', 'trace_clock',
'trace_marker', 'trace_options', 'tracing_on']
# files needed for callgraph trace data
tp = self.tpath
if(self.usecallgraph):
files += [
'available_filter_functions',
'set_ftrace_filter',
'set_graph_function'
]
for f in files:
if(os.path.exists(tp+f) == False):
return False
return True
def verifyKprobes(self):
# files needed for kprobes to work
files = ['kprobe_events', 'events']
tp = self.tpath
for f in files:
if(os.path.exists(tp+f) == False):
return False
return True
def colorText(self, str):
if not self.ansi:
return str
return '\x1B[31;40m'+str+'\x1B[m'
sysvals = SystemValues()
# Class: DevProps
# Description:
# Simple class which holds property values collected
# for all the devices used in the timeline.
class DevProps:
syspath = ''
altname = ''
async = True
xtraclass = ''
xtrainfo = ''
def out(self, dev):
return '%s,%s,%d;' % (dev, self.altname, self.async)
def debug(self, dev):
print('%s:\n\taltname = %s\n\tasync = %s' % (dev, self.altname, self.async))
def altName(self, dev):
if not self.altname or self.altname == dev:
return dev
return '%s [%s]' % (self.altname, dev)
def xtraClass(self):
if self.xtraclass:
return ' '+self.xtraclass
if not self.async:
return ' sync'
return ''
def xtraInfo(self):
if self.xtraclass:
return ' '+self.xtraclass
if self.async:
return ' async'
return ' sync'
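# Illustrative output (hypothetical device 'usb1' with altname
# 'xhci-hcd' and async left True):
#   out('usb1')  -> 'usb1,xhci-hcd,1;'
#   xtraInfo()   -> ' async'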
# Class: DeviceNode
# Description:
# A container used to create a device hierarchy, with a single root node
# and a tree of child nodes. Used by Data.deviceTopology()
class DeviceNode:
name = ''
children = 0
depth = 0
def __init__(self, nodename, nodedepth):
self.name = nodename
self.children = []
self.depth = nodedepth
# Class: Data
# Description:
# The primary container for suspend/resume test data. There is one for
# each test run. The data is organized into a chronological hierarchy:
# Data.dmesg {
# root structure, started as dmesg & ftrace, but now only ftrace
# contents: times for suspend start/end, resume start/end, fwdata
# phases {
# 10 sequential, non-overlapping phases of S/R
# contents: times for phase start/end, order/color data for html
# devlist {
# device callback or action list for this phase
# device {
# a single device callback or generic action
# contents: start/stop times, pid/cpu/driver info
# parents/children, html id for timeline/callgraph
# optionally includes an ftrace callgraph
# optionally includes intradev trace events
# }
# }
# }
# }
#
class Data:
dmesg = {} # root data structure
phases = [] # ordered list of phases
start = 0.0 # test start
end = 0.0 # test end
tSuspended = 0.0 # low-level suspend start
tResumed = 0.0 # low-level resume start
tLow = 0.0 # time spent in low-level suspend (standby/freeze)
fwValid = False # is firmware data available
fwSuspend = 0 # time spent in firmware suspend
fwResume = 0 # time spent in firmware resume
dmesgtext = [] # dmesg text file in memory
testnumber = 0
idstr = ''
html_device_id = 0
stamp = 0
outfile = ''
dev_ubiquitous = ['msleep', 'udelay']
def __init__(self, num):
idchar = 'abcdefghijklmnopqrstuvwxyz'
self.testnumber = num
self.idstr = idchar[num]
self.dmesgtext = []
self.phases = []
self.dmesg = { # fixed list of 10 phases
'suspend_prepare': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#CCFFCC', 'order': 0},
'suspend': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#88FF88', 'order': 1},
'suspend_late': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#00AA00', 'order': 2},
'suspend_noirq': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#008888', 'order': 3},
'suspend_machine': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#0000FF', 'order': 4},
'resume_machine': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#FF0000', 'order': 5},
'resume_noirq': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#FF9900', 'order': 6},
'resume_early': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#FFCC00', 'order': 7},
'resume': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#FFFF88', 'order': 8},
'resume_complete': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#FFFFCC', 'order': 9}
}
self.phases = self.sortedPhases()
self.devicegroups = []
for phase in self.phases:
self.devicegroups.append([phase])
def getStart(self):
return self.dmesg[self.phases[0]]['start']
def setStart(self, time):
self.start = time
self.dmesg[self.phases[0]]['start'] = time
def getEnd(self):
return self.dmesg[self.phases[-1]]['end']
def setEnd(self, time):
self.end = time
self.dmesg[self.phases[-1]]['end'] = time
def isTraceEventOutsideDeviceCalls(self, pid, time):
for phase in self.phases:
list = self.dmesg[phase]['list']
for dev in list:
d = list[dev]
if(d['pid'] == pid and time >= d['start'] and
time < d['end']):
return False
return True
def targetDevice(self, phaselist, start, end, pid=-1):
tgtdev = ''
for phase in phaselist:
list = self.dmesg[phase]['list']
for devname in list:
dev = list[devname]
if(pid >= 0 and dev['pid'] != pid):
continue
devS = dev['start']
devE = dev['end']
if(start < devS or start >= devE or end <= devS or end > devE):
continue
tgtdev = dev
break
return tgtdev
def addDeviceFunctionCall(self, displayname, kprobename, proc, pid, start, end, cdata, rdata):
machstart = self.dmesg['suspend_machine']['start']
machend = self.dmesg['resume_machine']['end']
tgtdev = self.targetDevice(self.phases, start, end, pid)
if not tgtdev and start >= machstart and end < machend:
# device calls in machine phases should be serial
tgtdev = self.targetDevice(['suspend_machine', 'resume_machine'], start, end)
if not tgtdev:
if 'scsi_eh' in proc:
self.newActionGlobal(proc, start, end, pid)
self.addDeviceFunctionCall(displayname, kprobename, proc, pid, start, end, cdata, rdata)
else:
vprint('IGNORE: %s[%s](%d) [%f - %f] | %s | %s | %s' % (displayname, kprobename,
pid, start, end, cdata, rdata, proc))
return False
# detail block fits within tgtdev
if('src' not in tgtdev):
tgtdev['src'] = []
title = cdata+' '+rdata
mstr = '\(.*\) *(?P<args>.*) *\((?P<caller>.*)\+.* arg1=(?P<ret>.*)'
m = re.match(mstr, title)
if m:
c = m.group('caller')
a = m.group('args').strip()
r = m.group('ret')
if len(r) > 6:
r = ''
else:
r = 'ret=%s ' % r
l = '%0.3fms' % ((end - start) * 1000)
if kprobename in self.dev_ubiquitous:
title = '%s(%s) <- %s, %s(%s)' % (displayname, a, c, r, l)
else:
title = '%s(%s) %s(%s)' % (displayname, a, r, l)
e = TraceEvent(title, kprobename, start, end - start)
tgtdev['src'].append(e)
return True
def trimTimeVal(self, t, t0, dT, left):
if left:
if(t > t0):
if(t - dT < t0):
return t0
return t - dT
else:
return t
else:
if(t < t0 + dT):
if(t > t0):
return t0 + dT
return t + dT
else:
return t
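# Worked example with made-up numbers: removing a 2.0s low-level gap
# that starts at t0=5.0 (dT=2.0, left=True) shifts timestamps after the
# gap left by dT, clamps ones inside the gap to its left edge, and
# leaves earlier ones alone:
#   trimTimeVal(7.5, 5.0, 2.0, True) -> 5.5  (after the gap, shifted)
#   trimTimeVal(6.0, 5.0, 2.0, True) -> 5.0  (inside the gap, clamped)
#   trimTimeVal(4.0, 5.0, 2.0, True) -> 4.0  (before the gap, unchanged)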
def trimTime(self, t0, dT, left):
self.tSuspended = self.trimTimeVal(self.tSuspended, t0, dT, left)
self.tResumed = self.trimTimeVal(self.tResumed, t0, dT, left)
self.start = self.trimTimeVal(self.start, t0, dT, left)
self.end = self.trimTimeVal(self.end, t0, dT, left)
for phase in self.phases:
p = self.dmesg[phase]
p['start'] = self.trimTimeVal(p['start'], t0, dT, left)
p['end'] = self.trimTimeVal(p['end'], t0, dT, left)
list = p['list']
for name in list:
d = list[name]
d['start'] = self.trimTimeVal(d['start'], t0, dT, left)
d['end'] = self.trimTimeVal(d['end'], t0, dT, left)
if('ftrace' in d):
cg = d['ftrace']
cg.start = self.trimTimeVal(cg.start, t0, dT, left)
cg.end = self.trimTimeVal(cg.end, t0, dT, left)
for line in cg.list:
line.time = self.trimTimeVal(line.time, t0, dT, left)
if('src' in d):
for e in d['src']:
e.time = self.trimTimeVal(e.time, t0, dT, left)
def normalizeTime(self, tZero):
# trim out any standby or freeze clock time
if(self.tSuspended != self.tResumed):
if(self.tResumed > tZero):
self.trimTime(self.tSuspended, \
self.tResumed-self.tSuspended, True)
else:
self.trimTime(self.tSuspended, \
self.tResumed-self.tSuspended, False)
def newPhaseWithSingleAction(self, phasename, devname, start, end, color):
for phase in self.phases:
self.dmesg[phase]['order'] += 1
self.html_device_id += 1
devid = '%s%d' % (self.idstr, self.html_device_id)
list = dict()
list[devname] = \
{'start': start, 'end': end, 'pid': 0, 'par': '',
'length': (end-start), 'row': 0, 'id': devid, 'drv': ''}
self.dmesg[phasename] = \
{'list': list, 'start': start, 'end': end,
'row': 0, 'color': color, 'order': 0}
self.phases = self.sortedPhases()
def newPhase(self, phasename, start, end, color, order):
if(order < 0):
order = len(self.phases)
for phase in self.phases[order:]:
self.dmesg[phase]['order'] += 1
if(order > 0):
p = self.phases[order-1]
self.dmesg[p]['end'] = start
if(order < len(self.phases)):
p = self.phases[order]
self.dmesg[p]['start'] = end
list = dict()
self.dmesg[phasename] = \
{'list': list, 'start': start, 'end': end,
'row': 0, 'color': color, 'order': order}
self.phases = self.sortedPhases()
self.devicegroups.append([phasename])
def setPhase(self, phase, ktime, isbegin):
if(isbegin):
self.dmesg[phase]['start'] = ktime
else:
self.dmesg[phase]['end'] = ktime
def dmesgSortVal(self, phase):
return self.dmesg[phase]['order']
def sortedPhases(self):
return sorted(self.dmesg, key=self.dmesgSortVal)
def sortedDevices(self, phase):
list = self.dmesg[phase]['list']
slist = []
tmp = dict()
for devname in list:
dev = list[devname]
tmp[dev['start']] = devname
for t in sorted(tmp):
slist.append(tmp[t])
return slist
def fixupInitcalls(self, phase, end):
# if any calls never returned, clip them at system resume end
phaselist = self.dmesg[phase]['list']
for devname in phaselist:
dev = phaselist[devname]
if(dev['end'] < 0):
for p in self.phases:
if self.dmesg[p]['end'] > dev['start']:
dev['end'] = self.dmesg[p]['end']
break
vprint('%s (%s): callback didn\'t return' % (devname, phase))
def deviceFilter(self, devicefilter):
# remove all but the relatives of the filter devnames
filter = []
for phase in self.phases:
list = self.dmesg[phase]['list']
for name in devicefilter:
dev = name
while(dev in list):
if(dev not in filter):
filter.append(dev)
dev = list[dev]['par']
children = self.deviceDescendants(name, phase)
for dev in children:
if(dev not in filter):
filter.append(dev)
for phase in self.phases:
list = self.dmesg[phase]['list']
rmlist = []
for name in list:
pid = list[name]['pid']
if(name not in filter and pid >= 0):
rmlist.append(name)
for name in rmlist:
del list[name]
def fixupInitcallsThatDidntReturn(self):
# if any calls never returned, clip them at system resume end
for phase in self.phases:
self.fixupInitcalls(phase, self.getEnd())
def isInsideTimeline(self, start, end):
if(self.start <= start and self.end > start):
return True
return False
def phaseOverlap(self, phases):
rmgroups = []
newgroup = []
for group in self.devicegroups:
for phase in phases:
if phase not in group:
continue
for p in group:
if p not in newgroup:
newgroup.append(p)
if group not in rmgroups:
rmgroups.append(group)
for group in rmgroups:
self.devicegroups.remove(group)
self.devicegroups.append(newgroup)
def newActionGlobal(self, name, start, end, pid=-1, color=''):
# if event starts before timeline start, expand timeline
if(start < self.start):
self.setStart(start)
# if event ends after timeline end, expand the timeline
if(end > self.end):
self.setEnd(end)
# which phase is this device callback or action "in"
targetphase = "none"
htmlclass = ''
overlap = 0.0
phases = []
for phase in self.phases:
pstart = self.dmesg[phase]['start']
pend = self.dmesg[phase]['end']
o = max(0, min(end, pend) - max(start, pstart))
if o > 0:
phases.append(phase)
if o > overlap:
if overlap > 0 and phase == 'post_resume':
continue
targetphase = phase
overlap = o
if pid == -2:
htmlclass = ' bg'
if len(phases) > 1:
htmlclass = ' bg'
self.phaseOverlap(phases)
if targetphase in self.phases:
newname = self.newAction(targetphase, name, pid, '', start, end, '', htmlclass, color)
return (targetphase, newname)
return False
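# Phase-selection sketch (made-up times): an event from t=2.5 to t=4.0
# over suspend_late [2.0-3.0] and suspend_noirq [3.0-5.0] overlaps them
# by 0.5 and 1.0 respectively, so it is filed under suspend_noirq, and
# because it touches two phases it gets the ' bg' html class and the
# phases are merged via phaseOverlap().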
def newAction(self, phase, name, pid, parent, start, end, drv, htmlclass='', color=''):
# new device callback for a specific phase
self.html_device_id += 1
devid = '%s%d' % (self.idstr, self.html_device_id)
list = self.dmesg[phase]['list']
length = -1.0
if(start >= 0 and end >= 0):
length = end - start
if pid == -2:
i = 2
origname = name
while(name in list):
name = '%s[%d]' % (origname, i)
i += 1
list[name] = {'start': start, 'end': end, 'pid': pid, 'par': parent,
'length': length, 'row': 0, 'id': devid, 'drv': drv }
if htmlclass:
list[name]['htmlclass'] = htmlclass
if color:
list[name]['color'] = color
return name
def deviceIDs(self, devlist, phase):
idlist = []
list = self.dmesg[phase]['list']
for devname in list:
if devname in devlist:
idlist.append(list[devname]['id'])
return idlist
def deviceParentID(self, devname, phase):
pdev = ''
pdevid = ''
list = self.dmesg[phase]['list']
if devname in list:
pdev = list[devname]['par']
if pdev in list:
return list[pdev]['id']
return pdev
def deviceChildren(self, devname, phase):
devlist = []
list = self.dmesg[phase]['list']
for child in list:
if(list[child]['par'] == devname):
devlist.append(child)
return devlist
def deviceDescendants(self, devname, phase):
children = self.deviceChildren(devname, phase)
family = children
for child in children:
family += self.deviceDescendants(child, phase)
return family
def deviceChildrenIDs(self, devname, phase):
devlist = self.deviceChildren(devname, phase)
return self.deviceIDs(devlist, phase)
def printDetails(self):
vprint(' test start: %f' % self.start)
for phase in self.phases:
dc = len(self.dmesg[phase]['list'])
vprint(' %16s: %f - %f (%d devices)' % (phase, \
self.dmesg[phase]['start'], self.dmesg[phase]['end'], dc))
vprint(' test end: %f' % self.end)
def deviceChildrenAllPhases(self, devname):
devlist = []
for phase in self.phases:
list = self.deviceChildren(devname, phase)
for dev in list:
if dev not in devlist:
devlist.append(dev)
return devlist
def masterTopology(self, name, list, depth):
node = DeviceNode(name, depth)
for cname in list:
# avoid self-recursion
if name == cname:
continue
clist = self.deviceChildrenAllPhases(cname)
cnode = self.masterTopology(cname, clist, depth+1)
node.children.append(cnode)
return node
def printTopology(self, node):
html = ''
if node.name:
info = ''
drv = ''
for phase in self.phases:
list = self.dmesg[phase]['list']
if node.name in list:
s = list[node.name]['start']
e = list[node.name]['end']
if list[node.name]['drv']:
drv = ' {'+list[node.name]['drv']+'}'
info += ('<li>%s: %.3fms</li>' % (phase, (e-s)*1000))
html += '<li><b>'+node.name+drv+'</b>'
if info:
html += '<ul>'+info+'</ul>'
html += '</li>'
if len(node.children) > 0:
html += '<ul>'
for cnode in node.children:
html += self.printTopology(cnode)
html += '</ul>'
return html
def rootDeviceList(self):
# list of devices graphed
real = []
for phase in self.dmesg:
list = self.dmesg[phase]['list']
for dev in list:
if list[dev]['pid'] >= 0 and dev not in real:
real.append(dev)
# list of top-most root devices
rootlist = []
for phase in self.dmesg:
list = self.dmesg[phase]['list']
for dev in list:
pdev = list[dev]['par']
pid = list[dev]['pid']
if(pid < 0 or re.match('[0-9]*-[0-9]*\.[0-9]*[\.0-9]*\:[\.0-9]*$', pdev)):
continue
if pdev and pdev not in real and pdev not in rootlist:
rootlist.append(pdev)
return rootlist
def deviceTopology(self):
rootlist = self.rootDeviceList()
master = self.masterTopology('', rootlist, 0)
return self.printTopology(master)
def selectTimelineDevices(self, widfmt, tTotal, mindevlen):
# only select devices that will actually show up in html
self.tdevlist = dict()
for phase in self.dmesg:
devlist = []
list = self.dmesg[phase]['list']
for dev in list:
length = (list[dev]['end'] - list[dev]['start']) * 1000
width = widfmt % (((list[dev]['end']-list[dev]['start'])*100)/tTotal)
if width != '0.000000' and length >= mindevlen:
devlist.append(dev)
self.tdevlist[phase] = devlist
# Class: TraceEvent
# Description:
# A container for trace event data found in the ftrace file
class TraceEvent:
text = ''
time = 0.0
length = 0.0
title = ''
row = 0
def __init__(self, a, n, t, l):
self.title = a
self.text = n
self.time = t
self.length = l
# Class: FTraceLine
# Description:
# A container for a single line of ftrace data. There are six basic types:
# callgraph line:
# call: " dpm_run_callback() {"
# return: " }"
# leaf: " dpm_run_callback();"
# trace event:
# tracing_mark_write: SUSPEND START or RESUME COMPLETE
# suspend_resume: phase or custom exec block data
# device_pm_callback: device callback info
class FTraceLine:
time = 0.0
length = 0.0
fcall = False
freturn = False
fevent = False
fkprobe = False
depth = 0
name = ''
type = ''
def __init__(self, t, m='', d=''):
self.time = float(t)
if not m and not d:
return
# is this a trace event
if(d == 'traceevent' or re.match('^ *\/\* *(?P<msg>.*) \*\/ *$', m)):
if(d == 'traceevent'):
# nop format trace event
msg = m
else:
# function_graph format trace event
em = re.match('^ *\/\* *(?P<msg>.*) \*\/ *$', m)
msg = em.group('msg')
emm = re.match('^(?P<call>.*?): (?P<msg>.*)', msg)
if(emm):
self.name = emm.group('msg')
self.type = emm.group('call')
else:
self.name = msg
km = re.match('^(?P<n>.*)_cal$', self.type)
if km:
self.fcall = True
self.fkprobe = True
self.type = km.group('n')
return
km = re.match('^(?P<n>.*)_ret$', self.type)
if km:
self.freturn = True
self.fkprobe = True
self.type = km.group('n')
return
self.fevent = True
return
# convert the duration to seconds
if(d):
self.length = float(d)/1000000
# the indentation determines the depth
match = re.match('^(?P<d> *)(?P<o>.*)$', m)
if(not match):
return
self.depth = self.getDepth(match.group('d'))
m = match.group('o')
# function return
if(m[0] == '}'):
self.freturn = True
if(len(m) > 1):
# includes comment with function name
match = re.match('^} *\/\* *(?P<n>.*) *\*\/$', m)
if(match):
self.name = match.group('n').strip()
# function call
else:
self.fcall = True
# function call with children
if(m[-1] == '{'):
match = re.match('^(?P<n>.*) *\(.*', m)
if(match):
self.name = match.group('n').strip()
# function call with no children (leaf)
elif(m[-1] == ';'):
self.freturn = True
match = re.match('^(?P<n>.*) *\(.*', m)
if(match):
self.name = match.group('n').strip()
# something else (possibly a trace marker)
else:
self.name = m
def getDepth(self, str):
return len(str)/2
def debugPrint(self, dev=''):
if(self.freturn and self.fcall):
print('%s -- %f (%02d): %s(); (%.3f us)' % (dev, self.time, \
self.depth, self.name, self.length*1000000))
elif(self.freturn):
print('%s -- %f (%02d): %s} (%.3f us)' % (dev, self.time, \
self.depth, self.name, self.length*1000000))
else:
print('%s -- %f (%02d): %s() { (%.3f us)' % (dev, self.time, \
self.depth, self.name, self.length*1000000))
def startMarker(self):
global sysvals
# Is this the starting line of a suspend?
if not self.fevent:
return False
if sysvals.usetracemarkers:
if(self.name == 'SUSPEND START'):
return True
return False
else:
if(self.type == 'suspend_resume' and
re.match('suspend_enter\[.*\] begin', self.name)):
return True
return False
def endMarker(self):
# Is this the ending line of a resume?
if not self.fevent:
return False
if sysvals.usetracemarkers:
if(self.name == 'RESUME COMPLETE'):
return True
return False
else:
if(self.type == 'suspend_resume' and
re.match('thaw_processes\[.*\] end', self.name)):
return True
return False
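# Examples of the raw message text FTraceLine classifies (illustrative,
# function_graph format; depth comes from the leading indentation):
#   '  dpm_run_callback() {'  -> fcall (call with children)
#   '  }'                     -> freturn
#   '  msleep();'             -> fcall and freturn (leaf)
#   '/* SUSPEND START */'     -> fevent (trace marker)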
# Class: FTraceCallGraph
# Description:
# A container for the ftrace callgraph of a single recursive function.
# This can be a dpm_run_callback, dpm_prepare, or dpm_complete callgraph.
# Each instance is tied to a single device in a single phase, and is
# composed of an ordered list of FTraceLine objects.
class FTraceCallGraph:
start = -1.0
end = -1.0
list = []
invalid = False
depth = 0
pid = 0
def __init__(self, pid):
self.start = -1.0
self.end = -1.0
self.list = []
self.depth = 0
self.pid = pid
def addLine(self, line, debug=False):
# if this is already invalid, just leave
if(self.invalid):
return False
# invalidate on too much data or bad depth
if(len(self.list) >= 1000000 or self.depth < 0):
self.invalidate(line)
return False
# compare current depth with this line's pre-call depth
prelinedep = line.depth
if(line.freturn and not line.fcall):
prelinedep += 1
last = 0
lasttime = line.time
virtualfname = 'execution_misalignment'
if len(self.list) > 0:
last = self.list[-1]
lasttime = last.time
# handle low misalignments by inserting returns
if prelinedep < self.depth:
if debug and last:
print('-------- task %d --------' % self.pid)
last.debugPrint()
idx = 0
# add return calls to get the depth down
while prelinedep < self.depth:
if debug:
print('MISALIGN LOW (add returns): C%d - eC%d' % (self.depth, prelinedep))
self.depth -= 1
if idx == 0 and last and last.fcall and not last.freturn:
# special case, turn last call into a leaf
last.depth = self.depth
last.freturn = True
last.length = line.time - last.time
if debug:
last.debugPrint()
else:
vline = FTraceLine(lasttime)
vline.depth = self.depth
vline.name = virtualfname
vline.freturn = True
self.list.append(vline)
if debug:
vline.debugPrint()
idx += 1
if debug:
line.debugPrint()
print('')
# handle high misalignments by inserting calls
elif prelinedep > self.depth:
if debug and last:
print('-------- task %d --------' % self.pid)
last.debugPrint()
idx = 0
# add calls to get the depth up
while prelinedep > self.depth:
if debug:
print('MISALIGN HIGH (add calls): C%d - eC%d' % (self.depth, prelinedep))
if idx == 0 and line.freturn and not line.fcall:
# special case, turn this return into a leaf
line.fcall = True
prelinedep -= 1
else:
vline = FTraceLine(lasttime)
vline.depth = self.depth
vline.name = virtualfname
vline.fcall = True
if debug:
vline.debugPrint()
self.list.append(vline)
self.depth += 1
if not last:
self.start = vline.time
idx += 1
if debug:
line.debugPrint()
print('')
# process the call and set the new depth
if(line.fcall and not line.freturn):
self.depth += 1
elif(line.freturn and not line.fcall):
self.depth -= 1
if len(self.list) < 1:
self.start = line.time
self.list.append(line)
if(line.depth == 0 and line.freturn):
if(self.start < 0):
self.start = line.time
self.end = line.time
if line.fcall:
self.end += line.length
if self.list[0].name == virtualfname:
self.invalid = True
return True
return False
def invalidate(self, line):
if(len(self.list) > 0):
first = self.list[0]
self.list = []
self.list.append(first)
self.invalid = True
id = 'task %s' % (self.pid)
window = '(%f - %f)' % (self.start, line.time)
if(self.depth < 0):
vprint('Data misalignment for '+id+\
' (buffer overflow), ignoring this callback')
else:
vprint('Too much data for '+id+\
' '+window+', ignoring this callback')
def slice(self, t0, tN):
minicg = FTraceCallGraph(0)
count = -1
firstdepth = 0
for l in self.list:
if(l.time < t0 or l.time > tN):
continue
if(count < 0):
if(not l.fcall or l.name == 'dev_driver_string'):
continue
firstdepth = l.depth
count = 0
l.depth -= firstdepth
minicg.addLine(l)
if((count == 0 and l.freturn and l.fcall) or
(count > 0 and l.depth <= 0)):
break
count += 1
return minicg
def repair(self, enddepth):
# bring the depth back to 0 with additional returns
fixed = False
last = self.list[-1]
for i in reversed(range(enddepth)):
t = FTraceLine(last.time)
t.depth = i
t.freturn = True
fixed = self.addLine(t)
if fixed:
self.end = last.time
return True
return False
def postProcess(self, debug=False):
stack = dict()
cnt = 0
for l in self.list:
if(l.fcall and not l.freturn):
stack[l.depth] = l
cnt += 1
elif(l.freturn and not l.fcall):
if(l.depth not in stack):
if debug:
print('Post Process Error: Depth missing')
l.debugPrint()
return False
# transfer total time from return line to call line
stack[l.depth].length = l.length
stack.pop(l.depth)
l.length = 0
cnt -= 1
if(cnt == 0):
# trace caught the whole call tree
return True
elif(cnt < 0):
if debug:
print('Post Process Error: Depth is less than 0')
return False
# trace ended before call tree finished
return self.repair(cnt)
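# Sketch of the postProcess() bookkeeping (hypothetical pair of lines):
#   depth 0: 'f() {'           fcall, pushed on stack[0]
#   depth 0: '}' length=5us    freturn, its 5us is moved to the call line
# so the call line carries the total duration and the return line is
# zeroed; a non-zero final count means the trace was cut off and repair()
# synthesizes the missing returns.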
def deviceMatch(self, pid, data):
found = False
# add the callgraph data to the device hierarchy
borderphase = {
'dpm_prepare': 'suspend_prepare',
'dpm_complete': 'resume_complete'
}
if(self.list[0].name in borderphase):
p = borderphase[self.list[0].name]
list = data.dmesg[p]['list']
for devname in list:
dev = list[devname]
if(pid == dev['pid'] and
self.start <= dev['start'] and
self.end >= dev['end']):
dev['ftrace'] = self.slice(dev['start'], dev['end'])
found = True
return found
for p in data.phases:
if(data.dmesg[p]['start'] <= self.start and
self.start <= data.dmesg[p]['end']):
list = data.dmesg[p]['list']
for devname in list:
dev = list[devname]
if(pid == dev['pid'] and
self.start <= dev['start'] and
self.end >= dev['end']):
dev['ftrace'] = self
found = True
break
break
return found
def newActionFromFunction(self, data):
name = self.list[0].name
if name in ['dpm_run_callback', 'dpm_prepare', 'dpm_complete']:
return
fs = self.start
fe = self.end
if fs < data.start or fe > data.end:
return
phase = ''
for p in data.phases:
if(data.dmesg[p]['start'] <= self.start and
self.start < data.dmesg[p]['end']):
phase = p
break
if not phase:
return
out = data.newActionGlobal(name, fs, fe, -2)
if out:
phase, myname = out
data.dmesg[phase]['list'][myname]['ftrace'] = self
def debugPrint(self):
print('[%f - %f] %s (%d)' % (self.start, self.end, self.list[0].name, self.pid))
for l in self.list:
if(l.freturn and l.fcall):
print('%f (%02d): %s(); (%.3f us)' % (l.time, \
l.depth, l.name, l.length*1000000))
elif(l.freturn):
print('%f (%02d): %s} (%.3f us)' % (l.time, \
l.depth, l.name, l.length*1000000))
else:
print('%f (%02d): %s() { (%.3f us)' % (l.time, \
l.depth, l.name, l.length*1000000))
print(' ')
# Class: Timeline
# Description:
# A container for a device timeline which calculates
# all the html properties to display it correctly
class Timeline:
html = {}
height = 0 # total timeline height
scaleH = 20 # timescale (top) row height
rowH = 30 # device row height
bodyH = 0 # body height
rows = 0 # total timeline rows
phases = []
rowmaxlines = dict()
rowcount = dict()
rowheight = dict()
def __init__(self, rowheight):
self.rowH = rowheight
self.html = {
'header': '',
'timeline': '',
'legend': '',
}
# Function: getDeviceRows
# Description:
# determine how many rows the trace events for a single device will take
# Arguments:
# rawlist: the list of trace events (src) for a single device
# Output:
# The total number of rows needed to display these trace events
def getDeviceRows(self, rawlist):
# clear all rows and set them to undefined
lendict = dict()
for item in rawlist:
item.row = -1
lendict[item] = item.length
list = []
for i in sorted(lendict, key=lendict.get, reverse=True):
list.append(i)
remaining = len(list)
rowdata = dict()
row = 1
# try to pack each row with as many ranges as possible
while(remaining > 0):
if(row not in rowdata):
rowdata[row] = []
for i in list:
if(i.row >= 0):
continue
s = i.time
e = i.time + i.length
valid = True
for ritem in rowdata[row]:
rs = ritem.time
rend = ritem.time + ritem.length
if(not (((s <= rs) and (e <= rs)) or
((s >= rend) and (e >= rend)))):
valid = False
break
if(valid):
rowdata[row].append(i)
i.row = row
remaining -= 1
row += 1
return row
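# One possible packing with made-up events A[0-2ms], B[1-3ms], C[2-4ms]:
# A and C can share a row because C starts exactly where A ends, while B
# overlaps both and is pushed to the next row:
#   row 1: A, C
#   row 2: B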
# Function: getPhaseRows
# Description:
# Organize the timeline entries into the smallest
# number of rows possible, with no entry overlapping
# Arguments:
# dmesg: the data.dmesg dict of phases and their device lists
# devlist: a list of (phase, devname) tuples for the devices to graph
# Output:
# The total number of rows needed to display this phase of the timeline
def getPhaseRows(self, dmesg, devlist):
# clear all rows and set them to undefined
remaining = len(devlist)
rowdata = dict()
row = 0
lendict = dict()
myphases = []
for item in devlist:
if item[0] not in self.phases:
self.phases.append(item[0])
if item[0] not in myphases:
myphases.append(item[0])
self.rowmaxlines[item[0]] = dict()
self.rowheight[item[0]] = dict()
dev = dmesg[item[0]]['list'][item[1]]
dev['row'] = -1
lendict[item] = float(dev['end']) - float(dev['start'])
if 'src' in dev:
dev['devrows'] = self.getDeviceRows(dev['src'])
lenlist = []
for i in sorted(lendict, key=lendict.get, reverse=True):
lenlist.append(i)
orderedlist = []
for item in lenlist:
dev = dmesg[item[0]]['list'][item[1]]
if dev['pid'] == -2:
orderedlist.append(item)
for item in lenlist:
if item not in orderedlist:
orderedlist.append(item)
# try to pack each row with as many ranges as possible
while(remaining > 0):
rowheight = 1
if(row not in rowdata):
rowdata[row] = []
for item in orderedlist:
dev = dmesg[item[0]]['list'][item[1]]
if(dev['row'] < 0):
s = dev['start']
e = dev['end']
valid = True
for ritem in rowdata[row]:
rs = ritem['start']
rend = ritem['end']
if(not (((s <= rs) and (e <= rs)) or
((s >= rend) and (e >= rend)))):
valid = False
break
if(valid):
rowdata[row].append(dev)
dev['row'] = row
remaining -= 1
if 'devrows' in dev and dev['devrows'] > rowheight:
rowheight = dev['devrows']
for phase in myphases:
self.rowmaxlines[phase][row] = rowheight
self.rowheight[phase][row] = rowheight * self.rowH
row += 1
if(row > self.rows):
self.rows = int(row)
for phase in myphases:
self.rowcount[phase] = row
return row
def phaseRowHeight(self, phase, row):
return self.rowheight[phase][row]
def phaseRowTop(self, phase, row):
top = 0
for i in sorted(self.rowheight[phase]):
if i >= row:
break
top += self.rowheight[phase][i]
return top
# Function: calcTotalRows
# Description:
# Calculate the heights and offsets for the header and rows
def calcTotalRows(self):
maxrows = 0
standardphases = []
for phase in self.phases:
total = 0
for i in sorted(self.rowmaxlines[phase]):
total += self.rowmaxlines[phase][i]
if total > maxrows:
maxrows = total
if total == self.rowcount[phase]:
standardphases.append(phase)
self.height = self.scaleH + (maxrows*self.rowH)
self.bodyH = self.height - self.scaleH
for phase in standardphases:
for i in sorted(self.rowheight[phase]):
self.rowheight[phase][i] = self.bodyH/self.rowcount[phase]
# Function: createTimeScale
# Description:
# Create the timescale for a timeline block
# Arguments:
# m0: start time (mode begin)
# mMax: end time (mode end)
# tTotal: total timeline time
# mode: suspend or resume
# Output:
# The html code needed to display the time scale
def createTimeScale(self, m0, mMax, tTotal, mode):
timescale = '<div class="t" style="right:{0}%">{1}</div>\n'
rline = '<div class="t" style="left:0;border-left:1px solid black;border-right:0;">Resume</div>\n'
output = '<div class="timescale">\n'
# set scale for timeline
mTotal = mMax - m0
tS = 0.1
if(tTotal <= 0):
return output+'</div>\n'
if(tTotal > 4):
tS = 1
divTotal = int(mTotal/tS) + 1
divEdge = (mTotal - tS*(divTotal-1))*100/mTotal
for i in range(divTotal):
htmlline = ''
if(mode == 'resume'):
pos = '%0.3f' % (100 - ((float(i)*tS*100)/mTotal))
val = '%0.fms' % (float(i)*tS*1000)
htmlline = timescale.format(pos, val)
if(i == 0):
htmlline = rline
else:
pos = '%0.3f' % (100 - ((float(i)*tS*100)/mTotal) - divEdge)
val = '%0.fms' % (float(i-divTotal+1)*tS*1000)
if(i == divTotal - 1):
val = 'Suspend'
htmlline = timescale.format(pos, val)
output += htmlline
output += '</div>\n'
return output
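# Illustrative fragment of the returned html for a 1.0s resume block
# (tS=0.1, numbers made up):
#   <div class="timescale">
#   <div class="t" style="left:0;border-left:1px solid black;border-right:0;">Resume</div>
#   <div class="t" style="right:90.000%">100ms</div>
#   ...
#   </div>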
# Class: TestProps
# Description:
# A list of values describing the properties of these test runs
class TestProps:
stamp = ''
tracertype = ''
S0i3 = False
fwdata = []
ftrace_line_fmt_fg = \
'^ *(?P<time>[0-9\.]*) *\| *(?P<cpu>[0-9]*)\)'+\
' *(?P<proc>.*)-(?P<pid>[0-9]*) *\|'+\
'[ +!#\*@$]*(?P<dur>[0-9\.]*) .*\| (?P<msg>.*)'
ftrace_line_fmt_nop = \
' *(?P<proc>.*)-(?P<pid>[0-9]*) *\[(?P<cpu>[0-9]*)\] *'+\
'(?P<flags>.{4}) *(?P<time>[0-9\.]*): *'+\
'(?P<msg>.*)'
ftrace_line_fmt = ftrace_line_fmt_nop
cgformat = False
data = 0
ktemp = dict()
def __init__(self):
self.ktemp = dict()
def setTracerType(self, tracer):
self.tracertype = tracer
if(tracer == 'function_graph'):
self.cgformat = True
self.ftrace_line_fmt = self.ftrace_line_fmt_fg
elif(tracer == 'nop'):
self.ftrace_line_fmt = self.ftrace_line_fmt_nop
else:
doError('Invalid tracer format: [%s]' % tracer, False)
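# Sample lines each format is intended to match (values illustrative):
#   function_graph (ftrace_line_fmt_fg):
#     ' 5234.560100 |   0)  rtcwake-2624  |   4.909 us |  } /* msleep */'
#   nop (ftrace_line_fmt_nop):
#     ' rtcwake-2624  [000] d..3  5234.560100: suspend_resume: machine_suspend[1] begin'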
# Class: TestRun
# Description:
# A container for a suspend/resume test run. This is necessary as
# there could be more than one, and they need to be separate.
class TestRun:
ftemp = dict()
ttemp = dict()
data = 0
def __init__(self, dataobj):
self.data = dataobj
self.ftemp = dict()
self.ttemp = dict()
# ----------------- FUNCTIONS --------------------
# Function: vprint
# Description:
# verbose print (prints only with -verbose option)
# Arguments:
# msg: the debug/log message to print
def vprint(msg):
global sysvals
if(sysvals.verbose):
print(msg)
# Function: parseStamp
# Description:
# Pull in the stamp comment line from the data file(s),
# create the stamp, and add it to the global sysvals object
# Arguments:
# line: the stamp comment line from the log file
# data: the Data object to store the parsed stamp in
def parseStamp(line, data):
global sysvals
m = re.match(sysvals.stampfmt, line)
data.stamp = {'time': '', 'host': '', 'mode': ''}
dt = datetime(int(m.group('y'))+2000, int(m.group('m')),
int(m.group('d')), int(m.group('H')), int(m.group('M')),
int(m.group('S')))
data.stamp['time'] = dt.strftime('%B %d %Y, %I:%M:%S %p')
data.stamp['host'] = m.group('host')
data.stamp['mode'] = m.group('mode')
data.stamp['kernel'] = m.group('kernel')
sysvals.hostname = data.stamp['host']
sysvals.suspendmode = data.stamp['mode']
if not sysvals.stamp:
sysvals.stamp = data.stamp
# Function: diffStamp
# Description:
# compare the host, kernel, and mode fields in two stamps
# Arguments:
# stamp1: dict with host, kernel, and mode entries
# stamp2: dict with host, kernel, and mode entries
# Return:
# True if stamps differ, False if they're the same
def diffStamp(stamp1, stamp2):
if 'host' in stamp1 and 'host' in stamp2:
if stamp1['host'] != stamp2['host']:
return True
if 'kernel' in stamp1 and 'kernel' in stamp2:
if stamp1['kernel'] != stamp2['kernel']:
return True
if 'mode' in stamp1 and 'mode' in stamp2:
if stamp1['mode'] != stamp2['mode']:
return True
return False
# Function: doesTraceLogHaveTraceEvents
# Description:
# Quickly determine if the ftrace log has some or all of the trace events
# required for primary parsing. Set the usetraceevents and/or
# usetraceeventsonly flags in the global sysvals object
def doesTraceLogHaveTraceEvents():
global sysvals
# check for kprobes
sysvals.usekprobes = False
out = os.system('grep -q "_cal: (" '+sysvals.ftracefile)
if(out == 0):
sysvals.usekprobes = True
# check for callgraph data on trace event blocks
out = os.system('grep -q "_cpu_down()" '+sysvals.ftracefile)
if(out == 0):
sysvals.usekprobes = True
out = os.popen('head -1 '+sysvals.ftracefile).read().replace('\n', '')
m = re.match(sysvals.stampfmt, out)
if m and m.group('mode') == 'command':
sysvals.usetraceeventsonly = True
sysvals.usetraceevents = True
return
# figure out what level of trace events are supported
sysvals.usetraceeventsonly = True
sysvals.usetraceevents = False
for e in sysvals.traceevents:
out = os.system('grep -q "'+e+': " '+sysvals.ftracefile)
if(out != 0):
sysvals.usetraceeventsonly = False
if(e == 'suspend_resume' and out == 0):
sysvals.usetraceevents = True
# determine if this log is properly formatted
for e in ['SUSPEND START', 'RESUME COMPLETE']:
out = os.system('grep -q "'+e+'" '+sysvals.ftracefile)
if(out != 0):
sysvals.usetracemarkers = False
# Function: appendIncompleteTraceLog
# Description:
# [deprecated for kernel 3.15 or newer]
# Legacy support of ftrace outputs that lack the device_pm_callback
# and/or suspend_resume trace events. The primary data should be
# taken from dmesg, and this ftrace is used only for callgraph data
# or custom actions in the timeline. The data is appended to the Data
# objects provided.
# Arguments:
# testruns: the array of Data objects obtained from parseKernelLog
def appendIncompleteTraceLog(testruns):
global sysvals
# create TestRun vessels for ftrace parsing
testcnt = len(testruns)
testidx = 0
testrun = []
for data in testruns:
testrun.append(TestRun(data))
# extract the callgraph and traceevent data
vprint('Analyzing the ftrace data...')
tp = TestProps()
tf = open(sysvals.ftracefile, 'r')
data = 0
for line in tf:
# remove any latent carriage returns
line = line.replace('\r\n', '')
# grab the time stamp
m = re.match(sysvals.stampfmt, line)
if(m):
tp.stamp = line
continue
# determine the trace data type (required for further parsing)
m = re.match(sysvals.tracertypefmt, line)
if(m):
tp.setTracerType(m.group('t'))
continue
# device properties line
if(re.match(sysvals.devpropfmt, line)):
devProps(line)
continue
# parse only valid lines, if this is not one move on
m = re.match(tp.ftrace_line_fmt, line)
if(not m):
continue
# gather the basic message data from the line
m_time = m.group('time')
m_pid = m.group('pid')
m_msg = m.group('msg')
if(tp.cgformat):
m_param3 = m.group('dur')
else:
m_param3 = 'traceevent'
if(m_time and m_pid and m_msg):
t = FTraceLine(m_time, m_msg, m_param3)
pid = int(m_pid)
else:
continue
# the line should be a call, return, or event
if(not t.fcall and not t.freturn and not t.fevent):
continue
# look for the suspend start marker
if(t.startMarker()):
data = testrun[testidx].data
parseStamp(tp.stamp, data)
data.setStart(t.time)
continue
if(not data):
continue
# find the end of resume
if(t.endMarker()):
data.setEnd(t.time)
testidx += 1
if(testidx >= testcnt):
break
continue
# trace event processing
if(t.fevent):
# general trace events have two types, begin and end
if(re.match('(?P<name>.*) begin$', t.name)):
isbegin = True
elif(re.match('(?P<name>.*) end$', t.name)):
isbegin = False
else:
continue
m = re.match('(?P<name>.*)\[(?P<val>[0-9]*)\] .*', t.name)
if(m):
val = m.group('val')
if val == '0':
name = m.group('name')
else:
name = m.group('name')+'['+val+']'
else:
m = re.match('(?P<name>.*) .*', t.name)
name = m.group('name')
# special processing for trace events
if re.match('dpm_prepare\[.*', name):
continue
elif re.match('machine_suspend.*', name):
continue
elif re.match('suspend_enter\[.*', name):
if(not isbegin):
data.dmesg['suspend_prepare']['end'] = t.time
continue
elif re.match('dpm_suspend\[.*', name):
if(not isbegin):
data.dmesg['suspend']['end'] = t.time
continue
elif re.match('dpm_suspend_late\[.*', name):
if(isbegin):
data.dmesg['suspend_late']['start'] = t.time
else:
data.dmesg['suspend_late']['end'] = t.time
continue
elif re.match('dpm_suspend_noirq\[.*', name):
if(isbegin):
data.dmesg['suspend_noirq']['start'] = t.time
else:
data.dmesg['suspend_noirq']['end'] = t.time
continue
elif re.match('dpm_resume_noirq\[.*', name):
if(isbegin):
data.dmesg['resume_machine']['end'] = t.time
data.dmesg['resume_noirq']['start'] = t.time
else:
data.dmesg['resume_noirq']['end'] = t.time
continue
elif re.match('dpm_resume_early\[.*', name):
if(isbegin):
data.dmesg['resume_early']['start'] = t.time
else:
data.dmesg['resume_early']['end'] = t.time
continue
elif re.match('dpm_resume\[.*', name):
if(isbegin):
data.dmesg['resume']['start'] = t.time
else:
data.dmesg['resume']['end'] = t.time
continue
elif re.match('dpm_complete\[.*', name):
if(isbegin):
data.dmesg['resume_complete']['start'] = t.time
else:
data.dmesg['resume_complete']['end'] = t.time
continue
# skip trace events inside devices calls
if(not data.isTraceEventOutsideDeviceCalls(pid, t.time)):
continue
# global events (outside device calls) are simply graphed
if(isbegin):
# store each trace event in ttemp
if(name not in testrun[testidx].ttemp):
testrun[testidx].ttemp[name] = []
testrun[testidx].ttemp[name].append(\
{'begin': t.time, 'end': t.time})
else:
# finish off matching trace event in ttemp
if(name in testrun[testidx].ttemp):
testrun[testidx].ttemp[name][-1]['end'] = t.time
# call/return processing
elif sysvals.usecallgraph:
# create a callgraph object for the data
if(pid not in testrun[testidx].ftemp):
testrun[testidx].ftemp[pid] = []
testrun[testidx].ftemp[pid].append(FTraceCallGraph(pid))
# when the call is finished, see which device matches it
cg = testrun[testidx].ftemp[pid][-1]
if(cg.addLine(t)):
testrun[testidx].ftemp[pid].append(FTraceCallGraph(pid))
tf.close()
for test in testrun:
# add the traceevent data to the device hierarchy
if(sysvals.usetraceevents):
for name in test.ttemp:
for event in test.ttemp[name]:
test.data.newActionGlobal(name, event['begin'], event['end'])
# add the callgraph data to the device hierarchy
for pid in test.ftemp:
for cg in test.ftemp[pid]:
if len(cg.list) < 1 or cg.invalid:
continue
if(not cg.postProcess()):
id = 'task %s cpu %s' % (pid, m.group('cpu'))
vprint('Sanity check failed for '+\
id+', ignoring this callback')
continue
callstart = cg.start
callend = cg.end
for p in test.data.phases:
if(test.data.dmesg[p]['start'] <= callstart and
callstart <= test.data.dmesg[p]['end']):
list = test.data.dmesg[p]['list']
for devname in list:
dev = list[devname]
if(pid == dev['pid'] and
callstart <= dev['start'] and
callend >= dev['end']):
dev['ftrace'] = cg
break
if(sysvals.verbose):
test.data.printDetails()
# Function: parseTraceLog
# Description:
# Analyze an ftrace log output file generated from this app during
# the execution phase. Used when the ftrace log is the primary data source
# and includes the suspend_resume and device_pm_callback trace events
# The ftrace filename is taken from sysvals
# Output:
# An array of Data objects
def parseTraceLog():
global sysvals
vprint('Analyzing the ftrace data...')
if(os.path.exists(sysvals.ftracefile) == False):
doError('%s does not exist' % sysvals.ftracefile, False)
sysvals.setupAllKprobes()
tracewatch = ['suspend_enter']
if sysvals.usekprobes:
tracewatch += ['sync_filesystems', 'freeze_processes', 'syscore_suspend',
'syscore_resume', 'resume_console', 'thaw_processes', 'CPU_ON', 'CPU_OFF']
# extract the callgraph and traceevent data
tp = TestProps()
testruns = []
testdata = []
testrun = 0
data = 0
tf = open(sysvals.ftracefile, 'r')
phase = 'suspend_prepare'
for line in tf:
# remove any latent carriage returns
line = line.replace('\r\n', '')
# stamp line: each stamp means a new test run
m = re.match(sysvals.stampfmt, line)
if(m):
tp.stamp = line
continue
# firmware line: pull out any firmware data
m = re.match(sysvals.firmwarefmt, line)
if(m):
tp.fwdata.append((int(m.group('s')), int(m.group('r'))))
continue
# tracer type line: determine the trace data type
m = re.match(sysvals.tracertypefmt, line)
if(m):
tp.setTracerType(m.group('t'))
continue
# post resume time line: did this test run include post-resume data
m = re.match(sysvals.postresumefmt, line)
if(m):
t = int(m.group('t'))
if(t > 0):
sysvals.postresumetime = t
continue
# device properties line
if(re.match(sysvals.devpropfmt, line)):
devProps(line)
continue
# ftrace line: parse only valid lines
m = re.match(tp.ftrace_line_fmt, line)
if(not m):
continue
# gather the basic message data from the line
m_time = m.group('time')
m_proc = m.group('proc')
m_pid = m.group('pid')
m_msg = m.group('msg')
if(tp.cgformat):
m_param3 = m.group('dur')
else:
m_param3 = 'traceevent'
if(m_time and m_pid and m_msg):
t = FTraceLine(m_time, m_msg, m_param3)
pid = int(m_pid)
else:
continue
# the line should be a call, return, or event
if(not t.fcall and not t.freturn and not t.fevent):
continue
# find the start of suspend
if(t.startMarker()):
phase = 'suspend_prepare'
data = Data(len(testdata))
testdata.append(data)
testrun = TestRun(data)
testruns.append(testrun)
parseStamp(tp.stamp, data)
if len(tp.fwdata) > data.testnumber:
data.fwSuspend, data.fwResume = tp.fwdata[data.testnumber]
if(data.fwSuspend > 0 or data.fwResume > 0):
data.fwValid = True
data.setStart(t.time)
continue
if(not data):
continue
# find the end of resume
if(t.endMarker()):
if(sysvals.usetracemarkers and sysvals.postresumetime > 0):
phase = 'post_resume'
data.newPhase(phase, t.time, t.time, '#F0F0F0', -1)
data.setEnd(t.time)
if(not sysvals.usetracemarkers):
# no trace markers? then quit and be sure to finish recording
# the event we used to trigger resume end
if(len(testrun.ttemp['thaw_processes']) > 0):
# if an entry exists, assume this is its end
testrun.ttemp['thaw_processes'][-1]['end'] = t.time
break
continue
# trace event processing
if(t.fevent):
if(phase == 'post_resume'):
data.setEnd(t.time)
if(t.type == 'suspend_resume'):
# suspend_resume trace events have two types, begin and end
if(re.match('(?P<name>.*) begin$', t.name)):
isbegin = True
elif(re.match('(?P<name>.*) end$', t.name)):
isbegin = False
else:
continue
m = re.match('(?P<name>.*)\[(?P<val>[0-9]*)\] .*', t.name)
if(m):
val = m.group('val')
if val == '0':
name = m.group('name')
else:
name = m.group('name')+'['+val+']'
else:
m = re.match('(?P<name>.*) .*', t.name)
name = m.group('name')
# ignore these events
if(name.split('[')[0] in tracewatch):
continue
# -- phase changes --
# suspend_prepare start
if(re.match('dpm_prepare\[.*', t.name)):
phase = 'suspend_prepare'
if(not isbegin):
data.dmesg[phase]['end'] = t.time
continue
# suspend start
elif(re.match('dpm_suspend\[.*', t.name)):
phase = 'suspend'
data.setPhase(phase, t.time, isbegin)
continue
# suspend_late start
elif(re.match('dpm_suspend_late\[.*', t.name)):
phase = 'suspend_late'
data.setPhase(phase, t.time, isbegin)
continue
# suspend_noirq start
elif(re.match('dpm_suspend_noirq\[.*', t.name)):
phase = 'suspend_noirq'
data.setPhase(phase, t.time, isbegin)
if(not isbegin):
phase = 'suspend_machine'
data.dmesg[phase]['start'] = t.time
continue
# suspend_machine/resume_machine
elif(re.match('machine_suspend\[.*', t.name)):
if(isbegin):
phase = 'suspend_machine'
data.dmesg[phase]['end'] = t.time
data.tSuspended = t.time
else:
if(sysvals.suspendmode in ['mem', 'disk'] and not tp.S0i3):
data.dmesg['suspend_machine']['end'] = t.time
data.tSuspended = t.time
phase = 'resume_machine'
data.dmesg[phase]['start'] = t.time
data.tResumed = t.time
data.tLow = data.tResumed - data.tSuspended
continue
# acpi_suspend
elif(re.match('acpi_suspend\[.*', t.name)):
# acpi_suspend[0] S0i3
if(re.match('acpi_suspend\[0\] begin', t.name)):
if(sysvals.suspendmode == 'mem'):
tp.S0i3 = True
data.dmesg['suspend_machine']['end'] = t.time
data.tSuspended = t.time
continue
# resume_noirq start
elif(re.match('dpm_resume_noirq\[.*', t.name)):
phase = 'resume_noirq'
data.setPhase(phase, t.time, isbegin)
if(isbegin):
data.dmesg['resume_machine']['end'] = t.time
continue
# resume_early start
elif(re.match('dpm_resume_early\[.*', t.name)):
phase = 'resume_early'
data.setPhase(phase, t.time, isbegin)
continue
# resume start
elif(re.match('dpm_resume\[.*', t.name)):
phase = 'resume'
data.setPhase(phase, t.time, isbegin)
continue
# resume complete start
elif(re.match('dpm_complete\[.*', t.name)):
phase = 'resume_complete'
if(isbegin):
data.dmesg[phase]['start'] = t.time
continue
# skip trace events inside devices calls
if(not data.isTraceEventOutsideDeviceCalls(pid, t.time)):
continue
# global events (outside device calls) are graphed
if(name not in testrun.ttemp):
testrun.ttemp[name] = []
if(isbegin):
# create a new list entry
testrun.ttemp[name].append(\
{'begin': t.time, 'end': t.time, 'pid': pid})
else:
if(len(testrun.ttemp[name]) > 0):
# if an entry exists, assume this is its end
testrun.ttemp[name][-1]['end'] = t.time
elif(phase == 'post_resume'):
# post resume events can just have ends
testrun.ttemp[name].append({
'begin': data.dmesg[phase]['start'],
'end': t.time})
# device callback start
elif(t.type == 'device_pm_callback_start'):
m = re.match('(?P<drv>.*) (?P<d>.*), parent: *(?P<p>.*), .*',\
t.name)
if(not m):
continue
drv = m.group('drv')
n = m.group('d')
p = m.group('p')
if(n and p):
data.newAction(phase, n, pid, p, t.time, -1, drv)
# device callback finish
elif(t.type == 'device_pm_callback_end'):
m = re.match('(?P<drv>.*) (?P<d>.*), err.*', t.name)
if(not m):
continue
n = m.group('d')
list = data.dmesg[phase]['list']
if(n in list):
dev = list[n]
dev['length'] = t.time - dev['start']
dev['end'] = t.time
# kprobe event processing
elif(t.fkprobe):
kprobename = t.type
kprobedata = t.name
key = (kprobename, pid)
# displayname is generated from kprobe data
displayname = ''
if(t.fcall):
displayname = sysvals.kprobeDisplayName(kprobename, kprobedata)
if not displayname:
continue
if(key not in tp.ktemp):
tp.ktemp[key] = []
tp.ktemp[key].append({
'pid': pid,
'begin': t.time,
'end': t.time,
'name': displayname,
'cdata': kprobedata,
'proc': m_proc,
})
elif(t.freturn):
if(key not in tp.ktemp or len(tp.ktemp[key]) < 1):
continue
e = tp.ktemp[key][-1]
if e['begin'] < 0.0 or t.time - e['begin'] < 0.000001:
tp.ktemp[key].pop()
else:
e['end'] = t.time
e['rdata'] = kprobedata
# callgraph processing
elif sysvals.usecallgraph:
# create a callgraph object for the data
key = (m_proc, pid)
if(key not in testrun.ftemp):
testrun.ftemp[key] = []
testrun.ftemp[key].append(FTraceCallGraph(pid))
# when the call is finished, see which device matches it
cg = testrun.ftemp[key][-1]
if(cg.addLine(t)):
testrun.ftemp[key].append(FTraceCallGraph(pid))
tf.close()
if sysvals.suspendmode == 'command':
for test in testruns:
for p in test.data.phases:
if p == 'resume_complete':
test.data.dmesg[p]['start'] = test.data.start
test.data.dmesg[p]['end'] = test.data.end
else:
test.data.dmesg[p]['start'] = test.data.start
test.data.dmesg[p]['end'] = test.data.start
test.data.tSuspended = test.data.start
test.data.tResumed = test.data.start
test.data.tLow = 0
test.data.fwValid = False
for test in testruns:
# add the traceevent data to the device hierarchy
if(sysvals.usetraceevents):
# add actual trace funcs
for name in test.ttemp:
for event in test.ttemp[name]:
test.data.newActionGlobal(name, event['begin'], event['end'], event['pid'])
# add the kprobe based virtual tracefuncs as actual devices
for key in tp.ktemp:
name, pid = key
if name not in sysvals.tracefuncs:
continue
for e in tp.ktemp[key]:
kb, ke = e['begin'], e['end']
if kb == ke or not test.data.isInsideTimeline(kb, ke):
continue
test.data.newActionGlobal(e['name'], kb, ke, pid)
# add config base kprobes and dev kprobes
for key in tp.ktemp:
name, pid = key
if name in sysvals.tracefuncs:
continue
for e in tp.ktemp[key]:
kb, ke = e['begin'], e['end']
if kb == ke or not test.data.isInsideTimeline(kb, ke):
continue
color = sysvals.kprobeColor(e['name'])
if name not in sysvals.dev_tracefuncs:
# config base kprobe
test.data.newActionGlobal(e['name'], kb, ke, -2, color)
elif sysvals.usedevsrc:
# dev kprobe
data.addDeviceFunctionCall(e['name'], name, e['proc'], pid, kb,
ke, e['cdata'], e['rdata'])
if sysvals.usecallgraph:
# add the callgraph data to the device hierarchy
sortlist = dict()
for key in test.ftemp:
proc, pid = key
for cg in test.ftemp[key]:
if len(cg.list) < 1 or cg.invalid:
continue
if(not cg.postProcess()):
id = 'task %s' % (pid)
vprint('Sanity check failed for '+\
id+', ignoring this callback')
continue
# match cg data to devices
if sysvals.suspendmode == 'command' or not cg.deviceMatch(pid, test.data):
sortkey = '%f%f%d' % (cg.start, cg.end, pid)
sortlist[sortkey] = cg
# create blocks for orphan cg data
for sortkey in sorted(sortlist):
cg = sortlist[sortkey]
name = cg.list[0].name
if sysvals.isCallgraphFunc(name):
vprint('Callgraph found for task %d: %.3fms, %s' % (cg.pid, (cg.end - cg.start)*1000, name))
cg.newActionFromFunction(test.data)
if sysvals.suspendmode == 'command':
if(sysvals.verbose):
for data in testdata:
data.printDetails()
return testdata
# fill in any missing phases
for data in testdata:
lp = data.phases[0]
for p in data.phases:
if(data.dmesg[p]['start'] < 0 and data.dmesg[p]['end'] < 0):
print('WARNING: phase "%s" is missing!' % p)
if(data.dmesg[p]['start'] < 0):
data.dmesg[p]['start'] = data.dmesg[lp]['end']
if(p == 'resume_machine'):
data.tSuspended = data.dmesg[lp]['end']
data.tResumed = data.dmesg[lp]['end']
data.tLow = 0
if(data.dmesg[p]['end'] < 0):
data.dmesg[p]['end'] = data.dmesg[p]['start']
lp = p
if(len(sysvals.devicefilter) > 0):
data.deviceFilter(sysvals.devicefilter)
data.fixupInitcallsThatDidntReturn()
if(sysvals.verbose):
data.printDetails()
return testdata
# Function: loadRawKernelLog
# Description:
# Load a raw kernel log that wasn't created by this tool; it might still
# be possible to extract a valid suspend/resume log from it
def loadRawKernelLog(dmesgfile):
global sysvals
stamp = {'time': '', 'host': '', 'mode': 'mem', 'kernel': ''}
stamp['time'] = datetime.now().strftime('%B %d %Y, %I:%M:%S %p')
stamp['host'] = sysvals.hostname
testruns = []
data = 0
lf = open(dmesgfile, 'r')
for line in lf:
line = line.replace('\r\n', '')
idx = line.find('[')
if idx > 1:
line = line[idx:]
m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
if(not m):
continue
msg = m.group("msg")
m = re.match('PM: Syncing filesystems.*', msg)
if(m):
if(data):
testruns.append(data)
data = Data(len(testruns))
data.stamp = stamp
if(data):
m = re.match('.* *(?P<k>[0-9]\.[0-9]{2}\.[0-9]-.*) .*', msg)
if(m):
stamp['kernel'] = m.group('k')
m = re.match('PM: Preparing system for (?P<m>.*) sleep', msg)
if(m):
stamp['mode'] = m.group('m')
data.dmesgtext.append(line)
if(data):
testruns.append(data)
sysvals.stamp = stamp
sysvals.suspendmode = stamp['mode']
lf.close()
return testruns
# Function: loadKernelLog
# Description:
# [deprecated for kernel 3.15.0 or newer]
# load the dmesg file into memory and fix up any ordering issues
# The dmesg filename is taken from sysvals
# Output:
# An array of empty Data objects with only their dmesgtext attributes set
def loadKernelLog():
global sysvals
vprint('Analyzing the dmesg data...')
if(os.path.exists(sysvals.dmesgfile) == False):
doError('%s does not exist' % sysvals.dmesgfile, False)
# there can be multiple test runs in a single file
tp = TestProps()
testruns = []
data = 0
lf = open(sysvals.dmesgfile, 'r')
for line in lf:
line = line.replace('\r\n', '')
idx = line.find('[')
if idx > 1:
line = line[idx:]
m = re.match(sysvals.stampfmt, line)
if(m):
tp.stamp = line
continue
m = re.match(sysvals.firmwarefmt, line)
if(m):
tp.fwdata.append((int(m.group('s')), int(m.group('r'))))
continue
m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
if(not m):
continue
msg = m.group("msg")
if(re.match('PM: Syncing filesystems.*', msg)):
if(data):
testruns.append(data)
data = Data(len(testruns))
parseStamp(tp.stamp, data)
if len(tp.fwdata) > data.testnumber:
data.fwSuspend, data.fwResume = tp.fwdata[data.testnumber]
if(data.fwSuspend > 0 or data.fwResume > 0):
data.fwValid = True
if(re.match('ACPI: resume from mwait', msg)):
print('NOTE: This suspend appears to be freeze rather than'+\
' %s, it will be treated as such' % sysvals.suspendmode)
sysvals.suspendmode = 'freeze'
if(not data):
continue
data.dmesgtext.append(line)
if(data):
testruns.append(data)
lf.close()
if(len(testruns) < 1):
# bad log, but see if you can extract something meaningful anyway
testruns = loadRawKernelLog(sysvals.dmesgfile)
if(len(testruns) < 1):
doError(' dmesg log is completely unreadable: %s' \
% sysvals.dmesgfile, False)
# fix lines with same timestamp/function with the call and return swapped
for data in testruns:
last = ''
for line in data.dmesgtext:
mc = re.match('.*(\[ *)(?P<t>[0-9\.]*)(\]) calling '+\
'(?P<f>.*)\+ @ .*, parent: .*', line)
mr = re.match('.*(\[ *)(?P<t>[0-9\.]*)(\]) call '+\
'(?P<f>.*)\+ returned .* after (?P<dt>.*) usecs', last)
if(mc and mr and (mc.group('t') == mr.group('t')) and
(mc.group('f') == mr.group('f'))):
i = data.dmesgtext.index(last)
j = data.dmesgtext.index(line)
data.dmesgtext[i] = line
data.dmesgtext[j] = last
last = line
return testruns
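# The fixup above targets dmesg pairs like these (same timestamp and
# function, logged return-before-call; values illustrative):
#   [  100.000001] call usb1+ returned 0 after 10 usecs
#   [  100.000001] calling usb1+ @ 57, parent: pci0000:00
# which are swapped back into calling-then-returned order.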
# Function: parseKernelLog
# Description:
# [deprecated for kernel 3.15.0 or newer]
# Analyze a dmesg log output file generated from this app during
# the execution phase. Create a set of device structures in memory
# for subsequent formatting in the html output file
# This call is only for legacy support on kernels where the ftrace
# data lacks the suspend_resume or device_pm_callbacks trace events.
# Arguments:
# data: an empty Data object (with dmesgtext) obtained from loadKernelLog
# Output:
# The filled Data object
def parseKernelLog(data):
global sysvals
phase = 'suspend_runtime'
if(data.fwValid):
vprint('Firmware Suspend = %u ns, Firmware Resume = %u ns' % \
(data.fwSuspend, data.fwResume))
# dmesg phase match table
dm = {
'suspend_prepare': 'PM: Syncing filesystems.*',
'suspend': 'PM: Entering [a-z]* sleep.*',
'suspend_late': 'PM: suspend of devices complete after.*',
'suspend_noirq': 'PM: late suspend of devices complete after.*',
'suspend_machine': 'PM: noirq suspend of devices complete after.*',
'resume_machine': 'ACPI: Low-level resume complete.*',
'resume_noirq': 'ACPI: Waking up from system sleep state.*',
'resume_early': 'PM: noirq resume of devices complete after.*',
'resume': 'PM: early resume of devices complete after.*',
'resume_complete': 'PM: resume of devices complete after.*',
'post_resume': '.*Restarting tasks \.\.\..*',
}
if(sysvals.suspendmode == 'standby'):
dm['resume_machine'] = 'PM: Restoring platform NVS memory'
elif(sysvals.suspendmode == 'disk'):
dm['suspend_late'] = 'PM: freeze of devices complete after.*'
dm['suspend_noirq'] = 'PM: late freeze of devices complete after.*'
dm['suspend_machine'] = 'PM: noirq freeze of devices complete after.*'
dm['resume_machine'] = 'PM: Restoring platform NVS memory'
dm['resume_early'] = 'PM: noirq restore of devices complete after.*'
dm['resume'] = 'PM: early restore of devices complete after.*'
dm['resume_complete'] = 'PM: restore of devices complete after.*'
elif(sysvals.suspendmode == 'freeze'):
dm['resume_machine'] = 'ACPI: resume from mwait'
# action table (expected events that occur and show up in dmesg)
at = {
'sync_filesystems': {
'smsg': 'PM: Syncing filesystems.*',
'emsg': 'PM: Preparing system for mem sleep.*' },
'freeze_user_processes': {
'smsg': 'Freezing user space processes .*',
'emsg': 'Freezing remaining freezable tasks.*' },
'freeze_tasks': {
'smsg': 'Freezing remaining freezable tasks.*',
'emsg': 'PM: Entering (?P<mode>[a-z,A-Z]*) sleep.*' },
'ACPI prepare': {
'smsg': 'ACPI: Preparing to enter system sleep state.*',
'emsg': 'PM: Saving platform NVS memory.*' },
		'PM nvs': {
'smsg': 'PM: Saving platform NVS memory.*',
'emsg': 'Disabling non-boot CPUs .*' },
}
t0 = -1.0
cpu_start = -1.0
prevktime = -1.0
actions = dict()
for line in data.dmesgtext:
# -- preprocessing --
# parse each dmesg line into the time and message
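		# e.g. "[   18.074011] calling  scsi_host0+ @ 32, parent: 0-0:1.0"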
m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
if(m):
val = m.group('ktime')
try:
ktime = float(val)
except:
doWarning('INVALID DMESG LINE: '+\
line.replace('\n', ''), 'dmesg')
continue
msg = m.group('msg')
# initialize data start to first line time
if t0 < 0:
data.setStart(ktime)
t0 = ktime
else:
continue
# hack for determining resume_machine end for freeze
if(not sysvals.usetraceevents and sysvals.suspendmode == 'freeze' \
and phase == 'resume_machine' and \
re.match('calling (?P<f>.*)\+ @ .*, parent: .*', msg)):
data.dmesg['resume_machine']['end'] = ktime
phase = 'resume_noirq'
data.dmesg[phase]['start'] = ktime
# -- phase changes --
# suspend start
if(re.match(dm['suspend_prepare'], msg)):
phase = 'suspend_prepare'
data.dmesg[phase]['start'] = ktime
data.setStart(ktime)
# suspend start
elif(re.match(dm['suspend'], msg)):
data.dmesg['suspend_prepare']['end'] = ktime
phase = 'suspend'
data.dmesg[phase]['start'] = ktime
# suspend_late start
elif(re.match(dm['suspend_late'], msg)):
data.dmesg['suspend']['end'] = ktime
phase = 'suspend_late'
data.dmesg[phase]['start'] = ktime
# suspend_noirq start
elif(re.match(dm['suspend_noirq'], msg)):
data.dmesg['suspend_late']['end'] = ktime
phase = 'suspend_noirq'
data.dmesg[phase]['start'] = ktime
# suspend_machine start
elif(re.match(dm['suspend_machine'], msg)):
data.dmesg['suspend_noirq']['end'] = ktime
phase = 'suspend_machine'
data.dmesg[phase]['start'] = ktime
# resume_machine start
elif(re.match(dm['resume_machine'], msg)):
if(sysvals.suspendmode in ['freeze', 'standby']):
data.tSuspended = prevktime
data.dmesg['suspend_machine']['end'] = prevktime
else:
data.tSuspended = ktime
data.dmesg['suspend_machine']['end'] = ktime
phase = 'resume_machine'
data.tResumed = ktime
data.tLow = data.tResumed - data.tSuspended
data.dmesg[phase]['start'] = ktime
# resume_noirq start
elif(re.match(dm['resume_noirq'], msg)):
data.dmesg['resume_machine']['end'] = ktime
phase = 'resume_noirq'
data.dmesg[phase]['start'] = ktime
# resume_early start
elif(re.match(dm['resume_early'], msg)):
data.dmesg['resume_noirq']['end'] = ktime
phase = 'resume_early'
data.dmesg[phase]['start'] = ktime
# resume start
elif(re.match(dm['resume'], msg)):
data.dmesg['resume_early']['end'] = ktime
phase = 'resume'
data.dmesg[phase]['start'] = ktime
# resume complete start
elif(re.match(dm['resume_complete'], msg)):
data.dmesg['resume']['end'] = ktime
phase = 'resume_complete'
data.dmesg[phase]['start'] = ktime
# post resume start
elif(re.match(dm['post_resume'], msg)):
data.dmesg['resume_complete']['end'] = ktime
data.setEnd(ktime)
phase = 'post_resume'
break
# -- device callbacks --
if(phase in data.phases):
# device init call
if(re.match('calling (?P<f>.*)\+ @ .*, parent: .*', msg)):
sm = re.match('calling (?P<f>.*)\+ @ '+\
					'(?P<n>.*), parent: (?P<p>.*)', msg)
f = sm.group('f')
n = sm.group('n')
p = sm.group('p')
if(f and n and p):
data.newAction(phase, f, int(n), p, ktime, -1, '')
# device init return
elif(re.match('call (?P<f>.*)\+ returned .* after '+\
'(?P<t>.*) usecs', msg)):
sm = re.match('call (?P<f>.*)\+ returned .* after '+\
					'(?P<t>.*) usecs(?P<a>.*)', msg)
f = sm.group('f')
t = sm.group('t')
list = data.dmesg[phase]['list']
if(f in list):
dev = list[f]
dev['length'] = int(t)
dev['end'] = ktime
# -- non-devicecallback actions --
# if trace events are not available, these are better than nothing
if(not sysvals.usetraceevents):
# look for known actions
for a in at:
if(re.match(at[a]['smsg'], msg)):
if(a not in actions):
actions[a] = []
actions[a].append({'begin': ktime, 'end': ktime})
if(re.match(at[a]['emsg'], msg)):
if(a in actions):
actions[a][-1]['end'] = ktime
# now look for CPU on/off events
if(re.match('Disabling non-boot CPUs .*', msg)):
# start of first cpu suspend
cpu_start = ktime
elif(re.match('Enabling non-boot CPUs .*', msg)):
# start of first cpu resume
cpu_start = ktime
elif(re.match('smpboot: CPU (?P<cpu>[0-9]*) is now offline', msg)):
# end of a cpu suspend, start of the next
m = re.match('smpboot: CPU (?P<cpu>[0-9]*) is now offline', msg)
cpu = 'CPU'+m.group('cpu')
if(cpu not in actions):
actions[cpu] = []
actions[cpu].append({'begin': cpu_start, 'end': ktime})
cpu_start = ktime
elif(re.match('CPU(?P<cpu>[0-9]*) is up', msg)):
# end of a cpu resume, start of the next
m = re.match('CPU(?P<cpu>[0-9]*) is up', msg)
cpu = 'CPU'+m.group('cpu')
if(cpu not in actions):
actions[cpu] = []
actions[cpu].append({'begin': cpu_start, 'end': ktime})
cpu_start = ktime
prevktime = ktime
# fill in any missing phases
lp = data.phases[0]
for p in data.phases:
if(data.dmesg[p]['start'] < 0 and data.dmesg[p]['end'] < 0):
print('WARNING: phase "%s" is missing, something went wrong!' % p)
print(' In %s, this dmesg line denotes the start of %s:' % \
(sysvals.suspendmode, p))
print(' "%s"' % dm[p])
if(data.dmesg[p]['start'] < 0):
data.dmesg[p]['start'] = data.dmesg[lp]['end']
if(p == 'resume_machine'):
data.tSuspended = data.dmesg[lp]['end']
data.tResumed = data.dmesg[lp]['end']
data.tLow = 0
if(data.dmesg[p]['end'] < 0):
data.dmesg[p]['end'] = data.dmesg[p]['start']
lp = p
# fill in any actions we've found
for name in actions:
for event in actions[name]:
data.newActionGlobal(name, event['begin'], event['end'])
if(sysvals.verbose):
data.printDetails()
if(len(sysvals.devicefilter) > 0):
data.deviceFilter(sysvals.devicefilter)
data.fixupInitcallsThatDidntReturn()
return True
# Function: createHTMLSummarySimple
# Description:
# Create summary html file for a series of tests
# Arguments:
# testruns: array of Data objects from parseTraceLog
def createHTMLSummarySimple(testruns, htmlfile):
global sysvals
# print out the basic summary of all the tests
hf = open(htmlfile, 'w')
# write the html header first (html head, css code, up to body start)
html = '<!DOCTYPE html>\n<html>\n<head>\n\
<meta http-equiv="content-type" content="text/html; charset=UTF-8">\n\
<title>AnalyzeSuspend Summary</title>\n\
<style type=\'text/css\'>\n\
body {overflow-y: scroll;}\n\
.stamp {width: 100%;text-align:center;background-color:#495E09;line-height:30px;color:white;font: 25px Arial;}\n\
table {width:100%;border-collapse: collapse;}\n\
.summary {font: 22px Arial;border:1px solid;}\n\
th {border: 1px solid black;background-color:#A7C942;color:white;}\n\
td {text-align: center;}\n\
tr.alt td {background-color:#EAF2D3;}\n\
tr.avg td {background-color:#BDE34C;}\n\
a:link {color: #90B521;}\n\
a:visited {color: #495E09;}\n\
a:hover {color: #B1DF28;}\n\
a:active {color: #FFFFFF;}\n\
</style>\n</head>\n<body>\n'
# group test header
count = len(testruns)
headline_stamp = '<div class="stamp">{0} {1} {2} {3} ({4} tests)</div>\n'
html += headline_stamp.format(sysvals.stamp['host'],
sysvals.stamp['kernel'], sysvals.stamp['mode'],
sysvals.stamp['time'], count)
# check to see if all the tests have the same value
stampcolumns = False
for data in testruns:
if diffStamp(sysvals.stamp, data.stamp):
stampcolumns = True
break
th = '\t<th>{0}</th>\n'
td = '\t<td>{0}</td>\n'
tdlink = '\t<td><a href="{0}">Click Here</a></td>\n'
# table header
html += '<table class="summary">\n<tr>\n'
html += th.format("Test #")
if stampcolumns:
html += th.format("Hostname")
html += th.format("Kernel Version")
html += th.format("Suspend Mode")
html += th.format("Test Time")
html += th.format("Suspend Time")
html += th.format("Resume Time")
html += th.format("Detail")
html += '</tr>\n'
# test data, 1 row per test
sTimeAvg = 0.0
rTimeAvg = 0.0
num = 1
for data in testruns:
# data.end is the end of post_resume
resumeEnd = data.dmesg['resume_complete']['end']
if num % 2 == 1:
html += '<tr class="alt">\n'
else:
html += '<tr>\n'
# test num
html += td.format("test %d" % num)
num += 1
if stampcolumns:
# host name
val = "unknown"
if('host' in data.stamp):
val = data.stamp['host']
html += td.format(val)
# host kernel
val = "unknown"
if('kernel' in data.stamp):
val = data.stamp['kernel']
html += td.format(val)
# suspend mode
val = "unknown"
if('mode' in data.stamp):
val = data.stamp['mode']
html += td.format(val)
# test time
val = "unknown"
if('time' in data.stamp):
val = data.stamp['time']
html += td.format(val)
# suspend time
sTime = (data.tSuspended - data.start)*1000
sTimeAvg += sTime
html += td.format("%3.3f ms" % sTime)
# resume time
rTime = (resumeEnd - data.tResumed)*1000
rTimeAvg += rTime
html += td.format("%3.3f ms" % rTime)
# link to the output html
html += tdlink.format(data.outfile)
html += '</tr>\n'
# last line: test average
if(count > 0):
sTimeAvg /= count
rTimeAvg /= count
html += '<tr class="avg">\n'
html += td.format('Average') # name
if stampcolumns:
html += td.format('') # host
html += td.format('') # kernel
html += td.format('') # mode
html += td.format('') # time
html += td.format("%3.3f ms" % sTimeAvg) # suspend time
html += td.format("%3.3f ms" % rTimeAvg) # resume time
html += td.format('') # output link
html += '</tr>\n'
# flush the data to file
hf.write(html+'</table>\n')
hf.write('</body>\n</html>\n')
hf.close()
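# Function: htmlTitle
# Description:
#	 Create the title string for the html output from the hostname,
#	 suspend mode, and kernel version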
def htmlTitle():
global sysvals
modename = {
'freeze': 'Freeze (S0)',
'standby': 'Standby (S1)',
'mem': 'Suspend (S3)',
'disk': 'Hibernate (S4)'
}
kernel = sysvals.stamp['kernel']
host = sysvals.hostname[0].upper()+sysvals.hostname[1:]
mode = sysvals.suspendmode
if sysvals.suspendmode in modename:
mode = modename[sysvals.suspendmode]
return host+' '+mode+' '+kernel
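# Function: ordinal
# Description:
#	 Express an integer as an ordinal string, e.g. 1 -> "1st", 12 -> "12th"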
def ordinal(value):
suffix = 'th'
if value < 10 or value > 19:
if value % 10 == 1:
suffix = 'st'
elif value % 10 == 2:
suffix = 'nd'
elif value % 10 == 3:
suffix = 'rd'
return '%d%s' % (value, suffix)
# Function: createHTML
# Description:
# Create the output html file from the resident test data
# Arguments:
# testruns: array of Data objects from parseKernelLog or parseTraceLog
# Output:
# True if the html file was created, false if it failed
def createHTML(testruns):
global sysvals
if len(testruns) < 1:
		print('ERROR: Not enough test data to build a timeline')
		return False
for data in testruns:
data.normalizeTime(testruns[-1].tSuspended)
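	# x2changes = [device detail button suffix, devlist button css position]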
x2changes = ['', 'absolute']
if len(testruns) > 1:
x2changes = ['1', 'relative']
# html function templates
headline_version = '<div class="version"><a href="https://01.org/suspendresume">AnalyzeSuspend v%s</a></div>' % sysvals.version
headline_stamp = '<div class="stamp">{0} {1} {2} {3}</div>\n'
html_devlist1 = '<button id="devlist1" class="devlist" style="float:left;">Device Detail%s</button>' % x2changes[0]
html_zoombox = '<center><button id="zoomin">ZOOM IN</button><button id="zoomout">ZOOM OUT</button><button id="zoomdef">ZOOM 1:1</button></center>\n'
html_devlist2 = '<button id="devlist2" class="devlist" style="float:right;">Device Detail2</button>\n'
html_timeline = '<div id="dmesgzoombox" class="zoombox">\n<div id="{0}" class="timeline" style="height:{1}px">\n'
html_tblock = '<div id="block{0}" class="tblock" style="left:{1}%;width:{2}%;">\n'
html_device = '<div id="{0}" title="{1}" class="thread{7}" style="left:{2}%;top:{3}px;height:{4}px;width:{5}%;{8}">{6}</div>\n'
html_traceevent = '<div title="{0}" class="traceevent" style="left:{1}%;top:{2}px;height:{3}px;width:{4}%;line-height:{3}px;">{5}</div>\n'
html_phase = '<div class="phase" style="left:{0}%;width:{1}%;top:{2}px;height:{3}px;background-color:{4}">{5}</div>\n'
html_phaselet = '<div id="{0}" class="phaselet" style="left:{1}%;width:{2}%;background-color:{3}"></div>\n'
html_legend = '<div id="p{3}" class="square" style="left:{0}%;background-color:{1}"> {2}</div>\n'
html_timetotal = '<table class="time1">\n<tr>'\
'<td class="green">{2} Suspend Time: <b>{0} ms</b></td>'\
'<td class="yellow">{2} Resume Time: <b>{1} ms</b></td>'\
'</tr>\n</table>\n'
html_timetotal2 = '<table class="time1">\n<tr>'\
'<td class="green">{3} Suspend Time: <b>{0} ms</b></td>'\
'<td class="gray">'+sysvals.suspendmode+' time: <b>{1} ms</b></td>'\
'<td class="yellow">{3} Resume Time: <b>{2} ms</b></td>'\
'</tr>\n</table>\n'
html_timetotal3 = '<table class="time1">\n<tr>'\
'<td class="green">Execution Time: <b>{0} ms</b></td>'\
'<td class="yellow">Command: <b>{1}</b></td>'\
'</tr>\n</table>\n'
html_timegroups = '<table class="time2">\n<tr>'\
'<td class="green">{4}Kernel Suspend: {0} ms</td>'\
'<td class="purple">{4}Firmware Suspend: {1} ms</td>'\
'<td class="purple">{4}Firmware Resume: {2} ms</td>'\
'<td class="yellow">{4}Kernel Resume: {3} ms</td>'\
'</tr>\n</table>\n'
# html format variables
rowheight = 30
devtextS = '14px'
devtextH = '30px'
hoverZ = 'z-index:10;'
if sysvals.usedevsrc:
hoverZ = ''
# device timeline
vprint('Creating Device Timeline...')
devtl = Timeline(rowheight)
# Generate the header for this timeline
for data in testruns:
tTotal = data.end - data.start
tEnd = data.dmesg['resume_complete']['end']
if(tTotal == 0):
print('ERROR: No timeline data')
sys.exit()
if(data.tLow > 0):
low_time = '%.0f'%(data.tLow*1000)
if sysvals.suspendmode == 'command':
run_time = '%.0f'%((data.end-data.start)*1000)
if sysvals.testcommand:
testdesc = sysvals.testcommand
else:
testdesc = 'unknown'
if(len(testruns) > 1):
testdesc = ordinal(data.testnumber+1)+' '+testdesc
thtml = html_timetotal3.format(run_time, testdesc)
devtl.html['header'] += thtml
elif data.fwValid:
suspend_time = '%.0f'%((data.tSuspended-data.start)*1000 + \
(data.fwSuspend/1000000.0))
resume_time = '%.0f'%((tEnd-data.tSuspended)*1000 + \
(data.fwResume/1000000.0))
testdesc1 = 'Total'
testdesc2 = ''
if(len(testruns) > 1):
testdesc1 = testdesc2 = ordinal(data.testnumber+1)
testdesc2 += ' '
if(data.tLow == 0):
thtml = html_timetotal.format(suspend_time, \
resume_time, testdesc1)
else:
thtml = html_timetotal2.format(suspend_time, low_time, \
resume_time, testdesc1)
devtl.html['header'] += thtml
sktime = '%.3f'%((data.dmesg['suspend_machine']['end'] - \
data.getStart())*1000)
sftime = '%.3f'%(data.fwSuspend / 1000000.0)
rftime = '%.3f'%(data.fwResume / 1000000.0)
rktime = '%.3f'%((data.dmesg['resume_complete']['end'] - \
data.dmesg['resume_machine']['start'])*1000)
devtl.html['header'] += html_timegroups.format(sktime, \
sftime, rftime, rktime, testdesc2)
else:
suspend_time = '%.0f'%((data.tSuspended-data.start)*1000)
resume_time = '%.0f'%((tEnd-data.tSuspended)*1000)
testdesc = 'Kernel'
if(len(testruns) > 1):
testdesc = ordinal(data.testnumber+1)+' '+testdesc
if(data.tLow == 0):
thtml = html_timetotal.format(suspend_time, \
resume_time, testdesc)
else:
thtml = html_timetotal2.format(suspend_time, low_time, \
resume_time, testdesc)
devtl.html['header'] += thtml
# time scale for potentially multiple datasets
t0 = testruns[0].start
tMax = testruns[-1].end
tSuspended = testruns[-1].tSuspended
tTotal = tMax - t0
# determine the maximum number of rows we need to draw
for data in testruns:
data.selectTimelineDevices('%f', tTotal, sysvals.mindevlen)
for group in data.devicegroups:
devlist = []
for phase in group:
for devname in data.tdevlist[phase]:
devlist.append((phase,devname))
devtl.getPhaseRows(data.dmesg, devlist)
devtl.calcTotalRows()
# create bounding box, add buttons
if sysvals.suspendmode != 'command':
devtl.html['timeline'] += html_devlist1
if len(testruns) > 1:
devtl.html['timeline'] += html_devlist2
devtl.html['timeline'] += html_zoombox
devtl.html['timeline'] += html_timeline.format('dmesg', devtl.height)
# draw the full timeline
phases = {'suspend':[],'resume':[]}
for phase in data.dmesg:
if 'resume' in phase:
phases['resume'].append(phase)
else:
phases['suspend'].append(phase)
# draw each test run chronologically
for data in testruns:
		# if more than one test, draw a block to represent user mode
if(data.testnumber > 0):
m0 = testruns[data.testnumber-1].end
mMax = testruns[data.testnumber].start
mTotal = mMax - m0
name = 'usermode%d' % data.testnumber
top = '%d' % devtl.scaleH
left = '%f' % (((m0-t0)*100.0)/tTotal)
width = '%f' % ((mTotal*100.0)/tTotal)
title = 'user mode (%0.3f ms) ' % (mTotal*1000)
devtl.html['timeline'] += html_device.format(name, \
title, left, top, '%d'%devtl.bodyH, width, '', '', '')
# now draw the actual timeline blocks
for dir in phases:
# draw suspend and resume blocks separately
bname = '%s%d' % (dir[0], data.testnumber)
if dir == 'suspend':
m0 = testruns[data.testnumber].start
mMax = testruns[data.testnumber].tSuspended
mTotal = mMax - m0
left = '%f' % (((m0-t0)*100.0)/tTotal)
else:
m0 = testruns[data.testnumber].tSuspended
mMax = testruns[data.testnumber].end
mTotal = mMax - m0
left = '%f' % ((((m0-t0)*100.0)+sysvals.srgap/2)/tTotal)
# if a timeline block is 0 length, skip altogether
if mTotal == 0:
continue
width = '%f' % (((mTotal*100.0)-sysvals.srgap/2)/tTotal)
devtl.html['timeline'] += html_tblock.format(bname, left, width)
for b in sorted(phases[dir]):
# draw the phase color background
phase = data.dmesg[b]
length = phase['end']-phase['start']
left = '%f' % (((phase['start']-m0)*100.0)/mTotal)
width = '%f' % ((length*100.0)/mTotal)
devtl.html['timeline'] += html_phase.format(left, width, \
'%.3f'%devtl.scaleH, '%.3f'%devtl.bodyH, \
data.dmesg[b]['color'], '')
# draw the devices for this phase
phaselist = data.dmesg[b]['list']
for d in data.tdevlist[b]:
name = d
drv = ''
dev = phaselist[d]
xtraclass = ''
xtrainfo = ''
xtrastyle = ''
if 'htmlclass' in dev:
xtraclass = dev['htmlclass']
xtrainfo = dev['htmlclass']
if 'color' in dev:
xtrastyle = 'background-color:%s;' % dev['color']
if(d in sysvals.devprops):
name = sysvals.devprops[d].altName(d)
xtraclass = sysvals.devprops[d].xtraClass()
xtrainfo = sysvals.devprops[d].xtraInfo()
if('drv' in dev and dev['drv']):
drv = ' {%s}' % dev['drv']
rowheight = devtl.phaseRowHeight(b, dev['row'])
rowtop = devtl.phaseRowTop(b, dev['row'])
top = '%.3f' % (rowtop + devtl.scaleH)
left = '%f' % (((dev['start']-m0)*100)/mTotal)
width = '%f' % (((dev['end']-dev['start'])*100)/mTotal)
length = ' (%0.3f ms) ' % ((dev['end']-dev['start'])*1000)
if sysvals.suspendmode == 'command':
title = name+drv+xtrainfo+length+'cmdexec'
else:
title = name+drv+xtrainfo+length+b
devtl.html['timeline'] += html_device.format(dev['id'], \
title, left, top, '%.3f'%rowheight, width, \
d+drv, xtraclass, xtrastyle)
if('src' not in dev):
continue
# draw any trace events for this device
vprint('Debug trace events found for device %s' % d)
vprint('%20s %20s %10s %8s' % ('title', \
'name', 'time(ms)', 'length(ms)'))
for e in dev['src']:
vprint('%20s %20s %10.3f %8.3f' % (e.title, \
e.text, e.time*1000, e.length*1000))
height = devtl.rowH
top = '%.3f' % (rowtop + devtl.scaleH + (e.row*devtl.rowH))
left = '%f' % (((e.time-m0)*100)/mTotal)
width = '%f' % (e.length*100/mTotal)
color = 'rgba(204,204,204,0.5)'
devtl.html['timeline'] += \
html_traceevent.format(e.title, \
left, top, '%.3f'%height, \
width, e.text)
# draw the time scale, try to make the number of labels readable
devtl.html['timeline'] += devtl.createTimeScale(m0, mMax, tTotal, dir)
devtl.html['timeline'] += '</div>\n'
# timeline is finished
devtl.html['timeline'] += '</div>\n</div>\n'
# draw a legend which describes the phases by color
if sysvals.suspendmode != 'command':
data = testruns[-1]
devtl.html['legend'] = '<div class="legend">\n'
pdelta = 100.0/len(data.phases)
pmargin = pdelta / 4.0
for phase in data.phases:
tmp = phase.split('_')
id = tmp[0][0]
if(len(tmp) > 1):
id += tmp[1][0]
order = '%.2f' % ((data.dmesg[phase]['order'] * pdelta) + pmargin)
name = string.replace(phase, '_', ' ')
devtl.html['legend'] += html_legend.format(order, \
data.dmesg[phase]['color'], name, id)
devtl.html['legend'] += '</div>\n'
hf = open(sysvals.htmlfile, 'w')
if not sysvals.cgexp:
cgchk = 'checked'
cgnchk = 'not(:checked)'
else:
cgchk = 'not(:checked)'
cgnchk = 'checked'
# write the html header first (html head, css code, up to body start)
html_header = '<!DOCTYPE html>\n<html>\n<head>\n\
<meta http-equiv="content-type" content="text/html; charset=UTF-8">\n\
<title>'+htmlTitle()+'</title>\n\
<style type=\'text/css\'>\n\
body {overflow-y:scroll;}\n\
.stamp {width:100%;text-align:center;background-color:gray;line-height:30px;color:white;font:25px Arial;}\n\
.callgraph {margin-top:30px;box-shadow:5px 5px 20px black;}\n\
.callgraph article * {padding-left:28px;}\n\
h1 {color:black;font:bold 30px Times;}\n\
t0 {color:black;font:bold 30px Times;}\n\
t1 {color:black;font:30px Times;}\n\
t2 {color:black;font:25px Times;}\n\
t3 {color:black;font:20px Times;white-space:nowrap;}\n\
t4 {color:black;font:bold 30px Times;line-height:60px;white-space:nowrap;}\n\
cS {color:blue;font:bold 11px Times;}\n\
cR {color:red;font:bold 11px Times;}\n\
table {width:100%;}\n\
.gray {background-color:rgba(80,80,80,0.1);}\n\
.green {background-color:rgba(204,255,204,0.4);}\n\
.purple {background-color:rgba(128,0,128,0.2);}\n\
.yellow {background-color:rgba(255,255,204,0.4);}\n\
.time1 {font:22px Arial;border:1px solid;}\n\
.time2 {font:15px Arial;border-bottom:1px solid;border-left:1px solid;border-right:1px solid;}\n\
td {text-align:center;}\n\
r {color:#500000;font:15px Tahoma;}\n\
n {color:#505050;font:15px Tahoma;}\n\
.tdhl {color:red;}\n\
.hide {display:none;}\n\
.pf {display:none;}\n\
.pf:'+cgchk+' + label {background:url(\'data:image/svg+xml;utf,<?xml version="1.0" standalone="no"?><svg xmlns="http://www.w3.org/2000/svg" height="18" width="18" version="1.1"><circle cx="9" cy="9" r="8" stroke="black" stroke-width="1" fill="white"/><rect x="4" y="8" width="10" height="2" style="fill:black;stroke-width:0"/><rect x="8" y="4" width="2" height="10" style="fill:black;stroke-width:0"/></svg>\') no-repeat left center;}\n\
.pf:'+cgnchk+' ~ label {background:url(\'data:image/svg+xml;utf,<?xml version="1.0" standalone="no"?><svg xmlns="http://www.w3.org/2000/svg" height="18" width="18" version="1.1"><circle cx="9" cy="9" r="8" stroke="black" stroke-width="1" fill="white"/><rect x="4" y="8" width="10" height="2" style="fill:black;stroke-width:0"/></svg>\') no-repeat left center;}\n\
.pf:'+cgchk+' ~ *:not(:nth-child(2)) {display:none;}\n\
.zoombox {position:relative;width:100%;overflow-x:scroll;}\n\
.timeline {position:relative;font-size:14px;cursor:pointer;width:100%; overflow:hidden;background:linear-gradient(#cccccc, white);}\n\
.thread {position:absolute;height:0%;overflow:hidden;line-height:'+devtextH+';font-size:'+devtextS+';border:1px solid;text-align:center;white-space:nowrap;background-color:rgba(204,204,204,0.5);}\n\
.thread.sync {background-color:'+sysvals.synccolor+';}\n\
.thread.bg {background-color:'+sysvals.kprobecolor+';}\n\
.thread:hover {background-color:white;border:1px solid red;'+hoverZ+'}\n\
.hover {background-color:white;border:1px solid red;'+hoverZ+'}\n\
.hover.sync {background-color:white;}\n\
.hover.bg {background-color:white;}\n\
.traceevent {position:absolute;font-size:10px;overflow:hidden;color:black;text-align:center;white-space:nowrap;border-radius:5px;border:1px solid black;background:linear-gradient(to bottom right,rgba(204,204,204,1),rgba(150,150,150,1));}\n\
.traceevent:hover {background:white;}\n\
.phase {position:absolute;overflow:hidden;border:0px;text-align:center;}\n\
.phaselet {position:absolute;overflow:hidden;border:0px;text-align:center;height:100px;font-size:24px;}\n\
.t {z-index:2;position:absolute;pointer-events:none;top:0%;height:100%;border-right:1px solid black;}\n\
.legend {position:relative; width:100%; height:40px; text-align:center;margin-bottom:20px}\n\
.legend .square {position:absolute;cursor:pointer;top:10px; width:0px;height:20px;border:1px solid;padding-left:20px;}\n\
button {height:40px;width:200px;margin-bottom:20px;margin-top:20px;font-size:24px;}\n\
.logbtn {position:relative;float:right;height:25px;width:50px;margin-top:3px;margin-bottom:0;font-size:10px;text-align:center;}\n\
.devlist {position:'+x2changes[1]+';width:190px;}\n\
a:link {color:white;text-decoration:none;}\n\
a:visited {color:white;}\n\
a:hover {color:white;}\n\
a:active {color:white;}\n\
.version {position:relative;float:left;color:white;font-size:10px;line-height:30px;margin-left:10px;}\n\
#devicedetail {height:100px;box-shadow:5px 5px 20px black;}\n\
.tblock {position:absolute;height:100%;}\n\
.bg {z-index:1;}\n\
</style>\n</head>\n<body>\n'
	# no header or css if it's embedded
if(sysvals.embedded):
hf.write('pass True tSus %.3f tRes %.3f tLow %.3f fwvalid %s tSus %.3f tRes %.3f\n' %
(data.tSuspended-data.start, data.end-data.tSuspended, data.tLow, data.fwValid, \
data.fwSuspend/1000000, data.fwResume/1000000))
else:
hf.write(html_header)
# write the test title and general info header
if(sysvals.stamp['time'] != ""):
hf.write(headline_version)
if sysvals.addlogs and sysvals.dmesgfile:
hf.write('<button id="showdmesg" class="logbtn">dmesg</button>')
if sysvals.addlogs and sysvals.ftracefile:
hf.write('<button id="showftrace" class="logbtn">ftrace</button>')
hf.write(headline_stamp.format(sysvals.stamp['host'],
sysvals.stamp['kernel'], sysvals.stamp['mode'], \
sysvals.stamp['time']))
# write the device timeline
hf.write(devtl.html['header'])
hf.write(devtl.html['timeline'])
hf.write(devtl.html['legend'])
hf.write('<div id="devicedetailtitle"></div>\n')
hf.write('<div id="devicedetail" style="display:none;">\n')
# draw the colored boxes for the device detail section
for data in testruns:
hf.write('<div id="devicedetail%d">\n' % data.testnumber)
for b in data.phases:
phase = data.dmesg[b]
length = phase['end']-phase['start']
left = '%.3f' % (((phase['start']-t0)*100.0)/tTotal)
width = '%.3f' % ((length*100.0)/tTotal)
hf.write(html_phaselet.format(b, left, width, \
data.dmesg[b]['color']))
if sysvals.suspendmode == 'command':
hf.write(html_phaselet.format('cmdexec', '0', '0', \
data.dmesg['resume_complete']['color']))
hf.write('</div>\n')
hf.write('</div>\n')
# write the ftrace data (callgraph)
data = testruns[-1]
if(sysvals.usecallgraph and not sysvals.embedded):
hf.write('<section id="callgraphs" class="callgraph">\n')
# write out the ftrace data converted to html
html_func_top = '<article id="{0}" class="atop" style="background-color:{1}">\n<input type="checkbox" class="pf" id="f{2}" checked/><label for="f{2}">{3} {4}</label>\n'
html_func_start = '<article>\n<input type="checkbox" class="pf" id="f{0}" checked/><label for="f{0}">{1} {2}</label>\n'
html_func_end = '</article>\n'
html_func_leaf = '<article>{0} {1}</article>\n'
num = 0
for p in data.phases:
list = data.dmesg[p]['list']
for devname in data.sortedDevices(p):
if('ftrace' not in list[devname]):
continue
devid = list[devname]['id']
cg = list[devname]['ftrace']
clen = (cg.end - cg.start) * 1000
if clen < sysvals.mincglen:
continue
fmt = '<r>(%.3f ms @ '+sysvals.timeformat+' to '+sysvals.timeformat+')</r>'
flen = fmt % (clen, cg.start, cg.end)
name = devname
if(devname in sysvals.devprops):
name = sysvals.devprops[devname].altName(devname)
if sysvals.suspendmode == 'command':
ftitle = name
else:
ftitle = name+' '+p
hf.write(html_func_top.format(devid, data.dmesg[p]['color'], \
num, ftitle, flen))
num += 1
for line in cg.list:
if(line.length < 0.000000001):
flen = ''
else:
fmt = '<n>(%.3f ms @ '+sysvals.timeformat+')</n>'
flen = fmt % (line.length*1000, line.time)
if(line.freturn and line.fcall):
hf.write(html_func_leaf.format(line.name, flen))
elif(line.freturn):
hf.write(html_func_end)
else:
hf.write(html_func_start.format(num, line.name, flen))
num += 1
hf.write(html_func_end)
hf.write('\n\n </section>\n')
# add the dmesg log as a hidden div
if sysvals.addlogs and sysvals.dmesgfile:
hf.write('<div id="dmesglog" style="display:none;">\n')
lf = open(sysvals.dmesgfile, 'r')
for line in lf:
hf.write(line)
lf.close()
hf.write('</div>\n')
# add the ftrace log as a hidden div
if sysvals.addlogs and sysvals.ftracefile:
hf.write('<div id="ftracelog" style="display:none;">\n')
lf = open(sysvals.ftracefile, 'r')
for line in lf:
hf.write(line)
lf.close()
hf.write('</div>\n')
if(not sysvals.embedded):
# write the footer and close
addScriptCode(hf, testruns)
hf.write('</body>\n</html>\n')
else:
		# embedded output will be loaded in a page, skip the js
t0 = (testruns[0].start - testruns[-1].tSuspended) * 1000
tMax = (testruns[-1].end - testruns[-1].tSuspended) * 1000
# add js code in a div entry for later evaluation
detail = 'var bounds = [%f,%f];\n' % (t0, tMax)
detail += 'var devtable = [\n'
for data in testruns:
topo = data.deviceTopology()
detail += '\t"%s",\n' % (topo)
detail += '];\n'
hf.write('<div id=customcode style=display:none>\n'+detail+'</div>\n')
hf.close()
return True
# Function: addScriptCode
# Description:
# Adds the javascript code to the output html
# Arguments:
# hf: the open html file pointer
# testruns: array of Data objects from parseKernelLog or parseTraceLog
def addScriptCode(hf, testruns):
t0 = testruns[0].start * 1000
tMax = testruns[-1].end * 1000
# create an array in javascript memory with the device details
detail = ' var devtable = [];\n'
for data in testruns:
topo = data.deviceTopology()
detail += ' devtable[%d] = "%s";\n' % (data.testnumber, topo)
detail += ' var bounds = [%f,%f];\n' % (t0, tMax)
# add the code which will manipulate the data in the browser
script_code = \
'<script type="text/javascript">\n'+detail+\
' var resolution = -1;\n'\
' function redrawTimescale(t0, tMax, tS) {\n'\
' var rline = \'<div class="t" style="left:0;border-left:1px solid black;border-right:0;"><cR><-R</cR></div>\';\n'\
' var tTotal = tMax - t0;\n'\
' var list = document.getElementsByClassName("tblock");\n'\
' for (var i = 0; i < list.length; i++) {\n'\
' var timescale = list[i].getElementsByClassName("timescale")[0];\n'\
' var m0 = t0 + (tTotal*parseFloat(list[i].style.left)/100);\n'\
' var mTotal = tTotal*parseFloat(list[i].style.width)/100;\n'\
' var mMax = m0 + mTotal;\n'\
' var html = "";\n'\
' var divTotal = Math.floor(mTotal/tS) + 1;\n'\
' if(divTotal > 1000) continue;\n'\
' var divEdge = (mTotal - tS*(divTotal-1))*100/mTotal;\n'\
' var pos = 0.0, val = 0.0;\n'\
' for (var j = 0; j < divTotal; j++) {\n'\
' var htmlline = "";\n'\
' if(list[i].id[5] == "r") {\n'\
' pos = 100 - (((j)*tS*100)/mTotal);\n'\
' val = (j)*tS;\n'\
' htmlline = \'<div class="t" style="right:\'+pos+\'%">\'+val+\'ms</div>\';\n'\
' if(j == 0)\n'\
' htmlline = rline;\n'\
' } else {\n'\
' pos = 100 - (((j)*tS*100)/mTotal) - divEdge;\n'\
' val = (j-divTotal+1)*tS;\n'\
' if(j == divTotal - 1)\n'\
' htmlline = \'<div class="t" style="right:\'+pos+\'%"><cS>S-></cS></div>\';\n'\
' else\n'\
' htmlline = \'<div class="t" style="right:\'+pos+\'%">\'+val+\'ms</div>\';\n'\
' }\n'\
' html += htmlline;\n'\
' }\n'\
' timescale.innerHTML = html;\n'\
' }\n'\
' }\n'\
' function zoomTimeline() {\n'\
' var dmesg = document.getElementById("dmesg");\n'\
' var zoombox = document.getElementById("dmesgzoombox");\n'\
' var val = parseFloat(dmesg.style.width);\n'\
' var newval = 100;\n'\
' var sh = window.outerWidth / 2;\n'\
' if(this.id == "zoomin") {\n'\
' newval = val * 1.2;\n'\
' if(newval > 910034) newval = 910034;\n'\
' dmesg.style.width = newval+"%";\n'\
' zoombox.scrollLeft = ((zoombox.scrollLeft + sh) * newval / val) - sh;\n'\
' } else if (this.id == "zoomout") {\n'\
' newval = val / 1.2;\n'\
' if(newval < 100) newval = 100;\n'\
' dmesg.style.width = newval+"%";\n'\
' zoombox.scrollLeft = ((zoombox.scrollLeft + sh) * newval / val) - sh;\n'\
' } else {\n'\
' zoombox.scrollLeft = 0;\n'\
' dmesg.style.width = "100%";\n'\
' }\n'\
' var tS = [10000, 5000, 2000, 1000, 500, 200, 100, 50, 20, 10, 5, 2, 1];\n'\
' var t0 = bounds[0];\n'\
' var tMax = bounds[1];\n'\
' var tTotal = tMax - t0;\n'\
' var wTotal = tTotal * 100.0 / newval;\n'\
' var idx = 7*window.innerWidth/1100;\n'\
' for(var i = 0; (i < tS.length)&&((wTotal / tS[i]) < idx); i++);\n'\
' if(i >= tS.length) i = tS.length - 1;\n'\
' if(tS[i] == resolution) return;\n'\
' resolution = tS[i];\n'\
' redrawTimescale(t0, tMax, tS[i]);\n'\
' }\n'\
' function deviceHover() {\n'\
' var name = this.title.slice(0, this.title.indexOf(" ("));\n'\
' var dmesg = document.getElementById("dmesg");\n'\
' var dev = dmesg.getElementsByClassName("thread");\n'\
' var cpu = -1;\n'\
' if(name.match("CPU_ON\[[0-9]*\]"))\n'\
' cpu = parseInt(name.slice(7));\n'\
' else if(name.match("CPU_OFF\[[0-9]*\]"))\n'\
' cpu = parseInt(name.slice(8));\n'\
' for (var i = 0; i < dev.length; i++) {\n'\
' dname = dev[i].title.slice(0, dev[i].title.indexOf(" ("));\n'\
' var cname = dev[i].className.slice(dev[i].className.indexOf("thread"));\n'\
' if((cpu >= 0 && dname.match("CPU_O[NF]*\\\[*"+cpu+"\\\]")) ||\n'\
' (name == dname))\n'\
' {\n'\
' dev[i].className = "hover "+cname;\n'\
' } else {\n'\
' dev[i].className = cname;\n'\
' }\n'\
' }\n'\
' }\n'\
' function deviceUnhover() {\n'\
' var dmesg = document.getElementById("dmesg");\n'\
' var dev = dmesg.getElementsByClassName("thread");\n'\
' for (var i = 0; i < dev.length; i++) {\n'\
' dev[i].className = dev[i].className.slice(dev[i].className.indexOf("thread"));\n'\
' }\n'\
' }\n'\
' function deviceTitle(title, total, cpu) {\n'\
' var prefix = "Total";\n'\
' if(total.length > 3) {\n'\
' prefix = "Average";\n'\
' total[1] = (total[1]+total[3])/2;\n'\
' total[2] = (total[2]+total[4])/2;\n'\
' }\n'\
' var devtitle = document.getElementById("devicedetailtitle");\n'\
' var name = title.slice(0, title.indexOf(" ("));\n'\
' if(cpu >= 0) name = "CPU"+cpu;\n'\
' var driver = "";\n'\
' var tS = "<t2>(</t2>";\n'\
' var tR = "<t2>)</t2>";\n'\
' if(total[1] > 0)\n'\
' tS = "<t2>("+prefix+" Suspend:</t2><t0> "+total[1].toFixed(3)+" ms</t0> ";\n'\
' if(total[2] > 0)\n'\
' tR = " <t2>"+prefix+" Resume:</t2><t0> "+total[2].toFixed(3)+" ms<t2>)</t2></t0>";\n'\
' var s = title.indexOf("{");\n'\
' var e = title.indexOf("}");\n'\
' if((s >= 0) && (e >= 0))\n'\
' driver = title.slice(s+1, e) + " <t1>@</t1> ";\n'\
' if(total[1] > 0 && total[2] > 0)\n'\
' devtitle.innerHTML = "<t0>"+driver+name+"</t0> "+tS+tR;\n'\
' else\n'\
' devtitle.innerHTML = "<t0>"+title+"</t0>";\n'\
' return name;\n'\
' }\n'\
' function deviceDetail() {\n'\
' var devinfo = document.getElementById("devicedetail");\n'\
' devinfo.style.display = "block";\n'\
' var name = this.title.slice(0, this.title.indexOf(" ("));\n'\
' var cpu = -1;\n'\
' if(name.match("CPU_ON\[[0-9]*\]"))\n'\
' cpu = parseInt(name.slice(7));\n'\
' else if(name.match("CPU_OFF\[[0-9]*\]"))\n'\
' cpu = parseInt(name.slice(8));\n'\
' var dmesg = document.getElementById("dmesg");\n'\
' var dev = dmesg.getElementsByClassName("thread");\n'\
' var idlist = [];\n'\
' var pdata = [[]];\n'\
' if(document.getElementById("devicedetail1"))\n'\
' pdata = [[], []];\n'\
' var pd = pdata[0];\n'\
' var total = [0.0, 0.0, 0.0];\n'\
' for (var i = 0; i < dev.length; i++) {\n'\
' dname = dev[i].title.slice(0, dev[i].title.indexOf(" ("));\n'\
' if((cpu >= 0 && dname.match("CPU_O[NF]*\\\[*"+cpu+"\\\]")) ||\n'\
' (name == dname))\n'\
' {\n'\
' idlist[idlist.length] = dev[i].id;\n'\
' var tidx = 1;\n'\
' if(dev[i].id[0] == "a") {\n'\
' pd = pdata[0];\n'\
' } else {\n'\
' if(pdata.length == 1) pdata[1] = [];\n'\
' if(total.length == 3) total[3]=total[4]=0.0;\n'\
' pd = pdata[1];\n'\
' tidx = 3;\n'\
' }\n'\
' var info = dev[i].title.split(" ");\n'\
' var pname = info[info.length-1];\n'\
' pd[pname] = parseFloat(info[info.length-3].slice(1));\n'\
' total[0] += pd[pname];\n'\
' if(pname.indexOf("suspend") >= 0)\n'\
' total[tidx] += pd[pname];\n'\
' else\n'\
' total[tidx+1] += pd[pname];\n'\
' }\n'\
' }\n'\
' var devname = deviceTitle(this.title, total, cpu);\n'\
' var left = 0.0;\n'\
' for (var t = 0; t < pdata.length; t++) {\n'\
' pd = pdata[t];\n'\
' devinfo = document.getElementById("devicedetail"+t);\n'\
' var phases = devinfo.getElementsByClassName("phaselet");\n'\
' for (var i = 0; i < phases.length; i++) {\n'\
' if(phases[i].id in pd) {\n'\
' var w = 100.0*pd[phases[i].id]/total[0];\n'\
' var fs = 32;\n'\
' if(w < 8) fs = 4*w | 0;\n'\
' var fs2 = fs*3/4;\n'\
' phases[i].style.width = w+"%";\n'\
' phases[i].style.left = left+"%";\n'\
' phases[i].title = phases[i].id+" "+pd[phases[i].id]+" ms";\n'\
' left += w;\n'\
' var time = "<t4 style=\\"font-size:"+fs+"px\\">"+pd[phases[i].id]+" ms<br></t4>";\n'\
' var pname = "<t3 style=\\"font-size:"+fs2+"px\\">"+phases[i].id.replace("_", " ")+"</t3>";\n'\
' phases[i].innerHTML = time+pname;\n'\
' } else {\n'\
' phases[i].style.width = "0%";\n'\
' phases[i].style.left = left+"%";\n'\
' }\n'\
' }\n'\
' }\n'\
' var cglist = document.getElementById("callgraphs");\n'\
' if(!cglist) return;\n'\
' var cg = cglist.getElementsByClassName("atop");\n'\
' if(cg.length < 10) return;\n'\
' for (var i = 0; i < cg.length; i++) {\n'\
' if(idlist.indexOf(cg[i].id) >= 0) {\n'\
' cg[i].style.display = "block";\n'\
' } else {\n'\
' cg[i].style.display = "none";\n'\
' }\n'\
' }\n'\
' }\n'\
' function devListWindow(e) {\n'\
' var sx = e.clientX;\n'\
' if(sx > window.innerWidth - 440)\n'\
' sx = window.innerWidth - 440;\n'\
' var cfg="top="+e.screenY+", left="+sx+", width=440, height=720, scrollbars=yes";\n'\
' var win = window.open("", "_blank", cfg);\n'\
' if(window.chrome) win.moveBy(sx, 0);\n'\
' var html = "<title>"+e.target.innerHTML+"</title>"+\n'\
' "<style type=\\"text/css\\">"+\n'\
' " ul {list-style-type:circle;padding-left:10px;margin-left:10px;}"+\n'\
' "</style>"\n'\
' var dt = devtable[0];\n'\
' if(e.target.id != "devlist1")\n'\
' dt = devtable[1];\n'\
' win.document.write(html+dt);\n'\
' }\n'\
' function logWindow(e) {\n'\
' var name = e.target.id.slice(4);\n'\
' var win = window.open();\n'\
' var log = document.getElementById(name+"log");\n'\
' var title = "<title>"+document.title.split(" ")[0]+" "+name+" log</title>";\n'\
' win.document.write(title+"<pre>"+log.innerHTML+"</pre>");\n'\
' win.document.close();\n'\
' }\n'\
' function onClickPhase(e) {\n'\
' }\n'\
' window.addEventListener("resize", function () {zoomTimeline();});\n'\
' window.addEventListener("load", function () {\n'\
' var dmesg = document.getElementById("dmesg");\n'\
' dmesg.style.width = "100%"\n'\
' document.getElementById("zoomin").onclick = zoomTimeline;\n'\
' document.getElementById("zoomout").onclick = zoomTimeline;\n'\
' document.getElementById("zoomdef").onclick = zoomTimeline;\n'\
' var list = document.getElementsByClassName("square");\n'\
' for (var i = 0; i < list.length; i++)\n'\
' list[i].onclick = onClickPhase;\n'\
' var list = document.getElementsByClassName("logbtn");\n'\
' for (var i = 0; i < list.length; i++)\n'\
' list[i].onclick = logWindow;\n'\
' list = document.getElementsByClassName("devlist");\n'\
' for (var i = 0; i < list.length; i++)\n'\
' list[i].onclick = devListWindow;\n'\
' var dev = dmesg.getElementsByClassName("thread");\n'\
' for (var i = 0; i < dev.length; i++) {\n'\
' dev[i].onclick = deviceDetail;\n'\
' dev[i].onmouseover = deviceHover;\n'\
' dev[i].onmouseout = deviceUnhover;\n'\
' }\n'\
' zoomTimeline();\n'\
' });\n'\
'</script>\n'
	hf.write(script_code)
# Function: executeSuspend
# Description:
# Execute system suspend through the sysfs interface, then copy the output
# dmesg and ftrace files to the test output directory.
def executeSuspend():
global sysvals
t0 = time.time()*1000
tp = sysvals.tpath
fwdata = []
# mark the start point in the kernel ring buffer just as we start
sysvals.initdmesg()
# start ftrace
if(sysvals.usecallgraph or sysvals.usetraceevents):
print('START TRACING')
sysvals.fsetVal('1', 'tracing_on')
# execute however many s/r runs requested
for count in range(1,sysvals.execcount+1):
		# if this is test 2 or later and an x2 delay is set, wait it out here
if(count > 1 and sysvals.x2delay > 0):
tN = time.time()*1000
while (tN - t0) < sysvals.x2delay:
tN = time.time()*1000
time.sleep(0.001)
# initiate suspend
if(sysvals.usecallgraph or sysvals.usetraceevents):
sysvals.fsetVal('SUSPEND START', 'trace_marker')
if sysvals.suspendmode == 'command':
print('COMMAND START')
if(sysvals.rtcwake):
print('will issue an rtcwake in %d seconds' % sysvals.rtcwaketime)
sysvals.rtcWakeAlarmOn()
os.system(sysvals.testcommand)
else:
if(sysvals.rtcwake):
print('SUSPEND START')
print('will autoresume in %d seconds' % sysvals.rtcwaketime)
sysvals.rtcWakeAlarmOn()
else:
print('SUSPEND START (press a key to resume)')
pf = open(sysvals.powerfile, 'w')
pf.write(sysvals.suspendmode)
# execution will pause here
try:
pf.close()
except:
pass
t0 = time.time()*1000
if(sysvals.rtcwake):
sysvals.rtcWakeAlarmOff()
# return from suspend
print('RESUME COMPLETE')
if(sysvals.usecallgraph or sysvals.usetraceevents):
sysvals.fsetVal('RESUME COMPLETE', 'trace_marker')
if(sysvals.suspendmode == 'mem'):
fwdata.append(getFPDT(False))
# look for post resume events after the last test run
t = sysvals.postresumetime
if(t > 0):
print('Waiting %d seconds for POST-RESUME trace events...' % t)
time.sleep(t)
# stop ftrace
if(sysvals.usecallgraph or sysvals.usetraceevents):
sysvals.fsetVal('0', 'tracing_on')
print('CAPTURING TRACE')
writeDatafileHeader(sysvals.ftracefile, fwdata)
os.system('cat '+tp+'trace >> '+sysvals.ftracefile)
sysvals.fsetVal('', 'trace')
devProps()
# grab a copy of the dmesg output
print('CAPTURING DMESG')
writeDatafileHeader(sysvals.dmesgfile, fwdata)
sysvals.getdmesg()
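# Function: writeDatafileHeader
# Description:
#	 Write the test stamp, any firmware data, and the post resume time
#	 to an output file, called before the dmesg/ftrace data is dumped in
# Arguments:
#	 filename: the dmesg or ftrace output file
#	 fwdata: list of firmware suspend/resume time pairs from getFPDT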
def writeDatafileHeader(filename, fwdata):
global sysvals
prt = sysvals.postresumetime
fp = open(filename, 'a')
fp.write(sysvals.teststamp+'\n')
if(sysvals.suspendmode == 'mem'):
for fw in fwdata:
if(fw):
fp.write('# fwsuspend %u fwresume %u\n' % (fw[0], fw[1]))
if(prt > 0):
fp.write('# post resume time %u\n' % prt)
fp.close()
# Function: setUSBDevicesAuto
# Description:
# Set the autosuspend control parameter of all USB devices to auto
#	 This can be dangerous, so use at your own risk; most devices are set
#	 to always-on since the kernel can't determine if a device can
#	 properly autosuspend
def setUSBDevicesAuto():
global sysvals
rootCheck(True)
for dirname, dirnames, filenames in os.walk('/sys/devices'):
if(re.match('.*/usb[0-9]*.*', dirname) and
'idVendor' in filenames and 'idProduct' in filenames):
os.system('echo auto > %s/power/control' % dirname)
name = dirname.split('/')[-1]
desc = os.popen('cat %s/product 2>/dev/null' % \
dirname).read().replace('\n', '')
ctrl = os.popen('cat %s/power/control 2>/dev/null' % \
dirname).read().replace('\n', '')
print('control is %s for %6s: %s' % (ctrl, name, desc))
# Function: yesno
# Description:
# Print out an equivalent Y or N for a set of known parameter values
# Output:
# 'Y', 'N', or ' ' if the value is unknown
def yesno(val):
yesvals = ['auto', 'enabled', 'active', '1']
novals = ['on', 'disabled', 'suspended', 'forbidden', 'unsupported']
if val in yesvals:
return 'Y'
elif val in novals:
return 'N'
return ' '
# Function: ms2nice
# Description:
# Print out a very concise time string in minutes and seconds
# Output:
# The time string, e.g. "1901m16s"
def ms2nice(val):
ms = 0
try:
ms = int(val)
except:
return 0.0
m = ms / 60000
s = (ms / 1000) - (m * 60)
return '%3dm%2ds' % (m, s)
# Function: detectUSB
# Description:
# Detect all the USB hosts and devices currently connected and add
# a list of USB device names to sysvals for better timeline readability
def detectUSB():
global sysvals
field = {'idVendor':'', 'idProduct':'', 'product':'', 'speed':''}
power = {'async':'', 'autosuspend':'', 'autosuspend_delay_ms':'',
'control':'', 'persist':'', 'runtime_enabled':'',
'runtime_status':'', 'runtime_usage':'',
'runtime_active_time':'',
'runtime_suspended_time':'',
'active_duration':'',
'connected_duration':''}
print('LEGEND')
print('---------------------------------------------------------------------------------------------')
print(' A = async/sync PM queue Y/N D = autosuspend delay (seconds)')
print(' S = autosuspend Y/N rACTIVE = runtime active (min/sec)')
print(' P = persist across suspend Y/N rSUSPEN = runtime suspend (min/sec)')
print(' E = runtime suspend enabled/forbidden Y/N ACTIVE = active duration (min/sec)')
print(' R = runtime status active/suspended Y/N CONNECT = connected duration (min/sec)')
print(' U = runtime usage count')
print('---------------------------------------------------------------------------------------------')
print(' NAME ID DESCRIPTION SPEED A S P E R U D rACTIVE rSUSPEN ACTIVE CONNECT')
print('---------------------------------------------------------------------------------------------')
for dirname, dirnames, filenames in os.walk('/sys/devices'):
if(re.match('.*/usb[0-9]*.*', dirname) and
'idVendor' in filenames and 'idProduct' in filenames):
for i in field:
field[i] = os.popen('cat %s/%s 2>/dev/null' % \
(dirname, i)).read().replace('\n', '')
name = dirname.split('/')[-1]
for i in power:
power[i] = os.popen('cat %s/power/%s 2>/dev/null' % \
(dirname, i)).read().replace('\n', '')
if(re.match('usb[0-9]*', name)):
first = '%-8s' % name
else:
first = '%8s' % name
print('%s [%s:%s] %-20s %-4s %1s %1s %1s %1s %1s %1s %1s %s %s %s %s' % \
(first, field['idVendor'], field['idProduct'], \
field['product'][0:20], field['speed'], \
yesno(power['async']), \
yesno(power['control']), \
yesno(power['persist']), \
yesno(power['runtime_enabled']), \
yesno(power['runtime_status']), \
power['runtime_usage'], \
power['autosuspend'], \
ms2nice(power['runtime_active_time']), \
ms2nice(power['runtime_suspended_time']), \
ms2nice(power['active_duration']), \
ms2nice(power['connected_duration'])))
# Function: devProps
# Description:
# Retrieve a list of properties for all devices in the trace log
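#	 If data is supplied it is a "Device Properties" line from an existing
#	 trace file; otherwise the device list is built from the ftrace log
#	 and sysfs, then appended to the ftrace file for future reuse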
def devProps(data=0):
global sysvals
props = dict()
if data:
idx = data.index(': ') + 2
if idx >= len(data):
return
devlist = data[idx:].split(';')
for dev in devlist:
f = dev.split(',')
if len(f) < 3:
continue
dev = f[0]
props[dev] = DevProps()
props[dev].altname = f[1]
if int(f[2]):
props[dev].async = True
else:
props[dev].async = False
sysvals.devprops = props
if sysvals.suspendmode == 'command' and 'testcommandstring' in props:
sysvals.testcommand = props['testcommandstring'].altname
return
if(os.path.exists(sysvals.ftracefile) == False):
doError('%s does not exist' % sysvals.ftracefile, False)
# first get the list of devices we need properties for
msghead = 'Additional data added by AnalyzeSuspend'
alreadystamped = False
tp = TestProps()
tf = open(sysvals.ftracefile, 'r')
for line in tf:
if msghead in line:
alreadystamped = True
continue
# determine the trace data type (required for further parsing)
m = re.match(sysvals.tracertypefmt, line)
if(m):
tp.setTracerType(m.group('t'))
continue
# parse only valid lines, if this is not one move on
m = re.match(tp.ftrace_line_fmt, line)
if(not m or 'device_pm_callback_start' not in line):
continue
		m = re.match('.*: (?P<drv>.*) (?P<d>.*), parent: *(?P<p>.*), .*', m.group('msg'))
if(not m):
continue
drv, dev, par = m.group('drv'), m.group('d'), m.group('p')
if dev not in props:
props[dev] = DevProps()
tf.close()
if not alreadystamped and sysvals.suspendmode == 'command':
out = '#\n# '+msghead+'\n# Device Properties: '
out += 'testcommandstring,%s,0;' % (sysvals.testcommand)
with open(sysvals.ftracefile, 'a') as fp:
fp.write(out+'\n')
sysvals.devprops = props
return
# now get the syspath for each of our target devices
for dirname, dirnames, filenames in os.walk('/sys/devices'):
if(re.match('.*/power', dirname) and 'async' in filenames):
dev = dirname.split('/')[-2]
if dev in props and (not props[dev].syspath or len(dirname) < len(props[dev].syspath)):
props[dev].syspath = dirname[:-6]
# now fill in the properties for our target devices
for dev in props:
dirname = props[dev].syspath
if not dirname or not os.path.exists(dirname):
continue
with open(dirname+'/power/async') as fp:
text = fp.read()
props[dev].async = False
if 'enabled' in text:
props[dev].async = True
fields = os.listdir(dirname)
if 'product' in fields:
with open(dirname+'/product') as fp:
props[dev].altname = fp.read()
elif 'name' in fields:
with open(dirname+'/name') as fp:
props[dev].altname = fp.read()
elif 'model' in fields:
with open(dirname+'/model') as fp:
props[dev].altname = fp.read()
elif 'description' in fields:
with open(dirname+'/description') as fp:
props[dev].altname = fp.read()
elif 'id' in fields:
with open(dirname+'/id') as fp:
props[dev].altname = fp.read()
elif 'idVendor' in fields and 'idProduct' in fields:
idv, idp = '', ''
with open(dirname+'/idVendor') as fp:
idv = fp.read().strip()
with open(dirname+'/idProduct') as fp:
idp = fp.read().strip()
props[dev].altname = '%s:%s' % (idv, idp)
if props[dev].altname:
out = props[dev].altname.strip().replace('\n', ' ')
out = out.replace(',', ' ')
out = out.replace(';', ' ')
props[dev].altname = out
# and now write the data to the ftrace file
if not alreadystamped:
out = '#\n# '+msghead+'\n# Device Properties: '
for dev in sorted(props):
out += props[dev].out(dev)
with open(sysvals.ftracefile, 'a') as fp:
fp.write(out+'\n')
sysvals.devprops = props
# Function: getModes
# Description:
# Determine the supported power modes on this system
# Output:
# A string list of the available modes
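#	 e.g. ['freeze', 'standby', 'mem', 'disk'] on a typical system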
def getModes():
global sysvals
modes = ''
if(os.path.exists(sysvals.powerfile)):
fp = open(sysvals.powerfile, 'r')
modes = string.split(fp.read())
fp.close()
return modes
# Function: getFPDT
# Description:
# Read the acpi bios tables and pull out FPDT, the firmware data
# Arguments:
# output: True to output the info to stdout, False otherwise
def getFPDT(output):
global sysvals
rectype = {}
rectype[0] = 'Firmware Basic Boot Performance Record'
rectype[1] = 'S3 Performance Table Record'
prectype = {}
prectype[0] = 'Basic S3 Resume Performance Record'
prectype[1] = 'Basic S3 Suspend Performance Record'
rootCheck(True)
if(not os.path.exists(sysvals.fpdtpath)):
if(output):
doError('file does not exist: %s' % sysvals.fpdtpath, False)
return False
if(not os.access(sysvals.fpdtpath, os.R_OK)):
if(output):
doError('file is not readable: %s' % sysvals.fpdtpath, False)
return False
if(not os.path.exists(sysvals.mempath)):
if(output):
doError('file does not exist: %s' % sysvals.mempath, False)
return False
if(not os.access(sysvals.mempath, os.R_OK)):
if(output):
doError('file is not readable: %s' % sysvals.mempath, False)
return False
fp = open(sysvals.fpdtpath, 'rb')
buf = fp.read()
fp.close()
if(len(buf) < 36):
if(output):
doError('Invalid FPDT table data, should '+\
'be at least 36 bytes', False)
return False
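	# unpack the 36 byte ACPI table header: signature, length, revision,
	# checksum, OEM ids, and creator id/revision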
table = struct.unpack('4sIBB6s8sI4sI', buf[0:36])
if(output):
print('')
print('Firmware Performance Data Table (%s)' % table[0])
print(' Signature : %s' % table[0])
print(' Table Length : %u' % table[1])
print(' Revision : %u' % table[2])
print(' Checksum : 0x%x' % table[3])
print(' OEM ID : %s' % table[4])
print(' OEM Table ID : %s' % table[5])
print(' OEM Revision : %u' % table[6])
print(' Creator ID : %s' % table[7])
print(' Creator Revision : 0x%x' % table[8])
print('')
if(table[0] != 'FPDT'):
if(output):
doError('Invalid FPDT table')
return False
if(len(buf) <= 36):
return False
i = 0
fwData = [0, 0]
records = buf[36:]
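	# walk the records, reading each referenced data buffer out of physical
	# memory via sysvals.mempath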
fp = open(sysvals.mempath, 'rb')
while(i < len(records)):
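		# each record header is: type (H), length (B), revision (B)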
header = struct.unpack('HBB', records[i:i+4])
if(header[0] not in rectype):
i += header[1]
continue
if(header[1] != 16):
i += header[1]
continue
addr = struct.unpack('Q', records[i+8:i+16])[0]
try:
fp.seek(addr)
first = fp.read(8)
except:
if(output):
print('Bad address 0x%x in %s' % (addr, sysvals.mempath))
return [0, 0]
rechead = struct.unpack('4sI', first)
recdata = fp.read(rechead[1]-8)
if(rechead[0] == 'FBPT'):
record = struct.unpack('HBBIQQQQQ', recdata)
if(output):
print('%s (%s)' % (rectype[header[0]], rechead[0]))
print(' Reset END : %u ns' % record[4])
print(' OS Loader LoadImage Start : %u ns' % record[5])
print(' OS Loader StartImage Start : %u ns' % record[6])
print(' ExitBootServices Entry : %u ns' % record[7])
print(' ExitBootServices Exit : %u ns' % record[8])
elif(rechead[0] == 'S3PT'):
if(output):
print('%s (%s)' % (rectype[header[0]], rechead[0]))
j = 0
while(j < len(recdata)):
prechead = struct.unpack('HBB', recdata[j:j+4])
				if(prechead[1] == 0):
					break
				# skip unknown record types, advancing j to avoid an infinite loop
				if(prechead[0] not in prectype):
					j += prechead[1]
					continue
if(prechead[0] == 0):
record = struct.unpack('IIQQ', recdata[j:j+prechead[1]])
fwData[1] = record[2]
if(output):
print(' %s' % prectype[prechead[0]])
print(' Resume Count : %u' % \
record[1])
print(' FullResume : %u ns' % \
record[2])
print(' AverageResume : %u ns' % \
record[3])
elif(prechead[0] == 1):
record = struct.unpack('QQ', recdata[j+4:j+prechead[1]])
fwData[0] = record[1] - record[0]
if(output):
print(' %s' % prectype[prechead[0]])
print(' SuspendStart : %u ns' % \
record[0])
print(' SuspendEnd : %u ns' % \
record[1])
print(' SuspendTime : %u ns' % \
fwData[0])
j += prechead[1]
if(output):
print('')
i += header[1]
fp.close()
return fwData
# Function: statusCheck
# Description:
# Verify that the requested command and options will work, and
# print the results to the terminal
# Output:
# True if the test will work, False if not
def statusCheck(probecheck=False):
global sysvals
status = True
print('Checking this system (%s)...' % platform.node())
# check we have root access
res = sysvals.colorText('NO (No features of this tool will work!)')
if(rootCheck(False)):
res = 'YES'
print(' have root access: %s' % res)
if(res != 'YES'):
print(' Try running this script with sudo')
return False
# check sysfs is mounted
res = sysvals.colorText('NO (No features of this tool will work!)')
if(os.path.exists(sysvals.powerfile)):
res = 'YES'
print(' is sysfs mounted: %s' % res)
if(res != 'YES'):
return False
# check target mode is a valid mode
if sysvals.suspendmode != 'command':
res = sysvals.colorText('NO')
modes = getModes()
if(sysvals.suspendmode in modes):
res = 'YES'
else:
status = False
print(' is "%s" a valid power mode: %s' % (sysvals.suspendmode, res))
if(res == 'NO'):
print(' valid power modes are: %s' % modes)
print(' please choose one with -m')
# check if ftrace is available
res = sysvals.colorText('NO')
ftgood = sysvals.verifyFtrace()
if(ftgood):
res = 'YES'
elif(sysvals.usecallgraph):
status = False
print(' is ftrace supported: %s' % res)
# check if kprobes are available
res = sysvals.colorText('NO')
sysvals.usekprobes = sysvals.verifyKprobes()
if(sysvals.usekprobes):
res = 'YES'
else:
sysvals.usedevsrc = False
print(' are kprobes supported: %s' % res)
# what data source are we using
res = 'DMESG'
if(ftgood):
sysvals.usetraceeventsonly = True
sysvals.usetraceevents = False
for e in sysvals.traceevents:
check = False
if(os.path.exists(sysvals.epath+e)):
check = True
if(not check):
sysvals.usetraceeventsonly = False
if(e == 'suspend_resume' and check):
sysvals.usetraceevents = True
if(sysvals.usetraceevents and sysvals.usetraceeventsonly):
res = 'FTRACE (all trace events found)'
elif(sysvals.usetraceevents):
res = 'DMESG and FTRACE (suspend_resume trace event found)'
print(' timeline data source: %s' % res)
# check if rtcwake
res = sysvals.colorText('NO')
if(sysvals.rtcpath != ''):
res = 'YES'
elif(sysvals.rtcwake):
status = False
print(' is rtcwake supported: %s' % res)
if not probecheck:
return status
if (sysvals.usecallgraph and len(sysvals.debugfuncs) > 0) or len(sysvals.kprobes) > 0:
sysvals.initFtrace(True)
# verify callgraph debugfuncs
if sysvals.usecallgraph and len(sysvals.debugfuncs) > 0:
print(' verifying these ftrace callgraph functions work:')
sysvals.setFtraceFilterFunctions(sysvals.debugfuncs)
fp = open(sysvals.tpath+'set_graph_function', 'r')
flist = fp.read().split('\n')
fp.close()
for func in sysvals.debugfuncs:
res = sysvals.colorText('NO')
if func in flist:
res = 'YES'
else:
for i in flist:
if ' [' in i and func == i.split(' ')[0]:
res = 'YES'
break
print(' %s: %s' % (func, res))
# verify kprobes
if len(sysvals.kprobes) > 0:
print(' verifying these kprobes work:')
for name in sorted(sysvals.kprobes):
if name in sysvals.tracefuncs:
continue
res = sysvals.colorText('NO')
if sysvals.testKprobe(sysvals.kprobes[name]):
res = 'YES'
print(' %s: %s' % (name, res))
return status
# Function: doError
# Description:
# generic error function for catastrophic failures
# Arguments:
# msg: the error message to print
# help: True if printHelp should be called after, False otherwise
def doError(msg, help):
if(help == True):
printHelp()
print('ERROR: %s\n' % msg)
sys.exit()
# Function: doWarning
# Description:
# generic warning function for non-catastrophic anomalies
# Arguments:
# msg: the warning message to print
# file: If not empty, a filename to request be sent to the owner for debug
def doWarning(msg, file=''):
print('/* %s */' % msg)
if(file):
print('/* For a fix, please send this'+\
' %s file to <todd.e.brandt@intel.com> */' % file)
# Function: rootCheck
# Description:
# quick check to see if we have root access
def rootCheck(fatal):
global sysvals
if(os.access(sysvals.powerfile, os.W_OK)):
return True
if fatal:
doError('This command must be run as root', False)
return False
# Function: getArgInt
# Description:
# pull out an integer argument from the command line with checks
def getArgInt(name, args, min, max, main=True):
if main:
try:
arg = args.next()
except:
doError(name+': no argument supplied', True)
else:
arg = args
try:
val = int(arg)
except:
doError(name+': non-integer value given', True)
if(val < min or val > max):
doError(name+': value should be between %d and %d' % (min, max), True)
return val
# Function: getArgFloat
# Description:
# pull out a float argument from the command line with checks
def getArgFloat(name, args, min, max, main=True):
if main:
try:
arg = args.next()
except:
doError(name+': no argument supplied', True)
else:
arg = args
try:
val = float(arg)
except:
doError(name+': non-numerical value given', True)
if(val < min or val > max):
doError(name+': value should be between %f and %f' % (min, max), True)
return val
# Function: rerunTest
# Description:
# generate an output from an existing set of ftrace/dmesg logs
def rerunTest():
global sysvals
if(sysvals.ftracefile != ''):
doesTraceLogHaveTraceEvents()
if(sysvals.dmesgfile == '' and not sysvals.usetraceeventsonly):
doError('recreating this html output '+\
'requires a dmesg file', False)
sysvals.setOutputFile()
vprint('Output file: %s' % sysvals.htmlfile)
print('PROCESSING DATA')
if(sysvals.usetraceeventsonly):
testruns = parseTraceLog()
else:
testruns = loadKernelLog()
for data in testruns:
parseKernelLog(data)
if(sysvals.ftracefile != ''):
appendIncompleteTraceLog(testruns)
createHTML(testruns)
# Function: runTest
# Description:
# execute a suspend/resume, gather the logs, and generate the output
def runTest(subdir, testpath=''):
global sysvals
# prepare for the test
sysvals.initFtrace()
sysvals.initTestOutput(subdir, testpath)
vprint('Output files:\n %s' % sysvals.dmesgfile)
if(sysvals.usecallgraph or
sysvals.usetraceevents or
sysvals.usetraceeventsonly):
vprint(' %s' % sysvals.ftracefile)
vprint(' %s' % sysvals.htmlfile)
# execute the test
executeSuspend()
sysvals.cleanupFtrace()
# analyze the data and create the html output
print('PROCESSING DATA')
if(sysvals.usetraceeventsonly):
# data for kernels 3.15 or newer is entirely in ftrace
testruns = parseTraceLog()
else:
# data for kernels older than 3.15 is primarily in dmesg
testruns = loadKernelLog()
for data in testruns:
parseKernelLog(data)
if(sysvals.usecallgraph or sysvals.usetraceevents):
appendIncompleteTraceLog(testruns)
createHTML(testruns)
# Function: runSummary
# Description:
# create a summary of tests in a sub-directory
def runSummary(subdir, output):
global sysvals
# get a list of ftrace output files
files = []
for dirname, dirnames, filenames in os.walk(subdir):
for filename in filenames:
if(re.match('.*_ftrace.txt', filename)):
files.append("%s/%s" % (dirname, filename))
# process the files in order and get an array of data objects
testruns = []
for file in sorted(files):
if output:
print("Test found in %s" % os.path.dirname(file))
sysvals.ftracefile = file
sysvals.dmesgfile = file.replace('_ftrace.txt', '_dmesg.txt')
doesTraceLogHaveTraceEvents()
sysvals.usecallgraph = False
if not sysvals.usetraceeventsonly:
if(not os.path.exists(sysvals.dmesgfile)):
print("Skipping %s: not a valid test input" % file)
continue
else:
if output:
f = os.path.basename(sysvals.ftracefile)
d = os.path.basename(sysvals.dmesgfile)
print("\tInput files: %s and %s" % (f, d))
testdata = loadKernelLog()
data = testdata[0]
parseKernelLog(data)
testdata = [data]
appendIncompleteTraceLog(testdata)
else:
if output:
print("\tInput file: %s" % os.path.basename(sysvals.ftracefile))
testdata = parseTraceLog()
data = testdata[0]
data.normalizeTime(data.tSuspended)
link = file.replace(subdir+'/', '').replace('_ftrace.txt', '.html')
data.outfile = link
testruns.append(data)
createHTMLSummarySimple(testruns, subdir+'/summary.html')
# Function: checkArgBool
# Description:
# check if a boolean string value is true or false
def checkArgBool(value):
yes = ['1', 'true', 'yes', 'on']
if value.lower() in yes:
return True
return False
# Function: configFromFile
# Description:
# Configure the script via the info in a config file
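# An illustrative config sketch (option names match the parsing below,
# values are hypothetical):
#   [Settings]
#   verbose: true
#   mode: mem
#   rtcwake: 15
#   mindev: 0.1
#   [Kprobe]
#   msleep: msleep({time}) time=%di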
def configFromFile(file):
global sysvals
Config = ConfigParser.ConfigParser()
ignorekprobes = False
Config.read(file)
sections = Config.sections()
if 'Settings' in sections:
for opt in Config.options('Settings'):
value = Config.get('Settings', opt).lower()
if(opt.lower() == 'verbose'):
sysvals.verbose = checkArgBool(value)
elif(opt.lower() == 'addlogs'):
sysvals.addlogs = checkArgBool(value)
elif(opt.lower() == 'dev'):
sysvals.usedevsrc = checkArgBool(value)
elif(opt.lower() == 'ignorekprobes'):
ignorekprobes = checkArgBool(value)
elif(opt.lower() == 'x2'):
if checkArgBool(value):
sysvals.execcount = 2
elif(opt.lower() == 'callgraph'):
sysvals.usecallgraph = checkArgBool(value)
elif(opt.lower() == 'callgraphfunc'):
sysvals.debugfuncs = []
if value:
value = value.split(',')
for i in value:
sysvals.debugfuncs.append(i.strip())
elif(opt.lower() == 'expandcg'):
sysvals.cgexp = checkArgBool(value)
elif(opt.lower() == 'srgap'):
if checkArgBool(value):
sysvals.srgap = 5
elif(opt.lower() == 'mode'):
sysvals.suspendmode = value
elif(opt.lower() == 'command'):
sysvals.testcommand = value
elif(opt.lower() == 'x2delay'):
sysvals.x2delay = getArgInt('-x2delay', value, 0, 60000, False)
elif(opt.lower() == 'postres'):
sysvals.postresumetime = getArgInt('-postres', value, 0, 3600, False)
elif(opt.lower() == 'rtcwake'):
sysvals.rtcwake = True
sysvals.rtcwaketime = getArgInt('-rtcwake', value, 0, 3600, False)
elif(opt.lower() == 'timeprec'):
sysvals.setPrecision(getArgInt('-timeprec', value, 0, 6, False))
elif(opt.lower() == 'mindev'):
sysvals.mindevlen = getArgFloat('-mindev', value, 0.0, 10000.0, False)
elif(opt.lower() == 'mincg'):
sysvals.mincglen = getArgFloat('-mincg', value, 0.0, 10000.0, False)
elif(opt.lower() == 'kprobecolor'):
try:
val = int(value, 16)
sysvals.kprobecolor = '#'+value
except:
sysvals.kprobecolor = value
elif(opt.lower() == 'synccolor'):
try:
val = int(value, 16)
sysvals.synccolor = '#'+value
except:
sysvals.synccolor = value
elif(opt.lower() == 'output-dir'):
args = dict()
n = datetime.now()
args['date'] = n.strftime('%y%m%d')
args['time'] = n.strftime('%H%M%S')
args['hostname'] = sysvals.hostname
sysvals.outdir = value.format(**args)
if sysvals.suspendmode == 'command' and not sysvals.testcommand:
doError('No command supplied for mode "command"', False)
if sysvals.usedevsrc and sysvals.usecallgraph:
doError('dev and callgraph cannot both be true', False)
if sysvals.usecallgraph and sysvals.execcount > 1:
doError('-x2 is not compatible with -f', False)
if ignorekprobes:
return
kprobes = dict()
archkprobe = 'Kprobe_'+platform.machine()
if archkprobe in sections:
for name in Config.options(archkprobe):
kprobes[name] = Config.get(archkprobe, name)
if 'Kprobe' in sections:
for name in Config.options('Kprobe'):
kprobes[name] = Config.get('Kprobe', name)
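# each entry reads "<format> [prop=value,...] <arg>=<register> ...": the first
# unbracketed token is the format string, whose {arg} placeholders must match
# the trailing arg definitions, e.g. (hypothetical entry):
#   msleep: msleep({time}) [color=red] time=%di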
for name in kprobes:
function = name
format = name
color = ''
args = dict()
data = kprobes[name].split()
i = 0
for val in data:
# bracketed strings are special formatting, read them separately
if val[0] == '[' and val[-1] == ']':
for prop in val[1:-1].split(','):
p = prop.split('=')
if p[0] == 'color':
try:
color = int(p[1], 16)
color = '#'+p[1]
except:
color = p[1]
continue
# first real arg should be the format string
if i == 0:
format = val
# all other args are actual function args
else:
d = val.split('=')
args[d[0]] = d[1]
i += 1
if not function or not format:
doError('Invalid kprobe: %s' % name, False)
for arg in re.findall('{(?P<n>[a-z,A-Z,0-9]*)}', format):
if arg not in args:
doError('Kprobe "%s" is missing argument "%s"' % (name, arg), False)
if name in sysvals.kprobes:
doError('Duplicate kprobe found "%s"' % (name), False)
vprint('Adding KPROBE: %s %s %s %s' % (name, function, format, args))
sysvals.kprobes[name] = {
'name': name,
'func': function,
'format': format,
'args': args,
'mask': re.sub('{(?P<n>[a-z,A-Z,0-9]*)}', '.*', format)
}
if color:
sysvals.kprobes[name]['color'] = color
# Function: printHelp
# Description:
# print out the help text
def printHelp():
global sysvals
modes = getModes()
print('')
print('AnalyzeSuspend v%s' % sysvals.version)
print('Usage: sudo analyze_suspend.py <options>')
print('')
print('Description:')
print(' This tool is designed to assist kernel and OS developers in optimizing')
print(' their linux stack\'s suspend/resume time. Using a kernel image built')
print(' with a few extra options enabled, the tool will execute a suspend and')
print(' capture dmesg and ftrace data until resume is complete. This data is')
print(' transformed into a device timeline and an optional callgraph to give')
print(' a detailed view of which devices/subsystems are taking the most')
print(' time in suspend/resume.')
print('')
print(' Generates output files in subdirectory: suspend-mmddyy-HHMMSS')
print(' HTML output: <hostname>_<mode>.html')
print(' raw dmesg output: <hostname>_<mode>_dmesg.txt')
print(' raw ftrace output: <hostname>_<mode>_ftrace.txt')
print('')
print('Options:')
print(' [general]')
print(' -h Print this help text')
print(' -v Print the current tool version')
print(' -config file Pull arguments and config options from a file')
print(' -verbose Print extra information during execution and analysis')
print(' -status Test to see if the system is enabled to run this tool')
print(' -modes List available suspend modes')
print(' -m mode Mode to initiate for suspend %s (default: %s)' % (modes, sysvals.suspendmode))
print(' -o subdir Override the output subdirectory')
print(' [advanced]')
print(' -rtcwake t Use rtcwake to autoresume after <t> seconds (default: disabled)')
print(' -addlogs Add the dmesg and ftrace logs to the html output')
print(' -multi n d Execute <n> consecutive tests at <d> second intervals. The outputs will')
print(' be created in a new subdirectory with a summary page.')
print(' -srgap Add a visible gap in the timeline between sus/res (default: disabled)')
print(' -cmd {s} Instead of suspend/resume, run a command, e.g. "sync -d"')
print(' -mindev ms Discard all device blocks shorter than ms milliseconds (e.g. 0.001 for us)')
print(' -mincg ms Discard all callgraphs shorter than ms milliseconds (e.g. 0.001 for us)')
print(' -timeprec N Number of significant digits in timestamps (0:S, [3:ms], 6:us)')
print(' [debug]')
print(' -f Use ftrace to create device callgraphs (default: disabled)')
print(' -expandcg Pre-expand the callgraph data in the html output (default: disabled)')
print(' -flist Print the list of functions currently being captured in ftrace')
print(' -flistall Print all functions capable of being captured in ftrace')
print(' -fadd file Add functions to be graphed in the timeline from a list in a text file')
print(' -filter "d1 d2 ..." Filter out all but this list of device names')
print(' -dev Display common low level functions in the timeline')
print(' [post-resume task analysis]')
print(' -x2 Run two suspend/resumes back to back (default: disabled)')
print(' -x2delay t Minimum millisecond delay <t> between the two test runs (default: 0 ms)')
print(' -postres t Time after resume completion to wait for post-resume events (default: 0 S)')
print(' [utilities]')
print(' -fpdt Print out the contents of the ACPI Firmware Performance Data Table')
print(' -usbtopo Print out the current USB topology with power info')
print(' -usbauto Enable autosuspend for all connected USB devices')
print(' [re-analyze data from previous runs]')
print(' -ftrace ftracefile Create HTML output using ftrace input')
print(' -dmesg dmesgfile Create HTML output using dmesg (not needed for kernel >= 3.15)')
print(' -summary directory Create a summary of all tests in this dir')
print('')
return True
# ----------------- MAIN --------------------
# exec start (skipped if script is loaded as library)
if __name__ == '__main__':
cmd = ''
cmdarg = ''
multitest = {'run': False, 'count': 0, 'delay': 0}
simplecmds = ['-modes', '-fpdt', '-flist', '-flistall', '-usbtopo', '-usbauto', '-status']
# loop through the command line arguments
args = iter(sys.argv[1:])
for arg in args:
if(arg == '-m'):
try:
val = args.next()
except:
doError('No mode supplied', True)
if val == 'command' and not sysvals.testcommand:
doError('No command supplied for mode "command"', True)
sysvals.suspendmode = val
elif(arg in simplecmds):
cmd = arg[1:]
elif(arg == '-h'):
printHelp()
sys.exit()
elif(arg == '-v'):
print("Version %s" % sysvals.version)
sys.exit()
elif(arg == '-x2'):
sysvals.execcount = 2
if(sysvals.usecallgraph):
doError('-x2 is not compatible with -f', False)
elif(arg == '-x2delay'):
sysvals.x2delay = getArgInt('-x2delay', args, 0, 60000)
elif(arg == '-postres'):
sysvals.postresumetime = getArgInt('-postres', args, 0, 3600)
elif(arg == '-f'):
sysvals.usecallgraph = True
if(sysvals.execcount > 1):
doError('-x2 is not compatible with -f', False)
if(sysvals.usedevsrc):
doError('-dev is not compatible with -f', False)
elif(arg == '-addlogs'):
sysvals.addlogs = True
elif(arg == '-verbose'):
sysvals.verbose = True
elif(arg == '-dev'):
sysvals.usedevsrc = True
if(sysvals.usecallgraph):
doError('-dev is not compatible with -f', False)
elif(arg == '-rtcwake'):
sysvals.rtcwake = True
sysvals.rtcwaketime = getArgInt('-rtcwake', args, 0, 3600)
elif(arg == '-timeprec'):
sysvals.setPrecision(getArgInt('-timeprec', args, 0, 6))
elif(arg == '-mindev'):
sysvals.mindevlen = getArgFloat('-mindev', args, 0.0, 10000.0)
elif(arg == '-mincg'):
sysvals.mincglen = getArgFloat('-mincg', args, 0.0, 10000.0)
elif(arg == '-cmd'):
try:
val = args.next()
except:
doError('No command string supplied', True)
sysvals.testcommand = val
sysvals.suspendmode = 'command'
elif(arg == '-expandcg'):
sysvals.cgexp = True
elif(arg == '-srgap'):
sysvals.srgap = 5
elif(arg == '-multi'):
multitest['run'] = True
multitest['count'] = getArgInt('-multi n (exec count)', args, 2, 1000000)
multitest['delay'] = getArgInt('-multi d (delay between tests)', args, 0, 3600)
elif(arg == '-o'):
try:
val = args.next()
except:
doError('No subdirectory name supplied', True)
sysvals.outdir = val
elif(arg == '-config'):
try:
val = args.next()
except:
doError('No text file supplied', True)
if(not os.path.exists(val)):
doError('%s does not exist' % val, False)
configFromFile(val)
elif(arg == '-fadd'):
try:
val = args.next()
except:
doError('No text file supplied', True)
if(not os.path.exists(val)):
doError('%s does not exist' % val, False)
sysvals.addFtraceFilterFunctions(val)
elif(arg == '-dmesg'):
try:
val = args.next()
except:
doError('No dmesg file supplied', True)
sysvals.notestrun = True
sysvals.dmesgfile = val
if(not os.path.exists(sysvals.dmesgfile)):
doError('%s does not exist' % sysvals.dmesgfile, False)
elif(arg == '-ftrace'):
try:
val = args.next()
except:
doError('No ftrace file supplied', True)
sysvals.notestrun = True
sysvals.ftracefile = val
if(not os.path.exists(sysvals.ftracefile)):
doError('%s does not exist' % sysvals.ftracefile, False)
elif(arg == '-summary'):
try:
val = args.next()
except:
doError('No directory supplied', True)
cmd = 'summary'
cmdarg = val
sysvals.notestrun = True
if(not os.path.isdir(val)):
doError('%s is not accessible' % val, False)
elif(arg == '-filter'):
try:
val = args.next()
except:
doError('No devnames supplied', True)
sysvals.setDeviceFilter(val)
else:
doError('Invalid argument: '+arg, True)
# callgraph size cannot exceed device size
if sysvals.mincglen < sysvals.mindevlen:
sysvals.mincglen = sysvals.mindevlen
# just run a utility command and exit
if(cmd != ''):
if(cmd == 'status'):
statusCheck(True)
elif(cmd == 'fpdt'):
getFPDT(True)
elif(cmd == 'usbtopo'):
detectUSB()
elif(cmd == 'modes'):
modes = getModes()
print(modes)
elif(cmd == 'flist'):
sysvals.getFtraceFilterFunctions(True)
elif(cmd == 'flistall'):
sysvals.getFtraceFilterFunctions(False)
elif(cmd == 'usbauto'):
setUSBDevicesAuto()
elif(cmd == 'summary'):
print("Generating a summary of folder \"%s\"" % cmdarg)
runSummary(cmdarg, True)
sys.exit()
# if instructed, re-analyze existing data files
if(sysvals.notestrun):
rerunTest()
sys.exit()
# verify that we can run a test
if(not statusCheck()):
print('Check FAILED, aborting the test run!')
sys.exit()
if multitest['run']:
# run multiple tests in a separate subdirectory
s = 'x%d' % multitest['count']
if not sysvals.outdir:
sysvals.outdir = datetime.now().strftime('suspend-'+s+'-%m%d%y-%H%M%S')
if not os.path.isdir(sysvals.outdir):
os.mkdir(sysvals.outdir)
for i in range(multitest['count']):
if(i != 0):
print('Waiting %d seconds...' % (multitest['delay']))
time.sleep(multitest['delay'])
print('TEST (%d/%d) START' % (i+1, multitest['count']))
runTest(sysvals.outdir)
print('TEST (%d/%d) COMPLETE' % (i+1, multitest['count']))
runSummary(sysvals.outdir, False)
else:
# run the test in the current directory
runTest('.', sysvals.outdir)
|
Samuel789/MediPi
|
refs/heads/master
|
MedManagementWeb/env/lib/python3.5/site-packages/wheel/test/test_install.py
|
109
|
# Test wheel.
# The file has the following contents:
# hello.pyd
# hello/hello.py
# hello/__init__.py
# test-1.0.data/data/hello.dat
# test-1.0.data/headers/hello.dat
# test-1.0.data/scripts/hello.sh
# test-1.0.dist-info/WHEEL
# test-1.0.dist-info/METADATA
# test-1.0.dist-info/RECORD
# The root is PLATLIB
# So, some in PLATLIB, and one in each of DATA, HEADERS and SCRIPTS.
import wheel.tool
import wheel.pep425tags
from wheel.install import WheelFile
from tempfile import mkdtemp
import shutil
import os
THISDIR = os.path.dirname(__file__)
TESTWHEEL = os.path.join(THISDIR, 'test-1.0-py2.py3-none-win32.whl')
def check(*path):
return os.path.exists(os.path.join(*path))
def test_install():
tempdir = mkdtemp()
def get_supported():
return list(wheel.pep425tags.get_supported()) + [('py3', 'none', 'win32')]
whl = WheelFile(TESTWHEEL, context=get_supported)
assert whl.supports_current_python(get_supported)
try:
locs = {}
for key in ('purelib', 'platlib', 'scripts', 'headers', 'data'):
locs[key] = os.path.join(tempdir, key)
os.mkdir(locs[key])
whl.install(overrides=locs)
assert len(os.listdir(locs['purelib'])) == 0
assert check(locs['platlib'], 'hello.pyd')
assert check(locs['platlib'], 'hello', 'hello.py')
assert check(locs['platlib'], 'hello', '__init__.py')
assert check(locs['data'], 'hello.dat')
assert check(locs['headers'], 'hello.dat')
assert check(locs['scripts'], 'hello.sh')
assert check(locs['platlib'], 'test-1.0.dist-info', 'RECORD')
finally:
shutil.rmtree(tempdir)
def test_install_tool():
"""Slightly improve coverage of wheel.install"""
wheel.tool.install([TESTWHEEL], force=True, dry_run=True)
|
SerialShadow/SickRage
|
refs/heads/master
|
lib/unrar2/unix.py
|
27
|
# Copyright (c) 2003-2005 Jimmy Retzlaff, 2008 Konstantin Yegupov
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Unix version uses unrar command line executable
import subprocess
import gc
import os, os.path
import time, re
from rar_exceptions import *
class UnpackerNotInstalled(Exception): pass
rar_executable_cached = None
rar_executable_version = None
def call_unrar(params):
"Calls rar/unrar command line executable, returns stdout pipe"
global rar_executable_cached
if rar_executable_cached is None:
for command in ('unrar', 'rar'):
try:
subprocess.Popen([command], stdout=subprocess.PIPE)
rar_executable_cached = command
break
except OSError:
pass
if rar_executable_cached is None:
raise UnpackerNotInstalled("No suitable RAR unpacker installed")
assert type(params) == list, "params must be list"
args = [rar_executable_cached] + params
try:
gc.disable() # See http://bugs.python.org/issue1336
return subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
finally:
gc.enable()
class RarFileImplementation(object):
def init(self, password=None):
global rar_executable_version
self.password = password
stdoutdata, stderrdata = self.call('v', []).communicate()
for line in stderrdata.splitlines():
if line.strip().startswith("Cannot open"):
raise FileOpenError
if line.find("CRC failed")>=0:
raise IncorrectRARPassword
accum = []
source = iter(stdoutdata.splitlines())
line = ''
while (line.find('RAR ') == -1):
line = source.next()
signature = line
# The code below is mighty flaky
# and will probably crash on localized versions of RAR
# but I see no safe way to rewrite it using a CLI tool
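# e.g. banner lines this keys on (wording assumed, varies by build):
#   "UNRAR 4.20 freeware ..." -> the 4.x branch below
#   "UNRAR 5.30 beta 2 ..."   -> the 5.x branch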
if signature.find("RAR 4") > -1:
rar_executable_version = 4
while not (line.startswith('Comment:') or line.startswith('Pathname/Comment')):
if line.strip().endswith('is not RAR archive'):
raise InvalidRARArchive
line = source.next()
while not line.startswith('Pathname/Comment'):
accum.append(line.rstrip('\n'))
line = source.next()
if len(accum):
accum[0] = accum[0][9:] # strip out "Comment:" part
self.comment = '\n'.join(accum[:-1])
else:
self.comment = None
elif signature.find("RAR 5") > -1:
rar_executable_version = 5
line = source.next()
while not line.startswith('Archive:'):
if line.strip().endswith('is not RAR archive'):
raise InvalidRARArchive
accum.append(line.rstrip('\n'))
line = source.next()
if len(accum):
self.comment = '\n'.join(accum[:-1]).strip()
else:
self.comment = None
else:
raise UnpackerNotInstalled("Unsupported RAR version, expected 4.x or 5.x, found: "
+ signature.split(" ")[1])
def escaped_password(self):
return '-' if self.password is None else self.password
def call(self, cmd, options=[], files=[]):
options2 = options + ['p'+self.escaped_password()]
soptions = ['-'+x for x in options2]
return call_unrar([cmd]+soptions+['--',self.archiveName]+files)
def infoiter(self):
command = "v" if rar_executable_version == 4 else "l"
stdoutdata, stderrdata = self.call(command, ['c-']).communicate()
for line in stderrdata.splitlines():
if line.strip().startswith("Cannot open"):
raise FileOpenError
accum = []
source = iter(stdoutdata.splitlines())
line = ''
while not line.startswith('-----------'):
if line.strip().endswith('is not RAR archive'):
raise InvalidRARArchive
if line.startswith("CRC failed") or line.startswith("Checksum error"):
raise IncorrectRARPassword
line = source.next()
line = source.next()
i = 0
re_spaces = re.compile(r"\s+")
if rar_executable_version == 4:
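# v4 'v' output lists each entry on two lines: the filename, then a details
# line whose whitespace-split fields are assumed to be
# "size packed ratio date time attr crc meth ver"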
while not line.startswith('-----------'):
accum.append(line)
if len(accum)==2:
data = {}
data['index'] = i
# asterisks mark password-encrypted files
data['filename'] = accum[0].strip().lstrip("*")
fields = re_spaces.split(accum[1].strip())
data['size'] = int(fields[0])
attr = fields[5]
data['isdir'] = 'd' in attr.lower()
try:
data['datetime'] = time.strptime(fields[3]+" "+fields[4], '%d-%m-%y %H:%M')
except ValueError:
data['datetime'] = time.strptime(fields[3]+" "+fields[4], '%Y-%m-%d %H:%M')
data['comment'] = None
data['volume'] = None
yield data
accum = []
i += 1
line = source.next()
elif rar_executable_version == 5:
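# v5 'l' output is one line per entry; fields are assumed to be
# "attr size date time name..." (the name may itself contain spaces)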
while not line.startswith('-----------'):
fields = line.strip().lstrip("*").split()
data = {}
data['index'] = i
data['filename'] = " ".join(fields[4:])
data['size'] = int(fields[1])
attr = fields[0]
data['isdir'] = 'd' in attr.lower()
try:
data['datetime'] = time.strptime(fields[2]+" "+fields[3], '%d-%m-%y %H:%M')
except ValueError:
data['datetime'] = time.strptime(fields[2]+" "+fields[3], '%Y-%m-%d %H:%M')
data['comment'] = None
data['volume'] = None
yield data
i += 1
line = source.next()
def read_files(self, checker):
res = []
for info in self.infoiter():
checkres = checker(info)
if checkres==True and not info.isdir:
pipe = self.call('p', ['inul'], [info.filename]).stdout
res.append((info, pipe.read()))
return res
def extract(self, checker, path, withSubpath, overwrite):
res = []
command = 'x'
if not withSubpath:
command = 'e'
options = []
if overwrite:
options.append('o+')
else:
options.append('o-')
if not path.endswith(os.sep):
path += os.sep
names = []
for info in self.infoiter():
checkres = checker(info)
if type(checkres) in [str, unicode]:
raise NotImplementedError("Condition callbacks returning strings are deprecated and only supported in Windows")
if checkres==True and not info.isdir:
names.append(info.filename)
res.append(info)
names.append(path)
proc = self.call(command, options, names)
stdoutdata, stderrdata = proc.communicate()
if stderrdata.find("CRC failed")>=0 or stderrdata.find("Checksum error")>=0:
raise IncorrectRARPassword
return res
def destruct(self):
pass
def get_volume(self):
command = "v" if rar_executable_version == 4 else "l"
stdoutdata, stderrdata = self.call(command, ['c-']).communicate()
for line in stderrdata.splitlines():
if line.strip().startswith("Cannot open"):
raise FileOpenError
source = iter(stdoutdata.splitlines())
line = ''
while not line.startswith('-----------'):
if line.strip().endswith('is not RAR archive'):
raise InvalidRARArchive
if line.startswith("CRC failed") or line.startswith("Checksum error"):
raise IncorrectRARPassword
line = source.next()
line = source.next()
if rar_executable_version == 4:
while not line.startswith('-----------'):
line = source.next()
line = source.next()
items = line.strip().split()
if len(items)>4 and items[4]=="volume":
return int(items[5]) - 1
else:
return None
elif rar_executable_version == 5:
while not line.startswith('-----------'):
line = source.next()
line = source.next()
items = line.strip().split()
if items[1]=="volume":
return int(items[2]) - 1
else:
return None
|
archf/ansible
|
refs/heads/devel
|
lib/ansible/plugins/connection/chroot.py
|
44
|
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2013, Maykel Moya <mmoya@speedyrails.com>
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import distutils.spawn
import os
import os.path
import subprocess
import traceback
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.basic import is_executable
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes
from ansible.plugins.connection import ConnectionBase, BUFSIZE
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Connection(ConnectionBase):
''' Local chroot based connections '''
transport = 'chroot'
has_pipelining = True
# su currently has an undiagnosed issue with calculating the file
# checksums (so copy, for instance, doesn't work right)
# Have to look into that before re-enabling this
become_methods = frozenset(C.BECOME_METHODS).difference(('su',))
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self.chroot = self._play_context.remote_addr
if os.geteuid() != 0:
raise AnsibleError("chroot connection requires running as root")
# we're running as root on the local system so do some
# trivial checks for ensuring 'host' is actually a chroot'able dir
if not os.path.isdir(self.chroot):
raise AnsibleError("%s is not a directory" % self.chroot)
chrootsh = os.path.join(self.chroot, 'bin/sh')
# Want to check for a usable bourne shell inside the chroot.
# is_executable() == True is sufficient. For symlinks it
# gets really complicated really fast. So we punt on finding that
# out. As long as it's a symlink we assume that it will work
if not (is_executable(chrootsh) or (os.path.lexists(chrootsh) and os.path.islink(chrootsh))):
raise AnsibleError("%s does not look like a chrootable dir (/bin/sh missing)" % self.chroot)
self.chroot_cmd = distutils.spawn.find_executable('chroot')
if not self.chroot_cmd:
raise AnsibleError("chroot command not found in PATH")
def _connect(self):
''' connect to the chroot; nothing to do here '''
super(Connection, self)._connect()
if not self._connected:
display.vvv("THIS IS A LOCAL CHROOT DIR", host=self.chroot)
self._connected = True
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
''' run a command on the chroot. This is only needed for implementing
put_file() get_file() so that we don't have to read the whole file
into memory.
compared to exec_command() it loses some niceties like being able to
return the process's exit code immediately.
'''
executable = C.DEFAULT_EXECUTABLE.split()[0] if C.DEFAULT_EXECUTABLE else '/bin/sh'
local_cmd = [self.chroot_cmd, self.chroot, executable, '-c', cmd]
display.vvv("EXEC %s" % (local_cmd), host=self.chroot)
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p
def exec_command(self, cmd, in_data=None, sudoable=False):
''' run a command on the chroot '''
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
p = self._buffered_exec_command(cmd)
stdout, stderr = p.communicate(in_data)
return (p.returncode, stdout, stderr)
def _prefix_login_path(self, remote_path):
''' Make sure that we put files into a standard path
If a path is relative, then we need to choose where to put it.
ssh chooses $HOME but we aren't guaranteed that a home dir will
exist in any given chroot. So for now we're choosing "/" instead.
This also happens to be the former default.
Can revisit using $HOME instead if it's a problem
'''
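# e.g. 'foo/bar' -> '/foo/bar', while an absolute path is only normalized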
if not remote_path.startswith(os.path.sep):
remote_path = os.path.join(os.path.sep, remote_path)
return os.path.normpath(remote_path)
def put_file(self, in_path, out_path):
''' transfer a file from local to chroot '''
super(Connection, self).put_file(in_path, out_path)
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.chroot)
out_path = shlex_quote(self._prefix_login_path(out_path))
try:
with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
try:
p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), stdin=in_file)
except OSError:
raise AnsibleError("chroot connection requires dd command in the chroot")
try:
stdout, stderr = p.communicate()
except:
traceback.print_exc()
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
if p.returncode != 0:
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
except IOError:
raise AnsibleError("file or module does not exist at: %s" % in_path)
def fetch_file(self, in_path, out_path):
''' fetch a file from chroot to local '''
super(Connection, self).fetch_file(in_path, out_path)
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.chroot)
in_path = shlex_quote(self._prefix_login_path(in_path))
try:
p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE))
except OSError:
raise AnsibleError("chroot connection requires dd command in the chroot")
with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') as out_file:
try:
chunk = p.stdout.read(BUFSIZE)
while chunk:
out_file.write(chunk)
chunk = p.stdout.read(BUFSIZE)
except:
traceback.print_exc()
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
stdout, stderr = p.communicate()
if p.returncode != 0:
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
def close(self):
''' terminate the connection; nothing to do here '''
super(Connection, self).close()
self._connected = False
|
vibhorag/scikit-learn
|
refs/heads/master
|
examples/semi_supervised/plot_label_propagation_structure.py
|
247
|
"""
==============================================
Label Propagation learning a complex structure
==============================================
Example of LabelPropagation learning a complex internal structure
to demonstrate "manifold learning". The outer circle should be
labeled "red" and the inner circle "blue". Because both label groups
lie inside their own distinct shape, we can see that the labels
propagate correctly around the circle.
"""
print(__doc__)
# Authors: Clay Woolam <clay@woolam.org>
# Andreas Mueller <amueller@ais.uni-bonn.de>
# Licence: BSD
import numpy as np
import matplotlib.pyplot as plt
from sklearn.semi_supervised import label_propagation
from sklearn.datasets import make_circles
# generate ring with inner box
n_samples = 200
X, y = make_circles(n_samples=n_samples, shuffle=False)
outer, inner = 0, 1
labels = -np.ones(n_samples)
labels[0] = outer
labels[-1] = inner
###############################################################################
# Learn with LabelSpreading
label_spread = label_propagation.LabelSpreading(kernel='knn', alpha=1.0)
label_spread.fit(X, labels)
###############################################################################
# Plot output labels
output_labels = label_spread.transduction_
plt.figure(figsize=(8.5, 4))
plt.subplot(1, 2, 1)
plot_outer_labeled, = plt.plot(X[labels == outer, 0],
X[labels == outer, 1], 'rs')
plot_unlabeled, = plt.plot(X[labels == -1, 0], X[labels == -1, 1], 'g.')
plot_inner_labeled, = plt.plot(X[labels == inner, 0],
X[labels == inner, 1], 'bs')
plt.legend((plot_outer_labeled, plot_inner_labeled, plot_unlabeled),
('Outer Labeled', 'Inner Labeled', 'Unlabeled'), 'upper left',
numpoints=1, shadow=False)
plt.title("Raw data (2 classes=red and blue)")
plt.subplot(1, 2, 2)
output_label_array = np.asarray(output_labels)
outer_numbers = np.where(output_label_array == outer)[0]
inner_numbers = np.where(output_label_array == inner)[0]
plot_outer, = plt.plot(X[outer_numbers, 0], X[outer_numbers, 1], 'rs')
plot_inner, = plt.plot(X[inner_numbers, 0], X[inner_numbers, 1], 'bs')
plt.legend((plot_outer, plot_inner), ('Outer Learned', 'Inner Learned'),
'upper left', numpoints=1, shadow=False)
plt.title("Labels learned with Label Spreading (KNN)")
plt.subplots_adjust(left=0.07, bottom=0.07, right=0.93, top=0.92)
plt.show()
|
circumstance/wimpy
|
refs/heads/master
|
wim.py
|
1
|
#!/usr/bin/python
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
# wim.py - gps / bluetooth listener communicates via UDP packets
# designed to interface with pure data
# (C) 2012 Tim Redfern
#
# developed exclusively for circumstance - Tomorrow the ground forgets you were here
#
# http://productofcircumstance.com/portfoliocpt/tomorrow-the-ground-forgets-you-were-here/
#
# Bugs, issues, suggestions: tim@eclectronics.org
# wim.py requires an xml config file - see example
# this details 3 types of triggers which are translated into UDP packets
# trigger types: gps scalar, gps index, bluetooth
# GPS TRIGGERS require a serial gps device sending NMEA sentences
# the gps device address is in the xml config file
# gps scalar and index triggers reference geo-located bitmap overlays
# index triggers send a pre-determined message when an area is entered
# scalar triggers send a continually varying signal by interpolating greyscale values
# BLUETOOTH TRIGGERS are similar to index triggers but send a message when a
# known bluetooth device is encountered
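# an illustrative sketch of the expected xml (element/attribute names inferred
# from the parsing below; values are hypothetical):
# <config>
#   <gpsdevice>/dev/ttyUSB0</gpsdevice>
#   <gps>
#     <index file="zones.png" ll1="52.10,-8.50" ll2="52.20,-8.40">
#       <trigger id="1" command="play" param="intro.wav"/>
#     </index>
#     <scale file="gradient.png" ll1="52.10,-8.50" ll2="52.20,-8.40" command="volume"/>
#   </gps>
#   <bt>
#     <trigger id="00:11:22:33:44:55" command="play" param="ghost.wav"/>
#   </bt>
# </config>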
import signal,sys
def signal_handler(signal, frame):
insock.close()
print "wim: interrupted"
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
from latLng import *
from layers import *
from xml2obj import *
from logger import *
if len(sys.argv)<2:
print "usage: wim.py configfile [-D debug] [-L log] [-T test]"
sys.exit(0)
Debug=False
test=False
Log=False
if len(sys.argv)>2:
if sys.argv[2]=="-D" or sys.argv[2]=="-d":
Debug=True
print "wim: DEBUG mode"
if len(sys.argv)>3:
if sys.argv[3]=="-L" or sys.argv[3]=="-l":
Log=True
print "wim: gps LOG mode"
if len(sys.argv)>4:
if sys.argv[4]=="-T" or sys.argv[4]=="-t":
test=True
print "wim: gps TEST mode"
doc=xml2obj(open(sys.argv[1]))
gpslayers=[]
#catch invalid xml
try:
for i in doc.gps.index:
#catch invalid xml
try:
g=indexlayer(i.file,i.ll1,i.ll2)
for t in i.trigger:
g.triggers.append(trigger(int(t.id),t.command,t.param))
gpslayers.append(g)
except:
print "wim: error parsing xml index entry"
except:
print "wim: no index layers found"
#catch invalid xml
try:
for i in doc.gps.scale:
#catch invalid xml
try:
g=scalelayer(i.file,i.ll1,i.ll2)
g.setcommand(i.command)
gpslayers.append(g)
except:
print "wim: error parsing xml index entry"
except:
print "wim: no scale layers found"
from gpspoller import *
gpsp=""
try:
gpsp = GpsPoller(doc.gpsdevice,test)
gpsp.start()
except:
print "wim: gps device not found"
from btscan import *
scan=scanner("127.0.0.1",5401,False)
try:
for t in doc.bt.trigger:
scan.d.dm.triggers[t.id]=(t.command,t.param)
except:
print "wim: no bluetooth triggers found"
if len(scan.d.dm.triggers) >0:
scan.start()
logger=None
if Log:
logger=log("gpslog")
import socket
GUI_IP="0.0.0.0"
GUI_PORT=5400
insock = socket.socket( socket.AF_INET, socket.SOCK_DGRAM )
insock.bind( (GUI_IP,GUI_PORT) )
insock.settimeout(0.01) #non blocking, this sets the frame rate of checking
PD_IP="127.0.0.1"
PD_PORT=5401
outsock = socket.socket( socket.AF_INET,socket.SOCK_DGRAM )
pos=latLng()
posChanged=False
gpsfix=False
while True:
data=""
try:
data, addr = insock.recvfrom(128)
if Debug:
print "wim: received:",data
pos.parse(data)
posChanged=True
except:
pass # no packet this frame (recvfrom timed out); keep polling
if gpsp!="": #gps available
if gpsp.fix>1:
gpsfix=True
outsock.sendto( "gps status 1\n", (PD_IP, PD_PORT) )
if gpsp.fix<2:
gpsfix=False
outsock.sendto( "gps status 0\n", (PD_IP, PD_PORT) )
check=gpsp.check()
if check!=False:
if Debug:
print "wim: received from gps",check[0],check[1]
outsock.sendto("gps data "+str(check[0])+" "+str(check[1])+"\n",(PD_IP, PD_PORT) )
pos=latLng(check[0],check[1])
posChanged=True
if Log:
logger.log(str(pos.lng)+","+str(pos.lat))
if posChanged:
posChanged=False
for layer in gpslayers:
r=layer.checkcoord(pos) #returns a message or None
if r!=None:
if Debug:
print "wim: sending:",str(r[0]),str(r[1])
#pd needs \n at end of message
outsock.sendto( str(r[0])+' '+str(r[1])+'\n', (PD_IP, PD_PORT) )
time.sleep(0.1)
|
openmv/micropython
|
refs/heads/master
|
tests/unicode/unicode_id.py
|
15
|
# test unicode in identifiers
# comment
# αβγδϵφζ
# global identifiers
α = 1
αβγ = 2
bβ = 3
βb = 4
print(α, αβγ, bβ, βb)
# function, argument, local identifiers
def α(β, γ):
δ = β + γ
print(β, γ, δ)
α(1, 2)
# class, method identifiers
class φ:
def __init__(self):
pass
def δ(self, ϵ):
print(ϵ)
zζzζz = φ()
if hasattr(zζzζz, "δ"):
zζzζz.δ(ϵ=123)
|
egunnar/encrypt_backup
|
refs/heads/master
|
test.py
|
1
|
#!/usr/bin/python3
''' Testing script for encrypt_backup.py '''
import unittest
import os
import subprocess
import sys
TMP_TESTING_DIR = None
TMP_TESTING_SUB_DIR = None
PASSWORD = 'testing123'
DEBUG_MODE = False
ENCRYPTED_FILE_EXT = '.gpg'
class MyTest(unittest.TestCase):
def setUp(self):
rm_dir_tree(TMP_TESTING_SUB_DIR)
def tearDown(self):
pass
def testCleanUpFolder(self):
''' Test that empty folders (after a file is deleted) are deleted'''
base_folder = TMP_TESTING_SUB_DIR + '/testCleanUpFolder/base'
target_folder = TMP_TESTING_SUB_DIR + '/testCleanUpFolder/target'
config_dict = {
'base_folder': base_folder,
'target_folder': target_folder
}
file1_full_base_directory = base_folder + '/aaa/bbb/ccc/ddd'
file1_base = file1_full_base_directory + '/file1.txt'
file1_full_target_directory = target_folder + '/aaa/bbb/ccc/ddd'
file_contents = 'bla bla'
create_or_modify_file(file1_base, file_contents)
file2_full_base_directory = base_folder + '/aaa/bbb/alt'
file2_base = file2_full_base_directory + '/file2.txt'
file2_full_target_directory = target_folder + '/aaa/bbb/alt'
file2_target = file2_full_target_directory + '/file2.txt' + \
ENCRYPTED_FILE_EXT
file_contents = 'bla bla'
create_or_modify_file(file2_base, file_contents)
run_encrypt_backup_wo_error(config_dict)
# sanity test
self.assertTrue(os.path.exists(file1_full_target_directory),
"The first file's path ({}) exists.".format(
file1_full_target_directory))
self.assertTrue(os.path.exists(file2_full_target_directory),
"The seconds file's path ({}) exists.".format(
file2_full_target_directory))
os.unlink(file1_base)
run_encrypt_backup_wo_error(config_dict)
deleted_folder = target_folder + '/aaa/bbb/ccc'
self.assertFalse(os.path.exists(deleted_folder), '{} is gone'.format(
deleted_folder))
self.assertTrue(os.path.exists(file2_target),
'{} file is still there.'.format(file2_target))
def testMovedFile(self):
''' Test moving a file '''
base_folder = TMP_TESTING_SUB_DIR + '/testMovedFile/base'
target_folder = TMP_TESTING_SUB_DIR + '/testMovedFile/target'
config_dict = {
'base_folder': base_folder,
'target_folder': target_folder
}
file1_base = base_folder + '/xxx/file1.txt'
file1_target = target_folder + '/xxx/file1.txt' + ENCRYPTED_FILE_EXT
file1_contents = 'this is file 1.'
create_or_modify_file(file1_base, file1_contents)
run_encrypt_backup_wo_error(config_dict)
new_file1_base = base_folder + '/xxx/new_file1.txt'
new_file1_target = target_folder + '/xxx/new_file1.txt' + \
ENCRYPTED_FILE_EXT
os.rename(file1_base, new_file1_base)
run_encrypt_backup_wo_error(config_dict)
self.assertTrue(os.path.exists(new_file1_target),
'The new file exists in the new location')
self.assertFalse(os.path.exists(file1_target),
'The file no longer exists in the old location')
def testAllBasic(self):
''' Test adding a file, removing 1, and updating 1'''
base_folder = TMP_TESTING_SUB_DIR + '/testallbasic/base'
target_folder = TMP_TESTING_SUB_DIR + '/testallbasic/target'
config_dict = {
'base_folder': base_folder,
'target_folder': target_folder
}
file1_base = base_folder + '/xxx/file1.txt'
file1_target = target_folder + '/xxx/file1.txt' + ENCRYPTED_FILE_EXT
file1_contents = 'this is file 1.'
create_or_modify_file(file1_base, file1_contents)
run_encrypt_backup_wo_error(config_dict)
self.assertTrue(
is_encrypt_as(file1_target, file1_contents),
'First file is encrypted in target directory')
# add a new file
file2_base = base_folder + '/yyy/file2.txt'
file2_target = target_folder + '/yyy/file2.txt' + ENCRYPTED_FILE_EXT
file2_contents = 'this is file 2.'
create_or_modify_file(file2_base, file2_contents)
run_encrypt_backup_wo_error(config_dict)
self.assertTrue(
is_encrypt_as(file1_target, file1_contents),
'First file still is encrypted in target directory')
self.assertTrue(
is_encrypt_as(file2_target, file2_contents),
'2nd file is encrypted in target directory')
# modify file
file1_contents = 'new file1 contents here!'
create_or_modify_file(file1_base, file1_contents)
run_encrypt_backup_wo_error(config_dict)
self.assertTrue(
is_encrypt_as(file1_target, file1_contents),
'First file has changed contents')
self.assertTrue(
is_encrypt_as(file2_target, file2_contents),
'2nd file is encrypted in target directory just like before')
# remove file
print('REMOVING:{}'.format(file2_base))
os.unlink(file2_base)
run_encrypt_backup_wo_error(config_dict)
self.assertTrue(
is_encrypt_as(file1_target, file1_contents),
'First file is still there with expected contents.')
self.assertFalse(os.path.exists(file2_target),
'File 2 is removed.')
def testFirstRun(self):
''' Test with nothing to do (2 runs).'''
config_dict = {
'base_folder': TMP_TESTING_SUB_DIR + '/testFirstRun/base/tmp',
'target_folder': TMP_TESTING_SUB_DIR + '/testFirstRun/target/tmp'
}
result = run_encrypt_back_program(config_dict)
self.assertEqual(result['ret_val'], 0, 'first empty run ok')
result = run_encrypt_back_program(config_dict)
self.assertEqual(result['ret_val'], 0, 'second empty run ok')
def run_encrypt_backup_wo_error(config_dict):
result = run_encrypt_back_program(config_dict)
if result['ret_val'] != 0:
raise Exception('run failed and returned:{}'.format(
result['ret_val']))
def run_encrypt_back_program(config_dict):
debug('in run_encrypt_back_program')
for mandatory_param in ('base_folder', 'target_folder'):
if mandatory_param not in config_dict:
raise Exception("don't call this function without config_dict\
['{}']".format(mandatory_param))
config_dict.setdefault('file_extension', ENCRYPTED_FILE_EXT)
config_dict.setdefault('debug_mode', 'true')
config_dict.setdefault('password', PASSWORD)
config_file_name = TMP_TESTING_DIR + '/test_config_file.conf'
config_file = open(config_file_name, 'w')
for key, value in config_dict.items():
config_file.write('{}={}\n'.format(key, value))
config_file.close()
cmd = 'python encrypt_backup.py {}'.format(config_file_name)
return run_program(cmd)
def run_program(cmd):
debug('\trunning:' + cmd)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=True)
ret_val = p.wait()
stdout = p.stdout.read().decode('utf-8')
stderr = p.stderr.read().decode('utf-8')
p.stdout.close()
p.stderr.close()
if ret_val != 0:
debug('\t' + cmd + ' failed')
else:
debug('\t' + cmd + ' worked')
debug('\tstdout:' + stdout)
debug('\tstderr:' + stderr)
return {'ret_val':ret_val, 'stdout':stdout, 'stderr':stderr}
def create_or_modify_file(file_name, contents):
containing_dir = os.path.dirname(file_name)
debug('containing_dir:' + containing_dir)
if not os.path.exists(containing_dir):
debug('making dir:' + containing_dir)
os.makedirs(containing_dir, exist_ok=True)
debug('making file:' + file_name)
fh = open(file_name, 'w')
fh.write(contents)
fh.close()
def is_encrypt_as(file_name, contents):
# sends the result to stdout
# echo 'sec3rt p@ssworD' | gpg --batch --passphrase-fd 0
# --decrypt secert3.txt.gpg
result = run_program("echo '{}' | gpg --batch --passphrase-fd 0 \
--decrypt {}".format(PASSWORD, file_name))
return result['stdout'] == contents
def rm_dir_tree(path):
# because of protected files in git and other reasons I don't
# use shutil.rmtree
debug('attempting to remove:' + path)
if os.path.exists(path):
run_program('rm -rf {}'.format(path))
def debug(debug_str):
if DEBUG_MODE == True:
sys.stderr.write(debug_str + '\n')
if __name__ == '__main__':
if len(sys.argv) == 1:
TMP_TESTING_DIR = os.getcwd()
elif len(sys.argv) == 2:
TMP_TESTING_DIR = sys.argv[1]
else:
sys.stderr.write('Usage: one optional argument that is temp working \
directory for this program\n')
sys.exit(1)
TMP_TESTING_SUB_DIR = TMP_TESTING_DIR + '/testing'
# command line args screw up the unittest module (yes, weird but true)
del sys.argv[1:]
unittest.main()
|
2bam/afe
|
refs/heads/master
|
machine_config.py
|
1
|
# Location specific
import os
import ctypes
# NOTE: We use a false fullscreen because real fullscreen stays always-on-top, covering games
faux_fullscreen = True
if faux_fullscreen:
window_size = dict(width=ctypes.windll.user32.GetSystemMetrics(0), height=ctypes.windll.user32.GetSystemMetrics(1)) #windows-only
else:
window_size = dict(width=1280, height=720)
machine = { 'name': 'ArgentronW'
, 'code': 'AGT'
, 'location': 'Niceto'
}
update_folder = os.path.abspath('update_folder') # Source folder for compressed packages. Must be absolute path.
games_folder = os.path.abspath('games_folder') # Destination folder for uncompressed games. Must be absolute path.
timeout_kill_proc = 90 # Seconds of inactivity until the program kills the game process for no input. 'None' to disable timeout. Suggested 90 for party venues
timeout_attract_mode = 90 # Seconds of inactivity until the attract mode video is started (if configured)
timeout_accountability = 10 # Seconds after no input time to stop counting time for the game's stats
|
yantrabuddhi/FreeCAD
|
refs/heads/master
|
src/Mod/PartDesign/InitGui.py
|
17
|
# PartDesign gui init module
# (c) 2003 Juergen Riegel
#
# Gathering all the information to start FreeCAD
# This is the second one of three init scripts, the third one
# runs when the gui is up
#***************************************************************************
#* (c) Juergen Riegel (juergen.riegel@web.de) 2002 *
#* *
#* This file is part of the FreeCAD CAx development system. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* FreeCAD is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Lesser General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with FreeCAD; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#* Juergen Riegel 2002 *
#***************************************************************************/
class PartDesignWorkbench ( Workbench ):
"PartDesign workbench object"
Icon = """
/* XPM */
static char * partdesign_xpm[] = {
"16 16 9 1",
" c None",
". c #040006",
"+ c #070F38",
"@ c #002196",
"# c #0030F3",
"$ c #5A4D20",
"% c #858EB2",
"& c #DEB715",
"* c #BFB99D",
" & ........ ",
"&&&$..@@@@@@+...",
"&&&&$@#####@..@.",
"&&&&&$......@#@.",
"&&&&&&@@@+.###@.",
"$&&&&&&@#@.###@.",
".$&&&&&%#@.###@.",
".@*&&&*%#@.###@.",
".@#*&**%#@.###@.",
".@#@%%%.@@.###@.",
".@@@@@@@#@.###@.",
".@#######@.###@.",
".@#######@.##+. ",
".+@@@####@.@.. ",
" ......+++.. ",
" ... "};
"""
MenuText = "Part Design"
ToolTip = "Part Design workbench"
def Initialize(self):
# load the module
try:
from WizardShaft import WizardShaft
except ImportError:
print "Wizard shaft module cannot be loaded"
import PartDesignGui
import PartDesign
try:
import InvoluteGearFeature
except ImportError:
print "Involute gear module cannot be loaded"
def GetClassName(self):
return "PartDesignGui::Workbench"
Gui.addWorkbench(PartDesignWorkbench())
|
xiandiancloud/edx-platform-Y
|
refs/heads/master
|
common/test/acceptance/pages/lms/matlab_problem.py
|
179
|
"""
Matlab Problem Page.
"""
from bok_choy.page_object import PageObject
class MatlabProblemPage(PageObject):
"""
View of matlab problem page.
"""
url = None
def is_browser_on_page(self):
return self.q(css='.ungraded-matlab-result').present
@property
def problem_name(self):
"""
Return the current problem name.
"""
return self.q(css='.problem-header').text[0]
def set_response(self, response_str):
"""
Input a response to the prompt.
"""
input_script = "$('.CodeMirror')[0].CodeMirror.setValue('{}');".format(response_str)
self.browser.execute_script(input_script)
def click_run_code(self):
"""
Click the run code button.
"""
self.q(css='input.save').click()
self.wait_for_ajax()
def get_grader_msg(self, class_name):
"""
Returns the text value of given class.
"""
self.wait_for_ajax()
return self.q(css=class_name).text
|
jayceyxc/hue
|
refs/heads/master
|
desktop/libs/dashboard/src/dashboard/models.py
|
1
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import itertools
import json
import logging
import numbers
import re
from django.core.urlresolvers import reverse
from django.utils.html import escape
from django.utils.translation import ugettext as _
from desktop.lib.i18n import smart_unicode, smart_str
from desktop.models import get_data_link
from dashboard.dashboard_api import get_engine
LOG = logging.getLogger(__name__)
class Collection2(object):
def __init__(self, user, name='Default', data=None, document=None, engine='solr'):
self.document = document
if document is not None:
self.data = json.loads(document.data)
elif data is not None:
self.data = json.loads(data)
else:
self.data = {
'collection': self.get_default(user, name, engine),
'layout': []
}
def get_json(self, user):
return json.dumps(self.get_props(user))
def get_props(self, user):
props = self.data
if self.document is not None:
props['collection']['id'] = self.document.id
props['collection']['label'] = self.document.name
props['collection']['description'] = self.document.description
# For backward compatibility
if 'rows' not in props['collection']['template']:
props['collection']['template']['rows'] = 25
if 'showGrid' not in props['collection']['template']:
props['collection']['template']['showGrid'] = True
if 'showChart' not in props['collection']['template']:
props['collection']['template']['showChart'] = False
if 'chartSettings' not in props['collection']['template']:
props['collection']['template']['chartSettings'] = {
'chartType': 'bars',
'chartSorting': 'none',
'chartScatterGroup': None,
'chartScatterSize': None,
'chartScope': 'world',
'chartX': None,
'chartYSingle': None,
'chartYMulti': [],
'chartData': [],
'chartMapLabel': None,
}
if 'enabled' not in props['collection']:
props['collection']['enabled'] = True
if 'engine' not in props['collection']:
props['collection']['engine'] = 'solr'
if 'leafletmap' not in props['collection']['template']:
props['collection']['template']['leafletmap'] = {'latitudeField': None, 'longitudeField': None, 'labelField': None}
if 'timeFilter' not in props['collection']:
props['collection']['timeFilter'] = {
'field': '',
'type': 'rolling',
'value': 'all',
'from': '',
'to': '',
'truncate': True
}
if 'suggest' not in props['collection']:
props['collection']['suggest'] = {'enabled': False, 'dictionary': ''}
for field in props['collection']['template']['fieldsAttributes']:
if 'type' not in field:
field['type'] = 'string'
if 'nested' not in props['collection']:
props['collection']['nested'] = {
'enabled': False,
'schema': []
}
for facet in props['collection']['facets']:
properties = facet['properties']
if 'gap' in properties and not 'initial_gap' in properties:
properties['initial_gap'] = properties['gap']
if 'start' in properties and not 'initial_start' in properties:
properties['initial_start'] = properties['start']
if 'end' in properties and not 'initial_end' in properties:
properties['initial_end'] = properties['end']
if 'domain' not in properties:
properties['domain'] = {'blockParent': [], 'blockChildren': []}
if facet['widgetType'] == 'histogram-widget':
if 'timelineChartType' not in properties:
properties['timelineChartType'] = 'bar'
if 'enableSelection' not in properties:
properties['enableSelection'] = True
if 'extraSeries' not in properties:
properties['extraSeries'] = []
if facet['widgetType'] == 'map-widget' and facet['type'] == 'field':
facet['type'] = 'pivot'
properties['facets'] = []
properties['facets_form'] = {'field': '', 'mincount': 1, 'limit': 5}
if 'qdefinitions' not in props['collection']:
props['collection']['qdefinitions'] = []
return props
def get_default(self, user, name, engine='solr'):
fields = self.fields_data(user, name, engine)
id_field = [field['name'] for field in fields if field.get('isId')]
if id_field:
id_field = id_field[0]
else:
id_field = '' # Schemaless might not have an id
TEMPLATE = {
"extracode": escape("<style type=\"text/css\">\nem {\n font-weight: bold;\n background-color: yellow;\n}</style>\n\n<script>\n</script>"),
"highlighting": [""],
"properties": {"highlighting_enabled": True},
"template": """
<div class="row-fluid">
<div class="row-fluid">
<div class="span12">%s</div>
</div>
<br/>
</div>""" % ' '.join(['{{%s}}' % field['name'] for field in fields]),
"isGridLayout": True,
"showFieldList": True,
"showGrid": True,
"showChart": False,
"chartSettings" : {
'chartType': 'bars',
'chartSorting': 'none',
'chartScatterGroup': None,
'chartScatterSize': None,
'chartScope': 'world',
'chartX': None,
'chartYSingle': None,
'chartYMulti': [],
'chartData': [],
'chartMapLabel': None,
},
"fieldsAttributes": [self._make_gridlayout_header_field(field) for field in fields],
"fieldsSelected": [],
"leafletmap": {'latitudeField': None, 'longitudeField': None, 'labelField': None},
"rows": 25,
}
FACETS = []
return {
'id': None,
'name': name,
'engine': engine,
'label': name,
'enabled': False,
'template': TEMPLATE,
'facets': FACETS,
'fields': fields,
'idField': id_field,
}
@classmethod
def _make_field(cls, field, attributes):
return {
'name': str(escape(field)),
'type': str(attributes.get('type', '')),
'isId': attributes.get('required') and attributes.get('uniqueKey'),
'isDynamic': 'dynamicBase' in attributes
}
@classmethod
def _make_gridlayout_header_field(cls, field, isDynamic=False):
return {'name': field['name'], 'type': field['type'], 'sort': {'direction': None}, 'isDynamic': isDynamic}
@classmethod
def _make_luke_from_schema_fields(cls, schema_fields):
return dict([
(f['name'], {
'copySources': [],
'type': f['type'],
'required': True,
'uniqueKey': f.get('uniqueKey'),
'flags': u'%s-%s-----OF-----l' % ('I' if f['indexed'] else '-', 'S' if f['stored'] else '-'), u'copyDests': []
})
for f in schema_fields['fields']
])
def get_absolute_url(self):
return reverse('search:index') + '?collection=%s' % self.id
def fields(self, user):
return sorted([str(field.get('name', '')) for field in self.fields_data(user)])
def fields_data(self, user, name, engine='solr'):
api = get_engine(user, engine)
try:
schema_fields = api.fields(name)
schema_fields = schema_fields['schema']['fields']
except Exception, e:
LOG.warn('/luke call did not succeed: %s' % e)
fields = api.schema_fields(name)
schema_fields = Collection2._make_luke_from_schema_fields(fields)
return sorted([self._make_field(field, attributes) for field, attributes in schema_fields.iteritems()])
def update_data(self, post_data):
data_dict = self.data
data_dict.update(post_data)
self.data = data_dict
@property
def autocomplete(self):
return self.data['autocomplete']
@autocomplete.setter
def autocomplete(self, autocomplete):
properties_ = self.data
properties_['autocomplete'] = autocomplete
self.data = json.dumps(properties_)
@classmethod
def get_field_list(cls, collection):
if collection['template']['fieldsSelected'] and collection['template']['isGridLayout']:
fields = set(collection['template']['fieldsSelected'] + ([collection['idField']] if collection['idField'] else []))
# Add field if needed
if collection['template']['leafletmap'].get('latitudeField'):
fields.add(collection['template']['leafletmap']['latitudeField'])
if collection['template']['leafletmap'].get('longitudeField'):
fields.add(collection['template']['leafletmap']['longitudeField'])
if collection['template']['leafletmap'].get('labelField'):
fields.add(collection['template']['leafletmap']['labelField'])
return list(fields)
else:
return ['*']
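# Illustrative behaviour of get_field_list (values are hypothetical): with
# fieldsSelected=['name'], idField='id' and a leafletmap using
# latitudeField='lat', the result contains 'name', 'id' and 'lat' (in
# arbitrary order, since a set is used); without an explicit selection or
# with a non-grid layout it falls back to ['*'].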
def get_facet_field(category, field, facets):
if category in ('nested', 'function'):
id_pattern = '%(id)s'
else:
id_pattern = '%(field)s-%(id)s'
facets = filter(lambda facet: facet['type'] == category and id_pattern % facet == field, facets)
if facets:
return facets[0]
else:
return None
def pairwise2(field, fq_filter, iterable):
pairs = []
selected_values = [f['value'] for f in fq_filter]
a, b = itertools.tee(iterable)
for element in a:
pairs.append({
'cat': field,
'value': element,
'count': next(a),
'selected': element in selected_values,
'exclude': all([f['exclude'] for f in fq_filter if f['value'] == element])
})
return pairs
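# Illustrative input/output for pairwise2 (values are hypothetical): Solr
# returns field facet counts as a flat [value, count, ...] list such as
# ['apple', 10, 'banana', 3], which becomes
#   [{'cat': field, 'value': 'apple', 'count': 10, ...},
#    {'cat': field, 'value': 'banana', 'count': 3, ...}]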
def range_pair(field, cat, fq_filter, iterable, end, collection_facet):
# e.g. counts":["0",17430,"1000",1949,"2000",671,"3000",404,"4000",243,"5000",165],"gap":1000,"start":0,"end":6000}
pairs = []
selected_values = [f['value'] for f in fq_filter]
is_single_unit_gap = re.match('^[\+\-]?1[A-Za-z]*$', str(collection_facet['properties']['gap'])) is not None
is_up = collection_facet['properties']['sort'] == 'asc'
if collection_facet['properties']['sort'] == 'asc' and (collection_facet['type'] == 'range-up' or collection_facet['properties'].get('type') == 'range-up'):
prev = None
n = []
for e in iterable:
if prev is not None:
n.append(e)
n.append(prev)
prev = None
else:
prev = e
iterable = n
iterable.reverse()
a, to = itertools.tee(iterable)
next(to, None)
counts = iterable[1::2]
total_counts = counts.pop(0) if collection_facet['properties']['sort'] == 'asc' else 0
for element in a:
next(to, None)
to_value = next(to, end)
count = next(a)
pairs.append({
'field': field, 'from': element, 'value': count, 'to': to_value, 'selected': element in selected_values,
'exclude': all([f['exclude'] for f in fq_filter if f['value'] == element]),
'is_single_unit_gap': is_single_unit_gap,
'total_counts': total_counts,
'is_up': is_up
})
total_counts += counts.pop(0) if counts else 0
if collection_facet['properties']['sort'] == 'asc' and collection_facet['type'] != 'range-up' and collection_facet['properties'].get('type') != 'range-up':
pairs.reverse()
return pairs
def augment_solr_response(response, collection, query):
augmented = response
augmented['normalized_facets'] = []
NAME = '%(field)s-%(id)s'
normalized_facets = []
selected_values = dict([(fq['id'], fq['filter']) for fq in query['fqs']])
if response and response.get('facet_counts'):
for facet in collection['facets']:
category = facet['type']
if category == 'field' and response['facet_counts']['facet_fields']:
name = NAME % facet
collection_facet = get_facet_field(category, name, collection['facets'])
counts = pairwise2(facet['field'], selected_values.get(facet['id'], []), response['facet_counts']['facet_fields'][name])
if collection_facet['properties']['sort'] == 'asc':
counts.reverse()
facet = {
'id': collection_facet['id'],
'field': facet['field'],
'type': category,
'label': collection_facet['label'],
'counts': counts,
}
normalized_facets.append(facet)
elif (category == 'range' or category == 'range-up') and response['facet_counts']['facet_ranges']:
name = NAME % facet
collection_facet = get_facet_field(category, name, collection['facets'])
counts = response['facet_counts']['facet_ranges'][name]['counts']
end = response['facet_counts']['facet_ranges'][name]['end']
counts = range_pair(facet['field'], name, selected_values.get(facet['id'], []), counts, end, collection_facet)
facet = {
'id': collection_facet['id'],
'field': facet['field'],
'type': category,
'label': collection_facet['label'],
'counts': counts,
'extraSeries': []
}
normalized_facets.append(facet)
elif category == 'query' and response['facet_counts']['facet_queries']:
for name, value in response['facet_counts']['facet_queries'].iteritems():
collection_facet = get_facet_field(category, name, collection['facets'])
facet = {
'id': collection_facet['id'],
'query': name,
'type': category,
'label': name,
'counts': value,
}
normalized_facets.append(facet)
elif category == 'pivot':
name = NAME % facet
if 'facet_pivot' in response['facet_counts'] and name in response['facet_counts']['facet_pivot']:
if facet['properties']['scope'] == 'stack':
count = _augment_pivot_2d(name, facet['id'], response['facet_counts']['facet_pivot'][name], selected_values)
else:
count = response['facet_counts']['facet_pivot'][name]
_augment_pivot_nd(facet['id'], count, selected_values)
else:
count = []
facet = {
'id': facet['id'],
'field': name,
'type': category,
'label': name,
'counts': count,
}
normalized_facets.append(facet)
if response and response.get('facets'):
for facet in collection['facets']:
category = facet['type']
name = facet['id'] # Nested facets can only have one name
if category == 'function' and name in response['facets']:
value = response['facets'][name]
collection_facet = get_facet_field(category, name, collection['facets'])
facet = {
'id': collection_facet['id'],
'query': name,
'type': category,
'label': name,
'counts': value,
}
normalized_facets.append(facet)
elif category == 'nested' and name in response['facets']:
value = response['facets'][name]
collection_facet = get_facet_field(category, name, collection['facets'])
extraSeries = []
counts = response['facets'][name]['buckets']
cols = ['%(field)s' % facet, 'count(%(field)s)' % facet]
last_x_col = 0
last_xx_col = 0
for i, f in enumerate(facet['properties']['facets']):
if f['aggregate']['function'] == 'count':
cols.append(f['field'])
last_xx_col = last_x_col
last_x_col = i + 2
from libsolr.api import SolrApi
cols.append(SolrApi._get_aggregate_function(f))
rows = []
# For dim in dimensions
# Number or Date range
if collection_facet['properties']['canRange'] and not facet['properties'].get('type') == 'field':
dimension = 3 if collection_facet['properties']['isDate'] else 1
# Single dimension or dimension 2 with analytics
if not collection_facet['properties']['facets'] or (collection_facet['properties']['facets'][0]['aggregate']['function'] != 'count' and len(collection_facet['properties']['facets']) == 1):
column = 'count'
if len(collection_facet['properties']['facets']) == 1:
agg_keys = [key for key, value in counts[0].items() if key.lower().startswith('agg_')]
legend = agg_keys[0].split(':', 2)[1]
column = agg_keys[0]
else:
legend = facet['field'] # 'count(%s)' % legend
agg_keys = [column]
_augment_stats_2d(name, facet, counts, selected_values, agg_keys, rows)
counts = [_v for _f in counts for _v in (_f['val'], _f[column])]
counts = range_pair(facet['field'], name, selected_values.get(facet['id'], []), counts, 1, collection_facet)
else:
# Dimension 1 with counts and 2 with analytics
agg_keys = [key for key, value in counts[0].items() if key.lower().startswith('agg_') or key.lower().startswith('dim_')]
agg_keys.sort(key=lambda a: a[4:])
if len(agg_keys) == 1 and agg_keys[0].lower().startswith('dim_'):
agg_keys.insert(0, 'count')
counts = _augment_stats_2d(name, facet, counts, selected_values, agg_keys, rows)
_series = collections.defaultdict(list)
for row in rows:
for i, cell in enumerate(row):
if i > last_x_col:
legend = cols[i]
if last_xx_col != last_x_col:
legend = '%s %s' % (cols[i], row[last_x_col])
_series[legend].append(row[last_xx_col])
_series[legend].append(cell)
for _name, val in _series.iteritems():
_c = range_pair(facet['field'], _name, selected_values.get(facet['id'], []), val, 1, collection_facet)
extraSeries.append({'counts': _c, 'label': _name})
counts = []
elif collection_facet['properties'].get('isOldPivot'):
facet_fields = [collection_facet['field']] + [f['field'] for f in collection_facet['properties'].get('facets', []) if f['aggregate']['function'] == 'count']
column = 'count'
agg_keys = [key for key, value in counts[0].items() if key.lower().startswith('agg_') or key.lower().startswith('dim_')]
agg_keys.sort(key=lambda a: a[4:])
if len(agg_keys) == 1 and agg_keys[0].lower().startswith('dim_'):
agg_keys.insert(0, 'count')
counts = _augment_stats_2d(name, facet, counts, selected_values, agg_keys, rows)
#_convert_nested_to_augmented_pivot_nd(facet_fields, facet['id'], count, selected_values, dimension=2)
dimension = len(facet_fields)
elif not collection_facet['properties']['facets'] or (collection_facet['properties']['facets'][0]['aggregate']['function'] != 'count' and len(collection_facet['properties']['facets']) == 1):
# Dimension 1 with 1 count or agg
dimension = 1
column = 'count'
if len(collection_facet['properties']['facets']) == 1:
agg_keys = [key for key, value in counts[0].items() if key.lower().startswith('agg_')]
legend = agg_keys[0].split(':', 2)[1]
column = agg_keys[0]
else:
legend = facet['field']
agg_keys = [column]
_augment_stats_2d(name, facet, counts, selected_values, agg_keys, rows)
counts = [_v for _f in counts for _v in (_f['val'], _f[column])]
counts = pairwise2(legend, selected_values.get(facet['id'], []), counts)
else:
# Dimension 2 with analytics or 1 with N aggregates
dimension = 2
agg_keys = [key for key, value in counts[0].items() if key.lower().startswith('agg_') or key.lower().startswith('dim_')]
agg_keys.sort(key=lambda a: a[4:])
if len(agg_keys) == 1 and agg_keys[0].lower().startswith('dim_'):
agg_keys.insert(0, 'count')
counts = _augment_stats_2d(name, facet, counts, selected_values, agg_keys, rows)
actual_dimension = 1 + sum([_f['aggregate']['function'] == 'count' for _f in collection_facet['properties']['facets']])
counts = filter(lambda a: len(a['fq_fields']) == actual_dimension, counts)
num_bucket = response['facets'][name]['numBuckets'] if 'numBuckets' in response['facets'][name] else len(response['facets'][name])
facet = {
'id': collection_facet['id'],
'field': facet['field'],
'type': category,
'label': collection_facet['label'],
'counts': counts,
'extraSeries': extraSeries,
'dimension': dimension,
'response': {'response': {'start': 0, 'numFound': num_bucket}}, # Todo * nested buckets + offsets
'docs': [dict(zip(cols, row)) for row in rows],
'fieldsAttributes': [Collection2._make_gridlayout_header_field({'name': col, 'type': 'aggr' if '(' in col else 'string'}) for col in cols]
}
normalized_facets.append(facet)
# Remove unnecessary facet data
if response:
response.pop('facet_counts')
response.pop('facets')
augment_response(collection, query, response)
if normalized_facets:
augmented['normalized_facets'].extend(normalized_facets)
return augmented
def augment_response(collection, query, response):
# HTML escaping
if not query.get('download'):
id_field = collection.get('idField', '')
for doc in response['response']['docs']:
link = None
if 'link-meta' in doc:
meta = json.loads(doc['link-meta'])
link = get_data_link(meta)
elif 'link' in doc:
meta = {'type': 'link', 'link': doc['link']}
link = get_data_link(meta)
for field, value in doc.iteritems():
if isinstance(value, numbers.Number):
escaped_value = value
elif field == '_childDocuments_': # Nested documents
escaped_value = value
elif isinstance(value, list): # Multivalue field
escaped_value = [smart_unicode(escape(val), errors='replace') for val in value]
else:
value = smart_unicode(value, errors='replace')
escaped_value = escape(value)
doc[field] = escaped_value
doc['externalLink'] = link
doc['details'] = []
doc['hueId'] = smart_unicode(doc.get(id_field, ''))
highlighted_fields = response.get('highlighting', {}).keys()
if highlighted_fields and not query.get('download'):
id_field = collection.get('idField')
if id_field:
for doc in response['response']['docs']:
if id_field in doc and smart_unicode(doc[id_field]) in highlighted_fields:
highlighting = response['highlighting'][smart_unicode(doc[id_field])]
if highlighting:
escaped_highlighting = {}
for field, hls in highlighting.iteritems():
_hls = [escape(smart_unicode(hl, errors='replace')).replace('&lt;em&gt;', '<em>').replace('&lt;/em&gt;', '</em>') for hl in hls]
escaped_highlighting[field] = _hls[0] if len(_hls) == 1 else _hls
doc.update(escaped_highlighting)
else:
response['warning'] = _("The Solr schema requires an id field for performing the result highlighting")
def _augment_pivot_2d(name, facet_id, counts, selected_values):
values = set()
for dimension in counts:
for pivot in dimension['pivot']:
values.add(pivot['value'])
values = sorted(list(values))
augmented = []
for dimension in counts:
count = {}
pivot_field = ''
for pivot in dimension['pivot']:
count[pivot['value']] = pivot['count']
pivot_field = pivot['field']
for val in values:
fq_values = [dimension['value'], val]
fq_fields = [dimension['field'], pivot_field]
fq_filter = selected_values.get(facet_id, [])
_selected_values = [f['value'] for f in fq_filter]
augmented.append({
"count": count.get(val, 0),
"value": val,
"cat": dimension['value'],
'selected': fq_values in _selected_values,
'exclude': all([f['exclude'] for f in fq_filter if f['value'] == val]),
'fq_fields': fq_fields,
'fq_values': fq_values,
})
return augmented
def _augment_stats_2d(name, facet, counts, selected_values, agg_keys, rows):
fq_fields = []
fq_values = []
fq_filter = []
_selected_values = [f['value'] for f in selected_values.get(facet['id'], [])]
_fields = [facet['field']] + [facet['field'] for facet in facet['properties']['facets']]
return __augment_stats_2d(counts, facet['field'], fq_fields, fq_values, fq_filter, _selected_values, _fields, agg_keys, rows)
# Clear one dimension
def __augment_stats_2d(counts, label, fq_fields, fq_values, fq_filter, _selected_values, _fields, agg_keys, rows):
augmented = []
for bucket in counts: # For each dimension, go through each bucket and pick up the counts or aggregates, then go recursively in the next dimension
val = bucket['val']
count = bucket['count']
dim_row = [val]
_fq_fields = fq_fields + _fields[0:1]
_fq_values = fq_values + [val]
for agg_key in agg_keys:
if agg_key == 'count':
dim_row.append(count)
augmented.append(_get_augmented(count, val, label, _fq_values, _fq_fields, fq_filter, _selected_values))
elif agg_key.startswith('agg_'):
label = fq_values[0] if len(_fq_fields) >= 2 else agg_key.split(':', 2)[1]
if agg_keys.index(agg_key) == 0: # One count by dimension
dim_row.append(count)
dim_row.append(bucket[agg_key])
augmented.append(_get_augmented(bucket[agg_key], val, label, _fq_values, _fq_fields, fq_filter, _selected_values))
else:
augmented.append(_get_augmented(count, val, label, _fq_values, _fq_fields, fq_filter, _selected_values)) # Needed?
# Recurse into the next dimension's buckets
_agg_keys = [key for key, value in bucket[agg_key]['buckets'][0].items() if key.lower().startswith('agg_') or key.lower().startswith('dim_')]
_agg_keys.sort(key=lambda a: a[4:])
if not _agg_keys or len(_agg_keys) == 1 and _agg_keys[0].lower().startswith('dim_'):
_agg_keys.insert(0, 'count')
next_dim = []
new_rows = []
augmented += __augment_stats_2d(bucket[agg_key]['buckets'], val, _fq_fields, _fq_values, fq_filter, _selected_values, _fields[1:], _agg_keys, next_dim)
for row in next_dim:
new_rows.append(dim_row + row)
dim_row = new_rows
if dim_row and type(dim_row[0]) == list:
rows.extend(dim_row)
else:
rows.append(dim_row)
return augmented
def _get_augmented(count, val, label, fq_values, fq_fields, fq_filter, _selected_values):
return {
"count": count,
"value": val,
"cat": label,
'selected': fq_values in _selected_values,
'exclude': all([f['exclude'] for f in fq_filter if f['value'] == val]),
'fq_fields': fq_fields,
'fq_values': fq_values
}
def _augment_pivot_nd(facet_id, counts, selected_values, fields='', values=''):
for c in counts:
fq_fields = (fields if fields else []) + [c['field']]
fq_values = (values if values else []) + [smart_str(c['value'])]
if 'pivot' in c:
_augment_pivot_nd(facet_id, c['pivot'], selected_values, fq_fields, fq_values)
fq_filter = selected_values.get(facet_id, [])
_selected_values = [f['value'] for f in fq_filter]
c['selected'] = fq_values in _selected_values
c['exclude'] = False
c['fq_fields'] = fq_fields
c['fq_values'] = fq_values
def _convert_nested_to_augmented_pivot_nd(facet_fields, facet_id, counts, selected_values, fields='', values='', dimension=2):
for c in counts['buckets']:
c['field'] = facet_fields[0]
fq_fields = (fields if fields else []) + [c['field']]
fq_values = (values if values else []) + [smart_str(c['val'])]
c['value'] = c.pop('val')
bucket = 'd%s' % dimension
if bucket in c:
next_dimension = facet_fields[1:]
if next_dimension:
_convert_nested_to_augmented_pivot_nd(next_dimension, facet_id, c[bucket], selected_values, fq_fields, fq_values, dimension=dimension+1)
c['pivot'] = c.pop(bucket)['buckets']
else:
c['count'] = c.pop(bucket)
fq_filter = selected_values.get(facet_id, [])
_selected_values = [f['value'] for f in fq_filter]
c['selected'] = fq_values in _selected_values
c['exclude'] = False
c['fq_fields'] = fq_fields
c['fq_values'] = fq_values
def augment_solr_exception(response, collection):
response.update(
{
"facet_counts": {
},
"highlighting": {
},
"normalized_facets": [
{
"field": facet['field'],
"counts": [],
"type": facet['type'],
"label": facet['label']
}
for facet in collection['facets']
],
"responseHeader": {
"status": -1,
"QTime": 0,
"params": {
}
},
"response": {
"start": 0,
"numFound": 0,
"docs": [
]
}
})
|
dataxu/ansible
|
refs/heads/dx-stable-2.5
|
lib/ansible/utils/module_docs_fragments/dimensiondata.py
|
192
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Dimension Data
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# - Adam Friedman <tintoy@tintoy.io>
class ModuleDocFragment(object):
# Dimension Data doc fragment
DOCUMENTATION = '''
options:
region:
description:
- The target region.
choices:
- Regions are defined in Apache libcloud project [libcloud/common/dimensiondata.py]
- They are also listed in U(https://libcloud.readthedocs.io/en/latest/compute/drivers/dimensiondata.html)
- Note that the default value "na" stands for "North America".
- The module prepends 'dd-' to the region choice.
default: na
mcp_user:
description:
- The username used to authenticate to the CloudControl API.
- If not specified, will fall back to the C(MCP_USER) environment variable or C(~/.dimensiondata).
required: false
mcp_password:
description:
- The password used to authenticate to the CloudControl API.
- If not specified, will fall back to the C(MCP_PASSWORD) environment variable or C(~/.dimensiondata).
- Required if I(mcp_user) is specified.
required: false
location:
description:
- The target datacenter.
required: true
validate_certs:
description:
- If C(false), SSL certificates will not be validated.
- This should only be used on private instances of the CloudControl API that use self-signed certificates.
required: false
default: true
'''
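# A module opts in to this shared documentation by naming the fragment in its
# own DOCUMENTATION block - a minimal sketch (the module name is hypothetical):
#
# DOCUMENTATION = '''
# ---
# module: dimensiondata_example
# extends_documentation_fragment: dimensiondata
# '''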
|
scapella/trimal
|
refs/heads/trimAl
|
scripts/get_sequence_representative_from_alignment.py
|
1
|
#!/usr/bin/python
#
# 'get_sequence_representative_from_alignment.py'
#
# Script implemented to work with trimAl to compute pairwise sequence
# identities and select a representative sequence from a given alignment -
# the sequence with the highest average identity to all other sequences
# in the alignment is reported.
#
# [2014] S. Capella-Gutierrez - scapella@crg.es
#
# this script is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, the last available version.
#
# this script is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details on <http://www.gnu.org/licenses/>
#
from Bio import AlignIO
import numpy as np
import argparse
import sys
import os
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--in", dest = "inFile", required = True, type = \
str, help = "Input alignment")
parser.add_argument("-o", "--out", dest = "outFile", default = None, type = \
str, help = "Set output file")
parser.add_argument("-f", "--format", dest = "inFormat", default = "fasta", \
type = str, choices = ["clustal", "fasta-m10", "fasta", "phylip-relaxed", \
"phylip-sequential", "phylip", "nexus"],help = "Set input alignment format")
parser.add_argument("-g", "--gap_symbol", dest = "gapSymbol", default = '-', \
type = str, help = "Define the gap symbol used in the input alignment")
parser.add_argument("--keep_header", dest = "keepHeader", default = False,
action = "store_true", help = "Keep original alignment sequence IDs indepen"
+ "dently of blank spaces on it")
parser.add_argument("-v", "--verbose", dest = "verbose", default = False,
action = "store_true", help = "Activate verbosity")
args = parser.parse_args()
if not os.path.isfile(args.inFile):
sys.exit(("ERROR: Check input alignment file '%s'") % (args.inFile))
identities, sequences = {}, {}
for record in AlignIO.read(args.inFile, format = args.inFormat):
current_seq = str(record.seq)
sequence_length = len(current_seq)
sequence_id = record.id if not args.keepHeader else record.description
for seq in sequences:
## Identity score is computed considering all positions for which at least
## one of the sequences has a non-gap symbol
valid_pos = [ pos for pos in range(sequence_length) if current_seq[pos] \
!= args.gapSymbol or sequences[seq][0][pos] != args.gapSymbol ]
identical = [ pos for pos in valid_pos if sequences[seq][0][pos] == \
current_seq[pos]]
ratio = float(len(identical))/len(valid_pos)
identities.setdefault(sequence_id, {}).setdefault(seq, ratio)
identities.setdefault(seq, {}).setdefault(sequence_id, ratio)
## Save current sequence and move on to the next one
ungapped = current_seq.replace(args.gapSymbol, "")
sequences.setdefault(sequence_id, [current_seq, ungapped, len(ungapped)])
selection, maxIdentity = set(), 0
for refer in sequences:
avg = np.average([identities[refer][seq] for seq in identities[refer]])
if args.verbose:
print >> sys.stderr, ("%-20s\t%.6f") % (refer, avg)
## Save current sequence if it has a greater identity score
if avg > maxIdentity:
maxIdentity = avg
selection = set([(sequences[refer][1], refer)])
elif avg == maxIdentity:
selection |= set([(sequences[refer][1], refer)])
representative = sorted(selection, reverse = True)[0][1]
ofile = open(args.outFile, "w") if args.outFile else sys.stdout
print >> ofile, (">%s\n%s") % (representative, sequences[representative][1])
ofile.close()
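# Example invocation (file names are illustrative):
#
#   python get_sequence_representative_from_alignment.py \
#     -i alignment.fasta -f fasta -o representative.fasta --verbose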
|
johnsonlau/multivimdriver-vmware-vio
|
refs/heads/master
|
vio/vio/pub/vim/drivers/vimsdk/image_v2.py
|
1
|
# Copyright (c) 2017 VMware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import logging
from vio.pub.vim.drivers import base
from vio.pub.vim.drivers.vimsdk import sdk
from openstack.image.v2 import image as _image
LOG = logging.getLogger(__name__)
class GlanceClient(base.DriverBase):
def __init__(self, params):
super(GlanceClient, self).__init__(params)
LOG.info("%s", str(params))
self.conn = sdk.create_connection(params)
self.session = self.conn.session
self._proxy = self.conn.image
@sdk.translate_exception
def list_images(self, **query):
images = self._proxy.images(**query)
return images
@sdk.translate_exception
def get_image(self, imageid):
image = self._proxy.get_image(imageid)
return image
@sdk.translate_exception
def find_image(self, name_or_id):
image = self._proxy.find_image(name_or_id, ignore_missing=False)
return image
@sdk.translate_exception
def delete_image(self, imageid):
self._proxy.delete_image(imageid)
@sdk.translate_exception
def create_image(self, **data):
disk_format = data.pop('disk_format', None)
container_format = data.pop('container_format', None)
if not all([container_format, disk_format]):
raise Exception("Both container_format and disk_format are required")
img = self._proxy._create(_image.Image, disk_format=disk_format,
container_format=container_format, **data)
return img
@sdk.translate_exception
def upload_image(self, data, image):
image.data = data
image.upload(self.session)
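# Minimal usage sketch (assumes `params` carries valid connection/auth data;
# the image name, formats and file are illustrative):
#
#   client = GlanceClient(params)
#   image = client.create_image(name='cirros', disk_format='qcow2',
#                               container_format='bare')
#   with open('cirros.img', 'rb') as f:
#       client.upload_image(f.read(), image)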
|
chafique-delli/OpenUpgrade
|
refs/heads/master
|
addons/sale/edi/__init__.py
|
454
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale_order
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
sebastienc/kubernetes-py
|
refs/heads/master
|
kubernetes_py/models/v1/HostPathVolumeSource.py
|
3
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is subject to the terms and conditions defined in
# file 'LICENSE.md', which is part of this source code package.
#
from kubernetes_py.utils import is_valid_string, filter_model
class HostPathVolumeSource(object):
"""
http://kubernetes.io/docs/api-reference/v1/definitions/#_v1_hostpathvolumesource
"""
def __init__(self, model=None):
super(HostPathVolumeSource, self).__init__()
self._path = None
if model is not None:
m = filter_model(model)
self._build_with_model(m)
def _build_with_model(self, model=None):
if "path" in model:
self.path = model["path"]
# ------------------------------------------------------------------------------------- path
@property
def path(self):
return self._path
@path.setter
def path(self, path=None):
if not is_valid_string(path):
raise SyntaxError("HostPathVolumeSource: path: [ {0} ] is invalid.".format(path))
self._path = path
# ------------------------------------------------------------------------------------- serialize
def serialize(self):
data = {}
if self.path is not None:
data["path"] = self.path
return data
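# Round-trip sketch (the path value is illustrative):
#
#   src = HostPathVolumeSource(model={"path": "/var/log"})
#   assert src.path == "/var/log"
#   assert src.serialize() == {"path": "/var/log"}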
|
RanadeepPolavarapu/kuma
|
refs/heads/master
|
kuma/core/tests/logging_urls.py
|
10
|
from django.conf.urls import patterns, url
def exception_raiser(request):
raise Exception('Raising exception to test logging.')
urlpatterns = patterns(
'',
url(r'^test_exception/$',
exception_raiser,
name='logging.exception_raiser'),
)
|
jonfoster/pyxb1
|
refs/heads/master
|
pyxb/bundles/opengis/sos_1_0.py
|
6
|
from pyxb.bundles.opengis.raw.sos_1_0 import *
|
skg-net/ansible
|
refs/heads/devel
|
test/units/modules/network/f5/test_bigip_device_connectivity.py
|
22
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_device_connectivity import ApiParameters
from library.modules.bigip_device_connectivity import ModuleParameters
from library.modules.bigip_device_connectivity import ModuleManager
from library.modules.bigip_device_connectivity import ArgumentSpec
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_device_connectivity import ApiParameters
from ansible.modules.network.f5.bigip_device_connectivity import ModuleParameters
from ansible.modules.network.f5.bigip_device_connectivity import ModuleManager
from ansible.modules.network.f5.bigip_device_connectivity import ArgumentSpec
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
multicast_port='1010',
multicast_address='10.10.10.10',
multicast_interface='eth0',
failover_multicast=True,
unicast_failover=[
dict(
address='20.20.20.20',
port='1234'
)
],
mirror_primary_address='1.2.3.4',
mirror_secondary_address='5.6.7.8',
config_sync_ip='4.3.2.1',
state='present',
server='localhost',
user='admin',
password='password'
)
p = ModuleParameters(params=args)
assert p.multicast_port == 1010
assert p.multicast_address == '10.10.10.10'
assert p.multicast_interface == 'eth0'
assert p.failover_multicast is True
assert p.mirror_primary_address == '1.2.3.4'
assert p.mirror_secondary_address == '5.6.7.8'
assert p.config_sync_ip == '4.3.2.1'
assert len(p.unicast_failover) == 1
assert 'effectiveIp' in p.unicast_failover[0]
assert 'effectivePort' in p.unicast_failover[0]
assert 'port' in p.unicast_failover[0]
assert 'ip' in p.unicast_failover[0]
assert p.unicast_failover[0]['effectiveIp'] == '20.20.20.20'
assert p.unicast_failover[0]['ip'] == '20.20.20.20'
assert p.unicast_failover[0]['port'] == 1234
assert p.unicast_failover[0]['effectivePort'] == 1234
def test_api_parameters(self):
params = load_fixture('load_tm_cm_device.json')
p = ApiParameters(params=params)
assert p.multicast_port == 62960
assert p.multicast_address == '224.0.0.245'
assert p.multicast_interface == 'eth0'
assert p.mirror_primary_address == '10.2.2.2'
assert p.mirror_secondary_address == '10.2.3.2'
assert p.config_sync_ip == '10.2.2.2'
assert len(p.unicast_failover) == 2
assert 'effectiveIp' in p.unicast_failover[0]
assert 'effectivePort' in p.unicast_failover[0]
assert 'port' in p.unicast_failover[0]
assert 'ip' in p.unicast_failover[0]
assert p.unicast_failover[0]['effectiveIp'] == 'management-ip'
assert p.unicast_failover[0]['ip'] == 'management-ip'
assert p.unicast_failover[0]['port'] == 1026
assert p.unicast_failover[0]['effectivePort'] == 1026
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
def test_update_settings(self, *args):
set_module_args(dict(
config_sync_ip="10.1.30.1",
mirror_primary_address="10.1.30.1",
unicast_failover=[
dict(
address="10.1.30.1"
)
],
server='localhost',
user='admin',
password='password'
))
# Configure the parameters that would be returned by querying the
# remote device
current = ApiParameters(params=load_fixture('load_tm_cm_device_default.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.update_on_device = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
results = mm.exec_module()
assert results['changed'] is True
assert results['config_sync_ip'] == '10.1.30.1'
assert results['mirror_primary_address'] == '10.1.30.1'
assert len(results.keys()) == 4
def test_set_primary_mirror_address_none(self, *args):
set_module_args(dict(
mirror_primary_address="none",
server='localhost',
user='admin',
password='password'
))
# Configure the parameters that would be returned by querying the
# remote device
current = ApiParameters(params=load_fixture('load_tm_cm_device.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.update_on_device = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
results = mm.exec_module()
assert results['changed'] is True
assert results['mirror_primary_address'] == 'none'
assert len(results.keys()) == 2
def test_set_secondary_mirror_address_none(self, *args):
set_module_args(dict(
mirror_secondary_address="none",
server='localhost',
user='admin',
password='password'
))
# Configure the parameters that would be returned by querying the
# remote device
current = ApiParameters(params=load_fixture('load_tm_cm_device.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.update_on_device = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
results = mm.exec_module()
assert results['changed'] is True
assert results['mirror_secondary_address'] == 'none'
assert len(results.keys()) == 2
def test_set_multicast_address_none(self, *args):
set_module_args(dict(
multicast_address="none",
server='localhost',
user='admin',
password='password'
))
# Configure the parameters that would be returned by querying the
# remote device
current = ApiParameters(params=load_fixture('load_tm_cm_device.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.update_on_device = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
results = mm.exec_module()
assert results['changed'] is True
assert results['multicast_address'] == 'none'
assert len(results.keys()) == 2
def test_set_multicast_port_negative(self, *args):
set_module_args(dict(
multicast_port=-1,
server='localhost',
user='admin',
password='password'
))
# Configure the parameters that would be returned by querying the
# remote device
current = ApiParameters(params=load_fixture('load_tm_cm_device.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.update_on_device = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
with pytest.raises(F5ModuleError) as ex:
mm.exec_module()
assert 'must be between' in str(ex)
def test_set_multicast_address(self, *args):
set_module_args(dict(
multicast_address="10.1.1.1",
server='localhost',
user='admin',
password='password'
))
# Configure the parameters that would be returned by querying the
# remote device
current = ApiParameters(params=load_fixture('load_tm_cm_device.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.update_on_device = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
results = mm.exec_module()
assert results['changed'] is True
assert results['multicast_address'] == '10.1.1.1'
assert len(results.keys()) == 2
def test_unset_unicast_failover(self, *args):
set_module_args(dict(
unicast_failover="none",
server='localhost',
user='admin',
password='password'
))
# Configure the parameters that would be returned by querying the
# remote device
current = ApiParameters(params=load_fixture('load_tm_cm_device.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.update_on_device = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
results = mm.exec_module()
assert results['changed'] is True
assert results['unicast_failover'] == 'none'
assert len(results.keys()) == 2
def test_unset_config_sync_ip(self, *args):
set_module_args(dict(
config_sync_ip="none",
server='localhost',
user='admin',
password='password'
))
# Configure the parameters that would be returned by querying the
# remote device
current = ApiParameters(params=load_fixture('load_tm_cm_device.json'))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.update_on_device = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=current)
results = mm.exec_module()
assert results['changed'] is True
assert results['config_sync_ip'] == 'none'
assert len(results.keys()) == 2
|
wujf/rethinkdb
|
refs/heads/next
|
test/interface/resources.py
|
29
|
#!/usr/bin/env python
# Copyright 2010-2014 RethinkDB, all rights reserved.
from __future__ import print_function
import os, sys, time, urllib2
startTime = time.time()
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')))
import driver, scenario_common, utils, vcoptparse
op = vcoptparse.OptParser()
scenario_common.prepare_option_parser_mode_flags(op)
_, command_prefix, serve_options = scenario_common.parse_mode_flags(op.parse(sys.argv))
print("Spinning up a server (%.2fs)" % (time.time() - startTime))
with driver.Process(output_folder='.', command_prefix=command_prefix, extra_options=serve_options, wait_until_ready=True) as server:
baseURL = 'http://%s:%d/' % (server.host, server.http_port)
print("Getting root (%.2fs)" % (time.time() - startTime))
fetchResult = urllib2.urlopen(baseURL, timeout=2)
fetchData = fetchResult.read()
assert fetchResult.getcode() == 200, 'Got a non 200 code when requesting the root: %s' % str(fetchResult.getcode())
assert fetchResult.headers['content-type'] == 'text/html'
assert '<html' in fetchData, 'Data from root did not include "html": %s' % fetchData
print("Getting invalid page (%.2fs)" % (time.time() - startTime))
# open a log file iterator and flush out the existing lines
logFile = utils.nonblocking_readline(server.logfile_path)
while next(logFile) is not None:
pass
try:
fetchResult = urllib2.urlopen(os.path.join(baseURL, 'foobar'), timeout=2)
except urllib2.HTTPError as e:
assert e.code == 403, 'Got a non 403 code when requesting bad url /foobar: %s' % str(e.code)
else:
assert False, "Did not raise a 403 error code when requesting a bad url"
print("Checking that the bad access was recorded (%.2fs)" % (time.time() - startTime))
deadline = time.time() + 2
foundIt = False
while time.time() < deadline:
thisEntry = next(logFile)
while thisEntry is not None:
if 'Someone asked for the nonwhitelisted file "/foobar"' in thisEntry:
foundIt = True
break
thisEntry = next(logFile)
if foundIt:
break
time.sleep(0.05)
else:
assert False, "Timed out waiting for the bad access marker to be written to the log"
print("Getting ajax/me (%.2fs)" % (time.time() - startTime))
fetchResult = urllib2.urlopen(os.path.join(baseURL, 'ajax/me'), timeout=2)
fetchData = fetchResult.read()
assert fetchResult.getcode() == 200, 'Got a non 200 code when requesting /me: %s' % str(fetchResult.getcode())
assert fetchResult.headers['content-type'] == 'application/json'
assert fetchData == '"%s"' % server.uuid, 'Data from ajax/me did not match the expected server uuid: %s vs %s' % (fetchData, server.uuid)
# -- ending
print("Cleaning up (%.2fs)" % (time.time() - startTime))
print("Done. (%.2fs)" % (time.time() - startTime))
|
justinabrahms/django-gencal
|
refs/heads/master
|
example_project/bills/models.py
|
1
|
import datetime
from django.db import models
class Bill(models.Model):
company = models.CharField(max_length=150)
amount = models.DecimalField(decimal_places=2, max_digits=7)
due_date = models.DateTimeField(default=datetime.datetime.now)
def __unicode__(self):
return "Owe $%d to %s on %s" % (self.amount, self.company, self.due_date)
|
ducthien1490/youtube-dl
|
refs/heads/master
|
devscripts/prepare_manpage.py
|
105
|
from __future__ import unicode_literals
import io
import os.path
import sys
import re
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
README_FILE = os.path.join(ROOT_DIR, 'README.md')
with io.open(README_FILE, encoding='utf-8') as f:
readme = f.read()
PREFIX = '''%YOUTUBE-DL(1)
# NAME
youtube\-dl \- download videos from youtube.com or other video platforms
# SYNOPSIS
**youtube-dl** \[OPTIONS\] URL [URL...]
'''
readme = re.sub(r'(?s)^.*?(?=# DESCRIPTION)', '', readme)
readme = re.sub(r'\s+youtube-dl \[OPTIONS\] URL \[URL\.\.\.\]', '', readme)
readme = PREFIX + readme
if sys.version_info < (3, 0):
print(readme.encode('utf-8'))
else:
print(readme)
|
cyanna/edx-platform
|
refs/heads/master
|
cms/djangoapps/contentstore/management/commands/clone_course.py
|
119
|
"""
Script for cloning a course
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.django import modulestore
from student.roles import CourseInstructorRole, CourseStaffRole
from opaque_keys.edx.keys import CourseKey
from opaque_keys import InvalidKeyError
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xmodule.modulestore import ModuleStoreEnum
#
# To run from command line: ./manage.py cms clone_course --settings=dev master/300/cough edx/111/foo
#
class Command(BaseCommand):
"""Clone a MongoDB-backed course to another location"""
help = 'Clone a MongoDB backed course to another location'
def course_key_from_arg(self, arg):
"""
Convert the command line arg into a course key
"""
try:
return CourseKey.from_string(arg)
except InvalidKeyError:
return SlashSeparatedCourseKey.from_deprecated_string(arg)
def handle(self, *args, **options):
"Execute the command"
if len(args) != 2:
raise CommandError("clone requires 2 arguments: <source-course_id> <dest-course_id>")
source_course_id = self.course_key_from_arg(args[0])
dest_course_id = self.course_key_from_arg(args[1])
mstore = modulestore()
print("Cloning course {0} to {1}".format(source_course_id, dest_course_id))
with mstore.bulk_operations(dest_course_id):
if mstore.clone_course(source_course_id, dest_course_id, ModuleStoreEnum.UserID.mgmt_command):
print("copying User permissions...")
# purposely avoids auth.add_user b/c it doesn't have a caller to authorize
CourseInstructorRole(dest_course_id).add_users(
*CourseInstructorRole(source_course_id).users_with_role()
)
CourseStaffRole(dest_course_id).add_users(
*CourseStaffRole(source_course_id).users_with_role()
)
|
editrobot/new_editrobot
|
refs/heads/master
|
node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
|
899
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import os
import gyp
import gyp.common
import gyp.msvs_emulation
import json
import sys
generator_supports_multiple_toolsets = True
generator_wants_static_library_dependencies_adjusted = False
generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
'LIB_DIR', 'SHARED_LIB_DIR']:
# Some gyp steps fail if these are empty(!).
generator_default_variables[dirname] = 'dir'
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
'CONFIGURATION_NAME']:
generator_default_variables[unused] = ''
def CalculateVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
for key, val in generator_flags.items():
default_variables.setdefault(key, val)
default_variables.setdefault('OS', gyp.common.GetFlavor(params))
flavor = gyp.common.GetFlavor(params)
if flavor == 'win':
# Copy additional generator configuration data from VS, which is shared
# by the Windows Ninja generator.
import gyp.generator.msvs as msvs_generator
generator_additional_non_configuration_keys = getattr(msvs_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
gyp)."""
generator_flags = params.get('generator_flags', {})
if generator_flags.get('adjust_static_libraries', False):
global generator_wants_static_library_dependencies_adjusted
generator_wants_static_library_dependencies_adjusted = True
def GenerateOutput(target_list, target_dicts, data, params):
# Map of target -> list of targets it depends on.
edges = {}
# Queue of targets to visit.
targets_to_visit = target_list[:]
while len(targets_to_visit) > 0:
target = targets_to_visit.pop()
if target in edges:
continue
edges[target] = []
for dep in target_dicts[target].get('dependencies', []):
edges[target].append(dep)
targets_to_visit.append(dep)
filename = 'dump.json'
f = open(filename, 'w')
json.dump(edges, f)
f.close()
print 'Wrote json to %s.' % filename
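# The dumped file maps every target to its direct dependencies, e.g.
# (hypothetical targets):
#   {"a.gyp:app#target": ["b.gyp:base#target"], "b.gyp:base#target": []}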
|
ElDeveloper/qiime
|
refs/heads/master
|
scripts/sort_otu_table.py
|
15
|
#!/usr/bin/env python
# File created on 15 Feb 2011
from __future__ import division
__author__ = "Greg Caporaso"
__copyright__ = "Copyright 2011, The QIIME Project"
__credits__ = ["Greg Caporaso", "Daniel McDonald", "Emily TerAvest",
"Yoshiki Vazquez Baeza"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Greg Caporaso"
__email__ = "gregcaporaso@gmail.com"
from biom import load_table
from qiime.parse import parse_mapping_file
from qiime.util import (parse_command_line_parameters, get_options_lookup,
make_option, write_biom_table)
from qiime.sort import (sort_otu_table, sort_otu_table_by_mapping_field,
natsort_case_insensitive)
options_lookup = get_options_lookup()
script_info = {}
script_info[
'brief_description'] = "Script for sorting the sample IDs in an OTU table based on a specified value in a mapping file."
script_info['script_description'] = ""
script_info['script_usage'] = [("Default",
"case insensitive natural sorting"
" i.e. SAMP0, samp1, SAMP2, samp10, samp12",
"%prog -i otu_table.biom -o sorted_otu_table.biom"),
("",
"sort samples by the age field in the mapping file",
"%prog -i otu_table.biom -o dob_sorted_otu_table.biom -m Fasting_Map.txt -s DOB"),
("",
"sort samples based on order in a file where each line starts with a sample id",
"%prog -i otu_table.biom -o sorted_otu_table.biom -l sample_id_list.txt")]
script_info['output_description'] = ""
script_info['required_options'] = [
make_option('-i', '--input_otu_table',
help='Input OTU table filepath in BIOM format.',
type='existing_filepath'),
make_option('-o', '--output_fp',
help='Output OTU table filepath.',
type='new_filepath'),
]
script_info['optional_options'] = [
make_option('-m', '--mapping_fp',
help='Input metadata mapping filepath. [default: %default]',
type='existing_filepath'),
make_option('-s', '--sort_field', type='string',
help='Category to sort OTU table by. [default: %default]'),
make_option('-l', '--sorted_sample_ids_fp',
help='Sorted sample id filepath [default: %default]',
type='existing_filepath')
]
script_info[
'option_label'] = {'input_otu_table': 'OTU table filepath in BIOM format',
'output_fp': 'Output filepath',
'mapping_fp':
'QIIME-formatted mapping filepath',
'sort_field': 'Category to sort by',
'sorted_sample_ids_fp': 'Sorted sample id filepath'}
script_info['version'] = __version__
def sample_ids_from_f(lines):
result = []
for line in lines:
line = line.strip()
if line and not line.startswith('#'):
result.append(line.split()[0])
return result
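# Illustrative behaviour (input lines are hypothetical): blank lines and
# '#' comments are skipped and only the first whitespace-separated token is
# kept, so ['#header', '', 'S1\tday1', 'S2'] yields ['S1', 'S2'].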
def main():
option_parser, opts, args = parse_command_line_parameters(**script_info)
otu_table_data = load_table(opts.input_otu_table)
sort_field = opts.sort_field
mapping_fp = opts.mapping_fp
sorted_sample_ids_fp = opts.sorted_sample_ids_fp
if sort_field and mapping_fp:
mapping_data = parse_mapping_file(open(mapping_fp, 'U'))
result = sort_otu_table_by_mapping_field(otu_table_data, mapping_data,
sort_field)
elif sorted_sample_ids_fp:
sorted_sample_ids = sample_ids_from_f(open(sorted_sample_ids_fp, 'U'))
result = sort_otu_table(otu_table_data,
sorted_sample_ids)
else:
result = sort_otu_table(otu_table_data,
natsort_case_insensitive(otu_table_data.ids()))
write_biom_table(result, opts.output_fp)
if __name__ == "__main__":
main()
|
raschuetz/foundations-homework
|
refs/heads/master
|
07/data-analysis/lib/python3.5/site-packages/pip/_vendor/requests/models.py
|
148
|
# -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import datetime
from io import BytesIO, UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
from .packages.urllib3.exceptions import (
DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
codes.moved, # 301
codes.found, # 302
codes.other, # 303
codes.temporary_redirect, # 307
codes.permanent_redirect, # 308
)
DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
class RequestEncodingMixin(object):
@property
def path_url(self):
"""Build the path URL to use."""
url = []
p = urlsplit(self.url)
path = p.path
if not path:
path = '/'
url.append(path)
query = p.query
if query:
url.append('?')
url.append(query)
return ''.join(url)
@staticmethod
def _encode_params(data):
"""Encode parameters in a piece of data.
Will successfully encode parameters when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if isinstance(data, (str, bytes)):
return data
elif hasattr(data, 'read'):
return data
elif hasattr(data, '__iter__'):
result = []
for k, vs in to_key_val_list(data):
if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
vs = [vs]
for v in vs:
if v is not None:
result.append(
(k.encode('utf-8') if isinstance(k, str) else k,
v.encode('utf-8') if isinstance(v, str) else v))
return urlencode(result, doseq=True)
else:
return data
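    # A hedged sketch of the shapes _encode_params accepts (the key/value
    # names are illustrative): strings, bytes and file-like objects pass
    # through unchanged, while dicts and lists of 2-tuples are form-encoded,
    # expanding list values into repeated keys.
    #
    #   RequestEncodingMixin._encode_params({'k': ['v1', 'v2']})
    #   -> 'k=v1&k=v2'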
@staticmethod
def _encode_files(files, data):
"""Build the body for a multipart/form-data request.
Will successfully encode files when passed as a dict or a list of
tuples. Order is retained if data is a list of tuples but arbitrary
if parameters are supplied as a dict.
The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
or 4-tuples (filename, fileobj, contentype, custom_headers).
"""
if (not files):
raise ValueError("Files must be provided.")
elif isinstance(data, basestring):
raise ValueError("Data must not be a string.")
new_fields = []
fields = to_key_val_list(data or {})
files = to_key_val_list(files or {})
for field, val in fields:
if isinstance(val, basestring) or not hasattr(val, '__iter__'):
val = [val]
for v in val:
if v is not None:
# Don't call str() on bytestrings: in Py3 it all goes wrong.
if not isinstance(v, bytes):
v = str(v)
new_fields.append(
(field.decode('utf-8') if isinstance(field, bytes) else field,
v.encode('utf-8') if isinstance(v, str) else v))
for (k, v) in files:
# support for explicit filename
ft = None
fh = None
if isinstance(v, (tuple, list)):
if len(v) == 2:
fn, fp = v
elif len(v) == 3:
fn, fp, ft = v
else:
fn, fp, ft, fh = v
else:
fn = guess_filename(v) or k
fp = v
if isinstance(fp, (str, bytes, bytearray)):
fdata = fp
else:
fdata = fp.read()
rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
rf.make_multipart(content_type=ft)
new_fields.append(rf)
body, content_type = encode_multipart_formdata(new_fields)
return body, content_type
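# A minimal sketch of the file-tuple forms _encode_files accepts (field
# names and filenames are illustrative, assumed from the branching above):
#
#   files = {'field': open('report.csv', 'rb')}                     # bare fileobj
#   files = {'field': ('report.csv', open('report.csv', 'rb'))}     # 2-tuple
#   files = {'field': ('report.csv', fileobj, 'text/csv')}          # 3-tuple
#   files = {'field': ('report.csv', fileobj, 'text/csv', {...})}   # 4-tuple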
class RequestHooksMixin(object):
def register_hook(self, event, hook):
"""Properly register a hook."""
if event not in self.hooks:
raise ValueError('Unsupported event specified, with event name "%s"' % (event))
if isinstance(hook, collections.Callable):
self.hooks[event].append(hook)
elif hasattr(hook, '__iter__'):
self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
def deregister_hook(self, event, hook):
"""Deregister a previously registered hook.
Returns True if the hook existed, False if not.
"""
try:
self.hooks[event].remove(hook)
return True
except ValueError:
return False
class Request(RequestHooksMixin):
"""A user-created :class:`Request <Request>` object.
Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
:param method: HTTP method to use.
:param url: URL to send.
:param headers: dictionary of headers to send.
:param files: dictionary of {filename: fileobject} files to multipart upload.
:param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
:param json: json for the body to attach to the request (if files or data is not specified).
:param params: dictionary of URL parameters to append to the URL.
:param auth: Auth handler or (user, pass) tuple.
:param cookies: dictionary or CookieJar of cookies to attach to this request.
:param hooks: dictionary of callback hooks, for internal usage.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> req.prepare()
<PreparedRequest [GET]>
"""
def __init__(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
# Default empty dicts for dict params.
data = [] if data is None else data
files = [] if files is None else files
headers = {} if headers is None else headers
params = {} if params is None else params
hooks = {} if hooks is None else hooks
self.hooks = default_hooks()
for (k, v) in list(hooks.items()):
self.register_hook(event=k, hook=v)
self.method = method
self.url = url
self.headers = headers
self.files = files
self.data = data
self.json = json
self.params = params
self.auth = auth
self.cookies = cookies
def __repr__(self):
return '<Request [%s]>' % (self.method)
def prepare(self):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
p = PreparedRequest()
p.prepare(
method=self.method,
url=self.url,
headers=self.headers,
files=self.files,
data=self.data,
json=self.json,
params=self.params,
auth=self.auth,
cookies=self.cookies,
hooks=self.hooks,
)
return p
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
containing the exact bytes that will be sent to the server.
Generated from either a :class:`Request <Request>` object or manually.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> r = req.prepare()
<PreparedRequest [GET]>
>>> s = requests.Session()
>>> s.send(r)
<Response [200]>
"""
def __init__(self):
#: HTTP verb to send to the server.
self.method = None
#: HTTP URL to send the request to.
self.url = None
#: dictionary of HTTP headers.
self.headers = None
# The `CookieJar` used to create the Cookie header will be stored here
# after prepare_cookies is called
self._cookies = None
#: request body to send to the server.
self.body = None
#: dictionary of callback hooks, for internal usage.
self.hooks = default_hooks()
def prepare(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
"""Prepares the entire request with the given parameters."""
self.prepare_method(method)
self.prepare_url(url, params)
self.prepare_headers(headers)
self.prepare_cookies(cookies)
self.prepare_body(data, files, json)
self.prepare_auth(auth, url)
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
# This MUST go after prepare_auth. Authenticators could add a hook
self.prepare_hooks(hooks)
def __repr__(self):
return '<PreparedRequest [%s]>' % (self.method)
def copy(self):
p = PreparedRequest()
p.method = self.method
p.url = self.url
p.headers = self.headers.copy() if self.headers is not None else None
p._cookies = _copy_cookie_jar(self._cookies)
p.body = self.body
p.hooks = self.hooks
return p
def prepare_method(self, method):
"""Prepares the given HTTP method."""
self.method = method
if self.method is not None:
self.method = to_native_string(self.method.upper())
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
#: We're unable to blindly call unicode/str functions
#: as this will include the bytestring indicator (b'')
#: on python 3.x.
#: https://github.com/kennethreitz/requests/pull/2238
if isinstance(url, bytes):
url = url.decode('utf8')
else:
url = unicode(url) if is_py2 else str(url)
# Don't do any URL preparation for non-HTTP schemes like `mailto`,
# `data` etc to work around exceptions from `url_parse`, which
# handles RFC 3986 only.
if ':' in url and not url.lower().startswith('http'):
self.url = url
return
# Support for unicode domain names and paths.
try:
scheme, auth, host, port, path, query, fragment = parse_url(url)
except LocationParseError as e:
raise InvalidURL(*e.args)
if not scheme:
error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
error = error.format(to_native_string(url, 'utf8'))
raise MissingSchema(error)
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
# Only want to apply IDNA to the hostname
try:
host = host.encode('idna').decode('utf-8')
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
# Carefully reconstruct the network location
netloc = auth or ''
if netloc:
netloc += '@'
netloc += host
if port:
netloc += ':' + str(port)
# Bare domains aren't valid URLs.
if not path:
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
if isinstance(params, (str, bytes)):
params = to_native_string(params)
enc_params = self._encode_params(params)
if enc_params:
if query:
query = '%s&%s' % (query, enc_params)
else:
query = enc_params
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
self.url = url
def prepare_headers(self, headers):
"""Prepares the given HTTP headers."""
if headers:
self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
else:
self.headers = CaseInsensitiveDict()
def prepare_body(self, data, files, json=None):
"""Prepares the given HTTP body data."""
# Check if file, fo, generator, iterator.
# If not, run through normal process.
# Nottin' on you.
body = None
content_type = None
length = None
if not data and json is not None:
content_type = 'application/json'
body = complexjson.dumps(json)
is_stream = all([
hasattr(data, '__iter__'),
not isinstance(data, (basestring, list, tuple, dict))
])
try:
length = super_len(data)
except (TypeError, AttributeError, UnsupportedOperation):
length = None
if is_stream:
body = data
if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
if length:
self.headers['Content-Length'] = builtin_str(length)
else:
self.headers['Transfer-Encoding'] = 'chunked'
else:
# Multi-part file uploads.
if files:
(body, content_type) = self._encode_files(files, data)
else:
if data:
body = self._encode_params(data)
if isinstance(data, basestring) or hasattr(data, 'read'):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
if content_type and ('content-type' not in self.headers):
self.headers['Content-Type'] = content_type
self.body = body
def prepare_content_length(self, body):
if hasattr(body, 'seek') and hasattr(body, 'tell'):
curr_pos = body.tell()
body.seek(0, 2)
end_pos = body.tell()
self.headers['Content-Length'] = builtin_str(max(0, end_pos - curr_pos))
body.seek(curr_pos, 0)
elif body is not None:
l = super_len(body)
if l:
self.headers['Content-Length'] = builtin_str(l)
elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None):
self.headers['Content-Length'] = '0'
def prepare_auth(self, auth, url=''):
"""Prepares the given HTTP auth data."""
# If no Auth is explicitly provided, extract it from the URL first.
if auth is None:
url_auth = get_auth_from_url(self.url)
auth = url_auth if any(url_auth) else None
if auth:
if isinstance(auth, tuple) and len(auth) == 2:
# special-case basic HTTP auth
auth = HTTPBasicAuth(*auth)
# Allow auth to make its changes.
r = auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
# Recompute Content-Length
self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data.
This function eventually generates a ``Cookie`` header from the
given cookies using cookielib. Due to cookielib's design, the header
will not be regenerated if it already exists, meaning this function
can only be called once for the life of the
:class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
header is removed beforehand."""
if isinstance(cookies, cookielib.CookieJar):
self._cookies = cookies
else:
self._cookies = cookiejar_from_dict(cookies)
cookie_header = get_cookie_header(self._cookies, self)
if cookie_header is not None:
self.headers['Cookie'] = cookie_header
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
# hooks can be passed as None to the prepare method and to this
# method. To prevent iterating over None, simply use an empty list
# if hooks is False-y
hooks = hooks or []
for event in hooks:
self.register_hook(event, hooks[event])
class Response(object):
"""The :class:`Response <Response>` object, which contains a
server's response to an HTTP request.
"""
__attrs__ = [
'_content', 'status_code', 'headers', 'url', 'history',
'encoding', 'reason', 'cookies', 'elapsed', 'request'
]
def __init__(self):
super(Response, self).__init__()
self._content = False
self._content_consumed = False
#: Integer Code of responded HTTP Status, e.g. 404 or 200.
self.status_code = None
#: Case-insensitive Dictionary of Response Headers.
#: For example, ``headers['content-encoding']`` will return the
#: value of a ``'Content-Encoding'`` response header.
self.headers = CaseInsensitiveDict()
#: File-like object representation of response (for advanced usage).
#: Use of ``raw`` requires that ``stream=True`` be set on the request.
# This requirement does not apply for use internally to Requests.
self.raw = None
#: Final URL location of Response.
self.url = None
#: Encoding to decode with when accessing r.text.
self.encoding = None
#: A list of :class:`Response <Response>` objects from
#: the history of the Request. Any redirect responses will end
#: up here. The list is sorted from the oldest to the most recent request.
self.history = []
#: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
self.reason = None
#: A CookieJar of Cookies the server sent back.
self.cookies = cookiejar_from_dict({})
#: The amount of time elapsed between sending the request
#: and the arrival of the response (as a timedelta).
#: This property specifically measures the time taken between sending
#: the first byte of the request and finishing parsing the headers. It
#: is therefore unaffected by consuming the response content or the
#: value of the ``stream`` keyword argument.
self.elapsed = datetime.timedelta(0)
#: The :class:`PreparedRequest <PreparedRequest>` object to which this
#: is a response.
self.request = None
def __getstate__(self):
# Consume everything; accessing the content attribute makes
# sure the content has been fully read.
if not self._content_consumed:
self.content
return dict(
(attr, getattr(self, attr, None))
for attr in self.__attrs__
)
def __setstate__(self, state):
for name, value in state.items():
setattr(self, name, value)
# pickled objects do not have .raw
setattr(self, '_content_consumed', True)
setattr(self, 'raw', None)
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
def __bool__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __iter__(self):
"""Allows you to use a response as an iterator."""
return self.iter_content(128)
@property
def ok(self):
try:
self.raise_for_status()
except HTTPError:
return False
return True
@property
def is_redirect(self):
"""True if this Response is a well-formed HTTP redirect that could have
been processed automatically (by :meth:`Session.resolve_redirects`).
"""
return ('location' in self.headers and self.status_code in REDIRECT_STATI)
@property
def is_permanent_redirect(self):
"""True if this Response one of the permanent versions of redirect"""
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
@property
def apparent_encoding(self):
"""The apparent encoding, provided by the chardet library"""
return chardet.detect(self.content)['encoding']
def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
request, this avoids reading the content at once into memory for
large responses. The chunk size is the number of bytes it should
read into memory. This is not necessarily the length of each item
returned as decoding can take place.
If decode_unicode is True, content will be decoded using the best
available encoding based on the response.
"""
def generate():
# Special case for urllib3.
if hasattr(self.raw, 'stream'):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
raise ContentDecodingError(e)
except ReadTimeoutError as e:
raise ConnectionError(e)
else:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
if self._content_consumed and isinstance(self._content, bool):
raise StreamConsumedError()
# simulate reading small chunks of the content
reused_chunks = iter_slices(self._content, chunk_size)
stream_chunks = generate()
chunks = reused_chunks if self._content_consumed else stream_chunks
if decode_unicode:
chunks = stream_decode_response_unicode(chunks, self)
return chunks
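    # A short usage sketch, assuming a streamed response (the URL is
    # illustrative); chunk_size trades memory use against call overhead:
    #
    #   r = requests.get('http://httpbin.org/stream/5', stream=True)
    #   with open('out.bin', 'wb') as fd:
    #       for chunk in r.iter_content(chunk_size=1024):
    #           fd.write(chunk)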
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
"""Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the
content at once into memory for large responses.
.. note:: This method is not reentrant safe.
"""
pending = None
for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
if delimiter:
lines = chunk.split(delimiter)
else:
lines = chunk.splitlines()
if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
pending = lines.pop()
else:
pending = None
for line in lines:
yield line
if pending is not None:
yield pending
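    # A companion sketch for line-oriented bodies (names illustrative); with
    # decode_unicode=True the yielded lines are text rather than bytes:
    #
    #   for line in r.iter_lines(decode_unicode=True):
    #       handle(line)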
@property
def content(self):
"""Content of the response, in bytes."""
if self._content is False:
# Read the contents.
try:
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed')
if self.status_code == 0:
self._content = None
else:
self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
except AttributeError:
self._content = None
self._content_consumed = True
# don't need to release the connection; that's been handled by urllib3
# since we exhausted the data.
return self._content
@property
def text(self):
"""Content of the response, in unicode.
If Response.encoding is None, encoding will be guessed using
``chardet``.
The encoding of the response content is determined based solely on HTTP
headers, following RFC 2616 to the letter. If you can take advantage of
non-HTTP knowledge to make a better guess at the encoding, you should
set ``r.encoding`` appropriately before accessing this property.
"""
# Try charset from content-type
content = None
encoding = self.encoding
if not self.content:
return str('')
# Fallback to auto-detected encoding.
if self.encoding is None:
encoding = self.apparent_encoding
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors='replace')
except (LookupError, TypeError):
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
#
# A TypeError can be raised if encoding is None
#
# So we try blindly encoding.
content = str(self.content, errors='replace')
return content
def json(self, **kwargs):
"""Returns the json-encoded content of a response, if any.
:param \*\*kwargs: Optional arguments that ``json.loads`` takes.
"""
if not self.encoding and len(self.content) > 3:
# No encoding set. JSON RFC 4627 section 3 states we should expect
# UTF-8, -16 or -32. Detect which one to use; If the detection or
# decoding fails, fall back to `self.text` (using chardet to make
# a best guess).
encoding = guess_json_utf(self.content)
if encoding is not None:
try:
return complexjson.loads(
self.content.decode(encoding), **kwargs
)
except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was*
# used.
pass
return complexjson.loads(self.text, **kwargs)
@property
def links(self):
"""Returns the parsed header links of the response, if any."""
header = self.headers.get('link')
# l = MultiDict()
l = {}
if header:
links = parse_header_links(header)
for link in links:
key = link.get('rel') or link.get('url')
l[key] = link
return l
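    # A hedged sketch of the parsed shape, assuming a server that sends an
    # RFC 5988 Link header (header value illustrative):
    #
    #   r.headers['link'] == '<http://example.com/?page=2>; rel="next"'
    #   r.links['next']   == {'url': 'http://example.com/?page=2', 'rel': 'next'}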
def raise_for_status(self):
"""Raises stored :class:`HTTPError`, if one occurred."""
http_error_msg = ''
if 400 <= self.status_code < 500:
http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)
elif 500 <= self.status_code < 600:
http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url)
if http_error_msg:
raise HTTPError(http_error_msg, response=self)
def close(self):
"""Releases the connection back to the pool. Once this method has been
called the underlying ``raw`` object must not be accessed again.
*Note: Should not normally need to be called explicitly.*
"""
if not self._content_consumed:
return self.raw.close()
return self.raw.release_conn()
|
smallyear/linuxLearn
|
refs/heads/master
|
salt/salt/modules/linux_acl.py
|
1
|
# -*- coding: utf-8 -*-
'''
Support for Linux File Access Control Lists
'''
from __future__ import absolute_import
# Import salt libs
import salt.utils
from salt.exceptions import CommandExecutionError
# Define the module's virtual name
__virtualname__ = 'acl'
def __virtual__():
'''
Only load the module if getfacl is installed
'''
if salt.utils.which('getfacl'):
return __virtualname__
return False
def version():
'''
Return facl version from getfacl --version
CLI Example:
.. code-block:: bash
salt '*' acl.version
'''
cmd = 'getfacl --version'
out = __salt__['cmd.run'](cmd).splitlines()
ret = out[0].split()
return ret[1].strip()
def _raise_on_no_files(*args):
if len(args) == 0:
raise CommandExecutionError('You need to specify at least one file or directory to work with!')
def getfacl(*args, **kwargs):
'''
Return (extremely verbose) map of FACLs on specified file(s)
CLI Examples:
.. code-block:: bash
salt '*' acl.getfacl /tmp/house/kitchen
salt '*' acl.getfacl /tmp/house/kitchen /tmp/house/livingroom
salt '*' acl.getfacl /tmp/house/kitchen /tmp/house/livingroom recursive=True
'''
recursive = kwargs.pop('recursive', False)
_raise_on_no_files(*args)
ret = {}
cmd = 'getfacl --absolute-names'
if recursive:
cmd += ' -R'
for dentry in args:
cmd += ' {0}'.format(dentry)
out = __salt__['cmd.run'](cmd, python_shell=False).splitlines()
dentry = ''
for line in out:
if not line:
continue
elif line.startswith('getfacl'):
continue
elif line.startswith('#'):
comps = line.replace('# ', '').split(': ')
if comps[0] == 'file':
dentry = comps[1]
ret[dentry] = {'comment': {},
'user': [],
'group': []}
ret[dentry]['comment'][comps[0]] = comps[1]
if comps[0] == 'flags':
flags = list(comps[1])
if flags[0] == 's':
ret[dentry]['suid'] = True
if flags[1] == 's':
ret[dentry]['sgid'] = True
if flags[2] == 't':
ret[dentry]['sticky'] = True
else:
vals = _parse_acl(acl=line,
user=ret[dentry]['comment']['owner'],
group=ret[dentry]['comment']['group'])
acl_type = vals['type']
del vals['type']
for entity in ('user', 'group'):
if entity in vals:
usergroup = vals[entity]
del vals[entity]
if acl_type == 'acl':
ret[dentry][entity].append({usergroup: vals})
elif acl_type == 'default':
if 'defaults' not in ret[dentry]:
ret[dentry]['defaults'] = {}
if entity not in ret[dentry]['defaults']:
ret[dentry]['defaults'][entity] = []
ret[dentry]['defaults'][entity].append({usergroup: vals})
for entity in ('other', 'mask'):
if entity in vals:
del vals[entity]
if acl_type == 'acl':
ret[dentry][entity] = [{"": vals}]
elif acl_type == 'default':
if 'defaults' not in ret[dentry]:
ret[dentry]['defaults'] = {}
ret[dentry]['defaults'][entity] = [{"": vals}]
return ret
def _parse_acl(acl, user, group):
'''
Parse a single ACL rule
'''
comps = acl.split(':')
vals = {}
# What type of rule is this?
vals['type'] = 'acl'
if comps[0] == 'default':
vals['type'] = 'default'
comps.pop(0)
# If a user is not specified, use the owner of the file
if comps[0] == 'user' and not comps[1]:
comps[1] = user
elif comps[0] == 'group' and not comps[1]:
comps[1] = group
vals[comps[0]] = comps[1]
# Set the permissions fields
octal = 0
vals['permissions'] = {}
if 'r' in comps[2]:
octal += 4
vals['permissions']['read'] = True
else:
vals['permissions']['read'] = False
if 'w' in comps[2]:
octal += 2
vals['permissions']['write'] = True
else:
vals['permissions']['write'] = False
if 'x' in comps[2]:
octal += 1
vals['permissions']['execute'] = True
else:
vals['permissions']['execute'] = False
vals['octal'] = octal
return vals
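# A minimal sketch of what _parse_acl returns for one getfacl line (values
# assumed from the parsing logic above):
#
#   _parse_acl('user:myuser:r-x', user='root', group='root')
#   -> {'type': 'acl', 'user': 'myuser', 'octal': 5,
#       'permissions': {'read': True, 'write': False, 'execute': True}}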
def wipefacls(*args, **kwargs):
'''
Remove all FACLs from the specified file(s)
CLI Examples:
.. code-block:: bash
salt '*' acl.wipefacls /tmp/house/kitchen
salt '*' acl.wipefacls /tmp/house/kitchen /tmp/house/livingroom
salt '*' acl.wipefacls /tmp/house/kitchen /tmp/house/livingroom recursive=True
'''
recursive = kwargs.pop('recursive', False)
_raise_on_no_files(*args)
cmd = 'setfacl -b'
if recursive:
cmd += ' -R'
for dentry in args:
cmd += ' {0}'.format(dentry)
__salt__['cmd.run'](cmd, python_shell=False)
return True
def _acl_prefix(acl_type):
prefix = ''
if acl_type.startswith('d'):
prefix = 'd:'
acl_type = acl_type.replace('default:', '')
acl_type = acl_type.replace('d:', '')
if acl_type == 'user' or acl_type == 'u':
prefix += 'u'
elif acl_type == 'group' or acl_type == 'g':
prefix += 'g'
elif acl_type == 'mask' or acl_type == 'm':
prefix += 'm'
return prefix
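# A quick sketch of the prefix mapping above:
#
#   _acl_prefix('user')          -> 'u'
#   _acl_prefix('default:group') -> 'd:g'
#   _acl_prefix('d:u')           -> 'd:u'
#   _acl_prefix('m')             -> 'm'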
def modfacl(acl_type, acl_name='', perms='', *args, **kwargs):
'''
Add or modify a FACL for the specified file(s)
CLI Examples:
.. code-block:: bash
salt '*' acl.modfacl user myuser rwx /tmp/house/kitchen
salt '*' acl.modfacl default:group mygroup rx /tmp/house/kitchen
salt '*' acl.modfacl d:u myuser 7 /tmp/house/kitchen
salt '*' acl.modfacl g mygroup 0 /tmp/house/kitchen /tmp/house/livingroom
salt '*' acl.modfacl user myuser rwx /tmp/house/kitchen recursive=True
'''
recursive = kwargs.pop('recursive', False)
_raise_on_no_files(*args)
cmd = 'setfacl'
if recursive:
cmd += ' -R' # -R must come first as -m needs the acl_* arguments that come later
cmd += ' -m'
cmd = '{0} {1}:{2}:{3}'.format(cmd, _acl_prefix(acl_type), acl_name, perms)
for dentry in args:
cmd += ' {0}'.format(dentry)
__salt__['cmd.run'](cmd, python_shell=False)
return True
def delfacl(acl_type, acl_name='', *args, **kwargs):
'''
Remove specific FACL from the specified file(s)
CLI Examples:
.. code-block:: bash
salt '*' acl.delfacl user myuser /tmp/house/kitchen
salt '*' acl.delfacl default:group mygroup /tmp/house/kitchen
salt '*' acl.delfacl d:u myuser /tmp/house/kitchen
salt '*' acl.delfacl g myuser /tmp/house/kitchen /tmp/house/livingroom
salt '*' acl.delfacl user myuser /tmp/house/kitchen recursive=True
'''
recursive = kwargs.pop('recursive', False)
_raise_on_no_files(*args)
cmd = 'setfacl -x'
if recursive:
cmd += ' -R'
cmd = '{0} {1}:{2}'.format(cmd, _acl_prefix(acl_type), acl_name)
for dentry in args:
cmd += ' {0}'.format(dentry)
__salt__['cmd.run'](cmd, python_shell=False)
return True
|
muccg/rdrf
|
refs/heads/next_release
|
rdrf/registry/genetic/migrations/0001_initial.py
|
1
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Gene',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('symbol', models.TextField()),
('hgnc_id', models.TextField(verbose_name='HGNC ID')),
('name', models.TextField()),
('status', models.TextField()),
('chromosome', models.TextField()),
('accession_numbers', models.TextField()),
('refseq_id', models.TextField(verbose_name='RefSeq ID')),
],
options={
'ordering': ['symbol'],
},
),
migrations.CreateModel(
name='Laboratory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=256)),
('address', models.TextField(max_length=200, blank=True)),
('contact_name', models.CharField(max_length=200, blank=True)),
('contact_email', models.EmailField(max_length=254, blank=True)),
('contact_phone', models.CharField(max_length=50, blank=True)),
],
options={
'verbose_name_plural': 'laboratories',
},
),
migrations.CreateModel(
name='Technique',
fields=[
('name', models.CharField(max_length=50, serialize=False, primary_key=True)),
],
),
]
|
tblume/systemd-upstream
|
refs/heads/master
|
test/sysv-generator-test.py
|
9
|
#!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1+
#
# systemd-sysv-generator integration test
#
# © 2015 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@ubuntu.com>
import collections
import os
import shutil
import subprocess
import sys
import tempfile
import unittest
from configparser import RawConfigParser
from glob import glob
sysv_generator = './systemd-sysv-generator'
class MultiDict(collections.OrderedDict):
def __setitem__(self, key, value):
if isinstance(value, list) and key in self:
self[key].extend(value)
else:
super(MultiDict, self).__setitem__(key, value)
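# A small sketch of the merge behaviour (keys illustrative): re-assigning a
# list to an existing key extends it rather than replacing it, so repeated
# unit-file directives survive RawConfigParser's duplicate-key handling.
#
#   d = MultiDict()
#   d['After'] = ['a.service']
#   d['After'] = ['b.service']
#   d['After']  ->  ['a.service', 'b.service']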
class SysvGeneratorTest(unittest.TestCase):
def setUp(self):
self.workdir = tempfile.mkdtemp(prefix='sysv-gen-test.')
self.init_d_dir = os.path.join(self.workdir, 'init.d')
os.mkdir(self.init_d_dir)
self.rcnd_dir = self.workdir
self.unit_dir = os.path.join(self.workdir, 'systemd')
os.mkdir(self.unit_dir)
self.out_dir = os.path.join(self.workdir, 'output')
os.mkdir(self.out_dir)
def tearDown(self):
shutil.rmtree(self.workdir)
#
# Helper methods
#
def run_generator(self, expect_error=False):
'''Run sysv-generator.
Fail if stderr contains any "Fail", unless expect_error is True.
        Return (stderr, filename -> ConfigParser) pair with output to stderr and
parsed generated units.
'''
env = os.environ.copy()
env['SYSTEMD_LOG_LEVEL'] = 'debug'
env['SYSTEMD_LOG_TARGET'] = 'console'
env['SYSTEMD_SYSVINIT_PATH'] = self.init_d_dir
env['SYSTEMD_SYSVRCND_PATH'] = self.rcnd_dir
env['SYSTEMD_UNIT_PATH'] = self.unit_dir
gen = subprocess.Popen(
[sysv_generator, 'ignored', 'ignored', self.out_dir],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=True, env=env)
(out, err) = gen.communicate()
if not expect_error:
self.assertFalse('Fail' in err, err)
self.assertEqual(gen.returncode, 0, err)
results = {}
for service in glob(self.out_dir + '/*.service'):
if os.path.islink(service):
continue
try:
# for python3 we need here strict=False to parse multiple
# lines with the same key
cp = RawConfigParser(dict_type=MultiDict, strict=False)
except TypeError:
# RawConfigParser in python2 does not have the strict option
# but it allows multiple lines with the same key by default
cp = RawConfigParser(dict_type=MultiDict)
cp.optionxform = lambda o: o # don't lower-case option names
with open(service) as f:
cp.readfp(f)
results[os.path.basename(service)] = cp
return (err, results)
def add_sysv(self, fname, keys, enable=False, prio=1):
'''Create a SysV init script with the given keys in the LSB header
There are sensible default values for all fields.
If enable is True, links will be created in the rcN.d dirs. In that
case, the priority can be given with "prio" (default to 1).
Return path of generated script.
'''
name_without_sh = fname.endswith('.sh') and fname[:-3] or fname
keys.setdefault('Provides', name_without_sh)
keys.setdefault('Required-Start', '$local_fs')
keys.setdefault('Required-Stop', keys['Required-Start'])
keys.setdefault('Default-Start', '2 3 4 5')
keys.setdefault('Default-Stop', '0 1 6')
keys.setdefault('Short-Description', 'test {} service'.format(name_without_sh))
keys.setdefault('Description', 'long description for test {} service'.format(name_without_sh))
script = os.path.join(self.init_d_dir, fname)
with open(script, 'w') as f:
f.write('#!/bin/init-d-interpreter\n### BEGIN INIT INFO\n')
for k, v in keys.items():
if v is not None:
f.write('#{:>20} {}\n'.format(k + ':', v))
f.write('### END INIT INFO\ncode --goes here\n')
os.chmod(script, 0o755)
if enable:
def make_link(prefix, runlevel):
d = os.path.join(self.rcnd_dir, 'rc{}.d'.format(runlevel))
if not os.path.isdir(d):
os.mkdir(d)
os.symlink('../init.d/' + fname, os.path.join(d, prefix + fname))
for rl in keys['Default-Start'].split():
make_link('S%02i' % prio, rl)
for rl in keys['Default-Stop'].split():
make_link('K%02i' % (99 - prio), rl)
return script
def assert_enabled(self, unit, targets):
'''assert that a unit is enabled in precisely the given targets'''
all_targets = ['multi-user', 'graphical']
# should be enabled
for target in all_targets:
link = os.path.join(self.out_dir, '{}.target.wants'.format(target), unit)
if target in targets:
unit_file = os.readlink(link)
# os.path.exists() will fail on a dangling symlink
self.assertTrue(os.path.exists(link))
self.assertEqual(os.path.basename(unit_file), unit)
else:
self.assertFalse(os.path.exists(link),
'{} unexpectedly exists'.format(link))
#
# test cases
#
def test_nothing(self):
'''no input files'''
results = self.run_generator()[1]
self.assertEqual(results, {})
self.assertEqual(os.listdir(self.out_dir), [])
def test_simple_disabled(self):
'''simple service without dependencies, disabled'''
self.add_sysv('foo', {}, enable=False)
err, results = self.run_generator()
self.assertEqual(len(results), 1)
# no enablement links or other stuff
self.assertEqual(os.listdir(self.out_dir), ['foo.service'])
s = results['foo.service']
self.assertEqual(s.sections(), ['Unit', 'Service'])
self.assertEqual(s.get('Unit', 'Description'), 'LSB: test foo service')
# $local_fs does not need translation, don't expect any dependency
# fields here
self.assertEqual(set(s.options('Unit')),
set(['Documentation', 'SourcePath', 'Description']))
self.assertEqual(s.get('Service', 'Type'), 'forking')
init_script = os.path.join(self.init_d_dir, 'foo')
self.assertEqual(s.get('Service', 'ExecStart'),
'{} start'.format(init_script))
self.assertEqual(s.get('Service', 'ExecStop'),
'{} stop'.format(init_script))
self.assertNotIn('Overwriting', err)
def test_simple_enabled_all(self):
'''simple service without dependencies, enabled in all runlevels'''
self.add_sysv('foo', {}, enable=True)
err, results = self.run_generator()
self.assertEqual(list(results), ['foo.service'])
self.assert_enabled('foo.service', ['multi-user', 'graphical'])
self.assertNotIn('Overwriting', err)
def test_simple_escaped(self):
'''simple service without dependencies, that requires escaping the name'''
self.add_sysv('foo+', {})
self.add_sysv('foo-admin', {})
err, results = self.run_generator()
self.assertEqual(set(results), {'foo-admin.service', 'foo\\x2b.service'})
self.assertNotIn('Overwriting', err)
def test_simple_enabled_some(self):
'''simple service without dependencies, enabled in some runlevels'''
self.add_sysv('foo', {'Default-Start': '2 4'}, enable=True)
err, results = self.run_generator()
self.assertEqual(list(results), ['foo.service'])
self.assert_enabled('foo.service', ['multi-user'])
def test_lsb_macro_dep_single(self):
'''single LSB macro dependency: $network'''
self.add_sysv('foo', {'Required-Start': '$network'})
s = self.run_generator()[1]['foo.service']
self.assertEqual(set(s.options('Unit')),
set(['Documentation', 'SourcePath', 'Description', 'After', 'Wants']))
self.assertEqual(s.get('Unit', 'After'), 'network-online.target')
self.assertEqual(s.get('Unit', 'Wants'), 'network-online.target')
def test_lsb_macro_dep_multi(self):
'''multiple LSB macro dependencies'''
self.add_sysv('foo', {'Required-Start': '$named $portmap'})
s = self.run_generator()[1]['foo.service']
self.assertEqual(set(s.options('Unit')),
set(['Documentation', 'SourcePath', 'Description', 'After']))
self.assertEqual(s.get('Unit', 'After').split(), ['nss-lookup.target', 'rpcbind.target'])
def test_lsb_deps(self):
'''LSB header dependencies to other services'''
# also give symlink priorities here; they should be ignored
self.add_sysv('foo', {'Required-Start': 'must1 must2',
'Should-Start': 'may1 ne_may2'},
enable=True, prio=40)
self.add_sysv('must1', {}, enable=True, prio=10)
self.add_sysv('must2', {}, enable=True, prio=15)
self.add_sysv('may1', {}, enable=True, prio=20)
# do not create ne_may2
err, results = self.run_generator()
self.assertEqual(sorted(results),
['foo.service', 'may1.service', 'must1.service', 'must2.service'])
# foo should depend on all of them
self.assertEqual(sorted(results['foo.service'].get('Unit', 'After').split()),
['may1.service', 'must1.service', 'must2.service', 'ne_may2.service'])
# other services should not depend on each other
self.assertFalse(results['must1.service'].has_option('Unit', 'After'))
self.assertFalse(results['must2.service'].has_option('Unit', 'After'))
self.assertFalse(results['may1.service'].has_option('Unit', 'After'))
def test_symlink_prio_deps(self):
'''script without LSB headers use rcN.d priority'''
# create two init.d scripts without LSB header and enable them with
# startup priorities
for prio, name in [(10, 'provider'), (15, 'consumer')]:
with open(os.path.join(self.init_d_dir, name), 'w') as f:
f.write('#!/bin/init-d-interpreter\ncode --goes here\n')
os.fchmod(f.fileno(), 0o755)
d = os.path.join(self.rcnd_dir, 'rc2.d')
if not os.path.isdir(d):
os.mkdir(d)
os.symlink('../init.d/' + name, os.path.join(d, 'S{:>2}{}'.format(prio, name)))
err, results = self.run_generator()
self.assertEqual(sorted(results), ['consumer.service', 'provider.service'])
self.assertFalse(results['provider.service'].has_option('Unit', 'After'))
self.assertEqual(results['consumer.service'].get('Unit', 'After'),
'provider.service')
def test_multiple_provides(self):
'''multiple Provides: names'''
self.add_sysv('foo', {'Provides': 'foo bar baz'})
err, results = self.run_generator()
self.assertEqual(list(results), ['foo.service'])
self.assertEqual(set(results['foo.service'].options('Unit')),
set(['Documentation', 'SourcePath', 'Description']))
# should create symlinks for the alternative names
for f in ['bar.service', 'baz.service']:
self.assertEqual(os.readlink(os.path.join(self.out_dir, f)),
'foo.service')
self.assertNotIn('Overwriting', err)
def test_provides_escaped(self):
'''a script that Provides: a name that requires escaping'''
self.add_sysv('foo', {'Provides': 'foo foo+'})
err, results = self.run_generator()
self.assertEqual(list(results), ['foo.service'])
self.assertEqual(os.readlink(os.path.join(self.out_dir, 'foo\\x2b.service')),
'foo.service')
self.assertNotIn('Overwriting', err)
def test_same_provides_in_multiple_scripts(self):
'''multiple init.d scripts provide the same name'''
self.add_sysv('foo', {'Provides': 'foo common'}, enable=True, prio=1)
self.add_sysv('bar', {'Provides': 'bar common'}, enable=True, prio=2)
err, results = self.run_generator()
self.assertEqual(sorted(results), ['bar.service', 'foo.service'])
# should create symlink for the alternative name for either unit
self.assertIn(os.readlink(os.path.join(self.out_dir, 'common.service')),
['foo.service', 'bar.service'])
def test_provide_other_script(self):
'''init.d scripts provides the name of another init.d script'''
self.add_sysv('foo', {'Provides': 'foo bar'}, enable=True)
self.add_sysv('bar', {'Provides': 'bar'}, enable=True)
err, results = self.run_generator()
self.assertEqual(sorted(results), ['bar.service', 'foo.service'])
# we do expect an overwrite here, bar.service should overwrite the
# alias link from foo.service
self.assertIn('Overwriting', err)
def test_nonexecutable_script(self):
'''ignores non-executable init.d script'''
os.chmod(self.add_sysv('foo', {}), 0o644)
err, results = self.run_generator()
self.assertEqual(results, {})
def test_sh_suffix(self):
'''init.d script with .sh suffix'''
self.add_sysv('foo.sh', {}, enable=True)
err, results = self.run_generator()
s = results['foo.service']
self.assertEqual(s.sections(), ['Unit', 'Service'])
# should not have a .sh
self.assertEqual(s.get('Unit', 'Description'), 'LSB: test foo service')
# calls correct script with .sh
init_script = os.path.join(self.init_d_dir, 'foo.sh')
self.assertEqual(s.get('Service', 'ExecStart'),
'{} start'.format(init_script))
self.assertEqual(s.get('Service', 'ExecStop'),
'{} stop'.format(init_script))
self.assert_enabled('foo.service', ['multi-user', 'graphical'])
def test_sh_suffix_with_provides(self):
'''init.d script with .sh suffix and Provides:'''
self.add_sysv('foo.sh', {'Provides': 'foo bar'})
err, results = self.run_generator()
# ensure we don't try to create a symlink to itself
self.assertNotIn('itself', err)
self.assertEqual(list(results), ['foo.service'])
self.assertEqual(results['foo.service'].get('Unit', 'Description'),
'LSB: test foo service')
# should create symlink for the alternative name
self.assertEqual(os.readlink(os.path.join(self.out_dir, 'bar.service')),
'foo.service')
def test_hidden_files(self):
'''init.d script with hidden file suffix'''
script = self.add_sysv('foo', {}, enable=True)
# backup files (not enabled in rcN.d/)
shutil.copy(script, script + '.dpkg-new')
shutil.copy(script, script + '.dpkg-dist')
shutil.copy(script, script + '.swp')
shutil.copy(script, script + '.rpmsave')
err, results = self.run_generator()
self.assertEqual(list(results), ['foo.service'])
self.assert_enabled('foo.service', ['multi-user', 'graphical'])
def test_backup_file(self):
'''init.d script with backup file'''
script = self.add_sysv('foo', {}, enable=True)
# backup files (not enabled in rcN.d/)
shutil.copy(script, script + '.bak')
shutil.copy(script, script + '.old')
shutil.copy(script, script + '.tmp')
shutil.copy(script, script + '.new')
err, results = self.run_generator()
print(err)
self.assertEqual(sorted(results), ['foo.service', 'foo.tmp.service'])
# ensure we don't try to create a symlink to itself
self.assertNotIn('itself', err)
self.assert_enabled('foo.service', ['multi-user', 'graphical'])
self.assert_enabled('foo.bak.service', [])
self.assert_enabled('foo.old.service', [])
def test_existing_native_unit(self):
'''existing native unit'''
with open(os.path.join(self.unit_dir, 'foo.service'), 'w') as f:
f.write('[Unit]\n')
self.add_sysv('foo.sh', {'Provides': 'foo bar'}, enable=True)
err, results = self.run_generator()
self.assertEqual(list(results), [])
# no enablement or alias links, as native unit is disabled
self.assertEqual(os.listdir(self.out_dir), [])
if __name__ == '__main__':
unittest.main(testRunner=unittest.TextTestRunner(stream=sys.stdout, verbosity=2))
|
gnuhub/intellij-community
|
refs/heads/master
|
python/testData/copyPaste/singleLine/Indent23.dst.py
|
664
|
class C:
def foo(self):
<caret> y = 2
|
scality/cinder
|
refs/heads/master
|
cinder/openstack/common/scheduler/base_weight.py
|
18
|
# Copyright (c) 2011-2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Pluggable Weighing support
"""
import abc
import six
from cinder.openstack.common.scheduler import base_handler
def normalize(weight_list, minval=None, maxval=None):
"""Normalize the values in a list between 0 and 1.0.
    Normalization is performed with respect to the lowest and highest values
    present in weight_list. If the minval and/or maxval parameters are set,
    these values are used instead of the minimum and maximum from the list.
If all the values are equal, they are normalized to 0.
"""
if not weight_list:
return ()
if maxval is None:
maxval = max(weight_list)
if minval is None:
minval = min(weight_list)
maxval = float(maxval)
minval = float(minval)
if minval == maxval:
return [0] * len(weight_list)
range_ = maxval - minval
return ((i - minval) / range_ for i in weight_list)
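# A worked sketch of the normalization above (weights illustrative):
#
#   list(normalize([1.0, 2.0, 3.0]))                     -> [0.0, 0.5, 1.0]
#   list(normalize([1.0, 2.0, 3.0], minval=0, maxval=4)) -> [0.25, 0.5, 0.75]
#   normalize([2.0, 2.0])                                -> [0, 0]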
class WeighedObject(object):
"""Object with weight information."""
def __init__(self, obj, weight):
self.obj = obj
self.weight = weight
def __repr__(self):
return "<WeighedObject '%s': %s>" % (self.obj, self.weight)
@six.add_metaclass(abc.ABCMeta)
class BaseWeigher(object):
"""Base class for pluggable weighers.
The attributes maxval and minval can be specified to set up the maximum
and minimum values for the weighed objects. These values will then be
taken into account in the normalization step, instead of taking the values
from the calculated weights.
"""
minval = None
maxval = None
def weight_multiplier(self):
"""How weighted this weigher should be.
Override this method in a subclass, so that the returned value is
read from a configuration option to permit operators specify a
multiplier for the weigher.
"""
return 1.0
@abc.abstractmethod
def _weigh_object(self, obj, weight_properties):
"""Override in a subclass to specify a weight for a specific
object.
"""
def weigh_objects(self, weighed_obj_list, weight_properties):
"""Weigh multiple objects.
Override in a subclass if you need access to all objects in order
to calculate weights. Do not modify the weight of an object here,
just return a list of weights.
"""
# Calculate the weights
weights = []
for obj in weighed_obj_list:
weight = self._weigh_object(obj.obj, weight_properties)
            # Record the min and max values if they are None. If they are
            # anything but None, we assume that the weigher has set them.
if self.minval is None:
self.minval = weight
if self.maxval is None:
self.maxval = weight
if weight < self.minval:
self.minval = weight
elif weight > self.maxval:
self.maxval = weight
weights.append(weight)
return weights
class BaseWeightHandler(base_handler.BaseHandler):
object_class = WeighedObject
def get_weighed_objects(self, weigher_classes, obj_list,
weighing_properties):
"""Return a sorted (descending), normalized list of WeighedObjects."""
if not obj_list:
return []
weighed_objs = [self.object_class(obj, 0.0) for obj in obj_list]
for weigher_cls in weigher_classes:
weigher = weigher_cls()
weights = weigher.weigh_objects(weighed_objs, weighing_properties)
# Normalize the weights
weights = normalize(weights,
minval=weigher.minval,
maxval=weigher.maxval)
for i, weight in enumerate(weights):
obj = weighed_objs[i]
obj.weight += weigher.weight_multiplier() * weight
return sorted(weighed_objs, key=lambda x: x.weight, reverse=True)
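# A hedged end-to-end sketch (the weigher class is illustrative, not part
# of cinder): a subclass only needs _weigh_object; the handler then
# normalizes each weigher's raw scores and sums them per object.
#
#   class CapacityWeigher(BaseWeigher):
#       def _weigh_object(self, host, weight_properties):
#           return host['free_gb']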
|
luoxsbupt/ibus-pinyin
|
refs/heads/master
|
scripts/create_unique_index.py
|
4
|
import sqlite3
con2 = sqlite3.connect("py-new.db")
con2.execute ("PRAGMA synchronous = NORMAL;")
con2.execute ("PRAGMA temp_store = MEMORY;")
con2.execute("CREATE UNIQUE INDEX IF NOT EXISTS index_0_0 ON py_phrase_0(s0, y0, phrase)")
print "py_phrase_%d done" % 0
con2.execute("CREATE UNIQUE INDEX IF NOT EXISTS index_1_0 ON py_phrase_1(s0, y0, s1, y1, phrase)")
con2.execute("CREATE INDEX IF NOT EXISTS index_1_1 ON py_phrase_1(s0, s1, y1)")
print "py_phrase_%d done" % 1
for i in xrange(2, 16):
sql = "CREATE UNIQUE INDEX IF NOT EXISTS index_%d_0 ON py_phrase_%d (" % (i, i)
sql = sql + "s0,y0"
for j in xrange(1, i + 1):
sql = sql + ",s%d,y%d" % (j, j)
sql = sql + ", phrase)"
print sql
con2.execute(sql)
con2.execute("CREATE INDEX IF NOT EXISTS index_%d_1 ON py_phrase_%d(s0, s1, s2, y2)" % (i, i))
print "py_phrase_%d done" % i
# con2.execute("vacuum")
con2.commit()
|
melgor/melgor.github.io
|
refs/heads/master
|
node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/filters/__init__.py
|
196
|
# -*- coding: utf-8 -*-
"""
pygments.filters
~~~~~~~~~~~~~~~~
Module containing filter lookup functions and default
filters.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
string_to_tokentype
from pygments.filter import Filter
from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
get_choice_opt, ClassNotFound, OptionError
from pygments.plugin import find_plugin_filters
def find_filter_class(filtername):
"""
Lookup a filter by name. Return None if not found.
"""
if filtername in FILTERS:
return FILTERS[filtername]
for name, cls in find_plugin_filters():
if name == filtername:
return cls
return None
def get_filter_by_name(filtername, **options):
"""
Return an instantiated filter. Options are passed to the filter
initializer if wanted. Raise a ClassNotFound if not found.
"""
cls = find_filter_class(filtername)
if cls:
return cls(**options)
else:
raise ClassNotFound('filter %r not found' % filtername)
def get_all_filters():
"""
Return a generator of all filter names.
"""
for name in FILTERS:
yield name
for name, _ in find_plugin_filters():
yield name
def _replace_special(ttype, value, regex, specialttype,
replacefunc=lambda x: x):
last = 0
for match in regex.finditer(value):
start, end = match.start(), match.end()
if start != last:
yield ttype, value[last:start]
yield specialttype, replacefunc(value[start:end])
last = end
if last != len(value):
yield ttype, value[last:]
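# A minimal sketch of the splitting helper above (token types and pattern
# illustrative):
#
#   list(_replace_special(Comment, 'see TODO here', re.compile('TODO'),
#                         Comment.Special))
#   -> [(Comment, 'see '), (Comment.Special, 'TODO'), (Comment, ' here')]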
class CodeTagFilter(Filter):
"""
Highlight special code tags in comments and docstrings.
Options accepted:
`codetags` : list of strings
A list of strings that are flagged as code tags. The default is to
highlight ``XXX``, ``TODO``, ``BUG`` and ``NOTE``.
"""
def __init__(self, **options):
Filter.__init__(self, **options)
tags = get_list_opt(options, 'codetags',
['XXX', 'TODO', 'BUG', 'NOTE'])
self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
re.escape(tag) for tag in tags if tag
]))
def filter(self, lexer, stream):
regex = self.tag_re
for ttype, value in stream:
if ttype in String.Doc or \
ttype in Comment and \
ttype not in Comment.Preproc:
for sttype, svalue in _replace_special(ttype, value, regex,
Comment.Special):
yield sttype, svalue
else:
yield ttype, value
class KeywordCaseFilter(Filter):
"""
Convert keywords to lowercase or uppercase or capitalize them, which
means first letter uppercase, rest lowercase.
This can be useful e.g. if you highlight Pascal code and want to adapt the
code to your styleguide.
Options accepted:
`case` : string
The casing to convert keywords to. Must be one of ``'lower'``,
``'upper'`` or ``'capitalize'``. The default is ``'lower'``.
"""
def __init__(self, **options):
Filter.__init__(self, **options)
case = get_choice_opt(options, 'case', ['lower', 'upper', 'capitalize'], 'lower')
self.convert = getattr(unicode, case)
def filter(self, lexer, stream):
for ttype, value in stream:
if ttype in Keyword:
yield ttype, self.convert(value)
else:
yield ttype, value
class NameHighlightFilter(Filter):
"""
Highlight a normal Name (and Name.*) token with a different token type.
Example::
filter = NameHighlightFilter(
names=['foo', 'bar', 'baz'],
tokentype=Name.Function,
)
This would highlight the names "foo", "bar" and "baz"
as functions. `Name.Function` is the default token type.
Options accepted:
`names` : list of strings
A list of names that should be given the different token type.
There is no default.
`tokentype` : TokenType or string
A token type or a string containing a token type name that is
used for highlighting the strings in `names`. The default is
`Name.Function`.
"""
def __init__(self, **options):
Filter.__init__(self, **options)
self.names = set(get_list_opt(options, 'names', []))
tokentype = options.get('tokentype')
if tokentype:
self.tokentype = string_to_tokentype(tokentype)
else:
self.tokentype = Name.Function
def filter(self, lexer, stream):
for ttype, value in stream:
if ttype in Name and value in self.names:
yield self.tokentype, value
else:
yield ttype, value
class ErrorToken(Exception):
pass
class RaiseOnErrorTokenFilter(Filter):
"""
Raise an exception when the lexer generates an error token.
Options accepted:
`excclass` : Exception class
The exception class to raise.
The default is `pygments.filters.ErrorToken`.
*New in Pygments 0.8.*
"""
def __init__(self, **options):
Filter.__init__(self, **options)
self.exception = options.get('excclass', ErrorToken)
try:
# issubclass() will raise TypeError if first argument is not a class
if not issubclass(self.exception, Exception):
raise TypeError
except TypeError:
raise OptionError('excclass option is not an exception class')
def filter(self, lexer, stream):
for ttype, value in stream:
if ttype is Error:
raise self.exception(value)
yield ttype, value
class VisibleWhitespaceFilter(Filter):
"""
Convert tabs, newlines and/or spaces to visible characters.
Options accepted:
`spaces` : string or bool
        If this is a one-character string, spaces will be replaced by this string.
If it is another true value, spaces will be replaced by ``·`` (unicode
MIDDLE DOT). If it is a false value, spaces will not be replaced. The
default is ``False``.
`tabs` : string or bool
The same as for `spaces`, but the default replacement character is ``»``
(unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
is ``False``. Note: this will not work if the `tabsize` option for the
lexer is nonzero, as tabs will already have been expanded then.
`tabsize` : int
If tabs are to be replaced by this filter (see the `tabs` option), this
is the total number of characters that a tab should be expanded to.
The default is ``8``.
`newlines` : string or bool
The same as for `spaces`, but the default replacement character is ``¶``
(unicode PILCROW SIGN). The default value is ``False``.
`wstokentype` : bool
If true, give whitespace the special `Whitespace` token type. This allows
styling the visible whitespace differently (e.g. greyed out), but it can
disrupt background colors. The default is ``True``.
*New in Pygments 0.8.*
"""
def __init__(self, **options):
Filter.__init__(self, **options)
for name, default in {'spaces': u'·', 'tabs': u'»', 'newlines': u'¶'}.items():
opt = options.get(name, False)
if isinstance(opt, basestring) and len(opt) == 1:
setattr(self, name, opt)
else:
setattr(self, name, (opt and default or ''))
tabsize = get_int_opt(options, 'tabsize', 8)
if self.tabs:
self.tabs += ' '*(tabsize-1)
if self.newlines:
self.newlines += '\n'
self.wstt = get_bool_opt(options, 'wstokentype', True)
def filter(self, lexer, stream):
if self.wstt:
spaces = self.spaces or ' '
tabs = self.tabs or '\t'
newlines = self.newlines or '\n'
regex = re.compile(r'\s')
def replacefunc(wschar):
if wschar == ' ':
return spaces
elif wschar == '\t':
return tabs
elif wschar == '\n':
return newlines
return wschar
for ttype, value in stream:
for sttype, svalue in _replace_special(ttype, value, regex,
Whitespace, replacefunc):
yield sttype, svalue
else:
spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
# simpler processing
for ttype, value in stream:
if spaces:
value = value.replace(' ', spaces)
if tabs:
value = value.replace('\t', tabs)
if newlines:
value = value.replace('\n', newlines)
yield ttype, value
class GobbleFilter(Filter):
"""
Gobbles source code lines (eats initial characters).
This filter drops the first ``n`` characters off every line of code. This
may be useful when the source code fed to the lexer is indented by a fixed
amount of space that isn't desired in the output.
Options accepted:
`n` : int
The number of characters to gobble.
*New in Pygments 1.2.*
"""
def __init__(self, **options):
Filter.__init__(self, **options)
self.n = get_int_opt(options, 'n', 0)
def gobble(self, value, left):
if left < len(value):
return value[left:], 0
else:
return '', left - len(value)
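    # A quick sketch of the helper above: gobble('    x = 1', 4) returns
    # ('x = 1', 0), while gobble('ab', 4) consumes the whole string and
    # carries the 2 leftover characters into the next line as ('', 2).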
def filter(self, lexer, stream):
n = self.n
left = n # How many characters left to gobble.
for ttype, value in stream:
# Remove ``left`` tokens from first line, ``n`` from all others.
parts = value.split('\n')
(parts[0], left) = self.gobble(parts[0], left)
for i in range(1, len(parts)):
(parts[i], left) = self.gobble(parts[i], n)
value = '\n'.join(parts)
if value != '':
yield ttype, value
class TokenMergeFilter(Filter):
"""
Merges consecutive tokens with the same token type in the output stream of a
lexer.
*New in Pygments 1.2.*
"""
def __init__(self, **options):
Filter.__init__(self, **options)
def filter(self, lexer, stream):
current_type = None
current_value = None
for ttype, value in stream:
if ttype is current_type:
current_value += value
else:
if current_type is not None:
yield current_type, current_value
current_type = ttype
current_value = value
if current_type is not None:
yield current_type, current_value
FILTERS = {
'codetagify': CodeTagFilter,
'keywordcase': KeywordCaseFilter,
'highlight': NameHighlightFilter,
'raiseonerror': RaiseOnErrorTokenFilter,
'whitespace': VisibleWhitespaceFilter,
'gobble': GobbleFilter,
'tokenmerge': TokenMergeFilter,
}
|
GdZ/scriptfile
|
refs/heads/master
|
software/googleAppEngine/lib/PyAMF/pyamf/tests/adapters/test_django.py
|
25
|
# Copyright (c) The PyAMF Project.
# See LICENSE.txt for details.
"""
PyAMF Django adapter tests.
@since: 0.3.1
"""
import unittest
import sys
import os
import datetime
import pyamf
from pyamf.tests import util
try:
import django
except ImportError:
django = None
if django and django.VERSION < (1, 0):
django = None
try:
reload(settings)
except NameError:
from pyamf.tests.adapters.django_app import settings
context = None
#: django modules/functions used once bootstrapped
create_test_db = None
destroy_test_db = None
management = None
setup_test_environment = None
teardown_test_environment = None
# test app data
models = None
adapter = None
def init_django():
"""
Bootstrap Django and initialise this module
"""
global django, management, create_test_db, destroy_test_db
global setup_test_environment, teardown_test_environment
if not django:
return
from django.core import management
project_dir = management.setup_environ(settings)
sys.path.insert(0, project_dir)
try:
from django.test.utils import create_test_db, destroy_test_db
except ImportError:
from django.db import connection
create_test_db = connection.creation.create_test_db
destroy_test_db = connection.creation.destroy_test_db
from django.test.utils import setup_test_environment, teardown_test_environment
return True
def setUpModule():
"""
Called to set up the module by the test runner
"""
global context, models, adapter
context = {
'sys.path': sys.path[:],
'sys.modules': sys.modules.copy(),
'os.environ': os.environ.copy(),
}
if init_django():
from pyamf.tests.adapters.django_app.adapters import models
from pyamf.adapters import _django_db_models_base as adapter
setup_test_environment()
settings.DATABASE_NAME = create_test_db(0, True)
def tearDownModule():
# remove all the stuff that django installed
teardown_test_environment()
sys.path = context['sys.path']
util.replace_dict(context['sys.modules'], sys.modules)
util.replace_dict(context['os.environ'], os.environ)
destroy_test_db(settings.DATABASE_NAME, 2)
class BaseTestCase(unittest.TestCase):
"""
"""
def setUp(self):
if not django:
self.skipTest("'django' is not available")
class TypeMapTestCase(BaseTestCase):
"""
Tests for basic encoding functionality
"""
def test_objects_all(self):
encoder = pyamf.get_encoder(pyamf.AMF0)
encoder.writeElement(models.SimplestModel.objects.all())
self.assertEqual(encoder.stream.getvalue(), '\n\x00\x00\x00\x00')
encoder = pyamf.get_encoder(pyamf.AMF3)
encoder.writeElement(models.SimplestModel.objects.all())
self.assertEqual(encoder.stream.getvalue(), '\t\x01\x01')
def test_NOT_PROVIDED(self):
from django.db.models import fields
self.assertEqual(pyamf.encode(fields.NOT_PROVIDED, encoding=pyamf.AMF0).getvalue(),
'\x06')
encoder = pyamf.get_encoder(pyamf.AMF3)
encoder.writeElement(fields.NOT_PROVIDED)
self.assertEqual(encoder.stream.getvalue(), '\x00')
class ClassAliasTestCase(BaseTestCase):
def test_time(self):
x = models.TimeClass()
x.t = datetime.time(12, 12, 12)
x.d = datetime.date(2008, 3, 12)
x.dt = datetime.datetime(2008, 3, 12, 12, 12, 12)
alias = adapter.DjangoClassAlias(models.TimeClass, None)
attrs = alias.getEncodableAttributes(x)
self.assertEqual(attrs, {
'id': None,
'd': datetime.datetime(2008, 3, 12, 0, 0),
'dt': datetime.datetime(2008, 3, 12, 12, 12, 12),
't': datetime.datetime(1970, 1, 1, 12, 12, 12)
})
y = models.TimeClass()
alias.applyAttributes(y, {
'id': None,
'd': datetime.datetime(2008, 3, 12, 0, 0),
'dt': datetime.datetime(2008, 3, 12, 12, 12, 12),
't': datetime.datetime(1970, 1, 1, 12, 12, 12)
})
self.assertEqual(y.id, None)
self.assertEqual(y.d, datetime.date(2008, 3, 12))
self.assertEqual(y.dt, datetime.datetime(2008, 3, 12, 12, 12, 12))
self.assertEqual(y.t, datetime.time(12, 12, 12))
y = models.TimeClass()
alias.applyAttributes(y, {
'id': None,
'd': None,
'dt': None,
't': None
})
self.assertEqual(y.id, None)
self.assertEqual(y.d, None)
self.assertEqual(y.dt, None)
self.assertEqual(y.t, None)
def test_undefined(self):
from django.db import models
from django.db.models import fields
class UndefinedClass(models.Model):
pass
alias = adapter.DjangoClassAlias(UndefinedClass, None)
x = UndefinedClass()
alias.applyAttributes(x, {
'id': pyamf.Undefined
})
self.assertEqual(x.id, fields.NOT_PROVIDED)
x.id = fields.NOT_PROVIDED
attrs = alias.getEncodableAttributes(x)
self.assertEqual(attrs, {'id': pyamf.Undefined})
def test_non_field_prop(self):
from django.db import models
class Book(models.Model):
def _get_number_of_odd_pages(self):
return 234
# note the lack of a setter callable ..
numberOfOddPages = property(_get_number_of_odd_pages)
alias = adapter.DjangoClassAlias(Book, 'Book')
x = Book()
self.assertEqual(alias.getEncodableAttributes(x),
{'numberOfOddPages': 234, 'id': None})
# now we test sending the numberOfOddPages attribute
alias.applyAttributes(x, {'numberOfOddPages': 24, 'id': None})
# test it hasn't been set
self.assertEqual(x.numberOfOddPages, 234)
def test_dynamic(self):
"""
Test for dynamic property encoding.
"""
alias = adapter.DjangoClassAlias(models.SimplestModel, 'Book')
x = models.SimplestModel()
x.spam = 'eggs'
self.assertEqual(alias.getEncodableAttributes(x),
{'spam': 'eggs', 'id': None})
# now we test sending the numberOfOddPages attribute
alias.applyAttributes(x, {'spam': 'foo', 'id': None})
# test it has been set
self.assertEqual(x.spam, 'foo')
def test_properties(self):
"""
See #764
"""
from django.db import models
class Foob(models.Model):
def _get_days(self):
return 1
def _set_days(self, val):
assert 1 == val
days = property(_get_days, _set_days)
alias = adapter.DjangoClassAlias(Foob, 'Bar')
x = Foob()
self.assertEqual(x.days, 1)
self.assertEqual(alias.getEncodableAttributes(x),
{'days': 1, 'id': None})
# now we test sending the numberOfOddPages attribute
alias.applyAttributes(x, {'id': None})
class ForeignKeyTestCase(BaseTestCase):
def test_one_to_many(self):
# initialise the db ..
r = models.Reporter(first_name='John', last_name='Smith', email='john@example.com')
r.save()
self.addCleanup(r.delete)
r2 = models.Reporter(first_name='Paul', last_name='Jones', email='paul@example.com')
r2.save()
self.addCleanup(r2.delete)
a = models.Article(headline="This is a test", reporter=r)
a.save()
self.addCleanup(a.delete)
self.assertEqual(a.id, 1)
del a
a = models.Article.objects.filter(pk=1)[0]
self.assertFalse('_reporter_cache' in a.__dict__)
a.reporter
self.assertTrue('_reporter_cache' in a.__dict__)
del a
a = models.Article.objects.filter(pk=1)[0]
alias = adapter.DjangoClassAlias(models.Article, defer=True)
self.assertFalse(hasattr(alias, 'fields'))
attrs = alias.getEncodableAttributes(a)
# note that the reporter attribute does not exist.
self.assertEqual(attrs, {
'headline': u'This is a test',
'id': 1,
'publications': []
})
self.assertFalse('_reporter_cache' in a.__dict__)
self.assertEqual(pyamf.encode(a, encoding=pyamf.AMF3).getvalue(),
'\n\x0b\x01\x11headline\x06\x1dThis is a test\x05id\x04\x01'
'\x19publications\t\x01\x01\x01')
del a
# now with select_related to pull in the reporter object
a = models.Article.objects.select_related('reporter').filter(pk=1)[0]
alias = adapter.DjangoClassAlias(models.Article, defer=True)
self.assertFalse(hasattr(alias, 'fields'))
self.assertEqual(alias.getEncodableAttributes(a), {
'headline': u'This is a test',
'id': 1,
'reporter': r,
'publications': []
})
self.assertTrue('_reporter_cache' in a.__dict__)
self.assertEqual(pyamf.encode(a, encoding=pyamf.AMF3).getvalue(),
'\n\x0b\x01\x11reporter\n\x0b\x01\x15first_name\x06\tJohn\x13'
'last_name\x06\x0bSmith\x05id\x04\x01\x0bemail\x06!john'
'@example.com\x01\x11headline\x06\x1dThis is a test\x19'
'publications\t\x01\x01\n\x04\x01\x01')
def test_many_to_many(self):
# install some test data - taken from
# http://www.djangoproject.com/documentation/models/many_to_many/
p1 = models.Publication(id=None, title='The Python Journal')
p1.save()
p2 = models.Publication(id=None, title='Science News')
p2.save()
p3 = models.Publication(id=None, title='Science Weekly')
p3.save()
self.addCleanup(p1.delete)
self.addCleanup(p2.delete)
self.addCleanup(p3.delete)
# Create an Article.
a1 = models.Article(id=None, headline='Django lets you build Web apps easily')
a1.save()
self.addCleanup(a1.delete)
self.assertEqual(a1.id, 1)
# Associate the Article with a Publication.
a1.publications.add(p1)
pub_alias = adapter.DjangoClassAlias(models.Publication, None)
art_alias = adapter.DjangoClassAlias(models.Article, None)
test_publication = models.Publication.objects.filter(pk=1)[0]
test_article = models.Article.objects.filter(pk=1)[0]
attrs = pub_alias.getEncodableAttributes(test_publication)
self.assertEqual(attrs, {'id': 1, 'title': u'The Python Journal'})
attrs = art_alias.getEncodableAttributes(test_article)
self.assertEqual(attrs, {
'headline': u'Django lets you build Web apps easily',
'id': 1,
'publications': [p1]
})
x = models.Article()
art_alias.applyAttributes(x, {
'headline': u'Test',
'id': 1,
'publications': [p1]
})
self.assertEqual(x.headline, u'Test')
self.assertEqual(x.id, 1)
self.assertEqual(list(x.publications.all()), [p1])
y = models.Article()
attrs = art_alias.getDecodableAttributes(y, {
'headline': u'Django lets you build Web apps easily',
'id': 0,
'publications': []
})
self.assertEqual(attrs, {'headline': u'Django lets you build Web apps easily'})
def test_nullable_foreign_keys(self):
x = models.SimplestModel()
x.save()
self.addCleanup(x.delete)
nfk_alias = adapter.DjangoClassAlias(models.NullForeignKey, None)
bfk_alias = adapter.DjangoClassAlias(models.BlankForeignKey, None)
nfk = models.NullForeignKey()
attrs = nfk_alias.getEncodableAttributes(nfk)
self.assertEqual(attrs, {'id': None})
bfk = models.BlankForeignKey()
attrs = bfk_alias.getEncodableAttributes(bfk)
self.assertEqual(attrs, {'id': None})
def test_static_relation(self):
"""
@see: #693
"""
from pyamf import util
pyamf.register_class(models.StaticRelation)
alias = adapter.DjangoClassAlias(models.StaticRelation,
static_attrs=('gak',))
alias.compile()
self.assertTrue('gak' in alias.relations)
self.assertTrue('gak' in alias.decodable_properties)
self.assertTrue('gak' in alias.static_attrs)
x = models.StaticRelation()
# just run this to ensure that it doesn't blow up
alias.getDecodableAttributes(x, {'id': None, 'gak': 'foo'})
class I18NTestCase(BaseTestCase):
def test_encode(self):
from django.utils.translation import ugettext_lazy
self.assertEqual(pyamf.encode(ugettext_lazy('Hello')).getvalue(),
'\x06\x0bHello')
class PKTestCase(BaseTestCase):
"""
    See ticket #599. Check to make sure that Django pk fields
    are set first.
"""
def test_behaviour(self):
p = models.Publication(id=None, title='The Python Journal')
a = models.Article(id=None, headline='Django lets you build Web apps easily')
# Associate the Article with a Publication.
self.assertRaises(ValueError, lambda a, p: a.publications.add(p), a, p)
p.save()
a.save()
self.addCleanup(p.delete)
self.addCleanup(a.delete)
self.assertEqual(a.id, 1)
article_alias = adapter.DjangoClassAlias(models.Article, None)
x = models.Article()
article_alias.applyAttributes(x, {
'headline': 'Foo bar!',
'id': 1,
'publications': [p]
})
self.assertEqual(x.headline, 'Foo bar!')
self.assertEqual(x.id, 1)
self.assertEqual(list(x.publications.all()), [p])
def test_none(self):
"""
See #556. Make sure that PK fields with a value of 0 are actually set
to C{None}.
"""
alias = adapter.DjangoClassAlias(models.SimplestModel, None)
x = models.SimplestModel()
self.assertEqual(x.id, None)
alias.applyAttributes(x, {
'id': 0
})
self.assertEqual(x.id, None)
def test_no_pk(self):
"""
Ensure that Models without a primary key are correctly serialized.
See #691.
"""
instances = [models.NotSaved(name="a"), models.NotSaved(name="b")]
encoded = pyamf.encode(instances, encoding=pyamf.AMF3).getvalue()
decoded = pyamf.decode(encoded, encoding=pyamf.AMF3).next()
self.assertEqual(decoded[0]['name'], 'a')
self.assertEqual(decoded[1]['name'], 'b')
class ModelInheritanceTestCase(BaseTestCase):
"""
    Tests for U{Django model inheritance<http://docs.djangoproject.com/en/dev/topics/db/models/#model-inheritance>}
"""
def test_abstract(self):
alias = adapter.DjangoClassAlias(models.Student)
x = models.Student()
attrs = alias.getEncodableAttributes(x)
self.assertEqual(attrs, {
'age': None,
'home_group': '',
'id': None,
'name': ''
})
def test_concrete(self):
alias = adapter.DjangoClassAlias(models.Place)
x = models.Place()
attrs = alias.getEncodableAttributes(x)
self.assertEqual(attrs, {
'id': None,
'name': '',
'address': ''
})
alias = adapter.DjangoClassAlias(models.Restaurant)
x = models.Restaurant()
attrs = alias.getEncodableAttributes(x)
self.assertEqual(attrs, {
'id': None,
'name': '',
'address': '',
'serves_hot_dogs': False,
'serves_pizza': False
})
class MockFile(object):
"""
mock for L{django.core.files.base.File}
"""
def chunks(self):
return []
def __len__(self):
return 0
def read(self, n):
return ''
class FieldsTestCase(BaseTestCase):
"""
Tests for L{fields}
"""
def test_file(self):
alias = adapter.DjangoClassAlias(models.FileModel)
i = models.FileModel()
i.file.save('bar', MockFile())
self.addCleanup(i.file.delete)
i.save()
attrs = alias.getEncodableAttributes(i)
self.assertEqual(attrs, {'text': '', 'id': 1, 'file': u'file_model/bar'})
attrs = alias.getDecodableAttributes(i, attrs)
self.assertEqual(attrs, {'text': ''})
class ImageTestCase(BaseTestCase):
"""
Tests for L{fields}
"""
def setUp(self):
try:
import PIL
except ImportError:
self.skipTest("'PIL' is not available")
BaseTestCase.setUp(self)
def test_image(self):
alias = adapter.DjangoClassAlias(models.Profile)
i = models.Profile()
i.file.save('bar', MockFile())
self.addCleanup(i.file.delete)
i.save()
self.addCleanup(i.delete)
attrs = alias.getEncodableAttributes(i)
self.assertEqual(attrs, {'text': '', 'id': 1, 'file': u'profile/bar'})
attrs = alias.getDecodableAttributes(i, attrs)
self.assertEqual(attrs, {'text': ''})
class ReferenceTestCase(BaseTestCase, util.EncoderMixIn):
"""
Test case to make sure that the same object from the database is encoded
by reference.
"""
amf_type = pyamf.AMF3
def setUp(self):
BaseTestCase.setUp(self)
util.EncoderMixIn.setUp(self)
def test_not_referenced(self):
"""
Test to ensure that we observe the correct behaviour in the Django
ORM.
"""
f = models.ParentReference()
f.name = 'foo'
b = models.ChildReference()
b.name = 'bar'
f.save()
b.foo = f
b.save()
f.bar = b
f.save()
self.addCleanup(f.delete)
self.addCleanup(b.delete)
self.assertEqual(f.id, 1)
foo = models.ParentReference.objects.select_related().get(id=1)
self.assertFalse(foo.bar.foo is foo)
def test_referenced_encode(self):
f = models.ParentReference()
f.name = 'foo'
b = models.ChildReference()
b.name = 'bar'
f.save()
b.foo = f
b.save()
f.bar = b
f.save()
self.addCleanup(f.delete)
self.addCleanup(b.delete)
self.assertEqual(f.id, 1)
foo = models.ParentReference.objects.select_related().get(id=1)
# ensure the referenced attribute resolves
foo.bar.foo
self.assertEncoded(foo, '\n\x0b\x01\x07bar\n\x0b\x01\x07foo\n\x00\x05'
'id\x04\x01\tname\x06\x00\x01\x04\x04\x01\x06\x06\x02\x01')
class AuthTestCase(BaseTestCase):
"""
Tests for L{django.contrib.auth.models}
"""
def test_user(self):
from django.contrib.auth import models
alias = pyamf.get_class_alias(models.User)
self.assertEqual(alias, 'django.contrib.auth.models.User')
self.assertEqual(alias.exclude_attrs, ('message_set', 'password'))
self.assertEqual(alias.readonly_attrs, ('username',))
class DBColumnTestCase(BaseTestCase):
"""
Tests for #807
"""
def setUp(self):
BaseTestCase.setUp(self)
self.alias = adapter.DjangoClassAlias(models.DBColumnModel, None)
self.model = models.DBColumnModel()
def test_encodable_attrs(self):
def attrs():
return self.alias.getEncodableAttributes(self.model)
self.assertEqual(attrs(), {'id': None})
x = models.SimplestModel()
x.save()
self.addCleanup(x.delete)
self.model.bar = x
self.assertEqual(attrs(), {'id': None, 'bar': x})
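if __name__ == '__main__':
    # Minimal sketch: unittest picks up the setUpModule/tearDownModule fixtures
    # above automatically (Python 2.7+), so running this file standalone
    # bootstraps Django, creates the test db, runs the suite and tears it down.
    unittest.main()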
|
CoDEmanX/ArangoDB
|
refs/heads/devel
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/win32comext/axscript/server/axsite.py
|
17
|
import string
import win32com.axscript.axscript
import winerror
from win32com.axscript import axscript
from win32com.server import exception, util
import pythoncom
class AXEngine:
def __init__(self, site, engine):
self.eScript = self.eParse = self.eSafety = None
if type(engine) == type(''):
engine = pythoncom.CoCreateInstance(engine,
None,
pythoncom.CLSCTX_SERVER,
pythoncom.IID_IUnknown)
self.eScript = engine.QueryInterface(axscript.IID_IActiveScript)
self.eParse = engine.QueryInterface(axscript.IID_IActiveScriptParse)
self.eSafety = engine.QueryInterface(axscript.IID_IObjectSafety)
self.eScript.SetScriptSite(site)
self.eParse.InitNew()
def __del__(self):
self.Close()
def GetScriptDispatch(self, name = None):
return self.eScript.GetScriptDispatch(name)
def AddNamedItem(self, item, flags):
return self.eScript.AddNamedItem(item, flags)
# Some helpers.
def AddCode(self, code, flags=0):
self.eParse.ParseScriptText(code, None, None, None, 0, 0, flags)
def EvalCode(self, code):
return self.eParse.ParseScriptText(code, None, None, None, 0, 0, axscript.SCRIPTTEXT_ISEXPRESSION)
def Start(self):
# Should maybe check state?
# Do I need to transition through?
self.eScript.SetScriptState(axscript.SCRIPTSTATE_STARTED)
# self.eScript.SetScriptState(axscript.SCRIPTSTATE_CONNECTED)
def Close(self):
if self.eScript:
self.eScript.Close()
self.eScript = self.eParse = self.eSafety = None
def SetScriptState(self, state):
self.eScript.SetScriptState(state)
IActiveScriptSite_methods = [
'GetLCID',
'GetItemInfo',
'GetDocVersionString',
'OnScriptTerminate',
'OnStateChange',
'OnScriptError',
'OnEnterScript',
'OnLeaveScript',
]
class AXSite:
"""An Active Scripting site. A Site can have exactly one engine.
"""
_public_methods_ = IActiveScriptSite_methods
_com_interfaces_ = [ axscript.IID_IActiveScriptSite ]
def __init__(self, objModel={}, engine = None, lcid=0):
self.lcid = lcid
self.objModel = { }
for name, object in objModel.items():
            # Greg's code lower-cased the name here - that is the caller's job if desired.
self.objModel[name] = object
self.engine = None
if engine:
self._AddEngine(engine)
def AddEngine(self, engine):
"""Adds a new engine to the site.
engine can be a string, or a fully wrapped engine object.
"""
if type(engine)==type(''):
newEngine = AXEngine(util.wrap(self), engine)
else:
newEngine = engine
self.engine = newEngine
flags = axscript.SCRIPTITEM_ISVISIBLE | axscript.SCRIPTITEM_NOCODE | axscript.SCRIPTITEM_GLOBALMEMBERS | axscript.SCRIPTITEM_ISPERSISTENT
for name in self.objModel.keys():
newEngine.AddNamedItem(name, flags)
newEngine.SetScriptState(axscript.SCRIPTSTATE_INITIALIZED)
return newEngine
# B/W compat
_AddEngine = AddEngine
def _Close(self):
self.engine.Close()
self.objModel = {}
def GetLCID(self):
return self.lcid
def GetItemInfo(self, name, returnMask):
# name = string.lower(name)
if not self.objModel.has_key(name):
raise exception.Exception(scode=winerror.TYPE_E_ELEMENTNOTFOUND, desc='item not found')
### for now, we don't have any type information
if returnMask & axscript.SCRIPTINFO_IUNKNOWN:
return (self.objModel[name], None)
return (None, None)
def GetDocVersionString(self):
return 'Python AXHost version 1.0'
def OnScriptTerminate(self, result, excepInfo):
pass
def OnStateChange(self, state):
pass
def OnScriptError(self, errorInterface):
return winerror.S_FALSE
def OnEnterScript(self):
pass
def OnLeaveScript(self):
pass
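if __name__ == '__main__':
    # Minimal sketch, assuming a registered VBScript engine ('VBScript' is the
    # standard Active Scripting ProgID); the empty object model keeps the
    # example self-contained.
    site = AXSite(engine='VBScript')
    site.engine.AddCode('x = 1')
    site.engine.Start()
    site._Close()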
|
Prasad9/incubator-mxnet
|
refs/heads/master
|
example/neural-style/find_mxnet.py
|
52
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
try:
import mxnet as mx
except ImportError:
import os, sys
curr_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.join(curr_path, "../../python"))
import mxnet as mx
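# Usage sketch: example scripts import this module purely for its side effect
# before importing mxnet proper, e.g.
#   import find_mxnet
#   import mxnet as mx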
|
Maccimo/intellij-community
|
refs/heads/master
|
python/testData/refactoring/move/baseClass/before/src/a.py
|
83
|
class B(object):
def __init__(self):
pass
class C(B):
def __init__(self):
super(C, self).__init__()
|
alexandermendes/pybossa-gravatar
|
refs/heads/master
|
tests/test_plugin.py
|
1
|
# -*- coding: utf8 -*-
import os
import hashlib
import pybossa_gravatar
from mock import patch, MagicMock
from factories import UserFactory
from default import Test, with_context
from helper import web
class TestEventListener(Test):
@with_context
@patch('pybossa_gravatar.event_listeners.gravatar.set', return_value=True)
    def test_gravatar_set_on_add_user_event(self, mock_set):
target = MagicMock()
conn = MagicMock()
pybossa_gravatar.event_listeners.add_user_event(None, conn, target)
        mock_set.assert_called_with(target)
class TestView(web.Helper):
@with_context
@patch('pybossa_gravatar.view.gravatar.set', return_value=True)
def test_anon_user_redirected_to_sign_in(self, mock_set):
res = self.app.get('/account/joebloggs/update/gravatar/set',
follow_redirects=True)
assert "Please sign in to access this page" in res.data
@with_context
@patch('pybossa_gravatar.view.gravatar.set', return_value=True)
def test_unknown_user_not_found(self, mock_set):
self.register()
res = self.app.get('/account/joebloggs/update/gravatar/set',
follow_redirects=True)
assert res.status_code == 404
@with_context
@patch('pybossa_gravatar.view.ensure_authorized_to', return_value=True)
@patch('pybossa_gravatar.view.user_repo')
@patch('pybossa_gravatar.view.gravatar.set', return_value=True)
def test_authorised_user_can_set_gravatar(self, mock_set, mock_repo,
ensure_authorized_to):
        mock_user = MagicMock()
        mock_repo.return_value = mock_user
self.register()
self.signin()
res = self.app.get('/account/{}/update/gravatar/set'.format(self.name),
follow_redirects=True)
        mock_set.assert_called_with(mock_user)
class TestGravatar(Test):
@with_context
@patch('pybossa_gravatar.gravatar_client.secure_filename',
return_value=True)
@patch('pybossa_gravatar.gravatar_client.user_repo')
@patch('pybossa_gravatar.gravatar._download', return_value=True)
def test_avatar_saved_for_user(self, _download, mock_repo, secure_fn):
mock_repo.update.return_value = True
user = UserFactory.create()
pybossa_gravatar.gravatar.set(user)
        mock_repo.update.assert_called_with(user)
@with_context
@patch('pybossa_gravatar.gravatar._download', return_value=True)
def test_correct_url_returned(self, _download):
pybossa_gravatar.gravatar.size = 42
pybossa_gravatar.gravatar.rating = 'pg'
pybossa_gravatar.gravatar.default = '404'
pybossa_gravatar.gravatar.force_default = True
pybossa_gravatar.gravatar.ssl = True
params = 's=42&r=pg&d=404&f=y'
base = 'https://secure'
user = UserFactory.create()
email_hash = hashlib.md5(user.email_addr).hexdigest()
expected = u'{0}.gravatar.com/avatar/{1}?{2}'.format(base, email_hash,
params)
returned = pybossa_gravatar.gravatar._get_url(user)
assert expected == returned
@with_context
@patch('pybossa_gravatar.gravatar_client.urllib.urlretrieve')
@patch('pybossa_gravatar.gravatar_client.uploader')
@patch('pybossa_gravatar.gravatar_client.os.path.isdir')
def test_url_downloaded_to_correct_folder(self, isdir, mock_uploader,
urlretrieve):
        mock_uploader.upload_folder.return_value = 'upload_dir'
urlretrieve.return_value = True
isdir.return_value = True
path = os.path.join('upload_dir/user_1/fn')
pybossa_gravatar.gravatar._download('fn', 'user_1', 'example.com')
        urlretrieve.assert_called_with('example.com', path)
|
gangadhar-kadam/latestchurcherp
|
refs/heads/v5.0
|
erpnext/manufacturing/doctype/bom/test_bom.py
|
5
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.utils import cstr
test_records = frappe.get_test_records('BOM')
class TestBOM(unittest.TestCase):
def test_get_items(self):
from erpnext.manufacturing.doctype.bom.bom import get_bom_items_as_dict
items_dict = get_bom_items_as_dict(bom="BOM/_Test FG Item 2/001", qty=1, fetch_exploded=0)
self.assertTrue(test_records[2]["items"][0]["item_code"] in items_dict)
self.assertTrue(test_records[2]["items"][1]["item_code"] in items_dict)
self.assertEquals(len(items_dict.values()), 2)
def test_get_items_exploded(self):
from erpnext.manufacturing.doctype.bom.bom import get_bom_items_as_dict
items_dict = get_bom_items_as_dict(bom="BOM/_Test FG Item 2/001", qty=1, fetch_exploded=1)
self.assertTrue(test_records[2]["items"][0]["item_code"] in items_dict)
self.assertFalse(test_records[2]["items"][1]["item_code"] in items_dict)
self.assertTrue(test_records[0]["items"][0]["item_code"] in items_dict)
self.assertTrue(test_records[0]["items"][1]["item_code"] in items_dict)
self.assertEquals(len(items_dict.values()), 3)
def test_get_items_list(self):
from erpnext.manufacturing.doctype.bom.bom import get_bom_items
default_bom = frappe.db.get_value("BOM", {"item":"_Test FG Item 2", "is_default": 1})
self.assertEquals(len(get_bom_items(bom=default_bom)), 3)
def test_default_bom(self):
def _get_default_bom_in_item():
return cstr(frappe.db.get_value("Item", "_Test FG Item 2", "default_bom"))
bom = frappe.get_doc("BOM", {"item":"_Test FG Item 2", "is_default": 1})
self.assertEqual(_get_default_bom_in_item(), bom.name)
bom.is_active = 0
bom.save()
self.assertEqual(_get_default_bom_in_item(), "")
bom.is_active = 1
bom.is_default=1
bom.save()
        self.assertEqual(_get_default_bom_in_item(), bom.name)
|
m1ck/bookadoptions
|
refs/heads/master
|
django/contrib/staticfiles/storage.py
|
154
|
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.utils.importlib import import_module
from django.contrib.staticfiles import utils
class StaticFilesStorage(FileSystemStorage):
"""
Standard file system storage for static files.
The defaults for ``location`` and ``base_url`` are
``STATIC_ROOT`` and ``STATIC_URL``.
"""
def __init__(self, location=None, base_url=None, *args, **kwargs):
if location is None:
location = settings.STATIC_ROOT
if base_url is None:
base_url = settings.STATIC_URL
if not location:
raise ImproperlyConfigured("You're using the staticfiles app "
"without having set the STATIC_ROOT setting.")
# check for None since we might use a root URL (``/``)
if base_url is None:
raise ImproperlyConfigured("You're using the staticfiles app "
"without having set the STATIC_URL setting.")
utils.check_settings()
super(StaticFilesStorage, self).__init__(location, base_url, *args, **kwargs)
class AppStaticStorage(FileSystemStorage):
"""
A file system storage backend that takes an app module and works
for the ``static`` directory of it.
"""
prefix = None
source_dir = 'static'
def __init__(self, app, *args, **kwargs):
"""
Returns a static file storage if available in the given app.
"""
# app is the actual app module
self.app_module = app
# We special case the admin app here since it has its static files
# in 'media' for historic reasons.
if self.app_module == 'django.contrib.admin':
self.prefix = 'admin'
self.source_dir = 'media'
mod = import_module(self.app_module)
mod_path = os.path.dirname(mod.__file__)
location = os.path.join(mod_path, self.source_dir)
super(AppStaticStorage, self).__init__(location, *args, **kwargs)
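if __name__ == '__main__':
    # Minimal sketch, assuming no project settings are loaded: the two settings
    # checked in StaticFilesStorage.__init__ can be supplied ad hoc via
    # settings.configure(); the paths below are illustrative only.
    if not settings.configured:
        settings.configure(STATIC_ROOT='/tmp/static', STATIC_URL='/static/')
    storage = StaticFilesStorage()
    print storage.location, storage.base_url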
|
zooba/PTVS
|
refs/heads/master
|
Python/Tests/TestData/Grammar/MatMulOperator.py
|
7
|
1 @ 2
|
CVML/scikit-learn
|
refs/heads/master
|
sklearn/cluster/tests/test_dbscan.py
|
114
|
"""
Tests for DBSCAN clustering algorithm
"""
import pickle
import numpy as np
from scipy.spatial import distance
from scipy import sparse
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_in
from sklearn.utils.testing import assert_not_in
from sklearn.cluster.dbscan_ import DBSCAN
from sklearn.cluster.dbscan_ import dbscan
from sklearn.cluster.tests.common import generate_clustered_data
from sklearn.metrics.pairwise import pairwise_distances
n_clusters = 3
X = generate_clustered_data(n_clusters=n_clusters)
def test_dbscan_similarity():
# Tests the DBSCAN algorithm with a similarity array.
# Parameters chosen specifically for this task.
eps = 0.15
min_samples = 10
# Compute similarities
D = distance.squareform(distance.pdist(X))
D /= np.max(D)
# Compute DBSCAN
core_samples, labels = dbscan(D, metric="precomputed", eps=eps,
min_samples=min_samples)
# number of clusters, ignoring noise if present
n_clusters_1 = len(set(labels)) - (1 if -1 in labels else 0)
assert_equal(n_clusters_1, n_clusters)
db = DBSCAN(metric="precomputed", eps=eps, min_samples=min_samples)
labels = db.fit(D).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert_equal(n_clusters_2, n_clusters)
def test_dbscan_feature():
# Tests the DBSCAN algorithm with a feature vector array.
# Parameters chosen specifically for this task.
# Different eps to other test, because distance is not normalised.
eps = 0.8
min_samples = 10
metric = 'euclidean'
# Compute DBSCAN
# parameters chosen for task
core_samples, labels = dbscan(X, metric=metric, eps=eps,
min_samples=min_samples)
# number of clusters, ignoring noise if present
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert_equal(n_clusters_1, n_clusters)
db = DBSCAN(metric=metric, eps=eps, min_samples=min_samples)
labels = db.fit(X).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert_equal(n_clusters_2, n_clusters)
def test_dbscan_sparse():
core_sparse, labels_sparse = dbscan(sparse.lil_matrix(X), eps=.8,
min_samples=10)
core_dense, labels_dense = dbscan(X, eps=.8, min_samples=10)
assert_array_equal(core_dense, core_sparse)
assert_array_equal(labels_dense, labels_sparse)
def test_dbscan_no_core_samples():
rng = np.random.RandomState(0)
X = rng.rand(40, 10)
X[X < .8] = 0
for X_ in [X, sparse.csr_matrix(X)]:
db = DBSCAN(min_samples=6).fit(X_)
assert_array_equal(db.components_, np.empty((0, X_.shape[1])))
assert_array_equal(db.labels_, -1)
assert_equal(db.core_sample_indices_.shape, (0,))
def test_dbscan_callable():
# Tests the DBSCAN algorithm with a callable metric.
# Parameters chosen specifically for this task.
# Different eps to other test, because distance is not normalised.
eps = 0.8
min_samples = 10
# metric is the function reference, not the string key.
metric = distance.euclidean
# Compute DBSCAN
# parameters chosen for task
core_samples, labels = dbscan(X, metric=metric, eps=eps,
min_samples=min_samples,
algorithm='ball_tree')
# number of clusters, ignoring noise if present
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert_equal(n_clusters_1, n_clusters)
db = DBSCAN(metric=metric, eps=eps, min_samples=min_samples,
algorithm='ball_tree')
labels = db.fit(X).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert_equal(n_clusters_2, n_clusters)
def test_dbscan_balltree():
# Tests the DBSCAN algorithm with balltree for neighbor calculation.
eps = 0.8
min_samples = 10
D = pairwise_distances(X)
core_samples, labels = dbscan(D, metric="precomputed", eps=eps,
min_samples=min_samples)
# number of clusters, ignoring noise if present
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert_equal(n_clusters_1, n_clusters)
db = DBSCAN(p=2.0, eps=eps, min_samples=min_samples, algorithm='ball_tree')
labels = db.fit(X).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert_equal(n_clusters_2, n_clusters)
db = DBSCAN(p=2.0, eps=eps, min_samples=min_samples, algorithm='kd_tree')
labels = db.fit(X).labels_
n_clusters_3 = len(set(labels)) - int(-1 in labels)
assert_equal(n_clusters_3, n_clusters)
db = DBSCAN(p=1.0, eps=eps, min_samples=min_samples, algorithm='ball_tree')
labels = db.fit(X).labels_
n_clusters_4 = len(set(labels)) - int(-1 in labels)
assert_equal(n_clusters_4, n_clusters)
db = DBSCAN(leaf_size=20, eps=eps, min_samples=min_samples,
algorithm='ball_tree')
labels = db.fit(X).labels_
n_clusters_5 = len(set(labels)) - int(-1 in labels)
assert_equal(n_clusters_5, n_clusters)
def test_input_validation():
# DBSCAN.fit should accept a list of lists.
X = [[1., 2.], [3., 4.]]
DBSCAN().fit(X) # must not raise exception
def test_dbscan_badargs():
# Test bad argument values: these should all raise ValueErrors
assert_raises(ValueError,
dbscan,
X, eps=-1.0)
assert_raises(ValueError,
dbscan,
X, algorithm='blah')
assert_raises(ValueError,
dbscan,
X, metric='blah')
assert_raises(ValueError,
dbscan,
X, leaf_size=-1)
assert_raises(ValueError,
dbscan,
X, p=-1)
def test_pickle():
obj = DBSCAN()
s = pickle.dumps(obj)
assert_equal(type(pickle.loads(s)), obj.__class__)
def test_boundaries():
# ensure min_samples is inclusive of core point
core, _ = dbscan([[0], [1]], eps=2, min_samples=2)
assert_in(0, core)
# ensure eps is inclusive of circumference
core, _ = dbscan([[0], [1], [1]], eps=1, min_samples=2)
assert_in(0, core)
core, _ = dbscan([[0], [1], [1]], eps=.99, min_samples=2)
assert_not_in(0, core)
def test_weighted_dbscan():
# ensure sample_weight is validated
assert_raises(ValueError, dbscan, [[0], [1]], sample_weight=[2])
assert_raises(ValueError, dbscan, [[0], [1]], sample_weight=[2, 3, 4])
# ensure sample_weight has an effect
assert_array_equal([], dbscan([[0], [1]], sample_weight=None,
min_samples=6)[0])
assert_array_equal([], dbscan([[0], [1]], sample_weight=[5, 5],
min_samples=6)[0])
assert_array_equal([0], dbscan([[0], [1]], sample_weight=[6, 5],
min_samples=6)[0])
assert_array_equal([0, 1], dbscan([[0], [1]], sample_weight=[6, 6],
min_samples=6)[0])
# points within eps of each other:
assert_array_equal([0, 1], dbscan([[0], [1]], eps=1.5,
sample_weight=[5, 1], min_samples=6)[0])
# and effect of non-positive and non-integer sample_weight:
assert_array_equal([], dbscan([[0], [1]], sample_weight=[5, 0],
eps=1.5, min_samples=6)[0])
assert_array_equal([0, 1], dbscan([[0], [1]], sample_weight=[5.9, 0.1],
eps=1.5, min_samples=6)[0])
assert_array_equal([0, 1], dbscan([[0], [1]], sample_weight=[6, 0],
eps=1.5, min_samples=6)[0])
assert_array_equal([], dbscan([[0], [1]], sample_weight=[6, -1],
eps=1.5, min_samples=6)[0])
# for non-negative sample_weight, cores should be identical to repetition
rng = np.random.RandomState(42)
sample_weight = rng.randint(0, 5, X.shape[0])
core1, label1 = dbscan(X, sample_weight=sample_weight)
assert_equal(len(label1), len(X))
X_repeated = np.repeat(X, sample_weight, axis=0)
core_repeated, label_repeated = dbscan(X_repeated)
core_repeated_mask = np.zeros(X_repeated.shape[0], dtype=bool)
core_repeated_mask[core_repeated] = True
core_mask = np.zeros(X.shape[0], dtype=bool)
core_mask[core1] = True
assert_array_equal(np.repeat(core_mask, sample_weight), core_repeated_mask)
# sample_weight should work with precomputed distance matrix
D = pairwise_distances(X)
core3, label3 = dbscan(D, sample_weight=sample_weight,
metric='precomputed')
assert_array_equal(core1, core3)
assert_array_equal(label1, label3)
# sample_weight should work with estimator
est = DBSCAN().fit(X, sample_weight=sample_weight)
core4 = est.core_sample_indices_
label4 = est.labels_
assert_array_equal(core1, core4)
assert_array_equal(label1, label4)
est = DBSCAN()
label5 = est.fit_predict(X, sample_weight=sample_weight)
core5 = est.core_sample_indices_
assert_array_equal(core1, core5)
assert_array_equal(label1, label5)
assert_array_equal(label1, est.labels_)
def test_dbscan_core_samples_toy():
X = [[0], [2], [3], [4], [6], [8], [10]]
n_samples = len(X)
for algorithm in ['brute', 'kd_tree', 'ball_tree']:
# Degenerate case: every sample is a core sample, either with its own
# cluster or including other close core samples.
core_samples, labels = dbscan(X, algorithm=algorithm, eps=1,
min_samples=1)
assert_array_equal(core_samples, np.arange(n_samples))
assert_array_equal(labels, [0, 1, 1, 1, 2, 3, 4])
# With eps=1 and min_samples=2 only the 3 samples from the denser area
# are core samples. All other points are isolated and considered noise.
core_samples, labels = dbscan(X, algorithm=algorithm, eps=1,
min_samples=2)
assert_array_equal(core_samples, [1, 2, 3])
assert_array_equal(labels, [-1, 0, 0, 0, -1, -1, -1])
# Only the sample in the middle of the dense area is core. Its two
# neighbors are edge samples. Remaining samples are noise.
core_samples, labels = dbscan(X, algorithm=algorithm, eps=1,
min_samples=3)
assert_array_equal(core_samples, [2])
assert_array_equal(labels, [-1, 0, 0, 0, -1, -1, -1])
# It's no longer possible to extract core samples with eps=1:
# everything is noise.
core_samples, labels = dbscan(X, algorithm=algorithm, eps=1,
min_samples=4)
assert_array_equal(core_samples, [])
assert_array_equal(labels, -np.ones(n_samples))
def test_dbscan_precomputed_metric_with_degenerate_input_arrays():
# see https://github.com/scikit-learn/scikit-learn/issues/4641 for
# more details
X = np.ones((10, 2))
labels = DBSCAN(eps=0.5, metric='precomputed').fit(X).labels_
assert_equal(len(set(labels)), 1)
X = np.zeros((10, 2))
labels = DBSCAN(eps=0.5, metric='precomputed').fit(X).labels_
assert_equal(len(set(labels)), 1)
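if __name__ == '__main__':
    # Minimal sketch of the API exercised above: dbscan() returns
    # (core_sample_indices, labels), with the label -1 marking noise.
    core, labels = dbscan(X, eps=0.8, min_samples=10)
    print('clusters found: %d' % (len(set(labels)) - int(-1 in labels)))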
|
Zhongqilong/kbengine
|
refs/heads/master
|
kbe/res/scripts/common/Lib/test/test_gzip.py
|
75
|
"""Test script for the gzip module.
"""
import unittest
from test import support
import os
import io
import struct
gzip = support.import_module('gzip')
data1 = b""" int length=DEFAULTALLOC, err = Z_OK;
PyObject *RetVal;
int flushmode = Z_FINISH;
unsigned long start_total_out;
"""
data2 = b"""/* zlibmodule.c -- gzip-compatible data compression */
/* See http://www.gzip.org/zlib/
/* See http://www.winimage.com/zLibDll for Windows */
"""
class UnseekableIO(io.BytesIO):
def seekable(self):
return False
def tell(self):
raise io.UnsupportedOperation
def seek(self, *args):
raise io.UnsupportedOperation
class BaseTest(unittest.TestCase):
filename = support.TESTFN
def setUp(self):
support.unlink(self.filename)
def tearDown(self):
support.unlink(self.filename)
class TestGzip(BaseTest):
def test_write(self):
with gzip.GzipFile(self.filename, 'wb') as f:
f.write(data1 * 50)
# Try flush and fileno.
f.flush()
f.fileno()
if hasattr(os, 'fsync'):
os.fsync(f.fileno())
f.close()
# Test multiple close() calls.
f.close()
def test_read(self):
self.test_write()
# Try reading.
with gzip.GzipFile(self.filename, 'r') as f:
d = f.read()
self.assertEqual(d, data1*50)
def test_read1(self):
self.test_write()
blocks = []
nread = 0
with gzip.GzipFile(self.filename, 'r') as f:
while True:
d = f.read1()
if not d:
break
blocks.append(d)
nread += len(d)
# Check that position was updated correctly (see issue10791).
self.assertEqual(f.tell(), nread)
self.assertEqual(b''.join(blocks), data1 * 50)
def test_io_on_closed_object(self):
# Test that I/O operations on closed GzipFile objects raise a
# ValueError, just like the corresponding functions on file objects.
# Write to a file, open it for reading, then close it.
self.test_write()
f = gzip.GzipFile(self.filename, 'r')
f.close()
with self.assertRaises(ValueError):
f.read(1)
with self.assertRaises(ValueError):
f.seek(0)
with self.assertRaises(ValueError):
f.tell()
# Open the file for writing, then close it.
f = gzip.GzipFile(self.filename, 'w')
f.close()
with self.assertRaises(ValueError):
f.write(b'')
with self.assertRaises(ValueError):
f.flush()
def test_append(self):
self.test_write()
# Append to the previous file
with gzip.GzipFile(self.filename, 'ab') as f:
f.write(data2 * 15)
with gzip.GzipFile(self.filename, 'rb') as f:
d = f.read()
self.assertEqual(d, (data1*50) + (data2*15))
def test_many_append(self):
# Bug #1074261 was triggered when reading a file that contained
# many, many members. Create such a file and verify that reading it
# works.
with gzip.GzipFile(self.filename, 'wb', 9) as f:
f.write(b'a')
for i in range(0, 200):
with gzip.GzipFile(self.filename, "ab", 9) as f: # append
f.write(b'a')
# Try reading the file
with gzip.GzipFile(self.filename, "rb") as zgfile:
contents = b""
while 1:
ztxt = zgfile.read(8192)
contents += ztxt
if not ztxt: break
self.assertEqual(contents, b'a'*201)
def test_exclusive_write(self):
with gzip.GzipFile(self.filename, 'xb') as f:
f.write(data1 * 50)
with gzip.GzipFile(self.filename, 'rb') as f:
self.assertEqual(f.read(), data1 * 50)
with self.assertRaises(FileExistsError):
gzip.GzipFile(self.filename, 'xb')
def test_buffered_reader(self):
# Issue #7471: a GzipFile can be wrapped in a BufferedReader for
# performance.
self.test_write()
with gzip.GzipFile(self.filename, 'rb') as f:
with io.BufferedReader(f) as r:
lines = [line for line in r]
self.assertEqual(lines, 50 * data1.splitlines(keepends=True))
def test_readline(self):
self.test_write()
# Try .readline() with varying line lengths
with gzip.GzipFile(self.filename, 'rb') as f:
line_length = 0
while 1:
L = f.readline(line_length)
if not L and line_length != 0: break
self.assertTrue(len(L) <= line_length)
line_length = (line_length + 1) % 50
def test_readlines(self):
self.test_write()
# Try .readlines()
with gzip.GzipFile(self.filename, 'rb') as f:
L = f.readlines()
with gzip.GzipFile(self.filename, 'rb') as f:
while 1:
L = f.readlines(150)
if L == []: break
def test_seek_read(self):
self.test_write()
# Try seek, read test
with gzip.GzipFile(self.filename) as f:
while 1:
oldpos = f.tell()
line1 = f.readline()
if not line1: break
newpos = f.tell()
f.seek(oldpos) # negative seek
if len(line1)>10:
amount = 10
else:
amount = len(line1)
line2 = f.read(amount)
self.assertEqual(line1[:amount], line2)
f.seek(newpos) # positive seek
def test_seek_whence(self):
self.test_write()
# Try seek(whence=1), read test
with gzip.GzipFile(self.filename) as f:
f.read(10)
f.seek(10, whence=1)
y = f.read(10)
self.assertEqual(y, data1[20:30])
def test_seek_write(self):
# Try seek, write test
with gzip.GzipFile(self.filename, 'w') as f:
for pos in range(0, 256, 16):
f.seek(pos)
f.write(b'GZ\n')
def test_mode(self):
self.test_write()
with gzip.GzipFile(self.filename, 'r') as f:
self.assertEqual(f.myfileobj.mode, 'rb')
support.unlink(self.filename)
with gzip.GzipFile(self.filename, 'x') as f:
self.assertEqual(f.myfileobj.mode, 'xb')
def test_1647484(self):
for mode in ('wb', 'rb'):
with gzip.GzipFile(self.filename, mode) as f:
self.assertTrue(hasattr(f, "name"))
self.assertEqual(f.name, self.filename)
def test_paddedfile_getattr(self):
self.test_write()
with gzip.GzipFile(self.filename, 'rb') as f:
self.assertTrue(hasattr(f.fileobj, "name"))
self.assertEqual(f.fileobj.name, self.filename)
def test_mtime(self):
mtime = 123456789
with gzip.GzipFile(self.filename, 'w', mtime = mtime) as fWrite:
fWrite.write(data1)
with gzip.GzipFile(self.filename) as fRead:
dataRead = fRead.read()
self.assertEqual(dataRead, data1)
self.assertTrue(hasattr(fRead, 'mtime'))
self.assertEqual(fRead.mtime, mtime)
def test_metadata(self):
mtime = 123456789
with gzip.GzipFile(self.filename, 'w', mtime = mtime) as fWrite:
fWrite.write(data1)
with open(self.filename, 'rb') as fRead:
# see RFC 1952: http://www.faqs.org/rfcs/rfc1952.html
idBytes = fRead.read(2)
self.assertEqual(idBytes, b'\x1f\x8b') # gzip ID
cmByte = fRead.read(1)
self.assertEqual(cmByte, b'\x08') # deflate
flagsByte = fRead.read(1)
self.assertEqual(flagsByte, b'\x08') # only the FNAME flag is set
mtimeBytes = fRead.read(4)
self.assertEqual(mtimeBytes, struct.pack('<i', mtime)) # little-endian
xflByte = fRead.read(1)
self.assertEqual(xflByte, b'\x02') # maximum compression
osByte = fRead.read(1)
self.assertEqual(osByte, b'\xff') # OS "unknown" (OS-independent)
# Since the FNAME flag is set, the zero-terminated filename follows.
# RFC 1952 specifies that this is the name of the input file, if any.
# However, the gzip module defaults to storing the name of the output
# file in this field.
expected = self.filename.encode('Latin-1') + b'\x00'
nameBytes = fRead.read(len(expected))
self.assertEqual(nameBytes, expected)
# Since no other flags were set, the header ends here.
# Rather than process the compressed data, let's seek to the trailer.
fRead.seek(os.stat(self.filename).st_size - 8)
crc32Bytes = fRead.read(4) # CRC32 of uncompressed data [data1]
self.assertEqual(crc32Bytes, b'\xaf\xd7d\x83')
isizeBytes = fRead.read(4)
self.assertEqual(isizeBytes, struct.pack('<i', len(data1)))
def test_with_open(self):
# GzipFile supports the context management protocol
with gzip.GzipFile(self.filename, "wb") as f:
f.write(b"xxx")
f = gzip.GzipFile(self.filename, "rb")
f.close()
try:
with f:
pass
except ValueError:
pass
else:
self.fail("__enter__ on a closed file didn't raise an exception")
try:
with gzip.GzipFile(self.filename, "wb") as f:
1/0
except ZeroDivisionError:
pass
else:
self.fail("1/0 didn't raise an exception")
def test_zero_padded_file(self):
with gzip.GzipFile(self.filename, "wb") as f:
f.write(data1 * 50)
# Pad the file with zeroes
with open(self.filename, "ab") as f:
f.write(b"\x00" * 50)
with gzip.GzipFile(self.filename, "rb") as f:
d = f.read()
self.assertEqual(d, data1 * 50, "Incorrect data in file")
def test_non_seekable_file(self):
uncompressed = data1 * 50
buf = UnseekableIO()
with gzip.GzipFile(fileobj=buf, mode="wb") as f:
f.write(uncompressed)
compressed = buf.getvalue()
buf = UnseekableIO(compressed)
with gzip.GzipFile(fileobj=buf, mode="rb") as f:
self.assertEqual(f.read(), uncompressed)
def test_peek(self):
uncompressed = data1 * 200
with gzip.GzipFile(self.filename, "wb") as f:
f.write(uncompressed)
def sizes():
while True:
for n in range(5, 50, 10):
yield n
with gzip.GzipFile(self.filename, "rb") as f:
f.max_read_chunk = 33
nread = 0
for n in sizes():
s = f.peek(n)
if s == b'':
break
self.assertEqual(f.read(len(s)), s)
nread += len(s)
self.assertEqual(f.read(100), b'')
self.assertEqual(nread, len(uncompressed))
def test_textio_readlines(self):
# Issue #10791: TextIOWrapper.readlines() fails when wrapping GzipFile.
lines = (data1 * 50).decode("ascii").splitlines(keepends=True)
self.test_write()
with gzip.GzipFile(self.filename, 'r') as f:
with io.TextIOWrapper(f, encoding="ascii") as t:
self.assertEqual(t.readlines(), lines)
def test_fileobj_from_fdopen(self):
# Issue #13781: Opening a GzipFile for writing fails when using a
# fileobj created with os.fdopen().
fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT)
with os.fdopen(fd, "wb") as f:
with gzip.GzipFile(fileobj=f, mode="w") as g:
pass
def test_bytes_filename(self):
str_filename = self.filename
try:
bytes_filename = str_filename.encode("ascii")
except UnicodeEncodeError:
self.skipTest("Temporary file name needs to be ASCII")
with gzip.GzipFile(bytes_filename, "wb") as f:
f.write(data1 * 50)
with gzip.GzipFile(bytes_filename, "rb") as f:
self.assertEqual(f.read(), data1 * 50)
# Sanity check that we are actually operating on the right file.
with gzip.GzipFile(str_filename, "rb") as f:
self.assertEqual(f.read(), data1 * 50)
# Testing compress/decompress shortcut functions
def test_compress(self):
for data in [data1, data2]:
for args in [(), (1,), (6,), (9,)]:
datac = gzip.compress(data, *args)
self.assertEqual(type(datac), bytes)
with gzip.GzipFile(fileobj=io.BytesIO(datac), mode="rb") as f:
self.assertEqual(f.read(), data)
def test_decompress(self):
for data in (data1, data2):
buf = io.BytesIO()
with gzip.GzipFile(fileobj=buf, mode="wb") as f:
f.write(data)
self.assertEqual(gzip.decompress(buf.getvalue()), data)
# Roundtrip with compress
datac = gzip.compress(data)
self.assertEqual(gzip.decompress(datac), data)
def test_read_truncated(self):
data = data1*50
# Drop the CRC (4 bytes) and file size (4 bytes).
truncated = gzip.compress(data)[:-8]
with gzip.GzipFile(fileobj=io.BytesIO(truncated)) as f:
self.assertRaises(EOFError, f.read)
with gzip.GzipFile(fileobj=io.BytesIO(truncated)) as f:
self.assertEqual(f.read(len(data)), data)
self.assertRaises(EOFError, f.read, 1)
# Incomplete 10-byte header.
for i in range(2, 10):
with gzip.GzipFile(fileobj=io.BytesIO(truncated[:i])) as f:
self.assertRaises(EOFError, f.read, 1)
def test_read_with_extra(self):
# Gzip data with an extra field
gzdata = (b'\x1f\x8b\x08\x04\xb2\x17cQ\x02\xff'
b'\x05\x00Extra'
b'\x0bI-.\x01\x002\xd1Mx\x04\x00\x00\x00')
with gzip.GzipFile(fileobj=io.BytesIO(gzdata)) as f:
self.assertEqual(f.read(), b'Test')
def test_prepend_error(self):
# See issue #20875
with gzip.open(self.filename, "wb") as f:
f.write(data1)
with gzip.open(self.filename, "rb") as f:
f.fileobj.prepend()
class TestOpen(BaseTest):
def test_binary_modes(self):
uncompressed = data1 * 50
with gzip.open(self.filename, "wb") as f:
f.write(uncompressed)
with open(self.filename, "rb") as f:
file_data = gzip.decompress(f.read())
self.assertEqual(file_data, uncompressed)
with gzip.open(self.filename, "rb") as f:
self.assertEqual(f.read(), uncompressed)
with gzip.open(self.filename, "ab") as f:
f.write(uncompressed)
with open(self.filename, "rb") as f:
file_data = gzip.decompress(f.read())
self.assertEqual(file_data, uncompressed * 2)
with self.assertRaises(FileExistsError):
gzip.open(self.filename, "xb")
support.unlink(self.filename)
with gzip.open(self.filename, "xb") as f:
f.write(uncompressed)
with open(self.filename, "rb") as f:
file_data = gzip.decompress(f.read())
self.assertEqual(file_data, uncompressed)
def test_implicit_binary_modes(self):
# Test implicit binary modes (no "b" or "t" in mode string).
uncompressed = data1 * 50
with gzip.open(self.filename, "w") as f:
f.write(uncompressed)
with open(self.filename, "rb") as f:
file_data = gzip.decompress(f.read())
self.assertEqual(file_data, uncompressed)
with gzip.open(self.filename, "r") as f:
self.assertEqual(f.read(), uncompressed)
with gzip.open(self.filename, "a") as f:
f.write(uncompressed)
with open(self.filename, "rb") as f:
file_data = gzip.decompress(f.read())
self.assertEqual(file_data, uncompressed * 2)
with self.assertRaises(FileExistsError):
gzip.open(self.filename, "x")
support.unlink(self.filename)
with gzip.open(self.filename, "x") as f:
f.write(uncompressed)
with open(self.filename, "rb") as f:
file_data = gzip.decompress(f.read())
self.assertEqual(file_data, uncompressed)
def test_text_modes(self):
uncompressed = data1.decode("ascii") * 50
uncompressed_raw = uncompressed.replace("\n", os.linesep)
with gzip.open(self.filename, "wt") as f:
f.write(uncompressed)
with open(self.filename, "rb") as f:
file_data = gzip.decompress(f.read()).decode("ascii")
self.assertEqual(file_data, uncompressed_raw)
with gzip.open(self.filename, "rt") as f:
self.assertEqual(f.read(), uncompressed)
with gzip.open(self.filename, "at") as f:
f.write(uncompressed)
with open(self.filename, "rb") as f:
file_data = gzip.decompress(f.read()).decode("ascii")
self.assertEqual(file_data, uncompressed_raw * 2)
def test_fileobj(self):
uncompressed_bytes = data1 * 50
uncompressed_str = uncompressed_bytes.decode("ascii")
compressed = gzip.compress(uncompressed_bytes)
with gzip.open(io.BytesIO(compressed), "r") as f:
self.assertEqual(f.read(), uncompressed_bytes)
with gzip.open(io.BytesIO(compressed), "rb") as f:
self.assertEqual(f.read(), uncompressed_bytes)
with gzip.open(io.BytesIO(compressed), "rt") as f:
self.assertEqual(f.read(), uncompressed_str)
def test_bad_params(self):
# Test invalid parameter combinations.
with self.assertRaises(TypeError):
gzip.open(123.456)
with self.assertRaises(ValueError):
gzip.open(self.filename, "wbt")
with self.assertRaises(ValueError):
gzip.open(self.filename, "xbt")
with self.assertRaises(ValueError):
gzip.open(self.filename, "rb", encoding="utf-8")
with self.assertRaises(ValueError):
gzip.open(self.filename, "rb", errors="ignore")
with self.assertRaises(ValueError):
gzip.open(self.filename, "rb", newline="\n")
def test_encoding(self):
# Test non-default encoding.
uncompressed = data1.decode("ascii") * 50
uncompressed_raw = uncompressed.replace("\n", os.linesep)
with gzip.open(self.filename, "wt", encoding="utf-16") as f:
f.write(uncompressed)
with open(self.filename, "rb") as f:
file_data = gzip.decompress(f.read()).decode("utf-16")
self.assertEqual(file_data, uncompressed_raw)
with gzip.open(self.filename, "rt", encoding="utf-16") as f:
self.assertEqual(f.read(), uncompressed)
def test_encoding_error_handler(self):
# Test with non-default encoding error handler.
with gzip.open(self.filename, "wb") as f:
f.write(b"foo\xffbar")
with gzip.open(self.filename, "rt", encoding="ascii", errors="ignore") \
as f:
self.assertEqual(f.read(), "foobar")
def test_newline(self):
# Test with explicit newline (universal newline mode disabled).
uncompressed = data1.decode("ascii") * 50
with gzip.open(self.filename, "wt", newline="\n") as f:
f.write(uncompressed)
with gzip.open(self.filename, "rt", newline="\r") as f:
self.assertEqual(f.readlines(), [uncompressed])
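def _roundtrip_example():
    # Not part of the suite; a minimal sketch of the one-shot helpers that
    # test_compress/test_decompress exercise above.
    compressed = gzip.compress(data1)
    assert gzip.decompress(compressed) == data1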
def test_main(verbose=None):
support.run_unittest(TestGzip, TestOpen)
if __name__ == "__main__":
test_main(verbose=True)
|
lokeshjindal15/pd-gem5
|
refs/heads/master
|
configs/common/cpu2000.py
|
48
|
# Copyright (c) 2006-2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
import os
import sys
from os.path import basename, exists, join as joinpath, normpath
from os.path import isdir, isfile, islink
spec_dist = os.environ.get('M5_CPU2000', '/dist/m5/cpu2000')
def copyfiles(srcdir, dstdir):
from filecmp import cmp as filecmp
from shutil import copyfile
srcdir = normpath(srcdir)
dstdir = normpath(dstdir)
if not isdir(dstdir):
os.mkdir(dstdir)
for root, dirs, files in os.walk(srcdir):
root = normpath(root)
prefix = os.path.commonprefix([root, srcdir])
root = root[len(prefix):]
if root.startswith('/'):
root = root[1:]
for entry in dirs:
newdir = joinpath(dstdir, root, entry)
if not isdir(newdir):
os.mkdir(newdir)
for entry in files:
dest = normpath(joinpath(dstdir, root, entry))
src = normpath(joinpath(srcdir, root, entry))
if not isfile(dest) or not filecmp(src, dest):
copyfile(src, dest)
# some of the spec benchmarks expect to be run from one directory up.
# just create some symlinks that solve the problem
inlink = joinpath(dstdir, 'input')
outlink = joinpath(dstdir, 'output')
if not exists(inlink):
os.symlink('.', inlink)
if not exists(outlink):
os.symlink('.', outlink)
class Benchmark(object):
def __init__(self, isa, os, input_set):
if not hasattr(self.__class__, 'name'):
self.name = self.__class__.__name__
if not hasattr(self.__class__, 'binary'):
self.binary = self.name
if not hasattr(self.__class__, 'args'):
self.args = []
if not hasattr(self.__class__, 'output'):
self.output = '%s.out' % self.name
if not hasattr(self.__class__, 'simpoint'):
self.simpoint = None
try:
func = getattr(self.__class__, input_set)
except AttributeError:
raise AttributeError, \
'The benchmark %s does not have the %s input set' % \
(self.name, input_set)
executable = joinpath(spec_dist, 'binaries', isa, os, self.binary)
if not isfile(executable):
raise AttributeError, '%s not found' % executable
self.executable = executable
# root of tree for input & output data files
data_dir = joinpath(spec_dist, 'data', self.name)
# optional subtree with files shared across input sets
all_dir = joinpath(data_dir, 'all')
# dirs for input & output files for this input set
inputs_dir = joinpath(data_dir, input_set, 'input')
outputs_dir = joinpath(data_dir, input_set, 'output')
# keep around which input set was specified
self.input_set = input_set
if not isdir(inputs_dir):
raise AttributeError, '%s not found' % inputs_dir
self.inputs_dir = [ inputs_dir ]
if isdir(all_dir):
self.inputs_dir += [ joinpath(all_dir, 'input') ]
if isdir(outputs_dir):
self.outputs_dir = outputs_dir
if not hasattr(self.__class__, 'stdin'):
self.stdin = joinpath(inputs_dir, '%s.in' % self.name)
if not isfile(self.stdin):
self.stdin = None
if not hasattr(self.__class__, 'stdout'):
self.stdout = joinpath(outputs_dir, '%s.out' % self.name)
if not isfile(self.stdout):
self.stdout = None
func(self, isa, os)
def makeLiveProcessArgs(self, **kwargs):
# set up default args for LiveProcess object
process_args = {}
process_args['cmd'] = [ self.name ] + self.args
process_args['executable'] = self.executable
if self.stdin:
process_args['input'] = self.stdin
if self.stdout:
process_args['output'] = self.stdout
if self.simpoint:
process_args['simpoint'] = self.simpoint
# explicit keywords override defaults
process_args.update(kwargs)
return process_args
def makeLiveProcess(self, **kwargs):
process_args = self.makeLiveProcessArgs(**kwargs)
# figure out working directory: use m5's outdir unless
# overridden by LiveProcess's cwd param
cwd = process_args.get('cwd')
if not cwd:
from m5 import options
cwd = options.outdir
process_args['cwd'] = cwd
if not isdir(cwd):
os.makedirs(cwd)
# copy input files to working directory
for d in self.inputs_dir:
copyfiles(d, cwd)
# generate LiveProcess object
from m5.objects import LiveProcess
return LiveProcess(**process_args)
def __str__(self):
return self.name
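# Typical use from a simulation script (a sketch under the assumption that the
# surrounding m5/gem5 config exposes a CPU whose workload accepts a
# LiveProcess; the names below are illustrative):
#
#   bench = mcf('alpha', 'tru64', 'ref')
#   system.cpu.workload = bench.makeLiveProcess()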
class DefaultBenchmark(Benchmark):
def ref(self, isa, os): pass
def test(self, isa, os): pass
def train(self, isa, os): pass
class MinneDefaultBenchmark(DefaultBenchmark):
def smred(self, isa, os): pass
def mdred(self, isa, os): pass
def lgred(self, isa, os): pass
class ammp(MinneDefaultBenchmark):
name = 'ammp'
number = 188
lang = 'C'
simpoint = 108*100E6
class applu(MinneDefaultBenchmark):
name = 'applu'
number = 173
lang = 'F77'
simpoint = 2179*100E6
class apsi(MinneDefaultBenchmark):
name = 'apsi'
number = 301
lang = 'F77'
simpoint = 3408*100E6
class art(DefaultBenchmark):
name = 'art'
number = 179
lang = 'C'
def test(self, isa, os):
self.args = [ '-scanfile', 'c756hel.in',
'-trainfile1', 'a10.img',
'-stride', '2',
'-startx', '134',
'-starty', '220',
'-endx', '139',
'-endy', '225',
'-objects', '1' ]
self.output = 'test.out'
def train(self, isa, os):
self.args = [ '-scanfile', 'c756hel.in',
'-trainfile1', 'a10.img',
'-stride', '2',
'-startx', '134',
'-starty', '220',
'-endx', '184',
'-endy', '240',
'-objects', '3' ]
self.output = 'train.out'
def lgred(self, isa, os):
self.args = ['-scanfile', 'c756hel.in',
'-trainfile1', 'a10.img',
'-stride', '5',
'-startx', '134',
'-starty', '220',
'-endx', '184',
'-endy', '240',
'-objects', '1' ]
self.output = 'lgred.out'
class art110(art):
def ref(self, isa, os):
self.args = [ '-scanfile', 'c756hel.in',
'-trainfile1', 'a10.img',
'-trainfile2', 'hc.img',
'-stride', '2',
'-startx', '110',
'-starty', '200',
'-endx', '160',
'-endy', '240',
'-objects', '10' ]
self.output = 'ref.1.out'
self.simpoint = 340*100E6
class art470(art):
def ref(self, isa, os):
self.args = [ '-scanfile', 'c756hel.in',
'-trainfile1', 'a10.img',
'-trainfile2', 'hc.img',
'-stride', '2',
'-startx', '470',
'-starty', '140',
'-endx', '520',
'-endy', '180',
'-objects', '10' ]
self.output = 'ref.2.out'
self.simpoint = 365*100E6
class equake(DefaultBenchmark):
name = 'equake'
number = 183
lang = 'C'
simpoint = 812*100E6
def lgred(self, isa, os): pass
class facerec(MinneDefaultBenchmark):
name = 'facerec'
number = 187
lang = 'F'
simpoint = 375*100E6
class fma3d(MinneDefaultBenchmark):
name = 'fma3d'
number = 191
lang = 'F'
simpoint = 2541*100E6
class galgel(MinneDefaultBenchmark):
name = 'galgel'
number = 178
lang = 'F'
simpoint = 2491*100E6
class lucas(MinneDefaultBenchmark):
name = 'lucas'
number = 189
lang = 'F'
simpoint = 545*100E6
class mesa(Benchmark):
name = 'mesa'
number = 177
lang = 'C'
stdin = None
def __set_args(self, frames):
self.args = [ '-frames', frames, '-meshfile', '%s.in' % self.name,
'-ppmfile', '%s.ppm' % self.name ]
def test(self, isa, os):
self.__set_args('10')
def train(self, isa, os):
self.__set_args('500')
def ref(self, isa, os):
self.__set_args('1000')
self.simpoint = 1135*100E6
def lgred(self, isa, os):
self.__set_args('1')
class mgrid(MinneDefaultBenchmark):
name = 'mgrid'
number = 172
lang = 'F77'
simpoint = 3292*100E6
class sixtrack(DefaultBenchmark):
name = 'sixtrack'
number = 200
lang = 'F77'
simpoint = 3043*100E6
def lgred(self, isa, os): pass
class swim(MinneDefaultBenchmark):
name = 'swim'
number = 171
lang = 'F77'
simpoint = 2079*100E6
class wupwise(DefaultBenchmark):
name = 'wupwise'
number = 168
lang = 'F77'
simpoint = 3237*100E6
def lgred(self, isa, os): pass
class bzip2(DefaultBenchmark):
name = 'bzip2'
number = 256
lang = 'C'
def test(self, isa, os):
self.args = [ 'input.random' ]
def train(self, isa, os):
self.args = [ 'input.compressed' ]
class bzip2_source(bzip2):
def ref(self, isa, os):
self.simpoint = 977*100E6
self.args = [ 'input.source', '58' ]
def lgred(self, isa, os):
self.args = [ 'input.source', '1' ]
class bzip2_graphic(bzip2):
def ref(self, isa, os):
self.simpoint = 718*100E6
self.args = [ 'input.graphic', '58' ]
def lgred(self, isa, os):
self.args = [ 'input.graphic', '1' ]
class bzip2_program(bzip2):
def ref(self, isa, os):
self.simpoint = 458*100E6
self.args = [ 'input.program', '58' ]
def lgred(self, isa, os):
self.args = [ 'input.program', '1' ]
class crafty(MinneDefaultBenchmark):
name = 'crafty'
number = 186
lang = 'C'
simpoint = 774*100E6
class eon(MinneDefaultBenchmark):
name = 'eon'
number = 252
lang = 'CXX'
stdin = None
class eon_kajiya(eon):
args = [ 'chair.control.kajiya', 'chair.camera', 'chair.surfaces',
'chair.kajiya.ppm', 'ppm', 'pixels_out.kajiya']
output = 'kajiya_log.out'
class eon_cook(eon):
args = [ 'chair.control.cook', 'chair.camera', 'chair.surfaces',
'chair.cook.ppm', 'ppm', 'pixels_out.cook' ]
output = 'cook_log.out'
class eon_rushmeier(eon):
args = [ 'chair.control.rushmeier', 'chair.camera', 'chair.surfaces',
'chair.rushmeier.ppm', 'ppm', 'pixels_out.rushmeier' ]
output = 'rushmeier_log.out'
simpoint = 403*100E6
class gap(DefaultBenchmark):
name = 'gap'
number = 254
lang = 'C'
def __set_args(self, size):
self.args = [ '-l', './', '-q', '-m', size ]
def test(self, isa, os):
self.__set_args('64M')
def train(self, isa, os):
self.__set_args('128M')
def ref(self, isa, os):
self.__set_args('192M')
self.simpoint = 674*100E6
def lgred(self, isa, os):
self.__set_args('64M')
def mdred(self, isa, os):
self.__set_args('64M')
def smred(self, isa, os):
self.__set_args('64M')
class gcc(DefaultBenchmark):
name = 'gcc'
number = 176
lang = 'C'
def test(self, isa, os):
self.args = [ 'cccp.i', '-o', 'cccp.s' ]
def train(self, isa, os):
self.args = [ 'cp-decl.i', '-o', 'cp-decl.s' ]
def smred(self, isa, os):
self.args = [ 'c-iterate.i', '-o', 'c-iterate.s' ]
def mdred(self, isa, os):
self.args = [ 'rdlanal.i', '-o', 'rdlanal.s' ]
def lgred(self, isa, os):
self.args = [ 'cp-decl.i', '-o', 'cp-decl.s' ]
class gcc_166(gcc):
def ref(self, isa, os):
self.simpoint = 389*100E6
self.args = [ '166.i', '-o', '166.s' ]
class gcc_200(gcc):
def ref(self, isa, os):
self.simpoint = 736*100E6
self.args = [ '200.i', '-o', '200.s' ]
class gcc_expr(gcc):
def ref(self, isa, os):
self.simpoint = 36*100E6
self.args = [ 'expr.i', '-o', 'expr.s' ]
class gcc_integrate(gcc):
def ref(self, isa, os):
self.simpoint = 4*100E6
self.args = [ 'integrate.i', '-o', 'integrate.s' ]
class gcc_scilab(gcc):
def ref(self, isa, os):
self.simpoint = 207*100E6
self.args = [ 'scilab.i', '-o', 'scilab.s' ]
class gzip(DefaultBenchmark):
name = 'gzip'
number = 164
lang = 'C'
def test(self, isa, os):
self.args = [ 'input.compressed', '2' ]
def train(self, isa, os):
self.args = [ 'input.combined', '32' ]
class gzip_source(gzip):
def ref(self, isa, os):
self.simpoint = 334*100E6
self.args = [ 'input.source', '1' ]
def smred(self, isa, os):
self.args = [ 'input.source', '1' ]
def mdred(self, isa, os):
self.args = [ 'input.source', '1' ]
def lgred(self, isa, os):
self.args = [ 'input.source', '1' ]
class gzip_log(gzip):
def ref(self, isa, os):
self.simpoint = 265*100E6
self.args = [ 'input.log', '60' ]
def smred(self, isa, os):
self.args = [ 'input.log', '1' ]
def mdred(self, isa, os):
self.args = [ 'input.log', '1' ]
def lgred(self, isa, os):
self.args = [ 'input.log', '1' ]
class gzip_graphic(gzip):
def ref(self, isa, os):
self.simpoint = 653*100E6
self.args = [ 'input.graphic', '60' ]
def smred(self, isa, os):
self.args = [ 'input.graphic', '1' ]
def mdred(self, isa, os):
self.args = [ 'input.graphic', '1' ]
def lgred(self, isa, os):
self.args = [ 'input.graphic', '1' ]
class gzip_random(gzip):
def ref(self, isa, os):
self.simpoint = 623*100E6
self.args = [ 'input.random', '60' ]
def smred(self, isa, os):
self.args = [ 'input.random', '1' ]
def mdred(self, isa, os):
self.args = [ 'input.random', '1' ]
def lgred(self, isa, os):
self.args = [ 'input.random', '1' ]
class gzip_program(gzip):
def ref(self, isa, os):
self.simpoint = 1189*100E6
self.args = [ 'input.program', '60' ]
def smred(self, isa, os):
self.args = [ 'input.program', '1' ]
def mdred(self, isa, os):
self.args = [ 'input.program', '1' ]
def lgred(self, isa, os):
self.args = [ 'input.program', '1' ]
class mcf(MinneDefaultBenchmark):
name = 'mcf'
number = 181
lang = 'C'
args = [ 'mcf.in' ]
simpoint = 553*100E6
class parser(MinneDefaultBenchmark):
name = 'parser'
number = 197
lang = 'C'
args = [ '2.1.dict', '-batch' ]
simpoint = 1146*100E6
class perlbmk(DefaultBenchmark):
name = 'perlbmk'
number = 253
lang = 'C'
def test(self, isa, os):
self.args = [ '-I.', '-I', 'lib', 'test.pl' ]
self.stdin = 'test.in'
class perlbmk_diffmail(perlbmk):
def ref(self, isa, os):
self.simpoint = 141*100E6
self.args = [ '-I', 'lib', 'diffmail.pl', '2', '550', '15', '24',
'23', '100' ]
def train(self, isa, os):
self.args = [ '-I', 'lib', 'diffmail.pl', '2', '350', '15', '24',
'23', '150' ]
class perlbmk_scrabbl(perlbmk):
def train(self, isa, os):
self.args = [ '-I.', '-I', 'lib', 'scrabbl.pl' ]
self.stdin = 'scrabbl.in'
class perlbmk_makerand(perlbmk):
def ref(self, isa, os):
self.simpoint = 11*100E6
self.args = [ '-I', 'lib', 'makerand.pl' ]
def lgred(self, isa, os):
self.args = [ '-I.', '-I', 'lib', 'lgred.makerand.pl' ]
def mdred(self, isa, os):
self.args = [ '-I.', '-I', 'lib', 'mdred.makerand.pl' ]
def smred(self, isa, os):
self.args = [ '-I.', '-I', 'lib', 'smred.makerand.pl' ]
class perlbmk_perfect(perlbmk):
def ref(self, isa, os):
self.simpoint = 5*100E6
self.args = [ '-I', 'lib', 'perfect.pl', 'b', '3', 'm', '4' ]
def train(self, isa, os):
self.args = [ '-I', 'lib', 'perfect.pl', 'b', '3' ]
class perlbmk_splitmail1(perlbmk):
def ref(self, isa, os):
self.simpoint = 405*100E6
self.args = [ '-I', 'lib', 'splitmail.pl', '850', '5', '19',
'18', '1500' ]
class perlbmk_splitmail2(perlbmk):
def ref(self, isa, os):
self.args = [ '-I', 'lib', 'splitmail.pl', '704', '12', '26',
'16', '836' ]
class perlbmk_splitmail3(perlbmk):
def ref(self, isa, os):
self.args = [ '-I', 'lib', 'splitmail.pl', '535', '13', '25',
'24', '1091' ]
class perlbmk_splitmail4(perlbmk):
def ref(self, isa, os):
self.args = [ '-I', 'lib', 'splitmail.pl', '957', '12', '23',
'26', '1014' ]
class twolf(Benchmark):
name = 'twolf'
number = 300
lang = 'C'
stdin = None
def test(self, isa, os):
self.args = [ 'test' ]
def train(self, isa, os):
self.args = [ 'train' ]
def ref(self, isa, os):
self.simpoint = 1066*100E6
self.args = [ 'ref' ]
def smred(self, isa, os):
self.args = [ 'smred' ]
def mdred(self, isa, os):
self.args = [ 'mdred' ]
def lgred(self, isa, os):
self.args = [ 'lgred' ]
class vortex(Benchmark):
name = 'vortex'
number = 255
lang = 'C'
stdin = None
def __init__(self, isa, os, input_set):
if (isa in ('alpha', 'arm', 'thumb', 'aarch64')):
self.endian = 'lendian'
elif (isa == 'sparc' or isa == 'sparc32'):
self.endian = 'bendian'
else:
raise AttributeError, "unknown ISA %s" % isa
super(vortex, self).__init__(isa, os, input_set)
def test(self, isa, os):
self.args = [ '%s.raw' % self.endian ]
self.output = 'vortex.out'
def train(self, isa, os):
self.args = [ '%s.raw' % self.endian ]
self.output = 'vortex.out'
def smred(self, isa, os):
self.args = [ '%s.raw' % self.endian ]
self.output = 'vortex.out'
def mdred(self, isa, os):
self.args = [ '%s.raw' % self.endian ]
self.output = 'vortex.out'
def lgred(self, isa, os):
self.args = [ '%s.raw' % self.endian ]
self.output = 'vortex.out'
class vortex1(vortex):
def ref(self, isa, os):
self.args = [ '%s1.raw' % self.endian ]
self.output = 'vortex1.out'
self.simpoint = 271*100E6
class vortex2(vortex):
def ref(self, isa, os):
self.simpoint = 1024*100E6
self.args = [ '%s2.raw' % self.endian ]
self.output = 'vortex2.out'
class vortex3(vortex):
def ref(self, isa, os):
self.simpoint = 564*100E6
self.args = [ '%s3.raw' % self.endian ]
self.output = 'vortex3.out'
class vpr(MinneDefaultBenchmark):
name = 'vpr'
number = 175
lang = 'C'
# not sure about vpr minnespec place.in
class vpr_place(vpr):
args = [ 'net.in', 'arch.in', 'place.out', 'dum.out', '-nodisp',
'-place_only', '-init_t', '5', '-exit_t', '0.005',
'-alpha_t', '0.9412', '-inner_num', '2' ]
output = 'place_log.out'
class vpr_route(vpr):
simpoint = 476*100E6
args = [ 'net.in', 'arch.in', 'place.in', 'route.out', '-nodisp',
'-route_only', '-route_chan_width', '15',
'-pres_fac_mult', '2', '-acc_fac', '1',
'-first_iter_pres_fac', '4', '-initial_pres_fac', '8' ]
output = 'route_log.out'
all = [ ammp, applu, apsi, art, art110, art470, equake, facerec, fma3d, galgel,
lucas, mesa, mgrid, sixtrack, swim, wupwise, bzip2_source,
bzip2_graphic, bzip2_program, crafty, eon_kajiya, eon_cook,
eon_rushmeier, gap, gcc_166, gcc_200, gcc_expr, gcc_integrate,
gcc_scilab, gzip_source, gzip_log, gzip_graphic, gzip_random,
gzip_program, mcf, parser, perlbmk_diffmail, perlbmk_makerand,
perlbmk_perfect, perlbmk_splitmail1, perlbmk_splitmail2,
perlbmk_splitmail3, perlbmk_splitmail4, twolf, vortex1, vortex2,
vortex3, vpr_place, vpr_route ]
__all__ = [ x.__name__ for x in all ]
if __name__ == '__main__':
from pprint import pprint
for bench in all:
for input_set in 'ref', 'test', 'train':
print 'class: %s' % bench.__name__
x = bench('alpha', 'tru64', input_set)
print '%s: %s' % (x, input_set)
pprint(x.makeLiveProcessArgs())
print
|
towerjoo/DjangoNotes
|
refs/heads/master
|
Django-1.5.1/tests/regressiontests/app_loading/not_installed/models.py
|
163
|
from django.db import models
class NotInstalledModel(models.Model):
pass
class RelatedModel(models.Model):
not_installed = models.ForeignKey(NotInstalledModel)
class M2MRelatedModel(models.Model):
not_installed = models.ManyToManyField(NotInstalledModel)
|
dsfsdgsbngfggb/odoo
|
refs/heads/8.0
|
addons/l10n_be_coda/wizard/account_coda_import.py
|
255
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2012 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp import tools
import logging
_logger = logging.getLogger(__name__)
class account_coda_import(osv.osv_memory):
_name = 'account.coda.import'
_description = 'Import CODA File'
_columns = {
'coda_data': fields.binary('CODA File', required=True),
'coda_fname': fields.char('CODA Filename', required=True),
'note': fields.text('Log'),
}
_defaults = {
'coda_fname': 'coda.txt',
}
def coda_parsing(self, cr, uid, ids, context=None, batch=False, codafile=None, codafilename=None):
if context is None:
context = {}
if batch:
codafile = str(codafile)
codafilename = codafilename
else:
data = self.browse(cr, uid, ids)[0]
try:
codafile = data.coda_data
codafilename = data.coda_fname
except:
raise osv.except_osv(_('Error'), _('Wizard in incorrect state. Please hit the Cancel button'))
return {}
recordlist = unicode(base64.decodestring(codafile), 'windows-1252', 'strict').split('\n')
statements = []
globalisation_comm = {}
for line in recordlist:
if not line:
pass
elif line[0] == '0':
#Beginning of a new bank statement
statement = {}
statements.append(statement)
statement['version'] = line[127]
if statement['version'] not in ['1', '2']:
raise osv.except_osv(_('Error') + ' R001', _('CODA V%s statements are not supported, please contact your bank') % statement['version'])
statement['globalisation_stack'] = []
statement['lines'] = []
statement['date'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[5:11]), '%d%m%y'))
statement['separateApplication'] = rmspaces(line[83:88])
elif line[0] == '1':
#Statement details
if statement['version'] == '1':
statement['acc_number'] = rmspaces(line[5:17])
statement['currency'] = rmspaces(line[18:21])
elif statement['version'] == '2':
if line[1] == '0': # Belgian bank account BBAN structure
statement['acc_number'] = rmspaces(line[5:17])
statement['currency'] = rmspaces(line[18:21])
elif line[1] == '1': # foreign bank account BBAN structure
raise osv.except_osv(_('Error') + ' R1001', _('Foreign bank accounts with BBAN structure are not supported '))
elif line[1] == '2': # Belgian bank account IBAN structure
statement['acc_number'] = rmspaces(line[5:21])
statement['currency'] = rmspaces(line[39:42])
elif line[1] == '3': # foreign bank account IBAN structure
raise osv.except_osv(_('Error') + ' R1002', _('Foreign bank accounts with IBAN structure are not supported '))
else: # Something else, not supported
raise osv.except_osv(_('Error') + ' R1003', _('Unsupported bank account structure '))
statement['journal_id'] = False
statement['bank_account'] = False
# Belgian account numbers are composed of 12 digits.
# In OpenERP, the user can enter the bank number in any format: with or without the IBAN code, with or without spaces, with or without '-'.
# The two following SQL queries handle those cases.
if len(statement['acc_number']) >= 12:
# If the account number has 12 or more digits, it is most likely a Belgian account number (with or without IBAN).
# The following query tries to find the account number using a 'like' operator,
# so an account number stored with its IBAN code can still be found.
cr.execute("select id from res_partner_bank where replace(replace(acc_number,' ',''),'-','') like %s", ('%' + statement['acc_number'] + '%',))
else:
# An exact match is necessary here: if the account number in the CODA file has only a few digits,
# a 'like' operator would return the first account number in the database that happens to contain them.
cr.execute("select id from res_partner_bank where replace(replace(acc_number,' ',''),'-','') = %s", (statement['acc_number'],))
bank_ids = [id[0] for id in cr.fetchall()]
# Filter bank accounts which are not allowed
bank_ids = self.pool.get('res.partner.bank').search(cr, uid, [('id', 'in', bank_ids)])
if bank_ids:
bank_accs = self.pool.get('res.partner.bank').browse(cr, uid, bank_ids)
for bank_acc in bank_accs:
if bank_acc.journal_id.id and ((bank_acc.journal_id.currency.id and bank_acc.journal_id.currency.name == statement['currency']) or (not bank_acc.journal_id.currency.id and bank_acc.journal_id.company_id.currency_id.name == statement['currency'])):
statement['journal_id'] = bank_acc.journal_id
statement['bank_account'] = bank_acc
break
if not statement['bank_account']:
raise osv.except_osv(_('Error') + ' R1004', _("No matching Bank Account (with Account Journal) found.\n\nPlease set-up a Bank Account with as Account Number '%s' and as Currency '%s' and an Account Journal.") % (statement['acc_number'], statement['currency']))
statement['description'] = rmspaces(line[90:125])
statement['balance_start'] = float(rmspaces(line[43:58])) / 1000
if line[42] == '1': #1 = Debit, the starting balance is negative
statement['balance_start'] = - statement['balance_start']
statement['balance_start_date'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[58:64]), '%d%m%y'))
statement['accountHolder'] = rmspaces(line[64:90])
statement['paperSeqNumber'] = rmspaces(line[2:5])
statement['codaSeqNumber'] = rmspaces(line[125:128])
elif line[0] == '2':
if line[1] == '1':
#New statement line
statementLine = {}
statementLine['ref'] = rmspaces(line[2:10])
statementLine['ref_move'] = rmspaces(line[2:6])
statementLine['ref_move_detail'] = rmspaces(line[6:10])
statementLine['sequence'] = len(statement['lines']) + 1
statementLine['transactionRef'] = rmspaces(line[10:31])
statementLine['debit'] = line[31] # 0 = Credit, 1 = Debit
statementLine['amount'] = float(rmspaces(line[32:47])) / 1000
if statementLine['debit'] == '1':
statementLine['amount'] = - statementLine['amount']
statementLine['transactionDate'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[47:53]), '%d%m%y'))
statementLine['transaction_family'] = rmspaces(line[54:56])
statementLine['transaction_code'] = rmspaces(line[56:58])
statementLine['transaction_category'] = rmspaces(line[58:61])
if line[61] == '1':
#Structured communication
statementLine['communication_struct'] = True
statementLine['communication_type'] = line[62:65]
statementLine['communication'] = '+++' + line[65:68] + '/' + line[68:72] + '/' + line[72:77] + '+++'
else:
#Non-structured communication
statementLine['communication_struct'] = False
statementLine['communication'] = rmspaces(line[62:115])
statementLine['entryDate'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[115:121]), '%d%m%y'))
statementLine['type'] = 'normal'
statementLine['globalisation'] = int(line[124])
if statementLine['globalisation'] > 0:
if statementLine['globalisation'] in statement['globalisation_stack']:
statement['globalisation_stack'].remove(statementLine['globalisation'])
else:
statementLine['type'] = 'globalisation'
statement['globalisation_stack'].append(statementLine['globalisation'])
globalisation_comm[statementLine['ref_move']] = statementLine['communication']
if not statementLine.get('communication'):
statementLine['communication'] = globalisation_comm.get(statementLine['ref_move'], '')
statement['lines'].append(statementLine)
elif line[1] == '2':
if statement['lines'][-1]['ref'][0:4] != line[2:6]:
raise osv.except_osv(_('Error') + 'R2004', _('CODA parsing error on movement data record 2.2, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
statement['lines'][-1]['communication'] += rmspaces(line[10:63])
statement['lines'][-1]['payment_reference'] = rmspaces(line[63:98])
statement['lines'][-1]['counterparty_bic'] = rmspaces(line[98:109])
elif line[1] == '3':
if statement['lines'][-1]['ref'][0:4] != line[2:6]:
raise osv.except_osv(_('Error') + 'R2005', _('CODA parsing error on movement data record 2.3, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
if statement['version'] == '1':
statement['lines'][-1]['counterpartyNumber'] = rmspaces(line[10:22])
statement['lines'][-1]['counterpartyName'] = rmspaces(line[47:73])
statement['lines'][-1]['counterpartyAddress'] = rmspaces(line[73:125])
statement['lines'][-1]['counterpartyCurrency'] = ''
else:
if line[22] == ' ':
statement['lines'][-1]['counterpartyNumber'] = rmspaces(line[10:22])
statement['lines'][-1]['counterpartyCurrency'] = rmspaces(line[23:26])
else:
statement['lines'][-1]['counterpartyNumber'] = rmspaces(line[10:44])
statement['lines'][-1]['counterpartyCurrency'] = rmspaces(line[44:47])
statement['lines'][-1]['counterpartyName'] = rmspaces(line[47:82])
statement['lines'][-1]['communication'] += rmspaces(line[82:125])
else:
# movement data record 2.x (x != 1,2,3)
raise osv.except_osv(_('Error') + 'R2006', _('\nMovement data records of type 2.%s are not supported ') % line[1])
elif line[0] == '3':
if line[1] == '1':
infoLine = {}
infoLine['entryDate'] = statement['lines'][-1]['entryDate']
infoLine['type'] = 'information'
infoLine['sequence'] = len(statement['lines']) + 1
infoLine['ref'] = rmspaces(line[2:10])
infoLine['transactionRef'] = rmspaces(line[10:31])
infoLine['transaction_family'] = rmspaces(line[32:34])
infoLine['transaction_code'] = rmspaces(line[34:36])
infoLine['transaction_category'] = rmspaces(line[36:39])
infoLine['communication'] = rmspaces(line[40:113])
statement['lines'].append(infoLine)
elif line[1] == '2':
if infoLine['ref'] != rmspaces(line[2:10]):
raise osv.except_osv(_('Error') + 'R3004', _('CODA parsing error on information data record 3.2, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
statement['lines'][-1]['communication'] += rmspaces(line[10:100])
elif line[1] == '3':
if infoLine['ref'] != rmspaces(line[2:10]):
raise osv.except_osv(_('Error') + 'R3005', _('CODA parsing error on information data record 3.3, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
statement['lines'][-1]['communication'] += rmspaces(line[10:100])
elif line[0] == '4':
comm_line = {}
comm_line['type'] = 'communication'
comm_line['sequence'] = len(statement['lines']) + 1
comm_line['ref'] = rmspaces(line[2:10])
comm_line['communication'] = rmspaces(line[32:112])
statement['lines'].append(comm_line)
elif line[0] == '8':
# new balance record
statement['debit'] = line[41]
statement['paperSeqNumber'] = rmspaces(line[1:4])
statement['balance_end_real'] = float(rmspaces(line[42:57])) / 1000
statement['balance_end_realDate'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[57:63]), '%d%m%y'))
if statement['debit'] == '1': # 1=Debit
statement['balance_end_real'] = - statement['balance_end_real']
if statement['balance_end_realDate']:
period_id = self.pool.get('account.period').search(cr, uid, [('company_id', '=', statement['journal_id'].company_id.id), ('date_start', '<=', statement['balance_end_realDate']), ('date_stop', '>=', statement['balance_end_realDate'])])
else:
period_id = self.pool.get('account.period').search(cr, uid, [('company_id', '=', statement['journal_id'].company_id.id), ('date_start', '<=', statement['date']), ('date_stop', '>=', statement['date'])])
if not period_id:
raise osv.except_osv(_('Error') + 'R0002', _("The CODA Statement New Balance date doesn't fall within a defined Accounting Period! Please create the Accounting Period for date %s for the company %s.") % (statement['balance_end_realDate'], statement['journal_id'].company_id.name))
statement['period_id'] = period_id[0]
elif line[0] == '9':
statement['balanceMin'] = float(rmspaces(line[22:37])) / 1000
statement['balancePlus'] = float(rmspaces(line[37:52])) / 1000
if not statement.get('balance_end_real'):
statement['balance_end_real'] = statement['balance_start'] + statement['balancePlus'] - statement['balanceMin']
for i, statement in enumerate(statements):
statement['coda_note'] = ''
balance_start_check_date = (len(statement['lines']) > 0 and statement['lines'][0]['entryDate']) or statement['date']
cr.execute('SELECT balance_end_real \
FROM account_bank_statement \
WHERE journal_id = %s and date <= %s \
ORDER BY date DESC,id DESC LIMIT 1', (statement['journal_id'].id, balance_start_check_date))
res = cr.fetchone()
balance_start_check = res and res[0]
if balance_start_check is None:
if statement['journal_id'].default_debit_account_id and (statement['journal_id'].default_credit_account_id == statement['journal_id'].default_debit_account_id):
balance_start_check = statement['journal_id'].default_debit_account_id.balance
else:
raise osv.except_osv(_('Error'), _("Configuration Error in journal %s!\nPlease verify the Default Debit and Credit Account settings.") % statement['journal_id'].name)
if balance_start_check != statement['balance_start']:
statement['coda_note'] = _("The CODA Statement %s Starting Balance (%.2f) does not correspond with the previous Closing Balance (%.2f) in journal %s!") % (statement['description'] + ' #' + statement['paperSeqNumber'], statement['balance_start'], balance_start_check, statement['journal_id'].name)
if not(statement.get('period_id')):
raise osv.except_osv(_('Error') + ' R3006', _(' No transactions or no period in coda file !'))
data = {
'name': statement['paperSeqNumber'],
'date': statement['date'],
'journal_id': statement['journal_id'].id,
'period_id': statement['period_id'],
'balance_start': statement['balance_start'],
'balance_end_real': statement['balance_end_real'],
}
statement['id'] = self.pool.get('account.bank.statement').create(cr, uid, data, context=context)
for line in statement['lines']:
if line['type'] == 'information':
statement['coda_note'] = "\n".join([statement['coda_note'], line['type'].title() + ' with Ref. ' + str(line['ref']), 'Date: ' + str(line['entryDate']), 'Communication: ' + line['communication'], ''])
elif line['type'] == 'communication':
statement['coda_note'] = "\n".join([statement['coda_note'], line['type'].title() + ' with Ref. ' + str(line['ref']), 'Ref: ', 'Communication: ' + line['communication'], ''])
elif line['type'] == 'normal':
note = []
if 'counterpartyName' in line and line['counterpartyName'] != '':
note.append(_('Counter Party') + ': ' + line['counterpartyName'])
else:
line['counterpartyName'] = False
if 'counterpartyNumber' in line and line['counterpartyNumber'] != '':
try:
if int(line['counterpartyNumber']) == 0:
line['counterpartyNumber'] = False
except:
pass
if line['counterpartyNumber']:
note.append(_('Counter Party Account') + ': ' + line['counterpartyNumber'])
else:
line['counterpartyNumber'] = False
if 'counterpartyAddress' in line and line['counterpartyAddress'] != '':
note.append(_('Counter Party Address') + ': ' + line['counterpartyAddress'])
partner_id = None
structured_com = False
bank_account_id = False
if line['communication_struct'] and 'communication_type' in line and line['communication_type'] == '101':
structured_com = line['communication']
if 'counterpartyNumber' in line and line['counterpartyNumber']:
account = str(line['counterpartyNumber'])
domain = [('acc_number', '=', account)]
iban = account[0:2].isalpha()
if iban:
n = 4
space_separated_account = ' '.join(account[i:i + n] for i in range(0, len(account), n))
domain = ['|', ('acc_number', '=', space_separated_account)] + domain
ids = self.pool.get('res.partner.bank').search(cr, uid, domain)
if ids:
bank_account_id = ids[0]
bank_account = self.pool.get('res.partner.bank').browse(cr, uid, bank_account_id, context=context)
line['counterpartyNumber'] = bank_account.acc_number
partner_id = bank_account.partner_id.id
else:
#create the bank account, not linked to any partner; the reconciliation step will link the partner,
#chosen manually at bank statement final confirmation time.
try:
type_model, type_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'bank_normal')
type_id = self.pool.get('res.partner.bank.type').browse(cr, uid, type_id, context=context)
bank_code = type_id.code
except ValueError:
bank_code = 'bank'
bank_account_id = self.pool.get('res.partner.bank').create(cr, uid, {'acc_number': str(line['counterpartyNumber']), 'state': bank_code}, context=context)
if line.get('communication', ''):
note.append(_('Communication') + ': ' + line['communication'])
data = {
'name': structured_com or (line.get('communication', '') != '' and line['communication'] or '/'),
'note': "\n".join(note),
'date': line['entryDate'],
'amount': line['amount'],
'partner_id': partner_id,
'partner_name': line['counterpartyName'],
'statement_id': statement['id'],
'ref': line['ref'],
'sequence': line['sequence'],
'bank_account_id': bank_account_id,
}
self.pool.get('account.bank.statement.line').create(cr, uid, data, context=context)
if statement['coda_note'] != '':
self.pool.get('account.bank.statement').write(cr, uid, [statement['id']], {'coda_note': statement['coda_note']}, context=context)
model, action_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', 'action_bank_reconcile_bank_statements')
action = self.pool[model].browse(cr, uid, action_id, context=context)
statements_ids = [statement['id'] for statement in statements]
return {
'name': action.name,
'tag': action.tag,
'context': {'statement_ids': statements_ids},
'type': 'ir.actions.client',
}
def rmspaces(s):
return " ".join(s.split())
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agincel/AdamTestBot
|
refs/heads/master
|
requests/cookies.py
|
2
|
# -*- coding: utf-8 -*-
"""
Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""
import copy
import time
import collections
from .compat import cookielib, urlparse, urlunparse, Morsel
try:
import threading
# grr, pyflakes: this fixes "redefinition of unused 'threading'"
threading
except ImportError:
import dummy_threading as threading
class MockRequest(object):
"""Wraps a `requests.Request` to mimic a `urllib2.Request`.
The code in `cookielib.CookieJar` expects this interface in order to correctly
manage cookie policies, i.e., determine whether a cookie can be set, given the
domains of the request and the cookie.
The original request object is read-only. The client is responsible for collecting
the new headers via `get_new_headers()` and interpreting them appropriately. You
probably want `get_cookie_header`, defined below.
"""
def __init__(self, request):
self._r = request
self._new_headers = {}
self.type = urlparse(self._r.url).scheme
def get_type(self):
return self.type
def get_host(self):
return urlparse(self._r.url).netloc
def get_origin_req_host(self):
return self.get_host()
def get_full_url(self):
# Only return the request's URL if the user hasn't set the Host
# header
if not self._r.headers.get('Host'):
return self._r.url
# If they did set it, retrieve it and reconstruct the expected domain
host = self._r.headers['Host']
parsed = urlparse(self._r.url)
# Reconstruct the URL as we expect it
return urlunparse([
parsed.scheme, host, parsed.path, parsed.params, parsed.query,
parsed.fragment
])
def is_unverifiable(self):
return True
def has_header(self, name):
return name in self._r.headers or name in self._new_headers
def get_header(self, name, default=None):
return self._r.headers.get(name, self._new_headers.get(name, default))
def add_header(self, key, val):
"""cookielib has no legitimate use for this method; add it back if you find one."""
raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
def add_unredirected_header(self, name, value):
self._new_headers[name] = value
def get_new_headers(self):
return self._new_headers
@property
def unverifiable(self):
return self.is_unverifiable()
@property
def origin_req_host(self):
return self.get_origin_req_host()
@property
def host(self):
return self.get_host()
class MockResponse(object):
"""Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
...what? Basically, expose the parsed HTTP headers from the server response
the way `cookielib` expects to see them.
"""
def __init__(self, headers):
"""Make a MockResponse for `cookielib` to read.
:param headers: a httplib.HTTPMessage or analogous carrying the headers
"""
self._headers = headers
def info(self):
return self._headers
def getheaders(self, name):
return self._headers.getheaders(name)
def extract_cookies_to_jar(jar, request, response):
"""Extract the cookies from the response into a CookieJar.
:param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
:param request: our own requests.Request object
:param response: urllib3.HTTPResponse object
"""
if not (hasattr(response, '_original_response') and
response._original_response):
return
# the _original_response field is the wrapped httplib.HTTPResponse object,
req = MockRequest(request)
# pull out the HTTPMessage with the headers and put it in the mock:
res = MockResponse(response._original_response.msg)
jar.extract_cookies(res, req)
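# Usage sketch (assumes the requests session machinery: a PreparedRequest and
# the urllib3 response it produced; the names are illustrative):
#
#   extract_cookies_to_jar(session.cookies, prepared_request, raw_response)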
def get_cookie_header(jar, request):
"""Produce an appropriate Cookie header string to be sent with `request`, or None."""
r = MockRequest(request)
jar.add_cookie_header(r)
return r.get_new_headers().get('Cookie')
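# Sketch: the return value is the string for the Cookie header, or None if
# nothing in the jar applies to `request` (a prepared request object):
#
#   header = get_cookie_header(jar, request)
#   if header is not None:
#       request.headers['Cookie'] = header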
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
"""Unsets a cookie by name, by default over all domains and paths.
Wraps CookieJar.clear(), is O(n).
"""
clearables = []
for cookie in cookiejar:
if cookie.name == name:
if domain is None or domain == cookie.domain:
if path is None or path == cookie.path:
clearables.append((cookie.domain, cookie.path, cookie.name))
for domain, path, name in clearables:
cookiejar.clear(domain, path, name)
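# Sketch: drop a cookie named 'sid' everywhere, or only for one domain/path:
#
#   remove_cookie_by_name(jar, 'sid')
#   remove_cookie_by_name(jar, 'sid', domain='example.com', path='/')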
class CookieConflictError(RuntimeError):
"""There are two cookies that meet the criteria specified in the cookie jar.
Use .get and .set and include domain and path args in order to be more specific."""
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
"""Compatibility class; is a cookielib.CookieJar, but exposes a dict
interface.
This is the CookieJar we create by default for requests and sessions that
don't specify one, since some clients may expect response.cookies and
session.cookies to support dict operations.
Requests does not use the dict interface internally; it's just for
compatibility with external client code. All requests code should work
out of the box with externally provided instances of ``CookieJar``, e.g.
``LWPCookieJar`` and ``FileCookieJar``.
Unlike a regular CookieJar, this class is pickleable.
.. warning:: dictionary operations that are normally O(1) may be O(n).
"""
def get(self, name, default=None, domain=None, path=None):
"""Dict-like get() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
multiple domains.
.. warning:: operation is O(n), not O(1)."""
try:
return self._find_no_duplicates(name, domain, path)
except KeyError:
return default
def set(self, name, value, **kwargs):
"""Dict-like set() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
multiple domains."""
# support client code that unsets cookies by assignment of a None value:
if value is None:
remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
return
if isinstance(value, Morsel):
c = morsel_to_cookie(value)
else:
c = create_cookie(name, value, **kwargs)
self.set_cookie(c)
return c
def iterkeys(self):
"""Dict-like iterkeys() that returns an iterator of names of cookies
from the jar. See itervalues() and iteritems()."""
for cookie in iter(self):
yield cookie.name
def keys(self):
"""Dict-like keys() that returns a list of names of cookies from the
jar. See values() and items()."""
return list(self.iterkeys())
def itervalues(self):
"""Dict-like itervalues() that returns an iterator of values of cookies
from the jar. See iterkeys() and iteritems()."""
for cookie in iter(self):
yield cookie.value
def values(self):
"""Dict-like values() that returns a list of values of cookies from the
jar. See keys() and items()."""
return list(self.itervalues())
def iteritems(self):
"""Dict-like iteritems() that returns an iterator of name-value tuples
from the jar. See iterkeys() and itervalues()."""
for cookie in iter(self):
yield cookie.name, cookie.value
def items(self):
"""Dict-like items() that returns a list of name-value tuples from the
jar. See keys() and values(). Allows client-code to call
``dict(RequestsCookieJar)`` and get a vanilla python dict of key value
pairs."""
return list(self.iteritems())
def list_domains(self):
"""Utility method to list all the domains in the jar."""
domains = []
for cookie in iter(self):
if cookie.domain not in domains:
domains.append(cookie.domain)
return domains
def list_paths(self):
"""Utility method to list all the paths in the jar."""
paths = []
for cookie in iter(self):
if cookie.path not in paths:
paths.append(cookie.path)
return paths
def multiple_domains(self):
"""Returns True if there are multiple domains in the jar.
Returns False otherwise."""
domains = []
for cookie in iter(self):
if cookie.domain is not None and cookie.domain in domains:
return True
domains.append(cookie.domain)
return False # there is only one domain in jar
def get_dict(self, domain=None, path=None):
"""Takes as an argument an optional domain and path and returns a plain
old Python dict of name-value pairs of cookies that meet the
requirements."""
dictionary = {}
for cookie in iter(self):
if (domain is None or cookie.domain == domain) and (path is None
or cookie.path == path):
dictionary[cookie.name] = cookie.value
return dictionary
def __getitem__(self, name):
"""Dict-like __getitem__() for compatibility with client code. Throws
exception if there are more than one cookie with name. In that case,
use the more explicit get() method instead.
.. warning:: operation is O(n), not O(1)."""
return self._find_no_duplicates(name)
def __setitem__(self, name, value):
"""Dict-like __setitem__ for compatibility with client code. Throws
exception if there is already a cookie of that name in the jar. In that
case, use the more explicit set() method instead."""
self.set(name, value)
def __delitem__(self, name):
"""Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
``remove_cookie_by_name()``."""
remove_cookie_by_name(self, name)
def set_cookie(self, cookie, *args, **kwargs):
if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
cookie.value = cookie.value.replace('\\"', '')
return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
def update(self, other):
"""Updates this jar with cookies from another CookieJar or dict-like"""
if isinstance(other, cookielib.CookieJar):
for cookie in other:
self.set_cookie(copy.copy(cookie))
else:
super(RequestsCookieJar, self).update(other)
def _find(self, name, domain=None, path=None):
"""Requests uses this method internally to get cookie values. Takes as
args name and optional domain and path. Returns a cookie.value. If
there are conflicting cookies, _find arbitrarily chooses one. See
_find_no_duplicates if you want an exception thrown if there are
conflicting cookies."""
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
if path is None or cookie.path == path:
return cookie.value
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def _find_no_duplicates(self, name, domain=None, path=None):
"""Both ``__get_item__`` and ``get`` call this function: it's never
used elsewhere in Requests. Takes as args name and optional domain and
path. Returns a cookie.value. Throws KeyError if cookie is not found
and CookieConflictError if there are multiple cookies that match name
and optionally domain and path."""
toReturn = None
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
if path is None or cookie.path == path:
if toReturn is not None: # if there are multiple cookies that meet passed in criteria
raise CookieConflictError('There are multiple cookies with name, %r' % (name))
toReturn = cookie.value # we will eventually return this as long as no cookie conflict
if toReturn:
return toReturn
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def __getstate__(self):
"""Unlike a normal CookieJar, this class is pickleable."""
state = self.__dict__.copy()
# remove the unpickleable RLock object
state.pop('_cookies_lock')
return state
def __setstate__(self, state):
"""Unlike a normal CookieJar, this class is pickleable."""
self.__dict__.update(state)
if '_cookies_lock' not in self.__dict__:
self._cookies_lock = threading.RLock()
def copy(self):
"""Return a copy of this RequestsCookieJar."""
new_cj = RequestsCookieJar()
new_cj.update(self)
return new_cj
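# Dict-style usage sketch for RequestsCookieJar (illustrative):
#
#   jar = RequestsCookieJar()
#   jar.set('token', 'abc', domain='example.com', path='/')
#   jar['theme'] = 'dark'                       # plain __setitem__
#   assert jar.get('token', domain='example.com') == 'abc'
#   assert sorted(jar.keys()) == ['theme', 'token']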
def _copy_cookie_jar(jar):
if jar is None:
return None
if hasattr(jar, 'copy'):
# We're dealing with an instance of RequestsCookieJar
return jar.copy()
# We're dealing with a generic CookieJar instance
new_jar = copy.copy(jar)
new_jar.clear()
for cookie in jar:
new_jar.set_cookie(copy.copy(cookie))
return new_jar
def create_cookie(name, value, **kwargs):
"""Make a cookie from underspecified parameters.
By default, the pair of `name` and `value` will be set for the domain ''
and sent on every request (this is sometimes called a "supercookie").
"""
result = dict(
version=0,
name=name,
value=value,
port=None,
domain='',
path='/',
secure=False,
expires=None,
discard=True,
comment=None,
comment_url=None,
rest={'HttpOnly': None},
rfc2109=False,)
badargs = set(kwargs) - set(result)
if badargs:
err = 'create_cookie() got unexpected keyword arguments: %s'
raise TypeError(err % list(badargs))
result.update(kwargs)
result['port_specified'] = bool(result['port'])
result['domain_specified'] = bool(result['domain'])
result['domain_initial_dot'] = result['domain'].startswith('.')
result['path_specified'] = bool(result['path'])
return cookielib.Cookie(**result)
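# Sketch: build a host-restricted, secure cookie and add it to a jar:
#
#   c = create_cookie('sid', 'deadbeef', domain='example.com', secure=True)
#   jar.set_cookie(c)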
def morsel_to_cookie(morsel):
"""Convert a Morsel object into a Cookie containing the one k/v pair."""
expires = None
if morsel['max-age']:
expires = time.time() + morsel['max-age']
elif morsel['expires']:
time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
expires = time.mktime(
time.strptime(morsel['expires'], time_template)) - time.timezone
return create_cookie(
comment=morsel['comment'],
comment_url=bool(morsel['comment']),
discard=False,
domain=morsel['domain'],
expires=expires,
name=morsel.key,
path=morsel['path'],
port=None,
rest={'HttpOnly': morsel['httponly']},
rfc2109=False,
secure=bool(morsel['secure']),
value=morsel.value,
version=morsel['version'] or 0,
)
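# Sketch (stdlib Morsel -> Cookie; `Cookie` is the Python 2 module name,
# `http.cookies` on Python 3):
#
#   from Cookie import SimpleCookie
#   sc = SimpleCookie('sid=deadbeef; Path=/')
#   cookie = morsel_to_cookie(sc['sid'])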
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
"""Returns a CookieJar from a key/value dictionary.
:param cookie_dict: Dict of key/values to insert into CookieJar.
:param cookiejar: (optional) A cookiejar to add the cookies to.
:param overwrite: (optional) If False, will not replace cookies
already in the jar with new ones.
"""
if cookiejar is None:
cookiejar = RequestsCookieJar()
if cookie_dict is not None:
names_from_jar = [cookie.name for cookie in cookiejar]
for name in cookie_dict:
if overwrite or (name not in names_from_jar):
cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
return cookiejar
def merge_cookies(cookiejar, cookies):
"""Add cookies to cookiejar and returns a merged CookieJar.
:param cookiejar: CookieJar object to add the cookies to.
:param cookies: Dictionary or CookieJar object to be added.
"""
if not isinstance(cookiejar, cookielib.CookieJar):
raise ValueError('You can only merge into CookieJar')
if isinstance(cookies, dict):
cookiejar = cookiejar_from_dict(
cookies, cookiejar=cookiejar, overwrite=False)
elif isinstance(cookies, cookielib.CookieJar):
try:
cookiejar.update(cookies)
except AttributeError:
for cookie_in_jar in cookies:
cookiejar.set_cookie(cookie_in_jar)
return cookiejar
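# Sketch: seed a jar from a dict, then merge more cookies without clobbering
# what is already there (merge_cookies passes overwrite=False for dicts):
#
#   jar = cookiejar_from_dict({'a': '1'})
#   jar = merge_cookies(jar, {'a': '2', 'b': '3'})   # 'a' keeps '1'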
|
MIPS/external-chromium_org
|
refs/heads/dev-mips-jb-kitkat
|
gpu/command_buffer/build_gles2_cmd_buffer.py
|
23
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""code generator for GLES2 command buffers."""
import itertools
import os
import os.path
import sys
import re
from optparse import OptionParser
_SIZE_OF_UINT32 = 4
_SIZE_OF_COMMAND_HEADER = 4
_FIRST_SPECIFIC_COMMAND_ID = 256
_LICENSE = """// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
"""
_DO_NOT_EDIT_WARNING = """// This file is auto-generated from
// gpu/command_buffer/build_gles2_cmd_buffer.py
// DO NOT EDIT!
"""
# This string is copied directly out of the gl2.h file from GLES2.0
#
# Edits:
#
# *) Any argument that is a resourceID has been changed to GLid<Type>.
# (not pointer arguments) and if it's allowed to be zero it's GLidZero<Type>
# If it's allowed to not exist it's GLidBind<Type>
#
# *) All GLenums have been changed to GLenumTypeOfEnum
#
_GL_TYPES = {
'GLenum': 'unsigned int',
'GLboolean': 'unsigned char',
'GLbitfield': 'unsigned int',
'GLbyte': 'signed char',
'GLshort': 'short',
'GLint': 'int',
'GLsizei': 'int',
'GLubyte': 'unsigned char',
'GLushort': 'unsigned short',
'GLuint': 'unsigned int',
'GLfloat': 'float',
'GLclampf': 'float',
'GLvoid': 'void',
'GLfixed': 'int',
'GLclampx': 'int',
'GLintptr': 'long int',
'GLsizeiptr': 'long int',
}
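# Sketch: the generator resolves a GL typedef to its underlying C type via
# this table, e.g. _GL_TYPES['GLintptr'] == 'long int'.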
# Capabilities selected with glEnable
_CAPABILITY_FLAGS = [
{'name': 'blend'},
{'name': 'cull_face'},
{'name': 'depth_test', 'state_flag': 'clear_state_dirty_'},
{'name': 'dither', 'default': True},
{'name': 'polygon_offset_fill'},
{'name': 'sample_alpha_to_coverage'},
{'name': 'sample_coverage'},
{'name': 'scissor_test', 'state_flag': 'clear_state_dirty_'},
{'name': 'stencil_test', 'state_flag': 'clear_state_dirty_'},
]
_STATES = {
'ClearColor': {
'type': 'Normal',
'func': 'ClearColor',
'enum': 'GL_COLOR_CLEAR_VALUE',
'states': [
{'name': 'color_clear_red', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_green', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_blue', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_alpha', 'type': 'GLfloat', 'default': '0.0f'},
],
},
'ClearDepthf': {
'type': 'Normal',
'func': 'ClearDepth',
'enum': 'GL_DEPTH_CLEAR_VALUE',
'states': [
{'name': 'depth_clear', 'type': 'GLclampf', 'default': '1.0f'},
],
},
'ColorMask': {
'type': 'Normal',
'func': 'ColorMask',
'enum': 'GL_COLOR_WRITEMASK',
'states': [
{'name': 'color_mask_red', 'type': 'GLboolean', 'default': 'true'},
{'name': 'color_mask_green', 'type': 'GLboolean', 'default': 'true'},
{'name': 'color_mask_blue', 'type': 'GLboolean', 'default': 'true'},
{'name': 'color_mask_alpha', 'type': 'GLboolean', 'default': 'true'},
],
'state_flag': 'clear_state_dirty_',
},
'ClearStencil': {
'type': 'Normal',
'func': 'ClearStencil',
'enum': 'GL_STENCIL_CLEAR_VALUE',
'states': [
{'name': 'stencil_clear', 'type': 'GLint', 'default': '0'},
],
},
'BlendColor': {
'type': 'Normal',
'func': 'BlendColor',
'enum': 'GL_BLEND_COLOR',
'states': [
{'name': 'blend_color_red', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_green', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_blue', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_alpha', 'type': 'GLfloat', 'default': '0.0f'},
],
},
'BlendEquation': {
'type': 'SrcDst',
'func': 'BlendEquationSeparate',
'states': [
{
'name': 'blend_equation_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_EQUATION_RGB',
'default': 'GL_FUNC_ADD',
},
{
'name': 'blend_equation_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_EQUATION_ALPHA',
'default': 'GL_FUNC_ADD',
},
],
},
'BlendFunc': {
'type': 'SrcDst',
'func': 'BlendFuncSeparate',
'states': [
{
'name': 'blend_source_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_SRC_RGB',
'default': 'GL_ONE',
},
{
'name': 'blend_dest_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_DST_RGB',
'default': 'GL_ZERO',
},
{
'name': 'blend_source_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_SRC_ALPHA',
'default': 'GL_ONE',
},
{
'name': 'blend_dest_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_DST_ALPHA',
'default': 'GL_ZERO',
},
],
},
'PolygonOffset': {
'type': 'Normal',
'func': 'PolygonOffset',
'states': [
{
'name': 'polygon_offset_factor',
'type': 'GLfloat',
'enum': 'GL_POLYGON_OFFSET_FACTOR',
'default': '0.0f',
},
{
'name': 'polygon_offset_units',
'type': 'GLfloat',
'enum': 'GL_POLYGON_OFFSET_UNITS',
'default': '0.0f',
},
],
},
'CullFace': {
'type': 'Normal',
'func': 'CullFace',
'enum': 'GL_CULL_FACE_MODE',
'states': [
{
'name': 'cull_mode',
'type': 'GLenum',
'default': 'GL_BACK',
},
],
},
'FrontFace': {
'type': 'Normal',
'func': 'FrontFace',
'enum': 'GL_FRONT_FACE',
'states': [{'name': 'front_face', 'type': 'GLenum', 'default': 'GL_CCW'}],
},
'DepthFunc': {
'type': 'Normal',
'func': 'DepthFunc',
'enum': 'GL_DEPTH_FUNC',
'states': [{'name': 'depth_func', 'type': 'GLenum', 'default': 'GL_LESS'}],
},
'DepthRange': {
'type': 'Normal',
'func': 'DepthRange',
'enum': 'GL_DEPTH_RANGE',
'states': [
{'name': 'z_near', 'type': 'GLclampf', 'default': '0.0f'},
{'name': 'z_far', 'type': 'GLclampf', 'default': '1.0f'},
],
},
'SampleCoverage': {
'type': 'Normal',
'func': 'SampleCoverage',
'states': [
{
'name': 'sample_coverage_value',
'type': 'GLclampf',
'enum': 'GL_SAMPLE_COVERAGE_VALUE',
'default': '1.0f',
},
{
'name': 'sample_coverage_invert',
'type': 'GLboolean',
'enum': 'GL_SAMPLE_COVERAGE_INVERT',
'default': 'false',
},
],
},
'StencilMask': {
'type': 'FrontBack',
'func': 'StencilMaskSeparate',
'state_flag': 'clear_state_dirty_',
'states': [
{
'name': 'stencil_front_writemask',
'type': 'GLuint',
'enum': 'GL_STENCIL_WRITEMASK',
'default': '0xFFFFFFFFU',
},
{
'name': 'stencil_back_writemask',
'type': 'GLuint',
'enum': 'GL_STENCIL_BACK_WRITEMASK',
'default': '0xFFFFFFFFU',
},
],
},
'StencilOp': {
'type': 'FrontBack',
'func': 'StencilOpSeparate',
'states': [
{
'name': 'stencil_front_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_front_z_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_PASS_DEPTH_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_front_z_pass_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_PASS_DEPTH_PASS',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_z_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_PASS_DEPTH_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_z_pass_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_PASS_DEPTH_PASS',
'default': 'GL_KEEP',
},
],
},
'StencilFunc': {
'type': 'FrontBack',
'func': 'StencilFuncSeparate',
'states': [
{
'name': 'stencil_front_func',
'type': 'GLenum',
'enum': 'GL_STENCIL_FUNC',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_front_ref',
'type': 'GLint',
'enum': 'GL_STENCIL_REF',
'default': '0',
},
{
'name': 'stencil_front_mask',
'type': 'GLuint',
'enum': 'GL_STENCIL_VALUE_MASK',
'default': '0xFFFFFFFFU',
},
{
'name': 'stencil_back_func',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_FUNC',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_back_ref',
'type': 'GLint',
'enum': 'GL_STENCIL_BACK_REF',
'default': '0',
},
{
'name': 'stencil_back_mask',
'type': 'GLuint',
'enum': 'GL_STENCIL_BACK_VALUE_MASK',
'default': '0xFFFFFFFFU',
},
],
},
'Hint': {
'type': 'NamedParameter',
'func': 'Hint',
'states': [
{
'name': 'hint_generate_mipmap',
'type': 'GLenum',
'enum': 'GL_GENERATE_MIPMAP_HINT',
'default': 'GL_DONT_CARE'
},
{
'name': 'hint_fragment_shader_derivative',
'type': 'GLenum',
'enum': 'GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES',
'default': 'GL_DONT_CARE',
'extension_flag': 'oes_standard_derivatives'
}
],
},
'PixelStore': {
'type': 'NamedParameter',
'func': 'PixelStorei',
'states': [
{
'name': 'pack_alignment',
'type': 'GLint',
'enum': 'GL_PACK_ALIGNMENT',
'default': '4'
},
{
'name': 'unpack_alignment',
'type': 'GLint',
'enum': 'GL_UNPACK_ALIGNMENT',
'default': '4'
}
],
},
# TODO: Consider implementing these states
# GL_ACTIVE_TEXTURE
'LineWidth': {
'type': 'Normal',
'func': 'LineWidth',
'enum': 'GL_LINE_WIDTH',
'states': [
{
'name': 'line_width',
'type': 'GLfloat',
'default': '1.0f',
'range_checks': [{'check': "<= 0.0f", 'test_value': "0.0f"}],
}],
},
'DepthMask': {
'type': 'Normal',
'func': 'DepthMask',
'enum': 'GL_DEPTH_WRITEMASK',
'states': [
{'name': 'depth_mask', 'type': 'GLboolean', 'default': 'true'},
],
'state_flag': 'clear_state_dirty_',
},
'Scissor': {
'type': 'Normal',
'func': 'Scissor',
'enum': 'GL_SCISSOR_BOX',
'states': [
# NOTE: These defaults are reset at GLES2DecoderImpl::Initialization.
{
'name': 'scissor_x',
'type': 'GLint',
'default': '0',
'expected': 'kViewportX',
},
{
'name': 'scissor_y',
'type': 'GLint',
'default': '0',
'expected': 'kViewportY',
},
{
'name': 'scissor_width',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportWidth',
},
{
'name': 'scissor_height',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportHeight',
},
],
},
'Viewport': {
'type': 'Normal',
'func': 'Viewport',
'enum': 'GL_VIEWPORT',
'states': [
# NOTE: These defaults are reset at GLES2DecoderImpl::Initialization.
{
'name': 'viewport_x',
'type': 'GLint',
'default': '0',
'expected': 'kViewportX',
},
{
'name': 'viewport_y',
'type': 'GLint',
'default': '0',
'expected': 'kViewportY',
},
{
'name': 'viewport_width',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportWidth',
},
{
'name': 'viewport_height',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportHeight',
},
],
},
}
# This is a list of enum names and their valid values. It is used to map
# GLenum arguments to a specific set of valid values.
_ENUM_LISTS = {
'BlitFilter': {
'type': 'GLenum',
'valid': [
'GL_NEAREST',
'GL_LINEAR',
],
'invalid': [
'GL_LINEAR_MIPMAP_LINEAR',
],
},
'FrameBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_FRAMEBUFFER',
],
'invalid': [
'GL_DRAW_FRAMEBUFFER',
'GL_READ_FRAMEBUFFER',
],
},
'RenderBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_RENDERBUFFER',
],
'invalid': [
'GL_FRAMEBUFFER',
],
},
'BufferTarget': {
'type': 'GLenum',
'valid': [
'GL_ARRAY_BUFFER',
'GL_ELEMENT_ARRAY_BUFFER',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'BufferUsage': {
'type': 'GLenum',
'valid': [
'GL_STREAM_DRAW',
'GL_STATIC_DRAW',
'GL_DYNAMIC_DRAW',
],
'invalid': [
'GL_STATIC_READ',
],
},
'CompressedTextureFormat': {
'type': 'GLenum',
'valid': [
],
},
'GLState': {
'type': 'GLenum',
'valid': [
# NOTE: State and Capability entries are added later.
'GL_ACTIVE_TEXTURE',
'GL_ALIASED_LINE_WIDTH_RANGE',
'GL_ALIASED_POINT_SIZE_RANGE',
'GL_ALPHA_BITS',
'GL_ARRAY_BUFFER_BINDING',
'GL_BLUE_BITS',
'GL_COMPRESSED_TEXTURE_FORMATS',
'GL_CURRENT_PROGRAM',
'GL_DEPTH_BITS',
'GL_DEPTH_RANGE',
'GL_ELEMENT_ARRAY_BUFFER_BINDING',
'GL_FRAMEBUFFER_BINDING',
'GL_GENERATE_MIPMAP_HINT',
'GL_GREEN_BITS',
'GL_IMPLEMENTATION_COLOR_READ_FORMAT',
'GL_IMPLEMENTATION_COLOR_READ_TYPE',
'GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS',
'GL_MAX_CUBE_MAP_TEXTURE_SIZE',
'GL_MAX_FRAGMENT_UNIFORM_VECTORS',
'GL_MAX_RENDERBUFFER_SIZE',
'GL_MAX_TEXTURE_IMAGE_UNITS',
'GL_MAX_TEXTURE_SIZE',
'GL_MAX_VARYING_VECTORS',
'GL_MAX_VERTEX_ATTRIBS',
'GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS',
'GL_MAX_VERTEX_UNIFORM_VECTORS',
'GL_MAX_VIEWPORT_DIMS',
'GL_NUM_COMPRESSED_TEXTURE_FORMATS',
'GL_NUM_SHADER_BINARY_FORMATS',
'GL_PACK_ALIGNMENT',
'GL_RED_BITS',
'GL_RENDERBUFFER_BINDING',
'GL_SAMPLE_BUFFERS',
'GL_SAMPLE_COVERAGE_INVERT',
'GL_SAMPLE_COVERAGE_VALUE',
'GL_SAMPLES',
'GL_SCISSOR_BOX',
'GL_SHADER_BINARY_FORMATS',
'GL_SHADER_COMPILER',
'GL_SUBPIXEL_BITS',
'GL_STENCIL_BITS',
'GL_TEXTURE_BINDING_2D',
'GL_TEXTURE_BINDING_CUBE_MAP',
'GL_UNPACK_ALIGNMENT',
'GL_UNPACK_FLIP_Y_CHROMIUM',
'GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM',
'GL_UNPACK_UNPREMULTIPLY_ALPHA_CHROMIUM',
# we can add this because we emulate it if the driver does not support it.
'GL_VERTEX_ARRAY_BINDING_OES',
'GL_VIEWPORT',
],
'invalid': [
'GL_FOG_HINT',
],
},
'GetTexParamTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP',
],
'invalid': [
'GL_PROXY_TEXTURE_CUBE_MAP',
]
},
'TextureTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP_POSITIVE_X',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_X',
'GL_TEXTURE_CUBE_MAP_POSITIVE_Y',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_Y',
'GL_TEXTURE_CUBE_MAP_POSITIVE_Z',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_Z',
],
'invalid': [
'GL_PROXY_TEXTURE_CUBE_MAP',
]
},
'TextureBindTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP',
],
'invalid': [
'GL_TEXTURE_1D',
'GL_TEXTURE_3D',
],
},
'ShaderType': {
'type': 'GLenum',
'valid': [
'GL_VERTEX_SHADER',
'GL_FRAGMENT_SHADER',
],
'invalid': [
'GL_GEOMETRY_SHADER',
],
},
'FaceType': {
'type': 'GLenum',
'valid': [
'GL_FRONT',
'GL_BACK',
'GL_FRONT_AND_BACK',
],
},
'FaceMode': {
'type': 'GLenum',
'valid': [
'GL_CW',
'GL_CCW',
],
},
'CmpFunction': {
'type': 'GLenum',
'valid': [
'GL_NEVER',
'GL_LESS',
'GL_EQUAL',
'GL_LEQUAL',
'GL_GREATER',
'GL_NOTEQUAL',
'GL_GEQUAL',
'GL_ALWAYS',
],
},
'Equation': {
'type': 'GLenum',
'valid': [
'GL_FUNC_ADD',
'GL_FUNC_SUBTRACT',
'GL_FUNC_REVERSE_SUBTRACT',
],
'invalid': [
'GL_MIN',
'GL_MAX',
],
},
'SrcBlendFactor': {
'type': 'GLenum',
'valid': [
'GL_ZERO',
'GL_ONE',
'GL_SRC_COLOR',
'GL_ONE_MINUS_SRC_COLOR',
'GL_DST_COLOR',
'GL_ONE_MINUS_DST_COLOR',
'GL_SRC_ALPHA',
'GL_ONE_MINUS_SRC_ALPHA',
'GL_DST_ALPHA',
'GL_ONE_MINUS_DST_ALPHA',
'GL_CONSTANT_COLOR',
'GL_ONE_MINUS_CONSTANT_COLOR',
'GL_CONSTANT_ALPHA',
'GL_ONE_MINUS_CONSTANT_ALPHA',
'GL_SRC_ALPHA_SATURATE',
],
},
'DstBlendFactor': {
'type': 'GLenum',
'valid': [
'GL_ZERO',
'GL_ONE',
'GL_SRC_COLOR',
'GL_ONE_MINUS_SRC_COLOR',
'GL_DST_COLOR',
'GL_ONE_MINUS_DST_COLOR',
'GL_SRC_ALPHA',
'GL_ONE_MINUS_SRC_ALPHA',
'GL_DST_ALPHA',
'GL_ONE_MINUS_DST_ALPHA',
'GL_CONSTANT_COLOR',
'GL_ONE_MINUS_CONSTANT_COLOR',
'GL_CONSTANT_ALPHA',
'GL_ONE_MINUS_CONSTANT_ALPHA',
],
},
'Capability': {
'type': 'GLenum',
'valid': ["GL_%s" % cap['name'].upper() for cap in _CAPABILITY_FLAGS],
'invalid': [
'GL_CLIP_PLANE0',
'GL_POINT_SPRITE',
],
},
'DrawMode': {
'type': 'GLenum',
'valid': [
'GL_POINTS',
'GL_LINE_STRIP',
'GL_LINE_LOOP',
'GL_LINES',
'GL_TRIANGLE_STRIP',
'GL_TRIANGLE_FAN',
'GL_TRIANGLES',
],
'invalid': [
'GL_QUADS',
'GL_POLYGON',
],
},
'IndexType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT',
],
'invalid': [
'GL_UNSIGNED_INT',
'GL_INT',
],
},
'GetMaxIndexType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT',
'GL_UNSIGNED_INT',
],
'invalid': [
'GL_INT',
],
},
'Attachment': {
'type': 'GLenum',
'valid': [
'GL_COLOR_ATTACHMENT0',
'GL_DEPTH_ATTACHMENT',
'GL_STENCIL_ATTACHMENT',
],
},
'BackbufferAttachment': {
'type': 'GLenum',
'valid': [
'GL_COLOR_EXT',
'GL_DEPTH_EXT',
'GL_STENCIL_EXT',
],
},
'BufferParameter': {
'type': 'GLenum',
'valid': [
'GL_BUFFER_SIZE',
'GL_BUFFER_USAGE',
],
'invalid': [
'GL_PIXEL_PACK_BUFFER',
],
},
'FrameBufferParameter': {
'type': 'GLenum',
'valid': [
'GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE',
'GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME',
'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL',
'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE',
],
},
'ProgramParameter': {
'type': 'GLenum',
'valid': [
'GL_DELETE_STATUS',
'GL_LINK_STATUS',
'GL_VALIDATE_STATUS',
'GL_INFO_LOG_LENGTH',
'GL_ATTACHED_SHADERS',
'GL_ACTIVE_ATTRIBUTES',
'GL_ACTIVE_ATTRIBUTE_MAX_LENGTH',
'GL_ACTIVE_UNIFORMS',
'GL_ACTIVE_UNIFORM_MAX_LENGTH',
],
},
'QueryObjectParameter': {
'type': 'GLenum',
'valid': [
'GL_QUERY_RESULT_EXT',
'GL_QUERY_RESULT_AVAILABLE_EXT',
],
},
'QueryParameter': {
'type': 'GLenum',
'valid': [
'GL_CURRENT_QUERY_EXT',
],
},
'QueryTarget': {
'type': 'GLenum',
'valid': [
'GL_ANY_SAMPLES_PASSED_EXT',
'GL_ANY_SAMPLES_PASSED_CONSERVATIVE_EXT',
'GL_COMMANDS_ISSUED_CHROMIUM',
'GL_LATENCY_QUERY_CHROMIUM',
'GL_ASYNC_PIXEL_TRANSFERS_COMPLETED_CHROMIUM',
'GL_ASYNC_READ_PIXELS_COMPLETED_CHROMIUM',
],
},
'RenderBufferParameter': {
'type': 'GLenum',
'valid': [
'GL_RENDERBUFFER_RED_SIZE',
'GL_RENDERBUFFER_GREEN_SIZE',
'GL_RENDERBUFFER_BLUE_SIZE',
'GL_RENDERBUFFER_ALPHA_SIZE',
'GL_RENDERBUFFER_DEPTH_SIZE',
'GL_RENDERBUFFER_STENCIL_SIZE',
'GL_RENDERBUFFER_WIDTH',
'GL_RENDERBUFFER_HEIGHT',
'GL_RENDERBUFFER_INTERNAL_FORMAT',
],
},
'ShaderParameter': {
'type': 'GLenum',
'valid': [
'GL_SHADER_TYPE',
'GL_DELETE_STATUS',
'GL_COMPILE_STATUS',
'GL_INFO_LOG_LENGTH',
'GL_SHADER_SOURCE_LENGTH',
'GL_TRANSLATED_SHADER_SOURCE_LENGTH_ANGLE',
],
},
'ShaderPrecision': {
'type': 'GLenum',
'valid': [
'GL_LOW_FLOAT',
'GL_MEDIUM_FLOAT',
'GL_HIGH_FLOAT',
'GL_LOW_INT',
'GL_MEDIUM_INT',
'GL_HIGH_INT',
],
},
'StringType': {
'type': 'GLenum',
'valid': [
'GL_VENDOR',
'GL_RENDERER',
'GL_VERSION',
'GL_SHADING_LANGUAGE_VERSION',
'GL_EXTENSIONS',
],
},
'TextureParameter': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_MAG_FILTER',
'GL_TEXTURE_MIN_FILTER',
'GL_TEXTURE_POOL_CHROMIUM',
'GL_TEXTURE_WRAP_S',
'GL_TEXTURE_WRAP_T',
],
'invalid': [
'GL_GENERATE_MIPMAP',
],
},
'TexturePool': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_POOL_MANAGED_CHROMIUM',
'GL_TEXTURE_POOL_UNMANAGED_CHROMIUM',
],
},
'TextureWrapMode': {
'type': 'GLenum',
'valid': [
'GL_CLAMP_TO_EDGE',
'GL_MIRRORED_REPEAT',
'GL_REPEAT',
],
},
'TextureMinFilterMode': {
'type': 'GLenum',
'valid': [
'GL_NEAREST',
'GL_LINEAR',
'GL_NEAREST_MIPMAP_NEAREST',
'GL_LINEAR_MIPMAP_NEAREST',
'GL_NEAREST_MIPMAP_LINEAR',
'GL_LINEAR_MIPMAP_LINEAR',
],
},
'TextureMagFilterMode': {
'type': 'GLenum',
'valid': [
'GL_NEAREST',
'GL_LINEAR',
],
},
'TextureUsage': {
'type': 'GLenum',
'valid': [
'GL_NONE',
'GL_FRAMEBUFFER_ATTACHMENT_ANGLE',
],
},
'VertexAttribute': {
'type': 'GLenum',
'valid': [
# some enum that the decoder actually passes through to GL needs
# to be the first listed here since it's used in unit tests.
'GL_VERTEX_ATTRIB_ARRAY_NORMALIZED',
'GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING',
'GL_VERTEX_ATTRIB_ARRAY_ENABLED',
'GL_VERTEX_ATTRIB_ARRAY_SIZE',
'GL_VERTEX_ATTRIB_ARRAY_STRIDE',
'GL_VERTEX_ATTRIB_ARRAY_TYPE',
'GL_CURRENT_VERTEX_ATTRIB',
],
},
'VertexPointer': {
'type': 'GLenum',
'valid': [
'GL_VERTEX_ATTRIB_ARRAY_POINTER',
],
},
'HintTarget': {
'type': 'GLenum',
'valid': [
'GL_GENERATE_MIPMAP_HINT',
],
'invalid': [
'GL_PERSPECTIVE_CORRECTION_HINT',
],
},
'HintMode': {
'type': 'GLenum',
'valid': [
'GL_FASTEST',
'GL_NICEST',
'GL_DONT_CARE',
],
},
'PixelStore': {
'type': 'GLenum',
'valid': [
'GL_PACK_ALIGNMENT',
'GL_UNPACK_ALIGNMENT',
'GL_UNPACK_FLIP_Y_CHROMIUM',
'GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM',
'GL_UNPACK_UNPREMULTIPLY_ALPHA_CHROMIUM',
],
'invalid': [
'GL_PACK_SWAP_BYTES',
'GL_UNPACK_SWAP_BYTES',
],
},
'PixelStoreAlignment': {
'type': 'GLint',
'valid': [
'1',
'2',
'4',
'8',
],
'invalid': [
'3',
'9',
],
},
'ReadPixelFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_RGB',
'GL_RGBA',
],
},
'PixelType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT_5_6_5',
'GL_UNSIGNED_SHORT_4_4_4_4',
'GL_UNSIGNED_SHORT_5_5_5_1',
],
'invalid': [
'GL_SHORT',
'GL_INT',
],
},
'ReadPixelType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT_5_6_5',
'GL_UNSIGNED_SHORT_4_4_4_4',
'GL_UNSIGNED_SHORT_5_5_5_1',
],
'invalid': [
'GL_SHORT',
'GL_INT',
],
},
'RenderBufferFormat': {
'type': 'GLenum',
'valid': [
'GL_RGBA4',
'GL_RGB565',
'GL_RGB5_A1',
'GL_DEPTH_COMPONENT16',
'GL_STENCIL_INDEX8',
],
},
'ShaderBinaryFormat': {
'type': 'GLenum',
'valid': [
],
},
'StencilOp': {
'type': 'GLenum',
'valid': [
'GL_KEEP',
'GL_ZERO',
'GL_REPLACE',
'GL_INCR',
'GL_INCR_WRAP',
'GL_DECR',
'GL_DECR_WRAP',
'GL_INVERT',
],
},
'TextureFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_LUMINANCE',
'GL_LUMINANCE_ALPHA',
'GL_RGB',
'GL_RGBA',
],
'invalid': [
'GL_BGRA',
'GL_BGR',
],
},
'TextureInternalFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_LUMINANCE',
'GL_LUMINANCE_ALPHA',
'GL_RGB',
'GL_RGBA',
],
'invalid': [
'GL_BGRA',
'GL_BGR',
],
},
'TextureInternalFormatStorage': {
'type': 'GLenum',
'valid': [
'GL_RGB565',
'GL_RGBA4',
'GL_RGB5_A1',
'GL_ALPHA8_EXT',
'GL_LUMINANCE8_EXT',
'GL_LUMINANCE8_ALPHA8_EXT',
'GL_RGB8_OES',
'GL_RGBA8_OES',
],
},
'VertexAttribType': {
'type': 'GLenum',
'valid': [
'GL_BYTE',
'GL_UNSIGNED_BYTE',
'GL_SHORT',
'GL_UNSIGNED_SHORT',
# 'GL_FIXED', // This is not available on Desktop GL.
'GL_FLOAT',
],
'invalid': [
'GL_DOUBLE',
],
},
'TextureBorder': {
'type': 'GLint',
'valid': [
'0',
],
'invalid': [
'1',
],
},
'VertexAttribSize': {
'type': 'GLint',
'valid': [
'1',
'2',
'3',
'4',
],
'invalid': [
'0',
'5',
],
},
'ZeroOnly': {
'type': 'GLint',
'valid': [
'0',
],
'invalid': [
'1',
],
},
'FalseOnly': {
'type': 'GLboolean',
'valid': [
'false',
],
'invalid': [
'true',
],
},
'ResetStatus': {
'type': 'GLenum',
'valid': [
'GL_GUILTY_CONTEXT_RESET_ARB',
'GL_INNOCENT_CONTEXT_RESET_ARB',
'GL_UNKNOWN_CONTEXT_RESET_ARB',
],
},
}
# This table specifies the different pepper interfaces that are supported for
# GL commands. 'dev' is true if it's a dev interface.
_PEPPER_INTERFACES = [
{'name': '', 'dev': False},
{'name': 'InstancedArrays', 'dev': False},
{'name': 'FramebufferBlit', 'dev': False},
{'name': 'FramebufferMultisample', 'dev': False},
{'name': 'ChromiumEnableFeature', 'dev': False},
{'name': 'ChromiumMapSub', 'dev': False},
{'name': 'Query', 'dev': False},
]
# This table specifies types and other special data for the commands that
# will be generated.
#
# Must match function names specified in "cmd_buffer_functions.txt".
#
# cmd_comment: A comment added to the cmd format.
# type: defines which handler will be used to generate code.
# decoder_func: defines which function to call in the decoder to execute the
# corresponding GL command. If not specified the GL command will
# be called directly.
# gl_test_func: GL function that is expected to be called when testing.
# cmd_args: The arguments to use for the command. This overrides generating
# them based on the GL function arguments.
# a NonImmediate type is a type that stays a pointer even in
# an immediate version of a command.
# gen_cmd: Whether or not this function generates a command. Default = True.
# immediate: Whether or not to generate an immediate command for the GL
# function. The default is if there is exactly 1 pointer argument
# in the GL function an immediate command is generated.
# bucket: True to generate a bucket version of the command.
# impl_func: Whether or not to generate the GLES2Implementation part of this
# command.
# impl_decl: Whether or not to generate the GLES2Implementation declaration
# for this command.
# needs_size: If true a data_size field is added to the command.
# data_type: The type of data the command uses. For PUTn or PUT types.
# count: The number of units per element. For PUTn or PUT types.
# unit_test: If False no service side unit test will be generated.
# client_test: If False no client side unit test will be generated.
# expectation: If False the unit test will have no expected calls.
# gen_func: Name of function that generates GL resource for corresponding
# bind function.
# states: array of states that get set by this function corresponding to
# the given arguments
# state_flag: name of flag that is set to true when function is called.
# no_gl: no GL function is called.
# valid_args: A dictionary of argument indices to args to use in unit tests
# when they can not be automatically determined.
# pepper_interface: The pepper interface that is used for this extension
# invalid_test: False if no invalid test needed.
# shadowed: True = the value is shadowed so no glGetXXX call will be made.
# first_element_only: For PUT types, True if only the first element of an
# array is used and we end up calling the single value
# corresponding function, e.g. TexParameteriv -> TexParameteri.
_FUNCTION_INFO = {
'ActiveTexture': {
'decoder_func': 'DoActiveTexture',
'unit_test': False,
'impl_func': False,
'client_test': False,
},
'AttachShader': {'decoder_func': 'DoAttachShader'},
'BindAttribLocation': {'type': 'GLchar', 'bucket': True, 'needs_size': True},
'BindBuffer': {
'type': 'Bind',
'decoder_func': 'DoBindBuffer',
'gen_func': 'GenBuffersARB',
},
'BindFramebuffer': {
'type': 'Bind',
'decoder_func': 'DoBindFramebuffer',
'gl_test_func': 'glBindFramebufferEXT',
'gen_func': 'GenFramebuffersEXT',
},
'BindRenderbuffer': {
'type': 'Bind',
'decoder_func': 'DoBindRenderbuffer',
'gl_test_func': 'glBindRenderbufferEXT',
'gen_func': 'GenRenderbuffersEXT',
},
'BindTexture': {
'type': 'Bind',
'decoder_func': 'DoBindTexture',
'gen_func': 'GenTextures',
# TODO(gman): remove this once client side caching works.
'client_test': False,
},
'BlitFramebufferEXT': {
'decoder_func': 'DoBlitFramebufferEXT',
'unit_test': False,
'extension': True,
'pepper_interface': 'FramebufferBlit',
'defer_reads': True,
'defer_draws': True,
},
'BufferData': {
'type': 'Manual',
'immediate': True,
'client_test': False,
},
'BufferSubData': {
'type': 'Data',
'client_test': False,
'decoder_func': 'DoBufferSubData',
},
'CheckFramebufferStatus': {
'type': 'Is',
'decoder_func': 'DoCheckFramebufferStatus',
'gl_test_func': 'glCheckFramebufferStatusEXT',
'error_value': 'GL_FRAMEBUFFER_UNSUPPORTED',
'result': ['GLenum'],
},
'Clear': {
'decoder_func': 'DoClear',
'defer_draws': True,
},
'ClearColor': {
'type': 'StateSet',
'state': 'ClearColor',
},
'ClearDepthf': {
'type': 'StateSet',
'state': 'ClearDepthf',
'decoder_func': 'glClearDepth',
'gl_test_func': 'glClearDepth',
'valid_args': {
'0': '0.5f'
},
},
'ColorMask': {
'type': 'StateSet',
'state': 'ColorMask',
'no_gl': True,
'expectation': False,
},
'ConsumeTextureCHROMIUM': {
'decoder_func': 'DoConsumeTextureCHROMIUM',
'type': 'PUT',
'data_type': 'GLbyte',
'count': 64,
'unit_test': False,
'extension': True,
'chromium': True,
},
'ClearStencil': {
'type': 'StateSet',
'state': 'ClearStencil',
},
'EnableFeatureCHROMIUM': {
'type': 'Custom',
'immediate': False,
'decoder_func': 'DoEnableFeatureCHROMIUM',
'expectation': False,
'cmd_args': 'GLuint bucket_id, GLint* result',
'result': ['GLint'],
'extension': True,
'chromium': True,
'pepper_interface': 'ChromiumEnableFeature',
},
'CompileShader': {'decoder_func': 'DoCompileShader', 'unit_test': False},
'CompressedTexImage2D': {
'type': 'Manual',
'immediate': True,
'bucket': True,
},
'CompressedTexSubImage2D': {
'type': 'Data',
'bucket': True,
'decoder_func': 'DoCompressedTexSubImage2D',
},
'CopyTexImage2D': {
'decoder_func': 'DoCopyTexImage2D',
'unit_test': False,
'defer_reads': True,
},
'CopyTexSubImage2D': {
'decoder_func': 'DoCopyTexSubImage2D',
'defer_reads': True,
},
'CreateImageCHROMIUM': {
'type': 'Manual',
'cmd_args': 'GLsizei width, GLsizei height, GLenum internalformat',
'result': ['GLuint'],
'client_test': False,
'gen_cmd': False,
'expectation': False,
'extension': True,
'chromium': True,
},
'DestroyImageCHROMIUM': {
'type': 'Manual',
'immediate': True,
'client_test': False,
'gen_cmd': False,
'extension': True,
'chromium': True,
},
'GetImageParameterivCHROMIUM': {
'type': 'Manual',
'client_test': False,
'gen_cmd': False,
'expectation': False,
'extension': True,
'chromium': True,
},
'CreateProgram': {
'type': 'Create',
'client_test': False,
},
'CreateShader': {
'type': 'Create',
'client_test': False,
},
'BlendColor': {
'type': 'StateSet',
'state': 'BlendColor',
},
'BlendEquation': {
'type': 'StateSetRGBAlpha',
'state': 'BlendEquation',
'valid_args': {
'0': 'GL_FUNC_SUBTRACT'
},
},
'BlendEquationSeparate': {
'type': 'StateSet',
'state': 'BlendEquation',
'valid_args': {
'0': 'GL_FUNC_SUBTRACT'
},
},
'BlendFunc': {
'type': 'StateSetRGBAlpha',
'state': 'BlendFunc',
},
'BlendFuncSeparate': {
'type': 'StateSet',
'state': 'BlendFunc',
},
'SampleCoverage': {'decoder_func': 'DoSampleCoverage'},
'StencilFunc': {
'type': 'StateSetFrontBack',
'state': 'StencilFunc',
},
'StencilFuncSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilFunc',
},
'StencilOp': {
'type': 'StateSetFrontBack',
'state': 'StencilOp',
'valid_args': {
'1': 'GL_INCR'
},
},
'StencilOpSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilOp',
'valid_args': {
'1': 'GL_INCR'
},
},
'Hint': {
'type': 'StateSetNamedParameter',
'state': 'Hint',
},
'CullFace': {'type': 'StateSet', 'state': 'CullFace'},
'FrontFace': {'type': 'StateSet', 'state': 'FrontFace'},
'DepthFunc': {'type': 'StateSet', 'state': 'DepthFunc'},
'LineWidth': {
'type': 'StateSet',
'state': 'LineWidth',
'valid_args': {
'0': '0.5f'
},
},
'PolygonOffset': {
'type': 'StateSet',
'state': 'PolygonOffset',
},
'DeleteBuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteBuffersARB',
'resource_type': 'Buffer',
'resource_types': 'Buffers',
},
'DeleteFramebuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteFramebuffersEXT',
'resource_type': 'Framebuffer',
'resource_types': 'Framebuffers',
},
'DeleteProgram': {'type': 'Delete', 'decoder_func': 'DoDeleteProgram'},
'DeleteRenderbuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteRenderbuffersEXT',
'resource_type': 'Renderbuffer',
'resource_types': 'Renderbuffers',
},
'DeleteShader': {'type': 'Delete', 'decoder_func': 'DoDeleteShader'},
'DeleteSharedIdsCHROMIUM': {
'type': 'Custom',
'decoder_func': 'DoDeleteSharedIdsCHROMIUM',
'impl_func': False,
'expectation': False,
'immediate': False,
'extension': True,
'chromium': True,
},
'DeleteTextures': {
'type': 'DELn',
'resource_type': 'Texture',
'resource_types': 'Textures',
},
'DepthRangef': {
'decoder_func': 'DoDepthRangef',
'gl_test_func': 'glDepthRange',
},
'DepthMask': {
'type': 'StateSet',
'state': 'DepthMask',
'no_gl': True,
'expectation': False,
},
'DetachShader': {'decoder_func': 'DoDetachShader'},
'Disable': {
'decoder_func': 'DoDisable',
'impl_func': False,
'client_test': False,
},
'DisableVertexAttribArray': {
'decoder_func': 'DoDisableVertexAttribArray',
'impl_decl': False,
},
'DrawArrays': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLint first, GLsizei count',
'defer_draws': True,
},
'DrawElements': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLsizei count, '
'GLenumIndexType type, GLuint index_offset',
'client_test': False,
'defer_draws': True,
},
'Enable': {
'decoder_func': 'DoEnable',
'impl_func': False,
'client_test': False,
},
'EnableVertexAttribArray': {
'decoder_func': 'DoEnableVertexAttribArray',
'impl_decl': False,
},
'Finish': {
'impl_func': False,
'client_test': False,
'decoder_func': 'DoFinish',
'defer_reads': True,
},
'Flush': {
'impl_func': False,
'decoder_func': 'DoFlush',
},
'FramebufferRenderbuffer': {
'decoder_func': 'DoFramebufferRenderbuffer',
'gl_test_func': 'glFramebufferRenderbufferEXT',
},
'FramebufferTexture2D': {
'decoder_func': 'DoFramebufferTexture2D',
'gl_test_func': 'glFramebufferTexture2DEXT',
},
'FramebufferTexture2DMultisampleEXT': {
'decoder_func': 'DoFramebufferTexture2DMultisample',
'gl_test_func': 'glFramebufferTexture2DMultisampleEXT',
'expectation': False,
'unit_test': False,
'extension': True,
},
'GenerateMipmap': {
'decoder_func': 'DoGenerateMipmap',
'gl_test_func': 'glGenerateMipmapEXT',
},
'GenBuffers': {
'type': 'GENn',
'gl_test_func': 'glGenBuffersARB',
'resource_type': 'Buffer',
'resource_types': 'Buffers',
},
'GenMailboxCHROMIUM': {
'type': 'Manual',
'cmd_args': 'GLuint bucket_id',
'result': ['SizedResult<GLint>'],
'client_test': False,
'unit_test': False,
'extension': True,
'chromium': True,
},
'GenFramebuffers': {
'type': 'GENn',
'gl_test_func': 'glGenFramebuffersEXT',
'resource_type': 'Framebuffer',
'resource_types': 'Framebuffers',
},
'GenRenderbuffers': {
'type': 'GENn', 'gl_test_func': 'glGenRenderbuffersEXT',
'resource_type': 'Renderbuffer',
'resource_types': 'Renderbuffers',
},
'GenTextures': {
'type': 'GENn',
'gl_test_func': 'glGenTextures',
'resource_type': 'Texture',
'resource_types': 'Textures',
},
'GenSharedIdsCHROMIUM': {
'type': 'Custom',
'decoder_func': 'DoGenSharedIdsCHROMIUM',
'impl_func': False,
'expectation': False,
'immediate': False,
'extension': True,
'chromium': True,
},
'GetActiveAttrib': {
'type': 'Custom',
'immediate': False,
'cmd_args':
'GLidProgram program, GLuint index, uint32 name_bucket_id, '
'void* result',
'result': [
'int32 success',
'int32 size',
'uint32 type',
],
},
'GetActiveUniform': {
'type': 'Custom',
'immediate': False,
'cmd_args':
'GLidProgram program, GLuint index, uint32 name_bucket_id, '
'void* result',
'result': [
'int32 success',
'int32 size',
'uint32 type',
],
},
'GetAttachedShaders': {
'type': 'Custom',
'immediate': False,
'cmd_args': 'GLidProgram program, void* result, uint32 result_size',
'result': ['SizedResult<GLuint>'],
},
'GetAttribLocation': {
'type': 'HandWritten',
'immediate': True,
'bucket': True,
'needs_size': True,
'cmd_args':
'GLidProgram program, const char* name, NonImmediate GLint* location',
'result': ['GLint'],
},
'GetBooleanv': {
'type': 'GETn',
'result': ['SizedResult<GLboolean>'],
'decoder_func': 'DoGetBooleanv',
'gl_test_func': 'glGetBooleanv',
},
'GetBufferParameteriv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'decoder_func': 'DoGetBufferParameteriv',
'expectation': False,
'shadowed': True,
},
'GetError': {
'type': 'Is',
'decoder_func': 'GetErrorState()->GetGLError',
'impl_func': False,
'result': ['GLenum'],
'client_test': False,
},
'GetFloatv': {
'type': 'GETn',
'result': ['SizedResult<GLfloat>'],
'decoder_func': 'DoGetFloatv',
'gl_test_func': 'glGetFloatv',
},
'GetFramebufferAttachmentParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetFramebufferAttachmentParameteriv',
'gl_test_func': 'glGetFramebufferAttachmentParameterivEXT',
'result': ['SizedResult<GLint>'],
},
'GetIntegerv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'decoder_func': 'DoGetIntegerv',
'client_test': False,
},
'GetMaxValueInBufferCHROMIUM': {
'type': 'Is',
'decoder_func': 'DoGetMaxValueInBufferCHROMIUM',
'result': ['GLuint'],
'unit_test': False,
'client_test': False,
'extension': True,
'chromium': True,
'impl_func': False,
},
'GetMultipleIntegervCHROMIUM': {
'type': 'Custom',
'immediate': False,
'expectation': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'GetProgramiv': {
'type': 'GETn',
'decoder_func': 'DoGetProgramiv',
'result': ['SizedResult<GLint>'],
'expectation': False,
},
'GetProgramInfoCHROMIUM': {
'type': 'Custom',
'immediate': False,
'expectation': False,
'impl_func': False,
'extension': True,
'chromium': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32 bucket_id',
'result': [
'uint32 link_status',
'uint32 num_attribs',
'uint32 num_uniforms',
],
},
'GetProgramInfoLog': {
'type': 'STRn',
'expectation': False,
},
'GetRenderbufferParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetRenderbufferParameteriv',
'gl_test_func': 'glGetRenderbufferParameterivEXT',
'result': ['SizedResult<GLint>'],
},
'GetShaderiv': {
'type': 'GETn',
'decoder_func': 'DoGetShaderiv',
'result': ['SizedResult<GLint>'],
},
'GetShaderInfoLog': {
'type': 'STRn',
'get_len_func': 'glGetShaderiv',
'get_len_enum': 'GL_INFO_LOG_LENGTH',
'unit_test': False,
},
'GetShaderPrecisionFormat': {
'type': 'Custom',
'immediate': False,
'cmd_args':
'GLenumShaderType shadertype, GLenumShaderPrecision precisiontype, '
'void* result',
'result': [
'int32 success',
'int32 min_range',
'int32 max_range',
'int32 precision',
],
},
'GetShaderSource': {
'type': 'STRn',
'get_len_func': 'DoGetShaderiv',
'get_len_enum': 'GL_SHADER_SOURCE_LENGTH',
'unit_test': False,
'client_test': False,
},
'GetString': {
'type': 'Custom',
'client_test': False,
'cmd_args': 'GLenumStringType name, uint32 bucket_id',
},
'GetTexParameterfv': {'type': 'GETn', 'result': ['SizedResult<GLfloat>']},
'GetTexParameteriv': {'type': 'GETn', 'result': ['SizedResult<GLint>']},
'GetTranslatedShaderSourceANGLE': {
'type': 'STRn',
'get_len_func': 'DoGetShaderiv',
'get_len_enum': 'GL_TRANSLATED_SHADER_SOURCE_LENGTH_ANGLE',
'unit_test': False,
'extension': True,
},
'GetUniformfv': {
'type': 'Custom',
'immediate': False,
'result': ['SizedResult<GLfloat>'],
},
'GetUniformiv': {
'type': 'Custom',
'immediate': False,
'result': ['SizedResult<GLint>'],
},
'GetUniformLocation': {
'type': 'HandWritten',
'immediate': True,
'bucket': True,
'needs_size': True,
'cmd_args':
'GLidProgram program, const char* name, NonImmediate GLint* location',
'result': ['GLint'],
},
'GetVertexAttribfv': {
'type': 'GETn',
'result': ['SizedResult<GLfloat>'],
'impl_decl': False,
'decoder_func': 'DoGetVertexAttribfv',
'expectation': False,
'client_test': False,
},
'GetVertexAttribiv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'impl_decl': False,
'decoder_func': 'DoGetVertexAttribiv',
'expectation': False,
'client_test': False,
},
'GetVertexAttribPointerv': {
'type': 'Custom',
'immediate': False,
'result': ['SizedResult<GLuint>'],
'client_test': False,
},
'IsBuffer': {
'type': 'Is',
'decoder_func': 'DoIsBuffer',
'expectation': False,
},
'IsEnabled': {
'type': 'Is',
'decoder_func': 'DoIsEnabled',
'impl_func': False,
'expectation': False,
},
'IsFramebuffer': {
'type': 'Is',
'decoder_func': 'DoIsFramebuffer',
'expectation': False,
},
'IsProgram': {
'type': 'Is',
'decoder_func': 'DoIsProgram',
'expectation': False,
},
'IsRenderbuffer': {
'type': 'Is',
'decoder_func': 'DoIsRenderbuffer',
'expectation': False,
},
'IsShader': {
'type': 'Is',
'decoder_func': 'DoIsShader',
'expectation': False,
},
'IsTexture': {
'type': 'Is',
'decoder_func': 'DoIsTexture',
'expectation': False,
},
'LinkProgram': {
'decoder_func': 'DoLinkProgram',
'impl_func': False,
},
'MapBufferCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'MapBufferSubDataCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
},
'MapImageCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'MapTexSubImage2DCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
},
'PixelStorei': {'type': 'Manual'},
'PostSubBufferCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'unit_test': False,
'client_test': False,
'extension': True,
'chromium': True,
},
'ProduceTextureCHROMIUM': {
'decoder_func': 'DoProduceTextureCHROMIUM',
'type': 'PUT',
'data_type': 'GLbyte',
'count': 64,
'unit_test': False,
'extension': True,
'chromium': True,
},
'RenderbufferStorage': {
'decoder_func': 'DoRenderbufferStorage',
'gl_test_func': 'glRenderbufferStorageEXT',
'expectation': False,
},
'RenderbufferStorageMultisampleEXT': {
'decoder_func': 'DoRenderbufferStorageMultisample',
'gl_test_func': 'glRenderbufferStorageMultisampleEXT',
'expectation': False,
'unit_test': False,
'extension': True,
'pepper_interface': 'FramebufferMultisample',
},
'ReadPixels': {
'cmd_comment':
'// ReadPixels has the result separated from the pixel buffer so that\n'
'// it is easier to specify the result going to some specific place\n'
'// that exactly fits the rectangle of pixels.\n',
'type': 'Custom',
'immediate': False,
'impl_func': False,
'client_test': False,
'cmd_args':
'GLint x, GLint y, GLsizei width, GLsizei height, '
'GLenumReadPixelFormat format, GLenumReadPixelType type, '
'uint32 pixels_shm_id, uint32 pixels_shm_offset, '
'uint32 result_shm_id, uint32 result_shm_offset, '
'GLboolean async',
'result': ['uint32'],
'defer_reads': True,
},
'RegisterSharedIdsCHROMIUM': {
'type': 'Custom',
'decoder_func': 'DoRegisterSharedIdsCHROMIUM',
'impl_func': False,
'expectation': False,
'immediate': False,
'extension': True,
'chromium': True,
},
'ReleaseShaderCompiler': {
'decoder_func': 'DoReleaseShaderCompiler',
'unit_test': False,
},
'ShaderBinary': {
'type': 'Custom',
'client_test': False,
},
'ShaderSource': {
'type': 'Manual',
'immediate': True,
'bucket': True,
'needs_size': True,
'client_test': False,
'cmd_args':
'GLuint shader, const char* data',
},
'StencilMask': {
'type': 'StateSetFrontBack',
'state': 'StencilMask',
'no_gl': True,
'expectation': False,
},
'StencilMaskSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilMask',
'no_gl': True,
'expectation': False,
},
'SwapBuffers': {
'impl_func': False,
'decoder_func': 'DoSwapBuffers',
'unit_test': False,
'client_test': False,
'extension': True,
},
'TexImage2D': {
'type': 'Manual',
'immediate': True,
'client_test': False,
},
'TexParameterf': {
'decoder_func': 'DoTexParameterf',
'gl_test_func': 'glTexParameteri',
'valid_args': {
'2': 'GL_NEAREST'
},
},
'TexParameteri': {
'decoder_func': 'DoTexParameteri',
'valid_args': {
'2': 'GL_NEAREST'
},
},
'TexParameterfv': {
'type': 'PUT',
'data_type': 'GLfloat',
'data_value': 'GL_NEAREST',
'count': 1,
'decoder_func': 'DoTexParameterfv',
'gl_test_func': 'glTexParameteri',
'first_element_only': True,
},
'TexParameteriv': {
'type': 'PUT',
'data_type': 'GLint',
'data_value': 'GL_NEAREST',
'count': 1,
'decoder_func': 'DoTexParameteriv',
'gl_test_func': 'glTexParameteri',
'first_element_only': True,
},
'TexSubImage2D': {
'type': 'Manual',
'immediate': True,
'client_test': False,
'cmd_args': 'GLenumTextureTarget target, GLint level, '
'GLint xoffset, GLint yoffset, '
'GLsizei width, GLsizei height, '
'GLenumTextureFormat format, GLenumPixelType type, '
'const void* pixels, GLboolean internal'
},
'Uniform1f': {'type': 'PUTXn', 'data_type': 'GLfloat', 'count': 1},
'Uniform1fv': {
'type': 'PUTn',
'data_type': 'GLfloat',
'count': 1,
'decoder_func': 'DoUniform1fv',
},
'Uniform1i': {'decoder_func': 'DoUniform1i', 'unit_test': False},
'Uniform1iv': {
'type': 'PUTn',
'data_type': 'GLint',
'count': 1,
'decoder_func': 'DoUniform1iv',
'unit_test': False,
},
'Uniform2i': {'type': 'PUTXn', 'data_type': 'GLint', 'count': 2},
'Uniform2f': {'type': 'PUTXn', 'data_type': 'GLfloat', 'count': 2},
'Uniform2fv': {
'type': 'PUTn',
'data_type': 'GLfloat',
'count': 2,
'decoder_func': 'DoUniform2fv',
},
'Uniform2iv': {
'type': 'PUTn',
'data_type': 'GLint',
'count': 2,
'decoder_func': 'DoUniform2iv',
},
'Uniform3i': {'type': 'PUTXn', 'data_type': 'GLint', 'count': 3},
'Uniform3f': {'type': 'PUTXn', 'data_type': 'GLfloat', 'count': 3},
'Uniform3fv': {
'type': 'PUTn',
'data_type': 'GLfloat',
'count': 3,
'decoder_func': 'DoUniform3fv',
},
'Uniform3iv': {
'type': 'PUTn',
'data_type': 'GLint',
'count': 3,
'decoder_func': 'DoUniform3iv',
},
'Uniform4i': {'type': 'PUTXn', 'data_type': 'GLint', 'count': 4},
'Uniform4f': {'type': 'PUTXn', 'data_type': 'GLfloat', 'count': 4},
'Uniform4fv': {
'type': 'PUTn',
'data_type': 'GLfloat',
'count': 4,
'decoder_func': 'DoUniform4fv',
},
'Uniform4iv': {
'type': 'PUTn',
'data_type': 'GLint',
'count': 4,
'decoder_func': 'DoUniform4iv',
},
'UniformMatrix2fv': {
'type': 'PUTn',
'data_type': 'GLfloat',
'count': 4,
'decoder_func': 'DoUniformMatrix2fv',
},
'UniformMatrix3fv': {
'type': 'PUTn',
'data_type': 'GLfloat',
'count': 9,
'decoder_func': 'DoUniformMatrix3fv',
},
'UniformMatrix4fv': {
'type': 'PUTn',
'data_type': 'GLfloat',
'count': 16,
'decoder_func': 'DoUniformMatrix4fv',
},
'UnmapBufferCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'UnmapBufferSubDataCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
},
'UnmapImageCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'UnmapTexSubImage2DCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
},
'UseProgram': {
'decoder_func': 'DoUseProgram',
'impl_func': False,
'unit_test': False,
},
'ValidateProgram': {'decoder_func': 'DoValidateProgram'},
'VertexAttrib1f': {'decoder_func': 'DoVertexAttrib1f'},
'VertexAttrib1fv': {
'type': 'PUT',
'data_type': 'GLfloat',
'count': 1,
'decoder_func': 'DoVertexAttrib1fv',
},
'VertexAttrib2f': {'decoder_func': 'DoVertexAttrib2f'},
'VertexAttrib2fv': {
'type': 'PUT',
'data_type': 'GLfloat',
'count': 2,
'decoder_func': 'DoVertexAttrib2fv',
},
'VertexAttrib3f': {'decoder_func': 'DoVertexAttrib3f'},
'VertexAttrib3fv': {
'type': 'PUT',
'data_type': 'GLfloat',
'count': 3,
'decoder_func': 'DoVertexAttrib3fv',
},
'VertexAttrib4f': {'decoder_func': 'DoVertexAttrib4f'},
'VertexAttrib4fv': {
'type': 'PUT',
'data_type': 'GLfloat',
'count': 4,
'decoder_func': 'DoVertexAttrib4fv',
},
'VertexAttribPointer': {
'type': 'Manual',
'cmd_args': 'GLuint indx, GLintVertexAttribSize size, '
'GLenumVertexAttribType type, GLboolean normalized, '
'GLsizei stride, GLuint offset',
'client_test': False,
},
'Scissor': {
'type': 'StateSet',
'state': 'Scissor',
},
'Viewport': {
'decoder_func': 'DoViewport',
},
'ResizeCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'unit_test': False,
'extension': True,
'chromium': True,
},
'GetRequestableExtensionsCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'immediate': False,
'cmd_args': 'uint32 bucket_id',
'extension': True,
'chromium': True,
},
'RequestExtensionCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'immediate': False,
'client_test': False,
'cmd_args': 'uint32 bucket_id',
'extension': True,
'chromium': True,
},
'RateLimitOffscreenContextCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'CreateStreamTextureCHROMIUM': {
'type': 'Custom',
'cmd_args': 'GLuint client_id, void* result',
'result': ['GLuint'],
'immediate': False,
'impl_func': False,
'expectation': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'DestroyStreamTextureCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'expectation': False,
'extension': True,
'chromium': True,
},
'TexImageIOSurface2DCHROMIUM': {
'decoder_func': 'DoTexImageIOSurface2DCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'CopyTextureCHROMIUM': {
'decoder_func': 'DoCopyTextureCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'TexStorage2DEXT': {
'unit_test': False,
'extension': True,
'decoder_func': 'DoTexStorage2DEXT',
},
'DrawArraysInstancedANGLE': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLint first, GLsizei count, '
'GLsizei primcount',
'extension': True,
'unit_test': False,
'pepper_interface': 'InstancedArrays',
'defer_draws': True,
},
'DrawBuffersEXT': {
'type': 'PUTn',
'decoder_func': 'DoDrawBuffersEXT',
'data_type': 'GLenum',
'count': 1,
'client_test': False,
'unit_test': False,
'extension': True,
},
'DrawElementsInstancedANGLE': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLsizei count, '
'GLenumIndexType type, GLuint index_offset, GLsizei primcount',
'extension': True,
'unit_test': False,
'client_test': False,
'pepper_interface': 'InstancedArrays',
'defer_draws': True,
},
'VertexAttribDivisorANGLE': {
'type': 'Manual',
'cmd_args': 'GLuint index, GLuint divisor',
'extension': True,
'unit_test': False,
'pepper_interface': 'InstancedArrays',
},
'GenQueriesEXT': {
'type': 'GENn',
'gl_test_func': 'glGenQueriesARB',
'resource_type': 'Query',
'resource_types': 'Queries',
'unit_test': False,
'pepper_interface': 'Query',
},
'DeleteQueriesEXT': {
'type': 'DELn',
'gl_test_func': 'glDeleteQueriesARB',
'resource_type': 'Query',
'resource_types': 'Queries',
'unit_test': False,
'pepper_interface': 'Query',
},
'IsQueryEXT': {
'gen_cmd': False,
'client_test': False,
'pepper_interface': 'Query',
},
'BeginQueryEXT': {
'type': 'Manual',
'cmd_args': 'GLenumQueryTarget target, GLidQuery id, void* sync_data',
'immediate': False,
'gl_test_func': 'glBeginQuery',
'pepper_interface': 'Query',
},
'EndQueryEXT': {
'type': 'Manual',
'cmd_args': 'GLenumQueryTarget target, GLuint submit_count',
'gl_test_func': 'glEndQuery',
'client_test': False,
'pepper_interface': 'Query',
},
'GetQueryivEXT': {
'gen_cmd': False,
'client_test': False,
'gl_test_func': 'glGetQueryiv',
'pepper_interface': 'Query',
},
'GetQueryObjectuivEXT': {
'gen_cmd': False,
'client_test': False,
'gl_test_func': 'glGetQueryObjectuiv',
'pepper_interface': 'Query',
},
'BindUniformLocationCHROMIUM': {
'type': 'GLchar',
'bucket': True,
'needs_size': True,
'gl_test_func': 'DoBindUniformLocationCHROMIUM',
},
'InsertEventMarkerEXT': {
'type': 'GLcharN',
'decoder_func': 'DoInsertEventMarkerEXT',
'expectation': False,
},
'PushGroupMarkerEXT': {
'type': 'GLcharN',
'decoder_func': 'DoPushGroupMarkerEXT',
'expectation': False,
},
'PopGroupMarkerEXT': {
'decoder_func': 'DoPopGroupMarkerEXT',
'expectation': False,
'impl_func': False,
},
'GenVertexArraysOES': {
'type': 'GENn',
'gl_test_func': 'glGenVertexArraysOES',
'resource_type': 'VertexArray',
'resource_types': 'VertexArrays',
'unit_test': False,
},
'BindVertexArrayOES': {
'type': 'Bind',
'gl_test_func': 'glBindVertexArrayOES',
'decoder_func': 'DoBindVertexArrayOES',
'gen_func': 'GenVertexArraysOES',
'unit_test': False,
'client_test': False,
},
'DeleteVertexArraysOES': {
'type': 'DELn',
'gl_test_func': 'glDeleteVertexArraysOES',
'resource_type': 'VertexArray',
'resource_types': 'VertexArrays',
'unit_test': False,
},
'IsVertexArrayOES': {
'type': 'Is',
'gl_test_func': 'glIsVertexArrayOES',
'decoder_func': 'DoIsVertexArrayOES',
'expectation': False,
'unit_test': False,
},
'BindTexImage2DCHROMIUM': {
'decoder_func': 'DoBindTexImage2DCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'ReleaseTexImage2DCHROMIUM': {
'decoder_func': 'DoReleaseTexImage2DCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'ShallowFinishCHROMIUM': {
'impl_func': False,
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'ShallowFlushCHROMIUM': {
'impl_func': False,
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'TraceBeginCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'immediate': False,
'client_test': False,
'cmd_args': 'GLuint bucket_id',
'extension': True,
'chromium': True,
},
'TraceEndCHROMIUM': {
'impl_func': False,
'immediate': False,
'client_test': False,
'decoder_func': 'DoTraceEndCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'AsyncTexImage2DCHROMIUM': {
'type': 'Manual',
'immediate': False,
'client_test': False,
'extension': True,
'chromium': True,
},
'AsyncTexSubImage2DCHROMIUM': {
'type': 'Manual',
'immediate': False,
'client_test': False,
'extension': True,
'chromium': True,
},
'WaitAsyncTexImage2DCHROMIUM': {
'type': 'Manual',
'immediate': False,
'client_test': False,
'extension': True,
'chromium': True,
},
'DiscardFramebufferEXT': {
'type': 'PUTn',
'count': 1,
'data_type': 'GLenum',
'cmd_args': 'GLenum target, GLsizei count, '
'const GLenum* attachments',
'decoder_func': 'DoDiscardFramebufferEXT',
'unit_test': False,
'client_test': False,
'extension': True,
},
'LoseContextCHROMIUM': {
'type': 'Manual',
'impl_func': True,
'extension': True,
'chromium': True,
},
'InsertSyncPointCHROMIUM': {
'type': 'HandWritten',
'impl_func': False,
'extension': True,
'chromium': True,
},
'WaitSyncPointCHROMIUM': {
'type': 'Custom',
'impl_func': True,
'extension': True,
'chromium': True,
},
}
def Grouper(n, iterable, fillvalue=None):
"""Collect data into fixed-length chunks or blocks"""
args = [iter(iterable)] * n
return itertools.izip_longest(fillvalue=fillvalue, *args)
def SplitWords(input_string):
"""Transforms a input_string into a list of lower-case components.
Args:
input_string: the input string.
Returns:
a list of lower-case words.
"""
if input_string.find('_') > -1:
# 'some_TEXT_' -> 'some text'
return input_string.replace('_', ' ').strip().lower().split()
else:
if re.search('[A-Z]', input_string) and re.search('[a-z]', input_string):
# mixed case.
# look for capitalization to cut input_strings
# 'SomeText' -> 'Some Text'
input_string = re.sub('([A-Z])', r' \1', input_string).strip()
# 'Vector3' -> 'Vector 3'
input_string = re.sub('([^0-9])([0-9])', r'\1 \2', input_string)
return input_string.lower().split()
def Lower(words):
"""Makes a lower-case identifier from words.
Args:
words: a list of lower-case words.
Returns:
the lower-case identifier.
"""
return '_'.join(words)
def ToUnderscore(input_string):
"""converts CamelCase to camel_case."""
words = SplitWords(input_string)
return Lower(words)
class CWriter(object):
"""Writes to a file formatting it for Google's style guidelines."""
def __init__(self, filename):
self.filename = filename
self.file_num = 0
self.content = []
def SetFileNum(self, num):
"""Used to help write number files and tests."""
self.file_num = num
def Write(self, string):
"""Writes a string to a file spliting if it's > 80 characters."""
lines = string.splitlines()
num_lines = len(lines)
for ii in range(0, num_lines):
self.__WriteLine(lines[ii], ii < (num_lines - 1) or string[-1] == '\n')
def __FindSplit(self, string):
"""Finds a place to split a string."""
splitter = string.find('=')
if splitter >= 1 and not string[splitter + 1] == '=' and splitter < 80:
return splitter
# parts = string.split('(')
parts = re.split(r"(?<=[^\"])\((?!\")", string)
fptr = re.compile(r'\*\w*\)')
if len(parts) > 1:
splitter = len(parts[0])
for ii in range(1, len(parts)):
# Don't split on the dot in "if (.condition)".
if (not parts[ii - 1][-3:] == "if " and
# Don't split "(.)" or "(.*fptr)".
(len(parts[ii]) > 0 and
not parts[ii][0] == ")" and not fptr.match(parts[ii]))
and splitter < 80):
return splitter
splitter += len(parts[ii]) + 1
done = False
end = len(string)
last_splitter = -1
while not done:
splitter = string[0:end].rfind(',')
if splitter < 0 or (splitter > 0 and string[splitter - 1] == '"'):
return last_splitter
elif splitter >= 80:
end = splitter
else:
return splitter
def __WriteLine(self, line, ends_with_eol):
"""Given a signle line, writes it to a file, splitting if it's > 80 chars"""
if len(line) >= 80:
i = self.__FindSplit(line)
if i > 0:
line1 = line[0:i + 1]
if line1[-1] == ' ':
line1 = line1[:-1]
lineend = ''
if line1[0] == '#':
lineend = ' \\'
nolint = ''
if len(line1) > 80:
nolint = ' // NOLINT'
self.__AddLine(line1 + nolint + lineend + '\n')
match = re.match("( +)", line1)
indent = ""
if match:
indent = match.group(1)
splitter = line[i]
if not splitter == ',':
indent = " " + indent
self.__WriteLine(indent + line[i + 1:].lstrip(), True)
return
nolint = ''
if len(line) > 80:
nolint = ' // NOLINT'
self.__AddLine(line + nolint)
if ends_with_eol:
self.__AddLine('\n')
def __AddLine(self, line):
self.content.append(line)
def Close(self):
"""Close the file."""
content = "".join(self.content)
write_file = True
if os.path.exists(self.filename):
old_file = open(self.filename, "rb")
old_content = old_file.read()
old_file.close()
if content == old_content:
write_file = False
if write_file:
file = open(self.filename, "wb")
file.write(content)
file.close()
class CHeaderWriter(CWriter):
"""Writes a C Header file."""
_non_alnum_re = re.compile(r'[^a-zA-Z0-9]')
def __init__(self, filename, file_comment = None):
CWriter.__init__(self, filename)
base = os.path.abspath(filename)
while os.path.basename(base) != 'src':
new_base = os.path.dirname(base)
assert new_base != base # Prevent infinite loop.
base = new_base
hpath = os.path.relpath(filename, base)
self.guard = self._non_alnum_re.sub('_', hpath).upper() + '_'
self.Write(_LICENSE)
self.Write(_DO_NOT_EDIT_WARNING)
if not file_comment == None:
self.Write(file_comment)
self.Write("#ifndef %s\n" % self.guard)
self.Write("#define %s\n\n" % self.guard)
def Close(self):
self.Write("#endif // %s\n\n" % self.guard)
CWriter.Close(self)
class TypeHandler(object):
"""This class emits code for a particular type of function."""
_remove_expected_call_re = re.compile(r' EXPECT_CALL.*?;\n', re.S)
def __init__(self):
pass
def InitFunction(self, func):
"""Add or adjust anything type specific for this function."""
if func.GetInfo('needs_size') and not func.name.endswith('Bucket'):
func.AddCmdArg(DataSizeArgument('data_size'))
def AddImmediateFunction(self, generator, func):
"""Adds an immediate version of a function."""
# Generate an immediate command if explicitly requested, or by default
# when there is exactly 1 pointer arg.
immediate = func.GetInfo('immediate') # can be True, False or None
if immediate == True or immediate == None:
if func.num_pointer_args == 1 or immediate:
generator.AddFunction(ImmediateFunction(func))
def AddBucketFunction(self, generator, func):
"""Adds a bucket version of a function."""
# Generate a bucket command only if the function explicitly requests one.
bucket = func.GetInfo('bucket') # can be True, False or None
if bucket:
generator.AddFunction(BucketFunction(func))
def WriteStruct(self, func, file):
"""Writes a structure that matches the arguments to a function."""
comment = func.GetInfo('cmd_comment')
if not comment == None:
file.Write(comment)
file.Write("struct %s {\n" % func.name)
file.Write(" typedef %s ValueType;\n" % func.name)
file.Write(" static const CommandId kCmdId = k%s;\n" % func.name)
func.WriteCmdArgFlag(file)
file.Write("\n")
result = func.GetInfo('result')
if not result == None:
if len(result) == 1:
file.Write(" typedef %s Result;\n\n" % result[0])
else:
file.Write(" struct Result {\n")
for line in result:
file.Write(" %s;\n" % line)
file.Write(" };\n\n")
func.WriteCmdComputeSize(file)
func.WriteCmdSetHeader(file)
func.WriteCmdInit(file)
func.WriteCmdSet(file)
file.Write(" gpu::CommandHeader header;\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s %s;\n" % (arg.cmd_type, arg.name))
file.Write("};\n")
file.Write("\n")
size = len(args) * _SIZE_OF_UINT32 + _SIZE_OF_COMMAND_HEADER
file.Write("COMPILE_ASSERT(sizeof(%s) == %d,\n" % (func.name, size))
file.Write(" Sizeof_%s_is_not_%d);\n" % (func.name, size))
file.Write("COMPILE_ASSERT(offsetof(%s, header) == 0,\n" % func.name)
file.Write(" OffsetOf_%s_header_not_0);\n" % func.name)
offset = _SIZE_OF_COMMAND_HEADER
for arg in args:
file.Write("COMPILE_ASSERT(offsetof(%s, %s) == %d,\n" %
(func.name, arg.name, offset))
file.Write(" OffsetOf_%s_%s_not_%d);\n" %
(func.name, arg.name, offset))
offset += _SIZE_OF_UINT32
if not result == None and len(result) > 1:
offset = 0
for line in result:
parts = line.split()
name = parts[-1]
check = """
COMPILE_ASSERT(offsetof(%(cmd_name)s::Result, %(field_name)s) == %(offset)d,
OffsetOf_%(cmd_name)s_Result_%(field_name)s_not_%(offset)d);
"""
file.Write((check.strip() + "\n") % {
'cmd_name': func.name,
'field_name': name,
'offset': offset,
})
offset += _SIZE_OF_UINT32
file.Write("\n")
def WriteHandlerImplementation(self, func, file):
"""Writes the handler implementation for this command."""
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
def WriteCmdSizeTest(self, func, file):
"""Writes the size test for a command."""
file.Write(" EXPECT_EQ(sizeof(cmd), cmd.header.size * 4u);\n")
def WriteFormatTest(self, func, file):
"""Writes a format test for a command."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd")
args = func.GetCmdArgs()
for value, arg in enumerate(args):
file.Write(",\n static_cast<%s>(%d)" % (arg.type, value + 11))
file.Write(");\n")
file.Write(" EXPECT_EQ(static_cast<uint32>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
func.type_handler.WriteCmdSizeTest(func, file)
for value, arg in enumerate(args):
file.Write(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);\n" %
(arg.type, value + 11, arg.name))
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd));\n")
file.Write("}\n")
file.Write("\n")
def WriteImmediateFormatTest(self, func, file):
"""Writes a format test for an immediate version of a command."""
pass
def WriteBucketFormatTest(self, func, file):
"""Writes a format test for a bucket version of a command."""
pass
def WriteGetDataSizeCode(self, func, file):
"""Writes the code to set data_size used in validation"""
pass
def WriteImmediateCmdSizeTest(self, func, file):
"""Writes a size test for an immediate version of a command."""
file.Write(" // TODO(gman): Compute correct size.\n")
file.Write(" EXPECT_EQ(sizeof(cmd), cmd.header.size * 4u);\n")
def WriteImmediateHandlerImplementation(self, func, file):
"""Writes the handler impl for the immediate version of a command."""
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
def WriteBucketHandlerImplementation(self, func, file):
"""Writes the handler impl for the bucket version of a command."""
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
def WriteServiceImplementation(self, func, file):
"""Writes the service implementation for a command."""
file.Write(
"error::Error GLES2DecoderImpl::Handle%s(\n" % func.name)
file.Write(
" uint32 immediate_data_size, const gles2::cmds::%s& c) {\n" %
func.name)
self.WriteHandlerDeferReadWrite(func, file)
if len(func.GetOriginalArgs()) > 0:
last_arg = func.GetLastOriginalArg()
all_but_last_arg = func.GetOriginalArgs()[:-1]
for arg in all_but_last_arg:
arg.WriteGetCode(file)
self.WriteGetDataSizeCode(func, file)
last_arg.WriteGetCode(file)
func.WriteHandlerValidation(file)
func.WriteHandlerImplementation(file)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteImmediateServiceImplementation(self, func, file):
"""Writes the service implementation for an immediate version of command."""
file.Write(
"error::Error GLES2DecoderImpl::Handle%s(\n" % func.name)
file.Write(
" uint32 immediate_data_size, const gles2::cmds::%s& c) {\n" %
func.name)
self.WriteHandlerDeferReadWrite(func, file)
last_arg = func.GetLastOriginalArg()
all_but_last_arg = func.GetOriginalArgs()[:-1]
for arg in all_but_last_arg:
arg.WriteGetCode(file)
self.WriteGetDataSizeCode(func, file)
last_arg.WriteGetCode(file)
func.WriteHandlerValidation(file)
func.WriteHandlerImplementation(file)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteBucketServiceImplementation(self, func, file):
"""Writes the service implementation for a bucket version of command."""
file.Write(
"error::Error GLES2DecoderImpl::Handle%s(\n" % func.name)
file.Write(
" uint32 immediate_data_size, const gles2::cmds::%s& c) {\n" %
func.name)
self.WriteHandlerDeferReadWrite(func, file)
last_arg = func.GetLastOriginalArg()
all_but_last_arg = func.GetOriginalArgs()[:-1]
for arg in all_but_last_arg:
arg.WriteGetCode(file)
self.WriteGetDataSizeCode(func, file)
last_arg.WriteGetCode(file)
func.WriteHandlerValidation(file)
func.WriteHandlerImplementation(file)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteHandlerDeferReadWrite(self, func, file):
"""Writes the code to handle deferring reads or writes."""
defer_reads = func.GetInfo('defer_reads')
defer_draws = func.GetInfo('defer_draws')
conditions = []
if defer_draws:
conditions.append('ShouldDeferDraws()')
if defer_reads:
conditions.append('ShouldDeferReads()')
if not conditions:
return
file.Write(" if (%s)\n" % ' || '.join(conditions))
file.Write(" return error::kDeferCommandUntilLater;\n")
def WriteValidUnitTest(self, func, file, test, extra={}):
"""Writes a valid unit test."""
if func.GetInfo('expectation') == False:
test = self._remove_expected_call_re.sub('', test)
name = func.name
arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()):
arg_strings.append(arg.GetValidArg(func, count, 0))
gl_arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()):
gl_arg_strings.append(arg.GetValidGLArg(func, count, 0))
gl_func_name = func.GetGLTestFunctionName()
vars = {
'test_name': 'GLES2DecoderTest%d' % file.file_num,
'name': name,
'gl_func_name': gl_func_name,
'args': ", ".join(arg_strings),
'gl_args': ", ".join(gl_arg_strings),
}
vars.update(extra)
old_test = ""
while old_test != test:
old_test = test
test = test % vars
file.Write(test % vars)
def WriteInvalidUnitTest(self, func, file, test, extra={}):
"""Writes a invalid unit test."""
for arg_index, arg in enumerate(func.GetOriginalArgs()):
num_invalid_values = arg.GetNumInvalidValues(func)
for value_index in range(0, num_invalid_values):
arg_strings = []
parse_result = "kNoError"
gl_error = None
for count, arg in enumerate(func.GetOriginalArgs()):
if count == arg_index:
(arg_string, parse_result, gl_error) = arg.GetInvalidArg(
count, value_index)
else:
arg_string = arg.GetValidArg(func, count, 0)
arg_strings.append(arg_string)
gl_arg_strings = []
for arg in func.GetOriginalArgs():
gl_arg_strings.append("_")
gl_func_name = func.GetGLTestFunctionName()
gl_error_test = ''
if not gl_error == None:
gl_error_test = '\n EXPECT_EQ(%s, GetGLError());' % gl_error
vars = {
'test_name': 'GLES2DecoderTest%d' % file.file_num,
'name': func.name,
'arg_index': arg_index,
'value_index': value_index,
'gl_func_name': gl_func_name,
'args': ", ".join(arg_strings),
'all_but_last_args': ", ".join(arg_strings[:-1]),
'gl_args': ", ".join(gl_arg_strings),
'parse_result': parse_result,
'gl_error_test': gl_error_test,
}
vars.update(extra)
file.Write(test % vars)
def WriteServiceUnitTest(self, func, file):
"""Writes the service unit test for a command."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
self.WriteValidUnitTest(func, file, valid_test)
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test)
def WriteImmediateServiceUnitTest(self, func, file):
"""Writes the service unit test for an immediate command."""
file.Write("// TODO(gman): %s\n" % func.name)
def WriteImmediateValidationCode(self, func, file):
"""Writes the validation code for an immediate version of a command."""
pass
def WriteBucketServiceUnitTest(self, func, file):
"""Writes the service unit test for a bucket command."""
file.Write("// TODO(gman): %s\n" % func.name)
def WriteBucketValidationCode(self, func, file):
"""Writes the validation code for a bucket version of a command."""
file.Write("// TODO(gman): %s\n" % func.name)
def WriteGLES2ImplementationDeclaration(self, func, file):
"""Writes the GLES2 Implemention declaration."""
impl_decl = func.GetInfo('impl_decl')
if impl_decl == None or impl_decl == True:
file.Write("virtual %s %s(%s) OVERRIDE;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write("\n")
def WriteGLES2CLibImplementation(self, func, file):
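    """Writes the GLES2 C lib implementation."""
    # For a hypothetical entry point ActiveTexture, this emits roughly:
    #   void GLES2ActiveTexture(GLenum texture) {
    #     gles2::GetGLContext()->ActiveTexture(texture);
    #   }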
file.Write("%s GLES2%s(%s) {\n" %
(func.return_type, func.name,
func.MakeTypedOriginalArgString("")))
result_string = "return "
if func.return_type == "void":
result_string = ""
file.Write(" %sgles2::GetGLContext()->%s(%s);\n" %
(result_string, func.original_name,
func.MakeOriginalArgString("")))
file.Write("}\n")
def WriteGLES2Header(self, func, file):
"""Writes a re-write macro for GLES"""
file.Write("#define gl%s GLES2_GET_FUN(%s)\n" %(func.name, func.name))
def WriteClientGLCallLog(self, func, file):
"""Writes a logging macro for the client side code."""
comma = ""
if len(func.GetOriginalArgs()):
comma = " << "
file.Write(
' GPU_CLIENT_LOG("[" << GetLogPrefix() << "] gl%s("%s%s << ")");\n' %
(func.original_name, comma, func.MakeLogArgString()))
def WriteClientGLReturnLog(self, func, file):
"""Writes the return value logging code."""
if func.return_type != "void":
file.Write(' GPU_CLIENT_LOG("return:" << result)\n')
def WriteGLES2ImplementationHeader(self, func, file):
"""Writes the GLES2 Implemention."""
self.WriteGLES2ImplementationDeclaration(func, file)
def WriteGLES2TraceImplementationHeader(self, func, file):
"""Writes the GLES2 Trace Implemention header."""
file.Write("virtual %s %s(%s) OVERRIDE;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteGLES2TraceImplementation(self, func, file):
"""Writes the GLES2 Trace Implemention."""
file.Write("%s GLES2TraceImplementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
result_string = "return "
if func.return_type == "void":
result_string = ""
file.Write(' TRACE_EVENT_BINARY_EFFICIENT0("gpu", "GLES2Trace::%s");\n' %
func.name)
file.Write(" %sgl_->%s(%s);\n" %
(result_string, func.name, func.MakeOriginalArgString("")))
file.Write("}\n")
file.Write("\n")
def WriteGLES2Implementation(self, func, file):
"""Writes the GLES2 Implemention."""
impl_func = func.GetInfo('impl_func')
impl_decl = func.GetInfo('impl_decl')
gen_cmd = func.GetInfo('gen_cmd')
if (func.can_auto_generate and
(impl_func == None or impl_func == True) and
(impl_decl == None or impl_decl == True) and
(gen_cmd == None or gen_cmd == True)):
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
self.WriteClientGLCallLog(func, file)
func.WriteDestinationInitalizationValidation(file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" helper_->%s(%s);\n" %
(func.name, func.MakeOriginalArgString("")))
file.Write(" CheckGLError();\n")
self.WriteClientGLReturnLog(func, file)
file.Write("}\n")
file.Write("\n")
def WriteGLES2InterfaceHeader(self, func, file):
"""Writes the GLES2 Interface."""
file.Write("virtual %s %s(%s) = 0;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteGLES2InterfaceStub(self, func, file):
"""Writes the GLES2 Interface stub declaration."""
file.Write("virtual %s %s(%s) OVERRIDE;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteGLES2InterfaceStubImpl(self, func, file):
"""Writes the GLES2 Interface stub declaration."""
args = func.GetOriginalArgs()
arg_string = ", ".join(
["%s /* %s */" % (arg.type, arg.name) for arg in args])
file.Write("%s GLES2InterfaceStub::%s(%s) {\n" %
(func.return_type, func.original_name, arg_string))
if func.return_type != "void":
file.Write(" return 0;\n")
file.Write("}\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
client_test = func.GetInfo('client_test')
if (func.can_auto_generate and
(client_test == None or client_test == True)):
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = []
for count, arg in enumerate(func.GetCmdArgs()):
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func, count, 0))
gl_arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()):
gl_arg_strings.append(arg.GetValidClientSideArg(func, count, 0))
file.Write(code % {
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
else:
if client_test != False:
file.Write("// TODO: Implement unit test for %s\n" % func.name)
def WriteDestinationInitalizationValidation(self, func, file):
"""Writes the client side destintion initialization validation."""
for arg in func.GetOriginalArgs():
arg.WriteDestinationInitalizationValidation(file, func)
def WriteTraceEvent(self, func, file):
file.Write(' TRACE_EVENT0("gpu", "GLES2Implementation::%s");\n' %
func.original_name)
def WriteImmediateCmdComputeSize(self, func, file):
"""Writes the size computation code for the immediate version of a cmd."""
file.Write(" static uint32 ComputeSize(uint32 size_in_bytes) {\n")
file.Write(" return static_cast<uint32>(\n")
file.Write(" sizeof(ValueType) + // NOLINT\n")
file.Write(" RoundSizeToMultipleOfEntries(size_in_bytes));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Writes the SetHeader function for the immediate version of a cmd."""
file.Write(" void SetHeader(uint32 size_in_bytes) {\n")
file.Write(" header.SetCmdByTotalSize<ValueType>(size_in_bytes);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Writes the Init function for the immediate version of a command."""
raise NotImplementedError(func.name)
def WriteImmediateCmdSet(self, func, file):
"""Writes the Set function for the immediate version of a command."""
raise NotImplementedError(func.name)
def WriteCmdHelper(self, func, file):
"""Writes the cmd helper definition for a cmd."""
code = """ void %(name)s(%(typed_args)s) {
gles2::cmds::%(name)s* c = GetCmdSpace<gles2::cmds::%(name)s>();
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedCmdArgString(""),
"args": func.MakeCmdArgString(""),
})
def WriteImmediateCmdHelper(self, func, file):
"""Writes the cmd helper definition for the immediate version of a cmd."""
code = """ void %(name)s(%(typed_args)s) {
const uint32 s = 0; // TODO(gman): compute correct size
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(s);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedCmdArgString(""),
"args": func.MakeCmdArgString(""),
})
class StateSetHandler(TypeHandler):
"""Handler for commands that simply set state."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
code = []
    for ndx, item in enumerate(states):
if 'range_checks' in item:
for range_check in item['range_checks']:
code.append("%s %s" % (args[ndx].name, range_check['check']))
if len(code):
file.Write(" if (%s) {\n" % " ||\n ".join(code))
file.Write(
' LOCAL_SET_GL_ERROR(GL_INVALID_VALUE,'
' "%s", "%s out of range");\n' %
(func.name, args[ndx].name))
file.Write(" return error::kNoError;\n")
file.Write(" }\n")
code = []
    for ndx, item in enumerate(states):
code.append("state_.%s != %s" % (item['name'], args[ndx].name))
file.Write(" if (%s) {\n" % " ||\n ".join(code))
    for ndx, item in enumerate(states):
file.Write(" state_.%s = %s;\n" % (item['name'], args[ndx].name))
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
TypeHandler.WriteServiceUnitTest(self, func, file)
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
    for ndx, item in enumerate(states):
if 'range_checks' in item:
for check_ndx, range_check in enumerate(item['range_checks']):
valid_test = """
TEST_F(%(test_name)s, %(name)sInvalidValue%(ndx)d_%(check_ndx)d) {
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
name = func.name
arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()):
arg_strings.append(arg.GetValidArg(func, count, 0))
arg_strings[ndx] = range_check['test_value']
vars = {
'test_name': 'GLES2DecoderTest%d' % file.file_num,
'name': name,
'ndx': ndx,
'check_ndx': check_ndx,
'args': ", ".join(arg_strings),
}
          file.Write(invalid_value_test % vars)
class StateSetRGBAlphaHandler(TypeHandler):
"""Handler for commands that simply set state that have rgb/alpha."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
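    # There are typically more states than arguments here (the RGB and alpha
    # states share arguments), so indexing wraps via 'ndx % num_args'.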
code = []
    for ndx, item in enumerate(states):
code.append("state_.%s != %s" % (item['name'], args[ndx % num_args].name))
file.Write(" if (%s) {\n" % " ||\n ".join(code))
for ndx, item in enumerate(states):
file.Write(" state_.%s = %s;\n" %
(item['name'], args[ndx % num_args].name))
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
class StateSetFrontBackSeparateHandler(TypeHandler):
"""Handler for commands that simply set state that have front/back."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
face = args[0].name
num_args = len(args)
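    # args[0] selects the face; Grouper splits the states into a GL_FRONT
    # group and a GL_BACK group of (num_args - 1) states each, updated only
    # when the face argument matches (or is GL_FRONT_AND_BACK).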
file.Write(" bool changed = false;\n")
for group_ndx, group in enumerate(Grouper(num_args - 1, states)):
file.Write(" if (%s == %s || %s == GL_FRONT_AND_BACK) {\n" %
(face, ('GL_FRONT', 'GL_BACK')[group_ndx], face))
code = []
for ndx, item in enumerate(group):
code.append("state_.%s != %s" % (item['name'], args[ndx + 1].name))
file.Write(" changed |= %s;\n" % " ||\n ".join(code))
file.Write(" }\n")
file.Write(" if (changed) {\n")
for group_ndx, group in enumerate(Grouper(num_args - 1, states)):
file.Write(" if (%s == %s || %s == GL_FRONT_AND_BACK) {\n" %
(face, ('GL_FRONT', 'GL_BACK')[group_ndx], face))
for ndx, item in enumerate(group):
file.Write(" state_.%s = %s;\n" %
(item['name'], args[ndx + 1].name))
file.Write(" }\n")
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
class StateSetFrontBackHandler(TypeHandler):
"""Handler for commands that simply set state that set both front/back."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
code = []
for group_ndx, group in enumerate(Grouper(num_args, states)):
for ndx, item in enumerate(group):
code.append("state_.%s != %s" % (item['name'], args[ndx].name))
file.Write(" if (%s) {\n" % " ||\n ".join(code))
for group_ndx, group in enumerate(Grouper(num_args, states)):
for ndx, item in enumerate(group):
file.Write(" state_.%s = %s;\n" % (item['name'], args[ndx].name))
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
class StateSetNamedParameter(TypeHandler):
"""Handler for commands that set a state chosen with an enum parameter."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overridden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
assert num_args == 2
file.Write(" switch (%s) {\n" % args[0].name)
for state in states:
file.Write(" case %s:\n" % state['enum'])
file.Write(" if (state_.%s != %s) {\n" %
(state['name'], args[1].name))
file.Write(" state_.%s = %s;\n" % (state['name'], args[1].name))
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
file.Write(" break;\n")
file.Write(" default:\n")
file.Write(" NOTREACHED();\n")
file.Write(" }\n")
class CustomHandler(TypeHandler):
"""Handler for commands that are auto-generated but require minor tweaks."""
def __init__(self):
TypeHandler.__init__(self)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateCmdGetTotalSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" uint32 total_size = 0; // TODO(gman): get correct size.\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void Init(%s) {\n" % func.MakeTypedCmdArgString("_"))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" SetHeader(total_size);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s) {\n" %
func.MakeTypedCmdArgString("_", True))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, total_size);\n")
file.Write(" }\n")
file.Write("\n")
class TodoHandler(CustomHandler):
"""Handle for commands that are not yet implemented."""
def AddImmediateFunction(self, generator, func):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" // TODO: for now this is a no-op\n")
file.Write(
" SetGLError("
"GL_INVALID_OPERATION, \"gl%s\", \"not implemented\");\n" %
func.name)
if func.return_type != "void":
file.Write(" return 0;\n")
file.Write("}\n")
file.Write("\n")
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(
"error::Error GLES2DecoderImpl::Handle%s(\n" % func.name)
file.Write(
" uint32 immediate_data_size, const gles2::cmds::%s& c) {\n" %
func.name)
file.Write(" // TODO: for now this is a no-op\n")
file.Write(
" LOCAL_SET_GL_ERROR("
"GL_INVALID_OPERATION, \"gl%s\", \"not implemented\");\n" %
func.name)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
class HandWrittenHandler(CustomHandler):
"""Handler for comands where everything must be written by hand."""
def InitFunction(self, func):
"""Add or adjust anything type specific for this function."""
CustomHandler.InitFunction(self, func)
func.can_auto_generate = False
def WriteStruct(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteDocs(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteBucketServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteBucketCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Write test for %s\n" % func.name)
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Write test for %s\n" % func.name)
def WriteBucketFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Write test for %s\n" % func.name)
class ManualHandler(CustomHandler):
"""Handler for commands who's handlers must be written by hand."""
def __init__(self):
CustomHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
    if func.name == 'CompressedTexImage2DBucket':
func.cmd_args = func.cmd_args[:-1]
func.AddCmdArg(Argument('bucket_id', 'GLuint'))
else:
CustomHandler.InitFunction(self, func)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Implement test for %s\n" % func.name)
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
if func.GetInfo('impl_func'):
super(ManualHandler, self).WriteGLES2Implementation(func, file)
def WriteGLES2ImplementationHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("virtual %s %s(%s) OVERRIDE;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write("\n")
def WriteImmediateCmdGetTotalSize(self, func, file):
"""Overrriden from TypeHandler."""
# TODO(gman): Move this data to _FUNCTION_INFO?
if func.name == 'ShaderSourceImmediate':
file.Write(" uint32 total_size = ComputeSize(_data_size);\n")
else:
CustomHandler.WriteImmediateCmdGetTotalSize(self, func, file)
class DataHandler(TypeHandler):
"""Handler for glBufferData, glBufferSubData, glTexImage2D, glTexSubImage2D,
  glCompressedTexImage2D, glCompressedTexSubImage2D."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
if func.name == 'CompressedTexSubImage2DBucket':
func.cmd_args = func.cmd_args[:-1]
func.AddCmdArg(Argument('bucket_id', 'GLuint'))
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
# TODO(gman): Move this data to _FUNCTION_INFO?
name = func.name
if name.endswith("Immediate"):
name = name[0:-9]
if name == 'BufferData' or name == 'BufferSubData':
file.Write(" uint32 data_size = size;\n")
elif (name == 'CompressedTexImage2D' or
name == 'CompressedTexSubImage2D'):
file.Write(" uint32 data_size = imageSize;\n")
elif (name == 'CompressedTexSubImage2DBucket'):
file.Write(" Bucket* bucket = GetBucket(c.bucket_id);\n")
file.Write(" uint32 data_size = bucket->size();\n")
file.Write(" GLsizei imageSize = data_size;\n")
elif name == 'TexImage2D' or name == 'TexSubImage2D':
code = """ uint32 data_size;
if (!GLES2Util::ComputeImageDataSize(
width, height, format, type, unpack_alignment_, &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code)
else:
file.Write("// uint32 data_size = 0; // TODO(gman): get correct size!\n")
def WriteImmediateCmdGetTotalSize(self, func, file):
"""Overrriden from TypeHandler."""
# TODO(gman): Move this data to _FUNCTION_INFO?
if func.name == 'BufferDataImmediate':
file.Write(" uint32 total_size = ComputeSize(_size);\n")
elif func.name == 'BufferSubDataImmediate':
file.Write(" uint32 total_size = ComputeSize(_size);\n")
elif func.name == 'CompressedTexImage2DImmediate':
file.Write(" uint32 total_size = ComputeSize(_imageSize);\n")
elif func.name == 'CompressedTexSubImage2DImmediate':
file.Write(" uint32 total_size = ComputeSize(_imageSize);\n")
elif func.name == 'TexImage2DImmediate':
file.Write(
" uint32 total_size = 0; // TODO(gman): get correct size\n")
elif func.name == 'TexSubImage2DImmediate':
file.Write(
" uint32 total_size = 0; // TODO(gman): get correct size\n")
def WriteImmediateCmdSizeTest(self, func, file):
"""Overrriden from TypeHandler."""
# TODO(gman): Move this data to _FUNCTION_INFO?
if func.name == 'BufferDataImmediate':
file.Write(" uint32 total_size = cmd.ComputeSize(cmd.size);\n")
elif func.name == 'BufferSubDataImmediate':
file.Write(" uint32 total_size = cmd.ComputeSize(cmd.size);\n")
elif func.name == 'CompressedTexImage2DImmediate':
file.Write(" uint32 total_size = cmd.ComputeSize(cmd.imageSize);\n")
elif func.name == 'CompressedTexSubImage2DImmediate':
file.Write(" uint32 total_size = cmd.ComputeSize(cmd.imageSize);\n")
elif func.name == 'TexImage2DImmediate':
file.Write(
" uint32 total_size = 0; // TODO(gman): get correct size\n")
elif func.name == 'TexSubImage2DImmediate':
file.Write(
" uint32 total_size = 0; // TODO(gman): get correct size\n")
file.Write(" EXPECT_EQ(sizeof(cmd), total_size);\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void Init(%s) {\n" % func.MakeTypedCmdArgString("_"))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" SetHeader(total_size);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s) {\n" %
func.MakeTypedCmdArgString("_", True))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, total_size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
# TODO(gman): Remove this exception.
file.Write("// TODO(gman): Implement test for %s\n" % func.name)
return
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
    if func.name != 'CompressedTexSubImage2DBucket':
      TypeHandler.WriteBucketServiceImplementation(self, func, file)
class BindHandler(TypeHandler):
"""Handler for glBind___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
if len(func.GetOriginalArgs()) == 1:
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
TEST_F(%(test_name)s, %(name)sValidArgsNewId) {
EXPECT_CALL(*gl_, %(gl_func_name)s(kNewServiceId));
EXPECT_CALL(*gl_, %(gl_gen_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(kNewClientId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_type': func.GetOriginalArgs()[0].resource_type,
'gl_gen_func_name': func.GetInfo("gen_func"),
})
else:
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
TEST_F(%(test_name)s, %(name)sValidArgsNewId) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(first_gl_arg)s, kNewServiceId));
EXPECT_CALL(*gl_, %(gl_gen_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(first_arg)s, kNewClientId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'first_arg': func.GetOriginalArgs()[0].GetValidArg(func, 0, 0),
'first_gl_arg': func.GetOriginalArgs()[0].GetValidGLArg(func, 0, 0),
'resource_type': func.GetOriginalArgs()[1].resource_type,
'gl_gen_func_name': func.GetInfo("gen_func"),
})
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test)
def WriteGLES2Implementation(self, func, file):
"""Writes the GLES2 Implemention."""
impl_func = func.GetInfo('impl_func')
impl_decl = func.GetInfo('impl_decl')
if (func.can_auto_generate and
(impl_func == None or impl_func == True) and
(impl_decl == None or impl_decl == True)):
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
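      # Bind%(type)sHelper (expanded below) appears to return false for a
      # redundant bind, in which case no command is sent; the client unit
      # test expects NoCommandsWritten() when the same bind is issued twice.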
code = """ if (Is%(type)sReservedId(%(id)s)) {
    SetGLError(GL_INVALID_OPERATION, "%(name)s", "%(id)s reserved id");
return;
}
if (Bind%(type)sHelper(%(arg_string)s)) {
helper_->%(name)s(%(arg_string)s);
}
CheckGLError();
}
"""
name_arg = None
if len(func.GetOriginalArgs()) == 1:
# Bind functions that have no target (like BindVertexArrayOES)
name_arg = func.GetOriginalArgs()[0]
else:
# Bind functions that have both a target and a name (like BindTexture)
name_arg = func.GetOriginalArgs()[1]
file.Write(code % {
'name': func.name,
'arg_string': func.MakeOriginalArgString(""),
'id': name_arg.name,
'type': name_arg.resource_type,
'lc_type': name_arg.resource_type.lower(),
})
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
client_test = func.GetInfo('client_test')
if client_test == False:
return
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
ClearCommands();
gl_->%(name)s(%(args)s);
EXPECT_TRUE(NoCommandsWritten());
}
"""
cmd_arg_strings = []
for count, arg in enumerate(func.GetCmdArgs()):
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func, count, 0))
gl_arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()):
gl_arg_strings.append(arg.GetValidClientSideArg(func, count, 0))
file.Write(code % {
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
class GENnHandler(TypeHandler):
"""Handler for glGen___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
pass
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32 data_size;
if (!SafeMultiplyUint32(n, sizeof(GLuint), &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code)
  def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" if (!%sHelper(n, %s)) {\n"
" return error::kInvalidArguments;\n"
" }\n" %
(func.name, func.GetLastOriginalArg().name))
def WriteImmediateHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" if (!%sHelper(n, %s)) {\n"
" return error::kInvalidArguments;\n"
" }\n" %
(func.original_name, func.GetLastOriginalArg().name))
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
log_code = (""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
GPU_CLIENT_LOG(" " << i << ": " << %s[i]);
}
});""" % func.GetOriginalArgs()[1].name)
args = {
'log_code': log_code,
'return_type': func.return_type,
'name': func.original_name,
'typed_args': func.MakeTypedOriginalArgString(""),
'args': func.MakeOriginalArgString(""),
'resource_types': func.GetInfo('resource_types'),
'count_name': func.GetOriginalArgs()[0].name,
}
file.Write(
"%(return_type)s GLES2Implementation::%(name)s(%(typed_args)s) {\n" %
args)
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
code = """ GPU_CLIENT_SINGLE_THREAD_CHECK();
GetIdHandler(id_namespaces::k%(resource_types)s)->
MakeIds(this, 0, %(args)s);
%(name)sHelper(%(args)s);
helper_->%(name)sImmediate(%(args)s);
helper_->CommandBufferHelper::Flush();
%(log_code)s
CheckGLError();
}
"""
file.Write(code % args)
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
GLuint ids[2] = { 0, };
struct Cmds {
cmds::%(name)sImmediate gen;
GLuint data[2];
};
Cmds expected;
expected.gen.Init(arraysize(ids), &ids[0]);
expected.data[0] = k%(types)sStartId;
expected.data[1] = k%(types)sStartId + 1;
gl_->%(name)s(arraysize(ids), &ids[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_EQ(k%(types)sStartId, ids[0]);
EXPECT_EQ(k%(types)sStartId + 1, ids[1]);
}
"""
file.Write(code % {
'name': func.name,
'types': func.GetInfo('resource_types'),
})
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
GetSharedMemoryAs<GLuint*>()[0] = kNewClientId;
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_name)s(kNewClientId) != NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type'),
})
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _)).Times(0);
GetSharedMemoryAs<GLuint*>()[0] = client_%(resource_name)s_id_;
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kInvalidArguments, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, file, invalid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
})
def WriteImmediateServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
GLuint temp = kNewClientId;
SpecializedSetup<cmds::%(name)s, 0>(true);
cmd->Init(1, &temp);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(*cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_name)s(kNewClientId) != NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type'),
})
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _)).Times(0);
cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(false);
cmd->Init(1, &client_%(resource_name)s_id_);
EXPECT_EQ(error::kInvalidArguments,
ExecuteImmediateCmd(*cmd, sizeof(&client_%(resource_name)s_id_)));
}
"""
self.WriteValidUnitTest(func, file, invalid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
})
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32 ComputeDataSize(GLsizei n) {\n")
file.Write(
" return static_cast<uint32>(sizeof(GLuint) * n); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32 ComputeSize(GLsizei n) {\n")
file.Write(" return static_cast<uint32>(\n")
file.Write(" sizeof(ValueType) + ComputeDataSize(n)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader(GLsizei n) {\n")
file.Write(" header.SetCmdByTotalSize<ValueType>(ComputeSize(n));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
file.Write(" SetHeader(_n);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
file.Write(" _%s, ComputeDataSize(_n));\n" % last_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
file.Write(" const uint32 size = ComputeSize(_n);\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32 size = gles2::cmds::%(name)s::ComputeSize(n);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" static GLuint ids[] = { 12, 23, 34, };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd, static_cast<GLsizei>(arraysize(ids)), ids);\n")
file.Write(" EXPECT_EQ(static_cast<uint32>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(cmd.n * 4u),\n")
file.Write(" cmd.header.size * 4u);\n")
file.Write(" EXPECT_EQ(static_cast<GLsizei>(arraysize(ids)), cmd.n);\n");
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(arraysize(ids) * 4u));\n")
file.Write(" // TODO(gman): Check that ids were inserted;\n")
file.Write("}\n")
file.Write("\n")
class CreateHandler(TypeHandler):
"""Handler for glCreate___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.AddCmdArg(Argument("client_id", 'uint32'))
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s))
.WillOnce(Return(kNewServiceId));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skNewClientId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != NULL);
}
"""
comma = ""
if len(func.GetOriginalArgs()):
comma =", "
self.WriteValidUnitTest(func, file, valid_test, {
'comma': comma,
'resource_type': func.name[6:],
})
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skNewClientId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, {
'comma': comma,
})
  def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" uint32 client_id = c.client_id;\n")
file.Write(" if (!%sHelper(%s)) {\n" %
(func.name, func.MakeCmdArgString("")))
file.Write(" return error::kInvalidArguments;\n")
file.Write(" }\n")
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" GLuint client_id;\n")
file.Write(
" GetIdHandler(id_namespaces::kProgramsAndShaders)->\n")
file.Write(" MakeIds(this, 0, 1, &client_id);\n")
file.Write(" helper_->%s(%s);\n" %
(func.name, func.MakeCmdArgString("")))
file.Write(' GPU_CLIENT_LOG("returned " << client_id);\n')
file.Write(" CheckGLError();\n")
file.Write(" return client_id;\n")
file.Write("}\n")
file.Write("\n")
class DeleteHandler(TypeHandler):
"""Handler for glDelete___ single resource type functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(
" GPU_CLIENT_DCHECK(%s != 0);\n" % func.GetOriginalArgs()[-1].name)
file.Write(" %sHelper(%s);\n" %
(func.original_name, func.GetOriginalArgs()[-1].name))
file.Write(" CheckGLError();\n")
file.Write("}\n")
file.Write("\n")
class DELnHandler(TypeHandler):
"""Handler for glDelete___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32 data_size;
if (!SafeMultiplyUint32(n, sizeof(GLuint), &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code)
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
GLuint ids[2] = { k%(types)sStartId, k%(types)sStartId + 1 };
struct Cmds {
cmds::%(name)sImmediate del;
GLuint data[2];
};
Cmds expected;
expected.del.Init(arraysize(ids), &ids[0]);
expected.data[0] = k%(types)sStartId;
expected.data[1] = k%(types)sStartId + 1;
gl_->%(name)s(arraysize(ids), &ids[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
file.Write(code % {
'name': func.name,
'types': func.GetInfo('resource_types'),
})
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(
*gl_,
%(gl_func_name)s(1, Pointee(kService%(upper_resource_name)sId)))
.Times(1);
GetSharedMemoryAs<GLuint*>()[0] = client_%(resource_name)s_id_;
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(
Get%(upper_resource_name)s(client_%(resource_name)s_id_) == NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
'upper_resource_name': func.GetInfo('resource_type'),
})
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs) {
GetSharedMemoryAs<GLuint*>()[0] = kInvalidClientId;
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, file, invalid_test)
def WriteImmediateServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(
*gl_,
%(gl_func_name)s(1, Pointee(kService%(upper_resource_name)sId)))
.Times(1);
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(true);
cmd.Init(1, &client_%(resource_name)s_id_);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(client_%(resource_name)s_id_)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(
Get%(upper_resource_name)s(client_%(resource_name)s_id_) == NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
'upper_resource_name': func.GetInfo('resource_type'),
})
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(false);
GLuint temp = kInvalidClientId;
cmd.Init(1, &temp);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
}
"""
self.WriteValidUnitTest(func, file, invalid_test)
  def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" %sHelper(n, %s);\n" %
(func.name, func.GetLastOriginalArg().name))
  def WriteImmediateHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" %sHelper(n, %s);\n" %
(func.original_name, func.GetLastOriginalArg().name))
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
impl_decl = func.GetInfo('impl_decl')
if impl_decl == None or impl_decl == True:
args = {
'return_type': func.return_type,
'name': func.original_name,
'typed_args': func.MakeTypedOriginalArgString(""),
'args': func.MakeOriginalArgString(""),
'resource_type': func.GetInfo('resource_type').lower(),
'count_name': func.GetOriginalArgs()[0].name,
}
file.Write(
"%(return_type)s GLES2Implementation::%(name)s(%(typed_args)s) {\n" %
args)
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
file.Write(""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
GPU_CLIENT_LOG(" " << i << ": " << %s[i]);
}
});
""" % func.GetOriginalArgs()[1].name)
file.Write(""" GPU_CLIENT_DCHECK_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
GPU_DCHECK(%s[i] != 0);
}
});
""" % func.GetOriginalArgs()[1].name)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
code = """ %(name)sHelper(%(args)s);
CheckGLError();
}
"""
file.Write(code % args)
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32 ComputeDataSize(GLsizei n) {\n")
file.Write(
" return static_cast<uint32>(sizeof(GLuint) * n); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32 ComputeSize(GLsizei n) {\n")
file.Write(" return static_cast<uint32>(\n")
file.Write(" sizeof(ValueType) + ComputeDataSize(n)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader(GLsizei n) {\n")
file.Write(" header.SetCmdByTotalSize<ValueType>(ComputeSize(n));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
file.Write(" SetHeader(_n);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
file.Write(" _%s, ComputeDataSize(_n));\n" % last_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
file.Write(" const uint32 size = ComputeSize(_n);\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32 size = gles2::cmds::%(name)s::ComputeSize(n);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" static GLuint ids[] = { 12, 23, 34, };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd, static_cast<GLsizei>(arraysize(ids)), ids);\n")
file.Write(" EXPECT_EQ(static_cast<uint32>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(cmd.n * 4u),\n")
file.Write(" cmd.header.size * 4u);\n")
file.Write(" EXPECT_EQ(static_cast<GLsizei>(arraysize(ids)), cmd.n);\n");
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(arraysize(ids) * 4u));\n")
file.Write(" // TODO(gman): Check that ids were inserted;\n")
file.Write("}\n")
file.Write("\n")
class GETnHandler(TypeHandler):
"""Handler for GETn for glGetBooleanv, glGetFloatv, ... type functions."""
def __init__(self):
TypeHandler.__init__(self)
def AddImmediateFunction(self, generator, func):
"""Overrriden from TypeHandler."""
pass
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(
"error::Error GLES2DecoderImpl::Handle%s(\n" % func.name)
file.Write(
" uint32 immediate_data_size, const gles2::cmds::%s& c) {\n" %
func.name)
last_arg = func.GetLastOriginalArg()
all_but_last_args = func.GetOriginalArgs()[:-1]
for arg in all_but_last_args:
arg.WriteGetCode(file)
code = """ typedef cmds::%(func_name)s::Result Result;
GLsizei num_values = 0;
GetNumValuesReturnedForGLGet(pname, &num_values);
Result* result = GetSharedMemoryAs<Result*>(
c.params_shm_id, c.params_shm_offset, Result::ComputeSize(num_values));
%(last_arg_type)s params = result ? result->GetData() : NULL;
"""
file.Write(code % {
'last_arg_type': last_arg.type,
'func_name': func.name,
})
func.WriteHandlerValidation(file)
code = """ // Check that the client initialized the result.
if (result->size != 0) {
return error::kInvalidArguments;
}
"""
shadowed = func.GetInfo('shadowed')
if not shadowed:
file.Write(' LOCAL_COPY_REAL_GL_ERRORS_TO_WRAPPER("%s");\n' % func.name)
file.Write(code)
func.WriteHandlerImplementation(file)
if shadowed:
code = """ result->SetNumResults(num_values);
return error::kNoError;
}
"""
else:
code = """ GLenum error = glGetError();
if (error == GL_NO_ERROR) {
result->SetNumResults(num_values);
} else {
LOCAL_SET_GL_ERROR(error, "%(func_name)s", "");
}
return error::kNoError;
}
"""
file.Write(code % {'func_name': func.name})
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
impl_decl = func.GetInfo('impl_decl')
if impl_decl == None or impl_decl == True:
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
all_but_last_args = func.GetOriginalArgs()[:-1]
arg_string = (
", ".join(["%s" % arg.name for arg in all_but_last_args]))
all_arg_string = (
", ".join(["%s" % arg.name for arg in func.GetOriginalArgs()]))
self.WriteTraceEvent(func, file)
code = """ if (%(func_name)sHelper(%(all_arg_string)s)) {
return;
}
typedef cmds::%(func_name)s::Result Result;
Result* result = GetResultAs<Result*>();
if (!result) {
return;
}
result->SetNumResults(0);
helper_->%(func_name)s(%(arg_string)s,
GetResultShmId(), GetResultShmOffset());
WaitForCmd();
result->CopyResult(params);
GPU_CLIENT_LOG_CODE_BLOCK({
for (int32 i = 0; i < result->GetNumResults(); ++i) {
GPU_CLIENT_LOG(" " << i << ": " << result->GetData()[i]);
}
});
CheckGLError();
}
"""
file.Write(code % {
'func_name': func.name,
'arg_string': arg_string,
'all_arg_string': all_arg_string,
})
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
typedef cmds::%(name)s::Result Result;
Result::Type result = 0;
Cmds expected;
ExpectedMemoryInfo result1 = GetExpectedResultMemory(4);
expected.cmd.Init(%(cmd_args)s, result1.id, result1.offset);
EXPECT_CALL(*command_buffer(), OnFlush())
.WillOnce(SetMemory(result1.ptr, SizedResultHelper<Result::Type>(1)))
.RetiresOnSaturation();
gl_->%(name)s(%(args)s, &result);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_EQ(static_cast<Result::Type>(1), result);
}
"""
cmd_arg_strings = []
for count, arg in enumerate(func.GetCmdArgs()[0:-2]):
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func, count, 0))
cmd_arg_strings[0] = '123'
gl_arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()[0:-1]):
gl_arg_strings.append(arg.GetValidClientSideArg(func, count, 0))
gl_arg_strings[0] = '123'
file.Write(code % {
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, GetError())
.WillOnce(Return(GL_NO_ERROR))
.WillOnce(Return(GL_NO_ERROR))
.RetiresOnSaturation();
SpecializedSetup<cmds::%(name)s, 0>(true);
typedef cmds::%(name)s::Result Result;
Result* result = static_cast<Result*>(shared_memory_address_);
EXPECT_CALL(*gl_, %(gl_func_name)s(%(local_gl_args)s));
result->size = 0;
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(decoder_->GetGLES2Util()->GLGetNumValuesReturned(
%(valid_pname)s),
result->GetNumResults());
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
gl_arg_strings = []
valid_pname = ''
for count, arg in enumerate(func.GetOriginalArgs()[:-1]):
arg_value = arg.GetValidGLArg(func, count, 0)
gl_arg_strings.append(arg_value)
if arg.name == 'pname':
valid_pname = arg_value
if func.GetInfo('gl_test_func') == 'glGetIntegerv':
gl_arg_strings.append("_")
else:
gl_arg_strings.append("result->GetData()")
self.WriteValidUnitTest(func, file, valid_test, {
'local_gl_args': ", ".join(gl_arg_strings),
'valid_pname': valid_pname,
})
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s::Result* result =
static_cast<cmds::%(name)s::Result*>(shared_memory_address_);
result->size = 0;
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));
EXPECT_EQ(0u, result->size);%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test)
class PUTHandler(TypeHandler):
"""Handler for glTexParameter_v, glVertexAttrib_v functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteServiceUnitTest(self, func, file):
"""Writes the service unit test for a command."""
expected_call = "EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));"
if func.GetInfo("first_element_only"):
gl_arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()):
gl_arg_strings.append(arg.GetValidGLArg(func, count, 0))
gl_arg_strings[-1] = "*" + gl_arg_strings[-1]
expected_call = ("EXPECT_CALL(*gl_, %%(gl_func_name)s(%s));" %
", ".join(gl_arg_strings))
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
GetSharedMemoryAs<%(data_type)s*>()[0] = %(data_value)s;
%(expected_call)s
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
extra = {
'data_type': func.GetInfo('data_type'),
'data_value': func.GetInfo('data_value') or '0',
'expected_call': expected_call,
}
self.WriteValidUnitTest(func, file, valid_test, extra)
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
GetSharedMemoryAs<%(data_type)s*>()[0] = %(data_value)s;
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, extra)
def WriteImmediateServiceUnitTest(self, func, file):
"""Writes the service unit test for a command."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(true);
%(data_type)s temp[%(data_count)s] = { %(data_value)s, };
cmd.Init(%(gl_args)s, &temp[0]);
EXPECT_CALL(
*gl_,
%(gl_func_name)s(%(gl_args)s, %(data_ref)sreinterpret_cast<
%(data_type)s*>(ImmediateDataAddress(&cmd))));
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
gl_arg_strings = []
gl_any_strings = []
for count, arg in enumerate(func.GetOriginalArgs()[0:-1]):
gl_arg_strings.append(arg.GetValidGLArg(func, count, 0))
gl_any_strings.append("_")
extra = {
'data_ref': ("*" if func.GetInfo('first_element_only') else ""),
'data_type': func.GetInfo('data_type'),
'data_count': func.GetInfo('count'),
'data_value': func.GetInfo('data_value') or '0',
'gl_args': ", ".join(gl_arg_strings),
'gl_any_args': ", ".join(gl_any_strings),
}
self.WriteValidUnitTest(func, file, valid_test, extra)
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
%(data_type)s temp[%(data_count)s] = { %(data_value)s, };
cmd.Init(%(all_but_last_args)s, &temp[0]);
EXPECT_EQ(error::%(parse_result)s,
ExecuteImmediateCmd(cmd, sizeof(temp)));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, extra)
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32 data_size;
if (!ComputeDataSize(1, sizeof(%s), %d, &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code % (func.info.data_type, func.info.count))
if func.is_immediate:
file.Write(" if (data_size > immediate_data_size) {\n")
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
last_arg_name = func.GetLastOriginalArg().name
values_str = ' << ", " << '.join(
["%s[%d]" % (last_arg_name, ndx) for ndx in range(0, func.info.count)])
file.Write(' GPU_CLIENT_LOG("values: " << %s);\n' % values_str)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" helper_->%sImmediate(%s);\n" %
(func.name, func.MakeOriginalArgString("")))
file.Write(" CheckGLError();\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
%(type)s data[%(count)d] = {0};
struct Cmds {
cmds::%(name)sImmediate cmd;
%(type)s data[%(count)d];
};
for (int jj = 0; jj < %(count)d; ++jj) {
data[jj] = static_cast<%(type)s>(jj);
}
Cmds expected;
expected.cmd.Init(%(cmd_args)s, &data[0]);
gl_->%(name)s(%(args)s, &data[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = []
for count, arg in enumerate(func.GetCmdArgs()[0:-2]):
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func, count, 0))
gl_arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()[0:-1]):
gl_arg_strings.append(arg.GetValidClientSideArg(func, count, 0))
file.Write(code % {
'name': func.name,
'type': func.GetInfo('data_type'),
'count': func.GetInfo('count'),
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32 ComputeDataSize() {\n")
file.Write(" return static_cast<uint32>(\n")
file.Write(" sizeof(%s) * %d); // NOLINT\n" %
(func.info.data_type, func.info.count))
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32 ComputeSize() {\n")
file.Write(" return static_cast<uint32>(\n")
file.Write(
" sizeof(ValueType) + ComputeDataSize()); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader() {\n")
file.Write(
" header.SetCmdByTotalSize<ValueType>(ComputeSize());\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
file.Write(" SetHeader();\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
file.Write(" _%s, ComputeDataSize());\n" % last_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
file.Write(" const uint32 size = ComputeSize();\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32 size = gles2::cmds::%(name)s::ComputeSize();
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" const int kSomeBaseValueToTestWith = 51;\n")
file.Write(" static %s data[] = {\n" % func.info.data_type)
for v in range(0, func.info.count):
file.Write(" static_cast<%s>(kSomeBaseValueToTestWith + %d),\n" %
(func.info.data_type, v))
file.Write(" };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd")
args = func.GetCmdArgs()
for value, arg in enumerate(args):
file.Write(",\n static_cast<%s>(%d)" % (arg.type, value + 11))
file.Write(",\n data);\n")
file.Write(" EXPECT_EQ(static_cast<uint32>(cmds::%s::kCmdId),\n"
% func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(sizeof(data)),\n")
file.Write(" cmd.header.size * 4u);\n")
for value, arg in enumerate(args):
file.Write(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);\n" %
(arg.type, value + 11, arg.name))
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(sizeof(data)));\n")
file.Write(" // TODO(gman): Check that data was inserted;\n")
file.Write("}\n")
file.Write("\n")
class PUTnHandler(TypeHandler):
"""Handler for PUTn 'glUniform__v' type functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteServiceUnitTest(self, func, file):
"""Overridden from TypeHandler."""
TypeHandler.WriteServiceUnitTest(self, func, file)
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgsCountTooLarge) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
gl_arg_strings = []
arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()):
# hardcoded to match unit tests.
if count == 0:
# the location of the second element of the 2nd uniform.
# defined in GLES2DecoderBase::SetupShaderForUniform
gl_arg_strings.append("3")
arg_strings.append("ProgramManager::MakeFakeLocation(1, 1)")
elif count == 1:
# the number of elements that gl will be called with.
gl_arg_strings.append("3")
# the number of elements requested in the command.
arg_strings.append("5")
else:
gl_arg_strings.append(arg.GetValidGLArg(func, count, 0))
arg_strings.append(arg.GetValidArg(func, count, 0))
extra = {
'gl_args': ", ".join(gl_arg_strings),
'args': ", ".join(arg_strings),
}
self.WriteValidUnitTest(func, file, valid_test, extra)
def WriteImmediateServiceUnitTest(self, func, file):
"""Overridden from TypeHandler."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
EXPECT_CALL(
*gl_,
%(gl_func_name)s(%(gl_args)s,
reinterpret_cast<%(data_type)s*>(ImmediateDataAddress(&cmd))));
SpecializedSetup<cmds::%(name)s, 0>(true);
%(data_type)s temp[%(data_count)s * 2] = { 0, };
cmd.Init(%(args)s, &temp[0]);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
gl_arg_strings = []
gl_any_strings = []
arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()[0:-1]):
gl_arg_strings.append(arg.GetValidGLArg(func, count, 0))
gl_any_strings.append("_")
arg_strings.append(arg.GetValidArg(func, count, 0))
extra = {
'data_type': func.GetInfo('data_type'),
'data_count': func.GetInfo('count'),
'args': ", ".join(arg_strings),
'gl_args': ", ".join(gl_arg_strings),
'gl_any_args': ", ".join(gl_any_strings),
}
self.WriteValidUnitTest(func, file, valid_test, extra)
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
%(data_type)s temp[%(data_count)s * 2] = { 0, };
cmd.Init(%(all_but_last_args)s, &temp[0]);
EXPECT_EQ(error::%(parse_result)s,
ExecuteImmediateCmd(cmd, sizeof(temp)));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, extra)
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32 data_size;
if (!ComputeDataSize(count, sizeof(%s), %d, &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code % (func.info.data_type, func.info.count))
if func.is_immediate:
file.Write(" if (data_size > immediate_data_size) {\n")
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
last_arg_name = func.GetLastOriginalArg().name
file.Write(""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < count; ++i) {
""")
values_str = ' << ", " << '.join(
["%s[%d + i * %d]" % (
last_arg_name, ndx, func.info.count) for ndx in range(
0, func.info.count)])
file.Write(' GPU_CLIENT_LOG(" " << i << ": " << %s);\n' % values_str)
file.Write(" }\n });\n")
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" helper_->%sImmediate(%s);\n" %
(func.name, func.MakeOriginalArgString("")))
file.Write(" CheckGLError();\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
%(type)s data[%(count_param)d][%(count)d] = {{0}};
struct Cmds {
cmds::%(name)sImmediate cmd;
%(type)s data[%(count_param)d][%(count)d];
};
Cmds expected;
for (int ii = 0; ii < %(count_param)d; ++ii) {
for (int jj = 0; jj < %(count)d; ++jj) {
data[ii][jj] = static_cast<%(type)s>(ii * %(count)d + jj);
}
}
expected.cmd.Init(%(cmd_args)s, &data[0][0]);
gl_->%(name)s(%(args)s, &data[0][0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = []
for count, arg in enumerate(func.GetCmdArgs()[0:-2]):
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func, count, 0))
gl_arg_strings = []
count_param = 0
for count, arg in enumerate(func.GetOriginalArgs()[0:-1]):
gl_arg_strings.append(arg.GetValidClientSideArg(func, count, 0))
if arg.name == "count":
count_param = int(arg.GetValidClientSideArg(func, count, 0))
file.Write(code % {
'name': func.name,
'type': func.GetInfo('data_type'),
'count': func.GetInfo('count'),
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
'count_param': count_param,
})
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32 ComputeDataSize(GLsizei count) {\n")
file.Write(" return static_cast<uint32>(\n")
file.Write(" sizeof(%s) * %d * count); // NOLINT\n" %
(func.info.data_type, func.info.count))
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32 ComputeSize(GLsizei count) {\n")
file.Write(" return static_cast<uint32>(\n")
file.Write(
" sizeof(ValueType) + ComputeDataSize(count)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader(GLsizei count) {\n")
file.Write(
" header.SetCmdByTotalSize<ValueType>(ComputeSize(count));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
file.Write(" SetHeader(_count);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
file.Write(" _%s, ComputeDataSize(_count));\n" % last_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
file.Write(" const uint32 size = ComputeSize(_count);\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32 size = gles2::cmds::%(name)s::ComputeSize(count);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
args = func.GetCmdArgs()
count_param = 0
for value, arg in enumerate(args):
if arg.name == "count":
count_param = int(arg.GetValidClientSideArg(func, value, 0))
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" const int kSomeBaseValueToTestWith = 51;\n")
file.Write(" static %s data[] = {\n" % func.info.data_type)
for v in range(0, func.info.count * count_param):
file.Write(" static_cast<%s>(kSomeBaseValueToTestWith + %d),\n" %
(func.info.data_type, v))
file.Write(" };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" const GLsizei kNumElements = %d;\n" % count_param)
file.Write(" const size_t kExpectedCmdSize =\n")
file.Write(" sizeof(cmd) + kNumElements * sizeof(%s) * %d;\n" %
(func.info.data_type, func.info.count))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd")
for value, arg in enumerate(args):
file.Write(",\n static_cast<%s>(%d)" % (arg.type, value + 1))
file.Write(",\n data);\n")
file.Write(" EXPECT_EQ(static_cast<uint32>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(kExpectedCmdSize, cmd.header.size * 4u);\n")
for value, arg in enumerate(args):
file.Write(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);\n" %
(arg.type, value + 1, arg.name))
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(sizeof(data)));\n")
file.Write(" // TODO(gman): Check that data was inserted;\n")
file.Write("}\n")
file.Write("\n")
class PUTXnHandler(TypeHandler):
"""Handler for glUniform?f functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
code = """ %(type)s temp[%(count)s] = { %(values)s};
Do%(name)sv(%(location)s, 1, &temp[0]);
"""
values = ""
args = func.GetOriginalArgs()
count = int(func.GetInfo('count'))
    num_args = len(args)
    for ii in range(count):
      values += "%s, " % args[num_args - count + ii].name
file.Write(code % {
'name': func.name,
'count': func.GetInfo('count'),
'type': func.GetInfo('data_type'),
'location': args[0].name,
'args': func.MakeOriginalArgString(""),
'values': values,
})
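  # Illustration (hypothetical, assuming a glUniform4f entry with count 4):
  # the template above expands to
  #   GLfloat temp[4] = { x, y, z, w, };
  #   DoUniform4fv(location, 1, &temp[0]);
  # i.e. the scalar entry point is folded onto its vector handler.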
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(name)sv(%(local_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
args = func.GetOriginalArgs()
local_args = "%s, 1, _" % args[0].GetValidGLArg(func, 0, 0)
self.WriteValidUnitTest(func, file, valid_test, {
'name': func.name,
'count': func.GetInfo('count'),
'local_args': local_args,
})
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
  EXPECT_CALL(*gl_, %(name)sv(_, _, _)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, {
        'name': func.name,
'count': func.GetInfo('count'),
})
class GLcharHandler(CustomHandler):
"""Handler for functions that pass a single string ."""
def __init__(self):
CustomHandler.__init__(self)
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32 ComputeSize(uint32 data_size) {\n")
file.Write(" return static_cast<uint32>(\n")
file.Write(" sizeof(ValueType) + data_size); // NOLINT\n")
file.Write(" }\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
code = """
void SetHeader(uint32 data_size) {
header.SetCmdBySize<ValueType>(data_size);
}
"""
file.Write(code)
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
args = func.GetCmdArgs()
set_code = []
for arg in args:
set_code.append(" %s = _%s;" % (arg.name, arg.name))
code = """
void Init(%(typed_args)s, uint32 _data_size) {
SetHeader(_data_size);
%(set_code)s
memcpy(ImmediateDataAddress(this), _%(last_arg)s, _data_size);
}
"""
file.Write(code % {
"typed_args": func.MakeTypedOriginalArgString("_"),
"set_code": "\n".join(set_code),
"last_arg": last_arg.name
})
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void* Set(void* cmd%s, uint32 _data_size) {\n" %
func.MakeTypedOriginalArgString("_", True))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _data_size);\n" %
func.MakeOriginalArgString("_"))
file.Write(" return NextImmediateCmdAddress<ValueType>("
"cmd, _data_size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32 data_size = strlen(name);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpace<gles2::cmds::%(name)s>(data_size);
if (c) {
c->Init(%(args)s, data_size);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
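  # Note: the helper template above hardcodes strlen(name), i.e. it assumes
  # the single string parameter is literally named 'name', as is the case
  # for functions like glBindAttribLocation.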
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
init_code = []
check_code = []
all_but_last_arg = func.GetCmdArgs()[:-1]
for value, arg in enumerate(all_but_last_arg):
init_code.append(" static_cast<%s>(%d)," % (arg.type, value + 11))
for value, arg in enumerate(all_but_last_arg):
check_code.append(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);" %
(arg.type, value + 11, arg.name))
code = """
TEST_F(GLES2FormatTest, %(func_name)s) {
cmds::%(func_name)s& cmd = *GetBufferAs<cmds::%(func_name)s>();
  static const char* const test_str = "test string";
void* next_cmd = cmd.Set(
&cmd,
%(init_code)s
test_str,
strlen(test_str));
EXPECT_EQ(static_cast<uint32>(cmds::%(func_name)s::kCmdId),
cmd.header.command);
EXPECT_EQ(sizeof(cmd) +
RoundSizeToMultipleOfEntries(strlen(test_str)),
cmd.header.size * 4u);
EXPECT_EQ(static_cast<char*>(next_cmd),
reinterpret_cast<char*>(&cmd) + sizeof(cmd) +
RoundSizeToMultipleOfEntries(strlen(test_str)));
%(check_code)s
EXPECT_EQ(static_cast<uint32>(strlen(test_str)), cmd.data_size);
EXPECT_EQ(0, memcmp(test_str, ImmediateDataAddress(&cmd), strlen(test_str)));
CheckBytesWritten(
next_cmd,
sizeof(cmd) + RoundSizeToMultipleOfEntries(strlen(test_str)),
sizeof(cmd) + strlen(test_str));
}
"""
file.Write(code % {
'func_name': func.name,
'init_code': "\n".join(init_code),
'check_code': "\n".join(check_code),
})
class GLcharNHandler(CustomHandler):
"""Handler for functions that pass a single string with an optional len."""
def __init__(self):
CustomHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.cmd_args = []
func.AddCmdArg(Argument('bucket_id', 'GLuint'))
def AddImmediateFunction(self, generator, func):
"""Overrriden from TypeHandler."""
pass
def AddBucketFunction(self, generator, func):
"""Overrriden from TypeHandler."""
pass
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("""error::Error GLES2DecoderImpl::Handle%(name)s(
uint32 immediate_data_size, const gles2::cmds::%(name)s& c) {
GLuint bucket_id = static_cast<GLuint>(c.%(bucket_id)s);
Bucket* bucket = GetBucket(bucket_id);
if (!bucket || bucket->size() == 0) {
return error::kInvalidArguments;
}
std::string str;
if (!bucket->GetAsString(&str)) {
return error::kInvalidArguments;
}
%(gl_func_name)s(0, str.c_str());
return error::kNoError;
}
""" % {
'name': func.name,
'gl_func_name': func.GetGLFunctionName(),
'bucket_id': func.cmd_args[0].name,
})
class IsHandler(TypeHandler):
"""Handler for glIs____ type and glGetError functions."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.AddCmdArg(Argument("result_shm_id", 'uint32'))
func.AddCmdArg(Argument("result_shm_offset", 'uint32'))
    if func.GetInfo('result') is None:
func.AddInfo('result', ['uint32'])
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)sshared_memory_id_, shared_memory_offset_);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
comma = ""
if len(func.GetOriginalArgs()):
comma =", "
self.WriteValidUnitTest(func, file, valid_test, {
'comma': comma,
})
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)sshared_memory_id_, shared_memory_offset_);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, {
'comma': comma,
})
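    # The bad-shared-memory test below is expanded via WriteValidUnitTest:
    # its command arguments are all valid, and only the result shm id or
    # offset is deliberately broken.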
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgsBadSharedMemoryId) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skInvalidSharedMemoryId, shared_memory_offset_);
EXPECT_EQ(error::kOutOfBounds, ExecuteCmd(cmd));
cmd.Init(%(args)s%(comma)sshared_memory_id_, kInvalidSharedMemoryOffset);
EXPECT_EQ(error::kOutOfBounds, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, file, invalid_test, {
'comma': comma,
})
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(
"error::Error GLES2DecoderImpl::Handle%s(\n" % func.name)
file.Write(
" uint32 immediate_data_size, const gles2::cmds::%s& c) {\n" %
func.name)
args = func.GetOriginalArgs()
for arg in args:
arg.WriteGetCode(file)
code = """ typedef cmds::%(func_name)s::Result Result;
Result* result_dst = GetSharedMemoryAs<Result*>(
c.result_shm_id, c.result_shm_offset, sizeof(*result_dst));
if (!result_dst) {
return error::kOutOfBounds;
}
"""
file.Write(code % {'func_name': func.name})
func.WriteHandlerValidation(file)
file.Write(" *result_dst = %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
impl_func = func.GetInfo('impl_func')
if impl_func == None or impl_func == True:
error_value = func.GetInfo("error_value") or "GL_FALSE"
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
self.WriteTraceEvent(func, file)
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
file.Write(" typedef cmds::%s::Result Result;\n" % func.name)
file.Write(" Result* result = GetResultAs<Result*>();\n")
file.Write(" if (!result) {\n")
file.Write(" return %s;\n" % error_value)
file.Write(" }\n")
file.Write(" *result = 0;\n")
arg_string = func.MakeOriginalArgString("")
comma = ""
if len(arg_string) > 0:
comma = ", "
file.Write(
" helper_->%s(%s%sGetResultShmId(), GetResultShmOffset());\n" %
(func.name, arg_string, comma))
file.Write(" WaitForCmd();\n")
file.Write(" %s result_value = *result;\n" % func.return_type)
file.Write(' GPU_CLIENT_LOG("returned " << result_value);\n')
file.Write(" CheckGLError();\n")
file.Write(" return result_value;\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
client_test = func.GetInfo('client_test')
if client_test == None or client_test == True:
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
typedef cmds::%(name)s::Result Result;
Cmds expected;
ExpectedMemoryInfo result1 =
GetExpectedResultMemory(sizeof(cmds::%(name)s::Result));
expected.cmd.Init(1, result1.id, result1.offset);
EXPECT_CALL(*command_buffer(), OnFlush())
.WillOnce(SetMemory(result1.ptr, uint32(1)))
.RetiresOnSaturation();
GLboolean result = gl_->%(name)s(1);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_TRUE(result);
}
"""
file.Write(code % {
'name': func.name,
})
class STRnHandler(TypeHandler):
"""Handler for GetProgramInfoLog, GetShaderInfoLog, GetShaderSource, and
GetTranslatedShaderSourceANGLE."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
    # Remove all but the first cmd arg.
cmd_args = func.GetCmdArgs()
func.ClearCmdArgs()
func.AddCmdArg(cmd_args[0])
# add on a bucket id.
func.AddCmdArg(Argument('bucket_id', 'uint32'))
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
code_1 = """%(return_type)s GLES2Implementation::%(func_name)s(%(args)s) {
GPU_CLIENT_SINGLE_THREAD_CHECK();
"""
code_2 = """ GPU_CLIENT_LOG("[" << GetLogPrefix()
<< "] gl%(func_name)s" << "("
<< %(arg0)s << ", "
<< %(arg1)s << ", "
<< static_cast<void*>(%(arg2)s) << ", "
<< static_cast<void*>(%(arg3)s) << ")");
helper_->SetBucketSize(kResultBucketId, 0);
helper_->%(func_name)s(%(id_name)s, kResultBucketId);
std::string str;
GLsizei max_size = 0;
if (GetBucketAsString(kResultBucketId, &str)) {
    if (%(bufsize_name)s > 0) {
max_size =
std::min(static_cast<size_t>(%(bufsize_name)s) - 1, str.size());
memcpy(%(dest_name)s, str.c_str(), max_size);
%(dest_name)s[max_size] = '\\0';
GPU_CLIENT_LOG("------\\n" << %(dest_name)s << "\\n------");
}
}
if (%(length_name)s != NULL) {
*%(length_name)s = max_size;
}
CheckGLError();
}
"""
args = func.GetOriginalArgs()
str_args = {
'return_type': func.return_type,
'func_name': func.original_name,
'args': func.MakeTypedOriginalArgString(""),
'id_name': args[0].name,
'bufsize_name': args[1].name,
'length_name': args[2].name,
'dest_name': args[3].name,
'arg0': args[0].name,
'arg1': args[1].name,
'arg2': args[2].name,
'arg3': args[3].name,
}
file.Write(code_1 % str_args)
func.WriteDestinationInitalizationValidation(file)
file.Write(code_2 % str_args)
def WriteServiceUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_F(%(test_name)s, %(name)sValidArgs) {
const char* kInfo = "hello";
const uint32 kBucketId = 123;
SpecializedSetup<cmds::%(name)s, 0>(true);
%(expect_len_code)s
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s))
.WillOnce(DoAll(SetArgumentPointee<2>(strlen(kInfo)),
SetArrayArgument<3>(kInfo, kInfo + strlen(kInfo) + 1)));
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
CommonDecoder::Bucket* bucket = decoder_->GetBucket(kBucketId);
ASSERT_TRUE(bucket != NULL);
EXPECT_EQ(strlen(kInfo) + 1, bucket->size());
EXPECT_EQ(0, memcmp(bucket->GetData(0, bucket->size()), kInfo,
bucket->size()));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
args = func.GetOriginalArgs()
id_name = args[0].GetValidGLArg(func, 0, 0)
get_len_func = func.GetInfo('get_len_func')
get_len_enum = func.GetInfo('get_len_enum')
sub = {
'id_name': id_name,
'get_len_func': get_len_func,
'get_len_enum': get_len_enum,
'gl_args': '%s, strlen(kInfo) + 1, _, _' %
args[0].GetValidGLArg(func, 0, 0),
'args': '%s, kBucketId' % args[0].GetValidArg(func, 0, 0),
'expect_len_code': '',
}
if get_len_func and get_len_func[0:2] == 'gl':
sub['expect_len_code'] = (
" EXPECT_CALL(*gl_, %s(%s, %s, _))\n"
" .WillOnce(SetArgumentPointee<2>(strlen(kInfo) + 1));") % (
get_len_func[2:], id_name, get_len_enum)
self.WriteValidUnitTest(func, file, valid_test, sub)
invalid_test = """
TEST_F(%(test_name)s, %(name)sInvalidArgs) {
const uint32 kBucketId = 123;
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _, _, _))
.Times(0);
cmds::%(name)s cmd;
cmd.Init(kInvalidClientId, kBucketId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
self.WriteValidUnitTest(func, file, invalid_test)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
class FunctionInfo(object):
"""Holds info about a function."""
def __init__(self, info, type_handler):
for key in info:
setattr(self, key, info[key])
self.type_handler = type_handler
    if 'type' not in info:
self.type = ''
class Argument(object):
"""A class that represents a function argument."""
cmd_type_map_ = {
'GLenum': 'uint32',
'GLint': 'int32',
'GLintptr': 'int32',
'GLsizei': 'int32',
'GLsizeiptr': 'int32',
'GLfloat': 'float',
'GLclampf': 'float',
}
need_validation_ = ['GLsizei*', 'GLboolean*', 'GLenum*', 'GLint*']
def __init__(self, name, type):
self.name = name
self.optional = type.endswith("Optional*")
if self.optional:
type = type[:-9] + "*"
self.type = type
if type in self.cmd_type_map_:
self.cmd_type = self.cmd_type_map_[type]
else:
self.cmd_type = 'uint32'
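  # Example (hypothetical): Argument('width', 'GLsizei') picks cmd_type
  # 'int32' from the map above, while a type missing from the map, such as
  # 'GLuint', falls through to the 'uint32' default.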
def IsPointer(self):
"""Returns true if argument is a pointer."""
return False
def AddCmdArgs(self, args):
"""Adds command arguments for this argument to the given list."""
return args.append(self)
def AddInitArgs(self, args):
"""Adds init arguments for this argument to the given list."""
return args.append(self)
def GetValidArg(self, func, offset, index):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(offset)
    if valid_arg is not None:
return valid_arg
return str(offset + 1)
def GetValidClientSideArg(self, func, offset, index):
"""Gets a valid value for this argument."""
return str(offset + 1)
def GetValidClientSideCmdArg(self, func, offset, index):
"""Gets a valid value for this argument."""
return str(offset + 1)
def GetValidGLArg(self, func, offset, index):
"""Gets a valid GL value for this argument."""
valid_arg = func.GetValidArg(offset)
    if valid_arg is not None:
return valid_arg
return str(offset + 1)
def GetNumInvalidValues(self, func):
"""returns the number of invalid values to be tested."""
return 0
def GetInvalidArg(self, offset, index):
"""returns an invalid value and expected parse result by index."""
return ("---ERROR0---", "---ERROR2---", None)
def GetLogArg(self):
"""Get argument appropriate for LOG macro."""
if self.type == 'GLboolean':
return 'GLES2Util::GetStringBool(%s)' % self.name
if self.type == 'GLenum':
return 'GLES2Util::GetStringEnum(%s)' % self.name
return self.name
def WriteGetCode(self, file):
"""Writes the code to get an argument from a command structure."""
file.Write(" %s %s = static_cast<%s>(c.%s);\n" %
(self.type, self.name, self.type, self.name))
def WriteValidationCode(self, file, func):
"""Writes the validation code for an argument."""
pass
def WriteClientSideValidationCode(self, file, func):
"""Writes the validation code for an argument."""
pass
def WriteDestinationInitalizationValidation(self, file, func):
"""Writes the client side destintion initialization validation."""
pass
def WriteDestinationInitalizationValidatationIfNeeded(self, file, func):
"""Writes the client side destintion initialization validation if needed."""
parts = self.type.split(" ")
if len(parts) > 1:
return
if parts[0] in self.need_validation_:
file.Write(
" GPU_CLIENT_VALIDATE_DESTINATION_%sINITALIZATION(%s, %s);\n" %
("OPTIONAL_" if self.optional else "", self.type[:-1], self.name))
def WriteGetAddress(self, file):
"""Writes the code to get the address this argument refers to."""
pass
def GetImmediateVersion(self):
"""Gets the immediate version of this argument."""
return self
def GetBucketVersion(self):
"""Gets the bucket version of this argument."""
return self
class BoolArgument(Argument):
"""class for GLboolean"""
def __init__(self, name, type):
Argument.__init__(self, name, 'GLboolean')
def GetValidArg(self, func, offset, index):
"""Gets a valid value for this argument."""
return 'true'
def GetValidClientSideArg(self, func, offset, index):
"""Gets a valid value for this argument."""
return 'true'
def GetValidClientSideCmdArg(self, func, offset, index):
"""Gets a valid value for this argument."""
return 'true'
def GetValidGLArg(self, func, offset, index):
"""Gets a valid GL value for this argument."""
return 'true'
class UniformLocationArgument(Argument):
"""class for uniform locations."""
def __init__(self, name):
Argument.__init__(self, name, "GLint")
def WriteGetCode(self, file):
"""Writes the code to get an argument from a command structure."""
code = """ %s %s = static_cast<%s>(c.%s);
"""
file.Write(code % (self.type, self.name, self.type, self.name))
def GetValidArg(self, func, offset, index):
"""Gets a valid value for this argument."""
return "%d" % (offset + 1)
class DataSizeArgument(Argument):
"""class for data_size which Bucket commands do not need."""
def __init__(self, name):
Argument.__init__(self, name, "uint32")
def GetBucketVersion(self):
return None
class SizeArgument(Argument):
"""class for GLsizei and GLsizeiptr."""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def GetNumInvalidValues(self, func):
"""overridden from Argument."""
if func.is_immediate:
return 0
return 1
def GetInvalidArg(self, offset, index):
"""overridden from Argument."""
return ("-1", "kNoError", "GL_INVALID_VALUE")
def WriteValidationCode(self, file, func):
"""overridden from Argument."""
file.Write(" if (%s < 0) {\n" % self.name)
file.Write(
" LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, \"gl%s\", \"%s < 0\");\n" %
(func.original_name, self.name))
file.Write(" return error::kNoError;\n")
file.Write(" }\n")
def WriteClientSideValidationCode(self, file, func):
"""overridden from Argument."""
file.Write(" if (%s < 0) {\n" % self.name)
file.Write(
" SetGLError(GL_INVALID_VALUE, \"gl%s\", \"%s < 0\");\n" %
(func.original_name, self.name))
file.Write(" return;\n")
file.Write(" }\n")
class SizeNotNegativeArgument(SizeArgument):
"""class for GLsizeiNotNegative. It's NEVER allowed to be negative"""
def __init__(self, name, type, gl_type):
SizeArgument.__init__(self, name, gl_type)
def GetInvalidArg(self, offset, index):
"""overridden from SizeArgument."""
return ("-1", "kOutOfBounds", "GL_NO_ERROR")
def WriteValidationCode(self, file, func):
"""overridden from SizeArgument."""
pass
class EnumBaseArgument(Argument):
"""Base class for EnumArgument, IntArgument and ValidatedBoolArgument"""
def __init__(self, name, gl_type, type, gl_error):
Argument.__init__(self, name, gl_type)
self.local_type = type
self.gl_error = gl_error
name = type[len(gl_type):]
self.type_name = name
self.enum_info = _ENUM_LISTS[name]
def WriteValidationCode(self, file, func):
file.Write(" if (!validators_->%s.IsValid(%s)) {\n" %
(ToUnderscore(self.type_name), self.name))
if self.gl_error == "GL_INVALID_ENUM":
file.Write(
" LOCAL_SET_GL_ERROR_INVALID_ENUM(\"gl%s\", %s, \"%s\");\n" %
(func.original_name, self.name, self.name))
else:
file.Write(
" LOCAL_SET_GL_ERROR(%s, \"gl%s\", \"%s %s\");\n" %
(self.gl_error, func.original_name, self.name, self.gl_error))
file.Write(" return error::kNoError;\n")
file.Write(" }\n")
def GetValidArg(self, func, offset, index):
valid_arg = func.GetValidArg(offset)
    if valid_arg is not None:
return valid_arg
if 'valid' in self.enum_info:
valid = self.enum_info['valid']
num_valid = len(valid)
if index >= num_valid:
index = num_valid - 1
return valid[index]
return str(offset + 1)
def GetValidClientSideArg(self, func, offset, index):
"""Gets a valid value for this argument."""
return self.GetValidArg(func, offset, index)
def GetValidClientSideCmdArg(self, func, offset, index):
"""Gets a valid value for this argument."""
return self.GetValidArg(func, offset, index)
def GetValidGLArg(self, func, offset, index):
"""Gets a valid value for this argument."""
return self.GetValidArg(func, offset, index)
def GetNumInvalidValues(self, func):
"""returns the number of invalid values to be tested."""
if 'invalid' in self.enum_info:
invalid = self.enum_info['invalid']
return len(invalid)
return 0
def GetInvalidArg(self, offset, index):
"""returns an invalid value by index."""
if 'invalid' in self.enum_info:
invalid = self.enum_info['invalid']
num_invalid = len(invalid)
if index >= num_invalid:
index = num_invalid - 1
return (invalid[index], "kNoError", self.gl_error)
return ("---ERROR1---", "kNoError", self.gl_error)
class EnumArgument(EnumBaseArgument):
"""A class that represents a GLenum argument"""
def __init__(self, name, type):
EnumBaseArgument.__init__(self, name, "GLenum", type, "GL_INVALID_ENUM")
def GetLogArg(self):
"""Overridden from Argument."""
return ("GLES2Util::GetString%s(%s)" %
(self.type_name, self.name))
class IntArgument(EnumBaseArgument):
"""A class for a GLint argument that can only except specific values.
For example glTexImage2D takes a GLint for its internalformat
argument instead of a GLenum.
"""
def __init__(self, name, type):
EnumBaseArgument.__init__(self, name, "GLint", type, "GL_INVALID_VALUE")
class ValidatedBoolArgument(EnumBaseArgument):
"""A class for a GLboolean argument that can only except specific values.
For example glUniformMatrix takes a GLboolean for it's transpose but it
must be false.
"""
def __init__(self, name, type):
EnumBaseArgument.__init__(self, name, "GLboolean", type, "GL_INVALID_VALUE")
def GetLogArg(self):
"""Overridden from Argument."""
return 'GLES2Util::GetStringBool(%s)' % self.name
class ImmediatePointerArgument(Argument):
"""A class that represents an immediate argument to a function.
An immediate argument is one where the data follows the command.
"""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def AddCmdArgs(self, args):
"""Overridden from Argument."""
pass
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = GetImmediateDataAs<%s>(\n" %
(self.type, self.name, self.type))
file.Write(" c, data_size, immediate_data_size);\n")
def WriteValidationCode(self, file, func):
"""Overridden from Argument."""
file.Write(" if (%s == NULL) {\n" % self.name)
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def GetImmediateVersion(self):
"""Overridden from Argument."""
return None
def WriteDestinationInitalizationValidation(self, file, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(file, func)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
class BucketPointerArgument(Argument):
"""A class that represents an bucket argument to a function."""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def AddCmdArgs(self, args):
"""Overridden from Argument."""
pass
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = bucket->GetData(0, data_size);\n" %
(self.type, self.name))
def WriteValidationCode(self, file, func):
"""Overridden from Argument."""
pass
def GetImmediateVersion(self):
"""Overridden from Argument."""
return None
def WriteDestinationInitalizationValidation(self, file, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(file, func)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
class PointerArgument(Argument):
"""A class that represents a pointer argument to a function."""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def IsPointer(self):
"""Returns true if argument is a pointer."""
return True
def GetValidArg(self, func, offset, index):
"""Overridden from Argument."""
return "shared_memory_id_, shared_memory_offset_"
def GetValidGLArg(self, func, offset, index):
"""Overridden from Argument."""
return "reinterpret_cast<%s>(shared_memory_address_)" % self.type
def GetNumInvalidValues(self, func):
"""Overridden from Argument."""
return 2
def GetInvalidArg(self, offset, index):
"""Overridden from Argument."""
if index == 0:
return ("kInvalidSharedMemoryId, 0", "kOutOfBounds", None)
else:
return ("shared_memory_id_, kInvalidSharedMemoryOffset",
"kOutOfBounds", None)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
def AddCmdArgs(self, args):
"""Overridden from Argument."""
args.append(Argument("%s_shm_id" % self.name, 'uint32'))
args.append(Argument("%s_shm_offset" % self.name, 'uint32'))
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = GetSharedMemoryAs<%s>(\n" %
(self.type, self.name, self.type))
file.Write(
" c.%s_shm_id, c.%s_shm_offset, data_size);\n" %
(self.name, self.name))
def WriteGetAddress(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = GetSharedMemoryAs<%s>(\n" %
(self.type, self.name, self.type))
file.Write(
" %s_shm_id, %s_shm_offset, %s_size);\n" %
(self.name, self.name, self.name))
def WriteValidationCode(self, file, func):
"""Overridden from Argument."""
file.Write(" if (%s == NULL) {\n" % self.name)
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def GetImmediateVersion(self):
"""Overridden from Argument."""
return ImmediatePointerArgument(self.name, self.type)
def GetBucketVersion(self):
"""Overridden from Argument."""
if self.type == "const char*":
return InputStringBucketArgument(self.name, self.type)
return BucketPointerArgument(self.name, self.type)
def WriteDestinationInitalizationValidation(self, file, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(file, func)
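  # Illustration (hypothetical): a PointerArgument named 'pixels' adds two
  # cmd fields, 'pixels_shm_id' and 'pixels_shm_offset'; the decoder then
  # rehydrates the pointer with GetSharedMemoryAs<>, bounds-checked against
  # data_size.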
class InputStringBucketArgument(Argument):
"""An string input argument where the string is passed in a bucket."""
def __init__(self, name, type):
Argument.__init__(self, name + "_bucket_id", "uint32")
def WriteGetCode(self, file):
"""Overridden from Argument."""
code = """
Bucket* %(name)s_bucket = GetBucket(c.%(name)s);
if (!%(name)s_bucket) {
return error::kInvalidArguments;
}
std::string %(name)s_str;
if (!%(name)s_bucket->GetAsString(&%(name)s_str)) {
return error::kInvalidArguments;
}
const char* %(name)s = %(name)s_str.c_str();
"""
file.Write(code % {
'name': self.name,
})
def GetValidArg(self, func, offset, index):
return "kNameBucketId"
def GetValidGLArg(self, func, offset, index):
return "_"
class NonImmediatePointerArgument(PointerArgument):
"""A pointer argument that stays a pointer even in an immediate cmd."""
def __init__(self, name, type):
PointerArgument.__init__(self, name, type)
def IsPointer(self):
"""Returns true if argument is a pointer."""
return False
def GetImmediateVersion(self):
"""Overridden from Argument."""
return self
class ResourceIdArgument(Argument):
"""A class that represents a resource id argument to a function."""
def __init__(self, name, type):
match = re.match("(GLid\w+)", type)
self.resource_type = match.group(1)[4:]
type = type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, type)
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(" %s %s = c.%s;\n" % (self.type, self.name, self.name))
def GetValidArg(self, func, offset, index):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func, offset, index):
return "kService%sId" % self.resource_type
class ResourceIdBindArgument(Argument):
"""Represents a resource id argument to a bind function."""
def __init__(self, name, type):
match = re.match("(GLidBind\w+)", type)
self.resource_type = match.group(1)[8:]
type = type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, type)
def WriteGetCode(self, file):
"""Overridden from Argument."""
code = """ %(type)s %(name)s = c.%(name)s;
"""
file.Write(code % {'type': self.type, 'name': self.name})
def GetValidArg(self, func, offset, index):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func, offset, index):
return "kService%sId" % self.resource_type
class ResourceIdZeroArgument(Argument):
"""Represents a resource id argument to a function that can be zero."""
def __init__(self, name, type):
match = re.match("(GLidZero\w+)", type)
self.resource_type = match.group(1)[8:]
type = type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, type)
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(" %s %s = c.%s;\n" % (self.type, self.name, self.name))
def GetValidArg(self, func, offset, index):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func, offset, index):
return "kService%sId" % self.resource_type
def GetNumInvalidValues(self, func):
"""returns the number of invalid values to be tested."""
return 1
def GetInvalidArg(self, offset, index):
"""returns an invalid value by index."""
return ("kInvalidClientId", "kNoError", "GL_INVALID_VALUE")
class Function(object):
"""A class that represents a function."""
def __init__(self, original_name, name, info, return_type, original_args,
args_for_cmds, cmd_args, init_args, num_pointer_args):
self.name = name
self.original_name = original_name
self.info = info
self.type_handler = info.type_handler
self.return_type = return_type
self.original_args = original_args
self.num_pointer_args = num_pointer_args
self.can_auto_generate = num_pointer_args == 0 and return_type == "void"
self.cmd_args = cmd_args
self.init_args = init_args
self.InitFunction()
self.args_for_cmds = args_for_cmds
self.is_immediate = False
def IsType(self, type_name):
"""Returns true if function is a certain type."""
return self.info.type == type_name
def InitFunction(self):
"""Calls the init function for the type handler."""
self.type_handler.InitFunction(self)
def GetInfo(self, name):
"""Returns a value from the function info for this function."""
if hasattr(self.info, name):
return getattr(self.info, name)
return None
def GetValidArg(self, index):
"""Gets a valid arg from the function info if one exists."""
valid_args = self.GetInfo('valid_args')
if valid_args and str(index) in valid_args:
return valid_args[str(index)]
return None
def AddInfo(self, name, value):
"""Adds an info."""
setattr(self.info, name, value)
def IsCoreGLFunction(self):
return (not self.GetInfo('extension') and
not self.GetInfo('pepper_interface'))
def InPepperInterface(self, interface):
ext = self.GetInfo('pepper_interface')
if not interface.GetName():
return self.IsCoreGLFunction()
return ext == interface.GetName()
def InAnyPepperExtension(self):
return self.IsCoreGLFunction() or self.GetInfo('pepper_interface')
def GetGLFunctionName(self):
"""Gets the function to call to execute GL for this command."""
if self.GetInfo('decoder_func'):
return self.GetInfo('decoder_func')
return "gl%s" % self.original_name
def GetGLTestFunctionName(self):
gl_func_name = self.GetInfo('gl_test_func')
    if gl_func_name is None:
gl_func_name = self.GetGLFunctionName()
if gl_func_name.startswith("gl"):
gl_func_name = gl_func_name[2:]
else:
gl_func_name = self.original_name
return gl_func_name
def AddCmdArg(self, arg):
"""Adds a cmd argument to this function."""
self.cmd_args.append(arg)
def GetCmdArgs(self):
"""Gets the command args for this function."""
return self.cmd_args
def ClearCmdArgs(self):
"""Clears the command args for this function."""
self.cmd_args = []
def GetInitArgs(self):
"""Gets the init args for this function."""
return self.init_args
def GetOriginalArgs(self):
"""Gets the original arguments to this function."""
return self.original_args
def GetLastOriginalArg(self):
"""Gets the last original argument to this function."""
    return self.original_args[-1]
def __GetArgList(self, arg_string, add_comma):
"""Adds a comma if arg_string is not empty and add_comma is true."""
comma = ""
if add_comma and len(arg_string):
comma = ", "
return "%s%s" % (comma, arg_string)
def MakeTypedOriginalArgString(self, prefix, add_comma = False):
"""Gets a list of arguments as they arg in GL."""
args = self.GetOriginalArgs()
arg_string = ", ".join(
["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
return self.__GetArgList(arg_string, add_comma)
def MakeOriginalArgString(self, prefix, add_comma = False, separator = ", "):
"""Gets the list of arguments as they are in GL."""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name) for arg in args])
return self.__GetArgList(arg_string, add_comma)
def MakeTypedCmdArgString(self, prefix, add_comma = False):
"""Gets a typed list of arguments as they need to be for command buffers."""
args = self.GetCmdArgs()
arg_string = ", ".join(
["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
return self.__GetArgList(arg_string, add_comma)
def MakeCmdArgString(self, prefix, add_comma = False):
"""Gets the list of arguments as they need to be for command buffers."""
args = self.GetCmdArgs()
arg_string = ", ".join(
["%s%s" % (prefix, arg.name) for arg in args])
return self.__GetArgList(arg_string, add_comma)
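  # Illustration for a hypothetical f(GLenum target, GLint param):
  #   MakeTypedOriginalArgString("_") -> "GLenum _target, GLint _param"
  #   MakeCmdArgString("_")           -> "_target, _param"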
def MakeTypedInitString(self, prefix, add_comma = False):
"""Gets a typed list of arguments as they need to be for cmd Init/Set."""
args = self.GetInitArgs()
arg_string = ", ".join(
["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
return self.__GetArgList(arg_string, add_comma)
def MakeInitString(self, prefix, add_comma = False):
"""Gets the list of arguments as they need to be for cmd Init/Set."""
args = self.GetInitArgs()
arg_string = ", ".join(
["%s%s" % (prefix, arg.name) for arg in args])
return self.__GetArgList(arg_string, add_comma)
def MakeLogArgString(self):
"""Makes a string of the arguments for the LOG macros"""
args = self.GetOriginalArgs()
return ' << ", " << '.join([arg.GetLogArg() for arg in args])
def WriteCommandDescription(self, file):
"""Writes a description of the command."""
file.Write("//! Command that corresponds to gl%s.\n" % self.original_name)
def WriteHandlerValidation(self, file):
"""Writes validation code for the function."""
for arg in self.GetOriginalArgs():
arg.WriteValidationCode(file, self)
self.WriteValidationCode(file)
def WriteHandlerImplementation(self, file):
"""Writes the handler implementation for this command."""
self.type_handler.WriteHandlerImplementation(self, file)
def WriteValidationCode(self, file):
"""Writes the validation code for a command."""
pass
def WriteCmdArgFlag(self, file):
"""Writes the cmd kArgFlags constant."""
file.Write(" static const cmd::ArgFlags kArgFlags = cmd::kFixed;\n")
def WriteCmdComputeSize(self, file):
"""Writes the ComputeSize function for the command."""
file.Write(" static uint32 ComputeSize() {\n")
file.Write(
" return static_cast<uint32>(sizeof(ValueType)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteCmdSetHeader(self, file):
"""Writes the cmd's SetHeader function."""
file.Write(" void SetHeader() {\n")
file.Write(" header.SetCmd<ValueType>();\n")
file.Write(" }\n")
file.Write("\n")
def WriteCmdInit(self, file):
"""Writes the cmd's Init function."""
file.Write(" void Init(%s) {\n" % self.MakeTypedCmdArgString("_"))
file.Write(" SetHeader();\n")
args = self.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" }\n")
file.Write("\n")
def WriteCmdSet(self, file):
"""Writes the cmd's Set function."""
copy_args = self.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s) {\n" %
self.MakeTypedCmdArgString("_", True))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
file.Write(" return NextCmdAddress<ValueType>(cmd);\n")
file.Write(" }\n")
file.Write("\n")
def WriteStruct(self, file):
self.type_handler.WriteStruct(self, file)
def WriteDocs(self, file):
self.type_handler.WriteDocs(self, file)
def WriteCmdHelper(self, file):
"""Writes the cmd's helper."""
self.type_handler.WriteCmdHelper(self, file)
def WriteServiceImplementation(self, file):
"""Writes the service implementation for a command."""
self.type_handler.WriteServiceImplementation(self, file)
def WriteServiceUnitTest(self, file):
"""Writes the service implementation for a command."""
self.type_handler.WriteServiceUnitTest(self, file)
def WriteGLES2CLibImplementation(self, file):
"""Writes the GLES2 C Lib Implemention."""
self.type_handler.WriteGLES2CLibImplementation(self, file)
def WriteGLES2InterfaceHeader(self, file):
"""Writes the GLES2 Interface declaration."""
self.type_handler.WriteGLES2InterfaceHeader(self, file)
def WriteGLES2InterfaceStub(self, file):
"""Writes the GLES2 Interface Stub declaration."""
self.type_handler.WriteGLES2InterfaceStub(self, file)
def WriteGLES2InterfaceStubImpl(self, file):
"""Writes the GLES2 Interface Stub declaration."""
self.type_handler.WriteGLES2InterfaceStubImpl(self, file)
def WriteGLES2ImplementationHeader(self, file):
"""Writes the GLES2 Implemention declaration."""
self.type_handler.WriteGLES2ImplementationHeader(self, file)
def WriteGLES2Implementation(self, file):
"""Writes the GLES2 Implemention definition."""
self.type_handler.WriteGLES2Implementation(self, file)
def WriteGLES2TraceImplementationHeader(self, file):
"""Writes the GLES2 Trace Implemention declaration."""
self.type_handler.WriteGLES2TraceImplementationHeader(self, file)
def WriteGLES2TraceImplementation(self, file):
"""Writes the GLES2 Trace Implemention definition."""
self.type_handler.WriteGLES2TraceImplementation(self, file)
def WriteGLES2Header(self, file):
"""Writes the GLES2 Implemention unit test."""
self.type_handler.WriteGLES2Header(self, file)
def WriteGLES2ImplementationUnitTest(self, file):
"""Writes the GLES2 Implemention unit test."""
self.type_handler.WriteGLES2ImplementationUnitTest(self, file)
def WriteDestinationInitalizationValidation(self, file):
"""Writes the client side destintion initialization validation."""
self.type_handler.WriteDestinationInitalizationValidation(self, file)
def WriteFormatTest(self, file):
"""Writes the cmd's format test."""
self.type_handler.WriteFormatTest(self, file)
class PepperInterface(object):
"""A class that represents a function."""
def __init__(self, info):
self.name = info["name"]
self.dev = info["dev"]
def GetName(self):
return self.name
def GetInterfaceName(self):
upperint = ""
dev = ""
if self.name:
upperint = "_" + self.name.upper()
if self.dev:
dev = "_DEV"
return "PPB_OPENGLES2%s%s_INTERFACE" % (upperint, dev)
def GetInterfaceString(self):
dev = ""
if self.dev:
dev = "(Dev)"
return "PPB_OpenGLES2%s%s" % (self.name, dev)
def GetStructName(self):
dev = ""
if self.dev:
dev = "_Dev"
return "PPB_OpenGLES2%s%s" % (self.name, dev)
class ImmediateFunction(Function):
"""A class that represnets an immediate function command."""
def __init__(self, func):
new_args = []
for arg in func.GetOriginalArgs():
new_arg = arg.GetImmediateVersion()
if new_arg:
new_args.append(new_arg)
cmd_args = []
new_args_for_cmds = []
for arg in func.args_for_cmds:
new_arg = arg.GetImmediateVersion()
if new_arg:
new_args_for_cmds.append(new_arg)
new_arg.AddCmdArgs(cmd_args)
new_init_args = []
for arg in new_args_for_cmds:
arg.AddInitArgs(new_init_args)
Function.__init__(
self,
func.original_name,
"%sImmediate" % func.name,
func.info,
func.return_type,
new_args,
new_args_for_cmds,
cmd_args,
new_init_args,
0)
self.is_immediate = True
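  # Sketch of the transform (hypothetical): glUniform1fv(GLint location,
  # GLsizei count, const GLfloat* v) becomes Uniform1fvImmediate; the
  # pointer argument drops its shm_id/shm_offset cmd fields and the float
  # data instead travels inline, right after the command header.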
def WriteCommandDescription(self, file):
"""Overridden from Function"""
file.Write("//! Immediate version of command that corresponds to gl%s.\n" %
self.original_name)
def WriteServiceImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateServiceImplementation(self, file)
def WriteHandlerImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateHandlerImplementation(self, file)
def WriteServiceUnitTest(self, file):
"""Writes the service implementation for a command."""
self.type_handler.WriteImmediateServiceUnitTest(self, file)
def WriteValidationCode(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateValidationCode(self, file)
def WriteCmdArgFlag(self, file):
"""Overridden from Function"""
file.Write(" static const cmd::ArgFlags kArgFlags = cmd::kAtLeastN;\n")
def WriteCmdComputeSize(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdComputeSize(self, file)
def WriteCmdSetHeader(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdSetHeader(self, file)
def WriteCmdInit(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdInit(self, file)
def WriteCmdSet(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdSet(self, file)
def WriteCmdHelper(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdHelper(self, file)
def WriteFormatTest(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateFormatTest(self, file)
class BucketFunction(Function):
"""A class that represnets a bucket version of a function command."""
def __init__(self, func):
new_args = []
for arg in func.GetOriginalArgs():
new_arg = arg.GetBucketVersion()
if new_arg:
new_args.append(new_arg)
cmd_args = []
new_args_for_cmds = []
for arg in func.args_for_cmds:
new_arg = arg.GetBucketVersion()
if new_arg:
new_args_for_cmds.append(new_arg)
new_arg.AddCmdArgs(cmd_args)
new_init_args = []
for arg in new_args_for_cmds:
arg.AddInitArgs(new_init_args)
Function.__init__(
self,
func.original_name,
"%sBucket" % func.name,
func.info,
func.return_type,
new_args,
new_args_for_cmds,
cmd_args,
new_init_args,
0)
# def InitFunction(self):
# """Overridden from Function"""
# pass
def WriteCommandDescription(self, file):
"""Overridden from Function"""
file.Write("//! Bucket version of command that corresponds to gl%s.\n" %
self.original_name)
def WriteServiceImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteBucketServiceImplementation(self, file)
def WriteHandlerImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteBucketHandlerImplementation(self, file)
def WriteServiceUnitTest(self, file):
"""Writes the service implementation for a command."""
self.type_handler.WriteBucketServiceUnitTest(self, file)
def CreateArg(arg_string):
"""Creates an Argument."""
arg_parts = arg_string.split()
if len(arg_parts) == 1 and arg_parts[0] == 'void':
return None
# Is this a pointer argument?
elif arg_string.find('*') >= 0:
if arg_parts[0] == 'NonImmediate':
return NonImmediatePointerArgument(
arg_parts[-1],
" ".join(arg_parts[1:-1]))
else:
return PointerArgument(
arg_parts[-1],
" ".join(arg_parts[0:-1]))
# Is this a resource argument? Must come after pointer check.
elif arg_parts[0].startswith('GLidBind'):
return ResourceIdBindArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLidZero'):
return ResourceIdZeroArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLid'):
return ResourceIdArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLenum') and len(arg_parts[0]) > 6:
return EnumArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLboolean') and len(arg_parts[0]) > 9:
return ValidatedBoolArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLboolean'):
return BoolArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLintUniformLocation'):
return UniformLocationArgument(arg_parts[-1])
elif (arg_parts[0].startswith('GLint') and len(arg_parts[0]) > 5 and
not arg_parts[0].startswith('GLintptr')):
return IntArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif (arg_parts[0].startswith('GLsizeiNotNegative') or
arg_parts[0].startswith('GLintptrNotNegative')):
return SizeNotNegativeArgument(arg_parts[-1],
" ".join(arg_parts[0:-1]),
arg_parts[0][0:-11])
elif arg_parts[0].startswith('GLsize'):
return SizeArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
else:
return Argument(arg_parts[-1], " ".join(arg_parts[0:-1]))
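# Illustrative examples of the dispatch above (the arg strings are assumed,
# not taken from the real cmd_buffer_functions.txt):
#   CreateArg("void")                       -> None
#   CreateArg("const char* name")           -> PointerArgument("name", "const char*")
#   CreateArg("GLenumTextureTarget target") -> EnumArgument("target", "GLenumTextureTarget")
#   CreateArg("GLidTexture texture")        -> ResourceIdArgument("texture", "GLidTexture")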
class GLGenerator(object):
"""A class to generate GL command buffers."""
_function_re = re.compile(r'GL_APICALL(.*?)GL_APIENTRY (.*?) \((.*?)\);')
def __init__(self, verbose):
self.original_functions = []
self.functions = []
self.verbose = verbose
self.errors = 0
self._function_info = {}
self._empty_type_handler = TypeHandler()
self._empty_function_info = FunctionInfo({}, self._empty_type_handler)
self.pepper_interfaces = []
self.interface_info = {}
self._type_handlers = {
'Bind': BindHandler(),
'Create': CreateHandler(),
'Custom': CustomHandler(),
'Data': DataHandler(),
'Delete': DeleteHandler(),
'DELn': DELnHandler(),
'GENn': GENnHandler(),
'GETn': GETnHandler(),
'GLchar': GLcharHandler(),
'GLcharN': GLcharNHandler(),
'HandWritten': HandWrittenHandler(),
'Is': IsHandler(),
'Manual': ManualHandler(),
'PUT': PUTHandler(),
'PUTn': PUTnHandler(),
'PUTXn': PUTXnHandler(),
'StateSet': StateSetHandler(),
'StateSetRGBAlpha': StateSetRGBAlphaHandler(),
'StateSetFrontBack': StateSetFrontBackHandler(),
'StateSetFrontBackSeparate': StateSetFrontBackSeparateHandler(),
'StateSetNamedParameter': StateSetNamedParameter(),
'STRn': STRnHandler(),
'Todo': TodoHandler(),
}
for func_name in _FUNCTION_INFO:
info = _FUNCTION_INFO[func_name]
type = ''
if 'type' in info:
type = info['type']
self._function_info[func_name] = FunctionInfo(info,
self.GetTypeHandler(type))
for interface in _PEPPER_INTERFACES:
interface = PepperInterface(interface)
self.pepper_interfaces.append(interface)
self.interface_info[interface.GetName()] = interface
def AddFunction(self, func):
"""Adds a function."""
self.functions.append(func)
def GetTypeHandler(self, name):
"""Gets a type info for the given type."""
if len(name):
if name in self._type_handlers:
return self._type_handlers[name]
else:
raise KeyError("no such type handler: %s" % name)
return self._empty_type_handler
def GetFunctionInfo(self, name):
"""Gets a type info for the given function name."""
if name in self._function_info:
return self._function_info[name]
return self._empty_function_info
def Log(self, msg):
"""Prints something if verbose is true."""
if self.verbose:
print msg
def Error(self, msg):
"""Prints an error."""
print "Error: %s" % msg
self.errors += 1
def WriteLicense(self, file):
"""Writes the license."""
file.Write(_LICENSE)
def WriteNamespaceOpen(self, file):
"""Writes the code for the namespace."""
file.Write("namespace gpu {\n")
file.Write("namespace gles2 {\n")
file.Write("\n")
def WriteNamespaceClose(self, file):
"""Writes the code to close the namespace."""
file.Write("} // namespace gles2\n")
file.Write("} // namespace gpu\n")
file.Write("\n")
def ParseArgs(self, arg_string):
"""Parses a function arg string."""
args = []
num_pointer_args = 0
parts = arg_string.split(',')
is_gl_enum = False
for arg_string in parts:
if arg_string.startswith('GLenum '):
is_gl_enum = True
arg = CreateArg(arg_string)
if arg:
args.append(arg)
if arg.IsPointer():
num_pointer_args += 1
return (args, num_pointer_args, is_gl_enum)
def ParseGLH(self, filename):
"""Parses the cmd_buffer_functions.txt file and extracts the functions"""
f = open("gpu/command_buffer/cmd_buffer_functions.txt", "r")
functions = f.read()
f.close()
for line in functions.splitlines():
match = self._function_re.match(line)
if match:
func_name = match.group(2)[2:]
func_info = self.GetFunctionInfo(func_name)
if func_info.type != 'Noop':
return_type = match.group(1).strip()
arg_string = match.group(3)
(args, num_pointer_args, is_gl_enum) = self.ParseArgs(arg_string)
          # Uncomment the following to find out which functions use bare enums.
# if is_gl_enum:
# self.Log("%s uses bare GLenum" % func_name)
args_for_cmds = args
if hasattr(func_info, 'cmd_args'):
(args_for_cmds, num_pointer_args, is_gl_enum) = (
self.ParseArgs(getattr(func_info, 'cmd_args')))
cmd_args = []
for arg in args_for_cmds:
arg.AddCmdArgs(cmd_args)
init_args = []
for arg in args_for_cmds:
arg.AddInitArgs(init_args)
return_arg = CreateArg(return_type + " result")
if return_arg:
init_args.append(return_arg)
f = Function(func_name, func_name, func_info, return_type, args,
args_for_cmds, cmd_args, init_args, num_pointer_args)
self.original_functions.append(f)
gen_cmd = f.GetInfo('gen_cmd')
if gen_cmd == True or gen_cmd == None:
self.AddFunction(f)
f.type_handler.AddImmediateFunction(self, f)
f.type_handler.AddBucketFunction(self, f)
self.Log("Auto Generated Functions : %d" %
len([f for f in self.functions if f.can_auto_generate or
(not f.IsType('') and not f.IsType('Custom') and
not f.IsType('Todo'))]))
funcs = [f for f in self.functions if not f.can_auto_generate and
(f.IsType('') or f.IsType('Custom') or f.IsType('Todo'))]
self.Log("Non Auto Generated Functions: %d" % len(funcs))
for f in funcs:
self.Log(" %-10s %-20s gl%s" % (f.info.type, f.return_type, f.name))
def WriteCommandIds(self, filename):
"""Writes the command buffer format"""
file = CHeaderWriter(filename)
file.Write("#define GLES2_COMMAND_LIST(OP) \\\n")
id = 256
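    # Numbering starts at 256, which is assumed to correspond to
    # cmd::kLastCommonId: per the enum emitted below, all GLES2 commands
    # start after the common commands.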
for func in self.functions:
file.Write(" %-60s /* %d */ \\\n" %
("OP(%s)" % func.name, id))
id += 1
file.Write("\n")
file.Write("enum CommandId {\n")
file.Write(" kStartPoint = cmd::kLastCommonId, "
"// All GLES2 commands start after this.\n")
file.Write("#define GLES2_CMD_OP(name) k ## name,\n")
file.Write(" GLES2_COMMAND_LIST(GLES2_CMD_OP)\n")
file.Write("#undef GLES2_CMD_OP\n")
file.Write(" kNumCommands\n")
file.Write("};\n")
file.Write("\n")
file.Close()
def WriteFormat(self, filename):
"""Writes the command buffer format"""
file = CHeaderWriter(filename)
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteStruct(file)
file.Write("\n")
file.Close()
def WriteDocs(self, filename):
"""Writes the command buffer doc version of the commands"""
file = CWriter(filename)
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteDocs(file)
file.Write("\n")
file.Close()
def WriteFormatTest(self, filename):
"""Writes the command buffer format test."""
file = CHeaderWriter(
filename,
"// This file contains unit tests for gles2 commmands\n"
"// It is included by gles2_cmd_format_test.cc\n"
"\n")
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteFormatTest(file)
file.Close()
def WriteCmdHelperHeader(self, filename):
"""Writes the gles2 command helper."""
file = CHeaderWriter(filename)
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteCmdHelper(file)
file.Close()
def WriteServiceContextStateHeader(self, filename):
"""Writes the service context state header."""
file = CHeaderWriter(
filename,
"// It is included by context_state.h\n")
file.Write("struct EnableFlags {\n")
file.Write(" EnableFlags();\n")
for capability in _CAPABILITY_FLAGS:
file.Write(" bool %s;\n" % capability['name'])
file.Write("};\n\n")
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
for item in state['states']:
file.Write("%s %s;\n" % (item['type'], item['name']))
file.Write("\n")
file.Close()
def WriteClientContextStateHeader(self, filename):
"""Writes the client context state header."""
file = CHeaderWriter(
filename,
"// It is included by client_context_state.h\n")
file.Write("struct EnableFlags {\n")
file.Write(" EnableFlags();\n")
for capability in _CAPABILITY_FLAGS:
file.Write(" bool %s;\n" % capability['name'])
file.Write("};\n\n")
file.Close()
def WriteContextStateGetters(self, file, class_name):
"""Writes the state getters."""
for gl_type in ["GLint", "GLfloat"]:
file.Write("""
bool %s::GetStateAs%s(
GLenum pname, %s* params, GLsizei* num_written) const {
switch (pname) {
""" % (class_name, gl_type, gl_type))
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if 'enum' in state:
file.Write(" case %s:\n" % state['enum'])
file.Write(" *num_written = %d;\n" % len(state['states']))
file.Write(" if (params) {\n")
for ndx,item in enumerate(state['states']):
file.Write(" params[%d] = static_cast<%s>(%s);\n" %
(ndx, gl_type, item['name']))
file.Write(" }\n")
file.Write(" return true;\n")
else:
for item in state['states']:
file.Write(" case %s:\n" % item['enum'])
file.Write(" *num_written = 1;\n")
file.Write(" if (params) {\n")
file.Write(" params[0] = static_cast<%s>(%s);\n" %
(gl_type, item['name']))
file.Write(" }\n")
file.Write(" return true;\n")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(" *num_written = 1;\n")
file.Write(" if (params) {\n")
file.Write(
" params[0] = static_cast<%s>(enable_flags.%s);\n" %
(gl_type, capability['name']))
file.Write(" }\n")
file.Write(" return true;\n")
file.Write(""" default:
return false;
}
}
""")
def WriteServiceContextStateImpl(self, filename):
"""Writes the context state service implementation."""
file = CHeaderWriter(
filename,
"// It is included by context_state.cc\n")
code = []
for capability in _CAPABILITY_FLAGS:
code.append("%s(%s)" %
(capability['name'],
('false', 'true')['default' in capability]))
file.Write("ContextState::EnableFlags::EnableFlags()\n : %s {\n}\n" %
",\n ".join(code))
file.Write("\n")
file.Write("void ContextState::Initialize() {\n")
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
for item in state['states']:
file.Write(" %s = %s;\n" % (item['name'], item['default']))
file.Write("}\n")
file.Write("""
void ContextState::InitCapabilities() const {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" EnableDisable(GL_%s, enable_flags.%s);\n" %
(capability['name'].upper(), capability['name']))
file.Write("""}
void ContextState::InitState() const {
""")
# We need to sort the keys so the expectations match
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if state['type'] == 'FrontBack':
num_states = len(state['states'])
for ndx, group in enumerate(Grouper(num_states / 2, state['states'])):
args = []
for item in group:
args.append('%s' % item['name'])
file.Write(
" gl%s(%s, %s);\n" %
(state['func'], ('GL_FRONT', 'GL_BACK')[ndx], ", ".join(args)))
elif state['type'] == 'NamedParameter':
for item in state['states']:
if 'extension_flag' in item:
file.Write(" if (feature_info_->feature_flags().%s)\n " %
item['extension_flag'])
file.Write(" gl%s(%s, %s);\n" %
(state['func'], item['enum'], item['name']))
else:
args = []
for item in state['states']:
args.append('%s' % item['name'])
file.Write(" gl%s(%s);\n" % (state['func'], ", ".join(args)))
file.Write("}\n")
file.Write("""bool ContextState::GetEnabled(GLenum cap) const {
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(" return enable_flags.%s;\n" % capability['name'])
file.Write(""" default:
GPU_NOTREACHED();
return false;
}
}
""")
self.WriteContextStateGetters(file, "ContextState")
file.Close()
def WriteClientContextStateImpl(self, filename):
"""Writes the context state client side implementation."""
file = CHeaderWriter(
filename,
"// It is included by client_context_state.cc\n")
code = []
for capability in _CAPABILITY_FLAGS:
code.append("%s(%s)" %
(capability['name'],
('false', 'true')['default' in capability]))
file.Write(
"ClientContextState::EnableFlags::EnableFlags()\n : %s {\n}\n" %
",\n ".join(code))
file.Write("\n")
file.Write("""
bool ClientContextState::SetCapabilityState(
GLenum cap, bool enabled, bool* changed) {
*changed = false;
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(""" if (enable_flags.%(name)s != enabled) {
*changed = true;
enable_flags.%(name)s = enabled;
}
return true;
""" % capability)
file.Write(""" default:
return false;
}
}
""")
file.Write("""bool ClientContextState::GetEnabled(
GLenum cap, bool* enabled) const {
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(" *enabled = enable_flags.%s;\n" % capability['name'])
file.Write(" return true;\n")
file.Write(""" default:
return false;
}
}
""")
file.Close()
def WriteServiceImplementation(self, filename):
"""Writes the service decorder implementation."""
file = CHeaderWriter(
filename,
"// It is included by gles2_cmd_decoder.cc\n")
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteServiceImplementation(file)
file.Write("""
bool GLES2DecoderImpl::SetCapabilityState(GLenum cap, bool enabled) {
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
if 'state_flag' in capability:
file.Write(""" if (state_.enable_flags.%(name)s != enabled) {
state_.enable_flags.%(name)s = enabled;
%(state_flag)s = true;
}
return false;
""" % capability)
else:
file.Write(""" state_.enable_flags.%(name)s = enabled;
return true;
""" % capability)
file.Write(""" default:
NOTREACHED();
return false;
}
}
""")
file.Close()
def WriteServiceUnitTests(self, filename):
"""Writes the service decorder unit tests."""
num_tests = len(self.functions)
FUNCTIONS_PER_FILE = 98 # hard code this so it doesn't change.
count = 0
for test_num in range(0, num_tests, FUNCTIONS_PER_FILE):
count += 1
name = filename % count
file = CHeaderWriter(
name,
"// It is included by gles2_cmd_decoder_unittest_%d.cc\n" % count)
file.SetFileNum(count)
end = test_num + FUNCTIONS_PER_FILE
if end > num_tests:
end = num_tests
for idx in range(test_num, end):
func = self.functions[idx]
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
if func.GetInfo('unit_test') == False:
file.Write("// TODO(gman): %s\n" % func.name)
else:
func.WriteServiceUnitTest(file)
file.Close()
file = CHeaderWriter(
filename % 0,
"// It is included by gles2_cmd_decoder_unittest_base.cc\n")
file.Write(
"""void GLES2DecoderTestBase::SetupInitCapabilitiesExpectations() {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" ExpectEnableDisable(GL_%s, %s);\n" %
(capability['name'].upper(),
('false', 'true')['default' in capability]))
file.Write("""}
void GLES2DecoderTestBase::SetupInitStateExpectations() {
""")
# We need to sort the keys so the expectations match
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if state['type'] == 'FrontBack':
num_states = len(state['states'])
for ndx, group in enumerate(Grouper(num_states / 2, state['states'])):
args = []
for item in group:
if 'expected' in item:
args.append(item['expected'])
else:
args.append(item['default'])
file.Write(
" EXPECT_CALL(*gl_, %s(%s, %s))\n" %
(state['func'], ('GL_FRONT', 'GL_BACK')[ndx], ", ".join(args)))
file.Write(" .Times(1)\n")
file.Write(" .RetiresOnSaturation();\n")
elif state['type'] == 'NamedParameter':
for item in state['states']:
if 'extension_flag' in item:
continue
file.Write(
" EXPECT_CALL(*gl_, %s(%s, %s))\n" %
(state['func'], item['enum'], item['default']))
file.Write(" .Times(1)\n")
file.Write(" .RetiresOnSaturation();\n")
else:
args = []
for item in state['states']:
if 'expected' in item:
args.append(item['expected'])
else:
args.append(item['default'])
file.Write(" EXPECT_CALL(*gl_, %s(%s))\n" %
(state['func'], ", ".join(args)))
file.Write(" .Times(1)\n")
file.Write(" .RetiresOnSaturation();\n")
file.Write("""}
""")
file.Close()
def WriteGLES2Header(self, filename):
"""Writes the GLES2 header."""
file = CHeaderWriter(
filename,
"// This file contains Chromium-specific GLES2 declarations.\n\n")
for func in self.original_functions:
func.WriteGLES2Header(file)
file.Write("\n")
file.Close()
def WriteGLES2CLibImplementation(self, filename):
"""Writes the GLES2 c lib implementation."""
file = CHeaderWriter(
filename,
"// These functions emulate GLES2 over command buffers.\n")
for func in self.original_functions:
func.WriteGLES2CLibImplementation(file)
file.Write("""
namespace gles2 {
extern const NameToFunc g_gles2_function_table[] = {
""")
for func in self.original_functions:
file.Write(
' { "gl%s", reinterpret_cast<GLES2FunctionPointer>(gl%s), },\n' %
(func.name, func.name))
file.Write(""" { NULL, NULL, },
};
} // namespace gles2
""")
file.Close()
def WriteGLES2InterfaceHeader(self, filename):
"""Writes the GLES2 interface header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_interface.h to declare the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2InterfaceHeader(file)
file.Close()
def WriteGLES2InterfaceStub(self, filename):
"""Writes the GLES2 interface stub header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_interface_stub.h.\n")
for func in self.original_functions:
func.WriteGLES2InterfaceStub(file)
file.Close()
def WriteGLES2InterfaceStubImpl(self, filename):
"""Writes the GLES2 interface header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_interface_stub.cc.\n")
for func in self.original_functions:
func.WriteGLES2InterfaceStubImpl(file)
file.Close()
def WriteGLES2ImplementationHeader(self, filename):
"""Writes the GLES2 Implementation header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_implementation.h to declare the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2ImplementationHeader(file)
file.Close()
def WriteGLES2Implementation(self, filename):
"""Writes the GLES2 Implementation."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_implementation.cc to define the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2Implementation(file)
file.Close()
def WriteGLES2TraceImplementationHeader(self, filename):
"""Writes the GLES2 Trace Implementation header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_trace_implementation.h\n")
for func in self.original_functions:
func.WriteGLES2TraceImplementationHeader(file)
file.Close()
def WriteGLES2TraceImplementation(self, filename):
"""Writes the GLES2 Trace Implementation."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_trace_implementation.cc\n")
for func in self.original_functions:
func.WriteGLES2TraceImplementation(file)
file.Close()
def WriteGLES2ImplementationUnitTests(self, filename):
"""Writes the GLES2 helper header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_implementation.h to declare the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2ImplementationUnitTest(file)
file.Close()
def WriteServiceUtilsHeader(self, filename):
"""Writes the gles2 auto generated utility header."""
file = CHeaderWriter(filename)
for enum in sorted(_ENUM_LISTS.keys()):
file.Write("ValueValidator<%s> %s;\n" %
(_ENUM_LISTS[enum]['type'], ToUnderscore(enum)))
file.Write("\n")
file.Close()
def WriteServiceUtilsImplementation(self, filename):
"""Writes the gles2 auto generated utility implementation."""
file = CHeaderWriter(filename)
enums = sorted(_ENUM_LISTS.keys())
for enum in enums:
if len(_ENUM_LISTS[enum]['valid']) > 0:
file.Write("static const %s valid_%s_table[] = {\n" %
(_ENUM_LISTS[enum]['type'], ToUnderscore(enum)))
for value in _ENUM_LISTS[enum]['valid']:
file.Write(" %s,\n" % value)
file.Write("};\n")
file.Write("\n")
file.Write("Validators::Validators()\n")
pre = ': '
post = ','
for count, enum in enumerate(enums):
if count + 1 == len(enums):
post = ' {'
if len(_ENUM_LISTS[enum]['valid']) > 0:
code = """ %(pre)s%(name)s(
valid_%(name)s_table, arraysize(valid_%(name)s_table))%(post)s
"""
else:
code = """ %(pre)s%(name)s()%(post)s
"""
file.Write(code % {
'name': ToUnderscore(enum),
'pre': pre,
'post': post,
})
pre = ' '
file.Write("}\n\n");
file.Close()
def WriteCommonUtilsHeader(self, filename):
"""Writes the gles2 common utility header."""
file = CHeaderWriter(filename)
enums = sorted(_ENUM_LISTS.keys())
for enum in enums:
if _ENUM_LISTS[enum]['type'] == 'GLenum':
file.Write("static std::string GetString%s(uint32 value);\n" % enum)
file.Write("\n")
file.Close()
def WriteCommonUtilsImpl(self, filename):
"""Writes the gles2 common utility header."""
enum_re = re.compile(r'\#define\s+(GL_[a-zA-Z0-9_]+)\s+([0-9A-Fa-fx]+)')
dict = {}
for fname in ['../../third_party/khronos/GLES2/gl2.h',
'../../third_party/khronos/GLES2/gl2ext.h',
'../../gpu/GLES2/gl2chromium.h',
'../../gpu/GLES2/gl2extchromium.h']:
lines = open(fname).readlines()
for line in lines:
m = enum_re.match(line)
if m:
name = m.group(1)
value = m.group(2)
          if len(value) <= 10 and value not in dict:
dict[value] = name
file = CHeaderWriter(filename)
file.Write("static const GLES2Util::EnumToString "
"enum_to_string_table[] = {\n")
for value in dict:
file.Write(' { %s, "%s", },\n' % (value, dict[value]))
file.Write("""};
const GLES2Util::EnumToString* const GLES2Util::enum_to_string_table_ =
enum_to_string_table;
const size_t GLES2Util::enum_to_string_table_len_ =
sizeof(enum_to_string_table) / sizeof(enum_to_string_table[0]);
""")
enums = sorted(_ENUM_LISTS.keys())
for enum in enums:
if _ENUM_LISTS[enum]['type'] == 'GLenum':
file.Write("std::string GLES2Util::GetString%s(uint32 value) {\n" %
enum)
if len(_ENUM_LISTS[enum]['valid']) > 0:
file.Write(" static const EnumToString string_table[] = {\n")
for value in _ENUM_LISTS[enum]['valid']:
file.Write(' { %s, "%s" },\n' % (value, value))
file.Write(""" };
return GLES2Util::GetQualifiedEnumString(
string_table, arraysize(string_table), value);
}
""")
else:
file.Write(""" return GLES2Util::GetQualifiedEnumString(
NULL, 0, value);
}
""")
file.Close()
def WritePepperGLES2Interface(self, filename, dev):
"""Writes the Pepper OpenGLES interface definition."""
file = CHeaderWriter(
filename,
"// OpenGL ES interface.\n",
2)
file.Write("#include \"ppapi/c/pp_resource.h\"\n")
if dev:
file.Write("#include \"ppapi/c/ppb_opengles2.h\"\n\n")
else:
file.Write("\n#ifndef __gl2_h_\n")
for (k, v) in _GL_TYPES.iteritems():
file.Write("typedef %s %s;\n" % (v, k))
file.Write("#endif // __gl2_h_\n\n")
for interface in self.pepper_interfaces:
if interface.dev != dev:
continue
file.Write("#define %s_1_0 \"%s;1.0\"\n" %
(interface.GetInterfaceName(), interface.GetInterfaceString()))
file.Write("#define %s %s_1_0\n" %
(interface.GetInterfaceName(), interface.GetInterfaceName()))
file.Write("\nstruct %s {\n" % interface.GetStructName())
for func in self.original_functions:
if not func.InPepperInterface(interface):
continue
original_arg = func.MakeTypedOriginalArgString("")
context_arg = "PP_Resource context"
if len(original_arg):
arg = context_arg + ", " + original_arg
else:
arg = context_arg
file.Write(" %s (*%s)(%s);\n" % (func.return_type, func.name, arg))
file.Write("};\n\n")
file.Close()
def WritePepperGLES2Implementation(self, filename):
"""Writes the Pepper OpenGLES interface implementation."""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
file.Write("#include \"ppapi/shared_impl/ppb_opengles2_shared.h\"\n\n")
file.Write("#include \"base/logging.h\"\n")
file.Write("#include \"gpu/command_buffer/client/gles2_implementation.h\"\n")
file.Write("#include \"ppapi/shared_impl/ppb_graphics_3d_shared.h\"\n")
file.Write("#include \"ppapi/thunk/enter.h\"\n\n")
file.Write("namespace ppapi {\n\n")
file.Write("namespace {\n\n")
file.Write("gpu::gles2::GLES2Implementation*"
" GetGLES(PP_Resource context) {\n")
file.Write(" thunk::EnterResource<thunk::PPB_Graphics3D_API>"
" enter_g3d(context, false);\n")
file.Write(" DCHECK(enter_g3d.succeeded());\n")
file.Write(" return static_cast<PPB_Graphics3D_Shared*>"
"(enter_g3d.object())->gles2_impl();\n")
file.Write("}\n\n")
for func in self.original_functions:
if not func.InAnyPepperExtension():
continue
original_arg = func.MakeTypedOriginalArgString("")
context_arg = "PP_Resource context_id"
if len(original_arg):
arg = context_arg + ", " + original_arg
else:
arg = context_arg
file.Write("%s %s(%s) {\n" % (func.return_type, func.name, arg))
return_str = "" if func.return_type == "void" else "return "
file.Write(" %sGetGLES(context_id)->%s(%s);\n" %
(return_str, func.original_name,
func.MakeOriginalArgString("")))
file.Write("}\n\n")
file.Write("} // namespace\n")
for interface in self.pepper_interfaces:
file.Write("const %s* PPB_OpenGLES2_Shared::Get%sInterface() {\n" %
(interface.GetStructName(), interface.GetName()))
file.Write(" static const struct %s "
"ppb_opengles2 = {\n" % interface.GetStructName())
file.Write(" &")
file.Write(",\n &".join(
f.name for f in self.original_functions
if f.InPepperInterface(interface)))
file.Write("\n")
file.Write(" };\n")
file.Write(" return &ppb_opengles2;\n")
file.Write("}\n")
file.Write("} // namespace ppapi\n")
file.Close()
def WriteGLES2ToPPAPIBridge(self, filename):
"""Connects GLES2 helper library to PPB_OpenGLES2 interface"""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
file.Write("#ifndef GL_GLEXT_PROTOTYPES\n")
file.Write("#define GL_GLEXT_PROTOTYPES\n")
file.Write("#endif\n")
file.Write("#include <GLES2/gl2.h>\n")
file.Write("#include <GLES2/gl2ext.h>\n")
file.Write("#include \"ppapi/lib/gl/gles2/gl2ext_ppapi.h\"\n\n")
for func in self.original_functions:
if not func.InAnyPepperExtension():
continue
interface = self.interface_info[func.GetInfo('pepper_interface') or '']
file.Write("%s GL_APIENTRY gl%s(%s) {\n" %
(func.return_type, func.name,
func.MakeTypedOriginalArgString("")))
return_str = "" if func.return_type == "void" else "return "
interface_str = "glGet%sInterfacePPAPI()" % interface.GetName()
original_arg = func.MakeOriginalArgString("")
context_arg = "glGetCurrentContextPPAPI()"
if len(original_arg):
arg = context_arg + ", " + original_arg
else:
arg = context_arg
if interface.GetName():
file.Write(" const struct %s* ext = %s;\n" %
(interface.GetStructName(), interface_str))
file.Write(" if (ext)\n")
file.Write(" %sext->%s(%s);\n" %
(return_str, func.name, arg))
if return_str:
file.Write(" %s0;\n" % return_str)
else:
file.Write(" %s%s->%s(%s);\n" %
(return_str, interface_str, func.name, arg))
file.Write("}\n\n")
file.Close()
def main(argv):
"""This is the main function."""
parser = OptionParser()
parser.add_option(
"-g", "--generate-implementation-templates", action="store_true",
help="generates files that are generally hand edited..")
parser.add_option(
"--alternate-mode", type="choice",
choices=("ppapi", "chrome_ppapi", "chrome_ppapi_proxy", "nacl_ppapi"),
help="generate files for other projects. \"ppapi\" will generate ppapi "
"bindings. \"chrome_ppapi\" generate chrome implementation for ppapi. "
"\"chrome_ppapi_proxy\" will generate the glue for the chrome IPC ppapi"
"proxy. \"nacl_ppapi\" will generate NaCl implementation for ppapi")
parser.add_option(
"--output-dir",
help="base directory for resulting files, under chrome/src. default is "
"empty. Use this if you want the result stored under gen.")
parser.add_option(
"-v", "--verbose", action="store_true",
help="prints more output.")
(options, args) = parser.parse_args(args=argv)
  # Add in states and capabilities to GLState
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if 'enum' in state:
_ENUM_LISTS['GLState']['valid'].append(state['enum'])
else:
for item in state['states']:
if 'extension_flag' in item:
continue
_ENUM_LISTS['GLState']['valid'].append(item['enum'])
for capability in _CAPABILITY_FLAGS:
_ENUM_LISTS['GLState']['valid'].append("GL_%s" % capability['name'].upper())
# This script lives under gpu/command_buffer, cd to base directory.
os.chdir(os.path.dirname(__file__) + "/../..")
gen = GLGenerator(options.verbose)
gen.ParseGLH("common/GLES2/gl2.h")
# Support generating files under gen/
  if options.output_dir is not None:
os.chdir(options.output_dir)
if options.alternate_mode == "ppapi":
# To trigger this action, do "make ppapi_gles_bindings"
os.chdir("ppapi");
gen.WritePepperGLES2Interface("c/ppb_opengles2.h", False)
gen.WritePepperGLES2Interface("c/dev/ppb_opengles2ext_dev.h", True)
gen.WriteGLES2ToPPAPIBridge("lib/gl/gles2/gles2.c")
elif options.alternate_mode == "chrome_ppapi":
# To trigger this action, do "make ppapi_gles_implementation"
gen.WritePepperGLES2Implementation(
"ppapi/shared_impl/ppb_opengles2_shared.cc")
else:
os.chdir("gpu/command_buffer")
gen.WriteCommandIds("common/gles2_cmd_ids_autogen.h")
gen.WriteFormat("common/gles2_cmd_format_autogen.h")
gen.WriteFormatTest("common/gles2_cmd_format_test_autogen.h")
gen.WriteGLES2InterfaceHeader("client/gles2_interface_autogen.h")
gen.WriteGLES2InterfaceStub("client/gles2_interface_stub_autogen.h")
gen.WriteGLES2InterfaceStubImpl(
"client/gles2_interface_stub_impl_autogen.h")
gen.WriteGLES2ImplementationHeader("client/gles2_implementation_autogen.h")
gen.WriteGLES2Implementation("client/gles2_implementation_impl_autogen.h")
gen.WriteGLES2ImplementationUnitTests(
"client/gles2_implementation_unittest_autogen.h")
gen.WriteGLES2TraceImplementationHeader(
"client/gles2_trace_implementation_autogen.h")
gen.WriteGLES2TraceImplementation(
"client/gles2_trace_implementation_impl_autogen.h")
gen.WriteGLES2CLibImplementation("client/gles2_c_lib_autogen.h")
gen.WriteCmdHelperHeader("client/gles2_cmd_helper_autogen.h")
gen.WriteServiceImplementation("service/gles2_cmd_decoder_autogen.h")
gen.WriteServiceContextStateHeader("service/context_state_autogen.h")
gen.WriteServiceContextStateImpl("service/context_state_impl_autogen.h")
gen.WriteClientContextStateHeader("client/client_context_state_autogen.h")
gen.WriteClientContextStateImpl(
"client/client_context_state_impl_autogen.h")
gen.WriteServiceUnitTests("service/gles2_cmd_decoder_unittest_%d_autogen.h")
gen.WriteServiceUtilsHeader("service/gles2_cmd_validation_autogen.h")
gen.WriteServiceUtilsImplementation(
"service/gles2_cmd_validation_implementation_autogen.h")
gen.WriteCommonUtilsHeader("common/gles2_cmd_utils_autogen.h")
gen.WriteCommonUtilsImpl("common/gles2_cmd_utils_implementation_autogen.h")
gen.WriteGLES2Header("../GLES2/gl2chromium_autogen.h")
if gen.errors > 0:
print "%d errors" % gen.errors
return 1
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
evernym/plenum
|
refs/heads/master
|
plenum/test/replica/conftest.py
|
2
|
import pytest
from orderedset._orderedset import OrderedSet
from plenum.common.event_bus import InternalBus
from plenum.common.messages.node_messages import PrePrepare
from plenum.common.startable import Mode
from plenum.common.constants import POOL_LEDGER_ID, DOMAIN_LEDGER_ID, CURRENT_PROTOCOL_VERSION, AUDIT_LEDGER_ID, \
TXN_PAYLOAD, TXN_PAYLOAD_DATA, AUDIT_TXN_VIEW_NO, AUDIT_TXN_PP_SEQ_NO, AUDIT_TXN_DIGEST
from plenum.common.timer import QueueTimer
from plenum.common.util import get_utc_epoch
from plenum.server.batch_handlers.node_reg_handler import NodeRegHandler
from plenum.server.consensus.primary_selector import RoundRobinConstantNodesPrimariesSelector
from plenum.server.database_manager import DatabaseManager
from plenum.server.propagator import Requests
from plenum.server.quorums import Quorums
from plenum.server.replica import Replica
from plenum.test.conftest import getValueFromModule
from plenum.test.helper import MockTimestamp, sdk_random_request_objects, create_pre_prepare_params, \
create_prepare_from_pre_prepare
from plenum.test.testing_utils import FakeSomething
from plenum.test.bls.conftest import fake_state_root_hash, fake_multi_sig, fake_multi_sig_value
class ReplicaFakeNode(FakeSomething):
def __init__(self, viewNo, quorums, ledger_ids):
node_names = ["Alpha", "Beta", "Gamma", "Delta"]
node_stack = FakeSomething(
name="fake stack",
connecteds=set(node_names)
)
self.replicas = []
self.viewNo = viewNo
audit_ledger = FakeSomething(size=0, get_last_txn=lambda *args: None, getAllTxn=lambda *args, **kwargs: [])
db_manager = DatabaseManager()
db_manager.register_new_database(AUDIT_LEDGER_ID, audit_ledger)
super().__init__(
name="fake node",
ledger_ids=ledger_ids,
_viewNo=viewNo,
quorums=quorums,
nodestack=node_stack,
utc_epoch=lambda *args: get_utc_epoch(),
mode=Mode.participating,
view_change_in_progress=False,
monitor=FakeSomething(isMasterDegraded=lambda: False),
requests=Requests(),
onBatchCreated=lambda self, *args, **kwargs: True,
applyReq=lambda self, *args, **kwargs: True,
primaries=[],
get_validators=lambda: [],
db_manager=db_manager,
write_manager=FakeSomething(database_manager=db_manager,
node_reg_handler=NodeRegHandler(db_manager),
apply_request=lambda req, cons_time: None),
timer=QueueTimer(),
poolManager=FakeSomething(node_names_ordered_by_rank=lambda: node_names),
primaries_selector=RoundRobinConstantNodesPrimariesSelector(node_names)
)
@property
def viewNo(self):
return self._viewNo
@viewNo.setter
def viewNo(self, viewNo):
self._viewNo = viewNo
for replica in self.replicas:
            replica._consensus_data.view_no = viewNo
@property
def is_synced(self) -> bool:
return Mode.is_done_syncing(self.mode)
@property
def isParticipating(self) -> bool:
return self.mode == Mode.participating
def add_replica(self, replica):
self.replicas.append(replica)
for replica in self.replicas:
replica._consensus_data.view_no = self.viewNo
@pytest.fixture(scope='function', params=[0, 10])
def viewNo(tconf, request):
return request.param
@pytest.fixture(scope='function')
def ledger_ids():
return [POOL_LEDGER_ID]
@pytest.fixture(scope='function', params=[0])
def inst_id(request):
return request.param
@pytest.fixture(scope="function")
def mock_timestamp():
return get_utc_epoch
@pytest.fixture()
def fake_requests():
return sdk_random_request_objects(10, identifier="fake_did",
protocol_version=CURRENT_PROTOCOL_VERSION)
@pytest.fixture()
def txn_roots():
return ["AAAgqga9DNr4bjH57Rdq6BRtvCN1PV9UX5Mpnm9gbMAZ",
"BBBJmfG5DYAE8ZcdTTFMiwcZaDN6CRVdSdkhBXnkYPio",
"CCCJmfG5DYAE8ZcdTTFMiwcZaDN6CRVdSdkhBXnkYPio",
"DDDJmfG5DYAE8ZcdTTFMiwcZaDN6CRVdSdkhBXnkYPio"]
@pytest.fixture()
def state_roots(fake_state_root_hash):
return ["EuDgqga9DNr4bjH57Rdq6BRtvCN1PV9UX5Mpnm9gbMAZ",
fake_state_root_hash,
"D95JmfG5DYAE8ZcdTTFMiwcZaDN6CRVdSdkhBXnkYPio",
None]
@pytest.fixture(scope='function')
def replica(tconf, viewNo, inst_id, ledger_ids, mock_timestamp, fake_requests, txn_roots, state_roots, request):
node = ReplicaFakeNode(viewNo=viewNo,
quorums=Quorums(getValueFromModule(request, 'nodeCount', default=4)),
ledger_ids=ledger_ids)
bls_bft_replica = FakeSomething(
gc=lambda *args: None,
update_pre_prepare=lambda params, l_id: params,
validate_pre_prepare=lambda a, b: None,
validate_prepare=lambda a, b: None,
update_prepare=lambda a, b: a,
process_prepare=lambda a, b: None,
process_pre_prepare=lambda a, b: None,
process_order=lambda *args: None
)
replica = Replica(
node, instId=inst_id, isMaster=inst_id == 0,
config=tconf,
bls_bft_replica=bls_bft_replica,
get_current_time=mock_timestamp,
get_time_for_3pc_batch=mock_timestamp
)
node.add_replica(replica)
ReplicaFakeNode.master_last_ordered_3PC = replica.last_ordered_3pc
replica._ordering_service.last_accepted_pre_prepare_time = replica.get_time_for_3pc_batch()
replica.primaryName = "Alpha:{}".format(replica.instId)
replica.primaryNames[replica.viewNo] = replica.primaryName
replica._ordering_service.get_txn_root_hash = lambda ledger, to_str=False: txn_roots[ledger]
replica._ordering_service.get_state_root_hash = lambda ledger, to_str=False: state_roots[ledger]
replica._ordering_service._revert = lambda ledgerId, stateRootHash, reqCount: None
replica._ordering_service.post_batch_creation = lambda three_pc_batch: None
replica._ordering_service.requestQueues[DOMAIN_LEDGER_ID] = OrderedSet()
replica._ordering_service._get_primaries_for_ordered = lambda pp: [replica.primaryName]
replica._ordering_service._get_node_reg_for_ordered = lambda pp: ["Alpha", "Beta", "Gamma", "Delta"]
def reportSuspiciousNodeEx(ex):
assert False, ex
replica.node.reportSuspiciousNodeEx = reportSuspiciousNodeEx
return replica
@pytest.fixture(scope='function')
def primary_replica(replica):
replica.primaryName = replica.name
return replica
@pytest.fixture(scope='function')
def replica_with_requests(replica, fake_requests):
replica._ordering_service._apply_pre_prepare = lambda a: (fake_requests, [], [], False)
for req in fake_requests:
replica._ordering_service.requestQueues[DOMAIN_LEDGER_ID].add(req.key)
replica.requests.add(req)
replica.requests.set_finalised(req)
return replica
@pytest.fixture(scope="function",
params=['BLS_not_None', 'BLS_None'])
def multi_sig(fake_multi_sig, request):
if request.param == 'BLS_None':
return None
return fake_multi_sig
@pytest.fixture(scope="function")
def pre_prepare(replica, state_roots, txn_roots, multi_sig, fake_requests):
params = create_pre_prepare_params(state_root=state_roots[DOMAIN_LEDGER_ID],
ledger_id=DOMAIN_LEDGER_ID,
txn_root=txn_roots[DOMAIN_LEDGER_ID],
bls_multi_sig=multi_sig,
view_no=replica.viewNo,
inst_id=replica.instId,
pool_state_root=state_roots[POOL_LEDGER_ID],
audit_txn_root=txn_roots[AUDIT_LEDGER_ID],
reqs=fake_requests)
pp = PrePrepare(*params)
return pp
@pytest.fixture(scope="function")
def prepare(pre_prepare):
return create_prepare_from_pre_prepare(pre_prepare)
|
nive/nive_cms
|
refs/heads/master
|
nive_cms/tests/test_app.py
|
1
|
# -*- coding: utf-8 -*-
import unittest
from nive.workflow import WorkflowNotAllowed
from nive.security import Allow, Deny, Authenticated, Everyone, User
from nive_cms.tests.db_app import *
from nive_cms.tests import __local
class ObjectTest_db(object):
"""
Actual test classes are subclassed for db system (sqlite, mysql)
"""
def setUp(self):
self._loadApp()
self.remove = []
def tearDown(self):
u = User(u"test")
u.groups.append("group:editor")
root = self.app.root("editor")
for r in self.remove:
root.Delete(r, u)
def test_root(self):
a=self.app
ccc = a.db.GetCountEntries()
r=root(a)
user = User(u"test")
user.groups.append("group:editor")
# add to root
p = create_page(r, user=user)
self.assert_(p)
self.remove.append(p.id)
b0 = create_menublock(r, user=user)
self.assert_(b0)
self.remove.append(b0.id)
b1 = create_box(r, user=user)
self.assert_(b1)
self.remove.append(b1.id)
col = r.GetColumn("left")
if col:
r.Delete(col.id, user=user)
b2 = create_column(r, user=user)
self.assert_(b2)
self.remove.append(b2.id)
b3 = create_file(r, user=user)
self.assert_(b3)
self.remove.append(b3.id)
b4 = create_image(r, user=user)
self.assert_(b4)
self.remove.append(b4.id)
b5 = create_media(r, user=user)
self.assert_(b5)
self.remove.append(b5.id)
b6 = create_note(r, user=user)
self.assert_(b6)
self.remove.append(b6.id)
b7 = create_text(r, user=user)
self.assert_(b7)
self.remove.append(b7.id)
b8 = create_spacer(r, user=user)
self.assert_(b8)
self.remove.append(b8.id)
b9 = create_link(r, user=user)
self.assert_(b9)
self.remove.append(b9.id)
b10 = create_code(r, user=user)
self.assert_(b10)
self.remove.append(b10.id)
r.Delete(p.id, user=user, obj=p)
r.Delete(b0.id, user=user)
r.Delete(b1.id, user=user)
r.Delete(b2.id, user=user)
r.Delete(b3.id, user=user)
r.Delete(b4.id, user=user)
r.Delete(b5.id, user=user)
r.Delete(b6.id, user=user)
r.Delete(b7.id, user=user)
r.Delete(b8.id, user=user)
r.Delete(b9.id, user=user)
r.Delete(b10.id, user=user)
self.assertEqual(ccc, a.db.GetCountEntries(), "Delete failed")
        # workflow failure when adding to root
user.groups = ["group:looser"]
self.assertRaises(WorkflowNotAllowed, create_page, r, user)
def test_page(self):
a=self.app
ccc = a.db.GetCountEntries()
r=root(a)
user = User(u"test")
user.groups.append("group:editor")
# add to root
p = create_page(r, user=user)
self.assert_(p)
self.remove.append(p.id)
self.assert_(p.IsLinked()==u"")
self.assert_(p.IsPage())
p.meta["pool_groups"] = ["sys:authenticated","another"]
p.Signal("init")
r = p
b0 = create_menublock(r, user=user)
self.assert_(b0)
b1 = create_box(r, user=user)
self.assert_(b1)
b2 = create_column(r, user=user)
self.assert_(b2)
b3 = create_file(r, user=user)
self.assert_(b3)
b4 = create_image(r, user=user)
self.assert_(b4)
b5 = create_media(r, user=user)
self.assert_(b5)
b6 = create_note(r, user=user)
self.assert_(b6)
b7 = create_text(r, user=user)
self.assert_(b7)
b8 = create_spacer(r, user=user)
self.assert_(b8)
b9 = create_link(r, user=user)
self.assert_(b9)
b10 = create_code(r, user=user)
self.assert_(b10)
root(a).Delete(r.id, user=user, obj=r)
self.assertEqual(ccc, a.db.GetCountEntries(), "Delete failed")
def test_container(self):
a=self.app
ccc = a.db.GetCountEntries()
r=root(a)
user = User(u"test")
user.groups.append("group:editor")
# add to root
b1 = create_box(r, user=user)
self.assert_(b1)
self.remove.append(b1.id)
r = b1
b3 = create_file(r, user=user)
self.assert_(b3)
b4 = create_image(r, user=user)
self.assert_(b4)
b5 = create_media(r, user=user)
self.assert_(b5)
b6 = create_note(r, user=user)
self.assert_(b6)
b7 = create_text(r, user=user)
self.assert_(b7)
b8 = create_spacer(r, user=user)
self.assert_(b8)
b9 = create_link(r, user=user)
self.assert_(b9)
b0 = create_menublock(r, user=user)
self.assert_(b0)
b10 = create_code(r, user=user)
self.assert_(b10)
r=root(a)
col = r.GetColumn("left")
if col:
r.Delete(col.id, user=user)
b2 = create_column(r, user=user)
self.assert_(b2)
self.remove.append(b2.id)
r = b2
b3 = create_file(r, user=user)
self.assert_(b3)
b4 = create_image(r, user=user)
self.assert_(b4)
b5 = create_media(r, user=user)
self.assert_(b5)
b6 = create_note(r, user=user)
self.assert_(b6)
b7 = create_text(r, user=user)
self.assert_(b7)
b8 = create_spacer(r, user=user)
self.assert_(b8)
b9 = create_link(r, user=user)
self.assert_(b9)
b0 = create_menublock(r, user=user)
self.assert_(b0)
b10 = create_code(r, user=user)
self.assert_(b10)
root(a).Delete(b1.id, user=user, obj=b1)
root(a).Delete(b2.id, user=user, obj=b2)
self.assertEqual(ccc, a.db.GetCountEntries(), "Delete failed")
def test_objs(self):
a=self.app
ccc = a.db.GetCountEntries()
r=root(a)
user = User(u"test")
user.groups.append("group:editor")
p = create_page(r, user=user)
self.remove.append(p.id)
r = p
#box
b1 = create_box(r, user=user)
b1.IsContainer()
b1.GetPage()
b1.GetElementContainer()
b1.GetContainer()
#column
b2 = create_column(r, user=user)
self.assert_(b2.IsLocal(r))
b2.GetName()
b2.IsContainer()
b2.GetPage()
b2.GetPages()
b2.GetElementContainer()
b2.GetContainer()
self.assert_(b2.GetColumn("left")==b2)
#file
b3 = create_file(r, user=user)
b3.GetDownloadTitle()
b3.FilenameToTitle()
#menublock
b0 = create_menublock(r, user=user)
b0.GetMenuPages()
root(a).Delete(r.id, user=user)
self.assertEqual(ccc, a.db.GetCountEntries(), "Delete failed")
class ObjectTest_db_Sqlite(ObjectTest_db, __local.SqliteTestCase):
"""
see tests.__local
"""
class ObjectTest_db_MySql(ObjectTest_db, __local.MySqlTestCase):
"""
see tests.__local
"""
class ObjectTest_db_pg(ObjectTest_db, __local.PostgreSqlTestCase):
"""
see tests.__local
"""
|
rajkotecha/origin
|
refs/heads/master
|
rel-eng/lib/origin/tagger/__init__.py
|
36
|
"""
Code for tagging Origin packages
"""
import os
import re
import rpm
import shutil
import subprocess
import tempfile
import textwrap
import sys
from tito.common import (get_latest_commit, run_command,
    get_latest_tagged_version, increase_version, increase_zstream,
    get_spec_version_and_release, tag_exists_locally, tag_exists_remotely,
    head_points_to_tag, undo_tag, find_git_root, debug, error_out)
from tito.compat import write
from tito.tagger import VersionTagger
from tito.exception import TitoException
class OriginTagger(VersionTagger):
"""
    Origin custom tagger. This tagger has several deviations from the normal
    tito tagger.
** Rather than versions being tagged %{name}-%{version}-%{release} they're
tagged as v%{version} in order to preserve compatibility with origin build
processes. This means you really should not attempt to use the release field
for anything useful, it should probably always remain zero.
** RPM specfile global commit is updated with the git hash, this may be
relevant and popular with other golang projects, so TODO: submit to tito
upstream.
Requires that your commit global is written on one single line like this:
%global commit 460abe2a3abe0fa22ac96c551fe71c0fc36f7475
** RPM specfile global ldflags is updated with os::build::ldflags as generated
by importing hack/common.sh this absolutely depends on the non standard
version tagging outlined above. This is 100% openshift specific
Requires that your ldflags global is written on one single line like this:
%global ldflags -X foo -X bar
NOTE: Does not work with --use-version as tito does not provide a way to
override the forced version tagger, see
https://github.com/dgoodwin/tito/pull/163
Used For:
- Origin, probably not much else
"""
def _tag_release(self):
"""
        Tag a new release of the package (ie: x.y.z-r+1), update the specfile
        global named commit, and refresh the ldflags global from
        hack/common.sh os::build::ldflags.
"""
self._make_changelog()
new_version = self._bump_version()
new_version = re.sub(r"-.*","",new_version)
git_hash = get_latest_commit()
update_commit = "sed -i 's/^%%global commit .*$/%%global commit %s/' %s" % \
(git_hash, self.spec_file)
output = run_command(update_commit)
cmd = '. ./hack/common.sh ; echo $(os::build::ldflags)'
ldflags = run_command('bash -c \'%s\'' % (cmd) )
# hack/common.sh will tell us that the tree is dirty because tito has
# already mucked with things, but lets not consider the tree to be dirty
ldflags = ldflags.replace('-dirty','')
update_ldflags = "sed -i 's|^%%global ldflags .*$|%%global ldflags %s|' %s" % \
(ldflags, self.spec_file)
output = run_command(update_ldflags)
self._check_tag_does_not_exist(self._get_new_tag(new_version))
self._update_changelog(new_version)
self._update_package_metadata(new_version)
def _get_new_tag(self, new_version):
""" Returns the actual tag we'll be creating. """
return "v%s" % (new_version)
def get_latest_tagged_version(package_name):
"""
Return the latest git tag for this package in the current branch.
Uses the info in rel-eng/packages/package-name.
Returns None if file does not exist.
"""
git_root = find_git_root()
rel_eng_dir = os.path.join(git_root, "rel-eng")
file_path = "%s/packages/%s" % (rel_eng_dir, package_name)
debug("Getting latest package info from: %s" % file_path)
if not os.path.exists(file_path):
return None
output = run_command("awk '{ print $1 ; exit }' %s" % file_path)
if output is None or output.strip() == "":
error_out("Error looking up latest tagged version in: %s" % file_path)
return output
def _make_changelog(self):
"""
Create a new changelog entry in the spec, with line items from git
"""
if self._no_auto_changelog:
debug("Skipping changelog generation.")
return
in_f = open(self.spec_file, 'r')
out_f = open(self.spec_file + ".new", 'w')
found_changelog = False
for line in in_f.readlines():
out_f.write(line)
if not found_changelog and line.startswith("%changelog"):
found_changelog = True
old_version = get_latest_tagged_version(self.project_name)
# don't die if this is a new package with no history
if old_version is not None:
last_tag = "v%s" % (old_version)
output = self._generate_default_changelog(last_tag)
else:
output = self._new_changelog_msg
fd, name = tempfile.mkstemp()
write(fd, "# Create your changelog entry below:\n")
if self.git_email is None or (('HIDE_EMAIL' in self.user_config) and
(self.user_config['HIDE_EMAIL'] not in ['0', ''])):
header = "* %s %s\n" % (self.today, self.git_user)
else:
header = "* %s %s <%s>\n" % (self.today, self.git_user,
self.git_email)
write(fd, header)
for cmd_out in output.split("\n"):
write(fd, "- ")
write(fd, "\n ".join(textwrap.wrap(cmd_out, 77)))
write(fd, "\n")
write(fd, "\n")
if not self._accept_auto_changelog:
# Give the user a chance to edit the generated changelog:
editor = 'vi'
if "EDITOR" in os.environ:
editor = os.environ["EDITOR"]
subprocess.call(editor.split() + [name])
os.lseek(fd, 0, 0)
file = os.fdopen(fd)
for line in file.readlines():
if not line.startswith("#"):
out_f.write(line)
output = file.read()
file.close()
os.unlink(name)
if not found_changelog:
print("WARNING: no %changelog section find in spec file. Changelog entry was not appended.")
in_f.close()
out_f.close()
shutil.move(self.spec_file + ".new", self.spec_file)
def _undo(self):
"""
Undo the most recent tag.
Tag commit must be the most recent commit, and the tag must not
exist in the remote git repo, otherwise we report and error out.
"""
tag = "v%s" % (get_latest_tagged_version(self.project_name))
print("Undoing tag: %s" % tag)
if not tag_exists_locally(tag):
raise TitoException(
"Cannot undo tag that does not exist locally.")
if not self.offline and tag_exists_remotely(tag):
raise TitoException("Cannot undo tag that has been pushed.")
# Tag must be the most recent commit.
if not head_points_to_tag(tag):
raise TitoException("Cannot undo if tag is not the most recent commit.")
# Everything looks good:
print
undo_tag(tag)
# This won't do anything until tito supports configuring the forced version tagger
# See https://github.com/dgoodwin/tito/pull/163
class OriginForceVersionTagger(OriginTagger):
"""
Tagger which forcibly updates the spec file to a version provided on the
command line by the --use-version option.
TODO: could this be merged into main taggers?
"""
def _tag_release(self):
"""
Tag a new release of the package.
"""
self._make_changelog()
new_version = self._bump_version(force=True)
self._check_tag_does_not_exist(self._get_new_tag(new_version))
self._update_changelog(new_version)
self._update_setup_py(new_version)
self._update_package_metadata(new_version)
|
gilestrolab/ethoscope
|
refs/heads/master
|
src/ethoscope/web_utils/helpers.py
|
1
|
import random
import logging
import traceback
import datetime, time
import os
import re
from uuid import uuid4
def pi_version():
"""
Detect the version of the Raspberry Pi.
https://www.raspberrypi.org/documentation/hardware/raspberrypi/revision-codes/README.md
We used to use cat /proc/cpuinfo but as of the 4.9 kernel, all Pis report BCM2835, even those with BCM2836, BCM2837 and BCM2711 processors.
You should not use this string to detect the processor. Decode the revision code using the information in the URL above, or simply cat /sys/firmware/devicetree/base/model
"""
info_file = '/sys/firmware/devicetree/base/model'
if os.path.exists(info_file):
with open (info_file, 'r') as revision_input:
revision_info = revision_input.read().rstrip('\x00')
return revision_info
else:
return 0
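# Illustrative: on a Pi 3 the devicetree model file would read something like
# "Raspberry Pi 3 Model B Rev 1.2" (the exact string depends on the board).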
def isMachinePI():
"""
Return True if we are running on a Pi - proper ethoscope
"""
return pi_version() != 0
def get_machine_name(path="/etc/machine-name"):
"""
    Reads the machine name.
    The file will be present only on a real ethoscope;
    when running locally, a random name is generated.
"""
if os.path.exists(path):
with open(path,'r') as f:
info = f.readline().rstrip()
return info
else:
return 'VIRTUASCOPE_' + str(random.randint(100,999))
def set_machine_name(id, path="/etc/machine-name"):
'''
Takes an id and updates the machine name accordingly in the format
ETHOSCOPE_id; changes the hostname too.
:param id: integer
'''
machine_name = "ETHOSCOPE_%03d" % id
try:
with open(path, 'w') as f:
f.write(machine_name)
logging.warning("Wrote new information in file: %s" % path)
with open("/etc/hostname", 'w') as f:
f.write(machine_name)
logging.warning("Changed the machine hostname to: %s" % machine_name)
except:
raise
def set_machine_id(id, path="/etc/machine-id"):
'''
Takes an id and updates the machine id accordingly in the format
    0ID-UUID to make a 32-byte string
:param id: integer
'''
new_uuid = "%03d" % id + uuid4().hex[3:]
try:
with open(path, 'w') as f:
f.write(new_uuid)
logging.warning("Wrote new information in file: %s" % path)
except:
raise
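# Illustrative: set_machine_id(7) writes "007" followed by the last 29 hex
# digits of a fresh uuid4, giving a 32-character machine id.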
def get_WIFI(path="/etc/netctl/wlan"):
"""
"""
if os.path.exists(path):
with open(path,'r') as f:
wlan_settings = f.readlines()
d = {}
for line in wlan_settings:
if "=" in line:
d[ line.strip().split("=")[0] ] = line.strip().split("=")[1]
return d
else:
return {'error' : 'No WIFI Settings were found in path %s' % path}
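# Illustrative: for a profile containing the lines "ESSID=mynet" and
# "Key=secret", get_WIFI() would return {'ESSID': 'mynet', 'Key': 'secret'}
# along with the file's other KEY=VALUE settings.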
def set_WIFI(ssid="ETHOSCOPE_WIFI", wpakey="ETHOSCOPE_1234", path="/etc/netctl/wlan"):
"""
"""
wlan_settings = '''Description=ethoscope_wifi network
Interface=wlan0
Connection=wireless
Security=wpa
IP=dhcp
TimeoutDHCP=60
ESSID=%s
Key=%s
''' % (ssid, wpakey)
try:
with open(path, 'w') as f:
f.write(wlan_settings)
logging.warning("Wrote new information in file: %s" % path)
except:
raise
def set_etc_hostname(ip_address, nodename = "node", path="/etc/hosts"):
'''
Updates the settings in /etc/hosts to match the given IP address
'''
try:
with open(path, 'w') as f:
f.write("127.0.0.1\tlocalhost\n")
f.write("%s\t%s\n" % (ip_address, nodename))
logging.warning("Wrote new information in file: %s" % path)
except:
raise
def get_commit_version(commit):
    '''
    Returns a dict with the commit hash and its UTC commit date.
    '''
return {"id":str(commit),
"date":datetime.datetime.utcfromtimestamp(commit.committed_date).strftime('%Y-%m-%d %H:%M:%S')
}
def get_git_version():
'''
return the current git version
'''
import git
wd = os.getcwd()
while wd != "/":
try:
repo = git.Repo(wd)
commit = repo.commit()
return get_commit_version(commit)
except git.InvalidGitRepositoryError:
wd = os.path.dirname(wd)
raise Exception("Not in a git Tree")
def file_in_dir_r(file, dir):
file_dir_path = os.path.dirname(file).rstrip("//")
dir_path = dir.rstrip("//")
if file_dir_path == dir_path:
return True
elif file_dir_path == "":
return False
else:
return file_in_dir_r(file_dir_path, dir_path)
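# Example trace (sketch):
#   file_in_dir_r('/data/results/run1/video.h264', '/data/results')
#   compares '/data/results/run1' to '/data/results', then recurses on the
#   parent '/data/results', which matches, so the call returns True.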
def cpu_serial():
"""
on a rPI, return a unique identifier of the CPU
"""
serial = ''
if isMachinePI():
with open('/proc/cpuinfo', 'r') as infile:
cpuinfo = infile.read()
# Match a line like 'Serial : xxxxx'
serial = re.search('^Serial\s+:\s+(\w+)$', cpuinfo,
flags=re.MULTILINE | re.IGNORECASE)
serial = serial.group(1)
return serial
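# Example: a /proc/cpuinfo line such as 'Serial          : 00000000abcdef12'
# (hypothetical value) yields the string '00000000abcdef12'.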
def hasPiCamera():
"""
return True if a piCamera is supported and detected
"""
if isMachinePI():
with os.popen('/opt/vc/bin/vcgencmd get_camera') as cmd:
out_cmd = cmd.read().strip()
out = dict(x.split('=') for x in out_cmd.split(' '))
return out["detected"] == out["supported"] == "1"
else:
return False
def getPiCameraVersion():
"""
If a PiCamera is connected, returns the model
#PINoIR v1
#{'IFD0.Model': 'RP_ov5647', 'IFD0.Make': 'RaspberryPi'}
#PINoIR v2
#{'IFD0.Model': 'RP_imx219', 'IFD0.Make': 'RaspberryPi'}
"""
known_versions = {'RP_ov5647': 'PINoIR 1', 'RP_imx219': 'PINoIR 2'}
picamera_info_file = '/etc/picamera-version'
if hasPiCamera():
try:
with open(picamera_info_file, 'r') as infile:
                camera_info = ast.literal_eval(infile.read())
camera_info['version'] = known_versions[ camera_info['IFD0.Model'] ]
except:
camera_info = "This is a new ethoscope. Run tracking once to detect the camera module"
return camera_info
else:
return False
def isSuperscope():
"""
The following lsusb device
Bus 001 Device 003: ID 05a3:9230 ARC International Camera
is the one we currently use for the SuperScope
https://www.amazon.co.uk/gp/product/B07R7JXV35/ref=ppx_yo_dt_b_asin_title_o06_s00?ie=UTF8&psc=1
Eventually we will include the new rPI camera too
https://uk.farnell.com/raspberry-pi/rpi-hq-camera/rpi-high-quality-camera-12-3-mp/dp/3381605
"""
pass
def isExperimental(new_value=None):
"""
    Return True if the machine is to be used as experimental.
    This mimics a non-PI or a PI without a plugged-in camera.
    To activate, create an empty file called /etc/isexperimental.
"""
filename = '/etc/isexperimental'
current_value = os.path.exists(filename)
    if new_value is None:
        return current_value
    if new_value and not current_value:
        # create file
        with open(filename, mode='w'):
            logging.warning("Created a new empty file in %s. The machine is now experimental." % filename)
    elif not new_value and current_value:
        # delete file
        os.remove(filename)
        logging.warning("Removed file %s. The machine is not experimental." % filename)
def get_machine_id(path="/etc/machine-id"):
"""
Reads the machine ID
This file should be present on any linux installation because, when missing, it is automatically generated by the OS
"""
with open(path,'r') as f:
info = f.readline().rstrip()
return info
def get_etc_hostnames():
"""
Parses /etc/hosts file and returns all the hostnames in a dictionary.
"""
with open('/etc/hosts', 'r') as f:
hostlines = f.readlines()
hostlines = [line.strip() for line in hostlines
if not line.startswith('#') and line.strip() != '']
hosts = {}
for line in hostlines:
entries = line.split("#")[0].split()
hosts [ entries[1] ] = entries[0]
return hosts
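# Example: a hosts file containing
#   127.0.0.1     localhost
#   192.168.1.2   node
# yields {'localhost': '127.0.0.1', 'node': '192.168.1.2'}.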
def get_core_temperature():
"""
    Returns the internal core temperature in degrees Celsius
"""
if isMachinePI():
try:
with os.popen("/opt/vc/bin/vcgencmd measure_temp") as df:
temp = float("".join(filter(lambda d: str.isdigit(d) or d == '.', df.read())))
return temp
except:
return 0
else:
return 0
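# Example: 'vcgencmd measure_temp' prints something like "temp=48.3'C";
# the digit/dot filter above reduces that to the float 48.3.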
def get_SD_CARD_AGE():
"""
Given the machine_id file is created at the first boot, it assumes the SD card is as old as the file itself
:return: timestamp of the card
"""
try:
return time.time() - os.path.getmtime("/etc/machine-id")
except:
return
def get_partition_infos():
"""
Returns information about mounted partition and their free availble space
"""
try:
with os.popen('df -Th') as df:
df_info = df.read().strip().split('\n')
keys = df_info[0]
values = df_info[1:]
return [dict([(key, value) for key, value in zip(keys.split(), line.split())]) for line in values]
except:
return
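# Example (values are hypothetical): with a 'df -Th' header of
#   Filesystem Type Size Used Avail Use% Mounted on
# a data row '/dev/root ext4 15G 4.2G 9.9G 30% /' becomes
#   {'Filesystem': '/dev/root', 'Type': 'ext4', ..., 'Mounted': '/'}
# Note that zip() truncates at the shorter sequence, so the trailing 'on'
# header token is dropped and the mount point maps to the key 'Mounted'.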
def set_datetime(time_on_node):
"""
Set date and time on the PI
time_on_node is the time to be set in the datetime format
"""
cmd = 'date -s "%s"' % time_on_node.strftime("%d %b %Y %H:%M:%S") # 26 Jun 2020 15:04:25
try:
with os.popen(cmd, 'r') as c:
c.read()
return True
except:
return False
|
vorlock/ansible-modules-core
|
refs/heads/devel
|
network/basics/slurp.py
|
59
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: slurp
version_added: historical
short_description: Slurps a file from remote nodes
description:
- This module works like M(fetch). It is used for fetching a base64-
encoded blob containing the data in a remote file.
options:
src:
description:
- The file on the remote system to fetch. This I(must) be a file, not a
directory.
required: true
default: null
aliases: []
notes:
- "See also: M(fetch)"
requirements: []
author: Michael DeHaan
'''
EXAMPLES = '''
ansible host -m slurp -a 'src=/tmp/xx'
host | success >> {
"content": "aGVsbG8gQW5zaWJsZSB3b3JsZAo=",
"encoding": "base64"
}
'''
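# To recover the original file contents from the returned blob (sketch):
#   import base64
#   base64.b64decode("aGVsbG8gQW5zaWJsZSB3b3JsZAo=")  # -> 'hello Ansible world\n'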
import base64
def main():
module = AnsibleModule(
argument_spec = dict(
src = dict(required=True, aliases=['path']),
),
supports_check_mode=True
)
source = os.path.expanduser(module.params['src'])
if not os.path.exists(source):
module.fail_json(msg="file not found: %s" % source)
if not os.access(source, os.R_OK):
module.fail_json(msg="file is not readable: %s" % source)
    with open(source, 'rb') as source_file:
        data = base64.b64encode(source_file.read())
module.exit_json(content=data, source=source, encoding='base64')
# import module snippets
from ansible.module_utils.basic import *
main()
|
robwarm/gpaw-symm
|
refs/heads/master
|
gpaw/test/h2o_xas.py
|
1
|
import os
import numpy as np
from math import pi, cos, sin
from ase import Atom, Atoms
from ase.parallel import rank, barrier
from gpaw import GPAW
from gpaw.xas import XAS
from gpaw.test import equal, gen
# Generate setup for oxygen with half a core-hole:
gen('O', name='hch1s', corehole=(1, 0, 0.5))
a = 5.0
d = 0.9575
t = pi / 180 * 104.51
H2O = Atoms([Atom('O', (0, 0, 0)),
Atom('H', (d, 0, 0)),
Atom('H', (d * cos(t), d * sin(t), 0))],
cell=(a, a, a), pbc=False)
H2O.center()
calc = GPAW(nbands=10, h=0.2, setups={'O': 'hch1s'})
H2O.set_calculator(calc)
e = H2O.get_potential_energy()
niter = calc.get_number_of_iterations()
import gpaw.mpi as mpi
if mpi.size == 1:
xas = XAS(calc)
x, y = xas.get_spectra()
e1_n = xas.eps_n
de1 = e1_n[1] - e1_n[0]
calc.write('h2o-xas.gpw')
if mpi.size == 1:
calc = GPAW('h2o-xas.gpw', txt=None)
calc.initialize()
xas = XAS(calc)
x, y = xas.get_spectra()
e2_n = xas.eps_n
w_n = np.sum(xas.sigma_cn.real**2, axis=0)
de2 = e2_n[1] - e2_n[0]
    print(de2)
    print(de2 - 2.0848)
    assert abs(de2 - 2.0848) < 0.001
    print(w_n[1] / w_n[0])
assert abs(w_n[1] / w_n[0] - 2.18) < 0.01
if mpi.size == 1:
assert de1 == de2
if 0:
import pylab as p
p.plot(x, y[0])
p.plot(x, sum(y))
p.show()
print(e, niter)
energy_tolerance = 0.00009
niter_tolerance = 0
equal(e, -17.9621, energy_tolerance)
|
MediaMath/Diamond
|
refs/heads/master
|
src/collectors/supervisord/supervisord.py
|
1
|
# coding=utf-8
"""
Custom collector for supervisord process control system
(github.com/Supervisor/supervisor)
Supervisor runs an XML-RPC server, which this collector uses to gather a few
basic stats on each registered process.
#### Dependencies
* xmlrpclib
* supervisor
* diamond
#### Usage
Configure supervisor's XML-RPC server (either over HTTP or Unix socket). See
supervisord.org/configuration.html for details. In the collector configuration
file, you may specify the protocol and path configuration; below are the
defaults.
<pre>
xmlrpc_server_protocol = unix
xmlrpc_server_path = /var/run/supervisor.sock
</pre>
"""
import xmlrpclib
try:
import supervisor.xmlrpc
except ImportError:
supervisor = None
import diamond.collector
class SupervisordCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(SupervisordCollector,
self).get_default_config_help()
config_help.update({
'xmlrpc_server_protocol': 'XML-RPC server protocol. '
'Options: unix, http',
'xmlrpc_server_path': 'XML-RPC server path.'
})
return config_help
def get_default_config(self):
default_config = super(SupervisordCollector, self).get_default_config()
default_config['path'] = 'supervisor'
default_config['xmlrpc_server_protocol'] = 'unix'
default_config['xmlrpc_server_path'] = '/var/run/supervisor.sock'
return default_config
def getAllProcessInfo(self):
server = None
protocol = self.config['xmlrpc_server_protocol']
path = self.config['xmlrpc_server_path']
uri = '{0}://{1}'.format(protocol, path)
self.log.debug(
'Attempting to connect to XML-RPC server "{0}"'.format(uri))
if protocol == 'unix':
server = xmlrpclib.ServerProxy('http://127.0.0.1',
supervisor.xmlrpc.SupervisorTransport(None, None, uri)
).supervisor
elif protocol == 'http':
server = xmlrpclib.Server(uri).supervisor
else:
self.log.debug(
'Invalid xmlrpc_server_protocol config setting "{0}"'.format(
protocol))
return None
return server.getAllProcessInfo()
def collect(self):
        processes = self.getAllProcessInfo()
        if not processes:
            # getAllProcessInfo() returns None on an invalid protocol
            # setting; avoid the TypeError from len(None) below.
            return
        self.log.debug('Found {0} supervisord processes'.format(len(processes)))
for process in processes:
            statPrefix = "%s.%s" % (process["group"], process["name"])
# state
self.publish(statPrefix + ".state", process["state"])
# uptime
uptime = 0
if process["statename"] == "RUNNING":
uptime = process["now"] - process["start"]
self.publish(statPrefix + ".uptime", uptime)
|
mrtyler/ansible-role-influxdb
|
refs/heads/master
|
tests/test_default.py
|
2
|
from pytest import fixture
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
# Adapted from
# http://www.axelspringerideas.de/blog/index.php/2016/08/16/continuously-delivering-infrastructure-part-1-ansible-molecule-and-testinfra/
#
# This is a little fancy for my taste. pytest fixtures are cool -- they're how
# the testinfra File and Package helpers are implemented, so I think the author
# was aiming for consistency -- but I'm not sure this one improves readability
# or maintainability. However, 1) I didn't have to write it and 2) I thought it
# might be instructive to show this approach. Contrast with the more direct
# approach I use in test_influxdb_repo_key_is_installed().
@fixture()
def Repository_exists(Command):
"""
    Tests if a YUM repo with the given name exists and is enabled:
    - **repo** - repo name to look for
    **returns** - True if the string is found
"""
def f(repo):
return (repo in Command.check_output("yum repolist"))
return f
def test_influxdb_repo_is_installed(Repository_exists):
assert Repository_exists("Influxdb upstream yum repo")
def test_influxdb_repo_key_is_installed(Command):
# Adapted from https://wiki.centos.org/TipsAndTricks/YumAndRPM:
# Show all installed GPG keys
# Thanks to forum user babo for this one-liner to show all GPG keys along
# with the corresponding repo information.
rpm_keys = Command.check_output("rpm -q gpg-pubkey --qf '%{name}-%{version}-%{release} --> %{summary}\n'")
assert "influxdb" in rpm_keys
def test_influxdb_package_is_installed(Package):
pkg = Package("influxdb")
assert pkg.is_installed
def test_which_package_is_installed(Package):
pkg = Package("which")
assert pkg.is_installed
# Influxdb will install a sysvinit script if it can't find systemd. Since we
# want influxdb to find systemd, insist that the legacy sysvinit script is not
# installed.
def test_etc_initd_influxdb_is_not_installed(File):
sysvinit = File("/etc/init.d/influxdb")
assert not sysvinit.exists
def test_influxdb_config_has_collectd_enabled(File):
config = File("/etc/influxdb/influxdb.conf").content_string
assert "# BEGIN ANSIBLE MANAGED BLOCK" in config
|
natanielruiz/android-yolo
|
refs/heads/master
|
jni-build/jni/include/tensorflow/python/client/timeline_test.py
|
20
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.client.Timeline."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import tensorflow as tf
from tensorflow.python.client import timeline
class TimelineTest(tf.test.TestCase):
def _validateTrace(self, chrome_trace_format):
# Check that the supplied string is valid JSON.
trace = json.loads(chrome_trace_format)
# It should have a top-level key containing events.
self.assertTrue('traceEvents' in trace)
# Every event in the list should have a 'ph' field.
for event in trace['traceEvents']:
self.assertTrue('ph' in event)
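    # For reference, a minimal trace that would satisfy these checks (sketch):
    #   {"traceEvents": [{"ph": "X", "name": "op", "ts": 0, "dur": 1}]}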
def testSimpleTimeline(self):
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
with tf.device('/cpu:0'):
with tf.Session() as sess:
sess.run(
tf.constant(1.0),
options=run_options,
run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
tl = timeline.Timeline(run_metadata.step_stats)
ctf = tl.generate_chrome_trace_format()
self._validateTrace(ctf)
def testTimelineCpu(self):
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
with self.test_session(use_gpu=False) as sess:
const1 = tf.constant(1.0, name='const1')
const2 = tf.constant(2.0, name='const2')
result = tf.add(const1, const2) + const1 * const2
sess.run(result, options=run_options, run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
step_stats = run_metadata.step_stats
devices = [d.device for d in step_stats.dev_stats]
self.assertTrue('/job:localhost/replica:0/task:0/cpu:0' in devices)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format()
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_dataflow=False)
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_memory=False)
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_memory=False,
show_dataflow=False)
self._validateTrace(ctf)
def testTimelineGpu(self):
if not tf.test.is_gpu_available():
return
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
with self.test_session(force_gpu=True) as sess:
const1 = tf.constant(1.0, name='const1')
const2 = tf.constant(2.0, name='const2')
result = tf.add(const1, const2) + const1 * const2
sess.run(result, options=run_options, run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
step_stats = run_metadata.step_stats
devices = [d.device for d in step_stats.dev_stats]
self.assertTrue('/job:localhost/replica:0/task:0/gpu:0' in devices)
self.assertTrue('/gpu:0/stream:all' in devices)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format()
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_dataflow=False)
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_memory=False)
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_memory=False,
show_dataflow=False)
self._validateTrace(ctf)
def testAnalysisAndAllocations(self):
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
config = tf.ConfigProto(device_count={'CPU': 3})
with tf.Session(config=config) as sess:
with tf.device('/cpu:0'):
const1 = tf.constant(1.0, name='const1')
with tf.device('/cpu:1'):
const2 = tf.constant(2.0, name='const2')
with tf.device('/cpu:2'):
result = const1 + const2 + const1 * const2
sess.run(result, options=run_options, run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
tl = timeline.Timeline(run_metadata.step_stats)
step_analysis = tl.analyze_step_stats()
ctf = step_analysis.chrome_trace.format_to_string()
self._validateTrace(ctf)
maximums = step_analysis.allocator_maximums
self.assertTrue('cpu' in maximums)
cpu_max = maximums['cpu']
# At least const1 + const2, both float32s (4 bytes each)
self.assertGreater(cpu_max.num_bytes, 8)
self.assertGreater(cpu_max.timestamp, 0)
self.assertTrue('const1' in cpu_max.tensors)
self.assertTrue('const2' in cpu_max.tensors)
def testManyCPUs(self):
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
config = tf.ConfigProto(device_count={'CPU': 3})
with tf.Session(config=config) as sess:
with tf.device('/cpu:0'):
const1 = tf.constant(1.0, name='const1')
with tf.device('/cpu:1'):
const2 = tf.constant(2.0, name='const2')
with tf.device('/cpu:2'):
result = const1 + const2 + const1 * const2
sess.run(result, options=run_options, run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
step_stats = run_metadata.step_stats
devices = [d.device for d in step_stats.dev_stats]
self.assertTrue('/job:localhost/replica:0/task:0/cpu:0' in devices)
self.assertTrue('/job:localhost/replica:0/task:0/cpu:1' in devices)
self.assertTrue('/job:localhost/replica:0/task:0/cpu:2' in devices)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format()
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_dataflow=False)
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_memory=False)
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_memory=False,
show_dataflow=False)
self._validateTrace(ctf)
if __name__ == '__main__':
tf.test.main()
|
murrayrm/python-control
|
refs/heads/master
|
control/matlab/timeresp.py
|
2
|
"""
Time response routines in the Matlab compatibility package
Note that the return arguments are different from those in the standard control package.
"""
__all__ = ['step', 'stepinfo', 'impulse', 'initial', 'lsim']
def step(sys, T=None, X0=0., input=0, output=None, return_x=False):
'''
Step response of a linear system
If the system has multiple inputs or outputs (MIMO), one input has
to be selected for the simulation. Optionally, one output may be
selected. If no selection is made for the output, all outputs are
given. The parameters `input` and `output` do this. All other
inputs are set to 0, all other outputs are ignored.
Parameters
----------
sys: StateSpace, or TransferFunction
LTI system to simulate
T: array-like or number, optional
Time vector, or simulation time duration if a number (time vector is
autocomputed if not given)
X0: array-like or number, optional
Initial condition (default = 0)
Numbers are converted to constant arrays with the correct shape.
input: int
Index of the input that will be used in this simulation.
output: int
If given, index of the output that is returned by this simulation.
Returns
-------
yout: array
Response of the system
T: array
Time values of the output
xout: array (if selected)
Individual response of each x variable
See Also
--------
lsim, initial, impulse
Examples
--------
>>> yout, T = step(sys, T, X0)
'''
from ..timeresp import step_response
# Switch output argument order and transpose outputs
out = step_response(sys, T, X0, input, output,
transpose=True, return_x=return_x)
return (out[1], out[0], out[2]) if return_x else (out[1], out[0])
def stepinfo(sysdata, T=None, yfinal=None, SettlingTimeThreshold=0.02,
RiseTimeLimits=(0.1, 0.9)):
"""
Step response characteristics (Rise time, Settling Time, Peak and others).
Parameters
----------
sysdata : StateSpace or TransferFunction or array_like
        The system data. Either an LTI system to simulate (StateSpace,
TransferFunction), or a time series of step response data.
T : array_like or float, optional
Time vector, or simulation time duration if a number (time vector is
autocomputed if not given).
        Required if sysdata is a time series of response data.
yfinal : scalar or array_like, optional
Steady-state response. If not given, sysdata.dcgain() is used for
        systems to simulate and the last value of the response data is
used for a given time series of response data. Scalar for SISO,
(noutputs, ninputs) array_like for MIMO systems.
SettlingTimeThreshold : float, optional
Defines the error to compute settling time (default = 0.02)
    RiseTimeLimits : tuple (lower_threshold, upper_threshold)
Defines the lower and upper threshold for RiseTime computation
Returns
-------
S : dict or list of list of dict
If `sysdata` corresponds to a SISO system, S is a dictionary
containing:
RiseTime:
Time from 10% to 90% of the steady-state value.
SettlingTime:
            Time to enter and remain within the default error band of 2%
SettlingMin:
Minimum value after RiseTime
SettlingMax:
Maximum value after RiseTime
Overshoot:
Percentage of the Peak relative to steady value
Undershoot:
Percentage of undershoot
Peak:
Absolute peak value
PeakTime:
time of the Peak
SteadyStateValue:
Steady-state value
If `sysdata` corresponds to a MIMO system, `S` is a 2D list of dicts.
To get the step response characteristics from the j-th input to the
i-th output, access ``S[i][j]``
See Also
--------
step, lsim, initial, impulse
Examples
--------
>>> S = stepinfo(sys, T)
"""
from ..timeresp import step_info
# Call step_info with MATLAB defaults
S = step_info(sysdata, T=T, T_num=None, yfinal=yfinal,
SettlingTimeThreshold=SettlingTimeThreshold,
RiseTimeLimits=RiseTimeLimits)
return S
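# Example usage for a SISO system (sketch):
#   S = stepinfo(sys)
#   S['RiseTime'], S['Overshoot']   # access individual characteristics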
def impulse(sys, T=None, X0=0., input=0, output=None, return_x=False):
'''
Impulse response of a linear system
If the system has multiple inputs or outputs (MIMO), one input has
to be selected for the simulation. Optionally, one output may be
selected. If no selection is made for the output, all outputs are
given. The parameters `input` and `output` do this. All other
inputs are set to 0, all other outputs are ignored.
Parameters
----------
sys: StateSpace, TransferFunction
LTI system to simulate
T: array-like or number, optional
Time vector, or simulation time duration if a number (time vector is
autocomputed if not given)
X0: array-like or number, optional
Initial condition (default = 0)
Numbers are converted to constant arrays with the correct shape.
input: int
Index of the input that will be used in this simulation.
output: int
Index of the output that will be used in this simulation.
Returns
-------
yout: array
Response of the system
T: array
Time values of the output
xout: array (if selected)
Individual response of each x variable
See Also
--------
lsim, step, initial
Examples
--------
>>> yout, T = impulse(sys, T)
'''
from ..timeresp import impulse_response
# Switch output argument order and transpose outputs
out = impulse_response(sys, T, X0, input, output,
transpose = True, return_x=return_x)
return (out[1], out[0], out[2]) if return_x else (out[1], out[0])
def initial(sys, T=None, X0=0., input=None, output=None, return_x=False):
'''
Initial condition response of a linear system
    If the system has multiple outputs (SIMO or MIMO), one output may
    optionally be selected. If no selection is made for the output, all
    outputs are given.
Parameters
----------
sys: StateSpace, or TransferFunction
LTI system to simulate
T: array-like or number, optional
Time vector, or simulation time duration if a number (time vector is
autocomputed if not given)
X0: array-like object or number, optional
Initial condition (default = 0)
Numbers are converted to constant arrays with the correct shape.
input: int
This input is ignored, but present for compatibility with step
and impulse.
output: int
If given, index of the output that is returned by this simulation.
Returns
-------
yout: array
Response of the system
T: array
Time values of the output
xout: array (if selected)
Individual response of each x variable
See Also
--------
lsim, step, impulse
Examples
--------
>>> yout, T = initial(sys, T, X0)
'''
from ..timeresp import initial_response
# Switch output argument order and transpose outputs
T, yout, xout = initial_response(sys, T, X0, output=output,
transpose=True, return_x=True)
return (yout, T, xout) if return_x else (yout, T)
def lsim(sys, U=0., T=None, X0=0.):
'''
Simulate the output of a linear system.
As a convenience for parameters `U`, `X0`:
Numbers (scalars) are converted to constant arrays with the correct shape.
The correct shape is inferred from arguments `sys` and `T`.
Parameters
----------
sys: LTI (StateSpace, or TransferFunction)
LTI system to simulate
U: array-like or number, optional
Input array giving input at each time `T` (default = 0).
If `U` is ``None`` or ``0``, a special algorithm is used. This special
algorithm is faster than the general algorithm, which is used otherwise.
T: array-like, optional for discrete LTI `sys`
Time steps at which the input is defined; values must be evenly spaced.
X0: array-like or number, optional
Initial condition (default = 0).
Returns
-------
yout: array
Response of the system.
T: array
Time values of the output.
xout: array
Time evolution of the state vector.
See Also
--------
step, initial, impulse
Examples
--------
>>> yout, T, xout = lsim(sys, U, T, X0)
'''
from ..timeresp import forced_response
# Switch output argument order and transpose outputs (and always return x)
out = forced_response(sys, T, U, X0, return_x=True, transpose=True)
return out[1], out[0], out[2]
|
Gillu13/scipy
|
refs/heads/master
|
scipy/weave/inline_tools.py
|
97
|
# should re-write compiled functions to take a local and global dict
# as input.
from __future__ import absolute_import, print_function
import sys
import os
from . import ext_tools
from . import catalog
from . import common_info
from numpy.core.multiarray import _get_ndarray_c_version
ndarray_api_version = '/* NDARRAY API VERSION %x */' % (_get_ndarray_c_version(),)
# not an easy way for the user_path_list to come in here.
# the PYTHONCOMPILED environment variable offers the most hope.
# If the user sets ``os.environ['PYTHONCOMPILED']``, that path will
# be used to compile the extension in. Note that .cpp and .so files
# will remain in that directory. See the docstring of ``catalog.catalog``
# for more details.
function_catalog = catalog.catalog()
class inline_ext_function(ext_tools.ext_function):
# Some specialization is needed for inline extension functions
def function_declaration_code(self):
code = 'static PyObject* %s(PyObject*self, PyObject* args)\n{\n'
return code % self.name
def template_declaration_code(self):
code = 'template<class T>\n' \
'static PyObject* %s(PyObject*self, PyObject* args)\n{\n'
return code % self.name
def parse_tuple_code(self):
""" Create code block for PyArg_ParseTuple. Variable declarations
for all PyObjects are done also.
This code got a lot uglier when I added local_dict...
"""
declare_return = 'py::object return_val;\n' \
'int exception_occurred = 0;\n' \
'PyObject *py__locals = NULL;\n' \
'PyObject *py__globals = NULL;\n'
py_objects = ', '.join(self.arg_specs.py_pointers())
if py_objects:
declare_py_objects = 'PyObject ' + py_objects + ';\n'
else:
declare_py_objects = ''
py_vars = ' = '.join(self.arg_specs.py_variables())
if py_vars:
init_values = py_vars + ' = NULL;\n\n'
else:
init_values = ''
parse_tuple = 'if(!PyArg_ParseTuple(args,"OO:compiled_func",'\
'&py__locals,'\
'&py__globals))\n'\
' return NULL;\n'
return declare_return + declare_py_objects + \
init_values + parse_tuple
def arg_declaration_code(self):
"""Return the declaration code as a string."""
arg_strings = [arg.declaration_code(inline=1)
for arg in self.arg_specs]
return "".join(arg_strings)
def arg_cleanup_code(self):
"""Return the cleanup code as a string."""
arg_strings = [arg.cleanup_code() for arg in self.arg_specs]
return "".join(arg_strings)
def arg_local_dict_code(self):
"""Return the code to create the local dict as a string."""
arg_strings = [arg.local_dict_code() for arg in self.arg_specs]
return "".join(arg_strings)
def function_code(self):
from .ext_tools import indent
decl_code = indent(self.arg_declaration_code(),4)
cleanup_code = indent(self.arg_cleanup_code(),4)
function_code = indent(self.code_block,4)
# local_dict_code = indent(self.arg_local_dict_code(),4)
try_code = \
' try \n' \
' { \n' \
'#if defined(__GNUC__) || defined(__ICC)\n' \
' PyObject* raw_locals __attribute__ ((unused));\n' \
' PyObject* raw_globals __attribute__ ((unused));\n' \
'#else\n' \
' PyObject* raw_locals;\n' \
' PyObject* raw_globals;\n' \
'#endif\n' \
' raw_locals = py_to_raw_dict(py__locals,"_locals");\n' \
' raw_globals = py_to_raw_dict(py__globals,"_globals");\n' \
' /* argument conversion code */ \n' \
+ decl_code + \
' /* inline code */ \n' \
+ function_code + \
' /*I would like to fill in changed locals and globals here...*/ \n' \
' }\n'
catch_code = "catch(...) \n" \
"{ \n" + \
" return_val = py::object(); \n" \
" exception_occurred = 1; \n" \
"} \n"
return_code = " /* cleanup code */ \n" + \
cleanup_code + \
" if(!(PyObject*)return_val && !exception_occurred)\n" \
" {\n \n" \
" return_val = Py_None; \n" \
" }\n \n" \
" return return_val.disown(); \n" \
"} \n"
all_code = self.function_declaration_code() + \
indent(self.parse_tuple_code(),4) + \
try_code + \
indent(catch_code,4) + \
return_code
return all_code
def python_function_definition_code(self):
args = (self.name, self.name)
function_decls = '{"%s",(PyCFunction)%s , METH_VARARGS},\n' % args
return function_decls
class inline_ext_module(ext_tools.ext_module):
def __init__(self,name,compiler=''):
ext_tools.ext_module.__init__(self,name,compiler)
self._build_information.append(common_info.inline_info())
function_cache = {}
def inline(code,arg_names=[],local_dict=None, global_dict=None,
force=0,
compiler='',
verbose=0,
support_code=None,
headers=[],
customize=None,
type_converters=None,
auto_downcast=1,
newarr_converter=0,
**kw):
"""
Inline C/C++ code within Python scripts.
``inline()`` compiles and executes C/C++ code on the fly. Variables
in the local and global Python scope are also available in the
C/C++ code. Values are passed to the C/C++ code by assignment
    much like variables are passed into a standard Python
function. Values are returned from the C/C++ code through a
special argument called return_val. Also, the contents of
mutable objects can be changed within the C/C++ code and the
changes remain after the C code exits and returns to Python.
inline has quite a few options as listed below. Also, the keyword
arguments for distutils extension modules are accepted to
specify extra information needed for compiling.
Parameters
----------
code : string
        A string of valid C++ code. It should not specify a return
        statement. Instead it should assign results that need to be
        returned to Python to the special variable `return_val`.
arg_names : [str], optional
A list of Python variable names that should be transferred from
        Python into the C/C++ code. It defaults to an empty list.
local_dict : dict, optional
If specified, it is a dictionary of values that should be used as
the local scope for the C/C++ code. If local_dict is not
specified the local dictionary of the calling function is used.
global_dict : dict, optional
If specified, it is a dictionary of values that should be used as
the global scope for the C/C++ code. If `global_dict` is not
specified, the global dictionary of the calling function is used.
force : {0, 1}, optional
If 1, the C++ code is compiled every time inline is called. This
is really only useful for debugging, and probably only useful if
        you're editing `support_code` a lot.
compiler : str, optional
The name of compiler to use when compiling. On windows, it
understands 'msvc' and 'gcc' as well as all the compiler names
understood by distutils. On Unix, it'll only understand the
values understood by distutils. (I should add 'gcc' though to
this).
On windows, the compiler defaults to the Microsoft C++ compiler.
If this isn't available, it looks for mingw32 (the gcc compiler).
On Unix, it'll probably use the same compiler that was used when
compiling Python. Cygwin's behavior should be similar.
verbose : {0,1,2}, optional
Specifies how much information is printed during the compile
phase of inlining code. 0 is silent (except on windows with msvc
where it still prints some garbage). 1 informs you when compiling
starts, finishes, and how long it took. 2 prints out the command
        lines for the compilation process and can be useful if you're having
        problems getting code to work. It's handy for finding the name of
the .cpp file if you need to examine it. verbose has no effect if
the compilation isn't necessary.
support_code : str, optional
A string of valid C++ code declaring extra code that might be
needed by your compiled function. This could be declarations of
functions, classes, or structures.
headers : [str], optional
A list of strings specifying header files to use when compiling
the code. The list might look like ``["<vector>","'my_header'"]``.
        Note that the header strings need to be in a form that can be
pasted at the end of a ``#include`` statement in the C++ code.
customize : base_info.custom_info, optional
An alternative way to specify `support_code`, `headers`, etc. needed
by the function. See :mod:`scipy.weave.base_info` for more
details. (not sure this'll be used much).
type_converters : [type converters], optional
These guys are what convert Python data types to C/C++ data types.
If you'd like to use a different set of type conversions than the
default, specify them here. Look in the type conversions section
of the main documentation for examples.
auto_downcast : {1,0}, optional
This only affects functions that have numpy arrays as input
variables. Setting this to 1 will cause all floating point values
to be cast as float instead of double if all the Numeric arrays
are of type float. If even one of the arrays has type double or
double complex, all variables maintain their standard
types.
newarr_converter : int, optional
Unused.
Other Parameters
----------------
Relevant :mod:`distutils` keywords. These are duplicated from Greg Ward's
:class:`distutils.extension.Extension` class for convenience:
sources : [string]
List of source filenames, relative to the distribution root
(where the setup script lives), in Unix form (slash-separated)
for portability. Source files may be C, C++, SWIG (.i),
platform-specific resource files, or whatever else is recognized
by the "build_ext" command as source for a Python extension.
        .. note:: The `module_path` file is always prepended to this list.
include_dirs : [string]
List of directories to search for C/C++ header files (in Unix
form for portability).
define_macros : [(name : string, value : string|None)]
List of macros to define; each macro is defined using a 2-tuple,
where 'value' is either the string to define it to or None to
define it without a particular value (equivalent of "#define
FOO" in source or -DFOO on Unix C compiler command line).
undef_macros : [string]
List of macros to undefine explicitly.
library_dirs : [string]
List of directories to search for C/C++ libraries at link time.
libraries : [string]
List of library names (not filenames or paths) to link against.
runtime_library_dirs : [string]
List of directories to search for C/C++ libraries at run time
(for shared extensions, this is when the extension is loaded).
extra_objects : [string]
List of extra files to link with (e.g. object files not implied
by 'sources', static libraries that must be explicitly specified,
binary resource files, etc.)
extra_compile_args : [string]
Any extra platform- and compiler-specific information to use
when compiling the source files in 'sources'. For platforms and
compilers where "command line" makes sense, this is typically a
list of command-line arguments, but for other platforms it could
be anything.
extra_link_args : [string]
Any extra platform- and compiler-specific information to use
when linking object files together to create the extension (or
to create a new static Python interpreter). Similar
interpretation as for 'extra_compile_args'.
export_symbols : [string]
List of symbols to be exported from a shared extension. Not
used on all platforms, and not generally necessary for Python
extensions, which typically export exactly one symbol: "init" +
extension_name.
swig_opts : [string]
Any extra options to pass to SWIG if a source file has the .i
extension.
depends : [string]
List of files that the extension depends on.
language : string
Extension language (i.e. "c", "c++", "objc"). Will be detected
from the source extensions if not provided.
See Also
--------
distutils.extension.Extension : Describes additional parameters.
"""
# this grabs the local variables from the *previous* call
# frame -- that is the locals from the function that called
# inline.
global function_catalog
call_frame = sys._getframe().f_back
if local_dict is None:
local_dict = call_frame.f_locals
if global_dict is None:
global_dict = call_frame.f_globals
if force:
module_dir = global_dict.get('__file__',None)
func = compile_function(code,arg_names,local_dict,
global_dict,module_dir,
compiler=compiler,
verbose=verbose,
support_code=support_code,
headers=headers,
customize=customize,
type_converters=type_converters,
auto_downcast=auto_downcast,
**kw)
function_catalog.add_function(code,func,module_dir)
results = attempt_function_call(code,local_dict,global_dict)
else:
# 1. try local cache
try:
            results = function_cache[code](local_dict, global_dict)
return results
except TypeError as msg:
msg = str(msg).strip()
if msg[:16] == "Conversion Error":
pass
else:
raise TypeError(msg)
except NameError as msg:
msg = str(msg).strip()
if msg[:16] == "Conversion Error":
pass
else:
raise NameError(msg)
except KeyError:
pass
# 2. try function catalog
try:
results = attempt_function_call(code,local_dict,global_dict)
# 3. build the function
except ValueError:
# compile the library
module_dir = global_dict.get('__file__',None)
func = compile_function(code,arg_names,local_dict,
global_dict,module_dir,
compiler=compiler,
verbose=verbose,
support_code=support_code,
headers=headers,
customize=customize,
type_converters=type_converters,
auto_downcast=auto_downcast,
**kw)
function_catalog.add_function(code,func,module_dir)
results = attempt_function_call(code,local_dict,global_dict)
return results
def attempt_function_call(code,local_dict,global_dict):
# we try 3 levels here -- a local cache first, then the
# catalog cache, and then persistent catalog.
#
global function_catalog
# 1. try local cache
try:
        results = function_cache[code](local_dict, global_dict)
return results
except TypeError as msg:
msg = str(msg).strip()
if msg[:16] == "Conversion Error":
pass
else:
raise TypeError(msg)
except NameError as msg:
msg = str(msg).strip()
if msg[:16] == "Conversion Error":
pass
else:
raise NameError(msg)
except KeyError:
pass
# 2. try catalog cache.
function_list = function_catalog.get_functions_fast(code)
for func in function_list:
try:
            results = func(local_dict, global_dict)
function_catalog.fast_cache(code,func)
function_cache[code] = func
return results
except TypeError as msg: # should specify argument types here.
# This should really have its own error type, instead of
# checking the beginning of the message, but I don't know
# how to define that yet.
msg = str(msg)
if msg[:16] == "Conversion Error":
pass
else:
raise TypeError(msg)
except NameError as msg:
msg = str(msg).strip()
if msg[:16] == "Conversion Error":
pass
else:
raise NameError(msg)
# 3. try persistent catalog
module_dir = global_dict.get('__file__',None)
function_list = function_catalog.get_functions(code,module_dir)
for func in function_list:
try:
            results = func(local_dict, global_dict)
function_catalog.fast_cache(code,func)
function_cache[code] = func
return results
except: # should specify argument types here.
pass
# if we get here, the function wasn't found
raise ValueError('function with correct signature not found')
def inline_function_code(code,arg_names,local_dict=None,
global_dict=None,auto_downcast=1,
type_converters=None,compiler=''):
call_frame = sys._getframe().f_back
if local_dict is None:
local_dict = call_frame.f_locals
if global_dict is None:
global_dict = call_frame.f_globals
ext_func = inline_ext_function('compiled_func',code,arg_names,
local_dict,global_dict,auto_downcast,
type_converters=type_converters)
from . import build_tools
compiler = build_tools.choose_compiler(compiler)
ext_func.set_compiler(compiler)
return ext_func.function_code()
def compile_function(code,arg_names,local_dict,global_dict,
module_dir,
compiler='',
verbose=1,
support_code=None,
headers=[],
customize=None,
type_converters=None,
auto_downcast=1,
**kw):
# figure out where to store and what to name the extension module
# that will contain the function.
# storage_dir = catalog.intermediate_dir()
code = ndarray_api_version + '\n' + code
module_path = function_catalog.unique_module_name(code, module_dir)
storage_dir, module_name = os.path.split(module_path)
mod = inline_ext_module(module_name,compiler)
# create the function. This relies on the auto_downcast and
# type factories setting
ext_func = inline_ext_function('compiled_func',code,arg_names,
local_dict,global_dict,auto_downcast,
type_converters=type_converters)
mod.add_function(ext_func)
# if customize (a custom_info object), then set the module customization.
if customize:
mod.customize = customize
# add the extra "support code" needed by the function to the module.
if support_code:
mod.customize.add_support_code(support_code)
# add the extra headers needed by the function to the module.
for header in headers:
mod.customize.add_header(header)
# it's nice to let the users know when anything gets compiled, as the
# slowdown is very noticeable.
if verbose > 0:
print('<weave: compiling>')
# compile code in correct location, with the given compiler and verbosity
# setting. All input keywords are passed through to distutils
mod.compile(location=storage_dir,compiler=compiler,
verbose=verbose, **kw)
# import the module and return the function. Make sure
# the directory where it lives is in the python path.
try:
sys.path.insert(0,storage_dir)
exec('import ' + module_name)
func = eval(module_name+'.compiled_func')
finally:
del sys.path[0]
return func
|
iambibhas/django
|
refs/heads/master
|
tests/i18n/exclude/__init__.py
|
428
|
# This package is used to test the --exclude option of
# the makemessages and compilemessages management commands.
# The locale directory for this app is generated automatically
# by the test cases.
from django.utils.translation import ugettext as _
# Translators: This comment should be extracted
dummy1 = _("This is a translatable string.")
# This comment should not be extracted
dummy2 = _("This is another translatable string.")
|
yongshengwang/builthue
|
refs/heads/master
|
desktop/core/ext-py/httplib2-0.8/python2/httplib2test_appengine.py
|
59
|
"""
httplib2test_appengine
A set of unit tests for httplib2.py on Google App Engine
"""
__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2011, Joe Gregorio"
import os
import sys
import unittest
# The test resources base uri
base = 'http://bitworking.org/projects/httplib2/test/'
#base = 'http://localhost/projects/httplib2/test/'
cacheDirName = ".cache"
APP_ENGINE_PATH='../../google_appengine'
sys.path.insert(0, APP_ENGINE_PATH)
import dev_appserver
dev_appserver.fix_sys_path()
from google.appengine.ext import testbed
testbed = testbed.Testbed()
testbed.activate()
testbed.init_urlfetch_stub()
import google.appengine.api
import httplib2
class AppEngineHttpTest(unittest.TestCase):
def setUp(self):
if os.path.exists(cacheDirName):
[os.remove(os.path.join(cacheDirName, file)) for file in os.listdir(cacheDirName)]
def test(self):
h = httplib2.Http()
response, content = h.request("http://bitworking.org")
self.assertEqual(httplib2.SCHEME_TO_CONNECTION['https'],
httplib2.AppEngineHttpsConnection)
        self.assertEqual(1, len(h.connections))
        self.assertEqual(response.status, 200)
        self.assertEqual(response['status'], '200')
# It would be great to run the test below, but it really tests the
# aberrant behavior of httplib on App Engine, but that special aberrant
# httplib only appears when actually running on App Engine and not when
# running via the SDK. When running via the SDK the httplib in std lib is
# loaded, which throws a different error when a timeout occurs.
#
#def test_timeout(self):
# # The script waits 3 seconds, so a timeout of more than that should succeed.
# h = httplib2.Http(timeout=7)
# r, c = h.request('http://bitworking.org/projects/httplib2/test/timeout/timeout.cgi')
#
# import httplib
# print httplib.__file__
# h = httplib2.Http(timeout=1)
# try:
# r, c = h.request('http://bitworking.org/projects/httplib2/test/timeout/timeout.cgi')
# self.fail('Timeout should have raised an exception.')
# except DeadlineExceededError:
# pass
def test_proxy_info_ignored(self):
h = httplib2.Http(proxy_info='foo.txt')
response, content = h.request("http://bitworking.org")
        self.assertEqual(response.status, 200)
class AberrationsTest(unittest.TestCase):
def setUp(self):
self.orig_apiproxy_stub_map = google.appengine.api.apiproxy_stub_map
# Force apiproxy_stub_map to None to trigger the test condition.
google.appengine.api.apiproxy_stub_map = None
reload(httplib2)
def tearDown(self):
google.appengine.api.apiproxy_stub_map = self.orig_apiproxy_stub_map
reload(httplib2)
def test(self):
self.assertNotEqual(httplib2.SCHEME_TO_CONNECTION['https'],
httplib2.AppEngineHttpsConnection)
self.assertNotEqual(httplib2.SCHEME_TO_CONNECTION['http'],
httplib2.AppEngineHttpConnection)
if __name__ == '__main__':
unittest.main()
|
snnn/tensorflow
|
refs/heads/master
|
tensorflow/python/training/coordinator.py
|
34
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Coordinator to help multiple threads stop when requested."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import sys
import threading
import time
import six
from tensorflow.python.framework import errors
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import compat
from tensorflow.python.util.tf_export import tf_export
@tf_export("train.Coordinator")
class Coordinator(object):
"""A coordinator for threads.
This class implements a simple mechanism to coordinate the termination of a
set of threads.
#### Usage:
```python
# Create a coordinator.
coord = Coordinator()
# Start a number of threads, passing the coordinator to each of them.
...start thread 1...(coord, ...)
...start thread N...(coord, ...)
# Wait for all the threads to terminate.
coord.join(threads)
```
Any of the threads can call `coord.request_stop()` to ask for all the threads
to stop. To cooperate with the requests, each thread must check for
`coord.should_stop()` on a regular basis. `coord.should_stop()` returns
`True` as soon as `coord.request_stop()` has been called.
A typical thread running with a coordinator will do something like:
```python
while not coord.should_stop():
...do some work...
```
#### Exception handling:
A thread can report an exception to the coordinator as part of the
`request_stop()` call. The exception will be re-raised from the
`coord.join()` call.
Thread code:
```python
try:
while not coord.should_stop():
...do some work...
except Exception as e:
coord.request_stop(e)
```
Main code:
```python
try:
...
coord = Coordinator()
# Start a number of threads, passing the coordinator to each of them.
...start thread 1...(coord, ...)
...start thread N...(coord, ...)
# Wait for all the threads to terminate.
coord.join(threads)
except Exception as e:
...exception that was passed to coord.request_stop()
```
To simplify the thread implementation, the Coordinator provides a
context handler `stop_on_exception()` that automatically requests a stop if
an exception is raised. Using the context handler the thread code above
can be written as:
```python
with coord.stop_on_exception():
while not coord.should_stop():
...do some work...
```
#### Grace period for stopping:
After a thread has called `coord.request_stop()` the other threads have a
  fixed time to stop; this is called the 'stop grace period' and defaults to 2
  minutes. If any of the threads is still alive after the grace period expires,
  `coord.join()` raises a RuntimeError reporting the laggards.
```python
try:
...
coord = Coordinator()
# Start a number of threads, passing the coordinator to each of them.
...start thread 1...(coord, ...)
...start thread N...(coord, ...)
# Wait for all the threads to terminate, give them 10s grace period
coord.join(threads, stop_grace_period_secs=10)
except RuntimeError:
...one of the threads took more than 10s to stop after request_stop()
...was called.
except Exception:
...exception that was passed to coord.request_stop()
```
"""
def __init__(self, clean_stop_exception_types=None):
"""Create a new Coordinator.
Args:
clean_stop_exception_types: Optional tuple of Exception types that should
cause a clean stop of the coordinator. If an exception of one of these
types is reported to `request_stop(ex)` the coordinator will behave as
if `request_stop(None)` was called. Defaults to
`(tf.errors.OutOfRangeError,)` which is used by input queues to signal
the end of input. When feeding training data from a Python iterator it
is common to add `StopIteration` to this list.
"""
if clean_stop_exception_types is None:
clean_stop_exception_types = (errors.OutOfRangeError,)
self._clean_stop_exception_types = tuple(clean_stop_exception_types)
# Protects all attributes.
self._lock = threading.Lock()
# Event set when threads must stop.
self._stop_event = threading.Event()
# Python exc_info to report.
# If not None, it should hold the returned value of sys.exc_info(), which is
# a tuple containing exception (type, value, traceback).
self._exc_info_to_raise = None
# True if we have called join() already.
self._joined = False
# Set of threads registered for joining when join() is called. These
# threads will be joined in addition to the threads passed to the join()
# call. It's ok if threads are both registered and passed to the join()
# call.
self._registered_threads = set()
def _filter_exception(self, ex):
"""Check if the exception indicated in 'ex' should be ignored.
This method examines `ex` to check if it is an exception that should be
reported to the users. If yes, it returns `ex` as is, otherwise it returns
None.
The code returns None for exception types listed in
`_clean_stop_exception_types`.
Args:
ex: None, an `Exception`, or a Python `exc_info` tuple as returned by
`sys.exc_info()`.
Returns:
ex or None.
"""
if isinstance(ex, tuple):
ex2 = ex[1]
else:
ex2 = ex
if isinstance(ex2, self._clean_stop_exception_types):
# Ignore the exception.
ex = None
return ex
def request_stop(self, ex=None):
"""Request that the threads stop.
After this is called, calls to `should_stop()` will return `True`.
    Note: If an exception is being passed in, it must be in the context of
handling the exception (i.e. `try: ... except Exception as ex: ...`) and not
a newly created one.
Args:
ex: Optional `Exception`, or Python `exc_info` tuple as returned by
`sys.exc_info()`. If this is the first call to `request_stop()` the
corresponding exception is recorded and re-raised from `join()`.
"""
with self._lock:
ex = self._filter_exception(ex)
# If we have already joined the coordinator the exception will not have a
# chance to be reported, so just raise it normally. This can happen if
      # you continue to use a session after having stopped and joined the
# coordinator threads.
if self._joined:
if isinstance(ex, tuple):
six.reraise(*ex)
elif ex is not None:
# NOTE(touts): This is bogus if request_stop() is not called
# from the exception handler that raised ex.
six.reraise(*sys.exc_info())
if not self._stop_event.is_set():
if ex and self._exc_info_to_raise is None:
if isinstance(ex, tuple):
logging.info("Error reported to Coordinator: %s",
compat.as_str_any(ex[1]),
exc_info=ex)
self._exc_info_to_raise = ex
else:
logging.info("Error reported to Coordinator: %s, %s",
type(ex),
compat.as_str_any(ex))
self._exc_info_to_raise = sys.exc_info()
# self._exc_info_to_raise should contain a tuple containing exception
# (type, value, traceback)
if (len(self._exc_info_to_raise) != 3 or
not self._exc_info_to_raise[0] or
not self._exc_info_to_raise[1]):
# Raise, catch and record the exception here so that error happens
# where expected.
try:
raise ValueError(
"ex must be a tuple or sys.exc_info must return the current "
"exception: %s"
% self._exc_info_to_raise)
except ValueError:
# Record this error so it kills the coordinator properly.
# NOTE(touts): As above, this is bogus if request_stop() is not
# called from the exception handler that raised ex.
self._exc_info_to_raise = sys.exc_info()
self._stop_event.set()
def clear_stop(self):
"""Clears the stop flag.
After this is called, calls to `should_stop()` will return `False`.
"""
with self._lock:
self._joined = False
self._exc_info_to_raise = None
if self._stop_event.is_set():
self._stop_event.clear()
def should_stop(self):
"""Check if stop was requested.
Returns:
True if a stop was requested.
"""
return self._stop_event.is_set()
@contextlib.contextmanager
def stop_on_exception(self):
"""Context manager to request stop when an Exception is raised.
Code that uses a coordinator must catch exceptions and pass
them to the `request_stop()` method to stop the other threads
managed by the coordinator.
This context handler simplifies the exception handling.
Use it as follows:
```python
with coord.stop_on_exception():
# Any exception raised in the body of the with
# clause is reported to the coordinator before terminating
# the execution of the body.
...body...
```
This is completely equivalent to the slightly longer code:
```python
try:
...body...
except:
coord.request_stop(sys.exc_info())
```
Yields:
nothing.
"""
try:
yield
except: # pylint: disable=bare-except
self.request_stop(ex=sys.exc_info())
def wait_for_stop(self, timeout=None):
"""Wait till the Coordinator is told to stop.
Args:
timeout: Float. Sleep for up to that many seconds waiting for
should_stop() to become True.
Returns:
      True if the Coordinator is told to stop, False if the timeout expired.
"""
return self._stop_event.wait(timeout)
def register_thread(self, thread):
"""Register a thread to join.
Args:
thread: A Python thread to join.
"""
with self._lock:
self._registered_threads.add(thread)
def join(self, threads=None, stop_grace_period_secs=120,
ignore_live_threads=False):
"""Wait for threads to terminate.
    This call blocks until a set of threads have terminated. The set of threads
is the union of the threads passed in the `threads` argument and the list
of threads that registered with the coordinator by calling
`Coordinator.register_thread()`.
After the threads stop, if an `exc_info` was passed to `request_stop`, that
exception is re-raised.
Grace period handling: When `request_stop()` is called, threads are given
'stop_grace_period_secs' seconds to terminate. If any of them is still
alive after that period expires, a `RuntimeError` is raised. Note that if
an `exc_info` was passed to `request_stop()` then it is raised instead of
that `RuntimeError`.
Args:
threads: List of `threading.Threads`. The started threads to join in
addition to the registered threads.
stop_grace_period_secs: Number of seconds given to threads to stop after
`request_stop()` has been called.
ignore_live_threads: If `False`, raises an error if any of the threads are
still alive after `stop_grace_period_secs`.
Raises:
RuntimeError: If any thread is still alive after `request_stop()`
is called and the grace period expires.
"""
# Threads registered after this call will not be joined.
with self._lock:
if threads is None:
threads = self._registered_threads
else:
threads = self._registered_threads.union(set(threads))
# Copy the set into a list to avoid race conditions where a new thread
# is added while we are waiting.
threads = list(threads)
# Wait for all threads to stop or for request_stop() to be called.
while any(t.is_alive() for t in threads) and not self.wait_for_stop(1.0):
pass
# If any thread is still alive, wait for the grace period to expire.
# By the time this check is executed, threads may still be shutting down,
# so we add a sleep of increasing duration to give them a chance to shut
# down without losing too many cycles.
# The sleep duration is limited to the remaining grace duration.
stop_wait_secs = 0.001
while any(t.is_alive() for t in threads) and stop_grace_period_secs >= 0.0:
time.sleep(stop_wait_secs)
stop_grace_period_secs -= stop_wait_secs
stop_wait_secs = 2 * stop_wait_secs
# Keep the waiting period within sane bounds.
# The minimum value is to avoid decreasing stop_wait_secs to a value
# that could cause stop_grace_period_secs to remain unchanged.
stop_wait_secs = max(min(stop_wait_secs, stop_grace_period_secs), 0.001)
# List the threads still alive after the grace period.
stragglers = [t.name for t in threads if t.is_alive()]
# Terminate with an exception if appropriate.
with self._lock:
self._joined = True
self._registered_threads = set()
if self._exc_info_to_raise:
six.reraise(*self._exc_info_to_raise)
elif stragglers:
if ignore_live_threads:
logging.info("Coordinator stopped with threads still running: %s",
" ".join(stragglers))
else:
raise RuntimeError(
"Coordinator stopped with threads still running: %s" %
" ".join(stragglers))
@property
def joined(self):
return self._joined
def raise_requested_exception(self):
"""If an exception has been passed to `request_stop`, this raises it."""
with self._lock:
if self._exc_info_to_raise:
six.reraise(*self._exc_info_to_raise)
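# A minimal usage sketch for the Coordinator above (illustrative only, not
# part of the original module). `MyLoop` is a hypothetical worker; the only
# API it relies on is should_stop()/request_stop()/join():
#
#   def MyLoop(coord):
#     while not coord.should_stop():
#       ...do something...
#
#   coord = Coordinator()
#   workers = [threading.Thread(target=MyLoop, args=(coord,))
#              for _ in range(4)]
#   for t in workers:
#     t.start()
#   coord.request_stop()   # or let a worker call it, e.g. on error
#   coord.join(workers)    # re-raises any exc_info passed to request_stop()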
# Threads for the standard services.
@tf_export("train.LooperThread")
class LooperThread(threading.Thread):
"""A thread that runs code repeatedly, optionally on a timer.
This thread class is intended to be used with a `Coordinator`. It repeatedly
runs code specified either as `target` and `args` or by the `run_loop()`
method.
Before each run the thread checks if the coordinator has requested stop. In
that case the looper thread terminates immediately.
If the code being run raises an exception, that exception is reported to the
coordinator and the thread terminates. The coordinator will then request all
the other threads it coordinates to stop.
You typically pass looper threads to the supervisor `Join()` method.
"""
def __init__(self, coord, timer_interval_secs, target=None, args=None,
kwargs=None):
"""Create a LooperThread.
Args:
coord: A Coordinator.
      timer_interval_secs: Time boundaries at which to call run_loop(), or None
if it should be called back to back.
target: Optional callable object that will be executed in the thread.
args: Optional arguments to pass to `target` when calling it.
kwargs: Optional keyword arguments to pass to `target` when calling it.
Raises:
ValueError: If one of the arguments is invalid.
"""
if not isinstance(coord, Coordinator):
raise ValueError("'coord' argument must be a Coordinator: %s" % coord)
super(LooperThread, self).__init__()
self.daemon = True
self._coord = coord
self._timer_interval_secs = timer_interval_secs
self._target = target
if self._target:
self._args = args or ()
self._kwargs = kwargs or {}
elif args or kwargs:
raise ValueError("'args' and 'kwargs' argument require that you also "
"pass 'target'")
self._coord.register_thread(self)
@staticmethod
def loop(coord, timer_interval_secs, target, args=None, kwargs=None):
"""Start a LooperThread that calls a function periodically.
    If `timer_interval_secs` is None the thread calls `target(*args)`
    repeatedly. Otherwise `target(*args)` is called every `timer_interval_secs`
    seconds. The thread terminates when the coordinator requests a stop.
Args:
coord: A Coordinator.
timer_interval_secs: Number. Time boundaries at which to call `target`.
target: A callable object.
args: Optional arguments to pass to `target` when calling it.
kwargs: Optional keyword arguments to pass to `target` when calling it.
Returns:
The started thread.
"""
looper = LooperThread(coord, timer_interval_secs, target=target, args=args,
kwargs=kwargs)
looper.start()
return looper
def run(self):
with self._coord.stop_on_exception():
self.start_loop()
if self._timer_interval_secs is None:
# Call back-to-back.
while not self._coord.should_stop():
self.run_loop()
else:
# Next time at which to call run_loop(), starts as 'now'.
next_timer_time = time.time()
while not self._coord.wait_for_stop(next_timer_time - time.time()):
next_timer_time += self._timer_interval_secs
self.run_loop()
self.stop_loop()
def start_loop(self):
"""Called when the thread starts."""
pass
def stop_loop(self):
"""Called when the thread stops."""
pass
def run_loop(self):
"""Called at 'timer_interval_secs' boundaries."""
if self._target:
self._target(*self._args, **self._kwargs)
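# A hedged, minimal demo of LooperThread with a Coordinator (not part of the
# original module). It runs only when this file is executed directly;
# `_state` and `_tick` are illustrative names, not library API.
if __name__ == "__main__":
  _coord = Coordinator()
  _state = {"count": 0}
  def _tick():
    # Invoked every 0.1 seconds until the coordinator is told to stop.
    _state["count"] += 1
    if _state["count"] >= 5:
      _coord.request_stop()
  # loop() creates, registers, and starts the thread; join() blocks until
  # request_stop() has been called and the thread has terminated.
  _looper = LooperThread.loop(_coord, 0.1, _tick)
  _coord.join([_looper])
  print("ticks: %d" % _state["count"])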
|
kaiix/depot_tools
|
refs/heads/master
|
third_party/gsutil/gslib/project_id.py
|
51
|
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import boto
from gslib.exception import ProjectIdException
from gslib.wildcard_iterator import WILDCARD_BUCKET_ITERATOR
GOOG_PROJ_ID_HDR = 'x-goog-project-id'
class ProjectIdHandler(object):
"""Google Project ID header handling."""
def __init__(self):
"""Instantiates Project ID handler. Call after boto config file loaded."""
config = boto.config
self.project_id = config.get_value('GSUtil', 'default_project_id')
def SetProjectId(self, project_id):
"""Overrides project ID value from config file default.
Args:
project_id: Project ID to use.
"""
self.project_id = project_id
def FillInProjectHeaderIfNeeded(self, command, uri, headers):
"""Fills project ID header into headers if defined and applicable.
Args:
command: The command being run.
uri: The URI against which this command is being run.
headers: Dictionary containing optional HTTP headers to pass to boto.
Must not be None.
"""
# We only include the project ID header if it's a GS URI and a project_id
# was specified and
    # (it's an 'mb', 'disablelogging', or 'enablelogging' command or
# a boto request in integration tests or
# (an 'ls' command that doesn't specify a bucket or wildcarded bucket)).
if (uri.scheme.lower() == 'gs' and self.project_id
and (command == 'mb' or command == 'disablelogging'
or command == 'enablelogging'
or command == 'test'
or (command == 'ls' and not uri.names_bucket())
or (command == WILDCARD_BUCKET_ITERATOR))):
# Note: check for None (as opposed to "not headers") here because
# it's ok to pass empty headers.
if headers is None:
raise ProjectIdException(
'FillInProjectHeaderIfNeeded called with headers=None')
headers[GOOG_PROJ_ID_HDR] = self.project_id
    elif GOOG_PROJ_ID_HDR in headers:
del headers[GOOG_PROJ_ID_HDR]
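# A hedged usage sketch (not part of the original module): how a command
# implementation might apply the project ID header. `uri` is assumed to be a
# boto StorageUri for a 'gs' bucket; the names below are illustrative only.
#
#   handler = ProjectIdHandler()   # reads GSUtil:default_project_id
#   headers = {}
#   handler.FillInProjectHeaderIfNeeded('mb', uri, headers)
#   # If a default project ID is configured, headers now contains
#   # {'x-goog-project-id': <project_id>}; for non-qualifying commands
#   # any existing header is stripped instead.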
|
patrickcurl/ztruck
|
refs/heads/master
|
dj/lib/python2.7/site-packages/django/contrib/gis/geos/polygon.py
|
450
|
from ctypes import byref, c_uint
from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.libgeos import GEOM_PTR, get_pointer_arr
from django.contrib.gis.geos.linestring import LinearRing
from django.utils import six
from django.utils.six.moves import range
class Polygon(GEOSGeometry):
_minlength = 1
def __init__(self, *args, **kwargs):
"""
        Initializes on an exterior ring and a sequence of holes (each may be
        either a LinearRing instance, or a tuple/list from which a LinearRing
        can be constructed).
Examples of initialization, where shell, hole1, and hole2 are
valid LinearRing geometries:
>>> from django.contrib.gis.geos import LinearRing, Polygon
>>> shell = hole1 = hole2 = LinearRing()
>>> poly = Polygon(shell, hole1, hole2)
>>> poly = Polygon(shell, (hole1, hole2))
        >>> # Example where tuple parameters are used:
        >>> poly = Polygon(((0, 0), (0, 10), (10, 10), (10, 0), (0, 0)),
... ((4, 4), (4, 6), (6, 6), (6, 4), (4, 4)))
"""
if not args:
raise TypeError('Must provide at least one LinearRing, or a tuple, to initialize a Polygon.')
# Getting the ext_ring and init_holes parameters from the argument list
ext_ring = args[0]
init_holes = args[1:]
n_holes = len(init_holes)
# If initialized as Polygon(shell, (LinearRing, LinearRing)) [for backward-compatibility]
if n_holes == 1 and isinstance(init_holes[0], (tuple, list)):
if len(init_holes[0]) == 0:
init_holes = ()
n_holes = 0
elif isinstance(init_holes[0][0], LinearRing):
init_holes = init_holes[0]
n_holes = len(init_holes)
polygon = self._create_polygon(n_holes + 1, (ext_ring,) + init_holes)
super(Polygon, self).__init__(polygon, **kwargs)
def __iter__(self):
"Iterates over each ring in the polygon."
for i in range(len(self)):
yield self[i]
def __len__(self):
"Returns the number of rings in this Polygon."
return self.num_interior_rings + 1
@classmethod
def from_bbox(cls, bbox):
"Constructs a Polygon from a bounding box (4-tuple)."
x0, y0, x1, y1 = bbox
for z in bbox:
if not isinstance(z, six.integer_types + (float,)):
return GEOSGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' %
(x0, y0, x0, y1, x1, y1, x1, y0, x0, y0))
return Polygon(((x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0)))
# ### These routines are needed for list-like operation w/ListMixin ###
def _create_polygon(self, length, items):
# Instantiate LinearRing objects if necessary, but don't clone them yet
# _construct_ring will throw a TypeError if a parameter isn't a valid ring
# If we cloned the pointers here, we wouldn't be able to clean up
# in case of error.
rings = []
for r in items:
if isinstance(r, GEOM_PTR):
rings.append(r)
else:
rings.append(self._construct_ring(r))
shell = self._clone(rings.pop(0))
n_holes = length - 1
if n_holes:
holes = get_pointer_arr(n_holes)
for i, r in enumerate(rings):
holes[i] = self._clone(r)
holes_param = byref(holes)
else:
holes_param = None
return capi.create_polygon(shell, holes_param, c_uint(n_holes))
def _clone(self, g):
if isinstance(g, GEOM_PTR):
return capi.geom_clone(g)
else:
return capi.geom_clone(g.ptr)
def _construct_ring(self, param, msg=(
'Parameter must be a sequence of LinearRings or objects that can initialize to LinearRings')):
"Helper routine for trying to construct a ring from the given parameter."
if isinstance(param, LinearRing):
return param
try:
ring = LinearRing(param)
return ring
except TypeError:
raise TypeError(msg)
def _set_list(self, length, items):
# Getting the current pointer, replacing with the newly constructed
# geometry, and destroying the old geometry.
prev_ptr = self.ptr
srid = self.srid
self.ptr = self._create_polygon(length, items)
if srid:
self.srid = srid
capi.destroy_geom(prev_ptr)
def _get_single_internal(self, index):
"""
Returns the ring at the specified index. The first index, 0, will
always return the exterior ring. Indices > 0 will return the
interior ring at the given index (e.g., poly[1] and poly[2] would
return the first and second interior ring, respectively).
CAREFUL: Internal/External are not the same as Interior/Exterior!
_get_single_internal returns a pointer from the existing geometries for use
internally by the object's methods. _get_single_external returns a clone
of the same geometry for use by external code.
"""
if index == 0:
return capi.get_extring(self.ptr)
else:
# Getting the interior ring, have to subtract 1 from the index.
return capi.get_intring(self.ptr, index - 1)
def _get_single_external(self, index):
return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid)
_set_single = GEOSGeometry._set_single_rebuild
_assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild
# #### Polygon Properties ####
@property
def num_interior_rings(self):
"Returns the number of interior rings."
# Getting the number of rings
return capi.get_nrings(self.ptr)
def _get_ext_ring(self):
"Gets the exterior ring of the Polygon."
return self[0]
def _set_ext_ring(self, ring):
"Sets the exterior ring of the Polygon."
self[0] = ring
# Properties for the exterior ring/shell.
exterior_ring = property(_get_ext_ring, _set_ext_ring)
shell = exterior_ring
@property
def tuple(self):
"Gets the tuple for each ring in this Polygon."
return tuple(self[i].tuple for i in range(len(self)))
coords = tuple
@property
def kml(self):
"Returns the KML representation of this Polygon."
inner_kml = ''.join("<innerBoundaryIs>%s</innerBoundaryIs>" % self[i + 1].kml
for i in range(self.num_interior_rings))
return "<Polygon><outerBoundaryIs>%s</outerBoundaryIs>%s</Polygon>" % (self[0].kml, inner_kml)
|
Duroktar/Kuai
|
refs/heads/master
|
kuai/backends/__init__.py
|
1
|
import os
from functools import partial
from pluginbase import PluginBase
from kuai.helpers import WeakCallback, Singleton, singleton_object
here = os.path.abspath(os.path.dirname(__file__))
get_path = partial(os.path.join, here)
plugin_base = PluginBase(package='kuai.plugins')
# ------------------------------------
__all__ = [
'Manager', 'set_backend', 'which_backend',
'WeakCallback', 'Singleton', 'singleton_object'
]
@singleton_object
class Manager(metaclass=Singleton):
_backends = {}
current_backend = None
def __init__(self):
self._source = plugin_base.make_plugin_source(
searchpath=[here]
)
self.set_backend()
@property
def backend(self):
return self._backend
def list_backends(self):
return self._source.list_plugins()
def _load_backend(self, name):
backend = self._source.load_plugin(name)
backend.setup(self)
def register_backend(self, name, backend):
self._backends[name] = backend
def set_backend(self, name='simple'):
try:
self._load_backend(name)
except Exception as e:
print(e)
print("Kuai: {} didn't work.. See error above. "
"Please select from,\n".format(name))
for each in self._source.list_plugins():
print(" - ", each)
exit(1)
else:
self.current_backend = name
self._backend = self._backends[name]
def set_backend(name):
Manager.set_backend(name)
def which_backend():
print("Backend:", Manager.backend.name)
|
lexor90/node-compiler
|
refs/heads/master
|
node/deps/v8/tools/testrunner/server/local_handler.py
|
123
|
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import socket
import SocketServer
import StringIO
from . import compression
from . import constants
def LocalQuery(query):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
code = sock.connect_ex(("localhost", constants.CLIENT_PORT))
if code != 0: return None
compression.Send(query, sock)
compression.Send(constants.END_OF_STREAM, sock)
rec = compression.Receiver(sock)
data = None
while not rec.IsDone():
data = rec.Current()
assert data[0] == query[0]
data = data[1]
rec.Advance()
sock.close()
return data
class LocalHandler(SocketServer.BaseRequestHandler):
def handle(self):
rec = compression.Receiver(self.request)
while not rec.IsDone():
data = rec.Current()
action = data[0]
if action == constants.REQUEST_PEERS:
with self.server.daemon.peer_list_lock:
response = [ p.Pack() for p in self.server.daemon.peers
if p.trusting_me ]
compression.Send([action, response], self.request)
elif action == constants.UNRESPONSIVE_PEER:
self.server.daemon.DeletePeer(data[1])
elif action == constants.REQUEST_PUBKEY_FINGERPRINT:
compression.Send([action, self.server.daemon.pubkey_fingerprint],
self.request)
elif action == constants.REQUEST_STATUS:
compression.Send([action, self._GetStatusMessage()], self.request)
elif action == constants.ADD_TRUSTED:
fingerprint = self.server.daemon.CopyToTrusted(data[1])
compression.Send([action, fingerprint], self.request)
elif action == constants.INFORM_DURATION:
test_key = data[1]
test_duration = data[2]
arch = data[3]
mode = data[4]
self.server.daemon.AddPerfData(test_key, test_duration, arch, mode)
elif action == constants.UPDATE_PERF:
address = data[1]
perf = data[2]
        self.server.daemon.UpdatePeerPerformance(address, perf)
rec.Advance()
compression.Send(constants.END_OF_STREAM, self.request)
def _GetStatusMessage(self):
sio = StringIO.StringIO()
sio.write("Peers:\n")
with self.server.daemon.peer_list_lock:
for p in self.server.daemon.peers:
sio.write("%s\n" % p)
sio.write("My own jobs: %d, relative performance: %.2f\n" %
(self.server.daemon.jobs, self.server.daemon.relative_perf))
# Low-priority TODO: Return more information. Ideas:
    # - whether anything is currently running,
    # - time since the last job,
    # - time since the last repository fetch,
    # - number of work packets/testcases handled since startup,
    # - slowest test(s)
result = sio.getvalue()
sio.close()
return result
class LocalSocketServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
def __init__(self, daemon):
SocketServer.TCPServer.__init__(self, ("localhost", constants.CLIENT_PORT),
LocalHandler)
self.daemon = daemon
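# A hedged usage sketch (not part of the original module): querying a running
# daemon over the local socket. LocalQuery() returns None when nothing is
# listening on constants.CLIENT_PORT. (Python 2 module, hence `print x`.)
#
#   status = LocalQuery([constants.REQUEST_STATUS])
#   if status is not None:
#     print status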
|
eptmp3/Sick-Beard
|
refs/heads/development
|
lib/requests/packages/chardet2/gb2312freq.py
|
323
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# GB2312 most frequently used character table
#
# Char to FreqOrder table, from hz6763
# 512 --> 0.79 -- 0.79
# 1024 --> 0.92 -- 0.13
# 2048 --> 0.98 -- 0.06
# 6768 --> 1.00 -- 0.02
#
# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
# Random Distribution Ratio = 512 / (3755 - 512) = 0.157
#
# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR
GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
GB2312_TABLE_SIZE = 3760
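# A hedged illustration (not part of the original table) of the arithmetic
# behind the ratios quoted above; the detector that actually consumes these
# constants lives elsewhere in the package.
#
#   ideal_ratio  = 0.79135 / (1 - 0.79135)  # ~3.79: top-512 chars vs. the rest
#   random_ratio = 512.0 / (3755 - 512)     # ~0.157: expected for uniform text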
GB2312CharToFreqOrder = ( \
1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575,
2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606,
360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052,
198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26,
3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403,
3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940,
836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121,
1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233,
1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094,
4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152,
3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909,
509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221,
2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360,
4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243,
1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257,
3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781,
1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937,
930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789,
396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451,
3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780,
2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745,
776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657,
163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536,
1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894,
915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541,
1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754,
1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99,
1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280,
160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885,
125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131,
259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947,
774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814,
4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480,
3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769,
1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207,
57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623,
193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616,
3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377,
1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315,
470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557,
3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503,
448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27,
1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31,
475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881,
1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276,
1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843,
3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770,
3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713,
768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014,
1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510,
386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459,
1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232,
1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512
# Everything below is of no interest for detection purposes
5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636,
5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874,
5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278,
3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806,
4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827,
5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512,
5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578,
4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828,
4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105,
4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189,
4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561,
3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226,
6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778,
4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039,
6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404,
4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213,
4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739,
4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328,
5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592,
3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424,
4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270,
3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232,
4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456,
4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121,
6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971,
6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409,
5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519,
4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367,
6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834,
4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460,
5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464,
5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709,
5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906,
6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530,
3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262,
6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920,
4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190,
5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318,
6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538,
6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697,
4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544,
5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016,
4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638,
5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006,
5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071,
4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552,
4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556,
5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432,
4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632,
4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885,
5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336,
4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729,
4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854,
4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332,
5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004,
5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419,
4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293,
3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580,
4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339,
6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341,
5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493,
5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046,
4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904,
6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728,
5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350,
6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233,
4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944,
5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413,
5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700,
3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999,
5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694,
6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571,
4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359,
6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178,
4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421,
4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330,
6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855,
3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587,
6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803,
4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791,
3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304,
3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445,
3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506,
4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856,
2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057,
5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777,
4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369,
5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028,
5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914,
5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175,
4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681,
5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534,
4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912,
5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054,
1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336,
3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666,
4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375,
4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113,
6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614,
4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173,
5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197,
3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271,
5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423,
5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529,
5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921,
3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837,
5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922,
5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187,
3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382,
5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628,
5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683,
5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053,
6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928,
4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662,
6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663,
4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554,
3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191,
4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013,
5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932,
5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055,
5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829,
3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096,
3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660,
6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199,
6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748,
5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402,
6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957,
6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668,
6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763,
6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407,
6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051,
5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429,
6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791,
6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028,
3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305,
3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159,
4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683,
4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372,
3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514,
5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544,
5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472,
5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716,
5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905,
5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327,
4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030,
5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281,
6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224,
5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327,
4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062,
4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354,
6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065,
3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953,
4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681,
4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708,
5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442,
6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387,
6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237,
4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713,
6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547,
5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957,
5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337,
5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074,
5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685,
5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455,
4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722,
5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615,
5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093,
5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989,
5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094,
6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212,
4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967,
5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733,
4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260,
4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864,
6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353,
4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095,
6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287,
3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504,
5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539,
6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750,
6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864,
6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213,
5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573,
6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252,
6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970,
3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703,
5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978,
4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767)
|
icomms/rapidsms
|
refs/heads/master
|
apps/injector/app.py
|
8
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import rapidsms
import re
class App(rapidsms.app.App):
"""This app allows callers to send in messages on behalf of other
callers - even across different backends. This is most useful
    when RapidSMS explodes and drops important messages. Rather
than asking the caller to try again, we can inject the message
on their behalf, ensuring that they receive their response. To
inject a message, send:
inject <BACKEND> <IDENTITY> <TEXT>"""
prefix = re.compile(r'^inject(?:\s+(.+))?$', re.I)
pattern = re.compile(r'^\s*(\S+?)\s+(\S+?)\s+(.+)$', re.I)
def handle(self, msg):
# check if this message was intended
# for us, via a very liberal regex
m = self.prefix.match(msg.text)
if m is None:
return False
# extract the arguments via a proper regex,
# and abort if it didn't contain everything
m = self.pattern.match(m.group(1))
if m is None:
msg.respond("Invalid inject syntax.\n" +\
"Try: inject <BACKEND> <IDENTITY> <TEXT>")
return True
# resolve the message into a real backend
be_name, identity, text = m.groups()
backend = self.router.get_backend(be_name)
# check that the target backend was valid
if backend is None:
msg.respond("There is no backend named: %s. Try one of: %s" %
(be_name, ", ".join([be.slug for be in self.router.backends])))
        # create and send the message, as if it
# had originated from the named backend
else:
backend.route(backend.message(identity, text))
msg.respond("Your message was injected.")
# short-circuit
return True
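# A hedged walk-through (not part of the original app) of the two-stage parse
# above. Given the incoming text:
#
#   inject gsm +2348012345678 hello world
#
# `prefix` matches and captures "gsm +2348012345678 hello world"; `pattern`
# then splits that capture into backend="gsm", identity="+2348012345678",
# and text="hello world", which are routed via the named backend.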
|
mcflugen/topoflow
|
refs/heads/master
|
topoflow/components/erode_base.py
|
2
|
## Copyright (c) 2001-2013, Scott D. Peckham
##
## Jan 2013. Fixed bug in create_initial_DEM() (PLANE option)
## Jan 2009 (converted from IDL)
## September, October, November 2009
## February 2010 (local time-stepping)
## August 2010 (updated to use new model_output, conventions, etc.)
## Sep to Nov 2011 (miscellaneous)
## Jan 16, 2012 (Fixed bug: self.time_step_type vs. self.FIXED_STEPS.)
## NOTE: D8 components no longer compute "noflow_IDs" by default
## even though d8_global.py has a method to do so. It is
## only used here for the dt method that uses update_min_dz_up().
## See: FILL_PITS_IN_Z0 (in initialize_DEM()) ##################
#-----------------------------------------------------------------------
#
# class erosion_component
#
# set_constants()
#--------------------------
# initialize()
# update() # (in erode_d8_local.py & erode_d8_global.py)
# finalize()
# set_computed_input_vars()
#----------------------------------
# initialize_DEM()
# create_initial_DEM()
# initialize_boundary_conditions()
# initialize_computed_vars()
#----------------------------------
# update_R()
# update_R_integral()
# update_U()
# update_U_integral()
# update_base_level()
# update_DEM_edge_values() ###
# update_d8_vars()
#-----------------------------------------------
# See erode_d8_local.py and erode_d8_global.py
#-----------------------------------------------
##### update()
##### update_slope_grid()
##### update_Q_grid()
##### update_Qs_grid()
##### update_dz_dt()
##### update_DEM()
#--------------------------------
# fill_pits_in_DEM() ###
# update_DEM_min_and_max()
#--------------------------------
# update_dt_grid()
# update_dt_grid_method1() ## (3/9/10, based on original)
# update_min_dz_up_grid() ## (2/19/10)
# update_dt_grid_method2() ## (2/9/10, based on min_dz_up)
# update_dt_grid_local1() ## (2/19/10)
# update_n_grid() ## (2/19/10)
# update_n_grid2() ## (2/19/10) COMMENTED OUT
#--------------------------------
# print_mins_and_maxes() ## (3/12/10)
# check_stability()
# check_finished()
# check_steady_state() ### (not written yet)
#--------------------------------
# open_input_files()
# read_input_files()
# close_input_files()
#--------------------------------
# update_outfile_names()
# open_output_files()
# write_output_files()
# close_output_files()
# save_grids()
# save_pixel_values()
# print_time_and_value() ## (override one in BMI_base.py)
#
#---------------------------
# THESE ARE NOT USED YET
#---------------------------
# Get_Timestep()
# Stable_Timestep()
#
#-----------------------------------------------------------------------
import numpy as np
import os.path
import time
#----------------------
# Could also do this.
#----------------------
# from numpy import where, logical_and, logical_or
from topoflow.components import d8_global
from topoflow.utils import BMI_base
from topoflow.utils import fill_pits
from topoflow.utils import midpoints
from topoflow.utils import model_input
from topoflow.utils import model_output
from topoflow.utils import pixels
from topoflow.utils import rtg_files
from topoflow.utils import rti_files
#-------------------------------------------
# For use outside of the TopoFlow package.
#-------------------------------------------
##import d8_global
###------------------
##import BMI_base
##import fill_pits #####
##import midpoints #####
##import model_input
##import model_output
##import pixels
##import rtg_files
##import rti_files
## from model_output import * ## ELIMINATE THIS
#-----------------------------------------------------------------------
class erosion_component( BMI_base.BMI_component ):
#-------------------------------------------------------------------
def set_constants(self):
#---------------------------------------------
# (2/5/12) Set default stopping method:
# (0=n_steps, 1=stop_time, 2=steady-state)
# This is now done in initialize_time_vars()
# in CSDMS_base.py. (2/14/12)
#---------------------------------------------
## self.stop_code = 0
## self.stop_time = 1000.0 # [years]
#------------------------
# Define some constants
#------------------------
self.secs_per_year = np.float64(31536000)
self.mm_per_m = np.float64(1000)
self.dz_tolerance = np.float64(1e-6)
## self.dz_tolerance = np.float64(1e-5)
## self.dz_tolerance = np.float64(1e-4)
## self.dz_tolerance = np.float64(1e-3)
#--------------------------------
# Set constants related to time
#--------------------------------
self.dt_too_small = np.float64(1e-2)
# dt_too_big = 5d # (should there be a max?)
self.dt_limit = 1e20 # [years]
self.dt = 1.0 # [year]
##########################################
# CFL_factor is now set in the CFG file.
##########################################
## self.CFL_factor = np.float64(0.2)
#--------------------------------------
# Removed this option from CFG files.
# See erode_d8_global.update().
#--------------------------------------
self.FILL_PITS = False
#-------------------------------------------
# (11/20/11) Need to avoid frequent string
# comparisons for best performance.
#-------------------------------------------
self.time_step_type = self.get_attribute( 'time_step_type' )
self.FIXED_STEPS = (self.time_step_type.lower() == 'fixed')
self.DRIVER = (self.mode == 'driver')
# set_constants()
#-------------------------------------------------------------------
def initialize(self, cfg_file=None, mode="nondriver",
SILENT=False):
#--------------------------------------------------------
# Note: CSDMS_base.run_model() changes to cfg_directory.
# And now get_neighbors_test() does this also.
#--------------------------------------------------------
if not(SILENT):
print 'Erosion component: Initializing...'
self.status = 'initializing' # (OpenMI 2.0 convention)
self.mode = mode
self.cfg_file = cfg_file
#-----------------------------------------------
# Load component parameters from a config file
#-----------------------------------------------
self.set_constants()
self.initialize_config_vars()
## print '### In erode_base.initialize():'
## print '### out_directory =', self.out_directory
## print ' '
#-------------------------------------------------------
# Makes more sense to generate RTI structure,
# instead of reading a separate RTI file. (11/7/11)
# Note: basins.py needs RTI file for read_grid_info().
# Note: read_grid_info() is in BMI_base.py.
#-------------------------------------------------------
# self.read_grid_info()
rti = rti_files.make_info( grid_file=self.z0_file,
ncols=self.nx, nrows=self.ny,
xres=self.dx, yres=self.dy )
self.rti = rti
self.da = pixels.get_da( rti )
RTI_file = self.out_directory + self.case_prefix + '.rti'
rti_files.write_info( RTI_file, rti )
#--------------------------------------------------
# Note: BMI_base.initialize_basin_vars() calls
# basins.initialize() which then calls
# BMI_base.read_grid_info(). It looks for an
# RTI file in input directory first, then in the
# output directory. (11/5/13)
#--------------------------------------------------
self.initialize_basin_vars()
#-------------------------------------------
# This must come before "Disabled" test ??
#-------------------------------------------
self.initialize_time_vars( units='years' )
#---------------------------------------------
# Create an RTI file with grid info which
# can be used by this and other components ?
#---------------------------------------------
if (self.make_z0_method != 'READ_FILE'):
out_prefix = self.out_directory + self.case_prefix
DEM_file = (out_prefix + '_2D-z0.bin')
RTI_file = (out_prefix + '.rti')
self.z0_file = DEM_file
self.rti = rti_files.make_info( DEM_file, self.nx,
self.ny, self.dx, self.dy )
rti_files.write_info(RTI_file, self.rti)
#----------------------------------
# Has component been turned off ?
#----------------------------------
if (self.comp_status == 'Disabled'):
if not(SILENT):
print 'Erosion component: Disabled.'
self.SAVE_Z_GRIDS = False # (It is True by default.)
self.SAVE_Z_PIXELS = False # (It is True by default.)
self.DONE = True
self.status = 'initialized' # (OpenMI 2.0 convention)
return
#---------------------------------------------
# Open input files needed to initialize vars
#---------------------------------------------
# Can't move read_input_files() to start of
# update(), since initial values needed here.
#---------------------------------------------
self.open_input_files()
self.read_input_files()
#-----------------------
# Initialize variables
#---------------------------------------------------------
# Do we need initialize_DEM() after initialize_d8_vars()
# for erode_d8_global.py ?? #### CHECK THIS ####
#---------------------------------------------------------
## self.initialize_d8_vars() # (depend on D8 flow grid)
## self.initialize_DEM()
## self.initialize_boundary_conditions()
## self.initialize_computed_vars()
#-----------------------
# Initialize variables
#----------------------------------------------------
# Need initialize_DEM() before initialize_d8_vars()
# for erode_d8_local.py.
#----------------------------------------------------
self.initialize_DEM() # (must come before d8_vars)
self.initialize_d8_vars() # (depend on D8 flow grid)
self.initialize_boundary_conditions()
self.initialize_computed_vars()
#--------------------------------------------
# New safety precaution. (2/14/12)
# Make sure rtg_files.write_grid() function
# does not change data type to 'float32'.
#--------------------------------------------
print 'Data type of initial DEM is:', str(self.DEM.dtype)
print ' '
#--------------------------------------------------
# Make sure self.Q_ts_file is not NULL (12/22/05)
# This is the only output file that is set by default
# and is still NULL if user hasn't opened the
# output var dialog for the channel process.
#--------------------------------------------------
if (self.SAVE_Z_PIXELS and (self.z_ts_file == '')):
self.z_ts_file = (self.case_prefix + '_0D-z.txt')
self.open_output_files()
self.status = 'initialized' # (OpenMI 2.0 convention)
# initialize()
#-------------------------------------------------------------------
def update(self, time=None, SILENT=None, REPORT=None):
#------------------------------------------------------
# Note: erode_d8_local.py and erode_d8_global.py each
# have their own version of this function.
#------------------------------------------------------
pass
# update()
#-------------------------------------------------------------------
## def update(self, time=None, SILENT=None, REPORT=None):
## if (SILENT is None): SILENT=self.SILENT
## if (REPORT is None): REPORT=self.REPORT
##
## #### if not(SILENT) and (self.time_index == 0):
## if (self.time_index == 0):
## print 'Erosion component: Processing...'
## self.status = 'updating' # (OpenMI 2.0 convention)
##
## #---------------------------------
## # Print dz_max to track progress
## #---------------------------------
## if (self.mode == 'main'):
## self.print_time_and_value(self.dz_max, 'dz_max', '[m]',
## interval=5.0, PRINT_INDEX=True)
##
## #--------------------------------------
## # Update values from other components
## #--------------------------------------
## self.update_R()
## self.update_R_integral()
## self.update_U()
## self.update_U_integral()
##
## #-------------------------
## # Update computed values
## #-------------------------
## self.update_base_level()
## self.update_DEM_edge_values() ######
## if (self.FILL_PITS):
## self.fill_pits_in_DEM(SILENT=SILENT) ############
##
## #--------------------------------------------
## # Update the D8 flow grid and all vars that
## # depend on it, including D8 area grid.
## #--------------------------------------------
## self.update_d8_vars(SILENT=SILENT, REPORT=REPORT) #########
## self.update_slope_grid(SILENT=SILENT, REPORT=REPORT)
## self.update_Q_grid(SILENT=SILENT, REPORT=REPORT)
## self.update_Qs_grid(SILENT=SILENT, REPORT=REPORT)
## self.update_dz_dt_grid(SILENT=SILENT, REPORT=REPORT)
##
## self.update_DEM(SILENT=SILENT, REPORT=REPORT)
## self.update_DEM_min_and_max()
##
## ########################################
## # CAN THE UPDATED DEM HAVE PITS ??
## # IF NOT, DON'T CALL FILL_PITS.
## ########################################
##
## #------------------------
## # Check computed values
## #------------------------
## OK = self.check_stability()
##
## #-------------------------------------------
## # Read from files as needed to update vars
## #-----------------------------------------------------
## # NB! This is currently not needed for the "erosion
## # process" because values don't change over time and
## # read_input_files() is called by initialize().
## #-----------------------------------------------------
## # if (self.time_index > 0):
## # self.read_input_files()
##
## #----------------------------------------------
## # Write user-specified data to output files ?
## #----------------------------------------------
## self.write_output_files( time )
## if (OK):
## self.status = 'updated' # (OpenMI 2.0 convention)
## else:
## self.status = 'failed'
## self.DONE = True
##
## #------------------------
## # Update internal clock
## #------------------------
## self.update_time()
## ## print 'time_index =', self.time_index
##
## #-------------------------------------------
## # Check for steady-state condition instead
## #-------------------------------------------
## self.check_finished() ######################
##
## # update()
#-------------------------------------------------------------------
def finalize(self):
self.status = 'finalizing' # (OpenMI)
self.close_input_files() ## TopoFlow input "data streams"
self.close_output_files()
#-----------------------------
# Save final DEM to DEM_file # (write a separate function ###########)
#-----------------------------
## DEM_unit = open(self.final_DEM_file, 'wb')
## if (self.rti.SWAP_ENDIAN):
## final_DEM = self.DEM.byteswap(True)
## final_DEM.tofile(DEM_unit)
## else:
## self.DEM.tofile( DEM_unit )
## DEM_unit.close()
#----------------------
# Print final message
#----------------------
print ' '
print 'min(dt), max(dt) =', self.dt_grid.min(), self.dt_grid.max()
print ' '
print 'min(z), max(z) =', self.DEM.min(), self.DEM.max()
print ' '
print 'dz_max_vec[0] =', self.dz_max_vec[0]
print 'dz_max_vec[nt-1] =', self.dz_max_vec[-1]
print 'dz_max_vec.min() =', self.dz_max_vec.min()
print 'dz_max_vec.max() =', self.dz_max_vec.max()
print ' '
self.print_final_report( comp_name='Erode 3.1 (11/15/11)' )
## comp_name='Erosion component')
## model_info='Erode 3.1 (11/15/11)',
#---------------------------
# Release all of the ports
#----------------------------------------
# Make this call in "finalize()" method
# of the component's CCA implementation file
#----------------------------------------
# self.release_cca_ports( port_names, d_services )
self.status = 'finalized' # (OpenMI)
# finalize()
#-------------------------------------------------------------------
def set_computed_input_vars(self):
self.U_mpyr = self.U / self.mm_per_m
self.BLR_mpyr = self.BLR / self.mm_per_m
#--------------------------------------------------------------------
self.FLAT = (self.make_z0_method.upper() == 'FLAT')
self.PLANE = (self.make_z0_method.upper() == 'PLANE')
self.CORNER_PLANE = (self.make_z0_method.upper() == 'CORNER_PLANE')
self.READ_FILE = (self.make_z0_method.upper() == 'READ_FILE')
#--------------------------------------------------------------------
self.GAUSSIAN = (self.noise_method.upper() == 'GAUSSIAN')
self.MIDPOINTS = (self.noise_method.upper() == 'MIDPOINTS')
self.NO_NOISE = (self.noise_method.upper() == 'NO_NOISE')
#--------------------------------------------------------------------
self.BOTTOM = (self.BC_method.upper() == 'BOTTOM')
self.RIGHT = (self.BC_method.upper() == 'RIGHT')
self.CORNER = (self.BC_method.upper() == 'CORNER')
self.FOUR_SIDES = (self.BC_method.upper() == 'FOUR_SIDES')
#---------------------------------------------------------
# Make sure that all "save_dts" are larger than or equal to
# the specified process dt. There is no point in saving
# results more often than they change.
# Issue a message to this effect if any are smaller ??
#---------------------------------------------------------
self.save_grid_dt = np.maximum(self.save_grid_dt, self.dt)
self.save_pixels_dt = np.maximum(self.save_pixels_dt, self.dt)
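#---------------------------------------------------------
# (e.g. with a hypothetical process dt = 10 [years] and a
# requested save_grid_dt = 5 [years], save_grid_dt gets
# raised to 10, since grids cannot change between steps.)
#---------------------------------------------------------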
# set_computed_input_vars()
#-------------------------------------------------------------------
## def initialize_d8_vars(self):
##
## #------------------------------------------------------
## # Note: erode_d8_local.py and erode_d8_global.py each
## # have their own version of this function.
## #------------------------------------------------------
## pass
##
## # initialize_d8_vars()
#-------------------------------------------------------------
def initialize_DEM(self, SILENT=False):
#-------------------------------------------------------
# Notes: This function initializes the DEM either from
# self.z0_file or using create_initial_DEM().
#-------------------------------------------------------
### if (self.z0_file != ''):
if (self.make_z0_method == 'READ_FILE'):
#---------------------------------------
# Read initial elevation grid from file
#---------------------------------------
DEM_unit = open(self.z0_file, 'rb')
file_size = os.path.getsize(DEM_unit.name)
dbl_size = self.nx * self.ny * np.int32(8)
flt_size = self.nx * self.ny * np.int32(4)
int_size = self.nx * self.ny * np.int32(2)
if (file_size == dbl_size):
RTG_type = 'DOUBLE'
elif (file_size == flt_size):
RTG_type = 'FLOAT'
elif (file_size == int_size):
RTG_type = 'INTEGER'
else:
print 'ERROR in initialize_DEM().'
print ' Cannot determine DEM data type.'
return
self.DEM = rtg_files.read_grid( self.z0_file, self.rti,
RTG_type=RTG_type )
if (RTG_type != 'FLOAT'):
self.DEM = np.float32(self.DEM)
if not(SILENT):
print 'Read DEM from:', self.z0_file
else:
#--------------------------------
# Create initial elevation grid
#--------------------------------
self.create_initial_DEM(SILENT=SILENT)
#-------------------------------------
# See self.update_DEM_edge_values().
#----------------------------------------------------------
# 01/13/06. Otherwise we end up with very large values on
# the indicated boundary that prevent good use of color stretches.
#----------------------------------------------------------
# self.update_DEM_edge_values()
#------------------------------------
# Save the initial DEM to a file ??
#------------------------------------
#### Check for overwrite here !!! ##############################
## DEM_unit = open(self.z0_file, 'wb')
## if (self.rti.SWAP_ENDIAN): ## (rti undefined so far)
## self.DEM.byteswap(True)
## self.DEM.tofile(DEM_unit)
## DEM_unit.close()
#---------------------------------------------------
# Option to fill pits in the initial DEM (2/23/10)
#---------------------------------------------------
FILL_PITS_IN_Z0 = False
## FILL_PITS_IN_Z0 = True
if (FILL_PITS_IN_Z0):
print 'Filling pits in initial DEM...'
self.fill_pits_in_DEM(SILENT=True)
#-------------------------------------------
# Save new DEM to a file. This overwrites
# the one saved by create_initial_DEM()
#-------------------------------------------
rtg_files.write_grid( self.DEM, self.z0_file, self.rti)
self.DEM_min = np.nanmin( self.DEM )
self.DEM_max = np.nanmax( self.DEM )
if not(SILENT):
print 'Initial (z_min, z_max) =', self.DEM_min, self.DEM_max
print ' '
# initialize_DEM()
#-------------------------------------------------------------
def create_initial_DEM(self, SILENT=False):
#------------------------------------------------------
# Notes: This routine allows a "z0_method" and a
# "noise_method" to be combined to generate
# an initial surface (z0) with optional noise.
#
# z0_methods: FLAT, PLANE, CORNER_PLANE
# noise_methods: GAUSSIAN, MIDPOINTS
# Noise grid is generated first, which is
# more efficient in the FLAT case.
# "seed" is read from CFG file and should be
# a 4- or 5-digit integer (e.g. 36421).
# If the same seed is used, the same sequence
# of random numbers is generated, which allows
# for reproducible results and comparisons.
#------------------------------------------------------
nx = self.nx # (local synonyms)
ny = self.ny
#---------------------------------------
# Uncorrelated Gaussian random numbers
# which essentially models white noise
#---------------------------------------
# When sigma or factor = 1, then range
# is pretty much -3 to 3.
#---------------------------------------
if (self.GAUSSIAN):
np.random.seed( self.seed )
self.DEM = np.random.normal(loc=0.0, scale=1.0, size=(ny, nx))
#(mean = 0.0, stddev = 1.0)
#-----------------------------------
# factor = (1 / float64(3)) #(-3,3) -> (-1,1)
# factor = factor * float64(300)
factor = self.noise_scale
#-----------------------------------
# Slope should dominate over noise
#-----------------------------------
#*** factor = factor * (slope / 2d)
self.DEM = factor * self.DEM
#------------------------------
# if (PLANE OR CORNER_PLANE) then mean=0.0 else mean=2.0
# stddev = 4.0 ;(1.0)
# DEM = (stddev * DEM) + mean
if not(SILENT):
print 'Created initial DEM by GAUSSIAN method.'
elif (self.MIDPOINTS):
#-----------------------------------------------------------
# Use "midpoint displacement" to create a fractal surface
# with correlation structure as in MARSSIM (A. Howard).
# See midpoints.py in code directory.
#-----------------------------------------------------------
nn = max(nx, ny)
n_levels = np.ceil(np.log(nn-1) / np.log(2))
n_levels = np.int16( n_levels )
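#--------------------------------------------------------
# (e.g. for a hypothetical 200 x 200 grid, nn = 200 and
#  n_levels = ceil( log(199)/log(2) ) = ceil(7.64) = 8.)
#--------------------------------------------------------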
surf = midpoints.make_fractal_surface( n_levels, H=1.5,
scale=self.noise_scale,
seed=self.seed,
SILENT=True)
self.DEM = surf[0:ny, 0:nx]
if not(SILENT):
print 'Created initial DEM by MIDPOINTS method.'
else:
self.DEM = np.float32(0)
#----------------------------
# Construct x and y grids ?
#----------------------------
if (self.PLANE or self.CORNER_PLANE):
#---------------------------
# Option 1 for x & y grids
#---------------------------
x_vals = self.dx * np.arange( nx )
y_vals = self.dy * (ny - 1 - np.arange( ny ))
x,y = np.meshgrid( x_vals, y_vals )
#----------------------------------------------------
# Note: We haven't called initialize_d8_vars() yet.
#----------------------------------------------------
# IDs = self.d8.ID_grid
#---------------------------
# Option 2 for x & y grids
#---------------------------
## ramp = np.arange(nx*ny, dtype='Int32')
## IDs = np.reshape( ramp, [ny, nx] )
## cols = (IDs % nx)
## rows = (IDs / nx)
## x = (self.dx * cols)
## y = (self.dy * (ny - 1 - rows))
#--------------------------------------
# Inclined plane tilted toward bottom
#--------------------------------------
if (self.PLANE):
z = (self.z0_plane_dz_dx * x) + \
(self.z0_plane_dz_dy * y)
self.DEM += z
#-------------------------------------------------
# Inclined plane tilted toward lower left corner
#-------------------------------------------------
if (self.CORNER_PLANE):
a = (self.z0_plane_S / np.sqrt(np.float32(2)))
b = a
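# (with a = b = S/sqrt(2), the plane's total slope,
#  sqrt(a^2 + b^2), equals z0_plane_S, tilted along
#  the diagonal)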
z = (a * x) + (b * y)
#--------------------------------------------
## z = (self.z0_plane_dz_dx * x) + \
## (self.z0_plane_dz_dy * y)
self.DEM += z
#----------------------------
# Make sure type is FLOAT ?
#-----------------------------------
# Changed to "float64". (2/14/12)
#----------------------------------
# self.DEM = np.float32( self.DEM )
self.DEM = np.float64( self.DEM )
#-------------------------
# Save new DEM to a file
#--------------------------------------------------
# Uses "write_grid() *function*, not class method
# in rtg_files.py. That function has an RTG_type
# keyword and converts type before writing.
#--------------------------------------------------
rtg_files.write_grid( self.DEM, self.z0_file, self.rti,
RTG_type='DOUBLE') ## (2/15/12)
# create_initial_DEM()
#-------------------------------------------------------------
def initialize_boundary_conditions(self):
nx = self.nx # (local synonyms)
ny = self.ny
ID_type = 'Int32'
#------------------------------------
# Use bottom row/edge as base level
#------------------------------------
if (self.BOTTOM):
self.base_IDs = np.arange(nx, dtype=ID_type) + (nx * (ny - 1))
#*** above_row = (base_IDs - nx)
#*** base_IDs = [above_row, base_IDs]
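#-----------------------------------------------------
# Example (hypothetical 5 x 4 grid): with nx=5, ny=4,
# base_IDs = arange(5) + 5*(4-1) = [15,16,17,18,19],
# the flat indices of the bottom row of the DEM.
#-----------------------------------------------------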
#---------------------------------
# Change values in the top row ?
#----------------------------------
# Copy values so slope & fluxes
# are zero at top edge for PLANE1
#----------------------------------
top_IDs = np.arange(nx, dtype=ID_type)
row2_IDs = np.arange(nx, dtype=ID_type) + nx
self.DEM[top_IDs] = self.DEM[row2_IDs]
#*** self.DEM[top_IDs] = 0.0
#-------------------------------
# Use right edge as base level
#-------------------------------
if (self.RIGHT):
self.base_IDs = nx * (np.arange(ny, dtype=ID_type) + 1)
self.base_IDs = self.base_IDs - 1
#*** prev_col = (base_IDs - 1L)
#*** base_IDs = [prev_col, base_IDs]
#---------------------------------
# Change values on the left edge
#---------------------------------
left_IDs = np.arange(ny, dtype=ID_type) * nx
#** emax = np.maximum(DEM, /NAN)
#** DEM[left_IDs] = (emax * 0.2)
self.DEM[left_IDs] = np.float32(0)
#-----------------------------------
# Use all four sides as base level
#-----------------------------------
if (self.FOUR_SIDES):
T_IDs = np.arange(nx, dtype=ID_type)
B_IDs = T_IDs + nx * (ny - 1)
L_IDs = nx * (np.arange(ny - 2, dtype=ID_type) + 1)
R_IDs = L_IDs + (nx - 1)
self.base_IDs = np.concatenate((T_IDs, B_IDs, L_IDs, R_IDs))
#---------------------------------------
# Use bottom left corner as base level
#---------------------------------------
if (self.CORNER):
ID1 = nx * (ny - 1) # (lower left pixel)
ID2 = ID1 - nx # (just above ID1)
ID3 = ID1 - (2 * nx) # (just above ID2)
self.base_IDs = np.array([ ID1, ID1 + 1, ID1 + 2,
ID2, ID2 + 1, ID2 + 2,
ID3, ID3 + 1, ID3 + 2 ], dtype=ID_type)
#--------------------------------------
# Set the initial base-level height ?
#--------------------------------------
if (self.BOTTOM or self.RIGHT or self.CORNER or self.FOUR_SIDES):
#-------------------------------------
# Subtracting 1 here is not good for
# continuing on from a previous run
#-------------------------------------
self.base_level = np.nanmin(self.DEM)
else:
self.base_level = np.float32(0)
# initialize_boundary_conditions()
#-------------------------------------------------------------------
def initialize_computed_vars(self):
self.dt = self.initialize_scalar( 0.1, dtype='float32') # [years]
## self.dt = float32(0.1) # [years]
#-------------------------
# For mass balance check
#-------------------------
self.vol_R = self.initialize_scalar( 0, dtype='float64')
self.vol_U = self.initialize_scalar( 0, dtype='float64')
self.dz_max_vec = np.zeros([self.n_steps], dtype='Float32')
self.dz_max = self.initialize_scalar( -9999, dtype='float32')
self.dz_min = self.initialize_scalar( 9999, dtype='float32')
#---------------------------------------------------
# (11/15/11) Added these for write_output_files().
# Initial values cause first grid to be saved.
#---------------------------------------------------
self.last_grid_time = -(self.save_grid_dt + 1)
self.last_pixel_time = -(self.save_pixels_dt + 1)
# self.last_model_time = -1.0 # (Don't use 0.)
# initialize_computed_vars()
#-------------------------------------------------------------------
def update_R(self):
#------------------------------------------------------
# Note: self.R is currently set by read_config_file()
#------------------------------------------------------
return
##################################################
##################################################
## CONVERT UNITS FROM [m/s] to [m/yr] BELOW !!
##################################################
##################################################
#----------------------------------------
# Compute the "excess rainrate", R.
# Each term must have same units: [m/s]
# Sum = net gain/loss rate over pixel.
#----------------------------------------------------
# R can be positive or negative. If negative, then
# water is removed from the surface at rate R until
# surface water is consumed.
#--------------------------------------------------------------
# P = precip_rate [m/s] (converted by read_input_data()).
# SM = snowmelt rate [m/s]
# GW = seep rate [m/s] (water_table intersects surface)
# ET = evap rate [m/s]
# IN = infil rate [m/s]
# MR = icemelt rate [m/s]
#--------------------------------------------------------------
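#--------------------------------------------------------------
# Sketch (assumption): if the terms above arrive in [m/s],
# they could be converted to [m/yr] via self.secs_per_year,
# e.g. R_mpyr = R_mps * self.secs_per_year # 31536000 [s/yr]
#--------------------------------------------------------------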
P_rain = self.P_rain
#--------------
# For testing
#--------------
## print '(Pmin, Pmax) =', P.min(), P.max()
## print '(SMmin, SMmax) =', SM.min(), SM.max()
## print '(GWmin, GWmax) =', GW.min(), GW.max()
## print '(ETmin, ETmax) =', ET.min(), ET.max()
## print '(INmin, INmax) =', IN.min(), IN.max()
## print '(MRmin, MRmax) =', MR.min(), MR.max()
## # print '(Hmin, Hmax) =', H.min(), H.max()
## print ' '
self.R = P_rain
## self.R = (P_rain + SM + GW + MR) - (ET + IN)
# update_R()
#-------------------------------------------------------------------
def update_R_integral(self):
#-----------------------------------------------
# Update mass total for R, sum over all pixels
#-----------------------------------------------
## volume = np.double(self.R * self.da * self.dt) # [m^3]
volume = np.float64(self.R * self.da * self.dt) # [m^3]
if (volume.size == 1):
self.vol_R += (volume * self.rti.n_pixels)
else:
self.vol_R += np.sum(volume)
# update_R_integral()
#-------------------------------------------------------------------
def update_U(self):
#------------------------------------------------------
# Note: self.U is currently set by read_config_file()
#------------------------------------------------------
return
# update_U()
#-------------------------------------------------------------------
def update_U_integral(self):
#-----------------------------------------------
# Update mass total for U, sum over all pixels
#-----------------------------------------------
## volume = np.double(self.U * self.da * self.dt) # [m^3]
volume = np.float64(self.U * self.da * self.dt) # [m^3]
if (volume.size == 1):
self.vol_U += (volume * self.rti.n_pixels)
else:
self.vol_U += np.sum(volume)
# update_U_integral()
#-------------------------------------------------------------------
def update_base_level(self):
###################################################
# NB! As written, this makes all base_IDs have
# the same elevation. Probably not what we want.
# No longer called in erode_d8_global.py.
###################################################
#--------------------------------------
# Lower base level for bottom row or
# rightmost column or LL corner, etc.
#--------------------------------------
if (self.BOTTOM or self.RIGHT or self.CORNER or self.FOUR_SIDES):
#---------------------------------------
# NB! Inside loop since dt is dynamic
#---------------------------------------
# BLR_mpyr has units of meters/yr.
#---------------------------------------
drop = (self.BLR_mpyr * self.dt)
self.base_level -= drop
self.DEM.flat[ self.base_IDs ] = self.base_level
#-------------------------------
# Maintain boundary condition (Used for Test5)
# e.g. zero out all four edges
#-------------------------------
# nx = self.nx
# ny = self.ny
# self.DEM[0,:] = 0.0 # (left edge)
# self.DEM[nx-1, :] = 0.0 # (right edge)
# self.DEM[:, 0] = 0.0 # (top edge)
# self.DEM[:, ny-1] = 0.0 # (bottom edge)
# update_base_level()
#-------------------------------------------------------------------
def update_DEM_edge_values(self):
#-------------------------------------------
# 01/16/06. Adjust DEM edge values since
# they can't erode and will otherwise stay
# big and skew the color stretches.
#-------------------------------------------
if (self.BOTTOM):
self.DEM[0,:] = self.DEM[1,:] + np.float32(0.01)
if (self.RIGHT):
self.DEM[:,0] = self.DEM[:,1] + np.float32(0.01)
## if (self.BOTTOM):
## self.DEM[:, 1] = np.nanmax( self.DEM )
## self.DEM[:, 0 ]= self.DEM[:, 1] + 0.01
## if (self.RIGHT):
## self.DEM[1, :] = np.nanmax( self.DEM )
## self.DEM[0, :] = self.DEM[1, :] + 0.01
# update_DEM_edge_values()
#-------------------------------------------------------------------
def update_d8_vars(self, SILENT=True, REPORT=False,
SAVE_RTG=False):
#--------------------------------------------------------
# Note: This is written so that it works for both
# erode_d8_local.py and erode_d8_global.py,
# because each has its own, embedded d8.update().
#--------------------------------------------------------
# Update the D8 flow grid and all vars that
# depend on it, including D8 area grid.
#---------------------------------------------
# Area grid units are either 'm^2' or 'km^2'
# based on a setting in "*_d8.cfg" file.
# All length units are given in meters.
#---------------------------------------------
# d8.update() needs a depression-filled DEM
# and can later get it from a CCA port.
#---------------------------------------------
self.d8.update( self.time, DEM=self.DEM,
SILENT=SILENT, REPORT=REPORT )
#----------------------------------------
# Erode model needs A_units to be "m^2"
#----------------------------------------
if (self.d8.A_units == 'km^2'):
self.d8.A = self.d8.A * 1e6 # [km^2 -> m^2]
#-----------------------------
# Save grid as an RTG file ?
#-----------------------------
if (SAVE_RTG):
d8_file = (self.case_prefix + '_flow.rtg')
rtg_files.write_grid( self.d8.d8_grid, d8_file, self.rti,
RTG_type='BYTE')
area_file = (self.case_prefix + '_area.rtg')
rtg_files.write_grid( self.d8.A, area_file, self.rti)
# update_d8_vars()
## #-------------------------------------------------------------------
# Note: These next few functions are implemented differently
# by erode_d8_local.py and erode_d8_global.py.
## #-------------------------------------------------------------------
## def update_slope_grid(self, SILENT=True, REPORT=False):
##
## # update_slope_grid()
## #-------------------------------------------------------------------
## def update_Q_grid(self, SILENT=True, REPORT=False):
##
## # update_Q_grid()
## #-------------------------------------------------------------------
## def update_Qs_grid(self, SILENT=True, REPORT=False):
##
## # update_Qs_grid()
## #-------------------------------------------------------------------
## def update_dz_dt_grid(self, SILENT=True, REPORT=False):
##
## # update_dz_dt_grid()
## #-------------------------------------------------------------------
## def update_DEM(self, SILENT=True, REPORT=False):
##
## # update_DEM()
#-------------------------------------------------------------------
def fill_pits_in_DEM(self, SILENT=True):
if not(SILENT):
print 'Filling depressions in DEM...'
## DEM_before = self.DEM.copy() # (For testing)
fill_pits.fill_pits(self.DEM, 'FLOAT', self.nx, self.ny,
SILENT=SILENT)
## #--------------
## # For testing
## #--------------
## w = where(DEM_before != self.DEM)
## nw = w[0].size
## print 'Number of pixels changed by fill_pits =', nw
# fill_pits_in_DEM()
#-------------------------------------------------------------------
def update_DEM_min_and_max(self, REPORT=False):
self.DEM_min = np.nanmin( self.DEM )
self.DEM_max = np.nanmax( self.DEM )
#------------------
# Optional report
#------------------
if (REPORT):
z_str = str(self.DEM_min) + ', ' + str(self.DEM_max)
print ' min(z), max(z) = ' + z_str + ' [m]'
# update_DEM_min_and_max()
#-------------------------------------------------------------------
def update_dt_grid(self, SILENT=True, REPORT=False,
SAVE_RTG=False):
self.update_dt_grid_method1( SILENT=SILENT, REPORT=REPORT,
SAVE_RTG=SAVE_RTG )
## self.update_dt_grid_method2( SILENT=SILENT, REPORT=REPORT,
## SAVE_RTG=SAVE_RTG )
# update_dt_grid()
#-------------------------------------------------------------------
def update_dt_grid_method1(self, SILENT=True, REPORT=False,
SAVE_RTG=False):
#------------------------------------------------------------
# Notes: Compute dt value that each pixel needs to
# satisfy an experimental stability condition.
#
# Idea is that no D8 "kid pixel" can contribute
# enough sediment to its D8 parent to make its
# parent have a higher elevation than it does.
#
# dt < fac * da * (del_z / del_Qs)
#
# fac = factor of safety
# DEM = current elevation grid [m]
# Qs = K * Q^m * S^n = sed. discharge [m^3/yr]
# da = pixel area grid [m^2]
# del_z = elevation drops to D8 parent pixels [m]
# del_Qs = sed. discharge "drops" to D8 parents
#------------------------------------------------------------
# Notes: fac is a factor of safety. Decreased fac from
# 0.8 to 0.4 on 8/10/04. Tried factor of 0.6 on
# 8/12/04, but for m = n = 1, one or more pits would
# form near the outlet so decreased back to 0.4.
#------------------------------------------------------------
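#------------------------------------------------------------
# Worked example (hypothetical numbers): with fac = 0.3,
# da = 900 [m^2], del_z = 0.5 [m], del_Qs = 30 [m^3/yr],
# this gives dt < 0.3 * 900 * (0.5 / 30) = 4.5 [years].
#------------------------------------------------------------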
if not(SILENT):
print 'Updating dt_grid...'
#-----------------------
# Initialize some vars
#-----------------------
dt_too_small = np.float32(1E-2)
# dt_too_big = 5d # (should there be a max?)
#-----------------------------------------------------------
# (8/17/10) RT Time Profiles at random pixels shows that
# state vars are still noisy (oscillating) for some pixels
# when we use 0.4, but much less so than when using 1.0.
# Reduced default to 0.3.
#-----------------------------------------------------------
# fac = np.float64(0.4) # (less than 1)
fac = np.float64(0.3) # (less than 1)
#-------------------------------------------------------------
# (8/17/10) With default parameters and n_steps=5000,
# using fac = 1.0 also works. Despite very similar
# drainage patterns the base level drops to -56.9028
# vs. -24.5874. Max elevations are 2.02733 and 2.9543,
# respectively, and distribution of elevations is different.
# Total simulated time is 541213 vs. 218060.
# These comments are for an erode_d8_global simulation.
#-------------------------------------------------------------
# fac = np.float64(1)
#---------------------------
# Compute downstream drops
#----------------------------------------------------
# Get a nonzero value if a pixel's elevation is
# greater than that of its parent pixel.
#----------------------------------------------------
# ##### THIS SHOULD BE TRUE EXCEPT FOR "FLATS" #######
#----------------------------------------------------
# Computing pIDs as: "self.d8.parent_IDs" works for
# erode_d8_global but doesn't for erode_d8_local.
#----------------------------------------------------
pIDs = divmod(self.d8.parent_ID_grid, self.nx)
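# (divmod maps flat parent IDs to (row, col) index arrays,
#  e.g. with nx=5, flat ID 13 -> (2, 3), so that DEM[pIDs]
#  gathers each pixel's parent elevation.)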
del_z = np.maximum((self.DEM - self.DEM[ pIDs ]), 0)
#------------------------
# Experiment 2: 8/10/04
#----------------------------------------------------
# Get a nonzero value if the amount leaving a pixel
# is more than the amount leaving its parent pixel
#----------------------------------------------------
del_Qs = np.maximum((self.Qs - self.Qs[pIDs]), 0)
#--------------------------------------------------
# Initialize dt_grid to a very long dt value.
# This becomes the value at non-deposition sites.
#--------------------------------------------------
###########################################################
###########################################################
# NB! Original version had default of 1 year vs. 1e9 !!!
###########################################################
###########################################################
self.dt_grid = np.zeros([self.ny, self.nx], dtype='float64') # [years]
self.dt_grid += 1e9 # [years]
## self.dt_grid += 10000.0 # [years] # (this is a mid-range value)
## self.dt_grid = ones([self.ny, self.nx], dtype='float64') # [years]
#----------------------------
# Find the deposition sites
#----------------------------
wd = np.where(np.logical_and((del_Qs > 0), (del_z > 0)))
nd = wd[0].size
if (nd != 0):
#------------------------------
# Compute stable dt, in years
#------------------------------
term = fac * (del_z[wd] / del_Qs[wd])
if (self.da.size == 1):
self.dt_grid[wd] = term * self.da
else:
self.dt_grid[wd] = term * self.da[wd]
else:
if not(SILENT):
print '-------------------------------------------'
print ' WARNING: There are no deposition sites.'
print '-------------------------------------------'
print ' '
#--------------------------
# Save the min and max dt
#--------------------------
self.dt_min = self.dt_grid.min()
self.dt_max = self.dt_grid.max()
## if not(SILENT):
if (self.DEBUG):
print '#########################################'
print ' dt_min =', np.around(self.dt_min, 2)
print ' dt_max =', np.around(self.dt_max, 2)
print ' in update_dt_grid() (del_z method)'
print '#########################################'
#-------------------------------
# Don't let dt get too small ?
#-----------------------------------------------------
# dt_min must be less than 1e-4 for the case m=1.5,
# n=1.0, K=1.0. Making K smaller allows dt_too_small
# to be bigger. Now K is smaller.
#-----------------------------------------------------
# Fixed units issue in Qs_Grid so should be able to
# reduce dt_too_small.
#-----------------------------------------------------
dt_grid_min = np.nanmin(self.dt_grid)
if (dt_grid_min < dt_too_small):
print '******************************************'
print ' Aborting: Stable dt is too small.'
print ' Computed dt = ' + str(dt_grid_min)
print '******************************************'
print ' '
sys.exit()
#------------------
# Optional report
#------------------
if (REPORT):
# dt_grid_min = np.nanmin(self.dt_grid)
dt_grid_max = np.nanmax(self.dt_grid)
dt_str = str(dt_grid_min) + ', ' + str(dt_grid_max)
del_z_str = str(del_z.min()) + ', ' + str(del_z.max())
del_Qs_str = str(del_Qs.min()) + ', ' + str(del_Qs.max())
print ' min(dt), max(dt) = ' + dt_str + ' [yrs]'
print ' min(del_z), max(del_z) = ' + del_z_str
print ' min(del_Qs), max(del_Qs) = ' + del_Qs_str
print ' '
#-----------------------------
# Save grid as an RTG file ?
#-----------------------------
if (SAVE_RTG):
RTG_file = (self.case_prefix + '_dt.rtg')
rtg_files.write_grid( self.dt_grid, RTG_file, self.rti )
# update_dt_grid_method1()
#-------------------------------------------------------------------
def update_min_dz_up_grid(self, SILENT=True, REPORT=False,
SAVE_RTG=False):
#-------------------------------------------------------------
# Notes: Compute elevation drop from lowest uphill neighbor.
# Any pixel without an uphill neighbor, such as peaks
# and ridges will be assigned a value of 9999.0.
#
# Pixels with a flow code of zero may either be on
# the edge or at bottom of a pit. We need to know
# min_dz_up for the pit pixels.
#
# Flats should be okay (min_dz_up == 0). We expect
# multi-pixel depressions to fill from the edges
# inward, because Qs will be zero wherever S = 0.
#-------------------------------------------------------------
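#-------------------------------------------------------------
# Sketch: for each D8 direction k below, dz_k is the drop
# from each "kid" pixel (w_k) to its parent (p_k), and
# min_dz_up[p] keeps the smallest drop seen at p. E.g. a
# parent at z = 10.0 with kids at z = 10.3 and z = 10.1
# (hypothetical values) ends up with min_dz_up = 0.1.
#-------------------------------------------------------------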
if not(SILENT):
print 'Updating min_dz_up_grid...'
#--------------
# For testing
#--------------
## print 'n1 =', self.d8.n1
## print 'n2 =', self.d8.n2
## print 'n3 =', self.d8.n3
## print 'n4 =', self.d8.n4
## print 'n5 =', self.d8.n5
## print 'n6 =', self.d8.n6
## print 'n7 =', self.d8.n7
## print 'n8 =', self.d8.n8
## n0 = self.d8.noflow_IDs[0].size
## print 'n0 =', n0
## print 'n_total =', (self.d8.n1 + self.d8.n2 + self.d8.n3 +
## self.d8.n4 + self.d8.n5 + self.d8.n6 +
## self.d8.n7 + self.d8.n8 + n0)
## print 'n_pixels =', self.rti.n_pixels
## print ' '
#-------------------------------------------------------
# Compute smallest elevation drop from any "kid pixel"
#-------------------------------------------------------
# We may need to re-initialize with 9999's here each
# time this function is called. Not sure yet.
#-------------------------------------------------------
## min_dz_up = self.min_dz_up_grid
min_dz_up = np.zeros((self.ny, self.nx), dtype='float64')
min_dz_up += 9999.0
#-------------------------------------------------------------
if (self.d8.n1 != 0):
dz1 = self.DEM[self.d8.w1] - self.DEM[self.d8.p1]
min_dz_up[self.d8.p1] = np.minimum( min_dz_up[self.d8.p1], dz1)
#-----------------------------------------------------------------------
if (self.d8.n2 != 0):
dz2 = self.DEM[self.d8.w2] - self.DEM[self.d8.p2]
min_dz_up[self.d8.p2] = np.minimum( min_dz_up[self.d8.p2], dz2)
#-----------------------------------------------------------------------
if (self.d8.n3 != 0):
dz3 = self.DEM[self.d8.w3] - self.DEM[self.d8.p3]
min_dz_up[self.d8.p3] = np.minimum( min_dz_up[self.d8.p3], dz3)
#-----------------------------------------------------------------------
if (self.d8.n4 != 0):
dz4 = self.DEM[self.d8.w4] - self.DEM[self.d8.p4]
min_dz_up[self.d8.p4] = np.minimum( min_dz_up[self.d8.p4], dz4)
#-----------------------------------------------------------------------
if (self.d8.n5 != 0):
dz5 = self.DEM[self.d8.w5] - self.DEM[self.d8.p5]
min_dz_up[self.d8.p5] = np.minimum( min_dz_up[self.d8.p5], dz5)
#-----------------------------------------------------------------------
if (self.d8.n6 != 0):
dz6 = self.DEM[self.d8.w6] - self.DEM[self.d8.p6]
min_dz_up[self.d8.p6] = np.minimum( min_dz_up[self.d8.p6], dz6)
#-----------------------------------------------------------------------
if (self.d8.n7 != 0):
dz7 = self.DEM[self.d8.w7] - self.DEM[self.d8.p7]
min_dz_up[self.d8.p7] = np.minimum( min_dz_up[self.d8.p7], dz7)
#-----------------------------------------------------------------------
if (self.d8.n8 != 0):
dz8 = self.DEM[self.d8.w8] - self.DEM[self.d8.p8]
min_dz_up[self.d8.p8] = np.minimum( min_dz_up[self.d8.p8], dz8)
#-----------------------------------
# Is min_dz_up negative anywhere ?
# This should be impossible.
#-----------------------------------
## w_neg = np.where(min_dz_up < 0)
## n_neg = w_neg[0].size
## if (n_neg != 0):
## print 'WARNING: min_dz_up < 0 at', n_neg, 'locations.'
#-------------------------------
# Is min_dz_up zero anywhere ?
#--------------------------------------------------------
# This should also be impossible if we're not doing
# any "D8 flat resolution" such as "iterative linking".
# If it did happen, then we'd probably want to look at
# the other kids and find the "min positive dz up."
#--------------------------------------------------------
# This will occur if a neighbor pixel has the same
# elevation (but flows toward this one) even if other
# "neighbor kids" have positive drops.
#--------------------------------------------------------
w_zero = np.where(min_dz_up == 0)
n_zero = w_zero[0].size
if (n_zero != 0):
print 'WARNING: min_dz_up = 0 at', n_zero, 'locations.'
#-----------------------------------
# Monitor the number of pit pixels
#-----------------------------------
### min_dz_up[ self.d8.noflow_IDs ] = 9999.0
if not(SILENT):
n0 = self.d8.noflow_IDs[0].size
n_edge = 2*(self.nx + self.ny) - 4
print 'Number of no-flow pixels =', n0
print 'Number in interior =', (n0 - n_edge)
#--------------------------------------------
# Checked these on 2/23/10 and all are zero
#--------------------------------------------
## S0 = self.S[ self.d8.noflow_IDs ]
## A0 = self.d8.A[ self.d8.noflow_IDs ]
## Q0 = self.Q[ self.d8.noflow_IDs ]
## Qs0 = self.Qs[ self.d8.noflow_IDs ]
## print 'At noflow pixels:'
## print ' Smin, Smax =', S0.min(), S0.max()
## print ' Amin, Amax =', A0.min(), A0.max()
## print ' Qmin, Qmax =', Q0.min(), Q0.max()
## print ' Qsmin, Qsmax =', Qs0.min(), Qs0.max()
#-----------------------------------------------------
# Check dz_dt for edge_IDs and noflow_IDs (interior)
#-----------------------------------------------------
if not(SILENT):
dz_dt0 = self.dz_dt[ self.d8.noflow_IDs ]
w0 = np.where(dz_dt0 == 0)
nw0 = w0[0].size
print 'At noflow pixels:'
print ' dz_dt_min, dz_dt_max =', dz_dt0.min(), dz_dt0.max()
print ' number of pixels with 0 =', nw0
dz_dte = self.dz_dt[ self.d8.edge_IDs ]
print 'At edge pixels:'
print ' dz_dt_min, dz_dt_max =', dz_dte.min(), dz_dte.max()
#--------------
# For testing
#--------------
if not(SILENT):
w = np.where(min_dz_up != 9999.0)
nw = w[0].size
if (nw == 0):
min_dz_up_max = 9999.0
else:
min_dz_up_max = min_dz_up[w].max()
print 'min_dz_up: min, max:', min_dz_up.min(), min_dz_up_max
print 'DEM: min, max:', self.DEM.min(), self.DEM.max()
#---------------------------
# Store the result in self
#---------------------------
self.min_dz_up_grid = min_dz_up
#-----------------------------
# Save grid as an RTG file ?
#-----------------------------
if (SAVE_RTG):
RTG_file = (self.case_prefix + '_min_dz_up.rtg')
rtg_files.write_grid( min_dz_up, RTG_file, self.rti )
# if not(SILENT):
print 'Saved min_dz_up_grid to:'
print ' ' + RTG_file
print ' '
# update_min_dz_up_grid()
#-------------------------------------------------------------------
## def update_min_dz_up_grid2(self, SILENT=True, REPORT=False):
##
## #-----------------------------------------------------------
## # NB! A neighbor pixel with (dz > 0) may not flow towards
## # the "center" pixel, so this approach won't work.
## #-------------------------------------------------------------
## # Notes: Compute elevation drop from lowest uphill neighbor.
## #-------------------------------------------------------------
## z = self.DEM
##
## #----------------------------------
## # Elevations of 8 neighbor pixels
## #----------------------------------
## z1 = np.roll(np.roll(z, 1, axis=0), -1, axis=1) # (upper-right)
## z2 = np.roll(z, -1, axis=1) # (right)
## z3 = np.roll(np.roll(z, -1, axis=0), -1, axis=1) # (lower-right)
## z4 = np.roll(z, -1, axis=0) # (bottom)
## z5 = np.roll(np.roll(z, -1, axis=0), 1, axis=1) # (lower-left)
## z6 = np.roll(z, 1, axis=1) # (left)
## z7 = np.roll(np.roll(z, 1, axis=0), 1, axis=1) # (upper-left)
## z8 = np.roll(z, 1, axis=0) # (top)
##
## #---------------------------------------------------
## # Grid of elevation drops *from* 8 neighbor pixels
## #---------------------------------------------------
## # NB! A neighbor pixel with (dz > 0) may not flow
## # towards the "center" pixel!
## #---------------------------------------------------
## dz1 = np.maximum(z1-z, 0)
## dz2 = np.maximum(z2-z, 0)
## dz3 = np.maximum(z3-z, 0)
## dz4 = np.maximum(z4-z, 0)
## dz5 = np.maximum(z5-z, 0)
## dz6 = np.maximum(z6-z, 0)
## dz7 = np.maximum(z7-z, 0)
## dz8 = np.maximum(z8-z, 0)
##
## min_dz_up = np.minimum(dz1, dz2)
## min_dz_up = np.minimum(min_dz_up, dz3)
## min_dz_up = np.minimum(min_dz_up, dz4)
## min_dz_up = np.minimum(min_dz_up, dz5)
## min_dz_up = np.minimum(min_dz_up, dz6)
## min_dz_up = np.minimum(min_dz_up, dz7)
## min_dz_up = np.minimum(min_dz_up, dz8)
##
## #-------------------------------
## # Is min_dz_up zero anywhere ?
## #-------------------------------
## w_neg = np.where(min_dz_up < 0)
## n_neg = w_neg[0].size
## if (n_neg != 0):
## print 'WARNING: min_dz_up < 0 at', n_neg, 'locations.'
## #-------------------------------------------------------------
## w_zero = np.where(min_dz_up == 0)
## n_zero = w_zero[0].size
## if (n_zero != 0):
## print 'WARNING: min_dz_up = 0 at', n_zero, 'locations.'
##
## #--------------
## # For testing
## #--------------
## w = where(min_dz_up != 9999.0)
## print 'min_dz_up: min, max:', min_dz_up.min(), min_dz_up[w].max()
## print 'DEM: min, max:', self.DEM.min(), self.DEM.max()
##
## #---------------------------
## # Store the result in self
## #---------------------------
## self.min_dz_up_grid = min_dz_up
##
## # update_min_dz_up_grid2()
#-------------------------------------------------------------------
def update_dt_grid_method2(self, SILENT=True, REPORT=False,
SAVE_RTG=False):
#---------------------------------------------------
# Note: Compute dt value that each pixel needs to
# satisfy CFL condition.
#---------------------------------------------------
# Where min_dz_up_grid = 0, we have S=0 and Qs=0.
#---------------------------------------------------
# Note: fac is a factor of safety that should be
# applied to the entire dt_grid.
#---------------------------------------------------
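#---------------------------------------------------
# Sketch (hypothetical numbers): with fac = 1.0,
# min_dz_up = 0.02 [m] and dz_dt = 0.004 [m/yr],
# dt = 1.0 * 0.02 / 0.004 = 5.0 [years].
#---------------------------------------------------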
if not(SILENT):
print 'Updating dt_grid...'
#----------------------------------------
# This function requires min_dz_up_grid,
# so compute/update it now.
#----------------------------------------
self.update_min_dz_up_grid(SILENT=SILENT, REPORT=REPORT,
SAVE_RTG=SAVE_RTG)
### fac = np.float64(0.0002) # (less than 1)
### fac = np.float64(0.1)
fac = np.float64(1.0) # (OK for global ??)
self.dt_grid = fac * self.min_dz_up_grid / self.dz_dt
#-------------------------------------------
# Find places where dt is not well-defined.
#-------------------------------------------
w_bad = np.where(np.logical_or(self.min_dz_up_grid == 9999.0,
self.dz_dt <= 0.0))
n_bad = w_bad[0].size
if (n_bad != 0):
self.dt_grid[w_bad] = 0.0
#--------------------------------------------------
# Set those places to the largest stable timestep
#--------------------------------------------------
dt_max = self.dt_grid.max()
if (dt_max == 0):
dt_max = 1.0 #######
#-------------------------------------------
# Include where min_dz_up_grid = 0.
#-------------------------------------------
w_bad = np.where(self.dt_grid == 0)
n_bad = w_bad[0].size
if (n_bad != 0):
self.dt_grid[w_bad] = dt_max
self.dt_min = self.dt_grid.min() #######
self.dt_max = dt_max
if not(SILENT):
print '##########################################'
print ' dt_min =', np.around(self.dt_min, 4)
print ' in update_dt_grid() (min_dz_up method)'
print '##########################################'
#-----------------------------
# Save grid as an RTG file ?
#-----------------------------
if (SAVE_RTG):
RTG_file = (self.case_prefix + '_dt.rtg')
rtg_files.write_grid( self.dt_grid, RTG_file, self.rti )
# update_dt_grid_method2()
#-------------------------------------------------------------------
def update_dt_grid_local1(self, SILENT=True, REPORT=False,
SAVE_RTG=False):
#------------------------------------------------------------
# Notes: Compute dt value that each pixel needs to
# satisfy an experimental stability condition.
#
# Idea is that no D8 "kid pixel" can contribute
# enough sediment to its D8 parent to make its
# parent have a higher elevation than it does.
#
# dt < fac * da * (del_z / del_Qs)
#
# fac = factor of safety
# DEM = current elevation grid [m]
# Qs = K * Q^m * S^n = sed. discharge [m^3/yr]
# da = pixel area grid [m^2]
# del_z = elevation drops to D8 parent pixels [m]
# del_Qs = sed. discharge "drops" to D8 parents
#------------------------------------------------------------
# Notes: fac is a factor of safety. Decreased fac from
# 0.8 to 0.4 on 8/10/04. Tried factor of 0.6 on
# 8/12/04, but for m = n = 1, one or more pits would
# form near the outlet so decreased back to 0.4.
#------------------------------------------------------------
if not(SILENT):
print 'Updating dt_grid...'
#-----------------------
# Initialize some vars
#-----------------------
fac = np.float64(0.4) # (less than 1)
dt_too_small = np.float64(1E-2)
# dt_too_big = 5d # (should there be a max?)
#---------------------------
# Compute downstream drops
#----------------------------------------------------
# Get a nonzero value if a pixel's elevation is
# greater than that of its parent pixel.
#----------------------------------------------------
# ##### THIS SHOULD BE TRUE EXCEPT FOR "FLATS" #######
#----------------------------------------------------
IDs = self.d8.IDs
pIDs = self.d8.parent_ID_grid.flat[ IDs ]
z_IDs = self.DEM.flat[ IDs ]
z_pIDs = self.DEM.flat[ pIDs ]
del_z = np.maximum((z_IDs - z_pIDs), 0)
#------------------------
# Experiment 2: 8/10/04
#----------------------------------------------------
# Get a nonzero value if the amount leaving a pixel
# is more than the amount leaving its parent pixel
#----------------------------------------------------
Qs_IDs = self.Qs.flat[ IDs ]
Qs_pIDs = self.Qs.flat[ pIDs ]
del_Qs = np.maximum((Qs_IDs - Qs_pIDs), 0)
###################################################
# THIS PART VIOLATES THE IDEA OF A "LOCAL" CALC.
###################################################
###################################################
#-----------------------------------------------
# Old version had default of 1 year, but there
# is no particular reason for that choice.
#-----------------------------------------------
self.dt_grid = np.zeros([self.ny, self.nx], dtype='float64') # [years]
self.dt_grid += 10000.0 # [years]
#----------------------------
# Find the deposition sites
#----------------------------
wd = np.where(np.logical_and((del_Qs > 0), (del_z > 0)))
nd = wd[0].size
if (nd != 0):
#------------------------------
# Compute stable dt, in years
#------------------------------
term = fac * (del_z[wd] / del_Qs[wd])
if (self.da.size == 1):
self.dt_grid.flat[IDs[wd]] = term * self.da
else:
self.dt_grid.flat[IDs[wd]] = term * self.da.flat[IDs[wd]]
else:
print '-------------------------------------------'
print ' WARNING: There are no deposition sites.'
print '-------------------------------------------'
print ' '
#--------------------------
# Save the min and max dt
#--------------------------
self.dt_min = self.dt_grid.min()
self.dt_max = self.dt_grid.max()
if not(SILENT):
print '#########################################'
print ' dt_min =', np.around(self.dt_min, 2)
print ' in update_dt_grid() (del_z method)'
print '#########################################'
#-------------------------------
# Don't let dt get too small ?
#-----------------------------------------------------
# dt_min must be less than 1e-4 for the case m=1.5,
# n=1.0, K=1.0. Making K smaller allows dt_too_small
# to be bigger. Now K is smaller.
#-----------------------------------------------------
# Fixed units issue in Qs_Grid so should be able to
# reduce dt_too_small.
#-----------------------------------------------------
dt_grid_min = np.nanmin(self.dt_grid)
if (dt_grid_min < dt_too_small):
print '******************************************'
print ' Aborting: Stable dt is too small.'
print ' Computed dt = ' + str(dt_grid_min)
print '******************************************'
print ' '
sys.exit()
#------------------
# Optional report
#------------------
if (REPORT):
# dt_grid_min = np.nanmin(self.dt_grid)
dt_grid_max = np.nanmax(self.dt_grid)
dt_str = str(dt_grid_min) + ', ' + str(dt_grid_max)
del_z_str = str(del_z.min()) + ', ' + str(del_z.max())
del_Qs_str = str(del_Qs.min()) + ', ' + str(del_Qs.max())
print ' min(dt), max(dt) = ' + dt_str + ' [yrs]'
print ' min(del_z), max(del_z) = ' + del_z_str
print ' min(del_Qs), max(del_Qs) = ' + del_Qs_str
print ' '
#-----------------------------
# Save grid as an RTG file ?
#-----------------------------
if (SAVE_RTG):
RTG_file = (self.case_prefix + '_dt.rtg')
rtg_files.write_grid( self.dt_grid, RTG_file, self.rti )
# update_dt_grid_local1()
#-------------------------------------------------------------------
def update_n_grid(self, SILENT=True, REPORT=False, STORE=True,
SAVE_RTG=False):
#--------------------------------------------------
# Notes: Bin the pixels according to the timestep
# they require to satisfy a CFL condition.
# This version is based on dt_min.
#--------------------------------------------------
# (1) Find dt_min = min(dt_grid) = the smallest
# dt required for stability.
#
# (2) Let x = (dt_grid / dt_min) >= 1.
#--------------------------------------------------
self.dt_min = self.dt_grid.min()
x = (self.dt_grid / self.dt_min)
#----------------------------------------------------
# Assign value of n to every pixel that satisfies:
#
# n * dt_min <= dt_grid < (n+1) * dt_min
# <=> n <= x < (n+1)
#
# Then pixels in n_grid with a given value of n
        #   will satisfy CFL for dt = n * dt_min.
#------------------------------------------------------
# n=1 => 1 * dt_min <= dt_grid < 2 * dt_min
# n=2 => 2 * dt_min <= dt_grid < 3 * dt_min
# n=3 => 3 * dt_min <= dt_grid < 4 * dt_min
#------------------------------------------------------
## self.n_grid = np.floor(x)
#-------------------------------------------------------
# Assign value of n to every pixel that satisfies:
#
# (4^(n-1)) * dt_min <= dt_grid < (4^n) * dt_min
# <=> (n-1) <= log(x)/log(4) < n
#
# Then pixels in n_grid with a given value of n
# will satisfy CFL for dt = (4^(n-1)) * dt_min.
#-------------------------------------------------------
# n=1 => 1 * dt_min <= dt_grid < 4 * dt_min
# n=2 => 4 * dt_min <= dt_grid < 16 * dt_min
# n=3 => 16 * dt_min <= dt_grid < 64 * dt_min
#-------------------------------------------------------
# Same idea works if we replace 4 by some other
# positive integer. Try changing this value to get
# about the same number in each bin ?
#-------------------------------------------------------
# np.floor( np.log(x) / np.log(4), a)
a = np.floor( np.log(x) / np.log(4))
n_grid = (1 + a).astype('int32')
## n_grid = 1 + np.floor(np.log(x)/np.log(4))
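        #-------------------------------------------------------
        # Example (illustrative): with x = (dt_grid / dt_min),
        #
        # x = 1  => log(1)/log(4)  = 0.00 => a = 0 => n = 1
        # x = 5  => log(5)/log(4)  ~ 1.16 => a = 1 => n = 2
        # x = 20 => log(20)/log(4) ~ 2.16 => a = 2 => n = 3
        #-------------------------------------------------------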
#------------------------------------------------
# Store the pixel IDs for all of the "n groups"
#------------------------------------------------
start = 0
n_max = n_grid.max()
n_pixels = self.rti.n_pixels
group_IDs = np.empty(n_pixels, dtype='int32')
group_start = np.zeros(n_max + 1, dtype='int32')
group_count = np.zeros(n_max + 1, dtype='int32')
for n in xrange(1, n_max+1):
#-----------------------------------------------------
# Should we use n_grid.flatten() or ravel(n_grid) ??
#-----------------------------------------------------
## group_n = np.where(self.n_grid.flatten() == n)
group_n = np.where( np.ravel(n_grid) == n )
count = group_n[0].size
#-----------------------------------------------
# Notes: When (count == 0), IDs[i:i] = [].
# group_n is a tuple.
# group_n[0] is an nd_array.
#-----------------------------------------------
group_IDs[start: start + count] = group_n[0]
group_start[n] = start
group_count[n] = count
start += count
if (start != n_pixels):
print '---------------------------------------------'
print 'ERROR: Sum over all n groups is not'
print ' equal to number of pixels in grid.'
print '---------------------------------------------'
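        #------------------------------------------------------------
        # The three arrays form a CSR-style index.  The IDs of the
        # pixels in "n group" n can be sliced out with (a sketch;
        # assumes 1 <= n <= n_max):
        #------------------------------------------------------------
        ## IDs_n = group_IDs[ group_start[n]: group_start[n] + group_count[n] ]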
#---------------------------------
# Store "n group" info in self ?
#---------------------------------
if (STORE):
self.n_grid = n_grid
self.n_max = n_max
self.group_IDs = group_IDs
self.group_start = group_start
self.group_count = group_count
else:
#-----------------------------------------
# This is used to see how n_grid changes
# inside of the update_DEM() method.
#-----------------------------------------
self.new_n_grid = n_grid
#------------------
# Optional report
#------------------
## REPORT = True ###########
## REPORT = STORE ##########
if (REPORT):
n_min = n_grid.min()
print 'n_min, n_max =', n_min, n_max
#------------------------------------------
print 'group_count ='
print group_count[1:] # (skip 0 for n=0)
#------------------------------------------
n_pixels = self.nx * self.ny
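            #------------------------------------------------------
            # Cost-model sketch: with one global timestep, every
            # pixel must update at dt_min, so a "sweep" of duration
            # 4^(n_max - 1) * dt_min costs n_pixels * 4^(n_max - 1)
            # updates.  With local steps, the M_vec[n] pixels in
            # bin n update every 4^(n-1) * dt_min, i.e. 4^(n_max - n)
            # times per sweep.
            #------------------------------------------------------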
T_global = n_pixels * 4.0**(n_max - 1)
n_vec = np.arange(1,n_max+1, dtype='int64')
M_vec = group_count[1:]
T_local = np.sum( M_vec * 4**(n_max - n_vec) )
ratio = T_global / T_local
print 'T_global =', T_global
print 'T_local =', T_local
print 'T_global / T_local =', ratio
print '------------------------------------------------------------'
#-----------------------------
# Save grid as an RTG file ?
#-----------------------------
if (SAVE_RTG):
RTG_file = (self.case_prefix + '_n.rtg')
rtg_files.write_grid( n_grid, RTG_file, self.rti )
#------------------
# Optional report
#------------------
## REPORT = True ###########
## if (REPORT):
## n_min = self.n_grid.min()
## n_max = self.n_grid.max()
## print 'n_min, n_max =', n_min, n_max
## #----------------------------------------
## w1 = np.where(self.n_grid == 1)
## print 'n=1 for', size(w1[0]), 'pixels'
## w2 = np.where(self.n_grid == 2)
## print 'n=2 for', size(w2[0]), 'pixels'
## w3 = np.where(self.n_grid == 3)
## print 'n=3 for', size(w3[0]), 'pixels'
## w4 = np.where(self.n_grid == 4)
## print 'n=4 for', size(w4[0]), 'pixels'
## w5 = np.where(self.n_grid == 5)
## print 'n=5 for', size(w5[0]), 'pixels'
## w6 = np.where(self.n_grid == 6)
## print 'n=6 for', size(w6[0]), 'pixels'
## w7 = np.where(self.n_grid == 7)
## print 'n=7 for', size(w7[0]), 'pixels'
## w8 = np.where(self.n_grid == 8)
## print 'n=8 for', size(w8[0]), 'pixels'
# update_n_grid()
#-------------------------------------------------------------------
## def update_n_grid2(self, SILENT=True, REPORT=False):
##
## #---------------------------------------------------
## # Notes: Bin the pixels according to the timestep
## # they require to satisfy a CFL condition.
## # This version is based on dt_max.
## #---------------------------------------------------
## # In the general case, n_values need not increase
## # in the downstream direction. However, if the
## # slope exponent (n) equals 1, then perhaps this
## # will be the case.
## #---------------------------------------------------
## # (1) Find dt_max = max(dt_grid) = the biggest
## # dt required for stability.
## #
## # (2) Let x = (dt_grid / dt_max) <= 1.
## #---------------------------------------------------
## self.dt_max = self.dt_grid.max()
## x = (self.dt_grid / self.dt_max)
##
## #----------------------------------------------------
## # Assign value of n to every pixel that satisfies:
## #
## # dt_max / (n+1) < dt_grid <= (dt_max / n)
## # <=> 1/(n+1) < x <= (1/n)
## #
## # Then pixels in n_grid with a given value of n
## # will satisfy CFL for dt = (dt_max / n).
## #------------------------------------------------------
## # n=1 => (1/2) * dt_max < dt_grid <= dt_max
## # n=2 => (1/3) * dt_max < dt_grid <= (1/2) * dt_max
## # n=3 => (1/4) * dt_max < dt_grid <= (1/3) * dt_max
## #------------------------------------------------------
## ## self.n_grid = np.floor(1.0 / x)
##
## #----------------------------------------------------
## # Assign value of n to every pixel that satisfies:
## #
## # dt_max / (4^n) < dt_grid <= (dt_max / (4^(n-1))
## # <=> (n-1) <= -log(x)/log(4) < n
## #
## # Then pixels in n_grid with a given value of n
## # will satisfy CFL for dt = (dt_max / 4^(n-1)).
## #---------------------------------------------------------
## # n=1 => (1/4) * dt_max < dt_grid <= dt_max
## # n=2 => (1/16) * dt_max < dt_grid <= (1/4) * dt_max
## # n=3 => (1/64) * dt_max < dt_grid <= (1/16) * dt_max
## #--------------------------------------------------------
## # Same idea works if we replace 4 by some other
## # positive integer. Try changing this value to get
## # about the same number in each bin ?
## #-------------------------------------------------------
## self.n_grid = np.floor(1.0 - np.log(x)/np.log(4.0))
##
## REPORT = True ###########
## if (REPORT):
## n_min = self.n_grid.min()
## n_max = self.n_grid.max()
## print 'n_min, n_max =', n_min, n_max
## #----------------------------------------
## w1 = np.where(self.n_grid == 1)
## print 'n=1 for', size(w1[0]), 'pixels'
## w2 = np.where(self.n_grid == 2)
## print 'n=2 for', size(w2[0]), 'pixels'
## w3 = np.where(self.n_grid == 3)
## print 'n=3 for', size(w3[0]), 'pixels'
## w4 = np.where(self.n_grid == 4)
## print 'n=4 for', size(w4[0]), 'pixels'
## w5 = np.where(self.n_grid == 5)
## print 'n=5 for', size(w5[0]), 'pixels'
## w6 = np.where(self.n_grid == 6)
## print 'n=6 for', size(w6[0]), 'pixels'
## w7 = np.where(self.n_grid == 7)
## print 'n=7 for', size(w7[0]), 'pixels'
## w8 = np.where(self.n_grid == 8)
## print 'n=8 for', size(w8[0]), 'pixels'
##
## # update_n_grid2()
#-------------------------------------------------------------------
def print_mins_and_maxes(self, step, DEM=False, A=False,
S=False, Q=False, QS=False,
DZ_DT=False, DZ=False, DT=False):
## print 'step =', step, '; DEM ='
## print np.around(self.DEM, decimals=0)
## print ' '
if (DEM):
vmin = self.DEM.min()
vmax = self.DEM.max()
vstr = '; zmin, zmax ='
if (A):
vmin = self.d8.A.min()
vmax = self.d8.A.max()
vstr = '; Amin, Amax ='
if (S):
vmin = self.S.min()
vmax = self.S.max()
vstr = '; Smin, Smax ='
if (Q):
vmin = self.Q.min()
vmax = self.Q.max()
vstr = '; Qmin, Qmax ='
if (QS):
vmin = self.Qs.min()
vmax = self.Qs.max()
vstr = '; Qsmin, Qsmax ='
if (DZ):
vmin = self.dz.min()
vmax = self.dz.max()
vstr = '; dz_min, dz_max ='
if (DT):
vmin = self.dt_grid.min()
vmax = self.dt_grid.max()
vstr = '; dt_min, dt_max ='
if (DZ_DT):
vmin = self.dz_dt.min()
vmax = self.dz_dt.max()
vstr = '; dz_dt_min, dz_dt_max ='
print 'step =', step, vstr, vmin, vmax
# print_mins_and_maxes()
#-------------------------------------------------------------------
def check_stability(self):
#----------------------------------
# Check for one type of stability
# (This may be obsolete now.)
#----------------------------------
## if (self.dz_max > float32(200)): # (before 4/15/10)
if (self.dz_max > np.float32(1000)):
print '************************************************'
print 'Program aborted because dz > 1000.'
print 'Time step or K is probably too large'
print 'for this set of input parameters.'
print ' dx = ' + str(self.dx) + ' [meters]'
print ' dy = ' + str(self.dy) + ' [meters]'
print ' dt = ' + str(self.dt) + ' [years]'
print ' K = ' + str(self.K)
print ' m = ' + str(self.m)
print ' n = ' + str(self.n)
print '************************************************'
print ' '
return False
## sys.exit()
else:
return True
# check_stability()
#-------------------------------------------------------------------
def check_finished(self):
#---------------------------------------------------------
# Note: If self.DONE has already been set to True by
# another function or component, this function
# preserves that setting (see below).
#---------------------------------------------------------
# CSDMS_base.run_model() also uses self.DONE as
# well as self.n_steps.
#---------------------------------------------------------
# TINY_DZ can occur either because dt required for
# stability is really small or because we have
# converged to a steady-state landscape.
#---------------------------------------------------------
if (self.stop_code == 0):
#---------------------
# Stop after n_steps
#---------------------
TIMES_UP = (self.time_index >= self.n_steps)
elif (self.stop_code == 1):
#-----------------------
# Stop after stop_time
#-----------------------
TIMES_UP = (self.time >= self.stop_time)
elif (self.stop_code == 2):
#-----------------------------------------
# Stop if "steady-state", but only works
# as written here for global timesteps.
#-----------------------------------------
TIMES_UP = (self.dz_max < self.dz_tolerance)
self.DONE = (self.DONE or TIMES_UP)
#----------------------
# Used before 2/5/12.
#----------------------
# TIMES_UP = (self.time_index >= self.n_steps)
## TINY_DZ = (self.dz_max < self.dz_tolerance)
# self.DONE = (self.DONE or TIMES_UP)
## self.DONE = (self.DONE or TIMES_UP or TINY_DZ)
## if (TINY_DZ):
## tol_str = str(self.dz_tolerance)
## print '### WARNING: dz_max < ' + tol_str + '.'
## ## print 'Aborting since dz_max < ' + tol_str + '.'
## ## self.DONE = True
## print ' dz_max =', self.dz_max, '.'
# check_finished()
#-------------------------------------------------------------------
## def check_steady_state(self):
##
## # check_steady_state()
#-------------------------------------------------------------------
def open_input_files(self):
if (self.make_z0_method == 'READ_FILE'):
self.z0_unit = model_input.open_file(self.z0_type, self.z0_file)
# open_input_files()
#-------------------------------------------------------------------
def read_input_files(self):
#-------------------------------------------------------
# All grids are assumed to have a data type of Float32.
#-------------------------------------------------------
if (self.make_z0_method == 'READ_FILE'):
self.z0 = model_input.read_next(self.z0_unit, self.z0_type, self.rti)
self.z0_unit.close() #########
## slopes = model_input.read_next(self.slope_unit, self.slope_type, rti)
## if (slopes is not None): self.slopes = slopes
# read_input_files()
#-------------------------------------------------------------------
def close_input_files(self):
if (self.make_z0_method == 'READ_FILE'):
self.z0_unit.close()
### if (self.z0_file != ''): self.z0_unit.close()
# close_input_files()
#-------------------------------------------------------------------
def update_outfile_names(self):
#-------------------------------------------------
# Notes: Append out_directory to outfile names.
#-------------------------------------------------
self.z_gs_file = (self.out_directory + self.z_gs_file)
self.S_gs_file = (self.out_directory + self.S_gs_file)
self.A_gs_file = (self.out_directory + self.A_gs_file)
self.Q_gs_file = (self.out_directory + self.Q_gs_file)
self.Qs_gs_file = (self.out_directory + self.Qs_gs_file)
self.dz_gs_file = (self.out_directory + self.dz_gs_file)
self.dt_gs_file = (self.out_directory + self.dt_gs_file)
self.dz_dt_gs_file = (self.out_directory + self.dz_dt_gs_file)
## self.n_gs_file = (self.out_directory + self.n_gs_file)
## self.T_gs_file = (self.out_directory + self.T_gs_file)
#----------------------------------------------------------------
self.z_ts_file = (self.out_directory + self.z_ts_file)
self.S_ts_file = (self.out_directory + self.S_ts_file)
self.A_ts_file = (self.out_directory + self.A_ts_file)
self.Q_ts_file = (self.out_directory + self.Q_ts_file)
self.Qs_ts_file = (self.out_directory + self.Qs_ts_file)
self.dz_ts_file = (self.out_directory + self.dz_ts_file)
self.dt_ts_file = (self.out_directory + self.dt_ts_file)
self.dz_dt_ts_file = (self.out_directory + self.dz_dt_ts_file)
## self.n_ts_file = (self.out_directory + self.n_ts_file)
## self.T_ts_file = (self.out_directory + self.T_ts_file)
## self.z_gs_file = (self.case_prefix + '_2D-z.rts')
## self.S_gs_file = (self.case_prefix + '_2D-S.rts')
## self.A_gs_file = (self.case_prefix + '_2D-A.rts')
## self.Q_gs_file = (self.case_prefix + '_2D-Q.rts')
## self.Qs_gs_file = (self.case_prefix + '_2D-Qs.rts')
## self.dz_gs_file = (self.case_prefix + '_2D-dz.rts')
## self.dt_gs_file = (self.case_prefix + '_2D-dt.rts')
## self.dz_dt_gs_file = (self.case_prefix + '_2D-dz_dt.rts')
## self.n_gs_file = (self.case_prefix + '_2D-n.rts')
## self.T_dt_gs_file = (self.case_prefix + '_2D-Tnext.rts')
## #-----------------------------------------------------------
## self.z_ts_file = (self.case_prefix + '_0D-z.txt')
## self.S_ts_file = (self.case_prefix + '_0D-S.txt')
## self.A_ts_file = (self.case_prefix + '_0D-A.txt')
## self.Q_ts_file = (self.case_prefix + '_0D-Q.txt')
## self.Qs_ts_file = (self.case_prefix + '_0D-Qs.txt')
## self.dz_ts_file = (self.case_prefix + '_0D-dz.txt')
## self.dt_ts_file = (self.case_prefix + '_0D-dt.txt')
## self.dz_dt_ts_file = (self.case_prefix + '_0D-dz_dt.txt')
## self.n_ts_file = (self.case_prefix + '_0D-n.txt')
## self.T_ts_file = (self.case_prefix + '_0D-Tnext.txt')
# update_outfile_names()
#-------------------------------------------------------------------
def open_output_files(self):
model_output.check_netcdf() # (test import and info message)
self.update_outfile_names()
#--------------------------------------
# Open new files to write grid stacks
#--------------------------------------
if (self.SAVE_Z_GRIDS):
model_output.open_new_gs_file( self, self.z_gs_file, self.rti,
var_name='z',
long_name='elevation grid',
units_name='m',
time_units='years')
if (self.SAVE_S_GRIDS):
model_output.open_new_gs_file( self, self.S_gs_file, self.rti,
var_name='S',
long_name='slope grid',
units_name='m/m',
time_units='years')
if (self.SAVE_A_GRIDS):
model_output.open_new_gs_file( self, self.A_gs_file, self.rti,
var_name='A',
long_name='contributing area grid',
units_name='km^2',
time_units='years')
if (self.SAVE_Q_GRIDS):
model_output.open_new_gs_file( self, self.Q_gs_file, self.rti,
var_name='Q',
long_name='water discharge grid',
units_name='m^3/s',
time_units='years')
if (self.SAVE_QS_GRIDS):
model_output.open_new_gs_file( self, self.Qs_gs_file, self.rti,
var_name='Qs',
long_name='sediment discharge grid',
units_name='m^3/s',
time_units='years')
if (self.SAVE_DZ_GRIDS):
model_output.open_new_gs_file( self, self.dz_gs_file, self.rti,
var_name='dz',
long_name='elevation increment grid',
units_name='m',
time_units='years')
if (self.SAVE_DT_GRIDS):
model_output.open_new_gs_file( self, self.dt_gs_file, self.rti,
var_name='dt',
long_name='local timestep grid',
units_name='yr',
time_units='years')
if (self.SAVE_DZ_DT_GRIDS):
model_output.open_new_gs_file( self, self.dz_dt_gs_file, self.rti,
var_name='dz_dt',
long_name='z rate of change grid',
units_name='m/yr',
time_units='years')
# (2/15/12)
## if (self.SAVE_UPCNT_GRIDS):
## model_output.open_new_gs_file( self, self.upcnt_gs_file, self.rti,
## var_name='up_count',
## long_name='cell update count grid',
## units_name='none',
## time_units='years')
## if (self.SAVE_N_GRIDS):
## model_output.open_new_gs_file( self, self.n_gs_file, self.rti,
## var_name='n',
## long_name='timestep bin number',
## units_name='none',
## time_units='years')
## if (self.SAVE_TN_GRIDS):
## model_output.open_new_gs_file( self, self.T_next_gs_file, self.rti,
## var_name='T_next',
## long_name='processing time grid',
## units_name='yr',
## time_units='years')
#---------------------------------------
# Open text files to write time series
#---------------------------------------
IDs = self.outlet_IDs
if (self.SAVE_Z_PIXELS):
model_output.open_new_ts_file( self, self.z_ts_file, IDs,
var_name='z',
long_name='elevation grid',
units_name='m',
time_units='years')
if (self.SAVE_S_PIXELS):
model_output.open_new_ts_file( self, self.S_ts_file, IDs,
var_name='S',
long_name='slope grid',
units_name='m/m',
time_units='years')
if (self.SAVE_A_PIXELS):
model_output.open_new_ts_file( self, self.A_ts_file, IDs,
var_name='A',
long_name='contributing area grid',
units_name='km^2',
time_units='years')
if (self.SAVE_Q_PIXELS):
model_output.open_new_ts_file( self, self.Q_ts_file, IDs,
var_name='Q',
long_name='water discharge grid',
units_name='m^3/s',
time_units='years')
if (self.SAVE_QS_PIXELS):
model_output.open_new_ts_file( self, self.Qs_ts_file, IDs,
var_name='Qs',
long_name='sediment discharge grid',
units_name='m^3/s',
time_units='years')
if (self.SAVE_DZ_PIXELS):
model_output.open_new_ts_file( self, self.dz_ts_file, IDs,
var_name='dz',
long_name='elevation increment grid',
                                           units_name='m',
time_units='years')
if (self.SAVE_DT_PIXELS):
model_output.open_new_ts_file( self, self.dt_ts_file, IDs,
var_name='dt',
long_name='local timestep grid',
units_name='yr',
time_units='years')
if (self.SAVE_DZ_DT_PIXELS):
model_output.open_new_ts_file( self, self.dz_dt_ts_file, IDs,
var_name='dz_dt',
long_name='z rate of change grid',
units_name='m/yr',
time_units='years')
## (2/15/12)
## if (self.SAVE_UPCNT_PIXELS):
## model_output.open_new_ts_file( self, self.upcnt_ts_file, IDs,
## var_name='up_count',
## long_name='cell update count grid',
## units_name='none',
## time_units='years')
## if (self.SAVE_N_PIXELS):
## model_output.open_new_ts_file( self, self.n_ts_file, IDs,
## var_name='n',
## long_name='timestep bin number grid',
## units_name='none',
## time_units='years')
## if (self.SAVE_TN_PIXELS):
## model_output.open_new_ts_file( self, self.T_next_ts_file, IDs,
## var_name='T_next',
## long_name='processing time grid',
## units_name='yr',
## time_units='years')
# open_output_files()
#-------------------------------------------------------------------
def write_output_files(self, time=None):
#---------------------------------------------------------
# Notes: This function was written to use only model
# time (maybe from a caller) in seconds, and
# the save_grid_dt and save_pixels_dt parameters
# read by read_config_file().
#
# set_computed_input_vars() makes sure that all
# of the "save_dts" are larger than or equal to
# the process dt.
#---------------------------------------------------------
## print '===> dt =', self.dt
## print '===> T_clock =', self.T_clock
## print '===> Time =', self.time
#-----------------------------------------
# Allows time to be passed from a caller
#-----------------------------------------
if (time is None):
time = self.time
else:
time = np.float64(time)
#-------------------------------------
# Avoid string comparisons for speed
#-------------------------------------
if not(self.FIXED_STEPS):
## if (self.time_step_type != 'fixed'):
#------------------------------------------------
# (11/15/11). Save computed values based on the
# elapsed time since last saved. Is this the
# best we can do to honor "save_grid_dt" ??
#------------------------------------------------
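            #--------------------------------------------------
            # Example (illustrative): if save_grid_dt = 100
            # [years] and a frame was last saved at t = 350,
            # the next frame is written at the first update
            # where (time - 350) > 100.
            #--------------------------------------------------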
elapsed_grid_time = (time - self.last_grid_time)
if (elapsed_grid_time > self.save_grid_dt):
## print '#### Writing frame at time =', self.time
self.save_grids()
self.last_grid_time = time.copy() ## (2/7/13)
#-----------------------------------------------------
elapsed_pixel_time = (time - self.last_pixel_time)
if (elapsed_pixel_time > self.save_pixels_dt):
self.save_pixel_values()
self.last_pixel_time = time.copy() ## (2/7/13)
else:
#---------------------------------------------
# (11/15/11) This does not work as intended
# for the case of adaptive timesteps.
#---------------------------------------------
# Save computed values at sampled times
#----------------------------------------
model_time = round(time) # (11/15/11)
## model_time = int(time)
if (model_time % int(self.save_grid_dt) == 0):
# print '#### Writing frame at time =', self.time
self.save_grids()
if (model_time % int(self.save_pixels_dt) == 0):
self.save_pixel_values()
#--------------------------
# SHOULD BE OBSOLETE NOW.
#---------------------------------------------------
        # For Erode-D8-local, we need this, too. (11/15/11)
#---------------------------------------------------
## if (model_time == self.last_model_time):
## return
## self.last_model_time = model_time
# write_output_files()
#-------------------------------------------------------------------
## def write_output_files(self, time=None):
##
## #---------------------------------------------------------
## # Notes: This function was written to use only model
## # time (maybe from a caller) in seconds, and
## # the save_grid_dt and save_pixels_dt parameters
## # read by read_config_file().
## #
## # read_config_file() makes sure that all of
## # the "save_dts" are larger than or equal to the
## # process dt.
## #---------------------------------------------------------
##
## #-----------------------------------------
## # Allows time to be passed from a caller
## #-----------------------------------------
## if (time is None):
## time = self.time
## model_time = int(time)
##
## #----------------------------------------
## # Save computed values at sampled times
## #----------------------------------------
## if (model_time % int(self.save_grid_dt) == 0):
## # print '#### Writing frame at time =', self.time
## self.save_grids()
## if (model_time % int(self.save_pixels_dt) == 0):
## self.save_pixel_values()
##
## # write_output_files()
#-------------------------------------------------------------------
def close_output_files(self):
if (self.SAVE_Z_GRIDS): model_output.close_gs_file( self, 'z')
if (self.SAVE_S_GRIDS): model_output.close_gs_file( self, 'S')
if (self.SAVE_A_GRIDS): model_output.close_gs_file( self, 'A')
if (self.SAVE_Q_GRIDS): model_output.close_gs_file( self, 'Q')
if (self.SAVE_QS_GRIDS): model_output.close_gs_file( self, 'Qs')
if (self.SAVE_DZ_GRIDS): model_output.close_gs_file( self, 'dz')
if (self.SAVE_DT_GRIDS): model_output.close_gs_file( self, 'dt')
if (self.SAVE_DZ_DT_GRIDS): model_output.close_gs_file( self, 'dz_dt')
## if (self.SAVE_UPCNT_GRIDS): model_output.close_gs_file( self, 'up_count')
## if (self.SAVE_N_GRIDS): model_output.close_gs_file( self, 'n')
## if (self.SAVE_TN_GRIDS): model_output.close_gs_file( self, 'T_next')
#-------------------------------------------------------------------------
if (self.SAVE_Z_PIXELS): model_output.close_ts_file( self, 'z')
if (self.SAVE_S_PIXELS): model_output.close_ts_file( self, 'S')
if (self.SAVE_A_PIXELS): model_output.close_ts_file( self, 'A')
if (self.SAVE_Q_PIXELS): model_output.close_ts_file( self, 'Q')
if (self.SAVE_QS_PIXELS): model_output.close_ts_file( self, 'Qs')
if (self.SAVE_DZ_PIXELS): model_output.close_ts_file( self, 'dz')
if (self.SAVE_DT_PIXELS): model_output.close_ts_file( self, 'dt')
if (self.SAVE_DZ_DT_PIXELS): model_output.close_ts_file( self, 'dz_dt')
## if (self.SAVE_UPCNT_PIXELS): model_output.close_ts_file( self, 'up_count')
## if (self.SAVE_N_PIXELS): model_output.close_ts_file( self, 'n')
## if (self.SAVE_TN_PIXELS): model_output.close_ts_file( self, 'T_next')
# close_output_files()
#-------------------------------------------------------------------
def save_grids(self):
#---------------------------------------------------
# Notes: Each variable is saved as a grid whether
# it is a scalar or already a (2D) grid.
#---------------------------------------------------
if (self.SAVE_Z_GRIDS):
model_output.add_grid( self, self.DEM, 'z' )
if (self.SAVE_S_GRIDS):
model_output.add_grid( self, self.S, 'S' )
if (self.SAVE_A_GRIDS):
model_output.add_grid( self, self.d8.A, 'A' )
if (self.SAVE_Q_GRIDS):
model_output.add_grid( self, self.Q, 'Q' )
if (self.SAVE_QS_GRIDS):
model_output.add_grid( self, self.Qs, 'Qs' )
if (self.SAVE_DZ_GRIDS):
model_output.add_grid( self, self.dz, 'dz' )
if (self.SAVE_DT_GRIDS):
model_output.add_grid( self, self.dt_grid, 'dt' )
if (self.SAVE_DZ_DT_GRIDS):
model_output.add_grid( self, self.dz_dt, 'dz_dt' )
## if (self.SAVE_UPCNT_GRIDS):
## model_output.add_grid( self, self.update_count, 'up_count' )
## if (self.SAVE_N_GRIDS):
## model_output.add_grid( self, self.n_grid, 'n' )
##
## if (self.SAVE_T_GRIDS):
## model_output.add_grid( self, self.T_next, 'T_next' )
# save_grids()
#-------------------------------------------------------------------
def save_pixel_values(self): ##### save_time_series_data(self) #######
IDs = self.outlet_IDs
time = self.time
if (self.SAVE_Z_PIXELS):
model_output.add_values_at_IDs( self, time, self.DEM, 'z', IDs )
if (self.SAVE_S_PIXELS):
model_output.add_values_at_IDs( self, time, self.S, 'S', IDs )
if (self.SAVE_A_PIXELS):
model_output.add_values_at_IDs( self, time, self.d8.A, 'A', IDs )
if (self.SAVE_Q_PIXELS):
model_output.add_values_at_IDs( self, time, self.Q, 'Q', IDs )
if (self.SAVE_QS_PIXELS):
model_output.add_values_at_IDs( self, time, self.Qs, 'Qs', IDs )
if (self.SAVE_DZ_PIXELS):
model_output.add_values_at_IDs( self, time, self.dz, 'dz', IDs )
if (self.SAVE_DT_PIXELS):
model_output.add_values_at_IDs( self, time, self.dt_grid, 'dt', IDs )
if (self.SAVE_DZ_DT_PIXELS):
model_output.add_values_at_IDs( self, time, self.dz_dt, 'dz_dt', IDs )
## if (self.SAVE_UPCNT_PIXELS):
## model_output.add_values_at_IDs( self, time, self.update_count, 'up_count', IDs )
## if (self.SAVE_N_PIXELS):
## model_output.add_values_at_IDs( self, time, self.n_grid, 'n', IDs )
##
## if (self.SAVE_TN_PIXELS):
## model_output.add_values_at_IDs( self, time, self.T_next, 'T_next', IDs )
# save_pixel_values()
#-------------------------------------------------------------------
def print_time_and_value(self, var, var_name='dz_max',
units_name='[m]', interval=5.0):
#### PRINT_INDEX=False):
#------------------------------------------------------------
# Note: Print the model time and the current value of
# "var" and perhaps n_steps.
#------------------------------------------------------------
# (2/9/12) This overrides CSDMS_base.print_time_and_value()
# and uses self.stop_code, used by erode_d8_global.py and
# erode_d8_local.py.
#------------------------------------------------------------
#--------------------------------
# Print message about interval.
#--------------------------------
if (self.time_index == 0):
print 'Will print values every', interval, 'seconds.'
elapsed_time = (time.time() - self.last_print_time)
if (elapsed_time <= interval):
return
#--------------------------------------------
# Get time info and build time units string
#--------------------------------------------
index = (self.time_index + 1) # (starts at 0)
if (self.time_units == 'seconds'):
cur_time = self.time_min
time_units_str = ' [min]'
else:
cur_time = self.time
time_units_str = ' [' + self.time_units + ']'
#----------------------------
# Build the variable string
#----------------------------
var_str = var_name + ' = ' + ("%10.5f" % var)
var_str += ' ' + units_name
#------------------------
# Build the time string
#------------------------
msg1 = 'Time = ' + ("%10.2f" % cur_time)
msg2 = ' n = ' + str(index)
#-----------------------------------------
if (self.stop_code == 0):
#-------------------------------------
# Will stop when (n >= self.n_steps)
#-------------------------------------
msg1 += time_units_str
msg1 += ', ' + var_str
#-----------------------------------
msg2 += ' of ' + str(self.n_steps)
print msg1
print msg2
elif (self.stop_code == 1):
#------------------------------------------
# Will stop when (time >= self.stop_time)
#------------------------------------------
msg1 += ' of ' + ("%10.2f" % self.stop_time)
msg1 += time_units_str
print msg1
print msg2
print ' ' + var_str
else:
print msg1
print msg2
print ' ' + var_str
#-------------------------
# Update last_print_time
#-------------------------
self.last_print_time = time.time()
# print_time_and_value()
#-------------------------------------------------------------------
#---------------------------------------------------------------------------------
# (2/15/12) These are not used by erode_d8_local.py but could possibly
# still be used by erode_d8_global.py. They are functions not class methods.
#---------------------------------------------------------------------------------
##def Get_Timestep(cmin, nmin, dx, Rmax, Amax, Smax):
##
## #------------------------------------------------------------
## # Notes: The Courant condition: v < dx/dt is used together
## # with the following equations to compute a stable
## # timestep:
##
## # (1) Q = R * A = v * w * d
## # (2) v = d^(2/3) * S^(1/2) / n
## # (2b) d = (n v )^(3/2) * S^(-3/4)
##
## # Combining (1) and (2) we get:
## #
## # (3) v = (R * A / w)^(2/5) * S^(3/10) * n^(-3/5)
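## #         [Derivation sketch: substituting (2b) into (1)
## #          gives R*A = w * n^(3/2) * v^(5/2) * S^(-3/4);
## #          solving for v yields (3).]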
## # (4) w = c * dx (c <= 1)
## # (5) v < dx / dt
##
## # Combining these and solving for dt we get:
##
## # (6) dt < [c^(2/5) * n^(3/5) * dx^(7/5)] /
## # [(R * A)^(2/5) * S^(3/10)]
##
## # Use cmin, nmin, dx_min, Rmax, Amax, Smax.
## #------------------------------------------------------------
## numer = (cmin ** np.float64(0.4)) * (nmin ** np.float64(0.6)) * dx ** np.float64(1.4)
## denom = (Rmax * Amax) ** np.float64(0.4) * Smax ** np.float64(0.3)
## dt = (numer / denom)
##
## return dt
##
### Get_Timestep()
###-----------------------------------------------------------------------
##def Stable_Timestep(A, S, dx=None, dy=None, dt=None, R=None,
## theta=None, m=None, n=None, k=None):
##
## #--------------------------------------------------------
## # Notes: This routine is based on a similarity idea for
## # finding a stable timestep using the fact that
## # the model was stable for a previous set of
## # parameters. Recall that: Qs = K Q^m S^n,
## # Q = R A^theta, qs = Q/dw
##
## # K R^m A^(theta * m) S^n dt / (ds * dw) = const
##
## # We also assume that dx=dy and:
## # ds0/ds = dw0/dw = dx0/dx = dy0/dy
## #--------------------------------------------------------
##
## #-----------------------
## # Stable parameter set
## #-----------------------
## n_params = 2
## dx0 = np.float32(40.0) #(meters)
## dy0 = np.float32(40.0) #(meters)
## dt0 = np.float32(10.0) #(years)
## R0 = np.float32(1.0) #(m/year)
## m0 = np.float32(1.0)
## n0 = np.float32(1.0)
## k0 = np.float32(0.01)
## theta0 = np.float32(1.0)
## P0 = np.float32(100.0) #(Not known as well, but max(A*S).)
##
## #-------------------
## # Keyword defaults
## #-------------------
## if (dx in [0,None]):
## dx = dx0
## if (dy in [0,None]):
## dy = dy0
## if (dt in [0,None]):
## dt = dt0
## if (R in [0,None]):
## R = R0
## if (m in [0,None]):
## m = m0
## if (n in [0,None]):
## n = n0
## if (k in [0,None]):
## k = k0
## if (theta in [0,None]):
## theta = theta0
##
## #------------------------------------
## # Get max value of the A-S function
## #------------------------------------
## grid = (A ** (theta * m)) * (S ** n)
## P = np.nanmax(grid)
##
## #---------------
## # New timestep
## #---------------
## dt = (dx / dx0) * (dx / dx0) * (k0 / k) * ((R0 ** m0) / (R ** m))
## dt = dt * (P0 / P) * dt0
##
## return np.int16(dt)
##
### Stable_Timestep()
#-----------------------------------------------------------------------
|
TeamTwisted/external_chromium_org
|
refs/heads/opti-5.1
|
build/android/pylib/utils/json_results_generator.py
|
36
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Most of this file was ported over from Blink's
# Tools/Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py
# Tools/Scripts/webkitpy/common/net/file_uploader.py
#
import json
import logging
import mimetypes
import os
import time
import urllib2
_log = logging.getLogger(__name__)
_JSON_PREFIX = 'ADD_RESULTS('
_JSON_SUFFIX = ');'
def HasJSONWrapper(string):
return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX)
def StripJSONWrapper(json_content):
# FIXME: Kill this code once the server returns json instead of jsonp.
if HasJSONWrapper(json_content):
return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)]
return json_content
def WriteJSON(json_object, file_path, callback=None):
# Specify separators in order to get compact encoding.
json_string = json.dumps(json_object, separators=(',', ':'))
if callback:
json_string = callback + '(' + json_string + ');'
with open(file_path, 'w') as fp:
fp.write(json_string)
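# Example (illustrative): WriteJSON({'a': 1}, path, callback='ADD_RESULTS')
# writes the JSONP string ADD_RESULTS({"a":1}); and StripJSONWrapper()
# above recovers the bare JSON {"a":1}.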
def ConvertTrieToFlatPaths(trie, prefix=None):
"""Flattens the trie of paths, prepending a prefix to each."""
result = {}
for name, data in trie.iteritems():
if prefix:
name = prefix + '/' + name
if len(data) and not 'results' in data:
result.update(ConvertTrieToFlatPaths(data, name))
else:
result[name] = data
return result
def AddPathToTrie(path, value, trie):
"""Inserts a single path and value into a directory trie structure."""
if not '/' in path:
trie[path] = value
return
directory, _slash, rest = path.partition('/')
if not directory in trie:
trie[directory] = {}
AddPathToTrie(rest, value, trie[directory])
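# Example (illustrative): AddPathToTrie('foo/bar.html', {'results': []}, trie)
# produces {'foo': {'bar.html': {'results': []}}}, and
# ConvertTrieToFlatPaths() inverts it back to {'foo/bar.html': {'results': []}}.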
def TestTimingsTrie(individual_test_timings):
"""Breaks a test name into dicts by directory
foo/bar/baz.html: 1ms
foo/bar/baz1.html: 3ms
becomes
foo: {
bar: {
baz.html: 1,
baz1.html: 3
}
}
"""
trie = {}
for test_result in individual_test_timings:
test = test_result.test_name
AddPathToTrie(test, int(1000 * test_result.test_run_time), trie)
return trie
class TestResult(object):
"""A simple class that represents a single test result."""
# Test modifier constants.
(NONE, FAILS, FLAKY, DISABLED) = range(4)
def __init__(self, test, failed=False, elapsed_time=0):
self.test_name = test
self.failed = failed
self.test_run_time = elapsed_time
test_name = test
try:
test_name = test.split('.')[1]
except IndexError:
_log.warn('Invalid test name: %s.', test)
if test_name.startswith('FAILS_'):
self.modifier = self.FAILS
elif test_name.startswith('FLAKY_'):
self.modifier = self.FLAKY
elif test_name.startswith('DISABLED_'):
self.modifier = self.DISABLED
else:
self.modifier = self.NONE
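  # Example (illustrative): for test = 'SuiteName.FLAKY_TestFoo' the
  # parsed short name is 'FLAKY_TestFoo', so self.modifier = FLAKY,
  # while self.test_name keeps the full dotted name.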
def Fixable(self):
return self.failed or self.modifier == self.DISABLED
class JSONResultsGeneratorBase(object):
"""A JSON results generator for generic tests."""
MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750
# Min time (seconds) that will be added to the JSON.
MIN_TIME = 1
# Note that in non-chromium tests those chars are used to indicate
# test modifiers (FAILS, FLAKY, etc) but not actual test results.
PASS_RESULT = 'P'
SKIP_RESULT = 'X'
FAIL_RESULT = 'F'
FLAKY_RESULT = 'L'
NO_DATA_RESULT = 'N'
MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT,
TestResult.DISABLED: SKIP_RESULT,
TestResult.FAILS: FAIL_RESULT,
TestResult.FLAKY: FLAKY_RESULT}
VERSION = 4
VERSION_KEY = 'version'
RESULTS = 'results'
TIMES = 'times'
BUILD_NUMBERS = 'buildNumbers'
TIME = 'secondsSinceEpoch'
TESTS = 'tests'
FIXABLE_COUNT = 'fixableCount'
FIXABLE = 'fixableCounts'
ALL_FIXABLE_COUNT = 'allFixableCount'
RESULTS_FILENAME = 'results.json'
TIMES_MS_FILENAME = 'times_ms.json'
INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json'
# line too long pylint: disable=C0301
URL_FOR_TEST_LIST_JSON = (
'http://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&master=%s')
# pylint: enable=C0301
def __init__(self, builder_name, build_name, build_number,
results_file_base_path, builder_base_url,
test_results_map, svn_repositories=None,
test_results_server=None,
test_type='',
master_name=''):
"""Modifies the results.json file. Grabs it off the archive directory
if it is not found locally.
    Args:
builder_name: the builder name (e.g. Webkit).
build_name: the build name (e.g. webkit-rel).
build_number: the build number.
results_file_base_path: Absolute path to the directory containing the
results json file.
builder_base_url: the URL where we have the archived test results.
If this is None no archived results will be retrieved.
test_results_map: A dictionary that maps test_name to TestResult.
svn_repositories: A (json_field_name, svn_path) pair for SVN
repositories that tests rely on. The SVN revision will be
included in the JSON with the given json_field_name.
test_results_server: server that hosts test results json.
test_type: test type string (e.g. 'layout-tests').
master_name: the name of the buildbot master.
"""
self._builder_name = builder_name
self._build_name = build_name
self._build_number = build_number
self._builder_base_url = builder_base_url
self._results_directory = results_file_base_path
self._test_results_map = test_results_map
self._test_results = test_results_map.values()
self._svn_repositories = svn_repositories
if not self._svn_repositories:
self._svn_repositories = {}
self._test_results_server = test_results_server
self._test_type = test_type
self._master_name = master_name
self._archived_results = None
def GenerateJSONOutput(self):
json_object = self.GetJSON()
if json_object:
file_path = (
os.path.join(
self._results_directory,
self.INCREMENTAL_RESULTS_FILENAME))
WriteJSON(json_object, file_path)
def GenerateTimesMSFile(self):
times = TestTimingsTrie(self._test_results_map.values())
file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME)
WriteJSON(times, file_path)
def GetJSON(self):
"""Gets the results for the results.json file."""
results_json = {}
if not results_json:
results_json, error = self._GetArchivedJSONResults()
if error:
# If there was an error don't write a results.json
# file at all as it would lose all the information on the
# bot.
_log.error('Archive directory is inaccessible. Not '
'modifying or clobbering the results.json '
'file: ' + str(error))
return None
builder_name = self._builder_name
if results_json and builder_name not in results_json:
_log.debug('Builder name (%s) is not in the results.json file.'
% builder_name)
self._ConvertJSONToCurrentVersion(results_json)
if builder_name not in results_json:
results_json[builder_name] = (
self._CreateResultsForBuilderJSON())
results_for_builder = results_json[builder_name]
if builder_name:
self._InsertGenericMetaData(results_for_builder)
self._InsertFailureSummaries(results_for_builder)
# Update the all failing tests with result type and time.
tests = results_for_builder[self.TESTS]
all_failing_tests = self._GetFailedTestNames()
all_failing_tests.update(ConvertTrieToFlatPaths(tests))
for test in all_failing_tests:
self._InsertTestTimeAndResult(test, tests)
return results_json
def SetArchivedResults(self, archived_results):
self._archived_results = archived_results
def UploadJSONFiles(self, json_files):
"""Uploads the given json_files to the test_results_server (if the
test_results_server is given)."""
if not self._test_results_server:
return
if not self._master_name:
_log.error(
'--test-results-server was set, but --master-name was not. Not '
'uploading JSON files.')
return
_log.info('Uploading JSON files for builder: %s', self._builder_name)
attrs = [('builder', self._builder_name),
('testtype', self._test_type),
('master', self._master_name)]
files = [(json_file, os.path.join(self._results_directory, json_file))
for json_file in json_files]
url = 'http://%s/testfile/upload' % self._test_results_server
# Set uploading timeout in case appengine server is having problems.
# 120 seconds are more than enough to upload test results.
uploader = _FileUploader(url, 120)
try:
response = uploader.UploadAsMultipartFormData(files, attrs)
if response:
if response.code == 200:
_log.info('JSON uploaded.')
else:
_log.debug(
"JSON upload failed, %d: '%s'" %
(response.code, response.read()))
else:
_log.error('JSON upload failed; no response returned')
except Exception, err:
_log.error('Upload failed: %s' % err)
return
  def _GetTestTiming(self, test_name):
    """Returns test timing data (elapsed time) in seconds
for the given test_name."""
if test_name in self._test_results_map:
# Floor for now to get time in seconds.
return int(self._test_results_map[test_name].test_run_time)
return 0
def _GetFailedTestNames(self):
"""Returns a set of failed test names."""
return set([r.test_name for r in self._test_results if r.failed])
def _GetModifierChar(self, test_name):
"""Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier
for the given test_name.
"""
if test_name not in self._test_results_map:
return self.__class__.NO_DATA_RESULT
test_result = self._test_results_map[test_name]
if test_result.modifier in self.MODIFIER_TO_CHAR.keys():
return self.MODIFIER_TO_CHAR[test_result.modifier]
return self.__class__.PASS_RESULT
def _get_result_char(self, test_name):
"""Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result
for the given test_name.
"""
if test_name not in self._test_results_map:
return self.__class__.NO_DATA_RESULT
test_result = self._test_results_map[test_name]
if test_result.modifier == TestResult.DISABLED:
return self.__class__.SKIP_RESULT
if test_result.failed:
return self.__class__.FAIL_RESULT
return self.__class__.PASS_RESULT
def _GetSVNRevision(self, in_directory):
"""Returns the svn revision for the given directory.
Args:
in_directory: The directory where svn is to be run.
"""
# This is overridden in flakiness_dashboard_results_uploader.py.
raise NotImplementedError()
  def _GetArchivedJSONResults(self):
    """Downloads a JSON file that contains only the test name list
    from the test-results server. This is used when generating the
    incremental JSON, so the generated file has info for tests that
    failed before but pass or are skipped in the current run.
Returns (archived_results, error) tuple where error is None if results
were successfully read.
"""
results_json = {}
old_results = None
error = None
if not self._test_results_server:
return {}, None
results_file_url = (self.URL_FOR_TEST_LIST_JSON %
(urllib2.quote(self._test_results_server),
urllib2.quote(self._builder_name),
self.RESULTS_FILENAME,
urllib2.quote(self._test_type),
urllib2.quote(self._master_name)))
try:
# FIXME: We should talk to the network via a Host object.
results_file = urllib2.urlopen(results_file_url)
old_results = results_file.read()
except urllib2.HTTPError, http_error:
# A non-4xx status code means the bot is hosed for some reason
# and we can't grab the results.json file off of it.
      if (http_error.code < 400 or http_error.code >= 500):
error = http_error
except urllib2.URLError, url_error:
error = url_error
if old_results:
# Strip the prefix and suffix so we can get the actual JSON object.
old_results = StripJSONWrapper(old_results)
try:
results_json = json.loads(old_results)
except Exception:
_log.debug('results.json was not valid JSON. Clobbering.')
# The JSON file is not valid JSON. Just clobber the results.
results_json = {}
else:
_log.debug('Old JSON results do not exist. Starting fresh.')
results_json = {}
return results_json, error
def _InsertFailureSummaries(self, results_for_builder):
"""Inserts aggregate pass/failure statistics into the JSON.
This method reads self._test_results and generates
FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries.
Args:
results_for_builder: Dictionary containing the test results for a
single builder.
"""
# Insert the number of tests that failed or skipped.
fixable_count = len([r for r in self._test_results if r.Fixable()])
self._InsertItemIntoRawList(results_for_builder,
fixable_count, self.FIXABLE_COUNT)
# Create a test modifiers (FAILS, FLAKY etc) summary dictionary.
entry = {}
for test_name in self._test_results_map.iterkeys():
result_char = self._GetModifierChar(test_name)
entry[result_char] = entry.get(result_char, 0) + 1
# Insert the pass/skip/failure summary dictionary.
self._InsertItemIntoRawList(results_for_builder, entry,
self.FIXABLE)
# Insert the number of all the tests that are supposed to pass.
all_test_count = len(self._test_results)
self._InsertItemIntoRawList(results_for_builder,
all_test_count, self.ALL_FIXABLE_COUNT)
def _InsertItemIntoRawList(self, results_for_builder, item, key):
"""Inserts the item into the list with the given key in the results for
this builder. Creates the list if no such list exists.
Args:
results_for_builder: Dictionary containing the test results for a
single builder.
item: Number or string to insert into the list.
key: Key in results_for_builder for the list to insert into.
"""
if key in results_for_builder:
raw_list = results_for_builder[key]
else:
raw_list = []
raw_list.insert(0, item)
raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG]
results_for_builder[key] = raw_list
def _InsertItemRunLengthEncoded(self, item, encoded_results):
"""Inserts the item into the run-length encoded results.
Args:
item: String or number to insert.
encoded_results: run-length encoded results. An array of arrays, e.g.
[[3,'A'],[1,'Q']] encodes AAAQ.
"""
if len(encoded_results) and item == encoded_results[0][1]:
num_results = encoded_results[0][0]
if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
encoded_results[0][0] = num_results + 1
else:
# Use a list instead of a class for the run-length encoding since
# we want the serialized form to be concise.
encoded_results.insert(0, [1, item])
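  # Example (illustrative): starting from [[3,'A'],[1,'Q']] (i.e. AAAQ),
  # inserting 'A' gives [[4,'A'],[1,'Q']], while inserting 'P' gives
  # [[1,'P'],[3,'A'],[1,'Q']]; new runs are prepended because newer
  # builds come first.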
def _InsertGenericMetaData(self, results_for_builder):
""" Inserts generic metadata (such as version number, current time etc)
into the JSON.
Args:
results_for_builder: Dictionary containing the test results for
a single builder.
"""
self._InsertItemIntoRawList(results_for_builder,
self._build_number, self.BUILD_NUMBERS)
# Include SVN revisions for the given repositories.
for (name, path) in self._svn_repositories:
# Note: for JSON file's backward-compatibility we use 'chrome' rather
# than 'chromium' here.
lowercase_name = name.lower()
if lowercase_name == 'chromium':
lowercase_name = 'chrome'
self._InsertItemIntoRawList(results_for_builder,
self._GetSVNRevision(path),
lowercase_name + 'Revision')
self._InsertItemIntoRawList(results_for_builder,
int(time.time()),
self.TIME)
  def _InsertTestTimeAndResult(self, test_name, tests):
    """ Inserts a test item with its results into the given tests dictionary.
    Args:
      test_name: Name of the test to insert.
      tests: Dictionary containing test result entries.
"""
result = self._get_result_char(test_name)
test_time = self._GetTestTiming(test_name)
this_test = tests
for segment in test_name.split('/'):
if segment not in this_test:
this_test[segment] = {}
this_test = this_test[segment]
if not len(this_test):
      self._PopulateResultsAndTimesJSON(this_test)
if self.RESULTS in this_test:
self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS])
else:
this_test[self.RESULTS] = [[1, result]]
if self.TIMES in this_test:
self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES])
else:
this_test[self.TIMES] = [[1, test_time]]
def _ConvertJSONToCurrentVersion(self, results_json):
"""If the JSON does not match the current version, converts it to the
current version and adds in the new version number.
"""
if self.VERSION_KEY in results_json:
archive_version = results_json[self.VERSION_KEY]
if archive_version == self.VERSION:
return
else:
archive_version = 3
# version 3->4
if archive_version == 3:
for results in results_json.values():
self._ConvertTestsToTrie(results)
results_json[self.VERSION_KEY] = self.VERSION
def _ConvertTestsToTrie(self, results):
if not self.TESTS in results:
return
test_results = results[self.TESTS]
test_results_trie = {}
for test in test_results.iterkeys():
single_test_result = test_results[test]
AddPathToTrie(test, single_test_result, test_results_trie)
results[self.TESTS] = test_results_trie
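  # Example (illustrative): a version-3 entry such as
  #   {'foo/bar.html': {'results': [[1, 'P']]}}
  # becomes the version-4 trie
  #   {'foo': {'bar.html': {'results': [[1, 'P']]}}}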
  def _PopulateResultsAndTimesJSON(self, results_and_times):
results_and_times[self.RESULTS] = []
results_and_times[self.TIMES] = []
return results_and_times
def _CreateResultsForBuilderJSON(self):
results_for_builder = {}
results_for_builder[self.TESTS] = {}
return results_for_builder
def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list):
"""Removes items from the run-length encoded list after the final
item that exceeds the max number of builds to track.
Args:
      encoded_list: run-length encoded results. An array of arrays, e.g.
[[3,'A'],[1,'Q']] encodes AAAQ.
"""
num_builds = 0
index = 0
for result in encoded_list:
num_builds = num_builds + result[0]
index = index + 1
if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
return encoded_list[:index]
return encoded_list
def _NormalizeResultsJSON(self, test, test_name, tests):
""" Prune tests where all runs pass or tests that no longer exist and
truncate all results to maxNumberOfBuilds.
Args:
test: ResultsAndTimes object for this test.
test_name: Name of the test.
tests: The JSON object with all the test results for this builder.
"""
test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds(
test[self.RESULTS])
test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds(
test[self.TIMES])
is_all_pass = self._IsResultsAllOfType(test[self.RESULTS],
self.PASS_RESULT)
is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS],
self.NO_DATA_RESULT)
max_time = max([test_time[1] for test_time in test[self.TIMES]])
# Remove all passes/no-data from the results to reduce noise and
# filesize. If a test passes every run, but takes > MIN_TIME to run,
# don't throw away the data.
if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME):
del tests[test_name]
# method could be a function pylint: disable=R0201
def _IsResultsAllOfType(self, results, result_type):
"""Returns whether all the results are of the given type
(e.g. all passes)."""
return len(results) == 1 and results[0][1] == result_type
class _FileUploader(object):
def __init__(self, url, timeout_seconds):
self._url = url
self._timeout_seconds = timeout_seconds
def UploadAsMultipartFormData(self, files, attrs):
file_objs = []
for filename, path in files:
      with open(path, 'rb') as fp:
file_objs.append(('file', filename, fp.read()))
# FIXME: We should use the same variable names for the formal and actual
# parameters.
content_type, data = _EncodeMultipartFormData(attrs, file_objs)
return self._UploadData(content_type, data)
def _UploadData(self, content_type, data):
start = time.time()
end = start + self._timeout_seconds
while time.time() < end:
try:
request = urllib2.Request(self._url, data,
{'Content-Type': content_type})
return urllib2.urlopen(request)
except urllib2.HTTPError as e:
_log.warn("Received HTTP status %s loading \"%s\". "
'Retrying in 10 seconds...' % (e.code, e.filename))
time.sleep(10)
def _GetMIMEType(filename):
return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
# FIXME: Rather than taking tuples, this function should take more
# structured data.
def _EncodeMultipartFormData(fields, files):
"""Encode form fields for multipart/form-data.
Args:
fields: A sequence of (name, value) elements for regular form fields.
files: A sequence of (name, filename, value) elements for data to be
uploaded as files.
Returns:
(content_type, body) ready for httplib.HTTP instance.
Source:
http://code.google.com/p/rietveld/source/browse/trunk/upload.py
"""
BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
CRLF = '\r\n'
lines = []
for key, value in fields:
lines.append('--' + BOUNDARY)
lines.append('Content-Disposition: form-data; name="%s"' % key)
lines.append('')
if isinstance(value, unicode):
value = value.encode('utf-8')
lines.append(value)
for key, filename, value in files:
lines.append('--' + BOUNDARY)
lines.append('Content-Disposition: form-data; name="%s"; '
'filename="%s"' % (key, filename))
lines.append('Content-Type: %s' % _GetMIMEType(filename))
lines.append('')
if isinstance(value, unicode):
value = value.encode('utf-8')
lines.append(value)
lines.append('--' + BOUNDARY + '--')
lines.append('')
body = CRLF.join(lines)
content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
return content_type, body
|
sdh100shaun/mongo-connector
|
refs/heads/master
|
tests/test_mongo_connector.py
|
1
|
# Copyright 2012 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file will be used with PyPi in order to package and distribute the final
# product.
"""Tests methods for mongo_connector
"""
import os
import sys
import inspect
CURRENT_DIR = inspect.getfile(inspect.currentframe())
CMD_DIR = os.path.realpath(os.path.abspath(os.path.split(CURRENT_DIR)[0]))
CMD_DIR = CMD_DIR.rsplit("/", 1)[0]
CMD_DIR += "/mongo_connector"
if CMD_DIR not in sys.path:
sys.path.insert(0, CMD_DIR)
import unittest
import time
import json
from mongo_connector import Connector
from optparse import OptionParser
from setup_cluster import start_cluster
from bson.timestamp import Timestamp
from util import long_to_bson_ts
MAIN_ADDRESS = '27217'
class MongoInternalTester(unittest.TestCase):
""" Test Class for the Mongo Connector
"""
def runTest(self):
""" Runs the tests
"""
unittest.TestCase.__init__(self)
@classmethod
def setUpClass(cls):
""" Initializes the cluster
"""
use_mongos = True
if MAIN_ADDRESS.split(":")[1] != "27217":
use_mongos = False
cls.flag = start_cluster(use_mongos=use_mongos)
def test_connector(self):
"""Test whether the connector initiates properly
"""
if not self.flag:
self.fail("Shards cannot be added to mongos")
conn = Connector(MAIN_ADDRESS, 'config.txt', None, ['test.test'],
'_id', None, None)
conn.start()
while len(conn.shard_set) != 1:
time.sleep(2)
conn.join()
self.assertFalse(conn.can_run)
time.sleep(5)
for thread in conn.shard_set.values():
self.assertFalse(thread.running)
def test_write_oplog_progress(self):
"""Test write_oplog_progress under several circumstances
"""
os.system('touch temp_config.txt')
config_file_path = os.getcwd() + '/temp_config.txt'
conn = Connector(MAIN_ADDRESS, config_file_path, None, ['test.test'],
'_id', None, None)
#test that None is returned if there is no config file specified.
self.assertEqual(conn.write_oplog_progress(), None)
conn.oplog_progress.get_dict()[1] = Timestamp(12, 34)
#pretend to insert a thread/timestamp pair
conn.write_oplog_progress()
data = json.load(open(config_file_path, 'r'))
self.assertEqual(1, int(data[0]))
self.assertEqual(long_to_bson_ts(int(data[1])), Timestamp(12, 34))
#ensure the temp file was deleted
self.assertFalse(os.path.exists(config_file_path + '~'))
#ensure that updates work properly
conn.oplog_progress.get_dict()[1] = Timestamp(44, 22)
conn.write_oplog_progress()
config_file = open(config_file_path, 'r')
data = json.load(config_file)
self.assertEqual(1, int(data[0]))
self.assertEqual(long_to_bson_ts(int(data[1])), Timestamp(44, 22))
        config_file.close()
        os.system('rm ' + config_file_path)
def test_read_oplog_progress(self):
"""Test read_oplog_progress
"""
conn = Connector(MAIN_ADDRESS, None, None, ['test.test'], '_id',
None, None)
#testing with no file
self.assertEqual(conn.read_oplog_progress(), None)
os.system('touch temp_config.txt')
config_file_path = os.getcwd() + '/temp_config.txt'
conn.oplog_checkpoint = config_file_path
#testing with empty file
self.assertEqual(conn.read_oplog_progress(), None)
oplog_dict = conn.oplog_progress.get_dict()
#add a value to the file, delete the dict, and then read in the value
oplog_dict['oplog1'] = Timestamp(12, 34)
conn.write_oplog_progress()
del oplog_dict['oplog1']
self.assertEqual(len(oplog_dict), 0)
conn.read_oplog_progress()
        self.assertIn('oplog1', oplog_dict)
        self.assertEqual(oplog_dict['oplog1'], Timestamp(12, 34))
oplog_dict['oplog1'] = Timestamp(55, 11)
#see if oplog progress dict is properly updated
conn.read_oplog_progress()
        self.assertEqual(oplog_dict['oplog1'], Timestamp(55, 11))
os.system('rm ' + config_file_path)
if __name__ == '__main__':
os.system('rm config.txt; touch config.txt')
PARSER = OptionParser()
#-m is for the main address, which is a host:port pair, ideally of the
#mongos. For non sharded clusters, it can be the primary.
PARSER.add_option("-m", "--main", action="store", type="string",
dest="main_addr", default="27217")
(OPTIONS, ARGS) = PARSER.parse_args()
MAIN_ADDRESS = "localhost:" + OPTIONS.main_addr
unittest.main(argv=[sys.argv[0]])
|
jonathan-besanceney/Transition
|
refs/heads/dev
|
docapp/appskell.py
|
1
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
# Name: app_skell
# Purpose: Define a standard way to replace VBA in Excel Workbooks by an
# external COM application launched by Transition Excel/COM Add-in.
#
# Author: Jonathan Besanceney <jonathan.besanceney@gmail.com>
#
# Created: 09/03/2014
# Copyright: (c) 2014 Jonathan Besanceney
#
# This file is a part of Transition
#
# Transition is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Transition is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Transition. If not, see <http://www.gnu.org/licenses/>.
# ------------------------------------------------------------------------------
"""
Define a standard way to replace VBA in Excel Workbooks by an
external COM application launched by Transition Excel/COM Add-in.
"""
import sys
# Request free-threaded COM (COINIT_MULTITHREADED); sys.coinit_flags must be
# set before pythoncom/win32com are imported.
sys.coinit_flags = 0
import os
sys.path.append(os.path.abspath(os.path.dirname(__file__)))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
from win32com.client import DispatchWithEvents
import xl as pyvot
from transitioncore.comeventsinterface.excelwbeventsinterface import ExcelWbEventsInterface
class ExcelWbEventsSkell(ExcelWbEventsInterface):
"""
Workbook Event handling basics
"""
def __init__(self):
super(ExcelWbEventsSkell, self).__init__()
self.before_close = False
self.name = 'ExcelWbEventsSkell'
def del_event_handle(self, pyhandle):
pass
def OnActivate(self):
print(self.name, self.Name, "ExcelWbEventsSkell OnActivate")
        # Unset the before_close flag so OnDeactivate can tell
        # whether we need to quit
self.before_close = False
def OnBeforeClose(self, Cancel):
print(self.name, self.Name, "ExcelWbEventsSkell OnBeforeClose", Cancel,
repr(self.pywb))
        # Set the before_close flag so OnDeactivate can tell
        # whether we need to quit
self.before_close = True
return Cancel
def OnDeactivate(self):
# here it's the Name property of the Excel Workbook
print(self.name, self.Name, "ExcelWbEventsSkell OnDeactivate")
        # Does this event happen after OnBeforeClose?
if self.before_close:
            # We want to quit; the workbook is closing now.
self.del_event_handle(self.event)
print(self.name, self.Name, "ExcelWbEventsSkell remove event handle")
class ExcelWorkbookAppSkell():
"""Application Standard Skeleton"""
def __init__(self, wb, evt_handler, name):
super(ExcelWorkbookAppSkell, self).__init__()
print("{} : init on {}".format(name, wb.Name))
self.wb = wb
self.name = name
self.evt_handler = evt_handler
self._pyhandles = list()
def add_event_handle(self, pyhandle):
self._pyhandles.append(pyhandle)
def del_event_handle(self, pyhandle):
self._pyhandles.remove(pyhandle)
def get_event_handles(self):
return self._pyhandles
@staticmethod
def is_handled_workbook(wb):
"""
This method returns an instance of ExcelWorkbookAppSkell if wb is a
handled Workbook
:param wb: Workbook instance
        :return: ExcelWorkbookAppSkell instance
"""
return ExcelWorkbookAppSkell(wb, ExcelWbEventsSkell, "ExcelWorkbookAppSkell")
def run(self):
""" Initialize and launch application main loop"""
print("{} : Init Transition WorkbookApp on {}".format(self.name, self.wb.Name))
if self.wb is not None:
# Add Events Handlers to the Workbook instance
self.wb = DispatchWithEvents(self.wb, self.evt_handler)
self.wb.name = self.name
# Get the Pythonic interface to Excel from Microsoft
# (Pyvot => xl)
self.wb.pywb = pyvot.Workbook(self.wb)
self.wb.del_event_handle = self.del_event_handle
print("{} : Transition is plugged on {}. Waiting for events...".format(self.name, self.wb.Name))
else:
print("{} : You must give a valid Workbook instance !".format(self.name))
if __name__ == '__main__':
from win32com.client import Dispatch
xlApp = Dispatch("Excel.Application")
xlApp.Visible = 1
xlApp.EnableEvents = True
m_wb = xlApp.Workbooks.Add()
app = ExcelWorkbookAppSkell.is_handled_workbook(m_wb)
app.run()
app = None
xlApp = None
sys.exit(0)
|
raymondxyang/tensorflow
|
refs/heads/master
|
tensorflow/python/grappler/cost_analyzer_tool.py
|
47
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""A tool for cost analysis."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from google.protobuf import text_format
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.grappler import cost_analyzer
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.platform import app
from tensorflow.python.platform import gfile
def main(_):
with gfile.GFile(FLAGS.input) as input_file:
metagraph = meta_graph_pb2.MetaGraphDef()
metagraph.ParseFromString(input_file.read())
if FLAGS.rewriter_config is not None:
rewriter_config = rewriter_config_pb2.RewriterConfig()
text_format.Merge(FLAGS.rewriter_config, rewriter_config)
optimized_graph = tf_optimizer.OptimizeGraph(rewriter_config, metagraph)
metagraph.graph_def.CopyFrom(optimized_graph)
report = cost_analyzer.GenerateCostReport(metagraph, FLAGS.per_node_report)
print(report)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--input", type=str, default=None, help="Input .meta file path.")
parser.add_argument(
"--rewriter_config",
type=str,
default=None,
help="Configuration for the grappler optimizers, described as a "
"RewriterConfig protocol buffer. Usage example 1: "
"--rewriter_config='optimize_tensor_layout: true "
"disable_model_pruning: true'. Usage example 2: "
"--rewriter_config='optimizers: \"constfold\" optimizers: \"layout\"'")
parser.add_argument(
"--per_node_report",
action="store_true",
help="Generate per-node report. By default the report contains stats "
"aggregated on a per op type basis, per_node_report adds results "
"for each individual node to the report.")
FLAGS, unparsed = parser.parse_known_args()
app.run(main=main, argv=[sys.argv[0]] + unparsed)
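# Example invocation (illustrative; the .meta path below is hypothetical):
#   python cost_analyzer_tool.py --input=/tmp/graph.meta --per_node_report \
#       --rewriter_config='optimizers: "constfold" optimizers: "layout"'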
|
smart-solution/odoo-accounting-80
|
refs/heads/master
|
account_multi_analytic/__init__.py
|
1
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today Acespritech Solutions Pvt Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base
import account
import analytic
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
uclouvain/osis_louvain
|
refs/heads/master
|
attribution/migrations/0003_auto_20161215_1420.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-12-15 13:20
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
dependencies = [
('base', '0083_auto_20161215_1414'),
('attribution', '0002_move_data_from_base_attribution_to_attribution_attribution'),
]
operations = [
migrations.CreateModel(
name='AttributionCharge',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, unique=True)),
('allocation_charge', models.DecimalField(blank=True, decimal_places=2, max_digits=5, null=True)),
],
options={
'abstract': False,
},
),
migrations.AlterField(
model_name='attribution',
name='function',
field=models.CharField(blank=True, choices=[('COORDINATOR', 'COORDINATOR'), ('HOLDER', 'HOLDER'), ('CO_HOLDER', 'CO_HOLDER'), ('DEPUTY', 'DEPUTY'), ('PROFESSOR', 'PROFESSOR')], db_index=True, max_length=15, null=True),
),
migrations.AddField(
model_name='attributioncharge',
name='attribution',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='attribution.Attribution'),
),
migrations.AddField(
model_name='attributioncharge',
name='learning_unit_component',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.LearningUnitComponent'),
),
]
|
amith01994/intellij-community
|
refs/heads/master
|
python/testData/copyPaste/singleLine/Indent33.dst.py
|
664
|
class C:
def foo(self):
<caret> y = 2
|
leopardhs/ns3_sdn
|
refs/heads/master
|
.waf-1.8.19-b1fc8f7baef51bd2db4c2971909a568d/waflib/Tools/asm.py
|
10
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
from waflib import Task
from waflib.Tools.ccroot import link_task,stlink_task
from waflib.TaskGen import extension
class asm(Task.Task):
color='BLUE'
run_str='${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
@extension('.s','.S','.asm','.ASM','.spp','.SPP')
def asm_hook(self,node):
return self.create_compiled_task('asm',node)
class asmprogram(link_task):
run_str='${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
ext_out=['.bin']
inst_to='${BINDIR}'
class asmshlib(asmprogram):
inst_to='${LIBDIR}'
class asmstlib(stlink_task):
pass
def configure(conf):
conf.env['ASMPATH_ST']='-I%s'
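# A hypothetical wscript fragment showing how these tasks get used; 'gas' is
# one of the stock waf assembler tools that populates AS/ASFLAGS:
#
#   def configure(conf):
#       conf.load('gas')
#
#   def build(bld):
#       bld.program(source='boot.S main.c', target='app')  # .S goes via asm_hook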
|
skuda/client-python
|
refs/heads/master
|
kubernetes/test/test_v1_service_port.py
|
1
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_service_port import V1ServicePort
class TestV1ServicePort(unittest.TestCase):
""" V1ServicePort unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1ServicePort(self):
"""
Test V1ServicePort
"""
model = kubernetes.client.models.v1_service_port.V1ServicePort()
if __name__ == '__main__':
unittest.main()
|
chouseknecht/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/parsing/convert_bool.py
|
118
|
# Copyright: 2017, Ansible Project
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause )
from ansible.module_utils.six import binary_type, text_type
from ansible.module_utils._text import to_text
BOOLEANS_TRUE = frozenset(('y', 'yes', 'on', '1', 'true', 't', 1, 1.0, True))
BOOLEANS_FALSE = frozenset(('n', 'no', 'off', '0', 'false', 'f', 0, 0.0, False))
BOOLEANS = BOOLEANS_TRUE.union(BOOLEANS_FALSE)
def boolean(value, strict=True):
    """Convert common truthy/falsy spellings to a bool.

    Accepts the values collected in BOOLEANS (e.g. 'yes', 'on', '1', 1, True).
    Unrecognized values raise TypeError when strict is True (the default) and
    are treated as False otherwise.
    """
if isinstance(value, bool):
return value
normalized_value = value
if isinstance(value, (text_type, binary_type)):
normalized_value = to_text(value, errors='surrogate_or_strict').lower().strip()
if normalized_value in BOOLEANS_TRUE:
return True
elif normalized_value in BOOLEANS_FALSE or not strict:
return False
raise TypeError("The value '%s' is not a valid boolean. Valid booleans include: %s" % (to_text(value), ', '.join(repr(i) for i in BOOLEANS)))
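# A short, illustrative self-check of boolean(); not part of the original
# module_utils API surface.
if __name__ == '__main__':
    assert boolean('yes') is True
    assert boolean('OFF') is False
    assert boolean('maybe', strict=False) is False  # unrecognized, non-strict
    try:
        boolean('maybe')
    except TypeError:
        pass  # unrecognized values raise TypeError in strict mode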
|