repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
markgras/salt | salt/states/azurearm_network.py | """
Azure (ARM) Network State Module
.. versionadded:: 2019.2.0
:maintainer: <<EMAIL>>
:maturity: new
:depends:
* `azure <https://pypi.python.org/pypi/azure>`_ >= 2.0.0
* `azure-common <https://pypi.python.org/pypi/azure-common>`_ >= 1.1.8
* `azure-mgmt <https://pypi.python.org/pypi/azure-mgmt>`_ >= 1.0.0
* `azure-mgmt-compute <https://pypi.python.org/pypi/azure-mgmt-compute>`_ >= 1.0.0
* `azure-mgmt-network <https://pypi.python.org/pypi/azure-mgmt-network>`_ >= 1.7.1
* `azure-mgmt-resource <https://pypi.python.org/pypi/azure-mgmt-resource>`_ >= 1.1.0
* `azure-mgmt-storage <https://pypi.python.org/pypi/azure-mgmt-storage>`_ >= 1.0.0
* `azure-mgmt-web <https://pypi.python.org/pypi/azure-mgmt-web>`_ >= 0.32.0
* `azure-storage <https://pypi.python.org/pypi/azure-storage>`_ >= 0.34.3
* `msrestazure <https://pypi.python.org/pypi/msrestazure>`_ >= 0.4.21
:platform: linux
:configuration: This module requires Azure Resource Manager credentials to be passed as a dictionary of
keyword arguments to the ``connection_auth`` parameter in order to work properly. Since the authentication
parameters are sensitive, it's recommended to pass them to the states via pillar.
Required provider parameters:
if using username and password:
* ``subscription_id``
* ``username``
* ``password``
if using a service principal:
* ``subscription_id``
* ``tenant``
* ``client_id``
* ``secret``
Optional provider parameters:
**cloud_environment**: Used to point the cloud driver to different API endpoints, such as Azure GovCloud. Possible values:
* ``AZURE_PUBLIC_CLOUD`` (default)
* ``AZURE_CHINA_CLOUD``
* ``AZURE_US_GOV_CLOUD``
* ``AZURE_GERMAN_CLOUD``
Example Pillar for Azure Resource Manager authentication:
.. code-block:: yaml
azurearm:
user_pass_auth:
subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617
username: fletch
password: <PASSWORD>
mysubscription:
subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617
tenant: ABCDEFAB-1234-ABCD-1234-ABCDEFABCDEF
client_id: ABCDEFAB-1234-ABCD-1234-ABCDEFABCDEF
secret: XXX<KEY>
cloud_environment: AZURE_PUBLIC_CLOUD
Example states using Azure Resource Manager authentication:
.. code-block:: jinja
{% set profile = salt['pillar.get']('azurearm:mysubscription') %}
Ensure virtual network exists:
azurearm_network.virtual_network_present:
- name: my_vnet
- resource_group: my_rg
- address_prefixes:
- '10.0.0.0/8'
- '192.168.0.0/16'
- dns_servers:
- '8.8.8.8'
- tags:
how_awesome: very
contact_name: <NAME>
- connection_auth: {{ profile }}
Ensure virtual network is absent:
azurearm_network.virtual_network_absent:
- name: other_vnet
- resource_group: my_rg
- connection_auth: {{ profile }}
"""
import logging
__virtualname__ = "azurearm_network"
log = logging.getLogger(__name__)
def __virtual__():
    """
    Load this state module only when the azurearm_network execution module
    has been loaded successfully (checked via one of its known functions).
    """
    if "azurearm_network.check_ip_address_availability" not in __salt__:
        return (False, "azurearm_network module could not be loaded")
    return __virtualname__
def virtual_network_present(
    name,
    address_prefixes,
    resource_group,
    dns_servers=None,
    tags=None,
    connection_auth=None,
    **kwargs
):
    """
    .. versionadded:: 2019.2.0

    Ensure a virtual network exists.

    :param name:
        Name of the virtual network.
    :param resource_group:
        The resource group assigned to the virtual network.
    :param address_prefixes:
        A list of CIDR blocks which can be used by subnets within the virtual network.
    :param dns_servers:
        A list of DNS server addresses.
    :param tags:
        A dictionary of strings can be passed as tag metadata to the virtual network object.
    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure virtual network exists:
            azurearm_network.virtual_network_present:
                - name: vnet1
                - resource_group: group1
                - address_prefixes:
                    - '10.0.0.0/8'
                    - '192.168.0.0/16'
                - dns_servers:
                    - '8.8.8.8'
                - tags:
                    contact_name: <NAME>
                - connection_auth: {{ profile }}
                - require:
                    - azurearm_resource: Ensure resource group exists
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    # Credentials must be supplied explicitly (typically from pillar); fail early.
    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret
    # Fetch the current vnet; the execution module returns {"error": ...} when the
    # vnet does not exist (log level lowered so the miss is not logged as an error).
    vnet = __salt__["azurearm_network.virtual_network_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )
    if "error" not in vnet:
        # The vnet already exists: diff each managed property to detect drift.
        tag_changes = __utils__["dictdiffer.deep_diff"](
            vnet.get("tags", {}), tags or {}
        )
        if tag_changes:
            ret["changes"]["tags"] = tag_changes
        # Order-insensitive comparison of the DNS server lists.
        dns_changes = set(dns_servers or []).symmetric_difference(
            set(vnet.get("dhcp_options", {}).get("dns_servers", []))
        )
        if dns_changes:
            ret["changes"]["dns_servers"] = {
                "old": vnet.get("dhcp_options", {}).get("dns_servers", []),
                "new": dns_servers,
            }
        # Order-insensitive comparison of the CIDR address prefixes.
        addr_changes = set(address_prefixes or []).symmetric_difference(
            set(vnet.get("address_space", {}).get("address_prefixes", []))
        )
        if addr_changes:
            ret["changes"]["address_space"] = {
                "address_prefixes": {
                    "old": vnet.get("address_space", {}).get("address_prefixes", []),
                    "new": address_prefixes,
                }
            }
        # Optional boolean flags passed through **kwargs; absent means False.
        if kwargs.get("enable_ddos_protection", False) != vnet.get(
            "enable_ddos_protection"
        ):
            ret["changes"]["enable_ddos_protection"] = {
                "old": vnet.get("enable_ddos_protection"),
                "new": kwargs.get("enable_ddos_protection"),
            }
        if kwargs.get("enable_vm_protection", False) != vnet.get(
            "enable_vm_protection"
        ):
            ret["changes"]["enable_vm_protection"] = {
                "old": vnet.get("enable_vm_protection"),
                "new": kwargs.get("enable_vm_protection"),
            }
        # No drift detected: the state is already satisfied.
        if not ret["changes"]:
            ret["result"] = True
            ret["comment"] = "Virtual network {} is already present.".format(name)
            return ret
        # Salt test mode: report the pending update without applying it.
        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = "Virtual network {} would be updated.".format(name)
            return ret
    else:
        # The vnet does not exist yet: every desired property is a new change.
        ret["changes"] = {
            "old": {},
            "new": {
                "name": name,
                "resource_group": resource_group,
                "address_space": {"address_prefixes": address_prefixes},
                "dhcp_options": {"dns_servers": dns_servers},
                "enable_ddos_protection": kwargs.get("enable_ddos_protection", False),
                "enable_vm_protection": kwargs.get("enable_vm_protection", False),
                "tags": tags,
            },
        }
        if __opts__["test"]:
            ret["comment"] = "Virtual network {} would be created.".format(name)
            ret["result"] = None
            return ret
    # Apply: create or update the vnet. connection_auth entries intentionally
    # override any same-named keys from **kwargs.
    vnet_kwargs = kwargs.copy()
    vnet_kwargs.update(connection_auth)
    vnet = __salt__["azurearm_network.virtual_network_create_or_update"](
        name=name,
        resource_group=resource_group,
        address_prefixes=address_prefixes,
        dns_servers=dns_servers,
        tags=tags,
        **vnet_kwargs
    )
    if "error" not in vnet:
        ret["result"] = True
        ret["comment"] = "Virtual network {} has been created.".format(name)
        return ret
    ret["comment"] = "Failed to create virtual network {}! ({})".format(
        name, vnet.get("error")
    )
    return ret
def virtual_network_absent(name, resource_group, connection_auth=None):
    """
    .. versionadded:: 2019.2.0

    Ensure a virtual network does not exist in the resource group.

    :param name:
        Name of the virtual network.
    :param resource_group:
        The resource group assigned to the virtual network.
    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    # Credentials are mandatory for every Azure ARM call.
    if not isinstance(connection_auth, dict):
        ret["comment"] = (
            "Connection information must be specified via connection_auth dictionary!"
        )
        return ret

    existing = __salt__["azurearm_network.virtual_network_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )

    # Already absent: nothing to do.
    if "error" in existing:
        ret["result"] = True
        ret["comment"] = "Virtual network {} was not found.".format(name)
        return ret

    # Test mode: report the pending deletion without performing it.
    if __opts__["test"]:
        ret["result"] = None
        ret["comment"] = "Virtual network {} would be deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    if __salt__["azurearm_network.virtual_network_delete"](
        name, resource_group, **connection_auth
    ):
        ret["result"] = True
        ret["comment"] = "Virtual network {} has been deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    ret["comment"] = "Failed to delete virtual network {}!".format(name)
    return ret
def subnet_present(
    name,
    address_prefix,
    virtual_network,
    resource_group,
    security_group=None,
    route_table=None,
    connection_auth=None,
    **kwargs
):
    """
    .. versionadded:: 2019.2.0

    Ensure a subnet exists.

    :param name:
        Name of the subnet.
    :param address_prefix:
        A CIDR block used by the subnet within the virtual network.
    :param virtual_network:
        Name of the existing virtual network to contain the subnet.
    :param resource_group:
        The resource group assigned to the virtual network.
    :param security_group:
        The name of the existing network security group to assign to the subnet.
    :param route_table:
        The name of the existing route table to assign to the subnet.
    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure subnet exists:
            azurearm_network.subnet_present:
                - name: vnet1_sn1
                - virtual_network: vnet1
                - resource_group: group1
                - address_prefix: '192.168.1.0/24'
                - security_group: nsg1
                - route_table: rt1
                - connection_auth: {{ profile }}
                - require:
                    - azurearm_network: Ensure virtual network exists
                    - azurearm_network: Ensure network security group exists
                    - azurearm_network: Ensure route table exists
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    # Credentials must be supplied explicitly; fail the state early otherwise.
    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret
    # Fetch the current subnet; returns {"error": ...} when it does not exist.
    snet = __salt__["azurearm_network.subnet_get"](
        name,
        virtual_network,
        resource_group,
        azurearm_log_level="info",
        **connection_auth
    )
    if "error" not in snet:
        # Subnet exists: compare each managed property against the desired state.
        if address_prefix != snet.get("address_prefix"):
            ret["changes"]["address_prefix"] = {
                "old": snet.get("address_prefix"),
                "new": address_prefix,
            }
        # The NSG is referenced by full resource ID; compare by trailing name only.
        nsg_name = None
        if snet.get("network_security_group"):
            nsg_name = snet["network_security_group"]["id"].split("/")[-1]
        # Only diff when a security_group was requested; an unset parameter does
        # not remove an existing association.
        if security_group and (security_group != nsg_name):
            ret["changes"]["network_security_group"] = {
                "old": nsg_name,
                "new": security_group,
            }
        # Same ID-suffix comparison for the route table association.
        rttbl_name = None
        if snet.get("route_table"):
            rttbl_name = snet["route_table"]["id"].split("/")[-1]
        if route_table and (route_table != rttbl_name):
            ret["changes"]["route_table"] = {"old": rttbl_name, "new": route_table}
        # No drift detected: the state is already satisfied.
        if not ret["changes"]:
            ret["result"] = True
            ret["comment"] = "Subnet {} is already present.".format(name)
            return ret
        # Salt test mode: report the pending update without applying it.
        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = "Subnet {} would be updated.".format(name)
            return ret
    else:
        # The subnet does not exist yet: everything requested is a new change.
        ret["changes"] = {
            "old": {},
            "new": {
                "name": name,
                "address_prefix": address_prefix,
                "network_security_group": security_group,
                "route_table": route_table,
            },
        }
        if __opts__["test"]:
            ret["comment"] = "Subnet {} would be created.".format(name)
            ret["result"] = None
            return ret
    # Apply: create or update the subnet; connection_auth overrides **kwargs keys.
    snet_kwargs = kwargs.copy()
    snet_kwargs.update(connection_auth)
    snet = __salt__["azurearm_network.subnet_create_or_update"](
        name=name,
        virtual_network=virtual_network,
        resource_group=resource_group,
        address_prefix=address_prefix,
        network_security_group=security_group,
        route_table=route_table,
        **snet_kwargs
    )
    if "error" not in snet:
        ret["result"] = True
        ret["comment"] = "Subnet {} has been created.".format(name)
        return ret
    ret["comment"] = "Failed to create subnet {}! ({})".format(name, snet.get("error"))
    return ret
def subnet_absent(name, virtual_network, resource_group, connection_auth=None):
    """
    .. versionadded:: 2019.2.0

    Ensure a subnet does not exist in the virtual network.

    :param name:
        Name of the subnet.
    :param virtual_network:
        Name of the existing virtual network containing the subnet.
    :param resource_group:
        The resource group assigned to the virtual network.
    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    # Credentials are mandatory for every Azure ARM call.
    if not isinstance(connection_auth, dict):
        ret["comment"] = (
            "Connection information must be specified via connection_auth dictionary!"
        )
        return ret

    existing = __salt__["azurearm_network.subnet_get"](
        name,
        virtual_network,
        resource_group,
        azurearm_log_level="info",
        **connection_auth
    )

    # Already absent: nothing to do.
    if "error" in existing:
        ret["result"] = True
        ret["comment"] = "Subnet {} was not found.".format(name)
        return ret

    # Test mode: report the pending deletion without performing it.
    if __opts__["test"]:
        ret["result"] = None
        ret["comment"] = "Subnet {} would be deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    if __salt__["azurearm_network.subnet_delete"](
        name, virtual_network, resource_group, **connection_auth
    ):
        ret["result"] = True
        ret["comment"] = "Subnet {} has been deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    ret["comment"] = "Failed to delete subnet {}!".format(name)
    return ret
def network_security_group_present(
    name, resource_group, tags=None, security_rules=None, connection_auth=None, **kwargs
):
    """
    .. versionadded:: 2019.2.0

    Ensure a network security group exists.

    :param name:
        Name of the network security group.
    :param resource_group:
        The resource group assigned to the network security group.
    :param tags:
        A dictionary of strings can be passed as tag metadata to the network security group object.
    :param security_rules: An optional list of dictionaries representing valid SecurityRule objects. See the
        documentation for the security_rule_present state or security_rule_create_or_update execution module
        for more information on required and optional parameters for security rules. The rules are only
        managed if this parameter is present. When this parameter is absent, implemented rules will not be removed,
        and will merely become unmanaged.
    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure network security group exists:
            azurearm_network.network_security_group_present:
                - name: nsg1
                - resource_group: group1
                - security_rules:
                  - name: nsg1_rule1
                    priority: 100
                    protocol: tcp
                    access: allow
                    direction: outbound
                    source_address_prefix: virtualnetwork
                    destination_address_prefix: internet
                    source_port_range: '*'
                    destination_port_range: '*'
                  - name: nsg1_rule2
                    priority: 101
                    protocol: tcp
                    access: allow
                    direction: inbound
                    source_address_prefix: internet
                    destination_address_prefix: virtualnetwork
                    source_port_range: '*'
                    destination_port_ranges:
                      - '80'
                      - '443'
                - tags:
                    contact_name: <NAME>
                - connection_auth: {{ profile }}
                - require:
                    - azurearm_resource: Ensure resource group exists
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    # Credentials must be supplied explicitly; fail the state early otherwise.
    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret
    # Fetch the current NSG; returns {"error": ...} when it does not exist.
    nsg = __salt__["azurearm_network.network_security_group_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )
    if "error" not in nsg:
        # NSG exists: diff tags and (optionally) the managed security rules.
        tag_changes = __utils__["dictdiffer.deep_diff"](nsg.get("tags", {}), tags or {})
        if tag_changes:
            ret["changes"]["tags"] = tag_changes
        # Rules are only compared when the parameter is provided; otherwise
        # existing rules are deliberately left unmanaged.
        if security_rules:
            comp_ret = __utils__["azurearm.compare_list_of_dicts"](
                nsg.get("security_rules", []), security_rules
            )
            # A "comment" from the comparison helper signals invalid input.
            if comp_ret.get("comment"):
                ret["comment"] = '"security_rules" {}'.format(comp_ret["comment"])
                return ret
            if comp_ret.get("changes"):
                ret["changes"]["security_rules"] = comp_ret["changes"]
        # No drift detected: the state is already satisfied.
        if not ret["changes"]:
            ret["result"] = True
            ret["comment"] = "Network security group {} is already present.".format(
                name
            )
            return ret
        # Salt test mode: report the pending update without applying it.
        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = "Network security group {} would be updated.".format(name)
            return ret
    else:
        # The NSG does not exist yet: everything requested is a new change.
        ret["changes"] = {
            "old": {},
            "new": {
                "name": name,
                "resource_group": resource_group,
                "tags": tags,
                "security_rules": security_rules,
            },
        }
        if __opts__["test"]:
            ret["comment"] = "Network security group {} would be created.".format(name)
            ret["result"] = None
            return ret
    # Apply: create or update the NSG; connection_auth overrides **kwargs keys.
    nsg_kwargs = kwargs.copy()
    nsg_kwargs.update(connection_auth)
    nsg = __salt__["azurearm_network.network_security_group_create_or_update"](
        name=name,
        resource_group=resource_group,
        tags=tags,
        security_rules=security_rules,
        **nsg_kwargs
    )
    if "error" not in nsg:
        ret["result"] = True
        ret["comment"] = "Network security group {} has been created.".format(name)
        return ret
    ret["comment"] = "Failed to create network security group {}! ({})".format(
        name, nsg.get("error")
    )
    return ret
def network_security_group_absent(name, resource_group, connection_auth=None):
    """
    .. versionadded:: 2019.2.0

    Ensure a network security group does not exist in the resource group.

    :param name:
        Name of the network security group.
    :param resource_group:
        The resource group assigned to the network security group.
    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    # Credentials are mandatory for every Azure ARM call.
    if not isinstance(connection_auth, dict):
        ret["comment"] = (
            "Connection information must be specified via connection_auth dictionary!"
        )
        return ret

    existing = __salt__["azurearm_network.network_security_group_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )

    # Already absent: nothing to do.
    if "error" in existing:
        ret["result"] = True
        ret["comment"] = "Network security group {} was not found.".format(name)
        return ret

    # Test mode: report the pending deletion without performing it.
    if __opts__["test"]:
        ret["result"] = None
        ret["comment"] = "Network security group {} would be deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    if __salt__["azurearm_network.network_security_group_delete"](
        name, resource_group, **connection_auth
    ):
        ret["result"] = True
        ret["comment"] = "Network security group {} has been deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    ret["comment"] = "Failed to delete network security group {}!".format(name)
    return ret
def security_rule_present(
    name,
    access,
    direction,
    priority,
    protocol,
    security_group,
    resource_group,
    destination_address_prefix=None,
    destination_port_range=None,
    source_address_prefix=None,
    source_port_range=None,
    description=None,
    destination_address_prefixes=None,
    destination_port_ranges=None,
    source_address_prefixes=None,
    source_port_ranges=None,
    connection_auth=None,
    **kwargs
):
    """
    .. versionadded:: 2019.2.0

    Ensure a security rule exists.

    :param name:
        Name of the security rule.
    :param access:
        'allow' or 'deny'
    :param direction:
        'inbound' or 'outbound'
    :param priority:
        Integer between 100 and 4096 used for ordering rule application.
    :param protocol:
        'tcp', 'udp', or '*'
    :param security_group:
        The name of the existing network security group to contain the security rule.
    :param resource_group:
        The resource group assigned to the network security group.
    :param description:
        Optional description of the security rule.
    :param destination_address_prefix:
        The CIDR or destination IP range. An asterisk '*' can also be used to match all destination IPs.
        Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used.
    :param destination_port_range:
        The destination port or range. Integer or range between 0 and 65535. An asterisk '*'
        can also be used to match all ports.
    :param source_address_prefix:
        The CIDR or source IP range. An asterisk '*' can also be used to match all source IPs.
        Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used.
        If this is an ingress rule, specifies where network traffic originates from.
    :param source_port_range:
        The source port or range. Integer or range between 0 and 65535. An asterisk '*'
        can also be used to match all ports.
    :param destination_address_prefixes:
        A list of destination_address_prefix values. This parameter overrides destination_address_prefix
        and will cause any value entered there to be ignored.
    :param destination_port_ranges:
        A list of destination_port_range values. This parameter overrides destination_port_range
        and will cause any value entered there to be ignored.
    :param source_address_prefixes:
        A list of source_address_prefix values. This parameter overrides source_address_prefix
        and will cause any value entered there to be ignored.
    :param source_port_ranges:
        A list of source_port_range values. This parameter overrides source_port_range
        and will cause any value entered there to be ignored.
    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure security rule exists:
            azurearm_network.security_rule_present:
                - name: nsg1_rule2
                - security_group: nsg1
                - resource_group: group1
                - priority: 101
                - protocol: tcp
                - access: allow
                - direction: inbound
                - source_address_prefix: internet
                - destination_address_prefix: virtualnetwork
                - source_port_range: '*'
                - destination_port_ranges:
                  - '80'
                  - '443'
                - connection_auth: {{ profile }}
                - require:
                    - azurearm_network: Ensure network security group exists
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    # Credentials must be supplied explicitly; fail the state early otherwise.
    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret
    # Each (list, scalar) pair below is "one of the two is required" and the list
    # form, when given, overrides the scalar form. Using an explicit mapping of
    # parameter names to values instead of eval()/exec(): the original
    # exec("<scalar> = None") had NO effect, because exec cannot rebind function
    # locals in Python 3, so the scalar values were never actually cleared.
    exclusive_params = [
        ("source_port_ranges", "source_port_range"),
        ("source_address_prefixes", "source_address_prefix"),
        ("destination_port_ranges", "destination_port_range"),
        ("destination_address_prefixes", "destination_address_prefix"),
    ]
    param_values = {
        "source_port_ranges": source_port_ranges,
        "source_port_range": source_port_range,
        "source_address_prefixes": source_address_prefixes,
        "source_address_prefix": source_address_prefix,
        "destination_port_ranges": destination_port_ranges,
        "destination_port_range": destination_port_range,
        "destination_address_prefixes": destination_address_prefixes,
        "destination_address_prefix": destination_address_prefix,
    }
    for plural, singular in exclusive_params:
        if not param_values[plural] and not param_values[singular]:
            ret["comment"] = "Either the {} or {} parameter must be provided!".format(
                plural, singular
            )
            return ret
        if param_values[plural] and not isinstance(param_values[plural], list):
            ret["comment"] = "The {} parameter must be a list!".format(plural)
            return ret
    # BUG FIX: clear the scalar parameters explicitly so the list parameters
    # actually take precedence as documented (the exec()-based version silently
    # left them set, producing spurious change detection and mixed API payloads).
    if source_port_ranges:
        source_port_range = None
    if source_address_prefixes:
        source_address_prefix = None
    if destination_port_ranges:
        destination_port_range = None
    if destination_address_prefixes:
        destination_address_prefix = None
    # Fetch the current rule; returns {"error": ...} when it does not exist.
    rule = __salt__["azurearm_network.security_rule_get"](
        name,
        security_group,
        resource_group,
        azurearm_log_level="info",
        **connection_auth
    )
    if "error" not in rule:
        # The rule exists: compare each property. Azure returns capitalized
        # enum values ('Allow', 'Inbound'), hence the .capitalize() calls.
        # access changes
        if access.capitalize() != rule.get("access"):
            ret["changes"]["access"] = {"old": rule.get("access"), "new": access}
        # description changes
        if description != rule.get("description"):
            ret["changes"]["description"] = {
                "old": rule.get("description"),
                "new": description,
            }
        # direction changes
        if direction.capitalize() != rule.get("direction"):
            ret["changes"]["direction"] = {
                "old": rule.get("direction"),
                "new": direction,
            }
        # priority changes
        if int(priority) != rule.get("priority"):
            ret["changes"]["priority"] = {"old": rule.get("priority"), "new": priority}
        # protocol changes (case-insensitive)
        if protocol.lower() != rule.get("protocol", "").lower():
            ret["changes"]["protocol"] = {"old": rule.get("protocol"), "new": protocol}
        # destination_port_range changes
        if destination_port_range != rule.get("destination_port_range"):
            ret["changes"]["destination_port_range"] = {
                "old": rule.get("destination_port_range"),
                "new": destination_port_range,
            }
        # source_port_range changes
        if source_port_range != rule.get("source_port_range"):
            ret["changes"]["source_port_range"] = {
                "old": rule.get("source_port_range"),
                "new": source_port_range,
            }
        # destination_port_ranges changes (order-insensitive)
        if sorted(destination_port_ranges or []) != sorted(
            rule.get("destination_port_ranges", [])
        ):
            ret["changes"]["destination_port_ranges"] = {
                "old": rule.get("destination_port_ranges"),
                "new": destination_port_ranges,
            }
        # source_port_ranges changes (order-insensitive)
        if sorted(source_port_ranges or []) != sorted(
            rule.get("source_port_ranges", [])
        ):
            ret["changes"]["source_port_ranges"] = {
                "old": rule.get("source_port_ranges"),
                "new": source_port_ranges,
            }
        # destination_address_prefix changes (case-insensitive)
        if (destination_address_prefix or "").lower() != rule.get(
            "destination_address_prefix", ""
        ).lower():
            ret["changes"]["destination_address_prefix"] = {
                "old": rule.get("destination_address_prefix"),
                "new": destination_address_prefix,
            }
        # source_address_prefix changes (case-insensitive)
        if (source_address_prefix or "").lower() != rule.get(
            "source_address_prefix", ""
        ).lower():
            ret["changes"]["source_address_prefix"] = {
                "old": rule.get("source_address_prefix"),
                "new": source_address_prefix,
            }
        # destination_address_prefixes changes: order-insensitive and, when the
        # lengths match, element-wise case-insensitive.
        if sorted(destination_address_prefixes or []) != sorted(
            rule.get("destination_address_prefixes", [])
        ):
            if len(destination_address_prefixes or []) != len(
                rule.get("destination_address_prefixes", [])
            ):
                ret["changes"]["destination_address_prefixes"] = {
                    "old": rule.get("destination_address_prefixes"),
                    "new": destination_address_prefixes,
                }
            else:
                local_dst_addrs, remote_dst_addrs = (
                    sorted(destination_address_prefixes),
                    sorted(rule.get("destination_address_prefixes")),
                )
                for idx, val in enumerate(local_dst_addrs):
                    if val.lower() != remote_dst_addrs[idx].lower():
                        ret["changes"]["destination_address_prefixes"] = {
                            "old": rule.get("destination_address_prefixes"),
                            "new": destination_address_prefixes,
                        }
                        break
        # source_address_prefixes changes: same comparison strategy as above.
        if sorted(source_address_prefixes or []) != sorted(
            rule.get("source_address_prefixes", [])
        ):
            if len(source_address_prefixes or []) != len(
                rule.get("source_address_prefixes", [])
            ):
                ret["changes"]["source_address_prefixes"] = {
                    "old": rule.get("source_address_prefixes"),
                    "new": source_address_prefixes,
                }
            else:
                local_src_addrs, remote_src_addrs = (
                    sorted(source_address_prefixes),
                    sorted(rule.get("source_address_prefixes")),
                )
                for idx, val in enumerate(local_src_addrs):
                    if val.lower() != remote_src_addrs[idx].lower():
                        ret["changes"]["source_address_prefixes"] = {
                            "old": rule.get("source_address_prefixes"),
                            "new": source_address_prefixes,
                        }
                        break
        # No drift detected: the state is already satisfied.
        if not ret["changes"]:
            ret["result"] = True
            ret["comment"] = "Security rule {} is already present.".format(name)
            return ret
        # Salt test mode: report the pending update without applying it.
        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = "Security rule {} would be updated.".format(name)
            return ret
    else:
        # The rule does not exist yet: everything requested is a new change.
        ret["changes"] = {
            "old": {},
            "new": {
                "name": name,
                "access": access,
                "description": description,
                "direction": direction,
                "priority": priority,
                "protocol": protocol,
                "destination_address_prefix": destination_address_prefix,
                "destination_address_prefixes": destination_address_prefixes,
                "destination_port_range": destination_port_range,
                "destination_port_ranges": destination_port_ranges,
                "source_address_prefix": source_address_prefix,
                "source_address_prefixes": source_address_prefixes,
                "source_port_range": source_port_range,
                "source_port_ranges": source_port_ranges,
            },
        }
        if __opts__["test"]:
            ret["comment"] = "Security rule {} would be created.".format(name)
            ret["result"] = None
            return ret
    # Apply: create or update the rule; connection_auth overrides **kwargs keys.
    rule_kwargs = kwargs.copy()
    rule_kwargs.update(connection_auth)
    rule = __salt__["azurearm_network.security_rule_create_or_update"](
        name=name,
        access=access,
        description=description,
        direction=direction,
        priority=priority,
        protocol=protocol,
        security_group=security_group,
        resource_group=resource_group,
        destination_address_prefix=destination_address_prefix,
        destination_address_prefixes=destination_address_prefixes,
        destination_port_range=destination_port_range,
        destination_port_ranges=destination_port_ranges,
        source_address_prefix=source_address_prefix,
        source_address_prefixes=source_address_prefixes,
        source_port_range=source_port_range,
        source_port_ranges=source_port_ranges,
        **rule_kwargs
    )
    if "error" not in rule:
        ret["result"] = True
        ret["comment"] = "Security rule {} has been created.".format(name)
        return ret
    ret["comment"] = "Failed to create security rule {}! ({})".format(
        name, rule.get("error")
    )
    return ret
def security_rule_absent(name, security_group, resource_group, connection_auth=None):
    """
    .. versionadded:: 2019.2.0

    Ensure a security rule does not exist in the network security group.

    :param name:
        Name of the security rule.
    :param security_group:
        The network security group containing the security rule.
    :param resource_group:
        The resource group assigned to the network security group.
    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    # Credentials are mandatory for every Azure ARM call.
    if not isinstance(connection_auth, dict):
        ret["comment"] = (
            "Connection information must be specified via connection_auth dictionary!"
        )
        return ret

    existing = __salt__["azurearm_network.security_rule_get"](
        name,
        security_group,
        resource_group,
        azurearm_log_level="info",
        **connection_auth
    )

    # Already absent: nothing to do.
    if "error" in existing:
        ret["result"] = True
        ret["comment"] = "Security rule {} was not found.".format(name)
        return ret

    # Test mode: report the pending deletion without performing it.
    if __opts__["test"]:
        ret["result"] = None
        ret["comment"] = "Security rule {} would be deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    if __salt__["azurearm_network.security_rule_delete"](
        name, security_group, resource_group, **connection_auth
    ):
        ret["result"] = True
        ret["comment"] = "Security rule {} has been deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    ret["comment"] = "Failed to delete security rule {}!".format(name)
    return ret
def load_balancer_present(
    name,
    resource_group,
    sku=None,
    frontend_ip_configurations=None,
    backend_address_pools=None,
    load_balancing_rules=None,
    probes=None,
    inbound_nat_rules=None,
    inbound_nat_pools=None,
    outbound_nat_rules=None,
    tags=None,
    connection_auth=None,
    **kwargs
):
    """
    .. versionadded:: 2019.2.0

    Ensure a load balancer exists.

    :param name:
        Name of the load balancer.

    :param resource_group:
        The resource group assigned to the load balancer.

    :param sku:
        The load balancer SKU, which can be 'Basic' or 'Standard'.

    :param tags:
        A dictionary of strings can be passed as tag metadata to the load balancer object.

    :param frontend_ip_configurations:
        An optional list of dictionaries representing valid FrontendIPConfiguration objects. A frontend IP
        configuration can be either private (using private IP address and subnet parameters) or public (using a
        reference to a public IP address object). Valid parameters are:

        - ``name``: The name of the resource that is unique within a resource group.
        - ``private_ip_address``: The private IP address of the IP configuration. Required if
          'private_ip_allocation_method' is 'Static'.
        - ``private_ip_allocation_method``: The Private IP allocation method. Possible values are: 'Static' and
          'Dynamic'.
        - ``subnet``: Name of an existing subnet inside of which the frontend IP will reside.
        - ``public_ip_address``: Name of an existing public IP address which will be assigned to the frontend IP object.

    :param backend_address_pools:
        An optional list of dictionaries representing valid BackendAddressPool objects. Only the 'name' parameter is
        valid for a BackendAddressPool dictionary. All other parameters are read-only references from other objects
        linking to the backend address pool. Inbound traffic is randomly load balanced across IPs in the backend IPs.

    :param probes:
        An optional list of dictionaries representing valid Probe objects. Valid parameters are:

        - ``name``: The name of the resource that is unique within a resource group.
        - ``protocol``: The protocol of the endpoint. Possible values are 'Http' or 'Tcp'. If 'Tcp' is specified, a
          received ACK is required for the probe to be successful. If 'Http' is specified, a 200 OK response from the
          specified URI is required for the probe to be successful.
        - ``port``: The port for communicating the probe. Possible values range from 1 to 65535, inclusive.
        - ``interval_in_seconds``: The interval, in seconds, for how frequently to probe the endpoint for health status.
          Typically, the interval is slightly less than half the allocated timeout period (in seconds) which allows two
          full probes before taking the instance out of rotation. The default value is 15, the minimum value is 5.
        - ``number_of_probes``: The number of probes where if no response, will result in stopping further traffic from
          being delivered to the endpoint. This values allows endpoints to be taken out of rotation faster or slower
          than the typical times used in Azure.
        - ``request_path``: The URI used for requesting health status from the VM. Path is required if a protocol is
          set to 'Http'. Otherwise, it is not allowed. There is no default value.

    :param load_balancing_rules:
        An optional list of dictionaries representing valid LoadBalancingRule objects. Valid parameters are:

        - ``name``: The name of the resource that is unique within a resource group.
        - ``load_distribution``: The load distribution policy for this rule. Possible values are 'Default', 'SourceIP',
          and 'SourceIPProtocol'.
        - ``frontend_port``: The port for the external endpoint. Port numbers for each rule must be unique within the
          Load Balancer. Acceptable values are between 0 and 65534. Note that value 0 enables 'Any Port'.
        - ``backend_port``: The port used for internal connections on the endpoint. Acceptable values are between 0 and
          65535. Note that value 0 enables 'Any Port'.
        - ``idle_timeout_in_minutes``: The timeout for the TCP idle connection. The value can be set between 4 and 30
          minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP.
        - ``enable_floating_ip``: Configures a virtual machine's endpoint for the floating IP capability required
          to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn
          Availability Groups in SQL server. This setting can't be changed after you create the endpoint.
        - ``disable_outbound_snat``: Configures SNAT for the VMs in the backend pool to use the public IP address
          specified in the frontend of the load balancing rule.
        - ``frontend_ip_configuration``: Name of the frontend IP configuration object used by the load balancing rule
          object.
        - ``backend_address_pool``: Name of the backend address pool object used by the load balancing rule object.
          Inbound traffic is randomly load balanced across IPs in the backend IPs.
        - ``probe``: Name of the probe object used by the load balancing rule object.

    :param inbound_nat_rules:
        An optional list of dictionaries representing valid InboundNatRule objects. Defining inbound NAT rules on your
        load balancer is mutually exclusive with defining an inbound NAT pool. Inbound NAT pools are referenced from
        virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an
        Inbound NAT pool. They have to reference individual inbound NAT rules. Valid parameters are:

        - ``name``: The name of the resource that is unique within a resource group.
        - ``frontend_ip_configuration``: Name of the frontend IP configuration object used by the inbound NAT rule
          object.
        - ``protocol``: Possible values include 'Udp', 'Tcp', or 'All'.
        - ``frontend_port``: The port for the external endpoint. Port numbers for each rule must be unique within the
          Load Balancer. Acceptable values range from 1 to 65534.
        - ``backend_port``: The port used for the internal endpoint. Acceptable values range from 1 to 65535.
        - ``idle_timeout_in_minutes``: The timeout for the TCP idle connection. The value can be set between 4 and 30
          minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP.
        - ``enable_floating_ip``: Configures a virtual machine's endpoint for the floating IP capability required
          to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn
          Availability Groups in SQL server. This setting can't be changed after you create the endpoint.

    :param inbound_nat_pools:
        An optional list of dictionaries representing valid InboundNatPool objects. They define an external port range
        for inbound NAT to a single backend port on NICs associated with a load balancer. Inbound NAT rules are created
        automatically for each NIC associated with the Load Balancer using an external port from this range. Defining an
        Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound NAT rules. Inbound NAT pools
        are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot
        reference an inbound NAT pool. They have to reference individual inbound NAT rules. Valid parameters are:

        - ``name``: The name of the resource that is unique within a resource group.
        - ``frontend_ip_configuration``: Name of the frontend IP configuration object used by the inbound NAT pool
          object.
        - ``protocol``: Possible values include 'Udp', 'Tcp', or 'All'.
        - ``frontend_port_range_start``: The first port number in the range of external ports that will be used to
          provide Inbound NAT to NICs associated with a load balancer. Acceptable values range between 1 and 65534.
        - ``frontend_port_range_end``: The last port number in the range of external ports that will be used to
          provide Inbound NAT to NICs associated with a load balancer. Acceptable values range between 1 and 65535.
        - ``backend_port``: The port used for internal connections to the endpoint. Acceptable values are between 1 and
          65535.

    :param outbound_nat_rules:
        An optional list of dictionaries representing valid OutboundNatRule objects. Valid parameters are:

        - ``name``: The name of the resource that is unique within a resource group.
        - ``frontend_ip_configuration``: Name of the frontend IP configuration object used by the outbound NAT rule
          object.
        - ``backend_address_pool``: Name of the backend address pool object used by the outbound NAT rule object.
          Outbound traffic is randomly load balanced across IPs in the backend IPs.
        - ``allocated_outbound_ports``: The number of outbound ports to be used for NAT.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure load balancer exists:
            azurearm_network.load_balancer_present:
                - name: lb1
                - resource_group: group1
                - location: eastus
                - frontend_ip_configurations:
                    - name: lb1_feip1
                      public_ip_address: pub_ip1
                - backend_address_pools:
                    - name: lb1_bepool1
                - probes:
                    - name: lb1_webprobe1
                      protocol: tcp
                      port: 80
                      interval_in_seconds: 5
                      number_of_probes: 2
                - load_balancing_rules:
                    - name: lb1_webprobe1
                      protocol: tcp
                      frontend_port: 80
                      backend_port: 80
                      idle_timeout_in_minutes: 4
                      frontend_ip_configuration: lb1_feip1
                      backend_address_pool: lb1_bepool1
                      probe: lb1_webprobe1
                - tags:
                    contact_name: <NAME>
                - connection_auth: {{ profile }}
                - require:
                  - azurearm_resource: Ensure resource group exists
                  - azurearm_network: Ensure public IP exists

    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret

    # The Azure API expects the SKU wrapped in an object, e.g. {"name": "Basic"}.
    if sku:
        sku = {"name": sku.capitalize()}

    # Fetch the existing load balancer (if any) so the desired state can be
    # diffed against what is actually deployed.
    load_bal = __salt__["azurearm_network.load_balancer_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )

    if "error" not in load_bal:
        # tag changes
        tag_changes = __utils__["dictdiffer.deep_diff"](
            load_bal.get("tags", {}), tags or {}
        )
        if tag_changes:
            ret["changes"]["tags"] = tag_changes

        # sku changes
        if sku:
            sku_changes = __utils__["dictdiffer.deep_diff"](
                load_bal.get("sku", {}), sku
            )
            if sku_changes:
                ret["changes"]["sku"] = sku_changes

        # frontend_ip_configurations changes
        if frontend_ip_configurations:
            comp_ret = __utils__["azurearm.compare_list_of_dicts"](
                load_bal.get("frontend_ip_configurations", []),
                frontend_ip_configurations,
                ["public_ip_address", "subnet"],
            )

            # A "comment" in the comparison result signals invalid input, which
            # aborts the state run rather than recording a change.
            if comp_ret.get("comment"):
                ret["comment"] = '"frontend_ip_configurations" {}'.format(
                    comp_ret["comment"]
                )
                return ret

            if comp_ret.get("changes"):
                ret["changes"]["frontend_ip_configurations"] = comp_ret["changes"]

        # backend_address_pools changes
        if backend_address_pools:
            comp_ret = __utils__["azurearm.compare_list_of_dicts"](
                load_bal.get("backend_address_pools", []), backend_address_pools
            )

            if comp_ret.get("comment"):
                ret["comment"] = '"backend_address_pools" {}'.format(
                    comp_ret["comment"]
                )
                return ret

            if comp_ret.get("changes"):
                ret["changes"]["backend_address_pools"] = comp_ret["changes"]

        # probes changes
        if probes:
            comp_ret = __utils__["azurearm.compare_list_of_dicts"](
                load_bal.get("probes", []), probes
            )

            if comp_ret.get("comment"):
                ret["comment"] = '"probes" {}'.format(comp_ret["comment"])
                return ret

            if comp_ret.get("changes"):
                ret["changes"]["probes"] = comp_ret["changes"]

        # load_balancing_rules changes
        if load_balancing_rules:
            comp_ret = __utils__["azurearm.compare_list_of_dicts"](
                load_bal.get("load_balancing_rules", []),
                load_balancing_rules,
                ["frontend_ip_configuration", "backend_address_pool", "probe"],
            )

            if comp_ret.get("comment"):
                ret["comment"] = '"load_balancing_rules" {}'.format(comp_ret["comment"])
                return ret

            if comp_ret.get("changes"):
                ret["changes"]["load_balancing_rules"] = comp_ret["changes"]

        # inbound_nat_rules changes
        if inbound_nat_rules:
            comp_ret = __utils__["azurearm.compare_list_of_dicts"](
                load_bal.get("inbound_nat_rules", []),
                inbound_nat_rules,
                ["frontend_ip_configuration"],
            )

            if comp_ret.get("comment"):
                ret["comment"] = '"inbound_nat_rules" {}'.format(comp_ret["comment"])
                return ret

            if comp_ret.get("changes"):
                ret["changes"]["inbound_nat_rules"] = comp_ret["changes"]

        # inbound_nat_pools changes
        if inbound_nat_pools:
            comp_ret = __utils__["azurearm.compare_list_of_dicts"](
                load_bal.get("inbound_nat_pools", []),
                inbound_nat_pools,
                ["frontend_ip_configuration"],
            )

            if comp_ret.get("comment"):
                ret["comment"] = '"inbound_nat_pools" {}'.format(comp_ret["comment"])
                return ret

            if comp_ret.get("changes"):
                ret["changes"]["inbound_nat_pools"] = comp_ret["changes"]

        # outbound_nat_rules changes
        if outbound_nat_rules:
            comp_ret = __utils__["azurearm.compare_list_of_dicts"](
                load_bal.get("outbound_nat_rules", []),
                outbound_nat_rules,
                ["frontend_ip_configuration"],
            )

            if comp_ret.get("comment"):
                ret["comment"] = '"outbound_nat_rules" {}'.format(comp_ret["comment"])
                return ret

            if comp_ret.get("changes"):
                ret["changes"]["outbound_nat_rules"] = comp_ret["changes"]

        # No differences were found, so the load balancer is already in the
        # desired state.
        if not ret["changes"]:
            ret["result"] = True
            ret["comment"] = "Load balancer {} is already present.".format(name)
            return ret

        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = "Load balancer {} would be updated.".format(name)
            return ret

    else:
        # No existing load balancer was found, so everything passed counts as new.
        ret["changes"] = {
            "old": {},
            "new": {
                "name": name,
                "sku": sku,
                "tags": tags,
                "frontend_ip_configurations": frontend_ip_configurations,
                "backend_address_pools": backend_address_pools,
                "load_balancing_rules": load_balancing_rules,
                "probes": probes,
                "inbound_nat_rules": inbound_nat_rules,
                "inbound_nat_pools": inbound_nat_pools,
                "outbound_nat_rules": outbound_nat_rules,
            },
        }

    if __opts__["test"]:
        ret["comment"] = "Load balancer {} would be created.".format(name)
        ret["result"] = None
        return ret

    # Merge any extra keyword arguments with the credentials for the module call.
    lb_kwargs = kwargs.copy()
    lb_kwargs.update(connection_auth)

    load_bal = __salt__["azurearm_network.load_balancer_create_or_update"](
        name=name,
        resource_group=resource_group,
        sku=sku,
        tags=tags,
        frontend_ip_configurations=frontend_ip_configurations,
        backend_address_pools=backend_address_pools,
        load_balancing_rules=load_balancing_rules,
        probes=probes,
        inbound_nat_rules=inbound_nat_rules,
        inbound_nat_pools=inbound_nat_pools,
        outbound_nat_rules=outbound_nat_rules,
        **lb_kwargs
    )

    if "error" not in load_bal:
        ret["result"] = True
        ret["comment"] = "Load balancer {} has been created.".format(name)
        return ret

    ret["comment"] = "Failed to create load balancer {}! ({})".format(
        name, load_bal.get("error")
    )
    return ret
def load_balancer_absent(name, resource_group, connection_auth=None):
    """
    .. versionadded:: 2019.2.0

    Ensure a load balancer does not exist in the resource group.

    :param name: Name of the load balancer.
    :param resource_group: The resource group assigned to the load balancer.
    :param connection_auth: A dict with subscription and authentication parameters to
        be used in connecting to the Azure Resource Manager API.
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    if not isinstance(connection_auth, dict):
        ret["comment"] = (
            "Connection information must be specified via connection_auth dictionary!"
        )
        return ret
    existing = __salt__["azurearm_network.load_balancer_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )
    # An error from the lookup means the load balancer is already absent.
    if "error" in existing:
        ret["result"] = True
        ret["comment"] = "Load balancer {} was not found.".format(name)
        return ret
    if __opts__["test"]:
        ret["result"] = None
        ret["comment"] = "Load balancer {} would be deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret
    if __salt__["azurearm_network.load_balancer_delete"](
        name, resource_group, **connection_auth
    ):
        ret["result"] = True
        ret["comment"] = "Load balancer {} has been deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret
    ret["comment"] = "Failed to delete load balancer {}!".format(name)
    return ret
def public_ip_address_present(
    name,
    resource_group,
    tags=None,
    sku=None,
    public_ip_allocation_method=None,
    public_ip_address_version=None,
    dns_settings=None,
    idle_timeout_in_minutes=None,
    connection_auth=None,
    **kwargs
):
    """
    .. versionadded:: 2019.2.0

    Ensure a public IP address exists.

    :param name:
        Name of the public IP address.

    :param resource_group:
        The resource group assigned to the public IP address.

    :param dns_settings:
        An optional dictionary representing a valid PublicIPAddressDnsSettings object. Parameters include
        'domain_name_label' and 'reverse_fqdn', which accept strings. The 'domain_name_label' parameter is concatenated
        with the regionalized DNS zone make up the fully qualified domain name associated with the public IP address.
        If a domain name label is specified, an A DNS record is created for the public IP in the Microsoft Azure DNS
        system. The 'reverse_fqdn' parameter is a user-visible, fully qualified domain name that resolves to this public
        IP address. If the reverse FQDN is specified, then a PTR DNS record is created pointing from the IP address in
        the in-addr.arpa domain to the reverse FQDN.

    :param sku:
        The public IP address SKU, which can be 'Basic' or 'Standard'.

    :param public_ip_allocation_method:
        The public IP allocation method. Possible values are: 'Static' and 'Dynamic'.

    :param public_ip_address_version:
        The public IP address version. Possible values are: 'IPv4' and 'IPv6'.

    :param idle_timeout_in_minutes:
        An integer representing the idle timeout of the public IP address.

    :param tags:
        A dictionary of strings can be passed as tag metadata to the public IP address object.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure public IP exists:
            azurearm_network.public_ip_address_present:
                - name: pub_ip1
                - resource_group: group1
                - dns_settings:
                    domain_name_label: decisionlab-ext-test-label
                - sku: basic
                - public_ip_allocation_method: static
                - public_ip_address_version: ipv4
                - idle_timeout_in_minutes: 4
                - tags:
                    contact_name: <NAME>
                - connection_auth: {{ profile }}
                - require:
                  - azurearm_resource: Ensure resource group exists

    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret

    # The Azure API expects the SKU wrapped in an object, e.g. {"name": "Basic"}.
    if sku:
        sku = {"name": sku.capitalize()}

    # Fetch the existing public IP (if any) so the desired state can be diffed.
    pub_ip = __salt__["azurearm_network.public_ip_address_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )

    if "error" not in pub_ip:
        # tag changes
        tag_changes = __utils__["dictdiffer.deep_diff"](
            pub_ip.get("tags", {}), tags or {}
        )
        if tag_changes:
            ret["changes"]["tags"] = tag_changes

        # dns_settings changes
        if dns_settings:
            if not isinstance(dns_settings, dict):
                ret["comment"] = "DNS settings must be provided as a dictionary!"
                return ret

            # Any single differing key marks the whole dns_settings dict as changed.
            for key in dns_settings:
                if dns_settings[key] != pub_ip.get("dns_settings", {}).get(key):
                    ret["changes"]["dns_settings"] = {
                        "old": pub_ip.get("dns_settings"),
                        "new": dns_settings,
                    }
                    break

        # sku changes
        if sku:
            sku_changes = __utils__["dictdiffer.deep_diff"](pub_ip.get("sku", {}), sku)
            if sku_changes:
                ret["changes"]["sku"] = sku_changes

        # public_ip_allocation_method changes
        if public_ip_allocation_method:
            if public_ip_allocation_method.capitalize() != pub_ip.get(
                "public_ip_allocation_method"
            ):
                ret["changes"]["public_ip_allocation_method"] = {
                    "old": pub_ip.get("public_ip_allocation_method"),
                    "new": public_ip_allocation_method,
                }

        # public_ip_address_version changes
        # NOTE(review): assumes the module returns the version as a string when
        # the key is present -- .lower() would raise AttributeError on a None
        # value; confirm against the execution module's output.
        if public_ip_address_version:
            if (
                public_ip_address_version.lower()
                != pub_ip.get("public_ip_address_version", "").lower()
            ):
                ret["changes"]["public_ip_address_version"] = {
                    "old": pub_ip.get("public_ip_address_version"),
                    "new": public_ip_address_version,
                }

        # idle_timeout_in_minutes changes
        if idle_timeout_in_minutes and (
            int(idle_timeout_in_minutes) != pub_ip.get("idle_timeout_in_minutes")
        ):
            ret["changes"]["idle_timeout_in_minutes"] = {
                "old": pub_ip.get("idle_timeout_in_minutes"),
                "new": idle_timeout_in_minutes,
            }

        # No differences were found, so the public IP is already in the
        # desired state.
        if not ret["changes"]:
            ret["result"] = True
            ret["comment"] = "Public IP address {} is already present.".format(name)
            return ret

        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = "Public IP address {} would be updated.".format(name)
            return ret

    else:
        # No existing public IP was found, so everything passed counts as new.
        ret["changes"] = {
            "old": {},
            "new": {
                "name": name,
                "tags": tags,
                "dns_settings": dns_settings,
                "sku": sku,
                "public_ip_allocation_method": public_ip_allocation_method,
                "public_ip_address_version": public_ip_address_version,
                "idle_timeout_in_minutes": idle_timeout_in_minutes,
            },
        }

    if __opts__["test"]:
        ret["comment"] = "Public IP address {} would be created.".format(name)
        ret["result"] = None
        return ret

    # Merge any extra keyword arguments with the credentials for the module call.
    pub_ip_kwargs = kwargs.copy()
    pub_ip_kwargs.update(connection_auth)

    pub_ip = __salt__["azurearm_network.public_ip_address_create_or_update"](
        name=name,
        resource_group=resource_group,
        sku=sku,
        tags=tags,
        dns_settings=dns_settings,
        public_ip_allocation_method=public_ip_allocation_method,
        public_ip_address_version=public_ip_address_version,
        idle_timeout_in_minutes=idle_timeout_in_minutes,
        **pub_ip_kwargs
    )

    if "error" not in pub_ip:
        ret["result"] = True
        ret["comment"] = "Public IP address {} has been created.".format(name)
        return ret

    ret["comment"] = "Failed to create public IP address {}! ({})".format(
        name, pub_ip.get("error")
    )
    return ret
def public_ip_address_absent(name, resource_group, connection_auth=None):
    """
    .. versionadded:: 2019.2.0

    Ensure a public IP address does not exist in the resource group.

    :param name: Name of the public IP address.
    :param resource_group: The resource group assigned to the public IP address.
    :param connection_auth: A dict with subscription and authentication parameters to
        be used in connecting to the Azure Resource Manager API.
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    if not isinstance(connection_auth, dict):
        ret["comment"] = (
            "Connection information must be specified via connection_auth dictionary!"
        )
        return ret
    existing = __salt__["azurearm_network.public_ip_address_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )
    # An error from the lookup means the public IP is already absent.
    if "error" in existing:
        ret["result"] = True
        ret["comment"] = "Public IP address {} was not found.".format(name)
        return ret
    if __opts__["test"]:
        ret["result"] = None
        ret["comment"] = "Public IP address {} would be deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret
    if __salt__["azurearm_network.public_ip_address_delete"](
        name, resource_group, **connection_auth
    ):
        ret["result"] = True
        ret["comment"] = "Public IP address {} has been deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret
    ret["comment"] = "Failed to delete public IP address {}!".format(name)
    return ret
def network_interface_present(
    name,
    ip_configurations,
    subnet,
    virtual_network,
    resource_group,
    tags=None,
    virtual_machine=None,
    network_security_group=None,
    dns_settings=None,
    mac_address=None,
    primary=None,
    enable_accelerated_networking=None,
    enable_ip_forwarding=None,
    connection_auth=None,
    **kwargs
):
    """
    .. versionadded:: 2019.2.0

    Ensure a network interface exists.

    :param name:
        Name of the network interface.

    :param ip_configurations:
        A list of dictionaries representing valid NetworkInterfaceIPConfiguration objects. The 'name' key is required at
        minimum. At least one IP Configuration must be present.

    :param subnet:
        Name of the existing subnet assigned to the network interface.

    :param virtual_network:
        Name of the existing virtual network containing the subnet.

    :param resource_group:
        The resource group assigned to the virtual network.

    :param tags:
        A dictionary of strings can be passed as tag metadata to the network interface object.

    :param network_security_group:
        The name of the existing network security group to assign to the network interface.

    :param virtual_machine:
        The name of the existing virtual machine to assign to the network interface.

    :param dns_settings:
        An optional dictionary representing a valid NetworkInterfaceDnsSettings object. Valid parameters are:

        - ``dns_servers``: List of DNS server IP addresses. Use 'AzureProvidedDNS' to switch to Azure provided DNS
          resolution. 'AzureProvidedDNS' value cannot be combined with other IPs, it must be the only value in
          dns_servers collection.
        - ``internal_dns_name_label``: Relative DNS name for this NIC used for internal communications between VMs in
          the same virtual network.
        - ``internal_fqdn``: Fully qualified DNS name supporting internal communications between VMs in the same virtual
          network.
        - ``internal_domain_name_suffix``: Even if internal_dns_name_label is not specified, a DNS entry is created for
          the primary NIC of the VM. This DNS name can be constructed by concatenating the VM name with the value of
          internal_domain_name_suffix.

    :param mac_address:
        Optional string containing the MAC address of the network interface.

    :param primary:
        Optional boolean allowing the interface to be set as the primary network interface on a virtual machine
        with multiple interfaces attached.

    :param enable_accelerated_networking:
        Optional boolean indicating whether accelerated networking should be enabled for the interface.

    :param enable_ip_forwarding:
        Optional boolean indicating whether IP forwarding should be enabled for the interface.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure network interface exists:
            azurearm_network.network_interface_present:
                - name: iface1
                - subnet: vnet1_sn1
                - virtual_network: vnet1
                - resource_group: group1
                - ip_configurations:
                    - name: iface1_ipc1
                      public_ip_address: pub_ip2
                - dns_settings:
                    internal_dns_name_label: decisionlab-int-test-label
                - primary: True
                - enable_accelerated_networking: True
                - enable_ip_forwarding: False
                - network_security_group: nsg1
                - connection_auth: {{ profile }}
                - require:
                  - azurearm_network: Ensure subnet exists
                  - azurearm_network: Ensure network security group exists
                  - azurearm_network: Ensure another public IP exists

    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret

    # Fetch the existing interface (if any) so the desired state can be diffed.
    iface = __salt__["azurearm_network.network_interface_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )

    if "error" not in iface:
        # tag changes
        tag_changes = __utils__["dictdiffer.deep_diff"](
            iface.get("tags", {}), tags or {}
        )
        if tag_changes:
            ret["changes"]["tags"] = tag_changes

        # mac_address changes
        if mac_address and (mac_address != iface.get("mac_address")):
            ret["changes"]["mac_address"] = {
                "old": iface.get("mac_address"),
                "new": mac_address,
            }

        # primary changes
        if primary is not None:
            if primary != iface.get("primary", True):
                ret["changes"]["primary"] = {
                    "old": iface.get("primary"),
                    "new": primary,
                }

        # enable_accelerated_networking changes
        if enable_accelerated_networking is not None:
            if enable_accelerated_networking != iface.get(
                "enable_accelerated_networking"
            ):
                ret["changes"]["enable_accelerated_networking"] = {
                    "old": iface.get("enable_accelerated_networking"),
                    "new": enable_accelerated_networking,
                }

        # enable_ip_forwarding changes
        if enable_ip_forwarding is not None:
            if enable_ip_forwarding != iface.get("enable_ip_forwarding"):
                ret["changes"]["enable_ip_forwarding"] = {
                    "old": iface.get("enable_ip_forwarding"),
                    "new": enable_ip_forwarding,
                }

        # network_security_group changes
        nsg_name = None
        if iface.get("network_security_group"):
            # Only the trailing segment of the resource ID is the NSG name.
            nsg_name = iface["network_security_group"]["id"].split("/")[-1]

        if network_security_group and (network_security_group != nsg_name):
            ret["changes"]["network_security_group"] = {
                "old": nsg_name,
                "new": network_security_group,
            }

        # virtual_machine changes
        vm_name = None
        if iface.get("virtual_machine"):
            vm_name = iface["virtual_machine"]["id"].split("/")[-1]

        if virtual_machine and (virtual_machine != vm_name):
            ret["changes"]["virtual_machine"] = {"old": vm_name, "new": virtual_machine}

        # dns_settings changes
        if dns_settings:
            if not isinstance(dns_settings, dict):
                ret["comment"] = "DNS settings must be provided as a dictionary!"
                return ret

            existing_dns = iface.get("dns_settings") or {}
            for key in dns_settings:
                new_val = dns_settings[key]
                old_val = existing_dns.get(key)
                # Compare strings case-insensitively, but compare non-string
                # values (e.g. the "dns_servers" list) directly. The previous
                # implementation called .lower() unconditionally, which raised
                # AttributeError for list values such as dns_servers.
                if isinstance(new_val, str):
                    changed = new_val.lower() != str(old_val or "").lower()
                else:
                    changed = new_val != old_val
                if changed:
                    ret["changes"]["dns_settings"] = {
                        "old": iface.get("dns_settings"),
                        "new": dns_settings,
                    }
                    break

        # ip_configurations changes
        comp_ret = __utils__["azurearm.compare_list_of_dicts"](
            iface.get("ip_configurations", []),
            ip_configurations,
            ["public_ip_address", "subnet"],
        )

        # A "comment" in the comparison result signals invalid input, which
        # aborts the state run rather than recording a change.
        if comp_ret.get("comment"):
            ret["comment"] = '"ip_configurations" {}'.format(comp_ret["comment"])
            return ret

        if comp_ret.get("changes"):
            ret["changes"]["ip_configurations"] = comp_ret["changes"]

        # No differences were found, so the interface is already in the
        # desired state.
        if not ret["changes"]:
            ret["result"] = True
            ret["comment"] = "Network interface {} is already present.".format(name)
            return ret

        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = "Network interface {} would be updated.".format(name)
            return ret

    else:
        # No existing interface was found, so everything passed counts as new.
        ret["changes"] = {
            "old": {},
            "new": {
                "name": name,
                "ip_configurations": ip_configurations,
                "dns_settings": dns_settings,
                "network_security_group": network_security_group,
                "virtual_machine": virtual_machine,
                "enable_accelerated_networking": enable_accelerated_networking,
                "enable_ip_forwarding": enable_ip_forwarding,
                "mac_address": mac_address,
                "primary": primary,
                "tags": tags,
            },
        }

    if __opts__["test"]:
        ret["comment"] = "Network interface {} would be created.".format(name)
        ret["result"] = None
        return ret

    # Merge any extra keyword arguments with the credentials for the module call.
    iface_kwargs = kwargs.copy()
    iface_kwargs.update(connection_auth)

    iface = __salt__["azurearm_network.network_interface_create_or_update"](
        name=name,
        subnet=subnet,
        virtual_network=virtual_network,
        resource_group=resource_group,
        ip_configurations=ip_configurations,
        dns_settings=dns_settings,
        enable_accelerated_networking=enable_accelerated_networking,
        enable_ip_forwarding=enable_ip_forwarding,
        mac_address=mac_address,
        primary=primary,
        network_security_group=network_security_group,
        virtual_machine=virtual_machine,
        tags=tags,
        **iface_kwargs
    )

    if "error" not in iface:
        ret["result"] = True
        ret["comment"] = "Network interface {} has been created.".format(name)
        return ret

    ret["comment"] = "Failed to create network interface {}! ({})".format(
        name, iface.get("error")
    )
    return ret
def network_interface_absent(name, resource_group, connection_auth=None):
    """
    .. versionadded:: 2019.2.0

    Ensure a network interface does not exist in the resource group.

    :param name:
        Name of the network interface.

    :param resource_group:
        The resource group assigned to the network interface.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret

    iface = __salt__["azurearm_network.network_interface_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )

    # A lookup error means the interface is already gone -- nothing to do.
    if "error" in iface:
        ret["result"] = True
        ret["comment"] = "Network interface {} was not found.".format(name)
        return ret

    elif __opts__["test"]:
        ret["comment"] = "Network interface {} would be deleted.".format(name)
        ret["result"] = None
        ret["changes"] = {
            "old": iface,
            "new": {},
        }
        return ret

    deleted = __salt__["azurearm_network.network_interface_delete"](
        name, resource_group, **connection_auth
    )

    if deleted:
        ret["result"] = True
        ret["comment"] = "Network interface {} has been deleted.".format(name)
        ret["changes"] = {"old": iface, "new": {}}
        return ret

    # Message previously contained a stray ")" after "!", inconsistent with the
    # other *_absent states in this module.
    ret["comment"] = "Failed to delete network interface {}!".format(name)
    return ret
def route_table_present(
    name,
    resource_group,
    tags=None,
    routes=None,
    disable_bgp_route_propagation=None,
    connection_auth=None,
    **kwargs
):
    """
    .. versionadded:: 2019.2.0

    Ensure a route table exists.

    :param name:
        Name of the route table.

    :param resource_group:
        The resource group assigned to the route table.

    :param routes:
        An optional list of dictionaries representing valid Route objects contained within a route table. See the
        documentation for the route_present state or route_create_or_update execution module for more information on
        required and optional parameters for routes. The routes are only managed if this parameter is present. When this
        parameter is absent, implemented routes will not be removed, and will merely become unmanaged.

    :param disable_bgp_route_propagation:
        An optional boolean parameter setting whether to disable the routes learned by BGP on the route table.

    :param tags:
        A dictionary of strings can be passed as tag metadata to the route table object.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure route table exists:
            azurearm_network.route_table_present:
                - name: rt1
                - resource_group: group1
                - routes:
                  - name: rt1_route1
                    address_prefix: '0.0.0.0/0'
                    next_hop_type: internet
                  - name: rt1_route2
                    address_prefix: '192.168.0.0/16'
                    next_hop_type: vnetlocal
                - tags:
                    contact_name: <NAME>
                - connection_auth: {{ profile }}
                - require:
                  - azurearm_resource: Ensure resource group exists
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret

    rt_tbl = __salt__["azurearm_network.route_table_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )

    if "error" not in rt_tbl:
        # tag changes
        tag_changes = __utils__["dictdiffer.deep_diff"](
            rt_tbl.get("tags", {}), tags or {}
        )
        if tag_changes:
            ret["changes"]["tags"] = tag_changes

        # disable_bgp_route_propagation changes
        # Fixed: compare against ``is not None`` instead of truthiness so that an
        # explicit ``False`` can re-enable BGP route propagation; the previous
        # truthy check silently ignored ``False``.
        # pylint: disable=line-too-long
        if disable_bgp_route_propagation is not None and (
            disable_bgp_route_propagation != rt_tbl.get("disable_bgp_route_propagation")
        ):
            ret["changes"]["disable_bgp_route_propagation"] = {
                "old": rt_tbl.get("disable_bgp_route_propagation"),
                "new": disable_bgp_route_propagation,
            }

        # routes changes -- only managed when the ``routes`` parameter is given
        if routes:
            comp_ret = __utils__["azurearm.compare_list_of_dicts"](
                rt_tbl.get("routes", []), routes
            )
            if comp_ret.get("comment"):
                ret["comment"] = '"routes" {}'.format(comp_ret["comment"])
                return ret
            if comp_ret.get("changes"):
                ret["changes"]["routes"] = comp_ret["changes"]

        if not ret["changes"]:
            ret["result"] = True
            ret["comment"] = "Route table {} is already present.".format(name)
            return ret

        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = "Route table {} would be updated.".format(name)
            return ret

    else:
        # Route table does not exist yet -- everything is a change.
        ret["changes"] = {
            "old": {},
            "new": {
                "name": name,
                "tags": tags,
                "routes": routes,
                "disable_bgp_route_propagation": disable_bgp_route_propagation,
            },
        }

        if __opts__["test"]:
            ret["comment"] = "Route table {} would be created.".format(name)
            ret["result"] = None
            return ret

    rt_tbl_kwargs = kwargs.copy()
    rt_tbl_kwargs.update(connection_auth)

    rt_tbl = __salt__["azurearm_network.route_table_create_or_update"](
        name=name,
        resource_group=resource_group,
        disable_bgp_route_propagation=disable_bgp_route_propagation,
        routes=routes,
        tags=tags,
        **rt_tbl_kwargs
    )

    if "error" not in rt_tbl:
        ret["result"] = True
        ret["comment"] = "Route table {} has been created.".format(name)
        return ret

    ret["comment"] = "Failed to create route table {}! ({})".format(
        name, rt_tbl.get("error")
    )
    return ret
def route_table_absent(name, resource_group, connection_auth=None):
    """
    .. versionadded:: 2019.2.0

    Ensure a route table does not exist in the resource group.

    :param name:
        Name of the route table.

    :param resource_group:
        The resource group assigned to the route table.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret

    existing = __salt__["azurearm_network.route_table_get"](
        name, resource_group, azurearm_log_level="info", **connection_auth
    )

    # A lookup error means the table is already absent -- nothing to do.
    if "error" in existing:
        ret["result"] = True
        ret["comment"] = f"Route table {name} was not found."
        return ret

    if __opts__["test"]:
        ret["result"] = None
        ret["comment"] = f"Route table {name} would be deleted."
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    if __salt__["azurearm_network.route_table_delete"](
        name, resource_group, **connection_auth
    ):
        ret["result"] = True
        ret["comment"] = f"Route table {name} has been deleted."
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    ret["comment"] = f"Failed to delete route table {name}!"
    return ret
def route_present(
    name,
    address_prefix,
    next_hop_type,
    route_table,
    resource_group,
    next_hop_ip_address=None,
    connection_auth=None,
    **kwargs
):
    """
    .. versionadded:: 2019.2.0

    Ensure a route exists within a route table.

    :param name:
        Name of the route.

    :param address_prefix:
        The destination CIDR to which the route applies.

    :param next_hop_type:
        The type of Azure hop the packet should be sent to. Possible values are: 'VirtualNetworkGateway', 'VnetLocal',
        'Internet', 'VirtualAppliance', and 'None'.

    :param next_hop_ip_address:
        The IP address packets should be forwarded to. Next hop values are only allowed in routes where the next hop
        type is 'VirtualAppliance'.

    :param route_table:
        The name of the existing route table which will contain the route.

    :param resource_group:
        The resource group assigned to the route table.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure route exists:
            azurearm_network.route_present:
                - name: rt1_route2
                - route_table: rt1
                - resource_group: group1
                - address_prefix: '192.168.0.0/16'
                - next_hop_type: vnetlocal
                - connection_auth: {{ profile }}
                - require:
                  - azurearm_network: Ensure route table exists
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret

    existing = __salt__["azurearm_network.route_get"](
        name, route_table, resource_group, azurearm_log_level="info", **connection_auth
    )

    if "error" not in existing:
        # Route already exists -- diff each managed property against the
        # desired state and record any differences.
        diffs = {}
        if address_prefix != existing.get("address_prefix"):
            diffs["address_prefix"] = {
                "old": existing.get("address_prefix"),
                "new": address_prefix,
            }
        if next_hop_type.lower() != existing.get("next_hop_type", "").lower():
            diffs["next_hop_type"] = {
                "old": existing.get("next_hop_type"),
                "new": next_hop_type,
            }
        # next_hop_ip_address is only meaningful for VirtualAppliance hops.
        if next_hop_type.lower() == "virtualappliance" and next_hop_ip_address != existing.get(
            "next_hop_ip_address"
        ):
            diffs["next_hop_ip_address"] = {
                "old": existing.get("next_hop_ip_address"),
                "new": next_hop_ip_address,
            }
        ret["changes"] = diffs

        if not diffs:
            ret["result"] = True
            ret["comment"] = f"Route {name} is already present."
            return ret

        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = f"Route {name} would be updated."
            return ret

    else:
        # New route -- every managed property counts as a change.
        ret["changes"] = {
            "old": {},
            "new": {
                "name": name,
                "address_prefix": address_prefix,
                "next_hop_type": next_hop_type,
                "next_hop_ip_address": next_hop_ip_address,
            },
        }

        if __opts__["test"]:
            ret["comment"] = f"Route {name} would be created."
            ret["result"] = None
            return ret

    create_kwargs = dict(kwargs)
    create_kwargs.update(connection_auth)

    result = __salt__["azurearm_network.route_create_or_update"](
        name=name,
        route_table=route_table,
        resource_group=resource_group,
        address_prefix=address_prefix,
        next_hop_type=next_hop_type,
        next_hop_ip_address=next_hop_ip_address,
        **create_kwargs
    )

    if "error" not in result:
        ret["result"] = True
        ret["comment"] = f"Route {name} has been created."
        return ret

    ret["comment"] = "Failed to create route {}! ({})".format(name, result.get("error"))
    return ret
def route_absent(name, route_table, resource_group, connection_auth=None):
    """
    .. versionadded:: 2019.2.0

    Ensure a route table does not exist in the resource group.

    :param name:
        Name of the route table.

    :param route_table:
        The name of the existing route table containing the route.

    :param resource_group:
        The resource group assigned to the route table.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    if not isinstance(connection_auth, dict):
        ret[
            "comment"
        ] = "Connection information must be specified via connection_auth dictionary!"
        return ret

    existing = __salt__["azurearm_network.route_get"](
        name, route_table, resource_group, azurearm_log_level="info", **connection_auth
    )

    # A lookup error means the route is already absent.
    if "error" in existing:
        ret["result"] = True
        ret["comment"] = f"Route {name} was not found."
        return ret

    if __opts__["test"]:
        ret["result"] = None
        ret["comment"] = f"Route {name} would be deleted."
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    if __salt__["azurearm_network.route_delete"](
        name, route_table, resource_group, **connection_auth
    ):
        ret["result"] = True
        ret["comment"] = f"Route {name} has been deleted."
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    ret["comment"] = f"Failed to delete route {name}!"
    return ret
|
jellehuibregtse/cah | auth-service/src/main/java/com/jellehuibregtse/cah/authservice/security/SecurityCredentialsConfig.java | package com.jellehuibregtse.cah.authservice.security;
import com.jellehuibregtse.cah.authservice.jwt.JwtConfig;
import com.jellehuibregtse.cah.authservice.jwt.JwtTokenAuthenticationFilter;
import com.jellehuibregtse.cah.authservice.jwt.JwtUsernameAndPasswordAuthenticationFilter;
import com.jellehuibregtse.cah.authservice.service.JwtTokenService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.http.HttpMethod;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import javax.servlet.http.HttpServletResponse;
/**
* The security configuration for the authentication service.
*
* @author <NAME>
*/
@EnableWebSecurity
@EnableGlobalMethodSecurity(prePostEnabled = true)
public class SecurityCredentialsConfig extends WebSecurityConfigurerAdapter {

    // Loads user records for authentication; qualified to pick the application's own implementation.
    private final UserDetailsService userDetailsService;
    // JWT settings (login URI, header names, secret, expiration).
    private final JwtConfig jwtConfig;
    // Used by the auth manager to compare submitted passwords with stored hashes.
    private final PasswordEncoder passwordEncoder;
    // Issues/parses JWT tokens for the authentication filters below.
    private final JwtTokenService jwtTokenService;

    @Autowired
    public SecurityCredentialsConfig(@Qualifier("applicationUserService") UserDetailsService userDetailsService,
                                     JwtConfig jwtConfig,
                                     PasswordEncoder passwordEncoder,
                                     JwtTokenService jwtTokenService) {
        this.userDetailsService = userDetailsService;
        this.jwtConfig = jwtConfig;
        this.passwordEncoder = passwordEncoder;
        this.jwtTokenService = jwtTokenService;
    }

    /**
     * Configures the stateless, JWT-based HTTP security filter chain.
     * Filter order matters here: the username/password filter runs first, then the token filter.
     */
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.csrf()
            .disable()
            // Make sure we use a stateless session (a session that won't be used to store a user's state).
            .sessionManagement()
            .sessionCreationPolicy(SessionCreationPolicy.STATELESS)
            .and()
            // Handle an authorized attempts.
            .exceptionHandling()
            .authenticationEntryPoint((req, rsp, e) -> rsp.sendError(HttpServletResponse.SC_UNAUTHORIZED))
            .and()
            // Add a filter to validate user credentials and add token in the response header.
            // What's the authenticationManager()?
            // An object provided by WebSecurityConfigurerAdapter, used to authenticate the user passing user's credentials.
            // The filter needs this auth manager to authenticate the user.
            .addFilter(new JwtUsernameAndPasswordAuthenticationFilter(authenticationManager(),
                                                                      jwtConfig,
                                                                      jwtTokenService))
            .addFilterAfter(new JwtTokenAuthenticationFilter(jwtConfig), UsernamePasswordAuthenticationFilter.class)
            .authorizeRequests()
            // Allow all POST requests, otherwise a user can't authenticate.
            .antMatchers(HttpMethod.POST, jwtConfig.getUri())
            .permitAll()
            .antMatchers("/actuator/**")
            .permitAll()
            // Allow POST request for creating a new user
            .antMatchers(HttpMethod.POST, "/users/**")
            .permitAll()
            // Allow GET for checking if a user with email has been taken
            .antMatchers(HttpMethod.GET, "/users/**")
            .permitAll()
            // Any other requests must be authenticated.
            .anyRequest()
            .authenticated();
    }

    // Spring has UserDetailsService interface, which can be overridden to provide our implementation for fetching user from database (or any other source).
    // The UserDetailsService object is used by the auth manager to load the user from database.
    // In addition, we need to define the password encoder also. So, auth manager can compare and verify passwords.
    @Override
    protected void configure(AuthenticationManagerBuilder auth) throws Exception {
        auth.userDetailsService(userDetailsService).passwordEncoder(passwordEncoder);
    }
}
menty44/tutorials | rxjava-operators/src/test/java/com/baeldung/rxjava/operators/RxAggregateOperatorsUnitTest.java | package com.baeldung.rxjava.operators;
import org.junit.Test;
import rx.Observable;
import rx.observers.TestSubscriber;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
/**
 * Unit tests exercising RxJava 1.x aggregate operators
 * (concatWith, count, reduce, collect, toList, toSortedList, toMap, toMultimap)
 * using {@link rx.observers.TestSubscriber} to assert emissions.
 */
public class RxAggregateOperatorsUnitTest {

    @Test
    public void givenTwoObservable_whenConcatenatingThem_thenSuccessfull() {
        // given
        List<Integer> listOne = Arrays.asList(1, 2, 3, 4);
        Observable<Integer> observableOne = Observable.from(listOne);
        List<Integer> listTwo = Arrays.asList(5, 6, 7, 8);
        Observable<Integer> observableTwo = Observable.from(listTwo);
        TestSubscriber<Integer> subscriber = TestSubscriber.create();

        // when
        Observable<Integer> concatObservable = observableOne.concatWith(observableTwo);
        concatObservable.subscribe(subscriber);

        // then: the second observable's items follow the first's, in order
        subscriber.assertCompleted();
        subscriber.assertNoErrors();
        subscriber.assertValueCount(8);
        subscriber.assertValues(1, 2, 3, 4, 5, 6, 7, 8);
    }

    @Test
    public void givenObservable_whenCounting_thenObtainingNumberOfElements() {
        // given
        List<String> lettersList = Arrays.asList("A", "B", "C", "D", "E", "F", "G");
        TestSubscriber<Integer> subscriber = TestSubscriber.create();

        // when
        Observable<Integer> sourceObservable = Observable.from(lettersList)
            .count();
        sourceObservable.subscribe(subscriber);

        // then: count() emits a single item holding the element count
        subscriber.assertCompleted();
        subscriber.assertNoErrors();
        subscriber.assertValueCount(1);
        subscriber.assertValue(7);
    }

    @Test
    public void givenObservable_whenReducing_thenObtainingInvertedConcatenatedString() {
        // given
        List<String> list = Arrays.asList("A", "B", "C", "D", "E", "F", "G");
        TestSubscriber<String> subscriber = TestSubscriber.create();

        // when: prepending each new letter reverses the concatenation order
        Observable<String> reduceObservable = Observable.from(list)
            .reduce((letter1, letter2) -> letter2 + letter1);
        reduceObservable.subscribe(subscriber);

        // then
        subscriber.assertCompleted();
        subscriber.assertNoErrors();
        subscriber.assertValueCount(1);
        subscriber.assertValue("GFEDCBA");
    }

    @Test
    public void givenObservable_whenCollecting_thenObtainingASet() {
        // given
        List<String> list = Arrays.asList("A", "B", "C", "B", "B", "A", "D");
        TestSubscriber<HashSet> subscriber = TestSubscriber.create();

        // when: collecting into a HashSet de-duplicates the emissions
        Observable<HashSet<String>> reduceListObservable = Observable.from(list)
            .collect(HashSet::new, HashSet::add);
        reduceListObservable.subscribe(subscriber);

        // then
        subscriber.assertCompleted();
        subscriber.assertNoErrors();
        subscriber.assertValueCount(1);
        subscriber.assertValues(new HashSet<>(list));
    }

    @Test
    public void givenObservable_whenUsingToList_thenObtainedAList() {
        // given
        Observable<Integer> sourceObservable = Observable.range(1, 5);
        TestSubscriber<List> subscriber = TestSubscriber.create();

        // when
        Observable<List<Integer>> listObservable = sourceObservable.toList();
        listObservable.subscribe(subscriber);

        // then
        subscriber.assertCompleted();
        subscriber.assertNoErrors();
        subscriber.assertValueCount(1);
        subscriber.assertValue(Arrays.asList(1, 2, 3, 4, 5));
    }

    @Test
    public void givenObservable_whenUsingToSortedList_thenObtainedASortedList() {
        // given
        Observable<Integer> sourceObservable = Observable.range(10, 5);
        TestSubscriber<List> subscriber = TestSubscriber.create();

        // when
        Observable<List<Integer>> listObservable = sourceObservable.toSortedList();
        listObservable.subscribe(subscriber);

        // then
        subscriber.assertCompleted();
        subscriber.assertNoErrors();
        subscriber.assertValueCount(1);
        subscriber.assertValue(Arrays.asList(10, 11, 12, 13, 14));
    }

    @Test
    public void givenObservable_whenUsingToSortedListWithComparator_thenObtainedAnInverseSortedList() {
        // given
        Observable<Integer> sourceObservable = Observable.range(10, 5);
        TestSubscriber<List> subscriber = TestSubscriber.create();

        // when: the custom comparator sorts in descending order
        Observable<List<Integer>> listObservable = sourceObservable.toSortedList((int1, int2) -> int2 - int1);
        listObservable.subscribe(subscriber);

        // then
        subscriber.assertCompleted();
        subscriber.assertNoErrors();
        subscriber.assertValueCount(1);
        subscriber.assertValue(Arrays.asList(14, 13, 12, 11, 10));
    }

    @Test
    public void givenObservable_whenUsingToMap_thenObtainedAMap() {
        // given
        Observable<Book> bookObservable = Observable
            .just(
                new Book("The North Water", 2016),
                new Book("Origin", 2017),
                new Book("Sleeping Beauties", 2017));
        TestSubscriber<Map> subscriber = TestSubscriber.create();

        // when: keyed by title, valued by year
        Observable<Map<String, Integer>> mapObservable = bookObservable
            .toMap(Book::getTitle, Book::getYear, HashMap::new);
        mapObservable.subscribe(subscriber);

        // then
        subscriber.assertCompleted();
        subscriber.assertNoErrors();
        subscriber.assertValueCount(1);
        subscriber.assertValue(new HashMap() {
            {
                put("The North Water", 2016);
                put("Origin", 2017);
                put("Sleeping Beauties", 2017);
            }
        });
    }

    @Test
    public void givenObservable_whenUsingToMultiMap_thenObtainedAMultiMap() {
        // given
        Observable<Book> bookObservable = Observable
            .just(
                new Book("The North Water", 2016),
                new Book("Origin", 2017),
                new Book("Sleeping Beauties", 2017));
        TestSubscriber<Map> subscriber = TestSubscriber.create();

        // when: books sharing a year are grouped into one list per key
        Observable multiMapObservable = bookObservable
            .toMultimap(Book::getYear, Book::getTitle, () -> new HashMap<>(), (key) -> new ArrayList<>());
        multiMapObservable.subscribe(subscriber);

        // then
        subscriber.assertCompleted();
        subscriber.assertNoErrors();
        subscriber.assertValueCount(1);
        subscriber.assertValue(new HashMap() {
            {
                put(2016, Arrays.asList("The North Water"));
                put(2017, Arrays.asList("Origin", "Sleeping Beauties"));
            }
        });
    }

    /** Simple fixture type used by the toMap/toMultimap tests. */
    class Book {
        // Book title (map key or value depending on the test)
        private String title;
        // Publication year (map key or value depending on the test)
        private Integer year;

        public Book(String title, Integer year) {
            this.title = title;
            this.year = year;
        }

        public String getTitle() {
            return title;
        }

        public Integer getYear() {
            return year;
        }
    }
}
|
hsmtknj/programming-contest | cpp_snippet/sort/pg/merge_sort.cpp | #include <bits/stdc++.h>
// Sort v[left, right) in ascending order using top-down merge sort.
// The right half is copied into the scratch buffer in reverse so the two
// halves act as sentinels for each other during the merge: neither index
// can run past the other's data, so no bounds checks are needed.
void merge_sort(std::vector<int> &v, int left, int right)
{
    // Base case: ranges of size 0 or 1 are already sorted.
    if (right - left <= 1)
    {
        return;
    }

    // Recursively sort both halves.
    int mid = left + (right - left) / 2;
    merge_sort(v, left, mid);
    merge_sort(v, mid, right);

    // Merge step: left half copied in order, right half reversed.
    std::vector<int> buf;
    buf.reserve(right - left);  // avoid reallocations during the copy
    for (int i = left; i < mid; i++) buf.push_back(v[i]);
    for (int i = right - 1; i >= mid; i--) buf.push_back(v[i]);

    int l_ind = 0;
    int r_ind = right - left - 1;
    for (int i = left; i < right; i++)
    {
        if (buf[l_ind] < buf[r_ind])
        {
            v[i] = buf[l_ind];
            l_ind++;
        }
        else
        {
            v[i] = buf[r_ind];
            r_ind--;
        }
    }
}
int main()
{
    // Demo: sort a small sample and print the result.
    std::vector<int> values = {30, 88, 25, 17, 20};
    merge_sort(values, 0, values.size());
    for (int i = 0; i < (int)values.size(); i++) std::cout << values[i] << " ";
    std::cout << std::endl;
    return 0;
}
perfectrecall/aws-sdk-cpp | aws-cpp-sdk-ecs/include/aws/ecs/model/ListServicesResult.h | <filename>aws-cpp-sdk-ecs/include/aws/ecs/model/ListServicesResult.h
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/ecs/ECS_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <utility>
namespace Aws
{
template<typename RESULT_TYPE>
class AmazonWebServiceResult;
namespace Utils
{
namespace Json
{
class JsonValue;
} // namespace Json
} // namespace Utils
namespace ECS
{
namespace Model
{
  // NOTE: auto-generated AWS SDK model class. Each property exposes the SDK's
  // standard overload set (copy setter, move setter, fluent With*, and Add* for
  // vector members); do not hand-edit individual overloads.
  class AWS_ECS_API ListServicesResult
  {
  public:
    ListServicesResult();
    ListServicesResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);
    ListServicesResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);

    /**
     * <p>The list of full ARN entries for each service that's associated with the
     * specified cluster.</p>
     */
    inline const Aws::Vector<Aws::String>& GetServiceArns() const{ return m_serviceArns; }

    /**
     * <p>The list of full ARN entries for each service that's associated with the
     * specified cluster.</p>
     */
    inline void SetServiceArns(const Aws::Vector<Aws::String>& value) { m_serviceArns = value; }

    /**
     * <p>The list of full ARN entries for each service that's associated with the
     * specified cluster.</p>
     */
    inline void SetServiceArns(Aws::Vector<Aws::String>&& value) { m_serviceArns = std::move(value); }

    /**
     * <p>The list of full ARN entries for each service that's associated with the
     * specified cluster.</p>
     */
    inline ListServicesResult& WithServiceArns(const Aws::Vector<Aws::String>& value) { SetServiceArns(value); return *this;}

    /**
     * <p>The list of full ARN entries for each service that's associated with the
     * specified cluster.</p>
     */
    inline ListServicesResult& WithServiceArns(Aws::Vector<Aws::String>&& value) { SetServiceArns(std::move(value)); return *this;}

    /**
     * <p>The list of full ARN entries for each service that's associated with the
     * specified cluster.</p>
     */
    inline ListServicesResult& AddServiceArns(const Aws::String& value) { m_serviceArns.push_back(value); return *this; }

    /**
     * <p>The list of full ARN entries for each service that's associated with the
     * specified cluster.</p>
     */
    inline ListServicesResult& AddServiceArns(Aws::String&& value) { m_serviceArns.push_back(std::move(value)); return *this; }

    /**
     * <p>The list of full ARN entries for each service that's associated with the
     * specified cluster.</p>
     */
    inline ListServicesResult& AddServiceArns(const char* value) { m_serviceArns.push_back(value); return *this; }

    /**
     * <p>The <code>nextToken</code> value to include in a future
     * <code>ListServices</code> request. When the results of a
     * <code>ListServices</code> request exceed <code>maxResults</code>, this value can
     * be used to retrieve the next page of results. This value is <code>null</code>
     * when there are no more results to return.</p>
     */
    inline const Aws::String& GetNextToken() const{ return m_nextToken; }

    /**
     * <p>The <code>nextToken</code> value to include in a future
     * <code>ListServices</code> request. When the results of a
     * <code>ListServices</code> request exceed <code>maxResults</code>, this value can
     * be used to retrieve the next page of results. This value is <code>null</code>
     * when there are no more results to return.</p>
     */
    inline void SetNextToken(const Aws::String& value) { m_nextToken = value; }

    /**
     * <p>The <code>nextToken</code> value to include in a future
     * <code>ListServices</code> request. When the results of a
     * <code>ListServices</code> request exceed <code>maxResults</code>, this value can
     * be used to retrieve the next page of results. This value is <code>null</code>
     * when there are no more results to return.</p>
     */
    inline void SetNextToken(Aws::String&& value) { m_nextToken = std::move(value); }

    /**
     * <p>The <code>nextToken</code> value to include in a future
     * <code>ListServices</code> request. When the results of a
     * <code>ListServices</code> request exceed <code>maxResults</code>, this value can
     * be used to retrieve the next page of results. This value is <code>null</code>
     * when there are no more results to return.</p>
     */
    inline void SetNextToken(const char* value) { m_nextToken.assign(value); }

    /**
     * <p>The <code>nextToken</code> value to include in a future
     * <code>ListServices</code> request. When the results of a
     * <code>ListServices</code> request exceed <code>maxResults</code>, this value can
     * be used to retrieve the next page of results. This value is <code>null</code>
     * when there are no more results to return.</p>
     */
    inline ListServicesResult& WithNextToken(const Aws::String& value) { SetNextToken(value); return *this;}

    /**
     * <p>The <code>nextToken</code> value to include in a future
     * <code>ListServices</code> request. When the results of a
     * <code>ListServices</code> request exceed <code>maxResults</code>, this value can
     * be used to retrieve the next page of results. This value is <code>null</code>
     * when there are no more results to return.</p>
     */
    inline ListServicesResult& WithNextToken(Aws::String&& value) { SetNextToken(std::move(value)); return *this;}

    /**
     * <p>The <code>nextToken</code> value to include in a future
     * <code>ListServices</code> request. When the results of a
     * <code>ListServices</code> request exceed <code>maxResults</code>, this value can
     * be used to retrieve the next page of results. This value is <code>null</code>
     * when there are no more results to return.</p>
     */
    inline ListServicesResult& WithNextToken(const char* value) { SetNextToken(value); return *this;}

  private:

    // Service ARNs for the requested cluster page.
    Aws::Vector<Aws::String> m_serviceArns;

    // Pagination token; empty when there are no further pages.
    Aws::String m_nextToken;
  };
} // namespace Model
} // namespace ECS
} // namespace Aws
|
ienter/iwage | src/js/modes/image/tools/glfx/Vignette.js | <reponame>ienter/iwage
// Register the namespace for glfx-based image tools.
Ext.ns('iwage.image.tools.glfx');

// "Vignette" tool: applies the glfx vignette filter (darkened edges) to the
// current image, driven by two sliders. UI labels are in Spanish by design.
Ext.define('iwage.image.tools.glfx.Vignette', {
    toolLabel: 'Vignette',
    extend: 'iwage.image.tools.glfx.Common',
    // Build the slider controls for the tool panel.
    // Both sliders emit 0-100 and are rescaled to 0.0-1.0 in previewFilter.
    createControls: function() {
        return [
            {
                xtype: 'slider',
                fieldLabel: 'Tamaño',
                itemId: 'size',
                width: 350,
                value: 0,
                minValue: 0,
                maxValue: 100
            },
            {
                xtype: 'slider',
                fieldLabel: 'Intensidad',
                itemId: 'amount',
                width: 350,
                value: 0,
                minValue: 0,
                maxValue: 100
            }
        ];
    },
    // Re-render the preview with the vignette applied at the slider values.
    previewFilter: function(values) {
        this.fxCanvas.draw(this.texture).vignette(values.size / 100, values.amount / 100).update();
    }
});
tikskit/otus-java-dev-class | hw13-atm-department/src/main/java/ru/tikskit/atm/NotEnoughMoneyException.java | package ru.tikskit.atm;
/**
 * Thrown when the ATM holds less money than the amount requested for withdrawal.
 */
public class NotEnoughMoneyException extends ATMException {
    public NotEnoughMoneyException(int requiredAmount) {
        // User-facing message is intentionally in Russian:
        // "Not enough money in the ATM: %d!"
        super(String.format("Недостаточно денег в банкомате: %d!", requiredAmount));
    }
}
|
redrye/PowershellTerminal | terminalpp/window.h | #pragma once
#include <algorithm>
#include <thread>
#include <mutex>
#include "helpers/time.h"
#include "ui/canvas.h"
#include "ui/renderer.h"
#include "ui/event_queue.h"
#include "config.h"
#include "application.h"
#include "font.h"
namespace tpp {
using namespace ui;
class Window : public Renderer {
public:
/** Determines the icon of the renderer's window where appropriate.
Instead of specifying the actual icon, which is left to the actual renderers being used, the icon specifies the meaning of the icon.
*/
enum class Icon {
Default,
Notification,
}; // Window::Icon
std::string const & title() const {
return title_;
}
virtual void setTitle(std::string const & value) {
if (value != title_)
title_ = value;
}
Icon icon() const {
return icon_;
}
virtual void setIcon(Icon value) {
if (value != icon_)
icon_ = value;
}
Size sizePx() const {
return sizePx_;
}
double zoom() const {
return zoom_;
}
virtual void setZoom(double value) {
if (zoom_ != value)
zoom_ = value;
}
bool fullscreen() const {
return fullscreen_;
}
virtual void setFullscreen(bool value = true) {
if (fullscreen_ != value)
fullscreen_ = value;
}
virtual void show(bool value = true) = 0;
/** Determines the background color of the window.
The background color of the renderer is used to draw the parts of the window that are not accessible from the cells, such as when the pixel size does not correspond to cell size multiplies.
This is obtained from the root widget's background so that there are no borders of different color around the widget. This means that the root widget's background color for full size appliactions (such as the terminal itself) must be properly adjusted to the terminal itself.
*/
Color backgroundColor() const {
if (root() != nullptr)
return root()->background();
else
return Color::Black;
}
/** \name Window Closing.
To request a window to close, the requestClose() method should be called, which triggers the onClose event. Unless deactivated in the handler, the close() method will be called immediately after the event is serviced. The close() method then actually closes the window.
This gives the window the ability to implement conditional closings such as close of a window with multiple terminals can be rejected.
The close() method of the window should never be called by the widgets. Rather the state should be updated in such way that the onClose handler will not deactivate the event and then requestClose() should be called.
TODO maybe this should move to renderer proper? See if this will be needed for ansi renderer as well.
*/
//@{
public:
using CloseEvent = ui::Event<void, Window>;
/** Requests the window to be closed,.
Generates the `onClose` event and unless the event is deactivated, calls the close() method afterwards. Can be called either programatically, or when user requests the closure of the window.
*/
void requestClose() {
CloseEvent::Payload p{};
onClose(p, this);
if (p.active())
close();
}
/** Triggered when closure of the widow is requested.
*/
CloseEvent onClose;
protected:
/** Closes the window immediately.
Subclasses must override the method and call parent implementation after which they must destroy the actual window which should lead to the destruction of the object (such as deleting the object in the main event loop or the UI).
*/
virtual void close() {
// delete the root if attached
Widget * rootWidget = root();
setRoot(nullptr);
delete rootWidget;
};
//@}
protected:
Window(int width, int height, FontMetrics const & font, EventQueue & eq):
Renderer{Size{width, height}, eq},
title_{"terminal++"},
icon_{Icon::Default},
zoom_{1.0},
fullscreen_{false} {
// calculate the width and height in pixels and the cell dimensions from the font at given zoom level
baseFontSize_ = font.cellSize();
// TODO do I want round, or float instead?
cellSize_ = baseFontSize_ * zoom_; //Size{static_cast<int>(baseFontSize_.width() * zoom_),static_cast<int>(baseFontSize_.height() * zoom_)};
sizePx_ = Size{cellSize_.width() * width, cellSize_.height() * height};
// set the desired fps for the renderer
setFps(Config::Instance().renderer.fps());
}
virtual void windowResized(int width, int height) {
if (width != sizePx_.width() || height != sizePx_.height()) {
sizePx_ = Size{width, height};
// tell the renderer to resize
resize(Size{width / cellSize_.width(), height / cellSize_.height()});
}
}
/** Converts the x & y coordinates in pixels to cell coordinates.
    Performs floor division so that negative pixel coordinates (mouse left of /
    above the window) map to negative cells.
*/
Point pixelsToCoords(Point xy) {
    int cw = cellSize_.width();
    int ch = cellSize_.height();
    int px = xy.x() / cw;
    int py = xy.y() / ch;
    // C++ integer division truncates toward zero, so negative coordinates must
    // be adjusted down to get floor semantics -- but only when there is a
    // remainder. The previous code decremented unconditionally for any
    // negative value, which mapped exact multiples (e.g. x == -cw) one cell
    // too far to the left.
    if (xy.x() < 0 && xy.x() % cw != 0)
        --px;
    if (xy.y() < 0 && xy.y() % ch != 0)
        --py;
    return Point{px, py};
}
/** \name Mouse Input
The coordinates reported to the renderer are in pixels and must be converted to terminal columns and rows before they are passed further.
TODO do I need these?
*/
//@{
void mouseDown(Point coords, MouseButton button) override {
    // Count pressed buttons so the backend knows when to release mouse capture.
    ++mouseButtonsDown_;
    Renderer::mouseDown(coords, button);
}
void mouseUp(Point coords, MouseButton button) override {
    // Guard against a spurious up event without a matching down.
    if (mouseButtonsDown_ > 0)
        --mouseButtonsDown_;
    Renderer::mouseUp(coords, button);
}
//@}
/** Title of the window.
*/
std::string title_;
Icon icon_;
/** Size of the client area of the window in pixels.
*/
Size sizePx_;
/** Base font size, i.e. the cell size of a font with zoom equal to 1.0.
*/
Size baseFontSize_;
/** The size actual cell in the window (i.e. with current zoom settings)
*/
Size cellSize_;
double zoom_;
bool fullscreen_;
/** Mouse buttons that are currently down so that we know when to release the mouse capture. */
unsigned mouseButtonsDown_ = 0;
}; // tpp::Window
/** Templated child of the Window that provides support for fast rendering via CRTP.
*/
template<typename IMPLEMENTATION, typename NATIVE_HANDLE>
class RendererWindow : public Window {
public:
    /** Changes the zoom level: re-requests the font at the new size, updates
        the cell size and resizes the renderer to the number of cells that now
        fit into the unchanged pixel size. */
    void setZoom(double value) override {
        if (value != zoom_) {
            Window::setZoom(value);
            // get the font dimensions
            typename IMPLEMENTATION::Font * f = IMPLEMENTATION::Font::Get(ui::Font(), static_cast<int>(baseFontSize_.height() * zoom_));
            cellSize_ = f->cellSize();
            // tell the renderer to resize
            resize(Size{sizePx_.width() / cellSize_.width(), sizePx_.height() / cellSize_.height()});
        }
    }

protected:

    /** Creates new renderer window.
        Initializes the font metrics and event queue tied to the implementation type (DirectWrite, X11, etc.).
     */
    RendererWindow(int width, int height, EventQueue & eventQueue):
        Window{width, height, * IMPLEMENTATION::Font::Get(ui::Font(), tpp::Config::Instance().renderer.font.size()), eventQueue},
        lastCursorPos_{-1,-1} {
    }

    // Scratch cell holding the font/color/decoration state of the current
    // glyph run during render().
    Cell state_;
    // Position at which the cursor was last drawn while blink was visible;
    // used to keep a *moving* cursor visible even during the blink-off phase.
    Point lastCursorPos_;

    /** Returns the window registered for the given native handle, or nullptr. */
    static IMPLEMENTATION * GetWindowForHandle(NATIVE_HANDLE handle) {
        ASSERT(GlobalState_ != nullptr);
        std::lock_guard<std::mutex> g(GlobalState_->mWindows);
        auto i = GlobalState_->windows.find(handle);
        return i == GlobalState_->windows.end() ? nullptr : i->second;
    }

    /** Registers a newly created window under its native handle. */
    static void RegisterWindowHandle(IMPLEMENTATION * window, NATIVE_HANDLE handle) {
        ASSERT(GlobalState_ != nullptr);
        std::lock_guard<std::mutex> g(GlobalState_->mWindows);
        ASSERT(GlobalState_->windows.find(handle) == GlobalState_->windows.end());
        GlobalState_->windows.insert(std::make_pair(handle, window));
    }

    /** Removes the window with given handle from the list of windows.
     */
    static void UnregisterWindowHandle(NATIVE_HANDLE handle) {
        ASSERT(GlobalState_ != nullptr);
        std::lock_guard<std::mutex> g(GlobalState_->mWindows);
        GlobalState_->windows.erase(handle);
    }

    /** Current visibility of blinking text/cursor (toggled by the blinker thread). */
    static bool BlinkVisible() {
        ASSERT(GlobalState_ != nullptr);
        return GlobalState_->blinkVisible;
    }

    /** Blink period in milliseconds, shared by all windows. */
    static unsigned BlinkSpeed() {
        ASSERT(GlobalState_ != nullptr);
        return GlobalState_->blinkSpeed;
    }

    /** Returns a copy of the handle -> window map.
        NOTE(review): the copy is taken *without* holding mWindows -- this can
        race with the blinker thread's register/unregister; confirm callers run
        only where no concurrent mutation is possible. */
    static std::unordered_map<NATIVE_HANDLE, IMPLEMENTATION*> const Windows() {
        ASSERT(GlobalState_ != nullptr);
        return GlobalState_->windows;
    }

    /** Starts the blinker thread that runs for the duration of the application and periodically repaints all windows so that blinking text is properly displayed.
        The method must be called by the Application instance startup.
     */
    static void StartBlinkerThread() {
        // Heap allocate the state so the detached thread never outlives it.
        GlobalState_ = new GlobalState{};
        std::thread t([](){
            GlobalState_->blinkVisible = true;
            while (true) {
                std::this_thread::sleep_for(std::chrono::milliseconds(GlobalState_->blinkSpeed));
                GlobalState_->blinkVisible = ! GlobalState_->blinkVisible;
                {
                    std::lock_guard<std::mutex> g(GlobalState_->mWindows);
                    for (auto i : GlobalState_->windows)
                        i.second->repaint();
                }
            }
        });
        t.detach();
    }

    /** Global state for the window management and rendering.
        Because the blinker thread is detached, the global state must be heap allocated so that the objects here are never deallocated in case the blinker thread will execute after the main function ends.
     */
    struct GlobalState {
        /** A map which points from the native handles to the windows. */
        std::unordered_map<NATIVE_HANDLE, IMPLEMENTATION *> windows;
        /** Guard for the list of windows (ui thread and the blinker thread). */
        std::mutex mWindows;
        /** Current visibility of the blinking text. */
        std::atomic<bool> blinkVisible;
        /** The speed of the blinking text, same for all windows in the application. */
        unsigned blinkSpeed = DEFAULT_BLINK_SPEED;
    }; // tpp::RendererWindow::GlobalState

    static GlobalState * GlobalState_;

    // CRTP dispatch helpers: forward the drawing primitives to the concrete
    // implementation without virtual-call overhead. They are #undef'd again
    // right after render() so they cannot leak out of this class body.
    #define initializeDraw(...) static_cast<IMPLEMENTATION*>(this)->initializeDraw(__VA_ARGS__)
    #define initializeGlyphRun(...) static_cast<IMPLEMENTATION*>(this)->initializeGlyphRun(__VA_ARGS__)
    #define addGlyph(...) static_cast<IMPLEMENTATION*>(this)->addGlyph(__VA_ARGS__)
    #define changeFont(...) static_cast<IMPLEMENTATION*>(this)->changeFont(__VA_ARGS__)
    #define changeFg(...) static_cast<IMPLEMENTATION*>(this)->changeForegroundColor(__VA_ARGS__)
    #define changeBg(...) static_cast<IMPLEMENTATION*>(this)->changeBackgroundColor(__VA_ARGS__)
    #define changeDecor(...) static_cast<IMPLEMENTATION*>(this)->changeDecorationColor(__VA_ARGS__)
    #define drawGlyphRun(...) static_cast<IMPLEMENTATION*>(this)->drawGlyphRun(__VA_ARGS__)
    #define drawBorder(...) static_cast<IMPLEMENTATION*>(this)->drawBorder(__VA_ARGS__)
    #define finalizeDraw(...) static_cast<IMPLEMENTATION*>(this)->finalizeDraw(__VA_ARGS__)

    using Renderer::render;

    /** Renders the whole window: all cells as batched glyph runs, then the
        cursor, then cell borders on top. The rect argument is ignored -- the
        entire buffer is always redrawn. */
    void render(Rect const & rect) override {
        MARK_AS_UNUSED(rect);
        // then actually render the entire window
        Stopwatch t;
        t.start();
        // shorthand to the buffer
        Buffer const & buffer = this->buffer();
        // initialize the drawing and set the state for the first cell
        initializeDraw();
        state_ = buffer.at(0,0);
        changeFont(state_.font());
        changeFg(state_.fg());
        changeBg(state_.bg());
        changeDecor(state_.decor());
        // loop over the buffer and draw the cells
        for (int row = 0, re = height(); row < re; ++row) {
            initializeGlyphRun(0, row);
            for (int col = 0, ce = width(); col < ce; ) {
                Cell const & c = buffer.at(col, row);
                // detect if there were changes in the font & colors and update the state & draw the glyph run if present. The code looks a bit ugly as we have to first draw the glyph run and only then change the state.
                bool drawRun = true;
                if (state_.font() != c.font()) {
                    if (drawRun) {
                        drawGlyphRun();
                        initializeGlyphRun(col, row);
                        drawRun = false;
                    }
                    changeFont(c.font());
                    state_.setFont(c.font());
                }
                if (state_.fg() != c.fg()) {
                    if (drawRun) {
                        drawGlyphRun();
                        initializeGlyphRun(col, row);
                        drawRun = false;
                    }
                    changeFg(c.fg());
                    state_.setFg(c.fg());
                }
                if (state_.bg() != c.bg()) {
                    if (drawRun) {
                        drawGlyphRun();
                        initializeGlyphRun(col, row);
                        drawRun = false;
                    }
                    changeBg(c.bg());
                    state_.setBg(c.bg());
                }
                if (state_.decor() != c.decor()) {
                    if (drawRun) {
                        drawGlyphRun();
                        initializeGlyphRun(col, row);
                        drawRun = false;
                    }
                    changeDecor(c.decor());
                    state_.setDecor(c.decor());
                }
                // we don't care about the border at this stage
                // draw the cell
                addGlyph(col, row, c);
                // move to the next column (skip invisible cols if double width or larger font)
                col += c.font().width();
            }
            drawGlyphRun();
        }
        // determine the cursor, its visibility and its position and draw it if necessary. The cursor is drawn when it is not blinking, when its position has changed since last time it was drawn with blink on or if it is blinking and blink is visible. This prevents the cursor for disappearing while moving
        Point cursorPos = buffer.cursorPosition();
        Canvas::Cursor cursor = buffer.cursor();
        if (buffer.contains(cursorPos) && cursor.visible() && (! cursor.blink() || BlinkVisible() || cursorPos != lastCursorPos_)) {
            state_.setCodepoint(cursor.codepoint());
            state_.setFg(cursor.color());
            state_.setBg(Color::None);
            state_.setFont(buffer.at(cursorPos).font());
            changeFont(state_.font());
            changeFg(state_.fg());
            changeBg(state_.bg());
            initializeGlyphRun(cursorPos.x(), cursorPos.y());
            addGlyph(cursorPos.x(), cursorPos.y(), state_);
            drawGlyphRun();
            if (BlinkVisible())
                lastCursorPos_ = cursorPos;
        }
        // finally, draw the border, which is done on the base cell level over the already drawn text
        int wThin = std::min(cellSize_.width(), cellSize_.height()) / 4;
        int wThick = std::min(cellSize_.width(), cellSize_.height()) / 2;
        Color borderColor = buffer.at(0,0).border().color();
        changeBg(borderColor);
        for (int row = 0, re = height(); row < re; ++row) {
            for (int col = 0, ce = width(); col < ce; ++col) {
                Border b = buffer.at(col, row).border();
                if (b.color() != borderColor) {
                    borderColor = b.color();
                    changeBg(borderColor);
                }
                if (! b.empty())
                    drawBorder(col, row, b, wThin, wThick);
            }
        }
        finalizeDraw();
    }

    #undef initializeDraw
    #undef initializeGlyphRun
    #undef addGlyph
    #undef changeFont
    #undef changeFg
    #undef changeBg
    #undef changeDecor
    // NOTE(review): changeBorderColor was never #define'd above; the #undef of
    // an undefined macro is harmless but looks like a leftover.
    #undef changeBorderColor
    #undef drawGlyphRun
    #undef drawBorder
    #undef finalizeDraw

}; // tpp::RendererWindow

// Out-of-class definition of the per-instantiation global state pointer.
template<typename IMPLEMENTATION, typename NATIVE_HANDLE>
typename tpp::RendererWindow<IMPLEMENTATION, NATIVE_HANDLE>::GlobalState * tpp::RendererWindow<IMPLEMENTATION, NATIVE_HANDLE>::GlobalState_ = nullptr;
} // namespace tpp
|
SwarajPaul99/Health-Care-System | src/main/java/com/Sprint/HealthCareSystem/Service/IUserService.java | package com.Sprint.HealthCareSystem.Service;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.Sprint.HealthCareSystem.Entity.User;
import com.Sprint.HealthCareSystem.Exceptions.UserCreationError;
import com.Sprint.HealthCareSystem.Exceptions.UserNotFound;
import com.Sprint.HealthCareSystem.Repository.IUserRepository;
import com.Sprint.HealthCareSystem.Repository.Queries.QueryClass;
import com.Sprint.HealthCareSystem.Validators.InputValidator;
@Service
public class IUserService implements UserService {

    @Autowired
    private IUserRepository iuserRepository;

    @Autowired
    InputValidator validate;

    @Autowired(required = false)
    QueryClass qc;

    /**
     * Authenticates a user by username and password.
     *
     * @throws UserNotFound when the username is unknown or the password does not match.
     * NOTE(review): passwords are compared in plain text; consider storing and
     * comparing hashes instead.
     */
    @Override
    public User validateUser(String username, String password) throws UserNotFound {
        User pUser = qc.findByUserName(username);
        if (pUser == null) throw new UserNotFound("Invalid Username");
        if (pUser.getPassword().equals(password)) return pUser;
        else {
            throw new UserNotFound("Invalid Password");
        }
    }

    /**
     * Registers a new user after validating the username (format + uniqueness)
     * and the password; new users always get the "user" role.
     *
     * @throws UserCreationError when any validation fails.
     */
    @Override
    public User addUser(User user) throws UserCreationError {
        if (!validate.usernameValidator(user.getUsername()))
            throw new UserCreationError("Check Username !!!!");
        if (iuserRepository.existsByusername(user.getUsername()))
            throw new UserCreationError("username Already exists");
        if (!validate.passwordValidator(user.getPassword()))
            throw new UserCreationError("Cannot register this User with this password");
        user.setRole("user");
        return iuserRepository.saveAndFlush(user);
    }

    /**
     * Updates an existing user and returns the persisted entity.
     *
     * BUG FIX: the previous implementation fetched the stored entity and
     * *deleted* it instead of saving the incoming changes, silently removing
     * the user on every update. It now persists the updated entity.
     *
     * @throws UserNotFound when no user with the given id exists.
     */
    @Override
    public User updateUser(User user) throws UserNotFound {
        if (!iuserRepository.existsById(user.getUid()))
            throw new UserNotFound("User with id :" + user.getUid() + " Doesn't Exist");
        return iuserRepository.saveAndFlush(user);
    }

    /**
     * Deletes the user with the given id; returns the removed entity.
     *
     * @throws UserNotFound when no user with the given id exists.
     */
    @Override
    public User removeUser(User user) throws UserNotFound {
        if (!iuserRepository.existsById(user.getUid()))
            throw new UserNotFound("User with id :" + user.getUid() + " Doesn't Exist");
        iuserRepository.deleteById(user.getUid());
        return user;
    }

    /** Returns all users. */
    @Override
    public List<User> getAll() {
        return iuserRepository.findAll();
    }
}
|
weihualiu/SDBC | ds/tbound.c | <filename>ds/tbound.c
/*************************************************
 * Demonstrates how lowerBound() and upperBound()
 * can be combined to implement <, <= and >=
 * lookups on a sorted array.
 ************************************************/
#include <stdio.h>
#include <Binary_search.h>
#include <pack.h>
// Index of the last element strictly smaller than key.
int less_than(void *key,void *data,int data_siz,int cmp(void *key,void *data,int n))
{
int ret;
if(0>(ret=lowerBound(key,data,data_siz,cmp)) &&
0>(ret=upperBound(key,data,data_siz,cmp))) {
ret=data_siz;
}
return --ret;
}
// Index of the last element <= key.
int less_eq(void *key,void *data,int data_siz,int cmp(void *key,void *data,int n))
{
int ret;
if(0>(ret=upperBound(key,data,data_siz,cmp)))
ret=data_siz;
return --ret;
}
// Index of the first element >= key.
int great_eq(void *key,void *data,int data_siz,int cmp(void *key,void *data,int n))
{
int ret;
return (ret=lowerBound(key,data,data_siz,cmp))>=0?ret:
upperBound(key,data,data_siz,cmp);
}
//static int tab[]={-1,-1,0,1,1,1,1,2,2,2,2,3,3,4,5,5,5,5,5,5,6,6,6,7};
static int tab[]={0,0,2,2,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,7,10,10};
#define COUNT sizeof(tab)/sizeof(int)
static int tab1[]={1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39};
#define CNT1 sizeof(tab1)/sizeof(int)
static int int_cmp(void *key,void *data,int n)
{
int *d=(int *)data+n;
printf("data[%d]=%d,key=%d\n",n,*d,*(int*)key);
if(*d > *(int*)key) return 1;
if(*d < *(int*)key) return -1;
return 0;
}
/* Exercises the bound helpers on the sample tables and prints the results. */
int main(int argc, char *argv[])
{
    int ret, low, up, key;
    // tab contains many duplicates of 5 -- a good stress for the bounds.
    key = 10;
    low = lowerBound(&key, tab, COUNT, int_cmp);
    printf("------------\n");
    up = upperBound(&key, tab, COUNT, int_cmp);
    printf("------------\n");
    ret = less_than(&key, tab, COUNT, int_cmp);
    printf("------------\n");
    printf("key=%d,low=%d,up=%d,less=%d,less_eq=%d,great_eq=%d\n", key, low, up, ret,
           less_eq(&key, tab, COUNT, int_cmp),
           great_eq(&key, tab, COUNT, int_cmp));
    // tab1 holds only odd numbers, so key=0 is below every element.
    key = 0;
    ret = Binary_GTEQ(&key, tab1, CNT1, int_cmp);
    printf("GTEQ:key=%d,ret=%d,data=%d\n", key, ret, (ret >= 0) ? tab1[ret] : INTNULL);
    return 0;
}
|
uktrade/data-hub-frontend | test/functional/cypress/specs/dashboard-new/projects-filter-spec.js | import { INVESTMENT_PROJECT_STAGES } from '../../fakers/constants'
import { investmentProjectListFaker } from '../../fakers/investment-projects'
import { investmentProjectSummaryFaker } from '../../fakers/investment-project-summary'
import { assertPayload } from '../../support/assertions'
// The UK financial year starts in April (month index 3): dates in Jan-Mar
// belong to the financial year that began the previous calendar year.
const getFinancialYearStart = (date) => {
  const calendarYear = date.getFullYear()
  return date.getMonth() < 3 ? calendarYear - 1 : calendarYear
}
// Adviser used to scope the dashboard list to "my projects".
const myAdviser = {
  id: '7d19d407-9aec-4d06-b190-<PASSWORD>046<PASSWORD>',
  name: '<NAME>',
}

// Metadata ids referenced by the filter payload assertions below.
const prospectStageId = '8a320cc9-ae2e-443e-9d26-2f36452c2ced'
const ongoingStatusId = 'ongoing'

// Payload the dashboard sends when no filters are applied; individual tests
// spread extra filter fields on top of it.
const minimumPayload = {
  adviser: myAdviser.id,
  limit: 10,
  offset: 0,
  sortby: 'created_on:desc',
  show_summary: true,
}
describe('Dashboard - my projects list filters', () => {
  const summary = investmentProjectSummaryFaker()

  before(() => {
    // The filtered project list is only rendered behind this feature flag.
    cy.setUserFeatures(['personalised-dashboard'])
  })

  after(() => {
    cy.resetUser()
  })

  context('When a filter is applied and there are zero projects', () => {
    before(() => {
      // Stub the search API: the "Verify win" stage returns an empty result
      // set (to exercise the empty state); every other request returns 10
      // faked projects.
      cy.intercept('POST', '/api-proxy/v3/search/investment_project', (req) => {
        req.reply(
          req.body.stage === INVESTMENT_PROJECT_STAGES.verifyWin.id
            ? { body: { count: 0, results: [], summary } }
            : {
                body: {
                  count: 10,
                  results: investmentProjectListFaker(10),
                  summary,
                },
              }
        )
      }).as('apiRequest')
      cy.visit('/')
      cy.wait('@apiRequest')
    })

    it('should display "No investment projects"', () => {
      cy.get('[data-test="stage-select"] select').select('Verify win')
      cy.wait('@apiRequest')
      cy.get('[data-test="tabpanel"] p').should(
        'have.text',
        'No investment projects'
      )
    })
  })

  context('When filters are applied', () => {
    beforeEach(() => {
      // Pass-through intercept: only used to assert the outgoing payload.
      cy.intercept('POST', '/api-proxy/v3/search/investment_project').as(
        'apiRequest'
      )
      cy.visit('/')
      cy.wait('@apiRequest')
    })

    it('should filter by stage', () => {
      cy.get('[data-test="stage-select"] select').select('Prospect')
      cy.wait('@apiRequest')
      assertPayload('@apiRequest', {
        ...minimumPayload,
        stage: prospectStageId,
      })
    })

    it('should filter by status', () => {
      cy.get('[data-test="status-select"] select').select('Ongoing')
      cy.wait('@apiRequest')
      assertPayload('@apiRequest', {
        ...minimumPayload,
        status: ongoingStatusId,
      })
    })

    it('should filter by land date', () => {
      cy.wait('@apiRequest')
      // The select lists financial-year start years, so derive the current one.
      const financialYearStart = getFinancialYearStart(new Date()).toString()
      cy.get('[data-test="land-date-select"] select').select(financialYearStart)
      assertPayload('@apiRequest', {
        ...minimumPayload,
        land_date_financial_year_start: [financialYearStart],
      })
    })
  })
})
|
dariusgrassi/upf-epc | pfcpiface/vendor/github.com/wmnsk/go-pfcp/ie/rds-configuration-information.go | <reponame>dariusgrassi/upf-epc
// Copyright 2019-2021 go-pfcp authors. All rights reserved.
// Use of this source code is governed by a MIT-style license that can be
// found in the LICENSE file.
package ie
import "io"
// NewRDSConfigurationInformation creates a new RDSConfigurationInformation IE.
// Only the least-significant (RDS) bit of rds is retained in the payload.
func NewRDSConfigurationInformation(rds uint8) *IE {
	return newUint8ValIE(RDSConfigurationInformation, rds&0x01)
}
// RDSConfigurationInformation returns RDSConfigurationInformation in uint8 if the type of IE matches.
// For a ProvideRDSConfigurationInformation grouped IE the value is taken from
// its embedded RDSConfigurationInformation child; ErrIENotFound is returned
// when no such child exists.
func (i *IE) RDSConfigurationInformation() (uint8, error) {
	if len(i.Payload) < 1 {
		return 0, io.ErrUnexpectedEOF
	}

	switch i.Type {
	case RDSConfigurationInformation:
		return i.Payload[0], nil
	case ProvideRDSConfigurationInformation:
		ies, err := i.ProvideRDSConfigurationInformation()
		if err != nil {
			return 0, err
		}
		for _, x := range ies {
			if x.Type == RDSConfigurationInformation {
				// Recurse into the child, which hits the direct case above.
				return x.RDSConfigurationInformation()
			}
		}
		return 0, ErrIENotFound
	default:
		return 0, &InvalidTypeError{Type: i.Type}
	}
}
// HasRDS reports whether an IE has RDS bit.
// It returns false for any IE type other than RDSConfigurationInformation and
// for an empty payload.
func (i *IE) HasRDS() bool {
	switch i.Type {
	case RDSConfigurationInformation:
		if len(i.Payload) < 1 {
			return false
		}
		return has1stBit(i.Payload[0])
	default:
		return false
	}
}
|
allisonverdam/cubes | cubes/metadata/defaults.py | # -*- encoding: utf-8 -*-
"""Metadata validation
"""
import pkgutil
import json
from collections import namedtuple
from .. import compat
try:
import jsonschema
except ImportError:
from ..common import MissingPackage
jsonschema = MissingPackage("jsonschema", "Model validation")
__all__ = (
"validate_model",
)
ValidationError = namedtuple("ValidationError",
["severity", "scope", "object", "property", "message"])
def validate_model(metadata):
    """Validate model metadata and return the list of validation errors."""
    return ModelMetadataValidator(metadata).validate()
class ModelMetadataValidator(object):
    """Validates model, cube and dimension metadata against the JSON schemas
    bundled with the ``cubes`` package (``schemas/*.json``)."""

    def __init__(self, metadata):
        # Raw model metadata dictionary to be validated.
        self.metadata = metadata

        # Load the packaged JSON schemas once per validator instance.
        data = pkgutil.get_data("cubes", "schemas/model.json")
        self.model_schema = json.loads(compat.to_str(data))

        data = pkgutil.get_data("cubes", "schemas/cube.json")
        self.cube_schema = json.loads(compat.to_str(data))

        data = pkgutil.get_data("cubes", "schemas/dimension.json")
        self.dimension_schema = json.loads(compat.to_str(data))

    def validate(self):
        """Validate the model and every listed cube and dimension.

        Returns a (possibly empty) list of ValidationError tuples."""
        errors = []

        errors += self.validate_model()

        if "cubes" in self.metadata:
            for cube in self.metadata["cubes"]:
                errors += self.validate_cube(cube)

        if "dimensions" in self.metadata:
            for dim in self.metadata["dimensions"]:
                errors += self.validate_dimension(dim)

        return errors

    def _collect_errors(self, scope, obj, validator, metadata):
        """Run *validator* over *metadata* and convert each jsonschema error
        into a ValidationError tagged with *scope* and object name *obj*."""
        errors = []

        for error in validator.iter_errors(metadata):
            if error.path:
                # Join the error's path into a dotted property reference.
                path = [str(item) for item in error.path]
                ref = ".".join(path)
            else:
                ref = None

            verror = ValidationError("error", scope, obj, ref, error.message)
            errors.append(verror)
        return errors

    def validate_model(self):
        """Validate the top-level model; additionally warn (severity
        "default") about dimensions given only as strings."""
        validator = jsonschema.Draft4Validator(self.model_schema)
        errors = self._collect_errors("model", None, validator, self.metadata)

        dims = self.metadata.get("dimensions")
        if dims and isinstance(dims, list):
            for dim in dims:
                if isinstance(dim, compat.string_type):
                    err = ValidationError("default", "model", None,
                                          "dimensions",
                                          "Dimension '%s' is not described, "
                                          "creating flat single-attribute "
                                          "dimension" % dim)
                    errors.append(err)

        return errors

    def validate_cube(self, cube):
        """Validate a single cube's metadata against the cube schema."""
        validator = jsonschema.Draft4Validator(self.cube_schema)
        name = cube.get("name")
        return self._collect_errors("cube", name, validator, cube)

    def validate_dimension(self, dim):
        """Validate a single dimension; adds defaulting notes (missing default
        hierarchy, implicit flat dimension) and flags the levels/attributes
        conflict as an error."""
        validator = jsonschema.Draft4Validator(self.dimension_schema)
        name = dim.get("name")
        errors = self._collect_errors("dimension", name, validator, dim)

        if "default_hierarchy_name" not in dim:
            error = ValidationError("default", "dimension", name, None,
                                    "No default hierarchy name specified, "
                                    "using first one")
            errors.append(error)

        if "levels" not in dim and "attributes" not in dim:
            error = ValidationError("default", "dimension", name, None,
                                    "Neither levels nor attributes specified, "
                                    "creating flat dimension without details")
            errors.append(error)
        elif "levels" in dim and "attributes" in dim:
            # Mutually exclusive ways of describing a dimension.
            error = ValidationError("error", "dimension", name, None,
                                    "Both levels and attributes specified")
            errors.append(error)

        return errors
|
rubicon-project/prebid-server-java | src/main/java/org/prebid/server/bidder/stroeercore/StroeerCoreBidder.java | package org.prebid.server.bidder.stroeercore;
import com.fasterxml.jackson.core.type.TypeReference;
import com.iab.openrtb.request.BidRequest;
import com.iab.openrtb.request.Imp;
import com.iab.openrtb.response.Bid;
import io.vertx.core.http.HttpMethod;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.prebid.server.bidder.Bidder;
import org.prebid.server.bidder.model.BidderBid;
import org.prebid.server.bidder.model.BidderError;
import org.prebid.server.bidder.model.BidderCall;
import org.prebid.server.bidder.model.HttpRequest;
import org.prebid.server.bidder.model.Price;
import org.prebid.server.bidder.model.Result;
import org.prebid.server.bidder.stroeercore.model.StroeerCoreBid;
import org.prebid.server.bidder.stroeercore.model.StroeerCoreBidResponse;
import org.prebid.server.currency.CurrencyConversionService;
import org.prebid.server.exception.PreBidException;
import org.prebid.server.json.DecodeException;
import org.prebid.server.json.JacksonMapper;
import org.prebid.server.proto.openrtb.ext.ExtPrebid;
import org.prebid.server.proto.openrtb.ext.request.stroeercore.ExtImpStroeerCore;
import org.prebid.server.proto.openrtb.ext.response.BidType;
import org.prebid.server.util.BidderUtil;
import org.prebid.server.util.HttpUtil;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public class StroeerCoreBidder implements Bidder<BidRequest> {
private static final String BIDDER_CURRENCY = "EUR";
private static final TypeReference<ExtPrebid<?, ExtImpStroeerCore>> STROEER_CORE_EXT_TYPE_REFERENCE =
new TypeReference<>() {
};
private final String endpointUrl;
private final JacksonMapper mapper;
private final CurrencyConversionService currencyConversionService;
public StroeerCoreBidder(String endpointUrl,
JacksonMapper mapper,
CurrencyConversionService currencyConversionService) {
this.endpointUrl = HttpUtil.validateUrl(endpointUrl);
this.mapper = Objects.requireNonNull(mapper);
this.currencyConversionService = Objects.requireNonNull(currencyConversionService);
}
@Override
public Result<List<HttpRequest<BidRequest>>> makeHttpRequests(BidRequest bidRequest) {
final List<Imp> modifiedImps = new ArrayList<>();
final List<BidderError> errors = new ArrayList<>();
for (Imp imp : bidRequest.getImp()) {
final ExtImpStroeerCore impExt;
final Price price;
try {
validateImp(imp);
impExt = parseImpExt(imp);
validateImpExt(impExt);
price = convertBidFloor(bidRequest, imp);
} catch (PreBidException e) {
errors.add(BidderError.badInput(String.format("%s. Ignore imp id = %s.", e.getMessage(), imp.getId())));
continue;
}
modifiedImps.add(modifyImp(imp, impExt, price));
}
if (modifiedImps.isEmpty()) {
return Result.withErrors(errors);
}
final BidRequest outgoingRequest = bidRequest.toBuilder().imp(modifiedImps).build();
return createHttpRequests(errors, outgoingRequest);
}
private static void validateImp(Imp imp) {
if (imp.getBanner() == null) {
throw new PreBidException("Expected banner impression");
}
}
private ExtImpStroeerCore parseImpExt(Imp imp) {
try {
return mapper.mapper().convertValue(imp.getExt(), STROEER_CORE_EXT_TYPE_REFERENCE).getBidder();
} catch (IllegalArgumentException e) {
throw new PreBidException(e.getMessage());
}
}
private static void validateImpExt(ExtImpStroeerCore impExt) {
if (StringUtils.isBlank(impExt.getSlotId())) {
throw new PreBidException("Custom param slot id (sid) is empty");
}
}
private Price convertBidFloor(BidRequest bidRequest, Imp imp) {
final BigDecimal bidFloor = imp.getBidfloor();
final String bidFloorCurrency = imp.getBidfloorcur();
if (!shouldConvertBidFloor(bidFloor, bidFloorCurrency)) {
return Price.of(bidFloorCurrency, bidFloor);
}
final BigDecimal convertedBidFloor = currencyConversionService.convertCurrency(
bidFloor, bidRequest, bidFloorCurrency, BIDDER_CURRENCY);
return Price.of(BIDDER_CURRENCY, convertedBidFloor);
}
private Result<List<HttpRequest<BidRequest>>> createHttpRequests(List<BidderError> errors, BidRequest bidRequest) {
return Result.of(Collections.singletonList(HttpRequest.<BidRequest>builder()
.method(HttpMethod.POST)
.uri(endpointUrl)
.body(mapper.encodeToBytes(bidRequest))
.payload(bidRequest)
.headers(HttpUtil.headers())
.build()), errors);
}
private static boolean shouldConvertBidFloor(BigDecimal bidFloor, String bidFloorCurrency) {
return BidderUtil.isValidPrice(bidFloor) && !StringUtils.equalsIgnoreCase(bidFloorCurrency, BIDDER_CURRENCY);
}
private static Imp modifyImp(Imp imp, ExtImpStroeerCore impExt, Price price) {
return imp.toBuilder()
.bidfloorcur(price.getCurrency())
.bidfloor(price.getValue())
.tagid(impExt.getSlotId())
.build();
}
@Override
public Result<List<BidderBid>> makeBids(BidderCall<BidRequest> httpCall, BidRequest bidRequest) {
try {
final String body = httpCall.getResponse().getBody();
final StroeerCoreBidResponse response = mapper.decodeValue(body, StroeerCoreBidResponse.class);
return Result.withValues(extractBids(response));
} catch (DecodeException e) {
return Result.withError(BidderError.badServerResponse(e.getMessage()));
}
}
private static List<BidderBid> extractBids(StroeerCoreBidResponse bidResponse) {
if (bidResponse == null || CollectionUtils.isEmpty(bidResponse.getBids())) {
return Collections.emptyList();
}
return bidResponse.getBids().stream()
.filter(Objects::nonNull)
.map(StroeerCoreBidder::toBidderBid)
.collect(Collectors.toList());
}
private static BidderBid toBidderBid(StroeerCoreBid stroeercoreBid) {
return BidderBid.of(
Bid.builder()
.id(stroeercoreBid.getId())
.impid(stroeercoreBid.getBidId())
.w(stroeercoreBid.getWidth())
.h(stroeercoreBid.getHeight())
.price(stroeercoreBid.getCpm())
.adm(stroeercoreBid.getAdMarkup())
.crid(stroeercoreBid.getCreativeId())
.build(),
BidType.banner,
BIDDER_CURRENCY);
}
}
|
upstar77/spacemap | coworker/core/base_model.py | from django.db import models
from location_field.models.spatial import LocationField
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from coworker.search import index
class BaseLocation(models.Model):
    """Abstract Django model pairing a textual address with a map location."""

    address = models.CharField(max_length=250)
    # Point value; the map widget geocodes from `address`. Note the point is
    # stored as (x=longitude, y=latitude).
    lat_lng = LocationField(based_fields=['address'], zoom=7)

    class Meta:
        abstract = True

    @cached_property
    def lat(self):
        # Latitude is the point's y coordinate.
        return self.lat_lng.y

    @cached_property
    def lng(self):
        # Longitude is the point's x coordinate.
        return self.lat_lng.x
class BaseSearch(index.Indexed):
    """Mixin for searchable models; subclasses must provide a serializer."""

    def get_serializer(self):
        """Return the serializer used for this object.

        Raises:
            NotImplementedError: always; subclasses must override.
        """
        raise NotImplementedError
|
RhoInc/participant-visit-listing | src/listing/onDraw/addSummaries/visit.js | <filename>src/listing/onDraw/addSummaries/visit.js
import { nest, format } from 'd3';
// Annotates each visit column header with a per-status completion summary
// (as a tooltip) and optionally abbreviates the header text.
export default function visit() {
    this.pvl.data.sets.visit_col.forEach(visit => {
        // All filtered rows belonging to this visit.
        const visit_data = this.pvl.data.filtered.filter(
            d => d[this.pvl.settings.visit_col] === visit.name
        );

        // Share of rows per visit status, formatted as percentages.
        const visit_summary = nest()
            .key(d => d[this.pvl.settings.visit_status_col])
            .rollup(d => format('%')(d.length / visit_data.length))
            .entries(visit_data);

        // The <th> for this visit (the first column is not a visit).
        const visit_cell = this.table
            .selectAll('thead tr')
            .selectAll('th:not(:first-child)')
            .filter(d => d === visit.name);

        // Tooltip: visit name followed by one line per status.
        visit_cell.attr(
            'title',
            `${visit.name}\n - ${visit_summary
                .map(status => `${status.key} (${status.values})`)
                .join('\n - ')}`
        );

        // Swap the header text for its abbreviation when configured.
        // NOTE(review): the comparison is against the *string* 'undefined',
        // which implies the abbreviation arrives as text -- confirm upstream.
        if (this.pvl.settings.abbreviate_visits)
            visit_cell.text(d => {
                const abbreviation = this.pvl.data.sets.visit_col.find(visit => visit.name === d)
                    .abbreviation;
                return abbreviation !== 'undefined'
                    ? this.pvl.data.sets.visit_col.find(visit => visit.name === d).abbreviation
                    : d;
            });
    });
}
|
cstom4994/SourceEngineRebuild | src/public/panorama/controls/animatedimagestrip.h | //=========== Copyright Valve Corporation, All rights reserved. ===============//
//
// Purpose:
//=============================================================================//
#ifndef ANIMATED_IMAGE_STRIP_H
#define ANIMATED_IMAGE_STRIP_H
#ifdef _WIN32
#pragma once
#endif
#include "image.h"
namespace panorama
{
//-----------------------------------------------------------------------------
// Purpose: Animated Image Strip
//
// Takes an image that has multiple sub-frames and animates displaying them.
// Accepts strips in either horizontal or vertical orientation.
//-----------------------------------------------------------------------------
class CAnimatedImageStrip : public CImagePanel
{
    DECLARE_PANEL2D( CAnimatedImageStrip, CImagePanel );

public:
    CAnimatedImageStrip( CPanel2D *parent, const char * pchPanelID );
    virtual ~CAnimatedImageStrip();

    // CImagePanel overrides
    virtual bool BSetProperty( CPanoramaSymbol symName, const char *pchValue ) OVERRIDE;
    virtual void Paint() OVERRIDE;

    // Playback control
    void StartAnimating();
    void StopAnimating();
    void StopAnimatingAtFrame( int nFrame );

    // Frame shown while the strip is not animating.
    int GetDefaultFrame() const { return m_nDefaultFrame; }
    void SetDefaultFrame( int nFrame ) { m_nDefaultFrame = nFrame; }

    // Display duration of each frame -- units per caller convention; TODO confirm.
    float GetFrameTime() const { return m_flFrameTime; }
    void SetFrameTime( float flFrameTime ) { m_flFrameTime = flFrameTime; }

    void SetCurrentFrame( int nFrame );
    int GetCurrentFrame() const { return m_nCurrentFrameIndex; }
    int GetFrameCount();

private:
    void AdvanceFrame();
    // Maps a requested frame number to a valid strip index -- presumably
    // normalizes out-of-range values; confirm in the implementation.
    int GetFrameIndex( int nFrame );

    bool EventPanelLoaded( const CPanelPtr< IUIPanel > &panelPtr );
    bool EventAdvanceFrame();

    int m_nDefaultFrame;          // frame displayed while idle
    float m_flFrameTime;          // per-frame display time
    int m_nCurrentFrameIndex;     // frame currently shown
    int m_nStopAtFrameIndex;      // frame to halt on after StopAnimatingAtFrame()
    bool m_bAnimating;            // whether the animation is running
    bool m_bCurrentFramePainted;  // whether the current frame reached Paint()
};
#endif // PANORAMA_BUTTON_H
|
huzheng12/vue-admin | src/apis/global_management/global_letter/letterRecord.js | import {request as axios} from "@/utils/request/index.js";
export default {
  // Multilingual letters > paged search over message records
  listMessageVo: ({ isAuto, offset, limit, title_like, sendDate_GT, sendDate_LT, isSend }) => {
    return axios.request({
      url: '/appmessage/listMessageVo',
      method: 'post',
      data: {
        isAuto, // send mode: 1 = manual, 0 = automatic (per original note -- confirm)
        offset, // pagination offset
        limit, // page size
        title_like, // title keyword filter
        sendDate_GT, // send-date range start
        sendDate_LT, // send-date range end
        isSend // status: 0 = unsent, 1 = sent, 3 = recalled
      }
    })
  },
  // Multilingual letters > fetch a single record by id
  modifysee: ({ id }) => {
    return axios.request({
      url: `/appmessage/${id}`,
      method: 'get'
    })
  },
  // User list > paged customer lookup
  list: ({ ids, offset, limit }) => {
    return axios.request({
      url: '/customer/appcustomer/list',
      method: 'post',
      data: {
        ids, // customer IDs, comma separated
        offset, // pagination offset
        limit // page size
      }
    })
  }
}
|
3jins/job-seek | src/webServer/api/category.js | import express from 'express';
import Category from '../../mongodb/model/Category';
const router = express.Router();
// GET / -> respond with every category as JSON; on a database failure log the
// error and return a generic 500.
router.get('/', (req, res) => Category.find()
  .then(categories => res.json(categories))
  .catch((err) => {
    console.error(err);
    return res.status(500).json({
      message: 'Could not retrieve categories',
    });
  }));

export default router;
|
Ayub-Khan/workbench | workbench/workers/meta.py | <reponame>Ayub-Khan/workbench<gh_stars>10-100
''' Meta worker '''
import hashlib
import magic
import pprint
class MetaData(object):
    ''' This worker computes meta data for any file type. '''
    dependencies = ['sample', 'tags']

    def __init__(self):
        ''' Initialization '''
        # Last computed result; kept so callers reading worker.meta after
        # execute() keep working.
        self.meta = {}

    def execute(self, input_data):
        ''' Compute meta data (hashes, type/mime/encoding, sizes) for any file.

        Args:
            input_data: dict with the 'sample' (raw bytes plus bookkeeping
                fields) and 'tags' outputs of the dependency workers.

        Returns:
            dict with md5, tags, file/mime type, encoding and sample metadata.
        '''
        raw_bytes = input_data['sample']['raw_bytes']
        # Build the result in a local dict and publish it atomically at the
        # end; the previous code mutated self.meta key by key, so a failure
        # half-way through left the worker holding a mix of two samples.
        meta = {}
        meta['md5'] = hashlib.md5(raw_bytes).hexdigest()
        meta['tags'] = input_data['tags']['tags']
        meta['type_tag'] = input_data['sample']['type_tag']
        # Only the first 1 KiB is needed for libmagic identification.
        head = raw_bytes[:1024]
        with magic.Magic() as mag:
            meta['file_type'] = mag.id_buffer(head)
        with magic.Magic(flags=magic.MAGIC_MIME_TYPE) as mag:
            meta['mime_type'] = mag.id_buffer(head)
        with magic.Magic(flags=magic.MAGIC_MIME_ENCODING) as mag:
            try:
                meta['encoding'] = mag.id_buffer(head)
            except magic.MagicError:
                # libmagic cannot always determine an encoding; degrade softly.
                meta['encoding'] = 'unknown'
        meta['file_size'] = len(raw_bytes)
        meta['filename'] = input_data['sample']['filename']
        meta['import_time'] = input_data['sample']['import_time']
        meta['customer'] = input_data['sample']['customer']
        meta['length'] = input_data['sample']['length']
        self.meta = meta
        return meta
# Unit test: Create the class, the proper input and run the execute() method for a test
def test():
    ''' meta.py: Unit test'''
    # This worker test requires a local server running
    import zerorpc
    workbench = zerorpc.Client(timeout=300, heartbeat=60)
    workbench.connect("tcp://127.0.0.1:4242")
    # Generate the input data for this worker
    import os
    data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             '../data/pe/bad/033d91aae8ad29ed9fbb858179271232')
    # Store a known-bad PE sample in workbench, then pull it back along with
    # its tags to build the input dict that MetaData.execute() expects
    md5 = workbench.store_sample(open(data_path, 'rb').read(), 'bad_pe', 'exe')
    input_data = workbench.get_sample(md5)
    input_data.update(workbench.work_request('tags', md5))
    # Execute the worker (unit test)
    worker = MetaData()
    output = worker.execute(input_data)
    print '\n<<< Unit Test >>>'
    pprint.pprint(output)
    # Execute the worker (server test)
    output = workbench.work_request('meta', md5)
    print '\n<<< Server Test >>>'
    pprint.pprint(output)
if __name__ == "__main__":
    test()
|
Truong-Duong/SMU-BootCamp-Archive | 17-CS/01-Activities/28-Stu_Mini-Project/Main/01_bubble-sort/bubble-sort.js | // Create our input data
const unsortedInputArray = [];
// seed data in unsortedInputArray
for (let i = 0; i < 2000; i++) {
unsortedInputArray.push(Math.round(Math.random() * 2000));
}
// slightly more efficient since it will stop the `while` loop early if array is sorted
const bubbleSort = (array) => {
// set flag indicating the array hasn't been sorted yet
let sorted = false;
// as long as the array still isn't sorted, run this loop
while (!sorted) {
// automatically assume that the array is sorted by this point
// if we find that there's still some sorting to do, we'll set this to `false` to continue the `while` loop
sorted = true;
// loop through the entire array
for (let i = 0; i < array.length; i++) {
// if the value at array[i] is greater than the value to the right of it in the array, swap those values
if (array[i] > array[i + 1]) {
// since we're swapping and overwriting values, we need to temporarily store one of the values during the switch
const tmp = array[i];
// overwrite the value at `array[i]` with the value at `array[i + 1]`
array[i] = array[i + 1];
// overwrite the value at `array[i + 1]` with the value in `tmp`, which was the value originally at `array[i]`
array[i + 1] = tmp;
// since we found something to swap, we can assume the array isn't sorted still and we should run through the `while` loop again just in case
sorted = false;
}
}
}
// after the `while` loop has completed, we return the sorted array
return array;
};
// Sort the random input in place, then print the result and a completion marker.
const sorted = bubbleSort(unsortedInputArray);
console.log('Post Sort:', sorted.join(' '));
console.log('DONE!');
|
TeacherManoj0131/HacktoberFest2020-Contributions | Algorithms/Directed Graph/DirectedGraphShortestPath.cpp | <filename>Algorithms/Directed Graph/DirectedGraphShortestPath.cpp<gh_stars>100-1000
/*
Algorithm Purpose: Get the length of shortest path from 1 to a certain vertex in directed graph.
For: weighted directed graph, consisting of n vertices and m edges.
q should answer:
1. 1->v — find the length of shortest path from vertex 1 to vertex v.
2 c->l1->l2...lc — add 1 to weights of edges with indices l1, l2, ..., lc.
Input:
1. The first line contains integers n (number of vertices), m (number of edges), q (number of requests)
2. Next m lines contain the descriptions of edges: i-th of them contains description of edge with index i — three integers ai, bi, ci — the beginning and the end of edge, and its initial weight correspondingly.
3. Next q lines contain the description of edges in the format described above.
Output
the length of the shortest path from 1 to v in a separate line. ( -1, if such path does not exists.)
Example
Input
3 2 9
1 2 0
2 3 0
2 1 2
1 3
1 2
2 1 1
1 3
1 2
2 2 1 2
1 3
1 2
Output
1
0
2
1
4
2
*/
#include<bits/stdc++.h>
#define ll long long
using namespace std;
const int N = 1e5+10;
const ll inf= 0x3f3f3f3f3f3f3f3f;
// n vertices, m edges, aq queries; opt/c hold the current query;
// head[]/cnt index into the forward-star edge list e[] below
int n, m, aq, head[N], cnt, opt, c;
bool vis[N];  // "settled" markers for Dijkstra
// forward-star edge record: destination, next edge of the same source, weight
struct node
{
    int to,nxt,w;
}
e[N];
ll d[N], ad[N];  // d: dist from vertex 1; ad: per-vertex increase after weight updates
queue<int> val[N];  // bucket i holds vertices whose tentative ad value is i (used by calc)
priority_queue<pair<ll,int> > q;  // (negated dist, vertex) — max-heap used as min-heap in dj()
// Append a directed edge x -> y with weight z to the forward-star list.
void add(int x,int y,int z)
{
    ++cnt;
    e[cnt].to = y;
    e[cnt].w = z;
    e[cnt].nxt = head[x];
    head[x] = cnt;
}
// Dijkstra from vertex 1 over the current edge weights, filling d[].
// Stale heap entries are skipped lazily via vis[].
void dj()
{
    memset(d, 0x3f3f3f3f, sizeof(d));
    d[1] = 0, q.push(make_pair(d[1],1));
    while(!q.empty())
    {
        int x = q.top().second;
        q.pop();
        if(vis[x]) continue;  // already settled; this is a stale entry
        vis[x]=1;
        for(int i = head[x]; i; i = e[i].nxt)
        {
            int y = e[i].to;
            if( d[y] > d[x] + e[i].w)
            {
                d[y] = d[x] + e[i].w;
                // negated key makes the max-heap pop the smallest distance;
                // the initial push above is consistent only because d[1] == 0
                q.push(make_pair(-d[y], y));
            }
        }
    }
}
// Bucket-based relaxation of ad[], where ad[x] is how much x's shortest
// distance grows relative to the old d[] after the edge-weight increases.
// Increases larger than min(c, n-1) are discarded (see the bound below),
// so small integer buckets suffice; mx tracks the largest bucket in use
// and may grow while the outer loop runs, keeping processing in
// nondecreasing ad order.
void calc()
{
    ll mx = 0;
    for(int i=0; i<=mx; i++)
    while(val[i].size())
    {
        int x = val[i].front();
        val[i].pop(); // pop after the vertex being used
        if(ad[x] < i) continue;  // stale entry: x was re-queued with a smaller value
        for(int i = head[x]; i; i=e[i].nxt)  // NOTE(review): inner i shadows the bucket index
        {
            int y=e[i].to;
            ll z=d[x]+e[i].w-d[y];  // slack of edge (x,y) w.r.t. the old distances
            if( ad[y] > ad[x] + z )
            {
                ad[y] = ad[x] + z;
                if( ad[y] <= min(c, n-1))  // presumably larger increases can't improve any answer — bound from the query size
                {
                    mx = max(mx, ad[y]);
                    val[ad[y]].push(y);
                }
            }
        }
    }
}
int main()
{
    scanf("%d%d%d", &n, &m, &aq);
    // read the m directed weighted edges
    for(int i=1;i<=m;i++)
    {
        int x,y,z;
        scanf("%d%d%d", &x, &y, &z);
        add(x,y,z);
    }
    dj();  // baseline shortest distances from vertex 1
    while(aq--) // input lines
    {
        scanf("%d%d", &opt, &c);
        if(opt==1) cout << (d[c]==inf?-1:d[c]) <<endl;  // query: distance 1 -> c, or -1 if unreachable
        else
        {
            // type 2: increment the weights of c listed edges, then repair d[]
            // via the per-vertex increase ad[] instead of re-running Dijkstra
            for(int i=1,id;i<=c;i++) scanf("%d",&id), e[id].w++;
            memset(ad,0x3f3f3f3f,sizeof(ad));
            ad[1]=0,val[0].push(1);
            calc();
            for(int i=1;i<=n;i++) d[i]=min(inf,d[i]+ad[i]);
        }
    }
    return 0;
}
|
dmgerman/hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/MemoryMappedBlock.java | begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
end_comment
begin_package
DECL|package|org.apache.hadoop.hdfs.server.datanode.fsdataset.impl
package|package
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|hdfs
operator|.
name|server
operator|.
name|datanode
operator|.
name|fsdataset
operator|.
name|impl
package|;
end_package
begin_import
import|import
name|java
operator|.
name|nio
operator|.
name|MappedByteBuffer
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|classification
operator|.
name|InterfaceAudience
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|classification
operator|.
name|InterfaceStability
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|io
operator|.
name|nativeio
operator|.
name|NativeIO
import|;
end_import
begin_comment
comment|/** * Represents an HDFS block that is mapped to memory by the DataNode. */
end_comment
begin_class
annotation|@
name|InterfaceAudience
operator|.
name|Private
annotation|@
name|InterfaceStability
operator|.
name|Unstable
DECL|class|MemoryMappedBlock
specifier|public
class|class
name|MemoryMappedBlock
implements|implements
name|MappableBlock
block|{
DECL|field|mmap
specifier|private
name|MappedByteBuffer
name|mmap
decl_stmt|;
DECL|field|length
specifier|private
specifier|final
name|long
name|length
decl_stmt|;
DECL|method|MemoryMappedBlock (MappedByteBuffer mmap, long length)
name|MemoryMappedBlock
parameter_list|(
name|MappedByteBuffer
name|mmap
parameter_list|,
name|long
name|length
parameter_list|)
block|{
name|this
operator|.
name|mmap
operator|=
name|mmap
expr_stmt|;
name|this
operator|.
name|length
operator|=
name|length
expr_stmt|;
assert|assert
name|length
operator|>
literal|0
assert|;
block|}
annotation|@
name|Override
DECL|method|getLength ()
specifier|public
name|long
name|getLength
parameter_list|()
block|{
return|return
name|length
return|;
block|}
annotation|@
name|Override
DECL|method|getAddress ()
specifier|public
name|long
name|getAddress
parameter_list|()
block|{
return|return
operator|-
literal|1L
return|;
block|}
annotation|@
name|Override
DECL|method|close ()
specifier|public
name|void
name|close
parameter_list|()
block|{
if|if
condition|(
name|mmap
operator|!=
literal|null
condition|)
block|{
name|NativeIO
operator|.
name|POSIX
operator|.
name|munmap
argument_list|(
name|mmap
argument_list|)
expr_stmt|;
name|mmap
operator|=
literal|null
expr_stmt|;
block|}
block|}
block|}
end_class
end_unit
|
SolidDesignNet/j1939-84 | src-test/org/etools/j1939_84/controllers/part02/Part02Step08ControllerTest.java | <reponame>SolidDesignNet/j1939-84
/*
* Copyright 2021 Equipment & Tool Institute
*/
package org.etools.j1939_84.controllers.part02;
import static net.soliddesign.j1939tools.j1939.packets.DM26TripDiagnosticReadinessPacket.PGN;
import static org.etools.j1939_84.J1939_84.NL;
import static org.etools.j1939_84.model.Outcome.FAIL;
import static org.etools.j1939_84.model.Outcome.INFO;
import static org.etools.j1939_84.model.Outcome.WARN;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.util.concurrent.Executor;
import org.etools.j1939_84.controllers.DataRepository;
import org.etools.j1939_84.controllers.ResultsListener;
import org.etools.j1939_84.controllers.TestResultsListener;
import org.etools.j1939_84.model.OBDModuleInformation;
import org.etools.j1939_84.modules.BannerModule;
import org.etools.j1939_84.modules.EngineSpeedModule;
import org.etools.j1939_84.modules.ReportFileModule;
import org.etools.j1939_84.modules.VehicleInformationModule;
import org.etools.j1939_84.utils.AbstractControllerTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import net.soliddesign.j1939tools.bus.Packet;
import net.soliddesign.j1939tools.bus.RequestResult;
import net.soliddesign.j1939tools.j1939.J1939;
import net.soliddesign.j1939tools.j1939.packets.DM26TripDiagnosticReadinessPacket;
import net.soliddesign.j1939tools.modules.CommunicationsModule;
import net.soliddesign.j1939tools.modules.DateTimeModule;
/**
 * Unit tests for Part02Step08Controller (Part 2, Step 8 — DM26 Trip
 * Diagnostic Readiness).  The controller is built with mocked modules;
 * expected outcomes and report text are verified against the mocked
 * ResultsListener.
 */
@RunWith(MockitoJUnitRunner.class)
public class Part02Step08ControllerTest extends AbstractControllerTest {
    private static final int PART = 2;
    private static final int STEP = 8;
    @Mock
    private BannerModule bannerModule;
    private DataRepository dataRepository;
    @Mock
    private CommunicationsModule communicationsModule;
    @Mock
    private EngineSpeedModule engineSpeedModule;
    @Mock
    private Executor executor;
    private Part02Step08Controller instance;
    @Mock
    private J1939 j1939;
    private TestResultsListener listener;
    @Mock
    private ResultsListener mockListener;
    @Mock
    private ReportFileModule reportFileModule;
    @Mock
    private VehicleInformationModule vehicleInformationModule;
    // Wire the controller under test to the mocks and a fresh data repository.
    @Before
    public void setUp() {
        listener = new TestResultsListener(mockListener);
        DateTimeModule.setInstance(null);
        dataRepository = DataRepository.newInstance();
        instance = new Part02Step08Controller(
                                              executor,
                                              engineSpeedModule,
                                              bannerModule,
                                              vehicleInformationModule,
                                              communicationsModule,
                                              dataRepository,
                                              DateTimeModule.getInstance());
        setup(instance,
              listener,
              j1939,
              executor,
              reportFileModule,
              engineSpeedModule,
              vehicleInformationModule,
              communicationsModule);
    }
    // Each test must verify every mock interaction it causes.
    @After
    public void tearDown() {
        verifyNoMoreInteractions(executor,
                                 engineSpeedModule,
                                 bannerModule,
                                 vehicleInformationModule,
                                 communicationsModule,
                                 mockListener);
    }
    // Sanity check on the step's display name.
    @Test
    public void testGetDisplayName() {
        assertEquals("Part " + PART + " Step " + STEP, instance.getDisplayName());
    }
    // Sanity check on the step number.
    @Test
    public void testGetStepNumber() {
        assertEquals(STEP, instance.getStepNumber());
    }
    // Sanity check on the reported number of sub-steps.
    @Test
    public void testGetTotalSteps() {
        assertEquals("Total Steps", 0, instance.getTotalSteps());
    }
    // With no OBD modules registered, a silent global request produces no output.
    @Test
    public void testNoResponsesNoModules() {
        when(communicationsModule.requestDM26(any())).thenReturn(new RequestResult<>(false));
        runTest();
        verify(communicationsModule).requestDM26(any());
        assertEquals("", listener.getResults());
        assertEquals("", listener.getMessages());
    }
    // A module that neither answers nor NACKs triggers 6.2.8.2.c and 6.2.8.5.a.
    @Test
    public void testNoResponses() {
        dataRepository.putObdModule(new OBDModuleInformation(0x01));
        when(communicationsModule.requestDM26(any(), eq(0x01))).thenReturn(new RequestResult<>(true));
        when(communicationsModule.requestDM26(any())).thenReturn(new RequestResult<>(true));
        runTest();
        verify(communicationsModule).requestDM26(any());
        verify(communicationsModule).requestDM26(any(), eq(0x01));
        verify(mockListener).addOutcome(PART,
                                        STEP,
                                        FAIL,
                                        "6.2.8.2.c - Engine #2 (1) did not provide a NACK and did not provide a DM26 response");
        verify(mockListener).addOutcome(PART,
                                        STEP,
                                        INFO,
                                        "6.2.8.5.a - No responses received from Engine #2 (1)");
        assertEquals("", listener.getResults());
        assertEquals("", listener.getMessages());
    }
    // Exercises the failure paths: changed monitor-support bits (6.2.8.2.a),
    // warm-ups since code clear > 0 (6.2.8.2.b), missing NACK (6.2.8.2.c),
    // monitors supported by more than one ECU (6.2.8.3.a) and DS/global
    // response mismatches (6.2.8.5.a).
    @Test
    public void testFailures() {
        // Module 0 has a different packet from the first time
        DM26TripDiagnosticReadinessPacket packet0 = new DM26TripDiagnosticReadinessPacket(
                Packet.create(PGN,
                              0x00,
                              0x11,
                              0x22,
                              0x33,
                              0x44,
                              0x55,
                              0x66,
                              0x77,
                              0x88));
        OBDModuleInformation obdModule0 = new OBDModuleInformation(0);
        obdModule0.set(packet0, 1);
        dataRepository.putObdModule(obdModule0);
        DM26TripDiagnosticReadinessPacket packet00 = new DM26TripDiagnosticReadinessPacket(
                Packet.create(PGN,
                              0x00,
                              0x99,
                              0xAA,
                              0xBB,
                              0xCC,
                              0xDD,
                              0xEE,
                              0xFF,
                              0x00));
        when(communicationsModule.requestDM26(any(), eq(0))).thenReturn(new RequestResult<>(false, packet00));
        // Module 1 has the same both times and will not report an error
        DM26TripDiagnosticReadinessPacket packet1 = new DM26TripDiagnosticReadinessPacket(
                Packet.create(PGN,
                              0x01,
                              0x00,
                              0x00,
                              0x04,
                              0x00,
                              0xFF,
                              0xFF,
                              0xFF,
                              0xFF));
        OBDModuleInformation obdModule1 = new OBDModuleInformation(1);
        obdModule1.set(packet1, 1);
        dataRepository.putObdModule(obdModule1);
        when(communicationsModule.requestDM26(any(), eq(1))).thenReturn(new RequestResult<>(false, packet1));
        // Module 2 will not respond from the first time, but will respond this time
        dataRepository.putObdModule(new OBDModuleInformation(2));
        DM26TripDiagnosticReadinessPacket packet2 = new DM26TripDiagnosticReadinessPacket(
                Packet.create(PGN,
                              0x02,
                              0x00,
                              0x00,
                              0x04,
                              0x00,
                              0xFF,
                              0xFF,
                              0xFF,
                              0xFF));
        when(communicationsModule.requestDM26(any(), eq(2))).thenReturn(new RequestResult<>(false, packet2));
        // Module 3 will not respond
        dataRepository.putObdModule(new OBDModuleInformation(3));
        when(communicationsModule.requestDM26(any(), eq(3))).thenReturn(new RequestResult<>(true));
        when(communicationsModule.requestDM26(any()))
                .thenReturn(new RequestResult<>(false,
                                                packet0,
                                                packet1,
                                                packet2));
        runTest();
        verify(communicationsModule).requestDM26(any());
        verify(communicationsModule).requestDM26(any(), eq(0x00));
        verify(communicationsModule).requestDM26(any(), eq(0x01));
        verify(communicationsModule).requestDM26(any(), eq(0x02));
        verify(communicationsModule).requestDM26(any(), eq(0x03));
        String expectedResults = "" + NL;
        expectedResults += "Vehicle Composite of DM26:" + NL;
        expectedResults += "    A/C system refrigerant         enabled, not complete" + NL;
        expectedResults += "    Boost pressure control sys     enabled, not complete" + NL;
        expectedResults += "    Catalyst                       enabled, not complete" + NL;
        expectedResults += "    Cold start aid system          enabled, not complete" + NL;
        expectedResults += "    Comprehensive component        enabled, not complete" + NL;
        expectedResults += "    Diesel Particulate Filter      enabled, not complete" + NL;
        expectedResults += "    EGR/VVT system                 enabled, not complete" + NL;
        expectedResults += "    Evaporative system             enabled, not complete" + NL;
        expectedResults += "    Exhaust Gas Sensor             enabled, not complete" + NL;
        expectedResults += "    Exhaust Gas Sensor heater      enabled, not complete" + NL;
        expectedResults += "    Fuel System                not enabled,     complete" + NL;
        expectedResults += "    Heated catalyst                enabled, not complete" + NL;
        expectedResults += "    Misfire                    not enabled,     complete" + NL;
        expectedResults += "    NMHC converting catalyst       enabled, not complete" + NL;
        expectedResults += "    NOx catalyst/adsorber          enabled, not complete" + NL;
        expectedResults += "    Secondary air system           enabled, not complete" + NL;
        expectedResults += "" + NL;
        assertEquals(expectedResults, listener.getResults());
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        FAIL,
                                        "6.2.8.2.a - Difference from Turbocharger (2) monitor support bits this cycle compared to responses in part 1 after DM11");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        FAIL,
                                        "6.2.8.2.b - Turbocharger (2) indicates number of warm-ups since code clear greater than zero");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        FAIL,
                                        "6.2.8.2.a - Difference from Engine #1 (0) monitor support bits this cycle compared to responses in part 1 after DM11");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        FAIL,
                                        "6.2.8.2.c - Transmission #1 (3) did not provide a NACK and did not provide a DM26 response");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        FAIL,
                                        "6.2.8.2.b - Engine #2 (1) indicates number of warm-ups since code clear greater than zero");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor A/C system refrigerant is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor Boost pressure control sys is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor Catalyst is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor Cold start aid system is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor Diesel Particulate Filter is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor EGR/VVT system is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor Evaporative system is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor Exhaust Gas Sensor is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor Exhaust Gas Sensor heater is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor Heated catalyst is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor NMHC converting catalyst is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor NOx catalyst/adsorber is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        WARN,
                                        "6.2.8.3.a - Required monitor Secondary air system is supported by more than one OBD ECU");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        FAIL,
                                        "6.2.8.5.a - Difference in data between DS and global responses from Engine #1 (0)");
        verify(mockListener).addOutcome(
                                        2,
                                        8,
                                        INFO,
                                        "6.2.8.5.a - No responses received from Transmission #1 (3)");
    }
    // One module answering consistently DS and globally yields only the composite report.
    @Test
    public void testNoFailures() {
        DM26TripDiagnosticReadinessPacket packet1 = new DM26TripDiagnosticReadinessPacket(
                Packet.create(PGN,
                              0x01,
                              0x00,
                              0x00,
                              0x00,
                              0x44,
                              0x55,
                              0x66,
                              0x77,
                              0x88));
        OBDModuleInformation obdModule1 = new OBDModuleInformation(0x01);
        obdModule1.set(packet1, 1);
        dataRepository.putObdModule(obdModule1);
        when(communicationsModule.requestDM26(any())).thenReturn(new RequestResult<>(false, packet1));
        when(communicationsModule.requestDM26(any(), eq(0x01))).thenReturn(new RequestResult<>(false, packet1));
        runTest();
        verify(communicationsModule).requestDM26(any());
        verify(communicationsModule).requestDM26(any(), eq(0x01));
        String expectedResults = NL;
        expectedResults += "Vehicle Composite of DM26:" + NL;
        expectedResults += "    A/C system refrigerant         enabled, not complete" + NL;
        expectedResults += "    Boost pressure control sys     enabled,     complete" + NL;
        expectedResults += "    Catalyst                       enabled, not complete" + NL;
        expectedResults += "    Cold start aid system      not enabled,     complete" + NL;
        expectedResults += "    Comprehensive component        enabled, not complete" + NL;
        expectedResults += "    Diesel Particulate Filter      enabled,     complete" + NL;
        expectedResults += "    EGR/VVT system             not enabled,     complete" + NL;
        expectedResults += "    Evaporative system             enabled, not complete" + NL;
        expectedResults += "    Exhaust Gas Sensor         not enabled, not complete" + NL;
        expectedResults += "    Exhaust Gas Sensor heater      enabled, not complete" + NL;
        expectedResults += "    Fuel System                not enabled,     complete" + NL;
        expectedResults += "    Heated catalyst            not enabled, not complete" + NL;
        expectedResults += "    Misfire                    not enabled,     complete" + NL;
        expectedResults += "    NMHC converting catalyst   not enabled,     complete" + NL;
        expectedResults += "    NOx catalyst/adsorber      not enabled, not complete" + NL;
        expectedResults += "    Secondary air system       not enabled,     complete" + NL;
        expectedResults += NL;
        assertEquals(expectedResults, listener.getResults());
    }
}
|
rohun-tripati/pythonRepo | Previous_State_On_Repo/StrokeRecoveryOffline/Data/code/pixelsToStrokesView.py | <gh_stars>1-10
import os
import random
import sys

import matplotlib.lines as mlines
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt
import numpy as np
def add_arrow_to_line2D(
    axes, line, arrow_locs=(0.2, 0.4, 0.6, 0.8),
    arrowstyle='-|>', arrowsize=1, transform=None):
    """
    Add arrows to a matplotlib.lines.Line2D at selected locations.

    Parameters:
    -----------
    axes: the Axes the line lives on; arrows are added to it as patches
    line: list of 1 Line2D object as returned by plot command
    arrow_locs: locations where to insert arrows, as fractions of total length
    arrowstyle: style of the arrow
    arrowsize: size of the arrow
    transform: a matplotlib transform instance, default to data coordinates

    Returns:
    --------
    arrows: list of arrows

    Raises:
    -------
    ValueError: if *line* is not a one-element list holding a Line2D
    NotImplementedError: for multicolor or multiwidth lines
    """
    # Note: the default arrow_locs is now a tuple — the original used a
    # mutable list default, which is a Python anti-pattern.
    if (not(isinstance(line, list)) or not(isinstance(line[0],
                                           mlines.Line2D))):
        raise ValueError("expected a matplotlib.lines.Line2D object")
    x, y = line[0].get_xdata(), line[0].get_ydata()
    arrow_kw = dict(arrowstyle=arrowstyle, mutation_scale=10 * arrowsize)
    color = line[0].get_color()
    use_multicolor_lines = isinstance(color, np.ndarray)
    if use_multicolor_lines:
        raise NotImplementedError("multicolor lines not supported")
    else:
        arrow_kw['color'] = color
    linewidth = line[0].get_linewidth()
    if isinstance(linewidth, np.ndarray):
        raise NotImplementedError("multiwidth lines not supported")
    else:
        arrow_kw['linewidth'] = linewidth
    if transform is None:
        transform = axes.transData
    # Cumulative arc length along the line; loop-invariant, so compute it
    # once instead of per arrow as the original did.
    s = np.cumsum(np.sqrt(np.diff(x) ** 2 + np.diff(y) ** 2))
    arrows = []
    for loc in arrow_locs:
        # index of the point at fraction `loc` of the total arc length
        n = np.searchsorted(s, s[-1] * loc)
        arrow_tail = (x[n], y[n])
        arrow_head = (np.mean(x[n:n + 2]), np.mean(y[n:n + 2]))
        p = mpatches.FancyArrowPatch(
            arrow_tail, arrow_head, transform=transform,
            **arrow_kw)
        axes.add_patch(p)
        arrows.append(p)
    return arrows
def arrowview(strokepath):
    """Plot the strokes in *strokepath* with direction arrows.

    The stroke file alternates .PEN_DOWN/.PEN_UP marker lines with
    "x y" integer coordinate lines.  Each stroke is downsampled to every
    20th point and drawn as a black line annotated with arrows showing the
    pen direction.  The figure is saved to
    images/<basename-without-extension>_arrow.png.
    """
    fig, ax = plt.subplots(1, 1)
    jumppoints = 20  # keep every 20th sample to thin dense strokes
    x = []
    y = []
    with open(strokepath, "r") as strokefile:  # was left unclosed on return
        for raw in strokefile:
            raw = raw.strip()
            if raw == ".PEN_DOWN" or raw == ".PEN_UP":
                # A pen marker ends the current stroke: draw what we collected.
                # (Dead color-selection code from the original was removed —
                # its result was never used.)
                if len(x) > 1:
                    if len(x) > jumppoints:
                        x = x[::jumppoints]
                        y = y[::jumppoints]
                    drawn = ax.plot(x, y, 'k-')
                    add_arrow_to_line2D(ax, drawn,
                                        arrow_locs=np.linspace(0., 1., 200),
                                        arrowstyle='->')
                x = []
                y = []
            else:
                coor = raw.split()
                x.append(int(coor[0]))
                y.append(int(coor[1]))
    # Bug fix: the original used str.strip(".txt"), which strips any of the
    # characters '.', 't', 'x' from BOTH ends of the name (e.g. "cat.txt"
    # became "ca").  Use os.path to drop the extension correctly.
    base = os.path.splitext(os.path.basename(strokepath))[0]
    plt.savefig("images/" + base + "_arrow")
def mainview(strokepath):
    """Plot the strokes in *strokepath*, one random dashed color per stroke.

    Also renders the arrow-annotated view via arrowview().  The figure is
    saved to images/<basename-without-extension>_.png.
    """
    arrowview(strokepath)
    fig, ax = plt.subplots(1, 1)
    colors = ["r", "g", "b"]
    x = []
    y = []
    with open(strokepath, "r") as strokefile:  # was left unclosed on return
        for raw in strokefile:
            raw = raw.strip()
            if raw == ".PEN_DOWN" or raw == ".PEN_UP":
                if len(x) > 1:
                    # dashed line in a randomly chosen color, as the original
                    # effectively did (it always picked the "--" style)
                    plt.plot(x, y, random.choice(colors) + "--")
                x = []
                y = []
            else:
                coor = raw.split()
                x.append(int(coor[0]))
                y.append(int(coor[1]))
    # Bug fix: str.strip(".txt") strips *characters*, not the suffix; use
    # os.path to remove the extension correctly.
    base = os.path.splitext(os.path.basename(strokepath))[0]
    plt.savefig("images/" + base + "_")
# for i in range(1,50):
# i = i*2
# mainview("file_0_" + str(i) + ".txt") |
mcodegeeks/OpenKODE-Framework | 01_Develop/libXMFFmpeg/Source/libavcodec/j2k.cpp | <gh_stars>1-10
/*
* JPEG2000 encoder and decoder common functions
* Copyright (c) 2007 <NAME>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* JPEG2000 image encoder and decoder common functions
* @file
* @author <NAME>
*/
#include "internal.h"
#include "j2k.h"
#define SHL(a, n) ((n)>=0 ? (a) << (n) : (a) >> -(n))
#if 0
void ff_j2k_printv(int *tab, int l)
{
int i;
for (i = 0; i < l; i++)
printf("%.3d ", tab[i]);
printf("\n");
}
void ff_j2k_printu(uint8_t *tab, int l)
{
int i;
for (i = 0; i < l; i++)
printf("%.3hd ", tab[i]);
printf("\n");
}
#endif
/* tag tree routines */
/** Number of nodes in a tag tree over a w x h grid: the sum of the level
 *  sizes, halving each dimension (rounded up) until 1x1, plus the root. */
static int tag_tree_size(int w, int h)
{
    int total = 1;                  /* the 1x1 root level */
    while (w > 1 || h > 1) {
        total += w * h;
        w = (w + 1) >> 1;
        h = (h + 1) >> 1;
    }
    return total;
}
/* Allocate and link a tag tree for a w x h grid: the nodes of all levels
 * live in one zeroed array, finest level first; each node's parent is the
 * node covering its 2x2 cell on the next (half-size) level.
 * Returns NULL on allocation failure. */
J2kTgtNode *ff_j2k_tag_tree_init(int w, int h)
{
    int pw = w, ph = h;
    J2kTgtNode *res, *t, *t2;
    t = res = (J2kTgtNode *)av_mallocz(tag_tree_size(w, h)*sizeof(J2kTgtNode));
    if (res == NULL)
        return NULL;
    while (w > 1 || h > 1){
        int i, j;
        pw = w;
        ph = h;
        w = (w+1) >> 1;
        h = (h+1) >> 1;
        t2 = t + pw*ph;  /* start of the next (coarser) level */
        for (i = 0; i < ph; i++)
            for (j = 0; j < pw; j++){
                t[i*pw + j].parent = &t2[(i>>1)*w + (j>>1)];  /* 2x2 cells share a parent */
            }
        t = t2;
    }
    t[0].parent = NULL;  /* root has no parent */
    return res;
}
/* Reset every node of a tag tree covering a w x h grid to its initial
 * (value 0, not-visited) state. */
static void tag_tree_zero(J2kTgtNode *t, int w, int h)
{
    const int total = tag_tree_size(w, h);
    int i;
    for (i = 0; i < total; i++) {
        t[i].val = 0;
        t[i].vis = 0;
    }
}
uint8_t ff_j2k_nbctxno_lut[256][4];
/* Zero-coding context number (0..8) from the neighbourhood significance
 * flags, per the JPEG2000 tier-1 context tables.  h/v/d count significant
 * horizontal, vertical and diagonal neighbours; with the vertically causal
 * option set, neighbours in the row below (S/SE/SW) are ignored.  The band
 * number selects which of the three context tables applies. */
static int getnbctxno(int flag, int bandno, int vert_causal_ctx_csty_symbol)
{
    int h, v, d;
    h = ((flag & J2K_T1_SIG_E) ? 1:0)+
        ((flag & J2K_T1_SIG_W) ? 1:0);
    v = ((flag & J2K_T1_SIG_N) ? 1:0);
    if (!vert_causal_ctx_csty_symbol)
        v = v + ((flag & J2K_T1_SIG_S) ? 1:0);
    d = ((flag & J2K_T1_SIG_NE) ? 1:0)+
        ((flag & J2K_T1_SIG_NW) ? 1:0);
    if (!vert_causal_ctx_csty_symbol)
        d = d + ((flag & J2K_T1_SIG_SE) ? 1:0)+
            ((flag & J2K_T1_SIG_SW) ? 1:0);
    if (bandno < 3){
        if (bandno == 1)  /* swap the roles of horizontal and vertical neighbours */
            FFSWAP(int, h, v);
        if (h == 2) return 8;
        if (h == 1){
            if (v >= 1) return 7;
            if (d >= 1) return 6;
            return 5;
        }
        if (v == 2) return 4;
        if (v == 1) return 3;
        if (d >= 2) return 2;
        if (d == 1) return 1;
        return 0;
    } else{
        if (d >= 3) return 8;
        if (d == 2){
            if (h+v >= 1) return 7;
            return 6;
        }
        if (d == 1){
            if (h+v >= 2) return 5;
            if (h+v == 1) return 4;
            return 3;
        }
        if (h+v >= 2) return 2;
        if (h+v == 1) return 1;
        return 0;
    }
    assert(0);  /* unreachable: both branches above return */
}
uint8_t ff_j2k_sgnctxno_lut[16][16], ff_j2k_xorbit_lut[16][16];
/* Sign-coding context number (9..13) derived from the signed significance
 * of the horizontal and vertical neighbours; *xorbit receives the bit that
 * is XORed with the coded sign.  Each neighbour contributes -1/0/+1
 * depending on its significance and sign flags. */
static int getsgnctxno(int flag, uint8_t *xorbit)
{
    int vcontrib, hcontrib;
    static const int contribtab[3][3] = {{0, -1, 1}, {-1, -1, 0}, {1, 0, 1}};
    static const int ctxlbltab[3][3] = {{13, 12, 11}, {10, 9, 10}, {11, 12, 13}};
    static const int xorbittab[3][3] = {{1, 1, 1,}, {1, 0, 0}, {0, 0, 0}};
    hcontrib = contribtab[flag & J2K_T1_SIG_E ? flag & J2K_T1_SGN_E ? 1:2:0]
                         [flag & J2K_T1_SIG_W ? flag & J2K_T1_SGN_W ? 1:2:0]+1;
    vcontrib = contribtab[flag & J2K_T1_SIG_S ? flag & J2K_T1_SGN_S ? 1:2:0]
                         [flag & J2K_T1_SIG_N ? flag & J2K_T1_SGN_N ? 1:2:0]+1;
    *xorbit = xorbittab[hcontrib][vcontrib];
    return ctxlbltab[hcontrib][vcontrib];
}
/* Precompute the tier-1 context lookup tables from the slow context
 * functions, so the coder can use O(1) table lookups at run time. */
void ff_j2k_init_tier1_luts(void)
{
    int flags, bandno, sgn;

    for (flags = 0; flags < 256; flags++)
        for (bandno = 0; bandno < 4; bandno++)
            ff_j2k_nbctxno_lut[flags][bandno] = getnbctxno(flags, bandno, 0);

    for (flags = 0; flags < 16; flags++)
        for (sgn = 0; sgn < 16; sgn++)
            ff_j2k_sgnctxno_lut[flags][sgn] =
                getsgnctxno(flags + (sgn << 8), &ff_j2k_xorbit_lut[flags][sgn]);
}
/* Mark coefficient (x, y) as significant in the tier-1 flags array and set
 * the corresponding significance (and, if negative, sign) flags in its
 * eight neighbours.  The flags array has a one-cell border, hence the
 * initial x++/y++. */
void ff_j2k_set_significant(J2kT1Context *t1, int x, int y, int negative)
{
    x++; y++;
    t1->flags[y][x] |= J2K_T1_SIG;
    if (negative){
        t1->flags[y][x+1] |= J2K_T1_SIG_W | J2K_T1_SGN_W;
        t1->flags[y][x-1] |= J2K_T1_SIG_E | J2K_T1_SGN_E;
        t1->flags[y+1][x] |= J2K_T1_SIG_N | J2K_T1_SGN_N;
        t1->flags[y-1][x] |= J2K_T1_SIG_S | J2K_T1_SGN_S;
    } else{
        t1->flags[y][x+1] |= J2K_T1_SIG_W;
        t1->flags[y][x-1] |= J2K_T1_SIG_E;
        t1->flags[y+1][x] |= J2K_T1_SIG_N;
        t1->flags[y-1][x] |= J2K_T1_SIG_S;
    }
    t1->flags[y+1][x+1] |= J2K_T1_SIG_NW;
    t1->flags[y+1][x-1] |= J2K_T1_SIG_NE;
    t1->flags[y-1][x+1] |= J2K_T1_SIG_SW;
    t1->flags[y-1][x-1] |= J2K_T1_SIG_SE;
}
/**
 * Initialize a JPEG 2000 component for coding/decoding: allocate the sample
 * buffer, the resolution levels, their bands, precincts and codeblocks, and
 * precompute per-band quantization step sizes and codeblock geometry.
 *
 * @param comp   component to initialize; comp->coord must already be set
 * @param codsty coding style (number of reslevels, codeblock/precinct log2 sizes, transform)
 * @param qntsty quantization style (exponents/mantissas per band)
 * @param cbps   bits per sample of the component
 * @param dx     horizontal subsampling factor of the component
 * @param dy     vertical subsampling factor of the component
 * @return 0 on success, a negative AVERROR code on failure
 */
int ff_j2k_init_component(J2kComponent *comp, J2kCodingStyle *codsty, J2kQuantStyle *qntsty, int cbps, int dx, int dy)
{
    int reslevelno, bandno, gbandno = 0, ret, i, j, csize = 1;

    /* set up the discrete wavelet transform over the component's area */
    if (ret=ff_j2k_dwt_init(&comp->dwt, comp->coord, codsty->nreslevels-1, codsty->transform))
        return ret;
    /* csize = width * height of the component */
    for (i = 0; i < 2; i++)
        csize *= comp->coord[i][1] - comp->coord[i][0];
    comp->data = (int *)av_malloc(csize * sizeof(int));
    if (!comp->data)
        return AVERROR(ENOMEM);
    comp->reslevel = (J2kResLevel *)av_malloc(codsty->nreslevels * sizeof(J2kResLevel));
    if (!comp->reslevel)
        return AVERROR(ENOMEM);
    for (reslevelno = 0; reslevelno < codsty->nreslevels; reslevelno++){
        int declvl = codsty->nreslevels - reslevelno;
        J2kResLevel *reslevel = comp->reslevel + reslevelno;
        /* resolution-level area: component area scaled down by 2^(declvl-1) */
        for (i = 0; i < 2; i++)
            for (j = 0; j < 2; j++)
                reslevel->coord[i][j] =
                    ff_j2k_ceildivpow2(comp->coord[i][j], declvl - 1);
        /* lowest level holds only the LL band; others hold HL, LH and HH */
        if (reslevelno == 0)
            reslevel->nbands = 1;
        else
            reslevel->nbands = 3;
        /* number of precincts covering the reslevel in each direction
         * (zero when the reslevel is empty in that direction) */
        if (reslevel->coord[0][1] == reslevel->coord[0][0])
            reslevel->num_precincts_x = 0;
        else
            reslevel->num_precincts_x = ff_j2k_ceildivpow2(reslevel->coord[0][1], codsty->log2_prec_width)
                                        - (reslevel->coord[0][0] >> codsty->log2_prec_width);
        if (reslevel->coord[1][1] == reslevel->coord[1][0])
            reslevel->num_precincts_y = 0;
        else
            reslevel->num_precincts_y = ff_j2k_ceildivpow2(reslevel->coord[1][1], codsty->log2_prec_height)
                                        - (reslevel->coord[1][0] >> codsty->log2_prec_height);
        reslevel->band = (J2kBand *)av_malloc(reslevel->nbands * sizeof(J2kBand));
        if (!reslevel->band)
            return AVERROR(ENOMEM);
        for (bandno = 0; bandno < reslevel->nbands; bandno++, gbandno++){
            J2kBand *band = reslevel->band + bandno;
            int cblkno, precx, precy, precno;
            int x0, y0, x1, y1;
            int xi0, yi0, xi1, yi1;
            int cblkperprecw, cblkperprech;
            /* quantization step size for the band */
            if (qntsty->quantsty != J2K_QSTY_NONE){
                const static uint8_t lut_gain[2][4] = {{0, 0, 0, 0}, {0, 1, 1, 2}};
                int numbps;
                /* NOTE(review): '+' binds tighter than '>', so the lut index is
                 * ((bandno + reslevelno) > 0), i.e. only 0 or 1; the per-band
                 * gains {0,1,1,2} suggest 'bandno + (reslevelno > 0)' was
                 * intended — confirm against upstream before changing. */
                numbps = cbps + lut_gain[codsty->transform][bandno + reslevelno>0];
                band->stepsize = SHL(2048 + qntsty->mant[gbandno], 2 + numbps - qntsty->expn[gbandno]);
            } else
                band->stepsize = 1 << 13;
            if (reslevelno == 0){ // the same everywhere
                /* codeblocks may not exceed the precinct size (minus one level) */
                band->codeblock_width = 1 << FFMIN(codsty->log2_cblk_width, codsty->log2_prec_width-1);
                band->codeblock_height = 1 << FFMIN(codsty->log2_cblk_height, codsty->log2_prec_height-1);
                for (i = 0; i < 2; i++)
                    for (j = 0; j < 2; j++)
                        band->coord[i][j] = ff_j2k_ceildivpow2(comp->coord[i][j], declvl-1);
            } else{
                band->codeblock_width = 1 << FFMIN(codsty->log2_cblk_width, codsty->log2_prec_width);
                band->codeblock_height = 1 << FFMIN(codsty->log2_cblk_height, codsty->log2_prec_height);
                /* band offset: ((bandno+1)>>i)&1 selects the H half in each
                 * direction, shifted by (declvl-1) before the division */
                for (i = 0; i < 2; i++)
                    for (j = 0; j < 2; j++)
                        band->coord[i][j] = ff_j2k_ceildivpow2(comp->coord[i][j] - (((bandno+1>>i)&1) << declvl-1), declvl);
            }
            /* number of codeblocks covering the band in each direction */
            band->cblknx = ff_j2k_ceildiv(band->coord[0][1], band->codeblock_width) - band->coord[0][0] / band->codeblock_width;
            band->cblkny = ff_j2k_ceildiv(band->coord[1][1], band->codeblock_height) - band->coord[1][0] / band->codeblock_height;
            /* apply component subsampling */
            for (j = 0; j < 2; j++)
                band->coord[0][j] = ff_j2k_ceildiv(band->coord[0][j], dx);
            for (j = 0; j < 2; j++)
                band->coord[1][j] = ff_j2k_ceildiv(band->coord[1][j], dy);
            band->cblknx = ff_j2k_ceildiv(band->cblknx, dx);
            band->cblkny = ff_j2k_ceildiv(band->cblkny, dy);
            band->cblk = (J2kCblk *)av_malloc(band->cblknx * band->cblkny * sizeof(J2kCblk));
            if (!band->cblk)
                return AVERROR(ENOMEM);
            band->prec = (J2kPrec *)av_malloc(reslevel->num_precincts_x * reslevel->num_precincts_y * sizeof(J2kPrec));
            if (!band->prec)
                return AVERROR(ENOMEM);
            /* fresh codeblock state; lblock starts at 3 */
            for (cblkno = 0; cblkno < band->cblknx * band->cblkny; cblkno++){
                J2kCblk *cblk = band->cblk + cblkno;
                cblk->zero = 0;
                cblk->lblock = 3;
                cblk->length = 0;
                cblk->lengthinc = 0;
                cblk->npasses = 0;
            }
            /* assign vertical codeblock index ranges [yi0, yi1) to precincts,
             * row by row; the first row may be shortened by precinct alignment */
            y0 = band->coord[1][0];
            y1 = ((band->coord[1][0] + (1<<codsty->log2_prec_height)) & ~((1<<codsty->log2_prec_height)-1)) - y0;
            yi0 = 0;
            yi1 = ff_j2k_ceildivpow2(y1 - y0, codsty->log2_cblk_height) << codsty->log2_cblk_height;
            yi1 = FFMIN(yi1, band->cblkny);
            cblkperprech = 1<<(codsty->log2_prec_height - codsty->log2_cblk_height);
            for (precy = 0, precno = 0; precy < reslevel->num_precincts_y; precy++){
                for (precx = 0; precx < reslevel->num_precincts_x; precx++, precno++){
                    band->prec[precno].yi0 = yi0;
                    band->prec[precno].yi1 = yi1;
                }
                yi1 += cblkperprech;
                yi0 = yi1 - cblkperprech;
                yi1 = FFMIN(yi1, band->cblkny);
            }
            /* same assignment for horizontal ranges [xi0, xi1) */
            x0 = band->coord[0][0];
            x1 = ((band->coord[0][0] + (1<<codsty->log2_prec_width)) & ~((1<<codsty->log2_prec_width)-1)) - x0;
            xi0 = 0;
            xi1 = ff_j2k_ceildivpow2(x1 - x0, codsty->log2_cblk_width) << codsty->log2_cblk_width;
            xi1 = FFMIN(xi1, band->cblknx);
            cblkperprecw = 1<<(codsty->log2_prec_width - codsty->log2_cblk_width);
            for (precx = 0, precno = 0; precx < reslevel->num_precincts_x; precx++){
                /* NOTE(review): the increment clause resets precno to 0, so every
                 * iteration addresses band->prec[0], unlike the row loop above
                 * which advances precno. Looks like a transcription bug — confirm
                 * against the upstream FFmpeg j2k.c before relying on it. */
                for (precy = 0; precy < reslevel->num_precincts_y; precy++, precno = 0){
                    J2kPrec *prec = band->prec + precno;
                    prec->xi0 = xi0;
                    prec->xi1 = xi1;
                    prec->cblkincl = ff_j2k_tag_tree_init(prec->xi1 - prec->xi0,
                                                          prec->yi1 - prec->yi0);
                    prec->zerobits = ff_j2k_tag_tree_init(prec->xi1 - prec->xi0,
                                                          prec->yi1 - prec->yi0);
                    if (!prec->cblkincl || !prec->zerobits)
                        return AVERROR(ENOMEM);
                }
                xi1 += cblkperprecw;
                xi0 = xi1 - cblkperprecw;
                xi1 = FFMIN(xi1, band->cblknx);
            }
        }
    }
    return 0;
}
/**
 * Reset the mutable decoding state of a component so it can be coded again:
 * clears every precinct's inclusion/zero-bitplane tag trees and resets each
 * codeblock's length and Lblock value. Allocations are left untouched.
 */
void ff_j2k_reinit(J2kComponent *comp, J2kCodingStyle *codsty)
{
    int rlno, bno, i;

    for (rlno = 0; rlno < codsty->nreslevels; rlno++){
        J2kResLevel *rl = comp->reslevel + rlno;
        for (bno = 0; bno < rl->nbands; bno++){
            J2kBand *bd = rl->band + bno;
            int nprecs = rl->num_precincts_x * rl->num_precincts_y;
            int ncblks = bd->cblknx * bd->cblkny;
            /* clear the per-precinct tag trees */
            for (i = 0; i < nprecs; i++){
                J2kPrec *pr = bd->prec + i;
                int w = pr->xi1 - pr->xi0;
                int h = pr->yi1 - pr->yi0;
                tag_tree_zero(pr->zerobits, w, h);
                tag_tree_zero(pr->cblkincl, w, h);
            }
            /* reset codeblock state; lblock restarts at 3 */
            for (i = 0; i < ncblks; i++){
                J2kCblk *cb = bd->cblk + i;
                cb->length = 0;
                cb->lblock = 3;
            }
        }
    }
}
/**
 * Free everything ff_j2k_init_component() allocated for this component:
 * per-precinct tag trees, codeblock and precinct arrays, band arrays, the
 * resolution levels, the DWT context and the sample buffer.
 * av_freep() also NULLs each pointer after freeing it.
 */
void ff_j2k_cleanup(J2kComponent *comp, J2kCodingStyle *codsty)
{
    int rlno, bno, i;

    for (rlno = 0; rlno < codsty->nreslevels; rlno++){
        J2kResLevel *rl = comp->reslevel + rlno;
        for (bno = 0; bno < rl->nbands ; bno++){
            J2kBand *bd = rl->band + bno;
            int nprecs = rl->num_precincts_x * rl->num_precincts_y;
            for (i = 0; i < nprecs; i++){
                J2kPrec *pr = bd->prec + i;
                av_freep(&pr->zerobits);
                av_freep(&pr->cblkincl);
            }
            av_freep(&bd->cblk);
            av_freep(&bd->prec);
        }
        av_freep(&rl->band);
    }
    ff_j2k_dwt_destroy(&comp->dwt);
    av_freep(&comp->reslevel);
    av_freep(&comp->data);
}
|
teleclimber/Dropserver | internal/getcleanhost/getcleanhost_test.go | package getcleanhost
import "testing"
// TestGetCleanHost runs GetCleanHost over a table of host:port inputs and
// checks both the extracted host and whether an error was expected.
func TestGetCleanHost(t *testing.T) {
	tests := []struct {
		input   string
		want    string
		wantErr bool
	}{
		{"abc.def:3000", "abc.def", false},
		{"abc.def:", "abc.def", false},
		{"abc.def", "abc.def", false},
		{"", "", false},
		{"abc.def:xyz", "abc.def", false},
		{"abc.[ def", "abc.[ def", false}, // In the end I'm not sure how to trigger an error in SplitHostPort.
	}
	for _, tc := range tests {
		t.Run(tc.input, func(t *testing.T) {
			got, err := GetCleanHost(tc.input)
			switch {
			case err != nil && !tc.wantErr:
				t.Error(err)
			case err == nil && tc.wantErr:
				t.Error("expected an error")
			}
			if got != tc.want {
				t.Errorf("Expected %v, got %v", tc.want, got)
			}
		})
	}
}
|
mbreemhaar/webcurator | webcurator-core/src/main/java/org/webcurator/core/store/Indexer.java | <reponame>mbreemhaar/webcurator<gh_stars>0
package org.webcurator.core.store;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.boot.web.client.RestTemplateBuilder;
import org.webcurator.core.store.RunnableIndex.Mode;
import org.webcurator.core.util.WebServiceEndPoint;
import org.webcurator.domain.model.core.HarvestResultDTO;
/**
 * Coordinates the (re)indexing of harvest results. Holds a static registry of
 * currently-running {@link RunnableIndex} instances, keyed first by indexer
 * name and then by harvest result OID, so that the same harvest result is
 * never indexed concurrently by the same type of indexer. Each index run is
 * executed on its own thread.
 */
public class Indexer {
    private static Log log = LogFactory.getLog(Indexer.class);
    // indexer name -> (harvest result OID -> running indexer instance)
    private static final Map<String, Map<Long, RunnableIndex>> runningIndexes = new HashMap<String, Map<Long, RunnableIndex>>();
    // Guards every access to runningIndexes. Public so collaborating classes
    // can synchronize on the same monitor.
    public static final Object lock = new Object();
    /**
     * Register an indexer as running for the given harvest result, creating
     * the per-indexer-name sub-map on first use.
     */
    public static void addRunningIndex(RunnableIndex indexer, Long harvestResultOid) {
        synchronized (lock) {
            Map<Long, RunnableIndex> indexerRunningIndexes;
            if (runningIndexes.containsKey(indexer.getName())) {
                indexerRunningIndexes = runningIndexes.get(indexer.getName());
            } else {
                indexerRunningIndexes = new HashMap<Long, RunnableIndex>();
                runningIndexes.put(indexer.getName(), indexerRunningIndexes);
            }
            indexerRunningIndexes.put(harvestResultOid, indexer);
        }
    }
    /**
     * Remove the running-index registration for the given indexer name and
     * harvest result. A no-op if no such registration exists.
     */
    public static void removeRunningIndex(String indexerName, Long harvestResultOid) {
        synchronized (lock) {
            Map<Long, RunnableIndex> indexerRunningIndexes;
            if (runningIndexes.containsKey(indexerName)) {
                indexerRunningIndexes = runningIndexes.get(indexerName);
                if (indexerRunningIndexes.containsKey(harvestResultOid)) {
                    indexerRunningIndexes.remove(harvestResultOid);
                }
            }
        }
    }
    /**
     * Return true when no indexer OTHER than the calling one is still running
     * for the given harvest result, i.e. the caller is (or would be) the last
     * one to finish.
     */
    public static boolean lastRunningIndex(String callingIndexerName, Long harvestResultOid) {
        synchronized (lock) {
            Iterator<String> it = runningIndexes.keySet().iterator();
            while (it.hasNext()) {
                String indexerName = it.next();
                if (!indexerName.equals(callingIndexerName) &&
                        containsRunningIndex(indexerName, harvestResultOid)) {
                    return false;
                }
            }
            return true;
        }
    }
    /**
     * Return true when any registered indexer is currently running for the
     * given harvest result.
     */
    public static boolean containsRunningIndex(Long harvestResultOid) {
        synchronized (lock) {
            Iterator<String> it = runningIndexes.keySet().iterator();
            while (it.hasNext()) {
                String indexerName = it.next();
                if (containsRunningIndex(indexerName, harvestResultOid)) {
                    return true;
                }
            }
            return false;
        }
    }
    /**
     * Return true when the named indexer is currently running for the given
     * harvest result.
     */
    private static boolean containsRunningIndex(String indexerName, Long harvestResultOid) {
        synchronized (lock) {
            if (runningIndexes.containsKey(indexerName)) {
                Map<Long, RunnableIndex> indexerRunningIndexes = runningIndexes.get(indexerName);
                if (indexerRunningIndexes != null && indexerRunningIndexes.containsKey(harvestResultOid)) {
                    return true;
                }
            }
            return false;
        }
    }
    // When true, indexes are created rather than only updated (see main()).
    private boolean doCreate = false;
    // The configured indexer prototypes; each run works on a copy of these.
    private List<RunnableIndex> indexers;
    public Indexer() {
        this(false);
    }
    public Indexer(boolean doCreate) {
        super();
        this.doCreate = doCreate;
    }
    /**
     * Start an index run (Mode.INDEX) for the given harvest result on every
     * enabled indexer. Each indexer works on a fresh copy for thread safety.
     */
    public void runIndex(HarvestResultDTO dto, File directory) {
        if (indexers == null || indexers.size() <= 0) {
            log.error("No indexers are defined");
        } else {
            Iterator<RunnableIndex> it = indexers.iterator();
            while (it.hasNext()) {
                RunnableIndex indexer = it.next();
                if (indexer.isEnabled()) {
                    try {
                        //Use a new indexer each time to make it thread safe
                        RunnableIndex theCopy = indexer.getCopy();
                        theCopy.initialise(dto, directory);
                        theCopy.setMode(Mode.INDEX);
                        runIndex(dto.getOid(), theCopy);
                    } catch (Exception e) {
                        log.error("Unable to instantiate indexer: " + e.getMessage(), e);
                    }
                }
            }
        }
    }
    /**
     * Start an index removal run (Mode.REMOVE) for the given harvest result on
     * every enabled indexer, mirroring {@link #runIndex(HarvestResultDTO, File)}.
     */
    public void removeIndex(HarvestResultDTO dto, File directory) {
        if (indexers == null || indexers.size() <= 0) {
            log.error("No indexers are defined");
        } else {
            Iterator<RunnableIndex> it = indexers.iterator();
            while (it.hasNext()) {
                RunnableIndex indexer = it.next();
                if (indexer.isEnabled()) {
                    try {
                        //Use a new indexer each time to make it thread safe
                        RunnableIndex theCopy = indexer.getCopy();
                        theCopy.initialise(dto, directory);
                        theCopy.setMode(Mode.REMOVE);
                        runIndex(dto.getOid(), theCopy);
                    } catch (Exception e) {
                        log.error("Unable to instantiate indexer: " + e.getMessage(), e);
                    }
                }
            }
        }
    }
    /** Return true when any indexer is still running for the harvest result. */
    public Boolean checkIndexing(Long hrOid) {
        return containsRunningIndex(hrOid);
    }
    /**
     * Register the indexer for the harvest result and run it on a new thread,
     * unless the same type of indexer is already running for that result.
     */
    private void runIndex(Long hrOid, RunnableIndex indexer) {
        //don't allow the same HR to be indexed concurrently on the same type of indexer multiple times
        if (!containsRunningIndex(indexer.getName(), hrOid)) {
            addRunningIndex(indexer, hrOid);
            new Thread(indexer).start();
        }
    }
    /**
     * Minimal "-key value" command line parser used by {@link #main(String[])}.
     */
    public static class CommandLine {
        Properties props = new Properties();
        public CommandLine(String[] args) {
            // args come in "-name value" pairs; the leading '-' is stripped
            for (int i = 0; i < args.length; i += 2) {
                props.put(args[i].substring(1), args[i + 1]);
            }
        }
        public String getArg(String key) {
            return props.getProperty(key);
        }
    }
    /**
     * Stand-alone entry point for manually (re)indexing a harvest result.
     * Expects -ti, -hrnum, -host, -port and -baseDir arguments (see syntax()).
     */
    public static void main(String[] args) {
        try {
            CommandLine cl = new CommandLine(args);
            String host = cl.getArg("host");
            int port = Integer.parseInt(cl.getArg("port"));
            Long targetInstanceOid = Long.parseLong(cl.getArg("ti"));
            int hrnum = Integer.parseInt(cl.getArg("hrnum"));
            File dir = new File(cl.getArg("baseDir"));
            // NOTE(review): dir can never be null here — `new File(...)` always
            // returns an instance (it throws NPE on a null path instead), so the
            // `dir == null` branches below are dead code; confirm and simplify.
            if (host == null || dir == null) {
                if (host == null) System.out.println("Host must be specified");
                if (dir == null) System.out.println("Directory must be specified");
                syntax();
            }
            if (!dir.exists()) {
                System.out.println("Directory does not exist");
                syntax();
            }
            HarvestResultDTO dto = new HarvestResultDTO();
            dto.setTargetInstanceOid(targetInstanceOid);
            dto.setHarvestNumber(hrnum);
            dto.setProvenanceNote("Manual Intervention");
            dto.setCreationDate(new Date());
            String baseUrl = String.format("http://%s:%d", host, port);
            Indexer indexer = new Indexer(true);
            WCTIndexer wctIndexer = new WCTIndexer(baseUrl, new RestTemplateBuilder());
            wctIndexer.setDoCreate(true);
            List<RunnableIndex> indexers = new ArrayList<RunnableIndex>();
            indexers.add(wctIndexer);
            indexer.setIndexers(indexers);
            indexer.runIndex(dto, dir);
        } catch (Exception ex) {
            log.error(ex);
            syntax();
        }
    }
    /** Print command line usage and exit with a non-zero status. */
    private static void syntax() {
        System.out.println("Syntax: ");
        System.out.println(" -ti tiOid -hrnum 1 -host hostname -port portnumber -baseDir basedir");
        System.exit(1);
    }
    public boolean isDoCreate() {
        return doCreate;
    }
    public void setIndexers(List<RunnableIndex> indexers) {
        this.indexers = indexers;
    }
    public List<RunnableIndex> getIndexers() {
        return indexers;
    }
}
|
bl0x/symbiflow-arch-defs | quicklogic/pp3/utils/timing.py | <gh_stars>100-1000
import statistics
from copy import deepcopy
from collections import defaultdict, namedtuple
from data_structs import VprSwitch, MuxEdgeTiming, DriverTiming, SinkTiming
from utils import yield_muxes, add_named_item
# =============================================================================
def linear_regression(xs, ys):
    """
    Fit a simple least-squares line to the points (xs, ys).

    Returns the pair (a, b) such that f(x) = a * x + b best approximates
    the data. See https://en.wikipedia.org/wiki/Simple_linear_regression
    """
    mean_x = statistics.mean(xs)
    mean_y = statistics.mean(ys)

    # Sample covariance and variance (unnormalized); the common 1/n factor
    # cancels in the slope ratio.
    covariance = sum(
        (x - mean_x) * (y - mean_y) for x, y in zip(xs, ys)
    )
    variance = sum((x - mean_x) ** 2 for x, _ in zip(xs, ys))

    slope = covariance / variance
    intercept = mean_y - slope * mean_x
    return slope, intercept
# =============================================================================
def create_vpr_switch(type, tdel, r, c):
    """
    Build a VPR switch whose name encodes its electrical parameters.

    Parameters:
        type: VPR switch type (see the VPR documentation for valid values)
        tdel: constant propagation delay [s]
        r:    internal resistance [ohm]
        c:    internal capacitance, active only when the switch is "on" [F]

    Returns a VprSwitch object with an auto-generated name.
    """
    # Delay is encoded in ns and capacitance in pF to keep the name readable.
    name = "_".join([
        "sw",
        "T{:>08.6f}".format(tdel * 1e9),
        "R{:>08.6f}".format(r),
        "C{:>010.6f}".format(c * 1e12),
    ])

    return VprSwitch(
        name=name,
        type=type,
        t_del=tdel,
        r=r,
        c_in=0.0,
        c_out=0.0,
        c_int=c,
    )
def compute_switchbox_timing_model(switchbox, timing_data):
    """
    Processes switchbox timing data.

    The timing data is provided in a form of delays for each mux edge (path
    from its input pin to the output pin). The delay varies with number of
    active loads of the source.

    This data is used to compute driver resistances and load capacitances
    as well as constant propagation delays.

    The timing model assumes that each output of a mux has a certain resistance
    and constant propagation time. Then, every load has a capacitance which is
    connected when it is active. All capacitances are identical. The input
    timing data does not allow to distinguish between them. Additionally, each
    load can have a constant propagation delay.

    For multiplexers that are driver by switchbox inputs, fake drivers are
    assumed solely for the purpose of the timing model.

    Parameters:
        switchbox:   the switchbox whose connections define drivers and sinks
        timing_data: nested dict indexed by [stage][switch][mux][pin] mapping
                     load count -> list of delays in ns

    Returns a tuple (driver_timing, sink_map) where driver_timing maps a
    driver key to a Timing namedtuple and sink_map maps a driver key to the
    list of its sink keys (stage_id, switch_id, mux_id, pin_id).
    """
    # A helper struct
    Timing = namedtuple("Timing", "driver_r driver_tdel sink_c sink_tdel")
    # Delay scaling factor
    FACTOR = 1.0
    # Error threshold (for reporting) in seconds (0.4 ns)
    ERROR_THRESHOLD = 0.4 * 1e-9
    # Build a map of sinks for each driver
    # For internal drivers key = (stage_id, switch_id, mux_id)
    # For external drivers key = (stage_id, input_name)
    sink_map = defaultdict(lambda: [])
    for connection in switchbox.connections:
        src = connection.src
        dst = connection.dst
        dst_key = (dst.stage_id, dst.switch_id, dst.mux_id, dst.pin_id)
        src_key = (src.stage_id, src.switch_id, src.mux_id)
        sink_map[src_key].append(dst_key)
    for pin in switchbox.inputs.values():
        for loc in pin.locs:
            dst_key = (loc.stage_id, loc.switch_id, loc.mux_id, loc.pin_id)
            src_key = (loc.stage_id, pin.name)
            sink_map[src_key].append(dst_key)
    # Compute timing model for each driver
    driver_timing = {}
    for driver, sinks in sink_map.items():
        # Collect timing data for each sink edge
        edge_timings = {}
        for stage_id, switch_id, mux_id, pin_id in sinks:
            # Try getting timing data. If not found then probably we are
            # computing timing for VCC or GND input.
            try:
                data = timing_data[stage_id][switch_id][mux_id][pin_id]
            except KeyError:
                continue
            # Sanity check. The number of load counts must be equal to the
            # number of sinks for the driver.
            assert len(data) == len(sinks)
            # Take the worst case (max), convert ns to seconds.
            data = {n: max(d) * 1e-9 for n, d in data.items()}
            # Store
            key = (stage_id, switch_id, mux_id, pin_id)
            edge_timings[key] = data
        # No timing data, probably it is a VCC or GND input
        if not len(edge_timings):
            continue
        # Compute linear regression for each sink data
        # (delay as a function of active load count)
        coeffs = {}
        for sink in sinks:
            xs = sorted(edge_timings[sink].keys())
            ys = [edge_timings[sink][x] for x in xs]
            a, b = linear_regression(xs, ys)
            # Cannot have a < 0 (decreasing relation). If such thing happens
            # force the regression line to be flat.
            if a < 0.0:
                print(
                    "WARNING: For '{} {}' the delay model slope is negative! (a={:.2e})"
                    .format(switchbox.type, sink, a)
                )
                a = 0.0
            # Cannot have any delay higher than the model. Check if all delays
            # lie below the regression line and if not then shift the line up
            # accordingly.
            for x, y in zip(xs, ys):
                t = a * x + b
                if y > t:
                    b += y - t
            coeffs[sink] = (a, b)
        # Assumed driver resistance [ohm]
        driver_r = 1.0
        # Compute driver's Tdel (smallest intercept over all sinks)
        driver_tdel = min([cfs[1] for cfs in coeffs.values()])
        # Compute per-sink Tdel (remainder of each sink's intercept)
        sink_tdel = {s: cfs[1] - driver_tdel for s, cfs in coeffs.items()}
        # Compute sink capacitance. Since we have multiple edge timings that
        # should yield the same capacitance, compute one for each timing and
        # then choose the worst case (max).
        sink_cs = {
            s: (cfs[0] / (FACTOR * driver_r) - sink_tdel[s])
            for s, cfs in coeffs.items()
        }
        sink_c = max(sink_cs.values())
        # Sanity check
        assert sink_c >= 0.0, (switchbox.type, sink, sink_c)
        # Compute error of the delay model
        for sink in sinks:
            # Compute for this sink
            error = {}
            for n, true_delay in edge_timings[sink].items():
                model_delay = driver_tdel + FACTOR * driver_r * sink_c * n + sink_tdel[
                    sink]
                error[n] = true_delay - model_delay
            max_error = max([abs(e) for e in error.values()])
            # Report the error
            if max_error > ERROR_THRESHOLD:
                print(
                    "WARNING: Error of the timing model of '{} {}' is too high:"
                    .format(switchbox.type, sink)
                )
                print("--------------------------------------------")
                print("| # loads | actual   | model    | error    |")
                print("|---------+----------+----------+----------|")
                for n in edge_timings[sink].keys():
                    print(
                        "| {:<8}| {:<9.3f}| {:<9.3f}| {:<9.3f}|".format(
                            n, 1e9 * edge_timings[sink][n],
                            1e9 * (edge_timings[sink][n] - error[n]),
                            1e9 * error[n]
                        )
                    )
                print("--------------------------------------------")
                print("")
        # Store the data
        driver_timing[driver] = Timing(
            driver_r=driver_r,
            driver_tdel=driver_tdel,
            sink_tdel={s: d
                       for s, d in sink_tdel.items()},
            sink_c=sink_c
        )
    return driver_timing, sink_map
def populate_switchbox_timing(
        switchbox, driver_timing, sink_map, vpr_switches
):
    """
    Populates the switchbox timing model by annotating its muxes with the timing
    data. Creates new VPR switches with required parameters or uses existing
    ones if already created.

    Parameters:
        switchbox:     switchbox whose mux edges receive timing annotations
        driver_timing: dict of driver key -> Timing, as produced by
                       compute_switchbox_timing_model()
        sink_map:      dict of driver key -> list of sink keys, as produced by
                       compute_switchbox_timing_model()
        vpr_switches:  dict of switch name -> VprSwitch, extended in place
    """

    # Populate timing data to the switchbox
    for driver, timing in driver_timing.items():

        # Driver VPR switch (Tdel + series resistance, no capacitance);
        # add_named_item() deduplicates against already-created switches.
        driver_vpr_switch = create_vpr_switch(
            type="mux",
            tdel=timing.driver_tdel,
            r=timing.driver_r,
            c=0.0,
        )
        driver_vpr_switch = add_named_item(
            vpr_switches, driver_vpr_switch, driver_vpr_switch.name
        )

        # Annotate all driver's edges
        for sink in sink_map[driver]:
            stage_id, switch_id, mux_id, pin_id = sink

            # Sink VPR switch (per-sink Tdel + load capacitance)
            sink_vpr_switch = create_vpr_switch(
                type="mux",
                tdel=timing.sink_tdel[sink],
                r=0.0,
                c=timing.sink_c,
            )
            sink_vpr_switch = add_named_item(
                vpr_switches, sink_vpr_switch, sink_vpr_switch.name
            )

            # Get the mux
            stage = switchbox.stages[stage_id]
            switch = stage.switches[switch_id]
            mux = switch.muxes[mux_id]

            # Each mux input pin must be annotated at most once.
            assert pin_id not in mux.timing
            # NOTE(review): SinkTiming receives the whole sink_tdel dict here,
            # while the sink VPR switch above uses the per-sink scalar
            # timing.sink_tdel[sink] — confirm whether the dict is intended.
            mux.timing[pin_id] = MuxEdgeTiming(
                driver=DriverTiming(
                    tdel=timing.driver_tdel,
                    r=timing.driver_r,
                    vpr_switch=driver_vpr_switch.name
                ),
                sink=SinkTiming(
                    tdel=timing.sink_tdel,
                    c=timing.sink_c,
                    vpr_switch=sink_vpr_switch.name
                )
            )
def copy_switchbox_timing(src_switchbox, dst_switchbox):
    """
    Copy all mux timing information from src_switchbox into the
    identically-located muxes of dst_switchbox.
    """
    # Walk the destination muxes and pull the timing of the mux at the same
    # (stage, switch, mux) location in the source switchbox.
    for stage, switch, mux in yield_muxes(dst_switchbox):
        source_mux = (
            src_switchbox
            .stages[stage.id]
            .switches[switch.id]
            .muxes[mux.id]
        )
        mux.timing = deepcopy(source_mux.timing)
# =============================================================================
def add_vpr_switches_for_cell(cell_type, cell_timings):
    """
    Creates VPR switches modeling the IOPATH delays read from SDF file(s)
    for the given cell type.

    Returns a dict mapping switch names to VprSwitch objects.
    """
    vpr_switches = {}

    # Keep only the timings that belong to the requested cell type.
    relevant = {
        k: v
        for k, v in cell_timings.items()
        if k.startswith(cell_type)
    }

    for cell_data in relevant.values():
        for instance, inst_data in cell_data.items():
            for timing_data in inst_data.values():

                # Only IOPATH entries translate into VPR switches.
                if timing_data["type"].lower() != "iopath":
                    continue

                # Switch name encodes the cell, instance and pin-to-pin path.
                name = "{}.{}.{}.{}".format(
                    cell_type, instance, timing_data["from_pin"],
                    timing_data["to_pin"]
                )
                # Use the average of the "slow" corner as the switch delay.
                tdel = timing_data["delay_paths"]["slow"]["avg"]

                sw = VprSwitch(
                    name=name,
                    type="mux",
                    t_del=tdel,
                    r=0.0,
                    c_in=0.0,
                    c_out=0.0,
                    c_int=0.0,
                )
                vpr_switches[sw.name] = sw

    return vpr_switches
|
taoyonggang/litemall | litemall-admin-api/src/test/java/org/linlinjava/litemall/admin/text.java | package org.linlinjava.litemall.admin;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.linlinjava.litemall.admin.config.QrcodeProperties;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
@WebAppConfiguration
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest
public class text {

    /** QR-code settings bound from application properties by Spring. */
    @Autowired
    private QrcodeProperties myProps;

    /**
     * Smoke test: verifies that the QR-code properties bean is injected and
     * prints the configured image URL.
     */
    @Test
    public void propsTest() {
        // Read the property once and reuse it; the original assigned `url`
        // but then called the getter a second time, leaving `url` unused.
        String url = myProps.getImgageUrl();
        System.out.println("simpleProp: " + url);
    }
}
|
Landgate/Staff-Calibration | staff_calibration/forms.py | from django import forms
from datetime import date
# import models
from .models import uCalibrationUpdate
from staffs.models import Staff, DigitalLevel
# make your forms
class StaffForm(forms.ModelForm):
    """
    Model form for recording a staff calibration (uCalibrationUpdate).

    Restricts the staff and digital-level choices to the requesting user's
    authority unless the user is an admin (is_staff), in which case all
    records are selectable.
    """

    def __init__(self, *args, **kwargs):
        # The view passes the request user via the 'user' kwarg; it must be
        # popped before calling the parent constructor, which does not accept it.
        # NOTE(review): if 'user' is omitted, user is None and the is_staff
        # access below raises AttributeError — confirm all callers pass it.
        user = kwargs.pop('user', None)
        super(StaffForm, self).__init__(*args, **kwargs)
        if user.is_staff:
            # Admins can pick any staff/level record.
            self.fields['staff_number'].queryset = Staff.objects.all()
            self.fields['level_number'].queryset = DigitalLevel.objects.all()
        else:
            # Regular users only see equipment owned by their own authority.
            self.fields['staff_number'].queryset = Staff.objects.filter(staff_owner = user.authority)
            self.fields['level_number'].queryset = DigitalLevel.objects.filter(level_owner = user.authority)

    class Meta:
        model = uCalibrationUpdate
        fields = ['staff_number', 'level_number', 'calibration_date', 'first_name', 'last_name','start_temperature', 'end_temperature', 'document']
        widgets = {
            'staff_number': forms.Select(attrs={'required': 'true'}),
            'level_number': forms.Select(attrs={'required': 'true'}),
            'calibration_date': forms.DateInput(format=('%d-%m-%Y'), attrs={'placeholder':'Select a date', 'type':'date'}),
        }

    # Extra field widgets/attributes overriding the model defaults.
    first_name = forms.CharField(required=False, widget=forms.TextInput(attrs={'placeholder':'Enter first name'}))
    last_name = forms.CharField(required=False, widget=forms.TextInput(attrs={'placeholder':'Enter last name'}))
    start_temperature = forms.FloatField(widget=forms.NumberInput(attrs={'placeholder':'Enter between 0 and 45'}))
    end_temperature = forms.FloatField(widget=forms.NumberInput(attrs={'placeholder':'Enter between 0 and 45'}))
    document = forms.FileField(widget=forms.FileInput(attrs={'accept' : '.csv, .txt'}))

    def clean_calibration_date(self):
        # Reject calibration dates in the future.
        calibration_date = self.cleaned_data['calibration_date']
        if calibration_date > date.today():
            raise forms.ValidationError("The observation date cannot be in the future!")
        return calibration_date
|
Sofiasotog/signature_ruby | app/controllers/account.js | <filename>app/controllers/account.js
const getModelByName = require('../models/getModelByName');
/**
 * POST signup — create a new user account.
 *
 * Expects the user payload under `req.body.user`. Responds with
 * {success: true, message} on success and {success: false, ...} when the
 * payload is missing or the model rejects it (HTTP 200 either way, matching
 * the other handlers in this controller); unexpected synchronous errors
 * yield HTTP 500.
 */
const signup = (req, res) => {
    if (!req.body.user) {
        return res.status(200).send({
            success: false,
            error: 'user not found' // fixed typo: was 'user not fund'
        });
    }
    const UserModel = getModelByName('user_conservacion');
    try {
        UserModel.signup(req.body.user)
            .then(() => {
                res.status(200).send({ success: true, message: 'usuario creado correctamente' });
            })
            .catch(error => res.status(200).send({ success: false, message: error.message }));
    } catch (err) {
        res.status(500).send({ success: false, error: err.message });
    }
};
/**
 * GET technicians — fetch the technician users from the model and return
 * them as {success: true, data}. Model rejections are reported with
 * HTTP 200 and success: false; synchronous failures with HTTP 500.
 */
const getTechnicians = (req, res) => {
    const UserModel = getModelByName('user_conservacion');

    const onSuccess = (data) => {
        console.log(data);
        res.status(200).send({ success: true, data: data });
    };
    const onFailure = (err) => {
        res.status(200).send({ success: false, error: err.message });
    };

    try {
        UserModel.getTechnicians().then(onSuccess).catch(onFailure);
    } catch (err) {
        res.status(500).send({ success: false, error: err.message });
    }
};
/*const confirmEmail = (req,res)=>{
const User = getModelByName('user_conservacion');
try{
User.confirmAccount(req.params.token)
.then(()=>{
res.status(200).send({success:true, message:"User confirmated succesfully"});
}).catch(err=>res.status(200).send({success:false, message: err.message}));
}catch(err){
res.status(500).send({success:false, error:err.message});
}
};*/
/**
 * POST login — authenticate a user by matricula and password.
 * All failure responses (missing credentials, model rejection, synchronous
 * errors) use HTTP 200 with {success: false, error}, matching the original.
 */
const login = (req, res) => {
    const { matricula, password } = req.body;
    const reject = (error) => res.status(200).send({ success: false, error });

    // Validate required credentials before touching the model.
    if (!matricula) return reject("Matricula not provided");
    if (!password) return reject("Password not provided");

    const User = getModelByName('user_conservacion');
    try {
        User.login(matricula, password)
            .then(data => {
                res.status(200).send({ success: true, data });
            })
            .catch(err => reject(err.message));
    } catch (err) {
        reject(err.message);
    }
};
// GET current user — look up the authenticated user by req.user._id and
// return it as {success: true, data: {user}}.
// NOTE(review): the guard tests req.body.email but the lookup uses
// req.user._id — presumably the email acts as a "session present" probe set
// by the auth middleware; confirm against the middleware and callers.
const current_user = (req,res)=>{
    if(!req.body.email) return res.status(200).send({success:false, data:{user:null}});
    const User = getModelByName('user_conservacion');
    return User.findUserById(req.user._id)
    .then(user=>{
        res.status(200).send({success:true,data:{user}});
    }).catch(err=>res.status(200).send({success:false,error:err.message}));
};
module.exports = {signup, login, current_user,getTechnicians}; |
rex/destiny | app/models/faction_definition.rb | class FactionDefinition < ActiveRecord::Base
end
|
e-mayo/mscreen | mscreen/autodocktools_prepare_py3k/MolKit/pdb2pqr/forcefield.py | #!/usr/bin/python2 -O
"""
Forcefield script
This module takes a pdblist as input and replaces the occupancy and
tempfactor fields with charge and radius fields, with values as defined
by a particular forcefield. The forcefield structure is modeled off of
the structures.py file, where each forcefield is considered a chain of
residues of atoms.
<NAME> (<EMAIL>)
Washington University in St. Louis
"""
__date__ = "6 October 2003"
__author__ = "<NAME>"
AMBER_FILE = "AMBER.DAT"
CHARMM_FILE = "CHARMM.DAT"
PARSE_FILE = "PARSE.DAT"
import _py2k_string as string
import sys
import getopt
import os
class Forcefield:
"""
Forcefield class
The forcefield class contains definitions for a given forcefield.
Each forcefield object contains a dictionary of residues, with each
residue containing a dictionary of atoms. Dictionaries are used
instead of lists as the ordering is not important. The forcefield
definition files are unedited, directly from the forcefield - all
transformations are done within.
"""
def __init__(self, ff):
"""
Initialize the class by parsing the definition file
Parameters
ff: The name of the forcefield (string)
"""
self.residues = {}
self.name = ff
defpath = ""
if ff == "amber":
defpath = AMBER_FILE
elif ff == "charmm":
defpath = CHARMM_FILE
elif ff == "parse":
defpath = PARSE_FILE
else:
raise ValueError("Invalid forcefield %s!" % ff)
if not os.path.isfile(defpath):
for path in sys.path:
testpath = "%s/%s" % (path, defpath)
if os.path.isfile(testpath):
defpath = testpath
break
if not os.path.isfile(defpath):
raise ValueError("Unable to find forcefield %s!" % defpath)
file = open(defpath)
lines = file.readlines()
for line in lines:
if not line.startswith("#"):
fields = string.split(line)
resname = fields[0]
atomname = fields[1]
charge = float(fields[2])
radius = float(fields[3])
atom = ForcefieldAtom(atomname, charge, radius)
myResidue = self.getResidue(resname)
if myResidue == None:
myResidue = ForcefieldResidue(resname)
self.residues[resname] = myResidue
myResidue.addAtom(atom)
def getResidue(self, resname):
"""
Return the residue object with the given resname
Parameters
resname: The name of the residue (string)
Returns
residue: The residue object (ForcefieldResidue)
"""
residue = None
try:
residue = self.residues[resname]
except KeyError:
pass
return residue
def getParams(self, residue, name):
"""
Get the parameters associated with the input fields.
The residue itself is needed instead of simply its name
because the forcefield may use a different residue name
than the standard amino acid name.
Parameters
residue: The residue (residue)
name: The atom name (string)
Returns
charge: The charge on the atom (float)
radius: The radius of the atom (float)
"""
charge = None
radius = None
resname = ""
atomname = ""
if self.name == "amber":
resname, atomname = self.getAmberParams(residue, name)
elif self.name == "charmm":
resname, atomname = self.getCharmmParams(residue, name)
elif self.name == "parse":
resname, atomname = self.getParseParams(residue, name)
defresidue = self.getResidue(resname)
if defresidue == None:
return charge, radius
atom = defresidue.getAtom(atomname)
if atom != None:
charge = atom.get("charge")
radius = atom.get("radius")
return charge, radius
    def getAmberParams(self, residue, name):
        """
        Get the forcefield definitions from the Amber database

        Translates the standard residue/atom naming into Amber's scheme:
        protonation-state variants (CYX, HID/HIE/HIP, GLH, ASH), terminal
        prefixes (N/C) and a handful of atom renames. The substitution
        order below is significant — later checks rely on the resname
        chosen by earlier ones.

        Parameters
            residue: The residue (residue)
            name:    The atom name (string)
        Returns
            resname:  The name of the amber residue
            atomname: The name of the amber atom
        """
        atomname = name
        # type 4 marks nucleic acids, which carry their Amber name in "naname"
        type = residue.get("type")
        if type == 4:
            resname = residue.get("naname")
        else:
            resname = residue.get("name")

        # Residue Substitutions
        # CYS without the thiol hydrogen is a disulfide-bridged cysteine (CYX)
        if residue.get("name") == "CYS" and "HG" not in residue.get("map"):
            resname = "CYX"
        elif residue.get("name") == "HIS":
            # Histidine protonation state follows which ring hydrogens exist
            if "HD1" in residue.get("map") and "HE2" in residue.get("map"):
                resname = "HIP"
            elif "HD1" in residue.get("map"):
                resname = "HID"
            elif "HE2" in residue.get("map"):
                resname = "HIE"
            else:
                resname = "HID" # Default for no hydrogens
        elif residue.get("name") == "HSP":
            resname = "HIP"
        elif residue.get("name") == "HSE":
            resname = "HIE"
        elif residue.get("name") == "HSD":
            resname = "HID"
        elif residue.get("name") == "GLU" or residue.get("name") == "GLH":
            # Protonated glutamate; Amber puts the proton on OE2, so swap
            # the OE1/OE2 pair when the input protonates OE1 instead
            if "HE1" in residue.get("map"):
                resname = "GLH"
                if atomname == "HE1": atomname = "HE2"
                elif atomname == "OE1": atomname = "OE2"
                elif atomname == "OE2": atomname = "OE1"
            elif "HE2" in residue.get("map"): resname = "GLH"
        elif residue.get("name") == "ASP" or residue.get("name") == "ASH":
            # Protonated aspartate; analogous OD1/OD2 swap
            if "HD1" in residue.get("map"):
                resname = "ASH"
                if atomname == "HD1": atomname = "HD2"
                elif atomname == "OD1": atomname = "OD2"
                elif atomname == "OD2": atomname = "OD1"
            elif "HD2" in residue.get("map"): resname = "ASH"

        # Terminal residues get a C/N prefix (e.g. NALA, CGLY)
        if residue.get("isCterm") == 1:
            resname = "C" + resname
        elif residue.get("isNterm") == 1:
            resname = "N" + resname

        # Atom Substitutions
        if resname == "WAT":
            if atomname == "O": atomname = "OW"
            elif atomname == "H1": atomname = "HW"
            elif atomname == "H2": atomname = "HW"
        elif resname == "ILE":
            if atomname == "CD": atomname = "CD1"
        if resname[0] == "N": # N-terminal
            if atomname == "H": atomname = "H1"
        if (resname == "CCYS" or resname == "NCYS") and atomname == "HG": atomname = "HSG"
        return resname, atomname
def getParseParams(self, residue, name):
"""
Get the forcefield definitions from the Parse database
Parameters
residue: The residue (residue)
name: The atom name (string)
Returns
resname: The name of the amber residue
atomname: The name of the amber atom
"""
atomname = name
resname = residue.get("name")
# Terminal/Water Substitutions
if residue.get("isNterm") and resname != "ACE":
if resname == "PRO":
resname = "PR+"
if atomname == "H2": atomname = "HN1"
elif atomname == "H3": atomname = "HN2"
elif atomname in ["N","H","H2","H3","CA","HA","C","O"]:
resname = "BK+"
if atomname == "H": atomname = "H1"
elif residue.get("isCterm"):
if atomname in ["N","H","HA","CA","C","O","OXT"]:
resname = "BK-"
if atomname == "O": atomname = "O1"
elif atomname == "OXT": atomname = "O2"
elif residue.get("type") == 3:
resname = "H2O"
if atomname == "O": atomname = "OH"
elif atomname == "H1": atomname = "HH1"
elif atomname == "H2": atomname = "HH2"
# Residue Substitutions
if resname == "HSD": resname = "HID"
elif resname in ["HIE","HSE"]: resname = "HIS"
elif resname in ["HIP","HSP"]: resname = "HI+"
elif resname == "ILE":
if atomname == "HG12": atomname = "HG11"
elif atomname == "HG13": atomname = "HG12"
elif atomname == "CD": atomname = "CD1"
elif resname == "CYS" and "HG" not in residue.get("map"):
resname = "CSS"
elif resname == "HIS":
if "HD1" in residue.get("map") and "HE2" in residue.get("map"):
resname = "HI+"
elif "HD1" in residue.get("map"):
resname = "HID"
elif "HE2" in residue.get("map"):
resname = "HIS"
elif resname == "GLU" or resname == "GLH":
if "HE1" in residue.get("map"):
resname = "GL0"
if atomname == "HE1": atomname = "HE2"
elif atomname == "OE1": atomname = "OE2"
elif atomname == "OE2": atomname = "OE1"
elif "HE2" in residue.get("map"): resname = "GL0"
elif resname == "ASP" or resname == "ASH":
if "HD1" in residue.get("map"):
resname = "AS0"
if atomname == "HD1": atomname = "HD2"
elif atomname == "OD1": atomname = "OD2"
elif atomname == "OD2": atomname = "OD1"
elif "HD2" in residue.get("map"): resname = "AS0"
# Hydrogen Substitutions
if atomname == "H": atomname = "HN"
elif atomname == "HA2": atomname = "HA1"
elif atomname == "HA3": atomname = "HA2"
elif atomname == "HB2" and resname not in ["ALA"]: atomname = "HB1"
elif atomname == "HB3" and resname not in ["ALA"]: atomname = "HB2"
elif atomname == "HD2" and resname not in ["HIS","HI+","HID"]: atomname = "HD1"
elif atomname == "HD3" and resname not in ["HIS","HI+","HID"]: atomname = "HD2"
elif atomname == "HE2" and resname not in ["TRP","HIS","HI+","HID","GL0"]: atomname = "HE1"
elif atomname == "HE3" and resname not in ["TRP","HIS","HI+","HID"]: atomname = "HE2"
elif atomname == "HG2": atomname = "HG1"
elif atomname == "HG3": atomname = "HG2"
return resname, atomname
def getCharmmParams(self, residue, name):
"""
Get the forcefield definitions from the Charmm database
Parameters
residue: The residue (residue)
name: The atom name (string)
Returns
resname: The name of the Charmm residue
atomname: The name of the Charmm atom
"""
resname = residue.get("name")
atomname = name
# Nucleic Acid Substitutions
if residue.get("type") == 4:
resname = resname[0]
if resname == "A": resname = "ADE"
elif resname == "C": resname = "CYT"
elif resname == "G": resname = "GUA"
elif resname == "T":
resname = "THY"
if atomname == "C7": atomname = "C5M"
elif atomname == "H71": atomname = "H51"
elif atomname == "H72": atomname = "H52"
elif atomname == "H73": atomname = "H53"
elif resname == "U": resname = "URA"
if atomname == "H5'1": atomname = "H5'"
elif atomname == "H5'2": atomname = "H5''"
elif atomname == "H2'1": atomname = "H2'"
elif atomname in ["H2'2","HO'2"]: atomname = "H2''"
if residue.getAtom("O2'") == None:
if atomname in ["C2'","H2'","H2''"]: resname = "DEO1"
if residue.getAtom("H5T") != None:
if atomname in ["H5T","O5'","C5'"]: resname = "5TER"
if residue.getAtom("H3T") != None:
if atomname in ["H3T","O3'","C3'"]: resname = "3TER"
# Terminal/Water Substitutions
if residue.get("isNterm"):
if resname == "GLY" and atomname in ["N","H","H2","H3","CA","HA2","HA3"]:
resname = "GLYP"
if atomname == "H": atomname = "HT1"
elif atomname == "H2": atomname = "HT2"
elif atomname == "H3": atomname = "HT3"
elif resname == "PRO" and atomname in ["N","HN1","HN2","CD","CA","HD1","HD2","HA","H2","H3"]:
resname = "PROP"
if atomname == "H2": atomname = "HN1"
elif atomname == "H3": atomname = "HN2"
elif resname == "ACE":
if atomname == "CH3": atomname = "CAY"
elif atomname == "HH31": atomname = "HY1"
elif atomname == "HH32": atomname = "HY2"
elif atomname == "HH33": atomname = "HY3"
elif atomname == "C": atomname = "CY"
elif atomname == "O": atomname = "OY"
else:
if atomname in ["N","H","H2","H3","CA","HA"]:
resname = "NTER"
if atomname == "H": atomname = "HT1"
elif atomname == "H2": atomname = "HT2"
elif atomname == "H3": atomname = "HT3"
elif residue.get("isCterm"):
if atomname in ["O","OXT","C"]:
resname = "CTER"
if atomname == "O":
atomname = "OT1"
elif atomname == "OXT":
atomname = "OT2"
elif residue.get("type") == 3:
resname = "TP3M"
if atomname == "O": atomname = "OH2"
# Residue substitutions
if resname == "ILE":
if atomname == "CD1": atomname = "CD"
elif atomname == "HD11": atomname = "HD1"
elif atomname == "HD12": atomname = "HD2"
elif atomname == "HD13": atomname = "HD3"
elif atomname == "HG12": atomname = "HG11"
elif atomname == "HG13": atomname = "HG12"
elif resname == "CYS" and "HG" not in residue.get("map"):
if atomname == "CB":
resname = "DISU"
atomname = "1CB"
elif atomname == "SG":
resname = "DISU"
atomname = "1SG"
elif resname == "HIS":
if "HD1" in residue.get("map") and "HE2" in residue.get("map"):
resname = "HSP"
elif "HD1" in residue.get("map"):
resname = "HSD"
elif "HE2" in residue.get("map"):
resname = "HSE"
elif resname == "GLU" or resname == "GLH":
if "HE1" in residue.get("map"):
if atomname == "HE1": atomname = "HE2"
elif atomname == "OE1": atomname = "OE2"
elif atomname == "OE2": atomname = "OE1"
if atomname in ["CG","HG3","HG1","HG2","CD","OE1","OE2","HE2"]: resname = "GLUP"
else: resname == "GLU"
elif "HE2" in residue.get("map"):
if atomname in ["CG","HG3","HG1","HG2","CD","OE1","OE2","HE2"]: resname = "GLUP"
else: resname == "GLU"
elif resname == "ASP" or resname == "ASH":
if "HD1" in residue.get("map"):
if atomname == "HD1": atomname = "HD2"
elif atomname == "OD1": atomname = "OD2"
elif atomname == "OD2": atomname = "OD1"
if atomname in ["CB","HB3","HB1","HB2","CG","OD1","OD2","HD2"]: resname = "ASPP"
else: resname == "ASP"
elif "HD2" in residue.get("map"):
if atomname in ["CB","HB3","HB1","HB2","CG","OD1","OD2","HD2"]: resname = "ASPP"
else: resname == "ASP"
# HETATM Substitutions
if resname == "ACE":
if atomname == "CH3": atomname = "CAY"
elif atomname == "HH31": atomname = "HY1"
elif atomname == "HH32": atomname = "HY2"
elif atomname == "HH33": atomname = "HY3"
elif atomname == "C": atomname = "CY"
elif atomname == "O": atomname = "OY"
elif resname == "ADP":
atomname = string.replace(atomname,"*","\'")
# Hydrogen Substitutions
if atomname == "H": atomname = "HN"
elif atomname == "HA2": atomname = "HA1"
elif atomname == "HA3": atomname = "HA2"
elif atomname == "HB2" and resname not in ["ALA"]: atomname = "HB1"
elif atomname == "HB3" and resname not in ["ALA"]: atomname = "HB2"
elif atomname == "HD2" and resname not in ["HSP","HSE","HSD","ASPP"]: atomname = "HD1"
elif atomname == "HD3" and resname not in ["HIS","HSE","HSD"]: atomname = "HD2"
elif atomname == "HE2" and resname not in ["TRP","HSP","HSE","HSD","GLUP"]: atomname = "HE1"
elif atomname == "HE3" and resname not in ["TRP","HSP","HSE","HSD"]: atomname = "HE2"
elif atomname == "HG2": atomname = "HG1"
elif atomname == "HG3": atomname = "HG2"
elif atomname == "HG" and resname in ["SER","CYS"]: atomname = "HG1"
return resname, atomname
class ForcefieldResidue:
    """
    ForcefieldResidue class

    Holds the forcefield atoms belonging to one residue, keyed by atom
    name so lookups are a single dictionary access.
    """

    def __init__(self, name):
        """
        Initialize an empty ForcefieldResidue.

        Parameters
            name: The name of the residue (string)
        """
        self.name = name   # residue name, e.g. "ALA"
        self.atoms = {}    # atom name -> forcefield atom object

    def addAtom(self, atom):
        """
        Register an atom under its own name.

        Parameters
            atom: The atom to be added (atom)
        """
        self.atoms[atom.get("name")] = atom

    def getAtom(self, atomname):
        """
        Look up an atom by name.

        Parameters
            atomname: The name of the atom (string)
        Returns
            The stored atom object, or None when this residue has no
            atom with the given name.
        """
        return self.atoms.get(atomname)
class ForcefieldAtom:
    """
    ForcefieldAtom class

    Per-atom forcefield parameters: the atom name, its partial charge,
    and its radius.
    """

    def __init__(self, name, charge, radius):
        """
        Initialize the object.

        Parameters
            name:   The atom name (string)
            charge: The charge on the atom (float)
            radius: The radius of the atom (float)
        """
        self.name = name
        self.charge = charge
        self.radius = radius

    def get(self, name):
        """
        Return the member of this object called *name*.

        Parameters
            name: The name of the member (string), e.g. "name",
                  "charge" or "radius"
        Returns
            The value of that member
        Raises
            ValueError when no such member exists
        """
        try:
            return getattr(self, name)
        except AttributeError:
            message = "Unable to access object \"%s\" in class ForcefieldAtom" % name
            raise ValueError(message)
|
# JuanitoFatas/faulty | spec/cache/null_spec.rb (dataset row header, kept as a comment)
# frozen_string_literal: true
# Specs for the null cache backend: a cache that stores nothing.
RSpec.describe Faulty::Cache::Null do
  subject(:cache) { described_class.new }

  # Writes are discarded, so every read is a miss.
  it 'reads nothing after writing' do
    cache.write('foo', 'bar')
    expect(cache.read('foo')).to eq(nil)
  end

  # A backend that never stores anything has nothing that can fail.
  it 'is fault_tolerant' do
    expect(cache.fault_tolerant?).to eq(true)
  end
end
|
// EgorBo/coreclr | src/binder/fusionassemblyname.cpp (dataset row header, kept as a comment)
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// ============================================================
//
// FusionAssemblyName.cpp
//
// Implements the CAssemblyName class
//
// ============================================================
#include <windows.h>
#include <winerror.h>
#include "strongname.h"
#include "fusionhelpers.hpp"
#include "fusionassemblyname.hpp"
#include <strsafe.h>
#include "shlwapi.h"
#include "assemblyidentity.hpp"
#include "textualidentityparser.hpp"
#define DISPLAY_NAME_DELIMITER W(',')
#define DISPLAY_NAME_DELIMITER_STRING W(",")
#define VERSION_STRING_SEGMENTS 4
#define REMAINING_BUFFER_SIZE ((*pccDisplayName) - (pszBuf - szDisplayName))
// ---------------------------------------------------------------------------
// Private Helpers
// ---------------------------------------------------------------------------
namespace
{
    // Derive the public key token (the hash of the public key) from a
    // full public key blob. *ppbSN is allocated by the strong-name API;
    // the caller releases it with StrongNameFreeBuffer.
    HRESULT GetPublicKeyTokenFromPKBlob(LPBYTE pbPublicKeyToken, DWORD cbPublicKeyToken,
                                        LPBYTE *ppbSN, LPDWORD pcbSN)
    {
        if (StrongNameTokenFromPublicKey(pbPublicKeyToken, cbPublicKeyToken, ppbSN, pcbSN))
        {
            return S_OK;
        }

        // Surface the strong-name failure as an HRESULT.
        return StrongNameErrorInfo();
    }
};
// ---------------------------------------------------------------------------
// CPropertyArray ctor
// ---------------------------------------------------------------------------
CPropertyArray::CPropertyArray()
{
    // Debug signature ('PORP') so the structure is recognizable in dumps.
    _dwSig = 0x504f5250; /* 'PORP' */

    // Start with every property slot empty (pv == NULL, cb == 0).
    memset(&_rProp, 0, ASM_NAME_MAX_PARAMS * sizeof(FusionProperty));
}
// ---------------------------------------------------------------------------
// CPropertyArray dtor
// ---------------------------------------------------------------------------
CPropertyArray::~CPropertyArray()
{
    // Only entries larger than a DWORD own a heap allocation; smaller
    // values are stored inline in the pv field (see CPropertyArray::Set).
    for (DWORD i = 0; i < ASM_NAME_MAX_PARAMS; i++)
    {
        if (_rProp[i].cb > sizeof(DWORD))
        {
            if (_rProp[i].pv != NULL)
            {
                FUSION_DELETE_ARRAY((LPBYTE) _rProp[i].pv);
                _rProp[i].pv = NULL;
            }
        }
    }
}
// ---------------------------------------------------------------------------
// CPropertyArray::Set
//
// Stores a property value. Values no larger than a DWORD are packed
// inline into the pv pointer itself; larger values get a heap copy.
// A NULL buffer with zero size clears the slot.
// ---------------------------------------------------------------------------
HRESULT CPropertyArray::Set(DWORD PropertyId,
                            LPCVOID pvProperty, DWORD cbProperty)
{
    HRESULT hr = S_OK;
    FusionProperty *pItem = NULL;

    pItem = &(_rProp[PropertyId]);

    // Clear request: release any heap storage and reset the slot.
    if (!cbProperty && !pvProperty)
    {
        if (pItem->cb > sizeof(DWORD))
        {
            if (pItem->pv != NULL)
                FUSION_DELETE_ARRAY((LPBYTE) pItem->pv);
        }
        pItem->pv = NULL;
    }
    else if (cbProperty > sizeof(DWORD))
    {
        // Large value: allocate the new copy first so failure leaves the
        // old value intact.
        LPBYTE ptr = NEW(BYTE[cbProperty]);
        if (!ptr)
        {
            hr = E_OUTOFMEMORY;
            goto exit;
        }

        if (pItem->cb > sizeof(DWORD))
            FUSION_DELETE_ARRAY((LPBYTE) pItem->pv);

        memcpy(ptr, pvProperty, cbProperty);
        pItem->pv = ptr;
    }
    else
    {
        // Small value: store the bytes directly in the pointer field.
        if (pItem->cb > sizeof(DWORD))
            FUSION_DELETE_ARRAY((LPBYTE) pItem->pv);

        memcpy(&(pItem->pv), pvProperty, cbProperty);

#ifdef _DEBUG
        if (PropertyId == ASM_NAME_ARCHITECTURE) {
            PEKIND pe = * ((PEKIND *)pvProperty);
            _ASSERTE(pe != peInvalid);
        }
#endif
    }

    pItem->cb = cbProperty;

exit:
    return hr;
}
// ---------------------------------------------------------------------------
// CPropertyArray::Get
//
// Copies a property value into the caller's buffer. *pcbProperty is set
// to the property size; returns ERROR_INSUFFICIENT_BUFFER (as HRESULT)
// when the supplied buffer is too small.
// ---------------------------------------------------------------------------
HRESULT CPropertyArray::Get(DWORD PropertyId,
                            LPVOID pvProperty, LPDWORD pcbProperty)
{
    HRESULT hr = S_OK;
    FusionProperty *pItem;

    _ASSERTE(pcbProperty);

    if (PropertyId >= ASM_NAME_MAX_PARAMS
        || (!pvProperty && *pcbProperty))
    {
        _ASSERTE(!"Invalid Argument! Passed in NULL buffer with size non-zero!");
        hr = E_INVALIDARG;
        goto exit;
    }

    pItem = &(_rProp[PropertyId]);

    if (pItem->cb > *pcbProperty)
        hr = HRESULT_FROM_WIN32(ERROR_INSUFFICIENT_BUFFER);
    else if (pItem->cb)
        // Small values live inline in pv; large ones are heap-allocated.
        memcpy(pvProperty, (pItem->cb > sizeof(DWORD) ?
            pItem->pv : (LPBYTE) &(pItem->pv)), pItem->cb);

    *pcbProperty = pItem->cb;

exit:
    return hr;
}
// ---------------------------------------------------------------------------
// CPropertyArray::operator []
// Wraps DWORD optimization test.
// ---------------------------------------------------------------------------
FusionProperty CPropertyArray::operator [] (DWORD PropertyId)
{
    FusionProperty prop;

    // Return a pv that always points at the data, whether it is stored
    // inline (<= sizeof(DWORD)) or on the heap.
    prop.pv = _rProp[PropertyId].cb > sizeof(DWORD) ?
        _rProp[PropertyId].pv : &(_rProp[PropertyId].pv);

    prop.cb = _rProp[PropertyId].cb;

    return prop;
}
// ---------------------------------------------------------------------------
// CAssemblyName::AddRef
// Standard thread-safe COM reference counting.
// ---------------------------------------------------------------------------
STDMETHODIMP_(ULONG)
CAssemblyName::AddRef()
{
    return InterlockedIncrement(&_cRef);
}
// ---------------------------------------------------------------------------
// CAssemblyName::Release
// Standard thread-safe COM reference counting; deletes the object when
// the count drops to zero.
// ---------------------------------------------------------------------------
STDMETHODIMP_(ULONG)
CAssemblyName::Release()
{
    ULONG ulRef = InterlockedDecrement(&_cRef);

    if (ulRef == 0)
    {
        delete this;
    }

    return ulRef;
}
// ---------------------------------------------------------------------------
// CAssemblyName::QueryInterface
// Supports IUnknown and IAssemblyName.
// ---------------------------------------------------------------------------
STDMETHODIMP
CAssemblyName::QueryInterface(REFIID riid, void** ppv)
{
    HRESULT hr = S_OK;

    BEGIN_ENTRYPOINT_NOTHROW;

    if (!ppv)
    {
        hr = E_POINTER;
        goto Exit;
    }

    if (   IsEqualIID(riid, IID_IUnknown)
        || IsEqualIID(riid, IID_IAssemblyName)
       )
    {
        *ppv = static_cast<IAssemblyName*> (this);
        AddRef();
        hr = S_OK;
        goto Exit;
    }
    else
    {
        // Per COM rules the out pointer is NULLed on failure.
        *ppv = NULL;
        hr = E_NOINTERFACE;
        goto Exit;
    }

 Exit:
    END_ENTRYPOINT_NOTHROW;
    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName::SetProperty
// Public entry point; the real work happens in SetPropertyInternal.
// ---------------------------------------------------------------------------
STDMETHODIMP
CAssemblyName::SetProperty(DWORD  PropertyId,
                           LPCVOID pvProperty,
                           DWORD  cbProperty)
{
    HRESULT hr = S_OK;
    BEGIN_ENTRYPOINT_NOTHROW;

    hr = SetPropertyInternal(PropertyId, pvProperty, cbProperty);

    END_ENTRYPOINT_NOTHROW;
    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName::GetProperty
//
// The ASM_NAME_NULL_* pseudo-properties are flag queries: they return
// S_OK when the corresponding "explicitly NULL" state is set and
// S_FALSE otherwise, without touching the caller's buffer.
// ---------------------------------------------------------------------------
STDMETHODIMP
CAssemblyName::GetProperty(DWORD PropertyId,
    LPVOID pvProperty, LPDWORD pcbProperty)
{
    HRESULT hr = S_OK;
    BEGIN_ENTRYPOINT_NOTHROW;

    // Retrieve the property.
    switch(PropertyId)
    {
        case ASM_NAME_NULL_PUBLIC_KEY_TOKEN:
        case ASM_NAME_NULL_PUBLIC_KEY:
        {
            // NULL means the token was explicitly set to "no key".
            hr = (_fPublicKeyToken && !_rProp[PropertyId].cb) ? S_OK : S_FALSE;
            break;
        }
        case ASM_NAME_NULL_CUSTOM:
        {
            hr = (_fCustom && !_rProp[PropertyId].cb) ? S_OK : S_FALSE;
            break;
        }
        default:
        {
            hr = _rProp.Get(PropertyId, pvProperty, pcbProperty);
            break;
        }
    }

    END_ENTRYPOINT_NOTHROW;
    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName::Finalize
// Marks the name read-only; subsequent SetProperty calls fail.
// ---------------------------------------------------------------------------
STDMETHODIMP
CAssemblyName::Finalize()
{
    BEGIN_ENTRYPOINT_NOTHROW;
    _fIsFinalized = TRUE;
    END_ENTRYPOINT_NOTHROW;

    return S_OK;
}
// ---------------------------------------------------------------------------
// CAssemblyName::GetDisplayName
//
// Renders the assembly name as a textual identity
// ("Name, Version=..., Culture=..., PublicKeyToken=...") honoring the
// ASM_DISPLAYF_* flags. On success *pccDisplayName is the number of
// characters written (including the terminator); when the buffer is too
// small it is set to the required size and ERROR_INSUFFICIENT_BUFFER is
// returned.
// ---------------------------------------------------------------------------
STDMETHODIMP
CAssemblyName::GetDisplayName( __out_ecount_opt(*pccDisplayName) LPOLESTR  szDisplayName,
                               __inout LPDWORD pccDisplayName,
                               DWORD dwDisplayFlags)
{
    HRESULT hr = S_OK;
    BEGIN_ENTRYPOINT_NOTHROW;

    if (!dwDisplayFlags) {
        dwDisplayFlags = ASM_DISPLAYF_DEFAULT;
    }

    // Validate input buffer.
    if(!pccDisplayName || (!szDisplayName && *pccDisplayName)) {
        hr = E_INVALIDARG;
        goto exit;
    }

    EX_TRY
    {
        NewHolder<BINDER_SPACE::AssemblyIdentity> pAssemblyIdentity = new BINDER_SPACE::AssemblyIdentity();
        FusionProperty prop;
        StackSString textualIdentity;

        // Name required
        prop = _rProp[ASM_NAME_NAME];
        if (prop.cb == 0) {
            hr = FUSION_E_INVALID_NAME;
            goto exit;
        }
        else {
            _ASSERTE(prop.cb >= sizeof(WCHAR));
            // prop.cb includes the NUL terminator; exclude it from the count.
            pAssemblyIdentity->m_simpleName.Set((const WCHAR *) prop.pv,
                                                (prop.cb - sizeof(WCHAR)) / sizeof(WCHAR));
            pAssemblyIdentity->SetHave(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_SIMPLE_NAME);
        }

        // Display version
        if (dwDisplayFlags & ASM_DISPLAYF_VERSION) {
            prop = _rProp[ASM_NAME_MAJOR_VERSION];

            // Set version if we have it
            if (prop.cb != 0) {
                DWORD dwVersionParts[4];

                // The four version parts occupy consecutive property ids.
                for(DWORD i = 0; i < 4; i++) {
                    prop = _rProp[ASM_NAME_MAJOR_VERSION + i];

                    // Normalize non-existing version parts to zero
                    if (prop.cb == sizeof(WORD)) {
                        dwVersionParts[i] = (DWORD) (* ((WORD *) prop.pv));
                    }
                    else {
                        dwVersionParts[i] = 0;
                    }
                }

                pAssemblyIdentity->m_version.SetFeatureVersion(dwVersionParts[0], dwVersionParts[1]);
                pAssemblyIdentity->m_version.SetServiceVersion(dwVersionParts[2], dwVersionParts[3]);
                pAssemblyIdentity->SetHave(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_VERSION);
            }
        }

        // Display culture
        if (dwDisplayFlags & ASM_DISPLAYF_CULTURE) {
            prop = _rProp[ASM_NAME_CULTURE];

            if (prop.cb != 0) {
                _ASSERTE(prop.cb >= sizeof(WCHAR));
                // An empty string is the invariant culture; leave the
                // field unset but still mark culture as present.
                if (((const WCHAR *) prop.pv)[0] != 0x00) {
                    pAssemblyIdentity->m_cultureOrLanguage.
                        Set((const WCHAR *) prop.pv,
                            (prop.cb - sizeof(WCHAR)) / sizeof(WCHAR));
                }

                pAssemblyIdentity->SetHave(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_CULTURE);
            }
        }

        // Display public key token
        if ((dwDisplayFlags & ASM_DISPLAYF_PUBLIC_KEY_TOKEN) && _fPublicKeyToken) {
            prop = _rProp[ASM_NAME_PUBLIC_KEY_TOKEN];

            if (prop.cb != 0) {
                pAssemblyIdentity->m_publicKeyOrTokenBLOB.Set((const BYTE *) prop.pv, prop.cb);
                pAssemblyIdentity->SetHave(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_PUBLIC_KEY_TOKEN);
            }
            else {
                // Explicit "PublicKeyToken=null".
                pAssemblyIdentity->
                    SetHave(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_PUBLIC_KEY_TOKEN_NULL);
            }
        }

        // Display processor architecture
        if (dwDisplayFlags & ASM_DISPLAYF_PROCESSORARCHITECTURE) {
            if (_rProp[ASM_NAME_ARCHITECTURE].cb != 0) {
                DWORD PeKind = *((LPDWORD)_rProp[ASM_NAME_ARCHITECTURE].pv);

                if (PeKind != peNone) {
                    pAssemblyIdentity->m_kProcessorArchitecture = (PEKIND) PeKind;
                    pAssemblyIdentity->
                        SetHave(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_PROCESSOR_ARCHITECTURE);
                }
            }
        }

        // Display retarget flag
        if (dwDisplayFlags & ASM_DISPLAYF_RETARGET) {
            prop = _rProp[ASM_NAME_RETARGET];

            if (prop.cb != 0) {
                BOOL fRetarget = *((LPBOOL) prop.pv);

                if (fRetarget)
                {
                    pAssemblyIdentity->SetHave(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_RETARGETABLE);
                }
            }
        }

        // Display content type
        if (dwDisplayFlags & ASM_DISPLAYF_CONTENT_TYPE)
        {
            prop = _rProp[ASM_NAME_CONTENT_TYPE];
            if (prop.cb != 0)
            {
                DWORD dwContentType = *((LPDWORD)prop.pv);
                // The default content type is omitted from the display name.
                if (dwContentType != AssemblyContentType_Default)
                {
                    pAssemblyIdentity->SetHave(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_CONTENT_TYPE);
                    pAssemblyIdentity->m_kContentType = (AssemblyContentType)dwContentType;
                }
            }
        }

        // Display custom flag
        if ((dwDisplayFlags & ASM_DISPLAYF_CUSTOM) && _fCustom) {
            prop = _rProp[ASM_NAME_CUSTOM];

            if (prop.cb != 0) {
                pAssemblyIdentity->m_customBLOB.Set((const BYTE *) prop.pv, prop.cb);
                pAssemblyIdentity->SetHave(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_CUSTOM);
            }
            else {
                pAssemblyIdentity->SetHave(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_CUSTOM_NULL);
            }
        }

        // Create the textual identity
        hr = BINDER_SPACE::TextualIdentityParser::ToString(pAssemblyIdentity,
                                                           pAssemblyIdentity->m_dwIdentityFlags,
                                                           textualIdentity);
        if (FAILED(hr)) {
            goto exit;
        }

        // Determine required buffer size
        DWORD dwGivenSize = *pccDisplayName;
        DWORD dwRequiredSize = textualIdentity.GetCount() + 1;

        *pccDisplayName = dwRequiredSize;

        if (dwRequiredSize > dwGivenSize) {
            hr = HRESULT_FROM_WIN32(ERROR_INSUFFICIENT_BUFFER);

            if (szDisplayName) {
                szDisplayName[0] = 0x00;
            }
            goto exit;
        }
        else {
            hr = S_OK;
            memcpy(szDisplayName, textualIdentity.GetUnicode(), dwRequiredSize * sizeof(WCHAR));
        }
    }
    EX_CATCH_HRESULT(hr);

 exit:
    END_ENTRYPOINT_NOTHROW;
    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName::GetName
// Convenience wrapper over GetProperty(ASM_NAME_NAME); converts the
// in/out size between characters and bytes.
// ---------------------------------------------------------------------------
STDMETHODIMP
CAssemblyName::GetName(
        __inout LPDWORD lpcwBuffer,
        __out_ecount_opt(*lpcwBuffer) LPOLESTR pwzBuffer)
{
    HRESULT hr = S_OK;
    BEGIN_ENTRYPOINT_NOTHROW;

    DWORD cbBuffer = *lpcwBuffer * sizeof(TCHAR);

    hr = GetProperty(ASM_NAME_NAME, pwzBuffer, &cbBuffer);

    *lpcwBuffer = cbBuffer / sizeof(TCHAR);
    END_ENTRYPOINT_NOTHROW;

    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName::GetVersion
// Returns the assembly version packed into two DWORDs
// (major.minor / build.revision).
// ---------------------------------------------------------------------------
STDMETHODIMP
CAssemblyName::GetVersion(
        /* [out] */ LPDWORD pdwVersionHi,
        /* [out] */ LPDWORD pdwVersionLow)
{
    HRESULT hr = S_OK;
    BEGIN_ENTRYPOINT_NOTHROW;

    // Get Assembly Version
    hr = GetVersion( ASM_NAME_MAJOR_VERSION, pdwVersionHi, pdwVersionLow);

    END_ENTRYPOINT_NOTHROW;
    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName::IsEqual
//
// Compares this name (the "ref") against pName (the "def") on the fields
// selected by dwCmpFlags. Returns S_OK on a match, S_FALSE otherwise.
// Fields the ref does not carry (partial name) are masked out of the
// comparison.
//
// Fix: the "Compare Custom attribute" section previously read
// ASM_NAME_PUBLIC_KEY_TOKEN (a copy-paste of the block above), so
// ASM_CMPF_CUSTOM never actually compared the custom blob. It now
// compares ASM_NAME_CUSTOM as the comment always claimed.
// ---------------------------------------------------------------------------
STDMETHODIMP
CAssemblyName::IsEqual(LPASSEMBLYNAME pName, DWORD dwCmpFlags)
{
    HRESULT hr = S_OK;
    BEGIN_ENTRYPOINT_NOTHROW;

    DWORD dwPartialCmpMask = 0;
    BOOL fIsPartial = FALSE;
    CAssemblyName *pCName = static_cast<CAssemblyName *>(pName);

    const DWORD SIMPLE_VERSION_MASK = ASM_CMPF_VERSION;

    FusionProperty propThis;
    FusionProperty propPara;

    if(!pName) {
        hr = S_FALSE;
        goto Exit;
    }

    // Get the ref partial comparison mask, if any.
    fIsPartial = CAssemblyName::IsPartial(this, &dwPartialCmpMask);

    if (dwCmpFlags == ASM_CMPF_DEFAULT) {
        // Set all comparison flags.
        dwCmpFlags = ASM_CMPF_IL_ALL | ASM_CMPF_ARCHITECTURE;

        // don't compare architecture if ref does not have architecture.
        if (!(dwPartialCmpMask & ASM_CMPF_ARCHITECTURE)) {
            dwCmpFlags &= ~ASM_CMPF_ARCHITECTURE;
        }

        // Otherwise, if ref is simple (possibly partial)
        // we mask off all version bits.
        if (!CAssemblyName::IsStronglyNamed(this))
        {
            // we don't have a public key token, but we don't know
            // it is because we are simply named assembly or we are
            // just partial on public key token.
            if (dwPartialCmpMask & ASM_CMPF_PUBLIC_KEY_TOKEN)
            {
                // now we know we are simply named assembly since we
                // have a public key token, but it is NULL.
                dwCmpFlags &= ~SIMPLE_VERSION_MASK;
            }
            // If neither of these two cases then public key token
            // is not set in ref , but def may be simple or strong.
            // The comparison mask is chosen based on def.
            else
            {
                if (!CAssemblyName::IsStronglyNamed(pName))
                    dwCmpFlags &= ~SIMPLE_VERSION_MASK;
            }
        }
    }

    // Mask off flags (either passed in or generated
    // by default flag with the comparison mask generated
    // from the ref.
    dwCmpFlags &= dwPartialCmpMask;

    // The individual name fields can now be compared..

    // Compare name
    if (dwCmpFlags & ASM_CMPF_NAME)
    {
        propThis = _rProp[ASM_NAME_NAME];
        propPara = pCName->_rProp[ASM_NAME_NAME];

        if (propThis.cb != propPara.cb)
        {
            hr = S_FALSE;
            goto Exit;
        }

        if (propThis.cb && FusionCompareStringI((LPWSTR)propThis.pv, (LPWSTR)propPara.pv))
        {
            hr = S_FALSE;
            goto Exit;
        }
    }

    // Compare version (four WORD-sized parts, one flag each).
    if (dwCmpFlags & ASM_CMPF_MAJOR_VERSION)
    {
        propThis = _rProp[ASM_NAME_MAJOR_VERSION];
        propPara = pCName->_rProp[ASM_NAME_MAJOR_VERSION];

        if (*((LPWORD) propThis.pv) != *((LPWORD)propPara.pv))
        {
            hr = S_FALSE;
            goto Exit;
        }
    }

    if (dwCmpFlags & ASM_CMPF_MINOR_VERSION)
    {
        propThis = _rProp[ASM_NAME_MINOR_VERSION];
        propPara = pCName->_rProp[ASM_NAME_MINOR_VERSION];

        if (*((LPWORD) propThis.pv) != *((LPWORD)propPara.pv))
        {
            hr = S_FALSE;
            goto Exit;
        }
    }

    if (dwCmpFlags & ASM_CMPF_REVISION_NUMBER)
    {
        propThis = _rProp[ASM_NAME_REVISION_NUMBER];
        propPara = pCName->_rProp[ASM_NAME_REVISION_NUMBER];

        if (*((LPWORD) propThis.pv) != *((LPWORD)propPara.pv))
        {
            hr = S_FALSE;
            goto Exit;
        }
    }

    if (dwCmpFlags & ASM_CMPF_BUILD_NUMBER)
    {
        propThis = _rProp[ASM_NAME_BUILD_NUMBER];
        propPara = pCName->_rProp[ASM_NAME_BUILD_NUMBER];

        if (*((LPWORD) propThis.pv) != *((LPWORD)propPara.pv))
        {
            hr = S_FALSE;
            goto Exit;
        }
    }

    // Compare public key token
    if (dwCmpFlags & ASM_CMPF_PUBLIC_KEY_TOKEN)
    {
        // compare public key if both of them have public key set.
        propThis = _rProp[ASM_NAME_PUBLIC_KEY];
        propPara = pCName->_rProp[ASM_NAME_PUBLIC_KEY];
        if (!propThis.cb || !propPara.cb) {
            // otherwise, compare public key token
            propThis = _rProp[ASM_NAME_PUBLIC_KEY_TOKEN];
            propPara = pCName->_rProp[ASM_NAME_PUBLIC_KEY_TOKEN];
        }

        if (propThis.cb != propPara.cb) {
            hr = S_FALSE;
            goto Exit;
        }

        if (propThis.cb && memcmp(propThis.pv, propPara.pv, propThis.cb)) {
            hr = S_FALSE;
            goto Exit;
        }
    }

    // Compare Culture
    if (dwCmpFlags & ASM_CMPF_CULTURE)
    {
        propThis = _rProp[ASM_NAME_CULTURE];
        propPara = pCName->_rProp[ASM_NAME_CULTURE];

        if (propThis.cb != propPara.cb)
        {
            hr = S_FALSE;
            goto Exit;
        }

        if (propThis.cb && FusionCompareStringI((LPWSTR)propThis.pv, (LPWSTR)propPara.pv))
        {
            hr = S_FALSE;
            goto Exit;
        }
    }

    // Compare Custom attribute.
    if (dwCmpFlags & ASM_CMPF_CUSTOM)
    {
        // FIX: was ASM_NAME_PUBLIC_KEY_TOKEN (copy-paste from the public
        // key token comparison above), which made this block a duplicate
        // token comparison instead of comparing the custom blob.
        propThis = _rProp[ASM_NAME_CUSTOM];
        propPara = pCName->_rProp[ASM_NAME_CUSTOM];

        if (propThis.cb != propPara.cb) {
            hr = S_FALSE;
            goto Exit;
        }

        if (propThis.cb && memcmp(propThis.pv, propPara.pv, propThis.cb)) {
            hr = S_FALSE;
            goto Exit;
        }
    }

    // Compare Retarget flag
    if (dwCmpFlags & ASM_CMPF_RETARGET)
    {
        propThis = _rProp[ASM_NAME_RETARGET];
        propPara = pCName->_rProp[ASM_NAME_RETARGET];

        if (*((LPDWORD) propThis.pv) != *((LPDWORD)propPara.pv))
        {
            hr = S_FALSE;
            goto Exit;
        }
    }

    // compare config mask
    if (dwCmpFlags & ASM_CMPF_CONFIG_MASK)
    {
        propThis = _rProp[ASM_NAME_CONFIG_MASK];
        propPara = pCName->_rProp[ASM_NAME_CONFIG_MASK];

        if (*((LPDWORD) propThis.pv) != *((LPDWORD)propPara.pv))
        {
            hr = S_FALSE;
            goto Exit;
        }
    }

    // compare architecture
    if (dwCmpFlags & ASM_CMPF_ARCHITECTURE)
    {
        propThis = _rProp[ASM_NAME_ARCHITECTURE];
        propPara = pCName->_rProp[ASM_NAME_ARCHITECTURE];

        if (propThis.cb != propPara.cb) {
            hr = S_FALSE;
            goto Exit;
        }

        if (propThis.cb) {
            if (*((LPDWORD) propThis.pv) != *((LPDWORD)propPara.pv)) {
                hr = S_FALSE;
                goto Exit;
            }
        }
    }

    // Compare content type
    if (dwCmpFlags & ASM_CMPF_CONTENT_TYPE)
    {
        propThis = _rProp[ASM_NAME_CONTENT_TYPE];
        propPara = pCName->_rProp[ASM_NAME_CONTENT_TYPE];

        if (*((LPDWORD)propThis.pv) != *((LPDWORD)propPara.pv))
        {
            hr = S_FALSE;
            goto Exit;
        }
    }

    // compare MVID
    if (dwCmpFlags & ASM_CMPF_MVID)
    {
        propThis = _rProp[ASM_NAME_MVID];
        propPara = pCName->_rProp[ASM_NAME_MVID];

        if (propThis.cb != propPara.cb) {
            hr = S_FALSE;
            goto Exit;
        }

        if (propThis.cb && memcmp(propThis.pv, propPara.pv, propThis.cb)) {
            hr = S_FALSE;
            goto Exit;
        }
    }

    // compare Signature
    if (dwCmpFlags & ASM_CMPF_SIGNATURE)
    {
        propThis = _rProp[ASM_NAME_SIGNATURE_BLOB];
        propPara = pCName->_rProp[ASM_NAME_SIGNATURE_BLOB];

        if (propThis.cb != propPara.cb) {
            hr = S_FALSE;
            goto Exit;
        }

        if (propThis.cb && memcmp(propThis.pv, propPara.pv, propThis.cb)) {
            hr = S_FALSE;
            goto Exit;
        }
    }

    hr = S_OK;

Exit:
    END_ENTRYPOINT_NOTHROW;
    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName::Reserved
// Legacy interface slot; intentionally not implemented.
// ---------------------------------------------------------------------------
STDMETHODIMP
CAssemblyName::Reserved(
        /* in      */  REFIID               refIID,
        /* in      */  IUnknown            *pUnkBindSink,
        /* in      */  IUnknown            *pUnkAppCtx,
        /* in      */  LPCOLESTR            szCodebaseIn,
        /* in      */  LONGLONG             llFlags,
        /* in      */  LPVOID               pvReserved,
        /* in      */  DWORD                cbReserved,
        /*     out */  VOID               **ppv)
{
    return E_NOTIMPL;
}
// ---------------------------------------------------------------------------
// CAssemblyName::Clone
// Creates a new CAssemblyName with a copy of this name's properties.
// ---------------------------------------------------------------------------
HRESULT CAssemblyName::Clone(IAssemblyName **ppName)
{
    HRESULT hr = S_OK;
    BEGIN_ENTRYPOINT_NOTHROW;

    CAssemblyName *pClone = NULL;

    if (!ppName) {
        hr = E_INVALIDARG;
        goto Exit;
    }
    *ppName = NULL;

    pClone = NEW(CAssemblyName);
    if( !pClone ) {
        hr = E_OUTOFMEMORY;
        goto Exit;
    }

    hr = CopyProperties(this, pClone, NULL, 0);
    if (FAILED(hr)) {
        goto Exit;
    }

    *ppName = pClone;
    // Take the caller's reference; the SAFERELEASE below drops ours.
    (*ppName)->AddRef();

Exit:
    SAFERELEASE(pClone);
    END_ENTRYPOINT_NOTHROW;
    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName::SetPropertyInternal
//
// Core property setter. Beyond storing the value it maintains the
// derived state: setting a public key also derives and stores the
// public key token; the ASM_NAME_NULL_* pseudo-properties clear their
// targets and record "explicitly NULL"; "neutral" culture is normalized
// to the empty (invariant) culture. Fails once the name is finalized.
// ---------------------------------------------------------------------------
HRESULT CAssemblyName::SetPropertyInternal(DWORD  PropertyId,
                                           LPCVOID pvProperty,
                                           DWORD  cbProperty)
{
    HRESULT hr = S_OK;
    LPBYTE pbSN = NULL;
    DWORD  cbSN = 0;

    // Fail if finalized.
    if (_fIsFinalized)
    {
        _ASSERTE(!"SetProperty on a IAssemblyName while the name is finalized!");
        hr = E_UNEXPECTED;
        goto exit;
    }

    if (PropertyId >= ASM_NAME_MAX_PARAMS
        || (!pvProperty && cbProperty))
    {
        _ASSERTE(!"Invalid Argument! Passed in NULL buffer with size non-zero!");
        hr = E_INVALIDARG;
        goto exit;
    }

    // <REVISIT_TODO> - make this a switch statement.</REVISIT_TODO>
    // Version parts must fit in a WORD.
    if (PropertyId == ASM_NAME_MAJOR_VERSION ||
        PropertyId == ASM_NAME_MINOR_VERSION ||
        PropertyId == ASM_NAME_BUILD_NUMBER  ||
        PropertyId == ASM_NAME_REVISION_NUMBER)
    {
        if (cbProperty > sizeof(WORD)) {
            hr = E_INVALIDARG;
            goto exit;
        }
    }

    // Check if public key is being set and if so,
    // set the public key token if not already set.
    if (PropertyId == ASM_NAME_PUBLIC_KEY)
    {
        // If setting true public key, generate hash.
        if (pvProperty && cbProperty)
        {
            // Generate the public key token from the pk.
            if (FAILED(hr = GetPublicKeyTokenFromPKBlob((LPBYTE) pvProperty, cbProperty, &pbSN, &cbSN)))
                goto exit;

            // Set the public key token property.
            if (FAILED(hr = SetPropertyInternal(ASM_NAME_PUBLIC_KEY_TOKEN, pbSN, cbSN)))
                goto exit;
        }
        // Otherwise expect call to reset property.
        else if (!cbProperty)
        {
            if (FAILED(hr = SetPropertyInternal(ASM_NAME_PUBLIC_KEY_TOKEN, pvProperty, cbProperty)))
                goto exit;
        }

    }
    // Setting NULL public key clears values in public key,
    // public key token and sets public key token flag.
    else if (PropertyId == ASM_NAME_NULL_PUBLIC_KEY)
    {
        pvProperty = NULL;
        cbProperty = 0;
        hr = SetPropertyInternal(ASM_NAME_NULL_PUBLIC_KEY_TOKEN, pvProperty, cbProperty);
        goto exit;
    }
    // Setting or clearing public key token.
    else if (PropertyId == ASM_NAME_PUBLIC_KEY_TOKEN)
    {
        // Defensive: invalid sized public key tokens should be avoided.
        if (cbProperty > PUBLIC_KEY_TOKEN_LEN)
        {
            // NOTE(review): the hr from this clearing call is immediately
            // overwritten by E_INVALIDARG -- the clear appears to be
            // best-effort; confirm before changing.
            hr = SetPropertyInternal(ASM_NAME_NULL_PUBLIC_KEY_TOKEN, NULL, 0);
            hr = E_INVALIDARG;
            goto exit;
        }

        if (pvProperty && cbProperty)
            _fPublicKeyToken = TRUE;
        else if (!cbProperty)
            _fPublicKeyToken = FALSE;
    }
    // Setting NULL public key token clears public key token and
    // sets public key token flag.
    else if (PropertyId == ASM_NAME_NULL_PUBLIC_KEY_TOKEN)
    {
        _fPublicKeyToken = TRUE;
        pvProperty = NULL;
        cbProperty = 0;
        PropertyId = ASM_NAME_PUBLIC_KEY_TOKEN;
    }
    else if (PropertyId == ASM_NAME_CUSTOM)
    {
        if (pvProperty && cbProperty)
            _fCustom = TRUE;
        else if (!cbProperty)
            _fCustom = FALSE;
    }
    else if (PropertyId == ASM_NAME_NULL_CUSTOM)
    {
        _fCustom = TRUE;
        pvProperty = NULL;
        cbProperty = 0;
        PropertyId = ASM_NAME_CUSTOM;
    }
    // Setting "neutral" as the culture is the same as "" culture (meaning
    // culture-invariant).
    else if (PropertyId == ASM_NAME_CULTURE) {
        if (pvProperty && !FusionCompareStringI((LPWSTR)pvProperty, W("neutral"))) {
            pvProperty = (void *)W("");
            cbProperty = sizeof(W(""));
        }
    }

    // Set property on array.
    hr = _rProp.Set(PropertyId, pvProperty, cbProperty);

exit:
    if (SUCCEEDED(hr)) {
        LPWSTR pwzOld;
        // Clear cache
        pwzOld = InterlockedExchangeT(&_pwzTextualIdentity, NULL);
        SAFEDELETEARRAY(pwzOld);
        pwzOld = InterlockedExchangeT(&_pwzTextualIdentityILFull, NULL);
        SAFEDELETEARRAY(pwzOld);
    }

    // Free memory allocated by crypto wrapper.
    if (pbSN) {
        StrongNameFreeBuffer(pbSN);
    }

    return hr;
}
// ---------------------------------------------------------------------------
// CheckFieldsForFriendAssembly
// ---------------------------------------------------------------------------
// Validates an assembly name parsed from an InternalsVisibleTo (friend
// assembly) declaration.  Such a name may carry only a simple name and,
// optionally, a full public key; a version, culture, processor architecture
// or a bare public-key token makes the declaration invalid.
// Returns S_OK when acceptable, META_E_CA_BAD_FRIENDS_ARGS when a disallowed
// component is present, or another failure HRESULT from GetProperty itself.
STDAPI
CheckFieldsForFriendAssembly(
    LPASSEMBLYNAME pAssemblyName)
{
    HRESULT hr = S_OK;
    DWORD dwSize=0;
    // Let's look at the information they gave us in the friends declaration.
    // If they put in a Processor Architecture, Culture, or Version, then we'll return an error.
    // Each property is probed with a NULL buffer: a set property makes
    // GetProperty fail with ERROR_INSUFFICIENT_BUFFER, and the || chain
    // short-circuits, leaving hr at the first failing probe.
    if (FAILED(hr = pAssemblyName->GetProperty(ASM_NAME_MAJOR_VERSION, NULL, &dwSize)) ||
        FAILED(hr = pAssemblyName->GetProperty(ASM_NAME_MINOR_VERSION, NULL, &dwSize)) ||
        FAILED(hr = pAssemblyName->GetProperty(ASM_NAME_BUILD_NUMBER, NULL, &dwSize)) ||
        FAILED(hr = pAssemblyName->GetProperty(ASM_NAME_REVISION_NUMBER, NULL, &dwSize)) ||
        FAILED(hr = pAssemblyName->GetProperty(ASM_NAME_CULTURE, NULL, &dwSize)) ||
        FAILED(hr = pAssemblyName->GetProperty(ASM_NAME_ARCHITECTURE, NULL, &dwSize)))
    {
        // If any of these calls failed due to an insufficient buffer, then that means
        // the assembly name contained them
        if (hr == HRESULT_FROM_WIN32(ERROR_INSUFFICIENT_BUFFER))
            hr = META_E_CA_BAD_FRIENDS_ARGS;
    } else {
        if (FAILED(hr = pAssemblyName->GetProperty(ASM_NAME_PUBLIC_KEY_TOKEN, NULL, &dwSize))) {
            //
            // Public Key token should not be passed to InternalsVisibleTo
            // attribute. This translates to the ASM_NAME_PUBLIC_KEY_TOKEN
            // property being set, while the full public key is not.
            //
            if (hr == HRESULT_FROM_WIN32(ERROR_INSUFFICIENT_BUFFER)) {
                dwSize = 0;
                // Token is present: acceptable only if it was derived from a
                // full public key that is also present on the name.
                if (FAILED(hr = pAssemblyName->GetProperty(ASM_NAME_PUBLIC_KEY, NULL, &dwSize))) {
                    if (hr == HRESULT_FROM_WIN32(ERROR_INSUFFICIENT_BUFFER))
                        hr = S_OK;
                } else {
                    // Token without a full public key: reject.
                    hr = META_E_CA_BAD_FRIENDS_ARGS;
                }
            }
        } else {
            // Neither token nor key set: a simple name is fine.
            hr = S_OK;
        }
    }
    return hr;
}
// ---------------------------------------------------------------------------
// CreateAssemblyNameObject
// ---------------------------------------------------------------------------
// This is not external for CoreCLR
// Creates an IAssemblyName instance.  With CANOF_PARSE_DISPLAY_NAME the
// szAssemblyName argument is treated as a full textual display name and
// parsed; otherwise it is used as the simple name only.  With
// CANOF_VERIFY_FRIEND_ASSEMBLYNAME the resulting name is additionally
// validated for use in an InternalsVisibleTo declaration.
// pvReserved is unused.  On success *ppAssemblyName receives a name object
// with one reference; on failure the object is released and an error
// HRESULT is returned.
STDAPI
CreateAssemblyNameObject(
    LPASSEMBLYNAME *ppAssemblyName,
    LPCOLESTR szAssemblyName,
    DWORD dwFlags,
    LPVOID pvReserved)
{
    HRESULT hr = S_OK;
    BEGIN_ENTRYPOINT_NOTHROW;
    CAssemblyName *pName = NULL;
    if (!ppAssemblyName)
    {
        hr = E_INVALIDARG;
        goto exit;
    }
    pName = NEW(CAssemblyName);
    if (!pName)
    {
        hr = E_OUTOFMEMORY;
        goto exit;
    }
    if (dwFlags & CANOF_PARSE_DISPLAY_NAME)
    {
        // Initialize empty, then populate from the textual identity.
        hr = pName->Init(NULL, NULL);
        if (FAILED(hr)) {
            goto exit;
        }
        hr = pName->Parse((LPWSTR)szAssemblyName);
    }
    else
    {
        hr = pName->Init(szAssemblyName, NULL);
    }
    if (SUCCEEDED(hr) && ((dwFlags & CANOF_VERIFY_FRIEND_ASSEMBLYNAME)))
    {
        hr = CheckFieldsForFriendAssembly(pName);
    }
    if (FAILED(hr))
    {
        // Release the partially constructed name; also NULLs pName.
        SAFERELEASE(pName);
        goto exit;
    }
    *ppAssemblyName = pName;
    exit:
    END_ENTRYPOINT_NOTHROW;
    return hr;
}
// ---------------------------------------------------------------------------
// CreateAssemblyNameObjectFromMetaData
// ---------------------------------------------------------------------------
// Creates an IAssemblyName initialized from the given simple name and the
// metadata ASSEMBLYMETADATA structure (version, culture).  pvReserved is
// unused.  On success *ppAssemblyName receives a name object with one
// reference; on failure the object is released and an error is returned.
STDAPI
CreateAssemblyNameObjectFromMetaData(
    LPASSEMBLYNAME *ppAssemblyName,
    LPCOLESTR szAssemblyName,
    ASSEMBLYMETADATA *pamd,
    LPVOID pvReserved)
{
    HRESULT hr = S_OK;
    CAssemblyName *pName = NULL;
    // Guard the out parameter before dereferencing it below; this also keeps
    // the entry point consistent with CreateAssemblyNameObject, which
    // performs the same check.
    if (!ppAssemblyName)
    {
        hr = E_INVALIDARG;
        goto exit;
    }
    pName = NEW(CAssemblyName);
    if (!pName)
    {
        hr = E_OUTOFMEMORY;
        goto exit;
    }
    hr = pName->Init(szAssemblyName, pamd);
    if (FAILED(hr))
    {
        // Release the partially constructed name; also NULLs pName.
        SAFERELEASE(pName);
        goto exit;
    }
    *ppAssemblyName = pName;
    exit:
    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName constructor
// ---------------------------------------------------------------------------
// Initializes an empty assembly name: no properties set, reference count 1.
CAssemblyName::CAssemblyName()
{
    _dwSig = 0x454d414e; /* 'EMAN' */   // debug signature for heap dumps
    _fIsFinalized = FALSE;              // name may still be mutated
    _fPublicKeyToken = FALSE;           // no public key token specified yet
    // NOTE(review): _fCustom starts TRUE while _fPublicKeyToken starts FALSE;
    // IsPartial treats a TRUE flag as "component specified", so custom is
    // compared by default — presumably intentional, confirm against callers.
    _fCustom = TRUE;
    _cRef = 1;                          // caller owns the initial reference
    _pwzPathModifier = NULL;
    _pwzTextualIdentity = NULL;         // lazily built textual identity cache
    _pwzTextualIdentityILFull = NULL;
}
// ---------------------------------------------------------------------------
// CAssemblyName destructor
// ---------------------------------------------------------------------------
// Frees the owned string buffers; SAFEDELETEARRAY tolerates NULL.
CAssemblyName::~CAssemblyName()
{
    SAFEDELETEARRAY(_pwzPathModifier);
    SAFEDELETEARRAY(_pwzTextualIdentity);
    SAFEDELETEARRAY(_pwzTextualIdentityILFull);
}
// ---------------------------------------------------------------------------
// CAssemblyName::IsStronglyNamed
// ---------------------------------------------------------------------------
// A name counts as strongly named when it carries a non-empty
// public key token property.
BOOL CAssemblyName::IsStronglyNamed(IAssemblyName *pName)
{
    CAssemblyName *pAsmName = static_cast<CAssemblyName *>(pName);
    _ASSERTE(pAsmName);
    FusionProperty tokenProp = pAsmName->_rProp[ASM_NAME_PUBLIC_KEY_TOKEN];
    return (tokenProp.cb == 0) ? FALSE : TRUE;
}
// ---------------------------------------------------------------------------
// CAssemblyName::IsPartial
// ---------------------------------------------------------------------------
// Returns TRUE if any of the core identity components (name, culture,
// public key token, the four version fields, custom) is missing from the
// given name.  If pdwCmpMask is non-NULL it receives the ASM_CMPF_* mask of
// every component that IS present, including several optional components
// (architecture, retarget, content type, config mask, MVID, signature).
BOOL CAssemblyName::IsPartial(IAssemblyName *pIName,
                              LPDWORD pdwCmpMask)
{
    DWORD dwCmpMask = 0;
    BOOL fPartial = FALSE;
    // Core components checked for presence; rCmpFlags is the parallel table
    // of comparison-mask bits, so the two arrays must stay in sync.
    static const ASM_NAME rNameFlags[] ={ASM_NAME_NAME,
                                         ASM_NAME_CULTURE,
                                         ASM_NAME_PUBLIC_KEY_TOKEN,
                                         ASM_NAME_MAJOR_VERSION,
                                         ASM_NAME_MINOR_VERSION,
                                         ASM_NAME_BUILD_NUMBER,
                                         ASM_NAME_REVISION_NUMBER,
                                         ASM_NAME_CUSTOM
                                        };
    static const ASM_CMP_FLAGS rCmpFlags[] = {ASM_CMPF_NAME,
                                              ASM_CMPF_CULTURE,
                                              ASM_CMPF_PUBLIC_KEY_TOKEN,
                                              ASM_CMPF_MAJOR_VERSION,
                                              ASM_CMPF_MINOR_VERSION,
                                              ASM_CMPF_BUILD_NUMBER,
                                              ASM_CMPF_REVISION_NUMBER,
                                              ASM_CMPF_CUSTOM
                                             };
    CAssemblyName *pName = static_cast<CAssemblyName*> (pIName); // dynamic_cast
    _ASSERTE(pName);
    DWORD iNumOfComparison = sizeof(rNameFlags) / sizeof(rNameFlags[0]);
    for (DWORD i = 0; i < iNumOfComparison; i++)
    {
        // A component is "present" when its property has data, or — for the
        // public key token and custom components — when the corresponding
        // flag says it was explicitly specified (possibly as NULL).
        if (pName->_rProp[rNameFlags[i]].cb
            || (rNameFlags[i] == ASM_NAME_PUBLIC_KEY_TOKEN
                && pName->_fPublicKeyToken)
            || (rNameFlags[i] == ASM_NAME_CUSTOM
                && pName->_fCustom))
        {
            dwCmpMask |= rCmpFlags[i];
        }
        else {
            fPartial = TRUE;
        }
    }
    // Optional components contribute to the comparison mask but do not make
    // the name partial when absent.
    if(pName->_rProp[ASM_NAME_ARCHITECTURE].cb) {
        dwCmpMask |= ASM_CMPF_ARCHITECTURE;
    }
    if (pName->_rProp[ASM_NAME_RETARGET].cb) {
        dwCmpMask |= ASM_CMPF_RETARGET;
    }
    if (pName->_rProp[ASM_NAME_CONTENT_TYPE].cb != 0)
    {
        dwCmpMask |= ASM_CMPF_CONTENT_TYPE;
    }
    if (pName->_rProp[ASM_NAME_CONFIG_MASK].cb) {
        dwCmpMask |= ASM_CMPF_CONFIG_MASK;
    }
    if (pName->_rProp[ASM_NAME_MVID].cb) {
        dwCmpMask |= ASM_CMPF_MVID;
    }
    if (pName->_rProp[ASM_NAME_SIGNATURE_BLOB].cb) {
        dwCmpMask |= ASM_CMPF_SIGNATURE;
    }
    if (pdwCmpMask)
        *pdwCmpMask = dwCmpMask;
    return fPartial;
}
// ---------------------------------------------------------------------------
// CAssemblyName::Init
// ---------------------------------------------------------------------------
// Initializes the name from an optional simple name and optional metadata.
// When pszAssemblyName is given it becomes ASM_NAME_NAME (including the
// terminating NUL).  When pamd is given, the four version fields and the
// culture are copied from it.  Both arguments may be NULL for an empty name.
HRESULT
CAssemblyName::Init(LPCTSTR pszAssemblyName, ASSEMBLYMETADATA *pamd)
{
    HRESULT hr = S_OK;
    // Name
    if (pszAssemblyName)
    {
        hr = SetProperty(ASM_NAME_NAME, (LPTSTR) pszAssemblyName,
            (DWORD)((wcslen(pszAssemblyName)+1) * sizeof(TCHAR)));
        if (FAILED(hr))
            goto exit;
    }
    if (pamd) {
        // Major version
        if (FAILED(hr = SetProperty(ASM_NAME_MAJOR_VERSION,
                &pamd->usMajorVersion, sizeof(WORD)))
            // Minor version
            || FAILED(hr = SetProperty(ASM_NAME_MINOR_VERSION,
                &pamd->usMinorVersion, sizeof(WORD)))
            // Revision number (set before build number; order is harmless
            // since each property is independent)
            || FAILED(hr = SetProperty(ASM_NAME_REVISION_NUMBER,
                &pamd->usRevisionNumber, sizeof(WORD)))
            // Build number
            || FAILED(hr = SetProperty(ASM_NAME_BUILD_NUMBER,
                &pamd->usBuildNumber, sizeof(WORD)))
            // Culture (cbLocale is a character count, hence * sizeof(WCHAR))
            || FAILED(hr = SetProperty(ASM_NAME_CULTURE,
                pamd->szLocale, pamd->cbLocale * sizeof(WCHAR)))
            )
        {
            goto exit;
        }
    }
    exit:
    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName::Parse
// ---------------------------------------------------------------------------
// Populates this name from a textual display name such as
// "Name, Version=1.2.3.4, Culture=neutral, PublicKeyToken=...".
// The string is parsed by the binder's TextualIdentityParser and each
// recognized component is applied via SetProperty (which also maintains the
// derived public key token and the flag-style NULL components).
// Returns E_INVALIDARG for a NULL/empty input, a parse failure HRESULT, or
// any SetProperty failure; exceptions from the SString/binder machinery are
// converted to an HRESULT by EX_CATCH_HRESULT.
HRESULT CAssemblyName::Parse(__in_z LPCWSTR szDisplayName)
{
    HRESULT hr = S_OK;
    if (!(szDisplayName && *szDisplayName))
    {
        hr = E_INVALIDARG;
        goto exit;
    }
    EX_TRY {
        BINDER_SPACE::AssemblyIdentity assemblyIdentity;
        SString displayName(szDisplayName);
        // Parse the textual identity
        hr = BINDER_SPACE::TextualIdentityParser::Parse(displayName, &assemblyIdentity);
        if (FAILED(hr)) {
            goto exit;
        }
        // Set name.
        hr = SetProperty(ASM_NAME_NAME,
                         (LPVOID) assemblyIdentity.m_simpleName.GetUnicode(),
                         (assemblyIdentity.m_simpleName.GetCount() + 1) * sizeof(WCHAR));
        if (FAILED(hr)) {
            goto exit;
        }
        // Set version. All four fields are present together when the
        // identity carries a version.
        if (assemblyIdentity.Have(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_VERSION)) {
            WORD wVersionPart = 0;
            wVersionPart = (WORD) assemblyIdentity.m_version.GetMajor();
            hr = SetProperty(ASM_NAME_MAJOR_VERSION, &wVersionPart, sizeof(WORD));
            if (FAILED(hr)) {
                goto exit;
            }
            wVersionPart = (WORD) assemblyIdentity.m_version.GetMinor();
            hr = SetProperty(ASM_NAME_MINOR_VERSION, &wVersionPart, sizeof(WORD));
            if (FAILED(hr)) {
                goto exit;
            }
            wVersionPart = (WORD) assemblyIdentity.m_version.GetBuild();
            hr = SetProperty(ASM_NAME_BUILD_NUMBER, &wVersionPart, sizeof(WORD));
            if (FAILED(hr)) {
                goto exit;
            }
            wVersionPart = (WORD) assemblyIdentity.m_version.GetRevision();
            hr = SetProperty(ASM_NAME_REVISION_NUMBER, &wVersionPart, sizeof(WORD));
            if (FAILED(hr)) {
                goto exit;
            }
        }
        // Set culture.
        if (assemblyIdentity.Have(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_CULTURE)) {
            hr = SetProperty(ASM_NAME_CULTURE,
                             (LPVOID) assemblyIdentity.m_cultureOrLanguage.GetUnicode(),
                             (assemblyIdentity.m_cultureOrLanguage.GetCount()+1) * sizeof(WCHAR));
            if (FAILED(hr)) {
                goto exit;
            }
        }
        // Set public key (token) or NULL flag. Exactly one of the three
        // cases applies; the blob buffer is shared for key and token.
        if (assemblyIdentity.Have(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_PUBLIC_KEY)) {
            SBuffer &publicKeyBuffer = assemblyIdentity.m_publicKeyOrTokenBLOB;
            const void *pBytes = publicKeyBuffer;
            // This also computes and sets the public key token.
            hr = SetProperty(ASM_NAME_PUBLIC_KEY, (void *) pBytes, publicKeyBuffer.GetSize());
            if (FAILED(hr)) {
                goto exit;
            }
        }
        else if (assemblyIdentity.Have(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_PUBLIC_KEY_TOKEN)) {
            SBuffer &publicKeyTokenBuffer = assemblyIdentity.m_publicKeyOrTokenBLOB;
            const void *pBytes = publicKeyTokenBuffer;
            hr = SetProperty(ASM_NAME_PUBLIC_KEY_TOKEN,
                             (LPVOID) pBytes,
                             publicKeyTokenBuffer.GetSize());
            if (FAILED(hr)) {
                goto exit;
            }
        }
        else if (assemblyIdentity.
                 Have(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_PUBLIC_KEY_TOKEN_NULL)) {
            hr = SetProperty(ASM_NAME_NULL_PUBLIC_KEY_TOKEN, NULL, 0);
            if (FAILED(hr)) {
                goto exit;
            }
        }
        // Set architecture.
        if (assemblyIdentity.Have(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_PROCESSOR_ARCHITECTURE)) {
            PEKIND peKind = assemblyIdentity.m_kProcessorArchitecture;
            hr = SetProperty(ASM_NAME_ARCHITECTURE, (LPVOID) &peKind, sizeof(PEKIND));
            if(FAILED(hr)) {
                goto exit;
            }
        }
        // Set retargetable flag.
        if (assemblyIdentity.Have(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_RETARGETABLE)) {
            BOOL fRetarget = TRUE;
            if (FAILED(hr = SetProperty(ASM_NAME_RETARGET, &fRetarget, sizeof(BOOL)))) {
                goto exit;
            }
        }
        // Set content type.
        if (assemblyIdentity.Have(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_CONTENT_TYPE)) {
            DWORD dwContentType = assemblyIdentity.m_kContentType;
            hr = SetProperty(ASM_NAME_CONTENT_TYPE, &dwContentType, sizeof(dwContentType));
            IfFailGoto(hr, exit);
        }
        // Set custom or NULL flag.
        if (assemblyIdentity.Have(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_CUSTOM)) {
            SBuffer &customBuffer = assemblyIdentity.m_customBLOB;
            const void *pBytes = customBuffer;
            hr = SetProperty(ASM_NAME_CUSTOM, (void *) pBytes, customBuffer.GetSize());
            if (FAILED(hr)) {
                goto exit;
            }
        }
        else if (assemblyIdentity.Have(BINDER_SPACE::AssemblyIdentity::IDENTITY_FLAG_CUSTOM_NULL)) {
            hr = SetProperty(ASM_NAME_NULL_CUSTOM, NULL, 0);
            if (FAILED(hr)) {
                goto exit;
            }
        }
    }
    EX_CATCH_HRESULT(hr);
    exit:
    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName::GetVersion
// ---------------------------------------------------------------------------
// Reads four consecutive WORD version properties starting at
// dwMajorVersionEnumValue (major, minor, build, revision — the ASM_NAME
// enum lays them out contiguously, see the rNameFlags table in IsPartial)
// and packs them as *pdwVersionHi = major.minor, *pdwVersionLow =
// build.revision.  Fails with FUSION_E_INVALID_NAME if any field is unset.
HRESULT
CAssemblyName::GetVersion(
        /* [in]  */ DWORD dwMajorVersionEnumValue,
        /* [out] */ LPDWORD pdwVersionHi,
        /* [out] */ LPDWORD pdwVersionLow)
{
    HRESULT hr = S_OK;
    DWORD cb = sizeof(WORD);
    WORD wVerMajor = 0, wVerMinor = 0, wRevNo = 0, wBldNo = 0;
    if(!pdwVersionHi || !pdwVersionLow) {
        hr = E_INVALIDARG;
        goto Exit;
    }
    *pdwVersionHi = *pdwVersionLow = 0;
    // The inline "&(cb = sizeof(WORD))" resets cb to the buffer size before
    // each call, since GetProperty overwrites it with the bytes returned.
    if(FAILED( (hr = GetProperty(dwMajorVersionEnumValue, &wVerMajor, &(cb = sizeof(WORD))))))
        goto Exit;
    if (cb == 0) {
        // Property exists but holds no data: the version is incomplete.
        hr = FUSION_E_INVALID_NAME;
        goto Exit;
    }
    if(FAILED( (hr = GetProperty(dwMajorVersionEnumValue+1, &wVerMinor, &(cb = sizeof(WORD))))))
        goto Exit;
    if (cb == 0) {
        hr = FUSION_E_INVALID_NAME;
        goto Exit;
    }
    if(FAILED( (hr = GetProperty(dwMajorVersionEnumValue+2, &wBldNo, &(cb = sizeof(WORD))))))
        goto Exit;
    if (cb == 0) {
        hr = FUSION_E_INVALID_NAME;
        goto Exit;
    }
    if(FAILED( (hr = GetProperty(dwMajorVersionEnumValue+3, &wRevNo, &(cb = sizeof(WORD))))))
        goto Exit;
    if (cb == 0) {
        hr = FUSION_E_INVALID_NAME;
        goto Exit;
    }
    *pdwVersionHi = MAKELONG(wVerMinor, wVerMajor);
    *pdwVersionLow = MAKELONG(wRevNo, wBldNo);
    Exit:
    return hr;
}
// ---------------------------------------------------------------------------
// CAssemblyName::CopyProperties
// ---------------------------------------------------------------------------
// Copies properties from pSource into pTarget.  When dwSize is 0 every
// non-empty property is copied; otherwise only the dwSize property ids
// listed in properties[] are copied.  The public-key-token and custom flags
// and the path modifier are always carried over.  Going through SetProperty
// (rather than writing _rProp directly) also invalidates pTarget's cached
// textual identity.
HRESULT
CAssemblyName::CopyProperties(CAssemblyName *pSource,
                              CAssemblyName *pTarget,
                              const DWORD properties[],
                              DWORD dwSize)
{
    HRESULT hr = S_OK;
    DWORD i = 0;
    FusionProperty prop;
    _ASSERTE(pSource && pTarget);
    if (!dwSize) {
        // Copy all set properties.
        for( i = 0; i < ASM_NAME_MAX_PARAMS; i ++) {
            prop = pSource->_rProp[i];
            if (prop.cb) {
                if (FAILED(hr = pTarget->SetProperty(i, prop.pv, prop.cb))) {
                    goto Exit;
                }
            }
        }
    }
    else {
        // Copy only the requested subset.
        for (i = 0; i<dwSize; i++) {
            _ASSERTE(properties[i] < ASM_NAME_MAX_PARAMS);
            prop = pSource->_rProp[properties[i]];
            if (prop.cb) {
                if (FAILED(hr = pTarget->SetProperty(properties[i], prop.pv, prop.cb))) {
                    goto Exit;
                }
            }
        }
    }
    pTarget->_fPublicKeyToken = pSource->_fPublicKeyToken;
    pTarget->_fCustom = pSource->_fCustom;
    if (pSource->_pwzPathModifier) {
        // NOTE(review): assumes pTarget has no path modifier yet; a
        // pre-existing one would not be freed here — confirm callers.
        pTarget->_pwzPathModifier = WSTRDupDynamic(pSource->_pwzPathModifier);
        if(!pTarget->_pwzPathModifier) {
            hr = E_OUTOFMEMORY;
            goto Exit;
        }
    }
    Exit:
    return hr;
}
// Thin typed wrappers over CAssemblyName::SetPropertyInternal for callers
// that hold an IAssemblyName.  "Internal" avoids the finalization check and
// the public-key side effects of the public SetProperty.
namespace LegacyFusion
{
    // Sets a wide-string property; the stored size includes the NUL.
    HRESULT SetStringProperty(IAssemblyName *pIAssemblyName,
                              DWORD dwPropertyId,
                              SString &value)
    {
        CAssemblyName *pAssemblyName = static_cast<CAssemblyName *>(pIAssemblyName);
        const WCHAR *pValue = value.GetUnicode();
        DWORD dwCBValue = (value.GetCount() + 1) * sizeof(WCHAR);
        return pAssemblyName->SetPropertyInternal(dwPropertyId,
                                                  const_cast<WCHAR *>(pValue),
                                                  dwCBValue);
    }
    // Sets a raw binary blob property.
    HRESULT SetBufferProperty(IAssemblyName *pIAssemblyName,
                              DWORD dwPropertyId,
                              SBuffer &value)
    {
        CAssemblyName *pAssemblyName = static_cast<CAssemblyName *>(pIAssemblyName);
        const BYTE *pValue = value; // special operator
        DWORD dwCBValue = value.GetSize() * sizeof(BYTE);
        return pAssemblyName->SetPropertyInternal(dwPropertyId,
                                                  const_cast<BYTE *>(pValue),
                                                  dwCBValue);
    }
    // Sets a WORD-sized property; restricted to the four version fields,
    // which are the only WORD-typed assembly name properties.
    HRESULT SetWordProperty(IAssemblyName *pIAssemblyName,
                            DWORD dwPropertyId,
                            DWORD dwValue)
    {
        CAssemblyName *pAssemblyName = static_cast<CAssemblyName *>(pIAssemblyName);
        WORD wValue = static_cast<WORD>(dwValue);
        DWORD wCBValue = sizeof(WORD);
        // This file-internal function is and must be only used to set version fields
        PREFIX_ASSUME((dwPropertyId == ASM_NAME_MAJOR_VERSION) ||
                      (dwPropertyId == ASM_NAME_MINOR_VERSION) ||
                      (dwPropertyId == ASM_NAME_BUILD_NUMBER) ||
                      (dwPropertyId == ASM_NAME_REVISION_NUMBER));
        return pAssemblyName->SetPropertyInternal(dwPropertyId, &wValue, wCBValue);
    }
    // Sets a DWORD-sized property.
    HRESULT SetDwordProperty(IAssemblyName *pIAssemblyName,
                             DWORD dwPropertyId,
                             DWORD dwValue)
    {
        CAssemblyName *pAssemblyName = static_cast<CAssemblyName *>(pIAssemblyName);
        DWORD dwCBValue = sizeof(DWORD);
        return pAssemblyName->SetPropertyInternal(dwPropertyId, &dwValue, dwCBValue);
    }
};
// Convenience helpers around IAssemblyName::GetProperty with consistent
// HRESULT conventions: S_OK = value present, S_FALSE = no value,
// ERROR_INSUFFICIENT_BUFFER = caller's buffer too small.
namespace fusion
{
    namespace util
    {
        namespace priv
        {
            // The three flag-style "NULL" properties legitimately have zero
            // length even when set; they must not be reported as "no value".
            inline bool IsNullProperty(DWORD dwProperty)
            {
                LIMITED_METHOD_CONTRACT;
                return dwProperty == ASM_NAME_NULL_PUBLIC_KEY_TOKEN ||
                       dwProperty == ASM_NAME_NULL_PUBLIC_KEY ||
                       dwProperty == ASM_NAME_NULL_CUSTOM;
            }
            // Converts a NUL-terminated wide string to a freshly allocated
            // UTF-8 string (caller owns *pszStr, delete[]).
            HRESULT ConvertToUtf8(PCWSTR wzStr, __deref_out UTF8** pszStr)
            {
                HRESULT hr = S_OK;
                _ASSERTE(wzStr != nullptr && pszStr != nullptr);
                if (wzStr == nullptr || pszStr == nullptr)
                {
                    return E_INVALIDARG;
                }
                // First call computes the required byte count (incl. NUL).
                DWORD cbSize = WszWideCharToMultiByte(CP_UTF8, 0, wzStr, -1, NULL, 0, NULL, NULL);
                if(cbSize == 0)
                {
                    return SUCCEEDED(hr = HRESULT_FROM_GetLastError()) ? E_UNEXPECTED : hr;
                }
                NewArrayHolder<UTF8> szStr = new (nothrow) UTF8[cbSize];
                IfNullRet(szStr);
                cbSize = WszWideCharToMultiByte(CP_UTF8, 0, wzStr, -1, static_cast<LPSTR>(szStr), cbSize, NULL, NULL);
                if(cbSize == 0)
                {
                    return SUCCEEDED(hr = HRESULT_FROM_GetLastError()) ? E_UNEXPECTED : hr;
                }
                *pszStr = szStr.Extract();
                return S_OK;
            }
        }
        // Non-allocating helper.
        // Forwards to IAssemblyName::GetProperty, mapping "set but empty"
        // non-NULL properties to S_FALSE.
        HRESULT GetProperty(IAssemblyName * pName, DWORD dwProperty, PVOID pBuf, DWORD *pcbBuf)
        {
            LIMITED_METHOD_CONTRACT;
            HRESULT hr = S_OK;
            _ASSERTE(pName != nullptr && pcbBuf != nullptr);
            if (pName == nullptr || pcbBuf == nullptr)
            {
                return E_INVALIDARG;
            }
            hr = pName->GetProperty(dwProperty, pBuf, pcbBuf);
            IfFailRet(hr);
            // Zero-length non-null property means there is no value.
            if (hr == S_OK && *pcbBuf == 0 && !priv::IsNullProperty(dwProperty))
            {
                hr = S_FALSE;
            }
            return hr;
        }
        // Allocating helper.
        // If *ppBuf is NULL on entry, allocates a buffer of the required
        // size (caller owns it, delete[]); otherwise behaves like the
        // non-allocating overload.
        HRESULT GetProperty(IAssemblyName * pName, DWORD dwProperty, PBYTE * ppBuf, DWORD *pcbBuf)
        {
            LIMITED_METHOD_CONTRACT;
            HRESULT hr = S_OK;
            _ASSERTE(ppBuf != nullptr && (*ppBuf == nullptr || pcbBuf != nullptr));
            if (ppBuf == nullptr || (*ppBuf != nullptr && pcbBuf == nullptr))
            {
                return E_INVALIDARG;
            }
            DWORD cbBuf = 0;
            if (pcbBuf == nullptr)
                pcbBuf = &cbBuf;
            hr = GetProperty(pName, dwProperty, *ppBuf, pcbBuf);
            // No provided buffer constitutes a request for one to be allocated.
            if (*ppBuf == nullptr)
            {
                // If it's a null property, allocate a single-byte array to provide consistency.
                if (hr == S_OK && priv::IsNullProperty(dwProperty))
                {
                    *ppBuf = new (nothrow) BYTE[1];
                    IfNullRet(*ppBuf);
                }
                // Great, get the value.
                else if (hr == HRESULT_FROM_WIN32(ERROR_INSUFFICIENT_BUFFER))
                {
                    NewArrayHolder<BYTE> pBuf = new (nothrow) BYTE[*pcbBuf];
                    IfNullRet(pBuf);
                    hr = pName->GetProperty(dwProperty, pBuf, pcbBuf);
                    IfFailRet(hr);
                    *ppBuf = pBuf.Extract();
                    hr = S_OK;
                }
            }
            return hr;
        }
        // Reads a wide-string property into an SString.
        HRESULT GetProperty(IAssemblyName * pName, DWORD dwProperty, SString & ssVal)
        {
            LIMITED_METHOD_CONTRACT;
            HRESULT hr = S_OK;
            _ASSERTE(pName != nullptr);
            if (pName == nullptr)
            {
                return E_INVALIDARG;
            }
            DWORD cbSize = 0;
            // Size probe; ERROR_INSUFFICIENT_BUFFER means the value exists.
            hr = GetProperty(pName, dwProperty, static_cast<PBYTE>(nullptr), &cbSize);
            if (hr == HRESULT_FROM_WIN32(ERROR_INSUFFICIENT_BUFFER))
            {
                EX_TRY
                {
                    // cbSize includes the terminating NUL, hence the -1.
                    PWSTR wzNameBuf = ssVal.OpenUnicodeBuffer(cbSize / sizeof(WCHAR) - 1);
                    hr = GetProperty(pName, dwProperty, reinterpret_cast<PBYTE>(wzNameBuf), &cbSize);
                    ssVal.CloseBuffer();
                    IfFailThrow(hr);
                    ssVal.Normalize();
                }
                EX_CATCH_HRESULT(hr);
                IfFailRet(hr);
            }
            return hr;
        }
        // Reads a wide-string property into a freshly allocated buffer
        // (caller owns *pwzVal, delete[]).
        HRESULT GetProperty(IAssemblyName * pName, DWORD dwProperty, __deref_out WCHAR ** pwzVal)
        {
            LIMITED_METHOD_CONTRACT;
            HRESULT hr = S_OK;
            _ASSERTE(pName != nullptr && pwzVal != nullptr);
            if (pName == nullptr || pwzVal == nullptr)
            {
                return E_INVALIDARG;
            }
            DWORD cbSize = 0;
            hr = pName->GetProperty(dwProperty, NULL, &cbSize);
            if (hr == HRESULT_FROM_WIN32(ERROR_INSUFFICIENT_BUFFER))
            {
                NewArrayHolder<WCHAR> wzVal = reinterpret_cast<PWSTR>(new (nothrow) BYTE[cbSize]);
                IfNullRet(wzVal);
                hr = pName->GetProperty(dwProperty, reinterpret_cast<PBYTE>(static_cast<PWSTR>(wzVal)), &cbSize);
                IfFailRet(hr);
                *pwzVal = wzVal.Extract();
            }
            return hr;
        }
        // Reads a string property and converts it to UTF-8
        // (caller owns *pwzOut, delete[]).
        HRESULT GetProperty(IAssemblyName * pName, DWORD dwProperty, __deref_out UTF8 **pwzOut)
        {
            LIMITED_METHOD_CONTRACT;
            HRESULT hr = S_OK;
            if (pwzOut == nullptr)
                return E_INVALIDARG;
            SmallStackSString ssStr;
            hr = GetProperty(pName, dwProperty, ssStr);
            IfFailRet(hr);
            hr = priv::ConvertToUtf8(ssStr, pwzOut);
            IfFailRet(hr);
            return hr;
        }
    }
}
|
WOWZON3/TERAsology | engine/src/main/java/org/terasology/logic/characters/CharacterMoveInputEvent.java | <filename>engine/src/main/java/org/terasology/logic/characters/CharacterMoveInputEvent.java
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.logic.characters;
import org.joml.Vector3f;
import org.joml.Vector3fc;
import org.terasology.network.NetworkEvent;
import org.terasology.network.ServerEvent;
/**
*/
@ServerEvent
/**
 * Client-to-server event carrying one tick of character movement input:
 * look angles, movement direction, movement mode flags, and the duration
 * the input applies for. Tagged with a sequence number so the server can
 * order and acknowledge inputs.
 */
@ServerEvent
public class CharacterMoveInputEvent extends NetworkEvent {
    private long delta;
    private float pitch;
    private float yaw;
    private boolean running;
    private boolean crouching;
    private boolean jumpRequested;
    private Vector3f movementDirection = new Vector3f();
    private int sequenceNumber;
    private boolean firstRun = true;

    /** No-arg constructor required for network deserialization. */
    protected CharacterMoveInputEvent() {
    }

    /**
     * @deprecated use the overload that also accepts a crouching flag.
     */
    @Deprecated
    public CharacterMoveInputEvent(int sequence, float pitch, float yaw, Vector3fc movementDirection, boolean running, boolean jumpRequested, long delta) {
        this(sequence, pitch, yaw, movementDirection, running, false, jumpRequested, delta);
    }

    /**
     * Creates a fully specified movement input.
     *
     * @param sequence          client-side input sequence number
     * @param pitch             look pitch in degrees
     * @param yaw               look yaw in degrees
     * @param movementDirection desired movement direction (copied)
     * @param running           whether the character is running
     * @param crouching         whether the character is crouching
     * @param jumpRequested     whether a jump was requested this tick
     * @param delta             duration of the input in milliseconds
     */
    public CharacterMoveInputEvent(int sequence, float pitch, float yaw, Vector3fc movementDirection, boolean running, boolean crouching, boolean jumpRequested, long delta) {
        this.sequenceNumber = sequence;
        this.pitch = pitch;
        this.yaw = yaw;
        this.movementDirection.set(movementDirection);
        this.running = running;
        this.crouching = crouching;
        this.jumpRequested = jumpRequested;
        this.delta = delta;
    }

    /**
     * Replays an earlier input over a different duration. The jump request
     * is deliberately dropped so a single jump cannot fire twice.
     *
     * @param repeatInput the input to repeat
     * @param withLength  new duration in milliseconds
     */
    public CharacterMoveInputEvent(CharacterMoveInputEvent repeatInput, int withLength) {
        this.sequenceNumber = repeatInput.sequenceNumber;
        this.pitch = repeatInput.pitch;
        this.yaw = repeatInput.yaw;
        this.movementDirection.set(repeatInput.movementDirection);
        this.running = repeatInput.running;
        this.crouching = repeatInput.crouching;
        this.jumpRequested = false;
        this.delta = withLength;
    }

    /** @return input duration in milliseconds */
    public long getDeltaMs() {
        return delta;
    }

    /** @return input duration in seconds */
    public float getDelta() {
        return delta / 1000f;
    }

    /** @return look pitch in degrees */
    public float getPitch() {
        return pitch;
    }

    /** @return look yaw in degrees */
    public float getYaw() {
        return yaw;
    }

    /** @return the desired movement direction (not copied) */
    public Vector3fc getMovementDirection() {
        return movementDirection;
    }

    /** @return whether the character is running */
    public boolean isRunning() {
        return running;
    }

    /** @return whether the character is crouching */
    public boolean isCrouching() {
        return crouching;
    }

    /** @return whether a jump was requested */
    public boolean isJumpRequested() {
        return jumpRequested;
    }

    /** @return the client-side input sequence number */
    public int getSequenceNumber() {
        return sequenceNumber;
    }

    /** @return true until {@link #runComplete()} is called */
    public boolean isFirstRun() {
        return firstRun;
    }

    /** Marks this input as having been processed once. */
    public void runComplete() {
        firstRun = false;
    }
}
|
reels-research/iOS-Private-Frameworks | CoreParsec.framework/_CPNetworkTimingData.h | /* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/CoreParsec.framework/CoreParsec
*/
// Protobuf-backed container (PBCodable) for per-request network timing and
// size metrics, broadly mirroring the W3C Resource Timing milestones
// (fetch/DNS/connect/request/response) plus Apple-specific connection info.
// Generated header — declarations only; implementations live in the binary.
@interface _CPNetworkTimingData : PBCodable <NSSecureCoding, _CPNetworkTimingData> {
    // Backing ivars for the properties declared below.
    bool  _QUICWhitelistedDomain;
    bool  _TFOSuccess;
    unsigned int  _connectEnd;
    unsigned int  _connectStart;
    bool  _connectionRace;
    bool  _connectionReused;
    NSString * _connectionUUID;
    unsigned int  _domainLookupEnd;
    unsigned int  _domainLookupStart;
    unsigned int  _fetchStart;
    NSString * _interfaceIdentifier;
    NSString * _networkProtocolName;
    NSData * _peerAddress;
    unsigned int  _redirectCount;
    unsigned int  _redirectCountW3C;
    unsigned int  _redirectEnd;
    unsigned int  _redirectStart;
    unsigned int  _requestEnd;
    unsigned int  _requestHeaderSize;
    unsigned int  _requestStart;
    unsigned int  _responseBodyBytesDecoded;
    unsigned int  _responseBodyBytesReceived;
    unsigned int  _responseEnd;
    unsigned int  _responseHeaderSize;
    unsigned int  _responseStart;
    unsigned int  _secureConnectStart;
    _CPTCPInfo * _startTimeCounts;
    _CPTCPInfo * _stopTimeCounts;
    double  _timingDataInit;
    _CPTuscanyConnectionInfo * _tuscany;
}

// Timing milestones, byte counts and connection metadata.
@property (nonatomic) bool QUICWhitelistedDomain;
@property (nonatomic) bool TFOSuccess;
@property (nonatomic) unsigned int connectEnd;
@property (nonatomic) unsigned int connectStart;
@property (nonatomic) bool connectionRace;
@property (nonatomic) bool connectionReused;
@property (nonatomic, copy) NSString *connectionUUID;
@property (readonly, copy) NSString *debugDescription;
@property (readonly, copy) NSString *description;
@property (nonatomic) unsigned int domainLookupEnd;
@property (nonatomic) unsigned int domainLookupStart;
@property (nonatomic) unsigned int fetchStart;
@property (readonly) unsigned long long hash;
@property (nonatomic, copy) NSString *interfaceIdentifier;
@property (nonatomic, readonly) NSData *jsonData;
@property (nonatomic, copy) NSString *networkProtocolName;
@property (nonatomic, copy) NSData *peerAddress;
@property (nonatomic) unsigned int redirectCount;
@property (nonatomic) unsigned int redirectCountW3C;
@property (nonatomic) unsigned int redirectEnd;
@property (nonatomic) unsigned int redirectStart;
@property (nonatomic) unsigned int requestEnd;
@property (nonatomic) unsigned int requestHeaderSize;
@property (nonatomic) unsigned int requestStart;
@property (nonatomic) unsigned int responseBodyBytesDecoded;
@property (nonatomic) unsigned int responseBodyBytesReceived;
@property (nonatomic) unsigned int responseEnd;
@property (nonatomic) unsigned int responseHeaderSize;
@property (nonatomic) unsigned int responseStart;
@property (nonatomic) unsigned int secureConnectStart;
@property (nonatomic, retain) _CPTCPInfo *startTimeCounts;
@property (nonatomic, retain) _CPTCPInfo *stopTimeCounts;
@property (readonly) Class superclass;
@property (nonatomic) double timingDataInit;
@property (nonatomic, retain) _CPTuscanyConnectionInfo *tuscany;

// Class methods, initializers, accessors and PBCodable serialization.
+ (id)startMetricsForNormalization;

- (void).cxx_destruct;
- (bool)QUICWhitelistedDomain;
- (bool)TFOSuccess;
- (unsigned int)connectEnd;
- (unsigned int)connectStart;
- (bool)connectionRace;
- (bool)connectionReused;
- (id)connectionUUID;
- (id)dictionaryRepresentation;
- (unsigned int)domainLookupEnd;
- (unsigned int)domainLookupStart;
- (unsigned int)fetchStart;
- (unsigned long long)hash;
- (id)initWithDictionary:(id)arg1;
- (id)initWithJSON:(id)arg1;
- (id)initWithTelemetryDictionary:(id)arg1;
- (id)interfaceIdentifier;
- (bool)isEqual:(id)arg1;
- (id)jsonData;
- (id)networkProtocolName;
- (id)peerAddress;
- (bool)readFrom:(id)arg1;
- (unsigned int)redirectCount;
- (unsigned int)redirectCountW3C;
- (unsigned int)redirectEnd;
- (unsigned int)redirectStart;
- (unsigned int)requestEnd;
- (unsigned int)requestHeaderSize;
- (unsigned int)requestStart;
- (bool)requiresQueryId;
- (unsigned int)responseBodyBytesDecoded;
- (unsigned int)responseBodyBytesReceived;
- (unsigned int)responseEnd;
- (unsigned int)responseHeaderSize;
- (unsigned int)responseStart;
- (unsigned int)secureConnectStart;
- (void)setConnectEnd:(unsigned int)arg1;
- (void)setConnectStart:(unsigned int)arg1;
- (void)setConnectionRace:(bool)arg1;
- (void)setConnectionReused:(bool)arg1;
- (void)setConnectionUUID:(id)arg1;
- (void)setDomainLookupEnd:(unsigned int)arg1;
- (void)setDomainLookupStart:(unsigned int)arg1;
- (void)setFetchStart:(unsigned int)arg1;
- (void)setInterfaceIdentifier:(id)arg1;
- (void)setNetworkProtocolName:(id)arg1;
- (void)setPeerAddress:(id)arg1;
- (void)setQUICWhitelistedDomain:(bool)arg1;
- (void)setRedirectCount:(unsigned int)arg1;
- (void)setRedirectCountW3C:(unsigned int)arg1;
- (void)setRedirectEnd:(unsigned int)arg1;
- (void)setRedirectStart:(unsigned int)arg1;
- (void)setRequestEnd:(unsigned int)arg1;
- (void)setRequestHeaderSize:(unsigned int)arg1;
- (void)setRequestStart:(unsigned int)arg1;
- (void)setResponseBodyBytesDecoded:(unsigned int)arg1;
- (void)setResponseBodyBytesReceived:(unsigned int)arg1;
- (void)setResponseEnd:(unsigned int)arg1;
- (void)setResponseHeaderSize:(unsigned int)arg1;
- (void)setResponseStart:(unsigned int)arg1;
- (void)setSecureConnectStart:(unsigned int)arg1;
- (void)setStartTimeCounts:(id)arg1;
- (void)setStopTimeCounts:(id)arg1;
- (void)setTFOSuccess:(bool)arg1;
- (void)setTimingDataInit:(double)arg1;
- (void)setTuscany:(id)arg1;
- (id)startTimeCounts;
- (id)stopTimeCounts;
- (double)timingDataInit;
- (id)tuscany;
- (void)writeTo:(id)arg1;

@end
|
se77en/LibCoroC | examples/C/ticker.c | // Copyright 2016 <NAME> (<EMAIL>). All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.txt file.
#include <stdlib.h>
#include <stdio.h>
#include "libcoroc.h"
void callback(void) { printf("\ttime out!\n"); }
// Demonstrates LibCoroC timers: arm a repeating timer, receive three
// expirations over the timer's channel interface, then stop and free it.
int main(int argc, char** argv) {
  uint64_t awaken = 0;  // timestamp delivered by the timer channel
  int i = 0;
  // Period and initial delay are 1000000 * 2, i.e. presumably microseconds
  // for a 2-second interval — confirm against the libcoroc API.
  coroc_timer_t timer = coroc_timer_allocate(1000000 * 2, callback);
  coroc_timer_after(timer, 1000000 * 2); // 2 seconds later
  for (i = 0; i < 3; i++) {
    printf("waiting for 2 seconds!\n");
    // A timer is also a channel: each receive blocks until the next expiry.
    coroc_chan_recv((coroc_chan_t)timer, &awaken);
    printf("awaken, time is %llu!\n", (long long unsigned)awaken);
  }
  printf("release the timer ..\n");
  coroc_timer_stop(timer);
  coroc_timer_dealloc(timer);
  coroc_coroutine_exit(0);
}
|
slavrd/go-book-gopl | ch02/ex2.02/lengthconv/lengthconv.go | <gh_stars>0
//!+
// Package lengthconv converts Feet to Meters and vice versa
package lengthconv
import "fmt"
const (
    // MeterToFeetRatio is the length of one foot expressed in meters
    // (1 ft = 0.3048 m exactly).
    MeterToFeetRatio = 0.3048
)
// Meter represents the meter length unit
type Meter float64
// Foot represents the foot length unit
type Foot float64
// String implements fmt.Stringer, e.g. "1.5 m.".
func (m Meter) String() string { return fmt.Sprintf("%g m.", m) }
// String implements fmt.Stringer, e.g. "1.5 ft.".
func (f Foot) String() string { return fmt.Sprintf("%g ft.", f) }
// MToF converts a length in meters to the equivalent length in feet.
// Since one foot is MeterToFeetRatio (0.3048) meters, a length in meters
// is divided by the ratio; the original implementation multiplied, which
// returned 0.3048 ft for 1 m instead of ~3.28 ft.
func MToF(m Meter) Foot {
    return Foot(m / MeterToFeetRatio)
}
// FToM converts a length in feet to the equivalent length in meters.
// One foot is MeterToFeetRatio (0.3048) meters, so the value is multiplied
// by the ratio; the original implementation divided, which returned
// ~3.28 m for 1 ft instead of 0.3048 m.
func FToM(f Foot) Meter {
    return Meter(f * MeterToFeetRatio)
}
//!-
|
cambridgejames/coss4-server | coss4-services/user-management/src/main/java/cn/net/powerinv/coss4/service/LoginAndLogoutService.java | package cn.net.powerinv.coss4.service;
import cn.net.powerinv.coss4.vo.ThirdPartyDTO;
import cn.net.powerinv.coss4.vo.UserDTO;
import java.util.Map;
/**
 * Service handling user login and logout.
 */
public interface LoginAndLogoutService {
    /**
     * Log in with username/e-mail and password.
     * @param userDTO user credentials
     * @return login result
     */
    public Map<String, Object> loginWithUsualInfo(UserDTO userDTO);
    /**
     * Quick login via SMS verification code sent to the user's phone.
     * @param userDTO user credentials
     * @return login result
     */
    public Map<String, Object> loginWithPhone(UserDTO userDTO);
    /**
     * Log in through a third-party platform.
     * @param thirdPartyDTO third-party platform information
     * @return login result
     */
    public Map<String, Object> loginWithThirdPart(ThirdPartyDTO thirdPartyDTO);
    /**
     * Log out the current user.
     * @return logout result
     */
    public Map<String, Object> logout();
}
|
echaussidon/LSS | py/LSS/imaging/veto_masks/reference/trim_allwise_catalog.py | <gh_stars>0
# Based on AllWISE W1MPRO<13.3 catalog
# Select stars within 1.5 degree of a brick center
from __future__ import division, print_function
import sys, os, glob, time, warnings, gc
import numpy as np
# import matplotlib
# matplotlib.use("Agg")
# import matplotlib.pyplot as plt
from astropy.table import Table, vstack, hstack
import fitsio
# from astropy.io import fits
import healpy as hp
from astropy import units as u
from astropy.coordinates import SkyCoord
# Input: AllWISE bright-star catalog (W1MPRO < 13.3); output: the subset of
# those stars that lies near the DR9 footprint.
allwise_path = '/global/project/projectdirs/desi/users/rongpu/useful/w1_bright-13.3.fits'
output_path = '/global/cfs/cdirs/desi/users/rongpu/desi_mask/w1_bright-13.3-dr9.fits'

# Final selection radius: stars within 1.5 deg of a brick center (in arcsec).
search_radius = 1.5 * 3600.
search_radius_init = 4 * 3600.  # search radius for the initial healpix selection

allwise = Table(fitsio.read(allwise_path))
print(len(allwise))
# Keep the original row index so matches can be traced back to the input.
allwise['idx'] = np.arange(len(allwise))

# DR9 brick lists from both imaging footprints, de-duplicated on brickid
# (bricks in the overlap region appear in both files).
bricks_south = Table(fitsio.read('/global/cfs/cdirs/cosmo/data/legacysurvey/dr9/south/survey-bricks-dr9-south.fits.gz'))
bricks_north = Table(fitsio.read('/global/cfs/cdirs/cosmo/data/legacysurvey/dr9/north/survey-bricks-dr9-north.fits.gz'))
bricks = vstack([bricks_south, bricks_north])
_, idx = np.unique(bricks['brickid'], return_index=True)
bricks = bricks[idx]
print(len(bricks))

ra2 = np.array(bricks['ra'])
dec2 = np.array(bricks['dec'])
sky2 = SkyCoord(ra2*u.degree, dec2*u.degree, frame='icrs')

########################## Pre-selection using healpix ##########################

nside = 32
print('Healpix pixel size (square deg): {:.5f}'.format(hp.nside2pixarea(nside, degrees=True)))
npix = hp.nside2npix(nside)
hp_ra, hp_dec = hp.pix2ang(nside, np.arange(npix), nest=True, lonlat=True)

# Select healpix pixels within 4 degrees of a brick center
sky1 = SkyCoord(hp_ra*u.degree, hp_dec*u.degree, frame='icrs')
# Nearest-brick angular distance for every healpix pixel center.
_, d2d, _ = sky1.match_to_catalog_sky(sky2)
d2d = d2d.to_value('arcsec')
mask = d2d < search_radius_init
hp_idx = np.where(mask)[0]
print(len(hp_idx))

# Keep only stars falling inside one of the pre-selected healpix pixels;
# this cheap cut shrinks the catalog before the exact match below.
wise_hp_idx = hp.ang2pix(nside, allwise['RA'], allwise['DEC'], nest=True, lonlat=True)
mask = np.in1d(wise_hp_idx, hp_idx)
allwise = allwise[mask]
print(len(allwise))

########################## Find the stars near a brick ##########################

# Exact cut: nearest-brick distance for each surviving star.
ra1 = np.array(allwise['RA'])
dec1 = np.array(allwise['DEC'])
sky1 = SkyCoord(allwise['RA']*u.degree, allwise['DEC']*u.degree, frame='icrs')
_, d2d, _ = sky1.match_to_catalog_sky(sky2)
d2d = d2d.to_value('arcsec')
mask = d2d < search_radius
allwise = allwise[mask]
print(len(allwise))

allwise.write(output_path, overwrite=True)
|
smilehcb/002 | src/server/controllers/reply.js | <filename>src/server/controllers/reply.js
/**
* 评论回复控制器
*/
var validator = require('validator');
var _ = require('lodash');
var at = require('../common/at');
var EventProxy = require('eventproxy');
var UserProxy = require('../proxy').User;
var OrderProxy = require('../proxy').Order;
var ReplyProxy = require('../proxy').Reply;
var config = require('../config');
var tools = require('../common/tools');
var mongoose = require('mongoose');
/**
* messageboard留言板
*/
/**
 * Add a reply to the message board.
 * Saves the reply under a hard-coded "messageboard" object id, awards the
 * author 5 score points and one reply count, then redirects to the new
 * reply's anchor on the /about page once both operations finish.
 */
exports.messageboard_add = function (req, res, next) {
  var content = req.body.r_content;
  var reply_id = req.body.reply_id;
  // The message board's object id is hard-coded: it is derived from the
  // literal string 'messageboard'.
  var objId = new mongoose.Types.ObjectId(tools.stringToHex('messageboard'));
  // console.log(objId);
  var str = validator.trim(content);
  if (str === '') {
    return res.renderError('回复内容不能为空!', 422);
  }

  var ep = EventProxy.create();
  ep.fail(next);

  ReplyProxy.newAndSave(content, objId, req.session.user._id, reply_id, function (err, reply) {
    if (err) return next(err);
    ep.emit('reply_saved', reply);
  });

  // Reward the author and refresh the session copy of the user.
  UserProxy.getUserById(req.session.user._id, ep.done(function (user) {
    user.score += 5;
    user.reply_count += 1;
    user.save();
    req.session.user = user;
    ep.emit('score_saved');
  }));

  // Redirect only after both the reply and the score update are saved.
  ep.all('reply_saved', 'score_saved', function (reply) {
    res.redirect('/about#' + reply._id);
  });
};
/**
* 删除回复
*/
/**
 * Soft-delete a reply (sets deleted = true; the document is kept).
 * Allowed for the reply's author or an admin. When the deleted reply is a
 * top-level one (no parent reply_id), the author's score and reply count
 * are rolled back.
 */
exports.delete = function (req, res, next) {
  var reply_id = req.params.reply_id;
  if (!reply_id) return next();
  ReplyProxy.getReplyById(reply_id, function (err, reply) {
    if (err) {
      return next(err);
    }
    if (!reply) {
      res.status(422);
      res.json({ status: '该回复 ' + reply_id + ' 不存在' });
      return;
    }
    // Only the reply's author or an admin may delete it.
    if (reply.user_id.toString() === req.session.user._id.toString() || req.session.user.is_admin) {
      reply.deleted = true;
      reply.save(function (err) {
        if(err){
          return next(err);
        }
        res.json({status:'success'});
        if(!reply.reply_id){ // top-level reply: undo the author's reward
          // NOTE(review): assumes getReplyById populates reply.user —
          // confirm in the Reply proxy before relying on this rollback.
          reply.user.score-=5;
          reply.user.reply_count-=1;
          reply.user.save();
        }
      });
    }else{
      res.json({status:'failed'});
      return;
    }
  });
};
/**
* 评论置顶
*/
exports.up = function (req, res, next) {
var replyId = req.params.reply_id;
var userId = req.session.user._id;
ReplyProxy.getReplyById(replyId, function (err, reply) {
if (err) {
return next(err);
}
if (reply.user_id.equals(userId) && !config.debug) {
// 不能帮自己点赞
res.send({
success: false,
message: '做人要谦虚,不能给自己点赞。',
});
} else {
var action;
reply.ups = reply.ups || [];
var upIndex = reply.ups.indexOf(userId);
if (upIndex === -1) {
reply.ups.push(userId);
action = 'up';
} else {
reply.ups.splice(upIndex, 1);
action = 'down';
}
reply.save(function () {
res.send({
success: true,
action: action
});
});
}
});
};
/**
* 修改评论页
*/
exports.showEdit = function (req, res, next) {
var reply_id = req.params.reply_id;
ReplyProxy.getReplyById(reply_id, function (err, reply) {
if (!reply) {
return res.render404('此回复不存在或已被删除。');
}
if (req.session.user._id.equals(reply.user_id) || req.session.user.is_admin) {
res.render('reply/edit', {
reply_id: reply._id,
content: reply.content
});
} else {
return res.renderError('对不起,你不能编辑此回复。', 403);
}
});
};
/**
* 更新编辑回复
*/
exports.update = function (req, res, next) {
var reply_id = req.params.reply_id;
var content = req.body.t_content;
ReplyProxy.getReplyById(reply_id, function (err, reply) {
if (!reply) {
return res.render404('此回复不存在或已被删除。');
}
if (String(reply.user_id) === req.session.user._id.toString() || req.session.user.is_admin) {
if (content.trim().length > 0) {
reply.content = content;
reply.save(function (err) {
if (err) {
return next(err);
}
//目前写死跳转留言板
res.redirect('/about#' + reply._id);
});
} else {
return res.renderError('回复的字数太少。', 400);
}
} else {
return res.renderError('对不起,你不能编辑此回复。', 403);
}
});
}; |
Raffy23/boinc-webmanager | shared/src/main/scala/at/happywetter/boinc/shared/webrpc/package.scala | package at.happywetter.boinc.shared
/**
* Created by:
*
* @author Raphael
* @version 04.03.2018
*/
package object webrpc {

  /** Status snapshot reported by a BOINC project server:
    * daemon states, database/file counters, and per-application task stats.
    */
  case class ServerStatus(daemon_status: Seq[Daemon], database_file_states: DatabaseFileStates,
                          tasks_by_app: Seq[ServerStatusApp])

  /** A server-side daemon process and its reported status string. */
  case class Daemon(host: String, command: String, status: String)

  /** Counters describing the server's result/workunit pipeline and
    * user/host credit statistics. Field names mirror the RPC reply.
    */
  case class DatabaseFileStates(results_ready_to_send: Int,
                                results_in_progress: Int,
                                workunits_waiting_for_validation: Int,
                                workunits_waiting_for_assimilation: Int,
                                workunits_waiting_for_deletion: Int,
                                results_waiting_for_deletion: Int,
                                transitioner_backlog_hours: Double,
                                users_with_recent_credit: Int,
                                users_with_credit: Int,
                                users_registered_in_past_24_hours: Int,
                                hosts_with_recent_credit: Int,
                                hosts_with_credit: Int,
                                hosts_registered_in_past_24_hours: Int,
                                current_floating_point_speed: Double)

  /** Per-application task counts and runtime statistics. */
  case class ServerStatusApp(id: Int, name: String, unsent: Int, in_progress: Int, avg_runtime: Double,
                             min_runtime: Double, max_runtime: Double, users: Int)

  /** Credentials record used by the web RPC layer. */
  final case class User(username: String, passwordHash: String, nonce: String)
}
|
tylerwinkler/msg-game | src/Entities/Entity.cpp | #include "Entities/Entity.hpp"
#include "Components/SpriteComponent.hpp"
// Moves the entity to (x, y). The previous coordinates are remembered in
// oldPosition, then every registered position observer is notified of the
// new coordinates.
void Entity::setPosition(int x, int y)
{
    oldPosition.x = position.x;
    oldPosition.y = position.y;

    position.x = x;
    position.y = y;

    for (auto listener : m_positionObservers)
        listener->receive(x, y);
}
void Entity::assignID(int id)
{
this->id = id;
}
// Registers a component under its type key, taking ownership of it.
// The key is read into a local before std::move so the code cannot fall
// into the unspecified-argument-evaluation-order trap (reading through a
// pointer that was already moved from) if this call is ever refactored
// into one that moves during argument evaluation.
void Entity::addComponent(std::unique_ptr<Component> component)
{
    const int componentType = component->type;
    m_components.insert(std::make_pair(componentType, std::move(component)));
}
// Returns a reference to the component registered under the given type.
// Throws std::logic_error when no such component exists.
// Uses a single map lookup (find) instead of the original count() + at()
// pair, which searched the map twice.
Component& Entity::getComponentByType(int type)
{
    auto it = m_components.find(type);
    if (it == m_components.end())
    {
        throw std::logic_error("Component does not exist");
    }
    return *it->second;
}
// Initializes every attached component with this entity's id.
// Stops at the first component that fails and reports the failure.
bool Entity::init()
{
    bool allInitialised = true;
    for (auto it = m_components.begin(); allInitialised && it != m_components.end(); ++it)
    {
        allInitialised = it->second->init(id);
    }
    return allInitialised;
}
// Subscribes a component to position-change notifications
// (see setPosition).
void Entity::addObserver(Component* component)
{
    m_positionObservers.emplace_back(component);
}
|
adorsys/xs2a-gateway | xs2a-adapter-service-loader/src/test/java/de/adorsys/xs2a/adapter/serviceloader/TestAccountInformationService.java | <filename>xs2a-adapter-service-loader/src/test/java/de/adorsys/xs2a/adapter/serviceloader/TestAccountInformationService.java
/*
* Copyright 2018-2022 adorsys GmbH & Co KG
*
* This program is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or (at
* your option) any later version. This program is distributed in the hope that
* it will be useful, but WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see https://www.gnu.org/licenses/.
*
* This project is also available under a separate commercial license. You can
* contact us at <EMAIL>.
*/
package de.adorsys.xs2a.adapter.serviceloader;
import de.adorsys.xs2a.adapter.api.AccountInformationService;
import de.adorsys.xs2a.adapter.api.RequestHeaders;
import de.adorsys.xs2a.adapter.api.RequestParams;
import de.adorsys.xs2a.adapter.api.Response;
import de.adorsys.xs2a.adapter.api.model.*;
/**
 * No-op {@link AccountInformationService} implementation used by the
 * service-loader tests: every operation simply returns {@code null}.
 */
public class TestAccountInformationService implements AccountInformationService {

    @Override
    public Response<ConsentsResponse201> createConsent(RequestHeaders requestHeaders,
                                                       RequestParams requestParams,
                                                       Consents body) {
        return null;
    }

    @Override
    public Response<ConsentInformationResponse200Json> getConsentInformation(String consentId,
                                                                             RequestHeaders requestHeaders,
                                                                             RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<Void> deleteConsent(String consentId,
                                        RequestHeaders requestHeaders,
                                        RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<ConsentStatusResponse200> getConsentStatus(String consentId,
                                                               RequestHeaders requestHeaders,
                                                               RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<Authorisations> getConsentAuthorisation(String consentId,
                                                            RequestHeaders requestHeaders,
                                                            RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<StartScaprocessResponse> startConsentAuthorisation(String consentId,
                                                                       RequestHeaders requestHeaders,
                                                                       RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<StartScaprocessResponse> startConsentAuthorisation(String consentId,
                                                                       RequestHeaders requestHeaders,
                                                                       RequestParams requestParams,
                                                                       UpdatePsuAuthentication updatePsuAuthentication) {
        return null;
    }

    @Override
    public Response<SelectPsuAuthenticationMethodResponse> updateConsentsPsuData(String consentId,
                                                                                 String authorisationId,
                                                                                 RequestHeaders requestHeaders,
                                                                                 RequestParams requestParams,
                                                                                 SelectPsuAuthenticationMethod selectPsuAuthenticationMethod) {
        return null;
    }

    @Override
    public Response<ScaStatusResponse> updateConsentsPsuData(String consentId,
                                                             String authorisationId,
                                                             RequestHeaders requestHeaders,
                                                             RequestParams requestParams,
                                                             TransactionAuthorisation transactionAuthorisation) {
        return null;
    }

    @Override
    public Response<UpdatePsuAuthenticationResponse> updateConsentsPsuData(String consentId,
                                                                           String authorisationId,
                                                                           RequestHeaders requestHeaders,
                                                                           RequestParams requestParams,
                                                                           UpdatePsuAuthentication updatePsuAuthentication) {
        return null;
    }

    @Override
    public Response<AccountList> getAccountList(RequestHeaders requestHeaders,
                                                RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<OK200AccountDetails> readAccountDetails(String accountId,
                                                            RequestHeaders requestHeaders,
                                                            RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<TransactionsResponse200Json> getTransactionList(String accountId,
                                                                    RequestHeaders requestHeaders,
                                                                    RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<String> getTransactionListAsString(String accountId,
                                                       RequestHeaders requestHeaders,
                                                       RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<ScaStatusResponse> getConsentScaStatus(String consentId,
                                                           String authorisationId,
                                                           RequestHeaders requestHeaders,
                                                           RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<ReadAccountBalanceResponse200> getBalances(String accountId,
                                                               RequestHeaders requestHeaders,
                                                               RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<CardAccountList> getCardAccountList(RequestHeaders requestHeaders, RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<OK200CardAccountDetails> getCardAccountDetails(String accountId, RequestHeaders requestHeaders, RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<ReadCardAccountBalanceResponse200> getCardAccountBalances(String accountId, RequestHeaders requestHeaders, RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<CardAccountsTransactionsResponse200> getCardAccountTransactionList(String accountId, RequestHeaders requestHeaders, RequestParams requestParams) {
        return null;
    }

    @Override
    public Response<OK200TransactionDetails> getTransactionDetails(String accountId,
                                                                   String transactionId,
                                                                   RequestHeaders requestHeaders,
                                                                   RequestParams requestParams) {
        return null;
    }
}
|
d4x337/reloaded | db/migrate/20161015102605_add_image_to_posts.rb | class AddImageToPosts < ActiveRecord::Migration
def self.up
change_table :posts do |t|
t.attachment :image
end
end
def self.down
remove_column :posts, :image, :attachment
end
end
|
Justin-Teng/HackerRank | Algorithms/Implementation/Easy/Sock-Merchant.cpp | <filename>Algorithms/Implementation/Easy/Sock-Merchant.cpp
#include <bits/stdc++.h>
using namespace std;
// Counts the number of matching pairs among the n sock colors in ar.
// Streams over the input keeping a set of colors seen an odd number of
// times; the original used an unordered_map with dummy values purely as a
// set, which this replaces with the idiomatic unordered_set.
int sockMerchant(int n, vector <int> ar) {
    // Colors currently unpaired (seen an odd number of times so far).
    unordered_set<int> unpaired;
    int pairs = 0;
    for (int i = 0; i < n; i++) {
        if (unpaired.erase(ar[i]) == 1) {
            // Second sock of this color: completes a pair.
            pairs++;
        } else {
            unpaired.insert(ar[i]);
        }
    }
    return pairs;
}
// Reads the sock count followed by the sock colors from stdin and prints
// the number of matching pairs.
int main() {
    int n;
    cin >> n;

    vector<int> ar(n);
    for (auto& color : ar) {
        cin >> color;
    }

    cout << sockMerchant(n, ar) << endl;
    return 0;
}
|
monadgroup/sy17 | tool/tool/dependency/shadermanager.h | #pragma once
#include <map>
#include <memory>
#include <string>
#include <set>
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#define SH_CHANGENOTIFY 2000
namespace monad {

    class ShaderDependency;

    // Caches ShaderDependency objects by path and watches a directory tree
    // for file changes on a background Win32 thread.
    class ShaderManager {
        // Weak cache keyed by path: entries expire once no shared_ptr
        // owners remain.
        std::map<const std::string, std::weak_ptr<ShaderDependency>> cache;
        // Paths reported as changed, drained by update().
        std::set<std::string> changeList;
        bool isWatching = false;
        // NOTE(review): presumably watchCriticalSection guards isWatching
        // and changeCriticalSection guards changeList — confirm in the .cpp.
        CRITICAL_SECTION watchCriticalSection;
        CRITICAL_SECTION changeCriticalSection;
        // Root directory being watched.
        std::string watchRoot;
        HANDLE threadId;
        HANDLE watchFile;

        // Background-thread entry point (WinAPI signature).
        static DWORD WINAPI watchThread(void *param);

        void setIsWatching(bool val);
        bool getIsWatching();

    public:
        ShaderManager();
        ~ShaderManager();

        // Starts watching the given directory root for changes.
        void startWatching(const std::string &root);
        // Processes changes collected since the last call.
        void update();
        void stopWatching();

        // Returns the cached dependency for the path (may be empty).
        std::shared_ptr<ShaderDependency> get(const std::string &path);
        void add(std::shared_ptr<ShaderDependency> dep);

        // Normalizes a full path; see the .cpp for the exact rule.
        static std::string nicePath(const std::string &fullpath);
    };

    // Process-wide manager instance.
    extern ShaderManager globalManager;
}
|
pparkddo/ps | boj/9663/Main.java | import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
/**
 * Counts the number of distinct N-queens placements on an N×N board by
 * depth-first search, validating each candidate square against every queen
 * already placed.
 */
class Solution {

    // Board dimension: number of rows, columns and queens.
    private int boardSize;
    // occupied[r][c] is true while a queen sits at (r, c).
    private boolean occupied[][];
    // Number of complete placements found so far.
    private int solutions = 0;

    Solution(int n) {
        this.boardSize = n;
        this.occupied = new boolean[n][n];
    }

    /** @return the number of solutions found by {@link #run()} */
    public int getCount() {
        return solutions;
    }

    /** Tries every column of the first row as a starting square. */
    public void run() {
        for (int col = 0; col < boardSize; col++) {
            dfs(0, col, 1);
        }
    }

    /**
     * Attempts to place the depth-th queen at (row, column); aborts the
     * branch when the square clashes with any queen already on the board,
     * counts a solution when all queens are placed, and otherwise recurses
     * over every column of the next row.
     */
    private void dfs(int row, int column, int depth) {
        for (int r = 0; r < boardSize; r++) {
            for (int c = 0; c < boardSize; c++) {
                if (!occupied[r][c]) {
                    continue;
                }
                if (isSameColumn(column, c) || isLocatedDiagonally(row, column, r, c)) {
                    return;
                }
            }
        }

        if (depth == boardSize) {
            solutions++;
            return;
        }

        for (int nextCol = 0; nextCol < boardSize; nextCol++) {
            occupied[row][column] = true;
            dfs(row + 1, nextCol, depth + 1);
            occupied[row][column] = false;
        }
    }

    private boolean isSameColumn(int column, int otherColumn) {
        return column == otherColumn;
    }

    private boolean isLocatedDiagonally(int row, int column, int otherRow, int otherColumn) {
        // Same "\" diagonal when row - col matches; same "/" diagonal when
        // row + col matches.
        return row - column == otherRow - otherColumn
                || row + column == otherRow + otherColumn;
    }
}
/**
 * Entry point: reads the board size N from stdin and prints the number of
 * distinct N-queens placements.
 */
public class Main {
    public static void main(String[] args) throws IOException {
        final BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        final int boardSize = Integer.parseInt(reader.readLine());
        reader.close();

        final Solution solver = new Solution(boardSize);
        solver.run();
        System.out.println(solver.getCount());
    }
}
|
LibreTask/website | src/components/Signup.js | /*
* @link https://libretask.org/
* @license https://github.com/LibreTask/backend/blob/master/LICENSE.md
*/
import React, { Component } from "react";
import { browserHistory } from "react-router";
import { connect } from "react-redux";
import {
Button,
FormGroup,
ControlLabel,
FormControl,
HelpBlock
} from "react-bootstrap";
import * as HeaderActions from "../actions/header";
import * as FooterActions from "../actions/footer";
import * as UserActions from "../actions/user";
import * as ProfileController from "../controllers/profile";
import AppValidator from "../validator";
import AppStyles from "../styles";
import * as Client from "../middleware/client";
// Local styles for the legal-agreement text under the signup form.
const styles = {
  legal: {
    fontSize: "95%",
    margin: "auto"
  },
  agreement: {
    cursor: "pointer",
    textDecoration: "underline"
  }
};
/**
 * Signup page: collects email + password (with confirmation), validates
 * them client-side, creates the account via the API, stores the returned
 * profile locally, logs the user in, and redirects to the home page.
 * The app header and footer are hidden while this page is mounted.
 */
class Signup extends Component {
  constructor(props) {
    super(props);

    this.state = {
      signupError: "",
      isSigningUp: false,
      currentEmail: "@", // initialize with "@"; it's part of all emails
      currentPassword: "",
      currentConfirmPassword: "",
      emailValidationError: "",
      passwordValidationError: "",
      confirmPasswordValidationError: ""
    };
  }

  componentDidMount = () => {
    this.props.hideHeader();
    this.props.hideFooter();
  };

  componentWillUnmount = () => {
    this.props.showHeader();
    this.props.showFooter();
  };

  // Validates the three form fields and, when all pass, calls the signup
  // API; on success the profile is persisted and the user is logged in.
  _signup = () => {
    let email = this.state.currentEmail;
    let password = this.state.currentPassword;
    let confirmPassword = this.state.currentConfirmPassword;

    let emailValidationError = "";
    let passwordValidationError = "";
    let confirmPasswordValidationError = "";

    if (!AppValidator.isValidEmail(email)) {
      emailValidationError = "Email is not valid";
    }

    if (!AppValidator.isValidPassword(password)) {
      passwordValidationError = "Password must be between 6 and 100 characters";
    }

    // only check whether password equals confirm password, if password is valid
    if (!passwordValidationError && password !== confirmPassword) {
      confirmPasswordValidationError = "Passwords do not match";
    }

    if (
      passwordValidationError ||
      emailValidationError ||
      confirmPasswordValidationError
    ) {
      this.setState({
        signupError: "",
        emailValidationError: emailValidationError,
        passwordValidationError: passwordValidationError,
        confirmPasswordValidationError: confirmPasswordValidationError
      });

      return; // validation failed; cannot signup
    }

    this.setState(
      {
        isSigningUp: true,
        emailValidationError: "",
        passwordValidationError: "",
        confirmPasswordValidationError: ""
      },
      () => {
        Client.signup(email, password)
          .then(response => {
            if (response.errorCode) {
              this.setState({
                signupError: response.errorMessage,
                isSigningUp: false
              });
            } else {
              // TODO - fix this hack; do not assign PW in this way
              response.profile.password = password;
              ProfileController.createOrUpdateProfile(response.profile);
              this.props.login(response.profile);
              browserHistory.push("/");
            }
          })
          .catch(error => {
            this.setState({
              signupError: error.message,
              isSigningUp: false
            });
          });
      }
    );
  };

  // Renders the error banner when the last signup attempt failed,
  // otherwise an empty placeholder.
  _constructMessageBanner = () => {
    if (this.state.signupError) {
      return (
        <div style={AppStyles.messageBanner}>{this.state.signupError}</div>
      );
    } else {
      return <div />;
    }
  };

  emailValidationState = () => {
    // no status, if no error
    return this.state.emailValidationError ? "error" : null;
  };

  passwordValidationState = () => {
    // no status, if no error
    return this.state.passwordValidationError ? "error" : null;
  };

  confirmPasswordValidationState = () => {
    // no status, if no error
    return this.state.confirmPasswordValidationError ? "error" : null;
  };

  // The three controlled inputs with per-field validation feedback.
  _signupForm = () => {
    return (
      <form style={AppStyles.form}>
        <FormGroup
          controlId="confirm-password-email-form"
          validationState={this.emailValidationState()}
        >
          <ControlLabel>Email</ControlLabel>
          <FormControl
            type="email"
            value={this.state.currentEmail}
            placeholder=""
            onChange={e => {
              this.setState({ currentEmail: e.target.value });
            }}
          />
          <FormControl.Feedback />
          <HelpBlock>{this.state.emailValidationError}</HelpBlock>
        </FormGroup>
        <FormGroup
          controlId="confirm-password-newpassword-form"
          validationState={this.passwordValidationState()}
        >
          <ControlLabel>Password</ControlLabel>
          <FormControl
            type="password"
            value={this.state.currentPassword}
            placeholder=""
            onChange={e => {
              this.setState({ currentPassword: e.target.value });
            }}
          />
          <FormControl.Feedback />
          <HelpBlock>{this.state.passwordValidationError}</HelpBlock>
        </FormGroup>
        <FormGroup
          controlId="confirm-password-confirmpassword-form"
          validationState={this.confirmPasswordValidationState()}
        >
          <ControlLabel>Confirm Password</ControlLabel>
          <FormControl
            type="password"
            value={this.state.currentConfirmPassword}
            placeholder=""
            onChange={e => {
              this.setState({ currentConfirmPassword: e.target.value });
            }}
          />
          <FormControl.Feedback />
          <HelpBlock>{this.state.confirmPasswordValidationError}</HelpBlock>
        </FormGroup>
      </form>
    );
  };

  render() {
    return (
      <div
        style={AppStyles.centerBlockMain}
        onKeyPress={e => {
          if (e.key === "Enter") {
            this._signup();
          }
        }}
      >
        {this._constructMessageBanner()}
        <div
          style={AppStyles.headerText}
          onClick={() => {
            browserHistory.push("/");
          }}
        >
          <img
            style={AppStyles.primaryIcon}
            src="../images/primary.png"
            alt="LibreTask primary icon"
          />{" "}
          LibreTask
        </div>
        <div style={AppStyles.centerBlockContent}>
          <div style={AppStyles.centeredTitleText}>Create Account</div>
          <div style={AppStyles.centerContent}>
            {this._signupForm()}
            <Button
              className="form_button"
              style={AppStyles.formButton}
              bsStyle="primary"
              bsSize="large"
              onClick={() => {
                this._signup();
              }}
            >
              Create Account
            </Button>
            <br />
            <br />
            <div style={styles.legal}>
              By signing up, you are agreeing to the{" "}
              <span
                className="underline_on_hover"
                style={styles.agreement}
                onClick={() => {
                  browserHistory.push("/terms");
                }}
              >
                Terms of Service
              </span>{" "}
              and the{" "}
              <span
                className="underline_on_hover"
                style={styles.agreement}
                onClick={() => {
                  browserHistory.push("/privacy");
                }}
              >
                Privacy Policy
              </span>
              .
            </div>
          </div>
        </div>
        <div
          className="underline_on_hover"
          style={AppStyles.formFooterLink}
          onClick={() => {
            browserHistory.push("/login");
          }}
        >
          Already have an account? Log in.
        </div>
      </div>
    );
  }
}
// No state is read from the Redux store; only actions are dispatched.
const mapStateToProps = state => ({ });

const mapDispatchToProps = {
  hideFooter: FooterActions.hide,
  showFooter: FooterActions.show,
  hideHeader: HeaderActions.hide,
  showHeader: HeaderActions.show,
  login: UserActions.login
};

export default connect(mapStateToProps, mapDispatchToProps)(Signup);
|
McLeodMoores/starling | projects/web/src/main/java/com/opengamma/web/user/AbstractWebRoleResource.java | <gh_stars>1-10
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.web.user;
import org.joda.beans.impl.flexi.FlexiBean;
import com.opengamma.master.user.UserMaster;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.web.AbstractPerRequestWebResource;
/**
 * Abstract base class for RESTful role resources.
 * Subclasses share the per-request {@link WebRoleData} state and the
 * user master configured on it.
 */
public abstract class AbstractWebRoleResource
    extends AbstractPerRequestWebResource<WebRoleData> {

  /**
   * HTML ftl directory.
   */
  protected static final String HTML_DIR = "users/html/";

  /**
   * Creates the resource.
   *
   * @param userMaster
   *          the role master, not null
   */
  protected AbstractWebRoleResource(final UserMaster userMaster) {
    super(new WebRoleData());
    ArgumentChecker.notNull(userMaster, "userMaster");
    data().setUserMaster(userMaster);
  }

  /**
   * Creates the resource as a child of another resource, inheriting its
   * per-request data.
   *
   * @param parent
   *          the parent resource, not null
   */
  protected AbstractWebRoleResource(final AbstractWebRoleResource parent) {
    super(parent);
  }

  // -------------------------------------------------------------------------
  /**
   * Creates the output root data.
   *
   * @return the output root data, not null
   */
  @Override
  protected FlexiBean createRootData() {
    final FlexiBean out = super.createRootData();
    // Expose the role URI helper under the "uris" key.
    out.put("uris", new WebRoleUris(data()));
    return out;
  }
}
|
portal-io/portal-ios-business | WVRPay/WVRPay/Classes/CheckGoodsPayedList/Model/http/WVRHttpApiCheckGoodsPayedList.h | <gh_stars>0
//
// WVRHttpApiCheckGoodsPayedList.h
// WhaleyVR
//
// Created by <NAME> on 2017/9/7.
// Copyright © 2017年 Snailvr. All rights reserved.
//
#import "WVRAPIBaseManager+ReactiveExtension.h"
/// API manager for the "check goods payed list" request.
/// Declares no members beyond what WVRAPIBaseManager and the WVRAPIManager
/// protocol already provide.
@interface WVRHttpApiCheckGoodsPayedList : WVRAPIBaseManager <WVRAPIManager>

@end
|
irwansetiawan/android-publisher-sdk | publisher-sdk/src/main/java/com/criteo/publisher/AppEvents/AppEvents.java | <gh_stars>1-10
/*
* Copyright 2020 Criteo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.criteo.publisher.AppEvents;
import android.content.Context;
import androidx.annotation.NonNull;
import com.criteo.publisher.Clock;
import com.criteo.publisher.model.DeviceInfo;
import com.criteo.publisher.network.AppEventTask;
import com.criteo.publisher.network.PubSdkApi;
import com.criteo.publisher.privacy.UserPrivacyUtil;
import com.criteo.publisher.util.AdvertisingInfo;
import com.criteo.publisher.util.AppEventResponseListener;
import com.criteo.publisher.util.ApplicationStoppedListener;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Posts application lifecycle events ("Launch", "Active", "Inactive") to
 * the SDK's app-event endpoint, subject to user privacy consent and a
 * server-controlled throttle window.
 */
public class AppEvents implements AppEventResponseListener, ApplicationStoppedListener {

  private static final String EVENT_INACTIVE = "Inactive";
  private static final String EVENT_ACTIVE = "Active";
  private static final String EVENT_LAUNCH = "Launch";

  private final Context mContext;
  private final AdvertisingInfo advertisingInfo;
  private final Clock clock;
  private final PubSdkApi api;
  private final UserPrivacyUtil userPrivacyUtil;

  @NonNull
  private final DeviceInfo deviceInfo;

  @NonNull
  private final Executor executor;

  // Epoch millis until which events are suppressed; -1 means no throttle
  // has been received yet.
  private final AtomicLong silencedUntilTimeInMillis = new AtomicLong(-1);

  public AppEvents(
      @NonNull Context context,
      @NonNull AdvertisingInfo advertisingInfo,
      @NonNull Clock clock,
      @NonNull PubSdkApi api,
      @NonNull UserPrivacyUtil userPrivacyUtil,
      @NonNull DeviceInfo deviceInfo,
      @NonNull Executor executor
  ) {
    this.mContext = context;
    this.advertisingInfo = advertisingInfo;
    this.clock = clock;
    this.api = api;
    this.userPrivacyUtil = userPrivacyUtil;
    this.deviceInfo = deviceInfo;
    this.executor = executor;
  }

  /**
   * Posts the given event type asynchronously, unless privacy consent is
   * missing or the throttle window is still open.
   */
  private void postAppEvent(String eventType) {
    if (!shouldCallBearcat()) {
      return;
    }

    long silencedUntil = silencedUntilTimeInMillis.get();
    if (silencedUntil > 0 && clock.getCurrentTimeInMillis() < silencedUntil) {
      return;
    }

    executor.execute(new AppEventTask(
        mContext,
        this,
        advertisingInfo,
        api,
        deviceInfo,
        userPrivacyUtil,
        eventType
    ));
  }

  /**
   * Silences event posting for the given number of seconds from now.
   */
  @Override
  public void setThrottle(int throttleInSec) {
    // Multiply as long so a very large throttle cannot overflow 32-bit
    // int arithmetic before being added to the current time.
    this.silencedUntilTimeInMillis.set(clock.getCurrentTimeInMillis() + throttleInSec * 1000L);
  }

  public void sendLaunchEvent() {
    postAppEvent(EVENT_LAUNCH);
  }

  public void sendActiveEvent() {
    postAppEvent(EVENT_ACTIVE);
  }

  public void sendInactiveEvent() {
    postAppEvent(EVENT_INACTIVE);
  }

  /** Intentionally a no-op: nothing needs to be torn down on stop. */
  @Override
  public void onApplicationStopped() {
  }

  /** Events may only be sent when CCPA and MoPub consent allow it. */
  private boolean shouldCallBearcat() {
    return userPrivacyUtil.isCCPAConsentGivenOrNotApplicable() && userPrivacyUtil
        .isMopubConsentGivenOrNotApplicable();
  }
}
|
bgalloway1/enroll | components/sponsored_benefits/app/helpers/sponsored_benefits/application_helper.rb | <gh_stars>10-100
module SponsoredBenefits
  # View helpers shared across the engine.
  module ApplicationHelper
    # Builds the ordered list of breadcrumb link paths for a plan design
    # proposal: the edit (or new) proposal path, the plan-selection path
    # when the proposal is persisted, and the plan-review path once the
    # proposal has an active benefit group.
    def generate_breadcrumb_links(proposal, organization)
      if proposal.persisted?
        links = [sponsored_benefits.edit_organizations_plan_design_organization_plan_design_proposal_path(organization.id, proposal.id)]
        links << sponsored_benefits.new_organizations_plan_design_proposal_plan_selection_path(proposal)
      else
        links = [sponsored_benefits.new_organizations_plan_design_organization_plan_design_proposal_path(organization.id)]
      end

      unless proposal.active_benefit_group.nil?
        links << sponsored_benefits.new_organizations_plan_design_proposal_plan_review_path(proposal)
      end
      links
    end

    # Intentionally empty.
    def eligibility_criteria(employer)
      # toDo - See why do we have this dependency in DC.
    end
  end
end
|
u-transnet/Gamedev | uTransnetCalcServer/src/main/java/com/github/utransnet/utranscalc/server/data/ObjectType.java | package com.github.utransnet.utranscalc.server.data;
/**
* Created by Artem on 04.06.2018.
*/
/**
 * Object kinds tracked by the calculator, each with a stable numeric
 * identifier (link = 1, pylon = 2, station = 3).
 */
public enum ObjectType {
    link(1),
    pylon(2),
    station(3);

    /**
     * Stable numeric identifier of this object type. Declared final so the
     * id cannot be reassigned after construction (it was previously a
     * public mutable field).
     */
    public final int id;

    ObjectType(int id) {
        this.id = id;
    }
}
|
guyuedumingx/ProgrammingExperiment | src/main/java/experiment2/yhicxu/utils/LinkQueue.java | package experiment2.yhicxu.utils;
import java.util.Iterator;
/**
* <p><b>类名:</b>{@code LinkQueue}</p>
* <p><b>功能:</b></p><br>链队列
* <p><b>方法:</b></p>
* <br> {@link #LinkQueue()}构造方法
* <br> {@link #offer(Object)}将数据添加到队尾
* <br> {@link #peek()}查看队首元素
* <br> {@link #pool()}取出队首元素
* <br> {@link #isEmpty()}判断队列是否为空
* <br> {@link #size()}获取队列长度
* <br> {@link #iterator()}覆盖重写迭代器方法,返回一个迭代器
*
* @author 60rzvvbj
* @date 2021/3/20
*/
/**
 * Singly linked FIFO queue.
 *
 * <p>Elements are appended at the rear and removed from the front; offer,
 * peek, pool, isEmpty and size are all O(1), and iteration walks the
 * elements from front (oldest) to rear (newest).
 */
public class LinkQueue<T> implements Iterable<T> {

    /** Singly linked node holding one queue element. */
    private static class Node<T> {
        T value;
        Node<T> next;

        Node(T value) {
            this.value = value;
        }
    }

    // Newest element; new nodes are linked after it.
    private Node<T> rear;
    // Oldest element; removals happen here.
    private Node<T> front;
    // Number of elements currently stored.
    private int size;

    /** Creates an empty queue. */
    public LinkQueue() {
        rear = null;
        front = null;
    }

    /**
     * Appends the given element at the rear of the queue.
     *
     * @param t the element to enqueue
     */
    public void offer(T t) {
        Node<T> node = new Node<>(t);
        if (rear == null) {
            front = node;
        } else {
            rear.next = node;
        }
        rear = node;
        size++;
    }

    /**
     * Returns the element at the front without removing it.
     *
     * @return the front element, or null when the queue is empty
     */
    public T peek() {
        return isEmpty() ? null : front.value;
    }

    /**
     * Removes and returns the element at the front of the queue.
     *
     * @return the front element, or null when the queue is empty
     */
    public T pool() {
        if (isEmpty()) {
            return null;
        }
        T value = front.value;
        if (rear == front) {
            // Removing the only element empties the queue.
            rear = null;
        }
        front = front.next;
        size--;
        return value;
    }

    /**
     * @return true when the queue holds no elements
     */
    public boolean isEmpty() {
        return rear == null && front == null;
    }

    /**
     * @return the number of stored elements
     */
    public int size() {
        return size;
    }

    /**
     * Returns an iterator walking the elements from front to rear.
     * The iterator does not support removal and is not fail-fast.
     */
    @Override
    public Iterator<T> iterator() {
        return new Iterator<T>() {
            private Node<T> cursor = front;

            @Override
            public boolean hasNext() {
                return cursor != null;
            }

            @Override
            public T next() {
                T value = cursor.value;
                cursor = cursor.next;
                return value;
            }
        };
    }
}
|
mpi2/PhenotypeData | datatests/src/test/java/org/mousephenotype/cda/datatests/repositories/solr/EssentialGeneServiceTest.java | /**
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
/**
* Copyright © 2014 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This test class is intended to run healthchecks against the observation table.
*/
package org.mousephenotype.cda.datatests.repositories.solr;
import org.apache.solr.client.solrj.SolrServerException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mousephenotype.cda.solr.service.EssentialGeneService;
import org.mousephenotype.cda.solr.service.dto.EssentialGeneDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.io.IOException;
import java.util.List;
import static org.junit.Assert.assertTrue;
/**
 * Integration tests for {@link EssentialGeneService} against the Solr test
 * configuration: single-gene lookup by MGI id, full IDG gene list retrieval,
 * and retrieval filtered by IDG group label.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = {RepositorySolrTestConfig.class})
public class EssentialGeneServiceTest {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Autowired
    private EssentialGeneService essentialGeneService;

    @Test
    public void testGetGeneById() throws SolrServerException, IOException {
        String mgiId = "MGI:1098687";  // Aak1

        List<EssentialGeneDTO> essentialGeneDTOS = essentialGeneService.getGeneListByMgiId(mgiId);
        for (EssentialGeneDTO geneDTO : essentialGeneDTOS) {
            logger.info("Gene symbol is: " + geneDTO.getMarkerSymbol());
            logger.info("mgiAccessionId: " + geneDTO.getMgiAccession());
            logger.info("idgFamily: " + geneDTO.getIdgFamily());
            logger.info("===================");
        }

        EssentialGeneDTO gene = essentialGeneService.getGeneByMgiId(mgiId);

        // Assert BEFORE dereferencing: the original logged gene fields first,
        // so a null result failed with an NPE instead of this assertion message.
        assertTrue("Expected gene but was null", gene != null);

        logger.info("single Gene symbol is: " + gene.getMarkerSymbol());
        logger.info("mgiAccessionId: " + gene.getMgiAccession());
        logger.info("idgFamily: " + gene.getIdgFamily());
        logger.info("===================");
    }

    @Test
    public void testGetAllIdgGeneList() throws IOException, SolrServerException {
        List<EssentialGeneDTO> idgGeneDTOS = essentialGeneService.getAllIdgGeneList();
        logger.info("idgGenes size=" + idgGeneDTOS.size());
        // for(EssentialGeneDTO geneDTO: idgGeneDTOS) {
        // logger.info("Gene symbol is: " + geneDTO.getMarkerSymbol());
        // logger.info("mgiAccessionId: " + geneDTO.getMgiAccession());
        // logger.info("idgChr: " + geneDTO.getIdgChr());
        // logger.info("idgFamily: " + geneDTO.getIdgFamily());
        // logger.info("idgIDL: " + geneDTO.getIdgIdl());
        // logger.info("idgSymbol: " + geneDTO.getIdgSymbol());
        // logger.info("uniprot acc: " + geneDTO.getIdgUniprotAcc());
        // logger.info("===================");
        // }
    }

    @Test
    public void getAllBySecondaryProjectIdAndGroupLabel() throws IOException, SolrServerException {
        List<EssentialGeneDTO> genes = essentialGeneService.getAllIdgGeneListByGroupLabel("Kinase");
        // Use JUnit's assertTrue rather than the `assert` keyword: the keyword is
        // a no-op unless the JVM runs with -ea, so the original check never fired.
        assertTrue("Expected more than 127 Kinase genes but got " + genes.size(), genes.size() > 127);
    }
}
|
John3/crabmusket_Torque3D | Engine/source/lighting/advanced/advancedLightingFeatures.cpp | //-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#include "platform/platform.h"
#include "lighting/advanced/advancedLightingFeatures.h"
#include "shaderGen/featureMgr.h"
#include "gfx/gfxStringEnumTranslate.h"
#include "materials/materialParameters.h"
#include "materials/materialFeatureTypes.h"
#include "materials/matTextureTarget.h"
#include "gfx/gfxDevice.h"
#include "core/util/safeDelete.h"
#if defined( TORQUE_OS_WIN ) || defined( TORQUE_OS_XBOX )
# include "lighting/advanced/hlsl/gBufferConditionerHLSL.h"
# include "lighting/advanced/hlsl/advancedLightingFeaturesHLSL.h"
#endif
#if defined( TORQUE_OPENGL )
# include "lighting/advanced/glsl/gBufferConditionerGLSL.h"
# include "lighting/advanced/glsl/advancedLightingFeaturesGLSL.h"
#endif
// Tracks whether the deferred-lighting shader features are currently registered.
bool AdvancedLightingFeatures::smFeaturesRegistered = false;

/// Registers the G-buffer conditioner and the deferred lighting shader features
/// for the active adapter (GLSL for OpenGL, HLSL otherwise), then attaches the
/// conditioner to the named "prepass" texture target if one exists.
void AdvancedLightingFeatures::registerFeatures( const GFXFormat &prepassTargetFormat, const GFXFormat &lightInfoTargetFormat )
{
   AssertFatal( !smFeaturesRegistered, "AdvancedLightingFeatures::registerFeatures() - Features already registered. Bad!" );

   // If we ever need this...
   TORQUE_UNUSED(lightInfoTargetFormat);

   ConditionerFeature *cond = NULL;

   if(GFX->getAdapterType() == OpenGL)
   {
#if defined( TORQUE_OPENGL )
      cond = new GBufferConditionerGLSL( prepassTargetFormat, GBufferConditionerGLSL::ViewSpace );
      FEATUREMGR->registerFeature(MFT_PrePassConditioner, cond);
      FEATUREMGR->registerFeature(MFT_RTLighting, new DeferredRTLightingFeatGLSL());
      FEATUREMGR->registerFeature(MFT_NormalMap, new DeferredBumpFeatGLSL());
      FEATUREMGR->registerFeature(MFT_PixSpecular, new DeferredPixelSpecularGLSL());
      FEATUREMGR->registerFeature(MFT_MinnaertShading, new DeferredMinnaertGLSL());
      FEATUREMGR->registerFeature(MFT_SubSurface, new DeferredSubSurfaceGLSL());
#endif
   }
   else
   {
      // Guard matches the header includes at the top of this file
      // (TORQUE_OS_WIN || TORQUE_OS_XBOX): previously only TORQUE_OS_WIN was
      // checked here, so Xbox builds included the HLSL headers but never
      // registered the features, leaving `cond` NULL.
#if defined( TORQUE_OS_WIN ) || defined( TORQUE_OS_XBOX )
      cond = new GBufferConditionerHLSL( prepassTargetFormat, GBufferConditionerHLSL::ViewSpace );
      FEATUREMGR->registerFeature(MFT_PrePassConditioner, cond);
      FEATUREMGR->registerFeature(MFT_RTLighting, new DeferredRTLightingFeatHLSL());
      FEATUREMGR->registerFeature(MFT_NormalMap, new DeferredBumpFeatHLSL());
      FEATUREMGR->registerFeature(MFT_PixSpecular, new DeferredPixelSpecularHLSL());
      FEATUREMGR->registerFeature(MFT_MinnaertShading, new DeferredMinnaertHLSL());
      FEATUREMGR->registerFeature(MFT_SubSurface, new DeferredSubSurfaceHLSL());
#endif
   }

   NamedTexTarget *target = NamedTexTarget::find( "prepass" );
   if ( target )
      target->setConditioner( cond );

   smFeaturesRegistered = true;
}
/// Detaches the conditioner from the "prepass" target and unregisters every
/// shader feature installed by registerFeatures().
void AdvancedLightingFeatures::unregisterFeatures()
{
   // Drop the conditioner from the prepass target (if the target still exists)
   // before the feature objects are torn down.
   if ( NamedTexTarget *target = NamedTexTarget::find( "prepass" ) )
      target->setConditioner( NULL );

   FEATUREMGR->unregisterFeature( MFT_PrePassConditioner );
   FEATUREMGR->unregisterFeature( MFT_RTLighting );
   FEATUREMGR->unregisterFeature( MFT_NormalMap );
   FEATUREMGR->unregisterFeature( MFT_PixSpecular );
   FEATUREMGR->unregisterFeature( MFT_MinnaertShading );
   FEATUREMGR->unregisterFeature( MFT_SubSurface );

   smFeaturesRegistered = false;
}
|
Adrjanjan/JiMP-Exercises | lab3/tinyurl/TinyUrl.cpp | //
// Created by adrja on 22.03.2018.
//
#include "TinyUrl.h"
namespace tinyurl {

// Create a codec whose 6-character hash counter starts at "000000" and whose
// URL table is empty.
std::unique_ptr<TinyUrlCodec> Init() {
    std::unique_ptr<TinyUrlCodec> codec_structure(new TinyUrlCodec);
    codec_structure->state = {'0', '0', '0', '0', '0', '0'};
    codec_structure->urls = {};
    // Return by value: a local unique_ptr is implicitly moved, and wrapping it
    // in std::move only inhibits copy elision (see C++ Core Guidelines F.48).
    return codec_structure;
}

// Advance the 6-character counter to the next hash.  Each position cycles
// through '0'-'9', then 'A'-'Z', then 'a'-'z'; overflowing 'z' wraps the
// position to '0' and carries into the position to its left.
void NextHash(std::array<char, 6> *state) {
    ++state->back();
    for (int i = 5; i >= 0; --i) {
        if (state->at(i) == '9' + 1) {
            state->at(i) = 'A';
        } else if (state->at(i) == 'Z' + 1) {
            state->at(i) = 'a';
        } else if (state->at(i) == 'z' + 1) {
            state->at(i) = '0';
            // Only carry while a position exists to the left: the original
            // unconditionally indexed at(i - 1), which threw std::out_of_range
            // when the whole counter wrapped around at i == 0.
            if (i > 0) {
                ++state->at(i - 1);
            }
        }
    }
}

// Generate the next hash, record hash -> url in the codec, and return the hash.
std::string Encode(const std::string &url, std::unique_ptr<TinyUrlCodec> *codec) {
    NextHash(&(*codec)->state);
    std::string hash((*codec)->state.begin(), (*codec)->state.end());
    (*codec)->urls.emplace(hash, url);
    return hash;
}

// Look up the URL stored for `hash`; returns "" when the hash is unknown.
std::string Decode(const std::unique_ptr<TinyUrlCodec> &codec, const std::string &hash) {
    // `urls` is an associative container (it supports key/value emplace), so use
    // its own lookup instead of the original O(n) linear scan.
    auto it = codec->urls.find(hash);
    return it != codec->urls.end() ? it->second : "";
}

}  // namespace tinyurl
bluebird88/HIS | his-cloud/his-cloud-service-dms/src/main/java/com/neu/his/cloud/service/dms/dto/dms/DmsRefundDrugParam.java | package com.neu.his.cloud.service.dms.dto.dms;
import io.swagger.annotations.ApiModelProperty;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
/**
 * Request parameter object for a drug-refund operation: identifies the
 * prescription item being refunded, the quantity, and the item type.
 * Accessors are generated by Lombok.
 */
@Setter
@Getter
@ToString
public class DmsRefundDrugParam implements Serializable {

    // Explicit serialVersionUID so the serialized form stays stable across
    // recompiles instead of relying on the compiler-generated default.
    private static final long serialVersionUID = 1L;

    // Id of the prescription item being refunded.
    @ApiModelProperty(value = "退药处方项Id")
    Long refundPrescriptionItemId;

    // Quantity of the drug being refunded.
    @ApiModelProperty(value = "退药数量")
    Long refundNum;

    // Type of the refunded prescription item.
    @ApiModelProperty(value = "退药处方项类型")
    Integer type;
}
|
LasserCAT/elemental-creepers | src/main/java/T145/elementalcreepers/core/ModLoader.java | <reponame>LasserCAT/elemental-creepers
package T145.elementalcreepers.core;
import java.util.List;
import java.util.Set;
import T145.elementalcreepers.ElementalCreepers;
import T145.elementalcreepers.client.render.entity.RenderAngryCreeper;
import T145.elementalcreepers.client.render.entity.RenderBaseCreeper;
import T145.elementalcreepers.client.render.entity.RenderFriendlyCreeper;
import T145.elementalcreepers.client.render.entity.RenderSpiderCreeper;
import T145.elementalcreepers.config.ModConfig;
import T145.elementalcreepers.entities.EntityBallisticCreeper;
import T145.elementalcreepers.entities.EntityCakeCreeper;
import T145.elementalcreepers.entities.EntityCookieCreeper;
import T145.elementalcreepers.entities.EntityDarkCreeper;
import T145.elementalcreepers.entities.EntityEarthCreeper;
import T145.elementalcreepers.entities.EntityEnderCreeper;
import T145.elementalcreepers.entities.EntityFireCreeper;
import T145.elementalcreepers.entities.EntityFireworkCreeper;
import T145.elementalcreepers.entities.EntityFriendlyCreeper;
import T145.elementalcreepers.entities.EntityFurnaceCreeper;
import T145.elementalcreepers.entities.EntityGhostCreeper;
import T145.elementalcreepers.entities.EntityIceCreeper;
import T145.elementalcreepers.entities.EntityIllusionCreeper;
import T145.elementalcreepers.entities.EntityLightCreeper;
import T145.elementalcreepers.entities.EntityLightningCreeper;
import T145.elementalcreepers.entities.EntityMagmaCreeper;
import T145.elementalcreepers.entities.EntityPsychicCreeper;
import T145.elementalcreepers.entities.EntityReverseCreeper;
import T145.elementalcreepers.entities.EntitySpiderCreeper;
import T145.elementalcreepers.entities.EntitySpringCreeper;
import T145.elementalcreepers.entities.EntityStoneCreeper;
import T145.elementalcreepers.entities.EntityWaterCreeper;
import T145.elementalcreepers.entities.EntityWindCreeper;
import T145.elementalcreepers.entities.EntityZombieCreeper;
import T145.elementalcreepers.lib.Constants;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.EnumCreatureType;
import net.minecraft.entity.monster.EntityCreeper;
import net.minecraft.entity.monster.IMob;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.util.DamageSource;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.world.biome.Biome;
import net.minecraftforge.client.event.ModelRegistryEvent;
import net.minecraftforge.common.BiomeDictionary;
import net.minecraftforge.event.RegistryEvent;
import net.minecraftforge.event.entity.living.LivingDeathEvent;
import net.minecraftforge.event.entity.living.LivingHurtEvent;
import net.minecraftforge.fml.client.registry.RenderingRegistry;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.common.gameevent.PlayerEvent.PlayerLoggedInEvent;
import net.minecraftforge.fml.common.registry.EntityEntry;
import net.minecraftforge.fml.common.registry.EntityEntryBuilder;
import net.minecraftforge.fml.common.registry.EntityRegistry;
import net.minecraftforge.fml.common.registry.ForgeRegistries;
import net.minecraftforge.fml.common.registry.GameRegistry.ObjectHolder;
import net.minecraftforge.fml.relauncher.Side;
/**
 * Central registration class for the Elemental Creepers mod: registers entity
 * entries and biome spawn rules plus gameplay event handlers on the server
 * side, and entity renderers on the client side.
 */
@ObjectHolder(ElementalCreepers.MODID)
public class ModLoader {

    private ModLoader() {}

    /** Server-side registration: entity entries, spawn rules, and gameplay event handlers. */
    @EventBusSubscriber(modid = ElementalCreepers.MODID)
    public static class ServerLoader {

        private ServerLoader() {}

        /**
         * Registers every elemental creeper entity entry, records each class in
         * {@code Constants.CREEPER_LIST}, then configures biome spawn entries
         * according to the mod configuration.
         */
        @SubscribeEvent
        public static void registerEntities(final RegistryEvent.Register<EntityEntry> event) {
            // Every variant uses the same tracker settings (range 80, update every
            // 3 ticks) and the vanilla creeper egg colors; the ghost creeper
            // deliberately has no spawn egg.
            final EntityEntry[] entries = {
                createBuilder("CakeCreeper")
                    .entity(EntityCakeCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("CookieCreeper")
                    .entity(EntityCookieCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("DarkCreeper")
                    .entity(EntityDarkCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("EarthCreeper")
                    .entity(EntityEarthCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("EnderCreeper")
                    .entity(EntityEnderCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("FireCreeper")
                    .entity(EntityFireCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("FireworkCreeper")
                    .entity(EntityFireworkCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("FurnaceCreeper")
                    .entity(EntityFurnaceCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("FriendlyCreeper")
                    .entity(EntityFriendlyCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("GhostCreeper")
                    .entity(EntityGhostCreeper.class)
                    .tracker(80, 3, true)
                    .build(),
                createBuilder("BallisticCreeper")
                    .entity(EntityBallisticCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("IceCreeper")
                    .entity(EntityIceCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("IllusionCreeper")
                    .entity(EntityIllusionCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("LightCreeper")
                    .entity(EntityLightCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("LightningCreeper")
                    .entity(EntityLightningCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("MagmaCreeper")
                    .entity(EntityMagmaCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("PsychicCreeper")
                    .entity(EntityPsychicCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("ReverseCreeper")
                    .entity(EntityReverseCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("SpiderCreeper")
                    .entity(EntitySpiderCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("SpringCreeper")
                    .entity(EntitySpringCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("StoneCreeper")
                    .entity(EntityStoneCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("WaterCreeper")
                    .entity(EntityWaterCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("WindCreeper")
                    .entity(EntityWindCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build(),
                createBuilder("ZombieCreeper")
                    .entity(EntityZombieCreeper.class)
                    .tracker(80, 3, true)
                    .egg(0x0DA70B, 0x101010)
                    .build()
            };

            for (EntityEntry entry : entries) {
                event.getRegistry().register(entry);
                // NOTE(review): unchecked cast — safe only while every entry in
                // `entries` above is an EntityCreeper subclass.
                Constants.CREEPER_LIST.add((Class<? extends EntityCreeper>) entry.getEntityClass());
            }

            if (ModConfig.general.reasonableSpawnRates) {
                // Config-driven weights: each creeper type gets its own overworld spawn rate.
                addOverworldSpawn(EntityFireCreeper.class, ModConfig.spawnRate.fireCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityWaterCreeper.class, ModConfig.spawnRate.waterCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityLightningCreeper.class, ModConfig.spawnRate.lightningCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityCookieCreeper.class, ModConfig.spawnRate.cookieCreeperSpawn, 1, 2);
                addOverworldSpawn(EntityDarkCreeper.class, ModConfig.spawnRate.darkCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityLightCreeper.class, ModConfig.spawnRate.lightCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityEarthCreeper.class, ModConfig.spawnRate.earthCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityReverseCreeper.class, ModConfig.spawnRate.reverseCreeperSpawn, 1, 1);
                addOverworldSpawn(EntityIceCreeper.class, ModConfig.spawnRate.iceCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityPsychicCreeper.class, ModConfig.spawnRate.psychicCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityIllusionCreeper.class, ModConfig.spawnRate.illusionCreeperSpawn, 1, 1);
                addOverworldSpawn(EntitySpiderCreeper.class, ModConfig.spawnRate.spiderCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityWindCreeper.class, ModConfig.spawnRate.windCreeperSpawn, 1, 2);
                addOverworldSpawn(EntityBallisticCreeper.class, ModConfig.spawnRate.ballisticCreeperSpawn, 1, 1);
                addOverworldSpawn(EntityEnderCreeper.class, ModConfig.spawnRate.enderCreeperSpawn, 1, 2);
                addOverworldSpawn(EntityStoneCreeper.class, ModConfig.spawnRate.stoneCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityCakeCreeper.class, ModConfig.spawnRate.cakeCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityFireworkCreeper.class, ModConfig.spawnRate.fireworkCreeperSpawn, 1, 3);
                addOverworldSpawn(EntitySpringCreeper.class, ModConfig.spawnRate.springCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityFurnaceCreeper.class, ModConfig.spawnRate.furnaceCreeperSpawn, 1, 3);
                addOverworldSpawn(EntityZombieCreeper.class, ModConfig.spawnRate.zombieCreeperSpawn, 1, 1);
            } else {
                // Mirror the vanilla creeper's per-biome spawn entries for every variant.
                copyCreeperSpawns(EntityCakeCreeper.class);
                copyCreeperSpawns(EntityCookieCreeper.class);
                copyCreeperSpawns(EntityDarkCreeper.class);
                copyCreeperSpawns(EntityEarthCreeper.class);
                copyCreeperSpawns(EntityEnderCreeper.class);
                copyCreeperSpawns(EntityFireCreeper.class);
                copyCreeperSpawns(EntityFireworkCreeper.class);
                copyCreeperSpawns(EntityFurnaceCreeper.class);
                copyCreeperSpawns(EntityBallisticCreeper.class);
                copyCreeperSpawns(EntityIceCreeper.class);
                copyCreeperSpawns(EntityIllusionCreeper.class);
                copyCreeperSpawns(EntityLightCreeper.class);
                copyCreeperSpawns(EntityLightningCreeper.class);
                copyCreeperSpawns(EntityMagmaCreeper.class);
                copyCreeperSpawns(EntityPsychicCreeper.class);
                copyCreeperSpawns(EntityReverseCreeper.class);
                copyCreeperSpawns(EntitySpiderCreeper.class);
                copyCreeperSpawns(EntitySpringCreeper.class);
                copyCreeperSpawns(EntityStoneCreeper.class);
                copyCreeperSpawns(EntityWaterCreeper.class);
                copyCreeperSpawns(EntityWindCreeper.class);
                copyCreeperSpawns(EntityZombieCreeper.class);
            }

            // The friendly creeper spawns as a passive CREATURE; fire/magma variants
            // also spawn in the Nether, and the ender creeper in the End.
            addOverworldSpawn(EntityFriendlyCreeper.class, ModConfig.spawnRate.friendlyCreeperSpawn, 1, 2, EnumCreatureType.CREATURE);
            addNetherSpawn(EntityFireCreeper.class, ModConfig.spawnRate.fireCreeperSpawn, 1, 3);
            addNetherSpawn(EntityMagmaCreeper.class, ModConfig.spawnRate.magmaCreeperSpawn, 1, 2);
            addEndSpawn(EntityEnderCreeper.class, ModConfig.spawnRate.enderCreeperSpawn * 5, 1, 3);

            // TODO: Add dimension blacklist/whitelist
        }

        // Monotonically increasing network id handed to each EntityEntryBuilder.
        private static int entityID = 0;

        /** Builds an EntityEntryBuilder pre-populated with this mod's registry name and next id. */
        private static <E extends Entity> EntityEntryBuilder<E> createBuilder(final String name) {
            final EntityEntryBuilder<E> builder = EntityEntryBuilder.create();
            final ResourceLocation registryName = new ResourceLocation(ElementalCreepers.MODID, name);
            return builder.id(registryName, entityID++).name(ElementalCreepers.MODID + ":" + name);
        }

        // Biome dictionary categories considered valid for overworld spawns.
        private static BiomeDictionary.Type[] validOverworldBiomeTypes = { BiomeDictionary.Type.FOREST, BiomeDictionary.Type.HILLS, BiomeDictionary.Type.SWAMP, BiomeDictionary.Type.JUNGLE, BiomeDictionary.Type.WASTELAND, BiomeDictionary.Type.MAGICAL, BiomeDictionary.Type.BEACH, BiomeDictionary.Type.SANDY, BiomeDictionary.Type.SNOWY, BiomeDictionary.Type.MOUNTAIN };

        /** Adds a spawn entry of the given creature type to every valid overworld biome category. */
        public static void addOverworldSpawn(Class<? extends EntityLiving> entityClass, int spawnprob, int min, int max, EnumCreatureType type) {
            for (BiomeDictionary.Type biomeType : validOverworldBiomeTypes) {
                Set<Biome> biomeSet = BiomeDictionary.getBiomes(biomeType);
                EntityRegistry.addSpawn(entityClass, spawnprob, min, max, type, biomeSet.toArray(new Biome[biomeSet.size()]));
            }
        }

        /** Overload defaulting the creature type to MONSTER. */
        public static void addOverworldSpawn(Class<? extends EntityLiving> entityClass, int spawnprob, int min, int max) {
            addOverworldSpawn(entityClass, spawnprob, min, max, EnumCreatureType.MONSTER);
        }

        /** Adds a MONSTER spawn entry to all Nether biomes. */
        public static void addNetherSpawn(Class<? extends EntityLiving> entityClass, int spawnprob, int min, int max) {
            Set<Biome> biomeSet = BiomeDictionary.getBiomes(BiomeDictionary.Type.NETHER);
            EntityRegistry.addSpawn(entityClass, spawnprob, min, max, EnumCreatureType.MONSTER, biomeSet.toArray(new Biome[biomeSet.size()]));
        }

        /** Adds a MONSTER spawn entry to all End biomes. */
        public static void addEndSpawn(Class<? extends EntityLiving> entityClass, int spawnprob, int min, int max) {
            Set<Biome> biomeSet = BiomeDictionary.getBiomes(BiomeDictionary.Type.END);
            EntityRegistry.addSpawn(entityClass, spawnprob, min, max, EnumCreatureType.MONSTER, biomeSet.toArray(new Biome[biomeSet.size()]));
        }

        // NOTE(review): private and not referenced anywhere in this class —
        // candidate for removal.
        private static Biome[] getBiomes(final BiomeDictionary.Type type) {
            return BiomeDictionary.getBiomes(type).toArray(new Biome[0]);
        }

        /**
         * For every biome that has a spawn entry for `classToCopy` of type
         * `creatureTypeToCopy`, adds a matching entry (same weight and group
         * size) for `classToAdd` under `creatureTypeToAdd`.
         */
        private static void copySpawns(final Class<? extends EntityLiving> classToAdd, final EnumCreatureType creatureTypeToAdd, final Class<? extends EntityLiving> classToCopy, final EnumCreatureType creatureTypeToCopy) {
            for (final Biome biome : ForgeRegistries.BIOMES) {
                biome.getSpawnableList(creatureTypeToCopy).stream().filter(entry -> entry.entityClass == classToCopy).findFirst().ifPresent(spawnListEntry -> biome.getSpawnableList(creatureTypeToAdd).add(new Biome.SpawnListEntry(classToAdd, spawnListEntry.itemWeight, spawnListEntry.minGroupCount, spawnListEntry.maxGroupCount)));
            }
        }

        /** Copies the vanilla creeper's MONSTER spawn entries for `classToAdd`. */
        private static void copyCreeperSpawns(final Class<? extends EntityLiving> classToAdd) {
            copySpawns(classToAdd, EnumCreatureType.MONSTER, EntityCreeper.class, EnumCreatureType.MONSTER);
        }

        /**
         * Death handler: may spawn a ghost creeper where a player-killed creeper
         * died, and feeds nearby zombie creepers when any hostile mob dies.
         */
        @SubscribeEvent
        public static void onEntityDeath(LivingDeathEvent event) {
            DamageSource damage = event.getSource();
            // NOTE(review): `immediateSource` is never used below.
            Entity immediateSource = damage.getImmediateSource();
            Entity trueSource = damage.getTrueSource();
            EntityLivingBase entity = event.getEntityLiving();
            boolean killedByPlayer = damage.getDamageType().equals("player") || trueSource instanceof EntityPlayer;

            if (killedByPlayer && entity instanceof EntityCreeper && !(entity instanceof EntityGhostCreeper) && !(entity instanceof EntityFriendlyCreeper)) {
                // Illusion copies must not leave ghosts behind.
                if (entity instanceof EntityIllusionCreeper && ((EntityIllusionCreeper) entity).isIllusion()) {
                    return;
                }

                if (entity.world.rand.nextInt(100) < ModConfig.general.ghostCreeperChance) {
                    EntityGhostCreeper ghost = new EntityGhostCreeper(entity.world);
                    ghost.setLocationAndAngles(entity.posX, entity.posY, entity.posZ, entity.rotationYaw, entity.rotationPitch);
                    entity.world.spawnEntity(ghost);
                }
            }

            // TODO: Add HashSet entity blacklist
            // NOTE(review): `entity instanceof EntityLivingBase` is always true for a
            // non-null EntityLivingBase — it effectively acts only as a null check.
            if (entity instanceof EntityLivingBase && entity instanceof IMob) {
                int radius = ModConfig.general.zombieCreeperRange;
                AxisAlignedBB bb = new AxisAlignedBB(entity.posX - radius, entity.posY - radius, entity.posZ - radius, entity.posX + radius, entity.posY + radius, entity.posZ + radius);
                List<EntityZombieCreeper> zombles = entity.world.getEntitiesWithinAABB(EntityZombieCreeper.class, bb, creature -> entity != creature);

                if (!zombles.isEmpty()) {
                    if (zombles.size() == 1) {
                        zombles.get(0).addCreeper();
                    } else {
                        // we have more, and determine which is closest
                        float dist = Float.POSITIVE_INFINITY;
                        EntityZombieCreeper closest = null;

                        for (EntityZombieCreeper zomble : zombles) {
                            float newDist = entity.getDistance(zomble);

                            if (newDist < dist) {
                                dist = newDist;
                                closest = zomble;
                            }
                        }

                        if (closest != null) {
                            closest.addCreeper();
                        }
                    }
                }
            }
        }

        /**
         * Hurt handler: a sprung spring creeper that takes fall damage detonates
         * with power scaled by the fall damage amount.
         */
        @SubscribeEvent
        public static void onEntityHurt(LivingHurtEvent event) {
            Entity entity = event.getEntity();
            DamageSource damage = event.getSource();

            if (entity instanceof EntitySpringCreeper && damage == DamageSource.FALL) {
                EntitySpringCreeper creeper = (EntitySpringCreeper) entity;

                if (!creeper.world.isRemote && creeper.isSprung()) {
                    creeper.world.createExplosion(creeper, creeper.posX, creeper.posY - 2.0D, creeper.posZ, creeper.getExplosionPower() * ((event.getAmount() < 6.0F ? 6.0F : event.getAmount()) / 6.0F), creeper.world.getGameRules().getBoolean("mobGriefing"));
                    creeper.setDead();
                }
            }
        }

        /** Notifies a joining player when an update for the mod is available. */
        @SubscribeEvent
        public static void onPlayerJoinedWorld(PlayerLoggedInEvent event) {
            if (ModConfig.general.checkForUpdates && UpdateChecker.hasUpdate()) {
                event.player.sendMessage(UpdateChecker.getUpdateNotification());
            }
        }
    }

    /** Client-side registration: entity renderers for every creeper variant. */
    @EventBusSubscriber(value = Side.CLIENT, modid = ElementalCreepers.MODID)
    public static class ClientLoader {

        private ClientLoader() {}

        @SubscribeEvent
        public static void onModelRegistration(ModelRegistryEvent event) {
            // The vanilla creeper gets the mod's "angry" renderer; variants with
            // custom renderers are registered explicitly below.
            RenderingRegistry.registerEntityRenderingHandler(EntityCreeper.class, manager -> new RenderAngryCreeper(manager));
            registerRenderer(EntityCakeCreeper.class, "cakecreeper");
            registerRenderer(EntityCookieCreeper.class, "cookiecreeper");
            registerRenderer(EntityDarkCreeper.class, "darkcreeper");
            registerRenderer(EntityEarthCreeper.class, "earthcreeper");
            registerRenderer(EntityEnderCreeper.class, "endercreeper");
            registerRenderer(EntityFireCreeper.class, "firecreeper");
            registerRenderer(EntityFireworkCreeper.class, "fireworkcreeper");
            registerRenderer(EntityFurnaceCreeper.class, "furnacecreeper");
            RenderingRegistry.registerEntityRenderingHandler(EntityGhostCreeper.class, manager -> new RenderBaseCreeper(manager, true));
            RenderingRegistry.registerEntityRenderingHandler(EntityFriendlyCreeper.class, manager -> new RenderFriendlyCreeper(manager));
            registerRenderer(EntityBallisticCreeper.class, "hydrogencreeper");
            registerRenderer(EntityIceCreeper.class, "icecreeper");
            registerRenderer(EntityIllusionCreeper.class, "illusioncreeper");
            registerRenderer(EntityLightCreeper.class, "lightcreeper");
            registerRenderer(EntityLightningCreeper.class, "lightningcreeper");
            registerRenderer(EntityMagmaCreeper.class, "magmacreeper");
            registerRenderer(EntityPsychicCreeper.class, "psychiccreeper");
            registerRenderer(EntityReverseCreeper.class, "reversecreeper");
            RenderingRegistry.registerEntityRenderingHandler(EntitySpiderCreeper.class, manager -> new RenderSpiderCreeper(manager));
            registerRenderer(EntitySpringCreeper.class, "springcreeper");
            registerRenderer(EntityStoneCreeper.class, "stonecreeper");
            registerRenderer(EntityWaterCreeper.class, "watercreeper");
            registerRenderer(EntityWindCreeper.class, "windcreeper");
            registerRenderer(EntityZombieCreeper.class, "zombiecreeper");
        }

        /** Records the texture name and registers the default creeper renderer for `creeper`. */
        private static void registerRenderer(Class creeper, String textureName) {
            Constants.TEXTURE_LIST.add(textureName);
            RenderingRegistry.registerEntityRenderingHandler(creeper, manager -> new RenderBaseCreeper(manager, textureName));
        }
    }
}
fdosani/rubicon-ml | rubicon_ml/viz/dataframe_plot.py | <gh_stars>0
import dash_bootstrap_components as dbc
import pandas as pd
import plotly.express as px
from dash import dcc, html
from dash.dependencies import Input, Output

from rubicon_ml.viz.base import VizBase
from rubicon_ml.viz.common.colors import (
    get_rubicon_colorscale,
    light_blue,
    plot_background_blue,
)
class DataframePlot(VizBase):
    """Plot the dataframes with name `dataframe_name` logged to the
    experiments `experiments` on a shared axis.

    Parameters
    ----------
    dataframe_name : str
        The name of the dataframe to plot. A dataframe with name
        `dataframe_name` must be logged to each experiment in `experiments`.
    experiments : list of rubicon_ml.client.experiment.Experiment, optional
        The experiments to visualize. Defaults to None. Can be set as
        attribute after instantiation.
    plotting_func : function, optional
        The `plotly.express` plotting function used to visualize the
        dataframes. Available options can be found at
        https://plotly.com/python-api-reference/plotly.express.html.
        Defaults to `plotly.express.line`.
    plotting_func_kwargs : dict, optional
        Keyword arguments to be passed to `plotting_func`. Available options
        can be found in the documentation of the individual functions at the
        URL above. Defaults to None (treated as an empty dict).
    x : str, optional
        The name of the column in the dataframes with name `dataframe_name`
        to plot across the x-axis.
    y : str, optional
        The name of the column in the dataframes with name `dataframe_name`
        to plot across the y-axis.
    """

    def __init__(
        self,
        dataframe_name,
        experiments=None,
        plotting_func=px.line,
        plotting_func_kwargs=None,
        x=None,
        y=None,
    ):
        super().__init__(dash_title="plot dataframes")

        self.dataframe_name = dataframe_name
        self.experiments = experiments
        self.plotting_func = plotting_func
        # Copy into a fresh dict: the previous `{}` default was a mutable
        # default argument shared across all instances, and it is mutated in
        # `load_experiment_data` (color keys are injected).
        self.plotting_func_kwargs = dict(plotting_func_kwargs) if plotting_func_kwargs else {}
        self.x = x
        self.y = y

    @property
    def layout(self):
        """Defines the dataframe plot's layout."""
        header_text = (
            f"showing dataframe '{self.dataframe_name}' "
            f"over {len(self.experiments)} experiment"
            f"{'s' if len(self.experiments) != 1 else ''}"
        )

        return html.Div(
            [
                # Invisible div used only to fire the initial render callback.
                html.Div(id="dummy-callback-trigger"),
                dbc.Row(
                    html.H5(header_text, id="header-text"),
                    className="header-row",
                ),
                dcc.Loading(dcc.Graph(id="dataframe-plot"), color=light_blue),
            ],
            id="dataframe-plot-layout-container",
        )

    def load_experiment_data(self):
        """Load the experiment data required for the dataframe plot.

        Extracts the dataframe with name `self.dataframe_name` from
        each experiment in `self.experiments` and combines the data
        stored in them into one dataframe. All dataframes with name
        `dataframe_name` must have the same schema.
        """
        data_dfs = []

        for experiment in self.experiments:
            dataframe = experiment.dataframe(name=self.dataframe_name)
            data_df = dataframe.get_data()
            data_df["experiment_id"] = experiment.id

            # Default the plotted axes to the first two columns seen.
            if self.x is None:
                self.x = data_df.columns[0]
            if self.y is None:
                self.y = data_df.columns[1]

            data_dfs.append(data_df)

        # Concatenate once at the end: `DataFrame.append` in a loop was
        # quadratic and has been removed in pandas 2.0.
        self.data_df = pd.concat(data_dfs).reset_index(drop=True) if data_dfs else None

        if "color" not in self.plotting_func_kwargs:
            self.plotting_func_kwargs["color"] = "experiment_id"
        if "color_discrete_sequence" not in self.plotting_func_kwargs:
            self.plotting_func_kwargs["color_discrete_sequence"] = get_rubicon_colorscale(
                len(self.experiments),
            )

    def register_callbacks(self, link_experiment_table=False):
        """Wire up the Dash callback that (re)draws the plot.

        When `link_experiment_table` is True, the plot also listens to the
        shared experiment table's row selection and filters accordingly.
        """
        outputs = [
            Output("dataframe-plot", "figure"),
            Output("header-text", "children"),
        ]
        inputs = [Input("dummy-callback-trigger", "children")]
        states = []

        if link_experiment_table:
            inputs.append(
                Input("experiment-table", "derived_virtual_selected_row_ids"),
            )

        @self.app.callback(outputs, inputs, states)
        def update_dataframe_plot(*args):
            """Render the plot specified by `self.plotting_func`.

            Returns the Plotly figure generated by calling `self.plotting_func`
            on the data in the experiments' dataframes and the header text
            with the dataframes' name.
            """
            if link_experiment_table:
                selected_row_ids = args[-1]
                selected_row_ids = selected_row_ids if selected_row_ids else []
            else:
                selected_row_ids = [e.id for e in self.experiments]

            df_figure_margin = 30
            df_figure = self.plotting_func(
                self.data_df[self.data_df["experiment_id"].isin(selected_row_ids)],
                self.x,
                self.y,
                **self.plotting_func_kwargs,
            )
            df_figure.update_layout(margin_t=df_figure_margin, plot_bgcolor=plot_background_blue)

            # Truncate legend labels to the short (7-char) form of the experiment id.
            for i in range(len(df_figure.data)):
                df_figure.data[i].name = df_figure.data[i].name[:7]

            header_text = (
                f"showing dataframe '{self.dataframe_name}' "
                f"over {len(selected_row_ids)} experiment"
                f"{'s' if len(selected_row_ids) != 1 else ''}"
            )

            return df_figure, header_text
|
Xovis/azure-service-operator | pkg/secrets/kube/client_test.go | // Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
package kube
import (
"context"
"fmt"
"strconv"
"strings"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
v1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/types"
"github.com/Azure/azure-service-operator/pkg/secrets"
)
// getExpectedSecretName returns the namespaced name a secret is expected to be
// stored under for the given naming scheme: V1 uses the key's name verbatim,
// V2 prefixes it with the lowercased kind. Unknown schemes panic.
func getExpectedSecretName(secretKey secrets.SecretKey, namingScheme secrets.SecretNamingVersion) types.NamespacedName {
	name := secretKey.Name
	switch namingScheme {
	case secrets.SecretNamingV1:
		// V1: the secret name is the key name unchanged.
	case secrets.SecretNamingV2:
		name = strings.ToLower(secretKey.Kind) + "-" + secretKey.Name
	default:
		panic("unknown secret naming scheme")
	}
	return types.NamespacedName{Namespace: secretKey.Namespace, Name: name}
}
// Kube Secrets Client specs: round-trip Upsert/Get/Delete through the secrets
// client against the test API server, once per supported naming scheme.
var _ = Describe("Kube Secrets Client", func() {
	BeforeEach(func() {
		// Add any setup steps that needs to be executed before each test
	})
	AfterEach(func() {
		// Add any teardown steps that needs to be executed after each test
	})
	// Add Tests for OpenAPI validation (or additonal CRD features) specified in
	// your API definition.
	// Avoid adding tests for vanilla CRUD operations because they would
	// test Kubernetes API server, which isn't the goal here.
	Context("Create and Delete", func() {
		supportedSecretNamingSchemes := []secrets.SecretNamingVersion{
			secrets.SecretNamingV1,
			secrets.SecretNamingV2,
		}
		for _, secretNamingScheme := range supportedSecretNamingSchemes {
			// Re-bind the loop variable so each It closure captures its own copy.
			secretNamingScheme := secretNamingScheme
			It(fmt.Sprintf("should create and delete secret in k8s with secret naming scheme %q", secretNamingScheme), func() {
				// Seed-derived name keeps runs reproducible per random seed.
				secretName := "secret" + strconv.FormatInt(GinkgoRandomSeed(), 10)
				var err error
				ctx := context.Background()
				data := map[string][]byte{
					"test":  []byte("data"),
					"sweet": []byte("potato"),
				}
				client := New(k8sClient, secretNamingScheme)
				key := secrets.SecretKey{Name: secretName, Namespace: "default", Kind: "Test"}
				// NOTE(review): Context is a Ginkgo container node; invoking it
				// inside an It body (here and below) is unconventional — By(...)
				// is the usual step annotation. Confirm this behaves as intended.
				Context("creating secret with secret client", func() {
					err = client.Upsert(ctx, key, data)
					Expect(err).To(BeNil())
				})
				secret := &v1.Secret{}
				Context("ensuring secret exists using k8s client", func() {
					// Look the secret up under the name the scheme should have produced.
					err = k8sClient.Get(ctx, getExpectedSecretName(key, secretNamingScheme), secret)
					Expect(err).To(BeNil())
					d, err := client.Get(ctx, key)
					Expect(err).To(BeNil())
					// Every stored key/value pair must round-trip unchanged.
					for k, v := range d {
						Expect(data[k]).To(Equal(v))
					}
				})
				Context("delete secret and ensure it is gone", func() {
					err = client.Delete(ctx, key)
					Expect(err).To(BeNil())
					// A subsequent direct Get must fail (secret removed).
					err = k8sClient.Get(ctx, getExpectedSecretName(key, secretNamingScheme), secret)
					Expect(err).ToNot(BeNil())
				})
			})
		}
	})
})
|
Haavasma/Harmoni | client/src/Containers/Event/Components/artist.js | //@flow
import * as React from 'react';
import { Component } from "react-simplified";
import Button from "react-bootstrap/Button";
import Card from "react-bootstrap/Card";
import {Artist} from "../../../services/ArtistService";
import Accordion from "react-bootstrap/Accordion";
import {Alert} from "../../../widgets";
// Artists the user has marked for deletion; exported at the bottom of this
// file so the parent view can persist the removals.
let del_artist: Artist[] = [];
/**
 * Regular expression used to check whether a string is a valid e-mail address.
 * @type {RegExp}
 */
const emailRegEx = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
/**
* ArtistDropdown
* Dette er en komponent klasse som brukes til å lage nye artister.
* @requires react
* @requires react-simplified
* @requires react-bootstrap
* @constructor
* @param {string} buttonName - Dette er hva som skal stå på knappen som man trykker på for å se ArtistDropdown
*/
export class ArtistDropdown extends Component<{buttonName: string, artist: Artist}> {
state: Object={raider: null, hraider: null,contract: null};
artist: Artist[] = [];
artist_name: string = this.props.artist.artist_name;
currentriders: string = this.props.artist.riders;
riders: File = this.props.artist.riders;
currenthospitality_riders: string = this.props.artist.hospitality_riders;
hospitality_riders: File = this.props.artist.hospitality_riders;
currentartist_contract: string = this.props.artist.artist_contract;
artist_contract: File = this.props.artist.artist_contract;
email: string = this.props.artist.email;
phone: string = this.props.artist.phone;
//image: string = this.props.image;
/**
* Dette er metoden som brukes for å generere en HTML komponent for å redigere artist.
* @returns {*} - Dette returnerer en HTML komponent.
*/
render() {
return (
<Accordion>
<Card style={{border: "none"}}>
<Card.Header style={{border: "none"}}>
<Accordion.Toggle as={Button} variant="success" eventKey="0" style = {{float: "left"}}>
{this.props.buttonName}
</Accordion.Toggle>
<button type="button" className="btn btn-danger" onClick={() => this.delete(this.props.artist)} style={{marginLeft: 10+"px", float: "left"}}>Slett</button>
</Card.Header>
<Accordion.Collapse eventKey="0" style={{border: "none"}}>
<Card.Body style={{width: "70%"}}>
<form style={{padding: 20 + 'px', width: "100%" , position: "sticky", overflow: "visible"}}>
<div className="form-group">
<row>
<h4>Kontakt info: </h4><br/>
<div className="form-group">
<label>Fullt navn:</label>
<input type="text" className="form-control" placeholder="<NAME>" value={this.artist_name}
onChange={(event: SyntheticInputEvent<HTMLInputElement>) => (this.artist_name = event.target.value)}/>
</div>
<div className="form-group">
<label>E-post: </label>
<input type="epost" className="form-control" placeholder="<EMAIL>" value={this.email}
onChange={(event: SyntheticInputEvent<HTMLInputElement>) => (this.email = event.target.value)}/>
</div>
<div className="form-group">
<label>Mobilnummer: </label>
<input type="text" className="form-control" placeholder="+47 00000000" value={this.phone}
onChange={(event: SyntheticInputEvent<HTMLInputElement>) => (this.phone = event.target.value)}/>
</div>
<label>Rider:</label><br/>
<div>
<a href = {this.currentriders} target = "blank" style = {{color: "blue"}}>{this.currentriders?<p>Nåværende riders</p>:<div></div>}</a>
</div>
<div className="input-group">
<div className="input-group-prepend">
</div>
<div className="custom-file">
<input type="file" className="file-path validate" id="raider" accept='.pdf'
onChange={(event: SyntheticInputEvent<HTMLInputElement>)=>{
if(event.target.files[0]) {
let ascii = /^[ -~]+$/;
if (!ascii.test(event.target.files[0].name)) {
Alert.danger("Ugyldig filnavn: unngå å bruke bokstavene 'Æ, Ø og Å'");
} else {
this.riders = event.target.files[0];
}
}
}
}/>
</div>
</div><br/>
<label>Hospitality rider:</label><br/>
<a href = {this.currenthospitality_riders} target = "blank" style = {{color: "blue"}}>{this.currenthospitality_riders?<p>Nåværende hospitality riders</p>:<div></div>}</a>
<div className="input-group">
<div className="input-group-prepend">
</div>
<div className="custom-file">
<input type="file" className="file-path validate" id="hospitality-raider" accept='.pdf'
onChange={(event: SyntheticInputEvent<HTMLInputElement>)=>{
if(event.target.files[0]) {
let ascii = /^[ -~]+$/;
if (!ascii.test(event.target.files[0].name)) {
Alert.danger("Ugyldig filnavn: unngå å bruke bokstavene 'Æ, Ø og Å'");
} else {
this.hospitality_riders = event.target.files[0];
}
}
}}/>
</div>
</div>
<br/>
<label>Artistkontrakt:</label><br/>
<a href = {this.currentartist_contract} target = "blank" style = {{color: "blue"}}>{this.currentartist_contract?<p>Nåværende artistkontrakt</p>:<div></div>}</a>
<div className="input-group">
<div className="input-group-prepend">
</div>
<div className="custom-file">
<input type="file" className="file-path validate" id="contract" accept='.pdf'
onChange={(event: SyntheticInputEvent<HTMLInputElement>)=>{
if(event.target.files[0]) {
let ascii = /^[ -~]+$/;
if (!ascii.test(event.target.files[0].name)) {
Alert.danger("Ugyldig filnavn: unngå å bruke bokstavene 'Æ, Ø og Å'");
} else {
this.artist_contract = event.target.files[0];
}
}
}}/>
</div>
</div>
<br/>
<div className="form-group" align="center">
<Accordion.Toggle type="button" as={Button} variant="success" eventKey="0" onClick={() => {this.add()}}>
Lagre
</Accordion.Toggle>
</div>
</row>
</div>
</form>
</Card.Body>
</Accordion.Collapse>
</Card>
</Accordion>
);
}
/**
* Dette er metoden man bruker for å legge inn artist i arangement
* For å legge inn ny artist må man ha navn +e-post|tlf
*/
add(){
if (this.artist_name == "") {
Alert.danger("Artist navn ikke fylt inn.");
return;
}
if (!emailRegEx.test(this.email) && this.phone == "") {
Alert.danger("Telefonnummer eller gyldig epost-adresse må fylles inn.");
return;
}
if(this.pris < 0){
this.pris = 0;
Alert.danger("Pris kan ikke være en negativ verdi");
return;
}
console.log(this.state);
const index = this.artist.indexOf(this.props.artist);
this.artist[index] = new Artist(this.props.artist.artist_id,this.props.artist.event_id,this.artist_name ,this.email, this.phone, this.riders, this.hospitality_riders, this.artist_contract);
//let s: any = ArtistDetails.instance();
//s.mounted();
}
/**
* Dette er en funksjon som kjører før render funksjonen.
* Vi bruker denne til å ikke overskrive detaljene til artisten
*/
mounted(): unknown {
let s: any = ArtistDetails.instance();
this.artist = s.artist;
}
/**
* Dette er funskjeonen man bruker for å slette en artist.
* @param {Artist} a - Parameteren tar inn et artist objektav artisten som skal slettes
*/
delete(a: Artist){
del_artist.push(a);
const index = this.artist.indexOf(a);
if (index > -1) {
this.artist[index] = null;
}
}
}
/**
* Denne klassen skal vise artist informasjonen på en oversiktlig måte, og evt mulighet til å lage nye
*/
export class ArtistDetails extends Component {
artist: Artist[] = [];
/**
* Denne klassen inneholder en react komponent som skal vise informasonen til alle artister som er koblet til et arrangement
* @returns {*} - Denne metoden returnerer en komponent som viser detaljene til alle atristene i et arrangement
*/
render(){
return (
<div className="card">
<div className="card-header">
<h3>Artister:</h3>
</div>
<div className="card-body">
{this.artist.map(a => {if (a) { return(
<div className="card-header">
<div className="row">
<div className="col"><label>Artist: {a.artist_name} </label></div>
<div className="col"><label>Email: {a.email}</label></div>
<div className="col"><label>Tlf: {a.phone}</label></div>
</div>
<div className={"row"}>
<div className={"col"}>
<ArtistDropdown buttonName={"Legg til"} artist={a}/>
</div>
</div>
</div>
)}})}
<button type="button" className="btn btn-secondary" onClick={() => this.addNewArtist()}>Legg til artist</button>
</div>
</div>
)
}
/**
* Denne metoden skal lage en ny tom artist som brukeren skal fylle inn med informeasjon.
*/
addNewArtist(){
let a: Artist = new Artist(-1, 0, "", "", "", null, null, null, null);
this.artist.push(a);
}
}
export { del_artist };
|
mhewson/rei-cedar | backstop_data/engine_scripts/onReady.js | <filename>backstop_data/engine_scripts/onReady.js
const hover = require('./hover');
const focus = require('./focus');
module.exports = (chromy, scenario, vp) => {
hover(chromy, scenario);
focus(chromy, scenario);
// add more ready handlers here...
};
|
HolmanA/CatChat | src/main/java/catchat/model/chat/BaseChatModel.java | package catchat.model.chat;
import catchat.data.source.entities.chat.Chat;
import catchat.data.source.entities.message.Message;
import catchat.data.source.ApiInvoker;
import catchat.data.source.groupme.BaseGroupMeApiCommand;
import catchat.data.source.groupme.LikeMessageApiCommand;
import catchat.data.source.groupme.UnlikeMessageApiCommand;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * BaseChatModel
 *
 * Base implementation of {@link ChatContract.Model}. Maintains the message
 * list for a single chat, notifies subscribed listeners on changes, and
 * delegates construction of the chat-type-specific API commands to subclasses.
 *
 * @author <NAME>
 * @version 1.0
 * @since 1.0
 */
public abstract class BaseChatModel implements ChatContract.Model {
    private static final Logger log = LoggerFactory.getLogger(BaseChatModel.class);

    // Client-side id attached to outgoing messages; incremented per send.
    private static int sentMessageId = 1;

    private List<ChatContract.Listener> listeners;
    private ApiInvoker invoker;
    private Chat chat;
    // Loaded messages; the oldest loaded message is the last element.
    private List<Message> messages;
    // Paging cursor: id of the oldest message loaded so far ("" = none yet).
    private String oldestMessageId;

    BaseChatModel(ApiInvoker invoker, Chat chat) {
        this.invoker = invoker;
        this.chat = chat;
        listeners = new ArrayList<>();
        messages = new ArrayList<>();
        oldestMessageId = "";
        log.debug("Chat model created for chat: {}", chat.getName());
    }

    /** Loads the next (older) page of messages, appending to the current list. */
    @Override
    public void loadMoreMessages() {
        log.debug("Loading more messages");
        try {
            invoker.execute(getMessagesCommand(this::parseGetMessagesResult,
                    chat.getId(), oldestMessageId, ""));
        } catch (IOException e) {
            // Route through the class logger instead of printStackTrace().
            log.error("Failed to load more messages", e);
        }
    }

    /** Discards the loaded messages and fetches the first page again. */
    @Override
    public void reloadMessages() {
        log.debug("Reloading messages");
        try {
            invoker.execute(getMessagesCommand(result -> {
                clearMessages();
                parseGetMessagesResult(result);
            }, chat.getId(), "", ""));
        } catch (IOException e) {
            log.error("Failed to reload messages", e);
        }
    }

    /**
     * Sends a message to this chat and notifies listeners once it is accepted.
     *
     * @param messageText the message body to send
     */
    @Override
    public void sendMessage(String messageText) {
        log.debug("Sending message");
        try {
            invoker.execute(sendMessageCommand(result -> {
                for (ChatContract.Listener listener : listeners) {
                    log.trace("Message Sent: {}", listener);
                    listener.messageSent();
                }
            }, chat.getId(), Integer.toString(sentMessageId++), messageText));
        } catch (IOException e) {
            log.error("Failed to send message", e);
        }
    }

    @Override
    public void clearMessages() {
        log.debug("Clearing messages");
        messages.clear();
    }

    /** Likes the given message, then reloads so the like counts refresh. */
    @Override
    public void likeMessage(Message message) {
        log.debug("Liking message");
        try {
            invoker.execute(new LikeMessageApiCommand(result -> {
                reloadMessages();
            }, chat.getId(), message.getId()));
        } catch (IOException e) {
            log.error("Failed to like message", e);
        }
    }

    /** Removes a like from the given message, then reloads. */
    @Override
    public void unlikeMessage(Message message) {
        log.debug("Unliking message");
        try {
            invoker.execute(new UnlikeMessageApiCommand(result -> {
                reloadMessages();
            }, chat.getId(), message.getId()));
        } catch (IOException e) {
            log.error("Failed to unlike message", e);
        }
    }

    @Override
    public Chat getChat() {
        return chat;
    }

    @Override
    public List<Message> getMessages() {
        return messages;
    }

    @Override
    public void subscribe(ChatContract.Listener listener) {
        log.debug("Subscribing {}", listener);
        listeners.add(listener);
    }

    @Override
    public void unsubscribe(ChatContract.Listener listener) {
        log.debug("Unsubscribing {}", listener);
        listeners.remove(listener);
    }

    @Override
    public void unsubscribeAll() {
        log.debug("Unsubscribing All");
        listeners.clear();
    }

    /** Builds the chat-type-specific "fetch messages" command. */
    abstract BaseGroupMeApiCommand<List<Message>> getMessagesCommand(BaseGroupMeApiCommand.Listener<List<Message>> listener, String chatId, String beforeId, String sinceId) throws IOException;

    /** Builds the chat-type-specific "send message" command. */
    abstract BaseGroupMeApiCommand<Void> sendMessageCommand(BaseGroupMeApiCommand.Listener<Void> listener, String chatId, String messageId, String messageText) throws IOException;

    /**
     * Appends fetched messages, advances the paging cursor, and notifies
     * listeners. A null or empty result is treated as "nothing new".
     */
    void parseGetMessagesResult(List<Message> result) {
        log.debug("Setting messages");
        if (result != null && !result.isEmpty()) {
            messages.addAll(result);
            oldestMessageId = messages.get(messages.size() - 1).getId();
            for (ChatContract.Listener listener : listeners) {
                log.trace("Chat Changed: {}", listener);
                listener.chatChanged();
            }
        }
    }
}
|
nicksieger/advent-jruby | vendor/RMagick4J/test/eyetests/tests/gruff_area_1.rb | <filename>vendor/RMagick4J/test/eyetests/tests/gruff_area_1.rb
require 'rubygems'
require 'gruff'
require File.join(File.dirname(__FILE__), 'new_image.rb')

# Eye test: render a many-point area graph with the default theme and write it
# out as gruff_area_1.jpg for visual comparison.
graph = Gruff::Area.new
graph.title = "Many Multi-Area Graph Test"
graph.labels = {
  0  => 'June',
  10 => 'July',
  30 => 'August',
  50 => 'September',
}
graph.data('many points', Array.new(51) { rand(100) })
# Default theme
graph.write("gruff_area_1.jpg")
acrollet/content-build | src/site/stages/build/drupal/tests/fixtures/metalsmith-drupal.fixture.js | export const personProfileWithoutBio = {
data: {
nodeQuery: {
entities: [
{
entityBundle: 'person_profile',
entityUrl: { path: 'profile-sans-bio' },
},
],
},
},
};
export const personProfileWithBio = {
data: {
nodeQuery: {
entities: [
{
entityBundle: 'person_profile',
entityUrl: { path: 'profile-with-bio' },
fieldIntroText: 'This person has a bio',
fieldBody: 'Lorem ipsum dolor amet',
},
],
},
},
};
|
chainsquad/python-graphenelib | graphenecommon/aio/block.py | <gh_stars>10-100
# -*- coding: utf-8 -*-
from .blockchainobject import BlockchainObject
from ..block import Block as SyncBlock, BlockHeader as SyncBlockHeader
from ..exceptions import BlockDoesNotExistsException
class Block(BlockchainObject, SyncBlock):
    """Read a single block from the chain (async variant).

    :param int block: block number
    :param instance blockchain_instance: instance to use when accesing a RPC
    :param bool lazy: Use lazy loading
    :param loop: asyncio event loop

    Instances behave like dictionaries holding the block's contents, with
    extra helper methods inherited from the sync implementation.

    .. code-block:: python

        from aio.block import Block
        block = await Block(1)
        print(block)
    """

    async def __init__(self, *args, use_cache=False, **kwargs):
        # Default caching to *off* here: unlike most chain objects, caching
        # every block forever would be wasteful.
        kwargs["use_cache"] = use_cache
        await BlockchainObject.__init__(self, *args, **kwargs)

    async def refresh(self):
        """Fetch the block's contents freshly from the API.

        Blocks are immutable, but this re-queries the RPC endpoint anyway.
        """
        raw_block = await self.blockchain.rpc.get_block(self.identifier)
        if not raw_block:
            raise BlockDoesNotExistsException
        await super().__init__(
            raw_block,
            blockchain_instance=self.blockchain,
            use_cache=self._use_cache,
        )
class BlockHeader(BlockchainObject, SyncBlockHeader):
    """Async view of a single block header."""

    async def __init__(self, *args, use_cache=False, **kwargs):
        # Default caching to *off*: headers need not be cached for eternity.
        kwargs["use_cache"] = use_cache
        await BlockchainObject.__init__(self, *args, **kwargs)

    async def refresh(self):
        """Fetch the header's contents freshly from the API.

        Headers are immutable, but this re-queries the RPC endpoint anyway.
        """
        raw_header = await self.blockchain.rpc.get_block_header(self.identifier)
        if not raw_header:
            raise BlockDoesNotExistsException
        await super().__init__(
            raw_header,
            blockchain_instance=self.blockchain,
            use_cache=self._use_cache,
        )
|
zoho/zohocrm-python-sdk-2.0 | zcrmsdk/src/com/zoho/crm/api/bulk_write/__init__.py | from .request_wrapper import RequestWrapper
from .resource import Resource
from .bulk_write_operations import BulkWriteOperations, UploadFileHeader
from .result import Result
from .api_exception import APIException
from .action_response import ActionResponse
from .response_handler import ResponseHandler
from .file import File
from .call_back import CallBack
from .success_response import SuccessResponse
from .bulk_write_response import BulkWriteResponse
from .file_body_wrapper import FileBodyWrapper
from .field_mapping import FieldMapping
from .response_wrapper import ResponseWrapper
|
Sergioamjr/my-website | src/components/Footer/Footer.js | /** @jsx jsx */
import { css, jsx } from "@emotion/core";
import PropTypes from "prop-types";
import { justifyContent, primaryFont } from "../../design";
// Full-width, theme-colored footer bar with centered content.
const footerStyle = css`
  background: var(--theme);
  height: 60px;
  display: flex;
  margin-top: 40px;
  align-items: center;
  justify-content: center;
`;

// Horizontal spacing between the social-link list items.
const liStyle = css`
  margin: 0 15px;
`;

// Links inherit the primary font and use the light theme color.
const linkStyle = css`
  ${primaryFont}
  color: var(--light)
`;
const Footer = ({ options }) => {
return (
<footer css={footerStyle}>
<ul css={justifyContent}>
{options.map(({ Icon, url, name, description }) => (
<li
key={name}
css={css`
${liStyle}
`}
>
<a
aria-label={description}
title={description}
target="_blank"
rel="noopener noreferrer"
href={url}
css={linkStyle}
>
<Icon />
</a>
</li>
))}
</ul>
</footer>
);
};
// Runtime prop validation: `options` is a list of social-link descriptors.
Footer.propTypes = {
  options: PropTypes.arrayOf(
    PropTypes.shape({
      Icon: PropTypes.func, // icon component rendered inside the anchor
      name: PropTypes.string, // unique key for the list item
      description: PropTypes.string, // accessible label / tooltip text
      url: PropTypes.string, // external link target
    })
  ),
};

export default Footer;
guanzhongxing/craft-atom-rpc | craft-atom-protocol-http/src/main/java/io/craft/atom/protocol/http/model/HttpCookie.java | package io.craft.atom.protocol.http.model;
import static io.craft.atom.protocol.http.HttpConstants.S_EQUAL_SIGN;
import static io.craft.atom.protocol.http.HttpConstants.S_SEMICOLON;
import static io.craft.atom.protocol.http.HttpConstants.S_SP;
import io.craft.atom.protocol.http.HttpDates;
import java.io.Serializable;
import java.util.Collections;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
/**
* Represents a token or short packet of state information. <br>
* These header fields can be used by HTTP servers to store state (called cookies) at HTTP user agents,
* letting the servers maintain a stateful session over the mostly stateless HTTP protocol.
* <p>
* Cookie syntax:
* <pre>
* set-cookie-header = "Set-Cookie:" SP set-cookie-string
* set-cookie-string = cookie-pair *( ";" SP cookie-av )
* cookie-pair = cookie-name "=" cookie-value
* cookie-name = token
* cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
* cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
* ; US-ASCII characters excluding CTLs,
* ; whitespace DQUOTE, comma, semicolon,
* ; and backslash
* token = <token, defined in [RFC2616], Section 2.2>
*
* cookie-av = expires-av / max-age-av / domain-av /
* path-av / secure-av / httponly-av /
* extension-av
* expires-av = "Expires=" sane-cookie-date
* sane-cookie-date = <rfc1123-date, defined in [RFC2616], Section 3.3.1>
* max-age-av = "Max-Age=" non-zero-digit *DIGIT
* ; In practice, both expires-av and max-age-av
* ; are limited to dates representable by the
* ; user agent.
* non-zero-digit = %x31-39
* ; digits 1 through 9
* domain-av = "Domain=" domain-value
* domain-value = <subdomain>
* ; defined in [RFC1034], Section 3.5, as
* ; enhanced by [RFC1123], Section 2.1
* path-av = "Path=" path-value
* path-value = <any CHAR except CTLs or ";">
* secure-av = "Secure"
* httponly-av = "HttpOnly"
* extension-av = <any CHAR except CTLs or ";">
*
*
* cookie-header = "Cookie:" OWS cookie-string OWS
* cookie-string = cookie-pair *( ";" SP cookie-pair )
* </pre>
*
* For examples:
* <pre>
* Set-Cookie: SID=31d4d96e407aad42; Domain=example.com; Path=/; HttpOnly; Secure; Expires=Wed, 09 Jun 2021 10:18:14 GMT; Max-Age=86400
* Cookie: SID=31d4d96e407aad42; lang=en-US
* </pre>
*
* More about cookie definition please reference <a href="http://tools.ietf.org/html/rfc6265">rfc6265</a>.
*
* @author mindwind
* @version 1.0, Mar 22, 2013
*/
@ToString(of = { "name", "value", "domain", "path", "httpOnly", "secure", "expires", "maxAge", "extension" })
public class HttpCookie implements Serializable {

    private static final long serialVersionUID = 5584804359930330729L;

    // Cookie attribute names as defined by RFC 6265.
    public static final String DOMAIN    = "Domain";
    public static final String PATH      = "Path";
    public static final String HTTP_ONLY = "HttpOnly";
    public static final String SECURE    = "Secure";
    public static final String EXPIRES   = "Expires";
    public static final String MAX_AGE   = "Max-Age";

    // ~ ----------------------------------------------------------------------------------------------------------

    @Getter @Setter private String  name;
    @Getter @Setter private String  value;
    @Getter @Setter private String  domain;
    @Getter @Setter private String  path;
    // Boolean (not boolean) so "not specified" (null) is distinguishable.
    @Setter private Boolean httpOnly;
    @Setter private Boolean secure;
    @Getter @Setter private Date    expires;
    @Getter @Setter private Integer maxAge;
    // Extra attributes beyond the standard ones; insertion order preserved.
    private Map<String, String> extension = new LinkedHashMap<String, String>();

    // ~ ----------------------------------------------------------------------------------------------------------

    public HttpCookie() {
        super();
    }

    /**
     * @param name  the cookie name; must not be null
     * @param value the cookie value
     * @throws IllegalArgumentException if {@code name} is null
     */
    public HttpCookie(String name, String value) {
        if (name == null) {
            throw new IllegalArgumentException("Name should not be null");
        }
        this.name = name;
        this.value = value;
    }

    public HttpCookie(String name, String value, String domain) {
        this(name, value);
        this.domain = domain;
    }

    public HttpCookie(String name, String value, String domain, String path) {
        this(name, value, domain);
        this.path = path;
    }

    public HttpCookie(String name, String value, String domain, String path, boolean httpOnly) {
        this(name, value, domain, path);
        // BUGFIX: the httpOnly argument was previously accepted but ignored.
        this.httpOnly = httpOnly;
    }

    public HttpCookie(String name, String value, String domain, String path, boolean httpOnly, int maxAge) {
        this(name, value, domain, path, httpOnly);
        this.maxAge = maxAge;
    }

    // ~ ----------------------------------------------------------------------------------------------------------

    /** @return whether the Secure flag is set; null when unspecified. */
    public Boolean isSecure() {
        return secure;
    }

    /** @return whether the HttpOnly flag is set; null when unspecified. */
    public Boolean isHttpOnly() {
        return httpOnly;
    }

    /** @return a read-only view of the non-standard attributes. */
    public Map<String, String> getExtensionAttributes() {
        return Collections.unmodifiableMap(extension);
    }

    public void addExtensionAttribute(String name, String value) {
        this.extension.put(name, value);
    }

    public void removeExtensionAttribute(String name) {
        this.extension.remove(name);
    }

    public String getExtensionAttribute(String name) {
        return this.extension.get(name);
    }

    /**
     * Serializes this cookie into the {@code Set-Cookie} attribute string
     * described in the class javadoc (RFC 6265 syntax).
     *
     * @return the cookie rendered as {@code name=value; Attr=...; ...}
     */
    public String toHttpString() {
        StringBuilder sb = new StringBuilder();
        sb.append(getName()).append(S_EQUAL_SIGN).append(getValue());
        if (domain != null) {
            sb.append(S_SEMICOLON).append(S_SP).append(DOMAIN).append(S_EQUAL_SIGN).append(getDomain());
        }
        if (path != null) {
            sb.append(S_SEMICOLON).append(S_SP).append(PATH).append(S_EQUAL_SIGN).append(getPath());
        }
        // BUGFIX: HttpOnly/Secure are presence-only flags per RFC 6265; the old
        // code emitted them for any non-null value, including explicit FALSE.
        if (Boolean.TRUE.equals(httpOnly)) {
            sb.append(S_SEMICOLON).append(S_SP).append(HTTP_ONLY);
        }
        if (Boolean.TRUE.equals(secure)) {
            sb.append(S_SEMICOLON).append(S_SP).append(SECURE);
        }
        if (expires != null) {
            sb.append(S_SEMICOLON).append(S_SP).append(EXPIRES).append(S_EQUAL_SIGN).append(HttpDates.format(getExpires()));
        }
        if (maxAge != null) {
            sb.append(S_SEMICOLON).append(S_SP).append(MAX_AGE).append(S_EQUAL_SIGN).append(getMaxAge());
        }
        // Append non-standard attributes; a null value means a bare flag.
        Set<Entry<String, String>> entrys = extension.entrySet();
        for (Entry<String, String> entry : entrys) {
            String k = entry.getKey();
            String v = entry.getValue();
            sb.append(S_SEMICOLON).append(S_SP).append(k);
            if (v != null) {
                sb.append(S_EQUAL_SIGN).append(v);
            }
        }
        return sb.toString();
    }
}
|
r-koubou/KSPSyntaxParser | src/java/net/rkoubou/kspparser/analyzer/data/reserved/ReservedSymbolManager.java | /* =========================================================================
ReservedSymbolManager.java
Copyright (c) R-Koubou
======================================================================== */
package net.rkoubou.kspparser.analyzer.data.reserved;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import net.rkoubou.kspparser.ApplicationConstants;
import net.rkoubou.kspparser.analyzer.AnalyzerConstants;
import net.rkoubou.kspparser.analyzer.Argument;
import net.rkoubou.kspparser.analyzer.Callback;
import net.rkoubou.kspparser.analyzer.CallbackTable;
import net.rkoubou.kspparser.analyzer.Command;
import net.rkoubou.kspparser.analyzer.CommandArgument;
import net.rkoubou.kspparser.analyzer.CommandTable;
import net.rkoubou.kspparser.analyzer.ReturnType;
import net.rkoubou.kspparser.analyzer.SymbolDefinition.SymbolType;
import net.rkoubou.kspparser.analyzer.UIType;
import net.rkoubou.kspparser.analyzer.UITypeTable;
import net.rkoubou.kspparser.analyzer.Variable;
import net.rkoubou.kspparser.analyzer.VariableTable;
import net.rkoubou.kspparser.javacc.generated.ASTCallCommand;
import net.rkoubou.kspparser.javacc.generated.ASTCallbackArgumentList;
import net.rkoubou.kspparser.javacc.generated.ASTCallbackDeclaration;
import net.rkoubou.kspparser.javacc.generated.ASTVariableDeclaration;
import net.rkoubou.kspparser.javacc.generated.KSPParserTreeConstants;
import net.rkoubou.kspparser.util.table.Row;
import net.rkoubou.kspparser.util.table.StringParser;
/**
* data/reserved に配備した予約済み変数、コマンド、コールバック、関数など各種シンボルの定義ファイルからデシリアライズする
*/
public class ReservedSymbolManager implements KSPParserTreeConstants, AnalyzerConstants
{
/** 定義ファイルの場所 */
static public final String BASE_DIR = ApplicationConstants.DATA_DIR + "/symbols";
/** split処理で使用する、条件式ORの文字列表現 */
static public final String SPLIT_COND_OR = "||";
/** split処理で使用する、条件式NOTの文字列表現 */
static public final String COND_NOT = "!";
/** split処理で使用する、条件式ORの正規表現 */
static public final String REGEX_SPLIT_COND_OR = "\\|\\|";
/** シングルトンインスタンス */
static private final ReservedSymbolManager instance = new ReservedSymbolManager();
/** 予約済みUIタイプ変数 */
private final HashMap<String,UIType> uiTypes = new HashMap<String,UIType>();
/** 予約済み変数 */
private final HashMap<String,Variable> variables = new HashMap<String,Variable>( 512 );
/** 予約済みコマンド */
private final HashMap<String,Command> commands = new HashMap<String,Command>( 256 );
/** 予約済みコールバック */
private HashMap<String,Callback> callbacks = new HashMap<String,Callback>();
    /**
     * ctor (private: this class is a singleton, obtain it via {@link #getManager()}).
     */
    private ReservedSymbolManager()
    {
    }
    /**
     * Reloads every reserved-symbol definition file (UI types, variables,
     * callbacks and commands) from {@link #BASE_DIR}.
     *
     * @throws IOException when any definition file cannot be read
     */
    public void load() throws IOException
    {
        loadUITypes();
        loadVariables();
        loadCallbacks();
        loadCommands();
    }
    /**
     * Returns the singleton instance of this manager.
     */
    static public ReservedSymbolManager getManager()
    {
        return instance;
    }
/**
* 指定されたUI型テーブルにこのクラスが読み込んだ外部変数を適用する
*/
public void apply( UITypeTable dest )
{
for( String key : uiTypes.keySet() )
{
dest.add( uiTypes.get( key ) );
}
}
/**
* 指定された変数テーブルにこのクラスが読み込んだ外部変数を適用する
*/
public void apply( VariableTable dest )
{
for( String key : variables.keySet() )
{
dest.add( variables.get( key ) );
}
}
/**
* 指定されたコマンドテーブルにこのクラスが読み込んだ外部コールバックを適用する
*/
public void apply( CommandTable dest )
{
for( String key : commands.keySet() )
{
dest.add( commands.get( key ) );
}
}
/**
* 指定されたコールバックテーブルにこのクラスが読み込んだ外部コールバックを適用する
*/
public void apply( CallbackTable dest )
{
for( String name : callbacks.keySet() )
{
Callback v = callbacks.get( name );
dest.add( v, name );
}
}
    /**
     * Builds {@link UIType} instances from the reserved UI-type definition file
     * (uitypes.txt) and stores them in {@link #uiTypes} keyed by name.
     *
     * @throws IOException when the definition file cannot be read
     */
    private void loadUITypes() throws IOException
    {
        File f = new File( BASE_DIR, "uitypes.txt" );
        StringParser parser = new StringParser();
        parser.parse( f );

        uiTypes.clear();

        for( Row row : parser.getTable() )
        {
            // Columns: 0 = name, 1 = constant flag, 2 = initializer-required
            // flag, 3 = value type, 4.. = accepted initializer types.
            String name = row.stringValue( 0 );
            boolean constant = "Y".equals( row.stringValue( 1 ) );
            boolean initializerRequired = "Y".equals( row.stringValue( 2 ) );
            int type = toVariableType( row.stringValue( 3 ) ).type;
            int[] typeList = UIType.EMPTY_INITIALIZER_TYPE_LIST;
            //--------------------------------------------------------------------------
            // When an initializer expression is required: collect the accepted
            // initializer types from the remaining columns.
            //--------------------------------------------------------------------------
            if( row.length() >= 5 )
            {
                typeList = new int[ row.length() - 4 ];
                for( int i = 4, x = 0; i < row.length(); i++, x++ )
                {
                    typeList[ x ] = toVariableType( row.stringValue( i ) ).type;
                }
            }
            UIType ui = new UIType( name, true, type, constant, initializerRequired, typeList );
            uiTypes.put( name, ui );
        }
    }
    /**
     * Builds {@link Variable} instances from the reserved-variable definition
     * file (variables.txt) and stores them in {@link #variables} keyed by their
     * KSP name (type prefix + identifier).
     *
     * @throws IOException when the definition file cannot be read
     */
    private void loadVariables() throws IOException
    {
        File f = new File( BASE_DIR, "variables.txt" );
        StringParser parser = new StringParser();
        parser.parse( f );

        variables.clear();

        for( Row row : parser.getTable() )
        {
            // Columns: 0 = type, 1 = identifier, 2 = usable-in-"on init" flag.
            Variable v = toVariableType( row.stringValue( 0 ) );
            String name = v.toKSPTypeCharacter() + row.stringValue( 1 );
            boolean availableOnInit = "Y".equals( row.stringValue( 2 ) );
            v.setName( name );
            v.accessFlag = ACCESS_ATTR_CONST; // built-in variables must never be assigned to
            v.availableOnInit = availableOnInit; // whether it may be used inside "on init"; some built-in constants disallow it
            v.reserved = true; // reserved (built-in) variable
            v.referenced = true; // reserved, so marked as referenced whether used or not
            v.state = SymbolState.LOADED; // reserved, so marked as already assigned a value
            v.value = v.getDefaultValue();
            variables.put( name, v );
        }
    }
    /**
     * Builds {@link Command} instances from the reserved-command definition
     * file (commands.txt) and stores them in {@link #commands} keyed by name.
     *
     * @throws IOException when the definition file cannot be read
     */
    private void loadCommands() throws IOException
    {
        File f = new File( BASE_DIR, "commands.txt" );
        StringParser parser = new StringParser();
        parser.parse( f );

        commands.clear();

        for( Row row : parser.getTable() )
        {
            // Columns: 0 = return type, 1 = name, 2 = callbacks in which the
            // command may be used, 3.. = argument types (optional).
            String returnType = row.stringValue( 0 );
            String name = row.stringValue( 1 );
            String availableCallback = row.stringValue( 2 );
            boolean hasParenthesis = false;
            //--------------------------------------------------------------------------
            // Column 3 onwards: the command takes arguments -
            // build the argument list.
            //--------------------------------------------------------------------------
            ArrayList<CommandArgument> args = new ArrayList<CommandArgument>();
            if( row.length() >= 4 )
            {
                hasParenthesis = true;
                final int len = row.length();
                for( int i = 3; i < len; i++ )
                {
                    //--------------------------------------------------------------------------
                    // Some commands accept several data types for one argument,
                    // so stock them in a list instead of a single value.
                    //--------------------------------------------------------------------------
                    String typeString = row.stringValue( i );
                    args.add( toVariableTypeForArgument( typeString ) );
                }
            }
            //--------------------------------------------------------------------------
            // Build the command's AST node and symbol entry.
            //--------------------------------------------------------------------------
            {
                Command newItem;
                ASTCallCommand ast = new ASTCallCommand( JJTCALLCOMMAND );
                ast.symbol.setName( name );
                newItem = new Command( ast );
                if( args.size() > 0 )
                {
                    newItem.argList.addAll( args );
                }
                newItem.hasParenthesis = hasParenthesis;
                toReturnTypeForCommand( returnType, newItem.returnType );
                newItem.symbolType = SymbolType.Command;
                newItem.reserved = true;
                newItem.availableCallbackList.clear();
                toAvailableCommandOnCallbackList( availableCallback, newItem.availableCallbackList );
                commands.put( name, newItem );
            }
        } //~for( Column<String> col : parser.getTable() )
    }
    /**
     * Builds reserved {@code Callback} definitions from callbacks.txt and
     * registers them in {@code callbacks} keyed by name.
     * Row layout: [0] callback name, [1] "Y" when duplicate declarations are
     * allowed, [2..] argument type specs; a leading "&" marks an argument
     * that must be a variable the user declared in "on init" (e.g. ui_control).
     */
    private void loadCallbacks() throws IOException
    {
        File f = new File( BASE_DIR, "callbacks.txt" );
        StringParser parser = new StringParser();
        parser.parse( f );
        callbacks.clear();
        for( Row row : parser.getTable() )
        {
            String name = row.stringValue( 0 );
            boolean dup = "Y".equals( row.stringValue( 1 ) );
            //--------------------------------------------------------------------------
            // Columns 2 onward: the callback takes arguments.
            // Build the argument AST/variable entries.
            //--------------------------------------------------------------------------
            ArrayList<Argument> args = new ArrayList<Argument>();
            if( row.length() >= 3 )
            {
                final int len = row.length();
                for( int i = 2; i < len; i++ )
                {
                    String typeString = row.stringValue( i );
                    boolean requireDeclarationOnInit = false;
                    if( typeString.startsWith( "&" ) )
                    {
                        // e.g. ui_control: the argument IS the user-declared variable
                        requireDeclarationOnInit = true;
                        typeString =typeString.substring( 1 );
                    }
                    Variable v = toVariableType( typeString );
                    Argument a = new Argument( v );
                    a.setName( "<undefined>" ); // merged later during symbol collection
                    a.requireDeclarationOnInit = requireDeclarationOnInit; // whether the argument must be a variable declared in on init
                    a.reserved = true; // reserved variable
                    a.referenced = true; // reserved: marked referenced regardless of actual use
                    a.state = SymbolState.LOADED; // reserved: counts as already assigned
                    args.add( a );
                }
            }
            //--------------------------------------------------------------------------
            // Build the callback's AST node and register the Callback instance.
            //--------------------------------------------------------------------------
            {
                Callback newItem;
                ASTCallbackDeclaration ast = new ASTCallbackDeclaration( JJTCALLBACKDECLARATION );
                ast.symbol.setName( name );
                if( args.size() > 0 )
                {
                    ASTCallbackArgumentList astList = new ASTCallbackArgumentList( JJTCALLBACKARGUMENTLIST );
                    for( Argument a : args )
                    {
                        astList.args.add( a.getName() );
                    }
                    ast.jjtAddChild( astList, 0 );
                    newItem = new Callback( ast );
                }
                else
                {
                    newItem = new Callback( ast );
                }
                newItem.setName( name );
                newItem.symbolType = SymbolType.Callback;
                newItem.reserved = true;
                newItem.declared = false;
                newItem.setAllowDuplicate( dup );
                callbacks.put( name, newItem );
            }
        } //~for( Row row : parser.getTable() )
    }
    /**
     * Converts a type-identifier string from the definition files into a
     * {@code Variable} carrying the corresponding type and access flags.
     * Identifiers: "*" any, "X" unknown, "*[]" array of any, "V" void,
     * I/R/S/B int/real/string/bool ("[]" suffix = array, "@" prefix = const),
     * "PP" preprocessor symbol, "KEY" key id, "ui_*" any UI type,
     * "ui_&lt;name&gt;" a specific UI type registered in {@code uiTypes}.
     * @throws IllegalArgumentException for an unrecognized identifier
     */
    public Variable toVariableType( String t )
    {
        Variable ret = new Variable( new ASTVariableDeclaration( JJTVARIABLEDECLARATION ) );
        int type = TYPE_NONE;
        int accessFlag = ACCESS_ATTR_NONE;
        UIType uiTypeInfo = null;
        // interned so the identity '==' comparisons against literals below are safe
        t = t.intern();
        if( t == "*" )
        {
            type = TYPE_ALL;
        }
        else if( t == "X" )
        {
            type = TYPE_UNKOWN;
        }
        else if( t == "*[]" )
        {
            type = TYPE_MULTIPLE | TYPE_ATTR_ARRAY;
        }
        else if( t == "V" )
        {
            type = TYPE_VOID;
        }
        else if( t == "I" || t == "@I" )
        {
            type = TYPE_INT;
        }
        else if( t == "I[]" )
        {
            type = TYPE_INT | TYPE_ATTR_ARRAY;
        }
        else if( t == "R" || t == "@R" )
        {
            type = TYPE_REAL;
        }
        else if( t == "R[]" )
        {
            type = TYPE_REAL | TYPE_ATTR_ARRAY;
        }
        else if( t == "S" || t == "@S" )
        {
            type = TYPE_STRING;
        }
        else if( t == "S[]" )
        {
            type = TYPE_STRING | TYPE_ATTR_ARRAY;
        }
        else if( t == "B" || t == "@B" )
        {
            type = TYPE_BOOL;
        }
        else if( t == "B[]" )
        {
            type = TYPE_BOOL | TYPE_ATTR_ARRAY;
        }
        else if( t == "PP" )
        {
            type = TYPE_PREPROCESSOR_SYMBOL;
        }
        else if( t == "KEY" )
        {
            type = TYPE_KEYID;
        }
        // accepts any UI type
        else if( t.equals( "ui_*" ) )
        {
            uiTypeInfo = UIType.ANY_UI;
            accessFlag |= ACCESS_ATTR_UI;
        }
        // a specific, named UI type
        else if( t.startsWith( "ui_" ) )
        {
            boolean found = false;
            if( uiTypes.containsKey( t ) )
            {
                uiTypeInfo = uiTypes.get( t );
                accessFlag |= ACCESS_ATTR_UI;
                found = true;
            }
            if( !found )
            {
                throw new IllegalArgumentException( "Unknown type : " + t );
            }
        }
        else
        {
            throw new IllegalArgumentException( "Unknown type : " + t );
        }
        // "@" prefix marks a constant
        if( t.startsWith( "@" ) )
        {
            accessFlag |= ACCESS_ATTR_CONST;
        }
        ret.setName( "tmp" );
        ret.type = type;
        ret.accessFlag = accessFlag;
        ret.uiTypeInfo = uiTypeInfo;
        return ret;
    }
/**
* 型識別文字から戻り値の値に変換する(コマンドによって複数の戻り値がある)
*/
public void toReturnTypeForCommand( String t, ReturnType dest )
{
t = t.intern();
String[] orCond = t.split( REGEX_SPLIT_COND_OR );
//--------------------------------------------------------------------------
// A または B または .... n の場合
//--------------------------------------------------------------------------
if( orCond.length >= 2 )
{
for( String i : orCond )
{
dest.typeList.add( toVariableType( i ).type );
}
}
else
{
dest.typeList.add( toVariableType( t ).type );
}
}
    /**
     * Converts a type-identifier string into a {@code CommandArgument}.
     * A single command argument may accept several alternative types,
     * separated by the OR pattern ({@code REGEX_SPLIT_COND_OR}).
     */
    public CommandArgument toVariableTypeForArgument( String t )
    {
        t = t.intern();
        CommandArgument ret;
        ArrayList<Variable> args = new ArrayList<Variable>();
        String[] orCond = t.split( REGEX_SPLIT_COND_OR );
        //--------------------------------------------------------------------------
        // Case: "A|B|...|n" -- several accepted types
        //--------------------------------------------------------------------------
        if( orCond.length >= 2 )
        {
            for( String i : orCond )
            {
                Variable v = toVariableType( i );
                args.add( v );
            }
        }
        else
        {
            Variable v = toVariableType( t );
            args.add( v );
        }
        //--------------------------------------------------------------------------
        // Common field setup for every accepted type
        //--------------------------------------------------------------------------
        for( int x = 0; x < args.size(); x++ )
        {
            Variable v = args.get( x );
            v.reserved = false; // built-in KONTAKT command: not a reserved variable
            v.referenced = true; // built-in: marked referenced regardless of actual use
            v.state = SymbolState.LOADED; // built-in: counts as already assigned
            if( v.uiTypeInfo != null )
            {
                v.uiTypeName = v.uiTypeInfo.name;
            }
        }
        ret = new CommandArgument( args );
        return ret;
    }
    /**
     * For command-table generation: fills {@code dest} with the callbacks in
     * which a command may be used, based on the availability descriptor.
     * Supported forms: "*" (all callbacks), "A|B|..." (each listed callback),
     * "!A" (every callback except A -- see {@code COND_NOT}), or a single
     * callback name. Unknown names are silently ignored.
     */
    public void toAvailableCommandOnCallbackList( String callbackName, HashMap<String,Callback> dest )
    {
        // interned so the identity '==' comparison against "*" below is safe
        callbackName = callbackName.intern();
        String[] orCond = callbackName.split( REGEX_SPLIT_COND_OR );
        //--------------------------------------------------------------------------
        // Usable in every callback
        //--------------------------------------------------------------------------
        if( callbackName == "*" )
        {
            dest.putAll( callbacks );
            return;
        }
        //--------------------------------------------------------------------------
        // Case: "A|B|...|n" -- each listed callback
        //--------------------------------------------------------------------------
        if( orCond.length >= 2 )
        {
            for( String i : orCond )
            {
                if( callbacks.containsKey( i ) )
                {
                    dest.put( i, callbacks.get( i ) );
                }
            }
        }
        //--------------------------------------------------------------------------
        // Negation: every callback except the named one
        //--------------------------------------------------------------------------
        else if( callbackName.startsWith( COND_NOT ) )
        {
            String exclude = callbackName.substring( 1 );
            for( String key : callbacks.keySet() )
            {
                if( !key.equals( exclude ) )
                {
                    dest.put( key, callbacks.get( key ) );
                }
            }
        }
        //--------------------------------------------------------------------------
        // A single named callback
        //--------------------------------------------------------------------------
        else
        {
            if( callbacks.containsKey( callbackName ) )
            {
                dest.put( callbackName, callbacks.get( callbackName ) );
            }
        }
    }
    /**
     * Unit-test entry point: loads every reserved-symbol table once so
     * definition-file errors surface immediately.
     */
    static public void main( String[] args ) throws Throwable
    {
        // command: java -classpath ./target/classes/ net.rkoubou.kspparser.analyzer.ReservedSymbolManager
        ReservedSymbolManager mgr = ReservedSymbolManager.getManager();
        mgr.load();
    }
}
|
hir0pr0tagonist/tesb-rt-se | examples/cxf/jaxrs-oauth2/common/src/main/java/oauth2/common/ReservationConfirmation.java | <reponame>hir0pr0tagonist/tesb-rt-se
/**
* Copyright (C) 2011 Talend Inc. - www.talend.com
*/
package oauth2.common;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement
public class ReservationConfirmation {

    private String address;
    private int hour;
    private boolean calendarUpdated;

    /** No-argument constructor (required for JAXB unmarshalling). */
    public ReservationConfirmation() {
    }

    /**
     * Creates a fully populated confirmation.
     *
     * @param address address of the reservation
     * @param hour    reserved hour of day
     * @param updated whether the user's calendar was updated
     */
    public ReservationConfirmation(String address, int hour, boolean updated) {
        this.address = address;
        this.hour = hour;
        this.calendarUpdated = updated;
    }

    public String getAddress() {
        return address;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    public int getHour() {
        return hour;
    }

    public void setHour(int hour) {
        this.hour = hour;
    }

    public boolean isCalendarUpdated() {
        return calendarUpdated;
    }

    public void setCalendarUpdated(boolean calendarUpdated) {
        this.calendarUpdated = calendarUpdated;
    }
}
|
isandlaTech/cohorte-demos | demo-led/led-gateway/repo/web_viewer/viewer.py | <reponame>isandlaTech/cohorte-demos
#!/usr/bin/python
from pelix.ipopo.decorators import ComponentFactory, Provides, Requires, Property, \
BindField, UnbindField
import pelix.remote
import os
import json
import time
import uuid
import logging
_logger = logging.getLogger("viewer.viewer")
@ComponentFactory("led_viewer_factory")
@Provides('pelix.http.servlet')
@Requires("_leds", "java:/led.services.LedService", optional=True, aggregate=True)
@Requires("_cams", "java:/led.services.CameraService", optional=True, aggregate=True)
@Property('_path', 'pelix.http.path', "/")
@Property('_reject', pelix.remote.PROP_EXPORT_REJECT, ['pelix.http.servlet'])
class Viewer(object):
    """HTTP servlet exposing bound LED and camera services as a small JSON
    API plus a static web UI (see do_GET for the routing table).

    A single "priority" client is tracked via a session-token cookie; every
    JSON reply is tagged with ``prioritaire: yes/no`` accordingly.
    """

    def __init__(self):
        self._path = None
        self._leds = []
        self._leds_map = {}    # led name -> {"svc": service, "svc_ref": ServiceReference}
        self._leds_list_lastupdate = time.time()
        self._cams = []
        self._cams_map = {}    # cam name -> {"svc": service, "svc_ref": ServiceReference}
        self._uuid = None      # session token of the current priority client
        self._time_uuid = 0    # last time the priority token was seen

    def get_lastupdate(self):
        """Timestamp of the last service (un)bind, used by clients to poll."""
        return {"lastupdate": self._leds_list_lastupdate}

    def get_leds(self):
        """Return {"leds": [{"name", "state"}, ...]} for every bound LED."""
        result = {"leds": []}
        for name, entry in self._leds_map.items():
            result["leds"].append({"name": name, "state": entry["svc"].get_state()})
        return result

    def get_led(self, led):
        """Return the named LED's state, or an "unknown" placeholder."""
        entry = self._leds_map.get(led)
        if entry is None:
            return {"name": "unknown", "state": "unknown"}
        return {"name": led, "state": entry["svc"].get_state()}

    def get_cams(self):
        """Return {"cams": [{"name", "state"}, ...]} for every bound camera."""
        result = {"cams": []}
        for name, entry in self._cams_map.items():
            result["cams"].append({"name": name, "state": entry["svc"].get_state()})
        return result

    def get_cam(self, cam):
        """Return the named camera's state, or an "unknown" placeholder."""
        entry = self._cams_map.get(cam)
        if entry is None:
            return {"name": "unknown", "state": "unknown"}
        return {"name": cam, "state": entry["svc"].get_state()}

    def send_action(self, led, action):
        """Apply "on"/"off" to the named LED; returns {} for an unknown LED."""
        # fix: the original logged with %d although `led` is a string name,
        # which made every call emit a logging error traceback
        _logger.critical("send_action %s to led: %s", action, led)
        result = {}
        entry = self._leds_map.get(led)  # fix: no KeyError on unknown name
        if entry:
            result["name"] = led
            if action == "on":
                result["state"] = entry["svc"].on()
            elif action == "off":
                result["state"] = entry["svc"].off()
        return result

    def send_action_cam(self, cam, action):
        """Apply "picture" to the named camera; returns {} for an unknown cam."""
        # fix: same %d-with-string logging bug as send_action
        _logger.critical("send_action_cam %s to cam: %s", action, cam)
        result = {}
        entry = self._cams_map.get(cam)  # fix: no KeyError on unknown name
        if entry:
            result["name"] = cam
            if action == "picture":
                result["state"] = "not busy"
                result["res"] = entry["svc"].takePicture()
                _logger.critical("RES : %s", result["res"])
        return result

    @BindField('_leds')
    def on_bind_led(self, field, svc, svc_ref):
        """Register a newly bound LED service under its lower-cased name."""
        props = svc_ref.get_properties()
        led_name = str(props.get("led.name")).lower()
        self._leds_map[led_name] = {"svc_ref": svc_ref, "svc": svc}
        self._leds_list_lastupdate = time.time()
        _logger.critical("name: %s", led_name)

    @UnbindField('_leds')
    def on_unbind_led(self, field, svc, svc_ref):
        """Forget an unbound LED service."""
        props = svc_ref.get_properties()
        led_name = str(props.get("led.name")).lower()
        del self._leds_map[led_name]
        self._leds_list_lastupdate = time.time()

    @BindField('_cams')
    def on_bind_cam(self, field, svc, svc_ref):
        """Register a newly bound camera service under its lower-cased name."""
        _logger.critical("binding a new cam...")
        props = svc_ref.get_properties()
        cam_name = str(props.get("cam.name")).lower()
        self._cams_map[cam_name] = {"svc_ref": svc_ref, "svc": svc}
        # NOTE(review): updates the LED timestamp so /api/lastupdate pollers
        # also refresh on camera changes -- presumably intentional, confirm
        self._leds_list_lastupdate = time.time()

    @UnbindField('_cams')
    def on_unbind_cam(self, field, svc, svc_ref):
        """Forget an unbound camera service."""
        _logger.critical("unbinding a cam...")
        props = svc_ref.get_properties()
        cam_name = str(props.get("cam.name")).lower()
        del self._cams_map[cam_name]
        self._leds_list_lastupdate = time.time()

    # ------------------------------------------------------------------
    # Static resources
    # ------------------------------------------------------------------
    def root_dir(self):
        """Directory containing this module (root of the static content)."""
        return os.path.abspath(os.path.dirname(__file__))

    def get_file(self, filename):
        """Read a file below root_dir(); returns the error string on failure."""
        try:
            src = os.path.join(self.root_dir(), filename)
            with open(src, 'rb') as fp:
                return fp.read()
        except IOError as exc:
            return str(exc)

    def load_resource(self, path, request, response):
        """Serve a static file with a MIME type guessed from its extension."""
        mimetypes = {
            ".css": "text/css",
            ".html": "text/html",
            ".js": "application/javascript",
            ".jpeg": "image/jpeg",
            ".jpg": "image/jpeg",
            ".png": "image/png",
            ".gif": "image/gif"
        }
        ext = os.path.splitext(path)[1]
        mimetype = mimetypes.get(ext, "text/html")
        # fix: get_file() already joins root_dir(); the original pre-joined the
        # root too, and only worked because os.path.join discards everything
        # before an absolute second argument
        content = self.get_file(path)
        return response.send_content(200, content, mimetype)

    def show_main_page(self, request, response):
        """Redirect the browser to the static UI entry page."""
        rel_path = self._path
        while len(rel_path) > 0 and rel_path[0] == '/':
            rel_path = rel_path[1:]
        if not rel_path:
            rel_path = ''
        content = "<html><head><meta http-equiv='refresh' content='0; URL="
        content += rel_path + "static/web/index.html'/></head><body></body></html>"
        response.send_content(200, content)

    def show_error_page(self, request, response):
        """Generic 404 page."""
        content = """<html>
        <head><title>Cohorte Robots</title><head><body><h3>404 This is not the web page you are looking for!</h3></body></html>"""
        response.send_content(404, content)

    def sendJson(self, data, response):
        """Serialize data as pretty-printed JSON and send it uncached."""
        result = json.dumps(data, sort_keys=False,
                            indent=4, separators=(',', ': '))
        print(result)
        response.set_header("cache-control", "no-cache")
        response.send_content(200, result, "application/json")

    def _mark_priority(self, t, uuid_var):
        """Tag a JSON payload with whether this client holds the priority token."""
        if self._uuid == uuid_var:
            t["prioritaire"] = "yes"
        else:
            t["prioritaire"] = "no"
        return t

    def do_GET(self, request, response):
        """
        Routes:
          /                      -> redirect to the static UI
          /static/...            -> static resources
          /api/lastupdate        -> last bind/unbind timestamp
          /api/leds[/NAME[/on|off]]
          /api/cams[/NAME[/picture]]
          /api/connexion/PASSWORD -> grab the priority token
        Unmatched paths send no response (original behavior, preserved).
        """
        # --- session / priority-token handling -------------------------
        if (time.time() - self._time_uuid) > 3:
            self._uuid = None  # priority token expires after 3s of silence
        cookie = request.get_header("Cookie")
        uuid_var = str(uuid.uuid4())
        if cookie is not None:
            uuid_var = str(cookie.split('=')[1])
            if uuid_var == self._uuid:
                self._time_uuid = time.time()
            if (time.time() - self._time_uuid) > 800:
                response.set_header("Set-Cookie", "sessionToken=" + uuid_var + "; Max-Age=900; path=/")
        else:
            response.set_header("Set-Cookie", "sessionToken=" + uuid_var + "; Max-Age=900; path=/")
        if self._uuid is None:
            self._time_uuid = time.time()
            self._uuid = uuid_var
        # --- routing ---------------------------------------------------
        query = request.get_path()
        while len(query) > 0 and query[0] == '/':
            query = query[1:]
        while len(query) > 0 and query[-1] == '/':
            query = query[:-1]
        if len(query) == 0:
            self.show_main_page(request, response)
            return
        parts = str(query).split('/')
        if len(parts) == 0:
            self.show_main_page(request, response)
        elif str(parts[0]) == "static":
            if len(parts) > 1:
                self.load_resource('/'.join(parts[1:]), request, response)
            else:
                self.show_error_page(request, response)
        elif str(parts[0]) == "api":
            if len(parts) == 2:
                key = str(parts[1]).lower()
                if key == "leds":
                    self.sendJson(self._mark_priority(self.get_leds(), uuid_var), response)
                elif key == "lastupdate":
                    self.sendJson(self._mark_priority(self.get_lastupdate(), uuid_var), response)
                elif key == "cams":
                    self.sendJson(self._mark_priority(self.get_cams(), uuid_var), response)
            elif len(parts) == 3:
                key = str(parts[1]).lower()
                if key == "leds":
                    t = self.get_led(str(parts[2]).lower())
                    self.sendJson(self._mark_priority(t, uuid_var), response)
                elif key == "cams":
                    t = self.get_cam(str(parts[2]).lower())
                    self.sendJson(self._mark_priority(t, uuid_var), response)
                elif key == "connexion":
                    # NOTE(review): hard-coded credential in source -- should
                    # move to configuration/pillar
                    if str(parts[2]) == "isandla$38TECH":
                        self._uuid = uuid_var
                    self.sendJson(self._mark_priority({}, uuid_var), response)
            elif len(parts) == 4:
                key = str(parts[1]).lower()
                if key == "leds":
                    t = self.send_action(str(parts[2]).lower(), str(parts[3]).lower())
                    self.sendJson(self._mark_priority(t, uuid_var), response)
                elif key == "cams":
                    t = self.send_action_cam(str(parts[2]).lower(), str(parts[3]).lower())
                    self.sendJson(self._mark_priority(t, uuid_var), response)
|
jgzl/gw | common/common-core/src/main/java/com/github/jgzl/gw/common/core/spi/Join.java | <gh_stars>0
package com.github.jgzl.gw.common.core.spi;
import java.lang.annotation.*;
/**
 * Join
 *
 * Adding this annotation to a class indicates that it joins the (SPI)
 * extension mechanism; annotated types are eligible to be discovered as
 * extension implementations. Retained at runtime and applicable to types
 * only.
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface Join {
}
|
LemaMichael/BetterGoogleWifi | GoogleWifi Headers/GOOPanelPanGestureRecognizer.h | //
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>.
//
#import <UIKit/UIPanGestureRecognizer.h>
// Pan gesture recognizer used by GOOPanel. Records the gesture's starting
// point and the view's starting transform; `edgeOnly` and `trackingPan`
// appear to gate when the pan is honored. NOTE(review): semantics inferred
// from names -- this header was reconstructed by class-dump; confirm against
// the implementation.
@interface GOOPanelPanGestureRecognizer : UIPanGestureRecognizer
{
    _Bool _trackingPan;
    _Bool _edgeOnly;
    struct CGPoint _startPoint;
    struct CGAffineTransform _startTransform;
}
@property(nonatomic) _Bool edgeOnly; // @synthesize edgeOnly=_edgeOnly;
@property(nonatomic) _Bool trackingPan; // @synthesize trackingPan=_trackingPan;
@property(nonatomic) struct CGAffineTransform startTransform; // @synthesize startTransform=_startTransform;
@property(nonatomic) struct CGPoint startPoint; // @synthesize startPoint=_startPoint;
@end
|
mercury199/Space-Gen-Modification | src/com/zarkonnen/spacegen/LostArtefact.java | <filename>src/com/zarkonnen/spacegen/LostArtefact.java
/**
Copyright 2012 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.zarkonnen.spacegen;
public class LostArtefact implements Stratum {
    /** Description of how the artefact was lost (used in the generic phrasing). */
    public String status;
    /** Time value at which the artefact was lost (reported via time()). */
    int lostTime;
    /** The lost artefact itself. */
    Artefact artefact;

    public LostArtefact(String status, int lostTime, Artefact artefact) {
        this.status = status;
        this.lostTime = lostTime;
        this.artefact = artefact;
    }

    /** Human-readable stratum line; tombs and wrecks get special phrasing. */
    @Override
    public String toString() {
        boolean isTomb = artefact.type == ArtefactType.PIRATE_TOMB
                || artefact.type == ArtefactType.ADVENTURER_TOMB;
        if (isTomb) {
            return "The " + artefact + ", buried in " + lostTime + ".";
        }
        if (artefact.type == ArtefactType.WRECK) {
            return "The " + artefact + ".";
        }
        return "A " + artefact + ", " + status + " in " + lostTime + ".";
    }

    @Override
    public int time() {
        return lostTime;
    }
}
|
sywh/algorithms | Fundamentals/UF.py | class UF:
def __init__(self, N: int):
self._count = N
self.id = [i for i in range(N)]
def union(self, p: int, q: int): # add connection between p and q
pID = self.find(p)
qID = self.find(q)
if pID == qID:
return
for i in range(len(self.id)):
if self.id[i] = pID:
self.id[i] = qID
self._count -= 1
def find(self, p: int): # find the identifier of the componet that p belongs to
return self.id[p]
def connected(self, p: int, q: int):
return self.find(p) == self.find(q)
def count(self): # num of connected components
return self._count |
edwardmfasano/php-sf-flex-webpack-encore-vuejs | assets/js/lib/reviver/library/index.js | <filename>assets/js/lib/reviver/library/index.js
import {EditorReviver} from "./editorReviver";
import {AuthorReviver} from "./authorReviver";
import {JobReviver} from "./jobReviver";
import {SerieReviver} from "./serieReviver";
import {EditorsReviver} from "./editorsReviver";
import {AuthorsReviver} from "./authorsReviver";
import {BookReviver} from "./bookReviver";

// Shared reviver singletons. Leaf revivers are constructed first; the
// collection revivers and the book reviver are composed from them, so the
// construction order below matters.
const authorReviver = new AuthorReviver()
const editorReviver = new EditorReviver()
const jobReviver = new JobReviver()
const serieReviver = new SerieReviver()
const editorsReviver = new EditorsReviver(editorReviver)
const authorsReviver = new AuthorsReviver(jobReviver, authorReviver)
const bookReviver = new BookReviver(authorsReviver, editorsReviver, serieReviver)

export {
    bookReviver,
    authorReviver,
    editorReviver,
    jobReviver,
    serieReviver,
    editorsReviver,
    authorsReviver
}
ondrejsika/sedastrela-is | sedastrela_is/event/urls.py | <filename>sedastrela_is/event/urls.py
from django.conf.urls import include, url

from sedastrela_is.event.views import attending_done_view, attending_view

# Event-app routes: admin sub-URLs, plus attendance endpoints.
# attending_view receives person_token, event_id and state as keyword args.
urlpatterns = [
    url(r'^admin/', include('sedastrela_is.event.admin_urls', namespace='admin')),
    url(r'^attending/(?P<person_token>\w+)/(?P<event_id>\d+)/(?P<state>\w+)/?', attending_view, name='attending'),
    url(r'^attending-done/?', attending_done_view, name='attending_done'),
]
|
wenze1367/snort-2.9.15-read-annotation | snort-2.9.15.1/src/detection_util.h | /*
** Copyright (C) 2014-2019 Cisco and/or its affiliates. All rights reserved.
** Copyright (C) 2002-2013 Sourcefire, Inc.
** Copyright (C) 1998-2002 <NAME> <<EMAIL>>
**
** This program is free software; you can redistribute it and/or modify
** it under the terms of the GNU General Public License Version 2 as
** published by the Free Software Foundation. You may not use, modify or
** distribute this program under any other version of the GNU General
** Public License.
**
** This program is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
**
** You should have received a copy of the GNU General Public License
** along with this program; if not, write to the Free Software
** Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
**
** Description
** This file contains the utility functions used by rule options.
**
*/
#ifndef __DETECTION_UTIL_H__
#define __DETECTION_UTIL_H__
#include <assert.h>
#include "sf_types.h"
#include "decode.h"
#include "detect.h"
#include "snort.h"
#include "snort_debug.h"
#include "treenodes.h"
#ifndef DECODE_BLEN
#define DECODE_BLEN 65535
#define MAX_URI 8192
// NOTE - if you change these, you must also change:
// dynamic-plugins/sf_dynamic_common.h
// dynamic-plugins/sf_dynamic_define.h
// dynamic-plugins/sf_engine/sf_snort_plugin_api.h
// detection-plugins/sp_pcre.h
typedef enum
{
HTTP_BUFFER_NONE,
HTTP_BUFFER_URI,
HTTP_BUFFER_HEADER,
HTTP_BUFFER_CLIENT_BODY,
HTTP_BUFFER_METHOD,
HTTP_BUFFER_COOKIE,
HTTP_BUFFER_STAT_CODE,
HTTP_BUFFER_STAT_MSG,
HTTP_BUFFER_RAW_URI,
HTTP_BUFFER_RAW_HEADER,
HTTP_BUFFER_RAW_COOKIE,
HTTP_BUFFER_MAX
} HTTP_BUFFER;
#endif
typedef enum {
FLAG_ALT_DECODE = 0x0001,
FLAG_ALT_DETECT = 0x0002,
FLAG_DETECT_ALL = 0xffff
} DetectFlagType;
#define DOE_BUF_URI 0x01
#define DOE_BUF_STD 0x02
#define HTTPURI_PIPELINE_REQ 0x01
#define HTTP_ENCODE_TYPE__UTF8_UNICODE 0x00000001
#define HTTP_ENCODE_TYPE__DOUBLE_ENCODE 0x00000002
#define HTTP_ENCODE_TYPE__NONASCII 0x00000004
#define HTTP_ENCODE_TYPE__BASE36 0x00000008
#define HTTP_ENCODE_TYPE__UENCODE 0x00000010
#define HTTP_ENCODE_TYPE__BARE_BYTE 0x00000020
#define HTTP_ENCODE_TYPE__IIS_UNICODE 0x00000040
#define HTTP_ENCODE_TYPE__ASCII 0x00000080
typedef struct
{
const uint8_t* buf;
uint16_t length;
uint32_t encode_type;
} HttpBuffer;
typedef struct {
const uint8_t *data;
uint16_t len;
} DataPointer;
typedef struct {
uint8_t data[DECODE_BLEN];
uint16_t len;
} DataBuffer;
extern uint8_t base64_decode_buf[DECODE_BLEN];
extern uint32_t base64_decode_size;
extern uint8_t mime_present;
extern uint8_t doe_buf_flags;
extern const uint8_t *doe_ptr;
extern void *global_ssl_callback;
extern uint16_t detect_flags;
extern uint32_t http_mask;
extern HttpBuffer http_buffer[HTTP_BUFFER_MAX];
extern const char* http_buffer_name[HTTP_BUFFER_MAX];
extern DataPointer DetectBuffer;
extern DataPointer file_data_ptr;
extern DataBuffer DecodeBuffer;
/* Clears the set of populated HTTP buffers (drops every bit of http_mask). */
static inline void ClearHttpBuffers (void)
{
    http_mask = 0;
}

/* Returns the bitmask of HTTP buffers currently populated. */
static inline uint32_t GetHttpBufferMask (void)
{
    return http_mask;
}

/* Returns the requested HTTP buffer, or NULL when it has not been set. */
static inline const HttpBuffer* GetHttpBuffer (HTTP_BUFFER b)
{
    if ( !((1 << b) & http_mask) )
        return NULL;
    return http_buffer + b;
}

/* Stores buf/len/encoding in slot b and marks the slot populated.
 * Note: the assert only fires in debug builds, and b indexes http_buffer
 * before the assert runs, so callers must always pass a valid slot. */
static inline void SetHttpBufferEncoding (
    HTTP_BUFFER b, const uint8_t* buf, unsigned len, uint32_t enc)
{
    HttpBuffer* hb = http_buffer + b;
    assert(b < HTTP_BUFFER_MAX && buf);
    hb->buf = buf;
    hb->length = len;
    hb->encode_type = enc;
    http_mask |= (1 << b);
}

/* Convenience wrapper: SetHttpBufferEncoding with no encoding flags. */
static inline void SetHttpBuffer (HTTP_BUFFER b, const uint8_t* buf, unsigned len)
{
    SetHttpBufferEncoding(b, buf, len, 0);
}
#define SetDetectLimit(pktPtr, altLen) \
{ \
pktPtr->alt_dsize = altLen; \
}
#define IsLimitedDetect(pktPtr) (pktPtr->packet_flags & PKT_HTTP_DECODE)
/*
* Function: setFileDataPtr
*
* Purpose: Sets the file data pointer used by
* file_data rule option.
*
* Arguments: ptr => pointer to the body data
*
* Returns: void
*
*/
static inline void setFileDataPtr(const uint8_t *ptr, uint16_t decode_size)
{
file_data_ptr.data = ptr;
file_data_ptr.len = decode_size;
}
/*
 * Function: IsBase64DecodeBuf
 *
 * Purpose: Reports whether a base64-decoded buffer exists and the given
 *          pointer falls inside it.
 *
 * Arguments: p => pointer to test (typically doe_ptr)
 *
 * Returns: 1 when base64-decoded data is present and p lies within
 *          [base64_decode_buf, base64_decode_buf + base64_decode_size);
 *          0 otherwise.
 *
 */
static inline int IsBase64DecodeBuf(const uint8_t *p)
{
    /* Short-circuit && replaces the original nested if/else ladder. */
    return ( base64_decode_size && p
             && (p >= base64_decode_buf)
             && (p < (base64_decode_buf + base64_decode_size)) ) ? 1 : 0;
}
/*
* Function: SetDoePtr(const uint8_t *ptr, uint8_t type)
*
* Purpose: This function set the doe_ptr and sets the type of
* buffer to which doe_ptr points.
*
* Arguments: ptr => pointer
* type => type of buffer
*
* Returns: void
*
*/
static inline void SetDoePtr(const uint8_t *ptr, uint8_t type)
{
doe_ptr = ptr;
doe_buf_flags = type;
}
/*
* Function: UpdateDoePtr(const uint8_t *ptr, uint8_t update)
*
* Purpose: This function updates the doe_ptr and resets the type of
* buffer to which doe_ptr points based on the update value.
*
* Arguments: ptr => pointer
* update => reset the buf flag if update is not zero.
*
* Returns: void
*
*/
static inline void UpdateDoePtr(const uint8_t *ptr, uint8_t update)
{
doe_ptr = ptr;
if(update)
doe_buf_flags = DOE_BUF_STD;
}
void EventTrace_Init(void);
void EventTrace_Term(void);
void EventTrace_Log(const Packet*, OptTreeNode*, int action);
/* Non-zero when rule-event tracing was enabled in the configuration. */
static inline int EventTrace_IsEnabled (void)
{
    return ( snort_conf->event_trace_max > 0 );
}

/* Sets the given detect flag(s). */
static inline void DetectFlag_Enable(DetectFlagType df)
{
    detect_flags |= df;
}

/* Clears the given detect flag(s). */
static inline void DetectFlag_Disable(DetectFlagType df)
{
    detect_flags &= ~df;
}

/* Non-zero when any of the given detect flags is set. */
static inline int Is_DetectFlag(DetectFlagType df)
{
    return ( (detect_flags & df) != 0 );
}

/* Returns the current detect-flags word (pairs with Reset_DetectFlags
 * for save/restore). */
static inline uint16_t Get_DetectFlags(void)
{
    return detect_flags;
}

/* Restores a previously saved detect-flags word. */
static inline void Reset_DetectFlags(uint16_t dflags)
{
    detect_flags = dflags;
}

/* Stores the global SSL callback pointer. */
static inline void SetSSLCallback(void *p)
{
    global_ssl_callback = p;
}

/* Fetches the global SSL callback pointer (may be NULL). */
static inline void *GetSSLCallback(void)
{
    return global_ssl_callback;
}
/* When an alternate detection buffer is active, returns it through the
 * out-params and yields 1; otherwise yields 0 and leaves them untouched. */
static inline int GetAltDetect(uint8_t **bufPtr, uint16_t *altLenPtr)
{
    if ( Is_DetectFlag(FLAG_ALT_DETECT) )
    {
        *bufPtr = (uint8_t*) DetectBuffer.data;
        *altLenPtr = DetectBuffer.len;
        return 1;
    }
    return 0;
}

/* Points detection at an alternate buffer and flags it active. */
static inline void SetAltDetect(const uint8_t *buf, uint16_t altLen)
{
    DetectFlag_Enable(FLAG_ALT_DETECT);
    DetectBuffer.data = buf;
    DetectBuffer.len = altLen;
}

/* Marks the decode buffer as holding altLen bytes of alternate data. */
static inline void SetAltDecode(uint16_t altLen)
{
    DetectFlag_Enable(FLAG_ALT_DECODE);
    DecodeBuffer.len = altLen;
}

/* Re-arms detection state: installs the new detect buffer and clears every
 * flag and auxiliary buffer left over from the previous run. */
static inline void DetectReset(const uint8_t *buf, uint16_t altLen)
{
    DetectBuffer.data = buf;
    DetectBuffer.len = altLen;
    DetectFlag_Disable(FLAG_DETECT_ALL);
    /* Reset the values */
    file_data_ptr.data = NULL;
    file_data_ptr.len = 0;
    base64_decode_size = 0;
    doe_buf_flags = 0;
    mime_present = 0;
    DecodeBuffer.len = 0;
}
#endif
|
sgomezsaez/SCARF-SimilarityEngine | SimilarityEngine/src/com/model/PerformanceMetricDescriptor.java | <reponame>sgomezsaez/SCARF-SimilarityEngine<filename>SimilarityEngine/src/com/model/PerformanceMetricDescriptor.java
package com.model;
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
/**
 * JPA entity holding summary statistics (min/max/mean/standard deviation)
 * of a single metric for a single performance record. The fk_* columns
 * reference the owning performance row and the metric definition row.
 */
@Entity
@Table(name = "performance_metric_descriptor")
@JsonIgnoreProperties({"hibernateLazyInitializer", "handler"})
public class PerformanceMetricDescriptor implements Serializable{
	private static final long serialVersionUID = 1L;
	// Sentinel marking a statistic that was never computed; -200 is assumed
	// to lie outside every metric's valid range -- TODO confirm.
	final static float UNDEFINED = -200;
	@Id
	@GeneratedValue
	@Column(name = "id")
	public int id;
	@Column(name = "min")
	public float min;
	@Column(name = "max")
	public float max;
	@Column(name = "mean")
	public float mean;
	@Column(name = "st_deviation")
	public float st_deviation;
	@Column(name = "fk_performance_id")
	public int fk_performance_id;
	@Column(name = "fk_metric_id")
	public int fk_metric_id;
	/** Fully populated descriptor. */
	public PerformanceMetricDescriptor(float min, float max, float mean, float st_deviation,
			int fk_performance_id, int fk_metric_id) {
		super();
		this.min = min;
		this.max = max;
		this.mean = mean;
		this.st_deviation = st_deviation;
		this.fk_performance_id = fk_performance_id;
		this.fk_metric_id = fk_metric_id;
	}
	/** No-arg constructor (needed by JPA); fields start as sentinel/"unset" values. */
	public PerformanceMetricDescriptor() {
		super();
		this.id = -1;
		this.min = UNDEFINED;
		this.max = UNDEFINED;
		this.mean = UNDEFINED;
		this.st_deviation = UNDEFINED;
		this.fk_performance_id = -1;
		this.fk_metric_id = -1;
	}
	public int getId() {
		return id;
	}
	public void setId(int id) {
		this.id = id;
	}
	public float getMin() {
		return min;
	}
	public void setMin(float min) {
		this.min = min;
	}
	public float getMax() {
		return max;
	}
	public void setMax(float max) {
		this.max = max;
	}
	public float getMean() {
		return mean;
	}
	public void setMean(float mean) {
		this.mean = mean;
	}
	public float getSt_deviation() {
		return st_deviation;
	}
	public void setSt_deviation(float st_deviation) {
		this.st_deviation = st_deviation;
	}
	public int getFk_performance_id() {
		return fk_performance_id;
	}
	public void setFk_performance_id(int fk_performance_id) {
		this.fk_performance_id = fk_performance_id;
	}
	public int getFk_metric_id() {
		return fk_metric_id;
	}
	public void setFk_metric_id(int fk_metric_id) {
		this.fk_metric_id = fk_metric_id;
	}
}
gb-archive/asmotor | xasm/6502/main.c | <reponame>gb-archive/asmotor
/* Copyright 2008-2017 <NAME>
This file is part of ASMotor.
ASMotor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ASMotor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with ASMotor. If not, see <http://www.gnu.org/licenses/>.
*/
#include "xasm.h"
#include <stdlib.h>
#include <stdio.h>
/* Assembler configuration for the 6502 backend. Fields follow the
 * declaration order of SConfiguration -- confirm against xasm.h: target
 * name/version, a 0x10000 (64 KiB) address space, little-endian encoding,
 * 8-bit minimum object size, and the directive names for reserving
 * (rb/rw/rl) and defining (db/dw/dl, ds) 8/16/32-bit data. */
static SConfiguration s_sConfiguration =
{
	"motor6502",
	"1.0",
	0x10000,
	ASM_LITTLE_ENDIAN,
	false,
	false,
	MINSIZE_8BIT,
	1,
	"rb", "rw", "rl",
	"db", "dw", "dl",
	"ds", NULL, NULL
};

SConfiguration* g_pConfiguration = &s_sConfiguration;

/* Entry point: delegates straight to the shared xasm driver. */
extern int main(int argc, char* argv[])
{
	return xasm_Main(argc, argv);
}
|
yizhang-cae/torsten | cmdstan/stan/src/test/unit/lang/parser/torsten_functions_test.cpp | <reponame>yizhang-cae/torsten
#include <gtest/gtest.h>
#include <test/unit/lang/utility.hpp>
// Each test asserts that the Stan language parser accepts the corresponding
// Torsten function-signature model file (test_parsable fails the test if the
// file does not parse). One TEST per Torsten solver entry point.
TEST(lang_parser, PKModelOneCpt_function_signatures) {
  test_parsable("function-signatures/math/torsten/PKModelOneCpt");
}
TEST(lang_parser, PKModelTwoCpt_function_signatures) {
  test_parsable("function-signatures/math/torsten/PKModelTwoCpt");
}
TEST(lang_parser, linOdeModel_function_signatures) {
  test_parsable("function-signatures/math/torsten/linOdeModel");
}
TEST(lang_parser, generalCptModel_function_signatures) {
  test_parsable("function-signatures/math/torsten/generalCptModel");
}
TEST(lang_parser, mixOde1CptModel_function_signatures) {
  test_parsable("function-signatures/math/torsten/mixOde1CptModel");
}
TEST(lang_parser, mixOde2CptModel_function_signatures) {
  test_parsable("function-signatures/math/torsten/mixOde2CptModel");
}
TEST(lang_parser, univariate_integral_function_signatures) {
  test_parsable("function-signatures/math/torsten/univariate_integral");
}
|
ovidiu-mura/LongestIncreasingSequences | src/main/java/ArrangingCoins/Solution.java | <reponame>ovidiu-mura/LongestIncreasingSequences
package ArrangingCoins;
public class Solution {
    /**
     * Returns the number of complete "staircase" rows that n coins can fill,
     * where row k holds k coins (i.e. the largest k with k*(k+1)/2 <= n).
     *
     * Runs in O(sqrt(n)); the running total is kept in a long so the
     * accumulation cannot overflow even for n == Integer.MAX_VALUE.
     */
    public int arrangeCoins(int n) {
        // Small inputs resolved directly (matches the original fast paths).
        if (n == 0)
            return 0;
        if (n == 1 || n == 2)
            return 1;
        long filled = 0;   // coins consumed by rows 1..row-1
        int row = 1;       // next row to attempt
        while (filled <= n) {
            filled += row;
            row++;
        }
        // The loop overshoots by one row and one increment, hence -2.
        return row - 2;
    }
}
|
igagrock/imzbackendRest | backendapi/src/main/java/com/wemater/controller/StartExecutorforArticles.java | package com.wemater.controller;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import org.apache.log4j.Logger;
import org.hibernate.HibernateException;
import com.wemater.dao.UserDao;
import com.wemater.dto.User;
import com.wemater.util.HibernateUtil;
import com.wemater.util.SessionUtil;
public class StartExecutorforArticles extends HttpServlet {
private static final long serialVersionUID = 1L;
private static Logger log = Logger.getLogger(StartExecutorforArticles.class);
@Override
public void init() throws ServletException {
SessionUtil su = new SessionUtil(HibernateUtil.getSessionFactory()
.openSession());
log.info("inserting anonymous--STARTED");
saveUpdateAnyonymous(su);
}
public void saveUpdateAnyonymous(SessionUtil su) {
UserDao ud = new UserDao(su);
User user = ud.createUser("Anonymous", "<EMAIL>", "Anonymous", "CKBPS0423c",
"This article is an Orphan. I am just taking care of it. You can still read it and support it!");
try {
su.beginSessionWithTransaction();
su.getSession().save(user);
su.CommitCurrentTransaction();
log.info("Inserting anonymous--DONE");
} catch (HibernateException e) {
su.rollBackCurrentTransaction();
log.info("Anyonymous already inserted. NO NEED TO INSERT");
}
}
} |
liuzix/sgx-elps | musl/src/unistd/dup.c | #include <unistd.h>
#include "syscall.h"
/* POSIX dup(2): duplicate fd onto the lowest-numbered free descriptor.
 * Routed through __async_syscall rather than the plain syscall() macro --
 * presumably the SGX-enclave-aware dispatch path of this port; from the
 * caller's perspective it behaves like the ordinary blocking syscall. */
int dup(int fd)
{
	return __async_syscall(SYS_dup, fd);
}
|
isstac/spf-sca | src/main/sidechannel/cost/approximate/MultipleRunCost.java | package sidechannel.cost.approximate;
import java.util.ArrayList;
import gov.nasa.jpf.vm.ChoiceGenerator;
import gov.nasa.jpf.vm.SystemState;
import sidechannel.choice.CostChoiceGenerator;
import sidechannel.cost.approximate.monitor.MultiRunPathMonitor;
/**
*
* @author <NAME> <<EMAIL>>
*
*/
/**
 * Approximate side-channel cost that reports, for the current execution
 * path, one cost value per {@link CostChoiceGenerator} on the path's
 * choice-generator chain.
 */
public class MultipleRunCost extends ApproximateCost<ArrayList<Long>> {

	/**
	 * @param sideChannel  side-channel identifier forwarded to the monitor
	 * @param secureMethod method under analysis; when null, no path monitor
	 *                     is installed
	 */
	public MultipleRunCost(int sideChannel, String secureMethod) {
		if (secureMethod != null) {
			monitor = new MultiRunPathMonitor(sideChannel, secureMethod);
		}
	}

	/**
	 * Walks the choice-generator chain (unique for a given path) and collects
	 * the cost recorded by every CostChoiceGenerator, in chain order.
	 */
	@Override
	public ArrayList<Long> getCurrentCost(SystemState ss) {
		ArrayList<Long> pathCosts = new ArrayList<Long>();
		for (ChoiceGenerator<?> generator : ss.getChoiceGenerators()) {
			if (generator instanceof CostChoiceGenerator) {
				pathCosts.add(((CostChoiceGenerator) generator).getCost());
			}
		}
		return pathCosts;
	}
}
|
dataplumber/dmas | inventory/src/main/java/gov/nasa/podaac/inventory/api/GranuleMetadataFactory.java | /**
*
*/
package gov.nasa.podaac.inventory.api;
import gov.nasa.podaac.inventory.core.GranuleMetadataImpl;
/**
* @author clwong
* $Id: GranuleMetadataFactory.java 249 2007-10-02 22:59:41Z clwong $
*/
/**
 * Eager-singleton factory for {@link GranuleMetadata} instances; hides the
 * concrete {@link GranuleMetadataImpl} type from callers.
 */
public class GranuleMetadataFactory {

	/** The one shared factory instance, created at class-load time. */
	private static GranuleMetadataFactory granuleMetadataFactory = new GranuleMetadataFactory();

	/** Private: obtain the factory through {@link #getInstance()}. */
	private GranuleMetadataFactory() {
	}

	/**
	 * Gets an instance of GranuleMetadataFactory object.
	 *
	 * @return the shared GranuleMetadataFactory object.
	 */
	public static GranuleMetadataFactory getInstance() {
		return granuleMetadataFactory;
	}

	/** @return a new, empty GranuleMetadata backed by the default implementation. */
	public GranuleMetadata createGranuleMetadata() {
		return new GranuleMetadataImpl();
	}
}
|
qhl0505/dmd | dmd-mall/src/main/java/com/dmd/mall/config/SecurityConfig.java | <gh_stars>1-10
package com.dmd.mall.config;
//import com.dmd.mall.model.UmsMember;
//import com.dmd.mall.model.domain.MemberDetails;
//import com.dmd.mall.service.UmsMemberService;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.context.annotation.Bean;
//import org.springframework.context.annotation.Configuration;
//import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
//import org.springframework.security.config.annotation.web.builders.HttpSecurity;
//import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
//import org.springframework.security.core.userdetails.UserDetails;
//import org.springframework.security.core.userdetails.UserDetailsService;
//import org.springframework.security.core.userdetails.UsernameNotFoundException;
//import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
//import org.springframework.security.crypto.password.PasswordEncoder;
/**
* SpringSecurity的配置
* Created by macro on 2018/8/3.
*/
//@Configuration
//public class SecurityConfig extends WebSecurityConfigurerAdapter {
// @Autowired
// private UmsMemberService memberService;
//
// @Override
// protected void configure(HttpSecurity http) throws Exception {
// http.csrf().disable();//开启basic认证登录后可以调用需要认证的接口
// }
//
// @Override
// protected void configure(AuthenticationManagerBuilder auth) throws Exception {
// auth.userDetailsService(userDetailsService())
// .passwordEncoder(passwordEncoder());
// }
//
// @Bean
// public PasswordEncoder passwordEncoder() {
// return new BCryptPasswordEncoder();
// }
//
// @Bean
// public UserDetailsService userDetailsService() {
// //获取登录用户信息
// return new UserDetailsService() {
// @Override
// public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
// UmsMember member = memberService.getByUsername(username);
// if (member != null) {
// return new MemberDetails(member);
// }
// throw new UsernameNotFoundException("用户名或密码错误");
// }
// };
// }
//}
|
best08618/asylo | gcc-gcc-7_3_0-release/gcc/testsuite/gcc.dg/tree-ssa/20030920-1.c | <gh_stars>1-10
/* Jump threading was creating FALLTHRU edges out of blocks ending in
GOTO_EXPR. */
extern int frame_pointer_needed;
struct value_data_entry
{
unsigned int mode;
unsigned int oldest_regno;
unsigned int next_regno;
};
struct value_data
{
struct value_data_entry e[53];
unsigned int max_value_regs;
};
struct rtx_def
{
unsigned int code: 16;
unsigned int mode : 8;
unsigned int jump : 1;
unsigned int call : 1;
unsigned int unchanging : 1;
unsigned int volatil : 1;
unsigned int in_struct : 1;
unsigned int used : 1;
unsigned integrated : 1;
unsigned frame_related : 1;
int fld[1];
};
typedef struct rtx_def *rtx;
enum machine_mode { VOIDmode, BImode, QImode, HImode, SImode, DImode,
TImode, OImode, PQImode, PHImode, PSImode, PDImode, QFmode, HFmode,
TQFmode, SFmode, DFmode, XFmode, TFmode, QCmode, HCmode, SCmode,
DCmode, XCmode, TCmode, CQImode, CHImode, CSImode, CDImode, CTImode,
COImode, V1DImode, V2QImode, V2HImode, V2SImode, V2DImode, V4QImode,
V4HImode, V4SImode, V4DImode, V8QImode, V8HImode, V8SImode, V8DImode,
V16QImode, V2HFmode, V2SFmode, V2DFmode, V4HFmode, V4SFmode, V4DFmode,
V8HFmode, V8SFmode, V8DFmode, V16SFmode, BLKmode, CCmode, CCGCmode,
CCGOCmode, CCNOmode, CCZmode, CCFPmode, CCFPUmode, MAX_MACHINE_MODE };
enum mode_class { MODE_RANDOM, MODE_INT, MODE_FLOAT, MODE_PARTIAL_INT, MODE_CC,
MODE_COMPLEX_INT, MODE_COMPLEX_FLOAT,
MODE_VECTOR_INT, MODE_VECTOR_FLOAT,
MAX_MODE_CLASS};
extern const unsigned char mode_size[(int) MAX_MACHINE_MODE];
extern const enum mode_class mode_class[(int) MAX_MACHINE_MODE];
extern int target_flags;
/* Reduced GCC regression testcase (see the header comment: jump threading
 * used to create FALLTHRU edges out of blocks ending in GOTO_EXPR).
 * NOTE(review): appears to be mechanically reduced from regrename.c's
 * copy_value with all macros expanded -- do NOT "clean up" or reformat;
 * the exact shape of this code is what reproduced the compiler bug. */
static void
copy_value (rtx dest, rtx src, struct value_data *vd)
{
  unsigned int dr = (((dest)->fld[0]));
  unsigned int sr = (((src)->fld[0]));
  unsigned int dn, sn;
  unsigned int i;
  /* Early exits: self-copy, hard-wired registers, frame pointer. */
  if (sr == dr)
    return;
  if (dr == 7)
    return;
  if (frame_pointer_needed && dr == 6)
    return;
  dn = (((dr) >= 8 && (dr) <= (8 + 7)) || (((dr) >= (20 + 1) && (dr) <= ((20 + 1) + 7)) || ((dr) >= (((((((20 + 1) + 7) + 1) + 7) + 1) + 7) + 1) && (dr) <= ((((((((20 + 1) + 7) + 1) + 7) + 1) + 7) + 1) + 7))) || ((dr) >= (((20 + 1) + 7) + 1) && (dr) <= ((((20 + 1) + 7) + 1) + 7)) ? (((mode_class[(int) (((enum machine_mode) (dest)->mode))]) == MODE_COMPLEX_INT || (mode_class[(int) (((enum machine_mode) (dest)->mode))]) == MODE_COMPLEX_FLOAT) ? 2 : 1) : ((((enum machine_mode) (dest)->mode)) == TFmode ? ((target_flags & 0x00100000) ? 2 : 3) : (((enum machine_mode) (dest)->mode)) == TCmode ? ((target_flags & 0x00100000) ? 4 : 6) : (((mode_size[(int) (((enum machine_mode) (dest)->mode))]) + ((target_flags & 0x00100000) ? 8 : 4) - 1) / ((target_flags & 0x00100000) ? 8 : 4))));
  sn = (((sr) >= 8 && (sr) <= (8 + 7)) || (((sr) >= (20 + 1) && (sr) <= ((20 + 1) + 7)) || ((sr) >= (((((((20 + 1) + 7) + 1) + 7) + 1) + 7) + 1) && (sr) <= ((((((((20 + 1) + 7) + 1) + 7) + 1) + 7) + 1) + 7))) || ((sr) >= (((20 + 1) + 7) + 1) && (sr) <= ((((20 + 1) + 7) + 1) + 7)) ? (((mode_class[(int) (((enum machine_mode) (dest)->mode))]) == MODE_COMPLEX_INT || (mode_class[(int) (((enum machine_mode) (dest)->mode))]) == MODE_COMPLEX_FLOAT) ? 2 : 1) : ((((enum machine_mode) (dest)->mode)) == TFmode ? ((target_flags & 0x00100000) ? 2 : 3) : (((enum machine_mode) (dest)->mode)) == TCmode ? ((target_flags & 0x00100000) ? 4 : 6) : (((mode_size[(int) (((enum machine_mode) (dest)->mode))]) + ((target_flags & 0x00100000) ? 8 : 4) - 1) / ((target_flags & 0x00100000) ? 8 : 4))));
  /* Bail out when the source and destination hard-register windows overlap. */
  if ((dr > sr && dr < sr + sn)
      || (sr > dr && sr < dr + dn))
    return;
  if (vd->e[sr].mode == VOIDmode)
    set_value_regno (sr, vd->e[dr].mode, vd);
  else if (sn < (unsigned int) (((sr) >= 8 && (sr) <= (8 + 7)) || (((sr) >= (20 + 1) && (sr) <= ((20 + 1) + 7)) || ((sr) >= (((((((20 + 1) + 7) + 1) + 7) + 1) + 7) + 1) && (sr) <= ((((((((20 + 1) + 7) + 1) + 7) + 1) + 7) + 1) + 7))) || ((sr) >= (((20 + 1) + 7) + 1) && (sr) <= ((((20 + 1) + 7) + 1) + 7)) ? (((mode_class[(int) (vd->e[sr].mode)]) == MODE_COMPLEX_INT || (mode_class[(int) (vd->e[sr].mode)]) == MODE_COMPLEX_FLOAT) ? 2 : 1) : ((vd->e[sr].mode) == TFmode ? ((target_flags & 0x00100000) ? 2 : 3) : (vd->e[sr].mode) == TCmode ? ((target_flags & 0x00100000) ? 4 : 6) : (((mode_size[(int) (vd->e[sr].mode)]) + ((target_flags & 0x00100000) ? 8 : 4) - 1) / ((target_flags & 0x00100000) ? 8 : 4))))
	   && ((mode_size[(int) (vd->e[sr].mode)]) > ((target_flags & 0x00100000) ? 8 : 4)
	       ? 0 : 0))
    return;
  else if (sn > (unsigned int) (((sr) >= 8 && (sr) <= (8 + 7)) || (((sr) >= (20 + 1) && (sr) <= ((20 + 1) + 7)) || ((sr) >= (((((((20 + 1) + 7) + 1) + 7) + 1) + 7) + 1) && (sr) <= ((((((((20 + 1) + 7) + 1) + 7) + 1) + 7) + 1) + 7))) || ((sr) >= (((20 + 1) + 7) + 1) && (sr) <= ((((20 + 1) + 7) + 1) + 7)) ? (((mode_class[(int) (vd->e[sr].mode)]) == MODE_COMPLEX_INT || (mode_class[(int) (vd->e[sr].mode)]) == MODE_COMPLEX_FLOAT) ? 2 : 1) : ((vd->e[sr].mode) == TFmode ? ((target_flags & 0x00100000) ? 2 : 3) : (vd->e[sr].mode) == TCmode ? ((target_flags & 0x00100000) ? 4 : 6) : (((mode_size[(int) (vd->e[sr].mode)]) + ((target_flags & 0x00100000) ? 8 : 4) - 1) / ((target_flags & 0x00100000) ? 8 : 4)))))
    return;
  /* Link dr onto the end of sr's value chain. */
  vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;
  for (i = sr; vd->e[i].next_regno != (~(unsigned int) 0); i = vd->e[i].next_regno)
    continue;
  vd->e[i].next_regno = dr;
  validate_value_data (vd);
}
|
bzxy/cydia | iOSOpenDev/frameworks/AXRuntime.framework/Headers/AXRuntime-Structs.h | <gh_stars>100-1000
/**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/AXRuntime.framework/AXRuntime
*/
|
Marcusz97/CILP_Facilitatore_Audacity | lib-src/libflac/src/flac/utils.c | <reponame>Marcusz97/CILP_Facilitatore_Audacity
/* flac - Command-line FLAC encoder/decoder
* Copyright (C) 2002-2009 <NAME>
* Copyright (C) 2011-2013 Xiph.Org Foundation
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#if HAVE_CONFIG_H
# include <config.h>
#endif
#include <math.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "utils.h"
#include "FLAC/assert.h"
#include "FLAC/metadata.h"
#include "share/compat.h"
#ifndef _WIN32
#include <wchar.h>
#ifdef HAVE_TERMIOS_H
# include <termios.h>
#endif
#ifdef GWINSZ_IN_SYS_IOCTL
# include <sys/ioctl.h>
#endif
#endif
const char *CHANNEL_MASK_TAG = "WAVEFORMATEXTENSIBLE_CHANNEL_MASK";
int flac__utils_verbosity_ = 2;
/* Parse an unsigned decimal integer into *value.  Returns false for the
 * empty string or any non-digit character; on failure *value is untouched. */
static FLAC__bool local__parse_uint64_(const char *s, FLAC__uint64 *value)
{
	FLAC__uint64 accum = 0;
	/* an empty string is not a number */
	if(*s == '\0')
		return false;
	for(; *s != '\0'; s++) {
		if(*s < '0' || *s > '9')
			return false;
		accum = accum * 10 + (*s - '0');
	}
	*value = accum;
	return true;
}
/* Parse an "MM:SS" / "MM:SS.sss" timecode into seconds (',' is accepted as
 * the decimal separator for locales that use it).  Returns false on any
 * malformed input; on failure *value is untouched. */
static FLAC__bool local__parse_timecode_(const char *s, double *value)
{
	double ret;
	unsigned i;
	char c, *endptr;
	/* parse [0-9][0-9]*: */
	c = *s++;
	if(c >= '0' && c <= '9')
		i = (c - '0');
	else
		return false;
	while(':' != (c = *s++)) {
		if(c >= '0' && c <= '9')
			i = i * 10 + (c - '0');
		else
			return false;
	}
	/* minutes converted to seconds */
	ret = (double)i * 60.;
	/* parse [0-9]*[.,]?[0-9]* i.e. a sign-less rational number (. or , OK for fractional seconds, to support different locales) */
	if(strspn(s, "1234567890.,") != strlen(s))
		return false;
	ret += strtod(s, &endptr);
	/* reject an empty or only-partially-consumed seconds field */
	if (endptr == s || *endptr)
		return false;
	*value = ret;
	return true;
}
/* Parse a "TRACK.INDEX" cue point from s (up to 'end', or to NUL when end
 * is 0).  Both the track and the index part must contain at least one
 * digit; returns false otherwise. */
static FLAC__bool local__parse_cue_(const char *s, const char *end, unsigned *track, unsigned *indx)
{
	FLAC__bool got_track = false, got_index = false;
	unsigned t = 0, i = 0;
	char c;
	/* digits before the '.' form the track number */
	while(end? s < end : *s != '\0') {
		c = *s++;
		if(c >= '0' && c <= '9') {
			t = t * 10 + (c - '0');
			got_track = true;
		}
		else if(c == '.')
			break;
		else
			return false;
	}
	/* digits after the '.' form the index number */
	while(end? s < end : *s != '\0') {
		c = *s++;
		if(c >= '0' && c <= '9') {
			i = i * 10 + (c - '0');
			got_index = true;
		}
		else
			return false;
	}
	/* NOTE: *track/*indx are written even on a partial parse; callers only
	 * use them when the return value is true. */
	*track = t;
	*indx = i;
	return got_track && got_index;
}
/*
 * this only works with sorted cuesheets (the spec strongly recommends but
 * does not require sorted cuesheets). but if it's not sorted, picking a
 * nearest cue point has no significance.
 *
 * Returns the sample offset of the nearest cue point at-or-after
 * (look_forward) or at-or-before (!look_forward) track.indx, falling back
 * to total_samples / 0 respectively when none exists in that direction.
 */
static FLAC__uint64 local__find_closest_cue_(const FLAC__StreamMetadata_CueSheet *cuesheet, unsigned track, unsigned indx, FLAC__uint64 total_samples, FLAC__bool look_forward)
{
	int t, i;
	if(look_forward) {
		/* scan tracks/indices in ascending order for the first cue >= (track, indx) */
		for(t = 0; t < (int)cuesheet->num_tracks; t++)
			for(i = 0; i < (int)cuesheet->tracks[t].num_indices; i++)
				if(cuesheet->tracks[t].number > track || (cuesheet->tracks[t].number == track && cuesheet->tracks[t].indices[i].number >= indx))
					return cuesheet->tracks[t].offset + cuesheet->tracks[t].indices[i].offset;
		return total_samples;
	}
	else {
		/* scan in descending order for the last cue <= (track, indx) */
		for(t = (int)cuesheet->num_tracks - 1; t >= 0; t--)
			for(i = (int)cuesheet->tracks[t].num_indices - 1; i >= 0; i--)
				if(cuesheet->tracks[t].number < track || (cuesheet->tracks[t].number == track && cuesheet->tracks[t].indices[i].number <= indx))
					return cuesheet->tracks[t].offset + cuesheet->tracks[t].indices[i].offset;
		return 0;
	}
}
/* fprintf() gated on the global verbosity: the message is emitted only when
 * flac__utils_verbosity_ is at least *level*. */
void flac__utils_printf(FILE *stream, int level, const char *format, ...)
{
	if(flac__utils_verbosity_ >= level) {
		va_list args;
		FLAC__ASSERT(0 != format);
		va_start(args, format);
		(void) flac_vfprintf(stream, format, args);
		va_end(args);
#ifdef _MSC_VER
		if(stream == stderr)
			fflush(stream); /* for some reason stderr is buffered in at least some if not all MSC libs */
#endif
	}
}
/* variables and functions for console status output */
static FLAC__bool is_name_printed;
static int stats_char_count = 0;
static int console_width;
static int console_chars_left;
/* Best-effort query of the console width in columns, falling back to 80
 * when the platform provides no way to ask (or the ioctl fails). */
int get_console_width(void)
{
	int width = 80;
#if defined _WIN32
	width = win_get_console_width();
#elif defined __EMX__
	/* OS/2: _scrsize() returns {columns, rows} */
	int s[2];
	_scrsize (s);
	width = s[0];
#elif !defined __ANDROID__
	/* POSIX terminals: ask the tty driver for the window size */
	struct winsize w;
	if (ioctl(STDOUT_FILENO, TIOCGWINSZ, &w) != -1) width = w.ws_col;
#endif
	return width;
}
/* Display width of *text* in console columns (multibyte aware; wide glyphs
 * count as 2).  Falls back to the byte length when conversion or width
 * lookup fails. */
size_t strlen_console(const char *text)
{
#ifdef _WIN32
	return strlen_utf8(text);
#else
	size_t len;
	wchar_t *wtmp;
	int width;

	len = strlen(text)+1;
	wtmp = (wchar_t *)malloc(len*sizeof(wchar_t));
	if (wtmp == NULL) return len-1;
	/* FIX: both conversions below were previously unchecked.  mbstowcs()
	 * returns (size_t)-1 on an invalid multibyte sequence (leaving wtmp
	 * partially uninitialized), and wcswidth() returns -1 when the string
	 * contains a non-printable character -- which, assigned to a size_t,
	 * became a huge bogus width.  Fall back to the byte length instead. */
	if (mbstowcs(wtmp, text, len) == (size_t)-1) {
		free(wtmp);
		return len-1;
	}
	width = wcswidth(wtmp, len);
	free(wtmp);
	return width < 0 ? len-1 : (size_t)width;
#endif
}
/* Reset the per-file console state so the next file's name gets printed. */
void stats_new_file(void)
{
	is_name_printed = false;
}
/* Backspace over the previously printed stats string.  The "> 0 &&" guard
 * keeps the post-decrement from driving the counter negative when it is
 * already zero. */
void stats_clear(void)
{
	while (stats_char_count > 0 && stats_char_count--)
		fprintf(stderr, "\b");
}
/* Print "name: " once per file (verbosity permitting) and record how many
 * console columns remain on the current line for subsequent stats output. */
void stats_print_name(int level, const char *name)
{
	int len;
	if (flac__utils_verbosity_ >= level) {
		stats_clear();
		if(is_name_printed) return;
		console_width = get_console_width();
		len = strlen_console(name)+2; /* +2 for the ": " suffix */
		console_chars_left = console_width - (len % console_width);
		flac_fprintf(stderr, "%s: ", name);
		is_name_printed = true;
	}
}
/* Format and print a progress/stats message over the previous one,
 * wrapping to a new line when it would not fit in the remaining columns.
 * Remembers the printed length so stats_clear() can erase it. */
void stats_print_info(int level, const char *format, ...)
{
	char tmp[80];
	int len, clear_len;
	if (flac__utils_verbosity_ >= level) {
		va_list args;
		va_start(args, format);
		len = vsnprintf(tmp, sizeof(tmp), format, args);
		va_end(args);
		/* NOTE(review): vsnprintf returns the length that *would* have been
		 * written, so truncation is any len >= sizeof(tmp); the "==" test
		 * below misses len > sizeof(tmp) -- confirm intent before changing. */
		if (len < 0 || len == sizeof(tmp)) {
			tmp[sizeof(tmp)-1] = '\0';
			len = sizeof(tmp)-1;
		}
		stats_clear();
		if (len >= console_chars_left) {
			/* pad out the rest of the line, then wrap */
			clear_len = console_chars_left;
			while (clear_len > 0 && clear_len--) fprintf(stderr, " ");
			fprintf(stderr, "\n");
			console_chars_left = console_width;
		}
		stats_char_count = fprintf(stderr, "%s", tmp);
	}
}
#ifdef FLAC__VALGRIND_TESTING
/* fwrite() wrapper used only under valgrind testing: flushing after every
 * successful write ensures buffered output is not lost when a run aborts. */
size_t flac__utils_fwrite(const void *ptr, size_t size, size_t nmemb, FILE *stream)
{
	size_t ret = fwrite(ptr, size, nmemb, stream);
	if(!ferror(stream))
		fflush(stream);
	return ret;
}
#endif
/* Parse a --skip/--until style specification into *spec.  The string is an
 * optional '+'/'-' prefix (making the value relative, '-' also negating it)
 * followed by either a plain sample count or an MM:SS[.sss] timecode.
 * A NULL string yields the default "absolute 0 samples".  Returns false
 * only on a malformed number/timecode. */
FLAC__bool flac__utils_parse_skip_until_specification(const char *s, utils__SkipUntilSpecification *spec)
{
	FLAC__uint64 val;
	FLAC__bool is_negative = false;
	FLAC__ASSERT(0 != spec);
	/* defaults: absolute position, 0 samples */
	spec->is_relative = false;
	spec->value_is_samples = true;
	spec->value.samples = 0;
	if(0 != s) {
		if(s[0] == '-') {
			is_negative = true;
			spec->is_relative = true;
			s++;
		}
		else if(s[0] == '+') {
			spec->is_relative = true;
			s++;
		}
		/* try an integer sample count first, then fall back to a timecode */
		if(local__parse_uint64_(s, &val)) {
			spec->value_is_samples = true;
			spec->value.samples = (FLAC__int64)val;
			if(is_negative)
				spec->value.samples = -(spec->value.samples);
		}
		else {
			double d;
			if(!local__parse_timecode_(s, &d))
				return false;
			spec->value_is_samples = false;
			spec->value.seconds = d;
			if(is_negative)
				spec->value.seconds = -(spec->value.seconds);
		}
	}
	return true;
}
/* Convert a seconds-valued specification to samples in place using the
 * stream's sample rate; a no-op when the value is already in samples. */
void flac__utils_canonicalize_skip_until_specification(utils__SkipUntilSpecification *spec, unsigned sample_rate)
{
	FLAC__ASSERT(0 != spec);
	if(!spec->value_is_samples) {
		spec->value.samples = (FLAC__int64)(spec->value.seconds * (double)sample_rate);
		spec->value_is_samples = true;
	}
}
/* Parse a cuesheet span "[T1.I1]-[T2.I2]" into start/end track.index pairs.
 * Either endpoint may be omitted ("-T.I", "T.I-", or even "T.I" alone for a
 * start with no end).  Returns false if a present endpoint fails to parse. */
FLAC__bool flac__utils_parse_cue_specification(const char *s, utils__CueSpecification *spec)
{
	const char *start = s, *end = 0;
	FLAC__ASSERT(0 != spec);
	spec->has_start_point = spec->has_end_point = false;
	/* split at the first '-'; an endpoint is "absent" when its side is empty */
	s = strchr(s, '-');
	if(0 != s) {
		if(s == start)
			start = 0;
		end = s+1;
		if(*end == '\0')
			end = 0;
	}
	if(start) {
		if(!local__parse_cue_(start, s, &spec->start_track, &spec->start_index))
			return false;
		spec->has_start_point = true;
	}
	if(end) {
		if(!local__parse_cue_(end, 0, &spec->end_track, &spec->end_index))
			return false;
		spec->has_end_point = true;
	}
	return true;
}
/* Translate a track/index cue span into absolute sample-based skip/until
 * specifications, snapping each endpoint to the nearest cue point in the
 * cuesheet (backward for the start, forward for the end).  A missing start
 * maps to sample 0 and a missing end to total_samples. */
void flac__utils_canonicalize_cue_specification(const utils__CueSpecification *cue_spec, const FLAC__StreamMetadata_CueSheet *cuesheet, FLAC__uint64 total_samples, utils__SkipUntilSpecification *skip_spec, utils__SkipUntilSpecification *until_spec)
{
	FLAC__ASSERT(0 != cue_spec);
	FLAC__ASSERT(0 != cuesheet);
	FLAC__ASSERT(0 != total_samples);
	FLAC__ASSERT(0 != skip_spec);
	FLAC__ASSERT(0 != until_spec);
	/* outputs are always absolute sample positions */
	skip_spec->is_relative = false;
	skip_spec->value_is_samples = true;
	until_spec->is_relative = false;
	until_spec->value_is_samples = true;
	if(cue_spec->has_start_point)
		skip_spec->value.samples = local__find_closest_cue_(cuesheet, cue_spec->start_track, cue_spec->start_index, total_samples, /*look_forward=*/false);
	else
		skip_spec->value.samples = 0;
	if(cue_spec->has_end_point)
		until_spec->value.samples = local__find_closest_cue_(cuesheet, cue_spec->end_track, cue_spec->end_index, total_samples, /*look_forward=*/true);
	else
		until_spec->value.samples = total_samples;
}
/* Set (replacing any existing) the WAVEFORMATEXTENSIBLE_CHANNEL_MASK vorbis
 * comment on *object* to "=0xNNNN".  Returns false if formatting overflows
 * the local buffer or the metadata replacement fails. */
FLAC__bool flac__utils_set_channel_mask_tag(FLAC__StreamMetadata *object, FLAC__uint32 channel_mask)
{
	FLAC__StreamMetadata_VorbisComment_Entry entry = { 0, 0 };
	char tag[128];
	FLAC__ASSERT(object);
	FLAC__ASSERT(object->type == FLAC__METADATA_TYPE_VORBIS_COMMENT);
	/* FIX: the closing parenthesis was misplaced -- the old code computed
	 * strlen(CHANNEL_MASK_TAG + 1+2+16+1), i.e. the length of the tag name
	 * minus its first 20 characters, instead of the intended full budget
	 * strlen(name) + 1 + 2 + 16 + 1, so the assert checked nothing useful. */
	FLAC__ASSERT(strlen(CHANNEL_MASK_TAG)+1+2+16+1 <= sizeof(tag)); /* +1 for =, +2 for 0x, +16 for digits, +1 for NUL */
	entry.entry = (FLAC__byte*)tag;
	if((entry.length = flac_snprintf(tag, sizeof(tag), "%s=0x%04X", CHANNEL_MASK_TAG, (unsigned)channel_mask)) >= sizeof(tag))
		return false;
	if(!FLAC__metadata_object_vorbiscomment_replace_comment(object, entry, /*all=*/true, /*copy=*/true))
		return false;
	return true;
}
/* Find the WAVEFORMATEXTENSIBLE_CHANNEL_MASK vorbis comment on *object* and
 * parse its "=0xNNNN" payload into *channel_mask.  Returns false when the
 * tag is absent or malformed; on failure *channel_mask is untouched. */
FLAC__bool flac__utils_get_channel_mask_tag(const FLAC__StreamMetadata *object, FLAC__uint32 *channel_mask)
{
	int offset;
	unsigned val;
	char *p;
	FLAC__ASSERT(object);
	FLAC__ASSERT(object->type == FLAC__METADATA_TYPE_VORBIS_COMMENT);
	if(0 > (offset = FLAC__metadata_object_vorbiscomment_find_entry_from(object, /*offset=*/0, CHANNEL_MASK_TAG)))
		return false;
	/* entry must at least hold "NAME=0xN" */
	if(object->data.vorbis_comment.comments[offset].length < strlen(CHANNEL_MASK_TAG)+4)
		return false;
	if(0 == (p = strchr((const char *)object->data.vorbis_comment.comments[offset].entry, '='))) /* should never happen, but just in case */
		return false;
	if(strncmp(p, "=0x", 3))
		return false;
	if(sscanf(p+3, "%x", &val) != 1)
		return false;
	*channel_mask = val;
	return true;
}
|
x-sheep/puzzles | PuzzleModern/ParamsFlyout.xaml.h | <filename>PuzzleModern/ParamsFlyout.xaml.h
//
// ParamsFlyout.xaml.h
// Declaration of the ParamsFlyout class
//
#pragma once
#include "ParamsFlyout.g.h"
using namespace PuzzleCommon;
namespace PuzzleModern
{
	/* Fired when the user confirms a valid set of puzzle parameters;
	   carries the edited ConfigItem list back to the subscriber. */
	public delegate void NewConfigurationEventHandler(ParamsFlyout ^sender, Windows::Foundation::Collections::IVector<ConfigItem^>^ newConfig);
	/// <summary>
	/// Settings flyout that edits a puzzle's configuration items and raises
	/// NewConfiguration when the user confirms their changes.
	/// </summary>
	[Windows::Foundation::Metadata::WebHostHidden]
	public ref class ParamsFlyout sealed
	{
	public:
		/* Builds the flyout's controls from the given configuration items. */
		ParamsFlyout(Windows::Foundation::Collections::IVector<ConfigItem^>^ items);
		/* Surfaces a validation error to the user without closing the flyout. */
		void ShowErrorMessage(Platform::String ^error);
		event NewConfigurationEventHandler ^NewConfiguration;
	private:
		/* The items being edited; mutated in place by the controls. */
		Windows::Foundation::Collections::IVector<ConfigItem^>^ _configItems;
		/* Validates input and raises NewConfiguration. */
		void ConfirmButton_Click(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e);
	};
}
|
SimonCasteran/JobBoard | front/src/Admin/admin.js | import React, { useState } from 'react';
import NoCRUD from './noCRUD'
import UserCRUD from './userCRUD'
import AdCRUD from './adCRUD'
import CompanyCRUD from './companyCRUD'
import ApplicationCRUD from './applicationCRUD'
const Admin = () => {
const [page, setPage] = useState(0);
const components = [
<NoCRUD/>,
<UserCRUD/>,
<CompanyCRUD/>,
<AdCRUD/>,
<ApplicationCRUD/>
]
return (
<div>
<button className="button-default" onClick={() => setPage(0)}>No CRUD</button>
<button className="button-default" onClick={() => {setPage(0); setPage(1)}}>User CRUD</button>
<button className="button-default" onClick={() => {setPage(0); setPage(2)}}>Company CRUD</button>
<button className="button-default" onClick={() => {setPage(0); setPage(3)}}> Ad CRUD</button>
<button className="button-default" onClick={() => {setPage(0); setPage(4)}}>Application CRUD</button>
{components[page]}
</div>
)
};
export default Admin; |
gitter-badger/torch | torch-core/src/test/java/org/brightify/torch/util/SerializerTest.java | package org.brightify.torch.util;
import org.brightify.torch.test.TestObject;
import org.junit.Test;
import java.util.ArrayList;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
/**
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
/**
 * Round-trip tests for {@link Serializer}: every case serializes a value to
 * bytes, deserializes it back, and asserts the result equals the original.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 */
public class SerializerTest {

	/** Primitive int survives a serialize/deserialize round trip. */
	@Test
	public void testIntegerSerialization() throws Exception {
		int original = Integer.MAX_VALUE;
		byte[] encoded = Serializer.serialize(original);
		int restored = Serializer.deserialize(encoded, int.class);
		assertEquals(original, restored);
	}

	/** Primitive long survives a round trip. */
	@Test
	public void testLongSerialization() throws Exception {
		long original = Long.MAX_VALUE;
		byte[] encoded = Serializer.serialize(original);
		long restored = Serializer.deserialize(encoded, long.class);
		assertEquals(original, restored);
	}

	/** String survives a round trip. */
	@Test
	public void testStringSerialization() throws Exception {
		String original = "test_testdj24jd3dm3k";
		byte[] encoded = Serializer.serialize(original);
		String restored = Serializer.deserialize(encoded, String.class);
		assertEquals(original, restored);
	}

	/** An entity object survives a round trip (relies on TestObject.equals). */
	@Test
	public void testObjectSerialization() throws Exception {
		TestObject original = new TestObject();
		byte[] encoded = Serializer.serialize(original);
		TestObject restored = Serializer.deserialize(encoded, TestObject.class);
		assertEquals(original, restored);
	}

	/** An array of entities round-trips element-for-element. */
	@Test
	public void testArraySerialization() throws Exception {
		TestObject[] originals = new TestObject[10];
		for (int index = 0; index < originals.length; index++) {
			originals[index] = new TestObject();
			originals[index].id = (long) index;
		}
		byte[] encoded = Serializer.serializeArray(originals);
		TestObject[] restored = Serializer.deserializeArray(new TestObject[0], TestObject.class, encoded);
		assertArrayEquals(originals, restored);
	}

	/** A list of entities round-trips with order preserved. */
	@Test
	public void testCollectionSerialization() throws Exception {
		ArrayList<TestObject> originals = new ArrayList<TestObject>();
		for (int index = 0; index < 10; index++) {
			TestObject element = new TestObject();
			element.id = (long) index;
			originals.add(element);
		}
		byte[] encoded = Serializer.serializeList(originals);
		ArrayList<TestObject> restored = Serializer.deserializeArrayList(TestObject.class, encoded);
		assertEquals(originals, restored);
	}
}
|
IcelyFramework/icely-metamodels | plugins/com.github.icelyframework.metamodels/src/com/github/icelyframework/dynamicview/impl/ResourceImpl.java | <reponame>IcelyFramework/icely-metamodels
/**
*/
package com.github.icelyframework.dynamicview.impl;
import com.github.icelyframework.dynamicview.Aggregate;
import com.github.icelyframework.dynamicview.Create;
import com.github.icelyframework.dynamicview.Delete;
import com.github.icelyframework.dynamicview.DynamicviewPackage;
import com.github.icelyframework.dynamicview.Other;
import com.github.icelyframework.dynamicview.Permission;
import com.github.icelyframework.dynamicview.Read;
import com.github.icelyframework.dynamicview.Resource;
import com.github.icelyframework.dynamicview.ResourceActivity;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.EObjectResolvingEList;
import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Resource</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link com.github.icelyframework.dynamicview.impl.ResourceImpl#getName <em>Name</em>}</li>
* <li>{@link com.github.icelyframework.dynamicview.impl.ResourceImpl#getHasRelatedResource <em>Has Related Resource</em>}</li>
* <li>{@link com.github.icelyframework.dynamicview.impl.ResourceImpl#getIsRelatedResource <em>Is Related Resource</em>}</li>
* <li>{@link com.github.icelyframework.dynamicview.impl.ResourceImpl#getReadActivity <em>Read Activity</em>}</li>
* <li>{@link com.github.icelyframework.dynamicview.impl.ResourceImpl#getCreateActivity <em>Create Activity</em>}</li>
* <li>{@link com.github.icelyframework.dynamicview.impl.ResourceImpl#getDeleteActivity <em>Delete Activity</em>}</li>
* <li>{@link com.github.icelyframework.dynamicview.impl.ResourceImpl#getOtherActivities <em>Other Activities</em>}</li>
* <li>{@link com.github.icelyframework.dynamicview.impl.ResourceImpl#getAggregate <em>Aggregate</em>}</li>
* <li>{@link com.github.icelyframework.dynamicview.impl.ResourceImpl#getHasActivity <em>Has Activity</em>}</li>
* <li>{@link com.github.icelyframework.dynamicview.impl.ResourceImpl#getPathName <em>Path Name</em>}</li>
* <li>{@link com.github.icelyframework.dynamicview.impl.ResourceImpl#getHasPermission <em>Has Permission</em>}</li>
* </ul>
*
* @generated
*/
public abstract class ResourceImpl extends EObjectImpl implements Resource {
/**
* The default value of the '{@link #getName() <em>Name</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getName()
* @generated
* @ordered
*/
protected static final String NAME_EDEFAULT = null; // default of the 'name' attribute

/**
 * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
 * <!-- begin-user-doc -->
 * Display name of this resource; {@code null} until explicitly set.
 * <!-- end-user-doc -->
 * @see #getName()
 * @generated
 * @ordered
 */
protected String name = NAME_EDEFAULT;

/**
 * The cached value of the '{@link #getHasRelatedResource() <em>Has Related Resource</em>}' reference list.
 * <!-- begin-user-doc -->
 * Lazily created in {@link #getHasRelatedResource()}; maintained as the
 * inverse of {@code isRelatedResource}.
 * <!-- end-user-doc -->
 * @see #getHasRelatedResource()
 * @generated
 * @ordered
 */
protected EList<Resource> hasRelatedResource;

/**
 * The cached value of the '{@link #getIsRelatedResource() <em>Is Related Resource</em>}' reference.
 * <!-- begin-user-doc -->
 * Opposite end of {@code hasRelatedResource}; may hold an unresolved proxy
 * (resolved on access in {@link #getIsRelatedResource()}).
 * <!-- end-user-doc -->
 * @see #getIsRelatedResource()
 * @generated
 * @ordered
 */
protected Resource isRelatedResource;

/**
 * The cached value of the '{@link #getReadActivity() <em>Read Activity</em>}' reference.
 * <!-- begin-user-doc -->
 * May hold an unresolved proxy; resolved on access in {@link #getReadActivity()}.
 * <!-- end-user-doc -->
 * @see #getReadActivity()
 * @generated
 * @ordered
 */
protected Read readActivity;

/**
 * The cached value of the '{@link #getCreateActivity() <em>Create Activity</em>}' reference.
 * <!-- begin-user-doc -->
 * May hold an unresolved proxy; resolved on access in {@link #getCreateActivity()}.
 * <!-- end-user-doc -->
 * @see #getCreateActivity()
 * @generated
 * @ordered
 */
protected Create createActivity;

/**
 * The cached value of the '{@link #getDeleteActivity() <em>Delete Activity</em>}' reference.
 * <!-- begin-user-doc -->
 * May hold an unresolved proxy; resolved on access in {@link #getDeleteActivity()}.
 * <!-- end-user-doc -->
 * @see #getDeleteActivity()
 * @generated
 * @ordered
 */
protected Delete deleteActivity;

/**
 * The cached value of the '{@link #getOtherActivities() <em>Other Activities</em>}' reference list.
 * <!-- begin-user-doc -->
 * Lazily created in {@link #getOtherActivities()}.
 * <!-- end-user-doc -->
 * @see #getOtherActivities()
 * @generated
 * @ordered
 */
protected EList<Other> otherActivities;

/**
 * The cached value of the '{@link #getHasActivity() <em>Has Activity</em>}' containment reference list.
 * <!-- begin-user-doc -->
 * Containment list; lazily created in {@link #getHasActivity()}.
 * <!-- end-user-doc -->
 * @see #getHasActivity()
 * @generated
 * @ordered
 */
protected EList<ResourceActivity> hasActivity;

/**
 * The default value of the '{@link #getPathName() <em>Path Name</em>}' attribute.
 * <!-- begin-user-doc -->
 * Default of the {@code pathName} attribute.
 * <!-- end-user-doc -->
 * @see #getPathName()
 * @generated
 * @ordered
 */
protected static final String PATH_NAME_EDEFAULT = null;

/**
 * The cached value of the '{@link #getPathName() <em>Path Name</em>}' attribute.
 * <!-- begin-user-doc -->
 * Path name of this resource; {@code null} until explicitly set.
 * <!-- end-user-doc -->
 * @see #getPathName()
 * @generated
 * @ordered
 */
protected String pathName = PATH_NAME_EDEFAULT;

/**
 * The cached value of the '{@link #getHasPermission() <em>Has Permission</em>}' containment reference list.
 * <!-- begin-user-doc -->
 * Containment list; lazily created in {@link #getHasPermission()}.
 * <!-- end-user-doc -->
 * @see #getHasPermission()
 * @generated
 * @ordered
 */
protected EList<Permission> hasPermission;
/**
 * <!-- begin-user-doc -->
 * Creates a new resource with every feature at its default value.
 * <!-- end-user-doc -->
 * @generated
 */
protected ResourceImpl() {
    super();
}
/**
 * <!-- begin-user-doc -->
 * Identifies this object's metamodel class as {@code DynamicviewPackage.Literals.RESOURCE}.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected EClass eStaticClass() {
    return DynamicviewPackage.Literals.RESOURCE;
}
/**
 * <!-- begin-user-doc -->
 * Returns the cached {@code name} attribute (may be {@code null}).
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public String getName() {
    return name;
}
/**
 * <!-- begin-user-doc -->
 * Updates {@code name} and, only when listeners are attached, emits a SET
 * notification carrying the old and new values.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void setName(String newName) {
    String oldName = name;
    name = newName;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, DynamicviewPackage.RESOURCE__NAME, oldName, name));
}
/**
 * <!-- begin-user-doc -->
 * Lazily creates the backing list on first access. The list is an
 * inverse-resolving EList, so adds/removes here also update the opposite
 * {@code isRelatedResource} reference on the other resource.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public EList<Resource> getHasRelatedResource() {
    if (hasRelatedResource == null) {
        hasRelatedResource = new EObjectWithInverseResolvingEList<Resource>(Resource.class, this, DynamicviewPackage.RESOURCE__HAS_RELATED_RESOURCE, DynamicviewPackage.RESOURCE__IS_RELATED_RESOURCE);
    }
    return hasRelatedResource;
}
/**
 * <!-- begin-user-doc -->
 * Proxy-resolving getter: if the cached reference is an unresolved proxy it
 * is resolved in place, and a RESOLVE notification is emitted when the
 * resolution produced a different object.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Resource getIsRelatedResource() {
    if (isRelatedResource != null && isRelatedResource.eIsProxy()) {
        InternalEObject oldIsRelatedResource = (InternalEObject)isRelatedResource;
        isRelatedResource = (Resource)eResolveProxy(oldIsRelatedResource);
        if (isRelatedResource != oldIsRelatedResource) {
            if (eNotificationRequired())
                eNotify(new ENotificationImpl(this, Notification.RESOLVE, DynamicviewPackage.RESOURCE__IS_RELATED_RESOURCE, oldIsRelatedResource, isRelatedResource));
        }
    }
    return isRelatedResource;
}
/**
 * <!-- begin-user-doc -->
 * Returns the raw cached reference without proxy resolution.
 * <!-- end-user-doc -->
 * @generated
 */
public Resource basicGetIsRelatedResource() {
    return isRelatedResource;
}
/**
 * <!-- begin-user-doc -->
 * Low-level setter: updates the field and queues a SET notification on the
 * chain, WITHOUT maintaining the inverse reference (callers such as
 * {@code setIsRelatedResource} and {@code eInverseAdd} handle that).
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetIsRelatedResource(Resource newIsRelatedResource, NotificationChain msgs) {
    Resource oldIsRelatedResource = isRelatedResource;
    isRelatedResource = newIsRelatedResource;
    if (eNotificationRequired()) {
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, DynamicviewPackage.RESOURCE__IS_RELATED_RESOURCE, oldIsRelatedResource, newIsRelatedResource);
        if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
}
/**
 * <!-- begin-user-doc -->
 * Public setter that maintains the bidirectional inverse: removes this
 * object from the old target's {@code hasRelatedResource} list, adds it to
 * the new target's list, applies the basic set, then dispatches all queued
 * notifications at once. A same-value call only emits a touch notification.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void setIsRelatedResource(Resource newIsRelatedResource) {
    if (newIsRelatedResource != isRelatedResource) {
        NotificationChain msgs = null;
        if (isRelatedResource != null)
            msgs = ((InternalEObject)isRelatedResource).eInverseRemove(this, DynamicviewPackage.RESOURCE__HAS_RELATED_RESOURCE, Resource.class, msgs);
        if (newIsRelatedResource != null)
            msgs = ((InternalEObject)newIsRelatedResource).eInverseAdd(this, DynamicviewPackage.RESOURCE__HAS_RELATED_RESOURCE, Resource.class, msgs);
        msgs = basicSetIsRelatedResource(newIsRelatedResource, msgs);
        if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, DynamicviewPackage.RESOURCE__IS_RELATED_RESOURCE, newIsRelatedResource, newIsRelatedResource));
}
/**
 * <!-- begin-user-doc -->
 * Proxy-resolving getter for the {@code readActivity} reference; emits a
 * RESOLVE notification when a proxy resolves to a different object.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Read getReadActivity() {
    if (readActivity != null && readActivity.eIsProxy()) {
        InternalEObject oldReadActivity = (InternalEObject)readActivity;
        readActivity = (Read)eResolveProxy(oldReadActivity);
        if (readActivity != oldReadActivity) {
            if (eNotificationRequired())
                eNotify(new ENotificationImpl(this, Notification.RESOLVE, DynamicviewPackage.RESOURCE__READ_ACTIVITY, oldReadActivity, readActivity));
        }
    }
    return readActivity;
}
/**
 * <!-- begin-user-doc -->
 * Returns the raw cached reference without proxy resolution.
 * <!-- end-user-doc -->
 * @generated
 */
public Read basicGetReadActivity() {
    return readActivity;
}
/**
 * <!-- begin-user-doc -->
 * Updates the non-inverse {@code readActivity} reference and emits a SET
 * notification when listeners are attached.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void setReadActivity(Read newReadActivity) {
    Read oldReadActivity = readActivity;
    readActivity = newReadActivity;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, DynamicviewPackage.RESOURCE__READ_ACTIVITY, oldReadActivity, readActivity));
}
/**
 * <!-- begin-user-doc -->
 * Proxy-resolving getter for the {@code createActivity} reference; emits a
 * RESOLVE notification when a proxy resolves to a different object.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Create getCreateActivity() {
    if (createActivity != null && createActivity.eIsProxy()) {
        InternalEObject oldCreateActivity = (InternalEObject)createActivity;
        createActivity = (Create)eResolveProxy(oldCreateActivity);
        if (createActivity != oldCreateActivity) {
            if (eNotificationRequired())
                eNotify(new ENotificationImpl(this, Notification.RESOLVE, DynamicviewPackage.RESOURCE__CREATE_ACTIVITY, oldCreateActivity, createActivity));
        }
    }
    return createActivity;
}
/**
 * <!-- begin-user-doc -->
 * Returns the raw cached reference without proxy resolution.
 * <!-- end-user-doc -->
 * @generated
 */
public Create basicGetCreateActivity() {
    return createActivity;
}
/**
 * <!-- begin-user-doc -->
 * Updates the non-inverse {@code createActivity} reference and emits a SET
 * notification when listeners are attached.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void setCreateActivity(Create newCreateActivity) {
    Create oldCreateActivity = createActivity;
    createActivity = newCreateActivity;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, DynamicviewPackage.RESOURCE__CREATE_ACTIVITY, oldCreateActivity, createActivity));
}
/**
 * <!-- begin-user-doc -->
 * Proxy-resolving getter for the {@code deleteActivity} reference; emits a
 * RESOLVE notification when a proxy resolves to a different object.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Delete getDeleteActivity() {
    if (deleteActivity != null && deleteActivity.eIsProxy()) {
        InternalEObject oldDeleteActivity = (InternalEObject)deleteActivity;
        deleteActivity = (Delete)eResolveProxy(oldDeleteActivity);
        if (deleteActivity != oldDeleteActivity) {
            if (eNotificationRequired())
                eNotify(new ENotificationImpl(this, Notification.RESOLVE, DynamicviewPackage.RESOURCE__DELETE_ACTIVITY, oldDeleteActivity, deleteActivity));
        }
    }
    return deleteActivity;
}
/**
 * <!-- begin-user-doc -->
 * Returns the raw cached reference without proxy resolution.
 * <!-- end-user-doc -->
 * @generated
 */
public Delete basicGetDeleteActivity() {
    return deleteActivity;
}
/**
 * <!-- begin-user-doc -->
 * Updates the non-inverse {@code deleteActivity} reference and emits a SET
 * notification when listeners are attached.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void setDeleteActivity(Delete newDeleteActivity) {
    Delete oldDeleteActivity = deleteActivity;
    deleteActivity = newDeleteActivity;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, DynamicviewPackage.RESOURCE__DELETE_ACTIVITY, oldDeleteActivity, deleteActivity));
}
/**
 * <!-- begin-user-doc -->
 * Lazily creates the backing proxy-resolving list on first access; this
 * reference list has no inverse to maintain.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public EList<Other> getOtherActivities() {
    if (otherActivities == null) {
        otherActivities = new EObjectResolvingEList<Other>(Other.class, this, DynamicviewPackage.RESOURCE__OTHER_ACTIVITIES);
    }
    return otherActivities;
}
/**
 * <!-- begin-user-doc -->
 * Returns this resource's container cast to Aggregate, or {@code null} when
 * it is not currently contained via the aggregate feature.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Aggregate getAggregate() {
    if (eContainerFeatureID() != DynamicviewPackage.RESOURCE__AGGREGATE) return null;
    return (Aggregate)eInternalContainer();
}
/**
 * <!-- begin-user-doc -->
 * Low-level container update via {@code eBasicSetContainer}; inverse
 * handling is done by the callers ({@code setAggregate}, {@code eInverseAdd}).
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetAggregate(Aggregate newAggregate, NotificationChain msgs) {
    msgs = eBasicSetContainer((InternalEObject)newAggregate, DynamicviewPackage.RESOURCE__AGGREGATE, msgs);
    return msgs;
}
/**
 * <!-- begin-user-doc -->
 * Moves this resource into {@code newAggregate}: rejects recursive
 * containment, detaches from the current container, registers with the new
 * container's {@code hasResource} inverse, then dispatches all queued
 * notifications. A same-container call only emits a touch notification.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void setAggregate(Aggregate newAggregate) {
    if (newAggregate != eInternalContainer() || (eContainerFeatureID() != DynamicviewPackage.RESOURCE__AGGREGATE && newAggregate != null)) {
        if (EcoreUtil.isAncestor(this, newAggregate))
            throw new IllegalArgumentException("Recursive containment not allowed for " + toString());
        NotificationChain msgs = null;
        if (eInternalContainer() != null)
            msgs = eBasicRemoveFromContainer(msgs);
        if (newAggregate != null)
            msgs = ((InternalEObject)newAggregate).eInverseAdd(this, DynamicviewPackage.AGGREGATE__HAS_RESOURCE, Aggregate.class, msgs);
        msgs = basicSetAggregate(newAggregate, msgs);
        if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, DynamicviewPackage.RESOURCE__AGGREGATE, newAggregate, newAggregate));
}
/**
 * <!-- begin-user-doc -->
 * Lazily creates the containment list on first access; elements added here
 * become contained children of this resource.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public EList<ResourceActivity> getHasActivity() {
    if (hasActivity == null) {
        hasActivity = new EObjectContainmentEList<ResourceActivity>(ResourceActivity.class, this, DynamicviewPackage.RESOURCE__HAS_ACTIVITY);
    }
    return hasActivity;
}
/**
 * <!-- begin-user-doc -->
 * Returns the cached {@code pathName} attribute (may be {@code null}).
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public String getPathName() {
    return pathName;
}
/**
 * <!-- begin-user-doc -->
 * Updates {@code pathName} and, when listeners are attached, emits a SET
 * notification carrying the old and new values.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void setPathName(String newPathName) {
    String oldPathName = pathName;
    pathName = newPathName;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, DynamicviewPackage.RESOURCE__PATH_NAME, oldPathName, pathName));
}
/**
 * <!-- begin-user-doc -->
 * Lazily creates the containment list on first access; elements added here
 * become contained children of this resource.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public EList<Permission> getHasPermission() {
    if (hasPermission == null) {
        hasPermission = new EObjectContainmentEList<Permission>(Permission.class, this, DynamicviewPackage.RESOURCE__HAS_PERMISSION);
    }
    return hasPermission;
}
/**
 * <!-- begin-user-doc -->
 * EMF inverse-add hook, called when another object links to this one through
 * a bidirectional feature. For the single-valued {@code isRelatedResource}
 * the previous target is first inverse-removed; for the container feature
 * the current container is detached before the new one is set. The exact
 * order of these operations is part of the EMF protocol.
 * <!-- end-user-doc -->
 * @generated
 */
@SuppressWarnings("unchecked")
@Override
public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
        case DynamicviewPackage.RESOURCE__HAS_RELATED_RESOURCE:
            return ((InternalEList<InternalEObject>)(InternalEList<?>)getHasRelatedResource()).basicAdd(otherEnd, msgs);
        case DynamicviewPackage.RESOURCE__IS_RELATED_RESOURCE:
            if (isRelatedResource != null)
                msgs = ((InternalEObject)isRelatedResource).eInverseRemove(this, DynamicviewPackage.RESOURCE__HAS_RELATED_RESOURCE, Resource.class, msgs);
            return basicSetIsRelatedResource((Resource)otherEnd, msgs);
        case DynamicviewPackage.RESOURCE__AGGREGATE:
            if (eInternalContainer() != null)
                msgs = eBasicRemoveFromContainer(msgs);
            return basicSetAggregate((Aggregate)otherEnd, msgs);
    }
    return super.eInverseAdd(otherEnd, featureID, msgs);
}
/**
 * <!-- begin-user-doc -->
 * EMF inverse-remove hook: detaches {@code otherEnd} from the feature
 * identified by {@code featureID} (clearing single-valued references to
 * {@code null}, removing from lists), queueing notifications on the chain.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
        case DynamicviewPackage.RESOURCE__HAS_RELATED_RESOURCE:
            return ((InternalEList<?>)getHasRelatedResource()).basicRemove(otherEnd, msgs);
        case DynamicviewPackage.RESOURCE__IS_RELATED_RESOURCE:
            return basicSetIsRelatedResource(null, msgs);
        case DynamicviewPackage.RESOURCE__AGGREGATE:
            return basicSetAggregate(null, msgs);
        case DynamicviewPackage.RESOURCE__HAS_ACTIVITY:
            return ((InternalEList<?>)getHasActivity()).basicRemove(otherEnd, msgs);
        case DynamicviewPackage.RESOURCE__HAS_PERMISSION:
            return ((InternalEList<?>)getHasPermission()).basicRemove(otherEnd, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
 * <!-- begin-user-doc -->
 * When contained via the aggregate feature, removes this resource from the
 * container's {@code hasResource} inverse list; otherwise defers to super.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public NotificationChain eBasicRemoveFromContainerFeature(NotificationChain msgs) {
    switch (eContainerFeatureID()) {
        case DynamicviewPackage.RESOURCE__AGGREGATE:
            return eInternalContainer().eInverseRemove(this, DynamicviewPackage.AGGREGATE__HAS_RESOURCE, Aggregate.class, msgs);
    }
    return super.eBasicRemoveFromContainerFeature(msgs);
}
/**
 * <!-- begin-user-doc -->
 * Reflective accessor: returns the value of the feature identified by
 * {@code featureID}. When {@code resolve} is false, single-valued references
 * are returned unresolved via the {@code basicGet*} variants.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
        case DynamicviewPackage.RESOURCE__NAME:
            return getName();
        case DynamicviewPackage.RESOURCE__HAS_RELATED_RESOURCE:
            return getHasRelatedResource();
        case DynamicviewPackage.RESOURCE__IS_RELATED_RESOURCE:
            if (resolve) return getIsRelatedResource();
            return basicGetIsRelatedResource();
        case DynamicviewPackage.RESOURCE__READ_ACTIVITY:
            if (resolve) return getReadActivity();
            return basicGetReadActivity();
        case DynamicviewPackage.RESOURCE__CREATE_ACTIVITY:
            if (resolve) return getCreateActivity();
            return basicGetCreateActivity();
        case DynamicviewPackage.RESOURCE__DELETE_ACTIVITY:
            if (resolve) return getDeleteActivity();
            return basicGetDeleteActivity();
        case DynamicviewPackage.RESOURCE__OTHER_ACTIVITIES:
            return getOtherActivities();
        case DynamicviewPackage.RESOURCE__AGGREGATE:
            return getAggregate();
        case DynamicviewPackage.RESOURCE__HAS_ACTIVITY:
            return getHasActivity();
        case DynamicviewPackage.RESOURCE__PATH_NAME:
            return getPathName();
        case DynamicviewPackage.RESOURCE__HAS_PERMISSION:
            return getHasPermission();
    }
    return super.eGet(featureID, resolve, coreType);
}
/**
 * <!-- begin-user-doc -->
 * Reflective mutator: sets the feature identified by {@code featureID} to
 * {@code newValue}. Multi-valued features are cleared and then repopulated
 * from the given collection; single-valued features delegate to the typed
 * setters so notifications and inverses are maintained.
 * <!-- end-user-doc -->
 * @generated
 */
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
    switch (featureID) {
        case DynamicviewPackage.RESOURCE__NAME:
            setName((String)newValue);
            return;
        case DynamicviewPackage.RESOURCE__HAS_RELATED_RESOURCE:
            getHasRelatedResource().clear();
            getHasRelatedResource().addAll((Collection<? extends Resource>)newValue);
            return;
        case DynamicviewPackage.RESOURCE__IS_RELATED_RESOURCE:
            setIsRelatedResource((Resource)newValue);
            return;
        case DynamicviewPackage.RESOURCE__READ_ACTIVITY:
            setReadActivity((Read)newValue);
            return;
        case DynamicviewPackage.RESOURCE__CREATE_ACTIVITY:
            setCreateActivity((Create)newValue);
            return;
        case DynamicviewPackage.RESOURCE__DELETE_ACTIVITY:
            setDeleteActivity((Delete)newValue);
            return;
        case DynamicviewPackage.RESOURCE__OTHER_ACTIVITIES:
            getOtherActivities().clear();
            getOtherActivities().addAll((Collection<? extends Other>)newValue);
            return;
        case DynamicviewPackage.RESOURCE__AGGREGATE:
            setAggregate((Aggregate)newValue);
            return;
        case DynamicviewPackage.RESOURCE__HAS_ACTIVITY:
            getHasActivity().clear();
            getHasActivity().addAll((Collection<? extends ResourceActivity>)newValue);
            return;
        case DynamicviewPackage.RESOURCE__PATH_NAME:
            setPathName((String)newValue);
            return;
        case DynamicviewPackage.RESOURCE__HAS_PERMISSION:
            getHasPermission().clear();
            getHasPermission().addAll((Collection<? extends Permission>)newValue);
            return;
    }
    super.eSet(featureID, newValue);
}
/**
 * <!-- begin-user-doc -->
 * Reflective unset: restores the feature identified by {@code featureID} to
 * its default (attributes to their {@code *_EDEFAULT}, references to
 * {@code null}, lists to empty).
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eUnset(int featureID) {
    switch (featureID) {
        case DynamicviewPackage.RESOURCE__NAME:
            setName(NAME_EDEFAULT);
            return;
        case DynamicviewPackage.RESOURCE__HAS_RELATED_RESOURCE:
            getHasRelatedResource().clear();
            return;
        case DynamicviewPackage.RESOURCE__IS_RELATED_RESOURCE:
            setIsRelatedResource((Resource)null);
            return;
        case DynamicviewPackage.RESOURCE__READ_ACTIVITY:
            setReadActivity((Read)null);
            return;
        case DynamicviewPackage.RESOURCE__CREATE_ACTIVITY:
            setCreateActivity((Create)null);
            return;
        case DynamicviewPackage.RESOURCE__DELETE_ACTIVITY:
            setDeleteActivity((Delete)null);
            return;
        case DynamicviewPackage.RESOURCE__OTHER_ACTIVITIES:
            getOtherActivities().clear();
            return;
        case DynamicviewPackage.RESOURCE__AGGREGATE:
            setAggregate((Aggregate)null);
            return;
        case DynamicviewPackage.RESOURCE__HAS_ACTIVITY:
            getHasActivity().clear();
            return;
        case DynamicviewPackage.RESOURCE__PATH_NAME:
            setPathName(PATH_NAME_EDEFAULT);
            return;
        case DynamicviewPackage.RESOURCE__HAS_PERMISSION:
            getHasPermission().clear();
            return;
    }
    super.eUnset(featureID);
}
/**
 * <!-- begin-user-doc -->
 * Reflective "is set" test: a feature is set when it differs from its
 * default — non-default attribute value, non-null reference, or non-empty
 * list. Uncreated lazy lists ({@code null}) count as unset.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean eIsSet(int featureID) {
    switch (featureID) {
        case DynamicviewPackage.RESOURCE__NAME:
            return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
        case DynamicviewPackage.RESOURCE__HAS_RELATED_RESOURCE:
            return hasRelatedResource != null && !hasRelatedResource.isEmpty();
        case DynamicviewPackage.RESOURCE__IS_RELATED_RESOURCE:
            return isRelatedResource != null;
        case DynamicviewPackage.RESOURCE__READ_ACTIVITY:
            return readActivity != null;
        case DynamicviewPackage.RESOURCE__CREATE_ACTIVITY:
            return createActivity != null;
        case DynamicviewPackage.RESOURCE__DELETE_ACTIVITY:
            return deleteActivity != null;
        case DynamicviewPackage.RESOURCE__OTHER_ACTIVITIES:
            return otherActivities != null && !otherActivities.isEmpty();
        case DynamicviewPackage.RESOURCE__AGGREGATE:
            return getAggregate() != null;
        case DynamicviewPackage.RESOURCE__HAS_ACTIVITY:
            return hasActivity != null && !hasActivity.isEmpty();
        case DynamicviewPackage.RESOURCE__PATH_NAME:
            return PATH_NAME_EDEFAULT == null ? pathName != null : !PATH_NAME_EDEFAULT.equals(pathName);
        case DynamicviewPackage.RESOURCE__HAS_PERMISSION:
            return hasPermission != null && !hasPermission.isEmpty();
    }
    return super.eIsSet(featureID);
}
/**
 * <!-- begin-user-doc -->
 * Debug representation of the form
 * {@code <super> (name: <name>, pathName: <pathName>)}; unresolved proxies
 * keep the default representation. Marked {@code @generated NOT} so the EMF
 * generator preserves this hand-written form.
 * <!-- end-user-doc -->
 * @generated NOT
 */
@Override
public String toString() {
    if (eIsProxy()) {
        return super.toString();
    }
    // Plain concatenation yields byte-identical output to the generated
    // StringBuilder version (null fields render as "null" in both).
    return super.toString()
            + " (name: " + name
            + ", pathName: " + pathName
            + ")";
}
} //ResourceImpl
|
ScalablyTyped/SlinkyTyped | q/qlik/src/main/scala/typingsSlinky/qlik/mod/QTable.scala | package typingsSlinky.qlik.mod
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/**
 * Facade for a Qlik QTable JavaScript object (generated ScalablyTyped
 * binding); members map one-to-one onto the underlying native object.
 */
@js.native
trait QTable extends StObject {
  
  /** Column count of the table (name-derived; a JS number). */
  var colCount: Double = js.native
  
  /** Exports the table data; the callback receives the export URL (per the annotated parameter). */
  def exportData(options: ExportDataOptions, callback: js.Function1[/* url */ String, Unit]): Unit = js.native
  
  /** Looks up a column index by field name; undefined when absent. */
  def getColByName(fld: String): js.UndefOr[Double] = js.native
  
  /** Requests additional rows from the native table (fire-and-forget). */
  def getMoreData(): Unit = js.native
  
  /** Column headers. */
  var headers: js.Array[QHeader] = js.native
  
  /** Row count of the table (name-derived; a JS number). */
  var rowCount: Double = js.native
  
  /** Data rows. */
  var rows: js.Array[QRow] = js.native
  
  /** Totals row cells. */
  var totals: js.Array[QMeasureCell] = js.native
}
/**
 * Companion for [[QTable]]: a literal-based constructor plus a fluent
 * mutable-builder syntax (generated ScalablyTyped pattern).
 */
object QTable {
  
  /**
   * Builds a `QTable` as a plain JavaScript object literal containing all
   * members, then casts it to the facade type.
   */
  @scala.inline
  def apply(
    colCount: Double,
    exportData: (ExportDataOptions, js.Function1[/* url */ String, Unit]) => Unit,
    getColByName: String => js.UndefOr[Double],
    getMoreData: () => Unit,
    headers: js.Array[QHeader],
    rowCount: Double,
    rows: js.Array[QRow],
    totals: js.Array[QMeasureCell]
  ): QTable = {
    val __obj = js.Dynamic.literal(colCount = colCount.asInstanceOf[js.Any], exportData = js.Any.fromFunction2(exportData), getColByName = js.Any.fromFunction1(getColByName), getMoreData = js.Any.fromFunction0(getMoreData), headers = headers.asInstanceOf[js.Any], rowCount = rowCount.asInstanceOf[js.Any], rows = rows.asInstanceOf[js.Any], totals = totals.asInstanceOf[js.Any])
    __obj.asInstanceOf[QTable]
  }
  
  /**
   * Zero-allocation fluent setters: each `setX` mutates the wrapped object
   * via [[StObject.set]] and returns the same instance for chaining.
   */
  @scala.inline
  implicit class QTableMutableBuilder[Self <: QTable] (val x: Self) extends AnyVal {
    
    @scala.inline
    def setColCount(value: Double): Self = StObject.set(x, "colCount", value.asInstanceOf[js.Any])
    
    @scala.inline
    def setExportData(value: (ExportDataOptions, js.Function1[/* url */ String, Unit]) => Unit): Self = StObject.set(x, "exportData", js.Any.fromFunction2(value))
    
    @scala.inline
    def setGetColByName(value: String => js.UndefOr[Double]): Self = StObject.set(x, "getColByName", js.Any.fromFunction1(value))
    
    @scala.inline
    def setGetMoreData(value: () => Unit): Self = StObject.set(x, "getMoreData", js.Any.fromFunction0(value))
    
    @scala.inline
    def setHeaders(value: js.Array[QHeader]): Self = StObject.set(x, "headers", value.asInstanceOf[js.Any])
    
    @scala.inline
    def setHeadersVarargs(value: QHeader*): Self = StObject.set(x, "headers", js.Array(value :_*))
    
    @scala.inline
    def setRowCount(value: Double): Self = StObject.set(x, "rowCount", value.asInstanceOf[js.Any])
    
    @scala.inline
    def setRows(value: js.Array[QRow]): Self = StObject.set(x, "rows", value.asInstanceOf[js.Any])
    
    @scala.inline
    def setRowsVarargs(value: QRow*): Self = StObject.set(x, "rows", js.Array(value :_*))
    
    @scala.inline
    def setTotals(value: js.Array[QMeasureCell]): Self = StObject.set(x, "totals", value.asInstanceOf[js.Any])
    
    @scala.inline
    def setTotalsVarargs(value: QMeasureCell*): Self = StObject.set(x, "totals", js.Array(value :_*))
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.