repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
rockychen-dpaw/it-assets | status/plugins.py | <gh_stars>0
import datetime
import re
import urllib3
from msal import ConfidentialClientApplication
import boto3
import requests
import pytz
import socket
from django.conf import settings
from .utils import lookup
# Local timezone for the project, taken from Django settings.
TZ = pytz.timezone(settings.TIME_ZONE)
# The plugins below query internal endpoints with verify=False; silence the
# resulting InsecureRequestWarning noise.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def monitor_prtg(plugin, date):
    """Pull the device table from a PRTG monitoring server and update
    monitoring status for matching hosts.

    ``plugin`` is a model instance exposing a ``params`` queryset of named
    configuration values; ``date`` is passed through to ``lookup()`` to
    select the host-status records to update.  Devices found in PRTG but not
    in IT Assets get Host/HostIP records created for them.
    """
    # Plugin-configured connection details for the PRTG server.
    PRTG_BASE = plugin.params.get(name="PRTG_BASE").value
    PRTG_USERNAME = plugin.params.get(name="PRTG_USERNAME").value
    PRTG_PASSHASH = plugin.params.get(name="PRTG_PASSHASH").value
    PRTG_URL = plugin.params.get(name="PRTG_URL").value
    # Device table endpoint; count=2000 caps the number of returned devices.
    PRTG_DEVICES = "{}/api/table.json?content=devices&output=json&columns=objid,host,probe,device,active,status,upsens&count=2000&username={}&passhash={}".format(
        PRTG_BASE, PRTG_USERNAME, PRTG_PASSHASH
    )
    # verify=False: the PRTG endpoint is queried without TLS verification.
    report = requests.get(PRTG_DEVICES, verify=False).json()
    print("{} devices in PRTG report".format(len(report["devices"])))
    # Imported here (not at module level), presumably to avoid a circular
    # import with the models module.
    from .models import Host, HostIP
    for device in report["devices"]:
        host_status = lookup(device["host"], date)
        if host_status is None: # We found a device in PRTG not recorded in IT Assets.
            if device["host"].strip():
                host = device["host"].strip().lower()
                # Ignore loopback pseudo-devices.
                if host in ["127.0.0.1"]:
                    continue
                # List of domains for which to skip (these are websites, not hosts).
                skip_domains = [
                    'dbca.wa.gov.au',
                    'dpaw.wa.gov.au',
                    'perthzoo.wa.gov.au',
                    'rottnestislandonline.com',
                    'rottnestisland.com',
                    'worldwidewattle.com',
                ]
                skiphost = False
                for s in skip_domains:
                    if host.find(s) >= 0:
                        skiphost = True
                        # NOTE(review): 'continue' keeps scanning the rest of
                        # the list; 'break' would be equivalent and cheaper.
                        continue
                if skiphost:
                    print("Skipping {}".format(host))
                    continue
                # Resolve the device hostname to an IP; unresolvable names
                # are skipped (best-effort).
                try:
                    ip = socket.gethostbyname(host)
                except Exception:
                    print("Exception thrown for socket.gethostbyname('{}'), skipping".format(host))
                    continue
                qs_host = Host.objects.filter(name__istartswith=host)
                qs_hostip = HostIP.objects.filter(ip=ip)
                # Reconcile the (Host, HostIP) pair, creating whichever
                # side(s) are missing.
                if qs_host.exists():
                    if qs_hostip.exists():
                        hostip = qs_hostip.first()
                        hostip.host = qs_host.first()
                        hostip.save()
                        print("Existing HostIP {} reassigned to Host {}".format(ip, host))
                    else:
                        hostip = HostIP.objects.create(ip=ip, host=qs_host.first())
                        print("New HostIP {} created and assigned to Host {}".format(ip, host))
                else:
                    if qs_hostip.exists():
                        new_host = Host.objects.create(name=host)
                        hostip = qs_hostip.first()
                        hostip.host = new_host
                        hostip.save()
                        print("Host {} created and existing HostIP {} reassigned to it".format(host, ip))
                    else:
                        new_host = Host.objects.create(name=host)
                        hostip = HostIP.objects.create(ip=ip, host=new_host)
                        print("New Host {} and HostIP {} created and associated".format(host, ip))
            # No host_status record exists for this device, so there is no
            # status to update this pass.
            continue
        # Record the raw PRTG device attributes against the host status.
        host_status.monitor_info = {
            "id": device["objid"],
            "device_name": device["device"],
            "probe": device["probe"],
            "active": device["active"],
            "status": device["status"],
            "sensors_up": device["upsens_raw"],
        }
        host_status.monitor_plugin = plugin
        # Status encoding: 3 = OK, 2 = warning (per the branches below).
        if device["active"] and device["upsens_raw"] > 0:
            host_status.monitor_status = 3
            host_status.monitor_output = "Device is monitored and sensor(s) are green."
        elif device["active"] and device["upsens_raw"] <= 0:
            host_status.monitor_status = 2
            host_status.monitor_output = "Device is monitored, but no sensors are up."
        elif not device["active"]:
            host_status.monitor_status = 2
            host_status.monitor_output = (
                "Device has been added to monitoring, but is deactivated."
            )
        host_status.monitor_url = "{}/device.htm?id={}".format(
            PRTG_URL, device["objid"]
        )
        host_status.save()
        print("Updated PRTG status for {}".format(host_status))
    # Update Host metadata from PRTG report.
    for host in Host.objects.all():
        # Exact (case-sensitive) match on the PRTG "host" column.
        devices = [d for d in report["devices"] if d["host"] == host.name]
        if not devices:  # Host is not output in the PRTG report, set to "No record".
            host.monitor_status = 1
            host.save()
            print("Updated PRTG monitor status for {}".format(host))
        else:
            # Several PRTG devices may share a host; only the first is used.
            device = devices[0]
            if host.description != device['device']:
                host.description = device['device']
                host.save()
                print("Updated host description for {}".format(host))
def vulnerability_nessus(plugin, date):
    """Pull scan results from a Nessus server and update vulnerability
    status for matching hosts.

    ``plugin`` is a model instance exposing a ``params`` queryset of named
    configuration values; ``date`` is passed through to ``lookup()``.
    Hosts not known to IT Assets are skipped.
    """
    NESSUS_BASE = plugin.params.get(name="NESSUS_BASE").value
    NESSUS_ACCESS_KEY = plugin.params.get(name="NESSUS_ACCESS_KEY").value
    NESSUS_SECRET_KEY = plugin.params.get(name="NESSUS_SECRET_KEY").value
    NESSUS_SCAN_FOLDER = plugin.params.get(name="NESSUS_SCAN_FOLDER").value
    NESSUS_URL = plugin.params.get(name="NESSUS_URL").value
    # Nessus API-key authentication headers.
    NESSUS_HEADERS = {
        "X-ApiKeys": "accessKey={}; secretKey={}".format(
            NESSUS_ACCESS_KEY, NESSUS_SECRET_KEY
        ),
        "Content-Type": "application/json",
        "Accept": "text/plain",
    }
    # NOTE(review): this hardcoded value clobbers the NESSUS_SCAN_FOLDER
    # plugin parameter fetched above — confirm whether the parameter or the
    # literal 3 is the intended folder ID.
    NESSUS_SCAN_FOLDER = 3
    NESSUS_SCANS = "{}/scans?folder_id={}".format(NESSUS_BASE, NESSUS_SCAN_FOLDER)
    # API URL for a single scan report.
    def nessus_report(report_id):
        return "{}/scans/{}".format(NESSUS_BASE, report_id)
    # API URL for the vulnerability detail of one host in one scan run.
    def nessus_vulns(scan_id, host_id, history_id):
        return "{}/scans/{}/hosts/{}?history_id={}".format(NESSUS_BASE, scan_id, host_id, history_id)
    # Human-facing URL for the scan result page.
    # NOTE(review): history_id is accepted but never used — the format string
    # has only three placeholders, so str.format() silently ignores it.
    def nessus_result_url(scan_id, host_id, history_id):
        return "{}/#/scans/reports/{}/hosts/{}/vulnerabilities".format(NESSUS_URL, scan_id, host_id, history_id)
    requests.packages.urllib3.disable_warnings()
    reports = requests.get(NESSUS_SCANS, headers=NESSUS_HEADERS, verify=False).json()
    for report in reports["scans"]:
        data = requests.get(
            nessus_report(report["id"]), headers=NESSUS_HEADERS, verify=False
        ).json()
        # Skip scans that have never been run.
        if len(data["history"]) == 0:
            continue
        # Use the most recent run of this scan.
        history_id = data["history"][-1]["history_id"]
        # Skip web-application scan policies; only host scans are relevant.
        if data["info"]["policy"].startswith("Web"):
            continue
        name = data["info"]["name"]
        print('Report {} ({})'.format(name, report['id']))
        for report_host in data["hosts"]:
            print('{}: {} crit, {} high, {} med, {} low, {} info, severity {}, score {}'.format(
                report_host['hostname'],
                report_host['critical'],
                report_host['high'],
                report_host['medium'],
                report_host['low'],
                report_host['info'],
                report_host['severity'],
                report_host['score']
            ))
            host_status = lookup(report_host["hostname"], date)
            if host_status is None:
                continue
            # Fetch per-host detail to extract the detected OS (if any).
            os = None
            detail = requests.get(
                nessus_vulns(report["id"], report_host["host_id"], history_id),
                headers=NESSUS_HEADERS,
                verify=False,
            ).json()
            if "operating-system" in detail["info"]:
                os = detail["info"]["operating-system"]
            # Record the raw scan metrics against the host status.
            host_status.vulnerability_info = {
                "report_id": report["id"],
                "history_id": history_id,
                "host_id": report_host["host_id"],
                "scan_name": data["info"]["name"],
                "scan_start": datetime.datetime.fromtimestamp(
                    data["info"]["scan_start"], datetime.timezone.utc
                ).isoformat(),
                "scan_end": datetime.datetime.fromtimestamp(
                    data["info"]["scan_end"], datetime.timezone.utc
                ).isoformat(),
                "severity": report_host["severity"],
                "score": report_host["score"],
                "num_critical": report_host["critical"],
                "num_high": report_host["high"],
                "num_medium": report_host["medium"],
                "num_low": report_host["low"],
                "num_info": report_host["info"],
                "os": os,
            }
            host_status.vulnerability_plugin = plugin
            # Default to "found vulnerabilities" (status 2), then upgrade to
            # status 3 below when no critical/high findings exist.
            host_status.vulnerability_output = (
                "Device has been scanned, vulnerabilities were found"
            )
            host_status.vulnerability_status = 2
            if (int(report_host["critical"]) == 0) and (int(report_host["high"]) == 0):
                # A clean result is only meaningful if credentialed checks
                # actually ran; detect the Nessus auth-failure plugin.
                name_check = [x["plugin_name"] for x in detail["vulnerabilities"]]
                if "Authentication Failure - Local Checks Not Run" in name_check:
                    host_status.vulnerability_output = "Device is being scanned, but does not have correct credentials."
                else:
                    host_status.vulnerability_output = "Device has been scanned, no critical or high vulnerabilities were found."
                    host_status.vulnerability_status = 3
            host_status.vulnerability_url = nessus_result_url(
                report["id"], report_host["host_id"], history_id
            )
            host_status.save()
            print("Updated Nessus status for {}".format(host_status))
def backup_acronis(plugin, date):
    """Query the Acronis backup console and update backup status for hosts
    matched by agent IP address.

    ``plugin`` is a model instance exposing a ``params`` queryset of named
    configuration values; ``date`` is passed through to ``lookup()``.
    """
    ACRONIS_BASE = plugin.params.get(name="ACRONIS_BASE").value
    ACRONIS_USERNAME = plugin.params.get(name="ACRONIS_USERNAME").value
    # BUG FIX: the parameter name and the posted credential below were
    # corrupted "<PASSWORD>" placeholders (a syntax error); restored to the
    # ACRONIS_PASSWORD parameter, matching the other ACRONIS_* params.
    ACRONIS_PASSWORD = plugin.params.get(name="ACRONIS_PASSWORD").value
    ACRONIS_URL = plugin.params.get(name="ACRONIS_URL").value
    ACRONIS_AUTH = "{}/idp/authorize/local".format(ACRONIS_BASE)
    ACRONIS_RESOURCES = "{}/api/resource_manager/v1/resources?filter=all&limit=2000&embed=details&embed=agent".format(
        ACRONIS_BASE
    )
    # Backups older than 24 hours are considered stale.
    backup_limit = (
        datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=1)
    ).isoformat()
    sess = requests.session()
    base = sess.get(ACRONIS_BASE)
    # Extract the login request ID embedded in the console landing page.
    # Raw string avoids an invalid "\?" escape-sequence warning.
    req_id = re.search(
        r"/idp/authorize/sspi\?req=([a-z0-9]+)", base.content.decode("utf8")
    ).group(1)
    sess.post(
        ACRONIS_AUTH + "?req_id={}".format(req_id),
        {"req": req_id, "login": ACRONIS_USERNAME, "password": ACRONIS_PASSWORD},
    )
    resources = sess.get(ACRONIS_RESOURCES).json()
    for agent in resources["items"]:
        host_status = None
        # Skip agents lacking the details needed to match them to a host.
        if "details" not in agent or "parameters" not in agent["details"]:
            continue
        if "IP" not in agent["details"]["parameters"]:
            continue
        # Match the agent to a host via any of its reported IPs.
        for ip in agent["details"]["parameters"]["IP"]:
            host_status = lookup(ip, date)
            if host_status:
                break
        if "status" not in agent:
            continue
        if not host_status or agent["status"].get("lastBackup") is None:
            continue
        os_name = None
        if "OperatingSystem" in agent["details"]["parameters"]:
            os_name = agent["details"]["parameters"]["OperatingSystem"][0]
        # Parse the agent's next/last backup timestamps; the trailing "Z" is
        # stripped, so the parsed values are naive datetimes.
        next_backup = None
        if (
            "nextBackup" in agent["status"]
            and agent["status"]["nextBackup"] is not None
        ):
            next_backup = datetime.datetime.fromisoformat(
                agent["status"]["nextBackup"].split("Z", 1)[0]
            )
        last_backup = None
        if (
            "lastBackup" in agent["status"]
            and agent["status"]["lastBackup"] is not None
        ):
            last_backup = datetime.datetime.fromisoformat(
                agent["status"]["lastBackup"].split("Z", 1)[0]
            )
        state = agent["status"].get("state")
        # Several agents may match one host; keep the most recent backup.
        # NOTE(review): assumes host_status.backup_info is already a dict
        # (never None) — confirm the model field default.
        if (
            "last_backup" in host_status.backup_info
            and last_backup
            < datetime.datetime.fromisoformat(host_status.backup_info["last_backup"])
        ):
            continue
        host_status.backup_info = {
            "id": agent.get("id"),
            "next_backup": next_backup.isoformat() if next_backup else None,
            "last_backup": last_backup.isoformat() if last_backup else None,
            "os": os_name,
            "status": state,
        }
        host_status.backup_url = "{}/#m=Resources&key=All devices".format(ACRONIS_URL)
        host_status.backup_plugin = plugin
        # Status encoding: 3 = OK, 2 = warning.
        if state == "notProtected" and last_backup is not None:
            host_status.backup_output = (
                "Device is present, automatic backups are disabled"
            )
            host_status.backup_status = 2
        # NOTE(review): this compares a naive ISO timestamp string against a
        # UTC-offset-suffixed one lexicographically — works only because the
        # naive prefix ordering matches; confirm the agent reports UTC.
        elif not (
            host_status.backup_info["last_backup"] is not None
            and host_status.backup_info["last_backup"] > backup_limit
        ):
            host_status.backup_output = (
                "Device is present, last backup older than 24 hours."
            )
            host_status.backup_status = 2
        else:
            host_status.backup_output = "Device is present, last backup was successful."
            host_status.backup_status = 3
        host_status.save()
        print("Updated Acronis backup status for {}".format(host_status))
def backup_aws(plugin, date):
    """Query AWS EC2 for instances and EBS snapshots, and update the backup
    status of hosts matched by private IP address.

    ``plugin`` is a model instance exposing a ``params`` queryset of named
    configuration values; ``date`` is passed through to ``lookup()``.
    """
    AWS_ACCESS_KEY_ID = plugin.params.get(name="AWS_ACCESS_KEY_ID").value
    AWS_SECRET_ACCESS_KEY = plugin.params.get(name="AWS_SECRET_ACCESS_KEY").value
    AWS_REGION = plugin.params.get(name="AWS_REGION").value
    # Console deep-link; the instance ID is appended per host below.
    AWS_URL = "https://{0}.console.aws.amazon.com/ec2/v2/home?region={0}#Instances:search=".format(
        AWS_REGION
    )
    client = boto3.client(
        "ec2",
        aws_access_key_id=AWS_ACCESS_KEY_ID,
        aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
        region_name=AWS_REGION,
    )
    # Snapshots older than 24 hours are considered stale.
    snapshot_limit = (
        datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=1)
    ).isoformat()
    instance_map = {}
    # Maps EBS volume ID -> owning instance ID.
    volume_map = {}
    # scrape information from EC2 instances list
    instances = client.describe_instances()
    for resv in instances["Reservations"]:
        for inst in resv["Instances"]:
            key = inst["InstanceId"]
            instance_map[key] = {
                "id": key,
                # BUG FIX: pre-seed "name"; previously the key was only set
                # when a Name tag existed, raising KeyError later for
                # unnamed instances when building backup_info.
                "name": None,
                "ips": [x["PrivateIpAddress"] for x in inst["NetworkInterfaces"]],
                "volumes": [],
                "snapshots": {},
            }
            # Find the display name. BUG FIX: untagged instances have no
            # "Tags" key at all, so use .get() with a default.
            for tag in inst.get("Tags", []):
                if tag["Key"].lower() == "name":
                    instance_map[key]["name"] = tag["Value"]
                    break
            # find all volumes
            for volume in inst["BlockDeviceMappings"]:
                if "Ebs" in volume:
                    instance_map[key]["volumes"].append(volume["Ebs"]["VolumeId"])
                    volume_map[volume["Ebs"]["VolumeId"]] = key
    # Scrape information from the snapshots list, grouping completed
    # snapshot start times by owning instance and volume.
    # NOTE(review): describe_snapshots() with no OwnerIds filter can return
    # publicly-shared snapshots too; they are filtered out by volume_map
    # membership, but OwnerIds=["self"] would be cheaper — confirm.
    snapshots = client.describe_snapshots()
    for snap in snapshots["Snapshots"]:
        if snap["State"] != "completed":
            continue
        volume = snap["VolumeId"]
        if volume not in volume_map:
            continue
        key = volume_map[volume]
        if volume not in instance_map[key]["snapshots"]:
            instance_map[key]["snapshots"][volume] = []
        instance_map[key]["snapshots"][volume].append(snap["StartTime"])
    for instance in instance_map.values():
        # BUG FIX: reset host_status each iteration; previously an instance
        # with no private IPs reused the previous iteration's host_status
        # (updating the wrong host) or raised NameError on the first one.
        host_status = None
        for ip in instance["ips"]:
            host_status = lookup(ip, date)
            if host_status:
                break
        if not host_status:
            continue
        host_status.backup_plugin = plugin
        host_status.backup_url = AWS_URL + instance["id"]
        host_status.backup_info = {
            "id": instance["id"],
            "name": instance["name"],
            "volumes": [],
        }
        # Summarise per-volume snapshot counts and most-recent snapshot time.
        for v in instance["volumes"]:
            snaps = sorted(instance["snapshots"].get(v, []), reverse=True)
            last_backup = snaps[0].isoformat() if snaps else None
            host_status.backup_info["volumes"].append(
                {"id": v, "snap_count": len(snaps), "last_backup": last_backup}
            )
        # Status encoding: 3 = OK, 2 = warning.
        # NOTE(review): an instance with zero EBS volumes passes both all()
        # checks and is reported as successful — confirm intended.
        if not all([v["snap_count"] for v in host_status.backup_info["volumes"]]):
            host_status.backup_output = "A volume has not been snapshotted."
            host_status.backup_status = 2
        elif not all(
            [
                (v["last_backup"] is not None and v["last_backup"] > snapshot_limit)
                for v in host_status.backup_info["volumes"]
            ]
        ):
            host_status.backup_output = (
                "A volume does not have a snapshot from the last 24 hours."
            )
            host_status.backup_status = 2
        else:
            host_status.backup_output = "Daily snapshotting was successful."
            host_status.backup_status = 3
        host_status.save()
        print("Updated AWS backup status for {}".format(host_status))
def _ms_api(verb, url, previous=None, **kwargs):
    """Call a Microsoft REST endpoint and return the accumulated "value"
    list, following "@nextLink" pagination links until exhausted.

    ``previous`` seeds the accumulator; any extra keyword arguments are
    forwarded to ``requests.request`` on every page fetch.
    """
    collected = [] if previous is None else previous
    while True:
        payload = requests.request(verb, url, **kwargs).json()
        # A response without a "value" array ends pagination immediately.
        if "value" not in payload:
            return collected
        collected.extend(payload["value"])
        if "@nextLink" not in payload:
            return collected
        # Follow the continuation link for the next page.
        url = payload["@nextLink"]
def backup_azure(plugin, date):
    """Query the Azure Recovery Services vault for VM backup containers and
    update backup status for hosts matched by private IP address.

    ``plugin`` is a model instance exposing a ``params`` queryset of named
    configuration values; ``date`` is passed through to ``lookup()``.
    Returns early (doing nothing) when the named vault is not found.
    """
    AZURE_TENANT = plugin.params.get(name="AZURE_TENANT").value
    AZURE_APP_ID = plugin.params.get(name="AZURE_APP_ID").value
    AZURE_APP_SECRET = plugin.params.get(name="AZURE_APP_SECRET").value
    AZURE_SUBSCRIPTION_ID = plugin.params.get(name="AZURE_SUBSCRIPTION_ID").value
    AZURE_VAULT_NAME = plugin.params.get(name="AZURE_VAULT_NAME").value
    # Portal deep-link template; the VM resource ID is substituted per host.
    AZURE_URL = "https://portal.azure.com/#resource{}/backupSetting"
    MANAGEMENT_BASE = "https://management.azure.com"
    MANAGEMENT_BASE_SCOPE = "{}/.default".format(MANAGEMENT_BASE)
    MANAGEMENT_SUB = "{}/subscriptions/{}".format(MANAGEMENT_BASE, AZURE_SUBSCRIPTION_ID)
    # App-only (client credentials) token for the ARM API.
    # NOTE(review): assumes AZURE_TENANT is a full authority URL, and that
    # this msal version accepts a bare string scope — confirm (current msal
    # documents scopes as a list).
    ctx = ConfidentialClientApplication(
        client_id=AZURE_APP_ID,
        client_credential=AZURE_APP_SECRET,
        authority=AZURE_TENANT,
    )
    token = ctx.acquire_token_for_client(MANAGEMENT_BASE_SCOPE)
    headers = {"Authorization": "Bearer {}".format(token["access_token"])}
    # Get the ID of the specified vault.
    MANAGEMENT_LIST_VAULTS = "{}/providers/Microsoft.RecoveryServices/vaults?api-version=2016-06-01".format(MANAGEMENT_SUB)
    vaults = _ms_api("GET", MANAGEMENT_LIST_VAULTS, headers=headers)
    vault = None
    for v in vaults:
        if v["name"] == AZURE_VAULT_NAME:
            vault = v["id"]
            break
    if vault is None:
        return
    # Get backup protection container list.
    # The $filter restricts results to registered Azure IaaS VM containers.
    MANAGEMENT_LIST_CONTAINERS = "{}{}/backupProtectionContainers?api-version=2016-12-01&$filter=backupManagementType%20eq%20%27AzureIaasVM%27%20and%20status%20eq%20%27Registered%27".format(
        MANAGEMENT_BASE, vault
    )
    containers = _ms_api("GET", MANAGEMENT_LIST_CONTAINERS, headers=headers)
    # Maps VM resource ID -> container metadata and (later) private IPs.
    vm_mapping = {}
    for container in containers:
        vm_id = container["properties"]["virtualMachineId"]
        if vm_id not in vm_mapping:
            vm_mapping[vm_id] = {}
        vm_mapping[vm_id]["id"] = vm_id
        vm_mapping[vm_id]["container_name"] = container["name"]
        vm_mapping[vm_id]["container_id"] = container["id"]
        vm_mapping[vm_id]["container_health"] = container["properties"]["healthStatus"]
        vm_mapping[vm_id]["ips"] = []
    # Get private IP addresses of each VM
    MANAGEMENT_LIST_NICS = "{}/providers/Microsoft.Network/networkInterfaces?api-version=2018-10-01".format(
        MANAGEMENT_SUB
    )
    nics = _ms_api("GET", MANAGEMENT_LIST_NICS, headers=headers)
    for nic in nics:
        # Skip NICs not attached to a VM.
        if "virtualMachine" not in nic["properties"]:
            continue
        vm_id = nic["properties"]["virtualMachine"]["id"]
        if vm_id in vm_mapping:
            vm_mapping[vm_id]["ips"] = [
                x["properties"]["privateIPAddress"]
                for x in nic["properties"]["ipConfigurations"]
            ]
    for vm in vm_mapping.values():
        # Match the VM to a host via any of its private IPs.
        host_status = None
        for ip in vm["ips"]:
            host_status = lookup(ip, date)
            if host_status:
                break
        if not host_status:
            continue
        host_status.backup_plugin = plugin
        host_status.backup_url = AZURE_URL.format(vm["id"])
        host_status.backup_info = {
            "id": vm["id"],
            "container_name": vm["container_name"],
            "container_id": vm["container_id"],
            "container_health": vm["container_health"],
        }
        # Status encoding: 3 = OK, 2 = warning.
        if vm["container_health"] == "Healthy":
            host_status.backup_output = "VM is enrolled for backups and is healthy."
            host_status.backup_status = 3
        else:
            host_status.backup_output = (
                "VM is enrolled for backups, but is not healthy."
            )
            host_status.backup_status = 2
        host_status.save()
        print("Updated Azure backup status for {}".format(host_status))
def backup_storagesync(plugin, date):
    """Query Azure Storage Sync (Azure File Sync) server endpoints and update
    backup status for hosts matched by server friendly name.

    ``plugin`` is a model instance exposing a ``params`` queryset of named
    configuration values; ``date`` is passed through to ``lookup()``.
    Hosts already claimed by another backup plugin are left untouched.
    """
    AZURE_TENANT = plugin.params.get(name="AZURE_TENANT").value
    AZURE_APP_ID = plugin.params.get(name="AZURE_APP_ID").value
    AZURE_APP_SECRET = plugin.params.get(name="AZURE_APP_SECRET").value
    AZURE_SUBSCRIPTION_ID = plugin.params.get(name="AZURE_SUBSCRIPTION_ID").value
    AZURE_RESOURCE_GROUP = plugin.params.get(name="AZURE_RESOURCE_GROUP").value
    AZURE_STORAGE_SYNC_NAME = plugin.params.get(name="AZURE_STORAGE_SYNC_NAME").value
    # Portal deep-link shown for every matched host.
    AZURE_URL = "https://portal.azure.com/#resource/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorageSync/storageSyncServices/{}/syncGroups".format(
        AZURE_SUBSCRIPTION_ID, AZURE_RESOURCE_GROUP, AZURE_STORAGE_SYNC_NAME
    )
    MANAGEMENT_BASE = "https://management.azure.com"
    MANAGEMENT_BASE_SCOPE = "{}/.default".format(MANAGEMENT_BASE)
    # Uploads older than 24 hours are considered stale.
    backup_limit = (
        datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=1)
    ).isoformat()
    # App-only (client credentials) token for the ARM API.
    ctx = ConfidentialClientApplication(
        client_id=AZURE_APP_ID,
        client_credential=AZURE_APP_SECRET,
        authority=AZURE_TENANT,
    )
    token = ctx.acquire_token_for_client(MANAGEMENT_BASE_SCOPE)
    headers = {"Authorization": "Bearer {}".format(token["access_token"])}
    # Get list of sync groups
    MANAGEMENT_SYNC_BASE = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorageSync/storageSyncServices/{}".format(
        MANAGEMENT_BASE,
        AZURE_SUBSCRIPTION_ID,
        AZURE_RESOURCE_GROUP,
        AZURE_STORAGE_SYNC_NAME,
    )
    MANAGEMENT_SYNC_GROUPS = "{}/syncGroups?api-version=2019-03-01".format(
        MANAGEMENT_SYNC_BASE
    )
    # Maps sync group name -> list of server endpoints in that group.
    sync_groups = {}
    group_list = _ms_api("GET", MANAGEMENT_SYNC_GROUPS, headers=headers)
    for group in group_list:
        sync_groups[group["name"]] = _ms_api(
            "GET",
            "{}/syncGroups/{}/serverEndpoints?api-version=2019-03-01".format(
                MANAGEMENT_SYNC_BASE, group["name"]
            ),
            headers=headers,
        )
    # Re-key by server friendly name: one server may serve many endpoints.
    server_map = {}
    for name, servers in sync_groups.items():
        for server in servers:
            key = server["properties"]["friendlyName"]
            if key not in server_map:
                server_map[key] = []
            server_map[key].append(
                {
                    "name": name,
                    "path": server["properties"]["serverLocalPath"],
                    "upload_health": server["properties"]["syncStatus"]["uploadHealth"],
                    "download_health": server["properties"]["syncStatus"][
                        "downloadHealth"
                    ],
                    "files_not_syncing": server["properties"]["syncStatus"][
                        "totalPersistentFilesNotSyncingCount"
                    ],
                    "last_upload": server["properties"]["syncStatus"]["uploadStatus"][
                        "lastSyncTimestamp"
                    ],
                    "last_upload_success": server["properties"]["syncStatus"][
                        "uploadStatus"
                    ]["lastSyncSuccessTimestamp"],
                    "last_download": server["properties"]["syncStatus"][
                        "downloadStatus"
                    ]["lastSyncTimestamp"],
                    "last_download_success": server["properties"]["syncStatus"][
                        "downloadStatus"
                    ]["lastSyncSuccessTimestamp"],
                    "id": server["id"],
                }
            )
    for name, servers in server_map.items():
        host_status = lookup(name, date)
        # Skip unknown hosts, and hosts already claimed by another plugin.
        if not host_status or host_status.backup_plugin:
            continue
        host_status.backup_plugin = plugin
        host_status.backup_url = AZURE_URL
        host_status.backup_info = servers
        # A single stale/unhealthy endpoint marks the whole host as warning.
        # Status encoding: 3 = OK, 2 = warning.
        for server in servers:
            # NOTE(review): string comparison of an API timestamp against an
            # aware ISO string — assumes compatible formats; confirm.
            if server["last_upload_success"] < backup_limit:
                host_status.backup_output = (
                    "File shares are being backed up, last backup older than 24 hours."
                )
                host_status.backup_status = 2
                host_status.save()
                # NOTE(review): this returns from the whole function, so any
                # remaining hosts in server_map are never processed — a
                # 'break' to the next host looks intended; confirm.
                return
            if server["upload_health"] != "Healthy":
                host_status.backup_output = (
                    "File shares are being backed up, uploads not marked as healthy."
                )
                host_status.backup_status = 2
                host_status.save()
                # NOTE(review): same early-return concern as above.
                return
        host_status.backup_output = (
            "File shares are being backed up, last backup was successful."
        )
        host_status.backup_status = 3
        host_status.save()
def patching_oms(plugin, date):
    """Query the Azure Log Analytics (OMS) workspace for computers reporting
    configuration data, and mark matching hosts as enrolled for patching.

    ``plugin`` is a model instance exposing a ``params`` queryset of named
    configuration values; ``date`` is passed through to ``lookup()``.
    """
    AZURE_TENANT = plugin.params.get(name="AZURE_TENANT").value
    AZURE_APP_ID = plugin.params.get(name="AZURE_APP_ID").value
    AZURE_APP_SECRET = plugin.params.get(name="AZURE_APP_SECRET").value
    AZURE_LOG_WORKSPACE = plugin.params.get(name="AZURE_LOG_WORKSPACE").value
    LOG_ANALYTICS_BASE = "https://api.loganalytics.io"
    LOG_ANALYTICS_BASE_SCOPE = "{}/.default".format(LOG_ANALYTICS_BASE)
    LOG_ANALYTICS_QUERY = "{}/v1/workspaces/{}/query".format(LOG_ANALYTICS_BASE, AZURE_LOG_WORKSPACE)
    # App-only (client credentials) token for the Log Analytics API.
    # BUG FIX: client_credential was a corrupted "<PASSWORD>" placeholder
    # (a syntax error); restored to the AZURE_APP_SECRET parameter, matching
    # the other Azure plugins in this module.
    ctx = ConfidentialClientApplication(
        client_id=AZURE_APP_ID,
        client_credential=AZURE_APP_SECRET,
        authority=AZURE_TENANT,
    )
    token = ctx.acquire_token_for_client(LOG_ANALYTICS_BASE_SCOPE)
    headers = {"Authorization": "Bearer {}".format(token["access_token"])}
    # Kusto query: computers with ConfigurationData joined with their most
    # recent Heartbeat record.
    patching = requests.get(
        LOG_ANALYTICS_QUERY,
        params={
            "query": "(ConfigurationData | project Computer, TimeGenerated, VMUUID | distinct Computer) | join kind=inner ( Heartbeat | project Computer, OSType, OSName, OSMajorVersion, OSMinorVersion, ComputerEnvironment, TimeGenerated, TenantId, ComputerIP | summarize arg_max (TimeGenerated, *) by Computer ) on Computer"
        },
        headers=headers,
    )
    results = patching.json()
    for computer in results["tables"][0]["rows"]:
        # Column 0 is the Computer name from the join key.
        host_status = lookup(computer[0], date)
        if host_status is None:
            continue
        # NOTE(review): the row indices below depend on the column order of
        # the Kusto result set — confirm against the workspace schema.
        host_status.patching_info = {
            "id": computer[8],
            "os_type": computer[3],
            "os_name": computer[4],
            "os_major_version": computer[5],
            "os_minor_version": computer[6],
        }
        host_status.patching_plugin = plugin
        host_status.patching_output = "Server has been enrolled in OMS."
        host_status.patching_status = 3
        host_status.save()
        print("Updated patching status for {}".format(host_status))
|
chuiizeet/Pixel-Art-Headers | Headers/FBSDKURLSessionTask.h | //
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
#import <objc/NSObject.h>
@class NSURLSessionTask;
// Reconstructed (class-dump) interface: wraps an NSURLSessionTask together
// with its completion handler and request-timing/serial metadata used for
// logging. Method bodies are not available in this header.
@interface FBSDKURLSessionTask : NSObject
{
    NSURLSessionTask *_task;
    CDUnknownBlockType _handler;
    unsigned long long _requestStartTime;
    unsigned long long _loggerSerialNumber;
}
@property(readonly, nonatomic) unsigned long long loggerSerialNumber; // @synthesize loggerSerialNumber=_loggerSerialNumber;
@property(nonatomic) unsigned long long requestStartTime; // @synthesize requestStartTime=_requestStartTime;
@property(copy, nonatomic) CDUnknownBlockType handler; // @synthesize handler=_handler;
@property(retain, nonatomic) NSURLSessionTask *task; // @synthesize task=_task;
- (void).cxx_destruct;
- (void)cancel;
- (void)start;
- (void)taskDidCompleteWithError:(id)arg1;
- (void)taskDidCompleteWithResponse:(id)arg1 data:(id)arg2;
- (void)logMessage:(id)arg1;
- (void)invokeHandler:(CDUnknownBlockType)arg1 error:(id)arg2 response:(id)arg3 responseData:(id)arg4;
- (void)logAndInvokeHandler:(CDUnknownBlockType)arg1 response:(id)arg2 responseData:(id)arg3;
- (void)logAndInvokeHandler:(CDUnknownBlockType)arg1 error:(id)arg2;
- (id)initWithRequest:(id)arg1 fromSession:(id)arg2 completionHandler:(CDUnknownBlockType)arg3;
@end
|
scrutinizer-ci-testing/netguru-people | db/migrate/20150408092703_change_membership_start_and_end_date_columns_to_date.rb | <filename>db/migrate/20150408092703_change_membership_start_and_end_date_columns_to_date.rb
class ChangeMembershipStartAndEndDateColumnsToDate < ActiveRecord::Migration
  # Convert the membership start/end timestamp columns to plain dates.
  def change
    [:starts_at, :ends_at].each do |column|
      change_column :memberships, column, :date
    end
  end
end
|
davideschiavone/pulpissimo | boot_code/include/archi/udma/i2s/v3/udma_i2s_v3_gvsoc.h | <reponame>davideschiavone/pulpissimo<filename>boot_code/include/archi/udma/i2s/v3/udma_i2s_v3_gvsoc.h
/* THIS FILE HAS BEEN GENERATED, DO NOT MODIFY IT.
*/
/*
* Copyright (C) 2018 ETH Zurich, University of Bologna
* and GreenWaves Technologies
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __INCLUDE_ARCHI_UDMA_I2S_V3_UDMA_I2S_V3_GVSOC_H__
#define __INCLUDE_ARCHI_UDMA_I2S_V3_UDMA_I2S_V3_GVSOC_H__
#if !defined(LANGUAGE_ASSEMBLY) && !defined(__ASSEMBLER__)
#include <stdint.h>
#include "archi/utils.h"
#endif
//
// REGISTERS STRUCTS
//
#ifdef __GVSOC__
// uDMA I2S RX channel: buffer start-address (RX_SADDR) register accessors.
class vp_udma_i2s_i2s_rx_saddr : public vp::reg_32
{
public:
    inline void rx_saddr_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_RX_SADDR_RX_SADDR_BIT, UDMA_I2S_I2S_RX_SADDR_RX_SADDR_WIDTH); }
    inline uint32_t rx_saddr_get() { return this->get_field(UDMA_I2S_I2S_RX_SADDR_RX_SADDR_BIT, UDMA_I2S_I2S_RX_SADDR_RX_SADDR_WIDTH); }
};
// uDMA I2S RX channel: transfer size (RX_SIZE) register accessors.
class vp_udma_i2s_i2s_rx_size : public vp::reg_32
{
public:
    inline void rx_size_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_RX_SIZE_RX_SIZE_BIT, UDMA_I2S_I2S_RX_SIZE_RX_SIZE_WIDTH); }
    inline uint32_t rx_size_get() { return this->get_field(UDMA_I2S_I2S_RX_SIZE_RX_SIZE_BIT, UDMA_I2S_I2S_RX_SIZE_RX_SIZE_WIDTH); }
};
// uDMA I2S RX channel: configuration (RX_CFG) register accessors —
// continuous mode, data size, enable, clear and pending fields.
class vp_udma_i2s_i2s_rx_cfg : public vp::reg_32
{
public:
    inline void continous_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_RX_CFG_CONTINOUS_BIT, UDMA_I2S_I2S_RX_CFG_CONTINOUS_WIDTH); }
    inline uint32_t continous_get() { return this->get_field(UDMA_I2S_I2S_RX_CFG_CONTINOUS_BIT, UDMA_I2S_I2S_RX_CFG_CONTINOUS_WIDTH); }
    inline void datasize_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_RX_CFG_DATASIZE_BIT, UDMA_I2S_I2S_RX_CFG_DATASIZE_WIDTH); }
    inline uint32_t datasize_get() { return this->get_field(UDMA_I2S_I2S_RX_CFG_DATASIZE_BIT, UDMA_I2S_I2S_RX_CFG_DATASIZE_WIDTH); }
    inline void en_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_RX_CFG_EN_BIT, UDMA_I2S_I2S_RX_CFG_EN_WIDTH); }
    inline uint32_t en_get() { return this->get_field(UDMA_I2S_I2S_RX_CFG_EN_BIT, UDMA_I2S_I2S_RX_CFG_EN_WIDTH); }
    inline void clr_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_RX_CFG_CLR_BIT, UDMA_I2S_I2S_RX_CFG_CLR_WIDTH); }
    inline uint32_t clr_get() { return this->get_field(UDMA_I2S_I2S_RX_CFG_CLR_BIT, UDMA_I2S_I2S_RX_CFG_CLR_WIDTH); }
    inline void pending_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_RX_CFG_PENDING_BIT, UDMA_I2S_I2S_RX_CFG_PENDING_WIDTH); }
    inline uint32_t pending_get() { return this->get_field(UDMA_I2S_I2S_RX_CFG_PENDING_BIT, UDMA_I2S_I2S_RX_CFG_PENDING_WIDTH); }
};
// uDMA I2S TX channel: buffer start-address (TX_SADDR) register accessors.
class vp_udma_i2s_i2s_tx_saddr : public vp::reg_32
{
public:
    inline void tx_saddr_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_TX_SADDR_TX_SADDR_BIT, UDMA_I2S_I2S_TX_SADDR_TX_SADDR_WIDTH); }
    inline uint32_t tx_saddr_get() { return this->get_field(UDMA_I2S_I2S_TX_SADDR_TX_SADDR_BIT, UDMA_I2S_I2S_TX_SADDR_TX_SADDR_WIDTH); }
};
// uDMA I2S TX channel: transfer size (TX_SIZE) register accessors.
class vp_udma_i2s_i2s_tx_size : public vp::reg_32
{
public:
    inline void tx_size_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_TX_SIZE_TX_SIZE_BIT, UDMA_I2S_I2S_TX_SIZE_TX_SIZE_WIDTH); }
    inline uint32_t tx_size_get() { return this->get_field(UDMA_I2S_I2S_TX_SIZE_TX_SIZE_BIT, UDMA_I2S_I2S_TX_SIZE_TX_SIZE_WIDTH); }
};
// uDMA I2S TX channel: configuration (TX_CFG) register accessors —
// continuous mode, data size, enable, clear and pending fields.
class vp_udma_i2s_i2s_tx_cfg : public vp::reg_32
{
public:
    inline void continous_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_TX_CFG_CONTINOUS_BIT, UDMA_I2S_I2S_TX_CFG_CONTINOUS_WIDTH); }
    inline uint32_t continous_get() { return this->get_field(UDMA_I2S_I2S_TX_CFG_CONTINOUS_BIT, UDMA_I2S_I2S_TX_CFG_CONTINOUS_WIDTH); }
    inline void datasize_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_TX_CFG_DATASIZE_BIT, UDMA_I2S_I2S_TX_CFG_DATASIZE_WIDTH); }
    inline uint32_t datasize_get() { return this->get_field(UDMA_I2S_I2S_TX_CFG_DATASIZE_BIT, UDMA_I2S_I2S_TX_CFG_DATASIZE_WIDTH); }
    inline void en_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_TX_CFG_EN_BIT, UDMA_I2S_I2S_TX_CFG_EN_WIDTH); }
    inline uint32_t en_get() { return this->get_field(UDMA_I2S_I2S_TX_CFG_EN_BIT, UDMA_I2S_I2S_TX_CFG_EN_WIDTH); }
    inline void clr_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_TX_CFG_CLR_BIT, UDMA_I2S_I2S_TX_CFG_CLR_WIDTH); }
    inline uint32_t clr_get() { return this->get_field(UDMA_I2S_I2S_TX_CFG_CLR_BIT, UDMA_I2S_I2S_TX_CFG_CLR_WIDTH); }
    inline void pending_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_TX_CFG_PENDING_BIT, UDMA_I2S_I2S_TX_CFG_PENDING_WIDTH); }
    inline uint32_t pending_get() { return this->get_field(UDMA_I2S_I2S_TX_CFG_PENDING_BIT, UDMA_I2S_I2S_TX_CFG_PENDING_WIDTH); }
};
// uDMA I2S clock configuration (CLKCFG_SETUP) register accessors — clock
// dividers, clock enables and internal/external clock source selection for
// the master and slave interfaces.
class vp_udma_i2s_i2s_clkcfg_setup : public vp::reg_32
{
public:
    inline void master_clk_div_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_CLK_DIV_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_CLK_DIV_WIDTH); }
    inline uint32_t master_clk_div_get() { return this->get_field(UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_CLK_DIV_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_CLK_DIV_WIDTH); }
    inline void slave_clk_div_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_CLK_DIV_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_CLK_DIV_WIDTH); }
    inline uint32_t slave_clk_div_get() { return this->get_field(UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_CLK_DIV_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_CLK_DIV_WIDTH); }
    inline void common_clk_div_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_CLKCFG_SETUP_COMMON_CLK_DIV_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_COMMON_CLK_DIV_WIDTH); }
    inline uint32_t common_clk_div_get() { return this->get_field(UDMA_I2S_I2S_CLKCFG_SETUP_COMMON_CLK_DIV_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_COMMON_CLK_DIV_WIDTH); }
    inline void slave_clk_en_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_CLK_EN_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_CLK_EN_WIDTH); }
    inline uint32_t slave_clk_en_get() { return this->get_field(UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_CLK_EN_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_CLK_EN_WIDTH); }
    inline void master_clk_en_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_CLK_EN_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_CLK_EN_WIDTH); }
    inline uint32_t master_clk_en_get() { return this->get_field(UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_CLK_EN_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_CLK_EN_WIDTH); }
    inline void pdm_clk_en_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_CLKCFG_SETUP_PDM_CLK_EN_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_PDM_CLK_EN_WIDTH); }
    inline uint32_t pdm_clk_en_get() { return this->get_field(UDMA_I2S_I2S_CLKCFG_SETUP_PDM_CLK_EN_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_PDM_CLK_EN_WIDTH); }
    inline void slave_ext_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_EXT_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_EXT_WIDTH); }
    inline uint32_t slave_ext_get() { return this->get_field(UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_EXT_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_EXT_WIDTH); }
    inline void slave_num_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_NUM_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_NUM_WIDTH); }
    inline uint32_t slave_num_get() { return this->get_field(UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_NUM_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_SLAVE_NUM_WIDTH); }
    inline void master_ext_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_EXT_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_EXT_WIDTH); }
    inline uint32_t master_ext_get() { return this->get_field(UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_EXT_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_EXT_WIDTH); }
    inline void master_num_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_NUM_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_NUM_WIDTH); }
    inline uint32_t master_num_get() { return this->get_field(UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_NUM_BIT, UDMA_I2S_I2S_CLKCFG_SETUP_MASTER_NUM_WIDTH); }
};
// Generated accessor wrapper for the uDMA I2S slave setup register
// (I2S_SLV_SETUP). Each *_set/*_get pair writes or reads one bit-field
// through vp::reg_32::set_field/get_field, using the matching generated
// UDMA_I2S_I2S_SLV_SETUP_*_BIT / *_WIDTH position macros.
class vp_udma_i2s_i2s_slv_setup : public vp::reg_32
{
public:
// SLAVE_WORDS bit-field accessors.
inline void slave_words_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_SLV_SETUP_SLAVE_WORDS_BIT, UDMA_I2S_I2S_SLV_SETUP_SLAVE_WORDS_WIDTH); }
inline uint32_t slave_words_get() { return this->get_field(UDMA_I2S_I2S_SLV_SETUP_SLAVE_WORDS_BIT, UDMA_I2S_I2S_SLV_SETUP_SLAVE_WORDS_WIDTH); }
// SLAVE_BITS bit-field accessors.
inline void slave_bits_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_SLV_SETUP_SLAVE_BITS_BIT, UDMA_I2S_I2S_SLV_SETUP_SLAVE_BITS_WIDTH); }
inline uint32_t slave_bits_get() { return this->get_field(UDMA_I2S_I2S_SLV_SETUP_SLAVE_BITS_BIT, UDMA_I2S_I2S_SLV_SETUP_SLAVE_BITS_WIDTH); }
// SLAVE_LSB bit-field accessors.
inline void slave_lsb_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_SLV_SETUP_SLAVE_LSB_BIT, UDMA_I2S_I2S_SLV_SETUP_SLAVE_LSB_WIDTH); }
inline uint32_t slave_lsb_get() { return this->get_field(UDMA_I2S_I2S_SLV_SETUP_SLAVE_LSB_BIT, UDMA_I2S_I2S_SLV_SETUP_SLAVE_LSB_WIDTH); }
// SLAVE_2CH bit-field accessors.
inline void slave_2ch_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_SLV_SETUP_SLAVE_2CH_BIT, UDMA_I2S_I2S_SLV_SETUP_SLAVE_2CH_WIDTH); }
inline uint32_t slave_2ch_get() { return this->get_field(UDMA_I2S_I2S_SLV_SETUP_SLAVE_2CH_BIT, UDMA_I2S_I2S_SLV_SETUP_SLAVE_2CH_WIDTH); }
// SLAVE_EN bit-field accessors.
inline void slave_en_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_SLV_SETUP_SLAVE_EN_BIT, UDMA_I2S_I2S_SLV_SETUP_SLAVE_EN_WIDTH); }
inline uint32_t slave_en_get() { return this->get_field(UDMA_I2S_I2S_SLV_SETUP_SLAVE_EN_BIT, UDMA_I2S_I2S_SLV_SETUP_SLAVE_EN_WIDTH); }
};
// Generated accessor wrapper for the uDMA I2S master setup register
// (I2S_MST_SETUP). Same pattern as the slave setup class: one
// set_field/get_field pair per bit-field, positions supplied by the
// generated *_BIT / *_WIDTH macros.
class vp_udma_i2s_i2s_mst_setup : public vp::reg_32
{
public:
// MASTER_WORDS bit-field accessors.
inline void master_words_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_MST_SETUP_MASTER_WORDS_BIT, UDMA_I2S_I2S_MST_SETUP_MASTER_WORDS_WIDTH); }
inline uint32_t master_words_get() { return this->get_field(UDMA_I2S_I2S_MST_SETUP_MASTER_WORDS_BIT, UDMA_I2S_I2S_MST_SETUP_MASTER_WORDS_WIDTH); }
// MASTER_BITS bit-field accessors.
inline void master_bits_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_MST_SETUP_MASTER_BITS_BIT, UDMA_I2S_I2S_MST_SETUP_MASTER_BITS_WIDTH); }
inline uint32_t master_bits_get() { return this->get_field(UDMA_I2S_I2S_MST_SETUP_MASTER_BITS_BIT, UDMA_I2S_I2S_MST_SETUP_MASTER_BITS_WIDTH); }
// MASTER_LSB bit-field accessors.
inline void master_lsb_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_MST_SETUP_MASTER_LSB_BIT, UDMA_I2S_I2S_MST_SETUP_MASTER_LSB_WIDTH); }
inline uint32_t master_lsb_get() { return this->get_field(UDMA_I2S_I2S_MST_SETUP_MASTER_LSB_BIT, UDMA_I2S_I2S_MST_SETUP_MASTER_LSB_WIDTH); }
// MASTER_2CH bit-field accessors.
inline void master_2ch_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_MST_SETUP_MASTER_2CH_BIT, UDMA_I2S_I2S_MST_SETUP_MASTER_2CH_WIDTH); }
inline uint32_t master_2ch_get() { return this->get_field(UDMA_I2S_I2S_MST_SETUP_MASTER_2CH_BIT, UDMA_I2S_I2S_MST_SETUP_MASTER_2CH_WIDTH); }
// MASTER_EN bit-field accessors.
inline void master_en_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_MST_SETUP_MASTER_EN_BIT, UDMA_I2S_I2S_MST_SETUP_MASTER_EN_WIDTH); }
inline uint32_t master_en_get() { return this->get_field(UDMA_I2S_I2S_MST_SETUP_MASTER_EN_BIT, UDMA_I2S_I2S_MST_SETUP_MASTER_EN_WIDTH); }
};
// Generated accessor wrapper for the uDMA I2S PDM setup register
// (I2S_PDM_SETUP). One set_field/get_field pair per bit-field; field
// positions come from the generated *_BIT / *_WIDTH macros.
class vp_udma_i2s_i2s_pdm_setup : public vp::reg_32
{
public:
// PDM_SHIFT bit-field accessors.
inline void pdm_shift_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_PDM_SETUP_PDM_SHIFT_BIT, UDMA_I2S_I2S_PDM_SETUP_PDM_SHIFT_WIDTH); }
inline uint32_t pdm_shift_get() { return this->get_field(UDMA_I2S_I2S_PDM_SETUP_PDM_SHIFT_BIT, UDMA_I2S_I2S_PDM_SETUP_PDM_SHIFT_WIDTH); }
// PDM_DECIMATION bit-field accessors.
inline void pdm_decimation_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_PDM_SETUP_PDM_DECIMATION_BIT, UDMA_I2S_I2S_PDM_SETUP_PDM_DECIMATION_WIDTH); }
inline uint32_t pdm_decimation_get() { return this->get_field(UDMA_I2S_I2S_PDM_SETUP_PDM_DECIMATION_BIT, UDMA_I2S_I2S_PDM_SETUP_PDM_DECIMATION_WIDTH); }
// PDM_MODE bit-field accessors.
inline void pdm_mode_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_PDM_SETUP_PDM_MODE_BIT, UDMA_I2S_I2S_PDM_SETUP_PDM_MODE_WIDTH); }
inline uint32_t pdm_mode_get() { return this->get_field(UDMA_I2S_I2S_PDM_SETUP_PDM_MODE_BIT, UDMA_I2S_I2S_PDM_SETUP_PDM_MODE_WIDTH); }
// PDM_EN bit-field accessors.
inline void pdm_en_set(uint32_t value) { this->set_field(value, UDMA_I2S_I2S_PDM_SETUP_PDM_EN_BIT, UDMA_I2S_I2S_PDM_SETUP_PDM_EN_WIDTH); }
inline uint32_t pdm_en_get() { return this->get_field(UDMA_I2S_I2S_PDM_SETUP_PDM_EN_BIT, UDMA_I2S_I2S_PDM_SETUP_PDM_EN_WIDTH); }
};
#endif
#endif
|
Antique/libxenserver | src/xen_dr_task.c | /*
* Copyright (c) <NAME>, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1) Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <stddef.h>
#include <stdlib.h>
#include "xen_internal.h"
#include <xen/api/xen_common.h>
#include <xen/api/xen_dr_task.h>
#include <xen/api/xen_dr_task_xen_dr_task_record_map.h>
#include <xen/api/xen_sr.h>
#include <xen/api/xen_string_string_map.h>
/* Boilerplate free/alloc helpers for the DR_task handle, record, and
 * record-opt types, expanded from the helper macros in xen_internal.h. */
XEN_FREE(xen_dr_task)
XEN_SET_ALLOC_FREE(xen_dr_task)
XEN_ALLOC(xen_dr_task_record)
XEN_SET_ALLOC_FREE(xen_dr_task_record)
XEN_ALLOC(xen_dr_task_record_opt)
XEN_RECORD_OPT_FREE(xen_dr_task)
XEN_SET_ALLOC_FREE(xen_dr_task_record_opt)
/* Field table for the RPC (de)serialiser: maps each wire key of a
 * DR_task record to its member offset inside xen_dr_task_record. */
static const struct_member xen_dr_task_record_struct_members[] =
{
{ .key = "uuid",
.type = &abstract_type_string,
.offset = offsetof(xen_dr_task_record, uuid) },
{ .key = "introduced_SRs",
.type = &abstract_type_ref_set,
.offset = offsetof(xen_dr_task_record, introduced_srs) }
};
/* Abstract-type descriptor tying the member table above to the struct's
 * size; used as result_type when fetching DR_task records. */
const abstract_type xen_dr_task_record_abstract_type_ =
{
.typename = STRUCT,
.struct_size = sizeof(xen_dr_task_record),
.member_count =
sizeof(xen_dr_task_record_struct_members) / sizeof(struct_member),
.members = xen_dr_task_record_struct_members
};
/* Key/value layout of one (DR_task reference -> DR_task record) map entry,
 * used when deserialising DR_task.get_all_records results. */
static const struct struct_member xen_dr_task_xen_dr_task_record_members[] =
{
{
.type = &abstract_type_string,
.offset = offsetof(xen_dr_task_xen_dr_task_record_map_contents, key)
},
{
.type = &xen_dr_task_record_abstract_type_,
.offset = offsetof(xen_dr_task_xen_dr_task_record_map_contents, val)
}
};
/* Map descriptor (string key -> record value) built from the entry
 * layout above. */
const abstract_type abstract_type_string_xen_dr_task_record_map =
{
.typename = MAP,
.struct_size = sizeof(xen_dr_task_xen_dr_task_record_map_contents),
.members = xen_dr_task_xen_dr_task_record_members
};
/*
 * Release a DR_task record and everything it owns.
 *
 * A NULL record is tolerated and treated as a no-op.  Frees the handle
 * and uuid strings, the set of introduced-SR record-opts, and finally
 * the record structure itself.
 */
void
xen_dr_task_record_free(xen_dr_task_record *record)
{
    if (!record)
        return;

    free(record->handle);
    free(record->uuid);
    xen_sr_record_opt_set_free(record->introduced_srs);
    free(record);
}
/*
 * Fetch the full record of the given DR_task reference.
 * *result is initialised to NULL; on success it receives a freshly
 * allocated record whose handle is set to a copy of its uuid (so the
 * record can stand in for a reference).  Returns session->ok.
 * NOTE: XEN_CALL_ expands against the local names param_values,
 * result_type, session and result -- do not rename them.
 */
bool
xen_dr_task_get_record(xen_session *session, xen_dr_task_record **result, xen_dr_task dr_task)
{
abstract_value param_values[] =
{
{ .type = &abstract_type_string,
.u.string_val = dr_task }
};
abstract_type result_type = xen_dr_task_record_abstract_type_;
*result = NULL;
XEN_CALL_("DR_task.get_record");
if (session->ok)
{
(*result)->handle = xen_strdup_((*result)->uuid);
}
return session->ok;
}
/*
 * Look up a DR_task reference by its uuid string.
 * *result is initialised to NULL before the call; returns session->ok.
 */
bool
xen_dr_task_get_by_uuid(xen_session *session, xen_dr_task *result, char *uuid)
{
abstract_value param_values[] =
{
{ .type = &abstract_type_string,
.u.string_val = uuid }
};
abstract_type result_type = abstract_type_string;
*result = NULL;
XEN_CALL_("DR_task.get_by_uuid");
return session->ok;
}
/*
 * Return the set of SR references introduced by this DR_task.
 * *result is initialised to NULL before the call; returns session->ok.
 */
bool
xen_dr_task_get_introduced_srs(xen_session *session, struct xen_sr_set **result, xen_dr_task dr_task)
{
abstract_value param_values[] =
{
{ .type = &abstract_type_string,
.u.string_val = dr_task }
};
abstract_type result_type = abstract_type_string_set;
*result = NULL;
XEN_CALL_("DR_task.get_introduced_SRs");
return session->ok;
}
/*
 * Create a DR_task for the SR driver named by `type` with the given
 * device_config map.  `whitelist` is passed through verbatim --
 * presumably the set of devices the task may use; confirm against the
 * XenAPI DR_task documentation.  *result receives the new task's
 * reference; returns session->ok.
 */
bool
xen_dr_task_create(xen_session *session, xen_dr_task *result, char *type, xen_string_string_map *device_config, struct xen_string_set *whitelist)
{
abstract_value param_values[] =
{
{ .type = &abstract_type_string,
.u.string_val = type },
{ .type = &abstract_type_string_string_map,
.u.set_val = (arbitrary_set *)device_config },
{ .type = &abstract_type_string_set,
.u.set_val = (arbitrary_set *)whitelist }
};
abstract_type result_type = abstract_type_string;
*result = NULL;
XEN_CALL_("DR_task.create");
return session->ok;
}
/*
 * Asynchronous variant of xen_dr_task_create: *result receives a task
 * reference that can be polled for completion.  Returns session->ok.
 */
bool
xen_dr_task_create_async(xen_session *session, xen_task *result, char *type, xen_string_string_map *device_config, struct xen_string_set *whitelist)
{
abstract_value param_values[] =
{
{ .type = &abstract_type_string,
.u.string_val = type },
{ .type = &abstract_type_string_string_map,
.u.set_val = (arbitrary_set *)device_config },
{ .type = &abstract_type_string_set,
.u.set_val = (arbitrary_set *)whitelist }
};
abstract_type result_type = abstract_type_string;
*result = NULL;
XEN_CALL_("Async.DR_task.create");
return session->ok;
}
/*
 * Destroy the given DR_task synchronously.  No result payload is
 * expected (NULL result_type/result); returns session->ok.
 */
bool
xen_dr_task_destroy(xen_session *session, xen_dr_task self)
{
abstract_value param_values[] =
{
{ .type = &abstract_type_string,
.u.string_val = self }
};
xen_call_(session, "DR_task.destroy", param_values, 1, NULL, NULL);
return session->ok;
}
/*
 * Asynchronous variant of xen_dr_task_destroy: *result receives a task
 * reference that can be polled for completion.  Returns session->ok.
 */
bool
xen_dr_task_destroy_async(xen_session *session, xen_task *result, xen_dr_task self)
{
abstract_value param_values[] =
{
{ .type = &abstract_type_string,
.u.string_val = self }
};
abstract_type result_type = abstract_type_string;
*result = NULL;
XEN_CALL_("Async.DR_task.destroy");
return session->ok;
}
/*
 * List every DR_task reference known to the server.
 * *result is initialised to NULL before the call; returns session->ok.
 */
bool
xen_dr_task_get_all(xen_session *session, struct xen_dr_task_set **result)
{
abstract_type result_type = abstract_type_string_set;
*result = NULL;
xen_call_(session, "DR_task.get_all", NULL, 0, &result_type, result);
return session->ok;
}
/*
 * Fetch a map of (DR_task reference -> full record) for all DR_tasks.
 * *result is initialised to NULL before the call; returns session->ok.
 */
bool
xen_dr_task_get_all_records(xen_session *session, xen_dr_task_xen_dr_task_record_map **result)
{
abstract_type result_type = abstract_type_string_xen_dr_task_record_map;
*result = NULL;
xen_call_(session, "DR_task.get_all_records", NULL, 0, &result_type, result);
return session->ok;
}
/*
 * Fetch the uuid string of the given DR_task reference.
 * *result is initialised to NULL before the call; returns session->ok.
 */
bool
xen_dr_task_get_uuid(xen_session *session, char **result, xen_dr_task dr_task)
{
abstract_value param_values[] =
{
{ .type = &abstract_type_string,
.u.string_val = dr_task }
};
abstract_type result_type = abstract_type_string;
*result = NULL;
XEN_CALL_("DR_task.get_uuid");
return session->ok;
}
|
w-nowak/event-sourced-ddd-restaurant | shared-kernel/src/test/java/com/wnowakcraft/samples/restaurant/core/infrastructure/messaging/mocking/BusinessFlowMock.java | package com.wnowakcraft.samples.restaurant.core.infrastructure.messaging.mocking;
import com.wnowakcraft.samples.restaurant.core.domain.logic.BusinessFlowProvisioner;
import com.wnowakcraft.samples.restaurant.core.domain.logic.DefaultBusinessFlowProvisioner.BusinessFlowProvisionerConfig;
import com.wnowakcraft.samples.restaurant.core.domain.model.Command;
import com.wnowakcraft.samples.restaurant.core.domain.model.Event;
import com.wnowakcraft.samples.restaurant.core.domain.model.Response;
import com.wnowakcraft.samples.restaurant.core.infrastructure.messaging.CommandChannelFactory;
import com.wnowakcraft.samples.restaurant.core.infrastructure.messaging.EventListener;
import com.wnowakcraft.samples.restaurant.core.infrastructure.messaging.EventListenerBuilder;
import com.wnowakcraft.samples.restaurant.core.infrastructure.messaging.EventListenerFactory;
import com.wnowakcraft.samples.restaurant.core.infrastructure.saga.BusinessFlowRunner;
import com.wnowakcraft.samples.restaurant.core.infrastructure.saga.BusinessFlowStateHandler;
import lombok.Getter;
import org.mockito.ArgumentCaptor;
import org.mockito.invocation.InvocationOnMock;
import java.util.Collection;
import java.util.List;
import java.util.function.Consumer;
import static com.wnowakcraft.preconditions.Preconditions.requireNonNull;
import static java.util.concurrent.CompletableFuture.completedFuture;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.*;
import static org.mockito.Mockito.mock;
/**
 * Test harness that wires a business-flow provisioner against Mockito mocks.
 * It captures the provisioner's event listener, mocks the state handler so
 * every created/updated state is reported as a "command sent", and routes
 * command responses through a {@link CommandResponseChannelMock}.
 *
 * @param <E> the event type that triggers the flow
 * @param <S> the flow state type
 */
public class BusinessFlowMock<E extends Event<?>, S> {
// Default no-op hook invoked just before each command-sent notification.
private static final BeforeCommandSent NO_BEFORE_COMMAND_SENT_HANDLER = (c) -> {};
@Getter private final BusinessFlowStateHandler<S> flowStateHandler;
private final CommandChannelFactory commandChannelFactory;
private final EventListenerFactory eventListenerFactory;
// Created lazily in initializeTestProvisioner(); NPE-prone before that call.
private CommandResponseChannelMock commandResponseChannelMock;
// Captures the Consumer the provisioner registers for its init event.
private ArgumentCaptor<Consumer<E>> initEventCaptor = ArgumentCaptor.forClass(Consumer.class);
private final BusinessFlowProvisionerConfig<E> flowProvisionerConfig;
private Collection<Class<? extends Response>> flowFinishedResponseTypes;
private BeforeCommandSent beforeCommandSentHandler = NO_BEFORE_COMMAND_SENT_HANDLER;
/**
 * @param flowProvisionerConfig config handed to the provisioner under test
 * @param flowFinishedResponseTypes response types that mark the flow as done
 */
public BusinessFlowMock(BusinessFlowProvisionerConfig<E> flowProvisionerConfig,
Collection<Class<? extends Response>> flowFinishedResponseTypes) {
this.flowProvisionerConfig = flowProvisionerConfig;
this.flowFinishedResponseTypes = flowFinishedResponseTypes;
flowStateHandler = mock(BusinessFlowStateHandler.class);
commandChannelFactory = mock(CommandChannelFactory.class);
eventListenerFactory = mock(EventListenerFactory.class);
}
/** Factory callback through which the test builds the provisioner under test. */
@FunctionalInterface
public interface TestProvisionerFactory<E extends Event<?>, S> {
BusinessFlowProvisioner<E, S> initialize(EventListenerBuilder eventListenerBuilder,
CommandChannelFactory commandChannelFactory,
BusinessFlowStateHandler<S> businessFlowStateHandler,
BusinessFlowProvisionerConfig<E> businessFlowProvisionerConfig);
}
/**
 * Builds the provisioner via the supplied factory and arms all mocks:
 * listener capture, command-sent notification, and the response channel.
 * Must be called before any trigger/when/then method on this harness.
 */
public BusinessFlowProvisioner<E, S> initializeTestProvisioner(TestProvisionerFactory<E, S> testProvisionerFactory) {
var businessFlowProvisioner = testProvisionerFactory.initialize(
new EventListenerBuilder(eventListenerFactory),
commandChannelFactory,
flowStateHandler,
flowProvisionerConfig
);
mockEventListenerFactoryToCaptureEventListener(flowProvisionerConfig.getEventKindToListenTo());
mockFlowStateHandlerToNotifyAboutPublishedCommands();
commandResponseChannelMock =createCommandResponseChannelMock(
commandChannelFactory,
flowProvisionerConfig.getCommandResponseChannelName(),
flowFinishedResponseTypes);
return businessFlowProvisioner;
}
/** Feeds the captured init-event consumer, then blocks until the async flow settles. */
public void triggerBusinessFlowInitEvent(E initEvent) {
initEventCaptor.getValue().accept(initEvent);
commandResponseChannelMock
.getAsyncTestWaitSupport()
.waitUntilAsyncFlowFinished();
}
/** Simulates the arrival of a command response on the response channel. */
public void whenFollowingCommandResponseReceived(Response response) {
commandResponseChannelMock.acceptNewCommandResponseJustReceived(response);
}
/** Blocks the test thread until the asynchronous flow has finished. */
public void thenWaitUntilFlowIsFinished() {
commandResponseChannelMock
.getAsyncTestWaitSupport()
.waitUntilAsyncFlowFinished();
}
/** Convenience builder for the flow-finished response-type collection. */
@SafeVarargs
public static Collection<Class<? extends Response>> allowedFlowFinishedResponses(Class<? extends Response>... responses) {
return List.of(responses);
}
public WhenOnCommand getOnCommandMock() {
return commandResponseChannelMock;
}
// Any state create/update on the handler is treated as "a command was sent".
private void mockFlowStateHandlerToNotifyAboutPublishedCommands() {
willAnswer(this::notifyCommandSent)
.given(flowStateHandler)
.createNewState(anyStateEnvelope(), any(Command.class));
willAnswer(this::notifyCommandSent)
.given(flowStateHandler)
.updateState(anyStateEnvelope(), any(Command.class));
}
// Mockito Answer: extracts the command (arg 1), runs the before-hook, notifies.
private Void notifyCommandSent(InvocationOnMock invocationOnMock) {
var command =invocationOnMock.getArgument(1, Command.class);
beforeCommandSentHandler.beforeCommandSent(command);
commandResponseChannelMock
.getSentCommandNotifier()
.notifyCommandSent(command);
return null;
}
@SuppressWarnings("unchecked")
private static <S> BusinessFlowRunner.StateEnvelope<S> anyStateEnvelope() {
return any(BusinessFlowRunner.StateEnvelope.class);
}
private static CommandResponseChannelMock createCommandResponseChannelMock(CommandChannelFactory commandChannelFactory,
String commandResponseChannelName,
Collection<Class<? extends Response>> flowFinishedResponseTypes) {
return CommandResponseChannelMock.mockCommandResponseChannel(
commandResponseChannelName, commandChannelFactory,
flowFinishedResponseTypes
);
}
// Stubs the listener factory so the listener's onEvent consumer is captured
// into initEventCaptor for later replay via triggerBusinessFlowInitEvent().
private void mockEventListenerFactoryToCaptureEventListener(Class<? super E> eventFamily) {
EventListener<E> eventListener = mock(EventListener.class);
given(eventListenerFactory.<E>listenToEventsOfKind(eventFamily)).willReturn(completedFuture(eventListener));
willDoNothing().given(eventListener).onEvent(initEventCaptor.capture());
}
/** Installs a non-null hook invoked just before each command-sent notification. */
public void attachBeforeCommandSentHandler(BeforeCommandSent beforeCommandSentHandler) {
this.beforeCommandSentHandler = requireNonNull(beforeCommandSentHandler, "beforeCommandSentHandler");
}
/** Hook invoked with each command just before it is reported as sent. */
@FunctionalInterface
public interface BeforeCommandSent {
void beforeCommandSent(Command command);
}
}
|
myOmikron/TelegramBotSDK | telegram_bot_sdk/telegram_objects/encryptedPassportElement.py | from telegram_bot_sdk.telegram_objects.passportFile import PassportFile
class EncryptedPassportElement:
    """One Telegram Passport element shared with the bot by a user.

    All optional file-like fields arrive as raw API dicts and are wrapped
    into :ref:`object_passport_file` instances; absent fields stay ``None``.

    :param type_result: Element type, one of: "personal_details", "passport",
        "driver_license", "identity_card", "internal_passport", "address",
        "utility_bill", "bank_statement", "rental_agreement",
        "passport_registration", "temporary_registration", "phone_number",
        "email"
    :type type_result: str
    :param data: Optional: Base64-encoded encrypted element data (available
        for "personal_details", "passport", "driver_license", "identity_card",
        "internal_passport" and "address" types)
    :type data: str
    :param hash_data: Base64-encoded element hash, used in
        :ref:`object_passport_element_error_unspecified`
    :type hash_data: str
    :param phone_number: Optional: verified phone number ("phone_number" type only)
    :type phone_number: str
    :param email: Optional: verified email address ("email" type only)
    :type email: str
    :param files: Optional: encrypted document files; decrypt/verify with the
        accompanying :ref:`object_encrypted_credentials`
    :type files: list of :ref:`object_passport_file`
    :param front_side: Optional: encrypted front side of the document
    :type front_side: :ref:`object_passport_file`
    :param reverse_side: Optional: encrypted reverse side of the document
    :type reverse_side: :ref:`object_passport_file`
    :param selfie: Optional: encrypted selfie of the user holding the document
    :type selfie: :ref:`object_passport_file`
    :param translation: Optional: encrypted translated versions of the documents
    :type translation: list of :ref:`object_passport_file`
    """
    def __init__(self, *, type_result, data=None, hash_data, phone_number=None, email=None, files=None, front_side=None,
                 reverse_side=None, selfie=None, translation=None):
        # Local helpers: wrap raw API payloads into PassportFile objects,
        # letting falsy values (None / empty) pass through as None.
        def as_file(raw):
            return PassportFile(**raw) if raw else None

        def as_files(raws):
            return [PassportFile(**raw) for raw in raws] if raws else None

        self.type_result = type_result
        self.data = data
        self.hash_data = hash_data
        self.phone_number = phone_number
        self.email = email
        self.files = as_files(files)
        self.front_side = as_file(front_side)
        self.reverse_side = as_file(reverse_side)
        self.selfie = as_file(selfie)
        self.translation = as_files(translation)
|
xxxzhou/oeip | oeip-win-mf/ReaderCallback.cpp | #include "ReaderCallback.h"
#include <shlwapi.h>
#include "MediaStruct.h"
// Default constructor: the source reader must be attached afterwards via
// setSourceReader() before setPlay(true) is usable.
ReaderCallback::ReaderCallback() {
}
// No explicit cleanup here; the reader is presumably released by its
// owner -- NOTE(review): confirm lifetime ownership with the caller.
ReaderCallback::~ReaderCallback() {
}
// Attach the Media Foundation source reader and the stream index to read
// from.  Guarded by mtx because setPlay() reads both under the same lock.
void ReaderCallback::setSourceReader(IMFSourceReader* pReader, unsigned long dwStreamIndex) {
std::lock_guard<std::mutex> mtx_locker(mtx);
reader = pReader;
streamIndex = dwStreamIndex;
}
// Register the callback that receives each captured sample's byte count and
// raw data pointer.  NOTE(review): unlike setSourceReader this is not
// guarded by mtx -- confirm it is only called before streaming starts.
void ReaderCallback::setBufferRevice(function<void(unsigned long, byte*)> reviceFunc) {
onReviceBuffer = reviceFunc;
}
// Register the device event callback (open / no-open / dropped notifications).
void ReaderCallback::setDeviceEvent(onEventHandle eventHandle) {
onDeviceEvent = eventHandle;
}
/*
 * Start or stop sample delivery.  Serialised by mtx.
 *
 * pPlayVideo == true:  kick off the asynchronous read loop by issuing the
 * first ReadSample request (retried up to three times, 200 ms apart) and
 * report OEIP_DeviceOpen / OEIP_DeviceNoOpen through the device callback.
 *
 * pPlayVideo == false: flush the stream -- required so a different media
 * format (e.g. NV12 -> MJPG) can be selected later -- then wait up to 3 s
 * for OnFlush() to signal completion.
 *
 * Returns true on success.
 */
bool ReaderCallback::setPlay(bool pPlayVideo) {
    std::lock_guard<std::mutex> mtx_locker(mtx);
    HRESULT hr = 0;
    if (pPlayVideo) {
        // Already streaming: nothing to do.
        if (bPlay)
            return true;
        int i = 0;
        hr = reader->ReadSample(streamIndex, 0, nullptr, nullptr, nullptr, nullptr);
        // Retry at most three times, pausing between attempts.
        while (FAILED(hr) && i++ < 3) {
            hr = reader->ReadSample(streamIndex, 0, nullptr, nullptr, nullptr, nullptr);
            std::this_thread::sleep_for(std::chrono::milliseconds(200));
        }
        if (SUCCEEDED(hr)) {
            bPlay = true;
            logMessage(OEIP_INFO, "start reading data.");
            onDeviceHandle(OEIP_DeviceOpen, 0);
            return true;
        }
        else {
            bPlay = false;
            logMessage(OEIP_ERROR, "cannot start reading data.");
            onDeviceHandle(OEIP_DeviceNoOpen, 0);
            return false;
        }
    }
    else {
        if (bPlay) {
            bPlay = false;
            // Flush so the stream can be reconfigured to another format later.
            try {
                if (reader != nullptr)
                    hr = reader->Flush(streamIndex);
            }
            // FIX: catch by const reference instead of by value (was
            // `catch (exception e)`), avoiding slicing and a needless copy.
            catch (const std::exception& e) {
                logMessage(OEIP_WARN, e.what());
            }
            // Wait for OnFlush() to notify completion (3 s timeout).
            std::unique_lock<std::mutex> lck(mtx2);
            auto status = signal.wait_for(lck, std::chrono::seconds(3));
            if (status == std::cv_status::timeout) {
                logMessage(OEIP_WARN, "MF device is not closed properly.");
            }
        }
        return SUCCEEDED(hr);
    }
}
// Forward a device event to the registered callback, if any.
void ReaderCallback::onDeviceHandle(OeipDeviceEventType eventType, int32_t data) {
if (onDeviceEvent) {
onDeviceEvent(eventType, data);
}
}
// Standard COM QueryInterface implemented with shlwapi's QISearch table;
// exposes IMFSourceReaderCallback (plus IUnknown via the table mechanism).
HRESULT ReaderCallback::QueryInterface(REFIID riid, void** ppvObject) {
static const QITAB qit[] =
{
QITABENT(ReaderCallback, IMFSourceReaderCallback),
{ 0 },
};
return QISearch(this, qit, riid, ppvObject);
}
// COM reference counting: atomic increment.
ULONG ReaderCallback::AddRef(void) {
return InterlockedIncrement(&refCount);
}
// COM reference counting: atomic decrement; self-deletes at zero after
// clearing the buffer callback.
ULONG ReaderCallback::Release(void) {
ULONG uCount = InterlockedDecrement(&refCount);
if (uCount == 0) {
onReviceBuffer = nullptr;
delete this;
}
// For thread safety, return a temporary variable.
return uCount;
}
/*
 * IMFSourceReaderCallback: called by Media Foundation for every delivered
 * sample.  Locks the first media buffer, hands its bytes to onReviceBuffer,
 * then immediately queues the next ReadSample.  On failure, stops playing
 * and reports OEIP_DeviceDropped through the device callback.
 */
HRESULT ReaderCallback::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample* pSample) {
    HRESULT hr = S_OK;
    // Deliberately stopped via setPlay(false): do not requeue a read.
    if (!bPlay)
        return hr;
    if (pSample) {
        CComPtr<IMFMediaBuffer> pBuffer = nullptr;
        hr = pSample->GetBufferByIndex(0, &pBuffer);
        if (pBuffer && onReviceBuffer) {
            byte* data = nullptr;
            unsigned long maxLength = 0;
            unsigned long length = 0;
            // FIX: the Lock() result used to shadow the outer `hr` and was
            // never checked, so a failed Lock still invoked the callback
            // with an invalid pointer and called Unlock().  Deliver data
            // (and unlock) only after a successful Lock.
            HRESULT lockHr = pBuffer->Lock(&data, &maxLength, &length);
            if (SUCCEEDED(lockHr)) {
                onReviceBuffer(length, (uint8_t*)data);
                pBuffer->Unlock();
            }
        }
    }
    // Request the next frame.
    if (SUCCEEDED(hr)) {
        hr = reader->ReadSample(streamIndex, 0, nullptr, nullptr, nullptr, nullptr);
    }
    if (FAILED(hr)) {
        bPlay = false;
        onDeviceHandle(OEIP_DeviceDropped, 0);
        logMessage(OEIP_WARN, "Data interruption.");
    }
    return hr;
}
// Flush completed: wake setPlay(false), which is blocked on `signal`
// waiting for the device to finish closing.
HRESULT ReaderCallback::OnFlush(DWORD dwStreamIndex) {
signal.notify_all();
return S_OK;
}
// Stream events are not handled; always report success.
HRESULT ReaderCallback::OnEvent(DWORD dwStreamIndex, IMFMediaEvent* pEvent) {
return S_OK;
}
|
FantasyZsp/SpringBoot2.xDemo | jdk/src/main/java/xyz/mydev/jdk/genericity/complex/port/DefaultPorterV2.java | <reponame>FantasyZsp/SpringBoot2.xDemo<gh_stars>0
package xyz.mydev.jdk.genericity.complex.port;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import xyz.mydev.jdk.genericity.complex.msg.SerializableMessage;
import java.io.Serializable;
import java.util.concurrent.ExecutorService;
/**
* 消息搬运工
* 处理缓存、去重、可靠中转等逻辑
*
* @author ZSP
*/
@Slf4j
@Getter
@Setter
public class DefaultPorterV2<E extends SerializableMessage<? extends Serializable>> extends AbstractPorter<E> {
// Executor presumably driving port tasks (see PortTaskFactory) -- no usage
// visible in this class yet; wiring happens via Lombok-generated setters.
private ExecutorService portExecutor;
// Executor presumably driving transfer tasks (see TransferTaskFactory).
private ExecutorService transferExecutor;
// Factory for transfer tasks handling events of type E.
private TransferTaskFactory<E> transferTaskFactory;
// Factory for port tasks handling events of type E.
private PortTaskFactory<E> portTaskFactory;
// All collaborators are injected post-construction through the
// Lombok-generated setters; the constructor intentionally does nothing.
public DefaultPorterV2() {
}
}
liumapp/compiling-jvm | openjdk/jdk/src/share/demo/jvmti/java_crw_demo/java_crw_demo.h | <reponame>liumapp/compiling-jvm<filename>openjdk/jdk/src/share/demo/jvmti/java_crw_demo/java_crw_demo.h
/*
* Copyright (c) 2003, 2004, Oracle and/or its affiliates. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Neither the name of Oracle nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef JAVA_CRW_DEMO_H
#define JAVA_CRW_DEMO_H
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif
/* This callback is used to notify the caller of a fatal error.
 * Arguments: human-readable message, source file name, line number. */
typedef void (*FatalErrorHandler)(const char*message, const char*file, int line);
/* This callback is used to return the method information for a class.
 * Since the information was already read here, it was useful to
 * return it here, with no JVMTI phase restrictions.
 * If the class file does represent a "class" and it has methods, then
 * this callback will be called with the class number and pointers to
 * the array of names, array of signatures, and the count of methods.
 * Argument order: (class_number, names, signatures, method_count).
 */
typedef void (*MethodNumberRegister)(unsigned, const char**, const char**, int);
/* Class file reader/writer interface. Basic input is a classfile image
* and details about what to inject. The output is a new classfile image
* that was allocated with malloc(), and should be freed by the caller.
*/
/* Names of external symbols to look for. These are the names that we
* try and lookup in the shared library. On Windows 2000, the naming
* convention is to prefix a "_" and suffix a "@N" where N is 4 times
* the number or arguments supplied.It has 19 args, so 76 = 19*4.
* On Windows 2003, Linux, and Solaris, the first name will be
* found, on Windows 2000 a second try should find the second name.
*
* WARNING: If You change the JavaCrwDemo typedef, you MUST change
* multiple things in this file, including this name.
*/
/* Symbol-name candidates to try with dlsym/GetProcAddress (plain name,
 * then the Win32 stdcall-decorated name). */
#define JAVA_CRW_DEMO_SYMBOLS { "java_crw_demo", "_java_crw_demo@76" }
/* Typedef needed for type casting in dynamic access situations.
 * Parameter meanings match the documented java_crw_demo() declaration
 * later in this header. */
typedef void (JNICALL *JavaCrwDemo)(
unsigned class_number,
const char *name,
const unsigned char *file_image,
long file_len,
int system_class,
char* tclass_name,
char* tclass_sig,
char* call_name,
char* call_sig,
char* return_name,
char* return_sig,
char* obj_init_name,
char* obj_init_sig,
char* newarray_name,
char* newarray_sig,
unsigned char **pnew_file_image,
long *pnew_file_len,
FatalErrorHandler fatal_error_handler,
MethodNumberRegister mnum_callback
);
/* Function export (should match typedef above) */
/* Instrument the given classfile image, injecting calls into tclass at the
 * sites described per-parameter below.  The rewritten image is returned
 * via pnew_file_image/pnew_file_len (allocated with malloc(); the caller
 * is responsible for freeing it -- see the interface notes above). */
JNIEXPORT void JNICALL java_crw_demo(
unsigned class_number, /* Caller assigned class number for class */
const char *name, /* Internal class name, e.g. java/lang/Object */
/* (Do not use "java.lang.Object" format) */
const unsigned char
*file_image, /* Pointer to classfile image for this class */
long file_len, /* Length of the classfile in bytes */
int system_class, /* Set to 1 if this is a system class */
/* (prevents injections into empty */
/* <clinit>, finalize, and <init> methods) */
char* tclass_name, /* Class that has methods we will call at */
/* the injection sites (tclass) */
char* tclass_sig, /* Signature of tclass */
/* (Must be "L" + tclass_name + ";") */
char* call_name, /* Method name in tclass to call at offset 0 */
/* for every method */
char* call_sig, /* Signature of this call_name method */
/* (Must be "(II)V") */
char* return_name, /* Method name in tclass to call at all */
/* return opcodes in every method */
char* return_sig, /* Signature of this return_name method */
/* (Must be "(II)V") */
char* obj_init_name, /* Method name in tclass to call first thing */
/* when injecting java.lang.Object.<init> */
char* obj_init_sig, /* Signature of this obj_init_name method */
/* (Must be "(Ljava/lang/Object;)V") */
char* newarray_name, /* Method name in tclass to call after every */
/* newarray opcode in every method */
char* newarray_sig, /* Signature of this method */
/* (Must be "(Ljava/lang/Object;II)V") */
unsigned char
**pnew_file_image, /* Returns a pointer to new classfile image */
long *pnew_file_len, /* Returns the length of the new image */
FatalErrorHandler
fatal_error_handler, /* Pointer to function to call on any */
/* fatal error. NULL sends error to stderr */
MethodNumberRegister
mnum_callback /* Pointer to function that gets called */
/* with all details on methods in this */
/* class. NULL means skip this call. */
);
/* External to read the class name out of a class file .
*
* WARNING: If You change the typedef, you MUST change
* multiple things in this file, including this name.
*/
/* Symbol-name candidates for the classname helper (plain name, then the
 * Win32 stdcall-decorated name: 3 args * 4 bytes = @12). */
#define JAVA_CRW_DEMO_CLASSNAME_SYMBOLS \
{ "java_crw_demo_classname", "_java_crw_demo_classname@12" }
/* Typedef needed for type casting in dynamic access situations. */
typedef char * (JNICALL *JavaCrwDemoClassname)(
const unsigned char *file_image,
long file_len,
FatalErrorHandler fatal_error_handler);
/* Read the class name out of a classfile image.  Ownership of the
 * returned string is not documented here -- presumably the caller frees
 * it; confirm against the implementation. */
JNIEXPORT char * JNICALL java_crw_demo_classname(
const unsigned char *file_image,
long file_len,
FatalErrorHandler fatal_error_handler);
#ifdef __cplusplus
} /* extern "C" */
#endif /* __cplusplus */
#endif
|
softicar/platform | platform-common/src/test/java/com/softicar/platform/common/string/csv/CsvTokenizerTest.java | package com.softicar.platform.common.string.csv;
import com.softicar.platform.common.testing.AbstractTest;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
public class CsvTokenizerTest extends AbstractTest {
// Rows-of-tokens result of the last tokenize(...) call; presumably filled
// by the tokenize helper and consumed by Asserter (both defined later in
// this class, outside this excerpt).
private List<List<String>> tokenMatrix;
// Each test starts with no tokenized output.
public CsvTokenizerTest() {
this.tokenMatrix = null;
}
// ---------------- single line ---------------- //
@Test
public void testWithSingleLine() {
tokenize("qwe,asd,zxc");
new Asserter()//
.assertRow("qwe", "asd", "zxc")
.assertNoMoreRows();
}
// ---------------- multiples lines ---------------- //
@Test
public void testWithMultipleLinesSeparatedByBackslashN() {
tokenize("qwe,asd,zxc\n123,456,789");
new Asserter()//
.assertRow("qwe", "asd", "zxc")
.assertRow("123", "456", "789")
.assertNoMoreRows();
}
@Test
public void testWithMultipleLinesSeparatedByBackslashR() {
tokenize("qwe,asd,zxc\r123,456,789");
new Asserter()//
.assertRow("qwe", "asd", "zxc")
.assertRow("123", "456", "789")
.assertNoMoreRows();
}
@Test
public void testWithMultipleLinesSeparatedByBackslashRN() {
tokenize("qwe,asd,zxc\r\n123,456,789");
new Asserter()//
.assertRow("qwe", "asd", "zxc")
.assertRow("123", "456", "789")
.assertNoMoreRows();
}
@Test
public void testWithMultipleLinesAndEmptyLine() {
tokenize("qwe,asd,zxc\n\n123,456,789");
new Asserter()//
.assertRow("qwe", "asd", "zxc")
.assertRow("123", "456", "789")
.assertNoMoreRows();
}
@Test
public void testWithMultipleLinesOfVariableLength() {
tokenize("qwe,asd,zxc\n123,456\nxxx,yyy,zzz,___");
new Asserter()//
.assertRow("qwe", "asd", "zxc")
.assertRow("123", "456")
.assertRow("xxx", "yyy", "zzz", "___")
.assertNoMoreRows();
}
// ---------------- value separation ---------------- //
@Test
public void testWithValueSeparationByAdjacentCommas() {
tokenize(",,qwe,asd,, ,zxc,,");
new Asserter()//
.assertRow("", "", "qwe", "asd", "", " ", "zxc", "", "")
.assertNoMoreRows();
}
@Test
public void testWithValueSeparationMissing() {
try {
tokenize("\"qwe\" \"asd\",zxc");
fail();
} catch (CsvSyntaxException exception) {
assertEquals("CSV syntax error in line 1 at character 6", exception.getMessage());
}
}
// ---------------- line-spanning value ---------------- //
@Test
public void testWithLineSpanningValue() {
tokenize("\"X\nX\r\nX\rX\",asd,zxc");
new Asserter()//
.assertRow("X\nX\r\nX\rX", "asd", "zxc")
.assertNoMoreRows();
}
// ---------------- spacing ---------------- //
@Test
public void testWithSpacesAroundValues() {
tokenize(" qwe, asd ,zxc ");
new Asserter()//
.assertRow(" qwe", " asd ", "zxc ")
.assertNoMoreRows();
}
// ---------------- quoting ---------------- //
@Test
public void testWithQuotes() {
tokenize("\"\"\"qwe,123\"\"\",\"asd\",zxc");
new Asserter()//
.assertRow("\"qwe,123\"", "asd", "zxc")
.assertNoMoreRows();
}
@Test
public void testWithQuotesOnEdges() {
tokenize("\"qwe\",asd,\"zxc\"");
new Asserter()//
.assertRow("qwe", "asd", "zxc")
.assertNoMoreRows();
}
@Test
public void testWithQuotesMissingInTheEnd() {
try {
tokenize("qwe,\"asd,zxc");
fail();
} catch (CsvSyntaxException exception) {
assertEquals("CSV syntax error in line 1 at character 13", exception.getMessage());
}
}
@Test
public void testWithQuotesInAsymmetricalDistribution() {
tokenize("qwe,\"\"\"asd\"\"\"\"\",\"\"\"zxc\"");
new Asserter()//
.assertRow("qwe", "\"asd\"\"", "\"zxc")
.assertNoMoreRows();
}
@Test
public void testWithQuotesInInvalidBlock() {
try {
tokenize("qwe,\"\"asd,zxc");
fail();
} catch (CsvSyntaxException exception) {
assertEquals("CSV syntax error in line 1 at character 7", exception.getMessage());
}
}
@Test
public void testWithQuotesInInvalidBlockAndDistribution() {
try {
tokenize("qwe,\"\"asd\",zxc");
fail();
} catch (CsvSyntaxException exception) {
assertEquals("CSV syntax error in line 1 at character 7", exception.getMessage());
}
}
@Test
public void testWithQuoteInNonQuotedValue() {
try {
tokenize("qwe,a\"sd,zxc");
fail();
} catch (CsvSyntaxException exception) {
assertEquals("CSV syntax error in line 1 at character 6", exception.getMessage());
}
}
// ---------------- blank values and lines ---------------- //
@Test
public void testWithEmptyString() {
tokenize("");
new Asserter()//
.assertNoMoreRows();
}
@Test
public void testWithBlankString() {
tokenize(" ");
new Asserter()//
.assertRow(" ")
.assertNoMoreRows();
}
@Test
public void testWithEmptyLinesOnly() {
tokenize("\n\n");
new Asserter()//
.assertNoMoreRows();
}
@Test
public void testWithEmptyLinesOnEdges() {
tokenize("\nqwe,asd\n");
new Asserter()//
.assertRow("qwe", "asd")
.assertNoMoreRows();
}
// ---------------- erroneous parameters ---------------- //
@Test(expected = NullPointerException.class)
public void testWithNullString() {
new CsvTokenizer().tokenize(null);
}
// ---------------- internal ---------------- //
private void tokenize(String csv) {
this.tokenMatrix = new CsvTokenizer().tokenize(csv);
}
private class Asserter {
private int currentRow;
public Asserter() {
assertNotNull(tokenMatrix);
this.currentRow = 0;
}
public Asserter assertRow(String...tokens) {
var rowTokens = tokenMatrix.get(currentRow);
assertEquals(//
"Unexpected number of tokens in the logical row with index %s.\nExpected tokens: %s\nActual tokens: %s\n"
.formatted(currentRow, Arrays.asList(tokens), rowTokens),
tokens.length,
rowTokens.size());
for (int i = 0; i < tokens.length; i++) {
String expectedToken = tokens[i];
String actualToken = rowTokens.get(i);
assertEquals(//
"Unexpected token at index %s in the logical row with index %s.".formatted(i, currentRow),
expectedToken,
actualToken);
}
++currentRow;
return this;
}
public Asserter assertNoMoreRows() {
assertEquals(//
"Encountered an unexpected number of logical rows.",
currentRow,
tokenMatrix.size());
return this;
}
}
}
|
anshusandhi6/project-calliope-frontend | src/shared/index.js | export { default as Navbar } from './Navbar';
export { default as Footer } from './Footer';
export { default as ArticleCard } from './ArticleCard';
export { default as EditorDeskCard } from './EditorDeskCard';
|
MarsonShine/Books | DesignPattern/DesignPatternGo/proxys/application.go | <gh_stars>10-100
package proxys
// application is the real backend in this proxy-pattern demo; the proxy
// forwards requests to it.
type application struct {
}

// requestHandle routes a (url, method) pair to a canned response and
// returns an HTTP status code together with a response body.
func (app *application) requestHandle(url, method string) (int, string) {
	switch {
	case url == "/app/status" && method == "GET":
		return 200, "Ok"
	case url == "/create/user" && method == "POST":
		return 201, "User Created"
	default:
		return 404, "Not Ok"
	}
}
|
jihwahn1018/ovirt-web-ui | src/helpers.test.js | <filename>src/helpers.test.js
/* eslint-env jest */
import { userFormatOfBytes } from '_/helpers'
// Verifies that userFormatOfBytes() converts raw byte counts into
// human-readable binary (1024-based) units and exposes the combined
// string, the rounded number and the unit suffix separately.
describe('test userFormatOfBytes', function () {
  it('B to KiB', function () {
    const b = 1024
    expect(userFormatOfBytes(b))
      .toMatchObject({ str: '1.0 KiB', rounded: '1.0', suffix: 'KiB' })
  })

  it('B to MiB (1.5 MiB)', function () {
    const b = (1024 ** 2) + (500 * 1024)
    expect(userFormatOfBytes(b))
      .toMatchObject({ str: '1.5 MiB', rounded: '1.5', suffix: 'MiB' })
  })

  it('B to MiB (7.6 MiB)', function () {
    const b = (7 * 1024 ** 2) + (600 * 1024)
    expect(userFormatOfBytes(b))
      .toMatchObject({ str: '7.6 MiB', rounded: '7.6', suffix: 'MiB' })
  })

  it('MiB to GiB', function () {
    // the second argument gives the unit of the input value (1538 MiB here)
    const b = 1538
    expect(userFormatOfBytes(b, 'MiB'))
      .toMatchObject({ str: '1.5 GiB', rounded: '1.5', suffix: 'GiB' })
  })
})
|
OpenSextant/Xponents | src/main/java/org/opensextant/extractors/geo/TagFilter.java | package org.opensextant.extractors.geo;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.lucene.analysis.util.ClasspathResourceLoader;
import org.opensextant.ConfigException;
import org.opensextant.extraction.MatchFilter;
import org.opensextant.util.LuceneStopwords;
import org.opensextant.util.TextUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.supercsv.io.CsvMapReader;
import org.supercsv.prefs.CsvPreference;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/*
* We can filter out trivial place name matches that we know to be close to
* false positives 100% of the time. E.g,. "way", "back", "north" You might
* consider two different stop filters, Is "North" different than "north"?
* This first pass filter should really filter out only text we know to be
* false positives regardless of case.
*
* Filter out unwanted tags via GazetteerETL data model or in Solr index. If
* you believe certain items will always be filtered OUT then set name_bias < 0
*/
public class TagFilter extends MatchFilter {

    /**
     * This may need to be turned off for processing lower-case or dirty data.
     */
    boolean filter_stopwords = true;

    // When true, all-lowercase matches are rejected outright; see filterOut(String).
    boolean filter_on_case = true;

    // General-purpose non-place stop terms loaded from the CSV filter resources.
    Set<String> nonPlaceStopTerms = null;

    Logger log = LoggerFactory.getLogger(TagFilter.class);

    /*
     * Select languages for experimentation.
     */
    private final Map<String, Set<Object>> langStopFilters = new HashMap<>();
    // private Set<String> generalLangId = new HashSet<>();

    /**
     * NOTE: This expects the files are all available. This fails if resource
     * files are missing.
     *
     * @throws ConfigException if any file has a problem.
     */
    public TagFilter() throws IOException {
        super();
        nonPlaceStopTerms = new HashSet<>();
        String[] defaultNonPlaceFilters = {"/filters/non-placenames.csv", // GENERAL
                "/filters/non-placenames,spa.csv", // SPANISH
                "/filters/non-placenames,deu.csv", // GERMAN
                "/filters/non-placenames,acronym.csv" // ACRONYMS
        };
        for (String f : defaultNonPlaceFilters) {
            nonPlaceStopTerms.addAll(loadExclusions(GazetteerMatcher.class.getResourceAsStream(f)));
        }
        /*
         * NOTE: these stop word sets are of format='wordset'
         * Whereas other languages (es, it, etc.) are provided in format='snowball'
         * StopFilterFactory is needed to load snowball filters.
         */
        String[] langSet = {
                "ja", "th", "tr", "id", "ar", "fa", "ur", "ru", "it", "pt", "de", "nl", "es", "en", "tl", "ko", "vi"};
        loadLanguageStopwords(langSet);
    }

    // Feature toggle for the optional stop lists below; currently disabled.
    private final boolean loadCustomStops = false;

    /**
     * Loads optional per-language stop lists (carrot2 Korean/Chinese,
     * Vietnamese) from bundled resources. Currently a no-op because
     * loadCustomStops is false.
     *
     * @throws IOException on resource read failure
     */
    private void loadCustomStopwords() throws IOException {
        if (!loadCustomStops) {
            return;
        }
        /*
         * More optional lists.
         */
        // KOREAN
        String url = "/lang/carrot2-stopwords.ko";
        String lg = "ko";
        URL obj = URL.class.getResource(url);
        if (obj != null) {
            loadStopSet(obj, lg);
        }
        // CHINESE
        url = "/lang/carrot2-stopwords.zh";
        lg = "zh";
        obj = URL.class.getResource(url);
        if (obj != null) {
            loadStopSet(obj, lg);
        }
        // VIETNAMESE
        url = "/lang/vietnamese-stopwords.txt";
        lg = "vi";
        obj = URL.class.getResource(url);
        if (obj != null) {
            loadStopSet(obj, lg);
        }
    }

    /**
     * Load default Lucene stop words to aid in language specific filtration.
     *
     * @param langids language IDs to load Lucene stopword sets for
     * @throws IOException on resource read failure
     * @throws ConfigException if a stopword resource is malformed
     */
    private void loadLanguageStopwords(String[] langids) throws IOException, ConfigException {
        for (String lg : langids) {
            langStopFilters.put(lg, LuceneStopwords.getStopwords(new ClasspathResourceLoader(TagFilter.class), lg));
        }
        // Temporarily discontinued:
        loadCustomStopwords();
    }

    /**
     * Reads one stop-term file (one term per line; '#' lines are comments)
     * and registers the lowercased terms under the given language ID.
     *
     * @param url    resource location of the stop list
     * @param langid language key for langStopFilters
     * @throws IOException if the list cannot be read or yields no terms
     */
    private void loadStopSet(URL url, String langid) throws IOException {
        try (InputStream strm = url.openStream()) {
            HashSet<Object> stopTerms = new HashSet<>();
            for (String line : IOUtils.readLines(strm, StandardCharsets.UTF_8)) {
                if (line.trim().startsWith("#")) {
                    continue;
                }
                stopTerms.add(line.trim().toLowerCase());
            }
            if (stopTerms.isEmpty()) {
                throw new ConfigException("No terms found in stop filter file " + url);
            }
            langStopFilters.put(langid, stopTerms);
        }
    }

    /** Enables/disables the general stop-term filter used by filterOut(String). */
    public void enableStopwordFilter(boolean b) {
        filter_stopwords = b;
    }

    /** Enables/disables outright rejection of all-lowercase matches. */
    public void enableCaseSensitive(boolean b) {
        filter_on_case = b;
    }

    /**
     * Default filtering rules: (a) If filter is in case-sensitive mode
     * (DEFAULT), all lower case matches are ignored; only mixed case or upper
     * case passes (b) If match term, t, is in stop word list it is filtered
     * out. Case is ignored.
     * TODO: filter rules -- if text match is all lower case and filter is
     * case-sensitive, then this filters out any lower case matches. Not
     * optimal. This should take into account alpha-case of document.
     * TODO: trivial for the general case, but important: stopTerms is hashed
     * only by lower case value, so native-case lookup is not possible.
     */
    @Override
    public boolean filterOut(String t) {
        if (filter_on_case && StringUtils.isAllLowerCase(t)) {
            return true;
        }
        if (filter_stopwords) {
            // hyphens are folded to spaces so "foo-bar" matches the stop term "foo bar"
            return nonPlaceStopTerms.contains(t.toLowerCase().replace('-', ' '));
        }
        return false;
    }

    /**
     * Experimental.
     * Using proper Language ID (ISO 2-char for now), determine if the given
     * term, t, is a stop term in that language.
     *
     * @param t          candidate place name match
     * @param langId     ISO 2-char language ID of the document, or null
     * @param docIsUpper true if input doc is mostly upper
     * @param docIsLower true if input doc is mostly lower
     * @return true if the candidate should be filtered out
     */
    public boolean filterOut(PlaceCandidate t, String langId, boolean docIsUpper, boolean docIsLower) {
        /*
         * Consider no given language ID -- only short, non-ASCII terms should be
         * filtered out
         * against all stop filters; Otherwise there is some performance issues.
         */
        if (langId == null) {
            if (t.isASCII()) {
                return false; /* Not filtering out short crap, right now. */
            } else if (t.getLength() < 4) {
                return assessAllFilters(t.getText().toLowerCase());
            }
        }
        /*
         * Consider language specific stop filters.
         * NOTE: LangID should not be 'CJK' or group. langStopFilters keys stop terms by
         * LangID
         */
        if (langStopFilters.containsKey(langId)) {
            Set<Object> terms = langStopFilters.get(langId);
            return terms.contains(t.getText().toLowerCase());
        }
        /*
         * EXPERIMENTAL.
         * But if langID is given, we first consider if text in document
         * is possibly a Proper name of sort...
         * UPPERCASENAME -- possibly stop?
         * Upper Case Name -- pass; not stop
         * not upper case name -- possibly stop.
         */
        if (!docIsUpper) {
            char c = t.getText().charAt(0);
            if (Character.isUpperCase(c) && !t.isUpper()) {
                // Proper Name, possibly. Not stopping.
                return false;
            }
        }
        boolean cjk = TextUtils.isCJK(langId);
        /*
         * Bi-gram + whitespace filter for CJK:
         */
        if (cjk && filterOutCJK(t)) {
            return true;
        }
        /*
         * FILTER out lower case matches for non-English, non-CJK texts.
         * If document is mixed case. That is we still expect/assume interesting
         * place names to be proper names. However, if you find longer name matches ~10
         * chars or longer
         * as lower case names, then let them pass. 10 chars is arbitrary, but approx. 1
         * word threshold.
         */
        if (!cjk) {
            if (!docIsLower && !docIsUpper) {
                return t.isLower() && t.getLength() < 10;
            }
        }
        return false;
    }

    /**
     * @param langId lang ID to check.
     * @param termLower lower case term.
     * @return true if the term is a stop word for the given (or default) language
     */
    public boolean filterOut(String langId, String termLower) {
        String lg = langId != null ? langId : "en"; // default? eek.
        if (langStopFilters.containsKey(lg)) {
            Set<Object> terms = langStopFilters.get(lg);
            return terms.contains(termLower);
        }
        return false;
    }

    /**
     * Experimental. Hack.
     * Due to bi-gram shingling with CJK languages - Chinese, Japanese, Korean -
     * the matcher really over-matches, e.g. For really short matches, let's
     * rule out obvious bad matches.
     *
     * <pre>
     * ... に た ... input text matched にた
     * gazetteer place name.
     * </pre>
     * <p>
     * TOOD: make use of better tokenizer/matcher
     * in SolrTextTagger configuration for CJK
     *
     * @param t candidate match
     * @return true for very short matches that contain whitespace
     */
    private boolean filterOutCJK(PlaceCandidate t) {
        return t.getLength() < 5 && TextUtils.count_ws(t.getText()) > 0;
    }

    /**
     * Run a term (already lowercased) against all stop filters.
     *
     * @param textnorm lowercased term
     * @return true if any language's stop set contains the term
     */
    public boolean assessAllFilters(String textnorm) {
        for (Set<Object> terms : langStopFilters.values()) {
            if (terms.contains(textnorm)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Exclusions have two columns in a CSV file. 'exclusion', 'category'
     * "#" in exclusion column implies a comment. Call is responsible for
     * getting I/O stream.
     *
     * @param filestream URL/file with exclusion terms
     * @return set of filter terms
     * @throws ConfigException if filter is not found
     */
    public static Set<String> loadExclusions(InputStream filestream) throws ConfigException {
        /*
         * Load the exclusion names -- these are terms that are gazeteer
         * entries, e.g., gazetteer.name = <exclusion term>, that will be marked
         * as search_only = true.
         */
        try (Reader termsIO = new InputStreamReader(filestream)) {
            CsvMapReader termreader = new CsvMapReader(termsIO, CsvPreference.EXCEL_PREFERENCE);
            String[] columns = termreader.getHeader(true);
            Map<String, String> terms = null;
            HashSet<String> stopTerms = new HashSet<>();
            while ((terms = termreader.read(columns)) != null) {
                String term = terms.get("exclusion");
                if (StringUtils.isBlank(term) || term.startsWith("#")) {
                    continue;
                }
                String trimmed = term.trim();
                /*
                 * Allow for case-sensitive filtration, if stop terms are listed in native case
                 * in resource files
                 */
                stopTerms.add(trimmed);
                stopTerms.add(trimmed.toLowerCase());
            }
            termreader.close();
            return stopTerms;
        } catch (Exception err) {
            throw new ConfigException("Could not load exclusions.", err);
        }
    }
}
|
FOC-framework/framework | foc/src/com/foc/business/notifier/actions/FocNotifActionFactory.java | /*******************************************************************************
* Copyright 2016 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.foc.business.notifier.actions;
import java.util.HashMap;
import com.foc.Globals;
import com.foc.business.notifier.FNotifTrigger;
@SuppressWarnings("serial")
public class FocNotifActionFactory extends HashMap<Integer, IFocNotifAction>{

	/**
	 * Registers the built-in notification actions, keyed by trigger action code.
	 */
	public FocNotifActionFactory(){
		put(FNotifTrigger.ACTION_SEND_EMAIL, new FocNotifAction_SendEmail());
		put(FNotifTrigger.ACTION_EXECUTE_REPORT, new FocNotifAction_SendReportByEmail());
	}

	/**
	 * Returns the application-wide factory, or null when no application is active.
	 *
	 * @param createIfNeeded whether the application may lazily create the factory
	 * @return the shared factory instance, or null
	 */
	public static FocNotifActionFactory getInstance(boolean createIfNeeded) {
		if (Globals.getApp() == null) {
			return null;
		}
		return Globals.getApp().getNotificationActionFactory(createIfNeeded);
	}

	/**
	 * Returns the application-wide factory, creating it when necessary.
	 */
	public static FocNotifActionFactory getInstance(){
		return getInstance(true);
	}
}
|
staddi99/AdventOfCode | 2020/day_8/code.js | import input from './input.js';
const inputArray = input.split('\n');
/**
 * Handheld-console emulator for AoC 2020 day 8.
 *
 * Program lines look like "<op> <signed int>" with op one of acc/jmp/nop.
 * The machine records how often each instruction has executed so that
 * infinite loops can be detected.
 */
class Computer {
  constructor(input) {
    this.raw_input = input.slice(0);
    this.reset();
  }

  /** Restore the machine to its initial state and re-parse the program. */
  reset() {
    this.program = this.parseInput(this.raw_input);
    this.accu = 0;
    this.instruction = 0;
    this.record = {};
  }

  /** Turn raw text lines into { command, arg } instruction objects. */
  parseInput(input) {
    return input.map((line) => {
      const [, command, rawArg] = /(\w+) (.+)$/.exec(line);
      return { command, arg: Number(rawArg) };
    });
  }

  /** "acc": adjust the accumulator, then step to the next instruction. */
  acc(arg) {
    this.accu += arg;
    this.updateRecord();
    this.instruction++;
  }

  /** "jmp": move the instruction pointer by a relative offset. */
  jmp(arg) {
    this.updateRecord();
    this.instruction += arg;
  }

  /** "nop": do nothing, step to the next instruction. */
  nop() {
    this.updateRecord();
    this.instruction++;
  }

  /** Count one execution of the current instruction. */
  updateRecord() {
    this.record[this.instruction] = (this.record[this.instruction] || 0) + 1;
  }

  /** Highest execution count seen for any single instruction (0 if none). */
  getMaxRecord() {
    return Object.values(this.record).reduce((max, count) => (count > max ? count : max), 0);
  }

  /**
   * Run until some instruction is about to execute for the second time;
   * return the accumulator value from just before that execution.
   */
  runUntil2nd() {
    let before;
    do {
      const { command, arg } = this.program[this.instruction];
      before = this.accu;
      this[command](arg);
    } while (this.getMaxRecord() < 2);
    return before;
  }

  /**
   * Run until either the instruction pointer leaves the program (return the
   * accumulator) or some instruction executes n times (return null).
   */
  runUntilN(n) {
    do {
      const cmd = this.program[this.instruction];
      if (!cmd) {
        return this.accu;
      }
      this[cmd.command](cmd.arg);
    } while (this.getMaxRecord() < n);
    return null;
  }

  /**
   * Try flipping each jmp<->nop in turn; return the accumulator of the
   * first variant that terminates normally.
   */
  tryChanges() {
    this.reset();
    for (let idx = 0; idx < this.program.length; idx++) {
      const cmd = this.program[idx];
      if (cmd.command === 'jmp') {
        cmd.command = 'nop';
      } else if (cmd.command === 'nop') {
        cmd.command = 'jmp';
      } else {
        continue;
      }
      const result = this.runUntilN(2);
      if (result !== null) {
        return result;
      }
      // undo the flip by re-parsing the untouched raw input
      this.reset();
    }
  }
}
// Part 1: execute the unmodified program and report the accumulator
// value from just before any instruction runs a second time.
function partOne() {
  let c = new Computer(inputArray);
  return c.runUntil2nd();
}

// Part 2: repair the program by flipping one jmp/nop and report the
// accumulator value after the fixed program terminates.
function partTwo() {
  let c = new Computer(inputArray)
  return c.tryChanges();
}

console.log("Part 1: " + partOne());
console.log("Part 2: " + partTwo()); |
krattai/AEBL | blades/gnunet/src/util/load.c | <reponame>krattai/AEBL
/*
This file is part of GNUnet.
Copyright (C) 2010, 2013 GNUnet e.V.
GNUnet is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 3, or (at your
option) any later version.
GNUnet is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNUnet; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
*/
/**
* @file util/load.c
* @brief functions related to load calculations
* @author <NAME>
*/
#include "platform.h"
#include "gnunet_util_lib.h"
#define LOG(kind,...) GNUNET_log_from (kind, "util", __VA_ARGS__)
/**
 * Values we track for load calculations.
 */
struct GNUNET_LOAD_Value
{

  /**
   * How fast should the load decline if no values are added?
   */
  struct GNUNET_TIME_Relative autodecline;

  /**
   * Last time this load value was updated by an event.
   */
  struct GNUNET_TIME_Absolute last_update;

  /**
   * Sum of all datastore delays ever observed (in ms). Note that
   * delays above 64k ms are excluded (to avoid overflow within
   * first 4 billion requests).
   */
  uint64_t cummulative_delay;

  /**
   * Sum of squares of all datastore delays ever observed (in ms). Note that
   * delays above 64k ms are excluded (to avoid overflow within
   * first 4 billion requests).
   */
  uint64_t cummulative_squared_delay;

  /**
   * Total number of requests included in the cummulative datastore delay values.
   */
  uint64_t cummulative_request_count;

  /**
   * Current running average datastore delay. Its relation to the
   * average datastore delay and its std. dev. (as calculated from the
   * cummulative values) tells us our current load.
   */
  double runavg_delay;

  /**
   * How high is the load? 0 for below average, otherwise
   * the number of std. devs we are above average, or 100 if the
   * load is so high that we currently cannot calculate it.
   */
  double load;

};
/**
 * Apply the automatic decline to the running average, based on how much
 * time has passed since @e last_update.
 *
 * NOTE(review): @e last_update is not advanced here, so consecutive calls
 * without an intervening #GNUNET_LOAD_update each decay again from the
 * same baseline -- confirm this is the intended aging semantics.
 *
 * @param load load value to age
 */
static void
internal_update (struct GNUNET_LOAD_Value *load)
{
  struct GNUNET_TIME_Relative delta;
  unsigned int n;

  /* 'forever' means: never auto-decline */
  if (load->autodecline.rel_value_us == GNUNET_TIME_UNIT_FOREVER_REL.rel_value_us)
    return;
  delta = GNUNET_TIME_absolute_get_duration (load->last_update);
  if (delta.rel_value_us < load->autodecline.rel_value_us)
    return;
  /* zero interval: decline instantly to no load */
  if (0 == load->autodecline.rel_value_us)
  {
    load->runavg_delay = 0.0;
    load->load = 0;
    return;
  }
  n = delta.rel_value_us / load->autodecline.rel_value_us;
  /* after more than 16 decay steps (7/8)^n is negligible; just reset */
  if (n > 16)
  {
    load->runavg_delay = 0.0;
    load->load = 0;
    return;
  }
  while (n > 0)
  {
    n--;
    /* exponential decay: multiply the running average by 7/8 per step */
    load->runavg_delay = (load->runavg_delay * 7.0) / 8.0;
  }
}
/**
 * Create a new load value.
 *
 * @param autodecline speed at which this value should automatically
 *        decline in the absence of external events; at the given
 *        frequency, 0-load values will be added to the load
 * @return the new load value
 */
struct GNUNET_LOAD_Value *
GNUNET_LOAD_value_init (struct GNUNET_TIME_Relative autodecline)
{
  struct GNUNET_LOAD_Value *ret;

  ret = GNUNET_new (struct GNUNET_LOAD_Value);
  ret->autodecline = autodecline;
  /* the decline clock starts ticking at creation time */
  ret->last_update = GNUNET_TIME_absolute_get ();
  return ret;
}
/**
 * Change the value by which the load automatically declines.
 *
 * @param load load to update
 * @param autodecline frequency of load decline
 */
void
GNUNET_LOAD_value_set_decline (struct GNUNET_LOAD_Value *load,
                               struct GNUNET_TIME_Relative autodecline)
{
  /* age the value under the OLD decline rate before switching */
  internal_update (load);
  load->autodecline = autodecline;
}
/**
 * Recalculate our load value.
 *
 * @param load load to update
 */
static void
calculate_load (struct GNUNET_LOAD_Value *load)
{
  double stddev;
  double avgdel;
  double sum_val_i;
  double n;
  double nm1;

  /* need at least two samples for a meaningful deviation estimate */
  if (load->cummulative_request_count <= 1)
    return;
  /* calculate std dev of latency; we have for n values of "i" that:
   *
   * avg = (sum val_i) / n
   * stddev = (sum (val_i - avg)^2) / (n-1)
   * = (sum (val_i^2 - 2 avg val_i + avg^2) / (n-1)
   * = (sum (val_i^2) - 2 avg sum (val_i) + n * avg^2) / (n-1)
   */
  sum_val_i = (double) load->cummulative_delay;
  n = ((double) load->cummulative_request_count);
  nm1 = n - 1.0;
  avgdel = sum_val_i / n;
  /* NOTE(review): no square root is taken below, so 'stddev' actually
   * holds the sample variance; the load is therefore measured in
   * variance units -- confirm against callers before changing. */
  stddev =
      (((double) load->cummulative_squared_delay) - 2.0 * avgdel * sum_val_i +
       n * avgdel * avgdel) / nm1;
  if (stddev <= 0)
    stddev = 0.01; /* must have been rounding error or zero; prevent division by zero */
  /* now calculate load based on how far out we are from
   * std dev; or if we are below average, simply assume load zero */
  if (load->runavg_delay < avgdel)
    load->load = 0.0;
  else
    load->load = (load->runavg_delay - avgdel) / stddev;
}
/**
 * Get the current load.
 *
 * @param load load handle
 * @return zero for below-average load, otherwise
 *         number of std. devs we are above average;
 *         100 if the latest updates were so large
 *         that we could not do proper calculations
 */
double
GNUNET_LOAD_get_load (struct GNUNET_LOAD_Value *load)
{
  /* age the value first, then recompute the load from the statistics */
  internal_update (load);
  calculate_load (load);
  return load->load;
}
/**
 * Get the average value given to update so far.
 *
 * @param load load handle
 * @return zero if update was never called
 */
double
GNUNET_LOAD_get_average (struct GNUNET_LOAD_Value *load)
{
  internal_update (load);
  /* no observations yet: report a zero average */
  if (0 == load->cummulative_request_count)
    return 0.0;
  return ((double) load->cummulative_delay)
         / ((double) load->cummulative_request_count);
}
/**
 * Update the current load.
 *
 * @param load to update
 * @param data latest measurement value (for example, delay)
 */
void
GNUNET_LOAD_update (struct GNUNET_LOAD_Value *load, uint64_t data)
{
  uint32_t dv;

  internal_update (load);
  load->last_update = GNUNET_TIME_absolute_get ();
  /* values above 64k are excluded from the statistics (see the struct
   * documentation); treat them as maximal load instead */
  if (data > 64 * 1024)
  {
    /* very large */
    load->load = 100.0;
    return;
  }
  dv = (uint32_t) data;
  load->cummulative_delay += dv;
  load->cummulative_squared_delay += dv * dv;
  load->cummulative_request_count++;
  /* exponential moving average with weight 1/8 for the new sample */
  load->runavg_delay = ((load->runavg_delay * 7.0) + dv) / 8.0;
}
/* end of load.c */
|
devbrain/neutrino | src/thirdparty/freetype/src/autofit/aflatin.c | /****************************************************************************
*
* aflatin.c
*
* Auto-fitter hinting routines for latin writing system (body).
*
* Copyright (C) 2003-2020 by
* <NAME>, <NAME>, and <NAME>.
*
* This file is part of the FreeType project, and may only be used,
* modified, and distributed under the terms of the FreeType project
* license, LICENSE.TXT. By continuing to use, modify, or distribute
* this file you indicate that you have read the license and
* understand and accept it fully.
*
*/
#include <ft2build.h>
#include FT_ADVANCES_H
#include FT_INTERNAL_DEBUG_H
#include "afglobal.h"
#include "aflatin.h"
#include "aferrors.h"
#ifdef AF_CONFIG_OPTION_USE_WARPER
#include "afwarp.h"
#endif
/**************************************************************************
*
* The macro FT_COMPONENT is used in trace mode. It is an implicit
* parameter of the FT_TRACE() and FT_ERROR() macros, used to print/log
* messages during execution.
*/
#undef FT_COMPONENT
#define FT_COMPONENT aflatin
/* needed for computation of round vs. flat segments */
#define FLAT_THRESHOLD(x) ( x / 14 )
/*************************************************************************/
/*************************************************************************/
/***** *****/
/***** L A T I N G L O B A L M E T R I C S *****/
/***** *****/
/*************************************************************************/
/*************************************************************************/
/* Find segments and links, compute all stem widths, and initialize */
/* standard width and height for the glyph with given charcode. */
FT_LOCAL_DEF(void)
af_latin_metrics_init_widths (AF_LatinMetrics metrics,
                              FT_Face face) {
  /* scan the array of segments in each direction */
  AF_GlyphHintsRec hints[1];

  FT_TRACE5(("\n"
             "latin standard widths computation (style `%s')\n"
             "=====================================================\n"
             "\n",
             af_style_names[metrics->root.style_class->style]));
  af_glyph_hints_init (hints, face->memory);
  metrics->axis[AF_DIMENSION_HORZ].width_count = 0;
  metrics->axis[AF_DIMENSION_VERT].width_count = 0;
  {
    FT_Error error;
    FT_ULong glyph_index;
    int dim;
    AF_LatinMetricsRec dummy[1];
    AF_Scaler scaler = &dummy->root.scaler;
    AF_StyleClass style_class = metrics->root.style_class;
    AF_ScriptClass script_class = af_script_classes[style_class->script];
    /* If HarfBuzz is not available, we need a pointer to a single */
    /* unsigned long value. */
#ifdef FT_CONFIG_OPTION_USE_HARFBUZZ
    void* shaper_buf;
#else
    FT_ULong shaper_buf_;
    void* shaper_buf = &shaper_buf_;
#endif
    const char* p;
#ifdef FT_DEBUG_LEVEL_TRACE
    FT_ULong ch = 0;
#endif
    p = script_class->standard_charstring;
#ifdef FT_CONFIG_OPTION_USE_HARFBUZZ
    shaper_buf = af_shaper_buf_create( face );
#endif
    /*
     * We check a list of standard characters to catch features like
     * `c2sc' (small caps from caps) that don't contain lowercase letters
     * by definition, or other features that mainly operate on numerals.
     * The first match wins.
     */
    glyph_index = 0;
    while (*p) {
      unsigned int num_idx;
#ifdef FT_DEBUG_LEVEL_TRACE
      const char* p_old;
#endif
      /* skip whitespace between candidate characters */
      while (*p == ' ')
        p++;
#ifdef FT_DEBUG_LEVEL_TRACE
      p_old = p;
      GET_UTF8_CHAR( ch, p_old );
#endif
      /* reject input that maps to more than a single glyph */
      p = af_shaper_get_cluster (p, &metrics->root, shaper_buf, &num_idx);
      if (num_idx > 1)
        continue;
      /* otherwise exit loop if we have a result */
      glyph_index = af_shaper_get_elem (&metrics->root,
                                        shaper_buf,
                                        0,
                                        NULL,
                                        NULL);
      if (glyph_index)
        break;
    }
    af_shaper_buf_destroy (face, shaper_buf);
    if (!glyph_index) {
      FT_TRACE5(("standard character missing;"
                 " using fallback stem widths\n"));
      goto Exit;
    }
    FT_TRACE5(("standard character: U+%04lX (glyph index %d)\n",
               ch, glyph_index));
    error = FT_Load_Glyph (face, glyph_index, FT_LOAD_NO_SCALE);
    if (error || face->glyph->outline.n_points <= 0)
      goto Exit;
    /* hint the standard glyph at identity scale with no deltas */
    FT_ZERO(dummy);
    dummy->units_per_em = metrics->units_per_em;
    scaler->x_scale = 0x10000L;
    scaler->y_scale = 0x10000L;
    scaler->x_delta = 0;
    scaler->y_delta = 0;
    scaler->face = face;
    scaler->render_mode = FT_RENDER_MODE_NORMAL;
    scaler->flags = 0;
    af_glyph_hints_rescale (hints, (AF_StyleMetrics) dummy);
    error = af_glyph_hints_reload (hints, &face->glyph->outline);
    if (error)
      goto Exit;
    /* collect stem widths separately for each hinting dimension */
    for (dim = 0; dim < AF_DIMENSION_MAX; dim++) {
      AF_LatinAxis axis = &metrics->axis[dim];
      AF_AxisHints axhints = &hints->axis[dim];
      AF_Segment seg, limit, link;
      FT_UInt num_widths = 0;
      error = af_latin_hints_compute_segments (hints,
                                               (AF_Dimension) dim);
      if (error)
        goto Exit;
      /*
       * We assume that the glyphs selected for the stem width
       * computation are `featureless' enough so that the linking
       * algorithm works fine without adjustments of its scoring
       * function.
       */
      af_latin_hints_link_segments (hints,
                                    0,
                                    NULL,
                                    (AF_Dimension) dim);
      seg = axhints->segments;
      limit = seg + axhints->num_segments;
      for (; seg < limit; seg++) {
        link = seg->link;
        /* we only consider stem segments there! */
        if (link && link->link == seg && link > seg) {
          FT_Pos dist;
          dist = seg->pos - link->pos;
          if (dist < 0)
            dist = -dist;
          if (num_widths < AF_LATIN_MAX_WIDTHS)
            axis->widths[num_widths++].org = dist;
        }
      }
      /* this also replaces multiple almost identical stem widths */
      /* with a single one (the value 100 is heuristic) */
      af_sort_and_quantize_widths (&num_widths, axis->widths,
                                   dummy->units_per_em / 100);
      axis->width_count = num_widths;
    }
  Exit:
    for (dim = 0; dim < AF_DIMENSION_MAX; dim++) {
      AF_LatinAxis axis = &metrics->axis[dim];
      FT_Pos stdw;
      /* fall back to a heuristic constant if no stem width was found */
      stdw = (axis->width_count > 0) ? axis->widths[0].org
                                     : AF_LATIN_CONSTANT(metrics, 50);
      /* let's try 20% of the smallest width */
      axis->edge_distance_threshold = stdw / 5;
      axis->standard_width = stdw;
      axis->extra_light = 0;
#ifdef FT_DEBUG_LEVEL_TRACE
      {
        FT_UInt i;
        FT_TRACE5(( "%s widths:\n",
                    dim == AF_DIMENSION_VERT ? "horizontal"
                                             : "vertical" ));
        FT_TRACE5(( " %d (standard)", axis->standard_width ));
        for ( i = 1; i < axis->width_count; i++ )
          FT_TRACE5(( " %d", axis->widths[i].org ));
        FT_TRACE5(( "\n" ));
      }
#endif
    }
  }
  FT_TRACE5(("\n"));
  af_glyph_hints_done (hints);
}
/* Sort the given array of blue-zone pointers from bottom to top. */
/* The sort key of a zone is its bottom edge: the reference point */
/* for (sub-)top zones and the overshoot point for all others.    */
static void
af_latin_sort_blue (FT_UInt count,
AF_LatinBlue* table) {
FT_UInt idx;
/* straight insertion sort -- stable, and perfectly adequate for */
/* the handful of blue zones a style defines                     */
for (idx = 1; idx < count; idx++) {
FT_UInt pos = idx;
while (pos > 0) {
AF_LatinBlue lower = table[pos - 1];
AF_LatinBlue upper = table[pos];
FT_Pos key_lower, key_upper;
/* bottom edge of the zone at `pos - 1' */
key_lower = (lower->flags & (AF_LATIN_BLUE_TOP |
AF_LATIN_BLUE_SUB_TOP))
? lower->ref.org
: lower->shoot.org;
/* bottom edge of the zone at `pos' */
key_upper = (upper->flags & (AF_LATIN_BLUE_TOP |
AF_LATIN_BLUE_SUB_TOP))
? upper->ref.org
: upper->shoot.org;
if (key_upper >= key_lower)
break;
/* out of order: swap and keep sifting downwards */
table[pos] = lower;
table[pos - 1] = upper;
pos--;
}
}
}
/* Find all blue zones. Flat segments give the reference points, */
/* round segments the overshoot positions. */
/* */
/* Returns 0 on success, or 1 if not a single blue zone could be */
/* computed (the caller then disables hinting for this style). */
static int
af_latin_metrics_init_blues (AF_LatinMetrics metrics,
FT_Face face) {
FT_Pos flats[AF_BLUE_STRING_MAX_LEN];
FT_Pos rounds[AF_BLUE_STRING_MAX_LEN];
FT_UInt num_flats;
FT_UInt num_rounds;
AF_LatinBlue blue;
FT_Error error;
AF_LatinAxis axis = &metrics->axis[AF_DIMENSION_VERT];
FT_Outline outline;
AF_StyleClass sc = metrics->root.style_class;
AF_Blue_Stringset bss = sc->blue_stringset;
const AF_Blue_StringRec* bs = &af_blue_stringsets[bss];
/* heuristic threshold (in font units): extremum segments whose */
/* horizontal extent of `on' points exceeds this are flat */
FT_Pos flat_threshold = FLAT_THRESHOLD(metrics->units_per_em);
/* If HarfBuzz is not available, we need a pointer to a single */
/* unsigned long value. */
#ifdef FT_CONFIG_OPTION_USE_HARFBUZZ
void* shaper_buf;
#else
FT_ULong shaper_buf_;
void* shaper_buf = &shaper_buf_;
#endif
/* we walk over the blue character strings as specified in the */
/* style's entry in the `af_blue_stringset' array */
FT_TRACE5(("latin blue zones computation\n"
"============================\n"
"\n"));
#ifdef FT_CONFIG_OPTION_USE_HARFBUZZ
shaper_buf = af_shaper_buf_create( face );
#endif
/* one iteration per blue string, i.e. per candidate blue zone */
for (; bs->string != AF_BLUE_STRING_MAX; bs++) {
const char* p = &af_blue_strings[bs->string];
FT_Pos* blue_ref;
FT_Pos* blue_shoot;
FT_Pos ascender;
FT_Pos descender;
#ifdef FT_DEBUG_LEVEL_TRACE
{
FT_Bool have_flag = 0;
FT_TRACE5(( "blue zone %d", axis->blue_count ));
if ( bs->properties )
{
FT_TRACE5(( " (" ));
if ( AF_LATIN_IS_TOP_BLUE( bs ) )
{
FT_TRACE5(( "top" ));
have_flag = 1;
}
else if ( AF_LATIN_IS_SUB_TOP_BLUE( bs ) )
{
FT_TRACE5(( "sub top" ));
have_flag = 1;
}
if ( AF_LATIN_IS_NEUTRAL_BLUE( bs ) )
{
if ( have_flag )
FT_TRACE5(( ", " ));
FT_TRACE5(( "neutral" ));
have_flag = 1;
}
if ( AF_LATIN_IS_X_HEIGHT_BLUE( bs ) )
{
if ( have_flag )
FT_TRACE5(( ", " ));
FT_TRACE5(( "small top" ));
have_flag = 1;
}
if ( AF_LATIN_IS_LONG_BLUE( bs ) )
{
if ( have_flag )
FT_TRACE5(( ", " ));
FT_TRACE5(( "long" ));
}
FT_TRACE5(( ")" ));
}
FT_TRACE5(( ":\n" ));
}
#endif /* FT_DEBUG_LEVEL_TRACE */
num_flats = 0;
num_rounds = 0;
ascender = 0;
descender = 0;
/* walk over all (space-separated) character clusters of the string */
while (*p) {
FT_ULong glyph_index;
FT_Long y_offset;
FT_Int best_point, best_contour_first, best_contour_last;
FT_Vector* points;
FT_Pos best_y_extremum; /* same as points.y */
FT_Bool best_round = 0;
unsigned int i, num_idx;
#ifdef FT_DEBUG_LEVEL_TRACE
const char* p_old;
FT_ULong ch;
#endif
while (*p == ' ')
p++;
#ifdef FT_DEBUG_LEVEL_TRACE
p_old = p;
GET_UTF8_CHAR( ch, p_old );
#endif
p = af_shaper_get_cluster (p, &metrics->root, shaper_buf, &num_idx);
if (!num_idx) {
FT_TRACE5((" U+%04lX unavailable\n", ch));
continue;
}
/* start with the worst possible value so that any real */
/* extremum replaces it */
if (AF_LATIN_IS_TOP_BLUE(bs))
best_y_extremum = FT_INT_MIN;
else
best_y_extremum = FT_INT_MAX;
/* iterate over all glyph elements of the character cluster */
/* and get the data of the `biggest' one */
for (i = 0; i < num_idx; i++) {
FT_Pos best_y;
FT_Bool round = 0;
/* load the character in the face -- skip unknown or empty ones */
glyph_index = af_shaper_get_elem (&metrics->root,
shaper_buf,
i,
NULL,
&y_offset);
if (glyph_index == 0) {
FT_TRACE5((" U+%04lX unavailable\n", ch));
continue;
}
error = FT_Load_Glyph (face, glyph_index, FT_LOAD_NO_SCALE);
outline = face->glyph->outline;
/* reject glyphs that don't produce any rendering */
if (error || outline.n_points <= 2) {
#ifdef FT_DEBUG_LEVEL_TRACE
if ( num_idx == 1 )
FT_TRACE5(( " U+%04lX contains no (usable) outlines\n", ch ));
else
FT_TRACE5(( " component %d of cluster starting with U+%04lX"
" contains no (usable) outlines\n", i, ch ));
#endif
continue;
}
/* now compute min or max point indices and coordinates */
points = outline.points;
best_point = -1;
best_y = 0; /* make compiler happy */
best_contour_first = 0; /* ditto */
best_contour_last = 0; /* ditto */
{
FT_Int nn;
FT_Int first = 0;
FT_Int last = -1;
for (nn = 0; nn < outline.n_contours; first = last + 1, nn++) {
FT_Int old_best_point = best_point;
FT_Int pp;
last = outline.contours[nn];
/* Avoid single-point contours since they are never */
/* rasterized. In some fonts, they correspond to mark */
/* attachment points that are way outside of the glyph's */
/* real outline. */
if (last <= first)
continue;
/* top zones track the maximum y, bottom zones the minimum; */
/* the opposite extremum feeds the ascender/descender data */
if (AF_LATIN_IS_TOP_BLUE(bs) ||
AF_LATIN_IS_SUB_TOP_BLUE(bs)) {
for (pp = first; pp <= last; pp++) {
if (best_point < 0 || points[pp].y > best_y) {
best_point = pp;
best_y = points[pp].y;
ascender = FT_MAX(ascender, best_y + y_offset);
}
else
descender = FT_MIN(descender, points[pp].y + y_offset);
}
}
else {
for (pp = first; pp <= last; pp++) {
if (best_point < 0 || points[pp].y < best_y) {
best_point = pp;
best_y = points[pp].y;
descender = FT_MIN(descender, best_y + y_offset);
}
else
ascender = FT_MAX(ascender, points[pp].y + y_offset);
}
}
if (best_point != old_best_point) {
best_contour_first = first;
best_contour_last = last;
}
}
}
/* now check whether the point belongs to a straight or round */
/* segment; we first need to find in which contour the extremum */
/* lies, then inspect its previous and next points */
if (best_point >= 0) {
FT_Pos best_x = points[best_point].x;
FT_Int prev, next;
FT_Int best_segment_first, best_segment_last;
FT_Int best_on_point_first, best_on_point_last;
FT_Pos dist;
best_segment_first = best_point;
best_segment_last = best_point;
if (FT_CURVE_TAG(outline.tags[best_point]) == FT_CURVE_TAG_ON) {
best_on_point_first = best_point;
best_on_point_last = best_point;
}
else {
best_on_point_first = -1;
best_on_point_last = -1;
}
/* look for the previous and next points on the contour */
/* that are not on the same Y coordinate, then threshold */
/* the `closeness'... */
prev = best_point;
next = prev;
do {
if (prev > best_contour_first)
prev--;
else
prev = best_contour_last;
dist = FT_ABS(points[prev].y - best_y);
/* accept a small distance or a small angle (both values are */
/* heuristic; value 20 corresponds to approx. 2.9 degrees) */
if (dist > 5)
if (FT_ABS(points[prev].x - best_x) <= 20 * dist)
break;
best_segment_first = prev;
if (FT_CURVE_TAG(outline.tags[prev]) == FT_CURVE_TAG_ON) {
best_on_point_first = prev;
if (best_on_point_last < 0)
best_on_point_last = prev;
}
}
while (prev != best_point);
/* same scan in the forward direction */
do {
if (next < best_contour_last)
next++;
else
next = best_contour_first;
dist = FT_ABS(points[next].y - best_y);
if (dist > 5)
if (FT_ABS(points[next].x - best_x) <= 20 * dist)
break;
best_segment_last = next;
if (FT_CURVE_TAG(outline.tags[next]) == FT_CURVE_TAG_ON) {
best_on_point_last = next;
if (best_on_point_first < 0)
best_on_point_first = next;
}
}
while (next != best_point);
if (AF_LATIN_IS_LONG_BLUE(bs)) {
/* If this flag is set, we have an additional constraint to */
/* get the blue zone distance: Find a segment of the topmost */
/* (or bottommost) contour that is longer than a heuristic */
/* threshold. This ensures that small bumps in the outline */
/* are ignored (for example, the `vertical serifs' found in */
/* many Hebrew glyph designs). */
/* If this segment is long enough, we are done. Otherwise, */
/* search the segment next to the extremum that is long */
/* enough, has the same direction, and a not too large */
/* vertical distance from the extremum. Note that the */
/* algorithm doesn't check whether the found segment is */
/* actually the one (vertically) nearest to the extremum. */
/* heuristic threshold value */
FT_Pos length_threshold = metrics->units_per_em / 25;
dist = FT_ABS(points[best_segment_last].x -
points[best_segment_first].x);
if (dist < length_threshold &&
best_segment_last - best_segment_first + 2 <=
best_contour_last - best_contour_first) {
/* heuristic threshold value */
FT_Pos height_threshold = metrics->units_per_em / 4;
FT_Int first;
FT_Int last;
FT_Bool hit;
/* we intentionally declare these two variables */
/* outside of the loop since various compilers emit */
/* incorrect warning messages otherwise, talking about */
/* `possibly uninitialized variables' */
FT_Int p_first = 0; /* make compiler happy */
FT_Int p_last = 0;
FT_Bool left2right;
/* compute direction */
prev = best_point;
do {
if (prev > best_contour_first)
prev--;
else
prev = best_contour_last;
if (points[prev].x != best_x)
break;
}
while (prev != best_point);
/* skip glyph for the degenerate case */
if (prev == best_point)
continue;
left2right = FT_BOOL(points[prev].x < points[best_point].x);
first = best_segment_last;
last = first;
hit = 0;
do {
FT_Bool l2r;
FT_Pos d;
if (!hit) {
/* no hit; adjust first point */
first = last;
/* also adjust first and last on point */
if (FT_CURVE_TAG(outline.tags[first]) ==
FT_CURVE_TAG_ON) {
p_first = first;
p_last = first;
}
else {
p_first = -1;
p_last = -1;
}
hit = 1;
}
if (last < best_contour_last)
last++;
else
last = best_contour_first;
if (FT_ABS(best_y - points[first].y) > height_threshold) {
/* vertical distance too large */
hit = 0;
continue;
}
/* same test as above */
dist = FT_ABS(points[last].y - points[first].y);
if (dist > 5)
if (FT_ABS(points[last].x - points[first].x) <=
20 * dist) {
hit = 0;
continue;
}
if (FT_CURVE_TAG(outline.tags[last]) == FT_CURVE_TAG_ON) {
p_last = last;
if (p_first < 0)
p_first = last;
}
l2r = FT_BOOL(points[first].x < points[last].x);
d = FT_ABS(points[last].x - points[first].x);
if (l2r == left2right &&
d >= length_threshold) {
/* all constraints are met; update segment after */
/* finding its end */
do {
if (last < best_contour_last)
last++;
else
last = best_contour_first;
d = FT_ABS(points[last].y - points[first].y);
/* NOTE(review): this test reads `points[next].x' and */
/* `20 * dist' while the analogous tests above use */
/* `last' and the freshly computed `d' -- verify */
/* against upstream whether this is intentional */
if (d > 5)
if (FT_ABS(points[next].x - points[first].x) <=
20 * dist) {
if (last > best_contour_first)
last--;
else
last = best_contour_last;
break;
}
/* NOTE(review): `p_last' is assigned unconditionally */
/* here and again inside the `on'-point check below; */
/* confirm the unconditional update is intended */
p_last = last;
if (FT_CURVE_TAG(outline.tags[last]) ==
FT_CURVE_TAG_ON) {
p_last = last;
if (p_first < 0)
p_first = last;
}
}
while (last != best_segment_first);
best_y = points[first].y;
best_segment_first = first;
best_segment_last = last;
best_on_point_first = p_first;
best_on_point_last = p_last;
break;
}
}
while (last != best_segment_first);
}
}
/* for computing blue zones, we add the y offset as returned */
/* by the currently used OpenType feature -- for example, */
/* superscript glyphs might be identical to subscript glyphs */
/* with a vertical shift */
best_y += y_offset;
#ifdef FT_DEBUG_LEVEL_TRACE
if ( num_idx == 1 )
FT_TRACE5(( " U+%04lX: best_y = %5ld", ch, best_y ));
else
FT_TRACE5(( " component %d of cluster starting with U+%04lX:"
" best_y = %5ld", i, ch, best_y ));
#endif
/* now set the `round' flag depending on the segment's kind: */
/* */
/* - if the horizontal distance between the first and last */
/* `on' point is larger than a heuristic threshold */
/* we have a flat segment */
/* - if either the first or the last point of the segment is */
/* an `off' point, the segment is round, otherwise it is */
/* flat */
if (best_on_point_first >= 0 &&
best_on_point_last >= 0 &&
(FT_ABS(points[best_on_point_last].x -
points[best_on_point_first].x)) >
flat_threshold)
round = 0;
else
round = FT_BOOL(
FT_CURVE_TAG (outline.tags[best_segment_first]) !=
FT_CURVE_TAG_ON ||
FT_CURVE_TAG (outline.tags[best_segment_last]) !=
FT_CURVE_TAG_ON);
if (round && AF_LATIN_IS_NEUTRAL_BLUE(bs)) {
/* only use flat segments for a neutral blue zone */
FT_TRACE5((" (round, skipped)\n"));
continue;
}
FT_TRACE5((" (%s)\n", round ? "round" : "flat"));
}
/* keep the most extreme y value over all cluster components */
if (AF_LATIN_IS_TOP_BLUE(bs)) {
if (best_y > best_y_extremum) {
best_y_extremum = best_y;
best_round = round;
}
}
else {
if (best_y < best_y_extremum) {
best_y_extremum = best_y;
best_round = round;
}
}
} /* end for loop */
/* record the extremum in the appropriate table unless no */
/* component contributed a usable value */
if (!(best_y_extremum == FT_INT_MIN ||
best_y_extremum == FT_INT_MAX)) {
if (best_round)
rounds[num_rounds++] = best_y_extremum;
else
flats[num_flats++] = best_y_extremum;
}
} /* end while loop */
if (num_flats == 0 && num_rounds == 0) {
/*
* we couldn't find a single glyph to compute this blue zone,
* we will simply ignore it then
*/
FT_TRACE5((" empty\n"));
continue;
}
/* we have computed the contents of the `rounds' and `flats' tables, */
/* now determine the reference and overshoot position of the blue -- */
/* we simply take the median value after a simple sort */
af_sort_pos (num_rounds, rounds);
af_sort_pos (num_flats, flats);
blue = &axis->blues[axis->blue_count];
blue_ref = &blue->ref.org;
blue_shoot = &blue->shoot.org;
axis->blue_count++;
if (num_flats == 0) {
*blue_ref =
*blue_shoot = rounds[num_rounds / 2];
}
else if (num_rounds == 0) {
*blue_ref =
*blue_shoot = flats[num_flats / 2];
}
else {
*blue_ref = flats[num_flats / 2];
*blue_shoot = rounds[num_rounds / 2];
}
/* there are sometimes problems: if the overshoot position of top */
/* zones is under its reference position, or the opposite for bottom */
/* zones. We must thus check everything there and correct the errors */
if (*blue_shoot != *blue_ref) {
FT_Pos ref = *blue_ref;
FT_Pos shoot = *blue_shoot;
FT_Bool over_ref = FT_BOOL(shoot > ref);
if ((AF_LATIN_IS_TOP_BLUE(bs) ||
AF_LATIN_IS_SUB_TOP_BLUE(bs)) ^ over_ref) {
*blue_ref =
*blue_shoot = (shoot + ref) / 2;
FT_TRACE5((" [overshoot smaller than reference,"
" taking mean value]\n"));
}
}
blue->ascender = ascender;
blue->descender = descender;
blue->flags = 0;
if (AF_LATIN_IS_TOP_BLUE(bs))
blue->flags |= AF_LATIN_BLUE_TOP;
if (AF_LATIN_IS_SUB_TOP_BLUE(bs))
blue->flags |= AF_LATIN_BLUE_SUB_TOP;
if (AF_LATIN_IS_NEUTRAL_BLUE(bs))
blue->flags |= AF_LATIN_BLUE_NEUTRAL;
/*
* The following flag is used later to adjust the y and x scales
* in order to optimize the pixel grid alignment of the top of small
* letters.
*/
if (AF_LATIN_IS_X_HEIGHT_BLUE(bs))
blue->flags |= AF_LATIN_BLUE_ADJUSTMENT;
FT_TRACE5((" -> reference = %ld\n"
" overshoot = %ld\n",
*blue_ref, *blue_shoot));
} /* end for loop */
af_shaper_buf_destroy (face, shaper_buf);
if (axis->blue_count) {
/* we finally check whether blue zones are ordered; */
/* `ref' and `shoot' values of two blue zones must not overlap */
FT_UInt i;
AF_LatinBlue blue_sorted[AF_BLUE_STRINGSET_MAX_LEN + 2];
for (i = 0; i < axis->blue_count; i++)
blue_sorted[i] = &axis->blues[i];
/* sort bottoms of blue zones... */
af_latin_sort_blue (axis->blue_count, blue_sorted);
/* ...and adjust top values if necessary */
for (i = 0; i < axis->blue_count - 1; i++) {
FT_Pos* a;
FT_Pos* b;
#ifdef FT_DEBUG_LEVEL_TRACE
FT_Bool a_is_top = 0;
#endif
if (blue_sorted[i]->flags & (AF_LATIN_BLUE_TOP |
AF_LATIN_BLUE_SUB_TOP)) {
a = &blue_sorted[i]->shoot.org;
#ifdef FT_DEBUG_LEVEL_TRACE
a_is_top = 1;
#endif
}
else
a = &blue_sorted[i]->ref.org;
if (blue_sorted[i + 1]->flags & (AF_LATIN_BLUE_TOP |
AF_LATIN_BLUE_SUB_TOP))
b = &blue_sorted[i + 1]->shoot.org;
else
b = &blue_sorted[i + 1]->ref.org;
/* clamp the top of the lower zone onto the bottom of the */
/* upper one when they overlap */
if (*a > *b) {
*a = *b;
FT_TRACE5(("blue zone overlap:"
" adjusting %s %d to %ld\n",
a_is_top ? "overshoot" : "reference",
blue_sorted[i] - axis->blues,
*a));
}
}
FT_TRACE5(("\n"));
return 0;
}
else {
/* disable hinting for the current style if there are no blue zones */
AF_FaceGlobals globals = metrics->root.globals;
FT_UShort* gstyles = globals->glyph_styles;
FT_Long i;
FT_TRACE5(("no blue zones found:"
" hinting disabled for this style\n"));
for (i = 0; i < globals->glyph_count; i++) {
if ((gstyles[i] & AF_STYLE_MASK) == sc->style)
gstyles[i] = AF_STYLE_NONE_DFLT;
}
FT_TRACE5(("\n"));
return 1;
}
}
/* Check whether all ASCII digits have the same advance width. */
/* The result is stored in `metrics->root.digits_have_same_width'. */
FT_LOCAL_DEF(void)
af_latin_metrics_check_digits (AF_LatinMetrics metrics,
FT_Face face) {
FT_Bool have_reference = 0;
FT_Bool uniform = 1;
FT_Fixed adv = 0;
FT_Fixed reference_adv = 0;
/* If HarfBuzz is not available, we need a pointer to a single */
/* unsigned long value. */
#ifdef FT_CONFIG_OPTION_USE_HARFBUZZ
void* shaper_buf;
#else
FT_ULong shaper_buf_;
void* shaper_buf = &shaper_buf_;
#endif
/* in all supported charmaps, digits have character codes 0x30-0x39 */
const char digits[] = "0 1 2 3 4 5 6 7 8 9";
const char* cursor = digits;
#ifdef FT_CONFIG_OPTION_USE_HARFBUZZ
shaper_buf = af_shaper_buf_create( face );
#endif
while (*cursor) {
FT_ULong gidx;
unsigned int cluster_size;
/* reject input that maps to more than a single glyph */
cursor = af_shaper_get_cluster (cursor,
&metrics->root,
shaper_buf,
&cluster_size);
if (cluster_size > 1)
continue;
gidx = af_shaper_get_elem (&metrics->root,
shaper_buf,
0,
&adv,
NULL);
if (!gidx)
continue;
/* the first available digit sets the reference width; any */
/* later mismatch settles the question immediately */
if (!have_reference) {
reference_adv = adv;
have_reference = 1;
}
else if (adv != reference_adv) {
uniform = 0;
break;
}
}
af_shaper_buf_destroy (face, shaper_buf);
metrics->root.digits_have_same_width = uniform;
}
/* Initialize global metrics. */
/* Returns FT_Err_Ok on success, or the internal value -1 when */
/* no blue zones could be computed for this face. */
FT_LOCAL_DEF(FT_Error)
af_latin_metrics_init (AF_LatinMetrics metrics,
FT_Face face) {
FT_Error status = FT_Err_Ok;
/* remember the active charmap so we can restore it afterwards */
FT_CharMap saved_charmap = face->charmap;
metrics->units_per_em = face->units_per_EM;
/* all computations below require a Unicode charmap */
if (!FT_Select_Charmap (face, FT_ENCODING_UNICODE)) {
af_latin_metrics_init_widths (metrics, face);
if (af_latin_metrics_init_blues (metrics, face))
/* use internal error code to indicate missing blue zones */
status = -1;
else
af_latin_metrics_check_digits (metrics, face);
}
FT_Set_Charmap (face, saved_charmap);
return status;
}
/* Adjust scaling value, then scale and shift widths */
/* and blue zones (if applicable) for given dimension. */
/* */
/* The cached `org_scale'/`org_delta' values make repeated calls */
/* with unchanged scaler parameters an early no-op. */
static void
af_latin_metrics_scale_dim (AF_LatinMetrics metrics,
AF_Scaler scaler,
AF_Dimension dim) {
FT_Fixed scale;
FT_Pos delta;
AF_LatinAxis axis;
FT_UInt nn;
if (dim == AF_DIMENSION_HORZ) {
scale = scaler->x_scale;
delta = scaler->x_delta;
}
else {
scale = scaler->y_scale;
delta = scaler->y_delta;
}
axis = &metrics->axis[dim];
/* nothing changed since the last call for this dimension */
if (axis->org_scale == scale && axis->org_delta == delta)
return;
axis->org_scale = scale;
axis->org_delta = delta;
/*
* correct X and Y scale to optimize the alignment of the top of small
* letters to the pixel grid
*/
{
AF_LatinAxis Axis = &metrics->axis[AF_DIMENSION_VERT];
AF_LatinBlue blue = NULL;
/* find the blue zone flagged for x-height adjustment */
for (nn = 0; nn < Axis->blue_count; nn++) {
if (Axis->blues[nn].flags & AF_LATIN_BLUE_ADJUSTMENT) {
blue = &Axis->blues[nn];
break;
}
}
if (blue) {
FT_Pos scaled;
FT_Pos threshold;
FT_Pos fitted;
FT_UInt limit;
FT_UInt ppem;
scaled = FT_MulFix(blue->shoot.org, scale);
ppem = metrics->root.scaler.face->size->metrics.x_ppem;
limit = metrics->root.globals->increase_x_height;
/* rounding threshold in 1/64 pixel units (40/64 ~ 0.625) */
threshold = 40;
/* if the `increase-x-height' property is active, */
/* we round up much more often */
if (limit &&
ppem <= limit &&
ppem >= AF_PROP_INCREASE_X_HEIGHT_MIN)
threshold = 52;
/* snap the scaled x-height to the pixel grid */
fitted = (scaled + threshold) & ~63;
if (scaled != fitted) {
#if 0
if ( dim == AF_DIMENSION_HORZ )
{
if ( fitted < scaled )
scale -= scale / 50; /* scale *= 0.98 */
}
else
#endif
if (dim == AF_DIMENSION_VERT) {
FT_Pos max_height;
FT_Pos dist;
FT_Fixed new_scale;
new_scale = FT_MulDiv (scale, fitted, scaled);
/* the scaling should not change the result by more than two pixels */
max_height = metrics->units_per_em;
for (nn = 0; nn < Axis->blue_count; nn++) {
max_height = FT_MAX(max_height, Axis->blues[nn].ascender);
max_height = FT_MAX(max_height, -Axis->blues[nn].descender);
}
dist = FT_ABS(FT_MulFix (max_height, new_scale - scale));
dist &= ~127;
/* only accept the corrected scale if the overall change */
/* stays below two pixels (128 in 1/64 pixel units) */
if (dist == 0) {
FT_TRACE5((
"af_latin_metrics_scale_dim:"
" x height alignment (style `%s'):\n"
" "
" vertical scaling changed from %.5f to %.5f (by %d%%)\n"
"\n",
af_style_names[metrics->root.style_class->style],
scale / 65536.0,
new_scale / 65536.0,
(fitted - scaled) * 100 / scaled));
scale = new_scale;
}
#ifdef FT_DEBUG_LEVEL_TRACE
else
{
FT_TRACE5((
"af_latin_metrics_scale_dim:"
" x height alignment (style `%s'):\n"
" "
" excessive vertical scaling abandoned\n"
"\n",
af_style_names[metrics->root.style_class->style] ));
}
#endif
}
}
}
}
axis->scale = scale;
axis->delta = delta;
/* propagate the (possibly corrected) values to the root scaler */
if (dim == AF_DIMENSION_HORZ) {
metrics->root.scaler.x_scale = scale;
metrics->root.scaler.x_delta = delta;
}
else {
metrics->root.scaler.y_scale = scale;
metrics->root.scaler.y_delta = delta;
}
FT_TRACE5(("%s widths (style `%s')\n",
dim == AF_DIMENSION_HORZ ? "horizontal" : "vertical",
af_style_names[metrics->root.style_class->style]));
/* scale the widths */
for (nn = 0; nn < axis->width_count; nn++) {
AF_Width width = axis->widths + nn;
width->cur = FT_MulFix(width->org, scale);
width->fit = width->cur;
FT_TRACE5((" %d scaled to %.2f\n",
width->org,
width->cur / 64.0));
}
FT_TRACE5(("\n"));
/* an extra-light axis corresponds to a standard width that is */
/* smaller than 5/8 pixels */
axis->extra_light =
FT_BOOL(FT_MulFix (axis->standard_width, scale) < 32 + 8);
#ifdef FT_DEBUG_LEVEL_TRACE
if ( axis->extra_light )
FT_TRACE5(( "`%s' style is extra light (at current resolution)\n"
"\n",
af_style_names[metrics->root.style_class->style] ));
#endif
if (dim == AF_DIMENSION_VERT) {
#ifdef FT_DEBUG_LEVEL_TRACE
if ( axis->blue_count )
FT_TRACE5(( "blue zones (style `%s')\n",
af_style_names[metrics->root.style_class->style] ));
#endif
/* scale the blue zones */
for (nn = 0; nn < axis->blue_count; nn++) {
AF_LatinBlue blue = &axis->blues[nn];
FT_Pos dist;
blue->ref.cur = FT_MulFix(blue->ref.org, scale) + delta;
blue->ref.fit = blue->ref.cur;
blue->shoot.cur = FT_MulFix(blue->shoot.org, scale) + delta;
blue->shoot.fit = blue->shoot.cur;
blue->flags &= ~AF_LATIN_BLUE_ACTIVE;
/* a blue zone is only active if it is less than 3/4 pixels tall */
dist = FT_MulFix(blue->ref.org - blue->shoot.org, scale);
if (dist <= 48 && dist >= -48) {
#if 0
FT_Pos delta1;
#endif
FT_Pos delta2;
/* use discrete values for blue zone widths */
#if 0
/* generic, original code */
delta1 = blue->shoot.org - blue->ref.org;
delta2 = delta1;
if ( delta1 < 0 )
delta2 = -delta2;
delta2 = FT_MulFix( delta2, scale );
if ( delta2 < 32 )
delta2 = 0;
else if ( delta2 < 64 )
delta2 = 32 + ( ( ( delta2 - 32 ) + 16 ) & ~31 );
else
delta2 = FT_PIX_ROUND( delta2 );
if ( delta1 < 0 )
delta2 = -delta2;
blue->ref.fit = FT_PIX_ROUND( blue->ref.cur );
blue->shoot.fit = blue->ref.fit + delta2;
#else
/* simplified version due to abs(dist) <= 48 */
/* quantize the zone height to 0, 1/2 or 1 pixel */
delta2 = dist;
if (dist < 0)
delta2 = -delta2;
if (delta2 < 32)
delta2 = 0;
else if (delta2 < 48)
delta2 = 32;
else
delta2 = 64;
if (dist < 0)
delta2 = -delta2;
blue->ref.fit = FT_PIX_ROUND(blue->ref.cur);
blue->shoot.fit = blue->ref.fit - delta2;
#endif
blue->flags |= AF_LATIN_BLUE_ACTIVE;
}
}
/* use sub-top blue zone only if it doesn't overlap with */
/* another (non-sup-top) blue zone; otherwise, the */
/* effect would be similar to a neutral blue zone, which */
/* is not desired here */
for (nn = 0; nn < axis->blue_count; nn++) {
AF_LatinBlue blue = &axis->blues[nn];
FT_UInt i;
if (!(blue->flags & AF_LATIN_BLUE_SUB_TOP))
continue;
if (!(blue->flags & AF_LATIN_BLUE_ACTIVE))
continue;
for (i = 0; i < axis->blue_count; i++) {
AF_LatinBlue b = &axis->blues[i];
if (b->flags & AF_LATIN_BLUE_SUB_TOP)
continue;
if (!(b->flags & AF_LATIN_BLUE_ACTIVE))
continue;
/* fitted intervals intersect -> deactivate the sub-top zone */
if (b->ref.fit <= blue->shoot.fit &&
b->shoot.fit >= blue->ref.fit) {
blue->flags &= ~AF_LATIN_BLUE_ACTIVE;
break;
}
}
}
#ifdef FT_DEBUG_LEVEL_TRACE
for ( nn = 0; nn < axis->blue_count; nn++ )
{
AF_LatinBlue blue = &axis->blues[nn];
FT_TRACE5(( " reference %d: %d scaled to %.2f%s\n"
" overshoot %d: %d scaled to %.2f%s\n",
nn,
blue->ref.org,
blue->ref.fit / 64.0,
( blue->flags & AF_LATIN_BLUE_ACTIVE ) ? ""
: " (inactive)",
nn,
blue->shoot.org,
blue->shoot.fit / 64.0,
( blue->flags & AF_LATIN_BLUE_ACTIVE ) ? ""
: " (inactive)" ));
}
#endif
}
}
/* Scale global values in both directions. */
/* Copies the scaler parameters into the root metrics, then lets */
/* `af_latin_metrics_scale_dim' do the per-dimension work. */
FT_LOCAL_DEF(void)
af_latin_metrics_scale (AF_LatinMetrics metrics,
AF_Scaler scaler) {
/* cache the scaler parameters for later hinting passes */
metrics->root.scaler.face = scaler->face;
metrics->root.scaler.flags = scaler->flags;
metrics->root.scaler.render_mode = scaler->render_mode;
/* horizontal first, then vertical */
af_latin_metrics_scale_dim (metrics, scaler, AF_DIMENSION_HORZ);
af_latin_metrics_scale_dim (metrics, scaler, AF_DIMENSION_VERT);
}
/* Extract standard_width from writing system/script specific */
/* metrics class. Either output pointer may be NULL if the */
/* caller is not interested in that value. */
FT_LOCAL_DEF(void)
af_latin_get_standard_widths (AF_LatinMetrics metrics,
FT_Pos* stdHW,
FT_Pos* stdVW) {
/* horizontal stems are measured along the vertical axis */
/* and vice versa */
if (stdHW != NULL)
*stdHW = metrics->axis[AF_DIMENSION_VERT].standard_width;
if (stdVW != NULL)
*stdVW = metrics->axis[AF_DIMENSION_HORZ].standard_width;
}
/*************************************************************************/
/*************************************************************************/
/***** *****/
/***** L A T I N G L Y P H A N A L Y S I S *****/
/***** *****/
/*************************************************************************/
/*************************************************************************/
/* Walk over all contours and compute its segments. */
FT_LOCAL_DEF(FT_Error)
af_latin_hints_compute_segments (AF_GlyphHints hints,
AF_Dimension dim) {
AF_LatinMetrics metrics = (AF_LatinMetrics) hints->metrics;
AF_AxisHints axis = &hints->axis[dim];
FT_Memory memory = hints->memory;
FT_Error error = FT_Err_Ok;
AF_Segment segment = NULL;
AF_SegmentRec seg0;
AF_Point* contour = hints->contours;
AF_Point* contour_limit = contour + hints->num_contours;
AF_Direction major_dir, segment_dir;
FT_Pos flat_threshold = FLAT_THRESHOLD(metrics->units_per_em);
FT_ZERO(&seg0);
seg0.score = 32000;
seg0.flags = AF_EDGE_NORMAL;
major_dir = (AF_Direction) FT_ABS(axis->major_dir);
segment_dir = major_dir;
axis->num_segments = 0;
/* set up (u,v) in each point */
if (dim == AF_DIMENSION_HORZ) {
AF_Point point = hints->points;
AF_Point limit = point + hints->num_points;
for (; point < limit; point++) {
point->u = point->fx;
point->v = point->fy;
}
}
else {
AF_Point point = hints->points;
AF_Point limit = point + hints->num_points;
for (; point < limit; point++) {
point->u = point->fy;
point->v = point->fx;
}
}
/* do each contour separately */
for (; contour < contour_limit; contour++) {
AF_Point point = contour[0];
AF_Point last = point->prev;
int on_edge = 0;
/* we call values measured along a segment (point->v) */
/* `coordinates', and values orthogonal to it (point->u) */
/* `positions' */
FT_Pos min_pos = 32000;
FT_Pos max_pos = -32000;
FT_Pos min_coord = 32000;
FT_Pos max_coord = -32000;
FT_UShort min_flags = AF_FLAG_NONE;
FT_UShort max_flags = AF_FLAG_NONE;
FT_Pos min_on_coord = 32000;
FT_Pos max_on_coord = -32000;
FT_Bool passed;
AF_Segment prev_segment = NULL;
FT_Pos prev_min_pos = min_pos;
FT_Pos prev_max_pos = max_pos;
FT_Pos prev_min_coord = min_coord;
FT_Pos prev_max_coord = max_coord;
FT_UShort prev_min_flags = min_flags;
FT_UShort prev_max_flags = max_flags;
FT_Pos prev_min_on_coord = min_on_coord;
FT_Pos prev_max_on_coord = max_on_coord;
if (FT_ABS(last->out_dir) == major_dir &&
FT_ABS(point->out_dir) == major_dir) {
/* we are already on an edge, try to locate its start */
last = point;
for (;;) {
point = point->prev;
if (FT_ABS(point->out_dir) != major_dir) {
point = point->next;
break;
}
if (point == last)
break;
}
}
last = point;
passed = 0;
for (;;) {
FT_Pos u, v;
if (on_edge) {
/* get minimum and maximum position */
u = point->u;
if (u < min_pos)
min_pos = u;
if (u > max_pos)
max_pos = u;
/* get minimum and maximum coordinate together with flags */
v = point->v;
if (v < min_coord) {
min_coord = v;
min_flags = point->flags;
}
if (v > max_coord) {
max_coord = v;
max_flags = point->flags;
}
/* get minimum and maximum coordinate of `on' points */
if (!(point->flags & AF_FLAG_CONTROL)) {
v = point->v;
if (v < min_on_coord)
min_on_coord = v;
if (v > max_on_coord)
max_on_coord = v;
}
if (point->out_dir != segment_dir || point == last) {
/* check whether the new segment's start point is identical to */
/* the previous segment's end point; for example, this might */
/* happen for spikes */
if (!prev_segment || segment->first != prev_segment->last) {
/* points are different: we are just leaving an edge, thus */
/* record a new segment */
segment->last = point;
segment->pos = (FT_Short) ((min_pos + max_pos) >> 1);
segment->delta = (FT_Short) ((max_pos - min_pos) >> 1);
/* a segment is round if either its first or last point */
/* is a control point, and the length of the on points */
/* inbetween doesn't exceed a heuristic limit */
if ((min_flags | max_flags) & AF_FLAG_CONTROL &&
(max_on_coord - min_on_coord) < flat_threshold)
segment->flags |= AF_EDGE_ROUND;
segment->min_coord = (FT_Short) min_coord;
segment->max_coord = (FT_Short) max_coord;
segment->height = segment->max_coord - segment->min_coord;
prev_segment = segment;
prev_min_pos = min_pos;
prev_max_pos = max_pos;
prev_min_coord = min_coord;
prev_max_coord = max_coord;
prev_min_flags = min_flags;
prev_max_flags = max_flags;
prev_min_on_coord = min_on_coord;
prev_max_on_coord = max_on_coord;
}
else {
/* points are the same: we don't create a new segment but */
/* merge the current segment with the previous one */
if (prev_segment->last->in_dir == point->in_dir) {
/* we have identical directions (this can happen for */
/* degenerate outlines that move zig-zag along the main */
/* axis without changing the coordinate value of the other */
/* axis, and where the segments have just been merged): */
/* unify segments */
/* update constraints */
if (prev_min_pos < min_pos)
min_pos = prev_min_pos;
if (prev_max_pos > max_pos)
max_pos = prev_max_pos;
if (prev_min_coord < min_coord) {
min_coord = prev_min_coord;
min_flags = prev_min_flags;
}
if (prev_max_coord > max_coord) {
max_coord = prev_max_coord;
max_flags = prev_max_flags;
}
if (prev_min_on_coord < min_on_coord)
min_on_coord = prev_min_on_coord;
if (prev_max_on_coord > max_on_coord)
max_on_coord = prev_max_on_coord;
prev_segment->last = point;
prev_segment->pos = (FT_Short) ((min_pos +
max_pos) >> 1);
prev_segment->delta = (FT_Short) ((max_pos -
min_pos) >> 1);
if ((min_flags | max_flags) & AF_FLAG_CONTROL &&
(max_on_coord - min_on_coord) < flat_threshold)
prev_segment->flags |= AF_EDGE_ROUND;
else
prev_segment->flags &= ~AF_EDGE_ROUND;
prev_segment->min_coord = (FT_Short) min_coord;
prev_segment->max_coord = (FT_Short) max_coord;
prev_segment->height = prev_segment->max_coord -
prev_segment->min_coord;
}
else {
/* we have different directions; use the properties of the */
/* longer segment and discard the other one */
if (FT_ABS(prev_max_coord - prev_min_coord) >
FT_ABS(max_coord - min_coord)) {
/* discard current segment */
if (min_pos < prev_min_pos)
prev_min_pos = min_pos;
if (max_pos > prev_max_pos)
prev_max_pos = max_pos;
prev_segment->last = point;
prev_segment->pos = (FT_Short) ((prev_min_pos +
prev_max_pos) >> 1);
prev_segment->delta = (FT_Short) ((prev_max_pos -
prev_min_pos) >> 1);
}
else {
/* discard previous segment */
if (prev_min_pos < min_pos)
min_pos = prev_min_pos;
if (prev_max_pos > max_pos)
max_pos = prev_max_pos;
segment->last = point;
segment->pos = (FT_Short) ((min_pos + max_pos) >> 1);
segment->delta = (FT_Short) ((max_pos - min_pos) >> 1);
if ((min_flags | max_flags) & AF_FLAG_CONTROL &&
(max_on_coord - min_on_coord) < flat_threshold)
segment->flags |= AF_EDGE_ROUND;
segment->min_coord = (FT_Short) min_coord;
segment->max_coord = (FT_Short) max_coord;
segment->height = segment->max_coord -
segment->min_coord;
*prev_segment = *segment;
prev_min_pos = min_pos;
prev_max_pos = max_pos;
prev_min_coord = min_coord;
prev_max_coord = max_coord;
prev_min_flags = min_flags;
prev_max_flags = max_flags;
prev_min_on_coord = min_on_coord;
prev_max_on_coord = max_on_coord;
}
}
axis->num_segments--;
}
on_edge = 0;
segment = NULL;
/* fall through */
}
}
/* now exit if we are at the start/end point */
if (point == last) {
if (passed)
break;
passed = 1;
}
/* if we are not on an edge, check whether the major direction */
/* coincides with the current point's `out' direction, or */
/* whether we have a single-point contour */
if (!on_edge &&
(FT_ABS(point->out_dir) == major_dir ||
point == point->prev)) {
/* this is the start of a new segment! */
segment_dir = (AF_Direction) point->out_dir;
error = af_axis_hints_new_segment (axis, memory, &segment);
if (error)
goto Exit;
/* clear all segment fields */
segment[0] = seg0;
segment->dir = (FT_Char) segment_dir;
segment->first = point;
segment->last = point;
/* `af_axis_hints_new_segment' reallocates memory, */
/* thus we have to refresh the `prev_segment' pointer */
if (prev_segment)
prev_segment = segment - 1;
min_pos = max_pos = point->u;
min_coord = max_coord = point->v;
min_flags = max_flags = point->flags;
if (point->flags & AF_FLAG_CONTROL) {
min_on_coord = 32000;
max_on_coord = -32000;
}
else
min_on_coord = max_on_coord = point->v;
on_edge = 1;
if (point == point->prev) {
/* we have a one-point segment: this is a one-point */
/* contour with `in' and `out' direction set to */
/* AF_DIR_NONE */
segment->pos = (FT_Short) min_pos;
if (point->flags & AF_FLAG_CONTROL)
segment->flags |= AF_EDGE_ROUND;
segment->min_coord = (FT_Short) point->v;
segment->max_coord = (FT_Short) point->v;
segment->height = 0;
on_edge = 0;
segment = NULL;
}
}
point = point->next;
}
} /* contours */
/* now slightly increase the height of segments if this makes */
/* sense -- this is used to better detect and ignore serifs */
{
AF_Segment segments = axis->segments;
AF_Segment segments_end = FT_OFFSET(segments, axis->num_segments);
for (segment = segments; segment < segments_end; segment++) {
AF_Point first = segment->first;
AF_Point last = segment->last;
FT_Pos first_v = first->v;
FT_Pos last_v = last->v;
if (first_v < last_v) {
AF_Point p;
p = first->prev;
if (p->v < first_v)
segment->height = (FT_Short) (segment->height +
((first_v - p->v) >> 1));
p = last->next;
if (p->v > last_v)
segment->height = (FT_Short) (segment->height +
((p->v - last_v) >> 1));
}
else {
AF_Point p;
p = first->prev;
if (p->v > first_v)
segment->height = (FT_Short) (segment->height +
((p->v - first_v) >> 1));
p = last->next;
if (p->v < last_v)
segment->height = (FT_Short) (segment->height +
((last_v - p->v) >> 1));
}
}
}
Exit:
return error;
}
/* Link segments to form stems and serifs.  If `width_count' and      */
/* `widths' are non-zero, use them to fine-tune the scoring function. */
FT_LOCAL_DEF(void)
af_latin_hints_link_segments (AF_GlyphHints hints,
                              FT_UInt width_count,
                              AF_WidthRec* widths,
                              AF_Dimension dim) {
  AF_AxisHints axis = &hints->axis[dim];
  AF_Segment first_seg = axis->segments;
  AF_Segment seg_end = first_seg + axis->num_segments;
  AF_Segment sA, sB;
  FT_Pos len_threshold, len_score, dist_score, max_width;

  /* the widest detected stem, if any, drives the distance demerit */
  max_width = width_count ? widths[width_count - 1].org : 0;

  /* a heuristic value to set up a minimum amount of overlapping */
  len_threshold = AF_LATIN_CONSTANT(hints->metrics, 8);
  if (!len_threshold)
    len_threshold = 1;

  /* a heuristic value to weight overlap lengths */
  len_score = AF_LATIN_CONSTANT(hints->metrics, 6000);

  /* a heuristic value to weight distances (no call to         */
  /* AF_LATIN_CONSTANT needed, since we work on multiples      */
  /* of the stem width)                                        */
  dist_score = 3000;

  /* now compare each segment to the others */
  for (sA = first_seg; sA < seg_end; sA++) {
    if (sA->dir != axis->major_dir)
      continue;

    /* search for stems having opposite directions, */
    /* with sA to the `left' of sB                  */
    for (sB = first_seg; sB < seg_end; sB++) {
      FT_Pos posA = sA->pos;
      FT_Pos posB = sB->pos;
      FT_Pos lo, hi, overlap;

      if (sA->dir + sB->dir != 0 || posB <= posA)
        continue;

      /* intersect the coordinate ranges of the two segments; */
      /* the length of the intersection is how much they      */
      /* overlap along the main axis                          */
      lo = (sA->min_coord < sB->min_coord) ? sB->min_coord : sA->min_coord;
      hi = (sA->max_coord > sB->max_coord) ? sB->max_coord : sA->max_coord;

      overlap = hi - lo;
      if (overlap < len_threshold)
        continue;

      {
        /*
         * The score is the sum of two demerits indicating the
         * `badness' of a fit, measured along the segments' main axis
         * and orthogonal to it, respectively.
         *
         * - The less overlapping along the main axis, the worse it
         *   is, causing a larger demerit.
         *
         * - The nearer the orthogonal distance to a stem width, the
         *   better it is, causing a smaller demerit.  For simplicity,
         *   however, we only increase the demerit for values that
         *   exceed the largest stem width.
         */
        FT_Pos dist = posB - posA;
        FT_Pos dist_demerit, score;

        if (max_width) {
          /* distance demerits are based on multiples of `max_width'; */
          /* we scale by 1024 for getting more precision              */
          FT_Pos delta = (dist << 10) / max_width - (1 << 10);

          if (delta > 10000)
            dist_demerit = 32000;
          else if (delta > 0)
            dist_demerit = delta * delta / dist_score;
          else
            dist_demerit = 0;
        }
        else
          dist_demerit = dist; /* default if no widths available */

        score = dist_demerit + len_score / overlap;

        /* keep the smallest score seen on either side */
        if (score < sA->score) {
          sA->score = score;
          sA->link = sB;
        }
        if (score < sB->score) {
          sB->score = score;
          sB->link = sA;
        }
      }
    }
  }

  /* now compute the `serif' segments, cf. explanations in `afhints.h' */
  for (sA = first_seg; sA < seg_end; sA++) {
    sB = sA->link;
    /* a one-way link (sB's best partner is someone else) marks a serif */
    if (sB && sB->link != sA) {
      sA->link = 0;
      sA->serif = sB->link;
    }
  }
}
/* Link segments to edges, using feature analysis for selection.       */
/*                                                                     */
/* Build the sorted edge table for dimension `dim' by clustering the   */
/* previously detected segments by position, then derive each edge's   */
/* properties: main direction, round/straight flags, and stem/serif    */
/* links.  Returns any FT_Error raised while growing the edge table.   */
FT_LOCAL_DEF(FT_Error)
af_latin_hints_compute_edges (AF_GlyphHints hints,
                              AF_Dimension dim) {
  AF_AxisHints axis = &hints->axis[dim];
  FT_Error error = FT_Err_Ok;
  FT_Memory memory = hints->memory;
  AF_LatinAxis laxis = &((AF_LatinMetrics) hints->metrics)->axis[dim];

  AF_StyleClass style_class = hints->metrics->style_class;
  AF_ScriptClass script_class = af_script_classes[style_class->script];

  FT_Bool top_to_bottom_hinting = 0;

  AF_Segment segments = axis->segments;
  AF_Segment segment_limit = segments + axis->num_segments;
  AF_Segment seg;

#if 0
  AF_Direction up_dir;
#endif
  FT_Fixed scale;
  FT_Pos edge_distance_threshold;
  FT_Pos segment_length_threshold;
  FT_Pos segment_width_threshold;

  /* the edge table is rebuilt from scratch */
  axis->num_edges = 0;

  scale = (dim == AF_DIMENSION_HORZ) ? hints->x_scale
                                     : hints->y_scale;

#if 0
  up_dir = ( dim == AF_DIMENSION_HORZ ) ? AF_DIR_UP
                                        : AF_DIR_RIGHT;
#endif

  /* some scripts sort their edges from top to bottom instead */
  if (dim == AF_DIMENSION_VERT)
    top_to_bottom_hinting = script_class->top_to_bottom_hinting;

  /*
   * We ignore all segments that are less than 1 pixel in length
   * to avoid many problems with serif fonts.  We compute the
   * corresponding threshold in font units.
   */
  if (dim == AF_DIMENSION_HORZ)
    segment_length_threshold = FT_DivFix (64, hints->y_scale);
  else
    segment_length_threshold = 0;

  /*
   * Similarly, we ignore segments that have a width delta
   * larger than 0.5px (i.e., a width larger than 1px).
   */
  segment_width_threshold = FT_DivFix (32, scale);

  /**********************************************************************
   *
   * We begin by generating a sorted table of edges for the current
   * direction.  To do so, we simply scan each segment and try to find
   * an edge in our table that corresponds to its position.
   *
   * If no edge is found, we create and insert a new edge in the
   * sorted table.  Otherwise, we simply add the segment to the edge's
   * list which gets processed in the second step to compute the
   * edge's properties.
   *
   * Note that the table of edges is sorted along the segment/edge
   * position.
   *
   */

  /* assure that edge distance threshold is at most 0.25px */
  edge_distance_threshold = FT_MulFix(laxis->edge_distance_threshold,
                                      scale);
  if (edge_distance_threshold > 64 / 4)
    edge_distance_threshold = 64 / 4;

  /* convert back to font units for comparison against `seg->pos' */
  edge_distance_threshold = FT_DivFix (edge_distance_threshold,
                                       scale);

  for (seg = segments; seg < segment_limit; seg++) {
    AF_Edge found = NULL;
    FT_Int ee;

    /* ignore too short segments, too wide ones, and, in this loop, */
    /* one-point segments without a direction                       */
    if (seg->height < segment_length_threshold ||
        seg->delta > segment_width_threshold ||
        seg->dir == AF_DIR_NONE)
      continue;

    /* A special case for serif edges: If they are smaller than */
    /* 1.5 pixels we ignore them.                               */
    if (seg->serif &&
        2 * seg->height < 3 * segment_length_threshold)
      continue;

    /* look for an edge corresponding to the segment */
    for (ee = 0; ee < axis->num_edges; ee++) {
      AF_Edge edge = axis->edges + ee;
      FT_Pos dist;

      dist = seg->pos - edge->fpos;
      if (dist < 0)
        dist = -dist;

      /* a segment joins an edge only if close enough AND same direction */
      if (dist < edge_distance_threshold && edge->dir == seg->dir) {
        found = edge;
        break;
      }
    }

    if (!found) {
      AF_Edge edge;

      /* insert a new edge in the list and */
      /* sort according to the position    */
      error = af_axis_hints_new_edge (axis, seg->pos,
                                      (AF_Direction) seg->dir,
                                      top_to_bottom_hinting,
                                      memory, &edge);
      if (error)
        goto Exit;

      /* add the segment to the new edge's list */
      FT_ZERO(edge);

      edge->first = seg;
      edge->last = seg;
      edge->dir = seg->dir;
      edge->fpos = seg->pos;
      edge->opos = FT_MulFix(seg->pos, scale);
      edge->pos = edge->opos;
      /* the per-edge segment list is circular; a lone segment points */
      /* back to itself                                               */
      seg->edge_next = seg;
    }
    else {
      /* if an edge was found, simply add the segment to the edge's */
      /* list                                                       */
      seg->edge_next = found->first;
      found->last->edge_next = seg;
      found->last = seg;
    }
  }

  /* we loop again over all segments to catch one-point segments   */
  /* without a direction: if possible, link them to existing edges */
  for (seg = segments; seg < segment_limit; seg++) {
    AF_Edge found = NULL;
    FT_Int ee;

    if (seg->dir != AF_DIR_NONE)
      continue;

    /* look for an edge corresponding to the segment */
    for (ee = 0; ee < axis->num_edges; ee++) {
      AF_Edge edge = axis->edges + ee;
      FT_Pos dist;

      dist = seg->pos - edge->fpos;
      if (dist < 0)
        dist = -dist;

      /* note: no direction check here -- these segments have none */
      if (dist < edge_distance_threshold) {
        found = edge;
        break;
      }
    }

    /* one-point segments without a match are ignored */
    if (found) {
      seg->edge_next = found->first;
      found->last->edge_next = seg;
      found->last = seg;
    }
  }

  /*******************************************************************
   *
   * Good, we now compute each edge's properties according to the
   * segments found on its position.  Basically, these are
   *
   * - the edge's main direction
   * - stem edge, serif edge or both (which defaults to stem then)
   * - rounded edge, straight or both (which defaults to straight)
   * - link for edge
   *
   */

  /* first of all, set the `edge' field in each segment -- this is */
  /* required in order to compute edge links                       */

  /*
   * Note that removing this loop and setting the `edge' field of each
   * segment directly in the code above slows down execution speed for
   * some reasons on platforms like the Sun.
   */
  {
    AF_Edge edges = axis->edges;
    AF_Edge edge_limit = FT_OFFSET(edges, axis->num_edges);
    AF_Edge edge;

    for (edge = edges; edge < edge_limit; edge++) {
      seg = edge->first;
      if (seg)
        do {
          seg->edge = edge;
          seg = seg->edge_next;
        }
        while (seg != edge->first);  /* circular list: stop on wrap-around */
    }

    /* now compute each edge properties */
    for (edge = edges; edge < edge_limit; edge++) {
      FT_Int is_round = 0;    /* does it contain round segments?    */
      FT_Int is_straight = 0; /* does it contain straight segments? */

#if 0
      FT_Pos ups = 0;   /* number of upwards segments   */
      FT_Pos downs = 0; /* number of downwards segments */
#endif

      seg = edge->first;

      do {
        FT_Bool is_serif;

        /* check for roundness of segment */
        if (seg->flags & AF_EDGE_ROUND)
          is_round++;
        else
          is_straight++;

#if 0
        /* check for segment direction */
        if ( seg->dir == up_dir )
          ups += seg->max_coord - seg->min_coord;
        else
          downs += seg->max_coord - seg->min_coord;
#endif

        /* check for links -- if seg->serif is set, then seg->link must */
        /* be ignored                                                   */
        is_serif = FT_BOOL(seg->serif &&
                           seg->serif->edge &&
                           seg->serif->edge != edge);

        if ((seg->link && seg->link->edge) || is_serif) {
          AF_Edge edge2;
          AF_Segment seg2;

          edge2 = edge->link;
          seg2 = seg->link;

          if (is_serif) {
            seg2 = seg->serif;
            edge2 = edge->serif;
          }

          if (edge2) {
            /* an edge-level partner exists already; keep whichever */
            /* candidate pairing is tighter                         */
            FT_Pos edge_delta;
            FT_Pos seg_delta;

            edge_delta = edge->fpos - edge2->fpos;
            if (edge_delta < 0)
              edge_delta = -edge_delta;

            seg_delta = seg->pos - seg2->pos;
            if (seg_delta < 0)
              seg_delta = -seg_delta;

            if (seg_delta < edge_delta)
              edge2 = seg2->edge;
          }
          else
            edge2 = seg2->edge;

          if (is_serif) {
            edge->serif = edge2;
            edge2->flags |= AF_EDGE_SERIF;
          }
          else
            edge->link = edge2;
        }

        seg = seg->edge_next;
      }
      while (seg != edge->first);

      /* set the round/straight flags */
      edge->flags = AF_EDGE_NORMAL;

      /* round wins ties: an edge is round if at least half of its */
      /* segments are round                                        */
      if (is_round > 0 && is_round >= is_straight)
        edge->flags |= AF_EDGE_ROUND;

#if 0
      /* set the edge's main direction */
      edge->dir = AF_DIR_NONE;

      if ( ups > downs )
        edge->dir = (FT_Char)up_dir;
      else if ( ups < downs )
        edge->dir = (FT_Char)-up_dir;
      else if ( ups == downs )
        edge->dir = 0; /* both up and down! */
#endif

      /* get rid of serifs if link is set                    */
      /* XXX: This gets rid of many unpleasant artefacts!    */
      /*      Example: the `c' in cour.pfa at size 13        */
      if (edge->serif && edge->link)
        edge->serif = NULL;
    }
  }

Exit:
  return error;
}
/* Detect segments and edges for given dimension. */
FT_LOCAL_DEF(FT_Error)
af_latin_hints_detect_features (AF_GlyphHints hints,
                                FT_UInt width_count,
                                AF_WidthRec* widths,
                                AF_Dimension dim) {
  /* run the three analysis stages in order, stopping on failure */
  FT_Error error = af_latin_hints_compute_segments (hints, dim);

  if (error)
    return error;

  af_latin_hints_link_segments (hints, width_count, widths, dim);

  return af_latin_hints_compute_edges (hints, dim);
}
/* Compute all edges which lie within blue zones.                     */
/*                                                                    */
/* For every horizontal (vertical-dimension) edge, search the active  */
/* blue zones of `metrics' for the closest reference or overshoot     */
/* position and store it in `edge->blue_edge'; neutral zones also set */
/* the AF_EDGE_NEUTRAL flag.  Edges farther than the initial          */
/* threshold from every zone are left untouched.                      */
static void
af_latin_hints_compute_blue_edges (AF_GlyphHints hints,
                                   AF_LatinMetrics metrics) {
  AF_AxisHints axis = &hints->axis[AF_DIMENSION_VERT];
  AF_Edge edge = axis->edges;
  AF_Edge edge_limit = edge + axis->num_edges;
  AF_LatinAxis latin = &metrics->axis[AF_DIMENSION_VERT];
  FT_Fixed scale = latin->scale;

  /* for each horizontal edge search the blue zone which is closest */
  for (; edge < edge_limit; edge++) {
    FT_UInt bb;
    AF_Width best_blue = NULL;
    FT_Bool best_blue_is_neutral = 0;
    FT_Pos best_dist; /* initial threshold */

    /* compute the initial threshold as a fraction of the EM size */
    /* (the value 40 is heuristic)                                */
    best_dist = FT_MulFix(metrics->units_per_em / 40, scale);

    /* assure a maximum distance of 0.5px */
    if (best_dist > 64 / 2)
      best_dist = 64 / 2;

    for (bb = 0; bb < latin->blue_count; bb++) {
      AF_LatinBlue blue = latin->blues + bb;
      FT_Bool is_top_blue, is_neutral_blue, is_major_dir;

      /* skip inactive blue zones (i.e., those that are too large) */
      if (!(blue->flags & AF_LATIN_BLUE_ACTIVE))
        continue;

      /* if it is a top zone, check for right edges (against the major */
      /* direction); if it is a bottom zone, check for left edges (in  */
      /* the major direction) -- this assumes the TrueType convention  */
      /* for the orientation of contours                               */
      is_top_blue =
        (FT_Byte) ((blue->flags & (AF_LATIN_BLUE_TOP |
                                   AF_LATIN_BLUE_SUB_TOP)) != 0);
      is_neutral_blue =
        (FT_Byte) ((blue->flags & AF_LATIN_BLUE_NEUTRAL) != 0);
      is_major_dir =
        FT_BOOL(edge->dir == axis->major_dir);

      /* neutral blue zones are handled for both directions */
      if (is_top_blue ^ is_major_dir || is_neutral_blue) {
        FT_Pos dist;

        /* first of all, compare it to the reference position */
        dist = edge->fpos - blue->ref.org;
        if (dist < 0)
          dist = -dist;

        /* distances are compared in pixel space */
        dist = FT_MulFix(dist, scale);
        if (dist < best_dist) {
          best_dist = dist;
          best_blue = &blue->ref;
          best_blue_is_neutral = is_neutral_blue;
        }

        /* now compare it to the overshoot position and check whether */
        /* the edge is rounded, and whether the edge is over the      */
        /* reference position of a top zone, or under the reference   */
        /* position of a bottom zone (provided we don't have a        */
        /* neutral blue zone)                                         */
        if (edge->flags & AF_EDGE_ROUND &&
            dist != 0 &&
            !is_neutral_blue) {
          FT_Bool is_under_ref = FT_BOOL(edge->fpos < blue->ref.org);

          if (is_top_blue ^ is_under_ref) {
            dist = edge->fpos - blue->shoot.org;
            if (dist < 0)
              dist = -dist;

            dist = FT_MulFix(dist, scale);
            if (dist < best_dist) {
              best_dist = dist;
              best_blue = &blue->shoot;
              best_blue_is_neutral = is_neutral_blue;
            }
          }
        }
      }
    }

    /* record the winning zone, if any */
    if (best_blue) {
      edge->blue_edge = best_blue;
      if (best_blue_is_neutral)
        edge->flags |= AF_EDGE_NEUTRAL;
    }
  }
}
/* Initialize hinting engine.                                          */
/*                                                                     */
/* Rescale the glyph hints to the current metrics and compute the      */
/* scaler/other flag sets that control the latin hinter for the        */
/* requested render mode.  Always returns FT_Err_Ok.                   */
static FT_Error
af_latin_hints_init (AF_GlyphHints hints,
                     AF_LatinMetrics metrics) {
  FT_Render_Mode mode;
  FT_UInt32 scaler_flags, other_flags;
  FT_Face face = metrics->root.scaler.face;

  af_glyph_hints_rescale (hints, (AF_StyleMetrics) metrics);

  /*
   * correct x_scale and y_scale if needed, since they may have
   * been modified by `af_latin_metrics_scale_dim' above
   */
  hints->x_scale = metrics->axis[AF_DIMENSION_HORZ].scale;
  hints->x_delta = metrics->axis[AF_DIMENSION_HORZ].delta;
  hints->y_scale = metrics->axis[AF_DIMENSION_VERT].scale;
  hints->y_delta = metrics->axis[AF_DIMENSION_VERT].delta;

  /* compute flags depending on render mode, etc. */
  mode = metrics->root.scaler.render_mode;

#if 0 /* #ifdef AF_CONFIG_OPTION_USE_WARPER */
  if ( mode == FT_RENDER_MODE_LCD || mode == FT_RENDER_MODE_LCD_V )
    metrics->root.scaler.render_mode = mode = FT_RENDER_MODE_NORMAL;
#endif

  scaler_flags = hints->scaler_flags;
  other_flags = 0;

  /*
   * We snap the width of vertical stems for the monochrome and
   * horizontal LCD rendering targets only.
   */
  if (mode == FT_RENDER_MODE_MONO || mode == FT_RENDER_MODE_LCD)
    other_flags |= AF_LATIN_HINTS_HORZ_SNAP;

  /*
   * We snap the width of horizontal stems for the monochrome and
   * vertical LCD rendering targets only.
   */
  if (mode == FT_RENDER_MODE_MONO || mode == FT_RENDER_MODE_LCD_V)
    other_flags |= AF_LATIN_HINTS_VERT_SNAP;

  /*
   * We adjust stems to full pixels unless in `light' or `lcd' mode.
   */
  if (mode != FT_RENDER_MODE_LIGHT && mode != FT_RENDER_MODE_LCD)
    other_flags |= AF_LATIN_HINTS_STEM_ADJUST;

  if (mode == FT_RENDER_MODE_MONO)
    other_flags |= AF_LATIN_HINTS_MONO;

  /*
   * In `light' or `lcd' mode we disable horizontal hinting completely.
   * We also do it if the face is italic.
   *
   * However, if warping is enabled (which only works in `light' hinting
   * mode), advance widths get adjusted, too.
   */
  if (mode == FT_RENDER_MODE_LIGHT || mode == FT_RENDER_MODE_LCD ||
      (face->style_flags & FT_STYLE_FLAG_ITALIC) != 0)
    scaler_flags |= AF_SCALER_FLAG_NO_HORIZONTAL;

#ifdef AF_CONFIG_OPTION_USE_WARPER
  /* get (global) warper flag */
  if (!metrics->root.globals->module->warping)
    scaler_flags |= AF_SCALER_FLAG_NO_WARPER;
#endif

  hints->scaler_flags = scaler_flags;
  hints->other_flags = other_flags;

  return FT_Err_Ok;
}
/*************************************************************************/
/*************************************************************************/
/***** *****/
/***** L A T I N G L Y P H G R I D - F I T T I N G *****/
/***** *****/
/*************************************************************************/
/*************************************************************************/
/* Snap a given width in scaled coordinates to one of the */
/* current standard widths.                               */
static FT_Pos
af_latin_snap_width (AF_Width widths,
                     FT_UInt count,
                     FT_Pos width) {
  FT_UInt idx;
  FT_Pos best_dist = 64 + 32 + 2; /* anything farther counts as no match */
  FT_Pos reference = width;
  FT_Pos rounded;

  /* pick the standard width closest to the input width */
  for (idx = 0; idx < count; idx++) {
    FT_Pos candidate = widths[idx].cur;
    FT_Pos d = width - candidate;

    if (d < 0)
      d = -d;
    if (d < best_dist) {
      best_dist = d;
      reference = candidate;
    }
  }

  rounded = FT_PIX_ROUND(reference);

  /* snap only if the input stays within 48/64 px of the  */
  /* pixel-rounded reference, on the appropriate side     */
  if (width >= reference) {
    if (width < rounded + 48)
      width = reference;
  }
  else if (width > rounded - 48)
    width = reference;

  return width;
}
/* Compute the snapped width of a given stem, ignoring very thin ones. */
/* There is a lot of voodoo in this function; changing the hard-coded  */
/* parameters influence the whole hinting process.                     */
/*                                                                     */
/* `width' is the original stem width (may be negative to encode       */
/* direction); `base_delta' is how far the stem's base edge has        */
/* already been moved; `base_flags'/`stem_flags' are the AF_EDGE_*     */
/* flags of the two edges.  Returns the quantized width with the       */
/* original sign restored.                                             */
static FT_Pos
af_latin_compute_stem_width (AF_GlyphHints hints,
                             AF_Dimension dim,
                             FT_Pos width,
                             FT_Pos base_delta,
                             FT_UInt base_flags,
                             FT_UInt stem_flags) {
  AF_LatinMetrics metrics = (AF_LatinMetrics) hints->metrics;
  AF_LatinAxis axis = &metrics->axis[dim];
  FT_Pos dist = width;          /* absolute width; sign restored at end */
  FT_Int sign = 0;
  FT_Int vertical = (dim == AF_DIMENSION_VERT);

  /* no adjustment at all in extra-light or non-stem-adjust modes */
  if (!AF_LATIN_HINTS_DO_STEM_ADJUST(hints) ||
      axis->extra_light)
    return width;

  if (dist < 0) {
    dist = -width;
    sign = 1;
  }

  if ((vertical && !AF_LATIN_HINTS_DO_VERT_SNAP(hints)) ||
      (!vertical && !AF_LATIN_HINTS_DO_HORZ_SNAP(hints))) {
    /* smooth hinting process: very lightly quantize the stem width */

    /* leave the widths of serifs alone */
    if ((stem_flags & AF_EDGE_SERIF) &&
        vertical &&
        (dist < 3 * 64))
      goto Done_Width;
    else if (base_flags & AF_EDGE_ROUND) {
      if (dist < 80)
        dist = 64;
    }
    else if (dist < 56)
      dist = 56;

    if (axis->width_count > 0) {
      FT_Pos delta;

      /* compare to standard width */
      delta = dist - axis->widths[0].cur;

      if (delta < 0)
        delta = -delta;

      if (delta < 40) {
        /* close enough to the standard width: adopt it */
        dist = axis->widths[0].cur;
        if (dist < 48)
          dist = 48;

        goto Done_Width;
      }

      if (dist < 3 * 64) {
        /* thin stems: piecewise quantization of the sub-pixel part */
        delta = dist & 63;
        dist &= -64;

        if (delta < 10)
          dist += delta;
        else if (delta < 32)
          dist += 10;
        else if (delta < 54)
          dist += 54;
        else
          dist += delta;
      }
      else {
        /* A stem's end position depends on two values: the start       */
        /* position and the stem length.  The former gets usually       */
        /* rounded to the grid, while the latter gets rounded also if it */
        /* exceeds a certain length (see below in this function).  This */
        /* `double rounding' can lead to a great difference to the      */
        /* original, unhinted position; this normally doesn't matter for */
        /* large PPEM values, but for small sizes it can easily make    */
        /* outlines collide.  For this reason, we adjust the stem length */
        /* by a small amount depending on the PPEM value in case the    */
        /* former and latter rounding both point into the same          */
        /* direction.                                                   */

        FT_Pos bdelta = 0;

        if (((width > 0) && (base_delta > 0)) ||
            ((width < 0) && (base_delta < 0))) {
          FT_UInt ppem = metrics->root.scaler.face->size->metrics.x_ppem;

          if (ppem < 10)
            bdelta = base_delta;
          else if (ppem < 30)
            bdelta = (base_delta * (FT_Pos) (30 - ppem)) / 20;

          if (bdelta < 0)
            bdelta = -bdelta;
        }

        /* round to the grid, compensated by `bdelta' */
        dist = (dist - bdelta + 32) & ~63;
      }
    }
  }
  else {
    /* strong hinting process: snap the stem width to integer pixels */
    FT_Pos org_dist = dist;

    dist = af_latin_snap_width (axis->widths, axis->width_count, dist);

    if (vertical) {
      /* in the case of vertical hinting, always round */
      /* the stem heights to integer pixels            */
      if (dist >= 64)
        dist = (dist + 16) & ~63;
      else
        dist = 64;
    }
    else {
      if (AF_LATIN_HINTS_DO_MONO(hints)) {
        /* monochrome horizontal hinting: snap widths to integer pixels */
        /* with a different threshold                                   */
        if (dist < 64)
          dist = 64;
        else
          dist = (dist + 32) & ~63;
      }
      else {
        /* for horizontal anti-aliased hinting, we adopt a more subtle */
        /* approach: we strengthen small stems, round stems whose size */
        /* is between 1 and 2 pixels to an integer, otherwise nothing  */
        if (dist < 48)
          dist = (dist + 64) >> 1;
        else if (dist < 128) {
          /* We only round to an integer width if the corresponding */
          /* distortion is less than 1/4 pixel.  Otherwise this     */
          /* makes everything worse since the diagonals, which are  */
          /* not hinted, appear a lot bolder or thinner than the    */
          /* vertical stems.                                        */

          FT_Pos delta;

          dist = (dist + 22) & ~63;
          delta = dist - org_dist;
          if (delta < 0)
            delta = -delta;

          if (delta >= 16) {
            dist = org_dist;
            if (dist < 48)
              dist = (dist + 64) >> 1;
          }
        }
        else
          /* round otherwise to prevent color fringes in LCD mode */
          dist = (dist + 32) & ~63;
      }
    }
  }

Done_Width:
  /* restore the sign stripped at the top */
  if (sign)
    dist = -dist;

  return dist;
}
/* Align one stem edge relative to the previous stem edge. */
static void
af_latin_align_linked_edge (AF_GlyphHints hints,
                            AF_Dimension dim,
                            AF_Edge base_edge,
                            AF_Edge stem_edge) {
  /* original distance between the two edges, and the shift the */
  /* base edge has already received from hinting                */
  FT_Pos org_dist = stem_edge->opos - base_edge->opos;
  FT_Pos base_shift = base_edge->pos - base_edge->opos;
  FT_Pos fitted_width;

  fitted_width = af_latin_compute_stem_width (hints, dim,
                                              org_dist, base_shift,
                                              base_edge->flags,
                                              stem_edge->flags);

  /* place the stem edge at the fitted distance from the hinted base */
  stem_edge->pos = base_edge->pos + fitted_width;

  FT_TRACE5((" LINK: edge %d (opos=%.2f) linked to %.2f,"
             " dist was %.2f, now %.2f\n",
             stem_edge - hints->axis[dim].edges, stem_edge->opos / 64.0,
             stem_edge->pos / 64.0, org_dist / 64.0, fitted_width / 64.0));
}
/* Shift the coordinates of the `serif' edge by the same amount */
/* as the corresponding `base' edge has been moved already.     */
static void
af_latin_align_serif_edge (AF_GlyphHints hints,
                           AF_Edge base,
                           AF_Edge serif) {
  /* keep the original serif-to-base offset, applied after hinting */
  FT_Pos offset = serif->opos - base->opos;

  FT_UNUSED(hints);

  serif->pos = base->pos + offset;
}
/*************************************************************************/
/*************************************************************************/
/*************************************************************************/
/**** ****/
/**** E D G E H I N T I N G ****/
/**** ****/
/*************************************************************************/
/*************************************************************************/
/*************************************************************************/
/* The main grid-fitting routine. */
static void
af_latin_hint_edges (AF_GlyphHints hints,
AF_Dimension dim) {
AF_AxisHints axis = &hints->axis[dim];
AF_Edge edges = axis->edges;
AF_Edge edge_limit = edges + axis->num_edges;
FT_PtrDist n_edges;
AF_Edge edge;
AF_Edge anchor = NULL;
FT_Int has_serifs = 0;
AF_StyleClass style_class = hints->metrics->style_class;
AF_ScriptClass script_class = af_script_classes[style_class->script];
FT_Bool top_to_bottom_hinting = 0;
#ifdef FT_DEBUG_LEVEL_TRACE
FT_UInt num_actions = 0;
#endif
FT_TRACE5(("latin %s edge hinting (style `%s')\n",
dim == AF_DIMENSION_VERT ? "horizontal" : "vertical",
af_style_names[hints->metrics->style_class->style]));
if (dim == AF_DIMENSION_VERT)
top_to_bottom_hinting = script_class->top_to_bottom_hinting;
/* we begin by aligning all stems relative to the blue zone */
/* if needed -- that's only for horizontal edges */
if (dim == AF_DIMENSION_VERT && AF_HINTS_DO_BLUES(hints)) {
for (edge = edges; edge < edge_limit; edge++) {
AF_Width blue;
AF_Edge edge1, edge2; /* these edges form the stem to check */
if (edge->flags & AF_EDGE_DONE)
continue;
edge1 = NULL;
edge2 = edge->link;
/*
* If a stem contains both a neutral and a non-neutral blue zone,
* skip the neutral one. Otherwise, outlines with different
* directions might be incorrectly aligned at the same vertical
* position.
*
* If we have two neutral blue zones, skip one of them.
*
*/
if (edge->blue_edge && edge2 && edge2->blue_edge) {
FT_Byte neutral = edge->flags & AF_EDGE_NEUTRAL;
FT_Byte neutral2 = edge2->flags & AF_EDGE_NEUTRAL;
if (neutral2) {
edge2->blue_edge = NULL;
edge2->flags &= ~AF_EDGE_NEUTRAL;
}
else if (neutral) {
edge->blue_edge = NULL;
edge->flags &= ~AF_EDGE_NEUTRAL;
}
}
blue = edge->blue_edge;
if (blue)
edge1 = edge;
/* flip edges if the other edge is aligned to a blue zone */
else if (edge2 && edge2->blue_edge) {
blue = edge2->blue_edge;
edge1 = edge2;
edge2 = edge;
}
if (!edge1)
continue;
#ifdef FT_DEBUG_LEVEL_TRACE
if ( !anchor )
FT_TRACE5(( " BLUE_ANCHOR: edge %d (opos=%.2f) snapped to %.2f,"
" was %.2f (anchor=edge %d)\n",
edge1 - edges, edge1->opos / 64.0, blue->fit / 64.0,
edge1->pos / 64.0, edge - edges ));
else
FT_TRACE5(( " BLUE: edge %d (opos=%.2f) snapped to %.2f,"
" was %.2f\n",
edge1 - edges, edge1->opos / 64.0, blue->fit / 64.0,
edge1->pos / 64.0 ));
num_actions++;
#endif
edge1->pos = blue->fit;
edge1->flags |= AF_EDGE_DONE;
if (edge2 && !edge2->blue_edge) {
af_latin_align_linked_edge (hints, dim, edge1, edge2);
edge2->flags |= AF_EDGE_DONE;
#ifdef FT_DEBUG_LEVEL_TRACE
num_actions++;
#endif
}
if (!anchor)
anchor = edge;
}
}
/* now we align all other stem edges, trying to maintain the */
/* relative order of stems in the glyph */
for (edge = edges; edge < edge_limit; edge++) {
AF_Edge edge2;
if (edge->flags & AF_EDGE_DONE)
continue;
/* skip all non-stem edges */
edge2 = edge->link;
if (!edge2) {
has_serifs++;
continue;
}
/* now align the stem */
/* this should not happen, but it's better to be safe */
if (edge2->blue_edge) {
FT_TRACE5((" ASSERTION FAILED for edge %d\n", edge2 - edges));
af_latin_align_linked_edge (hints, dim, edge2, edge);
edge->flags |= AF_EDGE_DONE;
#ifdef FT_DEBUG_LEVEL_TRACE
num_actions++;
#endif
continue;
}
if (!anchor) {
/* if we reach this if clause, no stem has been aligned yet */
FT_Pos org_len, org_center, cur_len;
FT_Pos cur_pos1, error1, error2, u_off, d_off;
org_len = edge2->opos - edge->opos;
cur_len = af_latin_compute_stem_width (hints, dim,
org_len, 0,
edge->flags,
edge2->flags);
/* some voodoo to specially round edges for small stem widths; */
/* the idea is to align the center of a stem, then shifting */
/* the stem edges to suitable positions */
if (cur_len <= 64) {
/* width <= 1px */
u_off = 32;
d_off = 32;
}
else {
/* 1px < width < 1.5px */
u_off = 38;
d_off = 26;
}
if (cur_len < 96) {
org_center = edge->opos + (org_len >> 1);
cur_pos1 = FT_PIX_ROUND(org_center);
error1 = org_center - (cur_pos1 - u_off);
if (error1 < 0)
error1 = -error1;
error2 = org_center - (cur_pos1 + d_off);
if (error2 < 0)
error2 = -error2;
if (error1 < error2)
cur_pos1 -= u_off;
else
cur_pos1 += d_off;
edge->pos = cur_pos1 - cur_len / 2;
edge2->pos = edge->pos + cur_len;
}
else
edge->pos = FT_PIX_ROUND(edge->opos);
anchor = edge;
edge->flags |= AF_EDGE_DONE;
FT_TRACE5((" ANCHOR: edge %d (opos=%.2f) and %d (opos=%.2f)"
" snapped to %.2f and %.2f\n",
edge - edges, edge->opos / 64.0,
edge2 - edges, edge2->opos / 64.0,
edge->pos / 64.0, edge2->pos / 64.0));
af_latin_align_linked_edge (hints, dim, edge, edge2);
#ifdef FT_DEBUG_LEVEL_TRACE
num_actions += 2;
#endif
}
else {
FT_Pos org_pos, org_len, org_center, cur_len;
FT_Pos cur_pos1, cur_pos2, delta1, delta2;
org_pos = anchor->pos + (edge->opos - anchor->opos);
org_len = edge2->opos - edge->opos;
org_center = org_pos + (org_len >> 1);
cur_len = af_latin_compute_stem_width (hints, dim,
org_len, 0,
edge->flags,
edge2->flags);
if (edge2->flags & AF_EDGE_DONE) {
FT_TRACE5((" ADJUST: edge %d (pos=%.2f) moved to %.2f\n",
edge - edges, edge->pos / 64.0,
(edge2->pos - cur_len) / 64.0));
edge->pos = edge2->pos - cur_len;
}
else if (cur_len < 96) {
FT_Pos u_off, d_off;
cur_pos1 = FT_PIX_ROUND(org_center);
if (cur_len <= 64) {
u_off = 32;
d_off = 32;
}
else {
u_off = 38;
d_off = 26;
}
delta1 = org_center - (cur_pos1 - u_off);
if (delta1 < 0)
delta1 = -delta1;
delta2 = org_center - (cur_pos1 + d_off);
if (delta2 < 0)
delta2 = -delta2;
if (delta1 < delta2)
cur_pos1 -= u_off;
else
cur_pos1 += d_off;
edge->pos = cur_pos1 - cur_len / 2;
edge2->pos = cur_pos1 + cur_len / 2;
FT_TRACE5((" STEM: edge %d (opos=%.2f) linked to %d (opos=%.2f)"
" snapped to %.2f and %.2f\n",
edge - edges, edge->opos / 64.0,
edge2 - edges, edge2->opos / 64.0,
edge->pos / 64.0, edge2->pos / 64.0));
}
else {
org_pos = anchor->pos + (edge->opos - anchor->opos);
org_len = edge2->opos - edge->opos;
org_center = org_pos + (org_len >> 1);
cur_len = af_latin_compute_stem_width (hints, dim,
org_len, 0,
edge->flags,
edge2->flags);
cur_pos1 = FT_PIX_ROUND(org_pos);
delta1 = cur_pos1 + (cur_len >> 1) - org_center;
if (delta1 < 0)
delta1 = -delta1;
cur_pos2 = FT_PIX_ROUND(org_pos + org_len) - cur_len;
delta2 = cur_pos2 + (cur_len >> 1) - org_center;
if (delta2 < 0)
delta2 = -delta2;
edge->pos = (delta1 < delta2) ? cur_pos1 : cur_pos2;
edge2->pos = edge->pos + cur_len;
FT_TRACE5((" STEM: edge %d (opos=%.2f) linked to %d (opos=%.2f)"
" snapped to %.2f and %.2f\n",
edge - edges, edge->opos / 64.0,
edge2 - edges, edge2->opos / 64.0,
edge->pos / 64.0, edge2->pos / 64.0));
}
#ifdef FT_DEBUG_LEVEL_TRACE
num_actions++;
#endif
edge->flags |= AF_EDGE_DONE;
edge2->flags |= AF_EDGE_DONE;
if (edge > edges &&
(top_to_bottom_hinting ? (edge->pos > edge[-1].pos)
: (edge->pos < edge[-1].pos))) {
/* don't move if stem would (almost) disappear otherwise; */
/* the ad-hoc value 16 corresponds to 1/4px */
if (edge->link && FT_ABS(edge->link->pos - edge[-1].pos) > 16) {
#ifdef FT_DEBUG_LEVEL_TRACE
FT_TRACE5(( " BOUND: edge %d (pos=%.2f) moved to %.2f\n",
edge - edges,
edge->pos / 64.0,
edge[-1].pos / 64.0 ));
num_actions++;
#endif
edge->pos = edge[-1].pos;
}
}
}
}
/* make sure that lowercase m's maintain their symmetry */
/* In general, lowercase m's have six vertical edges if they are sans */
/* serif, or twelve if they are with serifs. This implementation is */
/* based on that assumption, and seems to work very well with most */
/* faces. However, if for a certain face this assumption is not */
/* true, the m is just rendered like before. In addition, any stem */
/* correction will only be applied to symmetrical glyphs (even if the */
/* glyph is not an m), so the potential for unwanted distortion is */
/* relatively low. */
/* We don't handle horizontal edges since we can't easily assure that */
/* the third (lowest) stem aligns with the base line; it might end up */
/* one pixel higher or lower. */
n_edges = edge_limit - edges;
if (dim == AF_DIMENSION_HORZ && (n_edges == 6 || n_edges == 12)) {
AF_Edge edge1, edge2, edge3;
FT_Pos dist1, dist2, span, delta;
if (n_edges == 6) {
edge1 = edges;
edge2 = edges + 2;
edge3 = edges + 4;
}
else {
edge1 = edges + 1;
edge2 = edges + 5;
edge3 = edges + 9;
}
dist1 = edge2->opos - edge1->opos;
dist2 = edge3->opos - edge2->opos;
span = dist1 - dist2;
if (span < 0)
span = -span;
if (span < 8) {
delta = edge3->pos - (2 * edge2->pos - edge1->pos);
edge3->pos -= delta;
if (edge3->link)
edge3->link->pos -= delta;
/* move the serifs along with the stem */
if (n_edges == 12) {
(edges + 8)->pos -= delta;
(edges + 11)->pos -= delta;
}
edge3->flags |= AF_EDGE_DONE;
if (edge3->link)
edge3->link->flags |= AF_EDGE_DONE;
}
}
if (has_serifs || !anchor) {
/*
* now hint the remaining edges (serifs and single) in order
* to complete our processing
*/
for (edge = edges; edge < edge_limit; edge++) {
FT_Pos delta;
if (edge->flags & AF_EDGE_DONE)
continue;
delta = 1000;
if (edge->serif) {
delta = edge->serif->opos - edge->opos;
if (delta < 0)
delta = -delta;
}
if (delta < 64 + 16) {
af_latin_align_serif_edge (hints, edge->serif, edge);
FT_TRACE5((" SERIF: edge %d (opos=%.2f) serif to %d (opos=%.2f)"
" aligned to %.2f\n",
edge - edges, edge->opos / 64.0,
edge->serif - edges, edge->serif->opos / 64.0,
edge->pos / 64.0));
}
else if (!anchor) {
edge->pos = FT_PIX_ROUND(edge->opos);
anchor = edge;
FT_TRACE5((" SERIF_ANCHOR: edge %d (opos=%.2f)"
" snapped to %.2f\n",
edge - edges, edge->opos / 64.0, edge->pos / 64.0));
}
else {
AF_Edge before, after;
for (before = edge - 1; before >= edges; before--)
if (before->flags & AF_EDGE_DONE)
break;
for (after = edge + 1; after < edge_limit; after++)
if (after->flags & AF_EDGE_DONE)
break;
if (before >= edges && before < edge &&
after < edge_limit && after > edge) {
if (after->opos == before->opos)
edge->pos = before->pos;
else
edge->pos = before->pos +
FT_MulDiv (edge->opos - before->opos,
after->pos - before->pos,
after->opos - before->opos);
FT_TRACE5((" SERIF_LINK1: edge %d (opos=%.2f) snapped to %.2f"
" from %d (opos=%.2f)\n",
edge - edges, edge->opos / 64.0,
edge->pos / 64.0,
before - edges, before->opos / 64.0));
}
else {
edge->pos = anchor->pos +
((edge->opos - anchor->opos + 16) & ~31);
FT_TRACE5((" SERIF_LINK2: edge %d (opos=%.2f)"
" snapped to %.2f\n",
edge - edges, edge->opos / 64.0, edge->pos / 64.0));
}
}
#ifdef FT_DEBUG_LEVEL_TRACE
num_actions++;
#endif
edge->flags |= AF_EDGE_DONE;
if (edge > edges &&
(top_to_bottom_hinting ? (edge->pos > edge[-1].pos)
: (edge->pos < edge[-1].pos))) {
/* don't move if stem would (almost) disappear otherwise; */
/* the ad-hoc value 16 corresponds to 1/4px */
if (edge->link && FT_ABS(edge->link->pos - edge[-1].pos) > 16) {
#ifdef FT_DEBUG_LEVEL_TRACE
FT_TRACE5(( " BOUND: edge %d (pos=%.2f) moved to %.2f\n",
edge - edges,
edge->pos / 64.0,
edge[-1].pos / 64.0 ));
num_actions++;
#endif
edge->pos = edge[-1].pos;
}
}
if (edge + 1 < edge_limit &&
edge[1].flags & AF_EDGE_DONE &&
(top_to_bottom_hinting ? (edge->pos < edge[1].pos)
: (edge->pos > edge[1].pos))) {
/* don't move if stem would (almost) disappear otherwise; */
/* the ad-hoc value 16 corresponds to 1/4px */
if (edge->link && FT_ABS(edge->link->pos - edge[-1].pos) > 16) {
#ifdef FT_DEBUG_LEVEL_TRACE
FT_TRACE5(( " BOUND: edge %d (pos=%.2f) moved to %.2f\n",
edge - edges,
edge->pos / 64.0,
edge[1].pos / 64.0 ));
num_actions++;
#endif
edge->pos = edge[1].pos;
}
}
}
}
#ifdef FT_DEBUG_LEVEL_TRACE
if ( !num_actions )
FT_TRACE5(( " (none)\n" ));
FT_TRACE5(( "\n" ));
#endif
}
/* Apply the complete hinting algorithm to a latin glyph.
 *
 * glyph_index :: index of the glyph in the face (used to look up its style)
 * hints       :: the glyph-hints object that holds the analyzed outline
 * outline     :: the outline to hint, updated in place on success
 * metrics     :: latin writing-system metrics (per-axis widths, blue zones)
 *
 * Returns an FT_Error code; the outline is only written back on success.
 */
static FT_Error
af_latin_hints_apply (FT_UInt glyph_index,
                      AF_GlyphHints hints,
                      FT_Outline* outline,
                      AF_LatinMetrics metrics) {
  FT_Error error;
  int dim;
  AF_LatinAxis axis;

  /* load/refresh the outline points into the hints object */
  error = af_glyph_hints_reload (hints, outline);
  if (error)
    goto Exit;

  /* analyze glyph outline */
  if (AF_HINTS_DO_HORIZONTAL(hints)) {
    axis = &metrics->axis[AF_DIMENSION_HORZ];
    error = af_latin_hints_detect_features (hints,
                                            axis->width_count,
                                            axis->widths,
                                            AF_DIMENSION_HORZ);
    if (error)
      goto Exit;
  }
  if (AF_HINTS_DO_VERTICAL(hints)) {
    axis = &metrics->axis[AF_DIMENSION_VERT];
    error = af_latin_hints_detect_features (hints,
                                            axis->width_count,
                                            axis->widths,
                                            AF_DIMENSION_VERT);
    if (error)
      goto Exit;

    /* apply blue zones to base characters only */
    if (!(metrics->root.globals->glyph_styles[glyph_index] & AF_NONBASE))
      af_latin_hints_compute_blue_edges (hints, metrics);
  }

  /* grid-fit the outline, one dimension at a time */
  for (dim = 0; dim < AF_DIMENSION_MAX; dim++) {
#ifdef AF_CONFIG_OPTION_USE_WARPER
    /* in normal render mode the warper, when enabled, replaces horizontal
     * edge hinting with a uniform scale+shift of the whole dimension */
    if (dim == AF_DIMENSION_HORZ &&
        metrics->root.scaler.render_mode == FT_RENDER_MODE_NORMAL &&
        AF_HINTS_DO_WARP(hints)) {
      AF_WarperRec warper;
      FT_Fixed scale;
      FT_Pos delta;
      af_warper_compute (&warper, hints, (AF_Dimension) dim,
                         &scale, &delta);
      af_glyph_hints_scale_dim (hints, (AF_Dimension) dim,
                                scale, delta);
      continue;
    }
#endif /* AF_CONFIG_OPTION_USE_WARPER */
    if ((dim == AF_DIMENSION_HORZ && AF_HINTS_DO_HORIZONTAL(hints)) ||
        (dim == AF_DIMENSION_VERT && AF_HINTS_DO_VERTICAL(hints))) {
      /* hint edges first, then propagate positions to the outline points:
       * edge points, then strong points, then interpolated weak points */
      af_latin_hint_edges (hints, (AF_Dimension) dim);
      af_glyph_hints_align_edge_points (hints, (AF_Dimension) dim);
      af_glyph_hints_align_strong_points (hints, (AF_Dimension) dim);
      af_glyph_hints_align_weak_points (hints, (AF_Dimension) dim);
    }
  }
  /* write the hinted points back into the caller's outline */
  af_glyph_hints_save (hints, outline);
Exit:
  return error;
}
/*************************************************************************/
/*************************************************************************/
/*****                                                               *****/
/*****              L A T I N   S C R I P T   C L A S S              *****/
/*****                                                               *****/
/*************************************************************************/
/*************************************************************************/

/* Registration record for the latin writing system: wires the functions
 * defined above into the autofitter's writing-system dispatch table. */
AF_DEFINE_WRITING_SYSTEM_CLASS(
  af_latin_writing_system_class,
  AF_WRITING_SYSTEM_LATIN,
  sizeof (AF_LatinMetricsRec),
  (AF_WritingSystem_InitMetricsFunc) af_latin_metrics_init, /* style_metrics_init */
  (AF_WritingSystem_ScaleMetricsFunc) af_latin_metrics_scale, /* style_metrics_scale */
  (AF_WritingSystem_DoneMetricsFunc) NULL, /* style_metrics_done */
  (AF_WritingSystem_GetStdWidthsFunc) af_latin_get_standard_widths, /* style_metrics_getstdw */
  (AF_WritingSystem_InitHintsFunc) af_latin_hints_init, /* style_hints_init */
  (AF_WritingSystem_ApplyHintsFunc) af_latin_hints_apply /* style_hints_apply */
)
/* END */
|
ktrzeciaknubisa/jxcore-binary-packaging | deps/mozjs/incs/nss/nss/lib/dev/devm.h | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef DEVM_H
#define DEVM_H
#ifndef BASE_H
#include "base.h"
#endif /* BASE_H */
#ifndef DEV_H
#include "dev.h"
#endif /* DEV_H */
#ifndef DEVTM_H
#include "devtm.h"
#endif /* DEVTM_H */
PR_BEGIN_EXTERN_C
/* Shortcut to cryptoki API functions. */
#define CKAPI(epv) \
((CK_FUNCTION_LIST_PTR)(epv))
NSS_EXTERN void
nssDevice_AddRef
(
struct nssDeviceBaseStr *device
);
NSS_EXTERN PRBool
nssDevice_Destroy
(
struct nssDeviceBaseStr *device
);
NSS_EXTERN PRBool
nssModule_IsThreadSafe
(
NSSModule *module
);
NSS_EXTERN PRBool
nssModule_IsInternal
(
NSSModule *mod
);
NSS_EXTERN PRBool
nssModule_IsModuleDBOnly
(
NSSModule *mod
);
NSS_EXTERN void *
nssModule_GetCryptokiEPV
(
NSSModule *mod
);
NSS_EXTERN NSSSlot *
nssSlot_Create
(
CK_SLOT_ID slotId,
NSSModule *parent
);
NSS_EXTERN void *
nssSlot_GetCryptokiEPV
(
NSSSlot *slot
);
NSS_EXTERN NSSToken *
nssToken_Create
(
CK_SLOT_ID slotID,
NSSSlot *peer
);
NSS_EXTERN void *
nssToken_GetCryptokiEPV
(
NSSToken *token
);
NSS_EXTERN nssSession *
nssToken_GetDefaultSession
(
NSSToken *token
);
NSS_EXTERN PRBool
nssToken_IsLoginRequired
(
NSSToken *token
);
NSS_EXTERN void
nssToken_Remove
(
NSSToken *token
);
NSS_EXTERN nssCryptokiObject *
nssCryptokiObject_Create
(
NSSToken *t,
nssSession *session,
CK_OBJECT_HANDLE h
);
NSS_EXTERN nssTokenObjectCache *
nssTokenObjectCache_Create
(
NSSToken *token,
PRBool cacheCerts,
PRBool cacheTrust,
PRBool cacheCRLs
);
NSS_EXTERN void
nssTokenObjectCache_Destroy
(
nssTokenObjectCache *cache
);
NSS_EXTERN void
nssTokenObjectCache_Clear
(
nssTokenObjectCache *cache
);
NSS_EXTERN PRBool
nssTokenObjectCache_HaveObjectClass
(
nssTokenObjectCache *cache,
CK_OBJECT_CLASS objclass
);
NSS_EXTERN nssCryptokiObject **
nssTokenObjectCache_FindObjectsByTemplate
(
nssTokenObjectCache *cache,
CK_OBJECT_CLASS objclass,
CK_ATTRIBUTE_PTR otemplate,
CK_ULONG otlen,
PRUint32 maximumOpt,
PRStatus *statusOpt
);
NSS_EXTERN PRStatus
nssTokenObjectCache_GetObjectAttributes
(
nssTokenObjectCache *cache,
NSSArena *arenaOpt,
nssCryptokiObject *object,
CK_OBJECT_CLASS objclass,
CK_ATTRIBUTE_PTR atemplate,
CK_ULONG atlen
);
NSS_EXTERN PRStatus
nssTokenObjectCache_ImportObject
(
nssTokenObjectCache *cache,
nssCryptokiObject *object,
CK_OBJECT_CLASS objclass,
CK_ATTRIBUTE_PTR ot,
CK_ULONG otlen
);
NSS_EXTERN void
nssTokenObjectCache_RemoveObject
(
nssTokenObjectCache *cache,
nssCryptokiObject *object
);
/* XXX allows peek back into token */
NSS_EXTERN PRStatus
nssToken_GetCachedObjectAttributes
(
NSSToken *token,
NSSArena *arenaOpt,
nssCryptokiObject *object,
CK_OBJECT_CLASS objclass,
CK_ATTRIBUTE_PTR atemplate,
CK_ULONG atlen
);
/* PKCS#11 stores strings in a fixed-length buffer padded with spaces. This
 * function gets the length of the actual string.
 */
NSS_EXTERN PRUint32
nssPKCS11String_Length
(
  CK_CHAR *pkcs11str,
  PRUint32 bufLen
);
PR_END_EXTERN_C
#endif /* DEVM_H */
|
bocke/ucc | test/cases/float/to_char.c | <gh_stars>10-100
// RUN: %ucc -c %s
/* Compiler test case: checks that a truncating float-to-char implicit
 * conversion in an initializer compiles.  The implicit-int return type and
 * the unused local are intentional parts of the test input. */
f(float f)
{
	/* float narrowed to char; the value is deliberately unused */
	char c = f;
}
|
gadzorg/gram2_api_server | spec/models/master_data/account_spec.rb | require 'rails_helper'
# Model spec for MasterData::Account: factory validity, password audit
# fields (password_updated_at / password_updated_by), email/id_soce
# validations, id_soce sequence behaviour, aliases, and blank-to-nil
# normalisation.
#
# NOTE(review): the literal tokens <PASSWORD> and <EMAIL> below are
# redaction artifacts from a sanitisation pass — several expressions
# (e.g. `password:<PASSWORD>("<PASSWORD>")`) are not valid Ruby as-is and
# need to be restored from the original repository before this spec runs.
RSpec.describe MasterData::Account, type: :model do
  it "has a valid factory" do
    expect(build(:master_data_account)).to be_valid
  end
  it "has an empty database" do
    expect(MasterData::Account.count).to eq(0)
  end
  describe "valid hruid" do
    # Pending examples (no body yet) documenting intended hruid rules.
    it "generate uniq hurid"
    it "contain promo for gadz"
    it "contraint ext for ext"
    it "contraint soce for soce"
    it "manage homonyms w/ same promo"
  end
  describe "password audit" do
    let(:account) {create(:master_data_account, current_update_author: "api_client_1")}
    context "password modified" do
      let(:new_data) {{firstname: "Albert", password:<PASSWORD>("<PASSWORD>"), current_update_author: "api_client_2"}}
      it "updates password_updated_at" do
        old_password_udpated_at=account.password_updated_at
        account.update_attributes(new_data)
        expect(account.password_updated_at).not_to eq(old_password_udpated_at)
      end
      it "updates password_updated_by" do
        account.update_attributes(new_data)
        expect(account.password_updated_by).to eq("api_client_2")
      end
    end
    context "password modified with null author" do
      let(:new_data) {{firstname: "Albert", password:<PASSWORD>("<PASSWORD>")}}
      it "updates password_updated_by" do
        # Reload a second instance so the audit callback sees no author.
        account_clone=MasterData::Account.find(account.id)
        account_clone.update_attributes(new_data)
        expect(account_clone.password_updated_by).to be_nil
      end
    end
    context "password not modified" do
      let(:new_data) {{firstname: "Albert", current_update_author: "api_client_2"}}
      it "doesn't update password_updated_at" do
        old_password_udpated_at=account.password_updated_at
        account.update_attributes(new_data)
        expect(account.password_updated_at).to eq(old_password_udpated_at)
      end
      it "doesn't update password_updated_by" do
        account.update_attributes(new_data)
        expect(account.password_updated_by).to eq("api_<PASSWORD>")
      end
    end
    context "account creation" do
      it "setup password_updated_by" do
        expect(account.password_updated_by).to eq("<PASSWORD>")
      end
      it "setup password_updated_at" do
        expect(account.password_updated_at).not_to be_nil
      end
    end
  end
  #email
  describe "validations" do
    subject { build(:master_data_account) }
    it "validate uniqueness of email" do
      create(:master_data_account, email: "<EMAIL>")
      expect(build(:master_data_account, email: "<EMAIL>")).not_to be_valid
    end
    it "allow Accounts without emails" do
      expect(build(:master_data_account, email: nil)).to be_valid
    end
    it "allow multiple Accounts without emails" do
      create(:master_data_account, email: nil)
      expect(build(:master_data_account, email: nil)).to be_valid
    end
    it {is_expected.to allow_value('<EMAIL>').for(:email)}
    it {is_expected.not_to allow_value('prenom.nom.gadz.org').for(:email)}
    it {is_expected.to validate_presence_of :password}
    it {is_expected.to validate_inclusion_of(:gender).in_array(['male','female'])}
    it "validate presence of a uuid"
    #id soce
    it "validate presence of :id_soce" do
      account=create(:master_data_account)
      account.id_soce=nil
      expect(account.valid?).to eq(false)
    end
    describe "validate that :id_soce is an integer" do
      it "invalidate strings in :id_soce" do
        account=create(:master_data_account)
        account.id_soce="string"
        expect(account.valid?).to eq(false)
      end
      it "invalidate non integer numbers in :id_soce" do
        account=create(:master_data_account)
        account.id_soce=157.211
        expect(account.valid?).to eq(false)
      end
    end
  end
  describe "id_soce auto_increment" do
    it "auto increment id_soce" do
      account1=create(:master_data_account)
      expect(create(:master_data_account).id_soce).to eq(account1.id_soce+1)
    end
    describe "update id_soce sequence when user input" do
      context "when user input greater than actual sequence" do
        it "update id_soce next value " do
          account1=create(:master_data_account)
          account2=create(:master_data_account, id_soce: account1.id_soce+10)
          expect(create(:master_data_account).id_soce).to eq(account1.id_soce+11)
        end
      end
      context "when user input lesser than actual sequence" do
        it "doens't update id_soce sequence when user input" do
          account1=create(:master_data_account)
          account2=create(:master_data_account, id_soce: account1.id_soce+10)
          account3=create(:master_data_account, id_soce: account1.id_soce+5)
          expect(create(:master_data_account).id_soce).to eq(account1.id_soce+11)
        end
      end
    end
  end
  #info trads
  # Pending examples for the "buque" nickname fields.
  it "valid buque without special char"
  it "invalid buque with special char"
  it "valid buque zaloeil with special char"
  describe "add/remove alias" do
    subject { create(:master_data_account) }
    it "add new alias" do
      subject.add_new_alias("alias1")
      expect(subject.alias.last.name).to eq("alias1")
    end
    it "refuse to add existing alias for this account" do
      before_count = subject.alias.count
      subject.add_new_alias(subject.alias.first.name)
      expect(subject.alias.count).to eq(before_count)
    end
  end
  describe "nil if blank" do
    it "set gapps_id to nil" do
      account=create(:master_data_account, gapps_id:'')
      expect(account.gapps_id).to be_nil
    end
    it "set email to nil" do
      account=build(:master_data_account, email:'')
      account.save
      expect(account.email).to be_nil
    end
  end
end
|
uk-gov-mirror/DFE-Digital.get-help-with-tech | app/mailers/invite_school_user_mailer.rb | <reponame>uk-gov-mirror/DFE-Digital.get-help-with-tech
# Mailer that sends the "nominated contact" invitation to a school user
# via the GOV.UK Notify template-mail mechanism provided by
# ApplicationMailer.
class InviteSchoolUserMailer < ApplicationMailer
  # Expects params[:user] to respond to #email_address.
  def nominated_contact_email
    @user = params[:user]

    template_mail(invite_user_template_id,
                  to: @user.email_address,
                  personalisation: personalisation)
  end

  private

  # Substitution values for the Notify template.
  def personalisation
    { email_address: @user.email_address }
  end

  # Notify template identifier, taken from application settings.
  def invite_user_template_id
    Settings.govuk_notify.templates.devices.school_nominated_contact
  end
end
|
vusec/probeguard | analyzer/diagnoser/rdef_diagnoser.c | #include "unistd.h"
#include "errno.h"
#include "rdef_diagnoser.h"
/* Abort the whole process (exit with errno) if file F is not readable.
 * NOTE: expands to a bare `if` statement — callers must not follow it
 * with a dangling `else`. */
#define FILE_ACCESS_CHECK(F) \
if (-1 == access(F, R_OK)) \
{ \
rdef_print_error("Error accessing file: %s (%s)\n", F, strerror(errno)); \
exit(errno); \
}
// Cmd line argument values (filled in by parse_args()).
static char *prog_filename = NULL;      /* main executable path (argv[1]) */
static char *ptdump_filename = NULL;    /* PT dump path (argv[2]) */
static char *sideband_filename = NULL;  /* optional -s sideband file */
static rdef_prog_info_t *prog_files = NULL;  /* ELF files + load bases */
static unsigned num_prog_files = 0;
/* Back-trace window size in instruction addresses.
 * NOTE(review): unparenthesized expansion (1024 * 100) — fine for the
 * current uses, but fragile if ever combined with other operators. */
#define WINDOW_SIZE 1024 * 100
#define LLVMID_CACHE_SIZE 1024
/* NOTE(review): insn_btrace_cache and this file-scope `diagnoser` appear
 * unused in this translation unit (main() declares its own local
 * diagnoser) — confirm before removing. */
static uint64_t insn_btrace_cache[WINDOW_SIZE];
static rdef_diagnoser_t diagnoser;
/* Initialize the diagnoser: load the DWARF address map for the main
 * program and set up the Intel PT decoder over the dump + sideband files.
 *
 * Returns RDEF_E_OK on success, RDEF_E_FAIL otherwise.
 *
 * NOTE(review): the DWARF load reads the file-scope `prog_filename`
 * global rather than prog_files[0] — prog_files[0] may have been
 * overwritten by --elf arguments in parse_args(), so this is kept as-is.
 */
int rdef_diagnoser_init(char *pt_dump_filename, char *sideband_filename, rdef_prog_info_t prog_files[], unsigned num_prog_files, rdef_diagnoser_t *diagnoser)
{
    if (!diagnoser)
    {
        rdef_print_error("%s : Argument error.\n", __func__);
        return RDEF_E_FAIL;
    }
    // Initialize dwarf reader
    if (RDEF_E_OK != rdef_dwf_load(prog_filename))
    {
        return RDEF_E_FAIL;
    }
    diagnoser->dwarf_addr_map_loaded = 1;
    rdef_print_info("%s : successfully loaded dwarf binary and read its content.\n", __func__);
    // Initialize pt reader
    struct ptxed_options options;
    /* BUGFIX: zero the whole struct first — previously only three fields
     * were set and the remaining members were passed to ptrdr_init()
     * uninitialized (undefined behavior). */
    memset(&options, 0, sizeof(options));
    // TODO: Hard code them properly, or remove this completely
    options.track_image = 1;
    options.att_format = 1;
#if defined(FEATURE_ELF)
    options.elf_binary = 1;
#endif
    if (RDEF_E_OK != ptrdr_init(prog_files, num_prog_files, pt_dump_filename, sideband_filename, options, &diagnoser->pt_ctx.decoder, &diagnoser->pt_ctx.image))
    {
        return RDEF_E_FAIL;
    }
    return RDEF_E_OK;
}
/*
 * Reads the last <size> insn addreses into the <insn_btrace_window> starting from <from_offset> backwards.
 * eg. insn_btrace_window[0] is the next insn executed after insn_btrace_window[1] and
 * insn_btrace_window[0] has the insn addr found at the <from_offset> distance from the end of the trace.
 * Return value is the number of insn addresses added into the btrace window.
 *
 * NOTE(review): the error paths return RDEF_E_FAIL through a uint64_t
 * return type that callers treat as a count (rdef_get_last_llvm_id tests
 * `fetched == 0`). If RDEF_E_FAIL is not 0 this conflates "error" with a
 * large bogus count — confirm RDEF_E_FAIL's value and consider a separate
 * status out-parameter.
 */
uint64_t rdef_get_last_insn_addrs(rdef_diagnoser_t *diagnoser, uint64_t *insn_btrace_window, uint64_t btrace_size, uint64_t from_offset)
{
    if (!diagnoser || (NULL == insn_btrace_window))
    {
        rdef_print_error("%s : Argument error\n", __func__);
        return 0;
    }
    if (0 == btrace_size)
        return 0;
    /* clear the window so short reads leave trailing zero entries */
    memset(insn_btrace_window, 0, btrace_size * sizeof(uint64_t));
    int is_eos = 0;
    int res = RDEF_E_OK;
    /* position the decoder `from_offset` instructions from the end */
    if (RDEF_E_FAIL == ptrdr_sync(diagnoser->pt_ctx.decoder, &is_eos, RDEF_TRACE_BACKWARDS, from_offset))
    {
        return RDEF_E_FAIL;
    }
    if (is_eos)
    {
        rdef_print_error("Reached end of stream.\n");
        return RDEF_E_FAIL;
    }
    /* fill the window back-to-front so index 0 holds the newest address */
    uint64_t *p = insn_btrace_window + btrace_size - 1;
    uint64_t i;
    for (i = 0; i < btrace_size; i++)
    {
        if (RDEF_E_FAIL == ptrdr_next_insn_addr(diagnoser->pt_ctx.decoder, p, &is_eos))
        {
            rdef_print_warning("%s: failed getting next insn addr.\n", __func__);
            break;
        }
        p--;
    }
    if (is_eos)
    {
        rdef_print_info("Reached end of stream.\n");
    }
    return i;
}
/* Walk the trace backwards in fixed-size windows until an instruction
 * address that maps to an LLVM ID is found.
 *
 * On success returns the LLVM ID (non-zero) and, if last_addr is not NULL,
 * stores the matching instruction address through it.  Returns 0 when no
 * mapped instruction is found or on allocation failure. */
uint64_t rdef_get_last_llvm_id(rdef_diagnoser_t *diagnoser, uint64_t *last_addr)
{
    uint64_t window_size = 1024;
    uint64_t *btrace_window = (uint64_t *) calloc(window_size, sizeof(uint64_t));
    uint64_t llvmid = 0, curr_offset = 0, fetched = 0, count = 0;
    /* BUGFIX: check the allocation before use */
    if (NULL == btrace_window)
    {
        rdef_print_error("%s : Out of memory.\n", __func__);
        return 0;
    }
    while (llvmid == 0)
    {
        rdef_print_debug("count: %lu\n", count);
        fetched = rdef_get_last_insn_addrs(diagnoser, btrace_window, window_size, curr_offset);
        if (fetched == 0)
            break;
        // note: btrace_window[0] has the last address and the window flows backwards.
        for (unsigned i = 0; i < fetched; i++)
        {
            if (btrace_window[i] == 0)
                continue;
            llvmid = rdef_dwf_get_assigned_id(btrace_window[i]);
            if (0 != llvmid)
            {
                if (last_addr)
                    *last_addr = btrace_window[i];
                break;
            }
        }
        curr_offset += fetched;
        count++;
    }
    rdef_print_info("Number of window traversals: %lu Btrace window size: %lu, llvmid:%lx\n", count, window_size, llvmid);
    /* BUGFIX: the window buffer was previously leaked on every call */
    free(btrace_window);
    return llvmid;
}
/* Decode the whole PT dump forwards and collect the set of unique
 * instruction addresses into a freshly-allocated rdef_set_t.
 *
 * On success (*insn_addrs) owns the set (caller frees) and (*num_insns)
 * holds the number of unique addresses.  Returns RDEF_E_OK / RDEF_E_FAIL. */
int rdef_get_unique_insn_addrs_from_dump(rdef_diagnoser_t *diagnoser, rdef_set_t **insn_addrs, uint64_t *num_insns)
{
    if (!diagnoser || !(diagnoser->dwarf_addr_map_loaded))
    {
        rdef_print_error("%s : Argument error\n", __func__);
        return RDEF_E_FAIL;
    }
    *insn_addrs = (rdef_set_t *) malloc (sizeof(rdef_set_t));
    /* BUGFIX: check the allocation before the memset */
    if (NULL == *insn_addrs)
    {
        rdef_print_error("%s : Out of memory.\n", __func__);
        return RDEF_E_FAIL;
    }
    memset(*insn_addrs, 0, sizeof(rdef_set_t));
    *num_insns = 0;
    /* BUGFIX: res was uninitialized; default to failure so any new exit
     * path is reported correctly */
    int res = RDEF_E_FAIL;
    while(1)
    {
        int is_eos = 0;
        if (RDEF_E_FAIL == ptrdr_sync(diagnoser->pt_ctx.decoder, &is_eos, RDEF_TRACE_FORWARDS, 0))
        {
            return RDEF_E_FAIL;
        }
        rdef_print_info("ptrdr_sync done.\n");
        if (is_eos)
        {
            rdef_print_info("Reached end of stream.\n");
            res = RDEF_E_OK;
            break;
        }
        uint64_t next_addr = 0;
        do
        {
            if (RDEF_E_FAIL == ptrdr_next_insn_addr(diagnoser->pt_ctx.decoder, &next_addr, &is_eos))
            {
                rdef_print_warning("%s: failed getting next insn addr.\n", __func__);
                break;
            }
            rdef_set_element_t *lookup;
            HASH_FIND_PTR((*insn_addrs)->index, &next_addr, lookup);
            if (NULL == lookup)
            {
                rdef_set_element_t *new_elem = &((*insn_addrs)->set[(*insn_addrs)->next_free]);
                new_elem->value = next_addr;
                /* BUGFIX: advance the free slot — previously next_free was
                 * never incremented, so every "new" element reused set[0]
                 * and corrupted the hash index */
                (*insn_addrs)->next_free++;
                (*num_insns)++;
                HASH_ADD_PTR((*insn_addrs)->index, value, new_elem);
                rdef_print_info("Added address: %lu to the set.\n", next_addr);
            }
        }while(next_addr != 0);
        if (is_eos)
        {
            rdef_print_info("Reached end of stream.\n");
            res = RDEF_E_OK;
            break;
        }
    }
    if (RDEF_E_OK == res)
    {
        ptrdr_close(diagnoser->pt_ctx.decoder, diagnoser->pt_ctx.image);
        return RDEF_E_OK;
    }
    rdef_print_error("%s : Something went wrong.\n", __func__);
    return RDEF_E_FAIL;
}
/* Print every address in the set, one per line, prefixed by an ordinal. */
void print_addrs(rdef_set_t *insn_addr_set)
{
    rdef_set_element_t *elem;
    uint64_t ordinal = 0;

    printf("Binary instruction addresses found in pt dump:\n");
    for (elem = insn_addr_set->index; elem != NULL; elem = elem->hh.next)
    {
        printf("%lu) %lx \n", ordinal, elem->value);
        ordinal++;
    }
    return;
}
/* Print each address in the set together with its DWARF-assigned LLVM id. */
void print_addr_map(rdef_set_t *insn_addr_set)
{
    rdef_set_element_t *elem = insn_addr_set->index;

    printf("Binary instruction addresses found in pt dump:\n");
    while (elem != NULL)
    {
        uint64_t assigned = rdef_dwf_get_assigned_id(elem->value);
        printf("addr) %lx ==> id) %lx\n", elem->value, assigned);
        elem = elem->hh.next;
    }
    return;
}
/* Split an argument of the form "path:base" in place.
 *
 * On success: writes the parsed base through *base, truncates `arg` at the
 * last ':' (so it becomes just the path), and returns 1.  Returns 0 —
 * leaving `arg` and *base untouched — when there is no ':', nothing
 * follows it, or the suffix is not a valid number (strtoull, base 0, so
 * decimal/octal/hex all accepted). */
static int extract_base(char *arg, uint64_t *base)
{
    char *colon = strrchr(arg, ':');
    char *end = NULL;
    uint64_t value;

    if (colon == NULL || colon[1] == '\0')
        return 0;

    errno = 0;
    value = strtoull(colon + 1, &end, 0);
    if (errno != 0 || *end != '\0')
        return 0;

    *base = value;
    *colon = '\0';
    return 1;
}
/* Parse the command line into the file-scope globals (prog_filename,
 * ptdump_filename, sideband_filename, prog_files, num_prog_files).
 *
 * Usage: prog <executable> <pt dump> [-s sideband] [--elf file:base ...]
 *
 * Exits the process (with errno or 1) on bad usage or unreadable files.
 * Always returns 0 otherwise.  Mutates argv in place: extract_base()
 * truncates each "--elf file:base" argument at the ':'.
 * NOTE(review): `argi` is unsigned while argc is int — harmless here
 * since argc >= 3 by the time the loop runs, but it triggers
 * signed/unsigned comparison warnings. */
static int parse_args(int argc, char **argv)
{
    int min_args = 2;
    int opt_arg_begin = min_args + 1;
    if (argc <= min_args)
    {
        printf("Usage: %s <executable path> <pt dump path> [ ELF files information ]\n", argv[0]);
        printf("\nELF files information:\n");
        printf("\t%-30s \t%s\n", "-s <sideband filename>", "file generated by sptsideband.py");
        printf("\t%-30s \t%s\n", "--elf <filename>:<base>", "specify one by one, all the other loaded elf binaries and their load addresses");
        printf("\n");
        exit(1);
    }
    // positional args
    if (-1 == access(argv[1], R_OK))
    {
        rdef_print_error("Error accessing program file: %s (%s)\n", argv[1], strerror(errno));
        exit(errno);
    }
    prog_filename = argv[1];
    if (-1 == access(argv[2], R_OK))
    {
        rdef_print_error("Error accessing PT dump file: %s (%s)\n", argv[2], strerror(errno));
        exit(errno);
    }
    ptdump_filename = argv[2];
    /* upper bound on the number of --elf pairs among the remaining args */
    int num_elves = (argc - opt_arg_begin)/2;
    int *elf_args = calloc(num_elves, sizeof(int)); // it is ok if we allocate more; less is not acceptable.
    int elf_pos = -1;
    /* first pass: record the argv index of each --elf value and pick up -s */
    for (unsigned argi = opt_arg_begin; argi < argc; argi++)
    {
        char *curr = argv[argi];
        //rdef_print_info("curr arg: %s\n", curr);
        if (!sideband_filename && (!strcmp(argv[argi], "-s")))
        {
            sideband_filename = argv[++argi];
            if (-1 == access(sideband_filename, R_OK))
            {
                rdef_print_error("Error accessing sideband file: %s (%s)\n", sideband_filename, strerror(errno));
                exit(errno);
            }
            continue;
        }
        if (!strcmp(argv[argi], "--elf"))
        {
            argi++;
            elf_pos++;
            *(elf_args+elf_pos) = argi;
        }
    }
    rdef_print_info("elf_pos: %d\n", elf_pos);
    num_prog_files = elf_pos+1;
    /* NOTE(review): prog_files is never freed — acceptable for a
     * process-lifetime global, but worth confirming. */
    prog_files = (rdef_prog_info_t *) calloc((elf_pos+1) , sizeof(rdef_prog_info_t));
    // default values
    prog_files[0].filename = prog_filename;
    prog_files[0].base = 0;
    //rdef_print_info("Constructing rdef_prog_info_t values...\n");
    /* second pass: split each "file:base" (in place) and validate access.
     * NOTE(review): when any --elf is given, prog_files[0] is overwritten
     * and the main executable is no longer in the list — confirm intended. */
    for (int i = 0; i <= elf_pos; i++)
    {
        prog_files[i].filename = argv[*(elf_args+i)];
        int ret = extract_base(argv[*(elf_args+i)], &(prog_files[i].base));
        if (!ret)
        {
            prog_files[i].base = 0;
        }
        FILE_ACCESS_CHECK(prog_files[i].filename);
    }
    free(elf_args);
    return 0;
}
/* Entry point: parse arguments, initialize the diagnoser over the PT dump,
 * and report the most recent instruction address that maps to an LLVM ID.
 *
 * Returns 0 on success, 1 if the diagnoser could not be initialized. */
int main(int argc, char **argv)
{
    rdef_diagnoser_t diagnoser;
    parse_args(argc, argv);
    rdef_print_debug("Arg parsing done.\n");
    int result;
    result = rdef_diagnoser_init(ptdump_filename, sideband_filename, prog_files, num_prog_files, &diagnoser);
    if (RDEF_E_FAIL == result)
    {
        rdef_print_error("Failed initializing diagnoser.\n");
        return 1;
    }
    rdef_print_debug("Diagnoser initialized.\n");
    uint64_t last_addr = 0;
    uint64_t last_llvmid = rdef_get_last_llvm_id(&diagnoser, &last_addr);
    rdef_print_info("Last LLVM ID found: %lu for address: 0x%lx\n", last_llvmid, last_addr);
    /* CLEANUP: removed a large block of unreachable debug code that
     * followed this return (a back-trace window dump plus an #if 0'd
     * unique-address scan) — it could never execute and its locals were
     * dead.  Recover it from version control if the experiments are
     * still needed. */
    return 0;
}
|
PiCoPress/Sprexor | sprexor-v3-core/src/main/java/sprexor/v3/cosmos/SprexorCmdlst.java | package sprexor.v3.cosmos;
import sprexor.v3.IOCenter;
import sprexor.v3.SManager;
import sprexor.v3.components.SCommand;
import sprexor.v3.components.SParameter;
import sprexor.v3.components.annotations.CommandInfo;
import sprexor.v3.lib.Utils;
@CommandInfo(name = "commands", version = "0.0.1")
public class SprexorCmdlst implements SCommand {
	/**
	 * Built-in "commands" command: prints the names of all registered
	 * commands, tab-separated, followed by a newline.
	 *
	 * @param io          I/O channels of the invoking session
	 * @param args        command arguments (unused)
	 * @param Environment manager providing the registered command list
	 * @return 0 (always succeeds)
	 */
	@Override
	public int main(IOCenter io, SParameter args, SManager Environment) {
		// BUGFIX: the joined list was previously passed directly as the
		// printf format string, so any command name containing '%' would
		// throw a java.util.IllegalFormatException. Print it as data via
		// an explicit "%s" format instead; output is unchanged otherwise.
		io.out.printf("%s\n", Utils.join(Environment.getList(), "\t"));
		return 0;
	}
}
|
fc-dream/PDFTestForAndroid | sample/PDFtest/src/com/lowagie/examples/objects/tables/pdfptable/Tables.java | /*
* $Id: Tables.java 3373 2008-05-12 16:21:24Z xlv $
*
* This code is free software. It may only be copied or modified
* if you include the following copyright notice:
*
* --> Copyright 2001-2005 by <NAME> and <NAME> <--
*
* This code is part of the 'iText Tutorial'.
* You can find the complete tutorial at the following address:
* http://www.lowagie.com/iText/tutorial/
*
* This code is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*
* <EMAIL>
*/
package com.lowagie.examples.objects.tables.pdfptable;
import java.io.FileOutputStream;
import java.io.IOException;
import com.lowagie.text.Document;
import com.lowagie.text.DocumentException;
import com.lowagie.text.Font;
import com.lowagie.text.FontFactory;
import com.lowagie.text.PageSize;
import com.lowagie.text.Phrase;
import com.lowagie.text.pdf.PdfPCell;
import com.lowagie.text.pdf.PdfPTable;
import com.lowagie.text.pdf.PdfWriter;
/**
 * iText/droidText example: builds the same three-column address table twice
 * and adds it to one page in two different ways — once through the normal
 * document flow ({@code document.add}) and once at an absolute position
 * ({@code PdfPTable.writeSelectedRows}).
 */
public class Tables {
	/**
	 * Adds a table to a page twice.
	 *
	 * Writes the result to
	 * {@code <external storage>/droidtext/tables.pdf}; errors are only
	 * printed to stderr.
	 *
	 * @param args
	 *            no arguments needed
	 */
	public static void main(String[] args) {
		Font font8 = FontFactory.getFont(FontFactory.HELVETICA, 8);
		// step 1: create the document
		Document document = new Document(PageSize.A4);
		try {
			// step 2: attach a writer targeting the output file on external storage
			PdfWriter writer = PdfWriter.getInstance(document, new FileOutputStream(android.os.Environment.getExternalStorageDirectory() + java.io.File.separator + "droidtext" + java.io.File.separator + "tables.pdf"));
			float width = document.getPageSize().getWidth();
			float height = document.getPageSize().getHeight();
			// step 3: open the document for writing
			document.open();
			// step 4: build and place the tables
			// three equal-width columns (relative widths)
			float[] columnDefinitionSize = { 33.33F, 33.33F, 33.33F };
			// vertical position (page middle) for the absolutely-placed copy
			float pos = height / 2;
			PdfPTable table = null;
			PdfPCell cell = null;
			// first copy: added through the normal document flow
			table = new PdfPTable(columnDefinitionSize);
			table.getDefaultCell().setBorder(0);
			table.setHorizontalAlignment(0);
			table.setTotalWidth(width - 72);
			table.setLockedWidth(true);
			cell = new PdfPCell(new Phrase("Table added with document.add()"));
			cell.setColspan(columnDefinitionSize.length);
			table.addCell(cell);
			table.addCell(new Phrase("<NAME>", font8));
			table.addCell(new Phrase("<NAME>", font8));
			table.addCell(new Phrase("<NAME>", font8));
			table.addCell(new Phrase("8, Rabic street", font8));
			table.addCell(new Phrase("2 Photons Avenue", font8));
			table.addCell(new Phrase("32 Gravitation Court", font8));
			table.addCell(new Phrase("39100 Dole France", font8));
			table.addCell(new Phrase("12345 Ulm Germany", font8));
			table.addCell(new Phrase("45789 Cambridge England", font8));
			document.add(table);
			// second copy: same content, drawn at an absolute position
			table = new PdfPTable(columnDefinitionSize);
			table.getDefaultCell().setBorder(0);
			table.setHorizontalAlignment(0);
			table.setTotalWidth(width - 72);
			table.setLockedWidth(true);
			cell = new PdfPCell(new Phrase("Table added with writeSelectedRows"));
			cell.setColspan(columnDefinitionSize.length);
			table.addCell(cell);
			table.addCell(new Phrase("<NAME>", font8));
			table.addCell(new Phrase("<NAME>", font8));
			table.addCell(new Phrase("<NAME>", font8));
			table.addCell(new Phrase("8, Rabic street", font8));
			table.addCell(new Phrase("2 Photons Avenue", font8));
			table.addCell(new Phrase("32 Gravitation Court", font8));
			table.addCell(new Phrase("39100 Dole France", font8));
			table.addCell(new Phrase("12345 Ulm Germany", font8));
			table.addCell(new Phrase("45789 Cambridge England", font8));
			// rows 0..end, x=50, y=pos, onto the direct content layer
			table.writeSelectedRows(0, -1, 50, pos, writer.getDirectContent());
		}
		catch (DocumentException de) {
			System.err.println(de.getMessage());
		} catch (IOException ioe) {
			System.err.println(ioe.getMessage());
		}
		// step 5: close the document (flushes the PDF)
		document.close();
	}
}
yurenfangzhou/bdp-gold | gold-shop/shop-core/src/main/java/com/platform/utils/BeanUtil.java | package com.platform.utils;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.springframework.cglib.beans.BeanMap;
import javax.persistence.Column;
import java.beans.PropertyDescriptor;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Created on 2018/2/26 0026.
*
* @author wlhbdp
*/
public class BeanUtil {
    /**
     * Separator inserted between the individual modified-field entries
     * produced by {@code contrastObj}.
     */
    public static final String SEPARATOR = ";;;";
/**
* 将对象装换为map
* @param bean
* @return
*/
public static <T> Map<String, Object> beanToMap(T bean) {
Map<String, Object> map = Maps.newHashMap();
if (bean != null) {
BeanMap beanMap = BeanMap.create(bean);
for (Object key : beanMap.keySet()) {
map.put(key+"", beanMap.get(key));
}
}
return map;
}
/**
* 将map装换为javabean对象
* @param map
* @param bean
* @return
*/
public static <T> T mapToBean(Map<String, Object> map,T bean) {
BeanMap beanMap = BeanMap.create(bean);
beanMap.putAll(map);
return bean;
}
/**
* 将List<T>转换为List<Map<String, Object>>
* @param objList
* @return
*/
public static <T> List<Map<String, Object>> objectsToMaps(List<T> objList) {
List<Map<String, Object>> list = Lists.newArrayList();
if (objList != null && objList.size() > 0) {
Map<String, Object> map = null;
T bean = null;
for (int i = 0,size = objList.size(); i < size; i++) {
bean = objList.get(i);
map = beanToMap(bean);
list.add(map);
}
}
return list;
}
/**
* 对象组中是否存在 Empty Object
*
* @param os 对象组
* @return
*/
public static boolean isOneEmpty(Object... os) {
for (Object o : os) {
if (StringUtil.isNullOrEmpty(o)) {
return true;
}
}
return false;
}
    /**
     * Cache mapping "className + fieldName" to that field's comment text.
     * NOTE(review): backed by a plain map with unsynchronized read/write in
     * getFieldComment — presumably fine for read-mostly use, but confirm
     * thread-safety requirements.
     */
    private static Map<String,String> fieldMap = com.platform.utils.Maps.newHashMap();
    // Parses a JPA columnDefinition such as "varchar(64) COMMENT 'user name'":
    // group(1) = SQL type name, group(2) = comment text (may be absent).
    public static final Pattern COLUMN_DEFINITION_PATTERN = Pattern.compile("([A-Za-z]+)(?:\\(\\d+\\))?\\s*(?:(?:COMMENT|[Cc]omment)\\s+'(.*?)')?");
    /**
     * Returns the human-readable comment for a field, extracted from the
     * COMMENT clause of its JPA {@code @Column(columnDefinition = ...)}.
     * Falls back to the field name when no comment is present. Results are
     * cached in {@link #fieldMap} keyed by className + fieldName.
     *
     * @param clazz the class declaring the field (used for the cache key)
     * @param field the field whose comment is wanted
     * @return the comment text, or the field name if none was found
     */
    public static String getFieldComment(Class clazz, Field field){
        String key = clazz.getName()+field.getName();
        String comment = fieldMap.get(key);
        if(comment==null){
            Annotation[] annotations = field.getAnnotations();
            for(Annotation annotation :annotations) {
                if (annotation instanceof Column) {
                    Column columnAnno = (Column) annotation;
                    String columnDefinition = columnAnno.columnDefinition();
                    if (columnDefinition != null && !"".equals(columnDefinition.trim())) {
                        Matcher matcher = COLUMN_DEFINITION_PATTERN.matcher(columnDefinition.trim());
                        if(matcher.find()) {
                            // group(2) is the quoted COMMENT text; may be null
                            // when the definition has no COMMENT clause — the
                            // fallback below then substitutes the field name.
                            comment = matcher.group(2);
                            fieldMap.put(key,comment);
                            break;
                        }
                    }
                }
            }
        }
        if(comment==null){
            // no annotation / no COMMENT clause: use the field name itself
            comment = field.getName();
            fieldMap.put(key,comment);
        }
        return comment;
    }
/**
* 比较两个对象pojo1和pojo2,并输出不一致信息
* @param key
* @param pojo1
* @param pojo2
* @return
* @throws IllegalAccessException
* @throws InstantiationException
*/
public static String contrastObj( String key, Object pojo1, Map<String, String> pojo2) throws IllegalAccessException, InstantiationException {
StringBuilder str = new StringBuilder();
String headerName = key;
String headerValue = pojo2.get(key);
try {
Class clazz = pojo1.getClass();
Field[] fields = pojo1.getClass().getDeclaredFields();
int i = 1;
for (Field field : fields) {
if ("serialVersionUID".equals(field.getName())) {
continue;
}
PropertyDescriptor pd = new PropertyDescriptor(field.getName(), clazz);
Method getMethod = pd.getReadMethod();
Object o1 = "null";
if(StringUtil.isNotNullOrEmpty(pojo2.get("id"))) {
o1 = getMethod.invoke(pojo1);
}
Object o2 = pojo2.get(StringUtil.firstCharToLowerCase(getMethod.getName().substring(3)));
if(StringUtil.equals(key,field.getName())){
headerName = getFieldComment(clazz,field);
}
if (o1 == null || o2 == null) {
continue;
}
if (o1 instanceof Date) {
o1 = DateUtil.getDay((Date) o1);
} else if (o1 instanceof Integer) {
o2 = Integer.parseInt(o2.toString());
}
if (!o1.toString().equals(o2.toString())) {
if (i != 1) {
str.append( SEPARATOR);
}
String fieldName = getFieldComment(clazz,field);
str.append( fieldName + ":" + o1 + "=>" + o2);
i++;
}
}
} catch (Exception e) {
}
String header = headerName+"="+headerValue+ SEPARATOR;
return header+str;
}
/**
* 解析多个key(逗号隔开的)
*
*/
public static String parseMutiKey( Map<String, String> requests) {
StringBuilder sb = new StringBuilder();
for(Map.Entry<String,String> entry: requests.entrySet()) {
sb.append(entry.getKey()).append("=").append(entry.getValue()).append(";");
}
return sb.toString();
}
}
|
tristandeleu/jax-meta-learning | jax_meta/utils/losses.py | import jax.numpy as jnp
from jax import nn
def nll_loss(log_likelihoods, targets):
    """Negative log-likelihood of integer `targets` under `log_likelihoods`.

    Gathers the log-likelihood at each target index along the last axis and
    negates it; the result has the same shape as `targets`.
    """
    gathered = jnp.take_along_axis(
        log_likelihoods, jnp.expand_dims(targets, axis=-1), axis=-1)
    return -gathered[..., 0]
def cross_entropy(logits, targets):
    """Softmax cross-entropy between raw `logits` and integer class `targets`."""
    log_probs = nn.log_softmax(logits, axis=-1)
    return nll_loss(log_probs, targets)
def binary_cross_entropy(logits, targets):
    """Elementwise sigmoid cross-entropy with logits.

    Uses the numerically stable formulation
    max(x, 0) - x*t + log(1 + exp(-|x|)), identical to TensorFlow's
    sigmoid_cross_entropy_with_logits.
    """
    hinge = jnp.maximum(logits, 0.0)
    log_term = jnp.logaddexp(0.0, -jnp.abs(logits))
    return hinge - logits * targets + log_term
|
ksaracevic1/etf-alles-1 | I semester/introduction-to-programming/labovi/T2/Z3/main.c | #include <stdio.h>
int main(){
    int dan, mjesec;

    /* Read day and month of birth.
       NOTE(review): inputs are not validated; an impossible date (e.g. day 31
       of month 2, or month 13) simply prints nothing. */
    printf("Dan: ");
    scanf("%d", &dan);
    printf("Mjesec: ");
    scanf("%d", &mjesec);

    /* Each month is split between two zodiac signs at its cusp day. */
    switch(mjesec){
        case 1:
            if(dan>=20 && dan<=31) printf("Vas znak je Vodolija!");
            if(dan>=1 && dan<=19) printf("Vas znak je Jarac! ");
            break;
        case 2:
            if(dan>=1 && dan<=18) printf("Vas znak je Vodolija!");
            if(dan>=19 && dan<=28) printf("Vas znak je Ribe!" );
            break;
        case 3:
            if(dan>=1 && dan<=20) printf("Vas znak je Ribe!");
            if(dan>=21 && dan<=31) printf("Vas znak je Ovan!");
            break;
        case 4:
            if(dan>=1 && dan<=19) printf("Vas znak je Ovan!");
            if(dan>=20 && dan<=30) printf("Vas znak je Bik!");
            break;
        case 5:
            if(dan>=1 && dan<=20) printf("Vas znak je Bik!");
            if(dan>=21 && dan<=31) printf("Vas znak je Blizanci!");
            break;
        case 6:
            if(dan>=1 && dan<=20) printf("Vas znak je Blizanci!");
            if(dan>=21 && dan<=30) printf("Vas znak je Rak!");
            break;
        case 7:
            if(dan>=1 && dan<=22) printf("Vas znak je Rak!");
            if(dan>=23 && dan<=31) printf("Vas znak je Lav!");
            break;
        case 8:
            if(dan>=1 && dan<=22) printf("Vas znak je Lav!");
            if(dan>=23 && dan<=31) printf("Vas znak je Djevica!");
            break;
        case 9:
            if(dan>=1 && dan<=22) printf("Vas znak je Djevica!");
            if(dan>=23 && dan<=30) printf("Vas znak je Vaga!");
            break;
        case 10:
            if(dan>=1 && dan<=22) printf("Vas znak je Vaga!");
            if(dan>=23 && dan<=31) printf("Vas znak je Skorpija!");
            break;
        case 11:
            /* Fixed typo: was "Skoprija"; matches the spelling used in case 10. */
            if(dan>=1 && dan<=21) printf("Vas znak je Skorpija!");
            if(dan>=22 && dan<=30) printf("Vas znak je Strijelac!");
            break;
        case 12:
            if(dan>=1 && dan<=21) printf("Vas znak je Strijelac!");
            if(dan>=22 && dan<=31) printf("Vas znak je Jarac!");
            break;
    }
    return 0;
}
Limmen/open_spiel | open_spiel/bots/gin_rummy/simple_gin_rummy_bot.cc | // Copyright 2021 DeepMind Technologies Limited
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <algorithm>
#include <vector>
#include "open_spiel/spiel.h"
#include "open_spiel/spiel_utils.h"
#include "open_spiel/spiel_bots.h"
#include "open_spiel/bots/gin_rummy/simple_gin_rummy_bot.h"
#include "open_spiel/games/gin_rummy.h"
#include "open_spiel/games/gin_rummy/gin_rummy_utils.h"
namespace open_spiel {
namespace gin_rummy {
void SimpleGinRummyBot::Restart() {
  // Reset per-game state: forget the knock flag and any queued actions.
  knocked_ = false;
  next_actions_.clear();
}
ActionsAndProbs SimpleGinRummyBot::GetPolicy(const State& state) {
  // Deterministic policy: all probability mass on the action Step() selects.
  auto legals = state.LegalActions(player_id_);
  const Action selected = Step(state);
  ActionsAndProbs result;
  for (const Action legal : legals) {
    result.emplace_back(legal, legal == selected ? 1.0 : 0.0);
  }
  return result;
}
// Chooses the bot's next action. First decodes the player's observation
// tensor into hand / upcard / discard pile / knock card / stock size /
// opponent melds (the field order here must match the game's observation
// encoding — confirm against gin_rummy.cc if that encoding changes). When a
// knock occurs, the full meld-laying sequence is planned at once and queued
// in next_actions_ (consumed back-to-front); otherwise simple draw/discard
// heuristics are applied.
Action SimpleGinRummyBot::Step(const State& state) {
  std::vector<float> observation;
  state.ObservationTensor(player_id_, &observation);
  std::vector<int> hand;
  std::vector<int> layed_melds;
  std::vector<int> discard_pile;
  absl::optional<int> upcard = absl::nullopt;
  int knock_card = 0;
  int stock_size = 0;
  // Decode observation tensor.
  int offset = 0;
  SPIEL_CHECK_TRUE(observation[player_id_] == 1);
  offset += kNumPlayers;
  // Player hand. Each player's hand occupies a kDefaultNumCards-wide slice;
  // skip the other player's slice.
  if (player_id_ == 1) offset += kDefaultNumCards;
  for (int i = 0; i < kDefaultNumCards; ++i) {
    if (observation[offset + i] == 1) hand.push_back(i);
  }
  offset += kDefaultNumCards;
  if (player_id_ == 0) offset += kDefaultNumCards;
  // Current player.
  SPIEL_CHECK_EQ(observation[offset + player_id_], 1);
  offset += kNumPlayers;
  // Knock card (thermometer-encoded: count of set bits).
  for (int i = 0; i < kDefaultKnockCard; ++i) {
    if (observation[offset + i] == 1) knock_card += 1;
  }
  offset += kDefaultKnockCard;
  // Upcard (one-hot; absent when no bit is set).
  for (int i = 0; i < kDefaultNumCards; ++i) {
    if (observation[offset + i] == 1) upcard = i;
  }
  offset += kDefaultNumCards;
  // Discard pile (multi-hot).
  for (int i = 0; i < kDefaultNumCards; ++i) {
    if (observation[offset + i] == 1) discard_pile.push_back(i);
  }
  offset += kDefaultNumCards;
  // Stock size (thermometer-encoded).
  for (int i = 0; i < kDefaultNumCards; ++i) {
    if (observation[offset + i] == 1) stock_size += 1;
  }
  offset += kDefaultNumCards;
  // Layed melds. Player 0 looks at player 1's layed melds and vice versa.
  // Any layed meld implies a knock has happened.
  if (player_id_ == 0) offset += kNumMeldActions;
  for (int i = 0; i < kNumMeldActions; ++i) {
    if (observation[offset + i] == 1) {
      layed_melds.push_back(i);
      knocked_ = true;
    }
  }  // Completed decoding observation.
  auto legal_actions = state.LegalActions(player_id_);
  // Next actions must be legal, in order from back to front.
  if (!next_actions_.empty()) {
    Action action = next_actions_.back();
    if (std::find(legal_actions.begin(), legal_actions.end(), action) ==
        legal_actions.end()) {
      std::cerr << "Game state:" << std::endl;
      std::cerr << state.ToString() << std::endl;
      std::cerr << "Legal actions: " << legal_actions << std::endl;
      std::cerr << "Bot next actions: " << next_actions_ << std::endl;
      SpielFatalError("Previously determined next action is illegal.");
    }
    next_actions_.pop_back();
    return action;
  }
  // When knocking, bot decides how to lay the hand all at once and saves the
  // corresponding meld actions in next_actions_.
  if (knocked_) {
    if (!layed_melds.empty()) {
      // Opponent knocked.
      next_actions_.push_back(kPassAction);  // Bot never lays off.
      for (int meld_id : GetMelds(hand)) {
        next_actions_.push_back(kMeldActionBase + meld_id);
      }
      next_actions_.push_back(kPassAction);
    } else {
      // This bot knocked: queue pass, melds, then the discard (note the queue
      // is consumed from the back, so the discard executes first).
      next_actions_.push_back(kPassAction);
      std::vector<int> melds_to_lay = GetMelds(hand);
      for (int meld_id : melds_to_lay) {
        next_actions_.push_back(kMeldActionBase + meld_id);
      }
      int best_discard = GetDiscard(hand);
      next_actions_.push_back(best_discard);
    }
    Action action = next_actions_.back();
    SPIEL_CHECK_TRUE(std::find(legal_actions.begin(),
        legal_actions.end(), action) != legal_actions.end());
    next_actions_.pop_back();
    return action;
  } else if (!upcard.has_value()) {
    // MoveType kDiscard
    if (hand.size() != hand_size_ + 1) {
      std::cerr << "Game state:" << std::endl;
      std::cerr << state.ToString() << std::endl;
      std::cerr << "Bot hand:" << std::endl;
      std::cerr << utils_.HandToString(hand);
      SpielFatalError("Discarding with an insufficient number of cards.");
    }
    int deadwood = utils_.MinDeadwood(hand);
    // Always knock as soon as the deadwood allows it.
    if (deadwood <= knock_card && !knocked_) {
      knocked_ = true;
      return kKnockAction;
    } else {
      int best_discard = GetDiscard(hand);
      if (best_discard >= 0) {
        return best_discard;
      } else {
        return legal_actions[0];
      }
    }
  } else {
    // MoveType kDraw
    if (stock_size == kWallStockSize) {
      // Special rules apply when we've reached the wall.
      if (legal_actions.back() == kKnockAction) {
        knocked_ = true;
        return kKnockAction;
      } else {
        return kPassAction;
      }
    } else if (utils_.MinDeadwood(hand, upcard) <= knock_card ||
               !absl::c_linear_search(GetBestDeadwood(hand, upcard), upcard)) {
      // Draw upcard if doing so permits a knock, or if the upcard would not be
      // in the "best" deadwood (=> upcard would be in a "best" meld).
      return kDrawUpcardAction;
    } else {
      return legal_actions.back();  // Draw from stock or pass.
    }
  }
}
// Returns the "best" deadwood: the cards left over after removing every card
// that belongs to a deadwood-minimizing meld grouping. "Best" means any meld
// group achieving the lowest possible deadwood count; in general non-unique.
std::vector<int> SimpleGinRummyBot::GetBestDeadwood(std::vector<int> hand,
    const absl::optional<int> card) const {
  if (card.has_value()) {
    hand.push_back(*card);
  }
  // Strip every melded card; whatever remains is the deadwood.
  for (const auto& meld : utils_.BestMeldGroup(hand)) {
    for (const int melded : meld) {
      hand.erase(std::remove(hand.begin(), hand.end(), melded), hand.end());
    }
  }
  return hand;
}
int SimpleGinRummyBot::GetDiscard(const std::vector<int> &hand) const {
  std::vector<int> unmelded = GetBestDeadwood(hand);
  if (!unmelded.empty()) {
    // Throw the highest-ranking (most deadwood points) unmelded card.
    std::sort(unmelded.begin(), unmelded.end(),
              RankComparator(kDefaultNumRanks));
    return unmelded.back();
  }
  // 11 card gin. All cards are melded so there is no deadwood to throw from.
  // Must be careful to throw a card from a meld that does not break up that
  // meld. E.g. consider an 11 card gin containing the meld As2s3s4s. With a
  // knock card of 10, all of these cards are legal discards following a
  // knock, but only the As and 4s preserve gin.
  for (std::size_t i = 0; i < hand.size(); ++i) {
    std::vector<int> trial = hand;
    trial.erase(trial.begin() + i);
    if (utils_.MinDeadwood(trial) == 0) {
      return hand[i];
    }
  }
  SpielFatalError("11 card gin error.");
}
std::vector<int> SimpleGinRummyBot::GetMelds(std::vector<int> hand) const {
if (hand.size() == hand_size_ + 1 && utils_.MinDeadwood(hand) == 0) {
// 11 card gin. Must select discard that preserves gin. See GetDiscard().
hand.erase(remove(hand.begin(), hand.end(), GetDiscard(hand)), hand.end());
}
std::vector<int> rv;
for (const auto& meld : utils_.BestMeldGroup(hand)) {
rv.push_back(utils_.meld_to_int.at(meld));
}
return rv;
}
} // namespace gin_rummy
} // namespace open_spiel
|
Harreke/EasyApp | injection-processor/src/main/java/com/harreke/easyapp/injection/annotation/InjectIntArray.java | package com.harreke.easyapp.injection.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Field-level injection marker processed at compile time (CLASS retention),
 * presumably binding an int-array value to the annotated field — confirm
 * exact semantics in the annotation processor.
 *
 * <p>Created by huoqisheng on 2016/6/17.
 */
@Retention(RetentionPolicy.CLASS)
@Target(ElementType.FIELD)
public @interface InjectIntArray {
    /** Injection key; empty string presumably derives it from the field name — TODO confirm. */
    String value() default "";
}
sahirsharma/Martian | NASA SPACEAPPS CHALLENGE/Solution/Software part/Astronomical Data and Python Libraries/pyephem/pyephem-3.7.6.0/ephem/tests/test_stars.py | #!/usr/bin/env python
import unittest
from ephem import star
class StarTests(unittest.TestCase):
    """Exercises the `star` catalog-lookup factory from ephem."""

    def test_Fomalhaut(self):
        body = star('Fomalhaut')
        self.assertEqual(body.name, 'Fomalhaut')
        # Positional attributes are undefined until compute() has run.
        with self.assertRaises(RuntimeError):
            getattr(body, 'ra')

    def test_Fomalhaut_compute(self):
        body = star('Fomalhaut')
        body.compute()
        self.assertEqual(body.name, 'Fomalhaut')
        self.assertEqual(str(body._ra), '22:57:38.80')

    def test_Fomalhaut_autocompute(self):
        # Passing a date computes the position immediately.
        body = star('Fomalhaut', '1971/1/1')
        self.assertEqual(body.name, 'Fomalhaut')
        self.assertEqual(str(body._ra), '22:57:38.80')

    def test_unknown_star(self):
        # Names absent from the built-in catalog raise KeyError.
        with self.assertRaises(KeyError):
            star('Alpha Centauri')
|
mille2004/NoteItIOS | Vendor/Core/NSDictionary+BaseKit.h | <filename>Vendor/Core/NSDictionary+BaseKit.h
//
// Created by <NAME> on 2012
// Copyright 2012 BaseKit
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#import <Foundation/Foundation.h>
@interface NSDictionary (BaseKit)

/**
 * Convenient method to check if the dictionary is empty or not.
 */
@property (nonatomic, readonly) BOOL isEmpty;

/**
 * Check if any value is associated with the given key.
 */
- (BOOL)containsObjectForKey:(id)key;

/**
 * If the dictionary has an object for aKey return it, else return defaultObject.
 */
- (id)objectForKey:(id)aKey defaultObject:(id)defaultObject;

// Typed accessors. Each ...ForKey:defaultValue: variant presumably returns the
// stored value coerced to the declared type, falling back to defaultValue; the
// single-argument forms presumably use a zero/nil default — confirm in the
// implementation file.
- (BOOL)boolValueForKey:(id)aKey defaultValue:(BOOL)defaultValue;
- (BOOL)boolValueForKey:(id)aKey;
- (int)intValueForKey:(id)aKey;
- (float)floatValueForKey:(id)aKey defaultValue:(float)defaultValue;
- (float)floatValueForKey:(id)aKey;
- (NSInteger)integerValueForKey:(id)aKey defaultValue:(NSInteger)defaultValue;
- (NSInteger)integerValueForKey:(id)aKey;
// NOTE(review): defaultValue is declared NSInteger although the method returns
// double — likely should be `double`; changing it would affect callers.
- (double)doubleValueForKey:(id)aKey defaultValue:(NSInteger)defaultValue;
- (double)doubleValueForKey:(id)aKey;
- (NSString *)stringValueForKey:(id)aKey defaultValue:(NSString *)defaultValue;
- (NSString *)stringValueForKey:(id)aKey;
- (NSNumber *)numberValueForKey:(id)aKey defaultValue:(NSNumber *)defaultValue;
- (NSNumber *)numberValueForKey:(id)aKey;

@end
|
bicepjai/mypuzzles | interviewstreet/archive/insertion_sort/insr_sort.py | #!/usr/bin/python
import sys, re, math
from itertools import permutations
mswaps = 0  # running inversion count accumulated by merge(); reset by mnofswaos()


def merge(left, right):
    """Merge two sorted lists, adding the inversions between them to `mswaps`.

    BUG FIX: the original incremented mswaps whenever a LEFT element was
    placed, which counts non-inversions (it reported a nonzero "swap" count
    even for already-sorted input). The insertion-sort swap count equals the
    number of inversions, which grows by len(left) - i each time a right
    element is placed ahead of the remaining left elements.
    """
    global mswaps
    merged = []
    i = j = 0
    while i < len(left) and j < len(right):
        if left[i] <= right[j]:
            merged.append(left[i])
            i += 1
        else:
            # right[j] precedes every remaining element of `left`, so it forms
            # an inversion with each of them.
            mswaps += len(left) - i
            merged.append(right[j])
            j += 1
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged


def merge_sort(list):  # parameter name kept as `list` for backward compatibility
    """Return a sorted copy of `list`, counting its inversions into `mswaps`."""
    if len(list) < 2:
        return list
    middle = len(list) // 2
    return merge(merge_sort(list[:middle]), merge_sort(list[middle:]))


def mnofswaos(A):
    """Return the number of swaps insertion sort would perform on A.

    That number equals A's inversion count, computed in O(n log n) via merge
    sort instead of running the O(n^2) insertion sort itself. A is not mutated.
    """
    global mswaps
    mswaps = 0
    merge_sort(A)
    return mswaps
def insert_sort(N, Ai):
    """Insertion-sort the first N elements of Ai in place; return the swap count.

    Each adjacent exchange made while sifting an element into position counts
    as one swap.
    """
    swaps = 0
    for i in range(1, N):
        j = i
        # Sift Ai[j] leftwards until it is no smaller than its neighbour.
        while j > 0 and Ai[j] < Ai[j - 1]:
            Ai[j - 1], Ai[j] = Ai[j], Ai[j - 1]
            j -= 1
            swaps += 1
    return swaps
# Driver script (Python 2: raw_input and the print statement).
T = int(raw_input())  # number of test cases
for t in range(0,T):
    solutions = 0  # NOTE(review): unused
    N= int(raw_input())  # array length; only the line itself is consumed
    B = [int(x) for x in raw_input().split()]
    # Print the swap count; the trailing comma on the last case suppresses the
    # final newline (Python 2 print-statement behaviour).
    if t == T-1:
        print mnofswaos(B),
    else:
        print mnofswaos(B)
|
SelfKeyFoundation/Identity-Wallet | src/common/wallet-tokens/types.js | /* istanbul ignore file */
const WALLET_TOKENS_UPDATE = 'app/wallet-tokens/UPDATE';
const WALLET_TOKENS_SET = 'app/wallet-tokens/SET';
const WALLET_TOKENS_LOAD = 'app/wallet-tokens/LOAD';
const WALLET_TOKENS_CREATE = 'app/wallet-tokens/CREATE';
const WALLET_TOKENS_STATE_EDIT = 'app/wallet-tokens/EDIT';
export {
WALLET_TOKENS_SET,
WALLET_TOKENS_UPDATE,
WALLET_TOKENS_LOAD,
WALLET_TOKENS_CREATE,
WALLET_TOKENS_STATE_EDIT
};
|
timboudreau/ANTLR4-Plugins-for-NetBeans | antlr-plugin/src/main/java/org/nemesis/antlr/v4/netbeans/v8/grammar/file/tool/extract/GenerateBuildAndRunGrammarResult.java | /*
* Copyright 2016-2019 <NAME>, <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.nemesis.antlr.v4.netbeans.v8.grammar.file.tool.extract;
import org.nemesis.jfs.javac.CompileResult;
import org.nemesis.jfs.javac.JavacDiagnostic;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import org.nemesis.antlr.v4.netbeans.v8.grammar.file.tool.AntlrLibrary;
import org.nemesis.antlr.v4.netbeans.v8.grammar.file.tool.AntlrSourceGenerationResult;
import org.nemesis.antlr.v4.netbeans.v8.grammar.file.tool.extract.AntlrProxies.ParseTreeProxy;
import org.nemesis.antlr.v4.netbeans.v8.grammar.file.tool.extract.AntlrProxies.ProxySyntaxError;
/**
*
* @author <NAME>
*/
/**
 * Aggregates the outcome of the three-phase pipeline: Antlr source
 * generation, javac compilation of the generated sources, and a run of the
 * compiled parser against {@link #text}. The compile and parse phases are
 * optional (each phase runs only if the previous one succeeded), hence the
 * Optionals and the {@code compiled}/{@code parsed} flags.
 */
public class GenerateBuildAndRunGrammarResult {

    /** Result of the Antlr source-generation phase (always present). */
    private final AntlrSourceGenerationResult generationResult;
    /** Result of the javac phase, absent if compilation was never attempted. */
    private final Optional<CompileResult> compileResult;
    /** Result of running the parser, absent if parsing was never attempted. */
    private final Optional<ParserRunResult> parserRunResult;
    /** The text that was (or would have been) parsed. */
    private final String text;
    /** Whether the compile phase was attempted. */
    private final boolean compiled;
    /** Whether the parse phase was attempted. */
    private final boolean parsed;

    public GenerateBuildAndRunGrammarResult(AntlrSourceGenerationResult generationResult,
            Optional<CompileResult> compileResult,
            Optional<ParserRunResult> parserResult, String text,
            boolean compiled, boolean parsed) {
        this.generationResult = generationResult;
        this.compileResult = compileResult;
        this.parserRunResult = parserResult;
        this.text = text;
        this.compiled = compiled;
        this.parsed = parsed;
    }

    /**
     * Factory for a result wrapping an already-built unparsed proxy — no
     * generation, compilation or parsing took place.
     */
    public static GenerateBuildAndRunGrammarResult forUnparsed(ParseTreeProxy prox) {
        Optional<CompileResult> compileResult = Optional.empty();
        ParserRunResult res = new ParserRunResult(Optional.empty(), Optional.of(prox), false);
        AntlrSourceGenerationResult gen = new AntlrSourceGenerationResult(null, null, null, "x", false,
                Optional.empty(), Optional.empty(), Collections.emptyList(), Collections.emptySet(),
                AntlrLibrary.getDefault(), prox.grammarName(), new AtomicBoolean());
        return new GenerateBuildAndRunGrammarResult(gen, compileResult, Optional.of(res),
                prox.text(), false, false);
    }

    /**
     * Factory for a result representing a pipeline aborted by {@code thrown};
     * the throwable is recorded on the generation-phase result.
     */
    public static GenerateBuildAndRunGrammarResult forThrown(Throwable thrown, String text) {
        ParseTreeProxy prox = AntlrProxies.forUnparsed(Paths.get("-nothing"), "x", text);
        Optional<CompileResult> compileResult = Optional.empty();
        ParserRunResult res = new ParserRunResult(Optional.empty(), Optional.of(prox), false);
        AntlrSourceGenerationResult gen = new AntlrSourceGenerationResult(null, null, null, "x", false,
                Optional.of(thrown), Optional.empty(), Collections.emptyList(), Collections.emptySet(),
                AntlrLibrary.getDefault(), prox.grammarName(), new AtomicBoolean());
        return new GenerateBuildAndRunGrammarResult(gen, compileResult, Optional.of(res),
                prox.text(), false, false);
    }

    // Debug rendering of all three phases.
    // NOTE(review): has a side effect — prints the proxy's stack trace when
    // one is present.
    public String toString() {
        StringBuilder sb = new StringBuilder("ParseResult len=" + (text == null ? -1 : text.length()));
        sb.append(" usable=").append(isUsable());
        sb.append(" compiled=").append(compiled).append(" parsed=").append(parsed);
        sb.append("\n").append("generationResult=").append(generationResult);
        sb.append("\n").append("compileResult=");
        if (compileResult.isPresent()) {
            CompileResult res = compileResult.get();
            sb.append(" success=").append(res.ok());
            if (res.thrown().isPresent()) {
                sb.append(" thrown=").append(res.thrown().get());
            } else {
                sb.append(" thrown=null");
            }
            sb.append(" usable=").append(res.isUsable());
            if (res.diagnostics().size() > 0) {
                sb.append(" diags={");
                for (JavacDiagnostic d : res.diagnostics()) {
                    sb.append('<').append(d.toString().replace('\n', ' ')).append('>');
                }
                sb.append('}');
            }
        } else {
            sb.append("<absent>");
        }
        sb.append("\n").append("parseResult=");
        if (parserRunResult.isPresent()) {
            ParserRunResult res = parserRunResult.get();
            if (res.thrown().isPresent()) {
                sb.append(" thrown=").append(res.thrown().get());
            } else {
                sb.append(" thrown=null");
            }
            if (res.parseTree().isPresent()) {
                ParseTreeProxy px = res.parseTree().get();
                sb.append(" tree=").append(px.summary());
                if (px.thrown() != null) {
                    px.thrown().printStackTrace();
                }
                if (!px.syntaxErrors().isEmpty()) {
                    sb.append(" syntaxErrors=");
                    for (ProxySyntaxError e : px.syntaxErrors()) {
                        sb.append('<').append(e).append('>');
                    }
                }
                sb.append(" pxthrown=").append(px.thrown());
            } else {
                sb.append(" tree=<absent>");
            }
        } else {
            sb.append("<absent>");
        }
        return sb.toString();
    }

    /** Whether the compile phase was attempted. */
    public boolean wasCompiled() {
        return compiled;
    }

    /** Whether the parse phase was attempted. */
    public boolean wasParsed() {
        return parsed;
    }

    /**
     * Re-throw the first recorded throwable, if any: first {@link #thrown()},
     * then any throwable captured on the parse-tree proxy.
     */
    public void rethrow() throws Throwable {
        Optional<Throwable> thrown = thrown();
        if (thrown.isPresent()) {
            throw thrown.get();
        } else if (parserRunResult.isPresent()) {
            if (parserRunResult.get().parseTree().isPresent()) {
                Throwable t = parserRunResult.get().parseTree().get().thrown();
                if (t != null) {
                    throw t;
                }
            }
        }
    }

    /** The text that was parsed (or would have been). */
    public String text() {
        return text;
    }

    /**
     * Whether every attempted phase produced a usable result.
     * NOTE(review): has a side effect (printStackTrace on the proxy's
     * throwable), and the empty branch below looks suspicious — a missing
     * ParserRunResult when parsed==true is NOT treated as unusable; confirm
     * whether `return false;` was intended there.
     */
    public boolean isUsable() {
        if (!generationResult.isUsable()) {
            return false;
        }
        if (compiled) {
            if (!compileResult.isPresent()) {
                return false;
            } else {
                if (!compileResult.get().isUsable()) {
                    return false;
                }
            }
        }
        if (parsed) {
            if (!this.parserRunResult.isPresent()) {
            } else {
                if (!parserRunResult.get().isUsable()) {
                    return false;
                } else {
                    if (!parserRunResult.get().parseTree().isPresent()) {
                        return false;
                    } else {
                        ParseTreeProxy prox = parserRunResult.get().parseTree().get();
                        if (prox.thrown() != null) {
                            prox.thrown().printStackTrace();
                            return false;
                        } else {
//                            if (!prox.syntaxErrors().isEmpty()) {
//                                System.out.println("Extraction has syntax errors: " + prox.syntaxErrors());
//                                return false;
//                            }
                            return true;
                        }
                    }
                }
            }
        }
        return true;
    }

    /**
     * The first throwable recorded by any phase, searched in pipeline order:
     * generation, compile, parse run, then the parse-tree proxy itself.
     */
    public Optional<Throwable> thrown() {
        Optional<Throwable> result = generationResult.thrown();
        if (!result.isPresent()) {
            if (compileResult.isPresent()) {
                result = compileResult.get().thrown();
                if (!result.isPresent() && parserRunResult.isPresent()) {
                    result = parserRunResult.get().thrown();
                    if (!result.isPresent()) {
                        if (parserRunResult.get().parseTree().isPresent()) {
                            Throwable t = parserRunResult.get().parseTree().get().thrown();
                            result = Optional.ofNullable(t);
                        }
                    }
                }
            }
        }
        return result;
    }

    public AntlrSourceGenerationResult generationResult() {
        return generationResult;
    }

    public Optional<CompileResult> compileResult() {
        return compileResult;
    }

    public Optional<ParserRunResult> parseResult() {
        return parserRunResult;
    }

    /**
     * Invoke {@code consumer} only when {@link #isUsable()}; absent phase
     * results are passed as null.
     *
     * @return true if the consumer was invoked
     */
    public boolean onSuccess(ParseConsumer consumer) {
        if (isUsable()) {
            consumer.accept(generationResult, compileResult.isPresent() ? compileResult.get() : null,
                    parserRunResult.isPresent() ? parserRunResult.get() : null);
            return true;
        }
        return false;
    }

    /** Callback receiving the three phase results of a usable pipeline run. */
    public interface ParseConsumer {

        void accept(AntlrSourceGenerationResult genResult, CompileResult compileResult, ParserRunResult parserRunResult);
    }
}
|
hdm/mac-tracker | data/js/00/50/c2/fa/a0/00.36.js | macDetailCallback("0050c2faa000/36",[{"a":"#902, E&C Venture Tower GuroDong, GuroGu Seoul KR 152-719","o":"YJSYSTEM","d":"2012-06-17","t":"add","s":"ieee","c":"KR"}]);
|
JLLeitschuh/Multiverse-Inventories | src/main/java/com/onarandombox/multiverseinventories/blacklist/package-info.java | <filename>src/main/java/com/onarandombox/multiverseinventories/blacklist/package-info.java<gh_stars>10-100
/**
 * Item-blacklisting support, applied per world group / world.
 *
 * <p>NOTE: this package is unfinished and likely to be refactored later.
 */
package com.onarandombox.multiverseinventories.blacklist;
|
mattl-netflix/Priam | priam/src/main/java/com/netflix/priam/cryptography/pgp/PgpCredential.java | <gh_stars>100-1000
/**
* Copyright 2017 Netflix, Inc.
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.priam.cryptography.pgp;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.google.inject.Inject;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.cred.ICredentialGeneric;
/*
* A generic implementation of fetch keys as plaintext. The key values are used within PGP cryptography algorithm. Users may
* want to provide an implementation where your key(s)' value is decrypted using AES encryption algorithm.
*/
public class PgpCredential implements ICredentialGeneric {

    private final IConfiguration config;

    @Inject
    public PgpCredential(IConfiguration config) {
        this.config = config;
    }

    /** PGP credentials are not AWS-backed, so no provider is available. */
    @Override
    public AWSCredentialsProvider getAwsCredentialProvider() {
        return null;
    }

    /**
     * Resolve the plaintext value for the requested PGP key.
     *
     * @param key which credential to fetch; must not be null
     * @throws NullPointerException     if key is null
     * @throws IllegalArgumentException if the key is not a PGP key
     */
    @Override
    public byte[] getValue(KEY key) {
        if (key == null) {
            throw new NullPointerException("Credential key cannot be null.");
        }
        switch (key) {
            case PGP_PASSWORD:
                return config.getPgpPasswordPhrase().getBytes();
            case PGP_PUBLIC_KEY_LOC:
                return config.getPgpPublicKeyLoc().getBytes();
            default:
                throw new IllegalArgumentException("Key value not supported.");
        }
    }
}
|
vikas-t/DS-Algo | functional-problems/rightViewOfBinaryTree.py | <gh_stars>0
#!/usr/bin/python3
# https://practice.geeksforgeeks.org/problems/right-view-of-binary-tree/1
class Node:
    """A binary-tree node: a payload plus optional left/right children."""

    def __init__(self, data):
        self.data = data    # payload stored at this node
        self.left = None    # left child (Node or None)
        self.right = None   # right child (Node or None)
def inOrder(root):
    """Print the tree's keys in in-order (left, node, right), space-separated."""
    if root is None:
        return
    inOrder(root.left)
    print(root.data, end=" ")
    inOrder(root.right)
def getRightViewSum(root):
    """Sum of the nodes in the tree's right view (rightmost node per level)."""
    rightmost = {}
    getSum(root, 0, rightmost)
    return sum(rightmost.values())
def getSum(root, level, h):
    """Record, per depth, the value of the last node visited at that depth.

    Pre-order traversal with left before right, so ``h[level]`` ends up
    holding the rightmost node of each level. O(n) time; the extra space is
    one dict entry per level plus the recursion stack.
    """
    if root is None:
        return
    h[level] = root.data
    getSum(root.left, level + 1, h)
    getSum(root.right, level + 1, h)
def getSum2(root, level=0, maxLevel=None, sum=None):
    """Right-view sum without the auxiliary dict.

    The right child is visited first, so the first node reached at each new
    depth is that level's rightmost node. ``maxLevel`` and ``sum`` are
    single-element lists used as mutable cells shared across the recursion
    (``sum`` shadows the builtin; name kept for interface compatibility).
    Only the top-level (level == 0) call returns the total; recursive calls
    return None (or 0 for a None subtree).
    """
    if root is None:
        return 0
    if maxLevel is None:
        maxLevel, sum = [-1], [0]
    if level > maxLevel[0]:
        # First node seen at this depth == rightmost node of the level.
        sum[0] += root.data
        maxLevel[0] = level
    getSum2(root.right, level + 1, maxLevel, sum)
    getSum2(root.left, level + 1, maxLevel, sum)
    return sum[0] if level == 0 else None
# Build the sample tree (value v stored at nodes[v]; 1-based indexing):
#           1
#         /   \
#        2     3
#       / \   / \
#      4   5 6   7
#       \
#        8
nodes = [None]
for v in range(1, 9):
    nodes.append(Node(v))
nodes[1].left = nodes[2]
nodes[1].right = nodes[3]
nodes[2].left = nodes[4]
nodes[2].right = nodes[5]
nodes[3].left = nodes[6]
nodes[3].right = nodes[7]
nodes[4].right = nodes[8]
#inOrder(nodes[1])
print(getRightViewSum(nodes[1]))  # right view is 1,3,7,8 -> expect 19
print(getSum2(nodes[1]))          # same sum via the dict-free variant
Orig5826/Basics | Lang/Python/stdlib/012_tkinter.py | import tkinter as tk
# Minimal Tkinter demo: a 300x200 window offset (500, 300) from the screen's
# top-left corner, containing a Listbox of language names.
wind = tk.Tk()
wind.title("tk 小程序")
wind.geometry('300x200+500+300')
# NOTE(review): inserting every item at index 0 displays the list in REVERSE
# order; use lbox1.insert(tk.END, item) if source order is intended.
ll = ["C", "C++", "Java", "Python", "Lua", "R"]
lbox1 = tk.Listbox(wind)
for item in ll:
    lbox1.insert(0, item)
lbox1.pack()
# Enter the Tk event loop (blocks until the window is closed).
wind.mainloop()
|
jnthn/intellij-community | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/typing/RehighlightInnerBlockAfterInline.java | <filename>java/java-tests/testData/codeInsight/daemonCodeAnalyzer/typing/RehighlightInnerBlockAfterInline.java
// IDE test fixture (re-highlighting after inline): the inner block
// deliberately redeclares `e`, and <caret> marks the editor position.
// The exact code shape IS the test input — do not "clean it up".
class Ds {
    public int meth(int j) {
        int <caret>e = j;
        int d = e;
        if (j==0) {
            int e = j;
        }
        return 0;
    }
}
MarouenMechtri/accords-platform-1 | tools/codegen/OCCI/Action.py | '''
Created on 15 Apr 2013
@author: jevc
'''
class Action(object):
    """Represents an OCCI Action, identified by its action id."""

    def __init__(self, actionid):
        """Store the raw id.

        @param actionid: the action's identifier
        """
        self.actionid = actionid
        # Placeholder: resolved to the full action definition elsewhere.
        self.actionidP = None
ivan909020/freelancehunt-sdk-java | src/main/java/com/github/ivan909020/freelancehunt/sdk/requests/projects/bids/RejectBidRequest.java | package com.github.ivan909020.freelancehunt.sdk.requests.projects.bids;
import com.github.ivan909020.freelancehunt.sdk.exceptions.ApiException;
import com.github.ivan909020.freelancehunt.sdk.exceptions.ApiValidationException;
import com.github.ivan909020.freelancehunt.sdk.requests.PostApiRequest;
import com.github.ivan909020.freelancehunt.sdk.responses.projects.bids.RejectBidResponse;
public class RejectBidRequest extends PostApiRequest<RejectBidResponse> {

    /** Project that owns the bid. */
    private Long projectId;

    /** Bid to be rejected. */
    private Long bidId;

    public RejectBidRequest() {
    }

    public RejectBidRequest setProjectId(Long projectId) {
        this.projectId = projectId;
        return this;
    }

    public RejectBidRequest setBidId(Long bidId) {
        this.bidId = bidId;
        return this;
    }

    /** Endpoint path: projects/{projectId}/bids/{bidId}/reject */
    @Override
    public String getUrlPath() {
        StringBuilder path = new StringBuilder("projects/");
        path.append(projectId).append("/bids/").append(bidId).append("/reject");
        return path.toString();
    }

    /** Both identifiers are mandatory, in addition to the base validation. */
    @Override
    public void validate() {
        super.validate();
        if (projectId == null) {
            throw new ApiValidationException("ProjectId parameter can't be empty");
        }
        if (bidId == null) {
            throw new ApiValidationException("BidId parameter can't be empty");
        }
    }

    @Override
    public RejectBidResponse deserializeResponse(String responseContent) {
        try {
            return deserializeResponse(responseContent, RejectBidResponse.class);
        } catch (ApiException failure) {
            // Attach the request URL so callers can see which call failed.
            throw failure.setRequestPath(getUrl());
        }
    }
}
|
alqvia/layer0 | cli/client/environment_test.go | <filename>cli/client/environment_test.go
package client
import (
"net/http"
"testing"
"github.com/quintilesims/layer0/common/models"
"github.com/quintilesims/layer0/common/testutils"
)
// TestCreateEnvironment verifies that CreateEnvironment issues
// POST /environment/ with all request fields populated and unmarshals the
// returned environment.
func TestCreateEnvironment(t *testing.T) {
	handler := func(w http.ResponseWriter, r *http.Request) {
		testutils.AssertEqual(t, r.Method, "POST")
		testutils.AssertEqual(t, r.URL.Path, "/environment/")

		// Decode and check every field the client is expected to send.
		var req models.CreateEnvironmentRequest
		Unmarshal(t, r, &req)

		testutils.AssertEqual(t, req.EnvironmentName, "name")
		testutils.AssertEqual(t, req.InstanceSize, "m3.medium")
		testutils.AssertEqual(t, req.MinClusterCount, 2)
		testutils.AssertEqual(t, req.UserDataTemplate, []byte("user_data"))
		testutils.AssertEqual(t, req.OperatingSystem, "linux")
		testutils.AssertEqual(t, req.AMIID, "ami")

		MarshalAndWrite(t, w, models.Environment{EnvironmentID: "id"}, 200)
	}

	client, server := newClientAndServer(handler)
	defer server.Close()

	environment, err := client.CreateEnvironment("name", "m3.medium", 2, []byte("user_data"), "linux", "ami")
	if err != nil {
		t.Fatal(err)
	}

	testutils.AssertEqual(t, environment.EnvironmentID, "id")
}
// TestDeleteEnvironment verifies that DeleteEnvironment issues a DELETE and
// returns the async job id taken from the 202 response headers.
func TestDeleteEnvironment(t *testing.T) {
	handler := func(w http.ResponseWriter, r *http.Request) {
		testutils.AssertEqual(t, r.Method, "DELETE")
		testutils.AssertEqual(t, r.URL.Path, "/environment/id")

		// Deletion is asynchronous: the API responds 202 with job headers.
		headers := map[string]string{
			"Location": "/job/jobid",
			"X-JobID":  "jobid",
		}

		MarshalAndWriteHeader(t, w, "", headers, 202)
	}

	client, server := newClientAndServer(handler)
	defer server.Close()

	jobID, err := client.DeleteEnvironment("id")
	if err != nil {
		t.Fatal(err)
	}

	testutils.AssertEqual(t, jobID, "jobid")
}
// TestGetEnvironment verifies that GetEnvironment GETs /environment/{id}
// and unmarshals the returned environment.
func TestGetEnvironment(t *testing.T) {
	handler := func(w http.ResponseWriter, r *http.Request) {
		testutils.AssertEqual(t, r.Method, "GET")
		testutils.AssertEqual(t, r.URL.Path, "/environment/id")

		MarshalAndWrite(t, w, models.Environment{EnvironmentID: "id"}, 200)
	}

	client, server := newClientAndServer(handler)
	defer server.Close()

	environment, err := client.GetEnvironment("id")
	if err != nil {
		t.Fatal(err)
	}

	testutils.AssertEqual(t, environment.EnvironmentID, "id")
}
// TestListEnvironments verifies that ListEnvironments GETs /environment/
// and returns the summaries in server order.
func TestListEnvironments(t *testing.T) {
	handler := func(w http.ResponseWriter, r *http.Request) {
		testutils.AssertEqual(t, r.Method, "GET")
		testutils.AssertEqual(t, r.URL.Path, "/environment/")

		environments := []models.EnvironmentSummary{
			{EnvironmentID: "id1"},
			{EnvironmentID: "id2"},
		}

		MarshalAndWrite(t, w, environments, 200)
	}

	client, server := newClientAndServer(handler)
	defer server.Close()

	environments, err := client.ListEnvironments()
	if err != nil {
		t.Fatal(err)
	}

	testutils.AssertEqual(t, len(environments), 2)
	testutils.AssertEqual(t, environments[0].EnvironmentID, "id1")
	testutils.AssertEqual(t, environments[1].EnvironmentID, "id2")
}
// TestUpdateEnvironment verifies that UpdateEnvironment PUTs the new
// MinClusterCount to /environment/{id}.
func TestUpdateEnvironment(t *testing.T) {
	handler := func(w http.ResponseWriter, r *http.Request) {
		testutils.AssertEqual(t, r.Method, "PUT")
		testutils.AssertEqual(t, r.URL.Path, "/environment/id")

		var req models.UpdateEnvironmentRequest
		Unmarshal(t, r, &req)
		testutils.AssertEqual(t, req.MinClusterCount, 2)

		MarshalAndWrite(t, w, models.Environment{EnvironmentID: "id"}, 200)
	}

	client, server := newClientAndServer(handler)
	defer server.Close()

	environment, err := client.UpdateEnvironment("id", 2)
	if err != nil {
		t.Fatal(err)
	}

	testutils.AssertEqual(t, environment.EnvironmentID, "id")
}
// TestCreateLink verifies that CreateLink POSTs the target environment id
// to /environment/{source}/link.
func TestCreateLink(t *testing.T) {
	handler := func(w http.ResponseWriter, r *http.Request) {
		testutils.AssertEqual(t, r.Method, "POST")
		testutils.AssertEqual(t, r.URL.Path, "/environment/id1/link")

		var req models.CreateEnvironmentLinkRequest
		Unmarshal(t, r, &req)
		testutils.AssertEqual(t, req.EnvironmentID, "id2")

		MarshalAndWrite(t, w, "", 200)
	}

	client, server := newClientAndServer(handler)
	defer server.Close()

	if err := client.CreateLink("id1", "id2"); err != nil {
		t.Fatal(err)
	}
}
// TestCreateUnlink verifies that DeleteLink issues a DELETE to
// /environment/{source}/link/{target}.
// NOTE(review): the test name says "CreateUnlink" but it exercises
// DeleteLink; renaming the test would be cosmetic only.
func TestCreateUnlink(t *testing.T) {
	handler := func(w http.ResponseWriter, r *http.Request) {
		testutils.AssertEqual(t, r.Method, "DELETE")
		testutils.AssertEqual(t, r.URL.Path, "/environment/id1/link/id2")

		MarshalAndWrite(t, w, "", 200)
	}

	client, server := newClientAndServer(handler)
	defer server.Close()

	if err := client.DeleteLink("id1", "id2"); err != nil {
		t.Fatal(err)
	}
}
|
hexacta/angular-js-cli | templates/full-app/_app/_layout/_components/_sidemenu/_sidemenu.spec.js | <reponame>hexacta/angular-js-cli
/* eslint-env node, jasmine */
'use strict';
// Jasmine spec for the 'sidemenu' AngularJS component.
// NOTE(review): several test titles contain typos ("Shuld"/"shuld",
// "recieved"); titles are runtime strings passed to it()/describe(), so they
// are left unchanged here.
describe('Sidemenu Component', function () {
  var $componentController, $state, SidemenuComponentCtrl,
    routerHelper;

  // Two fake router states; note state-two has the lower `nav` value, so the
  // $onInit sort is expected to put it first.
  var states = [
    {
      url: '/state-one',
      component: 'stateOne',
      resolve: {},
      title: 'State One',
      internalName: 'StateOne',
      settings: {
        nav: 2,
        icon: 'fa-users'
      },
      name: 'app.state-one'
    }, {
      url: '/state-two',
      component: 'stateTwo',
      resolve: {},
      title: 'State Two',
      internalName: 'StateTwo',
      settings: {
        nav: 1,
        icon: 'fa-plus'
      },
      name: 'app.state-two'
    }
  ];

  // Before each test load our api.layout module
  beforeEach(angular.mock.module('app.core'));
  beforeEach(angular.mock.module('blocks.router'));
  beforeEach(angular.mock.module('app.layout'));

  beforeEach(inject(function (_$componentController_, _$state_, _routerHelper_) {
    $componentController = _$componentController_;
    $state = _$state_;
    routerHelper = _routerHelper_;
  }));

  // Instantiate the controller with no-op bindings before every test.
  beforeEach(function () {
    var bindings = {
      logout: angular.noop,
      restrictedScreens: [],
      toggleSidebar: angular.noop
    };
    var injection = {
      $state: $state,
      routerHelper: routerHelper
    };
    SidemenuComponentCtrl = $componentController('sidemenu', injection,
      bindings);
  });

  describe('Sidemenu Ctrl', function () {
    it('should exist', function () {
      expect(SidemenuComponentCtrl).toBeDefined();
    });

    it('Shuld have imports and intial exports defined before $onInit', function () {
      spyOn(routerHelper, 'getStates').and.returnValue();
      expect(SidemenuComponentCtrl.logout).toBeDefined();
      expect(SidemenuComponentCtrl.restrictedScreens).toBeDefined();
      expect(SidemenuComponentCtrl.toggleSidebar).toBeDefined();
      expect(SidemenuComponentCtrl.hasRestrictedAccess).toBeDefined();
      expect(SidemenuComponentCtrl.isCurrent).toBeDefined();
      expect(SidemenuComponentCtrl.appTitle).not.toBeDefined();
    });
  });

  describe('$onInit', function () {
    beforeEach(function () {
      spyOn(routerHelper, 'getStates').and.returnValue(states);
      spyOn(SidemenuComponentCtrl, '$onInit').and.callThrough();
    });

    it('shuld exist', function () {
      expect(SidemenuComponentCtrl.$onInit).toBeDefined();
    });

    it('shuld init the menu with the states\' info and sort it by nav', function () {
      SidemenuComponentCtrl.$onInit();
      expect(SidemenuComponentCtrl.$onInit).toHaveBeenCalled();
      expect(routerHelper.getStates).toHaveBeenCalled();
      // nav:1 (state-two) sorts ahead of nav:2 (state-one).
      expect(SidemenuComponentCtrl.navRoutes[0]).toEqual(states[1]);
      expect(SidemenuComponentCtrl.navRoutes[1]).toEqual(states[0]);
    });
  });

  describe('hasRestrictedAccess', function () {
    var result, state;

    beforeEach(function () {
      state = states[0];
      spyOn(SidemenuComponentCtrl, 'hasRestrictedAccess').and.callThrough();
      SidemenuComponentCtrl.$onInit();
    });

    it('shuld exist', function () {
      expect(SidemenuComponentCtrl.hasRestrictedAccess).toBeDefined();
    });

    it('shuld return false if there are no restricted screens', function () {
      // NOTE(review): the title says "false" but the assertion expects null —
      // the controller apparently short-circuits on a null list. Confirm which
      // is the intended contract.
      SidemenuComponentCtrl.restrictedScreens = null;
      result = SidemenuComponentCtrl.hasRestrictedAccess(state);
      expect(SidemenuComponentCtrl.hasRestrictedAccess).toHaveBeenCalledWith(state);
      expect(result).toEqual(null);
    });

    it('shuld return true if route is inside the restricted screens', function () {
      SidemenuComponentCtrl.restrictedScreens = ['StateOne'];
      result = SidemenuComponentCtrl.hasRestrictedAccess(state);
      expect(SidemenuComponentCtrl.hasRestrictedAccess).toHaveBeenCalledWith(state);
      expect(result).toEqual(true);
    });

    it('shuld return false if route is not inside the restricted screens', function () {
      SidemenuComponentCtrl.restrictedScreens = ['StateTwo'];
      result = SidemenuComponentCtrl.hasRestrictedAccess(state);
      expect(SidemenuComponentCtrl.hasRestrictedAccess).toHaveBeenCalledWith(state);
      expect(result).toEqual(false);
    });
  });

  describe('isCurrent', function () {
    var result, state;

    beforeEach(function () {
      state = states[0];
      spyOn(SidemenuComponentCtrl, 'isCurrent').and.callThrough();
      SidemenuComponentCtrl.$onInit();
    });

    it('shuld exist', function () {
      expect(SidemenuComponentCtrl.isCurrent).toBeDefined();
    });

    it('shuld return false if no current state active', function () {
      // With no current state the comparison yields the empty string.
      result = SidemenuComponentCtrl.isCurrent(state);
      expect(SidemenuComponentCtrl.isCurrent).toHaveBeenCalledWith(state);
      expect(result).toEqual('');
    });

    it('shuld return false if current state is not the recieved one', function () {
      $state.current.title = 'State Two';
      $state.current.internalName = 'StateTwo';
      result = SidemenuComponentCtrl.isCurrent(state);
      expect(SidemenuComponentCtrl.isCurrent).toHaveBeenCalledWith(state);
      expect(result).toEqual(false);
    });

    it('shuld return true if current state is the recieved one', function () {
      $state.current.title = 'State One';
      $state.current.internalName = 'StateOne';
      result = SidemenuComponentCtrl.isCurrent(state);
      expect(SidemenuComponentCtrl.isCurrent).toHaveBeenCalledWith(state);
      expect(result).toEqual(true);
    });
  });
});
|
Mu-L/liteflow | liteflow-testcase-springboot/src/test/java/com/yomahub/liteflow/test/subflow/cmp2/HCmp.java | <gh_stars>0
package com.yomahub.liteflow.test.subflow.cmp2;
import com.yomahub.liteflow.core.NodeComponent;
import com.yomahub.liteflow.slot.DefaultContext;
import org.springframework.stereotype.Component;
import static com.yomahub.liteflow.test.subflow.ImplicitSubFlowSpringbootTest.RUN_TIME_SLOT;
/**
 * Test component "h": copies the implicit sub-chain's request data into the
 * flow context and records the executing request id so the enclosing test
 * (owner of RUN_TIME_SLOT, see the static import) can verify it ran.
 */
@Component("h")
public class HCmp extends NodeComponent {

    @Override
    public void process() throws Exception {
        // Data handed to this node when invoked as part of a sub-chain.
        String requestData = this.getSubChainReqData();
        DefaultContext context = this.getContextBean();
        context.setData("innerRequest", requestData);
        // Record which request executed this node for later assertion.
        RUN_TIME_SLOT.add(this.getSlot().getRequestId());
        System.out.println("Hcomp executed!");
    }
}
|
johnGHB/fluid | Fluidium/lib/OmniGroup/Frameworks/OmniAppKit/Widgets.subproj/OAVectorView.h | // Copyright 2003-2006 Omni Development, Inc. All rights reserved.
//
// This software may only be used and reproduced according to the
// terms in the file OmniSourceLicense.html, which should be
// distributed with this project and can also be found at
// <http://www.omnigroup.com/developer/sourcecode/sourcelicense/>.
// $Id$
#import <AppKit/NSControl.h>
@class NSValueTransformer;
@class NSTextField;
#import <AppKit/NSNibDeclarations.h> // For IBAction, IBOutlet
// Control that edits a 2D vector through two text fields (x and y).
// The observed* ivars presumably back Cocoa-bindings support for the vector
// value — confirm against the implementation file.
@interface OAVectorView : NSControl
{
    IBOutlet NSTextField *xField;               // editor for the x component
    IBOutlet NSTextField *yField;               // editor for the y component
    IBOutlet NSTextField *commaTextField;       // separator label between the fields
    id observedObjectForVector;                 // bound object (bindings)
    NSString *observedKeyPathForVector;         // key path on the bound object
    NSValueTransformer *vectorValueTransformer; // optional transformer for the bound value
}

// Actions
- (IBAction)vectorTextFieldAction:(id)sender;

// API
// "Multiple" marks a multi-selection whose values differ.
- (void)setIsMultiple:(BOOL)flag;
- (BOOL)isMultiple;
- (NSTextField *)xField;
- (NSTextField *)yField;

@end
|
abdallahsafi-401-advanced-javascript/data-structures-and-algorithms | Data-Structures/stacksAndQueues/stacks.js | <reponame>abdallahsafi-401-advanced-javascript/data-structures-and-algorithms
'use strict';
const Node = require('./node.js');
const CustomError = require('./customError.js');
/**
 * Singly-linked-list stack (LIFO).
 */
class Stack {
  constructor() {
    this.top = null; // most recently pushed node
    this.size = 0;   // number of stored values
  }

  /**
   * Push a value onto the stack.
   * Accepts any defined value, including falsy ones such as 0, '' and false.
   * (The previous `if (!value)` check wrongly rejected those valid payloads.)
   * @param {*} value value to store
   * @throws {CustomError} when value is null or undefined
   */
  push(value) {
    if (value === undefined || value === null) {
      throw new CustomError('Invalid argument!');
    }
    const node = new Node(value);
    node.next = this.top;
    this.top = node;
    this.size++;
  }

  /**
   * Remove and return the top value.
   * @throws {CustomError} when the stack is empty
   */
  pop() {
    if (this.isEmpty()) {
      throw new CustomError('Stack is empty');
    }
    let item = this.top;
    this.top = this.top.next;
    this.size--;
    return item.value;
  }

  /**
   * Return the top value without removing it.
   * @throws {CustomError} when the stack is empty
   */
  peek() {
    if (this.isEmpty()) {
      throw new CustomError('Stack is empty');
    }
    return this.top.value;
  }

  /** @returns {boolean} true when no values are stored */
  isEmpty() {
    return this.size === 0;
  }
}
module.exports = Stack;
|
pengxianhong/xAndroidDemo | app/src/main/java/com/example/mutidemo/ui/NewsDetailsActivity.java | <gh_stars>1-10
package com.example.mutidemo.ui;
import android.os.Build;
import android.widget.TextView;
import androidx.annotation.RequiresApi;
import com.example.mutidemo.R;
import com.example.mutidemo.util.ImageUtil;
import com.pengxh.app.multilib.base.BaseNormalActivity;
import com.qmuiteam.qmui.util.QMUIDisplayHelper;
import butterknife.BindView;
/**
 * Detail screen for a single news article.
 * <p>
 * Expects the launching Intent to carry String extras "title", "src",
 * "time" and "content"; the content is HTML rendered into the body
 * TextView via ImageUtil.
 *
 * @author Pengxh
 * @date 2020/3/5 20:18
 */
public class NewsDetailsActivity extends BaseNormalActivity {

    @BindView(R.id.newsTitle)
    TextView newsTitle;   // article headline
    @BindView(R.id.newsSrc)
    TextView newsSrc;     // article source/publisher
    @BindView(R.id.newsTime)
    TextView newsTime;    // publication time
    @BindView(R.id.newsContent)
    TextView newsContent; // HTML body

    @Override
    public int initLayoutView() {
        return R.layout.activity_news_details;
    }

    @RequiresApi(api = Build.VERSION_CODES.KITKAT)
    @Override
    public void initData() {
        // Pull the article fields out of the launching Intent.
        String title = getIntent().getStringExtra("title");
        String src = getIntent().getStringExtra("src");
        String time = getIntent().getStringExtra("time");
        String content = getIntent().getStringExtra("content");
        newsTitle.setText(title);
        newsSrc.setText(src);
        newsTime.setText(time);
        // Render the HTML content, sizing inline images to the screen width.
        ImageUtil.setTextFromHtml(this, newsContent, content,
                QMUIDisplayHelper.getScreenWidth(this), 10);
    }

    @Override
    public void initEvent() {
    }
}
|
thaliproject/Thali_Codovaplugin | test/www/jxcore/meta_tests/testPouchDBAgent.js | 'use strict';
var objectAssign = require('object-assign');
var ForeverAgent = require('forever-agent');
var express = require('express');
var expressPouchDB = require('express-pouchdb');
var http = require('http');
var tape = require('../lib/thaliTape');
var testUtils = require('../lib/testUtils');
var PouchDB = testUtils.getLevelDownPouchDb();
// Minimal tape harness: no per-test setup/teardown work is needed.
var test = tape({
  setup: function (t) {
    t.end();
  },
  teardown: function (t) {
    t.end();
  }
});

// Verifies that PouchDB routes its HTTP traffic through the supplied
// ForeverAgent: each lookup should reach agent.addRequest with the expected
// method and path, and the same agent should be reused across queries.
test('PouchDB agent works as expected', function (t) {
  var agent = new ForeverAgent();
  var requestsData = [];

  // Wrap addRequest to record a shallow copy of every request's options
  // before delegating to the real implementation.
  var _addRequest = agent.addRequest;
  agent.addRequest = function (data) {
    requestsData.push(objectAssign({}, data));
    return _addRequest.apply(this, arguments);
  };

  var app = express();
  app.use('/db', expressPouchDB(PouchDB));
  var server = http.createServer(app);

  // Port 0 lets the OS pick a free port.
  server.listen(0, function () {
    var port = server.address().port;
    var url = 'http://localhost:' + port + '/db';
    var db = new PouchDB(url, {
      ajax: {
        agent: agent
      }
    });

    // Fetches a document that does not exist and asserts both the 404 and
    // the request metadata captured from the agent.
    function query(name) {
      return db.get(name)
        .then(function () {
          t.fail('we should not find a document');
        })
        .catch(function (error) {
          t.ok(error, 'error is not empty');
          t.equals(error.status, 404, 'status should be 404');
        })
        .then(function () {
          var lastData = requestsData[requestsData.length - 1];
          t.equals(lastData.method, 'GET', 'method is \'get\'');
          t.equals(lastData.path, '/db/' + name + '?', 'path is ok');
        });
    }

    query('fit')
      .then(function () {
        return query('foo');
      })
      .then(function () {
        t.ok(requestsData.length > 2, 'we should call agent more than twice');
        t.end();
      });
  });
});
|
hdachev/Nimble | src/nodes/stop_nans_node.cpp | <reponame>hdachev/Nimble<gh_stars>0
#include "stop_nans_node.h"
#include "../render_graph.h"
#include "../resource_manager.h"
#include "../renderer.h"
#include "../logger.h"
namespace nimble
{
DEFINE_RENDER_NODE_FACTORY(StopNaNsNode)

// -----------------------------------------------------------------------------------------------------------------------------------

StopNaNsNode::StopNaNsNode(RenderGraph* graph) :
    RenderNode(graph)
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

StopNaNsNode::~StopNaNsNode()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Declares one color input and a full-resolution RGBA16F output target.
void StopNaNsNode::declare_connections()
{
    // Declare the inputs to this render node
    register_input_render_target("Color");

    m_output_rt = register_scaled_output_render_target("Color", 1.0f, 1.0f, GL_TEXTURE_2D, GL_RGBA16F, GL_RGBA, GL_HALF_FLOAT);
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Resolves the input target and builds the fullscreen-triangle program.
// Returns false (after logging) if shader loading or program linking fails.
bool StopNaNsNode::initialize(Renderer* renderer, ResourceManager* res_mgr)
{
    m_texture    = find_input_render_target("Color");
    m_output_rtv = RenderTargetView(0, 0, 0, m_output_rt->texture);

    m_vs = res_mgr->load_shader("shader/post_process/fullscreen_triangle_vs.glsl", GL_VERTEX_SHADER);
    // stop_nans_fs.glsl presumably rewrites NaN pixels — see the shader source.
    m_fs = res_mgr->load_shader("shader/post_process/stop_nans_fs.glsl", GL_FRAGMENT_SHADER);

    if (m_vs && m_fs)
    {
        m_program = renderer->create_program(m_vs, m_fs);

        if (m_program)
            return true;
        else
        {
            NIMBLE_LOG_ERROR("Failed to create Program!");
            return false;
        }
    }
    else
    {
        NIMBLE_LOG_ERROR("Failed to load Shaders!");
        return false;
    }
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Runs the post-process pass: binds the output target, clears it, and draws
// a fullscreen triangle sampling the input color texture.
void StopNaNsNode::execute(double delta, Renderer* renderer, Scene* scene, View* view)
{
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);

    m_program->use();

    renderer->bind_render_targets(1, &m_output_rtv, nullptr);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
    glViewport(0, 0, m_graph->window_width(), m_graph->window_height());

    // Bind the input color texture to unit 0 only if the uniform exists.
    if (m_program->set_uniform("s_Color", 0) && m_texture)
        m_texture->texture->bind(0);

    render_fullscreen_triangle(renderer, view);
}

// -----------------------------------------------------------------------------------------------------------------------------------

void StopNaNsNode::shutdown()
{
}

// -----------------------------------------------------------------------------------------------------------------------------------

std::string StopNaNsNode::name()
{
    return "Stop NaNs";
}

// -----------------------------------------------------------------------------------------------------------------------------------

} // namespace nimble
oliver-za/invictus-frontend | client/src/components/shared/ItemList.js | <gh_stars>0
import React from 'react'
import {Link} from 'react-router-dom'
// Renders a list of shop items. Each entry links to its detail page and
// swaps to a secondary image (imageUrl2) on hover. When `previousPrice` is
// supplied it is shown struck through above the discounted current price.
// NOTE(review): `previousPrice` is one value applied to every item in the
// list — confirm this is intended rather than a per-item field.
function ItemList({itemKind, previousPrice}) {
  return (
    <div className="Mens-Items-Wrapper">
      {itemKind.map((item) => (
        <li key={item._id} style={{ listStyleType: "none" }}>
          <div className="Mens-Item-Image">
            <Link to={`/item/${item._id}`} alt={"Link to " + item.name}>
              {/* Hover container: base image plus overlaid second image. */}
              <div className="image-hover-container">
                <img
                  src={"/" + item.imageUrl}
                  alt={item.name}
                  className="image-hover-image"
                />
                <div className="image-hover-overlay">
                  <img
                    src={"/" + item.imageUrl2}
                    className="image-hover-image"
                    alt={item.name + "2"}
                  />
                </div>
              </div>
            </Link>
            <p>{item.name}</p>
            {previousPrice ?
              <>
                <p style={{ textDecoration: "line-through"}}>{previousPrice}</p>
                <p style={{ color: "#f94c43" }}>{item.price}</p>
              </> : <p>{item.price}</p>}
          </div>
        </li>
      )
      )}
    </ div>
  )
}

export default ItemList
|
reverie/jotleaf.com | jotleaf/static/js/views_errors.js | <filename>jotleaf/static/js/views_errors.js
// Generated by CoffeeScript 1.4.0
// NOTE(review): compiled output — prefer editing the CoffeeScript source.
var Error403, Error404,
  __hasProp = {}.hasOwnProperty,
  __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };

// 404 view: renders the tpl_404 template inside the main website layout.
Error404 = (function(_super) {

  __extends(Error404, _super);

  function Error404() {
    return Error404.__super__.constructor.apply(this, arguments);
  }

  Error404.prototype.documentTitle = 'Page Not Found';

  Error404.prototype.initialize = function() {
    return this.makeMainWebsiteView('tpl_404');
  };

  return Error404;

})(TopView);

// 403 view: renders the permission-denied template, passing the username
// supplied via view options.
Error403 = (function(_super) {

  __extends(Error403, _super);

  function Error403() {
    return Error403.__super__.constructor.apply(this, arguments);
  }

  Error403.prototype.documentTitle = 'Permission Denied';

  Error403.prototype.initialize = function() {
    return this.makeMainWebsiteView('tpl_permission_denied', {
      username: this.options.username
    });
  };

  return Error403;

})(TopView);
|
stutiredboy/obproxy | src/obproxy/proxy/api/ob_intercept_plugin.h | /**
* Copyright (c) 2021 OceanBase
* OceanBase Database Proxy(ODP) is licensed under Mulan PubL v2.
* You can use this software according to the terms and conditions of the Mulan PubL v2.
* You may obtain a copy of Mulan PubL v2 at:
* http://license.coscl.org.cn/MulanPubL-2.0
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
* See the Mulan PubL v2 for more details.
*/
#ifndef OBPROXY_INTERCEPT_PLUGIN_H
#define OBPROXY_INTERCEPT_PLUGIN_H
#include "proxy/api/ob_api_transaction.h"
#include "proxy/api/ob_transaction_plugin.h"
namespace oceanbase
{
namespace obproxy
{
namespace proxy
{
/**
 * Lets a plugin act as the origin server for a transaction: the plugin
 * consumes the client request body and produces the response itself.
 * This plugin can be created in the read request hook.
 */
class ObInterceptPlugin : public ObTransactionPlugin
{
  friend class ObApiUtilsInternal;

public:
  /**
   * A method that you must implement when writing an ObInterceptPlugin, this method will be
   * invoked whenever client request data is read.
   */
  virtual void consume(event::ObIOBufferReader *reader) = 0;

  /**
   * A method that you must implement when writing an ObInterceptPlugin, this method
   * will be invoked when the client request is deemed complete.
   */
  virtual void handle_input_complete() = 0;

  virtual void destroy();

protected:
  // a plugin must implement this interface, it cannot be constructed directly
  explicit ObInterceptPlugin(ObApiTransaction &transaction);

  /**
   * This method is how an ObInterceptPlugin will send output back to
   * the client.
   */
  int produce(event::ObIOBufferReader *reader);

  int set_output_complete();

private:
  void destroy_cont();
  int do_read();
  int handle_event_internal(ObEventType event, void *edata);
  int handle_event(ObEventType event, void *data);

  ObContInternal *intercept_cont_;     // continuation driving the intercept
  event::ObVConnection *net_vc_;       // client-side virtual connection

  // Bundles a VIO with its buffer and reader for one direction of I/O.
  struct IoHandle
  {
    event::ObVIO *vio_;
    event::ObMIOBuffer *buffer_;
    event::ObIOBufferReader *reader_;

    IoHandle() : vio_(NULL), buffer_(NULL), reader_(NULL) { };

    // Releases the reader before its owning buffer.
    void destroy()
    {
      if (NULL != reader_) {
        reader_->mbuf_->dealloc_reader(reader_);
      }

      if (NULL != buffer_) {
        event::free_miobuffer(buffer_);
      }
    }
  };

  IoHandle input_;                 // data arriving from the client
  IoHandle output_;                // data produced for the client
  int64_t num_bytes_written_;      // total bytes handed to produce()

  // these two fields to be used by the continuation callback only
  ObEventType saved_event_;
  void *saved_edata_;

  event::ObAction *timeout_action_; // pending timeout, cancelled on completion
};
} // end of namespace proxy
} // end of namespace obproxy
} // end of namespace oceanbase
#endif // OBPROXY_INTERCEPT_PLUGIN_H
|
gizmore/gdo | lib/GDO/Form/GDT_Select.rb | #
#
#
# Select form field: a combo box restricted to its options, optionally
# allowing multiple selections. Vars are stored as comma-separated strings
# and exposed as arrays.
class GDO::Form::GDT_Select < GDO::Form::GDT_ComboBox

  def initialize(name=nil)
    super
    @multiple = false
  end

  ###########
  ### GDT ###
  ###########

  # Fluent toggle for multi-select mode.
  def multiple(flag=true)
    @multiple = flag
    self
  end

  def _multiple
    @multiple
  end

  # CSV var string -> array of selected options.
  def to_value(var)
    var.split(',')
  end

  # Array of selected options -> CSV var string.
  def to_var(value)
    value.join(',')
  end

  ##############
  ### Render ###
  ##############

  def render_form
    ::GDO::Core::GDT_Template.render_template('Form', 'form/gdt_select.erb', {:field => self})
  end

end
|
zhangkn/iOS14Header | System/Library/PrivateFrameworks/NanoTimeKitCompanion.framework/NTKAVListing.h | /*
* This header is generated by classdump-dyld 1.0
* on Sunday, September 27, 2020 at 11:52:20 AM Mountain Standard Time
* Operating System: Version 14.0 (Build 18A373)
* Image Source: /System/Library/PrivateFrameworks/NanoTimeKitCompanion.framework/NanoTimeKitCompanion
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
@class CLKVideo, UIImage;
// Listing that supplies a video and a poster image (classdump-generated;
// originals from NanoTimeKitCompanion).
@protocol NTKAVListing <NSObject>
@property (nonatomic,readonly) CLKVideo * video;
@property (nonatomic,readonly) UIImage * image;
@required
-(CLKVideo *)video;
-(UIImage *)image;
// Returns whether the given snapshot differs from this listing's content.
-(BOOL)snapshotDiffers:(id)arg1;
// Drops cached media assets to release memory.
-(void)discardAssets;
@end
|
jayvdb/alcazar | tests/test_requests.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#----------------------------------------------------------------------------------------------------------------------------------
# includes
# 2+3 compat
from __future__ import absolute_import, division, print_function, unicode_literals
# standards
import unittest
# alcazar
from alcazar import Request
#----------------------------------------------------------------------------------------------------------------------------------
class RequestTests(unittest.TestCase):
    """Checks how ``alcazar.Request`` picks its HTTP method."""

    def test_default_method_is_get(self):
        """Without a payload the method defaults to GET."""
        self.assertEqual(Request('http://example.com/').method, 'GET')

    def test_can_set_method_manually(self):
        """An explicit ``method=`` argument wins."""
        req = Request('http://example.com/', method='DELETE')
        self.assertEqual(req.method, 'DELETE')

    def test_post_by_default_when_data(self):
        """A raw body switches the default method to POST."""
        req = Request('http://example.com/', data=b'payload')
        self.assertEqual(req.method, 'POST')

    def test_post_by_default_when_json(self):
        """A JSON body switches the default method to POST."""
        req = Request('http://example.com/', json={'key': 'value'})
        self.assertEqual(req.method, 'POST')
|
yf9212/demo | src/main/java/com/yf/designPattern/filter/Criteria.java | <gh_stars>0
package com.yf.designPattern.filter;
import java.util.List;
/**
 * Filter-pattern criterion: implementations return the subset of the given
 * persons that satisfy the criterion.
 */
public interface Criteria {
    // NOTE(review): "metteCriteria" looks like a typo for "meetCriteria";
    // renaming would break every implementation, so it is left unchanged.
    public List<Person> metteCriteria(List<Person> persons);
}
|
free-ice/ws-wss4j | ws-security-common/src/main/java/org/apache/wss4j/common/NamePasswordCallbackHandler.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.wss4j.common;
import java.io.IOException;
import java.lang.reflect.Method;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
/**
 * JAAS CallbackHandler that answers NameCallback with a fixed username and
 * PasswordCallback with a fixed password. For other callback types it falls
 * back to invoking a password-setter method reflectively (default name
 * "setObject", overridable via the constructor).
 */
public class NamePasswordCallbackHandler implements CallbackHandler {
    private static final org.slf4j.Logger LOG =
        org.slf4j.LoggerFactory.getLogger(NamePasswordCallbackHandler.class);

    // Default setter name tried reflectively on unknown callback types.
    private static final String PASSWORD_CALLBACK_NAME = "setObject";
    // Parameter types attempted, in order, for the reflective setter.
    private static final Class<?>[] PASSWORD_CALLBACK_TYPES =
        new Class<?>[]{Object.class, char[].class, String.class};

    private String username;
    private String password;
    private String passwordCallbackName; // null means use the default name

    public NamePasswordCallbackHandler(String username, String password) {
        this(username, password, null);
    }

    public NamePasswordCallbackHandler(String username, String password, String passwordCallbackName) {
        this.username = username;
        this.password = password;
        this.passwordCallbackName = passwordCallbackName;
    }

    /**
     * Fills each callback with the configured credentials.
     *
     * @throws UnsupportedCallbackException for callbacks that are neither
     *         name/password callbacks nor reflectively settable
     */
    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
        for (Callback callback : callbacks) {
            // Give subclasses first refusal on every callback.
            if (handleCallback(callback)) {
                continue;
            } else if (callback instanceof NameCallback) {
                ((NameCallback) callback).setName(username);
            } else if (callback instanceof PasswordCallback) {
                PasswordCallback pwCallback = (PasswordCallback) callback;
                pwCallback.setPassword(password.toCharArray());
            } else if (!invokePasswordCallback(callback)) {
                LOG.info("Unsupported callback type " + callback.getClass().getName());
                throw new UnsupportedCallbackException(callback, "Unsupported callback type "
                                                      + callback.getClass().getName());
            }
        }
    }

    // Subclass hook; return true to mark the callback as fully handled.
    protected boolean handleCallback(Callback callback) {
        return false;
    }

    /*
     * This method is called from the handle(Callback[]) method when the specified callback
     * did not match any of the known callback classes. It looks for the callback method
     * having the specified method name with one of the supported parameter types.
     * If found, it invokes the callback method on the object and returns true.
     * If not, it returns false.
     */
    @SuppressWarnings("InexactVarargsConditional")
    private boolean invokePasswordCallback(Callback callback) {
        String cbname = passwordCallbackName == null
            ? PASSWORD_CALLBACK_NAME : passwordCallbackName;
        for (Class<?> arg : PASSWORD_CALLBACK_TYPES) {
            try {
                Method method = callback.getClass().getMethod(cbname, arg);
                // String setters get the String form; others get char[].
                method.invoke(callback, arg == String.class ? password : password.toCharArray());
                return true;
            } catch (Exception e) {
                // ignore and continue
                LOG.warn(e.toString());
            }
        }
        return false;
    }
}
|
Hanggansta/Nimble | src/shader_cache.cpp | <filename>src/shader_cache.cpp
#include "shader_cache.h"
#include "shader_library.h"
namespace nimble
{
// -----------------------------------------------------------------------------------------------------------------------------------

// Drops every cached weak reference; live libraries held elsewhere survive.
void ShaderCache::shutdown()
{
    for (auto& pair : m_library_cache)
        pair.second.reset();
}

// -----------------------------------------------------------------------------------------------------------------------------------

// Returns the cached ShaderLibrary for the (vs, fs) pair, creating and
// caching a new one when absent or when the cached weak_ptr has expired.
std::shared_ptr<ShaderLibrary> ShaderCache::load_library(const std::string& vs, const std::string& fs)
{
    // Composite cache key combining both shader paths.
    std::string id = "vs:";
    id += vs;
    id += "-fs:";
    id += fs;

    // Single find() instead of the previous find() + operator[] double lookup.
    auto it = m_library_cache.find(id);

    if (it != m_library_cache.end() && !it->second.expired())
        return it->second.lock();

    auto library = std::make_shared<ShaderLibrary>(vs, fs);
    m_library_cache[id] = library;
    return library;
}

// -----------------------------------------------------------------------------------------------------------------------------------

} // namespace nimble
mobile-club/adyen-api-js | test/model/NotificationRequestItemDetails.spec.js | /**
* Adyen api
* Operations about payments, recurring and payout
*
* OpenAPI spec version: 30
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
*
* Swagger Codegen version: 2.3.1
*
* Do not edit the class manually.
*
*/
// Swagger-codegen test skeleton for NotificationRequestItemDetails.
// NOTE(review): all property assertions are intentionally left commented out
// by the generator; uncomment and fill in to activate them.
(function(root, factory) {
  if (typeof define === 'function' && define.amd) {
    // AMD.
    define(['expect.js', '../../src/index'], factory);
  } else if (typeof module === 'object' && module.exports) {
    // CommonJS-like environments that support module.exports, like Node.
    factory(require('expect.js'), require('../../src/index'));
  } else {
    // Browser globals (root is window)
    factory(root.expect, root.AdyenApiJs);
  }
}(this, function(expect, AdyenApiJs) {
  'use strict';

  var instance;

  // Fresh model instance for every test.
  beforeEach(function() {
    instance = new AdyenApiJs.NotificationRequestItemDetails();
  });

  var getProperty = function(object, getter, property) {
    // Use getter method if present; otherwise, get the property directly.
    if (typeof object[getter] === 'function')
      return object[getter]();
    else
      return object[property];
  }

  var setProperty = function(object, setter, property, value) {
    // Use setter method if present; otherwise, set the property directly.
    if (typeof object[setter] === 'function')
      object[setter](value);
    else
      object[property] = value;
  }

  describe('NotificationRequestItemDetails', function() {
    it('should create an instance of NotificationRequestItemDetails', function() {
      // uncomment below and update the code to test NotificationRequestItemDetails
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be.a(AdyenApiJs.NotificationRequestItemDetails);
    });

    it('should have the property additionalData (base name: "additionalData")', function() {
      // uncomment below and update the code to test the property additionalData
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

    it('should have the property amount (base name: "amount")', function() {
      // uncomment below and update the code to test the property amount
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

    it('should have the property pspReference (base name: "pspReference")', function() {
      // uncomment below and update the code to test the property pspReference
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

    it('should have the property eventCode (base name: "eventCode")', function() {
      // uncomment below and update the code to test the property eventCode
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

    it('should have the property eventDate (base name: "eventDate")', function() {
      // uncomment below and update the code to test the property eventDate
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

    it('should have the property merchantAccountCode (base name: "merchantAccountCode")', function() {
      // uncomment below and update the code to test the property merchantAccountCode
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

    it('should have the property operations (base name: "operations")', function() {
      // uncomment below and update the code to test the property operations
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

    it('should have the property merchantReference (base name: "merchantReference")', function() {
      // uncomment below and update the code to test the property merchantReference
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

    it('should have the property originalReference (base name: "originalReference")', function() {
      // uncomment below and update the code to test the property originalReference
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

    it('should have the property paymentMethod (base name: "paymentMethod")', function() {
      // uncomment below and update the code to test the property paymentMethod
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

    it('should have the property reason (base name: "reason")', function() {
      // uncomment below and update the code to test the property reason
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

    it('should have the property success (base name: "success")', function() {
      // uncomment below and update the code to test the property success
      //var instane = new AdyenApiJs.NotificationRequestItemDetails();
      //expect(instance).to.be();
    });

  });

}));
|
theusaf/mcfunction | lib/commands/replaceitem.js | const BaseCommand = require("../class/BaseCommand"),
NullCommand = require("../class/NullCommand"),
CommandNameOutput = require("../class/output/CommandNameOutput"),
SelectorOutput = require("../class/output/SelectorOutput"),
NumberOutput = require("../class/output/NumberOutput"),
SlotOutput = require("../class/output/SlotOutput"),
NBTOutput = require("../class/output/NBTOutput"),
EnumOutput = require("../class/output/EnumOutput"),
ItemOutput = require("../class/output/ItemOutput"),
CoordinateOutput = require("../class/output/CoordinateOutput");
// Suggestion provider for the 1.12 form of /replaceitem:
//   replaceitem block <x> <y> <z> <slot> <item> [amount] [data] [dataTag]
//   replaceitem entity <targets>   <slot> <item> [amount] [data] [dataTag]
class ReplaceitemCommand12 extends BaseCommand {
    constructor() {super("replaceitem");}
    // Returns an array of completion outputs for the current token list, or
    // undefined when no suggestion applies at this position.
    handleSuggestions(tokens) {
        // Past token 2, only the "block" and "entity" forms are valid.
        if (tokens.length > 2 && !["block", "entity"].includes(tokens[1].getValue())) {
            return;
        }
        switch (tokens.length) {
            case 1: {
                return [new CommandNameOutput("replaceitem", "Replaces items in entities and blocks.")];
            }
            case 2: {
                return [new EnumOutput(
                    ["block", "Target Type"],
                    ["entity", "Target Type"]
                )];
            }
            default: {
                const choice2 = tokens[1].getValue();
                // `index` normalizes positions for the shared tail switch: the
                // block form consumes three coordinate tokens where the entity
                // form consumes one selector token, so block positions lag by 2.
                let index = 0;
                switch (choice2) {
                    case "block": {
                        index = -2;
                        switch (tokens.length) {
                            case 3:
                            case 4:
                            case 5: {
                                // Suggest the remaining x/y/z coordinate components.
                                return [new CoordinateOutput(["x", "y", "z"].slice(tokens.length - 3), "~", 6 - tokens.length)];
                            }
                        }
                        break;
                    }
                    case "entity": {
                        if (tokens.length === 3) {
                            return [new SelectorOutput(null, "targets")];
                        }
                    }
                }
                // Tail arguments shared by both forms (positions shifted by `index`).
                switch (index + tokens.length) {
                    case 4: {
                        return [new SlotOutput(null, "slot")];
                    }
                    case 5: {
                        return [new ItemOutput(null, "item")];
                    }
                    case 6: {
                        return [new NumberOutput("amount", "The item count.", "1")];
                    }
                    case 7: {
                        return [new NumberOutput("data", "The damage/data number of the item", "0")];
                    }
                    case 8: {
                        return [new NBTOutput("dataTag", "The NBT data to be applied to the replacement items.")];
                    }
                }
            }
        }
    }
}
// 1.13 variant of /replaceitem: identical grammar to 1.12 except that the
// trailing [data] argument is gone, so each form accepts fewer tokens.
class ReplaceitemCommand13 extends ReplaceitemCommand12 {
    handleSuggestions(tokens) {
        const target = tokens[1]?.getValue();
        // Maximum token count per form; anything else is unrestricted here.
        const maxTokens = target === "entity" ? 6 : target === "block" ? 8 : Infinity;
        if (tokens.length > maxTokens) {
            return;
        }
        return super.handleSuggestions(tokens);
    }
}
// Version-keyed registry consumed by the command loader: each key is the
// first game version in which that handler applies. From 1.17 on the
// command maps to NullCommand, i.e. no suggestions are offered.
module.exports = {
    "1.12": new ReplaceitemCommand12,
    "1.13": new ReplaceitemCommand13,
    "1.17": new NullCommand
};
|
WhiteDOU/LeetCode | 500.cpp | <gh_stars>1-10
class Solution
{
public:
    /**
     * LeetCode 500 – Keyboard Row.
     *
     * Returns every word that can be typed using letters from only one row
     * of an American QWERTY keyboard. Matching is case-insensitive and the
     * relative order of the input words is preserved.
     *
     * Uses a 26-entry letter->row lookup table instead of building three
     * sets per call, and stops scanning a word at the first row mismatch.
     */
    vector<string> findWords(vector<string> &words)
    {
        // rowOf[letter - 'a'] = keyboard row index (0, 1 or 2).
        static const char *rows[3] = {"qwertyuiop", "asdfghjkl", "zxcvbnm"};
        int rowOf[26];
        for (int r = 0; r < 3; ++r)
            for (const char *p = rows[r]; *p; ++p)
                rowOf[*p - 'a'] = r;

        vector<string> ans;
        for (const string &word : words)
        {
            if (word.empty())
                continue; // an empty word matches no row (same as the original one+two+three == 1 test)
            bool sameRow = true;
            int row = -1;
            for (char c : word)
            {
                // Normalize upper-case letters ('A'..'Z' sort before 'a') to lower case.
                if (c < 'a')
                    c += 32;
                int r = rowOf[c - 'a'];
                if (row == -1)
                    row = r;
                else if (r != row)
                {
                    sameRow = false;
                    break;
                }
            }
            if (sameRow)
                ans.push_back(word);
        }
        return ans;
    }
};
IASA-GR/appdb-core | public/js/archive.js | var Endian = {BIG : 0,LITTLE : 1};
var ZipConstants = {
LOCSIG: 0x04034b50, // "PK\003\004"
LOCHDR: 30, // LOC header size
LOCVER: 4, // version needed to extract
LOCNAM: 26, // filename length
EXTSIG: 0x08074b50, // "PK\007\008"
EXTHDR: 16, // EXT header size
CENSIG: 0x02014b50, // "PK\001\002"
CENHDR: 46, // CEN header size
CENVER: 6, // version needed to extract
CENNAM: 28, // filename length
CENOFF: 42, // LOC header offset
ENDSIG: 0x06054b50, // "PK\005\006"
ENDHDR: 22, // END header size
ENDTOT: 10, // total number of entries
ENDOFF: 16, // offset of first CEN header
STORED: 0,
DEFLATED: 8
};
var Base64 = function(input) {
var StringMaker = undefined;
if(navigator.userAgent.toLowerCase().indexOf(" chrome/")>=0||navigator.userAgent.toLowerCase().indexOf(" firefox/")>=0||
navigator.userAgent.toLowerCase().indexOf(' gecko/')>=0){StringMaker=function(){this.str="";this.length=0;
this.append=function(s){this.str+=s;this.length+=s.length;};this.prepend=function(s){this.str=s+this.str;this.length+=s.length;};
this.toString=function(){return this.str;};};}else{StringMaker=function(){this.parts=[];this.length=0;this.append=function(s){
this.parts.push(s);this.length+=s.length;};this.prepend=function(s){this.parts.unshift(s);this.length+=s.length;};
this.toString=function(){return this.parts.join('');};};}
var keyStr = "<KEY>;
var o=new StringMaker(),a,b,c,d,f,g,h,i=0;while(i<input.length){a=input.charCodeAt(i++);
b=input.charCodeAt(i++);c=input.charCodeAt(i++);d=a>>2;f=((a&3)<<4)|(b>>4);g=((b&15)<<2)|(c>>6);h=c&63;
if(isNaN(b)){g=h=64;}else if(isNaN(c)){h=64;}o.append(keyStr.charAt(d)+keyStr.charAt(f)+keyStr.charAt(g)
+keyStr.charAt(h));}return o.toString();
};
var BA = function(byteData, endianType) {
var _bytes = '', _len = 0, _pos = 0,_endian = 0;
if (byteData) {
_bytes = byteData || '';
_endian = endianType !== undefined ? endianType : _endian;
_len = byteData.length;
}
var isBA = typeof byteData !== 'string' && byteData !== undefined;
var warn = function(msg){
throw msg;
};
return {
position : function(val) { if (val) _pos = val; else return _pos; },
move : function(val) { _pos += val},
bytesAvailable : function() { return _len - _pos; },
length : function() { return _len; },
endian : function(val) { if (val) _endian = val; else return _endian; },
data : function(val) { if (val) { _bytes = val || ''; _len = _bytes.length; isBA = typeof val !== 'string' && val !== undefined; } else return _bytes; },
readByte : function() {
if (this.bytesAvailable() === 0) { warn("readByte::End of stream!"); }
return isBA ? _bytes[_pos++] & 0xFF : (_bytes.charCodeAt(_pos++) & 0xFF);
},
readByteAt : function(index) {
if (index < _len) {
return isBA ? _bytes[index] & 0xFF :_bytes.charCodeAt(index) & 0xFF;
}
return warn("readByteAt::End of stream");
},
writeByte : function(val) {
if (isBA) {
if (_pos < _len) {
_bytes[_pos] = val & 0xFF;
} else {
_bytes[_bytes.length++] = val;
}
_pos++;
return;
}
if (_pos < _len) {
_bytes = _bytes.substr(0, _pos) + String.fromCharCode(val & 0xFF) + _bytes.substring(_pos + 1);
} else {
_bytes += String.fromCharCode(val & 0xFF);
_len += 1;
}
_pos++;
},
readBytes : function(offset, length) {
if (length === undefined) {
var p = _pos;
_pos += offset;
if (isBA) {
var tmp = '';
for (var i = p; i < p + offset; i++) {
tmp += String.fromCharCode(_bytes[i]);
}
return tmp;
} else {
return _bytes.substr(p, offset);
}
}
if (isBA) {
var tmpx = '';
for (var j = offset; j < offset + length; j++) {
tmpx += String.fromCharCode(_bytes[j]);
}
return tmpx;
}
return _bytes.substr(offset, length);
},
readUnsignedInt : function() {
if (this.bytesAvailable() < 4) { throw "End of stream!"; }
var p = 0, x = 0;
if (_endian == Endian.BIG) {
p = (_pos += 4) - 4;
if (isBA) {
x = ((_bytes[p] & 0xFF) << 24) | ((_bytes[++p] & 0xFF) << 16) | ((_bytes[++p] & 0xFF) << 8) | (_bytes[++p] & 0xFF);
} else
x = ((_bytes.charCodeAt(p) & 0xFF) << 24) | ((_bytes.charCodeAt(++p) & 0xFF) << 16) | ((_bytes.charCodeAt(++p) & 0xFF) << 8) | (_bytes.charCodeAt(++p) & 0xFF);
} else {
p = (_pos += 4);
if (isBA) {
x = ((_bytes[--p] & 0xFF) << 24) | ((_bytes[--p] & 0xFF) << 16) | ((_bytes[--p] & 0xFF) << 8) | (_bytes[--p] & 0xFF);
} else
x = ((_bytes.charCodeAt(--p) & 0xFF) << 24) | ((_bytes.charCodeAt(--p) & 0xFF) << 16) | ((_bytes.charCodeAt(--p) & 0xFF) << 8) | (_bytes.charCodeAt(--p) & 0xFF);
}
return x;
},
readUnsignedShort : function() {
if (this.bytesAvailable() < 2) { throw "End of stream!"; }
var p = 0;
if (_endian == Endian.BIG) {
p = (_pos += 2) - 2;
if (isBA) {
return ((_bytes[p] & 0xFF) << 8) | (_bytes[++p] & 0xFF);
} else
return ((_bytes.charCodeAt(p) & 0xFF) << 8) | (_bytes.charCodeAt(++p) & 0xFF);
} else {
p = (_pos += 2);
if (isBA) {
return ((_bytes[--p] & 0xFF) << 8) | (_bytes[--p] & 0xFF);
} else
return ((_bytes.charCodeAt(--p) & 0xFF) << 8) | (_bytes.charCodeAt(--p) & 0xFF);
}
},
readShort : function() {
if (this.bytesAvailable() < 2) { throw "End of stream!"; }
var p = 0, x = 0;
if (_endian == Endian.BIG) {
p = (_pos += 2) - 2;
if (isBA) {
x = ((_bytes[p] & 0xFF) << 8) | (_bytes[++p] & 0xFF);
} else
x = ((_bytes.charCodeAt(p) & 0xFF) << 8) | (_bytes.charCodeAt(++p) & 0xFF);
} else {
p = (_pos += 2);
if (isBA) {
x = ((_bytes[--p] & 0xFF) << 8) | (_bytes[--p] & 0xFF);
} else
x = ((_bytes.charCodeAt(--p) & 0xFF) << 8) | (_bytes.charCodeAt(--p) & 0xFF);
}
return (x >= 32768) ? x - 65536 : x;
},
readUTFBytes : function(readLength) {
readLength = readLength || 0;
var output = '';
for (var i = 0; i < readLength; i++) {
output += String.fromCharCode(this.readByte());
}
return output;
}
};
};
/**
 * Pure-JS DEFLATE (RFC 1951) decompressor over the BA byte-array type.
 *
 * Fix: stored() previously indexed the BA input/output objects like native
 * arrays (`inbuf[incnt++]`, `buf[buf.length]`), which reads `undefined` and
 * writes own properties instead of bytes, corrupting every stored
 * (uncompressed) block. It now uses the BA readByteAt()/writeByte() API
 * like the rest of the decoder.
 */
var Inflater = function() {
    var MAXBITS = 15, MAXLCODES = 286, MAXDCODES = 30, MAXCODES = 316, FIXLCODES = 288,
        // Base lengths / extra bits for length codes 257..285.
        LENS = [3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258],
        LEXT = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0],
        // Base offsets / extra bits for distance codes 0..29.
        DISTS = [1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577],
        DEXT = [ 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13],
        inbuf = undefined, // input buffer - ByteArray
        incnt = 0, // bytes read so far
        bitbuf = 0, // bit buffer
        bitcnt = 0, // number of bits in bit buffer
        // Huffman code decoding tables
        lencode = undefined,
        distcode = undefined;
    // Returns the next `need` bits from the input, least-significant first.
    function bits(need) {
        var val = bitbuf;
        while (bitcnt < need) {
            if (incnt == inbuf.length()) throw 'available inflate data did not terminate';
            val |= inbuf.readByteAt(incnt++) << bitcnt;
            bitcnt += 8;
        }
        bitbuf = val >> need;
        bitcnt -= need;
        return val & ((1 << need) - 1);
    }
    // Builds a canonical Huffman decoding table `h` from `n` code lengths.
    // Returns 0 for a complete code set, negative for over-subscribed,
    // positive for incomplete.
    function construct(h, length, n) {
        var offs = new Array();
        for (var len = 0; len <= MAXBITS; len++) h.count[len] = 0;
        for (var symbol = 0; symbol < n; symbol++) h.count[length[symbol]]++;
        if (h.count[0] == n) return 0;
        var left = 1;
        for (len = 1; len <= MAXBITS; len++) {
            left <<= 1;
            left -= h.count[len];
            if (left < 0) return left;
        }
        offs[1] = 0;
        for (len = 1; len < MAXBITS; len++) offs[len + 1] = offs[len] + h.count[len];
        for (symbol = 0; symbol < n; symbol++)
            if (length[symbol] !== 0) h.symbol[offs[length[symbol]]++] = symbol;
        return left;
    }
    // Decodes one symbol from the input using Huffman table `h`.
    function decode(h) {
        var code = 0, first = 0, index = 0;
        for (var len = 1; len <= MAXBITS; len++) {
            code |= bits(1);
            var count = h.count[len];
            if (code < first + count) return h.symbol[index + (code - first)];
            index += count;
            first += count;
            first <<= 1;
            code <<= 1;
        }
        return -9; // ran out of codes
    }
    // Decodes literal/length + distance symbols into `buf` until end-of-block.
    function codes(buf) {
        do {
            var symbol = decode(lencode);
            if (symbol < 0) return symbol;
            if (symbol < 256) {
                // Literal byte: append to the output.
                buf.position(buf.length());
                buf.writeByte(symbol);
            }
            else if (symbol > 256) {
                // Length/distance pair: copy `len` bytes from `dist` back.
                symbol -= 257;
                if (symbol >= 29) throw "invalid literal/length or distance code in fixed or dynamic block";
                var len = LENS[symbol] + bits(LEXT[symbol]);
                symbol = decode(distcode);
                if (symbol < 0) return symbol;
                var dist = DISTS[symbol] + bits(DEXT[symbol]);
                if (dist > buf.length()) throw "distance is too far back in fixed or dynamic block";
                buf.position(buf.length());
                while (len--) buf.writeByte(buf.readByteAt(buf.length() - dist));
            }
        } while (symbol != 256);
        return 0;
    }
    // Copies a stored (uncompressed) block straight into `buf`.
    function stored(buf) {
        // Stored blocks are byte-aligned: discard any partial bit buffer.
        bitbuf = 0;
        bitcnt = 0;
        if (incnt + 4 > inbuf.length()) throw 'available inflate data did not terminate';
        // FIX: read the length header through the BA API instead of indexing
        // the BA object directly.
        var len = inbuf.readByteAt(incnt++);
        len |= inbuf.readByteAt(incnt++) << 8;
        if (inbuf.readByteAt(incnt++) != (~len & 0xff) || inbuf.readByteAt(incnt++) != ((~len >> 8) & 0xff))
            throw "stored block length did not match one's complement";
        if (incnt + len > inbuf.length()) throw 'available inflate data did not terminate';
        // FIX: append through writeByte() so the BA length/cursor stay valid.
        buf.position(buf.length());
        while (len--) buf.writeByte(inbuf.readByteAt(incnt++));
    }
    // Fills lencode/distcode with the fixed Huffman tables (block type 1).
    function constructFixedTables() {
        var lengths = new Array();
        // literal/length table
        for (var symbol = 0; symbol < 144; symbol++) lengths[symbol] = 8;
        for (; symbol < 256; symbol++) lengths[symbol] = 9;
        for (; symbol < 280; symbol++) lengths[symbol] = 7;
        for (; symbol < FIXLCODES; symbol++) lengths[symbol] = 8;
        construct(lencode, lengths, FIXLCODES);
        for (symbol = 0; symbol < MAXDCODES; symbol++) lengths[symbol] = 5;
        construct(distcode, lengths, MAXDCODES);
    }
    // Reads and builds the dynamic Huffman tables (block type 2).
    function constructDynamicTables() {
        var lengths = new Array(),
            order = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15],
            nlen = bits(5) + 257,
            ndist = bits(5) + 1,
            ncode = bits(4) + 4;
        if (nlen > MAXLCODES || ndist > MAXDCODES) throw "dynamic block code description: too many length or distance codes";
        for (var index = 0; index < ncode; index++) lengths[order[index]] = bits(3);
        for (; index < 19; index++) lengths[order[index]] = 0;
        var err = construct(lencode, lengths, 19);
        if (err !== 0) throw "dynamic block code description: code lengths codes incomplete";
        index = 0;
        while (index < nlen + ndist) {
            var symbol = decode(lencode), len;
            if (symbol < 16) lengths[index++] = symbol;
            else {
                // Symbols 16..18 are run-length encodings of code lengths.
                len = 0;
                if (symbol == 16) {
                    if (index === 0) throw "dynamic block code description: repeat lengths with no first length";
                    len = lengths[index - 1];
                    symbol = 3 + bits(2);
                }
                else if (symbol == 17) symbol = 3 + bits(3);
                else symbol = 11 + bits(7);
                if (index + symbol > nlen + ndist)
                    throw "dynamic block code description: repeat more than specified lengths";
                while (symbol--) lengths[index++] = len;
            }
        }
        err = construct(lencode, lengths, nlen);
        if (err < 0 || (err > 0 && nlen - lencode.count[0] != 1)) throw "dynamic block code description: invalid literal/length code lengths";
        err = construct(distcode, lengths.slice(nlen), ndist);
        if (err < 0 || (err > 0 && ndist - distcode.count[0] != 1)) throw "dynamic block code description: invalid distance code lengths";
        return err;
    }
    return {
        // Sets the compressed input (a BA); forces little-endian and rewinds.
        setInput : function(buf) {
            inbuf = buf;
            inbuf.endian(Endian.LITTLE);
            inbuf.position(0);
        },
        // Inflates all blocks of the input into `buf` (a BA).
        // Returns 0 on success, negative on a decoding error.
        inflate : function(buf) {
            incnt = bitbuf = bitcnt = 0;
            var err = 0;
            do {
                var last = bits(1);  // 1 when this is the final block
                var type = bits(2);  // 0 stored, 1 fixed, 2 dynamic
                if (type === 0) stored(buf); // uncompressed block
                else if (type == 3) throw 'invalid block type (type == 3)';
                else { // compressed block
                    lencode = {count:new Array(0), symbol:new Array(0)};
                    distcode = {count:new Array(0), symbol:new Array(0)};
                    if (type == 1) constructFixedTables();
                    else if (type == 2) err = constructDynamicTables();
                    if (err !== 0) return err;
                    err = codes(buf);
                }
                if (err !== 0) break;
            } while (!last);
            return err;
        }
    }
};
var ZipEntry = function(name) {
var _name = name, dostime = 0, flag = 0, version = 0, offset = 0;
return {
name : function() { return _name; },
time : function(val) {
var d;
if (val) {
d = new Date(time);
dostime = (d.fullYear - 1980 & 0x7f) << 25 | (d.month + 1) << 21 | d.day << 16 | d.hours << 11 | d.minutes << 5 | d.seconds >> 1;
} else {
d = new Date(((dostime >> 25) & 0x7f) + 1980,((dostime >> 21) & 0x0f) - 1,(dostime >> 16) & 0x1f,(dostime >> 11) & 0x1f,(dostime >> 5) & 0x3f,(dostime & 0x1f) << 1);
return d.getTime();
}
},
size : 0,
compressedSize : 0,
crc : 0,
method : 0,
extra : undefined,
comment : '',
isDirectory : function() {
return _name.charAt(_name.length - 1) == '/';
}
}
};
var ZipFile = function(data) {
var buf = undefined, // ByteArray
entryList = [], // Array
entryTable = {}, // Dict
locOffsetTable = {}; // Dict
buf = new BA(data.data(), Endian.LITTLE);
readEntries();
function readEntries() {
readEND();
entryTable = {};
locOffsetTable = {};
// read cen entries
for(var i = 0; i < entryList.length; i++) {
var tmpbuf = new BA(buf.readBytes(ZipConstants.CENHDR), Endian.LITTLE);
if(tmpbuf.readUnsignedInt() != ZipConstants.CENSIG) // "PK\005\006"
throw "readEntries::Invalid CEN header (bad signature)";
// handle filename
tmpbuf.position(28);
var len = tmpbuf.readUnsignedShort();
if(len === 0) throw "missing entry name";
var e = new ZipEntry(buf.readUTFBytes(len));
// handle extra field
len = tmpbuf.readUnsignedShort();
e.extra = new BA();
if(len > 0) e.extra.data(buf.readBytes(len));
// handle file comment
buf.move(tmpbuf.readUnsignedShort());
// now get the remaining fields for the entry
tmpbuf.position(6); // version needed to extract
e.version = tmpbuf.readUnsignedShort();
e.flag = tmpbuf.readUnsignedShort();
if ((e.flag & 1) == 1) throw "readEntries::Encrypted ZIP entry not supported";
e.method = tmpbuf.readUnsignedShort();
e.dostime = tmpbuf.readUnsignedInt();
e.crc = tmpbuf.readUnsignedInt();
e.compressedSize = tmpbuf.readUnsignedInt();
e.size = tmpbuf.readUnsignedInt();
// add to entries and table
entryList[i] = e;
entryTable[e.name()] = e;
// loc offset
tmpbuf.position(42); // LOC HEADER
locOffsetTable[e.name()] = tmpbuf.readUnsignedInt();
}
}
function readEND() {
var b = new BA();
b.endian(Endian.LITTLE);
b.data(buf.readBytes(findEND(), ZipConstants.ENDHDR));
b.position(ZipConstants.ENDTOT); // total number of entries
entryList = new Array(b.readUnsignedShort());
b.position(ZipConstants.ENDOFF); // offset of first CEN header
buf.position(b.readUnsignedInt());
}
function findEND() {
var j = buf.length() - ZipConstants.ENDHDR; // END header size
var n = Math.max(0, j - 0xffff); // 0xffff is max zip file comment length
for(var i=j; i >= n; i--) {
buf.position(i);
if(buf.readByte() != 0x50) continue; // quick check that the byte is 'P'
buf.position(i);
if(buf.readUnsignedInt() == 0x06054b50) { // "PK\005\006"
return i;
}
}
throw "findEND::Invalid zip";
}
return {
entries : function() { return entryList },
size : function() { return entryList.length },
getEntry : function(name) { return entryTable[name]; },
getInput : function(entry) {
buf.position(locOffsetTable[entry.name()] + 30 - 2);
var len = buf.readShort();
buf.move(entry.name().length + len);
var b1 = new BA();
if(entry.compressedSize > 0) {
b1.data(buf.readBytes(entry.compressedSize));
}
switch(entry.method) {
case 0: // STORED
return b1;
break;
case 8: // DEFLATED
var b2 = new BA();
var inflater = new Inflater();
inflater.setInput(b1);
inflater.inflate(b2);
b2.position(0)
return b2;
break;
default:
throw "zipEntry::getInput::Invalid compression method";
}
}
};
};
var ZipLoader = function(zipURL) {
var ZIP_CACHE = {},
ZIP_FILE_REG = new RegExp('.*?\.zip$', 'i'),
ZIP_ENTRY_REG = new RegExp('(.*?\.zip):\/\/(.*$)', 'i'),
_zipUrl = zipURL,
_entryUrl = '',
_entryDataFormat = '',
isCompleteProcessRequired = true,
data = undefined;
if (_zipUrl) {
loadBinaryResource(_zipUrl);
}
function isIE() {
return (navigator.userAgent.match(/MSIE/) !== null);
}
function getXMLHttpObj(){
if(typeof(XMLHttpRequest)!=='undefined') {
return new XMLHttpRequest();
}
var axO = ['Msxml2.XMLHTTP.6.0', 'Msxml2.XMLHTTP.4.0', 'Msxml2.XMLHTTP.3.0', 'Msxml2.XMLHTTP', 'Microsoft.XMLHTTP'], i;
for (i = 0; i < axO.length; i++) {
try{
return new ActiveXObject(axO[i]);
}catch(e){}
}
return null;
}
function loadBinaryResource(url) {
var req = getXMLHttpObj();
req.open('GET', url, false);
if(!req.overrideMimeType) {
var vbScript = '<scr' + 'ipt type="text/vbscript">\n'+
'<!-' + '-\n' +
'Function BinaryToArray(Binary)\n'+
' Dim i\n'+
' ReDim byteArray(LenB(Binary))\n'+
' For i = 1 To LenB(Binary)\n'+
' byteArray(i-1) = AscB(MidB(Binary, i, 1))\n'+
' Next\n'+
' BinaryToArray = byteArray\n'+
'End Function\n'+
'--' + '>\n' +
'</scr' + 'ipt>';
document.write(vbScript);
req.setRequestHeader('Accept-Charset', 'x-user-defined');
req.send();
var fileContents = BinaryToArray(req.responseBody).toArray();
loadComplete(fileContents);
} else {
req.overrideMimeType('text/plain; charset=x-user-defined');
req.send();
loadComplete(req.responseText);
}
}
function loadComplete(data) {
if (isCompleteProcessRequired) {
var zipfile = new ZipFile(new BA(data, Endian.LITTLE));
ZIP_CACHE[_zipUrl] = zipfile;
dataFromZip(zipfile);
}
}
function dataFromZip(zip) {
if (zip && _entryUrl) {
data = getZipEntry(zip, _entryUrl);
}
}
function getZipEntry(zip, entryUrl) {
if (ZIP_ENTRY_REG.test(entryUrl)) {
var result = ZIP_ENTRY_REG.exec(entryUrl);
var outerEntry = result[1];
var innerEntry = result[2];
var innerZip = new ZipFile(zip.getInput(zip.getEntry(outerEntry)));
return getZipEntry(innerZip, innerEntry);
} else {
var entry = zip.getEntry(entryUrl);
if (entry) {
return zip.getInput(entry);
} else {
throw "Requested file was not found in the archive";
}
}
}
function appendChild(node, text) {
if (null === node.canHaveChildren || node.canHaveChildren) {
node.appendChild(document.createTextNode(text));
} else {
node.text = text;
}
}
function getFileExtension(filename) {
return (/[.]/.exec(filename)) && /[^.]+$/.exec(filename)[0] || '';
}
function _utf8_decode(utftext) {
if (utftext.charCodeAt(0) == 0xef && utftext.charCodeAt(1) == 0xbb && utftext.charCodeAt(2) == 0xbf) {
utftext = utftext.substr(3);
var string = "";
var i = 0;
var c = c1 = c2 = 0;
while ( i < utftext.length ) {
c = utftext.charCodeAt(i);
if (c < 128) {
string += String.fromCharCode(c);
i++;
}
else if((c > 191) && (c < 224)) {
c2 = utftext.charCodeAt(i+1);
string += String.fromCharCode(((c & 31) << 6) | (c2 & 63));
i += 2;
}
else {
c2 = utftext.charCodeAt(i+1);
c3 = utftext.charCodeAt(i+2);
string += String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63));
i += 3;
}
}
return string;
}
return utftext;
}
return {
load : function(url) {
isCompleteProcessRequired = false;
switch(true) {
case ZIP_ENTRY_REG.test(url):
var result = ZIP_ENTRY_REG.exec(url);
_zipUrl = result[1];
_entryUrl = result[2];
var zip = ZIP_CACHE[_zipUrl];
if (zip) {
dataFromZip(zip);
} else {
isCompleteProcessRequired = true;
loadBinaryResource(url);
}
break;
case ZIP_FILE_REG.test(url):
isCompleteProcessRequired = true;
_zipUrl = url;
_entryDataFormat = 'Text';
loadBinaryResource(url);
break;
default:
loadBinaryResource(url);
break;
}
if (data) {
data.position(0);
return _utf8_decode(data.readBytes(0, data.length()));
} else {
return '';
}
},
loadImage : function(url) {
var data = this.load(url);
if (data) {
var tmp = "data:";
switch(getFileExtension(url).toLowerCase()) {
case 'gif' :
tmp += "image/gif;base64,";
break;
case 'png' :
tmp += "image/png;base64,";
break;
case 'jpg':
case 'jpeg':
tmp += "image/jpeg;base64,";
break;
}
tmp += Base64(data);
}
return tmp;
},
loadCSS : function(url) {
var data = _utf8_decode(this.load(url));
if (data) {
var pa= document.getElementsByTagName('head')[0] ;
var el= document.createElement('style');
el.type= 'text/css';
el.media= 'screen';
if(el.styleSheet) el.styleSheet.cssText = data;// IE method
else el.appendChild(document.createTextNode(data));// others
pa.appendChild(el);
}
},
loadScript : function(url,encoding) {
var data = _utf8_decode(this.load(url));
if (data) {
var fileRef = window.document.createElement("script");
fileRef.setAttribute("type", "text/javascript");
fileRef.setAttribute("charset", encoding);
if (isIE()) { // IE
eval(data);
} else{
var head = document.getElementsByTagName("head")[0] || document.documentElement;
head.insertBefore(fileRef, head.firstChild);
appendChild(fileRef, data);
}
}
}
};
};
|
objectiser/overlord-commons | overlord-commons-ant/src/main/java/org/overlord/commons/ant/crypt/AesEncrypterTask.java | /*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.overlord.commons.ant.crypt;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;
import org.overlord.commons.codec.AesEncrypter;
/**
 * A custom ant task capable of encrypting a property using synchronous
 * AES encryption.
 *
 * Usage: set the {@code plain} attribute to the clear-text value and
 * {@code addproperty} to the name of the project property that should
 * receive the encrypted result.
 */
public class AesEncrypterTask extends Task {

    /** Clear-text value to be encrypted. */
    private String plain;
    /** Name of the ant project property that receives the encrypted value. */
    private String addproperty;

    /**
     * Constructor.
     */
    public AesEncrypterTask() {
    }

    /**
     * @return the plain text value
     */
    public String getPlain() {
        return plain;
    }

    /**
     * @param plain the plain text value to encrypt
     */
    public void setPlain(String plain) {
        this.plain = plain;
    }

    /**
     * Gets the addproperty.
     *
     * @return the addproperty
     */
    public String getAddproperty() {
        return addproperty;
    }

    /**
     * Sets the addproperty.
     *
     * @param addproperty
     *            the new addproperty
     */
    public void setAddproperty(String addproperty) {
        this.addproperty = addproperty;
    }

    /**
     * Validates both attributes, encrypts the plain value with AES and
     * publishes the result as the configured project property.
     *
     * @throws BuildException if either required attribute is missing
     * @see org.apache.tools.ant.Task#execute()
     */
    @Override
    public void execute() throws BuildException {
        if (addproperty == null || addproperty.isEmpty()) {
            throw new BuildException("\tThe 'addproperty' attribute is required."); //$NON-NLS-1$
        }
        if (plain == null || plain.isEmpty()) {
            throw new BuildException("\tThe 'plain' attribute is required."); //$NON-NLS-1$
        }
        // Encrypt the password using AES encryption. Both attributes were
        // validated above, so the result can be published unconditionally
        // (the original re-checked addproperty here; that branch was dead).
        String generatedPassword = AesEncrypter.encrypt(plain);
        getProject().setProperty(addproperty, generatedPassword);
    }
}
|
whskyneat/element-wheels-blog | web/app/plugins/quick-setup/js/gd-quick-setup.js | <filename>web/app/plugins/quick-setup/js/gd-quick-setup.js
/*jslint nomen: true, plusplus: true, sloppy: true, vars: true, white: true, browser: true */
/**
* Copyright 2013 Go Daddy Operating Company, LLC. All Rights Reserved.
*/
// Wires up the three-step GoDaddy Quick Setup wizard once the DOM is ready.
jQuery(document).ready(function($) {
    /*
     * Step 1
     */
    // Save the site type, then submit
    $("#form-step1 input[type=submit]").on( 'mousedown', function() {
        $("#site_type").val( $(this).data("site-type") );
    });
    /*
     * Step 2
     */
    // Record the chosen theme slug, then submit the step-2 form.
    $("#form-step2 a.form2-submit").on( 'mousedown', function() {
        $("#theme_slug").val( $(this).data("theme-slug") );
        $("#form-step2").trigger("submit");
    });
    /*
     * Step 3
     */
    // "Plugin settings" pane
    $(".q-setup-expand").on( 'click', function(){
        $(this).toggleClass("q-setup-contract");
        $(".q-setup-optionals-list").slideToggle("300");
    });
    // Enforce the "I understand this will nuke my site" checkbox
    $("#form-step3 #q-setup-final-warning").on( 'click', function() {
        if ($(this).is(":checked")) {
            $("#form-step3 a.gd-quicksetup-wizard-submit").removeClass("button-disabled");
            $("#form-step3 a.gd-quicksetup-wizard-submit").removeAttr("disabled");
        } else {
            $("#form-step3 a.gd-quicksetup-wizard-submit").addClass("button-disabled");
            $("#form-step3 a.gd-quicksetup-wizard-submit").attr("disabled", true);
        }
    });
    // Pre-load ajax-loader.gif
    var ajax_loader = new Image();
    ajax_loader.src = gd_quicksetup_img_dir + '/ajax-loader.gif';
    // Disable submit button (don't double-submit)
    $("#form-step3 a.gd-quicksetup-wizard-submit").on( "click", function() {
        if ( $(this).hasClass("button-disabled") ) {
            return;
        }
        // Lock the UI and show progress while the build request runs.
        $("#form-step3 a.gd-quicksetup-wizard-submit").addClass("button-disabled");
        $("#form-step3 a.gd-quicksetup-wizard-submit").attr("disabled", true);
        $("#form-step3 #q-setup-final-warning").prop("disabled", true);
        $("#form-step3 a.gd-quicksetup-wizard-submit").html('<img style="position: relative; top: 3px;" src="' + ajax_loader.src + '" /> ' + objectL10n.building );
        $("a.gd-quicksetup-wizard-start-over").hide();
        $("#form-step3").trigger("submit");
    });
    // Add/remove feature functionality: toggles the whole feature pane
    // between enabled (inputs active, hidden enabledN flags "true") and
    // disabled states, keyed off the button's current label.
    $("div.q-setup-steps-wrap").on( "click", "a.qs-add-remove", function() {
        if ( objectL10n.add === $(this).html() ) {
            $(this).html( objectL10n.remove );
            $(this).parents("div.page-container").find("input").prop("disabled", false);
            $(this).parents("div.page-container").find("textarea").prop("disabled", false);
            $(this).parents("div.page-container").find("input[type=hidden]").each( function( idx, el ) {
                if ( $(el).attr("name").match(/enabled[\d]/) ) {
                    $(el).val("true");
                }
            });
        } else {
            $(this).html( objectL10n.add );
            $(this).parents("div.page-container").find("input").prop("disabled", true);
            $(this).parents("div.page-container").find("textarea").prop("disabled", true);
            $(this).parents("div.page-container").find("input[type=hidden]").each( function( idx, el ) {
                if ( $(el).attr("name").match(/enabled[\d]/) ) {
                    $(el).val("false");
                }
            });
        }
        $(this).parents("div.page-container").find("ul a").toggleClass("button-disabled");
        $(this).toggleClass("button-primary");
    });
    // Remove files from gallery
    $("ul.q-setup-page-info").on( "click", "a.q-setup-remove-gallery-file", function() {
        if ( $(this).hasClass("button-disabled") ) {
            return false;
        }
        $(this).parent().fadeOut();
        $(this).parent().remove();
    });
    // Add files to gallery: appends three new <input type=file> rows,
    // continuing the data-index numbering of the last existing input.
    $("a.qs-add-images").on( "click", function() {
        if ( $(this).hasClass("button-disabled") ) {
            return false;
        }
        var idx = 0;
        var el = $(this).parents("ul.q-setup-page-info").find("input[type=file]:last");
        var _idx = $(el).data("idx");
        idx = $(el).data("index");
        idx++;
        var $li = $(this).parent();
        var i = 0;
        for ( i = idx ; i < idx + 3 ; i++ ) {
            $li.before($("<li style=\"display: none;\"><label for=\"upload_image_" + _idx + "_" + i + "\"><input id=\"upload_image_" + _idx + "_" + i + "\" name=\"upload_image_" + _idx + "_" + i + "\" data-idx=\"" + _idx + "\" data-index=\"" + i + "\" type=\"file\" size=\"36\" value=\"\" /></label> <a href=\"javascript:;\" class=\"q-setup-remove-gallery-file\">" + objectL10n.remove + "</a></li>"));
        }
        $(this).parents("ul.q-setup-page-info").find("li").fadeIn();
    });
    // Add custom pages: clones the hidden page template with the next free
    // panel index substituted for {{idx}}.
    $("a.qs-add-custom-page").on( "click", function() {
        var idx = 0;
        $("div.q-setup-item").each( function( index, el ) {
            if ( undefined !== $(el).data("index") && parseInt( $(el).data("index") ) > idx ) {
                idx = $(el).data("index");
            }
        });
        var $template = $("#qs-custom-page-template").clone().html();
        $template = $template.replace(/\{\{idx\}\}/g, (idx+1));
        $("#q-setup-panel-" + idx).after($template);
        $("#q-setup-panel-" + (idx+1)).fadeIn();
        $("#q-setup-panel-" + (idx+1)).find("input[type=text]").placeholder();
        $("#q-setup-panel-" + (idx+1)).find("textarea").placeholder();
        $("#q-setup-panel-" + (idx+1)).find("input[type=hidden]").each( function( idx, el ) {
            if ( $(el).attr("name").match(/enabled/) ) {
                $(el).val("true");
            }
        });
    });
    /**
     * General
     */
    // If the user clicks an input that's inside a <label> markup, it will toggle
    // the control connected to the <label>. This prevents that, as long as the
    // event target is the input. Needed for the Google Analytics input.
    $("label").on('click', 'input[type=text]', function(e) {
        if( e.target.nodeName === 'INPUT') {
            e.preventDefault();
            return false;
        }
    });
    // Tiptip!
    $(".q-setup-info-icn").tipTip();
    // Lazy load thickbox images
    $(".show-thickbox-image").on("click", function() {
        var id = $(this).data("lazy-load-target");
        var $img = $("#" + id);
        var height = $img.data("height");
        var width = $img.data("width");
        var src = $img.data("src");
        $img.attr("src", src).attr("height", height).attr("width", width);
    });
    // Always reset the "finish" checkbox and "publish website" controls. These
    // can be stuck to an incorrect state if the user hits the "back" button
    // in their browser
    $("#q-setup-final-warning").removeAttr('checked');
    $("#form-step3 a.gd-quicksetup-wizard-submit").html( objectL10n.publish ).addClass("button-disabled").attr("disabled", true);
});
|
lechium/tvOS142Headers | System/Library/PrivateFrameworks/CoreRecognition.framework/CRColor.h | <gh_stars>1-10
/*
* This header is generated by classdump-dyld 1.5
* on Tuesday, November 10, 2020 at 10:14:44 PM Mountain Standard Time
* Operating System: Version 14.2 (Build 18K57)
* Image Source: /System/Library/PrivateFrameworks/CoreRecognition.framework/CoreRecognition
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
#import <UIKitCore/UIColor.h>

/// Private UIColor subclass from the CoreRecognition framework.
/// This class-dumped header exposes no members; the implementation lives
/// entirely in the framework binary.
@interface CRColor : UIColor
@end
|
openpreserve/plato | plato-model/src/main/java/eu/scape_project/planning/model/ResourceDescription.java | /*******************************************************************************
* Copyright 2006 - 2012 Vienna University of Technology,
* Department of Software Technology and Interactive Systems, IFS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package eu.scape_project.planning.model;
import java.io.Serializable;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Lob;
import javax.persistence.OneToOne;
/**
* Contains fields for describing the resources needed for an {@link Alternative}.
*
* @author <NAME>
*/
@Entity
public class ResourceDescription implements Serializable, ITouchable {

private static final long serialVersionUID = 8652623297851734108L;

/**
* Free-text rationale for considering the alternative.
* Hibernate note: standard length for a string column is 255;
* validation is broken because we use facelet templates (issue resolved in Seam 2.0),
* therefore allow "long" entries via @Lob.
*/
@Lob
private String reasonForConsidering;

/**
* Free-text description of configuration settings.
* Hibernate note: standard length for a string column is 255;
* validation is broken because we use facelet templates (issue resolved in Seam 2.0),
* therefore allow "long" entries via @Lob.
*/
@Lob
private String configSettings;

/**
* Free-text description of the resources required.
* Hibernate note: standard length for a string column is 255;
* validation is broken because we use facelet templates (issue resolved in Seam 2.0),
* therefore allow "long" entries via @Lob.
*/
@Lob
private String necessaryResources;

@Id @GeneratedValue
private int id;

// Cascade ALL so the change log is persisted/removed with this entity.
@OneToOne(cascade=CascadeType.ALL)
private ChangeLog changeLog = new ChangeLog();

public int getId() {
return id;
}

public void setId(int id) {
this.id = id;
}

public String getConfigSettings() {
return configSettings;
}

// NOTE(review): parameter is named "desirable" but sets configSettings —
// consider renaming the parameter for clarity.
public void setConfigSettings(String desirable) {
this.configSettings = desirable;
}

public String getNecessaryResources() {
return necessaryResources;
}

public void setNecessaryResources(String necessaryResources) {
this.necessaryResources = necessaryResources;
}

public String getReasonForConsidering() {
return reasonForConsidering;
}

public void setReasonForConsidering(String reasonForConsidering) {
this.reasonForConsidering = reasonForConsidering;
}

public ChangeLog getChangeLog() {
return changeLog;
}

public void setChangeLog(ChangeLog value) {
changeLog = value;
}

/** Returns true if the change log recorded a modification. */
public boolean isChanged(){
return changeLog.isAltered();
}

/** Marks this entity as modified in its change log. */
public void touch() {
changeLog.touch();
}

/**
* @see ITouchable#handleChanges(IChangesHandler)
*/
public void handleChanges(IChangesHandler h) {
h.visit(this);
}
}
|
oxelson/gempak | gempak/source/diaglib/dg/dgcnrdt.c | <reponame>oxelson/gempak
#include "dg.h"
void dgc_nrdt ( const int *ifpn, const char *time1, const char *time2,
const int *level1, const int *level2, const int *ivcord,
const char *parm, float *grid, int *igx, int *igy, int *ighd,
int *iret )
/************************************************************************
 * dgc_nrdt								*
 *									*
 * This subroutine reads the requested grid from a grid file by calling	*
 * GD_RDAT. It also applies any specific functions to the grid, for	*
 * example, adding a column of data or subsetting.			*
 *									*
 * dgc_nrdt ( ifpn, time1, time2, level1, level2, ivcord, parm, grid,	*
 *            igx, igy, ighd, iret )					*
 *									*
 * Input parameters:							*
 *	*ifpn		const int	GDFILE entry position number	*
 *	*time1		const char	GEMPAK grid date-time		*
 *	*time2		const char	GEMPAK grid date-time		*
 *	*level1		const int	GEMPAK grid level		*
 *	*level2		const int	GEMPAK grid level		*
 *	*ivcord		const int	GEMPAK vertical coordinate	*
 *					  0 = NONE			*
 *					  1 = PRES			*
 *					  2 = THTA			*
 *					  3 = HGHT			*
 *	*parm		const char	GEMPAK parameter name		*
 *									*
 * Output parameters:							*
 *	*grid		float		Grid data			*
 *	*igx		int		Number of horizontal points	*
 *	*igy		int		Number of vertical points	*
 *	*ighd		int		Grid header			*
 *	*iret		int		Return code			*
 *					  0 = normal return		*
 *					 -7 = grid not found		*
 *					-30 = open grid failed		*
 *					-31 = navigation not same	*
 **									*
 * Log:									*
 * <NAME>/SAIC	10/03						*
 * <NAME>/HPC	01/04	Check Ret code from GD subroutines; do	*
 *				not pass parameter as returnable arg	*
 * <NAME>/HPC	02/04	CALL ER_WMSG for -31			*
 * K. Brill/HPC		02/04	Remove all other ER_WMSG calls		*
 * <NAME>/SAIC	 2/04	Modified to use new GD file management	*
 * <NAME>/SAIC	 5/04	Added call to DG_T2IG			*
 * <NAME>/SAIC	11/04	Added check for PARM == DRCT		*
 * <NAME>/SAIC	 2/06	Recoded from Fortran			*
 * D.W.Plummer/NCEP	10/06	Dynamically allocate transfer grid	*
 * <NAME>/NCEP	 2/13	Add check before freeing memory		*
 ************************************************************************/
{
char filnam[MXFLSZ+1], tmpnam[MXFLSZ+1], uparm[14];
float rnvblk[LLNNAV], adum;
int ifidx, igdf, mxgd, navsz, zero, ier;
float *transfer_grid;
/*----------------------------------------------------------------------*/
*iret = 0;
navsz = LLNNAV;
zero = 0;
/* GDFILE entry numbers are 1-based; _nfile arrays are 0-based. */
ifidx = (*ifpn) - 1;
if ( strlen ( _nfile.ntmplt[ifidx] ) == 0 ||
( strcmp ( time1, _nfile.crtgdt1[ifidx] ) == 0 &&
strcmp ( time2, _nfile.crtgdt2[ifidx] ) == 0 ) ) {
/*
 * This GDFILE entry is an actual file (or the template was already
 * resolved for these date-times); reuse the current file name.
 */
strcpy ( filnam, _nfile.crtfnm[ifidx] );
} else {
/*
 * This GDFILE entry is a template: build the concrete file name
 * from the template and the first date-time, and remember it.
 */
cfl_mnam ( (char *)time1, _nfile.ntmplt[ifidx], tmpnam, &ier );
strcpy ( filnam, _nfile.gflpth[ifidx] );
strcat ( filnam, "/" );
strcat ( filnam, tmpnam );
strcpy ( _nfile.crtfnm[ifidx], filnam );
}
/* Open the grid file and retrieve its navigation block (rnvblk). */
gd_open ( filnam, &_nfile.outflg[ifidx], &zero, &navsz, &igdf, &adum,
rnvblk, &mxgd, &ier, strlen(filnam) );
if ( ier != 0 ) {
*iret = -30;
return;
}
/*
 * Allocate the transfer grid based on the kx and ky of the nav block.
 * Add 1 to kx to account for the possibility of adding a column later.
 */
G_MALLOC ( transfer_grid, float, ((int)((rnvblk[4]+1)*rnvblk[5])),
"Error allocating transfer grid" );
cgd_rdat ( &igdf, time1, time2, level1, level2, ivcord, parm,
transfer_grid, igx, igy, ighd, &ier );
if ( ier != 0 ) {
/* Grid not found in the file. */
*iret = -7;
}
else {
/* Remember the date-times that this file name was resolved for. */
strcpy ( _nfile.crtgdt1[ifidx], time1 );
strcpy ( _nfile.crtgdt2[ifidx], time2 );
/*
 * Transfer just-read grid navigation to internal grid navigation.
 * Wind-direction grids (DRCT) are flagged in the grid header so the
 * transfer can handle them specially.
 */
cst_lcuc ( (char *)parm, uparm, &ier );
if ( strcmp ( uparm, "DRCT" ) == 0 ) {
ighd[1] = 1;
}
dg_t2ig ( rnvblk, ighd, transfer_grid, grid, igx, igy, &ier );
if ( ier != 0 ) {
*iret = -7;
}
}
/* Free the scratch grid on every path through the function. */
if ( transfer_grid != NULL ) {
G_FREE ( transfer_grid, float );
}
return;
}
|
ramonamela/compss | tests/sources/java/4_data_locality_basic/src/main/java/basicTest/DataLocality.java | package basicTest;
/**
 * Launches a single chain of COMPSs tasks so the scheduler's data-locality
 * behaviour can be inspected afterwards by an external checker script.
 */
public class DataLocality {

    private static int numTasks;
    private static String fileName;

    public static void main(String[] args) {
        // Validate command-line arguments: exactly <numTasks> and <outputFile>.
        if (args.length != 2) {
            System.out.println("[ERROR] Bad number of parameters");
            System.out.println("Usage: dataLocality <numTasks> <outputFile>");
            System.exit(-1);
        }

        // Short pause for test stability before submitting work.
        try {
            Thread.sleep(2000);
        } catch (InterruptedException ie) {
            // Best-effort pause only: an interrupt is simply ignored.
        }

        numTasks = Integer.parseInt(args[0]);
        fileName = args[1];

        // Submit the chain of tasks, all writing to the same file.
        System.out.println("[LOG] Creating single chain of tasks.");
        int taskId = 0;
        while (taskId < numTasks) {
            DataLocalityImpl.task(taskId, fileName);
            taskId++;
        }

        // The runtime synchronises on pending tasks at shutdown.
        System.out.println("[LOG] No more work for main app. Waiting tasks to finish");
        System.out.println("[LOG] Result must be checked (script)");
    }
}
|
dynamicguy/ipf | archetypes/basic/src/main/resources/archetype-resources/src/main/java/SampleServer.java | <gh_stars>10-100
#set( $symbol_pound = '#' )
#set( $symbol_dollar = '$' )
#set( $symbol_escape = '\' )
package ${package};
import org.apache.camel.spring.Main;
public class SampleServer {
/**
* Entry point: boots a Camel application from the Spring context defined
* in the classpath resource /context.xml ("-ac" = application context).
* Blocks until the JVM is shut down.
*/
public static void main(String[] args) throws Exception {
Main.main("-ac", "/context.xml");
}
}
|
realkotob/phaser3-rex-notes | plugins/gameobjects/containerlite/ContainerLite.js | import Base from './Base.js';
import Methods from './Methods.js';
import { GetParent } from './GetParent.js';
/*
 * Lightweight container game object. Property setters below all follow the
 * same pattern: bail out early when the value is unchanged, store it, then
 * call the matching sync*() method (from Methods, mixed in at the bottom)
 * to propagate the change to the children.
 */
class ContainerLite extends Base {
constructor(scene, x, y, width, height, children) {
super(scene, x, y, width, height);
this.type = 'rexContainerLite';
this.isRexContainerLite = true;
this.syncChildrenEnable = true;

// Locally-shadowed state; the getters/setters below use these instead of
// the base class storage so changes can be intercepted and synced.
this._active = true;
this._mask = null;
this._scrollFactorX = 1;
this._scrollFactorY = 1;

if (children) {
this.add(children);
}
}

destroy(fromScene) {
// This Game Object has already been destroyed
if (!this.scene) {
return;
}

this.syncChildrenEnable = false; // Don't sync properties changing anymore
super.destroy(fromScene);
}

resize(width, height) {
this.setSize(width, height);
return this;
}

get x() {
return this._x;
}

set x(value) {
if (this._x === value) {
return;
}
this._x = value;

this.syncPosition();
}

get y() {
return this._y;
}

set y(value) {
if (this._y === value) {
return;
}
this._y = value;

this.syncPosition();
}

// Override
get rotation() {
return super.rotation;
}

set rotation(value) {
if (this.rotation === value) {
return;
}
super.rotation = value;

this.syncPosition();
}

// Override
get scaleX() {
return super.scaleX;
}

set scaleX(value) {
if (this.scaleX === value) {
return;
}
super.scaleX = value;

this.syncPosition();
}

// Override
get scaleY() {
return super.scaleY;
}

set scaleY(value) {
if (this.scaleY === value) {
return;
}
super.scaleY = value;

this.syncPosition();
}

// Override
get scale() {
return super.scale;
}

set scale(value) {
if (this.scale === value) {
return;
}
super.scale = value;

this.syncPosition();
}

// Override
get visible() {
return super.visible;
}

set visible(value) {
if (super.visible === value) {
return;
}
super.visible = value;

this.syncVisible();
}

// Override
get alpha() {
return super.alpha;
}

set alpha(value) {
if (super.alpha === value) {
return;
}
super.alpha = value;

this.syncAlpha();
}

// Override
get active() {
return this._active;
}

set active(value) {
if (this._active === value) {
return;
}
this._active = value;

this.syncActive();
}

// Override
get mask() {
return this._mask;
}

set mask(mask) {
if (this._mask === mask) {
return;
}
this._mask = mask;

this.syncMask();
}

// Override
get scrollFactorX() {
return this._scrollFactorX;
}

set scrollFactorX(value) {
if (this._scrollFactorX === value) {
return;
}

this._scrollFactorX = value;
this.syncScrollFactor();
}

get scrollFactorY() {
return this._scrollFactorY;
}

set scrollFactorY(value) {
if (this._scrollFactorY === value) {
return;
}

this._scrollFactorY = value;
this.syncScrollFactor();
}

// Compatible with container plugin
get list() {
return this.children;
}

static GetParent(child) {
return GetParent(child);
}
}

// Mix in add/remove/sync* behaviour shared with other container types.
Object.assign(
ContainerLite.prototype,
Methods
);
export default ContainerLite; |
nsmilyanski/Java-Fundamentals | src/Lists/lab/MergingLists.java | package Lists.lab;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Scanner;
import java.util.stream.Collectors;
/**
 * Reads two lines of space-separated integers, interleaves them pairwise,
 * appends the remainder of the longer list, and prints the merged values
 * separated by spaces.
 */
public class MergingLists {
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);

        // Parse both input lines into integer lists.
        List<Integer> left = Arrays.stream(scanner.nextLine().split(" "))
                .map(Integer::parseInt)
                .collect(Collectors.toList());
        List<Integer> right = Arrays.stream(scanner.nextLine().split(" "))
                .map(Integer::parseInt)
                .collect(Collectors.toList());

        int common = Math.min(left.size(), right.size());
        List<Integer> merged = new ArrayList<>();

        // Interleave elements pairwise while both lists still have values.
        for (int pos = 0; pos < common; pos++) {
            merged.add(left.get(pos));
            merged.add(right.get(pos));
        }

        // Append the tail of whichever list is longer (empty when equal).
        List<Integer> longer = left.size() > right.size() ? left : right;
        merged.addAll(longer.subList(common, longer.size()));

        // Strip the brackets and commas from List.toString() output.
        System.out.println(merged.toString().replaceAll("[\\[\\],]", ""));
    }
}
|
valgur/OCP | opencascade/StepRepr_GlobalUncertaintyAssignedContext.hxx | <gh_stars>100-1000
// Created on: 1995-12-01
// Created by: EXPRESS->CDL V0.2 Translator
// Copyright (c) 1995-1999 <NAME>
// Copyright (c) 1999-2014 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _StepRepr_GlobalUncertaintyAssignedContext_HeaderFile
#define _StepRepr_GlobalUncertaintyAssignedContext_HeaderFile
#include <Standard.hxx>
#include <Standard_Type.hxx>
#include <StepBasic_HArray1OfUncertaintyMeasureWithUnit.hxx>
#include <StepRepr_RepresentationContext.hxx>
#include <Standard_Integer.hxx>
class TCollection_HAsciiString;
class StepBasic_UncertaintyMeasureWithUnit;
class StepRepr_GlobalUncertaintyAssignedContext;
DEFINE_STANDARD_HANDLE(StepRepr_GlobalUncertaintyAssignedContext, StepRepr_RepresentationContext)
//! Representation context carrying the global uncertainty measures
//! (e.g. length tolerances) that apply to an entire STEP representation.
class StepRepr_GlobalUncertaintyAssignedContext : public StepRepr_RepresentationContext
{
public:

//! Returns a GlobalUncertaintyAssignedContext
Standard_EXPORT StepRepr_GlobalUncertaintyAssignedContext();

//! Initializes the inherited id/type fields and the uncertainty array.
Standard_EXPORT void Init (const Handle(TCollection_HAsciiString)& aContextIdentifier, const Handle(TCollection_HAsciiString)& aContextType, const Handle(StepBasic_HArray1OfUncertaintyMeasureWithUnit)& aUncertainty);

//! Replaces the whole array of uncertainty measures.
Standard_EXPORT void SetUncertainty (const Handle(StepBasic_HArray1OfUncertaintyMeasureWithUnit)& aUncertainty);

//! Returns the whole array of uncertainty measures.
Standard_EXPORT Handle(StepBasic_HArray1OfUncertaintyMeasureWithUnit) Uncertainty() const;

//! Returns the uncertainty measure at 1-based index num.
Standard_EXPORT Handle(StepBasic_UncertaintyMeasureWithUnit) UncertaintyValue (const Standard_Integer num) const;

//! Returns the number of uncertainty measures.
Standard_EXPORT Standard_Integer NbUncertainty() const;

DEFINE_STANDARD_RTTIEXT(StepRepr_GlobalUncertaintyAssignedContext,StepRepr_RepresentationContext)

protected:

private:

Handle(StepBasic_HArray1OfUncertaintyMeasureWithUnit) uncertainty;

};
#endif // _StepRepr_GlobalUncertaintyAssignedContext_HeaderFile
|
hispindia/BIHAR-2.7 | dhis-2/dhis-services/dhis-service-datamart-default/src/main/java/org/hisp/dhis/datamart/aggregation/cache/MemoryAggregationCache.java | package org.hisp.dhis.datamart.aggregation.cache;
/*
* Copyright (c) 2004-2012, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodService;
import org.hisp.dhis.system.util.ConversionUtils;
/**
* @author <NAME>
*/
/**
 * In-memory, per-thread cache of period and organisation-unit lookups used
 * during data mart aggregation. Every cache is backed by a ThreadLocal, so
 * entries are visible only to the thread that created them and
 * {@link #clearCache()} clears only the calling thread's entries.
 *
 * @author <NAME>
 */
public class MemoryAggregationCache
implements AggregationCache
{
// Separates the two date strings inside a composite cache key.
private static final String SEPARATOR = "-";

// -------------------------------------------------------------------------
// Cache
// -------------------------------------------------------------------------

private final ThreadLocal<Map<String, Collection<Integer>>> intersectingPeriodCache = new ThreadLocal<Map<String,Collection<Integer>>>();

private final ThreadLocal<Map<String, Collection<Integer>>> periodBetweenCache = new ThreadLocal<Map<String,Collection<Integer>>>();

private final ThreadLocal<Map<String, Period>> periodCache = new ThreadLocal<Map<String,Period>>();

private final ThreadLocal<Map<String, Integer>> organisationUnitLevelCache = new ThreadLocal<Map<String, Integer>>();

// -------------------------------------------------------------------------
// Dependencies
// -------------------------------------------------------------------------

private OrganisationUnitService organisationUnitService;

public void setOrganisationUnitService( OrganisationUnitService organisationUnitService )
{
this.organisationUnitService = organisationUnitService;
}

private PeriodService periodService;

public void setPeriodService( PeriodService periodService )
{
this.periodService = periodService;
}

// -------------------------------------------------------------------------
// AggregationCache implementation
// -------------------------------------------------------------------------

/**
* Returns the identifiers of periods intersecting the given dates, caching
* the result per thread under a "start-end" key.
*/
public Collection<Integer> getIntersectingPeriods( final Date startDate, final Date endDate )
{
final String key = startDate.toString() + SEPARATOR + endDate.toString();

Map<String, Collection<Integer>> cache = intersectingPeriodCache.get();

Collection<Integer> periods = null;

// Cache hit: note the assignment inside the condition.
if ( cache != null && ( periods = cache.get( key ) ) != null )
{
return periods;
}

periods = ConversionUtils.getIdentifiers( Period.class, periodService.getIntersectingPeriods( startDate, endDate ) );

// Lazily create this thread's map on first miss.
cache = ( cache == null ) ? new HashMap<String, Collection<Integer>>() : cache;

cache.put( key, periods );

intersectingPeriodCache.set( cache );

return periods;
}

/**
* Returns the identifiers of periods between the given dates, caching the
* result per thread under a "start-end" key.
*/
public Collection<Integer> getPeriodsBetweenDates( final Date startDate, final Date endDate )
{
final String key = startDate.toString() + SEPARATOR + endDate.toString();

Map<String, Collection<Integer>> cache = periodBetweenCache.get();

Collection<Integer> periods = null;

if ( cache != null && ( periods = cache.get( key ) ) != null )
{
return periods;
}

periods = ConversionUtils.getIdentifiers( Period.class, periodService.getPeriodsBetweenDates( startDate, endDate ) );

cache = ( cache == null ) ? new HashMap<String, Collection<Integer>>() : cache;

cache.put( key, periods );

periodBetweenCache.set( cache );

return periods;
}

/** Returns the period with the given id, cached per thread. */
public Period getPeriod( final int id )
{
final String key = String.valueOf( id );

Map<String, Period> cache = periodCache.get();

Period period = null;

if ( cache != null && ( period = cache.get( key ) ) != null )
{
return period;
}

period = periodService.getPeriod( id );

cache = ( cache == null ) ? new HashMap<String, Period>() : cache;

cache.put( key, period );

periodCache.set( cache );

return period;
}

/** Returns the hierarchy level of the given organisation unit, cached per thread. */
public int getLevelOfOrganisationUnit( final int id )
{
final String key = String.valueOf( id );

Map<String, Integer> cache = organisationUnitLevelCache.get();

Integer level = null;

if ( cache != null && ( level = cache.get( key ) ) != null )
{
return level;
}

level = organisationUnitService.getLevelOfOrganisationUnit( id );

cache = ( cache == null ) ? new HashMap<String, Integer>() : cache;

cache.put( key, level );

organisationUnitLevelCache.set( cache );

return level;
}

/** Drops all cached entries for the calling thread only. */
public void clearCache()
{
intersectingPeriodCache.remove();
periodBetweenCache.remove();
periodCache.remove();
organisationUnitLevelCache.remove();
}
}
|
joaedwar/Tella-Android | mobile/src/main/java/rs/readahead/washington/mobile/domain/repository/IFeedbackRepository.java | <filename>mobile/src/main/java/rs/readahead/washington/mobile/domain/repository/IFeedbackRepository.java
package rs.readahead.washington.mobile.domain.repository;
import androidx.annotation.NonNull;
import io.reactivex.Completable;
import rs.readahead.washington.mobile.domain.entity.Feedback;
public interface IFeedbackRepository {
/**
* Sends the given feedback entry.
*
* @param feedback the feedback to deliver; must not be null
* @return a Completable that completes on success or signals an error otherwise
*/
Completable sendFeedback(@NonNull Feedback feedback);
}
|
amaurirg/Web2Py | parameters_8080.py | <gh_stars>0
password="<PASSWORD>(1<PASSWORD>,20,<PASSWORD>"
|
abin-coding/baidu | src/utils/test/tprinter_test.cc | // Copyright (c) 2015, Baidu.com, Inc. All Rights Reserved
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// HACK: redefine "private" so the tests below can inspect TPrinter's
// internal state (head_, body_) directly. Must precede the header include.
#define private public
#include "utils/tprinter.h"

#include <glog/logging.h>
#include <gtest/gtest.h>

namespace tera {

// Fixture doubles as a TPrinter instance configured with three columns:
// a string column ("No."), an int column and a double column.
class TPrinterTest : public ::testing::Test, public TPrinter {
public:
    TPrinterTest()
        : TPrinter(3, "No.", "year<int>", "avg<double>") {
    }
    ~TPrinterTest() {}
};

// "name<type>" specifiers parse into (name, type); a bare name is rejected.
TEST_F(TPrinterTest, ParseColType) {
    string item, name;
    CellType type;
    item = "hello<int>";
    EXPECT_TRUE(TPrinter::ParseColType(item, &name, &type));
    VLOG(5) << name << " " << type;
    EXPECT_EQ(name, "hello");
    EXPECT_EQ(type, INT);

    item = "hello";
    EXPECT_FALSE(TPrinter::ParseColType(item, &name, &type));
}

// Numbers are rendered with K/P suffixes and two decimals where needed.
TEST_F(TPrinterTest, NumToStr) {
    int64_t i = 100;
    ASSERT_EQ("100", NumToStr(i));
    ASSERT_EQ("0", NumToStr(0));
    ASSERT_EQ("10", NumToStr(10));
    ASSERT_EQ("10K", NumToStr(10000));
    ASSERT_EQ("10P", NumToStr(10000000000000000ll));
    ASSERT_EQ("12.34K", NumToStr(12344));
    ASSERT_EQ("10.11P", NumToStr(10110000000000000ll));
    ASSERT_EQ("1", NumToStr(1.0));
    ASSERT_EQ("1.23", NumToStr(1.23));
    ASSERT_EQ("1.20", NumToStr(1.2));
}

// Rows may be added as varargs or as homogeneous vectors; the column count
// must match the 3 configured columns or the call is rejected.
TEST_F(TPrinterTest, AddRow) {
    // test varargs row
    ASSERT_TRUE(AddRow(3, "1", 2013, 1.234));
    ASSERT_TRUE(AddRow(3, "2", 2014, 500.0));
    ASSERT_EQ(2, (int)body_.size());
    ASSERT_EQ(3, (int)body_[0].size());
    ASSERT_EQ(body_[0][0].type, STRING);
    ASSERT_EQ(body_[0][1].type, INT);
    ASSERT_EQ(body_[0][1].value.i, 2013);
    ASSERT_EQ(3, (int)body_[1].size());
    ASSERT_EQ(body_[1][2].type, DOUBLE);
    ASSERT_EQ(body_[1][2].value.d, 500);
    ASSERT_FALSE(AddRow(4, "2", 2014, 500.0));
    ASSERT_FALSE(AddRow(1, "2", 2014, 500.0));

    // test int vector row
    std::vector<int64_t> vi(3, 9);
    ASSERT_TRUE(AddRow(vi));
    ASSERT_EQ(body_[2][0].type, INT);
    ASSERT_EQ(body_[2][1].type, INT);
    ASSERT_EQ(body_[2][2].value.i, 9);
    vi.resize(1);
    ASSERT_FALSE(AddRow(vi));

    // test string vector row
    std::vector<string> vs(3, "hello");
    ASSERT_TRUE(AddRow(vs));
    ASSERT_EQ(body_[3][0].type, STRING);
    ASSERT_EQ(body_[3][1].type, STRING);
    ASSERT_EQ(*body_[3][2].value.s, "hello");
    vs.resize(5);
    ASSERT_FALSE(AddRow(vs));
}

// The fixture constructor's column spec is parsed into typed headers.
TEST_F(TPrinterTest, New) {
    ASSERT_EQ(3, (int)head_.size());
    ASSERT_EQ(STRING, head_[0].second);
    ASSERT_EQ(INT, head_[1].second);
    ASSERT_EQ(DOUBLE, head_[2].second);
}

// Smoke test only: the rendered table is logged, not asserted on.
TEST_F(TPrinterTest, ToString) {
    ASSERT_TRUE(AddRow(3, "1", 2013, 1.234));
    ASSERT_TRUE(AddRow(3, "2", 2014, 500));
    string outstr = ToString();
    LOG(ERROR) << outstr.size() << std::endl << outstr;
}

}  // namespace tera

int main(int argc, char** argv) {
    ::google::ParseCommandLineFlags(&argc, &argv, true);
    ::google::InitGoogleLogging(argv[0]);
    ::testing::InitGoogleTest(&argc, argv);
    return RUN_ALL_TESTS();
}
|
teimurjan/teimurjan.github.io.development | src/templates/markdown/index.js | import React from 'react'
import { Helmet } from 'react-helmet'
import PropTypes from 'prop-types'
import Layout from '../../components/layout'
import { Container } from '../../components/container/index.styles'
import { MarkdownTitle, MarkdownContent } from './index.styles'
const MarkdownPage = ({ pageContext }) => {
const { markdownRemark, site } = pageContext
const { frontmatter, html } = markdownRemark
const {
siteMetadata: { title: siteTitle }
} = site
const title =
frontmatter.title && frontmatter.title.length > 0
? frontmatter.title
: undefined
return (
<Layout>
{title && <Helmet title={`${siteTitle} - ${title}`} />}
<Container>
{title && <MarkdownTitle>{title}</MarkdownTitle>}
<MarkdownContent dangerouslySetInnerHTML={{ __html: html }} />
</Container>
</Layout>
)
}
MarkdownPage.propTypes = {
pageContext: PropTypes.shape({
markdownRemark: PropTypes.string,
site: PropTypes.shape({ siteMetadata: { title: PropTypes.string } })
}),
data: PropTypes.shape({
markdownRemark: PropTypes.shape({
frontmatter: PropTypes.shape({
title: PropTypes.string.isRequired
}),
html: PropTypes.string.isRequired
}),
site: PropTypes.shape({
siteMetadata: PropTypes.shape({
title: PropTypes.string.isRequired
})
})
}).isRequired
}
export default MarkdownPage
|
joeshannon/mockito | src/main/java/org/mockito/listeners/MockitoListener.java | /*
* Copyright (c) 2016 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockito.listeners;
/**
 * Marker interface for all types of Mockito listeners.
 * Intentionally declares no methods; concrete listener contracts extend it.
 * For more information, see {@link org.mockito.MockitoFramework#addListener(MockitoListener)}.
 */
public interface MockitoListener {}
|
pip-services3-go/pip-services3-expressions-go | tokenizers/TokenType.go | package tokenizers
// Types (categories) of tokens such as "number", "symbol" or "word".
const (
Unknown = iota // category could not be determined
Eof // end of the input stream
Eol // end of a line
Float // floating-point number literal
Integer // integer number literal
HexDecimal // hexadecimal number literal
Number // generic numeric token
Symbol // operator / punctuation symbol
Quoted // quoted string literal
Word // identifier-like word
Keyword // reserved word
Whitespace // run of whitespace characters
Comment // comment text
Special // special-purpose token
)
antonmedv/year | packages/2013/09/23/index.js | <gh_stars>1-10
// 23 September 2013 — JS Date months are zero-based, so 8 === September.
module.exports = new Date(2013, 8, 23)
|
tonypilz/global-instances-access | devel/examples/globalDependency.h | #pragma once
#include <src/globalInstances.h>
#include <iostream>
// Demonstrates two globally-registered instances that depend on each other:
// each constructor registers a callback via ifAvailable() that runs once the
// other instance becomes available, so neither construction order deadlocks.
namespace dependency {

struct A
{
    A();
    int val = 4;
};

struct B
{
    B();
    int val = 5;
};

// A wants B: the lambda runs immediately if B is already registered,
// otherwise it is deferred until B appears.
A::A(){
    global::instance<B>().ifAvailable([this](B& b){
        std::cout<<"a is using value of b:" << b.val << "\n";});
}

// B wants A, symmetrically.
B::B(){
    global::instance<A>().ifAvailable([this](A& a){
        std::cout<<"b is using value of a:" << a.val << "\n";});
}

void main_dependency(){

    global::Instance<A> a;
    global::Instance<B> b;  //prints: "a is using value of b:5"
                            //        "b is using value of a:4"
}

}
|
Exynos7580/android_hardware_samsung_slsi_exynos7580 | mobicore/ClientLib/include/mcUuid.h | <reponame>Exynos7580/android_hardware_samsung_slsi_exynos7580<gh_stars>1-10
/*
* Copyright (c) 2013-2015 TRUSTONIC LIMITED
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the TRUSTONIC LIMITED nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef MC_UUID_H_
#define MC_UUID_H_

/* _UNUSED suppresses "unused variable" warnings for the file-scope
 * constants below on GCC/Clang; it expands to nothing on Windows. */
#ifdef WIN32
#define _UNUSED
#else
#define _UNUSED __attribute__((unused))
#endif

#define UUID_TYPE

#define UUID_LENGTH 16

/** Universally Unique Identifier (UUID) according to ISO/IEC 11578. */
typedef struct {
uint8_t value[UUID_LENGTH]; /**< Value of the UUID. */
} mcUuid_t, *mcUuid_ptr;

/** UUID value used as free marker in service provider containers. */
#define MC_UUID_FREE_DEFINE \
{ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, \
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF }

static _UNUSED const mcUuid_t MC_UUID_FREE = {
MC_UUID_FREE_DEFINE
};

/** Reserved UUID (all zero bytes). */
#define MC_UUID_RESERVED_DEFINE \
{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, \
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }

static _UNUSED const mcUuid_t MC_UUID_RESERVED = {
MC_UUID_RESERVED_DEFINE
};

/** UUID for system applications. */
#define MC_UUID_SYSTEM_DEFINE \
{ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, \
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE }

static _UNUSED const mcUuid_t MC_UUID_SYSTEM = {
MC_UUID_SYSTEM_DEFINE
};

/** UUID of the runtime manager (RTM). */
#define MC_UUID_RTM_DEFINE \
{ 0x12, 0x34, 0x12, 0x34, 0x12, 0x34, 0x12, 0x34, \
0x12, 0x34, 0x12, 0x34, 0x12, 0x34, 0x12, 0x34 }

static _UNUSED const mcUuid_t MC_UUID_RTM = {
MC_UUID_RTM_DEFINE
};

/**
 * TODO: Replace with v5 UUID (milestone #3)
 */
#define LTA_UUID_DEFINE \
{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, \
0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}

#endif // MC_UUID_H_
|
pymma/drools47jdk8 | experimental/drools-testing/src/org/drools/testing/core/model/RuleFiredAssertion.java | package org.drools.testing.core.model;
public class RuleFiredAssertion implements IAssertion {

// Name of the rule whose firing behaviour is asserted.
private String ruleName;
// NOTE(review): presumably, when true, firing at least once satisfies the
// assertion regardless of the exact count — inferred from the name, confirm.
private boolean atLeastOnce = false;
// How many times the rule is expected to fire.
private int expectedFiredCount;
// How many times the rule actually fired.
private int actualFiredCount;
// Outcome of evaluating expected vs. actual.
private boolean success;
}
|
abel1502/mipt_1s | SoftLang/dsl/stmttypes.dsl.h | DEF_TYPE(Compound)
/* X-macro list of statement node kinds: the including file defines
 * DEF_TYPE(name) before #include-ing this list, which then expands once
 * per statement type. */
DEF_TYPE(Return)
DEF_TYPE(Loop)
DEF_TYPE(Cond)
DEF_TYPE(VarDecl)
DEF_TYPE(Expr)
DEF_TYPE(Empty)
|
CertainLach/padla | java-commons/src/main/java/ru/progrm_jarvis/javacommons/bytecode/package-info.java | /**
* Utilities related to JVM bytecode.
*/
package ru.progrm_jarvis.javacommons.bytecode;
|
// Warm the browser cache for a list of SVG icons so later renders show them
// instantly. `base` is the URL prefix of the icons directory (defaults to
// the current directory). A null/undefined list is a no-op.
export default function preloadIcons(iconsList, base = '.') {
    for (const name of iconsList ?? []) {
        const icon = new Image();
        icon.src = `${base}/icons/${name}.svg`;
    }
}
|
fioresoft/NO5 | NO5TL/containers.h | <reponame>fioresoft/NO5<filename>NO5TL/containers.h<gh_stars>1-10
// containers.h: interface for the containers class.
//
//////////////////////////////////////////////////////////////////////
#if !defined(AFX_CONTAINERS_H__1AB52C89_927C_11D7_A17A_F575EA193B2C__INCLUDED_)
#define AFX_CONTAINERS_H__1AB52C89_927C_11D7_A17A_F575EA193B2C__INCLUDED_
#if _MSC_VER > 1000
#pragma once
#endif // _MSC_VER > 1000
namespace NO5TL
{
// Simple singly-linked list.  Index-based operations are O(n); GetSize()
// walks the whole list.
template <class T>
class CList
{
    // One node: the stored value plus the link to the next node.
    struct ListItem
    {
        T m_t;
        ListItem *m_pNext;

        ListItem()
        {
            m_pNext = NULL;
        }
        ListItem(const T &t)
        {
            m_t = t;
            m_pNext = NULL;
        }
        T & operator *()
        {
            return m_t;
        }
        const T & operator *() const
        {
            return m_t;
        }
    };

    ListItem *m_pStart;  // head of the list; NULL when empty

    // Appends a default-constructed node at the tail and returns it.  O(n).
    ListItem * GetNext(void)
    {
        ListItem *next = m_pStart;
        if(!next){
            next = new ListItem();
            ATLASSERT(next);
            m_pStart = next;
            return next;
        }
        while(next->m_pNext){
            next = next->m_pNext;
        }
        next->m_pNext = new ListItem();
        ATLASSERT(next->m_pNext);
        return next->m_pNext;
    }
    // Returns the node at 0-based position `index`; `index` must be valid.
    ListItem * GetItem(int index)
    {
        int i;
        ListItem *next = m_pStart;
        ATLASSERT(index >= 0 && index < GetSize());
        for(i=0;i<index;i++){
            next = next->m_pNext;
        }
        return next;
    }
public:
    // FIX: the original `typedef ListItem iterator` exposed ListItem's
    // increment operators, which assigned to `this` -- ill-formed C++ that
    // fails to compile as soon as the operator is instantiated (and the
    // postfix form also returned a reference to a local).  A usable
    // iterator must wrap a node pointer, which is what this class does.
    class iterator
    {
        ListItem *m_pNode;  // current node; NULL == past-the-end
    public:
        iterator(ListItem *p = NULL) : m_pNode(p) {}
        T & operator *() { return m_pNode->m_t; }
        T * operator->() { return &m_pNode->m_t; }
        // prefix: advance, then return self
        iterator & operator++()
        {
            m_pNode = m_pNode->m_pNext;
            return *this;
        }
        // postfix: return the pre-increment position by value
        iterator operator++(int)
        {
            iterator old(m_pNode);
            m_pNode = m_pNode->m_pNext;
            return old;
        }
        bool operator==(const iterator &rhs) const { return m_pNode == rhs.m_pNode; }
        bool operator!=(const iterator &rhs) const { return m_pNode != rhs.m_pNode; }
    };
    iterator begin() { return iterator(m_pStart); }
    iterator end() { return iterator(NULL); }
public:
    CList(void)
    {
        m_pStart = NULL;
    }
    ~CList()
    {
        RemoveAll();
    }
    // Number of elements.  Walks the whole list, O(n).
    int GetSize(void) const
    {
        int count = 0;
        ListItem *next = m_pStart;
        while(next){
            next = next->m_pNext;
            count++;
        }
        return count;
    }
    // Appends a copy of `t` at the tail.
    void Add(const T &t)
    {
        ListItem *next = GetNext();
        next->m_t = t;
    }
    // Inserts a copy of `t` before position `index`;
    // index == GetSize() appends at the end.
    void Insert(int index, const T &t)
    {
        ListItem *item;
        ATLASSERT(index >= 0 && index <= GetSize());
        item = new ListItem(t);
        if(index == 0){
            item->m_pNext = m_pStart;
            m_pStart = item;
        }
        else{
            ListItem *prev = GetItem(index - 1);
            item->m_pNext = prev->m_pNext;
            prev->m_pNext = item;
        }
    }
    // Reference to the element at `index` (must be a valid index).
    T & GetAt(int index)
    {
        ListItem *p;
        ATLASSERT(index >= 0 && index < GetSize());
        p = GetItem(index);
        return p->m_t;
    }
    // Removes the element at `index` (must be a valid index).
    void RemoveAt(int index)
    {
        ListItem *tmp;
        ATLASSERT(index >= 0 && index < GetSize());
        if(index == 0){
            tmp = m_pStart;
            m_pStart = m_pStart->m_pNext;
            delete tmp;
        }
        else{
            ListItem *prev = GetItem(index - 1);
            tmp = prev->m_pNext;
            prev->m_pNext = tmp->m_pNext;
            delete tmp;
        }
    }
    // Deletes every element.
    void RemoveAll(void)
    {
        int count = GetSize();
        int i;
        for(i=0;i<count;i++)
            RemoveAt(0);
    }
    // Index of the first element equal to `t`, or -1 when absent.
    int Find(const T &t)
    {
        int i = 0;
        int res = -1;
        ListItem *next = m_pStart;
        while(next){
            if(next->m_t == t){
                res = i;
                break;
            }
            else{
                next = next->m_pNext;
                i++;
            }
        }
        return res;
    }
    // Removes the first element equal to `t`; returns whether one was found.
    bool Remove(const T &t)
    {
        int index = Find(t);
        bool res = false;
        if(index >= 0){
            RemoveAt(index);
            res = true;
        }
        return res;
    }
};
// Minimal holder for two heterogeneous values (a std::pair stand-in).
template <class T1, class T2>
struct CPair
{
    T1 first;
    T2 second;

    // Leaves both members default-constructed.
    CPair() {}

    // Copy-assigns both members from the supplied values.
    CPair(const T1 &a, const T2 &b)
    {
        first = a;
        second = b;
    }
};
// Associative container that allows duplicate keys.  Backed by a CList of
// (key, value) pairs, so lookups and removals are linear scans (and, since
// CList::GetAt is itself O(n), effectively quadratic for large maps).
template <class Key, class T>
class CMultiMap
{
public:
    typedef CPair<Key, T> value_type;
private:
    CList<value_type> m_lst;  // insertion-ordered storage
public:
    // Appends a (key, value) pair; duplicate keys are allowed.
    void Add(const Key &key, const T &t)
    {
        m_lst.Add(value_type(key, t));
    }
    // Reference to the pair at `index` (must be a valid index).
    value_type & GetAt(int index)
    {
        return m_lst.GetAt(index);
    }
    Key & GetKeyAt(int index)
    {
        value_type &res = m_lst.GetAt(index);
        return res.first;
    }
    T & GetValueAt(int index)
    {
        value_type &res = m_lst.GetAt(index);
        return res.second;
    }
    int GetSize(void) const
    {
        return m_lst.GetSize();
    }
    // Returns a copy of the value stored under the first matching key, or a
    // default-constructed T when the key is absent.
    // FIX: the original copied every visited pair into a local value_type
    // (one Key + one T copy per element, and it required both to be
    // default-constructible); iterate by reference instead.
    T Lookup(const Key &key)
    {
        int i;
        int size = m_lst.GetSize();
        for (i = 0; i < size; i++) {
            value_type &value = m_lst.GetAt(i);
            if (value.first == key)
                return value.second;
        }
        return T();
    }
    void RemoveAt(int index)
    {
        m_lst.RemoveAt(index);
    }
    // Removes the first pair whose key matches; no-op when the key is absent.
    // FIX: same needless per-element copy as Lookup, removed.
    void Remove(const Key &key)
    {
        int i;
        int size = m_lst.GetSize();
        for (i = 0; i < size; i++) {
            if (m_lst.GetAt(i).first == key)
                break;
        }
        if (i < size)
            m_lst.RemoveAt(i);
    }
    void RemoveAll()
    {
        m_lst.RemoveAll();
    }
};
} // NO5TL
#endif // !defined(AFX_CONTAINERS_H__1AB52C89_927C_11D7_A17A_F575EA193B2C__INCLUDED_)
|
Sweetist/cangaroo | app/interactors/cangaroo/perform_flow.rb | module Cangaroo
class PerformFlow
include Interactor::Organizer
organize ValidateJsonSchema,
CountJsonObject,
PerformJobs
end
end
|
otseobande/Book-A-Meal | client/src/components/PageNotFound/PageNotFound.js | import React from 'react';
import lost from '../../../assets/img/lost.svg';
import styles from './pagenotfound.scss';
// Stateless 404 view: a centered "lost" illustration with a short
// "Page not found" message.  Styling comes from pagenotfound.scss
// via CSS modules.
const PageNotFound = () => (
  <div className={styles.background}>
    <div className={styles.notFound}>
      <img
        src={lost}
        width="150"
        alt="not found"
        className={styles.image}
      />
      <p>Page not found</p>
    </div>
  </div>
);

export default PageNotFound;
|
natpenguin/neo4j-ogm | neo4j-ogm-tests/neo4j-ogm-integration-tests/src/test/java/org/neo4j/ogm/utils/RelationshipUtilsTest.java | <gh_stars>0
/*
* Copyright (c) 2002-2022 "Neo4j,"
* Neo4j Sweden AB [http://neo4j.com]
*
* This file is part of Neo4j.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.neo4j.ogm.utils;
import static org.assertj.core.api.Assertions.*;
import org.junit.Test;
/**
* @author <NAME>
*/
/**
 * Round-trip tests for {@link RelationshipUtils}: relationship types in
 * UPPER_SNAKE_CASE map to camelCase field/getter/setter names and back.
 */
public class RelationshipUtilsTest {

    // --- two-word relationship type -> member names -------------------

    @Test
    public void testFieldNameInferenceFromRelationshipType() {
        expect("writesPolicy", RelationshipUtils.inferFieldName("WRITES_POLICY"));
    }

    @Test
    public void testGetterNameInference() {
        expect("getWritesPolicy", RelationshipUtils.inferGetterName("WRITES_POLICY"));
    }

    @Test
    public void testSetterNameInference() {
        expect("setWritesPolicy", RelationshipUtils.inferSetterName("WRITES_POLICY"));
    }

    // --- member names -> two-word relationship type -------------------

    @Test
    public void testRelationshipTypeInferenceFromFieldName() {
        expect("WRITES_POLICY", RelationshipUtils.inferRelationshipType("writesPolicy"));
    }

    @Test
    public void testRelationshipTypeInferenceFromGetterName() {
        expect("WRITES_POLICY", RelationshipUtils.inferRelationshipType("getWritesPolicy"));
    }

    @Test
    public void testRelationshipTypeInferenceSetterName() {
        expect("WRITES_POLICY", RelationshipUtils.inferRelationshipType("setWritesPolicy"));
    }

    // --- single-word relationship type -> member names ----------------

    @Test
    public void testSimpleFieldNameInferenceFromRelationshipType() {
        expect("policy", RelationshipUtils.inferFieldName("POLICY"));
    }

    @Test
    public void testSimpleGetterNameInference() {
        expect("getPolicy", RelationshipUtils.inferGetterName("POLICY"));
    }

    @Test
    public void testSimpleSetterNameInference() {
        expect("setPolicy", RelationshipUtils.inferSetterName("POLICY"));
    }

    // --- member names -> single-word relationship type ----------------

    @Test
    public void testSimpleRelationshipTypeInferenceFromFieldName() {
        expect("POLICY", RelationshipUtils.inferRelationshipType("policy"));
    }

    @Test
    public void testSimpleRelationshipTypeInferenceFromGetterName() {
        expect("POLICY", RelationshipUtils.inferRelationshipType("getPolicy"));
    }

    @Test
    public void testSimpleRelationshipTypeInferenceSetterName() {
        expect("POLICY", RelationshipUtils.inferRelationshipType("setPolicy"));
    }

    // Asserts that `actual` equals `expected` (argument order mirrors JUnit).
    private void expect(String expected, String actual) {
        assertThat(actual).isEqualTo(expected);
    }
}
|
ford442/SDL2 | src/test/SDL_test_memory.c | <filename>src/test/SDL_test_memory.c
/*
Simple DirectMedia Layer
Copyright (C) 1997-2019 <NAME> <<EMAIL>>
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
#include "SDL_config.h"
#include "SDL_assert.h"
#include "SDL_stdinc.h"
#include "SDL_log.h"
#include "SDL_test_crc32.h"
#include "SDL_test_memory.h"
#ifdef HAVE_LIBUNWIND_H
#include <libunwind.h>
#endif
/* This is a simple tracking allocator to demonstrate the use of SDL's
memory allocation replacement functionality.
It gets slow with large numbers of allocations and shouldn't be used
for production code.
*/
/* One tracked allocation: the pointer, its size, and (when libunwind is
   available) up to 10 return addresses plus symbolised frame names captured
   at allocation time.  Entries chain into singly-linked hash buckets. */
typedef struct SDL_tracked_allocation{
    void *mem;
    size_t size;
    Uint64 stack[10];
    char stack_names[10][256];
    struct SDL_tracked_allocation *next;
}SDL_tracked_allocation;

static SDLTest_Crc32Context s_crc32_context;

/* Original allocator entry points, saved by SDLTest_TrackAllocations() so
   the tracked wrappers can delegate to them (and so the tracker's own
   bookkeeping allocations are never themselves tracked). */
static SDL_malloc_func SDL_malloc_orig=NULL;
static SDL_calloc_func SDL_calloc_orig=NULL;
static SDL_realloc_func SDL_realloc_orig=NULL;
static SDL_free_func SDL_free_orig=NULL;

/* Number of allocations that existed before tracking started; when non-zero,
   frees of unknown pointers are not treated as errors. */
static int s_previous_allocations=0;

/* Hash table of tracked allocations, bucketed by CRC32 of the pointer value.
   The size must remain a power of two (see get_allocation_bucket). */
static SDL_tracked_allocation *s_tracked_allocations[256];
/* Maps an allocation pointer to a bucket index by CRC32-hashing the pointer
   bytes and masking with the table size (which must be a power of two for
   the `& (size - 1)` mask to be a valid modulo). */
static unsigned int get_allocation_bucket(void *mem){
    CrcUint32 crc_value;
    unsigned int index;
    SDLTest_Crc32Calc(&s_crc32_context,(CrcUint8 * ) & mem,sizeof(mem),&crc_value);
    index=(crc_value & (SDL_arraysize(s_tracked_allocations)-1));
    return index;
}
static SDL_bool SDL_IsAllocationTracked(void *mem){
SDL_tracked_allocation *entry;
int index=get_allocation_bucket(mem);
for (entry=s_tracked_allocations[index]; entry; entry=entry->next){
if(mem == entry->mem){
return SDL_TRUE;
}
}
return SDL_FALSE;
}
/* Records `mem`/`size` in the tracking table, capturing a stack trace when
   libunwind is available.  Already-tracked pointers are left untouched;
   failure to allocate the bookkeeping entry silently skips tracking. */
static void SDL_TrackAllocation(void *mem,size_t size){
    SDL_tracked_allocation *entry;
    int index=get_allocation_bucket(mem);
    if(SDL_IsAllocationTracked(mem)){
        return;
    }
    /* Use the ORIGINAL malloc so the tracker never tracks (or recurses
       into) its own bookkeeping allocations. */
    entry=(SDL_tracked_allocation *) SDL_malloc_orig(sizeof(*entry));
    if(!entry){
        return;
    }
    entry->mem=mem;
    entry->size=size;
    /* Generate the stack trace for the allocation */
    SDL_zero(entry->stack);
#ifdef HAVE_LIBUNWIND_H
    {
        int stack_index;
        unw_cursor_t cursor;
        unw_context_t context;
        unw_getcontext(&context);
        unw_init_local(&cursor, &context);
        stack_index = 0;
        while (unw_step(&cursor) > 0) {
            unw_word_t offset, pc;
            char sym[256];
            unw_get_reg(&cursor, UNW_REG_IP, &pc);
            entry->stack[stack_index] = pc;
            /* Record "symbol+offset" when the frame can be symbolised. */
            if (unw_get_proc_name(&cursor, sym, sizeof(sym), &offset) == 0) {
                snprintf(entry->stack_names[stack_index], sizeof(entry->stack_names[stack_index]), "%s+0x%llx", sym, (unsigned long long)offset);
            }
            ++stack_index;
            if (stack_index == SDL_arraysize(entry->stack)) {
                break;
            }
        }
    }
#endif /* HAVE_LIBUNWIND_H */
    /* Push onto the front of the bucket's chain. */
    entry->next=s_tracked_allocations[index];
    s_tracked_allocations[index]=entry;
}
/* Removes `mem` from the tracking table and frees its bookkeeping entry.
   A no-op when the pointer is not tracked. */
static void SDL_UntrackAllocation(void *mem)
{
    SDL_tracked_allocation **link;
    int bucket = get_allocation_bucket(mem);

    /* Walk the chain via a pointer-to-link so head and interior removals
       are handled identically. */
    for (link = &s_tracked_allocations[bucket]; *link; link = &(*link)->next) {
        SDL_tracked_allocation *node = *link;
        if (node->mem == mem) {
            *link = node->next;
            SDL_free_orig(node);
            return;
        }
    }
}
/* malloc() replacement: delegate to the saved allocator, then record the
   block on success. */
static void *SDLCALL
SDLTest_TrackedMalloc(size_t size)
{
    void *mem = SDL_malloc_orig(size);

    if (mem) {
        SDL_TrackAllocation(mem, size);
    }
    return mem;
}
/* calloc() replacement: delegate to the saved allocator, then record the
   block (nmemb * size bytes) on success. */
static void *SDLCALL
SDLTest_TrackedCalloc(size_t nmemb, size_t size)
{
    void *mem = SDL_calloc_orig(nmemb, size);

    if (mem) {
        SDL_TrackAllocation(mem, nmemb * size);
    }
    return mem;
}
/* realloc() replacement: delegate to the saved allocator and keep the
   tracking table in sync. */
static void *SDLCALL
SDLTest_TrackedRealloc(void *ptr, size_t size)
{
    void *mem;

    SDL_assert(!ptr || SDL_IsAllocationTracked(ptr));
    mem = SDL_realloc_orig(ptr, size);
    if (mem) {
        /* FIX: the original only refreshed tracking when the block moved
           (mem != ptr), so an in-place grow/shrink left the stale size in
           the tracker and skewed SDLTest_LogAllocations() totals.
           Re-track on every successful realloc instead. */
        if (ptr) {
            SDL_UntrackAllocation(ptr);
        }
        SDL_TrackAllocation(mem, size);
    }
    return mem;
}
/* free() replacement: validate (when possible), untrack, then delegate to
   the saved deallocator.  free(NULL) is a no-op. */
static void SDLCALL
SDLTest_TrackedFree(void *ptr)
{
    if (ptr) {
        /* Pointers allocated before tracking started cannot be validated,
           so the assert is skipped when any such allocations exist. */
        if (s_previous_allocations == 0) {
            SDL_assert(SDL_IsAllocationTracked(ptr));
        }
        SDL_UntrackAllocation(ptr);
        SDL_free_orig(ptr);
    }
}
/* Installs the tracking wrappers around SDL's memory functions.
   Idempotent: returns immediately when tracking is already installed.
   Returns 0. */
int SDLTest_TrackAllocations(){
    if(SDL_malloc_orig){
        return 0;  /* already installed */
    }
    SDLTest_Crc32Init(&s_crc32_context);
    /* Allocations made before this point cannot be validated on free. */
    s_previous_allocations=SDL_GetNumAllocations();
    if(s_previous_allocations != 0){
        SDL_Log("SDLTest_TrackAllocations(): There are %d previous allocations, disabling free() validation",
                s_previous_allocations);
    }
    /* Save the current allocator BEFORE replacing it, so the wrappers have
       something to delegate to. */
    SDL_GetMemoryFunctions(&SDL_malloc_orig,
                           &SDL_calloc_orig,
                           &SDL_realloc_orig,
                           &SDL_free_orig);
    SDL_SetMemoryFunctions(SDLTest_TrackedMalloc,
                           SDLTest_TrackedCalloc,
                           SDLTest_TrackedRealloc,
                           SDLTest_TrackedFree);
    return 0;
}
/* Logs every still-tracked allocation (size plus captured stack frames)
   and a grand total via SDL_Log.  A no-op when tracking was never
   installed.  The report string is built with the ORIGINAL realloc so the
   report itself does not show up in the report. */
void SDLTest_LogAllocations(){
    char *message=NULL;
    size_t message_size=0;
    char line[128],*tmp;
    SDL_tracked_allocation *entry;
    int index,count,stack_index;
    Uint64 total_allocated;
    if(!SDL_malloc_orig){
        return;  /* tracking never installed */
    }
/* Appends `line` to the heap-grown `message` buffer; bails out of the
   whole function on allocation failure. */
#define ADD_LINE() \
    message_size += (SDL_strlen(line) + 1); \
    tmp = (char *)SDL_realloc_orig(message, message_size); \
    if (!tmp) { \
        return; \
    } \
    message = tmp; \
    SDL_strlcat(message, line, message_size)

    SDL_strlcpy(line,"Memory allocations:\n",sizeof(line));
    ADD_LINE();
    SDL_strlcpy(line,"Expect 2 allocations from within SDL_GetErrBuf()\n",sizeof(line));
    ADD_LINE();
    count=0;
    total_allocated=0;
    /* Walk every bucket chain of the tracking hash table. */
    for (index=0; index < SDL_arraysize(s_tracked_allocations); ++index){
        for (entry=s_tracked_allocations[index]; entry; entry=entry->next){
            SDL_snprintf(line,sizeof(line),"Allocation %d: %d bytes\n",count,(int) entry->size);
            ADD_LINE();
            /* Start at stack index 1 to skip our tracking functions */
            for (stack_index=1; stack_index < SDL_arraysize(entry->stack); ++stack_index){
                if(!entry->stack[stack_index]){
                    break;  /* end of the captured trace */
                }
                SDL_snprintf(line,sizeof(line),"\t0x%"
                             SDL_PRIx64
                             ": %s\n",entry->stack[stack_index],entry->stack_names[stack_index]);
                ADD_LINE();
            }
            total_allocated+=entry->size;
            ++count;
        }
    }
    SDL_snprintf(line,sizeof(line),"Total: %.2f Kb in %d allocations\n",(float) total_allocated / 1024,count);
    ADD_LINE();
#undef ADD_LINE
    SDL_Log("%s",message);
}
/* vi: set ts=4 sw=4 expandtab: */
|
mwwx/ignite | modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/schema/CacheTableImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.query.calcite.schema;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.function.Predicate;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.schema.Statistic;
import org.apache.calcite.schema.impl.AbstractTable;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.processors.query.calcite.exec.ExecutionContext;
import org.apache.ignite.internal.processors.query.calcite.exec.TableScan;
import org.apache.ignite.internal.processors.query.calcite.metadata.ColocationGroup;
import org.apache.ignite.internal.processors.query.calcite.prepare.MappingQueryContext;
import org.apache.ignite.internal.processors.query.calcite.rel.logical.IgniteLogicalTableScan;
import org.apache.ignite.internal.processors.query.calcite.trait.IgniteDistribution;
import org.apache.ignite.internal.processors.query.calcite.type.IgniteTypeFactory;
import org.apache.ignite.internal.processors.query.h2.IgniteH2Indexing;
import org.apache.ignite.internal.processors.query.stat.ObjectStatisticsImpl;
import org.apache.ignite.internal.processors.query.stat.StatisticsKey;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.jetbrains.annotations.Nullable;
/**
* Ignite cache-based table implementation.
*/
public class CacheTableImpl extends AbstractTable implements IgniteCacheTable {
    /** Descriptor providing row type, distribution and colocation info. */
    private final CacheTableDescriptor desc;

    /** Kernal context, used for statistics lookup and lazy cache start. */
    private final GridKernalContext ctx;

    /** Indexes registered on this table, keyed by index name. */
    private final Map<String, IgniteIndex> indexes = new ConcurrentHashMap<>();

    /** Whether an index rebuild is currently running for this table. */
    private volatile boolean idxRebuildInProgress;

    /**
     * @param ctx Kernal context.
     * @param desc Table descriptor.
     */
    public CacheTableImpl(GridKernalContext ctx, CacheTableDescriptor desc) {
        this.ctx = ctx;
        this.desc = desc;
    }

    /** {@inheritDoc} */
    @Override public RelDataType getRowType(RelDataTypeFactory typeFactory, ImmutableBitSet requiredColumns) {
        return desc.rowType((IgniteTypeFactory)typeFactory, requiredColumns);
    }

    /** {@inheritDoc} */
    @Override public Statistic getStatistic() {
        IgniteH2Indexing idx = (IgniteH2Indexing)ctx.query().getIndexing();

        final String tblName = desc.typeDescription().tableName();
        final String schemaName = desc.typeDescription().schemaName();

        // Prefer locally collected statistics; fall back to a
        // descriptor-based estimate when none have been gathered.
        ObjectStatisticsImpl statistics = (ObjectStatisticsImpl)idx.statsManager().getLocalStatistics(
            new StatisticsKey(schemaName, tblName));

        if (statistics != null)
            return new IgniteStatisticsImpl(statistics);

        return new IgniteStatisticsImpl(desc);
    }

    /** {@inheritDoc} */
    @Override public CacheTableDescriptor descriptor() {
        return desc;
    }

    /** {@inheritDoc} */
    @Override public IgniteLogicalTableScan toRel(
        RelOptCluster cluster,
        RelOptTable relOptTbl,
        @Nullable List<RexNode> proj,
        @Nullable RexNode cond,
        @Nullable ImmutableBitSet requiredColumns
    ) {
        return IgniteLogicalTableScan.create(cluster, cluster.traitSet(), relOptTbl, proj, cond, requiredColumns);
    }

    /** {@inheritDoc} */
    @Override public <Row> Iterable<Row> scan(
        ExecutionContext<Row> execCtx,
        ColocationGroup group,
        Predicate<Row> filter,
        Function<Row, Row> rowTransformer,
        @Nullable ImmutableBitSet usedColumns) {
        UUID localNodeId = execCtx.localNodeId();

        // Only scan partitions mapped to this node; other nodes produce
        // their own parts of the result.
        if (group.nodeIds().contains(localNodeId))
            return new TableScan<>(execCtx, desc, group.partitions(localNodeId), filter, rowTransformer, usedColumns);

        return Collections.emptyList();
    }

    /** {@inheritDoc} */
    @Override public IgniteDistribution distribution() {
        return desc.distribution();
    }

    /** {@inheritDoc} */
    @Override public ColocationGroup colocationGroup(MappingQueryContext ctx) {
        return desc.colocationGroup(ctx);
    }

    /** {@inheritDoc} */
    @Override public Map<String, IgniteIndex> indexes() {
        return Collections.unmodifiableMap(indexes);
    }

    /** {@inheritDoc} */
    @Override public void addIndex(IgniteIndex idxTbl) {
        indexes.put(idxTbl.name(), idxTbl);
    }

    /** {@inheritDoc} */
    @Override public IgniteIndex getIndex(String idxName) {
        return indexes.get(idxName);
    }

    /** {@inheritDoc} */
    @Override public void removeIndex(String idxName) {
        indexes.remove(idxName);
    }

    /** {@inheritDoc} */
    @Override public void markIndexRebuildInProgress(boolean mark) {
        idxRebuildInProgress = mark;
    }

    /** {@inheritDoc} */
    @Override public boolean isIndexRebuildInProgress() {
        return idxRebuildInProgress;
    }

    /** {@inheritDoc} */
    @Override public <C> C unwrap(Class<C> aCls) {
        if (aCls.isInstance(desc))
            return aCls.cast(desc);

        return super.unwrap(aCls);
    }

    /** {@inheritDoc} */
    @Override public void ensureCacheStarted() {
        // Lazily start the underlying cache if it has not been started yet.
        if (desc.cacheContext() == null) {
            try {
                ctx.cache().dynamicStartCache(null, desc.cacheInfo().config().getName(), null,
                    false, true, true).get();
            }
            catch (IgniteCheckedException ex) {
                throw U.convertException(ex);
            }
        }
    }

    /** {@inheritDoc} */
    @Override public boolean isModifiable() {
        return true;
    }
}
|
bogolla/mfl_api | chul/tests/test_models.py | from django.test import TestCase
from model_mommy import mommy
from ..models import (
CommunityHealthUnit,
CommunityHealthWorker,
CommunityHealthWorkerContact,
Status,
CommunityHealthUnitContact,
Approver,
CommunityHealthUnitApproval,
CommunityHealthWorkerApproval,
ApprovalStatus
)
class TestCommunityHealthUnit(TestCase):
    """Model-level smoke tests for CommunityHealthUnit."""

    def test_save(self):
        """A mommy-built instance should persist exactly one record."""
        mommy.make(CommunityHealthUnit)
        # assertEquals is a deprecated alias; assertEqual is canonical.
        self.assertEqual(1, CommunityHealthUnit.objects.count())

    def test_save_with_code(self):
        """An explicitly supplied code should not prevent saving."""
        mommy.make(CommunityHealthUnit, code='7800')
        self.assertEqual(1, CommunityHealthUnit.objects.count())
class TestCommunityHealthWorkerModel(TestCase):
    """Model-level smoke test for CommunityHealthWorker."""

    def test_save(self):
        """A worker with an explicit id_number should persist one record."""
        mommy.make(CommunityHealthWorker, id_number='12345678')
        # assertEquals is a deprecated alias; assertEqual is canonical.
        self.assertEqual(1, CommunityHealthWorker.objects.count())
class TestCommunityHealthWorkerContact(TestCase):
    """Model-level smoke test for CommunityHealthWorkerContact."""

    def test_save(self):
        """A mommy-built contact should persist exactly one record."""
        mommy.make(CommunityHealthWorkerContact)
        # assertEquals is a deprecated alias; assertEqual is canonical.
        self.assertEqual(1, CommunityHealthWorkerContact.objects.count())
class TestModels(TestCase):
    """Smoke test: every chul model can be built and saved via mommy."""

    def test_save(self):
        models = [
            CommunityHealthUnit, CommunityHealthWorker,
            CommunityHealthWorkerContact, Status,
            CommunityHealthUnitContact, Approver, CommunityHealthUnitApproval,
            CommunityHealthWorkerApproval, ApprovalStatus
        ]
        for model_cls in models:
            mommy.make(model_cls)
            # assertNotEquals is a deprecated alias; also ask the database
            # for a count instead of materialising every row via len().
            self.assertNotEqual(0, model_cls.objects.count())
|
vtfn/tolb | src/list/_internal/slice.js | <reponame>vtfn/tolb
/* eslint-disable no-param-reassign */
// Returns the elements of `list` from `start` (inclusive) to `end`
// (exclusive).  A nullish `end` means "to the end of the list"; negative
// indices count back from the end.  Strings delegate to String#slice;
// anything array-like is copied into a fresh Array.
export default function slice(start, end, list) {
  const size = list.length;

  if (end == null) end = size;
  if (typeof list === 'string') return list.slice(start, end);

  // Clamp both bounds into [0, size].
  if (end < 0) end = size + end;
  if (end > size) end = size;
  if (start < 0) start = size + start;
  if (start < 0) start = 0;

  const out = new Array(Math.max(0, end - start));
  let k = 0;
  for (let i = start; i < end; i += 1) {
    out[k] = list[i];
    k += 1;
  }
  return out;
}
|
laserhenry/iStartalk-ios | libqimcommon-ios/QIMCommon/QIMKit/QIMKit+QIMMiddleVirtualAccountManager.h | <filename>libqimcommon-ios/QIMCommon/QIMKit/QIMKit+QIMMiddleVirtualAccountManager.h
//
// QIMKit+QIMMiddleVirtualAccountManager.h
// QIMCommon
//
// StarTalk IMSDK-iOS
//
// Created by Lucas on 11/29/18.
// Copyright © 2018 QIM. All rights reserved.
// Copyright © 2021 StarTalk Limited. All rights reserved.
//
#import "STKit.h"
NS_ASSUME_NONNULL_BEGIN

@interface STKit (QIMMiddleVirtualAccountManager)

// Returns the known middle virtual accounts (element type is not declared
// here -- TODO confirm against the category implementation).
- (NSArray *)getMiddleVirtualAccounts;

// YES when `jid` identifies a middle virtual account.
- (BOOL)isMiddleVirtualAccountWithJid:(NSString *)jid;

@end

NS_ASSUME_NONNULL_END
|
shishuihao/third-party-api | third-party-api-push/src/main/java/cn/shishuihao/thirdparty/api/push/properties/package-info.java | /**
* push properties.
*
* @author shishuihao
* @version 1.0.0
*/
package cn.shishuihao.thirdparty.api.push.properties;
|
lenxin/spring-security | web/src/main/java/org/springframework/security/web/jackson2/DefaultSavedRequestMixin.java | package org.springframework.security.web.jackson2;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import org.springframework.security.web.savedrequest.DefaultSavedRequest;
/**
 * Jackson mixin class to serialize/deserialize {@link DefaultSavedRequest}. This mixin
 * uses {@link org.springframework.security.web.savedrequest.DefaultSavedRequest.Builder}
 * to deserialize JSON. In order to use this mixin class you also need to register
 * {@link CookieMixin}.
 * <p>
 * <pre>
 *     ObjectMapper mapper = new ObjectMapper();
 *     mapper.registerModule(new WebServletJackson2Module());
 * </pre>
 *
 * @author <NAME>
 * @see WebServletJackson2Module
 * @see org.springframework.security.jackson2.SecurityJackson2Modules
 * @since 4.2
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY)
@JsonDeserialize(builder = DefaultSavedRequest.Builder.class)
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY, getterVisibility = JsonAutoDetect.Visibility.NONE)
abstract class DefaultSavedRequestMixin {
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.