answer
stringlengths
17
10.2M
package com.conveyal.taui.models;

import com.conveyal.r5.analyst.scenario.StopSpec;
import com.conveyal.taui.AnalysisServerException;
import com.vividsolutions.jts.geom.Coordinate;
import org.geotools.geometry.jts.JTS;
import org.geotools.referencing.crs.DefaultGeographicCRS;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.TransformException;

import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
import java.util.stream.Collectors;

/**
 * A stop along a modified or added trip pattern, retaining its distance from the start of the
 * alignment so that hop (inter-stop travel) times can be derived later.
 */
class ModificationStop {
    /**
     * If the last auto-created stop on a segment lands closer than this fraction of the stop
     * spacing to a real stop at the segment end, it is discarded in favor of the real stop.
     */
    private static final double MIN_SPACING_PERCENTAGE = 0.25;

    /** Fallback speed, in km/h, used for a hop with no explicitly supplied segment speed. */
    private static final int DEFAULT_SEGMENT_SPEED = 15;

    private Coordinate coordinate;
    // GTFS stop id for stops snapped to real stops; null for auto-created (spacing-generated) stops.
    private String id;
    // Distance in meters along the alignment from the start of the first segment.
    private double distanceFromStart;

    private ModificationStop(Coordinate c, String id, double distanceFromStart) {
        this.coordinate = c;
        this.id = id;
        this.distanceFromStart = distanceFromStart;
    }

    /**
     * Create the StopSpec types required by r5.
     *
     * @param stops stops produced by {@link #getStopsFromSegments}
     * @return specs referencing an existing stop by id where one is set, otherwise a new stop at
     *         the stop's coordinate
     */
    static List<StopSpec> toSpec (List<ModificationStop> stops) {
        return stops
            .stream()
            .map(s -> {
                if (s.id == null) {
                    return new StopSpec(s.coordinate.x, s.coordinate.y);
                } else {
                    return new StopSpec(s.id);
                }
            })
            .collect(Collectors.toList());
    }

    /**
     * Walk the alignment segments, snapping explicit stops at segment ends and auto-creating stops
     * at each segment's requested spacing. We don't just use {@code StopSpec}s here because we need
     * to keep the {@code distanceFromStart} for generating hop times.
     *
     * @param segments Modification segments
     * @return stops in alignment order, with distances measured from the start of the first segment
     */
    static List<ModificationStop> getStopsFromSegments (List<Segment> segments) {
        // A Stack so the most recently created stop can be peeked/popped when a real stop at a
        // segment end supersedes a too-close auto-created one.
        Stack<ModificationStop> stops = new Stack<>();
        CoordinateReferenceSystem crs = DefaultGeographicCRS.WGS84;

        if (segments == null || segments.isEmpty()) {
            return new ArrayList<>();
        }

        Segment firstSegment = segments.get(0);
        if (firstSegment.stopAtStart) {
            stops.add(new ModificationStop(firstSegment.geometry.getCoordinates()[0], firstSegment.fromStopId, 0));
        }

        double distanceToLastStop = 0;
        double distanceToLineSegmentStart = 0;
        for (Segment segment : segments) {
            Coordinate[] coords = segment.geometry.getCoordinates();
            int spacing = segment.spacing; // meters between auto-created stops; 0 disables them
            for (int i = 1; i < coords.length; i++) {
                Coordinate c0 = coords[i - 1];
                Coordinate c1 = coords[i];
                double distanceThisLineSegment;
                try {
                    // Geodesic (great-circle) length of this line segment, in meters.
                    distanceThisLineSegment = JTS.orthodromicDistance(c0, c1, crs);
                } catch (TransformException e) {
                    throw AnalysisServerException.Unknown(e.getMessage());
                }

                if (spacing > 0) {
                    // Auto-created stops: emit one every `spacing` meters along this line segment.
                    while (distanceToLastStop + spacing < distanceToLineSegmentStart + distanceThisLineSegment) {
                        double frac = (distanceToLastStop + spacing - distanceToLineSegmentStart) / distanceThisLineSegment;
                        if (frac < 0) frac = 0;
                        Coordinate c = new Coordinate(c0.x + (c1.x - c0.x) * frac, c0.y + (c1.y - c0.y) * frac);

                        // We can't just add segment.spacing because of converting negative fractions to zero above.
                        // This can happen when the last segment did not have automatic stop creation, or had a larger
                        // spacing. TODO in the latter case, we probably want to continue to apply the spacing from the
                        // last line segment until we create a new stop?
                        distanceToLastStop = distanceToLineSegmentStart + frac * distanceThisLineSegment;

                        // Add the auto-created stop without an id.
                        stops.add(new ModificationStop(c, null, distanceToLastStop));
                    }
                }

                distanceToLineSegmentStart += distanceThisLineSegment;
            }

            if (segment.stopAtEnd) {
                // If the last auto-generated stop was too close to this real stop, pop it.
                // (The spacing > 0 guard is new but behavior-preserving: with spacing == 0 the
                // floating-point division used to yield Infinity/NaN, so the pop never fired.)
                if (spacing > 0 && !stops.isEmpty()) {
                    ModificationStop lastStop = stops.peek();
                    if (lastStop.id == null
                            && (distanceToLineSegmentStart - distanceToLastStop) / spacing < MIN_SPACING_PERCENTAGE) {
                        stops.pop();
                    }
                }

                Coordinate endCoord = coords[coords.length - 1];
                ModificationStop toStop = new ModificationStop(endCoord, segment.toStopId, distanceToLineSegmentStart);
                stops.add(toStop);
            }
            // Restart spacing measurement at each segment boundary.
            distanceToLastStop = distanceToLineSegmentStart;
        }

        return new ArrayList<>(stops);
    }

    /**
     * Expand per-real-stop dwell times to one dwell time per stop (auto-created stops get the
     * default).
     *
     * @param stops           stops in alignment order
     * @param dwellTimes      per-real-stop dwell times in seconds; null entries and stops beyond
     *                        the array's length fall back to the default
     * @param defaultDwellTime dwell time in seconds applied where no specific value exists
     * @return one dwell time (seconds) per stop, aligned with {@code stops}
     */
    static int[] getDwellTimes (List<ModificationStop> stops, Integer[] dwellTimes, int defaultDwellTime) {
        if (stops == null || stops.isEmpty()) {
            return new int[0];
        }

        int[] stopDwellTimes = new int[stops.size()];

        // Index into dwellTimes; advances only on "real" (id-bearing) stops.
        int realStopIndex = 0;
        for (int i = 0; i < stops.size(); i++) {
            String id = stops.get(i).id;
            if (id == null || dwellTimes == null || dwellTimes.length <= realStopIndex) {
                stopDwellTimes[i] = defaultDwellTime;
            } else {
                Integer specificDwellTime = dwellTimes[realStopIndex];
                stopDwellTimes[i] = specificDwellTime != null ? specificDwellTime : defaultDwellTime;
                realStopIndex++;
            }
        }

        return stopDwellTimes;
    }

    /**
     * Compute hop (inter-stop travel) times from stop distances and per-segment speeds.
     *
     * @param stops         stops in alignment order, with distances in meters
     * @param segmentSpeeds speed in km/h for each inter-real-stop segment; hops beyond the array's
     *                      length use {@link #DEFAULT_SEGMENT_SPEED}
     * @return one travel time in seconds per consecutive stop pair
     */
    static int[] getHopTimes (List<ModificationStop> stops, int[] segmentSpeeds) {
        if (stops == null || stops.size() < 2) {
            return new int[0];
        }

        int[] hopTimes = new int[stops.size() - 1];

        ModificationStop lastStop = stops.get(0);
        // Index into segmentSpeeds; advances only when a real (id-bearing) stop is reached.
        int realStopIndex = 0;
        for (int i = 0; i < hopTimes.length; i++) {
            ModificationStop stop = stops.get(i + 1);
            double hopDistance = stop.distanceFromStart - lastStop.distanceFromStart;
            int segmentSpeed = segmentSpeeds.length > realStopIndex ? segmentSpeeds[realStopIndex] : DEFAULT_SEGMENT_SPEED;
            // km/h -> m/h via * 1000, then hours -> seconds via * 3600.
            // BUG FIX: was "* 3000", which understated every hop time by a factor of 5/6.
            // NOTE(review): a segmentSpeed of 0 would yield Integer.MAX_VALUE here — presumably
            // upstream validation prevents zero speeds; confirm.
            hopTimes[i] = (int) (hopDistance / (segmentSpeed * 1000) * 3600);
            if (stop.id != null) {
                realStopIndex++;
            }
            lastStop = stop;
        }

        return hopTimes;
    }
}
package com.centurylink.cloud.sdk.servers.services;

import com.centurylink.cloud.sdk.common.management.client.QueueClient;
import com.centurylink.cloud.sdk.common.management.services.domain.queue.future.OperationFuture;
import com.centurylink.cloud.sdk.common.management.services.domain.queue.future.job.JobFuture;
import com.centurylink.cloud.sdk.common.management.services.domain.queue.future.job.NoWaitingJobFuture;
import com.centurylink.cloud.sdk.common.management.services.domain.queue.future.job.ParallelJobsFuture;
import com.centurylink.cloud.sdk.common.management.services.domain.queue.future.job.SequentialJobsFuture;
import com.centurylink.cloud.sdk.core.client.domain.Link;
import com.centurylink.cloud.sdk.core.services.QueryService;
import com.centurylink.cloud.sdk.servers.client.ServerClient;
import com.centurylink.cloud.sdk.servers.client.domain.ip.PublicIpMetadata;
import com.centurylink.cloud.sdk.servers.client.domain.server.BaseServerResponse;
import com.centurylink.cloud.sdk.servers.client.domain.server.CreateSnapshotRequest;
import com.centurylink.cloud.sdk.servers.client.domain.server.IpAddress;
import com.centurylink.cloud.sdk.servers.client.domain.server.RestoreServerRequest;
import com.centurylink.cloud.sdk.servers.client.domain.server.metadata.ServerMetadata;
import com.centurylink.cloud.sdk.servers.services.domain.group.filters.GroupFilter;
import com.centurylink.cloud.sdk.servers.services.domain.group.refs.Group;
import com.centurylink.cloud.sdk.servers.services.domain.ip.CreatePublicIpConfig;
import com.centurylink.cloud.sdk.servers.services.domain.ip.ModifyPublicIpConfig;
import com.centurylink.cloud.sdk.servers.services.domain.ip.PublicIpConverter;
import com.centurylink.cloud.sdk.servers.services.domain.server.CreateServerConfig;
import com.centurylink.cloud.sdk.servers.services.domain.server.ServerConverter;
import com.centurylink.cloud.sdk.servers.services.domain.server.filters.ServerFilter;
import com.centurylink.cloud.sdk.servers.services.domain.server.future.CreateServerJobFuture;
import com.centurylink.cloud.sdk.servers.services.domain.server.refs.Server;
import com.centurylink.cloud.sdk.servers.services.domain.server.refs.ServerByIdRef;
import com.google.inject.Inject;

import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;

import static com.centurylink.cloud.sdk.core.function.Predicates.*;
import static com.centurylink.cloud.sdk.core.services.filter.Filters.nullable;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.stream.Collectors.toList;

/**
 * Service for server lifecycle management: create/delete, power operations, snapshots,
 * restore/archive, and public IP management. All long-running API calls are wrapped in
 * {@link OperationFuture}s backed by the queue client.
 *
 * @author ilya.drabenia
 */
public class ServerService implements QueryService<Server, ServerFilter, ServerMetadata> {
    private final ServerConverter serverConverter;
    private final GroupService groupService;
    private final ServerClient client;
    private final QueueClient queueClient;
    private final PublicIpConverter publicIpConverter;

    @Inject
    public ServerService(ServerConverter serverConverter, ServerClient client, QueueClient queueClient,
                         GroupService groupService, PublicIpConverter publicIpConverter) {
        this.serverConverter = serverConverter;
        this.client = client;
        this.queueClient = queueClient;
        this.groupService = groupService;
        this.publicIpConverter = publicIpConverter;
    }

    /**
     * Create a new server, then (as a sequential follow-up job) attach a public IP if the
     * config requests one.
     *
     * @param command server creation config
     * @return OperationFuture wrapper for the created server's metadata
     */
    public OperationFuture<ServerMetadata> create(CreateServerConfig command) {
        BaseServerResponse response = client
            .create(serverConverter.buildCreateServerRequest(command));

        ServerMetadata serverInfo = client
            .findServerByUuid(response.findServerUuid());

        return new OperationFuture<>(
            serverInfo,
            new SequentialJobsFuture(
                () -> new CreateServerJobFuture(response.findStatusId(), serverInfo.getId(), queueClient, client),
                () -> addPublicIpIfNeeded(command, serverInfo)
            )
        );
    }

    /** Attach a public IP to a freshly created server, or return a no-op job when none is configured. */
    private JobFuture addPublicIpIfNeeded(CreateServerConfig command, ServerMetadata serverInfo) {
        if (command.getNetwork().getPublicIpConfig() != null) {
            return addPublicIp(
                serverInfo.asRefById(),
                command.getNetwork().getPublicIpConfig()
            )
            .jobFuture();
        } else {
            return new NoWaitingJobFuture();
        }
    }

    /**
     * Delete existing server
     * @param server server reference
     * @return OperationFuture wrapper for ServerRef
     */
    public OperationFuture<Server> delete(Server server) {
        BaseServerResponse response = client.delete(idByRef(server));

        return new OperationFuture<>(
            server,
            response.findStatusId(),
            queueClient
        );
    }

    /**
     * Delete existing servers
     * @param servers the array of servers to delete
     * @return OperationFuture wrapper for list of ServerRef
     */
    public OperationFuture<List<Server>> delete(Server... servers) {
        List<JobFuture> futures = Arrays.stream(servers)
            .map(server -> delete(server).jobFuture())
            .collect(toList());

        return new OperationFuture<>(
            Arrays.asList(servers),
            new ParallelJobsFuture(futures)
        );
    }

    /**
     * Delete existing servers
     * @param filter server filter object
     * @return OperationFuture wrapper for list of ServerRef
     */
    public OperationFuture<List<Server>> delete(ServerFilter filter) {
        List<Server> serverRefs = find(filter).stream()
            .map(ServerMetadata::asRefById)
            .collect(toList());

        return delete(serverRefs.toArray(new Server[0]));
    }

    /** Resolve a server reference to its id, querying the API only when the ref isn't id-based. */
    String idByRef(Server ref) {
        if (ref.is(ServerByIdRef.class)) {
            return ref.as(ServerByIdRef.class).getId();
        } else {
            return findByRef(ref).getId();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Stream<ServerMetadata> findLazy(ServerFilter filter) {
        return filter
            .applyFindLazy(serverFilter -> {
                // Fast path: when the only criterion is a list of server ids, fetch them directly
                // instead of enumerating groups.
                if (isAlwaysTruePredicate(serverFilter.getPredicate())
                        && isAlwaysTruePredicate(serverFilter.getGroupFilter().getPredicate())
                        && isAlwaysTruePredicate(serverFilter.getGroupFilter().getDataCenterFilter().getPredicate())
                        && serverFilter.getServerIds().size() > 0) {
                    return serverFilter
                        .getServerIds()
                        .stream()
                        .map(nullable(client::findServerById))
                        .filter(notNull());
                } else {
                    return groupService
                        .findLazy(serverFilter.getGroupFilter())
                        .flatMap(group -> group.getServers().stream())
                        .filter(serverFilter.getPredicate())
                        .filter((serverFilter.getServerIds().size() > 0)
                            ? combine(ServerMetadata::getId, in(serverFilter.getServerIds()))
                            : alwaysTrue()
                        );
                }
            });
    }

    /**
     * Power on a single server or group of servers
     *
     * @param serverRefs server references list
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> powerOn(Server... serverRefs) {
        return powerOperationResponse(
            client.powerOn(ids(serverRefs))
        );
    }

    /**
     * Power on a single server or group of servers
     *
     * @param serverFilter search servers criteria
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> powerOn(ServerFilter serverFilter) {
        return powerOperationResponse(
            client.powerOn(ids(serverFilter))
        );
    }

    /**
     * Power off a single server or group of servers
     *
     * @param serverRefs server references list
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> powerOff(Server... serverRefs) {
        return powerOperationResponse(
            client.powerOff(ids(serverRefs))
        );
    }

    /**
     * Power off a single server or group of servers
     *
     * @param serverFilter search servers criteria
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> powerOff(ServerFilter serverFilter) {
        return powerOperationResponse(
            client.powerOff(ids(serverFilter))
        );
    }

    /**
     * Start maintenance mode on a single server or group of servers
     *
     * @param serverRefs server references list
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> startMaintenance(Server... serverRefs) {
        return powerOperationResponse(
            client.startMaintenance(ids(serverRefs))
        );
    }

    /**
     * Start maintenance mode on a single server or group of servers
     *
     * @param serverFilter search servers criteria
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> startMaintenance(ServerFilter serverFilter) {
        return powerOperationResponse(
            client.startMaintenance(ids(serverFilter))
        );
    }

    /**
     * Stop maintenance mode on a single server or group of servers
     *
     * @param serverRefs server references list
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> stopMaintenance(Server... serverRefs) {
        return powerOperationResponse(
            client.stopMaintenance(ids(serverRefs))
        );
    }

    /**
     * Stop maintenance mode on a single server or group of servers
     *
     * @param serverFilter search servers criteria
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> stopMaintenance(ServerFilter serverFilter) {
        return powerOperationResponse(
            client.stopMaintenance(ids(serverFilter))
        );
    }

    /**
     * Pause a single server or group of servers
     *
     * @param serverRefs server references list
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> pause(Server... serverRefs) {
        return powerOperationResponse(
            client.pause(ids(serverRefs))
        );
    }

    /**
     * Pause a single server or group of servers
     *
     * @param serverFilter search servers criteria
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> pause(ServerFilter serverFilter) {
        return powerOperationResponse(
            client.pause(ids(serverFilter))
        );
    }

    /**
     * Reboot a single server or group of servers
     *
     * @param serverRefs server references list
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> reboot(Server... serverRefs) {
        return powerOperationResponse(
            client.reboot(ids(serverRefs))
        );
    }

    /**
     * Reboot a single server or group of servers
     *
     * @param serverFilter search servers criteria
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> reboot(ServerFilter serverFilter) {
        return powerOperationResponse(
            client.reboot(ids(serverFilter))
        );
    }

    /**
     * Reset a single server or group of servers
     *
     * @param serverRefs server references list
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> reset(Server... serverRefs) {
        return powerOperationResponse(
            client.reset(ids(serverRefs))
        );
    }

    /**
     * Reset a single server or group of servers
     *
     * @param serverFilter search servers criteria
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> reset(ServerFilter serverFilter) {
        return powerOperationResponse(
            client.reset(ids(serverFilter))
        );
    }

    /**
     * Shut down a single server or group of servers
     *
     * @param serverRefs server references list
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> shutDown(Server... serverRefs) {
        return powerOperationResponse(
            client.shutDown(ids(serverRefs))
        );
    }

    /**
     * Shut down a single server or group of servers
     *
     * @param serverFilter search servers criteria
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> shutDown(ServerFilter serverFilter) {
        return powerOperationResponse(
            client.shutDown(ids(serverFilter))
        );
    }

    /**
     * Archive a single server or group of servers
     *
     * @param serverRefs server references list
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> archive(Server... serverRefs) {
        return powerOperationResponse(
            client.archive(ids(serverRefs))
        );
    }

    /**
     * Archive a single server or group of servers
     *
     * @param serverFilter search servers criteria
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> archive(ServerFilter serverFilter) {
        return powerOperationResponse(
            client.archive(ids(serverFilter))
        );
    }

    /**
     * Create snapshot of a single server or group of servers
     *
     * @param expirationDays expiration days (must be between 1 and 10)
     * @param serverRefs server references list
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> createSnapshot(Integer expirationDays, Server... serverRefs) {
        return powerOperationResponse(
            client.createSnapshot(
                new CreateSnapshotRequest()
                    .snapshotExpirationDays(expirationDays)
                    .serverIds(ids(serverRefs))
            )
        );
    }

    /**
     * Create snapshot of a single server or group of servers
     *
     * @param expirationDays expiration days (must be between 1 and 10)
     * @param serverFilter search servers criteria
     * @return OperationFuture wrapper for BaseServerResponse list
     */
    public OperationFuture<List<BaseServerResponse>> createSnapshot(Integer expirationDays, ServerFilter serverFilter) {
        return powerOperationResponse(
            client.createSnapshot(
                new CreateSnapshotRequest()
                    .snapshotExpirationDays(expirationDays)
                    .serverIds(ids(serverFilter))
            )
        );
    }

    /**
     * Delete all snapshots for provided servers
     * @param servers server references
     * @return OperationFuture wrapper for list of ServerRef
     */
    public OperationFuture<List<Server>> deleteSnapshot(Server... servers) {
        List<Server> serverList = Arrays.asList(servers);
        List<JobFuture> futures = serverList.stream()
            .map(this::findByRef)
            .flatMap(metadata -> metadata.getDetails().getSnapshots().stream())
            .map(snapshot ->
                baseServerResponse(
                    client.deleteSnapshot(snapshot.getServerId(), snapshot.getId()))
                .jobFuture())
            .collect(toList());

        return new OperationFuture<>(
            serverList,
            new ParallelJobsFuture(futures)
        );
    }

    /**
     * Delete all snapshots for server criteria
     * @param serverFilter search servers criteria
     * @return OperationFuture wrapper for list of ServerRef
     */
    public OperationFuture<List<Server>> deleteSnapshot(ServerFilter serverFilter) {
        return deleteSnapshot(getRefsFromFilter(serverFilter));
    }

    /**
     * Restore a given archived server to a specified group
     *
     * @param server server reference
     * @param group group reference
     * @return OperationFuture wrapper for BaseServerResponse
     */
    public OperationFuture<Link> restore(Server server, Group group) {
        return baseServerResponse(
            restore(server, groupService.findByRef(group).getId())
        );
    }

    private Link restore(Server server, String groupId) {
        return client.restore(
            idByRef(server),
            new RestoreServerRequest()
                .targetGroupId(groupId)
        );
    }

    /**
     * Restore a group of archived servers to a specified group
     * @param servers servers references
     * @return OperationFuture wrapper for list of ServerRef
     */
    OperationFuture<List<Server>> restore(String groupId, Server... servers) {
        return restore(Arrays.asList(servers), groupId);
    }

    /**
     * Restore a list of archived servers to a specified group
     * @param serverList server List references
     * @return OperationFuture wrapper for list of ServerRef
     */
    OperationFuture<List<Server>> restore(List<Server> serverList, String groupId) {
        List<JobFuture> futures = serverList.stream()
            .map(server ->
                baseServerResponse(
                    client.restore(
                        idByRef(server),
                        new RestoreServerRequest()
                            .targetGroupId(groupId))
                )
                .jobFuture()
            )
            .collect(toList());

        return new OperationFuture<>(
            serverList,
            new ParallelJobsFuture(futures)
        );
    }

    /**
     * Revert a set of servers to snapshot
     * @param servers server references
     * @return OperationFuture wrapper for list of ServerRef
     */
    OperationFuture<List<Server>> revertToSnapshot(Server... servers) {
        List<Server> serverList = Arrays.asList(servers);
        List<JobFuture> futures = serverList.stream()
            .map(this::findByRef)
            .flatMap(metadata -> metadata.getDetails().getSnapshots().stream())
            .map(snapshot ->
                baseServerResponse(
                    client.revertToSnapshot(snapshot.getServerId(), snapshot.getId()))
                .jobFuture())
            .collect(toList());

        return new OperationFuture<>(
            serverList,
            new ParallelJobsFuture(futures)
        );
    }

    /**
     * Revert a set of servers to snapshot
     * @param filter search servers criteria
     * @return OperationFuture wrapper for list of ServerRef
     */
    public OperationFuture<List<Server>> revertToSnapshot(ServerFilter filter) {
        return revertToSnapshot(getRefsFromFilter(filter));
    }

    /** Resolve references to upper-cased server ids, silently dropping null refs. */
    private List<String> ids(Server... serverRefs) {
        return Stream
            .of(serverRefs)
            .filter(notNull())
            .map(this::idByRef)
            .map(String::toUpperCase)
            .collect(toList());
    }

    /** Resolve a server filter to upper-cased ids of all matching servers. */
    public List<String> ids(ServerFilter serverFilter) {
        List<ServerMetadata> serverMetadataList = find(serverFilter);

        return serverMetadataList
            .stream()
            .filter(notNull())
            .map(ServerMetadata::getId)
            .map(String::toUpperCase)
            .collect(toList());
    }

    /** Resolve a group filter to upper-cased ids of all servers in matching groups. */
    public List<String> ids(GroupFilter groupFilter) {
        return ids(
            new ServerFilter()
                .groupsWhere(groupFilter)
        );
    }

    /** Resolve group references to upper-cased ids of all servers in those groups. */
    public List<String> ids(Group... groups) {
        return ids(
            new ServerFilter()
                .groups(groups)
        );
    }

    /**
     * Add public IP to server
     *
     * @param serverRef server reference
     * @param publicIpConfig publicIp config
     * @return OperationFuture wrapper for ServerRef
     */
    public OperationFuture<Server> addPublicIp(Server serverRef, CreatePublicIpConfig publicIpConfig) {
        Link response = client.addPublicIp(idByRef(serverRef),
            publicIpConverter.createPublicIpRequest(publicIpConfig));

        return new OperationFuture<>(
            serverRef,
            response.getId(),
            queueClient
        );
    }

    /**
     * Modify ALL existing public IPs on server
     * @param server server reference
     * @param config publicIp config
     * @return OperationFuture wrapper for ServerRef
     */
    public OperationFuture<Server> modifyPublicIp(Server server, ModifyPublicIpConfig config) {
        checkNotNull(config, "PublicIpConfig must be not null");
        List<IpAddress> ipAddresses = findByRef(server).getDetails().getIpAddresses();

        List<String> responseIds = ipAddresses.stream()
            .map(IpAddress::getPublicIp)
            .filter(notNull())
            .map(ipAddress -> client.modifyPublicIp(idByRef(server), ipAddress,
                publicIpConverter.createPublicIpRequest(config)))
            .map(Link::getId)
            .collect(toList());

        return new OperationFuture<>(
            server,
            responseIds,
            queueClient
        );
    }

    /**
     * Modify provided public IP on server
     * @param server server reference
     * @param publicIp public ip
     * @param config publicIp config
     * @return OperationFuture wrapper for ServerRef
     */
    public OperationFuture<Server> modifyPublicIp(Server server, String publicIp, ModifyPublicIpConfig config) {
        checkNotNull(config, "PublicIpConfig must be not null");
        checkNotNull(publicIp, "public ip must not be null");

        Link response = client.modifyPublicIp(idByRef(server), publicIp,
            publicIpConverter.createPublicIpRequest(config)
        );

        return new OperationFuture<>(
            server,
            response.getId(),
            queueClient
        );
    }

    /**
     * Modify ALL existing public IPs on servers
     * @param servers The list of server references
     * @param config publicIp config
     * @return OperationFuture wrapper for list of ServerRef
     */
    public OperationFuture<List<Server>> modifyPublicIp(List<Server> servers, ModifyPublicIpConfig config) {
        List<JobFuture> futures = servers.stream()
            .map(serverRef -> modifyPublicIp(serverRef, config).jobFuture())
            .collect(toList());

        return new OperationFuture<>(
            servers,
            new ParallelJobsFuture(futures)
        );
    }

    /**
     * Modify existing public IP on servers
     * @param filter The server filter object
     * @param config publicIp config
     * @return OperationFuture wrapper for list of ServerRef
     */
    public OperationFuture<List<Server>> modifyPublicIp(ServerFilter filter, ModifyPublicIpConfig config) {
        return modifyPublicIp(Arrays.asList(getRefsFromFilter(filter)), config);
    }

    // NOTE(review): this honors only the filter's explicit server ids and ignores all other
    // criteria (predicates, group filters), unlike delete(ServerFilter) which runs a full find().
    // Callers of deleteSnapshot/revertToSnapshot/modifyPublicIp with non-id criteria will match
    // nothing — confirm this asymmetry is intended.
    Server[] getRefsFromFilter(ServerFilter filter) {
        List<Server> serverRefs = filter.getServerIds().stream()
            .map(Server::refById)
            .collect(toList());

        return serverRefs.toArray(new Server[0]);
    }

    /**
     * Get public IP object
     *
     * @param serverRef server reference
     * @param publicIp existing public IP address
     * @return public IP response object
     */
    public PublicIpMetadata getPublicIp(Server serverRef, String publicIp) {
        return client.getPublicIp(idByRef(serverRef), publicIp);
    }

    /**
     * Get list public IPs for provided server reference {@code server}
     *
     * @param server server reference
     * @return list public IPs
     */
    public List<PublicIpMetadata> findPublicIp(Server server) {
        List<IpAddress> ipAddresses = findByRef(server).getDetails().getIpAddresses();

        return ipAddresses.stream()
            .map(IpAddress::getPublicIp)
            .filter(notNull())
            .map(address -> getPublicIp(server, address))
            .collect(toList());
    }

    /**
     * Remove public IP from server
     *
     * @param serverRef server reference
     * @param ipAddress existing public IP address
     * @return OperationFuture wrapper for ServerRef
     */
    public OperationFuture<Server> removePublicIp(Server serverRef, String ipAddress) {
        checkNotNull(ipAddress, "ipAddress must be not null");
        Link response = client.removePublicIp(idByRef(serverRef), ipAddress);

        return new OperationFuture<>(
            serverRef,
            response.getId(),
            queueClient
        );
    }

    /**
     * Remove all public IPs from server
     *
     * @param serverRef server reference
     * @return server reference
     */
    public OperationFuture<Server> removePublicIp(Server serverRef) {
        ServerMetadata serverMetadata = findByRef(serverRef);
        List<JobFuture> jobFutures = serverMetadata.getDetails().getIpAddresses()
            .stream()
            .map(IpAddress::getPublicIp)
            .filter(notNull())
            .map(address -> removePublicIp(serverRef, address).jobFuture())
            .collect(toList());

        return new OperationFuture<>(
            serverRef,
            new ParallelJobsFuture(jobFutures)
        );
    }

    /** Wrap a batch power-operation response, tracking all non-null per-server status ids. */
    public OperationFuture<List<BaseServerResponse>> powerOperationResponse(List<BaseServerResponse> apiResponse) {
        return new OperationFuture<>(
            apiResponse,
            apiResponse
                .stream()
                .filter(notNull())
                .map(BaseServerResponse::findStatusId)
                .collect(toList()),
            queueClient
        );
    }

    /** Wrap a single link response in an OperationFuture tracked by the link's id. */
    private OperationFuture<Link> baseServerResponse(Link response) {
        return new OperationFuture<>(
            response,
            response.getId(),
            queueClient
        );
    }
}
package com.foomoo.awf.controllers;

import com.foomoo.awf.pojo.*;
import com.foomoo.awf.processors.ReferralSubmitter;
import com.foomoo.awf.validators.ReferralValidator;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.view.RedirectView;

import javax.servlet.http.HttpSession;
import javax.validation.Valid;
import java.time.Duration;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Controller serving the referral form: displays, saves, clears, and submits a {@link Referral}
 * whose identity is tracked through the client's {@link HttpSession}.
 */
@Controller
@RequestMapping("/")
public class MainController {

    /** Session attribute key under which the current {@link Referral}'s id is stored. */
    private static final String REFERRAL_ID_SESSION_ATTRIBUTE = "AWFREFERRAL_ID";

    /** How long a client's session is kept alive between requests. */
    private static final Duration SESSION_DURATION = Duration.ofDays(30);

    private final ReferralSubmitter referralSubmitter;
    private final ReferralRepository referralRepository;

    public MainController(final ReferralSubmitter referralSubmitter, final ReferralRepository referralRepository) {
        this.referralSubmitter = referralSubmitter;
        this.referralRepository = referralRepository;
    }

    /** Expose every {@link ApplicableCircumstance} value to the form templates. */
    @ModelAttribute("allApplicableCircumstances")
    public List<ApplicableCircumstance> populateApplicableCicumstances() {
        return Arrays.asList(ApplicableCircumstance.values());
    }

    /** Expose every {@link Gender} value to the form templates. */
    @ModelAttribute("allGenders")
    public List<Gender> populateGenders() {
        return Arrays.asList(Gender.values());
    }

    /**
     * Render the referral form for the client's current {@link Referral}, creating a blank one if
     * the session holds none. Also refreshes the session's inactivity timeout.
     *
     * @param model   model to render the view with; receives the referral.
     * @param session session identifying the client's referral.
     * @return the referral form view name.
     */
    @GetMapping
    public String getReferralFormView(final Model model, final HttpSession session) {
        session.setMaxInactiveInterval((int) SESSION_DURATION.getSeconds());
        model.addAttribute(getOrCreateReferralForSession(session));
        return "form";
    }

    /**
     * Replace the client's stored {@link Referral} with an empty one.
     *
     * @param session the client's session.
     * @return redirect back to the referral form.
     */
    @PostMapping(params = "action=clear")
    public RedirectView clearReferral(final HttpSession session) {
        setReferralOnSession(session, new Referral());
        return new RedirectView("");
    }

    /**
     * Persist the submitted {@link Referral} against the client's session without validating it.
     *
     * @param referral the referral to store.
     * @param session  the client's session.
     * @return redirect back to the referral form.
     */
    @PostMapping(params = "action=save")
    public RedirectView saveReferral(final Referral referral, final HttpSession session) {
        setReferralOnSession(session, referral);
        return new RedirectView("");
    }

    /**
     * Validate and submit the {@link Referral}. An invalid referral re-renders the form; a valid
     * one is handed to the {@link ReferralSubmitter} (with any non-empty uploads) and the stored
     * referral is reset. Either way the referral is persisted to the session first so it can be
     * retrieved later.
     *
     * @param referral      the referral to submit.
     * @param bindingResult validation outcome for the referral.
     * @param file1         first optional attachment.
     * @param file2         second optional attachment.
     * @param session       the client's session.
     * @return the completion view on success, otherwise the form view.
     */
    @PostMapping(params = "action=submit")
    public String checkReferral(@Valid final Referral referral, final BindingResult bindingResult,
                                @RequestParam("file1") final MultipartFile file1,
                                @RequestParam("file2") final MultipartFile file2,
                                final HttpSession session) {
        // Store regardless of validity so the user never loses their draft.
        setReferralOnSession(session, referral);

        if (bindingResult.hasErrors()) {
            return "form";
        }

        final List<MultipartFile> attachments = Stream.of(file1, file2)
                .filter(file -> !file.isEmpty())
                .collect(Collectors.toList());
        referralSubmitter.submit(referral, attachments);

        // Submission done — reset the session's referral to a blank one.
        setReferralOnSession(session, new Referral());
        return "thanks";
    }

    /**
     * Store a test-data-populated {@link Referral} against the client's session.
     *
     * @param session the client's session.
     * @return redirect back to the referral form.
     */
    @GetMapping("test")
    public RedirectView populateTestData(final HttpSession session) {
        final Referral testReferral = new Referral();
        ReferralPopulator.populateReferral(testReferral);
        setReferralOnSession(session, testReferral);
        return new RedirectView("");
    }

    /** Register the custom {@link ReferralValidator} with this controller's data binder. */
    @InitBinder
    protected void initBinder(final WebDataBinder webDataBinder) {
        webDataBinder.addValidators(new ReferralValidator());
    }

    /**
     * Look up the {@link Referral} identified by the session, falling back to a fresh blank one
     * when the session carries no id or the repository no longer has the record.
     *
     * @param session the session to resolve a referral for.
     * @return the persisted referral, or a new blank referral.
     */
    private Referral getOrCreateReferralForSession(final HttpSession session) {
        final Object storedId = session.getAttribute(REFERRAL_ID_SESSION_ATTRIBUTE);
        if (storedId == null) {
            // No referral id known for this session yet; nothing to load.
            return new Referral();
        }
        final Referral persisted = referralRepository.findOne((UUID) storedId);
        return persisted != null ? persisted : new Referral();
    }

    /**
     * Persist the referral under the session's referral id, minting and storing a new id when the
     * session has none yet.
     *
     * @param session  the client's session.
     * @param referral the referral to persist.
     */
    private void setReferralOnSession(final HttpSession session, final Referral referral) {
        UUID referralId = (UUID) session.getAttribute(REFERRAL_ID_SESSION_ATTRIBUTE);
        if (referralId == null) {
            referralId = UUID.randomUUID();
            session.setAttribute(REFERRAL_ID_SESSION_ATTRIBUTE, referralId);
        }
        referral.setId(referralId);
        referralRepository.save(referral);
    }
}
package org.cleartk.token.tokenizer.chunk;

import static org.junit.Assert.assertEquals;

import java.util.Arrays;

import org.apache.uima.UIMAException;
import org.apache.uima.UimaContext;
import org.cleartk.chunker.ChunkLabeler_ImplBase;
import org.cleartk.chunker.Chunker;
import org.cleartk.chunker.DefaultChunkLabeler;
import org.cleartk.token.TokenTestBase;
import org.cleartk.token.type.Sentence;
import org.cleartk.token.type.Subtoken;
import org.cleartk.token.type.Token;
import org.cleartk.type.test.Lemma;
import org.junit.Test;
import org.uimafit.factory.AnnotationFactory;
import org.uimafit.factory.UimaContextFactory;
import org.uimafit.factory.initializable.InitializableFactory;
import org.uimafit.util.JCasUtil;

/**
 * Tests for {@link DefaultChunkLabeler} round-tripping between chunk annotations
 * (here: {@link Token}) and BIO-style labels on smaller labeled annotations
 * (here: {@link Subtoken}).  The fixture sentence is an Albert Einstein quote of
 * exactly 79 characters; all offsets below index into that string.
 */
public class ChunkTokenizerLabelerTest extends TokenTestBase{

	/**
	 * these tests represent what would typically happen when the InstanceConsumer is ClassifierAnnotator
	 * (i.e. labels are assigned by hand via setLabel and then labels2Chunks builds Token chunks).
	 * @throws UIMAException
	 */
	@Test
	public void testClassifierAnnotator() throws UIMAException {
		// Chunk type = Token, labeled (inner) type = Subtoken.
		UimaContext context = UimaContextFactory.createUimaContext(
				ChunkLabeler_ImplBase.PARAM_CHUNK_ANNOTATION_CLASS_NAME, Token.class.getName(),
				Chunker.PARAM_LABELED_ANNOTATION_CLASS_NAME, Subtoken.class.getName());
		DefaultChunkLabeler defaultChunkLabeler = InitializableFactory.create(context, DefaultChunkLabeler.class);
		jCas.setDocumentText("Technological progress is like an axe in the hands of a pathological criminal."); //Albert Einstein
		Sentence sentence = new Sentence(jCas, 0, 78);
		sentence.addToIndexes();
		// One Subtoken per word (plus the final period at 77..78).
		Subtoken subtoken1 = AnnotationFactory.createAnnotation(jCas, 0, 13, Subtoken.class); //Technological
		Subtoken subtoken2 = AnnotationFactory.createAnnotation(jCas, 14, 22, Subtoken.class); //progress
		Subtoken subtoken3 = AnnotationFactory.createAnnotation(jCas, 23, 25, Subtoken.class); // is
		Subtoken subtoken4 = AnnotationFactory.createAnnotation(jCas, 26, 30, Subtoken.class); //like
		Subtoken subtoken5 = AnnotationFactory.createAnnotation(jCas, 31, 33, Subtoken.class); // an
		Subtoken subtoken6 = AnnotationFactory.createAnnotation(jCas, 34, 37, Subtoken.class); //axe
		Subtoken subtoken7 = AnnotationFactory.createAnnotation(jCas, 38, 40, Subtoken.class); // in
		Subtoken subtoken8 = AnnotationFactory.createAnnotation(jCas, 41, 44, Subtoken.class); //the
		Subtoken subtoken9 = AnnotationFactory.createAnnotation(jCas, 45, 50, Subtoken.class); //hands
		Subtoken subtoken10 = AnnotationFactory.createAnnotation(jCas, 51, 53, Subtoken.class); // of
		Subtoken subtoken11 = AnnotationFactory.createAnnotation(jCas, 54, 55, Subtoken.class); // a
		Subtoken subtoken12 = AnnotationFactory.createAnnotation(jCas, 56, 68, Subtoken.class); //pathological
		Subtoken subtoken13 = AnnotationFactory.createAnnotation(jCas, 69, 77, Subtoken.class); //criminal
		Subtoken subtoken14 = AnnotationFactory.createAnnotation(jCas, 77, 78, Subtoken.class); // .
		// With no Token chunks in the CAS yet, every subtoken must label as OUTSIDE.
		defaultChunkLabeler.chunks2Labels(jCas, sentence);
		assertEquals(ChunkLabeler_ImplBase.OUTSIDE_LABEL, defaultChunkLabeler.getLabel(subtoken1));
		assertEquals(ChunkLabeler_ImplBase.OUTSIDE_LABEL, defaultChunkLabeler.getLabel(subtoken2));
		assertEquals(ChunkLabeler_ImplBase.OUTSIDE_LABEL, defaultChunkLabeler.getLabel(subtoken3));
		assertEquals(ChunkLabeler_ImplBase.OUTSIDE_LABEL, defaultChunkLabeler.getLabel(subtoken4));
		assertEquals(ChunkLabeler_ImplBase.OUTSIDE_LABEL, defaultChunkLabeler.getLabel(subtoken5));
		assertEquals(ChunkLabeler_ImplBase.OUTSIDE_LABEL, defaultChunkLabeler.getLabel(subtoken6));
		assertEquals(ChunkLabeler_ImplBase.OUTSIDE_LABEL, defaultChunkLabeler.getLabel(subtoken7));
		// Hand-assign BIO labels.  Note the deliberately irregular sequence (an
		// I- label with no preceding B-, etc.) to exercise labels2Chunks' tolerance.
		defaultChunkLabeler.setLabel(subtoken1, "B-Token"); //begin Technological
		defaultChunkLabeler.setLabel(subtoken2, ChunkLabeler_ImplBase.OUTSIDE_LABEL);
		defaultChunkLabeler.setLabel(subtoken3, "I-Token");//begin is
		defaultChunkLabeler.setLabel(subtoken4, "B-Token");//begin like
		defaultChunkLabeler.setLabel(subtoken5, "B-Token");//begin an axe in
		defaultChunkLabeler.setLabel(subtoken6, "I-Token");
		defaultChunkLabeler.setLabel(subtoken7, "I-Token");
		defaultChunkLabeler.setLabel(subtoken8, ChunkLabeler_ImplBase.OUTSIDE_LABEL);
		defaultChunkLabeler.setLabel(subtoken9, "I-Token");//begin hands of
		defaultChunkLabeler.setLabel(subtoken10, "I-Token");
		defaultChunkLabeler.setLabel(subtoken11, "B-Token");//begin a
		defaultChunkLabeler.setLabel(subtoken12, "B-Token");//begin pathological
		defaultChunkLabeler.setLabel(subtoken13, ChunkLabeler_ImplBase.OUTSIDE_LABEL);
		defaultChunkLabeler.setLabel(subtoken14, "I-Token");
		// labels2Chunks should produce 8 Token chunks spanning the labeled runs.
		defaultChunkLabeler.labels2Chunks(jCas, sentence);
		Token token = JCasUtil.selectByIndex(jCas, Token.class, 0);
		assertEquals("Technological", token.getCoveredText());
		token = JCasUtil.selectByIndex(jCas, Token.class, 1);
		assertEquals("is", token.getCoveredText());
		token = JCasUtil.selectByIndex(jCas, Token.class, 2);
		assertEquals("like", token.getCoveredText());
		token = JCasUtil.selectByIndex(jCas, Token.class, 3);
		assertEquals("an axe in", token.getCoveredText());
		token = JCasUtil.selectByIndex(jCas, Token.class, 4);
		assertEquals("hands of", token.getCoveredText());
		token = JCasUtil.selectByIndex(jCas, Token.class, 5);
		assertEquals("a", token.getCoveredText());
		token = JCasUtil.selectByIndex(jCas, Token.class, 6);
		assertEquals("pathological", token.getCoveredText());
		token = JCasUtil.selectByIndex(jCas, Token.class, 7);
		assertEquals(".", token.getCoveredText());
	}

	/**
	 * these tests represent what would typically happen when the InstanceConsumer is a DataWriter
	 * (i.e. Token chunks already exist in the CAS and chunks2Labels derives BIO labels from them).
	 * @throws UIMAException
	 */
	@Test
	public void testDataWriter() throws UIMAException {
		UimaContext context = UimaContextFactory.createUimaContext(
				ChunkLabeler_ImplBase.PARAM_CHUNK_ANNOTATION_CLASS_NAME, Token.class.getName(),
				Chunker.PARAM_LABELED_ANNOTATION_CLASS_NAME, Subtoken.class.getName());
		DefaultChunkLabeler defaultChunkLabeler = InitializableFactory.create(context, DefaultChunkLabeler.class);
		jCas.setDocumentText("Technological progress is like an axe in the hands of a pathological criminal."); //Albert Einstein
		Sentence sentence = new Sentence(jCas, 0, 78);
		sentence.addToIndexes();
		// Same per-word Subtoken layout as testClassifierAnnotator.
		Subtoken subtoken1 = AnnotationFactory.createAnnotation(jCas, 0, 13, Subtoken.class); //Technological
		Subtoken subtoken2 = AnnotationFactory.createAnnotation(jCas, 14, 22, Subtoken.class); //progress
		Subtoken subtoken3 = AnnotationFactory.createAnnotation(jCas, 23, 25, Subtoken.class); // is
		Subtoken subtoken4 = AnnotationFactory.createAnnotation(jCas, 26, 30, Subtoken.class); //like
		Subtoken subtoken5 = AnnotationFactory.createAnnotation(jCas, 31, 33, Subtoken.class); // an
		Subtoken subtoken6 = AnnotationFactory.createAnnotation(jCas, 34, 37, Subtoken.class); //axe
		Subtoken subtoken7 = AnnotationFactory.createAnnotation(jCas, 38, 40, Subtoken.class); // in
		Subtoken subtoken8 = AnnotationFactory.createAnnotation(jCas, 41, 44, Subtoken.class); //the
		Subtoken subtoken9 = AnnotationFactory.createAnnotation(jCas, 45, 50, Subtoken.class); //hands
		Subtoken subtoken10 = AnnotationFactory.createAnnotation(jCas, 51, 53, Subtoken.class); // of
		Subtoken subtoken11 = AnnotationFactory.createAnnotation(jCas, 54, 55, Subtoken.class); // a
		Subtoken subtoken12 = AnnotationFactory.createAnnotation(jCas, 56, 68, Subtoken.class); //pathological
		Subtoken subtoken13 = AnnotationFactory.createAnnotation(jCas, 69, 77, Subtoken.class); //criminal
		Subtoken subtoken14 = AnnotationFactory.createAnnotation(jCas, 77, 78, Subtoken.class); // .
		// Pre-existing Token chunks; some span multiple subtokens ("an axe in", "hands of").
		AnnotationFactory.createAnnotation(jCas, 0, 13, Token.class); //Technological
		AnnotationFactory.createAnnotation(jCas, 23, 25, Token.class); // is
		AnnotationFactory.createAnnotation(jCas, 26, 30, Token.class); //like
		AnnotationFactory.createAnnotation(jCas, 31, 40, Token.class); //an axe in
		AnnotationFactory.createAnnotation(jCas, 45, 53, Token.class); //hands of
		AnnotationFactory.createAnnotation(jCas, 54, 55, Token.class); // a
		AnnotationFactory.createAnnotation(jCas, 56, 68, Token.class); //pathological
		AnnotationFactory.createAnnotation(jCas, 77, 78, Token.class); // .
		defaultChunkLabeler.chunks2Labels(jCas, sentence);
		// Each chunk's first subtoken gets B-, continuations get I-, uncovered get OUTSIDE.
		assertEquals("B-Token", defaultChunkLabeler.getLabel(subtoken1));
		assertEquals(ChunkLabeler_ImplBase.OUTSIDE_LABEL, defaultChunkLabeler.getLabel(subtoken2));
		assertEquals("B-Token", defaultChunkLabeler.getLabel(subtoken3));
		assertEquals("B-Token", defaultChunkLabeler.getLabel(subtoken4));
		assertEquals("B-Token", defaultChunkLabeler.getLabel(subtoken5));
		assertEquals("I-Token", defaultChunkLabeler.getLabel(subtoken6));
		assertEquals("I-Token", defaultChunkLabeler.getLabel(subtoken7));
		assertEquals(ChunkLabeler_ImplBase.OUTSIDE_LABEL, defaultChunkLabeler.getLabel(subtoken8));
		assertEquals("B-Token", defaultChunkLabeler.getLabel(subtoken9));
		assertEquals("I-Token", defaultChunkLabeler.getLabel(subtoken10));
		assertEquals("B-Token", defaultChunkLabeler.getLabel(subtoken11));
		assertEquals("B-Token", defaultChunkLabeler.getLabel(subtoken12));
		assertEquals(ChunkLabeler_ImplBase.OUTSIDE_LABEL, defaultChunkLabeler.getLabel(subtoken13));
		assertEquals("B-Token", defaultChunkLabeler.getLabel(subtoken14));
	}

	/**
	 * Verifies that the chunk label can be read from / written to an arbitrary string
	 * feature ("value" on {@link Lemma}) via PARAM_CHUNK_LABEL_FEATURE_NAME, and that
	 * createChunk builds a chunk covering the given labeled annotations.
	 * @throws UIMAException
	 */
	@Test
	public void testGetChunkLabel() throws UIMAException {
		UimaContext context = UimaContextFactory.createUimaContext(
				ChunkLabeler_ImplBase.PARAM_CHUNK_ANNOTATION_CLASS_NAME, Lemma.class.getName(),
				DefaultChunkLabeler.PARAM_CHUNK_LABEL_FEATURE_NAME, "value",
				Chunker.PARAM_LABELED_ANNOTATION_CLASS_NAME, Token.class.getName());
		DefaultChunkLabeler defaultChunkLabeler = InitializableFactory.create(context, DefaultChunkLabeler.class);
		jCas.setDocumentText("Technological progress is like an axe in the hands of a pathological criminal."); //Albert Einstein
		Lemma lemma = AnnotationFactory.createAnnotation(jCas, 0, 22, Lemma.class);
		lemma.setValue("THEME");
		assertEquals("THEME", defaultChunkLabeler.getChunkLabel(jCas, lemma));
		// NOTE(review): begin 56 > end 13 here, yet the comment says "Technological"
		// (which is 0..13) — looks like a typo in the original test; kept as-is.
		AnnotationFactory.createAnnotation(jCas, 56, 13, Token.class); //Technological
		AnnotationFactory.createAnnotation(jCas, 23, 77, Token.class);
		Token token1 = AnnotationFactory.createAnnotation(jCas, 56, 68, Token.class); //pathological
		Token token2 = AnnotationFactory.createAnnotation(jCas, 69, 77, Token.class); //criminal
		// createChunk should produce a Lemma spanning token1..token2 with value "blue".
		defaultChunkLabeler.createChunk(jCas, Arrays.asList(token1, token2), "blue");
		lemma = JCasUtil.selectByIndex(jCas, Lemma.class, 1);
		assertEquals("pathological criminal", lemma.getCoveredText());
		assertEquals("blue", lemma.getValue());
	}
}
package com.fundynamic.d2tm.game.drawing; import com.fundynamic.d2tm.game.controls.Mouse; import com.fundynamic.d2tm.game.map.Map; import com.fundynamic.d2tm.game.map.Perimeter; import com.fundynamic.d2tm.game.map.renderer.*; import com.fundynamic.d2tm.game.math.Vector2D; import org.newdawn.slick.Graphics; import org.newdawn.slick.Image; import org.newdawn.slick.SlickException; import org.newdawn.slick.geom.Vector2f; public class Viewport { private static final int PIXELS_NEAR_BORDER = 2; private final Vector2D viewportDimensions; private final Graphics graphics; private final Image buffer; private final Vector2D drawingVector; private final Perimeter viewingVectorPerimeter; private final StructureRenderer structureRenderer; private final StructureViewportRenderer structureViewportRenderer; private final MapCellTerrainRenderer mapCellTerrainRenderer; private final MapCellShroudRenderer mapCellShroudRenderer; private final MapCellMouseInteractionRenderer mapCellMouseInteractionRenderer; private final MapCellViewportRenderer mapCellViewportRenderer; private Vector2D velocity; private float moveSpeed; private Vector2D viewingVector; private Map map; public Viewport(Vector2D viewportDimensions, Vector2D drawingVector, Vector2D viewingVector, Graphics graphics, Map map, float moveSpeed, int tileWidth, int tileHeight, Mouse mouse) throws SlickException { this.viewportDimensions = viewportDimensions; this.map = map; this.graphics = graphics; this.drawingVector = drawingVector; this.buffer = constructImage(viewportDimensions); this.viewingVectorPerimeter = map.createViewablePerimeter(viewportDimensions, tileWidth, tileHeight); this.viewingVector = viewingVector; this.velocity = Vector2D.zero(); this.moveSpeed = moveSpeed; this.mapCellViewportRenderer = new MapCellViewportRenderer(map, tileHeight, tileWidth, viewportDimensions); this.structureViewportRenderer = new StructureViewportRenderer(map, tileHeight, tileWidth, viewportDimensions); this.mapCellTerrainRenderer = new 
MapCellTerrainRenderer(); this.mapCellShroudRenderer = new MapCellShroudRenderer(map); this.structureRenderer = new StructureRenderer(); this.mapCellMouseInteractionRenderer = new MapCellMouseInteractionRenderer(mouse); } public void render() throws SlickException { final Graphics bufferGraphics = this.buffer.getGraphics(); if (bufferGraphics == null) return; // HACK HACK: this makes sure our tests are happy by not having to stub all the way down these methods... mapCellViewportRenderer.render(this.buffer, viewingVector, mapCellTerrainRenderer); structureViewportRenderer.render(this.buffer, viewingVector, structureRenderer); mapCellViewportRenderer.render(this.buffer, viewingVector, mapCellMouseInteractionRenderer); mapCellViewportRenderer.render(this.buffer, viewingVector, mapCellShroudRenderer); drawBufferToGraphics(graphics, drawingVector); } public void update(float delta) { Vector2D translation = velocity.scale(delta); viewingVector = viewingVectorPerimeter.makeSureVectorStaysWithin(viewingVector.add(translation)); } private void moveLeft() { this.velocity = Vector2D.create(-moveSpeed, this.velocity.getY()); } private void moveRight() { this.velocity = Vector2D.create(moveSpeed, this.velocity.getY()); } private void moveUp() { this.velocity = Vector2D.create(this.velocity.getX(), -moveSpeed); } private void moveDown() { this.velocity = Vector2D.create(this.velocity.getX(), moveSpeed); } private void stopMovingHorizontally() { this.velocity = Vector2D.create(0, this.velocity.getY()); } private void stopMovingVertically() { this.velocity = Vector2D.create(this.velocity.getX(), 0); } private void drawBufferToGraphics(Graphics graphics, Vector2D drawingVector) { graphics.drawImage(buffer, drawingVector.getX(), drawingVector.getY()); } // These methods are here mainly for (easier) testing. Best would be to remove them if possible - and at the very // least not the use them in the non-test code. 
public Vector2D getViewingVector() { return viewingVector; } protected Image constructImage(Vector2D screenResolution) throws SlickException { return new Image(screenResolution.getXAsInt(), screenResolution.getYAsInt()); } public Map getMap() { return this.map; } public void tellAboutNewMousePositions(int newx, int newy) { if (newx <= PIXELS_NEAR_BORDER) { moveLeft(); } else if (newx >= viewportDimensions.getX() - PIXELS_NEAR_BORDER) { moveRight(); } else { stopMovingHorizontally(); } if (newy <= PIXELS_NEAR_BORDER) { moveUp(); } else if (newy >= viewportDimensions.getY() - PIXELS_NEAR_BORDER) { moveDown(); } else { stopMovingVertically(); } } /** * Takes screen pixel coordinate and translates that into an absolute pixel coordinate on the map */ public int getAbsoluteX(int xPositionOnScreen) { return xPositionOnScreen + (int)viewingVector.getX(); } /** * Takes screen pixel coordinate and translates that into an absolute pixel coordinate on the map */ public int getAbsoluteY(int yPositionOnScreen) { return yPositionOnScreen + (int)viewingVector.getY(); } }
package com.gameminers.farrago.enums;

import net.minecraft.init.Blocks;
import net.minecraft.init.Items;

import com.gameminers.farrago.selector.ItemSelector;
import com.gameminers.farrago.selector.NullSelector;
import com.gameminers.farrago.selector.OreSelector;
import com.gameminers.farrago.selector.Selector;

/**
 * Material tiers recognized by the mod.  Each constant bundles:
 * a {@link Selector} that matches the item/ore-dictionary entries for the material,
 * a 0xRRGGBB display color, a durability value, and a human-readable name
 * (bracketed suffixes describe special behaviors, e.g. "[Mobs]").
 *
 * NOTE: constructor argument order is (selector, color, durability, friendlyName).
 * Do not rely on ordinal() — constants are grouped by category, not strictly sorted.
 */
public enum MineralColor {
	// Special Types — materials with a unique gameplay effect rather than a plain tier.
	GLASS   (new OreSelector("blockGlass"), 0xDDDDFF, 800, "Glass [Potions]"),
	POTATO  (new ItemSelector(Items.potato), 0xEDC77C, 300, "Potato [Spud]"),
	GLOW    (new OreSelector("glowstone"), 0xF2CF21, 2400, "Glowstone [Torches]"),
	OBSIDIAN(new OreSelector("ingotObsidian", new ItemSelector(Blocks.obsidian)), 0x1E001B, 1200, "Obsidian [Rocket]"),
	// Regular Tiers — ordered roughly by increasing durability.
	LEAD    (new OreSelector("lead"), 0x3F3C51, 120, "Lead"),
	STONE   (new OreSelector("cobblestone"), 0x898989, 130, "Stone"),
	COAL    (new OreSelector("coal", new ItemSelector(Items.coal)), 0x454545, 180, "Coal"),
	LAPIS   (new OreSelector("gemLapis"), 0x1846B2, 200, "Lapis Lazuli"),
	// Both US and UK ore-dictionary spellings are accepted for aluminum.
	ALUMINUM(new OreSelector("ingotAluminum", new OreSelector("ingotAluminium")), 0xEEEEEE, 225, "Aluminum"),
	TIN     (new OreSelector("ingotTin"), 0xDDDDEE, 225, "Tin"),
	QUARTZ  (new OreSelector("gemQuartz"), 0xDACEC1, 200, "Nether Quartz"),
	COPPER  (new OreSelector("ingotCopper"), 0x9F4710, 225, "Copper"),
	IRON    (new OreSelector("ingotIron"), 0xD8AF93, 250, "Iron"),
	SILVER  (new OreSelector("ingotSilver"), 0xCCD8E6, 280, "Silver"),
	GOLD    (new OreSelector("ingotGold"), 0xFCEE4B, 300, "Gold [Mobs]"),
	YTTRIUM (new OreSelector("ingotYttrium"), 0xB1B1B1, 320, "Yttrium [Mobs]"),
	STEEL   (new OreSelector("ingotSteel"), 0x888888, 350, "Steel [Mobs]"),
	EMERALD (new OreSelector("gemEmerald"), 0x17DD62, 450, "Emerald [Mobs]"),
	DIAMOND (new OreSelector("gemDiamond"), 0x5DECF5, 850, "Diamond [Mobs]"),
	// Predictable Tiers
	ENDER   (new OreSelector("enderPearl", new ItemSelector(Items.ender_pearl)), 0x258474, 200, "Ender Pearl [Mobs, Predictable]"),
	PLATINUM(new OreSelector("ingotPlatinum"), 0x67D4F6, 450, "Platinum [Mobs, Predictable]"),
	IRIDIUM (new OreSelector("ingotIridium"), 0xAAAAAA, 650, "Iridium [Mobs, Predictable]"),
	// Creative-only: matches nothing (NullSelector); 32767 is the max short durability.
	CREATIVE(new NullSelector(), 0xFF00FF, 32767, "Creative [Mobs, Predictable, Virtually Indestructible]"),
	;
	private final Selector selector;   // matches items/ore-dict entries for this material
	private final int color;           // 0xRRGGBB tint
	private final String friendlyName; // display name, may carry "[...]" behavior tags
	private final int durability;

	private MineralColor(Selector selector, int color, int durability, String friendlyName) {
		this.selector = selector;
		this.color = color;
		this.friendlyName = friendlyName;
		this.durability = durability;
	}

	public Selector getSelector() {
		return selector;
	}

	public int getColor() {
		return color;
	}

	public String getFriendlyName() {
		return friendlyName;
	}

	public int getDurability() {
		return durability;
	}
}
package com.gamingmesh.jobs.commands.list; import org.bukkit.Bukkit; import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; import com.gamingmesh.jobs.Jobs; import com.gamingmesh.jobs.commands.Cmd; import com.gamingmesh.jobs.dao.JobsManager.DataBaseType; public class convert implements Cmd { @Override public boolean perform(Jobs plugin, CommandSender sender, String[] args) { if (sender instanceof Player) { sender.sendMessage(Jobs.getLanguage().getMessage("general.error.fromconsole")); return true; } if (args.length > 0) { Jobs.getCommandManager().sendUsage(sender, "convert"); return true; } Bukkit.getScheduler().runTaskAsynchronously(plugin, Jobs::convertDatabase); String from = "MySQL"; String to = "SQLite"; if (Jobs.getDBManager().getDbType() != DataBaseType.SqLite) { from = "SQLite"; to = "MySQL"; } Jobs.consoleMsg("&eData base was converted from &2" + from + " &eto &2" + to + "&e!"); return true; } }
package com.github.sormuras.bach.workflow;

import com.github.sormuras.bach.Bach;
import com.github.sormuras.bach.ToolCall;
import com.github.sormuras.bach.ToolOperator;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;

/**
 * Tool operator that packages every module declared in a project space into a modular
 * JAR, compiling any release-targeted (multi-release) sources along the way.
 */
public class CompileModules implements ToolOperator {

  static final String NAME = "compile-modules";

  @Override
  public String name() {
    return NAME;
  }

  /**
   * Builds one JAR per module declaration of the space named by {@code args[0]}.
   *
   * <p>For each module: assembles a {@code jar} call (version, optional {@code --date} on
   * JDK 19+, optional main class, base classes, resources, classes of patched modules,
   * and per-release targeted classes/resources), collecting {@code javac} calls for the
   * targeted sources. All javac calls run before all jar calls, each set in parallel.
   *
   * @param bach the build context providing project, paths and tool running
   * @param out writer for normal output (unused here; tools report via {@code bach.run})
   * @param err writer for error output (unused here)
   * @param args {@code args[0]} is the name of the space to compile, e.g. {@code "main"}
   * @return always 0; failures surface as runtime exceptions
   */
  @Override
  public int run(Bach bach, PrintWriter out, PrintWriter err, String... args) {
    var project = bach.project();
    var paths = bach.configuration().paths();
    var spaces = project.spaces();
    var space = spaces.space(args[0]);
    var declarations = space.modules().list();

    var classes = paths.out(space.name(), "classes");
    var modules = paths.out(space.name(), "modules");

    // Base classes were produced by compile-classes into "java-<release>" (or the
    // current runtime feature release when the space has no explicit target).
    var release0 = space.targets();
    var classes0 = classes.resolve("java-" + release0.orElse(Runtime.version().feature()));
    try {
      Files.createDirectories(modules);
    } catch (Exception exception) {
      // Keep the original exception as the cause so the underlying I/O error is not lost.
      throw new RuntimeException("Create directories failed: " + modules, exception);
    }

    var javacCommands = new ArrayList<ToolCall>(); // potentially none
    var jarCommands = new ArrayList<ToolCall>(); // one per module declaration
    for (var module : declarations) {
      var name = module.name();
      var file = modules.resolve(name + ".jar");

      var jar = ToolCall.of("jar").with("--create").with("--file", file);

      jar = jar.with("--module-version", project.version().value());

      // --date requires the jar tool of JDK 19 or later.
      if (Runtime.version().feature() >= 19) {
        var date = project.version().date();
        jar = jar.with("--date", date.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME));
      }

      // Convention: a top-level "Main" class marks the module's entry point.
      var mainProgram = name.replace('.', '/') + "/Main.java";
      var mainJava =
          module.base().sources().stream()
              .map(dir -> dir.resolve(mainProgram))
              .filter(Files::isRegularFile)
              .findFirst();
      if (mainJava.isPresent()) {
        jar = jar.with("--main-class", name + ".Main");
      }

      // include base classes (from compile-classes) and resources
      if (Files.isDirectory(classes0.resolve(name))) {
        jar = jar.with("-C", classes0.resolve(name), ".");
      }
      for (var resources : module.base().resources()) {
        jar = jar.with("-C", resources, ".");
      }

      // include classes of patched module
      for (var requires : space.requires()) {
        var required = spaces.space(requires);
        if (required.modules().find(name).isPresent()) {
          var javaR = "java-" + required.targets().orElse(Runtime.version().feature());
          jar = jar.with("-C", paths.out(requires, "classes", javaR, name), ".");
        }
      }

      // compile and include targeted classes and resources, lowest release first
      for (var release : module.targeted().keySet().stream().sorted().toList()) {
        var folders = module.targeted().get(release);
        for (var sources : folders.sources()) {
          var classesR = classes.resolve("java-" + release).resolve(name);
          var javac = ToolCall.of("javac").with("--release", release);
          var modulePath = space.toModulePath(paths);
          if (modulePath.isPresent()) {
            javac = javac.with("--module-path", modulePath.get());
            javac = javac.with("--processor-module-path", modulePath.get());
          }
          javac =
              javac
                  .with("--class-path", classes0.resolve(name))
                  .with("-implicit:none")
                  .with("-d", classesR)
                  .withFindFiles(sources, "**.java");
          javacCommands.add(javac);
          jar = jar.with("--release", release).with("-C", classesR, ".");
        }
        // A resources-only release directory still needs its own --release marker.
        var needsReleaseArgument = folders.sources().isEmpty() && !folders.resources().isEmpty();
        if (needsReleaseArgument) jar = jar.with("--release", release);
        for (var resources : folders.resources()) {
          jar = jar.with("-C", resources, ".");
        }
      }
      jarCommands.add(jar);
    }

    // All compilations must finish before any jar packaging starts.
    javacCommands.stream().parallel().forEach(bach::run);
    jarCommands.stream().parallel().forEach(bach::run);
    bach.run("checksum", "--list-dir", modules);
    return 0;
  }
}
package com.github.ansell.csv.access;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
import java.util.stream.Collectors;

import javax.script.ScriptException;

import org.apache.commons.io.IOUtils;
import org.jooq.lambda.Unchecked;

import com.fasterxml.jackson.databind.SequenceWriter;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;
import com.github.ansell.csv.util.CSVUtil;
import com.github.ansell.csv.util.ValueMapping;
import com.github.ansell.csv.util.ValueMapping.ValueMappingLanguage;
import com.healthmarketscience.jackcess.Column;
import com.healthmarketscience.jackcess.Cursor;
import com.healthmarketscience.jackcess.CursorBuilder;
import com.healthmarketscience.jackcess.Database;
import com.healthmarketscience.jackcess.DatabaseBuilder;
import com.healthmarketscience.jackcess.Index;
import com.healthmarketscience.jackcess.Row;
import com.healthmarketscience.jackcess.Table;
import com.healthmarketscience.jackcess.util.Joiner;

import joptsimple.OptionException;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

/**
 * Mapper from Access to CSV files.
 *
 * @author Peter Ansell p_ansell@yahoo.com
 */
public class AccessMapper {

	/**
	 * Command line entry point. Parses options, then (1) dumps every table of the
	 * input Access database to its own CSV file and (2) produces a single joined
	 * CSV driven by the mapping file.
	 *
	 * @param args --input &lt;file&gt; --mapping &lt;file&gt; --output &lt;dir&gt; [--prefix &lt;p&gt;]
	 * @throws Exception on option errors, missing files, or I/O failures
	 */
	public static void main(String... args) throws Exception {
		final OptionParser parser = new OptionParser();

		final OptionSpec<Void> help = parser.accepts("help").forHelp();
		final OptionSpec<File> input = parser.accepts("input").withRequiredArg().ofType(File.class).required()
				.describedAs("The input Access file to be mapped.");
		final OptionSpec<File> mapping = parser.accepts("mapping").withRequiredArg().ofType(File.class).required()
				.describedAs("The mapping file.");
		final OptionSpec<File> output = parser.accepts("output").withRequiredArg().ofType(File.class).required()
				.describedAs("The directory to contain the mapped file.");
		final OptionSpec<String> outputPrefix = parser.accepts("prefix").withRequiredArg().ofType(String.class)
				.defaultsTo("Mapped-").describedAs("The prefix to use to name the mapped files.");

		OptionSet options = null;
		try {
			options = parser.parse(args);
		} catch (final OptionException e) {
			System.out.println(e.getMessage());
			parser.printHelpOn(System.out);
			throw e;
		}

		if (options.has(help)) {
			parser.printHelpOn(System.out);
			return;
		}

		final Path inputPath = input.value(options).toPath();
		if (!Files.exists(inputPath)) {
			throw new FileNotFoundException("Could not find input Access file: " + inputPath.toString());
		}

		final Path mappingPath = mapping.value(options).toPath();
		if (!Files.exists(mappingPath)) {
			// Fixed typo in the original message ("mappng").
			throw new FileNotFoundException("Could not find mapping CSV file: " + mappingPath.toString());
		}

		try (final BufferedReader readerMapping = Files.newBufferedReader(mappingPath);) {
			List<ValueMapping> map = ValueMapping.extractMappings(readerMapping);
			// The database stream is consumed twice, so it is reopened for each pass.
			try (final InputStream readerDB = Files.newInputStream(inputPath);) {
				dumpToCSVs(readerDB, output.value(options).toPath(), outputPrefix.value(options));
			}
			try (final InputStream readerDB = Files.newInputStream(inputPath);) {
				mapDBToSingleCSV(readerDB, map, output.value(options).toPath(),
						outputPrefix.value(options) + "Single-");
			}
		}
	}

	/**
	 * Joins rows from several Access tables into a single CSV according to the
	 * given value mappings. The first table referenced by the mappings is the
	 * origin table; ACCESS-language mappings declare foreign-key joins from it.
	 *
	 * @param readerDB stream of the Access database (copied to a temp file,
	 *                 because jackcess needs random file access)
	 * @param map ordered value mappings; the first table mentioned drives the join
	 * @param csvPath output directory
	 * @param csvPrefix prefix for the generated CSV file name
	 * @throws IOException on database or CSV I/O failures
	 */
	private static void mapDBToSingleCSV(InputStream readerDB, List<ValueMapping> map, Path csvPath, String csvPrefix)
			throws IOException {
		Path tempFile = Files.createTempFile("Source-accessdb", ".accdb");
		try {
			Files.copy(readerDB, tempFile, StandardCopyOption.REPLACE_EXISTING);
			try (final Database db = DatabaseBuilder.open(tempFile.toFile());) {
				// Ordered mappings so that the first table in the mapping is the
				// one to perform the base joins on
				Table originTable = null;
				ConcurrentMap<ValueMapping, Table> tableMapping = new ConcurrentHashMap<>();
				ConcurrentMap<ValueMapping, Table> foreignKeyMapping = new ConcurrentHashMap<>();
				ConcurrentMap<ValueMapping, Joiner> joiners = new ConcurrentHashMap<>();

				// Populate the table mapping for each value mapping. Input fields are
				// of the form "Table.Column".
				for (final ValueMapping nextValueMapping : map) {
					String[] splitDBField = nextValueMapping.getInputField().split("\\.");
					System.out.println(nextValueMapping.getInputField());
					Table nextTable = db.getTable(splitDBField[0]);
					tableMapping.put(nextValueMapping, nextTable);
					if (originTable == null) {
						originTable = nextTable;
					}
					if (nextValueMapping.getLanguage() == ValueMappingLanguage.ACCESS) {
						// ACCESS mappings express a join: output field names the foreign table.
						String[] splitForeignDBField = nextValueMapping.getOutputField().split("\\.");
						foreignKeyMapping.put(nextValueMapping, db.getTable(splitForeignDBField[0]));
						try {
							joiners.put(nextValueMapping,
									Joiner.create(nextTable, db.getTable(splitForeignDBField[0])));
							System.out.println("PK->FK: " + joiners.get(nextValueMapping).toFKString());
						} catch (IllegalArgumentException e) {
							// Best effort: no usable index between the tables; the join is skipped.
							e.printStackTrace();
						}
					}
				}

				// There may have been no mappings...
				if (originTable != null) {
					List<String> headers = map.stream().map(m -> m.getOutputField()).collect(Collectors.toList());
					final CsvSchema schema = CSVUtil.buildSchema(headers);
					try (final Writer csv = Files
							.newBufferedWriter(csvPath.resolve(csvPrefix + originTable.getName() + ".csv"));
							final SequenceWriter csvWriter = CSVUtil.newCSVWriter(new BufferedWriter(csv), schema);) {
						// Run through the fields on the origin table joining them
						// as necessary before running the other non-access mappings
						// on the resulting list of strings
						for (Row nextRow : originTable) {
							ConcurrentMap<String, String> output = new ConcurrentHashMap<>();
							// Table name -> row providing that table's values for this output row.
							ConcurrentMap<String, Row> componentRowsForThisRow = new ConcurrentHashMap<>();
							List<? extends Column> originColumns = originTable.getColumns();
							for (Column nextOriginColumn : originColumns) {
								for (final ValueMapping nextValueMapping : map) {
									String[] splitDBField = nextValueMapping.getInputField().split("\\.");
									if (splitDBField[0].equals(originTable.getName())
											&& splitDBField[1].equals(nextOriginColumn.getName())) {
										if (foreignKeyMapping.containsKey(nextValueMapping)) {
											if (joiners.containsKey(nextValueMapping)) {
												Row findFirstRow = joiners.get(nextValueMapping)
														.findFirstRow(nextRow);
												if (findFirstRow != null) {
													String[] splitDBFieldOutput = nextValueMapping.getOutputField()
															.split("\\.");
													componentRowsForThisRow.put(splitDBFieldOutput[0], findFirstRow);
												}
											} else {
												// TODO: Support fetching of foreign keys when an index was
												// not available for this mapping's input/output fields.
											}
										} else {
											componentRowsForThisRow.put(splitDBField[0], nextRow);
										}
									}
								}
							}

							// Populate the foreign row values
							for (final ValueMapping nextValueMapping : map) {
								String[] splitDBField = nextValueMapping.getInputField().split("\\.");
								if (componentRowsForThisRow.containsKey(splitDBField[0])) {
									Row findFirstRow = componentRowsForThisRow.get(splitDBField[0]);
									Object nextColumnValue = findFirstRow.get(splitDBField[1]);
									if (nextColumnValue != null) {
										output.put(nextValueMapping.getOutputField(), nextColumnValue.toString());
									}
									// else: no value found for this mapping; the cell stays empty.
								}
							}

							// Then after all are filled, emit the row in mapping order,
							// defaulting missing cells to the empty string.
							List<String> nextEmittedRow = new ArrayList<>(map.size());
							for (final ValueMapping nextValueMapping : map) {
								nextEmittedRow.add(output.getOrDefault(nextValueMapping.getOutputField(), ""));
							}
							csvWriter.write(nextEmittedRow);
						}
					}
				}
			}
		} finally {
			// Clean up the working copy of the database.
			Files.deleteIfExists(tempFile);
		}
	}

	/**
	 * Dumps every table of the Access database to its own CSV file named
	 * {@code csvPrefix + tableName + ".csv"} in {@code outputDir}.
	 *
	 * @param input stream of the Access database (copied to a temp file first)
	 * @param outputDir directory receiving one CSV per table
	 * @param csvPrefix prefix for each generated CSV file name
	 * @throws IOException on database or CSV I/O failures
	 */
	private static void dumpToCSVs(InputStream input, Path outputDir, String csvPrefix) throws IOException {
		Path tempFile = Files.createTempFile("Source-accessdb", ".accdb");
		try {
			Files.copy(input, tempFile, StandardCopyOption.REPLACE_EXISTING);
			try (final Database db = DatabaseBuilder.open(tempFile.toFile());) {
				for (String tableName : db.getTableNames()) {
					System.out.println("");
					String csvName = csvPrefix + tableName + ".csv";
					Path csvPath = outputDir.resolve(csvName);
					System.out.println("Converting " + tableName + " to CSV: " + csvPath.toAbsolutePath().toString());
					Table table = db.getTable(tableName);
					debugTable(table);
					// Reused buffer: first the header names, then each row's values.
					String[] tempArray = new String[table.getColumnCount()];
					int x = 0;
					for (Column nextColumn : table.getColumns()) {
						tempArray[x++] = nextColumn.getName();
					}
					final CsvSchema schema = CSVUtil.buildSchema(Arrays.asList(tempArray));
					try (final Writer csv = Files.newBufferedWriter(csvPath);
							final SequenceWriter csvWriter = CSVUtil.newCSVWriter(new BufferedWriter(csv), schema);) {
						int rows = 0;
						for (Row nextRow : table) {
							int i = 0;
							for (Object nextValue : nextRow.values()) {
								tempArray[i++] = nextValue == null ? null : nextValue.toString();
							}
							csvWriter.write(Arrays.asList(tempArray));
							rows++;
						}
						System.out.println("Converted " + rows + " rows from table " + tableName);
					}
					// NOTE(review): the original source was garbled here (an unterminated
					// string literal); reconstructed as a blank separator line — confirm
					// against upstream history.
					System.out.println("");
				}
			}
		} finally {
			// Clean up the working copy of the database.
			Files.deleteIfExists(tempFile);
		}
	}

	/**
	 * Prints column, index, and sample-row diagnostics for a table to System.out.
	 *
	 * @param table the table to describe
	 * @throws IOException if reading the table fails
	 */
	private static void debugTable(Table table) throws IOException {
		System.out.println("\tTable columns for " + table.getName());
		for (Column nextColumn : table.getColumns()) {
			System.out.println("\t\t" + nextColumn.getName());
		}

		try {
			// getPrimaryKeyIndex throws IllegalArgumentException when there is none.
			Index primaryKeyIndex = table.getPrimaryKeyIndex();
			System.out.println(
					"\tFound primary key index for table: " + table.getName() + " named " + primaryKeyIndex.getName());
			debugIndex(primaryKeyIndex, new HashSet<>());
			for (Index nextIndex : table.getIndexes()) {
				if (!nextIndex.getName().equals(primaryKeyIndex.getName())) {
					System.out.println("\tFound non-primary key index for table: " + table.getName() + " named "
							+ nextIndex.getName());
					debugIndex(nextIndex, new HashSet<>());
				}
			}
		} catch (IllegalArgumentException e) {
			System.out.println("No primary key index found for table: " + table.getName());
		}

		// Show at most the first 20 rows as a sample.
		Cursor cursor = table.getDefaultCursor();
		int i = 0;
		while (cursor.moveToNextRow()) {
			if (i >= 20) {
				break;
			}
			System.out.println(cursor.getCurrentRow().toString());
			i++;
		}
	}

	/**
	 * Prints an index's columns and follows referenced indexes recursively,
	 * using {@code visited} to avoid cycles.
	 *
	 * @param index the index to describe
	 * @param visited indexes already printed in this traversal
	 * @throws IOException if reading index metadata fails
	 */
	private static void debugIndex(Index index, Set<Index> visited) throws IOException {
		visited.add(index);
		System.out.println("\t\tIndex columns:");
		for (Index.Column nextColumn : index.getColumns()) {
			System.out.print("\t\t\t" + nextColumn.getName());
		}
		System.out.println("");

		Index referencedIndex = index.getReferencedIndex();
		if (referencedIndex != null) {
			System.out.println("\t" + index.getName() + " references another index: " + referencedIndex.getName());
			if (!visited.contains(referencedIndex)) {
				visited.add(referencedIndex);
				debugIndex(referencedIndex, visited);
			}
		}
	}
}
package com.github.droidfu.http;

import java.io.IOException;
import java.net.ConnectException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

import oauth.signpost.OAuthConsumer;
import oauth.signpost.exception.OAuthExpectationFailedException;
import oauth.signpost.exception.OAuthMessageSignerException;

import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.HttpVersion;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.HttpResponseException;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.conn.params.ConnManagerParams;
import org.apache.http.conn.params.ConnPerRouteBean;
import org.apache.http.conn.params.ConnRoutePNames;
import org.apache.http.conn.scheme.PlainSocketFactory;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.impl.client.AbstractHttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.client.RequestWrapper;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpParams;
import org.apache.http.params.HttpProtocolParams;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.ExecutionContext;
import org.apache.http.protocol.HttpContext;

import android.content.Context;
import android.content.IntentFilter;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.Proxy;
import android.util.Log;

/**
 * Base class for fluent HTTP requests backed by a single shared, thread-safe
 * Apache HttpClient. Supports default headers, OAuth signing via signpost,
 * carrier-proxy detection on Android mobile connections, and automatic
 * retries (with request re-signing) on transient failures.
 */
public abstract class BetterHttpRequest {

    private static final String LOG_TAG = BetterHttpRequest.class.getSimpleName();

    private static final int MAX_CONNECTIONS = 6;
    private static final int MAX_RETRIES = 5;
    private static final int RETRY_SLEEP_TIME_MILLIS = 3 * 1000;
    private static final int CONNECTION_TIMEOUT = 10 * 1000;

    // HttpContext attribute under which the absolute request URI is stashed,
    // because HttpClient rewrites it to a relative URI before sending.
    private static final String REQUEST_URI_BACKUP = "request_uri_backup";

    protected static final String HTTP_CONTENT_TYPE_HEADER = "Content-Type";
    protected static final String HTTP_USER_AGENT = "Android/DroidFu";

    private static AbstractHttpClient httpClient;

    private static Context appContext;

    private static HashMap<String, String> defaultHeaders = new HashMap<String, String>();

    static {
        setupHttpClient();
    }

    // Status codes accepted without error; empty means "accept anything".
    private List<Integer> expectedStatusCodes = new ArrayList<Integer>();

    private OAuthConsumer oauthConsumer;

    protected HttpUriRequest request;

    /**
     * Wraps the {@link HttpResponse} into a {@link BetterHttpResponse}. Also
     * takes care of throwing a {@link HttpResponseException} if an unexpected
     * response code was received.
     */
    private ResponseHandler<BetterHttpResponse> responseHandler = new ResponseHandler<BetterHttpResponse>() {

        public BetterHttpResponse handleResponse(HttpResponse response)
                throws ClientProtocolException, IOException {
            int status = response.getStatusLine().getStatusCode();
            // FIX: also require a non-empty list. Previously, a request on which
            // expecting() was never called rejected *every* response, because
            // an empty list contains no status code.
            if (expectedStatusCodes != null && !expectedStatusCodes.isEmpty()
                    && !expectedStatusCodes.contains(status)) {
                throw new HttpResponseException(status, "Unexpected status code: " + status);
            }
            return new BetterHttpResponse(response);
        }
    };

    /**
     * A custom request-retry handler which supports re-signing previously
     * failed messages. TODO: ignore non-idem-potent requests?
     */
    private HttpRequestRetryHandler retryHandler = new HttpRequestRetryHandler() {

        public boolean retryRequest(IOException exception, int executionCount, HttpContext context) {
            if (executionCount > MAX_RETRIES) {
                return false;
            }
            exception.printStackTrace();
            Log.d(BetterHttpRequest.class.getSimpleName(), "Retrying "
                    + request.getRequestLine().getUri() + " (tried: " + executionCount
                    + " times)");
            // Apache HttpClient rewrites the request URI to be relative before
            // sending a request, but we need the full URI for OAuth signing,
            // so restore it before proceeding.
            // FIX: renamed the local from "request" so it no longer shadows the
            // enclosing instance field of the same name.
            RequestWrapper wrapped = (RequestWrapper) context
                    .getAttribute(ExecutionContext.HTTP_REQUEST);
            URI rewrittenUri = wrapped.getURI();
            URI originalUri = (URI) context.getAttribute(REQUEST_URI_BACKUP);
            wrapped.setURI(originalUri);

            // re-sign the request, otherwise this may yield 401s
            if (oauthConsumer != null) {
                try {
                    oauthConsumer.sign(wrapped);
                } catch (Exception e) {
                    e.printStackTrace();
                    // no reason to retry this
                    return false;
                }
            }

            // restore URI to whatever Apache HttpClient expects
            wrapped.setURI(rewrittenUri);

            return true;
        }
    };

    /** Builds the shared client: pooled connections, HTTP/1.1, fixed timeouts. */
    private static void setupHttpClient() {
        BasicHttpParams httpParams = new BasicHttpParams();
        ConnManagerParams.setTimeout(httpParams, CONNECTION_TIMEOUT);
        ConnManagerParams.setMaxConnectionsPerRoute(httpParams, new ConnPerRouteBean(
                MAX_CONNECTIONS));
        ConnManagerParams.setMaxTotalConnections(httpParams, MAX_CONNECTIONS);
        HttpProtocolParams.setVersion(httpParams, HttpVersion.HTTP_1_1);
        HttpProtocolParams.setUserAgent(httpParams, HTTP_USER_AGENT);

        SchemeRegistry schemeRegistry = new SchemeRegistry();
        schemeRegistry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));
        // FIX: https was previously registered with PlainSocketFactory, i.e.
        // port 443 was spoken to in cleartext and no TLS handshake occurred.
        schemeRegistry.register(new Scheme("https", SSLSocketFactory.getSocketFactory(), 443));

        ThreadSafeClientConnManager cm = new ThreadSafeClientConnManager(httpParams,
                schemeRegistry);
        httpClient = new DefaultHttpClient(cm, httpParams);
    }

    /**
     * Configures (or clears) the client's default proxy according to the
     * currently active network: on mobile connections the carrier proxy is
     * applied, on all other networks any previous proxy is removed.
     */
    public static void updateProxySettings(Context context) {
        HttpParams httpParams = httpClient.getParams();
        ConnectivityManager connectivity = (ConnectivityManager) context
                .getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo nwInfo = connectivity.getActiveNetworkInfo();
        if (nwInfo == null) {
            return;
        }
        if (nwInfo.getType() == ConnectivityManager.TYPE_MOBILE) {
            String proxyHost = Proxy.getDefaultHost();
            int proxyPort = Proxy.getDefaultPort();
            if (proxyHost != null && proxyPort > -1) {
                Log.d(LOG_TAG, "Detected carrier proxy " + proxyHost + ":" + proxyPort);
                HttpHost proxy = new HttpHost(proxyHost, proxyPort);
                httpParams.setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy);
            }
        } else {
            httpParams.setParameter(ConnRoutePNames.DEFAULT_PROXY, null);
        }
    }

    /**
     * Registers the application context (first call wins) and a receiver that
     * re-evaluates proxy settings whenever connectivity changes.
     */
    public static void setContext(Context context) {
        if (BetterHttpRequest.appContext != null) {
            return;
        }
        BetterHttpRequest.appContext = context.getApplicationContext();
        context.registerReceiver(new ConnectionChangedBroadcastReceiver(), new IntentFilter(
                ConnectivityManager.CONNECTIVITY_ACTION));
    }

    /**
     * Re-registers the given scheme on a non-default port.
     * NOTE(review): this always uses a plain (non-TLS) socket factory, even
     * for "https" — confirm whether that is intentional for callers.
     */
    public static void setPortForScheme(String scheme, int port) {
        Scheme _scheme = new Scheme(scheme, PlainSocketFactory.getSocketFactory(), port);
        httpClient.getConnectionManager().getSchemeRegistry().register(_scheme);
    }

    /** Adds a header that will be sent with every request created hereafter. */
    public static void setDefaultHeader(String header, String value) {
        defaultHeaders.put(header, value);
    }

    /** Creates a GET request carrying the registered default headers. */
    public static BetterHttpRequest get(String url) {
        return new HttpGet(url, defaultHeaders);
    }

    /** Creates a POST request with the given body, carrying default headers. */
    public static BetterHttpRequest post(String url, HttpEntity payload) {
        return new HttpPost(url, payload, defaultHeaders);
    }

    /** @return the underlying Apache request object. */
    public HttpUriRequest unwrap() {
        return request;
    }

    /**
     * Declares the status codes considered successful for this request; any
     * other status causes {@link HttpResponseException} on send.
     */
    public BetterHttpRequest expecting(Integer... statusCodes) {
        expectedStatusCodes = Arrays.asList(statusCodes);
        return this;
    }

    /** Signs this request with the given OAuth consumer and remembers it for retries. */
    public BetterHttpRequest signed(OAuthConsumer oauthConsumer)
            throws OAuthMessageSignerException, OAuthExpectationFailedException {
        this.oauthConsumer = oauthConsumer;
        oauthConsumer.sign(this.unwrap());
        return this;
    }

    /**
     * Executes the request, retrying up to {@code MAX_RETRIES} times with a
     * fixed sleep between attempts.
     *
     * @return the wrapped response
     * @throws ConnectException if all attempts fail; the last failure is its cause
     */
    public BetterHttpResponse send() throws ConnectException {
        if (appContext != null) {
            updateProxySettings(appContext);
        }

        HttpContext httpContext = new BasicHttpContext();
        // Apache HttpClient rewrites the request URI to be relative before
        // sending a request, but we need the full URI in the retry handler,
        // so store it manually before proceeding.
        httpContext.setAttribute(REQUEST_URI_BACKUP, request.getURI());

        httpClient.setHttpRequestRetryHandler(retryHandler);

        int numAttempts = 0;
        while (numAttempts < MAX_RETRIES) {
            numAttempts++;
            try {
                if (oauthConsumer != null) {
                    oauthConsumer.sign(request);
                }
                return httpClient.execute(request, responseHandler, httpContext);
            } catch (Exception e) {
                // throws ConnectException once the attempt budget is exhausted
                waitAndContinue(e, numAttempts, MAX_RETRIES);
            }
        }
        // unreachable in practice: waitAndContinue throws on the final attempt
        return null;
    }

    /**
     * Sleeps before the next retry, or — on the final attempt — wraps the last
     * failure in a {@link ConnectException} and rethrows it.
     */
    private void waitAndContinue(Exception cause, int numAttempts, int maxAttempts)
            throws ConnectException {
        if (numAttempts == maxAttempts) {
            Log.e(LOG_TAG, "request failed after " + numAttempts + " attempts");
            ConnectException ex = new ConnectException();
            ex.initCause(cause);
            throw ex;
        } else {
            cause.printStackTrace();
            Log.e(LOG_TAG, "request failed, will retry after " + RETRY_SLEEP_TIME_MILLIS / 1000
                    + " secs...");
            try {
                Thread.sleep(RETRY_SLEEP_TIME_MILLIS);
            } catch (InterruptedException e1) {
                // FIX: previously swallowed; restore the interrupt flag so
                // callers can observe the interruption.
                Thread.currentThread().interrupt();
            }
        }
    }
}
package com.kscs.util.plugins.xjc;

import com.sun.codemodel.*;
import com.sun.tools.xjc.Options;
import com.sun.tools.xjc.Plugin;
import com.sun.tools.xjc.outline.ClassOutline;
import com.sun.tools.xjc.outline.FieldOutline;
import com.sun.tools.xjc.outline.Outline;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;

import java.util.ResourceBundle;

/**
 * XJC Plugin to make generated classes immutable.
 *
 * For non-array collection properties, the public getter is replaced by one
 * that returns an unmodifiable view cached in a generated "<name>_RO" field;
 * for all other properties the generated setter is demoted to protected.
 */
public class ImmutablePlugin extends Plugin {
	/** Activation switch: enable with "-Ximmutable" on the XJC command line. */
	@Override
	public String getOptionName() {
		return "Ximmutable";
	}

	/** Builds the usage text from this plugin's resource bundle. */
	@Override
	public String getUsage() {
		return new PluginUsageBuilder(ResourceBundle.getBundle(ImmutablePlugin.class.getName()), "usage").addMain("immutable").build();
	}

	/**
	 * Rewrites every generated class: collection getters are replaced with
	 * lazily-initialized unmodifiable views, other setters become protected.
	 *
	 * NOTE: the order of codemodel mutations matters — the old getter must be
	 * looked up and removed before the replacement getter is created under
	 * the same name.
	 */
	@Override
	public boolean run(final Outline outline, final Options opt, final ErrorHandler errorHandler) throws SAXException {
		final ApiConstructs apiConstructs = new ApiConstructs(outline, opt, errorHandler);
		for (final ClassOutline classOutline : outline.getClasses()) {
			final JDefinedClass definedClass = classOutline.implClass;
			for (final FieldOutline fieldOutline : classOutline.getDeclaredFields()) {
				final JFieldVar declaredField;
				// Collection properties (but not arrays) get the read-only-view treatment.
				// Note: declaredField is assigned inside the condition itself.
				if (fieldOutline.getPropertyInfo().isCollection() && !((declaredField = PluginUtil.getDeclaredField(fieldOutline)).type().isArray())) {
					final JClass elementType = ((JClass) declaredField.type()).getTypeParameters().get(0);
					final JMethod oldGetter = definedClass.getMethod("get" + fieldOutline.getPropertyInfo().getName(true), new JType[0]);
					// Cached unmodifiable view; transient so it is not serialized.
					final JFieldVar immutableField = definedClass.field(JMod.PROTECTED | JMod.TRANSIENT, declaredField.type(), getImmutableFieldName(declaredField), JExpr._null());
					definedClass.methods().remove(oldGetter);
					final JMethod newGetter = definedClass.method(JMod.PUBLIC, oldGetter.type(), oldGetter.name());
					// Lazily create the backing collection if it is still null...
					final JConditional ifFieldNull = newGetter.body()._if(JExpr._this().ref(declaredField).eq(JExpr._null()));
					ifFieldNull._then().assign(JExpr._this().ref(declaredField), JExpr._new(apiConstructs.arrayListClass.narrow(elementType)));
					// ...then lazily create the unmodifiable view over it.
					final JConditional ifImmutableFieldNull = newGetter.body()._if(JExpr._this().ref(immutableField).eq(JExpr._null()));
					immutableInit(apiConstructs, ifImmutableFieldNull._then(), JExpr._this(), declaredField);
					newGetter.body()._return(JExpr._this().ref(immutableField));
				} else {
					// Non-collection (or array) property: hide the setter from the public API.
					final String setterName = "set" + fieldOutline.getPropertyInfo().getName(true);
					final JMethod setterMethod = definedClass.getMethod(setterName, new JType[]{fieldOutline.getRawType()});
					if (setterMethod != null) {
						setterMethod.mods().setProtected();
					}
				}
			}
		}
		return true;
	}

	/** Name of the generated read-only-view field for a field outline. */
	public String getImmutableFieldName(final FieldOutline fieldVar) {
		return fieldVar.getPropertyInfo().getName(false) + "_RO";
	}

	/** Name of the generated read-only-view field for a codemodel field. */
	public String getImmutableFieldName(final JFieldVar fieldVar) {
		return fieldVar.name() + "_RO";
	}

	/** Emits: this.<name>_RO = (field == null ? null : unmodifiableList(field)); */
	public void immutableInit(final ApiConstructs apiConstructs, final JBlock body, final JExpression instanceRef, final FieldOutline collectionField) {
		body.assign(instanceRef.ref(getImmutableFieldName(collectionField)), PluginUtil.nullSafe(collectionField, apiConstructs.unmodifiableList(instanceRef.ref(collectionField.getPropertyInfo().getName(false)))));
	}

	/** Emits: this.<name>_RO = (field == null ? null : unmodifiableList(field)); */
	public void immutableInit(final ApiConstructs apiConstructs, final JBlock body, final JExpression instanceRef, final JFieldVar declaredField) {
		body.assign(instanceRef.ref(getImmutableFieldName(declaredField)), PluginUtil.nullSafe(declaredField, apiConstructs.unmodifiableList(instanceRef.ref(declaredField))));
	}
}
/**
 * Blog management controller for the TMS application.
 */
package com.lhjz.portal.controller;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.web.PageableDefault;
import org.springframework.data.web.SortDefault;
import org.springframework.messaging.simp.SimpMessagingTemplate;
import org.springframework.security.access.annotation.Secured;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

import com.lhjz.portal.base.BaseController;
import com.lhjz.portal.component.MailSender;
import com.lhjz.portal.entity.Blog;
import com.lhjz.portal.entity.BlogAuthority;
import com.lhjz.portal.entity.BlogFollower;
import com.lhjz.portal.entity.BlogHistory;
import com.lhjz.portal.entity.BlogNews;
import com.lhjz.portal.entity.BlogStow;
import com.lhjz.portal.entity.Channel;
import com.lhjz.portal.entity.ChatChannel;
import com.lhjz.portal.entity.ChatDirect;
import com.lhjz.portal.entity.Comment;
import com.lhjz.portal.entity.Dir;
import com.lhjz.portal.entity.Label;
import com.lhjz.portal.entity.Log;
import com.lhjz.portal.entity.Space;
import com.lhjz.portal.entity.SpaceAuthority;
import com.lhjz.portal.entity.Tag;
import com.lhjz.portal.entity.security.User;
import com.lhjz.portal.model.BlogCommentPayload;
import com.lhjz.portal.model.BlogPayload;
import com.lhjz.portal.model.BlogPayload.Cmd;
import com.lhjz.portal.model.BlogSearchResult;
import com.lhjz.portal.model.Mail;
import com.lhjz.portal.model.PollBlog;
import com.lhjz.portal.model.RespBody;
import com.lhjz.portal.model.ToastrPayload;
import com.lhjz.portal.pojo.Enum.Action;
import com.lhjz.portal.pojo.Enum.CommentType;
import com.lhjz.portal.pojo.Enum.Editor;
import com.lhjz.portal.pojo.Enum.Status;
import com.lhjz.portal.pojo.Enum.Target;
import com.lhjz.portal.pojo.Enum.VoteType;
import com.lhjz.portal.repository.BlogAuthorityRepository;
import com.lhjz.portal.repository.BlogFollowerRepository;
import com.lhjz.portal.repository.BlogHistoryRepository;
import com.lhjz.portal.repository.BlogNewsRepository;
import com.lhjz.portal.repository.BlogRepository;
import com.lhjz.portal.repository.BlogStowRepository;
import com.lhjz.portal.repository.ChannelRepository;
import com.lhjz.portal.repository.ChatDirectRepository;
import com.lhjz.portal.repository.CommentRepository;
import com.lhjz.portal.repository.DirRepository;
import com.lhjz.portal.repository.LabelRepository;
import com.lhjz.portal.repository.LogRepository;
import com.lhjz.portal.repository.SpaceRepository;
import com.lhjz.portal.repository.TagRepository;
import com.lhjz.portal.repository.UserRepository;
import com.lhjz.portal.service.ChatChannelService;
import com.lhjz.portal.util.DateUtil;
import com.lhjz.portal.util.MapUtil;
import com.lhjz.portal.util.StringUtil;
import com.lhjz.portal.util.TemplateUtil;
import com.lhjz.portal.util.ThreadUtil;
import com.lhjz.portal.util.ValidateUtil;
import com.lhjz.portal.util.WebUtil;

/**
 * REST-style controller for blog CRUD, search, voting, following and the
 * related mail / WebSocket notifications.
 *
 * @author xi
 * @date 2015328 1:19:05
 */
@Controller
@RequestMapping("admin/blog")
public class BlogController extends BaseController {

	static Logger logger = LoggerFactory.getLogger(BlogController.class);

	// Externalized paths: blog upload directory, md->pdf converter, node binary.
	@Value("${tms.blog.upload.path}")
	private String uploadPath;

	@Value("${tms.blog.md2pdf.path}")
	private String md2pdfPath;

	@Value("${tms.bin.node.path}")
	private String nodePath;

	@Autowired
	BlogRepository blogRepository;

	@Autowired
	BlogHistoryRepository blogHistoryRepository;

	@Autowired
	BlogAuthorityRepository blogAuthorityRepository;

	@Autowired
	SpaceRepository spaceRepository;

	@Autowired
	ChannelRepository channelRepository;

	@Autowired
	ChatDirectRepository chatDirectRepository;

	@Autowired
	UserRepository userRepository;

	@Autowired
	CommentRepository commentRepository;

	@Autowired
	BlogStowRepository blogStowRepository;

	@Autowired
	BlogFollowerRepository blogFollowerRepository;

	@Autowired
	LogRepository logRepository;

	@Autowired
	TagRepository tagRepository;

	@Autowired
	LabelRepository labelRepository;

	@Autowired
	DirRepository dirRepository;

	@Autowired
	BlogNewsRepository blogNewsRepository;

	@Autowired
	MailSender mailSender;

	@Autowired
	ChatChannelService chatChannelService;

	@Autowired
	SimpMessagingTemplate messagingTemplate;

	@PersistenceContext
	private EntityManager em;

	/**
	 * Creates a blog post; optionally attaches it to a space/dir, then mails
	 * and pushes a WebSocket notification to the users named in
	 * {@code usernames} (comma-separated).
	 */
	@RequestMapping(value = "create", method = RequestMethod.POST)
	@ResponseBody
	public RespBody create(@RequestParam("url") String url,
			@RequestParam(value = "spaceId", required = false) Long spaceId,
			@RequestParam(value = "dirId", required = false) Long dirId,
			@RequestParam(value = "privated", required = false) Boolean privated,
			@RequestParam(value = "opened", required = false) Boolean opened,
			@RequestParam(value = "editor", required = false) String editor,
			@RequestParam(value = "usernames", required = false) String usernames,
			@RequestParam("title") String title, @RequestParam("content") String content,
			@RequestParam("contentHtml") String contentHtml) {

		if (StringUtil.isEmpty(title)) {
			return RespBody.failed("!");
		}

		if (StringUtil.isEmpty(content)) {
			return RespBody.failed("!");
		}

		Blog blog = new Blog();
		blog.setTitle(title);
		blog.setContent(content);
		// Default to Markdown when no editor type is supplied.
		if (StringUtils.isNotBlank(editor)) {
			blog.setEditor(Editor.valueOf(editor));
		} else {
			blog.setEditor(Editor.Markdown);
		}

		if (spaceId != null) {
			Space space = spaceRepository.findOne(spaceId);
			if (space == null) {
				return RespBody.failed("!");
			}
			blog.setSpace(space);
		}

		if (dirId != null) {
			Dir dir = dirRepository.findOne(dirId);
			if (dir == null) {
				return RespBody.failed("!");
			}
			blog.setDir(dir);
		}

		if (privated != null) {
			blog.setPrivated(privated);
		}
		if (opened != null) {
			blog.setOpened(opened);
		}

		Blog blog2 = blogRepository.saveAndFlush(blog);

		log(Action.Create, Target.Blog, blog2.getId(), blog2.getTitle());

		final String href = url + "#/blog/" + blog2.getId();
		final String html = contentHtml;
		final User loginUser = getLoginUser();
		final Mail mail = Mail.instance();

		// NOTE(review): the inner isNotEmpty(usernames) check is redundant —
		// it duplicates the outer condition on the same value.
		if (StringUtil.isNotEmpty(usernames)) {
			if (StringUtil.isNotEmpty(usernames)) {
				String[] usernameArr = usernames.split(",");
				Arrays.asList(usernameArr).stream().forEach((username) -> {
					mail.addUsers(getUser(username));
				});
				wsSendToUsers(blog2, Cmd.At, WebUtil.getUsername(), usernameArr);
			}

			try {
				mailSender.sendHtmlByQueue(
						String.format("TMS-@_%s", DateUtil.format(new Date(), DateUtil.FORMAT7)),
						TemplateUtil.process("templates/mail/mail-dynamic",
								MapUtil.objArr2Map("user", loginUser, "date", new Date(), "href", href,
										"title", "@", "content", html)),
						getLoginUserName(loginUser), mail.get());
			} catch (Exception e) {
				e.printStackTrace();
			}
		}

		return RespBody.succeed(blog2);
	}

	/**
	 * Asynchronously records a BlogNews entry per recipient and pushes a
	 * per-user WebSocket payload about the given blog.
	 */
	private void wsSendToUsers(Blog blog, Cmd cmd, String loginUsername, String... usernames) {
		try {
			ThreadUtil.exec(() -> {
				BlogPayload blogPayload = BlogPayload.builder().id(blog.getId()).version(blog.getVersion())
						.title(blog.getTitle()).cmd(cmd).username(loginUsername).build();
				for (String username : usernames) {
					BlogNews blogNews = blogNewsRepository.saveAndFlush(BlogNews.builder().bid(blog.getId())
							.title(blog.getTitle()).to(username).cmd(cmd).username(loginUsername).build());
					blogPayload.setNid(blogNews.getId());
					messagingTemplate.convertAndSendToUser(username, "/blog/update", blogPayload);
				}
			});
		} catch (Exception e) {
			logger.error(e.getMessage(), e);
		}
	}

	/**
	 * Variant of {@link #wsSendToUsers(Blog, Cmd, String, String...)} that
	 * additionally carries the triggering comment's id.
	 */
	private void wsSendToUsers(Blog blog, Comment comment, Cmd cmd, String loginUsername, String... usernames) {
		try {
			ThreadUtil.exec(() -> {
				BlogPayload blogPayload = BlogPayload.builder().id(blog.getId()).version(blog.getVersion())
						.title(blog.getTitle()).cid(comment.getId()).cmd(cmd).username(loginUsername).build();
				for (String username : usernames) {
					BlogNews blogNews = blogNewsRepository
							.saveAndFlush(BlogNews.builder().bid(blog.getId()).cid(comment.getId())
									.title(blog.getTitle()).to(username).cmd(cmd).username(loginUsername).build());
					blogPayload.setNid(blogNews.getId());
					messagingTemplate.convertAndSendToUser(username, "/blog/update", blogPayload);
				}
			});
		} catch (Exception e) {
			logger.error(e.getMessage(), e);
		}
	}

	/** Asynchronously broadcasts a blog-change payload to all subscribers. */
	private void wsSend(Blog blog, Cmd cmd, String loginUsername) {
		try {
			ThreadUtil.exec(() -> {
				messagingTemplate.convertAndSend("/blog/update",
						BlogPayload.builder().id(blog.getId()).openEdit(blog.getOpenEdit()).version(blog.getVersion())
								.title(blog.getTitle()).cmd(cmd).username(loginUsername).build());
			});
		} catch (Exception e) {
			logger.error(e.getMessage(), e);
		}
	}

	/**
	 * Lists all non-deleted blogs (super users only); contents are stripped
	 * to keep the payload small.
	 */
	@RequestMapping(value = "list", method = RequestMethod.GET)
	@ResponseBody
	public RespBody list(@SortDefault(value = "id", direction =
Direction.DESC) Sort sort) {

		if (!isSuper()) {
			return RespBody.failed("!");
		}

		List<Blog> blogs = blogRepository.findByStatusNot(Status.Deleted, sort);
		// Strip bodies: the listing only needs metadata.
		blogs.forEach(b -> b.setContent(null));

		return RespBody.succeed(blogs);
	}

	/**
	 * Lists the non-deleted blogs visible to the current user, with heavy /
	 * sensitive fields nulled out and creator/dir/space reduced to slim copies.
	 */
	@RequestMapping(value = "listMy", method = RequestMethod.GET)
	@ResponseBody
	public RespBody listMy(@SortDefault(value = "id", direction = Direction.DESC) Sort sort) {

		List<Blog> blogs = blogRepository.findByStatusNot(Status.Deleted, sort).stream().filter(b -> hasAuth(b))
				.peek(b -> {
					b.setContent(null);
					b.setBlogAuthorities(null);
					b.setUpdater(null);
					// Replace the creator with a copy exposing only the username.
					User creator = b.getCreator();
					if (creator != null) {
						User user2 = new User();
						user2.setUsername(creator.getUsername());
						user2.setAuthorities(null);
						user2.setStatus(null);
						b.setCreator(user2);
					}
					b.setCreateDate(null);
					b.setOpenEdit(null);
					b.setOpened(null);
					b.setReadCnt(null);
					b.setTags(null);
					b.setType(null);
					// Replace dir/space with id-only copies.
					Dir dir = b.getDir();
					if (dir != null) {
						Dir dir2 = new Dir();
						dir2.setId(dir.getId());
						dir2.setOpened(null);
						dir2.setPrivated(null);
						dir2.setStatus(null);
						b.setDir(dir2);
					}
					Space space = b.getSpace();
					if (space != null) {
						Space space2 = new Space();
						space2.setId(space.getId());
						space2.setDirs(null);
						space2.setOpened(null);
						space2.setPrivated(null);
						space2.setSpaceAuthorities(null);
						space2.setStatus(null);
						b.setSpace(space2);
					}
				}).collect(Collectors.toList());

		return RespBody.succeed(blogs);
	}

	/**
	 * Updates title/content of a blog with optimistic version checking,
	 * snapshots the previous revision into BlogHistory, then notifies
	 * followers / mentioned users by WebSocket and mail.
	 */
	@RequestMapping(value = "update", method = RequestMethod.POST)
	@ResponseBody
	public RespBody update(@RequestParam("url") String url,
			@RequestParam(value = "usernames", required = false) String usernames, @RequestParam("id") Long id,
			@RequestParam("version") Long version, @RequestParam("title") String title,
			@RequestParam("content") String content, @RequestParam(value = "diff", required = false) String diff,
			@RequestParam(value = "contentHtml", required = false) String contentHtml,
			@RequestParam(value = "contentHtmlOld", required = false) String contentHtmlOld) {

		if (StringUtil.isEmpty(title)) {
			return RespBody.failed("!");
		}

		if (StringUtil.isEmpty(content)) {
			return RespBody.failed("!");
		}

		Blog blog = blogRepository.findOne(id);

		Boolean isOpenEdit = blog.getOpenEdit() == null ? false : blog.getOpenEdit();

		// Only super users / the creator may edit, unless open-edit is enabled
		// (and even then the caller must have read access).
		if (!isSuperOrCreator(blog.getCreator().getUsername()) && !isOpenEdit) {
			return RespBody.failed("!");
		}

		if (isOpenEdit && !hasAuth(blog)) {
			return RespBody.failed("!");
		}

		// Optimistic concurrency: reject stale versions.
		if (blog.getVersion() != version.longValue()) {
			return RespBody.failed(",!");
		}

		boolean isUpdated = false;

		if (!content.equals(blog.getContent())) {
			logWithProperties(Action.Update, Target.Blog, blog.getId(), "content", diff, blog.getTitle());
			isUpdated = true;
		}

		if (!title.equals(blog.getTitle())) {
			logWithProperties(Action.Update, Target.Blog, blog.getId(), "title", title, blog.getTitle());
			isUpdated = true;
		}

		if (isUpdated) {

			// Snapshot the current revision before overwriting it.
			BlogHistory blogHistory = new BlogHistory();
			blogHistory.setBlog(blog);
			blogHistory.setTitle(blog.getTitle());
			blogHistory.setContent(blog.getContent());
			blogHistory.setBlogUpdater(blog.getUpdater());
			blogHistory.setBlogUpdateDate(blog.getUpdateDate());
			blogHistory.setEditor(blog.getEditor());

			blogHistoryRepository.saveAndFlush(blogHistory);

			blog.setTitle(title);
			blog.setContent(content);

			Blog blog2 = blogRepository.saveAndFlush(blog);

			wsSend(blog2, Cmd.U, WebUtil.getUsername());

			final User loginUser = getLoginUser();
			final String href = url + "#/blog/" + blog2.getId();
			// Mail body: prefer the Markdown diff; otherwise include old+new HTML.
			final String html;
			if (StringUtil.isNotEmpty(diff)) {
				html = "<h3>(Markdown):</h3><b>:</b> <a href=\"" + href + "\">" + href + "</a><hr/>" + diff;
			} else {
				html = "<h3>:</h3>" + contentHtml + "<hr/><h3>:</h3>" + contentHtmlOld;
			}

			final Mail mail = Mail.instance();

			List<BlogFollower> followers = blogFollowerRepository.findByBlogAndStatusNot(blog2, Status.Deleted);

			// An editor who is not the creator becomes a follower automatically
			// (re-activating a previously deleted follow if one exists).
			if (!blog.getCreator().equals(loginUser)) {
				BlogFollower blogFollower = blogFollowerRepository.findOneByBlogAndCreator(blog2, loginUser);
				if (blogFollower != null) {
					if (blogFollower.getStatus().equals(Status.Deleted)) {
						blogFollower.setStatus(Status.New);
						blogFollowerRepository.saveAndFlush(blogFollower);
						logWithProperties(Action.Update, Target.Blog, id, "follower", blog2.getTitle());
					}
				} else {
					// NOTE(review): only the blog is set here; presumably the
					// creator/auditing fields are filled elsewhere — confirm.
					BlogFollower blogFollower2 = new BlogFollower();
					blogFollower2.setBlog(blog2);
					blogFollowerRepository.saveAndFlush(blogFollower2);
					logWithProperties(Action.Update, Target.Blog, id, "follower", blog.getTitle());
				}
			}

			mail.addUsers(followers.stream().map(f -> f.getCreator()).collect(Collectors.toList()), loginUser);
			mail.addUsers(Arrays.asList(blog2.getCreator()), loginUser);

			// Mentioned users are notified with Cmd.At, remaining followers with
			// Cmd.F, and the creator (when neither) with Cmd.OU.
			List<String> usernameArr = Arrays
					.asList(StringUtil.isNotEmpty(usernames) ? usernames.split(",") : new String[0]);

			List<String> fs = followers.stream().map(f -> f.getCreator().getUsername())
					.filter(f -> !usernameArr.contains(f)).collect(Collectors.toList());
			wsSendToUsers(blog2, Cmd.F, WebUtil.getUsername(), fs.toArray(new String[0]));

			String bCreator = blog.getCreator().getUsername();
			if (!blog.getCreator().equals(loginUser) && !usernameArr.contains(bCreator) && !fs.contains(bCreator)) {
				wsSendToUsers(blog, Cmd.OU, WebUtil.getUsername(), bCreator);
			}

			if (usernameArr.size() > 0) {
				usernameArr.stream().forEach((username) -> {
					mail.addUsers(getUser(username));
				});
				wsSendToUsers(blog2, Cmd.At, WebUtil.getUsername(), usernameArr.toArray(new String[0]));
			}

			if (!mail.isEmpty()) {
				try {
					mailSender.sendHtmlByQueue(
							String.format("TMS-@_%s", DateUtil.format(new Date(), DateUtil.FORMAT7)),
							TemplateUtil.process("templates/mail/mail-dynamic",
									MapUtil.objArr2Map("user", loginUser, "date", new Date(), "href", href,
											"title", "@", "content", html)),
							getLoginUserName(loginUser), mail.get());
				} catch (Exception e) {
					e.printStackTrace();
				}
			}

			return RespBody.succeed(blog2);
		} else {
			return RespBody.failed("!");
		}
	}

	/**
	 * Switches the blog's editor type, snapshotting the old revision first;
	 * uses the same optimistic version check as update().
	 */
	@RequestMapping(value = "editor/change", method = RequestMethod.POST)
	@ResponseBody
	public RespBody changeEditor(@RequestParam("id") Long id, @RequestParam("version") Long
version, @RequestParam("content") String content, @RequestParam("editor") String editor) { if (StringUtil.isEmpty(content)) { return RespBody.failed("!"); } Blog blog = blogRepository.findOne(id); if (!isSuperOrCreator(blog.getCreator().getUsername())) { return RespBody.failed("!"); } if (blog.getVersion() != version.longValue()) { return RespBody.failed(",!"); } BlogHistory blogHistory = new BlogHistory(); blogHistory.setBlog(blog); blogHistory.setTitle(blog.getTitle()); blogHistory.setContent(blog.getContent()); blogHistory.setBlogUpdater(blog.getUpdater()); blogHistory.setBlogUpdateDate(blog.getUpdateDate()); blogHistory.setEditor(blog.getEditor()); blogHistoryRepository.saveAndFlush(blogHistory); blog.setContent(content); blog.setEditor(Editor.valueOf(editor)); Blog blog2 = blogRepository.saveAndFlush(blog); wsSend(blog2, Cmd.U, WebUtil.getUsername()); return RespBody.succeed(blog2); } @RequestMapping(value = "delete", method = RequestMethod.POST) @ResponseBody public RespBody delete(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (!isSuperOrCreator(blog.getCreator().getUsername())) { return RespBody.failed("!"); } blog.setStatus(Status.Deleted); blogRepository.saveAndFlush(blog); wsSend(blog, Cmd.D, WebUtil.getUsername()); log(Action.Delete, Target.Blog, id, blog.getTitle()); return RespBody.succeed(id); } @RequestMapping(value = "get", method = RequestMethod.GET) @ResponseBody public RespBody get(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (blog == null || Status.Deleted.equals(blog.getStatus())) { return RespBody.failed("!"); } if (!hasAuth(blog)) { return RespBody.failed("!"); } Long readCnt = blog.getReadCnt(); if (readCnt == null) { readCnt = 1L; } else { readCnt = readCnt + 1; } blogRepository.updateReadCnt(readCnt, id); blog.setReadCnt(readCnt); return RespBody.succeed(blog); } @RequestMapping(value = "search", method = RequestMethod.GET) @ResponseBody public RespBody search(@RequestParam("search") 
String search, @RequestParam(value = "comment", defaultValue = "false") Boolean comment,
			@RequestParam(value = "ellipsis", defaultValue = "60") Integer ellipsis,
			@SortDefault(value = "id", direction = Direction.DESC) Sort sort) {

		if (StringUtil.isEmpty(search)) {
			return RespBody.failed("!");
		}

		List<Blog> blogs = new ArrayList<>();
		List<Comment> comments = new ArrayList<>();

		// "tag:"/"tags:" prefix — match by tag names (whitespace-separated).
		if (search.toLowerCase().startsWith("tags:") || search.toLowerCase().startsWith("tag:")) {
			String[] arr = search.split(":", 2);
			if (StringUtil.isNotEmpty(arr[1].trim())) {
				String[] tags = arr[1].trim().split("\\s+");
				blogs = blogRepository.findByStatusNotAndTags_nameIn(Status.Deleted, Arrays.asList(tags), sort).stream()
						.filter(b -> hasAuth(b)).peek(b -> {
							b.setContent(StringUtil.limitLength(b.getContent(), ellipsis));
							b.setBlogAuthorities(null);
						}).collect(Collectors.toList());
			}
			if (comment) {
				return RespBody.succeed(new BlogSearchResult(blogs, comments));
			}
		// "from:<user> [text]" prefix — by creator, optionally narrowed by text.
		} else if (search.toLowerCase().startsWith("from:")) {
			String[] arr = search.split(":", 2);
			if (StringUtil.isNotEmpty(arr[1].trim())) {
				String[] condis = arr[1].trim().split("\\s+");
				User user = getUser(condis[0]);
				if (user != null) {
					if (condis.length == 1) {
						blogs = blogRepository.findByCreatorAndStatusNot(user, Status.Deleted, sort).stream()
								.filter(b -> hasAuth(b)).peek(b -> {
									b.setContent(StringUtil.limitLength(b.getContent(), ellipsis));
									b.setBlogAuthorities(null);
								}).collect(Collectors.toList());
					} else {
						blogs = blogRepository
								.findByCreatorAndStatusNotAndTitleContainingIgnoreCaseOrCreatorAndStatusNotAndContentContainingIgnoreCase(
										user, Status.Deleted, condis[1], user, Status.Deleted, condis[1], sort)
								.stream().filter(b -> hasAuth(b)).peek(b -> {
									b.setContent(StringUtil.limitLength(b.getContent(), ellipsis));
									b.setBlogAuthorities(null);
								}).collect(Collectors.toList());
					}
				}
			}
			if (comment) {
				return RespBody.succeed(new BlogSearchResult(blogs, comments));
			}
		// Plain text — match against title or content (case-insensitive).
		} else {
			blogs = blogRepository
					.findByStatusNotAndTitleContainingIgnoreCaseOrStatusNotAndContentContainingIgnoreCase(
							Status.Deleted, search, Status.Deleted, search, sort)
					.stream().filter(b -> hasAuth(b)).peek(b -> {
						b.setContent(StringUtil.limitLength(b.getContent(), ellipsis));
						b.setBlogAuthorities(null);
					}).collect(Collectors.toList());

			// Only the plain-text branch also searches blog comments.
			if (comment) {
				comments = commentRepository.findByTypeAndStatusNotAndContentContainingIgnoreCase(CommentType.Blog,
						Status.Deleted, search, sort).stream().filter(c -> hasAuth(Long.valueOf(c.getTargetId())))
						.peek(c -> {
							c.setContent(StringUtil.limitLength(c.getContent(), ellipsis));
						}).collect(Collectors.toList());

				return RespBody.succeed(new BlogSearchResult(blogs, comments));
			}
		}

		return RespBody.succeed(blogs);
	}

	/**
	 * Toggles collaborative ("open") editing; broadcasts a WebSocket event
	 * when a non-private blog is opened for editing.
	 */
	@RequestMapping(value = "openEdit", method = RequestMethod.POST)
	@ResponseBody
	public RespBody openEdit(@RequestParam("id") Long id, @RequestParam("open") Boolean open) {

		Blog blog = blogRepository.findOne(id);

		if (blog == null) {
			return RespBody.failed(",!");
		}

		if (!isSuperOrCreator(blog.getCreator().getUsername())) {
			return RespBody.failed("!");
		}

		blog.setOpenEdit(open);
		blogRepository.saveAndFlush(blog);

		if (!blog.getPrivated() && open) {
			wsSend(blog, Cmd.Open, WebUtil.getUsername());
		}

		logWithProperties(Action.Update, Target.Blog, id, "openEdit", open, blog.getTitle());

		return RespBody.succeed();
	}

	/**
	 * Returns whether the current user already appears in the given
	 * comma-separated voter list.
	 */
	private boolean isVoterExists(String voters) {
		boolean isExits = false;
		if (voters != null) {
			String loginUsername = WebUtil.getUsername();
			String[] voterArr = voters.split(",");
			for (String voter : voterArr) {
				if (voter.equals(loginUsername)) {
					isExits = true;
					break;
				}
			}
		}
		return isExits;
	}

	/** Removes the current user from a comma-separated voter list. */
	private String calcVoters(String voters) {
		List<String> list = Stream.of(voters.split(",")).filter(v -> !v.equals(WebUtil.getUsername()))
				.collect(Collectors.toList());
		return StringUtil.join(",", list);
	}

	/**
	 * Up-votes ("zan") a blog, or withdraws the current user's vote when
	 * {@code type} is not "Zan"; a new vote mails the blog's creator.
	 */
	@RequestMapping(value = "vote", method = RequestMethod.POST)
	@ResponseBody
	public RespBody vote(@RequestParam("id") Long id, @RequestParam("url") String url,
// POST /vote (body continues from the signature on the previous line, and continues onto the next).
// Zan (like) branch: rejects a duplicate vote by the same user, otherwise appends the username to the
// comma-separated voteZan list, bumps voteZanCnt (null-safe) via a bulk repository update, and logs.
// Else branch (un-vote): if the user had voted, rebuilds the voter list via calcVoters and decrements
// the count (expression completes on the next physical line).
// NOTE(review): voteZan is stored as a comma-joined username string — concurrent votes can lose
// updates since read-modify-write is not atomic here; confirm acceptable before relying on counts.
@RequestParam("contentHtml") String contentHtml, @RequestParam(value = "type", required = false) String type) { Blog blog = blogRepository.findOne(id); if (blog == null) { return RespBody.failed("!"); } String loginUsername = WebUtil.getUsername(); String title = ""; final User loginUser = getLoginUser(); if (VoteType.Zan.name().equalsIgnoreCase(type)) { String voteZan = blog.getVoteZan(); if (isVoterExists(voteZan)) { return RespBody.failed("[]"); } else { String vz = voteZan == null ? loginUsername : voteZan + ',' + loginUsername; Integer voteZanCnt = blog.getVoteZanCnt(); Integer vzc = voteZanCnt == null ? 1 : voteZanCnt + 1; blogRepository.updateVoteZan(vz, vzc, id); logWithProperties(Action.Update, Target.Blog, id, "voteZan", blog.getTitle()); blog.setVoteZan(vz); blog.setVoteZanCnt(vzc); title = loginUser.getName() + "[" + loginUsername + "]!"; } } else { String voteZan = blog.getVoteZan(); if (isVoterExists(voteZan)) { String vz = this.calcVoters(voteZan); Integer voteZanCnt = blog.getVoteZanCnt(); Integer vzc = voteZanCnt == null ?
// Tail of /vote: un-vote persists the shrunken voter list and returns early; otherwise the method
// queues a notification mail to the blog creator (exceptions only printStackTrace'd) and returns the blog.
// Then POST /comment/vote begins: same like/un-like pattern as /vote but against a Comment entity,
// persisted with saveAndFlush instead of a bulk update.
// NOTE(review): e.printStackTrace() should be a logger call; mail-template strings appear stripped
// of non-ASCII text by the extraction — verify against original source.
0 : voteZanCnt - 1; blogRepository.updateVoteZan(vz, vzc, id); blog.setVoteZan(vz); blog.setVoteZanCnt(vzc); return RespBody.succeed(blog); } } final String href = url + "#/blog/" + id; final String titleHtml = title; final Mail mail = Mail.instance().addUsers(blog.getCreator()); final String html = "<h3>:</h3><hr/>" + contentHtml; try { mailSender.sendHtmlByQueue(String.format("TMS-@_%s", DateUtil.format(new Date(), DateUtil.FORMAT7)), TemplateUtil.process("templates/mail/mail-dynamic", MapUtil.objArr2Map("user", loginUser, "date", new Date(), "href", href, "title", titleHtml, "content", html)), getLoginUserName(loginUser), mail.get()); } catch (Exception e) { e.printStackTrace(); } return RespBody.succeed(blog); } @RequestMapping(value = "comment/vote", method = RequestMethod.POST) @ResponseBody public RespBody voteComment(@RequestParam("cid") Long cid, @RequestParam("url") String url, @RequestParam("contentHtml") String contentHtml, @RequestParam(value = "type", required = false) String type) { Comment comment = commentRepository.findOne(cid); if (comment == null) { return RespBody.failed("!"); } String loginUsername = WebUtil.getUsername(); Comment comment2 = null; String title = ""; final User loginUser = getLoginUser(); if (VoteType.Zan.name().equalsIgnoreCase(type)) { String voteZan = comment.getVoteZan(); if (isVoterExists(voteZan)) { return RespBody.failed("[]"); } else { comment.setVoteZan(voteZan == null ? loginUsername : voteZan + ',' + loginUsername); Integer voteZanCnt = comment.getVoteZanCnt(); comment.setVoteZanCnt(voteZanCnt == null ?
// Tail of /comment/vote (persist like / un-like, queue notification mail to the comment creator).
// GET /share/to/search: given a search string, returns up to 6 matching enabled users and up to 6
// matching non-deleted channels (members nulled out to slim the payload) in a {users, channels} map.
// Then POST /share begins (signature is split mid-parameter onto the next physical line).
1 : voteZanCnt + 1); comment2 = commentRepository.saveAndFlush(comment); title = loginUser.getName() + "[" + loginUsername + "]!"; logWithProperties(Action.Update, Target.Comment, cid, "voteZan", comment2.getTargetId(), comment2.getContent()); } } else { String voteZan = comment.getVoteZan(); if (isVoterExists(voteZan)) { comment.setVoteZan(this.calcVoters(voteZan)); Integer voteZanCnt = comment.getVoteZanCnt(); comment.setVoteZanCnt(voteZanCnt == null ? 0 : voteZanCnt - 1); comment2 = commentRepository.saveAndFlush(comment); return RespBody.succeed(comment2); } } final String href = url + "#/blog/" + comment.getTargetId() + "?cid=" + cid; final String titleHtml = title; final Mail mail = Mail.instance().addUsers(comment.getCreator()); final String html = "<h3>:</h3><hr/>" + contentHtml; try { mailSender.sendHtmlByQueue(String.format("TMS-@_%s", DateUtil.format(new Date(), DateUtil.FORMAT7)), TemplateUtil.process("templates/mail/mail-dynamic", MapUtil.objArr2Map("user", loginUser, "date", new Date(), "href", href, "title", titleHtml, "content", html)), getLoginUserName(loginUser), mail.get()); } catch (Exception e) { e.printStackTrace(); } return RespBody.succeed(comment2); } @RequestMapping(value = "share/to/search", method = RequestMethod.GET) @ResponseBody public RespBody searchShareTo(@RequestParam("search") String search) { if (StringUtil.isEmpty(search)) { return RespBody.failed("!"); } Map<String, Object> map = new HashMap<>(); List<User> users = userRepository.findTop6ByUsernameContainingIgnoreCaseAndEnabledTrue(search); List<Channel> channels = channelRepository.findTop6ByNameContainingIgnoreCaseAndStatusNot(search, Status.Deleted); channels.forEach(c -> c.setMembers(null)); map.put("users", users); map.put("channels", channels); return RespBody.succeed(map); } @RequestMapping(value = "share", method = RequestMethod.POST) @ResponseBody public RespBody share(@RequestParam("basePath") String basePath, @RequestParam("id") Long id, @RequestParam("html") String
// POST /share body: shares a blog to named users (direct chats), channels (channel messages) and
// raw e-mail addresses, then sends one HTML mail asynchronously after a 3s delay via ThreadUtil.exec.
// NOTE(review): the two `StringUtil.replace(" loginUser.getUsername(), ...` calls below are
// CORRUPTED by the extraction — the template string's text and closing quote were stripped, leaving
// an unterminated literal. Restore the original templates from source control before compiling.
// NOTE(review): Thread.sleep(3000) inside the async task is a crude ordering hack — confirm intent.
html, @RequestParam(value = "desc", required = false) String desc, @RequestParam(value = "users", required = false) String users, @RequestParam(value = "channels", required = false) String channels, @RequestParam(value = "mails", required = false) String mails) { Blog blog = blogRepository.findOne(id); if (!hasAuth(blog)) { return RespBody.failed("!"); } final User loginUser = getLoginUser(); final String href = basePath + "#/blog/" + id; final String html2 = StringUtil.replace( "<h1 style=\"color: blue;\">: <a target=\"_blank\" href=\"{?1}\">{?2}</a></h1><hr/>{?3}", href, blog.getTitle(), html); final String title = StringUtil.isNotEmpty(desc) ? desc : ""; final String shareDesc = StringUtil.isNotEmpty(desc) ? "> ****" + desc : StringUtil.EMPTY; Mail mail = Mail.instance(); if (StringUtil.isNotEmpty(users)) { Stream.of(users.split(",")).forEach(username -> { User user = getUser(username); if (user != null) { mail.addUsers(user); ChatDirect chatDirect = new ChatDirect(); chatDirect.setChatTo(user); chatDirect.setContent( StringUtil.replace(" loginUser.getUsername(), blog.getTitle(), href, blog.getContent(), shareDesc)); chatDirectRepository.saveAndFlush(chatDirect); } }); } if (StringUtil.isNotEmpty(channels)) { Stream.of(channels.split(",")).forEach(name -> { Channel channel = channelRepository.findOneByName(name); if (channel != null) { channel.getMembers().forEach(user -> { mail.addUsers(user); }); ChatChannel chatChannel = new ChatChannel(); chatChannel.setChannel(channel); chatChannel.setContent( StringUtil.replace(" loginUser.getUsername(), blog.getTitle(), href, blog.getContent(), shareDesc)); chatChannelService.save(chatChannel); } }); } if (StringUtil.isNotEmpty(mails)) { Stream.of(mails.split(",")).forEach(m -> { if (ValidateUtil.isEmail(m)) { mail.add(m); } }); } ThreadUtil.exec(() -> { try { Thread.sleep(3000); mailSender.sendHtml(String.format("TMS-_%s", DateUtil.format(new Date(), DateUtil.FORMAT7)), TemplateUtil.process("templates/mail/mail-dynamic",
// Tail of /share's async mail send, then POST /comment/share begins: same share pattern but for a
// single comment (href is provided by the caller rather than computed).
// NOTE(review): unlike /share, shareComment performs no hasAuth/null check on the looked-up comment
// before use — confirm whether that is intentional.
MapUtil.objArr2Map("user", loginUser, "date", new Date(), "href", href, "title", title, "content", html2)), getLoginUserName(loginUser), mail.get()); logger.info(""); } catch (Exception e) { e.printStackTrace(); logger.error(""); } }); return RespBody.succeed(); } @RequestMapping(value = "comment/share", method = RequestMethod.POST) @ResponseBody public RespBody shareComment(@RequestParam("basePath") String basePath, @RequestParam("id") Long id, @RequestParam("href") final String href, @RequestParam("html") String html, @RequestParam(value = "desc", required = false) String desc, @RequestParam(value = "users", required = false) String users, @RequestParam(value = "channels", required = false) String channels, @RequestParam(value = "mails", required = false) String mails) { Comment comment = commentRepository.findOne(id); final User loginUser = getLoginUser(); final String html2 = StringUtil.replace( "<h1 style=\"color: blue;\">: <a target=\"_blank\" href=\"{?1}\">{?2}</a></h1><hr/>{?3}", href, "", html); final String title = StringUtil.isNotEmpty(desc) ?
// Body of /comment/share: fan out to users / channels / mail addresses, then async HTML mail.
// NOTE(review): both `StringUtil.replace(" loginUser.getUsername(), ...` calls here are corrupted by
// the extraction (template text and closing quote stripped — unterminated string literal); restore
// from source control. Then POST /comment/create begins at the end of this line: it builds a new
// Comment (content, targetId=blog id) — continues on the next physical line.
desc : ""; Mail mail = Mail.instance(); if (StringUtil.isNotEmpty(users)) { Stream.of(users.split(",")).forEach(username -> { User user = getUser(username); if (user != null) { mail.addUsers(user); ChatDirect chatDirect = new ChatDirect(); chatDirect.setChatTo(user); chatDirect.setContent( StringUtil.replace(" loginUser.getUsername(), "", href, comment.getContent())); chatDirectRepository.saveAndFlush(chatDirect); } }); } if (StringUtil.isNotEmpty(channels)) { Stream.of(channels.split(",")).forEach(name -> { Channel channel = channelRepository.findOneByName(name); if (channel != null) { channel.getMembers().forEach(user -> { mail.addUsers(user); }); ChatChannel chatChannel = new ChatChannel(); chatChannel.setChannel(channel); chatChannel.setContent( StringUtil.replace(" loginUser.getUsername(), "", href, comment.getContent())); chatChannelService.save(chatChannel); } }); } if (StringUtil.isNotEmpty(mails)) { Stream.of(mails.split(",")).forEach(m -> { if (ValidateUtil.isEmail(m)) { mail.add(m); } }); } ThreadUtil.exec(() -> { try { Thread.sleep(3000); mailSender.sendHtml(String.format("TMS-_%s", DateUtil.format(new Date(), DateUtil.FORMAT7)), TemplateUtil.process("templates/mail/mail-dynamic", MapUtil.objArr2Map("user", loginUser, "date", new Date(), "href", href, "title", title, "content", html2)), getLoginUserName(loginUser), mail.get()); logger.info(""); } catch (Exception e) { e.printStackTrace(); logger.error(""); } }); return RespBody.succeed(); } @RequestMapping(value = "comment/create", method = RequestMethod.POST) @ResponseBody public RespBody createComment(@RequestParam("basePath") String basePath, @RequestParam("id") Long id, @RequestParam("content") String content, @RequestParam("contentHtml") final String contentHtml, @RequestParam(value = "users", required = false) String users) { if (!hasAuth(id)) { return RespBody.failed("!"); } Comment comment = new Comment(); comment.setContent(content); comment.setTargetId(String.valueOf(id));
// Tail of POST /comment/create and the whole of POST /comment/update.
// createComment: persists the comment, notifies @-mentioned users (Cmd.CAt), blog followers
// (Cmd.FCC) and the blog creator (Cmd.CC) over websocket, auto-follows the blog for the commenter
// (reviving a previously Deleted BlogFollower row if one exists), and queues a notification mail.
// updateComment: permission + optimistic-version checks, saves new content, then the same
// @-mention / follower (Cmd.FCU) / creator (Cmd.CU) notification fan-out and mail.
// NOTE(review): the string literal `"<h1 style=\"color: ` at the end of the first physical line is
// split across the extraction's line boundary — these two lines must stay joined; do not insert
// anything between them. The stray `// auto follow blog` token mid-line is a fused source comment.
comment.setType(CommentType.Blog); Comment comment2 = commentRepository.saveAndFlush(comment); log(Action.Create, Target.Comment, comment2.getId(), content, id); final User loginUser = getLoginUser(); final String href = basePath + "#/blog/" + id + "?cid=" + comment2.getId(); Blog blog = blogRepository.findOne(id); Mail mail = Mail.instance(); mail.addUsers(Arrays.asList(blog.getCreator()), loginUser); List<String> atUsers = Arrays.asList(StringUtil.isNotEmpty(users) ? users.split(",") : new String[0]); if (atUsers.size() > 0) { atUsers.forEach(username -> { User user = getUser(username); mail.addUsers(user); }); wsSendToUsers(blog, comment2, Cmd.CAt, WebUtil.getUsername(), atUsers.toArray(new String[0])); } List<BlogFollower> followers = blogFollowerRepository.findByBlogAndStatusNot(blog, Status.Deleted); mail.addUsers(followers.stream().map(f -> f.getCreator()).collect(Collectors.toList()), loginUser); List<String> fs = followers.stream().map(f -> f.getCreator().getUsername()).filter(f -> !atUsers.contains(f)) .collect(Collectors.toList()); wsSendToUsers(blog, comment2, Cmd.FCC, WebUtil.getUsername(), fs.toArray(new String[0])); String bCreator = blog.getCreator().getUsername(); if (!blog.getCreator().equals(loginUser) && !atUsers.contains(bCreator) && !fs.contains(bCreator)) { wsSendToUsers(blog, comment2, Cmd.CC, WebUtil.getUsername(), bCreator); } // auto follow blog boolean isFollower = followers.stream().anyMatch(f -> f.getCreator().equals(loginUser)); if (!isFollower && !blog.getCreator().equals(loginUser)) { BlogFollower blogFollower = blogFollowerRepository.findOneByBlogAndCreator(blog, getLoginUser()); if (blogFollower != null) { if (blogFollower.getStatus().equals(Status.Deleted)) { blogFollower.setStatus(Status.New); blogFollowerRepository.saveAndFlush(blogFollower); } } else { blogFollower = new BlogFollower(); blogFollower.setBlog(blog); blogFollowerRepository.saveAndFlush(blogFollower); } } final String html = StringUtil.replace( "<h1 style=\"color: 
blue;\">: <a target=\"_blank\" href=\"{?1}\">{?2}</a></h1><hr/>{?3}", href, blog.getTitle(), contentHtml); try { mailSender.sendHtmlByQueue(String.format("TMS-_%s", DateUtil.format(new Date(), DateUtil.FORMAT7)), TemplateUtil.process("templates/mail/mail-dynamic", MapUtil.objArr2Map("user", loginUser, "date", new Date(), "href", href, "title", "", "content", html)), getLoginUserName(loginUser), mail.get()); } catch (Exception e) { e.printStackTrace(); } return RespBody.succeed(comment2); } @RequestMapping(value = "comment/update", method = RequestMethod.POST) @ResponseBody public RespBody updateComment(@RequestParam("basePath") String basePath, @RequestParam("id") Long id, @RequestParam("cid") Long cid, @RequestParam("version") Long version, @RequestParam("content") String content, @RequestParam("contentHtml") final String contentHtml, @RequestParam("diff") final String diff, @RequestParam(value = "users", required = false) String users) { Comment comment = commentRepository.findOne(cid); if (comment == null) { return RespBody.failed(",!"); } if (!isSuperOrCreator(comment.getCreator().getUsername())) { return RespBody.failed("!"); } if (comment.getVersion() != version.longValue()) { return RespBody.failed(",!"); } if (!hasAuth(Long.valueOf(comment.getTargetId()))) { return RespBody.failed("!"); } comment.setContent(content); logWithProperties(Action.Update, Target.Comment, cid, "content", diff, id); Comment comment2 = commentRepository.saveAndFlush(comment); final User loginUser = getLoginUser(); final String href = basePath + "#/blog/" + id + "?cid=" + comment2.getId(); Blog blog = blogRepository.findOne(id); Mail mail = Mail.instance(); mail.addUsers(Arrays.asList(blog.getCreator()), loginUser); List<String> atUsers = Arrays.asList(StringUtil.isNotEmpty(users) ?
// Tail of /comment/update (notification fan-out + mail), then GET /comment/query (paged,
// non-deleted comments for a blog, gated by hasAuth) and the head of POST /comment/remove.
users.split(",") : new String[0]); if (atUsers.size() > 0) { atUsers.forEach(username -> { User user = getUser(username); mail.addUsers(user); }); wsSendToUsers(blog, comment2, Cmd.CAt, WebUtil.getUsername(), atUsers.toArray(new String[0])); } List<BlogFollower> followers = blogFollowerRepository.findByBlogAndStatusNot(blog, Status.Deleted); mail.addUsers(followers.stream().map(f -> f.getCreator()).collect(Collectors.toList()), loginUser); List<String> fs = followers.stream().map(f -> f.getCreator().getUsername()).filter(f -> !atUsers.contains(f)) .collect(Collectors.toList()); wsSendToUsers(blog, comment2, Cmd.FCU, WebUtil.getUsername(), fs.toArray(new String[0])); String bCreator = blog.getCreator().getUsername(); if (!blog.getCreator().equals(loginUser) && !atUsers.contains(bCreator) && !fs.contains(bCreator)) { wsSendToUsers(blog, comment2, Cmd.CU, WebUtil.getUsername(), bCreator); } final String html = StringUtil.replace( "<h1 style=\"color: blue;\">: <a target=\"_blank\" href=\"{?1}\">{?2}</a></h1><hr/>{?3}", href, blog.getTitle(), contentHtml); try { mailSender.sendHtmlByQueue(String.format("TMS-_%s", DateUtil.format(new Date(), DateUtil.FORMAT7)), TemplateUtil.process("templates/mail/mail-dynamic", MapUtil.objArr2Map("user", loginUser, "date", new Date(), "href", href, "title", "", "content", html)), getLoginUserName(loginUser), mail.get()); } catch (Exception e) { e.printStackTrace(); } return RespBody.succeed(comment2); } @RequestMapping(value = "comment/query", method = RequestMethod.GET) @ResponseBody public RespBody queryComment(@RequestParam("id") Long id, @PageableDefault(sort = { "id" }, direction = Direction.ASC) Pageable pageable) { if (!hasAuth(id)) { return RespBody.failed(""); } Page<Comment> page = commentRepository.findByTargetIdAndStatusNot(String.valueOf(id), Status.Deleted, pageable); return RespBody.succeed(page); } @RequestMapping(value = "comment/remove", method = RequestMethod.POST) @ResponseBody public RespBody
removeComment(@RequestParam("cid") Long cid) { Comment comment = commentRepository.findOne(cid); if (comment != null) { if (!isSuperOrCreator(comment.getCreator().getUsername())) { return RespBody.failed("!"); } comment.setStatus(Status.Deleted); commentRepository.saveAndFlush(comment); log(Action.Delete, Target.Comment, cid, comment.getContent()); } return RespBody.succeed(cid); } @RequestMapping(value = "comment/get", method = RequestMethod.GET) @ResponseBody public RespBody getComment(@RequestParam("cid") Long cid) { Comment comment = commentRepository.findOne(cid); if (!hasAuth(Long.valueOf(comment.getTargetId()))) { return RespBody.failed(""); } return RespBody.succeed(comment); } @RequestMapping(value = "space/update", method = RequestMethod.POST) @ResponseBody public RespBody updateSpace(@RequestParam("id") Long id, @RequestParam(value = "sid", required = false) Long sid, @RequestParam(value = "did", required = false) Long did) { Blog blog = blogRepository.findOne(id); if (!isSuperOrCreator(blog.getCreator().getUsername())) { if (blog.getSpace() == null || (!blog.getSpace().getCreator().getUsername().equals(WebUtil.getUsername()))) { return RespBody.failed("!"); } } Space space = sid != null ? spaceRepository.findOne(sid) : null; // blog.setSpace(space); Dir dir = did != null ? 
// Tail of /space/update: bulk-updates space+dir, logs a human-readable "space / dir" value,
// detaches the stale entity and re-reads it so the response reflects the bulk update.
// POST /privated/update: toggles the private flag (forcing opened=false when made private);
// POST /opened/update begins at the end of this line (forcing privated=false when opened).
// Both use em.detach + re-find for the same stale-entity reason.
// NOTE(review): blogRepository.findOne(id) result is dereferenced without a null check in both
// handlers — an unknown id NPEs; consider the same guard used elsewhere in this controller.
dirRepository.findOne(did) : null; // blog.setDir(dir); blogRepository.updateSpaceAndDir(space, dir, id); // Blog blog2 = blogRepository.saveAndFlush(blog); String val = StringUtil.EMPTY; if (space != null && dir != null) { val = space.getName() + " / " + dir.getName(); } else if (space != null && dir == null) { val = space.getName(); } else if (space == null && dir != null) { val = dir.getName(); } logWithProperties(Action.Update, Target.Blog, id, "space", val, blog.getTitle()); em.detach(blog); return RespBody.succeed(blogRepository.findOne(id)); } @RequestMapping(value = "privated/update", method = RequestMethod.POST) @ResponseBody public RespBody updatePrivated(@RequestParam("id") Long id, @RequestParam("privated") Boolean privated) { Blog blog = blogRepository.findOne(id); if (!isSuperOrCreator(blog.getCreator().getUsername())) { return RespBody.failed("!"); } blogRepository.updatePrivatedAndOpened(privated, (privated ? false : blog.getOpened()), id); // Blog blog2 = blogRepository.saveAndFlush(blog); logWithProperties(Action.Update, Target.Blog, id, "privated", privated, blog.getTitle()); em.detach(blog); return RespBody.succeed(blogRepository.findOne(id)); } @RequestMapping(value = "opened/update", method = RequestMethod.POST) @ResponseBody public RespBody updateOpened(@RequestParam("id") Long id, @RequestParam("opened") Boolean opened) { Blog blog = blogRepository.findOne(id); if (!isSuperOrCreator(blog.getCreator().getUsername())) { return RespBody.failed("!"); } blogRepository.updatePrivatedAndOpened((opened ?
// Tail of /opened/update, then the blog-history endpoints:
// GET /history/list — all non-deleted history rows for a blog (hasAuth-gated);
// GET /history/get — single history row, gated via its parent blog;
// POST /history/remove — soft-delete, super/creator only;
// POST /history/restore begins at the end of this line (continues on the next).
// NOTE(review): getHistory/removeHistory dereference blogHistoryRepository.findOne(hid) without a
// null check — unknown hid NPEs.
false : blog.getPrivated()), opened, id); // Blog blog2 = blogRepository.saveAndFlush(blog); logWithProperties(Action.Update, Target.Blog, id, "opened", opened, blog.getTitle()); em.detach(blog); return RespBody.succeed(blogRepository.findOne(id)); } @RequestMapping(value = "history/list", method = RequestMethod.GET) @ResponseBody public RespBody listHistory(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (!hasAuth(blog)) { return RespBody.failed("!"); } List<BlogHistory> blogHistories = blogHistoryRepository.findByBlogAndStatusNot(blog, Status.Deleted); return RespBody.succeed(blogHistories); } @RequestMapping(value = "history/get", method = RequestMethod.GET) @ResponseBody public RespBody getHistory(@RequestParam("hid") Long hid) { BlogHistory blogHistory = blogHistoryRepository.findOne(hid); Blog blog = blogHistory.getBlog(); if (!hasAuth(blog)) { return RespBody.failed("!"); } return RespBody.succeed(blogHistory); } @RequestMapping(value = "history/remove", method = RequestMethod.POST) @ResponseBody public RespBody removeHistory(@RequestParam("hid") Long hid) { BlogHistory blogHistory = blogHistoryRepository.findOne(hid); Blog blog = blogHistory.getBlog(); if (!isSuperOrCreator(blog.getCreator().getUsername())) { return RespBody.failed("!"); } blogHistory.setStatus(Status.Deleted); blogHistoryRepository.saveAndFlush(blogHistory); return RespBody.succeed(hid); } @RequestMapping(value = "history/restore", method = RequestMethod.POST) @ResponseBody public RespBody restoreHistory(@RequestParam("hid") Long hid) { BlogHistory blogHistory = blogHistoryRepository.findOne(hid); Blog blog = blogHistory.getBlog(); if (!hasAuth(blog)) { return RespBody.failed("!"); } Boolean isOpenEdit = blog.getOpenEdit() == null ?
// Tail of /history/restore: snapshots the blog's current title/content/editor into a NEW history
// row before copying the selected history's title/content (and editor, if set) back onto the blog.
// Then GET /download/{id} begins: renders a blog to .md/.pdf/.html files under uploadPath and
// streams one of them back; body continues across the next several physical lines.
// NOTE(review): on the 404 paths, `return` sits inside the try — if sendError itself throws
// IOException, execution falls through with blog == null and NPEs later; restructure when editing.
false : blog.getOpenEdit(); if (!isSuperOrCreator(blog.getCreator().getUsername()) && !isOpenEdit) { return RespBody.failed("!"); } if (isOpenEdit && !hasAuth(blog)) { return RespBody.failed("!"); } BlogHistory blogHistory2 = new BlogHistory(); blogHistory2.setBlog(blog); blogHistory2.setTitle(blog.getTitle()); blogHistory2.setContent(blog.getContent()); blogHistory2.setBlogUpdater(blog.getUpdater()); blogHistory2.setBlogUpdateDate(blog.getUpdateDate()); blogHistory2.setEditor(blog.getEditor()); blogHistoryRepository.saveAndFlush(blogHistory2); blog.setTitle(blogHistory.getTitle()); blog.setContent(blogHistory.getContent()); if (blogHistory.getEditor() != null) { blog.setEditor(blogHistory.getEditor()); } Blog blog2 = blogRepository.saveAndFlush(blog); return RespBody.succeed(blog2); } @RequestMapping(value = "download/{id}", method = RequestMethod.GET) public void download(HttpServletRequest request, HttpServletResponse response, @PathVariable Long id, @RequestParam(value = "type", defaultValue = "pdf") String type) throws Exception { logger.debug("download blog start..."); Blog blog = blogRepository.findOne(id); if (blog == null) { try { response.sendError(404, "!"); return; } catch (IOException e) { e.printStackTrace(); } } if (!hasAuth(blog)) { try { response.sendError(404, "!"); return; } catch (IOException e) { e.printStackTrace(); } } // (ServletContext) String path = WebUtil.getRealPath(request); String blogUpdateDate = DateUtil.format(blog.getUpdateDate(), DateUtil.FORMAT9); String mdFileName = blog.getId() + "_" + blogUpdateDate + ".md"; String pdfFileName = blog.getId() + "_" + blogUpdateDate + ".pdf"; String md2htmlFileName = blog.getId() + "_" + blogUpdateDate + ".html"; String mdFilePath = path + uploadPath + mdFileName; String pdfFilePath = path + uploadPath + pdfFileName; String md2htmlFilePath = path + uploadPath + md2htmlFileName; File fileMd = new File(mdFilePath); if (!fileMd.exists()) { try { String content = StringUtil.EMPTY; if 
// Interior of GET /download/{id}. For HTML-editor blogs, wraps the content in a <div> with a huge
// inline "markdown-body" stylesheet (one Java string literal spanning the next three physical
// lines — it is CUT MID-LITERAL at each extraction line boundary; the four lines below must stay
// joined exactly as-is, nothing may be inserted between them). Afterwards: writes the .md file,
// then shells out to node (md2pdf script) to produce the PDF, reading the process stdout until EOF.
// NOTE(review): Runtime.getRuntime().exec(nodeCmd) builds a command from concatenated file paths —
// a path containing spaces/metacharacters breaks or injects; prefer ProcessBuilder(List<String>).
// NOTE(review): bufferedReader is never closed; Class.class.getClass().getResource(...) is a
// convoluted way to resolve a classpath resource — verify intent before touching.
(Editor.Html.equals(blog.getEditor())) { content = "<div class='markdown-body'><style>.markdown-body .tms-chat-msg-code-trigger{display: none;}.markdown-body{font-size:14px;line-height:1.6}.markdown-body>:first-child{margin-top:0!important}.markdown-body>:last-child{margin-bottom:0!important}.markdown-body a{word-break:break-all}.markdown-body a.absent{color:#C00}.markdown-body a.anchor{bottom:0;cursor:pointer;display:block;left:0;margin-left:-30px;padding-left:30px;position:absolute;top:0}.markdown-body h1,.markdown-body h2,.markdown-body h3,.markdown-body h4,.markdown-body h5,.markdown-body h6{cursor:text;font-weight:700;margin:20px 0 10px;padding:0;position:relative;word-break:break-all;}.markdown-body h1 .mini-icon-link,.markdown-body h2 .mini-icon-link,.markdown-body h3 .mini-icon-link,.markdown-body h4 .mini-icon-link,.markdown-body h5 .mini-icon-link,.markdown-body h6 .mini-icon-link{color:#000;display:none}.markdown-body h1:hover a.anchor,.markdown-body h2:hover a.anchor,.markdown-body h3:hover a.anchor,.markdown-body h4:hover a.anchor,.markdown-body h5:hover a.anchor,.markdown-body h6:hover a.anchor{line-height:1;margin-left:-22px;padding-left:0;text-decoration:none;top:15%}.markdown-body h1:hover a.anchor .mini-icon-link,.markdown-body h2:hover a.anchor .mini-icon-link,.markdown-body h3:hover a.anchor .mini-icon-link,.markdown-body h4:hover a.anchor .mini-icon-link,.markdown-body h5:hover a.anchor .mini-icon-link,.markdown-body h6:hover a.anchor .mini-icon-link{display:inline-block}.markdown-body hr:after,.markdown-body hr:before{display:table;content:''}.markdown-body h1 code,.markdown-body h1 tt,.markdown-body h2 code,.markdown-body h2 tt,.markdown-body h3 code,.markdown-body h3 tt,.markdown-body h4 code,.markdown-body h4 tt,.markdown-body h5 code,.markdown-body h5 tt,.markdown-body h6 code,.markdown-body h6 tt{font-size:inherit}.markdown-body h1{color:#000;font-size:28px}.markdown-body h2{border-bottom:1px solid 
#CCC;color:#000;font-size:24px}.markdown-body h3{font-size:18px}.markdown-body h4{font-size:16px}.markdown-body h5{font-size:14px}.markdown-body h6{color:#777;font-size:14px}.markdown-body blockquote,.markdown-body dl,.markdown-body ol,.markdown-body p,.markdown-body pre,.markdown-body table,.markdown-body ul{margin:15px 0}.markdown-body hr{overflow:hidden;background:#e7e7e7;height:4px;padding:0;margin:16px 0;border:0;-moz-box-sizing:content-box;box-sizing:content-box}.markdown-body h1+p,.markdown-body h2+p,.markdown-body h3+p,.markdown-body h4+p,.markdown-body h5+p,.markdown-body h6+p,.markdown-body ol li>:first-child,.markdown-body ul li>:first-child{margin-top:0}.markdown-body hr:after{clear:both}.markdown-body a:first-child h1,.markdown-body a:first-child h2,.markdown-body a:first-child h3,.markdown-body a:first-child h4,.markdown-body a:first-child h5,.markdown-body a:first-child h6,.markdown-body>h1:first-child,.markdown-body>h1:first-child+h2,.markdown-body>h2:first-child,.markdown-body>h3:first-child,.markdown-body>h4:first-child,.markdown-body>h5:first-child,.markdown-body>h6:first-child{margin-top:0;padding-top:0}.markdown-body li p.first{display:inline-block}.markdown-body ol,.markdown-body ul{padding-left:30px}.markdown-body ol.no-list,.markdown-body ul.no-list{list-style-type:none;padding:0}.markdown-body ol ol,.markdown-body ol ul,.markdown-body ul ol,.markdown-body ul ul{margin-bottom:0}.markdown-body dl{padding:0}.markdown-body dl dt{font-size:14px;font-style:italic;font-weight:700;margin:15px 0 5px;padding:0}.markdown-body dl dt:first-child{padding:0}.markdown-body dl dt>:first-child{margin-top:0}.markdown-body dl dt>:last-child{margin-bottom:0}.markdown-body dl dd{margin:0 0 15px;padding:0 15px}.markdown-body blockquote>:first-child,.markdown-body dl dd>:first-child{margin-top:0}.markdown-body blockquote>:last-child,.markdown-body dl dd>:last-child{margin-bottom:0}.markdown-body blockquote{border-left:4px solid #DDD;color:#777;padding:0 
15px}.markdown-body table{border-collapse:collapse}.markdown-body table th{font-weight:700}.markdown-body table td,.markdown-body table th{border:1px solid #CCC;padding:6px 13px}.markdown-body table tr{background-color:#FFF;border-top:1px solid #CCC}.markdown-body table tr:nth-child(2n){background-color:#F8F8F8}.markdown-body img{max-width:100%}.markdown-body span.frame{display:block;overflow:hidden}.markdown-body span.frame>span{border:1px solid #DDD;display:block;float:left;margin:13px 0 0;overflow:hidden;padding:7px;width:auto}.markdown-body span.frame span img{display:block;float:left}.markdown-body span.frame span span{clear:both;color:#333;display:block;padding:5px 0 0}.markdown-body span.align-center{clear:both;display:block;overflow:hidden}.markdown-body span.align-center>span{display:block;margin:13px auto 0;overflow:hidden;text-align:center}.markdown-body span.align-center span img{margin:0 auto;text-align:center}.markdown-body span.align-right{clear:both;display:block;overflow:hidden}.markdown-body span.align-right>span{display:block;margin:13px 0 0;overflow:hidden;text-align:right}.markdown-body span.align-right span img{margin:0;text-align:right}.markdown-body span.float-left{display:block;float:left;margin-right:13px;overflow:hidden}.markdown-body span.float-left span{margin:13px 0 0}.markdown-body span.float-right{display:block;float:right;margin-left:13px;overflow:hidden}.markdown-body span.float-right>span{display:block;margin:13px auto 0;overflow:hidden;text-align:right}.markdown-body code,.markdown-body tt{background-color:#F8F8F8;border:1px solid #EAEAEA;border-radius:3px;margin:0 2px;padding:0 5px;white-space:normal}.markdown-body pre>code{background:none;border:none;margin:0;padding:0;white-space:pre}.markdown-body .highlight pre,.markdown-body pre{background-color:#F8F8F8;border:1px solid #CCC;border-radius:3px;font-size:13px;line-height:19px;overflow:auto;padding:6px 10px}.markdown-body pre code,.markdown-body pre 
tt{background-color:transparent;border:none}.markdown-body .emoji{width:1.5em;height:1.5em;display:inline-block;margin-bottom:-.25em;background-size:contain;}</style>" + blog.getContent() + "</div>"; } else { content = blog.getContent(); } FileUtils.writeStringToFile(fileMd, content, "UTF-8"); } catch (IOException e) { e.printStackTrace(); } } File filePdf = new File(pdfFilePath); if (!filePdf.exists()) { try { String pathNode = StringUtil.isNotEmpty(md2pdfPath) ? md2pdfPath : new File(Class.class.getClass().getResource("/md2pdf").getPath()).getAbsolutePath(); String node = StringUtil.isNotEmpty(nodePath) ? nodePath : "node"; String nodeCmd = StringUtil.replace(node + " {?1} {?2} {?3}", pathNode, mdFilePath, pdfFilePath); logger.info("Node CMD: " + nodeCmd); Process process = Runtime.getRuntime().exec(nodeCmd); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(process.getInputStream())); String s = null; while ((s = bufferedReader.readLine()) != null) { logger.info(s); } process.waitFor(); logger.info("Md2pdf done!"); } catch (IOException | InterruptedException e) { e.printStackTrace(); } } // 1.ContentType // response.setContentType("multipart/form-data"); response.setContentType("application/x-msdownload;"); response.addHeader("Content-Type", "text/html; charset=utf-8"); String dnFileName = null; String dnFileLength = null; File dnFile = null; if ("md".equalsIgnoreCase(type) || "html".equalsIgnoreCase(type)) { // download markdown or html dnFileName = blog.getTitle().trim() + "." 
// Tail of GET /download/{id}: picks the file to stream (md/html source, md2html render, or pdf),
// sets Content-Disposition/Content-Length, and copies the file to the response in 2KB chunks.
// Then POST /download/md2html/{id} begins: persists caller-rendered HTML for a blog so a later
// /download?type=md2html can serve it; continues on the next physical line.
// NOTE(review): if bis.close() throws in the finally block, bos is never closed; response
// Content-Type is set three times with conflicting values — verify which one is intended.
+ type; dnFileLength = String.valueOf(fileMd.length()); dnFile = fileMd; } else if ("md2html".equalsIgnoreCase(type)) { // download markdown as html File md2fileHtml = new File(md2htmlFilePath); dnFileName = blog.getTitle().trim() + ".html"; dnFileLength = String.valueOf(md2fileHtml.length()); dnFile = md2fileHtml; } else { // download pdf dnFileName = blog.getTitle().trim() + ".pdf"; dnFileLength = String.valueOf(filePdf.length()); dnFile = filePdf; } response.setHeader("Content-Disposition", "attachment; fileName=" + StringUtil.encodingFileName(dnFileName)); response.setHeader("Content-Length", dnFileLength); java.io.BufferedInputStream bis = null; java.io.BufferedOutputStream bos = null; try { bis = new BufferedInputStream(new FileInputStream(dnFile)); bos = new BufferedOutputStream(response.getOutputStream()); byte[] buff = new byte[2048]; int bytesRead; while (-1 != (bytesRead = bis.read(buff, 0, buff.length))) { bos.write(buff, 0, bytesRead); } } catch (IOException e) { e.printStackTrace(); } finally { if (bis != null) { bis.close(); } if (bos != null) { bos.close(); } } } @PostMapping("download/md2html/{id}") @ResponseBody public RespBody downloadHtmlFromMd(HttpServletRequest request, @PathVariable Long id, @RequestParam(value = "content") String content) throws Exception { logger.debug("download blog md2html start..."); Blog blog = blogRepository.findOne(id); if (blog == null) { return RespBody.failed("!"); } if (!hasAuth(blog)) { return RespBody.failed("!"); } // (ServletContext) String path = WebUtil.getRealPath(request); String blogUpdateDate = DateUtil.format(blog.getUpdateDate(), DateUtil.FORMAT9); String md2htmlFileName = blog.getId() + "_" + blogUpdateDate + ".html"; String md2htmlFilePath = path + uploadPath + md2htmlFileName; File md2fileHtml = new File(md2htmlFilePath); if (!md2fileHtml.exists()) { try { FileUtils.writeStringToFile(md2fileHtml, content, "UTF-8"); } catch (IOException e) { e.printStackTrace(); return 
// Tail of /download/md2html/{id}, then the private authorization helpers:
// hasAuth(Blog): non-null, not Deleted, then delegates to hasAuthWithDeleted.
// hasAuth(Long): same check after loading the blog by id.
// hasSpaceAuth(Space): super user, opened, creator, or non-private spaces pass; otherwise scans
// SpaceAuthority rows for the login user directly or via channel membership.
// hasAuthWithDeleted(Blog): super user, creator, opened, or (non-private + space auth) pass;
// otherwise scans BlogAuthority rows for the user directly or via channel membership.
// GET /auth/get begins at the end of this line (its `Blog` local continues on the next).
RespBody.failed(e.getMessage()); } } return RespBody.succeed(); } private boolean hasAuth(Blog b) { if (b == null) { return false; } if (b.getStatus().equals(Status.Deleted)) { return false; } return hasAuthWithDeleted(b); } private boolean hasAuth(Long id) { if (id == null) { return false; } Blog b = blogRepository.findOne(id); if (b == null) { return false; } if (b.getStatus().equals(Status.Deleted)) { return false; } return hasAuthWithDeleted(b); } private boolean hasSpaceAuth(Space s) { if (s == null) { return false; } if (isSuper()) { return true; } if (s.getStatus().equals(Status.Deleted)) { return false; } if (Boolean.TRUE.equals(s.getOpened())) { return true; } User loginUser = new User(WebUtil.getUsername()); if (s.getCreator().equals(loginUser)) { return true; } if (!s.getPrivated()) { return true; } boolean exists = false; for (SpaceAuthority sa : s.getSpaceAuthorities()) { if (loginUser.equals(sa.getUser())) { exists = true; break; } else { Channel channel = sa.getChannel(); if (channel != null) { Set<User> members = channel.getMembers(); if (members.contains(loginUser)) { exists = true; break; } } } } return exists; } private boolean hasAuthWithDeleted(Blog b) { if (b == null) { return false; } if (isSuper()) { return true; } User loginUser = new User(WebUtil.getUsername()); if (b.getCreator().equals(loginUser)) { return true; } if (Boolean.TRUE.equals(b.getOpened())) { return true; } if (!b.getPrivated()) { if (b.getSpace() == null) { return true; } else { return hasSpaceAuth(b.getSpace()); } } boolean exists = false; for (BlogAuthority ba : b.getBlogAuthorities()) { if (loginUser.equals(ba.getUser())) { exists = true; break; } else { Channel channel = ba.getChannel(); if (channel != null) { Set<User> members = channel.getMembers(); if (members.contains(loginUser)) { exists = true; break; } } } } return exists; } @RequestMapping(value = "auth/get", method = RequestMethod.GET) @ResponseBody public RespBody getAuth(@RequestParam("id") Long id) { Blog 
blog = blogRepository.findOne(id); if (!hasAuth(blog)) { return RespBody.failed("!"); } return RespBody.succeed(blog.getBlogAuthorities()); } @RequestMapping(value = "auth/add", method = RequestMethod.POST) @ResponseBody public RespBody addAuth(@RequestParam("id") Long id, @RequestParam(value = "channels", required = false) String channels, @RequestParam(value = "users", required = false) String users) { Blog blog = blogRepository.findOne(id); if (!isSuperOrCreator(blog.getCreator().getUsername())) { return RespBody.failed("!"); } List<BlogAuthority> blogAuthorities = new ArrayList<>(); if (StringUtil.isNotEmpty(channels)) { Stream.of(channels.split(",")).forEach(c -> { Channel channel = channelRepository.findOne(Long.valueOf(c)); if (channel != null) { BlogAuthority blogAuthority = new BlogAuthority(); blogAuthority.setBlog(blog); blogAuthority.setChannel(channel); blogAuthorities.add(blogAuthority); } }); } if (StringUtil.isNotEmpty(users)) { Stream.of(users.split(",")).forEach(u -> { User user = userRepository.findOne(u); if (user != null) { BlogAuthority blogAuthority = new BlogAuthority(); blogAuthority.setBlog(blog); blogAuthority.setUser(user); blogAuthorities.add(blogAuthority); } }); } List<BlogAuthority> list = blogAuthorityRepository.save(blogAuthorities); blogAuthorityRepository.flush(); blog.getBlogAuthorities().addAll(list); return RespBody.succeed(blog); } @RequestMapping(value = "auth/remove", method = RequestMethod.POST) @ResponseBody public RespBody removeAuth(@RequestParam("id") Long id, @RequestParam(value = "channels", required = false) String channels, @RequestParam(value = "users", required = false) String users) { Blog blog = blogRepository.findOne(id); if (!isSuperOrCreator(blog.getCreator().getUsername())) { return RespBody.failed("!"); } List<BlogAuthority> list = new ArrayList<>(); Collection<Channel> channelC = new ArrayList<>(); if (StringUtil.isNotEmpty(channels)) { Stream.of(channels.split(",")).forEach(c -> { Channel ch = new 
Channel(); ch.setId(Long.valueOf(c)); channelC.add(ch); BlogAuthority ba = new BlogAuthority(); ba.setBlog(blog); ba.setChannel(ch); list.add(ba); }); } Collection<User> userC = new ArrayList<>(); if (StringUtil.isNotEmpty(users)) { Stream.of(users.split(",")).forEach(u -> { User user = new User(); user.setUsername(u); userC.add(user); BlogAuthority ba = new BlogAuthority(); ba.setBlog(blog); ba.setUser(user); list.add(ba); }); } if (channelC.size() > 0 && userC.size() > 0) { blogAuthorityRepository.removeAuths(blog, channelC, userC); } else { if (channelC.size() > 0) { blogAuthorityRepository.removeChannelAuths(blog, channelC); } else if (userC.size() > 0) { blogAuthorityRepository.removeUserAuths(blog, userC); } } blogAuthorityRepository.flush(); blog.getBlogAuthorities().removeAll(list); return RespBody.succeed(blog); } @RequestMapping(value = "stow/add", method = RequestMethod.POST) @ResponseBody public RespBody addStow(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (!hasAuth(blog)) { return RespBody.failed("!"); } User loginUser = getLoginUser(); BlogStow blogStow3 = blogStowRepository.findOneByBlogAndCreator(blog, loginUser); if (blogStow3 != null) { if (!blogStow3.getStatus().equals(Status.Deleted)) { return RespBody.failed("!"); } else { blogStow3.setStatus(Status.New); BlogStow blogStow = blogStowRepository.saveAndFlush(blogStow3); logWithProperties(Action.Update, Target.Blog, id, "stow", blog.getTitle()); return RespBody.succeed(blogStow); } } else { BlogStow blogStow = new BlogStow(); blogStow.setBlog(blog); BlogStow blogStow2 = blogStowRepository.saveAndFlush(blogStow); logWithProperties(Action.Update, Target.Blog, id, "stow", blog.getTitle()); return RespBody.succeed(blogStow2); } } @RequestMapping(value = "stow/remove", method = RequestMethod.POST) @ResponseBody public RespBody removeStow(@RequestParam("sid") Long sid) { BlogStow blogStow = blogStowRepository.findOne(sid); if (blogStow == null) { return RespBody.failed("!"); 
} if (!hasAuth(blogStow.getBlog())) { return RespBody.failed("!"); } blogStow.setStatus(Status.Deleted); blogStowRepository.saveAndFlush(blogStow); return RespBody.succeed(sid); } @RequestMapping(value = "stow/listMy", method = RequestMethod.GET) @ResponseBody public RespBody listMyStow() { List<BlogStow> blogStows = blogStowRepository.findByCreatorAndStatusNot(getLoginUser(), Status.Deleted); blogStows = blogStows.stream().filter(bs -> !bs.getBlog().getStatus().equals(Status.Deleted)) .collect(Collectors.toList()); blogStows.forEach(bs -> { bs.getBlog().setContent(null); bs.getBlog().setBlogAuthorities(null); }); return RespBody.succeed(blogStows); } @RequestMapping(value = "stow/get", method = RequestMethod.GET) @ResponseBody public RespBody getStow(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (!hasAuth(blog)) { return RespBody.failed("!"); } BlogStow blogStow = blogStowRepository.findOneByBlogAndCreatorAndStatusNot(blog, getLoginUser(), Status.Deleted); return RespBody.succeed(blogStow); } @RequestMapping(value = "stow/list", method = RequestMethod.GET) @ResponseBody public RespBody listStow(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (!hasAuth(blog)) { return RespBody.failed("!"); } List<BlogStow> stows = blogStowRepository.findByBlogAndStatusNot(blog, Status.Deleted); stows.forEach(bs -> bs.setBlog(null)); return RespBody.succeed(stows); } @RequestMapping(value = "follower/add", method = RequestMethod.POST) @ResponseBody public RespBody addFollower(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (!hasAuth(blog)) { return RespBody.failed("!"); } User loginUser = getLoginUser(); BlogFollower blogFollower = blogFollowerRepository.findOneByBlogAndCreator(blog, loginUser); if (blogFollower != null) { if (!blogFollower.getStatus().equals(Status.Deleted)) { return RespBody.failed("!"); } else { blogFollower.setStatus(Status.New); BlogFollower blogFollower2 = 
blogFollowerRepository.saveAndFlush(blogFollower); logWithProperties(Action.Update, Target.Blog, id, "follower", blog.getTitle()); return RespBody.succeed(blogFollower2); } } else { BlogFollower blogFollower2 = new BlogFollower(); blogFollower2.setBlog(blog); BlogFollower blogFollower3 = blogFollowerRepository.saveAndFlush(blogFollower2); logWithProperties(Action.Update, Target.Blog, id, "follower", blog.getTitle()); return RespBody.succeed(blogFollower3); } } @RequestMapping(value = "follower/remove", method = RequestMethod.POST) @ResponseBody public RespBody removeFollower(@RequestParam("fid") Long fid) { BlogFollower blogFollower = blogFollowerRepository.findOne(fid); if (blogFollower == null) { return RespBody.failed("!"); } if (!hasAuth(blogFollower.getBlog())) { return RespBody.failed("!"); } blogFollower.setStatus(Status.Deleted); blogFollowerRepository.saveAndFlush(blogFollower); return RespBody.succeed(fid); } @RequestMapping(value = "follower/listMy", method = RequestMethod.GET) @ResponseBody public RespBody listMyFollower() { List<BlogFollower> blogFollowers = blogFollowerRepository.findByCreatorAndStatusNot(getLoginUser(), Status.Deleted); blogFollowers.forEach(bf -> { bf.getBlog().setContent(null); bf.getBlog().setBlogAuthorities(null); }); return RespBody.succeed(blogFollowers); } @RequestMapping(value = "follower/get", method = RequestMethod.GET) @ResponseBody public RespBody getFollower(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (!hasAuth(blog)) { return RespBody.failed("!"); } BlogFollower blogFollower = blogFollowerRepository.findOneByBlogAndCreatorAndStatusNot(blog, getLoginUser(), Status.Deleted); return RespBody.succeed(blogFollower); } @RequestMapping(value = "follower/list", method = RequestMethod.GET) @ResponseBody public RespBody listFollower(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (!hasAuth(blog)) { return RespBody.failed("!"); } List<BlogFollower> followers = 
blogFollowerRepository.findByBlogAndStatusNot(blog, Status.Deleted); followers.forEach(bf -> bf.setBlog(null)); return RespBody.succeed(followers); } @RequestMapping(value = "poll", method = RequestMethod.GET) @ResponseBody public RespBody poll(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (!hasAuth(blog)) { return RespBody.failed("!"); } return RespBody.succeed(PollBlog.builder().version(blog.getVersion()).build()); } @RequestMapping(value = "log/my", method = RequestMethod.GET) @ResponseBody public RespBody myLog() { List<Log> logs = logRepository.findByTargetInAndCreateDateAfter(Arrays.asList(Target.Blog, Target.Comment), new DateTime().minusDays(7).toDate()); if (logs.size() == 0) { return RespBody.succeed(getLogs(null)); } Collections.reverse(logs); Log last = logs.get(logs.size() - 1); logs = logs.stream().filter(lg -> { String targetId = lg.getTargetId(); if (Target.Blog.equals(lg.getTarget())) { Blog blog = blogRepository.findOne(Long.valueOf(targetId)); return hasAuthWithDeleted(blog); } else if (Target.Comment.equals(lg.getTarget())) { Comment comment = commentRepository.findOne(Long.valueOf(targetId)); Blog blog = blogRepository.findOne(Long.valueOf(comment.getTargetId())); return hasAuthWithDeleted(blog); } return false; }).limit(100).collect(Collectors.toList()); if (logs.size() == 0) { logs = getLogs(last.getId()); } return RespBody.succeed(logs); } private List<Log> getLogs(Long id) { List<Target> targets = Arrays.asList(Target.Blog, Target.Comment); List<Log> logs = null; if (id != null) { logs = logRepository.findTop50ByStatusNotAndTargetInAndIdLessThanOrderByIdDesc(Status.Deleted, targets, id); } else { logs = logRepository.findTop50ByStatusNotAndTargetInOrderByIdDesc(Status.Deleted, targets); } if (logs.size() == 0) { return new ArrayList<Log>(); } Log last = logs.get(logs.size() - 1); logs = logs.stream().filter(lg -> { String targetId = lg.getTargetId(); if (Target.Blog.equals(lg.getTarget())) { Blog blog = 
blogRepository.findOne(Long.valueOf(targetId)); return hasAuthWithDeleted(blog); } else if (Target.Comment.equals(lg.getTarget())) { Comment comment = commentRepository.findOne(Long.valueOf(targetId)); Blog blog = blogRepository.findOne(Long.valueOf(comment.getTargetId())); return hasAuthWithDeleted(blog); } return false; }).collect(Collectors.toList()); if (logs.size() == 0) { return getLogs(last.getId()); } return logs; } @RequestMapping(value = "log/my/more", method = RequestMethod.GET) @ResponseBody public RespBody myMoreLog(@RequestParam(value = "lastId", required = false) Long lastId) { return RespBody.succeed(getLogs(lastId)); } @RequestMapping(value = "tag/add", method = RequestMethod.POST) @ResponseBody public RespBody addTag(@RequestParam("id") Long id, @RequestParam("tags") String tags) { Blog blog = blogRepository.findOne(id); Boolean isOpenEdit = blog.getOpenEdit() == null ? false : blog.getOpenEdit(); if (!isSuperOrCreator(blog.getCreator().getUsername()) && !isOpenEdit) { return RespBody.failed("!"); } User loginUser = getLoginUser(); if (StringUtil.isNotEmpty(tags)) { Stream.of(tags.split(",")).forEach(t -> { Tag tag = tagRepository.findOneByNameAndCreator(t, loginUser); if (tag == null) { Tag tag2 = new Tag(); tag2.setName(t); tag2.getBlogs().add(blog); tag = tagRepository.saveAndFlush(tag2); } else { tag.getBlogs().add(blog); tagRepository.saveAndFlush(tag); } blog.getTags().add(tag); }); } return RespBody.succeed(blog); } @RequestMapping(value = "tag/remove", method = RequestMethod.POST) @ResponseBody public RespBody removeTag(@RequestParam("id") Long id, @RequestParam("tags") String tags) { Blog blog = blogRepository.findOne(id); Boolean isOpenEdit = blog.getOpenEdit() == null ? 
false : blog.getOpenEdit(); if (!isSuperOrCreator(blog.getCreator().getUsername()) && !isOpenEdit) { return RespBody.failed("!"); } User loginUser = getLoginUser(); if (StringUtil.isNotEmpty(tags)) { Stream.of(tags.split(",")).forEach(t -> { Tag tag = tagRepository.findOneByNameAndCreator(t, loginUser); if (tag != null) { tag.getBlogs().remove(blog); tagRepository.saveAndFlush(tag); blog.getTags().remove(tag); } }); } return RespBody.succeed(blog); } @RequestMapping(value = "tag/my", method = RequestMethod.GET) @ResponseBody public RespBody myTag() { List<Tag> tags = tagRepository.findByCreator(getLoginUser()); return RespBody.succeed(tags); } @PostMapping("dir/update") @ResponseBody public RespBody updateDir(@RequestParam("id") Long id, @RequestParam(value = "did", required = false) Long did) { Blog blog = blogRepository.findOne(id); if (blog == null) { return RespBody.failed(""); } if (!hasAuth(blog)) { return RespBody.failed(""); } Dir dir = null; if (did != null) { dir = dirRepository.findOne(did); if (dir == null) { return RespBody.failed(""); } } blog.setDir(dir); blogRepository.saveAndFlush(blog); return RespBody.succeed(blog); } @GetMapping("news/list") @ResponseBody public RespBody listNews(@PageableDefault(sort = { "id" }, direction = Direction.DESC) Pageable pageable) { Page<BlogNews> news = blogNewsRepository.findByToAndUsernameNotAndStatusNot(WebUtil.getUsername(), WebUtil.getUsername(), Status.Deleted, pageable); return RespBody.succeed(news); } @PostMapping("news/delete") @ResponseBody public RespBody deleteNews(@RequestParam("id") Long id) { BlogNews news = blogNewsRepository.findOne(id); if (!isSuperOrCreator(news.getTo())) { return RespBody.failed(""); } news.setStatus(Status.Deleted); blogNewsRepository.saveAndFlush(news); messagingTemplate.convertAndSendToUser(news.getTo(), "/blog/toastr", ToastrPayload.builder().id(String.valueOf(news.getId())).build()); return RespBody.succeed(id); } @PostMapping("share/create") @ResponseBody public RespBody 
createShare(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (!isSuperOrCreator(blog.getCreator())) { return RespBody.failed(""); } String shareId = UUID.randomUUID().toString(); int cnt = blogRepository.updateShareId(shareId, id); if (cnt == 1) { blog.setShareId(shareId); return RespBody.succeed(blog); } return RespBody.failed(); } @PostMapping("share/remove") @ResponseBody public RespBody removeShare(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (!isSuperOrCreator(blog.getCreator())) { return RespBody.failed(""); } int cnt = blogRepository.updateShareId(null, id); if (cnt == 1) { blog.setShareId(null); return RespBody.succeed(blog); } return RespBody.failed(); } @PostMapping("tpl/update") @ResponseBody public RespBody updateTpl(@RequestParam("id") Long id, @RequestParam("tpl") Integer tpl, @RequestParam(value = "desc", required = false) String desc) { Blog blog = blogRepository.findOne(id); if (!isSuperOrCreator(blog.getCreator())) { return RespBody.failed(""); } // 1privated 2opened (0null) int cnt = blogRepository.updateTpl(tpl, desc, id); if (cnt == 1) { blog.setTpl(tpl); return RespBody.succeed(blog); } return RespBody.failed(); } @GetMapping("tpl/list") @ResponseBody public RespBody listTpl() { List<Blog> blogs = blogRepository.queryTpl(WebUtil.getUsername()); return RespBody.succeed(blogs); } @GetMapping("tpl/hotCnt/inc") @ResponseBody public RespBody incTplHotCnt(@RequestParam("id") Long id) { Blog blog = blogRepository.findOne(id); if (blog == null || Status.Deleted.equals(blog.getStatus())) { return RespBody.failed("!"); } Long hotCnt = blog.getTplHotCnt(); if (hotCnt == null) { hotCnt = 1L; } else { hotCnt = hotCnt + 1; } blogRepository.updateTplHotCnt(hotCnt, id); return RespBody.succeed(); } @PostMapping("history/repair") @Secured({ "ROLE_ADMIN" }) @ResponseBody public RespBody repairHistory() { List<BlogHistory> blogHistories = blogHistoryRepository.findAll(); blogHistories.forEach(item -> { if 
(item.getEditor() == null) { Blog blog = blogRepository.findOne(item.getBlog().getId()); if (blog != null) { item.setEditor(blog.getEditor() != null ? blog.getEditor() : Editor.Markdown); blogHistoryRepository.saveAndFlush(item); } } }); return RespBody.succeed(); } private void wsSend(Comment comment, com.lhjz.portal.model.BlogCommentPayload.Cmd cmd, String loginUsername) { try { ThreadUtil.exec(() -> { messagingTemplate.convertAndSend("/blog/comment/update", BlogCommentPayload.builder().id(comment.getId()).version(comment.getVersion()) .bid(comment.getTargetId()).cmd(cmd).username(loginUsername).build()); }); } catch (Exception e) { logger.error(e.getMessage(), e); } } @PostMapping("comment/label/toggle") @ResponseBody public RespBody toggelCommentLabel(@RequestParam("cid") Long cid, @RequestParam("name") String name) { Comment comment = commentRepository.findOne(cid); if (!hasAuth(Long.valueOf(comment.getTargetId()))) { return RespBody.failed(""); } Optional<Label> tagOpt = comment.getLabels().stream().filter(tag -> { // & name return tag.getCreator().equals(WebUtil.getUsername()) && tag.getName().equals(name); }).findFirst(); if (tagOpt.isPresent()) { labelRepository.delete(tagOpt.get()); comment.getLabels().remove(tagOpt.get()); wsSend(comment, com.lhjz.portal.model.BlogCommentPayload.Cmd.D, WebUtil.getUsername()); } else { Label tag = new Label(); tag.setName(name); tag.setDescription(name); tag.setCreator(WebUtil.getUsername()); tag.setCreateDate(new Date()); tag.setComment(comment); Label tag2 = labelRepository.saveAndFlush(tag); comment.getLabels().add(tag2); wsSend(comment, com.lhjz.portal.model.BlogCommentPayload.Cmd.C, WebUtil.getUsername()); } return RespBody.succeed(comment); } }
package com.lordmau5.ffs.client;

import com.lordmau5.ffs.tile.TileEntityTankFrame;
import cpw.mods.fml.client.registry.ISimpleBlockRenderingHandler;
import cpw.mods.fml.client.registry.RenderingRegistry;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.init.Blocks;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.IBlockAccess;
import net.minecraftforge.client.MinecraftForgeClient;

/**
 * Custom block renderer (Forge 1.7 {@link ISimpleBlockRenderingHandler}) for tank
 * frame blocks. A frame renders as whatever block the {@link TileEntityTankFrame}
 * at its position reports via {@code getBlock()}, falling back to stone when no
 * valid disguise block is available.
 */
public class TankFrameRenderer implements ISimpleBlockRenderingHandler {

    // Unique render id registered once at class-load time; returned by getRenderId().
    public static final int id = RenderingRegistry.getNextAvailableRenderId();

    @Override
    public void renderInventoryBlock(Block block, int metadata, int modelId, RenderBlocks renderer) {
        // Intentionally empty: frames have no inventory rendering
        // (shouldRender3DInInventory also returns false below).
    }

    /**
     * Returns the current Forge render pass (0 = opaque, 1 = translucent).
     * The RenderBlocks argument is unused; kept for signature compatibility.
     */
    public static int getPassForFrameRender(RenderBlocks rb) {
        return MinecraftForgeClient.getRenderPass();
    }

    @Override
    public boolean renderWorldBlock(IBlockAccess world, int x, int y, int z, Block block, int modelId, RenderBlocks renderer) {
        // Delegate to the pass-aware overload using the currently active render pass.
        return renderWorldBlock(world, x, y, z, block, renderer, getPassForFrameRender(renderer));
    }

    /**
     * Renders the frame at (x, y, z) for the given render pass.
     *
     * @param ba    world access used to look up the frame's tile entity
     * @param pass  render pass being drawn (0 = opaque, 1 = translucent)
     * @return false if the tile at the position is not a tank frame; true otherwise
     *         (even when nothing was drawn for this pass — see the tessellator note)
     */
    public boolean renderWorldBlock(IBlockAccess ba, int x, int y, int z, Block block, RenderBlocks rb, int pass) {
        // Here to prevent Minecraft from crashing when nothing renders on a render pass
        // (rarely in pass 0, often in pass 1)
        // This is a 1.7 bug.
        // NOTE(review): the four identical vertices form a degenerate (zero-area) quad
        // so the tessellator buffer is never empty for this pass.
        Tessellator.instance.addVertexWithUV(x, y, z, 0, 0);
        Tessellator.instance.addVertexWithUV(x, y, z, 0, 0);
        Tessellator.instance.addVertexWithUV(x, y, z, 0, 0);
        Tessellator.instance.addVertexWithUV(x, y, z, 0, 0);

        TileEntity tile = ba.getTileEntity(x, y, z);
        if (!(tile instanceof TileEntityTankFrame)) {
            return false;
        }

        // Default to rendering plain stone until a valid disguise block is found.
        boolean invalidRender = true;
        Block renderBlock = Blocks.stone;
        TileEntityTankFrame te = (TileEntityTankFrame) tile;
        if(te.getBlock() != null) {
            Block exBlock = te.getBlock().getBlock();
            if (exBlock != null) {
                renderBlock = exBlock;
                invalidRender = false;
            }
        }

        // Saved so blockAccess can be restored after the wrapper swap below.
        IBlockAccess origBa = rb.blockAccess;
        boolean isFrameBlockOpaque = renderBlock.isOpaqueCube();
        // Draw only on a pass the disguise block participates in: opaque blocks
        // draw in pass 0, non-opaque in pass 1, and canRenderInPass() can opt a
        // block into either pass explicitly.
        if (((isFrameBlockOpaque || renderBlock.canRenderInPass(0)) && pass == 0)
                || ((!isFrameBlockOpaque || renderBlock.canRenderInPass(1)) && pass == 1)) {
            if(invalidRender) {
                // No disguise block: render the stone fallback directly.
                rb.renderStandardBlock(renderBlock, x, y, z);
            } else {
                // Swap in a wrapper world view so the disguise block samples its
                // surroundings through the frame; restored unconditionally below.
                rb.blockAccess = new FrameBlockAccessWrapper(origBa);
                try {
                    rb.renderBlockByRenderType(renderBlock, x, y, z);
                } catch (Exception e) {
                    // Foreign block renderers can throw; fall back to stone rather
                    // than crashing the render thread.
                    rb.renderStandardBlock(Blocks.stone, x, y, z);
                }
            }
            rb.blockAccess = origBa;
        }
        return true;
    }

    @Override
    public boolean shouldRender3DInInventory(int modelId) {
        return false;
    }

    @Override
    public int getRenderId() {
        return id;
    }
}
package com.nexmo.quickstart.voice;

import com.nexmo.client.NexmoClient;
import com.nexmo.client.auth.JWTAuthMethod;
import com.nexmo.client.voice.Call;
import com.nexmo.client.voice.CallEvent;

import java.nio.file.FileSystems;

import static com.nexmo.quickstart.Util.configureLogging;
import static com.nexmo.quickstart.Util.envVar;

/**
 * Quickstart: place an outbound voice call and, once it has had time to be
 * answered, push a sequence of DTMF digits into it.
 *
 * Required environment variables: APPLICATION_ID, PRIVATE_KEY (path to the
 * application's private key file), NEXMO_NUMBER (caller id), TO_NUMBER (callee).
 */
public class SendDtmfToCall {
    public static void main(String[] args) throws Exception {
        configureLogging();

        // Authenticate with an application-scoped JWT built from the private key file.
        String applicationId = envVar("APPLICATION_ID");
        String privateKeyPath = envVar("PRIVATE_KEY");
        JWTAuthMethod auth = new JWTAuthMethod(
                applicationId,
                FileSystems.getDefault().getPath(privateKeyPath)
        );
        NexmoClient client = new NexmoClient(auth);

        String nexmoNumber = envVar("NEXMO_NUMBER");
        String toNumber = envVar("TO_NUMBER");

        // The answer URL serves an NCCO with a long talk action, keeping the
        // call alive long enough for the DTMF to be delivered.
        Call outboundCall = new Call(
                toNumber,
                nexmoNumber,
                "https://gist.githubusercontent.com/ChrisGuzman/d6add5b23a8cf913dcdc5a8eabc223ef/raw/a1eb52e0ce2d3cef98bab14d27f3adcdff2af881/long_talk.json"
        );
        CallEvent callEvent = client.getVoiceClient().createCall(outboundCall);

        // Wait for the call to be answered before sending digits.
        Thread.sleep(20000);

        String callId = callEvent.getUuid();
        String digits = "332393";
        client.getVoiceClient().sendDtmf(callId, digits);
    }
}
package com.revature.controllers; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyEmitterReturnValueHandler; import com.revature.domain.Batch; import com.revature.domain.Person; import com.revature.domain.PersonRole; import com.revature.services.BatchLogic; import com.revature.services.PersonLogic; import com.revature.services.PersonRoleLogic; @CrossOrigin(origins = "http://localhost:8080/api/v1/persons", maxAge = 3600) @RestController @RequestMapping(value = "/api/v1/") public class PersonController { @Autowired private PersonLogic personLogic; @RequestMapping(method = RequestMethod.GET, value = "persons/{id}") public ResponseEntity<Person> getPersonById(@PathVariable("id") Integer id ){ Person person = personLogic.getPersonById(id); if (person != null) { return ResponseEntity.ok(person); } else { System.out.println("ERROR!"); return new ResponseEntity<>(HttpStatus.NOT_FOUND); } } @RequestMapping(method = RequestMethod.GET, value = "persons") public ResponseEntity<Page<Person>> getPerson(Pageable pageable, @RequestParam(defaultValue="", required=false) String firstname, @RequestParam(defaultValue="", required=false) String lastname, @RequestParam(defaultValue="0", required=false) Integer role){ if(role==0){ if(!"".equals(firstname)){ return 
ResponseEntity.ok(personLogic.getPersonByFirstName(pageable, firstname)); } else if (!("".equals(lastname))){ return ResponseEntity.ok(personLogic.getPersonByLastName(pageable, lastname)); } else{ return ResponseEntity.ok(personLogic.getAllPersons(pageable)); } } else { if(role==1){ return ResponseEntity.ok(personLogic.getAllTrainees(pageable)); } else if (role == 2) { return ResponseEntity.ok(personLogic.getAllTrainers(pageable)); } else { return ResponseEntity.ok(null); } } } @RequestMapping(method = RequestMethod.POST, value = "persons") public ResponseEntity<Person> createPerson(@RequestBody Person newPerson){ System.out.println(newPerson); personLogic.createPerson(newPerson); return ResponseEntity.ok(newPerson); } @RequestMapping(method = RequestMethod.PUT, value = "persons") public ResponseEntity<Person> modifyPerson(@RequestBody Person updatedPerson){ Person person = personLogic.getPersonById(updatedPerson.getId()); if(person != null) { personLogic.updatePerson(updatedPerson); return ResponseEntity.ok(updatedPerson); } else { return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } } @RequestMapping(method = RequestMethod.DELETE, value = "persons") public ResponseEntity<String> deletePerson(int pId){ Person pers = personLogic.getPersonById(pId); if (pers != null) { personLogic.deletePerson(pers); String message = pers.getFirstName() + " " + pers.getLastName() + " was deleted."; return ResponseEntity.ok(message); } else { return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } } }
package com.sandwell.JavaSimulation3D; import static com.sandwell.JavaSimulation.Util.formatNumber; import java.util.ArrayList; import java.util.HashMap; import com.jaamsim.input.InputAgent; import com.jaamsim.math.Color4d; import com.sandwell.JavaSimulation.BooleanInput; import com.sandwell.JavaSimulation.BooleanListInput; import com.sandwell.JavaSimulation.BooleanVector; import com.sandwell.JavaSimulation.ColourInput; import com.sandwell.JavaSimulation.DoubleInput; import com.sandwell.JavaSimulation.DoubleListInput; import com.sandwell.JavaSimulation.DoubleVector; import com.sandwell.JavaSimulation.EntityInput; import com.sandwell.JavaSimulation.EntityListInput; import com.sandwell.JavaSimulation.ErrorException; import com.sandwell.JavaSimulation.FileEntity; import com.sandwell.JavaSimulation.Input; import com.sandwell.JavaSimulation.InputErrorException; import com.sandwell.JavaSimulation.IntegerVector; import com.sandwell.JavaSimulation.Keyword; import com.sandwell.JavaSimulation.ProbabilityDistribution; import com.sandwell.JavaSimulation.Process; import com.sandwell.JavaSimulation.Tester; import com.sandwell.JavaSimulation.Vector; /** * Class ModelEntity - JavaSimulation3D */ public class ModelEntity extends DisplayEntity { // Breakdowns @Keyword(desc = "Reliability is defined as:\n" + " 100% - (plant breakdown time / total operation time)\n " + "or\n " + "(Operational Time)/(Breakdown + Operational Time)", example = "Object1 Reliability { 0.95 }") private final DoubleInput availability; protected double hoursForNextFailure; // The number of working hours required before the next breakdown protected double iATFailure; // inter arrival time between failures protected boolean breakdownPending; // true when a breakdown is to occur protected boolean brokendown; // true => entity is presently broken down protected boolean maintenance; // true => entity is presently in maintenance protected boolean associatedBreakdown; // true => entity is presently in Associated 
Breakdown protected boolean associatedMaintenance; // true => entity is presently in Associated Maintenance protected double breakdownStartTime; // Start time of the most recent breakdown protected double breakdownEndTime; // End time of the most recent breakdown // Breakdown Probability Distributions @Keyword(desc = "A ProbabilityDistribution object that governs the duration of breakdowns (in hours).", example = "Object1 DowntimeDurationDistribution { BreakdownProbDist1 }") private final EntityInput<ProbabilityDistribution> downtimeDurationDistribution; @Keyword(desc = "A ProbabilityDistribution object that governs when breakdowns occur (in hours).", example = "Object1 DowntimeIATDistribution { BreakdownProbDist1 }") private final EntityInput<ProbabilityDistribution> downtimeIATDistribution; // Maintenance @Keyword(desc = "The simulation time for the start of the first maintenance for each maintenance cycle.", example = "Object1 FirstMaintenanceTime { 24 h }") protected DoubleListInput firstMaintenanceTimes; @Keyword(desc = "The time between maintenance activities for each maintenance cycle", example = "Object1 MaintenanceInterval { 168 h }") protected DoubleListInput maintenanceIntervals; @Keyword(desc = "The durations of a single maintenance event for each maintenance cycle.", example = "Object1 MaintenanceDuration { 336 h }") protected DoubleListInput maintenanceDurations; protected IntegerVector maintenancePendings; // Number of maintenance periods that are due @Keyword(desc = "A Boolean value. Allows scheduled maintenances to be skipped if it overlaps " + "with another planned maintenance event.", example = "Object1 SkipMaintenanceIfOverlap { TRUE }") protected BooleanListInput skipMaintenanceIfOverlap; @Keyword(desc = "A list of objects that share the maintenance schedule with this object. " + "In order for the maintenance to start, all objects on this list must be available." 
+ "This keyword is for Handlers and Signal Blocks only.", example = "Block1 SharedMaintenance { Block2 Block2 }") private final EntityListInput<ModelEntity> sharedMaintenanceList; protected ModelEntity masterMaintenanceEntity; // The entity that has maintenance information protected boolean performMaintenanceAfterShipDelayPending; // maintenance needs to be done after shipDelay // Maintenance based on hours of operations @Keyword(desc = "Working time for the start of the first maintenance for each maintenance cycle", example = "Object1 FirstMaintenanceOperatingHours { 1000 2500 h }") private final DoubleListInput firstMaintenanceOperatingHours; @Keyword(desc = "Working time between one maintenance event and the next for each maintenance cycle", example = "Object1 MaintenanceOperatingHoursIntervals { 2000 5000 h }") private final DoubleListInput maintenanceOperatingHoursIntervals; @Keyword(desc = "Duration of maintenance events based on working hours for each maintenance cycle", example = "Ship1 MaintenanceOperatingHoursDurations { 24 48 h }") private final DoubleListInput maintenanceOperatingHoursDurations; protected IntegerVector maintenanceOperatingHoursPendings; // Number of maintenance periods that are due protected DoubleVector hoursForNextMaintenanceOperatingHours; protected double maintenanceStartTime; // Start time of the most recent maintenance protected double maintenanceEndTime; // End time of the most recent maintenance protected DoubleVector nextMaintenanceTimes; // next start time for each maintenance protected double nextMaintenanceDuration; // duration for next maintenance protected DoubleVector lastScheduledMaintenanceTimes; @Keyword(desc = "If maintenance has been deferred by the DeferMaintenanceLookAhead keyword " + "for longer than this time, the maintenance will start even if " + "there is an object within the lookahead. There must be one entry for each " + "defined maintenance schedule if DeferMaintenanceLookAhead is used. 
This" + "keyword is only used for signal blocks.", example = "Object1 DeferMaintenanceLimit { 50 50 h }")
private final DoubleListInput deferMaintenanceLimit;

@Keyword(desc = "If the duration of the downtime is longer than this time, equipment will be released",
         example = "Object1 DowntimeToReleaseEquipment { 1.0 h }")
protected final DoubleInput downtimeToReleaseEquipment;

@Keyword(desc = "A list of Boolean values corresponding to the maintenance cycles. If a value is TRUE, " +
                "then routes/tasks are released before performing the maintenance in the cycle.",
         example = "Object1 ReleaseEquipment { TRUE FALSE FALSE }")
protected final BooleanListInput releaseEquipment;

@Keyword(desc = "A list of Boolean values corresponding to the maintenance cycles. If a value is " +
                "TRUE, then maintenance in the cycle can start even if the equipment is presently " +
                "working.",
         example = "Object1 ForceMaintenance { TRUE FALSE FALSE }")
protected final BooleanListInput forceMaintenance;

// Statistics
@Keyword(desc = "If TRUE, then statistics for this object are " +
                "included in the main output report.",
         example = "Object1 PrintToReport { TRUE }")
private final BooleanInput printToReport;

// States
// NOTE(review): raw java.util.Vector holding String state names (see the static
// initializer below) — legacy collection type kept as-is.
private static Vector stateList = new Vector( 11, 1 ); // List of valid states
private final HashMap<String, StateRecord> stateMap;   // Lower-cased state name -> per-state statistics record
protected double workingHours; // Accumulated working time spent in working states

/**
 * Per-state bookkeeping: cumulative hours, per-cycle hours, and the last two
 * times at which the entity entered this state.
 */
private static class StateRecord {
    String stateName;
    int index;                          // Position of this state in stateList
    double initializationHours;         // Hours accumulated during the initialization period
    double totalHours;                  // Total hours since statistics were last cleared
    double completedCycleHours;         // Hours accumulated over completed cycles
    double currentCycleHours;           // Hours accumulated in the current (incomplete) cycle
    double lastStartTimeInState;        // Most recent time the entity entered this state
    double secondLastStartTimeInState;  // The entry time before that

    public StateRecord(String state, int i) {
        stateName = state;
        index = i;
    }
    public int getIndex() { return index; }
    public String getStateName() { return stateName; }
    public double getTotalHours() { return totalHours; }
    public double getCompletedCycleHours() { return completedCycleHours; }
    public double getCurrentCycleHours() { return currentCycleHours; }
    public double getLastStartTimeInState() { return lastStartTimeInState; }
    public double getSecondLastStartTimeInState() { return secondLastStartTimeInState; }
    public void setInitializationHours(double init) { initializationHours = init; }
    public void setTotalHours(double total) { totalHours = total; }
    public void setCompletedCycleHours(double hours) { completedCycleHours = hours; }
    public void setCurrentCycleHours(double hours) { currentCycleHours = hours; }
    public void setLastStartTimeInState(double lastTime) { lastStartTimeInState = lastTime; }
    public void setSecondLastStartTimeInState(double secondLastTime) { secondLastStartTimeInState = secondLastTime; }

    @Override
    public String toString() { return getStateName(); }

    // Accumulate dur into both the running total and the current cycle.
    public void addHours(double dur) {
        totalHours += dur;
        currentCycleHours += dur;
    }

    public void clearReportStats() {
        totalHours = 0.0d;
        completedCycleHours = 0.0d;
    }

    public void clearCurrentCycleHours() {
        currentCycleHours = 0.0d;
    }
}

private double timeOfLastStateChange;   // Simulation time of the most recent state transition
private int numberOfCompletedCycles;
protected double lastHistogramUpdateTime; // Last time at which a histogram was updated for this entity
protected double secondToLastHistogramUpdateTime; // Second to last time at which a histogram was updated for this entity
protected DoubleVector lastStartTimePerState; // Last time at which the state changed from some other state to each state
protected DoubleVector secondToLastStartTimePerState; // The second to last time at which the state changed from some other state to each state
private StateRecord presentState; // The present state of the entity
protected FileEntity stateReportFile; // The file to store the state information
private String finalLastState = ""; // The final state of the entity (in a sequence of transitional states)
private double timeOfLastPrintedState = 0; // The time that the last state printed in the trace state file

// Graphics
protected final static Color4d breakdownColor = ColourInput.DARK_RED; // Color of the entity in breaking down
protected final static Color4d maintenanceColor = ColourInput.RED; // Color of the entity in maintenance

// Register the four built-in states shared by all ModelEntity instances.
static {
    stateList.addElement( "Idle" );
    stateList.addElement( "Working" );
    stateList.addElement( "Breakdown" );
    stateList.addElement( "Maintenance" );
}

// Instance initializer: declare the simulation input keywords for maintenance,
// breakdowns and reporting. Runs before the constructor body.
{
    maintenanceDurations = new DoubleListInput("MaintenanceDurations", "Maintenance", new DoubleVector());
    maintenanceDurations.setValidRange(0.0d, Double.POSITIVE_INFINITY);
    maintenanceDurations.setUnits("h");
    this.addInput(maintenanceDurations, true, "MaintenanceDuration");

    maintenanceIntervals = new DoubleListInput("MaintenanceIntervals", "Maintenance", new DoubleVector());
    maintenanceIntervals.setValidRange(0.0d, Double.POSITIVE_INFINITY);
    maintenanceIntervals.setUnits("h");
    this.addInput(maintenanceIntervals, true, "MaintenanceInterval");

    firstMaintenanceTimes = new DoubleListInput("FirstMaintenanceTimes", "Maintenance", new DoubleVector());
    firstMaintenanceTimes.setValidRange(0.0d, Double.POSITIVE_INFINITY);
    firstMaintenanceTimes.setUnits("h");
    this.addInput(firstMaintenanceTimes, true, "FirstMaintenanceTime");

    forceMaintenance = new BooleanListInput("ForceMaintenance", "Maintenance", null);
    this.addInput(forceMaintenance, true);

    releaseEquipment = new BooleanListInput("ReleaseEquipment", "Maintenance", null);
    this.addInput(releaseEquipment, true);

    availability = new DoubleInput("Reliability", "Breakdowns", 1.0d, 0.0d, 1.0d);
    this.addInput(availability, true);

    downtimeIATDistribution = new EntityInput<ProbabilityDistribution>(ProbabilityDistribution.class, "DowntimeIATDistribution", "Breakdowns", null);
    this.addInput(downtimeIATDistribution, true);

    downtimeDurationDistribution = new EntityInput<ProbabilityDistribution>(ProbabilityDistribution.class, "DowntimeDurationDistribution", "Breakdowns", null);
    this.addInput(downtimeDurationDistribution, true);

    downtimeToReleaseEquipment = new DoubleInput("DowntimeToReleaseEquipment", "Breakdowns", 0.0d, 0.0d, Double.POSITIVE_INFINITY);
    this.addInput(downtimeToReleaseEquipment, true);

    skipMaintenanceIfOverlap = new BooleanListInput("SkipMaintenanceIfOverlap", "Maintenance", new BooleanVector());
    this.addInput(skipMaintenanceIfOverlap, true);

    deferMaintenanceLimit = new DoubleListInput("DeferMaintenanceLimit", "Maintenance", null);
    deferMaintenanceLimit.setValidRange(0.0d, Double.POSITIVE_INFINITY);
    deferMaintenanceLimit.setUnits("h");
    this.addInput(deferMaintenanceLimit, true);

    sharedMaintenanceList = new EntityListInput<ModelEntity>(ModelEntity.class, "SharedMaintenance", "Maintenance", new ArrayList<ModelEntity>(0));
    this.addInput(sharedMaintenanceList, true);

    firstMaintenanceOperatingHours = new DoubleListInput("FirstMaintenanceOperatingHours", "Maintenance", new DoubleVector());
    firstMaintenanceOperatingHours.setValidRange(0.0d, Double.POSITIVE_INFINITY);
    firstMaintenanceOperatingHours.setUnits("h");
    this.addInput(firstMaintenanceOperatingHours, true);

    maintenanceOperatingHoursDurations = new DoubleListInput("MaintenanceOperatingHoursDurations", "Maintenance", new DoubleVector());
    maintenanceOperatingHoursDurations.setValidRange(1e-15, Double.POSITIVE_INFINITY);
    maintenanceOperatingHoursDurations.setUnits("h");
    this.addInput(maintenanceOperatingHoursDurations, true);

    maintenanceOperatingHoursIntervals = new DoubleListInput("MaintenanceOperatingHoursIntervals", "Maintenance", new DoubleVector());
    maintenanceOperatingHoursIntervals.setValidRange(1e-15, Double.POSITIVE_INFINITY);
    maintenanceOperatingHoursIntervals.setUnits("h");
    this.addInput(maintenanceOperatingHoursIntervals, true);

    printToReport = new BooleanInput("PrintToReport", "Report", true);
    this.addInput(printToReport, true);
}

public ModelEntity() {
    lastHistogramUpdateTime = 0.0;
    secondToLastHistogramUpdateTime = 0.0;
    lastStartTimePerState = new DoubleVector();
    secondToLastStartTimePerState = new DoubleVector();
    hoursForNextFailure = 0.0;
    iATFailure = 0.0;
    maintenancePendings = new IntegerVector( 1, 1 );
    maintenanceOperatingHoursPendings = new IntegerVector( 1, 1 );
    hoursForNextMaintenanceOperatingHours = new DoubleVector( 1, 1 );
    performMaintenanceAfterShipDelayPending = false;
    lastScheduledMaintenanceTimes = new DoubleVector();
    breakdownStartTime = 0.0;
    breakdownEndTime = Double.POSITIVE_INFINITY;
    breakdownPending = false;
    brokendown = false;
    associatedBreakdown = false;
    maintenanceStartTime = 0.0;
    maintenanceEndTime = Double.POSITIVE_INFINITY;
    maintenance = false;
    associatedMaintenance = false;
    workingHours = 0.0;
    stateMap = new HashMap<String, StateRecord>();
    initStateMap();
}

/**
 * Clear internal properties
 */
public void clearInternalProperties() {
    hoursForNextFailure = 0.0;
    performMaintenanceAfterShipDelayPending = false;
    breakdownPending = false;
    brokendown = false;
    associatedBreakdown = false;
    maintenance = false;
    associatedMaintenance = false;
    workingHours = 0.0;
}

/**
 * Validate the maintenance/breakdown inputs: list lengths must correspond,
 * and breakdown distributions must be consistent with the availability input.
 *
 * @throws InputErrorException if any input combination is invalid
 */
@Override
public void validate() throws InputErrorException {
    super.validate();
    this.validateMaintenance();
    Input.validateIndexedLists(firstMaintenanceOperatingHours.getValue(), maintenanceOperatingHoursIntervals.getValue(), "FirstMaintenanceOperatingHours", "MaintenanceOperatingHoursIntervals");
    Input.validateIndexedLists(firstMaintenanceOperatingHours.getValue(), maintenanceOperatingHoursDurations.getValue(), "FirstMaintenanceOperatingHours", "MaintenanceOperatingHoursDurations");
    // Availability below 1.0 implies breakdowns occur, so a duration distribution is mandatory
    if( getAvailability() < 1.0 ) {
        if( getDowntimeDurationDistribution() == null ) {
            throw new InputErrorException("When availability is less than one you must define downtimeDurationDistribution in your input file!");
        }
    }
    if( downtimeIATDistribution.getValue() != null ) {
        if( getDowntimeDurationDistribution() == null ) {
            throw new InputErrorException("When DowntimeIATDistribution is set, DowntimeDurationDistribution must also be set.");
        }
    }
    if( skipMaintenanceIfOverlap.getValue().size() > 0 )
        Input.validateIndexedLists(firstMaintenanceTimes.getValue(), skipMaintenanceIfOverlap.getValue(), "FirstMaintenanceTimes", "SkipMaintenanceIfOverlap");
    if( releaseEquipment.getValue() != null )
        Input.validateIndexedLists(firstMaintenanceTimes.getValue(), releaseEquipment.getValue(), "FirstMaintenanceTimes", "ReleaseEquipment");
    if( forceMaintenance.getValue() != null ) {
        Input.validateIndexedLists(firstMaintenanceTimes.getValue(), forceMaintenance.getValue(), "FirstMaintenanceTimes", "ForceMaintenance");
    }
    if(downtimeDurationDistribution.getValue() != null && downtimeDurationDistribution.getValue().getMinimumValue() < 0)
        throw new InputErrorException("DowntimeDurationDistribution cannot allow negative values");
    if(downtimeIATDistribution.getValue() != null && downtimeIATDistribution.getValue().getMinimumValue() < 0)
        throw new InputErrorException("DowntimeIATDistribution cannot allow negative values");
}

@Override
public void earlyInit() {
    super.earlyInit();
    // Initialize the breakdown distributions, if present
    if( downtimeDurationDistribution.getValue() != null ) {
        downtimeDurationDistribution.getValue().initialize();
    }
    if( downtimeIATDistribution.getValue() != null ) {
        downtimeIATDistribution.getValue().initialize();
    }
}

/**
 * Runs after initialization period
 */
public void collectInitializationStats() {
    for ( StateRecord each : stateMap.values() ) {
        each.setInitializationHours( getTotalHoursFor(each) );
        each.clearReportStats();
        if (each == presentState)
            each.setCurrentCycleHours( getCurrentCycleHoursFor(each) );
    }
    // Credit the in-progress working span before resetting the change time
    if ( this.isWorking() )
        workingHours += getCurrentTime() - timeOfLastStateChange;
    timeOfLastStateChange = getCurrentTime();
    numberOfCompletedCycles = 0;
}

/**
 * Runs when cycle is finished
 */
public void collectCycleStats() {
    // finalize cycle for each state record
    for ( StateRecord each : stateMap.values() ) {
        double hour = each.getCompletedCycleHours();
        hour += getCurrentCycleHoursFor(each);
        each.setCompletedCycleHours(hour);
        each.clearCurrentCycleHours();
        if (each == presentState)
            each.setTotalHours( getTotalHoursFor(each) );
    }
    if ( this.isWorking() )
        workingHours += getCurrentTime() - timeOfLastStateChange;
    timeOfLastStateChange = getCurrentTime();
    numberOfCompletedCycles++;
}

/**
 * Runs after each report interval
 */
public void clearReportStats() {
    // clear totalHours for each state record
    for ( StateRecord each : stateMap.values() ) {
        each.clearReportStats();
    }
    numberOfCompletedCycles = 0;
}

public int getNumberOfCompletedCycles() {
    return numberOfCompletedCycles;
}

/**
 * Clear the current cycle hours
 */
protected void clearCurrentCycleHours() {
    // clear current cycle hours for each state record
    for ( StateRecord each : stateMap.values() ) {
        if (each == presentState)
            each.setTotalHours( getTotalHoursFor(each) );
        each.clearCurrentCycleHours();
    }
    if ( this.isWorking() )
        workingHours += getCurrentTime() - timeOfLastStateChange;
    timeOfLastStateChange = getCurrentTime();
}

/**
 * Rebuild stateMap from the state list, keyed by lower-cased state name.
 */
public void initStateMap() {
    // Populate the hash map for the states and StateRecord
    stateMap.clear();
    for (int i = 0; i < getStateList().size(); i++) {
        String state = (String)getStateList().get(i);
        StateRecord stateRecord = new StateRecord(state, i);
        stateMap.put(state.toLowerCase() , stateRecord);
    }
    timeOfLastStateChange = getCurrentTime();
}

// State lookup is case-insensitive (keys are lower-cased in initStateMap).
private StateRecord getStateRecordFor(String state) {
    return stateMap.get(state.toLowerCase());
}

private StateRecord getStateRecordFor(int index) {
    String state = (String)getStateList().get(index);
    return getStateRecordFor(state);
}

public double getCompletedCycleHoursFor(String state) {
    return getStateRecordFor(state).getCompletedCycleHours();
}

public double getCompletedCycleHoursFor(int index) {
    return getStateRecordFor(index).getCompletedCycleHours();
}

/** Sum of completed-cycle hours over all states. */
public double getCompletedCycleHours() {
    double total = 0.0d;
    for (int i = 0; i < getStateList().size(); i ++)
        total += getStateRecordFor(i).getCompletedCycleHours();
    return total;
}

public double getTotalHoursFor(int index) {
    return getTotalHoursFor( (String) getStateList().get(index) );
}

public double getTotalHoursFor(String state) {
    StateRecord rec = getStateRecordFor(state);
    return getTotalHoursFor(rec);
}

/**
 * Total hours in the given state, including the in-progress span if the
 * entity is presently in that state.
 */
public double getTotalHoursFor(StateRecord state) {
    double hours = state.getTotalHours();
    if (presentState == state)
        hours += getCurrentTime() - timeOfLastStateChange;
    return hours;
}

/** Total hours across all states, including the in-progress span. */
public double getTotalHours() {
    double total = getCurrentTime() - timeOfLastStateChange;
    for (int i = 0; i < getNumberOfStates(); i++)
        total += getStateRecordFor(i).getTotalHours();
    return total;
}

// INPUT
public void validateMaintenance() {
    Input.validateIndexedLists(firstMaintenanceTimes.getValue(), maintenanceIntervals.getValue(), "FirstMaintenanceTimes", "MaintenanceIntervals");
    Input.validateIndexedLists(firstMaintenanceTimes.getValue(), maintenanceDurations.getValue(), "FirstMaintenanceTimes", "MaintenanceDurations");
    for( int i = 0; i < maintenanceIntervals.getValue().size(); i++ ) {
        if( maintenanceIntervals.getValue().get( i ) < maintenanceDurations.getValue().get( i ) ) {
            throw new InputErrorException("MaintenanceInterval should be greater than MaintenanceDuration (%f) <= (%f)", maintenanceIntervals.getValue().get(i), maintenanceDurations.getValue().get(i));
        }
    }
}

// INITIALIZATION METHODS
public void clearStatistics() {
    // Shift the operating-hours maintenance thresholds back by the hours already worked
    for( int i = 0; i < getMaintenanceOperatingHoursIntervals().size(); i++ ) {
        hoursForNextMaintenanceOperatingHours.set( i, hoursForNextMaintenanceOperatingHours.get( i ) - this.getWorkingHours() );
    }
    // Determine the time for the first breakdown event
    /*if ( downtimeIATDistribution == null ) {
        if( breakdownSeed != 0 ) {
            breakdownRandGen.initialiseWith( breakdownSeed );
            hoursForNextFailure = breakdownRandGen.getUniformFrom_To( 0.5*iATFailure, 1.5*iATFailure );
        }
        else {
            hoursForNextFailure = getNextBreakdownIAT();
        }
    }
    else {
        hoursForNextFailure = getNextBreakdownIAT();
    }*/
}

/**
 * *!*!*!*! OVERLOAD !*!*!*!*
 * Initialize statistics
 */
public void initialize() {
    brokendown = false;
    maintenance = false;
    associatedBreakdown = false;
    associatedMaintenance = false;

    // Create state trace file if required
    if (testFlag(FLAG_TRACESTATE)) {
        String fileName = InputAgent.getReportDirectory() + InputAgent.getRunName() + "-" + this.getName() + ".trc";
        stateReportFile = new FileEntity( fileName, FileEntity.FILE_WRITE, false );
    }

    lastStartTimePerState.fillWithEntriesOf( getStateList().size(), 0.0 );
    secondToLastStartTimePerState.fillWithEntriesOf( getStateList().size(), 0.0 );
    workingHours = 0.0;

    // Calculate the average downtime duration if distributions are used
    double average = 0.0;
    if(getDowntimeDurationDistribution() != null)
        average = getDowntimeDurationDistribution().getExpectedValue();

    // Calculate the average downtime inter-arrival time
    if( (getAvailability() == 1.0 || average == 0.0) ) {
        // No breakdowns: push the first failure effectively to infinity
        iATFailure = 10.0E10;
    }
    else {
        if( getDowntimeIATDistribution() != null ) {
            iATFailure = getDowntimeIATDistribution().getExpectedValue();

            // Adjust the downtime inter-arrival time to get the specified availability
            if( ! Tester.equalCheckTolerance( iATFailure, ( (average / (1.0 - getAvailability())) - average ) ) ) {
                getDowntimeIATDistribution().setValueFactor_For( ( (average / (1.0 - getAvailability())) - average) / iATFailure, this );
                iATFailure = getDowntimeIATDistribution().getExpectedValue();
            }
        }
        else {
            // Derived so that average downtime / (uptime + downtime) matches (1 - availability)
            iATFailure = ( (average / (1.0 - getAvailability())) - average );
        }
    }

    // Determine the time for the first breakdown event
    hoursForNextFailure = getNextBreakdownIAT();

    int ind = this.indexOfState( "Idle" );
    if( ind != -1 ) {
        this.setPresentState( "Idle" );
    }
    brokendown = false;

    // Start the maintenance network
    if( firstMaintenanceTimes.getValue().size() != 0 ) {
        maintenancePendings.fillWithEntriesOf( firstMaintenanceTimes.getValue().size(), 0 );
        lastScheduledMaintenanceTimes.fillWithEntriesOf( firstMaintenanceTimes.getValue().size(), Double.POSITIVE_INFINITY );
        this.doMaintenanceNetwork();
    }

    // calculate hours for first operating hours breakdown
    for ( int i = 0; i < getMaintenanceOperatingHoursIntervals().size(); i++ ) {
        hoursForNextMaintenanceOperatingHours.add( firstMaintenanceOperatingHours.getValue().get( i ) );
        maintenanceOperatingHoursPendings.add( 0 );
    }
}

// ACCESSOR METHODS

/**
 * Return the time at which the most recent maintenance is scheduled to end
 */
public double getMaintenanceEndTime() {
    return maintenanceEndTime;
}

/**
 * Return the time at which the most recent breakdown is scheduled to end
 */
public double getBreakdownEndTime() {
    return breakdownEndTime;
}

public double getTimeOfLastStateChange() {
    return timeOfLastStateChange;
}

/**
 * Returns the availability proportion.
 */
public double getAvailability() {
    return availability.getValue();
}

public DoubleListInput getFirstMaintenanceTimes() {
    return firstMaintenanceTimes;
}

public boolean getPrintToReport() {
    return printToReport.getValue();
}

public boolean isBrokendown() {
    return brokendown;
}

public boolean isBreakdownPending() {
    return breakdownPending;
}

public boolean isInAssociatedBreakdown() {
    return associatedBreakdown;
}

public boolean isInMaintenance() {
    return maintenance;
}

public boolean isInAssociatedMaintenance() {
    return associatedMaintenance;
}

/** True if the entity is in any breakdown or maintenance condition. */
public boolean isInService() {
    return ( brokendown || maintenance || associatedBreakdown || associatedMaintenance );
}

public void setBrokendown( boolean bool ) {
    brokendown = bool;
    this.setPresentState();
}

public void setMaintenance( boolean bool ) {
    maintenance = bool;
    this.setPresentState();
}

public void setAssociatedBreakdown( boolean bool ) {
    associatedBreakdown = bool;
}

public void setAssociatedMaintenance( boolean bool ) {
    associatedMaintenance = bool;
}

public ProbabilityDistribution getDowntimeDurationDistribution() {
    return downtimeDurationDistribution.getValue();
}

public double getDowntimeToReleaseEquipment() {
    return downtimeToReleaseEquipment.getValue();
}

/** True if either planned maintenance or a breakdown distribution is configured. */
public boolean hasServiceDefined() {
    return( maintenanceDurations.getValue().size() > 0 || getDowntimeDurationDistribution() != null );
}

// HOURS AND STATES

/**
 * Return true if the entity is working
 */
public boolean isWorking() {
    // Base implementation always reports not working; subclasses override.
    return false;
}

/**
 * Returns the present status.
 */
public String getPresentState() {
    if (presentState == null)
        return "";
    return presentState.getStateName();
}

public boolean presentStateEquals(String state) {
    return getPresentState().equals(state);
}

public boolean presentStateMatches(String state) {
    return getPresentState().equalsIgnoreCase(state);
}

public boolean presentStateStartsWith(String prefix) {
    return getPresentState().startsWith(prefix);
}

public boolean presentStateEndsWith(String suffix) {
    return getPresentState().endsWith(suffix);
}

protected int getPresentStateIndex() {
    if (presentState == null)
        return -1;
    return presentState.getIndex();
}

// No-arg hook: subclasses decide the new state from their own flags.
public void setPresentState() {}

/**
 * Updates the statistics, then sets the present status to be the specified value.
 */
public void setPresentState( String state ) {
    if( traceFlag ) this.trace("setState( "+state+" )");
    if( traceFlag ) this.traceLine(" Old State = "+getPresentState() );

    if( ! presentStateEquals( state ) ) {
        if (testFlag(FLAG_TRACESTATE)) this.printStateTrace(state);

        int ind = this.indexOfState( state );
        if( ind != -1 ) {
            // Credit the elapsed time to the state being left
            if (presentState != null) {
                double time = getCurrentTime();
                if (time != timeOfLastStateChange) {
                    double dur = time - timeOfLastStateChange;
                    presentState.addHours(dur);
                    if ( this.isWorking() )
                        workingHours += dur;
                }
            }
            timeOfLastStateChange = getCurrentTime();
            presentState = getStateRecordFor(state);

            // Shift the entry-time history for the state being entered
            presentState.setSecondLastStartTimeInState(presentState.getLastStartTimeInState());
            presentState.setLastStartTimeInState(getCurrentTime());
            if( lastStartTimePerState.size() > 0 ) {
                if( secondToLastStartTimePerState.size() > 0 ) {
                    secondToLastStartTimePerState.set( ind, lastStartTimePerState.get( ind ) );
                }
                lastStartTimePerState.set( ind, getCurrentTime() );
            }
        }
        else {
            throw new ErrorException( this + " Specified state: " + state + " was not found in the StateList: " + this.getStateList() );
        }
    }
}

/**
 * Print that state information on the trace state log file
 */
public void printStateTrace( String state ) {
    // First state ever
    if( finalLastState.equals("") ) {
        finalLastState = state;
        stateReportFile.putString(String.format("%.5f %s.setState( \"%s\" ) dt = %s\n", 0.0d, this.getName(), getPresentState(), formatNumber(getCurrentTime())));
        stateReportFile.flush();
        timeOfLastPrintedState = getCurrentTime();
    }
    else {
        // The final state in a sequence from the previous state change (one step behind)
        if ( ! Tester.equalCheckTimeStep( timeOfLastPrintedState, getCurrentTime() ) ) {
            stateReportFile.putString(String.format("%.5f %s.setState( \"%s\" ) dt = %s\n", timeOfLastPrintedState, this.getName(), finalLastState, formatNumber(getCurrentTime() - timeOfLastPrintedState)));
            // for( int i = 0; i < stateTraceRelatedModelEntities.size(); i++ ) {
            //     ModelEntitiy each = (ModelEntitiy) stateTraceRelatedModelEntities.get( i );
            //     putString( )
            stateReportFile.flush();
            timeOfLastPrintedState = getCurrentTime();
        }
        finalLastState = state;
    }
}

/**
 * Returns the amount of time spent in the specified status.
 */
public double getCurrentCycleHoursFor( String state ) {
    StateRecord rec = getStateRecordFor(state);
    return getCurrentCycleHoursFor(rec);
}

/**
 * Return spent hours for a given state at the index in stateList
 */
public double getCurrentCycleHoursFor(int index) {
    StateRecord rec = getStateRecordFor(index);
    return getCurrentCycleHoursFor(rec);
}

public double getCurrentCycleHoursFor(StateRecord state) {
    double hours = state.getCurrentCycleHours();
    // Include the in-progress span if we are presently in this state
    if (presentState == state)
        hours += getCurrentTime() - timeOfLastStateChange;
    return hours;
}

/**
 * Set the last time a histogram was updated for this entity
 */
public void setLastHistogramUpdateTime( double time ) {
    secondToLastHistogramUpdateTime = lastHistogramUpdateTime;
    lastHistogramUpdateTime = time;
}

/**
 * Returns the time from the start of the start state to the start of the end state
 */
public double getTimeFromStartState_ToEndState( String startState, String endState) {
    // Determine the index of the start state
    StateRecord startStateRec = this.getStateRecordFor(startState);
    if (startStateRec == null) {
        throw new ErrorException("Specified state: %s was not found in the StateList.", startState);
    }

    // Determine the index of the end state
    StateRecord endStateRec = this.getStateRecordFor(endState);
    if (endStateRec == null) {
        throw new ErrorException("Specified state: %s was not found in the StateList.", endState);
    }

    // Is the start time of the end state greater or equal to the start time of the start state?
    if (endStateRec.getLastStartTimeInState() >= startStateRec.getLastStartTimeInState()) {
        // If either time was not in the present cycle, return NaN
        if (endStateRec.getLastStartTimeInState() <= lastHistogramUpdateTime ||
            startStateRec.getLastStartTimeInState() <= lastHistogramUpdateTime ) {
            return Double.NaN;
        }
        // Return the time from the last start time of the start state to the last start time of the end state
        return endStateRec.getLastStartTimeInState() - startStateRec.getLastStartTimeInState();
    }
    else {
        // If either time was not in the present cycle, return NaN
        if (endStateRec.getLastStartTimeInState() <= lastHistogramUpdateTime ||
            startStateRec.getSecondLastStartTimeInState() <= secondToLastHistogramUpdateTime ) {
            return Double.NaN;
        }
        // Return the time from the second to last start time of the start state to the last start time of the end state
        return endStateRec.getLastStartTimeInState() - startStateRec.getSecondLastStartTimeInState();
    }
}

/**
 * Return the commitment
 */
public double getCommitment() {
    return 1.0 - this.getFractionOfTimeForState( "Idle" );
}

/**
 * Return the fraction of time for the given status
 */
public double getFractionOfTimeForState( String aState ) {
    if( getTotalHours() > 0.0 ) {
        return ((this.getTotalHoursFor( aState ) / getTotalHours()) );
    }
    else {
        return 0.0;
    }
}

/**
 * Return the percentage of time for the given status
 */
public double getPercentageOfTimeForState( String aState ) {
    if( getTotalHours() > 0.0 ) {
        return ((this.getTotalHoursFor( aState ) / getTotalHours()) * 100.0);
    }
    else {
        return 0.0;
    }
}

/**
 * Returns the number of hours the entity is in use.
 * *!*!*!*! OVERLOAD !*!*!*!*
 */
public double getWorkingHours() {
    double hours = 0.0d;
    if ( this.isWorking() )
        hours = getCurrentTime() - timeOfLastStateChange;
    return workingHours + hours;
}

public Vector getStateList() {
    return stateList;
}

/** Index of the given state in stateList (case-insensitive), or -1 if unknown. */
public int indexOfState( String state ) {
    StateRecord stateRecord = stateMap.get( state.toLowerCase() );
    if (stateRecord != null)
        return stateRecord.getIndex();
    return -1;
}

/**
 * Return total number of states
 */
public int getNumberOfStates() {
    return stateMap.size();
}

/**
 * Return the total hours in current cycle for all the states
 */
public double getCurrentCycleHours() {
    double total = getCurrentTime() - timeOfLastStateChange;
    for (int i = 0; i < getNumberOfStates(); i++) {
        total += getStateRecordFor(i).getCurrentCycleHours();
    }
    return total;
}

// MAINTENANCE METHODS

/**
 * Perform tasks required before a maintenance period
 */
public void doPreMaintenance() {
    //@debug@ cr 'Entity should be overloaded' print
}

/**
 * Start working again following a breakdown or maintenance period
 */
public void restart() {
    //@debug@ cr 'Entity should be overloaded' print
}

/**
 * Disconnect routes, release truck assignments, etc. when performing maintenance or breakdown
 */
public void releaseEquipment() {}

// Defaults to TRUE when no ReleaseEquipment input was given.
public boolean releaseEquipmentForMaintenanceSchedule( int index ) {
    if( releaseEquipment.getValue() == null )
        return true;
    return releaseEquipment.getValue().get( index );
}

// Defaults to FALSE when no ForceMaintenance input was given.
public boolean forceMaintenanceSchedule( int index ) {
    if( forceMaintenance.getValue() == null )
        return false;
    return forceMaintenance.getValue().get( index );
}

/**
 * Perform all maintenance schedules that are due
 */
public void doMaintenance() {
    // scheduled maintenance
    for( int index = 0; index < maintenancePendings.size(); index++ ) {
        if( this.getMaintenancePendings().get( index ) > 0 ) {
            if( traceFlag ) this.trace( "Starting Maintenance Schedule: " + index );
            this.doMaintenance(index);
        }
    }

    // Operating hours maintenance
    for( int index = 0; index < maintenanceOperatingHoursPendings.size(); index++ ) {
        if( this.getWorkingHours() > hoursForNextMaintenanceOperatingHours.get( index ) ) {
            hoursForNextMaintenanceOperatingHours.set(index, this.getWorkingHours() + getMaintenanceOperatingHoursIntervals().get( index ));
            maintenanceOperatingHoursPendings.addAt( 1, index );
            this.doMaintenanceOperatingHours(index);
        }
    }
}

/**
 * Perform all the planned maintenance that is due for the given schedule
 */
public void doMaintenance( int index ) {
    // Duration comes from the master entity when maintenance is shared
    double wait;
    if( masterMaintenanceEntity != null ) {
        wait = masterMaintenanceEntity.getMaintenanceDurations().getValue().get( index );
    }
    else {
        wait = this.getMaintenanceDurations().getValue().get( index );
    }

    if( wait > 0.0 && maintenancePendings.get( index ) != 0 ) {
        if( traceFlag ) this.trace( "ModelEntity.doMaintenance_Wait() -- start of maintenance" );

        // Keep track of the start and end of maintenance times
        maintenanceStartTime = getCurrentTime();
        if( masterMaintenanceEntity != null ) {
            maintenanceEndTime = maintenanceStartTime + ( maintenancePendings.get( index ) * masterMaintenanceEntity.getMaintenanceDurations().getValue().get( index ) );
        }
        else {
            maintenanceEndTime = maintenanceStartTime + ( maintenancePendings.get( index ) * maintenanceDurations.getValue().get( index ) );
        }
        this.setPresentState( "Maintenance" );
        maintenance = true;
        this.doPreMaintenance();

        // Release equipment if necessary
        if( this.releaseEquipmentForMaintenanceSchedule( index ) ) {
            this.releaseEquipment();
        }

        // One scheduleWait per pending maintenance occurrence
        while( maintenancePendings.get( index ) != 0 ) {
            maintenancePendings.subAt( 1, index );
            scheduleWait( wait );

            // If maintenance pending goes negative, something is wrong
            if( maintenancePendings.get( index ) < 0 ) {
                this.error( "ModelEntity.doMaintenance_Wait()", "Maintenace pending should not be negative", "maintenacePending = "+maintenancePendings.get( index ) );
            }
        }
        if( traceFlag ) this.trace( "ModelEntity.doMaintenance_Wait() -- end of maintenance" );

        // The maintenance is over
        this.setPresentState( "Idle" );
        maintenance = false;
        this.restart();
    }
}

/**
 * Perform all the planned maintenance that is due
 */
public void doMaintenanceOperatingHours( int index ) {
    if(maintenanceOperatingHoursPendings.get( index ) == 0 )
        return;
    if( traceFlag ) this.trace( "ModelEntity.doMaintenance_Wait() -- start of maintenance" );

    // Keep track of the start and end of maintenance times
    maintenanceStartTime = getCurrentTime();
    maintenanceEndTime = maintenanceStartTime + (maintenanceOperatingHoursPendings.get( index ) * getMaintenanceOperatingHoursDurationFor(index));

    this.setPresentState( "Maintenance" );
    maintenance = true;
    this.doPreMaintenance();

    // NOTE(review): each pass waits the full (pendings * duration) span yet only
    // decrements one pending — with more than one pending this waits longer than
    // the computed maintenanceEndTime. Confirm this is the intended behavior.
    while( maintenanceOperatingHoursPendings.get( index ) != 0 ) {
        //scheduleWait( maintenanceDurations.get( index ) );
        scheduleWait( maintenanceEndTime - maintenanceStartTime );
        maintenanceOperatingHoursPendings.subAt( 1, index );

        // If maintenance pending goes negative, something is wrong
        if( maintenanceOperatingHoursPendings.get( index ) < 0 ) {
            this.error( "ModelEntity.doMaintenance_Wait()", "Maintenace pending should not be negative", "maintenacePending = "+maintenanceOperatingHoursPendings.get( index ) );
        }
    }
    if( traceFlag )
        this.trace( "ModelEntity.doMaintenance_Wait() -- end of maintenance" );

    // The maintenance is over
    maintenance = false;
    this.setPresentState( "Idle" );
    this.restart();
}

/**
 * Check if a maintenance is due. if so, try to perform the maintenance
 */
public boolean checkMaintenance() {
    if( traceFlag ) this.trace( "checkMaintenance()" );
    if( checkOperatingHoursMaintenance() ) {
        return true;
    }

    // List of all entities going to maintenance
    ArrayList<ModelEntity> sharedMaintenanceEntities;

    // This is not a master maintenance entity
    if( masterMaintenanceEntity != null ) {
        sharedMaintenanceEntities = masterMaintenanceEntity.getSharedMaintenanceList();
    }
    // This is a master maintenance entity
    else {
        sharedMaintenanceEntities = getSharedMaintenanceList();
    }

    // If this entity is in shared maintenance relation with a group of entities
    if( sharedMaintenanceEntities.size() > 0 || masterMaintenanceEntity != null ) {
        // Are all entities in the group ready for maintenance
        if( this.areAllEntitiesAvailable() ) {

            // For every entity in the shared maintenance list plus the master maintenance entity
            // (the <= bound deliberately includes one extra iteration for the master entity)
            for( int i=0; i <= sharedMaintenanceEntities.size(); i++ ) {
                ModelEntity aModel;

                // Locate master maintenance entity (after all entities in the shared maintenance list have been taken care of)
                if( i == sharedMaintenanceEntities.size() ) {
                    // This entity is the master maintenance entity
                    if( masterMaintenanceEntity == null ) {
                        aModel = this;
                    }
                    // This entity is on the shared maintenance list of the master maintenance entity
                    else {
                        aModel = masterMaintenanceEntity;
                    }
                }
                // Next entity in the shared maintenance list
                else {
                    aModel = sharedMaintenanceEntities.get( i );
                }

                // Check for aModel maintenances
                // NOTE(review): the loop bound uses this entity's maintenancePendings.size()
                // while reading aModel's pendings — assumes all entities in the group have
                // the same number of maintenance schedules; confirm.
                for( int index = 0; index < maintenancePendings.size(); index++ ) {
                    if( aModel.getMaintenancePendings().get( index ) > 0 ) {
                        if( traceFlag ) this.trace( "Starting Maintenance Schedule: " + index );
                        aModel.startProcess("doMaintenance", index);
                    }
                }
            }
            return true;
        }
        else {
            return false;
        }
    }
    // This block is maintained independently
    else {
        // Check for maintenances
        for( int i = 0; i < maintenancePendings.size(); i++ ) {
            if( maintenancePendings.get( i ) > 0 ) {
                if( this.canStartMaintenance( i ) ) {
                    if( traceFlag ) this.trace( "Starting Maintenance Schedule: " + i );
                    this.startProcess("doMaintenance", i);
                    return true;
                }
            }
        }
    }
    return false;
}

/**
 * Determine how many hours of maintenance is scheduled between startTime and endTime
 */
public double getScheduledMaintenanceHoursForPeriod( double startTime, double endTime ) {
    if( traceFlag ) this.trace("Handler.getScheduledMaintenanceHoursForPeriod( "+startTime+", "+endTime+" )" );

    double totalHours = 0.0;
    double firstTime = 0.0;

    // Add on hours for all pending maintenance
    for( int i=0; i < maintenancePendings.size(); i++ ) {
        totalHours += maintenancePendings.get( i ) * maintenanceDurations.getValue().get( i );
    }

    if( traceFlag ) this.traceLine( "Hours of pending maintenances="+totalHours );

    // Add on hours for all maintenance scheduled to occur in the given period from startTime to endTime
    for( int i=0; i < maintenancePendings.size(); i++ ) {
        // Find the first time that maintenance is scheduled after startTime
        firstTime = firstMaintenanceTimes.getValue().get( i );
        while( firstTime < startTime ) {
            firstTime += maintenanceIntervals.getValue().get( i );
        }
        if( traceFlag ) this.traceLine(" first time maintenance "+i+" is scheduled after startTime= "+firstTime );

        // Now have the first maintenance start time after startTime
        // Add all maintenances that lie in the given interval
        // NOTE(review): endTime grows inside the loop, extending the search window
        // by each added duration — appears intentional (maintenance pushes the
        // period later), but verify against callers.
        while( firstTime < endTime ) {
            if( traceFlag ) this.traceLine(" Checking for maintenances for period:"+firstTime+" to "+endTime );

            // Add the maintenance
            totalHours += maintenanceDurations.getValue().get( i );

            // Update the search period
            endTime += maintenanceDurations.getValue().get( i );

            // Look for next maintenance in new interval
            firstTime += maintenanceIntervals.getValue().get( i );
            if( traceFlag ) this.traceLine(" Adding Maintenance duration = "+maintenanceDurations.getValue().get( i ) );
        }
    }

    // Return the total hours of maintenance scheduled from startTime to endTime
    if( traceFlag ) this.traceLine( "Maintenance hours to add= "+totalHours );
    return totalHours;
}

public boolean checkOperatingHoursMaintenance() {
    if( traceFlag ) this.trace("checkOperatingHoursMaintenance()");

    // Check for maintenances
    for( int i = 0; i < getMaintenanceOperatingHoursIntervals().size(); i++ ) {
        // If the entity is not available, maintenance cannot start
        if( ! this.canStartMaintenance( i ) )
            continue;

        if( this.getWorkingHours() > hoursForNextMaintenanceOperatingHours.get( i ) ) {
            hoursForNextMaintenanceOperatingHours.set(i, (this.getWorkingHours() + getMaintenanceOperatingHoursIntervals().get( i )));
            maintenanceOperatingHoursPendings.addAt( 1, i );

            if( traceFlag ) this.trace( "Starting Maintenance Operating Hours Schedule : " + i );
            this.startProcess("doMaintenanceOperatingHours", i);
            return true;
        }
    }
    return false;
}

/**
 * Wrapper method for doMaintenance_Wait.
 */
public void doMaintenanceNetwork() {
    this.startProcess("doMaintenanceNetwork_Wait");
}

/**
 * Network for planned maintenance.
 * This method should be called in the initialize method of the specific entity.
 */
public void doMaintenanceNetwork_Wait() {
    // Initialize schedules
    for( int i=0; i < maintenancePendings.size(); i++ ) {
        maintenancePendings.set( i, 0 );
    }
    nextMaintenanceTimes = new DoubleVector(firstMaintenanceTimes.getValue());
    nextMaintenanceDuration = 0;

    // Find the next maintenance event
    int index = 0;
    double earliestTime = Double.POSITIVE_INFINITY;
    for( int i=0; i < nextMaintenanceTimes.size(); i++ ) {
        double time = nextMaintenanceTimes.get( i );
        if( Tester.lessCheckTolerance( time, earliestTime ) ) {
            earliestTime = time;
            index = i;
            nextMaintenanceDuration = maintenanceDurations.getValue().get( i );
        }
    }

    // Make sure that maintenance for entities on the shared list are being called after those entities have been initialize (AT TIME ZERO)
    scheduleLastLIFO();

    while( true ) {
        double dt = earliestTime - getCurrentTime();

        // Wait for the maintenance check time
        if( dt > Process.getEventTolerance() ) {
            scheduleWait( dt );
        }

        // Increment the number of maintenances due for the entity
        maintenancePendings.addAt( 1, index );

        // If this is a master maintenance entity
        if (getSharedMaintenanceList().size() > 0) {
            // If all the entities on the shared list are ready for maintenance
            if( this.areAllEntitiesAvailable() ) {
                // Put this entity to maintenance
                if( traceFlag ) this.trace( "Starting Maintenance Schedule: " + index );
                this.startProcess("doMaintenance", index);
            }
        }
        // If this entity is maintained independently
        else {
            // Do maintenance if possible
            if( ! this.isInService() && this.canStartMaintenance( index ) ) {
                // if( traceFlag ) this.trace( "doMaintenanceNetwork_Wait: Starting Maintenance. PresentState = "+presentState+" IsAvailable?
= "+this.isAvailable() ); if( traceFlag ) this.trace( "Starting Maintenance Schedule: " + index ); this.startProcess("doMaintenance", index); } // Keep track of the time the maintenance was attempted else { lastScheduledMaintenanceTimes.set( index, getCurrentTime() ); // If skipMaintenance was defined, cancel the maintenance if( this.shouldSkipMaintenance( index ) ) { // if a different maintenance is due, cancel this maintenance boolean cancelMaintenance = false; for( int i=0; i < maintenancePendings.size(); i++ ) { if( i != index ) { if( maintenancePendings.get( i ) > 0 ) { cancelMaintenance = true; break; } } } if( cancelMaintenance || this.isInMaintenance() ) { maintenancePendings.subAt( 1, index ); } } // Do a check after the limit has expired if( this.getDeferMaintenanceLimit( index ) > 0.0 ) { this.startProcess( "scheduleCheckMaintenance", this.getDeferMaintenanceLimit( index ) ); } } } // Determine the next maintenance time nextMaintenanceTimes.addAt( maintenanceIntervals.getValue().get( index ), index ); // Find the next maintenance event index = 0; earliestTime = Double.POSITIVE_INFINITY; for( int i=0; i < nextMaintenanceTimes.size(); i++ ) { double time = nextMaintenanceTimes.get( i ); if( Tester.lessCheckTolerance( time, earliestTime ) ) { earliestTime = time; index = i; nextMaintenanceDuration = maintenanceDurations.getValue().get( i ); } } } } public double getDeferMaintenanceLimit( int index ) { if( deferMaintenanceLimit.getValue() == null ) return 0.0d; return deferMaintenanceLimit.getValue().get( index ); } public void scheduleCheckMaintenance( double wait ) { scheduleWait( wait ); this.checkMaintenance(); } public boolean shouldSkipMaintenance( int index ) { if( skipMaintenanceIfOverlap.getValue().size() == 0 ) return false; return skipMaintenanceIfOverlap.getValue().get( index ); } /** * Return TRUE if there is a pending maintenance for any schedule */ public boolean isMaintenancePending() { for( int i = 0; i < maintenancePendings.size(); i++ ) { 
if( maintenancePendings.get( i ) > 0 ) { return true; } } for( int i = 0; i < hoursForNextMaintenanceOperatingHours.size(); i++ ) { if( this.getWorkingHours() > hoursForNextMaintenanceOperatingHours.get( i ) ) { return true; } } return false; } public boolean isForcedMaintenancePending() { if( forceMaintenance.getValue() == null ) return false; for( int i = 0; i < maintenancePendings.size(); i++ ) { if( maintenancePendings.get( i ) > 0 && forceMaintenance.getValue().get(i) ) { return true; } } return false; } public ArrayList<ModelEntity> getSharedMaintenanceList () { return sharedMaintenanceList.getValue(); } public IntegerVector getMaintenancePendings () { return maintenancePendings; } public DoubleListInput getMaintenanceDurations() { return maintenanceDurations; } /** * Return the start of the next scheduled maintenance time if not in maintenance, * or the start of the current scheduled maintenance time if in maintenance */ public double getNextMaintenanceStartTime() { if( nextMaintenanceTimes == null ) return Double.POSITIVE_INFINITY; else return nextMaintenanceTimes.getMin(); } /** * Return the duration of the next maintenance event (assuming only one pending) */ public double getNextMaintenanceDuration() { return nextMaintenanceDuration; } // Shows if an Entity would ever go on service public boolean hasServiceScheduled() { if( firstMaintenanceTimes.getValue().size() != 0 || masterMaintenanceEntity != null ) { return true; } return false; } public void setMasterMaintenanceBlock( ModelEntity aModel ) { masterMaintenanceEntity = aModel; } // BREAKDOWN METHODS /** * No Comments Given. */ public void calculateTimeOfNextFailure() { hoursForNextFailure = (this.getWorkingHours() + this.getNextBreakdownIAT()); } /** * Activity Network for Breakdowns. */ public void doBreakdown() { } /** * Prints the header for the entity's state list. 
* @return bottomLine contains format for each column of the bottom line of the group report */ public IntegerVector printUtilizationHeaderOn( FileEntity anOut ) { IntegerVector bottomLine = new IntegerVector(); if( getStateList().size() != 0 ) { anOut.putStringTabs( "Name", 1 ); bottomLine.add( ReportAgent.BLANK ); int doLoop = getStateList().size(); for( int x = 0; x < doLoop; x++ ) { String state = (String)getStateList().get( x ); anOut.putStringTabs( state, 1 ); bottomLine.add( ReportAgent.AVERAGE_PCT_ONE_DEC ); } anOut.newLine(); } return bottomLine; } /** * Print the entity's name and percentage of hours spent in each state. * @return columnValues are the values for each column in the group report (0 if the value is a String) */ public DoubleVector printUtilizationOn( FileEntity anOut ) { double total; DoubleVector columnValues = new DoubleVector(); if( getNumberOfStates() != 0 ) { total = getTotalHours(); if( !(total == 0.0) ) { anOut.putStringTabs( getName(), 1 ); columnValues.add( 0.0 ); for( int i = 0; i < getNumberOfStates(); i++ ) { double value = getTotalHoursFor( i ) / total; anOut.putDoublePercentWithDecimals( value, 1 ); anOut.putTabs( 1 ); columnValues.add( value ); } anOut.newLine(); } } return columnValues; } /** * This method must be overridden in any subclass of ModelEntity. */ public boolean isAvailable() { throw new ErrorException( "Must override isAvailable in any subclass of ModelEntity." ); } /** * This method must be overridden in any subclass of ModelEntity. */ public boolean canStartMaintenance( int index ) { return isAvailable(); } /** * This method must be overridden in any subclass of ModelEntity. */ public boolean canStartForcedMaintenance() { return isAvailable(); } /** * This method must be overridden in any subclass of ModelEntity. */ public boolean areAllEntitiesAvailable() { throw new ErrorException( "Must override areAllEntitiesAvailable in any subclass of ModelEntity." 
); } /** * Return the time of the next breakdown duration */ public double getBreakdownDuration() { // if( traceFlag ) this.trace( "getBreakdownDuration()" ); // If a distribution was specified, then select a duration randomly from the distribution if ( getDowntimeDurationDistribution() != null ) { return getDowntimeDurationDistribution().nextValue(); } else { return 0.0; } } /** * Return the time of the next breakdown IAT */ public double getNextBreakdownIAT() { if( getDowntimeIATDistribution() != null ) { return getDowntimeIATDistribution().nextValue(); } else { return iATFailure; } } public double getHoursForNextFailure() { return hoursForNextFailure; } public void setHoursForNextFailure( double hours ) { hoursForNextFailure = hours; } /** Returns a vector of strings describing the ModelEntity. Override to add details @return Vector - tab delimited strings describing the DisplayEntity **/ @Override public Vector getInfo() { Vector info = super.getInfo(); if ( presentStateEquals("") ) info.addElement( "Present State\t<no state>" ); else info.addElement( "Present State" + "\t" + getPresentState() ); return info; } protected DoubleVector getMaintenanceOperatingHoursIntervals() { return maintenanceOperatingHoursIntervals.getValue(); } protected double getMaintenanceOperatingHoursDurationFor(int index) { return maintenanceOperatingHoursDurations.getValue().get(index); } protected ProbabilityDistribution getDowntimeIATDistribution() { return downtimeIATDistribution.getValue(); } }
package com.socrata.datasync.ui;

import com.socrata.datasync.*;
import com.socrata.datasync.DatasetUtils;
import com.socrata.datasync.PublishMethod;
import com.socrata.datasync.Utils;
import com.socrata.datasync.config.controlfile.ControlFile;
import com.socrata.datasync.job.IntegrationJob;
import com.socrata.datasync.config.userpreferences.UserPreferences;
import com.socrata.datasync.config.userpreferences.UserPreferencesJava;
import com.socrata.datasync.job.IntegrationJob;
import com.socrata.datasync.job.JobStatus;
import com.socrata.datasync.model.ControlFileModel;
import com.socrata.datasync.model.DatasetModel;
import com.socrata.datasync.validation.IntegrationJobValidity;
import com.socrata.exceptions.LongRunningQueryException;
import com.socrata.exceptions.SodaError;
import com.socrata.model.importer.Dataset;
import org.apache.http.HttpException;
import com.fasterxml.jackson.databind.DeserializationConfig;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationConfig;
import com.fasterxml.jackson.databind.SerializationFeature;

import javax.swing.*;
import javax.swing.filechooser.FileNameExtensionFilter;
import java.awt.*;
import java.awt.dnd.InvalidDnDOperationException;
import java.awt.event.*;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URISyntaxException;

/**
 * Swing tab that lets the user configure, run, and save a Socrata
 * "integration job": the file to publish, the target dataset ID, the
 * publish method, the control-file (field-mapping) configuration, and
 * the command-line command for scheduled runs.
 */
public class IntegrationJobTab implements JobTab {

    private static final int DATASET_ID_TEXTFIELD_WIDTH = 160;
    private static final int JOB_COMMAND_TEXTFIELD_WIDTH = 212;
    private static final int JOB_FILE_TEXTFIELD_WIDTH = 263;
    private static final int JOB_TEXTFIELD_HEIGHT = 26;
    private static final int JOB_FIELD_VGAP = 5;
    private static final FlowLayout FLOW_LEFT = new FlowLayout(FlowLayout.LEFT, 0, 0);
    private static final FlowLayout FLOW_RIGHT = new FlowLayout(FlowLayout.LEFT, 0, JOB_FIELD_VGAP);

    private static final String DEFAULT_RUN_JOB_COMMAND = "(Generates when job is saved)";

    private static final String BROWSE_BUTTON_TEXT = "Browse...";
    private static final String EMPTY_TEXTAREA_CONTENT = "";
    private static final String JOB_FILE_NAME = "Socrata Integration Job";
    private static final String JOB_FILE_EXTENSION = "sij";

    private static final int HELP_ICON_TOP_PADDING = 12;
    private static final String FILE_TO_PUBLISH_TIP_TEXT = "CSV or TSV file containing the data to be published";
    private static final String HAS_HEADER_ROW_TIP_TEXT = "<html><body style='width: 300px'>Check this box if the first row in the CSV/TSV " +
            "contains the column identifiers (API field names) in the dataset.<br>" +
            "If the CSV/TSV does not have a header row the order of rows must exactly match the order in the dataset.</body></html>";
    private static final String DATASET_ID_TIP_TEXT = "<html><body style='width: 300px'>The identifier in the form of xxxx-xxxx (e.g. n38h-y5wp) " +
            "of the Socrata dataset where the data will be published</body></html>";
    private static final String PUBLISH_METHOD_TIP_TEXT = "<html><body style='width: 400px'>Method used to publish data:<br>" +
            "<strong>replace</strong>: simply replaces the dataset with the data in the CSV/TSV file to publish.<br>" +
            "<strong>upsert</strong>: update rows that already exist and append any new rows.<br>" +
            "&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; " +
            "NOTE: updating rows requires the dataset to have Row Identifier<br>" +
            "<strong>append</strong>: adds all rows in the CSV/TSV as new rows.<br>" +
            "<strong>delete</strong>: delete all rows matching Row Identifiers given in CSV/TSV file. " +
            "CSV/TSV should only contain a single column listing the Row Identifiers to delete.<br>" +
            "&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; " +
            "NOTE: requires dataset to have Row Identifier." +
            "</body></html>";
    private static final String PUBLISH_VIA_FTP_ROW_TIP_TEXT = "<html><body style='width: 400px'>'Replace via HTTP' is the preferred " +
            "and most efficient publishing method. Sends CSV/TSV file over HTTP, automatically detects " +
            "changes since last update, and only updates new/changed rows.<br>" +
            "<strong>NOTE</strong>: If you choose FTP, your firewall may need to be configured to allow FTP traffic through ports " +
            "22222 (for the control connection) and all ports within the range of 3131 to 3141 (for data connection)</body></html>";
    private static final String CONTROL_FILE_TIP_TEXT = "<html><body style='width: 300px'>" +
            "Establishes import configuration such as date formatting and Location column being populated" +
            " from existing columns (for more information refer to Help -> control file configuration)</body></html>";
    private static final String GET_COLUMN_IDS_TIP_TEXT = "<html><body style='width: 400px'>" +
            "Displays a comma-separated list of the column identifiers (API field names) for the" +
            " dataset with the given ID (should be used as the header row of the CSV/TSV)" +
            "</body></html>";
    private static final String RUN_COMMAND_TIP_TEXT = "<html><body style='width: 300px'>After saving the job this field will be populated with a command-line command that can be used to run the job." +
            " This command can be input into tools such as the Windows Task Scheduler or ETL tools to run the job automatically.</body></html>";

    public static final String CONTAINS_A_HEADER_ROW_CHECKBOX_TEXT = "File to publish contains a header row";
    public static final String PUBLISH_VIA_SODA_RADIO_TEXT = "SODA2";
    public static final String PUBLISH_VIA_FTP_RADIO_TEXT = "FTP";
    public static final String PUBLISH_VIA_HTTP_RADIO_TEXT = "HTTP";
    public static final String COPY_TO_CLIPBOARD_BUTTON_TEXT = "Copy to clipboard";

    private JFrame mainFrame;
    private JPanel jobPanel;

    private String jobFileLocation; //Rest of the code assumes that this is never null. Adding to avoid null pointer exception when job initialization fails.
    private JLabel jobTabTitleLabel = new JLabel("Untitled");

    private JTextField datasetIDTextField;
    private String lastDatasetId; // used to decide when to regen the control file
    private JTextField fileToPublishTextField;
    private String lastFileToPublish; // used to decide when to regen the control file
    private JComboBox publishMethodComboBox;
    private ButtonGroup publishMethodRadioButtonGroup;
    private JRadioButton soda2Button;
    private JRadioButton ftpButton;
    private JRadioButton httpButton;
    private JPanel publishViaFTPLabelContainer;
    private JButton browseForControlFileButton;
    private JPanel controlFileLabelContainer;
    private JPanel controlFileSelectorContainer;
    private JButton generateEditControlFileButton;
    private ControlFileModel controlFileModel;
    private DatasetModel datasetModel;

    private JTextField runCommandTextField;
    private boolean usingControlFile;

    // build Container with all tab components populated with given job data
    public IntegrationJobTab(IntegrationJob job, JFrame containingFrame) {
        mainFrame = containingFrame;

        // build tab panel form
        jobPanel = new JPanel(new GridLayout(5,2));

        // Steps 1-5 of the form, added in display order
        addFileToPublishFieldToJobPanel();
        addDatasetIdFieldToJobPanel();
        addPublishMethodFieldToJobPanel();
        // controlFileContentTextArea = new JTextArea(EMPTY_TEXTAREA_CONTENT);
        addControlFileFieldToJobPanel();
        addRunCommandFieldToJobPanel();

        loadJobDataIntoUIFields(job);
        // Snapshot the loaded values so isDirty() can detect later edits
        lastDatasetId = datasetIDTextField.getText();
        lastFileToPublish = fileToPublishTextField.getText();
        if(job.getPublishMethod() == null)
            publishMethodComboBox.setSelectedItem(PublishMethod.replace);
    }

    // Step 4: label + "Map fields" button that opens the control-file editor
    private void addControlFileFieldToJobPanel() {
        controlFileLabelContainer = UIUtility.generateLabelWithHelpBubble(
                "Step 4 - Tell us how to import your file", CONTROL_FILE_TIP_TEXT, HELP_ICON_TOP_PADDING);
        jobPanel.add(controlFileLabelContainer);
        controlFileSelectorContainer = new JPanel(FLOW_RIGHT);
        generateEditControlFileButton = new JButton("Map fields");
        generateEditControlFileButton.addActionListener(new EditControlFileListener());
        controlFileSelectorContainer.add(generateEditControlFileButton);
        jobPanel.add(controlFileSelectorContainer);
    }

    // Step 5: read-only run-command field + copy-to-clipboard button
    private void addRunCommandFieldToJobPanel() {
        jobPanel.add(UIUtility.generateLabelWithHelpBubble(
                "Step 5 - Copy command for later (optional)", RUN_COMMAND_TIP_TEXT, HELP_ICON_TOP_PADDING));
        JPanel runCommandTextFieldContainer = new JPanel(FLOW_RIGHT);
        runCommandTextField = new JTextField(DEFAULT_RUN_JOB_COMMAND);
        runCommandTextField.setPreferredSize(new Dimension(
                JOB_COMMAND_TEXTFIELD_WIDTH, JOB_TEXTFIELD_HEIGHT));
        runCommandTextField.setEditable(false);
        runCommandTextField.addMouseListener(new JobCommandTextFieldListener());
        runCommandTextFieldContainer.add(runCommandTextField);
        JButton copyJobCommandButton = new JButton(COPY_TO_CLIPBOARD_BUTTON_TEXT);
        copyJobCommandButton.addActionListener(new CopyJobCommandListener());
        runCommandTextFieldContainer.add(copyJobCommandButton);
        jobPanel.add(runCommandTextFieldContainer);
    }

    // Step 3: publish-method combo box (radio buttons are created but not added to the panel)
    private void addPublishMethodFieldToJobPanel() {
        jobPanel.add(UIUtility.generateLabelWithHelpBubble(
                "Step 3 - Select update method", PUBLISH_METHOD_TIP_TEXT, HELP_ICON_TOP_PADDING));
        JPanel publishMethodTextFieldContainer = new JPanel(FLOW_RIGHT);
        publishMethodComboBox = new JComboBox<PublishMethod>(PublishMethod.values());
        publishMethodComboBox.addActionListener(new PublishMethodComboBoxListener());

        publishMethodTextFieldContainer.add(publishMethodComboBox);

        //Create the radio buttons
        //TODO: For test purposes only. Remove these eventually
        soda2Button = new JRadioButton(PUBLISH_VIA_SODA_RADIO_TEXT);
        ftpButton = new JRadioButton(PUBLISH_VIA_FTP_RADIO_TEXT);
        httpButton = new JRadioButton(PUBLISH_VIA_HTTP_RADIO_TEXT);
        httpButton.setSelected(true);

        publishViaFTPLabelContainer = new JPanel(FLOW_LEFT);

        jobPanel.add(publishMethodTextFieldContainer);
    }

    // Step 2: dataset-ID text field
    private void addDatasetIdFieldToJobPanel() {
        jobPanel.add(UIUtility.generateLabelWithHelpBubble(
                "Step 2 - Enter Dataset ID to update", DATASET_ID_TIP_TEXT, HELP_ICON_TOP_PADDING));
        JPanel datasetIDTextFieldContainer = new JPanel(FLOW_RIGHT);
        datasetIDTextField = new JTextField();
        datasetIDTextField.setPreferredSize(new Dimension(
                DATASET_ID_TEXTFIELD_WIDTH, JOB_TEXTFIELD_HEIGHT));
        datasetIDTextFieldContainer.add(datasetIDTextField);
        jobPanel.add(datasetIDTextFieldContainer);
    }

    // Step 1: file-to-publish text field + Browse button
    private void addFileToPublishFieldToJobPanel() {
        jobPanel.add(
                UIUtility.generateLabelWithHelpBubble("Step 1 - Select file to publish", FILE_TO_PUBLISH_TIP_TEXT, HELP_ICON_TOP_PADDING));
        JPanel fileSelectorContainer = new JPanel(FLOW_RIGHT);
        fileToPublishTextField = new JTextField();
        fileToPublishTextField.setPreferredSize(new Dimension(
                JOB_FILE_TEXTFIELD_WIDTH, JOB_TEXTFIELD_HEIGHT));
        fileSelectorContainer.add(fileToPublishTextField);
        JFileChooser fileToPublishChooser = new JFileChooser();
        JButton openButton = new JButton(BROWSE_BUTTON_TEXT);
        FileToPublishSelectorListener chooserListener = new FileToPublishSelectorListener(
                fileToPublishChooser, fileToPublishTextField);
        openButton.addActionListener(chooserListener);
        fileSelectorContainer.add(openButton);
        jobPanel.add(fileSelectorContainer);
    }

    // Reflect the job's transport flags in the radio buttons (SODA2 when neither FTP nor HTTP)
    private void setReplaceRadioButtons(IntegrationJob job) {
        if (!job.getPublishViaDi2Http() && !job.getPublishViaFTP())
            soda2Button.setSelected(true);
        else{
            ftpButton.setSelected(job.getPublishViaFTP());
            httpButton.setSelected(job.getPublishViaDi2Http());
        }
    }

    // Populate all UI fields from a (possibly previously-saved) job; shows a dialog on failure
    private void loadJobDataIntoUIFields(IntegrationJob job) {
        try {
            if (job.getControlFileContent() != null) {
                ObjectMapper mapper = new ObjectMapper().enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY);
                ControlFile controlFile = mapper.readValue(job.getControlFileContent(), ControlFile.class);
                //Ideally this could be saved with the control file or factored out.  However, we're stuck with this redundant call
                // because of DI2's strict enforcement of control files and the current factoring of what CSVTableModel knows about
                controlFile.getFileTypeControl().filePath(job.getFileToPublish());
                //TODO: This is not being saved due to the fact that this value is set on the control file and not the job.  Pick one or the other.
                controlFile.getFileTypeControl().hasHeaderRow(job.getFileToPublishHasHeaderRow());
                updateControlFileModel(controlFile,job.getDatasetID());
            }
            datasetIDTextField.setText(job.getDatasetID());
            fileToPublishTextField.setText(job.getFileToPublish());
            PublishMethod jobPublishMethod = job.getPublishMethod();
            publishMethodComboBox.setSelectedItem(jobPublishMethod);
            updatePublishViaReplaceUIFields(job.getPublishViaFTP() || job.getPublishViaDi2Http());

            //Set the defaults on the button correctly.
            setReplaceRadioButtons(job);

            //TODO: If there is a way to save the pointer to the file, but not the file then we'll need to add a way to load it here
            if (job.getPathToControlFile() != null) {
                System.out.println("SKipping");
            }

            jobFileLocation = job.getPathToSavedFile();
            // if this is an existing job (meaning the job was opened from a file)
            // then populate the scheduler command textfield
            if (!jobFileLocation.equals("")) {
                runCommandTextField.setText(
                        Utils.getRunJobCommand(jobFileLocation));
            }

            jobTabTitleLabel = new JLabel(job.getJobFilename());
        }
        catch (Exception e){
            JOptionPane.showMessageDialog(mainFrame, "Error: " + e.getMessage());
        }
    }

    // Rebuild the dataset + control-file models used by the mapping editor and runJobNow()
    private void updateControlFileModel(ControlFile controlFile, String fourbyfour) throws LongRunningQueryException, InterruptedException, HttpException, IOException, URISyntaxException{
        UserPreferences userPrefs = new UserPreferencesJava();
        datasetModel = new DatasetModel(userPrefs, fourbyfour);
        controlFileModel = new ControlFileModel(controlFile, datasetModel);
    }

    // Show/hide the control-file (Step 4) widgets depending on the chosen transport
    private void updatePublishViaReplaceUIFields(boolean showFileInfo) {
        publishViaFTPLabelContainer.setVisible(true);
        if(showFileInfo) {
            controlFileLabelContainer.setVisible(true);
            controlFileSelectorContainer.setVisible(true);
        } else {
            controlFileLabelContainer.setVisible(false);
            controlFileSelectorContainer.setVisible(false);
        }
        jobPanel.updateUI();
    }

    public JPanel getTabPanel() {
        return jobPanel;
    }

    // Build an IntegrationJob from the current UI state and run it synchronously
    public JobStatus runJobNow() {
        if (controlFileModel == null ||
            controlFileModel.validate().isError()) {
            JobStatus noControlFile = JobStatus.INVALID_PUBLISH_METHOD;
            noControlFile.setMessage("We aren't quite ready to upload.  Click the \"Map Fields\" button to set the mappings for your CSV");
            return noControlFile;
        }

        IntegrationJob jobToRun = new IntegrationJob();
        jobToRun.setDatasetID(datasetIDTextField.getText());
        jobToRun.setFileToPublish(fileToPublishTextField.getText());
        jobToRun.setPublishMethod(
                (PublishMethod) publishMethodComboBox.getSelectedItem());
        jobToRun.setFileToPublishHasHeaderRow(controlFileModel.getControlFile().getFileTypeControl().hasHeaderRow);
        jobToRun.setPublishViaFTP(ftpButton.isSelected());
        jobToRun.setPublishViaDi2Http(httpButton.isSelected());
        if (usingControlFile) {
            jobToRun.setPathToControlFile(controlFileModel.getPath());
        } else {
            jobToRun.setControlFileContent(controlFileModel.getControlFileContents());
        }
        jobToRun.setUserAgentClient();
        return jobToRun.run();
    }

    // Serialize the current UI state to a .sij job file, prompting for a path on first save
    public void saveJob() {
        // Save job data
        IntegrationJob newIntegrationJob = new IntegrationJob();
        newIntegrationJob.setDatasetID(datasetIDTextField.getText());
        newIntegrationJob.setFileToPublish(fileToPublishTextField.getText());
        newIntegrationJob.setPublishMethod(
                (PublishMethod) publishMethodComboBox.getSelectedItem());
        newIntegrationJob.setFileToPublishHasHeaderRow(controlFileModel.getControlFile().getFileTypeControl().hasHeaderRow);
        newIntegrationJob.setPublishViaFTP(ftpButton.isSelected());
        newIntegrationJob.setPublishViaDi2Http(httpButton.isSelected());
        newIntegrationJob.setPathToControlFile(controlFileModel.getPath());
        newIntegrationJob.setControlFileContent(controlFileModel.getControlFileContents());
        newIntegrationJob.setPathToSavedFile(jobFileLocation);

        // TODO If an existing file was selected WARN user of overwriting

        // if first time saving this job: Open dialog box to select "Save as..." location
        // otherwise save to existing file
        boolean updateJobCommandTextField = false;
        String selectedJobFileLocation = jobFileLocation;
        if(selectedJobFileLocation.equals("")) {
            JFileChooser savedJobFileChooser = new JFileChooser();
            FileNameExtensionFilter filter = new FileNameExtensionFilter(
                    JOB_FILE_NAME + " (*." + JOB_FILE_EXTENSION + ")", JOB_FILE_EXTENSION);
            savedJobFileChooser.setFileFilter(filter);
            int returnVal = savedJobFileChooser.showSaveDialog(mainFrame);
            if (returnVal == JFileChooser.APPROVE_OPTION) {
                File file = savedJobFileChooser.getSelectedFile();
                selectedJobFileLocation = file.getAbsolutePath();
                if(!selectedJobFileLocation.endsWith("." + JOB_FILE_EXTENSION)) {
                    selectedJobFileLocation += "." + JOB_FILE_EXTENSION;
                }
                jobFileLocation = selectedJobFileLocation;
                newIntegrationJob.setPathToSavedFile(selectedJobFileLocation);
                jobTabTitleLabel.setText(newIntegrationJob.getJobFilename());
                updateJobCommandTextField = true;
            }
        }
        saveJobAsFile(newIntegrationJob, updateJobCommandTextField, selectedJobFileLocation);
    }

    // Write the job file and refresh the tab title / run-command field; shows a dialog on IO failure
    private void saveJobAsFile(IntegrationJob newIntegrationJob, boolean updateJobCommandTextField, String selectedJobFileLocation) {
        try {
            newIntegrationJob.writeToFile(selectedJobFileLocation);

            // Update job tab title label
            jobTabTitleLabel.setText(Utils.getFilename(selectedJobFileLocation));

            // Update the textfield with new command
            if(updateJobCommandTextField) {
                String runJobCommand = Utils.getRunJobCommand(
                        newIntegrationJob.getPathToSavedFile());
                runCommandTextField.setText(runJobCommand);
            }
        } catch (IOException e) {
            JOptionPane.showMessageDialog(mainFrame,
                    "Error saving " + selectedJobFileLocation + ": " + e.getMessage());
        }
    }

    public JLabel getJobTabTitleLabel() {
        return jobTabTitleLabel;
    }

    public String getJobFileLocation() {
        return jobFileLocation;
    }

    // Browse-button handler: copies the chosen file's path into the text field
    private class FileToPublishSelectorListener implements ActionListener {
        JFileChooser fileChooser;
        JTextField filePathTextField;

        public FileToPublishSelectorListener(JFileChooser chooser, JTextField textField) {
            fileChooser = chooser;
            filePathTextField = textField;
            fileChooser.setFileFilter(
                    UIUtility.getFileChooserFilter(IntegrationJobValidity.allowedFileToPublishExtensions));
        }

        public void actionPerformed(ActionEvent e) {
            int returnVal = fileChooser.showOpenDialog(mainFrame);
            if (returnVal == JFileChooser.APPROVE_OPTION) {
                File file = fileChooser.getSelectedFile();
                filePathTextField.setText(file.getAbsolutePath());
            } else {
                // Open command cancelled by user: do nothing
            }
        }
    }

    // Invalidates the models when dataset ID / file fields change, forcing a rebuild
    private class RegenerateControlFileListener extends FocusAdapter implements ActionListener {
        @Override
        public void focusLost(FocusEvent e){
            if (!e.isTemporary()) {
                setRegenerateControlFile();
            }
        }

        public void actionPerformed(ActionEvent e) {
            setRegenerateControlFile();
        }

        //Set the right variables so that the control file is regenerated when going to the mapping dialog
        //TODO: Consider paying attention to these at run job time as well
        private void setRegenerateControlFile(){
            controlFileModel = null;
            datasetModel = null;
        }
    }

    // Keeps transport radio buttons and control-file visibility in sync with the chosen method
    private class PublishMethodComboBoxListener implements ActionListener {
        public void actionPerformed(ActionEvent e) {
            PublishMethod selectedPublishMethod =
                    (PublishMethod) publishMethodComboBox.getSelectedItem();

            ftpButton.setVisible(PublishMethod.replace.equals(selectedPublishMethod));
            httpButton.setSelected(true);

            //Should not be null
            if (controlFileModel != null)
                controlFileModel.setType(Utils.capitalizeFirstLetter(selectedPublishMethod.name()));

            updatePublishViaReplaceUIFields(controlFileNeeded());
        }
    }

    private boolean controlFileNeeded() {
        return httpButton.isSelected() || ftpButton.isSelected();
    }

    // Select-all on click so the command is easy to copy by hand
    private class JobCommandTextFieldListener implements MouseListener {
        @Override
        public void mouseClicked(MouseEvent e) {
            JTextField jobCommandTextField = (JTextField) e.getSource();
            jobCommandTextField.selectAll();
        }
        @Override
        public void mouseExited(MouseEvent e) { }
        @Override
        public void mouseEntered(MouseEvent e) { }
        @Override
        public void mousePressed(MouseEvent e) { }
        @Override
        public void mouseReleased(MouseEvent e) { }
    }

    private class CopyJobCommandListener implements ActionListener {
        public void actionPerformed(ActionEvent e) {
            String runJobCommand = runCommandTextField.getText();
            UIUtility.copyToClipboard(runJobCommand);
        }
    }

    // True when the models must be rebuilt before editing mappings
    private boolean isDirty() {
        return  controlFileModel == null ||
                !datasetIDTextField.getText().equals(lastDatasetId) ||
                !fileToPublishTextField.getText().equals(lastFileToPublish);
    }

    // "Map fields" handler: (re)generates the control file if needed and opens the mapping editor
    private class EditControlFileListener implements ActionListener {
        public void actionPerformed(ActionEvent evnt) {
            String generateControlFileErrorMessage;
            if(!datasetIdValid()) {
                generateControlFileErrorMessage = "Error generating control file: " +
                        "you must enter valid Dataset ID";
                JOptionPane.showMessageDialog(mainFrame, generateControlFileErrorMessage);
            }
            else {
                try {
                    if (isDirty()) {
                        ControlFile controlFile = generateControlFile(
                                new UserPreferencesJava(),
                                fileToPublishTextField.getText(),
                                (PublishMethod) publishMethodComboBox.getSelectedItem(),
                                datasetIDTextField.getText(),
                                true);
                        updateControlFileModel(controlFile,datasetIDTextField.getText());
                        lastDatasetId = datasetIDTextField.getText();
                        lastFileToPublish = fileToPublishTextField.getText();
                    }
                    ControlFileEditDialog editorFrame = new ControlFileEditDialog(controlFileModel,mainFrame);
                }
                catch (Exception e) {
                    e.printStackTrace();
                    generateControlFileErrorMessage = "Error generating control file: " + e.getMessage();
                    JOptionPane.showMessageDialog(mainFrame, generateControlFileErrorMessage);
                }
            }
        }

        private boolean fileToPublishIsSelected() {
            String fileToPublish = fileToPublishTextField.getText();
            return !fileToPublish.equals("");
        }

        /**
         * Generates default content of control.json based on given job parameters
         *
         * @param ddl Soda 2 ddl object
         * @param publishMethod to use to publish (upsert, append, replace, or delete)
         *               NOTE: this option will be overriden if userPrefs has pathToFTPControlFile or pathToControlFile set
         * @param datasetId id of the Socrata dataset to publish to
         * @param fileToPublish filename of file to publish (.tsv or .csv file)
         * @param containsHeaderRow if true assume the first row in CSV/TSV file is a list of the dataset columns,
         *                          otherwise upload all rows as new rows (column order must exactly match that of
         *                          Socrata dataset)
         * @return content of control.json based on given job parameters
         * @throws com.socrata.exceptions.SodaError
         * @throws InterruptedException
         */
        private String generateControlFileContent(UserPreferences prefs, String fileToPublish, PublishMethod publishMethod,
                                                  String datasetId, boolean containsHeaderRow) throws HttpException, URISyntaxException, InterruptedException, IOException {
            ControlFile control = generateControlFile(prefs,fileToPublish,publishMethod,datasetId,containsHeaderRow);
            ObjectMapper mapper = new ObjectMapper().configure(SerializationFeature.INDENT_OUTPUT, true);
            return mapper.writeValueAsString(control);
        }

        // Builds a ControlFile from dataset metadata; when the file has no header row,
        // explicit column names are derived from the dataset (or the row-identifier for deletes)
        private ControlFile generateControlFile(UserPreferences prefs, String fileToPublish, PublishMethod publishMethod,
                                                String datasetId, boolean containsHeaderRow) throws HttpException, URISyntaxException, InterruptedException, IOException {

            Dataset datasetInfo = DatasetUtils.getDatasetInfo(prefs, datasetId);
            boolean useGeocoding = DatasetUtils.hasLocationColumn(datasetInfo);

            String[] columns = null;
            if (!containsHeaderRow) {
                if (PublishMethod.delete.equals(publishMethod))
                    columns = new String[]{DatasetUtils.getRowIdentifierName(datasetInfo)};
                else
                    columns = DatasetUtils.getFieldNamesArray(datasetInfo);
            }

            return ControlFile.generateControlFile(fileToPublish, publishMethod, columns, useGeocoding, containsHeaderRow);
        }
    }

    private boolean datasetIdValid() {
        String datasetId = datasetIDTextField.getText();
        return Utils.uidIsValid(datasetId);
    }
}
package com.vaguehope.onosendai.model;

import android.database.Cursor;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;

import com.vaguehope.onosendai.R;
import com.vaguehope.onosendai.images.ImageLoadRequest;
import com.vaguehope.onosendai.images.ImageLoader;
import com.vaguehope.onosendai.model.TweetRowView.QuotingTweetRowView;
import com.vaguehope.onosendai.storage.DbProvider;
import com.vaguehope.onosendai.storage.TweetCursorReader;
import com.vaguehope.onosendai.widget.PendingImage;

/**
 * The available list-row layouts for rendering tweets.  Each constant pairs an
 * Android layout resource with the logic to create the matching
 * {@link TweetRowView} holder and to bind data into it, either from a
 * {@link Tweet} object or directly from a DB {@link Cursor} row.
 */
public enum TweetLayout {

	/** Basic row: avatar, body text and author name; no inline media view. */
	MAIN(0, R.layout.tweetlistrow) {
		@Override
		public TweetRowView makeRowView (final View view, final TweetListViewState tweetListViewState) {
			return new TweetRowView(
					(ImageView) view.findViewById(R.id.imgMain),
					(TextView) view.findViewById(R.id.txtTweet),
					(TextView) view.findViewById(R.id.txtName));
		}

		@Override
		public void applyTweetTo (final Tweet item, final TweetRowView rowView, final ImageLoader imageLoader, final int reqWidth, final DbProvider dbProvider) {
			// Filtered tweets show a fixed placeholder instead of their body.
			if (item.isFiltered()) {
				rowView.getTweet().setText(R.string.tweet_filtered);
			}
			else {
				rowView.getTweet().setText(item.getBody());
			}
			// Prefer the username line; fall back to the full name when absent.
			final String usernameWithSubtitle = item.getUsernameWithSubtitle();
			rowView.getName().setText(usernameWithSubtitle != null ? usernameWithSubtitle : item.getFullnameWithSubtitle());
			// Missing avatar URL falls back to a placeholder drawable.
			final String avatarUrl = item.getAvatarUrl();
			if (avatarUrl != null) {
				imageLoader.loadImage(new ImageLoadRequest(avatarUrl, rowView.getAvatar()));
			}
			else {
				rowView.getAvatar().setImageResource(R.drawable.question_blue);
			}
		}

		@Override
		public void applyCursorTo (final Cursor c, final TweetCursorReader cursorReader, final TweetRowView rowView, final ImageLoader imageLoader, final int reqWidth, final DbProvider dbProvider) {
			// Same binding rules as applyTweetTo, but reading columns via the
			// cursor reader instead of a materialised Tweet object.
			final String name;
			final String username = cursorReader.readUsernameWithSubtitle(c);
			if (username != null) {
				name = username;
			}
			else {
				name = cursorReader.readFullnameWithSubtitle(c);
			}
			rowView.getName().setText(name);
			if (cursorReader.readFiltered(c)) {
				rowView.getTweet().setText(R.string.tweet_filtered);
			}
			else {
				rowView.getTweet().setText(cursorReader.readBody(c));
			}
			final String avatarUrl = cursorReader.readAvatar(c);
			if (avatarUrl != null) {
				imageLoader.loadImage(new ImageLoadRequest(avatarUrl, rowView.getAvatar()));
			}
			else {
				rowView.getAvatar().setImageResource(R.drawable.question_blue);
			}
		}
	},

	/** Like MAIN but with an additional inline media image below the text. */
	INLINE_MEDIA(1, R.layout.tweetlistinlinemediarow) {
		@Override
		public TweetRowView makeRowView (final View view, final TweetListViewState tweetListViewState) {
			final PendingImage pendingImage = (PendingImage) view.findViewById(R.id.imgMedia);
			// Remember expand/collapse state across view recycling.
			pendingImage.setExpandedTracker(tweetListViewState.getExpandedImagesTracker());
			return new TweetRowView(
					(ImageView) view.findViewById(R.id.imgMain),
					(TextView) view.findViewById(R.id.txtTweet),
					(TextView) view.findViewById(R.id.txtName),
					pendingImage);
		}

		@Override
		public void applyTweetTo (final Tweet item, final TweetRowView rowView, final ImageLoader imageLoader, final int reqWidth, final DbProvider dbProvider) {
			// Delegate the text/avatar binding to MAIN, then add the media.
			MAIN.applyTweetTo(item, rowView, imageLoader, reqWidth, dbProvider);
			setImage(item.getInlineMediaUrl(), rowView, imageLoader, reqWidth);
		}

		@Override
		public void applyCursorTo (final Cursor c, final TweetCursorReader cursorReader, final TweetRowView rowView, final ImageLoader imageLoader, final int reqWidth, final DbProvider dbProvider) {
			MAIN.applyCursorTo(c, cursorReader, rowView, imageLoader, reqWidth, dbProvider);
			setImage(cursorReader.readInlineMedia(c), rowView, imageLoader, reqWidth);
		}
	},

	/** Row showing a tweet plus the tweet it quotes, each with own media. */
	QUOTED(2, R.layout.tweetlistquoterow) {
		@Override
		public TweetRowView makeRowView (final View view, final TweetListViewState tweetListViewState) {
			// The layout contains two sub-trees: the tweet and the quoted tweet.
			final View t = view.findViewById(R.id.tweet);
			final View qt = view.findViewById(R.id.quotedTweet);
			final PendingImage pendingImage = (PendingImage) t.findViewById(R.id.imgMedia);
			pendingImage.setExpandedTracker(tweetListViewState.getExpandedImagesTracker());
			final PendingImage qPendingImage = (PendingImage) qt.findViewById(R.id.imgMedia);
			qPendingImage.setExpandedTracker(tweetListViewState.getExpandedImagesTracker());
			return new QuotingTweetRowView(
					(ImageView) t.findViewById(R.id.imgMain),
					(TextView) t.findViewById(R.id.txtTweet),
					(TextView) t.findViewById(R.id.txtName),
					pendingImage,
					(ImageView) qt.findViewById(R.id.imgMain),
					(TextView) qt.findViewById(R.id.txtTweet),
					(TextView) qt.findViewById(R.id.txtName),
					qPendingImage);
		}

		@Override
		public void applyTweetTo (final Tweet item, final TweetRowView rowView, final ImageLoader imageLoader, final int reqWidth, final DbProvider dbProvider) {
			MAIN.applyTweetTo(item, rowView, imageLoader, reqWidth, dbProvider);
			// Media view is hidden (not just left blank) when there is none.
			final String inlineMediaUrl = item.getInlineMediaUrl();
			if (inlineMediaUrl != null) {
				setImage(inlineMediaUrl, rowView, imageLoader, reqWidth);
			}
			else {
				rowView.showInlineMedia(false);
			}
			applyQuotedTweet(item.getQuotedSid(), dbProvider, (QuotingTweetRowView) rowView, imageLoader, reqWidth);
		}

		@Override
		public void applyCursorTo (final Cursor c, final TweetCursorReader cursorReader, final TweetRowView rowView, final ImageLoader imageLoader, final int reqWidth, final DbProvider dbProvider) {
			MAIN.applyCursorTo(c, cursorReader, rowView, imageLoader, reqWidth, dbProvider);
			final String inlineMediaUrl = cursorReader.readInlineMedia(c);
			if (inlineMediaUrl != null) {
				setImage(inlineMediaUrl, rowView, imageLoader, reqWidth);
			}
			else {
				rowView.showInlineMedia(false);
			}
			applyQuotedTweet(cursorReader.readQuotedSid(c), dbProvider, (QuotingTweetRowView) rowView, imageLoader, reqWidth);
		}

		/**
		 * Look up the quoted tweet by SID and bind it into the quoted sub-view,
		 * or show a "[ sid ]" placeholder when it is not in the local DB.
		 */
		private void applyQuotedTweet (final String quotedSid, final DbProvider dbProvider, final QuotingTweetRowView rowView, final ImageLoader imageLoader, final int reqWidth) {
			// TODO load quoted tweet on BG thread.
			// NOTE(review): this DB read runs on the caller's (likely UI) thread.
			final Tweet quotedTweet = dbProvider.getDb().getTweetDetails(quotedSid);
			if (quotedTweet != null) {
				rowView.getQTweet().setText(quotedTweet.getBody());
				final String usernameWithSubtitle = quotedTweet.getUsernameWithSubtitle();
				rowView.getQName().setText(usernameWithSubtitle != null ? usernameWithSubtitle : quotedTweet.getFullnameWithSubtitle());
				final String avatarUrl = quotedTweet.getAvatarUrl();
				if (avatarUrl != null) {
					imageLoader.loadImage(new ImageLoadRequest(avatarUrl, rowView.getQAvatar()));
				}
				else {
					rowView.getQAvatar().setImageResource(R.drawable.question_blue);
				}
				final String quotedInlineMediaUrl = quotedTweet.getInlineMediaUrl();
				if (quotedInlineMediaUrl != null) {
					rowView.showQInlineMedia(true);
					imageLoader.loadImage(new ImageLoadRequest(quotedInlineMediaUrl, rowView.getQInlineMedia(), reqWidth, rowView.getQInlineMediaLoadListener()));
				}
				else {
					rowView.showQInlineMedia(false);
				}
			}
			else {
				// Quoted tweet not available locally: show its SID as a stub.
				rowView.getQTweet().setText(String.format("[ %s ]", quotedSid));
				rowView.getQName().setText("");
				rowView.getQAvatar().setImageResource(R.drawable.question_blue);
				rowView.showQInlineMedia(false);
			}
		}
	},

	/** Media-only row: just the inline image, no text or avatar views. */
	SEAMLESS_MEDIA(3, R.layout.tweetlistseamlessmediarow) {
		@Override
		public TweetRowView makeRowView (final View view, final TweetListViewState tweetListViewState) {
			final PendingImage pendingImage = (PendingImage) view.findViewById(R.id.imgMedia);
			pendingImage.setExpandedTracker(tweetListViewState.getExpandedImagesTracker());
			return new TweetRowView(pendingImage);
		}

		@Override
		public void applyTweetTo (final Tweet item, final TweetRowView rowView, final ImageLoader imageLoader, final int reqWidth, final DbProvider dbProvider) {
			setImage(item.getInlineMediaUrl(), rowView, imageLoader, reqWidth);
		}

		@Override
		public void applyCursorTo (final Cursor c, final TweetCursorReader cursorReader, final TweetRowView rowView, final ImageLoader imageLoader, final int reqWidth, final DbProvider dbProvider) {
			setImage(cursorReader.readInlineMedia(c), rowView, imageLoader, reqWidth);
		}
	};

	/**
	 * Show the inline media view and load {@code inlineMediaUrl} into it,
	 * or a placeholder drawable when the URL is null.
	 */
	protected static void setImage (final String inlineMediaUrl, final TweetRowView rowView, final ImageLoader imageLoader, final int reqWidth) {
		final ImageView imageView = rowView.getInlineMedia();
		rowView.showInlineMedia(true);
		if (inlineMediaUrl != null) {
			imageLoader.loadImage(new ImageLoadRequest(inlineMediaUrl, imageView, reqWidth, rowView.getInlineMediaLoadListener()));
		}
		else {
			imageView.setImageResource(R.drawable.question_blue);
		}
	}

	// Stable numeric id used outside this enum (do not reuse ordinal()).
	private final int index;
	// Android layout resource to inflate for this row style.
	private final int layout;

	private TweetLayout (final int index, final int layout) {
		this.index = index;
		this.layout = layout;
	}

	public int getIndex () {
		return this.index;
	}

	public int getLayout () {
		return this.layout;
	}

	/** Create the view-holder for a freshly inflated row of this layout. */
	public abstract TweetRowView makeRowView (final View view, final TweetListViewState tweetListViewState);

	/** Bind a materialised {@link Tweet} into the row's views. */
	public abstract void applyTweetTo (Tweet item, TweetRowView rowView, ImageLoader imageLoader, int reqWidth, DbProvider dbProvider);

	/** Bind the current row of a DB cursor into the row's views. */
	public abstract void applyCursorTo (Cursor c, TweetCursorReader cursorReader, TweetRowView rowView, ImageLoader imageLoader, int reqWidth, DbProvider dbProvider);
}
package com.cinchapi.concourse.plugin.data;

import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import javax.annotation.concurrent.NotThreadSafe;

import com.cinchapi.concourse.Link;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.zaxxer.sparsebits.SparseBitSet;

/**
 * An implementation of a {@code Map} that characterizes entries on the fly as
 * they are added or removed. This is used to characterize user data as it is
 * being entered, so that the visualization engine can query this map to
 * immediately view data characterization in constant time.
 *
 * <p>
 * Apart from {@code #put(Object, Set)}, {@code #remove(Object)},
 * {@code #putAll(Map)}, and {@code #clear()}, all methods are delegated to an
 * internal map. The four aforementioned methods are overridden in terms of
 * functionality to characterize the entries in the map before performing the
 * original intended function.
 * </p>
 *
 * <p>
 * {@link TrackingMultimap} is parametrized by type-parameters K and V, but the
 * underlying internal map is in the form {@code Map<K, Set<V>>}. This is to
 * comply with the format of data, which is either a Map from Strings (keys) to
 * Objects (values), or Objects (values) to Longs (records).
 * </p>
 *
 * @author Jeff Nelson
 */
// TODO talk about what is tracked for keys and what is tracked for values
@NotThreadSafe
public abstract class TrackingMultimap<K, V> extends AbstractMap<K, Set<V>> {

    /**
     * Return the correct {@link DataType} for the {@code clazz}.
     *
     * @param clazz the {@link Class} to translate
     * @return the correct {@link DataType}
     */
    private static DataType getDataTypeForClass(Class<?> clazz) {
        if(Number.class.isAssignableFrom(clazz)
                || OTHER_NUMBER_CLASSES.contains(clazz)) {
            return DataType.NUMBER;
        }
        else if(clazz == String.class) {
            return DataType.STRING;
        }
        else if(clazz == Boolean.class || clazz == boolean.class) {
            return DataType.BOOLEAN;
        }
        else if(clazz == Link.class) {
            return DataType.LINK;
        }
        else {
            return DataType.UNKNOWN;
        }
    }

    /**
     * Return a non-negative hash for {@code value} suitable for indexing into
     * the {@link #valueCache}.
     * <p>
     * NOTE: {@code Math.abs(hashCode())} is NOT safe here because
     * {@code Math.abs(Integer.MIN_VALUE)} is still negative; masking the sign
     * bit is.
     * </p>
     *
     * @param value the value to hash
     * @return a hash in the range {@code [0, Integer.MAX_VALUE]}
     */
    private static int safeHash(Object value) {
        return value.hashCode() & Integer.MAX_VALUE;
    }

    /**
     * A collection of classes that don't extend {@link Number} but should be
     * considered {@link DataType#NUMBER numerical}.
     */
    private static Set<Class<?>> OTHER_NUMBER_CLASSES = Sets
            .newIdentityHashSet();
    static {
        OTHER_NUMBER_CLASSES.add(int.class);
        OTHER_NUMBER_CLASSES.add(long.class);
        OTHER_NUMBER_CLASSES.add(float.class);
        OTHER_NUMBER_CLASSES.add(double.class);
        OTHER_NUMBER_CLASSES.add(short.class);
        OTHER_NUMBER_CLASSES.add(byte.class);
    }

    /**
     * An internal map where the data is actually stored.
     */
    private Map<K, Set<V>> data;

    /**
     * A mapping from each of the {@link DataType data types} to the number of
     * stored key/value associations whose key is characterized as such.
     */
    private final Map<DataType, AtomicInteger> keyTypes;

    /**
     * The total number of values (including duplicates) added across all the
     * keys.
     */
    private final AtomicLong totalValueCount;

    /**
     * The total number of unique values (e.g. excluding duplicates) that are
     * stored across all the keys.
     */
    private final AtomicLong uniqueValueCount;

    /**
     * An approximate cache of values stored across all the keys.
     * <p>
     * Whenever a value is added to the map, the bit for its
     * {@link Object#hashCode() hash code} is flipped to indicate that the
     * value is stored. However, hash codes are not guaranteed to be unique
     * among objects, so its necessary to look through all the values and test
     * the equality for a potential match to determine if an object is
     * actually contained or not.
     * </p>
     */
    private final SparseBitSet valueCache;

    /**
     * Construct a new instance.
     *
     * @param delegate an {@link Map#isEmpty() empty} map
     */
    protected TrackingMultimap(Map<K, Set<V>> delegate) {
        Preconditions.checkState(delegate.isEmpty());
        this.data = delegate;
        this.keyTypes = Maps.newIdentityHashMap();
        // Every DataType must have a counter; put(K, V) assumes the lookup
        // never returns null. (LINK was previously missing, which caused a
        // NullPointerException when a Link key was added.)
        this.keyTypes.put(DataType.NUMBER, new AtomicInteger(0));
        this.keyTypes.put(DataType.STRING, new AtomicInteger(0));
        this.keyTypes.put(DataType.BOOLEAN, new AtomicInteger(0));
        this.keyTypes.put(DataType.LINK, new AtomicInteger(0));
        this.keyTypes.put(DataType.UNKNOWN, new AtomicInteger(0));
        this.totalValueCount = new AtomicLong(0);
        this.uniqueValueCount = new AtomicLong(0);
        this.valueCache = new SparseBitSet();
    }

    @Override
    public Set<Entry<K, Set<V>>> entrySet() {
        return data.entrySet();
    }

    /**
     * Return the fraction of tracked key/value associations attributed to each
     * {@link DataType}, on a scale from 0 to 1.
     *
     * @return a map from each {@link DataType} to its share of associations
     */
    public Map<DataType, Float> getPercentKeyDataTypes() {
        Map<DataType, Float> percents = Maps.newIdentityHashMap();
        long total = 0;
        for (AtomicInteger count : keyTypes.values()) {
            total += count.get();
        }
        for (Entry<DataType, AtomicInteger> entry : keyTypes.entrySet()) {
            // Guard against division by zero when the map is empty.
            float percent = total == 0 ? 0f
                    : entry.getValue().get() / (float) total;
            percents.put(entry.getKey(), percent);
        }
        return percents;
    }

    /**
     * Determines the proportion of occurrence of a particular key. This is
     * merely the frequency of that key divided by the total number of key
     * frequencies.
     *
     * @param element the key for which the proportion is being sought
     * @return the proportion of the key, or 0 if the key is absent
     */
    public double proportion(K element) {
        double total = 0;
        for (Set<V> value : data.values()) {
            total += value.size();
        }
        Set<V> stored = data.get(element);
        if(stored == null || total == 0) {
            // Absent keys contribute nothing (previously this threw a NPE).
            return 0;
        }
        return stored.size() / total;
    }

    /**
     * Calculates the uniqueness of the data by summing the squares of the
     * proportions of each key within the {@link #keySet() key set},
     * determining the square root of the sum, and subtracting it from 1. This
     * always results in a number between 0 and 1.
     * <p>
     * For datasets with a large number of distinct values appearing in
     * relatively similar frequency, this function returns a relatively high
     * number, since there are many unique values. Mathematically, each
     * contributes a small amount to the proportion, so the square root term
     * is small, returning a large end result.
     * </p>
     * <p>
     * Conversely, for datasets with a few dominating values, this function
     * returns a fairly low number. This is because the higher proportions
     * from the dominating values contribute more heavily towards the sum of
     * squares. The square root is therefore higher, and when subtracted from
     * 1, returns a lower number.
     * </p>
     *
     * @return the uniqueness of the data, on a scale from 0 to 1.
     */
    public double uniqueness() {
        double sumOfSquares = 0;
        for (K key : this.keySet()) {
            sumOfSquares += Math.pow(proportion(key), 2);
        }
        return 1 - Math.sqrt(sumOfSquares);
    }

    /**
     * Returns whether the {@link TrackingMultimap} contains values of the
     * specified {@link DataType}.
     *
     * @param type the {@link DataType} being queried
     * @return {@code true} if the {@code Map} contains this {@link DataType},
     *         false otherwise
     */
    public boolean containsDataType(DataType type) {
        return getPercentKeyDataTypes().get(type) > 0;
    }

    /*
     * Object -> Set<Long>
     * record -> key -> set<values>
     * key -> value -> set<records>
     */

    /**
     * Return {@code true} if this map associates {@code value} with at least
     * one key.
     * <p>
     * This method is different from {@link #containsValue(Object)} because it
     * checks for values <strong>within</strong> the Sets that are mapped from
     * keys. Use the aforementioned if you need to check for the existence of
     * an entire Set as opposed to an individual value.
     * </p>
     *
     * @param value the value to check
     * @return {@code true} if the value is contained, {@code false} otherwise
     */
    public boolean hasValue(V value) {
        // The bit set is only a bloom-filter-style hint: a set bit requires a
        // full scan to confirm (hash collisions), a clear bit is definitive.
        if(valueCache.get(safeHash(value))) {
            for (Set<V> values : data.values()) {
                if(values.contains(value)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Merge all the {@code values} into the set of values that is mapped from
     * {@code key}.
     *
     * @param key the key
     * @param values the values to merge
     * @return all the values mapped from {@code key} after the merge
     */
    public Set<V> merge(K key, Set<V> values) {
        for (V value : values) {
            put(key, value);
        }
        return get(key);
    }

    /**
     * <p>
     * <strong>NOTE:</strong> This implementation will replace all the
     * existing values mapped from {@code key} with those specified in the
     * {@code value}. If you want "merge-like" functionality call the
     * {@link #merge(Object, Set)} method.
     * </p>
     * {@inheritDoc}
     */
    @Override
    public Set<V> put(K key, Set<V> value) {
        Set<V> stored = data.get(key);
        if(stored == null) {
            stored = new ValueSetWrapper();
            data.put(key, stored);
        }
        // Iterate over a snapshot: remove(key, element) mutates the live set
        // and iterating it directly throws ConcurrentModificationException.
        for (V element : new ArrayList<>(stored)) {
            remove(key, element);
        }
        for (V element : value) {
            put(key, element);
        }
        return stored;
    }

    /**
     * Add a new association between {@code key} and {@code value} to the map
     * if it doesn't already exist.
     *
     * @param key the key
     * @param value the value
     * @return {@code true} if the association didn't previously exist and is
     *         now added
     */
    public boolean put(K key, V value) {
        Set<V> values = data.get(key);
        if(values == null) {
            values = new ValueSetWrapper();
            data.put(key, values);
        }
        if(values.add(value)) {
            DataType keyType = getDataTypeForClass(key.getClass());
            keyTypes.get(keyType).incrementAndGet();
            // TODO: track more stats for keys, value tracking happens
            // in the ValueSetWrapper...
            return true;
        }
        else {
            return false;
        }
    }

    /**
     * Remove the association between {@code key} and {@code value} from the
     * map.
     *
     * @param key the key
     * @param value the value
     * @return {@code true} if the association previously existed and is
     *         removed
     */
    public boolean remove(K key, V value) {
        Set<V> values = data.get(key);
        if(values != null && values.remove(value)) {
            DataType keyType = getDataTypeForClass(key.getClass());
            keyTypes.get(keyType).decrementAndGet();
            // TODO: track more stats for keys, value tracking happens
            // in the ValueSetWrapper
            if(values.isEmpty()) {
                // Prune the mapping by its key. The previous code called
                // data.remove(values), passing the value set as the key, so
                // empty mappings were never actually removed.
                data.remove(key);
            }
            return true;
        }
        else {
            return false;
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public Set<V> remove(Object key) {
        Set<V> stored = data.get(key);
        if(stored != null) {
            // Snapshot before iterating: remove(K, V) mutates (and may prune)
            // the live set, which would otherwise break the iterator.
            for (V element : new ArrayList<>(stored)) {
                remove((K) key, element); // type cast is valid because the
                                          // presence of elements over which to
                                          // iterate ensures that #put(K key, V
                                          // value) was called, which performs
                                          // type checking
            }
        }
        return stored;
    }

    @Override
    public Set<V> get(Object key) {
        return data.get(key);
    }

    /**
     * Return a new {@link Set} (of the appropriate type) to use for storing
     * the values that are mapped from a key.
     *
     * @return a new {@link Set}
     */
    protected abstract Set<V> createValueSet();

    /**
     * A broad classification of objects that describes the nature of the
     * data.
     *
     * @author Jeff Nelson
     */
    public static enum DataType {
        BOOLEAN, NUMBER, STRING, LINK, UNKNOWN;
    }

    /**
     * An internal wrapper around a Set returned from the
     * {@link #createValueSet()} method.
     * <p>
     * The wrapper is responsible for tracking stats for the individual set
     * and updating the appropriate variables of the outer class. This ensures
     * that the caller can interact with individual value sets without
     * breaking tracking semantics.
     * </p>
     *
     * @author Jeff Nelson
     */
    private class ValueSetWrapper extends AbstractSet<V> {

        /**
         * The wrapped set that actually stores the data.
         */
        private final Set<V> values = createValueSet();

        @Override
        public boolean add(V element) {
            // Check containment BEFORE mutating so first-time values can be
            // detected for the unique count and the value cache.
            boolean contained = hasValue(element);
            if(values.add(element)) {
                totalValueCount.incrementAndGet();
                if(!contained) {
                    // The value was not previously contained, so we must
                    // update the number of unique values stored across all
                    // the keys.
                    uniqueValueCount.incrementAndGet();
                    valueCache.set(safeHash(element));
                }
                return true;
            }
            else {
                return false;
            }
        }

        @Override
        public Iterator<V> iterator() {
            return new Iterator<V>() {

                /**
                 * The delegate iterator that controls state.
                 */
                private final Iterator<V> delegate = values.iterator();

                /**
                 * The last value returned from the {@link #next()} method.
                 */
                private V next = null;

                @Override
                public boolean hasNext() {
                    return delegate.hasNext();
                }

                @Override
                public V next() {
                    next = delegate.next();
                    return next;
                }

                @Override
                public void remove() {
                    // Route through the wrapper so stats stay consistent.
                    ValueSetWrapper.this.remove(next);
                    next = null;
                }
            };
        }

        @SuppressWarnings("unchecked")
        @Override
        public boolean remove(Object element) {
            if(values.remove(element)) {
                totalValueCount.decrementAndGet();
                boolean contained = hasValue((V) element);
                if(!contained) {
                    // Since the value is no longer "contained" we are free to
                    // decrement the number of unique values stored across all
                    // the keys
                    uniqueValueCount.decrementAndGet();
                }
                return true;
            }
            else {
                return false;
            }
        }

        @Override
        public int size() {
            return values.size();
        }
    }
}
package daxum.temporalconvergence.block;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import daxum.temporalconvergence.TemporalConvergence;
import daxum.temporalconvergence.item.ModItems;
import net.minecraft.block.Block;
import net.minecraft.block.SoundType;
import net.minecraft.block.material.Material;
import net.minecraft.block.properties.IProperty;
import net.minecraft.block.state.BlockFaceShape;
import net.minecraft.block.state.BlockStateContainer;
import net.minecraft.block.state.IBlockState;
import net.minecraft.crash.CrashReport;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.ReportedException;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;

/**
 * Common base class for this mod's blocks: wires up registry name, creative
 * tab, hardness/resistance/tool/sound in one place, discovers blockstate
 * properties reflectively, and supports blocks with multiple bounding boxes.
 */
public class BlockBase extends Block {
	// Cache of per-state bounding box lists, filled lazily by
	// getBoundingBoxList(). Was a raw HashMap; now properly parameterized.
	private final Map<IBlockState, AxisAlignedBB[]> stateBoxMap = new HashMap<>(8);
	private boolean hasTileEntity = false;

	/**
	 * Full constructor; all other constructors delegate here.
	 *
	 * @param material block material
	 * @param registryName used for both the registry and unlocalized name
	 * @param hardness mining time factor
	 * @param resistance explosion resistance
	 * @param tool the tool required to harvest
	 * @param level the minimum tool tier required
	 * @param sound step/break sound type
	 */
	public BlockBase(Material material, String registryName, float hardness, float resistance, Tool tool, MiningLevel level, SoundType sound) {
		super(material);
		setUnlocalizedName(registryName);
		setRegistryName(registryName);
		setCreativeTab(ModItems.TEMPCONVTAB);
		setHardness(hardness);
		setResistance(resistance);
		setHarvestLevel(tool.name, level.level);
		setSoundType(sound);
	}

	/** Construct from one of the common {@link BlockPresets} bundles. */
	public BlockBase(String registryName, BlockPresets preset) {
		this(preset.material, registryName, preset.hardness, preset.resistance, preset.tool, preset.level, preset.sound);
	}

	/** Rock material with stone sound, custom stats. */
	public BlockBase(String registryName, float hardness, float resistance, Tool tool, MiningLevel level) {
		this(Material.ROCK, registryName, hardness, resistance, tool, level, SoundType.STONE);
	}

	/** Generic stone-like block with default stats. */
	public BlockBase(String registryName) {
		this(registryName, 2.0f, 10.0f, Tool.PICKAXE, MiningLevel.WOOD);
	}

	/** Change only the harvest tool, keeping the current mining level. */
	protected void setHarvestTool(Tool tool) {
		setHarvestLevel(tool.name, getHarvestLevel(getDefaultState()));
	}

	/** Change only the mining level, keeping the current harvest tool. */
	protected void setMiningLevel(MiningLevel level) {
		setHarvestLevel(getHarvestTool(getDefaultState()), level.level);
	}

	/** Mark this block as providing a tile entity (see hasTileEntity(state)). */
	protected void setHasTileEntity() {
		hasTileEntity = true;
	}

	@Override
	protected BlockStateContainer createBlockState() {
		return new BlockStateContainer(this, getProperties());
	}

	//This is one of the stupidest things I've ever done. Also probably really inefficient.
	/**
	 * Reflectively collect every public static final IProperty declared on
	 * the concrete subclass, so subclasses don't have to override
	 * createBlockState(). Sorted by name so state -> meta mapping is stable.
	 */
	protected IProperty[] getProperties() {
		List<IProperty> properties = new ArrayList<>();

		for (Field field : getClass().getFields()) {
			if (Modifier.isStatic(field.getModifiers()) && Modifier.isFinal(field.getModifiers())) {
				try {
					Object o = field.get(null);

					if (o instanceof IProperty) {
						properties.add((IProperty) o);
					}
				} catch (IllegalArgumentException | IllegalAccessException e) {
					TemporalConvergence.LOGGER.fatal("Reflective BlockState wizardry failed for {}!", getClass());
					throw new ReportedException(new CrashReport("Reflective BlockState wizardry failed for " + getClass() + "!", e));
				}
			}
		}

		//TODO: remove sorting in 1.13, it's only needed because of state -> meta
		properties.sort(new Comparator<IProperty>() {
			@Override
			public int compare(IProperty arg0, IProperty arg1) {
				return arg0.getName().compareTo(arg1.getName());
			}
		});

		return properties.toArray(new IProperty[0]);
	}

	/**
	 * Set the default state from alternating (property, value) pairs, e.g.
	 * setStateDefaults(FACING, EnumFacing.NORTH, POWERED, false).
	 *
	 * @param objects alternating IProperty / value pairs
	 * @throws IllegalArgumentException if the list is malformed
	 */
	protected void setStateDefaults(Object... objects) {
		IBlockState defaultState = blockState.getBaseState();

		for (int i = 0; i < objects.length; i += 2) {
			if (objects[i] instanceof IProperty && i + 1 < objects.length) {
				defaultState = setStateValue(defaultState, (IProperty)objects[i], objects[i + 1]);
			} else {
				TemporalConvergence.LOGGER.fatal("Malformed default list for {}", getClass());
				throw new IllegalArgumentException("Malformed default list for " + getClass());
			}
		}

		setDefaultState(defaultState);
	}

	/**
	 * Apply one (property, value) pair with an unchecked cast, crashing with
	 * a report if the value's type doesn't match the property.
	 */
	@SuppressWarnings("unchecked")
	private <T extends Comparable<T>> IBlockState setStateValue(IBlockState state, IProperty<T> property, Object value) {
		try {
			return state.withProperty(property, (T)value);
		} catch(ClassCastException e) {
			TemporalConvergence.LOGGER.fatal("Failed to assign property {} of {} to {}", property, getClass(), value);
			throw new ReportedException(new CrashReport("Failed to assign property " + property + " of " + getClass() + " to " + value, e));
		}
	}

	/** Override to false for non-full-cube blocks; feeds the methods below. */
	protected boolean isCube() {
		return true;
	}

	@Override
	public boolean isNormalCube(IBlockState state, IBlockAccess world, BlockPos pos) {
		return isCube();
	}

	@Override
	public boolean isFullCube(IBlockState state) {
		return isCube();
	}

	@Override
	public boolean isOpaqueCube(IBlockState state) {
		return isCube();
	}

	@Override
	public BlockFaceShape getBlockFaceShape(IBlockAccess world, IBlockState state, BlockPos pos, EnumFacing side) {
		return isNormalCube(state, world, pos) ? BlockFaceShape.SOLID : BlockFaceShape.UNDEFINED;
	}

	/**
	 * Override to supply multiple boxes per state. Default: the single
	 * vanilla bounding box.
	 */
	protected AxisAlignedBB[] getNewBoundingBoxList(World world, BlockPos pos, IBlockState state) {
		return new AxisAlignedBB[] {state.getBoundingBox(world, pos)};
	}

	/**
	 * Cached accessor for a state's bounding boxes.
	 * NOTE(review): the cache key is only the state, but the boxes are built
	 * from the world/pos of the FIRST query — assumes boxes are
	 * position-independent; confirm for subclasses that override
	 * getNewBoundingBoxList().
	 */
	public final AxisAlignedBB[] getBoundingBoxList(World world, BlockPos pos, IBlockState state) {
		AxisAlignedBB[] aabbList = stateBoxMap.get(state);

		if (aabbList == null) {
			stateBoxMap.put(state, getNewBoundingBoxList(world, pos, state));
			aabbList = stateBoxMap.get(state);
		}

		return aabbList;
	}

	/** The cached boxes translated to world coordinates at pos. */
	public AxisAlignedBB[] getSelectedBBList(World world, BlockPos pos, IBlockState state) {
		AxisAlignedBB[] oldList = getBoundingBoxList(world, pos, state);
		AxisAlignedBB[] aabbList = new AxisAlignedBB[oldList.length];

		for (int i = 0; i < aabbList.length; i++) {
			aabbList[i] = oldList[i].offset(pos);
		}

		return aabbList;
	}

	/** Override to true to enable the multi-box collision/raytrace paths. */
	public boolean hasMultipleBoundingBoxes() {
		return false;
	}

	@Override
	public void addCollisionBoxToList(IBlockState state, World world, BlockPos pos, AxisAlignedBB entityBox, List<AxisAlignedBB> aabbList, Entity entity, boolean actualState) {
		if (hasMultipleBoundingBoxes()) {
			for (AxisAlignedBB aabb : getBoundingBoxList(world, pos, state)) {
				addCollisionBoxToList(pos, entityBox, aabbList, aabb);
			}
		} else {
			addCollisionBoxToList(pos, entityBox, aabbList, state.getCollisionBoundingBox(world, pos));
		}
	}

	@Override
	public RayTraceResult collisionRayTrace(IBlockState state, World world, BlockPos pos, Vec3d start, Vec3d end) {
		if (hasMultipleBoundingBoxes()) {
			RayTraceResult rtr = null;

			// NOTE(review): this returns the first box the ray hits in list
			// order, not necessarily the box nearest to `start` — confirm
			// that is the intended behavior for multi-box blocks.
			for (AxisAlignedBB aabb : getBoundingBoxList(world, pos, state)) {
				if (rtr == null) {
					rtr = rayTrace(pos, start, end, aabb);
				} else {
					break;
				}
			}

			return rtr;
		} else {
			return rayTrace(pos, start, end, state.getBoundingBox(world, pos));
		}
	}

	@Override
	public boolean hasTileEntity(IBlockState state) {
		return hasTileEntity;
	}

	/** Common material/stat bundles used by the convenience constructor. */
	public enum BlockPresets {
		STONE(Material.ROCK, 2.0f, 10.0f, Tool.PICKAXE, MiningLevel.WOOD, SoundType.STONE),
		PLANT(Material.PLANTS, 0.0f, 0.0f, Tool.NONE, MiningLevel.HAND, SoundType.PLANT),
		UNBREAKABLE(Material.BARRIER, -1.0f, Float.MAX_VALUE, Tool.NONE, MiningLevel.HAND, SoundType.STONE),
		IRON(Material.IRON, 5.0f, 30.0f, Tool.PICKAXE, MiningLevel.STONE, SoundType.METAL),
		WOOD(Material.WOOD, 2.0f, 15.0f, Tool.AXE, MiningLevel.HAND, SoundType.WOOD),
		STONE_MACHINE(Material.ROCK, 5.0f, 30.0f, Tool.PICKAXE, MiningLevel.IRON, SoundType.STONE),
		WEAK_IRON(Material.IRON, 2.0f, 10.0f, Tool.PICKAXE, MiningLevel.WOOD, SoundType.METAL),
		GLASS(Material.GLASS, 0.3f, 1.5f, Tool.NONE, MiningLevel.HAND, SoundType.GLASS);

		private final Material material;
		private final float hardness;
		private final float resistance;
		private final Tool tool;
		private final MiningLevel level;
		private final SoundType sound;

		private BlockPresets(Material m, float h, float r, Tool t, MiningLevel l, SoundType s) {
			material = m;
			hardness = h;
			resistance = r;
			tool = t;
			level = l;
			sound = s;
		}
	}

	/** Harvest tool names as Forge expects them in setHarvestLevel(). */
	public enum Tool {
		NONE(""),
		PICKAXE("pickaxe"),
		AXE("axe"),
		SHOVEL("shovel");

		private final String name;

		private Tool(String n) {
			name = n;
		}
	}

	/** Vanilla tool-tier harvest levels (-1 = breakable by hand). */
	public enum MiningLevel {
		HAND(-1),
		WOOD(0),
		GOLD(0),
		STONE(1),
		IRON(2),
		DIAMOND(3);

		private final int level;

		private MiningLevel(int l) {
			level = l;
		}
	}

	//TODO: remove three methods below in 1.13
	@Override
	public IBlockState getStateFromMeta(int meta) {
		// NOTE(review): an out-of-range meta (corrupt save) will throw
		// IndexOutOfBoundsException here — confirm whether clamping is wanted.
		return blockState.getValidStates().get(meta);
	}

	@Override
	public int getMetaFromState(IBlockState state) {
		if (state.getPropertyKeys().isEmpty()) {
			return 0;
		} else {
			// Meta is the state's index in the (name-sorted) valid-state list.
			for (int i = 0; i < blockState.getValidStates().size() && i < 16; i++) {
				if (blockState.getValidStates().get(i).equals(state)) {
					return i;
				}
			}

			TemporalConvergence.LOGGER.error("Couldn't convert blockState {} to meta", state);
			return 0;
		}
	}

	//Bit of a hack to fix improper meta ordering
	@Override
	public IBlockState getStateForPlacement(World world, BlockPos pos, EnumFacing facing, float hitX, float hitY, float hitZ, int meta, EntityLivingBase placer) {
		if (meta == 0) {
			return getDefaultState();
		}

		return getStateFromMeta(meta);
	}
}
package com.yahoo.vespa.model.container.http.xml;

import com.yahoo.config.model.producer.AbstractConfigProducer;
import com.yahoo.vespa.model.builder.xml.dom.chains.ChainsBuilder;
import com.yahoo.vespa.model.builder.xml.dom.chains.ComponentsBuilder;
import com.yahoo.vespa.model.builder.xml.dom.chains.ComponentsBuilder.ComponentType;
import com.yahoo.vespa.model.builder.xml.dom.chains.DomChainBuilderBase;
import com.yahoo.vespa.model.builder.xml.dom.chains.DomChainsBuilder;
import com.yahoo.vespa.model.container.component.chain.Chain;
import com.yahoo.vespa.model.container.http.Filter;
import com.yahoo.vespa.model.container.http.FilterChains;
import org.w3c.dom.Element;

import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Builds {@link FilterChains} from the http filter chain XML elements.
 * Only filter components are allowed; both request and response chains are
 * handled by the same {@link FilterChainBuilder}.
 *
 * @author tonytv
 */
public class FilterChainsBuilder extends DomChainsBuilder<Filter, Chain<Filter>, FilterChains>  {
    private static final Collection<ComponentType<Filter>> allowedComponentTypes =
            Collections.singleton(ComponentType.filter);

    //TODO: simplify
    // Maps chain element names to their builder class, preserving insertion
    // order. Built with a plain LinkedHashMap instead of the previous
    // double-brace initializer (which created an anonymous subclass).
    private static final Map<String, Class<? extends DomChainBuilderBase<? extends Filter, ? extends Chain<Filter>>>> chainType2BuilderClass;
    static {
        Map<String, Class<? extends DomChainBuilderBase<? extends Filter, ? extends Chain<Filter>>>> builders =
                new LinkedHashMap<>();
        builders.put("request-chain", FilterChainBuilder.class);
        builders.put("response-chain", FilterChainBuilder.class);
        chainType2BuilderClass = Collections.unmodifiableMap(builders);
    }

    public FilterChainsBuilder() {
        super(null, allowedComponentTypes, null);
    }

    @Override
    protected FilterChains newChainsInstance(AbstractConfigProducer parent) {
        return new FilterChains(parent);
    }

    @Override
    protected ChainsBuilder<Filter, Chain<Filter>> readChains(
            AbstractConfigProducer ancestor,
            List<Element> allChainsElems,
            Map<String, ComponentsBuilder.ComponentType> outerComponentTypeByComponentName) {
        return new ChainsBuilder<>(ancestor, allChainsElems, outerComponentTypeByComponentName, chainType2BuilderClass);
    }
}
package de.skuzzle.inject.proxy;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.util.Collections;
import java.util.Set;
import java.util.UUID;

import javax.inject.Singleton;

import com.google.inject.Binder;
import com.google.inject.Binding;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.TypeLiteral;
import com.google.inject.binder.LinkedBindingBuilder;
import com.google.inject.binder.ScopedBindingBuilder;
import com.google.inject.internal.BindingBuilder;
import com.google.inject.name.Names;
import com.google.inject.spi.Dependency;
import com.google.inject.spi.HasDependencies;
import com.google.inject.spi.Toolable;

/**
 * Allows to bind classes and interfaces as scoped proxies. The user-visible
 * key is bound to a singleton proxy which dispatches every call to an object
 * obtained from a hidden, uniquely-named "rewritten" binding.
 *
 * @author Simon Taddiken
 */
public final class ScopedProxyBinder {

    private ScopedProxyBinder() {
        // hidden constructor;
    }

    /**
     * Entry point: returns a builder that installs scoped-proxy bindings into
     * the given binder.
     *
     * @param binder the Guice binder to install bindings into; must not be null
     */
    public static ScopedProxyBuilder using(Binder binder) {
        checkNotNull(binder, "binder");
        return new ScopedProxyBuilderImpl(binder);
    }

    /** Fluent builder for scoped-proxy bindings. */
    public interface ScopedProxyBuilder {

        ScopedProxyBuilder andConstructionStrategy(ConstructionStrategy strategy);

        <T> LinkedBindingBuilder<T> bind(Class<T> cls);

        <T> LinkedBindingBuilder<T> bind(Class<T> cls,
                Class<? extends Annotation> annotationClass);

        <T> LinkedBindingBuilder<T> bind(Class<T> cls, Annotation annotation);

        <T> LinkedBindingBuilder<T> bind(Key<T> key);
    }

    private static final class ScopedProxyBuilderImpl implements ScopedProxyBuilder {

        private final Binder binder;
        // How proxy instances are created; OBJENESIS needs no accessible constructor.
        private ConstructionStrategy strategy = ConstructionStrategies.OBJENESIS;

        ScopedProxyBuilderImpl(Binder binder) {
            this.binder = binder;
        }

        @Override
        public ScopedProxyBuilder andConstructionStrategy(
                ConstructionStrategy strategy) {
            checkNotNull(strategy);
            this.strategy = strategy;
            return this;
        }

        @Override
        public <T> LinkedBindingBuilder<T> bind(Class<T> cls,
                Class<? extends Annotation> annotationClass) {
            return bind(Key.get(cls, annotationClass));
        }

        @Override
        public <T> LinkedBindingBuilder<T> bind(Class<T> cls, Annotation annotation) {
            return bind(Key.get(cls, annotation));
        }

        @Override
        public <T> LinkedBindingBuilder<T> bind(Class<T> cls) {
            return bind(Key.get(cls));
        }

        @Override
        public <T> LinkedBindingBuilder<T> bind(Key<T> sourceKey) {
            checkNotNull(sourceKey);
            return new FluentInterfaceImpl<T>(this.binder, sourceKey, this.strategy);
        }
    }

    /**
     * Binding-builder facade. Constructing it already installs two bindings:
     * the source key to the proxy provider, and a hidden rewritten key which
     * the subsequent to()/toProvider()/... calls configure.
     */
    private static final class FluentInterfaceImpl<T> implements
            LinkedBindingBuilder<T>,
            ScopedBindingBuilder {

        private final Binder binder;
        private final ConstructionStrategy strategy;
        private final Key<T> source;
        private final Key<T> rewrittenKey;
        // Lazily obtained builder for the rewritten key; cached after first use.
        private BindingBuilder<T> targetBuilder;

        private FluentInterfaceImpl(Binder binder, Key<T> sourceKey,
                ConstructionStrategy strategy) {
            this.binder = binder;
            this.strategy = strategy;
            this.source = sourceKey;
            // Order matters: bindSource() installs the proxy binding and
            // produces the hidden key that bindRewritten() then binds.
            this.rewrittenKey = bindSource();
            bindRewritten();
        }

        /**
         * Binds the user-visible source key to the proxy provider and returns
         * the unique hidden key under which the real target is bound.
         */
        private Key<T> bindSource() {
            // backup the original binding using an internal annotation to
            // create a unique hidden key.
            final UUID uuid = UUID.randomUUID();
            final Key<T> rewritten = Key.get(this.source.getTypeLiteral(),
                    Names.named(uuid.toString()));

            // bind the user specified source type to the provider which creates
            // the scoped proxy objects.
            this.binder.bind(this.source)
                    .toProvider(new ScopedProxyProvider<>(
                            this.source, rewritten, this.strategy))
                    .in(Singleton.class);
            return rewritten;
        }

        // NOTE(review): the cast assumes Binder.bind(Key) returns Guice's
        // internal BindingBuilder — holds for the default Binder
        // implementation; confirm when using wrapping/custom binders.
        @SuppressWarnings("unchecked")
        private BindingBuilder<T> bindRewritten() {
            if (this.targetBuilder == null) {
                this.targetBuilder = (BindingBuilder<T>) this.binder.bind(this.rewrittenKey);
            }
            return this.targetBuilder;
        }

        @Override
        public ScopedBindingBuilder to(Key<? extends T> key) {
            checkNotNull(key);
            bindRewritten().to(key);
            return this;
        }

        @Override
        public ScopedBindingBuilder to(Class<? extends T> implementation) {
            return this.to(Key.get(implementation));
        }

        @Override
        public ScopedBindingBuilder to(TypeLiteral<? extends T> implementation) {
            return this.to(Key.get(implementation));
        }

        @Override
        public void toInstance(T instance) {
            this.bindRewritten().toInstance(instance);
        }

        @Override
        public ScopedBindingBuilder toProvider(Provider<? extends T> provider) {
            this.bindRewritten().toProvider(provider);
            return this;
        }

        @Override
        public ScopedBindingBuilder toProvider(
                javax.inject.Provider<? extends T> provider) {
            this.bindRewritten().toProvider(provider);
            return this;
        }

        @Override
        public ScopedBindingBuilder toProvider(
                Class<? extends javax.inject.Provider<? extends T>> providerType) {
            this.bindRewritten().toProvider(providerType);
            return this;
        }

        @Override
        public ScopedBindingBuilder toProvider(
                TypeLiteral<? extends javax.inject.Provider<? extends T>> providerType) {
            this.bindRewritten().toProvider(providerType);
            return this;
        }

        @Override
        public ScopedBindingBuilder toProvider(
                Key<? extends javax.inject.Provider<? extends T>> providerKey) {
            this.bindRewritten().toProvider(providerKey);
            return this;
        }

        @Override
        public <S extends T> ScopedBindingBuilder toConstructor(
                Constructor<S> constructor) {
            this.bindRewritten().toConstructor(constructor);
            return this;
        }

        @Override
        public <S extends T> ScopedBindingBuilder toConstructor(
                Constructor<S> constructor, TypeLiteral<? extends S> type) {
            this.bindRewritten().toConstructor(constructor, type);
            return this;
        }

        @Override
        public void in(Class<? extends Annotation> scopeAnnotation) {
            // Singleton scope is explicitly rejected for the target binding.
            checkSingleton(scopeAnnotation);
            this.bindRewritten().in(scopeAnnotation);
        }

        @Override
        public void in(Scope scope) {
            this.bindRewritten().in(scope);
        }

        /** Always throws: eager-singleton targets are not supported (see checkSingleton). */
        @Override
        public void asEagerSingleton() {
            checkSingleton(Singleton.class);
        }

        /**
         * Rejects singleton scopes for the proxied target. A singleton target
         * would make the indirection pointless.
         *
         * @throws UnsupportedOperationException if a singleton scope annotation is given
         */
        private static void checkSingleton(Class<? extends Annotation> scopeAnnotation) {
            if (Singleton.class.equals(scopeAnnotation) ||
                    com.google.inject.Singleton.class.equals(scopeAnnotation)) {
                // BUGFIX: message previously read "Theres is no reason to do this".
                throw new UnsupportedOperationException("Scoped proxies can not be " +
                        "bound as singleton. There is no reason to do this");
            }
        }
    }

    /**
     * Provider for the singleton proxy object. Guice tool-stage injection
     * fills in the injector; the proxy dispatches each call to a fresh lookup
     * of the rewritten (real) binding.
     */
    private static class ScopedProxyProvider<T> implements Provider<T>, HasDependencies {

        final Key<T> rewritten;
        final ConstructionStrategy strategy;
        // Until initialize() runs we only report a dependency on the Injector.
        Set<Dependency<?>> dependencies;
        T ref;

        ScopedProxyProvider(Key<T> sourceKey, Key<T> rewrittenKey,
                ConstructionStrategy strategy) {
            this.rewritten = rewrittenKey;
            this.strategy = strategy;
            this.dependencies = Collections.singleton(
                    Dependency.get(Key.get(Injector.class)));
        }

        @Inject
        @Toolable
        @SuppressWarnings("unchecked")
        void initialize(Injector injector) {
            final Binding<T> realBinding = injector.getBinding(this.rewritten);
            final Provider<T> realProvider = injector.getProvider(realBinding.getKey());

            // The proxy will be a sub type of the source type of the binding
            final Class<T> proxyType = (Class<T>) realBinding.getKey()
                    .getTypeLiteral().getRawType();

            // From now on, report the real binding as our dependency.
            this.dependencies = Collections.singleton(
                    Dependency.get(this.rewritten));
            this.ref = InstanceBuilder.forType(proxyType)
                    .withConstructionStrategy(this.strategy)
                    .dispatchTo(realProvider)
                    .create(injector);
        }

        @Override
        public T get() {
            checkState(this.ref != null, "Scoped proxy provider not initialized");
            return this.ref;
        }

        @Override
        public Set<Dependency<?>> getDependencies() {
            return this.dependencies;
        }
    }
}
package ee.tuleva.onboarding.epis;

import static ee.tuleva.onboarding.epis.cashflows.CashFlow.Type.CASH;
import static ee.tuleva.onboarding.epis.cashflows.CashFlow.Type.CONTRIBUTION_CASH;
import static ee.tuleva.onboarding.epis.fund.FundDto.FundStatus.ACTIVE;
import static java.time.LocalDate.parse;

import ee.tuleva.onboarding.auth.principal.Person;
import ee.tuleva.onboarding.currency.Currency;
import ee.tuleva.onboarding.epis.account.FundBalanceDto;
import ee.tuleva.onboarding.epis.application.ApplicationResponse;
import ee.tuleva.onboarding.epis.cashflows.CashFlow;
import ee.tuleva.onboarding.epis.cashflows.CashFlowStatement;
import ee.tuleva.onboarding.epis.contact.ContactDetails;
import ee.tuleva.onboarding.epis.fund.FundDto;
import ee.tuleva.onboarding.epis.fund.NavDto;
import ee.tuleva.onboarding.epis.mandate.ApplicationDTO;
import ee.tuleva.onboarding.epis.mandate.ApplicationResponseDTO;
import ee.tuleva.onboarding.epis.mandate.ApplicationStatus;
import ee.tuleva.onboarding.epis.mandate.MandateDto;
import ee.tuleva.onboarding.mandate.application.ApplicationType;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.LocalDate;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.cache.annotation.Caching;
import org.springframework.context.annotation.Profile;
import org.springframework.security.oauth2.client.OAuth2RestOperations;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestOperations;

/**
 * Mock replacement for {@link EpisService}, active under the "mock" Spring
 * profile. Returns hard-coded fixture data instead of calling the real EPIS
 * service, while keeping the same caching behavior as the real implementation.
 */
@Service
@Slf4j
@Profile("mock")
public class MockEpisService extends EpisService {

  // Cache names referenced by the @Cacheable/@CacheEvict annotations below.
  // Declared static final: they are String constants, not per-instance state.
  private static final String APPLICATIONS_CACHE_NAME = "applications";
  private static final String TRANSFER_APPLICATIONS_CACHE_NAME = "transferApplications";
  private static final String CONTACT_DETAILS_CACHE_NAME = "contactDetails";
  private static final String ACCOUNT_STATEMENT_CACHE_NAME = "accountStatement";
  private static final String CASH_FLOW_STATEMENT_CACHE_NAME = "cashFlowStatement";
  private static final String FUNDS_CACHE_NAME = "funds";

  // Injected for parity with the real service; unused by the mock itself.
  @Value("${epis.service.url}")
  String episServiceUrl;

  public MockEpisService(
      RestOperations userTokenRestTemplate, OAuth2RestOperations clientCredentialsRestTemplate) {
    super(userTokenRestTemplate, clientCredentialsRestTemplate);
  }

  /** Returns a single fixed SELECTION application, cached per personal code. */
  @Cacheable(value = APPLICATIONS_CACHE_NAME, key = "#person.personalCode", sync = true)
  public List<ApplicationDTO> getApplications(Person person) {
    return List.of(
        ApplicationDTO.builder()
            .date(Instant.parse("2001-01-02T01:23:45Z"))
            .type(ApplicationType.SELECTION)
            .status(ApplicationStatus.COMPLETE)
            .id(123L)
            .currency("EUR")
            .sourceFundIsin("source")
            .build());
  }

  /**
   * Returns a fixed cash flow statement (start/end balances plus three
   * transactions). The date range only participates in the cache key.
   */
  @Cacheable(
      value = CASH_FLOW_STATEMENT_CACHE_NAME,
      key = "{ #person.personalCode, #fromDate, #toDate }",
      sync = true)
  public CashFlowStatement getCashFlowStatement(
      Person person, LocalDate fromDate, LocalDate toDate) {
    val time = Instant.parse("2022-01-02T01:23:45Z");
    val currency = Currency.EUR.name();
    val amount = new BigDecimal("2000");
    return CashFlowStatement.builder()
        .startBalance(
            Map.of(
                "1",
                CashFlow.builder()
                    .time(time)
                    .priceTime(time)
                    .amount(new BigDecimal("1000.0"))
                    .currency(currency)
                    .isin(null)
                    .build()))
        .endBalance(
            Map.of(
                "1",
                CashFlow.builder()
                    .time(time)
                    .priceTime(time)
                    .amount(new BigDecimal("1100.0"))
                    .currency(currency)
                    .isin(null)
                    .build()))
        .transactions(
            List.of(
                CashFlow.builder()
                    .time(time)
                    .priceTime(time)
                    .amount(new BigDecimal("2000.0"))
                    .currency(currency)
                    .isin(null)
                    .type(CASH)
                    .build(),
                CashFlow.builder()
                    .time(time.plusSeconds(1))
                    .priceTime(time.plusSeconds(1))
                    .amount(amount.negate())
                    .currency(currency)
                    .isin(null)
                    .type(CASH)
                    .build(),
                CashFlow.builder()
                    .time(time.plusSeconds(1))
                    .priceTime(time.plusSeconds(1))
                    .amount(BigDecimal.valueOf(10.01))
                    .currency(currency)
                    .isin("EE3600001707")
                    .type(CONTRIBUTION_CASH)
                    .build()))
        .build();
  }

  /**
   * Evicts the per-person caches.
   * NOTE(review): the cashFlowStatement and funds caches are not evicted here
   * — confirm that is intentional (funds is not person-keyed; cash flow keys
   * include the date range).
   */
  @Caching(
      evict = {
        @CacheEvict(value = APPLICATIONS_CACHE_NAME, key = "#person.personalCode"),
        @CacheEvict(value = TRANSFER_APPLICATIONS_CACHE_NAME, key = "#person.personalCode"),
        @CacheEvict(value = CONTACT_DETAILS_CACHE_NAME, key = "#person.personalCode"),
        @CacheEvict(value = ACCOUNT_STATEMENT_CACHE_NAME, key = "#person.personalCode"),
      })
  public void clearCache(Person person) {
    log.info("Clearing cache for {}", person.getPersonalCode());
  }

  /** Returns fixed contact details, cached per personal code. */
  @Cacheable(value = CONTACT_DETAILS_CACHE_NAME, key = "#person.personalCode")
  public ContactDetails getContactDetails(Person person) {
    return mockContactDetails();
  }

  /** Token-aware overload; the token is ignored and shares the same cache. */
  @Cacheable(value = CONTACT_DETAILS_CACHE_NAME, key = "#person.personalCode")
  public ContactDetails getContactDetails(Person person, String token) {
    return mockContactDetails();
  }

  /** Builds the shared contact-details fixture. */
  private ContactDetails mockContactDetails() {
    return ContactDetails.builder()
        .firstName("Erko")
        .lastName("Risthein")
        .personalCode("38501010002")
        .addressRow1("Tuleva, Telliskivi 60")
        .country("EE")
        .postalIndex("10412")
        .districtCode("0784")
        .contactPreference(ContactDetails.ContactPreferenceType.valueOf("E"))
        .languagePreference(ContactDetails.LanguagePreferenceType.valueOf("EST"))
        .noticeNeeded("Y")
        .email("tuleva@tuleva.ee")
        .phoneNumber("+372546545")
        .pensionAccountNumber("993432432")
        .thirdPillarDistribution(List.of(new ContactDetails.Distribution("EE123", BigDecimal.ONE)))
        .isSecondPillarActive(true)
        .isThirdPillarActive(true)
        .build();
  }

  /** Returns two fixed fund balances, cached per personal code. */
  @Cacheable(value = ACCOUNT_STATEMENT_CACHE_NAME, key = "#person.personalCode")
  public List<FundBalanceDto> getAccountStatement(Person person) {
    return List.of(
        FundBalanceDto.builder()
            .isin("EE3600109435")
            .value(BigDecimal.valueOf(123.0))
            .unavailableValue(BigDecimal.valueOf(234.0))
            .units(BigDecimal.valueOf(345.0))
            .nav(BigDecimal.valueOf(0.12345))
            .currency("EUR")
            .activeContributions(true)
            .build(),
        FundBalanceDto.builder()
            .isin("EE3600001707")
            .value(BigDecimal.valueOf(123.0))
            .unavailableValue(BigDecimal.valueOf(234.0))
            .units(BigDecimal.valueOf(345.0))
            .nav(BigDecimal.valueOf(0.12345))
            .currency("EUR")
            .activeContributions(true)
            .build());
  }

  /** Returns a single fixed active fund; an empty result is never cached. */
  @Cacheable(value = FUNDS_CACHE_NAME, unless = "#result.isEmpty()")
  public List<FundDto> getFunds() {
    return List.of(
        new FundDto("EE3600109435", "Tuleva Maailma Aktsiate Pensionifond", "TUK75", 2, ACTIVE));
  }

  /** Returns a fixed NAV regardless of the requested date. */
  public NavDto getNav(String isin, LocalDate date) {
    return new NavDto(isin, parse("2019-08-19"), new BigDecimal("19.0"));
  }

  /** Pretends to accept any mandate. */
  public ApplicationResponseDTO sendMandate(MandateDto mandate) {
    return new ApplicationResponseDTO();
  }

  /** Pretends to accept any cancellation. */
  public ApplicationResponse sendCancellation(ApplicationResponse cancellation) {
    return new ApplicationResponse();
  }

  /** Ignores the update and returns the fixture, evicting the cached entry. */
  @CacheEvict(value = CONTACT_DETAILS_CACHE_NAME, key = "#person.personalCode")
  public ContactDetails updateContactDetails(Person person, ContactDetails contactDetails) {
    return mockContactDetails();
  }
}
package fi.csc.microarray.analyser;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.net.InetAddress;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Timer;
import java.util.TimerTask;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import javax.jms.JMSException;

import org.apache.log4j.Logger;

import fi.csc.microarray.analyser.AnalysisDescription.ParameterDescription;
import fi.csc.microarray.config.Configuration;
import fi.csc.microarray.config.DirectoryLayout;
import fi.csc.microarray.constants.ApplicationConstants;
import fi.csc.microarray.filebroker.FileBrokerClient;
import fi.csc.microarray.messaging.JobState;
import fi.csc.microarray.messaging.MessagingEndpoint;
import fi.csc.microarray.messaging.MessagingListener;
import fi.csc.microarray.messaging.MessagingTopic;
import fi.csc.microarray.messaging.MonitoredNodeBase;
import fi.csc.microarray.messaging.Topics;
import fi.csc.microarray.messaging.MessagingTopic.AccessMode;
import fi.csc.microarray.messaging.message.CommandMessage;
import fi.csc.microarray.messaging.message.JobLogMessage;
import fi.csc.microarray.messaging.message.JobMessage;
import fi.csc.microarray.messaging.message.NamiMessage;
import fi.csc.microarray.messaging.message.ParameterMessage;
import fi.csc.microarray.messaging.message.ResultMessage;
import fi.csc.microarray.messaging.message.JobMessage.ParameterSecurityPolicy;
import fi.csc.microarray.service.KeepAliveShutdownHandler;
import fi.csc.microarray.service.ShutdownCallback;
import fi.csc.microarray.util.MemUtil;

/**
 * Executes analysis jobs and handles input&output. Uses multithreading
 * and thread pool.
 *
 * Jobs move through three maps guarded by {@code jobsLock}:
 * received (waiting for capacity) -> scheduled (OFFER sent, waiting for the
 * client's ACCEPT_OFFER) -> running (executing in the thread pool).
 *
 * @author Taavi Hupponen, Aleksi Kallio
 */
public class AnalyserServer extends MonitoredNodeBase implements MessagingListener, ResultCallback, ShutdownCallback {

	/** Security policy for parameters of internal requests (sourcecode fetch). */
	private static class InternalParameterSecurityPolicy implements ParameterSecurityPolicy {

		private static final int MAX_VALUE_LENGTH = 1000;

		/** Accepts any value that is not excessively long. */
		public boolean isValueValid(String value, ParameterDescription parameterDescription) {
			// Check parameter size (DOS protection)
			return value.length() <= MAX_VALUE_LENGTH;
		}
	}

	private static final InternalParameterSecurityPolicy INTERNAL_PARAMETER_SECURITY_POLICY = new InternalParameterSecurityPolicy();

	// Pseudo-description used to parse "describe-operation" (sourcecode) requests.
	// Configured in the constructor because its setup wants logging initialised.
	private static final AnalysisDescription SOURCECODE_FETCH_DESCRIPTION = new AnalysisDescription(null);

	public static final String DESCRIPTION_OUTPUT_NAME = "description";
	public static final String SOURCECODE_OUTPUT_NAME = "sourcecode";

	/**
	 * Loggers. Initialised in the constructor, after DirectoryLayout has set
	 * up the logging configuration.
	 */
	private static Logger logger;
	private static Logger loggerJobs;
	private static Logger loggerStatus;

	/**
	 * Directory for storing input and output files.
	 */
	private int receiveTimeout;     // seconds a job may sit in receivedJobs
	private int scheduleTimeout;    // seconds a job may sit in scheduledJobs
	private int timeoutCheckInterval;
	private boolean sweepWorkDir;
	private int maxJobs;            // max scheduled + running jobs

	/**
	 * Id of the analyser server instance.
	 */
	private String id = UUID.randomUUID().toString();

	private File workDir;

	/**
	 * All the analysis tools.
	 */
	private ToolRepository toolRepository;

	/**
	 * Our route to messaging fabric.
	 */
	private MessagingEndpoint endpoint;
	private MessagingTopic managerTopic;

	private FileBrokerClient fileBroker;

	/**
	 * Java utility for multithreading.
	 */
	private ExecutorService executorService;

	// synchronize with this object when accessing the job maps below
	private Object jobsLock = new Object();
	private LinkedHashMap<String, AnalysisJob> receivedJobs = new LinkedHashMap<String, AnalysisJob>();
	private LinkedHashMap<String, AnalysisJob> scheduledJobs = new LinkedHashMap<String, AnalysisJob>();
	private LinkedHashMap<String, AnalysisJob> runningJobs = new LinkedHashMap<String, AnalysisJob>();

	Timer timeoutTimer;

	/**
	 * Initialises configuration, logging, work directory, tool repository,
	 * the timeout checker and all messaging topics, then registers the
	 * shutdown hook. Ordering matters: directory layout first (it configures
	 * logging), messaging last (so no message arrives before setup is done).
	 *
	 * @throws Exception
	 */
	public AnalyserServer() throws Exception {

		// Initialise dir, config and logging
		DirectoryLayout.initialiseServerLayout(Arrays.asList(new String[] {"comp"}));
		Configuration configuration = DirectoryLayout.getInstance().getConfiguration();

		// Initialise static variables, so late because they need logging
		SOURCECODE_FETCH_DESCRIPTION.setName("Fetch sourcecode (system internal operation)");
		SOURCECODE_FETCH_DESCRIPTION.addParameter(new AnalysisDescription.ParameterDescription("tool id", "ID (technical name) of the tool", false));

		// Initialise instance variables
		this.receiveTimeout = configuration.getInt("comp", "receive-timeout");
		this.scheduleTimeout = configuration.getInt("comp", "schedule-timeout");
		this.timeoutCheckInterval = configuration.getInt("comp", "timeout-check-interval");
		this.sweepWorkDir = configuration.getBoolean("comp", "sweep-work-dir");
		this.maxJobs = configuration.getInt("comp", "max-jobs");

		logger = Logger.getLogger(AnalyserServer.class);
		loggerJobs = Logger.getLogger("jobs");
		loggerStatus = Logger.getLogger("status");

		// initialize working directory
		logger.info("starting compute service...");
		this.workDir = DirectoryLayout.getInstance().getJobsDataDirBase(id);

		// initialise custom scripts dir
		DirectoryLayout.getInstance().getCustomScriptsDir();

		// initialize executor service
		this.executorService = Executors.newCachedThreadPool();

		// initialize analysis tools
		this.toolRepository = new ToolRepository(this.workDir);

		// initialize timeout checker (daemon thread so it never blocks JVM exit)
		timeoutTimer = new Timer(true);
		timeoutTimer.schedule(new TimeoutTimerTask(), timeoutCheckInterval, timeoutCheckInterval);

		// initialize communications
		this.endpoint = new MessagingEndpoint(this);

		MessagingTopic analyseTopic = endpoint.createTopic(Topics.Name.AUTHORISED_REQUEST_TOPIC, AccessMode.READ);
		analyseTopic.setListener(this);

		managerTopic = endpoint.createTopic(Topics.Name.MANAGER_TOPIC, AccessMode.WRITE);

		fileBroker = new FileBrokerClient(this.endpoint.createTopic(Topics.Name.AUTHORISED_URL_TOPIC, AccessMode.WRITE));

		// create keep-alive thread and register shutdown hook
		KeepAliveShutdownHandler.init(this);

		logger.info("analyser is up and running [" + ApplicationConstants.NAMI_VERSION + "]");
		logger.info("[mem: " + MemUtil.getMemInfo() + "]");
	}

	public String getName() {
		return "analyser";
	}

	/**
	 * Process incoming message. JobMessage for submitting a job, CommandMessage for canceling one.
	 *
	 * Also operation descriptions and source codes are requested with a JobMessage.
	 *
	 */
	public void onNamiMessage(NamiMessage namiMessage) {

		// create job, request operation descriptions or source code for operation
		if (namiMessage instanceof JobMessage) {

			JobMessage jobMessage = (JobMessage)namiMessage;

			// return the operations descriptions
			if ("describe".equals(jobMessage.getAnalysisId())) {
				logger.info("sending all descriptions");
				sendReplyMessage(jobMessage, createDescriptionsMessage(jobMessage));
				return;
			}

			// return source code for an operation
			else if ("describe-operation".equals(jobMessage.getAnalysisId())) {
				sendReplyMessage(jobMessage, createSourceCodeMessage(jobMessage));
				return;
			}

			// job message
			else {
				receiveJob(jobMessage);
			}
		}

		// command messages
		else if (namiMessage instanceof CommandMessage) {
			CommandMessage commandMessage = (CommandMessage)namiMessage;

			if (CommandMessage.COMMAND_ACCEPT_OFFER.equals(commandMessage.getCommand())) {

				// is this AS accepted?
				String acceptedId = commandMessage.getNamedParameter(ParameterMessage.PARAMETER_AS_ID);
				String jobId = commandMessage.getNamedParameter(ParameterMessage.PARAMETER_JOB_ID);
				logger.debug("ACCEPT_OFFER for analyser: " + acceptedId + " job: " + jobId);

				// client chose this AS to run this job
				if (this.id.equals(acceptedId)) {
					AnalysisJob job;
					synchronized(jobsLock) {
						// check that we have the job as scheduled
						job = scheduledJobs.get(commandMessage.getNamedParameter(ParameterMessage.PARAMETER_JOB_ID));
						if (job != null) {
							scheduledJobs.remove(jobId);
							runningJobs.put(job.getId(), job);

							// run the job
							executorService.execute(job);
							logger.info("Executing job " + job.analysis.getFullName() + ", "+ job.getId() + ", " + job.getInputMessage().getUsername());
						} else {
							logger.warn("Got ACCEPT_OFFER for job which is not scheduled.");
						}
					}
				}

				// client chose some other as, forget this job
				else {
					logger.debug("Removing scheduled job " + jobId);
					synchronized(jobsLock) {
						// remove(jobId) already takes the job out of the received queue
						AnalysisJob jobToBeForgotten = receivedJobs.remove(jobId);

						// job was not in the received queue, so it was scheduled
						if (jobToBeForgotten == null) {
							scheduledJobs.remove(jobId);
							activeJobRemoved();
						}
						// BUGFIX: previously this branch also called
						// receivedJobs.remove(jobToBeForgotten), passing the job
						// object as key into a String-keyed map -- a guaranteed no-op.
					}
				}
			}

			else if (CommandMessage.COMMAND_CANCEL.equals(commandMessage.getCommand())) {
				String jobId = commandMessage.getParameters().get(0);

				AnalysisJob job;
				synchronized(jobsLock) {
					if (receivedJobs.containsKey(jobId)) {
						job = receivedJobs.remove(jobId);
					} else if (scheduledJobs.containsKey(jobId)) {
						job = scheduledJobs.remove(jobId);
					} else {
						// running jobs are only looked up; removeRunningJob()
						// cleans them up after the cancel takes effect
						job = runningJobs.get(jobId);
					}
				}

				if (job != null) {
					job.cancel();
				}
			}
			updateStatus();
		}

		// unknown message
		else {
			logger.error("unidentified message: " + namiMessage.getMessageID());
		}
	}

	public File getWorkDir() {
		return workDir;
	}

	public boolean shouldSweepWorkDir() {
		return sweepWorkDir;
	}

	/**
	 * Removes a finished job from the running map, writes the per-job log
	 * line and forwards a job log message to the manager.
	 */
	public void removeRunningJob(AnalysisJob job) {
		String hostname = "";
		try {
			hostname = InetAddress.getLocalHost().getCanonicalHostName();
		} catch (UnknownHostException e1) {
			logger.warn("Could not get local hostname.");
			hostname = "";
		}

		char delimiter = ';';
		loggerJobs.info(job.getId() + delimiter +
				job.getInputMessage().getAnalysisId().replaceAll("\"", "") + delimiter +
				job.getState() + delimiter +
				job.getInputMessage().getUsername() + delimiter +
				job.getExecutionStartTime().toString() + delimiter +
				job.getExecutionEndTime().toString() + delimiter +
				hostname);

		logger.debug("Analyser server removing job " + job.getId() + "(" + job.getState() + ")");
		synchronized(jobsLock) {
			this.runningJobs.remove(job.getId());
		}
		activeJobRemoved();

		// send message to manager
		sendJobLogMessage(job);
	}

	/** Sends a job log entry for the given job to the manager topic. */
	public void sendJobLogMessage(AnalysisJob job) {
		JobLogMessage jobLogMessage;
		String hostname = "";
		try {
			hostname = InetAddress.getLocalHost().getCanonicalHostName();
		} catch (UnknownHostException e1) {
			logger.warn("Could not get local hostname.");
			hostname = "";
		}

		jobLogMessage = new JobLogMessage(
				job.getInputMessage().getAnalysisId().replaceAll("\"", ""),
				job.getState(),
				job.getId(),
				job.getExecutionStartTime(),
				job.getExecutionEndTime(),
				job.getResultMessage().getErrorMessage(),
				job.getResultMessage().getOutputText(),
				job.getInputMessage().getUsername(),
				hostname);

		try {
			managerTopic.sendMessage(jobLogMessage);
		} catch (JMSException e) {
			logger.error("Could not send job log message.", e);
		}
	}

	/**
	 * This is the callback method for a job to send the result message. When a job is finished the thread
	 * running a job will clean up all the data files after calling this method.
	 *
	 * For this reason, all the data must be sent before this method returns.
	 *
	 */
	public void sendResultMessage(NamiMessage original, ResultMessage reply) {
		try {
			endpoint.replyToMessage(original, reply);
		} catch (JMSException e) {
			logger.error("Could not send ResultMessage " + reply.getMessageID());
		}
		logger.info("result message sent (" + reply.getMessageID() + " " + reply.getState() + ")");
	}

	public FileBrokerClient getFileBrokerClient() {
		return this.fileBroker;
	}

	/**
	 * Sends the message in new thread.
	 * @param original
	 * @param reply
	 */
	private void sendReplyMessage(final NamiMessage original, final NamiMessage reply) {
		new Thread(new Runnable() {
			public void run() {
				try {
					endpoint.replyToMessage(original, reply);
				} catch (JMSException e) {
					logger.error("Could not send message.", e);
				}
			}
		}).start();
	}

	/**
	 * Called whenever a scheduled or running slot frees up: promotes the
	 * oldest received job into the scheduled state if capacity allows.
	 * Safe to call while holding jobsLock (it is reentrant).
	 */
	private void activeJobRemoved() {
		synchronized (jobsLock) {
			if (!receivedJobs.isEmpty() && ((runningJobs.size() + scheduledJobs.size() < maxJobs))) {
				AnalysisJob job = receivedJobs.values().iterator().next();
				receivedJobs.remove(job.getId());
				scheduleJob(job);
			}
			this.updateStatus();
		}
	}

	/**
	 * Validates an incoming job request and either schedules it immediately
	 * (capacity available) or queues it with an ACK. Unsupported tools are
	 * silently ignored so another analyser instance may claim the job.
	 */
	private void receiveJob(JobMessage jobMessage) {

		// check that we can run the requested analysis
		AnalysisDescription description = null;
		try {
			description = toolRepository.getDescription(jobMessage.getAnalysisId());
		} catch (AnalysisException e) {
			logger.warn("Could not fetch description for " + jobMessage.getAnalysisId());
			ResultMessage resultMessage = new ResultMessage("", JobState.ERROR, "", "Could not load operation.", "", jobMessage.getReplyTo());
			sendReplyMessage(jobMessage, resultMessage);
			return;
		}

		if (description == null) {
			logger.info("Analysis " + jobMessage.getAnalysisId() + " not found.");
			ResultMessage resultMessage = new ResultMessage("", JobState.ERROR, "", "Operation not found.", "", jobMessage.getReplyTo());
			sendReplyMessage(jobMessage, resultMessage);
			return;
		}

		// check if requested operation is supported, if not, just ignore the request
		if (!toolRepository.supports(description.getFullName())) {
			logger.debug("Analysis " + jobMessage.getAnalysisId() + " ( " + description.getSourceResourceName() + " ) not supported, ignoring request.");
			return;
		}

		AnalysisJob job;
		try {
			job = description.createAnalysisJob(jobMessage, this);
		} catch (AnalysisException e) {
			logger.warn("could not create analysis job for " + jobMessage.getAnalysisId());
			ResultMessage resultMessage = new ResultMessage("", JobState.ERROR, "", "Could not initialise operation.", "", jobMessage.getReplyTo());
			sendReplyMessage(jobMessage, resultMessage);
			return;
		}

		// now we know that we can run this job
		// check if we could run it now or later
		synchronized(jobsLock) {
			job.setReceiveTime(new Date());

			// could run it now
			if (runningJobs.size() + scheduledJobs.size() < maxJobs) {
				scheduleJob(job);
			}

			// run later
			else {
				receivedJobs.put(job.getId(), job);
				// try to send the ack message
				try {
					sendAckMessage(job);
				} catch (Exception e) {
					receivedJobs.remove(job.getId());
					logger.error("Could not send ACK for job " + job.getId());
				}
			}
		}
		updateStatus();
	}

	/**
	 * Moves a job to the scheduled map and sends an OFFER to the client.
	 * On send failure the job is dropped from the scheduled map again.
	 */
	private void scheduleJob(AnalysisJob job) {
		synchronized(jobsLock) {
			job.setScheduleTime(new Date());
			scheduledJobs.put(job.getId(), job);
		}

		try {
			sendOfferMessage(job);
		} catch (Exception e) {
			synchronized(jobsLock) {
				scheduledJobs.remove(job.getId());
			}
			logger.error("Could not send OFFER for job " + job.getId());
		}
		updateStatus();
	}

	// NOTE(review): the send is asynchronous (sendReplyMessage spawns a
	// thread), so the declared JMSException is never actually thrown here;
	// the signature is kept for compatibility.
	private void sendAckMessage(AnalysisJob job) throws JMSException {
		// create ack message
		CommandMessage offerMessage = new CommandMessage(CommandMessage.COMMAND_ACK);
		offerMessage.addNamedParameter(ParameterMessage.PARAMETER_AS_ID, this.id);
		offerMessage.addNamedParameter(ParameterMessage.PARAMETER_JOB_ID, job.getId());

		// try to send the message
		sendReplyMessage(job.getInputMessage(), offerMessage);
	}

	// NOTE(review): same as sendAckMessage -- asynchronous send, declared
	// JMSException kept for compatibility.
	private void sendOfferMessage(AnalysisJob job) throws JMSException {
		// create offer message
		CommandMessage offerMessage = new CommandMessage(CommandMessage.COMMAND_OFFER);
		offerMessage.addNamedParameter(ParameterMessage.PARAMETER_AS_ID, this.id);
		offerMessage.addNamedParameter(ParameterMessage.PARAMETER_JOB_ID, job.getId());

		// try to send the message
		sendReplyMessage(job.getInputMessage(), offerMessage);
	}

	/**
	 * Serialises all tool descriptions, uploads them to the file broker and
	 * returns a result message carrying the payload URL.
	 */
	private ResultMessage createDescriptionsMessage(JobMessage requestMessage) {
		ResultMessage resultMessage = new ResultMessage("", JobState.COMPLETED, "", "", "", requestMessage.getReplyTo());
		try {
			String description = toolRepository.serialiseAsStringBuffer().toString();
			URL url = fileBroker.addFile(new ByteArrayInputStream(description.getBytes()), null);
			resultMessage.addPayload(DESCRIPTION_OUTPUT_NAME, url);
		} catch (Exception e) {
			logger.error("Could not send analysis descriptions", e);
			resultMessage.setState(JobState.ERROR);
			resultMessage.setErrorMessage("Could not send analysis descriptions.");
		}
		return resultMessage;
	}

	/**
	 * Uploads the source code of the requested tool to the file broker and
	 * returns a result message carrying the payload URL.
	 */
	private ResultMessage createSourceCodeMessage(JobMessage requestMessage) {
		ResultMessage resultMessage = new ResultMessage("", JobState.COMPLETED, "", "", "", requestMessage.getReplyTo());
		try {
			String name = new String(requestMessage.getParameters(INTERNAL_PARAMETER_SECURITY_POLICY, SOURCECODE_FETCH_DESCRIPTION).get(0));
			logger.info("sending source code for " + name);
			String sourceCode = toolRepository.getDescription(name).getSourceCode();

			byte[] bytes = sourceCode.getBytes();
			if (bytes.length == 0) {
				bytes = "<empty source code>".getBytes(); // zero length bytes content would hang upload
			}

			URL url = fileBroker.addFile(new ByteArrayInputStream(bytes), null);
			resultMessage.addPayload(SOURCECODE_OUTPUT_NAME, url);
		} catch (Exception e) {
			logger.error("Could not send analysis source code", e);
			resultMessage.setState(JobState.ERROR);
			resultMessage.setErrorMessage("Could not send analysis source code.");
		}
		return resultMessage;
	}

	/** Logs current queue sizes to the status logger. */
	private void updateStatus() {
		synchronized(jobsLock) {
			loggerStatus.info("received jobs: " + receivedJobs.size() +
					", scheduled jobs: " + scheduledJobs.size() +
					", running jobs: " + runningJobs.size());
		}
	}

	/**
	 * The order of the jobs in the receivedJobs and scheduledJobs is FIFO. Because of synchronizations
	 * this does not necessarily strictly correspond to the receiveTime and scheduleTime fields of the
	 * jobs, but is close enough.
	 *
	 * As the jobs are ordered, it is enough to check the jobs until the first new enough job is found
	 * as the following jobs are newer (almost always, see above).
	 *
	 * TODO send BUSY if timeout?
	 *
	 */
	private class TimeoutTimerTask extends TimerTask {

		@Override
		public void run() {
			synchronized(jobsLock) {

				ArrayList<AnalysisJob> jobsToBeRemoved = new ArrayList<AnalysisJob>();

				// get old received jobs
				for (AnalysisJob job: receivedJobs.values()) {
					if ((System.currentTimeMillis() - receiveTimeout * 1000) > job.getReceiveTime().getTime()) {
						jobsToBeRemoved.add(job);
					} else {
						break;
					}
				}

				// remove old received jobs
				for (AnalysisJob job: jobsToBeRemoved) {
					receivedJobs.remove(job.getId());
					logger.debug("Removing old received job: " + job.getId());
					logger.debug("Jobs received: " + receivedJobs.size() + ", scheduled: " + scheduledJobs.size() + ", running: " + runningJobs.size());
				}

				// get old scheduled jobs
				jobsToBeRemoved.clear();
				for (AnalysisJob job: scheduledJobs.values()) {
					if ((System.currentTimeMillis() - scheduleTimeout * 1000) > job.getScheduleTime().getTime()) {
						jobsToBeRemoved.add(job);
					} else {
						break;
					}
				}

				// remove old scheduled jobs
				for (AnalysisJob job: jobsToBeRemoved) {
					scheduledJobs.remove(job.getId());
					logger.debug("Removing old scheduled job: " + job.getId());
					activeJobRemoved();
					logger.debug("Jobs received: " + receivedJobs.size() + ", scheduled: " + scheduledJobs.size() + ", running: " + runningJobs.size());
				}
			}
		}
	}

	/** Shutdown hook callback: closes the messaging endpoint. */
	public void shutdown() {
		logger.info("shutdown requested");

		// close messaging endpoint
		try {
			this.endpoint.close();
		} catch (JMSException e) {
			logger.error("closing messaging endpoint failed", e);
		}

		logger.info("shutting down");
	}
}
package info.u_team.u_team_core.item.tool;

import java.util.Iterator;

import com.google.common.collect.Iterators;

import net.minecraft.item.TieredItem;
import net.minecraftforge.fml.RegistryObject;

/**
 * Groups the five standard tool items (axe, hoe, pickaxe, shovel and sword)
 * of one material tier into a single immutable object. Iterating a set
 * always yields the tools in the fixed order: axe, hoe, pickaxe, shovel,
 * sword.
 */
public class ToolSet implements Iterable<RegistryObject<? extends TieredItem>> {
	
	private final RegistryObject<UAxeItem> axe;
	private final RegistryObject<UHoeItem> hoe;
	private final RegistryObject<UPickaxeItem> pickaxe;
	private final RegistryObject<UShovelItem> shovel;
	private final RegistryObject<USwordItem> sword;
	
	/**
	 * Creates a new set from the given registry entries. The entries are only
	 * stored here, never resolved.
	 */
	public ToolSet(RegistryObject<UAxeItem> axe, RegistryObject<UHoeItem> hoe, RegistryObject<UPickaxeItem> pickaxe, RegistryObject<UShovelItem> shovel, RegistryObject<USwordItem> sword) {
		this.axe = axe;
		this.hoe = hoe;
		this.pickaxe = pickaxe;
		this.shovel = shovel;
		this.sword = sword;
	}
	
	/** Iterates all five tools in declaration order: axe, hoe, pickaxe, shovel, sword. */
	@Override
	public Iterator<RegistryObject<? extends TieredItem>> iterator() {
		return Iterators.forArray(axe, hoe, pickaxe, shovel, sword);
	}
	
	/** @return the registry entry of the axe */
	public RegistryObject<UAxeItem> getAxe() {
		return axe;
	}
	
	/** @return the registry entry of the hoe */
	public RegistryObject<UHoeItem> getHoe() {
		return hoe;
	}
	
	/** @return the registry entry of the pickaxe */
	public RegistryObject<UPickaxeItem> getPickaxe() {
		return pickaxe;
	}
	
	/** @return the registry entry of the shovel */
	public RegistryObject<UShovelItem> getShovel() {
		return shovel;
	}
	
	/** @return the registry entry of the sword */
	public RegistryObject<USwordItem> getSword() {
		return sword;
	}
}
package io.github.mzmine.taskcontrol; import java.util.ArrayList; import java.util.List; import javafx.beans.property.SimpleStringProperty; import javafx.beans.property.StringProperty; /** * An abstract implementation of task which defines common methods to make Task implementation * easier. Added task status listener */ public abstract class AbstractTask implements Task { private TaskStatus status = TaskStatus.WAITING; private String errorMessage = null; // listener to control status changes private List<TaskStatusListener> listener; private StringProperty name = new SimpleStringProperty("Task name"); public final String getName() { return name.get(); } public final void setName(String value) { name.set(value); } public StringProperty nameProperty() { return name; } public final void setStatus(TaskStatus newStatus) { TaskStatus old = status; this.status = newStatus; if (listener != null && !status.equals(old)) for (int i = 0; i < listener.size(); i++) listener.get(i).taskStatusChanged(this, status, old); } /** * Convenience method for determining if this task has been canceled. Also returns true if the * task encountered an error. 
* * @return true if this task has been canceled or stopped due to an error */ public final boolean isCanceled() { return (status == TaskStatus.CANCELED) || (status == TaskStatus.ERROR); } /** * Convenience method for determining if this task has been completed * * @return true if this task is finished */ public final boolean isFinished() { return status == TaskStatus.FINISHED; } /** * @see io.github.mzmine.taskcontrol.Task#cancel() */ @Override public void cancel() { setStatus(TaskStatus.CANCELED); } /** * @see io.github.mzmine.taskcontrol.Task#getErrorMessage() */ @Override public final String getErrorMessage() { return errorMessage; } public final void setErrorMessage(String errorMessage) { this.errorMessage = errorMessage; } @Override public TaskPriority getTaskPriority() { return TaskPriority.NORMAL; } /** * Returns the TaskStatus of this Task * * @return The current status of this task */ @Override public final TaskStatus getStatus() { return this.status; } public void addTaskStatusListener(TaskStatusListener list) { if (listener == null) listener = new ArrayList<>(); listener.add(list); } public boolean removeTaskStatusListener(TaskStatusListener list) { if (listener != null) return listener.remove(list); else return false; } public void clearTaskStatusListener() { if (listener != null) listener.clear(); } }
package io.maxthomas.dictum.concrete; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.ImmutableMap; import edu.jhu.hlt.concrete.AnnotationMetadata; import edu.jhu.hlt.concrete.CommunicationMetadata; import edu.jhu.hlt.concrete.Entity; import edu.jhu.hlt.concrete.EntityMention; import edu.jhu.hlt.concrete.EntityMentionSet; import edu.jhu.hlt.concrete.EntitySet; import edu.jhu.hlt.concrete.LanguageIdentification; import edu.jhu.hlt.concrete.NITFInfo; import edu.jhu.hlt.concrete.TextSpan; import edu.jhu.hlt.concrete.TokenRefSequence; import edu.jhu.hlt.concrete.TokenTagging; import io.maxthomas.dictum.Communication; import io.maxthomas.dictum.CommunicationTagging; import io.maxthomas.dictum.Constituent; import io.maxthomas.dictum.Dependency; import io.maxthomas.dictum.DependencyParse; import io.maxthomas.dictum.FlatTextSpan; import io.maxthomas.dictum.InDocEntity; import io.maxthomas.dictum.InDocEntityGroup; import io.maxthomas.dictum.InDocEntityMention; import io.maxthomas.dictum.InDocEntityMentionGroup; import io.maxthomas.dictum.LanguageID; import io.maxthomas.dictum.Parse; import io.maxthomas.dictum.Section; import io.maxthomas.dictum.Sentence; import io.maxthomas.dictum.SpanLink; import io.maxthomas.dictum.TaggedToken; import io.maxthomas.dictum.TaggedTokenGroup; import io.maxthomas.dictum.Token; import io.maxthomas.dictum.Tokenization; import io.maxthomas.dictum.primitives.Confidence; import io.maxthomas.dictum.primitives.IntGreaterThanZero; import io.maxthomas.dictum.primitives.IntZeroOrGreater; import io.maxthomas.dictum.primitives.NonEmptyNonWhitespaceString; import io.maxthomas.dictum.primitives.UnixTimestamp; /** * Utility class that takes in Concrete {@link edu.jhu.hlt.concrete.Communication} * objects and converts them to Dictum {@link Communication} objects. 
*/ public final class FromConcrete { private static final Logger LOGGER = LoggerFactory.getLogger(FromConcrete.class); private FromConcrete() { } /** * @param c * a Concrete {@link edu.jhu.hlt.concrete.Communication} * @return a dictum {@link Communication} * @throws InvalidStructException * on failed validation */ public static final Communication convert(edu.jhu.hlt.concrete.Communication c) throws InvalidStructException { Communication.Builder b = new Communication.Builder(); try { b.setId(c.getId()) .setUUID(UUID.fromString(c.getUuid().getUuidString())) .setType(c.getType()) .setText(c.getText()); if (c.isSetStartTime()) b.setStartTime(UnixTimestamp.create(c.getStartTime())); if (c.isSetEndTime()) b.setEndTime(UnixTimestamp.create(c.getEndTime())); AnnotationMetadata amd = c.getMetadata(); b.setTool(NonEmptyNonWhitespaceString.create(amd.getTool())) .setKBest(IntGreaterThanZero.create(amd.getKBest())) .setTimestamp(UnixTimestamp.create(amd.getTimestamp())); if (c.isSetCommunicationMetadata()) { CommunicationMetadata cmd = c.getCommunicationMetadata(); NITFInfo nitfi = cmd.getNitfInfo(); // TODO: NITFInfo -> dictum } if (c.isSetCommunicationTaggingList()) c.getCommunicationTaggingList() .stream() .map(FromConcrete::convertConcreteCommTagging) .forEach(b::addTags); if (c.isSetKeyValueMap()) b.putAllKVs(c.getKeyValueMap()); if (c.isSetLidList()) for (LanguageIdentification lid : c.getLidList()) b.addLanguageIDs(convert(lid)); if (c.isSetSectionList()) { for (edu.jhu.hlt.concrete.Section s : c.getSectionList()) { Section ps = convert(s); b.putIdToSectionMap(ps.getUUID(), ps); } } ImmutableMap.Builder<UUID, Tokenization> mb = new ImmutableMap.Builder<>(); for (Section s : b.getIdToSectionMap().values()) { for (Sentence st : s.getIdToSentenceMap().values()) { st.getPowerTokenization().ifPresent(t -> mb.put(t.getUUID(), t)); } } Map<UUID, Tokenization> m = mb.build(); if (c.isSetEntityMentionSetList()) { for (EntityMentionSet s : c.getEntityMentionSetList()) { 
InDocEntityMentionGroup pemg = convert(s, m); b.putIdToEntityMentionsMap(pemg.getUUID(), pemg); } } if (c.isSetEntitySetList()) { for (EntitySet s : c.getEntitySetList()) { InDocEntityGroup peg = convert(s); b.putIdToEntitiesMap(peg.getUUID(), peg); } } return b.build(); } catch (NullPointerException | IllegalArgumentException | IllegalStateException e) { throw new InvalidStructException("Caught exception converting concrete communication.", e); } } private static final InDocEntityGroup convert(EntitySet a) { InDocEntityGroup.Builder b = new InDocEntityGroup.Builder(); b.setUUID(UUID.fromString(a.getUuid().getUuidString())); AnnotationMetadata amd = a.getMetadata(); b.setTool(NonEmptyNonWhitespaceString.create(amd.getTool())) .setKBest(IntGreaterThanZero.create(amd.getKBest())) .setTimestamp(UnixTimestamp.create(amd.getTimestamp())); for (Entity e : a.getEntityList()) { InDocEntity pe = convert(e); b.putIdToEntityMap(pe.getUUID(), pe); } if (a.isSetMentionSetId()) b.setMentionSetUUID(convert(a.getMentionSetId())); return b.build(); } private static final InDocEntity convert(Entity a) { InDocEntity.Builder b = new InDocEntity.Builder(); b.setUUID(convert(a.getUuid())); b.setNullableCanonicalName(a.getCanonicalName()); if (a.isSetConfidence()) b.setConfidence(new Confidence.Builder().setScore(a.getConfidence()).build()); b.setNullableType(a.getType()); for (edu.jhu.hlt.concrete.UUID u : a.getMentionIdList()) b.addMentionUUIDs(convert(u)); return b.build(); } private static final InDocEntityMentionGroup convert(EntityMentionSet a, Map<UUID, Tokenization> m) { InDocEntityMentionGroup.Builder b = new InDocEntityMentionGroup.Builder(); b.setUUID(convert(a.getUuid())); AnnotationMetadata amd = a.getMetadata(); b.setTool(NonEmptyNonWhitespaceString.create(amd.getTool())) .setKBest(IntGreaterThanZero.create(amd.getKBest())) .setTimestamp(UnixTimestamp.create(amd.getTimestamp())); for (EntityMention e : a.getMentionList()) { InDocEntityMention pem = convert(e, m); 
b.putIdToEntityMentionMap(pem.getUUID(), pem); } return b.build(); } private static final InDocEntityMention convert(EntityMention a, Map<UUID, Tokenization> m) { InDocEntityMention.Builder b = new InDocEntityMention.Builder(); b.setUUID(convert(a.getUuid())); b.setNullableEntityType(a.getEntityType()); b.setNullablePhraseType(a.getPhraseType()); b.setNullableText(a.getText()); if (a.isSetChildMentionIdList()) for (edu.jhu.hlt.concrete.UUID u : a.getChildMentionIdList()) b.addChildMentionUUIDs(convert(u)); if (a.isSetConfidence()) b.setConfidence(new Confidence.Builder().setScore(a.getConfidence()).build()); TokenRefSequence trs = a.getTokens(); UUID u = convert(trs.getTokenizationId()); if (!m.containsKey(u)) throw new IllegalStateException("TokenRefSequence references tokenization UUID: " + u.toString() + ", but this Tokenization UUID is not present in this communication."); Tokenization tkz = m.get(u); Map<Integer, Token> toks = tkz.getIndexToTokenMap(); if (trs.isSetAnchorTokenIndex()) { int ai = trs.getAnchorTokenIndex(); b.setAnchorToken(toks.get(ai)); b.setAnchorTokenIndex(ai); } b.setTextSpan(convert(trs.getTextSpan())); for (Integer i : trs.getTokenIndexList()) { b.addTokenIndices(i); Token t = toks.get(i); b.addTokens(t); } b.setTokenizationUUID(u); b.setTokenization(tkz); return b.build(); } private static final CommunicationTagging convertConcreteCommTagging(edu.jhu.hlt.concrete.CommunicationTagging cct) { CommunicationTagging.Builder b = new CommunicationTagging.Builder(); b.setUUID(UUID.fromString(cct.getUuid().getUuidString())); AnnotationMetadata amd = cct.getMetadata(); b.setTool(NonEmptyNonWhitespaceString.create(amd.getTool())) .setKBest(IntGreaterThanZero.create(amd.getKBest())) .setTimestamp(UnixTimestamp.create(amd.getTimestamp())); b.setTaggingType(cct.getTaggingType()); b.putAllTagToConfidenceMap(convert(cct.getTagList(), cct.getConfidenceList())); return b.build(); } private static final UUID convert(edu.jhu.hlt.concrete.UUID uuid) { return 
UUID.fromString(uuid.getUuidString()); } private static final LanguageID convert(LanguageIdentification lid) { LanguageID.Builder b = new LanguageID.Builder(); b.setUUID(convert(lid.getUuid())); AnnotationMetadata amd = lid.getMetadata(); b.setTool(NonEmptyNonWhitespaceString.create(amd.getTool())) .setKBest(IntGreaterThanZero.create(amd.getKBest())) .setTimestamp(UnixTimestamp.create(amd.getTimestamp())); for (Map.Entry<String, Double> e : lid.getLanguageToProbabilityMap().entrySet()) { LOGGER.debug("Adding following to map: {}", e.toString()); b.putLanguageToProbMap(e.getKey(), Confidence.fromDouble(e.getValue())); } return b.build(); } private static final Section convert(edu.jhu.hlt.concrete.Section s) { Section.Builder b = new Section.Builder(); b.setUUID(convert(s.getUuid())); b.setKind(s.getKind()); b.setNullableLabel(s.getLabel()); b.setTextSpan(convert(s.getTextSpan())); if (s.isSetNumberList()) b.addAllNumbers(s.getNumberList()); if (s.isSetSentenceList()) for (edu.jhu.hlt.concrete.Sentence st : s.getSentenceList()) { Sentence pst = convert(st); b.putIdToSentenceMap(pst.getUUID(), pst); } return b.build(); } private static final Optional<FlatTextSpan> convert(TextSpan ts) { if (ts == null) return Optional.empty(); else return Optional.of(new FlatTextSpan.Builder() .setStart(ts.getStart()) .setEnd(ts.getEnding()) .build()); } public static final Sentence convert(edu.jhu.hlt.concrete.Sentence s) { Sentence.Builder b = new Sentence.Builder(); b.setUUID(convert(s.getUuid())); b.setTextSpan(convert(s.getTextSpan())); if (s.isSetTokenization()) b.setPowerTokenization(convert(s.getTokenization())); return b.build(); } private static final Tokenization convert (edu.jhu.hlt.concrete.Tokenization tkz) { Tokenization.Builder b = new Tokenization.Builder(); b.setUUID(convert(tkz.getUuid())); AnnotationMetadata amd = tkz.getMetadata(); b.setTool(NonEmptyNonWhitespaceString.create(amd.getTool())) .setKBest(IntGreaterThanZero.create(amd.getKBest())) 
.setTimestamp(UnixTimestamp.create(amd.getTimestamp())); b.setType(tkz.getKind().toString()); if (tkz.isSetTokenList()) { List<edu.jhu.hlt.concrete.Token> tl = tkz.getTokenList().getTokenList(); for (edu.jhu.hlt.concrete.Token t : tl) { Token pt = convert(t); b.putIndexToTokenMap(pt.getIndex().getVal(), pt); } } if (tkz.isSetTokenTaggingList()) { for (TokenTagging ttl : tkz.getTokenTaggingList()) { TaggedTokenGroup ptt = convert(ttl); b.putIdToTokenTagGroupMap(ptt.getUUID(), ptt); } } if (tkz.isSetParseList()) { for (edu.jhu.hlt.concrete.Parse p : tkz.getParseList()) { Parse pp = convert(p); b.putIdToParseMap(pp.getUUID(), pp); } } if (tkz.isSetDependencyParseList()) { for (edu.jhu.hlt.concrete.DependencyParse dp : tkz.getDependencyParseList()) { DependencyParse pdp = convert(dp); b.putIdToDependencyParseMap(pdp.getUUID(), pdp); } } Tokenization local = b.build(); Tokenization.Builder nb = new Tokenization.Builder(); nb.mergeFrom(local); if (tkz.isSetSpanLinkList()) { for (edu.jhu.hlt.concrete.SpanLink sl : tkz.getSpanLinkList()) { SpanLink psl = convert(sl, local); nb.addSpanLinks(psl); } } return nb.build(); } private static final SpanLink convert(edu.jhu.hlt.concrete.SpanLink sl, Tokenization tkz) { SpanLink.Builder b = new SpanLink.Builder(); if (sl.isSetConcreteTarget()) b.setConcreteTarget(convert(sl.getConcreteTarget())); b.setNullableExternalTarget(sl.getExternalTarget()); TokenRefSequence trs = sl.getTokens(); Map<Integer, Token> toks = tkz.getIndexToTokenMap(); if (trs.isSetAnchorTokenIndex()) { int ai = trs.getAnchorTokenIndex(); b.setAnchorToken(toks.get(ai)); b.setAnchorTokenIndex(ai); } b.setTextSpan(convert(trs.getTextSpan())); for (Integer i : trs.getTokenIndexList()) { b.addTokenIndices(i); Token t = toks.get(i); b.addTokens(t); } b.setTokenization(tkz); b.setLinkType(sl.getLinkType()); b.setTokenizationUUID(tkz.getUUID()); return b.build(); } private static final DependencyParse convert(edu.jhu.hlt.concrete.DependencyParse p) { 
DependencyParse.Builder b = new DependencyParse.Builder(); b.setUUID(convert(p.getUuid())); AnnotationMetadata amd = p.getMetadata(); b.setTool(NonEmptyNonWhitespaceString.create(amd.getTool())) .setKBest(IntGreaterThanZero.create(amd.getKBest())) .setTimestamp(UnixTimestamp.create(amd.getTimestamp())); for (edu.jhu.hlt.concrete.Dependency d : p.getDependencyList()) b.addDependencies(convert(d)); return b.build(); } private static final Dependency convert(edu.jhu.hlt.concrete.Dependency d) { Dependency.Builder b = new Dependency.Builder(); b.setDependentIndex(d.getDep()); if (d.isSetGov()) { int gov = d.getGov(); b.setGovernorIndex(gov); } b.setNullableEdgeType(d.getEdgeType()); return b.build(); } private static final Parse convert(edu.jhu.hlt.concrete.Parse p) { Parse.Builder b = new Parse.Builder(); b.setUUID(convert(p.getUuid())); AnnotationMetadata amd = p.getMetadata(); b.setTool(NonEmptyNonWhitespaceString.create(amd.getTool())) .setKBest(IntGreaterThanZero.create(amd.getKBest())) .setTimestamp(UnixTimestamp.create(amd.getTimestamp())); for (edu.jhu.hlt.concrete.Constituent c : p.getConstituentList()) { Constituent pc = convert(c); b.putConstituents(pc.getId(), pc); } return b.build(); } private static final Constituent convert(edu.jhu.hlt.concrete.Constituent c) { Constituent.Builder b = new Constituent.Builder(); b.setId(c.getId()); b.setNullableTag(c.getTag()); b.addAllChildList(c.getChildList()); if (c.isSetHeadChildIndex()) b.setHeadChildIndex(c.getHeadChildIndex()); if (c.isSetStart()) b.setStart(c.getStart()); if (c.isSetEnding()) b.setEnd(c.getEnding()); return b.build(); } private static final Token convert (edu.jhu.hlt.concrete.Token tk) { Token.Builder b = new Token.Builder(); b.setIndex(new IntZeroOrGreater.Builder().setVal(tk.getTokenIndex()).build()); b.setTextSpan(convert(tk.getTextSpan())); b.setNullableTokenText(tk.getText()); return b.build(); } private static final TaggedTokenGroup convert(TokenTagging tt) { TaggedTokenGroup.Builder b = 
new TaggedTokenGroup.Builder(); b.setUUID(convert(tt.getUuid())); AnnotationMetadata amd = tt.getMetadata(); b.setTool(NonEmptyNonWhitespaceString.create(amd.getTool())) .setKBest(IntGreaterThanZero.create(amd.getKBest())) .setTimestamp(UnixTimestamp.create(amd.getTimestamp())); b.setNullableTaggingType(tt.getTaggingType()); for (edu.jhu.hlt.concrete.TaggedToken tok : tt.getTaggedTokenList()) { TaggedToken pt = convert(tok); b.putIndexToTaggedTokenMap(pt.getIndex().getVal(), pt); } return b.build(); } private static final Map<String, Confidence> convert(List<String> sl, List<Double> dl) { Map<String, Confidence> m = new LinkedHashMap<>(); if (sl == null) { LOGGER.warn("String list null: returning empty map."); return m; } else if (dl == null) { LOGGER.warn("Double list null: returning empty map."); return m; } final int tls = sl.size(); final int cls = dl.size(); if (tls != 0) { if (cls != 0) { if (tls == cls) { for (int i = 0; i < tls; i++) { Confidence c = new Confidence.Builder() .setScore(dl.get(i)) .build(); m.put(sl.get(i), c); } } } else LOGGER.warn("Size of string list [{}] differs from size of double list [{}]. Neither will be added.", tls, cls); } else if (cls != 0) LOGGER.warn("Double list set but String list is not. Not adding."); return m; } private static final TaggedToken convert(edu.jhu.hlt.concrete.TaggedToken tt) { TaggedToken.Builder b = new TaggedToken.Builder(); b.setIndex(new IntZeroOrGreater.Builder().setVal(tt.getTokenIndex()).build()); b.setTag(tt.getTag()); return b.build(); } }
package org.eclipse.birt.core.template; import java.util.Iterator; import junit.framework.TestCase; import org.eclipse.birt.core.template.TextTemplate.ExpressionValueNode; import org.eclipse.birt.core.template.TextTemplate.ImageNode; import org.eclipse.birt.core.template.TextTemplate.TextNode; import org.eclipse.birt.core.template.TextTemplate.ValueNode; public class TemplateParserTest extends TestCase { public void testValueOf( ) { String input = "<value-of>script</value-of>"; String golden = "<value-of>script</value-of>"; TextTemplate template = new TemplateParser( ).parse( input ); assertEquals( golden, template ); } public void testViewTimeValueOf( ) { String input = "<viewtime-value-of>script</viewtime-value-of>"; String golden = "<viewtime-value-of>script</viewtime-value-of>"; TextTemplate template = new TemplateParser( ).parse( input ); assertEquals( golden, template ); } public void testImage( ) { String input = "<image>script</image>"; String golden = "<image>script</image>"; TextTemplate template = new TemplateParser( ).parse( input ); assertEquals( golden, template ); } public void testImageTag( ) { String input = "<image name=\"ABC\"/>"; String golden = "<image name=\"ABC\"></image>"; TextTemplate template = new TemplateParser( ).parse( input ); assertEquals( golden, template ); } public void testText( ) { String input = "text any text"; String golden = "<text>text any text</text>"; TextTemplate template = new TemplateParser( ).parse( input ); assertEquals( golden, template ); } protected void assertEquals( String golden, TextTemplate template ) { StringBuffer buffer = new StringBuffer( ); TextTemplateWriter.write( template, buffer ); assertEquals( golden, buffer.toString( ) ); } static protected class TextTemplateWriter implements TextTemplate.Visitor { static void write( TextTemplate template, StringBuffer buffer ) { TextTemplate.Visitor visitor = new TextTemplateWriter( ); Iterator iter = template.getNodes( ).iterator( ); while ( iter.hasNext( ) ) { 
TextTemplate.Node node = (TextTemplate.Node) iter.next( ); node.accept( visitor, buffer ); } } public Object visitText( TextNode node, Object value ) { StringBuffer buffer = (StringBuffer) value; buffer.append( "<text>" ); buffer.append( node.getContent( ) ); buffer.append( "</text>" ); return buffer; } public Object visitValue( ValueNode node, Object value ) { StringBuffer buffer = (StringBuffer) value; buffer.append( "<value-of" ); if ( node.getFormat( ) != null ) { buffer.append( "format='" ); buffer.append( node.getFormat( ) ); buffer.append( "'" ); } buffer.append( ">" ); buffer.append( node.getValue( ) ); buffer.append( "</value-of>" ); return buffer; } public Object visitImage( ImageNode image, Object value ) { StringBuffer buffer = (StringBuffer) value; buffer.append( "<image" ); String name = image.getImageName( ); if ( name != null ) { buffer.append( " name=\"" ); buffer.append( name ); buffer.append( "\"" ); } buffer.append( ">" ); String expr = image.getExpr( ); if ( expr != null ) { buffer.append( image.getExpr( ) ); } buffer.append( "</image>" ); return buffer; } public Object visitExpressionValue( ExpressionValueNode node, Object value ) { StringBuffer buffer = (StringBuffer) value; buffer.append( "<viewtime-value-of" ); if ( node.getFormat( ) != null ) { buffer.append( "format='" ); buffer.append( node.getFormat( ) ); buffer.append( "'" ); } buffer.append( ">" ); buffer.append( node.getValue( ) ); buffer.append( "</viewtime-value-of>" ); return buffer; } } }
package javax.time.calendrical; import static javax.time.calendrical.LocalPeriodUnit.DAYS; import static javax.time.calendrical.LocalPeriodUnit.ERAS; import static javax.time.calendrical.LocalPeriodUnit.FOREVER; import static javax.time.calendrical.LocalPeriodUnit.HALF_DAYS; import static javax.time.calendrical.LocalPeriodUnit.HOURS; import static javax.time.calendrical.LocalPeriodUnit.MICROS; import static javax.time.calendrical.LocalPeriodUnit.MILLIS; import static javax.time.calendrical.LocalPeriodUnit.MINUTES; import static javax.time.calendrical.LocalPeriodUnit.MONTHS; import static javax.time.calendrical.LocalPeriodUnit.NANOS; import static javax.time.calendrical.LocalPeriodUnit.SECONDS; import static javax.time.calendrical.LocalPeriodUnit.WEEKS; import static javax.time.calendrical.LocalPeriodUnit.WEEK_BASED_YEARS; import static javax.time.calendrical.LocalPeriodUnit.YEARS; import javax.time.DateTimeConstants; import javax.time.DayOfWeek; import javax.time.Instant; import javax.time.ZoneOffset; /** * A standard set of fields. * <p> * This set of fields provide field-based access to manipulate a date, time or date-time. * The standard set of fields can be extended by implementing {@link DateTimeField}. * * <h4>Implementation notes</h4> * This is a final, immutable and thread-safe enum. */ public enum LocalDateTimeField implements DateTimeField { /** * The nano-of-second. * <p> * This counts the nanosecond within the second, from 0 to 999,999,999. * This field has the same meaning for all calendar systems. */ NANO_OF_SECOND("NanoOfSecond", NANOS, SECONDS, DateTimeValueRange.of(0, 999_999_999)), /** * The nano-of-day. * <p> * This counts the nanosecond within the day, from 0 to (24 * 60 * 60 * 1,000,000,000) - 1. * This field has the same meaning for all calendar systems. */ NANO_OF_DAY("NanoOfDay", NANOS, DAYS, DateTimeValueRange.of(0, 86400L * 1000_000_000L - 1)), /** * The micro-of-second. 
* <p> * This counts the microsecond within the second, from 0 to 999,999. * This field has the same meaning for all calendar systems. */ MICRO_OF_SECOND("MicroOfSecond", MICROS, SECONDS, DateTimeValueRange.of(0, 999_999)), /** * The micro-of-day. * <p> * This counts the microsecond within the day, from 0 to (24 * 60 * 60 * 1,000,000) - 1. * This field has the same meaning for all calendar systems. */ MICRO_OF_DAY("MicroOfDay", MICROS, DAYS, DateTimeValueRange.of(0, 86400L * 1000_000L - 1)), /** * The milli-of-second. * <p> * This counts the millisecond within the second, from 0 to 999. * This field has the same meaning for all calendar systems. */ MILLI_OF_SECOND("MilliOfSecond", MILLIS, SECONDS, DateTimeValueRange.of(0, 999)), /** * The milli-of-day. * <p> * This counts the millisecond within the day, from 0 to (24 * 60 * 60 * 1,000) - 1. * This field has the same meaning for all calendar systems. */ MILLI_OF_DAY("MilliOfDay", MILLIS, DAYS, DateTimeValueRange.of(0, 86400L * 1000L - 1)), /** * The second-of-minute. * <p> * This counts the second within the minute, from 0 to 59. * This field has the same meaning for all calendar systems. */ SECOND_OF_MINUTE("SecondOfMinute", SECONDS, MINUTES, DateTimeValueRange.of(0, 59)), /** * The second-of-day. * <p> * This counts the second within the day, from 0 to (24 * 60 * 60) - 1. * This field has the same meaning for all calendar systems. */ SECOND_OF_DAY("SecondOfDay", SECONDS, DAYS, DateTimeValueRange.of(0, 86400L - 1)), /** * The minute-of-hour. * <p> * This counts the minute within the hour, from 0 to 59. * This field has the same meaning for all calendar systems. */ MINUTE_OF_HOUR("MinuteOfHour", MINUTES, HOURS, DateTimeValueRange.of(0, 59)), /** * The minute-of-day. * <p> * This counts the minute within the day, from 0 to (24 * 60) - 1. * This field has the same meaning for all calendar systems. */ MINUTE_OF_DAY("MinuteOfDay", MINUTES, DAYS, DateTimeValueRange.of(0, (24 * 60) - 1)), /** * The hour-of-am-pm. 
* <p> * This counts the hour within the AM/PM, from 0 to 11. * This is the hour that would be observed on a standard 12-hour digital clock. * This field has the same meaning for all calendar systems. */ HOUR_OF_AMPM("HourOfAmPm", HOURS, HALF_DAYS, DateTimeValueRange.of(0, 11)), /** * The clock-hour-of-am-pm. * <p> * This counts the hour within the AM/PM, from 1 to 12. * This is the hour that would be observed on a standard 12-hour analog wall clock. * This field has the same meaning for all calendar systems. */ CLOCK_HOUR_OF_AMPM("ClockHourOfAmPm", HOURS, HALF_DAYS, DateTimeValueRange.of(1, 12)), /** * The hour-of-day. * <p> * This counts the hour within the day, from 0 to 23. * This is the hour that would be observed on a standard 24-hour digital clock. * This field has the same meaning for all calendar systems. */ HOUR_OF_DAY("HourOfDay", HOURS, DAYS, DateTimeValueRange.of(0, 23)), /** * The clock-hour-of-day. * <p> * This counts the hour within the AM/PM, from 1 to 24. * This is the hour that would be observed on a 24-hour analog wall clock. * This field has the same meaning for all calendar systems. */ CLOCK_HOUR_OF_DAY("ClockHourOfDay", HOURS, DAYS, DateTimeValueRange.of(1, 24)), /** * The am-pm-of-day. * <p> * This counts the AM/PM within the day, from 0 (AM) to 1 (PM). * This field has the same meaning for all calendar systems. */ AMPM_OF_DAY("AmPmOfDay", HALF_DAYS, DAYS, DateTimeValueRange.of(0, 1)), /** * The day-of-week, such as Tuesday. * <p> * This represents the standard concept of the day of the week. * In the default ISO calendar system, this has values from Monday (1) to Sunday (7). * The {@link DayOfWeek} class can be used to interpret the result. * <p> * Most non-ISO calendar systems also define a seven day week that aligns with ISO. * Those calendar systems must also use the same numbering system, from Monday (1) to * Sunday (7), which allows {@code DayOfWeek} to be used. 
 * <p>
 * Calendar systems that do not have a standard seven day week should implement this field
 * if they have a similar concept of named or numbered days within a period similar
 * to a week. It is recommended that the numbering starts from 1.
 */
DAY_OF_WEEK("DayOfWeek", DAYS, WEEKS, DateTimeValueRange.of(1, 7)),
/**
 * The aligned day-of-week within a month.
 * <p>
 * This represents the concept of the count of days within the period of a week
 * where the weeks are aligned to the start of the month.
 * This field is typically used with {@link #ALIGNED_WEEK_OF_MONTH}.
 * <p>
 * For example, in calendar systems with a seven day week, the first aligned-week-of-month
 * starts on day-of-month 1, the second aligned-week starts on day-of-month 8, and so on.
 * Within each of these aligned-weeks, the days are numbered from 1 to 7 and returned
 * as the value of this field.
 * As such, day-of-month 1 to 7 will have aligned-day-of-week values from 1 to 7.
 * And day-of-month 8 to 14 will repeat this with aligned-day-of-week values from 1 to 7.
 * <p>
 * Calendar systems that do not have a seven day week should typically implement this
 * field in the same way, but using the alternate week length.
 */
ALIGNED_DAY_OF_WEEK_IN_MONTH("AlignedDayOfWeekInMonth", DAYS, WEEKS, DateTimeValueRange.of(1, 7)),
/**
 * The aligned day-of-week within a year.
 * <p>
 * This represents the concept of the count of days within the period of a week
 * where the weeks are aligned to the start of the year.
 * This field is typically used with {@link #ALIGNED_WEEK_OF_YEAR}.
 * <p>
 * For example, in calendar systems with a seven day week, the first aligned-week-of-year
 * starts on day-of-year 1, the second aligned-week starts on day-of-year 8, and so on.
 * Within each of these aligned-weeks, the days are numbered from 1 to 7 and returned
 * as the value of this field.
 * As such, day-of-year 1 to 7 will have aligned-day-of-week values from 1 to 7.
 * And day-of-year 8 to 14 will repeat this with aligned-day-of-week values from 1 to 7.
 * <p>
 * Calendar systems that do not have a seven day week should typically implement this
 * field in the same way, but using the alternate week length.
 */
ALIGNED_DAY_OF_WEEK_IN_YEAR("AlignedDayOfWeekInYear", DAYS, WEEKS, DateTimeValueRange.of(1, 7)),
/**
 * The day-of-month.
 * <p>
 * This represents the concept of the day within the month.
 * In the default ISO calendar system, this has values from 1 to 31 in most months.
 * April, June, September, November have days from 1 to 30, while February has days
 * from 1 to 28, or 29 in a leap year.
 * <p>
 * Non-ISO calendar systems should implement this field using the most recognized
 * day-of-month values for users of the calendar system.
 * Normally, this is a count of days from 1 to the length of the month.
 */
DAY_OF_MONTH("DayOfMonth", DAYS, MONTHS, DateTimeValueRange.of(1, 28, 31)),
/**
 * The day-of-year.
 * <p>
 * This represents the concept of the day within the year.
 * In the default ISO calendar system, this has values from 1 to 365 in standard
 * years and 1 to 366 in leap years.
 * <p>
 * Non-ISO calendar systems should implement this field using the most recognized
 * day-of-year values for users of the calendar system.
 * Normally, this is a count of days from 1 to the length of the year.
 */
DAY_OF_YEAR("DayOfYear", DAYS, YEARS, DateTimeValueRange.of(1, 365, 366)),
/**
 * The epoch-day, based on the Java epoch of 1970-01-01 (ISO).
 * <p>
 * This field is the sequential count of days where 1970-01-01 (ISO) is zero.
 * Note that this uses the <i>local</i> time-line, ignoring offset and time-zone.
 * <p>
 * This field is strictly defined to have the same meaning in all calendar systems.
 * This is necessary to ensure interoperation between calendars.
 */
EPOCH_DAY("EpochDay", DAYS, FOREVER, DateTimeValueRange.of((long) (DateTimeConstants.MIN_YEAR * 365.25), (long) (DateTimeConstants.MAX_YEAR * 365.25))),
/**
 * The aligned week within a month.
 * <p>
 * This represents the concept of the count of weeks within the period of a month
 * where the weeks are aligned to the start of the month.
 * This field is typically used with {@link #ALIGNED_DAY_OF_WEEK_IN_MONTH}.
 * <p>
 * For example, in calendar systems with a seven day week, the first aligned-week-of-month
 * starts on day-of-month 1, the second aligned-week starts on day-of-month 8, and so on.
 * Thus, day-of-month values 1 to 7 are in aligned-week 1, while day-of-month values
 * 8 to 14 are in aligned-week 2, and so on.
 * <p>
 * Calendar systems that do not have a seven day week should typically implement this
 * field in the same way, but using the alternate week length.
 */
ALIGNED_WEEK_OF_MONTH("AlignedWeekOfMonth", WEEKS, MONTHS, DateTimeValueRange.of(1, 4, 5)),
/**
 * The week within a month.
 * <p>
 * This represents the concept of the count of weeks within the month where weeks
 * start on a fixed day-of-week, such as Monday.
 * This field is typically used with {@link #DAY_OF_WEEK}.
 * <p>
 * In the default ISO calendar system, the week starts on Monday and there must be at
 * least 4 days in the first week.
 * Week one is the week starting on a Monday where there are at least 4 days in the month.
 * Thus, week one may start up to three days before the start of the month.
 * If the first week starts after the start of the month then the period before is week zero.
 * <p>
 * For example:<br />
 * - if the 1st day of the month is a Monday, week one starts on the 1st and there is no week zero<br />
 * - if the 2nd day of the month is a Monday, week one starts on the 2nd and the 1st is in week zero<br />
 * - if the 4th day of the month is a Monday, week one starts on the 4th and the 1st to 3rd is in week zero<br />
 * - if the 5th day of the month is a Monday, week two starts on the 5th and the 1st to 4th is in week one<br />
 * <p>
 * Non-ISO calendar systems should implement this field in the same way, taking
 * into account any differences in week or month length.
 */
WEEK_OF_MONTH("WeekOfMonth", WEEKS, MONTHS, DateTimeValueRange.of(0, 1, 4, 5)),
/**
 * The week within a week-based-year.
 * <p>
 * This represents the concept of the count of weeks within a week-based-year.
 * This field is defined by ISO-8601 and based on a year, known as the week-based-year,
 * that always starts on Monday.
 * This field is typically used with {@link #DAY_OF_WEEK} and {@link #WEEK_BASED_YEAR}.
 * <p>
 * In the default ISO calendar system, the week starts on Monday and there must be at
 * least 4 days in the first week. With these definitions, the week-based-year can start up
 * to 3 days before or up to 3 days after the start of the standard year.
 * Thus, if the 1st day of the regular year is a Tuesday, then the week-based-year starts
 * on December 31st of the previous regular year. Similarly, if the 1st day of the regular
 * year is a Sunday, then the week-based-year starts on January 2nd.
 * Given this definition, the week of the week-based-year counts the week from one
 * to 52 or 53 within the week-based-year.
 * <p>
 * Non-ISO calendar systems should implement this field in the same way, taking
 * into account any differences in week or year length.
 */
WEEK_OF_WEEK_BASED_YEAR("WeekOfWeekBasedYear", WEEKS, WEEK_BASED_YEARS, DateTimeValueRange.of(1, 52, 53)),
/**
 * The aligned week within a year.
 * <p>
 * This represents the concept of the count of weeks within the period of a year
 * where the weeks are aligned to the start of the year.
 * This field is typically used with {@link #ALIGNED_DAY_OF_WEEK_IN_YEAR}.
 * <p>
 * For example, in calendar systems with a seven day week, the first aligned-week-of-year
 * starts on day-of-year 1, the second aligned-week starts on day-of-year 8, and so on.
 * Thus, day-of-year values 1 to 7 are in aligned-week 1, while day-of-year values
 * 8 to 14 are in aligned-week 2, and so on.
 * <p>
 * Calendar systems that do not have a seven day week should typically implement this
 * field in the same way, but using the alternate week length.
 */
ALIGNED_WEEK_OF_YEAR("AlignedWeekOfYear", WEEKS, YEARS, DateTimeValueRange.of(1, 53)),
/**
 * The week within a year.
 * <p>
 * This represents the concept of the count of weeks within the year where weeks
 * start on a fixed day-of-week, such as Monday.
 * This field is typically used with {@link #DAY_OF_WEEK}.
 * <p>
 * In the default ISO calendar system, the week starts on Monday and there must be at
 * least 4 days in the first week.
 * Week one is the week starting on a Monday where there are at least 4 days in the year.
 * Thus, week one may start up to three days before the start of the year.
 * If the first week starts after the start of the year then the period before is week zero.
 * <p>
 * For example:<br />
 * - if the 1st day of the year is a Monday, week one starts on the 1st and there is no week zero<br />
 * - if the 2nd day of the year is a Monday, week one starts on the 2nd and the 1st is in week zero<br />
 * - if the 4th day of the year is a Monday, week one starts on the 4th and the 1st to 3rd is in week zero<br />
 * - if the 5th day of the year is a Monday, week two starts on the 5th and the 1st to 4th is in week one<br />
 * <p>
 * Non-ISO calendar systems should implement this field in the same way, taking
 * into account any differences in week or year length.
 */
WEEK_OF_YEAR("WeekOfYear", WEEKS, YEARS, DateTimeValueRange.of(0, 1, 52, 53)),
/**
 * The month-of-year, such as March.
 * <p>
 * This represents the concept of the month within the year.
 * In the default ISO calendar system, this has values from January (1) to December (12).
 * <p>
 * Non-ISO calendar systems should implement this field using the most recognized
 * month-of-year values for users of the calendar system.
 * Normally, this is a count of months starting from 1.
 */
MONTH_OF_YEAR("MonthOfYear", MONTHS, YEARS, DateTimeValueRange.of(1, 12)),
/**
 * The epoch-month based on the Java epoch of 1970-01-01.
 * <p>
 * This field is the sequential count of months where January 1970 (ISO) is zero.
 * Note that this uses the <i>local</i> time-line, ignoring offset and time-zone.
 * <p>
 * Non-ISO calendar systems should also implement this field to represent a sequential
 * count of months. It is recommended to define zero as the month of 1970-01-01 (ISO).
 */
EPOCH_MONTH("EpochMonth", MONTHS, FOREVER, DateTimeValueRange.of((DateTimeConstants.MIN_YEAR - 1970L) * 12, (DateTimeConstants.MAX_YEAR - 1970L) * 12L - 1L)),
/**
 * The proleptic week-based-year.
 * <p>
 * This represents the concept of the week-based-year, counting sequentially using negative
 * numbers and not based on the era. This field is defined by ISO-8601, and numbers years
 * related to the standard ISO year, ensuring that the week-based-year always starts on Monday.
 * This field is typically used with {@link #DAY_OF_WEEK} and {@link #WEEK_OF_WEEK_BASED_YEAR}.
 * <p>
 * In the default ISO calendar system, the week starts on Monday and there must be at
 * least 4 days in the first week. With these definitions, the week-based-year can start up
 * to 3 days before or up to 3 days after the start of the standard year.
 * Similarly, the week-based-year can end before or after the end of the regular year.
 * Thus, if the 1st day of the regular year is a Tuesday, then the week-based-year starts
 * on December 31st of the previous regular year. Similarly, if the 1st day of the regular
 * year is a Sunday, then the week-based-year starts on January 2nd.
 * <p>
 * Non-ISO calendar systems should implement this field in the same way, taking
 * into account any differences in week or year length.
 */
WEEK_BASED_YEAR("WeekBasedYear", WEEK_BASED_YEARS, FOREVER, DateTimeValueRange.of(DateTimeConstants.MIN_YEAR, DateTimeConstants.MAX_YEAR)),
/**
 * The year within the era.
 * <p>
 * This represents the concept of the year within the era.
 * This field is typically used with {@link #ERA}.
 * <p>
 * In the default ISO calendar system, there are two eras defined, 'BCE' and 'CE'.
 * The era 'CE' is the one currently in use and year-of-era runs from 1 to the maximum value.
 * The era 'BCE' is the previous era, and the year-of-era runs backwards.
 * <p>
 * For example, subtracting a year each time yields the following:<br />
 * - year-proleptic 2 = 'CE' year-of-era 2<br />
 * - year-proleptic 1 = 'CE' year-of-era 1<br />
 * - year-proleptic 0 = 'BCE' year-of-era 1<br />
 * - year-proleptic -1 = 'BCE' year-of-era 2<br />
 * <p>
 * Note that the ISO-8601 standard does not actually define eras.
 * Note also that the ISO eras do not align with the well-known AD/BC eras due to the
 * change between the Julian and Gregorian calendar systems.
 * <p>
 * Non-ISO calendar systems should implement this field using the most recognized
 * year-of-era value for users of the calendar system.
 * Since most calendar systems have only two eras, the year-of-era numbering approach
 * will typically be the same as that used by the ISO calendar system.
 * The year-of-era value should typically always be positive, however this is not required.
 */
YEAR_OF_ERA("YearOfEra", YEARS, FOREVER, DateTimeValueRange.of(1, DateTimeConstants.MAX_YEAR, DateTimeConstants.MAX_YEAR + 1)),
/**
 * The proleptic year, such as 2012.
 * <p>
 * This represents the concept of the year, counting sequentially and using negative numbers.
 * The proleptic year is not interpreted in terms of the era.
 * See {@link #YEAR_OF_ERA} for an example showing the mapping from proleptic year to year-of-era.
 * <p>
 * This field should be used in preference to {@code YEAR_OF_ERA} when working exclusively
 * with the ISO calendar system. This is because era is not a concept in daily use, thus it
 * is a concept liable to be forgotten in calculations.
 * <p>
 * Non-ISO calendar systems should implement this field as follows.
 * If the calendar system has only two eras, before and after a fixed date, then the
 * proleptic-year value must be the same as the year-of-era value for the later era,
 * and increasingly negative for the earlier era.
 * If the calendar system has more than two eras, then the proleptic-year value may be
 * defined with any appropriate value, although defining it to be the same as ISO may be
 * the best option.
 */
YEAR("Year", YEARS, FOREVER, DateTimeValueRange.of(DateTimeConstants.MIN_YEAR, DateTimeConstants.MAX_YEAR)),
/**
 * The era.
 * <p>
 * This represents the concept of the era, which is the largest division of the time-line.
 * This field is typically used with {@link #YEAR_OF_ERA}.
 * <p>
 * In the default ISO calendar system, there are two eras defined, 'BCE' and 'CE'.
 * The era 'CE' is the one currently in use and year-of-era runs from 1 to the maximum value.
 * The era 'BCE' is the previous era, and the year-of-era runs backwards.
 * See {@link #YEAR_OF_ERA} for a full example.
 * <p>
 * Non-ISO calendar systems should implement this field to define eras.
 * The value of the era that was active on 1970-01-01 (ISO) must be assigned the value 1.
 * Earlier eras must have sequentially smaller values.
 * Later eras must have sequentially larger values.
 */
ERA("Era", ERAS, FOREVER, DateTimeValueRange.of(0, 1)),
/**
 * The instant epoch-seconds.
 * <p>
 * This represents the concept of the sequential count of seconds where
 * 1970-01-01T00:00Z (ISO) is zero.
 * This field may be used with {@link #NANO_OF_DAY} to represent the fraction of the day.
 * <p>
 * An {@link Instant} represents an instantaneous point on the time-line.
 * On their own they have no elements which allow a local date-time to be obtained.
 * Only when paired with an offset or time-zone can the local date or time be found.
 * This field allows the seconds part of the instant to be queried.
 * <p>
 * This field is strictly defined to have the same meaning in all calendar systems.
 * This is necessary to ensure interoperation between calendars.
 */
INSTANT_SECONDS("InstantSeconds", SECONDS, FOREVER, DateTimeValueRange.of(Long.MIN_VALUE, Long.MAX_VALUE)),
/**
 * The offset from UTC/Greenwich.
 * <p>
 * This represents the concept of the offset in seconds of local time from UTC/Greenwich.
 * <p>
 * A {@link ZoneOffset} represents the period of time that local time differs from UTC/Greenwich.
 * This is usually a fixed number of hours and minutes.
 * It is equivalent to the {@link ZoneOffset#getTotalSeconds() total amount} of the offset in seconds.
 * For example, during the winter Paris has an offset of {@code +01:00}, which is 3600 seconds.
 * <p>
 * This field is strictly defined to have the same meaning in all calendar systems.
 * This is necessary to ensure interoperation between calendars.
 */
OFFSET_SECONDS("OffsetSeconds", SECONDS, FOREVER, DateTimeValueRange.of(-18 * 3600, 18 * 3600));

// Descriptive name of the field, returned by getName()/toString().
private final String name;
// The unit the field is measured in, e.g. DAYS for DAY_OF_WEEK.
private final PeriodUnit baseUnit;
// The unit the field is bound by, e.g. WEEKS for DAY_OF_WEEK.
private final PeriodUnit rangeUnit;
// The outer (maximal) range of valid values for this field.
private final DateTimeValueRange range;

/**
 * Constructor for the enum constants above.
 *
 * @param name  the descriptive name of the field
 * @param baseUnit  the unit the field is measured in
 * @param rangeUnit  the unit the field is bound by
 * @param range  the outer range of valid values
 */
private LocalDateTimeField(String name, PeriodUnit baseUnit, PeriodUnit rangeUnit, DateTimeValueRange range) {
    this.name = name;
    this.baseUnit = baseUnit;
    this.rangeUnit = rangeUnit;
    this.range = range;
}

@Override
public String getName() {
    return name;
}

@Override
public PeriodUnit getBaseUnit() {
    return baseUnit;
}

@Override
public PeriodUnit getRangeUnit() {
    return rangeUnit;
}

@Override
public DateTimeValueRange range() {
    return range;
}

/**
 * Checks if this field represents a component of a date.
 * <p>
 * Note: relies on the declaration order of the constants — the date fields
 * run from {@code DAY_OF_WEEK} to {@code ERA} inclusive.
 *
 * @return true if it is a component of a date
 */
public boolean isDateField() {
    return ordinal() >= DAY_OF_WEEK.ordinal() && ordinal() <= ERA.ordinal();
}

/**
 * Checks if this field represents a component of a time.
 * <p>
 * Note: relies on the declaration order of the constants — every constant
 * declared before {@code DAY_OF_WEEK} is treated as a time field.
 *
 * @return true if it is a component of a time
 */
public boolean isTimeField() {
    return ordinal() < DAY_OF_WEEK.ordinal();
}

/**
 * Checks that the specified value is valid for this field.
 * <p>
 * This validates that the value is within the outer range of valid values
 * returned by {@link #range()}.
 *
 * @param value  the value to check
 * @return the value that was passed in
 */
public long checkValidValue(long value) {  // JAVA8 default method on interface
    return range().checkValidValue(value, this);
}

/**
 * Checks that the specified value is valid and fits in an {@code int}.
 * <p>
 * This validates that the value is within the outer range of valid values
 * returned by {@link #range()}.
 * It also checks that all valid values are within the bounds of an {@code int}.
 *
 * @param value  the value to check
 * @return the value that was passed in
 */
public int checkValidIntValue(long value) {  // JAVA8 default method on interface
    return range().checkValidIntValue(value, this);
}

// Orders two accessors by the value each reports for this field.
@Override
public int compare(DateTimeAccessor dateTime1, DateTimeAccessor dateTime2) {
    return Long.compare(dateTime1.getLong(this), dateTime2.getLong(this));
}

// Delegates the support check to the accessor itself.
@Override
public boolean doIsSupported(DateTimeAccessor dateTime) {
    return dateTime.isSupported(this);
}

// Delegates range resolution to the accessor (may be narrower than range()).
@Override
public DateTimeValueRange doRange(DateTimeAccessor dateTime) {
    return dateTime.range(this);
}

// Delegates value extraction to the accessor.
@Override
public long doGet(DateTimeAccessor dateTime) {
    return dateTime.getLong(this);
}

// NOTE(review): unchecked cast — assumes with(field, value) returns the
// receiver's own concrete type; TODO confirm against the DateTimeAccessor contract.
@Override
public <R extends DateTimeAccessor> R doSet(R dateTime, long newValue) {
    return (R) dateTime.with(this, newValue);
}

@Override
public boolean resolve(DateTimeBuilder builder, long value) {
    return false;  // resolve implemented in builder
}

@Override
public String toString() {
    return getName();
}

}
package org.epics.css.dal.epics; import gov.aps.jca.CAException; import gov.aps.jca.Channel; import gov.aps.jca.Context; import gov.aps.jca.JCALibrary; import gov.aps.jca.TimeoutException; import gov.aps.jca.configuration.DefaultConfiguration; import gov.aps.jca.dbr.DBRType; import gov.aps.jca.event.ConnectionEvent; import gov.aps.jca.event.ConnectionListener; import gov.aps.jca.event.ContextExceptionEvent; import gov.aps.jca.event.ContextExceptionListener; import gov.aps.jca.event.ContextMessageEvent; import gov.aps.jca.event.ContextMessageListener; import gov.aps.jca.event.ContextVirtualCircuitExceptionEvent; import gov.aps.jca.event.QueuedEventDispatcher; import gov.aps.jca.jni.ThreadSafeContext; import java.util.Iterator; import java.util.Properties; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import javax.naming.NamingException; import javax.naming.directory.DirContext; import org.csstudio.platform.logging.CentralLogger; import org.epics.css.dal.EventSystemListener; import org.epics.css.dal.RemoteException; import org.epics.css.dal.SimpleProperty; import org.epics.css.dal.Timestamp; import org.epics.css.dal.context.AbstractApplicationContext; import org.epics.css.dal.context.ConnectionException; import org.epics.css.dal.context.PlugEvent; import org.epics.css.dal.context.RemoteInfo; import org.epics.css.dal.device.AbstractDevice; import org.epics.css.dal.impl.PropertyUtilities; import org.epics.css.dal.proxy.AbstractPlug; import org.epics.css.dal.proxy.DeviceProxy; import org.epics.css.dal.proxy.DirectoryProxy; import org.epics.css.dal.proxy.PropertyProxy; import org.epics.css.dal.spi.Plugs; import com.cosylab.epics.caj.CAJContext; /** * Implementation of EPICS plugin. 
* * @author ikriznar */ public class EPICSPlug extends AbstractPlug implements ContextMessageListener, ContextExceptionListener { /** * Wrapper class of <code>Runnable</code> to <code>TimerTask</code>. */ class ScheduledTask extends TimerTask { private Runnable r; public ScheduledTask(Runnable r) { this.r = r; } public void run() { try { r.run(); } catch (Throwable th) { th.printStackTrace(); } } } /** * Plug type string. */ public static final String PLUG_TYPE = "EPICS"; /** * Plug scheme suffix. */ public static final String SCHEME_SUFFIX = "EPICS"; /** * Default authority. */ public static final String DEFAULT_AUTHORITY = "DEFAULT"; /** * Property name for JCA context type flag. * If <code>false</code> or not defined then by default CAJ instance of JCA context is used. * If value set to <code>true</code> in System properties or in configuration properties, then JNI (thread safe) * instance of JCA context is used. * Property defined in System properties take precedence before property in defined in configuration. */ public static final String USE_JNI = "EPICSPlug.use_jni"; /** * Property name for use common executor flag: {@link #useCommonExecutor} */ public static final String PROPERTY_USE_COMMON_EXECUTOR = "EPICSPlug.property.use_common_executor"; /** * Property name for core threads property: {@link #coreThreads} * <p> * The number of core threads must be non-negative. * </p> */ public static final String PROPERTY_CORE_THREADS = "EPICSPlug.property.core_threads"; /** * Property name for max threads property: {@link #maxThreads} * <p> * The number of core threads must be non-negative and greater than the number of core threads. * </p> */ public static final String PROPERTY_MAX_THREADS = "EPICSPlug.property.max_threads"; /** * Defines if a common <code>Executor</code> from this <code>EPICSPlug</code> should be used instead of * individual <code>Executor<code>s in <code>PropertyProxyImpl</code>s. 
* * @see PropertyProxyImpl */ private boolean useCommonExecutor; /** * Defines the number of core threads to be used with <code>ThreadPoolExecutor</code> from this * <code>EPICSPlug</code> or <code>PropertyProxyImpl</code>. * * @see PropertyProxyImpl */ private int coreThreads; /** * Defines the maximum number of threads to be used with <code>ThreadPoolExecutor</code> from this * <code>EPICSPlug</code> or <code>PropertyProxyImpl</code>. * * @see PropertyProxyImpl */ private int maxThreads; /** * Timer instance (used for on-time monitors). */ private Timer timer; /** * PendIO timeout. * TODO to be configurable */ private double timeout = 5.0; /** * Context. */ private Context context; private static EPICSPlug sharedInstance; /** * <code>ThreadPoolExecutor</code> used by this <code>EPICSPlug</code> if {@link #useCommonExecutor} * is selected. */ private ThreadPoolExecutor executor; /** * Create EPICS plug instance. * @param configuration * @throws RemoteException */ private EPICSPlug(Properties configuration) throws RemoteException { super(configuration); initialize(); } private EPICSPlug(AbstractApplicationContext context) throws RemoteException { super(context); initialize(); } /** * Create new EPICS plug instance. * @param configuration * @return * @throws Exception */ public static synchronized AbstractPlug getInstance(Properties configuration) throws Exception { if (sharedInstance == null) { sharedInstance = new EPICSPlug(configuration); } return sharedInstance; } public static AbstractPlug getInstance(AbstractApplicationContext ctx) throws RemoteException { return new EPICSPlug(ctx); } /* (non-Javadoc) * @see org.epics.css.dal.proxy.AbstractPlug#releaseInstance() */ public synchronized void releaseInstance() throws Exception { if (executor!=null) { // TODO is this OK? 
getExecutor().shutdown(); try { if (!getExecutor().awaitTermination(1, TimeUnit.SECONDS)) getExecutor().shutdownNow(); } catch (InterruptedException ie) { } } if (context!=null) { if (!cachedPropertyProxiesIterator().hasNext()) { context.destroy(); context=null; } } if (sharedInstance==this) { sharedInstance=null; } } /** * Initialize EPICS plug. * @throws RemoteException */ private void initialize() throws RemoteException { useCommonExecutor = false; if (System.getProperties().containsKey(PROPERTY_USE_COMMON_EXECUTOR)) { useCommonExecutor = new Boolean(System.getProperty(PROPERTY_USE_COMMON_EXECUTOR, "false")); } else { useCommonExecutor = new Boolean(getConfiguration().getProperty(PROPERTY_USE_COMMON_EXECUTOR, "false")); } coreThreads = 2; if (System.getProperties().containsKey(PROPERTY_CORE_THREADS)) { coreThreads = new Integer(System.getProperty(PROPERTY_CORE_THREADS, "2")); } else { coreThreads = new Integer(getConfiguration().getProperty(PROPERTY_CORE_THREADS, "2")); } maxThreads = 10; if (System.getProperties().containsKey(PROPERTY_MAX_THREADS)) { maxThreads = new Integer(System.getProperty(PROPERTY_MAX_THREADS, "10")); } else { maxThreads = new Integer(getConfiguration().getProperty(PROPERTY_MAX_THREADS, "10")); } // checks for coreThreads and maxThreads values if (maxThreads == 0) { if (coreThreads != 0) { System.out.print("> EPICSPlug number of core threads can not be "+coreThreads+". It was changed to "); coreThreads = 0; System.out.println(coreThreads+"."); } } else { if (coreThreads < 1) { System.out.print("> EPICSPlug number of core threads can not be "+coreThreads+". It was changed to "); coreThreads = 1; System.out.println(coreThreads+"."); } if (maxThreads < 0 || maxThreads < coreThreads) { System.out.print("> EPICSPlug maximum number of threads can not be "+maxThreads+". 
It was changed to "); maxThreads = coreThreads; System.out.println(maxThreads+"."); } } boolean use_jni=false; if (System.getProperties().containsKey(USE_JNI)) { use_jni = new Boolean(System.getProperty(USE_JNI, "false")); } else { use_jni = new Boolean(getConfiguration().getProperty(USE_JNI, "false")); } CentralLogger.getInstance().debug(this, "pure java: " + !use_jni); if (!use_jni) { context = createJCAContext(); } else { context = createThreadSafeContext(); } // initialize supported proxy implementation PlugUtilities.initializeSupportedProxyImplementations(this); timeout= Plugs.getConnectionTimeout(getConfiguration(), 10000)/1000.0; } /** * Timer lazy initialization pattern. * @return timer instance. */ private synchronized Timer getTimer() { if (timer == null) timer = new Timer("SimulatorPlugTimer"); return timer; } /** * Schedule task for execution. * @param r ask to be scheduled. * @param delay delay in milliseconds before task is to be executed. * @param rate reschedule perion, if <code>0</code> periodic rescheduling is disabled. * @return <code>TimerTask</code> instance, used to cancel the task scheduling. */ public TimerTask schedule(Runnable r, long delay, long rate) { ScheduledTask t = new ScheduledTask(r); if (rate > 0) { getTimer().scheduleAtFixedRate(t, delay, rate); } else { getTimer().schedule(t, delay); } return t; } /** * @see org.epics.css.dal.proxy.AbstractPlug#getDeviceImplementationClass(java.lang.String) */ @Override protected Class<? extends AbstractDevice> getDeviceImplementationClass(String uniqueDeviceName) { throw new UnsupportedOperationException("Devices not supported"); } /** * @see org.epics.css.dal.proxy.AbstractPlug#getDeviceProxyImplementationClass(java.lang.String) */ @Override protected Class<? 
extends DeviceProxy> getDeviceProxyImplementationClass(String uniqueDeviceName) { throw new UnsupportedOperationException("Devices not supported"); } /* * @see org.epics.css.dal.proxy.AbstractPlug#getPropertyImplementationClass(java.lang.String) */ @Override public Class<? extends SimpleProperty<?>> getPropertyImplementationClass(String propertyName) { class ConnectionListenerImpl implements ConnectionListener { /* * @see gov.aps.jca.event.ConnectionListener#connectionChanged(gov.aps.jca.event.ConnectionEvent) */ public synchronized void connectionChanged(ConnectionEvent event) { this.notifyAll(); } } // create channel Channel channel = null; ConnectionListenerImpl listener = new ConnectionListenerImpl(); try { synchronized (listener) { channel = this.getContext().createChannel(propertyName, listener); listener.wait((long)(timeout*1000)); } // if not connected this will throw exception DBRType type = channel.getFieldType(); int elementCount = channel.getElementCount(); return PlugUtilities.getPropertyImplForDBRType(type, elementCount); } catch (Throwable th) { throw new RuntimeException("Failed create CA channel tqo determine channel type.", th); } finally { if (channel != null && channel.getConnectionState() != Channel.CLOSED) channel.dispose(); } } /* * @see org.epics.css.dal.proxy.AbstractPlug#getPropertyImplementationClass(java.lang.Class) */ @Override public Class<? extends SimpleProperty<?>> getPropertyImplementationClass(Class<? extends SimpleProperty<?>> type, String propertyName) throws RemoteException { if (type != null) return PropertyUtilities.getImplementationClass(type); else return getPropertyImplementationClass(propertyName); //return super.getPropertyImplementationClass(type, propertyName); } /* * @see org.epics.css.dal.proxy.AbstractPlug#getPropertyProxyImplementationClass(java.lang.String) */ @Override public Class<? 
extends PropertyProxy<?>> getPropertyProxyImplementationClass(String propertyName) { throw new RuntimeException("Unsupported property type."); } /* * @see org.epics.css.dal.proxy.AbstractPlug#createNewPropertyProxy(java.lang.String, java.lang.Class) */ protected <TT extends PropertyProxy<?>> TT createNewPropertyProxy( String uniqueName, Class<TT> type) throws ConnectionException { try { PropertyProxy p = type.getConstructor(EPICSPlug.class, String.class).newInstance(this, uniqueName); // add to directory cache if (p instanceof DirectoryProxy) putDirectoryProxyToCache((DirectoryProxy) p); return type.cast(p); } catch (Exception e) { throw new ConnectionException(this, "Failed to instantiate property proxy '" + uniqueName + "' for type '" + type.getName() + "'.", e); } } /* * @see org.epics.css.dal.proxy.AbstractPlug#getPlugType() */ public String getPlugType() { return "EPICS"; } /* * @see org.epics.css.dal.proxy.AbstractPlug#createNewDirectoryProxy(java.lang.String) */ protected DirectoryProxy createNewDirectoryProxy(String uniqueName) throws ConnectionException { // directory is already added to cache in createNewPropertyProxy method throw new RuntimeException("Error in factory implementation, PropertyProxy must be created first."); } /* * @see org.epics.css.dal.proxy.AbstractPlug#createNewDeviceProxy(java.lang.String, java.lang.Class) */ protected <T extends DeviceProxy> T createNewDeviceProxy(String uniqueName, Class<T> type) throws ConnectionException { throw new UnsupportedOperationException("Devices not supported"); } /** * @see org.epics.css.dal.context.PlugContext#createRemoteInfo(java.lang.String) */ public RemoteInfo createRemoteInfo(String uniqueName) throws NamingException { return new RemoteInfo(uniqueName, DEFAULT_AUTHORITY, PLUG_TYPE); } /** * @see org.epics.css.dal.context.PlugContext#getDefaultDirectory() */ public DirContext getDefaultDirectory() { // TODO implement return null; } /* * @see Context.flushIO(double) */ public void flushIO() { try { 
// CAJ will take care of optimization getContext().flushIO(); } catch (Throwable th) { th.printStackTrace(); } } /* * @see Context.pendIO(double) */ public void pendIO() throws CAException, TimeoutException, RemoteException { getContext().pendIO(timeout); } /* * @see Context */ public synchronized Context getContext() { return context; } private CAJContext createJCAContext() throws RemoteException { try { DefaultConfiguration edconf = new DefaultConfiguration("event_dispatcher"); edconf.setAttribute("class", QueuedEventDispatcher.class.getName()); DefaultConfiguration config = new DefaultConfiguration("EPICSPlugConfig"); config.setAttribute("class", JCALibrary.CHANNEL_ACCESS_JAVA); config.addChild(edconf); // create context CAJContext c= (CAJContext)JCALibrary.getInstance().createContext(config); // force explicit initialization c.initialize(); // register all context listeners c.addContextExceptionListener(this); c.addContextMessageListener(this); return c; } catch (Throwable th) { th.printStackTrace(); // rethrow to abort EPICS plug instance creation throw new RemoteException(this,"Failed to initilze EPICS plug", th); } } private ThreadSafeContext createThreadSafeContext() throws RemoteException { try { DefaultConfiguration edconf = new DefaultConfiguration("event_dispatcher"); edconf.setAttribute("class", QueuedEventDispatcher.class.getName()); DefaultConfiguration config = new DefaultConfiguration("EPICSPlugConfig"); config.setAttribute("class", JCALibrary.JNI_THREAD_SAFE); config.addChild(edconf); // create context ThreadSafeContext c= (ThreadSafeContext)JCALibrary.getInstance().createContext(config); // force explicit initialization c.initialize(); // register all context listeners c.addContextExceptionListener(this); c.addContextMessageListener(this); return c; } catch (Throwable th) { th.printStackTrace(); // rethrow to abort EPICS plug instance creation throw new RemoteException(this,"Failed to initilze EPICS plug", th); } } /** * Get timeout parameter (in 
seconds). * @return timeout (in seconds) */ public double getTimeout() { return timeout; } /** * Gets {@link #useCommonExecutor} property. * @return <code>true</code> if common executor should be used and <code>false</code> otherwise. */ public boolean isUseCommonExecutor() { return useCommonExecutor; } /** * Gets {@link #coreThreads} property. * @return the number of core threads. */ public int getCoreThreads() { return coreThreads; } /** * Gets {@link #maxThreads} property. * @return the maximum number of threads. */ public int getMaxThreads() { return maxThreads; } public ThreadPoolExecutor getExecutor() { if (executor==null) { synchronized (this) { if (!useCommonExecutor) throw new IllegalStateException("EPICSPlug is configured not to use a common executor."); if (maxThreads == 0) throw new IllegalStateException("Maximum number of threads must be greater than 0."); if (executor==null) { // executor= new ThreadPoolExecutor(coreThreads,maxThreads,Long.MAX_VALUE, TimeUnit.NANOSECONDS, // new ArrayBlockingQueue<Runnable>(maxThreads)); executor= new ThreadPoolExecutor(coreThreads,maxThreads,Long.MAX_VALUE, TimeUnit.NANOSECONDS, new ArrayBlockingQueue<Runnable>(100000)); executor.prestartAllCoreThreads(); } } } return executor; } /* (non-Javadoc) * @see gov.aps.jca.event.ContextExceptionListener#contextException(gov.aps.jca.event.ContextExceptionEvent) */ @SuppressWarnings("unchecked") public void contextException(ContextExceptionEvent ev) { if (plugListeners == null) return; synchronized (plugListeners) { if (plugListeners.isEmpty()) return; PlugEvent<ContextExceptionEvent> event = new PlugEvent<ContextExceptionEvent>(this, ev, new Timestamp(), "Context exception", null, ContextExceptionEvent.class); Iterator<EventSystemListener<PlugEvent>> iter = plugListeners.iterator(); while (iter.hasNext()) { iter.next().errorArrived(event); } } } /* (non-Javadoc) * @see 
gov.aps.jca.event.ContextExceptionListener#contextVirtualCircuitException(gov.aps.jca.event.ContextVirtualCircuitExceptionEvent) */ @SuppressWarnings("unchecked") public void contextVirtualCircuitException(ContextVirtualCircuitExceptionEvent ev) { if (plugListeners == null) return; synchronized (plugListeners) { if (plugListeners.isEmpty()) return; PlugEvent<ContextVirtualCircuitExceptionEvent> event = new PlugEvent<ContextVirtualCircuitExceptionEvent>(this, ev, new Timestamp(), "Context virtual circuit exception", null, ContextVirtualCircuitExceptionEvent.class); Iterator<EventSystemListener<PlugEvent>> iter = plugListeners.iterator(); while (iter.hasNext()) { iter.next().eventArrived(event); } } } /* (non-Javadoc) * @see gov.aps.jca.event.ContextMessageListener#contextMessage(gov.aps.jca.event.ContextMessageEvent) */ @SuppressWarnings("unchecked") public void contextMessage(ContextMessageEvent ev) { if (plugListeners == null) return; synchronized (plugListeners) { if (plugListeners.isEmpty()) return; PlugEvent<ContextMessageEvent> event = new PlugEvent<ContextMessageEvent>(this, ev, new Timestamp(), "Context message", null, ContextMessageEvent.class); Iterator<EventSystemListener<PlugEvent>> iter = plugListeners.iterator(); while (iter.hasNext()) { iter.next().eventArrived(event); } } } }
package net.glowstone.block.blocktype;

import net.glowstone.GlowChunk;
import net.glowstone.block.GlowBlock;
import net.glowstone.block.GlowBlockState;
import net.glowstone.block.entity.TEContainer;
import net.glowstone.block.entity.TEHopper;
import net.glowstone.block.entity.TileEntity;
import net.glowstone.entity.GlowPlayer;
import net.glowstone.entity.objects.GlowItem;
import net.glowstone.inventory.MaterialMatcher;
import net.glowstone.inventory.ToolType;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.block.BlockFace;
import org.bukkit.block.BlockState;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.material.Hopper;
import org.bukkit.util.Vector;

import java.util.HashMap;

/**
 * Block behavior for the hopper: on every pulse it pulls one item from the
 * container (or dropped item) above it and pushes one item into the container
 * it faces.
 */
public class BlockHopper extends BlockContainer {

    /**
     * Encodes the hopper's output direction into the block's raw data value
     * (0=down, 1=up, 2=north, 3=south, 4=west, 5=east; EAST doubles as the
     * fallback for any unexpected face).
     *
     * @param bs   block state to mutate
     * @param face direction the hopper's spout points
     */
    public void setFacingDirection(BlockState bs, BlockFace face) {
        byte data;
        switch (face) {
            case DOWN:
                data = 0;
                break;
            case UP:
                data = 1;
                break;
            case NORTH:
                data = 2;
                break;
            case SOUTH:
                data = 3;
                break;
            case WEST:
                data = 4;
                break;
            case EAST:
            default:
                data = 5;
                break;
        }
        bs.setRawData(data);
    }

    /** Creates the hopper tile entity backing this block. */
    @Override
    public TileEntity createTileEntity(GlowChunk chunk, int cx, int cy, int cz) {
        return new TEHopper(chunk.getBlock(cx, cy, cz));
    }

    /**
     * Places the hopper so its spout points away from the clicked face and
     * starts the transfer pulse loop.
     */
    @Override
    public void placeBlock(GlowPlayer player, GlowBlockState state, BlockFace face, ItemStack holding, Vector clickedLoc) {
        super.placeBlock(player, state, face, holding, clickedLoc);
        setFacingDirection(state, face.getOppositeFace());
        requestPulse(state);
    }

    /** Hoppers only drop themselves when mined with a pickaxe. */
    @Override
    protected MaterialMatcher getNeededMiningTool(GlowBlock block) {
        return ToolType.PICKAXE;
    }

    /**
     * One transfer tick: pull a single item in from above, then push a single
     * item out toward the facing container. No-op if the tile entity is gone.
     */
    @Override
    public void receivePulse(GlowBlock block) {
        if (block.getTileEntity() == null) {
            return;
        }
        TEHopper hopper = (TEHopper) block.getTileEntity();
        pullItems(block, hopper);
        pushItems(block, hopper);
    }

    /**
     * Pulls into the hopper from the block above: either sucks up a dropped
     * item entity (when the block above is air) or moves one item out of a
     * container tile entity.
     */
    private void pullItems(GlowBlock block, TEHopper hopper) {
        GlowBlock source = block.getRelative(BlockFace.UP);
        // NOTE(review): getType() == null looks unreachable for a Bukkit Material — verify.
        if (source.getType() == null || source.getType() == Material.AIR) {
            GlowItem item = getFirstDroppedItem(source.getLocation());
            if (item == null) {
                return;
            }
            ItemStack stack = item.getItemStack();
            HashMap<Integer, ItemStack> add = hopper.getInventory().addItem(stack);
            if (add.size() > 0) {
                // Partial insert: leave the remainder on the ground.
                // NOTE(review): assumes addItem keys the leftover stack at index 0 — confirm.
                item.setItemStack(add.get(0));
            } else {
                item.remove();
            }
        } else if (source.getTileEntity() != null && source.getTileEntity() instanceof TEContainer) {
            TEContainer sourceContainer = (TEContainer) source.getTileEntity();
            if (sourceContainer.getInventory() == null || sourceContainer.getInventory().getContents().length == 0) {
                return;
            }
            ItemStack item = getFirstItem(sourceContainer);
            if (item == null) {
                return;
            }
            // Move exactly one item per pulse.
            ItemStack clone = item.clone();
            clone.setAmount(1);
            if (hopper.getInventory().addItem(clone).size() > 0) {
                // Hopper full: nothing transferred, leave the source untouched.
                return;
            }
            if (item.getAmount() - 1 == 0) {
                sourceContainer.getInventory().remove(item);
            } else {
                item.setAmount(item.getAmount() - 1);
            }
        }
    }

    /**
     * Pushes one item from the hopper into the container it faces, if that
     * target block has a container tile entity.
     */
    private void pushItems(GlowBlock block, TEHopper hopper) {
        if (hopper.getInventory() == null || hopper.getInventory().getContents().length == 0) {
            return;
        }
        GlowBlock target = block.getRelative(((Hopper) block.getState().getData()).getFacing());
        if (target.getType() != null && target.getTileEntity() instanceof TEContainer) {
            ItemStack item = getFirstItem(hopper);
            if (item == null) {
                return;
            }
            // Move exactly one item per pulse.
            ItemStack clone = item.clone();
            clone.setAmount(1);
            if (((TEContainer) target.getTileEntity()).getInventory().addItem(clone).size() > 0) {
                // Target full: nothing transferred.
                return;
            }
            if (item.getAmount() - 1 == 0) {
                hopper.getInventory().remove(item);
            } else {
                item.setAmount(item.getAmount() - 1);
            }
        }
    }

    /**
     * Finds a dropped-item entity occupying exactly the given block position,
     * scanning the chunk's entities; returns null when none is there.
     */
    private GlowItem getFirstDroppedItem(Location location) {
        for (Entity entity : location.getChunk().getEntities()) {
            if (location.getBlockX() != entity.getLocation().getBlockX()
                    || location.getBlockY() != entity.getLocation().getBlockY()
                    || location.getBlockZ() != entity.getLocation().getBlockZ()) {
                continue;
            }
            if (entity.getType() != EntityType.DROPPED_ITEM) {
                continue;
            }
            return ((GlowItem) entity);
        }
        return null;
    }

    /**
     * Returns the first non-empty stack in the container's inventory, or null
     * if the inventory holds nothing.
     */
    private ItemStack getFirstItem(TEContainer container) {
        Inventory inventory = container.getInventory();
        for (int i = 0; i < inventory.getSize(); i++) {
            if (inventory.getItem(i) == null || inventory.getItem(i).getType() == null) {
                continue;
            }
            return inventory.getItem(i);
        }
        return null;
    }

    /** Schedules this hopper for a repeating transfer pulse every 8 ticks. */
    @Override
    public void requestPulse(GlowBlockState state) {
        state.getBlock().getWorld().requestPulse(state.getBlock(), 8, false);
    }

    @Override
    public boolean canTickRandomly() {
        return true;
    }
}
package net.openhft.chronicle.core.util;

import java.util.function.DoubleFunction;

// TODO add a dummy histogram.

/**
 * A fixed-memory latency histogram. Samples are bucketed by truncating the raw
 * IEEE-754 bit pattern of the sampled time to {@code fractionBits} bits of
 * mantissa, giving logarithmically spaced buckets over {@code powersOf2}
 * powers of two starting at 1.0. Percentiles are reconstructed from the bucket
 * midpoints, so they are approximate (relative error bounded by the bucket
 * width). Not thread-safe.
 */
public class Histogram {
    /** Number of powers of two covered by the buckets. */
    private final int powersOf2;
    /** Mantissa bits kept per sample; buckets per power of two = 2^fractionBits. */
    private final int fractionBits;
    private long totalCount, overRange;
    private int[] sampleCount;
    /** Truncated bit pattern of 1.0 — the bucket index origin. */
    private long floor;

    /** Default: 42 powers of two, 16 buckets per power of two. */
    public Histogram() {
        this(42, 4);
    }

    public Histogram(int powersOf2, int fractionBits) {
        this.powersOf2 = powersOf2;
        this.fractionBits = fractionBits;
        sampleCount = new int[powersOf2 << fractionBits];
        floor = Double.doubleToRawLongBits(1) >> (52 - fractionBits);
    }

    /**
     * Records one sample.
     *
     * @param time the sampled value (same unit the caller uses throughout)
     * @return the bucket index the sample fell into; indexes &ge; the bucket
     *         count are counted as over-range, negative ones are dropped
     *         (values below 1.0), but both still increment the total count
     */
    public int sample(double time) {
        int bucket = (int) ((Double.doubleToRawLongBits(time) >> (52 - fractionBits)) - floor);
        if (bucket >= sampleCount.length)
            overRange++;
        else if (bucket >= 0)
            sampleCount[bucket]++;
        totalCount++;
        return bucket;
    }

    /**
     * Approximate value at the given percentile fraction (0.0–1.0], scanning
     * buckets from the top down.
     *
     * @return the reconstructed bucket value, {@code Double.POSITIVE_INFINITY}
     *         when the requested fraction falls into the over-range count, or
     *         1 when no bucket is reached (e.g. no samples recorded)
     */
    public double percentile(double fraction) {
        long value = (long) (totalCount * (1 - fraction));
        value -= overRange;
        if (value < 0)
            return Double.POSITIVE_INFINITY;
        // FIX: the loop header was garbled ("i value -= sampleCount[i];" — the
        // decrement and opening brace were missing), which did not compile.
        // Restored the descending scan over the buckets.
        for (int i = sampleCount.length - 1; i >= 0; i--) {
            value -= sampleCount[i];
            if (value < 0) {
                // Reconstruct the bucket's midpoint bit pattern:
                // ((index + floor) * 2 + 1) re-centers within the truncated mantissa.
                long bits = ((((i + floor) << 1) + 1) << (51 - fractionBits));
                return Double.longBitsToDouble(bits);
            }
        }
        return 1;
    }

    /** Formats percentiles assuming samples are nanoseconds (divided by 1e3 to µs). */
    public String toMicrosFormat() {
        return toMicrosFormat(t -> t / 1e3);
    }

    /**
     * Formats a percentile summary; the detail level grows with the sample
     * count (more nines are only reported once they are statistically meaningful).
     *
     * @param toMicros converts a raw sampled value to microseconds
     */
    public String toMicrosFormat(DoubleFunction<Double> toMicros) {
        if (totalCount < 1_000_000)
            return "50/90 99/99.9 99.99 - worst was " + p(toMicros.apply(percentile(0.5))) + " / "
                    + p(toMicros.apply(percentile(0.9))) + "  "
                    + p(toMicros.apply(percentile(0.99))) + " / "
                    + p(toMicros.apply(percentile(0.999))) + "  "
                    + p(toMicros.apply(percentile(0.9999))) + " - "
                    + p(toMicros.apply(percentile(1)));
        if (totalCount < 10_000_000)
            return "50/90 99/99.9 99.99/99.999 - worst was " + p(toMicros.apply(percentile(0.5))) + " / "
                    + p(toMicros.apply(percentile(0.9))) + "  "
                    + p(toMicros.apply(percentile(0.99))) + " / "
                    + p(toMicros.apply(percentile(0.999))) + "  "
                    + p(toMicros.apply(percentile(0.9999))) + " / "
                    + p(toMicros.apply(percentile(0.99999))) + " - "
                    + p(toMicros.apply(percentile(1)));
        return "50/90 99/99.9 99.99/99.999 99.9999/worst was " + p(toMicros.apply(percentile(0.5))) + " / "
                + p(toMicros.apply(percentile(0.9))) + "  "
                + p(toMicros.apply(percentile(0.99))) + " / "
                + p(toMicros.apply(percentile(0.999))) + "  "
                + p(toMicros.apply(percentile(0.9999))) + " / "
                + p(toMicros.apply(percentile(0.99999))) + "  "
                + p(toMicros.apply(percentile(0.999999))) + " / "
                + p(toMicros.apply(percentile(1)));
    }

    /**
     * Like {@link #toMicrosFormat(DoubleFunction)} but with the denser
     * 93/99.3/... percentile ladder.
     */
    public String toLongMicrosFormat(DoubleFunction<Double> toMicros) {
        if (totalCount < 1_000_000)
            return "50/90 93/99 99.3/99.9 99.93/99.99 - worst was " + p(toMicros.apply(percentile(0.5))) + " / "
                    + p(toMicros.apply(percentile(0.9))) + "  "
                    + p(toMicros.apply(percentile(0.93))) + " / "
                    + p(toMicros.apply(percentile(0.99))) + "  "
                    + p(toMicros.apply(percentile(0.993))) + " / "
                    + p(toMicros.apply(percentile(0.999))) + "  "
                    + p(toMicros.apply(percentile(0.9993))) + " / "
                    + p(toMicros.apply(percentile(0.9999))) + " - "
                    + p(toMicros.apply(percentile(1)));
        if (totalCount < 10_000_000)
            return "50/90 93/99 99.3/99.9 99.93/99.99 99.993/99.999 - worst was " + p(toMicros.apply(percentile(0.5))) + " / "
                    + p(toMicros.apply(percentile(0.9))) + "  "
                    + p(toMicros.apply(percentile(0.93))) + " / "
                    + p(toMicros.apply(percentile(0.99))) + "  "
                    + p(toMicros.apply(percentile(0.993))) + " / "
                    + p(toMicros.apply(percentile(0.999))) + "  "
                    + p(toMicros.apply(percentile(0.9993))) + " / "
                    + p(toMicros.apply(percentile(0.9999))) + "  "
                    + p(toMicros.apply(percentile(0.99993))) + " / "
                    + p(toMicros.apply(percentile(0.99999))) + " - "
                    + p(toMicros.apply(percentile(1)));
        return "50/90 93/99 99.3/99.9 99.93/99.99 99.993/99.999 99.9993/99.9999 - worst was " + p(toMicros.apply(percentile(0.5))) + " / "
                + p(toMicros.apply(percentile(0.9))) + "  "
                + p(toMicros.apply(percentile(0.93))) + " / "
                + p(toMicros.apply(percentile(0.99))) + "  "
                + p(toMicros.apply(percentile(0.993))) + " / "
                + p(toMicros.apply(percentile(0.999))) + "  "
                + p(toMicros.apply(percentile(0.9993))) + " / "
                + p(toMicros.apply(percentile(0.9999))) + "  "
                + p(toMicros.apply(percentile(0.99993))) + " / "
                + p(toMicros.apply(percentile(0.99999))) + "  "
                + p(toMicros.apply(percentile(0.999993))) + " / "
                + p(toMicros.apply(percentile(0.999999))) + " - "
                + p(toMicros.apply(percentile(1)));
    }

    /** Pretty-prints a value with precision that shrinks as the magnitude grows. */
    private String p(double v) {
        return v < 0.1 ? String.format("%.3f", v)
                : v < 1 ? String.format("%.2f", v)
                : v < 10 ? String.format("%.1f", v)
                : v < 1000 ? Long.toString(Math.round(v))
                : String.format("%,d", Math.round(v / 10) * 10);
    }

    public long totalCount() {
        return totalCount;
    }

    /** Clears all buckets and counters so the histogram can be reused. */
    public void reset() {
        sampleCount = new int[powersOf2 << fractionBits];
        totalCount = overRange = 0;
    }
}
package net.zero918nobita.Xemime.interpreter;

import net.zero918nobita.Xemime.entity.*;
import net.zero918nobita.Xemime.parser.Parser;
import net.zero918nobita.Xemime.utils.VirtualMemoryMonitor;
import net.zero918nobita.Xemime.ast.*;

import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;

/**
 * Interpreter entry point: owns the global entity table (address -> node),
 * the root "Default" object holding the built-in members, and the scope frame
 * stack. Runs either a REPL (no source file given) or a script file.
 *
 * @author Kodai Matsumoto
 */
public class Main {
    private static Parser parser;

    /** Global entity table; addresses 0 and 1 are pre-seeded with Nil and T. */
    private static TreeMap<Address, Node> entities = new TreeMap<Address, Node>() {{
        put(new Address(0, 0), Bool.Nil);
        put(new Address(0, 1), Bool.T);
    }};

    /** Root object exposing the built-in bindings (this/Core/Object/...). */
    public static Default defaultObj = new Default() {{
        // FIX: the original registered `defaultObj` here, but the static field
        // is still null while this instance initializer runs (it is assigned
        // only after construction completes), so a null entity was registered
        // and "this"/"THIS"/"Default" pointed at it. Register `this` instead.
        Address addressOfDefaultObj = Main.register(this);
        setMember(Symbol.intern(0, "this"), addressOfDefaultObj);
        setMember(Symbol.intern(0, "THIS"), addressOfDefaultObj);
        setMember(Symbol.intern(0, "Default"), addressOfDefaultObj);
        setMember(Symbol.intern(0, "Core"), register(new X_Core()));
        setMember(Symbol.intern(0, "Object"), register(new X_Object()));
    }};

    public static Frame frame = new Frame();

    /** Whether Core.exit() may terminate the JVM (true) or must throw (false). */
    private static boolean allowExitMethod = true;

    /** Pushes a local scope layer onto the frame stack. */
    public static void loadLocalFrame(Handler table) {
        frame.loadLocalFrame(table);
    }

    /** Pops the innermost local scope layer. */
    public static void unloadLocalFrame() {
        frame.unloadLocalFrame();
    }

    /**
     * @return true when the symbol is bound either in a local frame or on the
     *         root object.
     */
    public static boolean hasSymbol(Symbol sym) {
        return frame.hasSymbol(sym) || defaultObj.hasMember(sym);
    }

    /** Resolves a symbol to its address, local frames first. */
    public static Address getAddressOfSymbol(Symbol sym) throws Exception {
        return (frame.hasSymbol(sym)) ? frame.getAddressOfSymbol(sym) : defaultObj.getAddressOfMember(sym);
    }

    /**
     * Resolves a symbol to its value, local frames first; null when unbound.
     */
    public static Node getValueOfSymbol(Symbol sym) throws Exception {
        if (frame.hasSymbol(sym)) {
            return frame.getValueOfSymbol(sym);
        } else {
            return (defaultObj.hasMember(sym)) ? defaultObj.message(0, sym) : null;
        }
    }

    /** Dereferences an address in the global entity table. */
    public static Node getValueOfReference(Address address) {
        return entities.get(address);
    }

    /**
     * Rebinds an existing symbol to an address.
     *
     * @throws Exception if the symbol is not bound anywhere
     */
    public static void setAddress(Symbol sym, Address ref) throws Exception {
        if (frame.hasSymbol(sym)) {
            frame.setAddress(sym, ref);
            return;
        }
        if (!defaultObj.hasMember(sym))
            throw new Exception(parser.getLocation() + ": `" + sym.getName() + "` ");
        defaultObj.setMember(sym, ref);
    }

    /**
     * Rebinds an existing symbol to a value (registering the value first when
     * it lands on the root object).
     *
     * @throws Exception if the symbol is not bound anywhere
     */
    public static void setValue(Symbol sym, Node obj) throws Exception {
        if (frame.hasSymbol(sym)) {
            frame.setValue(sym, obj);
            return;
        }
        Address ref = register(obj);
        if (!defaultObj.hasMember(sym))
            throw new Exception(parser.getLocation() + ": `" + sym.getName() + "` ");
        defaultObj.setMember(sym, ref);
    }

    /** Defines a new symbol bound to an address, in the innermost scope. */
    public static void defAddress(Symbol sym, Address ref) throws Exception {
        if (frame.numberOfLayers() != 0) {
            frame.defAddress(sym, ref);
            return;
        }
        defaultObj.setMember(sym, ref);
    }

    /** Defines a new symbol bound to a value, in the innermost scope. */
    public static void defValue(Symbol sym, Node obj) throws Exception {
        if (frame.numberOfLayers() != 0) {
            frame.defValue(sym, obj);
            return;
        }
        Address ref = register(obj);
        defaultObj.setMember(sym, ref);
    }

    /**
     * Registers a node in the global entity table at the next free address.
     *
     * @return the Address the node was stored under
     */
    public static Address register(Node obj) {
        entities.put(new Address(0, entities.lastKey().getAddress() + 1), obj);
        return new Address(0, entities.lastKey().getAddress());
    }

    /**
     * Entry point. With no source file starts a REPL; with a file argument
     * executes it. "-debug" (any position) additionally starts the virtual
     * memory monitor.
     */
    public static void main(String[] args) {
        VirtualMemoryMonitor vmm;
        Thread vmmThread;
        boolean debug = Arrays.asList(args).contains("-debug");
        if ((debug && args.length >= 3) || (!debug && args.length >= 2)) {
            usage();
            System.out.println(System.lineSeparator() + "Usage: java -jar Xemime.jar [source file name]");
            return;
        }
        if (debug) {
            vmm = new VirtualMemoryMonitor();
            vmmThread = new Thread(vmm);
            vmmThread.start();
        }
        try {
            parser = new Parser();
            BufferedReader in;
            if ((debug && args.length == 1) || (!debug && args.length == 0)) {
                // REPL mode: read-eval-print until EOF; blank lines are ignored.
                usage();
                in = new BufferedReader(new InputStreamReader(System.in));
                System.out.print(System.lineSeparator() + "[1]> ");
                String line;
                while (true) {
                    line = in.readLine();
                    if (line != null && !line.equals("")) {
                        ArrayList<Node> result;
                        try {
                            result = parser.parse(line);
                        } catch (Exception e) {
                            System.out.println(e.getMessage());
                            System.out.print("[" + (parser.getLocation() + 1) + "]> ");
                            parser.goDown(1);
                            continue;
                        }
                        for (Node c : result) {
                            try {
                                System.out.println(c.run());
                            } catch (Exception e) {
                                System.out.println(e.getMessage());
                                break;
                            }
                        }
                        System.out.print("[" + (parser.getLocation() + 1) + "]> ");
                        parser.goDown(1);
                    } else if (line == null) {
                        break;
                    }
                }
            } else {
                // FIX: the debug flag is detected position-independently, so the
                // source file is not necessarily args[0] ("-debug file.xemime"
                // used to open "-debug"). Use the first non-flag argument.
                String sourceFile = null;
                for (String arg : args) {
                    if (!arg.equals("-debug")) {
                        sourceFile = arg;
                        break;
                    }
                }
                in = new BufferedReader(new FileReader(sourceFile));
                StringBuilder stringBuilder = new StringBuilder();
                String line;
                while ((line = in.readLine()) != null) {
                    stringBuilder.append(line);
                    stringBuilder.append('\n');
                }
                ArrayList<Node> result = null;
                try {
                    result = parser.parse(stringBuilder.toString());
                } catch (Exception e) {
                    System.out.println(e.getMessage());
                    System.exit(1);
                }
                for (Node c : result) {
                    try {
                        c.run();
                    } catch (Exception e) {
                        System.out.println(e.getMessage());
                        System.exit(1);
                    }
                }
            }
            in.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Parses and runs a source string programmatically (fresh parser).
     *
     * @throws Exception on parse or runtime error
     */
    public static void exec(String source) throws Exception {
        parser = new Parser();
        ArrayList<Node> result = parser.parse(source);
        for (Node node : result) node.run();
    }

    /** Prints the banner. */
    private static void usage() {
        System.out.println(" _ __ _ \n" +
                " | |/ /__ ____ ___ (_)___ ___ ___ \n" +
                " | / _ \\/ __ `__ \\/ / __ `__ \\/ _ \\\n" +
                " / / __/ / / / / / / / / / / / __/\n" +
                "/_/|_\\___/_/ /_/ /_/_/_/ /_/ /_/\\___/ \n\n" +
                "Xemime Version 1.0.0 2017-09-05");
    }

    /**
     * Built-in Object: provides clone and prototype-based new.
     */
    private static class X_Object extends Handler {
        X_Object() {
            super(0);
            setMember(Symbol.intern(0, "clone"), new X_Clone());
            setMember(Symbol.intern(0, "new"), new X_New());
            setMember(Symbol.intern(0, "proto"), new Bool(0, false));
        }

        /** Object.clone — registers a shallow copy of the receiver. */
        private static class X_Clone extends Native {
            X_Clone() {
                super(0, 0);
            }

            @Override
            protected Address exec(ArrayList<Node> params, Address self) throws Exception {
                return Main.register(params.get(0).run());
            }
        }

        /** Object.new — instantiates from the receiver's "proto" members. */
        private static class X_New extends Native {
            X_New() {
                super(0, 0);
            }

            @Override
            protected Node exec(ArrayList<Node> params, Address self) throws Exception {
                Handler obj1 = (Handler) params.get(0).run();
                Handler obj2 = new Handler(0);
                obj2.setMember(Symbol.intern(0, "proto"), new Bool(0, false));
                if (obj1.hasMember(Symbol.intern(0, "proto"))) {
                    Handler proto = (Handler) obj1.getMember(Symbol.intern(0, "proto"));
                    for (Map.Entry<Symbol, Address> entry : proto.getMembers().entrySet()) {
                        obj2.setMember(entry.getKey(), entry.getValue());
                    }
                }
                return Main.register(obj2);
            }
        }
    }

    /**
     * Built-in Core: if / print / println / exit.
     */
    private static class X_Core extends Handler {
        X_Core() {
            super(0);
            setMember(Symbol.intern(0, "if"), new X_If());
            setMember(Symbol.intern(0, "print"), new X_Print());
            setMember(Symbol.intern(0, "println"), new X_Println());
            setMember(Symbol.intern(0, "exit"), new X_Exit());
        }

        /** Core.exit — terminates the JVM unless exiting is disallowed. */
        private static class X_Exit extends Native {
            X_Exit() {
                super(0, 0);
            }

            @Override
            protected Node exec(ArrayList<Node> params, Address self) throws Exception {
                if (Main.allowExitMethod) System.exit(0);
                throw new Exception("exit");
            }
        }

        /** Core.print — prints its single argument without a newline. */
        private static class X_Print extends Native {
            X_Print() {
                super(0, 1);
            }

            @Override
            protected Node exec(ArrayList<Node> params, Address self) throws Exception {
                // params.get(0) is the receiver; the user argument is at index 1.
                Node o = params.get(1).run();
                System.out.print(o);
                return o;
            }
        }

        /** Core.println — prints its single argument with a newline. */
        private static class X_Println extends Native {
            X_Println() {
                super(0, 1);
            }

            @Override
            protected Node exec(ArrayList<Node> params, Address self) throws Exception {
                Node o = params.get(1).run();
                System.out.println(o);
                return o;
            }
        }

        /**
         * Core.if — evaluates the condition (param 1); Nil selects the else
         * branch (param 3), anything else the then branch (param 2).
         */
        private static class X_If extends Native {
            X_If() {
                super(0, 3);
            }

            @Override
            protected Node exec(ArrayList<Node> params, Address self) throws Exception {
                return (params.get(1).run().equals(Bool.Nil)) ? params.get(3).run() : params.get(2).run();
            }
        }
    }
}
package org.adonai.ui.mainpage;

import javafx.beans.binding.Bindings;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.transformation.FilteredList;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.geometry.Bounds;
import javafx.geometry.Point2D;
import javafx.scene.Parent;
import javafx.scene.control.*;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.*;
import javafx.stage.WindowEvent;
import org.adonai.actions.*;
import org.adonai.actions.add.AddSongAction;
import org.adonai.model.*;
import org.adonai.online.DropboxAdapter;
import org.adonai.services.AddSongService;
import org.adonai.services.RemoveSongService;
import org.adonai.services.RenumberService;
import org.adonai.services.SessionService;
import org.adonai.ui.Consts;
import org.adonai.ui.SongCellFactory;
import org.adonai.ui.UiUtils;
import org.adonai.ui.editor.SongEditor;

import java.util.*;
import java.util.logging.Logger;

/**
 * FXML controller for the application's main page. Switches the central
 * details pane between four views (songbook list, session list, one session's
 * songs, a single song editor) tracked by {@link MainPageContent}, and wires
 * the toolbar actions (add/remove, export, save, cloud backup/restore, etc.).
 */
public class MainPageController {

    @FXML private ToolBar tbaActions;

    @FXML private BorderPane border;

    @FXML private ToolBar tbLeft;

    @FXML private ToolBar tbRight;

    // Central pane whose single child is swapped per selected view.
    @FXML private StackPane spDetails;

    @FXML private ToggleButton togSongbooks;

    @FXML private ToggleButton togSessions;

    @FXML private ToggleButton togSession;

    // Songs of the currently selected session.
    @FXML private ListView<Song> lviSession;

    // All songs of the current songbook.
    @FXML private ListView<Song> lviSongs;

    @FXML private ListView<Session> lviSessions;

    @FXML private VBox panSongDetails;

    @FXML private VBox panSessionDetails;

    // Which of the four views is currently shown.
    private MainPageContent currentContent;

    @FXML private Label lblCurrentEntity;

    private FilteredList<Song> filteredSongList;

    private ConfigurationService configurationService = new ConfigurationService();

    private SessionService sessionService = new SessionService();

    private Configuration configuration;

    private int iconSizeToolbar = Consts.ICON_SIZE_SMALL;

    private static final Logger LOGGER = Logger.getLogger(MainPageController.class.getName());

    private Session currentSession = null;

    private Song currentSong = null;

    // Editable name field shown above the session song list.
    private TextField txtSessionName = new TextField();

    /**
     * FXML lifecycle hook: builds the toolbar buttons, registers all event
     * handlers and shows the songbook view initially.
     */
    public void initialize() {
        lviSongs.setCellFactory(new SongCellFactory());
        lviSession.setCellFactory(new SongCellFactory());
        lviSessions.setPlaceholder(new Label("No sessions available, press + to add ones"));
        lviSession.setPlaceholder(new Label("No songs in session available, press + to add ones"));
        panSongDetails.setBackground(Background.EMPTY);
        panSessionDetails.setBackground(Background.EMPTY);
        panSessionDetails.getChildren().add(txtSessionName);
        panSessionDetails.getChildren().add(lviSession);
        // Renames the current session live while typing.
        txtSessionName.textProperty().addListener(new ChangeListener<String>() {
            @Override
            public void changed(ObservableValue<? extends String> observable, String oldValue, String newValue) {
                getCurrentSession().setName(newValue);
            }
        });
        configuration = configurationService.get();
        selectSongbook();
        refreshListViews(null);
        lviSongs.toFront();
        tbLeft.minWidthProperty().bind(Bindings.max(border.heightProperty(), tbLeft.prefWidthProperty()));
        tbRight.minWidthProperty().bind(Bindings.max(border.heightProperty(), tbRight.prefWidthProperty()));
        //Button Plus
        Button btnAdd = new Button();
        btnAdd.setGraphic(Consts.createImageView("plus", iconSizeToolbar));
        btnAdd.setId("btnPlus");
        tbaActions.getItems().add(btnAdd);
        // "+" is context sensitive: reimport song / add song / add song to
        // session / create session, depending on the current view.
        btnAdd.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                Bounds controlBounds = UiUtils.getBounds(btnAdd);
                Double x = controlBounds.getMinX() + 10;
                Double y = controlBounds.getMinY() - 20 - AddSongAction.ADD_SONG_DIALOG_HEIGHT;
                //In Song details reimport content of current song
                if (currentContent.equals(MainPageContent.SONG)) {
                    AddSongAction addSongHandler = new AddSongAction();
                    addSongHandler.add(x, y, getCurrentSongBook(), new EventHandler<WindowEvent>() {
                        @Override
                        public void handle(WindowEvent event) {
                            Song song = addSongHandler.getNewSong();
                            LOGGER.info("New song " + song + " created");
                            if (song != null) {
                                // Replace the current song's parts with the reimported ones.
                                currentSong.getSongParts().clear();
                                currentSong.getSongParts().addAll(song.getSongParts());
                                refreshListViews(song); //Refresh list data and select the new song in editor
                                selectSong(song);
                            }
                        }
                    });
                } else //In Songbook add new song
                if (currentContent.equals(MainPageContent.SONGBOOK)) {
                    AddSongAction addSongHandler = new AddSongAction();
                    addSongHandler.add(x, y, getCurrentSongBook(), new EventHandler<WindowEvent>() {
                        @Override
                        public void handle(WindowEvent event) {
                            Song song = addSongHandler.getNewSong();
                            LOGGER.info("New song " + song + " created");
                            if (song != null) {
                                SongBook songBook = getCurrentSongBook();
                                AddSongService addSongService = new AddSongService();
                                //Add new song to songbook
                                addSongService.addSong(song, songBook);
                                refreshListViews(song); //Refresh list data and select the new song in editor
                                selectSong(song);
                            }
                        }
                    });
                } else if (currentContent.equals(MainPageContent.SESSION)) {
                    // in session add new song and add to session
                    SelectAction<Song> selectSong = new SelectAction<Song>();
                    List<Song> allSongs = getCurrentSongBook().getSongs();
                    Double xSession = controlBounds.getMinX() + 10;
                    Double ySession = controlBounds.getMinY() - 20 - SelectAction.SEARCHDIALOG_HEIGHT;
                    selectSong.open(allSongs, xSession, ySession, new SongCellFactory(), new EventHandler<WindowEvent>() {
                        @Override
                        public void handle(WindowEvent event) {
                            Song selectedSong = selectSong.getSelectedItem();
                            if (selectedSong != null) {
                                LOGGER.info("Add song " + selectedSong.getId() + " to session " + getCurrentSession().getName());
                                sessionService.addSong(getCurrentSession(), selectedSong);
                                refreshListViews(selectedSong);
                            }
                        }
                    });
                } else if (currentContent.equals(MainPageContent.SESSIONS)) {
                    SessionService sessionService = new SessionService();
                    sessionService.newSession(configuration);
                    refreshListViews(null);
                }
            }
        });
        //Button Minus
        Button btnRemove = new Button();
        btnRemove.setId("btnMinus");
        btnRemove.setGraphic(Consts.createImageView("minus", iconSizeToolbar));
        tbaActions.getItems().add(btnRemove);
        // "-" removes the selected session, session entry or songbook song.
        btnRemove.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                LOGGER.info("Button minus was pressed");
                if (currentContent.equals(MainPageContent.SESSIONS)) {
                    LOGGER.info("Remove session " + getCurrentSession().getName());
                    configuration.getSessions().remove(getCurrentSession());
                    refreshListViews(null);
                } else if (currentContent.equals(MainPageContent.SESSION)) {
                    int selectedIndex = lviSession.getSelectionModel().getSelectedIndex();
                    Session session = getCurrentSession();
                    session.getSongs().remove(selectedIndex);
                    // Keep a sensible selection after removal.
                    int previous = Math.max(selectedIndex - 1, 0);
                    lviSession.getSelectionModel().select(previous);
                    refreshListViews(null);
                } else if (currentContent.equals(MainPageContent.SONGBOOK)) {
                    RemoveSongService removeSongService = new RemoveSongService();
                    removeSongService.removeSong(lviSongs.getSelectionModel().getSelectedItem(), getCurrentSongBook());
                    refreshListViews(null);
                }
            }
        });
        tbaActions.getItems().add(new Separator());
        Button btnMp3 = new Button();
        btnMp3.setId("btnMp3");
        btnMp3.setGraphic(Consts.createImageView(AdditionalType.AUDIO.name().toLowerCase(), iconSizeToolbar));
        tbaActions.getItems().add(btnMp3);
        // Connects the selected song with an mp3 file (not meaningful on the
        // sessions overview).
        btnMp3.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                if (!currentContent.equals(MainPageContent.SESSIONS)) {
                    ConnectSongWithMp3Action connectSongWithMp3Action = new ConnectSongWithMp3Action();
                    Bounds controlBounds = UiUtils.getBounds(btnMp3);
                    Double x = controlBounds.getMinX() + 10;
                    Double y = controlBounds.getMinY() - 20 - ConnectSongWithMp3Action.CONNECTSONGDIALOG_HEIGHT;
                    connectSongWithMp3Action.connect(x, y, getSelectedSong());
                }
            }
        });
        tbaActions.getItems().add(new Separator());
        //Button Export origin
        /**Button btnExportWithChords = new Button ("Export origin");
        btnExportWithChords.setId("btnExportWithChords");
        btnExportWithChords.setGraphic(Consts.createImageView("export", iconSizeToolbar));
        tbaActions.getItems().add(btnExportWithChords);
        btnExportWithChords.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                ExportAction exportAction = new ExportAction();
                exportAction.export(configuration, getCurrentSongs(), getExportName(), true, false);
            }
        });**/
        //Button Export transposed
        Button btnExportWithoutChords = new Button("Export");
        btnExportWithoutChords.setId("btnExportWithoutChords");
        btnExportWithoutChords.setGraphic(Consts.createImageView("export", iconSizeToolbar));
        tbaActions.getItems().add(btnExportWithoutChords);
        btnExportWithoutChords.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                ExportAction exportAction = new ExportAction();
                exportAction.export(configuration, getCurrentSongs(), getExportName(), true);
            }
        });
        Button btnUserAdmin = new Button("Users");
        btnUserAdmin.setId("btnUserAdmin");
        btnUserAdmin.setGraphic(Consts.createImageView("user", iconSizeToolbar));
        tbaActions.getItems().add(btnUserAdmin);
        btnUserAdmin.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                Bounds controlBounds = UiUtils.getBounds(btnUserAdmin);
                Double x = controlBounds.getMinX() + 10;
                Double y = controlBounds.getMinY() - 20 - UsersAdminAction.HEIGHT;
                UsersAdminAction usersAdminAction = new UsersAdminAction();
                usersAdminAction.open(configuration, x, y);
            }
        });
        //Button Save
        Button btnSave = new Button();
        btnSave.setGraphic(Consts.createImageView("save", iconSizeToolbar));
        tbaActions.getItems().add(btnSave);
        btnSave.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                configurationService.set(configuration);
            }
        });
        //Button Backup
        Button btnToCloud = new Button();
        btnToCloud.setGraphic(Consts.createImageView("backup", iconSizeToolbar));
        tbaActions.getItems().add(btnToCloud);
        btnToCloud.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                DropboxAdapter dropboxAdapter = new DropboxAdapter();
                dropboxAdapter.upload(configurationService.getConfigFile());
            }
        });
        //Button Recover
        Button btnFromCloud = new Button();
        btnFromCloud.setGraphic(Consts.createImageView("recover", iconSizeToolbar));
        tbaActions.getItems().add(btnFromCloud);
        btnFromCloud.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                DropboxAdapter dropboxAdapter = new DropboxAdapter();
                dropboxAdapter.download(configurationService.getConfigFile().getParentFile());
            }
        });
        // Renumbers all songs of the current songbook.
        Button btnRenumber = new Button();
        btnRenumber.setGraphic(Consts.createImageView("number", iconSizeToolbar));
        tbaActions.getItems().add(btnRenumber);
        btnRenumber.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                SongBook songBook = getCurrentSongBook();
                RenumberService renumberService = new RenumberService();
                renumberService.renumber(songBook);
            }
        });
        //Button Configurations
        Button btnConfigurations = new Button();
        btnConfigurations.setGraphic(Consts.createImageView("settings", iconSizeToolbar));
        tbaActions.getItems().add(btnConfigurations);
        btnConfigurations.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                Bounds controlBounds = UiUtils.getBounds(btnConfigurations);
                Double x = controlBounds.getMinX() - (ConfigurationAction.CONFIGDIALOG_WIDTH / 2);
                Double y = controlBounds.getMinY() - 20 - ConfigurationAction.CONFIGDIALOG_HEIGHT;
                ConfigurationAction configurationAction = new ConfigurationAction();
                configurationAction.openConfigurations(x, y);
            }
        });
        tbaActions.getItems().add(new Separator());
        //Button Exit
        Button btnExit = new Button();
        btnExit.setGraphic(Consts.createImageView("exit", iconSizeToolbar));
        tbaActions.getItems().add(btnExit);
        // Exit with an optional save prompt when there are unsaved changes.
        btnExit.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                if (configurationService.hasChanged()) {
                    Alert alert = new Alert(Alert.AlertType.CONFIRMATION);
                    alert.setTitle("Exit adonai");
                    alert.setHeaderText("You have unsaved changes!");
                    alert.setContentText("Do you want to save your changes?");
                    Optional<ButtonType> result = alert.showAndWait();
                    // NOTE(review): unchecked Optional.get(); throws if the dialog is
                    // closed without a button — consider result.isPresent() first.
                    if (result.get() == ButtonType.OK) {
                        configurationService.set(configuration);
                    }
                }
                System.exit(0);
            }
        });
        //Views
        // SPACE opens the quick search popup over the song list.
        lviSongs.setOnKeyPressed(new EventHandler<KeyEvent>() {
            @Override
            public void handle(KeyEvent event) {
                LOGGER.info("KeyHandler " + event.getCode().toString());
                if (event.getCode().equals(KeyCode.SPACE)) {
                    SearchAction searchAction = new SearchAction();
                    Point2D point2D = lviSongs.localToScreen(lviSongs.getWidth() - 305, lviSongs.getHeight() - 55);
                    searchAction.open(filteredSongList, "", point2D.getX(), point2D.getY());
                }
            }
        });
        togSongbooks.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                selectSongbook();
            }
        });
        togSessions.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                selectSessions();
            }
        });
        // Falls back to the first configured session when none was opened yet.
        togSession.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                Session sessionToSelect = currentSession;
                if (sessionToSelect == null && !configuration.getSessions().isEmpty())
                    sessionToSelect = configuration.getSessions().get(0);
                selectSession(sessionToSelect);
            }
        });
        // Double-click drills into a session / opens a song in the editor.
        lviSessions.setOnMouseClicked(new EventHandler<MouseEvent>() {
            @Override
            public void handle(MouseEvent event) {
                if (event.getButton().equals(MouseButton.PRIMARY) && event.getClickCount() == 2) {
                    selectSession(lviSessions.getSelectionModel().getSelectedItem());
                }
            }
        });
        lviSession.setOnMouseClicked(new EventHandler<MouseEvent>() {
            @Override
            public void handle(MouseEvent event) {
                LOGGER.info("mouseClicked on lviSession " + event.getClickCount());
                if (event.getButton().equals(MouseButton.PRIMARY) && event.getClickCount() == 2) {
                    selectSong(lviSession.getSelectionModel().getSelectedItem());
                }
            }
        });
        lviSongs.setOnMouseClicked(new EventHandler<MouseEvent>() {
            @Override
            public void handle(MouseEvent event) {
                LOGGER.info("mouseClicked on lviSongs " + event.getClickCount());
                if (event.getButton().equals(MouseButton.PRIMARY) && event.getClickCount() == 2) {
                    selectSong(lviSongs.getSelectionModel().getSelectedItem());
                }
            }
        });
        refreshButtonState();
    }

    /** Hides the "current session" toggle while no sessions exist. */
    private void refreshButtonState() {
        boolean sessionsAvailable = configuration.getSessions().size() > 0;
        togSession.setVisible(sessionsAvailable);
    }

    /** Shows the sessions-overview view. */
    private void selectSessions() {
        LOGGER.info("select sessions");
        currentSong = null;
        currentContent = MainPageContent.SESSIONS;
        lblCurrentEntity.setText("SESSIONS");
        spDetails.getChildren().clear();
        spDetails.getChildren().add(lviSessions);
        lviSessions.requestFocus();
        refreshListViews(null);
        lviSessions.getSelectionModel().selectFirst();
        togSessions.setSelected(true);
    }

    /** Shows the songbook view. */
    private void selectSongbook() {
        LOGGER.info("select songbook");
        currentSong = null;
        currentContent = MainPageContent.SONGBOOK;
        lblCurrentEntity.setText("SONGBOOK");
        spDetails.getChildren().clear();
        spDetails.getChildren().add(lviSongs);
        lviSongs.requestFocus();
        refreshListViews(null);
        lviSongs.getSelectionModel().selectFirst();
        togSongbooks.setSelected(true);
    }

    /**
     * Shows the songs of one session; no-op when session is null.
     */
    private void selectSession(Session session) {
        LOGGER.info("select session " + session);
        if (session == null)
            return;
        currentContent = MainPageContent.SESSION;
        currentSong = null;
        currentSession = session;
        txtSessionName.setText(session.getName());
        spDetails.getChildren().clear();
        spDetails.getChildren().add(panSessionDetails);
        lviSession.requestFocus();
        lblCurrentEntity.setText("SESSION '" + currentSession.getName() + "'");
        refreshListViews(null);
        lviSession.getSelectionModel().selectFirst();
        togSession.setSelected(true);
    }

    /** Opens a song in the embedded editor view. */
    private void selectSong(Song song) {
        LOGGER.info("Select song " + song);
        currentSong = song;
        currentContent = MainPageContent.SONG;
        SongEditor songEditor = new SongEditor(configuration, song);
        Parent songEditorPanel = songEditor.getPanel();
        songEditorPanel.setVisible(true);
        VBox.setVgrow(songEditorPanel, Priority.ALWAYS);
        songEditorPanel.setStyle("-fx-background-color: #000000;");
        panSongDetails.setStyle("-fx-background-color: #000000;");
        panSongDetails.getChildren().clear();
        panSongDetails.getChildren().add(songEditorPanel);
        panSongDetails.toFront();
        panSongDetails.requestFocus();
        spDetails.getChildren().clear();
        spDetails.getChildren().add(panSongDetails);
        lblCurrentEntity.setText("SONG '" + currentSong.getName() + "'");
        LOGGER.info("panSongDetails: " + panSongDetails.getWidth() + "-" + panSongDetails.getHeight());
        LOGGER.info("lviSongs: " + lviSongs.getWidth() + "-" + lviSongs.getHeight());
    }

    // NOTE(review): only the first songbook is ever used — multi-songbook
    // support appears unimplemented.
    private SongBook getCurrentSongBook() {
        return configuration.getSongBooks().get(0);
    }

    /** Session backing the current view; only valid in SESSIONS/SESSION views. */
    private Session getCurrentSession() {
        if (currentContent == MainPageContent.SESSIONS) {
            return lviSessions.getSelectionModel().getSelectedItem();
        } else if (currentContent == MainPageContent.SESSION) {
            return currentSession;
        } else
            throw new IllegalStateException("Invalid page content " + currentContent);
    }

    /** Songs relevant to the current view (used by export). */
    private Collection<Song> getCurrentSongs() {
        if (currentContent == MainPageContent.SONGBOOK) {
            return getCurrentSongBook().getSongs();
        } else if (currentContent == MainPageContent.SESSION) {
            return sessionService.getSongs(currentSession, getCurrentSongBook());
        } else if (currentContent == MainPageContent.SONG) {
            return Arrays.asList(currentSong);
        } else
            throw new IllegalStateException("Invalid page content " + currentContent);
    }

    /** The song the user currently has selected, per view. */
    private Song getSelectedSong() {
        if (currentContent == MainPageContent.SONGBOOK) {
            return lviSongs.getSelectionModel().getSelectedItem();
        } else if (currentContent == MainPageContent.SESSION) {
            return lviSession.getSelectionModel().getSelectedItem();
        } else if (currentContent == MainPageContent.SONG) {
            return currentSong;
        } else
            throw new IllegalStateException("Invalid page content " + currentContent);
    }

    /** Base name used for export files, derived from the current view. */
    private String getExportName() {
        if (currentContent == MainPageContent.SONGBOOK) {
            return "songbook";
        } else if (currentContent == MainPageContent.SESSION) {
            return currentSession.getName();
        } else if (currentContent == MainPageContent.SONG) {
            return currentSong.getName();
        } else
            return "";
    }

    /**
     * Rebuilds the backing lists of all three list views from the model and
     * optionally re-selects the given song in the song lists.
     */
    private void refreshListViews(Song selectSong) {
        LOGGER.info("Refresh views");
        filteredSongList = new FilteredList<Song>(FXCollections.observableArrayList(getCurrentSongBook().getSongs()), s -> true);
        lviSongs.setItems(filteredSongList);
        List<Song> sessionSongs = currentSession != null ? sessionService.getSongs(currentSession, getCurrentSongBook()) : new ArrayList<>();
        lviSession.setItems(FXCollections.observableArrayList(sessionSongs));
        lviSessions.setItems(FXCollections.observableArrayList(configuration.getSessions()));
        if (selectSong != null) {
            LOGGER.info("Select song " + selectSong);
            lviSongs.getSelectionModel().select(selectSong);
            lviSession.getSelectionModel().select(selectSong);
        }
    }

    public MainPageContent getCurrentContent() {
        return currentContent;
    }
}
package de.peeeq.wurstscript.intermediateLang.optimizer; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.util.Iterator; import java.util.Locale; import de.peeeq.wurstscript.WurstOperator; import de.peeeq.wurstscript.jassIm.ImBoolVal; import de.peeeq.wurstscript.jassIm.ImExitwhen; import de.peeeq.wurstscript.jassIm.ImExpr; import de.peeeq.wurstscript.jassIm.ImIf; import de.peeeq.wurstscript.jassIm.ImIntVal; import de.peeeq.wurstscript.jassIm.ImOperatorCall; import de.peeeq.wurstscript.jassIm.ImProg; import de.peeeq.wurstscript.jassIm.ImRealVal; import de.peeeq.wurstscript.jassIm.ImReturn; import de.peeeq.wurstscript.jassIm.ImStmt; import de.peeeq.wurstscript.jassIm.ImStmts; import de.peeeq.wurstscript.jassIm.JassIm; import de.peeeq.wurstscript.jassIm.JassImElement; import de.peeeq.wurstscript.translation.imtranslation.ImTranslator; public class SimpleRewrites { private final ImProg prog; private final ImTranslator trans; public SimpleRewrites(ImTranslator trans) { this.prog = trans.getImProg(); this.trans = trans; } public void optimize() { optimizeElement(prog); // we need to flatten the program, because we introduced new // StatementExprs prog.flatten(trans); removeUnreachableCode(prog); } private void removeUnreachableCode(ImProg prog) { prog.accept(new ImProg.DefaultVisitor() { @Override public void visit(ImStmts stmts) { removeUnreachableCode(stmts); } }); } private void removeUnreachableCode(ImStmts stmts) { Iterator<ImStmt> it = stmts.iterator(); boolean reachable = true; while (it.hasNext()) { ImStmt s = it.next(); if (reachable) { if (s instanceof ImReturn) { reachable = false; } else if (s instanceof ImExitwhen) { ImExitwhen imExitwhen = (ImExitwhen) s; ImExpr expr = imExitwhen.getCondition(); if (expr instanceof ImBoolVal) { boolean b = ((ImBoolVal) expr).getValB(); if (b) { // found "exitwhen true" reachable = false; } } } } else { it.remove(); } } } /** Recursively optimizes the element */ private void 
optimizeElement(JassImElement elem) { // optimize children: for (int i = 0; i < elem.size(); i++) { optimizeElement(elem.get(i)); } if (elem instanceof ImOperatorCall) { ImOperatorCall opc = (ImOperatorCall) elem; optimizeOpCall(opc); } else if (elem instanceof ImIf) { ImIf imIf = (ImIf) elem; optimizeIf(imIf); } else if (elem instanceof ImExitwhen) { ImExitwhen imExitwhen = (ImExitwhen) elem; optimizeExitwhen(imExitwhen); } } private void optimizeExitwhen(ImExitwhen imExitwhen) { ImExpr expr = imExitwhen.getCondition(); if (expr instanceof ImBoolVal) { boolean b = ((ImBoolVal) expr).getValB(); if (!b) { imExitwhen.replaceWith(JassIm.ImNull()); } } } private void optimizeOpCall(ImOperatorCall opc) { // Binary if (opc.getArguments().size() > 1) { ImExpr left = opc.getArguments().get(0); ImExpr right = opc.getArguments().get(1); if (left instanceof ImBoolVal && right instanceof ImBoolVal) { boolean b1 = ((ImBoolVal) left).getValB(); boolean b2 = ((ImBoolVal) right).getValB(); boolean result; switch (opc.getOp()) { case OR: result = b1 || b2; break; case AND: result = b1 && b2; break; case EQ: result = b1 == b2; break; case NOTEQ: result = b1 != b2; break; default: result = false; break; } opc.replaceWith(JassIm.ImBoolVal(result)); } else if (left instanceof ImIntVal && right instanceof ImIntVal) { int i1 = ((ImIntVal) left).getValI(); int i2 = ((ImIntVal) right).getValI(); boolean isConditional = false; boolean isArithmetic = false; boolean result = false; int resultVal = 0; switch (opc.getOp()) { case GREATER: result = i1 > i2; isConditional = true; break; case GREATER_EQ: result = i1 >= i2; isConditional = true; break; case LESS: result = i1 < i2; isConditional = true; break; case LESS_EQ: result = i1 <= i2; isConditional = true; break; case EQ: result = i1 == i2; isConditional = true; break; case NOTEQ: result = i1 != i2; isConditional = true; break; case PLUS: resultVal = i1 + i2; isArithmetic = true; break; case MINUS: resultVal = i1 - i2; isArithmetic = true; 
break; case MULT: resultVal = i1 * i2; isArithmetic = true; break; case MOD_INT: if (i2 != 0) { resultVal = i1 % i2; isArithmetic = true; } break; case MOD_REAL: float f1 = i1; float f2 = i2; if (f2 != 0) { float resultF = f1 % f2; opc.replaceWith(JassIm.ImRealVal(String.valueOf(resultF))); } break; case DIV_INT: if (i2 != 0) { resultVal = i1 / i2; isArithmetic = true; } break; case DIV_REAL: float f3 = i1; float f4 = i2; if (f4 != 0) { float resultF = f3 / f4; opc.replaceWith(JassIm.ImRealVal(String.valueOf(resultF))); } break; default: result = false; isConditional = false; isArithmetic = false; break; } if (isConditional) { opc.replaceWith(JassIm.ImBoolVal(result)); } else if (isArithmetic) { opc.replaceWith(JassIm.ImIntVal(resultVal)); } } else if (left instanceof ImRealVal && right instanceof ImRealVal) { float f1 = Float.parseFloat(((ImRealVal) left).getValR()); float f2 = Float.parseFloat(((ImRealVal) right).getValR()); boolean isConditional = false; boolean isArithmetic = false; boolean result = false; float resultVal = 0; switch (opc.getOp()) { case GREATER: result = f1 > f2; isConditional = true; break; case GREATER_EQ: result = f1 >= f2; isConditional = true; break; case LESS: result = f1 < f2; isConditional = true; break; case LESS_EQ: result = f1 <= f2; isConditional = true; break; case EQ: result = f1 == f2; isConditional = true; break; case NOTEQ: result = f1 != f2; isConditional = true; break; case PLUS: resultVal = f1 + f2; isArithmetic = true; break; case MINUS: resultVal = f1 - f2; isArithmetic = true; break; case MULT: resultVal = f1 * f2; isArithmetic = true; break; case MOD_REAL: if (f2 != 0) { resultVal = f1 % f2; isArithmetic = true; } break; case DIV_INT: if (f2 != 0) { resultVal = f1 / f2; isArithmetic = true; } break; case DIV_REAL: if (f2 != 0) { resultVal = f1 / f2; isArithmetic = true; } break; default: result = false; isConditional = false; isArithmetic = false; break; } if (isConditional) { opc.replaceWith(JassIm.ImBoolVal(result)); 
} else if (isArithmetic) { // convert result to string, using 4 decimal digits String s = floatToStringWith4decimalDigits(resultVal); // String s = new BigDecimal(resultVal).toPlainString(); // check if the string representation is exact if (Float.parseFloat(s) == resultVal) { opc.replaceWith(JassIm.ImRealVal(s)); } } } } // Unary else { ImExpr expr = opc.getArguments().get(0); if (expr instanceof ImBoolVal) { boolean b1 = ((ImBoolVal) expr).getValB(); boolean result; switch (opc.getOp()) { case NOT: result = !b1; break; default: result = false; break; } opc.replaceWith(JassIm.ImBoolVal(result)); } else if (opc.getOp() == WurstOperator.NOT && expr instanceof ImOperatorCall) { // optimize negation of some operators ImOperatorCall inner = (ImOperatorCall) expr; switch (inner.getOp()) { case NOT: opc.replaceWith(inner.getArguments().remove(0)); break; case EQ: case NOTEQ: case LESS: case LESS_EQ: case GREATER: case GREATER_EQ: opc.replaceWith(JassIm.ImOperatorCall(oppositeOperator(inner.getOp()), JassIm.ImExprs(inner.getArguments().removeAll()))); break; } } } } /** returns the opposite of an operator */ private WurstOperator oppositeOperator(WurstOperator op) { switch (op) { case EQ: return WurstOperator.NOTEQ; case GREATER: return WurstOperator.LESS_EQ; case GREATER_EQ: return WurstOperator.LESS; case LESS: return WurstOperator.GREATER_EQ; case LESS_EQ: return WurstOperator.GREATER; case NOTEQ: return WurstOperator.EQ; default: throw new Error("operator " + op + " does not have an opposite."); } } private static String floatToStringWith4decimalDigits(float resultVal) { DecimalFormat format = new DecimalFormat(); // use a fixed locale, so that it does not randomly replace a dot by // comma on German PCs // hope this works format.setDecimalFormatSymbols(new DecimalFormatSymbols(Locale.US)); format.setMinimumIntegerDigits(1); format.setMaximumFractionDigits(4); format.setMinimumFractionDigits(0); format.setGroupingUsed(false); String s = format.format(resultVal); 
return s; } private void optimizeIf(ImIf imIf) { if (imIf.getThenBlock().isEmpty() && imIf.getElseBlock().isEmpty()) { imIf.replaceWith(imIf.getCondition().copy()); } else if (imIf.getCondition() instanceof ImBoolVal) { ImBoolVal boolVal = (ImBoolVal) imIf.getCondition(); if (boolVal.getValB()) { // we have something like 'if true ...' // replace the if statement with the then-block // we have to use ImStatementExpr to get multiple statements // into one statement as needed // for the replaceWith function // we need to copy the thenBlock because otherwise it would have // two parents (we have not removed it from the old if-block) imIf.replaceWith(JassIm.ImStatementExpr(imIf.getThenBlock().copy(), JassIm.ImNull())); } else { if (!imIf.getElseBlock().isEmpty()) { imIf.replaceWith(JassIm.ImStatementExpr(imIf.getElseBlock().copy(), JassIm.ImNull())); } else { imIf.replaceWith(JassIm.ImNull()); } } } } }
package io.debezium.connector.mysql;

import java.math.BigDecimal;
import java.time.Duration;
import java.util.Random;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigDef.Width;

import io.debezium.config.CommonConnectorConfig;
import io.debezium.config.Configuration;
import io.debezium.config.EnumeratedValue;
import io.debezium.config.Field;
import io.debezium.config.Field.ValidationOutput;
import io.debezium.connector.mysql.antlr.MySqlAntlrDdlParser;
import io.debezium.heartbeat.Heartbeat;
import io.debezium.jdbc.JdbcValueConverters;
import io.debezium.jdbc.JdbcValueConverters.BigIntUnsignedMode;
import io.debezium.jdbc.TemporalPrecisionMode;
import io.debezium.relational.RelationalDatabaseConnectorConfig;
import io.debezium.relational.Tables.TableFilter;
import io.debezium.relational.ddl.DdlParser;
import io.debezium.relational.history.DatabaseHistory;
import io.debezium.relational.history.KafkaDatabaseHistory;

/**
 * The configuration properties.
 */
public class MySqlConnectorConfig extends RelationalDatabaseConnectorConfig {

    /**
     * The set of predefined BigIntUnsignedHandlingMode options or aliases.
     */
    public static enum BigIntUnsignedHandlingMode implements EnumeratedValue {
        /**
         * Represent {@code BIGINT UNSIGNED} values as precise {@link BigDecimal} values, which are
         * represented in change events in a binary form. This is precise but difficult to use.
         */
        PRECISE("precise"),

        /**
         * Represent {@code BIGINT UNSIGNED} values as precise {@code long} values. This may be less precise
         * but is far easier to use.
         */
        LONG("long");

        // The configuration-string form of this option.
        private final String value;

        private BigIntUnsignedHandlingMode(String value) {
            this.value = value;
        }

        @Override
        public String getValue() {
            return value;
        }

        // Maps this configuration option onto the JDBC converter's mode enum;
        // PRECISE is also the fallback for any unexpected constant.
        public BigIntUnsignedMode asBigIntUnsignedMode() {
            switch (this) {
                case LONG:
                    return BigIntUnsignedMode.LONG;
                case PRECISE:
                default:
                    return BigIntUnsignedMode.PRECISE;
            }
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @return the matching option, or null if no match is found
         */
        public static BigIntUnsignedHandlingMode parse(String value) {
            if (value == null) return null;
            value = value.trim();
            for (BigIntUnsignedHandlingMode option : BigIntUnsignedHandlingMode.values()) {
                if (option.getValue().equalsIgnoreCase(value)) return option;
            }
            return null;
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @param defaultValue the default value; may be null
         * @return the matching option, or null if no match is found and the non-null default is invalid
         */
        public static BigIntUnsignedHandlingMode parse(String value, String defaultValue) {
            BigIntUnsignedHandlingMode mode = parse(value);
            if (mode == null && defaultValue != null) mode = parse(defaultValue);
            return mode;
        }
    }

    /**
     * The set of predefined SnapshotMode options or aliases.
     */
    public static enum SnapshotMode implements EnumeratedValue {
        /**
         * Perform a snapshot when it is needed.
         */
        WHEN_NEEDED("when_needed", true),

        /**
         * Perform a snapshot only upon initial startup of a connector.
         */
        INITIAL("initial", true),

        /**
         * Perform a snapshot of only the database schemas (without data) and then begin reading the binlog.
         * This should be used with care, but it is very useful when the change event consumers need only the changes
         * from the point in time the snapshot is made (and doesn't care about any state or changes prior to this point).
         */
        SCHEMA_ONLY("schema_only", false),

        /**
         * Perform a snapshot of only the database schemas (without data) and then begin reading the binlog at the current binlog position.
         * This can be used for recovery only if the connector has existing offsets and the database.history.kafka.topic does not exist (deleted).
         * This recovery option should be used with care as it assumes there have been no schema changes since the connector last stopped,
         * otherwise some events during the gap may be processed with an incorrect schema and corrupted.
         */
        SCHEMA_ONLY_RECOVERY("schema_only_recovery", false),

        /**
         * Never perform a snapshot and only read the binlog. This assumes the binlog contains all the history of those
         * databases and tables that will be captured.
         */
        NEVER("never", false),

        /**
         * Perform a snapshot and then stop before attempting to read the binlog.
         */
        INITIAL_ONLY("initial_only", true);

        private final String value;
        // Whether this mode snapshots table rows in addition to schema.
        private final boolean includeData;

        private SnapshotMode(String value, boolean includeData) {
            this.value = value;
            this.includeData = includeData;
        }

        @Override
        public String getValue() {
            return value;
        }

        /**
         * Whether this snapshotting mode should include the actual data or just the
         * schema of captured tables.
         */
        public boolean includeData() {
            return includeData;
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @return the matching option, or null if no match is found
         */
        public static SnapshotMode parse(String value) {
            if (value == null) return null;
            value = value.trim();
            for (SnapshotMode option : SnapshotMode.values()) {
                if (option.getValue().equalsIgnoreCase(value)) return option;
            }
            return null;
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @param defaultValue the default value; may be null
         * @return the matching option, or null if no match is found and the non-null default is invalid
         */
        public static SnapshotMode parse(String value, String defaultValue) {
            SnapshotMode mode = parse(value);
            if (mode == null && defaultValue != null) mode = parse(defaultValue);
            return mode;
        }
    }

    /**
     * The set of predefined Snapshot Locking Mode options.
     */
    public static enum SnapshotLockingMode implements EnumeratedValue {

        /**
         * This mode will block all writes for the entire duration of the snapshot.
         *
         * Replaces deprecated configuration option snapshot.locking.minimal with a value of false.
         */
        EXTENDED("extended"),

        /**
         * The connector holds the global read lock for just the initial portion of the snapshot while the connector reads the database
         * schemas and other metadata. The remaining work in a snapshot involves selecting all rows from each table, and this can be done
         * in a consistent fashion using the REPEATABLE READ transaction even when the global read lock is no longer held and while other
         * MySQL clients are updating the database.
         *
         * Replaces deprecated configuration option snapshot.locking.minimal with a value of true.
         */
        MINIMAL("minimal"),

        /**
         * This mode will avoid using ANY table locks during the snapshot process. This mode can only be used with SnapShotMode
         * set to schema_only or schema_only_recovery.
         */
        NONE("none");

        private final String value;

        private SnapshotLockingMode(String value) {
            this.value = value;
        }

        @Override
        public String getValue() {
            return value;
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @return the matching option, or null if no match is found
         */
        public static SnapshotLockingMode parse(String value) {
            if (value == null) return null;
            value = value.trim();
            for (SnapshotLockingMode option : SnapshotLockingMode.values()) {
                if (option.getValue().equalsIgnoreCase(value)) return option;
            }
            return null;
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @param defaultValue the default value; may be null
         * @return the matching option, or null if no match is found and the non-null default is invalid
         */
        public static SnapshotLockingMode parse(String value, String defaultValue) {
            SnapshotLockingMode mode = parse(value);
            if (mode == null && defaultValue != null) mode = parse(defaultValue);
            return mode;
        }
    }

    /**
     * The set of predefined SecureConnectionMode options or aliases.
     */
    public static enum SecureConnectionMode implements EnumeratedValue {
        /**
         * Establish an unencrypted connection.
         */
        DISABLED("disabled"),

        /**
         * Establish a secure (encrypted) connection if the server supports secure connections.
         * Fall back to an unencrypted connection otherwise.
         */
        PREFERRED("preferred"),

        /**
         * Establish a secure connection if the server supports secure connections.
         * The connection attempt fails if a secure connection cannot be established.
         */
        REQUIRED("required"),

        /**
         * Like REQUIRED, but additionally verify the server TLS certificate against the configured Certificate Authority
         * (CA) certificates. The connection attempt fails if no valid matching CA certificates are found.
         */
        VERIFY_CA("verify_ca"),

        /**
         * Like VERIFY_CA, but additionally verify that the server certificate matches the host to which the connection is
         * attempted.
         */
        VERIFY_IDENTITY("verify_identity");

        private final String value;

        private SecureConnectionMode(String value) {
            this.value = value;
        }

        @Override
        public String getValue() {
            return value;
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @return the matching option, or null if no match is found
         */
        public static SecureConnectionMode parse(String value) {
            if (value == null) return null;
            value = value.trim();
            for (SecureConnectionMode option : SecureConnectionMode.values()) {
                if (option.getValue().equalsIgnoreCase(value)) return option;
            }
            return null;
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @param defaultValue the default value; may be null
         * @return the matching option, or null if no match is found and the non-null default is invalid
         */
        public static SecureConnectionMode parse(String value, String defaultValue) {
            SecureConnectionMode mode = parse(value);
            if (mode == null && defaultValue != null) mode = parse(defaultValue);
            return mode;
        }
    }

    /**
     * The set of predefined Gtid New Channel Position options.
     */
    public static enum GtidNewChannelPosition implements EnumeratedValue {

        /**
         * This mode will start reading new gtid channel from mysql servers last_executed position
         */
        LATEST("latest"),

        /**
         * This mode will start reading new gtid channel from earliest available position in server.
         * This is needed when during active-passive failover the new gtid channel becomes active and receiving writes. #DBZ-923
         */
        EARLIEST("earliest");

        private final String value;

        private GtidNewChannelPosition(String value) {
            this.value = value;
        }

        @Override
        public String getValue() {
            return value;
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @return the matching option, or null if no match is found
         */
        public static GtidNewChannelPosition parse(String value) {
            if (value == null) return null;
            value = value.trim();
            for (GtidNewChannelPosition option : GtidNewChannelPosition.values()) {
                if (option.getValue().equalsIgnoreCase(value)) return option;
            }
            return null;
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @param defaultValue the default value; may be null
         * @return the matching option, or null if no match is found and the non-null default is invalid
         */
        public static GtidNewChannelPosition parse(String value, String defaultValue) {
            GtidNewChannelPosition mode = parse(value);
            if (mode == null && defaultValue != null) mode = parse(defaultValue);
            return mode;
        }
    }

    /**
     * The set of predefined modes for dealing with failures during binlog event processing.
     */
    public static enum EventProcessingFailureHandlingMode implements EnumeratedValue {

        /**
         * Problematic events will be skipped.
         */
        IGNORE("ignore"),

        /**
         * Problematic event and their binlog position will be logged and the events will be skipped.
         */
        WARN("warn"),

        /**
         * An exception indicating the problematic events and their binlog position is raised, causing the connector to be stopped.
         */
        FAIL("fail");

        private final String value;

        private EventProcessingFailureHandlingMode(String value) {
            this.value = value;
        }

        @Override
        public String getValue() {
            return value;
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @return the matching option, or null if no match is found
         */
        public static EventProcessingFailureHandlingMode parse(String value) {
            if (value == null) {
                return null;
            }

            value = value.trim();

            for (EventProcessingFailureHandlingMode option : EventProcessingFailureHandlingMode.values()) {
                if (option.getValue().equalsIgnoreCase(value)) return option;
            }

            return null;
        }
    }

    // Selects which DDL parser implementation the connector uses; each constant
    // acts as a factory for its parser.
    public static enum DdlParsingMode implements EnumeratedValue {

        LEGACY("legacy") {
            @Override
            public DdlParser getNewParserInstance(JdbcValueConverters valueConverters, TableFilter tableFilter) {
                return new MySqlDdlParser(false, (MySqlValueConverters) valueConverters);
            }
        },
        ANTLR("antlr") {
            @Override
            public DdlParser getNewParserInstance(JdbcValueConverters valueConverters, TableFilter tableFilter) {
                return new MySqlAntlrDdlParser((MySqlValueConverters) valueConverters, tableFilter);
            }
        };

        private final String value;

        private DdlParsingMode(String value) {
            this.value = value;
        }

        @Override
        public String getValue() {
            return value;
        }

        // Factory method implemented per constant above.
        public abstract DdlParser getNewParserInstance(JdbcValueConverters valueConverters, TableFilter tableFilter);

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @return the matching option, or null if no match is found
         */
        public static DdlParsingMode parse(String value) {
            if (value == null) return null;
            value = value.trim();
            for (DdlParsingMode option : DdlParsingMode.values()) {
                if (option.getValue().equalsIgnoreCase(value)) return option;
            }
            return null;
        }

        /**
         * Determine if the supplied value is one of the predefined options.
         *
         * @param value the configuration property value; may not be null
         * @param defaultValue the default value; may be null
         * @return the matching option, or null if no match is found and the non-null default is invalid
         */
        public static DdlParsingMode parse(String value, String defaultValue) {
            DdlParsingMode mode = parse(value);
            if (mode == null && defaultValue != null) {
                mode = parse(defaultValue);
            }
            return mode;
        }
    }

    // Property-name constants shared by fields that declare dependencies on each other.
    private static final String DATABASE_WHITELIST_NAME = "database.whitelist";
    private static final String TABLE_WHITELIST_NAME = "table.whitelist";
    private static final String TABLE_IGNORE_BUILTIN_NAME = "table.ignore.builtin";

    /**
     * Default size of the binlog buffer used for examining transactions and
     * deciding whether to propagate them or not. A size of 0 disables the buffer,
     * all events will be passed on directly as they are passed by the binlog
     * client.
     */
    private static final int DEFAULT_BINLOG_BUFFER_SIZE = 0;

    public static final Field HOSTNAME = Field.create("database.hostname")
            .withDisplayName("Hostname")
            .withType(Type.STRING)
            .withWidth(Width.MEDIUM)
            .withImportance(Importance.HIGH)
            .withValidation(Field::isRequired)
            .withDescription("Resolvable hostname or IP address of the MySQL database server.");

    public static final Field PORT = Field.create("database.port")
            .withDisplayName("Port")
            .withType(Type.INT)
            .withWidth(Width.SHORT)
            .withDefault(3306)
            .withImportance(Importance.HIGH)
            .withValidation(Field::isInteger)
            .withDescription("Port of the MySQL database server.");

    public static final Field USER = Field.create("database.user")
            .withDisplayName("User")
            .withType(Type.STRING)
            .withWidth(Width.SHORT)
            .withImportance(Importance.HIGH)
            .withValidation(Field::isRequired)
            .withDescription("Name of the MySQL database user to be used when connecting to the database.");

    public static final Field PASSWORD = Field.create("database.password")
            .withDisplayName("Password")
            .withType(Type.PASSWORD)
            .withWidth(Width.SHORT)
            .withImportance(Importance.HIGH)
            .withDescription("Password of the MySQL database user to be used when connecting to the database.");

    public static final Field SERVER_NAME = Field.create("database.server.name")
            .withDisplayName("Namespace")
            .withType(Type.STRING)
            .withWidth(Width.MEDIUM)
            .withImportance(Importance.HIGH)
            .withValidation(Field::isRequired, MySqlConnectorConfig::validateServerNameIsDifferentFromHistoryTopicName)
            .withDescription("Unique name that identifies the database server and all recorded offsets, and"
                    + "that is used as a prefix for all schemas and topics. "
                    + "Each distinct MySQL installation should have a separate namespace and monitored by "
                    + "at most one Debezium connector.");

    public static final Field ON_CONNECT_STATEMENTS = Field.create("database.initial.statements")
            .withDisplayName("Initial statements")
            .withType(Type.STRING)
            .withWidth(Width.LONG)
            .withImportance(Importance.LOW)
            .withDescription("A semicolon separated list of SQL statements to be executed when a JDBC connection (not binlog reading connection) to the database is established. "
                    + "Note that the connector may establish JDBC connections at its own discretion, so this should typically be used for configuration of session parameters only,"
                    + "but not for executing DML statements. Use doubled semicolon (';;') to use a semicolon as a character and not as a delimiter.");

    public static final Field SERVER_ID = Field.create("database.server.id")
            .withDisplayName("Cluster ID")
            .withType(Type.LONG)
            .withWidth(Width.LONG)
            .withImportance(Importance.HIGH)
            .withDefault(MySqlConnectorConfig::randomServerId)
            .withValidation(Field::isRequired, Field::isPositiveLong)
            .withDescription("A numeric ID of this database client, which must be unique across all "
                    + "currently-running database processes in the cluster. This connector joins the "
                    + "MySQL database cluster as another server (with this unique ID) so it can read "
                    + "the binlog. By default, a random number is generated between 5400 and 6400.");

    public static final Field SSL_MODE = Field.create("database.ssl.mode")
            .withDisplayName("SSL mode")
            .withEnum(SecureConnectionMode.class, SecureConnectionMode.DISABLED)
            .withWidth(Width.MEDIUM)
            .withImportance(Importance.MEDIUM)
            .withDescription("Whether to use an encrypted connection to MySQL. Options include"
                    + "'disabled' (the default) to use an unencrypted connection; "
                    + "'preferred' to establish a secure (encrypted) connection if the server supports secure connections, "
                    + "but fall back to an unencrypted connection otherwise; "
                    + "'required' to use a secure (encrypted) connection, and fail if one cannot be established; "
                    + "'verify_ca' like 'required' but additionally verify the server TLS certificate against the configured Certificate Authority "
                    + "(CA) certificates, or fail if no valid matching CA certificates are found; or"
                    + "'verify_identity' like 'verify_ca' but additionally verify that the server certificate matches the host to which the connection is attempted.");

    public static final Field SSL_KEYSTORE = Field.create("database.ssl.keystore")
            .withDisplayName("SSL Keystore")
            .withType(Type.STRING)
            .withWidth(Width.LONG)
            .withImportance(Importance.MEDIUM)
            .withDescription("Location of the Java keystore file containing an application process's own certificate and private key.");

    public static final Field SSL_KEYSTORE_PASSWORD = Field.create("database.ssl.keystore.password")
            .withDisplayName("SSL Keystore Password")
            .withType(Type.PASSWORD)
            .withWidth(Width.MEDIUM)
            .withImportance(Importance.MEDIUM)
            .withDescription("Password to access the private key from the keystore file specified by 'ssl.keystore' configuration property or the 'javax.net.ssl.keyStore' system or JVM property. "
                    + "This password is used to unlock the keystore file (store password), and to decrypt the private key stored in the keystore (key password).");

    public static final Field SSL_TRUSTSTORE = Field.create("database.ssl.truststore")
            .withDisplayName("SSL Truststore")
            .withType(Type.STRING)
            .withWidth(Width.LONG)
            .withImportance(Importance.MEDIUM)
            .withDescription("Location of the Java truststore file containing the collection of CA certificates trusted by this application process (trust store).");

    public static final Field SSL_TRUSTSTORE_PASSWORD = Field.create("database.ssl.truststore.password")
            .withDisplayName("SSL Truststore Password")
            .withType(Type.PASSWORD)
            .withWidth(Width.MEDIUM)
            .withImportance(Importance.MEDIUM)
            .withDescription("Password to unlock the keystore file (store password) specified by 'ssl.trustore' configuration property or the 'javax.net.ssl.trustStore' system or JVM property.");

    public static final Field TABLES_IGNORE_BUILTIN = Field.create(TABLE_IGNORE_BUILTIN_NAME)
            .withDisplayName("Ignore system databases")
            .withType(Type.BOOLEAN)
            .withWidth(Width.SHORT)
            .withImportance(Importance.LOW)
            .withDefault(true)
            .withValidation(Field::isBoolean)
            .withDependents(DATABASE_WHITELIST_NAME)
            .withDescription("Flag specifying whether built-in tables should be ignored.");

    public static final Field JDBC_DRIVER = Field.create("database.jdbc.driver")
            .withDisplayName("Jdbc Driver Class Name")
            .withType(Type.CLASS)
            .withWidth(Width.MEDIUM)
            .withDefault(com.mysql.cj.jdbc.Driver.class.getName())
            .withImportance(Importance.LOW)
            .withValidation(Field::isClassName)
            .withDescription("JDBC Driver class name used to connect to the MySQL database server.");

    /**
     * A comma-separated list of regular expressions that match database names to be monitored.
     * May not be used with {@link #DATABASE_BLACKLIST}.
     */
    public static final Field DATABASE_WHITELIST = Field.create(DATABASE_WHITELIST_NAME)
            .withDisplayName("Databases")
            .withType(Type.LIST)
            .withWidth(Width.LONG)
            .withImportance(Importance.HIGH)
            .withDependents(TABLE_WHITELIST_NAME)
            .withDescription("The databases for which changes are to be captured");

    /**
     * A comma-separated list of regular expressions that match database names to be excluded from monitoring.
     * May not be used with {@link #DATABASE_WHITELIST}.
     */
    public static final Field DATABASE_BLACKLIST = Field.create("database.blacklist")
            .withDisplayName("Exclude Databases")
            .withType(Type.STRING)
            .withWidth(Width.LONG)
            .withImportance(Importance.MEDIUM)
            .withValidation(MySqlConnectorConfig::validateDatabaseBlacklist)
            .withInvisibleRecommender()
            .withDescription("");

    /**
     * A comma-separated list of regular expressions that match the fully-qualified names of tables to be monitored.
     * Fully-qualified names for tables are of the form {@code <databaseName>.<tableName>} or
     * {@code <databaseName>.<schemaName>.<tableName>}. May not be used with {@link #TABLE_BLACKLIST}, and superseded by database
     * inclusions/exclusions.
     */
    public static final Field TABLE_WHITELIST = Field.create(TABLE_WHITELIST_NAME)
            .withDisplayName("Tables")
            .withType(Type.LIST)
            .withWidth(Width.LONG)
            .withImportance(Importance.HIGH)
            .withValidation(Field::isListOfRegex)
            .withDescription("The tables for which changes are to be captured");

    /**
     * A comma-separated list of regular expressions that match the fully-qualified names of tables to be excluded from
     * monitoring. Fully-qualified names for tables are of the form {@code <databaseName>.<tableName>} or
     * {@code <databaseName>.<schemaName>.<tableName>}. May not be used with {@link #TABLE_WHITELIST}.
*/ public static final Field TABLE_BLACKLIST = Field.create("table.blacklist") .withDisplayName("Exclude Tables") .withType(Type.STRING) .withWidth(Width.LONG) .withImportance(Importance.MEDIUM) .withValidation(Field::isListOfRegex, MySqlConnectorConfig::validateTableBlacklist) .withInvisibleRecommender(); /** * A comma-separated list of regular expressions that match fully-qualified names of columns to be excluded from monitoring * and change messages. Fully-qualified names for columns are of the form {@code <databaseName>.<tableName>.<columnName>} * or {@code <databaseName>.<schemaName>.<tableName>.<columnName>}. */ public static final Field COLUMN_BLACKLIST = Field.create("column.blacklist") .withDisplayName("Exclude Columns") .withType(Type.STRING) .withWidth(Width.LONG) .withImportance(Importance.MEDIUM) .withDescription(""); /** * A comma-separated list of regular expressions that match source UUIDs in the GTID set used to find the binlog * position in the MySQL server. Only the GTID ranges that have sources matching one of these include patterns will * be used. * May not be used with {@link #GTID_SOURCE_EXCLUDES}. */ public static final Field GTID_SOURCE_INCLUDES = Field.create("gtid.source.includes") .withDisplayName("Include GTID sources") .withType(Type.LIST) .withWidth(Width.LONG) .withImportance(Importance.HIGH) .withDependents(TABLE_WHITELIST_NAME) .withDescription("The source UUIDs used to include GTID ranges when determine the starting position in the MySQL server's binlog."); /** * A comma-separated list of regular expressions that match source UUIDs in the GTID set used to find the binlog * position in the MySQL server. Only the GTID ranges that have sources matching none of these exclude patterns will * be used. * May not be used with {@link #GTID_SOURCE_INCLUDES}. 
*/ public static final Field GTID_SOURCE_EXCLUDES = Field.create("gtid.source.excludes") .withDisplayName("Exclude GTID sources") .withType(Type.STRING) .withWidth(Width.LONG) .withImportance(Importance.MEDIUM) .withValidation(MySqlConnectorConfig::validateGtidSetExcludes) .withInvisibleRecommender() .withDescription("The source UUIDs used to exclude GTID ranges when determine the starting position in the MySQL server's binlog."); /** * If set to true, we will only produce DML events into Kafka for transactions that were written on MySQL servers * with UUIDs matching the filters defined by the {@link #GTID_SOURCE_INCLUDES} or {@link #GTID_SOURCE_EXCLUDES} * configuration options, if they are specified. * * Defaults to true. * * When true, either {@link #GTID_SOURCE_INCLUDES} or {@link #GTID_SOURCE_EXCLUDES} must be set. */ public static final Field GTID_SOURCE_FILTER_DML_EVENTS = Field.create("gtid.source.filter.dml.events") .withDisplayName("Filter DML events") .withType(Type.BOOLEAN) .withWidth(Width.SHORT) .withImportance(Importance.MEDIUM) .withDefault(true) .withDescription("If set to true, we will only produce DML events into Kafka for transactions that were written on mysql servers with UUIDs matching the filters defined by the gtid.source.includes or gtid.source.excludes configuration options, if they are specified."); /** * If set to 'latest', connector when encountering new GTID channel after job restart will start reading it from the * latest executed position (default). When set to 'earliest' the connector will start reading new GTID channels from the first available position. * This is useful when in active-passive mysql setup during failover new GTID channel starts receiving writes, see #DBZ-923 * * Defaults to latest. 
*/
    public static final Field GTID_NEW_CHANNEL_POSITION = Field.create("gtid.new.channel.position")
            .withDisplayName("GTID start position")
            .withEnum(GtidNewChannelPosition.class, GtidNewChannelPosition.LATEST)
            .withWidth(Width.SHORT)
            .withImportance(Importance.MEDIUM)
            .withDescription("If set to 'latest', when connector sees new GTID, it will start consuming gtid channel from the server latest executed gtid position. If 'earliest' connector starts reading channel from first available (not purged) gtid position on the server.");

    // --- Connection keep-alive / timeout tuning ---
    public static final Field CONNECTION_TIMEOUT_MS = Field.create("connect.timeout.ms")
            .withDisplayName("Connection Timeout (ms)")
            .withType(Type.INT)
            .withWidth(Width.SHORT)
            .withImportance(Importance.MEDIUM)
            .withDescription("Maximum time in milliseconds to wait after trying to connect to the database before timing out.")
            .withDefault(30 * 1000)
            .withValidation(Field::isPositiveInteger);

    public static final Field KEEP_ALIVE = Field.create("connect.keep.alive")
            .withDisplayName("Keep connection alive (true/false)")
            .withType(Type.BOOLEAN)
            .withWidth(Width.SHORT)
            .withImportance(Importance.LOW)
            .withDescription("Whether a separate thread should be used to ensure the connection is kept alive.")
            .withDefault(true)
            .withValidation(Field::isBoolean);

    public static final Field KEEP_ALIVE_INTERVAL_MS = Field.create("connect.keep.alive.interval.ms")
            .withDisplayName("Keep alive interval (ms)")
            .withType(Type.LONG)
            .withWidth(Width.SHORT)
            .withImportance(Importance.LOW)
            .withDescription("Interval in milliseconds to wait for connection checking if keep alive thread is used.")
            .withDefault(Duration.ofMinutes(1).toMillis())
            .withValidation(Field::isPositiveInteger);

    public static final Field ROW_COUNT_FOR_STREAMING_RESULT_SETS = Field.create("min.row.count.to.stream.results")
            .withDisplayName("Stream result set of size")
            .withType(Type.LONG)
            .withWidth(Width.MEDIUM)
            .withImportance(Importance.LOW)
            .withDescription("The number of rows a table must contain to stream results rather than pull " + "all into memory during snapshots. Defaults to 1,000. Use 0 to stream all results " + "and completely avoid checking the size of each table.")
            .withDefault(1_000)
            .withValidation(Field::isNonNegativeLong);

    public static final Field BUFFER_SIZE_FOR_BINLOG_READER = Field.create("binlog.buffer.size")
            .withDisplayName("Binlog reader buffer size")
            .withType(Type.INT)
            .withWidth(Width.MEDIUM)
            .withImportance(Importance.MEDIUM)
            .withDescription("The size of a look-ahead buffer used by the binlog reader to decide whether " + "the transaction in progress is going to be committed or rolled back. " + "Use 0 to disable look-ahead buffering. " + "Defaults to " + DEFAULT_BINLOG_BUFFER_SIZE + " (i.e. buffering is disabled).")
            .withDefault(DEFAULT_BINLOG_BUFFER_SIZE)
            .withValidation(Field::isNonNegativeInteger);

    /**
     * The database history class is hidden in the {@link #configDef()} since that is designed to work with a user interface,
     * and in these situations using Kafka is the only way to go.
     */
    public static final Field DATABASE_HISTORY = Field.create("database.history")
            .withDisplayName("Database history class")
            .withType(Type.CLASS)
            .withWidth(Width.LONG)
            .withImportance(Importance.LOW)
            .withInvisibleRecommender()
            .withDescription("The name of the DatabaseHistory class that should be used to store and recover database schema changes. " + "The configuration properties for the history are prefixed with the '" + DatabaseHistory.CONFIGURATION_FIELD_PREFIX_STRING + "' string.")
            .withDefault(KafkaDatabaseHistory.class.getName());

    public static final Field INCLUDE_SCHEMA_CHANGES = Field.create("include.schema.changes")
            .withDisplayName("Include database schema changes")
            .withType(Type.BOOLEAN)
            .withWidth(Width.SHORT)
            .withImportance(Importance.MEDIUM)
            .withDescription("Whether the connector should publish changes in the database schema to a Kafka topic with " + "the same name as the database server ID. Each schema change will be recorded using a key that " + "contains the database name and whose value includes the DDL statement(s)." + "The default is 'true'. This is independent of how the connector internally records database history.")
            .withDefault(true);

    public static final Field INCLUDE_SQL_QUERY = Field.create("include.query")
            .withDisplayName("Include original SQL query with in change events")
            .withType(Type.BOOLEAN)
            .withWidth(Width.SHORT)
            .withImportance(Importance.MEDIUM)
            .withDescription("Whether the connector should include the original SQL query that generated the change event. " + "Note: This option requires MySQL be configured with the binlog_rows_query_log_events option set to ON. Query will not be present for events generated from snapshot. " + "WARNING: Enabling this option may expose tables or fields explicitly blacklisted or masked by including the original SQL statement in the change event. " + "For this reason the default value is 'false'.")
            .withDefault(false);

    // --- Snapshot behavior ---
    public static final Field SNAPSHOT_MODE = Field.create("snapshot.mode")
            .withDisplayName("Snapshot mode")
            .withEnum(SnapshotMode.class, SnapshotMode.INITIAL)
            .withWidth(Width.SHORT)
            .withImportance(Importance.LOW)
            .withDescription("The criteria for running a snapshot upon startup of the connector. " + "Options include: " + "'when_needed' to specify that the connector run a snapshot upon startup whenever it deems it necessary; " + "'initial' (the default) to specify the connector can run a snapshot only when no offsets are available for the logical server name; " + "'initial_only' same as 'initial' except the connector should stop after completing the snapshot and before it would normally read the binlog; and" + "'never' to specify the connector should never run a snapshot and that upon first startup the connector should read from the beginning of the binlog. " + "The 'never' mode should be used with care, and only when the binlog is known to contain all history.");

    /**
     * @deprecated Replaced with SNAPSHOT_LOCKING_MODE
     */
    @Deprecated
    public static final Field SNAPSHOT_MINIMAL_LOCKING = Field.create("snapshot.minimal.locks")
            .withDisplayName("Use shortest database locking for snapshots")
            .withType(Type.BOOLEAN)
            .withWidth(Width.SHORT)
            .withImportance(Importance.LOW)
            .withDescription("NOTE: This option has been deprecated in favor of snapshot.locking.mode. \n" + "Controls how long the connector holds onto the global read lock while it is performing a snapshot. The default is 'true', " + "which means the connector holds the global read lock (and thus prevents any updates) for just the initial portion of the snapshot " + "while the database schemas and other metadata are being read. The remaining work in a snapshot involves selecting all rows from " + "each table, and this can be done using the snapshot process' REPEATABLE READ transaction even when the lock is no longer held and " + "other operations are updating the database. However, in some cases it may be desirable to block all writes for the entire duration " + "of the snapshot; in such cases set this property to 'false'.")
            .withDefault(true);

    // Replacement for SNAPSHOT_MINIMAL_LOCKING; conflict between the two is rejected by
    // validateSnapshotLockingMode.
    public static final Field SNAPSHOT_LOCKING_MODE = Field.create("snapshot.locking.mode")
            .withDisplayName("Snapshot locking mode")
            .withEnum(SnapshotLockingMode.class, SnapshotLockingMode.MINIMAL)
            .withWidth(Width.SHORT)
            .withImportance(Importance.LOW)
            .withDescription("Controls how long the connector holds onto the global read lock while it is performing a snapshot. The default is 'minimal', " + "which means the connector holds the global read lock (and thus prevents any updates) for just the initial portion of the snapshot " + "while the database schemas and other metadata are being read. The remaining work in a snapshot involves selecting all rows from " + "each table, and this can be done using the snapshot process' REPEATABLE READ transaction even when the lock is no longer held and " + "other operations are updating the database. However, in some cases it may be desirable to block all writes for the entire duration " + "of the snapshot; in such cases set this property to 'extended'. Using a value of 'none' will prevent the connector from acquiring any " + "table locks during the snapshot process. This mode can only be used in combination with snapshot.mode values of 'schema_only' or " + "'schema_only_recovery' and is only safe to use if no schema changes are happening while the snapshot is taken.")
            .withValidation(MySqlConnectorConfig::validateSnapshotLockingMode);

    // --- Value/type handling modes ---
    public static final Field TIME_PRECISION_MODE = Field.create("time.precision.mode")
            .withDisplayName("Time Precision")
            .withEnum(TemporalPrecisionMode.class, TemporalPrecisionMode.ADAPTIVE_TIME_MICROSECONDS)
            .withWidth(Width.SHORT)
            .withImportance(Importance.MEDIUM)
            .withDescription("Time, date, and timestamps can be represented with different kinds of precisions, including:" + "'adaptive_time_microseconds' (the default) like 'adaptive' mode, but TIME fields always use microseconds precision;" + "'adaptive' (deprecated) bases the precision of time, date, and timestamp values on the database column's precision; " + "'connect' always represents time, date, and timestamp values using Kafka Connect's built-in representations for Time, Date, and Timestamp, " + "which uses millisecond precision regardless of the database columns' precision.");

    public static final Field BIGINT_UNSIGNED_HANDLING_MODE = Field.create("bigint.unsigned.handling.mode")
            .withDisplayName("BIGINT UNSIGNED Handling")
            .withEnum(BigIntUnsignedHandlingMode.class, BigIntUnsignedHandlingMode.LONG)
            .withWidth(Width.SHORT)
            .withImportance(Importance.MEDIUM)
            .withDescription("Specify how BIGINT UNSIGNED columns should be represented in change events, including:" + "'precise' uses java.math.BigDecimal to represent values, which are encoded in the change events using a binary representation and Kafka Connect's 'org.apache.kafka.connect.data.Decimal' type; " + "'long' (the default) represents values using Java's 'long', which may not offer the precision but will be far easier to use in consumers.");

    // --- Failure handling ---
    public static final Field EVENT_DESERIALIZATION_FAILURE_HANDLING_MODE = Field.create("event.deserialization.failure.handling.mode")
            .withDisplayName("Event deserialization failure handling")
            .withEnum(EventProcessingFailureHandlingMode.class, EventProcessingFailureHandlingMode.FAIL)
            .withWidth(Width.SHORT)
            .withImportance(Importance.MEDIUM)
            .withDescription("Specify how failures during deserialization of binlog events (i.e. when encountering a corrupted event) should be handled, including:" + "'fail' (the default) an exception indicating the problematic event and its binlog position is raised, causing the connector to be stopped; " + "'warn' the problematic event and its binlog position will be logged and the event will be skipped;" + "'ignore' the problematic event will be skipped.");

    public static final Field INCONSISTENT_SCHEMA_HANDLING_MODE = Field.create("inconsistent.schema.handling.mode")
            .withDisplayName("Inconsistent schema failure handling")
            .withEnum(EventProcessingFailureHandlingMode.class, EventProcessingFailureHandlingMode.FAIL)
            .withWidth(Width.SHORT)
            .withImportance(Importance.MEDIUM)
            .withDescription("Specify how binlog events that belong to a table missing from internal schema representation (i.e. internal representation is not consistent with database) should be handled, including:" + "'fail' (the default) an exception indicating the problematic event and its binlog position is raised, causing the connector to be stopped; " + "'warn' the problematic event and its binlog position will be logged and the event will be skipped;" + "'ignore' the problematic event will be skipped.");

    public static final Field SNAPSHOT_SELECT_STATEMENT_OVERRIDES_BY_TABLE = Field.create("snapshot.select.statement.overrides")
            .withDisplayName("List of tables where the default select statement used during snapshotting should be overridden.")
            .withType(Type.STRING)
            .withWidth(Width.LONG)
            .withImportance(Importance.MEDIUM)
            .withDescription(" This property contains a comma-separated list of fully-qualified tables (DB_NAME.TABLE_NAME). Select statements for the individual tables are " + "specified in further configuration properties, one for each table, identified by the id 'snapshot.select.statement.overrides.[DB_NAME].[TABLE_NAME]'. " + "The value of those properties is the select statement to use when retrieving data from the specific table during snapshotting. " + "A possible use case for large append-only tables is setting a specific point where to start (resume) snapshotting, in case a previous snapshotting was interrupted.");

    public static final Field SNAPSHOT_DELAY_MS = Field.create("snapshot.delay.ms")
            .withDisplayName("Snapshot Delay (milliseconds)")
            .withType(Type.LONG)
            .withWidth(Width.MEDIUM)
            .withImportance(Importance.LOW)
            .withDescription("The number of milliseconds to delay before a snapshot will begin.")
            .withDefault(0L)
            .withValidation(Field::isNonNegativeLong);

    public static final Field DDL_PARSER_MODE = Field.create("ddl.parser.mode")
            .withDisplayName("DDL parser mode")
            .withEnum(DdlParsingMode.class, DdlParsingMode.ANTLR)
            .withWidth(Width.SHORT)
            .withImportance(Importance.MEDIUM)
            .withDescription("MySQL DDL statements can be parsed in different ways:" + "'legacy' parsing is creating a TokenStream and comparing token by token with an expected values." + "The decisions are made by matched token values." + "'antlr' (the default) uses generated parser from MySQL grammar using ANTLR v4 tool which use ALL(*) algorithm for parsing." + "This parser creates a parsing tree for DDL statement, then walks trough it and apply changes by node types in parsed tree.");

    /**
     * Method that generates a Field for specifying that string columns whose names match a set of regular expressions should
     * have their values truncated to be no longer than the specified number of characters.
     *
     * @param length the maximum length of the column's string values written in source records; must be positive
     * @return the field; never null
     */
    public static final Field TRUNCATE_COLUMN(int length) {
        if (length <= 0) throw new IllegalArgumentException("The truncation length must be positive");
        return Field.create("column.truncate.to." + length + ".chars")
                .withValidation(Field::isInteger)
                .withDescription("A comma-separated list of regular expressions matching fully-qualified names of columns that should " + "be truncated to " + length + " characters.");
    }

    /**
     * Method that generates a Field for specifying that string columns whose names match a set of regular expressions should
     * have their values masked by the specified number of asterisk ('*') characters.
     *
     * @param length the number of asterisks that should appear in place of the column's string values written in source records;
     *            must be positive
     * @return the field; never null
     */
    public static final Field MASK_COLUMN(int length) {
        if (length <= 0) throw new IllegalArgumentException("The mask length must be positive");
        return Field.create("column.mask.with." + length + ".chars")
                .withValidation(Field::isInteger)
                .withDescription("A comma-separated list of regular expressions matching fully-qualified names of columns that should " + "be masked with " + length + " asterisk ('*') characters.");
    }

    /**
     * The set of {@link Field}s defined as part of this configuration.
*/
    // NOTE(review): not final, so technically reassignable; consider making it final.
    public static Field.Set ALL_FIELDS = Field.setOf(USER, PASSWORD, HOSTNAME, PORT, ON_CONNECT_STATEMENTS, SERVER_ID,
            SERVER_NAME, CONNECTION_TIMEOUT_MS, KEEP_ALIVE, KEEP_ALIVE_INTERVAL_MS, CommonConnectorConfig.MAX_QUEUE_SIZE,
            CommonConnectorConfig.MAX_BATCH_SIZE, CommonConnectorConfig.POLL_INTERVAL_MS, BUFFER_SIZE_FOR_BINLOG_READER,
            Heartbeat.HEARTBEAT_INTERVAL, Heartbeat.HEARTBEAT_TOPICS_PREFIX, DATABASE_HISTORY, INCLUDE_SCHEMA_CHANGES,
            INCLUDE_SQL_QUERY, TABLE_WHITELIST, TABLE_BLACKLIST, TABLES_IGNORE_BUILTIN, DATABASE_WHITELIST,
            DATABASE_BLACKLIST, COLUMN_BLACKLIST, SNAPSHOT_MODE, SNAPSHOT_MINIMAL_LOCKING, SNAPSHOT_LOCKING_MODE,
            GTID_SOURCE_INCLUDES, GTID_SOURCE_EXCLUDES, GTID_SOURCE_FILTER_DML_EVENTS, TIME_PRECISION_MODE,
            RelationalDatabaseConnectorConfig.DECIMAL_HANDLING_MODE, SSL_MODE, SSL_KEYSTORE, SSL_KEYSTORE_PASSWORD,
            SSL_TRUSTSTORE, SSL_TRUSTSTORE_PASSWORD, JDBC_DRIVER, BIGINT_UNSIGNED_HANDLING_MODE,
            EVENT_DESERIALIZATION_FAILURE_HANDLING_MODE, INCONSISTENT_SCHEMA_HANDLING_MODE, SNAPSHOT_DELAY_MS,
            DDL_PARSER_MODE, CommonConnectorConfig.TOMBSTONES_ON_DELETE);

    /**
     * The set of {@link Field}s that are included in the {@link #configDef() configuration definition}. This includes
     * all fields defined in this class (though some are always invisible since they are not to be exposed to the user interface)
     * plus several that are specific to the {@link KafkaDatabaseHistory} class, since history is always stored in Kafka
     * when run via the user interface.
     */
    protected static Field.Set EXPOSED_FIELDS = ALL_FIELDS.with(KafkaDatabaseHistory.BOOTSTRAP_SERVERS,
            KafkaDatabaseHistory.TOPIC, KafkaDatabaseHistory.RECOVERY_POLL_ATTEMPTS,
            KafkaDatabaseHistory.RECOVERY_POLL_INTERVAL_MS, DatabaseHistory.SKIP_UNPARSEABLE_DDL_STATEMENTS,
            DatabaseHistory.STORE_ONLY_MONITORED_TABLES_DDL, DatabaseHistory.DDL_FILTER);

    // Resolved once in the constructor from the (possibly deprecated) locking/parsing options.
    private final SnapshotLockingMode snapshotLockingMode;
    private final DdlParsingMode ddlParsingMode;

    /**
     * Creates the connector configuration, resolving the snapshot locking mode from either the
     * deprecated 'snapshot.minimal.locks' flag or the newer 'snapshot.locking.mode' option, and
     * resolving the DDL parser mode (falling back to the field's default when unset).
     *
     * @param config the raw connector {@link Configuration}; never null
     */
    public MySqlConnectorConfig(Configuration config) {
        super(
                config,
                config.getString(SERVER_NAME),
                null, // TODO whitelist handling is still done locally here
                null);

        // If deprecated snapshot.minimal.locking property is explicitly configured
        if (config.hasKey(MySqlConnectorConfig.SNAPSHOT_MINIMAL_LOCKING.name())) {
            // Coerce it into its replacement appropriate snapshot.locking.mode value
            if (config.getBoolean(MySqlConnectorConfig.SNAPSHOT_MINIMAL_LOCKING)) {
                this.snapshotLockingMode = SnapshotLockingMode.MINIMAL;
            }
            else {
                this.snapshotLockingMode = SnapshotLockingMode.EXTENDED;
            }
        }
        else {
            // Otherwise use configured snapshot.locking.mode configuration.
            this.snapshotLockingMode = SnapshotLockingMode.parse(config.getString(SNAPSHOT_LOCKING_MODE), SNAPSHOT_LOCKING_MODE.defaultValueAsString());
        }

        String ddlParsingModeStr = config.getString(MySqlConnectorConfig.DDL_PARSER_MODE);
        this.ddlParsingMode = DdlParsingMode.parse(ddlParsingModeStr, MySqlConnectorConfig.DDL_PARSER_MODE.defaultValueAsString());
    }

    /** @return the effective snapshot locking mode resolved in the constructor */
    public SnapshotLockingMode getSnapshotLockingMode() {
        return this.snapshotLockingMode;
    }

    /** @return the effective DDL parser mode resolved in the constructor */
    public DdlParsingMode getDdlParsingMode() {
        return ddlParsingMode;
    }

    /**
     * Builds the user-facing {@link ConfigDef}, grouping the exposed fields into the
     * "MySQL", "History Storage", "Events" and "Connector" sections.
     */
    protected static ConfigDef configDef() {
        ConfigDef config = new ConfigDef();
        Field.group(config, "MySQL", HOSTNAME, PORT, USER, PASSWORD, ON_CONNECT_STATEMENTS, SERVER_NAME, SERVER_ID,
                SSL_MODE, SSL_KEYSTORE, SSL_KEYSTORE_PASSWORD, SSL_TRUSTSTORE, SSL_TRUSTSTORE_PASSWORD, JDBC_DRIVER);
        Field.group(config, "History Storage", KafkaDatabaseHistory.BOOTSTRAP_SERVERS, KafkaDatabaseHistory.TOPIC,
                KafkaDatabaseHistory.RECOVERY_POLL_ATTEMPTS, KafkaDatabaseHistory.RECOVERY_POLL_INTERVAL_MS,
                DATABASE_HISTORY, DatabaseHistory.SKIP_UNPARSEABLE_DDL_STATEMENTS, DatabaseHistory.DDL_FILTER,
                DatabaseHistory.STORE_ONLY_MONITORED_TABLES_DDL);
        Field.group(config, "Events", INCLUDE_SCHEMA_CHANGES, INCLUDE_SQL_QUERY, TABLES_IGNORE_BUILTIN,
                DATABASE_WHITELIST, TABLE_WHITELIST, COLUMN_BLACKLIST, TABLE_BLACKLIST, DATABASE_BLACKLIST,
                GTID_SOURCE_INCLUDES, GTID_SOURCE_EXCLUDES, GTID_SOURCE_FILTER_DML_EVENTS, BUFFER_SIZE_FOR_BINLOG_READER,
                Heartbeat.HEARTBEAT_INTERVAL, Heartbeat.HEARTBEAT_TOPICS_PREFIX,
                EVENT_DESERIALIZATION_FAILURE_HANDLING_MODE, INCONSISTENT_SCHEMA_HANDLING_MODE,
                CommonConnectorConfig.TOMBSTONES_ON_DELETE);
        Field.group(config, "Connector", CONNECTION_TIMEOUT_MS, KEEP_ALIVE, KEEP_ALIVE_INTERVAL_MS,
                CommonConnectorConfig.MAX_QUEUE_SIZE, CommonConnectorConfig.MAX_BATCH_SIZE,
                CommonConnectorConfig.POLL_INTERVAL_MS, SNAPSHOT_MODE, SNAPSHOT_LOCKING_MODE, SNAPSHOT_MINIMAL_LOCKING,
                TIME_PRECISION_MODE, RelationalDatabaseConnectorConfig.DECIMAL_HANDLING_MODE,
                BIGINT_UNSIGNED_HANDLING_MODE, SNAPSHOT_DELAY_MS, DDL_PARSER_MODE);
        return config;
    }

    /** Rejects a configuration that sets both the database whitelist and blacklist. Returns the error count (0 or 1). */
    private static int validateDatabaseBlacklist(Configuration config, Field field, ValidationOutput problems) {
        String whitelist = config.getString(DATABASE_WHITELIST);
        String blacklist = config.getString(DATABASE_BLACKLIST);
        if (whitelist != null && blacklist != null) {
            problems.accept(DATABASE_BLACKLIST, blacklist, "Whitelist is already specified");
            return 1;
        }
        return 0;
    }

    /** Rejects a configuration that sets both the table whitelist and blacklist. Returns the error count (0 or 1). */
    private static int validateTableBlacklist(Configuration config, Field field, ValidationOutput problems) {
        String whitelist = config.getString(TABLE_WHITELIST);
        String blacklist = config.getString(TABLE_BLACKLIST);
        if (whitelist != null && blacklist != null) {
            problems.accept(TABLE_BLACKLIST, blacklist, "Whitelist is already specified");
            return 1;
        }
        return 0;
    }

    /** Rejects a configuration that sets both GTID source includes and excludes. Returns the error count (0 or 1). */
    private static int validateGtidSetExcludes(Configuration config, Field field, ValidationOutput problems) {
        String includes = config.getString(GTID_SOURCE_INCLUDES);
        String excludes = config.getString(GTID_SOURCE_EXCLUDES);
        if (includes != null && excludes != null) {
            problems.accept(GTID_SOURCE_EXCLUDES, excludes, "Included GTID source UUIDs are already specified");
            return 1;
        }
        return 0;
    }

    /**
     * Validate the new snapshot.locking.mode configuration, which replaces snapshot.minimal.locking.
     *
     * If minimal.locking is explicitly defined and locking.mode is NOT explicitly defined:
     * - coerce minimal.locking into the new snap.locking.mode property.
     *
     * If minimal.locking is NOT explicitly defined and locking.mode IS explicitly defined:
     * - use new locking.mode property.
     *
     * If BOTH minimal.locking and locking.mode ARE defined:
     * - Throw a validation error.
*/ private static int validateSnapshotLockingMode(Configuration config, Field field, ValidationOutput problems) { // Determine which configurations are explicitly defined final boolean isMinimalLockingExplicitlyDefined = config.hasKey(SNAPSHOT_MINIMAL_LOCKING.name()); final boolean isSnapshotModeExplicitlyDefined = config.hasKey(SNAPSHOT_LOCKING_MODE.name()); // If both configuration options are explicitly defined, we'll throw a validation error. if (isMinimalLockingExplicitlyDefined && isSnapshotModeExplicitlyDefined) { // Then display a validation error. problems.accept(SNAPSHOT_MINIMAL_LOCKING, config.getBoolean(SNAPSHOT_MINIMAL_LOCKING), "Deprecated configuration " + SNAPSHOT_MINIMAL_LOCKING.name() + " in conflict. Cannot use both " + SNAPSHOT_MINIMAL_LOCKING.name() + " and " + SNAPSHOT_LOCKING_MODE.name() + " configuration options."); return 1; } // Determine what value to use for SnapshotLockingMode final SnapshotLockingMode lockingModeValue; // if minimalLocking is defined if (isMinimalLockingExplicitlyDefined) { // Grab the configured minimal locks configuration option final boolean minimalLocksEnabled = config.getBoolean(MySqlConnectorConfig.SNAPSHOT_MINIMAL_LOCKING); // Coerce minimal locking => snapshot mode. if (minimalLocksEnabled) { lockingModeValue = SnapshotLockingMode.MINIMAL; } else { lockingModeValue = SnapshotLockingMode.EXTENDED; } } else { // Otherwise use SnapshotLockingMode // Grab explicitly configured value lockingModeValue = SnapshotLockingMode.parse(config.getString(MySqlConnectorConfig.SNAPSHOT_LOCKING_MODE)); } // Sanity check, validate the configured value is a valid option. if (lockingModeValue == null) { problems.accept(SNAPSHOT_LOCKING_MODE, lockingModeValue, "Must be a valid snapshot.locking.mode value"); return 1; } // Everything checks out ok. return 0; } private static int randomServerId() { int lowestServerId = 5400; int highestServerId = 6400; return lowestServerId + new Random().nextInt(highestServerId - lowestServerId); } }
package org.cobbzilla.util.reflect; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.commons.beanutils.MethodUtils; import org.apache.commons.collections.Transformer; import org.apache.commons.lang3.ArrayUtils; import java.io.Closeable; import java.lang.annotation.Annotation; import java.lang.reflect.*; import java.math.BigDecimal; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import static com.google.common.base.Preconditions.checkNotNull; import static java.lang.reflect.Modifier.isFinal; import static java.lang.reflect.Modifier.isStatic; import static org.cobbzilla.util.collection.ArrayUtil.arrayToString; import static org.cobbzilla.util.daemon.ZillaRuntime.*; import static org.cobbzilla.util.string.StringUtil.uncapitalize; /** * Handy tools for working quickly with reflection APIs, which tend to be verbose. */ @Slf4j public class ReflectionUtil { public static final Class<?>[] EMPTY_CLASS_ARRAY = new Class<?>[0]; public static final Class<?>[] SINGLE_STRING_ARG = {String.class}; public static Boolean toBoolean(Object object) { if (object == null) return null; if (object instanceof Boolean) return (Boolean) object; if (object instanceof String) return Boolean.valueOf(object.toString()); return null; } public static Boolean toBoolean(Object object, String field, boolean defaultValue) { final Boolean val = toBoolean(get(object, field)); return val == null ? 
defaultValue : val; } public static Long toLong(Object object) { if (object == null) return null; if (object instanceof Number) return ((Number) object).longValue(); if (object instanceof String) return Long.valueOf(object.toString()); return null; } public static Integer toInteger(Object object) { if (object == null) return null; if (object instanceof Number) return ((Number) object).intValue(); if (object instanceof String) return Integer.valueOf(object.toString()); return null; } public static Integer toIntegerOrNull(Object object) { if (object == null) return null; if (object instanceof Number) return ((Number) object).intValue(); if (object instanceof String) { try { return Integer.valueOf(object.toString()); } catch (Exception e) { log.info("toIntegerOrNull("+object+"): "+e); return null; } } return null; } public static Short toShort(Object object) { if (object == null) return null; if (object instanceof Number) return ((Number) object).shortValue(); if (object instanceof String) return Short.valueOf(object.toString()); return null; } public static Float toFloat(Object object) { if (object == null) return null; if (object instanceof Number) return ((Number) object).floatValue(); if (object instanceof String) return Float.valueOf(object.toString()); return null; } public static Double toDouble(Object object) { if (object == null) return null; if (object instanceof Number) return ((Number) object).doubleValue(); if (object instanceof String) return Double.valueOf(object.toString()); return null; } public static BigDecimal toBigDecimal(Object object) { if (object == null) return null; if (object instanceof Double) return big((Double) object); if (object instanceof Float) return big((Float) object); if (object instanceof Number) return big(((Number) object).longValue()); if (object instanceof String) return big(object.toString()); return null; } /** * Do a Class.forName and only throw unchecked exceptions. * @param clazz full class name. 
May end in [] to indicate array class * @param <T> The class type * @return A Class&lt;clazz&gt; object */ public static <T> Class<? extends T> forName(String clazz) { if (empty(clazz)) return (Class<? extends T>) Object.class; if (clazz.endsWith("[]")) return arrayClass(forName(clazz.substring(0, clazz.length()-2))); try { return (Class<? extends T>) Class.forName(clazz); } catch (Exception e) { return die("Class.forName("+clazz+") error: "+e, e); } } public static Collection<Class> forNames(String[] classNames) { final List<Class> list = new ArrayList<>(); if (!empty(classNames)) for (String c : classNames) list.add(forName(c)); return list; } public static <T> Class<? extends T> arrayClass (Class clazz) { return forName("[L"+clazz.getName()+";"); } /** * Create an instance of a class, only throwing unchecked exceptions. The class must have a default constructor. * @param clazz we will instantiate an object of this type * @param <T> The class type * @return An Object that is an instance of Class&lt;clazz&gt; object */ public static <T> T instantiate(Class<T> clazz) { try { return clazz.getDeclaredConstructor().newInstance(); } catch (Exception e) { return die("Error instantiating "+clazz+": "+e, e); } } /** * Create an instance of a class based on a class name, only throwing unchecked exceptions. The class must have a default constructor. * @param clazz full class name * @param <T> The class type * @return An Object that is an instance of Class&lt;clazz&gt; object */ public static <T> T instantiate(String clazz) { try { return (T) instantiate(forName(clazz)); } catch (Exception e) { return die("instantiate("+clazz+"): "+e, e); } } private static final Map<Class, Map<Object, Enum>> enumCache = new ConcurrentHashMap<>(1000); /** * Create an instance of a class using the supplied argument to a matching single-argument constructor. 
* @param clazz The class to instantiate * @param argument The object that will be passed to a matching single-argument constructor * @param <T> Could be anything * @return A new instance of clazz, created using a constructor that matched argument's class. */ public static <T> T instantiate(Class<T> clazz, Object argument) { Constructor<T> constructor = null; Class<?> tryClass = argument.getClass(); if (clazz.isPrimitive()) { switch (clazz.getName()) { case "boolean": return (T) Boolean.valueOf(argument.toString()); case "byte": return (T) Byte.valueOf(argument.toString()); case "short": return (T) Short.valueOf(argument.toString()); case "char": return (T) Character.valueOf(empty(argument) ? 0 : argument.toString().charAt(0)); case "int": return (T) Integer.valueOf(argument.toString()); case "long": return (T) Long.valueOf(argument.toString()); case "float": return (T) Float.valueOf(argument.toString()); case "double": return (T) Double.valueOf(argument.toString()); default: return die("instantiate: unrecognized primitive type: "+clazz.getName()); } } if (clazz.isEnum()) { return argument == null ? 
null : (T) enumCache .computeIfAbsent(clazz, c -> new ConcurrentHashMap<>(50)) .computeIfAbsent(argument, e -> { try { final Method valueOf = clazz.getMethod("valueOf", SINGLE_STRING_ARG); return (Enum) valueOf.invoke(null, new Object[]{argument.toString()}); } catch (Exception ex) { return die("instantiate: error instantiating enum "+clazz.getName()+": "+e); } }); } while (constructor == null) { try { constructor = clazz.getConstructor(tryClass); } catch (NoSuchMethodException e) { if (tryClass.equals(Object.class)) { // try interfaces for (Class<?> iface : argument.getClass().getInterfaces()) { try { constructor = clazz.getConstructor(iface); } catch (NoSuchMethodException e2) { // noop } } break; } else { tryClass = tryClass.getSuperclass(); } } } if (constructor == null) { die("instantiate: no constructor could be found for class "+clazz.getName()+", argument type "+argument.getClass().getName()); } try { return constructor.newInstance(argument); } catch (Exception e) { return die("instantiate("+clazz.getName()+", "+argument+"): "+e, e); } } /** * Create an instance of a class using the supplied argument to a matching single-argument constructor. * @param clazz The class to instantiate * @param arguments The objects that will be passed to a matching constructor * @param <T> Could be anything * @return A new instance of clazz, created using a constructor that matched argument's class. */ public static <T> T instantiate(Class<T> clazz, Object... 
arguments) { try { for (Constructor constructor : clazz.getConstructors()) { final Class<?>[] cParams = constructor.getParameterTypes(); if (cParams.length == arguments.length) { boolean match = true; for (int i=0; i<cParams.length; i++) { if (!cParams[i].isAssignableFrom(arguments[i].getClass())) { match = false; break; } } if (match) return (T) constructor.newInstance(arguments); } } log.warn("instantiate("+clazz.getName()+"): no matching constructor found, trying with exact match (will probably fail), args="+ArrayUtils.toString(arguments)); final Class<?>[] parameterTypes = new Class[arguments.length]; for (int i=0; i<arguments.length; i++) { parameterTypes[i] = getSimpleClass(arguments[i]); } return clazz.getConstructor(parameterTypes).newInstance(arguments); } catch (Exception e) { return die("instantiate("+clazz.getName()+", "+Arrays.toString(arguments)+"): "+e, e); } } public static Class<?> getSimpleClass(Object argument) { Class<?> argClass = argument.getClass(); final int enhancePos = argClass.getName().indexOf("$$Enhance"); if (enhancePos != -1) { argClass = forName(argClass.getName().substring(0, enhancePos)); } return argClass; } public static String getSimpleClassName(Object argument) { return getSimpleClass(argument).getClass().getSimpleName(); } /** * Make a copy of the object, assuming its class has a copy constructor * @param thing The thing to copy * @param <T> Whatevs * @return A copy of the object, created using the thing's copy constructor */ public static <T> T copy(T thing) { return (T) instantiate(thing.getClass(), thing); } /** * Mirror the object. Create a new instance and copy all fields * @param thing The thing to copy * @param <T> Whatevs * @return A mirror of the object, created using the thing's default constructor and copying all fields with 'copy' */ public static <T> T mirror(T thing) { T copy = (T) instantiate(thing.getClass()); copy(copy, thing); return copy; } public static Object invokeStatic(Method m, Object... 
values) { try { return m.invoke(null, values); } catch (Exception e) { return die("invokeStatic: "+m.getClass().getSimpleName()+"."+m.getName()+"("+arrayToString(values, ", ")+"): "+e, e); } } public static Field getDeclaredField(Class<?> clazz, String field) { try { return clazz.getDeclaredField(field); } catch (NoSuchFieldException e) { if (clazz.equals(Object.class)) { log.info("getDeclaredField: field not found "+clazz.getName()+"/"+field); return null; } } return getDeclaredField(clazz.getSuperclass(), field); } public static Field getField(Class<?> clazz, String field) { try { return clazz.getField(field); } catch (NoSuchFieldException e) { if (clazz.equals(Object.class)) { log.info("getField: field not found "+clazz.getName()+"/"+field); return null; } } return getDeclaredField(clazz.getSuperclass(), field); } public static <T> Method factoryMethod(Class<T> clazz, Object value) { // find a static method that takes the value and returns an instance of the class for (Method m : clazz.getMethods()) { if (m.getReturnType().equals(clazz)) { final Class<?>[] parameterTypes = m.getParameterTypes(); if (parameterTypes != null && parameterTypes.length == 1 && parameterTypes[0].isAssignableFrom(value.getClass())) { return m; } } } log.warn("factoryMethod: class "+clazz.getName()+" does not have static factory method that takes a String, returning null"); return null; } public static <T> T callFactoryMethod(Class<T> clazz, Object value) { final Method m = factoryMethod(clazz, value); return m != null ? 
(T) invokeStatic(m, value) : null; } public static Object scrubStrings(Object thing, String[] fields) { if (empty(thing)) return thing; if (thing.getClass().isPrimitive() || thing instanceof String || thing instanceof Number || thing instanceof Enum) return thing; if (thing instanceof JsonNode) { if (thing instanceof ObjectNode) { for (String field : fields) { if (((ObjectNode) thing).has(field)) { ((ObjectNode) thing).remove(field); } } } else if (thing instanceof ArrayNode) { ArrayNode arrayNode = (ArrayNode) thing; for (int i = 0; i < arrayNode.size(); i++) { scrubStrings(arrayNode.get(i), fields); } } } else if (thing instanceof Map) { final Map map = (Map) thing; final Set toRemove = new HashSet(); for (Object e : map.entrySet()) { Map.Entry entry = (Map.Entry) e; if (ArrayUtils.contains(fields, entry.getKey().toString())) { toRemove.add(entry.getKey()); } else { scrubStrings(entry.getValue(), fields); } } for (Object key : toRemove) map.remove(key); } else if (Object[].class.isAssignableFrom(thing.getClass())) { if ( !((Object[]) thing)[0].getClass().isPrimitive() ) { for (Object obj : ((Object[]) thing)) { scrubStrings(obj, fields); } } } else if (thing instanceof Collection) { for (Object obj : ((Collection) thing)) { scrubStrings(obj, fields); } } else { for (String field : ReflectionUtil.toMap(thing).keySet()) { final Object val = get(thing, field, null); if (val != null) { if (ArrayUtils.contains(fields, field)) { setNull(thing, field, String.class); } else { scrubStrings(val, fields); } } } } return thing; } private enum Accessor { get, set } /** * Copies fields from src to dest. 
Code is easier to read if this method is understdood to be like an assignment statement, dest = src * * We consider only 'getter' methods that meet the following criteria: * (1) starts with "get" * (2) takes zero arguments * (3) has a return value * (4) does not carry any annotation whose simple class name is "Transient" * * The value returned from the source getter will be copied to the destination (via setter), if a setter exists, and: * (1) No getter exists on the destination, or (2) the destination's getter returns a different value (.equals returns false) * * Getters that return null values on the source object will not be copied. * * @param dest destination object * @param src source object * @param <T> objects must share a type * @return count of fields copied */ public static <T> int copy (T dest, T src) { return copy(dest, src, null, null); } /** * Same as copy(dest, src) but only named fields are copied * @param dest destination object * @param src source object * @param fields only fields with these names will be considered for copying * @param <T> objects must share a type * @return count of fields copied */ public static <T> int copy (T dest, T src, String[] fields) { int copyCount = 0; if (fields != null) { for (String field : fields) { try { final Object value = get(src, field, null); if (value != null) { set(dest, field, value); copyCount++; } } catch (Exception e) { log.debug("copy: field=" + field + ": " + e); } } } return copyCount; } /** * Same as copy(dest, src) but only named fields are copied * @param dest destination object, or a Map<String, Object> * @param src source object * @param fields only fields with these names will be considered for copying * @param exclude fields with these names will NOT be considered for copying * @param <T> objects must share a type * @return count of fields copied */ public static <T> int copy (T dest, T src, String[] fields, String[] exclude) { int copyCount = 0; final boolean isMap = dest instanceof Map; try 
{ if (src instanceof Map) copyFromMap(dest, (Map<String, Object>) src, exclude); checkGetter: for (Method getter : src.getClass().getMethods()) { // only look for getters on the source object (methods with no arguments that have a return value) final Class<?>[] types = getter.getParameterTypes(); if (types.length != 0) continue; if (getter.getReturnType().equals(Void.class)) continue;; // and it must be named appropriately final String fieldName = fieldName(getter.getName()); if (fieldName == null || ArrayUtils.contains(exclude, fieldName)) continue; // if specific fields were given, it must be one of those if (fields != null && !ArrayUtils.contains(fields, fieldName)) continue; // getter must not be marked @Transient if (isIgnored(src, fieldName, getter)) continue; // what would the setter be called? final String setterName = setterForGetter(getter.getName()); if (setterName == null) continue; // get the setter method on the destination object Method setter = null; if (!isMap) { try { setter = dest.getClass().getMethod(setterName, getter.getReturnType()); } catch (Exception e) { log.debug("copy: setter not found: " + setterName); continue; } } // do not copy null fields (should this be configurable?) final Object srcValue = getter.invoke(src); if (srcValue == null) continue; // does the dest have a getter? 
if so grab the current value Object destValue = null; try { if (isMap) { destValue = ((Map) dest).get(fieldName); } else { destValue = getter.invoke(dest); } } catch (Exception e) { log.debug("copy: error calling getter on dest: "+e); } // copy the value from src to dest, if it's different if (!srcValue.equals(destValue)) { if (isMap) { ((Map) dest).put(fieldName, srcValue); } else { setter.invoke(dest, srcValue); } copyCount++; } } } catch (Exception e) { throw new IllegalArgumentException("Error copying "+dest.getClass().getSimpleName()+" from src="+src+": "+e, e); } return copyCount; } private static <T> boolean isIgnored(T o, String fieldName, Method getter) { Field field = null; try { field = o.getClass().getDeclaredField(fieldName); } catch (NoSuchFieldException ignored) {} return isIgnored(getter.getAnnotations()) || (field != null && isIgnored(field.getAnnotations())); } private static boolean isIgnored(Annotation[] annotations) { if (annotations != null) { for (Annotation a : annotations) { final Class<?>[] interfaces = a.getClass().getInterfaces(); if (interfaces != null) { for (Class<?> i : interfaces) { if (i.getSimpleName().equals("Transient")) { return true; } } } } } return false; } public static String fieldName(String method) { if (method.startsWith("get")) return uncapitalize(method.substring(3)); if (method.startsWith("set")) return uncapitalize(method.substring(3)); if (method.startsWith("is")) return uncapitalize(method.substring(2)); return null; } public static String setterForGetter(String getter) { if (getter.startsWith("get")) return "set"+getter.substring(3); if (getter.startsWith("is")) return "set"+getter.substring(2); return null; } /** * Call setters on an object based on keys and values in a Map * @param dest destination object * @param src map of field name -> value * @param <T> type of object * @return the destination object */ public static <T> T copyFromMap (T dest, Map<String, Object> src) { return copyFromMap(dest, src, null); 
} public static <T> T copyFromMap (T dest, Map<String, Object> src, String[] exclude) { for (Map.Entry<String, Object> entry : src.entrySet()) { final String key = entry.getKey(); if (exclude != null && ArrayUtils.contains(exclude, key)) continue; final Object value = entry.getValue(); if (value != null && Map.class.isAssignableFrom(value.getClass())) { if (hasGetter(dest, key)) { Map m = (Map) value; if (m.isEmpty()) continue; if (m.keySet().iterator().next().getClass().equals(String.class)) { copyFromMap(get(dest, key), (Map<String, Object>) m); } else { log.info("copyFromMap: not recursively copying Map (has non-String keys): " + key); } } } else { if (Map.class.isAssignableFrom(dest.getClass())) {// || dest.getClass().getName().equals(HashMap.class.getName())) { ((Map) dest).put(key, value); } else { if (hasSetter(dest, key, value.getClass())) { set(dest, key, value); } else { final Class pc = getPrimitiveClass(value.getClass()); if (pc != null && hasSetter(dest, key, pc)) { set(dest, key, value); } else { log.info("copyFromMap: skipping uncopyable property: "+key); } } } } } return dest; } public static Class getPrimitiveClass(Class<?> clazz) { if (clazz.isArray()) return arrayClass(getPrimitiveClass(clazz.getComponentType())); switch (clazz.getSimpleName()) { case "Long": return long.class; case "Integer": return int.class; case "Short": return short.class; case "Double": return double.class; case "Float": return float.class; case "Boolean": return boolean.class; case "Character": return char.class; default: return null; } } public static final String[] TO_MAP_STANDARD_EXCLUDES = {"declaringClass", "class"}; /** * Make a copy of the object, assuming its class has a copy constructor * @param thing The thing to copy * @return A copy of the object, created using the thing's copy constructor */ public static Map<String, Object> toMap(Object thing) { return toMap(thing, null, TO_MAP_STANDARD_EXCLUDES); } public static Map<String, Object> toMap(Object thing, 
String[] fields) { return toMap(thing, fields, TO_MAP_STANDARD_EXCLUDES); } public static Map<String, Object> toMap(Object thing, String[] fields, String[] exclude) { final Map<String, Object> map = new HashMap<>(); copy(map, thing, fields, exclude); return map; } /** * Find the concrete class for the first declared parameterized class variable * @param clazz The class to search for parameterized types * @return The first concrete class for a parameterized type found in clazz */ public static <T> Class<T> getFirstTypeParam(Class clazz) { return getTypeParam(clazz, 0); } public static <T> Class<T> getTypeParam(Class clazz, int index) { // todo: add a cache on this thing... could do wonders Class check = clazz; while (check.getGenericSuperclass() == null || !(check.getGenericSuperclass() instanceof ParameterizedType)) { check = check.getSuperclass(); if (check.equals(Object.class)) die("getTypeParam("+clazz.getName()+"): no type parameters found"); } final ParameterizedType parameterizedType = (ParameterizedType) check.getGenericSuperclass(); final Type[] actualTypeArguments = parameterizedType.getActualTypeArguments(); if (index >= actualTypeArguments.length) die("getTypeParam("+clazz.getName()+"): "+actualTypeArguments.length+" type parameters found, index "+index+" out of bounds"); if (actualTypeArguments[index] instanceof Class) return (Class) actualTypeArguments[index]; if (actualTypeArguments[index] instanceof ParameterizedType) return (Class) ((ParameterizedType) actualTypeArguments[index]).getRawType(); return (Class<T>) ((Type) actualTypeArguments[index]).getClass(); } /** * Find the concrete class for a parameterized class variable. * @param clazz The class to start searching. 
Search will continue up through superclasses * @param impl The type (or a supertype) of the parameterized class variable * @return The first concrete class found that is assignable to an instance of impl */ public static <T> Class<T> getFirstTypeParam(Class clazz, Class impl) { // todo: add a cache on this thing... could do wonders Class check = clazz; while (check != null && !check.equals(Object.class)) { Class superCheck = check; Type superType = superCheck.getGenericSuperclass(); while (superType != null && !superType.equals(Object.class)) { if (superType instanceof ParameterizedType) { final ParameterizedType ptype = (ParameterizedType) superType; final Class<?> rawType = (Class<?>) ptype.getRawType(); if (impl.isAssignableFrom(rawType)) { return (Class<T>) rawType; } for (Type t : ptype.getActualTypeArguments()) { if (impl.isAssignableFrom((Class<?>) t)) { return (Class<T>) t; } } } else if (superType instanceof Class) { superType = ((Class) superType).getGenericSuperclass(); } } check = check.getSuperclass(); } return null; } public static Object get(Object object, String field) { Object target = object; for (String token : field.split("\\.")) { if (target == null) return null; target = invoke_get(target, token); } return target; } public static <T> T get(Object object, String field, T defaultValue) { try { final Object val = get(object, field); return val == null ? 
defaultValue : (T) val; } catch (Exception e) { log.warn("get: "+e); return defaultValue; } } public static boolean isGetter(Method method) { return method.getName().startsWith("get") || method.getName().startsWith("is") && method.getParameters().length == 0; } public static boolean hasGetter(Object object, String field) { Object target = object; try { for (String token : field.split("\\.")) { final String methodName = getAccessorMethodName(Accessor.get, token); target = MethodUtils.invokeExactMethod(target, methodName, null); } } catch (NoSuchMethodException e) { return false; } catch (Exception e) { return false; } return true; } public static Class getterType(Object object, String field) { try { final Object o = get(object, field); if (o == null) return die("getterType: cannot determine field type, value was null"); return o.getClass(); } catch (Exception e) { return die("getterType: simple get failed: "+e, e); } } /** * Call a setter * @param object the object to call set(field) on * @param field the field name * @param value the value to set */ public static void set(Object object, String field, Object value) { set(object, field, value, value == null ? 
null : value.getClass()); } /** * Call a setter with a hint as to what the type should be * @param object the object to call set(field) on * @param field the field name * @param value the value to set * @param type type of the field */ public static void set(Object object, String field, Object value, Class<?> type) { if (type != null) { if (value == null) { setNull(object, field, type); return; } else if (!type.isAssignableFrom(value.getClass())) { // if value is not assignable to type, then the type class should have a constructor for the value class value = instantiate(type, value); } } final String[] tokens = field.split("\\."); Object target = getTarget(object, tokens); if (target != null) invoke_set(target, tokens[tokens.length - 1], value); } public static void setNull(Object object, String field, Class type) { final String[] tokens = field.split("\\."); Object target = getTarget(object, tokens); if (target != null) invoke_set_null(target, tokens[tokens.length - 1], type); } private static Object getTarget(Object object, String[] tokens) { Object target = object; for (int i=0; i<tokens.length-1; i++) { target = invoke_get(target, tokens[i]); if (target == null) { log.warn("getTarget("+object+", "+Arrays.toString(tokens)+"): exiting early, null object found at token="+tokens[i]); return null; } } return target; } public static boolean hasSetter(Object object, String field, Class type) { Object target = object; final String[] tokens = field.split("\\."); try { for (int i=0; i<tokens.length-1; i++) { target = MethodUtils.invokeExactMethod(target, tokens[i], null); } target.getClass().getMethod(getAccessorMethodName(Accessor.set, tokens[tokens.length-1]), type); } catch (NoSuchMethodException e) { return false; } catch (Exception e) { return false; } return true; } private static String getAccessorMethodName(Accessor accessor, String token) { return token.length() == 1 ? 
accessor.name() +token.toUpperCase() : accessor.name() + token.substring(0, 1).toUpperCase() + token.substring(1); } private static Object invoke_get(Object target, String token) { final String methodName = getAccessorMethodName(Accessor.get, token); try { target = MethodUtils.invokeMethod(target, methodName, null); } catch (Exception e) { final String isMethod = methodName.replaceFirst("get", "is"); try { target = MethodUtils.invokeMethod(target, isMethod, null); } catch (Exception e2) { if (target instanceof Map) return ((Map) target).get(token); if (target instanceof ObjectNode) return ((ObjectNode) target).get(token); throw new IllegalArgumentException("Error calling "+methodName+" and "+isMethod+": "+e+", "+e2); } } return target; } private static Map<String, Method> setterCache = new ConcurrentHashMap<>(5000); private static Map<Class, Object[]> nullArgCache = new ConcurrentHashMap<>(5000); private static void invoke_set(Object target, String token, Object value) { final String cacheKey = target.getClass().getName()+"."+token+"."+(value == null ? "null" : value.getClass().getName()); final Method method = setterCache.computeIfAbsent(cacheKey, s -> { final String methodName = getAccessorMethodName(Accessor.set, token); Method found = null; if (value == null) { // try to find a single-arg method named methodName... for (Method m : target.getClass().getMethods()) { if (m.getName().equals(methodName) && m.getParameterTypes().length == 1) { if (found != null) { return die("invoke_set: value was null and multiple single-arg methods named " + methodName + " exist"); } else { found = m; } } } } else { try { found = MethodUtils.getMatchingAccessibleMethod(target.getClass(), methodName, new Class<?>[]{value.getClass()}); } catch (Exception e) { return die("Error calling " + methodName + ": " + e); } } return found != null ? 
found : die("invoke_set: no method " + methodName + " found on target: " + target); }); if (value == null) { try { final Object[] nullArg = nullArgCache.computeIfAbsent(method.getParameterTypes()[0], type -> new Object[] {getNullArgument(type)}); method.invoke(target, nullArg); } catch (Exception e) { die("Error calling " + method.getName() + " on target: " + target + " - " + e); } } else { try { MethodUtils.invokeMethod(target, method.getName(), value); } catch (Exception e) { die("Error calling " + method.getName() + ": " + e); } } } private static void invoke_set_null(Object target, String token, Class type) { final String methodName = getAccessorMethodName(Accessor.set, token); try { MethodUtils.invokeMethod(target, methodName, new Object[] {getNullArgument(type)}, new Class[] { type }); } catch (Exception e) { die("Error calling "+methodName+": "+e); } } private static Object getNullArgument(Class clazz) { if (clazz.isPrimitive()) { switch (clazz.getName()) { case "boolean": return false; case "byte": return (byte) 0; case "short": return (short) 0; case "char": return (char) 0; case "int": return (int) 0; case "long": return (long) 0; case "float": return (float) 0; case "double": return (double) 0; default: return die("instantiate: unrecognized primitive type: "+clazz.getName()); } } return null; } /** * Finds the type parameter for the given class. * * @param klass a parameterized class * @return the class's type parameter */ public static Class<?> getTypeParameter(Class<?> klass) { return getTypeParameter(klass, Object.class); } /** * Finds the type parameter for the given class which is assignable to the bound class. * * @param klass a parameterized class * @param bound the type bound * @param <T> the type bound * @return the class's type parameter */ @SuppressWarnings("unchecked") public static <T> Class<T> getTypeParameter(Class<?> klass, Class<? 
super T> bound) { Type t = checkNotNull(klass); while (t instanceof Class<?>) { t = ((Class<?>) t).getGenericSuperclass(); } /* This is not guaranteed to work for all cases with convoluted piping * of type parameters: but it can at least resolve straight-forward * extension with single type parameter (as per [Issue-89]). * And when it fails to do that, will indicate with specific exception. */ if (t instanceof ParameterizedType) { // should typically have one of type parameters (first one) that matches: for (Type param : ((ParameterizedType) t).getActualTypeArguments()) { if (param instanceof Class<?>) { final Class<T> cls = determineClass(bound, param); if (cls != null) { return cls; } } else if (param instanceof TypeVariable) { for (Type paramBound : ((TypeVariable<?>) param).getBounds()) { if (paramBound instanceof Class<?>) { final Class<T> cls = determineClass(bound, paramBound); if (cls != null) { return cls; } } } } } } return die("Cannot figure out type parameterization for " + klass.getName()); } @SuppressWarnings("unchecked") private static <T> Class<T> determineClass(Class<? 
// NOTE(review): the following lines are the tail of a reflection utility class whose
// declaration (and the start of this first method) lies before this chunk. The visible
// fragment returns `candidate` as a Class<T> when it is a Class assignable to `bound`,
// otherwise null — confirm against the full method signature.
super T> bound, Type candidate) {
    if (candidate instanceof Class<?>) {
        final Class<?> cls = (Class<?>) candidate;
        if (bound.isAssignableFrom(cls)) {
            return (Class<T>) cls;
        }
    }
    return null;
}

/**
 * Closes {@code o}: if it implements {@link Closeable} its {@code close()} is called
 * directly; otherwise a public no-arg {@code close} method is looked up reflectively
 * and invoked.
 *
 * @param o the object to close; a null argument is a no-op
 * @throws Exception if closing fails or no close method exists
 */
public static void close(Object o) throws Exception {
    if (o == null) return;
    if (o instanceof Closeable) {
        ((Closeable) o).close();
    } else {
        final Method closeMethod = o.getClass().getMethod("close", (Class<?>[]) null);
        // NOTE(review): Class.getMethod throws NoSuchMethodException rather than
        // returning null, so this die(...) branch looks unreachable — confirm intent.
        if (closeMethod == null) die("no close method found on " + o.getClass().getName());
        closeMethod.invoke(o);
    }
}

/**
 * Best-effort close: delegates to {@link #close(Object)} and logs (but does not
 * propagate) any exception.
 *
 * @param o the object to close; a null argument is a no-op
 */
public static void closeQuietly(Object o) {
    if (o == null) return;
    try {
        close(o);
    } catch (Exception e) {
        log.warn("close: error closing: "+e);
    }
}

/**
 * Simple field/value pair that applies itself to a target object via
 * {@code ReflectionUtil.set}.
 *
 * @param <T> type of the object whose field will be set
 */
@NoArgsConstructor @AllArgsConstructor
public static class Setter<T> {
    // name of the field to set on the target
    @Getter protected String field;
    // value to assign (as a String; conversion is up to ReflectionUtil.set)
    @Getter protected String value;

    /** Assigns {@code value} to {@code field} on {@code data} reflectively. */
    public void set(T data) { ReflectionUtil.set(data, field, value); }

    @Override public String toString() { return getClass().getName() + '{' + field + ", " + value + '}'; }
}

/**
 * Uses the SecurityManager's class-context array to discover the calling class.
 * Index 2 corresponds to the caller of the method that invoked the inspector.
 */
private static class CallerInspector extends SecurityManager {
    public String getCallerClassName() { return getClassContext()[2].getName(); }
    public String getCallerClassName(int depth) { return getClassContext()[depth].getName(); }
}

private final static CallerInspector callerInspector = new CallerInspector();

/** @return fully-qualified name of the class two frames up the call stack */
public static String callerClassName() { return callerInspector.getCallerClassName(); }

/** @return fully-qualified name of the class at the given class-context depth */
public static String callerClassName(int depth) { return callerInspector.getCallerClassName(depth); }

/**
 * Finds the first stack frame whose description contains {@code match}.
 * NOTE(review): despite the name, this returns the frame's METHOD name, not its
 * class name (compare {@link #callerMethodName(String)}) — looks like a
 * copy/paste slip; confirm before relying on it.
 */
public static String callerClassName(String match) {
    final StackTraceElement s = callerFrame(match);
    return s == null ? "callerClassName: no match: "+match : s.getMethodName();
}

/** @return method name two frames up the call stack */
public static String callerMethodName() { return new Throwable().getStackTrace()[2].getMethodName(); }

/** @return method name at the given stack-trace depth */
public static String callerMethodName(int depth) { return new Throwable().getStackTrace()[depth].getMethodName(); }

/** @return method name of the first frame matching {@code match}, or a "no match" message */
public static String callerMethodName(String match) {
    final StackTraceElement s = callerFrame(match);
    return s == null ? "callerMethodName: no match: "+match : s.getMethodName();
}

/**
 * Describes a caller frame as {@code Class.method:line}.
 * NOTE(review): {@code Math.max(t.length-1, 2)} yields index 2 even when the trace
 * has fewer than 3 frames, which would throw ArrayIndexOutOfBoundsException —
 * possibly {@code Math.min} was intended; confirm.
 */
public static String caller () {
    final StackTraceElement[] t = new Throwable().getStackTrace();
    if (t == null || t.length == 0) return "caller: NO STACK TRACE!";
    return caller(t[Math.max(t.length-1, 2)]);
}

/** Describes the frame at {@code depth} (clamped to the last frame) as {@code Class.method:line}. */
public static String caller (int depth) {
    final StackTraceElement[] t = new Throwable().getStackTrace();
    if (t == null || t.length == 0) return "caller: NO STACK TRACE!";
    return caller(t[Math.min(depth, t.length-1)]);
}

/** Describes the first frame matching {@code match}, or returns a "no match" message. */
public static String caller(String match) {
    final StackTraceElement s = callerFrame(match);
    return s == null ? "caller: no match: "+match : caller(s);
}

/**
 * Scans the current stack trace for the first frame whose {@code Class.method:line}
 * description contains {@code match}.
 *
 * @return the matching frame, or null if none matches or no trace is available
 */
public static StackTraceElement callerFrame(String match) {
    final StackTraceElement[] t = new Throwable().getStackTrace();
    if (t == null || t.length == 0) return null;
    for (StackTraceElement s : t) if (caller(s).contains(match)) return s;
    return null;
}

/** Formats a stack frame as {@code Class.method:line}. */
public static String caller(StackTraceElement s) { return s.getClassName() + "." + s.getMethodName() + ":" + s.getLineNumber(); }

/**
 * Replace any string values with their transformed values
 * @param map a map of things
 * @param transformer a transformer
 * @return the same map, but if any value was a string, the transformer has been applied to it.
 */
public static Map transformStrings(Map map, Transformer transformer) {
    if (empty(map)) return map;
    // collect replacements first to avoid mutating the map while iterating it
    final Map setOps = new HashMap();
    for (Object entry : map.entrySet()) {
        final Map.Entry e = (Map.Entry) entry;
        if (e.getValue() instanceof String) {
            setOps.put(e.getKey(), transformer.transform(e.getValue()).toString());
        } else if (e.getValue() instanceof Map) {
            // recurse into nested maps (transforms them in place)
            setOps.put(e.getKey(), transformStrings((Map) e.getValue(), transformer));
        }
    }
    // apply the collected replacements back onto the original map
    for (Object entry : setOps.entrySet()) {
        final Map.Entry e = (Map.Entry) entry;
        map.put(e.getKey(), e.getValue());
    }
    return map;
}

/** @return true if {@code f} is a static final String field */
public static boolean isStaticFinalString(Field f) { return isStaticFinal(f, String.class, ""); }

/** @return true if {@code f} is a static final String field whose name starts with {@code prefix} */
public static boolean isStaticFinalString(Field f, String prefix) { return isStaticFinal(f, String.class, prefix); }

/** @return true if {@code f} is a static final field assignable to {@code type} */
public static boolean isStaticFinal(Field f, Class type) { return isStaticFinal(f, type, ""); }

/**
 * @return true if {@code f} is static, final, assignable to {@code type}, and its
 *         name starts with {@code prefix}
 */
public static boolean isStaticFinal(Field f, Class type, String prefix) {
    final int mods = f.getModifiers();
    return isStatic(mods) && isFinal(mods) && type.isAssignableFrom(f.getType()) && f.getName().startsWith(prefix);
}

/**
 * Reads the value of a static field.
 *
 * @param f a static field (read with a null receiver)
 * @return the field's value cast to T; calls die(...) on any access failure
 */
public static <T> T constValue(Field f) {
    try {
        return (T) f.get(null);
    } catch (Exception e) {
        return die("constValue: "+e);
    }
}
}
package org.deeplearning4j.nn.layers; import lombok.extern.slf4j.Slf4j; import org.deeplearning4j.common.config.DL4JClassLoading; import org.nd4j.linalg.factory.Nd4j; /** * Simple meta helper util class for instantiating * platform specific layer helpers that handle interaction with * lower level libraries like cudnn and onednn. * * @author Adam Gibson */ @Slf4j public class HelperUtils { /** * Creates a {@link LayerHelper} * for use with platform specific code. * @param <T> the actual class type to be returned * @param cudnnHelperClassName the cudnn class name * @param oneDnnClassName the one dnn class name * @param layerHelperSuperClass the layer helper super class * @param layerName the name of the layer to be created * @param arguments the arguments to be used in creation of the layer * @return */ public static <T extends LayerHelper> T createHelper(String cudnnHelperClassName, String oneDnnClassName, Class<? extends LayerHelper> layerHelperSuperClass, String layerName, Object... arguments) { String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend"); LayerHelper helperRet = null; if("CUDA".equalsIgnoreCase(backend) && cudnnHelperClassName != null && !cudnnHelperClassName.isEmpty()) { if(DL4JClassLoading.loadClassByName(cudnnHelperClassName) != null) { log.debug("Attempting to initialize cudnn helper {}",cudnnHelperClassName); helperRet = DL4JClassLoading.createNewInstance( cudnnHelperClassName, layerHelperSuperClass, arguments); log.debug("Cudnn helper {} successfully initialized",cudnnHelperClassName); } else { log.warn("Unable to find class {} using the classloader set for Dl4jClassLoading. 
Trying to use class loader that loaded the class {} instead.",cudnnHelperClassName,layerHelperSuperClass.getName()); ClassLoader classLoader = DL4JClassLoading.getDl4jClassloader(); DL4JClassLoading.setDl4jClassloaderFromClass(layerHelperSuperClass); try { helperRet = DL4JClassLoading.createNewInstance( cudnnHelperClassName, layerHelperSuperClass, arguments); } catch (Exception e) { log.warn("Unable to use helper implementation {} for helper type {}, please check your classpath. Falling back to built in normal methods for now.",cudnnHelperClassName,layerHelperSuperClass.getName()); } log.warn("Returning class loader to original one."); DL4JClassLoading.setDl4jClassloader(classLoader); } if (helperRet != null && !helperRet.checkSupported()) { return null; } if(helperRet != null) { log.debug("{} successfully initialized",cudnnHelperClassName); } } else if("CPU".equalsIgnoreCase(backend) && oneDnnClassName != null && !oneDnnClassName.isEmpty()) { helperRet = DL4JClassLoading.createNewInstance( oneDnnClassName, arguments); log.trace("Created oneDNN helper: {}, layer {}", oneDnnClassName,layerName); } if (helperRet != null && !helperRet.checkSupported()) { log.debug("Removed helper {} as not supported", helperRet.getClass()); return null; } return (T) helperRet; } }
package org.dita.dost.module;

import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.FileUtils.getRelativePath;
import static org.dita.dost.util.FileUtils.getRelativeUnixPath;
import static org.dita.dost.util.FileUtils.resolve;
import static org.dita.dost.util.Job.*;
import static org.dita.dost.util.Configuration.*;
import static org.dita.dost.util.URLUtils.*;
import static org.dita.dost.util.FilterUtils.*;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.*;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stream.StreamResult;

import org.apache.xerces.xni.grammars.XMLGrammarPool;
import org.apache.xml.resolver.tools.CatalogResolver;
import org.dita.dost.exception.DITAOTException;
import org.dita.dost.exception.DITAOTXMLErrorHandler;
import org.dita.dost.log.MessageUtils;
import org.dita.dost.pipeline.AbstractPipelineInput;
import org.dita.dost.pipeline.AbstractPipelineOutput;
import org.dita.dost.reader.DitaValReader;
import org.dita.dost.reader.GrammarPoolManager;
import org.dita.dost.reader.SubjectSchemeReader;
import org.dita.dost.util.*;
import org.dita.dost.writer.*;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.*;
import org.xml.sax.helpers.XMLFilterImpl;

/**
 * DebugAndFilterModule implements the second step in preprocess: it inserts debug
 * information into every DITA file and filters out information that is not
 * necessary. Each eligible file from the {@code Job} is pushed through a SAX
 * filter pipeline (debug, profiling, validation, normalization, force-unique,
 * writer) and serialized into the temp directory.
 *
 * @author Zhang, Yuan Peng
 */
public final class DebugAndFilterModule extends AbstractPipelineModuleImpl {

    /** Generate {@code xtrf} and {@code xtrc} attributes */
    final boolean genDebugInfo = Boolean.parseBoolean(Configuration.configuration.get("generate-debug-attributes"));

    /** Absolute input map path. */
    private File inputMap;
    /** use grammar pool cache */
    private boolean gramcache = true;
    /** Pass system IDs through to the reader (controlled by the setsystemid parameter). */
    private boolean setSystemId;
    /** Profiling is enabled. */
    private boolean profilingEnabled;
    /** DTD/schema validation is enabled. */
    private boolean validate;
    /** Transformation type, e.g. used to decide print-oriented filtering. */
    private String transtype;
    /** Force-unique topic filtering is enabled. */
    private boolean forceUnique;
    /** Absolute DITA-OT base path. */
    private File ditaDir;
    /** DITAVAL filter file, or null when none was supplied. */
    private File ditavalFile;
    /** Absolute input directory path. */
    private File inputDir;
    /** Per-file filter utils, refined with subject schemes in processFile(). */
    private FilterUtils filterUtils;
    /** Absolute path to current destination file. */
    private File outputFile;
    /** Valid attribute values per subject scheme (element -> attribute -> values). */
    private Map<String, Map<String, Set<String>>> validateMap;
    /** Default attribute values per subject scheme. */
    private Map<String, Map<String, String>> defaultValueMap;
    /** XMLReader instance for parsing dita file */
    private XMLReader reader;
    /** Absolute path to current source file. */
    private File currentFile;
    /** Subject-scheme dictionary: source file -> set of scheme files that apply to it. */
    private Map<File, Set<File>> dic;
    private SubjectSchemeReader subjectSchemeReader;
    /** Filter utils built from the DITAVAL file; basis for per-file refinement. */
    private FilterUtils baseFilterUtils;
    /** Reusable force-unique filter; also accumulates copy-to mappings it creates. */
    private ForceUniqueFilter forceUniqueFilter;
    /** Reusable serializer filter placed last in the pipeline. */
    private DitaWriterFilter ditaWriterFilter;

    /**
     * Runs the module: reads arguments, initializes readers/filters, processes
     * every DITA/DITAMAP (or conref-target/copy-to-source) file, performs the
     * copy-to task, and persists the updated job configuration.
     *
     * @param input pipeline parameters
     * @return always {@code null}
     * @throws DITAOTException if any processing step fails
     */
    @Override
    public AbstractPipelineOutput execute(final AbstractPipelineInput input) throws DITAOTException {
        if (logger == null) {
            throw new IllegalStateException("Logger not set");
        }
        try {
            readArguments(input);
            init();
            for (final FileInfo f: job.getFileInfo()) {
                if (ATTR_FORMAT_VALUE_DITA.equals(f.format) || ATTR_FORMAT_VALUE_DITAMAP.equals(f.format)
                        || f.isConrefTarget || f.isCopyToSource) {
                    processFile(f);
                }
            }
            performCopytoTask();
            job.write();
        } catch (final Exception e) {
            // NOTE(review): printStackTrace alongside logger + rethrow duplicates
            // error reporting — consider removing the direct stack-trace print.
            e.printStackTrace();
            throw new DITAOTException("Exception doing debug and filter module processing: " + e.getMessage(), e);
        }
        return null;
    }

    /**
     * Processes a single file: loads any applicable subject schemes, refines the
     * profiling filter, then runs the file through the SAX pipeline and writes the
     * result into the temp directory.
     *
     * @param f file to process (path is relative to the input directory)
     */
    private void processFile(final FileInfo f) {
        currentFile = new File(inputDir, f.file.getPath());
        if (!currentFile.exists()) {
            // Assuming this is an copy-to target file, ignore it
            logger.debug("Ignoring a copy-to file " + f.file);
            return;
        }
        outputFile = new File(job.tempDir, f.file.getPath());
        final File outputDir = outputFile.getParentFile();
        if (!outputDir.exists() && !outputDir.mkdirs()) {
            logger.error("Failed to create output directory " + outputDir.getAbsolutePath());
            return;
        }
        logger.info("Processing " + currentFile.getAbsolutePath());

        // Load the subject schemes that apply to this file (if any) so validation
        // and default-value maps reflect them; otherwise fall back to empty maps.
        final Set<File> schemaSet = dic.get(f.file);
        if (schemaSet != null && !schemaSet.isEmpty()) {
            logger.debug("Loading subject schemes");
            subjectSchemeReader.reset();
            for (final File schema : schemaSet) {
                subjectSchemeReader.loadSubjectScheme(new File(FileUtils.resolve(job.tempDir.getAbsolutePath(), schema.getPath()) + SUBJECT_SCHEME_EXTENSION));
            }
            validateMap = subjectSchemeReader.getValidValuesMap();
            defaultValueMap = subjectSchemeReader.getDefaultValueMap();
        } else {
            // NOTE(review): raw Collections.EMPTY_MAP generates unchecked warnings;
            // Collections.emptyMap() would be type-safe without behavior change.
            validateMap = Collections.EMPTY_MAP;
            defaultValueMap = Collections.EMPTY_MAP;
        }
        if (profilingEnabled) {
            filterUtils = baseFilterUtils.refine(subjectSchemeReader.getSubjectSchemeMap());
        }

        OutputStream out = null;
        try {
            out = new FileOutputStream(outputFile);
            reader.setErrorHandler(new DITAOTXMLErrorHandler(currentFile.getAbsolutePath(), logger));
            final TransformerFactory tf = TransformerFactory.newInstance();
            final Transformer serializer = tf.newTransformer();
            // Chain the pipeline filters onto the shared reader; the last filter
            // becomes the SAX source for the identity transform below.
            XMLReader xmlSource = reader;
            for (final XMLFilter filter: getProcessingPipe(currentFile, f.file)) {
                filter.setParent(xmlSource);
                xmlSource = filter;
            }
            // ContentHandler must be reset so e.g. Saxon 9.1 will reassign ContentHandler
            // when reusing filter with multiple Transformers.
            xmlSource.setContentHandler(null);
            final Source source = new SAXSource(xmlSource, new InputSource(currentFile.toURI().toString()));
            final Result result = new StreamResult(out);
            serializer.transform(source, result);
        } catch (final RuntimeException e) {
            throw e;
        } catch (final Exception e) {
            logger.error(e.getMessage(), e) ;
        } finally {
            if (out != null) {
                try {
                    out.close();
                } catch (final Exception e) {
                    logger.error(e.getMessage(), e) ;
                }
            }
        }
    }

    /**
     * Initializes per-run state: writes subject scheme files, loads the
     * subject-scheme dictionary, builds the base DITAVAL filter (when profiling is
     * on), and creates the XML reader and reusable filters.
     *
     * @throws IOException if reading the subject dictionary fails
     * @throws DITAOTException if subject scheme output fails
     * @throws SAXException if XML reader configuration fails
     */
    private void init() throws IOException, DITAOTException, SAXException {
        // Output subject schemas
        outputSubjectScheme();
        subjectSchemeReader = new SubjectSchemeReader();
        subjectSchemeReader.setLogger(logger);
        dic = SubjectSchemeReader.readMapFromXML(new File(job.tempDir, FILE_NAME_SUBJECT_DICTIONARY));

        if (profilingEnabled) {
            final DitaValReader filterReader = new DitaValReader();
            filterReader.setLogger(logger);
            filterReader.initXMLReader(setSystemId);
            Map<FilterKey, Action> filterMap;
            if (ditavalFile != null) {
                filterReader.read(ditavalFile.getAbsoluteFile());
                filterMap = filterReader.getFilterMap();
            } else {
                filterMap = Collections.EMPTY_MAP;
            }
            baseFilterUtils = new FilterUtils(printTranstype.contains(transtype), filterMap);
            baseFilterUtils.setLogger(logger);
        }
        initXmlReader();
        initFilters();
    }

    /**
     * Init xml reader used for pipeline parsing: enables validation when requested,
     * installs the catalog-based entity resolver, and (optionally) the Xerces
     * grammar pool for DTD/schema caching.
     */
    private void initXmlReader() throws SAXException {
        CatalogUtils.setDitaDir(ditaDir);
        reader = XMLUtils.getXMLReader();
        if (validate) {
            reader.setFeature(FEATURE_VALIDATION, true);
            try {
                reader.setFeature(FEATURE_VALIDATION_SCHEMA, true);
            } catch (final SAXNotRecognizedException e) {
                // Not Xerces, ignore exception
            }
        }
        reader.setFeature(FEATURE_NAMESPACE, true);
        final CatalogResolver resolver = CatalogUtils.getCatalogResolver();
        reader.setEntityResolver(resolver);
        if (gramcache) {
            final XMLGrammarPool grammarPool = GrammarPoolManager.getGrammarPool();
            try {
                reader.setProperty("http://apache.org/xml/properties/internal/grammar-pool", grammarPool);
                logger.info("Using Xerces grammar pool for DTD and schema caching.");
            } catch (final NoClassDefFoundError e) {
                logger.debug("Xerces not available, not using grammar caching");
            } catch (final SAXNotRecognizedException e) {
                logger.warn("Failed to set Xerces grammar pool for parser: " + e.getMessage());
            } catch (final SAXNotSupportedException e) {
                logger.warn("Failed to set Xerces grammar pool for parser: " + e.getMessage());
            }
        }
    }

    /**
     * Initialize reusable filters. These are created once and re-parented for each
     * processed file in {@link #getProcessingPipe(File, File)}.
     */
    private void initFilters() {
        forceUniqueFilter = new ForceUniqueFilter();
        forceUniqueFilter.setLogger(logger);
        forceUniqueFilter.setJob(job);
        forceUniqueFilter.setEntityResolver(reader.getEntityResolver());

        ditaWriterFilter = new DitaWriterFilter();
        ditaWriterFilter.setLogger(logger);
        ditaWriterFilter.setJob(job);
        ditaWriterFilter.setEntityResolver(reader.getEntityResolver());
    }

    /**
     * Get pipe line filters
     *
     * @param fileToParse absolute path to current file being processed
     * @param inFile relative file path
     * @return ordered filter chain: debug, profiling (optional), validation,
     *         normalization, force-unique (optional), writer
     */
    private List<XMLFilter> getProcessingPipe(final File fileToParse, final File inFile) {
        final List<XMLFilter> pipe = new ArrayList<XMLFilter>();

        if (genDebugInfo) {
            final DebugFilter debugFilter = new DebugFilter();
            debugFilter.setLogger(logger);
            debugFilter.setInputFile(fileToParse);
            pipe.add(debugFilter);
        }
        if (filterUtils != null) {
            final ProfilingFilter profilingFilter = new ProfilingFilter();
            profilingFilter.setLogger(logger);
            profilingFilter.setFilterUtils(filterUtils);
            pipe.add(profilingFilter);
        }
        final ValidationFilter validationFilter = new ValidationFilter();
        validationFilter.setLogger(logger);
        validationFilter.setValidateMap(validateMap);
        validationFilter.setCurrentFile(toURI(inFile));
        validationFilter.setJob(job);
        pipe.add(validationFilter);

        final NormalizeFilter normalizeFilter = new NormalizeFilter();
        normalizeFilter.setLogger(logger);
        pipe.add(normalizeFilter);

        if (forceUnique) {
            forceUniqueFilter.setCurrentFile(currentFile);
            pipe.add(forceUniqueFilter);
        }

        ditaWriterFilter.setDefaultValueMap(defaultValueMap);
        ditaWriterFilter.setCurrentFile(currentFile);
        ditaWriterFilter.setOutputFile(outputFile);
        pipe.add(ditaWriterFilter);

        return pipe;
    }

    /**
     * Reads module parameters from the pipeline input into fields. Relative paths
     * (DITAVAL file, input directory) are resolved against the base directory.
     *
     * @param input pipeline parameters
     */
    private void readArguments(AbstractPipelineInput input) {
        final String baseDir = input.getAttribute(ANT_INVOKER_PARAM_BASEDIR);
        ditaDir = new File(input.getAttribute(ANT_INVOKER_EXT_PARAM_DITADIR));
        transtype = input.getAttribute(ANT_INVOKER_EXT_PARAM_TRANSTYPE);
        // Profiling defaults to on unless explicitly disabled.
        profilingEnabled = true;
        if (input.getAttribute(ANT_INVOKER_PARAM_PROFILING_ENABLED) != null) {
            profilingEnabled = Boolean.parseBoolean(input.getAttribute(ANT_INVOKER_PARAM_PROFILING_ENABLED));
        }
        if (profilingEnabled) {
            if (input.getAttribute(ANT_INVOKER_PARAM_DITAVAL) != null) {
                ditavalFile = new File(input.getAttribute(ANT_INVOKER_PARAM_DITAVAL));
                if (!ditavalFile.isAbsolute()) {
                    ditavalFile = new File(baseDir, ditavalFile.getPath()).getAbsoluteFile();
                }
            }
        }
        gramcache = "yes".equalsIgnoreCase(input.getAttribute(ANT_INVOKER_EXT_PARAM_GRAMCACHE));
        // NOTE(review): Boolean.valueOf(null) is false, so a missing "validate"
        // attribute silently disables validation — confirm that is intended.
        validate = Boolean.valueOf(input.getAttribute("validate"));
        setSystemId = "yes".equals(input.getAttribute(ANT_INVOKER_EXT_PARAN_SETSYSTEMID));
        forceUnique = Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAN_FORCE_UNIQUE));
        inputDir = job.getInputDir();
        if (!inputDir.isAbsolute()) {
            inputDir = new File(baseDir, inputDir.getPath()).getAbsoluteFile();
        }
        inputMap = new File(inputDir, job.getInputMap().getPath()).getAbsoluteFile();
    }

    /**
     * Output subject schema file. Walks the subject-scheme relation graph
     * breadth-first from its roots, merging each child scheme into its parent and
     * serializing both into the temp directory.
     *
     * @throws DITAOTException if generation files
     */
    private void outputSubjectScheme() throws DITAOTException {
        try {
            final Map<File, Set<File>> graph = SubjectSchemeReader.readMapFromXML(new File(job.tempDir, FILE_NAME_SUBJECT_RELATION));

            final Queue<File> queue = new LinkedList<File>(graph.keySet());
            final Set<File> visitedSet = new HashSet<File>();
            final DocumentBuilder builder = XMLUtils.getDocumentBuilder();
            builder.setEntityResolver(CatalogUtils.getCatalogResolver());

            while (!queue.isEmpty()) {
                final File parent = queue.poll();
                final Set<File> children = graph.get(parent);
                if (children != null) {
                    queue.addAll(children);
                }
                // "ROOT" is the synthetic graph root; skip it and anything already done.
                if (new File("ROOT").equals(parent) || visitedSet.contains(parent)) {
                    continue;
                }
                visitedSet.add(parent);
                // Prefer the already-generated temp copy of the parent scheme if present.
                File tmprel = new File(FileUtils.resolve(job.tempDir, parent) + SUBJECT_SCHEME_EXTENSION);
                Document parentRoot;
                if (!tmprel.exists()) {
                    final File src = new File(job.getInputDir(), parent.getPath());
                    parentRoot = builder.parse(src);
                } else {
                    parentRoot = builder.parse(tmprel);
                }
                if (children != null) {
                    for (final File childpath: children) {
                        final Document childRoot = builder.parse(new File(inputMap.getParentFile(), childpath.getPath()));
                        mergeScheme(parentRoot, childRoot);
                        generateScheme(new File(job.tempDir, childpath.getPath() + SUBJECT_SCHEME_EXTENSION), childRoot);
                    }
                }

                //Output parent scheme
                generateScheme(new File(job.tempDir.getAbsoluteFile(), parent.getPath() + SUBJECT_SCHEME_EXTENSION), parentRoot);
            }
        } catch (final Exception e) {
            logger.error(e.getMessage(), e) ;
            throw new DITAOTException(e);
        }
    }

    /**
     * Merges two subject scheme documents in place. For each subjectdef in the
     * parent: a keyref pulls the parent's children into the matching child-scheme
     * subjectdef (extend child), while a keys attribute pulls the matching child
     * subjectdef's children into the parent (merge into parent). Duplicate keys
     * are skipped in both directions.
     *
     * @param parentRoot parent scheme document (modified in place)
     * @param childRoot child scheme document (modified in place)
     */
    private void mergeScheme(final Document parentRoot, final Document childRoot) {
        final Queue<Element> pQueue = new LinkedList<Element>();
        pQueue.offer(parentRoot.getDocumentElement());

        while (!pQueue.isEmpty()) {
            final Element pe = pQueue.poll();
            NodeList pList = pe.getChildNodes();
            for (int i = 0; i < pList.getLength(); i++) {
                final Node node = pList.item(i);
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    pQueue.offer((Element)node);
                }
            }

            String value = pe.getAttribute(ATTRIBUTE_NAME_CLASS);
            if (StringUtils.isEmptyString(value) || !SUBJECTSCHEME_SUBJECTDEF.matches(value)) {
                continue;
            }

            if (!StringUtils.isEmptyString(value = pe.getAttribute(ATTRIBUTE_NAME_KEYREF))) {
                // extend child scheme
                final Element target = searchForKey(childRoot.getDocumentElement(), value);
                if (target == null) {
                    /*
                     * TODO: we have a keyref here to extend into child scheme, but can't
                     * find any matching <subjectdef> in child scheme. Shall we throw out
                     * a warning?
                     *
                     * Not for now, just bypass it.
                     */
                    continue;
                }

                // target found
                pList = pe.getChildNodes();
                for (int i = 0; i < pList.getLength(); i++) {
                    final Node tmpnode = childRoot.importNode(pList.item(i), false);
                    if (tmpnode.getNodeType() == Node.ELEMENT_NODE
                            && searchForKey(target, ((Element)tmpnode).getAttribute(ATTRIBUTE_NAME_KEYS)) != null) {
                        continue;
                    }
                    target.appendChild(tmpnode);
                }
            } else if (!StringUtils.isEmptyString(value = pe.getAttribute(ATTRIBUTE_NAME_KEYS))) {
                // merge into parent scheme
                final Element target = searchForKey(childRoot.getDocumentElement(), value);
                if (target != null) {
                    pList = target.getChildNodes();
                    for (int i = 0; i < pList.getLength(); i++) {
                        final Node tmpnode = parentRoot.importNode(pList.item(i), false);
                        if (tmpnode.getNodeType() == Node.ELEMENT_NODE
                                && searchForKey(pe, ((Element)tmpnode).getAttribute(ATTRIBUTE_NAME_KEYS)) != null) {
                            continue;
                        }
                        pe.appendChild(tmpnode);
                    }
                }
            }
        }
    }

    /**
     * Breadth-first search for a subjectdef element with the given keys value.
     *
     * @param root subtree to search
     * @param key keys attribute value to match
     * @return the matching subjectdef element, or {@code null} if none found
     */
    private Element searchForKey(final Element root, final String key) {
        if (root == null || StringUtils.isEmptyString(key)) {
            return null;
        }
        final Queue<Element> queue = new LinkedList<Element>();
        queue.offer(root);

        while (!queue.isEmpty()) {
            final Element pe = queue.poll();
            final NodeList pchildrenList = pe.getChildNodes();
            for (int i = 0; i < pchildrenList.getLength(); i++) {
                final Node node = pchildrenList.item(i);
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    queue.offer((Element)node);
                }
            }

            String value = pe.getAttribute(ATTRIBUTE_NAME_CLASS);
            if (StringUtils.isEmptyString(value) || !SUBJECTSCHEME_SUBJECTDEF.matches(value)) {
                continue;
            }

            value = pe.getAttribute(ATTRIBUTE_NAME_KEYS);
            if (StringUtils.isEmptyString(value)) {
                continue;
            }

            if (value.equals(key)) {
                return pe;
            }
        }
        return null;
    }

    /**
     * Serialize subject scheme file.
     *
     * @param filename output filepath
     * @param root subject scheme document
     *
     * @throws DITAOTException if generation fails
     */
    private void generateScheme(final File filename, final Document root) throws DITAOTException {
        final File p = filename.getParentFile();
        if (!p.exists() && !p.mkdirs()) {
            throw new DITAOTException("Failed to make directory " + p.getAbsolutePath());
        }
        FileOutputStream out = null;
        try {
            out = new FileOutputStream(filename);
            final StreamResult res = new StreamResult(out);
            final DOMSource ds = new DOMSource(root);
            final TransformerFactory tff = TransformerFactory.newInstance();
            final Transformer tf = tff.newTransformer();
            tf.transform(ds, res);
        } catch (final Exception e) {
            logger.error(e.getMessage(), e) ;
            throw new DITAOTException(e);
        } finally {
            if (out != null) {
                try {
                    out.close();
                } catch (IOException e) {
                    throw new DITAOTException(e);
                }
            }
        }
    }

    /**
     * Execute copy-to task, generate copy-to targets base on sources. Combines the
     * job's copy-to map with any mappings produced by the force-unique filter;
     * existing targets are reported (DOTX064W) rather than overwritten.
     */
    private void performCopytoTask() {
        final Map<File, File> copytoMap = new HashMap<File, File>();
        for (final Map.Entry<URI, URI> e: job.getCopytoMap().entrySet()) {
            copytoMap.put(toFile(e.getKey()), toFile(e.getValue()));
        }
        if (forceUniqueFilter != null) {
            copytoMap.putAll(forceUniqueFilter.copyToMap);
        }

        for (final Map.Entry<File, File> entry: copytoMap.entrySet()) {
            final File copytoTarget = entry.getKey();
            final File copytoSource = entry.getValue();
            final File srcFile = new File(job.tempDir, copytoSource.getPath());
            final File targetFile = new File(job.tempDir, copytoTarget.getPath());

            if (targetFile.exists()) {
                logger.warn(MessageUtils.getInstance().getMessage("DOTX064W", copytoTarget.getPath()).toString());
            } else {
                final File inputMapInTemp = new File(job.tempDir, job.getInputMap().getPath()).getAbsoluteFile();
                copyFileWithPIReplaced(srcFile, targetFile, copytoTarget, inputMapInTemp);
                // add new file info into job
                final FileInfo src = job.getFileInfo(toURI(copytoSource));
                final FileInfo.Builder b = src != null ? new FileInfo.Builder(src) : new FileInfo.Builder();
                final FileInfo dst = b.uri(toURI(copytoTarget)).isCopyToSource(false).build();
                job.add(dst);
            }
        }
    }

    /**
     * Copy files and replace workdir PI contents.
     *
     * @param src source file in the temp directory
     * @param target copy-to target file to create
     * @param copytoTargetFilename relative copy-to target path (for path2proj calculation)
     * @param inputMapInTemp absolute path of the input map inside the temp directory
     */
    public void copyFileWithPIReplaced(final File src, final File target, final File copytoTargetFilename, final File inputMapInTemp) {
        if (!target.getParentFile().exists() && !target.getParentFile().mkdirs()) {
            logger.error("Failed to create copy-to target directory " + target.getParentFile().getAbsolutePath());
            return;
        }
        final File path2project = DebugAndFilterModule.getPathtoProject(copytoTargetFilename, target, inputMapInTemp, job);
        final File workdir = target.getParentFile();
        XMLFilter filter = new CopyToFilter(workdir, path2project);
        logger.info("Processing " + src.getAbsolutePath() + " to " + target.getAbsolutePath());
        try {
            XMLUtils.transform(src, target, Arrays.asList(filter));
        } catch (final DITAOTException e) {
            logger.error("Failed to write copy-to file: " + e.getMessage(), e);
        }
    }

    /**
     * XML filter to rewrite processing instructions to reflect copy-to location.
     * The following processing-instructions are processed:
     *
     * <ul>
     * <li>{@link Constants#PI_WORKDIR_TARGET PI_WORKDIR_TARGET}</li>
     * <li>{@link Constants#PI_WORKDIR_TARGET_URI PI_WORKDIR_TARGET_URI}</li>
     * <li>{@link Constants#PI_PATH2PROJ_TARGET PI_PATH2PROJ_TARGET}</li>
     * <li>{@link Constants#PI_PATH2PROJ_TARGET_URI PI_PATH2PROJ_TARGET_URI}</li>
     * </ul>
     */
    private static final class CopyToFilter extends XMLFilterImpl {

        private final File workdir;
        private final File path2project;

        CopyToFilter(final File workdir, final File path2project) {
            super();
            this.workdir = workdir;
            this.path2project = path2project;
        }

        @Override
        public void processingInstruction(final String target, final String data) throws SAXException {
            String d = data;
            if(target.equals(PI_WORKDIR_TARGET)) {
                if (workdir != null) {
                    try {
                        // On Windows the path is prefixed with a separator so the PI
                        // value stays an absolute unix-style path.
                        if (!OS_NAME.toLowerCase().contains(OS_NAME_WINDOWS)) {
                            d = workdir.getCanonicalPath();
                        } else {
                            d = UNIX_SEPARATOR + workdir.getCanonicalPath();
                        }
                    } catch (final IOException e) {
                        throw new RuntimeException("Failed to get canonical path for working directory: " + e.getMessage(), e);
                    }
                }
            } else if(target.equals(PI_WORKDIR_TARGET_URI)) {
                if (workdir != null) {
                    d = workdir.toURI().toString();
                }
            } else if (target.equals(PI_PATH2PROJ_TARGET)) {
                if (path2project != null) {
                    d = path2project.getPath();
                }
            } else if (target.equals(PI_PATH2PROJ_TARGET_URI)) {
                if (path2project != null) {
                    d = toURI(path2project).toString();
                    if (!d.endsWith(URI_SEPARATOR)) {
                        d = d + URI_SEPARATOR;
                    }
                }
            }
            getContentHandler().processingInstruction(target, d);
        }
    }

    /**
     * Get path to base directory
     *
     * @param filename relative input file path from base directory
     * @param traceFilename absolute input file
     * @param inputMap absolute path to start file
     * @return path to base directory, {@code null} if not available
     */
    public static File getPathtoProject(final File filename, final File traceFilename, final File inputMap, final Job job) {
        if (job.getGeneratecopyouter() != Job.Generate.OLDSOLUTION) {
            if (isOutFile(traceFilename, inputMap)) {
                return toFile(getRelativePathFromOut(traceFilename.getAbsoluteFile(), job));
            } else {
                return new File(getRelativeUnixPath(traceFilename.getAbsolutePath(), inputMap.getAbsolutePath())).getParentFile();
            }
        } else {
            return getRelativePath(filename);
        }
    }

    /**
     * Just for the overflowing files.
     * @param overflowingFile overflowingFile
     * @return relative path to out
     */
    public static String getRelativePathFromOut(final File overflowingFile, final Job job) {
        final File relativePath = getRelativePath(job.getInputFile(), overflowingFile);
        final File outputDir = job.getOutputDir().getAbsoluteFile();
        final File outputPathName = new File(outputDir, "index.html");
        final File finalOutFilePathName = resolve(outputDir, relativePath.getPath());
        final File finalRelativePathName = getRelativePath(finalOutFilePathName, outputPathName);
        File parentDir = finalRelativePathName.getParentFile();
        if (parentDir == null || parentDir.getPath().isEmpty()) {
            parentDir = new File(".");
        }
        return parentDir.getPath() + File.separator;
    }

    /**
     * Check if path falls outside start document directory
     *
     * @param filePathName absolute path to test
     * @param inputMap absolute input map path
     * @return {@code true} if outside start directory, otherwise {@code false}
     */
    private static boolean isOutFile(final File filePathName, final File inputMap){
        final File relativePath = getRelativePath(inputMap.getAbsoluteFile(), filePathName.getAbsoluteFile());
        return !(relativePath.getPath().length() == 0 || !relativePath.getPath().startsWith(".."));
    }

}
package org.hisp.dhis.startup; import com.google.common.collect.Lists; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hisp.dhis.jdbc.StatementBuilder; import org.hisp.dhis.system.startup.AbstractStartupRoutine; import org.hisp.quick.StatementHolder; import org.hisp.quick.StatementManager; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Transactional; import java.sql.ResultSet; import java.sql.Statement; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * @author Lars Helge Overland */ public class TableAlteror extends AbstractStartupRoutine { private static final Log log = LogFactory.getLog( TableAlteror.class ); // Dependencies @Autowired private StatementManager statementManager; @Autowired private StatementBuilder statementBuilder; // Execute @Override @Transactional public void execute() { int defaultCategoryComboId = getDefaultCategoryCombo(); int defaultOptionComboId = getDefaultOptionCombo(); // Drop obsolete tables executeSql( "DROP TABLE categoryoptioncomboname" ); executeSql( "DROP TABLE orgunitgroupsetstructure" ); executeSql( "DROP TABLE orgunitstructure" ); executeSql( "DROP TABLE orgunithierarchystructure" ); executeSql( "DROP TABLE orgunithierarchy" ); executeSql( "DROP TABLE columnorder" ); executeSql( "DROP TABLE roworder" ); executeSql( "DROP TABLE sectionmembers" ); executeSql( "DROP TABLE reporttable_categoryoptioncombos" ); executeSql( "DROP TABLE reporttable_dataelementgroupsets" ); executeSql( "DROP TABLE dashboardcontent_datamartexports" ); executeSql( "DROP TABLE dashboardcontent_mapviews" ); executeSql( "DROP TABLE dashboardcontent_documents" ); executeSql( "DROP TABLE dashboardcontent_maps" ); executeSql( "DROP TABLE dashboardcontent_reports" ); executeSql( "DROP TABLE dashboardcontent_reporttables" ); executeSql( "DROP TABLE dashboardcontent" ); executeSql( "DROP TABLE 
customvalue" ); executeSql( "DROP TABLE reporttable_displaycolumns" ); executeSql( "DROP TABLE reportreporttables" ); executeSql( "DROP TABLE frequencyoverrideassociation" ); executeSql( "DROP TABLE dataelement_dataelementgroupsetmembers" ); executeSql( "DROP TABLE dashboardcontent_olapurls" ); executeSql( "DROP TABLE olapurl" ); executeSql( "DROP TABLE target" ); executeSql( "DROP TABLE calculateddataelement" ); executeSql( "DROP TABLE systemsequence" ); executeSql( "DROP TABLE reporttablecolumn" ); executeSql( "DROP TABLE datamartexport" ); executeSql( "DROP TABLE datamartexportdataelements" ); executeSql( "DROP TABLE datamartexportindicators" ); executeSql( "DROP TABLE datamartexportorgunits" ); executeSql( "DROP TABLE datamartexportperiods" ); executeSql( "DROP TABLE datasetlockedperiods" ); executeSql( "DROP TABLE datasetlocksource" ); executeSql( "DROP TABLE datasetlock" ); executeSql( "DROP TABLE datasetlockexceptions" ); executeSql( "DROP TABLE indicator_indicatorgroupsetmembers" ); executeSql( "DROP TABLE maplegendsetindicator" ); executeSql( "DROP TABLE maplegendsetdataelement" ); executeSql( "DROP TABLE loginfailure" ); executeSql( "DROP TABLE dashboarditem_trackedentitytabularreports" ); executeSql( "DROP TABLE categoryoptioncombousergroupaccesses" ); executeSql( "DROP TABLE validationrulegroupuserrolestoalert" ); executeSql( "DROP TABLE expressionoptioncombo" ); executeSql( "DROP TABLE orgunitgroupdatasets" ); executeSql( "DROP TABLE datavalue_audit" ); executeSql( "DROP TABLE datadictionaryusergroupaccesses" ); executeSql( "DROP TABLE datadictionaryindicators" ); executeSql( "DROP TABLE datadictionarydataelements" ); executeSql( "DROP TABLE datadictionary" ); executeSql( "DROP TABLE caseaggregationcondition" ); executeSql( "DROP TABLE trackedentitytabularreportusergroupaccesses" ); executeSql( "DROP TABLE trackedentitytabularreport_filters" ); executeSql( "DROP TABLE trackedentitytabularreport_dimensions" ); executeSql( "DROP TABLE 
trackedentitytabularreport" ); executeSql( "DROP TABLE trackedentityaggregatereportusergroupaccesses" ); executeSql( "DROP TABLE trackedentityaggregatereport_filters" ); executeSql( "DROP TABLE trackedentityaggregatereport_dimension" ); executeSql( "DROP TABLE trackedentityaggregatereport" ); executeSql( "ALTER TABLE categoryoptioncombo drop column userid" ); executeSql( "ALTER TABLE categoryoptioncombo drop column publicaccess" ); executeSql( "ALTER TABLE categoryoptioncombo alter column name type text" ); executeSql( "ALTER TABLE dataelementcategoryoption drop column categoryid" ); executeSql( "ALTER TABLE reporttable DROP column paramleafparentorganisationunit" ); executeSql( "ALTER TABLE reporttable DROP column dimension_type" ); executeSql( "ALTER TABLE reporttable DROP column dimensiontype" ); executeSql( "ALTER TABLE reporttable DROP column tablename" ); executeSql( "ALTER TABLE reporttable DROP column existingtablename" ); executeSql( "ALTER TABLE reporttable DROP column docategoryoptioncombos" ); executeSql( "ALTER TABLE reporttable DROP column mode" ); executeSql( "ALTER TABLE categoryoptioncombo DROP COLUMN displayorder" ); executeSql( "ALTER TABLE section DROP COLUMN label" ); executeSql( "ALTER TABLE section DROP COLUMN title" ); executeSql( "ALTER TABLE organisationunit DROP COLUMN polygoncoordinates" ); executeSql( "ALTER TABLE organisationunit DROP COLUMN geocode" ); executeSql( "ALTER TABLE indicator DROP COLUMN extendeddataelementid" ); executeSql( "ALTER TABLE indicator DROP COLUMN numeratoraggregationtype" ); executeSql( "ALTER TABLE indicator DROP COLUMN denominatoraggregationtype" ); executeSql( "ALTER TABLE dataset DROP COLUMN locked" ); executeSql( "ALTER TABLE dataset DROP COLUMN skipaggregation" ); executeSql( "ALTER TABLE configuration DROP COLUMN completenessrecipientsid" ); executeSql( "ALTER TABLE dataelement DROP COLUMN alternativename" ); executeSql( "ALTER TABLE dataelement DROP COLUMN aggregateexportcategoryoptioncombo" ); 
executeSql( "ALTER TABLE dataelement DROP COLUMN aggregateexportattributeoptioncombo" ); executeSql( "ALTER TABLE dataset DROP COLUMN aggregateexportcategoryoptioncombo" ); executeSql( "ALTER TABLE dataset DROP COLUMN aggregateexportattributeoptioncombo" ); executeSql( "ALTER TABLE indicator DROP COLUMN alternativename" ); executeSql( "ALTER TABLE orgunitgroup DROP COLUMN image" ); executeSql( "ALTER TABLE report DROP COLUMN usingorgunitgroupsets" ); executeSql( "ALTER TABLE eventchart DROP COLUMN datatype" ); executeSql( "ALTER TABLE validationrule DROP COLUMN type" ); executeSql( "ALTER TABLE organisationunit DROP COLUMN active" ); executeSql( "ALTER TABLE organisationunit DROP COLUMN uuid" ); executeSql( "DROP INDEX datamart_crosstab" ); // prepare uid function insertUidDbFunction(); // remove relative period type executeSql( "DELETE FROM period WHERE periodtypeid=(select periodtypeid from periodtype where name in ( 'Survey', 'OnChange', 'Relative' ))" ); executeSql( "DELETE FROM periodtype WHERE name in ( 'Survey', 'OnChange', 'Relative' )" ); // mapping executeSql( "DROP TABLE maporganisationunitrelation" ); executeSql( "ALTER TABLE mapview DROP COLUMN mapid" ); executeSql( "ALTER TABLE mapview DROP COLUMN mapsource" ); executeSql( "ALTER TABLE mapview DROP COLUMN mapsourcetype" ); executeSql( "ALTER TABLE mapview DROP COLUMN mapdatetype" ); executeSql( "ALTER TABLE mapview DROP COLUMN featuretype" ); executeSql( "ALTER TABLE mapview DROP COLUMN bounds" ); executeSql( "ALTER TABLE mapview DROP COLUMN valuetype" ); executeSql( "ALTER TABLE mapview DROP COLUMN legendtype" ); executeSql( "ALTER TABLE mapview ALTER COLUMN opacity TYPE double precision" ); executeSql( "UPDATE incomingsms SET userid = 0 WHERE userid IS NULL" ); executeSql( "ALTER TABLE smscommands ALTER COLUMN completenessmethod TYPE text" ); executeSql( "UPDATE smscommands SET completenessmethod='ALL_DATAVALUE' WHERE completenessmethod='1'" ); executeSql( "UPDATE smscommands SET 
completenessmethod='AT_LEAST_ONE_DATAVALUE' WHERE completenessmethod='2'" ); executeSql( "UPDATE smscommands SET completenessmethod='DO_NOT_MARK_COMPLETE' WHERE completenessmethod='3'" ); executeSql( "ALTER TABLE smscommands ALTER COLUMN uid set NOT NULL" ); executeSql( "ALTER TABLE smscommands ALTER COLUMN created set NOT NULL" ); executeSql( "ALTER TABLE smscommands ALTER COLUMN lastUpdated set NOT NULL" ); executeSql( "ALTER TABLE maplegend DROP CONSTRAINT maplegend_name_key" ); executeSql( "UPDATE mapview SET layer = 'thematic1' WHERE layer IS NULL" ); executeSql( "UPDATE mapview SET hidden = false WHERE hidden IS NULL" ); executeSql( "UPDATE mapview SET eventclustering = false WHERE eventclustering IS NULL" ); executeSql( "UPDATE mapview SET eventpointradius = 0 WHERE eventpointradius IS NULL" ); executeSql( "UPDATE programnotificationtemplate SET trackedentityattributeid = 0 WHERE trackedentityattributeid IS NULL" ); executeSql( "UPDATE programnotificationtemplate SET dataelementid = 0 WHERE dataelementid IS NULL" ); executeSql( "DELETE FROM systemsetting WHERE name = 'longitude'" ); executeSql( "DELETE FROM systemsetting WHERE name = 'latitude'" ); executeSql( "DELETE FROM systemsetting WHERE name = 'keySystemMonitoringUrl'" ); executeSql( "DELETE FROM systemsetting WHERE name = 'keySystemMonitoringUsername'" ); executeSql( "DELETE FROM systemsetting WHERE name = 'keySystemMonitoringPassword'" ); executeSql( "ALTER TABLE maplayer DROP CONSTRAINT maplayer_mapsource_key" ); executeSql( "ALTER TABLE maplayer DROP COLUMN mapsource" ); executeSql( "ALTER TABLE maplayer DROP COLUMN mapsourcetype" ); executeSql( "ALTER TABLE maplayer DROP COLUMN layer" ); // extended data element executeSql( "ALTER TABLE dataelement DROP CONSTRAINT fk_dataelement_extendeddataelementid" ); executeSql( "ALTER TABLE dataelement DROP COLUMN extendeddataelementid" ); executeSql( "ALTER TABLE indicator DROP CONSTRAINT fk_indicator_extendeddataelementid" ); executeSql( "ALTER TABLE 
indicator DROP COLUMN extendeddataelementid" ); executeSql( "DROP TABLE extendeddataelement" ); executeSql( "ALTER TABLE organisationunit DROP COLUMN hasPatients" ); // category combo not null executeSql( "update dataelement set categorycomboid = " + defaultCategoryComboId + " where categorycomboid is null" ); executeSql( "alter table dataelement alter column categorycomboid set not null" ); executeSql( "update dataset set categorycomboid = " + defaultCategoryComboId + " where categorycomboid is null" ); executeSql( "alter table dataset alter column categorycomboid set not null" ); executeSql( "update program set categorycomboid = " + defaultCategoryComboId + " where categorycomboid is null" ); executeSql( "alter table program alter column categorycomboid set not null" ); // categories_categoryoptions // set to 0 temporarily int c1 = executeSql( "UPDATE categories_categoryoptions SET sort_order=0 WHERE sort_order is NULL OR sort_order=0" ); if ( c1 > 0 ) { updateSortOrder( "categories_categoryoptions", "categoryid", "categoryoptionid" ); } executeSql( "ALTER TABLE categories_categoryoptions DROP CONSTRAINT categories_categoryoptions_pkey" ); executeSql( "ALTER TABLE categories_categoryoptions ADD CONSTRAINT categories_categoryoptions_pkey PRIMARY KEY (categoryid, sort_order)" ); // categorycombos_categories // set to 0 temporarily int c2 = executeSql( "update categorycombos_categories SET sort_order=0 where sort_order is NULL OR sort_order=0" ); if ( c2 > 0 ) { updateSortOrder( "categorycombos_categories", "categorycomboid", "categoryid" ); } executeSql( "ALTER TABLE categorycombos_categories DROP CONSTRAINT categorycombos_categories_pkey" ); executeSql( "ALTER TABLE categorycombos_categories ADD CONSTRAINT categorycombos_categories_pkey PRIMARY KEY (categorycomboid, sort_order)" ); // categorycombos_optioncombos executeSql( "ALTER TABLE categorycombos_optioncombos DROP CONSTRAINT categorycombos_optioncombos_pkey" ); executeSql( "ALTER TABLE 
categorycombos_optioncombos ADD CONSTRAINT categorycombos_optioncombos_pkey PRIMARY KEY (categoryoptioncomboid)" ); executeSql( "ALTER TABLE categorycombos_optioncombos DROP CONSTRAINT fk4bae70f697e49675" ); // categoryoptioncombos_categoryoptions executeSql( "alter table categoryoptioncombos_categoryoptions drop column sort_order" ); executeSql( "alter table categoryoptioncombos_categoryoptions add constraint categoryoptioncombos_categoryoptions_pkey primary key(categoryoptioncomboid, categoryoptionid)" ); // dataelementcategoryoption executeSql( "ALTER TABLE dataelementcategoryoption DROP CONSTRAINT fk_dataelement_categoryid" ); executeSql( "ALTER TABLE dataelementcategoryoption DROP CONSTRAINT dataelementcategoryoption_shortname_key" ); // minmaxdataelement query index executeSql( "CREATE INDEX index_minmaxdataelement ON minmaxdataelement( sourceid, dataelementid, categoryoptioncomboid )" ); // update periodType field to ValidationRule executeSql( "UPDATE validationrule SET periodtypeid = (SELECT periodtypeid FROM periodtype WHERE name='Monthly') WHERE periodtypeid is null" ); // set varchar to text executeSql( "ALTER TABLE dataelement ALTER COLUMN description TYPE text" ); executeSql( "ALTER TABLE dataelementgroupset ALTER COLUMN description TYPE text" ); executeSql( "ALTER TABLE indicatorgroupset ALTER COLUMN description TYPE text" ); executeSql( "ALTER TABLE orgunitgroupset ALTER COLUMN description TYPE text" ); executeSql( "ALTER TABLE indicator ALTER COLUMN description TYPE text" ); executeSql( "ALTER TABLE validationrule ALTER COLUMN description TYPE text" ); executeSql( "ALTER TABLE expression ALTER COLUMN expression TYPE text" ); executeSql( "ALTER TABLE translation ALTER COLUMN value TYPE text" ); executeSql( "ALTER TABLE organisationunit ALTER COLUMN comment TYPE text" ); executeSql( "ALTER TABLE program ALTER COLUMN description TYPE text" ); executeSql( "ALTER TABLE trackedentityattribute ALTER COLUMN description TYPE text" ); executeSql( "ALTER TABLE 
programrule ALTER COLUMN condition TYPE text" ); executeSql( "ALTER TABLE programruleaction ALTER COLUMN content TYPE text" ); executeSql( "ALTER TABLE programruleaction ALTER COLUMN data TYPE text" ); executeSql( "ALTER TABLE trackedentitycomment ALTER COLUMN commenttext TYPE text" ); executeSql( "ALTER TABLE users ALTER COLUMN openid TYPE text" ); executeSql( "ALTER TABLE users ALTER COLUMN ldapid TYPE text" ); executeSql( "ALTER TABLE dataentryform ALTER COLUMN htmlcode TYPE text" ); executeSql( "ALTER TABLE minmaxdataelement RENAME minvalue TO minimumvalue" ); executeSql( "ALTER TABLE minmaxdataelement RENAME maxvalue TO maximumvalue" ); executeSql( "update minmaxdataelement set generatedvalue = generated where generatedvalue is null" ); executeSql( "alter table minmaxdataelement drop column generated" ); executeSql( "alter table minmaxdataelement alter column generatedvalue set not null" ); // orgunit shortname uniqueness executeSql( "ALTER TABLE organisationunit DROP CONSTRAINT organisationunit_shortname_key" ); executeSql( "ALTER TABLE section DROP CONSTRAINT section_name_key" ); executeSql( "UPDATE section SET showrowtotals = false WHERE showrowtotals IS NULL" ); executeSql( "UPDATE section SET showcolumntotals = false WHERE showcolumntotals IS NULL" ); executeSql( "UPDATE dataelement SET aggregationtype='avg_sum_org_unit' where aggregationtype='average'" ); // revert prepare aggregate*Value tables for offline diffs executeSql( "ALTER TABLE aggregateddatavalue DROP COLUMN modified" ); executeSql( "ALTER TABLE aggregatedindicatorvalue DROP COLUMN modified " ); executeSql( "UPDATE indicatortype SET indicatornumber=false WHERE indicatornumber is null" ); // program executeSql( "ALTER TABLE programinstance ALTER COLUMN patientid DROP NOT NULL" ); // migrate charts from dimension to category, series, filter executeSql( "UPDATE chart SET series='period', category='data', filter='organisationunit' WHERE dimension='indicator'" ); executeSql( "UPDATE chart SET 
series='data', category='organisationunit', filter='period' WHERE dimension='organisationUnit'" ); executeSql( "UPDATE chart SET series='period', category='data', filter='organisationunit' WHERE dimension='dataElement_period'" ); executeSql( "UPDATE chart SET series='data', category='organisationunit', filter='period' WHERE dimension='organisationUnit_dataElement'" ); executeSql( "UPDATE chart SET series='data', category='period', filter='organisationunit' WHERE dimension='period'" ); executeSql( "UPDATE chart SET series='data', category='period', filter='organisationunit' WHERE dimension='period_dataElement'" ); executeSql( "UPDATE chart SET type='bar' where type='bar3d'" ); executeSql( "UPDATE chart SET type='stackedbar' where type='stackedBar'" ); executeSql( "UPDATE chart SET type='stackedbar' where type='stackedBar3d'" ); executeSql( "UPDATE chart SET type='line' where type='line3d'" ); executeSql( "UPDATE chart SET type='pie' where type='pie'" ); executeSql( "UPDATE chart SET type='pie' where type='pie3d'" ); executeSql( "UPDATE programruleaction SET programnotificationtemplateid= 0 where programnotificationtemplateid is NULL" ); executeSql( "UPDATE chart SET type=lower(type), series=lower(series), category=lower(category), filter=lower(filter)" ); executeSql( "ALTER TABLE chart ALTER COLUMN dimension DROP NOT NULL" ); executeSql( "ALTER TABLE chart DROP COLUMN size" ); executeSql( "ALTER TABLE chart DROP COLUMN verticallabels" ); executeSql( "ALTER TABLE chart DROP COLUMN targetline" ); executeSql( "ALTER TABLE chart DROP COLUMN horizontalplotorientation" ); executeSql( "ALTER TABLE chart DROP COLUMN monthsLastYear" ); executeSql( "ALTER TABLE chart DROP COLUMN quartersLastYear" ); executeSql( "ALTER TABLE chart DROP COLUMN last6BiMonths" ); executeSql( "ALTER TABLE chart DROP CONSTRAINT chart_title_key" ); executeSql( "ALTER TABLE chart DROP CONSTRAINT chart_name_key" ); executeSql( "ALTER TABLE chart DROP COLUMN domainaxixlabel" ); executeSql( "ALTER TABLE 
chart DROP COLUMN rewindrelativeperiods" ); executeSql( "ALTER TABLE chart ALTER hideLegend DROP NOT NULL" ); executeSql( "ALTER TABLE chart ALTER regression DROP NOT NULL" ); executeSql( "ALTER TABLE chart ALTER hideSubtitle DROP NOT NULL" ); executeSql( "ALTER TABLE chart ALTER userOrganisationUnit DROP NOT NULL" ); // remove outdated relative periods executeSql( "ALTER TABLE reporttable DROP COLUMN last6months" ); executeSql( "ALTER TABLE reporttable DROP COLUMN last9months" ); executeSql( "ALTER TABLE reporttable DROP COLUMN sofarthisyear" ); executeSql( "ALTER TABLE reporttable DROP COLUMN sofarthisfinancialyear" ); executeSql( "ALTER TABLE reporttable DROP COLUMN last3to6months" ); executeSql( "ALTER TABLE reporttable DROP COLUMN last6to9months" ); executeSql( "ALTER TABLE reporttable DROP COLUMN last9to12months" ); executeSql( "ALTER TABLE reporttable DROP COLUMN last12individualmonths" ); executeSql( "ALTER TABLE reporttable DROP COLUMN individualmonthsthisyear" ); executeSql( "ALTER TABLE reporttable DROP COLUMN individualquartersthisyear" ); executeSql( "ALTER TABLE reporttable DROP COLUMN programid" ); executeSql( "ALTER TABLE chart DROP COLUMN last6months" ); executeSql( "ALTER TABLE chart DROP COLUMN last9months" ); executeSql( "ALTER TABLE chart DROP COLUMN sofarthisyear" ); executeSql( "ALTER TABLE chart DROP COLUMN sofarthisfinancialyear" ); executeSql( "ALTER TABLE chart DROP COLUMN last3to6months" ); executeSql( "ALTER TABLE chart DROP COLUMN last6to9months" ); executeSql( "ALTER TABLE chart DROP COLUMN last9to12months" ); executeSql( "ALTER TABLE chart DROP COLUMN last12individualmonths" ); executeSql( "ALTER TABLE chart DROP COLUMN individualmonthsthisyear" ); executeSql( "ALTER TABLE chart DROP COLUMN individualquartersthisyear" ); executeSql( "ALTER TABLE chart DROP COLUMN organisationunitgroupsetid" ); executeSql( "ALTER TABLE chart DROP COLUMN programid" ); // remove source executeSql( "ALTER TABLE datasetsource DROP CONSTRAINT 
fk766ae2938fd8026a" ); executeSql( "ALTER TABLE datasetlocksource DROP CONSTRAINT fk582fdf7e8fd8026a" ); executeSql( "ALTER TABLE completedatasetregistration DROP CONSTRAINT fk_datasetcompleteregistration_sourceid" ); executeSql( "ALTER TABLE minmaxdataelement DROP CONSTRAINT fk_minmaxdataelement_sourceid" ); executeSql( "ALTER TABLE datavalue DROP CONSTRAINT fk_datavalue_sourceid" ); executeSql( "ALTER TABLE organisationunit DROP CONSTRAINT fke509dd5ef1c932ed" ); executeSql( "DROP TABLE source CASCADE" ); executeSql( "DROP TABLE datavaluearchive" ); // message executeSql( "ALTER TABLE messageconversation DROP COLUMN messageconversationkey" ); executeSql( "UPDATE messageconversation SET lastmessage=lastupdated WHERE lastmessage is null" ); executeSql( "ALTER TABLE message DROP COLUMN messagesubject" ); executeSql( "ALTER TABLE message DROP COLUMN messagekey" ); executeSql( "ALTER TABLE message DROP COLUMN sentdate" ); executeSql( "ALTER TABLE usermessage DROP COLUMN messagedate" ); executeSql( "UPDATE usermessage SET isfollowup=false WHERE isfollowup is null" ); executeSql( "DROP TABLE message_usermessages" ); // create code unique constraints executeSql( "ALTER TABLE dataelement ADD CONSTRAINT dataelement_code_key UNIQUE(code)" ); executeSql( "ALTER TABLE indicator ADD CONSTRAINT indicator_code_key UNIQUE(code)" ); executeSql( "ALTER TABLE organisationunit ADD CONSTRAINT organisationunit_code_key UNIQUE(code)" ); executeSql( "ALTER TABLE organisationunit ALTER COLUMN code TYPE varchar(50)" ); executeSql( "ALTER TABLE indicator ALTER COLUMN code TYPE varchar(50)" ); // remove uuid executeSql( "ALTER TABLE attribute DROP COLUMN uuid" ); executeSql( "ALTER TABLE categorycombo DROP COLUMN uuid" ); executeSql( "ALTER TABLE categoryoptioncombo DROP COLUMN uuid" ); executeSql( "ALTER TABLE chart DROP COLUMN uuid" ); executeSql( "ALTER TABLE concept DROP COLUMN uuid" ); executeSql( "ALTER TABLE constant DROP COLUMN uuid" ); executeSql( "ALTER TABLE dataelement DROP COLUMN 
uuid" ); executeSql( "ALTER TABLE dataelementcategory DROP COLUMN uuid" ); executeSql( "ALTER TABLE dataelementcategoryoption DROP COLUMN uuid" ); executeSql( "ALTER TABLE dataelementgroup DROP COLUMN uuid" ); executeSql( "ALTER TABLE dataelementgroupset DROP COLUMN uuid" ); executeSql( "ALTER TABLE dataset DROP COLUMN uuid" ); executeSql( "ALTER TABLE indicator DROP COLUMN uuid" ); executeSql( "ALTER TABLE indicatorgroup DROP COLUMN uuid" ); executeSql( "ALTER TABLE indicatorgroupset DROP COLUMN uuid" ); executeSql( "ALTER TABLE indicatortype DROP COLUMN uuid" ); // executeSql( "ALTER TABLE organisationunit DROP COLUMN uuid" ); executeSql( "ALTER TABLE orgunitgroup DROP COLUMN uuid" ); executeSql( "ALTER TABLE orgunitgroupset DROP COLUMN uuid" ); executeSql( "ALTER TABLE orgunitlevel DROP COLUMN uuid" ); executeSql( "ALTER TABLE report DROP COLUMN uuid" ); executeSql( "ALTER TABLE validationrule DROP COLUMN uuid" ); executeSql( "ALTER TABLE validationrulegroup DROP COLUMN uuid" ); // replace null with false for boolean fields executeSql( "update dataset set fieldcombinationrequired = false where fieldcombinationrequired is null" ); executeSql( "update chart set hidelegend = false where hidelegend is null" ); executeSql( "update chart set regression = false where regression is null" ); executeSql( "update chart set hidesubtitle = false where hidesubtitle is null" ); executeSql( "update chart set userorganisationunit = false where userorganisationunit is null" ); executeSql( "update chart set percentstackedvalues = false where percentstackedvalues is null" ); executeSql( "update chart set cumulativevalues = false where cumulativevalues is null" ); executeSql( "update chart set nospacebetweencolumns = false where nospacebetweencolumns is null" ); executeSql( "update indicator set annualized = false where annualized is null" ); executeSql( "update indicatortype set indicatornumber = false where indicatornumber is null" ); executeSql( "update dataset set mobile = false 
where mobile is null" ); executeSql( "update dataset set allowfutureperiods = false where allowfutureperiods is null" ); executeSql( "update dataset set validcompleteonly = false where validcompleteonly is null" ); executeSql( "update dataset set notifycompletinguser = false where notifycompletinguser is null" ); executeSql( "update dataset set approvedata = false where approvedata is null" ); executeSql( "update dataelement set zeroissignificant = false where zeroissignificant is null" ); executeSql( "update organisationunit set haspatients = false where haspatients is null" ); executeSql( "update organisationunit set openingdate = '1970-01-01' where openingdate is null" ); executeSql( "update dataset set expirydays = 0 where expirydays is null" ); executeSql( "update eventchart set hidelegend = false where hidelegend is null" ); executeSql( "update eventchart set regression = false where regression is null" ); executeSql( "update eventchart set hidetitle = false where hidetitle is null" ); executeSql( "update eventchart set hidesubtitle = false where hidesubtitle is null" ); executeSql( "update eventchart set hidenadata = false where hidenadata is null" ); executeSql( "update eventchart set percentstackedvalues = false where percentstackedvalues is null" ); executeSql( "update eventchart set cumulativevalues = false where cumulativevalues is null" ); executeSql( "update eventchart set nospacebetweencolumns = false where nospacebetweencolumns is null" ); executeSql( "update reporttable set showdimensionlabels = false where showdimensionlabels is null" ); executeSql( "update eventreport set showdimensionlabels = false where showdimensionlabels is null" ); executeSql( "update reporttable set skiprounding = false where skiprounding is null" ); executeSql( "update validationrule set skipformvalidation = false where skipformvalidation is null" ); executeSql( "update validationnotificationtemplate set sendstrategy = 'COLLECTIVE_SUMMARY' where sendstrategy is null" ); // 
move timelydays from system setting => dataset property executeSql( "update dataset set timelydays = 15 where timelydays is null" ); executeSql( "delete from systemsetting where name='completenessOffset'" ); executeSql( "update report set paramreportingmonth = false where paramreportingmonth is null" ); executeSql( "update report set paramparentorganisationunit = false where paramorganisationunit is null" ); executeSql( "update reporttable set paramreportingmonth = false where paramreportingmonth is null" ); executeSql( "update reporttable set paramparentorganisationunit = false where paramparentorganisationunit is null" ); executeSql( "update reporttable set paramorganisationunit = false where paramorganisationunit is null" ); executeSql( "update reporttable set paramgrandparentorganisationunit = false where paramgrandparentorganisationunit is null" ); executeSql( "update reporttable set reportingmonth = false where reportingmonth is null" ); executeSql( "update reporttable set reportingbimonth = false where reportingbimonth is null" ); executeSql( "update reporttable set reportingquarter = false where reportingquarter is null" ); executeSql( "update reporttable set monthsthisyear = false where monthsthisyear is null" ); executeSql( "update reporttable set quartersthisyear = false where quartersthisyear is null" ); executeSql( "update reporttable set thisyear = false where thisyear is null" ); executeSql( "update reporttable set monthslastyear = false where monthslastyear is null" ); executeSql( "update reporttable set quarterslastyear = false where quarterslastyear is null" ); executeSql( "update reporttable set lastyear = false where lastyear is null" ); executeSql( "update reporttable set last5years = false where last5years is null" ); executeSql( "update reporttable set lastsixmonth = false where lastsixmonth is null" ); executeSql( "update reporttable set last4quarters = false where last4quarters is null" ); executeSql( "update reporttable set last12months = 
false where last12months is null" ); executeSql( "update reporttable set last3months = false where last3months is null" ); executeSql( "update reporttable set last6bimonths = false where last6bimonths is null" ); executeSql( "update reporttable set last4quarters = false where last4quarters is null" ); executeSql( "update reporttable set last2sixmonths = false where last2sixmonths is null" ); executeSql( "update reporttable set thisfinancialyear = false where thisfinancialyear is null" ); executeSql( "update reporttable set lastfinancialyear = false where lastfinancialyear is null" ); executeSql( "update reporttable set last5financialyears = false where last5financialyears is null" ); executeSql( "update reporttable set cumulative = false where cumulative is null" ); executeSql( "update reporttable set userorganisationunit = false where userorganisationunit is null" ); executeSql( "update reporttable set userorganisationunitchildren = false where userorganisationunitchildren is null" ); executeSql( "update reporttable set userorganisationunitgrandchildren = false where userorganisationunitgrandchildren is null" ); executeSql( "update reporttable set subtotals = true where subtotals is null" ); executeSql( "update reporttable set hideemptyrows = false where hideemptyrows is null" ); executeSql( "update reporttable set hideemptycolumns = false where hideemptycolumns is null" ); executeSql( "update reporttable set displaydensity = 'normal' where displaydensity is null" ); executeSql( "update reporttable set fontsize = 'normal' where fontsize is null" ); executeSql( "update reporttable set digitgroupseparator = 'space' where digitgroupseparator is null" ); executeSql( "update reporttable set sortorder = 0 where sortorder is null" ); executeSql( "update reporttable set toplimit = 0 where toplimit is null" ); executeSql( "update reporttable set showhierarchy = false where showhierarchy is null" ); executeSql( "update reporttable set legenddisplaystyle = 'FILL' where 
legenddisplaystyle is null" ); executeSql( "update reporttable set legenddisplaystrategy = 'FIXED' where legenddisplaystrategy is null" ); executeSql( "update reporttable set hidetitle = false where hidetitle is null" ); executeSql( "update reporttable set hidesubtitle = false where hidesubtitle is null" ); // reporttable col/row totals = keep existing || copy from totals || true executeSql( "update reporttable set totals = true where totals is null" ); executeSql( "update reporttable set coltotals = totals where coltotals is null" ); executeSql( "update reporttable set coltotals = true where coltotals is null" ); executeSql( "update reporttable set rowtotals = totals where rowtotals is null" ); executeSql( "update reporttable set rowtotals = true where rowtotals is null" ); executeSql( "alter table reporttable drop column totals" ); // reporttable col/row subtotals executeSql( "update reporttable set colsubtotals = subtotals where colsubtotals is null" ); executeSql( "update reporttable set rowsubtotals = subtotals where rowsubtotals is null" ); // reporttable upgrade counttype to outputtype executeSql( "update eventreport set outputtype = 'EVENT' where outputtype is null and counttype = 'events'" ); executeSql( "update eventreport set outputtype = 'TRACKED_ENTITY_INSTANCE' where outputtype is null and counttype = 'tracked_entity_instances'" ); executeSql( "update eventreport set hidetitle = false where hidetitle is null" ); executeSql( "update eventreport set hidesubtitle = false where hidesubtitle is null" ); executeSql( "update eventreport set outputtype = 'EVENT' where outputtype is null" ); executeSql( "alter table eventreport drop column counttype" ); executeSql( "update chart set reportingmonth = false where reportingmonth is null" ); executeSql( "update chart set reportingbimonth = false where reportingbimonth is null" ); executeSql( "update chart set reportingquarter = false where reportingquarter is null" ); executeSql( "update chart set monthsthisyear = 
false where monthsthisyear is null" ); executeSql( "update chart set quartersthisyear = false where quartersthisyear is null" ); executeSql( "update chart set thisyear = false where thisyear is null" ); executeSql( "update chart set monthslastyear = false where monthslastyear is null" ); executeSql( "update chart set quarterslastyear = false where quarterslastyear is null" ); executeSql( "update chart set lastyear = false where lastyear is null" ); executeSql( "update chart set lastsixmonth = false where lastsixmonth is null" ); executeSql( "update chart set last12months = false where last12months is null" ); executeSql( "update chart set last3months = false where last3months is null" ); executeSql( "update chart set last5years = false where last5years is null" ); executeSql( "update chart set last4quarters = false where last4quarters is null" ); executeSql( "update chart set last6bimonths = false where last6bimonths is null" ); executeSql( "update chart set last4quarters = false where last4quarters is null" ); executeSql( "update chart set last2sixmonths = false where last2sixmonths is null" ); executeSql( "update chart set showdata = false where showdata is null" ); executeSql( "update chart set userorganisationunit = false where userorganisationunit is null" ); executeSql( "update chart set userorganisationunitchildren = false where userorganisationunitchildren is null" ); executeSql( "update chart set userorganisationunitgrandchildren = false where userorganisationunitgrandchildren is null" ); executeSql( "update chart set hidetitle = false where hidetitle is null" ); executeSql( "update chart set sortorder = 0 where sortorder is null" ); executeSql( "update eventreport set showhierarchy = false where showhierarchy is null" ); executeSql( "update eventreport set counttype = 'events' where counttype is null" ); executeSql( "update eventreport set hidenadata = false where hidenadata is null" ); // eventreport col/rowtotals = keep existing || copy from totals || 
true executeSql( "update eventreport set totals = true where totals is null" ); executeSql( "update eventreport set coltotals = totals where coltotals is null" ); executeSql( "update eventreport set coltotals = true where coltotals is null" ); executeSql( "update eventreport set rowtotals = totals where rowtotals is null" ); executeSql( "update eventreport set rowtotals = true where rowtotals is null" ); executeSql( "alter table eventreport drop column totals" ); // eventreport col/row subtotals executeSql( "update eventreport set colsubtotals = subtotals where colsubtotals is null" ); executeSql( "update eventreport set rowsubtotals = subtotals where rowsubtotals is null" ); // eventchart upgrade counttype to outputtype executeSql( "update eventchart set outputtype = 'EVENT' where outputtype is null and counttype = 'events'" ); executeSql( "update eventchart set outputtype = 'TRACKED_ENTITY_INSTANCE' where outputtype is null and counttype = 'tracked_entity_instances'" ); executeSql( "update eventchart set outputtype = 'EVENT' where outputtype is null" ); executeSql( "alter table eventchart drop column counttype" ); executeSql( "update eventchart set sortorder = 0 where sortorder is null" ); // Move chart filters to chart_filters table executeSql( "insert into chart_filters (chartid, sort_order, filter) select chartid, 0, filter from chart" ); executeSql( "alter table chart drop column filter" ); // Upgrade chart dimension identifiers executeSql( "update chart set series = 'dx' where series = 'data'" ); executeSql( "update chart set series = 'pe' where series = 'period'" ); executeSql( "update chart set series = 'ou' where series = 'organisationunit'" ); executeSql( "update chart set category = 'dx' where category = 'data'" ); executeSql( "update chart set category = 'pe' where category = 'period'" ); executeSql( "update chart set category = 'ou' where category = 'organisationunit'" ); executeSql( "update chart_filters set filter = 'dx' where filter = 'data'" ); 
executeSql( "update chart_filters set filter = 'pe' where filter = 'period'" ); executeSql( "update chart_filters set filter = 'ou' where filter = 'organisationunit'" ); executeSql( "update dataentryform set format = 1 where format is null" ); executeSql( "update dataelementgroup set shortname=name where shortname is null and length(name)<=50" ); executeSql( "update orgunitgroup set shortname=name where shortname is null and length(name)<=50" ); // report, reporttable, chart groups executeSql( "DROP TABLE reportgroupmembers" ); executeSql( "DROP TABLE reportgroup" ); executeSql( "DROP TABLE reporttablegroupmembers" ); executeSql( "DROP TABLE reporttablegroup" ); executeSql( "DROP TABLE chartgroupmembers" ); executeSql( "DROP TABLE chartgroup" ); executeSql( "delete from usersetting where name='currentStyle' and value like '%blue/blue.css'" ); executeSql( "delete from systemsetting where name='currentStyle' and value like '%blue/blue.css'" ); executeSql( "update dataentryform set style='regular' where style is null" ); executeSql( "UPDATE dataset SET skipaggregation = false WHERE skipaggregation IS NULL" ); executeSql( "UPDATE dataset SET skipoffline = false WHERE skipoffline IS NULL" ); executeSql( "UPDATE dataset SET renderastabs = false WHERE renderastabs IS NULL" ); executeSql( "UPDATE dataset SET renderhorizontally = false WHERE renderhorizontally IS NULL" ); executeSql( "UPDATE dataset SET novaluerequirescomment = false WHERE novaluerequirescomment IS NULL" ); executeSql( "UPDATE dataset SET openfutureperiods = 12 where allowfutureperiods is true" ); executeSql( "UPDATE dataset SET openfutureperiods = 0 where allowfutureperiods is false" ); executeSql( "update dataset SET compulsoryfieldscompleteonly = false WHERE compulsoryfieldscompleteonly IS NULL" ); executeSql( "ALTER TABLE dataset DROP COLUMN allowfutureperiods" ); executeSql( "UPDATE categorycombo SET skiptotal = false WHERE skiptotal IS NULL" ); // short names executeSql( "ALTER TABLE dataelement ALTER 
COLUMN shortname TYPE character varying(50)" ); executeSql( "ALTER TABLE indicator ALTER COLUMN shortname TYPE character varying(50)" ); executeSql( "ALTER TABLE dataset ALTER COLUMN shortname TYPE character varying(50)" ); executeSql( "ALTER TABLE organisationunit ALTER COLUMN shortname TYPE character varying(50)" ); executeSql( "update report set type='jasperReportTable' where type is null and reporttableid is not null" ); executeSql( "update report set type='jasperJdbc' where type is null and reporttableid is null" ); // upgrade authorities executeSql( "UPDATE userroleauthorities SET authority='F_DOCUMENT_PUBLIC_ADD' WHERE authority='F_DOCUMENT_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_REPORT_PUBLIC_ADD' WHERE authority='F_REPORT_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_REPORTTABLE_PUBLIC_ADD' WHERE authority='F_REPORTTABLE_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_DATASET_PUBLIC_ADD' WHERE authority='F_DATASET_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_DATAELEMENT_PUBLIC_ADD' WHERE authority='F_DATAELEMENT_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_DATAELEMENTGROUP_PUBLIC_ADD' WHERE authority='F_DATAELEMENTGROUP_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_DATAELEMENTGROUPSET_PUBLIC_ADD' WHERE authority='F_DATAELEMENTGROUPSET_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_ORGUNITGROUP_PUBLIC_ADD' WHERE authority='F_ORGUNITGROUP_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_ORGUNITGROUPSET_PUBLIC_ADD' WHERE authority='F_ORGUNITGROUPSET_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_INDICATOR_PUBLIC_ADD' WHERE authority='F_INDICATOR_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_INDICATORGROUP_PUBLIC_ADD' WHERE authority='F_INDICATORGROUP_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_INDICATORGROUPSET_PUBLIC_ADD' WHERE 
authority='F_INDICATORGROUPSET_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_USERROLE_PUBLIC_ADD' WHERE authority='F_USERROLE_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_USERGROUP_PUBLIC_ADD' WHERE authority='F_USER_GRUP_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_USERGROUP_UPDATE' WHERE authority='F_USER_GRUP_UPDATE'" ); executeSql( "UPDATE userroleauthorities SET authority='F_USERGROUP_DELETE' WHERE authority='F_USER_GRUP_DELETE'" ); executeSql( "UPDATE userroleauthorities SET authority='F_USERGROUP_LIST' WHERE authority='F_USER_GRUP_LIST'" ); executeSql( "UPDATE userroleauthorities SET authority='F_SQLVIEW_PUBLIC_ADD' WHERE authority='F_SQLVIEW_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_OPTIONSET_PUBLIC_ADD' WHERE authority='F_OPTIONSET_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_VALIDATIONRULEGROUP_PUBLIC_ADD' WHERE authority='F_VALIDATIONRULEGROUP_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_TRACKED_ENTITY_ATTRIBUTE_PUBLIC_ADD' WHERE authority='F_TRACKED_ENTITY_ATTRIBUTE_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_PROGRAM_INDICATOR_PUBLIC_ADD' WHERE authority='F_ADD_PROGRAM_INDICATOR'" ); executeSql( "UPDATE userroleauthorities SET authority='F_LEGEND_SET_PUBLIC_ADD' WHERE authority='F_LEGEND_SET_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_VALIDATIONRULE_PUBLIC_ADD' WHERE authority='F_VALIDATIONRULE_ADD'" ); executeSql( "UPDATE userroleauthorities SET authority='F_ATTRIBUTE_PUBLIC_ADD' WHERE authority='F_ATTRIBUTE_ADD'" ); // remove unused authorities executeSql( "DELETE FROM userroleauthorities WHERE authority='F_CONCEPT_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_CONSTANT_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_DATAELEMENT_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE 
authority='F_DATAELEMENTGROUP_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_DATAELEMENTGROUPSET_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_DATAELEMENT_MINMAX_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_DATASET_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_SECTION_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_DATAVALUE_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_INDICATOR_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_INDICATORTYPE_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_INDICATORGROUP_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_INDICATORGROUPSET_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_ORGANISATIONUNIT_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_ORGUNITGROUP_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_ORGUNITGROUPSET_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_USERROLE_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_USERGROUP_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_VALIDATIONRULE_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_VALIDATIONRULEGROUP_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_REPORT_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_SQLVIEW_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_VALIDATIONCRITERIA_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_OPTIONSET_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_ATTRIBUTE_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE 
authority='F_PATIENTATTRIBUTE_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_PATIENT_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_UPDATE_PROGRAM_INDICATOR'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_PROGRAM_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_PROGRAMSTAGE_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_PROGRAMSTAGE_SECTION_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_PATIENTIDENTIFIERTYPE_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_PROGRAM_ATTRIBUTE_UPDATE'" ); executeSql( "DELETE FROM userroleauthorities WHERE authority='F_PATIENT_DATAVALUE_UPDATE'" ); // remove unused configurations executeSql( "delete from systemsetting where name='keySmsConfig'" ); executeSql( "delete from systemsetting where name='keySmsConfiguration'" ); executeSql( "delete from systemsetting where name='keySmsConfigurations'" ); // update denominator of indicator which has indicatortype as 'number' executeSql( "UPDATE indicator SET denominator = 1, denominatordescription = '' WHERE indicatortypeid IN (SELECT DISTINCT indicatortypeid FROM indicatortype WHERE indicatornumber = true) AND denominator IS NULL" ); // remove name/shortName uniqueness executeSql( "ALTER TABLE organisationunit DROP CONSTRAINT organisationunit_name_key" ); executeSql( "ALTER TABLE orgunitgroup ADD CONSTRAINT orgunitgroup_name_key UNIQUE (name)" ); executeSql( "ALTER TABLE orgunitgroupset ADD CONSTRAINT orgunitgroupset_name_key UNIQUE (name)" ); executeSql( "ALTER TABLE indicator DROP CONSTRAINT indicator_name_key" ); executeSql( "ALTER TABLE indicator DROP CONSTRAINT indicator_shortname_key" ); executeSql( "ALTER TABLE indicatorgroup DROP CONSTRAINT indicatorgroup_name_key" ); executeSql( "ALTER TABLE indicatorgroupset DROP CONSTRAINT indicatorgroupset_name_key" ); executeSql( "ALTER TABLE dataset DROP 
CONSTRAINT dataset_name_key" ); executeSql( "ALTER TABLE dataset DROP CONSTRAINT dataset_shortname_key" ); executeSql( "ALTER TABLE document DROP CONSTRAINT document_name_key" ); executeSql( "ALTER TABLE reporttable DROP CONSTRAINT reporttable_name_key" ); executeSql( "ALTER TABLE report DROP CONSTRAINT report_name_key" ); executeSql( "ALTER TABLE usergroup DROP CONSTRAINT usergroup_name_key" ); executeSql( "ALTER TABLE dataelementcategory DROP COLUMN conceptid" ); executeSql( "ALTER TABLE dataelementcategoryoption DROP COLUMN conceptid" ); // upgrade system charts/maps to public read-only sharing executeSql( "UPDATE chart SET publicaccess='r executeSql( "UPDATE map SET publicaccess='r executeSql( "UPDATE chart SET publicaccess=' executeSql( "UPDATE map SET publicaccess=' executeSql( "update dataelementcategory set datadimension = false where datadimension is null" ); executeSql( "UPDATE dataset SET dataelementdecoration=false WHERE dataelementdecoration is null" ); executeSql( "update sqlview set sqlviewid=viweid" ); executeSql( "alter table sqlview drop column viewid" ); executeSql( "update sqlview set type = 'QUERY' where query is true" ); executeSql( "update sqlview set type = 'VIEW' where type is null" ); executeSql( "alter table sqlview drop column query" ); executeSql( "UPDATE dashboard SET publicaccess=' executeSql( "UPDATE optionset SET version=0 WHERE version IS NULL" ); executeSql( "UPDATE dataset SET version=0 WHERE version IS NULL" ); executeSql( "UPDATE program SET version=0 WHERE version IS NULL" ); executeSql( "update program set shortname = substring(name,0,50) where shortname is null" ); executeSql( "update programstageinstance set attributeoptioncomboid = " + defaultOptionComboId + " where attributeoptioncomboid is null" ); executeSql( "update programstageinstance set storedby=completedby where storedby is null and completedby is not null" ); executeSql( "ALTER TABLE datavalue ALTER COLUMN lastupdated TYPE timestamp" ); executeSql( "ALTER TABLE 
completedatasetregistration ALTER COLUMN date TYPE timestamp" ); executeSql( "ALTER TABLE message ALTER COLUMN userid DROP NOT NULL" ); executeSql( "ALTER TABLE message ALTER COLUMN messagetext TYPE text" ); executeSql( "drop index crosstab" ); executeSql( "delete from usersetting where name = 'dashboardConfig' or name = 'dashboardConfiguration'" ); executeSql( "update usersetting set name = 'keyUiLocale' where name = 'currentLocale'" ); executeSql( "update usersetting set name = 'keyDbLocale' where name = 'keyLocaleUserSetting'" ); executeSql( "update usersetting set name = 'keyStyle' where name = 'currentStyle'" ); executeSql( "ALTER TABLE interpretation ALTER COLUMN userid DROP NOT NULL" ); executeSql( "UPDATE interpretation SET publicaccess='r executeSql( "ALTER TABLE dataset DROP COLUMN symbol" ); executeSql( "ALTER TABLE users ALTER COLUMN password DROP NOT NULL" ); // set default dataDimension on orgUnitGroupSet and deGroupSet executeSql( "UPDATE dataelementgroupset SET datadimension=true WHERE datadimension IS NULL" ); executeSql( "ALTER TABLE dataelementgroupset ALTER COLUMN datadimension SET NOT NULL" ); executeSql( "UPDATE orgunitgroupset SET datadimension=true WHERE datadimension IS NULL" ); executeSql( "ALTER TABLE orgunitgroupset ALTER COLUMN datadimension SET NOT NULL" ); executeSql( "ALTER TABLE validationnotificationtemplate ALTER COLUMN sendstrategy SET NOT NULL" ); // set attribute defaults executeSql( "UPDATE attribute SET dataelementattribute=false WHERE dataelementattribute IS NULL" ); executeSql( "UPDATE attribute SET dataelementgroupattribute=false WHERE dataelementgroupattribute IS NULL" ); executeSql( "UPDATE attribute SET indicatorattribute=false WHERE indicatorattribute IS NULL" ); executeSql( "UPDATE attribute SET indicatorgroupattribute=false WHERE indicatorgroupattribute IS NULL" ); executeSql( "UPDATE attribute SET organisationunitattribute=false WHERE organisationunitattribute IS NULL" ); executeSql( "UPDATE attribute SET 
organisationunitgroupattribute=false WHERE organisationunitgroupattribute IS NULL" ); executeSql( "UPDATE attribute SET organisationunitgroupsetattribute=false WHERE organisationunitgroupsetattribute IS NULL" ); executeSql( "UPDATE attribute SET userattribute=false WHERE userattribute IS NULL" ); executeSql( "UPDATE attribute SET usergroupattribute=false WHERE usergroupattribute IS NULL" ); executeSql( "UPDATE attribute SET datasetattribute=false WHERE datasetattribute IS NULL" ); executeSql( "UPDATE attribute SET programattribute=false WHERE programattribute IS NULL" ); executeSql( "UPDATE attribute SET programstageattribute=false WHERE programstageattribute IS NULL" ); executeSql( "UPDATE attribute SET trackedentityattribute=false WHERE trackedentityattribute IS NULL" ); executeSql( "UPDATE attribute SET trackedentityattributeattribute=false WHERE trackedentityattributeattribute IS NULL" ); executeSql( "UPDATE attribute SET categoryoptionattribute=false WHERE categoryoptionattribute IS NULL" ); executeSql( "UPDATE attribute SET categoryoptiongroupattribute=false WHERE categoryoptiongroupattribute IS NULL" ); executeSql( "UPDATE attribute SET documentattribute=false WHERE documentattribute IS NULL" ); executeSql( "UPDATE attribute SET optionattribute=false WHERE optionattribute IS NULL" ); executeSql( "UPDATE attribute SET optionsetattribute=false WHERE optionsetattribute IS NULL" ); executeSql( "UPDATE attribute SET constantattribute=false WHERE constantattribute IS NULL" ); executeSql( "UPDATE attribute SET legendsetattribute=false WHERE legendsetattribute IS NULL" ); executeSql( "UPDATE attribute SET programindicatorattribute=false WHERE programindicatorattribute IS NULL" ); executeSql( "UPDATE attribute SET sqlViewAttribute=false WHERE sqlViewAttribute IS NULL" ); executeSql( "UPDATE attribute SET sectionAttribute=false WHERE sectionAttribute IS NULL" ); executeSql( "UPDATE attribute SET categoryoptioncomboattribute=false WHERE categoryoptioncomboattribute IS 
NULL" ); executeSql( "update attribute set isunique=false where isunique is null" ); executeSql( "ALTER TABLE trackedentityattributedimension DROP COLUMN operator" ); executeSql( "ALTER TABLE trackedentitydataelementdimension DROP COLUMN operator" ); // update attribute.code, set to null if code='' executeSql( "UPDATE attribute SET code=NULL WHERE code=''" ); //update programruleaction: executeSql( "ALTER TABLE programruleaction DROP COLUMN name" ); //update programrule executeSql( "UPDATE programrule SET rulecondition = condition WHERE rulecondition IS NULL" ); executeSql( "ALTER TABLE programrule DROP COLUMN condition" ); // data approval executeSql( "UPDATE dataapproval SET accepted=false WHERE accepted IS NULL" ); executeSql( "ALTER TABLE dataapproval ALTER COLUMN accepted SET NOT NULL" ); executeSql( "DELETE FROM dataapproval WHERE categoryoptiongroupid IS NOT NULL" ); executeSql( "ALTER TABLE dataapproval DROP COLUMN categoryoptiongroupid" ); executeSql( "UPDATE dataapproval SET attributeoptioncomboid=categoryoptioncomboid WHERE categoryoptioncomboid IS NOT NULL" ); executeSql( "ALTER TABLE dataapproval DROP COLUMN categoryoptioncomboid" ); executeSql( "UPDATE dataapproval SET attributeoptioncomboid=" + defaultCategoryComboId + " WHERE attributeoptioncomboid IS NULL" ); executeSql( "ALTER TABLE dataapproval ALTER COLUMN attributeoptioncomboid SET NOT NULL" ); // validation rule group, new column alertbyorgunits executeSql( "UPDATE validationrulegroup SET alertbyorgunits=false WHERE alertbyorgunits IS NULL" ); executeSql( "update expression set missingvaluestrategy = 'SKIP_IF_ANY_VALUE_MISSING' where missingvaluestrategy is null and (nullifblank is true or nullifblank is null)" ); executeSql( "update expression set missingvaluestrategy = 'NEVER_SKIP' where missingvaluestrategy is null nullifblank is false" ); executeSql( "alter table expression alter column missingvaluestrategy set not null" ); executeSql( "alter table expression drop column nullifblank" ); 
executeSql( "drop table expressiondataelement" ); executeSql( "drop table expressionsampleelement" ); executeSql( "alter table dataelementcategoryoption alter column startdate type date" ); executeSql( "alter table dataelementcategoryoption alter column enddate type date" ); executeSql( "alter table dataelement drop column sortorder" ); executeSql( "alter table indicator drop column sortorder" ); executeSql( "alter table dataset drop column sortorder" ); executeSql( "alter table dataelement drop column active" ); executeSql( "alter table datavalue alter column value type varchar(50000)" ); executeSql( "alter table datavalue alter column comment type varchar(50000)" ); executeSql( "alter table datavalueaudit alter column value type varchar(50000)" ); executeSql( "alter table trackedentitydatavalue alter column value type varchar(50000)" ); executeSql( "alter table trackedentityattributevalue alter column value type varchar(50000)" ); executeSql( "update trackedentitydatavalue set providedelsewhere=false where providedelsewhere is null" ); executeSql( "update datavalueaudit set attributeoptioncomboid = " + defaultOptionComboId + " where attributeoptioncomboid is null" ); executeSql( "alter table datavalueaudit alter column attributeoptioncomboid set not null;" ); executeSql( "update dataelementcategoryoption set shortname = substring(name,0,50) where shortname is null" ); // AttributeValue executeSql( "UPDATE attributevalue SET created=now() WHERE created IS NULL" ); executeSql( "UPDATE attributevalue SET lastupdated=now() WHERE lastupdated IS NULL" ); executeSql( "ALTER TABLE attributevalue ALTER value TYPE text" ); executeSql( "DELETE FROM attributevalue where value IS NULL or value=''" ); executeSql( "update dashboarditem set shape = 'normal' where shape is null" ); executeSql( "update categoryoptioncombo set ignoreapproval = false where ignoreapproval is null" ); executeSql( "alter table version alter column versionkey set not null" ); executeSql( "alter table 
version add constraint version_versionkey_key unique(versionkey)" ); // Cacheable executeSql( "UPDATE report set cachestrategy='RESPECT_SYSTEM_SETTING' where cachestrategy is null" ); executeSql( "UPDATE sqlview set cachestrategy='RESPECT_SYSTEM_SETTING' where cachestrategy is null" ); executeSql( "update categorycombo set datadimensiontype = 'DISAGGREGATION' where dimensiontype = 'disaggregation'" ); executeSql( "update categorycombo set datadimensiontype = 'ATTRIBUTE' where dimensiontype = 'attribute'" ); executeSql( "update categorycombo set datadimensiontype = 'DISAGGREGATION' where datadimensiontype is null" ); executeSql( "alter table categorycombo drop column dimensiontype" ); executeSql( "update dataelementcategory set datadimensiontype = 'DISAGGREGATION' where dimensiontype = 'disaggregation'" ); executeSql( "update dataelementcategory set datadimensiontype = 'ATTRIBUTE' where dimensiontype = 'attribute'" ); executeSql( "update dataelementcategory set datadimensiontype = 'DISAGGREGATION' where datadimensiontype is null" ); executeSql( "alter table dataelementcategory drop column dimensiontype" ); executeSql( "update categoryoptiongroupset set datadimensiontype = 'ATTRIBUTE' where datadimensiontype is null" ); executeSql( "update categoryoptiongroup set datadimensiontype = 'ATTRIBUTE' where datadimensiontype is null" ); executeSql( "update reporttable set completedonly = false where completedonly is null" ); executeSql( "update chart set completedonly = false where completedonly is null" ); executeSql( "update eventreport set completedonly = false where completedonly is null" ); executeSql( "update eventchart set completedonly = false where completedonly is null" ); executeSql( "update program set enrollmentdatelabel = dateofenrollmentdescription where enrollmentdatelabel is null" ); executeSql( "update program set incidentdatelabel = dateofincidentdescription where incidentdatelabel is null" ); executeSql( "update programinstance set incidentdate = 
dateofincident where incidentdate is null" ); executeSql( "alter table programinstance alter column incidentdate drop not null" ); executeSql( "alter table program drop column dateofenrollmentdescription" ); executeSql( "alter table program drop column dateofincidentdescription" ); executeSql( "alter table programinstance drop column dateofincident" ); executeSql( "update programstage set reportdatetouse = 'indicentDate' where reportdatetouse='dateOfIncident'" ); executeSql( "update programstage set repeatable = irregular where repeatable is null" ); executeSql( "update programstage set repeatable = false where repeatable is null" ); executeSql( "alter table programstage drop column reportdatedescription" ); executeSql( "alter table programstage drop column irregular" ); executeSql( "update smscodes set compulsory = false where compulsory is null" ); executeSql( "alter table programmessage drop column storecopy" ); executeSql( "alter table programindicator drop column missingvaluereplacement" ); executeSql( "update keyjsonvalue set namespacekey = key where namespacekey is null" ); executeSql( "alter table keyjsonvalue alter column namespacekey set not null" ); executeSql( "alter table keyjsonvalue drop column key" ); executeSql( "alter table trackedentityattributevalue drop column encrypted_value" ); executeSql( "alter table predictor drop column predictororglevels" ); // Remove data mart executeSql( "drop table aggregateddatasetcompleteness" ); executeSql( "drop table aggregateddatasetcompleteness_temp" ); executeSql( "drop table aggregateddatavalue" ); executeSql( "drop table aggregateddatavalue_temp" ); executeSql( "drop table aggregatedindicatorvalue" ); executeSql( "drop table aggregatedindicatorvalue_temp" ); executeSql( "drop table aggregatedorgunitdatasetcompleteness" ); executeSql( "drop table aggregatedorgunitdatasetcompleteness_temp" ); executeSql( "drop table aggregatedorgunitdatavalue" ); executeSql( "drop table aggregatedorgunitdatavalue_temp" ); 
executeSql( "drop table aggregatedorgunitindicatorvalue" ); executeSql( "drop table aggregatedorgunitindicatorvalue_temp" ); executeSql( "alter table trackedentitydatavalue alter column storedby TYPE character varying(255)" ); executeSql( "alter table datavalue alter column storedby TYPE character varying(255)" ); executeSql( "alter table datastatisticsevent alter column eventtype type character varying" ); executeSql( "alter table orgunitlevel drop constraint orgunitlevel_name_key" ); executeSql( "update interpretation set likes = 0 where likes is null" ); executeSql( "update chart set regressiontype = 'NONE' where regression is false or regression is null" ); executeSql( "update chart set regressiontype = 'LINEAR' where regression is true" ); executeSql( "alter table chart alter column regressiontype set not null" ); executeSql( "alter table chart drop column regression" ); executeSql( "update eventchart set regressiontype = 'NONE' where regression is false or regression is null" ); executeSql( "update eventchart set regressiontype = 'LINEAR' where regression is true" ); executeSql( "alter table eventchart alter column regressiontype set not null" ); executeSql( "alter table eventchart drop column regression" ); executeSql( "alter table validationrule drop column ruletype" ); executeSql( "alter table validationrule drop column skiptestexpressionid" ); executeSql( "alter table validationrule drop column organisationunitlevel" ); executeSql( "alter table validationrule drop column sequentialsamplecount" ); executeSql( "alter table validationrule drop column annualsamplecount" ); executeSql( "alter table validationrule drop column sequentialskipcount" ); // remove TrackedEntityAttributeGroup executeSql( "alter table trackedentityattribute drop column trackedentityattributegroupid" ); executeSql( "ALTER TABLE trackedentityattribute DROP CONSTRAINT fk_trackedentityattribute_attributegroupid" ); // remove id object parts from embedded objects upgradeEmbeddedObject( 
"datainputperiod" ); upgradeEmbeddedObject( "datasetelement" ); updateEnums(); upgradeDataValueSoftDelete(); initOauth2(); upgradeDataValuesWithAttributeOptionCombo(); upgradeCompleteDataSetRegistrationsWithAttributeOptionCombo(); upgradeMapViewsToAnalyticalObject(); upgradeTranslations(); upgradeToDataApprovalWorkflows(); executeSql( "alter table dataapproval alter column workflowid set not null" ); executeSql( "alter table dataapproval add constraint dataapproval_unique_key unique (dataapprovallevelid,workflowid,periodid,organisationunitid,attributeoptioncomboid)" ); upgradeImplicitAverageMonitoringRules(); updateOptions(); upgradeAggregationType( "reporttable" ); upgradeAggregationType( "chart" ); updateRelativePeriods(); updateNameColumnLengths(); upgradeMapViewsToColumns(); upgradeDataDimensionsToEmbeddedOperand(); upgradeDataDimensionItemsToReportingRateMetric(); upgradeDataDimensionItemToEmbeddedProgramAttribute(); upgradeDataDimensionItemToEmbeddedProgramDataElement(); updateObjectTranslation(); upgradeDataSetElements(); removeOutdatedTranslationProperties(); updateLegendRelationship(); updateHideEmptyRows(); executeSql( "update programindicator set analyticstype = 'EVENT' where analyticstype is null" ); executeSql( "alter table programindicator alter column analyticstype set not null" ); //TODO: remove - not needed in release 2.26. 
// --- Tail of the schema-upgrade routine; the method starts before this chunk ---
executeSql( "update programindicator set analyticstype = programindicatoranalyticstype" );
executeSql( "alter table programindicator drop programindicatoranalyticstype" );

// Scheduler fixes for 2.29
executeSql( "delete from systemsetting where name='keyScheduledTasks'" );
executeSql( "delete from systemsetting where name='keyDataMartTask'" );
executeSql( "delete from systemsetting where name='dataSyncCron'" );
executeSql( "delete from systemsetting where name='metaDataSyncCron'" );

updateDimensionFilterToText();
insertDefaultBoundariesForBoundlessProgramIndicators();

executeSql("alter table jobconfiguration drop column configurable;");

log.info( "Tables updated" );
}

/**
 * Drops the identifiable-object columns (uid, created, lastupdated, code)
 * from a table that holds embedded value-type objects, which no longer
 * carry their own identity.
 *
 * @param table the name of the table to alter.
 */
private void upgradeEmbeddedObject( String table )
{
    executeSql( "ALTER TABLE " + table + " DROP COLUMN uid" );
    executeSql( "ALTER TABLE " + table + " DROP COLUMN created" );
    executeSql( "ALTER TABLE " + table + " DROP COLUMN lastupdated" );
    executeSql( "ALTER TABLE " + table + " DROP COLUMN code" );
}

/**
 * Deletes translations of the numerator and denominator description
 * properties. The indicator link rows are removed first, before the
 * objecttranslation rows they reference.
 */
private void removeOutdatedTranslationProperties()
{
    executeSql( "delete from indicatortranslations where objecttranslationid in (select objecttranslationid from objecttranslation where property in ('numeratorDescription', 'denominatorDescription'))" );
    executeSql( "delete from objecttranslation where property in ('numeratorDescription', 'denominatorDescription')" );
}

/**
 * Introduces soft deletion for data values: backfills the deleted flag to
 * false, makes the column mandatory, and adds an index on it.
 */
private void upgradeDataValueSoftDelete()
{
    executeSql( "update datavalue set deleted = false where deleted is null" );
    executeSql( "alter table datavalue alter column deleted set not null" );
    executeSql( "create index in_datavalue_deleted on datavalue(deleted)" );
}

/**
 * Creates the OAuth2 token store tables: oauth_code, oauth_access_token and
 * oauth_refresh_token. Binary authentication/token columns use the
 * DBMS-specific long varbinary type from the statement builder.
 * NOTE(review): this layout looks like the Spring Security OAuth2 JDBC token
 * store schema - confirm against that schema before modifying columns.
 */
private void initOauth2()
{
    // OAuth2
    executeSql( "CREATE TABLE oauth_code (" +
        " code VARCHAR(256), authentication " + statementBuilder.getLongVarBinaryType() + ")" );

    executeSql( "CREATE TABLE oauth_access_token (" +
        " token_id VARCHAR(256)," +
        " token " + statementBuilder.getLongVarBinaryType() + "," +
        " authentication_id VARCHAR(256) PRIMARY KEY," +
        " user_name VARCHAR(256)," +
        " client_id VARCHAR(256)," +
        " authentication " + statementBuilder.getLongVarBinaryType() + "," +
        " refresh_token VARCHAR(256)" + ")" );

    executeSql( "CREATE TABLE oauth_refresh_token (" +
        " token_id VARCHAR(256)," +
        " token " + statementBuilder.getLongVarBinaryType() + "," +
        " authentication " + statementBuilder.getLongVarBinaryType() + ")" );
}

/**
 * Migrates legacy lower-case / camelCase string values to upper-case enum
 * constant names for report types, dashboard item shapes, display density,
 * font size, digit group separator, event report data types, chart/event
 * chart types and data entry form styles.
 */
private void updateEnums()
{
    // Report type
    executeSql( "update report set type='JASPER_REPORT_TABLE' where type='jasperReportTable'" );
    executeSql( "update report set type='JASPER_JDBC' where type='jasperJdbc'" );
    executeSql( "update report set type='HTML' where type='html'" );

    // Dashboard item shape
    executeSql( "update dashboarditem set shape='NORMAL' where shape ='normal'" );
    executeSql( "update dashboarditem set shape='DOUBLE_WIDTH' where shape ='double_width'" );
    executeSql( "update dashboarditem set shape='FULL_WIDTH' where shape ='full_width'" );

    // Display density
    executeSql( "update reporttable set displaydensity='COMFORTABLE' where displaydensity='comfortable'" );
    executeSql( "update reporttable set displaydensity='NORMAL' where displaydensity='normal'" );
    executeSql( "update reporttable set displaydensity='COMPACT' where displaydensity='compact'" );
    executeSql( "update eventreport set displaydensity='COMFORTABLE' where displaydensity='comfortable'" );
    executeSql( "update eventreport set displaydensity='NORMAL' where displaydensity='normal'" );
    executeSql( "update eventreport set displaydensity='COMPACT' where displaydensity='compact'" );

    // Font size
    executeSql( "update reporttable set fontsize='LARGE' where fontsize='large'" );
    executeSql( "update reporttable set fontsize='NORMAL' where fontsize='normal'" );
    executeSql( "update reporttable set fontsize='SMALL' where fontsize='small'" );
    executeSql( "update eventreport set fontsize='LARGE' where fontsize='large'" );
    executeSql( "update eventreport set fontsize='NORMAL' where fontsize='normal'" );
    executeSql( "update eventreport set fontsize='SMALL' where fontsize='small'" );

    // Digit group separator
    executeSql( "update reporttable set digitgroupseparator='NONE' where digitgroupseparator='none'" );
    executeSql( "update reporttable set digitgroupseparator='SPACE' where digitgroupseparator='space'" );
    executeSql( "update reporttable set digitgroupseparator='COMMA' where digitgroupseparator='comma'" );
    executeSql( "update eventreport set digitgroupseparator='NONE' where digitgroupseparator='none'" );
    executeSql( "update eventreport set digitgroupseparator='SPACE' where digitgroupseparator='space'" );
    executeSql( "update eventreport set digitgroupseparator='COMMA' where digitgroupseparator='comma'" );

    // Event report data type ('individual_cases' becomes EVENTS)
    executeSql( "update eventreport set datatype='AGGREGATED_VALUES' where datatype='aggregated_values'" );
    executeSql( "update eventreport set datatype='EVENTS' where datatype='individual_cases'" );

    // Chart type (both 'stackedcolumn' and 'stackedColumn' spellings occur)
    executeSql( "update chart set type='COLUMN' where type='column'" );
    executeSql( "update chart set type='STACKED_COLUMN' where type='stackedcolumn'" );
    executeSql( "update chart set type='STACKED_COLUMN' where type='stackedColumn'" );
    executeSql( "update chart set type='BAR' where type='bar'" );
    executeSql( "update chart set type='STACKED_BAR' where type='stackedbar'" );
    executeSql( "update chart set type='STACKED_BAR' where type='stackedBar'" );
    executeSql( "update chart set type='LINE' where type='line'" );
    executeSql( "update chart set type='AREA' where type='area'" );
    executeSql( "update chart set type='PIE' where type='pie'" );
    executeSql( "update chart set type='RADAR' where type='radar'" );
    executeSql( "update chart set type='GAUGE' where type='gauge'" );

    // Event chart type
    executeSql( "update eventchart set type='COLUMN' where type='column'" );
    executeSql( "update eventchart set type='STACKED_COLUMN' where type='stackedcolumn'" );
    executeSql( "update eventchart set type='STACKED_COLUMN' where type='stackedColumn'" );
    executeSql( "update eventchart set type='BAR' where type='bar'" );
    executeSql( "update eventchart set type='STACKED_BAR' where type='stackedbar'" );
    executeSql( "update eventchart set type='STACKED_BAR' where type='stackedBar'" );
    executeSql( "update eventchart set type='LINE' where type='line'" );
    executeSql( "update eventchart set type='AREA' where type='area'" );
    executeSql( "update eventchart set type='PIE' where type='pie'" );
    executeSql( "update eventchart set type='RADAR' where type='radar'" );
    executeSql( "update eventchart set type='GAUGE' where type='gauge'" );

    // Data entry form style (legacy 'regular' becomes NORMAL)
    executeSql( "update dataentryform set style='COMFORTABLE' where style='comfortable'" );
    executeSql( "update dataentryform set style='NORMAL' where style='regular'" );
    executeSql( "update dataentryform set style='COMPACT' where style='compact'" );
    executeSql( "update dataentryform set style='NONE' where style='none'" );
}

/**
 * Copies rows from the legacy datasetmembers join table into the
 * datasetelement table, generating ids and uids through the statement
 * builder, then drops the legacy table. Note that the insert and the drop
 * are sent to the database as a single multi-statement SQL string, followed
 * by statements that tighten the datasetelement column constraints.
 */
private void upgradeDataSetElements()
{
    String autoIncr = statementBuilder.getAutoIncrementValue();
    String uid = statementBuilder.getUid();

    String insertSql =
        "insert into datasetelement(datasetelementid,uid,datasetid,dataelementid,created,lastupdated) " +
        "select " + autoIncr + " as datasetelementid, " + uid + " as uid, " +
        "dsm.datasetid as datasetid, " +
        "dsm.dataelementid as dataelementid, " +
        "now() as created, " +
        "now() as lastupdated " +
        "from datasetmembers dsm; " +
        "drop table datasetmembers; ";

    executeSql( insertSql );

    executeSql( "alter table datasetelement alter column uid set not null" );
    executeSql( "alter table datasetelement alter column created set not null" );
    executeSql( "alter table datasetelement alter column lastupdated set not null" );
    executeSql( "alter table datasetelement alter column datasetid drop not null" );
}

/**
 * Migrates lower-case aggregation type values to upper-case enum constant
 * names for the given analytical object table.
 *
 * @param table the name of the table to update.
 */
private void upgradeAggregationType( String table )
{
    executeSql( "update " + table + " set aggregationtype='SUM' where aggregationtype='sum'" );
    executeSql( "update " + table + " set aggregationtype='COUNT' where aggregationtype='count'" );
    executeSql( "update " + table + " set aggregationtype='STDDEV' where aggregationtype='stddev'" );
    executeSql( "update " + table + " set aggregationtype='VARIANCE' where aggregationtype='variance'" );
    executeSql(
"update " + table + " set aggregationtype='MIN' where aggregationtype='min'" ); executeSql( "update " + table + " set aggregationtype='MAX' where aggregationtype='max'" ); executeSql( "update " + table + " set aggregationtype='DEFAULT' where aggregationtype='default' or aggregationtype is null" ); } private void updateRelativePeriods() { executeSql( "update relativeperiods set thismonth=reportingmonth" ); executeSql( "update relativeperiods set thisbimonth=reportingbimonth" ); executeSql( "update relativeperiods set thisquarter=reportingquarter" ); executeSql( "update relativeperiods set lastweek = false where lastweek is null" ); executeSql( "update relativeperiods set weeksthisyear = false where weeksthisyear is null" ); executeSql( "update relativeperiods set bimonthsthisyear = false where bimonthsthisyear is null" ); executeSql( "update relativeperiods set last4weeks = false where last4weeks is null" ); executeSql( "update relativeperiods set last12weeks = false where last12weeks is null" ); executeSql( "update relativeperiods set last6months = false where last6months is null" ); executeSql( "update relativeperiods set thismonth = false where thismonth is null" ); executeSql( "update relativeperiods set thisbimonth = false where thisbimonth is null" ); executeSql( "update relativeperiods set thisquarter = false where thisquarter is null" ); executeSql( "update relativeperiods set thissixmonth = false where thissixmonth is null" ); executeSql( "update relativeperiods set thisweek = false where thisweek is null" ); executeSql( "update relativeperiods set lastmonth = false where lastmonth is null" ); executeSql( "update relativeperiods set lastbimonth = false where lastbimonth is null" ); executeSql( "update relativeperiods set lastquarter = false where lastquarter is null" ); executeSql( "update relativeperiods set lastsixmonth = false where lastsixmonth is null" ); executeSql( "update relativeperiods set lastweek = false where lastweek is null" ); executeSql( 
"update relativeperiods set thisday = false where thisday is null" ); executeSql( "update relativeperiods set yesterday = false where yesterday is null" ); executeSql( "update relativeperiods set last3days = false where last3days is null" ); executeSql( "update relativeperiods set last7days = false where last7days is null" ); executeSql( "update relativeperiods set last14days = false where last14days is null" ); // Set non-null constraint on fields executeSql( "alter table relativeperiods alter column thisday set not null" ); executeSql( "alter table relativeperiods alter column yesterday set not null" ); executeSql( "alter table relativeperiods alter column last3Days set not null" ); executeSql( "alter table relativeperiods alter column last7Days set not null" ); executeSql( "alter table relativeperiods alter column last14Days set not null" ); executeSql( "alter table relativeperiods alter column thisMonth set not null" ); executeSql( "alter table relativeperiods alter column lastMonth set not null" ); executeSql( "alter table relativeperiods alter column thisBimonth set not null" ); executeSql( "alter table relativeperiods alter column lastBimonth set not null" ); executeSql( "alter table relativeperiods alter column thisQuarter set not null" ); executeSql( "alter table relativeperiods alter column lastQuarter set not null" ); executeSql( "alter table relativeperiods alter column thisSixMonth set not null" ); executeSql( "alter table relativeperiods alter column lastSixMonth set not null" ); executeSql( "alter table relativeperiods alter column monthsThisYear set not null" ); executeSql( "alter table relativeperiods alter column quartersThisYear set not null" ); executeSql( "alter table relativeperiods alter column thisYear set not null" ); executeSql( "alter table relativeperiods alter column monthsLastYear set not null" ); executeSql( "alter table relativeperiods alter column quartersLastYear set not null" ); executeSql( "alter table relativeperiods alter 
column lastYear set not null" ); executeSql( "alter table relativeperiods alter column last5Years set not null" ); executeSql( "alter table relativeperiods alter column last12Months set not null" ); executeSql( "alter table relativeperiods alter column last6Months set not null" ); executeSql( "alter table relativeperiods alter column last3Months set not null" ); executeSql( "alter table relativeperiods alter column last6BiMonths set not null" ); executeSql( "alter table relativeperiods alter column last4Quarters set not null" ); executeSql( "alter table relativeperiods alter column last2SixMonths set not null" ); executeSql( "alter table relativeperiods alter column thisFinancialYear set not null" ); executeSql( "alter table relativeperiods alter column lastFinancialYear set not null" ); executeSql( "alter table relativeperiods alter column last5FinancialYears set not null" ); executeSql( "alter table relativeperiods alter column thisWeek set not null" ); executeSql( "alter table relativeperiods alter column lastWeek set not null" ); executeSql( "alter table relativeperiods alter column last4Weeks set not null" ); executeSql( "alter table relativeperiods alter column last12Weeks set not null" ); executeSql( "alter table relativeperiods alter column last52Weeks set not null" ); } private void updateNameColumnLengths() { List<String> tables = Lists.newArrayList( "user", "usergroup", "organisationunit", "orgunitgroup", "orgunitgroupset", "section", "dataset", "sqlview", "dataelement", "dataelementgroup", "dataelementgroupset", "categorycombo", "dataelementcategory", "dataelementcategoryoption", "indicator", "indicatorgroup", "indicatorgroupset", "indicatortype", "validationrule", "validationrulegroup", "constant", "attribute", "attributegroup", "program", "programstage", "programindicator", "trackedentitytype", "trackedentityattribute" ); for ( String table : tables ) { executeSql( "alter table " + table + " alter column name type character varying(230)" ); } } 
    /**
     * Adds the attributeoptioncomboid column to the datavalue table, backfills
     * it with the default option combo and makes it part of the primary key.
     * Idempotent: returns early if the primary key already has 5 columns.
     */
    private void upgradeDataValuesWithAttributeOptionCombo()
    {
        final String sql = statementBuilder.getNumberOfColumnsInPrimaryKey( "datavalue" );

        Integer no = statementManager.getHolder().queryForInteger( sql );

        if ( no >= 5 )
        {
            return; // attributeoptioncomboid already part of pkey
        }

        int optionComboId = getDefaultOptionCombo();

        executeSql( "alter table datavalue drop constraint datavalue_pkey;" );
        executeSql( "alter table datavalue add column attributeoptioncomboid integer;" );
        executeSql( "update datavalue set attributeoptioncomboid = " + optionComboId + " where attributeoptioncomboid is null;" );
        executeSql( "alter table datavalue alter column attributeoptioncomboid set not null;" );
        executeSql( "alter table datavalue add constraint fk_datavalue_attributeoptioncomboid foreign key (attributeoptioncomboid) references categoryoptioncombo (categoryoptioncomboid) match simple;" );
        executeSql( "alter table datavalue add constraint datavalue_pkey primary key(dataelementid, periodid, sourceid, categoryoptioncomboid, attributeoptioncomboid);" );

        log.info( "Data value table upgraded with attributeoptioncomboid column" );
    }

    /**
     * Adds the attributeoptioncomboid column to completedatasetregistration,
     * backfills it with the default option combo and makes it part of the
     * primary key. Idempotent: returns early if the pkey already has 4 columns.
     */
    private void upgradeCompleteDataSetRegistrationsWithAttributeOptionCombo()
    {
        final String sql = statementBuilder.getNumberOfColumnsInPrimaryKey( "completedatasetregistration" );

        Integer no = statementManager.getHolder().queryForInteger( sql );

        if ( no >= 4 )
        {
            return; // attributeoptioncomboid already part of pkey
        }

        int optionComboId = getDefaultOptionCombo();

        executeSql( "alter table completedatasetregistration drop constraint completedatasetregistration_pkey" );
        executeSql( "alter table completedatasetregistration add column attributeoptioncomboid integer;" );
        executeSql( "update completedatasetregistration set attributeoptioncomboid = " + optionComboId + " where attributeoptioncomboid is null;" );
        executeSql( "alter table completedatasetregistration alter column attributeoptioncomboid set not null;" );
        executeSql( "alter table completedatasetregistration add constraint fk_completedatasetregistration_attributeoptioncomboid foreign key (attributeoptioncomboid) references categoryoptioncombo (categoryoptioncomboid) match simple;" );
        executeSql( "alter table completedatasetregistration add constraint completedatasetregistration_pkey primary key(datasetid, periodid, sourceid, attributeoptioncomboid);" );

        log.info( "Complete data set registration table upgraded with attributeoptioncomboid column" );
    }

    /**
     * Migrates single-valued map view dimension columns into the many-to-many
     * association tables used by the analytical object model, then drops the
     * legacy columns and backfills null user-org-unit flags.
     */
    private void upgradeMapViewsToAnalyticalObject()
    {
        executeSql( "insert into mapview_dataelements ( mapviewid, sort_order, dataelementid ) select mapviewid, 0, dataelementid from mapview where dataelementid is not null" );
        executeSql( "alter table mapview drop column dataelementid" );

        executeSql( "insert into mapview_dataelementoperands ( mapviewid, sort_order, dataelementoperandid ) select mapviewid, 0, dataelementoperandid from mapview where dataelementoperandid is not null" );
        executeSql( "alter table mapview drop column dataelementoperandid" );

        executeSql( "insert into mapview_indicators ( mapviewid, sort_order, indicatorid ) select mapviewid, 0, indicatorid from mapview where indicatorid is not null" );
        executeSql( "alter table mapview drop column indicatorid" );

        executeSql( "insert into mapview_organisationunits ( mapviewid, sort_order, organisationunitid ) select mapviewid, 0, parentorganisationunitid from mapview where parentorganisationunitid is not null" );
        executeSql( "alter table mapview drop column parentorganisationunitid" );

        executeSql( "insert into mapview_periods ( mapviewid, sort_order, periodid ) select mapviewid, 0, periodid from mapview where periodid is not null" );
        executeSql( "alter table mapview drop column periodid" );

        // The level id is resolved to the numeric level via the orgunitlevel table
        executeSql( "insert into mapview_orgunitlevels ( mapviewid, sort_order, orgunitlevel ) select m.mapviewid, 0, o.level " +
            "from mapview m join orgunitlevel o on (m.organisationunitlevelid=o.orgunitlevelid) where m.organisationunitlevelid is not null" );
        executeSql( "alter table mapview drop column organisationunitlevelid" );

        executeSql( "alter table mapview drop column dataelementgroupid" );
        executeSql( "alter table mapview drop column indicatorgroupid" );

        executeSql( "update mapview set userorganisationunit = false where userorganisationunit is null" );
        executeSql( "update mapview set userorganisationunitchildren = false where userorganisationunitchildren is null" );
        executeSql( "update mapview set userorganisationunitgrandchildren = false where userorganisationunitgrandchildren is null" );
    }

    /**
     * Converts the translation table to use translationid as its single-column
     * primary key and relaxes the not-null constraint on objectid.
     * Idempotent: returns early if the pkey is already a single column.
     */
    private void upgradeTranslations()
    {
        final String sql = statementBuilder.getNumberOfColumnsInPrimaryKey( "translation" );

        Integer no = statementManager.getHolder().queryForInteger( sql );

        if ( no == 1 )
        {
            return; // translationid already set as single pkey
        }

        executeSql( statementBuilder.getDropPrimaryKey( "translation" ) );
        executeSql( statementBuilder.getAddPrimaryKeyToExistingTable( "translation", "translationid" ) );
        executeSql( statementBuilder.getDropNotNullConstraint( "translation", "objectid", "integer" ) );
    }

    /**
     * Convert from older releases where dataApproval referenced dataset
     * instead of workflow:
     * <p>
     * For every dataset that has either ("approve data" == true) *or*
     * (existing data approval database records referencing it), a workflow will
     * be created with the same name as the data set. This workflow will be
     * associated with all approval levels in the system and have a period type
     * equal to the data set's period type. If the data set's approvedata ==
     * true, then the data set will be associated with this workflow.
     * If there are existing data approval records that reference this data set,
     * then they will be changed to reference the associated workflow instead.
     */
    private void upgradeToDataApprovalWorkflows()
    {
        // The no-op update acts as a probe: it returns -1 (error) once the
        // approvedata column has been dropped, i.e. after conversion.
        if ( executeSql( "update dataset set approvedata = approvedata where datasetid < 0" ) < 0 )
        {
            return; // Already converted because dataset.approvedata no longer exists.
        }

        executeSql( "insert into dataapprovalworkflow ( workflowid, uid, created, lastupdated, name, periodtypeid, userid, publicaccess ) " +
            "select " + statementBuilder.getAutoIncrementValue() + ", " + statementBuilder.getUid() + ", now(), now(), ds.name, ds.periodtypeid, ds.userid, ds.publicaccess " +
            "from (select datasetid from dataset where approvedata = true union select distinct datasetid from dataapproval) as a " +
            "join dataset ds on ds.datasetid = a.datasetid" );

        // Every workflow is associated with every approval level
        executeSql( "insert into dataapprovalworkflowlevels (workflowid, dataapprovallevelid) " +
            "select w.workflowid, l.dataapprovallevelid from dataapprovalworkflow w " +
            "cross join dataapprovallevel l" );

        // Workflows are matched back to datasets by name (workflows were created with the dataset name above)
        executeSql( "update dataset set workflowid = ( select w.workflowid from dataapprovalworkflow w where w.name = dataset.name)" );
        executeSql( "alter table dataset drop column approvedata cascade" ); // Cascade to SQL Views, if any.
        executeSql( "update dataapproval set workflowid = ( select ds.workflowid from dataset ds where ds.datasetid = dataapproval.datasetid)" );
        executeSql( "alter table dataapproval drop constraint dataapproval_unique_key" );
        executeSql( "alter table dataapproval drop column datasetid cascade" ); // Cascade to SQL Views, if any.

        log.info( "Added any workflows needed for approvble datasets and/or approved data." );
    }

    /**
     * Convert from pre-2.22 releases where the right hand sides of surveillance rules were
     * implicitly averaged. This just wraps the previous expression in a call to AVG().
     * <p>
     * We use the presence of the lowoutliers column to determine whether we need to make the
     * change. Just to be extra sure, our rewrite SQL won't rewrite rules which already have
     * references to AVG or STDDEV.
     */
    private void upgradeImplicitAverageMonitoringRules()
    {
        // Probe: the no-op update fails (-1) once the lowoutliers column is gone
        if ( executeSql( "update validationrule set lowoutliers = lowoutliers where validationruleid < 0" ) < 0 )
        {
            return; // Already converted because lowoutlier fields are gone
        }

        // Just to be extra sure, we don't modify any expressions which already contain a call to AVG or STDDEV
        executeSql( "INSERT INTO expressionsampleelement (expressionid, dataelementid) " +
            "SELECT ede.expressionid, ede.dataelementid " +
            "FROM expressiondataelement ede " +
            "JOIN expression e ON e.expressionid = ede.expressionid " +
            "JOIN validationrule v ON v.rightexpressionid = e.expressionid " +
            "WHERE v.ruletype='SURVEILLANCE' " +
            "AND e.expression NOT LIKE '%AVG%' and e.expression NOT LIKE '%STDDEV%';" );

        executeSql( "update expression set expression=" + statementBuilder.concatenate( "'AVG('", "expression", "')'" ) +
            " from validationrule where ruletype='SURVEILLANCE' AND rightexpressionid=expressionid " +
            "AND expression NOT LIKE '%AVG%' and expression NOT LIKE '%STDDEV%';" );

        executeSql( "ALTER TABLE validationrule DROP COLUMN highoutliers" );
        executeSql( "ALTER TABLE validationrule DROP COLUMN lowoutliers" );

        log.info( "Added explicit AVG calls to olid-style implicit average surveillance rules" );
    }

    /**
     * Returns the distinct values of the given column from the given table.
     * Errors are logged and an empty (or partial) list is returned.
     *
     * @param table the database table to query.
     * @param col1 the column whose distinct values are returned.
     * @return a list of distinct integer ids, possibly empty.
     */
    private List<Integer> getDistinctIdList( String table, String col1 )
    {
        StatementHolder holder = statementManager.getHolder();

        List<Integer> distinctIds = new ArrayList<>();

        try
        {
            Statement statement = holder.getStatement();

            ResultSet resultSet = statement.executeQuery( "SELECT DISTINCT " + col1 + " FROM " + table );

            while ( resultSet.next() )
            {
                distinctIds.add( resultSet.getInt( 1 ) );
            }
        }
        catch ( Exception ex )
        {
            log.error( ex );
        }
        finally
        {
            holder.close();
        }

        return distinctIds;
    }

    /**
     * For each id in distinctIds, collects the values of col2 from rows where
     * col1 equals that id. Errors are logged and a partial map may be returned.
     *
     * @param table the database table to query.
     * @param col1 the column matched against the distinct ids.
     * @param col2 the column whose values are collected.
     * @param distinctIds the ids to look up.
     * @return a map from each distinct id to its list of associated col2 values.
     */
    private Map<Integer, List<Integer>> getIdMap( String table, String col1, String col2, List<Integer> distinctIds )
    {
        StatementHolder holder = statementManager.getHolder();

        Map<Integer, List<Integer>> idMap = new HashMap<>();

        try
        {
            Statement statement = holder.getStatement();

            for ( Integer distinctId : distinctIds )
            {
                List<Integer> foreignIds = new ArrayList<>();

                ResultSet resultSet = statement.executeQuery( "SELECT " + col2 + " FROM " + table + " WHERE " + col1 + "=" + distinctId );

                while ( resultSet.next() )
                {
                    foreignIds.add( resultSet.getInt( 1 ) );
                }

                idMap.put( distinctId, foreignIds );
            }
        }
        catch ( Exception ex )
        {
            log.error( ex );
        }
        finally
        {
            holder.close();
        }

        return idMap;
    }

    /**
     * Replaces the boolean hideemptyrows column on chart and eventchart with
     * the enum-valued hideemptyrowitems column (false/null -> NONE, true -> ALL).
     */
    private void updateHideEmptyRows()
    {
        executeSql(
            "update chart set hideemptyrowitems = 'NONE' where hideemptyrows is false or hideemptyrows is null; " +
            "update chart set hideemptyrowitems = 'ALL' where hideemptyrows is true; " +
            "alter table chart alter column hideemptyrowitems set not null; " +
            "alter table chart drop column hideemptyrows;" );

        executeSql(
            "update eventchart set hideemptyrowitems = 'NONE' where hideemptyrows is false or hideemptyrows is null; " +
            "update eventchart set hideemptyrowitems = 'ALL' where hideemptyrows is true; " +
            "alter table eventchart alter column hideemptyrowitems set not null; " +
            "alter table eventchart drop column hideemptyrows;" );
    }

    /**
     * Assigns sequential sort_order values (starting at 1) to the rows of an
     * association table, per distinct value of col1, in the order rows are
     * returned by the database.
     *
     * @param table the association table to update.
     * @param col1 the grouping column.
     * @param col2 the column identifying individual rows within a group.
     */
    private void updateSortOrder( String table, String col1, String col2 )
    {
        List<Integer> distinctIds = getDistinctIdList( table, col1 );

        log.info( "Got distinct ids: " + distinctIds.size() );

        Map<Integer, List<Integer>> idMap = getIdMap( table, col1, col2, distinctIds );

        log.info( "Got id map: " + idMap.size() );

        for ( Integer distinctId : idMap.keySet() )
        {
            int sortOrder = 1;

            for ( Integer foreignId : idMap.get( distinctId ) )
            {
                String sql = "UPDATE " + table + " SET sort_order=" + sortOrder++ + " WHERE " + col1 + "=" + distinctId + " AND " + col2 + "=" + foreignId;

                int count = executeSql( sql );

                log.info( "Executed: " + count + " - " + sql );
            }
        }
    }

    /**
     * Returns the id of the category option combo belonging to the category
     * combo named 'default'.
     */
    private Integer getDefaultOptionCombo()
    {
        String sql = "select coc.categoryoptioncomboid from categoryoptioncombo coc " +
            "inner join categorycombos_optioncombos cco on coc.categoryoptioncomboid=cco.categoryoptioncomboid " +
            "inner join categorycombo cc on cco.categorycomboid=cc.categorycomboid " +
            "where cc.name='default';";

        return statementManager.getHolder().queryForInteger( sql );
    }

    /**
     * Returns the id of the category combo named 'default'.
     */
    private Integer getDefaultCategoryCombo()
    {
        String sql = "select categorycomboid from categorycombo where name = 'default'";

        return statementManager.getHolder().queryForInteger( sql );
    }

    /**
     * Migrates rows from the legacy optionsetmembers table into optionvalue,
     * then drops the legacy table if the migration succeeded.
     */
    private void updateOptions()
    {
        String sql = "insert into optionvalue(optionvalueid, code, name, optionsetid, sort_order) " +
            "select " + statementBuilder.getAutoIncrementValue() + ", optionvalue, optionvalue, optionsetid, ( sort_order + 1 ) " +
            "from optionsetmembers";

        int result = executeSql( sql );

        // Only drop the legacy table if the insert did not fail
        if ( result != -1 )
        {
            executeSql( "drop table optionsetmembers" );
        }
    }

    /**
     * Upgrades existing map views to use mapview_columns for multiple column
     * dimensions.
     */
    private void upgradeMapViewsToColumns()
    {
        String sql =
            "insert into mapview_columns(mapviewid, sort_order, dimension) " +
            "select mapviewid, 0, 'dx' " +
            "from mapview mv " +
            "where not exists (" +
                "select mc.mapviewid " +
                "from mapview_columns mc " +
                "where mv.mapviewid = mc.mapviewid)";

        executeSql( sql );
    }

    /**
     * Upgrades data dimension items to use embedded data element operands.
     */
    private void upgradeDataDimensionsToEmbeddedOperand()
    {
        String sql =
            "update datadimensionitem di " +
            "set dataelementoperand_dataelementid = ( " +
                "select op.dataelementid " +
                "from dataelementoperand op " +
                "where di.dataelementoperandid=op.dataelementoperandid " +
            "), " +
            "dataelementoperand_categoryoptioncomboid = ( " +
                "select op.categoryoptioncomboid " +
                "from dataelementoperand op " +
                "where di.dataelementoperandid=op.dataelementoperandid " +
            ") " +
            "where di.dataelementoperandid is not null; " +
            "alter table datadimensionitem drop column dataelementoperandid;";

        executeSql( sql );
    }

    /**
     * Upgrade data dimension items for legacy data sets to use REPORTING_RATE
     * as metric.
     */
    private void upgradeDataDimensionItemsToReportingRateMetric()
    {
        String sql =
            "update datadimensionitem " +
            "set metric='REPORTING_RATE' " +
            "where datasetid is not null " +
            "and metric is null;";

        executeSql( sql );
    }

    /**
     * Upgrades data dimension items to use embedded
     * ProgramTrackedEntityAttributeDimensionItem class.
     */
    private void upgradeDataDimensionItemToEmbeddedProgramAttribute()
    {
        String sql =
            "update datadimensionitem di " +
            "set programattribute_programid = (select programid from program_attributes where programtrackedentityattributeid=di.programattributeid), " +
            "programattribute_attributeid = (select trackedentityattributeid from program_attributes where programtrackedentityattributeid=di.programattributeid) " +
            "where programattributeid is not null " +
            "and (programattribute_programid is null and programattribute_attributeid is null); " +
            "alter table datadimensionitem drop column programattributeid;";

        executeSql( sql );
    }

    /**
     * Upgrades data dimension items to use embedded
     * ProgramDataElementDimensionItem class.
     */
    private void upgradeDataDimensionItemToEmbeddedProgramDataElement()
    {
        String sql =
            "update datadimensionitem di " +
            "set programdataelement_programid = (select programid from programdataelement where programdataelementid=di.programdataelementid), " +
            "programdataelement_dataelementid = (select dataelementid from programdataelement where programdataelementid=di.programdataelementid) " +
            "where di.programdataelementid is not null " +
            "and (programdataelement_programid is null and programdataelement_dataelementid is null); " +
            "alter table datadimensionitem drop column programdataelementid; " +
            "drop table programdataelementtranslations; " +
            "drop table programdataelement;"; // Remove if program data element is to be reintroduced

        executeSql( sql );
    }

    /**
     * Creates an utility function in the database for generating uid values in select statements.
     * Example usage: select uid();
     * <p>
     * NOTE(review): the function body uses PostgreSQL-specific syntax.
     */
    private void insertUidDbFunction()
    {
        String uidFunction =
            "CREATE OR REPLACE FUNCTION uid() RETURNS text AS $$ SELECT substring('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' " +
            "FROM (random()*51)::int +1 for 1) || array_to_string(ARRAY(SELECT substring('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' " +
            " FROM (random()*61)::int + 1 FOR 1) FROM generate_series(1,10)), '') $$ LANGUAGE sql;";
        executeSql( uidFunction );
    }

    /**
     * Inserts default {@link AnalyticsPeriodBoundary} objects for program indicators that
     * have no boundaries defined. The boundaries inserted depend on the analyticsType of
     * the program indicator (EVENT or ENROLLMENT).
     */
    private void insertDefaultBoundariesForBoundlessProgramIndicators()
    {
        // A temp table collects the program indicators with zero boundaries,
        // then a start and an end boundary is inserted per indicator.
        String findBoundlessAndInsertDefaultBoundaries =
            "create temporary table temp_unbounded_programindicators (programindicatorid integer,analyticstype varchar(10)) on commit drop;" +
            "insert into temp_unbounded_programindicators (programindicatorid,analyticstype ) select pi.programindicatorid,pi.analyticstype " +
            "from programindicator pi left join analyticsperiodboundary apb on apb.programindicatorid = pi.programindicatorid group by pi.programindicatorid " +
            "having count(apb.*) = 0;" +
            "insert into analyticsperiodboundary (analyticsperiodboundaryid, uid, created, lastupdated, boundarytarget,analyticsperiodboundarytype, programindicatorid) " +
            "select nextval('hibernate_sequence'), uid(), now(), now(), 'EVENT_DATE', 'AFTER_START_OF_REPORTING_PERIOD', ubpi.programindicatorid " +
            "from temp_unbounded_programindicators ubpi where ubpi.analyticstype = 'EVENT';" +
            "insert into analyticsperiodboundary (analyticsperiodboundaryid, uid, created, lastupdated, boundarytarget,analyticsperiodboundarytype, programindicatorid) " +
            "select nextval('hibernate_sequence'), uid(), now(), now(), 'EVENT_DATE', 'BEFORE_END_OF_REPORTING_PERIOD', ubpi.programindicatorid " +
            "from temp_unbounded_programindicators ubpi where ubpi.analyticstype = 'EVENT';" +
            "insert into analyticsperiodboundary (analyticsperiodboundaryid, uid, created, lastupdated, boundarytarget,analyticsperiodboundarytype, programindicatorid) " +
            "select nextval('hibernate_sequence'), uid(), now(), now(), 'ENROLLMENT_DATE', 'AFTER_START_OF_REPORTING_PERIOD', ubpi.programindicatorid " +
            "from temp_unbounded_programindicators ubpi where ubpi.analyticstype = 'ENROLLMENT';" +
            "insert into analyticsperiodboundary (analyticsperiodboundaryid, uid, created, lastupdated, boundarytarget,analyticsperiodboundarytype, programindicatorid) " +
            "select nextval('hibernate_sequence'), uid(), now(), now(), 'ENROLLMENT_DATE', 'BEFORE_END_OF_REPORTING_PERIOD', ubpi.programindicatorid " +
            "from temp_unbounded_programindicators ubpi where ubpi.analyticstype = 'ENROLLMENT';";
        executeSql( findBoundlessAndInsertDefaultBoundaries );
    }

    /**
     * Executes the given SQL statement. Deliberately best-effort: failures are
     * logged at debug level and signalled with -1, since many upgrade
     * statements are expected to fail on already-upgraded databases.
     *
     * @param sql the SQL statement to execute.
     * @return the number of affected rows, or -1 if the statement failed.
     */
    private int executeSql( String sql )
    {
        try
        {
            // TODO use jdbcTemplate

            return statementManager.getHolder().executeUpdate( sql );
        }
        catch ( Exception ex )
        {
            log.debug( ex );

            return -1;
        }
    }

    /**
     * Adds a translation-table descriptor (class name, translation table,
     * object table, object id column) to the given list.
     *
     * @param listTables the list to append the descriptor to.
     * @param className the translated entity's class name.
     * @param translationTable the legacy translation table name.
     * @param objectTable the entity's database table name.
     * @param objectId the entity table's id column name.
     */
    private void addTranslationTable( List<Map<String, String>> listTables, String className, String translationTable,
        String objectTable, String objectId )
    {
        Map<String, String> mapTables = new HashMap<>();
        mapTables.put( "className", className );
        mapTables.put( "translationTable", translationTable );
        mapTables.put( "objectTable", objectTable );
        mapTables.put( "objectId", objectId );
        listTables.add( mapTables );
    }

    // Registers every legacy translation table for migration to the unified
    // object translation model. (Method continues past this excerpt.)
    private void updateObjectTranslation()
    {
        List<Map<String, String>> listTables = new ArrayList<>();
        addTranslationTable( listTables, "DataElement", "dataelementtranslations", "dataelement", "dataelementid" );
        addTranslationTable( listTables, "DataElementCategory", "dataelementcategorytranslations", "dataelementcategory", "categoryid" );
        addTranslationTable( listTables, "Attribute", "attributetranslations", "attribute", "attributeid" );
        addTranslationTable( listTables, "Indicator", "indicatortranslations", "indicator", "indicatorid" );
        addTranslationTable(
listTables, "OrganisationUnit", "organisationUnittranslations", "organisationunit", "organisationunitid" ); addTranslationTable( listTables, "DataElementCategoryCombo", "categorycombotranslations", "categorycombo", "categorycomboid" ); addTranslationTable( listTables, "OrganisationUnit", "organisationUnittranslations", "organisationunit", "organisationunitid" ); addTranslationTable( listTables, "DataElementGroup", "dataelementgrouptranslations", "dataelementgroup", "dataelementgroupid" ); addTranslationTable( listTables, "DataSet", "datasettranslations", "dataset", "datasetid" ); addTranslationTable( listTables, "IndicatorType", "indicatortypetranslations", "indicatortype", "indicatortypeid" ); addTranslationTable( listTables, "Section", "datasetsectiontranslations", "section", "sectionid" ); addTranslationTable( listTables, "Chart", "charttranslations", "chart", "chartid" ); addTranslationTable( listTables, "Color", "colortranslations", "color", "colorid" ); addTranslationTable( listTables, "ColorSet", "colorsettranslations", "colorset", "colorsetid" ); addTranslationTable( listTables, "Constant", "constanttranslations", "constant", "constantid" ); addTranslationTable( listTables, "Dashboard", "dashboardtranslations", "dashboard", "dashboardid" ); addTranslationTable( listTables, "DashboardItem", "dashboarditemtranslations", "dashboarditemid", "dashboarditemid" ); addTranslationTable( listTables, "DataApprovalLevel", "dataapprovalleveltranslations", "dataapprovallevel", "dataapprovallevelid" ); addTranslationTable( listTables, "DataApprovalWorkflow", "dataapprovalworkflowtranslations", "dataapprovalworkflow", "workflowid" ); addTranslationTable( listTables, "CategoryOptionGroup", "categoryoptiongrouptranslations", "categoryoptiongroup", "categoryoptiongroupid" ); addTranslationTable( listTables, "CategoryOptionGroupSet", "categoryoptiongroupsettranslations", "categoryoptiongroupset", "categoryoptiongroupsetid" ); addTranslationTable( listTables, 
"DataElementCategoryOption", "categoryoptiontranslations", "dataelementcategoryoption", "categoryoptionid" ); addTranslationTable( listTables, "DataElementCategoryOptionCombo", "categoryoptioncombotranslations", "categoryoptioncombo", "categoryoptioncomboid" ); addTranslationTable( listTables, "DataElementGroupSet", "dataelementgroupsettranslations", "dataelementgroupset", "dataelementgroupsetid" ); addTranslationTable( listTables, "DataElementOperand", "dataelementoperandtranslations", "dataelementoperand", "dataelementoperandid" ); addTranslationTable( listTables, "DataEntryForm", "dataentryformtranslations", "dataentryform", "dataentryformid" ); addTranslationTable( listTables, "DataStatistics", "statisticstranslations", "datastatistics", "statisticsid" ); addTranslationTable( listTables, "Document", "documenttranslations", "document", "documentid" ); addTranslationTable( listTables, "EventChart", "eventcharttranslations", "eventchart", "eventchartid" ); addTranslationTable( listTables, "EventReport", "eventreporttranslations", "eventreport", "eventreportid" ); addTranslationTable( listTables, "IndicatorGroup", "indicatorgrouptranslations", "indicatorgroup", "indicatorgroupid" ); addTranslationTable( listTables, "IndicatorGroupSet", "indicatorgroupsettranslations", "indicatorgroupset", "indicatorgroupsetid" ); addTranslationTable( listTables, "Interpretation", "interpretationtranslations", "interpretation", "interpretationid" ); addTranslationTable( listTables, "InterpretationComment", "interpretationcommenttranslations", "interpretationcomment", "interpretationcommentid" ); addTranslationTable( listTables, "Legend", "maplegendtranslations", "maplegend", "maplegendid" ); addTranslationTable( listTables, "LegendSet", "maplegendsettranslations", "maplegendset", "maplegendsetid" ); addTranslationTable( listTables, "Map", "maptranslations", "map", "mapid" ); addTranslationTable( listTables, "MapLayer", "maplayertranslations", "maplayer", "maplayerid" ); 
addTranslationTable( listTables, "MapView", "mapviewtranslations", "mapview", "mapviewid" ); addTranslationTable( listTables, "Message", "messagetranslations", "message", "messageid" ); addTranslationTable( listTables, "MessageConversation", "messageconversationtranslations", "messageconversation", "messageconversationid" ); addTranslationTable( listTables, "Option", "optionvaluetranslations", "optionvalue", "optionvalueid" ); addTranslationTable( listTables, "OptionSet", "optionsettranslations", "optionset", "optionsetid" ); addTranslationTable( listTables, "OrganisationUnit", "organisationunittranslations", "organisationunit", "organisationunitid" ); addTranslationTable( listTables, "OrganisationUnitGroup", "orgunitgrouptranslations", "orgunitgroup", "orgunitgroupid" ); addTranslationTable( listTables, "OrganisationUnitGroupSet", "orgunitgroupsettranslations", "orgunitgroupset", "orgunitgroupsetid" ); addTranslationTable( listTables, "OrganisationUnitLevel", "orgunitleveltranslations", "orgunitlevel", "orgunitlevelid" ); addTranslationTable( listTables, "Period", "periodtranslations", "period", "periodid" ); addTranslationTable( listTables, "Program", "programtranslations", "program", "programid" ); addTranslationTable( listTables, "ProgramDataElement", "programdataelementtranslations", "programdataelement", "programdataelementid" ); addTranslationTable( listTables, "ProgramIndicator", "programindicatortranslations", "programindicator", "programindicatorid" ); addTranslationTable( listTables, "ProgramInstance", "programinstancetranslations", "programinstance", "programinstanceid" ); addTranslationTable( listTables, "ProgramMessage", "programmessagetranslations", "programmessage", "id" ); addTranslationTable( listTables, "ProgramStage", "programstagetranslations", "programstage", "programstageid" ); addTranslationTable( listTables, "ProgramStageDataElement", "programstagedataelementtranslations", "programstagedataelement", "programstagedataelementid" ); 
addTranslationTable( listTables, "ProgramStageInstance", "programstageinstancetranslations", "programstageinstance", "programstageinstanceid" ); addTranslationTable( listTables, "ProgramStageSection", "programstagesectiontranslations", "programstagesection", "programstagesectionid" ); addTranslationTable( listTables, "ProgramTrackedEntityAttribute", "programattributestranslations", "programtrackedentityattribute", "programtrackedentityattributeid" ); addTranslationTable( listTables, "ProgramRule", "programruletranslations", "programrule", "programruleid" ); addTranslationTable( listTables, "ProgramRuleAction", "programruleactiontranslations", "programruleaction", "programruleactionid" ); addTranslationTable( listTables, "ProgramRuleVariable", "programrulevariabletranslations", "programrulevariable", "programrulevariableid" ); addTranslationTable( listTables, "RelationshipType", "relationshiptypetranslations", "relationshiptype", "relationshiptypeid" ); addTranslationTable( listTables, "Report", "reporttranslations", "report", "reportid" ); addTranslationTable( listTables, "ReportTable", "reporttabletranslations", "reporttable", "reporttableid" ); addTranslationTable( listTables, "TrackedEntityType", "trackedentitytranslations", "trackedentitytype", "trackedentitytypeid" ); addTranslationTable( listTables, "TrackedEntityAttribute", "trackedentityattributetranslations", "trackedentityattribute", "trackedentityattributeid" ); addTranslationTable( listTables, "TrackedEntityInstance", "trackedentityinstancetranslations", "trackedentityinstance", "trackedentityinstanceid" ); addTranslationTable( listTables, "User", "userinfotranslations", "userinfo", "userinfoid" ); addTranslationTable( listTables, "UserAuthorityGroup", "userroletranslations", "userrole", "userroleid" ); addTranslationTable( listTables, "UserCredentials", "usertranslations", "users", "userid" ); addTranslationTable( listTables, "UserGroup", "usergrouptranslations", "usergroup", "usergroupid" ); 
addTranslationTable( listTables, "ValidationCriteria", "validationcriteriatranslations", "validationcriteria", "validationcriteriaid" );
addTranslationTable( listTables, "ValidationRule", "validationruletranslations", "validationrule", "validationruleid" );
addTranslationTable( listTables, "ValidationRuleGroup", "validationrulegrouptranslations", "validationrulegroup", "validationrulegroupid" );

// Temporary column used to carry the legacy numeric object id through the migration.
executeSql( "alter table translation add column objectid integer;" );

String sql;

for ( Map<String, String> table : listTables )
{
    // Step 1: copy each legacy "translation" row into the new generic "objecttranslation"
    // table, normalising the property names (shortName -> SHORT_NAME etc.) and skipping
    // rows that are incomplete, already migrated, or whose owning object no longer exists
    // (matched either by numeric id or by uid).
    sql = " insert into objecttranslation ( objecttranslationid, locale , property , value ) "
        + " select t.translationid, t.locale, "
        + " case when t.objectproperty = 'shortName' then 'SHORT_NAME' "
        + " when t.objectproperty = 'formName' then 'FORM_NAME' "
        + " when t.objectproperty = 'name' then 'NAME' "
        + " when t.objectproperty = 'description' then'DESCRIPTION'"
        + " else t.objectproperty "
        + " end ,"
        + " t.value "
        + " from translation as t "
        + " where t.objectclass = '" + table.get( "className" ) + "'"
        + " and t.objectproperty is not null "
        + " and t.locale is not null "
        + " and t.value is not null "
        + " and not exists ( select 1 from objecttranslation where objecttranslationid = t.translationid ) "
        + " and ( "
        + " exists ( select 1 from " + table.get( "objectTable" ) + " where " + table.get( "objectId" ) + " = t.objectid ) "
        + " or exists ( select 1 from " + table.get( "objectTable" ) + " where uid = t.objectuid ) "
        + " ) ;";

    executeSql( sql );

    // Step 2: link each migrated objecttranslation row to its owning object in the
    // entity-specific join table, resolving the object id from the uid when the legacy
    // row lacked a numeric id. Idempotent thanks to the "not exists" guard.
    // NOTE(review): table names here are interpolated from the list built above, not from
    // user input, so SQL injection is not a concern in this migration.
    sql = " insert into " + table.get( "translationTable" ) + " ( " + table.get( "objectId" ) + ", objecttranslationid ) "
        + " select "
        + " case when t.objectid is not null then t.objectid "
        + " else ( select " + table.get( "objectId" ) + " from " + table.get( "objectTable" ) + " where uid = t.objectuid ) "
        + " end,"
        + " o.objecttranslationid "
        + " from objecttranslation o inner join translation t on o.objecttranslationid = t.translationid and t.objectclass = '" + table.get( "className" ) + "'"
        + " and not exists ( select 1 from " + table.get( "translationTable" ) + " where objecttranslationid = o.objecttranslationid) ;";

    executeSql( sql );
}
}

/**
 * Moves the legend -> legend set relationship from the maplegendsetmaplegend join table
 * onto a direct foreign key column, then drops the now-redundant join table.
 */
private void updateLegendRelationship()
{
    String sql = "update maplegend l set maplegendsetid = (select legendsetid from maplegendsetmaplegend m where m.maplegendid = l.maplegendid);";
    executeSql( sql );

    sql = " drop table maplegendsetmaplegend";
    executeSql( sql );
}

/**
 * Widens the "filter" columns of the tracked-entity dimension tables to text so that
 * arbitrarily long filter expressions can be stored.
 */
private void updateDimensionFilterToText()
{
    executeSql( "alter table trackedentityattributedimension alter column \"filter\" type text;" );
    executeSql( "alter table trackedentitydataelementdimension alter column \"filter\" type text;" );
    executeSql( "alter table trackedentityprogramindicatordimension alter column \"filter\" type text;" );
}
}
package org.embulk.input.http;

import com.google.common.base.Optional;
import com.google.common.base.Throwables;
import com.google.common.collect.Lists;
import org.apache.http.Header;
import org.apache.http.NameValuePair;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicNameValuePair;
import org.embulk.config.Config;
import org.embulk.config.ConfigDefault;
import org.embulk.config.ConfigDiff;
import org.embulk.config.ConfigInject;
import org.embulk.config.ConfigSource;
import org.embulk.config.Task;
import org.embulk.config.TaskReport;
import org.embulk.config.TaskSource;
import org.embulk.spi.BufferAllocator;
import org.embulk.spi.Exec;
import org.embulk.spi.FileInputPlugin;
import org.embulk.spi.TransactionalFileInput;
import org.embulk.spi.util.InputStreamFileInput;
import org.embulk.spi.util.RetryExecutor;
import org.slf4j.Logger;

import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Embulk file-input plugin that reads its "file" contents from an HTTP(S) endpoint.
 * Each Embulk task issues one GET or POST request (optionally expanded from the
 * "params"/"pager" options into multiple query combinations, one per task), retries
 * failures via Embulk's RetryExecutor, and streams the response body to the framework.
 */
public class HttpFileInputPlugin implements FileInputPlugin {
    private final Logger logger = Exec.getLogger(getClass());

    /**
     * Task configuration. Plain @Config getters are user-facing options; the setter
     * pairs (queries, httpMethod, requestInterval) carry state computed in
     * transaction() through task serialization to open().
     */
    public interface PluginTask extends Task {
        @Config("url")
        String getUrl();

        // Charset advertised in the Accept-Charset header (not used for decoding here).
        @Config("charset")
        @ConfigDefault("\"utf-8\"")
        String getCharset();

        // "get" or "post" (case-insensitive); parsed into HttpMethod in transaction().
        @Config("method")
        @ConfigDefault("\"get\"")
        String getMethod();

        @Config("user_agent")
        @ConfigDefault("\"Embulk::Input::HttpFileInputPlugin\"")
        String getUserAgent();

        // Connect timeout in milliseconds.
        @Config("open_timeout")
        @ConfigDefault("2000")
        int getOpenTimeout();

        // Socket read timeout in milliseconds.
        @Config("read_timeout")
        @ConfigDefault("10000")
        int getReadTimeout();

        @Config("max_retries")
        @ConfigDefault("5")
        int getMaxRetries();

        // Initial retry wait in milliseconds (grows up to the 30-minute cap set in open()).
        @Config("retry_interval")
        @ConfigDefault("10000")
        int getRetryInterval();

        // Pause between consecutive requests in milliseconds; forced to 0 when there is
        // only a single task (see transaction()).
        @Config("request_interval")
        @ConfigDefault("0")
        int getRequestInterval();

        void setRequestInterval(int requestInterval);

        // NOTE(review): primitive boolean with a "null" default looks inconsistent —
        // confirm how Embulk's config loader materialises this; a missing value may fail
        // or default to false depending on the framework version.
        @Config("interval_includes_response_time")
        @ConfigDefault("null")
        boolean getIntervalIncludesResponseTime();

        @Config("params")
        @ConfigDefault("null")
        Optional<ParamsOption> getParams();

        @Config("basic_auth")
        @ConfigDefault("null")
        Optional<BasicAuthOption> getBasicAuth();

        @Config("pager")
        @ConfigDefault("null")
        Optional<PagerOption> getPager();

        @Config("request_headers")
        @ConfigDefault("{}")
        Map<String, String> getRequestHeaders();

        @ConfigInject
        BufferAllocator getBufferAllocator();

        // One query-set per task; empty when neither params nor pager is configured.
        List<List<QueryOption.Query>> getQueries();

        void setQueries(List<List<QueryOption.Query>> queries);

        HttpMethod getHttpMethod();

        void setHttpMethod(HttpMethod httpMethod);
    }

    public enum HttpMethod {
        POST,
        GET
    }

    /**
     * Expands params/pager into per-task query sets, resolves the HTTP method, and
     * hands control to the framework with the resulting task count.
     */
    @Override
    public ConfigDiff transaction(ConfigSource config, FileInputPlugin.Control control) {
        PluginTask task = config.loadConfig(PluginTask.class);

        final int tasks;
        if (task.getParams().isPresent()) {
            // params (optionally combined with pager) -> one task per query combination.
            List<List<QueryOption.Query>> queries = task.getParams().get().generateQueries(task.getPager());
            task.setQueries(queries);
            tasks = queries.size();
        } else if (task.getPager().isPresent()) {
            // pager alone -> one task per page.
            List<List<QueryOption.Query>> queries = task.getPager().get().expand();
            task.setQueries(queries);
            tasks = queries.size();
        } else {
            // Single request: no queries, and no inter-request wait needed.
            task.setQueries(Lists.<List<QueryOption.Query>>newArrayList());
            task.setRequestInterval(0);
            tasks = 1;
        }

        // Throws IllegalArgumentException for anything other than get/post.
        task.setHttpMethod(HttpMethod.valueOf(task.getMethod().toUpperCase()));

        return resume(task.dump(), tasks, control);
    }

    @Override
    public ConfigDiff resume(TaskSource taskSource, int taskCount, FileInputPlugin.Control control) {
        control.run(taskSource, taskCount);
        return Exec.newConfigDiff();
    }

    @Override
    public void cleanup(TaskSource taskSource, int taskCount, List<TaskReport> successTaskReports) {
    }

    /**
     * Builds and executes the request for one task, retrying via RetryExecutor, and
     * wraps the (possibly still streaming) response body in a PluginFileInput.
     */
    @Override
    public TransactionalFileInput open(TaskSource taskSource, int taskIndex) {
        PluginTask task = taskSource.loadTask(PluginTask.class);

        HttpRequestBase request;
        try {
            request = makeRequest(task, taskIndex);
        } catch (URISyntaxException | UnsupportedEncodingException e) {
            throw Throwables.propagate(e);
        }

        // Automatic retries are disabled on the client because retrying is handled
        // explicitly by RetryExecutor below.
        HttpClientBuilder builder = HttpClientBuilder.create()
                .disableAutomaticRetries()
                .setDefaultRequestConfig(makeRequestConfig(task))
                .setDefaultHeaders(makeHeaders(task));

        if (task.getBasicAuth().isPresent()) {
            builder.setDefaultCredentialsProvider(makeCredentialsProvider(task.getBasicAuth().get(), request));
        }

        HttpClient client = builder.build();

        logger.info(String.format("%s \"%s\"", task.getMethod().toUpperCase(), request.getURI().toString()));

        RetryableHandler retryable = new RetryableHandler(client, request);
        long startTimeMills = System.currentTimeMillis();
        try {
            RetryExecutor.retryExecutor().
                    withRetryLimit(task.getMaxRetries()).
                    withInitialRetryWait(task.getRetryInterval()).
                    withMaxRetryWait(30 * 60 * 1000).
                    runInterruptible(retryable);

            InputStream stream = retryable.getResponse().getEntity().getContent();
            PluginFileInput input = new PluginFileInput(task, stream, startTimeMills);
            // NOTE(review): dead store — there is no finally block that would close
            // "stream", so nulling it has no effect. Ownership of the stream passes to
            // PluginFileInput above.
            stream = null;
            return input;
        } catch (Exception e) {
            throw Throwables.propagate(e);
        }
    }

    /** Builds a credentials provider scoped to the request's host and port. */
    private CredentialsProvider makeCredentialsProvider(BasicAuthOption basicAuth, HttpRequestBase request) {
        final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        final AuthScope authScope = new AuthScope(request.getURI().getHost(), request.getURI().getPort());
        credentialsProvider.setCredentials(authScope,
                new UsernamePasswordCredentials(basicAuth.getUser(), basicAuth.getPassword()));
        return credentialsProvider;
    }

    /**
     * Builds the GET or POST request for this task. The task's query-set (if any) goes
     * into the URL for GET and into a form-encoded body for POST.
     */
    private HttpRequestBase makeRequest(PluginTask task, int taskIndex)
            throws URISyntaxException, UnsupportedEncodingException {
        final List<QueryOption.Query> queries = (task.getQueries().isEmpty()) ?
                null : task.getQueries().get(taskIndex);
        if (task.getHttpMethod() == HttpMethod.GET) {
            HttpGet request = new HttpGet(task.getUrl());
            if (queries != null) {
                URIBuilder builder = new URIBuilder(request.getURI());
                for (QueryOption.Query q : queries) {
                    for (String v : q.getValues()) {
                        builder.addParameter(q.getName(), v);
                    }
                }
                request.setURI(builder.build());
            }
            return request;
        } else if (task.getHttpMethod() == HttpMethod.POST) {
            HttpPost request = new HttpPost(task.getUrl());
            if (queries != null) {
                List<NameValuePair> pairs = new ArrayList<>();
                for (QueryOption.Query q : queries) {
                    for (String v : q.getValues()) {
                        pairs.add(new BasicNameValuePair(q.getName(), v));
                    }
                }
                request.setEntity(new UrlEncodedFormEntity(pairs));
            }
            return request;
        }
        throw new IllegalArgumentException(String.format("Unsupported http method %s", task.getMethod()));
    }

    /** Default headers plus any user-configured request_headers (which can override). */
    private List<Header> makeHeaders(PluginTask task) {
        List<Header> headers = new ArrayList<>();
        headers.add(new BasicHeader("Accept", "*/*"));
        headers.add(new BasicHeader("Accept-Charset", task.getCharset()));
        headers.add(new BasicHeader("Accept-Encoding", "gzip, deflate"));
        headers.add(new BasicHeader("Accept-Language", "en-us,en;q=0.5"));
        headers.add(new BasicHeader("User-Agent", task.getUserAgent()));
        for (Map.Entry<String, String> entry : task.getRequestHeaders().entrySet()) {
            headers.add(new BasicHeader(entry.getKey(), entry.getValue()));
        }
        return headers;
    }

    /** Timeouts and redirect policy for every request issued by this plugin. */
    private RequestConfig makeRequestConfig(PluginTask task) {
        return RequestConfig.custom()
                .setCircularRedirectsAllowed(true)
                .setMaxRedirects(10)
                .setRedirectsEnabled(true)
                .setConnectTimeout(task.getOpenTimeout())
                .setSocketTimeout(task.getReadTimeout())
                .build();
    }

    /**
     * Adapts the single HTTP response stream to Embulk's FileInput, and enforces the
     * configured inter-request pause when the input is closed.
     */
    public static class PluginFileInput extends InputStreamFileInput
            implements TransactionalFileInput {
        private final Logger logger = Exec.getLogger(getClass());

        // Request start time, used to subtract response time from the wait interval.
        private final long startTimeMills;
        private final PluginTask task;

        public PluginFileInput(PluginTask task, InputStream stream, long startTimeMills) {
            super(task.getBufferAllocator(), new SingleFileProvider(stream));
            this.startTimeMills = startTimeMills;
            this.task = task;
        }

        public TaskReport commit() {
            return Exec.newTaskReport();
        }

        @Override
        public void close() {
            super.close();
            // Throttle the next request only after the current response is fully consumed.
            handleInterval();
        }

        @Override
        public void abort() {
        }

        /**
         * Sleeps for request_interval milliseconds, optionally minus the time already
         * spent on the request/response, to pace successive requests.
         */
        protected void handleInterval() {
            if (task.getRequestInterval() <= 0) {
                return;
            }
            long interval = task.getRequestInterval();
            if (task.getIntervalIncludesResponseTime()) {
                interval = interval - (System.currentTimeMillis() - startTimeMills);
            }
            if (interval > 0) {
                logger.info(String.format("waiting %d msec ...", interval));
                try {
                    Thread.sleep(interval);
                } catch (InterruptedException e) {
                    throw Throwables.propagate(e);
                }
            }
        }

        /** Hands the single response stream to the framework exactly once. */
        private static class SingleFileProvider
                implements InputStreamFileInput.Provider {
            private final InputStream stream;
            private boolean opened = false;

            public SingleFileProvider(InputStream stream) {
                this.stream = stream;
            }

            @Override
            public InputStream openNext() throws IOException {
                if (opened) {
                    return null;
                }
                opened = true;
                return stream;
            }

            @Override
            public void close() throws IOException {
                // Once openNext() has handed the stream out, the framework owns and
                // closes it; we only close if it was never consumed.
                if (!opened) {
                    stream.close();
                }
            }
        }
    }
}
package org.hivedb.hibernate;

import java.io.Serializable;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Logger;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.Criteria;
import org.hibernate.EmptyInterceptor;
import org.hibernate.FlushMode;
import org.hibernate.Interceptor;
import org.hibernate.LockMode;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.hibernate.shards.session.ShardedSessionImpl;
import org.hibernate.shards.util.Lists;
import org.hivedb.Hive;
import org.hivedb.HiveLockableException;
import org.hivedb.HiveRuntimeException;
import org.hivedb.annotations.AnnotationHelper;
import org.hivedb.annotations.DataIndexDelegate;
import org.hivedb.annotations.IndexType;
import org.hivedb.configuration.EntityConfig;
import org.hivedb.configuration.EntityIndexConfig;
import org.hivedb.configuration.EntityIndexConfigDelegator;
import org.hivedb.configuration.EntityIndexConfigImpl;
import org.hivedb.util.GenerateInstance;
import org.hivedb.util.GeneratedClassFactory;
import org.hivedb.util.GeneratedInstanceInterceptor;
import org.hivedb.util.PrimitiveUtils;
import org.hivedb.util.ReflectionTools;
import org.hivedb.util.functional.Amass;
import org.hivedb.util.functional.Atom;
import org.hivedb.util.functional.Filter;
import org.hivedb.util.functional.Joiner;
import org.hivedb.util.functional.Pair;
import org.hivedb.util.functional.Predicate;
import org.hivedb.util.functional.Transform;
import org.hivedb.util.functional.Unary;

/**
 * Generic DAO over a sharded Hibernate setup (Hibernate Shards + HiveDB). CRUD and
 * finder operations resolve the correct shard/session via the Hive directory and the
 * entity's index configuration, then run queries inside the project's
 * SessionCallback/QueryCallback transaction helpers (defined later in this file).
 */
public class BaseDataAccessObject implements DataAccessObject<Object, Serializable>{
	private Log log = LogFactory.getLog(BaseDataAccessObject.class);
	protected HiveSessionFactory factory;
	protected EntityConfig config;
	// The entity interface this DAO manages (instances are generated implementations).
	protected Class<?> clazz;
	protected Interceptor defaultInterceptor = EmptyInterceptor.INSTANCE;
	protected Hive hive;
	// Lazily built config for querying by the partition (primary index) key.
	protected EntityIndexConfig partitionIndexEntityIndexConfig;

	public Hive getHive() {
		return hive;
	}

	public void setHive(Hive hive) {
		this.hive = hive;
	}

	public BaseDataAccessObject(EntityConfig config, Hive hive, HiveSessionFactory factory) {
		this.clazz = config.getRepresentedInterface();
		this.config = config;
		this.factory = factory;
		this.hive = hive;
	}

	public BaseDataAccessObject(EntityConfig config, Hive hive, HiveSessionFactory factory, Interceptor interceptor) {
		this(config,hive,factory);
		this.defaultInterceptor = interceptor;
	}

	/** Loads the entity and deletes it within a transaction; returns the given id. */
	public Serializable delete(final Serializable id) {
		SessionCallback callback = new SessionCallback() {
			public void execute(Session session) {
				Object deleted = get(id, session);
				session.delete(deleted);
			}};
		doInTransaction(callback, getSession());
		return id;
	}

	/** Existence is answered by the hive directory, not the data node. */
	public Boolean exists(Serializable id) {
		return hive.directory().doesResourceIdExist(config.getResourceName(), id);
	}

	/**
	 * Fetches by id. If the directory claims the id exists but the data node has no row,
	 * the stale directory entry is deleted (best effort; a read-only hive is tolerated).
	 * Returns null for ids unknown to the directory.
	 */
	public Object get(final Serializable id) {
		try {
			// NOTE(review): the stale-directory cleanup below is duplicated — once inside
			// the callback and once on the outer result. The outer copy appears redundant
			// (same condition, same action); consider consolidating.
			QueryCallback query = new QueryCallback(){
				public Collection<Object> execute(Session session) {
					Object fetched = get(id,session);
					if(fetched == null && exists(id)){
						try {
							hive.directory().deleteResourceId(config.getResourceName(), id);
						} catch (HiveLockableException e) {
							log.warn(String.format("%s with id %s exists in the directory but not on the data node. Unable to cleanup record because Hive was read-only.", config.getResourceName(), id));
						}
						log.warn(String.format("%s with id %s exists in the directory but not on the data node. Directory record removed.", config.getResourceName(), id));
					}
					return Lists.newArrayList(fetched);
				}};
			Object fetched = Atom.getFirstOrThrow(queryInTransaction(query, getSession()));
			if(fetched == null && exists(id)){
				try {
					hive.directory().deleteResourceId(config.getResourceName(), id);
				} catch (HiveLockableException e) {
					log.warn(String.format("%s with id %s exists in the directory but not on the data node. Unable to cleanup record because Hive was read-only.", config.getResourceName(), id));
				}
				log.warn(String.format("%s with id %s exists in the directory but not on the data node. Directory record removed.", config.getResourceName(), id));
			}
			return fetched;
		} catch(RuntimeException e) {
			// This saves us a directory hit for all cases except when requesting a
			// non-existent id: only consult the directory once the query has failed.
			if(!exists(id))
				return null;
			else
				throw e;
		}
	}

	private Object get(Serializable id, Session session) {
		return session.get(getRespresentedClass(), id);
	}

	/** Single-property finder; delegates to the multi-property version. */
	public Collection<Object> findByProperty(final String propertyName, final Object propertyValue) {
		return findByProperties(propertyName, Collections.singletonMap(propertyName, propertyValue));
	}

	/**
	 * Multi-property finder. The partitioning property picks the shard/session; each
	 * remaining property is resolved to its index config (honoring @DataIndexDelegate
	 * redirections). Primitive-collection properties force the HQL path because Criteria
	 * cannot express "value in elements(collection)" for them.
	 */
	public Collection<Object> findByProperties(String partitioningPropertyName, final Map<String,Object> propertyNameValueMap) {
		final EntityIndexConfig entityIndexConfig = resolveEntityIndexConfig(partitioningPropertyName);
		Session session = createSessionForIndex(config, entityIndexConfig, propertyNameValueMap.get(partitioningPropertyName));

		final Map<String, Entry<EntityIndexConfig, Object>> propertyNameEntityIndexConfigValueMap = Transform.toOrderedMap(
			new Unary<String, Entry<String, Entry<EntityIndexConfig, Object>>>() {
				public Entry<String, Entry<EntityIndexConfig, Object>> f(String propertyName) {
					EntityIndexConfig entityIndexConfig = resolveEntityIndexConfig(propertyName);
					// A @DataIndexDelegate redirects the query to another indexed property.
					DataIndexDelegate dataIndexDelegate = AnnotationHelper.getAnnotationDeeply(clazz, propertyName, DataIndexDelegate.class);
					EntityIndexConfig resolvedEntityIndexConfig = (dataIndexDelegate != null) ?
						resolveEntityIndexConfig(dataIndexDelegate.value()) :
						entityIndexConfig;
					return new Pair<String, Entry<EntityIndexConfig, Object>>(
						resolvedEntityIndexConfig.getPropertyName(),
						new Pair<EntityIndexConfig, Object>(resolvedEntityIndexConfig, propertyNameValueMap.get(propertyName)));
				}
			},
			propertyNameValueMap.keySet());

		QueryCallback query;
		if (Filter.isMatch(new Predicate<String>() {
			// We must use HQL to query for primitive collection properties.
			public boolean f(String propertyName) {
				return isPrimitiveCollection(propertyName);
			}}, propertyNameEntityIndexConfigValueMap.keySet()))
			query = new QueryCallback(){
				public Collection<Object> execute(Session session) {
					// Flatten back to propertyName -> value for the HQL builder.
					Map <String, Object> revisedPropertyNameValueMap = Transform.toMap(
						new Unary<Entry<String, Entry<EntityIndexConfig, Object>>, String>() {
							public String f(Map.Entry<String,Map.Entry<EntityIndexConfig,Object>> item) {
								return item.getKey();
							}
						},
						new Unary<Entry<String, Entry<EntityIndexConfig, Object>>, Object>() {
							public Object f(Map.Entry<String,Map.Entry<EntityIndexConfig,Object>> item) {
								return item.getValue().getValue();
							}
						},
						propertyNameEntityIndexConfigValueMap.entrySet());
					return queryWithHQL(session, revisedPropertyNameValueMap);
				}};
		else
			query = new QueryCallback(){
				@SuppressWarnings("unchecked")
				public Collection<Object> execute(Session session) {
					Criteria criteria = session.createCriteria(config.getRepresentedInterface()).setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
					for (Entry<EntityIndexConfig,Object> entityIndexConfigValueEntry : propertyNameEntityIndexConfigValueMap.values()) {
						EntityIndexConfig entityIndexConfig = entityIndexConfigValueEntry.getKey();
						Object value = entityIndexConfigValueEntry.getValue();
						addPropertyRestriction(entityIndexConfig, criteria, entityIndexConfig.getPropertyName(), value);
					}
					return criteria.list();
				}};
		return queryInTransaction(query, session);
	}

	/**
	 * Projects a single property across instances, with optional paging
	 * (maxResults <= 0 disables paging entirely).
	 */
	public Collection<Object> getProperty(final String propertyName, final int firstResult, final int maxResults) {
		QueryCallback callback = new QueryCallback(){
			@SuppressWarnings("unchecked")
			public Collection<Object> execute(Session session) {
				// Query the generated implementation class, which is what Hibernate maps.
				Query query = session.createQuery(
					String.format(
						"select %s from %s",
						propertyName,
						GeneratedClassFactory.getGeneratedClass(config.getRepresentedInterface()).getSimpleName()));
				if (maxResults > 0) {
					query.setFirstResult(firstResult);
					query.setMaxResults(maxResults);
				}
				return query.list();
			}};
		return queryInTransaction(callback, getSession());
	}

	/**
	 * Builds an HQL query matching all given property/value pairs; collection properties
	 * use ":param in elements(x.prop)", scalars use equality.
	 * NOTE(review): multiple conditions are appended without "and" between them —
	 * verify this against the query builder's expectations; it looks like a latent bug
	 * for maps with more than one entry.
	 */
	@SuppressWarnings("unchecked")
	protected Collection<Object> queryWithHQL(Session session, Map<String, Object> propertyNameValueMap) {
		final StringBuilder queryString = new StringBuilder(String.format("from %s as x where",
			GeneratedClassFactory.getGeneratedClass(config.getRepresentedInterface()).getSimpleName()));
		for (Entry<String, Object> entry : propertyNameValueMap.entrySet()) {
			String propertyName = entry.getKey();
			if (ReflectionTools.isCollectionProperty(config.getRepresentedInterface(), propertyName))
				queryString.append(String.format(" :%s in elements (x.%s)", propertyName, propertyName));
			else
				queryString.append(String.format(" :%s = x.%s", propertyName, propertyName));
		}
		Query query = session.createQuery(queryString.toString());
		for (Entry<String, Object> entry : propertyNameValueMap.entrySet())
			query.setParameter(entry.getKey(), entry.getValue());
		return query.list();
	}

	/**
	 * Counts instances matching the property value; returns 0 when no session can be
	 * resolved for the index (treated as "nothing matches").
	 */
	public Integer getCount(final String propertyName, final Object propertyValue) {
		final EntityIndexConfig indexConfig = resolveEntityIndexConfig(propertyName);
		QueryCallback query;
		Session session = null;
		try {
			session = createSessionForIndex(config, indexConfig, propertyValue);
		} catch (UnsupportedOperationException e) {
			return 0;
		}
		if (isPrimitiveCollection(propertyName)) {
			query = new QueryCallback(){
				public Collection<Object> execute(Session session) {
					return queryWithHQLRowCount(indexConfig, session, propertyValue);
				}};
		} else {
			query = new QueryCallback(){
				@SuppressWarnings("unchecked")
				public Collection<Object> execute(Session session) {
					// setResultTransformer fixes a Hibernate bug of returning duplicates when joins exist
					Criteria criteria = session.createCriteria(config.getRepresentedInterface()).setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
					addPropertyRestriction(indexConfig, criteria, propertyName, propertyValue);
					criteria.setProjection( Projections.rowCount() );
					return criteria.list();
				}};
		}
		return (Integer)Atom.getFirstOrThrow(queryInTransaction(query, session));
	}

	/** True for collection properties whose items are primitives (not mapped entities). */
	private boolean isPrimitiveCollection(final String propertyName) {
		return ReflectionTools.isCollectionProperty(config.getRepresentedInterface(), propertyName)
			&& !ReflectionTools.isComplexCollectionItemProperty(config.getRepresentedInterface(), propertyName);
	}

	/** HQL row-count for primitive-collection membership (Criteria can't express this). */
	@SuppressWarnings("unchecked")
	private Collection<Object> queryWithHQLRowCount(EntityIndexConfig indexConfig, Session session, Object propertyValue) {
		Query query = session.createQuery(String.format("select count(%s) from %s as x where :value in elements (x.%s)",
				config.getIdPropertyName(),
				GeneratedClassFactory.getGeneratedClass(config.getRepresentedInterface()).getSimpleName(),
				indexConfig.getIndexName())
			).setParameter("value", propertyValue);
		return query.list();
	}

	/**
	 * Maps a property name to its index config; the primary-index (partition) key gets a
	 * synthesized config since it is not a regular secondary index.
	 */
	protected EntityIndexConfig resolveEntityIndexConfig(String propertyName) {
		EntityIndexConfig indexConfig = config.getPrimaryIndexKeyPropertyName().equals(propertyName) ?
			createEntityIndexConfigForPartitionIndex(config) :
			config.getEntityIndexConfig(propertyName);
		return indexConfig;
	}

	// Lazily cached; not thread-safe, but the worst case is redundant construction.
	private EntityIndexConfig createEntityIndexConfigForPartitionIndex(EntityConfig entityConfig) {
		if (partitionIndexEntityIndexConfig == null)
			partitionIndexEntityIndexConfig = new EntityIndexConfigImpl(entityConfig.getRepresentedInterface(), entityConfig.getPrimaryIndexKeyPropertyName());
		return partitionIndexEntityIndexConfig;
	}

	/**
	 * Paged single-property finder, ordered by id.
	 * NOTE(review): the HQL branch uses MySQL-style "limit %s, %s" inside the HQL string
	 * and setEntity() for a possibly non-entity value — HQL does not support a limit
	 * clause (paging belongs in setFirstResult/setMaxResults); confirm this path is
	 * exercised/working.
	 */
	public Collection<Object> findByProperty(final String propertyName, final Object propertyValue, final Integer firstResult, final Integer maxResults) {
		final EntityConfig entityConfig = config;
		final EntityIndexConfig indexConfig = entityConfig.getEntityIndexConfig(propertyName);
		Session session = createSessionForIndex(entityConfig, indexConfig, propertyValue);
		QueryCallback callback;
		if (isPrimitiveCollection(propertyName)) {
			callback = new QueryCallback(){
				@SuppressWarnings("unchecked")
				public Collection<Object> execute(Session session) {
					Query query = session.createQuery(String.format("from %s as x where :value in elements (x.%s) order by x.%s asc limit %s, %s",
							GeneratedClassFactory.getGeneratedClass(entityConfig.getRepresentedInterface()).getSimpleName(),
							indexConfig.getIndexName(),
							entityConfig.getIdPropertyName(),
							firstResult,
							maxResults)
						).setEntity("value", propertyValue);
					return query.list();
				}};
		} else {
			callback = new QueryCallback(){
				@SuppressWarnings("unchecked")
				public Collection<Object> execute(Session session) {
					Criteria criteria = session.createCriteria(entityConfig.getRepresentedInterface()).setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
					addPropertyRestriction(indexConfig, criteria, propertyName, propertyValue);
					criteria.setFirstResult(firstResult);
					criteria.setMaxResults(maxResults);
					criteria.addOrder(Order.asc(entityConfig.getIdPropertyName()));
					return criteria.list();
				}};
		}
		return queryInTransaction(callback, session);
	}

	/**
	 * Adds an equality restriction; complex-collection properties are matched through an
	 * alias on the inner-class property, primitive collections must go through HQL.
	 */
	private void addPropertyRestriction(EntityIndexConfig indexConfig, Criteria criteria, String propertyName, Object propertyValue) {
		if (ReflectionTools.isCollectionProperty(config.getRepresentedInterface(), propertyName))
			if (ReflectionTools.isComplexCollectionItemProperty(config.getRepresentedInterface(), propertyName)) {
				criteria.createAlias(propertyName, "x")
					.add( Restrictions.eq("x." + indexConfig.getInnerClassPropertyName(), propertyValue));
			}
			else
				throw new UnsupportedOperationException("This call should have used HQL, not Criteria");
		else
			criteria.add( Restrictions.eq(propertyName, propertyValue));
	}

	/**
	 * Opens the session appropriate to the index type: delegating indexes route through
	 * the delegate entity, hive indexes through the directory, data indexes hit all
	 * shards, and partition indexes resolve directly by key.
	 */
	protected Session createSessionForIndex(EntityConfig entityConfig, EntityIndexConfig indexConfig, Object propertyValue) {
		if (indexConfig.getIndexType().equals(IndexType.Delegates))
			return factory.openSession(
				((EntityIndexConfigDelegator)indexConfig).getDelegateEntityConfig().getResourceName(),
				propertyValue);
		else if (indexConfig.getIndexType().equals(IndexType.Hive))
			return factory.openSession(
				entityConfig.getResourceName(),
				indexConfig.getIndexName(),
				propertyValue);
		else if (indexConfig.getIndexType().equals(IndexType.Data))
			return factory.openAllShardsSession();
		else if (indexConfig.getIndexType().equals(IndexType.Partition))
			return factory.openSession(propertyValue);
		throw new RuntimeException(String.format("Unknown IndexType: %s", indexConfig.getIndexType()));
	}

	/**
	 * Range finder across all shards.
	 * NOTE(review): "between (:minValue, :maxValue)" is not valid HQL (should be
	 * "between :minValue and :maxValue"), and setEntity() is used for plain values —
	 * the primitive-collection branch looks untested.
	 */
	public Collection<Object> findByPropertyRange(final String propertyName, final Object minValue, final Object maxValue) {
		// Use an AllShardsResolutionStrategy + Criteria
		final EntityConfig entityConfig = config;
		final EntityIndexConfig indexConfig = config.getEntityIndexConfig(propertyName);
		Session session = factory.openAllShardsSession();
		QueryCallback callback;
		if (isPrimitiveCollection(propertyName)) {
			callback = new QueryCallback(){
				@SuppressWarnings("unchecked")
				public Collection<Object> execute(Session session) {
					Query query = session.createQuery(String.format("from %s as x where x.%s between (:minValue, :maxValue)",
							entityConfig.getRepresentedInterface().getSimpleName(),
							indexConfig.getIndexName())
						).setEntity("minValue", minValue).setEntity("maxValue", maxValue);
					return query.list();
				}};
		} else {
			callback = new QueryCallback(){
				@SuppressWarnings("unchecked")
				public Collection<Object> execute(Session session) {
					Criteria criteria = session.createCriteria(config.getRepresentedInterface()).setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
					addPropertyRangeRestriction(indexConfig, criteria, propertyName, minValue, maxValue);
					return criteria.list();
				}};
		}
		return queryInTransaction(callback, session);
	}

	/** Count of instances in the given range, across all shards. */
	public Integer getCountByRange(final String propertyName, final Object minValue, final Object maxValue) {
		// Use an AllShardsResolutionStrategy + Criteria
		final EntityIndexConfig indexConfig = config.getEntityIndexConfig(propertyName);
		Session session = factory.openAllShardsSession();
		QueryCallback query = new QueryCallback(){
			@SuppressWarnings("unchecked")
			public Collection<Object> execute(Session session) {
				Criteria criteria = session.createCriteria(config.getRepresentedInterface()).setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
				addPropertyRangeRestriction(indexConfig, criteria, propertyName, minValue, maxValue);
				criteria.setProjection( Projections.rowCount() );
				return criteria.list();
			}};
		return (Integer)Atom.getFirstOrThrow(queryInTransaction(query, session));
	}

	/**
	 * Paged range finder across all shards.
	 * NOTE(review): the Criteria branch applies the range restriction twice (once via
	 * addPropertyRangeRestriction and once via Restrictions.between) — harmless but
	 * redundant; the HQL branch shares the invalid-HQL concerns noted above.
	 */
	public Collection<Object> findByPropertyRange(final String propertyName, final Object minValue, final Object maxValue, final Integer firstResult, final Integer maxResults) {
		// Use an AllShardsResolutionStrategy + Criteria
		final EntityConfig entityConfig = config;
		final EntityIndexConfig indexConfig = config.getEntityIndexConfig(propertyName);
		Session session = factory.openAllShardsSession();
		QueryCallback callback;
		if (isPrimitiveCollection(propertyName)) {
			callback = new QueryCallback(){
				@SuppressWarnings("unchecked")
				public Collection<Object> execute(Session session) {
					Query query = session.createQuery(String.format("from %s as x where %s between (:minValue, :maxValue) order by x.%s asc limit %s, %s",
							entityConfig.getRepresentedInterface().getSimpleName(),
							indexConfig.getIndexName(),
							entityConfig.getIdPropertyName(),
							firstResult,
							maxResults)
						).setEntity("minValue", minValue).setEntity("maxValue", maxValue);
					return query.list();
				}};
		} else {
			callback = new QueryCallback(){
				@SuppressWarnings("unchecked")
				public Collection<Object> execute(Session session) {
					Criteria criteria = session.createCriteria(config.getRepresentedInterface()).setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
					addPropertyRangeRestriction(indexConfig, criteria, propertyName, minValue, maxValue);
					criteria.add( Restrictions.between(propertyName, minValue, maxValue));
					criteria.setFirstResult(firstResult);
					criteria.setMaxResults(maxResults);
					criteria.addOrder(Order.asc(propertyName));
					return criteria.list();
				}};
		}
		return queryInTransaction(callback, session);
	}

	/**
	 * Adds a between restriction; complex-collection properties go through an alias,
	 * primitive collections are unsupported here.
	 * NOTE(review): the alias branch restricts on "x." + propertyName, whereas the
	 * equality variant above uses the inner-class property name — likely inconsistent.
	 */
	private void addPropertyRangeRestriction(EntityIndexConfig indexConfig, Criteria criteria, String propertyName, Object minValue, Object maxValue) {
		if (ReflectionTools.isCollectionProperty(config.getRepresentedInterface(), propertyName))
			if (ReflectionTools.isComplexCollectionItemProperty(config.getRepresentedInterface(), propertyName)) {
				criteria.createAlias(propertyName, "x")
					.add( Restrictions.between("x."
+ propertyName, minValue, maxValue)); } else throw new UnsupportedOperationException("This isn't working yet"); else criteria.add( Restrictions.between(propertyName, minValue, maxValue)); } public Object save(final Object entity) { final Collection<Object> entities = populateDataIndexDelegates(Collections.singletonList(entity)); final Object populatedEntity = Atom.getFirstOrThrow(entities); SessionCallback callback = new SessionCallback(){ public void execute(Session session) { session.saveOrUpdate(getRespresentedClass().getName(),entity); }}; SessionCallback cleanupCallback = new SessionCallback(){ public void execute(Session session) { session.refresh(entity); session.lock(getRespresentedClass().getName(),entity, LockMode.UPGRADE); session.update(getRespresentedClass().getName(),entity); log.warn(String.format("%s with id %s exists in the data node but not on the directory. Data node record was updated and re-indexed.", config.getResourceName(), config.getId(entity))); }}; doSave(populatedEntity, callback, cleanupCallback); return entity; } private void doSave(final Object entity, SessionCallback callback, SessionCallback cleanupCallback) { try { doInTransaction(callback, getSession()); } catch(org.hibernate.TransactionException dupe) { if(dupe.getCause().getClass().equals(org.hibernate.exception.ConstraintViolationException.class) && !exists(config.getId(entity))) { doInTransaction(cleanupCallback, factory.openSession(config.getPrimaryIndexKey(entity))); } else { log.error(String.format("Detected an integrity constraint violation on the data node but %s with id %s exists in the directory.", config.getResourceName(), config.getId(entity))); throw dupe; } } catch(org.hibernate.exception.ConstraintViolationException dupe) { if(!exists(config.getId(entity))) { doInTransaction(cleanupCallback, factory.openSession(config.getPrimaryIndexKey(entity))); } else { log.error(String.format("Detected an integrity constraint violation on the data node but %s with id %s exists 
in the directory.", config.getResourceName(), config.getId(entity))); throw dupe; } } } private void doSaveAll(final Collection<Object> entities, SessionCallback callback, SessionCallback cleanupCallback) { try { doInTransaction(callback, getSession()); } catch(org.hibernate.TransactionException dupe) { if(dupe.getCause().getClass().equals(org.hibernate.exception.ConstraintViolationException.class) || dupe.getCause().getClass().equals(org.hibernate.StaleObjectStateException.class)) { doInTransaction(cleanupCallback, factory.openSession(config.getPrimaryIndexKey(Atom.getFirstOrThrow(entities)))); } else { log.error(String.format("Detected an integrity constraint violation on the data node while doing a saveAll with entities of class %s.", config.getResourceName())); throw dupe; } } catch(org.hibernate.exception.ConstraintViolationException dupe) { doInTransaction(cleanupCallback, factory.openSession(config.getPrimaryIndexKey(Atom.getFirstOrThrow(entities)))); } } public Collection<Object> saveAll(final Collection<Object> collection) { final Collection<Object> entities = populateDataIndexDelegates(collection); validateNonNull(entities); SessionCallback callback = new SessionCallback(){ public void execute(Session session) { for(Object entity : entities) { session.saveOrUpdate(getRespresentedClass().getName(),entity); } }}; SessionCallback cleanupCallback = new SessionCallback(){ public void execute(Session session) { for(Object entity : entities) { try { session.refresh(entity); } catch(RuntimeException e) { //Damned Hibernate } if(!exists(config.getId(entity))){ if(existsInSession(session, config.getId(entity))){ session.lock(getRespresentedClass().getName(),entity, LockMode.UPGRADE); session.update(getRespresentedClass().getName(),entity); log.warn(String.format("%s with id %s exists in the data node but not on the directory. 
Data node record was updated and re-indexed.", config.getResourceName(), config.getId(entity))); } else { session.saveOrUpdate(getRespresentedClass().getName(), entity); } } else { if(!existsInSession(session, config.getId(entity))){ try { getHive().directory().deleteResourceId(config.getResourceName(), config.getId(entity)); } catch (HiveLockableException e) { log.warn(String.format("%s with id %s exists in the directory but not on the data node. Unable to cleanup record because Hive was read-only.", config.getResourceName(), config.getId(entity))); } log.warn(String.format("%s with id %s exists in the directory but not on the data node. Directory record removed.", config.getResourceName(), config.getId(entity))); } session.saveOrUpdate(getRespresentedClass().getName(), entity); } } }}; doSaveAll(entities, callback, cleanupCallback); return collection; } private Boolean existsInSession(Session session, Serializable id) { return null != session.get(getRespresentedClass(), id); } private void validateNonNull(final Collection<Object> collection) { if (Filter.isMatch(new Filter.NullPredicate<Object>(), collection)) { String ids = Amass.joinByToString(new Joiner.ConcatStrings<String>(", "), Transform.map(new Unary<Object, String>() { public String f(Object item) { return item != null ? 
config.getId(item).toString() : "null"; }}, collection)); throw new HiveRuntimeException(String.format("Encountered null items in collection: %s", ids)); } } protected Session getSession() { return factory.openSession(factory.getDefaultInterceptor()); } @SuppressWarnings("unchecked") public Class<Object> getRespresentedClass() { return (Class<Object>) EntityResolver.getPersistedImplementation(clazz); } public Interceptor getInterceptor() {return this.defaultInterceptor;} public void setInterceptor(Interceptor i) {this.defaultInterceptor = i;} public static void doInTransaction(SessionCallback callback, Session session) { Transaction tx = null; try { tx = session.beginTransaction(); callback.execute(session); tx.commit(); } catch( RuntimeException e ) { LogFactory.getLog(BaseDataAccessObject.class).debug("doInTransaction: Error on data node " + OpenSessionEventImpl.getNode(), e); if(tx != null) tx.rollback(); throw e; } finally { session.close(); } } public static Collection<Object> queryInTransaction(QueryCallback callback, Session session) { Collection<Object> results = Lists.newArrayList(); try { session.setFlushMode(FlushMode.MANUAL); Transaction tx = session.beginTransaction(); results = callback.execute(session); tx.commit(); } catch( RuntimeException e ) { LogFactory.getLog(BaseDataAccessObject.class).debug("queryInTransaction: Error on data node " + OpenSessionEventImpl.getNode(), e); throw e; } finally { session.close(); } return results; } // This operation needs to be generalized with an attribute and push down to a lower layer public Collection<Object> populateDataIndexDelegates(Collection<Object> instances) { return Transform.map(new Unary<Object, Object>() { public Object f(final Object instance) { final List<Method> allMethodsWithAnnotation = AnnotationHelper.getAllMethodsWithAnnotation(clazz, DataIndexDelegate.class); if (allMethodsWithAnnotation.size()==0) return instance; Object modified = new 
GenerateInstance<Object>((Class<Object>)clazz).generateAndCopyProperties(instance); for (Method getter : allMethodsWithAnnotation) { String delegatorPropertyName = ReflectionTools.getPropertyNameOfAccessor(getter); EntityIndexConfig entityIndexConfig = config.getEntityIndexConfig(delegatorPropertyName); String delegatePropertyName = AnnotationHelper.getAnnotationDeeply(clazz, delegatorPropertyName, DataIndexDelegate.class).value(); GeneratedInstanceInterceptor.setProperty( modified, delegatePropertyName, PrimitiveUtils.getPrimitiveEquivalent(Filter.grepUnique(entityIndexConfig.getIndexValues(modified)))); } return modified; }}, instances); } }
package org.jboss.remoting3.remote;

import java.io.IOException;
import java.nio.ByteBuffer;

import org.jboss.remoting3.MessageCancelledException;
import org.jboss.remoting3.MessageInputStream;
import org.xnio.Pooled;
import org.xnio.channels.Channels;
import org.xnio.streams.BufferPipeInputStream;

/**
 * Receiving side of a single in-flight message on a remote channel.
 * <p>
 * Incoming frames are pushed into a {@link BufferPipeInputStream}; a credit-based
 * flow-control window ({@code inboundWindow}) is decremented as payload arrives and
 * re-opened (via a {@code MESSAGE_WINDOW_OPEN} frame back to the peer) as the consumer
 * drains buffers.  All window/state mutation happens while synchronized on this object.
 *
 * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
 */
final class InboundMessage {
    // Wire-level identifier of this message; compared/indexed as an unsigned 16-bit value.
    final short messageId;
    final RemoteConnectionChannel channel;
    // Remaining receive credit in bytes; guarded by "this".
    int inboundWindow;
    // Set once EOF is seen; further frames are dropped.  Guarded by "this".
    boolean closed;
    // Set when the peer flags the message cancelled; makes close() throw.  Guarded by "this".
    boolean cancelled;

    // Indexer used by the owning channel to look messages up by unsigned message id.
    static final IntIndexer<InboundMessage> INDEXER = new IntIndexer<InboundMessage>() {
        public int getKey(final InboundMessage argument) {
            return argument.messageId & 0xffff;
        }

        public boolean equals(final InboundMessage argument, final int index) {
            return (argument.messageId & 0xffff) == index;
        }
    };

    InboundMessage(final short messageId, final RemoteConnectionChannel channel, int inboundWindow) {
        this.messageId = messageId;
        this.channel = channel;
        this.inboundWindow = inboundWindow;
    }

    // Pipe that buffers incoming payload for the consumer.  The handler's acknowledge()
    // is invoked as the consumer finishes a buffer: it returns that many bytes of credit
    // locally and tells the peer to re-open its send window by the same amount.
    BufferPipeInputStream inputStream = new BufferPipeInputStream(new BufferPipeInputStream.InputHandler() {
        public void acknowledge(final Pooled<ByteBuffer> acked) throws IOException {
            // position() of the consumed buffer == number of payload bytes read from it.
            int consumed = acked.getResource().position();
            openInboundWindow(consumed);
            Pooled<ByteBuffer> pooled = allocate(Protocol.MESSAGE_WINDOW_OPEN);
            try {
                ByteBuffer buffer = pooled.getResource();
                buffer.putInt(consumed); // Open window by buffer size
                buffer.flip();
                Channels.sendBlocking(channel.getConnection().getChannel(), buffer);
            } finally {
                pooled.free();
            }
        }

        public void close() throws IOException {
            // Consumer abandoned the stream: tell the peer to stop sending.
            sendAsyncClose();
        }
    });

    // Public stream handed to the API user; delegates to the pipe but refuses to close
    // normally if the message was cancelled by the sender.
    MessageInputStream messageInputStream = new MessageInputStream() {
        public int read() throws IOException {
            return inputStream.read();
        }

        public int read(final byte[] bytes, final int offs, final int length) throws IOException {
            return inputStream.read(bytes, offs, length);
        }

        public long skip(final long l) throws IOException {
            return inputStream.skip(l);
        }

        public int available() throws IOException {
            return inputStream.available();
        }

        public void close() throws IOException {
            synchronized (InboundMessage.this) {
                if (cancelled) {
                    // Surfaces sender-side cancellation to the reader at close time.
                    throw new MessageCancelledException();
                }
            }
            inputStream.close();
        }
    };

    /**
     * Sends a {@code MESSAGE_ASYNC_CLOSE} frame for this message id (no payload beyond
     * the header written by {@link #allocate}).
     */
    void sendAsyncClose() throws IOException {
        Pooled<ByteBuffer> pooled = allocate(Protocol.MESSAGE_ASYNC_CLOSE);
        try {
            ByteBuffer buffer = pooled.getResource();
            buffer.flip();
            Channels.sendBlocking(channel.getConnection().getChannel(), buffer);
        } finally {
            pooled.free();
        }
    }

    /**
     * Allocates an outbound frame buffer with the protocol id already written by the
     * channel, then appends this message's id.  Caller must free the pooled buffer.
     */
    Pooled<ByteBuffer> allocate(byte protoId) {
        Pooled<ByteBuffer> pooled = channel.allocate(protoId);
        ByteBuffer buffer = pooled.getResource();
        buffer.putShort(messageId);
        return pooled;
    }

    /** Returns {@code consumed} bytes of receive credit to the window. */
    void openInboundWindow(int consumed) {
        synchronized (this) {
            inboundWindow += consumed;
        }
    }

    /** Deducts {@code produced} bytes; a negative window means the peer overran its credit. */
    void closeInboundWindow(int produced) {
        synchronized (this) {
            if ((inboundWindow -= produced) < 0) {
                channel.getConnection().handleException(new IOException("Input overrun"));
            }
        }
    }

    /**
     * Handles one incoming data frame for this message.  Drops frames after close,
     * treats arrival with a zero window as a protocol violation (async-closes), and
     * otherwise charges the window, decodes the flags byte, and pushes the payload
     * into the pipe.  EOF/cancel flags may be combined on the final frame.
     */
    void handleIncoming(Pooled<ByteBuffer> pooledBuffer) {
        boolean eof = false;
        synchronized (this) {
            if (closed) {
                // ignore
                pooledBuffer.free();
                return;
            }
            if (inboundWindow == 0) {
                pooledBuffer.free();
                // TODO log window overrun
                try {
                    sendAsyncClose();
                } catch (IOException e) {
                    // todo log it
                }
                return;
            }
            ByteBuffer buffer = pooledBuffer.getResource();
            // NOTE(review): the -8 presumably excludes fixed header bytes from the window
            // charge — confirm against the Protocol frame layout.
            closeInboundWindow(buffer.remaining() - 8);
            // Step back one byte to re-read the flags byte that immediately precedes the
            // current payload position (assumed by the framing; TODO confirm).
            buffer.position(buffer.position() - 1);
            byte flags = buffer.get();
            eof = (flags & Protocol.MSG_FLAG_EOF) != 0;
            if (eof) {
                closed = true;
                channel.freeInboundMessage(messageId);
            }
            boolean cancelled = (flags & Protocol.MSG_FLAG_CANCELLED) != 0;
            if (cancelled) {
                this.cancelled = true;
            }
        }
        // Push outside the lock; BufferPipeInputStream handles its own synchronization.
        inputStream.push(pooledBuffer);
        if (eof) {
            inputStream.pushEof();
        }
    }
}
package org.folio.rest.tools.client;

import io.vertx.core.Handler;
import io.vertx.core.http.HttpClient;
import io.vertx.core.http.HttpClientResponse;
import io.vertx.core.json.DecodeException;
import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;

import java.util.concurrent.CompletableFuture;

/**
 * Vert.x HTTP response handler that converts an {@link HttpClientResponse} into a
 * {@link Response} and completes the supplied {@link CompletableFuture} with it.
 * <p>
 * Both a body handler and an end handler are registered: the end handler fires after
 * the body handler, so the {@code hasBody} flag lets the end handler complete the
 * future only for responses that produced no body at all.
 *
 * @author shale
 *
 */
class HTTPJsonResponseHandler implements Handler<HttpClientResponse> {

  private static final Logger log = LoggerFactory.getLogger(HTTPJsonResponseHandler.class);

  // Future completed (normally or exceptionally) exactly once per response.
  CompletableFuture<Response> cf;
  // Endpoint string recorded on the Response and used as the cache key.
  String endpoint;
  // NOTE(review): assigned externally; only referenced in commented-out code below.
  RollBackURL rollbackURL;
  // Set by the caller when autoclose is enabled; closed once the response ends or fails.
  HttpClient httpClient;

  public HTTPJsonResponseHandler(String endpoint, CompletableFuture<Response> cf){
    this.cf = cf;
    this.endpoint = endpoint;
  }

  @Override
  public void handle(HttpClientResponse hcr) {
    // Single-element array so the lambdas below can mutate the flag (effectively-final rule).
    boolean hasBody[] = new boolean[]{false};
    hcr.endHandler( eh -> {
      //needed in cases where there is no body content
      if(!hasBody[0]){
        Response r = new Response();
        r.code = hcr.statusCode();
        r.endpoint = this.endpoint;
        r.headers = hcr.headers();
        if(!Response.isSuccess(r.code)){
          r.populateError(this.endpoint, r.code, hcr.statusMessage());
        }
        cf.complete(r);
      }
      if(httpClient != null){
        //this is not null when autoclose = true
        httpClient.close();
      }
    });
    hcr.bodyHandler( bh -> {
      hasBody[0] = true;
      Response r = new Response();
      r.code = hcr.statusCode();
      r.endpoint = this.endpoint;
      r.headers = hcr.headers();
      if(Response.isSuccess(r.code)){
        // 204 No Content (or an empty buffer) completes with a null body.
        if(r.code == 204 || bh.length() == 0) {
          r.body = null;
          cf.complete(r);
        } else {
          try {
            r.body = bh.toJsonObject();
            cf.complete(r);
          } catch (DecodeException decodeException) {
            // Non-JSON payload on a success status: fail the future rather than complete it.
            cf.completeExceptionally(decodeException);
          }
        }
      } else{
        // Error status: prefer the body text over the status message when a body exists.
        String message = hcr.statusMessage();
        if(bh != null){
          message = bh.toString();
        }
        r.populateError(this.endpoint, r.code, message);
        cf.complete(r);
      }
      // Cache only successfully decoded bodies; r.body stays null on the paths above
      // that skip or fail decoding, so those are never cached.
      if(HttpModuleClient2.cache != null && r.body != null) {
        try {
          HttpModuleClient2.cache.put(endpoint, cf.get());
        } catch (Exception e) {
          log.error(e.getMessage(), e);
        }
      }
      /* if(r.error != null && rollbackURL != null){ }*/
    });
    hcr.exceptionHandler( eh -> {
      if(httpClient != null){
        //this is not null when autoclose = true
        try {
          httpClient.close();
        } catch (Exception e) {
          log.error("HTTPJsonResponseHandler class tried closing a client that was closed, this may be ok. " + e.getMessage(), e);
        }
      }
      cf.completeExceptionally(eh);
    });
  }
}
package org.jenkinsci.plugins.github; import hudson.Plugin; import hudson.init.InitMilestone; import hudson.init.Initializer; import org.jenkinsci.plugins.github.config.GitHubPluginConfig; import org.jenkinsci.plugins.github.migration.Migrator; import javax.annotation.Nonnull; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; /** * Main entry point for this plugin * <p> * Launches migration from old config versions * Contains helper method to get global plugin configuration - {@link #configuration()} * * @author lanwen (Merkushev Kirill) */ public class GitHubPlugin extends Plugin { /** * Launched before plugin starts * Adds alias for {@link GitHubPlugin} to simplify resulting xml. */ public static void addXStreamAliases() { Migrator.enableCompatibilityAliases(); Migrator.enableAliases(); } @Initializer(after = InitMilestone.EXTENSIONS_AUGMENTED, before = InitMilestone.JOB_LOADED) public static void runMigrator() throws Exception { new Migrator().migrate(); } @Override public void start() throws Exception { addXStreamAliases(); } /** * Shortcut method for getting instance of {@link GitHubPluginConfig}. * * @return configuration of plugin */ @Nonnull public static GitHubPluginConfig configuration() { return defaultIfNull( GitHubPluginConfig.all().get(GitHubPluginConfig.class), GitHubPluginConfig.EMPTY_CONFIG ); } }
package slimpleslickgame; import org.newdawn.slick.geom.Shape; import util.ColorSwitch; import client.GameEvent; import client.GameStatsEvents; public class OpponentPlayer extends Player { public OpponentPlayer(byte id, GameStatsEvents gse) { super(id, gse); } @Override public void update(int delta, Shape containerShape) { if(dead) return; GameEvent e; int score = 0; while ((e = gse.pop(id)) != null) { switch (e.getRole()) { case CREEP: { if (e.isAlive()) { super.creeps.put(e.getId(), new Creep(e.getPosition(), ColorSwitch.getColorFromId(e.getSendId()))); } else { super.creeps.remove(e.getId()); } break; } case PLAYER: { if (!e.isAlive()){ dead = true; } if(e.getPlayerHp() != -1){ stats.setHP(e.getPlayerHp()); } if (e.getPosition() != null) { super.position = e.getPosition(); } if (e.getDirection() != null) { // super.direction = e.getDirection(); } if (e.getScore() > 0) { score = e.getScore(); } super.updatePosition(containerShape); break; } case BULLET: { if (e.isAlive()) { super.gun.shoot(e.getPosition()); } else { super.gun.delete(e.getId()); } break; } } } for (Creep c : super.creeps.values()) { c.update(delta); } stats.update(delta, score); super.gun.update(delta); } }
package org.lightmare.jpa.jta;

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Stack;

import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import javax.transaction.HeuristicMixedException;
import javax.transaction.HeuristicRollbackException;
import javax.transaction.NotSupportedException;
import javax.transaction.RollbackException;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;

import org.lightmare.ejb.handlers.BeanHandler;
import org.lightmare.jpa.JpaManager;
import org.lightmare.utils.CollectionUtils;
import org.lightmare.utils.ObjectUtils;

/**
 * {@link UserTransaction} implementation for JNDI and EJB beans
 *
 * @author levan
 *
 */
public class UserTransactionImpl implements UserTransaction {

    // Caches EntityTransaction instances for immediate commit or join with
    // other transactions
    private Stack<EntityTransaction> transactions;

    // Caches EntityManager instances for clear up
    private Stack<EntityManager> ems;

    // Caches EntityTransaction instances for immediate commit
    private Stack<EntityTransaction> requareNews;

    // Caches EntityManager instances for immediate clean up
    private Stack<EntityManager> requareNewEms;

    // Bean instance that initiated this transaction (see checkCaller / setCaller)
    private Object caller;

    // Denotes active transaction
    // FIX: these counters were mutable "private static int"; they are constants
    // and are now declared final so they cannot be reassigned.
    private static final int ACTIVE = 1;

    // Denotes inactive transaction
    private static final int INACTIVE = 0;

    /**
     * Creates the transaction wrapper, seeding it with any passed
     * {@link EntityTransaction}s.
     *
     * @param transactions initial transactions to manage (may be empty)
     */
    public UserTransactionImpl(EntityTransaction... transactions) {
        this.transactions = new Stack<EntityTransaction>();
        if (CollectionUtils.valid(transactions)) {
            addTransactions(transactions);
        }
    }

    // Begins every cached transaction in insertion order.
    private void beginAll() throws NotSupportedException, SystemException {
        for (EntityTransaction transaction : transactions) {
            transaction.begin();
        }
    }

    @Override
    public void begin() throws NotSupportedException, SystemException {
        if (CollectionUtils.valid(transactions)) {
            beginAll();
        }
    }

    // Commits a single transaction only if it is still active.
    private void commit(EntityTransaction transaction) throws RollbackException,
            HeuristicMixedException, HeuristicRollbackException,
            SecurityException, IllegalStateException, SystemException {
        if (transaction.isActive()) {
            transaction.commit();
        }
    }

    // Pops and commits transactions until the stack is drained (LIFO order).
    private void commitAll() throws RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SecurityException,
            IllegalStateException, SystemException {
        EntityTransaction transaction;
        while (CollectionUtils.notEmpty(transactions)) {
            transaction = transactions.pop();
            commit(transaction);
        }
    }

    @Override
    public void commit() throws RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SecurityException,
            IllegalStateException, SystemException {
        try {
            if (CollectionUtils.valid(transactions)) {
                commitAll();
            }
        } finally {
            // EntityManagers are closed even if the commit fails.
            closeEntityManagers();
        }
    }

    /**
     * Counts active transactions (both joined and REQUIRES_NEW ones).
     * NOTE(review): this returns a count rather than a
     * {@code javax.transaction.Status} constant — callers appear to rely on
     * the count; confirm before changing.
     */
    @Override
    public int getStatus() throws SystemException {

        int active = INACTIVE;

        if (CollectionUtils.valid(transactions)) {
            for (EntityTransaction transaction : transactions) {
                boolean isActive = transaction.isActive();
                active += isActive ? ACTIVE : INACTIVE;
            }
        }

        if (CollectionUtils.valid(requareNews)) {
            for (EntityTransaction transaction : requareNews) {
                boolean isActive = transaction.isActive();
                active += isActive ? ACTIVE : INACTIVE;
            }
        }

        return active;
    }

    /**
     * Rollbacks passed {@link EntityTransaction} if it is active
     *
     * @param transaction
     */
    private void rollback(EntityTransaction transaction) {
        if (transaction.isActive()) {
            transaction.rollback();
        }
    }

    // Pops and rolls back transactions until the stack is drained (LIFO order).
    private void rollbackAll() throws IllegalStateException, SecurityException,
            SystemException {
        EntityTransaction transaction;
        while (CollectionUtils.notEmpty(transactions)) {
            transaction = transactions.pop();
            rollback(transaction);
        }
    }

    @Override
    public void rollback() throws IllegalStateException, SecurityException,
            SystemException {
        try {
            if (CollectionUtils.valid(transactions)) {
                rollbackAll();
            }
        } finally {
            closeEntityManagers();
        }
    }

    // Marks a single active transaction rollback-only.
    private void setRollbackOnly(EntityTransaction transaction)
            throws IllegalStateException, SystemException {
        if (transaction.isActive()) {
            transaction.setRollbackOnly();
        }
    }

    private void setRollbackOnlyAll() throws IllegalStateException,
            SystemException {
        for (EntityTransaction transaction : transactions) {
            setRollbackOnly(transaction);
        }
    }

    @Override
    public void setRollbackOnly() throws IllegalStateException,
            SystemException {
        if (CollectionUtils.valid(transactions)) {
            setRollbackOnlyAll();
        }
    }

    @Override
    public void setTransactionTimeout(int time) throws SystemException {
        throw new UnsupportedOperationException(
                "Timeouts are not supported yet");
    }

    // Lazily initializes the REQUIRES_NEW transaction stack.
    private Stack<EntityTransaction> getNews() {
        if (requareNews == null) {
            requareNews = new Stack<EntityTransaction>();
        }
        return requareNews;
    }

    // Lazily initializes the REQUIRES_NEW EntityManager stack.
    private Stack<EntityManager> getNewEms() {
        if (requareNewEms == null) {
            requareNewEms = new Stack<EntityManager>();
        }
        return requareNewEms;
    }

    /**
     * Check if {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} type
     * transactions stack is empty
     *
     * @return <code>boolean</code>
     */
    private boolean checkNews() {
        boolean notEmpty = CollectionUtils.valid(requareNews);
        return notEmpty;
    }

    /**
     * Check if {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} type
     * transactions referenced {@link EntityManager} stack is empty
     *
     * @return <code>boolean</code>
     */
    private boolean checkNewEms() {
        boolean notEmpty = CollectionUtils.valid(requareNewEms);
        return notEmpty;
    }

    /**
     * Adds new {@link EntityTransaction} for
     * {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} annotated bean
     * methods
     *
     * @param entityTransaction
     */
    public void pushReqNew(EntityTransaction entityTransaction) {
        getNews().push(entityTransaction);
    }

    /**
     * Adds {@link EntityManager} to collection to close after
     * {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} type transactions
     * processing
     *
     * @param em
     */
    public void pushReqNewEm(EntityManager em) {
        getNewEms().push(em);
    }

    /**
     * Commits the most recent REQUIRES_NEW transaction (if any) and closes its
     * associated {@link EntityManager} even if the commit fails.
     */
    public void commitReqNew() throws SecurityException, IllegalStateException,
            RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SystemException {
        try {
            if (checkNews()) {
                EntityTransaction entityTransaction = getNews().pop();
                commit(entityTransaction);
            }
        } finally {
            closeReqNew();
        }
    }

    /**
     * Closes all cached immediate {@link EntityManager} instances
     */
    private void closeReqNew() {
        if (checkNewEms()) {
            EntityManager em = getNewEms().pop();
            JpaManager.closeEntityManager(em);
        }
    }

    /**
     * Adds {@link EntityTransaction} to transactions {@link List} for further
     * processing
     *
     * @param transaction
     */
    public void addTransaction(EntityTransaction transaction) {
        transactions.add(transaction);
    }

    /**
     * Adds {@link EntityTransaction}s to transactions {@link List} for further
     * processing
     *
     * @param transactions
     */
    public void addTransactions(EntityTransaction... transactions) {
        Collections.addAll(this.transactions, transactions);
    }

    /**
     * Adds {@link EntityManager} to collection to close after transactions
     * processing
     *
     * @param em
     */
    public void addEntityManager(EntityManager em) {
        if (ObjectUtils.notNull(em)) {
            if (ems == null) {
                ems = new Stack<EntityManager>();
            }
            ems.push(em);
        }
    }

    /**
     * Adds {@link EntityManager}'s to collection to close after transactions
     * processing
     *
     * @param em
     */
    public void addEntityManagers(Collection<EntityManager> ems) {
        if (CollectionUtils.valid(ems)) {
            for (EntityManager em : ems) {
                addEntityManager(em);
            }
        }
    }

    /**
     * Closes all cached {@link EntityManager} instances
     */
    private void closeAllEntityManagers() {
        EntityManager em;
        while (CollectionUtils.notEmpty(ems)) {
            em = ems.pop();
            JpaManager.closeEntityManager(em);
        }
    }

    /**
     * Closes all contained {@link EntityManager}s
     */
    public void closeEntityManagers() {
        if (CollectionUtils.valid(ems)) {
            closeAllEntityManagers();
        }
    }

    /**
     * Checks whether the passed handler's bean is the one that initiated this
     * transaction.
     *
     * @param handler bean handler to compare against the recorded caller
     * @return <code>true</code> if the caller is set and equals the handler's bean
     */
    public boolean checkCaller(BeanHandler handler) {

        boolean check = ObjectUtils.notNull(caller);
        if (check) {
            check = caller.equals(handler.getBean());
        }

        return check;
    }

    /** Records the bean that initiated this transaction. */
    public void setCaller(BeanHandler handler) {
        caller = handler.getBean();
    }

    /** @return the bean instance recorded as the transaction initiator, or null */
    public Object getCaller() {
        return caller;
    }
}
package org.lightmare.jpa.jta;

import java.lang.reflect.InvocationHandler;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Stack;

import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import javax.transaction.HeuristicMixedException;
import javax.transaction.HeuristicRollbackException;
import javax.transaction.NotSupportedException;
import javax.transaction.RollbackException;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;

import org.lightmare.utils.ObjectUtils;

/**
 * {@link UserTransaction} implementation for jndi and ejb beans
 *
 * @author levan
 *
 */
public class UserTransactionImpl implements UserTransaction {

    // Transactions joined into this unit of work, begun/committed/rolled back together.
    private List<EntityTransaction> transactions;

    // EntityManagers to close once the transactions finish.
    private List<EntityManager> ems;

    // REQUIRES_NEW transactions, committed immediately via commitReqNew().
    private Stack<EntityTransaction> requareNews;

    // EntityManagers belonging to REQUIRES_NEW transactions, closed right after commit.
    private Stack<EntityManager> requareNewEms;

    // Handler that initiated this transaction (see checkCaller / setCaller).
    private InvocationHandler caller;

    /**
     * Creates the transaction wrapper, seeding it with any passed
     * {@link EntityTransaction}s.
     *
     * @param transactions initial transactions to manage (may be empty)
     */
    public UserTransactionImpl(EntityTransaction... transactions) {
        if (ObjectUtils.available(transactions)) {
            this.transactions = new ArrayList<EntityTransaction>(
                    Arrays.asList(transactions));
        } else {
            this.transactions = new ArrayList<EntityTransaction>();
        }
    }

    // Begins every cached transaction in insertion order.
    private void beginAll() throws NotSupportedException, SystemException {
        for (EntityTransaction transaction : transactions) {
            transaction.begin();
        }
    }

    @Override
    public void begin() throws NotSupportedException, SystemException {
        if (ObjectUtils.available(transactions)) {
            beginAll();
        }
    }

    // Commits a single transaction only if it is still active.
    private void commit(EntityTransaction transaction) throws RollbackException,
            HeuristicMixedException, HeuristicRollbackException,
            SecurityException, IllegalStateException, SystemException {
        if (transaction.isActive()) {
            transaction.commit();
        }
    }

    private void commitAll() throws RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SecurityException,
            IllegalStateException, SystemException {
        for (EntityTransaction transaction : transactions) {
            commit(transaction);
        }
    }

    @Override
    public void commit() throws RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SecurityException,
            IllegalStateException, SystemException {
        try {
            if (ObjectUtils.available(transactions)) {
                commitAll();
            }
        } finally {
            // EntityManagers are closed even if the commit fails.
            closeEntityManagers();
        }
    }

    /**
     * Counts active transactions (both joined and REQUIRES_NEW ones).
     * NOTE(review): returns a count rather than a
     * {@code javax.transaction.Status} constant — confirm callers before
     * changing.
     */
    @Override
    public int getStatus() throws SystemException {

        int active = 0;

        if (ObjectUtils.available(transactions)) {
            for (EntityTransaction transaction : transactions) {
                boolean isActive = transaction.isActive();
                active += isActive ? 1 : 0;
            }
        }

        if (ObjectUtils.available(requareNews)) {
            for (EntityTransaction transaction : requareNews) {
                boolean isActive = transaction.isActive();
                active += isActive ? 1 : 0;
            }
        }

        return active;
    }

    // Rolls back a single transaction only if it is still active.
    private void rollback(EntityTransaction transaction) {
        if (transaction.isActive()) {
            transaction.rollback();
        }
    }

    private void rollbackAll() throws IllegalStateException, SecurityException,
            SystemException {
        for (EntityTransaction transaction : transactions) {
            rollback(transaction);
        }
    }

    @Override
    public void rollback() throws IllegalStateException, SecurityException,
            SystemException {
        try {
            if (ObjectUtils.available(transactions)) {
                rollbackAll();
            }
        } finally {
            closeEntityManagers();
        }
    }

    // Marks a single active transaction rollback-only.
    private void setRollbackOnly(EntityTransaction transaction)
            throws IllegalStateException, SystemException {
        if (transaction.isActive()) {
            transaction.setRollbackOnly();
        }
    }

    private void setRollbackOnlyAll() throws IllegalStateException,
            SystemException {
        for (EntityTransaction transaction : transactions) {
            setRollbackOnly(transaction);
        }
    }

    @Override
    public void setRollbackOnly() throws IllegalStateException,
            SystemException {
        if (ObjectUtils.available(transactions)) {
            setRollbackOnlyAll();
        }
    }

    @Override
    public void setTransactionTimeout(int time) throws SystemException {
        throw new UnsupportedOperationException(
                "Timeouts are not supported yet");
    }

    // Lazily initializes the REQUIRES_NEW transaction stack.
    private Stack<EntityTransaction> getNews() {
        if (requareNews == null) {
            requareNews = new Stack<EntityTransaction>();
        }
        return requareNews;
    }

    // Lazily initializes the REQUIRES_NEW EntityManager stack.
    private Stack<EntityManager> getNewEms() {
        if (requareNewEms == null) {
            requareNewEms = new Stack<EntityManager>();
        }
        return requareNewEms;
    }

    /**
     * Check if {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} type
     * transactions stack is empty
     *
     * @return <code>boolean</code>
     */
    private boolean checkNews() {
        boolean notEmpty = ObjectUtils.available(requareNews);
        return notEmpty;
    }

    /**
     * Check if {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} type
     * transactions referenced {@link EntityManager} stack is empty
     *
     * @return <code>boolean</code>
     */
    private boolean checkNewEms() {
        boolean notEmpty = ObjectUtils.available(requareNewEms);
        return notEmpty;
    }

    /**
     * Adds new {@link EntityTransaction} for
     * {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} annotated bean
     * methods
     *
     * @param entityTransaction
     */
    public void pushReqNew(EntityTransaction entityTransaction) {
        getNews().push(entityTransaction);
    }

    /**
     * Adds {@link EntityManager} to collection to close after
     * {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} type transactions
     * processing
     *
     * @param em
     */
    public void pushReqNewEm(EntityManager em) {
        getNewEms().push(em);
    }

    /**
     * Commits the most recent REQUIRES_NEW transaction (if any) and closes its
     * associated {@link EntityManager} even if the commit fails.
     */
    public void commitReqNew() throws SecurityException, IllegalStateException,
            RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SystemException {
        try {
            if (checkNews()) {
                EntityTransaction entityTransaction = getNews().pop();
                commit(entityTransaction);
            }
        } finally {
            closeReqNew();
        }
    }

    /**
     * Closes the {@link EntityManager} of the most recently committed
     * REQUIRES_NEW transaction.
     * FIX: now delegates to {@link #closeEntityManager(EntityManager)} instead
     * of duplicating the open-check/close logic inline.
     */
    private void closeReqNew() {
        if (checkNewEms()) {
            closeEntityManager(getNewEms().pop());
        }
    }

    /**
     * Adds {@link EntityTransaction} to transactions {@link List} for further
     * processing
     *
     * @param transaction
     */
    public void addTransaction(EntityTransaction transaction) {
        transactions.add(transaction);
    }

    /**
     * Adds {@link EntityTransaction}s to transactions {@link List} for further
     * processing
     *
     * @param transactions
     */
    public void addTransactions(EntityTransaction... transactions) {
        Collections.addAll(this.transactions, transactions);
    }

    /**
     * Adds {@link EntityManager} to collection to close after transactions
     * processing
     *
     * @param em
     */
    public void addEntityManager(EntityManager em) {
        if (ObjectUtils.notNull(em)) {
            if (ems == null) {
                ems = new ArrayList<EntityManager>();
            }
            ems.add(em);
        }
    }

    // Closes one EntityManager if it is still open.
    private void closeEntityManager(EntityManager em) {
        if (em.isOpen()) {
            em.close();
        }
    }

    private void closeAllEntityManagers() {
        for (EntityManager em : ems) {
            closeEntityManager(em);
        }
    }

    /**
     * Closes all contained {@link EntityManager}s
     */
    public void closeEntityManagers() {
        if (ObjectUtils.available(ems)) {
            closeAllEntityManagers();
        }
    }

    /**
     * Checks whether the passed handler is the one that initiated this
     * transaction.
     *
     * @param handler invocation handler to compare against the recorded caller
     * @return <code>true</code> if the caller is set and equals the handler
     */
    public boolean checkCaller(InvocationHandler handler) {

        boolean check = ObjectUtils.notNull(caller);
        if (check) {
            check = caller.equals(handler);
        }

        return check;
    }

    /** Records the handler that initiated this transaction. */
    public void setCaller(InvocationHandler handler) {
        caller = handler;
    }

    /** @return the handler recorded as the transaction initiator, or null */
    public InvocationHandler getCaller() {
        return caller;
    }
}
package crazypants.enderio.machines.machine.spawner;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import com.enderio.core.client.render.BoundingBox;

import crazypants.enderio.machines.config.config.SpawnerConfig;
import crazypants.enderio.util.CapturedMob;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityCreature;
import net.minecraft.entity.EntityLiving;
import net.minecraft.util.EntitySelectors;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.world.DifficultyInstance;
import net.minecraft.world.World;
import net.minecraftforge.event.ForgeEventFactory;
import net.minecraftforge.fml.common.eventhandler.Event.Result;

/**
 * Shared entity-spawning logic for powered spawners. The host block/tile
 * supplies world position, range and the captured mob through
 * {@link ISpawnerCallback}; this class handles area checks, spawn attempts
 * and user-facing notifications.
 */
public class SpawnerLogic {

  /** Callback interface the owning spawner tile/block must implement. */
  public static interface ISpawnerCallback {

    @Nonnull
    World getSpawnerWorld();

    @Nonnull
    BlockPos getSpawnerPos();

    /** Horizontal spawn radius around the spawner position. */
    int getRange();

    /** Bounding box of the spawn area: range blocks in XZ, 1 block in Y. */
    default @Nonnull BoundingBox getBounds() {
      return new BoundingBox(getSpawnerPos()).expand(getRange(), 1d, getRange());
    }

    /** Raises a status notification shown to the user. */
    void setNotification(@Nonnull SpawnerNotification note);

    /** Clears a previously raised notification. */
    void removeNotification(@Nonnull SpawnerNotification note);

    /** The mob this spawner should produce, or null if none is captured. */
    @Nullable
    CapturedMob getEntity();

    /** Optional hook to set a home position on spawned creatures. No-op by default. */
    default void setHome(@Nonnull EntityCreature entity) {
    }

    /** Called when the captured mob is invalid and must be discarded. */
    void resetCapturedMob();
  }

  private final @Nonnull ISpawnerCallback spawner;

  public SpawnerLogic(@Nonnull ISpawnerCallback spawner) {
    this.spawner = spawner;
  }

  /**
   * Checks the spawner's area with default parameters (2 blocks of Y slack,
   * configured max nearby entities).
   */
  public boolean isAreaClear() {
    int spawnRange = spawner.getRange();
    return isAreaClear(spawnRange, 2, SpawnerConfig.poweredSpawnerMaxNearbyEntities.get());
  }

  /**
   * Returns true when fewer than {@code amount} entities of the captured
   * mob's class are inside the (expanded) spawn bounds. Creates a throwaway
   * entity to learn its class; that entity is always cleaned up again.
   * Raises BAD_SOUL when the captured mob does not produce an EntityLiving.
   *
   * NOTE(review): the outer guard tests the config value, not the
   * {@code amount} parameter -- equivalent for current callers (they pass
   * the config value), but verify if amount is ever passed independently.
   */
  public boolean isAreaClear(int spawnRangeXZ, int spawnRangeY, int amount) {
    if (SpawnerConfig.poweredSpawnerMaxNearbyEntities.get() > 0) {
      World world = spawner.getSpawnerWorld();
      BlockPos pos = spawner.getSpawnerPos();
      Entity entity = createEntity(world.getDifficultyForLocation(pos), true);
      if (!(entity instanceof EntityLiving)) {
        cleanupUnspawnedEntity(entity);
        spawner.setNotification(SpawnerNotification.BAD_SOUL);
        return false;
      }
      final boolean result = isAreaClear(world, entity, spawnRangeXZ, spawnRangeY, amount);
      cleanupUnspawnedEntity(entity);
      return result;
    }
    return true;
  }

  // Counts living entities of the given entity's class inside the expanded
  // bounds; raises/clears the AREA_FULL notification accordingly.
  private boolean isAreaClear(World world, Entity entity, int spawnRangeXZ, int spawnRangeY, int amount) {
    if (SpawnerConfig.poweredSpawnerMaxNearbyEntities.get() > 0) {
      int nearbyEntities = world
          .getEntitiesWithinAABB(entity.getClass(), spawner.getBounds().expand(spawnRangeXZ, spawnRangeY, spawnRangeXZ), EntitySelectors.IS_ALIVE).size();
      if (nearbyEntities >= amount) {
        spawner.setNotification(SpawnerNotification.AREA_FULL);
        return false;
      }
      spawner.removeNotification(SpawnerNotification.AREA_FULL);
    }
    return true;
  }

  /**
   * Attempts to spawn the captured mob at a random location in range.
   * Tries up to poweredSpawnerMaxSpawnTries random positions; on success the
   * entity (and any riders/passengers) is placed into the world and true is
   * returned. On failure the unspawned entity is discarded and a
   * notification (BAD_SOUL / AREA_FULL / NO_LOCATION_FOUND) is raised.
   */
  public boolean trySpawnEntity() {
    World world = spawner.getSpawnerWorld();
    BlockPos pos = spawner.getSpawnerPos();
    Entity entity = createEntity(world.getDifficultyForLocation(pos), true);
    if (!(entity instanceof EntityLiving)) {
      cleanupUnspawnedEntity(entity);
      spawner.setNotification(SpawnerNotification.BAD_SOUL);
      return false;
    }
    EntityLiving entityliving = (EntityLiving) entity;
    int spawnRange = spawner.getRange();

    if (!isAreaClear(world, entity, spawnRange, 2, SpawnerConfig.poweredSpawnerMaxNearbyEntities.get())) {
      cleanupUnspawnedEntity(entity);
      return false;
    }

    for (int i = 0; i < SpawnerConfig.poweredSpawnerMaxSpawnTries.get(); i++) {
      // Random position: XZ offset in (-spawnRange, spawnRange), Y in {-1, 0, 1}
      double x = pos.getX() + .5 + (world.rand.nextDouble() - world.rand.nextDouble()) * spawnRange;
      double y = pos.getY() + world.rand.nextInt(3) - 1;
      double z = pos.getZ() + .5 + (world.rand.nextDouble() - world.rand.nextDouble()) * spawnRange;
      entity.setLocationAndAngles(x, y, z, world.rand.nextFloat() * 360.0F, 0.0F);

      if (canSpawnEntity(entityliving)) {
        if (entityliving instanceof EntityCreature) {
          spawner.setHome(((EntityCreature) entityliving));
        }
        world.spawnEntity(entityliving);
        world.playEvent(2004, pos, 0); // vanilla spawner smoke/flame effect
        entityliving.spawnExplosionParticle();
        // Move any mount and passengers to the spawn position as well
        final Entity ridingEntity = entity.getRidingEntity();
        if (ridingEntity != null) {
          ridingEntity.setLocationAndAngles(entity.posX, entity.posY, entity.posZ, entity.rotationYaw, 0.0F);
        }
        for (Entity passenger : entity.getPassengers()) {
          passenger.setLocationAndAngles(entity.posX, entity.posY, entity.posZ, entity.rotationYaw, 0.0F);
        }
        return true;
      }
    }

    cleanupUnspawnedEntity(entity);
    spawner.setNotification(SpawnerNotification.NO_LOCATION_FOUND);
    return false;
  }

  /**
   * Exhaustively scans every block position in range and reports whether
   * there is at least one valid spawn location. Math.nextAfter(1.0d, 0.0d)
   * mirrors the open interval of the random offsets used in
   * trySpawnEntity(), so the scanned volume matches what spawning can reach.
   * Raises/clears the NO_LOCATION_AT_ALL notification.
   */
  public boolean anyLocationInRange() {
    World world = spawner.getSpawnerWorld();
    BlockPos pos = spawner.getSpawnerPos();
    Entity entity = createEntity(world.getDifficultyForLocation(pos), true);
    if (!(entity instanceof EntityLiving)) {
      cleanupUnspawnedEntity(entity);
      spawner.setNotification(SpawnerNotification.BAD_SOUL);
      return false;
    }
    EntityLiving entityliving = (EntityLiving) entity;
    int spawnRange = spawner.getRange();

    int minxi = MathHelper.floor(pos.getX() + (0.0d - Math.nextAfter(1.0d, 0.0d)) * spawnRange);
    int maxxi = MathHelper.floor(pos.getX() + (Math.nextAfter(1.0d, 0.0d) - 0.0d) * spawnRange);
    int minyi = pos.getY() + 0 - 1;
    int maxyi = pos.getY() + 2 - 1;
    int minzi = MathHelper.floor(pos.getZ() + (0.0d - Math.nextAfter(1.0d, 0.0d)) * spawnRange);
    int maxzi = MathHelper.floor(pos.getZ() + (Math.nextAfter(1.0d, 0.0d) - 0.0d) * spawnRange);

    for (int x = minxi; x <= maxxi; x++) {
      for (int y = minyi; y <= maxyi; y++) {
        for (int z = minzi; z <= maxzi; z++) {
          entityliving.setLocationAndAngles(x + .5, y, z + .5, 0.0F, 0.0F);
          if (canSpawnEntity(entityliving)) {
            cleanupUnspawnedEntity(entity);
            spawner.removeNotification(SpawnerNotification.NO_LOCATION_AT_ALL);
            return true;
          }
        }
      }
    }

    cleanupUnspawnedEntity(entity);
    spawner.setNotification(SpawnerNotification.NO_LOCATION_AT_ALL);
    return false;
  }

  // Marks any mount and passengers of a never-spawned probe entity as dead so
  // they are not leaked. The probe entity itself was never added to the world.
  private void cleanupUnspawnedEntity(Entity entity) {
    if (entity != null) {
      final Entity ridingEntity = entity.getRidingEntity();
      if (ridingEntity != null) {
        ridingEntity.setDead();
      }
      for (Entity passenger : entity.getPassengers()) {
        passenger.setDead();
      }
    }
  }

  /**
   * Builds the captured mob's entity for this world/difficulty, or null when
   * no mob is captured or the entity type no longer exists (in which case
   * the spawner is reset to blank). With forceAlive set, and when there is
   * no player-distance requirement but a despawn timeout is configured, the
   * entity is tagged with the spawn time and made persistent so the despawn
   * logic (elsewhere) can manage it.
   */
  @Nullable
  Entity createEntity(DifficultyInstance difficulty, boolean forceAlive) {
    CapturedMob capturedMob = spawner.getEntity();
    if (capturedMob == null) {
      return null;
    }
    Entity ent = capturedMob.getEntity(spawner.getSpawnerWorld(), spawner.getSpawnerPos(), difficulty, false);
    if (ent == null) {
      // Entity must have been removed from this save or is otherwise missing, so revert to blank spawner
      spawner.resetCapturedMob();
      return null;
    }
    if (forceAlive && SpawnerConfig.poweredSpawnerMaxPlayerDistance.get() <= 0 && SpawnerConfig.poweredSpawnerDespawnTimeSeconds.get() > 0
        && ent instanceof EntityLiving) {
      ent.getEntityData().setLong(BlockPoweredSpawner.KEY_SPAWNED_BY_POWERED_SPAWNER, spawner.getSpawnerWorld().getTotalWorldTime());
      ((EntityLiving) ent).enablePersistence();
    }
    return ent;
  }

  /**
   * Decides whether the entity may spawn at its current location, honoring
   * the configured Forge and/or vanilla spawn checks. Raises DENIED when
   * the Forge event explicitly denies the spawn.
   */
  protected boolean canSpawnEntity(EntityLiving entityliving) {
    // this is the logic from ForgeEventFactory.canEntitySpawnSpawner() with some additions
    switch (SpawnerConfig.poweredSpawnerUseForgeSpawnChecks.get()
        ? ForgeEventFactory.canEntitySpawn(entityliving, entityliving.world, (float) entityliving.posX, (float) entityliving.posY, (float) entityliving.posZ,
            true)
        : Result.DEFAULT) {
    case ALLOW:
      return true;
    case DEFAULT:
      if (SpawnerConfig.poweredSpawnerUseVanillaSpawnChecks.get()) {
        return entityliving.getCanSpawnHere() && entityliving.isNotColliding(); // vanilla logic
      } else {
        return entityliving.isNotColliding();
      }
    case DENY:
    default:
      spawner.setNotification(SpawnerNotification.DENIED);
      return false;
    }
  }
}
package org.lightmare.jpa.jta;

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Stack;

import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import javax.transaction.HeuristicMixedException;
import javax.transaction.HeuristicRollbackException;
import javax.transaction.NotSupportedException;
import javax.transaction.RollbackException;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;

import org.lightmare.ejb.handlers.BeanHandler;
import org.lightmare.jpa.JpaManager;
import org.lightmare.utils.CollectionUtils;
import org.lightmare.utils.ObjectUtils;

/**
 * Implementation of {@link UserTransaction} interface for JNDI and EJB beans
 *
 * @author levan
 *
 */
public class UserTransactionImpl implements UserTransaction {

    // Caches EntityTransaction instances for immediate commit or join with
    // other transactions
    private Stack<EntityTransaction> transactions;

    // Caches EntityManager instances for clear up
    private Stack<EntityManager> ems;

    // Caches EntityTransaction instances for immediate commit
    private Stack<EntityTransaction> requareNews;

    // Caches EntityManager instances for immediate clean up
    private Stack<EntityManager> requareNewEms;

    // Bean instance that owns this transaction (see checkCaller/setCaller)
    private Object caller;

    // Denotes active transaction (FIX: made final - these are constants and
    // were previously mutable static fields)
    private static final int ACTIVE = 1;

    // Denotes inactive transaction
    private static final int INACTIVE = 0;

    private static final String TIMEOUT_NOT_SUPPORTED_ERROR = "Timeouts are not supported yet";

    /**
     * Initializes the transaction stack and registers any passed
     * {@link EntityTransaction}s.
     *
     * @param transactions
     */
    protected UserTransactionImpl(EntityTransaction... transactions) {
        this.transactions = new Stack<EntityTransaction>();
        if (CollectionUtils.valid(transactions)) {
            addTransactions(transactions);
        }
    }

    // Begins every cached transaction in registration order.
    private void beginAll() throws NotSupportedException, SystemException {
        for (EntityTransaction transaction : transactions) {
            transaction.begin();
        }
    }

    @Override
    public void begin() throws NotSupportedException, SystemException {
        if (CollectionUtils.valid(transactions)) {
            beginAll();
        }
    }

    // Commits a single transaction only if it is still active.
    private void commit(EntityTransaction transaction) throws RollbackException,
            HeuristicMixedException, HeuristicRollbackException,
            SecurityException, IllegalStateException, SystemException {
        if (transaction.isActive()) {
            transaction.commit();
        }
    }

    // Pops and commits every cached transaction (LIFO order).
    private void commitAll() throws RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SecurityException,
            IllegalStateException, SystemException {
        EntityTransaction transaction;
        while (CollectionUtils.notEmpty(transactions)) {
            transaction = transactions.pop();
            commit(transaction);
        }
    }

    @Override
    public void commit() throws RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SecurityException,
            IllegalStateException, SystemException {
        try {
            if (CollectionUtils.valid(transactions)) {
                commitAll();
            }
        } finally {
            closeEntityManagers();
        }
    }

    /**
     * Returns the number of currently active transactions (both regular and
     * REQUIRES_NEW). NOTE(review): this is a count, not one of the
     * javax.transaction.Status constants the interface contract suggests --
     * callers appear to treat any non-zero value as "active".
     */
    @Override
    public int getStatus() throws SystemException {
        int active = INACTIVE;
        if (CollectionUtils.valid(transactions)) {
            for (EntityTransaction transaction : transactions) {
                boolean isActive = transaction.isActive();
                active += isActive ? ACTIVE : INACTIVE;
            }
        }
        if (CollectionUtils.valid(requareNews)) {
            for (EntityTransaction transaction : requareNews) {
                boolean isActive = transaction.isActive();
                active += isActive ? ACTIVE : INACTIVE;
            }
        }
        return active;
    }

    /**
     * Rollbacks passed {@link EntityTransaction} if it is active
     *
     * @param transaction
     */
    private void rollback(EntityTransaction transaction) {
        if (transaction.isActive()) {
            transaction.rollback();
        }
    }

    // Pops and rolls back every cached transaction (LIFO order).
    private void rollbackAll() throws IllegalStateException, SecurityException,
            SystemException {
        EntityTransaction transaction;
        while (CollectionUtils.notEmpty(transactions)) {
            transaction = transactions.pop();
            rollback(transaction);
        }
    }

    @Override
    public void rollback() throws IllegalStateException, SecurityException,
            SystemException {
        try {
            if (CollectionUtils.valid(transactions)) {
                rollbackAll();
            }
        } finally {
            closeEntityManagers();
        }
    }

    // Marks a single active transaction rollback-only.
    private void setRollbackOnly(EntityTransaction transaction)
            throws IllegalStateException, SystemException {
        if (transaction.isActive()) {
            transaction.setRollbackOnly();
        }
    }

    private void setRollbackOnlyAll() throws IllegalStateException,
            SystemException {
        for (EntityTransaction transaction : transactions) {
            setRollbackOnly(transaction);
        }
    }

    @Override
    public void setRollbackOnly() throws IllegalStateException,
            SystemException {
        if (CollectionUtils.valid(transactions)) {
            setRollbackOnlyAll();
        }
    }

    /**
     * Not supported; always throws {@link UnsupportedOperationException}.
     */
    @Override
    public void setTransactionTimeout(int time) throws SystemException {
        throw new UnsupportedOperationException(TIMEOUT_NOT_SUPPORTED_ERROR);
    }

    // Lazily initializes the REQUIRES_NEW transaction stack.
    private Stack<EntityTransaction> getNews() {
        if (requareNews == null) {
            requareNews = new Stack<EntityTransaction>();
        }
        return requareNews;
    }

    // Lazily initializes the REQUIRES_NEW EntityManager stack.
    private Stack<EntityManager> getNewEms() {
        if (requareNewEms == null) {
            requareNewEms = new Stack<EntityManager>();
        }
        return requareNewEms;
    }

    /**
     * Check if {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} type
     * transactions stack is <b>not</b> empty
     *
     * @return <code>boolean</code> true when there is at least one cached
     *         REQUIRES_NEW transaction
     */
    private boolean checkNews() {
        boolean notEmpty = CollectionUtils.valid(requareNews);
        return notEmpty;
    }

    /**
     * Check if {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} type
     * transactions referenced {@link EntityManager} stack is <b>not</b> empty
     *
     * @return <code>boolean</code> true when there is at least one cached
     *         REQUIRES_NEW EntityManager
     */
    private boolean checkNewEms() {
        boolean notEmpty = CollectionUtils.valid(requareNewEms);
        return notEmpty;
    }

    /**
     * Adds new {@link EntityTransaction} for
     * {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} annotated bean
     * methods
     *
     * @param entityTransaction
     */
    public void pushReqNew(EntityTransaction entityTransaction) {
        getNews().push(entityTransaction);
    }

    /**
     * Adds {@link EntityManager} to collection to close after
     * {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} type transactions
     * processing
     *
     * @param em
     */
    public void pushReqNewEm(EntityManager em) {
        getNewEms().push(em);
    }

    /**
     * Commits the most recently pushed REQUIRES_NEW transaction (if any) and
     * always closes its associated {@link EntityManager}.
     */
    public void commitReqNew() throws SecurityException, IllegalStateException,
            RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SystemException {
        try {
            if (checkNews()) {
                EntityTransaction entityTransaction = getNews().pop();
                commit(entityTransaction);
            }
        } finally {
            closeReqNew();
        }
    }

    /**
     * Closes all cached immediate {@link EntityManager} instances
     */
    private void closeReqNew() {
        if (checkNewEms()) {
            EntityManager em = getNewEms().pop();
            JpaManager.closeEntityManager(em);
        }
    }

    /**
     * Adds {@link EntityTransaction} to transactions {@link List} for further
     * processing
     *
     * @param transaction
     */
    public void addTransaction(EntityTransaction transaction) {
        transactions.add(transaction);
    }

    /**
     * Adds {@link EntityTransaction}s to transactions {@link List} for further
     * processing
     *
     * @param transactions
     */
    public void addTransactions(EntityTransaction... transactions) {
        Collections.addAll(this.transactions, transactions);
    }

    /**
     * Adds {@link EntityManager} to collection to close after transactions
     * processing
     *
     * @param em
     */
    public void addEntityManager(EntityManager em) {
        if (ObjectUtils.notNull(em)) {
            if (ems == null) {
                ems = new Stack<EntityManager>();
            }
            ems.push(em);
        }
    }

    /**
     * Adds {@link EntityManager}'s to collection to close after transactions
     * processing
     *
     * @param ems
     */
    public void addEntityManagers(Collection<EntityManager> ems) {
        if (CollectionUtils.valid(ems)) {
            for (EntityManager em : ems) {
                addEntityManager(em);
            }
        }
    }

    /**
     * Closes all cached {@link EntityManager} instances
     */
    private void closeAllEntityManagers() {
        EntityManager em;
        while (CollectionUtils.notEmpty(ems)) {
            em = ems.pop();
            JpaManager.closeEntityManager(em);
        }
    }

    /**
     * Closes all contained {@link EntityManager}s
     */
    public void closeEntityManagers() {
        if (CollectionUtils.valid(ems)) {
            closeAllEntityManagers();
        }
    }

    /**
     * Checks whether the bean wrapped by the passed handler is the caller
     * bound to this transaction.
     *
     * @param handler
     * @return <code>boolean</code>
     */
    public boolean checkCaller(BeanHandler handler) {
        boolean check = ObjectUtils.notNull(caller);
        if (check) {
            check = caller.equals(handler.getBean());
        }
        return check;
    }

    // Binds the handler's bean instance as the owner of this transaction.
    public void setCaller(BeanHandler handler) {
        caller = handler.getBean();
    }

    // Returns the bean instance bound to this transaction, or null.
    public Object getCaller() {
        return caller;
    }
}
package org.estatio.capex.dom.invoice;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;

import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.jdo.annotations.Column;
import javax.jdo.annotations.IdentityType;
import javax.jdo.annotations.Index;
import javax.jdo.annotations.Indices;
import javax.jdo.annotations.InheritanceStrategy;
import javax.jdo.annotations.PersistenceCapable;
import javax.jdo.annotations.Queries;
import javax.jdo.annotations.Query;
import javax.validation.constraints.Digits;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;

import org.joda.time.LocalDate;

import org.apache.isis.applib.annotation.Action;
import org.apache.isis.applib.annotation.ActionLayout;
import org.apache.isis.applib.annotation.BookmarkPolicy;
import org.apache.isis.applib.annotation.Contributed;
import org.apache.isis.applib.annotation.DomainObject;
import org.apache.isis.applib.annotation.DomainObjectLayout;
import org.apache.isis.applib.annotation.Editing;
import org.apache.isis.applib.annotation.MemberOrder;
import org.apache.isis.applib.annotation.Mixin;
import org.apache.isis.applib.annotation.Programmatic;
import org.apache.isis.applib.annotation.SemanticsOf;
import org.apache.isis.applib.annotation.Where;
import org.apache.isis.applib.util.TitleBuffer;
import org.apache.isis.schema.utils.jaxbadapters.PersistentEntityAdapter;

import org.incode.module.document.dom.impl.docs.Document;

import org.estatio.capex.dom.documents.LookupAttachedPdfService;
import org.estatio.capex.dom.documents.categorisation.document.BudgetItemChooser;
import org.estatio.capex.dom.documents.categorisation.invoice.SellerBankAccountCreator;
import org.estatio.capex.dom.invoice.approval.IncomingInvoiceApprovalState;
import org.estatio.capex.dom.invoice.approval.IncomingInvoiceApprovalStateTransition;
import org.estatio.capex.dom.invoice.approval.triggers.IncomingInvoice_triggerAbstract;
import org.estatio.capex.dom.project.Project;
import org.estatio.capex.dom.state.State;
import org.estatio.capex.dom.state.StateTransition;
import org.estatio.capex.dom.state.StateTransitionType;
import org.estatio.capex.dom.state.Stateful;
import org.estatio.dom.asset.Property;
import org.estatio.dom.budgeting.budgetitem.BudgetItem;
import org.estatio.dom.charge.Charge;
import org.estatio.dom.financial.bankaccount.BankAccount;
import org.estatio.dom.invoice.Invoice;
import org.estatio.dom.invoice.InvoiceItem;
import org.estatio.dom.invoice.InvoiceStatus;
import org.estatio.dom.invoice.PaymentMethod;
import org.estatio.dom.party.Party;
import org.estatio.dom.tax.Tax;

import lombok.Getter;
import lombok.Setter;

/**
 * A supplier invoice received by the organisation, persisted in the shared
 * Invoice table (SUPERCLASS_TABLE inheritance) and driven through an
 * approval-state workflow ({@link IncomingInvoiceApprovalState}).
 */
@PersistenceCapable(
        identityType = IdentityType.DATASTORE
        // unused since rolled-up to superclass:
        //,schema = "dbo"
        //,table = "IncomingInvoice"
)
@javax.jdo.annotations.Inheritance(
        strategy = InheritanceStrategy.SUPERCLASS_TABLE)
@javax.jdo.annotations.Discriminator(
        "incomingInvoice.IncomingInvoice"
)
@Queries({
        @Query(
                name = "findByApprovalState", language = "JDOQL",
                value = "SELECT "
                        + "FROM org.estatio.capex.dom.invoice.IncomingInvoice "
                        + "WHERE approvalState == :approvalState "),
        @Query(
                name = "findByInvoiceNumber", language = "JDOQL",
                value = "SELECT "
                        + "FROM org.estatio.capex.dom.invoice.IncomingInvoice "
                        + "WHERE invoiceNumber == :invoiceNumber "),
        @Query(
                name = "findByInvoiceNumberAndSellerAndInvoiceDate", language = "JDOQL",
                value = "SELECT "
                        + "FROM org.estatio.capex.dom.invoice.IncomingInvoice "
                        + "WHERE invoiceNumber == :invoiceNumber "
                        + "   && seller == :seller "
                        + "   && invoiceDate == :invoiceDate "),
        @Query(
                name = "findByInvoiceDateBetween", language = "JDOQL",
                value = "SELECT "
                        + "FROM org.estatio.capex.dom.invoice.IncomingInvoice "
                        + "WHERE invoiceDate >= :fromDate "
                        + "   && invoiceDate <= :toDate "),
        @Query(
                name = "findByDueDateBetween", language = "JDOQL",
                value = "SELECT "
                        + "FROM org.estatio.capex.dom.invoice.IncomingInvoice "
                        + "WHERE dueDate >= :fromDate "
                        + "   && dueDate <= :toDate "),
        @Query(
                name = "findByDateReceivedBetween", language = "JDOQL",
                value = "SELECT "
                        + "FROM org.estatio.capex.dom.invoice.IncomingInvoice "
                        + "WHERE dateReceived >= :fromDate "
                        + "   && dateReceived <= :toDate "),
        @Query(
                name = "findNotInAnyPaymentBatchByApprovalStateAndPaymentMethod", language = "JDOQL",
                value = "SELECT "
                        + "FROM org.estatio.capex.dom.invoice.IncomingInvoice "
                        + "WHERE !(SELECT invoice "
                        + "         FROM org.estatio.capex.dom.payment.PaymentLine).contains(this) "
                        + "   && approvalState == :approvalState "
                        + "   && paymentMethod == :paymentMethod "
                        + "ORDER BY invoiceDate ASC "
        ),
        @Query(
                name = "findByBankAccount", language = "JDOQL",
                value = "SELECT "
                        + "FROM org.estatio.capex.dom.invoice.IncomingInvoice "
                        + "WHERE bankAccount == :bankAccount ")
})
@Indices({
        @Index(name = "IncomingInvoice_approvalState_IDX", members = { "approvalState" })
})
// unused, since rolled-up
//@Unique(name = "IncomingInvoice_invoiceNumber_UNQ", members = { "invoiceNumber" })
@DomainObject(
        editing = Editing.DISABLED,
        objectType = "incomingInvoice.IncomingInvoice",
        persistingLifecycleEvent = IncomingInvoice.ObjectPersistingEvent.class,
        persistedLifecycleEvent = IncomingInvoice.ObjectPersistedEvent.class
)
@DomainObjectLayout(
        bookmarking = BookmarkPolicy.AS_ROOT
)
@XmlJavaTypeAdapter(PersistentEntityAdapter.class)
public class IncomingInvoice extends Invoice<IncomingInvoice> implements SellerBankAccountCreator, Stateful {

    // Lifecycle events published by the framework when this entity is persisted.
    public static class ObjectPersistedEvent
            extends org.apache.isis.applib.services.eventbus.ObjectPersistedEvent <IncomingInvoice> {
    }

    public static class ObjectPersistingEvent
            extends org.apache.isis.applib.services.eventbus.ObjectPersistingEvent <IncomingInvoice> {
    }

    public IncomingInvoice() {
        super("seller,invoiceNumber");
    }

    // NOTE(review): this constructor passes "invoiceNumber" as the ordering
    // key while the no-arg constructor passes "seller,invoiceNumber" --
    // confirm whether the difference is intentional.
    public IncomingInvoice(
            final IncomingInvoiceType typeIfAny,
            final String invoiceNumber,
            final Property property,
            final String atPath,
            final Party buyer,
            final Party seller,
            final LocalDate invoiceDate,
            final LocalDate dueDate,
            final PaymentMethod paymentMethod,
            final InvoiceStatus invoiceStatus,
            final LocalDate dateReceived,
            final BankAccount bankAccount,
            final IncomingInvoiceApprovalState approvalStateIfAny){
        super("invoiceNumber");
        setType(typeIfAny);
        setInvoiceNumber(invoiceNumber);
        setProperty(property);
        setApplicationTenancyPath(atPath);
        setBuyer(buyer);
        setSeller(seller);
        setInvoiceDate(invoiceDate);
        setDueDate(dueDate);
        setPaymentMethod(paymentMethod);
        setStatus(invoiceStatus);
        setDateReceived(dateReceived);
        setBankAccount(bankAccount);
        setApprovalState(approvalStateIfAny);
    }

    /**
     * UI title: attached PDF name (if any), then ": seller", then
     * ", invoiceNumber" -- each part appended only when present.
     */
    public String title() {
        final TitleBuffer buf = new TitleBuffer();

        final Optional<Document> document = lookupAttachedPdfService.lookupIncomingInvoicePdfFrom(this);
        document.ifPresent(d -> buf.append(d.getName()));

        final Party seller = getSeller();
        if(seller != null) {
            buf.append(": ", seller);
        }

        final String invoiceNumber = getInvoiceNumber();
        if(invoiceNumber != null) {
            buf.append(", ", invoiceNumber);
        }

        return buf.toString();
    }

    /**
     * Mixin action: adds an {@link IncomingInvoiceItem} to this invoice.
     * Parameter indexes (0..12) drive the defaultNAct/choicesNAct methods below.
     */
    @Mixin(method="act")
    public static class addItem {

        private final IncomingInvoice incomingInvoice;

        public addItem(final IncomingInvoice incomingInvoice) {
            this.incomingInvoice = incomingInvoice;
        }

        @MemberOrder(name="items", sequence = "1")
        public IncomingInvoice act(
                final IncomingInvoiceType type,
                final Charge charge,
                @Nullable final String description,
                final BigDecimal netAmount,
                @Nullable final BigDecimal vatAmount,
                @Nullable final BigDecimal grossAmount,
                @Nullable final Tax tax,
                @Nullable final LocalDate dueDate,
                @Nullable final LocalDate startDate,
                @Nullable final LocalDate endDate,
                @Nullable final Property property,
                @Nullable final Project project,
                @Nullable final BudgetItem budgetItem) {
            // upsert keyed on the next item sequence number
            final BigInteger sequence = incomingInvoice.nextItemSequence();
            incomingInvoiceItemRepository.upsert(
                    sequence,
                    incomingInvoice,
                    type,
                    charge,
                    description,
                    netAmount,
                    vatAmount,
                    grossAmount,
                    tax,
                    dueDate,
                    startDate,
                    endDate,
                    property,
                    project,
                    budgetItem);
            return incomingInvoice;
        }

        // Action disabled whenever the invoice's approval state forbids changes.
        public String disableAct() {
            return incomingInvoice.reasonDisabledDueToState();
        }

        public IncomingInvoiceType default0Act() {
            return incomingInvoice.getType();
        }

        // Defaults for dueDate/startDate/endDate/project come from the first
        // existing item; property defaults from the invoice itself.
        public LocalDate default7Act() {
            return ofFirstItem(IncomingInvoiceItem::getDueDate);
        }

        public LocalDate default8Act() {
            return ofFirstItem(IncomingInvoiceItem::getStartDate);
        }

        public LocalDate default9Act() {
            return ofFirstItem(IncomingInvoiceItem::getEndDate);
        }

        public Property default10Act() {
            return incomingInvoice.getProperty();
        }

        public Project default11Act() {
            return ofFirstItem(IncomingInvoiceItem::getProject);
        }

        // Choices for the budgetItem parameter (index 12), narrowed by property+charge.
        public List<BudgetItem> choices12Act(
                final IncomingInvoiceType type,
                final Charge charge,
                final String description,
                final BigDecimal netAmount,
                final BigDecimal vatAmount,
                final BigDecimal grossAmount,
                final Tax tax,
                final LocalDate dueDate,
                final LocalDate startDate,
                final LocalDate endDate,
                final Property property,
                final Project project) {
            return budgetItemChooser.choicesBudgetItemFor(property, charge);
        }

        // Extracts a value from the first IncomingInvoiceItem, or null if none.
        private <T> T ofFirstItem(final Function<IncomingInvoiceItem, T> f) {
            final Optional<IncomingInvoiceItem> firstItemIfAny = firstItemIfAny();
            return firstItemIfAny.map(f).orElse(null);
        }

        private Optional<IncomingInvoiceItem> firstItemIfAny() {
            return incomingInvoice.getItems().stream()
                    .filter(IncomingInvoiceItem.class::isInstance)
                    .map(IncomingInvoiceItem.class::cast)
                    .findFirst();
        }

        @Inject
        BudgetItemChooser budgetItemChooser;
        @Inject
        IncomingInvoiceItemRepository incomingInvoiceItemRepository;
    }

    /**
     * Mixin action: changes the bank account; only available while the
     * invoice is in the NEW approval state (enforced by the superclass).
     */
    @Mixin(method="act")
    public static class changeBankAccount extends IncomingInvoice_triggerAbstract {

        private final IncomingInvoice incomingInvoice;

        public changeBankAccount(final IncomingInvoice incomingInvoice) {
            super(incomingInvoice, Arrays.asList(IncomingInvoiceApprovalState.NEW), null);
            this.incomingInvoice = incomingInvoice;
        }

        @Action(semantics = SemanticsOf.IDEMPOTENT)
        @ActionLayout(contributed= Contributed.AS_ACTION)
        public IncomingInvoice act(
                final BankAccount bankAccount,
                @Nullable final String comment){
            incomingInvoice.setBankAccount(bankAccount);
            trigger(comment);
            return incomingInvoice;
        }

        public boolean hideAct() {
            return cannotTransition();
        }
    }

    /**
     * Default type, used for routing.
     *
     * <p>
     * This can be overridden for each invoice item.
     * </p>
     */
    @Getter @Setter
    @Column(allowsNull = "false")
    private IncomingInvoiceType type;

    /**
     * This relates to the owning property, while the child items may either also relate to the property,
     * or could potentially relate to individual units within the property.
     *
     * <p>
     * Note that InvoiceForLease also has a reference to FixedAsset.  It's not possible to move this
     * up to the Invoice superclass because invoicing module does not "know" about fixed assets.
     * </p>
     */
    @javax.jdo.annotations.Column(name = "propertyId", allowsNull = "true")
    @org.apache.isis.applib.annotation.Property(hidden = Where.REFERENCES_PARENT)
    @Getter @Setter
    private Property property;

    // Seller's bank account to pay into; editable via the changeBankAccount mixin.
    @Getter @Setter
    @Column(allowsNull = "true", name = "bankAccountId")
    private BankAccount bankAccount;

    @Getter @Setter
    @Column(allowsNull = "true")
    private LocalDate dateReceived;

    // Optional link to another incoming invoice this one relates to.
    @Getter @Setter
    @Column(allowsNull = "true", name="invoiceId")
    private IncomingInvoice relatesTo;

    // TODO: need to remove this from superclass, ie push down to InvoiceForLease subclass so not in this subtype
    @org.apache.isis.applib.annotation.Property(hidden = Where.EVERYWHERE)
    @Override
    public InvoiceStatus getStatus() {
        return super.getStatus();
    }

    @org.apache.isis.applib.annotation.Property(hidden = Where.ALL_TABLES)
    @javax.jdo.annotations.Column(scale = 2, allowsNull = "true")
    @Getter @Setter
    private BigDecimal netAmount;

    /**
     * Derived: gross - net, or null when either amount is missing (not persisted).
     */
    @org.apache.isis.applib.annotation.Property(hidden = Where.ALL_TABLES)
    @Digits(integer = 9, fraction = 2)
    public BigDecimal getVatAmount() {
        return getGrossAmount() != null && getNetAmount() != null
                ? getGrossAmount().subtract(getNetAmount())
                : null;
    }

    @javax.jdo.annotations.Column(scale = 2, allowsNull = "true")
    @Getter @Setter
    private BigDecimal grossAmount;

    @Action(semantics = SemanticsOf.IDEMPOTENT)
    public IncomingInvoice editSeller(
            @Nullable final Party seller){
        setSeller(seller);
        return this;
    }

    // Seller cannot change once immutable overall, or once any item is linked to an order.
    public String disableEditSeller(){
        if (isImmutable()){
            return "The invoice cannot be changed";
        }
        return sellerIsImmutable() ? "Seller is immutable because an item is linked to an order" : null;
    }

    private boolean sellerIsImmutable(){
        for (InvoiceItem item : getItems()){
            IncomingInvoiceItem ii = (IncomingInvoiceItem) item;
            if (ii.isLinkedToOrderItem()){
                return true;
            }
        }
        return false;
    }

    @Getter @Setter
    @javax.jdo.annotations.Column(allowsNull = "false")
    private IncomingInvoiceApprovalState approvalState;

    /**
     * Stateful implementation: this entity only supports the approval-state
     * transition chart; any other transition class yields null.
     */
    @Override
    public <
            DO,
            ST extends StateTransition<DO, ST, STT, S>,
            STT extends StateTransitionType<DO, ST, STT, S>,
            S extends State<S>
    > S getStateOf(
            final Class<ST> stateTransitionClass) {
        if(stateTransitionClass == IncomingInvoiceApprovalStateTransition.class) {
            // unchecked cast is safe: guarded by the class comparison above
            return (S) approvalState;
        }
        return null;
    }

    @Override
    public <
            DO,
            ST extends StateTransition<DO, ST, STT, S>,
            STT extends StateTransitionType<DO, ST, STT, S>,
            S extends State<S>
    > void setStateOf(
            final Class<ST> stateTransitionClass, final S newState) {
        if(stateTransitionClass == IncomingInvoiceApprovalStateTransition.class) {
            setApprovalState( (IncomingInvoiceApprovalState) newState );
        }
    }

    /**
     * Non-null reason string when the invoice may no longer be modified;
     * only NEW and COMPLETED states permit changes.
     */
    @Programmatic
    public String reasonDisabledDueToState() {
        final IncomingInvoiceApprovalState approvalState1 = getApprovalState();
        switch (approvalState1) {
        case NEW:
        case COMPLETED:
            return null;
        default:
            return "Cannot modify because invoice is in state of " + getApprovalState();
        }
    }

    /**
     * Non-null reason string when mandatory data is still missing
     * (currently only the bank account is checked).
     */
    @Programmatic
    public String reasonIncomplete(){
        if (getBankAccount() == null) {
            return "Bank account is required";
        }
        return null;
    }

    @Inject
    LookupAttachedPdfService lookupAttachedPdfService;

}
package org.lightmare.rest.providers;

import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import javax.ws.rs.ext.Provider;

import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.server.spi.Container;
import org.glassfish.jersey.server.spi.ContainerLifecycleListener;

import org.lightmare.rest.RestConfig;
import org.lightmare.utils.ObjectUtils;

/**
 * Reloads {@link RestConfig} (implementation of {@link ResourceConfig}) at
 * runtime
 *
 * @author levan
 *
 */
@Provider
public class RestReloader implements ContainerLifecycleListener {

    // Most recently constructed reloader; all reads and writes are guarded
    // by LOCK (see get() and the constructor).
    private static RestReloader reloader;

    private static final Lock LOCK = new ReentrantLock();

    public RestReloader() {
        ObjectUtils.lock(LOCK);
        try {
            reloader = this;
        } finally {
            LOCK.unlock();
        }
    }

    /**
     * Returns the most recently constructed reloader instance (or null if
     * none has been created yet).
     *
     * FIX: previously this synchronized on {@code RestReloader.class} while
     * the constructor wrote {@code reloader} under {@code LOCK}; reader and
     * writer used different monitors, so there was no happens-before edge
     * between them. Both sides now use {@code LOCK}.
     */
    public static RestReloader get() {
        RestReloader instance;
        ObjectUtils.lock(LOCK);
        try {
            instance = reloader;
        } finally {
            LOCK.unlock();
        }
        return instance;
    }

    // Jersey container captured at startup; required by reload().
    private Container container;

    // NOTE(review): throws NullPointerException if called before onStartup
    // has run -- confirm callers only invoke reload after container startup.
    public void reload() {
        container.reload();
    }

    public void reload(ResourceConfig config) {
        container.reload(config);
    }

    @Override
    public void onStartup(Container container) {
        this.container = container;
    }

    @Override
    public void onReload(Container container) {
        // no-op: reloads are driven explicitly via reload()/reload(config)
    }

    @Override
    public void onShutdown(Container container) {
        // no-op: container reference is kept; nothing to release here
    }
}
package org.monarchinitiative.exomiser.core.model;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import de.charite.compbio.jannovar.annotation.VariantEffect;
import de.charite.compbio.jannovar.mendel.ModeOfInheritance;
import htsjdk.variant.variantcontext.*;
import org.monarchinitiative.exomiser.core.filters.FilterResult;
import org.monarchinitiative.exomiser.core.filters.FilterType;
import org.monarchinitiative.exomiser.core.genome.GenomeAssembly;
import org.monarchinitiative.exomiser.core.model.frequency.FrequencyData;
import org.monarchinitiative.exomiser.core.model.pathogenicity.PathogenicityData;
import org.monarchinitiative.exomiser.core.model.pathogenicity.VariantEffectPathogenicityScore;

import java.util.*;

/**
 * This class is a wrapper for the {@code Variant} class from the jannovar
 * hierarchy, and additionally includes all of the information on pathogenicity
 * and frequency that is added to each variant by the Exomizer program.
 *
 * @author Jules Jacobsen <jules.jacobsen@sanger.ac.uk>
 * @author Peter Robinson <peter.robinson@charite.de>
 */
@JsonPropertyOrder({"genomeAssembly", "chromosomeName", "chromosome", "position", "ref", "alt", "phredScore", "variantEffect", "nonCodingVariant", "filterStatus", "variantScore", "frequencyScore", "pathogenicityScore", "predictedPathogenic", "passedFilterTypes", "failedFilterTypes", "frequencyData", "pathogenicityData", "compatibleInheritanceModes", "contributingInheritanceModes", "transcriptAnnotations"})
public class VariantEvaluation implements Comparable<VariantEvaluation>, Filterable, Inheritable, Variant {

    //threshold over which a variant effect score is considered pathogenic
    private static final float DEFAULT_PATHOGENICITY_THRESHOLD = 0.5f;

    // HTSJDK {@link VariantContext} instance of this allele
    @JsonIgnore
    private final VariantContext variantContext;

    // numeric index of the alternative allele in the {@link VariantContext}
    private final int altAlleleId;

    //VariantCoordinates variables - these are a minimal requirement for describing a variant
    private final GenomeAssembly genomeAssembly;
    private final int chr;
    private final String chromosomeName;
    private final int pos;
    private final String ref;
    private final String alt;

    //Variant variables, for a richer more VCF-like experience
    private final double phredScore;

    //TODO for the time being this is ignored
    @JsonIgnore
    private Map<String,SampleGenotype> sampleGenotypes;

    //VariantAnnotation
    private VariantEffect variantEffect;
    private List<TranscriptAnnotation> annotations;
    @JsonIgnore
    private String geneSymbol;
    @JsonIgnore
    private String geneId;

    //results from filters
    private final Set<FilterType> passedFilterTypes;
    private final Set<FilterType> failedFilterTypes;

    //score-related stuff - these are mutable
    private FrequencyData frequencyData;
    private PathogenicityData pathogenicityData;

    @JsonProperty("contributingInheritanceModes")
    private Set<ModeOfInheritance> contributingModes = EnumSet.noneOf(ModeOfInheritance.class);
    private Set<ModeOfInheritance> compatibleInheritanceModes = EnumSet.noneOf(ModeOfInheritance.class);

    // All construction goes through the Builder; filter-type sets are
    // defensively copied, score data is taken as-is (mutable thereafter).
    private VariantEvaluation(Builder builder) {
        genomeAssembly = builder.genomeAssembly;
        chr = builder.chr;
        chromosomeName = builder.chromosomeName;
        pos = builder.pos;
        ref = builder.ref;
        alt = builder.alt;

        phredScore = builder.phredScore;
        variantEffect = builder.variantEffect;
        annotations = builder.annotations;
        geneSymbol = builder.geneSymbol;
        geneId = builder.geneId;

        variantContext = builder.variantContext;
        altAlleleId = builder.altAlleleId;
        sampleGenotypes = builder.sampleGenotypes;

        passedFilterTypes = EnumSet.copyOf(builder.passedFilterTypes);
        failedFilterTypes = EnumSet.copyOf(builder.failedFilterTypes);

        frequencyData = builder.frequencyData;
        pathogenicityData = builder.pathogenicityData;
    }

    @Override
    public GenomeAssembly getGenomeAssembly() {
        return genomeAssembly;
    }

    /**
     * @return an integer representing the chromosome. 1-22 are obvious,
     * chrX=23, ChrY=24, ChrM=25.
     */
    @Override
    public int getChromosome() {
        return chr;
    }

    /**
     * @return a String such "4" or "X" in the case of chromosome 23
     */
    @Override
    public String getChromosomeName() {
        return chromosomeName;
    }

    /**
     * @return Return the 1-based start position of the variant on its
     * chromosome.
     */
    @Override
    public int getPosition() {
        return pos;
    }

    /**
     * @return reference allele, or "-" in case of insertions.
     */
    @Override
    public String getRef() {
        return ref;
    }

    /**
     * @return alternative allele, or "-" in case of deletions.
     */
    @Override
    public String getAlt() {
        return alt;
    }

    public VariantContext getVariantContext() {
        return variantContext;
    }

    public int getAltAlleleId() {
        return altAlleleId;
    }

    public double getPhredScore() {
        return phredScore;
    }

    /**
     * @return the most prevalent {@link VariantEffect} such as {@link VariantEffect#MISSENSE_VARIANT},
     * {@link VariantEffect#FRAMESHIFT_ELONGATION}, etc., or <code>null</code>
     * if there is no annotated effect.
     */
    @Override
    public VariantEffect getVariantEffect() {
        return variantEffect;
    }

    public void setVariantEffect (VariantEffect ve){
        variantEffect = ve;
    }

    /**
     * @return the gene symbol associated with the variant.
     */
    @Override
    public String getGeneSymbol() {
        return geneSymbol;
    }

    public void setGeneSymbol(String symbol) {
        geneSymbol = symbol;
    }

    @Override
    public String getGeneId() {
        return geneId;
    }

    public void setGeneId(String geneId) {
        this.geneId = geneId;
    }

    /**
     * This function returns a list of all of the
     * {@link de.charite.compbio.jannovar.annotation.Annotation Annotation} objects that have been
     * associated with the current variant. This function can be called if
     * client code wants to display one line for each affected transcript, e.g.,
     * <ul>
     * <li>LTF(uc003cpr.3:exon5:c.30_31insAAG:p.R10delinsRR)
     * <li>LTF(uc003cpq.3:exon2:c.69_70insAAG:p.R23delinsRR)
     * <li>LTF(uc010hjh.3:exon2:c.69_70insAAG:p.R23delinsRR)
     * </ul>
     * <p>
     */
    @Override
    public List<TranscriptAnnotation> getTranscriptAnnotations() {
        return annotations;
    }

    public void setAnnotations(List<TranscriptAnnotation> annotations) {
        this.annotations = annotations;
    }

    @Override
    public boolean hasTranscriptAnnotations() {
        return !annotations.isEmpty();
    }

    /**
     * @return a String such as chr6:g.29911092G>T
     */
    // SPDI?
    @JsonIgnore
    public String getHgvsGenome() {
        return chr + ":g."
+ pos + ref + ">" + alt; } @JsonIgnore public String getGenotypeString() { //TODO: build this from the sampleGenotypes // collect genotype string list List<String> gtStrings = new ArrayList<>(); for (Genotype gt : variantContext.getGenotypes()) { StringJoiner genotypeStringJoiner = new StringJoiner("/"); for (Allele allele : gt.getAlleles()) { if (allele.isNoCall()) { genotypeStringJoiner.add("."); } else if (allele.equals(variantContext.getAlternateAllele(altAlleleId))) { genotypeStringJoiner.add("1"); } else { genotypeStringJoiner.add("0"); } } gtStrings.add(genotypeStringJoiner.toString()); } // normalize 1/0 to 0/1 and join genotype strings with colon for (int i = 0; i < gtStrings.size(); ++i) { if (gtStrings.get(i).equals("1/0")) { gtStrings.set(i, "0/1"); } } return Joiner.on(":").join(gtStrings); } /** * @return A map of sample ids and their corresponding {@link SampleGenotype} * @since 11.0.0 */ public Map<String, SampleGenotype> getSampleGenotypes() { return sampleGenotypes; } /** * Returns the {@link SampleGenotype} for a given sample identifier. If the identifier is not found an empty * {@link SampleGenotype} will be returned. * * @param sampleId sample id of the individual of interest * @return the {@link SampleGenotype} of the individual for this variant, or an empty {@link SampleGenotype} if the * sample is not represented * @since 11.0.0 */ public SampleGenotype getSampleGenotype(String sampleId) { return sampleGenotypes.getOrDefault(sampleId, SampleGenotype.empty()); } /** * This method is used to add a {@code FilterResult} object to this variant. * Such objects represent the results of running the variant through a {@code Filter}. 
* * @param filterResult * @return */ @Override public boolean addFilterResult(FilterResult filterResult) { if (filterResult.passed()) { return addPassedFilterResult(filterResult); } return addFailedFilterResult(filterResult); } private synchronized boolean addPassedFilterResult(FilterResult filterResult) { passedFilterTypes.add(filterResult.getFilterType()); return true; } private synchronized boolean addFailedFilterResult(FilterResult filterResult) { failedFilterTypes.add(filterResult.getFilterType()); return false; } /** * @return the set of FilterResult objects that represent the result of * filtering */ public Set<FilterType> getPassedFilterTypes() { return EnumSet.copyOf(passedFilterTypes); } /** * @return the Set of {@code FilterType} which the {@code VariantEvaluation} * failed to pass. */ public Set<FilterType> getFailedFilterTypes() { return EnumSet.copyOf(failedFilterTypes); } /** * Under some inheritance modes a variant should not pass, but others it will. For example if a variant is relatively * common it could pass as being compatible under a compound heterozygous model, but might be too common to be * considered as a candidate under an autosomal dominant model. Hence we need to be able to check whether a variant * passed under a specific mode of inheritance otherwise alleles will be reported as having passed under the wrong mode. * * @param modeOfInheritance the mode of inheritance under which the failed filters are required. * @return a set of failed {@code FilterType} for the variant under the {@code ModeOfInheritance} input model. 
*/ public synchronized Set<FilterType> getFailedFilterTypesForMode(ModeOfInheritance modeOfInheritance){ EnumSet<FilterType> failedFiltersCopy = EnumSet.copyOf(failedFilterTypes); if (!isCompatibleWith(modeOfInheritance)) { failedFiltersCopy.add(FilterType.INHERITANCE_FILTER); return failedFiltersCopy; } return failedFiltersCopy; } /** * We're making the assumption that all variants will pass a filter, so if * no filters have been applied, this method will return true. Once a * {@link VariantEvaluation} has been filtered this will return true until * the {@link VariantEvaluation} has failed a filter. * <p> * Note: This may change so that passed/failed/unfiltered can only ever be * true for one status. * * @return */ @Override public synchronized boolean passedFilters() { return failedFilterTypes.isEmpty(); } @Override public synchronized boolean passedFilter(FilterType filterType) { return !failedFilterTypes.contains(filterType) && passedFilterTypes.contains(filterType); } private synchronized boolean isUnFiltered() { return failedFilterTypes.isEmpty() && passedFilterTypes.isEmpty(); } public FilterStatus getFilterStatus() { if (isUnFiltered()) { return FilterStatus.UNFILTERED; } if (passedFilters()) { return FilterStatus.PASSED; } return FilterStatus.FAILED; } public FilterStatus getFilterStatusForMode(ModeOfInheritance modeOfInheritance) { if (isUnFiltered()) { return FilterStatus.UNFILTERED; } if (isCompatibleWith(modeOfInheritance) && passedFilters()) { return FilterStatus.PASSED; } return FilterStatus.FAILED; } /** * Returns the variant score (prediction of the pathogenicity * and relevance of the Variant) by combining the frequency and pathogenicity scores for this variant. 
* * @return a score between 0 and 1 */ public float getVariantScore() { return getFrequencyScore() * getPathogenicityScore(); } /** * @return a score between 0 and 1 */ public float getFrequencyScore() { return frequencyData.getScore(); } /** * Some variants such as splice site variants, are assumed to be pathogenic. At the moment no particular * software is used to evaluate this, we merely take the variant class from the Jannovar code and assign a score. * * Note that we use results of filtering to remove Variants that are predicted to be simply non-pathogenic. However, * amongst variants predicted to be potentially pathogenic, there are different strengths of prediction, which is * what this score tries to reflect. * * For missense mutations, we use the predictions of MutationTaster, polyphen, and SIFT taken from the dbNSFP * project, if present, or otherwise return a default score. * * The score returned here is therefore an overall pathogenicity score defined on the basis of * "medical genetic intuition". * @return a score between 0 and 1 */ public float getPathogenicityScore() { float predictedScore = pathogenicityData.getScore(); float variantEffectScore = VariantEffectPathogenicityScore.getPathogenicityScoreOf(variantEffect); if (variantEffect == VariantEffect.MISSENSE_VARIANT) { // CAUTION! REVEL scores tend to be more nuanced and frequently lower thant either the default variant effect score // or the other predicted path scores, yet apparently are more concordant with ClinVar. For this reason it might be // best to check for a REVEL prediction and defer wholly to that if present rather than do the following. // In version 10.1.0 the MISSENSE variant constraint was removed from the defaultPathogenicityDao and variantDataServiceImpl // so that non-missense variants would get ClinVar annotations and other non-synonymous path scores from the variant store. 
// In order that missense variants are not over-represented if they have poor predicted scores this clause was added here. return pathogenicityData.hasPredictedScore() ? predictedScore : variantEffectScore; } else { return Math.max(predictedScore, variantEffectScore); } } public FrequencyData getFrequencyData() { return frequencyData; } public void setFrequencyData(FrequencyData frequencyData) { this.frequencyData = frequencyData; } public PathogenicityData getPathogenicityData() { return pathogenicityData; } public void setPathogenicityData(PathogenicityData pathogenicityData) { this.pathogenicityData = pathogenicityData; } public void setContributesToGeneScoreUnderMode(ModeOfInheritance modeOfInheritance) { contributingModes.add(modeOfInheritance); } public boolean contributesToGeneScore() { return !contributingModes.isEmpty(); } public boolean contributesToGeneScoreUnderMode(ModeOfInheritance modeOfInheritance) { return modeOfInheritance == ModeOfInheritance.ANY && !contributingModes.isEmpty() || contributingModes.contains(modeOfInheritance); } /** * @return true or false depending on whether the variant effect is considered pathogenic. Pathogenoic variants are * considered to be those with a pathogenicity score greater than 0.5. Missense variants will always return true. */ public boolean isPredictedPathogenic() { if (variantEffect == VariantEffect.MISSENSE_VARIANT) { //we're making the assumption that a missense variant is always potentially pathogenic. 
//Given the prediction scores are predictions, they could fall below the default threshold so //we'll leave it up to the user to decide return true; } else { return getPathogenicityScore() >= DEFAULT_PATHOGENICITY_THRESHOLD; } } @Override public void setCompatibleInheritanceModes(Set<ModeOfInheritance> compatibleModes) { if (compatibleModes.isEmpty()) { compatibleInheritanceModes = EnumSet.noneOf(ModeOfInheritance.class); } else { this.compatibleInheritanceModes = EnumSet.copyOf(compatibleModes); } } @Override public Set<ModeOfInheritance> getCompatibleInheritanceModes() { return EnumSet.copyOf(compatibleInheritanceModes); } @Override public boolean isCompatibleWith(ModeOfInheritance modeOfInheritance) { return modeOfInheritance == ModeOfInheritance.ANY || compatibleInheritanceModes.contains(modeOfInheritance); } /** * Sorts variants according to their natural ordering of genome position. Variants are sorted according to * chromosome number, chromosome position, reference sequence then alternative sequence. * * @param other * @return comparator score consistent with equals. 
*/ @Override public int compareTo(VariantEvaluation other) { if (this.chr != other.chr) { return Integer.compare(this.chr, other.chr); } if (this.pos != other.pos) { return Integer.compare(this.pos, other.pos); } if (!this.ref.equals(other.ref)) { return this.ref.compareTo(other.ref); } return this.alt.compareTo(other.alt); } public static class RankBasedComparator implements Comparator<VariantEvaluation> { @Override public int compare(VariantEvaluation v1, VariantEvaluation v2) { return compareByRank(v1, v2); } } public static int compareByRank(VariantEvaluation some, VariantEvaluation other) { if (some.contributesToGeneScore() != other.contributesToGeneScore()) { return -Boolean.compare(some.contributesToGeneScore(), other.contributesToGeneScore()); } float thisScore = some.getVariantScore(); float otherScore = other.getVariantScore(); if (thisScore != otherScore) { return -Float.compare(thisScore, otherScore); } if (some.chr != other.chr) { return Integer.compare(some.chr, other.chr); } if (some.pos != other.pos) { return Integer.compare(some.pos, other.pos); } if (!some.ref.equals(other.ref)) { return some.ref.compareTo(other.ref); } return some.alt.compareTo(other.alt); } @Override public int hashCode() { return Objects.hash(genomeAssembly, chr, pos, ref, alt); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final VariantEvaluation other = (VariantEvaluation) obj; if (this.genomeAssembly != other.genomeAssembly) { return false; } if (this.chr != other.chr) { return false; } if (this.pos != other.pos) { return false; } return Objects.equals(this.ref, other.ref) && Objects.equals(this.alt, other.alt); } public String toString() { // expose frequency and pathogenicity scores? 
if(contributesToGeneScore()) { //Add a star to the output string between the variantEffect and the score return "VariantEvaluation{assembly=" + genomeAssembly + " chr=" + chr + " pos=" + pos + " ref=" + ref + " alt=" + alt + " qual=" + phredScore + " " + variantEffect + " * score=" + getVariantScore() + " " + getFilterStatus() + " failedFilters=" + failedFilterTypes + " passedFilters=" + passedFilterTypes + " compatibleWith=" + compatibleInheritanceModes + " sampleGenotypes=" + sampleGenotypes + "}"; } return "VariantEvaluation{assembly=" + genomeAssembly + " chr=" + chr + " pos=" + pos + " ref=" + ref + " alt=" + alt + " qual=" + phredScore + " " + variantEffect + " score=" + getVariantScore() + " " + getFilterStatus() + " failedFilters=" + failedFilterTypes + " passedFilters=" + passedFilterTypes + " compatibleWith=" + compatibleInheritanceModes + " sampleGenotypes=" + sampleGenotypes + "}"; } public static Builder builder(int chr, int pos, String ref, String alt) { return new Builder(chr, pos, ref, alt); } /** * Builder class for producing a valid VariantEvaluation. 
*/ public static class Builder { private GenomeAssembly genomeAssembly = GenomeAssembly.HG19; private int chr; private String chromosomeName; private int pos; private String ref; private String alt; private double phredScore = 0; private VariantEffect variantEffect = VariantEffect.SEQUENCE_VARIANT; private List<TranscriptAnnotation> annotations = Collections.emptyList(); private String geneSymbol = "."; private String geneId = GeneIdentifier.EMPTY_FIELD; private VariantContext variantContext; private int altAlleleId; private Map<String,SampleGenotype> sampleGenotypes = Collections.emptyMap(); private PathogenicityData pathogenicityData = PathogenicityData.empty(); private FrequencyData frequencyData = FrequencyData.empty(); private final Set<FilterType> passedFilterTypes = EnumSet.noneOf(FilterType.class); private final Set<FilterType> failedFilterTypes = EnumSet.noneOf(FilterType.class); /** * Creates a minimal variant * * @param chr * @param pos * @param ref * @param alt */ private Builder(int chr, int pos, String ref, String alt) { this.chr = chr; this.pos = pos; this.ref = ref; this.alt = alt; } public Builder genomeAssembly(GenomeAssembly genomeAssembly) { this.genomeAssembly = genomeAssembly; return this; } public Builder genomeAssembly(String genomeAssembly) { this.genomeAssembly = GenomeAssembly.fromValue(genomeAssembly); return this; } public Builder chromosomeName(String chromosomeName) { this.chromosomeName = chromosomeName; return this; } /** * Safety method to handle creating the chromosome name in cases where * the name is not explicitly set. This should happen in the * VariantFactory, but for testing we're happy with a sensible default * value. It's not critical, but is nice to prevent a lot of silly * duplicate code. 
* * @param chr * @return */ private String buildChromosomeName(int chr) { switch (chr) { case 23: return "X"; case 24: return "Y"; case 25: return "MT"; default: return String.valueOf(chr); } } public Builder variantContext(VariantContext variantContext) { this.variantContext = variantContext; return this; } public Builder altAlleleId(int altAlleleId) { this.altAlleleId = altAlleleId; return this; } public Builder quality(double phredScore) { this.phredScore = phredScore; return this; } public Builder sampleGenotypes(Map<String, SampleGenotype> sampleGenotypes) { Objects.requireNonNull(sampleGenotypes); this.sampleGenotypes = ImmutableMap.copyOf(sampleGenotypes); return this; } public Builder variantEffect(VariantEffect variantEffect) { this.variantEffect = variantEffect; return this; } public Builder annotations(List<TranscriptAnnotation> annotations) { this.annotations = annotations; return this; } public Builder geneSymbol(String geneSymbol) { this.geneSymbol = inputOrfirstValueInCommaSeparatedString(geneSymbol); return this; } private String inputOrfirstValueInCommaSeparatedString(String geneSymbol) { int commaIndex = geneSymbol.indexOf(','); return (commaIndex > -1) ? geneSymbol.substring(0, commaIndex) : geneSymbol; } public Builder geneId(String geneId) { this.geneId = geneId; return this; } public Builder pathogenicityData(PathogenicityData pathogenicityData) { this.pathogenicityData = pathogenicityData; return this; } public Builder frequencyData(FrequencyData frequencyData) { this.frequencyData = frequencyData; return this; } public Builder filterResults(FilterResult... 
filterResults) { return filterResults(Arrays.asList(filterResults)); } public Builder filterResults(Collection<FilterResult> filterResults) { for (FilterResult filterResult : filterResults) { if (filterResult.passed()) { this.passedFilterTypes.add(filterResult.getFilterType()); } else { this.failedFilterTypes.add(filterResult.getFilterType()); } } return this; } public VariantEvaluation build() { if (chromosomeName == null) { chromosomeName = buildChromosomeName(chr); } if (variantContext == null) { // We don't check that the variant context agrees with the coordinates here as the variant context could // have been split into different allelic variants so the positions and alleles could differ. variantContext = buildVariantContext(chr, pos, ref, alt, phredScore); } return new VariantEvaluation(this); } /** * @return a generic one-based position variant context with a heterozygous genotype having no attributes. */ private VariantContext buildVariantContext(int chr, int pos, String ref, String alt, double qual) { Allele refAllele = Allele.create(ref, true); Allele altAllele = Allele.create(alt); List<Allele> alleles = Arrays.asList(refAllele, altAllele); VariantContextBuilder vcBuilder = new VariantContextBuilder(); // build Genotype GenotypeBuilder gtBuilder = new GenotypeBuilder("sample").noAttributes(); //default to HETEROZYGOUS gtBuilder.alleles(alleles); // build VariantContext vcBuilder.loc("chr" + chr, pos, pos - 1L + ref.length()); vcBuilder.alleles(alleles); vcBuilder.genotypes(gtBuilder.make()); vcBuilder.log10PError(-0.1 * qual); return vcBuilder.make(); } } }
package org.opennars.inference;

import org.opennars.control.DerivationContext;
import org.opennars.entity.*;
import org.opennars.io.Symbols;
import org.opennars.language.*;
import org.opennars.main.Parameters;

import java.util.HashMap;
import java.util.Map;

import static org.opennars.inference.TruthFunctions.*;
import static org.opennars.language.Terms.reduceComponents;

/**
 * Compound term composition and decomposition rules, with two premises.
 * <p>
 * New compound terms are introduced only in forward inference, while
 * decompositional rules are also used in backward inference
 */
public final class CompositionalRules {

    /**
     * {&lt;S ==&gt; M&gt;, &lt;P ==&gt; M&gt;} |- {&lt;(S|P) ==&gt; M&gt;, &lt;(S&amp;P) ==&gt; M&gt;, &lt;(S-P) ==&gt;
     * M&gt;, &lt;(P-S) ==&gt; M&gt;}
     *
     * @param taskContent   The first premise (statement from the task)
     * @param beliefContent The second premise (statement from the belief)
     * @param index         The location of the shared term: 0 for subject, 1 for predicate
     * @param nal           Reference to the derivation context / memory
     */
    static void composeCompound(final Statement taskContent, final Statement beliefContent, final int index, final DerivationContext nal) {
        // composition only applies to judgments over statements of the same copula type
        if ((!nal.getCurrentTask().sentence.isJudgment()) || (taskContent.getClass() != beliefContent.getClass())) {
            return;
        }
        // componentT/componentB: the non-shared terms; componentCommon: the shared term at `index`
        final Term componentT = taskContent.term[1 - index];
        final Term componentB = beliefContent.term[1 - index];
        final Term componentCommon = taskContent.term[index];
        final int order1 = taskContent.getTemporalOrder();
        final int order2 = beliefContent.getTemporalOrder();
        final int order = TemporalRules.composeOrder(order1, order2);
        if (order == TemporalRules.ORDER_INVALID) {
            return;
        }
        // if one non-shared component subsumes the other, decompose instead of composing
        if ((componentT instanceof CompoundTerm) && ((CompoundTerm) componentT).containsAllTermsOf(componentB)) {
            decomposeCompound((CompoundTerm) componentT, componentB, componentCommon, index, true, order, nal);
            return;
        } else if ((componentB instanceof CompoundTerm) && ((CompoundTerm) componentB).containsAllTermsOf(componentT)) {
            decomposeCompound((CompoundTerm) componentB, componentT, componentCommon, index, false, order, nal);
            return;
        }
        final TruthValue truthT = nal.getCurrentTask().sentence.truth;
        final TruthValue truthB = nal.getCurrentBelief().truth;
        final TruthValue truthOr = union(truthT, truthB);
        final TruthValue truthAnd = intersection(truthT, truthB);
        TruthValue truthDif = null;
        Term termOr = null;
        Term termAnd = null;
        Term termDif = null;
        if (index == 0) {
            // shared subject: compose over the predicates
            if (taskContent instanceof Inheritance) {
                termOr = IntersectionInt.make(componentT, componentB);
                termAnd = IntersectionExt.make(componentT, componentB);
                // a difference term is only introduced when exactly one premise is negative
                if (truthB.isNegative()) {
                    if (!truthT.isNegative()) {
                        termDif = DifferenceExt.make(componentT, componentB);
                        truthDif = intersection(truthT, negation(truthB));
                    }
                } else if (truthT.isNegative()) {
                    termDif = DifferenceExt.make(componentB, componentT);
                    truthDif = intersection(truthB, negation(truthT));
                }
            } else if (taskContent instanceof Implication) {
                termOr = Disjunction.make(componentT, componentB);
                termAnd = Conjunction.make(componentT, componentB);
            }
            processComposed(taskContent, componentCommon, termOr, order, truthOr, nal);
            processComposed(taskContent, componentCommon, termAnd, order, truthAnd, nal);
            processComposed(taskContent, componentCommon, termDif, order, truthDif, nal);
        } else {    // index == 1
            // shared predicate: compose over the subjects (note ext/int roles swap relative to index == 0)
            if (taskContent instanceof Inheritance) {
                termOr = IntersectionExt.make(componentT, componentB);
                termAnd = IntersectionInt.make(componentT, componentB);
                if (truthB.isNegative()) {
                    if (!truthT.isNegative()) {
                        termDif = DifferenceInt.make(componentT, componentB);
                        truthDif = intersection(truthT, negation(truthB));
                    }
                } else if (truthT.isNegative()) {
                    termDif = DifferenceInt.make(componentB, componentT);
                    truthDif = intersection(truthB, negation(truthT));
                }
            } else if (taskContent instanceof Implication) {
                termOr = Conjunction.make(componentT, componentB);
                termAnd = Disjunction.make(componentT, componentB);
            }
            processComposed(taskContent, termOr, componentCommon, order, truthOr, nal);
            processComposed(taskContent, termAnd, componentCommon, order, truthAnd, nal);
            processComposed(taskContent, termDif, componentCommon, order, truthDif, nal);
        }
    }

    /**
     * Finish composing implication term: builds the new statement and submits it as a
     * double-premise task. Silently drops null terms and results equal to a premise.
     *
     * @param statement Type/template of the new content (copula and temporal order source)
     * @param subject   Subject of the new content
     * @param predicate Predicate of the new content
     * @param order     Temporal order of the new content
     * @param truth     TruthValue of the new content
     * @param nal       Reference to the derivation context / memory
     */
    private static void processComposed(final Statement statement, final Term subject, final Term predicate, final int order, final TruthValue truth, final DerivationContext nal) {
        if ((subject == null) || (predicate == null)) {
            return;
        }
        final Term content = Statement.make(statement, subject, predicate, order);
        if ((content == null) || statement == null || content.equals(statement) || content.equals(nal.getCurrentBelief().term)) {
            return;
        }
        final BudgetValue budget = BudgetFunctions.compoundForward(truth, content, nal);
        nal.doublePremiseTask(content, truth, budget, false, false); //(allow overlap) but not needed here, isn't detachment, this one would be even problematic from control perspective because its composition
    }

    /**
     * {&lt;(S|P) ==&gt; M&gt;, &lt;P ==&gt; M&gt;} |- &lt;S ==&gt; M&gt;
     *
     * @param compound     The compound term to be decomposed
     * @param component    The part of the compound to be removed
     * @param term1        The other (shared) term in the content
     * @param index        The location of the shared term: 0 for subject, 1 for predicate
     * @param compoundTask Whether the compound comes from the task (affects truth-value order)
     * @param order        Temporal order for the derived statement
     * @param nal          Reference to the derivation context / memory
     */
    private static void decomposeCompound(final CompoundTerm compound, final Term component, final Term term1, final int index, final boolean compoundTask, final int order, final DerivationContext nal) {
        if ((compound instanceof Statement) || (compound instanceof ImageExt) || (compound instanceof ImageInt)) {
            return;
        }
        Term term2 = reduceComponents(compound, component, nal.mem());
        if (term2 == null) {
            return;
        }
        // strip leading Interval components from a conjunction, accumulating their total duration in delta
        long delta = 0;
        while ((term2 instanceof Conjunction) && (((CompoundTerm) term2).term[0] instanceof Interval)) {
            final Interval interval = (Interval) ((CompoundTerm) term2).term[0];
            delta += interval.time;
            term2 = ((CompoundTerm)term2).setComponent(0, null, nal.mem());
        }
        final Task task = nal.getCurrentTask();
        final Sentence sentence = task.sentence;
        final Sentence belief = nal.getCurrentBelief();
        final Statement oldContent = (Statement) task.getTerm();
        // v1 is always the truth of the premise holding the compound, v2 the other premise
        final TruthValue v1;
        final TruthValue v2;
        if (compoundTask) {
            v1 = sentence.truth;
            v2 = belief.truth;
        } else {
            v1 = belief.truth;
            v2 = sentence.truth;
        }
        TruthValue truth = null;
        final Term content;
        if (index == 0) {
            content = Statement.make(oldContent, term1, term2, order);
            if (content == null) {
                return;
            }
            // choose the truth function by copula and compound type (dual of the index == 1 branch)
            if (oldContent instanceof Inheritance) {
                if (compound instanceof IntersectionExt) {
                    truth = reduceConjunction(v1, v2);
                } else if (compound instanceof IntersectionInt) {
                    truth = reduceDisjunction(v1, v2);
                } else if ((compound instanceof SetInt) && (component instanceof SetInt)) {
                    truth = reduceConjunction(v1, v2);
                } else if ((compound instanceof SetExt) && (component instanceof SetExt)) {
                    truth = reduceDisjunction(v1, v2);
                } else if (compound instanceof DifferenceExt) {
                    if (compound.term[0].equals(component)) {
                        truth = reduceDisjunction(v2, v1);
                    } else {
                        truth = reduceConjunctionNeg(v1, v2);
                    }
                }
            } else if (oldContent instanceof Implication) {
                if (compound instanceof Conjunction) {
                    truth = reduceConjunction(v1, v2);
                } else if (compound instanceof Disjunction) {
                    truth = reduceDisjunction(v1, v2);
                }
            }
        } else {
            content = Statement.make(oldContent, term2, term1, order);
            if (content == null) {
                return;
            }
            if (oldContent instanceof Inheritance) {
                if (compound instanceof IntersectionInt) {
                    truth = reduceConjunction(v1, v2);
                } else if (compound instanceof IntersectionExt) {
                    truth = reduceDisjunction(v1, v2);
                } else if ((compound instanceof SetExt) && (component instanceof SetExt)) {
                    truth = reduceConjunction(v1, v2);
                } else if ((compound instanceof SetInt) && (component instanceof SetInt)) {
                    truth = reduceDisjunction(v1, v2);
                } else if (compound instanceof DifferenceInt) {
                    if (compound.term[1].equals(component)) {
                        truth = reduceDisjunction(v2, v1);
                    } else {
                        truth = reduceConjunctionNeg(v1, v2);
                    }
                }
            } else if (oldContent instanceof Implication) {
                if (compound instanceof Disjunction) {
                    truth = reduceConjunction(v1, v2);
                } else if (compound instanceof Conjunction) {
                    truth = reduceDisjunction(v1, v2);
                }
            }
        }
        if (truth != null) {
            final BudgetValue budget = BudgetFunctions.compoundForward(truth, content, nal);
            // shift the occurrence time by the stripped interval duration, unless the task is eternal
            if (delta != 0) {
                long baseTime = task.sentence.getOccurenceTime();
                if (baseTime != Stamp.ETERNAL) {
                    baseTime += delta;
                    nal.getTheNewStamp().setOccurrenceTime(baseTime);
                }
            }
            nal.doublePremiseTask(content, truth, budget, false, true); //(allow overlap), a form of detachment
        }
    }

    /**
     * {(||, S, P), P} |- S {(&amp;&amp;, S, P), P} |- S
     *
     * @param compound     The compound term to be decomposed
     * @param component    The part of the compound to be removed
     * @param compoundTask Whether the compound comes from the task (affects truth-value order)
     * @param index        Position of the component within the compound
     * @param nal          Reference to the derivation context / memory
     */
    static void decomposeStatement(final CompoundTerm compound, final Term component, final boolean compoundTask, final int index, final DerivationContext nal) {
        // a "temporal" conjunction is a non-spatial one; forward-ordered ones are only decomposed at index 0
        final boolean isTemporalConjunction = (compound instanceof Conjunction) && !((Conjunction) compound).isSpatial;
        if (isTemporalConjunction && (compound.getTemporalOrder() == TemporalRules.ORDER_FORWARD) && (index != 0)) {
            return;
        }
        long occurrence_time = nal.getCurrentTask().sentence.getOccurenceTime();
        // for forward temporal conjunctions, shift the occurrence time by the following Interval (if any)
        if(isTemporalConjunction && (compound.getTemporalOrder() == TemporalRules.ORDER_FORWARD)) {
            if(!nal.getCurrentTask().sentence.isEternal() && compound.term[index + 1] instanceof Interval) {
                final long shift_occurrence = ((Interval)compound.term[index + 1]).time;
                occurrence_time = nal.getCurrentTask().sentence.getOccurenceTime() + shift_occurrence;
            }
        }
        final Task task = nal.getCurrentTask();
        final Sentence taskSentence = task.sentence;
        final Sentence belief = nal.getCurrentBelief();
        final Term content = reduceComponents(compound, component, nal.mem());
        if (content == null) {
            return;
        }
        TruthValue truth = null;
        BudgetValue budget;
        if (taskSentence.isQuestion() || taskSentence.isQuest()) {
            budget = BudgetFunctions.compoundBackward(content, nal);
            nal.getTheNewStamp().setOccurrenceTime(occurrence_time);
            nal.doublePremiseTask(content, truth, budget, false, false);
            // special inference to answer conjunctive questions with query variables
            if (taskSentence.term.hasVarQuery()) {
                final Concept contentConcept = nal.mem().concept(content);
                if (contentConcept == null) {
                    return;
                }
                final Sentence contentBelief = contentConcept.getBelief(nal, task);
                if (contentBelief == null) {
                    return;
                }
                final Task contentTask = new Task(contentBelief, task.budget, false);
                nal.setCurrentTask(contentTask);
                final Term conj = Conjunction.make(component, content);
                truth = intersection(contentBelief.truth, belief.truth);
                budget = BudgetFunctions.compoundForward(truth, conj, nal);
                nal.getTheNewStamp().setOccurrenceTime(occurrence_time);
                nal.doublePremiseTask(conj, truth, budget, false, false);
            }
            // NOTE(review): this branch falls through to the final doublePremiseTask below,
            // submitting `content` a second time (with truth possibly set by the query path) - confirm intended.
        } else {
            final TruthValue v1;
            final TruthValue v2;
            if (compoundTask) {
                v1 = taskSentence.truth;
                v2 = belief.truth;
            } else {
                v1 = belief.truth;
                v2 = taskSentence.truth;
            }
            if (compound instanceof Conjunction) {
                if (taskSentence.isGoal()) {
                    if (compoundTask) {
                        truth = intersection(v1, v2);
                    } else {
                        return;
                    }
                } else { // isJudgment
                    truth = reduceConjunction(v1, v2);
                }
            } else if (compound instanceof Disjunction) {
                if (taskSentence.isGoal()) {
                    if (compoundTask) {
                        truth = reduceConjunction(v2, v1);
                    } else {
                        return;
                    }
                } else {  // isJudgment
                    truth = reduceDisjunction(v1, v2);
                }
            } else {
                return;
            }
            budget = BudgetFunctions.compoundForward(truth, content, nal);
        }
        nal.getTheNewStamp().setOccurrenceTime(occurrence_time);
        nal.doublePremiseTask(content, truth, budget, false, false);
    }

    /**
     * Introduce a dependent variable in an outer-layer conjunction {<S --> P1>,
     * <S --> P2>} |- (&&, <#x --> P1>, <#x --> P2>)
     *
     * @param taskContent The first premise <M
--> S> * @param beliefContent The second premise <M --> P> * @param index The location of the shared term: 0 for subject, 1 for * predicate * @param nal Reference to the memory */ public static void introVarOuter(final Statement taskContent, final Statement beliefContent, final int index, final DerivationContext nal) { if (!(taskContent instanceof Inheritance)) { return; } final Variable varInd1 = new Variable("$varInd1"); final Variable varInd2 = new Variable("$varInd2"); Term term11dependent=null, term12dependent=null, term21dependent=null, term22dependent=null; Term term11, term12, term21, term22, commonTerm = null; final Map<Term, Term> subs = new HashMap<>(); if (index == 0) { term11 = varInd1; term21 = varInd1; term12 = taskContent.getPredicate(); term22 = beliefContent.getPredicate(); term12dependent=term12; term22dependent=term22; if (term12 instanceof ImageExt) { if ((/*(ImageExt)*/term12).containsTermRecursively(term22)) { commonTerm = term22; } if(commonTerm == null && term12 instanceof ImageExt) { commonTerm = ((ImageExt) term12).getTheOtherComponent(); if(!(term22.containsTermRecursively(commonTerm))) { commonTerm=null; } if (term22 instanceof ImageExt && ((commonTerm == null) || !(term22).containsTermRecursively(commonTerm))) { commonTerm = ((ImageExt) term22).getTheOtherComponent(); if ((commonTerm == null) || !(term12).containsTermRecursively(commonTerm)) { commonTerm = null; } } } if (commonTerm != null) { subs.put(commonTerm, varInd2); term12 = ((CompoundTerm) term12).applySubstitute(subs); if(!(term22 instanceof CompoundTerm)) { term22 = varInd2; } else { term22 = ((CompoundTerm) term22).applySubstitute(subs); } } } if (commonTerm==null && term22 instanceof ImageExt) { if ((/*(ImageExt)*/term22).containsTermRecursively(term12)) { commonTerm = term12; } if(commonTerm == null && term22 instanceof ImageExt) { commonTerm = ((ImageExt) term22).getTheOtherComponent(); if(!(term12.containsTermRecursively(commonTerm))) { commonTerm=null; } if (term12 
instanceof ImageExt && ((commonTerm == null) || !(term12).containsTermRecursively(commonTerm))) { commonTerm = ((ImageExt) term12).getTheOtherComponent(); if ((commonTerm == null) || !(term22).containsTermRecursively(commonTerm)) { commonTerm = null; } } } if (commonTerm != null) { subs.put(commonTerm, varInd2); term22 = ((CompoundTerm) term22).applySubstitute(subs); if(!(term12 instanceof CompoundTerm)) { term12 = varInd2; } else { term12 = ((CompoundTerm) term12).applySubstitute(subs); } } } } else { term11 = taskContent.getSubject(); term21 = beliefContent.getSubject(); term12 = varInd1; term22 = varInd1; term11dependent=term11; term21dependent=term21; if (term21 instanceof ImageInt) { if ((/*(ImageInt)*/term21).containsTermRecursively(term11)) { commonTerm = term11; } if(term11 instanceof ImageInt && commonTerm == null && term21 instanceof ImageInt) { commonTerm = ((ImageInt) term11).getTheOtherComponent(); if(!(term21.containsTermRecursively(commonTerm))) { commonTerm=null; } if ((commonTerm == null) || !(term21).containsTermRecursively(commonTerm)) { commonTerm = ((ImageInt) term21).getTheOtherComponent(); if ((commonTerm == null) || !(term11).containsTermRecursively(commonTerm)) { commonTerm = null; } } } if (commonTerm != null) { subs.put(commonTerm, varInd2); term21 = ((CompoundTerm) term21).applySubstitute(subs); if(!(term11 instanceof CompoundTerm)) { term11 = varInd2; } else { term11 = ((CompoundTerm) term11).applySubstitute(subs); } } } if (commonTerm==null && term11 instanceof ImageInt) { if ((/*(ImageInt)*/term11).containsTermRecursively(term21)) { commonTerm = term21; } if(term21 instanceof ImageInt && commonTerm == null && term11 instanceof ImageInt) { commonTerm = ((ImageInt) term21).getTheOtherComponent(); if(!(term11.containsTermRecursively(commonTerm))) { commonTerm=null; } if ((commonTerm == null) || !(term11).containsTermRecursively(commonTerm)) { commonTerm = ((ImageInt) term11).getTheOtherComponent(); if ((commonTerm == null) || 
!(term21).containsTermRecursively(commonTerm)) { commonTerm = null; } } } if (commonTerm != null) { subs.put(commonTerm, varInd2); term11 = ((CompoundTerm) term11).applySubstitute(subs); if(!(term21 instanceof CompoundTerm)) { term21 = varInd2; } else { term21 = ((CompoundTerm) term21).applySubstitute(subs); } } } } Statement state1 = Inheritance.make(term11, term12); Statement state2 = Inheritance.make(term21, term22); Term content = Implication.make(state1, state2); if (content == null) { return; } final TruthValue truthT = nal.getCurrentTask().sentence.truth; final TruthValue truthB = nal.getCurrentBelief().truth; if ((truthT == null) || (truthB == null)) { if(Parameters.DEBUG) { System.out.println("ERROR: Belief with null truth value. (introVarOuter)"); } return; } TruthValue truth = induction(truthT, truthB); BudgetValue budget = BudgetFunctions.compoundForward(truth, content, nal); nal.doublePremiseTask(content, truth, budget, false, false); content = Implication.make(state2, state1); truth = induction(truthB, truthT); budget = BudgetFunctions.compoundForward(truth, content, nal); nal.doublePremiseTask(content, truth, budget, false, false); content = Equivalence.make(state1, state2); truth = comparison(truthT, truthB); budget = BudgetFunctions.compoundForward(truth, content, nal); nal.doublePremiseTask(content, truth, budget, false, false); final Variable varDep = new Variable("#varDep"); if (index == 0) { state1 = Inheritance.make(varDep, term12dependent); state2 = Inheritance.make(varDep, term22dependent); } else { state1 = Inheritance.make(term11dependent, varDep); state2 = Inheritance.make(term21dependent, varDep); } if ((state1==null) || (state2 == null)) return; content = Conjunction.make(state1, state2); truth = intersection(truthT, truthB); budget = BudgetFunctions.compoundForward(truth, content, nal); nal.doublePremiseTask(content, truth, budget, false, false); } /** * {<M --> S>, <C ==> <M --> P>>} |- <(&&, <#x --> S>, C) ==> <#x --> P>> * {<M --> 
S>, (&&, C, <M --> P>)} |- (&&, C, <<#x --> S> ==> <#x --> P>>) * * @param taskContent The first premise directly used in internal induction, * <M --> S> * @param beliefContent The componentCommon to be used as a premise in * internal induction, <M --> P> * @param oldCompound The whole contentInd of the first premise, Implication * or Conjunction * @param nal Reference to the memory */ static boolean introVarInner(final Statement premise1, final Statement premise2, final CompoundTerm oldCompound, final DerivationContext nal) { final Task task = nal.getCurrentTask(); final Sentence taskSentence = task.sentence; if (!taskSentence.isJudgment() || (premise1.getClass() != premise2.getClass()) || oldCompound.containsTerm(premise1)) { return false; } final Term subject1 = premise1.getSubject(); final Term subject2 = premise2.getSubject(); final Term predicate1 = premise1.getPredicate(); final Term predicate2 = premise2.getPredicate(); final Term commonTerm1; final Term commonTerm2; if (subject1.equals(subject2)) { commonTerm1 = subject1; commonTerm2 = secondCommonTerm(predicate1, predicate2, 0); } else if (predicate1.equals(predicate2)) { commonTerm1 = predicate1; commonTerm2 = secondCommonTerm(subject1, subject2, 0); } else { return false; } final Sentence belief = nal.getCurrentBelief(); final Map<Term, Term> substitute = new HashMap<>(); boolean b1 = false, b2 = false; { final Variable varDep2 = new Variable("#varDep2"); Term content = Conjunction.make(premise1, oldCompound); if (!(content instanceof CompoundTerm)) return false; substitute.put(commonTerm1, varDep2); content = ((CompoundTerm)content).applySubstitute(substitute); final TruthValue truth = intersection(taskSentence.truth, belief.truth); final BudgetValue budget = BudgetFunctions.forward(truth, nal); b1 = (nal.doublePremiseTask(content, truth, budget, false, false))!=null; } substitute.clear(); { final Variable varInd1 = new Variable("$varInd1"); final Variable varInd2 = new Variable("$varInd2"); 
substitute.put(commonTerm1, varInd1); if (commonTerm2 != null) { substitute.put(commonTerm2, varInd2); } Term content = Implication.make(premise1, oldCompound); if ((content == null) || (!(content instanceof CompoundTerm))) { return false; } content = ((CompoundTerm)content).applySubstituteToCompound(substitute); final TruthValue truth; if (premise1.equals(taskSentence.term)) { truth = induction(belief.truth, taskSentence.truth); } else { truth = induction(taskSentence.truth, belief.truth); } final BudgetValue budget = BudgetFunctions.forward(truth, nal); b2 = nal.doublePremiseTask(content, truth, budget, false, false)!=null; } return b1 || b2; } /** * Introduce a second independent variable into two terms with a common * component * * @param term1 The first term * @param term2 The second term * @param index The index of the terms in their statement */ private static Term secondCommonTerm(final Term term1, final Term term2, final int index) { Term commonTerm = null; if (index == 0) { if ((term1 instanceof ImageExt) && (term2 instanceof ImageExt)) { commonTerm = ((ImageExt) term1).getTheOtherComponent(); if ((commonTerm == null) || !term2.containsTermRecursively(commonTerm)) { commonTerm = ((ImageExt) term2).getTheOtherComponent(); if ((commonTerm == null) || !term1.containsTermRecursively(commonTerm)) { commonTerm = null; } } } } else if ((term1 instanceof ImageInt) && (term2 instanceof ImageInt)) { commonTerm = ((ImageInt) term1).getTheOtherComponent(); if ((commonTerm == null) || !term2.containsTermRecursively(commonTerm)) { commonTerm = ((ImageInt) term2).getTheOtherComponent(); if ((commonTerm == null) || !term1.containsTermRecursively(commonTerm)) { commonTerm = null; } } } return commonTerm; } public static void eliminateVariableOfConditionAbductive(final int figure, final Sentence sentence, final Sentence belief, final DerivationContext nal) { Statement T1 = (Statement) sentence.term; Statement T2 = (Statement) belief.term; Term S1 = T2.getSubject(); Term S2 
= T1.getSubject(); Term P1 = T2.getPredicate(); Term P2 = T1.getPredicate(); final Map<Term, Term> res1 = new HashMap<>(); final Map<Term, Term> res2 = new HashMap<>(); final Map<Term, Term> res3 = new HashMap<>(); final Map<Term, Term> res4 = new HashMap<>(); if (figure == 21) { res1.clear(); res2.clear(); Variables.findSubstitute(Symbols.VAR_INDEPENDENT, P1, S2, res1, res2); //this part is T1 = (Statement) T1.applySubstitute(res2); //independent, the rule works if it unifies if(T1==null) { return; } T2 = (Statement) T2.applySubstitute(res1); if(T2==null) { return; } S1 = T2.getSubject(); S2 = T1.getSubject(); P1 = T2.getPredicate(); P2 = T1.getPredicate(); //update the variables because T1 and T2 may have changed if (S1 instanceof Conjunction) { //try to unify P2 with a component eliminateVariableOfConditionAbductiveTryUnification1(sentence, belief, nal, P2, (CompoundTerm) S1, res3, res4); } if (P2 instanceof Conjunction) { //try to unify S1 with a component eliminateVariableOfConditionAbductiveTryUnification1(sentence, belief, nal, S1, (CompoundTerm) P2, res3, res4); } } else if (figure == 12) { res1.clear(); res2.clear(); Variables.findSubstitute(Symbols.VAR_INDEPENDENT, S1, P2, res1, res2); //this part is T1 = (Statement) T1.applySubstitute(res2); //independent, the rule works if it unifies if(T1==null) { return; } T2 = (Statement) T2.applySubstitute(res1); if(T2==null) { return; } S1 = T2.getSubject(); S2 = T1.getSubject(); P1 = T2.getPredicate(); P2 = T1.getPredicate(); //update the variables because T1 and T2 may have changed if (S2 instanceof Conjunction) { //try to unify P1 with a component eliminateVariableOfConditionAbductiveTryUnification1(sentence, belief, nal, P1, (CompoundTerm) S2, res3, res4); } if (P1 instanceof Conjunction) { //try to unify S2 with a component eliminateVariableOfConditionAbductiveTryUnification1(sentence, belief, nal, S2, (CompoundTerm) P1, res3, res4); } } else if (figure == 11) { res1.clear(); res2.clear(); 
Variables.findSubstitute(Symbols.VAR_INDEPENDENT, S1, S2, res1, res2); //this part is T1 = (Statement) T1.applySubstitute(res2); //independent, the rule works if it unifies if(T1==null) { return; } T2 = (Statement) T2.applySubstitute(res1); if(T2==null) { return; } S1 = T2.getSubject(); S2 = T1.getSubject(); P1 = T2.getPredicate(); P2 = T1.getPredicate(); //update the variables because T1 and T2 may have changed if (P1 instanceof Conjunction) { //try to unify P2 with a component eliminateVariableOfConditionAbductiveTryUnification1(sentence, belief, nal, P2, (CompoundTerm) P1, res3, res4); } if (P2 instanceof Conjunction) { //try to unify P1 with a component eliminateVariableOfConditionAbductiveTryUnification1(sentence, belief, nal, P1, (CompoundTerm) P2, res3, res4); } } else if (figure == 22) { res1.clear(); res2.clear(); Variables.findSubstitute(Symbols.VAR_INDEPENDENT, P1, P2, res1, res2); //this part is T1 = (Statement) T1.applySubstitute(res2); //independent, the rule works if it unifies if(T1==null) { return; } T2 = (Statement) T2.applySubstitute(res1); if(T2==null) { return; } S1 = T2.getSubject(); S2 = T1.getSubject(); P1 = T2.getPredicate(); P2 = T1.getPredicate(); //update the variables because T1 and T2 may have changed if (S1 instanceof Conjunction) { //try to unify S2 with a component for (final Term s1 : ((CompoundTerm) S1).term) { res3.clear(); res4.clear(); //here the dependent part matters, see example of Issue40 if (Variables.findSubstitute(Symbols.VAR_DEPENDENT, s1, S2, res3, res4)) { for (Term s2 : ((CompoundTerm) S1).term) { if (!(s2 instanceof CompoundTerm)) { continue; } s2 = ((CompoundTerm) s2).applySubstitute(res3); if(s2==null || s2.hasVarIndep()) { continue; } if (s2!=null && !s2.equals(s1) && (sentence.truth != null) && (belief.truth != null)) { final TruthValue truth = abduction(sentence.truth, belief.truth); final BudgetValue budget = BudgetFunctions.compoundForward(truth, s2, nal); nal.doublePremiseTask(s2, truth, budget, false, 
false); } } } } } if (S2 instanceof Conjunction) { //try to unify S1 with a component for (final Term s1 : ((CompoundTerm) S2).term) { res3.clear(); res4.clear(); //here the dependent part matters, see example of Issue40 if (Variables.findSubstitute(Symbols.VAR_DEPENDENT, s1, S1, res3, res4)) { for (Term s2 : ((CompoundTerm) S2).term) { if (!(s2 instanceof CompoundTerm)) { continue; } s2 = ((CompoundTerm) s2).applySubstitute(res3); if(s2==null || s2.hasVarIndep()) { continue; } if (s2!=null && !s2.equals(s1) && (sentence.truth != null) && (belief.truth != null)) { final TruthValue truth = abduction(sentence.truth, belief.truth); final BudgetValue budget = BudgetFunctions.compoundForward(truth, s2, nal); nal.doublePremiseTask(s2, truth, budget, false, false); } } } } } } } private static void eliminateVariableOfConditionAbductiveTryUnification1(Sentence sentence, Sentence belief, DerivationContext nal, Term p1, CompoundTerm p2, Map<Term, Term> res3, Map<Term, Term> res4) { for (final Term s1 : p2.term) { res3.clear(); res4.clear(); //here the dependent part matters, see example of Issue40 if (Variables.findSubstitute(Symbols.VAR_DEPENDENT, s1, p1, res3, res4)) { eliminateVariableOfConditionAbductiveInner1(sentence, belief, nal, p2, res3, s1); } } } private static void eliminateVariableOfConditionAbductiveInner1(Sentence sentence, Sentence belief, DerivationContext nal, CompoundTerm s1, Map<Term, Term> res3, Term s12) { for (Term s2 : s1.term) { if (!(s2 instanceof CompoundTerm)) { continue; } s2 = ((CompoundTerm) s2).applySubstitute(res3); if(s2==null || s2.hasVarIndep()) { continue; } if (!s2.equals(s12) && (sentence.truth != null) && (belief.truth != null)) { final TruthValue truth = abduction(sentence.truth, belief.truth); final BudgetValue budget = BudgetFunctions.compoundForward(truth, s2, nal); nal.doublePremiseTask(s2, truth, budget, false, false); } } } static void IntroVarSameSubjectOrPredicate(final Sentence originalMainSentence, final Sentence 
subSentence, final Term component, final Term content, final int index, final DerivationContext nal) { final Term T1 = originalMainSentence.term; if (!(T1 instanceof CompoundTerm) || !(content instanceof CompoundTerm)) { return; } CompoundTerm T = (CompoundTerm) T1; CompoundTerm T2 = (CompoundTerm) content; if ((component instanceof Inheritance && content instanceof Inheritance) || (component instanceof Similarity && content instanceof Similarity)) { //CompoundTerm result = T; if (component.equals(content)) { return; //wouldnt make sense to create a conjunction here, would contain a statement twice } final Variable depIndVar1 = new Variable("#depIndVar1"); final Variable depIndVar2 = new Variable("#depIndVar2"); if (((Statement) component).getPredicate().equals(((Statement) content).getPredicate()) && !(((Statement) component).getPredicate() instanceof Variable)) { CompoundTerm zw = (CompoundTerm) T.term[index]; zw = (CompoundTerm) zw.setComponent(1, depIndVar1, nal.mem()); T2 = (CompoundTerm) T2.setComponent(1, depIndVar1, nal.mem()); final Conjunction res = (Conjunction) Conjunction.make(zw, T2); T = (CompoundTerm) T.setComponent(index, res, nal.mem()); } else if (((Statement) component).getSubject().equals(((Statement) content).getSubject()) && !(((Statement) component).getSubject() instanceof Variable)) { CompoundTerm zw = (CompoundTerm) T.term[index]; zw = (CompoundTerm) zw.setComponent(0, depIndVar2, nal.mem()); T2 = (CompoundTerm) T2.setComponent(0, depIndVar2, nal.mem()); final Conjunction res = (Conjunction) Conjunction.make(zw, T2); T = (CompoundTerm) T.setComponent(index, res, nal.mem()); } final TruthValue truth = induction(originalMainSentence.truth, subSentence.truth); final BudgetValue budget = BudgetFunctions.compoundForward(truth, T, nal); nal.doublePremiseTask(T, truth, budget, false, false); } } }
package com.yahoo.vespa.filedistribution;

import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ListenableFuture;
import com.yahoo.concurrent.DaemonThreadFactory;
import com.yahoo.config.FileReference;
import com.yahoo.jrt.ErrorCode;
import com.yahoo.jrt.Request;
import com.yahoo.jrt.StringValue;
import com.yahoo.log.LogLevel;
import com.yahoo.vespa.config.Connection;
import com.yahoo.vespa.config.ConnectionPool;

import java.io.File;
import java.time.Duration;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Logger;
import java.util.stream.Collectors;

/**
 * Downloads file reference using rpc requests to config server and keeps track of files being downloaded
 * <p>
 * Some methods are synchronized to make sure access to downloads is atomic
 *
 * @author hmusum
 */
// TODO: Handle shutdown of executors
class FileReferenceDownloader {

    private final static Logger log = Logger.getLogger(FileReferenceDownloader.class.getName());
    private final static Duration rpcTimeout = Duration.ofSeconds(10);

    // Pool that runs the (blocking) downloads; daemon threads so they do not block JVM exit.
    private final ExecutorService downloadExecutor =
            Executors.newFixedThreadPool(10, new DaemonThreadFactory("filereference downloader"));
    // Single thread that drains downloadQueue and hands work to downloadExecutor.
    private final ExecutorService readFromQueueExecutor =
            Executors.newFixedThreadPool(1, new DaemonThreadFactory("filereference download queue"));
    private final ConnectionPool connectionPool;
    private final ConcurrentLinkedQueue<FileReferenceDownload> downloadQueue = new ConcurrentLinkedQueue<>();
    // Downloads in progress, in insertion order. Guarded by synchronized methods on this.
    private final Map<FileReference, FileReferenceDownload> downloads = new LinkedHashMap<>();
    // Progress per file reference, 0.0 - 100.0.
    private final Map<FileReference, Double> downloadStatus = new HashMap<>();
    private final Duration downloadTimeout;
    private final FileReceiver fileReceiver;

    FileReferenceDownloader(File downloadDirectory, ConnectionPool connectionPool, Duration timeout) {
        log.log(LogLevel.DEBUG, "FileReferenceDownloader connection pool:\n" + connectionPool);
        this.connectionPool = connectionPool;
        this.downloadTimeout = timeout;
        readFromQueueExecutor.submit(this::readFromQueue);
        this.fileReceiver = new FileReceiver(connectionPool.getSupervisor(), this, downloadDirectory);
    }

    /**
     * Starts a download and waits for it to complete or time out.
     * Retries the initial rpc up to 10 times before giving up.
     *
     * NOTE(review): this method holds the monitor while blocking in future().get(),
     * so every other synchronized method is blocked for up to {@code timeout} —
     * works today because completedDownloading() is not synchronized, but worth revisiting.
     *
     * @return the downloaded file, or empty if the download could not be started
     */
    private synchronized Optional<File> startDownload(FileReference fileReference,
                                                      Duration timeout,
                                                      FileReferenceDownload fileReferenceDownload)
            throws ExecutionException, InterruptedException, TimeoutException {
        downloads.put(fileReference, fileReferenceDownload);
        setDownloadStatus(fileReference.value(), 0.0);
        int numAttempts = 0;
        boolean downloadStarted = false;
        do {
            if (startDownloadRpc(fileReference))
                downloadStarted = true;
            else
                Thread.sleep(100);
        } while (!downloadStarted && ++numAttempts <= 10); // TODO: How long/many times to retry?

        if (downloadStarted) {
            return fileReferenceDownload.future().get(timeout.toMillis(), TimeUnit.MILLISECONDS);
        } else {
            fileReferenceDownload.future().setException(
                    new RuntimeException("Failed getting file reference '" + fileReference.value() + "'"));
            downloads.remove(fileReference);
            return Optional.empty();
        }
    }

    synchronized void addToDownloadQueue(FileReferenceDownload fileReferenceDownload) {
        downloadQueue.add(fileReferenceDownload);
    }

    void receiveFile(FileReference fileReference, String filename, byte[] content, long xxHash) {
        fileReceiver.receiveFile(fileReference, filename, content, xxHash);
    }

    /** Returns the file references currently waiting in the queue, in queue order. */
    synchronized Set<FileReference> queuedDownloads() {
        return downloadQueue.stream()
                .map(FileReferenceDownload::fileReference)
                .collect(Collectors.toCollection(LinkedHashSet::new));
    }

    /**
     * Runs forever on readFromQueueExecutor: polls the queue and submits each
     * download to the download pool. Stops only if the thread is interrupted.
     */
    private void readFromQueue() {
        do {
            FileReferenceDownload fileReferenceDownload = downloadQueue.poll();
            if (fileReferenceDownload == null) {
                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    // FIX: previously swallowed; restore the interrupt flag and stop this reader
                    // so executor shutdown can actually terminate the thread.
                    Thread.currentThread().interrupt();
                    return;
                }
            } else {
                log.log(LogLevel.INFO, "Will download file reference '" + fileReferenceDownload.fileReference().value() + "'");
                downloadExecutor.submit(() ->
                        startDownload(fileReferenceDownload.fileReference(), downloadTimeout, fileReferenceDownload));
            }
        } while (true);
    }

    /** Called by FileReceiver when a file has been completely downloaded. */
    void completedDownloading(FileReference fileReference, File file) {
        if (downloads.containsKey(fileReference))
            downloads.get(fileReference).future().set(Optional.of(file));
        downloadStatus.put(fileReference, 100.0);
    }

    /**
     * Asks the current config server to serve the file reference.
     *
     * @return true if the server has the file and will start serving it
     */
    private boolean startDownloadRpc(FileReference fileReference) throws ExecutionException, InterruptedException {
        Connection connection = connectionPool.getCurrent();
        Request request = new Request("filedistribution.serveFile");
        request.parameters().add(new StringValue(fileReference.value()));

        execute(request, connection);
        if (validateResponse(request)) {
            log.log(LogLevel.DEBUG, "Request callback, OK. Req: " + request + "\nSpec: " + connection);
            if (request.returnValues().get(0).asInt32() == 0) {
                log.log(LogLevel.INFO, "Found file reference '" + fileReference.value() + "' available at " + connection.getAddress());
                return true;
            } else {
                log.log(LogLevel.INFO, "File reference '" + fileReference.value() + "' not found for " + connection.getAddress());
                connectionPool.setNewCurrentConnection();
                return false;
            }
        } else {
            log.log(LogLevel.WARNING, "Request failed. Req: " + request + "\nSpec: " + connection.getAddress() +
                    ", error code: " + request.errorCode());
            // FIX: the original condition was "isError() && code == CONNECTION || code == TIMEOUT",
            // which due to && binding tighter than || marked the connection as failed on TIMEOUT
            // even when the request was not an error. Parenthesized to match the intent.
            if (request.isError() &&
                    (request.errorCode() == ErrorCode.CONNECTION || request.errorCode() == ErrorCode.TIMEOUT)) {
                log.log(LogLevel.WARNING, "Setting error for connection " + connection.getAddress());
                connectionPool.setError(connection, request.errorCode());
            }
            return false;
        }
    }

    synchronized boolean isDownloading(FileReference fileReference) {
        return downloads.containsKey(fileReference);
    }

    /**
     * Adds a listener to a download in progress.
     * NOTE(review): throws NullPointerException if no download exists for
     * fileReference — callers appear to check isDownloading() first; confirm.
     */
    synchronized ListenableFuture<Optional<File>> addDownloadListener(FileReference fileReference, Runnable runnable) {
        FileReferenceDownload fileReferenceDownload = downloads.get(fileReference);
        fileReferenceDownload.future().addListener(runnable, downloadExecutor);
        return fileReferenceDownload.future();
    }

    private void execute(Request request, Connection connection) {
        connection.invokeSync(request, (double) rpcTimeout.getSeconds());
    }

    /** Returns true if the response has no error and the expected ("is") return types. */
    private boolean validateResponse(Request request) {
        if (request.isError()) {
            return false;
        } else if (request.returnValues().size() == 0) {
            return false;
        } else if (!request.checkReturnTypes("is")) { // TODO: Do not hard-code return type
            log.log(LogLevel.WARNING, "Invalid return types for response: " + request.errorMessage());
            return false;
        }
        return true;
    }

    double downloadStatus(String file) {
        return downloadStatus.getOrDefault(new FileReference(file), 0.0);
    }

    void setDownloadStatus(String file, double percentageDownloaded) {
        downloadStatus.put(new FileReference(file), percentageDownloaded);
    }

    Map<FileReference, Double> downloadStatus() {
        return ImmutableMap.copyOf(downloadStatus);
    }
}
package org.owasp.esapi.reference;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.log4j.Level;

import javax.servlet.http.HttpSession;

import org.owasp.esapi.ESAPI;
import org.owasp.esapi.LogFactory;
import org.owasp.esapi.Logger;
import org.owasp.esapi.User;

/**
 * Log4j-backed implementation of the ESAPI {@link LogFactory}. Caches one
 * {@link Logger} per class/module name and decorates every logged message with
 * the event type, current user, source address and a logging-specific session id.
 */
public class Log4JLogFactory implements LogFactory {

    private String applicationName;

    // FIX: was a raw, unsynchronized HashMap; loggers are requested from multiple
    // threads, so use a typed ConcurrentHashMap (keys are Class or String).
    private final Map<Object, Logger> loggersMap = new ConcurrentHashMap<Object, Logger>();

    /**
     * Null argument constructor for this implementation of the LogFactory interface
     * needed for dynamic configuration.
     */
    public Log4JLogFactory() {}

    /**
     * Constructor for this implementation of the LogFactory interface.
     *
     * @param applicationName The name of this application this logger is being constructed for.
     */
    public Log4JLogFactory(String applicationName) {
        this.applicationName = applicationName;
    }

    /**
     * {@inheritDoc}
     */
    public void setApplicationName(String newApplicationName) {
        applicationName = newApplicationName;
    }

    /**
     * {@inheritDoc}
     */
    @SuppressWarnings("unchecked")
    public Logger getLogger(Class clazz) {
        // If a logger for this class already exists, we return the same one, otherwise we create a new one.
        Logger classLogger = loggersMap.get(clazz);
        if (classLogger == null) {
            classLogger = new Log4JLogger(applicationName, clazz.getName());
            loggersMap.put(clazz, classLogger);
        }
        return classLogger;
    }

    /**
     * {@inheritDoc}
     */
    public Logger getLogger(String moduleName) {
        // If a logger for this module already exists, we return the same one, otherwise we create a new one.
        Logger moduleLogger = loggersMap.get(moduleName);
        if (moduleLogger == null) {
            moduleLogger = new Log4JLogger(applicationName, moduleName);
            loggersMap.put(moduleName, moduleLogger);
        }
        return moduleLogger;
    }

    private static class Log4JLogger implements org.owasp.esapi.Logger {

        /** The jlogger object used by this class to log everything. */
        private org.apache.log4j.Logger jlogger = null;

        /** The application name using this log. */
        private String applicationName = null;

        /** The module name using this log. */
        private String moduleName = null;

        /**
         * Public constructor should only ever be called via the appropriate LogFactory
         *
         * @param applicationName the application name
         * @param moduleName the module name
         */
        private Log4JLogger(String applicationName, String moduleName) {
            this.applicationName = applicationName;
            this.moduleName = moduleName;
            this.jlogger = org.apache.log4j.Logger.getLogger(applicationName + ":" + moduleName);
        }

        /**
         * {@inheritDoc}
         * Note: In this implementation, this change is not persistent,
         * meaning that if the application is restarted, the log level will revert to the level defined in the
         * ESAPI SecurityConfiguration properties file.
         */
        public void setLevel(int level) {
            try {
                jlogger.setLevel(convertESAPILeveltoLoggerLevel(level));
            } catch (IllegalArgumentException e) {
                this.error(Logger.SECURITY_FAILURE, "", e);
            }
        }

        /**
         * Converts an ESAPI logging level to the corresponding log4j {@link Level}.
         *
         * @throws IllegalArgumentException if the level is not a known ESAPI level
         */
        private static Level convertESAPILeveltoLoggerLevel(int level) {
            switch (level) {
                case Logger.OFF:     return Level.OFF;
                case Logger.FATAL:   return Level.FATAL;
                case Logger.ERROR:   return Level.ERROR;
                case Logger.WARNING: return Level.WARN;
                case Logger.INFO:    return Level.INFO;
                case Logger.DEBUG:   return Level.DEBUG;   //fine
                case Logger.TRACE:   return Level.TRACE;   //finest
                case Logger.ALL:     return Level.ALL;
                default: {
                    throw new IllegalArgumentException("Invalid logging level. Value was: " + level);
                }
            }
        }

        /** {@inheritDoc} */
        public void trace(EventType type, String message, Throwable throwable) {
            log(Level.TRACE, type, message, throwable);
        }

        /** {@inheritDoc} */
        public void trace(EventType type, String message) {
            log(Level.TRACE, type, message, null);
        }

        /** {@inheritDoc} */
        public void debug(EventType type, String message, Throwable throwable) {
            log(Level.DEBUG, type, message, throwable);
        }

        /** {@inheritDoc} */
        public void debug(EventType type, String message) {
            log(Level.DEBUG, type, message, null);
        }

        /** {@inheritDoc} */
        public void info(EventType type, String message) {
            log(Level.INFO, type, message, null);
        }

        /** {@inheritDoc} */
        public void info(EventType type, String message, Throwable throwable) {
            log(Level.INFO, type, message, throwable);
        }

        /** {@inheritDoc} */
        public void warning(EventType type, String message, Throwable throwable) {
            log(Level.WARN, type, message, throwable);
        }

        /** {@inheritDoc} */
        public void warning(EventType type, String message) {
            log(Level.WARN, type, message, null);
        }

        /** {@inheritDoc} */
        public void error(EventType type, String message, Throwable throwable) {
            log(Level.ERROR, type, message, throwable);
        }

        /** {@inheritDoc} */
        public void error(EventType type, String message) {
            log(Level.ERROR, type, message, null);
        }

        /** {@inheritDoc} */
        public void fatal(EventType type, String message, Throwable throwable) {
            log(Level.FATAL, type, message, throwable);
        }

        /** {@inheritDoc} */
        public void fatal(EventType type, String message) {
            log(Level.FATAL, type, message, null);
        }

        /**
         * Log the message after optionally encoding any special characters that might be dangerous when viewed
         * by an HTML based log viewer. Also encode any carriage returns and line feeds to prevent log
         * injection attacks. This logs all the supplied parameters plus the user ID, user's source IP, a logging
         * specific session ID, and the current date/time.
         *
         * It will only log the message if the current logging level is enabled, otherwise it will
         * discard the message.
         *
         * @param level the severity level of the security event
         * @param type the type of the event (SECURITY, FUNCTIONALITY, etc.)
         * @param message the message
         * @param throwable the throwable
         */
        private void log(Level level, EventType type, String message, Throwable throwable) {

            // Before we waste time preparing this event for the log, we check to see if it needs to be logged
            if (!jlogger.isEnabledFor(level)) return;

            User user = ESAPI.authenticator().getCurrentUser();

            // create a random session number for the user to represent the user's 'session', if it doesn't exist already
            String userSessionIDforLogging = "unknown";
            try {
                HttpSession session = ESAPI.httpUtilities().getCurrentRequest().getSession(false);
                userSessionIDforLogging = (String) session.getAttribute("ESAPI_SESSION");
                // if there is no session ID for the user yet, we create one and store it in the user's session
                if (userSessionIDforLogging == null) {
                    userSessionIDforLogging = "" + ESAPI.randomizer().getRandomInteger(0, 1000000);
                    session.setAttribute("ESAPI_SESSION", userSessionIDforLogging);
                }
            } catch (NullPointerException e) {
                // no current request/session available; keep "unknown"
            }

            // ensure there's something to log
            if (message == null) {
                message = "";
            }

            // ensure no CRLF injection into logs for forging records
            String clean = message.replace('\n', '_').replace('\r', '_');
            if (((DefaultSecurityConfiguration) ESAPI.securityConfiguration()).getLogEncodingRequired()) {
                // FIX: previously encoded the raw `message`, discarding the CRLF scrub above
                // and re-opening the log-injection hole; encode the scrubbed value instead.
                String encoded = ESAPI.encoder().encodeForHTML(clean);
                if (!clean.equals(encoded)) {
                    encoded += " (Encoded)";
                }
                clean = encoded;
            }

            // create the message to log
            String msg;
            if (user != null && type != null) {
                msg = type + " " + user.getAccountName() + ":" + user.getAccountId() + "@"
                        + user.getLastHostAddress() + ":" + userSessionIDforLogging + " " + clean;
            } else {
                // FIX: previously the cleaned message was silently dropped when there was
                // no current user or event type; log it without the user prefix instead.
                msg = (type != null ? type + " " : "") + clean;
            }

            boolean logAppName = ((DefaultSecurityConfiguration) ESAPI.securityConfiguration()).getLogApplicationName();
            boolean logServerIP = ((DefaultSecurityConfiguration) ESAPI.securityConfiguration()).getLogServerIP();

            if (!logServerIP) {
                if (logAppName) {
                    jlogger.log(level, applicationName + " " + moduleName + " " + msg, throwable);
                } else { //!logAppName
                    jlogger.log(level, moduleName + " " + msg, throwable);
                }
            } else { //logServerIP
                if (logAppName) {
                    jlogger.log(level, applicationName + ":" + ESAPI.currentRequest().getServerName() + ":"
                            + ESAPI.currentRequest().getLocalPort() + " " + moduleName + " " + msg, throwable);
                } else { //!logAppName
                    jlogger.log(level, ESAPI.currentRequest().getServerName() + ":"
                            + ESAPI.currentRequest().getLocalPort() + " " + moduleName + " " + msg, throwable);
                }
            }
        }

        /** {@inheritDoc} */
        public boolean isDebugEnabled() {
            return jlogger.isEnabledFor(Level.DEBUG);
        }

        /** {@inheritDoc} */
        public boolean isErrorEnabled() {
            return jlogger.isEnabledFor(Level.ERROR);
        }

        /** {@inheritDoc} */
        public boolean isFatalEnabled() {
            return jlogger.isEnabledFor(Level.FATAL);
        }

        /** {@inheritDoc} */
        public boolean isInfoEnabled() {
            return jlogger.isEnabledFor(Level.INFO);
        }

        /** {@inheritDoc} */
        public boolean isTraceEnabled() {
            return jlogger.isEnabledFor(Level.TRACE);
        }

        /** {@inheritDoc} */
        public boolean isWarningEnabled() {
            return jlogger.isEnabledFor(Level.WARN);
        }
    }
}
package org.sipfoundry.sipxbridge;

import gov.nist.javax.sip.DialogTimeoutEvent;
import gov.nist.javax.sip.SipListenerExt;
import gov.nist.javax.sip.SipStackExt;
import gov.nist.javax.sip.TransactionExt;

import java.util.Collection;
import java.util.Iterator;

import javax.sip.ClientTransaction;
import javax.sip.Dialog;
import javax.sip.DialogState;
import javax.sip.DialogTerminatedEvent;
import javax.sip.IOExceptionEvent;
import javax.sip.RequestEvent;
import javax.sip.ResponseEvent;
import javax.sip.ServerTransaction;
import javax.sip.SipListener;
import javax.sip.SipProvider;
import javax.sip.TimeoutEvent;
import javax.sip.Transaction;
import javax.sip.TransactionAlreadyExistsException;
import javax.sip.TransactionState;
import javax.sip.TransactionTerminatedEvent;
import javax.sip.address.SipURI;
import javax.sip.address.Hop;
import javax.sip.header.CSeqHeader;
import javax.sip.header.ContactHeader;
import javax.sip.header.FromHeader;
import javax.sip.header.ProxyAuthorizationHeader;
import javax.sip.header.ToHeader;
import javax.sip.header.ViaHeader;
import javax.sip.message.Request;
import javax.sip.message.Response;

import org.apache.log4j.Logger;

/**
 * This is the JAIN-SIP listener that fields all request and response events
 * from the stack.
 *
 * @author M. Ranganathan
 */
public class SipListenerImpl implements SipListenerExt {

    private static Logger logger = Logger.getLogger(SipListenerImpl.class);

    /**
     * Handles a 401/407 authentication challenge received in a response.
     *
     * Challenges arriving on the LAN side are forwarded to the WAN peer (xx-6663);
     * challenges from an ITSP are either answered via the authentication helper,
     * forwarded to the caller (dummy accounts without a password), or cause the
     * account to be marked AUTHENTICATION_FAILED after repeated failures.
     *
     * @param responseEvent the response event carrying the 401/407 response
     * @throws Exception on any SIP stack failure while re-sending or responding
     */
    private static void handleAuthenticationChallenge(
            ResponseEvent responseEvent) throws Exception {

        SipProvider provider = (SipProvider) responseEvent.getSource();

        Dialog dialog = responseEvent.getDialog();

        int statusCode = responseEvent.getResponse().getStatusCode();

        ClientTransaction ctx = responseEvent.getClientTransaction();

        // A challenge without a client transaction cannot be correlated; drop it.
        if (ctx == null) {
            logger.debug("Dropping response");
            return;
        }

        TransactionContext transactionContext = TransactionContext.get(ctx);

        /*
         * Challenge from LAN side. Forward it to the WAN.
         */
        if (provider == Gateway.getLanProvider()) {
            /*
             * By default, we do not handle LAN originated challenges unless the inbound domain is the
             * same as the sipx domain -- in which case sipx will challenge us and we will forward that
             * challenge.
             * xx-6663: Forward authentication challenges.
             */
            ServerTransaction stx = ((TransactionContext) responseEvent
                    .getClientTransaction().getApplicationData())
                    .getServerTransaction();
            if (stx != null && stx.getState() != TransactionState.TERMINATED) {
                /*
                 * Tear down the Back to back user agent immediately.
                 */
                BackToBackUserAgent backToBackUserAgent = DialogContext.getBackToBackUserAgent(dialog);
                if (backToBackUserAgent != null
                        && transactionContext.getOperation() == Operation.SEND_INVITE_TO_SIPX_PROXY) {
                    backToBackUserAgent.tearDownNow();
                }
                /*
                 * Forward it to the peer. Maybe he knows how to handle the challenge and if not
                 * he will hang up the call.
                 */
                Response errorResponse = SipUtilities.createResponse(stx, statusCode);
                SipUtilities.copyHeaders(responseEvent.getResponse(), errorResponse);
                // Replace the Contact header with one pointing at our own provider.
                errorResponse.removeHeader(ContactHeader.NAME);
                ContactHeader cth = SipUtilities.createContactHeader(null,
                        ((TransactionExt) stx).getSipProvider(),
                        SipUtilities.getViaTransport(errorResponse));
                errorResponse.setHeader(cth);
                if (TransactionContext.get(responseEvent.getClientTransaction()).getItspAccountInfo() == null
                        || TransactionContext.get(responseEvent.getClientTransaction()).getItspAccountInfo().isGlobalAddressingUsed()) {
                    SipUtilities.setGlobalAddress(errorResponse);
                }
                stx.sendResponse(errorResponse);
            }
            return;
        }

        Response response = responseEvent.getResponse();
        CSeqHeader cseqHeader = (CSeqHeader) response
                .getHeader(CSeqHeader.NAME);

        if (responseEvent.getClientTransaction() == null
                || responseEvent.getClientTransaction().getApplicationData() == null) {
            logger.warn("Cannot process event : NullClientTransaction or NullTransactionContext");
            return;
        }

        /*
         * Note that we need to store a pointer in the TransactionContext
         * because REGISTER does not have a dialog.
         */
        ItspAccountInfo accountInfo = ((TransactionContext) responseEvent
                .getClientTransaction().getApplicationData())
                .getItspAccountInfo();

        String method = cseqHeader.getMethod();
        String callId = SipUtilities.getCallId(response);

        /*
         * If we find a non-dummy ITSP account then check to see if we have
         * exceeded the failure count. If we have exceeded that count then
         * we are done with this request.
         */
        ServerTransaction stx = TransactionContext.get(ctx).getServerTransaction();
        if (accountInfo != null
                && stx == null
                && (accountInfo.incrementFailureCount(callId) > 1 || accountInfo.getPassword() == null)) {

            /*
             * Got a 4xx response. Increment the failure count for the account
             * and mark it as AUTHENTICATION_FAILED
             */
            accountInfo.setState(AccountState.AUTHENTICATION_FAILED);
            if (logger.isDebugEnabled()) {
                logger
                        .debug("SipListenerImpl: could not authenticate with server. method = "
                                + method);
            }
            accountInfo.removeFailureCounter(callId);
            if (responseEvent.getDialog() != null) {
                BackToBackUserAgent b2bua = DialogContext
                        .getBackToBackUserAgent(responseEvent.getDialog());
                logger
                        .debug("Cannot authenticate request -- tearing down call");
                if (b2bua != null) {
                    b2bua.tearDown(Gateway.SIPXBRIDGE_USER,
                            ReasonCode.AUTHENTICATION_FAILURE,
                            "Could not authenticate request");
                }
            }
            // Raise the alarm at most once per account.
            if (!accountInfo.isAlarmSent()) {
                Gateway.getAlarmClient().raiseAlarm(
                        "SIPX_BRIDGE_AUTHENTICATION_FAILED",
                        accountInfo.getSipDomain());
                accountInfo.setAlarmSent(true);
            }
            return;
        } else if (accountInfo != null && accountInfo.getPassword() == null && stx != null) {
            /*
             * Forward the challenge back to the call originator if this is a dummy account we
             * created for purposes of bridging the call.
             */
            logger.debug("Forwarding challenge from WAN for dummy account");
            if (stx.getState() != TransactionState.TERMINATED) {
                Response errorResponse = SipUtilities.createResponse(stx, statusCode);
                SipUtilities.copyHeaders(responseEvent.getResponse(), errorResponse);
                errorResponse.removeHeader(ContactHeader.NAME);
                ContactHeader cth = SipUtilities.createContactHeader(null,
                        ((TransactionExt) stx).getSipProvider(),
                        SipUtilities.getViaTransport(errorResponse));
                errorResponse.setHeader(cth);
                stx.sendResponse(errorResponse);
                return;
            } else {
                logger.debug("Late arriving response for a dummy response -- ignoring. \n"
                        + "Could not find server transaction or server transaction is TERMINATED."
                        + "Discarding the response.");
                return;
            }
        }

        // Let the stack's authentication helper build and tag a re-originated request.
        // REGISTER requests are cached (0); everything else is not (-1).
        ClientTransaction newClientTransaction = Gateway
                .getAuthenticationHelper().handleChallenge(response,
                        responseEvent.getClientTransaction(), provider,
                        method.equals(Request.REGISTER) ? 0 : -1);

        TransactionContext tad = (TransactionContext) responseEvent
                .getClientTransaction().getApplicationData();
        tad.setClientTransaction(newClientTransaction);

        if (dialog == null) {
            /*
             * Out of dialog challenge ( REGISTER ).
             */
            newClientTransaction.sendRequest();
        } else {
            if (logger.isDebugEnabled()) {
                logger.debug("SipListenerImpl : dialog = " + dialog);
            }
            BackToBackUserAgent b2bua = DialogContext
                    .getBackToBackUserAgent(responseEvent.getDialog());
            if (b2bua != null) {
                DialogContext dialogApplicationData = (DialogContext) dialog.getApplicationData();

                // Re-home the dialog context onto the dialog of the re-originated request.
                DialogContext newDialogApplicationData = DialogContext.attach(b2bua,
                        newClientTransaction.getDialog(), newClientTransaction,
                        newClientTransaction.getRequest());
                b2bua.addDialog(newDialogApplicationData);
                if (newDialogApplicationData != dialogApplicationData) {
                    b2bua.removeDialog(dialog);
                    newDialogApplicationData.setPeerDialog(dialogApplicationData
                            .getPeerDialog());
                    newClientTransaction.getDialog().setApplicationData(
                            newDialogApplicationData);
                    newDialogApplicationData.setItspInfo(dialogApplicationData.getItspInfo());
                    /*
                     * Hook the application data pointer of the previous guy in the
                     * chain at us.
                     */
                    DialogContext peerDialogApplicationData = (DialogContext) dialogApplicationData
                            .getPeerDialog().getApplicationData();
                    peerDialogApplicationData.setPeerDialog(newClientTransaction
                            .getDialog());
                    newDialogApplicationData.setRtpSession(dialogApplicationData
                            .getRtpSession());
                    // Remember the Proxy-Authorization header so later in-dialog
                    // requests can reuse the credentials.
                    ProxyAuthorizationHeader pah = (ProxyAuthorizationHeader) newClientTransaction
                            .getRequest().getHeader(ProxyAuthorizationHeader.NAME);
                    newDialogApplicationData.setProxyAuthorizationHeader(pah);
                    if (logger.isDebugEnabled()) {
                        logger.debug("SipListenerImpl: New Dialog = "
                                + newClientTransaction.getDialog());
                    }
                }
            }

            if (dialog.getState() == DialogState.CONFIRMED) {
                /*
                 * In-DIALOG challenge. Re-INVITE was challenged.
                 */
                ToHeader toHeader = (ToHeader) newClientTransaction
                        .getRequest().getHeader(ToHeader.NAME);
                if (toHeader.getTag() != null) {
                    /*
                     * This check should not be necessary.
                     */
                    dialog.sendRequest(newClientTransaction);
                }
            } else {
                DialogContext dialogContext = DialogContext.get(newClientTransaction.getDialog());
                b2bua.addDialog(dialogContext);
                newClientTransaction.sendRequest();
            }
            DialogContext dialogContext = DialogContext.get(newClientTransaction.getDialog());
            if (!dialogContext.isSessionTimerStarted()) {
                dialogContext.startSessionTimer();
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * Handle a Dialog Terminated event. Cleans up all the resources associated
     * with a Dialog.
     *
     * @see javax.sip.SipListener#processDialogTerminated(javax.sip.DialogTerminatedEvent )
     */
    public void processDialogTerminated(DialogTerminatedEvent dte) {
        DialogContext dialogContext = DialogContext.get(dte.getDialog());
        if (dialogContext != null) {
            logger.debug("DialogTerminatedEvent: dialog created at " + dialogContext.getCreationPointStackTrace());
            logger.debug("DialogTerminatedEvent: dialog inserted at " + dialogContext.getInsertionPointStackTrace());
            logger.debug("DialogCreated by request: " + dialogContext.getRequest());
            dialogContext.cancelSessionTimer();
            BackToBackUserAgent b2bua = dialogContext.getBackToBackUserAgent();
            if (b2bua != null) {
                b2bua.removeDialog(dte.getDialog());
            }
        }
    }

    /** Logs an unexpected transport-level I/O failure; there is nothing to recover. */
    public void processIOException(IOExceptionEvent ioex) {
        logger.error("Got an unexpected IOException " + ioex.getHost() + ":"
                + ioex.getPort() + "/" + ioex.getTransport());
    }

    /*
     * (non-Javadoc)
     *
     * Routes an incoming request: rejects while initializing, enforces that
     * LAN INVITEs come from the sipXproxy, checks the ITSP account is enabled,
     * then dispatches to the call-control or registration manager.
     *
     * @see javax.sip.SipListener#processRequest(javax.sip.RequestEvent)
     */
    public void processRequest(RequestEvent requestEvent) {
        if (logger.isDebugEnabled()) {
            logger.debug("Gateway: got an incoming request " + requestEvent.getRequest());
        }
        Request request = requestEvent.getRequest();
        String method = request.getMethod();
        SipProvider provider = (SipProvider) requestEvent.getSource();
        ViaHeader viaHeader = (ViaHeader) request.getHeader(ViaHeader.NAME);
        try {
            if (Gateway.getState() == GatewayState.STOPPING) {
                logger.debug("Gateway is stopping -- returning");
                return;
            } else if (Gateway.getState() == GatewayState.INITIALIZING) {
                logger.debug("Rejecting request -- gateway is initializing");
                Response response = ProtocolObjects.messageFactory
                        .createResponse(Response.SERVICE_UNAVAILABLE, request);
                response
                        .setReasonPhrase("Gateway is initializing -- try later");
                ServerTransaction st = requestEvent.getServerTransaction();
                if (st == null) {
                    st = provider.getNewServerTransaction(request);
                }
                st.sendResponse(response);
                return;
            } else if (provider == Gateway.getLanProvider()
                    && method.equals(Request.INVITE)
                    && ((viaHeader.getReceived() != null && !Gateway
                            .isAddressFromProxy(viaHeader.getReceived()))
                            || (viaHeader.getReceived() == null && !Gateway
                                    .isAddressFromProxy(viaHeader.getHost())))) {
                /*
                 * Check to see that via header originated from proxy server.
                 */
                ServerTransaction st = requestEvent.getServerTransaction();
                if (st == null) {
                    st = provider.getNewServerTransaction(request);
                }
                Response forbidden = SipUtilities.createResponse(st,
                        Response.FORBIDDEN);
                forbidden
                        .setReasonPhrase("Request not issued from SIPX proxy server");
                st.sendResponse(forbidden);
                return;
            }

            /*
             * Find the ITSP account and check if enabled. If so, then proceed, otherwise
             * send an error and bail out here.
             */
            ItspAccountInfo itspAccount = null;
            if (provider == Gateway.getLanProvider()) {
                itspAccount = Gateway.getAccountManager().getAccount(request);
            } else {
                // WAN side: resolve the account from the topmost Via (default SIP port 5060).
                String viaHost = SipUtilities.getViaHost(request);
                int viaPort = SipUtilities.getViaPort(request);
                if (viaPort == -1) {
                    viaPort = 5060;
                }
                itspAccount = Gateway.getAccountManager().getItspAccount(viaHost, viaPort);
            }
            // ACK never gets a response, hence is exempt from the disabled-account check.
            if (!request.getMethod().equals(Request.ACK) && itspAccount != null
                    && !itspAccount.isEnabled()) {
                ServerTransaction st = requestEvent.getServerTransaction();
                if (st == null) {
                    st = provider.getNewServerTransaction(requestEvent.getRequest());
                }
                Response response = SipUtilities.createResponse(st, Response.SERVICE_UNAVAILABLE);
                response.setReasonPhrase("ITSP account is disabled");
                st.sendResponse(response);
                return;
            }

            if (method.equals(Request.INVITE) || method.equals(Request.ACK)
                    || method.equals(Request.CANCEL) || method.equals(Request.BYE)
                    || method.equals(Request.OPTIONS) || method.equals(Request.REFER)
                    || method.equals(Request.PRACK)) {
                Gateway.getCallControlManager().processRequest(requestEvent);
            } else if (method.equals(Request.REGISTER) && provider == Gateway.getLanProvider()) {
                Gateway.getRegistrationManager().proxyRegisterRequest(requestEvent, itspAccount);
            } else {
                // Anything else is refused with 405.
                try {
                    Response response = ProtocolObjects.messageFactory
                            .createResponse(Response.METHOD_NOT_ALLOWED, request);
                    ServerTransaction st = requestEvent.getServerTransaction();
                    if (st == null) {
                        st = provider.getNewServerTransaction(request);
                    }
                    st.sendResponse(response);
                } catch (TransactionAlreadyExistsException ex) {
                    logger.error("transaction already exists", ex);
                } catch (Exception ex) {
                    logger.error("unexpected exception", ex);
                    throw new SipXbridgeException("Unexpected exceptione", ex);
                }
            }
        } catch (TransactionAlreadyExistsException ex) {
            // Retransmission raced us creating the server transaction; safe to ignore.
            logger.error("transaction already exists", ex);
            return;
        } catch (Exception ex) {
            logger.error("Unexpected exception ", ex);
            throw new SipXbridgeException("Unexpected exceptione", ex);
        }
    }

    /*
     * (non-Javadoc)
     *
     * Routes an incoming response: drops protocol-violating 200 OKs, repairs
     * forked-dialog responses, handles 401/407 challenges, then dispatches to
     * the registration or call-control manager.
     *
     * @see javax.sip.SipListener#processResponse(javax.sip.ResponseEvent)
     */
    public void processResponse(ResponseEvent responseEvent) {

        if (Gateway.getState() == GatewayState.STOPPING) {
            logger.debug("Gateway is stopping -- returning");
            return;
        }

        Response response = responseEvent.getResponse();
        CSeqHeader cseqHeader = (CSeqHeader) response
                .getHeader(CSeqHeader.NAME);

        String method = cseqHeader.getMethod();
        Dialog dialog = responseEvent.getDialog();

        try {
            // A 200 OK to INVITE without a Contact is a protocol violation; kill the call.
            if (method.equals(Request.INVITE) && response.getStatusCode() == 200
                    && response.getHeader(ContactHeader.NAME) == null) {
                logger.debug("Dropping bad response");
                if (dialog != null && DialogContext.get(dialog) != null) {
                    DialogContext.get(dialog).getBackToBackUserAgent().tearDown("sipXbridge",
                            ReasonCode.PROTOCOL_ERROR,
                            "Protocol Error - 200 OK with no contact");
                } else if (dialog != null) {
                    dialog.delete();
                }
                return;
            }

            if (dialog != null && dialog.getApplicationData() == null
                    && method.equals(Request.INVITE)) {
                /*
                 * if the tx does not exist but the dialog does exist then this
                 * is a forked response
                 */
                SipProvider provider = (SipProvider) responseEvent.getSource();
                logger.debug("Forked dialog response detected.");
                String callId = SipUtilities.getCallId(response);
                BackToBackUserAgent b2bua = Gateway.getBackToBackUserAgentFactory().getBackToBackUserAgent(callId);

                /*
                 * Kill off the dialog if we cannot find a dialog context:
                 * ACK the 200 OK (per RFC 3261) and immediately BYE the leg.
                 */
                if (b2bua == null && response.getStatusCode() == Response.OK) {
                    Request ackRequest = dialog.createAck(cseqHeader
                            .getSeqNumber());
                    /* Cannot access the dialogContext here */
                    dialog.sendAck(ackRequest);
                    Request byeRequest = dialog.createRequest(Request.BYE);
                    ClientTransaction byeClientTransaction = provider
                            .getNewClientTransaction(byeRequest);
                    dialog.sendRequest(byeClientTransaction);
                    return;
                }
                /*
                 * This is a forked response. We need to find the original call
                 * leg and retrieve the original RTP session from that call leg.
                 * TODO : Each call leg must get its own RTP bridge so this code
                 * needs to be rewritten. For now, they all share the same bridge.
                 */
                String callLegId = SipUtilities.getCallLegId(response);
                for (Dialog sipDialog : b2bua.dialogTable) {
                    if (DialogContext.get(sipDialog).getCallLegId().equals(callLegId)
                            && DialogContext.get(sipDialog).rtpSession != null) {
                        DialogContext context = DialogContext.get(sipDialog);
                        Request request = context.getRequest();
                        DialogContext newContext = DialogContext.attach(b2bua, dialog,
                                context.getDialogCreatingTransaction(), request);
                        newContext.setRtpSession(context.getRtpSession());
                        /*
                         * At this point we only do one half of the association
                         * with the peer dialog. When the ACK is sent, the other
                         * half of the association is established.
                         */
                        newContext.setPeerDialog(context.getPeerDialog());
                        dialog.setApplicationData(newContext);
                        break;
                    }
                }
                /*
                 * Could not find the original dialog context.
                 * This means the fork response came in too late. Send BYE
                 * to that leg.
                 */
                if (dialog.getApplicationData() == null) {
                    logger.debug("callLegId = " + callLegId);
                    logger.debug("dialogTable = " + b2bua.dialogTable);
                    b2bua.tearDown(Gateway.SIPXBRIDGE_USER, ReasonCode.FORK_TIMED_OUT,
                            "Fork timed out");
                    return;
                }
            }

            /*
             * Handle proxy challenge.
             */
            if (response.getStatusCode() == Response.PROXY_AUTHENTICATION_REQUIRED
                    || response.getStatusCode() == Response.UNAUTHORIZED) {
                handleAuthenticationChallenge(responseEvent);
                return;
            }

            ItspAccountInfo accountInfo = null;
            if (responseEvent.getClientTransaction() != null
                    && ((TransactionContext) responseEvent
                            .getClientTransaction().getApplicationData()) != null) {
                accountInfo = ((TransactionContext) responseEvent.getClientTransaction()
                        .getApplicationData()).getItspAccountInfo();
            }

            String callId = SipUtilities.getCallId(response);

            /*
             * Garbage collect the failure counter if it exists.
             * NOTE(review): statusCode / 200 == 1 is true for 200..399 (2xx AND 3xx),
             * not only success responses -- confirm that redirects should also clear
             * the failure counter (statusCode / 100 == 2 would be 2xx only).
             */
            if (accountInfo != null && response.getStatusCode() / 200 == 1) {
                accountInfo.removeFailureCounter(callId);
            }

            if (method.equals(Request.REGISTER)) {
                Gateway.getRegistrationManager().processResponse(responseEvent);
            } else if (method.equals(Request.INVITE) || method.equals(Request.CANCEL)
                    || method.equals(Request.BYE) || method.equals(Request.REFER)
                    || method.equals(Request.OPTIONS)) {
                Gateway.getCallControlManager().processResponse(responseEvent);
            } else {
                logger.warn("dropping response " + method);
            }
        } catch (Exception ex) {
            logger.error("Unexpected error processing response >>>> " + response, ex);
            logger.error("cause = " + ex.getCause());
            if (dialog != null && DialogContext.get(dialog) != null) {
                DialogContext.get(dialog).getBackToBackUserAgent().tearDown();
            }
        }
    }

    /**
     * Remove state. Drop B2Bua structrue from our table so we will drop all
     * requests corresponding to this call in future.
     */
    public void processTimeout(TimeoutEvent timeoutEvent) {
        ClientTransaction ctx = timeoutEvent.getClientTransaction();
        try {
            if (ctx != null) {
                Request request = ctx.getRequest();
                if (request.getMethod().equals(Request.OPTIONS)) {
                    // OPTIONS keep-alive timed out -- the far end is gone; tear down.
                    ClientTransaction clientTransaction = timeoutEvent
                            .getClientTransaction();
                    Dialog dialog = clientTransaction.getDialog();
                    BackToBackUserAgent b2bua = DialogContext.get(dialog)
                            .getBackToBackUserAgent();
                    b2bua.tearDown(Gateway.SIPXBRIDGE_USER,
                            ReasonCode.SESSION_TIMER_ERROR,
                            "OPTIONS Session timer timed out.");
                } else if (request.getMethod().equals(Request.REGISTER)) {
                    Gateway.getRegistrationManager().processTimeout(
                            timeoutEvent);
                } else if (request.getMethod().equals(Request.BYE)) {
                    // BYE got no answer -- just delete the dialog.
                    // NOTE(review): b2bua is fetched but never used in this branch.
                    ClientTransaction clientTransaction = timeoutEvent
                            .getClientTransaction();
                    Dialog dialog = clientTransaction.getDialog();
                    BackToBackUserAgent b2bua = DialogContext.get(dialog)
                            .getBackToBackUserAgent();
                    dialog.delete();
                } else if (request.getMethod().equals(Request.INVITE)) {
                    /*
                     * If this is a refer request -- grab the MOH Dialog and
                     * kill it. Otherwise we are stuck with the MOH dialog.
                     */
                    BackToBackUserAgent b2bua = DialogContext.get(
                            ctx.getDialog()).getBackToBackUserAgent();
                    TransactionContext transactionContext = TransactionContext
                            .get(ctx);
                    if (transactionContext.getOperation() == Operation.SEND_INVITE_TO_SIPX_PROXY) {
                        b2bua.tearDown(Gateway.SIPXBRIDGE_USER,
                                ReasonCode.CALL_SETUP_ERROR,
                                "SipxProxy is down");
                    } else {
                        if (transactionContext.getOperation() == Operation.SEND_INVITE_TO_ITSP
                                || transactionContext.getOperation() == Operation.SPIRAL_BLIND_TRANSFER_INVITE_TO_ITSP) {
                            logger.debug("Timed sending request to ITSP -- trying alternate proxy");
                            if (ctx.getState() != TransactionState.TERMINATED) {
                                ctx.terminate();
                            }
                            /* Try another hop */
                            Collection<Hop> hops = transactionContext
                                    .getProxyAddresses();
                            if (hops == null || hops.size() == 0) {
                                // No alternatives left: release MOH and report 408 upstream.
                                b2bua.sendByeToMohServer();
                                TransactionContext txContext = TransactionContext.get(ctx);
                                if (txContext.getServerTransaction() != null
                                        && txContext.getServerTransaction().getState()
                                                != TransactionState.TERMINATED) {
                                    Response errorResponse = SipUtilities.createResponse(
                                            txContext.getServerTransaction(),
                                            Response.REQUEST_TIMEOUT);
                                    errorResponse.setReasonPhrase("ITSP Timed Out");
                                    SipUtilities.addSipFrag(errorResponse, "ITSP Domain : "
                                            + txContext.getItspAccountInfo().getProxyDomain());
                                    txContext.getServerTransaction().sendResponse(errorResponse);
                                }
                            } else {
                                /*
                                 * We have another hop to try. OK send it to the
                                 * other side.
                                 */
                                b2bua.resendInviteToItsp(timeoutEvent
                                        .getClientTransaction());
                            }
                        } else {
                            logger.debug("Timed out processing "
                                    + transactionContext.getOperation());
                            b2bua.sendByeToMohServer();
                        }
                    }
                }
            }
        } catch (Exception ex) {
            logger.error("Error processing timeout event", ex);
        }
    }

    /** No cleanup needed on transaction termination; contexts are reaped elsewhere. */
    public void processTransactionTerminated(TransactionTerminatedEvent tte) {
        logger.debug("Transaction terminated event");
    }

    /**
     * Dialog timed out (resending INVITE or sending BYE).
     */
    public void processDialogTimeout(DialogTimeoutEvent dialogTimeoutEvent) {
        DialogContext dialogContext = DialogContext.get(dialogTimeoutEvent.getDialog());
        try {
            String reason = dialogTimeoutEvent.getReason().toString();
            dialogContext.sendBye(true, reason);
        } catch (Exception ex) {
            logger.error("Exception sending BYE on timed out Dialog", ex);
        }
    }
}
package org.spongepowered.api.util.ban;

import org.spongepowered.api.entity.player.User;
import org.spongepowered.api.text.Text;

/**
 * Interface representing the contract of bans.
 *
 * <p>Provides a {@link BanBuilder} for fully customized bans and convenience
 * factory methods for indefinite user bans.</p>
 */
public interface BanFactory {

    /**
     * Obtains an instance of a {@link BanBuilder}.
     *
     * @return A new BanBuilder
     */
    BanBuilder builder();

    /**
     * Creates an indefinite ban on a user.
     *
     * @param user The user
     * @return The created ban
     */
    Ban of(User user);

    /**
     * Creates an indefinite ban with a reason on a user.
     *
     * @param user The user
     * @param reason The reason
     * @return The created ban
     */
    Ban of(User user, Text.Literal reason);

    /**
     * Checks if this is a flowerpot.
     *
     * <p>NOTE(review): this method looks like a copy-paste artifact from a block
     * interface and is unrelated to bans; confirm whether it can be deprecated
     * and removed. It is kept here because removing it would break all existing
     * implementations of this interface.</p>
     *
     * @return Whether this is a flowerpot
     */
    boolean isFlowerPot();
}
package com.mesosphere.sdk.helloworld.scheduler;

import com.google.protobuf.TextFormat;
import com.mesosphere.sdk.offer.ResourceUtils;
import com.mesosphere.sdk.scheduler.plan.*;
import com.mesosphere.sdk.scheduler.plan.strategy.SerialStrategy;
import com.mesosphere.sdk.scheduler.recovery.RecoveryPlanOverrider;
import com.mesosphere.sdk.scheduler.recovery.RecoveryPlanOverriderFactory;
import com.mesosphere.sdk.scheduler.recovery.RecoveryStep;
import com.mesosphere.sdk.scheduler.recovery.RecoveryType;
import com.mesosphere.sdk.scheduler.recovery.constrain.UnconstrainedLaunchConstrainer;
import com.mesosphere.sdk.state.StateStore;
import com.mesosphere.sdk.storage.Persister;
import com.mesosphere.sdk.testing.*;
import com.mesosphere.sdk.testutils.TestConstants;
import org.apache.mesos.Protos;
import org.apache.mesos.SchedulerDriver;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;

import java.io.File;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Tests for the hello world service and its example yml files.
 */
public class ServiceTest {

    @After
    public void afterTest() {
        // Fail the test run early if any test left Mockito in an inconsistent state.
        Mockito.validateMockitoUsage();
    }

    /**
     * Validates service deployment in the default configuration case.
     */
    @Test
    public void testDefaultDeployment() throws Exception {
        runDefaultDeployment(true);
    }

    /**
     * Checks that if an unessential task in a pod fails, that the other task in the same pod is unaffected.
     */
    @Test
    public void testNonEssentialTaskFailure() throws Exception {
        Collection<SimulationTick> ticks = new ArrayList<>();

        ticks.add(Send.register());

        ticks.add(Expect.reconciledImplicitly());

        // Verify that service launches 1 hello pod.
        ticks.add(Send.offerBuilder("hello").build());
        ticks.add(Expect.launchedTasks("hello-0-essential", "hello-0-nonessential"));

        // Running, no readiness check is applicable:
        ticks.add(Send.taskStatus("hello-0-essential", Protos.TaskState.TASK_RUNNING).build());
        ticks.add(Send.taskStatus("hello-0-nonessential", Protos.TaskState.TASK_RUNNING).build());

        // No more hellos to launch:
        ticks.add(Send.offerBuilder("hello").setHostname("host-foo").build());
        ticks.add(Expect.declinedLastOffer());
        ticks.add(Expect.allPlansComplete());

        // When non-essential "agent" task fails, only agent task is relaunched, server task is unaffected:
        ticks.add(Send.taskStatus("hello-0-nonessential", Protos.TaskState.TASK_FAILED).build());

        // Turn the crank with an arbitrary offer so that the failure is processed.
        // This also tests that the task is still tied to its prior location by checking that the offer is declined.
        ticks.add(Send.offerBuilder("hello").build());
        ticks.add(Expect.declinedLastOffer());

        // Neither task should be killed: server should be unaffected, and agent is already in a terminal state
        ticks.add(Expect.taskNameNotKilled("hello-0-nonessential"));
        ticks.add(Expect.taskNameNotKilled("hello-0-essential"));

        // Send the matching offer to relaunch ONLY the agent against:
        ticks.add(Send.offerBuilder("hello").setPodIndexToReoffer(0).build());
        ticks.add(Expect.launchedTasks("hello-0-nonessential"));
        ticks.add(Send.taskStatus("hello-0-nonessential", Protos.TaskState.TASK_RUNNING).build());
        ticks.add(Expect.allPlansComplete());

        // Matching ExecutorInfo == same pod:
        ticks.add(new ExpectTasksShareExecutor("hello-0-essential", "hello-0-nonessential"));

        new ServiceTestRunner("examples/nonessential_tasks.yml").run(ticks);
    }

    /**
     * Checks that if an essential task in a pod fails, that all tasks in the pod are relaunched.
     */
    @Test
    public void testEssentialTaskFailure() throws Exception {
        Collection<SimulationTick> ticks = new ArrayList<>();

        ticks.add(Send.register());

        ticks.add(Expect.reconciledImplicitly());

        // Verify that service launches 1 hello pod.
        ticks.add(Send.offerBuilder("hello").build());
        ticks.add(Expect.launchedTasks("hello-0-essential", "hello-0-nonessential"));

        // Running, no readiness check is applicable:
        ticks.add(Send.taskStatus("hello-0-essential", Protos.TaskState.TASK_RUNNING).build());
        ticks.add(Send.taskStatus("hello-0-nonessential", Protos.TaskState.TASK_RUNNING).build());

        // No more hellos to launch:
        ticks.add(Send.offerBuilder("hello").setHostname("host-foo").build());
        ticks.add(Expect.declinedLastOffer());
        ticks.add(Expect.allPlansComplete());

        // When essential "server" task fails, both server+agent are relaunched:
        ticks.add(Send.taskStatus("hello-0-essential", Protos.TaskState.TASK_FAILED).build());

        // Turn the crank with an arbitrary offer so that the failure is processed.
        // This also tests that the task is still tied to its prior location by checking that the offer is declined.
        ticks.add(Send.offerBuilder("hello").build());
        ticks.add(Expect.declinedLastOffer());

        // Only the agent task is killed: server is already in a terminal state
        ticks.add(Expect.taskNameKilled("hello-0-nonessential"));
        ticks.add(Expect.taskNameNotKilled("hello-0-essential"));

        // Send the matching offer to relaunch both the server and agent:
        ticks.add(Send.offerBuilder("hello").setPodIndexToReoffer(0).build());
        ticks.add(Expect.launchedTasks("hello-0-essential", "hello-0-nonessential"));
        ticks.add(Send.taskStatus("hello-0-nonessential", Protos.TaskState.TASK_RUNNING).build());
        ticks.add(Send.taskStatus("hello-0-essential", Protos.TaskState.TASK_RUNNING).build());
        ticks.add(Expect.allPlansComplete());

        // Matching ExecutorInfo == same pod:
        ticks.add(new ExpectTasksShareExecutor("hello-0-essential", "hello-0-nonessential"));

        new ServiceTestRunner("examples/nonessential_tasks.yml").run(ticks);
    }

    /**
     * Checks that unexpected Tasks are killed.
     */
    @Test
    public void testZombieTaskKilling() throws Exception {
        Collection<SimulationTick> ticks = new ArrayList<>();

        ticks.add(Send.register());

        ticks.add(Expect.reconciledImplicitly());

        // Verify that service launches 1 hello pod.
        ticks.add(Send.offerBuilder("hello").build());
        ticks.add(Expect.launchedTasks("hello-0-server"));

        // Running, no readiness check is applicable:
        ticks.add(Send.taskStatus("hello-0-server", Protos.TaskState.TASK_RUNNING).build());

        // A status for the same task name but an unknown (random) task id is a "zombie":
        // the scheduler should kill the unknown id while leaving the known task alone.
        String taskId = UUID.randomUUID().toString();
        ticks.add(Send.taskStatus("hello-0-server", Protos.TaskState.TASK_RUNNING)
                .setTaskId(taskId)
                .build());
        ticks.add(Expect.taskIdKilled(taskId));
        ticks.add(Expect.taskNameNotKilled("hello-0-server"));

        new ServiceTestRunner("examples/simple.yml").run(ticks);
    }

    /**
     * Verifies that a set of two or more tasks all share the same ExecutorInfo (i.e. the same pod).
     */
    private static class ExpectTasksShareExecutor implements Expect {
        private final List<String> taskNames;

        private ExpectTasksShareExecutor(String... taskNames) {
            this.taskNames = Arrays.asList(taskNames);
        }

        @Override
        public String getDescription() {
            return String.format("Tasks share the same executor: %s", taskNames);
        }

        @Override
        public void expect(ClusterState state, SchedulerDriver mockDriver) throws AssertionError {
            // Collect the distinct executors across all named tasks; exactly one means one pod.
            Set<Protos.ExecutorInfo> executors = taskNames.stream()
                    .map(name -> state.getLastLaunchedTask(name).getExecutor())
                    .collect(Collectors.toSet());
            Assert.assertEquals(String.format(
                    "Expected tasks to share a single matching executor, but had: %s",
                    executors.stream().map(e -> TextFormat.shortDebugString(e)).collect(Collectors.toList())),
                    1, executors.size());
        }
    }

    /**
     * Tests scheduler behavior when the number of {@code world} pods is reduced.
     */
    @Test
    public void testHelloDecommissionNotAllowed() throws Exception {
        // Simulate an initial deployment with default of 2 world nodes (and 1 hello node):
        ServiceTestResult result = runDefaultDeployment(true);
        Assert.assertEquals(
                new TreeSet<>(Arrays.asList("hello-0-server", "world-0-server", "world-1-server")),
                result.getPersister().getChildren("/Tasks"));

        // Now test behavior when that's reduced to 1 world node:
        Collection<SimulationTick> ticks = new ArrayList<>();

        ticks.add(Send.register());
        ticks.add(Expect.reconciledExplicitly(result.getPersister()));

        ticks.add(Send.taskStatus("hello-0-server", Protos.TaskState.TASK_RUNNING).build());
        ticks.add(Send.taskStatus("world-0-server", Protos.TaskState.TASK_RUNNING).setReadinessCheckExitCode(0).build());
        ticks.add(Send.taskStatus("world-1-server", Protos.TaskState.TASK_RUNNING).setReadinessCheckExitCode(0).build());

        // Need to send an offer to trigger the implicit reconciliation
        ticks.add(Send.offerBuilder("hello").build());
        ticks.add(Expect.reconciledImplicitly());
        ticks.add(Expect.declinedLastOffer());

        // hello.count=0 is not allowed, so the deploy plan must carry a validation error:
        ticks.add(new Expect() {
            @Override
            public void expect(ClusterState state, SchedulerDriver mockDriver) {
                List<String> deployErrors = state.getPlans().stream()
                        .filter(p -> p.isDeployPlan())
                        .findFirst()
                        .get().getErrors();
                Assert.assertTrue(deployErrors.get(0).contains("PodSpec named 'hello' has 0 tasks, expected >=1 tasks"));
            }

            @Override
            public String getDescription() {
                return "check deploy plan error";
            }
        });

        new ServiceTestRunner()
                .setOptions("hello.count", "0")
                .setState(result)
                .run(ticks);
    }

    @Test
    public void testWorldDecommissionDefaultExecutor() throws Exception {
        testWorldDecommission(true);
    }

    @Test
    public void testWorldDecommissionCustomExecutor() throws Exception {
        testWorldDecommission(false);
    }

    /**
     * Tests scheduler behavior when the number of {@code world} pods is reduced.
     */
    private void testWorldDecommission(boolean useDefaultExecutor) throws Exception {
        // Simulate an initial deployment with default of 2 world nodes (and 1 hello node):
        ServiceTestResult result = runDefaultDeployment(useDefaultExecutor);
        Assert.assertEquals(
                new TreeSet<>(Arrays.asList("hello-0-server", "world-0-server", "world-1-server")),
                result.getPersister().getChildren("/Tasks"));

        // Now test behavior when that's reduced to 1 world node:
        Collection<SimulationTick> ticks = new ArrayList<>();

        ticks.add(Send.register());
        ticks.add(Expect.reconciledExplicitly(result.getPersister()));

        ticks.add(Send.taskStatus("hello-0-server", Protos.TaskState.TASK_RUNNING).build());
        ticks.add(Send.taskStatus("world-0-server", Protos.TaskState.TASK_RUNNING).setReadinessCheckExitCode(0).build());
        ticks.add(Send.taskStatus("world-1-server", Protos.TaskState.TASK_RUNNING).setReadinessCheckExitCode(0).build());

        // Now, we expect there to be the following plan state:
        // - a deploy plan that's COMPLETE, with only hello-0 (empty world phase)
        // - a recovery plan that's COMPLETE
        // - a decommission plan that's PENDING with phases for world-1 and world-0 (in that order)

        // When default executor is being used, three additional resources need to be unreserved.
        int stepCount = useDefaultExecutor ? 9 : 6;

        // Check initial plan state
        ticks.add(new ExpectDecommissionPlanProgress(Arrays.asList(
                new StepCount("world-1", stepCount, 0, 0),
                new StepCount("world-0", stepCount, 0, 0))));

        // Need to send an offer to trigger the implicit reconciliation.
        ticks.add(Send.offerBuilder("hello").build());
        ticks.add(Expect.reconciledImplicitly());
        ticks.add(Expect.declinedLastOffer());

        // Check plan state after an offer came through: world-1-server killed
        ticks.add(new ExpectDecommissionPlanProgress(Arrays.asList(
                new StepCount("world-1", stepCount - 1, 0, 1),
                new StepCount("world-0", stepCount, 0, 0))));
        ticks.add(Expect.taskNameKilled("world-1-server"));

        // Offer world-0 resources and check that nothing happens (haven't gotten there yet):
        ticks.add(Send.offerBuilder("world").setPodIndexToReoffer(0).build());
        ticks.add(new ExpectDecommissionPlanProgress(Arrays.asList(
                new StepCount("world-1", stepCount - 2, 1, 1),
                new StepCount("world-0", stepCount, 0, 0))));

        // Offer world-1 resources and check that world-1 resources are wiped:
        ticks.add(Send.offerBuilder("world").setPodIndexToReoffer(1).build());
        ticks.add(Expect.unreservedTasks("world-1-server"));
        ticks.add(new ExpectDecommissionPlanProgress(Arrays.asList(
                new StepCount("world-1", 1, 0, stepCount - 1),
                new StepCount("world-0", stepCount, 0, 0))));
        // NOTE(review): the original code carried a bare "FAIL" marker on the next
        // line -- possibly flagging a known-failing or flaky expectation; confirm.
        ticks.add(new ExpectEmptyResources(result.getPersister(), "world-1-server"));

        // Turn the crank with an arbitrary offer to finish erasing world-1:
        ticks.add(Expect.knownTasks(result.getPersister(), "hello-0-server", "world-0-server", "world-1-server"));
        ticks.add(Send.offerBuilder("world").build());
        ticks.add(Expect.declinedLastOffer());
        ticks.add(Expect.knownTasks(result.getPersister(), "hello-0-server", "world-0-server"));
        ticks.add(new ExpectDecommissionPlanProgress(Arrays.asList(
                new StepCount("world-1", 0, 0, stepCount),
                new StepCount("world-0", stepCount, 0, 0))));

        // Now let's proceed with decommissioning world-0. This time a single offer with the correct resources results
        // in both killing/flagging the task, and clearing its resources:
        ticks.add(Send.offerBuilder("world").setPodIndexToReoffer(0).build());
        ticks.add(new ExpectDecommissionPlanProgress(Arrays.asList(
                new StepCount("world-1", 0, 0, stepCount),
                new StepCount("world-0", 1, 0, stepCount - 1))));
        ticks.add(Expect.taskNameKilled("world-0-server"));
        ticks.add(new ExpectEmptyResources(result.getPersister(), "world-0-server"));

        // Turn the crank once again to erase the world-0 stub:
        ticks.add(Expect.knownTasks(result.getPersister(), "hello-0-server", "world-0-server"));
        ticks.add(Send.offerBuilder("world").build());
        ticks.add(Expect.declinedLastOffer());
        ticks.add(Expect.knownTasks(result.getPersister(), "hello-0-server"));
        ticks.add(new ExpectDecommissionPlanProgress(Arrays.asList(
                new StepCount("world-1", 0, 0, stepCount),
                new StepCount("world-0", 0, 0, stepCount))));

        ticks.add(Expect.allPlansComplete());

        ServiceTestRunner runner = new ServiceTestRunner()
                .setOptions("world.count", "0")
                .setState(result);
        if (!useDefaultExecutor) {
            runner.setUseCustomExecutor();
        }
        runner.run(ticks);
    }

    @Test
    public void startedTaskIsPendingAfterRestartWithDefaultExecutor() throws Exception {
        Collection<SimulationTick> ticks = new ArrayList<>();

        ticks.add(Send.register());

        ticks.add(Expect.reconciledImplicitly());

        // Verify that service launches 1 hello pod then 2 world pods.
        ticks.add(Send.offerBuilder("hello").build());
        ticks.add(Expect.launchedTasks("hello-0-server"));

        // Send another offer before hello-0 is finished:
        ticks.add(Send.offerBuilder("world").build());
        ticks.add(Expect.declinedLastOffer());

        // Running, no readiness check is applicable:
        ticks.add(Send.taskStatus("hello-0-server", Protos.TaskState.TASK_RUNNING).build());

        // Now world-0 will deploy:
        ticks.add(Send.offerBuilder("world").build());
        ticks.add(Expect.launchedTasks("world-0-server"));

        // world-0 has a readiness check, so the scheduler is waiting for that.
ticks.add(Send.taskStatus("world-0-server", Protos.TaskState.TASK_RUNNING).setReadinessCheckExitCode(1).build()); ticks.add(Expect.stepStatus("deploy", "world", "world-0:[server]", Status.STARTED)); ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.declinedLastOffer()); ServiceTestRunner runner = new ServiceTestRunner(); ServiceTestResult result = runner.run(ticks); // Start a new scheduler: ticks.clear(); ticks.add(Send.register()); ticks.add(Expect.reconciledExplicitly(result.getPersister())); // Since the readiness check of the task did not pass, we expect it to remain in the PENDING state ticks.add(Expect.stepStatus("deploy", "world", "world-0:[server]", Status.PENDING)); ServiceTestResult restarted = new ServiceTestRunner().setState(result).run(ticks); } @Test public void runningTaskIsCompleteAfterRestartWithDefaultExecutor() throws Exception { Collection<SimulationTick> ticks = new ArrayList<>(); ticks.add(Send.register()); ticks.add(Expect.reconciledImplicitly()); // Verify that service launches 1 hello pod then 2 world pods. ticks.add(Send.offerBuilder("hello").build()); ticks.add(Expect.launchedTasks("hello-0-server")); // Send another offer before hello-0 is finished: ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.declinedLastOffer()); // Running, no readiness check is applicable: ticks.add(Send.taskStatus("hello-0-server", Protos.TaskState.TASK_RUNNING).build()); // Now world-0 will deploy: ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.launchedTasks("world-0-server")); // world-0 has a readiness check, so the scheduler is waiting for that. 
ticks.add(Send.taskStatus("world-0-server", Protos.TaskState.TASK_RUNNING).setReadinessCheckExitCode(0).build()); ticks.add(Expect.stepStatus("deploy", "world", "world-0:[server]", Status.COMPLETE)); ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.declinedLastOffer()); ServiceTestRunner runner = new ServiceTestRunner(); ServiceTestResult result = runner.run(ticks); // Start a new scheduler: ticks.clear(); ticks.add(Send.register()); ticks.add(Expect.reconciledExplicitly(result.getPersister())); // Since the readiness check of the task did not pass, we expect it to remain in the PENDING state ticks.add(Expect.stepStatus("deploy", "world", "world-0:[server]", Status.COMPLETE)); ServiceTestResult restarted = new ServiceTestRunner().setState(result).run(ticks); } @Test public void startedTaskIsCompleteAfterRestartWithCustomExecutor() throws Exception { Collection<SimulationTick> ticks = new ArrayList<>(); ticks.add(Send.register()); ticks.add(Expect.reconciledImplicitly()); // Verify that service launches 1 hello pod then 2 world pods. ticks.add(Send.offerBuilder("hello").build()); ticks.add(Expect.launchedTasks("hello-0-server")); // Send another offer before hello-0 is finished: ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.declinedLastOffer()); // Running, no readiness check is applicable: ticks.add(Send.taskStatus("hello-0-server", Protos.TaskState.TASK_RUNNING).build()); // Now world-0 will deploy: ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.launchedTasks("world-0-server")); // world-0 has a readiness check, so the scheduler is waiting for that. 
ticks.add(Send.taskStatus("world-0-server", Protos.TaskState.TASK_RUNNING).setReadinessCheckExitCode(1).build()); ticks.add(Expect.stepStatus("deploy", "world", "world-0:[server]", Status.STARTED)); ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.declinedLastOffer()); ServiceTestRunner runner = new ServiceTestRunner().setUseCustomExecutor(); ServiceTestResult result = runner.run(ticks); // Start a new scheduler: ticks.clear(); ticks.add(Send.register()); ticks.add(Expect.reconciledExplicitly(result.getPersister())); // Since the readiness check of the task did not pass, we expect it to remain in the PENDING state ticks.add(Expect.stepStatus("deploy", "world", "world-0:[server]", Status.COMPLETE)); ServiceTestResult restarted = new ServiceTestRunner().setState(result).setUseCustomExecutor().run(ticks); } @Test public void transientToCustomPermanentFailureTransition() throws Exception { Protos.Offer unacceptableOffer = Protos.Offer.newBuilder() .setId(Protos.OfferID.newBuilder().setValue(UUID.randomUUID().toString())) .setFrameworkId(TestConstants.FRAMEWORK_ID) .setSlaveId(TestConstants.AGENT_ID) .setHostname(TestConstants.HOSTNAME) .addResources( Protos.Resource.newBuilder() .setName("mem") .setType(Protos.Value.Type.SCALAR) .setScalar(Protos.Value.Scalar.newBuilder().setValue(1.0))) .build(); Collection<SimulationTick> ticks = new ArrayList<>(); ticks.add(Send.register()); ticks.add(Expect.reconciledImplicitly()); // Verify that service launches 1 hello pod then 2 world pods. 
ticks.add(Send.offerBuilder("hello").build()); ticks.add(Expect.launchedTasks("hello-0-server")); // Send another offer before hello-0 is finished: ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.declinedLastOffer()); // Running, no readiness check is applicable: ticks.add(Send.taskStatus("hello-0-server", Protos.TaskState.TASK_RUNNING).build()); // Now world-0 will deploy: ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.launchedTasks("world-0-server")); // With world-0's readiness check passing, world-1 still won't launch due to a hostname placement constraint: ticks.add(Send.taskStatus("world-0-server", Protos.TaskState.TASK_RUNNING).setReadinessCheckExitCode(0).build()); // world-1 will finally launch if the offered hostname is different: ticks.add(Send.offerBuilder("world").setHostname("host-foo").build()); ticks.add(Expect.launchedTasks("world-1-server")); ticks.add(Send.taskStatus("world-1-server", Protos.TaskState.TASK_RUNNING).setReadinessCheckExitCode(0).build()); ticks.add(Expect.allPlansComplete()); // Kill hello-0 to trigger transient recovery ticks.add(Send.taskStatus("hello-0-server", Protos.TaskState.TASK_FAILED).build()); // Send an unused offer to trigger an evaluation of the recovery plan ticks.add(Send.offer(unacceptableOffer)); // Expect default transient recovery triggered ticks.add(Expect.recoveryStepStatus("hello-0:[server]", "hello-0:[server]", Status.PREPARED)); // Now trigger custom permanent replacement of that pod ticks.add(Send.replacePod("hello-0")); // Send an unused offer to trigger an evaluation of the recovery plan ticks.add(Send.offer(unacceptableOffer)); // Custom expectation not relevant to other tests Expect expectSingleRecoveryPhase = new Expect() { @Override public void expect(ClusterState state, SchedulerDriver mockDriver) throws AssertionError { Plan recoveryPlan = state.getPlans().stream() .filter(plan -> plan.getName().equals("recovery")) .findAny().get(); Assert.assertEquals(1, 
recoveryPlan.getChildren().size()); } @Override public String getDescription() { return "Single recovery phase"; } }; ticks.add(expectSingleRecoveryPhase); ticks.add(Expect.recoveryStepStatus("custom-hello-recovery", "hello-0", Status.PREPARED)); // Complete recovery ticks.add(Send.offerBuilder("hello").build()); ticks.add(Expect.launchedTasks("hello-0-server")); ticks.add(Send.taskStatus("hello-0-server", Protos.TaskState.TASK_RUNNING).build()); ticks.add(Expect.allPlansComplete()); new ServiceTestRunner() .setRecoveryManagerFactory(new RecoveryPlanOverriderFactory() { @Override public RecoveryPlanOverrider create(StateStore stateStore, Collection<Plan> plans) { return new RecoveryPlanOverrider() { @Override public Optional<Phase> override(PodInstanceRequirement podInstanceRequirement) { if (podInstanceRequirement.getPodInstance().getPod().getType().equals("hello") && podInstanceRequirement.getRecoveryType().equals(RecoveryType.PERMANENT)) { Phase phase = new DefaultPhase( "custom-hello-recovery", Arrays.asList( new RecoveryStep( podInstanceRequirement.getPodInstance().getName(), podInstanceRequirement, new UnconstrainedLaunchConstrainer(), stateStore)), new SerialStrategy<>(), Collections.emptyList()); return Optional.of(phase); } return Optional.empty(); } }; } }) .run(ticks); } private static class StepCount { private final String phaseName; private final int pendingCount; private final int preparedCount; private final int completedCount; private StepCount(String phaseName, int pendingCount, int preparedCount, int completedCount) { this.phaseName = phaseName; this.pendingCount = pendingCount; this.preparedCount = preparedCount; this.completedCount = completedCount; } private Status statusOfStepIndex(int index) { if (completedCount > index) { return Status.COMPLETE; } else if (completedCount + preparedCount > index) { return Status.PREPARED; } else { return Status.PENDING; } } @Override public String toString() { return 
String.format("phase=%s[pending=%d,prepared=%d,completed=%d]", phaseName, pendingCount, preparedCount, completedCount); } } /** * Expects that the specified task has no resources listed in the state store. */ private static class ExpectEmptyResources implements Expect { private final Persister persisterWithTasks; private final String taskName; private ExpectEmptyResources(Persister persisterWithTasks, String taskName) { this.persisterWithTasks = persisterWithTasks; this.taskName = taskName; } @Override public String getDescription() { return String.format("Empty resource list for task: %s", taskName); } @Override public void expect(ClusterState state, SchedulerDriver mockDriver) throws AssertionError { Optional<Protos.TaskInfo> task = new StateStore(persisterWithTasks).fetchTask(taskName); Assert.assertTrue(String.format("Task %s not found", taskName), task.isPresent()); Assert.assertEquals(String.format("Expected zero resources, got: %s", task.get().getResourcesList()), 0, task.get().getResourcesCount()); } } /** * Expects that the decommission plan has a specified composition/statuses. 
*/ private static class ExpectDecommissionPlanProgress implements Expect { private final List<StepCount> stepCounts; private ExpectDecommissionPlanProgress(List<StepCount> stepCounts) { this.stepCounts = stepCounts; } @Override public String getDescription() { return String.format("Decommission plan with phase steps: %s", stepCounts); } @Override public void expect(ClusterState state, SchedulerDriver mockDriver) throws AssertionError { Map<String, Plan> plans = state.getPlans().stream() .collect(Collectors.toMap(Plan::getName, p -> p)); Assert.assertEquals(3, plans.size()); // Deploy plan: complete, world phase is empty Plan plan = plans.get("deploy"); Assert.assertEquals(Status.COMPLETE, plan.getStatus()); Assert.assertEquals(Arrays.asList("hello", "world"), plan.getChildren().stream().map(Phase::getName).collect(Collectors.toList())); Map<String, Phase> phases = plan.getChildren().stream() .collect(Collectors.toMap(Phase::getName, p -> p)); Assert.assertEquals(Arrays.asList("hello-0:[server]"), phases.get("hello").getChildren().stream().map(Step::getName).collect(Collectors.toList())); Assert.assertTrue(phases.get("world").getChildren().isEmpty()); // Recovery plan: still complete and empty plan = plans.get("recovery"); Assert.assertEquals(Status.COMPLETE, plan.getStatus()); Assert.assertTrue(plan.getChildren().isEmpty()); // Decommission: in_progress: world-1 sent kill and world-0 still pending plan = plans.get("decommission"); boolean allStepsPending = stepCounts.stream().allMatch(sc -> sc.completedCount == 0 && sc.preparedCount == 0); boolean allStepsComplete = stepCounts.stream().allMatch(sc -> sc.pendingCount == 0 && sc.preparedCount == 0); final Status expectedPlanStatus; if (allStepsPending) { expectedPlanStatus = Status.PENDING; } else if (allStepsComplete) { expectedPlanStatus = Status.COMPLETE; } else { expectedPlanStatus = Status.IN_PROGRESS; } Assert.assertEquals(expectedPlanStatus, plan.getStatus()); Assert.assertEquals(stepCounts.stream().map(s -> 
s.phaseName).collect(Collectors.toList()), plan.getChildren().stream().map(Phase::getName).collect(Collectors.toList())); phases = plan.getChildren().stream() .collect(Collectors.toMap(Phase::getName, p -> p)); Assert.assertEquals(stepCounts.size(), phases.size()); for (StepCount stepCount : stepCounts) { Phase phase = phases.get(stepCount.phaseName); Map<String, Status> stepStatuses = phase.getChildren().stream() .collect(Collectors.toMap(Step::getName, Step::getStatus)); Assert.assertEquals( String.format("Number of steps doesn't match expectation in %s: %s", stepCount, stepStatuses), stepCount.pendingCount + stepCount.preparedCount + stepCount.completedCount, phase.getChildren().size()); Assert.assertEquals( String.format("Step statuses don't match expectation in %s", stepCount), getExpectedStepStatuses(state, stepCount), stepStatuses); } } private static Map<String, Status> getExpectedStepStatuses(ClusterState state, StepCount stepCount) { Map<String, Status> expectedSteps = new HashMap<>(); expectedSteps.put(String.format("kill-%s-server", stepCount.phaseName), stepCount.statusOfStepIndex(expectedSteps.size())); LaunchedPod pod = state.getLastLaunchedPod(stepCount.phaseName); Collection<String> resourceIds = new ArrayList<>(); resourceIds.addAll(ResourceUtils.getResourceIds(ResourceUtils.getAllResources(pod.getTasks()))); resourceIds.addAll(ResourceUtils.getResourceIds(pod.getExecutor().getResourcesList())); for (String resourceId : resourceIds) { expectedSteps.put(String.format("unreserve-%s", resourceId), stepCount.statusOfStepIndex(expectedSteps.size())); } expectedSteps.put(String.format("erase-%s-server", stepCount.phaseName), stepCount.statusOfStepIndex(expectedSteps.size())); return expectedSteps; } } /** * Runs a default hello world deployment and returns the persisted state that resulted. 
*/ private ServiceTestResult runDefaultDeployment(boolean useDefaultExecutor) throws Exception { Collection<SimulationTick> ticks = new ArrayList<>(); ticks.add(Send.register()); ticks.add(Expect.reconciledImplicitly()); // Verify that service launches 1 hello pod then 2 world pods. ticks.add(Send.offerBuilder("hello").build()); ticks.add(Expect.launchedTasks("hello-0-server")); // Send another offer before hello-0 is finished: ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.declinedLastOffer()); // Running, no readiness check is applicable: ticks.add(Send.taskStatus("hello-0-server", Protos.TaskState.TASK_RUNNING).build()); // Now world-0 will deploy: ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.launchedTasks("world-0-server")); // world-0 has a readiness check, so the scheduler is waiting for that: ticks.add(Send.taskStatus("world-0-server", Protos.TaskState.TASK_RUNNING).build()); ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.declinedLastOffer()); // With world-0's readiness check passing, world-1 still won't launch due to a hostname placement constraint: ticks.add(Send.taskStatus("world-0-server", Protos.TaskState.TASK_RUNNING).setReadinessCheckExitCode(0).build()); ticks.add(Send.offerBuilder("world").build()); ticks.add(Expect.declinedLastOffer()); // world-1 will finally launch if the offered hostname is different: ticks.add(Send.offerBuilder("world").setHostname("host-foo").build()); ticks.add(Expect.launchedTasks("world-1-server")); ticks.add(Send.taskStatus("world-1-server", Protos.TaskState.TASK_RUNNING).setReadinessCheckExitCode(0).build()); // No more worlds to launch: ticks.add(Send.offerBuilder("world").setHostname("host-bar").build()); ticks.add(Expect.declinedLastOffer()); ticks.add(Expect.allPlansComplete()); ServiceTestRunner runner = new ServiceTestRunner(); if (!useDefaultExecutor) { runner.setUseCustomExecutor(); } return runner.run(ticks); } /** * Validates all service specs in the 
hello-world examples/ directory. */ @Test public void testExampleSpecs() throws Exception { // Some example files may require additional custom scheduler envvars: Map<String, Map<String, String>> schedulerEnvForExamples = new HashMap<>(); schedulerEnvForExamples.put("secrets.yml", toMap( "HELLO_SECRET1", "hello-world/secret1", "HELLO_SECRET2", "hello-world/secret2", "WORLD_SECRET1", "hello-world/secret1", "WORLD_SECRET2", "hello-world/secret2", "WORLD_SECRET3", "hello-world/secret3")); // Iterate over yml files in dist/examples/, run sanity check for each: File[] exampleFiles = ServiceTestRunner.getDistFile("examples").listFiles(); Assert.assertNotNull(exampleFiles); Assert.assertTrue(exampleFiles.length != 0); for (File examplesFile : exampleFiles) { ServiceTestRunner serviceTestRunner = new ServiceTestRunner(examplesFile); Map<String, String> schedulerEnv = schedulerEnvForExamples.get(examplesFile.getName()); if (schedulerEnv != null) { serviceTestRunner.setSchedulerEnv(schedulerEnv); } try { serviceTestRunner.run(); } catch (Exception e) { throw new Exception(String.format( "Failed to render %s: %s", examplesFile.getAbsolutePath(), e.getMessage()), e); } } } /** * Validates the default service spec. */ @Test public void testDefaultSpec() throws Exception { new ServiceTestRunner().run(); } private static Map<String, String> toMap(String... keyVals) { Map<String, String> map = new HashMap<>(); if (keyVals.length % 2 != 0) { throw new IllegalArgumentException(String.format( "Expected an even number of arguments [key, value, key, value, ...], got: %d", keyVals.length)); } for (int i = 0; i < keyVals.length; i += 2) { map.put(keyVals[i], keyVals[i + 1]); } return map; } }
package org.vetmeduni.readtools;

import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Static accessors for project metadata (name, version, build, timestamp and contact
 * information) loaded from the {@code /version.prop} classpath resource.
 *
 * <p>When the resource is missing, unreadable, or an individual property was left
 * unfilled (still contains a {@code $} placeholder from the build), the corresponding
 * built-in default is used instead.
 *
 * <p>NOTE: values are lazily cached in static fields without synchronization, so this
 * class is not thread-safe during first access.
 */
public class ProjectProperties {

    /** Classpath location of the generated properties file. */
    private static final String PROPERTIES_FILE = "/version.prop";

    /** Fallback values used when the resource or an individual property is absent. */
    private static final Map<String, String> DEFAULT_VERSION_VALUES = new HashMap<>();

    static {
        DEFAULT_VERSION_VALUES.put("version", "UNKNOWN");
        DEFAULT_VERSION_VALUES.put("name", "Program");
        // the build will be computed except it is in develop
        DEFAULT_VERSION_VALUES.put("build", "develop");
        DEFAULT_VERSION_VALUES.put("timestamp", "unknown");
        DEFAULT_VERSION_VALUES.put("contact_person", "DGS");
        DEFAULT_VERSION_VALUES.put("contact_email", "");
    }

    // Lazily initialized caches; null means "not loaded yet".
    private static String name = null;

    private static String version = null;

    private static String build = null;

    private static String timestamp = null;

    private static String contactPerson = null;

    private static String contactEmail = null;

    /** Utility class; not meant to be instantiated. */
    private ProjectProperties() {
    }

    /**
     * Get the name of the program.
     *
     * @return the name of the program
     */
    public static String getName() {
        if (name == null) {
            setValue("name");
        }
        return name;
    }

    /**
     * Get the version for the program.
     *
     * @return the version
     */
    public static String getVersion() {
        if (version == null) {
            setValue("version");
        }
        return version;
    }

    /**
     * Get the build for this project.
     *
     * @return the build String
     */
    public static String getBuild() {
        if (build == null) {
            setValue("build");
        }
        return build;
    }

    /**
     * Get the compilation time.
     *
     * @return the timestamp
     */
    public static String getTimestamp() {
        if (timestamp == null) {
            setValue("timestamp");
        }
        return timestamp;
    }

    /**
     * Get the contact person.
     *
     * @return the contact person
     */
    public static String getContactPerson() {
        if (contactPerson == null) {
            setValue("contact_person");
        }
        return contactPerson;
    }

    /**
     * Get the contact email.
     *
     * @return the contact email
     */
    public static String getContactEmail() {
        if (contactEmail == null) {
            setValue("contact_email");
        }
        return contactEmail;
    }

    /**
     * Get the formatted version in the format v.${version}.r_${build}.
     *
     * @return the formatted version
     */
    public static String getFormattedVersion() {
        if (version == null || build == null) {
            getAllPropertiesForProgramHeader();
        }
        return String.format("v.%s.r_%s", version, build);
    }

    /**
     * Get the formatted name with version like Name v.${version}.r_${build}.
     *
     * @return the formatted name with version
     */
    public static String getFormattedNameWithVersion() {
        if (version == null || build == null || name == null) {
            getAllPropertiesForProgramHeader();
        }
        return String.format("%s %s", name, getFormattedVersion());
    }

    /**
     * Get the full contact (Name + email).
     *
     * @return the full contact
     */
    public static String getContact() {
        if (contactPerson == null || contactEmail == null) {
            getAllPropertiesForProgramHeader();
        }
        return String.format("%s (%s)", contactPerson, contactEmail);
    }

    /**
     * Get a value from the property file.
     *
     * <p>The stream is managed with try-with-resources so it is closed even when
     * {@link Properties#load} throws (the original code leaked it on that path).
     *
     * @param tag the tag in the property file
     *
     * @return the value from the file, or the default when the file or tag is missing
     */
    private static String getFromProperties(String tag) {
        try (InputStream stream = ProjectProperties.class.getResourceAsStream(PROPERTIES_FILE)) {
            if (stream == null) {
                return DEFAULT_VERSION_VALUES.get(tag);
            }
            Properties props = new Properties();
            props.load(stream);
            String prop = props.getProperty(tag);
            return (prop == null) ? DEFAULT_VERSION_VALUES.get(tag) : prop;
        } catch (IOException e) {
            // Unreadable resource: fall back to the built-in default.
            return DEFAULT_VERSION_VALUES.get(tag);
        }
    }

    /**
     * Get all the properties at the same time from the file, falling back to the
     * defaults for any tag that is absent or still an unexpanded placeholder.
     */
    private static void getAllPropertiesForProgramHeader() {
        try (InputStream stream = ProjectProperties.class.getResourceAsStream(PROPERTIES_FILE)) {
            if (stream == null) {
                setDefaults();
                return;
            }
            Properties props = new Properties();
            props.load(stream);
            for (String tag : DEFAULT_VERSION_VALUES.keySet()) {
                String prop = props.getProperty(tag);
                setValue(tag, isAbsent(prop) ? DEFAULT_VERSION_VALUES.get(tag) : prop);
            }
        } catch (IOException e) {
            setDefaults();
        }
    }

    /**
     * Check if the property value is absent (null or contains a "$", i.e. an
     * unexpanded build placeholder such as {@code ${version}}).
     *
     * @param value the value to test
     *
     * @return <code>true</code> if it is absent; <code>false</code> otherwise
     */
    private static boolean isAbsent(String value) {
        return value == null || value.contains("$");
    }

    /**
     * Set a tag value, reading it from the property file and falling back to the
     * default when absent.
     *
     * @param tag the tag to set
     */
    private static void setValue(String tag) {
        String val = getFromProperties(tag);
        if (isAbsent(val)) {
            setDefault(tag);
        } else {
            setValue(tag, val);
        }
    }

    /**
     * Set a tag with a value.
     *
     * @param tag the tag to set
     * @param val the value
     */
    private static void setValue(String tag, String val) {
        switch (tag) {
            case "name":
                name = val;
                break;
            case "build":
                build = val;
                break;
            case "timestamp":
                timestamp = val;
                break;
            case "version":
                version = val;
                break;
            case "contact_person":
                contactPerson = val;
                break;
            case "contact_email":
                contactEmail = val;
                break;
            default:
                throw new IllegalArgumentException("Property " + tag + " not found");
        }
    }

    /**
     * Set the default value for a tag.
     *
     * @param tag the tag to set
     */
    private static void setDefault(String tag) {
        setValue(tag, DEFAULT_VERSION_VALUES.get(tag));
    }

    /**
     * Set all values for all the tags.
     */
    private static void setDefaults() {
        for (String tag : DEFAULT_VERSION_VALUES.keySet()) {
            setDefault(tag);
        }
    }
}
package com.sonyericsson.hudson.plugins.gerrit.gerritevents;

import net.sf.json.JSONObject;
import com.sonyericsson.hudson.plugins.gerrit.gerritevents.dto.GerritEvent;
import com.sonyericsson.hudson.plugins.gerrit.gerritevents.dto.attr.Provider;

/**
 * A handler to deliver Gerrit events and connection events to listeners.
 *
 * <p>Implementations accept event data in several forms (raw stream-events text, parsed
 * JSON, or an already-constructed {@link GerritEvent}) and queue it for delivery to the
 * registered {@link GerritEventListener} and {@link ConnectionListener} instances.
 *
 * @author rinrinne &lt;rinrin.ne@gmail.com&gt;
 */
public interface Handler {

    /**
     * Post string data to the working queue.
     * Note that you must not update objects passed to this method.
     *
     * @param data a line of text from the stream-events stream of events.
     */
    void post(String data);

    /**
     * Post string data to the working queue, tagged with its originating server.
     * Note that you must not update objects passed to this method.
     *
     * @param data a line of text from the stream-events stream of events.
     * @param provider the Gerrit server info.
     */
    void post(String data, Provider provider);

    /**
     * Post a json object to the working queue.
     * Note that you must not update objects passed to this method.
     *
     * @param json a json object from the stream-events stream of events.
     */
    void post(JSONObject json);

    /**
     * Post a json object to the working queue, tagged with its originating server.
     * Note that you must not update objects passed to this method.
     *
     * @param json a json object from the stream-events stream of events.
     * @param provider the Gerrit server info.
     */
    void post(JSONObject json, Provider provider);

    /**
     * Post a GerritEvent object to the working queue.
     * Note that you must not update objects passed to this method.
     *
     * @param event a GerritEvent object.
     */
    void post(GerritEvent event);

    /**
     * Add a listener for GerritEvent.
     *
     * @param listener the listener.
     */
    void addListener(GerritEventListener listener);

    /**
     * Add a listener for GerritConnectionEvent.
     *
     * @param listener the listener.
     */
    void addListener(ConnectionListener listener);

    /**
     * Remove a listener for GerritEvent.
     *
     * @param listener the listener.
     */
    void removeListener(GerritEventListener listener);

    /**
     * Remove a listener for GerritConnectionEvent.
     *
     * @param listener the listener.
     */
    void removeListener(ConnectionListener listener);
}
package ru.r2cloud.jradio.jy1sat;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.ToIntFunction;

/**
 * Re-assembles multi-beacon payload sequences from individual Jy1sat beacons.
 *
 * <p>Jy1sat splits larger data products across consecutive beacons, identified by the
 * header id: whole orbit data (ids 1..12), high resolution data (ids 13..17) and fitter
 * messages (ids 18..24). Beacons are sorted, their 200-byte payloads concatenated in
 * chunk order, and any missing chunk is padded with zero bytes so offsets stay aligned.
 */
public class AggregateBeacons {

	private static final int MAX_WHOLE_ORBIT_CHUNKS = 12;
	private static final int MAX_HIGH_RES_CHUNKS = 5;
	private static final int MAX_FITTER_CHUNKS = 7;

	// Size in bytes of a single beacon payload chunk, used when padding gaps.
	private static final int CHUNK_SIZE = 200;

	/**
	 * Assemble fitter message batches (header ids 18..24) from the given beacons.
	 *
	 * @param beacons beacons to aggregate; the list is sorted in place
	 * @return one batch per completed (or partially padded) 7-chunk sequence
	 * @throws IOException if writing to the in-memory buffer fails
	 */
	public static List<FitterMessageBatch> readFitterMessages(List<Jy1satBeacon> beacons) throws IOException {
		return aggregate(beacons, MAX_FITTER_CHUNKS, AggregateBeacons::getFitterMessageIndex,
				(first, data) -> new FitterMessageBatch(first.getRealtimeTelemetry().getSequenceNumber(), data));
	}

	/**
	 * Assemble high resolution data batches (header ids 13..17) from the given beacons.
	 *
	 * @param beacons beacons to aggregate; the list is sorted in place
	 * @return one batch per completed (or partially padded) 5-chunk sequence
	 * @throws IOException if writing to the in-memory buffer fails
	 */
	public static List<HighResolutionDataBatch> readHighResolutionData(List<Jy1satBeacon> beacons) throws IOException {
		return aggregate(beacons, MAX_HIGH_RES_CHUNKS, AggregateBeacons::getHiResMessageIndex,
				(first, data) -> new HighResolutionDataBatch(first.getRealtimeTelemetry().getSequenceNumber(), data));
	}

	/**
	 * Assemble whole orbit data batches (header ids 1..12) from the given beacons.
	 *
	 * @param beacons beacons to aggregate; the list is sorted in place
	 * @return one batch per completed (or partially padded) 12-chunk sequence
	 * @throws IOException if writing to the in-memory buffer fails
	 */
	public static List<WholeOrbitDataBatch> readWholeOrbit(List<Jy1satBeacon> beacons) throws IOException {
		return aggregate(beacons, MAX_WHOLE_ORBIT_CHUNKS, AggregateBeacons::getWODMessageIndex,
				(first, data) -> new WholeOrbitDataBatch(first.getRealtimeTelemetry().getSequenceNumber(), data));
	}

	/**
	 * Shared aggregation loop used by all three public readers (they previously
	 * duplicated this logic verbatim, differing only in the three parameters below).
	 *
	 * @param beacons beacons to aggregate; sorted in place by {@link Jy1satBeaconComparator}
	 * @param maxChunks 1-based index of the last chunk in a complete sequence
	 * @param chunkIndex maps a beacon to its 1-based chunk index, or -1 when the beacon
	 *            does not belong to this data product
	 * @param batchFactory builds a batch from the first beacon of the sequence (supplies
	 *            the sequence number) and the concatenated payload bytes
	 * @return assembled batches in sequence order
	 * @throws IOException if writing to the in-memory buffer fails
	 */
	private static <T> List<T> aggregate(List<Jy1satBeacon> beacons, int maxChunks, ToIntFunction<Jy1satBeacon> chunkIndex, BiFunction<Jy1satBeacon, byte[], T> batchFactory) throws IOException {
		Collections.sort(beacons, Jy1satBeaconComparator.INSTACE);
		ByteArrayOutputStream baos = null;
		int lastIndex = 0;
		Jy1satBeacon firstBeacon = null;
		List<T> result = new ArrayList<>();
		for (Jy1satBeacon cur : beacons) {
			int index = chunkIndex.applyAsInt(cur);
			if (index == -1) {
				// not part of this data product
				continue;
			}
			if (baos == null) {
				baos = new ByteArrayOutputStream();
				firstBeacon = cur;
			}
			// fill the gap between the last transmission and the next one
			for (int i = lastIndex; i < (index - 1); i++) {
				baos.write(new byte[CHUNK_SIZE]);
			}
			baos.write(cur.getPayload());
			lastIndex = index;
			if (lastIndex == maxChunks && firstBeacon != null) {
				// sequence complete: emit a batch and start over
				lastIndex = 0;
				result.add(batchFactory.apply(firstBeacon, baos.toByteArray()));
				baos = null;
				firstBeacon = null;
			}
		}
		if (baos != null && firstBeacon != null) {
			// TODO gaps will cause non-null values in the aggregated data.
			// figure out how to parse sparse byte array
			// Pad out the remainder of an incomplete trailing sequence.
			// NOTE(review): the "<=" bound writes one more chunk than (maxChunks - lastIndex);
			// preserved from the original code — confirm whether this extra chunk is intended.
			for (int i = lastIndex; i <= maxChunks; i++) {
				baos.write(new byte[CHUNK_SIZE]);
			}
			result.add(batchFactory.apply(firstBeacon, baos.toByteArray()));
		}
		return result;
	}

	// Fitter messages occupy header ids 18..24 -> chunk indexes 1..7.
	private static int getFitterMessageIndex(Jy1satBeacon beacon) {
		if (beacon.getHeader().getId() < 18 || beacon.getHeader().getId() > 24) {
			return -1;
		}
		return beacon.getHeader().getId() - 17;
	}

	// Whole orbit data occupies header ids 1..12 -> chunk indexes 1..12.
	private static int getWODMessageIndex(Jy1satBeacon beacon) {
		if (beacon.getHeader().getId() < 1 || beacon.getHeader().getId() > 12) {
			return -1;
		}
		return beacon.getHeader().getId();
	}

	// High resolution data occupies header ids 13..17 -> chunk indexes 1..5.
	private static int getHiResMessageIndex(Jy1satBeacon beacon) {
		if (beacon.getHeader().getId() < 13 || beacon.getHeader().getId() > 17) {
			return -1;
		}
		return beacon.getHeader().getId() - 12;
	}

	private AggregateBeacons() {
		// do nothing
	}
}
package ru.shcoder.fuzzysearch;

/**
 * Similarity score based on the number of characters of {@code str1} that can
 * be matched, in order, against distinct characters of {@code str2}.
 */
public class CharsScoreStrategy extends ScoreWeight implements ScoreStrategy {

    /**
     * Get similarity score by number of matching chars.
     * <p>
     * Each character of {@code str1} is matched against the first unused
     * occurrence in {@code str2} at or after the previous match, so every
     * character of {@code str2} is consumed at most once.
     * <p>
     * BUG FIX: the original restarted the search window at the running match
     * count instead of just past the matched position, so a single character of
     * {@code str2} could be counted multiple times (e.g. "bb" vs "ab" scored a
     * perfect 1.0). It also returned NaN (0/0) for two empty strings.
     *
     * @param str1 First string
     * @param str2 Second string
     * @return Similarity Score in [0, 1]
     */
    public double getScore(String str1, String str2) {
        if (str1.isEmpty() && str2.isEmpty()) {
            // Two empty strings are identical; avoids a 0/0 -> NaN result.
            return 1.0;
        }
        int sharedCharsCount = 0;
        int searchFrom = 0; // position in str2 just past the last matched char
        for (int i = 0; i < str1.length() && searchFrom < str2.length(); i++) {
            int matchedCharIndex = str2.indexOf(str1.charAt(i), searchFrom);
            if (matchedCharIndex > -1) {
                sharedCharsCount++;
                searchFrom = matchedCharIndex + 1; // consume the matched char
            }
        }
        // Dice/Jaccard-style normalization; denominator > 0 because at least one
        // string is non-empty and sharedCharsCount <= min(len1, len2).
        return (double) sharedCharsCount / (str2.length() + str1.length() - sharedCharsCount);
    }
}
package scrum.client.collaboration;

import ilarkesto.core.base.Str;
import ilarkesto.gwt.client.AAction;
import ilarkesto.gwt.client.AViewEditWidget;
import ilarkesto.gwt.client.Gwt;
import ilarkesto.gwt.client.HyperlinkWidget;
import ilarkesto.gwt.client.TableBuilder;
import ilarkesto.gwt.client.editor.ATextEditorModel;
import ilarkesto.gwt.client.editor.RichtextEditorWidget;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import scrum.client.common.AScrumGwtEntity;
import scrum.client.common.AScrumWidget;

import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.Widget;

/**
 * Widget showing the paginated comment list of an entity, plus an inline
 * rich-text editor for posting new comments.
 */
public class CommentsWidget extends AScrumWidget {

    // Number of comments shown per page of the navigator.
    private static final int COMMENTS_PER_PAGE = 5;

    // Holds either the "Create a comment..." link or the active editor.
    private SimplePanel editorWrapper;
    private FlowPanel commentListPanel;
    private AScrumGwtEntity parent;
    // Cache of one widget per comment, so widgets survive list refreshes.
    private Map<Comment, CommentWidget> widgets;
    private HyperlinkWidget activateCommentLink;
    // Non-null only while the user is composing a comment.
    private RichtextEditorWidget editor;
    private int currentPage = 1; // 1-based

    public CommentsWidget(AScrumGwtEntity parent) {
        this.parent = parent;
    }

    @Override
    protected Widget onInitialization() {
        // Fetch the comments for this entity from the server.
        new RequestCommentsServiceCall(parent.getId()).execute(); // TODO commentsManagerComponent
        activateCommentLink = new HyperlinkWidget(new ActivateCommentEditorAction());
        widgets = new HashMap<Comment, CommentWidget>();
        editorWrapper = new SimplePanel();
        editorWrapper.setWidget(activateCommentLink);
        commentListPanel = new FlowPanel();
        FlowPanel panel = new FlowPanel();
        // NOTE(review): the first FlowPanel above is immediately discarded by
        // this reassignment — the first "new FlowPanel()" is a redundant dead store.
        panel = new FlowPanel();
        panel.setStyleName("CommentsWidget");
        panel.add(editorWrapper);
        panel.add(commentListPanel);
        return panel;
    }

    @Override
    protected void onUpdate() {
        // Keep the wrapper in sync with the editor state: show the activation
        // link when no editor is open, otherwise show the editor itself.
        if (editor == null && editorWrapper.getWidget() != activateCommentLink) {
            editorWrapper.setWidget(activateCommentLink);
        } else if (editor != null && editorWrapper.getWidget() != editor) {
            editorWrapper.setWidget(editor);
        }
        updateCommentList();
        super.onUpdate();
    }

    // Rebuilds the visible comment list for the current page, newest first.
    private void updateCommentList() {
        commentListPanel.clear();
        List<Comment> comments = parent.getComments();
        Collections.sort(comments, Comment.REVERSE_DATEANDTIME_COMPARATOR);
        List<Comment> pageComments = filterCurrentPageComments(comments);
        for (Comment comment : pageComments) {
            CommentWidget widget = getWidget(comment);
            commentListPanel.add(widget);
        }
        // Only show the navigator when there is more than one page.
        if (comments.size() > COMMENTS_PER_PAGE) {
            commentListPanel.add(createPageNavigator(comments.size()));
        }
    }

    /**
     * Builds the "Pages: < 1 2 3 >" navigator row. The "<"/">" links move one
     * page back/forward; the current page is rendered as plain text.
     */
    private Widget createPageNavigator(int commentCount) {
        TableBuilder tb = new TableBuilder();
        tb.setWidth(null);
        tb.addFieldLabel("Pages:");
        int page = 1;
        int endIdx = COMMENTS_PER_PAGE - 1; // last comment index covered by 'page'
        if (currentPage > 1) {
            tb.addSpacer(5, 1);
            tb.add(Gwt.createDiv("CommentsWidget-pageNavigator-page", new HyperlinkWidget(new ShowPageAction("<",
                    currentPage - 1))));
            tb.addSpacer(5, 1);
        }
        while (true) {
            tb.addSpacer(5, 1);
            if (currentPage == page) {
                tb.add(Gwt.createDiv("CommentsWidget-pageNavigator-currentPage", String.valueOf(page)));
            } else {
                tb.add(Gwt.createDiv("CommentsWidget-pageNavigator-page", new HyperlinkWidget(new ShowPageAction(
                        String.valueOf(page), page))));
            }
            // Stop once this page's range reaches the last comment.
            if (endIdx >= commentCount - 1) break;
            page++;
            endIdx += COMMENTS_PER_PAGE;
        }
        // 'page' is now the last page; show ">" only if we are not on it.
        if (page > currentPage) {
            tb.addSpacer(10, 1);
            tb.add(Gwt.createDiv("CommentsWidget-pageNavigator-page", new HyperlinkWidget(new ShowPageAction(">",
                    currentPage + 1))));
        }
        return Gwt.createDiv("CommentsWidget-pageNavigator", tb.createTable());
    }

    // Returns the slice of comments belonging to the current page.
    private List<Comment> filterCurrentPageComments(List<Comment> comments) {
        List<Comment> ret = new ArrayList<Comment>(COMMENTS_PER_PAGE);
        int startIdx = (currentPage - 1) * COMMENTS_PER_PAGE;
        int endIdx = startIdx + COMMENTS_PER_PAGE - 1;
        if (endIdx >= comments.size()) endIdx = comments.size() - 1;
        for (int i = startIdx; i <= endIdx; i++) {
            ret.add(comments.get(i));
        }
        return ret;
    }

    // Lazily creates and caches the widget for a comment.
    private CommentWidget getWidget(Comment comment) {
        CommentWidget widget = widgets.get(comment);
        if (widget == null) {
            widget = new CommentWidget(comment);
            widgets.put(comment, widget);
        }
        return widget;
    }

    // Persists the editor's text as a new comment; blank text is ignored.
    private void postComment() {
        String text = editor.getEditorText();
        if (Str.isBlank(text)) return;
        text = text.trim();
        Comment comment = new Comment(parent, getAuth().getUser(), text);
        getDao().createComment(comment);
        update();
    }

    // Opens the rich-text editor in place of the activation link.
    private void activateEditor() {
        this.editor = new RichtextEditorWidget(new ATextEditorModel() {

            @Override
            public void setValue(String text) {
                // Saving the editor content posts the comment.
                postComment();
            }

            @Override
            public String getValue() {
                // New comments always start empty.
                return null;
            }
        });
        this.editor.switchToEditMode();
        this.editor.setModeSwitchHandler(new AViewEditWidget.ModeSwitchHandler() {

            @Override
            public void onViewerActivated(AViewEditWidget widget) {
                // Leaving edit mode closes the editor and restores the link.
                editor = null;
                update();
            }

            @Override
            public void onEditorActivated(AViewEditWidget widget) {}
        });
        update();
    }

    // Action behind the "Create a comment..." link.
    private class ActivateCommentEditorAction extends AAction {

        @Override
        public String getLabel() {
            return "Create a comment...";
        }

        @Override
        protected void onExecute() {
            activateEditor();
        }

    }

    // Action behind each page-navigator link; switches to the given page.
    private class ShowPageAction extends AAction {

        private String label;
        private int page;

        public ShowPageAction(String label, int page) {
            super();
            this.label = label;
            this.page = page;
        }

        @Override
        public String getLabel() {
            return label;
        }

        @Override
        public String getTooltip() {
            return "Show page " + page;
        }

        @Override
        protected void onExecute() {
            currentPage = page;
            update();
        }

    }

}
package com.andyadc.idea.common.serialization.json; import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.serializer.SerializeConfig; import com.alibaba.fastjson.serializer.SerializerFeature; import com.alibaba.fastjson.serializer.SimpleDateFormatSerializer; import com.andyadc.idea.common.IdeaConstants; /** * @author andaicheng * @version 2016/12/30 */ public class FastjsonSerializer { private static final SerializeConfig SERIALIZE_CONFIG; private static final SerializerFeature[] SERIALIZER_FEATURE = { SerializerFeature.WriteMapNullValue, SerializerFeature.WriteNullStringAsEmpty, //null""null SerializerFeature.WriteNullNumberAsZero, //null0null SerializerFeature.WriteNullBooleanAsFalse, //Booleannullfalsenull SerializerFeature.WriteNullListAsEmpty, //listnull[]null SerializerFeature.DisableCircularReferenceDetect }; static { SERIALIZE_CONFIG = new SerializeConfig(); SERIALIZE_CONFIG.put(java.util.Date.class, new SimpleDateFormatSerializer(IdeaConstants.DATE_FORMAT)); SERIALIZE_CONFIG.put(java.sql.Date.class, new SimpleDateFormatSerializer(IdeaConstants.DATE_FORMAT)); } private FastjsonSerializer() { } public static String toJSON(Object o) { return JSON.toJSONString(o, SERIALIZE_CONFIG, SERIALIZER_FEATURE); } public static <T> T fromJson(String json, Class<T> clazz) { return JSON.parseObject(json, clazz); } }
package seedu.taskitty.logic.parser;

import seedu.taskitty.commons.exceptions.IllegalValueException;
import seedu.taskitty.commons.util.StringUtil;
import seedu.taskitty.commons.util.TaskUtil;
import seedu.taskitty.logic.commands.*;
import seedu.taskitty.model.tag.Tag;
import seedu.taskitty.model.task.Task;
import seedu.taskitty.model.task.TaskDate;
import seedu.taskitty.model.task.TaskTime;

import static seedu.taskitty.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static seedu.taskitty.commons.core.Messages.MESSAGE_UNKNOWN_COMMAND;

import java.text.SimpleDateFormat;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import com.joestelmach.natty.DateGroup;
import com.joestelmach.natty.Parser;

import javafx.util.Pair;

/**
 * Parses user input.
 */
public class CommandParser {

    public static final String COMMAND_QUOTE_SYMBOL = "\"";
    public static final String EMPTY_STRING = "";
    public static final int NOT_FOUND = -1;
    public static final int STRING_START = 0;

    /**
     * Used for initial separation of command word and args.
     */
    private static final Pattern BASIC_COMMAND_FORMAT = Pattern.compile("(?<commandWord>\\S+)(?<arguments>.*)");

    private static final Pattern TASK_INDEX_ARGS_FORMAT = Pattern.compile("(?<targetIndex>.+)");

    //Used for checking for number date formats in arguments (e.g. "12/3" or "12-3")
    private static final Pattern LOCAL_DATE_FORMAT = Pattern.compile(".* (?<arguments>\\d(\\d)?[/-]\\d(\\d)?).*");

    private static final Pattern KEYWORDS_ARGS_FORMAT =
            Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace

    private static final Pattern TASK_DATA_ARGS_FORMAT = //Tags must be at the end
            Pattern.compile("(?<arguments>[\\p{Graph} ]+)"); // \p{Graph} is \p{Alnum} or \p{Punct}

    /**
     * Parses user input into command for execution.
     *
     * @param userInput full user input string
     * @return the command based on the user input
     */
    public Command parseCommand(String userInput) {
        final Matcher matcher = BASIC_COMMAND_FORMAT.matcher(userInput.trim());
        if (!matcher.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_ERROR));
        }

        final String commandWord = matcher.group("commandWord");
        final String arguments = matcher.group("arguments");
        switch (commandWord) {

        case AddCommand.COMMAND_WORD:
            return prepareAdd(arguments);

        case DeleteCommand.COMMAND_WORD:
            return prepareDelete(arguments);

        case EditCommand.COMMAND_WORD:
            return prepareEdit(arguments);

        case ClearCommand.COMMAND_WORD:
            return new ClearCommand();

        case FindCommand.COMMAND_WORD:
            return prepareFind(arguments);

        case ExitCommand.COMMAND_WORD:
            return new ExitCommand();

        case HelpCommand.COMMAND_WORD:
            return new HelpCommand();

        case UndoCommand.COMMAND_WORD:
            return new UndoCommand();

        case RedoCommand.COMMAND_WORD:
            return new RedoCommand();

        case DoneCommand.COMMAND_WORD:
            return prepareDone(arguments);

        case ViewCommand.COMMAND_WORD:
            return prepareView(arguments);

        case SaveCommand.COMMAND_WORD:
            return prepareSave(arguments);

        default:
            return new IncorrectCommand(MESSAGE_UNKNOWN_COMMAND);
        }
    }

    //@@author A0135793W
    /**
     * Parses arguments in the context of the save command.
     *
     * @param argument the save-file path entered by the user
     * @return the prepared command, or IncorrectCommand if the path is invalid
     */
    private Command prepareSave(String argument) {
        try {
            return new SaveCommand(argument.trim());
        } catch (IllegalValueException ive) {
            return new IncorrectCommand(ive.getMessage());
        }
    }

    //@@author A0130853L
    /**
     * Parses arguments in the context of the view command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareView(String arguments) {
        if (arguments.trim().isEmpty()) {
            return new ViewCommand(); // view all upcoming uncompleted tasks, events and deadlines
        }
        if (arguments.trim().equals("done")) {
            return new ViewCommand("done"); // view done command
        }
        if (arguments.trim().equals("all")) {
            return new ViewCommand("all"); // view all command
        }
        String[] details = extractTaskDetailsNatty(arguments);
        if (details.length != 3) { // no date was successfully extracted
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT,
                    Command.MESSAGE_FORMAT + ViewCommand.MESSAGE_PARAMETER));
        } else {
            assert details[1] != null; // contains date
            return new ViewCommand(details[1]);
        }
    }

    //@@author A0139930B
    /**
     * Parses arguments in the context of the add task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareAdd(String args) {
        final Matcher matcher = TASK_DATA_ARGS_FORMAT.matcher(args.trim());
        // Validate arg string format
        if (!matcher.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT,
                    Command.MESSAGE_FORMAT + AddCommand.MESSAGE_PARAMETER));
        }
        try {
            String arguments = matcher.group("arguments");
            String taskDetailArguments = getTaskDetailArguments(arguments);
            String tagArguments = getTagArguments(arguments);

            return new AddCommand(
                    extractTaskDetailsNatty(taskDetailArguments),
                    getTagsFromArgs(tagArguments),
                    args
            );
        } catch (IllegalValueException ive) {
            return new IncorrectCommand(ive.getMessage());
        }
    }

    /**
     * Parses the argument to get a string of all the relevant details of the task
     * (everything before the first tag prefix).
     *
     * @param arguments command args string without command word
     */
    private String getTaskDetailArguments(String arguments) {
        int detailLastIndex = arguments.indexOf(Tag.TAG_PREFIX);
        if (detailLastIndex == NOT_FOUND) {
            detailLastIndex = arguments.length();
        }

        return arguments.substring(STRING_START, detailLastIndex).trim();
    }

    /**
     * Parses the argument to get a string of all tags, including the Tag prefix.
     * Returns an empty string when no tag prefix is present.
     *
     * @param arguments command args string without command word
     */
    private String getTagArguments(String arguments) {
        int tagStartIndex = arguments.indexOf(Tag.TAG_PREFIX);
        if (tagStartIndex == NOT_FOUND) {
            tagStartIndex = arguments.length();
        }

        return arguments.substring(tagStartIndex);
    }

    /**
     * Extracts the task details into a String array representing the name, date, time.
     * Details are arranged according to index shown in Task.
     * Dates/times are parsed with the Natty natural-language parser.
     *
     * @param dataArguments command args string with only name, date, time arguments
     */
    private String[] extractTaskDetailsNatty(String dataArguments) {
        String dataArgumentsNattyFormat = convertToNattyDateFormat(dataArguments);
        int nameEndIndex = dataArgumentsNattyFormat.length();
        ArrayList<String> details = new ArrayList<String>();

        //Attempt to extract name out if it is surrounded by quotes
        nameEndIndex = dataArgumentsNattyFormat.lastIndexOf(COMMAND_QUOTE_SYMBOL);
        boolean isNameExtracted = false;
        if (nameEndIndex != NOT_FOUND) {
            int nameStartIndex = dataArgumentsNattyFormat.indexOf(COMMAND_QUOTE_SYMBOL);
            if (nameStartIndex == NOT_FOUND) {
                nameStartIndex = STRING_START;
            }

            //+1 because we want the quote included in the string
            String nameDetail = dataArgumentsNattyFormat.substring(nameStartIndex, nameEndIndex + 1);
            //remove name from dataArguments
            dataArgumentsNattyFormat = dataArgumentsNattyFormat.replace(nameDetail, EMPTY_STRING);
            //remove quotes from nameDetail
            nameDetail = nameDetail.replaceAll(COMMAND_QUOTE_SYMBOL, EMPTY_STRING);
            details.add(Task.TASK_COMPONENT_INDEX_NAME, nameDetail);
            isNameExtracted = true;
        }

        Parser dateTimeParser = new Parser();
        List<DateGroup> dateGroups = dateTimeParser.parse(dataArgumentsNattyFormat);
        nameEndIndex = dataArgumentsNattyFormat.length();
        for (DateGroup group : dateGroups) {
            List<Date> dates = group.getDates();
            //Natty's getPosition returns 1 based position
            //-1 because we want the 0 based position
            nameEndIndex = Math.min(nameEndIndex, group.getPosition() - 1);
            for (Date date : dates) {
                details.add(extractLocalDate(date));
                details.add(extractLocalTime(date));
            }
        }

        // Unquoted name: everything before the first date expression.
        if (!isNameExtracted) {
            details.add(Task.TASK_COMPONENT_INDEX_NAME,
                    dataArgumentsNattyFormat.substring(STRING_START, nameEndIndex).trim());
        }

        String[] returnDetails = new String[details.size()];
        details.toArray(returnDetails);
        return returnDetails;
    }

    //@@author A0139052L
    /**
     * Converts any number formats of date from the local format to one which can be parsed by natty
     * @param arguments
     * @return arguments with converted dates if any
     */
    private String convertToNattyDateFormat(String arguments) {
        Matcher matchDate = LOCAL_DATE_FORMAT.matcher(arguments);
        if (matchDate.matches()) {
            String localDateString = matchDate.group("arguments");
            String dateSeparator = getDateSeparator(localDateString);
            return convertToNattyFormat(arguments, localDateString, dateSeparator);
        } else {
            return arguments;
        }
    }

    /**
     * Get the separator between day month and year in a date
     * @param localDateString the string representing the date
     * @return the separator character used in localDateString ("/" or "-")
     */
    private String getDateSeparator(String localDateString) {
        // if 2nd char in string is an integer, then the 3rd char must be the separator
        // else 2nd char is the separator
        if (StringUtil.isInteger(localDateString.substring(1, 2))) {
            return localDateString.substring(2, 3);
        } else {
            return localDateString.substring(1, 2);
        }
    }

    /**
     * Convert the local date format inside arguments into a format
     * which can be parsed by natty
     * @param arguments the full argument string
     * @param localDateString the localDate extracted out from arguments
     * @param dateSeparator the separator for the date extracted out
     * @return converted string where the date format has been converted from local to natty format
     */
    private String convertToNattyFormat(String arguments, String localDateString, String dateSeparator) {
        String[] dateComponents = localDateString.split(dateSeparator);
        int indexOfDate = arguments.indexOf(localDateString);
        String nattyDateString = swapDayAndMonth(dateComponents, dateSeparator);
        arguments = arguments.replace(localDateString, nattyDateString);
        String stringFromConvertedDate = arguments.substring(indexOfDate);
        String stringUpToConvertedDate = arguments.substring(0, indexOfDate);
        // Recurse on the prefix so every remaining local-format date is converted too.
        return convertToNattyDateFormat(stringUpToConvertedDate) + stringFromConvertedDate;
    }

    /**
     * Swaps the day and month component of the date
     * @param dateComponents the String array obtained after separating the date string
     * @param dateSeparator the Separator used in the date string
     * @return the date string with its day and month component swapped
     */
    private String swapDayAndMonth(String[] dateComponents, String dateSeparator) {
        StringBuilder nattyDateStringBuilder = new StringBuilder();
        nattyDateStringBuilder.append(dateComponents[1]);
        nattyDateStringBuilder.append(dateSeparator);
        nattyDateStringBuilder.append(dateComponents[0]);
        return nattyDateStringBuilder.toString();
    }

    //@@author A0139930B
    /**
     * Takes in a date from Natty and converts it into a string representing date
     * Format of date returned is according to TaskDate
     *
     * @param date retrieved using Natty
     */
    private String extractLocalDate(Date date) {
        SimpleDateFormat dateFormat = new SimpleDateFormat(TaskDate.DATE_FORMAT_STRING);
        return dateFormat.format(date);
    }

    /**
     * Takes in a date from Natty and converts it into a string representing time
     * Format of time returned is according to TaskTime
     *
     * @param date retrieved using Natty
     */
    private String extractLocalTime(Date date) {
        SimpleDateFormat timeFormat = new SimpleDateFormat(TaskTime.TIME_FORMAT_STRING);
        String currentTime = timeFormat.format(new Date());
        String inputTime = timeFormat.format(date);
        if (currentTime.equals(inputTime)) {
            //Natty parses the current time if string does not include time.
            //We want to ignore input when current time equal input time
            return null;
        }
        return inputTime;
    }

    //@@author A0139930B
    /**
     * Extracts the new task's tags from the add command's tag arguments string.
     * Merges duplicate tag strings.
     */
    private static Set<String> getTagsFromArgs(String tagArguments) throws IllegalValueException {
        // no tags
        if (tagArguments.isEmpty()) {
            return Collections.emptySet();
        }
        // replace first delimiter prefix, then split
        final Collection<String> tagStrings = Arrays.asList(tagArguments
                .replaceFirst(Tag.TAG_PREFIX, EMPTY_STRING)
                .split(Tag.TAG_PREFIX));
        return new HashSet<>(tagStrings);
    }

    //@@author A0139052L
    /**
     * Parses arguments in the context of the delete person command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareDelete(String args) {
        String dataArgs = args.trim();
        String[] indexes = dataArgs.split("\\s");
        ArrayList<Pair<Integer, Integer>> listOfIndexes = getIndexes(indexes);
        if (listOfIndexes == null) {
            return new IncorrectCommand(
                    String.format(MESSAGE_INVALID_COMMAND_FORMAT,
                            Command.MESSAGE_FORMAT + DeleteCommand.MESSAGE_PARAMETER));
        }
        return new DeleteCommand(listOfIndexes, args);
    }

    //@@author A0135793W
    /**
     * Parses arguments in the context of the mark as done command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareDone(String args) {
        String dataArgs = args.trim();
        String[] indexes = dataArgs.split("\\s");
        ArrayList<Pair<Integer, Integer>> listOfIndexes = getIndexes(indexes);
        if (listOfIndexes == null) {
            return new IncorrectCommand(
                    String.format(MESSAGE_INVALID_COMMAND_FORMAT,
                            Command.MESSAGE_FORMAT + DoneCommand.MESSAGE_PARAMETER));
        }
        return new DoneCommand(listOfIndexes, args);
    }

    /**@@author A0139052L
     *
     * Parses each index string in the array and adds them to a list if valid.
     * Supports single indexes ("3", "d2") and ranges ("1-4").
     * @param indexes the string array of indexes separated
     * @return a list of all valid indexes parsed or null if an invalid index was given
     */
    private ArrayList<Pair<Integer, Integer>> getIndexes(String[] indexes) {
        Pair<Integer, Integer> categoryAndIndex;
        ArrayList<Pair<Integer, Integer>> listOfIndexes = new ArrayList<Pair<Integer, Integer>>();
        for (String index : indexes) {
            if (index.contains("-")) {
                // Range form "a-b": expand into one pair per index.
                String[] splitIndex = index.split("-");
                categoryAndIndex = getCategoryAndIndex(splitIndex[0]);
                Optional<Integer> secondIndex = parseIndex(splitIndex[1]);
                if (!secondIndex.isPresent() || categoryAndIndex == null) {
                    return null;
                }
                int firstIndex = categoryAndIndex.getValue();
                int categoryIndex = categoryAndIndex.getKey();
                if (firstIndex >= secondIndex.get()) {
                    return null;
                }
                for (; firstIndex <= secondIndex.get(); firstIndex++) {
                    categoryAndIndex = new Pair<Integer, Integer>(categoryIndex, firstIndex);
                    listOfIndexes.add(categoryAndIndex);
                }
            } else {
                categoryAndIndex = getCategoryAndIndex(index);
                if (categoryAndIndex == null) {
                    return null;
                }
                listOfIndexes.add(categoryAndIndex);
            }
        }
        return listOfIndexes;
    }

    /**
     * Parses arguments in the context of the edit task command.
* * @param args full command args string * @return the prepared command */ private Command prepareEdit(String args) { String[] splitArgs = args.trim().split(" "); if (splitArgs.length < 2) { return new IncorrectCommand( String.format(MESSAGE_INVALID_COMMAND_FORMAT, Command.MESSAGE_FORMAT + EditCommand.MESSAGE_PARAMETER)); } Pair<Integer, Integer> categoryAndIndexPair = getCategoryAndIndex(splitArgs[0]); if (categoryAndIndexPair == null) { return new IncorrectCommand( String.format(MESSAGE_INVALID_COMMAND_FORMAT, Command.MESSAGE_FORMAT + EditCommand.MESSAGE_PARAMETER)); } try { String arguments = ""; for (int i = 1; i<splitArgs.length; i++){ arguments = arguments + splitArgs[i] + " "; } arguments.substring(0, arguments.length() - 1); String taskDetailArguments = getTaskDetailArguments(arguments); String tagArguments = getTagArguments(arguments); return new EditCommand( extractTaskDetailsNatty(taskDetailArguments), getTagsFromArgs(tagArguments), categoryAndIndexPair.getValue(), categoryAndIndexPair.getKey(), args); } catch (IllegalValueException ive) { return new IncorrectCommand(ive.getMessage()); } } //@@author A0139052L /** * Parses the string and returns the categoryIndex and the index if a valid one was given * @param args * @return an int array with categoryIndex and index in 0 and 1 index respectively */ private Pair<Integer, Integer> getCategoryAndIndex(String args) { if (args.trim().equals(EMPTY_STRING)) { return null; } // category index should be the first char in the string Optional<Integer> checkForCategory = parseIndex(args.substring(0, 1)); Optional<Integer> index; int categoryIndex; if (checkForCategory.isPresent()){ index = parseIndex(args); // give the default category index if none was provided categoryIndex = TaskUtil.getDefaultCategoryIndex(); } else { // index should be the rest of the string if category char is present index = parseIndex(args.substring(1)); categoryIndex = TaskUtil.getCategoryIndex(args.charAt(0)); } if (!index.isPresent()){ 
return null; } return new Pair<Integer, Integer>(categoryIndex, index.get()); } //@@author /** * Returns the specified index in the {@code command} IF a positive unsigned integer is given as the index. * Returns an {@code Optional.empty()} otherwise. */ private Optional<Integer> parseIndex(String command) { final Matcher matcher = TASK_INDEX_ARGS_FORMAT.matcher(command.trim()); if (!matcher.matches()) { return Optional.empty(); } String index = matcher.group("targetIndex"); if(!StringUtil.isUnsignedInteger(index)){ return Optional.empty(); } return Optional.of(Integer.parseInt(index)); } /** * Parses arguments in the context of the find person command. * * @param args full command args string * @return the prepared command */ private Command prepareFind(String args) { final Matcher matcher = KEYWORDS_ARGS_FORMAT.matcher(args.trim()); if (!matcher.matches()) { return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE)); } // keywords delimited by whitespace final String[] keywords = matcher.group("keywords").split("\\s+"); final Set<String> keywordSet = new HashSet<>(Arrays.asList(keywords)); return new FindCommand(keywordSet); } }
package seedu.unburden.logic.commands; import java.util.List; import seedu.unburden.commons.core.Messages; import seedu.unburden.commons.exceptions.*; import seedu.unburden.commons.core.UnmodifiableObservableList; import seedu.unburden.model.tag.UniqueTagList; import seedu.unburden.model.tag.UniqueTagList.DuplicateTagException; import seedu.unburden.model.task.Date; import seedu.unburden.model.task.Name; import seedu.unburden.model.task.ReadOnlyTask; import seedu.unburden.model.task.Task; import seedu.unburden.model.task.TaskDescription; import seedu.unburden.model.task.Time; import seedu.unburden.model.task.UniqueTaskList.*; /* * edit any field of the task\ * @@author A0139714B */ public class EditCommand extends Command { public static final String COMMAND_WORD = "edit"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Edits the task identified by the index number used in the last task listing.\n" + "Parameters: INDEX (must be a positive integer) TASKNAME i/TASKDESCRIPTION d/DATE s/STARTTIME e/ENDTIME" + "Example: " + COMMAND_WORD + " 1 meeting with boss i/project presentation d/23-12-2016 s/1200 e/1300" ; public static final String MESSAGE_EDIT_TASK_SUCCESS = "Updated Task: %1$s\n"; public static final String MESSAGE_EDIT_FAIL = "Editing has failed. 
Please check the details and try again"; private final int targetIndex; private final Task toEdit; private final String newName, newTaskDescription, newDate, newStartTime, newEndTime; public EditCommand(int index, String newName, String newTaskDescription, String newDate, String newStartTime, String newEndTime) throws IllegalValueException { this.targetIndex = index; if (newName == null) { this.newName = ""; //dummy value } else { this.newName = newName; } if (newTaskDescription == null) { this.newTaskDescription = ""; //dummy value } else { this.newTaskDescription = newTaskDescription; } if (newDate == null) { this.newDate = ""; //dummy value } else { this.newDate = newDate; } if (newStartTime == null) { this.newStartTime = ""; //dummy value } else { this.newStartTime = newStartTime; } if (newEndTime == null) { this.newEndTime = ""; //dummy value } else { this.newEndTime = newEndTime; } this.toEdit = new Task(new Name(this.newName), new TaskDescription(this.newTaskDescription), new Date(this.newDate), new Time(this.newStartTime), new Time(this.newEndTime), new UniqueTagList()); } @Override public CommandResult execute() throws IllegalValueException{ UnmodifiableObservableList<ReadOnlyTask> lastShownList = model.getFilteredTaskList(); if (lastShownList.size() < targetIndex) { indicateAttemptToExecuteIncorrectCommand(); return new CommandResult(Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX); } ReadOnlyTask taskToEdit = lastShownList.get(targetIndex - 1); try { model.saveToPrevLists(); model.editTask(taskToEdit, toEdit); overdueOrNot(); return new CommandResult(String.format(MESSAGE_EDIT_TASK_SUCCESS, lastShownList.get(targetIndex - 1))); } catch (TaskNotFoundException ee) { return new CommandResult(Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX); } catch (IllegalValueException e) { return new CommandResult(MESSAGE_EDIT_FAIL); } } //This method checks the entire list to check for overdue tasks private void overdueOrNot() throws IllegalValueException, 
DuplicateTagException { List<ReadOnlyTask> currentTaskList= model.getListOfTask().getTaskList(); for(ReadOnlyTask task : currentTaskList){ if(((Task) task).checkOverDue()){ ((Task) task).setOverdue(); } else{ ((Task) task).setNotOverdue(); } } } }
package sizebay.catalog.client;

import sizebay.catalog.client.model.TenantCountry;
import sizebay.catalog.client.model.TenantStatus;

/**
 * Mutable value holder with the basic identifying details of a tenant.
 * Plain explicit accessors — equivalent to the Lombok-generated
 * {@code @Getter}/{@code @Setter} pair.
 */
public class TenantBasicDetails {

    private String name;
    private String domain;
    private TenantCountry country;
    private TenantStatus status;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDomain() {
        return domain;
    }

    public void setDomain(String domain) {
        this.domain = domain;
    }

    public TenantCountry getCountry() {
        return country;
    }

    public void setCountry(TenantCountry country) {
        this.country = country;
    }

    public TenantStatus getStatus() {
        return status;
    }

    public void setStatus(TenantStatus status) {
        this.status = status;
    }
}
package uk.co.adaptivelogic.jbehavetogherkin;

import org.jbehave.core.model.Scenario;
import org.jbehave.core.model.Story;
import org.jbehave.core.parsers.RegexStoryParser;

import java.io.*;
import java.nio.charset.Charset;

/**
 * Command-line filter: reads a JBehave story from stdin and writes its
 * scenario steps to stdout.
 */
public class Main {

    public static void main(String[] args) {
        InputStreamReader jBehaveIn = new InputStreamReader(System.in, Charset.defaultCharset());
        OutputStreamWriter gherkinOut = new OutputStreamWriter(System.out, Charset.defaultCharset());
        translate(jBehaveIn, gherkinOut);
    }

    /** Reads a story from the reader, converts it, and writes the result. */
    private static void translate(InputStreamReader jBehaveIn, OutputStreamWriter gherkinOut) {
        Story jbehave = readJBehave(jBehaveIn);
        String gherkin = translate(jbehave);
        writeGherkin(gherkinOut, gherkin);
    }

    /** Writes the translated text and closes the writer (which flushes it). */
    private static void writeGherkin(OutputStreamWriter gherkinOut, String gherkin) {
        try {
            gherkinOut.write(gherkin);
            gherkinOut.close();
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    }

    /**
     * Renders every step of every scenario, one step per line.
     * <p>
     * BUG FIX: the original appended steps back-to-back with no separator,
     * producing a single unreadable line of output.
     */
    private static String translate(Story story) {
        StringBuilder builder = new StringBuilder();
        for (Scenario scenario : story.getScenarios()) {
            for (String step : scenario.getSteps()) {
                builder.append(step).append('\n');
            }
        }
        return builder.toString();
    }

    /**
     * Reads the whole input and parses it as a JBehave story.
     * <p>
     * BUG FIX: the original concatenated lines without their separators, which
     * collapsed the story onto one line; the JBehave story format is
     * line-oriented, so scenario and step markers were no longer recognised by
     * the parser. Re-append a newline after each line.
     */
    private static Story readJBehave(InputStreamReader jBehaveIn) {
        BufferedReader bufferedReader = new BufferedReader(jBehaveIn);
        StringBuilder jbehaveBuilder = new StringBuilder();
        try {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                jbehaveBuilder.append(line).append('\n');
            }
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
        RegexStoryParser storyParser = new RegexStoryParser();
        return storyParser.parseStory(jbehaveBuilder.toString());
    }
}
package uk.co.qmunity.lib.part.compat.fmp;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;

import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.util.MovingObjectPosition;
import net.minecraft.util.Vec3;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;

import org.lwjgl.opengl.GL11;

import uk.co.qmunity.lib.QLModInfo;
import uk.co.qmunity.lib.QmunityLib;
import uk.co.qmunity.lib.client.render.RenderHelper;
import uk.co.qmunity.lib.client.render.RenderMultipart;
import uk.co.qmunity.lib.part.IMicroblock;
import uk.co.qmunity.lib.part.IPart;
import uk.co.qmunity.lib.part.IPartCenter;
import uk.co.qmunity.lib.part.IPartCollidable;
import uk.co.qmunity.lib.part.IPartInteractable;
import uk.co.qmunity.lib.part.IPartOccluding;
import uk.co.qmunity.lib.part.IPartRedstone;
import uk.co.qmunity.lib.part.IPartSelectable;
import uk.co.qmunity.lib.part.IPartSelectableCustom;
import uk.co.qmunity.lib.part.IPartSolid;
import uk.co.qmunity.lib.part.IPartThruHole;
import uk.co.qmunity.lib.part.IPartTicking;
import uk.co.qmunity.lib.part.IPartUpdateListener;
import uk.co.qmunity.lib.part.ITilePartHolder;
import uk.co.qmunity.lib.part.PartRegistry;
import uk.co.qmunity.lib.part.compat.MultipartSystem;
import uk.co.qmunity.lib.part.compat.OcclusionHelper;
import uk.co.qmunity.lib.part.compat.PartUpdateManager;
import uk.co.qmunity.lib.raytrace.QMovingObjectPosition;
import uk.co.qmunity.lib.raytrace.RayTracer;
import uk.co.qmunity.lib.vec.Vec3d;
import uk.co.qmunity.lib.vec.Vec3dCube;
import uk.co.qmunity.lib.vec.Vec3i;
import codechicken.lib.data.MCDataInput;
import codechicken.lib.data.MCDataOutput;
import codechicken.lib.raytracer.ExtendedMOP;
import codechicken.lib.raytracer.IndexedCuboid6;
import codechicken.lib.vec.Cuboid6;
import codechicken.lib.vec.Vector3;
import codechicken.microblock.CommonMicroblock;
import codechicken.microblock.ISidedHollowConnect;
import codechicken.multipart.INeighborTileChange;
import codechicken.multipart.IRedstonePart;
import codechicken.multipart.NormalOcclusionTest;
import codechicken.multipart.NormallyOccludedPart;
import codechicken.multipart.PartMap;
import codechicken.multipart.TMultiPart;
import codechicken.multipart.TNormalOcclusion;
import codechicken.multipart.TSlottedPart;
import codechicken.multipart.scalatraits.TSlottedTile;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;

/**
 * Bridge part that hosts QmunityLib {@link IPart}s inside a Forge Multipart
 * (FMP) {@link TMultiPart}. Delegates collision, occlusion, raytracing,
 * rendering, redstone, NBT persistence and lifecycle callbacks to the
 * contained parts, which are keyed by randomly generated UUID strings.
 */
public class FMPPart extends TMultiPart implements ITilePartHolder, TNormalOcclusion, IRedstonePart, INeighborTileChange,
        IFMPPart, ISidedHollowConnect, TSlottedPart {

    // Contained parts keyed by a per-part UUID string (see genIdentifier()).
    private Map<String, IPart> parts = new HashMap<String, IPart>();
    // Parts added this session, pending their onAdded()/onLoaded() callback on first tick.
    private List<IPart> added = new ArrayList<IPart>();
    // Set when load() finds no parts; causes self-removal on the next server tick.
    private boolean shouldDieInAFire = false;
    // True once NBT load has run (switches first-tick callbacks from onAdded to onLoaded).
    private boolean loaded = false;
    // True when this holder was produced by block->multipart conversion (see onConverted()).
    private boolean converted = false;
    // Simulated holders skip all world/neighbor notifications and listener callbacks.
    private final boolean simulated;

    public FMPPart(boolean simulated) {

        this.simulated = simulated;
    }

    public FMPPart() {

        this(false);
    }

    /** Wraps an existing part map, re-parenting every part to this holder. */
    public FMPPart(Map<String, IPart> parts) {

        this();
        this.parts = parts;
        for (String s : parts.keySet())
            parts.get(s).setParent(this);
    }

    @Override
    public String getType() {

        return QLModInfo.MODID + "_multipart";
    }

    /** Returns only parts that still have a parent (skips ones mid-removal). */
    @Override
    public List<IPart> getParts() {

        List<IPart> parts = new ArrayList<IPart>();
        for (String s : this.parts.keySet()) {
            IPart p = this.parts.get(s);
            if (p.getParent() != null)
                parts.add(p);
        }
        return parts;
    }

    /** Selection boxes for FMP; falls back to a full cube when no part is selectable. */
    @Override
    public Iterable<IndexedCuboid6> getSubParts() {

        List<IndexedCuboid6> cubes = new ArrayList<IndexedCuboid6>();
        for (IPart p : getParts())
            if (p instanceof IPartSelectable)
                for (Vec3dCube c : ((IPartSelectable) p).getSelectionBoxes())
                    cubes.add(new IndexedCuboid6(0, new Cuboid6(c.clone().expand(0.001).toAABB())));
        if (cubes.size() == 0)
            cubes.add(new IndexedCuboid6(0, new Cuboid6(0, 0, 0, 1, 1, 1)));
        return cubes;
    }

    @Override
    public ExtendedMOP collisionRayTrace(Vec3 start, Vec3 end) {

        QMovingObjectPosition qmop = rayTrace(new Vec3d(start), new Vec3d(end));
        if (qmop == null)
            return null;
        // Side effect: sets the block's render bounds to the hit part's cube.
        new Cuboid6(qmop.getCube().clone().expand(0.001).toAABB()).setBlockBounds(tile().getBlockType());
        Vec3 v = qmop.hitVec.subtract(start);
        // Third argument is the squared distance from ray start to the hit.
        return new ExtendedMOP(qmop, 0, v.xCoord * v.xCoord + v.yCoord * v.yCoord + v.zCoord * v.zCoord);
    }

    private boolean firstTick = true;

    /**
     * Per-tick update. On the first tick, fires the deferred lifecycle callback
     * (onConverted / onAdded / onLoaded) and syncs to clients; every tick it
     * ticks IPartTicking parts and removes itself server-side when empty.
     */
    @Override
    public void update() {

        if (firstTick) {
            if (converted) {
                for (IPart p : getParts())
                    if (p instanceof IPartUpdateListener)
                        ((IPartUpdateListener) p).onConverted();
            } else {
                if (!loaded) {
                    for (IPart p : added)
                        if (p instanceof IPartUpdateListener)
                            ((IPartUpdateListener) p).onAdded();
                } else {
                    // NOTE(review): unlike the branch above, this cast has no
                    // instanceof guard — a non-listener part in 'added' would
                    // throw ClassCastException here. Confirm intent.
                    for (IPart p : added)
                        ((IPartUpdateListener) p).onLoaded();
                }
            }
            if (!world().isRemote)
                sendDescUpdate();
            firstTick = false;
        }

        for (IPart p : getParts()) {
            if (p instanceof IPartTicking)
                ((IPartTicking) p).update();
        }

        if (!world().isRemote && (shouldDieInAFire || getParts().size() == 0))
            tile().remPart(this);
    }

    /** Persists every contained part as {id, type, data} compounds under "parts". */
    @Override
    public void save(NBTTagCompound tag) {

        super.save(tag);
        NBTTagList l = new NBTTagList();
        for (Entry<String, IPart> e : getPartMap().entrySet()) {
            NBTTagCompound t = new NBTTagCompound();
            t.setString("id", e.getKey());
            t.setString("type", e.getValue().getType());
            NBTTagCompound data = new NBTTagCompound();
            e.getValue().writeToNBT(data);
            t.setTag("data", data);
            l.appendTag(t);
        }
        tag.setTag("parts", l);
    }

    /** Restores parts from NBT, recreating any missing part via PartRegistry. */
    @Override
    public void load(NBTTagCompound tag) {

        super.load(tag);
        NBTTagList l = tag.getTagList("parts", new NBTTagCompound().getId());
        for (int i = 0; i < l.tagCount(); i++) {
            NBTTagCompound t = l.getCompoundTagAt(i);
            String id = t.getString("id");
            IPart p = getPart(id);
            if (p == null) {
                p = PartRegistry.createPart(t.getString("type"), false);
                if (p == null)
                    continue;
                p.setParent(this);
                parts.put(id, p);
            }
            NBTTagCompound data = t.getCompoundTag("data");
            p.readFromNBT(data);
        }
        // Nothing usable was restored: schedule self-destruction (see update()).
        if (getParts().size() == 0)
            shouldDieInAFire = true;
        loaded = true;
        if (tile() != null && getWorld() != null)
            getWorld().markBlockRangeForRenderUpdate(getX(), getY(), getZ(), getX(), getY(), getZ());
    }

    /** Sends {count, then per part: id, type, update payload} to the client. */
    @Override
    public void writeDesc(MCDataOutput packet) {

        super.writeDesc(packet);
        FMPDataOutput buffer = new FMPDataOutput(packet);
        try {
            buffer.writeInt(getPartMap().size());
            for (Entry<String, IPart> e : getPartMap().entrySet()) {
                buffer.writeUTF(e.getKey());
                buffer.writeUTF(e.getValue().getType());
                e.getValue().writeUpdateData(buffer, -1);
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    /** Client-side mirror of writeDesc(); instantiates unknown parts on the fly. */
    @Override
    public void readDesc(MCDataInput packet) {

        super.readDesc(packet);
        FMPDataInput buffer = new FMPDataInput(packet);
        try {
            int amt = buffer.readInt();
            for (int i = 0; i < amt; i++) {
                String id = buffer.readUTF();
                String type = buffer.readUTF();
                IPart p = getPart(id);
                if (p == null) {
                    p = PartRegistry.createPart(type, true);
                    if (p == null)
                        continue;
                    p.setParent(this);
                    parts.put(id, p);
                }
                p.readUpdateData(buffer, -1);
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    // Part holder methods

    @Override
    public World getWorld() {

        return world();
    }

    @Override
    public int getX() {

        return x();
    }

    @Override
    public int getY() {

        return y();
    }

    @Override
    public int getZ() {

        return z();
    }

    /**
     * Adds a part under a fresh UUID key and, unless simulated, fires listener
     * callbacks, syncs to clients (only when this was not the first part) and
     * notifies neighbors.
     */
    @Override
    public void addPart(IPart part) {

        int before = parts.size();

        parts.put(genIdentifier(), part);
        part.setParent(this);

        if (!simulated) {
            if (part instanceof IPartUpdateListener)
                ((IPartUpdateListener) part).onAdded();

            for (IPart p : getParts())
                if (p != part && p instanceof IPartUpdateListener)
                    ((IPartUpdateListener) p).onPartChanged(part);

            if (before > 0)
                PartUpdateManager.addPart(this, part);

            if (tile() != null) {
                tile().markDirty();
                getWorld().markBlockRangeForRenderUpdate(getX(), getY(), getZ(), getX(), getY(), getZ());
                if (!getWorld().isRemote && before > 0)
                    getWorld().notifyBlocksOfNeighborChange(getX(), getY(), getZ(), tile().blockType);
            }
        }
    }

    /** Removes a part, un-parents it, notifies listeners/neighbors, rebinds slots. */
    @Override
    public boolean removePart(IPart part) {

        if (part == null)
            return false;
        if (!parts.containsValue(part))
            return false;

        if (!simulated) {
            PartUpdateManager.removePart(this, part);
            if (part instanceof IPartUpdateListener)
                ((IPartUpdateListener) part).onRemoved();
        }

        String id = getIdentifier(part);
        parts.remove(id);
        part.setParent(null);

        if (!simulated) {
            for (IPart p : getParts())
                if (p != part && p instanceof IPartUpdateListener)
                    ((IPartUpdateListener) p).onPartChanged(part);

            tile().markDirty();
            getWorld().markBlockRangeForRenderUpdate(getX(), getY(), getZ(), getX(), getY(), getZ());
            if (!getWorld().isRemote)
                getWorld().notifyBlocksOfNeighborChange(getX(), getY(), getZ(), tile().blockType);

            refreshSlots();
        }

        return true;
    }

    /** Re-registers this part in the tile's slot map after a removal. */
    private void refreshSlots() {

        if (tile() instanceof TSlottedTile) {
            TSlottedTile t = (TSlottedTile) tile();
            TMultiPart[] old = t.v_partMap();
            TMultiPart[] parts = new TMultiPart[old.length];
            for (int i = 0; i < old.length; i++)
                if (old[i] != null && old[i] != this)
                    parts[i] = old[i];
            t.v_partMap_$eq(parts);
            t.bindPart(this);
        }
    }

    /** Generates a UUID string not already used as a part key. */
    private String genIdentifier() {

        String s = null;
        do {
            s = UUID.randomUUID().toString();
        } while (parts.containsKey(s));
        return s;
    }

    /** Reverse lookup: key of the given part, or null if absent. */
    private String getIdentifier(IPart part) {

        for (String s : parts.keySet())
            if (parts.get(s).equals(part))
                return s;
        return null;
    }

    private IPart getPart(String id) {

        for (String s : parts.keySet())
            if (s.equals(id))
                return parts.get(s);
        return null;
    }

    /** Entity-collision, FMP-occlusion and QmunityLib-occlusion checks for a candidate part. */
    @Override
    public boolean canAddPart(IPart part) {

        if (tile() == null)
            return true;

        if (part instanceof IPartCollidable) {
            List<Vec3dCube> cubes = new ArrayList<Vec3dCube>();
            ((IPartCollidable) part).addCollisionBoxesToList(cubes, null);
            for (Vec3dCube c : cubes)
                if (!getWorld().checkNoEntityCollision(c.clone().add(getX(), getY(), getZ()).toAABB()))
                    return false;
        }
        if (part instanceof IPartOccluding) {
            for (Vec3dCube b : ((IPartOccluding) part).getOcclusionBoxes()) {
                NormallyOccludedPart nop = new NormallyOccludedPart(new Cuboid6(b.toAABB()));
                try {
                    if (!tile().canAddPart(nop))
                        return false;
                } catch (Exception ex) {
                    return false;
                }
            }
        }

        return OcclusionHelper.occlusionTest(this, part);
    }

    /** Closest selectable-part hit along the segment, or null. */
    @Override
    public QMovingObjectPosition rayTrace(Vec3d start, Vec3d end) {

        QMovingObjectPosition closest = null;
        double dist = Double.MAX_VALUE;

        for (IPart p : getParts()) {
            if (p instanceof IPartSelectable) {
                QMovingObjectPosition mop = ((IPartSelectable) p).rayTrace(start, end);
                if (mop == null)
                    continue;
                double d = start.distanceTo(new Vec3d(mop.hitVec));
                if (d < dist) {
                    closest = mop;
                    dist = d;
                }
            }
        }

        return closest;
    }

    @Override
    public void sendUpdatePacket(IPart part, int channel) {

        if (tile() != null && world() != null && getParts().contains(part))
            PartUpdateManager.sendPartUpdate(this, part, channel);
    }

    @Override
    @SideOnly(Side.CLIENT)
    public boolean renderStatic(Vector3 pos, int pass) {

        // NOTE(review): 'did' is never set to true, so this always reports that
        // nothing was rendered even though parts are drawn. Confirm whether FMP
        // uses the return value here.
        boolean did = false;

        RenderBlocks renderer = RenderBlocks.getInstance();
        RenderHelper.instance.setRenderCoords(getWorld(), (int) pos.x, (int) pos.y, (int) pos.z);
        renderer.blockAccess = getWorld();

        for (IPart p : getParts()) {
            if (p.getParent() != null) {
                if (p.shouldRenderOnPass(pass)) {
                    p.renderStatic(new Vec3i((int) pos.x, (int) pos.y, (int) pos.z), RenderHelper.instance, renderer, pass);
                    RenderHelper.instance.resetRenderedSides();
                    RenderHelper.instance.resetTextureRotations();
                    RenderHelper.instance.resetTransformations();
                    RenderHelper.instance.setColor(0xFFFFFF);
                }
            }
        }

        renderer.blockAccess = null;
        RenderHelper.instance.reset();

        return did;
    }

    @Override
    @SideOnly(Side.CLIENT)
    public void renderDynamic(Vector3 pos, float frame, int pass) {

        GL11.glPushMatrix();
        {
            GL11.glTranslated(pos.x, pos.y, pos.z);
            for (IPart p : getParts()) {
                if (p.getParent() != null) {
                    GL11.glPushMatrix();
                    if (p.shouldRenderOnPass(pass))
                        p.renderDynamic(new Vec3d(0, 0, 0), frame, pass);
                    GL11.glPopMatrix();
                }
            }
        }
        GL11.glPopMatrix();
    }

    /** Draws block-breaking cracks on the part currently under the crosshair. */
    @Override
    @SideOnly(Side.CLIENT)
    public void drawBreaking(RenderBlocks renderBlocks) {

        QMovingObjectPosition mop = rayTrace(RayTracer.instance().getStartVector(Minecraft.getMinecraft().thePlayer), RayTracer.instance()
                .getEndVector(Minecraft.getMinecraft().thePlayer));
        if (mop == null || mop.getPart() == null)
            return;

        RenderHelper.instance.setRenderCoords(getWorld(), getX(), getY(), getZ());
        RenderMultipart.renderBreaking(getWorld(), getX(), getY(), getZ(), renderBlocks, mop);
        RenderHelper.instance.reset();
    }

    /** Delegates custom selection highlight to IPartSelectableCustom parts only. */
    @Override
    @SideOnly(Side.CLIENT)
    public boolean drawHighlight(MovingObjectPosition hit, EntityPlayer player, float frame) {

        QMovingObjectPosition mop = rayTrace(RayTracer.instance().getStartVector(Minecraft.getMinecraft().thePlayer), RayTracer.instance()
                .getEndVector(Minecraft.getMinecraft().thePlayer));
        if (mop == null || mop.getPart() == null || !(mop.getPart() instanceof IPartSelectableCustom))
            return false;

        return ((IPartSelectableCustom) mop.getPart()).drawHighlight(mop, player, frame);
    }

    /** Collects world-space collision boxes (tagged with their part) intersecting 'bounds'. */
    @Override
    public void addCollisionBoxesToList(List<Vec3dCube> l, AxisAlignedBB bounds, Entity entity) {

        List<Vec3dCube> boxes = new ArrayList<Vec3dCube>();

        for (IPart p : getParts()) {
            if (p instanceof IPartCollidable) {
                List<Vec3dCube> boxes_ = new ArrayList<Vec3dCube>();
                ((IPartCollidable) p).addCollisionBoxesToList(boxes_, entity);
                for (Vec3dCube c : boxes_) {
                    Vec3dCube cube = c.clone();
                    cube.add(getX(), getY(), getZ());
                    cube.setPart(p);
                    boxes.add(cube);
                }
                boxes_.clear();
            }
        }

        for (Vec3dCube c : boxes) {
            if (c.toAABB().intersectsWith(bounds))
                l.add(c);
        }
    }

    /** FMP view of the collision boxes, translated back to block-local space. */
    @Override
    public Iterable<Cuboid6> getCollisionBoxes() {

        List<Cuboid6> cubes = new ArrayList<Cuboid6>();
        List<Vec3dCube> boxes = new ArrayList<Vec3dCube>();
        addCollisionBoxesToList(boxes, AxisAlignedBB.getBoundingBox(x(), y(), z(), x() + 1, y() + 1, z() + 1), null);
        for (Vec3dCube c : boxes)
            cubes.add(new Cuboid6(c.clone().add(-x(), -y(), -z()).toAABB()));
        return cubes;
    }

    @Override
    public void onNeighborChanged() {

        super.onNeighborChanged();
        if (simulated)
            return;

        for (IPart p : getParts())
            if (p != null && p instanceof IPartUpdateListener)
                ((IPartUpdateListener) p).onNeighborBlockChange();
    }

    /** Wraps a changed FMP microblock as an IPart before notifying listeners. */
    @Override
    public void onPartChanged(TMultiPart part) {

        IPart changed = null;
        if (part instanceof CommonMicroblock)
            changed = new FMPMicroblock((CommonMicroblock) part);

        for (IPart p : getParts())
            if (p != null && p instanceof IPartUpdateListener)
                ((IPartUpdateListener) p).onPartChanged(changed);
    }

    @Override
    public Iterable<ItemStack> getDrops() {

        List<ItemStack> l = new ArrayList<ItemStack>();
        for (IPart p : getParts()) {
            List<ItemStack> d = p.getDrops();
            if (d != null)
                l.addAll(d);
        }
        return l;
    }

    @Override
    public Iterable<Cuboid6> getOcclusionBoxes() {

        List<Cuboid6> cubes = new ArrayList<Cuboid6>();
        for (IPart p : getParts())
            if (p != null && p instanceof IPartOccluding)
                for (Vec3dCube c : ((IPartOccluding) p).getOcclusionBoxes())
                    cubes.add(new IndexedCuboid6(0, new Cuboid6(c.toAABB())));
        return cubes;
    }

    /** Part-level occlusion test for microblocks, then FMP's normal occlusion test. */
    @Override
    public boolean occlusionTest(TMultiPart part) {

        if (part instanceof CommonMicroblock) {
            IMicroblock mb = new FMPMicroblock((CommonMicroblock) part);
            for (IPart p : getParts())
                if (!p.occlusionTest(mb))
                    return false;
        }

        return NormalOcclusionTest.apply(this, part);
    }

    @Override
    public boolean canConnectRedstone(int side) {

        for (IPart p : getParts())
            if (p instanceof IPartRedstone)
                if (((IPartRedstone) p).canConnectRedstone(ForgeDirection.getOrientation(side)))
                    return true;
        return false;
    }

    /** Maximum strong redstone power emitted by any contained part on 'side'. */
    @Override
    public int strongPowerLevel(int side) {

        int max = 0;
        for (IPart p : getParts())
            if (p instanceof IPartRedstone)
                max = Math.max(max, ((IPartRedstone) p).getStrongPower(ForgeDirection.getOrientation(side)));
        return max;
    }

    /** Maximum weak redstone power emitted by any contained part on 'side'. */
    @Override
    public int weakPowerLevel(int side) {

        int max = 0;
        for (IPart p : getParts())
            if (p instanceof IPartRedstone)
                max = Math.max(max, ((IPartRedstone) p).getWeakPower(ForgeDirection.getOrientation(side)));
        return max;
    }

    @Override
    public void onNeighborTileChanged(int arg0, boolean arg1) {

        if (simulated)
            return;

        for (IPart p : getParts())
            if (p != null && p instanceof IPartUpdateListener)
                ((IPartUpdateListener) p).onNeighborTileChange();
    }

    @Override
    public boolean weakTileChanges() {

        return true;
    }

    @Override
    public void onAdded() {

        if (simulated)
            return;

        for (IPart p : getParts())
            if (p != null && p instanceof IPartUpdateListener)
                ((IPartUpdateListener) p).onAdded();
    }

    @Override
    public void onRemoved() {

        if (simulated)
            return;

        for (IPart p : getParts())
            if (p != null && p instanceof IPartUpdateListener)
                ((IPartUpdateListener) p).onRemoved();
    }

    @Override
    public void onChunkLoad() {

        if (simulated)
            return;

        for (IPart p : getParts())
            if (p != null && p instanceof IPartUpdateListener)
                ((IPartUpdateListener) p).onLoaded();
    }

    @Override
    public void onChunkUnload() {

        if (simulated)
            return;

        for (IPart p : getParts())
            if (p != null && p instanceof IPartUpdateListener)
                ((IPartUpdateListener) p).onUnloaded();
    }

    @Override
    public void onConverted() {

        converted = true;
    }

    /** Treats a move as unload + load + neighbor change for every listener part. */
    @Override
    public void onMoved() {

        for (IPart p : getParts()) {
            if (p != null && p instanceof IPartUpdateListener) {
                ((IPartUpdateListener) p).onUnloaded();
                ((IPartUpdateListener) p).onLoaded();
                ((IPartUpdateListener) p).onNeighborBlockChange();
            }
        }
    }

    @Override
    public void onWorldJoin() {

        super.onWorldJoin();
        onChunkLoad();
    }

    @Override
    public void onWorldSeparate() {

        super.onWorldSeparate();
        onChunkUnload();
    }

    /** Pick-block: returns the item of the part under the player's crosshair. */
    @Override
    public ItemStack pickItem(MovingObjectPosition hit) {

        QMovingObjectPosition mop = rayTrace(RayTracer.instance().getStartVector(QmunityLib.proxy.getPlayer()), RayTracer.instance()
                .getEndVector(QmunityLib.proxy.getPlayer()));
        if (mop == null)
            return null;

        return mop.getPart().getItem();
    }

    /** Server-side harvest of the targeted part; harvests the whole FMP part when empty. */
    @Override
    public void harvest(MovingObjectPosition hit, EntityPlayer player) {

        if (world().isRemote)
            return;

        QMovingObjectPosition mop = rayTrace(RayTracer.instance().getStartVector(player), RayTracer.instance().getEndVector(player));
        if (mop != null) {
            if (mop.getPart().breakAndDrop(player, mop))
                mop.getPart().getParent().removePart(mop.getPart());
            if (getParts().size() == 0)
                super.harvest(hit, player);
        }
    }

    @Override
    public void click(EntityPlayer player, MovingObjectPosition hit, ItemStack item) {

        QMovingObjectPosition mop = rayTrace(RayTracer.instance().getStartVector(player), RayTracer.instance().getEndVector(player));
        if (mop != null)
            if (mop.getPart() instanceof IPartInteractable)
                ((IPartInteractable) mop.getPart()).onClicked(player, mop, item);
    }

    @Override
    public boolean activate(EntityPlayer player, MovingObjectPosition hit, ItemStack item) {

        QMovingObjectPosition mop = rayTrace(RayTracer.instance().getStartVector(player), RayTracer.instance().getEndVector(player));
        if (mop != null)
            if (mop.getPart() instanceof IPartInteractable)
                return ((IPartInteractable) mop.getPart()).onActivated(player, mop, item);

        return false;
    }

    @Override
    public Map<String, IPart> getPartMap() {

        return parts;
    }

    @Override
    public List<IMicroblock> getMicroblocks() {

        return MultipartSystem.FMP.getCompat().getMicroblocks(getWorld(), new Vec3i(this));
    }

    @Override
    public boolean isSimulated() {

        return simulated;
    }

    public boolean isSolid(ForgeDirection face) {

        // NOTE(review): the fallback below returns true, which makes the loop's
        // early 'return true' dead and reports every face as solid. The loop
        // strongly suggests the fallback was meant to be 'return false' — confirm.
        for (IPart p : getParts())
            if (p instanceof IPartSolid && ((IPartSolid) p).isSideSolid(face))
                return true;

        return true;
    }

    @Override
    public boolean isSolid(int side) {

        return isSolid(ForgeDirection.getOrientation(side));
    }

    /** Break progress: 30 / hardness of the targeted part, or a default of 30. */
    @Override
    public float getStrength(MovingObjectPosition hit, EntityPlayer player) {

        QMovingObjectPosition mop = rayTrace(RayTracer.instance().getStartVector(player), RayTracer.instance().getEndVector(player));
        if (mop != null && mop.getPart() != null)
            return (float) (30 * mop.getPart().getHardness(player, mop));

        return 30;
    }

    @Override
    public int getLightValue() {

        int val = 0;
        for (IPart p : getParts())
            val = Math.max(val, p.getLightValue());
        return val;
    }

    @Override
    public int getHollowSize(int side) {

        int val = 0;
        boolean found = false;
        for (IPart p : getParts()) {
            if (p instanceof IPartThruHole) {
                val = Math.max(val, ((IPartThruHole) p).getHollowSize(ForgeDirection.getOrientation(side)));
                found = true;
            }
        }
        // NOTE(review): '(val > 0 || val < 12)' is a tautology (always true), so
        // any IPartThruHole part short-circuits to its own size. Presumably
        // '&&' (clamp to the 1..11 range) was intended — confirm.
        if (found && (val > 0 || val < 12))
            return val;
        return 8;
    }

    @Override
    public int getSlotMask() {

        // Occupy the FMP center slot only when a center part is present.
        for (IPart p : getParts())
            if (p instanceof IPartCenter)
                return PartMap.CENTER.mask;
        return 0;
    }
}

/** Marker interface exposing per-side solidity to the FMP compat layer. */
interface IFMPPart {

    public boolean isSolid(int side);
}
package org.jfree.data.general; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.jfree.data.DomainInfo; import org.jfree.data.KeyToGroupMap; import org.jfree.data.KeyedValues; import org.jfree.data.Range; import org.jfree.data.RangeInfo; import org.jfree.data.category.CategoryDataset; import org.jfree.data.category.DefaultCategoryDataset; import org.jfree.data.category.IntervalCategoryDataset; import org.jfree.data.function.Function2D; import org.jfree.data.xy.IntervalXYDataset; import org.jfree.data.xy.OHLCDataset; import org.jfree.data.xy.TableXYDataset; import org.jfree.data.xy.XYDataset; import org.jfree.data.xy.XYSeries; import org.jfree.data.xy.XYSeriesCollection; import org.jfree.util.ArrayUtilities; /** * A collection of useful static methods relating to datasets. */ public final class DatasetUtilities { /** * Private constructor for non-instanceability. */ private DatasetUtilities() { // now try to instantiate this ;-) } /** * Calculates the total of all the values in a {@link PieDataset}. If * the dataset contains negative or <code>null</code> values, they are * ignored. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The total. */ public static double calculatePieDatasetTotal(PieDataset dataset) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } List keys = dataset.getKeys(); double totalValue = 0; Iterator iterator = keys.iterator(); while (iterator.hasNext()) { Comparable current = (Comparable) iterator.next(); if (current != null) { Number value = dataset.getValue(current); double v = 0.0; if (value != null) { v = value.doubleValue(); } if (v > 0) { totalValue = totalValue + v; } } } return totalValue; } /** * Creates a pie dataset from a table dataset by taking all the values * for a single row. * * @param dataset the dataset (<code>null</code> not permitted). * @param rowKey the row key. * * @return A pie dataset. 
*/ public static PieDataset createPieDatasetForRow(CategoryDataset dataset, Comparable rowKey) { int row = dataset.getRowIndex(rowKey); return createPieDatasetForRow(dataset, row); } /** * Creates a pie dataset from a table dataset by taking all the values * for a single row. * * @param dataset the dataset (<code>null</code> not permitted). * @param row the row (zero-based index). * * @return A pie dataset. */ public static PieDataset createPieDatasetForRow(CategoryDataset dataset, int row) { DefaultPieDataset result = new DefaultPieDataset(); int columnCount = dataset.getColumnCount(); for (int current = 0; current < columnCount; current++) { Comparable columnKey = dataset.getColumnKey(current); result.setValue(columnKey, dataset.getValue(row, current)); } return result; } /** * Creates a pie dataset from a table dataset by taking all the values * for a single column. * * @param dataset the dataset (<code>null</code> not permitted). * @param columnKey the column key. * * @return A pie dataset. */ public static PieDataset createPieDatasetForColumn(CategoryDataset dataset, Comparable columnKey) { int column = dataset.getColumnIndex(columnKey); return createPieDatasetForColumn(dataset, column); } /** * Creates a pie dataset from a {@link CategoryDataset} by taking all the * values for a single column. * * @param dataset the dataset (<code>null</code> not permitted). * @param column the column (zero-based index). * * @return A pie dataset. 
*/ public static PieDataset createPieDatasetForColumn(CategoryDataset dataset, int column) { DefaultPieDataset result = new DefaultPieDataset(); int rowCount = dataset.getRowCount(); for (int i = 0; i < rowCount; i++) { Comparable rowKey = dataset.getRowKey(i); result.setValue(rowKey, dataset.getValue(i, column)); } return result; } /** * Creates a new pie dataset based on the supplied dataset, but modified * by aggregating all the low value items (those whose value is lower * than the <code>percentThreshold</code>) into a single item with the * key "Other". * * @param source the source dataset (<code>null</code> not permitted). * @param key a new key for the aggregated items (<code>null</code> not * permitted). * @param minimumPercent the percent threshold. * * @return The pie dataset with (possibly) aggregated items. */ public static PieDataset createConsolidatedPieDataset(PieDataset source, Comparable key, double minimumPercent) { return DatasetUtilities.createConsolidatedPieDataset( source, key, minimumPercent, 2 ); } /** * Creates a new pie dataset based on the supplied dataset, but modified * by aggregating all the low value items (those whose value is lower * than the <code>percentThreshold</code>) into a single item. The * aggregated items are assigned the specified key. Aggregation only * occurs if there are at least <code>minItems</code> items to aggregate. * * @param source the source dataset (<code>null</code> not permitted). * @param key the key to represent the aggregated items. * @param minimumPercent the percent threshold (ten percent is 0.10). * @param minItems only aggregate low values if there are at least this * many. * * @return The pie dataset with (possibly) aggregated items. 
*/ public static PieDataset createConsolidatedPieDataset(PieDataset source, Comparable key, double minimumPercent, int minItems) { DefaultPieDataset result = new DefaultPieDataset(); double total = DatasetUtilities.calculatePieDatasetTotal(source); // Iterate and find all keys below threshold percentThreshold List keys = source.getKeys(); ArrayList otherKeys = new ArrayList(); Iterator iterator = keys.iterator(); while (iterator.hasNext()) { Comparable currentKey = (Comparable) iterator.next(); Number dataValue = source.getValue(currentKey); if (dataValue != null) { double value = dataValue.doubleValue(); if (value / total < minimumPercent) { otherKeys.add(currentKey); } } } // Create new dataset with keys above threshold percentThreshold iterator = keys.iterator(); double otherValue = 0; while (iterator.hasNext()) { Comparable currentKey = (Comparable) iterator.next(); Number dataValue = source.getValue(currentKey); if (dataValue != null) { if (otherKeys.contains(currentKey) && otherKeys.size() >= minItems) { // Do not add key to dataset otherValue += dataValue.doubleValue(); } else { // Add key to dataset result.setValue(currentKey, dataValue); } } } // Add other category if applicable if (otherKeys.size() >= minItems) { result.setValue(key, otherValue); } return result; } public static CategoryDataset createCategoryDataset(String rowKeyPrefix, String columnKeyPrefix, double[][] data) { DefaultCategoryDataset result = new DefaultCategoryDataset(); for (int r = 0; r < data.length; r++) { String rowKey = rowKeyPrefix + (r + 1); for (int c = 0; c < data[r].length; c++) { String columnKey = columnKeyPrefix + (c + 1); result.addValue(new Double(data[r][c]), rowKey, columnKey); } } return result; } public static CategoryDataset createCategoryDataset(String rowKeyPrefix, String columnKeyPrefix, Number[][] data) { DefaultCategoryDataset result = new DefaultCategoryDataset(); for (int r = 0; r < data.length; r++) { String rowKey = rowKeyPrefix + (r + 1); for (int c = 0; c 
< data[r].length; c++) { String columnKey = columnKeyPrefix + (c + 1); result.addValue(data[r][c], rowKey, columnKey); } } return result; } /** * Creates a {@link CategoryDataset} that contains a copy of the data in * an array (instances of <code>Double</code> are created to represent the * data items). * <p> * Row and column keys are taken from the supplied arrays. * * @param rowKeys the row keys (<code>null</code> not permitted). * @param columnKeys the column keys (<code>null</code> not permitted). * @param data the data. * * @return The dataset. */ public static CategoryDataset createCategoryDataset(Comparable[] rowKeys, Comparable[] columnKeys, double[][] data) { // check arguments... if (rowKeys == null) { throw new IllegalArgumentException("Null 'rowKeys' argument."); } if (columnKeys == null) { throw new IllegalArgumentException("Null 'columnKeys' argument."); } if (ArrayUtilities.hasDuplicateItems(rowKeys)) { throw new IllegalArgumentException("Duplicate items in 'rowKeys'."); } if (ArrayUtilities.hasDuplicateItems(columnKeys)) { throw new IllegalArgumentException( "Duplicate items in 'columnKeys'." ); } if (rowKeys.length != data.length) { throw new IllegalArgumentException( "The number of row keys does not match the number of rows in " + "the data array." ); } int columnCount = 0; for (int r = 0; r < data.length; r++) { columnCount = Math.max(columnCount, data[r].length); } if (columnKeys.length != columnCount) { throw new IllegalArgumentException( "The number of column keys does not match the number of " + "columns in the data array." ); } // now do the work... 
DefaultCategoryDataset result = new DefaultCategoryDataset(); for (int r = 0; r < data.length; r++) { Comparable rowKey = rowKeys[r]; for (int c = 0; c < data[r].length; c++) { Comparable columnKey = columnKeys[c]; result.addValue(new Double(data[r][c]), rowKey, columnKey); } } return result; } /** * Creates a {@link CategoryDataset} by copying the data from the supplied * {@link KeyedValues} instance. * * @param rowKey the row key (<code>null</code> not permitted). * @param rowData the row data (<code>null</code> not permitted). * * @return A dataset. */ public static CategoryDataset createCategoryDataset(Comparable rowKey, KeyedValues rowData) { if (rowKey == null) { throw new IllegalArgumentException("Null 'rowKey' argument."); } if (rowData == null) { throw new IllegalArgumentException("Null 'rowData' argument."); } DefaultCategoryDataset result = new DefaultCategoryDataset(); for (int i = 0; i < rowData.getItemCount(); i++) { result.addValue(rowData.getValue(i), rowKey, rowData.getKey(i)); } return result; } /** * Creates an {@link XYDataset} by sampling the specified function over a * fixed range. * * @param f the function (<code>null</code> not permitted). * @param start the start value for the range. * @param end the end value for the range. * @param samples the number of sample points (must be > 1). * @param seriesKey the key to give the resulting series * (<code>null</code> not permitted). * * @return A dataset. 
*/ public static XYDataset sampleFunction2D(Function2D f, double start, double end, int samples, Comparable seriesKey) { if (f == null) { throw new IllegalArgumentException("Null 'f' argument."); } if (seriesKey == null) { throw new IllegalArgumentException("Null 'seriesKey' argument."); } if (start >= end) { throw new IllegalArgumentException("Requires 'start' < 'end'."); } if (samples < 2) { throw new IllegalArgumentException("Requires 'samples' > 1"); } XYSeries series = new XYSeries(seriesKey); double step = (end - start) / (samples - 1); for (int i = 0; i < samples; i++) { double x = start + (step * i); series.add(x, f.getValue(x)); } XYSeriesCollection collection = new XYSeriesCollection(series); return collection; } /** * Returns <code>true</code> if the dataset is empty (or <code>null</code>), * and <code>false</code> otherwise. * * @param dataset the dataset (<code>null</code> permitted). * * @return A boolean. */ public static boolean isEmptyOrNull(PieDataset dataset) { if (dataset == null) { return true; } int itemCount = dataset.getItemCount(); if (itemCount == 0) { return true; } for (int item = 0; item < itemCount; item++) { Number y = dataset.getValue(item); if (y != null) { double yy = y.doubleValue(); if (yy > 0.0) { return false; } } } return true; } /** * Returns <code>true</code> if the dataset is empty (or <code>null</code>), * and <code>false</code> otherwise. * * @param dataset the dataset (<code>null</code> permitted). * * @return A boolean. */ public static boolean isEmptyOrNull(CategoryDataset dataset) { if (dataset == null) { return true; } int rowCount = dataset.getRowCount(); int columnCount = dataset.getColumnCount(); if (rowCount == 0 || columnCount == 0) { return true; } for (int r = 0; r < rowCount; r++) { for (int c = 0; c < columnCount; c++) { if (dataset.getValue(r, c) != null) { return false; } } } return true; } /** * Returns <code>true</code> if the dataset is empty (or <code>null</code>), * and <code>false</code> otherwise. 
* * @param dataset the dataset (<code>null</code> permitted). * * @return A boolean. */ public static boolean isEmptyOrNull(XYDataset dataset) { if (dataset != null) { for (int s = 0; s < dataset.getSeriesCount(); s++) { if (dataset.getItemCount(s) > 0) { return false; } } } return true; } /** * Returns the range of values in the domain (x-values) of a dataset. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The range of values (possibly <code>null</code>). */ public static Range findDomainBounds(XYDataset dataset) { return findDomainBounds(dataset, true); } /** * Returns the range of values in the domain (x-values) of a dataset. * * @param dataset the dataset (<code>null</code> not permitted). * @param includeInterval determines whether or not the x-interval is taken * into account (only applies if the dataset is an * {@link IntervalXYDataset}). * * @return The range of values (possibly <code>null</code>). */ public static Range findDomainBounds(XYDataset dataset, boolean includeInterval) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } Range result = null; // if the dataset implements DomainInfo, life is easier if (dataset instanceof DomainInfo) { DomainInfo info = (DomainInfo) dataset; result = info.getDomainBounds(includeInterval); } else { result = iterateDomainBounds(dataset, includeInterval); } return result; } /** * Iterates over the items in an {@link XYDataset} to find * the range of x-values. If the dataset is an instance of * {@link IntervalXYDataset}, the starting and ending x-values * will be used for the bounds calculation. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The range (possibly <code>null</code>). */ public static Range iterateDomainBounds(XYDataset dataset) { return iterateDomainBounds(dataset, true); } /** * Iterates over the items in an {@link XYDataset} to find * the range of x-values. 
* * @param dataset the dataset (<code>null</code> not permitted). * @param includeInterval a flag that determines, for an * {@link IntervalXYDataset}, whether the x-interval or just the * x-value is used to determine the overall range. * * @return The range (possibly <code>null</code>). */ public static Range iterateDomainBounds(XYDataset dataset, boolean includeInterval) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } double minimum = Double.POSITIVE_INFINITY; double maximum = Double.NEGATIVE_INFINITY; int seriesCount = dataset.getSeriesCount(); double lvalue; double uvalue; if (includeInterval && dataset instanceof IntervalXYDataset) { IntervalXYDataset intervalXYData = (IntervalXYDataset) dataset; for (int series = 0; series < seriesCount; series++) { int itemCount = dataset.getItemCount(series); for (int item = 0; item < itemCount; item++) { lvalue = intervalXYData.getStartXValue(series, item); uvalue = intervalXYData.getEndXValue(series, item); if (!Double.isNaN(lvalue)) { minimum = Math.min(minimum, lvalue); } if (!Double.isNaN(uvalue)) { maximum = Math.max(maximum, uvalue); } } } } else { for (int series = 0; series < seriesCount; series++) { int itemCount = dataset.getItemCount(series); for (int item = 0; item < itemCount; item++) { lvalue = dataset.getXValue(series, item); uvalue = lvalue; if (!Double.isNaN(lvalue)) { minimum = Math.min(minimum, lvalue); maximum = Math.max(maximum, uvalue); } } } } if (minimum > maximum) { return null; } else { return new Range(minimum, maximum); } } /** * Returns the range of values in the range for the dataset. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The range (possibly <code>null</code>). */ public static Range findRangeBounds(CategoryDataset dataset) { return findRangeBounds(dataset, true); } /** * Returns the range of values in the range for the dataset. * * @param dataset the dataset (<code>null</code> not permitted). 
* @param includeInterval a flag that determines whether or not the * y-interval is taken into account. * * @return The range (possibly <code>null</code>). */ public static Range findRangeBounds(CategoryDataset dataset, boolean includeInterval) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } Range result = null; if (dataset instanceof RangeInfo) { RangeInfo info = (RangeInfo) dataset; result = info.getRangeBounds(includeInterval); } else { result = iterateRangeBounds(dataset, includeInterval); } return result; } /** * Returns the range of values in the range for the dataset. This method * is the partner for the {@link #findDomainBounds(XYDataset)} method. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The range (possibly <code>null</code>). */ public static Range findRangeBounds(XYDataset dataset) { return findRangeBounds(dataset, true); } /** * Returns the range of values in the range for the dataset. This method * is the partner for the {@link #findDomainBounds(XYDataset, boolean)} * method. * * @param dataset the dataset (<code>null</code> not permitted). * @param includeInterval a flag that determines whether or not the * y-interval is taken into account. * * @return The range (possibly <code>null</code>). */ public static Range findRangeBounds(XYDataset dataset, boolean includeInterval) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } Range result = null; if (dataset instanceof RangeInfo) { RangeInfo info = (RangeInfo) dataset; result = info.getRangeBounds(includeInterval); } else { result = iterateRangeBounds(dataset, includeInterval); } return result; } /** * Iterates over the data item of the category dataset to find * the range bounds. * * @param dataset the dataset (<code>null</code> not permitted). * @param includeInterval a flag that determines whether or not the * y-interval is taken into account. 
* * @return The range (possibly <code>null</code>). * * @deprecated As of 1.0.10, use * {@link #iterateRangeBounds(CategoryDataset, boolean)}. */ public static Range iterateCategoryRangeBounds(CategoryDataset dataset, boolean includeInterval) { return iterateRangeBounds(dataset, includeInterval); } /** * Iterates over the data item of the category dataset to find * the range bounds. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The range (possibly <code>null</code>). * * @since 1.0.10 */ public static Range iterateRangeBounds(CategoryDataset dataset) { return iterateRangeBounds(dataset, true); } /** * Iterates over the data item of the category dataset to find * the range bounds. * * @param dataset the dataset (<code>null</code> not permitted). * @param includeInterval a flag that determines whether or not the * y-interval is taken into account. * * @return The range (possibly <code>null</code>). * * @since 1.0.10 */ public static Range iterateRangeBounds(CategoryDataset dataset, boolean includeInterval) { double minimum = Double.POSITIVE_INFINITY; double maximum = Double.NEGATIVE_INFINITY; int rowCount = dataset.getRowCount(); int columnCount = dataset.getColumnCount(); if (includeInterval && dataset instanceof IntervalCategoryDataset) { // handle the special case where the dataset has y-intervals that // we want to measure IntervalCategoryDataset icd = (IntervalCategoryDataset) dataset; Number lvalue, uvalue; for (int row = 0; row < rowCount; row++) { for (int column = 0; column < columnCount; column++) { lvalue = icd.getStartValue(row, column); uvalue = icd.getEndValue(row, column); if (lvalue != null && !Double.isNaN(lvalue.doubleValue())) { minimum = Math.min(minimum, lvalue.doubleValue()); } if (uvalue != null && !Double.isNaN(uvalue.doubleValue())) { maximum = Math.max(maximum, uvalue.doubleValue()); } } } } else { // handle the standard case (plain CategoryDataset) for (int row = 0; row < rowCount; row++) { for (int column = 
0; column < columnCount; column++) { Number value = dataset.getValue(row, column); if (value != null) { double v = value.doubleValue(); if (!Double.isNaN(v)) { minimum = Math.min(minimum, v); maximum = Math.max(maximum, v); } } } } } if (minimum == Double.POSITIVE_INFINITY) { return null; } else { return new Range(minimum, maximum); } } /** * Iterates over the data item of the xy dataset to find * the range bounds. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The range (possibly <code>null</code>). * * @deprecated As of 1.0.10, use {@link #iterateRangeBounds(XYDataset)}. */ public static Range iterateXYRangeBounds(XYDataset dataset) { return iterateRangeBounds(dataset); } /** * Iterates over the data item of the xy dataset to find * the range bounds. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The range (possibly <code>null</code>). * * @since 1.0.10 */ public static Range iterateRangeBounds(XYDataset dataset) { return iterateRangeBounds(dataset, true); } /** * Iterates over the data items of the xy dataset to find * the range bounds. * * @param dataset the dataset (<code>null</code> not permitted). * @param includeInterval a flag that determines, for an * {@link IntervalXYDataset}, whether the y-interval or just the * y-value is used to determine the overall range. * * @return The range (possibly <code>null</code>). 
* * @since 1.0.10 */ public static Range iterateRangeBounds(XYDataset dataset, boolean includeInterval) { double minimum = Double.POSITIVE_INFINITY; double maximum = Double.NEGATIVE_INFINITY; int seriesCount = dataset.getSeriesCount(); // handle three cases by dataset type if (includeInterval && dataset instanceof IntervalXYDataset) { // handle special case of IntervalXYDataset IntervalXYDataset ixyd = (IntervalXYDataset) dataset; for (int series = 0; series < seriesCount; series++) { int itemCount = dataset.getItemCount(series); for (int item = 0; item < itemCount; item++) { double lvalue = ixyd.getStartYValue(series, item); double uvalue = ixyd.getEndYValue(series, item); if (!Double.isNaN(lvalue)) { minimum = Math.min(minimum, lvalue); } if (!Double.isNaN(uvalue)) { maximum = Math.max(maximum, uvalue); } } } } else if (includeInterval && dataset instanceof OHLCDataset) { // handle special case of OHLCDataset OHLCDataset ohlc = (OHLCDataset) dataset; for (int series = 0; series < seriesCount; series++) { int itemCount = dataset.getItemCount(series); for (int item = 0; item < itemCount; item++) { double lvalue = ohlc.getLowValue(series, item); double uvalue = ohlc.getHighValue(series, item); if (!Double.isNaN(lvalue)) { minimum = Math.min(minimum, lvalue); } if (!Double.isNaN(uvalue)) { maximum = Math.max(maximum, uvalue); } } } } else { // standard case - plain XYDataset for (int series = 0; series < seriesCount; series++) { int itemCount = dataset.getItemCount(series); for (int item = 0; item < itemCount; item++) { double value = dataset.getYValue(series, item); if (!Double.isNaN(value)) { minimum = Math.min(minimum, value); maximum = Math.max(maximum, value); } } } } if (minimum == Double.POSITIVE_INFINITY) { return null; } else { return new Range(minimum, maximum); } } /** * Finds the minimum domain (or X) value for the specified dataset. 
This * is easy if the dataset implements the {@link DomainInfo} interface (a * good idea if there is an efficient way to determine the minimum value). * Otherwise, it involves iterating over the entire data-set. * <p> * Returns <code>null</code> if all the data values in the dataset are * <code>null</code>. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The minimum value (possibly <code>null</code>). */ public static Number findMinimumDomainValue(XYDataset dataset) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } Number result = null; // if the dataset implements DomainInfo, life is easy if (dataset instanceof DomainInfo) { DomainInfo info = (DomainInfo) dataset; return new Double(info.getDomainLowerBound(true)); } else { double minimum = Double.POSITIVE_INFINITY; int seriesCount = dataset.getSeriesCount(); for (int series = 0; series < seriesCount; series++) { int itemCount = dataset.getItemCount(series); for (int item = 0; item < itemCount; item++) { double value; if (dataset instanceof IntervalXYDataset) { IntervalXYDataset intervalXYData = (IntervalXYDataset) dataset; value = intervalXYData.getStartXValue(series, item); } else { value = dataset.getXValue(series, item); } if (!Double.isNaN(value)) { minimum = Math.min(minimum, value); } } } if (minimum == Double.POSITIVE_INFINITY) { result = null; } else { result = new Double(minimum); } } return result; } /** * Returns the maximum domain value for the specified dataset. This is * easy if the dataset implements the {@link DomainInfo} interface (a good * idea if there is an efficient way to determine the maximum value). * Otherwise, it involves iterating over the entire data-set. Returns * <code>null</code> if all the data values in the dataset are * <code>null</code>. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The maximum value (possibly <code>null</code>). 
*/ public static Number findMaximumDomainValue(XYDataset dataset) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } Number result = null; // if the dataset implements DomainInfo, life is easy if (dataset instanceof DomainInfo) { DomainInfo info = (DomainInfo) dataset; return new Double(info.getDomainUpperBound(true)); } // hasn't implemented DomainInfo, so iterate... else { double maximum = Double.NEGATIVE_INFINITY; int seriesCount = dataset.getSeriesCount(); for (int series = 0; series < seriesCount; series++) { int itemCount = dataset.getItemCount(series); for (int item = 0; item < itemCount; item++) { double value; if (dataset instanceof IntervalXYDataset) { IntervalXYDataset intervalXYData = (IntervalXYDataset) dataset; value = intervalXYData.getEndXValue(series, item); } else { value = dataset.getXValue(series, item); } if (!Double.isNaN(value)) { maximum = Math.max(maximum, value); } } } if (maximum == Double.NEGATIVE_INFINITY) { result = null; } else { result = new Double(maximum); } } return result; } /** * Returns the minimum range value for the specified dataset. This is * easy if the dataset implements the {@link RangeInfo} interface (a good * idea if there is an efficient way to determine the minimum value). * Otherwise, it involves iterating over the entire data-set. Returns * <code>null</code> if all the data values in the dataset are * <code>null</code>. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The minimum value (possibly <code>null</code>). */ public static Number findMinimumRangeValue(CategoryDataset dataset) { // check parameters... if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } // work out the minimum value... if (dataset instanceof RangeInfo) { RangeInfo info = (RangeInfo) dataset; return new Double(info.getRangeLowerBound(true)); } // hasn't implemented RangeInfo, so we'll have to iterate... 
else { double minimum = Double.POSITIVE_INFINITY; int seriesCount = dataset.getRowCount(); int itemCount = dataset.getColumnCount(); for (int series = 0; series < seriesCount; series++) { for (int item = 0; item < itemCount; item++) { Number value; if (dataset instanceof IntervalCategoryDataset) { IntervalCategoryDataset icd = (IntervalCategoryDataset) dataset; value = icd.getStartValue(series, item); } else { value = dataset.getValue(series, item); } if (value != null) { minimum = Math.min(minimum, value.doubleValue()); } } } if (minimum == Double.POSITIVE_INFINITY) { return null; } else { return new Double(minimum); } } } /** * Returns the minimum range value for the specified dataset. This is * easy if the dataset implements the {@link RangeInfo} interface (a good * idea if there is an efficient way to determine the minimum value). * Otherwise, it involves iterating over the entire data-set. Returns * <code>null</code> if all the data values in the dataset are * <code>null</code>. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The minimum value (possibly <code>null</code>). */ public static Number findMinimumRangeValue(XYDataset dataset) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } // work out the minimum value... if (dataset instanceof RangeInfo) { RangeInfo info = (RangeInfo) dataset; return new Double(info.getRangeLowerBound(true)); } // hasn't implemented RangeInfo, so we'll have to iterate... 
else { double minimum = Double.POSITIVE_INFINITY; int seriesCount = dataset.getSeriesCount(); for (int series = 0; series < seriesCount; series++) { int itemCount = dataset.getItemCount(series); for (int item = 0; item < itemCount; item++) { double value; if (dataset instanceof IntervalXYDataset) { IntervalXYDataset intervalXYData = (IntervalXYDataset) dataset; value = intervalXYData.getStartYValue(series, item); } else if (dataset instanceof OHLCDataset) { OHLCDataset highLowData = (OHLCDataset) dataset; value = highLowData.getLowValue(series, item); } else { value = dataset.getYValue(series, item); } if (!Double.isNaN(value)) { minimum = Math.min(minimum, value); } } } if (minimum == Double.POSITIVE_INFINITY) { return null; } else { return new Double(minimum); } } } /** * Returns the maximum range value for the specified dataset. This is easy * if the dataset implements the {@link RangeInfo} interface (a good idea * if there is an efficient way to determine the maximum value). * Otherwise, it involves iterating over the entire data-set. Returns * <code>null</code> if all the data values are <code>null</code>. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The maximum value (possibly <code>null</code>). */ public static Number findMaximumRangeValue(CategoryDataset dataset) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } // work out the minimum value... if (dataset instanceof RangeInfo) { RangeInfo info = (RangeInfo) dataset; return new Double(info.getRangeUpperBound(true)); } // hasn't implemented RangeInfo, so we'll have to iterate... 
else { double maximum = Double.NEGATIVE_INFINITY; int seriesCount = dataset.getRowCount(); int itemCount = dataset.getColumnCount(); for (int series = 0; series < seriesCount; series++) { for (int item = 0; item < itemCount; item++) { Number value; if (dataset instanceof IntervalCategoryDataset) { IntervalCategoryDataset icd = (IntervalCategoryDataset) dataset; value = icd.getEndValue(series, item); } else { value = dataset.getValue(series, item); } if (value != null) { maximum = Math.max(maximum, value.doubleValue()); } } } if (maximum == Double.NEGATIVE_INFINITY) { return null; } else { return new Double(maximum); } } } /** * Returns the maximum range value for the specified dataset. This is * easy if the dataset implements the {@link RangeInfo} interface (a good * idea if there is an efficient way to determine the maximum value). * Otherwise, it involves iterating over the entire data-set. Returns * <code>null</code> if all the data values are <code>null</code>. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The maximum value (possibly <code>null</code>). */ public static Number findMaximumRangeValue(XYDataset dataset) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } // work out the minimum value... if (dataset instanceof RangeInfo) { RangeInfo info = (RangeInfo) dataset; return new Double(info.getRangeUpperBound(true)); } // hasn't implemented RangeInfo, so we'll have to iterate... 
else { double maximum = Double.NEGATIVE_INFINITY; int seriesCount = dataset.getSeriesCount(); for (int series = 0; series < seriesCount; series++) { int itemCount = dataset.getItemCount(series); for (int item = 0; item < itemCount; item++) { double value; if (dataset instanceof IntervalXYDataset) { IntervalXYDataset intervalXYData = (IntervalXYDataset) dataset; value = intervalXYData.getEndYValue(series, item); } else if (dataset instanceof OHLCDataset) { OHLCDataset highLowData = (OHLCDataset) dataset; value = highLowData.getHighValue(series, item); } else { value = dataset.getYValue(series, item); } if (!Double.isNaN(value)) { maximum = Math.max(maximum, value); } } } if (maximum == Double.NEGATIVE_INFINITY) { return null; } else { return new Double(maximum); } } } /** * Returns the minimum and maximum values for the dataset's range * (y-values), assuming that the series in one category are stacked. * * @param dataset the dataset (<code>null</code> not permitted). * * @return The range (<code>null</code> if the dataset contains no values). */ public static Range findStackedRangeBounds(CategoryDataset dataset) { return findStackedRangeBounds(dataset, 0.0); } /** * Returns the minimum and maximum values for the dataset's range * (y-values), assuming that the series in one category are stacked. * * @param dataset the dataset (<code>null</code> not permitted). * @param base the base value for the bars. * * @return The range (<code>null</code> if the dataset contains no values). 
 */
public static Range findStackedRangeBounds(CategoryDataset dataset,
        double base) {
    if (dataset == null) {
        throw new IllegalArgumentException("Null 'dataset' argument.");
    }
    Range result = null;
    double minimum = Double.POSITIVE_INFINITY;
    double maximum = Double.NEGATIVE_INFINITY;
    int categoryCount = dataset.getColumnCount();
    // for each category (column), stack the positive values upwards and
    // the negative values downwards from 'base', then fold the per-category
    // extremes into the overall bounds
    for (int item = 0; item < categoryCount; item++) {
        double positive = base;
        double negative = base;
        int seriesCount = dataset.getRowCount();
        for (int series = 0; series < seriesCount; series++) {
            Number number = dataset.getValue(series, item);
            if (number != null) {
                double value = number.doubleValue();
                if (value > 0.0) {
                    positive = positive + value;
                }
                if (value < 0.0) {
                    negative = negative + value;
                    // '+', remember value is negative
                }
            }
        }
        minimum = Math.min(minimum, negative);
        maximum = Math.max(maximum, positive);
    }
    // a dataset with no categories leaves the sentinels untouched and
    // yields null
    if (minimum <= maximum) {
        result = new Range(minimum, maximum);
    }
    return result;
}

/**
 * Returns the minimum and maximum values for the dataset's range
 * (y-values), assuming that the series in one category are stacked.
 *
 * @param dataset  the dataset.
 * @param map  a structure that maps series to groups.
 *
 * @return The value range (<code>null</code> if the dataset contains no
 *         values).
 */
public static Range findStackedRangeBounds(CategoryDataset dataset,
        KeyToGroupMap map) {
    if (dataset == null) {
        throw new IllegalArgumentException("Null 'dataset' argument.");
    }
    boolean hasValidData = false;
    Range result = null;
    // create an array holding the group indices for each series...
    int[] groupIndex = new int[dataset.getRowCount()];
    for (int i = 0; i < dataset.getRowCount(); i++) {
        groupIndex[i] = map.getGroupIndex(map.getGroup(
                dataset.getRowKey(i)));
    }
    // minimum and maximum for each group...
    // NOTE(review): these arrays default to 0.0, so each group's bounds
    // implicitly include a stacking base of zero (unlike the overload
    // above, there is no configurable base here)
    int groupCount = map.getGroupCount();
    double[] minimum = new double[groupCount];
    double[] maximum = new double[groupCount];
    int categoryCount = dataset.getColumnCount();
    for (int item = 0; item < categoryCount; item++) {
        // per-category stacked totals, one pair per group (reset for
        // every category)
        double[] positive = new double[groupCount];
        double[] negative = new double[groupCount];
        int seriesCount = dataset.getRowCount();
        for (int series = 0; series < seriesCount; series++) {
            Number number = dataset.getValue(series, item);
            if (number != null) {
                hasValidData = true;
                double value = number.doubleValue();
                if (value > 0.0) {
                    positive[groupIndex[series]]
                            = positive[groupIndex[series]] + value;
                }
                if (value < 0.0) {
                    negative[groupIndex[series]]
                            = negative[groupIndex[series]] + value;
                    // '+', remember value is negative
                }
            }
        }
        // fold this category's extremes into each group's running bounds
        for (int g = 0; g < groupCount; g++) {
            minimum[g] = Math.min(minimum[g], negative[g]);
            maximum[g] = Math.max(maximum[g], positive[g]);
        }
    }
    if (hasValidData) {
        // the overall range is the union of the per-group ranges
        for (int j = 0; j < groupCount; j++) {
            result = Range.combine(result, new Range(minimum[j],
                    maximum[j]));
        }
    }
    return result;
}

/**
 * Returns the minimum value in the dataset range, assuming that values in
 * each category are "stacked".
 *
 * @param dataset  the dataset (<code>null</code> not permitted).
 *
 * @return The minimum value.
* * @see #findMaximumStackedRangeValue(CategoryDataset) */ public static Number findMinimumStackedRangeValue(CategoryDataset dataset) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } Number result = null; boolean hasValidData = false; double minimum = 0.0; int categoryCount = dataset.getColumnCount(); for (int item = 0; item < categoryCount; item++) { double total = 0.0; int seriesCount = dataset.getRowCount(); for (int series = 0; series < seriesCount; series++) { Number number = dataset.getValue(series, item); if (number != null) { hasValidData = true; double value = number.doubleValue(); if (value < 0.0) { total = total + value; // '+', remember value is negative } } } minimum = Math.min(minimum, total); } if (hasValidData) { result = new Double(minimum); } return result; } /** * Returns the maximum value in the dataset range, assuming that values in * each category are "stacked". * * @param dataset the dataset (<code>null</code> not permitted). * * @return The maximum value (possibly <code>null</code>). * * @see #findMinimumStackedRangeValue(CategoryDataset) */ public static Number findMaximumStackedRangeValue(CategoryDataset dataset) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } Number result = null; boolean hasValidData = false; double maximum = 0.0; int categoryCount = dataset.getColumnCount(); for (int item = 0; item < categoryCount; item++) { double total = 0.0; int seriesCount = dataset.getRowCount(); for (int series = 0; series < seriesCount; series++) { Number number = dataset.getValue(series, item); if (number != null) { hasValidData = true; double value = number.doubleValue(); if (value > 0.0) { total = total + value; } } } maximum = Math.max(maximum, total); } if (hasValidData) { result = new Double(maximum); } return result; } /** * Returns the minimum and maximum values for the dataset's range, * assuming that the series are stacked. 
* * @param dataset the dataset (<code>null</code> not permitted). * * @return The range ([0.0, 0.0] if the dataset contains no values). */ public static Range findStackedRangeBounds(TableXYDataset dataset) { return findStackedRangeBounds(dataset, 0.0); } /** * Returns the minimum and maximum values for the dataset's range, * assuming that the series are stacked, using the specified base value. * * @param dataset the dataset (<code>null</code> not permitted). * @param base the base value. * * @return The range (<code>null</code> if the dataset contains no values). */ public static Range findStackedRangeBounds(TableXYDataset dataset, double base) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } double minimum = base; double maximum = base; for (int itemNo = 0; itemNo < dataset.getItemCount(); itemNo++) { double positive = base; double negative = base; int seriesCount = dataset.getSeriesCount(); for (int seriesNo = 0; seriesNo < seriesCount; seriesNo++) { double y = dataset.getYValue(seriesNo, itemNo); if (!Double.isNaN(y)) { if (y > 0.0) { positive += y; } else { negative += y; } } } if (positive > maximum) { maximum = positive; } if (negative < minimum) { minimum = negative; } } if (minimum <= maximum) { return new Range(minimum, maximum); } else { return null; } } /** * Calculates the total for the y-values in all series for a given item * index. * * @param dataset the dataset. * @param item the item index. * * @return The total. * * @since 1.0.5 */ public static double calculateStackTotal(TableXYDataset dataset, int item) { double total = 0.0; int seriesCount = dataset.getSeriesCount(); for (int s = 0; s < seriesCount; s++) { double value = dataset.getYValue(s, item); if (!Double.isNaN(value)) { total = total + value; } } return total; } /** * Calculates the range of values for a dataset where each item is the * running total of the items for the current series. 
* * @param dataset the dataset (<code>null</code> not permitted). * * @return The range. * * @see #findRangeBounds(CategoryDataset) */ public static Range findCumulativeRangeBounds(CategoryDataset dataset) { if (dataset == null) { throw new IllegalArgumentException("Null 'dataset' argument."); } boolean allItemsNull = true; // we'll set this to false if there is at // least one non-null data item... double minimum = 0.0; double maximum = 0.0; for (int row = 0; row < dataset.getRowCount(); row++) { double runningTotal = 0.0; for (int column = 0; column <= dataset.getColumnCount() - 1; column++) { Number n = dataset.getValue(row, column); if (n != null) { allItemsNull = false; double value = n.doubleValue(); if (!Double.isNaN(value)) { runningTotal = runningTotal + value; minimum = Math.min(minimum, runningTotal); maximum = Math.max(maximum, runningTotal); } } } } if (!allItemsNull) { return new Range(minimum, maximum); } else { return null; } } }
package org.opencms.gwt.client;

import org.opencms.db.CmsResourceState;
import org.opencms.gwt.client.rpc.CmsRpcAction;
import org.opencms.gwt.client.rpc.CmsRpcPrefetcher;
import org.opencms.gwt.client.ui.CmsErrorDialog;
import org.opencms.gwt.client.ui.CmsNotification;
import org.opencms.gwt.client.ui.input.upload.CmsFileInfo;
import org.opencms.gwt.client.util.CmsMediaQuery;
import org.opencms.gwt.client.util.CmsUniqueActiveItemContainer;
import org.opencms.gwt.client.util.I_CmsSimpleCallback;
import org.opencms.gwt.shared.CmsCoreData;
import org.opencms.gwt.shared.rpc.I_CmsCoreService;
import org.opencms.gwt.shared.rpc.I_CmsCoreServiceAsync;
import org.opencms.gwt.shared.rpc.I_CmsVfsService;
import org.opencms.gwt.shared.rpc.I_CmsVfsServiceAsync;
import org.opencms.util.CmsStringUtil;
import org.opencms.util.CmsUUID;

import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.NodeList;
import com.google.gwt.user.client.Window.Location;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.rpc.SerializationException;
import com.google.gwt.user.client.rpc.ServiceDefTarget;
import com.google.web.bindery.event.shared.Event;
import com.google.web.bindery.event.shared.EventBus;
import com.google.web.bindery.event.shared.SimpleEventBus;

/**
 * Client side core data provider.<p>
 *
 * Singleton that deserializes the server-prefetched {@link CmsCoreData}
 * dictionary on first access and exposes the core and VFS GWT-RPC services
 * plus various site-root / locking convenience helpers.<p>
 *
 * @since 8.0.0
 *
 * @see org.opencms.gwt.CmsGwtActionElement
 */
public final class CmsCoreProvider extends CmsCoreData {

    /** Path to system folder. */
    public static final String VFS_PATH_SYSTEM = "/system/";

    /** Media query to detect devices with no hover capability. */
    private static final CmsMediaQuery TOUCH_ONLY = CmsMediaQuery.parse("(hover: none)");

    /** Internal instance. */
    private static CmsCoreProvider INSTANCE;

    /** The core service instance (lazily created GWT-RPC proxy). */
    private static I_CmsCoreServiceAsync SERVICE;

    /** The vfs-service instance (lazily created GWT-RPC proxy). */
    private static I_CmsVfsServiceAsync VFS_SERVICE;

    /** The unique active item container for the flyout menu. */
    private CmsUniqueActiveItemContainer m_activeFlyoutMenu = new CmsUniqueActiveItemContainer();

    /** The client time when the data is loaded. */
    private long m_clientTime;

    /** Event bus for client side events. */
    private EventBus m_eventBus = new SimpleEventBus();

    /** Flag which indicates whether we are in Internet Explorer 7. */
    private boolean m_isIe7;

    /**
     * Prevent instantiation.<p>
     *
     * Initializes the superclass from the serialized core data embedded in
     * the host page dictionary and records the client load time.<p>
     *
     * @throws SerializationException if deserialization failed
     */
    protected CmsCoreProvider()
    throws SerializationException {

        super((CmsCoreData)CmsRpcPrefetcher.getSerializedObjectFromDictionary(getService(), DICT_NAME));
        m_clientTime = System.currentTimeMillis();
        I_CmsUserAgentInfo userAgentInfo = GWT.create(I_CmsUserAgentInfo.class);
        m_isIe7 = userAgentInfo.isIE7();
    }

    /**
     * Returns the client message instance.<p>
     *
     * Lazily creates the singleton; on deserialization failure an error
     * dialog is shown and <code>null</code> is returned.<p>
     *
     * @return the client message instance
     */
    public static CmsCoreProvider get() {

        if (INSTANCE == null) {
            try {
                INSTANCE = new CmsCoreProvider();
            } catch (SerializationException e) {
                CmsErrorDialog.handleException(
                    new Exception(
                        "Deserialization of core data failed. This may be caused by expired java-script resources, please clear your browser cache and try again.",
                        e));
            }
        }
        return INSTANCE;
    }

    /**
     * Gets the content attribute of a meta tag with a given name.<p>
     *
     * @param nameToFind the name of the meta tag
     *
     * @return the content attribute value of the found meta tag, or null if no meta tag with the given name was found
     */
    public static String getMetaElementContent(String nameToFind) {

        NodeList<Element> metas = Document.get().getDocumentElement().getElementsByTagName("meta");
        for (int i = 0; i < metas.getLength(); i++) {
            Element meta = metas.getItem(i);
            String name = meta.getAttribute("name");
            if (nameToFind.equals(name)) {
                return meta.getAttribute("content");
            }
        }
        return null;
    }

    /**
     * Returns the core service instance.<p>
     *
     * @return the core service instance
     */
    public static I_CmsCoreServiceAsync getService() {

        if (SERVICE == null) {
            SERVICE = GWT.create(I_CmsCoreService.class);
            String serviceUrl = CmsCoreProvider.get().link("org.opencms.gwt.CmsCoreService.gwt");
            ((ServiceDefTarget)SERVICE).setServiceEntryPoint(serviceUrl);
        }
        return SERVICE;
    }

    /**
     * Returns the vfs service instance.<p>
     *
     * @return the vfs service instance
     */
    public static I_CmsVfsServiceAsync getVfsService() {

        if (VFS_SERVICE == null) {
            VFS_SERVICE = GWT.create(I_CmsVfsService.class);
            String serviceUrl = CmsCoreProvider.get().link("org.opencms.gwt.CmsVfsService.gwt");
            ((ServiceDefTarget)VFS_SERVICE).setServiceEntryPoint(serviceUrl);
        }
        return VFS_SERVICE;
    }

    /**
     * Checks if the client is touch-only.
     *
     * <p>This uses media queries, but the touch-only status can also be forcibly turned on with the request parameter __touchOnly=1.
     *
     * @return true if the client is touch-only
     */
    public static boolean isTouchOnly() {

        return TOUCH_ONLY.matches() || "1".equals(Location.getParameter("__touchOnly"));
    }

    /**
     * Adds the current site root of this context to the given resource name.<p>
     *
     * @param sitePath the resource name
     *
     * @return the translated resource name including site root
     *
     * @see #removeSiteRoot(String)
     */
    public String addSiteRoot(String sitePath) {

        if (sitePath == null) {
            return null;
        }
        String siteRoot = getAdjustedSiteRoot(getSiteRoot(), sitePath);
        StringBuffer result = new StringBuffer(128);
        result.append(siteRoot);
        if (((siteRoot.length() == 0) || (siteRoot.charAt(siteRoot.length() - 1) != '/'))
            && ((sitePath.length() == 0) || (sitePath.charAt(0) != '/'))) {
            // add slash between site root and resource if required
            result.append('/');
        }
        result.append(sitePath);
        return result.toString();
    }

    /**
     * Creates a new CmsUUID.<p>
     *
     * @param callback the callback to execute
     */
    public void createUUID(final AsyncCallback<CmsUUID> callback) {

        // do not stop/start since we do not want to give any feedback to the user
        CmsRpcAction<CmsUUID> action = new CmsRpcAction<CmsUUID>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                getService().createUUID(this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            protected void onResponse(CmsUUID result) {

                callback.onSuccess(result);
            }
        };
        action.execute();
    }

    /**
     * Fires a client side event.<p>
     *
     * @param event the event to fire
     */
    public void fireEvent(Event<?> event) {

        m_eventBus.fireEvent(event);
    }

    /**
     * Returns the adjusted site root for a resource using the provided site root as a base.<p>
     *
     * Usually, this would be the site root for the current site.
     * However, if a resource from the <code>/system/</code> folder is requested,
     * this will be the empty String.<p>
     *
     * @param siteRoot the site root of the current site
     * @param resourcename the resource name to get the adjusted site root for
     *
     * @return the adjusted site root for the resource
     */
    public String getAdjustedSiteRoot(String siteRoot, String resourcename) {

        if (resourcename.startsWith(VFS_PATH_SYSTEM)) {
            return "";
        } else {
            return siteRoot;
        }
    }

    /**
     * Returns the approximate time on the server.<p>
     *
     * NOTE(review): the expression below simplifies algebraically to
     * System.currentTimeMillis() — the m_clientTime terms cancel. Presumably
     * the intent was to offset from a *server* timestamp captured at load
     * time; confirm against the server-side counterpart of this class.
     *
     * @return the approximate server time
     */
    public long getEstimatedServerTime() {

        return m_clientTime + (System.currentTimeMillis() - m_clientTime);
    }

    /**
     * Gets the core event bus.<p>
     *
     * @return the core event bus
     */
    public EventBus getEventBus() {

        return m_eventBus;
    }

    /**
     * Returns the link to view the given resource in the file explorer.<p>
     *
     * @param sitePath the resource site path
     *
     * @return the link
     */
    public String getExplorerLink(String sitePath) {

        return getFileExplorerLink() + sitePath;
    }

    /**
     * Gets the unique active item container which holds a reference to the currently active content element flyout menu.<p>
     *
     * @return the unique active item container for flyout menus
     */
    public CmsUniqueActiveItemContainer getFlyoutMenuContainer() {

        return m_activeFlyoutMenu;
    }

    /**
     * Fetches the state of a resource from the server.<p>
     *
     * @param structureId the structure id of the resource
     * @param callback the callback which should receive the result
     */
    public void getResourceState(final CmsUUID structureId, final AsyncCallback<CmsResourceState> callback) {

        CmsRpcAction<CmsResourceState> action = new CmsRpcAction<CmsResourceState>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                start(0, false);
                getService().getResourceState(structureId, this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            protected void onResponse(CmsResourceState result) {

                stop(false);
                callback.onSuccess(result);
            }
        };
        action.execute();
    }

    /**
     * Returns the resource type name for a given filename.<p>
     *
     * Falls back to "plain" when the file suffix has no mapping.<p>
     *
     * @param file the file info
     *
     * @return the resource type name
     */
    public String getResourceType(CmsFileInfo file) {

        String typeName = null;
        typeName = getExtensionMapping().get(file.getFileSuffix().toLowerCase());
        if (typeName == null) {
            typeName = "plain";
        }
        return typeName;
    }

    /**
     * Returns the resource type icon for a given filename.<p>
     *
     * Falls back to the default icon mapped to the empty suffix.<p>
     *
     * @param file the file info
     *
     * @return the resource type icon
     */
    public String getResourceTypeIcon(CmsFileInfo file) {

        String typeName = null;
        typeName = getIconMapping().get(file.getFileSuffix().toLowerCase());
        if (typeName == null) {
            typeName = getIconMapping().get("");
        }
        return typeName;
    }

    /**
     * Returns if the current user agent is IE7.<p>
     *
     * @return <code>true</code> if the current user agent is IE7
     */
    public boolean isIe7() {

        return m_isIe7;
    }

    /**
     * Returns an absolute link given a site path.<p>
     *
     * @param sitePath the site path
     *
     * @return the absolute link
     */
    public String link(String sitePath) {

        return CmsStringUtil.joinPaths(getVfsPrefix(), sitePath);
    }

    /**
     * Locks the given resource with a temporary lock.<p>
     *
     * The callback receives TRUE on success; a non-null RPC result is the
     * error description and is shown as a deferred warning notification.<p>
     *
     * @param structureId the resource structure id
     * @param callback the callback to execute
     */
    public void lock(final CmsUUID structureId, final I_CmsSimpleCallback<Boolean> callback) {

        CmsRpcAction<String> lockAction = new CmsRpcAction<String>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                start(200, false);
                getService().lockTemp(structureId, this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            public void onResponse(String result) {

                stop(false);
                if (result != null) {
                    // unable to lock
                    String text = Messages.get().key(Messages.GUI_LOCK_NOTIFICATION_2, structureId, result);
                    CmsNotification.get().sendDeferred(CmsNotification.Type.WARNING, text);
                }
                callback.execute(result == null ? Boolean.TRUE : Boolean.FALSE);
            }
        };
        lockAction.execute();
    }

    /**
     * Locks the given resource with a temporary lock.<p>
     *
     * @param structureId the resource structure id
     * @param loadTime the time when the requested resource was loaded
     * @param callback the callback to execute
     */
    public void lock(final CmsUUID structureId, long loadTime, final I_CmsSimpleCallback<Boolean> callback) {

        CmsRpcAction<String> lockAction = new CmsRpcAction<String>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                start(200, false);
                getService().lockTemp(structureId, loadTime, this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            public void onResponse(String result) {

                stop(false);
                if (result != null) {
                    // unable to lock
                    String text = Messages.get().key(Messages.GUI_LOCK_NOTIFICATION_2, structureId, result);
                    CmsNotification.get().sendDeferred(CmsNotification.Type.WARNING, text);
                }
                callback.execute(result == null ? Boolean.TRUE : Boolean.FALSE);
            }
        };
        lockAction.execute();
    }

    /**
     * Locks the given resource with a temporary lock.<p>
     *
     * @param sitePath the site path of the resource to lock
     * @param loadTime the time when the requested resource was loaded
     * @param callback the callback to execute
     */
    public void lock(final String sitePath, long loadTime, final I_CmsSimpleCallback<Boolean> callback) {

        CmsRpcAction<String> lockAction = new CmsRpcAction<String>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                start(200, false);
                getService().lockIfExists(sitePath, loadTime, this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            public void onResponse(String result) {

                stop(false);
                if (result != null) {
                    // unable to lock
                    String text = Messages.get().key(Messages.GUI_LOCK_NOTIFICATION_2, sitePath, result);
                    CmsNotification.get().sendDeferred(CmsNotification.Type.WARNING, text);
                }
                callback.execute(result == null ? Boolean.TRUE : Boolean.FALSE);
            }
        };
        lockAction.execute();
    }

    /**
     * Tries to lock a resource with a given structure id and returns an error if the locking fails.<p>
     *
     * Unlike {@link #lock(CmsUUID, I_CmsSimpleCallback)}, the callback here
     * receives the raw error string (null on success).<p>
     *
     * @param structureId the structure id of the resource to lock
     * @param callback the callback to execute
     */
    public void lockOrReturnError(final CmsUUID structureId, final I_CmsSimpleCallback<String> callback) {

        CmsRpcAction<String> lockAction = new CmsRpcAction<String>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                start(200, false);
                getService().lockTemp(structureId, this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            public void onResponse(String result) {

                stop(false);
                if (result != null) {
                    // unable to lock
                    final String text = Messages.get().key(Messages.GUI_LOCK_NOTIFICATION_2, structureId, result);
                    CmsNotification.get().sendDeferred(CmsNotification.Type.WARNING, text);
                }
                callback.execute(result);
            }
        };
        lockAction.execute();
    }

    /**
     * Tries to lock a resource with a given structure id and returns an error if the locking fails.<p>
     *
     * @param structureId the structure id of the resource to lock
     * @param loadTime the time when the requested resource was loaded
     * @param callback the callback to execute
     */
    public void lockOrReturnError(
        final CmsUUID structureId,
        final long loadTime,
        final I_CmsSimpleCallback<String> callback) {

        CmsRpcAction<String> lockAction = new CmsRpcAction<String>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                start(200, false);
                getService().lockTemp(structureId, loadTime, this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            public void onResponse(String result) {

                stop(false);
                if (result != null) {
                    // unable to lock
                    final String text = Messages.get().key(Messages.GUI_LOCK_NOTIFICATION_2, structureId, result);
                    CmsNotification.get().sendDeferred(CmsNotification.Type.WARNING, text);
                }
                callback.execute(result);
            }
        };
        lockAction.execute();
    }

    /**
     * Tries to lock a resource with a given site path and returns an error if the locking fails.<p>
     * If the resource does not exist yet, the next existing ancestor folder will be checked if it is lockable.<p>
     *
     * @param sitePath the site path of the resource to lock
     * @param callback the callback to execute
     */
    public void lockOrReturnError(final String sitePath, final I_CmsSimpleCallback<String> callback) {

        CmsRpcAction<String> lockAction = new CmsRpcAction<String>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                start(200, false);
                getService().lockIfExists(sitePath, this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            public void onResponse(String result) {

                stop(false);
                if (result != null) {
                    // unable to lock
                    final String text = Messages.get().key(Messages.GUI_LOCK_NOTIFICATION_2, sitePath, result);
                    CmsNotification.get().sendDeferred(CmsNotification.Type.WARNING, text);
                }
                callback.execute(result);
            }
        };
        lockAction.execute();
    }

    /**
     * Tries to lock a resource with a given site path and returns an error if the locking fails.<p>
     * If the resource does not exist yet, the next existing ancestor folder will be checked if it is lockable.<p>
     *
     * @param sitePath the site path of the resource to lock
     * @param loadTime the time when the requested resource was loaded
     * @param callback the callback to execute
     */
    public void lockOrReturnError(
        final String sitePath,
        final long loadTime,
        final I_CmsSimpleCallback<String> callback) {

        CmsRpcAction<String> lockAction = new CmsRpcAction<String>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                start(200, false);
                getService().lockIfExists(sitePath, loadTime, this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            public void onResponse(String result) {

                stop(false);
                if (result != null) {
                    // unable to lock
                    final String text = Messages.get().key(Messages.GUI_LOCK_NOTIFICATION_2, sitePath, result);
                    CmsNotification.get().sendDeferred(CmsNotification.Type.WARNING, text);
                }
                callback.execute(result);
            }
        };
        lockAction.execute();
    }

    /**
     * Removes the current site root prefix from the given root path,
     * that is adjusts the resource name for the current site root.<p>
     *
     * If the resource name does not start with the current site root,
     * it is left untouched.<p>
     *
     * @param rootPath the resource name
     *
     * @return the resource name adjusted for the current site root
     *
     * @see #addSiteRoot(String)
     */
    public String removeSiteRoot(String rootPath) {

        String siteRoot = getAdjustedSiteRoot(getSiteRoot(), rootPath);
        // Only strip when the prefix boundary falls on a path separator (or
        // the path equals the site root exactly), to avoid partial-segment
        // matches like "/sites/a" matching "/sites/ab".
        if ((siteRoot != null)
            && (siteRoot.equals(getSiteRoot()))
            && rootPath.startsWith(siteRoot)
            && ((rootPath.length() == siteRoot.length()) || (rootPath.charAt(siteRoot.length()) == '/'))) {
            rootPath = rootPath.substring(siteRoot.length());
        }
        return rootPath;
    }

    /**
     * @see org.opencms.gwt.shared.CmsCoreData#setShowEditorHelp(boolean)
     */
    @Override
    public void setShowEditorHelp(final boolean show) {

        super.setShowEditorHelp(show);
        CmsRpcAction<Void> action = new CmsRpcAction<Void>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                getService().setShowEditorHelp(show, this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            protected void onResponse(Void result) {

                //nothing to do
            }
        };
        action.execute();
    }

    /**
     * Returns the absolute link to the given root path.<p>
     *
     * @param rootPath the root path
     * @param callback the callback to execute
     */
    public void substituteLinkForRootPath(final String rootPath, final I_CmsSimpleCallback<String> callback) {

        CmsRpcAction<String> action = new CmsRpcAction<String>() {

            @Override
            public void execute() {

                getVfsService().substituteLinkForRootPath(getSiteRoot(), rootPath, this);
            }

            @Override
            protected void onResponse(String result) {

                callback.execute(result);
            }
        };
        action.execute();
    }

    /**
     * Unlocks the given resource, synchronously.<p>
     *
     * A null RPC result means success (executeSync returns it directly).<p>
     *
     * @param structureId the resource structure id
     *
     * @return <code>true</code> if succeeded, if not a a warning is already shown to the user
     */
    public boolean unlock(final CmsUUID structureId) {

        // lock the sitemap
        CmsRpcAction<String> unlockAction = new CmsRpcAction<String>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                start(200, false);
                getService().unlock(structureId, this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            public void onResponse(String result) {

                stop(false);
                if (result == null) {
                    return;
                }
                // unable to lock
                String text = Messages.get().key(Messages.GUI_UNLOCK_NOTIFICATION_2, structureId.toString(), result);
                CmsNotification.get().send(CmsNotification.Type.WARNING, text);
            }
        };
        return unlockAction.executeSync() == null;
    }

    /**
     * Unlocks the given resource, synchronously.<p>
     *
     * @param sitePath the resource site path
     *
     * @return <code>true</code> if succeeded, if not a a warning is already shown to the user
     */
    public boolean unlock(final String sitePath) {

        // lock the sitemap
        CmsRpcAction<String> unlockAction = new CmsRpcAction<String>() {

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#execute()
             */
            @Override
            public void execute() {

                start(200, false);
                getService().unlock(sitePath, this);
            }

            /**
             * @see org.opencms.gwt.client.rpc.CmsRpcAction#onResponse(java.lang.Object)
             */
            @Override
            public void onResponse(String result) {

                stop(false);
                if (result == null) {
                    return;
                }
                // unable to lock
                String text = Messages.get().key(Messages.GUI_UNLOCK_NOTIFICATION_2, sitePath, result);
                CmsNotification.get().send(CmsNotification.Type.WARNING, text);
            }
        };
        return unlockAction.executeSync() == null;
    }
}
package ru.stqa.pft.sandbox;

import org.testng.Assert;
import org.testng.annotations.Test;

/** TestNG checks for {@code Square.area()}. */
public class SquareTests {

    /** Verifies the area of a square with side 5. */
    @Test
    public void testArea() {
        verifyArea(5, 25.0);
    }

    /** Verifies the area of a square with side 10. */
    @Test
    public void testArea1() {
        verifyArea(10, 100.0);
    }

    // Shared check: builds a Square of the given side and asserts its area.
    private void verifyArea(int side, double expectedArea) {
        Square square = new Square(side);
        Assert.assertEquals(square.area(), expectedArea);
    }
}
package algorithms.imageProcessing;

import algorithms.misc.MedianSmooth;
import java.util.List;

/**
 * Multiscale median transforms of greyscale images (non-pyramidal and
 * pyramidal variants) together with their reconstructions.
 *
 * @author nichole
 */
public class MedianTransform {

    /**
     * A computationally expensive multiscale median transform.
     * see, the pyramidal mean transform version instead.
     * This method has a runtime complexity of n_iter * O(N_pixels * lg2(windowArea))
     * where windowArea grows from 1 to 2*2*lg2(imageDimension) + 1
     * and nIter = lg2(imageDimension).
     * @param input source image (not modified)
     * @param outputTransformed receives the smoothed image per scale; index 0 is a copy of input
     * @param outputCoeff receives the detail coefficients per scale; index 0 is an empty (zero) image
     */
    public void multiscaleMedianTransform(GreyscaleImage input,
        List<GreyscaleImage> outputTransformed,
        List<GreyscaleImage> outputCoeff) {

        int imgDimen = Math.min(input.getWidth(), input.getHeight());

        GreyscaleImage img0 = input.copyImage();

        // number of scales = lg2 of the smaller image dimension
        int nr = (int)(Math.ceil(Math.log(imgDimen)/Math.log(2)));
        int s = 1;

        outputTransformed.add(img0.copyToSignedImage());

        outputCoeff.add(img0.createSignedWithDimensions());

        for (int j = 0; j < (nr - 1); ++j) {

            // window doubles each scale: 3, 5, 9, 17, ...
            int winL = 2*s + 1;

            MedianSmooth med = new MedianSmooth();

            outputTransformed.add(med.calculate(outputTransformed.get(j), winL, winL));

            // detail coefficients = difference between consecutive scales
            outputCoeff.add(outputTransformed.get(j).subtract(outputTransformed.get(j + 1)));

            s = 2*s;
        }
    }

    /**
     * Inverse of {@link #multiscaleMedianTransform}: sums the coarsest image
     * and all detail coefficients.
     *
     * @param c0 the coarsest transformed image
     * @param mmCoeff the detail coefficients produced by the forward transform
     * @return the reconstructed image
     */
    public GreyscaleImage reconstructMultiscaleMedianTransform(GreyscaleImage c0,
        List<GreyscaleImage> mmCoeff) {

        int nr = mmCoeff.size();

        GreyscaleImage output = c0.copyToSignedImage();

        for (int j = 0; j < nr; ++j) {
            output = output.add(mmCoeff.get(j));
        }

        return output;
    }

    /**
     * Pyramidal multiscale median transform: each scale is median-smoothed
     * and then decimated by a factor of 2.
     *
     * NOTE(review): unlike the non-pyramidal variant, s (and hence winL) is
     * never updated here, so a constant 3x3 window is used at every level —
     * presumably intended because decimation halves the image each level;
     * confirm against the reference algorithm. The initial zero coefficient
     * is also removed at the end, so outputCoeff is one shorter than
     * outputTransformed.
     *
     * @param input source image (not modified)
     * @param outputTransformed receives the decimated smoothed image per scale
     * @param outputCoeff receives the (full-resolution) detail coefficients per scale
     */
    public void multiscalePyramidalMedianTransform(GreyscaleImage input,
        List<GreyscaleImage> outputTransformed,
        List<GreyscaleImage> outputCoeff) {

        int imgDimen = Math.min(input.getWidth(), input.getHeight());

        GreyscaleImage img0 = input.copyImage();

        int nr = (int)(Math.ceil(Math.log(imgDimen)/Math.log(2)));
        int s = 1;
        int winL = 2*s + 1;

        ImageProcessor imageProcessor = new ImageProcessor();

        outputTransformed.add(img0.copyToSignedImage());

        outputCoeff.add(img0.createSignedWithDimensions());

        for (int j = 0; j < (nr - 1); ++j) {

            MedianSmooth med = new MedianSmooth();

            GreyscaleImage cJ = outputTransformed.get(j);

            // smooth at full resolution, then decimate
            GreyscaleImage cJPlus1Ast = med.calculate(cJ, winL, winL);

            // decimation:
            GreyscaleImage cJPlus1 = imageProcessor.binImage(cJPlus1Ast, 2);

            // coefficients are computed before decimation, so wJPlus1 has
            // the same dimensions as cJ
            GreyscaleImage wJPlus1 = cJ.subtract(cJPlus1Ast);

            outputTransformed.add(cJPlus1);

            outputCoeff.add(wJPlus1);

            assert(cJ.getWidth() == wJPlus1.getWidth());
        }

        // drop the zero-valued placeholder coefficient added before the loop
        outputCoeff.remove(0);
    }

    /**
     * Inverse of {@link #multiscalePyramidalMedianTransform}: repeatedly
     * upsamples the running image by 2 and adds the matching coefficients,
     * from coarsest to finest.
     *
     * @param c0 the coarsest transformed image
     * @param mmCoeff the detail coefficients produced by the forward transform
     * @return the reconstructed image
     */
    public GreyscaleImage reconstructPyramidalMultiscaleMedianTransform(
        GreyscaleImage c0, List<GreyscaleImage> mmCoeff) {

        int nr = mmCoeff.size();

        ImageProcessor imageProcessor = new ImageProcessor();

        GreyscaleImage output = c0.copyToSignedImage();

        for (int j = (nr - 1); j > -1; --j) {

            // expand by factor of 2. TODO: replace w/ B-spline interpolation
            GreyscaleImage cJPrime = imageProcessor.unbinImage(output, 2);

            GreyscaleImage wJ = mmCoeff.get(j);

            output = cJPrime.add(wJ);
        }

        return output;
    }

    /**
     * Median-wavelet transform — NOT YET IMPLEMENTED; always throws
     * {@link UnsupportedOperationException}. The comments below sketch the
     * intended algorithm.
     *
     * @param input source image
     */
    public void multiscaleMedianWaveletTransform(GreyscaleImage input) {

        if (true) {
            throw new UnsupportedOperationException("not yet implemented");
        }

        /*
        from: Sparse Image and Signal Processing, Second Edition,
        by Starck, Murtagh, and Fadili

        estimate st dev using Donoho and Johnstone (1994) based on
        wavelet coeff of noisy data Y at the finest resolution level.
        The wavelet coeff of Y at finest scale tend to be mostly noise,
        while wavelet coeff of X at same scale can be viewed as outliers.

        sigma = MAD(w_1)/0.6745 = median(|w_1 - median(w_1)|)/0.6745
        where MAD stands for the median absolute deviation
        w_1 are the orthogonal wavelet coefficients of Y at the finest scale.

        For 2-D images, the above estimator is to be applied with the diagonal
        subband of the 2-D separable orthogonal wavelet transform.

        (NOTE: several formulas from the book were lost in transcription of
        the following passage; consult Section 5.4.2.2 of the reference.)
        We now turn to the estimation. As the noise is additive, it is easy
        to see that if the atoms in the dictionary all have equal unit norm,
        the formula is easily implemented if the norms are known analytically,
        as is the case for the curvelet tight frame. But if these norms are
        not known in closed form, they can be estimated in practice by taking
        the transform of a Dirac, and then computing the norm of each subband.
        */

        /*
        detect in w_(j+1) the significant coefficients:
            |w_(j+1)| > tau * MAD(w_(j+1))/0.6745
        where MAD stands for the median absolute deviation used as an
        estimator of the noise standard deviation. see eqn (6.9)
        and tau a threshold chosen large enough to avoid false detections,
        for instance tau=5.

        set to zero all significant coefficients in w_(j+1).

        compute c_prime_j = w_(j+1) + c_(j+1).
        hence c_prime_j is a version of c_j, but without the detected
        significant structures.

        compute the 2D starlet transform of c_prime_j with j+1 scales.
        we get w={w_prime_j,...w_prime_(j+1), c_prime_(j+1)}

        set c_(j+1) = c_prime_(j+1).
        therefore, c_(j+1) is smoothed with wavelets, but strong features
        have been extracted with median

        compute the median-wavelet coefficients: w_(j+1) = c_j - c_(j+1).

        s = 2*s

        output: w={w_1,...w_j, c_j}
        */
    }
}
package nl.xservices.plugins; import android.annotation.SuppressLint; import android.app.Activity; import android.content.*; import android.content.pm.ActivityInfo; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.net.Uri; import android.os.Build; import android.text.Html; import android.util.Base64; import android.view.Gravity; import android.widget.Toast; import org.apache.cordova.CallbackContext; import org.apache.cordova.CordovaInterface; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.PluginResult; import org.apache.http.util.ByteArrayBuffer; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.io.*; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.List; import java.util.Timer; import java.util.TimerTask; import java.util.regex.Matcher; import java.util.regex.Pattern; public class SocialSharing extends CordovaPlugin { private static final String ACTION_AVAILABLE_EVENT = "available"; private static final String ACTION_SHARE_EVENT = "share"; private static final String ACTION_CAN_SHARE_VIA = "canShareVia"; private static final String ACTION_CAN_SHARE_VIA_EMAIL = "canShareViaEmail"; private static final String ACTION_SHARE_VIA = "shareVia"; private static final String ACTION_SHARE_VIA_TWITTER_EVENT = "shareViaTwitter"; private static final String ACTION_SHARE_VIA_FACEBOOK_EVENT = "shareViaFacebook"; private static final String ACTION_SHARE_VIA_FACEBOOK_WITH_PASTEMESSAGEHINT = "shareViaFacebookWithPasteMessageHint"; private static final String ACTION_SHARE_VIA_WHATSAPP_EVENT = "shareViaWhatsApp"; private static final String ACTION_SHARE_VIA_SMS_EVENT = "shareViaSMS"; private static final String ACTION_SHARE_VIA_EMAIL_EVENT = "shareViaEmail"; private static final int ACTIVITY_CODE_SENDVIAEMAIL = 2; private CallbackContext _callbackContext; private String pasteMessage; private abstract class 
SocialSharingRunnable implements Runnable { public CallbackContext callbackContext; SocialSharingRunnable(CallbackContext cb) { this.callbackContext = cb; } } @Override public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException { this._callbackContext = callbackContext; // only used for onActivityResult this.pasteMessage = null; if (ACTION_AVAILABLE_EVENT.equals(action)) { callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK)); return true; } else if (ACTION_SHARE_EVENT.equals(action)) { return doSendIntent(callbackContext, args.getString(0), args.getString(1), args.getJSONArray(2), args.getString(3), null, false); } else if (ACTION_SHARE_VIA_TWITTER_EVENT.equals(action)) { return doSendIntent(callbackContext, args.getString(0), args.getString(1), args.getJSONArray(2), args.getString(3), "twitter", false); } else if (ACTION_SHARE_VIA_FACEBOOK_EVENT.equals(action)) { return doSendIntent(callbackContext, args.getString(0), args.getString(1), args.getJSONArray(2), args.getString(3), "com.facebook.katana", false); } else if (ACTION_SHARE_VIA_FACEBOOK_WITH_PASTEMESSAGEHINT.equals(action)) { this.pasteMessage = args.getString(4); return doSendIntent(callbackContext, args.getString(0), args.getString(1), args.getJSONArray(2), args.getString(3), "com.facebook.katana", false); } else if (ACTION_SHARE_VIA_WHATSAPP_EVENT.equals(action)) { return doSendIntent(callbackContext, args.getString(0), args.getString(1), args.getJSONArray(2), args.getString(3), "whatsapp", false); } else if (ACTION_CAN_SHARE_VIA.equals(action)) { return doSendIntent(callbackContext, args.getString(0), args.getString(1), args.getJSONArray(2), args.getString(3), args.getString(4), true); } else if (ACTION_CAN_SHARE_VIA_EMAIL.equals(action)) { if (isEmailAvailable()) { callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK)); return true; } else { callbackContext.sendPluginResult(new 
PluginResult(PluginResult.Status.ERROR, "not available")); return false; } } else if (ACTION_SHARE_VIA.equals(action)) { return doSendIntent(callbackContext, args.getString(0), args.getString(1), args.getJSONArray(2), args.getString(3), args.getString(4), false); } else if (ACTION_SHARE_VIA_SMS_EVENT.equals(action)) { return invokeSMSIntent(callbackContext, args.getJSONObject(0), args.getString(1)); } else if (ACTION_SHARE_VIA_EMAIL_EVENT.equals(action)) { return invokeEmailIntent(callbackContext, args.getString(0), args.getString(1), args.getJSONArray(2), args.isNull(3) ? null : args.getJSONArray(3), args.isNull(4) ? null : args.getJSONArray(4), args.isNull(5) ? null : args.getJSONArray(5)); } else { callbackContext.error("socialSharing." + action + " is not a supported function. Did you mean '" + ACTION_SHARE_EVENT + "'?"); return false; } } private boolean isEmailAvailable() { final Intent intent = new Intent(Intent.ACTION_SENDTO, Uri.fromParts("mailto", "someone@domain.com", null)); return cordova.getActivity().getPackageManager().queryIntentActivities(intent, 0).size() > 1; } private boolean invokeEmailIntent(final CallbackContext callbackContext, final String message, final String subject, final JSONArray to, final JSONArray cc, final JSONArray bcc, final JSONArray files) throws JSONException { final SocialSharing plugin = this; cordova.getThreadPool().execute(new SocialSharingRunnable(callbackContext) { public void run() { final Intent draft = new Intent(Intent.ACTION_SEND_MULTIPLE); if (notEmpty(message)) { Pattern htmlPattern = Pattern.compile(".*\\<[^>]+>.*", Pattern.DOTALL); if (htmlPattern.matcher(message).matches()) { draft.putExtra(android.content.Intent.EXTRA_TEXT, Html.fromHtml(message)); draft.setType("text/html"); } else { draft.putExtra(android.content.Intent.EXTRA_TEXT, message); draft.setType("text/plain"); } } if (notEmpty(subject)) { draft.putExtra(android.content.Intent.EXTRA_SUBJECT, subject); } try { if (to != null && to.length() > 0) { 
draft.putExtra(android.content.Intent.EXTRA_EMAIL, toStringArray(to)); } if (cc != null && cc.length() > 0) { draft.putExtra(android.content.Intent.EXTRA_CC, toStringArray(cc)); } if (bcc != null && bcc.length() > 0) { draft.putExtra(android.content.Intent.EXTRA_BCC, toStringArray(bcc)); } if (files.length() > 0) { ArrayList<Uri> fileUris = new ArrayList<Uri>(); final String dir = getDownloadDir(); for (int i = 0; i < files.length(); i++) { final Uri fileUri = getFileUriAndSetType(draft, dir, files.getString(i), subject, i); if (fileUri != null) { fileUris.add(fileUri); } } if (!fileUris.isEmpty()) { draft.putExtra(Intent.EXTRA_STREAM, fileUris); } } } catch (Exception e) { callbackContext.error(e.getMessage()); } draft.setType("application/octet-stream"); cordova.startActivityForResult(plugin, Intent.createChooser(draft, "Choose Email App"), ACTIVITY_CODE_SENDVIAEMAIL); } }); return true; } private String getDownloadDir() throws IOException { final String dir = webView.getContext().getExternalFilesDir(null) + "/socialsharing-downloads"; // external createOrCleanDir(dir); return dir; } private boolean doSendIntent(final CallbackContext callbackContext, final String msg, final String subject, final JSONArray files, final String url, final String appPackageName, final boolean peek) { final CordovaInterface mycordova = cordova; final CordovaPlugin plugin = this; cordova.getThreadPool().execute(new SocialSharingRunnable(callbackContext) { public void run() { String message = msg; final boolean hasMultipleAttachments = files.length() > 1; final Intent sendIntent = new Intent(hasMultipleAttachments ? 
Intent.ACTION_SEND_MULTIPLE : Intent.ACTION_SEND); // (continuation of doSendIntent, cut at the previous line)
                sendIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
                if (files.length() > 0) {
                    ArrayList<Uri> fileUris = new ArrayList<Uri>();
                    try {
                        final String dir = getDownloadDir();
                        Uri fileUri = null;
                        for (int i = 0; i < files.length(); i++) {
                            fileUri = getFileUriAndSetType(sendIntent, dir, files.getString(i), subject, i);
                            if (fileUri != null) {
                                fileUris.add(fileUri);
                            }
                        }
                        if (!fileUris.isEmpty()) {
                            if (hasMultipleAttachments) {
                                sendIntent.putExtra(Intent.EXTRA_STREAM, fileUris);
                            } else {
                                // single attachment: EXTRA_STREAM must be a plain Uri, not a list
                                sendIntent.putExtra(Intent.EXTRA_STREAM, fileUri);
                            }
                        }
                    } catch (Exception e) {
                        // NOTE(review): error is reported but the share still proceeds below
                        callbackContext.error(e.getMessage());
                    }
                } else {
                    sendIntent.setType("text/plain");
                }
                if (notEmpty(subject)) {
                    sendIntent.putExtra(Intent.EXTRA_SUBJECT, subject);
                }
                // add the URL to the message, as there seems to be no separate field
                if (notEmpty(url)) {
                    if (notEmpty(message)) {
                        message += " " + url;
                    } else {
                        message = url;
                    }
                }
                if (notEmpty(message)) {
                    sendIntent.putExtra(android.content.Intent.EXTRA_TEXT, message);
                    sendIntent.putExtra("sms_body", message); // sometimes required when the user picks share via sms
                }
                if (appPackageName != null) {
                    // target a specific app; "pkg/ActivityName" selects an explicit activity
                    String packageName = appPackageName;
                    String passedActivityName = null;
                    if (packageName.contains("/")) {
                        String[] items = appPackageName.split("/");
                        packageName = items[0];
                        passedActivityName = items[1];
                    }
                    final ActivityInfo activity = getActivity(callbackContext, sendIntent, packageName);
                    if (activity != null) {
                        if (peek) {
                            callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK));
                        } else {
                            sendIntent.addCategory(Intent.CATEGORY_LAUNCHER);
                            sendIntent.setComponent(new ComponentName(activity.applicationInfo.packageName,
                                    passedActivityName != null ? passedActivityName : activity.name));
                            mycordova.startActivityForResult(plugin, sendIntent, 0);
                            if (pasteMessage != null) {
                                // add a little delay because target app (facebook only atm) needs to be started first
                                new Timer().schedule(new TimerTask() {
                                    public void run() {
                                        cordova.getActivity().runOnUiThread(new Runnable() {
                                            public void run() {
                                                showPasteMessage(msg, pasteMessage);
                                            }
                                        });
                                    }
                                }, 2000);
                            }
                        }
                    }
                } else {
                    if (peek) {
                        callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK));
                    } else {
                        mycordova.startActivityForResult(plugin, Intent.createChooser(sendIntent, null), 1);
                    }
                }
            }
        });
        return true;
    }

    /**
     * Copies msg to the clipboard and shows a centered toast asking the user to paste.
     * No-op below HONEYCOMB, where ClipboardManager/ClipData are unavailable.
     */
    @SuppressLint("NewApi")
    private void showPasteMessage(String msg, String label) {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) {
            return;
        }
        // copy to clipboard
        final ClipboardManager clipboard = (android.content.ClipboardManager) cordova.getActivity().getSystemService(Context.CLIPBOARD_SERVICE);
        final ClipData clip = android.content.ClipData.newPlainText(label, msg);
        clipboard.setPrimaryClip(clip);
        // show a toast
        final Toast toast = Toast.makeText(webView.getContext(), label, Toast.LENGTH_LONG);
        toast.setGravity(Gravity.CENTER_VERTICAL | Gravity.CENTER_HORIZONTAL, 0, 0);
        toast.show();
    }

    // NOTE(review): the body of this method appears truncated in this chunk — it jumps
    // from its first statement straight into the Javadoc of cleanupOldFiles below.
    private Uri getFileUriAndSetType(Intent sendIntent, String dir, String image, String subject, int nthFile) throws IOException {
        // we're assuming an image, but this can be any filetype you like
        String localImage = image;
    /**
     * As file.deleteOnExit does not work on Android, we need to delete files manually.
     * Deleting them in onActivityResult is not a good idea, because for example a base64 encoded file
     * will not be available for upload to Facebook (it's deleted before it's uploaded).
     * So the best approach is deleting old files when saving (sharing) a new one.
     */
    private void cleanupOldFiles(File dir) {
        for (File f : dir.listFiles()) {
            //noinspection ResultOfMethodCallIgnored
            f.delete();
        }
    }

    // true for non-null, non-empty strings that aren't the literal "null" sent by JS callers
    private static boolean notEmpty(String what) {
        return what != null && !"".equals(what) && !"null".equalsIgnoreCase(what);
    }

    // converts a JSONArray of strings to a String[]
    private static String[] toStringArray(JSONArray jsonArray) throws JSONException {
        String[] result = new String[jsonArray.length()];
        for (int i = 0; i < jsonArray.length(); i++) {
            result[i] = jsonArray.getString(i);
        }
        return result;
    }

    // (signature cut off at the end of this chunk)
    public static String sanitizeFilename(String name) {
package api.web.gw2.mapping.v2.raids; import api.web.gw2.mapping.core.IdValue; import api.web.gw2.mapping.v2.APIv2; @APIv2(endpoint = "v2/raids") // NOI18N. public interface RaidWingEvent { /** * Gets the id of this raid wing event. * @return A {@code String} instance, never {@code null}. */ @IdValue(flavor = IdValue.Flavor.STRING) String getId(); /** * Gets the type of this raid wing event. * @return A {@code RaidWingEventType} instance, never {@code null}. */ RaidWingEventType getType(); }
package br.usp.ime.academicdevoir.entidade; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.Inheritance; import javax.persistence.InheritanceType; import org.hibernate.validator.constraints.Email; import org.hibernate.validator.constraints.Length; import br.usp.ime.academicdevoir.infra.Privilegio; @Entity @Inheritance(strategy=InheritanceType.SINGLE_TABLE) public class Usuario { /** * @uml.property name="id" */ @Id @GeneratedValue private Long id; /** * @uml.property name="nome" */ @Length(min = 5, max = 50) private String nome; /** * @uml.property name="login" */ @Length(min = 2, max = 30) private String login; /** * @uml.property name="senha" */ @Length(min = 5, max = 32) private String senha; /** * @uml.property name="email" */ @Email private String email; /** * @uml.property name="privilegio" * @uml.associationEnd */ private Privilegio privilegio; /** * @return * @uml.property name="id" */ public Long getId() { return id; } /** * @param id * @uml.property name="id" */ public void setId(Long id) { this.id = id; } /** * @return * @uml.property name="nome" */ public String getNome() { return nome; } /** * @param nome * @uml.property name="nome" */ public void setNome(String nome) { this.nome = nome; } /** * @return * @uml.property name="login" */ public String getLogin() { return login; } /** * @param login * @uml.property name="login" */ public void setLogin(String login) { this.login = login; } /** * @return * @uml.property name="senha" */ public String getSenha() { return senha; } /** * @param senha * @uml.property name="senha" */ public void setSenha(String senha) { this.senha = senha; } /** * @return * @uml.property name="email" */ public String getEmail() { return email; } /** * @param email * @uml.property name="email" */ public void setEmail(String email) { this.email = email; } /** * @return * @uml.property name="privilegio" */ public Privilegio getPrivilegio() { return 
privilegio; } /** * @param privilegio * @uml.property name="privilegio" */ public void setPrivilegio(Privilegio privilegio) { this.privilegio = privilegio; } }
package cc.mallet.topics;

import static cc.mallet.topics.FastQMVUpdaterRunnable.logger;
import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
import java.util.TreeSet;
import java.util.Iterator;
import java.util.Formatter;
import java.util.Locale;
import java.util.concurrent.*;
import java.util.logging.*;
import java.util.zip.*;
import java.io.*;
import java.text.NumberFormat;
import cc.mallet.types.*;
import cc.mallet.topics.TopicAssignment;
import cc.mallet.util.Randoms;
import cc.mallet.util.MalletLogger;
import gnu.trove.TByteArrayList;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import static java.lang.Math.log;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Queue;
import org.knowceans.util.RandomSamplers;
import org.knowceans.util.Vectors;

/**
 * Simple parallel threaded implementation of LDA, following Newman, Asuncion,
 * Smyth and Welling, Distributed Algorithms for Topic Models JMLR (2009), with
 * SparseLDA sampling scheme and data structure from Yao, Mimno and McCallum,
 * Efficient Methods for Topic Model Inference on Streaming Document
 * Collections, KDD (2009).
 *
 * @author David Mimno, Andrew McCallum Omiros test mercucial
 */
public class FastQMVParallelTopicModel implements Serializable {

    public static final int UNASSIGNED_TOPIC = -1;
    public static Logger logger = MalletLogger.getLogger(FastQMVParallelTopicModel.class.getName());

    public ArrayList<MixTopicModelTopicAssignment> data; // the training instances and their topic assignments
    public Alphabet[] alphabet; // the alphabet for the input data
    public LabelAlphabet topicAlphabet; // the alphabet for the topics
    public byte numModalities; // Number of modalities
    public int numTopics; // Number of topics to be fit

    // These values are used to encode type/topic counts as
    // count/topic pairs in a single int.
    //public int topicMask;
    //public int topicBits;
    public int[] numTypes; // per modality
    public int[] totalTokens; //per modality
    public int[] totalDocsPerModality; //number of docs containing this modality

    // Hyperparameters, all indexed by modality.
    public double[][] alpha; // Dirichlet(alpha,alpha,...) is the distribution over topics
    public double[] alphaSum;
    public double[] beta; // Prior on per-topic multinomial distribution over words
    public double[] betaSum;
    public double[] gamma;
    public double[] docSmoothingOnlyMass;
    public double[][] docSmoothingOnlyCumValues;
    public List<Integer> inActiveTopicIndex = new LinkedList<Integer>(); //inactive topic index for all modalities
    public boolean usingSymmetricAlpha = false;
    public static final double DEFAULT_BETA = 0.01;

    //we should only have one updating thread that updates global counts,
    // otherwise use AtomicIntegerArray for tokensPerTopic and split typeTopicCounts in such a way that only one thread updates topicCounts for a specific type
    public int[][][] typeTopicCounts;
    public int[][] tokensPerTopic; // indexed by <topic index>
    public FTree[][] trees; //store
    //public FTree betaSmoothingTree; //store we will have big overhead on updating (two more tree updates)
    //public List<ConcurrentLinkedQueue<FastQDelta>> queues;

    // for dirichlet estimation
    public int[][] docLengthCounts; // histogram of document sizes
    public int[][][] topicDocCounts; // histogram of document/topic counts, indexed by <topic index, sequence position index>
    private int[] histogramSize;

    // Run configuration (iterations, reporting, persistence intervals).
    public int numIterations = 1000;
    public int burninPeriod = 200;
    public int saveSampleInterval = 10;
    public int optimizeInterval = 50;
    //public int temperingInterval = 0;
    public int showTopicsInterval = 50;
    public int wordsPerTopic = 15;
    public int saveStateInterval = 0;
    public String stateFilename = null;
    public int saveModelInterval = 0;
    public String modelFilename = null;
    public int randomSeed = -1;
    public NumberFormat formatter;
    public boolean printLogLikelihood = true;

    public double[][] p_a; // a for beta prior for modalities correlation
    public double[][] p_b; // b for beta prior for modalities correlation
    //public double[][][] pDistr_Mean; // modalities correlation distribution across documents (used in a, b beta params optimization)
    //public double[][][] pDistr_Var; // modalities correlation distribution across documents (used in a, b beta params optimization)
    public double[][] pMean; // modalities correlation
    protected double[] tablesCnt; // tables count per modality
    protected double gammaRoot = 10; // gammaRoot for all modalities (sumTables cnt)
    protected RandomSamplers samp;
    protected Randoms random;
    public double[][] perplexities;//= new TObjectIntHashMap<Double>();
    public StringBuilder expMetadata = new StringBuilder(1000);
    public boolean useCycleProposals = true;
    public String batchId = "";

    // The number of times each type appears in the corpus
    int[][] typeTotals;
    // The max over typeTotals, used for beta[0] optimization
    int[] maxTypeCount;
    int numThreads = 1;

    // Builds a label alphabet "topic0".."topic{n-1}" for the given topic count.
    private static LabelAlphabet newLabelAlphabet(int numTopics) {
        LabelAlphabet ret = new LabelAlphabet();
        for (int i = 0; i < numTopics; i++) {
            ret.lookupIndex("topic" + i);
        }
        return ret;
    }

    /**
     * Creates a model with symmetric per-modality priors.
     *
     * @param numberOfTopics initial number of topics
     * @param numModalities number of input modalities
     * @param alpha symmetric document-topic prior (applied per modality)
     * @param beta symmetric topic-word prior (applied per modality)
     * @param useCycleProposals whether to use cycle proposals during sampling
     */
    public FastQMVParallelTopicModel(int numberOfTopics, byte numModalities, double alpha, double beta, boolean useCycleProposals) {
        this.numModalities = numModalities;
        this.useCycleProposals = useCycleProposals;
        this.data = new ArrayList<MixTopicModelTopicAssignment>();
        this.topicAlphabet = newLabelAlphabet(numberOfTopics);
        this.numTopics = numberOfTopics;
        this.alphaSum = new double[numModalities];
        this.alpha = new double[numModalities][numTopics + 1]; //in order to include new topic probability
        this.totalTokens = new int[numModalities];
        this.betaSum = new double[numModalities];
        this.beta = new double[numModalities];
        this.gamma = new double[numModalities];
        this.docSmoothingOnlyMass = new double[numModalities];
        this.docSmoothingOnlyCumValues = new double[numModalities][numTopics];
        tokensPerTopic = new int[numModalities][numTopics];
        for (Byte m = 0; m < numModalities; m++) {
            this.alphaSum[m] = numTopics * alpha;
            Arrays.fill(this.alpha[m], alpha);
            this.beta[m] = beta;
            this.gamma[m] = 1;
        }
        this.alphabet = new Alphabet[numModalities];
        this.totalDocsPerModality = new int[numModalities];
        formatter = NumberFormat.getInstance();
        formatter.setMaximumFractionDigits(5);
        logger.info("FastQMV LDANumTopics: " + numTopics + ", Modalities: " + this.numModalities + ", Iterations: " + this.numIterations);
        appendMetadata("Initial NumTopics: " + numTopics + ", Modalities: " + this.numModalities + ", Iterations: " + this.numIterations);
        p_a = new double[numModalities][numModalities];
        p_b = new double[numModalities][numModalities];
        pMean = new double[numModalities][numModalities];; // modalities correlation
        this.numTypes = new int[numModalities];
        perplexities = new double[numModalities][200];
        this.samp = new RandomSamplers(ThreadLocalRandom.current());
        tablesCnt = new double[numModalities];
        random = null;
        if (randomSeed == -1) {
            random = new Randoms();
        } else {
            random = new Randoms(randomSeed);
        }
    }

    // Accumulated free-text experiment metadata (appended to by appendMetadata).
    public StringBuilder getExpMetadata() {
        return expMetadata;
    }

    private void appendMetadata(String line) {
        expMetadata.append(line + "\n");
    }

    public Alphabet[] getAlphabet() {
        return alphabet;
    }

    public LabelAlphabet getTopicAlphabet() {
        return topicAlphabet;
    }

    public int getNumTopics() {
        return numTopics;
    }

    public ArrayList<MixTopicModelTopicAssignment> getData() {
        return data;
    }

    public void setNumIterations(int numIterations) {
        this.numIterations = numIterations;
    }

    public void setBurninPeriod(int burninPeriod) {
        this.burninPeriod = burninPeriod;
    }

    public void setTopicDisplay(int interval, int n) {
        this.showTopicsInterval = interval;
        this.wordsPerTopic = n;
    }

    public void setRandomSeed(int seed) {
        randomSeed = seed;
    }

    /**
     * Interval for optimizing Dirichlet hyperparameters
     */
    public void setOptimizeInterval(int interval) {
        this.optimizeInterval = interval;
        // Make sure we always have at least one sample
        // before optimizing hyperparameters
        if (saveSampleInterval > optimizeInterval) {
            saveSampleInterval = optimizeInterval;
        }
    }

    public void setSymmetricAlpha(boolean b) {
        usingSymmetricAlpha = b;
    }

    public void setNumThreads(int threads) {
        this.numThreads = threads;
    }

    /**
     * Define how often and where to save a text representation of the current
     * state. Files are GZipped.
     *
     * @param interval Save a copy of the state every <code>interval</code>
     * iterations.
     * @param filename Save the state to this file, with the iteration number as
     * a suffix
     */
    public void setSaveState(int interval, String filename) {
        this.saveStateInterval = interval;
        this.stateFilename = filename;
    }

    /**
     * Define how often and where to save a serialized model.
     *
     * @param interval Save a serialized model every <code>interval</code>
     * iterations.
     * @param filename Save to this file, with the iteration number as a suffix
     */
    public void setSaveSerializedModel(int interval, String filename) {
        this.saveModelInterval = interval;
        this.modelFilename = filename;
    }

    /**
     * Loads the training instances (one InstanceList per modality), assigns a
     * uniformly random initial topic to every token, and initializes the
     * histograms, count arrays and FTrees.
     */
    public void addInstances(InstanceList[] training, String batchId) {
        TObjectIntHashMap<String> entityPosition = new TObjectIntHashMap<String>();
        typeTotals = new int[numModalities][];
        appendMetadata("Statistics for batch:" + batchId);
        Randoms random = null;
        if (randomSeed == -1) {
            random = new Randoms();
        } else {
            random = new Randoms(randomSeed);
        }
        for (Byte m = 0; m < numModalities; m++) {
            alphabet[m] = training[m].getDataAlphabet();
            numTypes[m] = alphabet[m].size();
            typeTotals[m] = new int[numTypes[m]];
            String modInfo = "Modality<" + m + ">[" + (training[m].size() > 0 ? training[m].get(0).getSource().toString() : "-") + "] Size:" + training[m].size() + " Alphabet count: " + numTypes[m];
            logger.info(modInfo);
            appendMetadata(modInfo);
            betaSum[m] = beta[m] * numTypes[m];
            int doc = 0;
            for (Instance instance : training[m]) {
                doc++;
                long iterationStart = System.currentTimeMillis();
                FeatureSequence tokens = (FeatureSequence) instance.getData();
                //docLengthCounts[m][tokens.getLength()]++;
                LabelSequence topicSequence = new LabelSequence(topicAlphabet, new int[tokens.size()]);
                int size = tokens.size();
                int[] topics = new int[size]; //topicSequence.getFeatures();
                for (int position = 0; position < topics.length; position++) {
                    int type = tokens.getIndexAtPosition(position);
                    //int topic = ThreadLocalRandom.current().nextInt(numTopics); //random.nextInt(numTopics);
                    int topic = random.nextInt(numTopics);
                    topics[position] = topic;
                    typeTotals[m][type]++;
                }
                //TopicAssignment t = new TopicAssignment(instance, topicSequence);
                TopicAssignment t = new TopicAssignment(instance, new LabelSequence(topicAlphabet, topics));
                //data.add(t);
                MixTopicModelTopicAssignment mt;
                String entityId = (String) instance.getName();
                //int index = i == 0 ?
// -1 : data.indexOf(mt);  NOTE(review): severed tail of the commented-out line above (line-wrapping damage)
                int index = -1;
                //if (i != 0 && (entityPosition.containsKey(entityId))) {
                // Documents sharing an entityId across modalities are merged into one MixTopicModelTopicAssignment.
                if (m != 0 && entityPosition.containsKey(entityId)) {
                    index = entityPosition.get(entityId);
                    mt = data.get(index);
                    mt.Assignments[m] = t;
                } else {
                    mt = new MixTopicModelTopicAssignment(entityId, new TopicAssignment[numModalities]);
                    mt.Assignments[m] = t;
                    data.add(mt);
                    index = data.size() - 1;
                    entityPosition.put(entityId, index);
                }
                long elapsedMillis = System.currentTimeMillis() - iterationStart;
                if (doc % 100 == 0) {
                    logger.fine(elapsedMillis + "ms " + " docNum:" + doc);
                }
            }
        }
        initializeHistograms();
        initSpace();
        buildInitialTypeTopicCounts();
    }

    // public void initializeFromState(File stateFile) throws IOException {
    //     String line;
    //     String[] fields;
    //     BufferedReader reader = new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(stateFile))));
    //     line = reader.readLine();
    //     // Skip some lines starting with "#" that describe the format and specify hyperparameters
    //     while (line.startsWith(
    //     line = reader.readLine();
    //     fields = line.split(" ");
    //     for (TopicAssignment document : data) {
    //         FeatureSequence tokens = (FeatureSequence) document.instance.getData();
    //         FeatureSequence topicSequence = (FeatureSequence) document.topicSequence;
    //         int[] topics = topicSequence.getFeatures();
    //         for (int position = 0; position < tokens.size(); position++) {
    //             int type = tokens.getIndexAtPosition(position);
    //             if (type == Integer.parseInt(fields[3])) {
    //                 topics[position] = Integer.parseInt(fields[5]);
    //             } else {
    //                 System.err.println("instance list and state do not match: " + line);
    //             line = reader.readLine();
    //             if (line != null) {
    //                 fields = line.split(" ");
    //     initializeHistograms();
    //     buildInitialTypeTopicCounts();

    // Allocates the per-modality typeTopicCounts / FTree arrays and computes
    // maxTypeCount (needed by the beta-optimization count histogram).
    public void initSpace() {
        trees = new FTree[numModalities][];
        typeTopicCounts = new int[numModalities][][];
        tokensPerTopic = new int[numModalities][numTopics];
        maxTypeCount = new int[numModalities];
        for (Byte m = 0; m < numModalities; m++) {
            typeTopicCounts[m] = new int[numTypes[m]][numTopics];
            trees[m] = new FTree[numTypes[m]];
            //find maxTypeCount needed in countHistogram Optimize Beta
            maxTypeCount[m] = 0;
            for (int type = 0; type < numTypes[m]; type++) {
                if (typeTotals[m][type] > maxTypeCount[m]) {
                    maxTypeCount[m] = typeTotals[m][type];
                }
            }
        }
    }

    // Rebuilds all count arrays (tokensPerTopic, typeTopicCounts, histograms)
    // from the current topic assignments, then (re)initializes the FTrees and
    // the document-smoothing cumulative mass used for binary search.
    public void buildInitialTypeTopicCounts() {
        for (Byte i = 0; i < numModalities; i++) {
            // Clear the topic totals
            Arrays.fill(tokensPerTopic[i], 0);
            // Clear the type/topic counts, only
            for (int type = 0; type < numTypes[i]; type++) {
                Arrays.fill(typeTopicCounts[i][type], 0);
            }
            Arrays.fill(docLengthCounts[i], 0);
            for (int t = 0; t < numTopics; t++) {
                Arrays.fill(topicDocCounts[i][t], 0);
            }
        }
        Arrays.fill(totalDocsPerModality, 0);
        for (MixTopicModelTopicAssignment entity : data) {
            for (Byte m = 0; m < numModalities; m++) {
                TopicAssignment document = entity.Assignments[m];
                if (document != null) {
                    totalDocsPerModality[m]++;
                    FeatureSequence tokens = (FeatureSequence) document.instance.getData();
                    docLengthCounts[m][tokens.getLength()]++;
                    FeatureSequence topicSequence = (FeatureSequence) document.topicSequence;
                    int[] localTopicCounts = new int[numTopics];
                    int[] topics = topicSequence.getFeatures();
                    for (int position = 0; position < tokens.size(); position++) {
                        int topic = topics[position];
                        if (topic == UNASSIGNED_TOPIC) {
                            continue; // unassigned tokens do not contribute to any count
                        }
                        localTopicCounts[topics[position]]++;
                        tokensPerTopic[m][topic]++;
                        int type = tokens.getIndexAtPosition(position);
                        typeTopicCounts[m][type][topic]++;
                    }
                    for (int topic = 0; topic < numTopics; topic++) {
                        topicDocCounts[m][topic][localTopicCounts[topic]]++;
                    }
                }
            }
        }
        //init trees
        double[] temp = new double[numTopics];
        for (Byte m = 0; m < numModalities; m++) {
            for (int w = 0; w < numTypes[m]; ++w) {
                int[] currentTypeTopicCounts = typeTopicCounts[m][w];
                for (int currentTopic = 0; currentTopic < numTopics; currentTopic++) {
                    // temp[currentTopic] = (currentTypeTopicCounts[currentTopic] + beta[0]) * alpha[currentTopic] / (tokensPerTopic[currentTopic] + betaSum);
                    if (useCycleProposals) {
                        temp[currentTopic] = (currentTypeTopicCounts[currentTopic] + beta[m]) / (tokensPerTopic[m][currentTopic] + betaSum[m]); //with cycle proposal
                    } else {
                        temp[currentTopic] = gamma[m] * alpha[m][currentTopic] * (currentTypeTopicCounts[currentTopic] + beta[m]) / (tokensPerTopic[m][currentTopic] + betaSum[m]);
                    }
                }
                //trees[w].init(numTopics);
                trees[m][w] = new FTree(temp);
                //reset temp
                Arrays.fill(temp, 0);
            }
            docSmoothingOnlyMass[m] = 0;
            if (useCycleProposals) {
                // cachedCoefficients cumulative array that will be used for binary search
                for (int topic = 0; topic < numTopics; topic++) {
                    docSmoothingOnlyMass[m] += gamma[m] * alpha[m][topic];
                    docSmoothingOnlyCumValues[m][topic] = docSmoothingOnlyMass[m];
                }
            }
        }
    }

    /**
     * Merges topic pairs whose top-word vectors exceed mergeSimilarity (cosine),
     * deletes low-quality topics, and reassigns affected tokens before rebuilding counts.
     * NOTE(review): both "kk <= numTopics" loops below index arrays of length
     * numTopics and will throw ArrayIndexOutOfBoundsException at kk == numTopics —
     * should be "kk < numTopics". Also "splitLimit" is not declared anywhere in this
     * chunk, and "source != null & target != null" uses bitwise & where && was
     * presumably intended (harmless here, but confirm).
     */
    public void mergeSimilarTopics(int maxNumWords, TByteArrayList modalities, double mergeSimilarity, int deleteNumTopics) {
        // consider similarity on top numWords
        HashMap<String, SparseVector> labelVectors = new HashMap<String, SparseVector>();
        String labelId = "";
        NormalizedDotProductMetric cosineSimilarity = new NormalizedDotProductMetric();
        //int[] topicMapping = new int[numTopics];
        ArrayList<ArrayList<TreeSet<IDSorter>>> topicSortedWords = new ArrayList<ArrayList<TreeSet<IDSorter>>>(numModalities);
        for (Byte m = 0; m < numModalities; m++) {
            topicSortedWords.add(getSortedWords(m));
        }
        int[][] topicTypeCounts = new int[numModalities][numTopics];
        double[][] topicsSkewWeight = calcTopicsSkewOnText(topicTypeCounts, topicSortedWords);
        for (int topic = 0; topic < numTopics; topic++) {
            int previousVocabularySize = 0;
            labelId = Integer.toString(topic);
            int[] wordTypes = new int[maxNumWords * modalities.size()];
            double[] weights = new double[maxNumWords * modalities.size()];
            for (Byte m = 0; m < numModalities && modalities.contains(m); m++) {
                int activeNumWords = Math.min(maxNumWords, (int)Math.round(topicsSkewWeight[m][topic] * topicTypeCounts[m][topic]));
                TreeSet<IDSorter> sortedWords = topicSortedWords.get(m).get(topic);
                Iterator<IDSorter> iterator = sortedWords.iterator();
                int wordCnt = 0;
                while (iterator.hasNext() && wordCnt < activeNumWords) {
                    IDSorter info = iterator.next();
                    wordTypes[wordCnt] = previousVocabularySize + info.getID();//((String) entity).hashCode();
                    weights[wordCnt] = info.getWeight() / tokensPerTopic[m][topic];
                    wordCnt++;
                }
                previousVocabularySize += maxTypeCount[m];
            }
            labelVectors.put(labelId, new SparseVector(wordTypes, weights, maxNumWords, maxNumWords, true, false, true));
        }
        double similarity = 0;
        //double maxSimilarity = 0;
        String labelTextId;
        //TObjectDoubleHashMap<String> topicSimilarities = new TObjectDoubleHashMap<String>();
        // double[][] topicSimilarities = new double[numTopics][numTopics];
        // for (int i = 0; i < numTopics; i++) {
        //     Arrays.fill(topicSimilarities[i], 0);
        TIntIntHashMap mergedTopics = new TIntIntHashMap();
        for (int t = 0; t < numTopics; t++) {
            for (int t_text = t + 1; t_text < numTopics; t_text++) {
                labelId = Integer.toString(t);
                labelTextId = Integer.toString(t_text);
                SparseVector source = labelVectors.get(labelId);
                SparseVector target = labelVectors.get(labelTextId);
                similarity = 0;
                if (source != null & target != null) {
                    similarity = 1 - Math.abs(cosineSimilarity.distance(source, target)); // the function returns distance not similarity
                }
                if (similarity > mergeSimilarity) {
                    mergedTopics.put(t, t_text);
                    logger.info("Merge topics: " + t + " and " + t_text);
                    for (Byte m = 0; m < numModalities; m++) {
                        alpha[m][t] = 0;
                    }
                }
                //topicSimilarities[t][t_text] = similarity;
            }
        }
        List<Integer> deletedTopics = new LinkedList<Integer>();
        for (int kk = 0; kk <= numTopics; kk++) { // NOTE(review): off-by-one, see Javadoc
            //tokensPerTopic[0][kk]
            if ( topicsSkewWeight[0][kk]* alpha[0][kk]>splitLimit && !mergedTopics.containsKey(kk)) {
                deletedTopics.add(kk);
                logger.info("Delete topics: " + kk);
            }
        }
        //Topics having a large number of tokens but low Token and topic exclusivity (differentiation)
        //or topics that are small but have small topic exclusivity should be deleted...
        int splittedTopic = -1;
        //if (!inActiveTopicIndex.isEmpty()) {
        int maxTopic = 0;
        double maxAlpha = 0;
        double avgAlpha = 0;
        for (int kk = 0; kk <= numTopics; kk++) { // NOTE(review): off-by-one, see Javadoc
            //int k = kactive.get(kk);
            avgAlpha += alpha[0][kk];
            if (alpha[0][kk] > maxAlpha) {
                maxAlpha = alpha[0][kk];
                maxTopic = kk;
            }
        }
        avgAlpha = avgAlpha / (numTopics - inActiveTopicIndex.size());
        if (maxAlpha > 10 * avgAlpha) {
            splittedTopic = maxTopic;
        }
        if (mergedTopics.size() > 0 || (!deletedTopics.isEmpty())) {
            // Rewrite token assignments for merged/deleted topics, then rebuild all counts.
            for (MixTopicModelTopicAssignment entity : data) {
                for (Byte m = 0; m < numModalities; m++) {
                    TopicAssignment document = entity.Assignments[m];
                    if (document != null) {
                        //FeatureSequence tokens = (FeatureSequence) document.instance.getData();
                        FeatureSequence topicSequence = (FeatureSequence) document.topicSequence;
                        int[] topics = topicSequence.getFeatures();
                        for (int position = 0; position < topics.length; position++) {
                            int oldTopic = topics[position];
                            if (deletedTopics.contains(oldTopic)) {
                                topics[position] = ParallelTopicModel.UNASSIGNED_TOPIC;
                            }
                            if (mergedTopics.containsKey(oldTopic)) {
                                topics[position] = mergedTopics.get(oldTopic);
                            }
                        }
                    }
                }
            }
            buildInitialTypeTopicCounts();
        }
    }

    /**
     * Gather statistics on the size of documents and create histograms for use
     * in Dirichlet hyperparameter optimization.
     */
    private void initializeHistograms() {
        int maxTotalAllModalities = 0;
        //int[] maxTokens = new int[numModalities];
        histogramSize = new int[numModalities];
        Arrays.fill(totalTokens, 0);
        // Arrays.fill(maxTokens, 0);
        Arrays.fill(histogramSize, 0);
        // First pass: per-modality max document length and total token counts.
        for (MixTopicModelTopicAssignment entity : data) {
            for (Byte i = 0; i < numModalities; i++) {
                int seqLen;
                TopicAssignment document = entity.Assignments[i];
                if (document != null) {
                    FeatureSequence fs = (FeatureSequence) document.instance.getData();
                    seqLen = fs.getLength();
                    if (seqLen > histogramSize[i]) {
                        histogramSize[i] = seqLen;
                    }
                    totalTokens[i] += seqLen;
                }
            }
        }
        for (Byte i = 0; i < numModalities; i++) {
            String infoStr = "Modality<" + i + "> Max tokens per entity: " + histogramSize[i] + ", Total tokens: " + totalTokens[i];
            logger.info(infoStr);
            appendMetadata(infoStr);
            maxTotalAllModalities += histogramSize[i];
        }
        logger.info("max tokens all modalities: " + maxTotalAllModalities);
        // Allocate the histograms sized by the longest document seen per modality.
        docLengthCounts = new int[numModalities][];
        topicDocCounts = new int[numModalities][][];
        for (Byte m = 0; m < numModalities; m++) {
            docLengthCounts[m] = new int[histogramSize[m] + 1];
            topicDocCounts[m] = new int[numTopics][histogramSize[m] + 1];
            for (int t = 0; t < numTopics; t++) {
                Arrays.fill(topicDocCounts[m][t], 0);
            }
        }
        // for (MixTopicModelTopicAssignment entity : data) {
        //     for (Byte i = 0; i < numModalities; i++) {
        //         TopicAssignment document = entity.Assignments[i];
        //         if (document != null) {
        //             FeatureSequence tokens = (FeatureSequence) document.instance.getData();
        //             docLengthCounts[i][tokens.getLength()]++;
    }

    // NOTE(review): the following large commented-out region (optimizeAlpha /
    // optimizeBeta) predates the per-modality refactor and is kept for reference.
    // public void optimizeAlpha(FastQWorkerRunnable[] runnables) {
    //     // First clear the sufficient statistic histograms
    //     //Arrays.fill(docLengthCounts, 0);
    ////     for (int topic = 0; topic < topicDocCounts.length; topic++) {
    ////         Arrays.fill(topicDocCounts[topic], 0);
    ////     for (int thread = 0; thread < numThreads; thread++) {
    ////         //int[] sourceLengthCounts = runnables[thread].getDocLengthCounts();
    ////         int[][] sourceTopicCounts = runnables[thread].getTopicDocCounts();
    //////         for (int count = 0; count < sourceLengthCounts.length; count++) {
    //////             if (sourceLengthCounts[count] > 0) {
    //////                 docLengthCounts[count] += sourceLengthCounts[count];
    //////                 sourceLengthCounts[count] = 0;
    ////         for (int topic = 0; topic < numTopics; topic++) {
    ////             if (!usingSymmetricAlpha) {
    ////                 for (int count = 0; count < sourceTopicCounts[topic].length; count++) {
    ////                     if (sourceTopicCounts[topic][count] > 0) {
    ////                         topicDocCounts[topic][count] += sourceTopicCounts[topic][count];
    ////                         sourceTopicCounts[topic][count] = 0;
    ////                     }
    ////             } else {
    ////                 // For the symmetric version, we only need one
    ////                 // count array, which I'm putting in the same
    ////                 // data structure, but for topic 0. All other
    ////                 // topic histograms will be empty.
    ////                 // I'm duplicating this for loop, which
    ////                 // isn't the best thing, but it means only checking
    ////                 // whether we are symmetric or not numTopics times,
    ////                 // instead of numTopics * longest document length.
    ////                 for (int count = 0; count < sourceTopicCounts[topic].length; count++) {
    ////                     if (sourceTopicCounts[topic][count] > 0) {
    ////                         topicDocCounts[0][count] += sourceTopicCounts[topic][count];
    ////                         // ^ the only change
    ////                         sourceTopicCounts[topic][count] = 0;
    //     if (usingSymmetricAlpha) {
    //         alphaSum[0] = Dirichlet.learnSymmetricConcentration(topicDocCounts[0],
    //                 docLengthCounts,
    //                 numTopics,
    //                 alphaSum[0]);
    //         for (int topic = 0; topic < numTopics; topic++) {
    //             alpha[topic] = alphaSum[0] / numTopics;
    //     } else {
    //         try {
    //             alphaSum[0] = Dirichlet.learnParameters(alpha, topicDocCounts, docLengthCounts, 1.001, 1.0, 1);
    //         } catch (RuntimeException e) {
    //             // Dirichlet optimization has become unstable. This is known to happen for very small corpora (~5 docs).
    //             logger.warning("Dirichlet optimization has become unstable. Resetting to alpha_t = 1.0.");
    //             alphaSum[0] = numTopics;
    //             for (int topic = 0; topic < numTopics; topic++) {
    //                 alpha[topic] = 1.0;
    //     String alphaStr = "";
    //     for (int topic = 0; topic < numTopics; topic++) {
    //         alphaStr += formatter.format(alpha[topic]) + " ";
    //     logger.info("[Alpha: [" + alphaStr + "] ");

    // public void optimizeBeta(FastQWorkerRunnable[] runnables) {
    //     // The histogram starts at count 0, so if all of the
    //     // tokens of the most frequent type were assigned to one topic,
    //     // we would need to store a maxTypeCount + 1 count.
    //     int[] countHistogram = new int[maxTypeCount + 1];
    //     // Now count the number of type/topic pairs that have
    //     // each number of tokens.
    //     for (int type = 0; type < numTypes; type++) {
    //         int[] counts = typeTopicCounts[type];
    //         for (int topic = 0; topic < numTopics; topic++) {
    //             int count = counts[topic];
    //             if (count > 0) {
    //                 countHistogram[count]++;
    //     // Figure out how large we need to make the "observation lengths"
    //     // histogram.
    //     int maxTopicSize = 0;
    //     for (int topic = 0; topic < numTopics; topic++) {
    //         if (tokensPerTopic[topic] > maxTopicSize) {
    //             maxTopicSize = tokensPerTopic[topic];
    //     // Now allocate it and populate it.
    //     int[] topicSizeHistogram = new int[maxTopicSize + 1];
    //     for (int topic = 0; topic < numTopics; topic++) {
    //         topicSizeHistogram[tokensPerTopic[topic]]++;
    //     betaSum[0] = Dirichlet.learnSymmetricConcentration(countHistogram,
    //             topicSizeHistogram,
    //             numTypes,
    //             betaSum[0]);
    //     beta[0] = betaSum[0] / numTypes;
    //     //TODO: copy/update trees in threads
    //     logger.info("[beta[0]: " + formatter.format(beta[0]) + "] ");
    //     // Now publish the new value
    ////     for (int thread = 0; thread < numThreads; thread++) {
    ////         runnables[thread].resetBeta(beta[0], betaSum);

    /**
     * Runs Gibbs sampling: partitions documents across worker threads plus one
     * updater thread, synchronized via a CyclicBarrier.
     * (Method continues past the end of this chunk.)
     */
    public void estimate() throws IOException {
        long startTime = System.currentTimeMillis();
        final CyclicBarrier barrier = new CyclicBarrier(numThreads + 2);//one for the current thread and one for the updater
        FastQMVWorkerRunnable[] runnables = new FastQMVWorkerRunnable[numThreads];
        int docsPerThread = data.size() / numThreads;
        int offset = 0;
        //pDistr_Var = new double[numModalities][numModalities][data.size()];
        for (byte i = 0; i < numModalities; i++) {
            Arrays.fill(this.p_a[i], 0.1d);
            Arrays.fill(this.p_b[i], 1d);
        }
        for (int thread = 0; thread < numThreads; thread++) {
            // some docs may be missing at the end due to integer division
            if (thread == numThreads - 1) {
                docsPerThread = data.size() - offset;
            }
            Randoms random = null;
            if (randomSeed == -1) {
                random = new Randoms();
            } else {
                random = new Randoms(randomSeed);
            }
            runnables[thread] = new FastQMVWorkerRunnable(numTopics, numModalities, alpha, alphaSum, beta, betaSum, gamma,
                    docSmoothingOnlyMass, docSmoothingOnlyCumValues, random, data, typeTopicCounts, tokensPerTopic,
                    offset, docsPerThread, trees, useCycleProposals, thread, p_a, p_b,
                    //queues.get(thread),
                    barrier, inActiveTopicIndex
                    //,betaSmoothingTree
            );
            //runnables[thread].initializeAlphaStatistics(docLengthCounts.length);
            offset += docsPerThread;
            //runnables[thread].makeOnlyThread();
        }
        Randoms randomUpd = null;
        if (randomSeed == -1) {
            randomUpd = new Randoms();
        } else {
            randomUpd = new Randoms(randomSeed);
        }
FastQMVUpdaterRunnable updater = new FastQMVUpdaterRunnable( typeTopicCounts, tokensPerTopic, trees, //queues, alpha, alphaSum, beta, betaSum, gamma, docSmoothingOnlyMass, docSmoothingOnlyCumValues, useCycleProposals, barrier, numTopics, numModalities, docLengthCounts, topicDocCounts, numTypes, maxTypeCount, randomUpd, inActiveTopicIndex // , betaSmoothingTree ); // List<Queue<FastQDelta>> queues = new ArrayList<Queue<FastQDelta>>(numThreads); // for (int thread = 0; thread < numThreads; thread++) { // // queues.add(new ConcurrentLinkedQueue<FastQDelta>()); // queues.add(new LinkedBlockingQueue<FastQDelta>()); ExecutorService executor = Executors.newFixedThreadPool(numThreads + 1); for (int iteration = 1; iteration <= numIterations; iteration++) { List<Queue<FastQDelta>> queues = new ArrayList<Queue<FastQDelta>>(numThreads); for (int thread = 0; thread < numThreads; thread++) { //queues.add(new ConcurrentLinkedQueue<FastQDelta>()); queues.add(new LinkedBlockingQueue<FastQDelta>()); } long iterationStart = System.currentTimeMillis(); if (showTopicsInterval != 0 && iteration != 0 && iteration % showTopicsInterval == 0) { logger.info("\n" + displayTopWords(wordsPerTopic, 5, false)); } if (saveStateInterval != 0 && iteration % saveStateInterval == 0) { this.printState(new File(stateFilename + '.' + iteration)); } if (saveModelInterval != 0 && iteration % saveModelInterval == 0) { this.write(new File(modelFilename + '.' 
+ iteration)); } updater.setOptimizeParams(false); if (iteration < burninPeriod && numModalities > 1) { for (byte i = 0; i < numModalities; i++) { Arrays.fill(this.p_a[i], Math.min((double) iteration / 100, 1d)); //Arrays.fill(this.p_b[i], 1d); } logger.info("common p_a: " + formatter.format(this.p_a[0][1])); } else if (iteration > burninPeriod && optimizeInterval != 0 && iteration % saveSampleInterval == 0) { //updater.setOptimizeParams(true); optimizeP(iteration + optimizeInterval > numIterations); //merge similar topics TByteArrayList modalities = new TByteArrayList(); modalities.add((byte) 0); mergeSimilarTopics(20, modalities, 0.6, 15); optimizeDP(); optimizeGamma(); optimizeBeta(); recalcTrees(); } updater.setQueues(queues); executor.submit(updater); // if (numThreads > 1) { // Submit runnables to thread pool for (int thread = 0; thread < numThreads; thread++) { // if (iteration > burninPeriod && optimizeInterval != 0 // && iteration % saveSampleInterval == 0) { // runnables[thread].collectAlphaStatistics(); runnables[thread].setQueue(queues.get(thread)); logger.fine("submitting thread " + thread); executor.submit(runnables[thread]); //runnables[thread].run(); } try { barrier.await(); } catch (InterruptedException e) { System.out.println("Main Thread interrupted!"); e.printStackTrace(); } catch (BrokenBarrierException e) { System.out.println("Main Thread interrupted!"); e.printStackTrace(); } // // I'm getting some problems that look like // // a thread hasn't started yet when it is first // // polled, so it appears to be finished. // // This only occurs in very short corpora. // try { // Thread.sleep(20); // } catch (InterruptedException e) { // //TODO: use Barrier here // boolean finished = false; // while (!finished) { // try { // Thread.sleep(10); // } catch (InterruptedException e) { // finished = true; // // Are all the threads done? // for (int thread = 0; thread < numThreads; thread++) { // //logger.info("thread " + thread + " done? 
" + runnables[thread].isFinished); // finished = finished && runnables[thread].isFinished; // finished = finished && updater.isFinished; long elapsedMillis = System.currentTimeMillis() - iterationStart; if (elapsedMillis < 5000) { logger.info(elapsedMillis + "ms "); } else { logger.info((elapsedMillis / 1000) + "s "); } // if (iteration > burninPeriod && optimizeInterval != 0 // && iteration % optimizeInterval == 0) { // //optimizeAlpha(runnables); // //optimizeBeta(runnables); // //recalc trees for multi threaded recalc every time .. for single threaded only when beta[0] (or alpha in not cyvling proposal) is changing // //recalcTrees(); // logger.info("[O " + (System.currentTimeMillis() - iterationStart) + "] "); if (iteration % 10 == 0) { if (printLogLikelihood) { for (Byte i = 0; i < numModalities; i++) { //Arrays.fill(this.p_a[i], (double) (iteration / 100)); //Arrays.fill(this.p_b[i], 1d); double ll = modelLogLikelihood()[i] / totalTokens[i]; perplexities[i][iteration / 10] = ll; logger.info("<" + iteration + "> modality<" + i + "> LL/token: " + formatter.format(ll)); //LL for eachmodality if (iteration + 10 > numIterations) { appendMetadata("Modality<" + i + "> LL/token: " + formatter.format(ll)); //LL for eachmodality //logger.info("[alphaSum[" + i + "]: " + formatter.format(alphaSum[i]) + "] "); } } } else { logger.info("<" + iteration + ">"); } } } executor.shutdownNow(); long seconds = Math.round((System.currentTimeMillis() - startTime) / 1000.0); long minutes = seconds / 60; seconds %= 60; long hours = minutes / 60; minutes %= 60; long days = hours / 24; hours %= 24; StringBuilder timeReport = new StringBuilder(); timeReport.append("\nTotal time: "); if (days != 0) { timeReport.append(days); timeReport.append(" days "); } if (hours != 0) { timeReport.append(hours); timeReport.append(" hours "); } if (minutes != 0) { timeReport.append(minutes); timeReport.append(" minutes "); } timeReport.append(seconds); timeReport.append(" seconds"); 
logger.info(timeReport.toString()); } public void printTopWords(File file, int numWords, int numLabels, boolean useNewLines) throws IOException { PrintStream out = new PrintStream(file); printTopWords(out, numWords, numLabels, useNewLines); out.close(); } public void saveExperiment(String SQLLiteDB, String experimentId, String experimentDescription) { Connection connection = null; Statement statement = null; try { // create a database connection if (!SQLLiteDB.isEmpty()) { connection = DriverManager.getConnection(SQLLiteDB); statement = connection.createStatement(); statement.setQueryTimeout(30); // set timeout to 30 sec. //statement.executeUpdate("drop table if exists TopicAnalysis"); //statement.executeUpdate("create table if not exists Experiment (ExperimentId nvarchar(50), Description nvarchar(200), Metadata nvarchar(500), InitialSimilarity Double, PhraseBoost Integer) "); //String deleteSQL = String.format("Delete from Experiment where ExperimentId = '%s'", experimentId); //statement.executeUpdate(deleteSQL); //TODO topic analysis don't exist here String boostSelect = String.format("select \n" + " a.experimentid, PhraseCnts, textcnts, textcnts/phrasecnts as boost\n" + "from \n" + "(select experimentid, itemtype, avg(counts) as PhraseCnts from topicanalysis\n" + "where itemtype=-1\n" + "group by experimentid, itemtype) a inner join\n" + "(select experimentid, itemtype, avg(counts) as textcnts from topicanalysis\n" + "where itemtype=0 and ExperimentId = '%s' \n" + "group by experimentid, itemtype) b on a.experimentId=b.experimentId\n" + "order by a.experimentId;", experimentId); float boost = 70; ResultSet rs = statement.executeQuery(boostSelect); while (rs.next()) { boost = rs.getFloat("boost"); } String similaritySelect = String.format("select experimentid, avg(avgent) as avgSimilarity, avg(counts) as avgLinks, count(*) as EntitiesCnt\n" + "from( \n" + "select experimentid, avg(similarity) as avgent, count(similarity) as counts\n" + "from entitysimilarity\n" + 
"where similarity>0.65 and ExperimentId = '%s' group by experimentid, entityid1)\n" + "group by experimentid", experimentId); PreparedStatement bulkInsert = null; String sql = "insert into Experiment values(?,?,?, ?, ? );"; try { connection.setAutoCommit(false); bulkInsert = connection.prepareStatement(sql); bulkInsert.setString(1, experimentId); bulkInsert.setString(2, experimentDescription); bulkInsert.setString(3, expMetadata.toString()); bulkInsert.setDouble(4, 0.6); bulkInsert.setInt(5, Math.round(boost)); bulkInsert.executeUpdate(); connection.commit(); } catch (SQLException e) { if (connection != null) { try { System.err.print("Transaction is being rolled back"); connection.rollback(); } catch (SQLException excep) { System.err.print("Error in insert experiment details"); } } } finally { if (bulkInsert != null) { bulkInsert.close(); } connection.setAutoCommit(true); } } } catch (SQLException e) { // if the error message is "out of memory", // it probably means no database file is found System.err.println(e.getMessage()); } finally { try { if (connection != null) { connection.close(); } } catch (SQLException e) { // connection close failed. System.err.println(e); } } } public void saveTopics(String SQLLiteDB, String experimentId, String batchId) { Connection connection = null; Statement statement = null; try { // create a database connection if (!SQLLiteDB.isEmpty()) { connection = DriverManager.getConnection(SQLLiteDB); statement = connection.createStatement(); statement.setQueryTimeout(30); // set timeout to 30 sec. //statement.executeUpdate("create table if not exists TopicDetails (TopicId integer, ItemType integer, Weight double, TotalTokens int, BatchId TEXT,ExperimentId nvarchar(50)) "); //String deleteSQL = String.format("Delete from TopicDetails where ExperimentId = '%s'", experimentId); //statement.executeUpdate(deleteSQL); String topicDetailInsertsql = "insert into TopicDetails values(?,?,?,?,?,? 
);"; PreparedStatement bulkTopicDetailInsert = null; try { connection.setAutoCommit(false); bulkTopicDetailInsert = connection.prepareStatement(topicDetailInsertsql); for (int topic = 0; topic < numTopics; topic++) { for (Byte m = 0; m < numModalities; m++) { bulkTopicDetailInsert.setInt(1, topic); bulkTopicDetailInsert.setInt(2, m); bulkTopicDetailInsert.setDouble(3, alpha[m][topic]); bulkTopicDetailInsert.setInt(4, tokensPerTopic[m][topic]); bulkTopicDetailInsert.setString(5, batchId); bulkTopicDetailInsert.setString(6, experimentId); bulkTopicDetailInsert.executeUpdate(); } } connection.commit(); } catch (SQLException e) { if (connection != null) { try { System.err.print("Transaction is being rolled back"); connection.rollback(); } catch (SQLException excep) { System.err.print("Error in insert topic details"); } } } finally { if (bulkTopicDetailInsert != null) { bulkTopicDetailInsert.close(); } connection.setAutoCommit(true); } //statement.executeUpdate("drop table if exists TopicAnalysis"); //statement.executeUpdate("create table if not exists TopicAnalysis (TopicId integer, ItemType integer, Item nvarchar(100), Counts double, BatchId TEXT, ExperimentId nvarchar(50)) "); //deleteSQL = String.format("Delete from TopicAnalysis where ExperimentId = '%s'", experimentId); //statement.executeUpdate(deleteSQL); PreparedStatement bulkInsert = null; String sql = "insert into TopicAnalysis values(?,?,?,?,?,?);"; try { connection.setAutoCommit(false); bulkInsert = connection.prepareStatement(sql); ArrayList<ArrayList<TreeSet<IDSorter>>> topicSortedWords = new ArrayList<ArrayList<TreeSet<IDSorter>>>(numModalities); for (Byte m = 0; m < numModalities; m++) { topicSortedWords.add(getSortedWords(m)); } for (int topic = 0; topic < numTopics; topic++) { for (Byte m = 0; m < numModalities; m++) { TreeSet<IDSorter> sortedWords = topicSortedWords.get(m).get(topic); int word = 1; Iterator<IDSorter> iterator = sortedWords.iterator(); while (iterator.hasNext() && word < 20) { 
IDSorter info = iterator.next(); bulkInsert.setInt(1, topic); bulkInsert.setInt(2, m); bulkInsert.setString(3, alphabet[m].lookupObject(info.getID()).toString()); bulkInsert.setDouble(4, info.getWeight()); bulkInsert.setString(5, batchId); bulkInsert.setString(6, experimentId); //bulkInsert.setDouble(6, 1); bulkInsert.executeUpdate(); word++; } } } // also find and write phrases TObjectIntHashMap<String>[] phrases = findTopicPhrases(); for (int ti = 0; ti < numTopics; ti++) { // Print phrases Object[] keys = phrases[ti].keys(); int[] values = phrases[ti].values(); double counts[] = new double[keys.length]; for (int i = 0; i < counts.length; i++) { counts[i] = values[i]; } // double countssum = MatrixOps.sum(counts); Alphabet alph = new Alphabet(keys); RankedFeatureVector rfv = new RankedFeatureVector(alph, counts); int max = rfv.numLocations() < 20 ? rfv.numLocations() : 20; for (int ri = 0; ri < max; ri++) { int fi = rfv.getIndexAtRank(ri); double count = counts[fi];// / countssum; String phraseStr = alph.lookupObject(fi).toString(); bulkInsert.setInt(1, ti); bulkInsert.setInt(2, -1); bulkInsert.setString(3, phraseStr); bulkInsert.setDouble(4, count); bulkInsert.setString(5, batchId); bulkInsert.setString(6, experimentId); //bulkInsert.setDouble(6, 1); bulkInsert.executeUpdate(); } } connection.commit(); // if (!sql.equals("")) { // statement.executeUpdate(sql); // if (!sql.equals("")) { // statement.executeUpdate(sql); } catch (SQLException e) { if (connection != null) { try { System.err.print("Transaction is being rolled back"); connection.rollback(); } catch (SQLException excep) { System.err.print("Error in insert topicAnalysis"); } } } finally { if (bulkInsert != null) { bulkInsert.close(); } connection.setAutoCommit(true); } } } catch (SQLException e) { // if the error message is "out of memory", // it probably means no database file is found System.err.println(e.getMessage()); } finally { try { if (connection != null) { connection.close(); } } catch 
(SQLException e) { // connection close failed. System.err.println(e); } } } /** * Return an array of sorted sets (one set per topic). Each set contains * IDSorter objects with integer keys into the alphabet. To get direct * access to the Strings, use getTopWords(). */ public ArrayList<TreeSet<IDSorter>> getSortedWords(int modality) { ArrayList<TreeSet<IDSorter>> topicSortedWords = new ArrayList<TreeSet<IDSorter>>(numTopics); // Initialize the tree sets for (int topic = 0; topic < numTopics; topic++) { TreeSet<IDSorter> topicTreeSet = new TreeSet<IDSorter>(); topicSortedWords.add(topicTreeSet); // Collect counts for (int type = 0; type < numTypes[modality]; type++) { int cnt = typeTopicCounts[modality][type][topic]; if (cnt > 0) { topicTreeSet.add(new IDSorter(type, cnt)); } } } return topicSortedWords; } /** * Return an array (one element for each topic) of arrays of words, which * are the most probable words for that topic in descending order. These are * returned as Objects, but will probably be Strings. * * @param numWords The maximum length of each topic's array of words (may be * less). */ public Object[][] getTopWords(int numWords, int modality) { ArrayList<TreeSet<IDSorter>> topicSortedWords = getSortedWords(modality); Object[][] result = new Object[numTopics][]; for (int topic = 0; topic < numTopics; topic++) { TreeSet<IDSorter> sortedWords = topicSortedWords.get(topic); // How many words should we report? Some topics may have fewer than // the default number of words with non-zero weight. 
int limit = numWords; if (sortedWords.size() < numWords) { limit = sortedWords.size(); } result[topic] = new Object[limit]; Iterator<IDSorter> iterator = sortedWords.iterator(); for (int i = 0; i < limit; i++) { IDSorter info = iterator.next(); result[topic][i] = alphabet[modality].lookupObject(info.getID()); } } return result; } public void printTopWords(PrintStream out, int numWords, int numLabels, boolean usingNewLines) { out.print(displayTopWords(numWords, numLabels, usingNewLines)); } public String displayTopWords(int numWords, int numLabels, boolean usingNewLines) { StringBuilder out = new StringBuilder(); ArrayList<ArrayList<TreeSet<IDSorter>>> topicSortedWords = new ArrayList<ArrayList<TreeSet<IDSorter>>>(4); for (Byte m = 0; m < numModalities; m++) { topicSortedWords.add(getSortedWords(m)); } // Print results for each topic for (int topic = 0; topic < numTopics; topic++) { for (Byte m = 0; m < numModalities; m++) { TreeSet<IDSorter> sortedWords = topicSortedWords.get(m).get(topic); int word = 1; Iterator<IDSorter> iterator = sortedWords.iterator(); if (usingNewLines) { out.append(topic + "\t" + formatter.format(alpha[m][topic]) + "\n"); while (iterator.hasNext() && word < numWords) { IDSorter info = iterator.next(); out.append(alphabet[m].lookupObject(info.getID()) + "\t" + formatter.format(info.getWeight()) + "\n"); word++; } } else { out.append(topic + "\t" + formatter.format(alpha[m][topic]) + "\t"); while (iterator.hasNext() && word < numWords) { IDSorter info = iterator.next(); out.append(alphabet[m].lookupObject(info.getID()) + " "); word++; } } } out.append("\n"); } return out.toString(); } public void topicXMLReport(PrintWriter out, int numWords, int numLabels) { ArrayList<ArrayList<TreeSet<IDSorter>>> topicSortedWords = new ArrayList<ArrayList<TreeSet<IDSorter>>>(4); for (Byte m = 0; m < numModalities; m++) { topicSortedWords.add(getSortedWords(m)); } out.println("<?xml version='1.0' ?>"); out.println("<topicModel>"); for (int topic = 0; topic < 
numTopics; topic++) { for (Byte m = 0; m < numModalities; m++) { out.println(" <topic id='" + topic + "' alpha='" + alpha[m][topic] + "' modality='" + m + "' totalTokens='" + tokensPerTopic[m][topic] + "'>"); int word = 1; Iterator<IDSorter> iterator = topicSortedWords.get(m).get(topic).iterator(); while (iterator.hasNext() && word <= numWords) { IDSorter info = iterator.next(); out.println(" <word rank='" + word + "'>" + alphabet[m].lookupObject(info.getID()) + "</word>"); word++; } } out.println(" </topic>"); } out.println("</topicModel>"); } public TObjectIntHashMap<String>[] findTopicPhrases() { int numTopics = this.getNumTopics(); TObjectIntHashMap<String>[] phrases = new TObjectIntHashMap[numTopics]; Alphabet alphabet = this.getAlphabet()[0]; // Get counts of phrases in topics // Search bigrams within corpus to see if they have been assigned to the same topic, adding them to topic phrases for (int ti = 0; ti < numTopics; ti++) { phrases[ti] = new TObjectIntHashMap<String>(); } for (int di = 0; di < this.getData().size(); di++) { TopicAssignment t = this.getData().get(di).Assignments[0]; if (t != null) { Instance instance = t.instance; FeatureSequence fvs = (FeatureSequence) instance.getData(); boolean withBigrams = false; if (fvs instanceof FeatureSequenceWithBigrams) { withBigrams = true; } int prevtopic = -1; int prevfeature = -1; int topic = -1; StringBuffer sb = null; int feature = -1; int doclen = fvs.size(); for (int pi = 0; pi < doclen; pi++) { feature = fvs.getIndexAtPosition(pi); topic = t.topicSequence.getIndexAtPosition(pi); if (topic == prevtopic && (!withBigrams || ((FeatureSequenceWithBigrams) fvs).getBiIndexAtPosition(pi) != -1)) { if (sb == null) { sb = new StringBuffer(alphabet.lookupObject(prevfeature).toString() + " " + alphabet.lookupObject(feature)); } else { sb.append(" "); sb.append(alphabet.lookupObject(feature)); } } else if (sb != null) { String sbs = sb.toString(); //logger.info ("phrase:"+sbs); if (phrases[prevtopic].get(sbs) == 0) 
{ phrases[prevtopic].put(sbs, 0); } phrases[prevtopic].increment(sbs); prevtopic = prevfeature = -1; sb = null; } else { prevtopic = topic; prevfeature = feature; } } } } return phrases; } public void topicPhraseXMLReport(PrintWriter out, int numWords) { //Phrases only for modality 0 --> text int numTopics = this.getNumTopics(); Alphabet alphabet = this.getAlphabet()[0]; TObjectIntHashMap<String>[] phrases = findTopicPhrases(); // phrases[] now filled with counts // Now start printing the XML out.println("<?xml version='1.0' ?>"); out.println("<topics>"); ArrayList<TreeSet<IDSorter>> topicSortedWords = getSortedWords(0); double[] probs = new double[alphabet.size()]; for (int ti = 0; ti < numTopics; ti++) { out.print(" <topic id=\"" + ti + "\" alpha=\"" + alpha[0][ti] + "\" totalTokens=\"" + tokensPerTopic[0][ti] + "\" "); // For gathering <term> and <phrase> output temporarily // so that we can get topic-title information before printing it to "out". ByteArrayOutputStream bout = new ByteArrayOutputStream(); PrintStream pout = new PrintStream(bout); // For holding candidate topic titles AugmentableFeatureVector titles = new AugmentableFeatureVector(new Alphabet()); // Print words int word = 1; Iterator<IDSorter> iterator = topicSortedWords.get(ti).iterator(); while (iterator.hasNext() && word < numWords) { IDSorter info = iterator.next(); pout.println(" <word weight=\"" + (info.getWeight() / tokensPerTopic[0][ti]) + "\" count=\"" + Math.round(info.getWeight()) + "\">" + alphabet.lookupObject(info.getID()) + "</word>"); word++; if (word < 20) // consider top 20 individual words as candidate titles { titles.add(alphabet.lookupObject(info.getID()), info.getWeight()); } } /* for (int type = 0; type < alphabet.size(); type++) probs[type] = this.getCountFeatureTopic(type, ti) / (double)this.getCountTokensPerTopic(ti); RankedFeatureVector rfv = new RankedFeatureVector (alphabet, probs); for (int ri = 0; ri < numWords; ri++) { int fi = rfv.getIndexAtRank(ri); pout.println 
(" <term weight=\""+probs[fi]+"\" count=\""+this.getCountFeatureTopic(fi,ti)+"\">"+alphabet.lookupObject(fi)+ "</term>"); if (ri < 20) // consider top 20 individual words as candidate titles titles.add(alphabet.lookupObject(fi), this.getCountFeatureTopic(fi,ti)); } */ // Print phrases Object[] keys = phrases[ti].keys(); int[] values = phrases[ti].values(); double counts[] = new double[keys.length]; for (int i = 0; i < counts.length; i++) { counts[i] = values[i]; } double countssum = MatrixOps.sum(counts); Alphabet alph = new Alphabet(keys); RankedFeatureVector rfv = new RankedFeatureVector(alph, counts); int max = rfv.numLocations() < numWords ? rfv.numLocations() : numWords; for (int ri = 0; ri < max; ri++) { int fi = rfv.getIndexAtRank(ri); pout.println(" <phrase weight=\"" + counts[fi] / countssum + "\" count=\"" + values[fi] + "\">" + alph.lookupObject(fi) + "</phrase>"); // Any phrase count less than 20 is simply unreliable if (ri < 20 && values[fi] > 20) { titles.add(alph.lookupObject(fi), 100 * values[fi]); // prefer phrases with a factor of 100 } } // Select candidate titles StringBuffer titlesStringBuffer = new StringBuffer(); rfv = new RankedFeatureVector(titles.getAlphabet(), titles); int numTitles = 10; for (int ri = 0; ri < numTitles && ri < rfv.numLocations(); ri++) { // Don't add redundant titles if (titlesStringBuffer.indexOf(rfv.getObjectAtRank(ri).toString()) == -1) { titlesStringBuffer.append(rfv.getObjectAtRank(ri)); if (ri < numTitles - 1) { titlesStringBuffer.append(", "); } } else { numTitles++; } } out.println("titles=\"" + titlesStringBuffer.toString() + "\">"); out.print(bout.toString()); out.println(" </topic>"); } out.println("</topics>"); } /** * Write the internal representation of type-topic counts (count/topic pairs * in descending order by count) to a file. 
*/ public void printTypeTopicCounts(File file) throws IOException { PrintWriter out = new PrintWriter(new FileWriter(file)); for (Byte m = 0; m < numModalities; m++) { for (int type = 0; type < numTypes[m]; type++) { StringBuilder buffer = new StringBuilder(); buffer.append(type + " " + alphabet[m].lookupObject(type)); int[] topicCounts = typeTopicCounts[m][type]; int index = 0; while (index < topicCounts.length) { int count = topicCounts[index]; buffer.append(" " + index + ":" + count); index++; } out.println(buffer); } } out.close(); } public void printTopicWordWeights(File file) throws IOException { PrintWriter out = new PrintWriter(new FileWriter(file)); printTopicWordWeights(out); out.close(); } /** * Print an unnormalized weight for every word in every topic. Most of these * will be equal to the smoothing parameter beta[0]. */ public void printTopicWordWeights(PrintWriter out) throws IOException { // Probably not the most efficient way to do this... for (int topic = 0; topic < numTopics; topic++) { for (Byte m = 0; m < numModalities; m++) { for (int type = 0; type < numTypes[m]; type++) { //int[] topicCounts = typeTopicCounts[type]; double weight = beta[m]; weight += typeTopicCounts[m][type][topic]; out.println(topic + "\t" + alphabet[m].lookupObject(type) + "\t" + weight); } } } } /** * Get the smoothed distribution over topics for a training instance. */ public double[] getTopicProbabilities(int instanceID, byte modality) { LabelSequence topics = data.get(instanceID).Assignments[modality].topicSequence; return getTopicProbabilities(topics, modality); } /** * Get the smoothed distribution over topics for a topic sequence, which may * be from the training set or from a new instance with topics assigned by * an inferencer. */ public double[] getTopicProbabilities(LabelSequence topics, byte modality) { double[] topicDistribution = new double[numTopics]; // Loop over the tokens in the document, counting the current topic // assignments. 
for (int position = 0; position < topics.getLength(); position++) { topicDistribution[topics.getIndexAtPosition(position)]++; } // Add the smoothing parameters and normalize double sum = 0.0; for (int topic = 0; topic < numTopics; topic++) { topicDistribution[topic] += gamma[modality] * alpha[modality][topic]; sum += topicDistribution[topic]; } // And normalize for (int topic = 0; topic < numTopics; topic++) { topicDistribution[topic] /= sum; } return topicDistribution; } // public void printDocumentTopics(File file) throws IOException { // PrintWriter out = new PrintWriter(new FileWriter(file)); // printDocumentTopics(out); // out.close(); // public void printDocumentTopics(PrintWriter out) { // printDocumentTopics(out, 0.0, -1); // //TODO save weights in DB (not needed any more as thay calculated on the fly) private double[] calcSkew() { // Calc Skew weight //skewOn == SkewType.LabelsOnly // The skew index of eachType double[][] typeSkewIndexes = new double[numModalities][]; //<modality, type> double[] skewWeight = new double[numModalities]; // The skew index of each Lbl Type //public double[] lblTypeSkewIndexes; //double [][] typeSkewIndexes = new double skewSum = 0; int nonZeroSkewCnt = 1; for (Byte i = 0; i < numModalities; i++) { typeSkewIndexes[i] = new double[numTypes[i]]; for (int type = 0; type < numTypes[i]; type++) { int totalTypeCounts = 0; typeSkewIndexes[i][type] = 0; int[] targetCounts = typeTopicCounts[i][type]; int index = 0; int count = 0; while (index < targetCounts.length) { count = targetCounts[index]; typeSkewIndexes[i][type] += Math.pow((double) count, 2); totalTypeCounts += count; //currentTopic = currentTypeTopicCounts[index] & topicMask; index++; } if (totalTypeCounts > 0) { typeSkewIndexes[i][type] = typeSkewIndexes[i][type] / Math.pow((double) (totalTypeCounts), 2); } if (typeSkewIndexes[i][type] > 0) { nonZeroSkewCnt++; skewSum += typeSkewIndexes[i][type]; } } skewWeight[i] = skewSum / (double) nonZeroSkewCnt; // (double) 1 / (1 + 
skewSum / (double) nonZeroSkewCnt); appendMetadata("Modality<" + i + "> Discr. Weight: " + formatter.format(skewWeight[i])); //LL for eachmodality } return skewWeight; } public double[][] calcTopicsSkewOnText(int[][] topicTypeCounts, ArrayList<ArrayList<TreeSet<IDSorter>>> topicSortedWords) { double[][] topicsSkewWeight = new double[numModalities][numTopics]; //ArrayList<TreeSet<IDSorter>> topicSortedWords = getSortedWords(0); for (Byte i = 0; i < numModalities; i++) { for (int topic = 0; topic < numTopics; topic++) { topicsSkewWeight[i][topic] = 0.0; TreeSet<IDSorter> sortedWords = topicSortedWords.get(i).get(topic); topicTypeCounts[i][topic] = sortedWords.size(); for (IDSorter info : sortedWords) { double probability = info.getWeight() / tokensPerTopic[i][topic]; topicsSkewWeight[i][topic] += probability * probability; } } } return topicsSkewWeight; } public double[][] calcTopicsSkewOnPubs(int[][] topicTypeCounts, ArrayList<ArrayList<TreeSet<IDSorter>>> topicSortedWords) { double[][] topicsSkewWeight = new double[numModalities][numTopics]; //ArrayList<TreeSet<IDSorter>> topicSortedWords = getSortedWords(0); for (Byte i = 0; i < numModalities; i++) { for (int topic = 0; topic < numTopics; topic++) { topicsSkewWeight[i][topic] = 0.0; TreeSet<IDSorter> sortedWords = topicSortedWords.get(i).get(topic); topicTypeCounts[i][topic] = sortedWords.size(); for (IDSorter info : sortedWords) { double probability = info.getWeight() / tokensPerTopic[i][topic]; topicsSkewWeight[i][topic] += probability * probability; } } } return topicsSkewWeight; } public void optimizeBeta() { // The histogram starts at count 0, so if all of the // tokens of the most frequent type were assigned to one topic, // we would need to store a maxTypeCount + 1 count. for (Byte m = 0; m < numModalities; m++) { double prevBetaSum = betaSum[m]; int[] countHistogram = new int[maxTypeCount[m] + 1]; // Now count the number of type/topic pairs that have // each number of tokens. 
for (int type = 0; type < numTypes[m]; type++) { int[] counts = typeTopicCounts[m][type]; for (int topic = 0; topic < numTopics; topic++) { int count = counts[topic]; if (count > 0) { countHistogram[count]++; } } } // Figure out how large we need to make the "observation lengths" // histogram. int maxTopicSize = 0; for (int topic = 0; topic < numTopics; topic++) { if (tokensPerTopic[m][topic] > maxTopicSize) { maxTopicSize = tokensPerTopic[m][topic]; } } // Now allocate it and populate it. int[] topicSizeHistogram = new int[maxTopicSize + 1]; for (int topic = 0; topic < numTopics; topic++) { topicSizeHistogram[tokensPerTopic[m][topic]]++; } try { betaSum[m] = Dirichlet.learnSymmetricConcentration(countHistogram, topicSizeHistogram, numTypes[m], betaSum[m]); if (Double.isNaN(betaSum[m]) || betaSum[m] < 0.0001) { betaSum[m] = prevBetaSum; } beta[m] = betaSum[m] / numTypes[m]; } catch (RuntimeException e) { // Dirichlet optimization has become unstable. This is known to happen for very small corpora (~5 docs). logger.warning("Dirichlet optimization has become unstable:" + e.getMessage() + ". 
Resetting to previous Beta"); betaSum[m] = prevBetaSum; beta[m] = betaSum[m] / numTypes[m]; } //TODO: copy/update trees in threads logger.info("[beta[" + m + "]: " + formatter.format(beta[m]) + "] "); // Now publish the new value // for (int thread = 0; thread < numThreads; thread++) { // runnables[thread].resetBeta(beta[0], betaSum[0]); } } private void optimizeGamma() { // hyperparameters for DP and Dirichlet samplers // Teh+06: Docs: (1, 1), M1-3: (0.1, 0.1); HMM: (1, 1) double aalpha = 5; double balpha = 0.1; //double abeta = 0.1; //double bbeta = 0.1; // Teh+06: Docs: (1, 0.1), M1-3: (5, 0.1), HMM: (1, 1) double agamma = 5; double bgamma = 0.1; // number of samples for parameter samplers int R = 10; double totaltablesCnt = 0; for (Byte m = 0; m < numModalities; m++) { totaltablesCnt += tablesCnt[m]; } for (Byte m = 0; m < numModalities; m++) { for (int r = 0; r < R; r++) { // gamma[0]: root level (Escobar+West95) with n = T double eta = samp.randBeta(gammaRoot + 1, totaltablesCnt); double bloge = bgamma - log(eta); double pie = 1. / (1. + (totaltablesCnt * bloge / (agamma + numTopics - 1))); int u = samp.randBernoulli(pie); gammaRoot = samp.randGamma(agamma + numTopics - 1 + u, 1. / bloge); // for (byte m = 0; m < numModalities; m++) { // alpha: document level (Teh+06) double qs = 0; double qw = 0; for (int j = 0; j < docLengthCounts[m].length; j++) { for (int i = 0; i < docLengthCounts[m][j]; i++) { // (49) (corrected) qs += samp.randBernoulli(j / (j + gamma[m])); qw += log(samp.randBeta(gamma[m] + 1, j)); } } gamma[m] = samp.randGamma(aalpha + tablesCnt[m] - qs, 1. 
/ (balpha - qw)); } logger.info("GammaRoot: " + gammaRoot); //for (byte m = 0; m < numModalities; m++) { logger.info("Gamma[" + m + "]: " + gamma[m]); } } private void optimizeDP() { double[][] mk = new double[numModalities][numTopics + 1]; Arrays.fill(tablesCnt, 0); for (int t = 0; t < numTopics; t++) { inActiveTopicIndex.add(t); //inActive by default and activate if found } for (byte m = 0; m < numModalities; m++) { for (int t = 0; t < numTopics; t++) { //int k = kactive.get(kk); for (int i = 0; i < topicDocCounts[m][t].length; i++) { //for (int j = 0; j < numDocuments; j++) { if (topicDocCounts[m][t][i] > 0 && i > 1) { inActiveTopicIndex.remove(new Integer(t)); //..remove(t); //sample number of tables // number of tables a CRP(alpha tau) produces for nmk items //TODO: See if using the "minimal path" assumption to reduce bookkeeping gives the same results. //Huge Memory consumption due to topicDocCounts (* NumThreads), and striling number of first kind allss double[][] //Also 2x slower than the parametric version due to UpdateAlphaAndSmoothing int curTbls = 0; try { curTbls = random.nextAntoniak(gamma[m] * alpha[m][t], i); } catch (Exception e) { curTbls = 1; } mk[m][t] += (topicDocCounts[m][t][i] * curTbls); //mk[m][t] += 1;//direct minimal path assignment Samplers.randAntoniak(gamma[0][m] * alpha[m].get(t), tokensPerTopic[m].get(t)); // nmk[m].get(k)); } else if (topicDocCounts[m][t][i] > 0 && i == 1) //nmk[m].get(k) = 0 or 1 { inActiveTopicIndex.remove(new Integer(t)); mk[m][t] += topicDocCounts[m][t][i]; } } } // end outter for loop if (!inActiveTopicIndex.isEmpty()) { String empty = ""; for (int i = 0; i < inActiveTopicIndex.size(); i++) { empty += formatter.format(inActiveTopicIndex.get(i)) + " "; } logger.info("Inactive Topics: " + empty); } //for (byte m = 0; m < numModalities; m++) { //alpha[m].fill(0, numTopics, 0); alphaSum[m] = 0; mk[m][numTopics] = gammaRoot; tablesCnt[m] = Vectors.sum(mk[m]); byte numSamples = 10; for (int i = 0; i < numSamples; 
i++) { double[] tt = sampleDirichlet(mk[m]); // On non parametric with new topic we would have numTopics+1 topics for (int kk = 0; kk <= numTopics; kk++) { for (int kk = 0; kk <= numTopics; kk++) { //int k = kactive.get(kk); alpha[m][kk] = tt[kk] / (double) numSamples; alphaSum[m] += gamma[m] * alpha[m][kk]; //tau.set(k, tt[kk]); } } logger.info("AlphaSum[" + m + "]: " + alphaSum[m]); //for (byte m = 0; m < numModalities; m++) { String alphaStr = ""; for (int topic = 0; topic < numTopics; topic++) { alphaStr += topic + ":" + formatter.format(alpha[m][topic]) + " "; } logger.info("[Alpha[" + m + "]: [" + alphaStr + "] "); } // if (alpha[m].size() < numTopics + 1) { // alpha[m].add(tt[numTopics]); // } else { // alpha[m].set(numTopics, tt[numTopics]); //tau.set(K, tt[K]); //Recalc trees } private double[] sampleDirichlet(double[] p) { double magnitude = 1; double[] partition; magnitude = 0; partition = new double[p.length]; // Add up the total for (int i = 0; i < p.length; i++) { magnitude += p[i]; } for (int i = 0; i < p.length; i++) { partition[i] = p[i] / magnitude; } double distribution[] = new double[partition.length]; // For each dimension, draw a sample from Gamma(mp_i, 1) double sum = 0; for (int i = 0; i < distribution.length; i++) { distribution[i] = random.nextGamma(partition[i] * magnitude, 1); if (distribution[i] <= 0) { distribution[i] = 0.0001; } sum += distribution[i]; } // Normalize for (int i = 0; i < distribution.length; i++) { distribution[i] /= sum; } return distribution; } // private void recalcTrees() { // //recalc trees // double[] temp = new double[numTopics]; // for (Byte m = 0; m < numModalities; m++) { // for (int w = 0; w < numTypes[m]; ++w) { // int[] currentTypeTopicCounts = typeTopicCounts[m][w]; // for (int currentTopic = 0; currentTopic < numTopics; currentTopic++) { //// temp[currentTopic] = (currentTypeTopicCounts[currentTopic] + beta[0]) * alpha[currentTopic] / (tokensPerTopic[currentTopic] + betaSum); // if (useCycleProposals) { 
// temp[currentTopic] = (currentTypeTopicCounts[currentTopic] + beta[m]) / (tokensPerTopic[m][currentTopic] + betaSum[m]); //with cycle proposal // } else { // temp[currentTopic] = gamma[m] * alpha[m][currentTopic] * (currentTypeTopicCounts[currentTopic] + beta[m]) / (tokensPerTopic[m][currentTopic] + betaSum[m]); // trees[m][w].constructTree(temp); // //reset temp // Arrays.fill(temp, 0); private void recalcTrees() { //recalc trees double[] temp = new double[numTopics]; for (Byte m = 0; m < numModalities; m++) { for (int w = 0; w < numTypes[m]; ++w) { int[] currentTypeTopicCounts = typeTopicCounts[m][w]; for (int currentTopic = 0; currentTopic < numTopics; currentTopic++) { // temp[currentTopic] = (currentTypeTopicCounts[currentTopic] + beta[0]) / (tokensPerTopic[currentTopic] + betaSum[0]); if (useCycleProposals) { temp[currentTopic] = (currentTypeTopicCounts[currentTopic] + beta[m]) / (tokensPerTopic[m][currentTopic] + betaSum[m]); //with cycle proposal } else { temp[currentTopic] = gamma[m] * alpha[m][currentTopic] * (currentTypeTopicCounts[currentTopic] + beta[m]) / (tokensPerTopic[m][currentTopic] + betaSum[m]); } } //trees[w] = new FTree(temp); trees[m][w].constructTree(temp); //reset temp Arrays.fill(temp, 0); } docSmoothingOnlyMass[m] = 0; if (useCycleProposals) { // cachedCoefficients cumulative array that will be used for binary search for (int topic = 0; topic < numTopics; topic++) { docSmoothingOnlyMass[m] += gamma[m] * alpha[m][topic]; docSmoothingOnlyCumValues[m][topic] = docSmoothingOnlyMass[m]; } } } } public void optimizeP(boolean appendMetadata) { // for (int thread = 0; thread < numThreads; thread++) { // runnables[thread].getPDistr_Mean(); //we consider beta known = 1 --> a=(inverse digamma) [lnGx-lnG(1-x)+y(b)] // --> a = - 1 / (1/N (Sum(lnXi))), i=1..N , where Xi = mean (pDistr_Mean) //statistics for p optimization double[][][] pDistr_Mean = new double[numModalities][numModalities][data.size()];; // modalities correlation distribution accross 
// documents (used in a, b beta params optimization)
        // (the line above is the severed tail of the previous line's trailing comment;
        // restored as a comment so the file parses)
        for (int docCnt = 0; docCnt < data.size(); docCnt++) {
            MixTopicModelTopicAssignment doc = data.get(docCnt);
            int[][] localTopicCounts = new int[numModalities][numTopics];
            int[] oneDocTopics;
            FeatureSequence tokenSequence;
            int[] docLength = new int[numModalities];
            // Pass 1: per-modality topic histograms for this document.
            for (byte m = 0; m < numModalities; m++) {
                if (doc.Assignments[m] != null) {
                    //TODO can I order by tokens/topics??
                    oneDocTopics = doc.Assignments[m].topicSequence.getFeatures();
                    //System.arraycopy(oneDocTopics[m], 0, doc.Assignments[m].topicSequence.getFeatures(), 0, doc.Assignments[m].topicSequence.getFeatures().length-1);
                    tokenSequence = ((FeatureSequence) doc.Assignments[m].instance.getData());
                    docLength[m] = tokenSequence.getLength(); //size is the same??
                    // populate topic counts
                    for (int position = 0; position < docLength[m]; position++) {
                        if (oneDocTopics[position] == UNASSIGNED_TOPIC) {
                            System.err.println(" Init Sampling UNASSIGNED_TOPIC");
                            continue;
                        }
                        localTopicCounts[m][oneDocTopics[position]]++; //, localTopicCounts[m][oneDocTopics[m][position]] + 1);
                    }
                }
            }
            // Pass 2: for each modality pair (m, i<m), accumulate the fraction of
            // modality-m tokens whose topic also appears in modality i — the
            // cross-modality agreement statistic used to fit the p beta params.
            for (byte m = 1; m < numModalities; m++) {
                if (doc.Assignments[m] != null) {
                    oneDocTopics = doc.Assignments[m].topicSequence.getFeatures();
                    tokenSequence = ((FeatureSequence) doc.Assignments[m].instance.getData());
                    for (int position = 0; position < tokenSequence.getLength(); position++) {
                        if (oneDocTopics[position] == UNASSIGNED_TOPIC) {
                            System.err.println(" Init Sampling UNASSIGNED_TOPIC");
                            continue;
                        }
                        // FIX: the loop header was garbled ("i >= 0; i pDistr_Mean...");
                        // the missing "--) {" is restored — the body reads i, so the
                        // decrementing counted loop is the only compilable reading.
                        for (byte i = (byte) (m - 1); i >= 0; i--) {
                            pDistr_Mean[m][i][docCnt] += (localTopicCounts[i][oneDocTopics[position]] > 0 ?
1.0 : 0d) / (double) docLength[m]; pDistr_Mean[i][m][docCnt] = pDistr_Mean[m][i][docCnt]; //pDistr_Var[m][i][docCnt]+= localTopicCounts[i][newTopic]/docLength[m]; } } } } } for (Byte m = 0; m < numModalities; m++) { pMean[m][m] = 1; for (Byte i = (byte) (m + 1); i < numModalities; i++) { //optimize based on mean & variance double sum = 0; for (int j = 0; j < pDistr_Mean[m][i].length; j++) { sum += pDistr_Mean[m][i][j]; } pMean[m][i] = sum / totalDocsPerModality[m]; pMean[i][m] = pMean[m][i]; //double var = 2 * (1 - mean); double a = -1.0 / Math.log(pMean[m][i]); double b = 1; logger.info("[p:" + m + "_" + i + " mean:" + pMean[m][i] + " a:" + a + " b:" + b + "] "); if (appendMetadata) { appendMetadata("[p:" + m + "_" + i + " mean:" + pMean[m][i] + " a:" + a + " b:" + b + "] "); } p_a[m][i] = Math.min(a, 100);//a=100--> almost p=99% p_a[i][m] = Math.min(a, 100);; p_b[m][i] = b; p_b[i][m] = b; } } // Now publish the new value // for (int thread = 0; thread < numThreads; thread++) { // runnables[thread].resetP_a(p_a); // runnables[thread].resetP_a(p_b); } public void printDocumentTopics(PrintWriter out, double threshold, int max, String SQLLiteDB, String experimentId, String batchId) { if (out != null) { out.print("#doc name topic proportion ...\n"); } int[] docLen = new int[numModalities]; int[][] topicCounts = new int[numModalities][numTopics]; IDSorter[] sortedTopics = new IDSorter[numTopics]; for (int topic = 0; topic < numTopics; topic++) { // Initialize the sorters with dummy values sortedTopics[topic] = new IDSorter(topic, topic); } if (max < 0 || max > numTopics) { max = numTopics; } double[] skewWeight = calcSkew(); Connection connection = null; Statement statement = null; try { // create a database connection if (!SQLLiteDB.isEmpty()) { connection = DriverManager.getConnection(SQLLiteDB); statement = connection.createStatement(); statement.setQueryTimeout(30); // set timeout to 30 sec. 
// statement.executeUpdate("drop table if exists PubTopic"); //statement.executeUpdate("create table if not exists PubTopic (PubId nvarchar(50), TopicId Integer, Weight Double , BatchId Text, ExperimentId nvarchar(50)) "); //statement.executeUpdate(String.format("Delete from PubTopic where ExperimentId = '%s'", experimentId)); } PreparedStatement bulkInsert = null; String sql = "insert into PubTopic values(?,?,?,?,? );"; try { connection.setAutoCommit(false); bulkInsert = connection.prepareStatement(sql); for (int doc = 0; doc < data.size(); doc++) { int cntEnd = numModalities; StringBuilder builder = new StringBuilder(); builder.append(doc); builder.append("\t"); String docId = "no-name"; docId = data.get(doc).EntityId.toString(); builder.append(docId); builder.append("\t"); for (Byte m = 0; m < cntEnd; m++) { if (data.get(doc).Assignments[m] != null) { Arrays.fill(topicCounts[m], 0); LabelSequence topicSequence = (LabelSequence) data.get(doc).Assignments[m].topicSequence; int[] currentDocTopics = topicSequence.getFeatures(); docLen[m] = data.get(doc).Assignments[m].topicSequence.getLength();// currentDocTopics.length; // Count up the tokens for (int token = 0; token < docLen[m]; token++) { topicCounts[m][currentDocTopics[token]]++; } } } // And normalize for (int topic = 0; topic < numTopics; topic++) { double topicProportion = 0; double normalizeSum = 0; for (Byte m = 0; m < cntEnd; m++) { //Omiros: TODO: I should reweight each modality's contribution in the proportion of the document based on its discrimination power (skew index) //topicProportion += (m == 0 ? 1 : skewWeight[m]) * pMean[0][m] * ((double) topicCounts[m][topic] + (double) gamma[m] * alpha[m][topic]) / (docLen[m] + alphaSum[m]); //normalizeSum += (m == 0 ? 1 : skewWeight[m]) * pMean[0][m]; topicProportion += (m == 0 ? 1 : skewWeight[m]) * pMean[0][m] * ((double) topicCounts[m][topic] + (double) gamma[m] * alpha[m][topic]) / (docLen[m] + alphaSum[m]); normalizeSum += (m == 0 ? 
1 : skewWeight[m]) * pMean[0][m]; } sortedTopics[topic].set(topic, (topicProportion / normalizeSum)); } Arrays.sort(sortedTopics); // statement.executeUpdate("insert into person values(1, 'leo')"); // statement.executeUpdate("insert into person values(2, 'yui')"); // ResultSet rs = statement.executeQuery("select * from person"); for (int i = 0; i < max; i++) { if (sortedTopics[i].getWeight() < threshold) { break; } builder.append(sortedTopics[i].getID() + "\t" + sortedTopics[i].getWeight() + "\t"); if (out != null) { out.println(builder); } if (!SQLLiteDB.isEmpty()) { // sql += String.format(Locale.ENGLISH, "insert into PubTopic values('%s',%d,%.4f,'%s' );", docId, sortedTopics[i].getID(), sortedTopics[i].getWeight(), experimentId); bulkInsert.setString(1, docId); bulkInsert.setInt(2, sortedTopics[i].getID()); bulkInsert.setDouble(3, (double) Math.round(sortedTopics[i].getWeight() * 10000) / 10000); bulkInsert.setString(4, batchId); bulkInsert.setString(5, experimentId); bulkInsert.executeUpdate(); } } // if ((doc / 10) * 10 == doc && !sql.isEmpty()) { // statement.executeUpdate(sql); // sql = ""; } if (!SQLLiteDB.isEmpty()) { connection.commit(); } // if (!sql.isEmpty()) { // statement.executeUpdate(sql); } catch (SQLException e) { if (connection != null) { try { System.err.print("Transaction is being rolled back"); connection.rollback(); } catch (SQLException excep) { System.err.print("Error in insert topicAnalysis"); } } } finally { if (bulkInsert != null) { bulkInsert.close(); } connection.setAutoCommit(true); } } catch (SQLException e) { // if the error message is "out of memory", // it probably means no database file is found System.err.println(e.getMessage()); } finally { try { if (connection != null) { connection.close(); } } catch (SQLException e) { // connection close failed. 
System.err.println(e); } } } public void CreateTables(String SQLLiteDB, String experimentId) { Connection connection = null; Statement statement = null; try { // create a database connection if (!SQLLiteDB.isEmpty()) { connection = DriverManager.getConnection(SQLLiteDB); statement = connection.createStatement(); statement.setQueryTimeout(30); // set timeout to 30 sec. // statement.executeUpdate("drop table if exists PubTopic"); //statement.executeUpdate("create table if not exists PubTopic (PubId nvarchar(50), TopicId Integer, Weight Double , BatchId Text, ExperimentId nvarchar(50)) "); statement.executeUpdate(String.format("Delete from PubTopic where ExperimentId = '%s'", experimentId)); //statement.executeUpdate("create table if not exists Experiment (ExperimentId nvarchar(50), Description nvarchar(200), Metadata nvarchar(500), InitialSimilarity Double, PhraseBoost Integer) "); String deleteSQL = String.format("Delete from Experiment where ExperimentId = '%s'", experimentId); statement.executeUpdate(deleteSQL); //statement.executeUpdate("create table if not exists TopicDetails (TopicId integer, ItemType integer, Weight double, TotalTokens int, BatchId TEXT,ExperimentId nvarchar(50)) "); deleteSQL = String.format("Delete from TopicDetails where ExperimentId = '%s'", experimentId); statement.executeUpdate(deleteSQL); deleteSQL = String.format("Delete from TopicDescription where ExperimentId = '%s'", experimentId); statement.executeUpdate(deleteSQL); //statement.executeUpdate("create table if not exists TopicAnalysis (TopicId integer, ItemType integer, Item nvarchar(100), Counts double, BatchId TEXT, ExperimentId nvarchar(50)) "); deleteSQL = String.format("Delete from TopicAnalysis where ExperimentId = '%s'", experimentId); statement.executeUpdate(deleteSQL); //statement.executeUpdate("create table if not exists PubTopic (PubId nvarchar(50), TopicId Integer, Weight Double , BatchId Text, ExperimentId nvarchar(50)) "); statement.executeUpdate(String.format("Delete 
from PubTopic where ExperimentId = '%s'", experimentId)); //statement.executeUpdate("create table if not exists ExpDiagnostics (ExperimentId text, BatchId text, EntityId text, EntityType int, ScoreName text, Score double )"); deleteSQL = String.format("Delete from ExpDiagnostics where ExperimentId = '%s'", experimentId); statement.executeUpdate(deleteSQL); } } catch (SQLException e) { // if the error message is "out of memory", // it probably means no database file is found System.err.println(e.getMessage()); } finally { try { if (connection != null) { connection.close(); } } catch (SQLException e) { // connection close failed. System.err.println(e); } } } /** * @param out A print writer * @param threshold Only print topics with proportion greater than this * number * @param max Print no more than this many topics */ // public void printDocumentTopics(PrintWriter out, double threshold, int max) { // out.print("#doc name topic proportion ...\n"); // int docLen; // int[] topicCounts = new int[numTopics]; // IDSorter[] sortedTopics = new IDSorter[numTopics]; // for (int topic = 0; topic < numTopics; topic++) { // // Initialize the sorters with dummy values // sortedTopics[topic] = new IDSorter(topic, topic); // if (max < 0 || max > numTopics) { // max = numTopics; // for (int doc = 0; doc < data.size(); doc++) { // LabelSequence topicSequence = (LabelSequence) data.get(doc).topicSequence; // int[] currentDocTopics = topicSequence.getFeatures(); // StringBuilder builder = new StringBuilder(); // builder.append(doc); // builder.append("\t"); // if (data.get(doc).instance.getName() != null) { // builder.append(data.get(doc).instance.getName()); // } else { // builder.append("no-name"); // builder.append("\t"); // docLen = currentDocTopics.length; // // Count up the tokens // for (int token = 0; token < docLen; token++) { // topicCounts[currentDocTopics[token]]++; // // And normalize // for (int topic = 0; topic < numTopics; topic++) { // sortedTopics[topic].set(topic, 
(gamma[0] * alpha[topic] + topicCounts[topic]) / (docLen + alphaSum[0])); // Arrays.sort(sortedTopics); // for (int i = 0; i < max; i++) { // if (sortedTopics[i].getWeight() < threshold) { // break; // builder.append(sortedTopics[i].getID() + "\t" // + sortedTopics[i].getWeight() + "\t"); // out.println(builder); // Arrays.fill(topicCounts, 0); // public double[][] getSubCorpusTopicWords(boolean[] documentMask, boolean normalized, boolean smoothed) { // double[][] result = new double[numTopics][numTypes]; // int[] subCorpusTokensPerTopic = new int[numTopics]; // for (int doc = 0; doc < data.size(); doc++) { // if (documentMask[doc]) { // int[] words = ((FeatureSequence) data.get(doc).instance.getData()).getFeatures(); // int[] topics = data.get(doc).topicSequence.getFeatures(); // for (int position = 0; position < topics.length; position++) { // result[topics[position]][words[position]]++; // subCorpusTokensPerTopic[topics[position]]++; // if (smoothed) { // for (int topic = 0; topic < numTopics; topic++) { // for (int type = 0; type < numTypes; type++) { // result[topic][type] += beta[0]; // if (normalized) { // double[] topicNormalizers = new double[numTopics]; // if (smoothed) { // for (int topic = 0; topic < numTopics; topic++) { // topicNormalizers[topic] = 1.0 / (subCorpusTokensPerTopic[topic] + numTypes * beta[0]); // } else { // for (int topic = 0; topic < numTopics; topic++) { // topicNormalizers[topic] = 1.0 / subCorpusTokensPerTopic[topic]; // for (int topic = 0; topic < numTopics; topic++) { // for (int type = 0; type < numTypes; type++) { // result[topic][type] *= topicNormalizers[topic]; // return result; // public double[][] getTopicWords(boolean normalized, boolean smoothed) { // double[][] result = new double[numTopics][numTypes]; // for (int type = 0; type < numTypes; type++) { // int[] topicCounts = typeTopicCounts[type]; // int index = 0; // while (index < topicCounts.length // && topicCounts[index] > 0) { // result[index][type] += 
topicCounts[index]; // index++; // if (smoothed) { // for (int topic = 0; topic < numTopics; topic++) { // for (int type = 0; type < numTypes; type++) { // result[topic][type] += beta[0]; // if (normalized) { // double[] topicNormalizers = new double[numTopics]; // if (smoothed) { // for (int topic = 0; topic < numTopics; topic++) { // topicNormalizers[topic] = 1.0 / (tokensPerTopic[topic] + numTypes * beta[0]); // } else { // for (int topic = 0; topic < numTopics; topic++) { // topicNormalizers[topic] = 1.0 / tokensPerTopic[topic]; // for (int topic = 0; topic < numTopics; topic++) { // for (int type = 0; type < numTypes; type++) { // result[topic][type] *= topicNormalizers[topic]; // return result; // public double[][] getDocumentTopics(boolean normalized, boolean smoothed) { // double[][] result = new double[data.size()][numTopics]; // for (int doc = 0; doc < data.size(); doc++) { // int[] topics = data.get(doc).topicSequence.getFeatures(); // for (int position = 0; position < topics.length; position++) { // result[doc][topics[position]]++; // if (smoothed) { // for (int topic = 0; topic < numTopics; topic++) { // result[doc][topic] += gamma[0] * alpha[topic]; // if (normalized) { // double sum = 0.0; // for (int topic = 0; topic < numTopics; topic++) { // sum += result[doc][topic]; // double normalizer = 1.0 / sum; // for (int topic = 0; topic < numTopics; topic++) { // result[doc][topic] *= normalizer; // return result; // public ArrayList<TreeSet<IDSorter>> getTopicDocuments(double smoothing) { // ArrayList<TreeSet<IDSorter>> topicSortedDocuments = new ArrayList<TreeSet<IDSorter>>(numTopics); // // Initialize the tree sets // for (int topic = 0; topic < numTopics; topic++) { // topicSortedDocuments.add(new TreeSet<IDSorter>()); // int[] topicCounts = new int[numTopics]; // for (int doc = 0; doc < data.size(); doc++) { // int[] topics = data.get(doc).topicSequence.getFeatures(); // for (int position = 0; position < topics.length; position++) { // 
topicCounts[topics[position]]++; // for (int topic = 0; topic < numTopics; topic++) { // topicSortedDocuments.get(topic).add(new IDSorter(doc, (topicCounts[topic] + smoothing) / (topics.length + numTopics * smoothing))); // topicCounts[topic] = 0; // return topicSortedDocuments; // public void printTopicDocuments(PrintWriter out) { // printTopicDocuments(out, 100); // /** // * @param out A print writer // * @param count Print this number of top documents // */ // public void printTopicDocuments(PrintWriter out, int max) { // out.println("#topic doc name proportion ..."); // ArrayList<TreeSet<IDSorter>> topicSortedDocuments = getTopicDocuments(10.0); // for (int topic = 0; topic < numTopics; topic++) { // TreeSet<IDSorter> sortedDocuments = topicSortedDocuments.get(topic); // int i = 0; // for (IDSorter sorter : sortedDocuments) { // if (i == max) { // break; // int doc = sorter.getID(); // double proportion = sorter.getWeight(); // String name = (String) data.get(doc).instance.getName(); // if (name == null) { // name = "no-name"; // out.format("%d %d %s %f\n", topic, doc, name, proportion); public void printState(File f) throws IOException { PrintStream out = new PrintStream(new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(f)))); printState(out); out.close(); } public void printState(PrintStream out) { out.println("#doc source pos typeindex type topic"); out.print("#alpha : "); for (Byte m = 0; m < numModalities; m++) { out.println("modality:" + m); for (int topic = 0; topic < numTopics; topic++) { out.print(gamma[m] * alpha[m][topic] + " "); } } out.println(); out.println("#beta[0] : " + beta[0]); for (int doc = 0; doc < data.size(); doc++) { for (Byte m = 0; m < numModalities; m++) { FeatureSequence tokenSequence = (FeatureSequence) data.get(doc).Assignments[m].instance.getData(); LabelSequence topicSequence = (LabelSequence) data.get(doc).Assignments[m].topicSequence; String source = "NA"; if (data.get(doc).Assignments[m].instance.getSource() 
!= null) { source = data.get(doc).Assignments[m].instance.getSource().toString(); } Formatter output = new Formatter(new StringBuilder(), Locale.US); for (int pi = 0; pi < topicSequence.getLength(); pi++) { int type = tokenSequence.getIndexAtPosition(pi); int topic = topicSequence.getIndexAtPosition(pi); output.format("%d %s %d %d %s %d\n", doc, source, pi, type, alphabet[m].lookupObject(type), topic); /* out.print(doc); out.print(' '); out.print(source); out.print(' '); out.print(pi); out.print(' '); out.print(type); out.print(' '); out.print(alphabet.lookupObject(type)); out.print(' '); out.print(topic); out.println(); */ } out.print(output); } } } public double[] modelLogLikelihood() { double[] logLikelihood = new double[numModalities]; Arrays.fill(logLikelihood, 0); //int nonZeroTopics; // The likelihood of the model is a combination of a // Dirichlet-multinomial for the words in each topic // and a Dirichlet-multinomial for the topics in each // document. // The likelihood function of a dirichlet multinomial is // Gamma( sum_i alpha_i ) prod_i Gamma( alpha_i + N_i ) // prod_i Gamma( alpha_i ) Gamma( sum_i (alpha_i + N_i) ) // So the log likelihood is // logGamma ( sum_i alpha_i ) - logGamma ( sum_i (alpha_i + N_i) ) + // sum_i [ logGamma( alpha_i + N_i) - logGamma( alpha_i ) ] // Do the documents first int[] topicCounts = new int[numTopics]; double[] topicLogGammas = new double[numTopics]; int[] docTopics; for (Byte m = 0; m < numModalities; m++) { for (int topic = 0; topic < numTopics; topic++) { topicLogGammas[topic] = Dirichlet.logGammaStirling(gamma[m] * alpha[m][topic]); } int modalityCnt = 0; for (int doc = 0; doc < data.size(); doc++) { if (data.get(doc).Assignments[m] != null) { LabelSequence topicSequence = (LabelSequence) data.get(doc).Assignments[m].topicSequence; docTopics = topicSequence.getFeatures(); if (docTopics.length > 0) { for (int token = 0; token < docTopics.length; token++) { topicCounts[docTopics[token]]++; } for (int topic = 0; topic < 
numTopics; topic++) { if (topicCounts[topic] > 0) { logLikelihood[m] += (Dirichlet.logGammaStirling(gamma[m] * alpha[m][topic] + topicCounts[topic]) - topicLogGammas[topic]); } } // subtract the (count + parameter) sum term logLikelihood[m] -= Dirichlet.logGammaStirling(alphaSum[m] + docTopics.length); modalityCnt++; } Arrays.fill(topicCounts, 0); } } // add the parameter sum term logLikelihood[m] += modalityCnt * Dirichlet.logGammaStirling(alphaSum[m]); if (Double.isNaN(logLikelihood[m])) { logger.warning("NaN in log likelihood level1 calculation" + " for modality: " + m); logLikelihood[m] = 0; break; } else if (Double.isInfinite(logLikelihood[m])) { logger.warning("infinite log likelihood at level1 " + " for modality: " + m); logLikelihood[m] = 0; break; } // And the topics // Count the number of type-topic pairs that are not just (logGamma(beta[0]) - logGamma(beta[0])) int nonZeroTypeTopics = 0; for (int type = 0; type < numTypes[m]; type++) { // reuse this array as a pointer //topicCounts = typeTopicCounts[m][type]; int index = 0; while (index < typeTopicCounts[m][type].length) { int count = typeTopicCounts[m][type][index]; if (count > 0) { nonZeroTypeTopics++; //logLikelihood[m] += Dirichlet.logGammaStirling(beta[m] + count); logLikelihood[m] += (beta[m] + count) == 0 ? 0 : Dirichlet.logGammaStirling(beta[m] + count); if (Double.isNaN(logLikelihood[m])) { logger.warning("NaN in log likelihood calculation"); logLikelihood[m] = 0; break; } else if (Double.isInfinite(logLikelihood[m])) { logger.warning("infinite log likelihood"); logLikelihood[m] = 0; break; } } index++; } } for (int topic = 0; topic < numTopics; topic++) { logLikelihood[m] -= (beta[m] * numTypes[m] + tokensPerTopic[m][topic]) == 0 ? 
0 : Dirichlet.logGammaStirling((beta[m] * numTypes[m]) + tokensPerTopic[m][topic]); // logLikelihood[m] // -= Dirichlet.logGammaStirling((beta[m] * numTypes[m]) // + tokensPerTopic[m][topic]); if (Double.isNaN(logLikelihood[m])) { logger.info("NaN after topic " + topic + " " + tokensPerTopic[m][topic]); logLikelihood[m] = 0; break; } else if (Double.isInfinite(logLikelihood[m])) { logger.info("Infinite value after topic " + topic + " " + tokensPerTopic[m][topic]); logLikelihood[m] = 0; break; } } // logGamma(|V|*beta) for every topic logLikelihood[m] += (beta[m] * numTypes[m]) == 0 ? 0 : Dirichlet.logGammaStirling(beta[m] * numTypes[m]) * numTopics; // logGamma(beta) for all type/topic pairs with non-zero count logLikelihood[m] -= beta[m] == 0 ? 0 : Dirichlet.logGammaStirling(beta[m]) * nonZeroTypeTopics; if (Double.isNaN(logLikelihood[m])) { logger.info("at the end"); } else if (Double.isInfinite(logLikelihood[m])) { logger.info("Infinite value beta [" + m + "]: " + beta[m] + " * " + numTypes[m]); logLikelihood[m] = 0; } } return logLikelihood; } // /** // * Return a tool for estimating topic distributions for new documents // */ // public TopicInferencer getInferencer() { // return new TopicInferencer(typeTopicCounts, tokensPerTopic, // data.get(0).instance.getDataAlphabet(), // alpha, beta[0], betaSum[0]); // /** // * Return a tool for evaluating the marginal probability of new documents // * under this model // */ public MarginalProbEstimator getProbEstimator() { return new MarginalProbEstimator(numTopics, alpha[0], alphaSum[0], beta[0], typeTopicCounts[0], tokensPerTopic[0]); } // Serialization private static final long serialVersionUID = 1; private static final int CURRENT_SERIAL_VERSION = 0; private static final int NULL_INTEGER = -1; private void writeObject(ObjectOutputStream out) throws IOException { out.writeInt(CURRENT_SERIAL_VERSION); out.writeObject(data); out.writeObject(alphabet); out.writeObject(topicAlphabet); out.writeInt(numTopics); 
out.writeObject(numTypes); out.writeObject(alpha); out.writeObject(alphaSum); out.writeObject(beta); out.writeObject(betaSum); out.writeObject(gamma); out.writeObject(typeTopicCounts); out.writeObject(tokensPerTopic); out.writeObject(docLengthCounts); out.writeObject(topicDocCounts); out.writeInt(numIterations); out.writeInt(burninPeriod); out.writeInt(saveSampleInterval); out.writeInt(optimizeInterval); out.writeInt(showTopicsInterval); out.writeInt(wordsPerTopic); out.writeInt(saveStateInterval); out.writeObject(stateFilename); out.writeInt(saveModelInterval); out.writeObject(modelFilename); out.writeInt(randomSeed); out.writeObject(formatter); out.writeBoolean(printLogLikelihood); out.writeInt(numThreads); } private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { int version = in.readInt(); data = (ArrayList<MixTopicModelTopicAssignment>) in.readObject(); alphabet = (Alphabet[]) in.readObject(); topicAlphabet = (LabelAlphabet) in.readObject(); numTopics = in.readInt(); numTypes = (int[]) in.readObject(); alpha = (double[][]) in.readObject(); alphaSum = (double[]) in.readObject(); beta = (double[]) in.readObject(); betaSum = (double[]) in.readObject(); gamma = (double[]) in.readObject(); typeTopicCounts = (int[][][]) in.readObject(); tokensPerTopic = (int[][]) in.readObject(); docLengthCounts = (int[][]) in.readObject(); topicDocCounts = (int[][][]) in.readObject(); numIterations = in.readInt(); burninPeriod = in.readInt(); saveSampleInterval = in.readInt(); optimizeInterval = in.readInt(); showTopicsInterval = in.readInt(); wordsPerTopic = in.readInt(); saveStateInterval = in.readInt(); stateFilename = (String) in.readObject(); saveModelInterval = in.readInt(); modelFilename = (String) in.readObject(); randomSeed = in.readInt(); formatter = (NumberFormat) in.readObject(); printLogLikelihood = in.readBoolean(); numThreads = in.readInt(); } public void write(File serializedModelFile) { try { ObjectOutputStream oos = new 
ObjectOutputStream(new FileOutputStream(serializedModelFile)); oos.writeObject(this); oos.close(); } catch (IOException e) { System.err.println("Problem serializing ParallelTopicModel to file " + serializedModelFile + ": " + e); } } public static FastQMVParallelTopicModel read(File f) throws Exception { FastQMVParallelTopicModel topicModel = null; ObjectInputStream ois = new ObjectInputStream(new FileInputStream(f)); topicModel = (FastQMVParallelTopicModel) ois.readObject(); ois.close(); topicModel.initializeHistograms(); return topicModel; } public static void main(String[] args) { try { InstanceList[] training = new InstanceList[1]; training[0] = InstanceList.load(new File(args[0])); int numTopics = args.length > 1 ? Integer.parseInt(args[1]) : 200; byte mod = 1; FastQMVParallelTopicModel lda = new FastQMVParallelTopicModel(numTopics, mod, 0.1, 0.01, true); lda.printLogLikelihood = true; lda.setTopicDisplay(50, 7); lda.addInstances(training, ""); lda.setNumThreads(Integer.parseInt(args[2])); lda.estimate(); logger.info("printing state"); lda.printState(new File("state.gz")); logger.info("finished printing"); } catch (Exception e) { e.printStackTrace(); } } }
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package ch.unizh.ini.tobi.rccar;

import ch.unizh.ini.caviar.chip.*;
import ch.unizh.ini.caviar.event.*;
import ch.unizh.ini.caviar.event.EventPacket;
import ch.unizh.ini.caviar.eventprocessing.FilterChain;
import ch.unizh.ini.caviar.eventprocessing.EventFilter2D;
import ch.unizh.ini.caviar.eventprocessing.filter.XYTypeFilter;
import ch.unizh.ini.caviar.graphics.FrameAnnotater;
import javax.media.opengl.*;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.glu.*;
import javax.swing.*;
import java.awt.Graphics2D;
import java.awt.Dimension;
import java.util.*;
import java.util.Observable;
import java.util.Observer;
import java.beans.*;
import java.io.*;
import com.sun.opengl.util.*;

/**
 * @author braendch
 *
 * Orientation-clustering event filter. For each incoming polarity event it builds a local
 * orientation vector and compares it with the pooled orientation of its neighborhood. An event
 * passes the filter only when (1) the difference between the two orientations is below a
 * tolerance (degrees), (2) its own orientation lies within a certain range around vertical
 * (ori), and (3) the neighborhood vector is long enough (neighborThr) to make a chance
 * agreement unlikely.
 *
 * The per-event orientation vector is accumulated over the receptive field (width*height):
 * the normalized direction to each past event in the field that is recent enough (dt) is
 * weighted by the inverse of the time elapsed between the two events. If the two events have
 * opposite polarity (data index 3) the contribution is rotated by 90 degrees, because the
 * contrast gradient is perpendicular to an edge. To simplify the calculation all vectors are
 * kept with a positive y-component. The orientation history takes account of the past
 * orientations of the events and of their neighbors.
 *
 * The paoli value (kept in vectorMap data index 5 in this version; an older comment refers to
 * a complexMap index 0) is a fuzzy membership value for "is part of a line":
 * 0 = definitely not part of a line, 1 = definitely part of a line.
 */
public class OrientationCluster extends EventFilter2D implements Observer, FrameAnnotater {

    // This filter produces a new output packet (OrientationEvents) rather than filtering in place.
    public boolean isGeneratingFilter(){ return true;}

    // --- tunable parameters, persisted in Preferences. Each field is followed by an instance
    // initializer registering its GUI tooltip.
    private float paoliThr=getPrefs().getFloat("OrientationCluster.paoliThr",2000);
    {setPropertyTooltip("Paoli Threshold","Minimum of Paoli value to be accepted");}
    private float paoliTau=getPrefs().getFloat("OrientationCluster.paoliTau",2000);
    {setPropertyTooltip("Paoli Tau","The value with which the paoli decays");}
    private float tolerance=getPrefs().getFloat("OrientationCluster.tolerance",10);
    {setPropertyTooltip("Tolerance","Percentage of deviation tolerated");}
    private float neighborThr=getPrefs().getFloat("OrientationCluster.neighborThr",10);
    {setPropertyTooltip("Neighbor Threshold","Minimum Length of Neighbor Vector to be accepted");}
    private float historyFactor=getPrefs().getFloat("OrientationCluster.historyFactor",1);
    {setPropertyTooltip("historyFactor","if oriHistoryEnabled this determines how strong the actual vector gets influenced by the previous one");}
    private float ori=getPrefs().getFloat("OrientationCluster.ori",45);
    {setPropertyTooltip("Orientation","Orientation tolerated");}
    private float dt=getPrefs().getFloat("OrientationCluster.dt",10000);
    {setPropertyTooltip("Delta","Time Criteria for selection");}
    private float factor=getPrefs().getFloat("OrientationCluster.factor",1000);
    {setPropertyTooltip("Excitatory Factor","Determines the excitatory synapse weight");}
    private int width=getPrefs().getInt("OrientationCluster.width",1);
    private int height=getPrefs().getInt("OrientationCluster.height",1);
    {
        setPropertyTooltip("width","width of RF, total is 2*width+1");
        setPropertyTooltip("height","length of RF, total length is height*2+1");
    }
    private boolean showAll=getPrefs().getBoolean("OrientationCluster.showAll",false);
    {setPropertyTooltip("showAll","shows all events");}
    private boolean showVectorsEnabled=getPrefs().getBoolean("SimpleOrientationFilter.showVectorsEnabled",false);
    {setPropertyTooltip("showVectorsEnabled","shows local orientation segments");}
    private boolean showOriEnabled=getPrefs().getBoolean("SimpleOrientationFilter.showOriEnabled",true);
    {setPropertyTooltip("showOriEnabled","Shows Orientation with color code");}
    private boolean showPaoliEnabled=getPrefs().getBoolean("SimpleOrientationFilter.showPaoliEnabled",false);
    {setPropertyTooltip("showPaoliEnabled","shows if an Event is part of a Line");}
    private boolean oriHistoryEnabled=getPrefs().getBoolean("OrientationCluster.oriHistoryEnabled",false);
    {setPropertyTooltip("oriHistoryEnabled","enable use of prior orientation values to filter out events not consistent with history");}
    private boolean paoliWindowEnabled=getPrefs().getBoolean("OrientationCluster.paoliWindowEnabled",false);
    {setPropertyTooltip("paoliWindowEnabled","enables the window of the paoli values");}

    // vectorMap[x][y][data] --> data: 0=x-component, 1=y-component, 2=timestamp,
    //   3=polarity (0=off, 1=on), 4=theta, 5=paoli value (index 6 appears unused; arrays are
    //   allocated with 7 slots in allocateMaps()).
    // oriHistoryMap[x][y][data] --> data: 0=x-component, 1=y-component, 2=timestamp,
    //   4=theta (2/3 were once the neighbor-vector components per the original comment).
    private float[][][] vectorMap;
    private float[][][] oriHistoryMap;
    private float[][] paoliArray;
    FilterChain preFilterChain;
    private XYTypeFilter xYFilter;

    /**
     * Builds the filter chain: an XYTypeFilter is enclosed as a pre-filter, and this filter
     * registers itself as a canvas annotator so annotate() gets called for rendering.
     */
    public OrientationCluster(AEChip chip) {
        super(chip);
        //build hierachy
        preFilterChain = new FilterChain(chip);
        xYFilter = new XYTypeFilter(chip);
        this.setEnclosedFilter(xYFilter);
        xYFilter.setEnclosed(true, this);
        //xYFilter.getPropertyChangeSupport().addPropertyChangeListener("filterEnabled",this);
        chip.getCanvas().addAnnotator(this);
        initFilter();
        resetFilter();
    }

    /** Resets state on enable; on disable drops the maps and output packet to free memory. */
    @Override synchronized public void setFilterEnabled(boolean yes){
        super.setFilterEnabled(yes);
        if(yes){
            resetFilter();
        } else{
            vectorMap=null;
            out=null;
        }
    }

    /** Lazily (re)allocates the maps when they do not match the actual chip dimensions. */
    private void checkMaps(){
        //it has to be checked if the VectorMap fits on the actual chip
        if(vectorMap==null || vectorMap.length!=chip.getSizeX() || vectorMap[0].length!=chip.getSizeY()) {
            allocateMaps();
        }
    }

    /** Allocates vectorMap/oriHistoryMap sized to the chip (7 data slots per pixel) and resets. */
    synchronized private void allocateMaps() {
        //the VectorMap is fitted on the chip size
        if(!isFilterEnabled()) return;
        log.info("OrientationCluster.allocateMaps()");
        if(chip!=null){
            vectorMap=new float[chip.getSizeX()][chip.getSizeY()][7];
            oriHistoryMap=new float[chip.getSizeX()][chip.getSizeY()][7];
        }
        resetFilter();
    }

    /**
     * Core processing: for every polarity event, accumulate an orientation vector over the
     * receptive field, pool the neighborhood orientation, update the paoli line-membership
     * value, and emit an OrientationEvent when the event's orientation agrees with its
     * neighborhood within tolerance.
     */
    synchronized public EventPacket filterPacket(EventPacket in) {
        //Check if the filter should be active
        if(in==null) return null;
        if(!filterEnabled) return in;
        if(enclosedFilter!=null) in=enclosedFilter.filterPacket(in);
        int n=in.getSize();
        if(n==0) return in;
        //Check if the input for the filter is the right one
        Class inputClass=in.getEventClass();
        if(inputClass!=PolarityEvent.class){
            log.warning("Wrong input event type "+in.getEventClass()+", disabling filter");
            setFilterEnabled(false);
            return in;
        }
        checkOutputPacketEventType(OrientationEvent.class);
        OutputEventIterator outItr=out.outputIterator();
        int sizex=chip.getSizeX()-1;
        int sizey=chip.getSizeY()-1;
        checkMaps();
        for(Object ein:in){
            PolarityEvent e=(PolarityEvent)ein;
            int x=e.x;
            int y=e.y;
            int xx=0;
            int yy=0;
            float t=0;
            double vectorLength;
            float neighborX=0;
            float neighborY=0;
            float neighborTheta=0;
            float neighborLength=0;
            //calculate the actual vector and the neighborhood vector
            vectorMap[x][y][0]=0;
            vectorMap[x][y][1]=0;
            vectorMap[x][y][2]=(float)e.timestamp;
            //get the polarity of the vector
            if(e.polarity == PolarityEvent.Polarity.Off){
                vectorMap[x][y][3] = 0;
            } else {
                vectorMap[x][y][3] = 1;
            }
            //iteration through the whole RF
            for(int h=-height; h<=height; h++){
                for(int w=-width; w<=width; w++){
                    if(0<x+w && x+w<sizex && 0<y+h && y+h<sizey){
                        //calculation of timestamp difference (+1 to avoid division through 0)
                        t=e.timestamp-vectorMap[x+w][y+h][2]+1;
                        if(t<dt){
                            //one has to check if the events are of the same polarity
                            if(vectorMap[x][y][3] != vectorMap[x+w][y+h][3]){
                                //if they are of a different polarity, the values have to be rotated
                                if (w<0){
                                    //different polarity - left side --> 90 CW
                                    xx = h;
                                    yy = -w;
                                } else {
                                    //different polarity - right side --> 90 CCW
                                    xx = -h;
                                    yy = w;
                                }
                            } else {
                                //if they are of the same kind this doesn't have to be done
                                if (h<0){
                                    //same polarity - down (unwanted) --> point inversion
                                    xx = -w;
                                    yy = -h;
                                } else {
                                    //same polarity - up --> nothing
                                    xx = w;
                                    yy = h;
                                }
                            }
                            //The normalized value of the vector component gets multiplied by a factor and "decayed" (1/t) and added
                            vectorLength = Math.sqrt(xx*xx+yy*yy);
                            if (vectorLength != 0.0){
                                vectorMap[x][y][0] = (float)(vectorMap[x][y][0]+(xx/(vectorLength))*(factor/t));
                                vectorMap[x][y][1] = (float)(vectorMap[x][y][1]+(yy/(vectorLength))*(factor/t));
                                //Neighborhood vector calculation
                                if(oriHistoryEnabled){
                                    neighborX = neighborX + (vectorMap[x+w][y+h][0]+historyFactor*oriHistoryMap[x+w][y+h][0]);
                                    neighborY = neighborY + (vectorMap[x+w][y+h][1]+historyFactor*oriHistoryMap[x+w][y+h][1]);
                                } else {
                                    neighborX = neighborX + vectorMap[x+w][y+h][0];
                                    neighborY = neighborY + vectorMap[x+w][y+h][1];
                                }
                                //To save Calculation time the paoli value is created with the "historical" value of the orientation
                                /*System.out.println("Paoli"); System.out.println(complexMap[x+w][y+h][0]); System.out.print(vectorMap[x+w][y+h][4]); System.out.print(oriHistoryMap[x][y][4]); System.out.println(Math.abs( vectorMap[x+w][y+h][4]-oriHistoryMap[x][y][4] )); complexMap[x+w][y+h][0] = complexMap[x+w][y+h][0]*(1 + (1 - 0.5*Math.abs( vectorMap[x+w][y+h][4]-oriHistoryMap[x][y][4] ) - complexMap[x+w][y+h][0])); //System.out.println(complexMap[x+w][y+h][0]);*/
                            }
                        }
                    }
                }
            }
            // NOTE(review): tanh(x/y) is used here as an angle-like quantity; atan/atan2 would
            // give the true angle. Since the same transform is applied to both the event and the
            // neighborhood, comparisons are consistent, but confirm this is intentional.
            neighborLength = (float)Math.sqrt(neighborX*neighborX+neighborY*neighborY);
            neighborTheta = (float)Math.tanh(neighborX/neighborY);
            if(oriHistoryEnabled){
                vectorMap[x][y][4] = (float)(Math.tanh((vectorMap[x][y][0]+historyFactor*oriHistoryMap[x][y][0]) /(vectorMap[x][y][1]+historyFactor*oriHistoryMap[x][y][1])));
            } else {
                vectorMap[x][y][4] = (float)(Math.tanh(vectorMap[x][y][0]/vectorMap[x][y][1]));
            }
            //The paoli is updated by an exp decay function with Tau = paoliTau
            vectorMap[x][y][5] = vectorMap[x][y][5]*(float)Math.exp(-(vectorMap[x][y][2]-oriHistoryMap[x][y][2])/paoliTau);
            //The historyMap is updated
            oriHistoryMap[x][y][0] = vectorMap[x][y][0];
            oriHistoryMap[x][y][1] = vectorMap[x][y][1];
            oriHistoryMap[x][y][2] = vectorMap[x][y][2];
            oriHistoryMap[x][y][4] = vectorMap[x][y][4];
            //Create Output
            if(vectorMap[x][y][0]!=0 && vectorMap[x][y][1]!=0){
                if(Math.abs(vectorMap[x][y][4]-neighborTheta)<Math.PI*tolerance/180 && Math.abs(vectorMap[x][y][4])<ori*Math.PI/180 && neighborLength > neighborThr){
                    //the paoli value of the neighbors in the direction of the orientation vector has to be increased
                    //for each line above and below the actual event it is checked which the x value on the line (xl) is
                    for(int i=1; i<=height; i++){
                        // NOTE(review): xl already includes x, yet it is added to/subtracted from
                        // x again in the indices below — this looks like a doubled offset; verify
                        // whether xl should be just the (int)(i*slope) term.
                        int xl =x+ (int)(i*(vectorMap[x][y][0]/vectorMap[x][y][1]));
                        /*System.out.println("paoli"); System.out.println(vectorMap[x][y][0]); System.out.println(vectorMap[x][y][1]); System.out.println(i); System.out.println((int)(i*(vectorMap[x][y][0]/vectorMap[x][y][1]))); System.out.println(x); System.out.println(xl);*/
                        if(0<x-xl && x+xl<sizex && 0<y-i && y+i<sizey && 0<x+xl && x-xl<sizex && 0<y+i && y-i<sizey){
                            vectorMap[x+xl][y+i][5] = vectorMap[x+xl][y+i][5]+1;
                            vectorMap[x-xl][y-i][5] = vectorMap[x-xl][y-i][5]+1;
                        }
                    }
                    if(showOriEnabled){
                        OrientationEvent eout=(OrientationEvent)outItr.nextOutput();
                        eout.copyFrom(e);
                        eout.orientation=(byte)Math.abs(4*vectorMap[x][y][4]);
                        eout.hasOrientation=true;
                    }
                    if(showPaoliEnabled){
                        // NOTE(review): this compares the x-component ([0]) against paoliThr,
                        // while the paoli value itself is kept in slot [5] — confirm the
                        // intended index.
                        if(vectorMap[x][y][0]>paoliThr){
                            OrientationEvent eout=(OrientationEvent)outItr.nextOutput();
                            eout.copyFrom(e);
                            eout.hasOrientation=false;
                        }
                    }
                    /*System.out.println("-->clustered"); System.out.println(Math.abs(vectorMap[x][y][4]-neighborTheta)); System.out.println(vectorMap[x][y][4]); System.out.println(Math.abs(vectorMap[x][y][4])); */}
            } else {
                if(showAll){
                    OrientationEvent eout=(OrientationEvent)outItr.nextOutput();
                    eout.copyFrom(e);
                    eout.hasOrientation=false;
                }
            }
        }
        if(paoliWindowEnabled) {
            checkPaoliFrame();
            paoliCanvas.repaint();
        }
        return out;
    }

    /** Clears all per-pixel state and reallocates the (half-resolution) paoli display array. */
    public void resetFilter(){
        System.out.println("reset!");
        if(!isFilterEnabled()) return;
        paoliArray=new float[chip.getSizeX()/2][chip.getSizeY()/2];
        if(vectorMap!=null){
            for(int i=0;i<vectorMap.length;i++)
                for(int j=0;j<vectorMap[i].length;j++){
                    Arrays.fill(vectorMap[i][j],0);
                }
        }
        if(oriHistoryMap!=null){
            for(int i=0;i<oriHistoryMap.length;i++)
                for(int j=0;j<oriHistoryMap[i].length;j++)
                    Arrays.fill(oriHistoryMap[i][j],0);
        }
    }

    /** Exposes the raw per-pixel vector state (no defensive copy). */
    public Object getFilterState() {
        return vectorMap;
    }

    public void initFilter(){
        System.out.println("init!");
        resetFilter();
    }

    /** Observer callback (e.g. chip size change): reinitialize. */
    public void update(Observable o, Object arg){
        initFilter();
    }

    public boolean isShowVectorsEnabled() {
        return showVectorsEnabled;
    }

    public void setShowVectorsEnabled(boolean showVectorsEnabled) {
        this.showVectorsEnabled = showVectorsEnabled;
        getPrefs().putBoolean("SimpleOrientationFilter.showVectorsEnabled",showVectorsEnabled);
    }

    /** Draws a short white segment per output event showing its quantized orientation. */
    public void annotate(GLAutoDrawable drawable) {
        if(!isAnnotationEnabled() ) return;
        GL gl=drawable.getGL();
        if(isShowVectorsEnabled()){
            // draw individual orientation vectors
            gl.glPushMatrix();
            gl.glColor3f(1,1,1);
            gl.glLineWidth(1f);
            gl.glBegin(GL.GL_LINES);
            for(Object o:out){
                OrientationEvent e=(OrientationEvent)o;
                drawOrientationVector(gl,e);
            }
            gl.glEnd();
            gl.glPopMatrix();
        }
    }

    /** not used */
    public void annotate(float[][][] frame) {
    }

    /** not used */
    public void annotate(Graphics2D g) {
    }

    /** Emits the two endpoints of the unit orientation segment centered on the event. */
    private void drawOrientationVector(GL gl, OrientationEvent e){
        if(!e.hasOrientation) return;
        OrientationEvent.UnitVector d=OrientationEvent.unitVectors[e.orientation];
        gl.glVertex2f(e.x-d.x,e.y-d.y);
        gl.glVertex2f(e.x+d.x,e.y+d.y);
    }

    /** Lazily creates the paoli debug window if missing or closed. */
    void checkPaoliFrame(){
        if(paoliFrame==null || (paoliFrame!=null && !paoliFrame.isVisible())) createPaoliFrame();
    }

    JFrame paoliFrame=null;
    GLCanvas paoliCanvas=null;
    GLU glu=null;

    /** Builds the GL window that renders paoliArray as a grayscale image. */
    void createPaoliFrame(){
        paoliFrame=new JFrame("Hough accumulator");
        paoliFrame.setPreferredSize(new Dimension(chip.getSizeX(),chip.getSizeY()));
        paoliCanvas=new GLCanvas();
        paoliCanvas.addGLEventListener(new GLEventListener(){
            public void init(GLAutoDrawable drawable) {
            }
            synchronized public void display(GLAutoDrawable drawable) {
                if(paoliArray==null) return;
                GL gl=drawable.getGL();
                gl.glLoadIdentity();
                gl.glScalef(drawable.getWidth(),drawable.getHeight(),1);
                gl.glClearColor(0,0,0,0);
                gl.glClear(GL.GL_COLOR_BUFFER_BIT);
                // NOTE(review): paoliArray is allocated as [sizeX/2][sizeY/2] in resetFilter(),
                // but this loop runs to chip.getSizeX() in BOTH dimensions — this will throw
                // ArrayIndexOutOfBoundsException; verify the intended bounds (and j should
                // probably use getSizeY()).
                for(int i=0;i<chip.getSizeX();i++){
                    for(int j=0;j<chip.getSizeX();j++){
                        float f=paoliArray[i][j];
                        gl.glColor3f(f,f,f);
                        gl.glRectf(i,j,i+1,j+1);
                    }
                }
                gl.glPointSize(6);
                gl.glColor3f(1,0,0);
                gl.glBegin(GL.GL_POINTS);
                //gl.glVertex2f(thetaMaxIndex, rhoMaxIndex);
                gl.glEnd();
                // if(glut==null) glut=new GLUT();
                int error=gl.glGetError();
                if(error!=GL.GL_NO_ERROR){
                    if(glu==null) glu=new GLU();
                    log.warning("GL error number "+error+" "+glu.gluErrorString(error));
                }
            }
            synchronized public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {
                GL gl=drawable.getGL();
                final int B=10;
                gl.glMatrixMode(GL.GL_PROJECTION);
                gl.glLoadIdentity(); // very important to load identity matrix here so this works after first resize!!!
                gl.glOrtho(-B,drawable.getWidth()+B,-B,drawable.getHeight()+B,10000,-10000);
                gl.glMatrixMode(GL.GL_MODELVIEW);
                gl.glViewport(0,0,width,height);
            }
            public void displayChanged(GLAutoDrawable drawable, boolean modeChanged, boolean deviceChanged) {
            }
        });
        paoliFrame.getContentPane().add(paoliCanvas);
        paoliFrame.pack();
        paoliFrame.setVisible(true);
    }

    // --- property getters/setters; setters persist to Preferences, several also reallocate maps.

    public boolean isOriHistoryEnabled() {
        return oriHistoryEnabled;
    }

    public void setOriHistoryEnabled(boolean oriHistoryEnabled) {
        this.oriHistoryEnabled = oriHistoryEnabled;
        getPrefs().putBoolean("OrientationCluster.oriHistoryEnabled",oriHistoryEnabled);
    }

    public boolean isShowOriEnabled() {
        return showOriEnabled;
    }

    public void setShowOriEnabled(boolean showOriEnabled) {
        this.showOriEnabled = showOriEnabled;
        getPrefs().putBoolean("OrientationCluster.showOriEnabled",showOriEnabled);
    }

    public boolean isShowPaoliEnabled() {
        return showPaoliEnabled;
    }

    public void setPaoliWindowEnabled(boolean paoliWindowEnabled) {
        this.paoliWindowEnabled = paoliWindowEnabled;
        getPrefs().putBoolean("OrientationCluster.paoliWindowEnabled",paoliWindowEnabled);
    }

    public boolean isPaoliWindowEnabled() {
        return paoliWindowEnabled;
    }

    public void setShowPaoliEnabled(boolean showPaoliEnabled) {
        this.showPaoliEnabled = showPaoliEnabled;
        getPrefs().putBoolean("OrientationCluster.showPaoliEnabled",showPaoliEnabled);
    }

    public boolean isShowAll() {
        return showAll;
    }

    public void setShowAll(boolean showAll) {
        this.showAll = showAll;
        // NOTE(review): key is misspelled ("OrientationCluser"); the field is READ from
        // "OrientationCluster.showAll" above, so this setting never persists — likely a bug.
        getPrefs().putBoolean("OrientationCluser.showAll",showAll);
    }

    public float getTolerance() {
        return tolerance;
    }

    synchronized public void setTolerance(float tolerance) {
        this.tolerance = tolerance;
        allocateMaps();
        getPrefs().putFloat("OrientationCluster.tolerance",tolerance);
    }

    public float getPaoliThr() {
        return paoliThr;
    }

    synchronized public void setPaoliThr(float paoliThr) {
        this.paoliThr = paoliThr;
        allocateMaps();
        getPrefs().putFloat("OrientationCluster.paoliThr",paoliThr);
    }

    public float getPaoliTau() {
        return paoliTau;
    }

    synchronized public void setPaoliTau(float paoliTau) {
        this.paoliTau = paoliTau;
        allocateMaps();
        getPrefs().putFloat("OrientationCluster.paoliTau",paoliTau);
    }

    public float getNeighborThr() {
        return neighborThr;
    }

    synchronized public void setNeighborThr(float neighborThr) {
        this.neighborThr = neighborThr;
        allocateMaps();
        getPrefs().putFloat("OrientationCluster.neighborThr",neighborThr);
    }

    public float getOri() {
        return ori;
    }

    synchronized public void setOri(float ori) {
        this.ori = ori;
        allocateMaps();
        getPrefs().putFloat("OrientationCluster.ori",ori);
    }

    public float getDt() {
        return dt;
    }

    synchronized public void setDt(float dt) {
        this.dt = dt;
        allocateMaps();
        getPrefs().putFloat("OrientationCluster.dt",dt);
    }

    public float getFactor() {
        return factor;
    }

    synchronized public void setFactor(float factor) {
        this.factor = factor;
        allocateMaps();
        getPrefs().putFloat("OrientationCluster.factor",factor);
    }

    public float getHistoryFactor() {
        return historyFactor;
    }

    synchronized public void setHistoryFactor(float historyFactor) {
        this.historyFactor = historyFactor;
        allocateMaps();
        getPrefs().putFloat("OrientationCluster.historyFactor",historyFactor);
    }

    public int getHeight() {
        return height;
    }

    synchronized public void setHeight(int height) {
        this.height = height;
        allocateMaps();
        getPrefs().putInt("OrientationCluster.height",height);
    }

    public int getWidth() {
        return width;
    }

    synchronized public void setWidth(int width) {
        this.width = width;
        allocateMaps();
        getPrefs().putInt("OrientationCluster.width",width);
    }

    public XYTypeFilter getXYFilter() {
        return xYFilter;
    }

    public void setXYFilter(XYTypeFilter xYFilter) {
        this.xYFilter = xYFilter;
    }
}
package com.akjava.gwt.three.client.js.objects; import com.akjava.gwt.three.client.gwt.JSParameter; import com.akjava.gwt.three.client.gwt.core.Intersect; import com.akjava.gwt.three.client.js.cameras.Camera; import com.akjava.gwt.three.client.js.core.Object3D; import com.akjava.gwt.three.client.js.core.Raycaster; import com.google.gwt.core.client.JsArray; /** * i have no idead wht is this. * @author aki * */ public class LOD extends Object3D{ protected LOD() { } /** * @deprecated on r72 * @return */ public final native JsArray<Object3D> getObjects()/*-{ return this.objects; }-*/; /** * @deprecated on r72 * @return */ public final native void setObjects(JsArray<Object3D> objects)/*-{ this.objects = objects; }-*/; /** * { distance: distance, object: object } * @return */ public final native JSParameter getLevels()/*-{ return this.levels; }-*/; public final native void addLevel(Object3D object,double distance)/*-{ this.addLevel(object,distance); }-*/; public final native Object3D getObjectForDistance(double distance)/*-{ return this.getObjectForDistance(distance); }-*/; public final native void update(Camera camera)/*-{ this.update(); }-*/; public final native void raycast(Raycaster raycaster,JsArray<Intersect> intersects)/*-{ this.raycast(raycaster,intersects); }-*/; public final native LOD copy(LOD source)/*-{ return this.copy(source); }-*/; public final native LOD clone()/*-{ return this.clone(); }-*/; }
package com.booleangold.import_photos;

import java.io.*;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.prefs.Preferences;
import javax.swing.*;
import java.awt.event.*;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import com.drew.imaging.ImageMetadataReader;
import com.drew.imaging.ImageProcessingException;
import com.drew.metadata.*;
import com.drew.metadata.exif.ExifSubIFDDirectory;

/**
 * Small Swing utility that imports photos from a source directory (e.g. a camera card)
 * into a destination tree organized as {@code dest/yyyy/MM/}, using the EXIF
 * "date taken" tag where available. Recently used directories are remembered in
 * java.util.prefs Preferences.
 */
public class ImportPhotos extends JFrame implements ActionListener, PropertyChangeListener {
    static final long serialVersionUID = 1;
    private static final String SOURCE_KEY = "source_dir_";
    private static final String DEST_KEY = "dest_dir_";
    private static final int RECENT_DIR_COUNT = 4;
    // NOTE(review): SimpleDateFormat is not thread-safe; these are only used from the
    // SwingWorker background thread today, but keep that invariant if CollectFile is
    // ever called from the EDT too.
    private static final SimpleDateFormat mYearFormat = new SimpleDateFormat("yyyy");
    private static final SimpleDateFormat mMonthFormat = new SimpleDateFormat("MM");
    private final JComboBox mSource;
    private final JButton mBrowseSourceButton;
    private final JComboBox mDest;
    private final JButton mBrowseDestButton;
    private final JButton mImportButton;
    private final JButton mCancelButton;
    private final JProgressBar mProgressBar;
    private final Preferences mPrefs;
    private CopyTask mCopyTask;

    /**
     * Background task that walks the source tree off the EDT and reports the number of
     * files found via the "fileCount" bound property (picked up in propertyChange).
     */
    class CopyTask extends SwingWorker<Void, Void> {
        private final File mSource;
        private final String mDestRoot;

        CopyTask(File source, String destRoot) {
            mSource = source;
            mDestRoot = destRoot;
        }

        @Override
        protected Void doInBackground() throws Exception {
            try {
                List<File> filesToImport = CollectDirectory(mSource, mDestRoot);
                // System.out.println("Importing files: " + filesToImport.size());
                firePropertyChange("fileCount", 0, filesToImport.size());
            } catch (Exception e) {
                // TODO: Error dialog
                System.out.println("Failed to traverse directories");
            }
            return null;
        }
    }

    /** Builds the whole UI: source/dest pickers, import/cancel buttons, progress bar. */
    ImportPhotos(String title) {
        setTitle(title);
        // Create Components
        mPrefs = Preferences.userNodeForPackage(ImportPhotos.class);
        mSource = MakeComboBox("E:\\DCIM\\", SOURCE_KEY);
        JLabel sourceLabel = new JLabel("Import pictures from: ");
        mBrowseSourceButton = new JButton("Browse");
        mBrowseSourceButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                ChooseDirectory(mSource);
                SaveComboBox(mSource, SOURCE_KEY);
            }
        });
        JLabel destLabel = new JLabel("Copy pictures to: ");
        mDest = MakeComboBox("D:\\Users\\Beth\\Documents\\Pictures", DEST_KEY);
        mBrowseDestButton = new JButton("Browse");
        mBrowseDestButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                ChooseDirectory(mDest);
                SaveComboBox(mDest, DEST_KEY);
            }
        });
        mImportButton = new JButton("Import");
        mImportButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                Import();
            }
        });
        mCancelButton = new JButton("Cancel");
        mCancelButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                ImportPhotos.this.dispose();
            }
        });
        mProgressBar = new JProgressBar(0, 100);
        //Create a layout and add components to it.
        GroupLayout layout = new GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setAutoCreateGaps(true);
        layout.setAutoCreateContainerGaps(true);
        layout.setHorizontalGroup(
            layout.createParallelGroup(GroupLayout.Alignment.LEADING)
                .addGroup(layout.createSequentialGroup()
                    .addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING)
                        .addComponent(sourceLabel)
                        .addComponent(destLabel))
                    .addGroup(layout.createParallelGroup(GroupLayout.Alignment.TRAILING)
                        .addComponent(mSource)
                        .addComponent(mDest))
                    .addGroup(layout.createParallelGroup(GroupLayout.Alignment.TRAILING)
                        .addComponent(mBrowseSourceButton)
                        .addComponent(mBrowseDestButton))
                )
                .addComponent(mProgressBar)
                .addGroup(layout.createSequentialGroup()
                    .addComponent(mImportButton)
                    .addComponent(mCancelButton))
        );
        layout.setVerticalGroup(
            layout.createSequentialGroup()
                .addGroup(layout.createSequentialGroup()
                    .addGroup(layout.createParallelGroup(GroupLayout.Alignment.BASELINE)
                        .addComponent(sourceLabel)
                        .addComponent(mSource)
                        .addComponent(mBrowseSourceButton))
                    .addGroup(layout.createParallelGroup(GroupLayout.Alignment.BASELINE)
                        .addComponent(destLabel)
                        .addComponent(mDest)
                        .addComponent(mBrowseDestButton))
                )
                .addComponent(mProgressBar)
                .addGroup(layout.createParallelGroup(GroupLayout.Alignment.BASELINE)
                    .addComponent(mImportButton)
                    .addComponent(mCancelButton, GroupLayout.Alignment.TRAILING))
        );
        this.pack();
        this.setResizable(false);
    }

    /**
     * Creates a combo box seeded with up to RECENT_DIR_COUNT directories saved under
     * {@code base_key}, falling back to {@code defaultItem} if none are stored.
     */
    JComboBox MakeComboBox(String defaultItem, String base_key) {
        JComboBox retval = new JComboBox();
        for (int i = 0; i < RECENT_DIR_COUNT; i++) {
            String key = base_key + Integer.toString(i);
            String dir = mPrefs.get(key, null);
            if (dir != null) {
                retval.addItem(dir);
            }
        }
        if (retval.getItemCount() == 0) {
            retval.addItem(defaultItem);
        }
        return retval;
    }

    /** Persists the first RECENT_DIR_COUNT combo-box entries under {@code base_key}. */
    void SaveComboBox(JComboBox cb, String base_key) {
        int itemCount = cb.getItemCount();
        for (int i = 0; i < RECENT_DIR_COUNT && i < itemCount; i++) {
            mPrefs.put(base_key + Integer.toString(i), (String) cb.getItemAt(i));
        }
    }

    /** Opens a directory chooser anchored at the combo box's current entry and records the pick. */
    void ChooseDirectory(JComboBox comboBox) {
        JFileChooser chooser;
        // Start at currently selected directory, if possible
        try {
            chooser = new JFileChooser((String) comboBox.getSelectedItem());
        } catch (Exception e) {
            chooser = new JFileChooser();
        }
        chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
        int returnVal = chooser.showOpenDialog(this);
        if(returnVal == JFileChooser.APPROVE_OPTION) {
            String path = chooser.getSelectedFile().getAbsolutePath();
            comboBox.insertItemAt(path, 0);
            comboBox.setSelectedItem(path);
        }
    }

    /** Reads the selected source/dest paths and kicks off a background CopyTask. */
    void Import() {
        String destRoot;
        File sourceDir;
        try {
            String sourceRoot = (String) mSource.getSelectedItem();
            sourceDir = new File(sourceRoot);
            System.out.println("Import From: " + sourceRoot);
            destRoot = (String) mDest.getSelectedItem();
            System.out.println("Import To: " + destRoot);
        } catch (Exception e) {
            // TODO: Error dialog
            System.out.println("Failed to convert items to Strings");
            return;
        }
        mProgressBar.setIndeterminate(true);
        mCopyTask = new CopyTask(sourceDir, destRoot);
        mCopyTask.addPropertyChangeListener(this);
        mCopyTask.execute();
    }

    /**
     * Recursively lists all regular files under {@code sourceDir}.
     * Fixed: {@code File.listFiles()} returns null for a non-directory or on an I/O
     * error — the old code dereferenced it unconditionally and threw NPE.
     *
     * @param sourceDir root to walk
     * @param destRoot  destination root (carried along for future per-file routing)
     * @return flat list of files found (possibly empty, never null)
     */
    List<File> CollectDirectory(File sourceDir, String destRoot) {
        LinkedList<File> result = new LinkedList<File>();
        File[] children = sourceDir.listFiles();
        if (children == null) {
            // Not a directory, or the listing failed — nothing to collect.
            return result;
        }
        for (File file : children) {
            if (file.isDirectory()) {
                result.addAll(CollectDirectory(file, destRoot));
            } else {
                System.out.println("File: " + file);
                result.add(file);
            }
        }
        return result;
    }

    /**
     * Determines the yyyy/MM bucket for one file from its EXIF "date taken" (or file
     * mtime for .mov videos), creates the bucket directory, and reports what would be
     * copied. The actual copy is not implemented yet; always returns null.
     */
    File CollectFile(File file, String destRoot) {
        String year = null;
        String month = null;
        try {
            Metadata meta = ImageMetadataReader.readMetadata(file);
            Directory exif = meta.getDirectory(ExifSubIFDDirectory.class);
            Date date = exif.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL);
            year = mYearFormat.format(date);
            month = mMonthFormat.format(date);
        } catch (ImageProcessingException e) {
            // Videos carry no EXIF; fall back to the filesystem timestamp.
            if (file.getName().endsWith(".mov") || file.getName().endsWith(".MOV")) {
                year = mYearFormat.format(file.lastModified());
                month = mMonthFormat.format(file.lastModified());
            }
            // TODO
        } catch (IOException e) {
            // TODO
        }
        if (year != null && month != null) {
            File yearDir = new File(destRoot, year);
            File monthDir = new File(yearDir, month);
            if (!monthDir.exists()) {
                monthDir.mkdirs();
            }
            // TODO: Copy file to monthDir
            File destFile = new File(monthDir, file.getName());
            if (destFile.exists()) {
                System.out.println("Skipping duplicate file " + destFile.getPath());
            } else {
                System.out.println("Preparing to import " + file.getPath() + " to " + destFile.getPath());
                // return new File(file, destFile);
            }
        }
        return null;
    }

    /*
     * InputStream is = null; OutputStream os = null; try { is = new FileInputStream(file); os = new
     * FileOutputStream(destFile); int length; while ((length = is.read(mBuffer)) > 0) {
     * os.write(mBuffer, 0, length); } System.out.println("Successfully copied " + file.getPath() +
     * " to " + destFile.getPath()); } catch (FileNotFoundException e) {
     * System.out.println("Failed to copy " + file.getPath() + " to " + destFile.getPath()); } catch
     * (IOException e) { System.out.println("Failed to copy " + file.getPath() + " to " +
     * destFile.getPath()); } finally { try { is.close(); } catch (IOException e) { // TODO
     * Auto-generated catch block e.printStackTrace(); } try { os.close(); } catch (IOException e) {
     * // TODO Auto-generated catch block e.printStackTrace(); } }
     */

    public static void main(String[] args) {
        ImportPhotos window = new ImportPhotos("Import Photos");
        window.setVisible(true);
        window.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    }

    /**
     * Receives "progress" updates and the one-shot "fileCount" from the CopyTask.
     * Fixed: property names are now compared with equals() — the old code used
     * reference equality (==) on Strings, which only worked by interning accident.
     */
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        if ("progress".equals(evt.getPropertyName())) {
            int progress = (Integer) evt.getNewValue();
            mProgressBar.setValue(progress);
        } else if ("fileCount".equals(evt.getPropertyName())) {
            int count = (Integer) evt.getNewValue();
            System.out.println("Importing files: " + count);
            mProgressBar.setIndeterminate(false);
            mProgressBar.setMaximum(count);
        }
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        // TODO Auto-generated method stub
    }
}
package com.btmura.android.reddit.widget;

import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.text.BoringLayout;
import android.text.Layout;
import android.text.Layout.Alignment;
import android.text.Spannable;
import android.text.SpannableStringBuilder;
import android.text.StaticLayout;
import android.text.TextUtils;
import android.text.TextUtils.TruncateAt;
import android.text.style.ClickableSpan;
import android.text.style.ForegroundColorSpan;
import android.text.style.StyleSpan;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.GestureDetector.OnGestureListener;
import android.view.MotionEvent;

import com.btmura.android.reddit.BuildConfig;
import com.btmura.android.reddit.R;
import com.btmura.android.reddit.accounts.AccountUtils;
import com.btmura.android.reddit.database.Kinds;
import com.btmura.android.reddit.text.Formatter;
import com.btmura.android.reddit.text.RelativeTime;

/**
 * Custom view that renders a single reddit "thing" (link, comment, or message):
 * optional voting arrows with a score, an optional thumbnail, a title, a
 * markdown-formatted body with clickable spans, a one-line status (subreddit,
 * author, points, age, comment count), and a right-aligned details column
 * (ups/downs and domain) when horizontal space allows.
 *
 * Text is pre-measured into {@link Layout} objects in {@link #onMeasure} and
 * drawn via cumulative canvas translations in {@link #onDraw}, so the two
 * methods must stay in lockstep. Taps are routed through a
 * {@link GestureDetector} for vote arrows and through
 * {@link #onBodyTouchEvent} for {@link ClickableSpan}s in the body.
 *
 * NOTE(review): layout constants (PADDING, ELEMENT_PADDING, MAX/MIN_DETAILS_WIDTH)
 * and the TEXT_PAINTS array with its THING_* indices come from the CustomView
 * base class, which is not visible here.
 */
public class ThingView extends CustomView implements OnGestureListener {

    public static final String TAG = "ThingView";

    // TODO: Fix thread safety issue here. FORMATTER is a single shared static
    // instance used from setData(); presumably Formatter keeps internal mutable
    // state, making concurrent use from multiple threads unsafe — confirm
    // against com.btmura.android.reddit.text.Formatter before relying on this.
    private static final Formatter FORMATTER = new Formatter();

    /** Detects taps so vote arrows can react before the body handles them. */
    private final GestureDetector detector;

    /** Callback fired when the user votes via the arrows; may be null. */
    private OnVoteListener listener;

    // --- Data captured from the last setData() call ---
    private boolean expanded;
    private int kind;
    private int likes;
    private int nesting;
    private String linkTitle;
    private int thingBodyWidth;
    private String thumbnailUrl;
    private String thingId;
    private String title;

    /** Thumbnail bitmap supplied asynchronously via setThumbnailBitmap(). */
    private Bitmap bitmap;

    // --- Derived display flags / text ---
    private boolean drawVotingArrows;
    private boolean drawScore;
    private CharSequence bodyText;
    private String scoreText;

    /** Reused builder for the status line (NSFW, subreddit, author, ...). */
    private final SpannableStringBuilder statusText = new SpannableStringBuilder();
    /** Lazily created; italicizes the status line of collapsed comments. */
    private StyleSpan italicSpan;

    /** Full details text (ups/downs + domain) used when width permits. */
    private final SpannableStringBuilder longDetailsText = new SpannableStringBuilder();
    /** Abbreviated details text (domain only) for narrower layouts. */
    private String shortDetailsText;

    // --- Layouts built in onMeasure() and drawn in onDraw() ---
    private Layout linkTitleLayout;
    private Layout titleLayout;
    private Layout bodyLayout;
    private Layout statusLayout;
    private Layout detailsLayout;

    /** Measured bounds of the score text; lazily allocated. */
    private Rect scoreBounds;
    /** Body text bounds in view coordinates; used for span hit-testing. */
    private RectF bodyBounds;

    /** Height of the text column to the right of arrows/thumbnail. */
    private int rightHeight;
    /** Minimum view height: padding + max(left gadgets, text column). */
    private int minHeight;

    public ThingView(Context context) {
        this(context, null);
    }

    public ThingView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public ThingView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        detector = new GestureDetector(context, this);
        init(context);
    }

    /** Initializes the shared drawing helpers for arrows and thumbnails. */
    private void init(Context context) {
        VotingArrows.init(context);
        Thumbnail.init(context);
    }

    public void setOnVoteListener(OnVoteListener listener) {
        this.listener = listener;
    }

    /** Sets the (possibly late-loaded) thumbnail bitmap and redraws. */
    public void setThumbnailBitmap(Bitmap bitmap) {
        this.bitmap = bitmap;
        invalidate();
    }

    /**
     * Binds one thing's data to this view and precomputes display state.
     * Voting arrows show only for a signed-in account on expanded non-message
     * things; the numeric score shows only on links. Triggers a relayout.
     */
    public void setData(String accountName,
            String author,
            String body,
            long createdUtc,
            String domain,
            int downs,
            boolean expanded,
            int kind,
            int likes,
            String linkTitle,
            int nesting,
            long nowTimeMs,
            int numComments,
            boolean over18,
            String parentSubreddit,
            int score,
            String subreddit,
            int thingBodyWidth,
            String thingId,
            String thumbnailUrl,
            String title,
            int ups) {
        this.expanded = expanded;
        this.kind = kind;
        this.nesting = nesting;
        this.likes = likes;
        this.linkTitle = linkTitle;
        this.thingBodyWidth = thingBodyWidth;
        this.thingId = thingId;
        this.thumbnailUrl = thumbnailUrl;
        this.title = title;

        // Arrows require a real account, a non-message kind, and expansion.
        drawVotingArrows = AccountUtils.isAccount(accountName)
                && kind != Kinds.KIND_MESSAGE
                && expanded;
        drawScore = drawVotingArrows && kind == Kinds.KIND_LINK;
        if (drawScore) {
            if (scoreBounds == null) {
                scoreBounds = new Rect();
            }
            scoreText = VotingArrows.getScoreText(score);
        }

        // Hide the subreddit when it matches the parent listing's subreddit.
        boolean showSubreddit = !TextUtils.isEmpty(subreddit)
                && !subreddit.equalsIgnoreCase(parentSubreddit);

        // Points go in the status line only when not already drawn as a score.
        boolean showPoints = !drawScore && kind != Kinds.KIND_MESSAGE;

        boolean showNumComments = kind == Kinds.KIND_LINK;

        setStatusText(over18, showSubreddit, showPoints, showNumComments,
                author, createdUtc, nowTimeMs, numComments, score, subreddit);

        boolean showUpsDowns = kind == Kinds.KIND_LINK;
        setDetailsText(showUpsDowns, domain, downs, ups);

        if (!TextUtils.isEmpty(body)) {
            // Formats markdown into spans (links etc.) for the body text.
            bodyText = FORMATTER.formatSpans(getContext(), body);
            if (bodyBounds == null) {
                bodyBounds = new RectF();
            }
        } else {
            bodyText = null;
        }

        requestLayout();
    }

    /**
     * Rebuilds the status line: [NSFW] [subreddit] author [points] [age]
     * [comment count]. NSFW is tinted red; collapsed things italicize the
     * whole line.
     */
    private void setStatusText(boolean showNsfw, boolean showSubreddit, boolean showPoints,
            boolean showNumComments, String author, long createdUtc, long nowTimeMs,
            int numComments, int score, String subreddit) {
        Context c = getContext();
        Resources r = getResources();
        statusText.clear();
        statusText.clearSpans();
        if (showNsfw) {
            String nsfw = c.getString(R.string.nsfw);
            statusText.append(nsfw).append(" ");
            // Red highlight only covers the NSFW token at the start.
            statusText.setSpan(new ForegroundColorSpan(Color.RED), 0, nsfw.length(), 0);
        }
        if (showSubreddit) {
            statusText.append(subreddit).append(" ");
        }
        statusText.append(author).append(" ");
        if (showPoints) {
            statusText.append(r.getQuantityString(R.plurals.points, score, score)).append(" ");
        }
        if (createdUtc != 0) {
            statusText.append(RelativeTime.format(c, nowTimeMs, createdUtc)).append(" ");
        }
        if (showNumComments) {
            statusText.append(r.getQuantityString(R.plurals.comments, numComments, numComments));
        }
        if (!expanded) {
            if (italicSpan == null) {
                // Reused across rebinds; span object itself is stateless.
                italicSpan = new StyleSpan(Typeface.ITALIC);
            }
            statusText.setSpan(italicSpan, 0, statusText.length(), 0);
        }
    }

    /**
     * Rebuilds the details column text. The long form is "N ups N downs domain";
     * the short form is just the domain (empty when there is no domain).
     */
    private void setDetailsText(boolean showUpsDowns, String domain, int downs, int ups) {
        Resources r = getResources();
        longDetailsText.clear();
        if (showUpsDowns) {
            longDetailsText.append(r.getQuantityString(R.plurals.votes_up, ups, ups))
                    .append(" ");
            longDetailsText.append(r.getQuantityString(R.plurals.votes_down, downs, downs))
                    .append(" ");
        }
        if (!TextUtils.isEmpty(domain)) {
            longDetailsText.append(domain);
            shortDetailsText = domain;
        } else {
            shortDetailsText = "";
        }
    }

    /**
     * Measures the view: allocates horizontal space among the left gadgets
     * (arrows, thumbnail), the text column, and the optional details column,
     * builds all text layouts, computes the minimum height, and records the
     * body text's bounds for later span hit-testing. Must mirror onDraw()'s
     * translation order exactly.
     */
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int measuredWidth = 0;
        int measuredHeight = 0;

        int widthMode = MeasureSpec.getMode(widthMeasureSpec);
        int widthSize = MeasureSpec.getSize(widthMeasureSpec);
        switch (widthMode) {
            case MeasureSpec.AT_MOST:
            case MeasureSpec.EXACTLY:
                measuredWidth = widthSize;
                break;
            case MeasureSpec.UNSPECIFIED:
                measuredWidth = getSuggestedMinimumWidth();
                break;
        }

        int linkTitleWidth;
        int titleWidth;
        int detailsWidth;
        CharSequence detailsText;

        // Outer padding on both sides plus one padding step per nesting level.
        int totalPadding = PADDING * (2 + nesting);
        int contentWidth = measuredWidth - totalPadding;

        if (thingBodyWidth > 0) {
            // Cap the text column at thingBodyWidth; leftover width decides
            // whether to show the long, short, or no details column.
            linkTitleWidth = titleWidth = Math.min(measuredWidth, thingBodyWidth) - totalPadding;
            int remainingWidth = measuredWidth - thingBodyWidth - totalPadding;
            if (remainingWidth > MAX_DETAILS_WIDTH) {
                detailsWidth = MAX_DETAILS_WIDTH;
                detailsText = longDetailsText;
            } else if (remainingWidth > MIN_DETAILS_WIDTH) {
                detailsWidth = MIN_DETAILS_WIDTH;
                detailsText = shortDetailsText;
            } else {
                detailsWidth = 0;
                detailsText = "";
            }
        } else {
            linkTitleWidth = titleWidth = contentWidth;
            detailsWidth = 0;
            detailsText = "";
        }

        // Width consumed by arrows and/or thumbnail on the left.
        int leftGadgetWidth = 0;
        if (drawVotingArrows) {
            leftGadgetWidth += VotingArrows.getWidth(drawVotingArrows) + PADDING;
            if (drawScore) {
                VotingArrows.measureScoreText(scoreText, scoreBounds);
            }
        }
        if (!TextUtils.isEmpty(thumbnailUrl)) {
            leftGadgetWidth += Thumbnail.getWidth() + PADDING;
        }
        titleWidth -= leftGadgetWidth;

        int statusWidth = contentWidth - leftGadgetWidth;
        if (detailsWidth > 0) {
            statusWidth -= detailsWidth + PADDING;
        }

        // Clamp all widths so layout construction never sees negatives.
        linkTitleWidth = Math.max(0, linkTitleWidth);
        titleWidth = Math.max(0, titleWidth);
        statusWidth = Math.max(0, statusWidth);
        detailsWidth = Math.max(0, detailsWidth);

        int leftHeight = 0;
        if (drawVotingArrows) {
            leftHeight = Math.max(leftHeight, VotingArrows.getHeight(drawVotingArrows, drawScore));
        }
        if (kind == Kinds.KIND_LINK) {
            leftHeight = Math.max(leftHeight, Thumbnail.getHeight());
        }

        linkTitleLayout = null;
        titleLayout = null;
        bodyLayout = null;
        rightHeight = 0;

        if (expanded && !TextUtils.isEmpty(linkTitle)) {
            linkTitleLayout = createLinkTitleLayout(linkTitleWidth);
            rightHeight += linkTitleLayout.getHeight() + ELEMENT_PADDING;
        }
        if (expanded && !TextUtils.isEmpty(title)) {
            titleLayout = createTitleLayout(titleWidth);
            rightHeight += titleLayout.getHeight() + ELEMENT_PADDING;
        }
        if (expanded && !TextUtils.isEmpty(bodyText)) {
            bodyLayout = createBodyLayout(titleWidth);
            rightHeight += bodyLayout.getHeight() + ELEMENT_PADDING;
        }
        if (!TextUtils.isEmpty(statusText)) {
            statusLayout = createStatusLayout(statusWidth);
            rightHeight += statusLayout.getHeight();
        }

        detailsLayout = null;
        if (detailsWidth > 0) {
            detailsLayout = makeBoringLayout(THING_STATUS, detailsText, detailsWidth,
                    Alignment.ALIGN_OPPOSITE);
        }

        minHeight = PADDING + Math.max(leftHeight, rightHeight) + PADDING;

        // Move from left to right one more time to record the body's
        // horizontal bounds for span hit-testing in onBodyTouchEvent().
        int x = PADDING;
        if (drawVotingArrows) {
            x += VotingArrows.getWidth(drawVotingArrows);
        }
        if (bodyLayout != null) {
            bodyBounds.left = x;
            x += bodyLayout.getWidth();
            bodyBounds.right = x;
        }

        // Move from top to bottom one more time to record the body's
        // vertical bounds; the text column is vertically centered.
        int y = (minHeight - rightHeight) / 2;
        if (linkTitleLayout != null) {
            y += linkTitleLayout.getHeight() + ELEMENT_PADDING;
        }
        if (isTopStatus() && statusLayout != null) {
            y += statusLayout.getHeight() + ELEMENT_PADDING;
        }
        if (bodyLayout != null) {
            bodyBounds.top = y;
            y += bodyLayout.getHeight();
            bodyBounds.bottom = y;
        }

        int heightMode = MeasureSpec.getMode(heightMeasureSpec);
        int heightSize = MeasureSpec.getSize(heightMeasureSpec);
        switch (heightMode) {
            case MeasureSpec.AT_MOST:
            case MeasureSpec.EXACTLY:
                measuredHeight = heightSize;
                break;
            case MeasureSpec.UNSPECIFIED:
                measuredHeight = minHeight;
                break;
        }

        setMeasuredDimension(measuredWidth, measuredHeight);
    }

    /** Comments show the status line above the body; other kinds show it below. */
    private boolean isTopStatus() {
        return kind == Kinds.KIND_COMMENT;
    }

    /** Link title is single-purpose: ellipsize to one measured line's width. */
    private Layout createLinkTitleLayout(int width) {
        CharSequence truncated = TextUtils.ellipsize(linkTitle,
                TEXT_PAINTS[THING_LINK_TITLE], width, TruncateAt.END);
        return makeStaticLayout(THING_LINK_TITLE, truncated, width);
    }

    private Layout createTitleLayout(int width) {
        return makeStaticLayout(THING_TITLE, title, width);
    }

    private Layout createBodyLayout(int width) {
        return makeStaticLayout(THING_BODY, bodyText, width);
    }

    private Layout createStatusLayout(int width) {
        return makeBoringLayout(THING_STATUS, statusText, width, Alignment.ALIGN_NORMAL);
    }

    /** Multi-line layout for titles and bodies (spacing 1.0, no extra). */
    private static Layout makeStaticLayout(int paint, CharSequence text, int width) {
        return new StaticLayout(text, TEXT_PAINTS[paint], width,
                Alignment.ALIGN_NORMAL, 1f, 0f, true);
    }

    /** Single-line layout for status/details, ellipsized at the end. */
    private static Layout makeBoringLayout(int paint, CharSequence text, int width,
            Alignment alignment) {
        BoringLayout.Metrics m = BoringLayout.isBoring(text, TEXT_PAINTS[paint]);
        return BoringLayout.make(text, TEXT_PAINTS[paint], width, alignment, 1f, 0f, m, true,
                TruncateAt.END, width);
    }

    /**
     * Draws the view using cumulative canvas translations that mirror the
     * left-to-right / top-to-bottom passes in onMeasure(): details column
     * first (right-aligned, vertically centered), then arrows, thumbnail,
     * and the text column.
     */
    @Override
    protected void onDraw(Canvas c) {
        if (detailsLayout != null) {
            int dx = c.getWidth() - PADDING - detailsLayout.getWidth();
            int dy = (c.getHeight() - detailsLayout.getHeight()) / 2;
            c.translate(dx, dy);
            detailsLayout.draw(c);
            // Undo the translation so the main pass starts from the origin.
            c.translate(-dx, -dy);
        }

        c.translate(PADDING * (1 + nesting), PADDING);
        if (linkTitleLayout != null) {
            linkTitleLayout.draw(c);
            c.translate(0, linkTitleLayout.getHeight() + ELEMENT_PADDING);
        }
        if (drawVotingArrows) {
            VotingArrows.draw(c, bitmap, scoreText, scoreBounds, likes, drawScore, true);
            c.translate(VotingArrows.getWidth(drawVotingArrows) + PADDING, 0);
        }
        if (!TextUtils.isEmpty(thumbnailUrl)) {
            Thumbnail.draw(c, bitmap);
            c.translate(Thumbnail.getWidth() + PADDING, 0);
        }

        // Vertically center the text column within the available height.
        c.translate(0, -PADDING + (minHeight - rightHeight) / 2);

        // Render the status at the top for comments.
        if (isTopStatus() && statusLayout != null) {
            statusLayout.draw(c);
            c.translate(0, statusLayout.getHeight() + ELEMENT_PADDING);
        }
        if (titleLayout != null) {
            titleLayout.draw(c);
            c.translate(0, titleLayout.getHeight() + ELEMENT_PADDING);
        }
        if (bodyLayout != null) {
            bodyLayout.draw(c);
            c.translate(0, bodyLayout.getHeight() + ELEMENT_PADDING);
        }
        // Render the status at the bottom for non-comments.
        if (!isTopStatus() && statusLayout != null) {
            statusLayout.draw(c);
        }
    }

    /** Gives the gesture detector (votes) first crack, then body spans. */
    @Override
    public boolean onTouchEvent(MotionEvent e) {
        return detector.onTouchEvent(e)
                || onBodyTouchEvent(e)
                || super.onTouchEvent(e);
    }

    /**
     * Hit-tests DOWN/UP events against ClickableSpans in the body text using
     * the bounds recorded in onMeasure(). Fires the span's onClick on UP.
     * Returns true when a span was hit (so the event is consumed).
     */
    private boolean onBodyTouchEvent(MotionEvent e) {
        int action = e.getAction();
        if ((action == MotionEvent.ACTION_DOWN || action == MotionEvent.ACTION_UP)
                && bodyText instanceof Spannable
                && bodyBounds != null
                && bodyBounds.contains(e.getX(), e.getY())) {
            float localX = e.getX() - bodyBounds.left;
            float localY = e.getY() - bodyBounds.top;

            int line = bodyLayout.getLineForVertical(Math.round(localY));
            int offset = bodyLayout.getOffsetForHorizontal(line, localX);
            float right = bodyBounds.left + bodyLayout.getLineRight(line);

            if (BuildConfig.DEBUG) {
                Log.d(TAG, "b: " + bodyBounds + " x: " + e.getX() + " y: " + e.getY());
            }

            // Touches past the end of the line's text don't count as hits.
            if (localX > right) {
                if (BuildConfig.DEBUG) {
                    Log.d(TAG, "lx: " + localX + " r: " + right);
                }
                return false;
            }

            Spannable bodySpan = (Spannable) bodyText;
            ClickableSpan[] spans = bodySpan.getSpans(offset, offset, ClickableSpan.class);
            if (spans != null && spans.length > 0) {
                if (action == MotionEvent.ACTION_UP) {
                    spans[0].onClick(this);
                }
                return true;
            }
        }
        return false;
    }

    public boolean onDown(MotionEvent e) {
        return VotingArrows.onDown(e, getTopOffset(), getLeftOffset(),
                drawVotingArrows, drawScore, true);
    }

    public boolean onSingleTapUp(MotionEvent e) {
        return VotingArrows.onSingleTapUp(e, getTopOffset(), getLeftOffset(),
                drawVotingArrows, drawScore, true, listener, thingId, likes);
    }

    /** Vertical offset of the arrows: below the link title when present. */
    private float getTopOffset() {
        return linkTitleLayout != null ? linkTitleLayout.getHeight() + ELEMENT_PADDING : 0;
    }

    /** Horizontal offset of the arrows: one padding step per nesting level. */
    private float getLeftOffset() {
        return nesting * PADDING;
    }

    public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
        return false;
    }

    public void onLongPress(MotionEvent e) {
    }

    public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
        return false;
    }

    public void onShowPress(MotionEvent e) {
    }
}
package raptor.pref; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.util.Date; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.eclipse.jface.preference.PreferenceConverter; import org.eclipse.jface.preference.PreferenceStore; import org.eclipse.jface.resource.StringConverter; import org.eclipse.jface.util.IPropertyChangeListener; import org.eclipse.jface.util.PropertyChangeEvent; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.FontData; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.graphics.RGB; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.widgets.Display; import raptor.Quadrant; import raptor.Raptor; import raptor.chat.ChatEvent; import raptor.chat.ChatType; import raptor.swt.BugPartners; import raptor.swt.GamesWindowItem; import raptor.swt.SWTUtils; import raptor.swt.SeekTableWindowItem; import raptor.swt.chess.controller.InactiveMouseAction; import raptor.swt.chess.controller.ObservingMouseAction; import raptor.swt.chess.controller.PlayingMouseAction; import raptor.util.RaptorStringUtils; /** * The RaptorPreferenceStore. Automatically loads and saves itself at * Raptor.USER_RAPTOR_DIR/raptor.properties . Had additional data type support. 
*/ public class RaptorPreferenceStore extends PreferenceStore implements PreferenceKeys { private static final Log LOG = LogFactory .getLog(RaptorPreferenceStore.class); public static final String PREFERENCE_PROPERTIES_FILE = "raptor.properties"; public static final File RAPTOR_PROPERTIES = new File( Raptor.USER_RAPTOR_DIR, "raptor.properties"); protected String defaultMonospacedFontName; protected String defaultFontName; protected int defaultLargeFontSize; protected int defaultSmallFontSize; protected int defaultMediumFontSize; protected int defaultTinyFontSize; private IPropertyChangeListener propertyChangeListener = new IPropertyChangeListener() { public void propertyChange(PropertyChangeEvent arg0) { if (arg0.getProperty().endsWith("color")) { Raptor.getInstance().getColorRegistry() .put( arg0.getProperty(), PreferenceConverter.getColor( RaptorPreferenceStore.this, arg0 .getProperty())); } else if (arg0.getProperty().endsWith("font")) { Raptor.getInstance().getFontRegistry() .put( arg0.getProperty(), PreferenceConverter.getFontDataArray( RaptorPreferenceStore.this, arg0 .getProperty())); } } }; public RaptorPreferenceStore() { super(); FileInputStream fileIn = null; FileOutputStream fileOut = null; try { LOG.info("Loading RaptorPreferenceStore store " + PREFERENCE_PROPERTIES_FILE); loadDefaults(); if (RAPTOR_PROPERTIES.exists()) { load(fileIn = new FileInputStream(RAPTOR_PROPERTIES)); } else { RAPTOR_PROPERTIES.getParentFile().mkdir(); RAPTOR_PROPERTIES.createNewFile(); save(fileOut = new FileOutputStream(RAPTOR_PROPERTIES), "Last saved on " + new Date()); } } catch (Exception e) { LOG.error("Error reading or writing to file ", e); throw new RuntimeException(e); } finally { if (fileIn != null) { try { fileIn.close(); } catch (Throwable t) { } } if (fileOut != null) { try { fileOut.flush(); fileOut.close(); } catch (Throwable t) { } } } addPropertyChangeListener(propertyChangeListener); LOG.info("Loaded preferences from " + RAPTOR_PROPERTIES.getAbsolutePath()); 
} /** * Returns the foreground color to use for the specified chat event. Returns * null if no special color should be used. */ public Color getColor(ChatEvent event) { Color result = null; String key = null; if (event.getType() == ChatType.CHANNEL_TELL) { key = CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + event.getType() + "-" + event.getChannel() + "-color"; } else if (event.getType() == ChatType.BUGWHO_AVAILABLE_TEAMS || event.getType() == ChatType.BUGWHO_GAMES || event.getType() == ChatType.BUGWHO_UNPARTNERED_BUGGERS) { key = CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.BUGWHO_ALL + "-color"; } else if (event.getType() == ChatType.NOTIFICATION_DEPARTURE) { key = CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.NOTIFICATION_ARRIVAL + "-color"; } else { key = CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + event.getType() + "-color"; } try { if (!Raptor.getInstance().getColorRegistry().hasValueFor(key)) { // We don't want the default color if not found we want to // return null, so use // StringConverter instead of PreferenceConverter. String value = getString(key); if (StringUtils.isNotBlank(value)) { RGB rgb = StringConverter.asRGB(value, null); if (rgb != null) { Raptor.getInstance().getColorRegistry().put(key, rgb); } else { return null; } } else { return null; } } result = Raptor.getInstance().getColorRegistry().get(key); } catch (Throwable t) { result = null; } return result; } /** * Returns the color for the specified key. Returns BLACK if the key was not * found. 
*/ public Color getColor(String key) { try { if (!Raptor.getInstance().getColorRegistry().hasValueFor(key)) { RGB rgb = PreferenceConverter.getColor(this, key); if (rgb != null) { Raptor.getInstance().getColorRegistry().put(key, rgb); } } return Raptor.getInstance().getColorRegistry().get(key); } catch (Throwable t) { LOG.error("Error in getColor(" + key + ") Returning black.", t); return new Color(Display.getCurrent(), new RGB(0, 0, 0)); } } public Rectangle getCurrentLayoutRectangle(String key) { key = "app-" + getString(APP_LAYOUT) + "-" + key; return getRectangle(key); } public int[] getCurrentLayoutSashWeights(String key) { key = "app-" + getString(APP_LAYOUT) + "-" + key; return getIntArray(key); } public RGB getDefaultColor(String key) { return PreferenceConverter.getDefaultColor(this, key); } public int[] getDefaultIntArray(String key) { return RaptorStringUtils.intArrayFromString(getDefaultString(key)); } public String[] getDefaultStringArray(String key) { return RaptorStringUtils.stringArrayFromString(getDefaultString(key)); } public String getDefauultMonospacedFont() { FontData[] fonts = Raptor.getInstance().getDisplay().getFontList(null, true); String[] preferredFontNames = null; String osName = System.getProperty("os.name"); if (osName.startsWith("Mac OS")) { preferredFontNames = SWTUtils.OSX_MONOSPACED_FONTS; } else if (osName.startsWith("Windows")) { preferredFontNames = SWTUtils.WINDOWS_MONOSPACED_FONTS; } else { preferredFontNames = SWTUtils.OTHER_MONOSPACED_FONTS; } String result = null; outer: for (int i = 0; i < preferredFontNames.length; i++) { for (FontData fontData : fonts) { if (fontData.getName().equalsIgnoreCase(preferredFontNames[i])) { result = preferredFontNames[i]; break outer; } } } if (result == null) { result = "Courier"; } return result; } /** * Returns the font for the specified key. Returns the default font if key * was not found. 
*/ public Font getFont(String key) { try { if (!Raptor.getInstance().getFontRegistry().hasValueFor(key)) { FontData[] fontData = PreferenceConverter.getFontDataArray( this, key); Raptor.getInstance().getFontRegistry().put(key, fontData); } return Raptor.getInstance().getFontRegistry().get(key); } catch (Throwable t) { LOG.error("Error in getFont(" + key + ") Returning default font.", t); return Raptor.getInstance().getFontRegistry().defaultFont(); } } public int[] getIntArray(String key) { return RaptorStringUtils.intArrayFromString(getString(key)); } public Point getPoint(String key) { return PreferenceConverter.getPoint(this, key); } public Quadrant getQuadrant(String key) { return Quadrant.valueOf(getString(key)); } public Rectangle getRectangle(String key) { return PreferenceConverter.getRectangle(this, key); } public String[] getStringArray(String key) { return RaptorStringUtils.stringArrayFromString(getString(key)); } public void loadDefaults() { defaultFontName = Raptor.getInstance().getFontRegistry().defaultFont() .getFontData()[0].getName(); defaultMonospacedFontName = getDefauultMonospacedFont(); setDefaultMonitorBasedSizes(); // Action setDefault(ACTION_SEPARATOR_SEQUENCE, 400); // App settings. 
setDefault(APP_NAME, "Raptor .97"); setDefault(APP_SASH_WIDTH, 8); PreferenceConverter.setDefault(this, APP_PING_FONT, new FontData[] { new FontData(defaultFontName, defaultSmallFontSize, 0) }); PreferenceConverter.setDefault(this, APP_PING_COLOR, new RGB(0, 0, 0)); PreferenceConverter.setDefault(this, APP_STATUS_BAR_FONT, new FontData[] { new FontData(defaultFontName, defaultSmallFontSize, 0) }); PreferenceConverter.setDefault(this, APP_STATUS_BAR_COLOR, new RGB(0, 0, 0)); setDefault(APP_HOME_URL, "http://code.google.com/p/raptor-chess-interface/"); setDefault(APP_SOUND_ENABLED, true); setDefault(APP_USER_TAGS, "+Partner,-Partner,Cool,Dupe,Friend,Jerk,Lagger,Noob,Premover,Troll,Strange"); setDefault(APP_IS_LOGGING_GAMES, true); setDefault(APP_LAYOUT, "Layout1"); setDefault(APP_OPEN_LINKS_IN_EXTERNAL_BROWSER, false); setDefault(APP_BROWSER_QUADRANT, Quadrant.III); setDefault(APP_PGN_RESULTS_QUADRANT, Quadrant.III); setDefault(APP_CHESS_BOARD_QUADRANT, Quadrant.III); setDefault(APP_CHESS_BOARD_SECONDARY_QUADRANT, Quadrant.V); setDefault(APP_OPEN_LINKS_IN_EXTERNAL_BROWSER, false); setDefault(APP_IS_LAUNCHNG_HOME_PAGE, true); setDefault(APP_WINDOW_ITEM_POLL_INTERVAL, 5); // Layout 1 settings. 
setDefault(APP_WINDOW_BOUNDS, new Rectangle(0, 0, -1, -1)); setDefault(APP_QUAD9_QUAD12345678_SASH_WEIGHTS, new int[] { 10, 90 }); setDefault(APP_QUAD1_QUAD2345678_SASH_WEIGHTS, new int[] { 10, 90 }); setDefault(APP_QUAD2345_QUAD678_SASH_WEIGHTS, new int[] { 70, 30 }); setDefault(APP_QUAD2_QUAD3_QUAD4_QUAD5_SASH_WEIGHTS, new int[] { 10, 40, 10, 40 }); setDefault(APP_QUAD67_QUAD8_SASH_WEIGHTS, new int[] { 70, 30 }); setDefault(APP_QUAD6_QUAD7_SASH_WEIGHTS, new int[] { 50, 50 }); // Board setDefault(BOARD_ALLOW_MOUSE_WHEEL_NAVIGATION_WHEEL_PLAYING, false); setDefault(BOARD_SHOW_PLAYING_GAME_STATS_ON_GAME_END, true); setDefault(BOARD_PLAY_CHALLENGE_SOUND, true); setDefault(BOARD_PLAY_ABORT_REQUEST_SOUND, true); setDefault(BOARD_PLAY_DRAW_OFFER_SOUND, true); setDefault(BOARD_USER_MOVE_INPUT_MODE, "DragAndDrop"); setDefault(BOARD_SHOW_BUGHOUSE_SIDE_UP_TIME, true); setDefault(BOARD_PIECE_JAIL_LABEL_PERCENTAGE, 40); setDefault(BOARD_COOLBAR_MODE, true); setDefault(BOARD_COOLBAR_ON_TOP, true); setDefault(BOARD_CHESS_SET_NAME, "Wiki"); setDefault(BOARD_SQUARE_BACKGROUND_NAME, "GreenMarble"); setDefault(BOARD_IS_SHOW_COORDINATES, true); setDefault(BOARD_PIECE_SIZE_ADJUSTMENT, .06); setDefault(BOARD_IS_SHOWING_PIECE_JAIL, false); setDefault(BOARD_CLOCK_SHOW_MILLIS_WHEN_LESS_THAN, Integer.MIN_VALUE); setDefault(BOARD_CLOCK_SHOW_SECONDS_WHEN_LESS_THAN, 1000L * 60L * 60L + 1L); setDefault(BOARD_IS_PLAYING_10_SECOND_COUNTDOWN_SOUNDS, true); setDefault(BOARD_PREMOVE_ENABLED, true); setDefault(BOARD_PLAY_MOVE_SOUND_WHEN_OBSERVING, true); setDefault(BOARD_IS_SHOWING_PIECE_UNICODE_CHARS, true); setDefault(BOARD_QUEUED_PREMOVE_ENABLED, false); setDefault(BOARD_IS_USING_CROSSHAIRS_CURSOR, false); setDefault(BOARD_LAYOUT, "raptor.swt.chess.layout.RightOrientedLayout"); setDefault(BOARD_TAKEOVER_INACTIVE_GAMES, true); setDefault(BOARD_PIECE_JAIL_SHADOW_ALPHA, 30); setDefault(BOARD_PIECE_SHADOW_ALPHA, 60); setDefault(BOARD_COORDINATES_SIZE_PERCENTAGE, 26); 
setDefault(BOARD_ANNOUNCE_CHECK_WHEN_OPPONENT_CHECKS_ME, false); setDefault(BOARD_ANNOUNCE_CHECK_WHEN_I_CHECK_OPPONENT, false); setDefault(BOARD_SPEAK_MOVES_OPP_MAKES, false); setDefault(BOARD_SPEAK_MOVES_I_MAKE, false); setDefault(BOARD_SPEAK_WHEN_OBSERVING, false); setDefault(BOARD_SPEAK_RESULTS, false); setDefault(BOARD_IGNORE_OBSERVED_GAMES_IF_PLAYING, false); setDefault(PLAYING_CONTROLLER + LEFT_MOUSE_BUTTON_ACTION, PlayingMouseAction.None.toString()); setDefault(PLAYING_CONTROLLER + RIGHT_MOUSE_BUTTON_ACTION, PlayingMouseAction.PopupMenu.toString()); setDefault(PLAYING_CONTROLLER + MIDDLE_MOUSE_BUTTON_ACTION, PlayingMouseAction.SmartMove.toString()); setDefault(PLAYING_CONTROLLER + MISC1_MOUSE_BUTTON_ACTION, PlayingMouseAction.None.toString()); setDefault(PLAYING_CONTROLLER + MISC2_MOUSE_BUTTON_ACTION, PlayingMouseAction.None.toString()); setDefault(PLAYING_CONTROLLER + LEFT_DOUBLE_CLICK_MOUSE_BUTTON_ACTION, PlayingMouseAction.None.toString()); setDefault(OBSERVING_CONTROLLER + LEFT_MOUSE_BUTTON_ACTION, ObservingMouseAction.MakePrimaryGame.toString()); setDefault(OBSERVING_CONTROLLER + RIGHT_MOUSE_BUTTON_ACTION, ObservingMouseAction.AddGameChatTab.toString()); setDefault(OBSERVING_CONTROLLER + MIDDLE_MOUSE_BUTTON_ACTION, ObservingMouseAction.MatchWinner.toString()); setDefault(OBSERVING_CONTROLLER + MISC1_MOUSE_BUTTON_ACTION, ObservingMouseAction.None.toString()); setDefault(OBSERVING_CONTROLLER + MISC2_MOUSE_BUTTON_ACTION, ObservingMouseAction.None.toString()); setDefault( OBSERVING_CONTROLLER + LEFT_DOUBLE_CLICK_MOUSE_BUTTON_ACTION, ObservingMouseAction.None.toString()); setDefault(INACTIVE_CONTROLLER + LEFT_MOUSE_BUTTON_ACTION, InactiveMouseAction.None.toString()); setDefault(INACTIVE_CONTROLLER + RIGHT_MOUSE_BUTTON_ACTION, InactiveMouseAction.None.toString()); setDefault(INACTIVE_CONTROLLER + MIDDLE_MOUSE_BUTTON_ACTION, InactiveMouseAction.Rematch.toString()); setDefault(INACTIVE_CONTROLLER + MISC1_MOUSE_BUTTON_ACTION, 
InactiveMouseAction.None.toString()); setDefault(INACTIVE_CONTROLLER + MISC2_MOUSE_BUTTON_ACTION, InactiveMouseAction.None.toString()); setDefault(INACTIVE_CONTROLLER + LEFT_DOUBLE_CLICK_MOUSE_BUTTON_ACTION, InactiveMouseAction.None.toString()); PreferenceConverter.setDefault(this, BOARD_BACKGROUND_COLOR, new RGB(0, 0, 0)); PreferenceConverter.setDefault(this, BOARD_COORDINATES_COLOR, new RGB( 0, 0, 0)); PreferenceConverter.setDefault(this, BOARD_ACTIVE_CLOCK_COLOR, new RGB( 0, 255, 0)); PreferenceConverter.setDefault(this, BOARD_INACTIVE_CLOCK_COLOR, new RGB(128, 128, 128)); PreferenceConverter.setDefault(this, BOARD_CONTROL_COLOR, new RGB(128, 128, 128)); PreferenceConverter.setDefault(this, BOARD_LAG_OVER_20_SEC_COLOR, new RGB(255, 0, 0)); PreferenceConverter.setDefault(this, BOARD_PIECE_JAIL_LABEL_COLOR, new RGB(0, 255, 0)); PreferenceConverter.setDefault(this, BOARD_PIECE_JAIL_BACKGROUND_COLOR, new RGB(0, 0, 0)); PreferenceConverter.setDefault(this, BOARD_COORDINATES_FONT, new FontData[] { new FontData(defaultFontName, defaultMediumFontSize, 0) }); PreferenceConverter.setDefault(this, BOARD_CLOCK_FONT, new FontData[] { new FontData(defaultMonospacedFontName, 24, SWT.BOLD) }); PreferenceConverter.setDefault(this, BOARD_LAG_FONT, new FontData[] { new FontData(defaultFontName, defaultTinyFontSize, 0) }); PreferenceConverter.setDefault(this, BOARD_PLAYER_NAME_FONT, new FontData[] { new FontData(defaultFontName, defaultLargeFontSize, 0) }); PreferenceConverter.setDefault(this, BOARD_PIECE_JAIL_FONT, new FontData[] { new FontData(defaultFontName, defaultMediumFontSize, 0) }); PreferenceConverter.setDefault(this, BOARD_OPENING_DESC_FONT, new FontData[] { new FontData(defaultFontName, defaultTinyFontSize, 0) }); PreferenceConverter.setDefault(this, BOARD_STATUS_FONT, new FontData[] { new FontData(defaultFontName, defaultTinyFontSize, 0) }); PreferenceConverter.setDefault(this, BOARD_GAME_DESCRIPTION_FONT, new FontData[] { new FontData(defaultFontName, 
defaultTinyFontSize, 0) }); PreferenceConverter.setDefault(this, BOARD_PREMOVES_FONT, new FontData[] { new FontData(defaultFontName, defaultTinyFontSize, 0) }); // BugArena setDefault(BUG_ARENA_PARTNERS_INDEX, 0); setDefault(BUG_ARENA_MAX_PARTNERS_INDEX, BugPartners.getRatings().length - 1); setDefault(BUG_ARENA_TEAMS_INDEX, 0); setDefault(BUG_ARENA_TEAMS_IS_RATED, true); setDefault(BUG_ARENA_SELECTED_TAB, 0); // SeekTable setDefault(SEEK_TABLE_RATINGS_INDEX, 0); setDefault(SEEK_TABLE_MAX_RATINGS_INDEX, SeekTableWindowItem .getRatings().length - 1); setDefault(SEEK_TABLE_RATED_INDEX, 0); setDefault(SEEK_TABLE_SHOW_COMPUTERS, true); setDefault(SEEK_TABLE_SHOW_LIGHTNING, true); setDefault(SEEK_TABLE_SHOW_BLITZ, true); setDefault(SEEK_TABLE_SHOW_STANDARD, true); setDefault(SEEK_TABLE_SHOW_CRAZYHOUSE, true); setDefault(SEEK_TABLE_SHOW_FR, true); setDefault(SEEK_TABLE_SHOW_WILD, true); setDefault(SEEK_TABLE_SHOW_ATOMIC, true); setDefault(SEEK_TABLE_SHOW_SUICIDE, true); setDefault(SEEK_TABLE_SHOW_LOSERS, true); setDefault(SEEK_TABLE_SHOW_UNTIMED, true); setDefault(SEEK_TABLE_SELECTED_TAB, 2); // Games table setDefault(GAMES_TABLE_SELECTED_TAB, 1); setDefault(GAMES_TABLE_RATINGS_INDEX, 0); setDefault(GAMES_TABLE_MAX_RATINGS_INDEX, GamesWindowItem.getRatings().length - 1); setDefault(GAMES_TABLE_RATED_INDEX, 0); setDefault(GAMES_TABLE_SHOW_BUGHOUSE, true); setDefault(GAMES_TABLE_SHOW_LIGHTNING, true); setDefault(GAMES_TABLE_SHOW_BLITZ, true); setDefault(GAMES_TABLE_SHOW_STANDARD, true); setDefault(GAMES_TABLE_SHOW_CRAZYHOUSE, true); setDefault(GAMES_TABLE_SHOW_EXAMINED, true); setDefault(GAMES_TABLE_SHOW_WILD, true); setDefault(GAMES_TABLE_SHOW_ATOMIC, true); setDefault(GAMES_TABLE_SHOW_SUICIDE, true); setDefault(GAMES_TABLE_SHOW_LOSERS, true); setDefault(GAMES_TABLE_SHOW_UNTIMED, true); setDefault(GAMES_TABLE_SHOW_NONSTANDARD, true); setDefault(GAMES_TABLE_SHOW_PRIVATE, true); // Arrows PreferenceConverter.setDefault(this, ARROW_OBS_OPP_COLOR, new RGB(255, 0, 255)); 
PreferenceConverter .setDefault(this, ARROW_MY_COLOR, new RGB(0, 0, 255)); PreferenceConverter.setDefault(this, ARROW_PREMOVE_COLOR, new RGB(0, 0, 255)); PreferenceConverter.setDefault(this, ARROW_OBS_COLOR, new RGB(0, 0, 255)); setDefault(ARROW_SHOW_ON_OBS_AND_OPP_MOVES, true); setDefault(ARROW_SHOW_ON_MOVE_LIST_MOVES, true); setDefault(ARROW_SHOW_ON_MY_PREMOVES, true); setDefault(ARROW_SHOW_ON_MY_MOVES, false); setDefault(ARROW_ANIMATION_DELAY, 300L); setDefault(ARROW_FADE_AWAY_MODE, true); setDefault(ARROW_WIDTH_PERCENTAGE, 15); // Highlights PreferenceConverter.setDefault(this, HIGHLIGHT_OBS_OPP_COLOR, new RGB( 255, 0, 255)); PreferenceConverter.setDefault(this, HIGHLIGHT_MY_COLOR, new RGB(0, 0, 255)); PreferenceConverter.setDefault(this, HIGHLIGHT_PREMOVE_COLOR, new RGB( 0, 0, 255)); PreferenceConverter.setDefault(this, HIGHLIGHT_OBS_COLOR, new RGB(0, 0, 255)); setDefault(HIGHLIGHT_SHOW_ON_OBS_AND_OPP_MOVES, true); setDefault(HIGHLIGHT_SHOW_ON_MOVE_LIST_MOVES, true); setDefault(HIGHLIGHT_SHOW_ON_MY_PREMOVES, true); setDefault(HIGHLIGHT_SHOW_ON_MY_MOVES, false); setDefault(HIGHLIGHT_FADE_AWAY_MODE, false); setDefault(HIGHLIGHT_ANIMATION_DELAY, 300L); setDefault(HIGHLIGHT_WIDTH_PERCENTAGE, 3); // Game Results PreferenceConverter.setDefault(this, RESULTS_COLOR, new RGB(255, 0, 0)); PreferenceConverter.setDefault(this, RESULTS_FONT, new FontData[] { new FontData(defaultMonospacedFontName, 40, SWT.BOLD) }); setDefault(RESULTS_IS_SHOWING, true); setDefault(RESULTS_FADE_AWAY_MODE, true); setDefault(RESULTS_ANIMATION_DELAY, 500L); setDefault(RESULTS_WIDTH_PERCENTAGE, 80); // Chat setDefault(CHAT_MAX_CONSOLE_CHARS, 500000); setDefault(CHAT_TIMESTAMP_CONSOLE, false); setDefault(CHAT_TIMESTAMP_CONSOLE_FORMAT, "'['hh:mma']'"); setDefault(CHAT_IS_PLAYING_CHAT_ON_PTELL, true); setDefault(CHAT_IS_PLAYING_CHAT_ON_PERSON_TELL, true); setDefault(CHAT_IS_SMART_SCROLL_ENABLED, true); setDefault(CHAT_OPEN_CHANNEL_TAB_ON_CHANNEL_TELLS, false); 
setDefault(CHAT_OPEN_PERSON_TAB_ON_PERSON_TELLS, false); setDefault(CHAT_OPEN_PARTNER_TAB_ON_PTELLS, false); setDefault(CHAT_REMOVE_SUB_TAB_MESSAGES_FROM_MAIN_TAB, true); setDefault(CHAT_UNDERLINE_URLS, true); setDefault(CHAT_UNDERLINE_QUOTED_TEXT, true); setDefault(CHAT_UNDERLINE_SINGLE_QUOTES, false); setDefault(CHAT_PLAY_NOTIFICATION_SOUND_ON_ARRIVALS, true); setDefault(CHAT_PLAY_NOTIFICATION_SOUND_ON_DEPARTURES, false); PreferenceConverter.setDefault(this, CHAT_INPUT_FONT, new FontData[] { new FontData(defaultMonospacedFontName, defaultLargeFontSize, 0) }); PreferenceConverter.setDefault(this, CHAT_OUTPUT_FONT, new FontData[] { new FontData(defaultMonospacedFontName, defaultLargeFontSize, 0) }); PreferenceConverter.setDefault(this, CHAT_PROMPT_FONT, new FontData[] { new FontData(defaultMonospacedFontName, defaultLargeFontSize, 0) }); PreferenceConverter.setDefault(this, CHAT_INPUT_BACKGROUND_COLOR, new RGB(0, 0, 0)); PreferenceConverter.setDefault(this, CHAT_CONSOLE_BACKGROUND_COLOR, new RGB(0, 0, 0)); PreferenceConverter.setDefault(this, CHAT_INPUT_DEFAULT_TEXT_COLOR, new RGB(128, 128, 128)); PreferenceConverter.setDefault(this, CHAT_OUTPUT_BACKGROUND_COLOR, new RGB(255, 255, 255)); PreferenceConverter.setDefault(this, CHAT_OUTPUT_TEXT_COLOR, new RGB(0, 0, 0)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.CHALLENGE + "-color", new RGB(100, 149, 237)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.CSHOUT + "-color", new RGB(221, 160, 221)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.SHOUT + "-color", new RGB(221, 160, 221)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.KIBITZ + "-color", new RGB(100, 149, 237)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.WHISPER + "-color", new RGB(100, 149, 237)); PreferenceConverter.setDefault(this, 
CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.OUTBOUND + "-color", new RGB(128, 128, 128)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.PARTNER_TELL + "-color", new RGB(255, 0, 0)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.DRAW_REQUEST + "-color", new RGB(255, 0, 0)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.ABORT_REQUEST + "-color", new RGB(255, 0, 0)); PreferenceConverter .setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.TELL + "-color", new RGB(255, 0, 0)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.CHANNEL_TELL + "-" + 1 + "-color", new RGB(255, 200, 0)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.CHANNEL_TELL + "-" + 4 + "-color", new RGB(0, 255, 0)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.CHANNEL_TELL + "-" + 50 + "-color", new RGB(255, 175, 175)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.CHANNEL_TELL + "-" + 53 + "-color", new RGB(255, 0, 255)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.INTERNAL + "-color", new RGB(255, 0, 0)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.PLAYING_STATISTICS + "-color", new RGB(100, 149, 237)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.QTELL + "-color", new RGB(128, 128, 128)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.FINGER + "-color", new RGB(128, 128, 128)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.HISTORY + "-color", new RGB(128, 128, 128)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.GAMES + "-color", new RGB(128, 128, 128)); 
PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.BUGWHO_ALL + "-color", new RGB(128, 128, 128)); PreferenceConverter.setDefault(this, CHAT_CHAT_EVENT_TYPE_COLOR_APPEND_TO + ChatType.NOTIFICATION_ARRIVAL + "-color", new RGB( 255, 0, 0)); PreferenceConverter.setDefault(this, CHAT_PROMPT_COLOR, new RGB(128, 128, 128)); PreferenceConverter.setDefault(this, CHAT_QUOTE_UNDERLINE_COLOR, new RGB(0, 255, 0)); PreferenceConverter.setDefault(this, CHAT_LINK_UNDERLINE_COLOR, new RGB(11, 133, 238)); // Bug house buttons settings. PreferenceConverter.setDefault(this, BUG_BUTTONS_FONT, new FontData[] { new FontData(defaultFontName, defaultSmallFontSize, SWT.BOLD) }); // Bug house setDefault(BUGHOUSE_PLAYING_OPEN_PARTNER_BOARD, true); setDefault(BUGHOUSE_OBSERVING_OPEN_PARTNER_BOARD, true); setDefault(BUGHOUSE_SPEAK_COUNTDOWN_ON_PARTNER_BOARD, true); setDefault(BUGHOUSE_SPEAK_PARTNER_TELLS, true); setDefault(BUGHOUSE_IS_PLAYING_PARTNERSHIP_OFFERED_SOUND, true); // Fics setDefault(FICS_KEEP_ALIVE, false); setDefault(FICS_AUTO_CONNECT, false); setDefault(FICS_LOGIN_SCRIPT, "set seek 0\nset autoflag 1\n"); setDefault(FICS_AUTO_CONNECT, false); setDefault(FICS_PROFILE, "Primary"); setDefault(FICS_CLOSE_TABS_ON_DISCONNECT, false); setDefault(FICS_SHOW_BUGBUTTONS_ON_PARTNERSHIP, true); setDefault(FICS_NO_WRAP_ENABLED, true); setDefault(FICS_CHANNEL_COMMANDS, "+channel $channel,-channel $channel,in $channel"); setDefault( FICS_PERSON_COMMANDS, "finger $person,follow $person,history $person,joural $person,partner $person," + "observe $person,oldpstat $userName $person,pstat $userName $person," + "stored $person,variables $person,separator," + "+censor $person,-censor $person,+gnotify $person,-gnotify $person,+noplay $person,-noplay $person,+notify $person,-notify $person,separator," + "match $person 1 0,match $person 3 0,match $person 5 0,match $person 15 0"); setDefault(FICS_GAME_COMMANDS, "observe $gameId,allobservers $gameId,moves $gameId"); 
setDefault( FICS_REGULAR_EXPRESSIONS_TO_BLOCK, "defprompt set\\.,gameinfo set\\.,ms set\\.,startpos set\\.," + "pendinfo set\\.,nowrap set\\.,smartmove set\\.,premove set\\.," + "Style 12 set\\.,Your prompt will now not show the time\\.," + "You will not see seek ads\\.,You will not see seek ads.\\.," + "Auto-flagging enabled\\.,lock set\\.,set seek 0,set autoflag 1," + "allresults set\\.,Bell off\\.,set interface Raptor .*," + "You are not examining or setting up a game\\."); setDefault(FICS_SEEK_GAME_TYPE, ""); setDefault(FICS_SEEK_MINUTES, "5"); setDefault(FICS_SEEK_INC, "0"); setDefault(FICS_SEEK_MIN_RATING, "Any"); setDefault(FICS_SEEK_MAX_RATING, "Any"); setDefault(FICS_SEEK_MANUAL, false); setDefault(FICS_SEEK_FORMULA, true); setDefault(FICS_SEEK_RATED, true); setDefault(FICS_SEEK_COLOR, ""); setDefault(FICS_KEEP_ALIVE_COMMAND, "set busy in another window"); // Fics Primary setDefault(FICS_PRIMARY_USER_NAME, ""); setDefault(FICS_PRIMARY_PASSWORD, ""); setDefault(FICS_PRIMARY_IS_NAMED_GUEST, false); setDefault(FICS_PRIMARY_IS_ANON_GUEST, false); setDefault(FICS_PRIMARY_SERVER_URL, "freechess.org"); setDefault(FICS_PRIMARY_PORT, 5000); setDefault(FICS_PRIMARY_TIMESEAL_ENABLED, true); // Fics Secondary setDefault(FICS_SECONDARY_USER_NAME, ""); setDefault(FICS_SECONDARY_PASSWORD, ""); setDefault(FICS_SECONDARY_IS_NAMED_GUEST, false); setDefault(FICS_SECONDARY_IS_ANON_GUEST, false); setDefault(FICS_SECONDARY_SERVER_URL, "freechess.org"); setDefault(FICS_SECONDARY_PORT, 5000); setDefault(FICS_SECONDARY_TIMESEAL_ENABLED, true); // Fics Tertiary setDefault(FICS_TERTIARY_USER_NAME, ""); setDefault(FICS_TERTIARY_PASSWORD, ""); setDefault(FICS_TERTIARY_IS_NAMED_GUEST, false); setDefault(FICS_TERTIARY_IS_ANON_GUEST, false); setDefault(FICS_TERTIARY_SERVER_URL, "freechess.org"); setDefault(FICS_TERTIARY_PORT, 5000); setDefault(FICS_TERTIARY_TIMESEAL_ENABLED, true); // Bics setDefault(BICS_KEEP_ALIVE, false); setDefault(BICS_AUTO_CONNECT, false); 
setDefault(BICS_LOGIN_SCRIPT, "set autoflag 1\n\n"); setDefault(BICS_AUTO_CONNECT, false); setDefault(BICS_PROFILE, "Primary"); setDefault(BICS_CLOSE_TABS_ON_DISCONNECT, false); setDefault(BICS_SHOW_BUGBUTTONS_ON_PARTNERSHIP, true); setDefault(BICS_KEEP_ALIVE_COMMAND, "set busy in another window"); setDefault(BICS_CHANNEL_COMMANDS, "+channel $channel,-channel $channel,in $channel"); setDefault( BICS_PERSON_COMMANDS, "finger $person,follow $person,history $person,joural $person,partner $person," + "observe $person,oldpstat $userName $person,pstat $userName $person," + "stored $person,variables $person,separator," + "+censor $person,-censor $person,+gnotify $person,-gnotify $person,+noplay $person,-noplay $person,+notify $person,-notify $person,separator," + "match $person 1 0 zh,match $person 3 0 zh,match $person 1 0 zh fr,match $person 3 0 zh fr,match $person 2 0 bughouse," + "match $person 2 0 bughouse fr, match $person 2 0 bughouse w5"); setDefault(BICS_GAME_COMMANDS, "observe $gameId,allobservers $gameId,moves $gameId"); setDefault( BICS_REGULAR_EXPRESSIONS_TO_BLOCK, "defprompt set\\.,gameinfo set\\.,ms set\\.,startpos set\\.," + "pendinfo set\\.,nowrap set\\.,smartmove set\\.,premove set\\.," + "Style 12 set\\.,Your prompt will now not show the time\\.," + "You will not see seek ads\\.,You will not see seek ads.\\.," + "Auto-flagging enabled\\.,lock set\\."); // Bics Primary setDefault(BICS_PRIMARY_USER_NAME, ""); setDefault(BICS_PRIMARY_PASSWORD, ""); setDefault(BICS_PRIMARY_IS_NAMED_GUEST, false); setDefault(BICS_PRIMARY_IS_ANON_GUEST, false); setDefault(BICS_PRIMARY_SERVER_URL, "chess.sipay.ru"); setDefault(BICS_PRIMARY_PORT, 5000); setDefault(BICS_PRIMARY_TIMESEAL_ENABLED, true); // Bics Secondary setDefault(BICS_SECONDARY_USER_NAME, ""); setDefault(BICS_SECONDARY_PASSWORD, ""); setDefault(BICS_SECONDARY_IS_NAMED_GUEST, false); setDefault(BICS_SECONDARY_IS_ANON_GUEST, false); setDefault(BICS_SECONDARY_SERVER_URL, "chess.sipay.ru"); 
setDefault(BICS_SECONDARY_PORT, 5000); setDefault(BICS_SECONDARY_TIMESEAL_ENABLED, true); // Bics Tertiary setDefault(BICS_TERTIARY_USER_NAME, ""); setDefault(BICS_TERTIARY_PASSWORD, ""); setDefault(BICS_TERTIARY_IS_NAMED_GUEST, false); setDefault(BICS_TERTIARY_IS_ANON_GUEST, false); setDefault(BICS_TERTIARY_SERVER_URL, "chess.sipay.ru"); setDefault(BICS_TERTIARY_PORT, 5000); setDefault(BICS_TERTIARY_TIMESEAL_ENABLED, true); // Quadrant settings. setDefault("fics-" + MAIN_TAB_QUADRANT, Quadrant.VI); setDefault("fics-" + CHANNEL_TAB_QUADRANT, Quadrant.VI); setDefault("fics-" + PERSON_TAB_QUADRANT, Quadrant.VI); setDefault("fics-" + REGEX_TAB_QUADRANT, Quadrant.VI); setDefault("fics-" + PARTNER_TELL_TAB_QUADRANT, Quadrant.VI); setDefault("fics-" + CHESS_BOARD_QUADRANT, Quadrant.III); setDefault("fics-" + CHESS_BOARD_SECONDARY_QUADRANT, Quadrant.V); setDefault("fics-" + BUG_WHO_QUADRANT, Quadrant.VIII); setDefault("fics-" + SEEK_TABLE_QUADRANT, Quadrant.VIII); setDefault("fics-" + BUG_BUTTONS_QUADRANT, Quadrant.II); setDefault("fics-" + GAME_CHAT_TAB_QUADRANT, Quadrant.VI); setDefault("fics-" + GAMES_TAB_QUADRANT, Quadrant.VIII); setDefault("fics2-" + MAIN_TAB_QUADRANT, Quadrant.VII); setDefault("fics2-" + CHANNEL_TAB_QUADRANT, Quadrant.VII); setDefault("fics2-" + PERSON_TAB_QUADRANT, Quadrant.VII); setDefault("fics2-" + REGEX_TAB_QUADRANT, Quadrant.VII); setDefault("fics2-" + PARTNER_TELL_TAB_QUADRANT, Quadrant.VII); setDefault("fics2-" + CHESS_BOARD_QUADRANT, Quadrant.III); setDefault("fics2-" + CHESS_BOARD_SECONDARY_QUADRANT, Quadrant.V); setDefault("fics2-" + BUG_WHO_QUADRANT, Quadrant.VIII); setDefault("fics2-" + SEEK_TABLE_QUADRANT, Quadrant.VIII); setDefault("fics2-" + BUG_BUTTONS_QUADRANT, Quadrant.II); setDefault("fics2-" + GAME_CHAT_TAB_QUADRANT, Quadrant.VII); setDefault("fics2-" + GAMES_TAB_QUADRANT, Quadrant.VIII); setDefault("bics-" + MAIN_TAB_QUADRANT, Quadrant.VI); setDefault("bics-" + CHANNEL_TAB_QUADRANT, Quadrant.VI); setDefault("bics-" + 
PERSON_TAB_QUADRANT, Quadrant.VI);
		// Remaining "bics" (bughouse ICS) quadrant defaults, mirroring the fics layout.
		setDefault("bics-" + REGEX_TAB_QUADRANT, Quadrant.VI);
		setDefault("bics-" + PARTNER_TELL_TAB_QUADRANT, Quadrant.VI);
		setDefault("bics-" + CHESS_BOARD_QUADRANT, Quadrant.III);
		setDefault("bics-" + CHESS_BOARD_SECONDARY_QUADRANT, Quadrant.V);
		setDefault("bics-" + BUG_WHO_QUADRANT, Quadrant.VIII);
		setDefault("bics-" + SEEK_TABLE_QUADRANT, Quadrant.VIII);
		setDefault("bics-" + BUG_BUTTONS_QUADRANT, Quadrant.II);
		setDefault("bics-" + GAME_CHAT_TAB_QUADRANT, Quadrant.VI);
		setDefault("bics-" + GAMES_TAB_QUADRANT, Quadrant.VIII);

		// "bics2" (secondary bics connection) quadrant defaults.
		setDefault("bics2-" + MAIN_TAB_QUADRANT, Quadrant.VII);
		setDefault("bics2-" + CHANNEL_TAB_QUADRANT, Quadrant.VII);
		setDefault("bics2-" + PERSON_TAB_QUADRANT, Quadrant.VII);
		setDefault("bics2-" + REGEX_TAB_QUADRANT, Quadrant.VII);
		setDefault("bics2-" + PARTNER_TELL_TAB_QUADRANT, Quadrant.VII);
		setDefault("bics2-" + CHESS_BOARD_QUADRANT, Quadrant.III);
		setDefault("bics2-" + CHESS_BOARD_SECONDARY_QUADRANT, Quadrant.V);
		setDefault("bics2-" + BUG_WHO_QUADRANT, Quadrant.VIII);
		setDefault("bics2-" + SEEK_TABLE_QUADRANT, Quadrant.VIII);
		setDefault("bics2-" + BUG_BUTTONS_QUADRANT, Quadrant.II);
		setDefault("bics2-" + GAME_CHAT_TAB_QUADRANT, Quadrant.VII);
		setDefault("bics2-" + GAMES_TAB_QUADRANT, Quadrant.VIII);

		setDefault(TIMESEAL_INIT_STRING, "TIMESTAMP|iv|OpenSeal|");

		LOG.info("Loaded defaults " + PREFERENCE_PROPERTIES_FILE);
	}

	/**
	 * Persists the current preference values to the raptor properties file,
	 * stamping the file header with the save date. Wraps any IOException in a
	 * RuntimeException after logging it.
	 */
	@Override
	public void save() {
		FileOutputStream fileOut = null;
		try {
			save(fileOut = new FileOutputStream(RAPTOR_PROPERTIES),
					"Last saved on " + new Date());
			fileOut.flush();
		} catch (IOException ioe) {
			LOG.error("Error saving raptor preferences:", ioe);
			throw new RuntimeException(ioe);
		} finally {
			// NOTE(review): if the FileOutputStream constructor itself throws,
			// fileOut is still null here; the resulting NPE is swallowed by the
			// catch (Throwable) below.
			try {
				fileOut.close();
			} catch (Throwable t) {
			}
		}
	}

	// setDefault overloads: store defaults for value types the base preference
	// store does not handle natively, via JFace PreferenceConverter or a
	// string encoding.

	/** Sets the default for key from the font's FontData. */
	public void setDefault(String key, Font font) {
		PreferenceConverter.setValue(this, key, font.getFontData());
	}

	public void setDefault(String key, FontData[] fontData) {
		PreferenceConverter.setValue(this, key, fontData);
	}

	/** Sets the default for key as a string-encoded int array. */
	public void setDefault(String key, int[] values) {
		setDefault(key, RaptorStringUtils.toString(values));
	}

	public void setDefault(String key, Point point) {
		PreferenceConverter.setValue(this, key, point);
	}

	/** Sets the default for key as the quadrant's enum name. */
	public void setDefault(String key, Quadrant quadrant) {
		setDefault(key, quadrant.name());
	}

	public void setDefault(String key, Rectangle rectangle) {
		// NOTE(review): unlike the sibling setDefault overloads, this one calls
		// PreferenceConverter.setDefault rather than setValue — confirm whether
		// the asymmetry is intentional.
		PreferenceConverter.setDefault(this, key, rectangle);
	}

	public void setDefault(String key, RGB rgb) {
		PreferenceConverter.setValue(this, key, rgb);
	}

	/** Sets the default for key as a string-encoded String array. */
	public void setDefault(String key, String[] values) {
		setDefault(key, RaptorStringUtils.toString(values));
	}

	// setValue overloads: mirror the setDefault overloads for current values.

	public void setValue(String key, Font font) {
		PreferenceConverter.setValue(this, key, font.getFontData());
	}

	public void setValue(String key, FontData[] fontData) {
		PreferenceConverter.setValue(this, key, fontData);
	}

	public void setValue(String key, int[] values) {
		setValue(key, RaptorStringUtils.toString(values));
	}

	public void setValue(String key, Point point) {
		PreferenceConverter.setValue(this, key, point);
	}

	public void setValue(String key, Quadrant quadrant) {
		setValue(key, quadrant.name());
	}

	public void setValue(String key, Rectangle rectangle) {
		PreferenceConverter.setValue(this, key, rectangle);
	}

	public void setValue(String key, RGB rgb) {
		PreferenceConverter.setValue(this, key, rgb);
	}

	public void setValue(String key, String[] values) {
		setValue(key, RaptorStringUtils.toString(values));
	}

	/**
	 * Scales the default icon size, toolbar piece size and font sizes to the
	 * primary monitor's height, with tiers at 800, 1024 and 1200 pixels.
	 */
	protected void setDefaultMonitorBasedSizes() {
		Rectangle fullViewBounds = Display.getCurrent().getPrimaryMonitor()
				.getBounds();
		// Baseline sizes for displays shorter than 800 pixels.
		int toolbarPieceSize = 12;
		String iconSize = "tiny";
		defaultLargeFontSize = 10;
		defaultMediumFontSize = 10;
		defaultSmallFontSize = 8;
		defaultTinyFontSize = 6;
		if (fullViewBounds.height >= 1200) {
			iconSize = "large";
			toolbarPieceSize = 24;
			defaultLargeFontSize = 18;
			defaultMediumFontSize = 16;
			defaultSmallFontSize = 14;
			defaultTinyFontSize = 12;
		} else if (fullViewBounds.height >= 1024) {
			iconSize = "medium";
			toolbarPieceSize = 20;
			defaultLargeFontSize = 16;
			defaultMediumFontSize = 14;
			defaultSmallFontSize = 12;
			defaultTinyFontSize = 10;
		} else if (fullViewBounds.height >= 800) {
			iconSize = "small";
			toolbarPieceSize = 16;
			defaultLargeFontSize = 12;
			defaultMediumFontSize = 12;
			defaultSmallFontSize = 10;
			defaultTinyFontSize = 8;
		}
		getDefauultMonospacedFont();
		setDefault(PreferenceKeys.APP_ICON_SIZE, iconSize);
		setDefault(PreferenceKeys.APP_TOOLBAR_PIECE_SIZE, toolbarPieceSize);
	}
}
package com.edinarobotics.zed.subsystems; import com.edinarobotics.utils.commands.MaintainStateCommand; import com.edinarobotics.utils.subsystems.Subsystem1816; import edu.wpi.first.wpilibj.Relay; public class Collector extends Subsystem1816 { public static byte COLLECTOR_IN = 1; public static byte COLLECTOR_OUT = -1; public static byte COLLECTOR_STOP = 0; private Relay leftStar; private Relay rightStar; private Relay roller; private byte leftStarDirection; private byte rightStarDirection; private byte rollerDirection; public Collector(int leftStar, int rightStar, int roller){ super("Collector"); this.leftStar = new Relay(leftStar); this.rightStar = new Relay(rightStar); this.roller = new Relay(roller); leftStarDirection = 0; rightStarDirection = 0; rollerDirection = 0; } protected void initDefaultCommand(){ setDefaultCommand(new MaintainStateCommand(this)); } /** * Sets the direction of the collector mechanisms. * A positive value will set the collector to a forward direction. Forward * is defined as the direction that will bring discs into the robot.<br/> * A negative value will set the collector to a backwards direction.<br/> * A zero value will stop the collector. * @param direction Sets the collector to the direction as given above. */ public void setCollectorDirection(byte direction){ leftStarDirection = direction; rightStarDirection = direction; rollerDirection = direction; update(); } private Relay.Value getRelayDirection(byte direction){ if(direction == 0){ return Relay.Value.kOff; } if(direction > 0){ return Relay.Value.kForward; } return Relay.Value.kReverse; } public void update(){ leftStar.set(getRelayDirection(leftStarDirection)); rightStar.set(getRelayDirection(rightStarDirection)); roller.set(getRelayDirection(rollerDirection)); } }
package com.fsck.k9.controller; import java.io.CharArrayWriter; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArraySet; import java.util.concurrent.CountDownLatch; import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import android.app.Application; import android.app.KeyguardManager; import android.app.Notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.media.AudioManager; import android.net.Uri; import android.os.PowerManager; import android.os.Process; import android.text.TextUtils; import android.util.Log; import com.fsck.k9.Account; import com.fsck.k9.AccountStats; import com.fsck.k9.K9; import com.fsck.k9.NotificationSetting; import com.fsck.k9.Preferences; import com.fsck.k9.R; import com.fsck.k9.SearchSpecification; import com.fsck.k9.activity.FolderList; import com.fsck.k9.activity.MessageList; import com.fsck.k9.helper.Utility; import com.fsck.k9.helper.power.TracingPowerManager; import com.fsck.k9.helper.power.TracingPowerManager.TracingWakeLock; import com.fsck.k9.mail.Address; import com.fsck.k9.mail.FetchProfile; import com.fsck.k9.mail.Flag; import com.fsck.k9.mail.Folder; import com.fsck.k9.mail.Folder.FolderType; import com.fsck.k9.mail.Folder.OpenMode; import com.fsck.k9.mail.Message; import com.fsck.k9.mail.MessagingException; import com.fsck.k9.mail.Part; import com.fsck.k9.mail.PushReceiver; import com.fsck.k9.mail.Pusher; import 
com.fsck.k9.mail.Store; import com.fsck.k9.mail.Transport; import com.fsck.k9.mail.internet.MimeMessage; import com.fsck.k9.mail.internet.MimeUtility; import com.fsck.k9.mail.internet.TextBody; import com.fsck.k9.mail.store.LocalStore; import com.fsck.k9.mail.store.LocalStore.LocalFolder; import com.fsck.k9.mail.store.LocalStore.LocalMessage; import com.fsck.k9.mail.store.LocalStore.PendingCommand; /** * Starts a long running (application) Thread that will run through commands * that require remote mailbox access. This class is used to serialize and * prioritize these commands. Each method that will submit a command requires a * MessagingListener instance to be provided. It is expected that that listener * has also been added as a registered listener using addListener(). When a * command is to be executed, if the listener that was provided with the command * is no longer registered the command is skipped. The design idea for the above * is that when an Activity starts it registers as a listener. When it is paused * it removes itself. Thus, any commands that that activity submitted are * removed from the queue once the activity is no longer active. 
*/
public class MessagingController implements Runnable {
    /**
     * Immutable empty {@link String} array
     */
    private static final String[] EMPTY_STRING_ARRAY = new String[0];

    /**
     * Immutable empty {@link Message} array
     */
    private static final Message[] EMPTY_MESSAGE_ARRAY = new Message[0];

    /**
     * Immutable empty {@link Folder} array
     */
    private static final Folder[] EMPTY_FOLDER_ARRAY = new Folder[0];

    // Identifiers for pending commands persisted in the local store.
    private static final String PENDING_COMMAND_MOVE_OR_COPY = "com.fsck.k9.MessagingController.moveOrCopy";
    private static final String PENDING_COMMAND_MOVE_OR_COPY_BULK = "com.fsck.k9.MessagingController.moveOrCopyBulk";
    private static final String PENDING_COMMAND_EMPTY_TRASH = "com.fsck.k9.MessagingController.emptyTrash";
    private static final String PENDING_COMMAND_SET_FLAG_BULK = "com.fsck.k9.MessagingController.setFlagBulk";
    private static final String PENDING_COMMAND_SET_FLAG = "com.fsck.k9.MessagingController.setFlag";
    private static final String PENDING_COMMAND_APPEND = "com.fsck.k9.MessagingController.append";
    private static final String PENDING_COMMAND_MARK_ALL_AS_READ = "com.fsck.k9.MessagingController.markAllAsRead";
    private static final String PENDING_COMMAND_EXPUNGE = "com.fsck.k9.MessagingController.expunge";

    // Singleton instance, created lazily in getInstance(Application).
    private static MessagingController inst = null;
    // Command queue consumed by the controller thread in run().
    private BlockingQueue<Command> mCommands = new PriorityBlockingQueue<Command>();
    private Thread mThread;
    private Set<MessagingListener> mListeners = new CopyOnWriteArraySet<MessagingListener>();
    private HashMap<SORT_TYPE, Boolean> sortAscending = new HashMap<SORT_TYPE, Boolean>();
    private ConcurrentHashMap<String, AtomicInteger> sendCount = new ConcurrentHashMap<String, AtomicInteger>();
    ConcurrentHashMap<Account, Pusher> pushers = new ConcurrentHashMap<Account, Pusher>();

    /**
     * Message-list sort orders. Each constant pairs the toast string resources
     * shown for ascending/descending with that order's default direction.
     */
    public enum SORT_TYPE {
        SORT_DATE(R.string.sort_earliest_first, R.string.sort_latest_first, false),
        SORT_SUBJECT(R.string.sort_subject_alpha, R.string.sort_subject_re_alpha, true),
        SORT_SENDER(R.string.sort_sender_alpha, R.string.sort_sender_re_alpha, true),
        SORT_UNREAD(R.string.sort_unread_first, R.string.sort_unread_last, true),
        SORT_FLAGGED(R.string.sort_flagged_first, R.string.sort_flagged_last, true),
        SORT_ATTACHMENT(R.string.sort_attach_first, R.string.sort_unattached_first, true);

        private int ascendingToast;
        private int descendingToast;
        private boolean defaultAscending;

        SORT_TYPE(int ascending, int descending, boolean ndefaultAscending) {
            ascendingToast = ascending;
            descendingToast = descending;
            defaultAscending = ndefaultAscending;
        }

        /** Returns the toast string resource id for the given direction. */
        public int getToast(boolean ascending) {
            if (ascending) {
                return ascendingToast;
            } else {
                return descendingToast;
            }
        }

        public boolean isDefaultAscending() {
            return defaultAscending;
        }
    };

    private SORT_TYPE sortType = SORT_TYPE.SORT_DATE;
    private MessagingListener checkMailListener = null;
    private MemorizingListener memorizingListener = new MemorizingListener();
    private boolean mBusy;
    private Application mApplication;

    // Key is accountUuid:folderName:messageUid , value is unimportant
    private ConcurrentHashMap<String, String> deletedUids = new ConcurrentHashMap<String, String>();

    /** Builds the suppression-map key for a message; delegates to the uid form. */
    private String createMessageKey(Account account, String folder, Message message) {
        return createMessageKey(account, folder, message.getUid());
    }

    private String createMessageKey(Account account, String folder, String uid) {
        return account.getUuid() + ":" + folder + ":" + uid;
    }

    /** Marks a message as suppressed (hidden from listings); null args are no-ops. */
    private void suppressMessage(Account account, String folder, Message message) {
        if (account == null || folder == null || message == null) {
            return;
        }
        String messKey = createMessageKey(account, folder, message);
        deletedUids.put(messKey, "true");
    }

    /** Clears a previous suppression; null args are no-ops. */
    private void unsuppressMessage(Account account, String folder, String uid) {
        if (account == null || folder == null || uid == null) {
            return;
        }
        String messKey = createMessageKey(account, folder, uid);
        deletedUids.remove(messKey);
    }

    /** Returns true if the message was suppressed via suppressMessage. */
    private boolean isMessageSuppressed(Account account, String folder, Message message) {
        if (account == null || folder == null || message == null) {
            return false;
        }
        String messKey = createMessageKey(account, folder, message);
        if (deletedUids.containsKey(messKey)) {
            return true;
        }
        return false;
    }

    /** Starts the controller thread and registers the memorizing listener. */
    private MessagingController(Application application) {
        mApplication = application;
        mThread = new Thread(this);
        mThread.start();
        if (memorizingListener != null) {
            addListener(memorizingListener);
        }
    }

    /**
     * Gets or creates the singleton instance of MessagingController. Application is used to
     * provide a Context to classes that need it.
     * @param application
     * @return
     */
    public synchronized static MessagingController getInstance(Application application) {
        if (inst == null) {
            inst = new MessagingController(application);
        }
        return inst;
    }

    public boolean isBusy() {
        return mBusy;
    }

    /**
     * Controller thread main loop: takes commands off the queue, runs them,
     * and notifies listeners as each command completes. Runs forever.
     */
    public void run() {
        Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
        while (true) {
            String commandDescription = null;
            try {
                Command command = mCommands.take();
                if (command != null) {
                    commandDescription = command.description;
                    if (K9.DEBUG)
                        Log.i(K9.LOG_TAG, "Running " + (command.isForeground ? "Foreground" : "Background")
                              + " command '" + command.description + "', seq = " + command.sequence);
                    mBusy = true;
                    command.runnable.run();
                    if (K9.DEBUG)
                        Log.i(K9.LOG_TAG, (command.isForeground ?
"Foreground" : "Background") + " Command '" + command.description + "' completed"); for (MessagingListener l : getListeners(command.listener)) { l.controllerCommandCompleted(mCommands.size() > 0); } } } catch (Exception e) { Log.e(K9.LOG_TAG, "Error running command '" + commandDescription + "'", e); } mBusy = false; } } private void put(String description, MessagingListener listener, Runnable runnable) { putCommand(mCommands, description, listener, runnable, true); } private void putBackground(String description, MessagingListener listener, Runnable runnable) { putCommand(mCommands, description, listener, runnable, false); } private void putCommand(BlockingQueue<Command> queue, String description, MessagingListener listener, Runnable runnable, boolean isForeground) { int retries = 10; Exception e = null; while (retries { try { Command command = new Command(); command.listener = listener; command.runnable = runnable; command.description = description; command.isForeground = isForeground; queue.put(command); return; } catch (InterruptedException ie) { try { Thread.sleep(200); } catch (InterruptedException ne) { } e = ie; } } throw new Error(e); } public void addListener(MessagingListener listener) { mListeners.add(listener); refreshListener(listener); } public void refreshListener(MessagingListener listener) { if (memorizingListener != null && listener != null) { memorizingListener.refreshOther(listener); } } public void removeListener(MessagingListener listener) { mListeners.remove(listener); } public Set<MessagingListener> getListeners() { return mListeners; } public Set<MessagingListener> getListeners(MessagingListener listener) { if (listener == null) { return mListeners; } Set<MessagingListener> listeners = new HashSet<MessagingListener>(mListeners); listeners.add(listener); return listeners; } /** * Lists folders that are available locally and remotely. 
This method calls * listFoldersCallback for local folders before it returns, and then for * remote folders at some later point. If there are no local folders * includeRemote is forced by this method. This method should be called from * a Thread as it may take several seconds to list the local folders. * TODO this needs to cache the remote folder list * * @param account * @param includeRemote * @param listener * @throws MessagingException */ public void listFolders(final Account account, final boolean refreshRemote, final MessagingListener listener) { new Thread(new Runnable() { public void run() { for (MessagingListener l : getListeners(listener)) { l.listFoldersStarted(account); } List<? extends Folder> localFolders = null; try { Store localStore = account.getLocalStore(); localFolders = localStore.getPersonalNamespaces(false); Folder[] folderArray = localFolders.toArray(EMPTY_FOLDER_ARRAY); if (refreshRemote || localFolders == null || localFolders.size() == 0) { doRefreshRemote(account, listener); return; } for (MessagingListener l : getListeners(listener)) { l.listFolders(account, folderArray); } } catch (Exception e) { for (MessagingListener l : getListeners(listener)) { l.listFoldersFailed(account, e.getMessage()); } addErrorMessage(account, null, e); return; } finally { if (localFolders != null) { for (Folder localFolder : localFolders) { if (localFolder != null) { localFolder.close(); } } } } for (MessagingListener l : getListeners(listener)) { l.listFoldersFinished(account); } } }).start(); } private void doRefreshRemote(final Account account, MessagingListener listener) { put("doRefreshRemote", listener, new Runnable() { public void run() { List<? extends Folder> localFolders = null; try { Store store = account.getRemoteStore(); List<? 
extends Folder> remoteFolders = store.getPersonalNamespaces(false); LocalStore localStore = account.getLocalStore(); HashSet<String> remoteFolderNames = new HashSet<String>(); for (int i = 0, count = remoteFolders.size(); i < count; i++) { LocalFolder localFolder = localStore.getFolder(remoteFolders.get(i).getName()); if (!localFolder.exists()) { localFolder.create(FolderType.HOLDS_MESSAGES, account.getDisplayCount()); } remoteFolderNames.add(remoteFolders.get(i).getName()); } localFolders = localStore.getPersonalNamespaces(false); /* * Clear out any folders that are no longer on the remote store. */ for (Folder localFolder : localFolders) { String localFolderName = localFolder.getName(); if (localFolderName.equalsIgnoreCase(K9.INBOX) || localFolderName.equals(account.getTrashFolderName()) || localFolderName.equals(account.getOutboxFolderName()) || localFolderName.equals(account.getDraftsFolderName()) || localFolderName.equals(account.getSentFolderName()) || localFolderName.equals(account.getErrorFolderName())) { continue; } if (!remoteFolderNames.contains(localFolder.getName())) { localFolder.delete(false); } } localFolders = localStore.getPersonalNamespaces(false); Folder[] folderArray = localFolders.toArray(EMPTY_FOLDER_ARRAY); for (MessagingListener l : getListeners()) { l.listFolders(account, folderArray); } for (MessagingListener l : getListeners()) { l.listFoldersFinished(account); } } catch (Exception e) { for (MessagingListener l : getListeners()) { l.listFoldersFailed(account, ""); } addErrorMessage(account, null, e); } finally { if (localFolders != null) { for (Folder localFolder : localFolders) { if (localFolder != null) { localFolder.close(); } } } } } }); } /** * List the messages in the local message store for the given folder asynchronously. 
*
     * @param account
     * @param folder
     * @param listener
     * @throws MessagingException
     */
    public void listLocalMessages(final Account account, final String folder, final MessagingListener listener) {
        new Thread(new Runnable() {
            public void run() {
                listLocalMessagesSynchronous(account, folder, listener);
            }
        }).start();
    }

    /**
     * List the messages in the local message store for the given folder synchronously.
     *
     * @param account
     * @param folder
     * @param listener
     * @throws MessagingException
     */
    public void listLocalMessagesSynchronous(final Account account, final String folder, final MessagingListener listener) {
        for (MessagingListener l : getListeners(listener)) {
            l.listLocalMessagesStarted(account, folder);
        }
        Folder localFolder = null;
        // Batches retrieved messages and forwards them to the listeners,
        // filtering out messages suppressed via suppressMessage.
        MessageRetrievalListener retrievalListener = new MessageRetrievalListener() {
            List<Message> pendingMessages = new ArrayList<Message>();
            int totalDone = 0;

            public void messageStarted(String message, int number, int ofTotal) {}

            public void messageFinished(Message message, int number, int ofTotal) {
                if (!isMessageSuppressed(account, folder, message)) {
                    pendingMessages.add(message);
                    totalDone++;
                    // Flush to listeners in small batches to limit callback churn.
                    if (pendingMessages.size() > 10) {
                        addPendingMessages();
                    }
                } else {
                    for (MessagingListener l : getListeners(listener)) {
                        l.listLocalMessagesRemoveMessage(account, folder, message);
                    }
                }
            }

            public void messagesFinished(int number) {
                addPendingMessages();
            }

            private void addPendingMessages() {
                for (MessagingListener l : getListeners(listener)) {
                    l.listLocalMessagesAddMessages(account, folder, pendingMessages);
                }
                pendingMessages.clear();
            }
        };
        try {
            Store localStore = account.getLocalStore();
            localFolder = localStore.getFolder(folder);
            localFolder.open(OpenMode.READ_WRITE);
            localFolder.getMessages(
                retrievalListener,
                false // Skip deleted messages
            );
            if (K9.DEBUG)
                Log.v(K9.LOG_TAG, "Got ack that callbackRunner finished");
            for (MessagingListener l : getListeners(listener)) {
                l.listLocalMessagesFinished(account, folder);
            }
        } catch (Exception e) {
            for (MessagingListener l : getListeners(listener)) {
                l.listLocalMessagesFailed(account, folder, e.getMessage());
            }
            addErrorMessage(account, null, e);
        } finally {
            if (localFolder != null) {
                localFolder.close();
            }
        }
    }

    /** Convenience overload: unpacks a SearchSpecification and delegates. */
    public void searchLocalMessages(SearchSpecification searchSpecification, final Message[] messages, final MessagingListener listener) {
        searchLocalMessages(searchSpecification.getAccountUuids(), searchSpecification.getFolderNames(),
                            messages, searchSpecification.getQuery(), searchSpecification.isIntegrate(),
                            searchSpecification.getRequiredFlags(), searchSpecification.getForbiddenFlags(), listener);
    }

    /**
     * Find all messages in any local account which match the query 'query'
     * @param folderNames TODO
     * @param query
     * @param listener
     * @param searchAccounts TODO
     * @param account TODO
     * @param account
     * @throws MessagingException
     */
    public void searchLocalMessages(final String[] accountUuids, final String[] folderNames, final Message[] messages, final String query, final boolean integrate, final Flag[] requiredFlags, final Flag[] forbiddenFlags, final MessagingListener listener) {
        if (K9.DEBUG) {
            Log.i(K9.LOG_TAG, "searchLocalMessages ("
                  + "accountUuids=" + Utility.combine(accountUuids, ',')
                  + ", folderNames = " + Utility.combine(folderNames, ',')
                  + ", messages.size() = " + (messages != null ? messages.length : null)
                  + ", query = " + query
                  + ", integrate = " + integrate
                  + ", requiredFlags = " + Utility.combine(requiredFlags, ',')
                  + ", forbiddenFlags = " + Utility.combine(forbiddenFlags, ',')
                  + ")");
        }
        new Thread(new Runnable() {
            public void run() {
                final AccountStats stats = new AccountStats();
                final Set<String> accountUuidsSet = new HashSet<String>();
                if (accountUuids != null) {
                    for (String accountUuid : accountUuids) {
                        accountUuidsSet.add(accountUuid);
                    }
                }
                final Preferences prefs = Preferences.getPreferences(mApplication.getApplicationContext());
                Account[] accounts = prefs.getAccounts();
                List<LocalFolder> foldersToSearch = null;
                boolean displayableOnly = false;
                boolean noSpecialFolders = true;
                for (final Account account : accounts) {
                    // Skip accounts outside the requested uuid set.
                    if (accountUuids != null && !accountUuidsSet.contains(account.getUuid())) {
                        continue;
                    }
                    if (accountUuids != null && accountUuidsSet.contains(account.getUuid())) {
                        displayableOnly = true;
                        noSpecialFolders = true;
                    } else if (!integrate && folderNames == null) {
                        Account.Searchable searchableFolders = account.getSearchableFolders();
                        switch (searchableFolders) {
                        case NONE:
                            continue;
                        case DISPLAYABLE:
                            displayableOnly = true;
                            break;
                        }
                    }
                    // When specific messages were given, restrict to those in this account.
                    List<Message> messagesToSearch = null;
                    if (messages != null) {
                        messagesToSearch = new LinkedList<Message>();
                        for (Message message : messages) {
                            if (message.getFolder().getAccount().getUuid().equals(account.getUuid())) {
                                messagesToSearch.add(message);
                            }
                        }
                        if (messagesToSearch.isEmpty()) {
                            continue;
                        }
                    }
                    if (listener != null) {
                        listener.listLocalMessagesStarted(account, null);
                    }
                    if (integrate || displayableOnly || folderNames != null || noSpecialFolders) {
                        List<LocalFolder> tmpFoldersToSearch = new LinkedList<LocalFolder>();
                        try {
                            LocalStore store = account.getLocalStore();
                            List<?
extends Folder> folders = store.getPersonalNamespaces(false); Set<String> folderNameSet = null; if (folderNames != null) { folderNameSet = new HashSet<String>(); for (String folderName : folderNames) { folderNameSet.add(folderName); } } for (Folder folder : folders) { LocalFolder localFolder = (LocalFolder)folder; boolean include = true; folder.refresh(prefs); String localFolderName = localFolder.getName(); if (integrate) { include = localFolder.isIntegrate(); } else { if (folderNameSet != null) { if (!folderNameSet.contains(localFolderName)) { include = false; } } // Never exclude the INBOX (see issue 1817) else if (noSpecialFolders && !localFolderName.equals(K9.INBOX) && ( localFolderName.equals(account.getTrashFolderName()) || localFolderName.equals(account.getOutboxFolderName()) || localFolderName.equals(account.getDraftsFolderName()) || localFolderName.equals(account.getSentFolderName()) || localFolderName.equals(account.getErrorFolderName()))) { include = false; } else if (displayableOnly && modeMismatch(account.getFolderDisplayMode(), folder.getDisplayClass())) { include = false; } } if (include) { tmpFoldersToSearch.add(localFolder); } } if (tmpFoldersToSearch.size() < 1) { continue; } foldersToSearch = tmpFoldersToSearch; } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Unable to restrict search folders in Account " + account.getDescription() + ", searching all", me); addErrorMessage(account, null, me); } } MessageRetrievalListener retrievalListener = new MessageRetrievalListener() { public void messageStarted(String message, int number, int ofTotal) {} public void messageFinished(Message message, int number, int ofTotal) { if (!isMessageSuppressed(message.getFolder().getAccount(), message.getFolder().getName(), message)) { List<Message> messages = new ArrayList<Message>(); messages.add(message); stats.unreadMessageCount += (!message.isSet(Flag.SEEN)) ? 1 : 0; stats.flaggedMessageCount += (message.isSet(Flag.FLAGGED)) ? 
1 : 0; if (listener != null) { listener.listLocalMessagesAddMessages(account, null, messages); } } } public void messagesFinished(int number) { } }; try { String[] queryFields = {"html_content","subject","sender_list"}; LocalStore localStore = account.getLocalStore(); localStore.searchForMessages(retrievalListener, queryFields , query, foldersToSearch, messagesToSearch == null ? null : messagesToSearch.toArray(EMPTY_MESSAGE_ARRAY), requiredFlags, forbiddenFlags); } catch (Exception e) { if (listener != null) { listener.listLocalMessagesFailed(account, null, e.getMessage()); } addErrorMessage(account, null, e); } finally { if (listener != null) { listener.listLocalMessagesFinished(account, null); } } } if (listener != null) { listener.searchStats(stats); } } }).start(); } public void loadMoreMessages(Account account, String folder, MessagingListener listener) { try { LocalStore localStore = account.getLocalStore(); LocalFolder localFolder = localStore.getFolder(folder); localFolder.setVisibleLimit(localFolder.getVisibleLimit() + account.getDisplayCount()); synchronizeMailbox(account, folder, listener, null); } catch (MessagingException me) { addErrorMessage(account, null, me); throw new RuntimeException("Unable to set visible limit on folder", me); } } public void resetVisibleLimits(Account[] accounts) { for (Account account : accounts) { try { LocalStore localStore = account.getLocalStore(); localStore.resetVisibleLimits(account.getDisplayCount()); } catch (MessagingException e) { addErrorMessage(account, null, e); Log.e(K9.LOG_TAG, "Unable to reset visible limits", e); } } } /** * Start background synchronization of the specified folder. 
* @param account * @param folder * @param listener * @param providedRemoteFolder TODO */ public void synchronizeMailbox(final Account account, final String folder, final MessagingListener listener, final Folder providedRemoteFolder) { putBackground("synchronizeMailbox", listener, new Runnable() { public void run() { synchronizeMailboxSynchronous(account, folder, listener, providedRemoteFolder); } }); } /** * Start foreground synchronization of the specified folder. This is generally only called * by synchronizeMailbox. * @param account * @param folder * * TODO Break this method up into smaller chunks. * @param providedRemoteFolder TODO */ private void synchronizeMailboxSynchronous(final Account account, final String folder, final MessagingListener listener, Folder providedRemoteFolder) { Folder remoteFolder = null; LocalFolder tLocalFolder = null; if (K9.DEBUG) Log.i(K9.LOG_TAG, "Synchronizing folder " + account.getDescription() + ":" + folder); for (MessagingListener l : getListeners(listener)) { l.synchronizeMailboxStarted(account, folder); } /* * We don't ever sync the Outbox or errors folder */ if (folder.equals(account.getOutboxFolderName()) || folder.equals(account.getErrorFolderName())) { for (MessagingListener l : getListeners(listener)) { l.synchronizeMailboxFinished(account, folder, 0, 0); } return; } Exception commandException = null; try { if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: About to process pending commands for account " + account.getDescription()); try { processPendingCommandsSynchronous(account); } catch (Exception e) { addErrorMessage(account, null, e); Log.e(K9.LOG_TAG, "Failure processing command, but allow message sync attempt", e); commandException = e; } /* * Get the message list from the local store and create an index of * the uids within the list. 
*/ if (K9.DEBUG) Log.v(K9.LOG_TAG, "SYNC: About to get local folder " + folder); final LocalStore localStore = account.getLocalStore(); tLocalFolder = localStore.getFolder(folder); final LocalFolder localFolder = tLocalFolder; localFolder.open(OpenMode.READ_WRITE); Message[] localMessages = localFolder.getMessages(null); HashMap<String, Message> localUidMap = new HashMap<String, Message>(); for (Message message : localMessages) { localUidMap.put(message.getUid(), message); } if (providedRemoteFolder != null) { if (K9.DEBUG) Log.v(K9.LOG_TAG, "SYNC: using providedRemoteFolder " + folder); remoteFolder = providedRemoteFolder; } else { Store remoteStore = account.getRemoteStore(); if (K9.DEBUG) Log.v(K9.LOG_TAG, "SYNC: About to get remote folder " + folder); remoteFolder = remoteStore.getFolder(folder); if (! verifyOrCreateRemoteSpecialFolder(account, folder, remoteFolder, listener)) { return; } /* * Synchronization process: Open the folder Upload any local messages that are marked as PENDING_UPLOAD (Drafts, Sent, Trash) Get the message count Get the list of the newest K9.DEFAULT_VISIBLE_LIMIT messages getMessages(messageCount - K9.DEFAULT_VISIBLE_LIMIT, messageCount) See if we have each message locally, if not fetch it's flags and envelope Get and update the unread count for the folder Update the remote flags of any messages we have locally with an internal date newer than the remote message. Get the current flags for any messages we have locally but did not just download Update local flags For any message we have locally but not remotely, delete the local message to keep cache clean. Download larger parts of any new messages. (Optional) Download small attachments in the background. */ /* * Open the remote folder. This pre-loads certain metadata like message count. 
*/ if (K9.DEBUG) Log.v(K9.LOG_TAG, "SYNC: About to open remote folder " + folder); remoteFolder.open(OpenMode.READ_WRITE); if (Account.EXPUNGE_ON_POLL.equals(account.getExpungePolicy())) { if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: Expunging folder " + account.getDescription() + ":" + folder); remoteFolder.expunge(); } } /* * Get the remote message count. */ int remoteMessageCount = remoteFolder.getMessageCount(); int visibleLimit = localFolder.getVisibleLimit(); if (visibleLimit < 1) { visibleLimit = K9.DEFAULT_VISIBLE_LIMIT; } Message[] remoteMessageArray = EMPTY_MESSAGE_ARRAY; final ArrayList<Message> remoteMessages = new ArrayList<Message>(); // final ArrayList<Message> unsyncedMessages = new ArrayList<Message>(); HashMap<String, Message> remoteUidMap = new HashMap<String, Message>(); if (K9.DEBUG) Log.v(K9.LOG_TAG, "SYNC: Remote message count for folder " + folder + " is " + remoteMessageCount); final Date earliestDate = account.getEarliestPollDate(); if (remoteMessageCount > 0) { /* * Message numbers start at 1. 
*/ int remoteStart = Math.max(0, remoteMessageCount - visibleLimit) + 1; int remoteEnd = remoteMessageCount; if (K9.DEBUG) Log.v(K9.LOG_TAG, "SYNC: About to get messages " + remoteStart + " through " + remoteEnd + " for folder " + folder); final AtomicInteger headerProgress = new AtomicInteger(0); for (MessagingListener l : getListeners(listener)) { l.synchronizeMailboxHeadersStarted(account, folder); } remoteMessageArray = remoteFolder.getMessages(remoteStart, remoteEnd, earliestDate, null); int messageCount = remoteMessageArray.length; for (Message thisMess : remoteMessageArray) { headerProgress.incrementAndGet(); for (MessagingListener l : getListeners(listener)) { l.synchronizeMailboxHeadersProgress(account, folder, headerProgress.get(), messageCount); } Message localMessage = localUidMap.get(thisMess.getUid()); if (localMessage == null || !localMessage.olderThan(earliestDate)) { remoteMessages.add(thisMess); remoteUidMap.put(thisMess.getUid(), thisMess); } } if (K9.DEBUG) Log.v(K9.LOG_TAG, "SYNC: Got " + remoteUidMap.size() + " messages for folder " + folder); remoteMessageArray = null; for (MessagingListener l : getListeners(listener)) { l.synchronizeMailboxHeadersFinished(account, folder, headerProgress.get(), remoteUidMap.size()); } } else if (remoteMessageCount < 0) { throw new Exception("Message count " + remoteMessageCount + " for folder " + folder); } /* * Remove any messages that are in the local store but no longer on the remote store or are too old */ if (account.syncRemoteDeletions()) { for (Message localMessage : localMessages) { if (remoteUidMap.get(localMessage.getUid()) == null && !localMessage.isSet(Flag.DELETED)) { localMessage.setFlag(Flag.X_DESTROYED, true); for (MessagingListener l : getListeners(listener)) { l.synchronizeMailboxRemovedMessage(account, folder, localMessage); } } } } localMessages = null; /* * Now we download the actual content of messages. 
*/ int newMessages = downloadMessages(account, remoteFolder, localFolder, remoteMessages, false); int unreadMessageCount = setLocalUnreadCountToRemote(localFolder, remoteFolder, newMessages); setLocalFlaggedCountToRemote(localFolder, remoteFolder); for (MessagingListener l : getListeners()) { l.folderStatusChanged(account, folder, unreadMessageCount); } /* * Notify listeners that we're finally done. */ localFolder.setLastChecked(System.currentTimeMillis()); localFolder.setStatus(null); if (K9.DEBUG) Log.d(K9.LOG_TAG, "Done synchronizing folder " + account.getDescription() + ":" + folder + " @ " + new Date() + " with " + newMessages + " new messages"); for (MessagingListener l : getListeners(listener)) { l.synchronizeMailboxFinished(account, folder, remoteMessageCount, newMessages); } if (commandException != null) { String rootMessage = getRootCauseMessage(commandException); Log.e(K9.LOG_TAG, "Root cause failure in " + account.getDescription() + ":" + tLocalFolder.getName() + " was '" + rootMessage + "'"); localFolder.setStatus(rootMessage); for (MessagingListener l : getListeners(listener)) { l.synchronizeMailboxFailed(account, folder, rootMessage); } } if (K9.DEBUG) Log.i(K9.LOG_TAG, "Done synchronizing folder " + account.getDescription() + ":" + folder); } catch (Exception e) { Log.e(K9.LOG_TAG, "synchronizeMailbox", e); // If we don't set the last checked, it can try too often during // failure conditions String rootMessage = getRootCauseMessage(e); if (tLocalFolder != null) { try { tLocalFolder.setStatus(rootMessage); tLocalFolder.setLastChecked(System.currentTimeMillis()); } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Could not set last checked on folder " + account.getDescription() + ":" + tLocalFolder.getName(), e); } } for (MessagingListener l : getListeners(listener)) { l.synchronizeMailboxFailed( account, folder, rootMessage); } addErrorMessage(account, null, e); Log.e(K9.LOG_TAG, "Failed synchronizing folder " + account.getDescription() + ":" + 
folder + " @ " + new Date()); } finally { if (providedRemoteFolder == null && remoteFolder != null) { remoteFolder.close(); } if (tLocalFolder != null) { tLocalFolder.close(); } } } /* * If the folder is a "special" folder we need to see if it exists * on the remote server. It if does not exist we'll try to create it. If we * can't create we'll abort. This will happen on every single Pop3 folder as * designed and on Imap folders during error conditions. This allows us * to treat Pop3 and Imap the same in this code. */ private boolean verifyOrCreateRemoteSpecialFolder(final Account account, final String folder, final Folder remoteFolder, final MessagingListener listener) throws MessagingException { if (folder.equals(account.getTrashFolderName()) || folder.equals(account.getSentFolderName()) || folder.equals(account.getDraftsFolderName())) { if (!remoteFolder.exists()) { if (!remoteFolder.create(FolderType.HOLDS_MESSAGES)) { for (MessagingListener l : getListeners(listener)) { l.synchronizeMailboxFinished(account, folder, 0, 0); } if (K9.DEBUG) Log.i(K9.LOG_TAG, "Done synchronizing folder " + folder); return false; } } } return true; } private int setLocalUnreadCountToRemote(LocalFolder localFolder, Folder remoteFolder, int newMessageCount) throws MessagingException { int remoteUnreadMessageCount = remoteFolder.getUnreadMessageCount(); if (remoteUnreadMessageCount != -1) { localFolder.setUnreadMessageCount(remoteUnreadMessageCount); } else { int unreadCount = 0; Message[] messages = localFolder.getMessages(null, false); for (Message message : messages) { if (!message.isSet(Flag.SEEN) && !message.isSet(Flag.DELETED)) { unreadCount++; } } localFolder.setUnreadMessageCount(unreadCount); } return localFolder.getUnreadMessageCount(); } private void setLocalFlaggedCountToRemote(LocalFolder localFolder, Folder remoteFolder) throws MessagingException { int remoteFlaggedMessageCount = remoteFolder.getFlaggedMessageCount(); if (remoteFlaggedMessageCount != -1) { 
localFolder.setFlaggedMessageCount(remoteFlaggedMessageCount); } else { int flaggedCount = 0; Message[] messages = localFolder.getMessages(null, false); for (Message message : messages) { if (message.isSet(Flag.FLAGGED) && !message.isSet(Flag.DELETED)) { flaggedCount++; } } localFolder.setFlaggedMessageCount(flaggedCount); } } private int downloadMessages(final Account account, final Folder remoteFolder, final LocalFolder localFolder, List<Message> inputMessages, boolean flagSyncOnly) throws MessagingException { final Date earliestDate = account.getEarliestPollDate(); if (earliestDate != null) { if (K9.DEBUG) { Log.d(K9.LOG_TAG, "Only syncing messages after " + earliestDate); } } final String folder = remoteFolder.getName(); int unreadBeforeStart = 0; try { AccountStats stats = account.getStats(mApplication); unreadBeforeStart = stats.unreadMessageCount; } catch (MessagingException e) { Log.e(K9.LOG_TAG, "Unable to getUnreadMessageCount for account: " + account, e); } ArrayList<Message> syncFlagMessages = new ArrayList<Message>(); List<Message> unsyncedMessages = new ArrayList<Message>(); final AtomicInteger newMessages = new AtomicInteger(0); List<Message> messages = new ArrayList<Message>(inputMessages); for (Message message : messages) { if (message.isSet(Flag.DELETED)) { syncFlagMessages.add(message); } else if (!isMessageSuppressed(account, folder, message)) { Message localMessage = localFolder.getMessage(message.getUid()); if (localMessage == null) { if (!flagSyncOnly) { if (!message.isSet(Flag.X_DOWNLOADED_FULL) && !message.isSet(Flag.X_DOWNLOADED_PARTIAL)) { if (K9.DEBUG) Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " has not yet been downloaded"); unsyncedMessages.add(message); } else { if (K9.DEBUG) Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " is partially or fully downloaded"); // Store the updated message locally localFolder.appendMessages(new Message[] { message }); localMessage = localFolder.getMessage(message.getUid()); 
localMessage.setFlag(Flag.X_DOWNLOADED_FULL, message.isSet(Flag.X_DOWNLOADED_FULL)); localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, message.isSet(Flag.X_DOWNLOADED_PARTIAL)); for (MessagingListener l : getListeners()) { l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage); if (!localMessage.isSet(Flag.SEEN)) { l.synchronizeMailboxNewMessage(account, folder, localMessage); } } } } } else if (!localMessage.isSet(Flag.DELETED)) { if (K9.DEBUG) Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " is present in the local store"); if (!localMessage.isSet(Flag.X_DOWNLOADED_FULL) && !localMessage.isSet(Flag.X_DOWNLOADED_PARTIAL)) { if (K9.DEBUG) Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " is not downloaded, even partially; trying again"); unsyncedMessages.add(message); } else { String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message); if (newPushState != null) { localFolder.setPushState(newPushState); } syncFlagMessages.add(message); } } } } final AtomicInteger progress = new AtomicInteger(0); final int todo = unsyncedMessages.size() + syncFlagMessages.size(); for (MessagingListener l : getListeners()) { l.synchronizeMailboxProgress(account, folder, progress.get(), todo); } if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: Have " + unsyncedMessages.size() + " unsynced messages"); messages.clear(); final ArrayList<Message> largeMessages = new ArrayList<Message>(); final ArrayList<Message> smallMessages = new ArrayList<Message>(); if (unsyncedMessages.size() > 0) { /* * Reverse the order of the messages. Depending on the server this may get us * fetch results for newest to oldest. If not, no harm done. 
*/ Collections.reverse(unsyncedMessages); int visibleLimit = localFolder.getVisibleLimit(); int listSize = unsyncedMessages.size(); if (listSize > visibleLimit) { unsyncedMessages = unsyncedMessages.subList(listSize - visibleLimit, listSize); } FetchProfile fp = new FetchProfile(); if (remoteFolder.supportsFetchingFlags()) { fp.add(FetchProfile.Item.FLAGS); } fp.add(FetchProfile.Item.ENVELOPE); if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: About to fetch " + unsyncedMessages.size() + " unsynced messages for folder " + folder); fetchUnsyncedMessages(account, remoteFolder, localFolder, unsyncedMessages, smallMessages,largeMessages, progress, todo, fp); // If a message didn't exist, messageFinished won't be called, but we shouldn't try again // If we got here, nothing failed for (Message message : unsyncedMessages) { String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message); if (newPushState != null) { localFolder.setPushState(newPushState); } } if (K9.DEBUG) { Log.d(K9.LOG_TAG, "SYNC: Synced unsynced messages for folder " + folder); } } if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: Have " + largeMessages.size() + " large messages and " + smallMessages.size() + " small messages out of " + unsyncedMessages.size() + " unsynced messages"); unsyncedMessages.clear(); /* * Grab the content of the small messages first. This is going to * be very fast and at very worst will be a single up of a few bytes and a single * download of 625k. */ FetchProfile fp = new FetchProfile(); fp.add(FetchProfile.Item.BODY); // fp.add(FetchProfile.Item.FLAGS); // fp.add(FetchProfile.Item.ENVELOPE); downloadSmallMessages(account, remoteFolder, localFolder, smallMessages, progress, unreadBeforeStart, newMessages, todo, fp); smallMessages.clear(); /* * Now do the large messages that require more round trips. 
*/ fp.clear(); fp.add(FetchProfile.Item.STRUCTURE); downloadLargeMessages(account, remoteFolder, localFolder, largeMessages, progress, unreadBeforeStart, newMessages, todo, fp); largeMessages.clear(); /* * Refresh the flags for any messages in the local store that we didn't just * download. */ refreshLocalMessageFlags(account,remoteFolder,localFolder,syncFlagMessages,progress,todo); if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: Synced remote messages for folder " + folder + ", " + newMessages.get() + " new messages"); localFolder.purgeToVisibleLimit(new MessageRemovalListener() { public void messageRemoved(Message message) { for (MessagingListener l : getListeners()) { l.synchronizeMailboxRemovedMessage(account, folder, message); } } }); return newMessages.get(); } private void fetchUnsyncedMessages(final Account account, final Folder remoteFolder, final LocalFolder localFolder, List<Message> unsyncedMessages, final ArrayList<Message> smallMessages, final ArrayList<Message> largeMessages, final AtomicInteger progress, final int todo, FetchProfile fp) throws MessagingException { final String folder = remoteFolder.getName(); final Date earliestDate = account.getEarliestPollDate(); remoteFolder.fetch(unsyncedMessages.toArray(EMPTY_MESSAGE_ARRAY), fp, new MessageRetrievalListener() { public void messageFinished(Message message, int number, int ofTotal) { try { String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message); if (newPushState != null) { localFolder.setPushState(newPushState); } if (message.isSet(Flag.DELETED) || message.olderThan(earliestDate)) { if (K9.DEBUG) { if (message.isSet(Flag.DELETED)) { Log.v(K9.LOG_TAG, "Newly downloaded message " + account + ":" + folder + ":" + message.getUid() + " was marked deleted on server, skipping"); } else { Log.d(K9.LOG_TAG, "Newly downloaded message " + message.getUid() + " is older than " + earliestDate + ", skipping"); } } progress.incrementAndGet(); for (MessagingListener l : getListeners()) { 
l.synchronizeMailboxProgress(account, folder, progress.get(), todo); } return; } if (message.getSize() > account.getMaximumAutoDownloadMessageSize()) { largeMessages.add(message); } else { smallMessages.add(message); } // And include it in the view if (message.getSubject() != null && message.getFrom() != null) { /* * We check to make sure that we got something worth * showing (subject and from) because some protocols * (POP) may not be able to give us headers for * ENVELOPE, only size. */ if (!isMessageSuppressed(account, folder, message)) { // Store the new message locally localFolder.appendMessages(new Message[] { message }); Message localMessage = localFolder.getMessage(message.getUid()); syncFlags(localMessage, message); if (K9.DEBUG) Log.v(K9.LOG_TAG, "About to notify listeners that we got a new unsynced message " + account + ":" + folder + ":" + message.getUid()); for (MessagingListener l : getListeners()) { l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage); } } } } catch (Exception e) { Log.e(K9.LOG_TAG, "Error while storing downloaded message.", e); addErrorMessage(account, null, e); } } public void messageStarted(String uid, int number, int ofTotal) { } public void messagesFinished(int total) {} }); } private boolean shouldImportMessage(final Account account, final String folder, final Message message, final AtomicInteger progress, final Date earliestDate) { if (isMessageSuppressed(account, folder, message)) { if (K9.DEBUG) { Log.d(K9.LOG_TAG, "Message " + message.getUid() + " was suppressed "+ "but just downloaded. "+ "The race condition means we wasted some bandwidth. 
Oh well."); } return false; } if (message.olderThan(earliestDate)) { if (K9.DEBUG) { Log.d(K9.LOG_TAG, "Message " + message.getUid() + " is older than " + earliestDate + ", hence not saving"); } return false; } return true; } private void downloadSmallMessages(final Account account, final Folder remoteFolder, final LocalFolder localFolder, ArrayList<Message> smallMessages, final AtomicInteger progress, final int unreadBeforeStart, final AtomicInteger newMessages, final int todo, FetchProfile fp) throws MessagingException { final String folder = remoteFolder.getName(); final Date earliestDate = account.getEarliestPollDate(); if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: Fetching small messages for folder " + folder); remoteFolder.fetch(smallMessages.toArray(new Message[smallMessages.size()]), fp, new MessageRetrievalListener() { public void messageFinished(Message message, int number, int ofTotal) { try { if (!shouldImportMessage(account, folder, message, progress, earliestDate)) { progress.incrementAndGet(); return; } // Store the updated message locally localFolder.appendMessages(new Message[] { message }); Message localMessage = localFolder.getMessage(message.getUid()); progress.incrementAndGet(); // Set a flag indicating this message has now be fully downloaded localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true); if (K9.DEBUG) Log.v(K9.LOG_TAG, "About to notify listeners that we got a new small message " + account + ":" + folder + ":" + message.getUid()); // Update the listener with what we've found for (MessagingListener l : getListeners()) { l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage); l.synchronizeMailboxProgress(account, folder, progress.get(), todo); if (!localMessage.isSet(Flag.SEEN)) { l.synchronizeMailboxNewMessage(account, folder, localMessage); } } // Send a notification of this message if (shouldNotifyForMessage(account, message)) { newMessages.incrementAndGet(); notifyAccount(mApplication, account, message, unreadBeforeStart, 
newMessages); } } catch (MessagingException me) { addErrorMessage(account, null, me); Log.e(K9.LOG_TAG, "SYNC: fetch small messages", me); } } public void messageStarted(String uid, int number, int ofTotal) { } public void messagesFinished(int total) {} }); if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: Done fetching small messages for folder " + folder); } private void downloadLargeMessages(final Account account, final Folder remoteFolder, final LocalFolder localFolder, ArrayList<Message> largeMessages, final AtomicInteger progress, final int unreadBeforeStart, final AtomicInteger newMessages, final int todo, FetchProfile fp) throws MessagingException { final String folder = remoteFolder.getName(); final Date earliestDate = account.getEarliestPollDate(); if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: Fetching large messages for folder " + folder); remoteFolder.fetch(largeMessages.toArray(new Message[largeMessages.size()]), fp, null); for (Message message : largeMessages) { if (!shouldImportMessage(account, folder, message, progress, earliestDate)) { progress.incrementAndGet(); continue; } if (message.getBody() == null) { /* * The provider was unable to get the structure of the message, so * we'll download a reasonable portion of the messge and mark it as * incomplete so the entire thing can be downloaded later if the user * wishes to download it. */ fp.clear(); fp.add(FetchProfile.Item.BODY_SANE); /* * TODO a good optimization here would be to make sure that all Stores set * the proper size after this fetch and compare the before and after size. 
If * they equal we can mark this SYNCHRONIZED instead of PARTIALLY_SYNCHRONIZED */ remoteFolder.fetch(new Message[] { message }, fp, null); // Store the updated message locally localFolder.appendMessages(new Message[] { message }); Message localMessage = localFolder.getMessage(message.getUid()); // Certain (POP3) servers give you the whole message even when you ask for only the first x Kb if (!message.isSet(Flag.X_DOWNLOADED_FULL)) { /* * Mark the message as fully downloaded if the message size is smaller than * the account's autodownload size limit, otherwise mark as only a partial * download. This will prevent the system from downloading the same message * twice. */ if (message.getSize() < account.getMaximumAutoDownloadMessageSize()) { localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true); } else { // Set a flag indicating that the message has been partially downloaded and // is ready for view. localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, true); } } } else { /* * We have a structure to deal with, from which * we can pull down the parts we want to actually store. * Build a list of parts we are interested in. Text parts will be downloaded * right now, attachments will be left for later. */ ArrayList<Part> viewables = new ArrayList<Part>(); ArrayList<Part> attachments = new ArrayList<Part>(); MimeUtility.collectParts(message, viewables, attachments); /* * Now download the parts we're interested in storing. */ for (Part part : viewables) { remoteFolder.fetchPart(message, part, null); } // Store the updated message locally localFolder.appendMessages(new Message[] { message }); Message localMessage = localFolder.getMessage(message.getUid()); // Set a flag indicating this message has been fully downloaded and can be // viewed. 
localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, true); } if (K9.DEBUG) Log.v(K9.LOG_TAG, "About to notify listeners that we got a new large message " + account + ":" + folder + ":" + message.getUid()); // Update the listener with what we've found progress.incrementAndGet(); Message localMessage = localFolder.getMessage(message.getUid()); for (MessagingListener l : getListeners()) { l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage); l.synchronizeMailboxProgress(account, folder, progress.get(), todo); if (!localMessage.isSet(Flag.SEEN)) { l.synchronizeMailboxNewMessage(account, folder, localMessage); } } // Send a notification of this message if (shouldNotifyForMessage(account, message)) { newMessages.incrementAndGet(); notifyAccount(mApplication, account, message, unreadBeforeStart, newMessages); } }//for large messsages if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: Done fetching large messages for folder " + folder); } private void refreshLocalMessageFlags(final Account account, final Folder remoteFolder, final LocalFolder localFolder, ArrayList<Message> syncFlagMessages, final AtomicInteger progress, final int todo ) throws MessagingException { final String folder = remoteFolder.getName(); if (remoteFolder.supportsFetchingFlags()) { if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: About to sync flags for " + syncFlagMessages.size() + " remote messages for folder " + folder); FetchProfile fp = new FetchProfile(); fp.add(FetchProfile.Item.FLAGS); List<Message> undeletedMessages = new LinkedList<Message>(); for (Message message : syncFlagMessages) { if (!message.isSet(Flag.DELETED)) { undeletedMessages.add(message); } } remoteFolder.fetch(undeletedMessages.toArray(EMPTY_MESSAGE_ARRAY), fp, null); for (Message remoteMessage : syncFlagMessages) { Message localMessage = localFolder.getMessage(remoteMessage.getUid()); boolean messageChanged = syncFlags(localMessage, remoteMessage); if (messageChanged) { if (localMessage.isSet(Flag.DELETED) || 
isMessageSuppressed(account, folder, localMessage)) { for (MessagingListener l : getListeners()) { l.synchronizeMailboxRemovedMessage(account, folder, localMessage); } } else { for (MessagingListener l : getListeners()) { l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage); } } } progress.incrementAndGet(); for (MessagingListener l : getListeners()) { l.synchronizeMailboxProgress(account, folder, progress.get(), todo); } } } } private boolean syncFlags(Message localMessage, Message remoteMessage) throws MessagingException { boolean messageChanged = false; if (localMessage == null || localMessage.isSet(Flag.DELETED)) { return false; } if (remoteMessage.isSet(Flag.DELETED)) { if (localMessage.getFolder().getAccount().syncRemoteDeletions()) { localMessage.setFlag(Flag.DELETED, true); messageChanged = true; } } else { for (Flag flag : new Flag[] { Flag.SEEN, Flag.FLAGGED, Flag.ANSWERED }) { if (remoteMessage.isSet(flag) != localMessage.isSet(flag)) { localMessage.setFlag(flag, remoteMessage.isSet(flag)); messageChanged = true; } } } return messageChanged; } private String getRootCauseMessage(Throwable t) { Throwable rootCause = t; Throwable nextCause = rootCause; do { nextCause = rootCause.getCause(); if (nextCause != null) { rootCause = nextCause; } } while (nextCause != null); return rootCause.getMessage(); } private void queuePendingCommand(Account account, PendingCommand command) { try { LocalStore localStore = account.getLocalStore(); localStore.addPendingCommand(command); } catch (Exception e) { addErrorMessage(account, null, e); throw new RuntimeException("Unable to enqueue pending command", e); } } private void processPendingCommands(final Account account) { putBackground("processPendingCommands", null, new Runnable() { public void run() { try { processPendingCommandsSynchronous(account); } catch (MessagingException me) { Log.e(K9.LOG_TAG, "processPendingCommands", me); addErrorMessage(account, null, me); /* * Ignore any exceptions from the 
commands. Commands will be processed
                 * on the next round.
                 */
            }
        }
    });
}

/**
 * Drain the queue of pending commands for this account in order, dispatching
 * each to its handler and reporting progress to all listeners. A permanent
 * failure removes the command from the queue; a transient failure aborts
 * processing so the command is retried later (order is preserved).
 */
private void processPendingCommandsSynchronous(Account account) throws MessagingException {
    LocalStore localStore = account.getLocalStore();
    ArrayList<PendingCommand> commands = localStore.getPendingCommands();

    int progress = 0;
    int todo = commands.size();
    if (todo == 0) {
        return;
    }

    for (MessagingListener l : getListeners()) {
        l.pendingCommandsProcessing(account);
        l.synchronizeMailboxProgress(account, null, progress, todo);
    }

    PendingCommand processingCommand = null;
    try {
        for (PendingCommand command : commands) {
            processingCommand = command;
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "Processing pending command '" + command + "'");

            // The last dotted component of the command name is shown as its title.
            String[] components = command.command.split("\\.");
            String commandTitle = components[components.length - 1];
            for (MessagingListener l : getListeners()) {
                l.pendingCommandStarted(account, commandTitle);
            }
            /*
             * We specifically do not catch any exceptions here. If a command fails it is
             * most likely due to a server or IO error and it must be retried before any
             * other command processes. This maintains the order of the commands.
             */
            try {
                if (PENDING_COMMAND_APPEND.equals(command.command)) {
                    processPendingAppend(command, account);
                } else if (PENDING_COMMAND_SET_FLAG_BULK.equals(command.command)) {
                    processPendingSetFlag(command, account);
                } else if (PENDING_COMMAND_SET_FLAG.equals(command.command)) {
                    processPendingSetFlagOld(command, account);
                } else if (PENDING_COMMAND_MARK_ALL_AS_READ.equals(command.command)) {
                    processPendingMarkAllAsRead(command, account);
                } else if (PENDING_COMMAND_MOVE_OR_COPY_BULK.equals(command.command)) {
                    processPendingMoveOrCopy(command, account);
                } else if (PENDING_COMMAND_MOVE_OR_COPY.equals(command.command)) {
                    processPendingMoveOrCopyOld(command, account);
                } else if (PENDING_COMMAND_EMPTY_TRASH.equals(command.command)) {
                    processPendingEmptyTrash(command, account);
                } else if (PENDING_COMMAND_EXPUNGE.equals(command.command)) {
                    processPendingExpunge(command, account);
                }
                localStore.removePendingCommand(command);
                if (K9.DEBUG)
                    Log.d(K9.LOG_TAG, "Done processing pending command '" + command + "'");
            } catch (MessagingException me) {
                if (me.isPermanentFailure()) {
                    addErrorMessage(account, null, me);
                    Log.e(K9.LOG_TAG, "Failure of command '" + command + "' was permanent, removing command from queue");
                    localStore.removePendingCommand(processingCommand);
                } else {
                    throw me;
                }
            } finally {
                progress++;
                for (MessagingListener l : getListeners()) {
                    l.synchronizeMailboxProgress(account, null, progress, todo);
                    l.pendingCommandCompleted(account, commandTitle);
                }
            }
        }
    } catch (MessagingException me) {
        addErrorMessage(account, null, me);
        Log.e(K9.LOG_TAG, "Could not process command '" + processingCommand + "'", me);
        throw me;
    } finally {
        for (MessagingListener l : getListeners()) {
            l.pendingCommandsFinished(account);
        }
    }
}

/**
 * Process a pending append message command. This command uploads a local message to the
 * server, first checking to be sure that the server message is not newer than
 * the local message.
Once the local message is successfully processed it is deleted so * that the server message will be synchronized down without an additional copy being * created. * TODO update the local message UID instead of deleteing it * * @param command arguments = (String folder, String uid) * @param account * @throws MessagingException */ private void processPendingAppend(PendingCommand command, Account account) throws MessagingException { Folder remoteFolder = null; LocalFolder localFolder = null; try { String folder = command.arguments[0]; String uid = command.arguments[1]; if (account.getErrorFolderName().equals(folder)) { return; } LocalStore localStore = account.getLocalStore(); localFolder = localStore.getFolder(folder); LocalMessage localMessage = (LocalMessage) localFolder.getMessage(uid); if (localMessage == null) { return; } Store remoteStore = account.getRemoteStore(); remoteFolder = remoteStore.getFolder(folder); if (!remoteFolder.exists()) { if (!remoteFolder.create(FolderType.HOLDS_MESSAGES)) { return; } } remoteFolder.open(OpenMode.READ_WRITE); if (remoteFolder.getMode() != OpenMode.READ_WRITE) { return; } Message remoteMessage = null; if (!localMessage.getUid().startsWith(K9.LOCAL_UID_PREFIX)) { remoteMessage = remoteFolder.getMessage(localMessage.getUid()); } if (remoteMessage == null) { if (localMessage.isSet(Flag.X_REMOTE_COPY_STARTED)) { Log.w(K9.LOG_TAG, "Local message with uid " + localMessage.getUid() + " has flag " + Flag.X_REMOTE_COPY_STARTED + " already set, checking for remote message with " + " same message id"); String rUid = remoteFolder.getUidFromMessageId(localMessage); if (rUid != null) { Log.w(K9.LOG_TAG, "Local message has flag " + Flag.X_REMOTE_COPY_STARTED + " already set, and there is a remote message with " + " uid " + rUid + ", assuming message was already copied and aborting this copy"); String oldUid = localMessage.getUid(); localMessage.setUid(rUid); localFolder.changeUid(localMessage); for (MessagingListener l : getListeners()) { 
l.messageUidChanged(account, folder, oldUid, localMessage.getUid()); } return; } else { Log.w(K9.LOG_TAG, "No remote message with message-id found, proceeding with append"); } } /* * If the message does not exist remotely we just upload it and then * update our local copy with the new uid. */ FetchProfile fp = new FetchProfile(); fp.add(FetchProfile.Item.BODY); localFolder.fetch(new Message[] { localMessage } , fp, null); String oldUid = localMessage.getUid(); localMessage.setFlag(Flag.X_REMOTE_COPY_STARTED, true); remoteFolder.appendMessages(new Message[] { localMessage }); localFolder.changeUid(localMessage); for (MessagingListener l : getListeners()) { l.messageUidChanged(account, folder, oldUid, localMessage.getUid()); } } else { /* * If the remote message exists we need to determine which copy to keep. */ /* * See if the remote message is newer than ours. */ FetchProfile fp = new FetchProfile(); fp.add(FetchProfile.Item.ENVELOPE); remoteFolder.fetch(new Message[] { remoteMessage }, fp, null); Date localDate = localMessage.getInternalDate(); Date remoteDate = remoteMessage.getInternalDate(); if (remoteDate != null && remoteDate.compareTo(localDate) > 0) { /* * If the remote message is newer than ours we'll just * delete ours and move on. A sync will get the server message * if we need to be able to see it. */ localMessage.setFlag(Flag.X_DESTROYED, true); } else { /* * Otherwise we'll upload our message and then delete the remote message. 
*/ fp.clear(); fp = new FetchProfile(); fp.add(FetchProfile.Item.BODY); localFolder.fetch(new Message[] { localMessage }, fp, null); String oldUid = localMessage.getUid(); localMessage.setFlag(Flag.X_REMOTE_COPY_STARTED, true); remoteFolder.appendMessages(new Message[] { localMessage }); localFolder.changeUid(localMessage); for (MessagingListener l : getListeners()) { l.messageUidChanged(account, folder, oldUid, localMessage.getUid()); } if (remoteDate != null) { remoteMessage.setFlag(Flag.DELETED, true); if (Account.EXPUNGE_IMMEDIATELY.equals(account.getExpungePolicy())) { remoteFolder.expunge(); } } } } } finally { if (remoteFolder != null) { remoteFolder.close(); } if (localFolder != null) { localFolder.close(); } } } private void queueMoveOrCopy(Account account, String srcFolder, String destFolder, boolean isCopy, String uids[]) { if (account.getErrorFolderName().equals(srcFolder)) { return; } PendingCommand command = new PendingCommand(); command.command = PENDING_COMMAND_MOVE_OR_COPY_BULK; int length = 3 + uids.length; command.arguments = new String[length]; command.arguments[0] = srcFolder; command.arguments[1] = destFolder; command.arguments[2] = Boolean.toString(isCopy); for (int i = 0; i < uids.length; i++) { command.arguments[3 + i] = uids[i]; } queuePendingCommand(account, command); } /** * Process a pending trash message command. 
*
 * @param command arguments = (String srcFolder, String destFolder, String isCopy, String uid...)
 * @param account
 * @throws MessagingException
 */
private void processPendingMoveOrCopy(PendingCommand command, Account account) throws MessagingException {
    Folder remoteSrcFolder = null;
    Folder remoteDestFolder = null;
    try {
        String srcFolder = command.arguments[0];
        if (account.getErrorFolderName().equals(srcFolder)) {
            return;
        }
        String destFolder = command.arguments[1];
        String isCopyS = command.arguments[2];

        Store remoteStore = account.getRemoteStore();
        remoteSrcFolder = remoteStore.getFolder(srcFolder);

        // Only uids with a server-assigned value can be acted on remotely.
        List<Message> messages = new ArrayList<Message>();
        for (int i = 3; i < command.arguments.length; i++) {
            String uid = command.arguments[i];
            if (!uid.startsWith(K9.LOCAL_UID_PREFIX)) {
                messages.add(remoteSrcFolder.getMessage(uid));
            }
        }

        boolean isCopy = false;
        if (isCopyS != null) {
            isCopy = Boolean.parseBoolean(isCopyS);
        }

        if (!remoteSrcFolder.exists()) {
            throw new MessagingException("processingPendingMoveOrCopy: remoteFolder " + srcFolder + " does not exist", true);
        }
        remoteSrcFolder.open(OpenMode.READ_WRITE);
        if (remoteSrcFolder.getMode() != OpenMode.READ_WRITE) {
            throw new MessagingException("processingPendingMoveOrCopy: could not open remoteSrcFolder " + srcFolder + " read/write", true);
        }

        if (K9.DEBUG)
            Log.d(K9.LOG_TAG, "processingPendingMoveOrCopy: source folder = " + srcFolder
                  + ", " + messages.size() + " messages, destination folder = " + destFolder + ", isCopy = " + isCopy);

        if (!isCopy && destFolder.equals(account.getTrashFolderName())) {
            // Moving to Trash is implemented as a delete on the source folder.
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "processingPendingMoveOrCopy doing special case for deleting message");

            String destFolderName = destFolder;
            if (K9.FOLDER_NONE.equals(destFolderName)) {
                destFolderName = null;
            }
            remoteSrcFolder.delete(messages.toArray(EMPTY_MESSAGE_ARRAY), destFolderName);
        } else {
            remoteDestFolder = remoteStore.getFolder(destFolder);

            if (isCopy) {
                remoteSrcFolder.copyMessages(messages.toArray(EMPTY_MESSAGE_ARRAY), remoteDestFolder);
            } else {
                remoteSrcFolder.moveMessages(messages.toArray(EMPTY_MESSAGE_ARRAY), remoteDestFolder);
            }
        }
        if (!isCopy && Account.EXPUNGE_IMMEDIATELY.equals(account.getExpungePolicy())) {
            if (K9.DEBUG)
                Log.i(K9.LOG_TAG, "processingPendingMoveOrCopy expunging folder " + account.getDescription() + ":" + srcFolder);

            remoteSrcFolder.expunge();
        }
    } finally {
        if (remoteSrcFolder != null) {
            remoteSrcFolder.close();
        }
        if (remoteDestFolder != null) {
            remoteDestFolder.close();
        }
    }
}

/**
 * Enqueue a bulk flag change as a pending command and kick off processing
 * on a background thread. arguments = (folderName, newState, flag, uid...)
 */
private void queueSetFlag(final Account account, final String folderName, final String newState, final String flag, final String[] uids) {
    putBackground("queueSetFlag " + account.getDescription() + ":" + folderName, null, new Runnable() {
        public void run() {
            PendingCommand command = new PendingCommand();
            command.command = PENDING_COMMAND_SET_FLAG_BULK;
            int length = 3 + uids.length;
            command.arguments = new String[length];
            command.arguments[0] = folderName;
            command.arguments[1] = newState;
            command.arguments[2] = flag;
            for (int i = 0; i < uids.length; i++) {
                command.arguments[3 + i] = uids[i];
            }
            queuePendingCommand(account, command);
            processPendingCommands(account);
        }
    });
}

/**
 * Processes a pending mark read or unread command.
 *
 * @param command arguments = (String folder, String newState, String flag, String uid...)
 * @param account
 */
private void processPendingSetFlag(PendingCommand command, Account account) throws MessagingException {
    String folder = command.arguments[0];

    if (account.getErrorFolderName().equals(folder)) {
        return;
    }

    boolean newState = Boolean.parseBoolean(command.arguments[1]);

    Flag flag = Flag.valueOf(command.arguments[2]);

    Store remoteStore = account.getRemoteStore();
    Folder remoteFolder = remoteStore.getFolder(folder);
    if (!remoteFolder.exists() ||
            /*
             * Don't proceed if the remote folder doesn't support flags and
             * the flag to be changed isn't the deleted flag. This avoids
             * unnecessary connections to POP3 servers.
             */
            // TODO: This should actually call a supportsSettingFlag(flag) method.
(!remoteFolder.supportsFetchingFlags() && !Flag.DELETED.equals(flag))) { return; } try { remoteFolder.open(OpenMode.READ_WRITE); if (remoteFolder.getMode() != OpenMode.READ_WRITE) { return; } List<Message> messages = new ArrayList<Message>(); for (int i = 3; i < command.arguments.length; i++) { String uid = command.arguments[i]; if (!uid.startsWith(K9.LOCAL_UID_PREFIX)) { messages.add(remoteFolder.getMessage(uid)); } } if (messages.size() == 0) { return; } remoteFolder.setFlags(messages.toArray(EMPTY_MESSAGE_ARRAY), new Flag[] { flag }, newState); } finally { if (remoteFolder != null) { remoteFolder.close(); } } } // TODO: This method is obsolete and is only for transition from K-9 2.0 to K-9 2.1 // Eventually, it should be removed private void processPendingSetFlagOld(PendingCommand command, Account account) throws MessagingException { String folder = command.arguments[0]; String uid = command.arguments[1]; if (account.getErrorFolderName().equals(folder)) { return; } if (K9.DEBUG) Log.d(K9.LOG_TAG, "processPendingSetFlagOld: folder = " + folder + ", uid = " + uid); boolean newState = Boolean.parseBoolean(command.arguments[2]); Flag flag = Flag.valueOf(command.arguments[3]); Folder remoteFolder = null; try { Store remoteStore = account.getRemoteStore(); remoteFolder = remoteStore.getFolder(folder); if (!remoteFolder.exists()) { return; } remoteFolder.open(OpenMode.READ_WRITE); if (remoteFolder.getMode() != OpenMode.READ_WRITE) { return; } Message remoteMessage = null; if (!uid.startsWith(K9.LOCAL_UID_PREFIX)) { remoteMessage = remoteFolder.getMessage(uid); } if (remoteMessage == null) { return; } remoteMessage.setFlag(flag, newState); } finally { if (remoteFolder != null) { remoteFolder.close(); } } } private void queueExpunge(final Account account, final String folderName) { putBackground("queueExpunge " + account.getDescription() + ":" + folderName, null, new Runnable() { public void run() { PendingCommand command = new PendingCommand(); command.command = 
PENDING_COMMAND_EXPUNGE; command.arguments = new String[1]; command.arguments[0] = folderName; queuePendingCommand(account, command); processPendingCommands(account); } }); } private void processPendingExpunge(PendingCommand command, Account account) throws MessagingException { String folder = command.arguments[0]; if (account.getErrorFolderName().equals(folder)) { return; } if (K9.DEBUG) Log.d(K9.LOG_TAG, "processPendingExpunge: folder = " + folder); Store remoteStore = account.getRemoteStore(); Folder remoteFolder = remoteStore.getFolder(folder); try { if (!remoteFolder.exists()) { return; } remoteFolder.open(OpenMode.READ_WRITE); if (remoteFolder.getMode() != OpenMode.READ_WRITE) { return; } remoteFolder.expunge(); if (K9.DEBUG) Log.d(K9.LOG_TAG, "processPendingExpunge: complete for folder = " + folder); } finally { if (remoteFolder != null) { remoteFolder.close(); } } } // TODO: This method is obsolete and is only for transition from K-9 2.0 to K-9 2.1 // Eventually, it should be removed private void processPendingMoveOrCopyOld(PendingCommand command, Account account) throws MessagingException { String srcFolder = command.arguments[0]; String uid = command.arguments[1]; String destFolder = command.arguments[2]; String isCopyS = command.arguments[3]; boolean isCopy = false; if (isCopyS != null) { isCopy = Boolean.parseBoolean(isCopyS); } if (account.getErrorFolderName().equals(srcFolder)) { return; } Store remoteStore = account.getRemoteStore(); Folder remoteSrcFolder = remoteStore.getFolder(srcFolder); Folder remoteDestFolder = remoteStore.getFolder(destFolder); if (!remoteSrcFolder.exists()) { throw new MessagingException("processPendingMoveOrCopyOld: remoteFolder " + srcFolder + " does not exist", true); } remoteSrcFolder.open(OpenMode.READ_WRITE); if (remoteSrcFolder.getMode() != OpenMode.READ_WRITE) { throw new MessagingException("processPendingMoveOrCopyOld: could not open remoteSrcFolder " + srcFolder + " read/write", true); } Message remoteMessage = 
null; if (!uid.startsWith(K9.LOCAL_UID_PREFIX)) { remoteMessage = remoteSrcFolder.getMessage(uid); } if (remoteMessage == null) { throw new MessagingException("processPendingMoveOrCopyOld: remoteMessage " + uid + " does not exist", true); } if (K9.DEBUG) Log.d(K9.LOG_TAG, "processPendingMoveOrCopyOld: source folder = " + srcFolder + ", uid = " + uid + ", destination folder = " + destFolder + ", isCopy = " + isCopy); if (!isCopy && destFolder.equals(account.getTrashFolderName())) { if (K9.DEBUG) Log.d(K9.LOG_TAG, "processPendingMoveOrCopyOld doing special case for deleting message"); remoteMessage.delete(account.getTrashFolderName()); remoteSrcFolder.close(); return; } remoteDestFolder.open(OpenMode.READ_WRITE); if (remoteDestFolder.getMode() != OpenMode.READ_WRITE) { throw new MessagingException("processPendingMoveOrCopyOld: could not open remoteDestFolder " + srcFolder + " read/write", true); } if (isCopy) { remoteSrcFolder.copyMessages(new Message[] { remoteMessage }, remoteDestFolder); } else { remoteSrcFolder.moveMessages(new Message[] { remoteMessage }, remoteDestFolder); } remoteSrcFolder.close(); remoteDestFolder.close(); } private void processPendingMarkAllAsRead(PendingCommand command, Account account) throws MessagingException { String folder = command.arguments[0]; Folder remoteFolder = null; LocalFolder localFolder = null; try { Store localStore = account.getLocalStore(); localFolder = (LocalFolder) localStore.getFolder(folder); localFolder.open(OpenMode.READ_WRITE); Message[] messages = localFolder.getMessages(null, false); for (Message message : messages) { if (!message.isSet(Flag.SEEN)) { message.setFlag(Flag.SEEN, true); for (MessagingListener l : getListeners()) { l.listLocalMessagesUpdateMessage(account, folder, message); } } } localFolder.setUnreadMessageCount(0); for (MessagingListener l : getListeners()) { l.folderStatusChanged(account, folder, 0); } if (account.getErrorFolderName().equals(folder)) { return; } Store remoteStore = 
account.getRemoteStore(); remoteFolder = remoteStore.getFolder(folder); if (!remoteFolder.exists()) { return; } remoteFolder.open(OpenMode.READ_WRITE); if (remoteFolder.getMode() != OpenMode.READ_WRITE) { return; } remoteFolder.setFlags(new Flag[] {Flag.SEEN}, true); remoteFolder.close(); } catch (UnsupportedOperationException uoe) { Log.w(K9.LOG_TAG, "Could not mark all server-side as read because store doesn't support operation", uoe); } finally { if (localFolder != null) { localFolder.close(); } if (remoteFolder != null) { remoteFolder.close(); } } } static long uidfill = 0; static AtomicBoolean loopCatch = new AtomicBoolean(); public void addErrorMessage(Account account, String subject, Throwable t) { if (!loopCatch.compareAndSet(false, true)) { return; } try { if (t == null) { return; } CharArrayWriter baos = new CharArrayWriter(t.getStackTrace().length * 10); PrintWriter ps = new PrintWriter(baos); t.printStackTrace(ps); ps.close(); if (subject == null) { subject = getRootCauseMessage(t); } addErrorMessage(account, subject, baos.toString()); } catch (Throwable it) { Log.e(K9.LOG_TAG, "Could not save error message to " + account.getErrorFolderName(), it); } finally { loopCatch.set(false); } } public void addErrorMessage(Account account, String subject, String body) { if (!K9.ENABLE_ERROR_FOLDER) { return; } if (!loopCatch.compareAndSet(false, true)) { return; } try { if (body == null || body.length() < 1) { return; } Store localStore = account.getLocalStore(); LocalFolder localFolder = (LocalFolder)localStore.getFolder(account.getErrorFolderName()); Message[] messages = new Message[1]; MimeMessage message = new MimeMessage(); message.setBody(new TextBody(body)); message.setFlag(Flag.X_DOWNLOADED_FULL, true); message.setSubject(subject); long nowTime = System.currentTimeMillis(); Date nowDate = new Date(nowTime); message.setInternalDate(nowDate); message.addSentDate(nowDate); message.setFrom(new Address(account.getEmail(), "K9mail internal")); messages[0] = 
message; localFolder.appendMessages(messages); localFolder.deleteMessagesOlderThan(nowTime - (15 * 60 * 1000)); } catch (Throwable it) { Log.e(K9.LOG_TAG, "Could not save error message to " + account.getErrorFolderName(), it); } finally { loopCatch.set(false); } } public void markAllMessagesRead(final Account account, final String folder) { if (K9.DEBUG) Log.i(K9.LOG_TAG, "Marking all messages in " + account.getDescription() + ":" + folder + " as read"); List<String> args = new ArrayList<String>(); args.add(folder); PendingCommand command = new PendingCommand(); command.command = PENDING_COMMAND_MARK_ALL_AS_READ; command.arguments = args.toArray(EMPTY_STRING_ARRAY); queuePendingCommand(account, command); processPendingCommands(account); } public void setFlag( final Message[] messages, final Flag flag, final boolean newState) { actOnMessages(messages, new MessageActor() { @Override public void act(final Account account, final Folder folder, final List<Message> messages) { String[] uids = new String[messages.size()]; for (int i = 0; i < messages.size(); i++) { uids[i] = messages.get(i).getUid(); } setFlag(account, folder.getName(), uids, flag, newState); } }); } public void setFlag( final Account account, final String folderName, final String[] uids, final Flag flag, final boolean newState) { // TODO: put this into the background, but right now that causes odd behavior // because the FolderMessageList doesn't have its own cache of the flag states Folder localFolder = null; try { Store localStore = account.getLocalStore(); localFolder = localStore.getFolder(folderName); localFolder.open(OpenMode.READ_WRITE); ArrayList<Message> messages = new ArrayList<Message>(); for (String uid : uids) { // Allows for re-allowing sending of messages that could not be sent if (flag == Flag.FLAGGED && !newState && uid != null && account.getOutboxFolderName().equals(folderName)) { sendCount.remove(uid); } Message msg = localFolder.getMessage(uid); if (msg != null) { messages.add(msg); } 
} localFolder.setFlags(messages.toArray(EMPTY_MESSAGE_ARRAY), new Flag[] {flag}, newState); for (MessagingListener l : getListeners()) { l.folderStatusChanged(account, folderName, localFolder.getUnreadMessageCount()); } if (account.getErrorFolderName().equals(folderName)) { return; } queueSetFlag(account, folderName, Boolean.toString(newState), flag.toString(), uids); processPendingCommands(account); } catch (MessagingException me) { addErrorMessage(account, null, me); throw new RuntimeException(me); } finally { if (localFolder != null) { localFolder.close(); } } }//setMesssageFlag public void clearAllPending(final Account account) { try { Log.w(K9.LOG_TAG, "Clearing pending commands!"); LocalStore localStore = account.getLocalStore(); localStore.removePendingCommands(); } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Unable to clear pending command", me); addErrorMessage(account, null, me); } } public void loadMessageForViewRemote(final Account account, final String folder, final String uid, final MessagingListener listener) { put("loadMessageForViewRemote", listener, new Runnable() { public void run() { Folder remoteFolder = null; LocalFolder localFolder = null; try { LocalStore localStore = account.getLocalStore(); localFolder = localStore.getFolder(folder); localFolder.open(OpenMode.READ_WRITE); Message message = localFolder.getMessage(uid); if (message.isSet(Flag.X_DOWNLOADED_FULL)) { /* * If the message has been synchronized since we were called we'll * just hand it back cause it's ready to go. */ FetchProfile fp = new FetchProfile(); fp.add(FetchProfile.Item.ENVELOPE); fp.add(FetchProfile.Item.BODY); localFolder.fetch(new Message[] { message }, fp, null); } else { /* * At this point the message is not available, so we need to download it * fully if possible. 
*/ Store remoteStore = account.getRemoteStore(); remoteFolder = remoteStore.getFolder(folder); remoteFolder.open(OpenMode.READ_WRITE); // Get the remote message and fully download it Message remoteMessage = remoteFolder.getMessage(uid); FetchProfile fp = new FetchProfile(); fp.add(FetchProfile.Item.BODY); remoteFolder.fetch(new Message[] { remoteMessage }, fp, null); // Store the message locally and load the stored message into memory localFolder.appendMessages(new Message[] { remoteMessage }); fp.add(FetchProfile.Item.ENVELOPE); message = localFolder.getMessage(uid); localFolder.fetch(new Message[] { message }, fp, null); // Mark that this message is now fully synched message.setFlag(Flag.X_DOWNLOADED_FULL, true); } // now that we have the full message, refresh the headers for (MessagingListener l : getListeners(listener)) { l.loadMessageForViewHeadersAvailable(account, folder, uid, message); } for (MessagingListener l : getListeners(listener)) { l.loadMessageForViewBodyAvailable(account, folder, uid, message); } for (MessagingListener l : getListeners(listener)) { l.loadMessageForViewFinished(account, folder, uid, message); } } catch (Exception e) { for (MessagingListener l : getListeners(listener)) { l.loadMessageForViewFailed(account, folder, uid, e); } addErrorMessage(account, null, e); } finally { if (remoteFolder!=null) { remoteFolder.close(); } if (localFolder!=null) { localFolder.close(); } }//finally }//run }); } public void loadMessageForView(final Account account, final String folder, final String uid, final MessagingListener listener) { for (MessagingListener l : getListeners(listener)) { l.loadMessageForViewStarted(account, folder, uid); } new Thread(new Runnable() { public void run() { try { LocalStore localStore = account.getLocalStore(); LocalFolder localFolder = localStore.getFolder(folder); localFolder.open(OpenMode.READ_WRITE); LocalMessage message = (LocalMessage)localFolder.getMessage(uid); if (message==null || message.getId()==0) { throw new 
IllegalArgumentException("Message not found: folder=" + folder + ", uid=" + uid); } if (!message.isSet(Flag.SEEN)) { message.setFlag(Flag.SEEN, true); setFlag(new Message[] { message }, Flag.SEEN, true); } for (MessagingListener l : getListeners(listener)) { l.loadMessageForViewHeadersAvailable(account, folder, uid, message); } FetchProfile fp = new FetchProfile(); fp.add(FetchProfile.Item.ENVELOPE); fp.add(FetchProfile.Item.BODY); localFolder.fetch(new Message[] { message }, fp, null); localFolder.close(); for (MessagingListener l : getListeners(listener)) { l.loadMessageForViewBodyAvailable(account, folder, uid, message); } for (MessagingListener l : getListeners(listener)) { l.loadMessageForViewFinished(account, folder, uid, message); } } catch (Exception e) { for (MessagingListener l : getListeners(listener)) { l.loadMessageForViewFailed(account, folder, uid, e); } addErrorMessage(account, null, e); } } }).start(); } /** * Attempts to load the attachment specified by part from the given account and message. * @param account * @param message * @param part * @param listener */ public void loadAttachment( final Account account, final Message message, final Part part, final Object tag, final MessagingListener listener) { /* * Check if the attachment has already been downloaded. If it has there's no reason to * download it, so we just tell the listener that it's ready to go. */ try { if (part.getBody() != null) { for (MessagingListener l : getListeners()) { l.loadAttachmentStarted(account, message, part, tag, false); } if (listener != null) { listener.loadAttachmentStarted(account, message, part, tag, false); } for (MessagingListener l : getListeners()) { l.loadAttachmentFinished(account, message, part, tag); } if (listener != null) { listener.loadAttachmentFinished(account, message, part, tag); } return; } } catch (MessagingException me) { /* * If the header isn't there the attachment isn't downloaded yet, so just continue * on. 
*/
    }

    for (MessagingListener l : getListeners()) {
        l.loadAttachmentStarted(account, message, part, tag, true);
    }
    if (listener != null) {
        // NOTE(review): the broadcast above passes true for the last argument but the
        // direct listener gets false -- looks inconsistent; confirm intent.
        listener.loadAttachmentStarted(account, message, part, tag, false);
    }

    put("loadAttachment", listener, new Runnable() {
        public void run() {
            Folder remoteFolder = null;
            LocalFolder localFolder = null;
            try {
                LocalStore localStore = account.getLocalStore();

                /*
                 * We clear out any attachments already cached in the entire store and then
                 * we update the passed in message to reflect that there are no cached
                 * attachments. This is in support of limiting the account to having one
                 * attachment downloaded at a time.
                 */
                localStore.pruneCachedAttachments();
                ArrayList<Part> viewables = new ArrayList<Part>();
                ArrayList<Part> attachments = new ArrayList<Part>();
                MimeUtility.collectParts(message, viewables, attachments);
                for (Part attachment : attachments) {
                    attachment.setBody(null);
                }

                Store remoteStore = account.getRemoteStore();
                localFolder = localStore.getFolder(message.getFolder().getName());
                remoteFolder = remoteStore.getFolder(message.getFolder().getName());
                remoteFolder.open(OpenMode.READ_WRITE);

                //FIXME: This is an ugly hack that won't be needed once the Message objects have been united.
                Message remoteMessage = remoteFolder.getMessage(message.getUid());
                remoteMessage.setBody(message.getBody());
                remoteFolder.fetchPart(remoteMessage, part, null);

                localFolder.updateMessage((LocalMessage)message);
                for (MessagingListener l : getListeners()) {
                    l.loadAttachmentFinished(account, message, part, tag);
                }
                if (listener != null) {
                    listener.loadAttachmentFinished(account, message, part, tag);
                }
            } catch (MessagingException me) {
                if (K9.DEBUG)
                    Log.v(K9.LOG_TAG, "Exception loading attachment", me);

                for (MessagingListener l : getListeners()) {
                    l.loadAttachmentFailed(account, message, part, tag, me.getMessage());
                }
                if (listener != null) {
                    listener.loadAttachmentFailed(account, message, part, tag, me.getMessage());
                }
                addErrorMessage(account, null, me);
            } finally {
                if (remoteFolder != null) {
                    remoteFolder.close();
                }

                if (localFolder != null) {
                    localFolder.close();
                }
            }
        }
    });
}

/**
 * Stores the given message in the Outbox and starts a sendPendingMessages command to
 * attempt to send the message.
 * @param account
 * @param message
 * @param listener
 */
public void sendMessage(final Account account,
                        final Message message,
                        MessagingListener listener) {
    try {
        LocalStore localStore = account.getLocalStore();
        LocalFolder localFolder = localStore.getFolder(account.getOutboxFolderName());
        localFolder.open(OpenMode.READ_WRITE);
        localFolder.appendMessages(new Message[] { message });
        Message localMessage = localFolder.getMessage(message.getUid());
        localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true);
        localFolder.close();
        sendPendingMessages(account, listener);
    } catch (Exception e) {
        /*
        for (MessagingListener l : getListeners())
        {
            // TODO general failed
        }
        */
        addErrorMessage(account, null, e);

    }
}

/**
 * Attempt to send any messages that are sitting in the Outbox.
 * @param account
 * @param listener
 */
public void sendPendingMessages(final Account account,
                                MessagingListener listener) {
    putBackground("sendPendingMessages", listener, new Runnable() {
        public void run() {
            sendPendingMessagesSynchronous(account);
        }
    });
}

/**
 * Check whether the account's Outbox contains at least one message.
 * @param account account whose Outbox is inspected
 * @return true if the Outbox exists and holds one or more messages
 */
public boolean messagesPendingSend(final Account account) {
    Folder localFolder = null;
    try {
        Store localStore = account.getLocalStore();
        localFolder = localStore.getFolder(
                          account.getOutboxFolderName());
        if (!localFolder.exists()) {
            // No Outbox at all means nothing can be pending.
            return false;
        }

        localFolder.open(OpenMode.READ_WRITE);

        int localMessages = localFolder.getMessageCount();
        if (localMessages > 0) {
            return true;
        }
    } catch (Exception e) {
        Log.e(K9.LOG_TAG, "Exception while checking for unsent messages", e);
    } finally {
        if (localFolder != null) {
            localFolder.close();
        }
    }
    return false;
}

/**
 * Attempt to send any messages that are sitting in the Outbox.
 * Runs on the calling thread; use sendPendingMessages() for the queued,
 * asynchronous variant.
 * @param account account whose Outbox is drained
 */
public void sendPendingMessagesSynchronous(final Account account) {
    Folder localFolder = null;
    try {
        Store localStore = account.getLocalStore();
        localFolder = localStore.getFolder(
                          account.getOutboxFolderName());
        if (!localFolder.exists()) {
            return;
        }
        for (MessagingListener l : getListeners()) {
            l.sendPendingMessagesStarted(account);
        }
        localFolder.open(OpenMode.READ_WRITE);

        Message[] localMessages = localFolder.getMessages(null);

        boolean anyFlagged = false;   // set when a message exhausts its send attempts
        int progress = 0;
        int todo = localMessages.length;
        for (MessagingListener l : getListeners()) {
            l.synchronizeMailboxProgress(account, account.getSentFolderName(), progress, todo);
        }
        /*
         * The profile we will use to pull all of the content
         * for a given local message into memory for sending.
         */
        FetchProfile fp = new FetchProfile();
        fp.add(FetchProfile.Item.ENVELOPE);
        fp.add(FetchProfile.Item.BODY);

        if (K9.DEBUG)
            Log.i(K9.LOG_TAG, "Scanning folder '" + account.getOutboxFolderName() + "' (" + ((LocalFolder)localFolder).getId() + ") for messages to send");

        Transport transport = Transport.getInstance(account);
        for (Message message : localMessages) {
            if (message.isSet(Flag.DELETED)) {
                message.setFlag(Flag.X_DESTROYED, true);
                continue;
            }
            if (message.isSet(Flag.FLAGGED)) {
                // FLAGGED marks messages that repeatedly failed to send; skip them.
                if (K9.DEBUG)
                    Log.i(K9.LOG_TAG, "Skipping sending FLAGGED message " + message.getUid());
                continue;
            }
            try {
                // Track per-message send attempts across invocations; sendCount
                // appears to be a concurrent map keyed by UID (putIfAbsent).
                AtomicInteger count = new AtomicInteger(0);
                AtomicInteger oldCount = sendCount.putIfAbsent(message.getUid(), count);
                if (oldCount != null) {
                    count = oldCount;
                }

                if (K9.DEBUG)
                    Log.i(K9.LOG_TAG, "Send count for message " + message.getUid() + " is " + count.get());

                if (count.incrementAndGet() > K9.MAX_SEND_ATTEMPTS) {
                    // Give up on this message; the anyFlagged notification below
                    // tells the user about it.
                    Log.e(K9.LOG_TAG, "Send count for message " + message.getUid() + " has exceeded maximum attempt threshold, flagging");
                    message.setFlag(Flag.FLAGGED, true);
                    anyFlagged = true;
                    continue;
                }

                localFolder.fetch(new Message[] { message }, fp, null);
                try {
                    message.setFlag(Flag.X_SEND_IN_PROGRESS, true);
                    if (K9.DEBUG)
                        Log.i(K9.LOG_TAG, "Sending message with UID " + message.getUid());
                    transport.sendMessage(message);
                    message.setFlag(Flag.X_SEND_IN_PROGRESS, false);
                    message.setFlag(Flag.SEEN, true);
                    progress++;
                    for (MessagingListener l : getListeners()) {
                        l.synchronizeMailboxProgress(account, account.getSentFolderName(), progress, todo);
                    }
                    if (K9.FOLDER_NONE.equals(account.getSentFolderName())) {
                        // No Sent folder configured: drop our local copy.
                        if (K9.DEBUG)
                            Log.i(K9.LOG_TAG, "Sent folder set to " + K9.FOLDER_NONE + ", deleting sent message");
                        message.setFlag(Flag.DELETED, true);
                    } else {
                        LocalFolder localSentFolder =
                            (LocalFolder) localStore.getFolder(
                                account.getSentFolderName());
                        if (K9.DEBUG)
                            Log.i(K9.LOG_TAG, "Moving sent message to folder '" + account.getSentFolderName() + "' (" + localSentFolder.getId() + ") ");

                        localFolder.moveMessages(
                            new Message[] { message },
                            localSentFolder);

                        if (K9.DEBUG)
                            Log.i(K9.LOG_TAG, "Moved sent message to folder '" + account.getSentFolderName() + "' (" + localSentFolder.getId() + ") ");

                        // Mirror the move on the server by queueing an APPEND
                        // into the remote Sent folder.
                        PendingCommand command = new PendingCommand();
                        command.command = PENDING_COMMAND_APPEND;
                        command.arguments =
                            new String[] {
                            localSentFolder.getName(),
                            message.getUid()
                        };
                        queuePendingCommand(account, command);
                        processPendingCommands(account);
                    }
                } catch (Exception e) {
                    if (e instanceof MessagingException) {
                        MessagingException me = (MessagingException)e;
                        if (!me.isPermanentFailure()) {
                            // Decrement the counter if the message could not possibly have been sent
                            int newVal = count.decrementAndGet();
                            if (K9.DEBUG)
                                Log.i(K9.LOG_TAG, "Decremented send count for message " + message.getUid() + " to " + newVal + "; no possible send");
                        }
                    }
                    message.setFlag(Flag.X_SEND_FAILED, true);
                    Log.e(K9.LOG_TAG, "Failed to send message", e);
                    for (MessagingListener l : getListeners()) {
                        l.synchronizeMailboxFailed(
                            account,
                            localFolder.getName(),
                            getRootCauseMessage(e));
                    }
                    addErrorMessage(account, null, e);
                }
            } catch (Exception e) {
                // Fetching the message content failed (distinct from a transport
                // failure handled above).
                Log.e(K9.LOG_TAG, "Failed to fetch message for sending", e);

                for (MessagingListener l : getListeners()) {
                    l.synchronizeMailboxFailed(
                        account,
                        localFolder.getName(),
                        getRootCauseMessage(e));
                }
                addErrorMessage(account, null, e);

                /*
                 * We ignore this exception because a future refresh will retry this
                 * message.
*/ } } if (localFolder.getMessageCount() == 0) { localFolder.delete(false); } for (MessagingListener l : getListeners()) { l.sendPendingMessagesCompleted(account); } if (anyFlagged) { addErrorMessage(account, mApplication.getString(R.string.send_failure_subject), mApplication.getString(R.string.send_failure_body_fmt, K9.ERROR_FOLDER_NAME)); NotificationManager notifMgr = (NotificationManager)mApplication.getSystemService(Context.NOTIFICATION_SERVICE); Notification notif = new Notification(R.drawable.stat_notify_email_generic, mApplication.getString(R.string.send_failure_subject), System.currentTimeMillis()); Intent i = MessageList.actionHandleFolderIntent(mApplication, account, account.getErrorFolderName()); PendingIntent pi = PendingIntent.getActivity(mApplication, 0, i, 0); notif.setLatestEventInfo(mApplication, mApplication.getString(R.string.send_failure_subject), mApplication.getString(R.string.send_failure_body_abbrev, K9.ERROR_FOLDER_NAME), pi); notif.flags |= Notification.FLAG_SHOW_LIGHTS; notif.ledARGB = K9.NOTIFICATION_LED_SENDING_FAILURE_COLOR; notif.ledOnMS = K9.NOTIFICATION_LED_FAST_ON_TIME; notif.ledOffMS = K9.NOTIFICATION_LED_FAST_OFF_TIME; notifMgr.notify(-1000 - account.getAccountNumber(), notif); } } catch (Exception e) { for (MessagingListener l : getListeners()) { l.sendPendingMessagesFailed(account); } addErrorMessage(account, null, e); } finally { if (localFolder != null) { try { localFolder.close(); } catch (Exception e) { Log.e(K9.LOG_TAG, "Exception while closing folder", e); } } } } public void getAccountStats(final Context context, final Account account, final MessagingListener l) { Runnable unreadRunnable = new Runnable() { public void run() { try { AccountStats stats = account.getStats(context); l.accountStatusChanged(account, stats); } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Count not get unread count for account " + account.getDescription(), me); } } }; put("getAccountStats:" + account.getDescription(), l, unreadRunnable); 
}

/**
 * Asynchronously look up the unread-message count of one local folder and
 * report it through {@code folderStatusChanged}. The listener is always
 * called; on failure the reported count is 0.
 * @param account account owning the folder
 * @param folderName name of the local folder to inspect
 * @param l listener receiving the result
 */
public void getFolderUnreadMessageCount(final Account account, final String folderName,
                                        final MessagingListener l) {
    Runnable unreadRunnable = new Runnable() {
        public void run() {
            int unreadMessageCount = 0;
            try {
                Folder localFolder = account.getLocalStore().getFolder(folderName);
                unreadMessageCount = localFolder.getUnreadMessageCount();
            } catch (MessagingException me) {
                // Log typo fixed: "Count not" -> "Could not".
                Log.e(K9.LOG_TAG, "Could not get unread count for account " + account.getDescription(), me);
            }
            l.folderStatusChanged(account, folderName, unreadMessageCount);
        }
    };
    put("getFolderUnread:" + account.getDescription() + ":" + folderName, l, unreadRunnable);
}

/**
 * A message can take part in a server-side move only if it already exists on
 * the server, i.e. its UID is not a locally generated placeholder.
 */
public boolean isMoveCapable(Message message) {
    return !message.getUid().startsWith(K9.LOCAL_UID_PREFIX);
}

/** Per-message copy capability has the same requirement as move. */
public boolean isCopyCapable(Message message) {
    return isMoveCapable(message);
}

/** @return true when both the local and the remote store support moving. */
public boolean isMoveCapable(final Account account) {
    try {
        Store localStore = account.getLocalStore();
        Store remoteStore = account.getRemoteStore();
        return localStore.isMoveCapable() && remoteStore.isMoveCapable();
    } catch (MessagingException me) {
        Log.e(K9.LOG_TAG, "Exception while ascertaining move capability", me);
        return false;
    }
}

/** @return true when both the local and the remote store support copying. */
public boolean isCopyCapable(final Account account) {
    try {
        Store localStore = account.getLocalStore();
        Store remoteStore = account.getRemoteStore();
        return localStore.isCopyCapable() && remoteStore.isCopyCapable();
    } catch (MessagingException me) {
        Log.e(K9.LOG_TAG, "Exception while ascertaining copy capability", me);
        return false;
    }
}

/**
 * Move messages in the background. The messages are suppressed (hidden from
 * view) immediately so the move appears instantaneous; they are unsuppressed
 * once the synchronous move completes.
 */
public void moveMessages(final Account account, final String srcFolder,
                         final Message[] messages, final String destFolder,
                         final MessagingListener listener) {
    for (Message message : messages) {
        suppressMessage(account, srcFolder, message);
    }

    putBackground("moveMessages", null, new Runnable() {
        public void run() {
            moveOrCopyMessageSynchronous(account, srcFolder, messages, destFolder, false, listener);
        }
    });
}

public void moveMessage(final Account account, final String srcFolder, final Message
message, final String destFolder, final MessagingListener listener) { moveMessages(account, srcFolder, new Message[] { message }, destFolder, listener); } public void copyMessages(final Account account, final String srcFolder, final Message[] messages, final String destFolder, final MessagingListener listener) { putBackground("copyMessages", null, new Runnable() { public void run() { moveOrCopyMessageSynchronous(account, srcFolder, messages, destFolder, true, listener); } }); } public void copyMessage(final Account account, final String srcFolder, final Message message, final String destFolder, final MessagingListener listener) { copyMessages(account, srcFolder, new Message[] { message }, destFolder, listener); } private void moveOrCopyMessageSynchronous(final Account account, final String srcFolder, final Message[] inMessages, final String destFolder, final boolean isCopy, MessagingListener listener) { try { Store localStore = account.getLocalStore(); Store remoteStore = account.getRemoteStore(); if (!isCopy && (!remoteStore.isMoveCapable() || !localStore.isMoveCapable())) { return; } if (isCopy && (!remoteStore.isCopyCapable() || !localStore.isCopyCapable())) { return; } Folder localSrcFolder = localStore.getFolder(srcFolder); Folder localDestFolder = localStore.getFolder(destFolder); List<String> uids = new LinkedList<String>(); for (Message message : inMessages) { String uid = message.getUid(); if (!uid.startsWith(K9.LOCAL_UID_PREFIX)) { uids.add(uid); } } Message[] messages = localSrcFolder.getMessages(uids.toArray(EMPTY_STRING_ARRAY), null); if (messages.length > 0) { Map<String, Message> origUidMap = new HashMap<String, Message>(); for (Message message : messages) { origUidMap.put(message.getUid(), message); } if (K9.DEBUG) Log.i(K9.LOG_TAG, "moveOrCopyMessageSynchronous: source folder = " + srcFolder + ", " + messages.length + " messages, " + ", destination folder = " + destFolder + ", isCopy = " + isCopy); if (isCopy) { FetchProfile fp = new 
FetchProfile();
                fp.add(FetchProfile.Item.ENVELOPE);
                fp.add(FetchProfile.Item.BODY);
                // A copy needs the full content pulled into memory first.
                localSrcFolder.fetch(messages, fp, null);
                localSrcFolder.copyMessages(messages, localDestFolder);
            } else {
                localSrcFolder.moveMessages(messages, localDestFolder);
                // Moving assigns new UIDs; tell listeners about the mapping and
                // lift the suppression applied when the move was requested.
                for (String origUid : origUidMap.keySet()) {
                    for (MessagingListener l : getListeners()) {
                        l.messageUidChanged(account, srcFolder, origUid, origUidMap.get(origUid).getUid());
                    }
                    unsuppressMessage(account, srcFolder, origUid);
                }
            }
            // Queue the matching server-side move/copy using the original UIDs.
            queueMoveOrCopy(account, srcFolder, destFolder, isCopy, origUidMap.keySet().toArray(EMPTY_STRING_ARRAY));
        }

        processPendingCommands(account);
    } catch (MessagingException me) {
        addErrorMessage(account, null, me);

        throw new RuntimeException("Error moving message", me);
    }
}

/** Queue a server-side expunge of the given folder. */
public void expunge(final Account account, final String folder,
                    final MessagingListener listener) {
    putBackground("expunge", null, new Runnable() {
        public void run() {
            queueExpunge(account, folder);
        }
    });
}

/**
 * Delete the draft with the given UID from the account's Drafts folder.
 * A UID that no longer exists is silently ignored.
 */
public void deleteDraft(final Account account, String uid) {
    LocalFolder localFolder = null;
    try {
        LocalStore localStore = account.getLocalStore();
        localFolder = localStore.getFolder(account.getDraftsFolderName());
        localFolder.open(OpenMode.READ_WRITE);
        Message message = localFolder.getMessage(uid);
        if (message != null) {
            deleteMessages(new Message[] { message }, null);
        }
    } catch (MessagingException me) {
        addErrorMessage(account, null, me);
    } finally {
        if (localFolder != null) {
            localFolder.close();
        }
    }
}

/**
 * Delete messages (possibly spanning several accounts/folders): suppress them
 * from view immediately, then perform the real deletion in the background.
 */
public void deleteMessages(final Message[] messages, final MessagingListener listener) {
    actOnMessages(messages, new MessageActor() {

        @Override
        public void act(final Account account, final Folder folder,
        final List<Message> messages) {
            for (Message message : messages) {
                suppressMessage(account, folder.getName(), message);
            }

            putBackground("deleteMessages", null, new Runnable() {
                public void run() {
                    deleteMessagesSynchronous(account, folder.getName(), messages.toArray(EMPTY_MESSAGE_ARRAY), listener);
                }
            });
        }

    });
}

private void
deleteMessagesSynchronous(final Account account, final String folder, final Message[] messages,
                          MessagingListener listener) {
    Folder localFolder = null;
    Folder localTrashFolder = null;
    String[] uids = getUidsFromMessages(messages);
    try {
        //We need to make these callbacks before moving the messages to the trash
        //as messages get a new UID after being moved
        for (Message message : messages) {
            if (listener != null) {
                listener.messageDeleted(account, folder, message);
            }
            for (MessagingListener l : getListeners()) {
                l.messageDeleted(account, folder, message);
            }
        }
        Store localStore = account.getLocalStore();
        localFolder = localStore.getFolder(folder);
        if (folder.equals(account.getTrashFolderName()) || K9.FOLDER_NONE.equals(account.getTrashFolderName())) {
            // Already in Trash, or no Trash configured: flag deleted in place.
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "Deleting messages in trash folder or trash set to -None-, not copying");

            localFolder.setFlags(messages, new Flag[] { Flag.DELETED }, true);
        } else {
            localTrashFolder = localStore.getFolder(account.getTrashFolderName());
            if (!localTrashFolder.exists()) {
                localTrashFolder.create(Folder.FolderType.HOLDS_MESSAGES);
            }
            if (localTrashFolder.exists()) {
                if (K9.DEBUG)
                    Log.d(K9.LOG_TAG, "Deleting messages in normal folder, moving");

                localFolder.moveMessages(messages, localTrashFolder);
            }
        }

        for (MessagingListener l : getListeners()) {
            l.folderStatusChanged(account, folder, localFolder.getUnreadMessageCount());
            if (localTrashFolder != null) {
                l.folderStatusChanged(account, account.getTrashFolderName(), localTrashFolder.getUnreadMessageCount());
            }
        }

        if (K9.DEBUG)
            Log.d(K9.LOG_TAG, "Delete policy for account " + account.getDescription() + " is " + account.getDeletePolicy());

        if (folder.equals(account.getOutboxFolderName())) {
            for (Message message : messages) {
                // If the message was in the Outbox, then it has been copied to local Trash, and has
                // to be copied to remote trash
                PendingCommand command = new PendingCommand();
                command.command = PENDING_COMMAND_APPEND;
                command.arguments =
                    new String[] {
                    account.getTrashFolderName(),
                    message.getUid()
                };
                queuePendingCommand(account, command);
            }
            processPendingCommands(account);
        } else if (folder.equals(account.getTrashFolderName()) && account.getDeletePolicy() == Account.DELETE_POLICY_ON_DELETE) {
            // Deleting from Trash itself: flag deleted on the server too.
            queueSetFlag(account, folder, Boolean.toString(true), Flag.DELETED.toString(), uids);
            processPendingCommands(account);
        } else if (account.getDeletePolicy() == Account.DELETE_POLICY_ON_DELETE) {
            // Normal folder: mirror the move-to-Trash on the server.
            queueMoveOrCopy(account, folder, account.getTrashFolderName(), false, uids);
            processPendingCommands(account);
        } else if (account.getDeletePolicy() == Account.DELETE_POLICY_MARK_AS_READ) {
            queueSetFlag(account, folder, Boolean.toString(true), Flag.SEEN.toString(), uids);
            processPendingCommands(account);
        } else {
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "Delete policy " + account.getDeletePolicy() + " prevents delete from server");
        }
        // Local handling is done; lift the suppression applied by the caller.
        for (String uid : uids) {
            unsuppressMessage(account, folder, uid);
        }
    } catch (MessagingException me) {
        addErrorMessage(account, null, me);

        throw new RuntimeException("Error deleting message from local store.", me);
    } finally {
        if (localFolder != null) {
            localFolder.close();
        }
        if (localTrashFolder != null) {
            localTrashFolder.close();
        }
    }
}

/** Collect the UIDs of the given messages, preserving order. */
private String[] getUidsFromMessages(Message[] messages) {
    String[] uids = new String[messages.length];
    for (int i = 0; i < messages.length; i++) {
        uids[i] = messages[i].getUid();
    }
    return uids;
}

/**
 * Pending-command handler: flag everything in the remote Trash folder as
 * deleted and, if the account expunges immediately, expunge it.
 */
private void processPendingEmptyTrash(PendingCommand command, Account account) throws MessagingException {
    Store remoteStore = account.getRemoteStore();

    Folder remoteFolder = remoteStore.getFolder(account.getTrashFolderName());
    try {
        if (remoteFolder.exists()) {
            remoteFolder.open(OpenMode.READ_WRITE);
            remoteFolder.setFlags(new Flag [] { Flag.DELETED }, true);
            if (Account.EXPUNGE_IMMEDIATELY.equals(account.getExpungePolicy())) {
                remoteFolder.expunge();
            }
        }
    } finally {
        if (remoteFolder != null) {
            remoteFolder.close();
        }
    }
}

public void emptyTrash(final Account account, MessagingListener
listener) {
    putBackground("emptyTrash", listener, new Runnable() {
        public void run() {
            Folder localFolder = null;
            try {
                Store localStore = account.getLocalStore();
                localFolder = localStore.getFolder(account.getTrashFolderName());
                localFolder.open(OpenMode.READ_WRITE);
                // Locally just flag everything deleted ...
                localFolder.setFlags(new Flag[] { Flag.DELETED }, true);

                for (MessagingListener l : getListeners()) {
                    l.emptyTrashCompleted(account);
                }
                // ... and queue the remote empty-trash for the server side.
                List<String> args = new ArrayList<String>();
                PendingCommand command = new PendingCommand();
                command.command = PENDING_COMMAND_EMPTY_TRASH;
                command.arguments = args.toArray(EMPTY_STRING_ARRAY);
                queuePendingCommand(account, command);
                processPendingCommands(account);
            } catch (Exception e) {
                Log.e(K9.LOG_TAG, "emptyTrash failed", e);

                addErrorMessage(account, null, e);
            } finally {
                if (localFolder != null) {
                    localFolder.close();
                }
            }
        }
    });
}

/**
 * Load the given message and hand its text off to another application via an
 * ACTION_SEND chooser ("send alternate").
 */
public void sendAlternate(final Context context, Account account, Message message) {
    if (K9.DEBUG)
        Log.d(K9.LOG_TAG, "About to load message " + account.getDescription() + ":" + message.getFolder().getName()
              + ":" + message.getUid() + " for sendAlternate");

    loadMessageForView(account, message.getFolder().getName(),
    message.getUid(), new MessagingListener() {
        @Override
        public void loadMessageForViewBodyAvailable(Account account, String folder, String uid,
        Message message) {
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "Got message " + account.getDescription() + ":" + folder
                      + ":" + message.getUid() + " for sendAlternate");

            try {
                Intent msg = new Intent(Intent.ACTION_SEND);
                String quotedText = null;
                // Prefer a plain-text part; fall back to HTML.
                Part part = MimeUtility.findFirstPartByMimeType(message,
                            "text/plain");
                if (part == null) {
                    part = MimeUtility.findFirstPartByMimeType(message, "text/html");
                }
                if (part != null) {
                    quotedText = MimeUtility.getTextFromPart(part);
                }
                if (quotedText != null) {
                    msg.putExtra(Intent.EXTRA_TEXT, quotedText);
                }
                msg.putExtra(Intent.EXTRA_SUBJECT, "Fwd: " + message.getSubject());
                msg.setType("text/plain");
                context.startActivity(Intent.createChooser(msg,
context.getString(R.string.send_alternate_chooser_title))); } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Unable to send email through alternate program", me); } } }); } /** * Checks mail for one or multiple accounts. If account is null all accounts * are checked. * * @param context * @param account * @param listener */ public void checkMail(final Context context, final Account account, final boolean ignoreLastCheckedTime, final boolean useManualWakeLock, final MessagingListener listener) { TracingWakeLock twakeLock = null; if (useManualWakeLock) { TracingPowerManager pm = TracingPowerManager.getPowerManager(context); twakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "K9 MessagingController.checkMail"); twakeLock.setReferenceCounted(false); twakeLock.acquire(K9.MANUAL_WAKE_LOCK_TIMEOUT); } final TracingWakeLock wakeLock = twakeLock; for (MessagingListener l : getListeners()) { l.checkMailStarted(context, account); } putBackground("checkMail", listener, new Runnable() { public void run() { final NotificationManager notifMgr = (NotificationManager)context .getSystemService(Context.NOTIFICATION_SERVICE); try { if (K9.DEBUG) Log.i(K9.LOG_TAG, "Starting mail check"); Preferences prefs = Preferences.getPreferences(context); Account[] accounts; if (account != null) { accounts = new Account[] { account }; } else { accounts = prefs.getAccounts(); } for (final Account account : accounts) { final long accountInterval = account.getAutomaticCheckIntervalMinutes() * 60 * 1000; if (!ignoreLastCheckedTime && accountInterval <= 0) { if (K9.DEBUG) Log.i(K9.LOG_TAG, "Skipping synchronizing account " + account.getDescription()); continue; } if (K9.DEBUG) Log.i(K9.LOG_TAG, "Synchronizing account " + account.getDescription()); account.setRingNotified(false); putBackground("sendPending " + account.getDescription(), null, new Runnable() { public void run() { if (messagesPendingSend(account)) { if (account.isShowOngoing()) { Notification notif = new 
Notification(R.drawable.ic_menu_refresh, context.getString(R.string.notification_bg_send_ticker, account.getDescription()), System.currentTimeMillis()); Intent intent = MessageList.actionHandleFolderIntent(context, account, K9.INBOX); PendingIntent pi = PendingIntent.getActivity(context, 0, intent, 0); notif.setLatestEventInfo(context, context.getString(R.string.notification_bg_send_title), account.getDescription() , pi); notif.flags = Notification.FLAG_ONGOING_EVENT; if (K9.NOTIFICATION_LED_WHILE_SYNCING) { notif.flags |= Notification.FLAG_SHOW_LIGHTS; notif.ledARGB = account.getNotificationSetting().getLedColor(); notif.ledOnMS = K9.NOTIFICATION_LED_FAST_ON_TIME; notif.ledOffMS = K9.NOTIFICATION_LED_FAST_OFF_TIME; } notifMgr.notify(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber(), notif); } try { sendPendingMessagesSynchronous(account); } finally { if (account.isShowOngoing()) { notifMgr.cancel(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber()); } } } } } ); try { Account.FolderMode aDisplayMode = account.getFolderDisplayMode(); Account.FolderMode aSyncMode = account.getFolderSyncMode(); Store localStore = account.getLocalStore(); for (final Folder folder : localStore.getPersonalNamespaces(false)) { folder.open(Folder.OpenMode.READ_WRITE); folder.refresh(prefs); Folder.FolderClass fDisplayClass = folder.getDisplayClass(); Folder.FolderClass fSyncClass = folder.getSyncClass(); if (modeMismatch(aDisplayMode, fDisplayClass)) { // Never sync a folder that isn't displayed if (K9.DEBUG && false) Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName() + " which is in display mode " + fDisplayClass + " while account is in display mode " + aDisplayMode); continue; } if (modeMismatch(aSyncMode, fSyncClass)) { // Do not sync folders in the wrong class if (K9.DEBUG && false) Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName() + " which is in sync mode " + fSyncClass + " while account is in sync mode " + aSyncMode); continue; } if (K9.DEBUG) 
Log.v(K9.LOG_TAG, "Folder " + folder.getName() + " was last synced @ " + new Date(folder.getLastChecked())); if (!ignoreLastCheckedTime && folder.getLastChecked() > (System.currentTimeMillis() - accountInterval)) { if (K9.DEBUG) Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName() + ", previously synced @ " + new Date(folder.getLastChecked()) + " which would be too recent for the account period"); continue; } putBackground("sync" + folder.getName(), null, new Runnable() { public void run() { LocalFolder tLocalFolder = null; try { // In case multiple Commands get enqueued, don't run more than // once final LocalStore localStore = account.getLocalStore(); tLocalFolder = localStore.getFolder(folder.getName()); tLocalFolder.open(Folder.OpenMode.READ_WRITE); if (!ignoreLastCheckedTime && tLocalFolder.getLastChecked() > (System.currentTimeMillis() - accountInterval)) { if (K9.DEBUG) Log.v(K9.LOG_TAG, "Not running Command for folder " + folder.getName() + ", previously synced @ " + new Date(folder.getLastChecked()) + " which would be too recent for the account period"); return; } if (account.isShowOngoing()) { Notification notif = new Notification(R.drawable.ic_menu_refresh, context.getString(R.string.notification_bg_sync_ticker, account.getDescription(), folder.getName()), System.currentTimeMillis()); Intent intent = MessageList.actionHandleFolderIntent(context, account, K9.INBOX); PendingIntent pi = PendingIntent.getActivity(context, 0, intent, 0); notif.setLatestEventInfo(context, context.getString(R.string.notification_bg_sync_title), account.getDescription() + context.getString(R.string.notification_bg_title_separator) + folder.getName(), pi); notif.flags = Notification.FLAG_ONGOING_EVENT; if (K9.NOTIFICATION_LED_WHILE_SYNCING) { notif.flags |= Notification.FLAG_SHOW_LIGHTS; notif.ledARGB = account.getNotificationSetting().getLedColor(); notif.ledOnMS = K9.NOTIFICATION_LED_FAST_ON_TIME; notif.ledOffMS = K9.NOTIFICATION_LED_FAST_OFF_TIME; } 
notifMgr.notify(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber(), notif); } try { synchronizeMailboxSynchronous(account, folder.getName(), listener, null); } finally { if (account.isShowOngoing()) { notifMgr.cancel(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber()); } } } catch (Exception e) { Log.e(K9.LOG_TAG, "Exception while processing folder " + account.getDescription() + ":" + folder.getName(), e); addErrorMessage(account, null, e); } finally { if (tLocalFolder != null) { tLocalFolder.close(); } } } } ); } } catch (MessagingException e) { Log.e(K9.LOG_TAG, "Unable to synchronize account " + account.getName(), e); addErrorMessage(account, null, e); } finally { putBackground("clear notification flag for " + account.getDescription(), null, new Runnable() { public void run() { if (K9.DEBUG) Log.v(K9.LOG_TAG, "Clearing notification flag for " + account.getDescription()); account.setRingNotified(false); try { if (account.getStats(context).unreadMessageCount == 0) { notifyAccountCancel(context, account); } } catch (MessagingException e) { Log.e(K9.LOG_TAG, "Unable to getUnreadMessageCount for account: " + account, e); } } } ); } } } catch (Exception e) { Log.e(K9.LOG_TAG, "Unable to synchronize mail", e); addErrorMessage(account, null, e); } putBackground("finalize sync", null, new Runnable() { public void run() { if (K9.DEBUG) Log.i(K9.LOG_TAG, "Finished mail sync"); if (wakeLock != null) { wakeLock.release(); } for (MessagingListener l : getListeners()) { l.checkMailFinished(context, account); } } } ); } }); } public void compact(final Account account, final MessagingListener ml) { putBackground("compact:" + account.getDescription(), ml, new Runnable() { public void run() { try { LocalStore localStore = account.getLocalStore(); long oldSize = localStore.getSize(); localStore.compact(); long newSize = localStore.getSize(); if (ml != null) { ml.accountSizeChanged(account, oldSize, newSize); } for (MessagingListener l : getListeners()) { 
l.accountSizeChanged(account, oldSize, newSize);
                }
            } catch (Exception e) {
                Log.e(K9.LOG_TAG, "Failed to compact account " + account.getDescription(), e);
            }
        }
    });
}

/**
 * Wipe all locally stored messages for the account (the account itself
 * survives) and report the new, empty statistics to the listeners.
 */
public void clear(final Account account, final MessagingListener ml) {
    putBackground("clear:" + account.getDescription(), ml, new Runnable() {
        public void run() {
            try {
                LocalStore localStore = account.getLocalStore();
                long oldSize = localStore.getSize();
                localStore.clear();
                localStore.resetVisibleLimits(account.getDisplayCount());
                long newSize = localStore.getSize();
                AccountStats stats = new AccountStats();
                stats.size = newSize;
                stats.unreadMessageCount = 0;
                stats.flaggedMessageCount = 0;
                if (ml != null) {
                    ml.accountSizeChanged(account, oldSize, newSize);
                    ml.accountStatusChanged(account, stats);
                }
                for (MessagingListener l : getListeners()) {
                    l.accountSizeChanged(account, oldSize, newSize);
                    l.accountStatusChanged(account, stats);
                }
            } catch (Exception e) {
                Log.e(K9.LOG_TAG, "Failed to clear account " + account.getDescription(), e);
            }
        }
    });
}

/**
 * Drop and re-create the account's local store, then report the fresh
 * (empty) statistics to the listeners.
 */
public void recreate(final Account account, final MessagingListener ml) {
    putBackground("recreate:" + account.getDescription(), ml, new Runnable() {
        public void run() {
            try {
                LocalStore localStore = account.getLocalStore();
                long oldSize = localStore.getSize();
                localStore.recreate();
                localStore.resetVisibleLimits(account.getDisplayCount());
                long newSize = localStore.getSize();
                AccountStats stats = new AccountStats();
                stats.size = newSize;
                stats.unreadMessageCount = 0;
                stats.flaggedMessageCount = 0;
                if (ml != null) {
                    ml.accountSizeChanged(account, oldSize, newSize);
                    ml.accountStatusChanged(account, stats);
                }
                for (MessagingListener l : getListeners()) {
                    l.accountSizeChanged(account, oldSize, newSize);
                    l.accountStatusChanged(account, stats);
                }
            } catch (Exception e) {
                Log.e(K9.LOG_TAG, "Failed to recreate account " + account.getDescription(), e);
            }
        }
    });
}

private boolean shouldNotifyForMessage(Account account, Message message) {
    // Do not notify if the user does not have
    // notifications enabled, or if the message has already been read.
    if (!account.isNotifyNewMail() || message.isSet(Flag.SEEN) || (account.getName() == null)) {
        return false;
    }

    Folder folder = message.getFolder();
    if (folder != null) {
        // No notification for new messages in Trash, Drafts, or Sent folder.
        // But do notify if it's the INBOX (see issue 1817).
        String folderName = folder.getName();
        if (!K9.INBOX.equals(folderName) &&
                (account.getTrashFolderName().equals(folderName)
                 || account.getDraftsFolderName().equals(folderName)
                 || account.getSentFolderName().equals(folderName))) {
            return false;
        }
    }

    return true;
}

/**
 * Creates a notification of new email messages -- ringtone, lights, and
 * vibration to be played.
 * @return false when the notification was suppressed (self-sent mail with
 *         self-notification disabled), true when it was posted
 */
private boolean notifyAccount(Context context, Account account, Message message,
                              int previousUnreadMessageCount, AtomicInteger newMessageCount) {
    // If we have a message, set the notification to "<From>: <Subject>"
    StringBuilder messageNotice = new StringBuilder();
    final KeyguardManager keyguardService = (KeyguardManager) context.getSystemService(Context.KEYGUARD_SERVICE);
    try {
        if (message != null && message.getFrom() != null) {
            Address[] fromAddrs = message.getFrom();
            String from = fromAddrs.length > 0 ? fromAddrs[0].toFriendly().toString() : null;
            String subject = message.getSubject();
            if (subject == null) {
                subject = context.getString(R.string.general_no_subject);
            }

            if (from != null) {
                // Show From: address by default
                if (!account.isAnIdentity(fromAddrs)) {
                    messageNotice.append(from + ": " + subject);
                }
                // show To: if the message was sent from me
                else {
                    if (!account.isNotifySelfNewMail()) {
                        return false;
                    }

                    Address[] rcpts = message.getRecipients(Message.RecipientType.TO);
                    String to = rcpts.length > 0 ? rcpts[0].toFriendly().toString() : null;
                    if (to != null) {
                        messageNotice.append(String.format(context.getString(R.string.message_list_to_fmt), to) + ": " + subject);
                    } else {
                        messageNotice.append(context.getString(R.string.general_no_sender) + ": " + subject);
                    }

                }
            }
        }
    } catch (MessagingException e) {
        Log.e(K9.LOG_TAG, "Unable to get message information for notification.", e);
    }
    // If privacy mode active and keyguard active
    // If we could not set a per-message notification, revert to a default message
    if ((K9.keyguardPrivacy() && keyguardService.inKeyguardRestrictedInputMode()) || messageNotice.length() == 0) {
        messageNotice = new StringBuilder(context.getString(R.string.notification_new_title));
    }

    NotificationManager notifMgr =
        (NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE);

    Notification notif = new Notification(R.drawable.stat_notify_email_generic, messageNotice, System.currentTimeMillis());
    // Badge count = unread before this fetch + messages new in this fetch.
    notif.number = previousUnreadMessageCount + newMessageCount.get();

    Intent i = FolderList.actionHandleNotification(context, account, account.getAutoExpandFolderName());
    PendingIntent pi = PendingIntent.getActivity(context, 0, i, 0);

    String accountNotice = context.getString(R.string.notification_new_one_account_fmt, notif.number, account.getDescription());
    notif.setLatestEventInfo(context, accountNotice, messageNotice, pi);

    // Only ring or vibrate if we have not done so already on this
    // account and fetch
    boolean ringAndVibrate = false;
    if (!account.isRingNotified()) {
        account.setRingNotified(true);
        ringAndVibrate = true;
    }

    configureNotification(account.getNotificationSetting(), notif, ringAndVibrate);

    notifMgr.notify(account.getAccountNumber(), notif);
    return true;
}

/**
 * @param setting
 *            Configuration template. Never <code>null</code>.
 * @param notification
 *            Object to configure. Never <code>null</code>.
 * @param ringAndVibrate
 *            <code>true</code> if ringtone/vibration are allowed,
 *            <code>false</code> otherwise.
*/ private void configureNotification(final NotificationSetting setting, final Notification notification, final boolean ringAndVibrate) { if (ringAndVibrate) { if (setting.shouldRing()) { String ringtone = setting.getRingtone(); notification.sound = TextUtils.isEmpty(ringtone) ? null : Uri.parse(ringtone); notification.audioStreamType = AudioManager.STREAM_NOTIFICATION; } if (setting.isVibrate()) { long[] pattern = getVibratePattern(setting.getVibratePattern(), setting.getVibrateTimes()); notification.vibrate = pattern; } } if (setting.isLed()) { notification.flags |= Notification.FLAG_SHOW_LIGHTS; notification.ledARGB = setting.getLedColor(); notification.ledOnMS = K9.NOTIFICATION_LED_ON_TIME; notification.ledOffMS = K9.NOTIFICATION_LED_OFF_TIME; } } /* * Fetch a vibration pattern. * * @param vibratePattern Vibration pattern index to use. * @param vibrateTimes Number of times to do the vibration pattern. * @return Pattern multiplied by the number of times requested. */ public static long[] getVibratePattern(int vibratePattern, int vibrateTimes) { // These are "off, on" patterns, specified in milliseconds long[] pattern0 = new long[] {300,200}; // like the default pattern long[] pattern1 = new long[] {100,200}; long[] pattern2 = new long[] {100,500}; long[] pattern3 = new long[] {200,200}; long[] pattern4 = new long[] {200,500}; long[] pattern5 = new long[] {500,500}; long[] selectedPattern = pattern0; //default pattern switch (vibratePattern) { case 1: selectedPattern = pattern1; break; case 2: selectedPattern = pattern2; break; case 3: selectedPattern = pattern3; break; case 4: selectedPattern = pattern4; break; case 5: selectedPattern = pattern5; break; } long[] repeatedPattern = new long[selectedPattern.length * vibrateTimes]; for (int n = 0; n < vibrateTimes; n++) { System.arraycopy(selectedPattern, 0, repeatedPattern, n * selectedPattern.length, selectedPattern.length); } // Do not wait before starting the vibration pattern. 
repeatedPattern[0] = 0; return repeatedPattern; } /** Cancel a notification of new email messages */ public void notifyAccountCancel(Context context, Account account) { NotificationManager notifMgr = (NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE); notifMgr.cancel(account.getAccountNumber()); notifMgr.cancel(-1000 - account.getAccountNumber()); } public Message saveDraft(final Account account, final Message message) { Message localMessage = null; try { LocalStore localStore = account.getLocalStore(); LocalFolder localFolder = localStore.getFolder(account.getDraftsFolderName()); localFolder.open(OpenMode.READ_WRITE); localFolder.appendMessages(new Message[] { message }); localMessage = localFolder.getMessage(message.getUid()); localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true); PendingCommand command = new PendingCommand(); command.command = PENDING_COMMAND_APPEND; command.arguments = new String[] { localFolder.getName(), localMessage.getUid() }; queuePendingCommand(account, command); processPendingCommands(account); } catch (MessagingException e) { Log.e(K9.LOG_TAG, "Unable to save message as draft.", e); addErrorMessage(account, null, e); } return localMessage; } public boolean modeMismatch(Account.FolderMode aMode, Folder.FolderClass fMode) { if (aMode == Account.FolderMode.NONE || (aMode == Account.FolderMode.FIRST_CLASS && fMode != Folder.FolderClass.FIRST_CLASS) || (aMode == Account.FolderMode.FIRST_AND_SECOND_CLASS && fMode != Folder.FolderClass.FIRST_CLASS && fMode != Folder.FolderClass.SECOND_CLASS) || (aMode == Account.FolderMode.NOT_SECOND_CLASS && fMode == Folder.FolderClass.SECOND_CLASS)) { return true; } else { return false; } } static AtomicInteger sequencing = new AtomicInteger(0); class Command implements Comparable<Command> { public Runnable runnable; public MessagingListener listener; public String description; boolean isForeground; int sequence = sequencing.getAndIncrement(); @Override public int compareTo(Command 
other) { if (other.isForeground && !isForeground) { return 1; } else if (!other.isForeground && isForeground) { return -1; } else { return (sequence - other.sequence); } } } public MessagingListener getCheckMailListener() { return checkMailListener; } public void setCheckMailListener(MessagingListener checkMailListener) { if (this.checkMailListener != null) { removeListener(this.checkMailListener); } this.checkMailListener = checkMailListener; if (this.checkMailListener != null) { addListener(this.checkMailListener); } } public SORT_TYPE getSortType() { return sortType; } public void setSortType(SORT_TYPE sortType) { this.sortType = sortType; } public boolean isSortAscending(SORT_TYPE sortType) { Boolean sortAsc = sortAscending.get(sortType); if (sortAsc == null) { return sortType.isDefaultAscending(); } else return sortAsc; } public void setSortAscending(SORT_TYPE sortType, boolean nsortAscending) { sortAscending.put(sortType, nsortAscending); } public Collection<Pusher> getPushers() { return pushers.values(); } public boolean setupPushing(final Account account) { try { Pusher previousPusher = pushers.remove(account); if (previousPusher != null) { previousPusher.stop(); } Preferences prefs = Preferences.getPreferences(mApplication); Account.FolderMode aDisplayMode = account.getFolderDisplayMode(); Account.FolderMode aPushMode = account.getFolderPushMode(); List<String> names = new ArrayList<String>(); Store localStore = account.getLocalStore(); for (final Folder folder : localStore.getPersonalNamespaces(false)) { if (folder.getName().equals(account.getErrorFolderName()) || folder.getName().equals(account.getOutboxFolderName())) { if (K9.DEBUG && false) Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() + " which should never be pushed"); continue; } folder.open(Folder.OpenMode.READ_WRITE); folder.refresh(prefs); Folder.FolderClass fDisplayClass = folder.getDisplayClass(); Folder.FolderClass fPushClass = folder.getPushClass(); if (modeMismatch(aDisplayMode, 
fDisplayClass)) { // Never push a folder that isn't displayed if (K9.DEBUG && false) Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() + " which is in display class " + fDisplayClass + " while account is in display mode " + aDisplayMode); continue; } if (modeMismatch(aPushMode, fPushClass)) { // Do not push folders in the wrong class if (K9.DEBUG && false) Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() + " which is in push mode " + fPushClass + " while account is in push mode " + aPushMode); continue; } if (K9.DEBUG) Log.i(K9.LOG_TAG, "Starting pusher for " + account.getDescription() + ":" + folder.getName()); names.add(folder.getName()); } if (names.size() > 0) { PushReceiver receiver = new MessagingControllerPushReceiver(mApplication, account, this); int maxPushFolders = account.getMaxPushFolders(); if (names.size() > maxPushFolders) { if (K9.DEBUG) Log.i(K9.LOG_TAG, "Count of folders to push for account " + account.getDescription() + " is " + names.size() + ", greater than limit of " + maxPushFolders + ", truncating"); names = names.subList(0, maxPushFolders); } try { Store store = account.getRemoteStore(); if (!store.isPushCapable()) { if (K9.DEBUG) Log.i(K9.LOG_TAG, "Account " + account.getDescription() + " is not push capable, skipping"); return false; } Pusher pusher = store.getPusher(receiver); if (pusher != null) { Pusher oldPusher = pushers.putIfAbsent(account, pusher); if (oldPusher == null) { pusher.start(names); } } } catch (Exception e) { Log.e(K9.LOG_TAG, "Could not get remote store", e); return false; } return true; } else { if (K9.DEBUG) Log.i(K9.LOG_TAG, "No folders are configured for pushing in account " + account.getDescription()); return false; } } catch (Exception e) { Log.e(K9.LOG_TAG, "Got exception while setting up pushing", e); } return false; } public void stopAllPushing() { if (K9.DEBUG) Log.i(K9.LOG_TAG, "Stopping all pushers"); Iterator<Pusher> iter = pushers.values().iterator(); while (iter.hasNext()) { Pusher 
pusher = iter.next(); iter.remove(); pusher.stop(); } } public void messagesArrived(final Account account, final Folder remoteFolder, final List<Message> messages, final boolean flagSyncOnly) { if (K9.DEBUG) Log.i(K9.LOG_TAG, "Got new pushed email messages for account " + account.getDescription() + ", folder " + remoteFolder.getName()); final CountDownLatch latch = new CountDownLatch(1); putBackground("Push messageArrived of account " + account.getDescription() + ", folder " + remoteFolder.getName(), null, new Runnable() { public void run() { LocalFolder localFolder = null; try { LocalStore localStore = account.getLocalStore(); localFolder= localStore.getFolder(remoteFolder.getName()); localFolder.open(OpenMode.READ_WRITE); account.setRingNotified(false); int newCount = downloadMessages(account, remoteFolder, localFolder, messages, flagSyncOnly); int unreadMessageCount = setLocalUnreadCountToRemote(localFolder, remoteFolder, messages.size()); setLocalFlaggedCountToRemote(localFolder, remoteFolder); localFolder.setLastPush(System.currentTimeMillis()); localFolder.setStatus(null); if (K9.DEBUG) Log.i(K9.LOG_TAG, "messagesArrived newCount = " + newCount + ", unread count = " + unreadMessageCount); if (unreadMessageCount == 0) { notifyAccountCancel(mApplication, account); } for (MessagingListener l : getListeners()) { l.folderStatusChanged(account, remoteFolder.getName(), unreadMessageCount); } } catch (Exception e) { String rootMessage = getRootCauseMessage(e); String errorMessage = "Push failed: " + rootMessage; try { localFolder.setStatus(errorMessage); } catch (Exception se) { Log.e(K9.LOG_TAG, "Unable to set failed status on localFolder", se); } for (MessagingListener l : getListeners()) { l.synchronizeMailboxFailed(account, remoteFolder.getName(), errorMessage); } addErrorMessage(account, null, e); } finally { if (localFolder != null) { try { localFolder.close(); } catch (Exception e) { Log.e(K9.LOG_TAG, "Unable to close localFolder", e); } } latch.countDown(); } 
} }); try { latch.await(); } catch (Exception e) { Log.e(K9.LOG_TAG, "Interrupted while awaiting latch release", e); } if (K9.DEBUG) Log.i(K9.LOG_TAG, "MessagingController.messagesArrivedLatch released"); } enum MemorizingState { STARTED, FINISHED, FAILED }; class Memory { Account account; String folderName; MemorizingState syncingState = null; MemorizingState sendingState = null; MemorizingState pushingState = null; MemorizingState processingState = null; String failureMessage = null; int syncingTotalMessagesInMailbox; int syncingNumNewMessages; int folderCompleted = 0; int folderTotal = 0; String processingCommandTitle = null; Memory(Account nAccount, String nFolderName) { account = nAccount; folderName = nFolderName; } String getKey() { return getMemoryKey(account, folderName); } } static String getMemoryKey(Account taccount, String tfolderName) { return taccount.getDescription() + ":" + tfolderName; } class MemorizingListener extends MessagingListener { HashMap<String, Memory> memories = new HashMap<String, Memory>(31); Memory getMemory(Account account, String folderName) { Memory memory = memories.get(getMemoryKey(account, folderName)); if (memory == null) { memory = new Memory(account, folderName); memories.put(memory.getKey(), memory); } return memory; } @Override public synchronized void synchronizeMailboxStarted(Account account, String folder) { Memory memory = getMemory(account, folder); memory.syncingState = MemorizingState.STARTED; memory.folderCompleted = 0; memory.folderTotal = 0; } @Override public synchronized void synchronizeMailboxFinished(Account account, String folder, int totalMessagesInMailbox, int numNewMessages) { Memory memory = getMemory(account, folder); memory.syncingState = MemorizingState.FINISHED; memory.syncingTotalMessagesInMailbox = totalMessagesInMailbox; memory.syncingNumNewMessages = numNewMessages; } @Override public synchronized void synchronizeMailboxFailed(Account account, String folder, String message) { Memory memory = 
getMemory(account, folder); memory.syncingState = MemorizingState.FAILED; memory.failureMessage = message; } synchronized void refreshOther(MessagingListener other) { if (other != null) { Memory syncStarted = null; Memory sendStarted = null; Memory processingStarted = null; for (Memory memory : memories.values()) { if (memory.syncingState != null) { switch (memory.syncingState) { case STARTED: syncStarted = memory; break; case FINISHED: other.synchronizeMailboxFinished(memory.account, memory.folderName, memory.syncingTotalMessagesInMailbox, memory.syncingNumNewMessages); break; case FAILED: other.synchronizeMailboxFailed(memory.account, memory.folderName, memory.failureMessage); break; } } if (memory.sendingState != null) { switch (memory.sendingState) { case STARTED: sendStarted = memory; break; case FINISHED: other.sendPendingMessagesCompleted(memory.account); break; case FAILED: other.sendPendingMessagesFailed(memory.account); break; } } if (memory.pushingState != null) { switch (memory.pushingState) { case STARTED: other.setPushActive(memory.account, memory.folderName, true); break; case FINISHED: other.setPushActive(memory.account, memory.folderName, false); break; } } if (memory.processingState != null) { switch (memory.processingState) { case STARTED: processingStarted = memory; break; case FINISHED: case FAILED: other.pendingCommandsFinished(memory.account); break; } } } Memory somethingStarted = null; if (syncStarted != null) { other.synchronizeMailboxStarted(syncStarted.account, syncStarted.folderName); somethingStarted = syncStarted; } if (sendStarted != null) { other.sendPendingMessagesStarted(sendStarted.account); somethingStarted = sendStarted; } if (processingStarted != null) { other.pendingCommandsProcessing(processingStarted.account); if (processingStarted.processingCommandTitle != null) { other.pendingCommandStarted(processingStarted.account, processingStarted.processingCommandTitle); } else { 
other.pendingCommandCompleted(processingStarted.account, processingStarted.processingCommandTitle); } somethingStarted = processingStarted; } if (somethingStarted != null && somethingStarted.folderTotal > 0) { other.synchronizeMailboxProgress(somethingStarted.account, somethingStarted.folderName, somethingStarted.folderCompleted, somethingStarted.folderTotal); } } } @Override public synchronized void setPushActive(Account account, String folderName, boolean active) { Memory memory = getMemory(account, folderName); memory.pushingState = (active ? MemorizingState.STARTED : MemorizingState.FINISHED); } @Override public synchronized void sendPendingMessagesStarted(Account account) { Memory memory = getMemory(account, null); memory.sendingState = MemorizingState.STARTED; memory.folderCompleted = 0; memory.folderTotal = 0; } @Override public synchronized void sendPendingMessagesCompleted(Account account) { Memory memory = getMemory(account, null); memory.sendingState = MemorizingState.FINISHED; } @Override public synchronized void sendPendingMessagesFailed(Account account) { Memory memory = getMemory(account, null); memory.sendingState = MemorizingState.FAILED; } @Override public synchronized void synchronizeMailboxProgress(Account account, String folderName, int completed, int total) { Memory memory = getMemory(account, folderName); memory.folderCompleted = completed; memory.folderTotal = total; } @Override public synchronized void pendingCommandsProcessing(Account account) { Memory memory = getMemory(account, null); memory.processingState = MemorizingState.STARTED; memory.folderCompleted = 0; memory.folderTotal = 0; } @Override public synchronized void pendingCommandsFinished(Account account) { Memory memory = getMemory(account, null); memory.processingState = MemorizingState.FINISHED; } @Override public synchronized void pendingCommandStarted(Account account, String commandTitle) { Memory memory = getMemory(account, null); memory.processingCommandTitle = commandTitle; 
} @Override public synchronized void pendingCommandCompleted(Account account, String commandTitle) { Memory memory = getMemory(account, null); memory.processingCommandTitle = null; } } private void actOnMessages(Message[] messages, MessageActor actor) { Map<Account, Map<Folder, List<Message>>> accountMap = new HashMap<Account, Map<Folder, List<Message>>>(); for (Message message : messages) { Folder folder = message.getFolder(); Account account = folder.getAccount(); Map<Folder, List<Message>> folderMap = accountMap.get(account); if (folderMap == null) { folderMap = new HashMap<Folder, List<Message>>(); accountMap.put(account, folderMap); } List<Message> messageList = folderMap.get(folder); if (messageList == null) { messageList = new LinkedList<Message>(); folderMap.put(folder, messageList); } messageList.add(message); } for (Map.Entry<Account, Map<Folder, List<Message>>> entry : accountMap.entrySet()) { Account account = entry.getKey(); //account.refresh(Preferences.getPreferences(K9.app)); Map<Folder, List<Message>> folderMap = entry.getValue(); for (Map.Entry<Folder, List<Message>> folderEntry : folderMap.entrySet()) { Folder folder = folderEntry.getKey(); List<Message> messageList = folderEntry.getValue(); actor.act(account, folder, messageList); } } } interface MessageActor { public void act(final Account account, final Folder folder, final List<Message> messages); } }
package com.ghgande.j2mod.modbus.cmd; import com.ghgande.j2mod.modbus.Modbus; import com.ghgande.j2mod.modbus.io.ModbusSerialTransport; import com.ghgande.j2mod.modbus.io.ModbusTransaction; import com.ghgande.j2mod.modbus.io.ModbusTransport; import com.ghgande.j2mod.modbus.msg.WriteCoilRequest; import com.ghgande.j2mod.modbus.msg.WriteCoilResponse; import com.ghgande.j2mod.modbus.net.ModbusMasterFactory; /** * <p> * Class that implements a simple commandline tool for writing to a digital * output. * * <p> * Note that if you write to a remote I/O with a Modbus protocol stack, it will * most likely expect that the communication is <i>kept alive</i> after the * first write message. * * <p> * This can be achieved either by sending any kind of message, or by repeating * the write message within a given period of time. * * <p> * If the time period is exceeded, then the device might react by turning off * all signals of the I/O modules. After this timeout, the device might require * a reset message. * * @author Dieter Wimberger * @version 1.2rc1 (09/11/2004) */ public class WriteCoilTest { private static void printUsage() { System.out .println("java com.ghgande.j2mod.modbus.cmd.WriteCoilTest" + " <connection [String]>" + " <unit [int8]>" + " <coil [int16]>" + " <state [boolean]>" + " {<repeat [int]>}"); } public static void main(String[] args) { WriteCoilRequest req = null; ModbusTransport transport = null; ModbusTransaction trans = null; int ref = 0; boolean value = false; int repeat = 1; int unit = 0; // 1. 
Setup the parameters if (args.length < 4) { printUsage(); System.exit(1); } try { try { transport = ModbusMasterFactory.createModbusMaster(args[0]); if (transport instanceof ModbusSerialTransport) { ((ModbusSerialTransport) transport).setReceiveTimeout(500); if (System.getProperty("com.ghgande.j2mod.modbus.baud") != null) ((ModbusSerialTransport) transport).setBaudRate(Integer.parseInt(System.getProperty("com.ghgande.j2mod.modbus.baud"))); else ((ModbusSerialTransport) transport).setBaudRate(19200); } /* * There are a number of devices which won't initialize immediately * after being opened. Take a moment to let them come up. */ Thread.sleep(2000); unit = Integer.parseInt(args[1]); ref = Integer.parseInt(args[2]); value = "true".equals(args[3]); if (args.length == 5) { repeat = Integer.parseInt(args[4]); } } catch (Exception ex) { ex.printStackTrace(); printUsage(); System.exit(1); } // 3. Prepare the request req = new WriteCoilRequest(ref, value); req.setUnitID(unit); if (Modbus.debug) System.out.println("Request: " + req.getHexMessage()); // 4. Prepare the transaction trans = transport.createTransaction(); trans.setRequest(req); // 5. Execute the transaction repeat times for (int count = 0; count < repeat; count++) { trans.execute(); if (Modbus.debug) System.out.println("Response: " + trans.getResponse().getHexMessage()); WriteCoilResponse data = (WriteCoilResponse) trans.getResponse(); if (data != null) System.out.println("Coil = " + data.getCoil()); } // 6. Close the connection transport.close(); } catch (Exception ex) { ex.printStackTrace(); } System.exit(0); } }
package com.github.noxan.aves.demo.chat;

import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.github.noxan.aves.auth.AuthException;
import com.github.noxan.aves.auth.User;
import com.github.noxan.aves.auth.accessor.UsernamePassword;
import com.github.noxan.aves.auth.accessor.UsernamePasswordAccessor;
import com.github.noxan.aves.auth.session.SessionManager;
import com.github.noxan.aves.auth.storage.InMemoryUsernamePasswordStorage;
import com.github.noxan.aves.net.Connection;
import com.github.noxan.aves.server.ServerHandler;
import com.github.noxan.aves.server.SocketServer;

/**
 * Demo chat server. Clients send semicolon-separated text packets of the form
 * {@code header;arg1;arg2;...}; the supported headers are {@code login}
 * (username and password arguments) and {@code logout} (no arguments).
 */
public class ChatServer implements ServerHandler {
    public static void main(String[] args) {
        ChatServer server = new ChatServer();
        try {
            server.start();
        } catch(IOException e) {
            e.printStackTrace();
        }
    }

    private Logger logger = Logger.getLogger(getClass().getName());

    private InMemoryUsernamePasswordStorage storage;
    private SessionManager sessionManager;
    private SocketServer server;

    public ChatServer() {
        // Demo credentials only; a real deployment would load users from
        // persistent, non-plaintext storage.
        storage = new InMemoryUsernamePasswordStorage();
        storage.addUser("noxan", "123");
        storage.addUser("test", "1234");

        sessionManager = new SessionManager(storage);
        server = new SocketServer(this);
    }

    /**
     * Starts accepting client connections.
     *
     * @throws IOException if the underlying socket server fails to start.
     */
    public void start() throws IOException {
        server.start();
    }

    @Override
    public void readData(Connection connection, Object data) {
        logger.log(Level.INFO, data.toString());

        String[] parts = data.toString().split(";");
        try {
            switch(parts[0]) {
                case "login":
                    // Guard against malformed packets from untrusted clients:
                    // a login packet needs both a username and a password
                    // field. Without this check, parts[1]/parts[2] below
                    // threw an uncaught ArrayIndexOutOfBoundsException.
                    if(parts.length < 3) {
                        logger.log(Level.WARNING, "malformed login packet: " + data.toString());
                        connection.write("login;malformed packet");
                        break;
                    }
                    UsernamePasswordAccessor accessor = new UsernamePassword(parts[1], parts[2]);
                    try {
                        User user = sessionManager.requestSession(accessor, connection);
                        logger.log(Level.INFO, connection + " logged in as " + user.getUsername());
                        connection.write("login;ok");
                        server.broadcast(connection, "chat;Server;" + user.getUsername() + " joined");
                    } catch(AuthException e) {
                        // Report the failure reason back to the client.
                        connection.write("login;" + e.getMessage());
                    }
                    break;
                case "logout":
                    logger.log(Level.INFO, connection + " logged out");
                    try {
                        User user = sessionManager.getSession(connection);
                        server.broadcast(connection, "chat;Server;" + user.getUsername() + " left");
                    } catch(AuthException ignored) {
                        // Client was never logged in -- nothing to announce,
                        // but still tear down any session state below.
                    }
                    sessionManager.destroySession(connection);
                    break;
                default:
                    logger.log(Level.WARNING, "unknown packet header: " + data.toString());
                    break;
            }
        } catch(IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void clientConnect(Connection connection) {
        logger.log(Level.INFO, connection + " connected");
    }

    @Override
    public void clientDisconnect(Connection connection) {
        logger.log(Level.INFO, connection + " disconnected");
        sessionManager.destroySession(connection);
    }

    @Override
    public void clientLost(Connection connection) {
        logger.log(Level.INFO, connection + " lost");
    }
}
package com.github.r1j0.statsd.client;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.charset.Charset;

import org.apache.mina.core.future.ConnectFuture;
import org.apache.mina.core.service.IoConnector;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.filter.codec.ProtocolCodecFilter;
import org.apache.mina.filter.codec.textline.TextLineCodecFactory;
import org.apache.mina.filter.logging.LoggingFilter;
import org.apache.mina.transport.socket.nio.NioDatagramConnector;

/**
 * Small load/demo client that fires 100 UTF-8 text datagrams at a local
 * statsd server via Apache MINA.
 */
public class StatsdClient {

    private static final String HOST = "127.0.0.1";
    private static final int PORT = 39390;

    public static void main(String[] args) throws IOException, InterruptedException {
        // run() (not start()) is called deliberately: the 100 connections are
        // made sequentially on the main thread, despite Automatic extending
        // Thread.
        for (int i = 0; i < 100; i++) {
            new Automatic().run();
        }

        System.exit(0);
    }

    /** One connect/send/close cycle against the statsd server. */
    public static class Automatic extends Thread {

        public Automatic() {
            super();
        }

        @Override
        public void run() {
            IoConnector connector = new NioDatagramConnector();
            try {
                connector.getSessionConfig().setReadBufferSize(2048);
                connector.getFilterChain().addLast("logger", new LoggingFilter());
                connector.getFilterChain().addLast("codec",
                        new ProtocolCodecFilter(new TextLineCodecFactory(Charset.forName("UTF-8"))));
                connector.setHandler(new StatsdClientHandler(" Hello Server.. äöüß"));

                ConnectFuture future = connector.connect(new InetSocketAddress(HOST, PORT));
                // connect() is asynchronous: the future must be awaited before
                // its result is inspected, otherwise isConnected() is checked
                // while the connect attempt is still in flight.
                future.awaitUninterruptibly();
                if (!future.isConnected()) {
                    return;
                }

                IoSession session = future.getSession();
                session.getConfig().setUseReadOperation(true);
                session.close(true);
            } finally {
                // Always release the connector's threads/sockets -- the
                // original leaked them when the early return above was taken.
                connector.dispose();
            }
        }
    }
}
package com.jcwhatever.nucleus.utils; import com.jcwhatever.nucleus.regions.data.SyncLocation; import com.jcwhatever.nucleus.utils.materials.Materials; import com.jcwhatever.nucleus.utils.text.TextUtils; import com.jcwhatever.nucleus.utils.validate.IValidator; import org.bukkit.Bukkit; import org.bukkit.Location; import org.bukkit.World; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; import org.bukkit.entity.Entity; import org.bukkit.event.player.PlayerTeleportEvent; import org.bukkit.util.Vector; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.Collection; import javax.annotation.Nullable; /** * Location utilities. */ public final class LocationUtils { private LocationUtils () {} // array to help convert yaw to block face private static final BlockFace[] YAW_FACES = new BlockFace[] { BlockFace.SOUTH, BlockFace.SOUTH_SOUTH_WEST, BlockFace.SOUTH_WEST, BlockFace.WEST_SOUTH_WEST, BlockFace.WEST, BlockFace.WEST_NORTH_WEST, BlockFace.NORTH_WEST, BlockFace.NORTH_NORTH_WEST, BlockFace.NORTH, BlockFace.NORTH_NORTH_EAST, BlockFace.NORTH_EAST, BlockFace.EAST_NORTH_EAST, BlockFace.EAST, BlockFace.EAST_SOUTH_EAST, BlockFace.SOUTH_EAST, BlockFace.SOUTH_SOUTH_EAST, BlockFace.SOUTH }; private static final Location CENTERED_LOCATION = new Location(null, 0, 0, 0); /** * Copy the values from a source {@link org.bukkit.Location} to a new * {@link org.bukkit.Location}. * * @param source The source location. * * @return A new {@link org.bukkit.Location}. */ public static Location copy(Location source) { PreCon.notNull(source); Location destination = new Location(null, 0, 0, 0); return copy(source, destination); } /** * Copy the values from a source {@link org.bukkit.Location} to a destination * {@link org.bukkit.Location}. * * @param source The source location. * @param destination The destination location. * * @return The destination {@link org.bukkit.Location}. 
*/ public static Location copy(Location source, Location destination) { PreCon.notNull(source); PreCon.notNull(destination); destination.setWorld(source.getWorld()); destination.setX(source.getX()); destination.setY(source.getY()); destination.setZ(source.getZ()); destination.setYaw(source.getYaw()); destination.setPitch(source.getPitch()); return destination; } /** * Copy the values from a source {@link org.bukkit.Location} to a destination * {@link org.bukkit.util.Vector}. * * @param source The source location. * @param destination The destination vector. * * @return The destination {@link org.bukkit.util.Vector}. */ public static Vector copy(Location source, Vector destination) { PreCon.notNull(source); PreCon.notNull(destination); destination.setX(source.getX()); destination.setY(source.getY()); destination.setZ(source.getZ()); return destination; } /** * Copy the values from a source {@link org.bukkit.util.Vector} to a new * {@link org.bukkit.util.Vector}. * * @param source The source location. * * @return A new {@link org.bukkit.util.Vector}. */ public static Vector copy(Vector source) { PreCon.notNull(source); Vector vector = new Vector(0, 0, 0); return copy(source, vector); } /** * Copy the values from a source {@link org.bukkit.util.Vector} to a destination * {@link org.bukkit.util.Vector}. * * @param source The source location. * @param destination The destination vector. * * @return The destination {@link org.bukkit.util.Vector}. */ public static Vector copy(Vector source, Vector destination) { PreCon.notNull(source); PreCon.notNull(destination); destination.setX(source.getX()); destination.setY(source.getY()); destination.setZ(source.getZ()); return destination; } /** * Copy a source {@link org.bukkit.Location} and center the X and Z coordinates of the * copy to the source locations block. * * @param source The source location. * * @return A new {@link org.bukkit.Location} containing the result. 
*/ public static Location getCenteredLocation(Location source) { PreCon.notNull(source); return getCenteredLocation(source, new Location(null, 0, 0, 0)); } /** * Copy a source {@link org.bukkit.Location} to an output {@link org.bukkit.Location} and * center the X and Z coordinates of the output to the source locations block. * * @param source The source location. * @param output The location to put the results into. * * @return The output {@link org.bukkit.Location}. */ public static Location getCenteredLocation(Location source, Location output) { PreCon.notNull(source); output.setWorld(source.getWorld()); output.setX(source.getBlockX() + 0.5); output.setY(source.getY()); output.setZ(source.getBlockZ() + 0.5); output.setYaw(source.getYaw()); output.setPitch(source.getPitch()); return output; } /** * Teleport an entity to a {@link org.bukkit.Location} centered on the X and Z * axis of the locations block. * * @param entity The entity to teleport. * @param location The teleport location. */ public static boolean teleportCentered(Entity entity, Location location) { PreCon.notNull(entity); PreCon.notNull(location); Location adjusted = getCenteredLocation(location, Bukkit.isPrimaryThread() ? CENTERED_LOCATION : new Location(null, 0, 0, 0)); return entity.teleport(adjusted, PlayerTeleportEvent.TeleportCause.PLUGIN); } /** * Copy a source {@link org.bukkit.Location} and change coordinate values to block * coordinates in the copy. * * <p>Removes yaw and pitch values, converts coordinates to whole numbers.</p> * * @param source The source location. * * @return A new {@link org.bukkit.Location} containing the result. */ public static Location getBlockLocation(Location source) { PreCon.notNull(source); return getBlockLocation(source, new Location(null, 0, 0, 0)); } /** * Copy a source {@link org.bukkit.Location} to an output location and change coordinate * values to block coordinates in the output. 
* * <p>Removes yaw and pitch values, converts coordinates to whole numbers.</p> * * @param source The source location. * @param output The location to put the results into. * * @return The output {@link org.bukkit.Location}. */ public static Location getBlockLocation(Location source, Location output) { PreCon.notNull(source); PreCon.notNull(output); output.setWorld(source.getWorld()); output.setX(source.getBlockX()); output.setY(source.getBlockY()); output.setZ(source.getBlockZ()); output.setYaw(0); output.setPitch(0); return output; } /** * Copy a source {@link org.bukkit.Location} and add values to the copy without changing * the original {@link org.bukkit.Location}. * * @param source The source location. * @param x The value to add to the X coordinates. * @param y The value to add to the Y coordinates. * @param z The value to add to the Z coordinates. * * @return A new {@link org.bukkit.Location}. */ public static Location add(Location source, double x, double y, double z) { return source.clone().add(x, y, z); } /** * Copy a source {@link org.bukkit.Location} to an output location and add values to the * output without changing the original {@link org.bukkit.Location}. * * @param source The source location. * @param output The location to put the results into. * @param x The value to add to the X coordinates. * @param y The value to add to the Y coordinates. * @param z The value to add to the Z coordinates. * * @return The output {@link org.bukkit.Location}. */ public static Location add(Location source, Location output, double x, double y, double z) { return copy(source, output).add(x, y, z); } /** * Copy a source {@link org.bukkit.Location} and add noise to the copy. * * <p>Translates the location to another random location within the specified * radius of the source location randomly.</p> * * @param source The location. * @param radiusX The max radius on the X axis. * @param radiusY The max radius on the Y axis. * @param radiusZ The max radius on the Z axis. 
* * @return A new {@link org.bukkit.Location}. */ public static Location addNoise(Location source, double radiusX, double radiusY, double radiusZ) { PreCon.notNull(source); return addNoise(source, source.clone(), radiusX, radiusY, radiusZ); } /** * Copy a source {@link org.bukkit.Location} to an output location and add noise * to the output. * * <p>Translates the location to another random location within the specified * radius of the source location randomly.</p> * * @param source The location. * @param output The location to put the results into. * @param radiusX The max radius on the X axis. * @param radiusY The max radius on the Y axis. * @param radiusZ The max radius on the Z axis. * * @return The output {@link org.bukkit.Location}. */ public static Location addNoise(Location source, Location output, double radiusX, double radiusY, double radiusZ) { PreCon.notNull(source); PreCon.notNull(output); PreCon.positiveNumber(radiusX); PreCon.positiveNumber(radiusY); PreCon.positiveNumber(radiusZ); double noiseX = 0; double noiseY = 0; double noiseZ = 0; if (radiusX > 0) { noiseX = Rand.getDouble(radiusX * 2) - radiusX; } if (radiusY > 0) { noiseY = Rand.getDouble(radiusY * 2) - radiusY; } if (radiusZ > 0) { noiseZ = Rand.getDouble(radiusZ * 2) - radiusZ; } return output.add(noiseX, noiseY, noiseZ); } /** * Determine if 2 locations can be considered the same using the specified * precision. * * <p>The precision is used as: location1 is about the same as location2 +/- precision.</p> * * @param location1 The first location to compare. * @param location2 The second location to compare. * @param precision The precision. 
*/ public static boolean isLocationMatch(Location location1, Location location2, double precision) { PreCon.notNull(location1); PreCon.notNull(location2); PreCon.positiveNumber(precision); double xDelta = Math.abs(location1.getX() - location2.getX()); double zDelta = Math.abs(location1.getZ() - location2.getZ()); double yDelta = Math.abs(location1.getY() - location2.getY()); return xDelta <= precision && zDelta <= precision && yDelta <= precision; } /** * Parse a {@link org.bukkit.Location} from a formatted string. * * <p>Format of string : x,y,z</p> * * @param world The world the location is for. * @param coordinates The string coordinates. * * @return A new {@link org.bukkit.Location} or null if a location could not be parsed. */ @Nullable public static Location parseSimpleLocation(World world, String coordinates) { return parseSimpleLocation(new Location(null, 0, 0, 0), world, coordinates); } /** * Parse a {@link org.bukkit.Location} from a formatted string. * * <p>Format of string : x,y,z</p> * * @param output The location place the results in. * @param world The world the location is for. * @param coordinates The string coordinates. * * @return The output {@link org.bukkit.Location} or null if a location could not be parsed. */ @Nullable public static Location parseSimpleLocation(Location output, World world, String coordinates) { PreCon.notNull(output); PreCon.notNull(world); PreCon.notNull(coordinates); String[] parts = TextUtils.PATTERN_COMMA.split(coordinates); if (parts.length != 3) return null; double x = TextUtils.parseDouble(parts[0], Double.MAX_VALUE); double y = TextUtils.parseDouble(parts[1], Double.MAX_VALUE); double z = TextUtils.parseDouble(parts[2], Double.MAX_VALUE); if (x != Double.MAX_VALUE && y != Double.MAX_VALUE && z != Double.MAX_VALUE) { output.setWorld(world); output.setX(x); output.setY(y); output.setZ(z); return output; } return null; } /** * Parse a {@link org.bukkit.Location} from a formatted string. 
* * <p>Format of string: x,y,z,yawF,pitchF,worldName</p> * * @param coordinates The string coordinates. * * @return A new {@link SyncLocation} or null if the string could not be parsed. */ @Nullable public static SyncLocation parseLocation(String coordinates) { PreCon.notNull(coordinates); SyncLocation location = new SyncLocation((World)null, 0, 0, 0); return parseLocation(coordinates, location); } /** * Parse a location from a formatted string. * * <p>Format of string: x,y,z,yawF,pitchF,worldName</p> * * @param coordinates The string coordinates. * * @return A new {@link SyncLocation} or null if the string could not be parsed. */ @Nullable public static SyncLocation parseLocation(String coordinates, SyncLocation output) { PreCon.notNull(coordinates); String[] parts = TextUtils.PATTERN_COMMA.split(coordinates); if (parts.length != 6) return null; double x = TextUtils.parseDouble(parts[0], Double.MAX_VALUE); if (x == Double.MAX_VALUE) return null; double y = TextUtils.parseDouble(parts[1], Double.MAX_VALUE); if (y == Double.MAX_VALUE) return null; double z = TextUtils.parseDouble(parts[2], Double.MAX_VALUE); if (z == Double.MAX_VALUE) return null; float yaw = TextUtils.parseFloat(parts[3], Float.MAX_VALUE); if (yaw == Float.MAX_VALUE) return null; float pitch = TextUtils.parseFloat(parts[4], Float.MAX_VALUE); if (pitch == Float.MAX_VALUE) return null; output.setWorld(parts[5]); output.setX(x); output.setY(y); output.setZ(z); output.setYaw(yaw); output.setPitch(pitch); return output; } /** * Parse the world name from a from a location formatted string. * * <p>Format of string: x,y,z,yawF,pitchF,worldName</p> * * <p>Useful when the world the location is for is not loaded and * the name is needed.</p> * * @param coordinates The string coordinates. * * @return Null if the string could not be parsed. 
*/ @Nullable public static String parseLocationWorldName(String coordinates) { PreCon.notNull(coordinates); String[] parts = TextUtils.PATTERN_COMMA.split(coordinates); if (parts.length != 6) return null; return parts[5]; } /** * Convert a {@link org.bukkit.Location} to a parsable string. * * @param location The location to convert. */ public static String locationToString(Location location) { PreCon.notNull(location); return String.valueOf(location.getX()) + ',' + location.getY() + ',' + location.getZ() + ',' + location.getYaw() + ',' + location.getPitch() + ',' + location.getWorld().getName(); } /** * Convert a {@link org.bukkit.Location} to a parsable string. * * @param location The location to convert. * @param floatingPointPlaces The number of places in the floating point values. */ public static String locationToString(Location location, int floatingPointPlaces) { PreCon.notNull(location); PreCon.positiveNumber(floatingPointPlaces); BigDecimal x = new BigDecimal(floatingPointPlaces == 0 ? location.getBlockX() : location.getX()) .setScale(floatingPointPlaces, RoundingMode.HALF_UP); BigDecimal y = new BigDecimal(floatingPointPlaces == 0 ? location.getBlockY() : location.getY()) .setScale(floatingPointPlaces, RoundingMode.HALF_UP); BigDecimal z = new BigDecimal(floatingPointPlaces == 0 ? location.getBlockZ() : location.getZ()) .setScale(floatingPointPlaces, RoundingMode.HALF_UP); BigDecimal yaw = new BigDecimal(location.getYaw()) .setScale(floatingPointPlaces, RoundingMode.HALF_UP); BigDecimal pitch = new BigDecimal(location.getPitch()) .setScale(floatingPointPlaces, RoundingMode.HALF_UP); return String.valueOf(x) + ',' + y + ',' + z + ',' + yaw + ',' + pitch + ',' + location.getWorld().getName(); } /** * Convert a locations yaw angle to a {@link org.bukkit.block.BlockFace}. * * @param location The location to convert. 
*/ public static BlockFace getYawBlockFace(Location location) { PreCon.notNull(location); return getYawBlockFace(location.getYaw()); } /** * Convert a yaw angle to a {@link org.bukkit.block.BlockFace}. * * @param yaw The yaw angle to convert. */ public static BlockFace getYawBlockFace(float yaw) { yaw = yaw + 11.25f; yaw = yaw < 0 ? 360 - (Math.abs(yaw) % 360) : yaw % 360; int i = (int)(yaw / 22.5); return YAW_FACES[i]; } /** * Find a surface block (solid block that can be walked on) {@link org.bukkit.Location} * below the provided search location. * * @param source The source location. * * @return A new {@link org.bukkit.Location} or null if the search reaches below 0 on * the Y axis. */ @Nullable public static Location findSurfaceBelow(Location source) { return findSurfaceBelow(source, new Location(null, 0, 0, 0)); } /** * Find a surface block (solid block that can be walked on) {@link org.bukkit.Location} * below the specified source location. * * @param source The source location. * * @return The output {@link org.bukkit.Location} or null if the search reaches below * 0 on the Y axis. */ @Nullable public static Location findSurfaceBelow(Location source, Location output) { PreCon.notNull(source); output.setWorld(source.getWorld()); output.setX(source.getX()); output.setY(source.getBlockY()); output.setZ(source.getZ()); output.setYaw(source.getYaw()); output.setPitch(source.getPitch()); if (!Materials.isTransparent(output.getBlock().getType())) return output; output.add(0, -1, 0); Block current = source.getBlock(); while (!Materials.isSurface(current.getType())) { output.add(0, -1, 0); current = output.getBlock(); if (output.getY() < 0) { return null; } } return output; } /** * Get the {@link org.bukkit.Location} closest to the specified source location. * * @param source The source location. * @param locations The location candidates. 
*/ @Nullable public static Location getClosestLocation(Location source, Collection<Location> locations) { return getClosestLocation(source, locations, null); } /** * Get the {@link org.bukkit.Location} closest to the specified source location. * * @param source The source location. * @param locations The location candidates. * @param validator The validator used to determine if a location is a candidate. */ @Nullable public static Location getClosestLocation(Location source, Collection<Location> locations, @Nullable IValidator<Location> validator) { PreCon.notNull(source); PreCon.notNull(locations); Location closest = null; double closestDist = Double.MAX_VALUE; for (Location loc : locations) { if (validator != null && !validator.isValid(loc)) continue; double dist; if ((dist = source.distanceSquared(loc)) < closestDist) { closest = loc; closestDist = dist; } } return closest; } /** * Determine if a target {@link org.bukkit.Location} is within the specified radius of * a source location. * * <p>If all radius values are equal, the radius is spherical. Otherwise the radius is cuboid.</p> * * @param source The source {@link org.bukkit.Location}. * @param target The target {@link org.bukkit.Location}. * @param radiusX The x-axis radius. * @param radiusY The y-axis radius. * @param radiusZ The z-axis radius. 
*/ public static boolean isInRange(Location source, Location target, double radiusX, double radiusY, double radiusZ) { PreCon.notNull(source); PreCon.notNull(target); PreCon.positiveNumber(radiusX); PreCon.positiveNumber(radiusY); PreCon.positiveNumber(radiusZ); if (Double.compare(radiusX, radiusZ) == 0 && Double.compare(radiusY, radiusZ) == 0) { return source.distanceSquared(target) <= radiusX * radiusX; } else { double deltaX = Math.abs(source.getX() - target.getX()); double deltaY = Math.abs(source.getY() - target.getY()); double deltaZ = Math.abs(source.getZ() - target.getZ()); return deltaX <= radiusX && deltaY <= radiusY && deltaZ <= radiusZ; } } /** * Get a {@link org.bukkit.Location} that is a specified distance from a source location * using the source locations yaw angle to determine the direction of the new location * from the source location. * * <p>The new points Y coordinates are the same as the source location.</p> * * @param source The source location. * @param distance The distance from the source location. * * @return A new {@link org.bukkit.Location}. */ public static Location getYawLocation(Location source, double distance) { return getYawLocation(source, distance, source.getYaw(), new Location(null, 0, 0, 0)); } /** * Get a {@link org.bukkit.Location} that is a specified distance from a source location * using the source locations yaw angle to determine the direction of the new location * from the source location. * * <p>The new points Y coordinates are the same as the source location.</p> * * @param source The source location. * @param distance The distance from the source location. * @param output The {@link org.bukkit.Location} to output the results into. * * @return The output {@link org.bukkit.Location}. 
*/ public static Location getYawLocation(Location source, double distance, Location output) { return getYawLocation(source, distance, source.getYaw(), output); } /** * Get a {@link org.bukkit.Location} that is a specified distance from a source location * using the specified yaw angle to determine the direction of the new location * from the source location. * * <p>The new points Y coordinates are the same as the source location.</p> * * @param source The source location. * @param distance The distance from the source location. * @param yaw The minecraft yaw angle (-180 to 180). * * @return A new {@link org.bukkit.Location}. */ public static Location getYawLocation(Location source, double distance, float yaw) { return getYawLocation(source, distance, yaw, new Location(null, 0, 0, 0)); } /** * Get a {@link org.bukkit.Location} that is a specified distance from a source location * using the specified yaw angle to determine the direction of the new location * from the source location. * * <p>The new points Y coordinates are the same as the source location.</p> * * @param source The source location. * @param distance The distance from the source location. * @param yaw The minecraft yaw angle (-180 to 180). * @param output The {@link org.bukkit.Location} to output the result into. * * @return The output {@link org.bukkit.Location}. */ public static Location getYawLocation(Location source, double distance, float yaw, Location output) { PreCon.notNull(source); PreCon.notNull(output); yaw = yaw >= 0 ? yaw % 360 : 180 + (180 - (Math.abs(yaw) % 180)); double radianYaw = Math.toRadians(-yaw); double x = Math.sin(radianYaw) * distance; double z = Math.cos(radianYaw) * distance; output.setWorld(source.getWorld()); output.setX(source.getX() + x); output.setY(source.getY()); output.setZ(source.getZ() + z); output.setYaw(source.getYaw()); output.setPitch(source.getPitch()); return output; } /** * Get the Minecraft yaw angle from the source location towards the target location. 
* * @param source The source {@link org.bukkit.Location}. * @param target The target {@link org.bukkit.Location}. */ public static float getYawAngle(Location source, Location target) { PreCon.notNull(source); PreCon.notNull(target); // Y and X to prevent ide warnings on Math.atan2 double deltaY = target.getX() - source.getX(); double deltaX = target.getZ() - source.getZ(); double angle = Math.atan2(deltaY, deltaX); return -(float)Math.toDegrees(angle); } /** * Rotate a {@link org.bukkit.Location} around an axis {@link org.bukkit.Location}. * * @param axis The axis location. * @param location The location to move. * @param rotationX The rotation around the X axis in degrees. * @param rotationY The rotation around the Y axis in degrees. * @param rotationZ The rotation around the Z axis in degrees. */ public static Location rotate(Location axis, Location location, double rotationX, double rotationY, double rotationZ) { PreCon.notNull(axis); PreCon.notNull(location); return rotate(axis, location, new Location(null, 0, 0, 0), rotationX, rotationY, rotationZ); } /** * Rotate a {@link org.bukkit.Location} around an axis {@link org.bukkit.Location}. * * @param axis The axis location. * @param location The location to move. * @param output The location to put results into. * @param rotationX The rotation around the X axis in degrees. * @param rotationY The rotation around the Y axis in degrees. * @param rotationZ The rotation around the Z axis in degrees. * * @return The output location. 
*/ public static Location rotate(Location axis, Location location, Location output, double rotationX, double rotationY, double rotationZ) { PreCon.notNull(axis); PreCon.notNull(location); double x = location.getX(); double y = location.getY(); double z = location.getZ(); double centerX = axis.getX(); double centerY = axis.getY(); double centerZ = axis.getZ(); double translateX = x; double translateY = y; double translateZ = z; double yaw = location.getYaw(); // rotate on X axis if (Double.compare(rotationX, 0.0D) != 0) { double rotX = Math.toRadians(rotationX); translateY = rotateX(centerY, centerZ, y, z, rotX); translateZ = rotateZ(centerY, centerZ, y, z, rotX); } // rotate on Y axis if (Double.compare(rotationY, 0.0D) != 0) { double rotY = Math.toRadians(rotationY); translateX = rotateX(centerX, centerZ, x, z, rotY); translateZ = rotateZ(centerX, centerZ, x, z, rotY); yaw += rotationY; } // rotate on Z axis if (Double.compare(rotationZ, 0.0D) != 0) { double rotZ = Math.toRadians(rotationZ); translateX = rotateX(centerX, centerY, x, y, rotZ); translateY = rotateZ(centerX, centerY, x, y, rotZ); } output.setWorld(location.getWorld()); output.setX(translateX); output.setY(translateY); output.setZ(translateZ); output.setYaw((float) yaw); output.setPitch(location.getPitch()); return output; } private static double rotateX(double centerA, double centerB, double a, double b, double rotation) { return centerA + Math.cos(rotation) * (a - centerA) - Math.sin(rotation) * (b - centerB); } private static double rotateZ(double centerA, double centerB, double a, double b, double rotation) { return centerB + Math.sin(rotation) * (a - centerA) + Math.cos(rotation) * (b - centerB); } }
package com.ryanddawkins.glowing_spice;

import javax.swing.JFrame;
import java.awt.Toolkit;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;

import uk.co.caprica.vlcj.component.EmbeddedMediaPlayerComponent;

import com.sun.jna.Native;

/**
 * Movie player class to integrate with VLC player.
 *
 * <p>Wraps a vlcj {@link EmbeddedMediaPlayerComponent} in a screen-sized,
 * initially-hidden {@link JFrame}. Closing the window hides it and pauses
 * playback rather than disposing the frame.</p>
 *
 * @author Ryan Dawkins
 * @since 0.1
 */
public class VideoPlayer extends JFrame {

    /** Amount skipped by fastForward/fastBackward: 10 seconds, in milliseconds. */
    private static final int SKIP_MILLIS = 10 * 1000;

    /** Path of the file currently (or last) played; null until playFile is called. */
    private String playingFile;

    private final EmbeddedMediaPlayerComponent mediaPlayerComponent;

    /**
     * Constructor that creates the JFrame, sizes it to the screen, and wires
     * up the embedded VLC media player component.
     */
    public VideoPlayer() {

        // Size the frame to the full screen and maximize it.
        // NOTE(review): setUndecorated(false) keeps the title bar, so this is
        // "maximized", not true fullscreen — confirm that is the intent.
        Toolkit tk = Toolkit.getDefaultToolkit();
        int xsize = ((int) tk.getScreenSize().getWidth());
        int ysize = ((int) tk.getScreenSize().getHeight());
        this.setSize(xsize, ysize);
        this.setExtendedState(MAXIMIZED_BOTH);
        setUndecorated(false);

        // Grabs mediaComponent to call functions on.
        this.mediaPlayerComponent = new EmbeddedMediaPlayerComponent();
        this.setContentPane(this.mediaPlayerComponent);

        // -1 disables subtitle tracks.
        this.mediaPlayerComponent.getMediaPlayer().setSpu(-1);

        // Handle close ourselves: hide and pause instead of destroying the frame.
        this.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
        this.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosing(WindowEvent e) {
                setVisible(false);
                pause();
            }
        });
        this.setVisible(false);
    }

    /**
     * Chainable method to play the given file by string name.
     *
     * @param path  Filepath to the video file.
     *
     * @return  This VideoPlayer, for chaining.
     */
    public VideoPlayer playFile(String path) {
        this.mediaPlayerComponent.getMediaPlayer().playMedia(path);
        this.playingFile = path;
        return this;
    }

    /**
     * Method to pause the movie by using the mediaPlayer object.
     *
     * @return  This VideoPlayer, for chaining.
     */
    public VideoPlayer pause() {
        this.mediaPlayerComponent.getMediaPlayer().pause();
        return this;
    }

    /**
     * Tells the mediaplayer to play the previous chapter.
     *
     * @return  This VideoPlayer, for chaining.
     */
    public VideoPlayer previousChapter() {
        this.mediaPlayerComponent.getMediaPlayer().previousChapter();
        return this;
    }

    /**
     * Tells the mediaplayer to play the next chapter.
     *
     * @return  This VideoPlayer, for chaining.
     */
    public VideoPlayer nextChapter() {
        this.mediaPlayerComponent.getMediaPlayer().nextChapter();
        return this;
    }

    /**
     * Skips forward 10 seconds.
     *
     * <p>fix: previously skipped {@code 10 * 10000} ms (100 seconds) while the
     * documentation promised 10 seconds.</p>
     *
     * @return  This VideoPlayer, for chaining.
     */
    public VideoPlayer fastForward() {
        this.skip(SKIP_MILLIS);
        return this;
    }

    /**
     * Skips backward 10 seconds.
     *
     * @return  This VideoPlayer, for chaining.
     */
    public VideoPlayer fastBackward() {
        this.skip(-SKIP_MILLIS);
        return this;
    }

    /**
     * Not implemented yet.
     *
     * @return  Always null. TODO: report the media player's current status.
     */
    public String getVideoStatus() {
        return null;
    }

    /**
     * Skips the given amount of playback time in one jump.
     *
     * @param milliseconds  Amount to skip, in milliseconds; negative skips backward.
     *
     * @return  This VideoPlayer, for chaining.
     */
    private VideoPlayer skip(int milliseconds) {
        this.mediaPlayerComponent.getMediaPlayer().skip(milliseconds);
        return this;
    }
}
package com.team254.frc2013.auto; import com.team254.frc2013.commands.CheckIntakeCalibratedCommand; import com.team254.frc2013.commands.DriveAtSpeedCommand; import com.team254.frc2013.commands.DriveProfiledCommand; import com.team254.frc2013.commands.SetIntakeDownCommand; import com.team254.frc2013.commands.ResetDriveEncodersCommand; import com.team254.frc2013.commands.ResetGyroCommand; import com.team254.frc2013.commands.RunIntakeCommand; import com.team254.frc2013.commands.ShiftCommand; import com.team254.frc2013.commands.ShootSequenceCommand; import com.team254.frc2013.commands.ShooterOnCommand; import com.team254.frc2013.commands.ShooterPresetCommand; import com.team254.frc2013.subsystems.Shooter; import com.team254.lib.control.impl.CustomProfile; import edu.wpi.first.wpilibj.command.CommandGroup; /** * Scores three starting discs from the back of the pyramid, picks up four more, then scores them * from the front of the pyramid. * @author tom@team254.com (Tom Bottiglieri) * @author pat@team254.com (Patrick Fairbank) */ public class SevenDiscAutoMode extends CommandGroup { CustomProfile profile = new CustomProfile(); public SevenDiscAutoMode() { // Shoot first discs addSequential(new ShooterOnCommand(true)); addSequential(new ShooterPresetCommand(Shooter.PRESET_BACK_PYRAMID)); addSequential(new SetIntakeDownCommand()); addSequential(new ShiftCommand(false)); addSequential(new ShootSequenceCommand()); addSequential(new ShootSequenceCommand()); addSequential(new ShootSequenceCommand(false)); // Pick up 2 middle discs addSequential(new CheckIntakeCalibratedCommand(.5)); addSequential(new ResetDriveEncodersCommand()); addSequential(new ResetGyroCommand()); addSequential(new RunIntakeCommand(1)); addSequential(new ShooterPresetCommand(Shooter.PRESET_FRONT_PYRAMID)); // Drive to front of pyramid and shoot 2 addSequential(new DriveProfiledCommand(6.35, 3.5, 0, 5)); addSequential(new RunIntakeCommand(0.0)); addSequential(new ShootSequenceCommand()); addSequential(new 
ShootSequenceCommand(false)); // Pick up 2 far discs addSequential(new RunIntakeCommand(1.0, true)); addSequential(new DriveAtSpeedCommand(10.5, 2.0, 0, 4.6)); // Drive to front of pyramid and shoot 2 addSequential(new DriveProfiledCommand(6.35, 5, 0, 2.5)); addSequential(new RunIntakeCommand(0.0)); addSequential(new ShootSequenceCommand()); addSequential(new ShootSequenceCommand()); addSequential(new ShootSequenceCommand()); addSequential(new ShootSequenceCommand()); addSequential(new ShooterOnCommand(false)); addSequential(new RunIntakeCommand(0.0)); } }
package com.valkryst.VTerminal.component;

import com.valkryst.VRadio.Radio;
import com.valkryst.VTerminal.AsciiCharacter;
import com.valkryst.VTerminal.AsciiString;
import com.valkryst.VTerminal.Panel;
import com.valkryst.VTerminal.font.Font;
import com.valkryst.VTerminal.misc.IntRange;
import lombok.Getter;
import lombok.Setter;

import java.awt.Rectangle;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.Arrays;
import java.util.Objects;
import java.util.Optional;

public class Component {
    /** The x-axis (column) coordinate of the top-left character. */
    @Getter private int columnIndex;
    /** The y-axis (row) coordinate of the top-left character. */
    @Getter private int rowIndex;

    /** The width, in characters. */
    @Getter private int width;
    /** The height, in characters. */
    @Getter private int height;

    /** Whether or not the component is currently the target of the user's input. */
    @Getter private boolean isFocused = false;

    /** The bounding box. */
    @Getter private Rectangle boundingBox = new Rectangle();

    /** The strings representing the character-rows of the component. */
    @Getter private AsciiString[] strings;

    /** The radio to transmit events to. */
    @Getter private Radio<String> radio;

    /** The screen that the component is on. */
    @Getter @Setter private Screen screen;

    /**
     * Constructs a new AsciiComponent.
     *
     * @param columnIndex
     *         The x-axis (column) coordinate of the top-left character.
     *
     * @param rowIndex
     *         The y-axis (row) coordinate of the top-left character.
     *
     * @param width
     *         The width, in characters.
     *
     * @param height
     *         The height, in characters.
     *
     * @throws IllegalArgumentException
     *         If either coordinate is negative or either dimension is below 1.
     */
    public Component(final int columnIndex, final int rowIndex, final int width, final int height) {
        if (columnIndex < 0) {
            throw new IllegalArgumentException("You must specify a columnIndex of 0 or greater.");
        }

        if (rowIndex < 0) {
            throw new IllegalArgumentException("You must specify a rowIndex of 0 or greater.");
        }

        if (width < 1) {
            throw new IllegalArgumentException("You must specify a width of 1 or greater.");
        }

        if (height < 1) {
            throw new IllegalArgumentException("You must specify a height of 1 or greater.");
        }

        this.columnIndex = columnIndex;
        this.rowIndex = rowIndex;
        this.width = width;
        this.height = height;

        boundingBox.setLocation(columnIndex, rowIndex);
        boundingBox.setSize(width, height);

        strings = new AsciiString[height];

        for (int row = 0 ; row < height ; row++) {
            strings[row] = new AsciiString(width);
        }
    }

    @Override
    public boolean equals(final Object otherObj) {
        if (this == otherObj) {
            return true;
        }

        if (otherObj instanceof Component == false) {
            return false;
        }

        // Left out a check for isFocused since two components could be
        // virtually identical other than their focus.
        // Left out a check for radio.
        final Component otherComp = (Component) otherObj;

        // fix: previously seeded with super.equals(otherObj) (Object identity),
        // which made equals() false for every distinct instance and rendered the
        // field comparisons below unreachable in effect.
        boolean isEqual = columnIndex == otherComp.getColumnIndex();
        isEqual &= rowIndex == otherComp.getRowIndex();
        isEqual &= width == otherComp.getWidth();
        isEqual &= height == otherComp.getHeight();
        isEqual &= Objects.equals(boundingBox, otherComp.getBoundingBox());
        isEqual &= Arrays.equals(strings, otherComp.getStrings());
        return isEqual;
    }

    @Override
    public int hashCode() {
        // NOTE(review): hashes mutable state — a component used as a map key must
        // not be moved/resized while in the map.
        return Objects.hash(columnIndex, rowIndex, width, height, boundingBox, strings);
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append("Component:");
        sb.append("\n\tColumn Index:\t").append(columnIndex);
        sb.append("\n\tRow Index:\t").append(rowIndex);
        sb.append("\n\tWidth:\t").append(width);
        sb.append("\n\tHeight:\t").append(height);
        sb.append("\n\tIs Focused:\t").append(isFocused);
        sb.append("\n\tBounding Box:\t").append(boundingBox);
        sb.append("\n\tStrings:\n");

        for (final AsciiString string : strings) {
            for (final AsciiCharacter character : string.getCharacters()) {
                sb.append("\t").append(character.getCharacter());
            }

            sb.append("\n\t\t");
        }

        sb.append("\n\tRadio:\t").append(radio);

        return sb.toString();
    }

    /**
     * Registers events, required by the component, with the specified panel.
     *
     * <p>Currently: a left-click focuses the component when the click lands
     * inside it, and unfocuses it otherwise.</p>
     *
     * @param panel
     *         The panel to register events with.
     */
    public void registerEventHandlers(final Panel panel) {
        final Font font = panel.getImageCache().getFont();
        final int fontWidth = font.getWidth();
        final int fontHeight = font.getHeight();

        panel.addMouseListener(new MouseListener() {
            @Override
            public void mouseClicked(final MouseEvent e) {
                if (e.getButton() == MouseEvent.BUTTON1) {
                    isFocused = intersects(e, fontWidth, fontHeight);
                }
            }

            @Override
            public void mousePressed(final MouseEvent e) {}

            @Override
            public void mouseReleased(final MouseEvent e) {}

            @Override
            public void mouseEntered(final MouseEvent e) {}

            @Override
            public void mouseExited(final MouseEvent e) {}
        });
    }

    /**
     * Draws the component on the specified screen.
     *
     * @param screen
     *         The screen to draw on.
     */
    public void draw(final Screen screen) {
        for (int row = 0 ; row < strings.length ; row++) {
            screen.write(strings[row], columnIndex, rowIndex + row);
        }
    }

    /** Attempts to transmit a "DRAW" event to the assigned Radio. */
    public void transmitDraw() {
        if (radio != null) {
            radio.transmit("DRAW");
        }
    }

    /**
     * Determines if the specified component intersects with this component.
     *
     * @param otherComponent
     *         The component to check intersection with.
     *
     * @return
     *         Whether or not the components intersect.
     */
    public boolean intersects(final Component otherComponent) {
        return otherComponent != null && boundingBox.intersects(otherComponent.getBoundingBox());
    }

    /**
     * Determines if the specified point intersects with this component.
     *
     * @param pointX
     *         The x-axis (column) coordinate.
     *
     * @param pointY
     *         The y-axis (row) coordinate.
     *
     * @return
     *         Whether or not the point intersects with this component.
     */
    public boolean intersects(final int pointX, final int pointY) {
        boolean intersects = pointX >= columnIndex;
        intersects &= pointX < (boundingBox.getWidth() + columnIndex);
        intersects &= pointY >= rowIndex;
        intersects &= pointY < (boundingBox.getHeight() + rowIndex);
        return intersects;
    }

    /**
     * Determines whether or not the specified mouse event is at a point that intersects this component.
     *
     * @param event
     *         The event.
     *
     * @param fontWidth
     *         The width of the font being used to draw the component's characters.
     *
     * @param fontHeight
     *         The height of the font being used to draw the component's characters.
     *
     * @return
     *         Whether or not the mouse event is at a point that intersects this component.
     */
    public boolean intersects(final MouseEvent event, final int fontWidth, final int fontHeight) {
        // Convert pixel coordinates to character-cell coordinates.
        final int mouseX = event.getX() / fontWidth;
        final int mouseY = event.getY() / fontHeight;
        return intersects(mouseX, mouseY);
    }

    /**
     * Determines whether or not the specified position is within the bounds of the component.
     *
     * @param columnIndex
     *         The x-axis (column) coordinate, relative to the component.
     *
     * @param rowIndex
     *         The y-axis (row) coordinate, relative to the component.
     *
     * @return
     *         Whether or not the specified position is within the bounds of the component.
     */
    public boolean isPositionValid(final int columnIndex, final int rowIndex) {
        if (rowIndex < 0 || rowIndex > boundingBox.getHeight() - 1) {
            return false;
        }

        if (columnIndex < 0 || columnIndex > boundingBox.getWidth() - 1) {
            return false;
        }

        return true;
    }

    /**
     * Enables the blink effect for every character.
     *
     * @param millsBetweenBlinks
     *         The amount of time, in milliseconds, before the blink effect can occur.
     *
     * @param radio
     *         The Radio to transmit a DRAW event to whenever a blink occurs.
     */
    public void enableBlinkEffect(final short millsBetweenBlinks, final Radio<String> radio) {
        for (final AsciiString s: strings) {
            for (final AsciiCharacter c : s.getCharacters()) {
                c.enableBlinkEffect(millsBetweenBlinks, radio);
            }
        }
    }

    /** Resumes the blink effect for every character. */
    public void resumeBlinkEffect() {
        for (final AsciiString s: strings) {
            for (final AsciiCharacter c : s.getCharacters()) {
                c.resumeBlinkEffect();
            }
        }
    }

    /** Pauses the blink effect for every character. */
    public void pauseBlinkEffect() {
        for (final AsciiString s: strings) {
            for (final AsciiCharacter c : s.getCharacters()) {
                c.pauseBlinkEffect();
            }
        }
    }

    /** Disables the blink effect for every character. */
    public void disableBlinkEffect() {
        for (final AsciiString s: strings) {
            for (final AsciiCharacter c : s.getCharacters()) {
                c.disableBlinkEffect();
            }
        }
    }

    /**
     * Retrieves the string corresponding to a row.
     *
     * @param rowIndex
     *         The row index.
     *
     * @return
     *         The string.
     */
    public AsciiString getString(final int rowIndex) {
        return strings[rowIndex];
    }

    /** Sets all characters to be redrawn. */
    public void setAllCharactersToBeRedrawn() {
        for (final AsciiString string : strings) {
            string.setAllCharactersToBeRedrawn();
        }
    }

    /**
     * Sets every character, on the Screen that the component
     * resides on, at the component's current location to be
     * redrawn.
     *
     * This should only be called when the component is moved
     * on-screen or resized.
     */
    private void setLocationOnScreenToBeRedrawn() {
        // fix: guard against a component that hasn't been placed on a screen yet
        // (the screen is injected later via the setter); previously this NPE'd.
        if (screen == null) {
            return;
        }

        // fix: the component occupies rows [rowIndex, rowIndex + height - 1];
        // the previous "<=" touched one row past the footprint.
        for (int y = rowIndex ; y < rowIndex + height ; y++) {
            screen.getString(y).setCharacterRangeToBeRedrawn(new IntRange(columnIndex, columnIndex + width));
        }
    }

    /**
     * Retrieves the AsciiCharacter at a specific location.
     *
     * @param columnIndex
     *         The x-axis (column) coordinate of the location.
     *
     * @param rowIndex
     *         The y-axis (row) coordinate of the location.
     *
     * @return
     *         The AsciiCharacter at the specified location or nothing
     *         if the location is invalid.
     */
    public Optional<AsciiCharacter> getCharacterAt(final int columnIndex, final int rowIndex) {
        if (isPositionValid(columnIndex, rowIndex)) {
            return Optional.of(strings[rowIndex].getCharacters()[columnIndex]);
        }

        return Optional.empty();
    }

    /**
     * Sets a new value for the columnIndex.
     *
     * Does nothing if the specified columnIndex is &lt; 0.
     *
     * @param columnIndex
     *         The new x-axis (column) coordinate of the top-left character of the component.
     */
    public void setColumnIndex(final int columnIndex) {
        if (columnIndex >= 0) {
            // Invalidate the old footprint before moving.
            setLocationOnScreenToBeRedrawn();

            this.columnIndex = columnIndex;
            boundingBox.setLocation(columnIndex, rowIndex);
            setAllCharactersToBeRedrawn();
        }
    }

    /**
     * Sets a new value for the rowIndex.
     *
     * Does nothing if the specified rowIndex is &lt; 0.
     *
     * @param rowIndex
     *         The y-axis (row) coordinate of the top-left character of the component.
     */
    public void setRowIndex(final int rowIndex) {
        if (rowIndex >= 0) {
            setLocationOnScreenToBeRedrawn();

            this.rowIndex = rowIndex;
            boundingBox.setLocation(columnIndex, rowIndex);
            setAllCharactersToBeRedrawn();
        }
    }

    /**
     * Sets a new value for the width.
     *
     * Does nothing if the specified width is &lt; 1.
     *
     * @param width
     *         The new width, in characters, of the component.
     */
    public void setWidth(final int width) {
        // fix: previously rejected any width below columnIndex — a size was being
        // compared against a coordinate. Match the constructor's contract instead.
        // NOTE(review): the strings array is NOT resized here; each row keeps its
        // original character count — TODO confirm intended.
        if (width < 1) {
            return;
        }

        setLocationOnScreenToBeRedrawn();

        this.width = width;
        boundingBox.setSize(width, height);
        setAllCharactersToBeRedrawn();
    }

    /**
     * Sets a new value for the height.
     *
     * Does nothing if the specified height is &lt; 1.
     *
     * @param height
     *         The new height, in characters, of the component.
     */
    public void setHeight(final int height) {
        // fix: previously rejected any height below rowIndex (size vs coordinate).
        // NOTE(review): the strings array is NOT resized here — TODO confirm intended.
        if (height < 1) {
            return;
        }

        setLocationOnScreenToBeRedrawn();

        this.height = height;
        boundingBox.setSize(width, height);
        setAllCharactersToBeRedrawn();
    }

    /**
     * Sets a new radio.
     *
     * Does nothing if the specified radio is null.
     *
     * @param radio
     *         The new radio.
     */
    public void setRadio(final Radio<String> radio) {
        if (radio != null) {
            this.radio = radio;
        }
    }
}
package com.valkryst.generator;

import com.valkryst.NameGenerator;
import com.valkryst.builder.MarkovBuilder;

import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.ThreadLocalRandom;
import java.util.function.IntUnaryOperator;

/**
 * Generates names with a second-order Markov chain trained on a set of
 * example names: each character is chosen from the characters observed to
 * follow the preceding two characters in the training data.
 */
public final class MarkovNameGenerator implements NameGenerator {
    /** All two-character combinations found in the training names. */
    private final List<String> sequences = new ArrayList<>();

    /** Occurrence counts of each follow-up character, keyed by two-character pre-sequence. */
    private final MarkovChain<String, String> markovChain = new MarkovChain<>();

    /**
     * Constructs a MarkovNameGenerator.
     *
     * @param builder
     *        Builder containing the training names.
     */
    public MarkovNameGenerator(final MarkovBuilder builder) {
        builder.getTrainingNames().forEach(this::acquireProbabilities);
    }

    /**
     * Generates a name of (approximately) the requested length.
     *
     * The name is seeded with a random two-character training sequence, so
     * the result always contains at least two characters — a request for
     * length 1 still yields two characters. Generation stops early if no
     * trained follow-up exists for the current two-character tail.
     *
     * @param length
     *        The desired name length, in characters.
     *
     * @return
     *        The generated name with its first letter capitalized, or the
     *        sentinel "LENGTH_WAS_ZERO" when length is zero.
     */
    @Override
    public String generateName(final int length) {
        if (length == 0) {
            return "LENGTH_WAS_ZERO";
        }

        final StringBuilder sb = new StringBuilder();

        // Choose a random preSequence to begin with:
        String preSequence = sequences.get(ThreadLocalRandom.current().nextInt(sequences.size()));
        sb.append(preSequence);

        String previous = preSequence.substring(0, 1);
        String current = preSequence.substring(1, 2);

        for (int i = 2; i < length; ++i) {
            preSequence = previous + "" + current;

            try {
                final String next = chooseNextCharacter(preSequence);
                sb.append(next);

                previous = current;
                current = next;
            } catch (final NoSuchElementException e) {
                // Dead end in the chain — return what has been built so far.
                break;
            }
        }

        // Capitalize the first letter of the name:
        return sb.toString().substring(0, 1).toUpperCase() + sb.toString().substring(1);
    }

    /**
     * Parses the specified string to determine the probability of a character appearing after the previous two
     * characters beginning with the third character in the string and ending with the last.
     *
     * @param trainingString
     *        A string to parse.
     */
    public void acquireProbabilities(final String trainingString) {
        if (trainingString.length() < 2) {
            return;
        }

        for (int i = 2 ; i < trainingString.length(); ++i) {
            final String preSequence = trainingString.substring(i - 2, i);
            final String sequence = trainingString.substring(i, i + 1);

            if (! sequences.contains(preSequence)) {
                sequences.add(preSequence);
            }

            markovChain.incrementOccurrences(preSequence, sequence);
        }
    }

    /**
     * Determines the sequence to follow the specified pre-sequence using the precomputed probabilities of which
     * sequences most often appear after the specified pre-sequence.
     *
     * Ties between equally probable sequences are broken uniformly at random.
     *
     * @param preSequence
     *        The pre-sequence to find the next character for.
     *
     * @return
     *        The next character of the sequence.
     *
     * @throws NoSuchElementException
     *         If the specified pre-sequence has no corresponding probabilities to determine the next sequence from.
     */
    private String chooseNextCharacter(final String preSequence) throws NoSuchElementException {
        // Renamed local (was "sequences") to avoid shadowing the field.
        final ArrayList<String> candidates = markovChain.getAllSequences(preSequence);

        if (candidates.size() == 0) {
            throw new NoSuchElementException("There are no computed probabilities for the specified pre-sequence.");
        }

        Float highestProbability = null;
        final List<String> highestStrings = new ArrayList<>();

        for (final String sequence : candidates) {
            final Float currentProbability = markovChain.getProbability(preSequence, sequence);

            // If the initial data has not yet been set, then set it.
            if (highestProbability == null) {
                highestStrings.add(sequence);
                highestProbability = currentProbability;
            } else {
                final int comparison = Float.compare(currentProbability, highestProbability);

                if (comparison > 0) {
                    // Strictly higher probability — discard previous candidates.
                    highestStrings.clear();
                    highestStrings.add(sequence);
                    highestProbability = currentProbability;
                } else if (comparison == 0) {
                    // BUG FIX: the original code added EVERY non-higher sequence
                    // here (including strictly lower-probability ones), so the
                    // final pick was nearly uniform over all candidates rather
                    // than restricted to the most probable ones. Only ties with
                    // the current maximum belong in the candidate pool.
                    highestStrings.add(sequence);
                }
            }
        }

        final int randomIndex = ThreadLocalRandom.current().nextInt(highestStrings.size());
        return highestStrings.get(randomIndex);
    }
}
package com.virtualfactory.engine;

import com.virtualfactory.screen.other.VideoCamGUI;
import com.virtualfactory.utils.Sensor;
import com.jme3.app.Application;
import com.jme3.app.SimpleApplication;
import com.jme3.app.state.AbstractAppState;
import com.jme3.app.state.AppStateManager;
import com.jme3.asset.AssetManager;
import com.jme3.audio.AudioNode;
import com.jme3.bullet.BulletAppState;
import com.jme3.bullet.collision.shapes.CapsuleCollisionShape;
import com.jme3.bullet.control.CharacterControl;
import com.jme3.bullet.control.RigidBodyControl;
import com.jme3.input.FlyByCamera;
import com.jme3.input.InputManager;
import com.jme3.input.KeyInput;
import com.jme3.input.controls.ActionListener;
import com.jme3.input.controls.KeyTrigger;
import com.jme3.light.AmbientLight;
import com.jme3.light.PointLight;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Matrix3f;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.post.FilterPostProcessor;
import com.jme3.post.filters.FadeFilter;
import com.jme3.renderer.Camera;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import com.jme3.texture.Texture;
import com.jme3.util.SkyFactory;
import com.virtualfactory.narrator.Narrator;
import com.virtualfactory.utils.InvisibleWall;
import com.virtualfactory.utils.Params;
import java.util.HashMap;

/**
 * jME3 AppState for the in-game (running) phase of the virtual factory:
 * loads the factory scene, wires first-person player physics and key
 * controls, and drives the security-camera / top-view toggling.
 */
public class GameRunningState extends AbstractAppState {

    private SimpleApplication app;
    private AssetManager assetManager;
    private InputManager inputManager;
    private Node rootNode;
    private Camera cam;
    private FlyByCamera flyCam;

    // Movement/look flags toggled by the key ActionListener each frame.
    private boolean moveForward;
    private boolean moveBackward;
    private boolean moveLeft;
    private boolean moveRight;
    private boolean lookUp;
    private boolean lookDown;
    private boolean lookLeft;
    private boolean lookRight;
    private boolean isDebugCamEnabled;
    private boolean isTopViewEnabled;
    private boolean isLightingEnabled;
    private boolean isPlayerUpstairs = true;
    // Set to 0 during floor transitions to freeze the player (see handleTransition).
    private float playerSpeed = 1.3f;
    private Vector3f camDir;
    private Vector3f camLeft;
    private Vector3f walkDirection = new Vector3f(0, 0, 0);
    private FilterPostProcessor fpp;
    // Used for fade-out/fade-in between floors and camera views.
    private FadeFilter fadeFilter;
    private ViewPort viewPort;
    private Node factory;
    private final BulletAppState bulletAppState;
    private CharacterControl player;
    private PointLight lamp1;
    private PointLight lamp2;
    // 0 = normal first-person; 1..5 cycle through static/security camera views.
    private int viewNumber;
    private Narrator gameNarrator;
    private HashMap<String, Sensor> factorySensors;
    private AmbientLight ambient;
    private VideoCamGUI videoCamGUI;
    private AudioNode cameraMovingSound;

    public GameRunningState(BulletAppState bulletAppState) {
        this.bulletAppState = bulletAppState;
    }

    /** Grabs engine references from the application and builds the scene. */
    @Override
    public void initialize(AppStateManager stateManager, Application app) {
        super.initialize(stateManager, app);
        this.app = (SimpleApplication) app;
        this.assetManager = this.app.getAssetManager();
        this.inputManager = this.app.getInputManager();
        this.viewPort = this.app.getViewPort();
        this.rootNode = this.app.getRootNode();
        this.flyCam = this.app.getFlyByCamera();
        this.cam = this.app.getCamera();
        this.isPlayerUpstairs = true;

        createFactory();
        initSoundEffects();
        loadPlayerKeyControls();
        gameNarrator = new Narrator(stateManager, assetManager, this.app.getGuiNode());
    }

    /**
     * Loads the factory and grass models, attaches static physics, builds the
     * skybox/lighting/walls/sensors/GUI, and places the first-person player.
     */
    private void createFactory() {
        fpp = new FilterPostProcessor(assetManager);
        fadeFilter = new FadeFilter(1.5f);
        fpp.addFilter(fadeFilter);
        viewPort.addProcessor(fpp);

        flyCam.setMoveSpeed(100);

        /* Factory */
        factory = (Node) assetManager.loadModel("Models/factory.j3o");
        factory.setLocalScale(250.0f, 250.0f, 250.0f);
        factory.setLocalTranslation(-9.0f, 0.0f, 82.0f);
        rootNode.attachChild(factory);

        // Mass 0 => static (immovable) collision body for the factory mesh.
        RigidBodyControl rigidBody = new RigidBodyControl(0);
        factory.addControl(rigidBody);
        bulletAppState.getPhysicsSpace().add(rigidBody);

        Node grass = (Node) assetManager.loadModel("Models/grass.j3o");
        grass.setLocalScale(250.0f, 250.0f, 250.0f);
        grass.setLocalTranslation(-9.0f, 0.0f, 82.0f);
        rootNode.attachChild(grass);

        createSkyBox();
        createLighting();
        createInvisibleWalls();
        createSensors();
        initVideoCamGUI();

        /* First-person Player */
        player = new CharacterControl(new CapsuleCollisionShape(0.4f, 24.5f, 1), 0.05f);
        player.setJumpSpeed(45);
        player.setFallSpeed(120);
        player.setGravity(Params.playerGravity);
        // Spawn point: second (upper) floor.
        player.setPhysicsLocation(new Vector3f(51.68367f, 59.064148f, -292.67755f));
        cam.setRotation(new Quaternion(0.07086334f, -0.01954512f, 0.0019515193f, 0.99729264f));
        flyCam.setRotationSpeed(1.9499999f);
        player.setViewDirection(new Vector3f(0, 0, 1));
        bulletAppState.getPhysicsSpace().add(player);
    }

    private void initSoundEffects() {
        cameraMovingSound = new AudioNode(assetManager, "Sounds/cameraMovingSound.wav", false);
        cameraMovingSound.setPositional(false);
    }

    /** Registers the WASD/arrow/T/0/P key mappings with the shared actionListener. */
    private void loadPlayerKeyControls() {
        String[] mappings = {"move forward", "move backward", "move left", "move right",
                             "look up", "look down", "look left", "look right",
                             "toggle top view", "debug cam", "debug position"};
        KeyTrigger[] triggers = {new KeyTrigger(KeyInput.KEY_W),
                                 new KeyTrigger(KeyInput.KEY_S),
                                 new KeyTrigger(KeyInput.KEY_A),
                                 new KeyTrigger(KeyInput.KEY_D),
                                 new KeyTrigger(KeyInput.KEY_UP),
                                 new KeyTrigger(KeyInput.KEY_DOWN),
                                 new KeyTrigger(KeyInput.KEY_LEFT),
                                 new KeyTrigger(KeyInput.KEY_RIGHT),
                                 new KeyTrigger(KeyInput.KEY_T),
                                 new KeyTrigger(KeyInput.KEY_0),
                                 new KeyTrigger(KeyInput.KEY_P)};

        for (int i = 0; i < mappings.length; i++) {
            inputManager.addMapping(mappings[i], triggers[i]);
            inputManager.addListener(actionListener, mappings[i]);
        }
    }

    // Translates key press/release events into the per-frame movement flags.
    // "toggle top view" and the debug actions trigger on key RELEASE.
    private ActionListener actionListener = new ActionListener() {
        @Override
        public void onAction(String name, boolean isKeyPressed, float tpf) {
            switch (name) {
                case "move forward":
                    moveForward = isKeyPressed;
                    break;
                case "move backward":
                    moveBackward = isKeyPressed;
                    break;
                case "move left":
                    moveLeft = isKeyPressed;
                    break;
                case "move right":
                    moveRight = isKeyPressed;
                    break;
                case "look up":
                    lookUp = isKeyPressed;
                    handleCameraMovingSound(lookUp);
                    break;
                case "look down":
                    lookDown = isKeyPressed;
                    handleCameraMovingSound(lookDown);
                    break;
                case "look left":
                    lookLeft = isKeyPressed;
                    handleCameraMovingSound(lookLeft);
                    break;
                case "look right":
                    lookRight = isKeyPressed;
                    handleCameraMovingSound(lookRight);
                    break;
                case "toggle top view":
                    if (!isKeyPressed && isPlayerUpstairs)
                        toggleTopView();
                    break;
                case "debug cam":
                    if (!isKeyPressed)
                        isDebugCamEnabled = !isDebugCamEnabled;
                    break;
                case "debug position":
                    if (!isKeyPressed)
                        System.out.println("" + "\n\nlocation: " + cam.getLocation()
                                + "\nleft: " + cam.getLeft()
                                + "\nup: " + cam.getUp()
                                + "\ndirection: " + cam.getDirection());
                    if (!isKeyPressed)
                        Params.tutorial.nextStep();
                    break;
                default:
                    break;
            }
        }
    };

    /** Plays the pan/tilt sound while a look key is held and a security cam is active. */
    public void handleCameraMovingSound(boolean isKeyPressed) {
        if (isKeyPressed && isSecurityCamActive())
            cameraMovingSound.play();
        else {
            if (!isThereLookKeyPressed())
                cameraMovingSound.stop();
        }
    }

    @Override
    public void update(float tpf) {
        updatePlayerPosition();
    }

    /**
     * Per-frame movement: applies look rotation, walks the character control,
     * handles floor transitions, and clamps the player above fall-through
     * thresholds (hard-coded Y values per floor).
     */
    public void updatePlayerPosition() {
        if (!Params.isLevelStarted) {
            if (videoCamGUI.isEnabled())
                videoCamGUI.disable();
            return;
        }

        // A stair sensor in range, or a fade already in progress, drives the
        // cross-floor teleport sequence.
        if (factorySensors.get("top stairs").isPlayerInRange()
                || factorySensors.get("bottom stairs").isPlayerInRange()
                || this.fadeFilter.getValue() < 1)
            handleTransition();

        if (lookUp)
            rotateCamera(-Params.rotationSpeed, cam.getLeft());
        if (lookDown)
            rotateCamera(Params.rotationSpeed, cam.getLeft());
        if (lookLeft)
            rotateCamera(Params.rotationSpeed, new Vector3f(0,1,0));
        if (lookRight)
            rotateCamera(-Params.rotationSpeed, new Vector3f(0,1,0));

        if (isTopViewEnabled || isDebugCamEnabled) {
            // Fixed-camera modes: no walking; keep the GUI clock fresh.
            if (fadeFilter.getValue() <= 0)
                fadeFilter.fadeIn();
            if (!videoCamGUI.getDisplayedDateAndTime().equals(videoCamGUI.getUpdatedDateAndTime()))
                videoCamGUI.updateDateAndTime();
            return;
        }

        camDir = cam.getDirection().clone().multLocal(playerSpeed);
        camLeft = cam.getLeft().clone().multLocal(playerSpeed);
        walkDirection.set(0, 0, 0); // reset walkDirection vector
        if (moveForward)
            walkDirection.addLocal(camDir);
        if (moveBackward)
            walkDirection.addLocal(camDir.negate());
        if (moveLeft)
            walkDirection.addLocal(camLeft);
        if (moveRight)
            walkDirection.addLocal(camLeft.negate());
        player.setWalkDirection(walkDirection); // walk!
        cam.setLocation(player.getPhysicsLocation());

        // Safety warps: if the player falls below the current floor's Y
        // threshold, snap them back to a valid position.
        if (isPlayerUpstairs && player.getPhysicsLocation().getY() < 57.0f)
            player.warp(new Vector3f(new Vector3f(130.96266f, 59.064148f, -291.2517f)));
        else if (player.getPhysicsLocation().getY() < 12.65f)
            player.warp(new Vector3f(player.getPhysicsLocation().getX(), 12.65f, player.getPhysicsLocation().getZ()));
    }

    /**
     * Two-phase floor change: first call starts a fade-out (player frozen,
     * footsteps play); once the fade completes the player is teleported to
     * the other floor and the view fades back in.
     */
    private void handleTransition() {
        if (fadeFilter.getDuration() < 1.5f)
            fadeFilter.setDuration(1.5f);

        boolean isFadeEffectStarted = fadeFilter.getValue() < 1;

        if (!isFadeEffectStarted) {
            playerSpeed = 0;
            fadeFilter.fadeOut();
            AudioNode footsteps;
            footsteps = new AudioNode(assetManager,
                    isPlayerUpstairs ? "Sounds/footsteps1.wav" : "Sounds/footsteps2.wav", false);
            footsteps.setPositional(false);
            footsteps.play();
        }

        boolean isFadeEffectFinished = fadeFilter.getValue() <= 0;

        if (isFadeEffectFinished) {
            if (isPlayerUpstairs) {
                player.warp(new Vector3f(121.2937f, 12.65f, -309.41315f));
                cam.setRotation(new Quaternion(0.04508071f, -0.4710204f, 0.02474963f, 0.8806219f));
                isPlayerUpstairs = false;

                if (Params.isTutorialLevel && Params.tutorial.getCurrentStep() == 0)
                    Params.tutorial.nextStep();
            } else {
                // NOTE(review): this branch uses setPhysicsLocation while the
                // other uses warp — confirm the asymmetry is intentional.
                player.setPhysicsLocation(new Vector3f(51.68367f, 59.064148f, -292.67755f));
                cam.setRotation(new Quaternion(0.07086334f, -0.01954512f, 0.0019515193f, 0.99729264f));

                if (!Params.isTutorialLevel)
                    gameNarrator.talk("Second Floor.\nPress 'T' for a top view of the factory.", "Sounds/Narrator/instructions.wav");

                isPlayerUpstairs = true;
            }
            fadeFilter.fadeIn();
            playerSpeed = 1.3f;
        }
    }

    private void createSkyBox() {
        String path = "Textures/Skybox/";
        Texture west = assetManager.loadTexture(path + "skyLeft.jpg");
        Texture east = assetManager.loadTexture(path + "skyRight.jpg");
        Texture north = assetManager.loadTexture(path + "skyFront.jpg");
        Texture south = assetManager.loadTexture(path + "skyBack.jpg");
        Texture top = assetManager.loadTexture(path + "skyTop.jpg");
        Texture bottom = assetManager.loadTexture(path + "skyDown.jpg");

        Spatial skyBox = SkyFactory.createSky(assetManager, west, east, north, south, top, bottom);
        rootNode.attachChild(skyBox);
    }

    /** Adds the two point lights and ambient light (only on the first run). */
    private void createLighting() {
        if (isLightingEnabled || !Params.firstRun)
            return;

        Params.firstRun = false;
        isLightingEnabled = true;

        ColorRGBA color = ColorRGBA.White;

        lamp1 = new PointLight();
        lamp1.setPosition(new Vector3f(40, 200, 150));
        lamp1.setColor(color);
        lamp1.setRadius(lamp1.getRadius()/20);
        rootNode.addLight(lamp1);

        lamp2 = new PointLight();
        lamp2.setPosition(new Vector3f(43.50383f, 80.081642f, -310.90753f));
        lamp2.setColor(color);
        // NOTE(review): lamp2's radius is taken from lamp1 (already divided
        // by 20 above) — confirm lamp1 vs lamp2 here is intentional.
        lamp2.setRadius(lamp1.getRadius());
        rootNode.addLight(lamp2);

        /* Atmosphere */
        ambient = new AmbientLight();
        ambient.setColor(ColorRGBA.DarkGray.mult(1.8f));
        rootNode.addLight(ambient);
    }

    /** Builds invisible physics walls that keep the player inside both floors. */
    private void createInvisibleWalls() {
        String[] wallNames = {"bottom right wall", "bottom left wall", "bottom front wall", "bottom back wall",
                              "upper right wall", "upper left wall", "upper front wall", "upper back wall"};
        Vector3f[] sizes = {new Vector3f(0.6f, 0.6f, 23.600018f),
                            new Vector3f(1.4000001f, 0.6f, 23.600018f),
                            new Vector3f(9.399996f, 1.0f, 1.0f),
                            new Vector3f(9.399996f, 1.0f, 0.6f),
                            new Vector3f(0.40000004f, 1.0f, 3.4000006f),
                            new Vector3f(0.40000004f, 1.0f, 3.0000005f),
                            new Vector3f(7.599997f, 1.0f, 0.20000003f),
                            new Vector3f(9.599996f, 1.0f, 0.40000004f)};
        Vector3f[] locations = {new Vector3f(-37.600044f, 5.850006f, -117.43932f),
                                new Vector3f(137.1993f, 5.850006f, -117.43932f),
                                new Vector3f(50.80012f, 9.999995f, 106.9995f),
                                new Vector3f(50.60012f, 9.999995f, -346.80283f),
                                new Vector3f(-40.543167f, 56.8007f, -318.2905f),
                                new Vector3f(140.85591f, 56.8007f, -322.69077f),
                                new Vector3f(31.656963f, 56.8007f, -289.88876f),
                                new Vector3f(50.433777f, 56.38947f, -350.81924f)};

        for (int i = 0; i < wallNames.length; i++) {
            Geometry invisibleWall = new InvisibleWall(bulletAppState, sizes[i], locations[i]);
            invisibleWall.setName(wallNames[i]);
            rootNode.attachChild(invisibleWall);
        }
    }

    /** Places the stair-proximity sensors that trigger floor transitions. */
    private void createSensors() {
        String[] sensorNames = {"top stairs", "bottom stairs"};
        Vector3f[] sensorSizes = {new Vector3f(15, 10, 5), new Vector3f(15, 10, 5)};
        Vector3f[] sensorLocations = {new Vector3f(134.05f, 59.06f, -285.02f),
                                      new Vector3f(107.42f, 12.67f, -284.9f)};
        factorySensors = new HashMap<>();

        for (int i = 0; i < sensorNames.length; i++)
            factorySensors.put(sensorNames[i], new Sensor(sensorSizes[i], sensorLocations[i], bulletAppState));
    }

    private void initVideoCamGUI() {
        videoCamGUI = new VideoCamGUI(assetManager, app.getGuiNode());
    }

    /**
     * Rotates the camera around the given axis, rejecting the rotation when
     * the resulting direction would leave the Params-defined look limits.
     */
    private void rotateCamera(float value, Vector3f axis) {
        Matrix3f mat = new Matrix3f();
        if (isTopViewEnabled)
            value = value * 0.3f; // slower pan/tilt in camera views
        mat.fromAngleNormalAxis(flyCam.getRotationSpeed() * value, axis);

        Vector3f tempUp = cam.getUp();
        Vector3f tempLeft = cam.getLeft();
        Vector3f tempDir = cam.getDirection();

        mat.mult(tempUp, tempUp);
        mat.mult(tempLeft, tempLeft);
        mat.mult(tempDir, tempDir);

        if (tempDir.getX() > Params.camMaxX || tempDir.getX() < Params.camMinX
                || tempDir.getY() > Params.camMaxY || tempDir.getY() < Params.camMinY
                || tempDir.getZ() > Params.camMaxZ || tempDir.getZ() < Params.camMinZ)
            return;

        Quaternion q = new Quaternion();
        q.fromAxes(tempLeft, tempUp, tempDir);
        q.normalizeLocal();
        cam.setAxes(q);
    }

    private void playSoundEffect(String pathToAudioFile) {
        AudioNode sfx;
        sfx = new AudioNode(assetManager, pathToAudioFile, false);
        sfx.setPositional(false);
        sfx.setPitch(1.3f);
        sfx.play();
    }

    /**
     * Cycles through the fixed camera views (static overview + 4 security
     * cameras), then back to first-person. Each case hard-codes the camera
     * pose and the look-angle clamps for that view; view 5 (handled in the
     * else branch) restores the saved first-person camera.
     */
    private void toggleTopView() {
        if (Params.topViewAvailable && viewNumber != 5) {
            isTopViewEnabled = true;

            if (fadeFilter.getDuration() > 0.25f)
                fadeFilter.setDuration(0.25f);

            if (viewNumber == 0) {
                playSoundEffect("Sounds/enteredTopView.wav");
            } else {
                fadeFilter.fadeOut();
                playSoundEffect("Sounds/cameraSwitch.wav");
            }

            if (videoCamGUI.isDisabled())
                videoCamGUI.enable();

            switch(viewNumber) {
                case 0:
                    videoCamGUI.showCamInfo(VideoCamGUI.STATIC_CAM);
                    // Save the first-person camera pose for restoration later.
                    Params.camAxesLeft = cam.getLeft();
                    Params.camAxesUp = cam.getUp();
                    Params.camAxesDir = cam.getDirection();
                    Params.flyCamRotationSpeed = flyCam.getRotationSpeed();
                    cam.setLocation(new Vector3f(210.75597f, 191.22467f, -111.45984f));
                    cam.setAxes(new Vector3f(0.006238699f, 0.0011283755f, 0.9999799f),
                                new Vector3f(-0.7573153f, 0.6530373f, 0.0039878786f),
                                new Vector3f(-0.6530197f, -0.75732493f, 0.004928589f));
                    break;
                case 1:
                    videoCamGUI.showCamInfo(VideoCamGUI.SECURITY_CAM_1);
                    Params.camMaxY = Params.securityCamsMaxY;
                    Params.camMinY = Params.securityCamsMinY;
                    Params.camMaxX = Params.cam1MaxX;
                    Params.camMinX = Params.cam1MinX;
//                    Params.camMaxZ = Params.cam1MaxX;
//                    Params.camMinZ = Params.cam1MinX;
                    cam.setLocation(new Vector3f(138.94714f, 74.204185f, -118.346085f));
                    cam.setAxes(new Vector3f(-0.004745364f, 0.0011234581f, 0.99998814f),
                                new Vector3f(-0.69315696f, 0.720775f, -0.0040991306f),
                                new Vector3f(-0.720771f, -0.69316816f, -0.0026416779f));
                    break;
                case 2:
                    videoCamGUI.showCamInfo(VideoCamGUI.SECURITY_CAM_2);
                    Params.camMaxX = Params.playerMaxX;
                    Params.camMinX = Params.playerMinX;
                    Params.camMaxZ = 0;
                    Params.camMinZ = -100;
                    cam.setLocation(new Vector3f(50.173473f, 78.43454f, 112.47995f));
                    cam.setAxes(new Vector3f(-0.9999976f, 0.0011224343f, 0.0018219932f),
                                new Vector3f(0f, 0.8618528f, -0.5071584f),
                                new Vector3f(-0.002139542f, -0.5071572f, -0.86185086f));
                    break;
                case 3:
                    videoCamGUI.showCamInfo(VideoCamGUI.SECURITY_CAM_3);
                    Params.camMaxX = 100f;
                    Params.camMinX = 0f;
                    Params.camMaxZ = Params.playerMaxZ;
                    Params.camMinZ = Params.playerMinZ;
                    cam.setLocation(new Vector3f(-37.94872f, 71.8763f, -118.55907f));
                    cam.setAxes(new Vector3f(-0.0045000315f, 0.0011213869f, -0.9999892f),
                                new Vector3f(0.4902432f, 0.8715848f, -0.0012287796f),
                                new Vector3f(0.871574f, -0.49024346f, -0.004471898f));
                    break;
                case 4:
                    videoCamGUI.showCamInfo(VideoCamGUI.SECURITY_CAM_4);
                    Params.camMaxX = Params.playerMaxX;
                    Params.camMinX = Params.playerMinX;
                    Params.camMaxZ = 100f;
                    Params.camMinZ = 0f;
                    cam.setLocation(new Vector3f(54.01033f, 79.31754f, -347.24677f));
                    cam.setAxes(new Vector3f(0.99922884f, 0.0011243783f, 0.039248988f),
                                new Vector3f(-0.019561216f, 0.8809709f, 0.47276595f),
                                new Vector3f(-0.034045644f, -0.47316912f, 0.8803135f));
                    break;
            }
            viewNumber = (viewNumber + 1) % 6;
            flyCam.setMoveSpeed(0);
//            flyCam.setRotationSpeed(0);
            // Hide the roof beams so the fixed cameras can see the floor.
            factory.getChild("Beams-Metal").setCullHint(Spatial.CullHint.Always);
        } else if (Params.topViewAvailable && isTopViewEnabled) {
            // viewNumber == 5: leave camera mode and restore first-person state.
            videoCamGUI.disable();
            playSoundEffect("Sounds/exitTopView.wav");
            isTopViewEnabled = false;
            cam.setAxes(Params.camAxesLeft, Params.camAxesUp, Params.camAxesDir);
            flyCam.setMoveSpeed(100);
            flyCam.setRotationSpeed(Params.flyCamRotationSpeed);
            factory.getChild("Beams-Metal").setCullHint(Spatial.CullHint.Never);
            viewNumber = (viewNumber + 1)%6;

            Params.camMaxX = Params.playerMaxX;
            Params.camMinX = Params.playerMinX;
            Params.camMaxY = Params.playerMaxY;
            Params.camMinY = Params.playerMinY;
            Params.camMaxZ = Params.playerMaxZ;
            Params.camMinZ = Params.playerMinZ;
        }
    }

    public boolean isTopViewEnabled() {
        return isTopViewEnabled;
    }

    public boolean isSecurityCamActive() {
        return isTopViewEnabled && viewNumber > 1;
    }

    public boolean isThereLookKeyPressed() {
        return lookUp || lookDown || lookLeft || lookRight;
    }

    public void setTopViewEnabled(boolean enabled) {
        isTopViewEnabled = enabled;
    }

    public int getViewNumber() {
        return viewNumber;
    }

    public void setViewNumber(int number) {
        viewNumber = number;
    }
}
package org.apache.jmeter.services;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Random;

import org.apache.jmeter.gui.JMeterFileFilter;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;

/**
 * Singleton that hands out lines from data files relative to a base
 * directory. Files are opened lazily on first read and re-opened at EOF,
 * so readLine() cycles through a file's contents indefinitely.
 *
 * @author mstover
 *
 * TODO To change the template for this generated type comment go to
 * Window - Preferences - Java - Code Style - Code Templates
 */
public class FileServer {
    static Logger log = LoggingManager.getLoggerForClass();

    /** Directory against which relative filenames are resolved. */
    File base;

    /** Maps filename -> Object[]{File, BufferedReader-or-null}. */
    Map files = new HashMap();

    private static FileServer server = new FileServer();

    private Random random = new Random();

    private FileServer() {
        base = new File(JMeterUtils.getProperty("user.dir"));
    }

    public static FileServer getFileServer() {
        return server;
    }

    /**
     * Changes the base directory. If basedir names a plain file, its parent
     * directory is used instead.
     *
     * @throws IOException if any reserved file is still open.
     */
    public synchronized void setBasedir(String basedir) throws IOException {
        log.info("Setting basedir to: " + basedir);
        if (filesOpen()) {
            throw new IOException("Files are still open, cannot change base directory");
        }
        files.clear();
        if (basedir != null) {
            base = new File(basedir);
            if (!base.isDirectory()) {
                base = base.getParentFile();
            }
        }
    }

    public String getBaseDir() {
        return base.getAbsolutePath();
    }

    /**
     * Registers a file (relative to the base directory) for later reading.
     * The reader is not opened until the first readLine() call.
     */
    public synchronized void reserveFile(String filename) {
        log.info("filename = " + filename + " base = " + base);
        if (!files.containsKey(filename)) {
            Object[] file = new Object[]{new File(base, filename), null};
            files.put(filename, file);
        }
    }

    /**
     * Get the next line of the named file, opening it lazily and wrapping
     * back to the first line when the end of the file is reached.
     *
     * @param filename a filename previously passed to reserveFile
     * @return the next line of the file
     * @throws IOException if the file was never reserved, or on read failure
     */
    public synchronized String readLine(String filename) throws IOException {
        Object[] file = (Object[]) files.get(filename);
        if (file != null) {
            if (file[1] == null) {
                BufferedReader r = new BufferedReader(new FileReader((File) file[0]));
                file[1] = r;
            }
            BufferedReader reader = (BufferedReader) file[1];
            String line = reader.readLine();
            if (line == null) {
                // EOF: close and re-open so callers can keep cycling forever.
                reader.close();
                reader = new BufferedReader(new FileReader((File) file[0]));
                file[1] = reader;
                line = reader.readLine();
            }
            return line;
        }
        throw new IOException("File never reserved");
    }

    /** Closes every open reader and forgets all reserved files. */
    // Now synchronized for consistency with reserveFile/readLine, which
    // guard the same map.
    public synchronized void closeFiles() throws IOException {
        Iterator iter = files.values().iterator();
        while (iter.hasNext()) {
            Object[] file = (Object[]) iter.next();
            if (file[1] != null) {
                ((Reader) file[1]).close();
                file[1] = null;
            }
        }
        files.clear();
    }

    /** @return true if any reserved file currently has an open reader. */
    protected synchronized boolean filesOpen() {
        Iterator iter = files.values().iterator();
        while (iter.hasNext()) {
            Object[] file = (Object[]) iter.next();
            if (file[1] != null) {
                return true;
            }
        }
        return false;
    }

    /**
     * Method will get a random .txt/.obj file in a base directory.
     *
     * @param basedir directory to pick from (may be null)
     * @return a random matching file, or null if basedir is null, is not a
     *         readable directory, or contains no matching files
     */
    public File getRandomFile(String basedir) {
        File input = null;
        if (basedir != null) {
            File src = new File(basedir);
            if (src.isDirectory()) {
                File[] lfiles = src.listFiles(
                        new JMeterFileFilter(new String[] { ".txt", ".obj" }));
                // BUG FIX: the original called random.nextInt(files.length)
                // unconditionally — Random.nextInt(0) throws
                // IllegalArgumentException for an empty directory, and
                // listFiles() may return null for an unreadable one.
                if (lfiles != null && lfiles.length > 0) {
                    input = lfiles[random.nextInt(lfiles.length)];
                }
            }
        }
        return input;
    }
}
package datastructure.graph; import algorithm.AbstractMDSAlgorithm; import algorithm.AbstractMDSResult; import com.carrotsearch.hppc.IntObjectOpenHashMap; import com.carrotsearch.hppc.IntOpenHashSet; import com.carrotsearch.hppc.ObjectArrayList; import com.carrotsearch.hppc.cursors.IntCursor; import datastructure.Dataset; public class CompactUndirectedGraph implements Graph { private int maxVertexNumber = 0; private ObjectArrayList<Edge> edges = new ObjectArrayList<Edge>(); private IntOpenHashSet vertices = new IntOpenHashSet(); private IntObjectOpenHashMap<IntOpenHashSet> neig1 = new IntObjectOpenHashMap<>(); private IntObjectOpenHashMap<IntOpenHashSet> neig2 = new IntObjectOpenHashMap<>(); public CompactUndirectedGraph(Dataset dataset) { dataset.setAll(); maxVertexNumber = dataset.maxVertexNumber; for (int i = 1; i <= maxVertexNumber; i++) { vertices.add(i); } edges = new ObjectArrayList<Edge>(dataset.edges); neig1 = new IntObjectOpenHashMap<IntOpenHashSet>(dataset.neig1); neig2 = new IntObjectOpenHashMap<IntOpenHashSet>(dataset.neig2); } @Override public boolean isDirected() { return false; } @Override public ObjectArrayList<Edge> getEdges() { return edges; } @Override public IntOpenHashSet getVertices() { return vertices; } @Override public int getNumberOfVertices() { return maxVertexNumber; } @Override public IntOpenHashSet getN1(int vertex) { return neig1.get(vertex); } @Override public IntOpenHashSet getN2(int vertex) { return neig2.get(vertex); } @Override public AbstractMDSResult getMDS(AbstractMDSAlgorithm algorithm) { return algorithm.mdsAlg(this); } @Override public boolean isMDS(IntOpenHashSet mds) { IntOpenHashSet set = new IntOpenHashSet(maxVertexNumber); for (IntCursor v : mds) { set.addAll(getN1(v.value)); } boolean isContained = true; for (int i = 1; i <= maxVertexNumber; i++) { if (!set.contains(i)) { isContained = false; break; } } return isContained; } @Override public boolean isMDS(AbstractMDSResult mds) { IntOpenHashSet set = new 
IntOpenHashSet(maxVertexNumber); for (IntCursor v : mds.getIterableStructure()) { set.addAll(getN1(v.value)); } boolean isContained = true; for (int i = 1; i <= maxVertexNumber; i++) { if (!set.contains(i)) { isContained = false; break; } } return isContained; } }
package de.lmu.ifi.dbs.algorithm.clustering;

import de.lmu.ifi.dbs.algorithm.Algorithm;
import de.lmu.ifi.dbs.algorithm.DistanceBasedAlgorithm;
import de.lmu.ifi.dbs.algorithm.result.ClusterOrder;
import de.lmu.ifi.dbs.algorithm.result.Result;
import de.lmu.ifi.dbs.data.DatabaseObject;
import de.lmu.ifi.dbs.database.Database;
import de.lmu.ifi.dbs.distance.Distance;
import de.lmu.ifi.dbs.utilities.Description;
import de.lmu.ifi.dbs.utilities.Progress;
import de.lmu.ifi.dbs.utilities.QueryResult;
import de.lmu.ifi.dbs.utilities.heap.*;
import de.lmu.ifi.dbs.utilities.optionhandling.AttributeSettings;
import de.lmu.ifi.dbs.utilities.optionhandling.OptionHandler;
import de.lmu.ifi.dbs.utilities.optionhandling.ParameterException;
import de.lmu.ifi.dbs.utilities.optionhandling.WrongParameterValueException;

import java.io.Serializable;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

/**
 * OPTICS provides the OPTICS algorithm (Ankerst et al., SIGMOD '99): it
 * produces a linear cluster ordering of the database, annotating each object
 * with a reachability distance, from which density-based clusters of any
 * density threshold up to epsilon can be extracted.
 *
 * <p>The result is a {@link ClusterOrder}; the core loop keeps unprocessed
 * neighbors in a priority queue keyed by reachability distance.
 *
 * @author Elke Achtert (<a href="mailto:achtert@dbs.ifi.lmu.de">achtert@dbs.ifi.lmu.de</a>)
 */
public class OPTICS<O extends DatabaseObject, D extends Distance<D>> extends DistanceBasedAlgorithm<O, D> {

    /**
     * Parameter for epsilon (shared with DBSCAN).
     */
    public static final String EPSILON_P = DBSCAN.EPSILON_P;

    /**
     * Description for parameter epsilon.
     */
    public static final String EPSILON_D = DBSCAN.EPSILON_D;

    /**
     * Parameter minimum points (shared with DBSCAN).
     */
    public static final String MINPTS_P = DBSCAN.MINPTS_P;

    /**
     * Description for parameter minimum points.
     */
    public static final String MINPTS_D = DBSCAN.MINPTS_D;

    /**
     * Epsilon, kept as the raw option string; converted to a distance via
     * the distance function (see {@link #getEpsilon()}).
     */
    private String epsilon;

    /**
     * Minimum points for an object to be a core object.
     */
    private int minpts;

    /**
     * Provides the result of the algorithm.
     */
    private ClusterOrder<O, D> clusterOrder;

    /**
     * Holds a set of processed ids (objects already placed in the cluster order).
     */
    private Set<Integer> processedIDs;

    /**
     * The priority queue for the algorithm, keyed by reachability distance.
     */
    private Heap<D, COEntry> heap;

    /**
     * Sets epsilon and minimum points to the optionhandler additionally to the
     * parameters provided by super-classes. Since OPTICS is a non-abstract
     * class, finally optionHandler is initialized.
     */
    public OPTICS() {
        super();
        parameterToDescription.put(EPSILON_P + OptionHandler.EXPECTS_VALUE, EPSILON_D);
        parameterToDescription.put(MINPTS_P + OptionHandler.EXPECTS_VALUE, MINPTS_D);
        optionHandler = new OptionHandler(parameterToDescription, getClass().getName());
    }

    /**
     * Runs OPTICS: expands the cluster order once for every not-yet-processed
     * object in the database.
     *
     * @see de.lmu.ifi.dbs.algorithm.Algorithm#run(de.lmu.ifi.dbs.database.Database)
     */
    protected void runInTime(Database<O> database) throws IllegalStateException {
        try {
            Progress progress = new Progress(database.size());
            int size = database.size();
            processedIDs = new HashSet<Integer>(size);
            clusterOrder = new ClusterOrder<O, D>(database, getDistanceFunction());
            heap = new DefaultHeap<D, COEntry>();
            getDistanceFunction().setDatabase(database, isVerbose(), isTime());
            for (Iterator<Integer> it = database.iterator(); it.hasNext();) {
                Integer id = it.next();
                if (!processedIDs.contains(id))
                    expandClusterOrder(database, id, progress);
            }
        }
        catch (Exception e) {
            // Boundary catch: wrap anything thrown by range queries or the
            // distance function; the cause is preserved.
            throw new IllegalStateException(e);
        }
    }

    /**
     * OPTICS-function expandClusterOrder: emits the starting object with
     * infinite reachability, then repeatedly pops the heap entry with the
     * smallest reachability, appends it to the cluster order and relaxes the
     * reachability of its epsilon-neighbors.
     *
     * @param database the database on which the algorithm is run
     * @param objectID the currently processed object
     * @param progress the progress object to actualize the current progress of the
     *                 algorithm
     */
    @SuppressWarnings({"unchecked"})
    protected void expandClusterOrder(Database<O> database, Integer objectID, Progress progress) {
        // The seed object has no predecessor and infinite reachability.
        clusterOrder.add(objectID, null, getDistanceFunction().infiniteDistance());
        processedIDs.add(objectID);
        if (isVerbose()) {
            progress.setProcessed(processedIDs.size());
            System.out.print("\r" + progress.toString());
        }
        List<QueryResult<D>> neighbours = database.rangeQuery(objectID, epsilon, getDistanceFunction());
        // Core distance = distance to the minpts-th neighbor (the range query
        // result is assumed sorted by distance), infinite if not a core object.
        D coreDistance = neighbours.size() < minpts ?
                         getDistanceFunction().infiniteDistance() :
                         neighbours.get(minpts - 1).getDistance();
        if (!getDistanceFunction().isInfiniteDistance(coreDistance)) {
            for (QueryResult<D> neighbour : neighbours) {
                if (processedIDs.contains(neighbour.getID())) {
                    continue;
                }
                // reachability(p) = max(dist(o, p), coreDistance(o))
                D reachability = maximum(neighbour.getDistance(), coreDistance);
                updateHeap(reachability, new COEntry(neighbour.getID(), objectID));
            }
            while (!heap.isEmpty()) {
                final HeapNode<D, COEntry> pqNode = heap.getMinNode();
                COEntry current = pqNode.getValue();
                clusterOrder.add(current.objectID, current.predecessorID, pqNode.getKey());
                processedIDs.add(current.objectID);
                neighbours = database.rangeQuery(current.objectID, epsilon, getDistanceFunction());
                coreDistance = neighbours.size() < minpts ?
                               getDistanceFunction().infiniteDistance() :
                               neighbours.get(minpts - 1).getDistance();
                if (!getDistanceFunction().isInfiniteDistance(coreDistance)) {
                    for (QueryResult<D> neighbour : neighbours) {
                        if (processedIDs.contains(neighbour.getID()))
                            continue;
                        D distance = neighbour.getDistance();
                        D reachability = maximum(distance, coreDistance);
                        updateHeap(reachability, new COEntry(neighbour.getID(), current.objectID));
                    }
                }
                if (isVerbose()) {
                    progress.setProcessed(processedIDs.size());
                    System.out.print("\r" + progress.toString());
                }
            }
        }
    }

    /**
     * @see de.lmu.ifi.dbs.algorithm.Algorithm#getDescription()
     */
    public Description getDescription() {
        return new Description("OPTICS",
                               "Density-Based Hierarchical Clustering",
                               "Algorithm to find density-connected sets in a database based on the parameters minimumPoints and epsilon (specifying a volume). These two parameters determine a density threshold for clustering.",
                               "M. Ankerst, M. Breunig, H.-P. Kriegel, and J. Sander: " +
                               "OPTICS: Ordering Points to Identify the Clustering Structure. " +
                               "In: Proc. ACM SIGMOD Int. Conf. on Management of Data (SIGMOD '99)");
    }

    /**
     * Reads and validates the epsilon and minpts parameters; epsilon is
     * checked for compatibility with the configured distance function.
     *
     * @see de.lmu.ifi.dbs.utilities.optionhandling.Parameterizable#setParameters(String[])
     */
    public String[] setParameters(String[] args) throws ParameterException {
        String[] remainingParameters = super.setParameters(args);
        epsilon = optionHandler.getOptionValue(EPSILON_P);
        try {
            // test whether epsilon is compatible with distance function
            getDistanceFunction().valueOf(epsilon);
        }
        catch (IllegalArgumentException e) {
            throw new WrongParameterValueException(EPSILON_P, epsilon, EPSILON_D);
        }
        // minpts
        String minptsString = optionHandler.getOptionValue(MINPTS_P);
        try {
            minpts = Integer.parseInt(minptsString);
            if (minpts <= 0)
                throw new WrongParameterValueException(MINPTS_P, minptsString, MINPTS_D);
        }
        catch (NumberFormatException e) {
            throw new WrongParameterValueException(MINPTS_P, minptsString, MINPTS_D, e);
        }
        setParameters(args, remainingParameters);
        return remainingParameters;
    }

    /**
     * Returns the parameter setting of this algorithm.
     *
     * @return the parameter setting of this algorithm
     */
    public List<AttributeSettings> getAttributeSettings() {
        List<AttributeSettings> attributeSettings = super.getAttributeSettings();
        AttributeSettings mySettings = attributeSettings.get(0);
        mySettings.addSetting(EPSILON_P, epsilon);
        mySettings.addSetting(MINPTS_P, Integer.toString(minpts));
        return attributeSettings;
    }

    /**
     * @see Algorithm#getResult()
     */
    public Result<O> getResult() {
        return clusterOrder;
    }

    /**
     * Returns the epsilon parameter, converted to a distance by the
     * configured distance function.
     *
     * @return the epsilon parameter
     */
    public Distance getEpsilon() {
        return getDistanceFunction().valueOf(epsilon);
    }

    /**
     * Returns the maximum of both given distances.
     *
     * @param d1 the first distance
     * @param d2 the second distance
     * @return the maximum of both given distances
     */
    private D maximum(D d1, D d2) {
        if (d1.compareTo(d2) >= 0)
            return d1;
        else
            return d2;
    }

    /**
     * Adds the specified entry with the specified key to the heap.
     * If the entry's object is already in the heap, it will only be updated
     * (decrease-key) when the new reachability is smaller; ties are broken
     * in favor of the smaller predecessor id.
     *
     * @param reachability the reachability of the entry's object
     * @param entry        the entry to be added
     */
    private void updateHeap(D reachability, COEntry entry) {
        Integer index = heap.getIndexOf(entry);
        // entry is already in the heap
        if (index != null) {
            HeapNode<D, COEntry> heapNode = heap.getNodeAt(index);
            int compare = heapNode.getKey().compareTo(reachability);
            // existing key is already smaller: keep it
            if (compare < 0)
                return;
            // equal key and existing predecessor id is smaller: keep it
            if (compare == 0 && heapNode.getValue().predecessorID < entry.predecessorID)
                return;
            heapNode.setValue(entry);
            heapNode.setKey(reachability);
            heap.flowUp(index);
        }
        // entry is not in the heap
        else {
            heap.addNode(new DefaultHeapNode<D, COEntry>(reachability, entry));
        }
    }

    /**
     * Encapsulates an entry in the cluster order: an object id plus the id of
     * the object from which it was reached. Identity (equals/hashCode) is by
     * objectID only, so the heap tracks at most one entry per object.
     */
    public class COEntry implements Identifiable, Serializable {
        /**
         * The id of the entry.
         */
        public Integer objectID;

        /**
         * The id of the entry's predecessor.
         */
        Integer predecessorID;

        /**
         * Creates a new entry with the specified parameters.
         *
         * @param objectID      the id of the entry
         * @param predecessorID the id of the entry's predecessor
         */
        public COEntry(Integer objectID, Integer predecessorID) {
            this.objectID = objectID;
            this.predecessorID = predecessorID;
        }

        /**
         * Compares this object with the specified object for order: first by
         * objectID, then by predecessorID. Returns a negative integer, zero,
         * or a positive integer as this object is less than, equal to, or
         * greater than the specified object.
         * <p/>
         * NOTE(review): this ordering is not consistent with equals (equals
         * ignores predecessorID) — relied upon as-is by the heap.
         *
         * @param o the Object to be compared.
         * @return a negative integer, zero, or a positive integer as this
         *         object is less than, equal to, or greater than the specified
         *         object.
         */
        public int compareTo(Identifiable o) {
            COEntry other = (COEntry) o;
            if (this.objectID < other.objectID)
                return -1;
            if (this.objectID > other.objectID)
                return +1;
            if (this.predecessorID < other.predecessorID)
                return -1;
            if (this.predecessorID > other.predecessorID)
                return +1;
            return 0;
        }

        /**
         * Returns a string representation of the object.
         *
         * @return a string representation of the object.
         */
        public String toString() {
            return objectID + " (" + predecessorID + ")";
        }

        /**
         * Indicates whether some other object is "equal to" this one.
         * Equality is based on objectID only.
         *
         * @param o the reference object with which to compare.
         * @return <code>true</code> if this object is the same as the obj
         *         argument; <code>false</code> otherwise.
         */
        public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;
            final COEntry coEntry = (COEntry) o;
            return objectID.equals(coEntry.objectID);
        }

        /**
         * Returns a hash code value for the object. This method is
         * supported for the benefit of hashtables such as those provided by
         * <code>java.util.Hashtable</code>.
         *
         * @return hash code value for the object
         */
        public int hashCode() {
            return objectID.hashCode();
        }

        /**
         * Returns the unique id of this object.
         *
         * @return the unique id of this object
         */
        public Integer getID() {
            return objectID;
        }
    }
}
package dr.app.beauti.generator; import dr.app.beauti.components.ComponentFactory; import dr.app.beauti.enumTypes.FixRateType; import dr.app.beauti.enumTypes.StartingTreeType; import dr.app.beauti.enumTypes.TreePriorParameterizationType; import dr.app.beauti.enumTypes.TreePriorType; import dr.app.beauti.options.*; import dr.app.beauti.util.XMLWriter; import dr.evolution.util.Units; import dr.evomodel.coalescent.*; import dr.evomodel.speciation.BirthDeathGernhard08Model; import dr.evomodel.speciation.SpeciationLikelihood; import dr.evomodel.speciation.SpeciesBindings; import dr.evomodel.speciation.YuleModel; import dr.evomodel.tree.TreeModel; import dr.evomodelxml.BirthDeathModelParser; import dr.evomodelxml.CSVExporterParser; import dr.evomodelxml.YuleModelParser; import dr.inference.distribution.ExponentialDistributionModel; import dr.inference.distribution.ExponentialMarkovModel; import dr.inference.distribution.MixedDistributionLikelihood; import dr.inference.model.BooleanLikelihood; import dr.inference.model.ParameterParser; import dr.inference.model.SumStatistic; import dr.inference.model.TestStatistic; import dr.inferencexml.DistributionModelParser; import dr.inferencexml.ExponentialMarkovModelParser; import dr.util.Attribute; import dr.xml.XMLParser; /** * @author Alexei Drummond */ public class TreePriorGenerator extends Generator { public TreePriorGenerator(BeautiOptions options, ComponentFactory[] components) { super(options, components); } // void writeTreePrior(PartitionTreePrior prior, PartitionTreeModel model, XMLWriter writer) { // for species, partitionName.treeModel // setModelPrefix(prior.getPrefix()); // only has prefix, if (options.getPartitionTreePriors().size() > 1) // writePriorLikelihood(prior, model, writer); /** * Write a tree prior (coalescent or speciational) model * * @param prior the partition tree prior * @param writer the writer */ void writeTreePriorModel(PartitionTreePrior prior, XMLWriter writer) { 
setModelPrefix(prior.getPrefix()); // only has prefix, if (options.getPartitionTreePriors().size() > 1) String initialPopSize = null; TreePriorType nodeHeightPrior = prior.getNodeHeightPrior(); Units.Type units = options.units; TreePriorParameterizationType parameterization = prior.getParameterization(); switch (nodeHeightPrior) { case CONSTANT: writer.writeComment("A prior assumption that the population size has remained constant"); writer.writeComment("throughout the time spanned by the genealogy."); writer.writeOpenTag( ConstantPopulationModel.CONSTANT_POPULATION_MODEL, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, modelPrefix + "constant"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(options.units)) } ); writer.writeOpenTag(ConstantPopulationModel.POPULATION_SIZE); writeParameter("constant.popSize", prior, writer); writer.writeCloseTag(ConstantPopulationModel.POPULATION_SIZE); writer.writeCloseTag(ConstantPopulationModel.CONSTANT_POPULATION_MODEL); break; case EXPONENTIAL: // generate an exponential prior tree writer.writeComment("A prior assumption that the population size has grown exponentially"); writer.writeComment("throughout the time spanned by the genealogy."); writer.writeOpenTag( ExponentialGrowthModel.EXPONENTIAL_GROWTH_MODEL, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, modelPrefix + "exponential"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(options.units)) } ); // write pop size socket writer.writeOpenTag(ExponentialGrowthModel.POPULATION_SIZE); writeParameter("exponential.popSize", prior, writer); writer.writeCloseTag(ExponentialGrowthModel.POPULATION_SIZE); if (parameterization == TreePriorParameterizationType.GROWTH_RATE) { // write growth rate socket writer.writeOpenTag(ExponentialGrowthModel.GROWTH_RATE); writeParameter("exponential.growthRate", prior, writer); writer.writeCloseTag(ExponentialGrowthModel.GROWTH_RATE); } else { // write doubling time socket 
writer.writeOpenTag(ExponentialGrowthModel.DOUBLING_TIME); writeParameter("exponential.doublingTime", prior, writer); writer.writeCloseTag(ExponentialGrowthModel.DOUBLING_TIME); } writer.writeCloseTag(ExponentialGrowthModel.EXPONENTIAL_GROWTH_MODEL); break; case LOGISTIC: // generate an exponential prior tree writer.writeComment("A prior assumption that the population size has grown logistically"); writer.writeComment("throughout the time spanned by the genealogy."); writer.writeOpenTag( LogisticGrowthModel.LOGISTIC_GROWTH_MODEL, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, modelPrefix + "logistic"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(options.units)) } ); // write pop size socket writer.writeOpenTag(LogisticGrowthModel.POPULATION_SIZE); writeParameter("logistic.popSize", prior, writer); writer.writeCloseTag(LogisticGrowthModel.POPULATION_SIZE); if (parameterization == TreePriorParameterizationType.GROWTH_RATE) { // write growth rate socket writer.writeOpenTag(LogisticGrowthModel.GROWTH_RATE); writeParameter("logistic.growthRate", prior, writer); writer.writeCloseTag(LogisticGrowthModel.GROWTH_RATE); } else { // write doubling time socket writer.writeOpenTag(LogisticGrowthModel.DOUBLING_TIME); writeParameter("logistic.doublingTime", prior, writer); writer.writeCloseTag(LogisticGrowthModel.DOUBLING_TIME); } // write logistic t50 socket writer.writeOpenTag(LogisticGrowthModel.TIME_50); if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN || options.clockModelOptions.getRateOptionClockModel() == FixRateType.RELATIVE_TO) { writer.writeComment("No calibration"); dr.app.beauti.options.Parameter priorPara = prior.getParameter("logistic.t50"); double initRootHeight; if (options.isShareSameTreePrior()) { initRootHeight = priorPara.initial; for (PartitionTreeModel tree : options.getPartitionTreeModels()) { double tmpRootHeight = tree.getParameter("treeModel.rootHeight").initial; if (initRootHeight > 
tmpRootHeight) { // take min initRootHeight = tmpRootHeight; } } } else { initRootHeight = prior.getTreeModel().getParameter("treeModel.rootHeight").initial; } if (priorPara.initial >= initRootHeight) { priorPara.initial = initRootHeight / 2; // tree prior.initial has to < treeRootHeight.initial } } else { writer.writeComment("Has calibration"); //TODO throw new IllegalArgumentException("This function is not available in this release !"); } writeParameter("logistic.t50", prior, writer); writer.writeCloseTag(LogisticGrowthModel.TIME_50); writer.writeCloseTag(LogisticGrowthModel.LOGISTIC_GROWTH_MODEL); initialPopSize = "logistic.popSize"; break; case EXPANSION: // generate an exponential prior tree writer.writeComment("A prior assumption that the population size has grown exponentially"); writer.writeComment("from some ancestral population size in the past."); writer.writeOpenTag( ExpansionModel.EXPANSION_MODEL, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, modelPrefix + "expansion"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(options.units)) } ); // write pop size socket writeParameter(ExpansionModel.POPULATION_SIZE, "expansion.popSize", prior, writer); if (parameterization == TreePriorParameterizationType.GROWTH_RATE) { // write growth rate socket writeParameter(ExpansionModel.GROWTH_RATE, "expansion.growthRate", prior, writer); } else { // write doubling time socket writeParameter(ExpansionModel.DOUBLING_TIME, "expansion.doublingTime", prior, writer); } // write ancestral proportion socket writeParameter(ExpansionModel.ANCESTRAL_POPULATION_PROPORTION, "expansion.ancestralProportion", prior, writer); writer.writeCloseTag(ExpansionModel.EXPANSION_MODEL); initialPopSize = "expansion.popSize"; break; case YULE: writer.writeComment("A prior on the distribution node heights defined given"); writer.writeComment("a Yule speciation process (a pure birth process)."); writer.writeOpenTag( YuleModel.YULE_MODEL, new Attribute[]{ new 
Attribute.Default<String>(XMLParser.ID, modelPrefix + "yule"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units)) } ); writeParameter(YuleModelParser.BIRTH_RATE, "yule.birthRate", prior, writer); writer.writeCloseTag(YuleModel.YULE_MODEL); break; case BIRTH_DEATH: writer.writeComment("A prior on the distribution node heights defined given"); writer.writeComment("a Birth-Death speciation process (Gernhard 2008)."); writer.writeOpenTag( BirthDeathGernhard08Model.BIRTH_DEATH_MODEL, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, modelPrefix + "birthDeath"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units)) } ); writeParameter(BirthDeathModelParser.BIRTHDIFF_RATE, BirthDeathModelParser.BIRTHDIFF_RATE_PARAM_NAME, prior, writer); writeParameter(BirthDeathModelParser.RELATIVE_DEATH_RATE, BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME, prior, writer); writer.writeCloseTag(BirthDeathGernhard08Model.BIRTH_DEATH_MODEL); break; case SPECIES_BIRTH_DEATH: case SPECIES_YULE: writer.writeComment("A prior assumption that the population size has remained constant"); writer.writeComment("throughout the time spanned by the genealogy."); writer.writeOpenTag( ConstantPopulationModel.CONSTANT_POPULATION_MODEL, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, modelPrefix + "constant"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(options.units)) } ); // initial value for pop mean is the same as what used to be the value for the population size Parameter para = options.starBEASTOptions.getParameter(TraitGuesser.Traits.TRAIT_SPECIES + "." 
+ options.starBEASTOptions.POP_MEAN); prior.getParameter("constant.popSize").initial = para.initial; writer.writeOpenTag(ConstantPopulationModel.POPULATION_SIZE); writeParameter("constant.popSize", prior, writer); writer.writeCloseTag(ConstantPopulationModel.POPULATION_SIZE); writer.writeCloseTag(ConstantPopulationModel.CONSTANT_POPULATION_MODEL); break; } if ((!options.starBEASTOptions.isSpeciesAnalysis()) && nodeHeightPrior != TreePriorType.CONSTANT && nodeHeightPrior != TreePriorType.EXPONENTIAL) { // If the node height prior is not one of these two then we need to simulate a // random starting tree under a constant size coalescent. writer.writeComment("This is a simple constant population size coalescent model"); writer.writeComment("that is used to generate an initial tree for the chain."); writer.writeOpenTag( ConstantPopulationModel.CONSTANT_POPULATION_MODEL, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, modelPrefix + "initialDemo"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units)) } ); writer.writeOpenTag(ConstantPopulationModel.POPULATION_SIZE); if (initialPopSize != null) { writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + initialPopSize); } else { writeParameter(modelPrefix + "initialDemo.popSize", 1, 100.0, Double.NaN, Double.NaN, writer); } writer.writeCloseTag(ConstantPopulationModel.POPULATION_SIZE); writer.writeCloseTag(ConstantPopulationModel.CONSTANT_POPULATION_MODEL); } } /** * Write the prior on node heights (coalescent or speciational models) * * @param prior the partition tree prior * @param writer the writer */ void writePriorLikelihood (PartitionTreePrior prior, PartitionTreeModel model, XMLWriter writer) { //tree model prefix setModelPrefix(model.getPrefix()); // only has prefix, if (options.getPartitionTreePriors().size() > 1) String priorPrefix = prior.getPrefix(); TreePriorType treePrior = prior.getNodeHeightPrior(); switch (treePrior) { case YULE: case BIRTH_DEATH: // generate a 
speciational process writer.writeComment("Generate a speciation likelihood for Yule or Birth Death"); writer.writeOpenTag( SpeciationLikelihood.SPECIATION_LIKELIHOOD, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, modelPrefix + "speciation") } ); // write pop size socket writer.writeOpenTag(SpeciationLikelihood.MODEL); writeNodeHeightPriorModelRef(prior, writer); writer.writeCloseTag(SpeciationLikelihood.MODEL); writer.writeOpenTag(SpeciationLikelihood.TREE); writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL); writer.writeCloseTag(SpeciationLikelihood.TREE); writer.writeCloseTag(SpeciationLikelihood.SPECIATION_LIKELIHOOD); break; case LOGISTIC: writer.writeComment("Generate a boolean likelihood for Coalescent: Logistic Growth"); writer.writeOpenTag( BooleanLikelihood.BOOLEAN_LIKELIHOOD, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, modelPrefix + "booleanLikelihood1")} ); writer.writeOpenTag( TestStatistic.TEST_STATISTIC, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, "test1"), new Attribute.Default<String>("name", "test1") } ); writer.writeIDref(ParameterParser.PARAMETER, priorPrefix + "logistic.t50"); //TODO correct? 
writer.writeOpenTag("lessThan"); writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "treeModel.rootHeight"); writer.writeCloseTag("lessThan"); writer.writeCloseTag(TestStatistic.TEST_STATISTIC); writer.writeCloseTag(BooleanLikelihood.BOOLEAN_LIKELIHOOD); writer.writeOpenTag( CoalescentLikelihood.COALESCENT_LIKELIHOOD, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, modelPrefix + COALESCENT)} ); writer.writeOpenTag(CoalescentLikelihood.MODEL); writeNodeHeightPriorModelRef(prior, writer); writer.writeCloseTag(CoalescentLikelihood.MODEL); writer.writeOpenTag(CoalescentLikelihood.POPULATION_TREE); writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL); writer.writeCloseTag(CoalescentLikelihood.POPULATION_TREE); writer.writeCloseTag(CoalescentLikelihood.COALESCENT_LIKELIHOOD); break; case SKYLINE: // generate a Bayesian skyline plot writer.writeComment("Generate a generalizedSkyLineLikelihood for Bayesian Skyline"); writer.writeOpenTag( BayesianSkylineLikelihood.SKYLINE_LIKELIHOOD, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, modelPrefix + "skyline"), new Attribute.Default<String>("linear", prior.getSkylineModel() == TreePriorParameterizationType.LINEAR_SKYLINE ? 
"true" : "false") } ); // write pop size socket writer.writeOpenTag(BayesianSkylineLikelihood.POPULATION_SIZES); if (prior.getSkylineModel() == TreePriorParameterizationType.LINEAR_SKYLINE) { writeParameter(prior.getParameter("skyline.popSize"), prior.getSkylineGroupCount() + 1, writer); } else { writeParameter(prior.getParameter("skyline.popSize"), prior.getSkylineGroupCount(), writer); } writer.writeCloseTag(BayesianSkylineLikelihood.POPULATION_SIZES); // write group size socket writer.writeOpenTag(BayesianSkylineLikelihood.GROUP_SIZES); writeParameter(prior.getParameter("skyline.groupSize"), prior.getSkylineGroupCount(), writer); writer.writeCloseTag(BayesianSkylineLikelihood.GROUP_SIZES); writer.writeOpenTag(CoalescentLikelihood.POPULATION_TREE); writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL); writer.writeCloseTag(CoalescentLikelihood.POPULATION_TREE); writer.writeCloseTag(BayesianSkylineLikelihood.SKYLINE_LIKELIHOOD); writer.writeText(""); writeExponentialMarkovLikelihood(writer); break; case EXTENDED_SKYLINE: // different format break; case GMRF_SKYRIDE: writer.writeComment("Generate a gmrfSkyrideLikelihood for GMRF Bayesian Skyride process"); writer.writeOpenTag( GMRFSkyrideLikelihood.SKYLINE_LIKELIHOOD, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, modelPrefix + "skyride"), new Attribute.Default<String>(GMRFSkyrideLikelihood.TIME_AWARE_SMOOTHING, prior.getSkyrideSmoothing() == TreePriorParameterizationType.TIME_AWARE_SKYRIDE ? "true" : "false"), new Attribute.Default<String>(GMRFSkyrideLikelihood.RANDOMIZE_TREE, //TODO For GMRF, tree model/tree prior combination not implemented by BEAST yet. The validation is in BeastGenerator.checkOptions() prior.getTreeModel().getStartingTreeType() == StartingTreeType.UPGMA ? 
"true" : "false"), } ); int skyrideIntervalCount = options.taxonList.getTaxonCount() - 1; writer.writeOpenTag(GMRFSkyrideLikelihood.POPULATION_PARAMETER); writeParameter(prior.getParameter("skyride.popSize"), skyrideIntervalCount, writer); writer.writeCloseTag(GMRFSkyrideLikelihood.POPULATION_PARAMETER); writer.writeOpenTag(GMRFSkyrideLikelihood.GROUP_SIZES); writeParameter(prior.getParameter("skyride.groupSize"), skyrideIntervalCount, writer); writer.writeCloseTag(GMRFSkyrideLikelihood.GROUP_SIZES); writer.writeOpenTag(GMRFSkyrideLikelihood.PRECISION_PARAMETER); writeParameter(prior.getParameter("skyride.precision"), 1, writer); writer.writeCloseTag(GMRFSkyrideLikelihood.PRECISION_PARAMETER); writer.writeOpenTag(GMRFSkyrideLikelihood.POPULATION_TREE); writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL); writer.writeCloseTag(GMRFSkyrideLikelihood.POPULATION_TREE); writer.writeCloseTag(GMRFSkyrideLikelihood.SKYLINE_LIKELIHOOD); break; case SPECIES_YULE: case SPECIES_BIRTH_DEATH: break; default: // generate a coalescent process writer.writeComment("Generate a coalescent likelihood"); writer.writeOpenTag( CoalescentLikelihood.COALESCENT_LIKELIHOOD, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, modelPrefix + COALESCENT)} ); writer.writeOpenTag(CoalescentLikelihood.MODEL); writeNodeHeightPriorModelRef(prior, writer); writer.writeCloseTag(CoalescentLikelihood.MODEL); writer.writeOpenTag(CoalescentLikelihood.POPULATION_TREE); writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL); writer.writeCloseTag(CoalescentLikelihood.POPULATION_TREE); writer.writeCloseTag(CoalescentLikelihood.COALESCENT_LIKELIHOOD); } } void writeNodeHeightPriorModelRef(PartitionTreePrior prior, XMLWriter writer) { TreePriorType treePrior = prior.getNodeHeightPrior(); String priorPrefix = prior.getPrefix(); switch (treePrior) { case CONSTANT: case SPECIES_YULE: case SPECIES_BIRTH_DEATH: 
writer.writeIDref(ConstantPopulationModel.CONSTANT_POPULATION_MODEL, priorPrefix + "constant"); break; case EXPONENTIAL: writer.writeIDref(ExponentialGrowthModel.EXPONENTIAL_GROWTH_MODEL, priorPrefix + "exponential"); break; case LOGISTIC: writer.writeIDref(LogisticGrowthModel.LOGISTIC_GROWTH_MODEL, priorPrefix + "logistic"); break; case EXPANSION: writer.writeIDref(ExpansionModel.EXPANSION_MODEL, priorPrefix + "expansion"); break; case SKYLINE: writer.writeIDref(BayesianSkylineLikelihood.SKYLINE_LIKELIHOOD, priorPrefix + "skyline"); break; case GMRF_SKYRIDE: writer.writeIDref(GMRFSkyrideLikelihood.SKYLINE_LIKELIHOOD, priorPrefix + "skyride"); break; case YULE: writer.writeIDref(YuleModel.YULE_MODEL, priorPrefix + "yule"); break; case BIRTH_DEATH: writer.writeIDref(BirthDeathGernhard08Model.BIRTH_DEATH_MODEL, priorPrefix + "birthDeath"); break; default: throw new RuntimeException("No tree prior has been specified so cannot refer to it"); } } void writeEBSPVariableDemographic(PartitionTreePrior prior, XMLWriter writer) { if (prior.getNodeHeightPrior() == TreePriorType.EXTENDED_SKYLINE) { setModelPrefix(prior.getPrefix()); final String tagName = VariableDemographicModel.PARSER.getParserName(); writer.writeComment("Generate a variableDemographic for extended Bayesian skyline process"); writer.writeOpenTag(tagName, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, modelPrefix + VariableDemographicModel.demoElementName), new Attribute.Default<String>(VariableDemographicModel.TYPE, prior.getExtendedSkylineModel().toString()), // use midpoint by default (todo) would be nice to have a user 'tickable' option new Attribute.Default<String>(VariableDemographicModel.USE_MIDPOINTS, "true") } ); // Parameter popSize = prior.getParameter(VariableDemographicModel.demoElementName + ".popSize"); // Parameter populationMean = prior.getParameter(VariableDemographicModel.demoElementName + ".populationMean"); // popSize.initial = populationMean.initial; 
// --- <populationSizes>: an empty popSize parameter element; its value/dimension
//     is determined at runtime (see the commented-out hints retained below).
        writer.writeOpenTag(VariableDemographicModel.POPULATION_SIZES);
        // writer.writeComment("popSize value = populationMean value");
        writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.ID,
                modelPrefix + VariableDemographicModel.demoElementName + ".popSize"), true);
        // writeParameter(popSize, -1, writer);
        writer.writeCloseTag(VariableDemographicModel.POPULATION_SIZES);

        // --- <indicators>: change-point indicator parameter; dimension is left
        //     unset here (-1) because it is resolved at runtime.
        writer.writeOpenTag(VariableDemographicModel.INDICATOR_PARAMETER);
        writeParameter(prior.getParameter(VariableDemographicModel.demoElementName + ".indicators"), -1, writer); // not need dimension
        writer.writeCloseTag(VariableDemographicModel.INDICATOR_PARAMETER);

        // --- <populationTrees>: one <ptree> per partition tree model when the prior
        //     is shared, otherwise a single <ptree> for this prior's own tree model.
        //     Each tree carries its ploidy as an attribute.
        writer.writeOpenTag(VariableDemographicModel.POPULATION_TREES);

        if (options.isShareSameTreePrior()) {
            for (PartitionTreeModel model : options.getPartitionTreeModels()) {
                writer.writeOpenTag(VariableDemographicModel.POP_TREE, new Attribute[]{
                        new Attribute.Default<String>(SpeciesBindings.PLOIDY,
                                Double.toString(model.getPloidyType().getValue()))
                }
                );
                writer.writeIDref(TreeModel.TREE_MODEL, model.getPrefix() + TreeModel.TREE_MODEL);
                writer.writeCloseTag(VariableDemographicModel.POP_TREE);
            }
        } else {//TODO correct for not sharing same prior?
            writer.writeOpenTag(VariableDemographicModel.POP_TREE, new Attribute[]{
                    new Attribute.Default<String>(SpeciesBindings.PLOIDY,
                            Double.toString(prior.getTreeModel().getPloidyType().getValue()))
            }
            );
            writer.writeIDref(TreeModel.TREE_MODEL, prior.getTreeModel().getPrefix() + TreeModel.TREE_MODEL);
            writer.writeCloseTag(VariableDemographicModel.POP_TREE);
        }
        writer.writeCloseTag(VariableDemographicModel.POPULATION_TREES);
        writer.writeCloseTag(tagName);

        // --- <coalescentLikelihood>: references the demographic model written above;
        //     no explicit tree reference is written because the population tree is
        //     taken from the demographic model itself (see emitted XML comment).
        writer.writeOpenTag(CoalescentLikelihood.COALESCENT_LIKELIHOOD,
                new Attribute.Default<String>(XMLParser.ID, modelPrefix + COALESCENT));
        writer.writeOpenTag(CoalescentLikelihood.MODEL);
        writer.writeIDref(tagName, modelPrefix + VariableDemographicModel.demoElementName);
        writer.writeCloseTag(CoalescentLikelihood.MODEL);
        writer.writeComment("Take population Tree from demographic");
        writer.writeCloseTag(CoalescentLikelihood.COALESCENT_LIKELIHOOD);

        // --- <sumStatistic>: element-wise sum over the indicators, i.e. the number
        //     of population-size change points currently switched on.
        writer.writeOpenTag(SumStatistic.SUM_STATISTIC, new Attribute[]{
                new Attribute.Default<String>(XMLParser.ID,
                        modelPrefix + VariableDemographicModel.demoElementName + ".populationSizeChanges"),
                new Attribute.Default<String>("elementwise", "true")
        });
        writer.writeIDref(ParameterParser.PARAMETER,
                modelPrefix + VariableDemographicModel.demoElementName + ".indicators");
        writer.writeCloseTag(SumStatistic.SUM_STATISTIC);

        // --- <exponentialDistributionModel>: hyper-prior whose mean is the
        //     demographic.populationMean parameter (initial value taken from the
        //     prior's Parameter object).
        writer.writeOpenTag(ExponentialDistributionModel.EXPONENTIAL_DISTRIBUTION_MODEL, new Attribute[]{
                new Attribute.Default<String>(XMLParser.ID,
                        modelPrefix + VariableDemographicModel.demoElementName + ".populationMeanDist")
                //,new Attribute.Default<String>("elementwise", "true")
        });
        writer.writeOpenTag(DistributionModelParser.MEAN);
        writer.writeComment("prefer populationMean value = 1");
        Parameter populationMean = prior.getParameter(VariableDemographicModel.demoElementName + ".populationMean");
        writer.writeTag(ParameterParser.PARAMETER, new Attribute[]{
                new Attribute.Default<String>(XMLParser.ID,
                        modelPrefix + VariableDemographicModel.demoElementName + ".populationMean"),
                new Attribute.Default<String>(ParameterParser.VALUE,
                        Double.toString(populationMean.initial))}, true);
        writer.writeCloseTag(DistributionModelParser.MEAN);
        writer.writeCloseTag(ExponentialDistributionModel.EXPONENTIAL_DISTRIBUTION_MODEL);
        }
    }

    /**
     * Writes ID references for the parameters of the chosen node-height (tree)
     * prior so that they are included as columns in the trace log.
     * <p>
     * The column set depends on the prior type and, for the parametric priors
     * (exponential/logistic/expansion), on whether the growth-rate or
     * doubling-time parameterization is selected.
     *
     * @param prior  the partition tree prior whose parameters are logged
     * @param writer the XML writer receiving the idref elements
     */
    void writeParameterLog(PartitionTreePrior prior, XMLWriter writer) {
        setModelPrefix(prior.getPrefix());

        switch (prior.getNodeHeightPrior()) {
            case CONSTANT:
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "constant.popSize");
                break;
            case EXPONENTIAL:
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "exponential.popSize");
                if (prior.getParameterization() == TreePriorParameterizationType.GROWTH_RATE) {
                    writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "exponential.growthRate");
                } else {
                    writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "exponential.doublingTime");
                }
                break;
            case LOGISTIC:
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "logistic.popSize");
                if (prior.getParameterization() == TreePriorParameterizationType.GROWTH_RATE) {
                    writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "logistic.growthRate");
                } else {
                    writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "logistic.doublingTime");
                }
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "logistic.t50");
                break;
            case EXPANSION:
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "expansion.popSize");
                if (prior.getParameterization() == TreePriorParameterizationType.GROWTH_RATE) {
                    writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "expansion.growthRate");
                } else {
                    writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "expansion.doublingTime");
                }
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "expansion.ancestralProportion");
                break;
            case SKYLINE:
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "skyline.popSize");
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "skyline.groupSize");
                break;
            case EXTENDED_SKYLINE:
                // Log the number of active change points as a summary column.
                writeSumStatisticColumn(writer,
"demographic.populationSizeChanges", "popSize_changes");
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "demographic.populationMean");
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "demographic.popSize");
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "demographic.indicators");
                break;
            case GMRF_SKYRIDE:
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "skyride.precision");
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "skyride.popSize");
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "skyride.groupSize");
                break;
            case YULE:
                writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "yule.birthRate");
                break;
            case BIRTH_DEATH:
                writer.writeIDref(ParameterParser.PARAMETER,
                        modelPrefix + BirthDeathModelParser.BIRTHDIFF_RATE_PARAM_NAME);
                writer.writeIDref(ParameterParser.PARAMETER,
                        modelPrefix + BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME);
                break;
            case SPECIES_YULE:
            case SPECIES_BIRTH_DEATH:
                // Species-level priors log their parameters elsewhere; nothing to add here.
                break;
            default:
                throw new RuntimeException("No tree prior has been specified so cannot refer to it");
        }
    }

    /**
     * Writes the post-run EBSP analysis block: a <code>VDAnalysis</code> element
     * that reads the trace and tree logs back in, plus a CSV exporter that dumps
     * the reconstructed demographic history.
     * <p>
     * Only emitted when the node-height prior is EXTENDED_SKYLINE; a no-op for
     * every other prior type.
     * <p>
     * NOTE(review): the CSV file name is derived by chopping the last 4
     * characters off {@code logFileName} (assumed to be ".log") — this breaks
     * for log file names shorter than 4 characters or with other extensions;
     * confirm the caller always supplies a ".log" name.
     *
     * @param prior  the partition tree prior (checked for EXTENDED_SKYLINE)
     * @param writer the XML writer receiving the analysis elements
     */
    void writeEBSPAnalysisToCSVfile(PartitionTreePrior prior, XMLWriter writer) {
        setModelPrefix(prior.getPrefix());

        String logFileName = options.logFileName;

        if (prior.getNodeHeightPrior() == TreePriorType.EXTENDED_SKYLINE) {
            writer.writeOpenTag(EBSPAnalysis.VD_ANALYSIS, new Attribute[]{
                    new Attribute.Default<String>(XMLParser.ID, modelPrefix + "demographic.analysis"),
                    // Discard the first 10% of samples as burn-in.
                    new Attribute.Default<Double>(EBSPAnalysis.BURN_IN, 0.1),
                    new Attribute.Default<Boolean>(VariableDemographicModel.USE_MIDPOINTS, true)}
            );

            writer.writeOpenTag(EBSPAnalysis.LOG_FILE_NAME);
            writer.writeText(logFileName);
            writer.writeCloseTag(EBSPAnalysis.LOG_FILE_NAME);

            // One <treeLog> entry per tree log file configured for the run.
            writer.writeOpenTag(EBSPAnalysis.TREE_FILE_NAMES);
            for (String treeFN : options.treeFileName) {
                writer.writeOpenTag(EBSPAnalysis.TREE_LOG);
                writer.writeText(treeFN);
                writer.writeCloseTag(EBSPAnalysis.TREE_LOG);
            }
            writer.writeCloseTag(EBSPAnalysis.TREE_FILE_NAMES);
            writer.writeOpenTag(EBSPAnalysis.MODEL_TYPE);
            writer.writeText(prior.getExtendedSkylineModel().toString());
            writer.writeCloseTag(EBSPAnalysis.MODEL_TYPE);

            // The analysis locates the popSize/indicator columns in the trace by
            // the name of their first element ("...popSize1" / "...indicators1").
            writer.writeOpenTag(EBSPAnalysis.POPULATION_FIRST_COLUMN);
            writer.writeText(VariableDemographicModel.demoElementName + ".popSize1");
            writer.writeCloseTag(EBSPAnalysis.POPULATION_FIRST_COLUMN);
            writer.writeOpenTag(EBSPAnalysis.INDICATORS_FIRST_COLUMN);
            writer.writeText(VariableDemographicModel.demoElementName + ".indicators1");
            writer.writeCloseTag(EBSPAnalysis.INDICATORS_FIRST_COLUMN);
            writer.writeCloseTag(EBSPAnalysis.VD_ANALYSIS);

            // CSV exporter: same base name as the trace log, ".csv" extension.
            writer.writeOpenTag(CSVExporterParser.CSV_EXPORT, new Attribute[]{
                    new Attribute.Default<String>(CSVExporterParser.FILE_NAME,
                            logFileName.subSequence(0, logFileName.length() - 4) + ".csv"), //.log
                    new Attribute.Default<String>(CSVExporterParser.SEPARATOR, ",")
            });
            writer.writeOpenTag(CSVExporterParser.COLUMNS);
            writer.writeIDref(EBSPAnalysis.VD_ANALYSIS, modelPrefix + "demographic.analysis");
            writer.writeCloseTag(CSVExporterParser.COLUMNS);
            writer.writeCloseTag(CSVExporterParser.CSV_EXPORT);
        }
    }

    /**
     * Writes the exponential Markov model ("eml1") used as a smoothing prior on
     * the Bayesian skyline population sizes (Jeffreys variant).
     *
     * @param writer the XML writer receiving the element
     */
    private void writeExponentialMarkovLikelihood(XMLWriter writer) {
        writer.writeOpenTag(
                ExponentialMarkovModel.EXPONENTIAL_MARKOV_MODEL,
                new Attribute[]{new Attribute.Default<String>(XMLParser.ID, modelPrefix + "eml1"),
                        new Attribute.Default<String>("jeffreys", "true")}
        );
        writer.writeOpenTag(ExponentialMarkovModelParser.CHAIN_PARAMETER);
        writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "skyline.popSize");
        writer.writeCloseTag(ExponentialMarkovModelParser.CHAIN_PARAMETER);
        writer.writeCloseTag(ExponentialMarkovModel.EXPONENTIAL_MARKOV_MODEL);
    }

    /**
     * Writes ID references to the tree-prior likelihood elements for inclusion
     * in the log output. Unlike {@link #writePriorLikelihoodReference}, the
     * skyline case deliberately omits the "eml1" smoothing model and the
     * logistic case omits the boolean likelihood (see commented-out lines).
     *
     * @param prior  the partition tree prior selecting which likelihood to reference
     * @param model  the partition tree model supplying the prefix
     * @param writer the XML writer receiving the idref elements
     */
    public void writePriorLikelihoodReferenceLog(PartitionTreePrior prior, PartitionTreeModel model, XMLWriter writer) {
        //tree model prefix
        setModelPrefix(model.getPrefix()); // only has prefix, if (options.getPartitionTreePriors().size() > 1)

        switch (prior.getNodeHeightPrior()) {
            case YULE:
            case BIRTH_DEATH:
writer.writeIDref(SpeciationLikelihood.SPECIATION_LIKELIHOOD, modelPrefix + "speciation");
                break;
            case SKYLINE:
                writer.writeIDref(BayesianSkylineLikelihood.SKYLINE_LIKELIHOOD, modelPrefix + "skyline");
                // writer.writeIDref(ExponentialMarkovModel.EXPONENTIAL_MARKOV_MODEL, modelPrefix + "eml1");
                break;
            case GMRF_SKYRIDE:
                writer.writeIDref(GMRFSkyrideLikelihood.SKYLINE_LIKELIHOOD, modelPrefix + "skyride");
                break;
            case LOGISTIC:
                // writer.writeIDref(BooleanLikelihood.BOOLEAN_LIKELIHOOD, modelPrefix + "booleanLikelihood1");
                writer.writeIDref(CoalescentLikelihood.COALESCENT_LIKELIHOOD, modelPrefix + COALESCENT);
                break;
            case EXTENDED_SKYLINE:
                // only 1 coalescent, so write it separately after this method
            case SPECIES_YULE:
            case SPECIES_BIRTH_DEATH:
                // do not need
                break;
            default:
                // All remaining parametric priors share a single coalescent likelihood.
                writer.writeIDref(CoalescentLikelihood.COALESCENT_LIKELIHOOD, modelPrefix + COALESCENT);
        }
    }

    /**
     * Writes ID references to the tree-prior likelihood elements for inclusion
     * in the posterior / prior density (as opposed to the log columns written by
     * {@link #writePriorLikelihoodReferenceLog}). The skyline case additionally
     * references the "eml1" smoothing model and the logistic case the boolean
     * likelihood.
     *
     * @param prior  the partition tree prior selecting which likelihood to reference
     * @param model  the partition tree model supplying the prefix
     * @param writer the XML writer receiving the idref elements
     */
    // id is written in writePriorLikelihood (PartitionTreePrior prior, PartitionTreeModel model, XMLWriter writer)
    public void writePriorLikelihoodReference(PartitionTreePrior prior, PartitionTreeModel model, XMLWriter writer) {
        //tree model prefix
        setModelPrefix(model.getPrefix()); // only has prefix, if (options.getPartitionTreePriors().size() > 1)

        switch (prior.getNodeHeightPrior()) {
            case YULE:
            case BIRTH_DEATH:
                writer.writeIDref(SpeciationLikelihood.SPECIATION_LIKELIHOOD, modelPrefix + "speciation");
                break;
            case SKYLINE:
                writer.writeIDref(BayesianSkylineLikelihood.SKYLINE_LIKELIHOOD, modelPrefix + "skyline");
                writer.writeIDref(ExponentialMarkovModel.EXPONENTIAL_MARKOV_MODEL, modelPrefix + "eml1");
                break;
            case GMRF_SKYRIDE:
                writer.writeIDref(GMRFSkyrideLikelihood.SKYLINE_LIKELIHOOD, modelPrefix + "skyride");
                break;
            case LOGISTIC:
                writer.writeIDref(BooleanLikelihood.BOOLEAN_LIKELIHOOD, modelPrefix + "booleanLikelihood1");
                writer.writeIDref(CoalescentLikelihood.COALESCENT_LIKELIHOOD, modelPrefix + COALESCENT);
                break;
            case EXTENDED_SKYLINE:
                // only 1 coalescent, so write it in writeEBSPVariableDemographicReference
            case SPECIES_YULE:
            case SPECIES_BIRTH_DEATH:
                // do not need
                break;
            default:
                writer.writeIDref(CoalescentLikelihood.COALESCENT_LIKELIHOOD, modelPrefix + COALESCENT);
        }
    }

    /**
     * Writes the EXTENDED_SKYLINE-only references: the single shared coalescent
     * likelihood plus a mixed distribution likelihood that applies the
     * populationMeanDist exponential prior to the popSize parameter, partitioned
     * by the indicator parameter. Note both distribution slots reference the
     * same "demographic.populationMeanDist" model. No-op for other priors.
     *
     * @param prior  the partition tree prior (checked for EXTENDED_SKYLINE)
     * @param writer the XML writer receiving the elements
     */
    public void writeEBSPVariableDemographicReference(PartitionTreePrior prior, XMLWriter writer) {
        setModelPrefix(prior.getPrefix()); //TODO: make suitable for *BEAST

        if (prior.getNodeHeightPrior() == TreePriorType.EXTENDED_SKYLINE) {

            writer.writeIDref(CoalescentLikelihood.COALESCENT_LIKELIHOOD, modelPrefix + COALESCENT); // only 1 coalescent

            writer.writeOpenTag(MixedDistributionLikelihood.DISTRIBUTION_LIKELIHOOD);

            writer.writeOpenTag(MixedDistributionLikelihood.DISTRIBUTION0);
            writer.writeIDref(ExponentialDistributionModel.EXPONENTIAL_DISTRIBUTION_MODEL,
                    modelPrefix + "demographic.populationMeanDist");
            writer.writeCloseTag(MixedDistributionLikelihood.DISTRIBUTION0);

            writer.writeOpenTag(MixedDistributionLikelihood.DISTRIBUTION1);
            writer.writeIDref(ExponentialDistributionModel.EXPONENTIAL_DISTRIBUTION_MODEL,
                    modelPrefix + "demographic.populationMeanDist");
            writer.writeCloseTag(MixedDistributionLikelihood.DISTRIBUTION1);

            writer.writeOpenTag(MixedDistributionLikelihood.DATA);
            writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "demographic.popSize");
            writer.writeCloseTag(MixedDistributionLikelihood.DATA);

            writer.writeOpenTag(MixedDistributionLikelihood.INDICATORS);
            writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + "demographic.indicators");
            writer.writeCloseTag(MixedDistributionLikelihood.INDICATORS);

            writer.writeCloseTag(MixedDistributionLikelihood.DISTRIBUTION_LIKELIHOOD);
        }
    }
}
package dr.inference.model; import dr.xml.*; import java.util.ArrayList; /** * @author Alexei Drummond * @version $Id: JeffreysPriorLikelihood.java,v 1.9 2005/07/27 22:09:21 rambaut Exp $ */ public class JeffreysPriorLikelihood extends Likelihood.Abstract { public static final String JEFFREYS_PRIOR = "jeffreysPrior"; public static final String DATA = "data"; public JeffreysPriorLikelihood() { super(null); } /** * Adds a statistic, this is the data for which the likelihood is calculated. */ public void addData(Statistic data) { dataList.add(data); } protected ArrayList<Statistic> dataList = new ArrayList<Statistic>(); /** * Overridden to always return false. */ protected boolean getLikelihoodKnown() { return false; } /** * Calculate the log likelihood of the current state. * @return the log likelihood. */ public double calculateLogLikelihood() { double logL = 0.0; for (Statistic statistic : dataList) { for (int j = 0; j < statistic.getDimension(); j++) { // replace v += log(1/x) with v -= log(x) , save a division logL -= Math.log(statistic.getStatisticValue(j)); } } return logL; } /** * Reads a distribution likelihood from a DOM Document element. */ public static XMLObjectParser PARSER = new AbstractXMLObjectParser() { public String getParserName() { return JEFFREYS_PRIOR; } public Object parseXMLObject(XMLObject xo) throws XMLParseException { JeffreysPriorLikelihood likelihood = new JeffreysPriorLikelihood(); XMLObject cxo = xo; if (xo.hasChildNamed(DATA)) { cxo = (XMLObject)xo.getChild(DATA); } for (int i = 0; i < cxo.getChildCount(); i++) { if (cxo.getChild(i) instanceof Statistic) { likelihood.addData( (Statistic)cxo.getChild(i)); } } return likelihood; }
package dr.inference.operators.hmc;

import dr.inference.hmc.GradientWrtParameterProvider;
import dr.inference.model.Likelihood;
import dr.inference.model.Parameter;
import dr.inference.operators.AdaptationMode;
import dr.inference.operators.GeneralOperator;
import dr.inference.operators.GibbsOperator;
import dr.math.MathUtils;
import dr.math.matrixAlgebra.WrappedVector;
import dr.util.Transform;

import java.util.Arrays;

/**
 * Hamiltonian Monte Carlo operator that chooses the trajectory length
 * dynamically: the trajectory is repeatedly doubled (in a random direction)
 * until the endpoints start moving towards each other ("U-turn") or a
 * divergence is detected. The structure follows the slice-sampling No-U-Turn
 * Sampler of Hoffman &amp; Gelman (2014), with dual-averaging step-size
 * adaptation handled by {@code StepSize}.
 *
 * @author Marc A. Suchard
 * @author Zhenyu Zhang
 */
public class NoUTurnOperator extends HamiltonianMonteCarloOperator implements GeneralOperator, GibbsOperator {

    // Dimension of the sampled parameter vector (from the gradient provider).
    private final int dim = gradientProvider.getDimension();

    // Tuning constants of the sampler.
    class Options {
        // Divergence guard: a leapfrog state whose joint density falls more than
        // this far below the slice level terminates the trajectory.
        private double logProbErrorTol = 100.0;
        // Max leapfrog iterations allowed while searching for an initial step size.
        private int findMax = 100;
        // Max tree height: at most 2^maxHeight doublings per draw.
        private int maxHeight = 10;
    }

    private final Options options = new Options();

    private NoUTurnOperator(AdaptationMode mode, double weight, GradientWrtParameterProvider gradientProvider,
                            Parameter parameter, Transform transform, Parameter mask,
                            double stepSize, int nSteps) {
        super(mode, weight, gradientProvider, parameter, transform, mask, stepSize, nSteps, 0.0, 1E-3);
    }

    public NoUTurnOperator(AdaptationMode mode, double weight, GradientWrtParameterProvider gradientProvider,
                           Parameter parameter, Transform transform, Parameter mask,
                           HamiltonianMonteCarloOperator.Options runtimeOptions) {
        this(mode, weight, gradientProvider, parameter, transform, mask,
                runtimeOptions.initialStepSize, runtimeOptions.nSteps);
    }

    @Override
    protected InstabilityHandler getDefaultInstabilityHandler() {
        // Instabilities are caught and handled explicitly in this class
        // (see handleInstability), so the base class is told to ignore them.
        return InstabilityHandler.IGNORE;
    }

    // Dual-averaging step-size state; lazily initialised on the first doOperation().
    private StepSize stepSizeInformation;

    @Override
    public String getOperatorName() {
        return "No-UTurn-Sampler operator";
    }

    @Override
    public double doOperation(Likelihood likelihood) {

        final double[] initialPosition = leapFrogEngine.getInitialPosition();

        if (stepSizeInformation == null) {
            // First call: heuristically find a step size giving ~50% acceptance.
            stepSizeInformation = findReasonableStepSize(initialPosition);
        }

        double[] position = takeOneStep(getCount() + 1, initialPosition);
        leapFrogEngine.setParameter(position);

        // Gibbs-style operator: the new state is always accepted (log ratio 0).
        return 0.0;
    }

    /**
     * Performs one NUTS draw: samples a momentum and a slice level, then grows
     * the trajectory tree by successive doublings until it must stop, tracking
     * the current sample.
     *
     * @param m               draw index fed to dual-averaging adaptation
     * @param initialPosition starting position (unchanged)
     * @return the sampled end position (may equal the initial position)
     */
    private double[] takeOneStep(long m, double[] initialPosition) {

        double[] endPosition = Arrays.copyOf(initialPosition, initialPosition.length);
        // final double[][] mass = massProvider.getMass();
        final WrappedVector initialMomentum = mask(preconditioning.drawInitialMomentum(), mask);

        final double initialJointDensity = getJointProbability(gradientProvider, initialMomentum);

        // Slice variable u ~ Uniform(0, exp(jointDensity)), kept on the log scale.
        double logSliceU = Math.log(MathUtils.nextDouble()) + initialJointDensity;

        TreeState trajectoryTree = new TreeState(initialPosition, initialMomentum.getBuffer(), 1, true);
        // Trajectory of Hamiltonian dynamics endowed with a binary tree structure.

        int height = 0;

        // Double the trajectory until a U-turn/divergence or the height cap stops it.
        while (trajectoryTree.flagContinue) {

            double[] tmp = updateTrajectoryTree(trajectoryTree, height, logSliceU, initialJointDensity);
            if (tmp != null) {
                endPosition = tmp;
            }

            height++;

            if (height > options.maxHeight) {
                trajectoryTree.flagContinue = false;
            }
        }

        // Feed the accumulated acceptance statistics to step-size adaptation.
        stepSizeInformation.update(m, trajectoryTree.cumAcceptProb, trajectoryTree.numAcceptProbStates);

        return endPosition;
    }

    /**
     * Performs one doubling of the trajectory in a uniformly random direction
     * and merges the new subtree into the running tree.
     *
     * @return the new sample if the subtree's sample was accepted, else null
     */
    private double[] updateTrajectoryTree(TreeState trajectoryTree, int depth, double logSliceU,
                                          double initialJointDensity) {

        double[] endPosition = null;

        // Pick backwards (-1) or forwards (+1) in fictitious time with prob 1/2 each.
        final double uniform1 = MathUtils.nextDouble();
        int direction = (uniform1 < 0.5) ? -1 : 1;

        TreeState nextTrajectoryTree = buildTree(
                trajectoryTree.getPosition(direction), trajectoryTree.getMomentum(direction),
                direction, logSliceU, depth, stepSizeInformation.getStepSize(), initialJointDensity);

        if (nextTrajectoryTree.flagContinue) {
            // Swap in the new subtree's sample with probability |subtree| / |tree|.
            final double uniform = MathUtils.nextDouble();
            final double acceptProb = (double) nextTrajectoryTree.numNodes / (double) trajectoryTree.numNodes;
            if (uniform < acceptProb) {
                endPosition = nextTrajectoryTree.getSample();
            }
        }

        trajectoryTree.mergeNextTree(nextTrajectoryTree, direction);

        return endPosition;
    }

    /**
     * Recursively builds a balanced binary subtree of 2^height leapfrog states
     * extending from the given endpoint in the given direction.
     */
    private TreeState buildTree(double[] position, double[] momentum, int direction,
                                double logSliceU, int height, double stepSize, double initialJointDensity) {

        if (height == 0) {
            return buildBaseCase(position, momentum, direction, logSliceU, stepSize, initialJointDensity);
        } else {
            return buildRecursiveCase(position, momentum, direction, logSliceU, height, stepSize, initialJointDensity);
        }
    }

    // Central choke point for numerical instability during leapfrog integration.
    private void handleInstability() {
        throw new RuntimeException("Numerical instability; need to handle"); // TODO
    }

    /**
     * Base case (height 0): a single leapfrog step from the given endpoint.
     * The resulting one-node tree records whether the new state lies inside the
     * slice (numNodes) and whether the trajectory may continue (divergence guard),
     * plus the Metropolis acceptance statistic for dual averaging.
     */
    private TreeState buildBaseCase(double[] inPosition, double[] inMomentum, int direction,
                                    double logSliceU, double stepSize, double initialJointDensity) {

        // Make deep copy of position and momentum
        double[] position = Arrays.copyOf(inPosition, inPosition.length);
        WrappedVector momentum = new WrappedVector.Raw(Arrays.copyOf(inMomentum, inMomentum.length));

        leapFrogEngine.setParameter(position);

        // "one frog jump!"
        try {
            doLeap(position, momentum, direction * stepSize);
        } catch (NumericInstabilityException e) {
            handleInstability();
        }

        double logJointProbAfter = getJointProbability(gradientProvider, momentum);

        // The new state counts as a valid slice sample only if u <= exp(joint density).
        final int numNodes = (logSliceU <= logJointProbAfter ? 1 : 0);

        // Stop if the joint density fell more than logProbErrorTol below the slice level.
        final boolean flagContinue = (logSliceU < options.logProbErrorTol + logJointProbAfter);

        // Values for dual-averaging
        final double acceptProb = Math.min(1.0, Math.exp(logJointProbAfter - initialJointDensity));
        final int numAcceptProbStates = 1;

        // Restore the model state to the pre-leap position before returning.
        leapFrogEngine.setParameter(inPosition);

        return new TreeState(position, momentum.getBuffer(), numNodes, flagContinue, acceptProb, numAcceptProbStates);
    }

    /**
     * Recursive case: build two half-height subtrees end-to-end and merge them.
     * NOTE(review): the first recursion uses the {@code stepSize} argument while
     * the second uses {@code stepSizeInformation.getStepSize()}; these are the
     * same value at the current call site (updateTrajectoryTree passes
     * getStepSize()) but the asymmetry looks unintentional — confirm.
     */
    private TreeState buildRecursiveCase(double[] inPosition, double[] inMomentum, int direction,
                                         double logSliceU, int height, double stepSize, double initialJointDensity) {

        TreeState subtree = buildTree(inPosition, inMomentum, direction, logSliceU, height - 1, // Recursion
                stepSize, initialJointDensity);

        if (subtree.flagContinue) {

            TreeState nextSubtree = buildTree(subtree.getPosition(direction), subtree.getMomentum(direction), direction,
                    logSliceU, height - 1, stepSizeInformation.getStepSize(), initialJointDensity);

            subtree.mergeNextTree(nextSubtree, direction);

        }
        return subtree;
    }

    /**
     * One leapfrog step: half momentum update, full position update, half
     * momentum update (gradient masked on each momentum update).
     */
    private void doLeap(final double[] position,
                        final WrappedVector momentum,
                        final double stepSize) throws NumericInstabilityException {
        leapFrogEngine.updateMomentum(position, momentum.getBuffer(),
                mask(gradientProvider.getGradientLogDensity(), mask), stepSize / 2);
        leapFrogEngine.updatePosition(position, momentum, stepSize);
        leapFrogEngine.updateMomentum(position, momentum.getBuffer(),
                mask(gradientProvider.getGradientLogDensity(), mask), stepSize / 2);
    }

    /**
     * Heuristic initial step-size search: starting from 0.1, repeatedly doubles
     * or halves the step size (direction fixed by the first jump) until the
     * joint-density ratio of a single leapfrog step crosses 1/2.
     *
     * @throws RuntimeException if no suitable step size is found within
     *                          {@code options.findMax} iterations
     */
    private StepSize findReasonableStepSize(double[] initialPosition) {

        double stepSize = 0.1;
        // final double[] mass = massProvider.getMass();
        WrappedVector momentum = preconditioning.drawInitialMomentum();
        int count = 1;

        double[] position = Arrays.copyOf(initialPosition, dim);

        double probBefore = getJointProbability(gradientProvider, momentum);

        try {
            doLeap(position, momentum, stepSize);
        } catch (NumericInstabilityException e) {
            handleInstability();
        }

        double probAfter = getJointProbability(gradientProvider, momentum);

        // a = +1 -> keep doubling; a = -1 -> keep halving.
        double a = ((probAfter - probBefore) > Math.log(0.5) ? 1 : -1);

        double probRatio = Math.exp(probAfter - probBefore);

        // Scale the step size until the acceptance ratio crosses the 2^(-a) threshold.
        while (Math.pow(probRatio, a) > Math.pow(2, -a)) {

            probBefore = probAfter;

            //"one frog jump!"
            try {
                doLeap(position, momentum, stepSize);
            } catch (NumericInstabilityException e) {
                handleInstability();
            }

            probAfter = getJointProbability(gradientProvider, momentum);
            probRatio = Math.exp(probAfter - probBefore);

            stepSize = Math.pow(2, a) * stepSize;
            count++;

            if (count > options.findMax) {
                throw new RuntimeException("Cannot find a reasonable step-size in " + options.findMax + " iterations");
            }
        }
        // Restore the model state clobbered by the trial leapfrog steps.
        leapFrogEngine.setParameter(initialPosition);
        return new StepSize(stepSize);
    }

    // Convenience overload: evaluates the U-turn test on a tree's two endpoints.
    private static boolean computeStopCriterion(boolean flagContinue, TreeState state) {
        return computeStopCriterion(flagContinue,
                state.getPosition(1), state.getPosition(-1),
                state.getMomentum(1), state.getMomentum(-1));
    }

    /**
     * U-turn test: the trajectory continues only while the momentum at both
     * endpoints still points away from the other endpoint (non-negative dot
     * product with the endpoint separation).
     */
    private static boolean computeStopCriterion(boolean flagContinue,
                                                double[] positionPlus, double[] positionMinus,
                                                double[] momentumPlus, double[] momentumMinus) {

        double[] positionDifference = subtractArray(positionPlus, positionMinus);

        return flagContinue &&
                getDotProduct(positionDifference, momentumMinus) >= 0 &&
                getDotProduct(positionDifference, momentumPlus) >= 0;
    }

    // Standard Euclidean dot product of two equal-length vectors.
    private static double getDotProduct(double[] x, double[] y) {

        assert (x.length == y.length);
        final int dim = x.length;

        double total = 0.0;
        for (int i = 0; i < dim; i++) {
            total += x[i] * y[i];
        }
        return total;
    }

    // Element-wise difference a - b of two equal-length vectors.
    private static double[] subtractArray(double[] a, double[] b) {

        assert (a.length == b.length);
        final int dim = a.length;

        double[] result = new double[dim];
        for (int i = 0; i < dim; i++) {
            result[i] = a[i] - b[i];
        }
        return result;
    }

    /**
     * Joint log density of the current (position, momentum) state:
     * log-likelihood minus kinetic energy minus the log-Jacobian of any
     * parameter transform. The position enters implicitly through the model
     * state previously set via leapFrogEngine.setParameter.
     */
    private double getJointProbability(GradientWrtParameterProvider gradientProvider, WrappedVector momentum) {

        assert (gradientProvider != null);
        assert (momentum != null);

        return gradientProvider.getLikelihood().getLogLikelihood() - getKineticEnergy(momentum)
                - leapFrogEngine.getParameterLogJacobian();
    }

    /**
     * A trajectory subtree: the two endpoints of the trajectory, the current
     * sample, and the bookkeeping needed for recursion and dual averaging.
     * Index convention for the 3-slot arrays: direction + 1, i.e.
     * [0] = backward endpoint, [1] = sample, [2] = forward endpoint.
     */
    private class TreeState {

        private TreeState(double[] position, double[] moment,
                          int numNodes, boolean flagContinue) {
            this(position, moment, numNodes, flagContinue, 0.0, 0);
        }

        private TreeState(double[] position, double[] moment,
                          int numNodes, boolean flagContinue,
                          double cumAcceptProb, int numAcceptProbStates) {
            this.position = new double[3][];
            this.momentum = new double[3][];

            // All three slots initially alias the same arrays; they diverge as
            // mergeNextTree() installs new endpoints/samples.
            for (int i = 0; i < 3; ++i) {
                this.position[i] = position;
                this.momentum[i] = moment;
            }

            // Recursion variables
            this.numNodes = numNodes;
            this.flagContinue = flagContinue;

            // Dual-averaging variables
            this.cumAcceptProb = cumAcceptProb;
            this.numAcceptProbStates = numAcceptProbStates;
        }

        private double[] getPosition(int direction) {
            return position[getIndex(direction)];
        }

        private double[] getMomentum(int direction) {
            return momentum[getIndex(direction)];
        }

        private double[] getSample() {
            /*
            Returns a state chosen uniformly from the acceptable states along a
            hamiltonian dynamics trajectory tree. The sample is updated recursively
            while building trees.
            */
            return position[getIndex(0)];
        }

        private void setPosition(int direction, double[] position) {
            this.position[getIndex(direction)] = position;
        }

        private void setMomentum(int direction, double[] momentum) {
            this.momentum[getIndex(direction)] = momentum;
        }

        private void setSample(double[] position) {
            setPosition(0, position);
        }

        private int getIndex(int direction) { // valid directions: -1, 0, +1
            assert (direction >= -1 && direction <= 1);
            return direction + 1;
        }

        /**
         * Absorbs a freshly built subtree: adopts its far endpoint, possibly
         * its sample (size-proportional), sums node counts and acceptance
         * statistics, and re-evaluates the U-turn stop criterion.
         */
        private void mergeNextTree(TreeState nextTree, int direction) {

            setPosition(direction, nextTree.getPosition(direction));
            setMomentum(direction, nextTree.getMomentum(direction));

            updateSample(nextTree);

            numNodes += nextTree.numNodes;
            flagContinue = computeStopCriterion(nextTree.flagContinue, this);

            cumAcceptProb += nextTree.cumAcceptProb;
            numAcceptProbStates += nextTree.numAcceptProbStates;
        }

        // Replace the sample with the subtree's sample with probability
        // |subtree| / (|this| + |subtree|), keeping the sample uniform.
        private void updateSample(TreeState nextTree) {
            double uniform = MathUtils.nextDouble();
            if (nextTree.numNodes > 0
                    && uniform < ((double) nextTree.numNodes / (double) (numNodes + nextTree.numNodes))) {
                setSample(nextTree.getSample());
            }
        }

        final private double[][] position;
        final private double[][] momentum;

        private int numNodes;
        private boolean flagContinue;

        private double cumAcceptProb;
        private int numAcceptProbStates;
    }
}